diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index e9b07400..0984299a 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -57,7 +57,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.10" - name: Install PyOxidizer run: | pip install pyoxidizer==0.22.0 @@ -113,7 +113,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.10" - name: Install PyOxidizer run: | pip install pyoxidizer==0.22.0 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 881ced4e..9fa19d68 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -51,7 +51,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.10" - name: Install PyOxidizer run: | pip install pyoxidizer==0.22.0 @@ -91,7 +91,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.10" - name: Install PyOxidizer run: | pip install pyoxidizer==0.22.0 diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 00f08369..f88ac334 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -8,6 +8,7 @@ on: - "**" paths: - .github/workflows/test.yml + - Makefile - vendor/** pull_request: @@ -16,7 +17,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.9] + python-version: ["3.10"] steps: - uses: actions/checkout@v2 @@ -32,7 +33,7 @@ jobs: runs-on: macos-latest strategy: matrix: - python-version: [3.9] + python-version: ["3.10"] steps: - uses: actions/checkout@v2 diff --git a/Makefile b/Makefile index bf86f73a..e79a9c39 100644 --- a/Makefile +++ b/Makefile @@ -9,8 +9,6 @@ BUILD_VERSION := latest _path_build: $(eval BUILDPATH := build/${ARCH}/release/${TARGET}) -_path_bin: - $(eval BINPATH := ${BUILDPATH}${BIN_SUFFIX}) _path_lib: _path_build $(eval LIBPATH := ${BUILDPATH}/lib) _path_assets: _path_build @@ -19,24 +17,37 @@ _path_assets: _path_build clean_build: _path_build @rm -rf ${BUILDPATH} +clean_src: + @rm -rf src/* + clean_dist: @rm -rf dist clean_vendor: @rm -rf vendor +sources: clean_src + @git clone https://github.com/python/importlib_metadata.git src/importlib_metadata && cd src/importlib_metadata && git checkout v4.12.0 + @git clone https://github.com/python-jsonschema/jsonschema.git src/jsonschema && cd src/jsonschema && git checkout v4.10.3 + @git clone https://github.com/lark-parser/lark.git src/lark && cd src/lark && git checkout 1.1.2 + @git clone https://github.com/python-poetry/poetry.git src/poetry && cd src/poetry && git checkout 1.2.0 + @git clone https://github.com/python-poetry/poetry-core.git src/poetry-core && cd src/poetry-core && git checkout 1.1.0 + @git clone https://github.com/pypa/virtualenv.git src/virtualenv && cd src/virtualenv && git checkout 20.16.3 + patches: @cd src/importlib_metadata && git diff --binary HEAD > ../../patches/importlib_metadata.patch + @cd src/jsonschema && git diff --binary HEAD > ../../patches/jsonschema.patch + @cd src/lark && git diff --binary HEAD > ../../patches/lark.patch @cd src/poetry-core && git diff --binary HEAD > ../../patches/poetry-core.patch @cd src/poetry && git diff --binary HEAD > ../../patches/poetry.patch - @cd src/requests && git diff --binary HEAD > ../../patches/requests.patch @cd src/virtualenv && git diff --binary HEAD > ../../patches/virtualenv.patch apply_patches: @cd src/importlib_metadata && git apply 
--reject --ignore-whitespace ../../patches/importlib_metadata.patch + @cd src/jsonschema && git apply --reject --ignore-whitespace ../../patches/jsonschema.patch + @cd src/lark && git apply --reject --ignore-whitespace ../../patches/lark.patch @cd src/poetry-core && git apply --reject --ignore-whitespace ../../patches/poetry-core.patch @cd src/poetry && git apply --reject --ignore-whitespace ../../patches/poetry.patch - @cd src/requests && git apply --reject --ignore-whitespace ../../patches/requests.patch @cd src/virtualenv && git apply --reject --ignore-whitespace ../../patches/virtualenv.patch vendor: clean_vendor @@ -50,12 +61,6 @@ tests: .venv/bin/pip install .[testing] pyfakefs && \ .venv/bin/python -m unittest discover && \ rm -r .venv - @cd vendor/requests && \ - python -m venv .venv && \ - .venv/bin/pip install -e .[socks] && \ - .venv/bin/pip install -r requirements-dev.txt && \ - .venv/bin/pytest tests && \ - rm -r .venv @cd vendor/virtualenv && \ python -m venv .venv && \ .venv/bin/pip install .[testing] && \ @@ -63,12 +68,17 @@ tests: rm -r .venv @cd vendor/poetry-core && \ python -m venv .venv && \ - .venv/bin/pip install ../requests ../virtualenv . pep517 pytest pytest-mock && \ + .venv/bin/pip install -r vendors/deps.txt && \ + .venv/bin/pip install ../jsonschema ../lark ../virtualenv . && \ + .venv/bin/pip install build pytest pytest-mock && \ .venv/bin/pytest && \ rm -r .venv @cd vendor/poetry && \ python -m venv .venv && \ - .venv/bin/pip install ../importlib_metadata ../requests ../virtualenv ../poetry-core . httpretty pytest pytest-mock==1.13.0 && \ + .venv/bin/pip install ../importlib_metadata ../jsonschema ../lark ../virtualenv && \ + .venv/bin/pip install -r ../poetry-core/vendors/deps.txt && \ + .venv/bin/pip install ../poetry-core . && \ + .venv/bin/pip install deepdiff flatdict httpretty pytest pytest-mock && \ .venv/bin/pytest && \ rm -r .venv @@ -83,12 +93,10 @@ build_win: _build_win assets _build_posix: _path_build _path_lib clean_build pyoxidizer build --release --target-triple=${ARCH} - @mv ${BUILDPATH}/bin/lib ${BUILDPATH} - @cp -a vendor/poetry-core/poetry/core/_vendor/. ${LIBPATH} + @rm ${BUILDPATH}/COPYING.txt _build_win: _path_build _path_lib clean_build pyoxidizer build --release --target-triple=${ARCH} --var WIN_BUILD 1 - @cp -a vendor/poetry-core/poetry/core/_vendor/. 
${LIBPATH} assets: _path_assets @mkdir -p ${ASSETSPATH} @@ -96,30 +104,28 @@ assets: _path_assets @mkdir -p ${ASSETSPATH}/virtualenv/create @mkdir -p ${ASSETSPATH}/virtualenv/discovery @mkdir -p ${ASSETSPATH}/virtualenv/seed/wheels - @cp -R vendor/poetry-core/poetry/core/version/grammars ${ASSETSPATH}/core/version/grammars + @cp -R vendor/poetry-core/src/poetry/core/version/grammars ${ASSETSPATH}/core/version/grammars @cp vendor/virtualenv/src/virtualenv/create/debug.py ${ASSETSPATH}/virtualenv/create/debug.py @cp vendor/virtualenv/src/virtualenv/discovery/py_info.py ${ASSETSPATH}/virtualenv/discovery/py_info.py @cp vendor/virtualenv/src/virtualenv/seed/wheels/embed/*.whl ${ASSETSPATH}/virtualenv/seed/wheels sign: _path_build _path_lib - @codesign -s - ${BUILDPATH}/bin/poetry + @codesign -s - ${BUILDPATH}/poetry @find ${LIBPATH} -name '*.so' -type f | xargs -I $$ codesign -s - $$ verify_build_linux: ARCH := ${ARCH_LINUX} -verify_build_linux: BIN_SUFFIX := /bin verify_build_linux: _verify_build verify_build_mac: ARCH := ${ARCH_MAC_INTEL} -verify_build_mac: BIN_SUFFIX := /bin verify_build_mac: _verify_build verify_build_win: ARCH := ${ARCH_WIN} verify_build_win: _verify_build -_verify_build: _path_build _path_bin - ${BINPATH}/poetry --version - ${BINPATH}/poetry config virtualenvs.in-project true - @cd tests && ../${BINPATH}/poetry install +_verify_build: _path_build + ${BUILDPATH}/poetry --version + ${BUILDPATH}/poetry config virtualenvs.in-project true + @cd tests && ../${BUILDPATH}/poetry install -vvv @rm -rf tests/.venv pack_linux: ARCH := ${ARCH_LINUX} diff --git a/README.md b/README.md index cce68c4b..d4050636 100644 --- a/README.md +++ b/README.md @@ -2,10 +2,16 @@ This project builds [Poetry](https://github.com/python-poetry/poetry) Python dependency management tool into a binary executable using [PyOxidizer](https://github.com/indygreg/PyOxidizer). -The aim is to have a Poetry instance which is fully independant of the local Python environment. +The aim is to have a Poetry instance fully independent of the local Python environment. > **Note:** due to patches implemented over Poetry components, this build might introduce unwanted bugs over Poetry project, use at your own risk. +Due to its nature, `poetry-bin` has some key differences compared to the "vanilla version", specifically: + +- `self` commands are dropped +- 1.2 plugins are not supported (yet?). 
The only included plugin is the `export` one +- the selection of the Python interpreter to use is slightly different, as it won't use `sys.executable` to make decisions + ## Installation You can install Poetry binary build using the install script: diff --git a/patches/importlib_metadata.patch b/patches/importlib_metadata.patch index e8895d78..e5695d08 100644 --- a/patches/importlib_metadata.patch +++ b/patches/importlib_metadata.patch @@ -1,48 +1,48 @@ diff --git a/importlib_metadata/__init__.py b/importlib_metadata/__init__.py -index b01e7e3..0d8feb5 100644 +index 8761307..9a5d8d2 100644 --- a/importlib_metadata/__init__.py +++ b/importlib_metadata/__init__.py -@@ -228,10 +228,13 @@ class Distribution: +@@ -579,8 +579,12 @@ class Distribution: if context and kwargs: raise ValueError("cannot accept context and kwargs") context = context or DistributionFinder.Context(**kwargs) - return itertools.chain.from_iterable( -- resolver(context) -- for resolver in cls._discover_resolvers() +- resolver(context) for resolver in cls._discover_resolvers() + return filter( + lambda v: hasattr(v, "_path"), + itertools.chain.from_iterable( + resolver(context) + for resolver in cls._discover_resolvers() - ) -+ ) ++ ) + ) @staticmethod - def at(path): -@@ -620,4 +623,4 @@ def requires(distribution_name): - return distribution(distribution_name).requires +diff --git a/pyproject.toml b/pyproject.toml +index 60de242..b240064 100644 +--- a/pyproject.toml ++++ b/pyproject.toml +@@ -1,12 +1,10 @@ + [build-system] +-requires = ["setuptools>=56", "setuptools_scm[toml]>=3.4.1"] ++requires = ["setuptools>=56"] + build-backend = "setuptools.build_meta" + [tool.black] + skip-string-normalization = true + +-[tool.setuptools_scm] +- + [tool.pytest-enabler.black] + addopts = "--black" --__version__ = version(__name__) -+__version__ = "1.7.0" diff --git a/setup.cfg b/setup.cfg -index 5dd09a4..d989cd0 100644 +index efd0a36..062f10b 100644 --- a/setup.cfg +++ b/setup.cfg -@@ -16,7 +16,6 @@ classifiers = - - [options] - python_requires = >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.* --setup_requires = setuptools-scm - install_requires = - zipp>=0.5 - pathlib2; python_version < '3' -diff --git a/setup.py b/setup.py -index d5d43d7..0c9823c 100644 ---- a/setup.py -+++ b/setup.py -@@ -1,3 +1,3 @@ - from setuptools import setup - --setup(use_scm_version=True) -+setup(use_scm_version=False) +@@ -1,5 +1,6 @@ + [metadata] + name = importlib_metadata ++version = 4.12.0 + author = Jason R. Coombs + author_email = jaraco@jaraco.com + description = Read metadata from Python packages diff --git a/patches/jsonschema.patch b/patches/jsonschema.patch new file mode 100644 index 00000000..a0cd77cd --- /dev/null +++ b/patches/jsonschema.patch @@ -0,0 +1,56 @@ +diff --git a/jsonschema/__version__.py b/jsonschema/__version__.py +new file mode 100644 +index 0000000..d32a48d +--- /dev/null ++++ b/jsonschema/__version__.py +@@ -0,0 +1 @@ ++__version__ = "4.10.3" +diff --git a/jsonschema/_utils.py b/jsonschema/_utils.py +index 7e11325..687b95a 100644 +--- a/jsonschema/_utils.py ++++ b/jsonschema/_utils.py +@@ -5,11 +5,7 @@ import json + import re + import sys + +-# The files() API was added in Python 3.9. +-if sys.version_info >= (3, 9): # pragma: no cover +- from importlib import resources +-else: # pragma: no cover +- import importlib_resources as resources # type: ignore ++from importlib import resources + + + class URIDict(MutableMapping): +@@ -56,10 +52,8 @@ def load_schema(name): + """ + Load a schema from ./schemas/``name``.json and return it. 
+ """ +- +- path = resources.files(__package__).joinpath(f"schemas/{name}.json") +- data = path.read_text(encoding="utf-8") +- return json.loads(data) ++ from . import __name__ ++ return json.loads(resources.read_text(f"{__name__}.schemas", f"{name}.json")) + + + def format_as_index(container, indices): +diff --git a/jsonschema/schemas/__init__.py b/jsonschema/schemas/__init__.py +new file mode 100644 +index 0000000..e69de29 +diff --git a/pyproject.toml b/pyproject.toml +index 5041a10..46a3e40 100644 +--- a/pyproject.toml ++++ b/pyproject.toml +@@ -1,9 +1,9 @@ + [build-system] +-requires = ["hatchling", "hatch-vcs"] ++requires = ["hatchling"] + build-backend = "hatchling.build" + + [tool.hatch.version] +-source = "vcs" ++path = "jsonschema/__version__.py" + + [project] + name = "jsonschema" diff --git a/patches/lark.patch b/patches/lark.patch new file mode 100644 index 00000000..6da6ebcf --- /dev/null +++ b/patches/lark.patch @@ -0,0 +1,70 @@ +diff --git a/lark/grammars/__init__.py b/lark/grammars/__init__.py +new file mode 100644 +index 0000000..e69de29 +diff --git a/lark/load_grammar.py b/lark/load_grammar.py +index fcdd9d0..581efce 100644 +--- a/lark/load_grammar.py ++++ b/lark/load_grammar.py +@@ -8,6 +8,7 @@ import pkgutil + from ast import literal_eval + from contextlib import suppress + from typing import List, Tuple, Union, Callable, Dict, Optional, Sequence ++from importlib import resources + + from .utils import bfs, logger, classify_bool, is_id_continue, is_id_start, bfs_all_unique, small_factors + from .lexer import Token, TerminalDef, PatternStr, PatternRE +@@ -23,7 +24,6 @@ from .tree import Tree, SlottedTree as ST + from .visitors import Transformer, Visitor, v_args, Transformer_InPlace, Transformer_NonRecursive + inline_args = v_args(inline=True) + +-__path__ = os.path.dirname(__file__) + IMPORT_PATHS = ['grammars'] + + EXT = '.lark' +@@ -318,7 +318,7 @@ class EBNF_to_BNF(Transformer_InPlace): + if mx < REPEAT_BREAK_THRESHOLD: + return ST('expansions', [ST('expansion', [rule] * n) for n in range(mn, mx + 1)]) + +- # For large repeat values, we break the repetition into sub-rules. ++ # For large repeat values, we break the repetition into sub-rules. + # We treat ``rule~mn..mx`` as ``rule~mn rule~0..(diff=mx-mn)``. + # We then use small_factors to split up mn and diff up into values [(a, b), ...] 
+ # This values are used with the help of _add_repeat_rule and _add_repeat_rule_opt +@@ -846,7 +846,9 @@ class FromPackageLoader: + for path in to_try: + full_path = os.path.join(path, grammar_path) + try: +- text: Optional[bytes] = pkgutil.get_data(self.pkg_name, full_path) ++ pkg = ".".join([self.pkg_name] + full_path.split(os.path.sep)[:-1]) ++ item = full_path.split(os.path.sep)[-1] ++ text: Optional[bytes] = resources.read_binary(pkg, item) + except IOError as e: + err = e + continue +@@ -1233,7 +1235,7 @@ class GrammarBuilder: + tree = _parse_grammar(grammar_text, grammar_name) + + imports: Dict[Tuple[str, ...], Tuple[Optional[str], Dict[str, str]]] = {} +- ++ + for stmt in tree.children: + if stmt.data == 'import': + dotted_path, base_path, aliases = self._unpack_import(stmt, grammar_name) +@@ -1316,7 +1318,7 @@ class GrammarBuilder: + if self.used_files.get(joined_path, h) != h: + raise RuntimeError("Grammar file was changed during importing") + self.used_files[joined_path] = h +- ++ + gb = GrammarBuilder(self.global_keep_all_tokens, self.import_paths, self.used_files) + gb.load_grammar(text, joined_path, mangle) + gb._remove_unused(map(mangle, aliases)) +@@ -1390,7 +1392,7 @@ def verify_used_files(file_hashes): + text = pkgutil.get_data(*path).decode('utf-8') + if text is None: # We don't know how to load the path. ignore it. + continue +- ++ + current = hashlib.md5(text.encode()).hexdigest() + if old != current: + logger.info("File %r changed, rebuilding Parser" % path) diff --git a/patches/poetry-core.patch b/patches/poetry-core.patch index 8c9bb675..3a5742fd 100644 --- a/patches/poetry-core.patch +++ b/patches/poetry-core.patch @@ -1,88 +1,44896 @@ -diff --git a/poetry/core/__init__.py b/poetry/core/__init__.py -index f7d95ec..99b5600 100644 ---- a/poetry/core/__init__.py -+++ b/poetry/core/__init__.py -@@ -9,7 +9,8 @@ except ImportError: +diff --git a/pyproject.toml b/pyproject.toml +index b705682..697a684 100644 +--- a/pyproject.toml ++++ b/pyproject.toml +@@ -38,9 +38,6 @@ generate-setup-file = false + [tool.poetry.dependencies] + python = "^3.7" - __version__ = "1.0.8" +-# required for compatibility +-importlib-metadata = {version = ">=1.7.0", python = "<3.8"} +- + [tool.poetry.dev-dependencies] + pre-commit = "^2.15.0" + pyrsistent = "^0.18.0" +@@ -116,6 +113,5 @@ drop = [ + pyrsistent = "https://raw.githubusercontent.com/tobgu/pyrsistent/master/LICENSE.mit" --__vendor_site__ = (Path(__file__).parent / "_vendor").as_posix() -+if not getattr(sys, "oxidized", False): -+ __vendor_site__ = (Path(__file__).parent / "_vendor").as_posix() + [build-system] +-requires = [] ++requires = ["poetry-core>=1.0.0"] + build-backend = "poetry.core.masonry.api" +-backend-path = ["src"] +diff --git a/src/poetry/core/__init__.py b/src/poetry/core/__init__.py +index 35854e9..9d63535 100644 +--- a/src/poetry/core/__init__.py ++++ b/src/poetry/core/__init__.py +@@ -1,15 +1,6 @@ + from __future__ import annotations + +-import sys +- +-from pathlib import Path +- + # this cannot presently be replaced with importlib.metadata.version as when building + # itself, poetry-core is not available as an installed distribution. 
+ __version__ = "1.1.0" +- +-__vendor_site__ = (Path(__file__).parent / "_vendor").as_posix() +- -if __vendor_site__ not in sys.path: - sys.path.insert(0, __vendor_site__) -+ if __vendor_site__ not in sys.path: -+ sys.path.insert(0, __vendor_site__) -diff --git a/poetry/core/json/__init__.py b/poetry/core/json/__init__.py -index 83ecab7..9f94540 100644 ---- a/poetry/core/json/__init__.py -+++ b/poetry/core/json/__init__.py -@@ -1,29 +1,26 @@ +diff --git a/src/poetry/core/_vendor/_pyrsistent_version.py b/src/poetry/core/_vendor/_pyrsistent_version.py +deleted file mode 100644 +index 5877c8d..0000000 +--- a/src/poetry/core/_vendor/_pyrsistent_version.py ++++ /dev/null +@@ -1 +0,0 @@ +-__version__ = '0.18.1' +diff --git a/src/poetry/core/_vendor/attr/__init__.py b/src/poetry/core/_vendor/attr/__init__.py +deleted file mode 100644 +index 386305d..0000000 +--- a/src/poetry/core/_vendor/attr/__init__.py ++++ /dev/null +@@ -1,79 +0,0 @@ +-# SPDX-License-Identifier: MIT +- +- +-import sys +- +-from functools import partial +- +-from . import converters, exceptions, filters, setters, validators +-from ._cmp import cmp_using +-from ._config import get_run_validators, set_run_validators +-from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types +-from ._make import ( +- NOTHING, +- Attribute, +- Factory, +- attrib, +- attrs, +- fields, +- fields_dict, +- make_class, +- validate, +-) +-from ._version_info import VersionInfo +- +- +-__version__ = "22.1.0" +-__version_info__ = VersionInfo._from_version_string(__version__) +- +-__title__ = "attrs" +-__description__ = "Classes Without Boilerplate" +-__url__ = "https://www.attrs.org/" +-__uri__ = __url__ +-__doc__ = __description__ + " <" + __uri__ + ">" +- +-__author__ = "Hynek Schlawack" +-__email__ = "hs@ox.cx" +- +-__license__ = "MIT" +-__copyright__ = "Copyright (c) 2015 Hynek Schlawack" +- +- +-s = attributes = attrs +-ib = attr = attrib +-dataclass = partial(attrs, auto_attribs=True) # happy Easter ;) +- +-__all__ = [ +- "Attribute", +- "Factory", +- "NOTHING", +- "asdict", +- "assoc", +- "astuple", +- "attr", +- "attrib", +- "attributes", +- "attrs", +- "cmp_using", +- "converters", +- "evolve", +- "exceptions", +- "fields", +- "fields_dict", +- "filters", +- "get_run_validators", +- "has", +- "ib", +- "make_class", +- "resolve_types", +- "s", +- "set_run_validators", +- "setters", +- "validate", +- "validators", +-] +- +-if sys.version_info[:2] >= (3, 6): +- from ._next_gen import define, field, frozen, mutable # noqa: F401 +- +- __all__.extend(("define", "field", "frozen", "mutable")) +diff --git a/src/poetry/core/_vendor/attr/_cmp.py b/src/poetry/core/_vendor/attr/_cmp.py +deleted file mode 100644 +index 81b99e4..0000000 +--- a/src/poetry/core/_vendor/attr/_cmp.py ++++ /dev/null +@@ -1,155 +0,0 @@ +-# SPDX-License-Identifier: MIT +- +- +-import functools +-import types +- +-from ._make import _make_ne +- +- +-_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="} +- +- +-def cmp_using( +- eq=None, +- lt=None, +- le=None, +- gt=None, +- ge=None, +- require_same_type=True, +- class_name="Comparable", +-): +- """ +- Create a class that can be passed into `attr.ib`'s ``eq``, ``order``, and +- ``cmp`` arguments to customize field comparison. +- +- The resulting class will have a full set of ordering methods if +- at least one of ``{lt, le, gt, ge}`` and ``eq`` are provided. +- +- :param Optional[callable] eq: `callable` used to evaluate equality +- of two objects. 
+- :param Optional[callable] lt: `callable` used to evaluate whether +- one object is less than another object. +- :param Optional[callable] le: `callable` used to evaluate whether +- one object is less than or equal to another object. +- :param Optional[callable] gt: `callable` used to evaluate whether +- one object is greater than another object. +- :param Optional[callable] ge: `callable` used to evaluate whether +- one object is greater than or equal to another object. +- +- :param bool require_same_type: When `True`, equality and ordering methods +- will return `NotImplemented` if objects are not of the same type. +- +- :param Optional[str] class_name: Name of class. Defaults to 'Comparable'. +- +- See `comparison` for more details. +- +- .. versionadded:: 21.1.0 +- """ +- +- body = { +- "__slots__": ["value"], +- "__init__": _make_init(), +- "_requirements": [], +- "_is_comparable_to": _is_comparable_to, +- } +- +- # Add operations. +- num_order_functions = 0 +- has_eq_function = False +- +- if eq is not None: +- has_eq_function = True +- body["__eq__"] = _make_operator("eq", eq) +- body["__ne__"] = _make_ne() +- +- if lt is not None: +- num_order_functions += 1 +- body["__lt__"] = _make_operator("lt", lt) +- +- if le is not None: +- num_order_functions += 1 +- body["__le__"] = _make_operator("le", le) +- +- if gt is not None: +- num_order_functions += 1 +- body["__gt__"] = _make_operator("gt", gt) +- +- if ge is not None: +- num_order_functions += 1 +- body["__ge__"] = _make_operator("ge", ge) +- +- type_ = types.new_class( +- class_name, (object,), {}, lambda ns: ns.update(body) +- ) +- +- # Add same type requirement. +- if require_same_type: +- type_._requirements.append(_check_same_type) +- +- # Add total ordering if at least one operation was defined. +- if 0 < num_order_functions < 4: +- if not has_eq_function: +- # functools.total_ordering requires __eq__ to be defined, +- # so raise early error here to keep a nice stack. +- raise ValueError( +- "eq must be define is order to complete ordering from " +- "lt, le, gt, ge." +- ) +- type_ = functools.total_ordering(type_) +- +- return type_ +- +- +-def _make_init(): +- """ +- Create __init__ method. +- """ +- +- def __init__(self, value): +- """ +- Initialize object with *value*. +- """ +- self.value = value +- +- return __init__ +- +- +-def _make_operator(name, func): +- """ +- Create operator method. +- """ +- +- def method(self, other): +- if not self._is_comparable_to(other): +- return NotImplemented +- +- result = func(self.value, other.value) +- if result is NotImplemented: +- return NotImplemented +- +- return result +- +- method.__name__ = "__%s__" % (name,) +- method.__doc__ = "Return a %s b. Computed by attrs." % ( +- _operation_names[name], +- ) +- +- return method +- +- +-def _is_comparable_to(self, other): +- """ +- Check whether `other` is comparable to `self`. +- """ +- for func in self._requirements: +- if not func(self, other): +- return False +- return True +- +- +-def _check_same_type(self, other): +- """ +- Return True if *self* and *other* are of the same type, False otherwise. 
+- """ +- return other.value.__class__ is self.value.__class__ +diff --git a/src/poetry/core/_vendor/attr/_compat.py b/src/poetry/core/_vendor/attr/_compat.py +deleted file mode 100644 +index 5826493..0000000 +--- a/src/poetry/core/_vendor/attr/_compat.py ++++ /dev/null +@@ -1,185 +0,0 @@ +-# SPDX-License-Identifier: MIT +- +- +-import inspect +-import platform +-import sys +-import threading +-import types +-import warnings +- +-from collections.abc import Mapping, Sequence # noqa +- +- +-PYPY = platform.python_implementation() == "PyPy" +-PY36 = sys.version_info[:2] >= (3, 6) +-HAS_F_STRINGS = PY36 +-PY310 = sys.version_info[:2] >= (3, 10) +- +- +-if PYPY or PY36: +- ordered_dict = dict +-else: +- from collections import OrderedDict +- +- ordered_dict = OrderedDict +- +- +-def just_warn(*args, **kw): +- warnings.warn( +- "Running interpreter doesn't sufficiently support code object " +- "introspection. Some features like bare super() or accessing " +- "__class__ will not work with slotted classes.", +- RuntimeWarning, +- stacklevel=2, +- ) +- +- +-class _AnnotationExtractor: +- """ +- Extract type annotations from a callable, returning None whenever there +- is none. +- """ +- +- __slots__ = ["sig"] +- +- def __init__(self, callable): +- try: +- self.sig = inspect.signature(callable) +- except (ValueError, TypeError): # inspect failed +- self.sig = None +- +- def get_first_param_type(self): +- """ +- Return the type annotation of the first argument if it's not empty. +- """ +- if not self.sig: +- return None +- +- params = list(self.sig.parameters.values()) +- if params and params[0].annotation is not inspect.Parameter.empty: +- return params[0].annotation +- +- return None +- +- def get_return_type(self): +- """ +- Return the return type if it's not empty. +- """ +- if ( +- self.sig +- and self.sig.return_annotation is not inspect.Signature.empty +- ): +- return self.sig.return_annotation +- +- return None +- +- +-def make_set_closure_cell(): +- """Return a function of two arguments (cell, value) which sets +- the value stored in the closure cell `cell` to `value`. +- """ +- # pypy makes this easy. (It also supports the logic below, but +- # why not do the easy/fast thing?) +- if PYPY: +- +- def set_closure_cell(cell, value): +- cell.__setstate__((value,)) +- +- return set_closure_cell +- +- # Otherwise gotta do it the hard way. +- +- # Create a function that will set its first cellvar to `value`. +- def set_first_cellvar_to(value): +- x = value +- return +- +- # This function will be eliminated as dead code, but +- # not before its reference to `x` forces `x` to be +- # represented as a closure cell rather than a local. +- def force_x_to_be_a_cell(): # pragma: no cover +- return x +- +- try: +- # Extract the code object and make sure our assumptions about +- # the closure behavior are correct. +- co = set_first_cellvar_to.__code__ +- if co.co_cellvars != ("x",) or co.co_freevars != (): +- raise AssertionError # pragma: no cover +- +- # Convert this code object to a code object that sets the +- # function's first _freevar_ (not cellvar) to the argument. 
+- if sys.version_info >= (3, 8): +- +- def set_closure_cell(cell, value): +- cell.cell_contents = value +- +- else: +- args = [co.co_argcount] +- args.append(co.co_kwonlyargcount) +- args.extend( +- [ +- co.co_nlocals, +- co.co_stacksize, +- co.co_flags, +- co.co_code, +- co.co_consts, +- co.co_names, +- co.co_varnames, +- co.co_filename, +- co.co_name, +- co.co_firstlineno, +- co.co_lnotab, +- # These two arguments are reversed: +- co.co_cellvars, +- co.co_freevars, +- ] +- ) +- set_first_freevar_code = types.CodeType(*args) +- +- def set_closure_cell(cell, value): +- # Create a function using the set_first_freevar_code, +- # whose first closure cell is `cell`. Calling it will +- # change the value of that cell. +- setter = types.FunctionType( +- set_first_freevar_code, {}, "setter", (), (cell,) +- ) +- # And call it to set the cell. +- setter(value) +- +- # Make sure it works on this interpreter: +- def make_func_with_cell(): +- x = None +- +- def func(): +- return x # pragma: no cover +- +- return func +- +- cell = make_func_with_cell().__closure__[0] +- set_closure_cell(cell, 100) +- if cell.cell_contents != 100: +- raise AssertionError # pragma: no cover +- +- except Exception: +- return just_warn +- else: +- return set_closure_cell +- +- +-set_closure_cell = make_set_closure_cell() +- +-# Thread-local global to track attrs instances which are already being repr'd. +-# This is needed because there is no other (thread-safe) way to pass info +-# about the instances that are already being repr'd through the call stack +-# in order to ensure we don't perform infinite recursion. +-# +-# For instance, if an instance contains a dict which contains that instance, +-# we need to know that we're already repr'ing the outside instance from within +-# the dict's repr() call. +-# +-# This lives here rather than in _make.py so that the functions in _make.py +-# don't have a direct reference to the thread-local in their globals dict. +-# If they have such a reference, it breaks cloudpickle. +-repr_context = threading.local() +diff --git a/src/poetry/core/_vendor/attr/_config.py b/src/poetry/core/_vendor/attr/_config.py +deleted file mode 100644 +index 96d4200..0000000 +--- a/src/poetry/core/_vendor/attr/_config.py ++++ /dev/null +@@ -1,31 +0,0 @@ +-# SPDX-License-Identifier: MIT +- +- +-__all__ = ["set_run_validators", "get_run_validators"] +- +-_run_validators = True +- +- +-def set_run_validators(run): +- """ +- Set whether or not validators are run. By default, they are run. +- +- .. deprecated:: 21.3.0 It will not be removed, but it also will not be +- moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()` +- instead. +- """ +- if not isinstance(run, bool): +- raise TypeError("'run' must be bool.") +- global _run_validators +- _run_validators = run +- +- +-def get_run_validators(): +- """ +- Return whether or not validators are run. +- +- .. deprecated:: 21.3.0 It will not be removed, but it also will not be +- moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()` +- instead. 
+- """ +- return _run_validators +diff --git a/src/poetry/core/_vendor/attr/_funcs.py b/src/poetry/core/_vendor/attr/_funcs.py +deleted file mode 100644 +index a982d7c..0000000 +--- a/src/poetry/core/_vendor/attr/_funcs.py ++++ /dev/null +@@ -1,420 +0,0 @@ +-# SPDX-License-Identifier: MIT +- +- +-import copy +- +-from ._make import NOTHING, _obj_setattr, fields +-from .exceptions import AttrsAttributeNotFoundError +- +- +-def asdict( +- inst, +- recurse=True, +- filter=None, +- dict_factory=dict, +- retain_collection_types=False, +- value_serializer=None, +-): +- """ +- Return the ``attrs`` attribute values of *inst* as a dict. +- +- Optionally recurse into other ``attrs``-decorated classes. +- +- :param inst: Instance of an ``attrs``-decorated class. +- :param bool recurse: Recurse into classes that are also +- ``attrs``-decorated. +- :param callable filter: A callable whose return code determines whether an +- attribute or element is included (``True``) or dropped (``False``). Is +- called with the `attrs.Attribute` as the first argument and the +- value as the second argument. +- :param callable dict_factory: A callable to produce dictionaries from. For +- example, to produce ordered dictionaries instead of normal Python +- dictionaries, pass in ``collections.OrderedDict``. +- :param bool retain_collection_types: Do not convert to ``list`` when +- encountering an attribute whose type is ``tuple`` or ``set``. Only +- meaningful if ``recurse`` is ``True``. +- :param Optional[callable] value_serializer: A hook that is called for every +- attribute or dict key/value. It receives the current instance, field +- and value and must return the (updated) value. The hook is run *after* +- the optional *filter* has been applied. +- +- :rtype: return type of *dict_factory* +- +- :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` +- class. +- +- .. versionadded:: 16.0.0 *dict_factory* +- .. versionadded:: 16.1.0 *retain_collection_types* +- .. versionadded:: 20.3.0 *value_serializer* +- .. versionadded:: 21.3.0 If a dict has a collection for a key, it is +- serialized as a tuple. 
+- """ +- attrs = fields(inst.__class__) +- rv = dict_factory() +- for a in attrs: +- v = getattr(inst, a.name) +- if filter is not None and not filter(a, v): +- continue +- +- if value_serializer is not None: +- v = value_serializer(inst, a, v) +- +- if recurse is True: +- if has(v.__class__): +- rv[a.name] = asdict( +- v, +- recurse=True, +- filter=filter, +- dict_factory=dict_factory, +- retain_collection_types=retain_collection_types, +- value_serializer=value_serializer, +- ) +- elif isinstance(v, (tuple, list, set, frozenset)): +- cf = v.__class__ if retain_collection_types is True else list +- rv[a.name] = cf( +- [ +- _asdict_anything( +- i, +- is_key=False, +- filter=filter, +- dict_factory=dict_factory, +- retain_collection_types=retain_collection_types, +- value_serializer=value_serializer, +- ) +- for i in v +- ] +- ) +- elif isinstance(v, dict): +- df = dict_factory +- rv[a.name] = df( +- ( +- _asdict_anything( +- kk, +- is_key=True, +- filter=filter, +- dict_factory=df, +- retain_collection_types=retain_collection_types, +- value_serializer=value_serializer, +- ), +- _asdict_anything( +- vv, +- is_key=False, +- filter=filter, +- dict_factory=df, +- retain_collection_types=retain_collection_types, +- value_serializer=value_serializer, +- ), +- ) +- for kk, vv in v.items() +- ) +- else: +- rv[a.name] = v +- else: +- rv[a.name] = v +- return rv +- +- +-def _asdict_anything( +- val, +- is_key, +- filter, +- dict_factory, +- retain_collection_types, +- value_serializer, +-): +- """ +- ``asdict`` only works on attrs instances, this works on anything. +- """ +- if getattr(val.__class__, "__attrs_attrs__", None) is not None: +- # Attrs class. +- rv = asdict( +- val, +- recurse=True, +- filter=filter, +- dict_factory=dict_factory, +- retain_collection_types=retain_collection_types, +- value_serializer=value_serializer, +- ) +- elif isinstance(val, (tuple, list, set, frozenset)): +- if retain_collection_types is True: +- cf = val.__class__ +- elif is_key: +- cf = tuple +- else: +- cf = list +- +- rv = cf( +- [ +- _asdict_anything( +- i, +- is_key=False, +- filter=filter, +- dict_factory=dict_factory, +- retain_collection_types=retain_collection_types, +- value_serializer=value_serializer, +- ) +- for i in val +- ] +- ) +- elif isinstance(val, dict): +- df = dict_factory +- rv = df( +- ( +- _asdict_anything( +- kk, +- is_key=True, +- filter=filter, +- dict_factory=df, +- retain_collection_types=retain_collection_types, +- value_serializer=value_serializer, +- ), +- _asdict_anything( +- vv, +- is_key=False, +- filter=filter, +- dict_factory=df, +- retain_collection_types=retain_collection_types, +- value_serializer=value_serializer, +- ), +- ) +- for kk, vv in val.items() +- ) +- else: +- rv = val +- if value_serializer is not None: +- rv = value_serializer(None, None, rv) +- +- return rv +- +- +-def astuple( +- inst, +- recurse=True, +- filter=None, +- tuple_factory=tuple, +- retain_collection_types=False, +-): +- """ +- Return the ``attrs`` attribute values of *inst* as a tuple. +- +- Optionally recurse into other ``attrs``-decorated classes. +- +- :param inst: Instance of an ``attrs``-decorated class. +- :param bool recurse: Recurse into classes that are also +- ``attrs``-decorated. +- :param callable filter: A callable whose return code determines whether an +- attribute or element is included (``True``) or dropped (``False``). Is +- called with the `attrs.Attribute` as the first argument and the +- value as the second argument. 
+- :param callable tuple_factory: A callable to produce tuples from. For +- example, to produce lists instead of tuples. +- :param bool retain_collection_types: Do not convert to ``list`` +- or ``dict`` when encountering an attribute which type is +- ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is +- ``True``. +- +- :rtype: return type of *tuple_factory* +- +- :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` +- class. +- +- .. versionadded:: 16.2.0 +- """ +- attrs = fields(inst.__class__) +- rv = [] +- retain = retain_collection_types # Very long. :/ +- for a in attrs: +- v = getattr(inst, a.name) +- if filter is not None and not filter(a, v): +- continue +- if recurse is True: +- if has(v.__class__): +- rv.append( +- astuple( +- v, +- recurse=True, +- filter=filter, +- tuple_factory=tuple_factory, +- retain_collection_types=retain, +- ) +- ) +- elif isinstance(v, (tuple, list, set, frozenset)): +- cf = v.__class__ if retain is True else list +- rv.append( +- cf( +- [ +- astuple( +- j, +- recurse=True, +- filter=filter, +- tuple_factory=tuple_factory, +- retain_collection_types=retain, +- ) +- if has(j.__class__) +- else j +- for j in v +- ] +- ) +- ) +- elif isinstance(v, dict): +- df = v.__class__ if retain is True else dict +- rv.append( +- df( +- ( +- astuple( +- kk, +- tuple_factory=tuple_factory, +- retain_collection_types=retain, +- ) +- if has(kk.__class__) +- else kk, +- astuple( +- vv, +- tuple_factory=tuple_factory, +- retain_collection_types=retain, +- ) +- if has(vv.__class__) +- else vv, +- ) +- for kk, vv in v.items() +- ) +- ) +- else: +- rv.append(v) +- else: +- rv.append(v) +- +- return rv if tuple_factory is list else tuple_factory(rv) +- +- +-def has(cls): +- """ +- Check whether *cls* is a class with ``attrs`` attributes. +- +- :param type cls: Class to introspect. +- :raise TypeError: If *cls* is not a class. +- +- :rtype: bool +- """ +- return getattr(cls, "__attrs_attrs__", None) is not None +- +- +-def assoc(inst, **changes): +- """ +- Copy *inst* and apply *changes*. +- +- :param inst: Instance of a class with ``attrs`` attributes. +- :param changes: Keyword changes in the new copy. +- +- :return: A copy of inst with *changes* incorporated. +- +- :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't +- be found on *cls*. +- :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` +- class. +- +- .. deprecated:: 17.1.0 +- Use `attrs.evolve` instead if you can. +- This function will not be removed du to the slightly different approach +- compared to `attrs.evolve`. +- """ +- import warnings +- +- warnings.warn( +- "assoc is deprecated and will be removed after 2018/01.", +- DeprecationWarning, +- stacklevel=2, +- ) +- new = copy.copy(inst) +- attrs = fields(inst.__class__) +- for k, v in changes.items(): +- a = getattr(attrs, k, NOTHING) +- if a is NOTHING: +- raise AttrsAttributeNotFoundError( +- "{k} is not an attrs attribute on {cl}.".format( +- k=k, cl=new.__class__ +- ) +- ) +- _obj_setattr(new, k, v) +- return new +- +- +-def evolve(inst, **changes): +- """ +- Create a new instance, based on *inst* with *changes* applied. +- +- :param inst: Instance of a class with ``attrs`` attributes. +- :param changes: Keyword changes in the new copy. +- +- :return: A copy of inst with *changes* incorporated. +- +- :raise TypeError: If *attr_name* couldn't be found in the class +- ``__init__``. +- :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` +- class. +- +- .. 
versionadded:: 17.1.0 +- """ +- cls = inst.__class__ +- attrs = fields(cls) +- for a in attrs: +- if not a.init: +- continue +- attr_name = a.name # To deal with private attributes. +- init_name = attr_name if attr_name[0] != "_" else attr_name[1:] +- if init_name not in changes: +- changes[init_name] = getattr(inst, attr_name) +- +- return cls(**changes) +- +- +-def resolve_types(cls, globalns=None, localns=None, attribs=None): +- """ +- Resolve any strings and forward annotations in type annotations. +- +- This is only required if you need concrete types in `Attribute`'s *type* +- field. In other words, you don't need to resolve your types if you only +- use them for static type checking. +- +- With no arguments, names will be looked up in the module in which the class +- was created. If this is not what you want, e.g. if the name only exists +- inside a method, you may pass *globalns* or *localns* to specify other +- dictionaries in which to look up these names. See the docs of +- `typing.get_type_hints` for more details. +- +- :param type cls: Class to resolve. +- :param Optional[dict] globalns: Dictionary containing global variables. +- :param Optional[dict] localns: Dictionary containing local variables. +- :param Optional[list] attribs: List of attribs for the given class. +- This is necessary when calling from inside a ``field_transformer`` +- since *cls* is not an ``attrs`` class yet. +- +- :raise TypeError: If *cls* is not a class. +- :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` +- class and you didn't pass any attribs. +- :raise NameError: If types cannot be resolved because of missing variables. +- +- :returns: *cls* so you can use this function also as a class decorator. +- Please note that you have to apply it **after** `attrs.define`. That +- means the decorator has to come in the line **before** `attrs.define`. +- +- .. versionadded:: 20.1.0 +- .. versionadded:: 21.1.0 *attribs* +- +- """ +- # Since calling get_type_hints is expensive we cache whether we've +- # done it already. +- if getattr(cls, "__attrs_types_resolved__", None) != cls: +- import typing +- +- hints = typing.get_type_hints(cls, globalns=globalns, localns=localns) +- for field in fields(cls) if attribs is None else attribs: +- if field.name in hints: +- # Since fields have been frozen we must work around it. +- _obj_setattr(field, "type", hints[field.name]) +- # We store the class we resolved so that subclasses know they haven't +- # been resolved. +- cls.__attrs_types_resolved__ = cls +- +- # Return the class so you can use it as a decorator too. +- return cls +diff --git a/src/poetry/core/_vendor/attr/_make.py b/src/poetry/core/_vendor/attr/_make.py +deleted file mode 100644 +index 4d1afe3..0000000 +--- a/src/poetry/core/_vendor/attr/_make.py ++++ /dev/null +@@ -1,3006 +0,0 @@ +-# SPDX-License-Identifier: MIT +- +-import copy +-import linecache +-import sys +-import types +-import typing +- +-from operator import itemgetter +- +-# We need to import _compat itself in addition to the _compat members to avoid +-# having the thread-local in the globals here. +-from . import _compat, _config, setters +-from ._compat import ( +- HAS_F_STRINGS, +- PY310, +- PYPY, +- _AnnotationExtractor, +- ordered_dict, +- set_closure_cell, +-) +-from .exceptions import ( +- DefaultAlreadySetError, +- FrozenInstanceError, +- NotAnAttrsClassError, +- UnannotatedAttributeError, +-) +- +- +-# This is used at least twice, so cache it here. 
+-_obj_setattr = object.__setattr__ +-_init_converter_pat = "__attr_converter_%s" +-_init_factory_pat = "__attr_factory_{}" +-_tuple_property_pat = ( +- " {attr_name} = _attrs_property(_attrs_itemgetter({index}))" +-) +-_classvar_prefixes = ( +- "typing.ClassVar", +- "t.ClassVar", +- "ClassVar", +- "typing_extensions.ClassVar", +-) +-# we don't use a double-underscore prefix because that triggers +-# name mangling when trying to create a slot for the field +-# (when slots=True) +-_hash_cache_field = "_attrs_cached_hash" +- +-_empty_metadata_singleton = types.MappingProxyType({}) +- +-# Unique object for unequivocal getattr() defaults. +-_sentinel = object() +- +-_ng_default_on_setattr = setters.pipe(setters.convert, setters.validate) +- +- +-class _Nothing: +- """ +- Sentinel class to indicate the lack of a value when ``None`` is ambiguous. +- +- ``_Nothing`` is a singleton. There is only ever one of it. +- +- .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False. +- """ +- +- _singleton = None +- +- def __new__(cls): +- if _Nothing._singleton is None: +- _Nothing._singleton = super().__new__(cls) +- return _Nothing._singleton +- +- def __repr__(self): +- return "NOTHING" +- +- def __bool__(self): +- return False +- +- +-NOTHING = _Nothing() +-""" +-Sentinel to indicate the lack of a value when ``None`` is ambiguous. +-""" +- +- +-class _CacheHashWrapper(int): +- """ +- An integer subclass that pickles / copies as None +- +- This is used for non-slots classes with ``cache_hash=True``, to avoid +- serializing a potentially (even likely) invalid hash value. Since ``None`` +- is the default value for uncalculated hashes, whenever this is copied, +- the copy's value for the hash should automatically reset. +- +- See GH #613 for more details. +- """ +- +- def __reduce__(self, _none_constructor=type(None), _args=()): +- return _none_constructor, _args +- +- +-def attrib( +- default=NOTHING, +- validator=None, +- repr=True, +- cmp=None, +- hash=None, +- init=True, +- metadata=None, +- type=None, +- converter=None, +- factory=None, +- kw_only=False, +- eq=None, +- order=None, +- on_setattr=None, +-): +- """ +- Create a new attribute on a class. +- +- .. warning:: +- +- Does *not* do anything unless the class is also decorated with +- `attr.s`! +- +- :param default: A value that is used if an ``attrs``-generated ``__init__`` +- is used and no value is passed while instantiating or the attribute is +- excluded using ``init=False``. +- +- If the value is an instance of `attrs.Factory`, its callable will be +- used to construct a new value (useful for mutable data types like lists +- or dicts). +- +- If a default is not set (or set manually to `attrs.NOTHING`), a value +- *must* be supplied when instantiating; otherwise a `TypeError` +- will be raised. +- +- The default can also be set using decorator notation as shown below. +- +- :type default: Any value +- +- :param callable factory: Syntactic sugar for +- ``default=attr.Factory(factory)``. +- +- :param validator: `callable` that is called by ``attrs``-generated +- ``__init__`` methods after the instance has been initialized. They +- receive the initialized instance, the :func:`~attrs.Attribute`, and the +- passed value. +- +- The return value is *not* inspected so the validator has to throw an +- exception itself. +- +- If a `list` is passed, its items are treated as validators and must +- all pass. +- +- Validators can be globally disabled and re-enabled using +- `get_run_validators`. 
+- +- The validator can also be set using decorator notation as shown below. +- +- :type validator: `callable` or a `list` of `callable`\\ s. +- +- :param repr: Include this attribute in the generated ``__repr__`` +- method. If ``True``, include the attribute; if ``False``, omit it. By +- default, the built-in ``repr()`` function is used. To override how the +- attribute value is formatted, pass a ``callable`` that takes a single +- value and returns a string. Note that the resulting string is used +- as-is, i.e. it will be used directly *instead* of calling ``repr()`` +- (the default). +- :type repr: a `bool` or a `callable` to use a custom function. +- +- :param eq: If ``True`` (default), include this attribute in the +- generated ``__eq__`` and ``__ne__`` methods that check two instances +- for equality. To override how the attribute value is compared, +- pass a ``callable`` that takes a single value and returns the value +- to be compared. +- :type eq: a `bool` or a `callable`. +- +- :param order: If ``True`` (default), include this attributes in the +- generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. +- To override how the attribute value is ordered, +- pass a ``callable`` that takes a single value and returns the value +- to be ordered. +- :type order: a `bool` or a `callable`. +- +- :param cmp: Setting *cmp* is equivalent to setting *eq* and *order* to the +- same value. Must not be mixed with *eq* or *order*. +- :type cmp: a `bool` or a `callable`. +- +- :param Optional[bool] hash: Include this attribute in the generated +- ``__hash__`` method. If ``None`` (default), mirror *eq*'s value. This +- is the correct behavior according the Python spec. Setting this value +- to anything else than ``None`` is *discouraged*. +- :param bool init: Include this attribute in the generated ``__init__`` +- method. It is possible to set this to ``False`` and set a default +- value. In that case this attributed is unconditionally initialized +- with the specified default value or factory. +- :param callable converter: `callable` that is called by +- ``attrs``-generated ``__init__`` methods to convert attribute's value +- to the desired format. It is given the passed-in value, and the +- returned value will be used as the new value of the attribute. The +- value is converted before being passed to the validator, if any. +- :param metadata: An arbitrary mapping, to be used by third-party +- components. See `extending_metadata`. +- :param type: The type of the attribute. In Python 3.6 or greater, the +- preferred method to specify the type is using a variable annotation +- (see :pep:`526`). +- This argument is provided for backward compatibility. +- Regardless of the approach used, the type will be stored on +- ``Attribute.type``. +- +- Please note that ``attrs`` doesn't do anything with this metadata by +- itself. You can use it as part of your own code or for +- `static type checking `. +- :param kw_only: Make this attribute keyword-only (Python 3+) +- in the generated ``__init__`` (if ``init`` is ``False``, this +- parameter is ignored). +- :param on_setattr: Allows to overwrite the *on_setattr* setting from +- `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used. +- Set to `attrs.setters.NO_OP` to run **no** `setattr` hooks for this +- attribute -- regardless of the setting in `attr.s`. +- :type on_setattr: `callable`, or a list of callables, or `None`, or +- `attrs.setters.NO_OP` +- +- .. versionadded:: 15.2.0 *convert* +- .. 
versionadded:: 16.3.0 *metadata* +- .. versionchanged:: 17.1.0 *validator* can be a ``list`` now. +- .. versionchanged:: 17.1.0 +- *hash* is ``None`` and therefore mirrors *eq* by default. +- .. versionadded:: 17.3.0 *type* +- .. deprecated:: 17.4.0 *convert* +- .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated +- *convert* to achieve consistency with other noun-based arguments. +- .. versionadded:: 18.1.0 +- ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``. +- .. versionadded:: 18.2.0 *kw_only* +- .. versionchanged:: 19.2.0 *convert* keyword argument removed. +- .. versionchanged:: 19.2.0 *repr* also accepts a custom callable. +- .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. +- .. versionadded:: 19.2.0 *eq* and *order* +- .. versionadded:: 20.1.0 *on_setattr* +- .. versionchanged:: 20.3.0 *kw_only* backported to Python 2 +- .. versionchanged:: 21.1.0 +- *eq*, *order*, and *cmp* also accept a custom callable +- .. versionchanged:: 21.1.0 *cmp* undeprecated +- """ +- eq, eq_key, order, order_key = _determine_attrib_eq_order( +- cmp, eq, order, True +- ) +- +- if hash is not None and hash is not True and hash is not False: +- raise TypeError( +- "Invalid value for hash. Must be True, False, or None." +- ) +- +- if factory is not None: +- if default is not NOTHING: +- raise ValueError( +- "The `default` and `factory` arguments are mutually " +- "exclusive." +- ) +- if not callable(factory): +- raise ValueError("The `factory` argument must be a callable.") +- default = Factory(factory) +- +- if metadata is None: +- metadata = {} +- +- # Apply syntactic sugar by auto-wrapping. +- if isinstance(on_setattr, (list, tuple)): +- on_setattr = setters.pipe(*on_setattr) +- +- if validator and isinstance(validator, (list, tuple)): +- validator = and_(*validator) +- +- if converter and isinstance(converter, (list, tuple)): +- converter = pipe(*converter) +- +- return _CountingAttr( +- default=default, +- validator=validator, +- repr=repr, +- cmp=None, +- hash=hash, +- init=init, +- converter=converter, +- metadata=metadata, +- type=type, +- kw_only=kw_only, +- eq=eq, +- eq_key=eq_key, +- order=order, +- order_key=order_key, +- on_setattr=on_setattr, +- ) +- +- +-def _compile_and_eval(script, globs, locs=None, filename=""): +- """ +- "Exec" the script with the given global (globs) and local (locs) variables. +- """ +- bytecode = compile(script, filename, "exec") +- eval(bytecode, globs, locs) +- +- +-def _make_method(name, script, filename, globs): +- """ +- Create the method with the script given and return the method object. +- """ +- locs = {} +- +- # In order of debuggers like PDB being able to step through the code, +- # we add a fake linecache entry. +- count = 1 +- base_filename = filename +- while True: +- linecache_tuple = ( +- len(script), +- None, +- script.splitlines(True), +- filename, +- ) +- old_val = linecache.cache.setdefault(filename, linecache_tuple) +- if old_val == linecache_tuple: +- break +- else: +- filename = "{}-{}>".format(base_filename[:-1], count) +- count += 1 +- +- _compile_and_eval(script, globs, locs, filename) +- +- return locs[name] +- +- +-def _make_attr_tuple_class(cls_name, attr_names): +- """ +- Create a tuple subclass to hold `Attribute`s for an `attrs` class. +- +- The subclass is a bare tuple with properties for names. 
+- +- class MyClassAttributes(tuple): +- __slots__ = () +- x = property(itemgetter(0)) +- """ +- attr_class_name = "{}Attributes".format(cls_name) +- attr_class_template = [ +- "class {}(tuple):".format(attr_class_name), +- " __slots__ = ()", +- ] +- if attr_names: +- for i, attr_name in enumerate(attr_names): +- attr_class_template.append( +- _tuple_property_pat.format(index=i, attr_name=attr_name) +- ) +- else: +- attr_class_template.append(" pass") +- globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property} +- _compile_and_eval("\n".join(attr_class_template), globs) +- return globs[attr_class_name] +- +- +-# Tuple class for extracted attributes from a class definition. +-# `base_attrs` is a subset of `attrs`. +-_Attributes = _make_attr_tuple_class( +- "_Attributes", +- [ +- # all attributes to build dunder methods for +- "attrs", +- # attributes that have been inherited +- "base_attrs", +- # map inherited attributes to their originating classes +- "base_attrs_map", +- ], +-) +- +- +-def _is_class_var(annot): +- """ +- Check whether *annot* is a typing.ClassVar. +- +- The string comparison hack is used to avoid evaluating all string +- annotations which would put attrs-based classes at a performance +- disadvantage compared to plain old classes. +- """ +- annot = str(annot) +- +- # Annotation can be quoted. +- if annot.startswith(("'", '"')) and annot.endswith(("'", '"')): +- annot = annot[1:-1] +- +- return annot.startswith(_classvar_prefixes) +- +- +-def _has_own_attribute(cls, attrib_name): +- """ +- Check whether *cls* defines *attrib_name* (and doesn't just inherit it). +- +- Requires Python 3. +- """ +- attr = getattr(cls, attrib_name, _sentinel) +- if attr is _sentinel: +- return False +- +- for base_cls in cls.__mro__[1:]: +- a = getattr(base_cls, attrib_name, None) +- if attr is a: +- return False +- +- return True +- +- +-def _get_annotations(cls): +- """ +- Get annotations for *cls*. +- """ +- if _has_own_attribute(cls, "__annotations__"): +- return cls.__annotations__ +- +- return {} +- +- +-def _counter_getter(e): +- """ +- Key function for sorting to avoid re-creating a lambda for every class. +- """ +- return e[1].counter +- +- +-def _collect_base_attrs(cls, taken_attr_names): +- """ +- Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. +- """ +- base_attrs = [] +- base_attr_map = {} # A dictionary of base attrs to their classes. +- +- # Traverse the MRO and collect attributes. +- for base_cls in reversed(cls.__mro__[1:-1]): +- for a in getattr(base_cls, "__attrs_attrs__", []): +- if a.inherited or a.name in taken_attr_names: +- continue +- +- a = a.evolve(inherited=True) +- base_attrs.append(a) +- base_attr_map[a.name] = base_cls +- +- # For each name, only keep the freshest definition i.e. the furthest at the +- # back. base_attr_map is fine because it gets overwritten with every new +- # instance. +- filtered = [] +- seen = set() +- for a in reversed(base_attrs): +- if a.name in seen: +- continue +- filtered.insert(0, a) +- seen.add(a.name) +- +- return filtered, base_attr_map +- +- +-def _collect_base_attrs_broken(cls, taken_attr_names): +- """ +- Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. +- +- N.B. *taken_attr_names* will be mutated. +- +- Adhere to the old incorrect behavior. +- +- Notably it collects from the front and considers inherited attributes which +- leads to the buggy behavior reported in #428. +- """ +- base_attrs = [] +- base_attr_map = {} # A dictionary of base attrs to their classes. 
+- +- # Traverse the MRO and collect attributes. +- for base_cls in cls.__mro__[1:-1]: +- for a in getattr(base_cls, "__attrs_attrs__", []): +- if a.name in taken_attr_names: +- continue +- +- a = a.evolve(inherited=True) +- taken_attr_names.add(a.name) +- base_attrs.append(a) +- base_attr_map[a.name] = base_cls +- +- return base_attrs, base_attr_map +- +- +-def _transform_attrs( +- cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer +-): +- """ +- Transform all `_CountingAttr`s on a class into `Attribute`s. +- +- If *these* is passed, use that and don't look for them on the class. +- +- *collect_by_mro* is True, collect them in the correct MRO order, otherwise +- use the old -- incorrect -- order. See #428. +- +- Return an `_Attributes`. +- """ +- cd = cls.__dict__ +- anns = _get_annotations(cls) +- +- if these is not None: +- ca_list = [(name, ca) for name, ca in these.items()] +- +- if not isinstance(these, ordered_dict): +- ca_list.sort(key=_counter_getter) +- elif auto_attribs is True: +- ca_names = { +- name +- for name, attr in cd.items() +- if isinstance(attr, _CountingAttr) +- } +- ca_list = [] +- annot_names = set() +- for attr_name, type in anns.items(): +- if _is_class_var(type): +- continue +- annot_names.add(attr_name) +- a = cd.get(attr_name, NOTHING) +- +- if not isinstance(a, _CountingAttr): +- if a is NOTHING: +- a = attrib() +- else: +- a = attrib(default=a) +- ca_list.append((attr_name, a)) +- +- unannotated = ca_names - annot_names +- if len(unannotated) > 0: +- raise UnannotatedAttributeError( +- "The following `attr.ib`s lack a type annotation: " +- + ", ".join( +- sorted(unannotated, key=lambda n: cd.get(n).counter) +- ) +- + "." +- ) +- else: +- ca_list = sorted( +- ( +- (name, attr) +- for name, attr in cd.items() +- if isinstance(attr, _CountingAttr) +- ), +- key=lambda e: e[1].counter, +- ) +- +- own_attrs = [ +- Attribute.from_counting_attr( +- name=attr_name, ca=ca, type=anns.get(attr_name) +- ) +- for attr_name, ca in ca_list +- ] +- +- if collect_by_mro: +- base_attrs, base_attr_map = _collect_base_attrs( +- cls, {a.name for a in own_attrs} +- ) +- else: +- base_attrs, base_attr_map = _collect_base_attrs_broken( +- cls, {a.name for a in own_attrs} +- ) +- +- if kw_only: +- own_attrs = [a.evolve(kw_only=True) for a in own_attrs] +- base_attrs = [a.evolve(kw_only=True) for a in base_attrs] +- +- attrs = base_attrs + own_attrs +- +- # Mandatory vs non-mandatory attr order only matters when they are part of +- # the __init__ signature and when they aren't kw_only (which are moved to +- # the end and can be mandatory or non-mandatory in any order, as they will +- # be specified as keyword args anyway). Check the order of those attrs: +- had_default = False +- for a in (a for a in attrs if a.init is not False and a.kw_only is False): +- if had_default is True and a.default is NOTHING: +- raise ValueError( +- "No mandatory attributes allowed after an attribute with a " +- "default value or factory. Attribute in question: %r" % (a,) +- ) +- +- if had_default is False and a.default is not NOTHING: +- had_default = True +- +- if field_transformer is not None: +- attrs = field_transformer(cls, attrs) +- +- # Create AttrsClass *after* applying the field_transformer since it may +- # add or remove attributes! 
+- attr_names = [a.name for a in attrs] +- AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) +- +- return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map)) +- +- +-if PYPY: +- +- def _frozen_setattrs(self, name, value): +- """ +- Attached to frozen classes as __setattr__. +- """ +- if isinstance(self, BaseException) and name in ( +- "__cause__", +- "__context__", +- ): +- BaseException.__setattr__(self, name, value) +- return +- +- raise FrozenInstanceError() +- +-else: +- +- def _frozen_setattrs(self, name, value): +- """ +- Attached to frozen classes as __setattr__. +- """ +- raise FrozenInstanceError() +- +- +-def _frozen_delattrs(self, name): +- """ +- Attached to frozen classes as __delattr__. +- """ +- raise FrozenInstanceError() +- +- +-class _ClassBuilder: +- """ +- Iteratively build *one* class. +- """ +- +- __slots__ = ( +- "_attr_names", +- "_attrs", +- "_base_attr_map", +- "_base_names", +- "_cache_hash", +- "_cls", +- "_cls_dict", +- "_delete_attribs", +- "_frozen", +- "_has_pre_init", +- "_has_post_init", +- "_is_exc", +- "_on_setattr", +- "_slots", +- "_weakref_slot", +- "_wrote_own_setattr", +- "_has_custom_setattr", +- ) +- +- def __init__( +- self, +- cls, +- these, +- slots, +- frozen, +- weakref_slot, +- getstate_setstate, +- auto_attribs, +- kw_only, +- cache_hash, +- is_exc, +- collect_by_mro, +- on_setattr, +- has_custom_setattr, +- field_transformer, +- ): +- attrs, base_attrs, base_map = _transform_attrs( +- cls, +- these, +- auto_attribs, +- kw_only, +- collect_by_mro, +- field_transformer, +- ) +- +- self._cls = cls +- self._cls_dict = dict(cls.__dict__) if slots else {} +- self._attrs = attrs +- self._base_names = {a.name for a in base_attrs} +- self._base_attr_map = base_map +- self._attr_names = tuple(a.name for a in attrs) +- self._slots = slots +- self._frozen = frozen +- self._weakref_slot = weakref_slot +- self._cache_hash = cache_hash +- self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False)) +- self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) +- self._delete_attribs = not bool(these) +- self._is_exc = is_exc +- self._on_setattr = on_setattr +- +- self._has_custom_setattr = has_custom_setattr +- self._wrote_own_setattr = False +- +- self._cls_dict["__attrs_attrs__"] = self._attrs +- +- if frozen: +- self._cls_dict["__setattr__"] = _frozen_setattrs +- self._cls_dict["__delattr__"] = _frozen_delattrs +- +- self._wrote_own_setattr = True +- elif on_setattr in ( +- _ng_default_on_setattr, +- setters.validate, +- setters.convert, +- ): +- has_validator = has_converter = False +- for a in attrs: +- if a.validator is not None: +- has_validator = True +- if a.converter is not None: +- has_converter = True +- +- if has_validator and has_converter: +- break +- if ( +- ( +- on_setattr == _ng_default_on_setattr +- and not (has_validator or has_converter) +- ) +- or (on_setattr == setters.validate and not has_validator) +- or (on_setattr == setters.convert and not has_converter) +- ): +- # If class-level on_setattr is set to convert + validate, but +- # there's no field to convert or validate, pretend like there's +- # no on_setattr. +- self._on_setattr = None +- +- if getstate_setstate: +- ( +- self._cls_dict["__getstate__"], +- self._cls_dict["__setstate__"], +- ) = self._make_getstate_setstate() +- +- def __repr__(self): +- return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__) +- +- def build_class(self): +- """ +- Finalize class based on the accumulated configuration. 
+- +- Builder cannot be used after calling this method. +- """ +- if self._slots is True: +- return self._create_slots_class() +- else: +- return self._patch_original_class() +- +- def _patch_original_class(self): +- """ +- Apply accumulated methods and return the class. +- """ +- cls = self._cls +- base_names = self._base_names +- +- # Clean class of attribute definitions (`attr.ib()`s). +- if self._delete_attribs: +- for name in self._attr_names: +- if ( +- name not in base_names +- and getattr(cls, name, _sentinel) is not _sentinel +- ): +- try: +- delattr(cls, name) +- except AttributeError: +- # This can happen if a base class defines a class +- # variable and we want to set an attribute with the +- # same name by using only a type annotation. +- pass +- +- # Attach our dunder methods. +- for name, value in self._cls_dict.items(): +- setattr(cls, name, value) +- +- # If we've inherited an attrs __setattr__ and don't write our own, +- # reset it to object's. +- if not self._wrote_own_setattr and getattr( +- cls, "__attrs_own_setattr__", False +- ): +- cls.__attrs_own_setattr__ = False +- +- if not self._has_custom_setattr: +- cls.__setattr__ = _obj_setattr +- +- return cls +- +- def _create_slots_class(self): +- """ +- Build and return a new class with a `__slots__` attribute. +- """ +- cd = { +- k: v +- for k, v in self._cls_dict.items() +- if k not in tuple(self._attr_names) + ("__dict__", "__weakref__") +- } +- +- # If our class doesn't have its own implementation of __setattr__ +- # (either from the user or by us), check the bases, if one of them has +- # an attrs-made __setattr__, that needs to be reset. We don't walk the +- # MRO because we only care about our immediate base classes. +- # XXX: This can be confused by subclassing a slotted attrs class with +- # XXX: a non-attrs class and subclass the resulting class with an attrs +- # XXX: class. See `test_slotted_confused` for details. For now that's +- # XXX: OK with us. +- if not self._wrote_own_setattr: +- cd["__attrs_own_setattr__"] = False +- +- if not self._has_custom_setattr: +- for base_cls in self._cls.__bases__: +- if base_cls.__dict__.get("__attrs_own_setattr__", False): +- cd["__setattr__"] = _obj_setattr +- break +- +- # Traverse the MRO to collect existing slots +- # and check for an existing __weakref__. +- existing_slots = dict() +- weakref_inherited = False +- for base_cls in self._cls.__mro__[1:-1]: +- if base_cls.__dict__.get("__weakref__", None) is not None: +- weakref_inherited = True +- existing_slots.update( +- { +- name: getattr(base_cls, name) +- for name in getattr(base_cls, "__slots__", []) +- } +- ) +- +- base_names = set(self._base_names) +- +- names = self._attr_names +- if ( +- self._weakref_slot +- and "__weakref__" not in getattr(self._cls, "__slots__", ()) +- and "__weakref__" not in names +- and not weakref_inherited +- ): +- names += ("__weakref__",) +- +- # We only add the names of attributes that aren't inherited. +- # Setting __slots__ to inherited attributes wastes memory. +- slot_names = [name for name in names if name not in base_names] +- # There are slots for attributes from current class +- # that are defined in parent classes. 
+- # As their descriptors may be overridden by a child class, +- # we collect them here and update the class dict +- reused_slots = { +- slot: slot_descriptor +- for slot, slot_descriptor in existing_slots.items() +- if slot in slot_names +- } +- slot_names = [name for name in slot_names if name not in reused_slots] +- cd.update(reused_slots) +- if self._cache_hash: +- slot_names.append(_hash_cache_field) +- cd["__slots__"] = tuple(slot_names) +- +- cd["__qualname__"] = self._cls.__qualname__ +- +- # Create new class based on old class and our methods. +- cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) +- +- # The following is a fix for +- # . On Python 3, +- # if a method mentions `__class__` or uses the no-arg super(), the +- # compiler will bake a reference to the class in the method itself +- # as `method.__closure__`. Since we replace the class with a +- # clone, we rewrite these references so it keeps working. +- for item in cls.__dict__.values(): +- if isinstance(item, (classmethod, staticmethod)): +- # Class- and staticmethods hide their functions inside. +- # These might need to be rewritten as well. +- closure_cells = getattr(item.__func__, "__closure__", None) +- elif isinstance(item, property): +- # Workaround for property `super()` shortcut (PY3-only). +- # There is no universal way for other descriptors. +- closure_cells = getattr(item.fget, "__closure__", None) +- else: +- closure_cells = getattr(item, "__closure__", None) +- +- if not closure_cells: # Catch None or the empty list. +- continue +- for cell in closure_cells: +- try: +- match = cell.cell_contents is self._cls +- except ValueError: # ValueError: Cell is empty +- pass +- else: +- if match: +- set_closure_cell(cell, cls) +- +- return cls +- +- def add_repr(self, ns): +- self._cls_dict["__repr__"] = self._add_method_dunders( +- _make_repr(self._attrs, ns, self._cls) +- ) +- return self +- +- def add_str(self): +- repr = self._cls_dict.get("__repr__") +- if repr is None: +- raise ValueError( +- "__str__ can only be generated if a __repr__ exists." +- ) +- +- def __str__(self): +- return self.__repr__() +- +- self._cls_dict["__str__"] = self._add_method_dunders(__str__) +- return self +- +- def _make_getstate_setstate(self): +- """ +- Create custom __setstate__ and __getstate__ methods. +- """ +- # __weakref__ is not writable. +- state_attr_names = tuple( +- an for an in self._attr_names if an != "__weakref__" +- ) +- +- def slots_getstate(self): +- """ +- Automatically created by attrs. +- """ +- return tuple(getattr(self, name) for name in state_attr_names) +- +- hash_caching_enabled = self._cache_hash +- +- def slots_setstate(self, state): +- """ +- Automatically created by attrs. +- """ +- __bound_setattr = _obj_setattr.__get__(self, Attribute) +- for name, value in zip(state_attr_names, state): +- __bound_setattr(name, value) +- +- # The hash code cache is not included when the object is +- # serialized, but it still needs to be initialized to None to +- # indicate that the first call to __hash__ should be a cache +- # miss. 
+- if hash_caching_enabled: +- __bound_setattr(_hash_cache_field, None) +- +- return slots_getstate, slots_setstate +- +- def make_unhashable(self): +- self._cls_dict["__hash__"] = None +- return self +- +- def add_hash(self): +- self._cls_dict["__hash__"] = self._add_method_dunders( +- _make_hash( +- self._cls, +- self._attrs, +- frozen=self._frozen, +- cache_hash=self._cache_hash, +- ) +- ) +- +- return self +- +- def add_init(self): +- self._cls_dict["__init__"] = self._add_method_dunders( +- _make_init( +- self._cls, +- self._attrs, +- self._has_pre_init, +- self._has_post_init, +- self._frozen, +- self._slots, +- self._cache_hash, +- self._base_attr_map, +- self._is_exc, +- self._on_setattr, +- attrs_init=False, +- ) +- ) +- +- return self +- +- def add_match_args(self): +- self._cls_dict["__match_args__"] = tuple( +- field.name +- for field in self._attrs +- if field.init and not field.kw_only +- ) +- +- def add_attrs_init(self): +- self._cls_dict["__attrs_init__"] = self._add_method_dunders( +- _make_init( +- self._cls, +- self._attrs, +- self._has_pre_init, +- self._has_post_init, +- self._frozen, +- self._slots, +- self._cache_hash, +- self._base_attr_map, +- self._is_exc, +- self._on_setattr, +- attrs_init=True, +- ) +- ) +- +- return self +- +- def add_eq(self): +- cd = self._cls_dict +- +- cd["__eq__"] = self._add_method_dunders( +- _make_eq(self._cls, self._attrs) +- ) +- cd["__ne__"] = self._add_method_dunders(_make_ne()) +- +- return self +- +- def add_order(self): +- cd = self._cls_dict +- +- cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = ( +- self._add_method_dunders(meth) +- for meth in _make_order(self._cls, self._attrs) +- ) +- +- return self +- +- def add_setattr(self): +- if self._frozen: +- return self +- +- sa_attrs = {} +- for a in self._attrs: +- on_setattr = a.on_setattr or self._on_setattr +- if on_setattr and on_setattr is not setters.NO_OP: +- sa_attrs[a.name] = a, on_setattr +- +- if not sa_attrs: +- return self +- +- if self._has_custom_setattr: +- # We need to write a __setattr__ but there already is one! +- raise ValueError( +- "Can't combine custom __setattr__ with on_setattr hooks." +- ) +- +- # docstring comes from _add_method_dunders +- def __setattr__(self, name, val): +- try: +- a, hook = sa_attrs[name] +- except KeyError: +- nval = val +- else: +- nval = hook(self, a, val) +- +- _obj_setattr(self, name, nval) +- +- self._cls_dict["__attrs_own_setattr__"] = True +- self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__) +- self._wrote_own_setattr = True +- +- return self +- +- def _add_method_dunders(self, method): +- """ +- Add __module__ and __qualname__ to a *method* if possible. +- """ +- try: +- method.__module__ = self._cls.__module__ +- except AttributeError: +- pass +- +- try: +- method.__qualname__ = ".".join( +- (self._cls.__qualname__, method.__name__) +- ) +- except AttributeError: +- pass +- +- try: +- method.__doc__ = "Method generated by attrs for class %s." % ( +- self._cls.__qualname__, +- ) +- except AttributeError: +- pass +- +- return method +- +- +-def _determine_attrs_eq_order(cmp, eq, order, default_eq): +- """ +- Validate the combination of *cmp*, *eq*, and *order*. Derive the effective +- values of eq and order. If *eq* is None, set it to *default_eq*. +- """ +- if cmp is not None and any((eq is not None, order is not None)): +- raise ValueError("Don't mix `cmp` with `eq' and `order`.") +- +- # cmp takes precedence due to bw-compatibility. 
+- if cmp is not None: +- return cmp, cmp +- +- # If left None, equality is set to the specified default and ordering +- # mirrors equality. +- if eq is None: +- eq = default_eq +- +- if order is None: +- order = eq +- +- if eq is False and order is True: +- raise ValueError("`order` can only be True if `eq` is True too.") +- +- return eq, order +- +- +-def _determine_attrib_eq_order(cmp, eq, order, default_eq): +- """ +- Validate the combination of *cmp*, *eq*, and *order*. Derive the effective +- values of eq and order. If *eq* is None, set it to *default_eq*. +- """ +- if cmp is not None and any((eq is not None, order is not None)): +- raise ValueError("Don't mix `cmp` with `eq' and `order`.") +- +- def decide_callable_or_boolean(value): +- """ +- Decide whether a key function is used. +- """ +- if callable(value): +- value, key = True, value +- else: +- key = None +- return value, key +- +- # cmp takes precedence due to bw-compatibility. +- if cmp is not None: +- cmp, cmp_key = decide_callable_or_boolean(cmp) +- return cmp, cmp_key, cmp, cmp_key +- +- # If left None, equality is set to the specified default and ordering +- # mirrors equality. +- if eq is None: +- eq, eq_key = default_eq, None +- else: +- eq, eq_key = decide_callable_or_boolean(eq) +- +- if order is None: +- order, order_key = eq, eq_key +- else: +- order, order_key = decide_callable_or_boolean(order) +- +- if eq is False and order is True: +- raise ValueError("`order` can only be True if `eq` is True too.") +- +- return eq, eq_key, order, order_key +- +- +-def _determine_whether_to_implement( +- cls, flag, auto_detect, dunders, default=True +-): +- """ +- Check whether we should implement a set of methods for *cls*. +- +- *flag* is the argument passed into @attr.s like 'init', *auto_detect* the +- same as passed into @attr.s and *dunders* is a tuple of attribute names +- whose presence signal that the user has implemented it themselves. +- +- Return *default* if no reason for either for or against is found. +- """ +- if flag is True or flag is False: +- return flag +- +- if flag is None and auto_detect is False: +- return default +- +- # Logically, flag is None and auto_detect is True here. +- for dunder in dunders: +- if _has_own_attribute(cls, dunder): +- return False +- +- return default +- +- +-def attrs( +- maybe_cls=None, +- these=None, +- repr_ns=None, +- repr=None, +- cmp=None, +- hash=None, +- init=None, +- slots=False, +- frozen=False, +- weakref_slot=True, +- str=False, +- auto_attribs=False, +- kw_only=False, +- cache_hash=False, +- auto_exc=False, +- eq=None, +- order=None, +- auto_detect=False, +- collect_by_mro=False, +- getstate_setstate=None, +- on_setattr=None, +- field_transformer=None, +- match_args=True, +-): +- r""" +- A class decorator that adds `dunder +- `_\ -methods according to the +- specified attributes using `attr.ib` or the *these* argument. +- +- :param these: A dictionary of name to `attr.ib` mappings. This is +- useful to avoid the definition of your attributes within the class body +- because you can't (e.g. if you want to add ``__repr__`` methods to +- Django models) or don't want to. +- +- If *these* is not ``None``, ``attrs`` will *not* search the class body +- for attributes and will *not* remove any attributes from it. +- +- If *these* is an ordered dict (`dict` on Python 3.6+, +- `collections.OrderedDict` otherwise), the order is deduced from +- the order of the attributes inside *these*. Otherwise the order +- of the definition of the attributes is used. 
+- +- :type these: `dict` of `str` to `attr.ib` +- +- :param str repr_ns: When using nested classes, there's no way in Python 2 +- to automatically detect that. Therefore it's possible to set the +- namespace explicitly for a more meaningful ``repr`` output. +- :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*, +- *order*, and *hash* arguments explicitly, assume they are set to +- ``True`` **unless any** of the involved methods for one of the +- arguments is implemented in the *current* class (i.e. it is *not* +- inherited from some base class). +- +- So for example by implementing ``__eq__`` on a class yourself, +- ``attrs`` will deduce ``eq=False`` and will create *neither* +- ``__eq__`` *nor* ``__ne__`` (but Python classes come with a sensible +- ``__ne__`` by default, so it *should* be enough to only implement +- ``__eq__`` in most cases). +- +- .. warning:: +- +- If you prevent ``attrs`` from creating the ordering methods for you +- (``order=False``, e.g. by implementing ``__le__``), it becomes +- *your* responsibility to make sure its ordering is sound. The best +- way is to use the `functools.total_ordering` decorator. +- +- +- Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*, +- *cmp*, or *hash* overrides whatever *auto_detect* would determine. +- +- *auto_detect* requires Python 3. Setting it ``True`` on Python 2 raises +- an `attrs.exceptions.PythonTooOldError`. +- +- :param bool repr: Create a ``__repr__`` method with a human readable +- representation of ``attrs`` attributes.. +- :param bool str: Create a ``__str__`` method that is identical to +- ``__repr__``. This is usually not necessary except for +- `Exception`\ s. +- :param Optional[bool] eq: If ``True`` or ``None`` (default), add ``__eq__`` +- and ``__ne__`` methods that check two instances for equality. +- +- They compare the instances as if they were tuples of their ``attrs`` +- attributes if and only if the types of both classes are *identical*! +- :param Optional[bool] order: If ``True``, add ``__lt__``, ``__le__``, +- ``__gt__``, and ``__ge__`` methods that behave like *eq* above and +- allow instances to be ordered. If ``None`` (default) mirror value of +- *eq*. +- :param Optional[bool] cmp: Setting *cmp* is equivalent to setting *eq* +- and *order* to the same value. Must not be mixed with *eq* or *order*. +- :param Optional[bool] hash: If ``None`` (default), the ``__hash__`` method +- is generated according how *eq* and *frozen* are set. +- +- 1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you. +- 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to +- None, marking it unhashable (which it is). +- 3. If *eq* is False, ``__hash__`` will be left untouched meaning the +- ``__hash__`` method of the base class will be used (if base class is +- ``object``, this means it will fall back to id-based hashing.). +- +- Although not recommended, you can decide for yourself and force +- ``attrs`` to create one (e.g. if the class is immutable even though you +- didn't freeze it programmatically) by passing ``True`` or not. Both of +- these cases are rather special and should be used carefully. +- +- See our documentation on `hashing`, Python's documentation on +- `object.__hash__`, and the `GitHub issue that led to the default \ +- behavior `_ for more +- details. +- :param bool init: Create a ``__init__`` method that initializes the +- ``attrs`` attributes. Leading underscores are stripped for the argument +- name. 
If a ``__attrs_pre_init__`` method exists on the class, it will +- be called before the class is initialized. If a ``__attrs_post_init__`` +- method exists on the class, it will be called after the class is fully +- initialized. +- +- If ``init`` is ``False``, an ``__attrs_init__`` method will be +- injected instead. This allows you to define a custom ``__init__`` +- method that can do pre-init work such as ``super().__init__()``, +- and then call ``__attrs_init__()`` and ``__attrs_post_init__()``. +- :param bool slots: Create a `slotted class ` that's more +- memory-efficient. Slotted classes are generally superior to the default +- dict classes, but have some gotchas you should know about, so we +- encourage you to read the `glossary entry `. +- :param bool frozen: Make instances immutable after initialization. If +- someone attempts to modify a frozen instance, +- `attr.exceptions.FrozenInstanceError` is raised. +- +- .. note:: +- +- 1. This is achieved by installing a custom ``__setattr__`` method +- on your class, so you can't implement your own. +- +- 2. True immutability is impossible in Python. +- +- 3. This *does* have a minor a runtime performance `impact +- ` when initializing new instances. In other words: +- ``__init__`` is slightly slower with ``frozen=True``. +- +- 4. If a class is frozen, you cannot modify ``self`` in +- ``__attrs_post_init__`` or a self-written ``__init__``. You can +- circumvent that limitation by using +- ``object.__setattr__(self, "attribute_name", value)``. +- +- 5. Subclasses of a frozen class are frozen too. +- +- :param bool weakref_slot: Make instances weak-referenceable. This has no +- effect unless ``slots`` is also enabled. +- :param bool auto_attribs: If ``True``, collect :pep:`526`-annotated +- attributes (Python 3.6 and later only) from the class body. +- +- In this case, you **must** annotate every field. If ``attrs`` +- encounters a field that is set to an `attr.ib` but lacks a type +- annotation, an `attr.exceptions.UnannotatedAttributeError` is +- raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't +- want to set a type. +- +- If you assign a value to those attributes (e.g. ``x: int = 42``), that +- value becomes the default value like if it were passed using +- ``attr.ib(default=42)``. Passing an instance of `attrs.Factory` also +- works as expected in most cases (see warning below). +- +- Attributes annotated as `typing.ClassVar`, and attributes that are +- neither annotated nor set to an `attr.ib` are **ignored**. +- +- .. warning:: +- For features that use the attribute name to create decorators (e.g. +- `validators `), you still *must* assign `attr.ib` to +- them. Otherwise Python will either not find the name or try to use +- the default value to call e.g. ``validator`` on it. +- +- These errors can be quite confusing and probably the most common bug +- report on our bug tracker. +- +- :param bool kw_only: Make all attributes keyword-only (Python 3+) +- in the generated ``__init__`` (if ``init`` is ``False``, this +- parameter is ignored). +- :param bool cache_hash: Ensure that the object's hash code is computed +- only once and stored on the object. If this is set to ``True``, +- hashing must be either explicitly or implicitly enabled for this +- class. If the hash code is cached, avoid any reassignments of +- fields involved in hash code computation or mutations of the objects +- those fields point to after object creation. If such changes occur, +- the behavior of the object's hash code is undefined. 
+- :param bool auto_exc: If the class subclasses `BaseException` +- (which implicitly includes any subclass of any exception), the +- following happens to behave like a well-behaved Python exceptions +- class: +- +- - the values for *eq*, *order*, and *hash* are ignored and the +- instances compare and hash by the instance's ids (N.B. ``attrs`` will +- *not* remove existing implementations of ``__hash__`` or the equality +- methods. It just won't add own ones.), +- - all attributes that are either passed into ``__init__`` or have a +- default value are additionally available as a tuple in the ``args`` +- attribute, +- - the value of *str* is ignored leaving ``__str__`` to base classes. +- :param bool collect_by_mro: Setting this to `True` fixes the way ``attrs`` +- collects attributes from base classes. The default behavior is +- incorrect in certain cases of multiple inheritance. It should be on by +- default but is kept off for backward-compatibility. +- +- See issue `#428 `_ for +- more details. +- +- :param Optional[bool] getstate_setstate: +- .. note:: +- This is usually only interesting for slotted classes and you should +- probably just set *auto_detect* to `True`. +- +- If `True`, ``__getstate__`` and +- ``__setstate__`` are generated and attached to the class. This is +- necessary for slotted classes to be pickleable. If left `None`, it's +- `True` by default for slotted classes and ``False`` for dict classes. +- +- If *auto_detect* is `True`, and *getstate_setstate* is left `None`, +- and **either** ``__getstate__`` or ``__setstate__`` is detected directly +- on the class (i.e. not inherited), it is set to `False` (this is usually +- what you want). +- +- :param on_setattr: A callable that is run whenever the user attempts to set +- an attribute (either by assignment like ``i.x = 42`` or by using +- `setattr` like ``setattr(i, "x", 42)``). It receives the same arguments +- as validators: the instance, the attribute that is being modified, and +- the new value. +- +- If no exception is raised, the attribute is set to the return value of +- the callable. +- +- If a list of callables is passed, they're automatically wrapped in an +- `attrs.setters.pipe`. +- :type on_setattr: `callable`, or a list of callables, or `None`, or +- `attrs.setters.NO_OP` +- +- :param Optional[callable] field_transformer: +- A function that is called with the original class object and all +- fields right before ``attrs`` finalizes the class. You can use +- this, e.g., to automatically add converters or validators to +- fields based on their types. See `transform-fields` for more details. +- +- :param bool match_args: +- If `True` (default), set ``__match_args__`` on the class to support +- :pep:`634` (Structural Pattern Matching). It is a tuple of all +- non-keyword-only ``__init__`` parameter names on Python 3.10 and later. +- Ignored on older Python versions. +- +- .. versionadded:: 16.0.0 *slots* +- .. versionadded:: 16.1.0 *frozen* +- .. versionadded:: 16.3.0 *str* +- .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``. +- .. versionchanged:: 17.1.0 +- *hash* supports ``None`` as value which is also the default now. +- .. versionadded:: 17.3.0 *auto_attribs* +- .. versionchanged:: 18.1.0 +- If *these* is passed, no attributes are deleted from the class body. +- .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained. +- .. versionadded:: 18.2.0 *weakref_slot* +- .. 
deprecated:: 18.2.0 +- ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a +- `DeprecationWarning` if the classes compared are subclasses of +- each other. ``__eq`` and ``__ne__`` never tried to compared subclasses +- to each other. +- .. versionchanged:: 19.2.0 +- ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider +- subclasses comparable anymore. +- .. versionadded:: 18.2.0 *kw_only* +- .. versionadded:: 18.2.0 *cache_hash* +- .. versionadded:: 19.1.0 *auto_exc* +- .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. +- .. versionadded:: 19.2.0 *eq* and *order* +- .. versionadded:: 20.1.0 *auto_detect* +- .. versionadded:: 20.1.0 *collect_by_mro* +- .. versionadded:: 20.1.0 *getstate_setstate* +- .. versionadded:: 20.1.0 *on_setattr* +- .. versionadded:: 20.3.0 *field_transformer* +- .. versionchanged:: 21.1.0 +- ``init=False`` injects ``__attrs_init__`` +- .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__`` +- .. versionchanged:: 21.1.0 *cmp* undeprecated +- .. versionadded:: 21.3.0 *match_args* +- """ +- eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None) +- hash_ = hash # work around the lack of nonlocal +- +- if isinstance(on_setattr, (list, tuple)): +- on_setattr = setters.pipe(*on_setattr) +- +- def wrap(cls): +- is_frozen = frozen or _has_frozen_base_class(cls) +- is_exc = auto_exc is True and issubclass(cls, BaseException) +- has_own_setattr = auto_detect and _has_own_attribute( +- cls, "__setattr__" +- ) +- +- if has_own_setattr and is_frozen: +- raise ValueError("Can't freeze a class with a custom __setattr__.") +- +- builder = _ClassBuilder( +- cls, +- these, +- slots, +- is_frozen, +- weakref_slot, +- _determine_whether_to_implement( +- cls, +- getstate_setstate, +- auto_detect, +- ("__getstate__", "__setstate__"), +- default=slots, +- ), +- auto_attribs, +- kw_only, +- cache_hash, +- is_exc, +- collect_by_mro, +- on_setattr, +- has_own_setattr, +- field_transformer, +- ) +- if _determine_whether_to_implement( +- cls, repr, auto_detect, ("__repr__",) +- ): +- builder.add_repr(repr_ns) +- if str is True: +- builder.add_str() +- +- eq = _determine_whether_to_implement( +- cls, eq_, auto_detect, ("__eq__", "__ne__") +- ) +- if not is_exc and eq is True: +- builder.add_eq() +- if not is_exc and _determine_whether_to_implement( +- cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__") +- ): +- builder.add_order() +- +- builder.add_setattr() +- +- if ( +- hash_ is None +- and auto_detect is True +- and _has_own_attribute(cls, "__hash__") +- ): +- hash = False +- else: +- hash = hash_ +- if hash is not True and hash is not False and hash is not None: +- # Can't use `hash in` because 1 == True for example. +- raise TypeError( +- "Invalid value for hash. Must be True, False, or None." +- ) +- elif hash is False or (hash is None and eq is False) or is_exc: +- # Don't do anything. Should fall back to __object__'s __hash__ +- # which is by id. +- if cache_hash: +- raise TypeError( +- "Invalid value for cache_hash. To use hash caching," +- " hashing must be either explicitly or implicitly " +- "enabled." +- ) +- elif hash is True or ( +- hash is None and eq is True and is_frozen is True +- ): +- # Build a __hash__ if told so, or if it's safe. +- builder.add_hash() +- else: +- # Raise TypeError on attempts to hash. +- if cache_hash: +- raise TypeError( +- "Invalid value for cache_hash. To use hash caching," +- " hashing must be either explicitly or implicitly " +- "enabled." 
+- ) +- builder.make_unhashable() +- +- if _determine_whether_to_implement( +- cls, init, auto_detect, ("__init__",) +- ): +- builder.add_init() +- else: +- builder.add_attrs_init() +- if cache_hash: +- raise TypeError( +- "Invalid value for cache_hash. To use hash caching," +- " init must be True." +- ) +- +- if ( +- PY310 +- and match_args +- and not _has_own_attribute(cls, "__match_args__") +- ): +- builder.add_match_args() +- +- return builder.build_class() +- +- # maybe_cls's type depends on the usage of the decorator. It's a class +- # if it's used as `@attrs` but ``None`` if used as `@attrs()`. +- if maybe_cls is None: +- return wrap +- else: +- return wrap(maybe_cls) +- +- +-_attrs = attrs +-""" +-Internal alias so we can use it in functions that take an argument called +-*attrs*. +-""" +- +- +-def _has_frozen_base_class(cls): +- """ +- Check whether *cls* has a frozen ancestor by looking at its +- __setattr__. +- """ +- return cls.__setattr__ is _frozen_setattrs +- +- +-def _generate_unique_filename(cls, func_name): +- """ +- Create a "filename" suitable for a function being generated. +- """ +- unique_filename = "<attrs generated {} {}.{}>".format( +- func_name, +- cls.__module__, +- getattr(cls, "__qualname__", cls.__name__), +- ) +- return unique_filename +- +- +-def _make_hash(cls, attrs, frozen, cache_hash): +- attrs = tuple( +- a for a in attrs if a.hash is True or (a.hash is None and a.eq is True) +- ) +- +- tab = " " +- +- unique_filename = _generate_unique_filename(cls, "hash") +- type_hash = hash(unique_filename) +- # If eq is custom generated, we need to include the functions in globs +- globs = {} +- +- hash_def = "def __hash__(self" +- hash_func = "hash((" +- closing_braces = "))" +- if not cache_hash: +- hash_def += "):" +- else: +- hash_def += ", *" +- +- hash_def += ( +- ", _cache_wrapper=" +- + "__import__('attr._make')._make._CacheHashWrapper):" +- ) +- hash_func = "_cache_wrapper(" + hash_func +- closing_braces += ")" +- +- method_lines = [hash_def] +- +- def append_hash_computation_lines(prefix, indent): +- """ +- Generate the code for actually computing the hash code. +- Below this will either be returned directly or used to compute +- a value which is then cached, depending on the value of cache_hash +- """ +- +- method_lines.extend( +- [ +- indent + prefix + hash_func, +- indent + " %d," % (type_hash,), +- ] +- ) +- +- for a in attrs: +- if a.eq_key: +- cmp_name = "_%s_key" % (a.name,) +- globs[cmp_name] = a.eq_key +- method_lines.append( +- indent + " %s(self.%s)," % (cmp_name, a.name) +- ) +- else: +- method_lines.append(indent + " self.%s," % a.name) +- +- method_lines.append(indent + " " + closing_braces) +- +- if cache_hash: +- method_lines.append(tab + "if self.%s is None:" % _hash_cache_field) +- if frozen: +- append_hash_computation_lines( +- "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2 +- ) +- method_lines.append(tab * 2 + ")")  # close __setattr__ +- else: +- append_hash_computation_lines( +- "self.%s = " % _hash_cache_field, tab * 2 +- ) +- method_lines.append(tab + "return self.%s" % _hash_cache_field) +- else: +- append_hash_computation_lines("return ", tab) +- +- script = "\n".join(method_lines) +- return _make_method("__hash__", script, unique_filename, globs) +- +- +-def _add_hash(cls, attrs): +- """ +- Add a hash method to *cls*. +- """ +- cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False) +- return cls +- +- +-def _make_ne(): +- """ +- Create __ne__ method.
+- """ +- +- def __ne__(self, other): +- """ +- Check equality and either forward a NotImplemented or +- return the result negated. +- """ +- result = self.__eq__(other) +- if result is NotImplemented: +- return NotImplemented +- +- return not result +- +- return __ne__ +- +- +-def _make_eq(cls, attrs): +- """ +- Create __eq__ method for *cls* with *attrs*. +- """ +- attrs = [a for a in attrs if a.eq] +- +- unique_filename = _generate_unique_filename(cls, "eq") +- lines = [ +- "def __eq__(self, other):", +- " if other.__class__ is not self.__class__:", +- " return NotImplemented", +- ] +- +- # We can't just do a big self.x = other.x and... clause due to +- # irregularities like nan == nan is false but (nan,) == (nan,) is true. +- globs = {} +- if attrs: +- lines.append(" return (") +- others = [" ) == ("] +- for a in attrs: +- if a.eq_key: +- cmp_name = "_%s_key" % (a.name,) +- # Add the key function to the global namespace +- # of the evaluated function. +- globs[cmp_name] = a.eq_key +- lines.append( +- " %s(self.%s)," +- % ( +- cmp_name, +- a.name, +- ) +- ) +- others.append( +- " %s(other.%s)," +- % ( +- cmp_name, +- a.name, +- ) +- ) +- else: +- lines.append(" self.%s," % (a.name,)) +- others.append(" other.%s," % (a.name,)) +- +- lines += others + [" )"] +- else: +- lines.append(" return True") +- +- script = "\n".join(lines) +- +- return _make_method("__eq__", script, unique_filename, globs) +- +- +-def _make_order(cls, attrs): +- """ +- Create ordering methods for *cls* with *attrs*. +- """ +- attrs = [a for a in attrs if a.order] +- +- def attrs_to_tuple(obj): +- """ +- Save us some typing. +- """ +- return tuple( +- key(value) if key else value +- for value, key in ( +- (getattr(obj, a.name), a.order_key) for a in attrs +- ) +- ) +- +- def __lt__(self, other): +- """ +- Automatically created by attrs. +- """ +- if other.__class__ is self.__class__: +- return attrs_to_tuple(self) < attrs_to_tuple(other) +- +- return NotImplemented +- +- def __le__(self, other): +- """ +- Automatically created by attrs. +- """ +- if other.__class__ is self.__class__: +- return attrs_to_tuple(self) <= attrs_to_tuple(other) +- +- return NotImplemented +- +- def __gt__(self, other): +- """ +- Automatically created by attrs. +- """ +- if other.__class__ is self.__class__: +- return attrs_to_tuple(self) > attrs_to_tuple(other) +- +- return NotImplemented +- +- def __ge__(self, other): +- """ +- Automatically created by attrs. +- """ +- if other.__class__ is self.__class__: +- return attrs_to_tuple(self) >= attrs_to_tuple(other) +- +- return NotImplemented +- +- return __lt__, __le__, __gt__, __ge__ +- +- +-def _add_eq(cls, attrs=None): +- """ +- Add equality methods to *cls* with *attrs*. +- """ +- if attrs is None: +- attrs = cls.__attrs_attrs__ +- +- cls.__eq__ = _make_eq(cls, attrs) +- cls.__ne__ = _make_ne() +- +- return cls +- +- +-if HAS_F_STRINGS: +- +- def _make_repr(attrs, ns, cls): +- unique_filename = _generate_unique_filename(cls, "repr") +- # Figure out which attributes to include, and which function to use to +- # format them. The a.repr value can be either bool or a custom +- # callable. 
+- attr_names_with_reprs = tuple( +- (a.name, (repr if a.repr is True else a.repr), a.init) +- for a in attrs +- if a.repr is not False +- ) +- globs = { +- name + "_repr": r +- for name, r, _ in attr_names_with_reprs +- if r != repr +- } +- globs["_compat"] = _compat +- globs["AttributeError"] = AttributeError +- globs["NOTHING"] = NOTHING +- attribute_fragments = [] +- for name, r, i in attr_names_with_reprs: +- accessor = ( +- "self." + name +- if i +- else 'getattr(self, "' + name + '", NOTHING)' +- ) +- fragment = ( +- "%s={%s!r}" % (name, accessor) +- if r == repr +- else "%s={%s_repr(%s)}" % (name, name, accessor) +- ) +- attribute_fragments.append(fragment) +- repr_fragment = ", ".join(attribute_fragments) +- +- if ns is None: +- cls_name_fragment = ( +- '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}' +- ) +- else: +- cls_name_fragment = ns + ".{self.__class__.__name__}" +- +- lines = [ +- "def __repr__(self):", +- " try:", +- " already_repring = _compat.repr_context.already_repring", +- " except AttributeError:", +- " already_repring = {id(self),}", +- " _compat.repr_context.already_repring = already_repring", +- " else:", +- " if id(self) in already_repring:", +- " return '...'", +- " else:", +- " already_repring.add(id(self))", +- " try:", +- " return f'%s(%s)'" % (cls_name_fragment, repr_fragment), +- " finally:", +- " already_repring.remove(id(self))", +- ] +- +- return _make_method( +- "__repr__", "\n".join(lines), unique_filename, globs=globs +- ) +- +-else: +- +- def _make_repr(attrs, ns, _): +- """ +- Make a repr method that includes relevant *attrs*, adding *ns* to the +- full name. +- """ +- +- # Figure out which attributes to include, and which function to use to +- # format them. The a.repr value can be either bool or a custom +- # callable. +- attr_names_with_reprs = tuple( +- (a.name, repr if a.repr is True else a.repr) +- for a in attrs +- if a.repr is not False +- ) +- +- def __repr__(self): +- """ +- Automatically created by attrs. +- """ +- try: +- already_repring = _compat.repr_context.already_repring +- except AttributeError: +- already_repring = set() +- _compat.repr_context.already_repring = already_repring +- +- if id(self) in already_repring: +- return "..." +- real_cls = self.__class__ +- if ns is None: +- class_name = real_cls.__qualname__.rsplit(">.", 1)[-1] +- else: +- class_name = ns + "." + real_cls.__name__ +- +- # Since 'self' remains on the stack (i.e.: strongly referenced) +- # for the duration of this call, it's safe to depend on id(...) +- # stability, and not need to track the instance and therefore +- # worry about properties like weakref- or hash-ability. +- already_repring.add(id(self)) +- try: +- result = [class_name, "("] +- first = True +- for name, attr_repr in attr_names_with_reprs: +- if first: +- first = False +- else: +- result.append(", ") +- result.extend( +- (name, "=", attr_repr(getattr(self, name, NOTHING))) +- ) +- return "".join(result) + ")" +- finally: +- already_repring.remove(id(self)) +- +- return __repr__ +- +- +-def _add_repr(cls, ns=None, attrs=None): +- """ +- Add a repr method to *cls*. +- """ +- if attrs is None: +- attrs = cls.__attrs_attrs__ +- +- cls.__repr__ = _make_repr(attrs, ns, cls) +- return cls +- +- +-def fields(cls): +- """ +- Return the tuple of ``attrs`` attributes for a class. +- +- The tuple also allows accessing the fields by their names (see below for +- examples). +- +- :param type cls: Class to introspect. +- +- :raise TypeError: If *cls* is not a class. 
+- :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` +- class. +- +- :rtype: tuple (with name accessors) of `attrs.Attribute` +- +- .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields +- by name. +- """ +- if not isinstance(cls, type): +- raise TypeError("Passed object must be a class.") +- attrs = getattr(cls, "__attrs_attrs__", None) +- if attrs is None: +- raise NotAnAttrsClassError( +- "{cls!r} is not an attrs-decorated class.".format(cls=cls) +- ) +- return attrs +- +- +-def fields_dict(cls): +- """ +- Return an ordered dictionary of ``attrs`` attributes for a class, whose +- keys are the attribute names. +- +- :param type cls: Class to introspect. +- +- :raise TypeError: If *cls* is not a class. +- :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` +- class. +- +- :rtype: an ordered dict where keys are attribute names and values are +- `attrs.Attribute`\\ s. This will be a `dict` if it's +- naturally ordered like on Python 3.6+ or an +- :class:`~collections.OrderedDict` otherwise. +- +- .. versionadded:: 18.1.0 +- """ +- if not isinstance(cls, type): +- raise TypeError("Passed object must be a class.") +- attrs = getattr(cls, "__attrs_attrs__", None) +- if attrs is None: +- raise NotAnAttrsClassError( +- "{cls!r} is not an attrs-decorated class.".format(cls=cls) +- ) +- return ordered_dict((a.name, a) for a in attrs) +- +- +-def validate(inst): +- """ +- Validate all attributes on *inst* that have a validator. +- +- Leaves all exceptions through. +- +- :param inst: Instance of a class with ``attrs`` attributes. +- """ +- if _config._run_validators is False: +- return +- +- for a in fields(inst.__class__): +- v = a.validator +- if v is not None: +- v(inst, a, getattr(inst, a.name)) +- +- +-def _is_slot_cls(cls): +- return "__slots__" in cls.__dict__ +- +- +-def _is_slot_attr(a_name, base_attr_map): +- """ +- Check if the attribute name comes from a slot class. +- """ +- return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name]) +- +- +-def _make_init( +- cls, +- attrs, +- pre_init, +- post_init, +- frozen, +- slots, +- cache_hash, +- base_attr_map, +- is_exc, +- cls_on_setattr, +- attrs_init, +-): +- has_cls_on_setattr = ( +- cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP +- ) +- +- if frozen and has_cls_on_setattr: +- raise ValueError("Frozen classes can't use on_setattr.") +- +- needs_cached_setattr = cache_hash or frozen +- filtered_attrs = [] +- attr_dict = {} +- for a in attrs: +- if not a.init and a.default is NOTHING: +- continue +- +- filtered_attrs.append(a) +- attr_dict[a.name] = a +- +- if a.on_setattr is not None: +- if frozen is True: +- raise ValueError("Frozen classes can't use on_setattr.") +- +- needs_cached_setattr = True +- elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP: +- needs_cached_setattr = True +- +- unique_filename = _generate_unique_filename(cls, "init") +- +- script, globs, annotations = _attrs_to_init_script( +- filtered_attrs, +- frozen, +- slots, +- pre_init, +- post_init, +- cache_hash, +- base_attr_map, +- is_exc, +- has_cls_on_setattr, +- attrs_init, +- ) +- if cls.__module__ in sys.modules: +- # This makes typing.get_type_hints(CLS.__init__) resolve string types. +- globs.update(sys.modules[cls.__module__].__dict__) +- +- globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) +- +- if needs_cached_setattr: +- # Save the lookup overhead in __init__ if we need to circumvent +- # setattr hooks. 
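`fields`, `fields_dict`, and `validate` above are the introspection entry points; a quick sketch (invented class):

    import attr

    @attr.s
    class C:
        x = attr.ib()
        y = attr.ib(default=0)

    fs = attr.fields(C)
    assert fs.x.name == "x"                        # tuple with name accessors
    assert list(attr.fields_dict(C)) == ["x", "y"]
    attr.validate(C(1))                            # no-op here: no validators defined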
+- globs["_setattr"] = _obj_setattr +- +- init = _make_method( +- "__attrs_init__" if attrs_init else "__init__", +- script, +- unique_filename, +- globs, +- ) +- init.__annotations__ = annotations +- +- return init +- +- +-def _setattr(attr_name, value_var, has_on_setattr): +- """ +- Use the cached object.setattr to set *attr_name* to *value_var*. +- """ +- return "_setattr(self, '%s', %s)" % (attr_name, value_var) +- +- +-def _setattr_with_converter(attr_name, value_var, has_on_setattr): +- """ +- Use the cached object.setattr to set *attr_name* to *value_var*, but run +- its converter first. +- """ +- return "_setattr(self, '%s', %s(%s))" % ( +- attr_name, +- _init_converter_pat % (attr_name,), +- value_var, +- ) +- +- +-def _assign(attr_name, value, has_on_setattr): +- """ +- Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise +- relegate to _setattr. +- """ +- if has_on_setattr: +- return _setattr(attr_name, value, True) +- +- return "self.%s = %s" % (attr_name, value) +- +- +-def _assign_with_converter(attr_name, value_var, has_on_setattr): +- """ +- Unless *attr_name* has an on_setattr hook, use normal assignment after +- conversion. Otherwise relegate to _setattr_with_converter. +- """ +- if has_on_setattr: +- return _setattr_with_converter(attr_name, value_var, True) +- +- return "self.%s = %s(%s)" % ( +- attr_name, +- _init_converter_pat % (attr_name,), +- value_var, +- ) +- +- +-def _attrs_to_init_script( +- attrs, +- frozen, +- slots, +- pre_init, +- post_init, +- cache_hash, +- base_attr_map, +- is_exc, +- has_cls_on_setattr, +- attrs_init, +-): +- """ +- Return a script of an initializer for *attrs* and a dict of globals. +- +- The globals are expected by the generated script. +- +- If *frozen* is True, we cannot set the attributes directly so we use +- a cached ``object.__setattr__``. +- """ +- lines = [] +- if pre_init: +- lines.append("self.__attrs_pre_init__()") +- +- if frozen is True: +- if slots is True: +- fmt_setter = _setattr +- fmt_setter_with_converter = _setattr_with_converter +- else: +- # Dict frozen classes assign directly to __dict__. +- # But only if the attribute doesn't come from an ancestor slot +- # class. +- # Note _inst_dict will be used again below if cache_hash is True +- lines.append("_inst_dict = self.__dict__") +- +- def fmt_setter(attr_name, value_var, has_on_setattr): +- if _is_slot_attr(attr_name, base_attr_map): +- return _setattr(attr_name, value_var, has_on_setattr) +- +- return "_inst_dict['%s'] = %s" % (attr_name, value_var) +- +- def fmt_setter_with_converter( +- attr_name, value_var, has_on_setattr +- ): +- if has_on_setattr or _is_slot_attr(attr_name, base_attr_map): +- return _setattr_with_converter( +- attr_name, value_var, has_on_setattr +- ) +- +- return "_inst_dict['%s'] = %s(%s)" % ( +- attr_name, +- _init_converter_pat % (attr_name,), +- value_var, +- ) +- +- else: +- # Not frozen. +- fmt_setter = _assign +- fmt_setter_with_converter = _assign_with_converter +- +- args = [] +- kw_only_args = [] +- attrs_to_validate = [] +- +- # This is a dictionary of names to validator and converter callables. +- # Injecting this into __init__ globals lets us avoid lookups. 
+- names_for_globals = {} +- annotations = {"return": None} +- +- for a in attrs: +- if a.validator: +- attrs_to_validate.append(a) +- +- attr_name = a.name +- has_on_setattr = a.on_setattr is not None or ( +- a.on_setattr is not setters.NO_OP and has_cls_on_setattr +- ) +- arg_name = a.name.lstrip("_") +- +- has_factory = isinstance(a.default, Factory) +- if has_factory and a.default.takes_self: +- maybe_self = "self" +- else: +- maybe_self = "" +- +- if a.init is False: +- if has_factory: +- init_factory_name = _init_factory_pat.format(a.name) +- if a.converter is not None: +- lines.append( +- fmt_setter_with_converter( +- attr_name, +- init_factory_name + "(%s)" % (maybe_self,), +- has_on_setattr, +- ) +- ) +- conv_name = _init_converter_pat % (a.name,) +- names_for_globals[conv_name] = a.converter +- else: +- lines.append( +- fmt_setter( +- attr_name, +- init_factory_name + "(%s)" % (maybe_self,), +- has_on_setattr, +- ) +- ) +- names_for_globals[init_factory_name] = a.default.factory +- else: +- if a.converter is not None: +- lines.append( +- fmt_setter_with_converter( +- attr_name, +- "attr_dict['%s'].default" % (attr_name,), +- has_on_setattr, +- ) +- ) +- conv_name = _init_converter_pat % (a.name,) +- names_for_globals[conv_name] = a.converter +- else: +- lines.append( +- fmt_setter( +- attr_name, +- "attr_dict['%s'].default" % (attr_name,), +- has_on_setattr, +- ) +- ) +- elif a.default is not NOTHING and not has_factory: +- arg = "%s=attr_dict['%s'].default" % (arg_name, attr_name) +- if a.kw_only: +- kw_only_args.append(arg) +- else: +- args.append(arg) +- +- if a.converter is not None: +- lines.append( +- fmt_setter_with_converter( +- attr_name, arg_name, has_on_setattr +- ) +- ) +- names_for_globals[ +- _init_converter_pat % (a.name,) +- ] = a.converter +- else: +- lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) +- +- elif has_factory: +- arg = "%s=NOTHING" % (arg_name,) +- if a.kw_only: +- kw_only_args.append(arg) +- else: +- args.append(arg) +- lines.append("if %s is not NOTHING:" % (arg_name,)) +- +- init_factory_name = _init_factory_pat.format(a.name) +- if a.converter is not None: +- lines.append( +- " " +- + fmt_setter_with_converter( +- attr_name, arg_name, has_on_setattr +- ) +- ) +- lines.append("else:") +- lines.append( +- " " +- + fmt_setter_with_converter( +- attr_name, +- init_factory_name + "(" + maybe_self + ")", +- has_on_setattr, +- ) +- ) +- names_for_globals[ +- _init_converter_pat % (a.name,) +- ] = a.converter +- else: +- lines.append( +- " " + fmt_setter(attr_name, arg_name, has_on_setattr) +- ) +- lines.append("else:") +- lines.append( +- " " +- + fmt_setter( +- attr_name, +- init_factory_name + "(" + maybe_self + ")", +- has_on_setattr, +- ) +- ) +- names_for_globals[init_factory_name] = a.default.factory +- else: +- if a.kw_only: +- kw_only_args.append(arg_name) +- else: +- args.append(arg_name) +- +- if a.converter is not None: +- lines.append( +- fmt_setter_with_converter( +- attr_name, arg_name, has_on_setattr +- ) +- ) +- names_for_globals[ +- _init_converter_pat % (a.name,) +- ] = a.converter +- else: +- lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) +- +- if a.init is True: +- if a.type is not None and a.converter is None: +- annotations[arg_name] = a.type +- elif a.converter is not None: +- # Try to get the type from the converter. +- t = _AnnotationExtractor(a.converter).get_first_param_type() +- if t: +- annotations[arg_name] = t +- +- if attrs_to_validate: # we can skip this if there are no validators. 
+- names_for_globals["_config"] = _config +- lines.append("if _config._run_validators is True:") +- for a in attrs_to_validate: +- val_name = "__attr_validator_" + a.name +- attr_name = "__attr_" + a.name +- lines.append( +- " %s(self, %s, self.%s)" % (val_name, attr_name, a.name) +- ) +- names_for_globals[val_name] = a.validator +- names_for_globals[attr_name] = a +- +- if post_init: +- lines.append("self.__attrs_post_init__()") +- +- # because this is set only after __attrs_post_init__ is called, a crash +- # will result if post-init tries to access the hash code. This seemed +- # preferable to setting this beforehand, in which case alteration to +- # field values during post-init combined with post-init accessing the +- # hash code would result in silent bugs. +- if cache_hash: +- if frozen: +- if slots: +- # if frozen and slots, then _setattr defined above +- init_hash_cache = "_setattr(self, '%s', %s)" +- else: +- # if frozen and not slots, then _inst_dict defined above +- init_hash_cache = "_inst_dict['%s'] = %s" +- else: +- init_hash_cache = "self.%s = %s" +- lines.append(init_hash_cache % (_hash_cache_field, "None")) +- +- # For exceptions we rely on BaseException.__init__ for proper +- # initialization. +- if is_exc: +- vals = ",".join("self." + a.name for a in attrs if a.init) +- +- lines.append("BaseException.__init__(self, %s)" % (vals,)) +- +- args = ", ".join(args) +- if kw_only_args: +- args += "%s*, %s" % ( +- ", " if args else "", # leading comma +- ", ".join(kw_only_args), # kw_only args +- ) +- return ( +- """\ +-def {init_name}(self, {args}): +- {lines} +-""".format( +- init_name=("__attrs_init__" if attrs_init else "__init__"), +- args=args, +- lines="\n ".join(lines) if lines else "pass", +- ), +- names_for_globals, +- annotations, +- ) +- +- +-class Attribute: +- """ +- *Read-only* representation of an attribute. +- +- The class has *all* arguments of `attr.ib` (except for ``factory`` +- which is only syntactic sugar for ``default=Factory(...)`` plus the +- following: +- +- - ``name`` (`str`): The name of the attribute. +- - ``inherited`` (`bool`): Whether or not that attribute has been inherited +- from a base class. +- - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The callables +- that are used for comparing and ordering objects by this attribute, +- respectively. These are set by passing a callable to `attr.ib`'s ``eq``, +- ``order``, or ``cmp`` arguments. See also :ref:`comparison customization +- `. +- +- Instances of this class are frequently used for introspection purposes +- like: +- +- - `fields` returns a tuple of them. +- - Validators get them passed as the first argument. +- - The :ref:`field transformer ` hook receives a list of +- them. +- +- .. versionadded:: 20.1.0 *inherited* +- .. versionadded:: 20.1.0 *on_setattr* +- .. versionchanged:: 20.2.0 *inherited* is not taken into account for +- equality checks and hashing anymore. +- .. versionadded:: 21.1.0 *eq_key* and *order_key* +- +- For the full version history of the fields, see `attr.ib`. +- """ +- +- __slots__ = ( +- "name", +- "default", +- "validator", +- "repr", +- "eq", +- "eq_key", +- "order", +- "order_key", +- "hash", +- "init", +- "metadata", +- "type", +- "converter", +- "kw_only", +- "inherited", +- "on_setattr", +- ) +- +- def __init__( +- self, +- name, +- default, +- validator, +- repr, +- cmp, # XXX: unused, remove along with other cmp code. 
+- hash, +- init, +- inherited, +- metadata=None, +- type=None, +- converter=None, +- kw_only=False, +- eq=None, +- eq_key=None, +- order=None, +- order_key=None, +- on_setattr=None, +- ): +- eq, eq_key, order, order_key = _determine_attrib_eq_order( +- cmp, eq_key or eq, order_key or order, True +- ) +- +- # Cache this descriptor here to speed things up later. +- bound_setattr = _obj_setattr.__get__(self, Attribute) +- +- # Despite the big red warning, people *do* instantiate `Attribute` +- # themselves. +- bound_setattr("name", name) +- bound_setattr("default", default) +- bound_setattr("validator", validator) +- bound_setattr("repr", repr) +- bound_setattr("eq", eq) +- bound_setattr("eq_key", eq_key) +- bound_setattr("order", order) +- bound_setattr("order_key", order_key) +- bound_setattr("hash", hash) +- bound_setattr("init", init) +- bound_setattr("converter", converter) +- bound_setattr( +- "metadata", +- ( +- types.MappingProxyType(dict(metadata)) # Shallow copy +- if metadata +- else _empty_metadata_singleton +- ), +- ) +- bound_setattr("type", type) +- bound_setattr("kw_only", kw_only) +- bound_setattr("inherited", inherited) +- bound_setattr("on_setattr", on_setattr) +- +- def __setattr__(self, name, value): +- raise FrozenInstanceError() +- +- @classmethod +- def from_counting_attr(cls, name, ca, type=None): +- # type holds the annotated value. deal with conflicts: +- if type is None: +- type = ca.type +- elif ca.type is not None: +- raise ValueError( +- "Type annotation and type argument cannot both be present" +- ) +- inst_dict = { +- k: getattr(ca, k) +- for k in Attribute.__slots__ +- if k +- not in ( +- "name", +- "validator", +- "default", +- "type", +- "inherited", +- ) # exclude methods and deprecated alias +- } +- return cls( +- name=name, +- validator=ca._validator, +- default=ca._default, +- type=type, +- cmp=None, +- inherited=False, +- **inst_dict +- ) +- +- # Don't use attr.evolve since fields(Attribute) doesn't work +- def evolve(self, **changes): +- """ +- Copy *self* and apply *changes*. +- +- This works similarly to `attr.evolve` but that function does not work +- with ``Attribute``. +- +- It is mainly meant to be used for `transform-fields`. +- +- .. versionadded:: 20.3.0 +- """ +- new = copy.copy(self) +- +- new._setattrs(changes.items()) +- +- return new +- +- # Don't use _add_pickle since fields(Attribute) doesn't work +- def __getstate__(self): +- """ +- Play nice with pickle. +- """ +- return tuple( +- getattr(self, name) if name != "metadata" else dict(self.metadata) +- for name in self.__slots__ +- ) +- +- def __setstate__(self, state): +- """ +- Play nice with pickle. 
+- """ +- self._setattrs(zip(self.__slots__, state)) +- +- def _setattrs(self, name_values_pairs): +- bound_setattr = _obj_setattr.__get__(self, Attribute) +- for name, value in name_values_pairs: +- if name != "metadata": +- bound_setattr(name, value) +- else: +- bound_setattr( +- name, +- types.MappingProxyType(dict(value)) +- if value +- else _empty_metadata_singleton, +- ) +- +- +-_a = [ +- Attribute( +- name=name, +- default=NOTHING, +- validator=None, +- repr=True, +- cmp=None, +- eq=True, +- order=False, +- hash=(name != "metadata"), +- init=True, +- inherited=False, +- ) +- for name in Attribute.__slots__ +-] +- +-Attribute = _add_hash( +- _add_eq( +- _add_repr(Attribute, attrs=_a), +- attrs=[a for a in _a if a.name != "inherited"], +- ), +- attrs=[a for a in _a if a.hash and a.name != "inherited"], +-) +- +- +-class _CountingAttr: +- """ +- Intermediate representation of attributes that uses a counter to preserve +- the order in which the attributes have been defined. +- +- *Internal* data structure of the attrs library. Running into is most +- likely the result of a bug like a forgotten `@attr.s` decorator. +- """ +- +- __slots__ = ( +- "counter", +- "_default", +- "repr", +- "eq", +- "eq_key", +- "order", +- "order_key", +- "hash", +- "init", +- "metadata", +- "_validator", +- "converter", +- "type", +- "kw_only", +- "on_setattr", +- ) +- __attrs_attrs__ = tuple( +- Attribute( +- name=name, +- default=NOTHING, +- validator=None, +- repr=True, +- cmp=None, +- hash=True, +- init=True, +- kw_only=False, +- eq=True, +- eq_key=None, +- order=False, +- order_key=None, +- inherited=False, +- on_setattr=None, +- ) +- for name in ( +- "counter", +- "_default", +- "repr", +- "eq", +- "order", +- "hash", +- "init", +- "on_setattr", +- ) +- ) + ( +- Attribute( +- name="metadata", +- default=None, +- validator=None, +- repr=True, +- cmp=None, +- hash=False, +- init=True, +- kw_only=False, +- eq=True, +- eq_key=None, +- order=False, +- order_key=None, +- inherited=False, +- on_setattr=None, +- ), +- ) +- cls_counter = 0 +- +- def __init__( +- self, +- default, +- validator, +- repr, +- cmp, +- hash, +- init, +- converter, +- metadata, +- type, +- kw_only, +- eq, +- eq_key, +- order, +- order_key, +- on_setattr, +- ): +- _CountingAttr.cls_counter += 1 +- self.counter = _CountingAttr.cls_counter +- self._default = default +- self._validator = validator +- self.converter = converter +- self.repr = repr +- self.eq = eq +- self.eq_key = eq_key +- self.order = order +- self.order_key = order_key +- self.hash = hash +- self.init = init +- self.metadata = metadata +- self.type = type +- self.kw_only = kw_only +- self.on_setattr = on_setattr +- +- def validator(self, meth): +- """ +- Decorator that adds *meth* to the list of validators. +- +- Returns *meth* unchanged. +- +- .. versionadded:: 17.1.0 +- """ +- if self._validator is None: +- self._validator = meth +- else: +- self._validator = and_(self._validator, meth) +- return meth +- +- def default(self, meth): +- """ +- Decorator that allows to set the default for an attribute. +- +- Returns *meth* unchanged. +- +- :raises DefaultAlreadySetError: If default has been set before. +- +- .. versionadded:: 17.1.0 +- """ +- if self._default is not NOTHING: +- raise DefaultAlreadySetError() +- +- self._default = Factory(meth, takes_self=True) +- +- return meth +- +- +-_CountingAttr = _add_eq(_add_repr(_CountingAttr)) +- +- +-class Factory: +- """ +- Stores a factory callable. 
+- +- If passed as the default value to `attrs.field`, the factory is used to +- generate a new value. +- +- :param callable factory: A callable that takes either none or exactly one +- mandatory positional argument depending on *takes_self*. +- :param bool takes_self: Pass the partially initialized instance that is +- being initialized as a positional argument. +- +- .. versionadded:: 17.1.0 *takes_self* +- """ +- +- __slots__ = ("factory", "takes_self") +- +- def __init__(self, factory, takes_self=False): +- """ +- `Factory` is part of the default machinery so if we want a default +- value here, we have to implement it ourselves. +- """ +- self.factory = factory +- self.takes_self = takes_self +- +- def __getstate__(self): +- """ +- Play nice with pickle. +- """ +- return tuple(getattr(self, name) for name in self.__slots__) +- +- def __setstate__(self, state): +- """ +- Play nice with pickle. +- """ +- for name, value in zip(self.__slots__, state): +- setattr(self, name, value) +- +- +-_f = [ +- Attribute( +- name=name, +- default=NOTHING, +- validator=None, +- repr=True, +- cmp=None, +- eq=True, +- order=False, +- hash=True, +- init=True, +- inherited=False, +- ) +- for name in Factory.__slots__ +-] +- +-Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f) +- +- +-def make_class(name, attrs, bases=(object,), **attributes_arguments): +- """ +- A quick way to create a new class called *name* with *attrs*. +- +- :param str name: The name for the new class. +- +- :param attrs: A list of names or a dictionary of mappings of names to +- attributes. +- +- If *attrs* is a list or an ordered dict (`dict` on Python 3.6+, +- `collections.OrderedDict` otherwise), the order is deduced from +- the order of the names or attributes inside *attrs*. Otherwise the +- order of the definition of the attributes is used. +- :type attrs: `list` or `dict` +- +- :param tuple bases: Classes that the new class will subclass. +- +- :param attributes_arguments: Passed unmodified to `attr.s`. +- +- :return: A new class with *attrs*. +- :rtype: type +- +- .. versionadded:: 17.1.0 *bases* +- .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained. +- """ +- if isinstance(attrs, dict): +- cls_dict = attrs +- elif isinstance(attrs, (list, tuple)): +- cls_dict = {a: attrib() for a in attrs} +- else: +- raise TypeError("attrs argument must be a dict or a list.") +- +- pre_init = cls_dict.pop("__attrs_pre_init__", None) +- post_init = cls_dict.pop("__attrs_post_init__", None) +- user_init = cls_dict.pop("__init__", None) +- +- body = {} +- if pre_init is not None: +- body["__attrs_pre_init__"] = pre_init +- if post_init is not None: +- body["__attrs_post_init__"] = post_init +- if user_init is not None: +- body["__init__"] = user_init +- +- type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body)) +- +- # For pickling to work, the __module__ variable needs to be set to the +- # frame where the class is created. Bypass this step in environments where +- # sys._getframe is not defined (Jython for example) or sys._getframe is not +- # defined for arguments greater than 0 (IronPython). +- try: +- type_.__module__ = sys._getframe(1).f_globals.get( +- "__name__", "__main__" +- ) +- except (AttributeError, ValueError): +- pass +- +- # We do it here for proper warnings with meaningful stacklevel. 
+- cmp = attributes_arguments.pop("cmp", None) +- ( +- attributes_arguments["eq"], +- attributes_arguments["order"], +- ) = _determine_attrs_eq_order( +- cmp, +- attributes_arguments.get("eq"), +- attributes_arguments.get("order"), +- True, +- ) +- +- return _attrs(these=cls_dict, **attributes_arguments)(type_) +- +- +-# These are required by within this module so we define them here and merely +-# import into .validators / .converters. +- +- +-@attrs(slots=True, hash=True) +-class _AndValidator: +- """ +- Compose many validators to a single one. +- """ +- +- _validators = attrib() +- +- def __call__(self, inst, attr, value): +- for v in self._validators: +- v(inst, attr, value) +- +- +-def and_(*validators): +- """ +- A validator that composes multiple validators into one. +- +- When called on a value, it runs all wrapped validators. +- +- :param callables validators: Arbitrary number of validators. +- +- .. versionadded:: 17.1.0 +- """ +- vals = [] +- for validator in validators: +- vals.extend( +- validator._validators +- if isinstance(validator, _AndValidator) +- else [validator] +- ) +- +- return _AndValidator(tuple(vals)) +- +- +-def pipe(*converters): +- """ +- A converter that composes multiple converters into one. +- +- When called on a value, it runs all wrapped converters, returning the +- *last* value. +- +- Type annotations will be inferred from the wrapped converters', if +- they have any. +- +- :param callables converters: Arbitrary number of converters. +- +- .. versionadded:: 20.1.0 +- """ +- +- def pipe_converter(val): +- for converter in converters: +- val = converter(val) +- +- return val +- +- if not converters: +- # If the converter list is empty, pipe_converter is the identity. +- A = typing.TypeVar("A") +- pipe_converter.__annotations__ = {"val": A, "return": A} +- else: +- # Get parameter type from first converter. +- t = _AnnotationExtractor(converters[0]).get_first_param_type() +- if t: +- pipe_converter.__annotations__["val"] = t +- +- # Get return type from last converter. +- rt = _AnnotationExtractor(converters[-1]).get_return_type() +- if rt: +- pipe_converter.__annotations__["return"] = rt +- +- return pipe_converter +diff --git a/src/poetry/core/_vendor/attr/_next_gen.py b/src/poetry/core/_vendor/attr/_next_gen.py +deleted file mode 100644 +index 5a06a74..0000000 +--- a/src/poetry/core/_vendor/attr/_next_gen.py ++++ /dev/null +@@ -1,220 +0,0 @@ +-# SPDX-License-Identifier: MIT +- +-""" +-These are Python 3.6+-only and keyword-only APIs that call `attr.s` and +-`attr.ib` with different default values. +-""" +- +- +-from functools import partial +- +-from . import setters +-from ._funcs import asdict as _asdict +-from ._funcs import astuple as _astuple +-from ._make import ( +- NOTHING, +- _frozen_setattrs, +- _ng_default_on_setattr, +- attrib, +- attrs, +-) +-from .exceptions import UnannotatedAttributeError +- +- +-def define( +- maybe_cls=None, +- *, +- these=None, +- repr=None, +- hash=None, +- init=None, +- slots=True, +- frozen=False, +- weakref_slot=True, +- str=False, +- auto_attribs=None, +- kw_only=False, +- cache_hash=False, +- auto_exc=True, +- eq=None, +- order=False, +- auto_detect=True, +- getstate_setstate=None, +- on_setattr=None, +- field_transformer=None, +- match_args=True, +-): +- r""" +- Define an ``attrs`` class. +- +- Differences to the classic `attr.s` that it uses underneath: +- +- - Automatically detect whether or not *auto_attribs* should be `True` (c.f. +- *auto_attribs* parameter). 
+- - If *frozen* is `False`, run converters and validators when setting an +- attribute by default. +- - *slots=True* +- +- .. caution:: +- +- Usually this has only upsides and few visible effects in everyday +- programming. But it *can* lead to some suprising behaviors, so please +- make sure to read :term:`slotted classes`. +- - *auto_exc=True* +- - *auto_detect=True* +- - *order=False* +- - Some options that were only relevant on Python 2 or were kept around for +- backwards-compatibility have been removed. +- +- Please note that these are all defaults and you can change them as you +- wish. +- +- :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves +- exactly like `attr.s`. If left `None`, `attr.s` will try to guess: +- +- 1. If any attributes are annotated and no unannotated `attrs.fields`\ s +- are found, it assumes *auto_attribs=True*. +- 2. Otherwise it assumes *auto_attribs=False* and tries to collect +- `attrs.fields`\ s. +- +- For now, please refer to `attr.s` for the rest of the parameters. +- +- .. versionadded:: 20.1.0 +- .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``. +- """ +- +- def do_it(cls, auto_attribs): +- return attrs( +- maybe_cls=cls, +- these=these, +- repr=repr, +- hash=hash, +- init=init, +- slots=slots, +- frozen=frozen, +- weakref_slot=weakref_slot, +- str=str, +- auto_attribs=auto_attribs, +- kw_only=kw_only, +- cache_hash=cache_hash, +- auto_exc=auto_exc, +- eq=eq, +- order=order, +- auto_detect=auto_detect, +- collect_by_mro=True, +- getstate_setstate=getstate_setstate, +- on_setattr=on_setattr, +- field_transformer=field_transformer, +- match_args=match_args, +- ) +- +- def wrap(cls): +- """ +- Making this a wrapper ensures this code runs during class creation. +- +- We also ensure that frozen-ness of classes is inherited. +- """ +- nonlocal frozen, on_setattr +- +- had_on_setattr = on_setattr not in (None, setters.NO_OP) +- +- # By default, mutable classes convert & validate on setattr. +- if frozen is False and on_setattr is None: +- on_setattr = _ng_default_on_setattr +- +- # However, if we subclass a frozen class, we inherit the immutability +- # and disable on_setattr. +- for base_cls in cls.__bases__: +- if base_cls.__setattr__ is _frozen_setattrs: +- if had_on_setattr: +- raise ValueError( +- "Frozen classes can't use on_setattr " +- "(frozen-ness was inherited)." +- ) +- +- on_setattr = setters.NO_OP +- break +- +- if auto_attribs is not None: +- return do_it(cls, auto_attribs) +- +- try: +- return do_it(cls, True) +- except UnannotatedAttributeError: +- return do_it(cls, False) +- +- # maybe_cls's type depends on the usage of the decorator. It's a class +- # if it's used as `@attrs` but ``None`` if used as `@attrs()`. +- if maybe_cls is None: +- return wrap +- else: +- return wrap(maybe_cls) +- +- +-mutable = define +-frozen = partial(define, frozen=True, on_setattr=None) +- +- +-def field( +- *, +- default=NOTHING, +- validator=None, +- repr=True, +- hash=None, +- init=True, +- metadata=None, +- converter=None, +- factory=None, +- kw_only=False, +- eq=None, +- order=None, +- on_setattr=None, +-): +- """ +- Identical to `attr.ib`, except keyword-only and with some arguments +- removed. +- +- .. 
versionadded:: 20.1.0 +- """ +- return attrib( +- default=default, +- validator=validator, +- repr=repr, +- hash=hash, +- init=init, +- metadata=metadata, +- converter=converter, +- factory=factory, +- kw_only=kw_only, +- eq=eq, +- order=order, +- on_setattr=on_setattr, +- ) +- +- +-def asdict(inst, *, recurse=True, filter=None, value_serializer=None): +- """ +- Same as `attr.asdict`, except that collections types are always retained +- and dict is always used as *dict_factory*. +- +- .. versionadded:: 21.3.0 +- """ +- return _asdict( +- inst=inst, +- recurse=recurse, +- filter=filter, +- value_serializer=value_serializer, +- retain_collection_types=True, +- ) +- +- +-def astuple(inst, *, recurse=True, filter=None): +- """ +- Same as `attr.astuple`, except that collections types are always retained +- and `tuple` is always used as the *tuple_factory*. +- +- .. versionadded:: 21.3.0 +- """ +- return _astuple( +- inst=inst, recurse=recurse, filter=filter, retain_collection_types=True +- ) +diff --git a/src/poetry/core/_vendor/attr/_version_info.py b/src/poetry/core/_vendor/attr/_version_info.py +deleted file mode 100644 +index 51a1312..0000000 +--- a/src/poetry/core/_vendor/attr/_version_info.py ++++ /dev/null +@@ -1,86 +0,0 @@ +-# SPDX-License-Identifier: MIT +- +- +-from functools import total_ordering +- +-from ._funcs import astuple +-from ._make import attrib, attrs +- +- +-@total_ordering +-@attrs(eq=False, order=False, slots=True, frozen=True) +-class VersionInfo: +- """ +- A version object that can be compared to tuple of length 1--4: +- +- >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2) +- True +- >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1) +- True +- >>> vi = attr.VersionInfo(19, 2, 0, "final") +- >>> vi < (19, 1, 1) +- False +- >>> vi < (19,) +- False +- >>> vi == (19, 2,) +- True +- >>> vi == (19, 2, 1) +- False +- +- .. versionadded:: 19.2 +- """ +- +- year = attrib(type=int) +- minor = attrib(type=int) +- micro = attrib(type=int) +- releaselevel = attrib(type=str) +- +- @classmethod +- def _from_version_string(cls, s): +- """ +- Parse *s* and return a _VersionInfo. +- """ +- v = s.split(".") +- if len(v) == 3: +- v.append("final") +- +- return cls( +- year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3] +- ) +- +- def _ensure_tuple(self, other): +- """ +- Ensure *other* is a tuple of a valid length. +- +- Returns a possibly transformed *other* and ourselves as a tuple of +- the same length as *other*. +- """ +- +- if self.__class__ is other.__class__: +- other = astuple(other) +- +- if not isinstance(other, tuple): +- raise NotImplementedError +- +- if not (1 <= len(other) <= 4): +- raise NotImplementedError +- +- return astuple(self)[: len(other)], other +- +- def __eq__(self, other): +- try: +- us, them = self._ensure_tuple(other) +- except NotImplementedError: +- return NotImplemented +- +- return us == them +- +- def __lt__(self, other): +- try: +- us, them = self._ensure_tuple(other) +- except NotImplementedError: +- return NotImplemented +- +- # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't +- # have to do anything special with releaselevel for now. +- return us < them +diff --git a/src/poetry/core/_vendor/attr/converters.py b/src/poetry/core/_vendor/attr/converters.py +deleted file mode 100644 +index a73626c..0000000 +--- a/src/poetry/core/_vendor/attr/converters.py ++++ /dev/null +@@ -1,144 +0,0 @@ +-# SPDX-License-Identifier: MIT +- +-""" +-Commonly useful converters. 
+-""" +- +- +-import typing +- +-from ._compat import _AnnotationExtractor +-from ._make import NOTHING, Factory, pipe +- +- +-__all__ = [ +- "default_if_none", +- "optional", +- "pipe", +- "to_bool", +-] +- +- +-def optional(converter): +- """ +- A converter that allows an attribute to be optional. An optional attribute +- is one which can be set to ``None``. +- +- Type annotations will be inferred from the wrapped converter's, if it +- has any. +- +- :param callable converter: the converter that is used for non-``None`` +- values. +- +- .. versionadded:: 17.1.0 +- """ +- +- def optional_converter(val): +- if val is None: +- return None +- return converter(val) +- +- xtr = _AnnotationExtractor(converter) +- +- t = xtr.get_first_param_type() +- if t: +- optional_converter.__annotations__["val"] = typing.Optional[t] +- +- rt = xtr.get_return_type() +- if rt: +- optional_converter.__annotations__["return"] = typing.Optional[rt] +- +- return optional_converter +- +- +-def default_if_none(default=NOTHING, factory=None): +- """ +- A converter that allows to replace ``None`` values by *default* or the +- result of *factory*. +- +- :param default: Value to be used if ``None`` is passed. Passing an instance +- of `attrs.Factory` is supported, however the ``takes_self`` option +- is *not*. +- :param callable factory: A callable that takes no parameters whose result +- is used if ``None`` is passed. +- +- :raises TypeError: If **neither** *default* or *factory* is passed. +- :raises TypeError: If **both** *default* and *factory* are passed. +- :raises ValueError: If an instance of `attrs.Factory` is passed with +- ``takes_self=True``. +- +- .. versionadded:: 18.2.0 +- """ +- if default is NOTHING and factory is None: +- raise TypeError("Must pass either `default` or `factory`.") +- +- if default is not NOTHING and factory is not None: +- raise TypeError( +- "Must pass either `default` or `factory` but not both." +- ) +- +- if factory is not None: +- default = Factory(factory) +- +- if isinstance(default, Factory): +- if default.takes_self: +- raise ValueError( +- "`takes_self` is not supported by default_if_none." +- ) +- +- def default_if_none_converter(val): +- if val is not None: +- return val +- +- return default.factory() +- +- else: +- +- def default_if_none_converter(val): +- if val is not None: +- return val +- +- return default +- +- return default_if_none_converter +- +- +-def to_bool(val): +- """ +- Convert "boolean" strings (e.g., from env. vars.) to real booleans. +- +- Values mapping to :code:`True`: +- +- - :code:`True` +- - :code:`"true"` / :code:`"t"` +- - :code:`"yes"` / :code:`"y"` +- - :code:`"on"` +- - :code:`"1"` +- - :code:`1` +- +- Values mapping to :code:`False`: +- +- - :code:`False` +- - :code:`"false"` / :code:`"f"` +- - :code:`"no"` / :code:`"n"` +- - :code:`"off"` +- - :code:`"0"` +- - :code:`0` +- +- :raises ValueError: for any other value. +- +- .. 
versionadded:: 21.3.0 +- """ +- if isinstance(val, str): +- val = val.lower() +- truthy = {True, "true", "t", "yes", "y", "on", "1", 1} +- falsy = {False, "false", "f", "no", "n", "off", "0", 0} +- try: +- if val in truthy: +- return True +- if val in falsy: +- return False +- except TypeError: +- # Raised when "val" is not hashable (e.g., lists) +- pass +- raise ValueError("Cannot convert value to bool: {}".format(val)) +diff --git a/src/poetry/core/_vendor/attr/exceptions.py b/src/poetry/core/_vendor/attr/exceptions.py +deleted file mode 100644 +index 5dc51e0..0000000 +--- a/src/poetry/core/_vendor/attr/exceptions.py ++++ /dev/null +@@ -1,92 +0,0 @@ +-# SPDX-License-Identifier: MIT +- +- +-class FrozenError(AttributeError): +- """ +- A frozen/immutable instance or attribute have been attempted to be +- modified. +- +- It mirrors the behavior of ``namedtuples`` by using the same error message +- and subclassing `AttributeError`. +- +- .. versionadded:: 20.1.0 +- """ +- +- msg = "can't set attribute" +- args = [msg] +- +- +-class FrozenInstanceError(FrozenError): +- """ +- A frozen instance has been attempted to be modified. +- +- .. versionadded:: 16.1.0 +- """ +- +- +-class FrozenAttributeError(FrozenError): +- """ +- A frozen attribute has been attempted to be modified. +- +- .. versionadded:: 20.1.0 +- """ +- +- +-class AttrsAttributeNotFoundError(ValueError): +- """ +- An ``attrs`` function couldn't find an attribute that the user asked for. +- +- .. versionadded:: 16.2.0 +- """ +- +- +-class NotAnAttrsClassError(ValueError): +- """ +- A non-``attrs`` class has been passed into an ``attrs`` function. +- +- .. versionadded:: 16.2.0 +- """ +- +- +-class DefaultAlreadySetError(RuntimeError): +- """ +- A default has been set using ``attr.ib()`` and is attempted to be reset +- using the decorator. +- +- .. versionadded:: 17.1.0 +- """ +- +- +-class UnannotatedAttributeError(RuntimeError): +- """ +- A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type +- annotation. +- +- .. versionadded:: 17.3.0 +- """ +- +- +-class PythonTooOldError(RuntimeError): +- """ +- It was attempted to use an ``attrs`` feature that requires a newer Python +- version. +- +- .. versionadded:: 18.2.0 +- """ +- +- +-class NotCallableError(TypeError): +- """ +- A ``attr.ib()`` requiring a callable has been set with a value +- that is not callable. +- +- .. versionadded:: 19.2.0 +- """ +- +- def __init__(self, msg, value): +- super(TypeError, self).__init__(msg, value) +- self.msg = msg +- self.value = value +- +- def __str__(self): +- return str(self.msg) +diff --git a/src/poetry/core/_vendor/attr/filters.py b/src/poetry/core/_vendor/attr/filters.py +deleted file mode 100644 +index baa25e9..0000000 +--- a/src/poetry/core/_vendor/attr/filters.py ++++ /dev/null +@@ -1,51 +0,0 @@ +-# SPDX-License-Identifier: MIT +- +-""" +-Commonly useful filters for `attr.asdict`. +-""" +- +-from ._make import Attribute +- +- +-def _split_what(what): +- """ +- Returns a tuple of `frozenset`s of classes and attributes. +- """ +- return ( +- frozenset(cls for cls in what if isinstance(cls, type)), +- frozenset(cls for cls in what if isinstance(cls, Attribute)), +- ) +- +- +-def include(*what): +- """ +- Include *what*. +- +- :param what: What to include. 
+- :type what: `list` of `type` or `attrs.Attribute`\\ s +- +- :rtype: `callable` +- """ +- cls, attrs = _split_what(what) +- +- def include_(attribute, value): +- return value.__class__ in cls or attribute in attrs +- +- return include_ +- +- +-def exclude(*what): +- """ +- Exclude *what*. +- +- :param what: What to exclude. +- :type what: `list` of classes or `attrs.Attribute`\\ s. +- +- :rtype: `callable` +- """ +- cls, attrs = _split_what(what) +- +- def exclude_(attribute, value): +- return value.__class__ not in cls and attribute not in attrs +- +- return exclude_ +diff --git a/src/poetry/core/_vendor/attr/setters.py b/src/poetry/core/_vendor/attr/setters.py +deleted file mode 100644 +index 12ed675..0000000 +--- a/src/poetry/core/_vendor/attr/setters.py ++++ /dev/null +@@ -1,73 +0,0 @@ +-# SPDX-License-Identifier: MIT +- +-""" +-Commonly used hooks for on_setattr. +-""" +- +- +-from . import _config +-from .exceptions import FrozenAttributeError +- +- +-def pipe(*setters): +- """ +- Run all *setters* and return the return value of the last one. +- +- .. versionadded:: 20.1.0 +- """ +- +- def wrapped_pipe(instance, attrib, new_value): +- rv = new_value +- +- for setter in setters: +- rv = setter(instance, attrib, rv) +- +- return rv +- +- return wrapped_pipe +- +- +-def frozen(_, __, ___): +- """ +- Prevent an attribute to be modified. +- +- .. versionadded:: 20.1.0 +- """ +- raise FrozenAttributeError() +- +- +-def validate(instance, attrib, new_value): +- """ +- Run *attrib*'s validator on *new_value* if it has one. +- +- .. versionadded:: 20.1.0 +- """ +- if _config._run_validators is False: +- return new_value +- +- v = attrib.validator +- if not v: +- return new_value +- +- v(instance, attrib, new_value) +- +- return new_value +- +- +-def convert(instance, attrib, new_value): +- """ +- Run *attrib*'s converter -- if it has one -- on *new_value* and return the +- result. +- +- .. versionadded:: 20.1.0 +- """ +- c = attrib.converter +- if c: +- return c(new_value) +- +- return new_value +- +- +-# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes. +-# autodata stopped working, so the docstring is inlined in the API docs. +-NO_OP = object() +diff --git a/src/poetry/core/_vendor/attr/validators.py b/src/poetry/core/_vendor/attr/validators.py +deleted file mode 100644 +index eece517..0000000 +--- a/src/poetry/core/_vendor/attr/validators.py ++++ /dev/null +@@ -1,594 +0,0 @@ +-# SPDX-License-Identifier: MIT +- +-""" +-Commonly useful validators. +-""" +- +- +-import operator +-import re +- +-from contextlib import contextmanager +- +-from ._config import get_run_validators, set_run_validators +-from ._make import _AndValidator, and_, attrib, attrs +-from .exceptions import NotCallableError +- +- +-try: +- Pattern = re.Pattern +-except AttributeError: # Python <3.7 lacks a Pattern type. +- Pattern = type(re.compile("")) +- +- +-__all__ = [ +- "and_", +- "deep_iterable", +- "deep_mapping", +- "disabled", +- "ge", +- "get_disabled", +- "gt", +- "in_", +- "instance_of", +- "is_callable", +- "le", +- "lt", +- "matches_re", +- "max_len", +- "min_len", +- "optional", +- "provides", +- "set_disabled", +-] +- +- +-def set_disabled(disabled): +- """ +- Globally disable or enable running validators. +- +- By default, they are run. +- +- :param disabled: If ``True``, disable running all validators. +- :type disabled: bool +- +- .. warning:: +- +- This function is not thread-safe! +- +- .. 
versionadded:: 21.3.0
+-    """
+-    set_run_validators(not disabled)
+-
+-
+-def get_disabled():
+-    """
+-    Return a bool indicating whether validators are currently disabled or not.
+-
+-    :return: ``True`` if validators are currently disabled.
+-    :rtype: bool
+-
+-    .. versionadded:: 21.3.0
+-    """
+-    return not get_run_validators()
+-
+-
+-@contextmanager
+-def disabled():
+-    """
+-    Context manager that disables running validators within its context.
+-
+-    .. warning::
+-
+-        This context manager is not thread-safe!
+-
+-    .. versionadded:: 21.3.0
+-    """
+-    set_run_validators(False)
+-    try:
+-        yield
+-    finally:
+-        set_run_validators(True)
+-
+-
+-@attrs(repr=False, slots=True, hash=True)
+-class _InstanceOfValidator:
+-    type = attrib()
+-
+-    def __call__(self, inst, attr, value):
+-        """
+-        We use a callable class to be able to change the ``__repr__``.
+-        """
+-        if not isinstance(value, self.type):
+-            raise TypeError(
+-                "'{name}' must be {type!r} (got {value!r} that is a "
+-                "{actual!r}).".format(
+-                    name=attr.name,
+-                    type=self.type,
+-                    actual=value.__class__,
+-                    value=value,
+-                ),
+-                attr,
+-                self.type,
+-                value,
+-            )
+-
+-    def __repr__(self):
+-        return "<instance_of validator for type {type!r}>".format(
+-            type=self.type
+-        )
+-
+-
+-def instance_of(type):
+-    """
+-    A validator that raises a `TypeError` if the initializer is called
+-    with a wrong type for this particular attribute (checks are performed
+-    using `isinstance` therefore it's also valid to pass a tuple of types).
+-
+-    :param type: The type to check for.
+-    :type type: type or tuple of types
+-
+-    :raises TypeError: With a human readable error message, the attribute
+-        (of type `attrs.Attribute`), the expected type, and the value it
+-        got.
+-    """
+-    return _InstanceOfValidator(type)
+-
+-
+-@attrs(repr=False, frozen=True, slots=True)
+-class _MatchesReValidator:
+-    pattern = attrib()
+-    match_func = attrib()
+-
+-    def __call__(self, inst, attr, value):
+-        """
+-        We use a callable class to be able to change the ``__repr__``.
+-        """
+-        if not self.match_func(value):
+-            raise ValueError(
+-                "'{name}' must match regex {pattern!r}"
+-                " ({value!r} doesn't)".format(
+-                    name=attr.name, pattern=self.pattern.pattern, value=value
+-                ),
+-                attr,
+-                self.pattern,
+-                value,
+-            )
+-
+-    def __repr__(self):
+-        return "<matches_re validator for pattern {pattern!r}>".format(
+-            pattern=self.pattern
+-        )
+-
+-
+-def matches_re(regex, flags=0, func=None):
+-    r"""
+-    A validator that raises `ValueError` if the initializer is called
+-    with a string that doesn't match *regex*.
+-
+-    :param regex: a regex string or precompiled pattern to match against
+-    :param int flags: flags that will be passed to the underlying re function
+-        (default 0)
+-    :param callable func: which underlying `re` function to call. Valid options
+-        are `re.fullmatch`, `re.search`, and `re.match`; the default ``None``
+-        means `re.fullmatch`. For performance reasons, the pattern is always
+-        precompiled using `re.compile`.
+-
+-    .. versionadded:: 19.2.0
+-    .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern.
+- """ +- valid_funcs = (re.fullmatch, None, re.search, re.match) +- if func not in valid_funcs: +- raise ValueError( +- "'func' must be one of {}.".format( +- ", ".join( +- sorted( +- e and e.__name__ or "None" for e in set(valid_funcs) +- ) +- ) +- ) +- ) +- +- if isinstance(regex, Pattern): +- if flags: +- raise TypeError( +- "'flags' can only be used with a string pattern; " +- "pass flags to re.compile() instead" +- ) +- pattern = regex +- else: +- pattern = re.compile(regex, flags) +- +- if func is re.match: +- match_func = pattern.match +- elif func is re.search: +- match_func = pattern.search +- else: +- match_func = pattern.fullmatch +- +- return _MatchesReValidator(pattern, match_func) +- +- +-@attrs(repr=False, slots=True, hash=True) +-class _ProvidesValidator: +- interface = attrib() +- +- def __call__(self, inst, attr, value): +- """ +- We use a callable class to be able to change the ``__repr__``. +- """ +- if not self.interface.providedBy(value): +- raise TypeError( +- "'{name}' must provide {interface!r} which {value!r} " +- "doesn't.".format( +- name=attr.name, interface=self.interface, value=value +- ), +- attr, +- self.interface, +- value, +- ) +- +- def __repr__(self): +- return "".format( +- interface=self.interface +- ) +- +- +-def provides(interface): +- """ +- A validator that raises a `TypeError` if the initializer is called +- with an object that does not provide the requested *interface* (checks are +- performed using ``interface.providedBy(value)`` (see `zope.interface +- `_). +- +- :param interface: The interface to check for. +- :type interface: ``zope.interface.Interface`` +- +- :raises TypeError: With a human readable error message, the attribute +- (of type `attrs.Attribute`), the expected interface, and the +- value it got. +- """ +- return _ProvidesValidator(interface) +- +- +-@attrs(repr=False, slots=True, hash=True) +-class _OptionalValidator: +- validator = attrib() +- +- def __call__(self, inst, attr, value): +- if value is None: +- return +- +- self.validator(inst, attr, value) +- +- def __repr__(self): +- return "".format( +- what=repr(self.validator) +- ) +- +- +-def optional(validator): +- """ +- A validator that makes an attribute optional. An optional attribute is one +- which can be set to ``None`` in addition to satisfying the requirements of +- the sub-validator. +- +- :param validator: A validator (or a list of validators) that is used for +- non-``None`` values. +- :type validator: callable or `list` of callables. +- +- .. versionadded:: 15.1.0 +- .. versionchanged:: 17.1.0 *validator* can be a list of validators. +- """ +- if isinstance(validator, list): +- return _OptionalValidator(_AndValidator(validator)) +- return _OptionalValidator(validator) +- +- +-@attrs(repr=False, slots=True, hash=True) +-class _InValidator: +- options = attrib() +- +- def __call__(self, inst, attr, value): +- try: +- in_options = value in self.options +- except TypeError: # e.g. `1 in "abc"` +- in_options = False +- +- if not in_options: +- raise ValueError( +- "'{name}' must be in {options!r} (got {value!r})".format( +- name=attr.name, options=self.options, value=value +- ), +- attr, +- self.options, +- value, +- ) +- +- def __repr__(self): +- return "".format( +- options=self.options +- ) +- +- +-def in_(options): +- """ +- A validator that raises a `ValueError` if the initializer is called +- with a value that does not belong in the options provided. The check is +- performed using ``value in options``. +- +- :param options: Allowed options. 
+-    :type options: list, tuple, `enum.Enum`, ...
+-
+-    :raises ValueError: With a human readable error message, the attribute (of
+-       type `attrs.Attribute`), the expected options, and the value it
+-       got.
+-
+-    .. versionadded:: 17.1.0
+-    .. versionchanged:: 22.1.0
+-       The ValueError was incomplete until now and only contained the human
+-       readable error message. Now it contains all the information that has
+-       been promised since 17.1.0.
+-    """
+-    return _InValidator(options)
+-
+-
+-@attrs(repr=False, slots=False, hash=True)
+-class _IsCallableValidator:
+-    def __call__(self, inst, attr, value):
+-        """
+-        We use a callable class to be able to change the ``__repr__``.
+-        """
+-        if not callable(value):
+-            message = (
+-                "'{name}' must be callable "
+-                "(got {value!r} that is a {actual!r})."
+-            )
+-            raise NotCallableError(
+-                msg=message.format(
+-                    name=attr.name, value=value, actual=value.__class__
+-                ),
+-                value=value,
+-            )
+-
+-    def __repr__(self):
+-        return "<is_callable validator>"
+-
+-
+-def is_callable():
+-    """
+-    A validator that raises a `attr.exceptions.NotCallableError` if the
+-    initializer is called with a value for this particular attribute
+-    that is not callable.
+-
+-    .. versionadded:: 19.1.0
+-
+-    :raises `attr.exceptions.NotCallableError`: With a human readable error
+-        message containing the attribute (`attrs.Attribute`) name,
+-        and the value it got.
+-    """
+-    return _IsCallableValidator()
+-
+-
+-@attrs(repr=False, slots=True, hash=True)
+-class _DeepIterable:
+-    member_validator = attrib(validator=is_callable())
+-    iterable_validator = attrib(
+-        default=None, validator=optional(is_callable())
+-    )
+-
+-    def __call__(self, inst, attr, value):
+-        """
+-        We use a callable class to be able to change the ``__repr__``.
+-        """
+-        if self.iterable_validator is not None:
+-            self.iterable_validator(inst, attr, value)
+-
+-        for member in value:
+-            self.member_validator(inst, attr, member)
+-
+-    def __repr__(self):
+-        iterable_identifier = (
+-            ""
+-            if self.iterable_validator is None
+-            else " {iterable!r}".format(iterable=self.iterable_validator)
+-        )
+-        return (
+-            "<deep_iterable validator for{iterable_identifier}"
+-            " iterables of {member!r}>"
+-        ).format(
+-            iterable_identifier=iterable_identifier,
+-            member=self.member_validator,
+-        )
+-
+-
+-def deep_iterable(member_validator, iterable_validator=None):
+-    """
+-    A validator that performs deep validation of an iterable.
+-
+-    :param member_validator: Validator(s) to apply to iterable members
+-    :param iterable_validator: Validator to apply to iterable itself
+-        (optional)
+-
+-    .. versionadded:: 19.1.0
+-
+-    :raises TypeError: if any sub-validators fail
+-    """
+-    if isinstance(member_validator, (list, tuple)):
+-        member_validator = and_(*member_validator)
+-    return _DeepIterable(member_validator, iterable_validator)
+-
+-
+-@attrs(repr=False, slots=True, hash=True)
+-class _DeepMapping:
+-    key_validator = attrib(validator=is_callable())
+-    value_validator = attrib(validator=is_callable())
+-    mapping_validator = attrib(default=None, validator=optional(is_callable()))
+-
+-    def __call__(self, inst, attr, value):
+-        """
+-        We use a callable class to be able to change the ``__repr__``.
+- """ +- if self.mapping_validator is not None: +- self.mapping_validator(inst, attr, value) +- +- for key in value: +- self.key_validator(inst, attr, key) +- self.value_validator(inst, attr, value[key]) +- +- def __repr__(self): +- return ( +- "" +- ).format(key=self.key_validator, value=self.value_validator) +- +- +-def deep_mapping(key_validator, value_validator, mapping_validator=None): +- """ +- A validator that performs deep validation of a dictionary. +- +- :param key_validator: Validator to apply to dictionary keys +- :param value_validator: Validator to apply to dictionary values +- :param mapping_validator: Validator to apply to top-level mapping +- attribute (optional) +- +- .. versionadded:: 19.1.0 +- +- :raises TypeError: if any sub-validators fail +- """ +- return _DeepMapping(key_validator, value_validator, mapping_validator) +- +- +-@attrs(repr=False, frozen=True, slots=True) +-class _NumberValidator: +- bound = attrib() +- compare_op = attrib() +- compare_func = attrib() +- +- def __call__(self, inst, attr, value): +- """ +- We use a callable class to be able to change the ``__repr__``. +- """ +- if not self.compare_func(value, self.bound): +- raise ValueError( +- "'{name}' must be {op} {bound}: {value}".format( +- name=attr.name, +- op=self.compare_op, +- bound=self.bound, +- value=value, +- ) +- ) +- +- def __repr__(self): +- return "".format( +- op=self.compare_op, bound=self.bound +- ) +- +- +-def lt(val): +- """ +- A validator that raises `ValueError` if the initializer is called +- with a number larger or equal to *val*. +- +- :param val: Exclusive upper bound for values +- +- .. versionadded:: 21.3.0 +- """ +- return _NumberValidator(val, "<", operator.lt) +- +- +-def le(val): +- """ +- A validator that raises `ValueError` if the initializer is called +- with a number greater than *val*. +- +- :param val: Inclusive upper bound for values +- +- .. versionadded:: 21.3.0 +- """ +- return _NumberValidator(val, "<=", operator.le) +- +- +-def ge(val): +- """ +- A validator that raises `ValueError` if the initializer is called +- with a number smaller than *val*. +- +- :param val: Inclusive lower bound for values +- +- .. versionadded:: 21.3.0 +- """ +- return _NumberValidator(val, ">=", operator.ge) +- +- +-def gt(val): +- """ +- A validator that raises `ValueError` if the initializer is called +- with a number smaller or equal to *val*. +- +- :param val: Exclusive lower bound for values +- +- .. versionadded:: 21.3.0 +- """ +- return _NumberValidator(val, ">", operator.gt) +- +- +-@attrs(repr=False, frozen=True, slots=True) +-class _MaxLengthValidator: +- max_length = attrib() +- +- def __call__(self, inst, attr, value): +- """ +- We use a callable class to be able to change the ``__repr__``. +- """ +- if len(value) > self.max_length: +- raise ValueError( +- "Length of '{name}' must be <= {max}: {len}".format( +- name=attr.name, max=self.max_length, len=len(value) +- ) +- ) +- +- def __repr__(self): +- return "".format(max=self.max_length) +- +- +-def max_len(length): +- """ +- A validator that raises `ValueError` if the initializer is called +- with a string or iterable that is longer than *length*. +- +- :param int length: Maximum length of the string or iterable +- +- .. versionadded:: 21.3.0 +- """ +- return _MaxLengthValidator(length) +- +- +-@attrs(repr=False, frozen=True, slots=True) +-class _MinLengthValidator: +- min_length = attrib() +- +- def __call__(self, inst, attr, value): +- """ +- We use a callable class to be able to change the ``__repr__``. 
+- """ +- if len(value) < self.min_length: +- raise ValueError( +- "Length of '{name}' must be => {min}: {len}".format( +- name=attr.name, min=self.min_length, len=len(value) +- ) +- ) +- +- def __repr__(self): +- return "".format(min=self.min_length) +- +- +-def min_len(length): +- """ +- A validator that raises `ValueError` if the initializer is called +- with a string or iterable that is shorter than *length*. +- +- :param int length: Minimum length of the string or iterable +- +- .. versionadded:: 22.1.0 +- """ +- return _MinLengthValidator(length) +diff --git a/src/poetry/core/_vendor/attrs/LICENSE b/src/poetry/core/_vendor/attrs/LICENSE +deleted file mode 100644 +index 2bd6453..0000000 +--- a/src/poetry/core/_vendor/attrs/LICENSE ++++ /dev/null +@@ -1,21 +0,0 @@ +-The MIT License (MIT) +- +-Copyright (c) 2015 Hynek Schlawack and the attrs contributors +- +-Permission is hereby granted, free of charge, to any person obtaining a copy +-of this software and associated documentation files (the "Software"), to deal +-in the Software without restriction, including without limitation the rights +-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +-copies of the Software, and to permit persons to whom the Software is +-furnished to do so, subject to the following conditions: +- +-The above copyright notice and this permission notice shall be included in all +-copies or substantial portions of the Software. +- +-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +-SOFTWARE. +diff --git a/src/poetry/core/_vendor/attrs/__init__.py b/src/poetry/core/_vendor/attrs/__init__.py +deleted file mode 100644 +index a704b8b..0000000 +--- a/src/poetry/core/_vendor/attrs/__init__.py ++++ /dev/null +@@ -1,70 +0,0 @@ +-# SPDX-License-Identifier: MIT +- +-from attr import ( +- NOTHING, +- Attribute, +- Factory, +- __author__, +- __copyright__, +- __description__, +- __doc__, +- __email__, +- __license__, +- __title__, +- __url__, +- __version__, +- __version_info__, +- assoc, +- cmp_using, +- define, +- evolve, +- field, +- fields, +- fields_dict, +- frozen, +- has, +- make_class, +- mutable, +- resolve_types, +- validate, +-) +-from attr._next_gen import asdict, astuple +- +-from . 
import converters, exceptions, filters, setters, validators +- +- +-__all__ = [ +- "__author__", +- "__copyright__", +- "__description__", +- "__doc__", +- "__email__", +- "__license__", +- "__title__", +- "__url__", +- "__version__", +- "__version_info__", +- "asdict", +- "assoc", +- "astuple", +- "Attribute", +- "cmp_using", +- "converters", +- "define", +- "evolve", +- "exceptions", +- "Factory", +- "field", +- "fields_dict", +- "fields", +- "filters", +- "frozen", +- "has", +- "make_class", +- "mutable", +- "NOTHING", +- "resolve_types", +- "setters", +- "validate", +- "validators", +-] +diff --git a/src/poetry/core/_vendor/attrs/converters.py b/src/poetry/core/_vendor/attrs/converters.py +deleted file mode 100644 +index edfa8d3..0000000 +--- a/src/poetry/core/_vendor/attrs/converters.py ++++ /dev/null +@@ -1,3 +0,0 @@ +-# SPDX-License-Identifier: MIT +- +-from attr.converters import * # noqa +diff --git a/src/poetry/core/_vendor/attrs/exceptions.py b/src/poetry/core/_vendor/attrs/exceptions.py +deleted file mode 100644 +index bd9efed..0000000 +--- a/src/poetry/core/_vendor/attrs/exceptions.py ++++ /dev/null +@@ -1,3 +0,0 @@ +-# SPDX-License-Identifier: MIT +- +-from attr.exceptions import * # noqa +diff --git a/src/poetry/core/_vendor/attrs/filters.py b/src/poetry/core/_vendor/attrs/filters.py +deleted file mode 100644 +index 5295900..0000000 +--- a/src/poetry/core/_vendor/attrs/filters.py ++++ /dev/null +@@ -1,3 +0,0 @@ +-# SPDX-License-Identifier: MIT +- +-from attr.filters import * # noqa +diff --git a/src/poetry/core/_vendor/attrs/py.typed b/src/poetry/core/_vendor/attrs/py.typed +deleted file mode 100644 +index e69de29..0000000 +diff --git a/src/poetry/core/_vendor/attrs/setters.py b/src/poetry/core/_vendor/attrs/setters.py +deleted file mode 100644 +index 9b50770..0000000 +--- a/src/poetry/core/_vendor/attrs/setters.py ++++ /dev/null +@@ -1,3 +0,0 @@ +-# SPDX-License-Identifier: MIT +- +-from attr.setters import * # noqa +diff --git a/src/poetry/core/_vendor/attrs/validators.py b/src/poetry/core/_vendor/attrs/validators.py +deleted file mode 100644 +index ab2c9b3..0000000 +--- a/src/poetry/core/_vendor/attrs/validators.py ++++ /dev/null +@@ -1,3 +0,0 @@ +-# SPDX-License-Identifier: MIT +- +-from attr.validators import * # noqa +diff --git a/src/poetry/core/_vendor/jsonschema/COPYING b/src/poetry/core/_vendor/jsonschema/COPYING +deleted file mode 100644 +index af9cfbd..0000000 +--- a/src/poetry/core/_vendor/jsonschema/COPYING ++++ /dev/null +@@ -1,19 +0,0 @@ +-Copyright (c) 2013 Julian Berman +- +-Permission is hereby granted, free of charge, to any person obtaining a copy +-of this software and associated documentation files (the "Software"), to deal +-in the Software without restriction, including without limitation the rights +-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +-copies of the Software, and to permit persons to whom the Software is +-furnished to do so, subject to the following conditions: +- +-The above copyright notice and this permission notice shall be included in +-all copies or substantial portions of the Software. +- +-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +-THE SOFTWARE. +diff --git a/src/poetry/core/_vendor/jsonschema/__init__.py b/src/poetry/core/_vendor/jsonschema/__init__.py +deleted file mode 100644 +index 75f2946..0000000 +--- a/src/poetry/core/_vendor/jsonschema/__init__.py ++++ /dev/null +@@ -1,58 +0,0 @@ +-""" +-An implementation of JSON Schema for Python +- +-The main functionality is provided by the validator classes for each of the +-supported JSON Schema versions. +- +-Most commonly, `validate` is the quickest way to simply validate a given +-instance under a schema, and will create a validator for you. +-""" +-import warnings +- +-from jsonschema._format import ( +- FormatChecker, +- draft3_format_checker, +- draft4_format_checker, +- draft6_format_checker, +- draft7_format_checker, +- draft201909_format_checker, +- draft202012_format_checker, +-) +-from jsonschema._types import TypeChecker +-from jsonschema.exceptions import ( +- ErrorTree, +- FormatError, +- RefResolutionError, +- SchemaError, +- ValidationError, +-) +-from jsonschema.protocols import Validator +-from jsonschema.validators import ( +- Draft3Validator, +- Draft4Validator, +- Draft6Validator, +- Draft7Validator, +- Draft201909Validator, +- Draft202012Validator, +- RefResolver, +- validate, +-) +- +- +-def __getattr__(name): +- if name == "__version__": +- warnings.warn( +- "Accessing jsonschema.__version__ is deprecated and will be " +- "removed in a future release. Use importlib.metadata directly " +- "to query for jsonschema's version.", +- DeprecationWarning, +- stacklevel=2, +- ) +- +- try: +- from importlib import metadata +- except ImportError: +- import importlib_metadata as metadata +- +- return metadata.version("jsonschema") +- raise AttributeError(f"module {__name__} has no attribute {name}") +diff --git a/src/poetry/core/_vendor/jsonschema/__main__.py b/src/poetry/core/_vendor/jsonschema/__main__.py +deleted file mode 100644 +index fdc21e2..0000000 +--- a/src/poetry/core/_vendor/jsonschema/__main__.py ++++ /dev/null +@@ -1,3 +0,0 @@ +-from jsonschema.cli import main +- +-main() +diff --git a/src/poetry/core/_vendor/jsonschema/_format.py b/src/poetry/core/_vendor/jsonschema/_format.py +deleted file mode 100644 +index da4bb79..0000000 +--- a/src/poetry/core/_vendor/jsonschema/_format.py ++++ /dev/null +@@ -1,498 +0,0 @@ +-from __future__ import annotations +- +-from contextlib import suppress +-from uuid import UUID +-import datetime +-import ipaddress +-import re +-import typing +- +-from jsonschema.exceptions import FormatError +- +-_FormatCheckCallable = typing.Callable[[object], bool] +-_F = typing.TypeVar("_F", bound=_FormatCheckCallable) +-_RaisesType = typing.Union[ +- typing.Type[Exception], typing.Tuple[typing.Type[Exception], ...], +-] +- +- +-class FormatChecker(object): +- """ +- A ``format`` property checker. +- +- JSON Schema does not mandate that the ``format`` property actually do any +- validation. If validation is desired however, instances of this class can +- be hooked into validators to enable format validation. +- +- `FormatChecker` objects always return ``True`` when asked about +- formats that they do not know how to validate. 
+- +- To check a custom format using a function that takes an instance and +- returns a ``bool``, use the `FormatChecker.checks` or +- `FormatChecker.cls_checks` decorators. +- +- Arguments: +- +- formats (~collections.abc.Iterable): +- +- The known formats to validate. This argument can be used to +- limit which formats will be used during validation. +- """ +- +- checkers: dict[ +- str, +- tuple[_FormatCheckCallable, _RaisesType], +- ] = {} +- +- def __init__(self, formats: typing.Iterable[str] | None = None): +- if formats is None: +- self.checkers = self.checkers.copy() +- else: +- self.checkers = dict((k, self.checkers[k]) for k in formats) +- +- def __repr__(self): +- return "".format(sorted(self.checkers)) +- +- def checks( +- self, format: str, raises: _RaisesType = (), +- ) -> typing.Callable[[_F], _F]: +- """ +- Register a decorated function as validating a new format. +- +- Arguments: +- +- format (str): +- +- The format that the decorated function will check. +- +- raises (Exception): +- +- The exception(s) raised by the decorated function when an +- invalid instance is found. +- +- The exception object will be accessible as the +- `jsonschema.exceptions.ValidationError.cause` attribute of the +- resulting validation error. +- """ +- +- def _checks(func: _F) -> _F: +- self.checkers[format] = (func, raises) +- return func +- +- return _checks +- +- @classmethod +- def cls_checks( +- cls, format: str, raises: _RaisesType = (), +- ) -> typing.Callable[[_F], _F]: +- def _checks(func: _F) -> _F: +- cls.checkers[format] = (func, raises) +- return func +- +- return _checks +- +- def check(self, instance: object, format: str) -> None: +- """ +- Check whether the instance conforms to the given format. +- +- Arguments: +- +- instance (*any primitive type*, i.e. str, number, bool): +- +- The instance to check +- +- format (str): +- +- The format that instance should conform to +- +- +- Raises: +- +- FormatError: if the instance does not conform to ``format`` +- """ +- +- if format not in self.checkers: +- return +- +- func, raises = self.checkers[format] +- result, cause = None, None +- try: +- result = func(instance) +- except raises as e: +- cause = e +- if not result: +- raise FormatError(f"{instance!r} is not a {format!r}", cause=cause) +- +- def conforms(self, instance: object, format: str) -> bool: +- """ +- Check whether the instance conforms to the given format. +- +- Arguments: +- +- instance (*any primitive type*, i.e. 
str, number, bool): +- +- The instance to check +- +- format (str): +- +- The format that instance should conform to +- +- Returns: +- +- bool: whether it conformed +- """ +- +- try: +- self.check(instance, format) +- except FormatError: +- return False +- else: +- return True +- +- +-draft3_format_checker = FormatChecker() +-draft4_format_checker = FormatChecker() +-draft6_format_checker = FormatChecker() +-draft7_format_checker = FormatChecker() +-draft201909_format_checker = FormatChecker() +-draft202012_format_checker = FormatChecker() +- +-_draft_checkers: dict[str, FormatChecker] = dict( +- draft3=draft3_format_checker, +- draft4=draft4_format_checker, +- draft6=draft6_format_checker, +- draft7=draft7_format_checker, +- draft201909=draft201909_format_checker, +- draft202012=draft202012_format_checker, +-) +- +- +-def _checks_drafts( +- name=None, +- draft3=None, +- draft4=None, +- draft6=None, +- draft7=None, +- draft201909=None, +- draft202012=None, +- raises=(), +-) -> typing.Callable[[_F], _F]: +- draft3 = draft3 or name +- draft4 = draft4 or name +- draft6 = draft6 or name +- draft7 = draft7 or name +- draft201909 = draft201909 or name +- draft202012 = draft202012 or name +- +- def wrap(func: _F) -> _F: +- if draft3: +- func = _draft_checkers["draft3"].checks(draft3, raises)(func) +- if draft4: +- func = _draft_checkers["draft4"].checks(draft4, raises)(func) +- if draft6: +- func = _draft_checkers["draft6"].checks(draft6, raises)(func) +- if draft7: +- func = _draft_checkers["draft7"].checks(draft7, raises)(func) +- if draft201909: +- func = _draft_checkers["draft201909"].checks(draft201909, raises)( +- func, +- ) +- if draft202012: +- func = _draft_checkers["draft202012"].checks(draft202012, raises)( +- func, +- ) +- +- # Oy. This is bad global state, but relied upon for now, until +- # deprecation. See #519 and test_format_checkers_come_with_defaults +- FormatChecker.cls_checks( +- draft202012 or draft201909 or draft7 or draft6 or draft4 or draft3, +- raises, +- )(func) +- return func +- +- return wrap +- +- +-@_checks_drafts(name="idn-email") +-@_checks_drafts(name="email") +-def is_email(instance: object) -> bool: +- if not isinstance(instance, str): +- return True +- return "@" in instance +- +- +-@_checks_drafts( +- draft3="ip-address", +- draft4="ipv4", +- draft6="ipv4", +- draft7="ipv4", +- draft201909="ipv4", +- draft202012="ipv4", +- raises=ipaddress.AddressValueError, +-) +-def is_ipv4(instance: object) -> bool: +- if not isinstance(instance, str): +- return True +- return bool(ipaddress.IPv4Address(instance)) +- +- +-@_checks_drafts(name="ipv6", raises=ipaddress.AddressValueError) +-def is_ipv6(instance: object) -> bool: +- if not isinstance(instance, str): +- return True +- address = ipaddress.IPv6Address(instance) +- return not getattr(address, "scope_id", "") +- +- +-with suppress(ImportError): +- from fqdn import FQDN +- +- @_checks_drafts( +- draft3="host-name", +- draft4="hostname", +- draft6="hostname", +- draft7="hostname", +- draft201909="hostname", +- draft202012="hostname", +- ) +- def is_host_name(instance: object) -> bool: +- if not isinstance(instance, str): +- return True +- return FQDN(instance).is_valid +- +- +-with suppress(ImportError): +- # The built-in `idna` codec only implements RFC 3890, so we go elsewhere. 
+- import idna +- +- @_checks_drafts( +- draft7="idn-hostname", +- draft201909="idn-hostname", +- draft202012="idn-hostname", +- raises=(idna.IDNAError, UnicodeError), +- ) +- def is_idn_host_name(instance: object) -> bool: +- if not isinstance(instance, str): +- return True +- idna.encode(instance) +- return True +- +- +-try: +- import rfc3987 +-except ImportError: +- with suppress(ImportError): +- from rfc3986_validator import validate_rfc3986 +- +- @_checks_drafts(name="uri") +- def is_uri(instance: object) -> bool: +- if not isinstance(instance, str): +- return True +- return validate_rfc3986(instance, rule="URI") +- +- @_checks_drafts( +- draft6="uri-reference", +- draft7="uri-reference", +- draft201909="uri-reference", +- draft202012="uri-reference", +- raises=ValueError, +- ) +- def is_uri_reference(instance: object) -> bool: +- if not isinstance(instance, str): +- return True +- return validate_rfc3986(instance, rule="URI_reference") +- +-else: +- +- @_checks_drafts( +- draft7="iri", +- draft201909="iri", +- draft202012="iri", +- raises=ValueError, +- ) +- def is_iri(instance: object) -> bool: +- if not isinstance(instance, str): +- return True +- return rfc3987.parse(instance, rule="IRI") +- +- @_checks_drafts( +- draft7="iri-reference", +- draft201909="iri-reference", +- draft202012="iri-reference", +- raises=ValueError, +- ) +- def is_iri_reference(instance: object) -> bool: +- if not isinstance(instance, str): +- return True +- return rfc3987.parse(instance, rule="IRI_reference") +- +- @_checks_drafts(name="uri", raises=ValueError) +- def is_uri(instance: object) -> bool: +- if not isinstance(instance, str): +- return True +- return rfc3987.parse(instance, rule="URI") +- +- @_checks_drafts( +- draft6="uri-reference", +- draft7="uri-reference", +- draft201909="uri-reference", +- draft202012="uri-reference", +- raises=ValueError, +- ) +- def is_uri_reference(instance: object) -> bool: +- if not isinstance(instance, str): +- return True +- return rfc3987.parse(instance, rule="URI_reference") +- +- +-with suppress(ImportError): +- from rfc3339_validator import validate_rfc3339 +- +- @_checks_drafts(name="date-time") +- def is_datetime(instance: object) -> bool: +- if not isinstance(instance, str): +- return True +- return validate_rfc3339(instance.upper()) +- +- @_checks_drafts( +- draft7="time", +- draft201909="time", +- draft202012="time", +- ) +- def is_time(instance: object) -> bool: +- if not isinstance(instance, str): +- return True +- return is_datetime("1970-01-01T" + instance) +- +- +-@_checks_drafts(name="regex", raises=re.error) +-def is_regex(instance: object) -> bool: +- if not isinstance(instance, str): +- return True +- return bool(re.compile(instance)) +- +- +-@_checks_drafts( +- draft3="date", +- draft7="date", +- draft201909="date", +- draft202012="date", +- raises=ValueError, +-) +-def is_date(instance: object) -> bool: +- if not isinstance(instance, str): +- return True +- return bool(instance.isascii() and datetime.date.fromisoformat(instance)) +- +- +-@_checks_drafts(draft3="time", raises=ValueError) +-def is_draft3_time(instance: object) -> bool: +- if not isinstance(instance, str): +- return True +- return bool(datetime.datetime.strptime(instance, "%H:%M:%S")) +- +- +-with suppress(ImportError): +- from webcolors import CSS21_NAMES_TO_HEX +- import webcolors +- +- def is_css_color_code(instance: object) -> bool: +- return webcolors.normalize_hex(instance) +- +- @_checks_drafts(draft3="color", raises=(ValueError, TypeError)) +- def is_css21_color(instance: 
object) -> bool: +- if ( +- not isinstance(instance, str) +- or instance.lower() in CSS21_NAMES_TO_HEX +- ): +- return True +- return is_css_color_code(instance) +- +- +-with suppress(ImportError): +- import jsonpointer +- +- @_checks_drafts( +- draft6="json-pointer", +- draft7="json-pointer", +- draft201909="json-pointer", +- draft202012="json-pointer", +- raises=jsonpointer.JsonPointerException, +- ) +- def is_json_pointer(instance: object) -> bool: +- if not isinstance(instance, str): +- return True +- return bool(jsonpointer.JsonPointer(instance)) +- +- # TODO: I don't want to maintain this, so it +- # needs to go either into jsonpointer (pending +- # https://github.com/stefankoegl/python-json-pointer/issues/34) or +- # into a new external library. +- @_checks_drafts( +- draft7="relative-json-pointer", +- draft201909="relative-json-pointer", +- draft202012="relative-json-pointer", +- raises=jsonpointer.JsonPointerException, +- ) +- def is_relative_json_pointer(instance: object) -> bool: +- # Definition taken from: +- # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3 +- if not isinstance(instance, str): +- return True +- non_negative_integer, rest = [], "" +- for i, character in enumerate(instance): +- if character.isdigit(): +- # digits with a leading "0" are not allowed +- if i > 0 and int(instance[i - 1]) == 0: +- return False +- +- non_negative_integer.append(character) +- continue +- +- if not non_negative_integer: +- return False +- +- rest = instance[i:] +- break +- return (rest == "#") or bool(jsonpointer.JsonPointer(rest)) +- +- +-with suppress(ImportError): +- import uri_template +- +- @_checks_drafts( +- draft6="uri-template", +- draft7="uri-template", +- draft201909="uri-template", +- draft202012="uri-template", +- ) +- def is_uri_template(instance: object) -> bool: +- if not isinstance(instance, str): +- return True +- return uri_template.validate(instance) +- +- +-with suppress(ImportError): +- import isoduration +- +- @_checks_drafts( +- draft201909="duration", +- draft202012="duration", +- raises=isoduration.DurationParsingException, +- ) +- def is_duration(instance: object) -> bool: +- if not isinstance(instance, str): +- return True +- return bool(isoduration.parse_duration(instance)) +- +- +-@_checks_drafts( +- draft201909="uuid", +- draft202012="uuid", +- raises=ValueError, +-) +-def is_uuid(instance: object) -> bool: +- if not isinstance(instance, str): +- return True +- UUID(instance) +- return all(instance[position] == "-" for position in (8, 13, 18, 23)) +diff --git a/src/poetry/core/_vendor/jsonschema/_legacy_validators.py b/src/poetry/core/_vendor/jsonschema/_legacy_validators.py +deleted file mode 100644 +index fa45053..0000000 +--- a/src/poetry/core/_vendor/jsonschema/_legacy_validators.py ++++ /dev/null +@@ -1,228 +0,0 @@ +-from jsonschema import _utils +-from jsonschema.exceptions import ValidationError +- +- +-def ignore_ref_siblings(schema): +- """ +- Ignore siblings of ``$ref`` if it is present. +- +- Otherwise, return all keywords. +- +- Suitable for use with `create`'s ``applicable_validators`` argument. 
+- """ +- ref = schema.get("$ref") +- if ref is not None: +- return [("$ref", ref)] +- else: +- return schema.items() +- +- +-def dependencies_draft3(validator, dependencies, instance, schema): +- if not validator.is_type(instance, "object"): +- return +- +- for property, dependency in dependencies.items(): +- if property not in instance: +- continue +- +- if validator.is_type(dependency, "object"): +- yield from validator.descend( +- instance, dependency, schema_path=property, +- ) +- elif validator.is_type(dependency, "string"): +- if dependency not in instance: +- message = f"{dependency!r} is a dependency of {property!r}" +- yield ValidationError(message) +- else: +- for each in dependency: +- if each not in instance: +- message = f"{each!r} is a dependency of {property!r}" +- yield ValidationError(message) +- +- +-def dependencies_draft4_draft6_draft7( +- validator, +- dependencies, +- instance, +- schema, +-): +- """ +- Support for the ``dependencies`` keyword from pre-draft 2019-09. +- +- In later drafts, the keyword was split into separate +- ``dependentRequired`` and ``dependentSchemas`` validators. +- """ +- if not validator.is_type(instance, "object"): +- return +- +- for property, dependency in dependencies.items(): +- if property not in instance: +- continue +- +- if validator.is_type(dependency, "array"): +- for each in dependency: +- if each not in instance: +- message = f"{each!r} is a dependency of {property!r}" +- yield ValidationError(message) +- else: +- yield from validator.descend( +- instance, dependency, schema_path=property, +- ) +- +- +-def disallow_draft3(validator, disallow, instance, schema): +- for disallowed in _utils.ensure_list(disallow): +- if validator.evolve(schema={"type": [disallowed]}).is_valid(instance): +- message = f"{disallowed!r} is disallowed for {instance!r}" +- yield ValidationError(message) +- +- +-def extends_draft3(validator, extends, instance, schema): +- if validator.is_type(extends, "object"): +- yield from validator.descend(instance, extends) +- return +- for index, subschema in enumerate(extends): +- yield from validator.descend(instance, subschema, schema_path=index) +- +- +-def items_draft3_draft4(validator, items, instance, schema): +- if not validator.is_type(instance, "array"): +- return +- +- if validator.is_type(items, "object"): +- for index, item in enumerate(instance): +- yield from validator.descend(item, items, path=index) +- else: +- for (index, item), subschema in zip(enumerate(instance), items): +- yield from validator.descend( +- item, subschema, path=index, schema_path=index, +- ) +- +- +-def items_draft6_draft7_draft201909(validator, items, instance, schema): +- if not validator.is_type(instance, "array"): +- return +- +- if validator.is_type(items, "array"): +- for (index, item), subschema in zip(enumerate(instance), items): +- yield from validator.descend( +- item, subschema, path=index, schema_path=index, +- ) +- else: +- for index, item in enumerate(instance): +- yield from validator.descend(item, items, path=index) +- +- +-def minimum_draft3_draft4(validator, minimum, instance, schema): +- if not validator.is_type(instance, "number"): +- return +- +- if schema.get("exclusiveMinimum", False): +- failed = instance <= minimum +- cmp = "less than or equal to" +- else: +- failed = instance < minimum +- cmp = "less than" +- +- if failed: +- message = f"{instance!r} is {cmp} the minimum of {minimum!r}" +- yield ValidationError(message) +- +- +-def maximum_draft3_draft4(validator, maximum, instance, schema): +- if not 
validator.is_type(instance, "number"): +- return +- +- if schema.get("exclusiveMaximum", False): +- failed = instance >= maximum +- cmp = "greater than or equal to" +- else: +- failed = instance > maximum +- cmp = "greater than" +- +- if failed: +- message = f"{instance!r} is {cmp} the maximum of {maximum!r}" +- yield ValidationError(message) +- +- +-def properties_draft3(validator, properties, instance, schema): +- if not validator.is_type(instance, "object"): +- return +- +- for property, subschema in properties.items(): +- if property in instance: +- yield from validator.descend( +- instance[property], +- subschema, +- path=property, +- schema_path=property, +- ) +- elif subschema.get("required", False): +- error = ValidationError(f"{property!r} is a required property") +- error._set( +- validator="required", +- validator_value=subschema["required"], +- instance=instance, +- schema=schema, +- ) +- error.path.appendleft(property) +- error.schema_path.extend([property, "required"]) +- yield error +- +- +-def type_draft3(validator, types, instance, schema): +- types = _utils.ensure_list(types) +- +- all_errors = [] +- for index, type in enumerate(types): +- if validator.is_type(type, "object"): +- errors = list(validator.descend(instance, type, schema_path=index)) +- if not errors: +- return +- all_errors.extend(errors) +- else: +- if validator.is_type(instance, type): +- return +- else: +- reprs = [] +- for type in types: +- try: +- reprs.append(repr(type["name"])) +- except Exception: +- reprs.append(repr(type)) +- yield ValidationError( +- f"{instance!r} is not of type {', '.join(reprs)}", +- context=all_errors, +- ) +- +- +-def contains_draft6_draft7(validator, contains, instance, schema): +- if not validator.is_type(instance, "array"): +- return +- +- if not any( +- validator.evolve(schema=contains).is_valid(element) +- for element in instance +- ): +- yield ValidationError( +- f"None of {instance!r} are valid under the given schema", +- ) +- +- +-def recursiveRef(validator, recursiveRef, instance, schema): +- lookup_url, target = validator.resolver.resolution_scope, validator.schema +- +- for each in reversed(validator.resolver._scopes_stack[1:]): +- lookup_url, next_target = validator.resolver.resolve(each) +- if next_target.get("$recursiveAnchor"): +- target = next_target +- else: +- break +- +- fragment = recursiveRef.lstrip("#") +- subschema = validator.resolver.resolve_fragment(target, fragment) +- # FIXME: This is gutted (and not calling .descend) because it can trigger +- # recursion errors, so there's a bug here. Re-enable the tests to +- # see it. 
+- subschema +- return [] +diff --git a/src/poetry/core/_vendor/jsonschema/_types.py b/src/poetry/core/_vendor/jsonschema/_types.py +deleted file mode 100644 +index 9d59eb3..0000000 +--- a/src/poetry/core/_vendor/jsonschema/_types.py ++++ /dev/null +@@ -1,217 +0,0 @@ +-from __future__ import annotations +- +-import numbers +-import typing +- +-from pyrsistent import pmap +-import attr +- +-from jsonschema.exceptions import UndefinedTypeCheck +- +- +-# unfortunately, the type of pmap is generic, and if used as the attr.ib +-# converter, the generic type is presented to mypy, which then fails to match +-# the concrete type of a type checker mapping +-# this "do nothing" wrapper presents the correct information to mypy +-def _typed_pmap_converter( +- init_val: typing.Mapping[ +- str, +- typing.Callable[["TypeChecker", typing.Any], bool], +- ], +-) -> typing.Mapping[str, typing.Callable[["TypeChecker", typing.Any], bool]]: +- return typing.cast( +- typing.Mapping[ +- str, +- typing.Callable[["TypeChecker", typing.Any], bool], +- ], +- pmap(init_val), +- ) +- +- +-def is_array(checker, instance): +- return isinstance(instance, list) +- +- +-def is_bool(checker, instance): +- return isinstance(instance, bool) +- +- +-def is_integer(checker, instance): +- # bool inherits from int, so ensure bools aren't reported as ints +- if isinstance(instance, bool): +- return False +- return isinstance(instance, int) +- +- +-def is_null(checker, instance): +- return instance is None +- +- +-def is_number(checker, instance): +- # bool inherits from int, so ensure bools aren't reported as ints +- if isinstance(instance, bool): +- return False +- return isinstance(instance, numbers.Number) +- +- +-def is_object(checker, instance): +- return isinstance(instance, dict) +- +- +-def is_string(checker, instance): +- return isinstance(instance, str) +- +- +-def is_any(checker, instance): +- return True +- +- +-@attr.s(frozen=True) +-class TypeChecker(object): +- """ +- A ``type`` property checker. +- +- A `TypeChecker` performs type checking for a `Validator`. Type +- checks to perform are updated using `TypeChecker.redefine` or +- `TypeChecker.redefine_many` and removed via `TypeChecker.remove`. +- Each of these return a new `TypeChecker` object. +- +- Arguments: +- +- type_checkers (dict): +- +- The initial mapping of types to their checking functions. +- """ +- +- _type_checkers: typing.Mapping[ +- str, typing.Callable[["TypeChecker", typing.Any], bool], +- ] = attr.ib( +- default=pmap(), +- converter=_typed_pmap_converter, +- ) +- +- def is_type(self, instance, type): +- """ +- Check if the instance is of the appropriate type. +- +- Arguments: +- +- instance (object): +- +- The instance to check +- +- type (str): +- +- The name of the type that is expected. +- +- Returns: +- +- bool: Whether it conformed. +- +- +- Raises: +- +- `jsonschema.exceptions.UndefinedTypeCheck`: +- if type is unknown to this object. +- """ +- try: +- fn = self._type_checkers[type] +- except KeyError: +- raise UndefinedTypeCheck(type) from None +- +- return fn(self, instance) +- +- def redefine(self, type, fn): +- """ +- Produce a new checker with the given type redefined. +- +- Arguments: +- +- type (str): +- +- The name of the type to check. +- +- fn (collections.abc.Callable): +- +- A function taking exactly two parameters - the type +- checker calling the function and the instance to check. +- The function should return true if instance is of this +- type and false otherwise. +- +- Returns: +- +- A new `TypeChecker` instance. 
+- """ +- return self.redefine_many({type: fn}) +- +- def redefine_many(self, definitions=()): +- """ +- Produce a new checker with the given types redefined. +- +- Arguments: +- +- definitions (dict): +- +- A dictionary mapping types to their checking functions. +- +- Returns: +- +- A new `TypeChecker` instance. +- """ +- return attr.evolve( +- self, type_checkers=self._type_checkers.update(definitions), +- ) +- +- def remove(self, *types): +- """ +- Produce a new checker with the given types forgotten. +- +- Arguments: +- +- types (~collections.abc.Iterable): +- +- the names of the types to remove. +- +- Returns: +- +- A new `TypeChecker` instance +- +- Raises: +- +- `jsonschema.exceptions.UndefinedTypeCheck`: +- +- if any given type is unknown to this object +- """ +- +- checkers = self._type_checkers +- for each in types: +- try: +- checkers = checkers.remove(each) +- except KeyError: +- raise UndefinedTypeCheck(each) +- return attr.evolve(self, type_checkers=checkers) +- +- +-draft3_type_checker = TypeChecker( +- { +- "any": is_any, +- "array": is_array, +- "boolean": is_bool, +- "integer": is_integer, +- "object": is_object, +- "null": is_null, +- "number": is_number, +- "string": is_string, +- }, +-) +-draft4_type_checker = draft3_type_checker.remove("any") +-draft6_type_checker = draft4_type_checker.redefine( +- "integer", +- lambda checker, instance: ( +- is_integer(checker, instance) +- or isinstance(instance, float) and instance.is_integer() +- ), +-) +-draft7_type_checker = draft6_type_checker +-draft201909_type_checker = draft7_type_checker +-draft202012_type_checker = draft201909_type_checker +diff --git a/src/poetry/core/_vendor/jsonschema/_utils.py b/src/poetry/core/_vendor/jsonschema/_utils.py +deleted file mode 100644 +index df505fe..0000000 +--- a/src/poetry/core/_vendor/jsonschema/_utils.py ++++ /dev/null +@@ -1,345 +0,0 @@ +-from collections.abc import Mapping, MutableMapping, Sequence +-from urllib.parse import urlsplit +-import itertools +-import json +-import os +-import re +- +-class URIDict(MutableMapping): +- """ +- Dictionary which uses normalized URIs as keys. +- """ +- +- def normalize(self, uri): +- return urlsplit(uri).geturl() +- +- def __init__(self, *args, **kwargs): +- self.store = dict() +- self.store.update(*args, **kwargs) +- +- def __getitem__(self, uri): +- return self.store[self.normalize(uri)] +- +- def __setitem__(self, uri, value): +- self.store[self.normalize(uri)] = value +- +- def __delitem__(self, uri): +- del self.store[self.normalize(uri)] +- +- def __iter__(self): +- return iter(self.store) +- +- def __len__(self): +- return len(self.store) +- +- def __repr__(self): +- return repr(self.store) +- +- +-class Unset(object): +- """ +- An as-of-yet unset attribute or unprovided default parameter. +- """ +- +- def __repr__(self): +- return "" +- +- +-def load_schema(name): +- """ +- Load a schema from ./schemas/``name``.json and return it. +- """ +- with open( +- os.path.join(os.path.dirname(__file__), "schemas", "{0}.json".format(name)), +- encoding="utf-8" +- ) as f: +- data = f.read() +- +- return json.loads(data) +- +- +-def format_as_index(container, indices): +- """ +- Construct a single string containing indexing operations for the indices. +- +- For example for a container ``bar``, [1, 2, "foo"] -> bar[1][2]["foo"] +- +- Arguments: +- +- container (str): +- +- A word to use for the thing being indexed +- +- indices (sequence): +- +- The indices to format. 
+- """ +- +- if not indices: +- return container +- return f"{container}[{']['.join(repr(index) for index in indices)}]" +- +- +-def find_additional_properties(instance, schema): +- """ +- Return the set of additional properties for the given ``instance``. +- +- Weeds out properties that should have been validated by ``properties`` and +- / or ``patternProperties``. +- +- Assumes ``instance`` is dict-like already. +- """ +- +- properties = schema.get("properties", {}) +- patterns = "|".join(schema.get("patternProperties", {})) +- for property in instance: +- if property not in properties: +- if patterns and re.search(patterns, property): +- continue +- yield property +- +- +-def extras_msg(extras): +- """ +- Create an error message for extra items or properties. +- """ +- +- if len(extras) == 1: +- verb = "was" +- else: +- verb = "were" +- return ", ".join(repr(extra) for extra in sorted(extras)), verb +- +- +-def ensure_list(thing): +- """ +- Wrap ``thing`` in a list if it's a single str. +- +- Otherwise, return it unchanged. +- """ +- +- if isinstance(thing, str): +- return [thing] +- return thing +- +- +-def _mapping_equal(one, two): +- """ +- Check if two mappings are equal using the semantics of `equal`. +- """ +- if len(one) != len(two): +- return False +- return all( +- key in two and equal(value, two[key]) +- for key, value in one.items() +- ) +- +- +-def _sequence_equal(one, two): +- """ +- Check if two sequences are equal using the semantics of `equal`. +- """ +- if len(one) != len(two): +- return False +- return all(equal(i, j) for i, j in zip(one, two)) +- +- +-def equal(one, two): +- """ +- Check if two things are equal evading some Python type hierarchy semantics. +- +- Specifically in JSON Schema, evade `bool` inheriting from `int`, +- recursing into sequences to do the same. +- """ +- if isinstance(one, str) or isinstance(two, str): +- return one == two +- if isinstance(one, Sequence) and isinstance(two, Sequence): +- return _sequence_equal(one, two) +- if isinstance(one, Mapping) and isinstance(two, Mapping): +- return _mapping_equal(one, two) +- return unbool(one) == unbool(two) +- +- +-def unbool(element, true=object(), false=object()): +- """ +- A hack to make True and 1 and False and 0 unique for ``uniq``. +- """ +- +- if element is True: +- return true +- elif element is False: +- return false +- return element +- +- +-def uniq(container): +- """ +- Check if all of a container's elements are unique. +- +- Tries to rely on the container being recursively sortable, or otherwise +- falls back on (slow) brute force. 
+- """ +- try: +- sort = sorted(unbool(i) for i in container) +- sliced = itertools.islice(sort, 1, None) +- +- for i, j in zip(sort, sliced): +- if equal(i, j): +- return False +- +- except (NotImplementedError, TypeError): +- seen = [] +- for e in container: +- e = unbool(e) +- +- for i in seen: +- if equal(i, e): +- return False +- +- seen.append(e) +- return True +- +- +-def find_evaluated_item_indexes_by_schema(validator, instance, schema): +- """ +- Get all indexes of items that get evaluated under the current schema +- +- Covers all keywords related to unevaluatedItems: items, prefixItems, if, +- then, else, contains, unevaluatedItems, allOf, oneOf, anyOf +- """ +- if validator.is_type(schema, "boolean"): +- return [] +- evaluated_indexes = [] +- +- if "items" in schema: +- return list(range(0, len(instance))) +- +- if "$ref" in schema: +- scope, resolved = validator.resolver.resolve(schema["$ref"]) +- validator.resolver.push_scope(scope) +- +- try: +- evaluated_indexes += find_evaluated_item_indexes_by_schema( +- validator, instance, resolved, +- ) +- finally: +- validator.resolver.pop_scope() +- +- if "prefixItems" in schema: +- evaluated_indexes += list(range(0, len(schema["prefixItems"]))) +- +- if "if" in schema: +- if validator.evolve(schema=schema["if"]).is_valid(instance): +- evaluated_indexes += find_evaluated_item_indexes_by_schema( +- validator, instance, schema["if"], +- ) +- if "then" in schema: +- evaluated_indexes += find_evaluated_item_indexes_by_schema( +- validator, instance, schema["then"], +- ) +- else: +- if "else" in schema: +- evaluated_indexes += find_evaluated_item_indexes_by_schema( +- validator, instance, schema["else"], +- ) +- +- for keyword in ["contains", "unevaluatedItems"]: +- if keyword in schema: +- for k, v in enumerate(instance): +- if validator.evolve(schema=schema[keyword]).is_valid(v): +- evaluated_indexes.append(k) +- +- for keyword in ["allOf", "oneOf", "anyOf"]: +- if keyword in schema: +- for subschema in schema[keyword]: +- errs = list(validator.descend(instance, subschema)) +- if not errs: +- evaluated_indexes += find_evaluated_item_indexes_by_schema( +- validator, instance, subschema, +- ) +- +- return evaluated_indexes +- +- +-def find_evaluated_property_keys_by_schema(validator, instance, schema): +- """ +- Get all keys of items that get evaluated under the current schema +- +- Covers all keywords related to unevaluatedProperties: properties, +- additionalProperties, unevaluatedProperties, patternProperties, +- dependentSchemas, allOf, oneOf, anyOf, if, then, else +- """ +- if validator.is_type(schema, "boolean"): +- return [] +- evaluated_keys = [] +- +- if "$ref" in schema: +- scope, resolved = validator.resolver.resolve(schema["$ref"]) +- validator.resolver.push_scope(scope) +- +- try: +- evaluated_keys += find_evaluated_property_keys_by_schema( +- validator, instance, resolved, +- ) +- finally: +- validator.resolver.pop_scope() +- +- for keyword in [ +- "properties", "additionalProperties", "unevaluatedProperties", +- ]: +- if keyword in schema: +- if validator.is_type(schema[keyword], "boolean"): +- for property, value in instance.items(): +- if validator.evolve(schema=schema[keyword]).is_valid( +- {property: value}, +- ): +- evaluated_keys.append(property) +- +- if validator.is_type(schema[keyword], "object"): +- for property, subschema in schema[keyword].items(): +- if property in instance and validator.evolve( +- schema=subschema, +- ).is_valid(instance[property]): +- evaluated_keys.append(property) +- +- if 
"patternProperties" in schema: +- for property, value in instance.items(): +- for pattern, _ in schema["patternProperties"].items(): +- if re.search(pattern, property) and validator.evolve( +- schema=schema["patternProperties"], +- ).is_valid({property: value}): +- evaluated_keys.append(property) +- +- if "dependentSchemas" in schema: +- for property, subschema in schema["dependentSchemas"].items(): +- if property not in instance: +- continue +- evaluated_keys += find_evaluated_property_keys_by_schema( +- validator, instance, subschema, +- ) +- +- for keyword in ["allOf", "oneOf", "anyOf"]: +- if keyword in schema: +- for subschema in schema[keyword]: +- errs = list(validator.descend(instance, subschema)) +- if not errs: +- evaluated_keys += find_evaluated_property_keys_by_schema( +- validator, instance, subschema, +- ) +- +- if "if" in schema: +- if validator.evolve(schema=schema["if"]).is_valid(instance): +- evaluated_keys += find_evaluated_property_keys_by_schema( +- validator, instance, schema["if"], +- ) +- if "then" in schema: +- evaluated_keys += find_evaluated_property_keys_by_schema( +- validator, instance, schema["then"], +- ) +- else: +- if "else" in schema: +- evaluated_keys += find_evaluated_property_keys_by_schema( +- validator, instance, schema["else"], +- ) +- +- return evaluated_keys +diff --git a/src/poetry/core/_vendor/jsonschema/_validators.py b/src/poetry/core/_vendor/jsonschema/_validators.py +deleted file mode 100644 +index 874e879..0000000 +--- a/src/poetry/core/_vendor/jsonschema/_validators.py ++++ /dev/null +@@ -1,467 +0,0 @@ +-from fractions import Fraction +-from urllib.parse import urldefrag, urljoin +-import re +- +-from jsonschema._utils import ( +- ensure_list, +- equal, +- extras_msg, +- find_additional_properties, +- find_evaluated_item_indexes_by_schema, +- find_evaluated_property_keys_by_schema, +- unbool, +- uniq, +-) +-from jsonschema.exceptions import FormatError, ValidationError +- +- +-def patternProperties(validator, patternProperties, instance, schema): +- if not validator.is_type(instance, "object"): +- return +- +- for pattern, subschema in patternProperties.items(): +- for k, v in instance.items(): +- if re.search(pattern, k): +- yield from validator.descend( +- v, subschema, path=k, schema_path=pattern, +- ) +- +- +-def propertyNames(validator, propertyNames, instance, schema): +- if not validator.is_type(instance, "object"): +- return +- +- for property in instance: +- yield from validator.descend(instance=property, schema=propertyNames) +- +- +-def additionalProperties(validator, aP, instance, schema): +- if not validator.is_type(instance, "object"): +- return +- +- extras = set(find_additional_properties(instance, schema)) +- +- if validator.is_type(aP, "object"): +- for extra in extras: +- yield from validator.descend(instance[extra], aP, path=extra) +- elif not aP and extras: +- if "patternProperties" in schema: +- if len(extras) == 1: +- verb = "does" +- else: +- verb = "do" +- +- joined = ", ".join(repr(each) for each in sorted(extras)) +- patterns = ", ".join( +- repr(each) for each in sorted(schema["patternProperties"]) +- ) +- error = f"{joined} {verb} not match any of the regexes: {patterns}" +- yield ValidationError(error) +- else: +- error = "Additional properties are not allowed (%s %s unexpected)" +- yield ValidationError(error % extras_msg(extras)) +- +- +-def items(validator, items, instance, schema): +- if not validator.is_type(instance, "array"): +- return +- +- prefix = len(schema.get("prefixItems", [])) +- total = 
len(instance) +- if items is False and total > prefix: +- message = f"Expected at most {prefix} items, but found {total}" +- yield ValidationError(message) +- else: +- for index in range(prefix, total): +- yield from validator.descend( +- instance=instance[index], +- schema=items, +- path=index, +- ) +- +- +-def additionalItems(validator, aI, instance, schema): +- if ( +- not validator.is_type(instance, "array") +- or validator.is_type(schema.get("items", {}), "object") +- ): +- return +- +- len_items = len(schema.get("items", [])) +- if validator.is_type(aI, "object"): +- for index, item in enumerate(instance[len_items:], start=len_items): +- yield from validator.descend(item, aI, path=index) +- elif not aI and len(instance) > len(schema.get("items", [])): +- error = "Additional items are not allowed (%s %s unexpected)" +- yield ValidationError( +- error % extras_msg(instance[len(schema.get("items", [])):]), +- ) +- +- +-def const(validator, const, instance, schema): +- if not equal(instance, const): +- yield ValidationError(f"{const!r} was expected") +- +- +-def contains(validator, contains, instance, schema): +- if not validator.is_type(instance, "array"): +- return +- +- matches = 0 +- min_contains = schema.get("minContains", 1) +- max_contains = schema.get("maxContains", len(instance)) +- +- for each in instance: +- if validator.evolve(schema=contains).is_valid(each): +- matches += 1 +- if matches > max_contains: +- yield ValidationError( +- "Too many items match the given schema " +- f"(expected at most {max_contains})", +- validator="maxContains", +- validator_value=max_contains, +- ) +- return +- +- if matches < min_contains: +- if not matches: +- yield ValidationError( +- f"{instance!r} does not contain items " +- "matching the given schema", +- ) +- else: +- yield ValidationError( +- "Too few items match the given schema (expected at least " +- f"{min_contains} but only {matches} matched)", +- validator="minContains", +- validator_value=min_contains, +- ) +- +- +-def exclusiveMinimum(validator, minimum, instance, schema): +- if not validator.is_type(instance, "number"): +- return +- +- if instance <= minimum: +- yield ValidationError( +- f"{instance!r} is less than or equal to " +- f"the minimum of {minimum!r}", +- ) +- +- +-def exclusiveMaximum(validator, maximum, instance, schema): +- if not validator.is_type(instance, "number"): +- return +- +- if instance >= maximum: +- yield ValidationError( +- f"{instance!r} is greater than or equal " +- f"to the maximum of {maximum!r}", +- ) +- +- +-def minimum(validator, minimum, instance, schema): +- if not validator.is_type(instance, "number"): +- return +- +- if instance < minimum: +- message = f"{instance!r} is less than the minimum of {minimum!r}" +- yield ValidationError(message) +- +- +-def maximum(validator, maximum, instance, schema): +- if not validator.is_type(instance, "number"): +- return +- +- if instance > maximum: +- message = f"{instance!r} is greater than the maximum of {maximum!r}" +- yield ValidationError(message) +- +- +-def multipleOf(validator, dB, instance, schema): +- if not validator.is_type(instance, "number"): +- return +- +- if isinstance(dB, float): +- quotient = instance / dB +- try: +- failed = int(quotient) != quotient +- except OverflowError: +- # When `instance` is large and `dB` is less than one, +- # quotient can overflow to infinity; and then casting to int +- # raises an error. +- # +- # In this case we fall back to Fraction logic, which is +- # exact and cannot overflow. 
The performance is also +- # acceptable: we try the fast all-float option first, and +- # we know that fraction(dB) can have at most a few hundred +- # digits in each part. The worst-case slowdown is therefore +- # for already-slow enormous integers or Decimals. +- failed = (Fraction(instance) / Fraction(dB)).denominator != 1 +- else: +- failed = instance % dB +- +- if failed: +- yield ValidationError(f"{instance!r} is not a multiple of {dB}") +- +- +-def minItems(validator, mI, instance, schema): +- if validator.is_type(instance, "array") and len(instance) < mI: +- yield ValidationError(f"{instance!r} is too short") +- +- +-def maxItems(validator, mI, instance, schema): +- if validator.is_type(instance, "array") and len(instance) > mI: +- yield ValidationError(f"{instance!r} is too long") +- +- +-def uniqueItems(validator, uI, instance, schema): +- if ( +- uI +- and validator.is_type(instance, "array") +- and not uniq(instance) +- ): +- yield ValidationError(f"{instance!r} has non-unique elements") +- +- +-def pattern(validator, patrn, instance, schema): +- if ( +- validator.is_type(instance, "string") +- and not re.search(patrn, instance) +- ): +- yield ValidationError(f"{instance!r} does not match {patrn!r}") +- +- +-def format(validator, format, instance, schema): +- if validator.format_checker is not None: +- try: +- validator.format_checker.check(instance, format) +- except FormatError as error: +- yield ValidationError(error.message, cause=error.cause) +- +- +-def minLength(validator, mL, instance, schema): +- if validator.is_type(instance, "string") and len(instance) < mL: +- yield ValidationError(f"{instance!r} is too short") +- +- +-def maxLength(validator, mL, instance, schema): +- if validator.is_type(instance, "string") and len(instance) > mL: +- yield ValidationError(f"{instance!r} is too long") +- +- +-def dependentRequired(validator, dependentRequired, instance, schema): +- if not validator.is_type(instance, "object"): +- return +- +- for property, dependency in dependentRequired.items(): +- if property not in instance: +- continue +- +- for each in dependency: +- if each not in instance: +- message = f"{each!r} is a dependency of {property!r}" +- yield ValidationError(message) +- +- +-def dependentSchemas(validator, dependentSchemas, instance, schema): +- if not validator.is_type(instance, "object"): +- return +- +- for property, dependency in dependentSchemas.items(): +- if property not in instance: +- continue +- yield from validator.descend( +- instance, dependency, schema_path=property, +- ) +- +- +-def enum(validator, enums, instance, schema): +- if instance == 0 or instance == 1: +- unbooled = unbool(instance) +- if all(unbooled != unbool(each) for each in enums): +- yield ValidationError(f"{instance!r} is not one of {enums!r}") +- elif instance not in enums: +- yield ValidationError(f"{instance!r} is not one of {enums!r}") +- +- +-def ref(validator, ref, instance, schema): +- resolve = getattr(validator.resolver, "resolve", None) +- if resolve is None: +- with validator.resolver.resolving(ref) as resolved: +- yield from validator.descend(instance, resolved) +- else: +- scope, resolved = validator.resolver.resolve(ref) +- validator.resolver.push_scope(scope) +- +- try: +- yield from validator.descend(instance, resolved) +- finally: +- validator.resolver.pop_scope() +- +- +-def dynamicRef(validator, dynamicRef, instance, schema): +- _, fragment = urldefrag(dynamicRef) +- +- for url in validator.resolver._scopes_stack: +- lookup_url = urljoin(url, dynamicRef) +- with 
validator.resolver.resolving(lookup_url) as subschema: +- if ("$dynamicAnchor" in subschema +- and fragment == subschema["$dynamicAnchor"]): +- yield from validator.descend(instance, subschema) +- break +- else: +- with validator.resolver.resolving(dynamicRef) as subschema: +- yield from validator.descend(instance, subschema) +- +- +-def type(validator, types, instance, schema): +- types = ensure_list(types) +- +- if not any(validator.is_type(instance, type) for type in types): +- reprs = ", ".join(repr(type) for type in types) +- yield ValidationError(f"{instance!r} is not of type {reprs}") +- +- +-def properties(validator, properties, instance, schema): +- if not validator.is_type(instance, "object"): +- return +- +- for property, subschema in properties.items(): +- if property in instance: +- yield from validator.descend( +- instance[property], +- subschema, +- path=property, +- schema_path=property, +- ) +- +- +-def required(validator, required, instance, schema): +- if not validator.is_type(instance, "object"): +- return +- for property in required: +- if property not in instance: +- yield ValidationError(f"{property!r} is a required property") +- +- +-def minProperties(validator, mP, instance, schema): +- if validator.is_type(instance, "object") and len(instance) < mP: +- yield ValidationError(f"{instance!r} does not have enough properties") +- +- +-def maxProperties(validator, mP, instance, schema): +- if not validator.is_type(instance, "object"): +- return +- if validator.is_type(instance, "object") and len(instance) > mP: +- yield ValidationError(f"{instance!r} has too many properties") +- +- +-def allOf(validator, allOf, instance, schema): +- for index, subschema in enumerate(allOf): +- yield from validator.descend(instance, subschema, schema_path=index) +- +- +-def anyOf(validator, anyOf, instance, schema): +- all_errors = [] +- for index, subschema in enumerate(anyOf): +- errs = list(validator.descend(instance, subschema, schema_path=index)) +- if not errs: +- break +- all_errors.extend(errs) +- else: +- yield ValidationError( +- f"{instance!r} is not valid under any of the given schemas", +- context=all_errors, +- ) +- +- +-def oneOf(validator, oneOf, instance, schema): +- subschemas = enumerate(oneOf) +- all_errors = [] +- for index, subschema in subschemas: +- errs = list(validator.descend(instance, subschema, schema_path=index)) +- if not errs: +- first_valid = subschema +- break +- all_errors.extend(errs) +- else: +- yield ValidationError( +- f"{instance!r} is not valid under any of the given schemas", +- context=all_errors, +- ) +- +- more_valid = [ +- each for _, each in subschemas +- if validator.evolve(schema=each).is_valid(instance) +- ] +- if more_valid: +- more_valid.append(first_valid) +- reprs = ", ".join(repr(schema) for schema in more_valid) +- yield ValidationError(f"{instance!r} is valid under each of {reprs}") +- +- +-def not_(validator, not_schema, instance, schema): +- if validator.evolve(schema=not_schema).is_valid(instance): +- message = f"{instance!r} should not be valid under {not_schema!r}" +- yield ValidationError(message) +- +- +-def if_(validator, if_schema, instance, schema): +- if validator.evolve(schema=if_schema).is_valid(instance): +- if "then" in schema: +- then = schema["then"] +- yield from validator.descend(instance, then, schema_path="then") +- elif "else" in schema: +- else_ = schema["else"] +- yield from validator.descend(instance, else_, schema_path="else") +- +- +-def unevaluatedItems(validator, unevaluatedItems, instance, schema): +- 
if not validator.is_type(instance, "array"): +- return +- evaluated_item_indexes = find_evaluated_item_indexes_by_schema( +- validator, instance, schema, +- ) +- unevaluated_items = [ +- item for index, item in enumerate(instance) +- if index not in evaluated_item_indexes +- ] +- if unevaluated_items: +- error = "Unevaluated items are not allowed (%s %s unexpected)" +- yield ValidationError(error % extras_msg(unevaluated_items)) +- +- +-def unevaluatedProperties(validator, unevaluatedProperties, instance, schema): +- if not validator.is_type(instance, "object"): +- return +- evaluated_property_keys = find_evaluated_property_keys_by_schema( +- validator, instance, schema, +- ) +- unevaluated_property_keys = [] +- for property in instance: +- if property not in evaluated_property_keys: +- for _ in validator.descend( +- instance[property], +- unevaluatedProperties, +- path=property, +- schema_path=property, +- ): +- unevaluated_property_keys.append(property) +- +- if unevaluated_property_keys: +- error = "Unevaluated properties are not allowed (%s %s unexpected)" +- yield ValidationError(error % extras_msg(unevaluated_property_keys)) +- +- +-def prefixItems(validator, prefixItems, instance, schema): +- if not validator.is_type(instance, "array"): +- return +- +- for (index, item), subschema in zip(enumerate(instance), prefixItems): +- yield from validator.descend( +- instance=item, +- schema=subschema, +- schema_path=index, +- path=index, +- ) +diff --git a/src/poetry/core/_vendor/jsonschema/benchmarks/__init__.py b/src/poetry/core/_vendor/jsonschema/benchmarks/__init__.py +deleted file mode 100644 +index e3dcc68..0000000 +--- a/src/poetry/core/_vendor/jsonschema/benchmarks/__init__.py ++++ /dev/null +@@ -1,5 +0,0 @@ +-""" +-Benchmarks for validation. +- +-This package is *not* public API. +-""" +diff --git a/src/poetry/core/_vendor/jsonschema/benchmarks/issue232.py b/src/poetry/core/_vendor/jsonschema/benchmarks/issue232.py +deleted file mode 100644 +index bf357e9..0000000 +--- a/src/poetry/core/_vendor/jsonschema/benchmarks/issue232.py ++++ /dev/null +@@ -1,25 +0,0 @@ +-""" +-A performance benchmark using the example from issue #232. +- +-See https://github.com/python-jsonschema/jsonschema/pull/232. +-""" +-from pathlib import Path +- +-from pyperf import Runner +-from pyrsistent import m +- +-from jsonschema.tests._suite import Version +-import jsonschema +- +-issue232 = Version( +- path=Path(__file__).parent / "issue232", +- remotes=m(), +- name="issue232", +-) +- +- +-if __name__ == "__main__": +- issue232.benchmark( +- runner=Runner(), +- Validator=jsonschema.Draft4Validator, +- ) +diff --git a/src/poetry/core/_vendor/jsonschema/benchmarks/json_schema_test_suite.py b/src/poetry/core/_vendor/jsonschema/benchmarks/json_schema_test_suite.py +deleted file mode 100644 +index 905fb6a..0000000 +--- a/src/poetry/core/_vendor/jsonschema/benchmarks/json_schema_test_suite.py ++++ /dev/null +@@ -1,12 +0,0 @@ +-""" +-A performance benchmark using the official test suite. +- +-This benchmarks jsonschema using every valid example in the +-JSON-Schema-Test-Suite. It will take some time to complete. 
+-""" +-from pyperf import Runner +- +-from jsonschema.tests._suite import Suite +- +-if __name__ == "__main__": +- Suite().benchmark(runner=Runner()) +diff --git a/src/poetry/core/_vendor/jsonschema/cli.py b/src/poetry/core/_vendor/jsonschema/cli.py +deleted file mode 100644 +index f19b680..0000000 +--- a/src/poetry/core/_vendor/jsonschema/cli.py ++++ /dev/null +@@ -1,288 +0,0 @@ +-""" +-The ``jsonschema`` command line. +-""" +- +-from json import JSONDecodeError +-from textwrap import dedent +-import argparse +-import json +-import sys +-import traceback +- +-try: +- from importlib import metadata +-except ImportError: +- import importlib_metadata as metadata # type: ignore +- +-try: +- from pkgutil import resolve_name +-except ImportError: +- from pkgutil_resolve_name import resolve_name # type: ignore +- +-import attr +- +-from jsonschema.exceptions import SchemaError +-from jsonschema.validators import RefResolver, validator_for +- +- +-class _CannotLoadFile(Exception): +- pass +- +- +-@attr.s +-class _Outputter(object): +- +- _formatter = attr.ib() +- _stdout = attr.ib() +- _stderr = attr.ib() +- +- @classmethod +- def from_arguments(cls, arguments, stdout, stderr): +- if arguments["output"] == "plain": +- formatter = _PlainFormatter(arguments["error_format"]) +- elif arguments["output"] == "pretty": +- formatter = _PrettyFormatter() +- return cls(formatter=formatter, stdout=stdout, stderr=stderr) +- +- def load(self, path): +- try: +- file = open(path) +- except FileNotFoundError: +- self.filenotfound_error(path=path, exc_info=sys.exc_info()) +- raise _CannotLoadFile() +- +- with file: +- try: +- return json.load(file) +- except JSONDecodeError: +- self.parsing_error(path=path, exc_info=sys.exc_info()) +- raise _CannotLoadFile() +- +- def filenotfound_error(self, **kwargs): +- self._stderr.write(self._formatter.filenotfound_error(**kwargs)) +- +- def parsing_error(self, **kwargs): +- self._stderr.write(self._formatter.parsing_error(**kwargs)) +- +- def validation_error(self, **kwargs): +- self._stderr.write(self._formatter.validation_error(**kwargs)) +- +- def validation_success(self, **kwargs): +- self._stdout.write(self._formatter.validation_success(**kwargs)) +- +- +-@attr.s +-class _PrettyFormatter(object): +- +- _ERROR_MSG = dedent( +- """\ +- ===[{type}]===({path})=== +- +- {body} +- ----------------------------- +- """, +- ) +- _SUCCESS_MSG = "===[SUCCESS]===({path})===\n" +- +- def filenotfound_error(self, path, exc_info): +- return self._ERROR_MSG.format( +- path=path, +- type="FileNotFoundError", +- body="{!r} does not exist.".format(path), +- ) +- +- def parsing_error(self, path, exc_info): +- exc_type, exc_value, exc_traceback = exc_info +- exc_lines = "".join( +- traceback.format_exception(exc_type, exc_value, exc_traceback), +- ) +- return self._ERROR_MSG.format( +- path=path, +- type=exc_type.__name__, +- body=exc_lines, +- ) +- +- def validation_error(self, instance_path, error): +- return self._ERROR_MSG.format( +- path=instance_path, +- type=error.__class__.__name__, +- body=error, +- ) +- +- def validation_success(self, instance_path): +- return self._SUCCESS_MSG.format(path=instance_path) +- +- +-@attr.s +-class _PlainFormatter(object): +- +- _error_format = attr.ib() +- +- def filenotfound_error(self, path, exc_info): +- return "{!r} does not exist.\n".format(path) +- +- def parsing_error(self, path, exc_info): +- return "Failed to parse {}: {}\n".format( +- "" if path == "" else repr(path), +- exc_info[1], +- ) +- +- def validation_error(self, instance_path, 
error): +- return self._error_format.format(file_name=instance_path, error=error) +- +- def validation_success(self, instance_path): +- return "" +- +- +-def _resolve_name_with_default(name): +- if "." not in name: +- name = "jsonschema." + name +- return resolve_name(name) +- +- +-parser = argparse.ArgumentParser( +- description="JSON Schema Validation CLI", +-) +-parser.add_argument( +- "-i", "--instance", +- action="append", +- dest="instances", +- help=""" +- a path to a JSON instance (i.e. filename.json) to validate (may +- be specified multiple times). If no instances are provided via this +- option, one will be expected on standard input. +- """, +-) +-parser.add_argument( +- "-F", "--error-format", +- help=""" +- the format to use for each validation error message, specified +- in a form suitable for str.format. This string will be passed +- one formatted object named 'error' for each ValidationError. +- Only provide this option when using --output=plain, which is the +- default. If this argument is unprovided and --output=plain is +- used, a simple default representation will be used. +- """, +-) +-parser.add_argument( +- "-o", "--output", +- choices=["plain", "pretty"], +- default="plain", +- help=""" +- an output format to use. 'plain' (default) will produce minimal +- text with one line for each error, while 'pretty' will produce +- more detailed human-readable output on multiple lines. +- """, +-) +-parser.add_argument( +- "-V", "--validator", +- type=_resolve_name_with_default, +- help=""" +- the fully qualified object name of a validator to use, or, for +- validators that are registered with jsonschema, simply the name +- of the class. +- """, +-) +-parser.add_argument( +- "--base-uri", +- help=""" +- a base URI to assign to the provided schema, even if it does not +- declare one (via e.g. $id). This option can be used if you wish to +- resolve relative references to a particular URI (or local path) +- """, +-) +-parser.add_argument( +- "--version", +- action="version", +- version=metadata.version("jsonschema"), +-) +-parser.add_argument( +- "schema", +- help="the path to a JSON Schema to validate with (i.e. 
schema.json)", +-) +- +- +-def parse_args(args): +- arguments = vars(parser.parse_args(args=args or ["--help"])) +- if arguments["output"] != "plain" and arguments["error_format"]: +- raise parser.error( +- "--error-format can only be used with --output plain", +- ) +- if arguments["output"] == "plain" and arguments["error_format"] is None: +- arguments["error_format"] = "{error.instance}: {error.message}\n" +- return arguments +- +- +-def _validate_instance(instance_path, instance, validator, outputter): +- invalid = False +- for error in validator.iter_errors(instance): +- invalid = True +- outputter.validation_error(instance_path=instance_path, error=error) +- +- if not invalid: +- outputter.validation_success(instance_path=instance_path) +- return invalid +- +- +-def main(args=sys.argv[1:]): +- sys.exit(run(arguments=parse_args(args=args))) +- +- +-def run(arguments, stdout=sys.stdout, stderr=sys.stderr, stdin=sys.stdin): +- outputter = _Outputter.from_arguments( +- arguments=arguments, +- stdout=stdout, +- stderr=stderr, +- ) +- +- try: +- schema = outputter.load(arguments["schema"]) +- except _CannotLoadFile: +- return 1 +- +- if arguments["validator"] is None: +- arguments["validator"] = validator_for(schema) +- +- try: +- arguments["validator"].check_schema(schema) +- except SchemaError as error: +- outputter.validation_error( +- instance_path=arguments["schema"], +- error=error, +- ) +- return 1 +- +- if arguments["instances"]: +- load, instances = outputter.load, arguments["instances"] +- else: +- def load(_): +- try: +- return json.load(stdin) +- except JSONDecodeError: +- outputter.parsing_error( +- path="", exc_info=sys.exc_info(), +- ) +- raise _CannotLoadFile() +- instances = [""] +- +- resolver = RefResolver( +- base_uri=arguments["base_uri"], +- referrer=schema, +- ) if arguments["base_uri"] is not None else None +- +- validator = arguments["validator"](schema, resolver=resolver) +- exit_code = 0 +- for each in instances: +- try: +- instance = load(each) +- except _CannotLoadFile: +- exit_code = 1 +- else: +- exit_code |= _validate_instance( +- instance_path=each, +- instance=instance, +- validator=validator, +- outputter=outputter, +- ) +- +- return exit_code +diff --git a/src/poetry/core/_vendor/jsonschema/exceptions.py b/src/poetry/core/_vendor/jsonschema/exceptions.py +deleted file mode 100644 +index d1351c4..0000000 +--- a/src/poetry/core/_vendor/jsonschema/exceptions.py ++++ /dev/null +@@ -1,394 +0,0 @@ +-""" +-Validation errors, and some surrounding helpers. 
+-""" +-from __future__ import annotations +- +-from collections import defaultdict, deque +-from pprint import pformat +-from textwrap import dedent, indent +-import heapq +-import itertools +- +-import attr +- +-from jsonschema import _utils +- +-WEAK_MATCHES: frozenset[str] = frozenset(["anyOf", "oneOf"]) +-STRONG_MATCHES: frozenset[str] = frozenset() +- +-_unset = _utils.Unset() +- +- +-class _Error(Exception): +- def __init__( +- self, +- message, +- validator=_unset, +- path=(), +- cause=None, +- context=(), +- validator_value=_unset, +- instance=_unset, +- schema=_unset, +- schema_path=(), +- parent=None, +- type_checker=_unset, +- ): +- super(_Error, self).__init__( +- message, +- validator, +- path, +- cause, +- context, +- validator_value, +- instance, +- schema, +- schema_path, +- parent, +- ) +- self.message = message +- self.path = self.relative_path = deque(path) +- self.schema_path = self.relative_schema_path = deque(schema_path) +- self.context = list(context) +- self.cause = self.__cause__ = cause +- self.validator = validator +- self.validator_value = validator_value +- self.instance = instance +- self.schema = schema +- self.parent = parent +- self._type_checker = type_checker +- +- for error in context: +- error.parent = self +- +- def __repr__(self): +- return f"<{self.__class__.__name__}: {self.message!r}>" +- +- def __str__(self): +- essential_for_verbose = ( +- self.validator, self.validator_value, self.instance, self.schema, +- ) +- if any(m is _unset for m in essential_for_verbose): +- return self.message +- +- schema_path = _utils.format_as_index( +- container=self._word_for_schema_in_error_message, +- indices=list(self.relative_schema_path)[:-1], +- ) +- instance_path = _utils.format_as_index( +- container=self._word_for_instance_in_error_message, +- indices=self.relative_path, +- ) +- prefix = 16 * " " +- +- return dedent( +- f"""\ +- {self.message} +- +- Failed validating {self.validator!r} in {schema_path}: +- {indent(pformat(self.schema, width=72), prefix).lstrip()} +- +- On {instance_path}: +- {indent(pformat(self.instance, width=72), prefix).lstrip()} +- """.rstrip(), +- ) +- +- @classmethod +- def create_from(cls, other): +- return cls(**other._contents()) +- +- @property +- def absolute_path(self): +- parent = self.parent +- if parent is None: +- return self.relative_path +- +- path = deque(self.relative_path) +- path.extendleft(reversed(parent.absolute_path)) +- return path +- +- @property +- def absolute_schema_path(self): +- parent = self.parent +- if parent is None: +- return self.relative_schema_path +- +- path = deque(self.relative_schema_path) +- path.extendleft(reversed(parent.absolute_schema_path)) +- return path +- +- @property +- def json_path(self): +- path = "$" +- for elem in self.absolute_path: +- if isinstance(elem, int): +- path += "[" + str(elem) + "]" +- else: +- path += "." 
+ elem +- return path +- +- def _set(self, type_checker=None, **kwargs): +- if type_checker is not None and self._type_checker is _unset: +- self._type_checker = type_checker +- +- for k, v in kwargs.items(): +- if getattr(self, k) is _unset: +- setattr(self, k, v) +- +- def _contents(self): +- attrs = ( +- "message", "cause", "context", "validator", "validator_value", +- "path", "schema_path", "instance", "schema", "parent", +- ) +- return dict((attr, getattr(self, attr)) for attr in attrs) +- +- def _matches_type(self): +- try: +- expected = self.schema["type"] +- except (KeyError, TypeError): +- return False +- +- if isinstance(expected, str): +- return self._type_checker.is_type(self.instance, expected) +- +- return any( +- self._type_checker.is_type(self.instance, expected_type) +- for expected_type in expected +- ) +- +- +-class ValidationError(_Error): +- """ +- An instance was invalid under a provided schema. +- """ +- +- _word_for_schema_in_error_message = "schema" +- _word_for_instance_in_error_message = "instance" +- +- +-class SchemaError(_Error): +- """ +- A schema was invalid under its corresponding metaschema. +- """ +- +- _word_for_schema_in_error_message = "metaschema" +- _word_for_instance_in_error_message = "schema" +- +- +-@attr.s(hash=True) +-class RefResolutionError(Exception): +- """ +- A ref could not be resolved. +- """ +- +- _cause = attr.ib() +- +- def __str__(self): +- return str(self._cause) +- +- +-class UndefinedTypeCheck(Exception): +- """ +- A type checker was asked to check a type it did not have registered. +- """ +- +- def __init__(self, type): +- self.type = type +- +- def __str__(self): +- return f"Type {self.type!r} is unknown to this type checker" +- +- +-class UnknownType(Exception): +- """ +- A validator was asked to validate an instance against an unknown type. +- """ +- +- def __init__(self, type, instance, schema): +- self.type = type +- self.instance = instance +- self.schema = schema +- +- def __str__(self): +- prefix = 16 * " " +- +- return dedent( +- f"""\ +- Unknown type {self.type!r} for validator with schema: +- {indent(pformat(self.schema, width=72), prefix).lstrip()} +- +- While checking instance: +- {indent(pformat(self.instance, width=72), prefix).lstrip()} +- """.rstrip(), +- ) +- +- +-class FormatError(Exception): +- """ +- Validating a format failed. +- """ +- +- def __init__(self, message, cause=None): +- super(FormatError, self).__init__(message, cause) +- self.message = message +- self.cause = self.__cause__ = cause +- +- def __str__(self): +- return self.message +- +- +-class ErrorTree(object): +- """ +- ErrorTrees make it easier to check which validations failed. +- """ +- +- _instance = _unset +- +- def __init__(self, errors=()): +- self.errors = {} +- self._contents = defaultdict(self.__class__) +- +- for error in errors: +- container = self +- for element in error.path: +- container = container[element] +- container.errors[error.validator] = error +- +- container._instance = error.instance +- +- def __contains__(self, index): +- """ +- Check whether ``instance[index]`` has any errors. +- """ +- +- return index in self._contents +- +- def __getitem__(self, index): +- """ +- Retrieve the child tree one level down at the given ``index``. +- +- If the index is not in the instance that this tree corresponds +- to and is not known by this tree, whatever error would be raised +- by ``instance.__getitem__`` will be propagated (usually this is +- some subclass of `LookupError`. 
+- """ +- +- if self._instance is not _unset and index not in self: +- self._instance[index] +- return self._contents[index] +- +- def __setitem__(self, index, value): +- """ +- Add an error to the tree at the given ``index``. +- """ +- self._contents[index] = value +- +- def __iter__(self): +- """ +- Iterate (non-recursively) over the indices in the instance with errors. +- """ +- +- return iter(self._contents) +- +- def __len__(self): +- """ +- Return the `total_errors`. +- """ +- return self.total_errors +- +- def __repr__(self): +- return f"<{self.__class__.__name__} ({len(self)} total errors)>" +- +- @property +- def total_errors(self): +- """ +- The total number of errors in the entire tree, including children. +- """ +- +- child_errors = sum(len(tree) for _, tree in self._contents.items()) +- return len(self.errors) + child_errors +- +- +-def by_relevance(weak=WEAK_MATCHES, strong=STRONG_MATCHES): +- """ +- Create a key function that can be used to sort errors by relevance. +- +- Arguments: +- weak (set): +- a collection of validation keywords to consider to be +- "weak". If there are two errors at the same level of the +- instance and one is in the set of weak validation keywords, +- the other error will take priority. By default, :kw:`anyOf` +- and :kw:`oneOf` are considered weak keywords and will be +- superseded by other same-level validation errors. +- +- strong (set): +- a collection of validation keywords to consider to be +- "strong" +- """ +- def relevance(error): +- validator = error.validator +- return ( +- -len(error.path), +- validator not in weak, +- validator in strong, +- not error._matches_type(), +- ) +- return relevance +- +- +-relevance = by_relevance() +- +- +-def best_match(errors, key=relevance): +- """ +- Try to find an error that appears to be the best match among given errors. +- +- In general, errors that are higher up in the instance (i.e. for which +- `ValidationError.path` is shorter) are considered better matches, +- since they indicate "more" is wrong with the instance. +- +- If the resulting match is either :kw:`oneOf` or :kw:`anyOf`, the +- *opposite* assumption is made -- i.e. the deepest error is picked, +- since these keywords only need to match once, and any other errors +- may not be relevant. +- +- Arguments: +- errors (collections.abc.Iterable): +- +- the errors to select from. Do not provide a mixture of +- errors from different validation attempts (i.e. from +- different instances or schemas), since it won't produce +- sensical output. +- +- key (collections.abc.Callable): +- +- the key to use when sorting errors. See `relevance` and +- transitively `by_relevance` for more details (the default is +- to sort with the defaults of that function). Changing the +- default is only useful if you want to change the function +- that rates errors but still want the error context descent +- done by this function. +- +- Returns: +- the best matching error, or ``None`` if the iterable was empty +- +- .. note:: +- +- This function is a heuristic. Its return value may change for a given +- set of inputs from version to version if better heuristics are added. +- """ +- errors = iter(errors) +- best = next(errors, None) +- if best is None: +- return +- best = max(itertools.chain([best], errors), key=key) +- +- while best.context: +- # Calculate the minimum via nsmallest, because we don't recurse if +- # all nested errors have the same relevance (i.e. 
if min == max == all) +- smallest = heapq.nsmallest(2, best.context, key=key) +- if len(smallest) == 2 and key(smallest[0]) == key(smallest[1]): +- return best +- best = smallest[0] +- return best +diff --git a/src/poetry/core/_vendor/jsonschema/protocols.py b/src/poetry/core/_vendor/jsonschema/protocols.py +deleted file mode 100644 +index 0e96eff..0000000 +--- a/src/poetry/core/_vendor/jsonschema/protocols.py ++++ /dev/null +@@ -1,181 +0,0 @@ +-""" +-typing.Protocol classes for jsonschema interfaces. +-""" +- +-# for reference material on Protocols, see +-# https://www.python.org/dev/peps/pep-0544/ +- +-from __future__ import annotations +- +-from typing import TYPE_CHECKING, Any, ClassVar, Iterator +-import sys +- +-# doing these imports with `try ... except ImportError` doesn't pass mypy +-# checking because mypy sees `typing._SpecialForm` and +-# `typing_extensions._SpecialForm` as incompatible +-# +-# see: +-# https://mypy.readthedocs.io/en/stable/runtime_troubles.html#using-new-additions-to-the-typing-module +-# https://github.com/python/mypy/issues/4427 +-if sys.version_info >= (3, 8): +- from typing import Protocol, runtime_checkable +-else: +- from typing_extensions import Protocol, runtime_checkable +- +-# in order for Sphinx to resolve references accurately from type annotations, +-# it needs to see names like `jsonschema.TypeChecker` +-# therefore, only import at type-checking time (to avoid circular references), +-# but use `jsonschema` for any types which will otherwise not be resolvable +-if TYPE_CHECKING: +- import jsonschema +- +-from jsonschema.exceptions import ValidationError +-from jsonschema.validators import RefResolver +- +-# For code authors working on the validator protocol, these are the three +-# use-cases which should be kept in mind: +-# +-# 1. As a protocol class, it can be used in type annotations to describe the +-# available methods and attributes of a validator +-# 2. It is the source of autodoc for the validator documentation +-# 3. It is runtime_checkable, meaning that it can be used in isinstance() +-# checks. +-# +-# Since protocols are not base classes, isinstance() checking is limited in +-# its capabilities. See docs on runtime_checkable for detail +- +- +-@runtime_checkable +-class Validator(Protocol): +- """ +- The protocol to which all validator classes should adhere. +- +- :argument schema: the schema that the validator object +- will validate with. It is assumed to be valid, and providing +- an invalid schema can lead to undefined behavior. See +- `Validator.check_schema` to validate a schema first. +- :argument resolver: an instance of `jsonschema.RefResolver` that will be +- used to resolve :kw:`$ref` properties (JSON references). If +- unprovided, one will be created. +- :argument format_checker: an instance of `jsonschema.FormatChecker` +- whose `jsonschema.FormatChecker.conforms` method will be called to +- check and see if instances conform to each :kw:`format` +- property present in the schema. If unprovided, no validation +- will be done for :kw:`format`. Certain formats require +- additional packages to be installed (ipv5, uri, color, date-time). +- The required packages can be found at the bottom of this page. +- """ +- +- #: An object representing the validator's meta schema (the schema that +- #: describes valid schemas in the given version). +- META_SCHEMA: ClassVar[dict] +- +- #: A mapping of validation keywords (`str`\s) to functions that +- #: validate the keyword with that name. 
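To make the relevance heuristic of by_relevance and best_match above concrete, a small sketch (the message in the comment is what jsonschema 4.x produces, as in the doctests below): on an anyOf failure, best_match descends into the error's context and prefers the branch whose type actually matched the instance.

    from jsonschema import Draft202012Validator
    from jsonschema.exceptions import best_match

    schema = {"anyOf": [{"type": "string"}, {"type": "array", "maxItems": 2}]}
    error = best_match(Draft202012Validator(schema).iter_errors([1, 2, 3]))
    # The instance is an array, so the array branch's error wins:
    print(error.message)  # [1, 2, 3] is too long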
For more information see +- #: `creating-validators`. +- VALIDATORS: ClassVar[dict] +- +- #: A `jsonschema.TypeChecker` that will be used when validating +- #: :kw:`type` keywords in JSON schemas. +- TYPE_CHECKER: ClassVar[jsonschema.TypeChecker] +- +- #: A `jsonschema.FormatChecker` that will be used when validating +- #: :kw:`format` properties in JSON schemas. +- FORMAT_CHECKER: ClassVar[jsonschema.FormatChecker] +- +- #: The schema that was passed in when initializing the object. +- schema: dict | bool +- +- def __init__( +- self, +- schema: dict | bool, +- resolver: RefResolver | None = None, +- format_checker: jsonschema.FormatChecker | None = None, +- ) -> None: +- ... +- +- @classmethod +- def check_schema(cls, schema: dict) -> None: +- """ +- Validate the given schema against the validator's `META_SCHEMA`. +- +- :raises: `jsonschema.exceptions.SchemaError` if the schema +- is invalid +- """ +- +- def is_type(self, instance: Any, type: str) -> bool: +- """ +- Check if the instance is of the given (JSON Schema) type. +- +- :type type: str +- :rtype: bool +- :raises: `jsonschema.exceptions.UnknownType` if ``type`` +- is not a known type. +- """ +- +- def is_valid(self, instance: dict) -> bool: +- """ +- Check if the instance is valid under the current `schema`. +- +- :rtype: bool +- +- >>> schema = {"maxItems" : 2} +- >>> Draft202012Validator(schema).is_valid([2, 3, 4]) +- False +- """ +- +- def iter_errors(self, instance: dict) -> Iterator[ValidationError]: +- r""" +- Lazily yield each of the validation errors in the given instance. +- +- :rtype: an `collections.abc.Iterable` of +- `jsonschema.exceptions.ValidationError`\s +- +- >>> schema = { +- ... "type" : "array", +- ... "items" : {"enum" : [1, 2, 3]}, +- ... "maxItems" : 2, +- ... } +- >>> v = Draft202012Validator(schema) +- >>> for error in sorted(v.iter_errors([2, 3, 4]), key=str): +- ... print(error.message) +- 4 is not one of [1, 2, 3] +- [2, 3, 4] is too long +- """ +- +- def validate(self, instance: dict) -> None: +- """ +- Check if the instance is valid under the current `schema`. +- +- :raises: `jsonschema.exceptions.ValidationError` if the +- instance is invalid +- +- >>> schema = {"maxItems" : 2} +- >>> Draft202012Validator(schema).validate([2, 3, 4]) +- Traceback (most recent call last): +- ... +- ValidationError: [2, 3, 4] is too long +- """ +- +- def evolve(self, **kwargs) -> "Validator": +- """ +- Create a new validator like this one, but with given changes. +- +- Preserves all other attributes, so can be used to e.g. create a +- validator with a different schema but with the same :kw:`$ref` +- resolution behavior. +- +- >>> validator = Draft202012Validator({}) +- >>> validator.evolve(schema={"type": "number"}) +- Draft202012Validator(schema={'type': 'number'}, format_checker=None) +- +- The returned object satisfies the validator protocol, but may not +- be of the same concrete class! In particular this occurs +- when a :kw:`$ref` occurs to a schema with a different +- :kw:`$schema` than this one (i.e. for a different draft). +- +- >>> validator.evolve( +- ... schema={"$schema": Draft7Validator.META_SCHEMA["$id"]} +- ... 
) +- Draft7Validator(schema=..., format_checker=None) +- """ +diff --git a/src/poetry/core/_vendor/jsonschema/schemas/draft2019-09.json b/src/poetry/core/_vendor/jsonschema/schemas/draft2019-09.json +deleted file mode 100644 +index 2248a0c..0000000 +--- a/src/poetry/core/_vendor/jsonschema/schemas/draft2019-09.json ++++ /dev/null +@@ -1,42 +0,0 @@ +-{ +- "$schema": "https://json-schema.org/draft/2019-09/schema", +- "$id": "https://json-schema.org/draft/2019-09/schema", +- "$vocabulary": { +- "https://json-schema.org/draft/2019-09/vocab/core": true, +- "https://json-schema.org/draft/2019-09/vocab/applicator": true, +- "https://json-schema.org/draft/2019-09/vocab/validation": true, +- "https://json-schema.org/draft/2019-09/vocab/meta-data": true, +- "https://json-schema.org/draft/2019-09/vocab/format": false, +- "https://json-schema.org/draft/2019-09/vocab/content": true +- }, +- "$recursiveAnchor": true, +- +- "title": "Core and Validation specifications meta-schema", +- "allOf": [ +- {"$ref": "meta/core"}, +- {"$ref": "meta/applicator"}, +- {"$ref": "meta/validation"}, +- {"$ref": "meta/meta-data"}, +- {"$ref": "meta/format"}, +- {"$ref": "meta/content"} +- ], +- "type": ["object", "boolean"], +- "properties": { +- "definitions": { +- "$comment": "While no longer an official keyword as it is replaced by $defs, this keyword is retained in the meta-schema to prevent incompatible extensions as it remains in common use.", +- "type": "object", +- "additionalProperties": { "$recursiveRef": "#" }, +- "default": {} +- }, +- "dependencies": { +- "$comment": "\"dependencies\" is no longer a keyword, but schema authors should avoid redefining it to facilitate a smooth transition to \"dependentSchemas\" and \"dependentRequired\"", +- "type": "object", +- "additionalProperties": { +- "anyOf": [ +- { "$recursiveRef": "#" }, +- { "$ref": "meta/validation#/$defs/stringArray" } +- ] +- } +- } +- } +-} +diff --git a/src/poetry/core/_vendor/jsonschema/schemas/draft2020-12.json b/src/poetry/core/_vendor/jsonschema/schemas/draft2020-12.json +deleted file mode 100644 +index d5e2d31..0000000 +--- a/src/poetry/core/_vendor/jsonschema/schemas/draft2020-12.json ++++ /dev/null +@@ -1,58 +0,0 @@ +-{ +- "$schema": "https://json-schema.org/draft/2020-12/schema", +- "$id": "https://json-schema.org/draft/2020-12/schema", +- "$vocabulary": { +- "https://json-schema.org/draft/2020-12/vocab/core": true, +- "https://json-schema.org/draft/2020-12/vocab/applicator": true, +- "https://json-schema.org/draft/2020-12/vocab/unevaluated": true, +- "https://json-schema.org/draft/2020-12/vocab/validation": true, +- "https://json-schema.org/draft/2020-12/vocab/meta-data": true, +- "https://json-schema.org/draft/2020-12/vocab/format-annotation": true, +- "https://json-schema.org/draft/2020-12/vocab/content": true +- }, +- "$dynamicAnchor": "meta", +- +- "title": "Core and Validation specifications meta-schema", +- "allOf": [ +- {"$ref": "meta/core"}, +- {"$ref": "meta/applicator"}, +- {"$ref": "meta/unevaluated"}, +- {"$ref": "meta/validation"}, +- {"$ref": "meta/meta-data"}, +- {"$ref": "meta/format-annotation"}, +- {"$ref": "meta/content"} +- ], +- "type": ["object", "boolean"], +- "$comment": "This meta-schema also defines keywords that have appeared in previous drafts in order to prevent incompatible extensions as they remain in common use.", +- "properties": { +- "definitions": { +- "$comment": "\"definitions\" has been replaced by \"$defs\".", +- "type": "object", +- "additionalProperties": { "$dynamicRef": "#meta" }, +- 
"deprecated": true, +- "default": {} +- }, +- "dependencies": { +- "$comment": "\"dependencies\" has been split and replaced by \"dependentSchemas\" and \"dependentRequired\" in order to serve their differing semantics.", +- "type": "object", +- "additionalProperties": { +- "anyOf": [ +- { "$dynamicRef": "#meta" }, +- { "$ref": "meta/validation#/$defs/stringArray" } +- ] +- }, +- "deprecated": true, +- "default": {} +- }, +- "$recursiveAnchor": { +- "$comment": "\"$recursiveAnchor\" has been replaced by \"$dynamicAnchor\".", +- "$ref": "meta/core#/$defs/anchorString", +- "deprecated": true +- }, +- "$recursiveRef": { +- "$comment": "\"$recursiveRef\" has been replaced by \"$dynamicRef\".", +- "$ref": "meta/core#/$defs/uriReferenceString", +- "deprecated": true +- } +- } +-} +diff --git a/src/poetry/core/_vendor/jsonschema/schemas/draft3.json b/src/poetry/core/_vendor/jsonschema/schemas/draft3.json +deleted file mode 100644 +index 23d59b6..0000000 +--- a/src/poetry/core/_vendor/jsonschema/schemas/draft3.json ++++ /dev/null +@@ -1,177 +0,0 @@ +-{ +- "$schema" : "http://json-schema.org/draft-03/schema#", +- "id" : "http://json-schema.org/draft-03/schema#", +- "type" : "object", +- +- "properties" : { +- "type" : { +- "type" : ["string", "array"], +- "items" : { +- "type" : ["string", {"$ref" : "#"}] +- }, +- "uniqueItems" : true, +- "default" : "any" +- }, +- +- "properties" : { +- "type" : "object", +- "additionalProperties" : {"$ref" : "#", "type" : "object"}, +- "default" : {} +- }, +- +- "patternProperties" : { +- "type" : "object", +- "additionalProperties" : {"$ref" : "#"}, +- "default" : {} +- }, +- +- "additionalProperties" : { +- "type" : [{"$ref" : "#"}, "boolean"], +- "default" : {} +- }, +- +- "items" : { +- "type" : [{"$ref" : "#"}, "array"], +- "items" : {"$ref" : "#"}, +- "default" : {} +- }, +- +- "additionalItems" : { +- "type" : [{"$ref" : "#"}, "boolean"], +- "default" : {} +- }, +- +- "required" : { +- "type" : "boolean", +- "default" : false +- }, +- +- "dependencies" : { +- "type" : ["string", "array", "object"], +- "additionalProperties" : { +- "type" : ["string", "array", {"$ref" : "#"}], +- "items" : { +- "type" : "string" +- } +- }, +- "default" : {} +- }, +- +- "minimum" : { +- "type" : "number" +- }, +- +- "maximum" : { +- "type" : "number" +- }, +- +- "exclusiveMinimum" : { +- "type" : "boolean", +- "default" : false +- }, +- +- "exclusiveMaximum" : { +- "type" : "boolean", +- "default" : false +- }, +- +- "maxDecimal": { +- "minimum": 0, +- "type": "number" +- }, +- +- "minItems" : { +- "type" : "integer", +- "minimum" : 0, +- "default" : 0 +- }, +- +- "maxItems" : { +- "type" : "integer", +- "minimum" : 0 +- }, +- +- "uniqueItems" : { +- "type" : "boolean", +- "default" : false +- }, +- +- "pattern" : { +- "type" : "string", +- "format" : "regex" +- }, +- +- "minLength" : { +- "type" : "integer", +- "minimum" : 0, +- "default" : 0 +- }, +- +- "maxLength" : { +- "type" : "integer" +- }, +- +- "enum" : { +- "type" : "array" +- }, +- +- "default" : { +- "type" : "any" +- }, +- +- "title" : { +- "type" : "string" +- }, +- +- "description" : { +- "type" : "string" +- }, +- +- "format" : { +- "type" : "string" +- }, +- +- "divisibleBy" : { +- "type" : "number", +- "minimum" : 0, +- "exclusiveMinimum" : true, +- "default" : 1 +- }, +- +- "disallow" : { +- "type" : ["string", "array"], +- "items" : { +- "type" : ["string", {"$ref" : "#"}] +- }, +- "uniqueItems" : true +- }, +- +- "extends" : { +- "type" : [{"$ref" : "#"}, "array"], +- "items" : {"$ref" : "#"}, +- 
"default" : {} +- }, +- +- "id" : { +- "type" : "string", +- "format" : "uri" +- }, +- +- "$ref" : { +- "type" : "string", +- "format" : "uri" +- }, +- +- "$schema" : { +- "type" : "string", +- "format" : "uri" +- } +- }, +- +- "dependencies" : { +- "exclusiveMinimum" : "minimum", +- "exclusiveMaximum" : "maximum" +- }, +- +- "default" : {} +-} +diff --git a/src/poetry/core/_vendor/jsonschema/schemas/draft4.json b/src/poetry/core/_vendor/jsonschema/schemas/draft4.json +deleted file mode 100644 +index ba0c117..0000000 +--- a/src/poetry/core/_vendor/jsonschema/schemas/draft4.json ++++ /dev/null +@@ -1,149 +0,0 @@ +-{ +- "id": "http://json-schema.org/draft-04/schema#", +- "$schema": "http://json-schema.org/draft-04/schema#", +- "description": "Core schema meta-schema", +- "definitions": { +- "schemaArray": { +- "type": "array", +- "minItems": 1, +- "items": { "$ref": "#" } +- }, +- "positiveInteger": { +- "type": "integer", +- "minimum": 0 +- }, +- "positiveIntegerDefault0": { +- "allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ] +- }, +- "simpleTypes": { +- "enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ] +- }, +- "stringArray": { +- "type": "array", +- "items": { "type": "string" }, +- "minItems": 1, +- "uniqueItems": true +- } +- }, +- "type": "object", +- "properties": { +- "id": { +- "format": "uri", +- "type": "string" +- }, +- "$schema": { +- "type": "string", +- "format": "uri" +- }, +- "title": { +- "type": "string" +- }, +- "description": { +- "type": "string" +- }, +- "default": {}, +- "multipleOf": { +- "type": "number", +- "minimum": 0, +- "exclusiveMinimum": true +- }, +- "maximum": { +- "type": "number" +- }, +- "exclusiveMaximum": { +- "type": "boolean", +- "default": false +- }, +- "minimum": { +- "type": "number" +- }, +- "exclusiveMinimum": { +- "type": "boolean", +- "default": false +- }, +- "maxLength": { "$ref": "#/definitions/positiveInteger" }, +- "minLength": { "$ref": "#/definitions/positiveIntegerDefault0" }, +- "pattern": { +- "type": "string", +- "format": "regex" +- }, +- "additionalItems": { +- "anyOf": [ +- { "type": "boolean" }, +- { "$ref": "#" } +- ], +- "default": {} +- }, +- "items": { +- "anyOf": [ +- { "$ref": "#" }, +- { "$ref": "#/definitions/schemaArray" } +- ], +- "default": {} +- }, +- "maxItems": { "$ref": "#/definitions/positiveInteger" }, +- "minItems": { "$ref": "#/definitions/positiveIntegerDefault0" }, +- "uniqueItems": { +- "type": "boolean", +- "default": false +- }, +- "maxProperties": { "$ref": "#/definitions/positiveInteger" }, +- "minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" }, +- "required": { "$ref": "#/definitions/stringArray" }, +- "additionalProperties": { +- "anyOf": [ +- { "type": "boolean" }, +- { "$ref": "#" } +- ], +- "default": {} +- }, +- "definitions": { +- "type": "object", +- "additionalProperties": { "$ref": "#" }, +- "default": {} +- }, +- "properties": { +- "type": "object", +- "additionalProperties": { "$ref": "#" }, +- "default": {} +- }, +- "patternProperties": { +- "type": "object", +- "additionalProperties": { "$ref": "#" }, +- "default": {} +- }, +- "dependencies": { +- "type": "object", +- "additionalProperties": { +- "anyOf": [ +- { "$ref": "#" }, +- { "$ref": "#/definitions/stringArray" } +- ] +- } +- }, +- "enum": { +- "type": "array" +- }, +- "type": { +- "anyOf": [ +- { "$ref": "#/definitions/simpleTypes" }, +- { +- "type": "array", +- "items": { "$ref": "#/definitions/simpleTypes" }, +- "minItems": 1, +- "uniqueItems": true +- } +- 
] +- }, +- "format": { "type": "string" }, +- "allOf": { "$ref": "#/definitions/schemaArray" }, +- "anyOf": { "$ref": "#/definitions/schemaArray" }, +- "oneOf": { "$ref": "#/definitions/schemaArray" }, +- "not": { "$ref": "#" } +- }, +- "dependencies": { +- "exclusiveMaximum": [ "maximum" ], +- "exclusiveMinimum": [ "minimum" ] +- }, +- "default": {} +-} +diff --git a/src/poetry/core/_vendor/jsonschema/schemas/draft6.json b/src/poetry/core/_vendor/jsonschema/schemas/draft6.json +deleted file mode 100644 +index a0d2bf7..0000000 +--- a/src/poetry/core/_vendor/jsonschema/schemas/draft6.json ++++ /dev/null +@@ -1,153 +0,0 @@ +-{ +- "$schema": "http://json-schema.org/draft-06/schema#", +- "$id": "http://json-schema.org/draft-06/schema#", +- "title": "Core schema meta-schema", +- "definitions": { +- "schemaArray": { +- "type": "array", +- "minItems": 1, +- "items": { "$ref": "#" } +- }, +- "nonNegativeInteger": { +- "type": "integer", +- "minimum": 0 +- }, +- "nonNegativeIntegerDefault0": { +- "allOf": [ +- { "$ref": "#/definitions/nonNegativeInteger" }, +- { "default": 0 } +- ] +- }, +- "simpleTypes": { +- "enum": [ +- "array", +- "boolean", +- "integer", +- "null", +- "number", +- "object", +- "string" +- ] +- }, +- "stringArray": { +- "type": "array", +- "items": { "type": "string" }, +- "uniqueItems": true, +- "default": [] +- } +- }, +- "type": ["object", "boolean"], +- "properties": { +- "$id": { +- "type": "string", +- "format": "uri-reference" +- }, +- "$schema": { +- "type": "string", +- "format": "uri" +- }, +- "$ref": { +- "type": "string", +- "format": "uri-reference" +- }, +- "title": { +- "type": "string" +- }, +- "description": { +- "type": "string" +- }, +- "default": {}, +- "examples": { +- "type": "array", +- "items": {} +- }, +- "multipleOf": { +- "type": "number", +- "exclusiveMinimum": 0 +- }, +- "maximum": { +- "type": "number" +- }, +- "exclusiveMaximum": { +- "type": "number" +- }, +- "minimum": { +- "type": "number" +- }, +- "exclusiveMinimum": { +- "type": "number" +- }, +- "maxLength": { "$ref": "#/definitions/nonNegativeInteger" }, +- "minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" }, +- "pattern": { +- "type": "string", +- "format": "regex" +- }, +- "additionalItems": { "$ref": "#" }, +- "items": { +- "anyOf": [ +- { "$ref": "#" }, +- { "$ref": "#/definitions/schemaArray" } +- ], +- "default": {} +- }, +- "maxItems": { "$ref": "#/definitions/nonNegativeInteger" }, +- "minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" }, +- "uniqueItems": { +- "type": "boolean", +- "default": false +- }, +- "contains": { "$ref": "#" }, +- "maxProperties": { "$ref": "#/definitions/nonNegativeInteger" }, +- "minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" }, +- "required": { "$ref": "#/definitions/stringArray" }, +- "additionalProperties": { "$ref": "#" }, +- "definitions": { +- "type": "object", +- "additionalProperties": { "$ref": "#" }, +- "default": {} +- }, +- "properties": { +- "type": "object", +- "additionalProperties": { "$ref": "#" }, +- "default": {} +- }, +- "patternProperties": { +- "type": "object", +- "additionalProperties": { "$ref": "#" }, +- "propertyNames": { "format": "regex" }, +- "default": {} +- }, +- "dependencies": { +- "type": "object", +- "additionalProperties": { +- "anyOf": [ +- { "$ref": "#" }, +- { "$ref": "#/definitions/stringArray" } +- ] +- } +- }, +- "propertyNames": { "$ref": "#" }, +- "const": {}, +- "enum": { +- "type": "array" +- }, +- "type": { +- "anyOf": [ +- { "$ref": 
"#/definitions/simpleTypes" }, +- { +- "type": "array", +- "items": { "$ref": "#/definitions/simpleTypes" }, +- "minItems": 1, +- "uniqueItems": true +- } +- ] +- }, +- "format": { "type": "string" }, +- "allOf": { "$ref": "#/definitions/schemaArray" }, +- "anyOf": { "$ref": "#/definitions/schemaArray" }, +- "oneOf": { "$ref": "#/definitions/schemaArray" }, +- "not": { "$ref": "#" } +- }, +- "default": {} +-} +diff --git a/src/poetry/core/_vendor/jsonschema/schemas/draft7.json b/src/poetry/core/_vendor/jsonschema/schemas/draft7.json +deleted file mode 100644 +index 746cde9..0000000 +--- a/src/poetry/core/_vendor/jsonschema/schemas/draft7.json ++++ /dev/null +@@ -1,166 +0,0 @@ +-{ +- "$schema": "http://json-schema.org/draft-07/schema#", +- "$id": "http://json-schema.org/draft-07/schema#", +- "title": "Core schema meta-schema", +- "definitions": { +- "schemaArray": { +- "type": "array", +- "minItems": 1, +- "items": { "$ref": "#" } +- }, +- "nonNegativeInteger": { +- "type": "integer", +- "minimum": 0 +- }, +- "nonNegativeIntegerDefault0": { +- "allOf": [ +- { "$ref": "#/definitions/nonNegativeInteger" }, +- { "default": 0 } +- ] +- }, +- "simpleTypes": { +- "enum": [ +- "array", +- "boolean", +- "integer", +- "null", +- "number", +- "object", +- "string" +- ] +- }, +- "stringArray": { +- "type": "array", +- "items": { "type": "string" }, +- "uniqueItems": true, +- "default": [] +- } +- }, +- "type": ["object", "boolean"], +- "properties": { +- "$id": { +- "type": "string", +- "format": "uri-reference" +- }, +- "$schema": { +- "type": "string", +- "format": "uri" +- }, +- "$ref": { +- "type": "string", +- "format": "uri-reference" +- }, +- "$comment": { +- "type": "string" +- }, +- "title": { +- "type": "string" +- }, +- "description": { +- "type": "string" +- }, +- "default": true, +- "readOnly": { +- "type": "boolean", +- "default": false +- }, +- "examples": { +- "type": "array", +- "items": true +- }, +- "multipleOf": { +- "type": "number", +- "exclusiveMinimum": 0 +- }, +- "maximum": { +- "type": "number" +- }, +- "exclusiveMaximum": { +- "type": "number" +- }, +- "minimum": { +- "type": "number" +- }, +- "exclusiveMinimum": { +- "type": "number" +- }, +- "maxLength": { "$ref": "#/definitions/nonNegativeInteger" }, +- "minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" }, +- "pattern": { +- "type": "string", +- "format": "regex" +- }, +- "additionalItems": { "$ref": "#" }, +- "items": { +- "anyOf": [ +- { "$ref": "#" }, +- { "$ref": "#/definitions/schemaArray" } +- ], +- "default": true +- }, +- "maxItems": { "$ref": "#/definitions/nonNegativeInteger" }, +- "minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" }, +- "uniqueItems": { +- "type": "boolean", +- "default": false +- }, +- "contains": { "$ref": "#" }, +- "maxProperties": { "$ref": "#/definitions/nonNegativeInteger" }, +- "minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" }, +- "required": { "$ref": "#/definitions/stringArray" }, +- "additionalProperties": { "$ref": "#" }, +- "definitions": { +- "type": "object", +- "additionalProperties": { "$ref": "#" }, +- "default": {} +- }, +- "properties": { +- "type": "object", +- "additionalProperties": { "$ref": "#" }, +- "default": {} +- }, +- "patternProperties": { +- "type": "object", +- "additionalProperties": { "$ref": "#" }, +- "propertyNames": { "format": "regex" }, +- "default": {} +- }, +- "dependencies": { +- "type": "object", +- "additionalProperties": { +- "anyOf": [ +- { "$ref": "#" }, +- { "$ref": "#/definitions/stringArray" } 
+- ] +- } +- }, +- "propertyNames": { "$ref": "#" }, +- "const": true, +- "enum": { +- "type": "array", +- "items": true +- }, +- "type": { +- "anyOf": [ +- { "$ref": "#/definitions/simpleTypes" }, +- { +- "type": "array", +- "items": { "$ref": "#/definitions/simpleTypes" }, +- "minItems": 1, +- "uniqueItems": true +- } +- ] +- }, +- "format": { "type": "string" }, +- "contentMediaType": { "type": "string" }, +- "contentEncoding": { "type": "string" }, +- "if": {"$ref": "#"}, +- "then": {"$ref": "#"}, +- "else": {"$ref": "#"}, +- "allOf": { "$ref": "#/definitions/schemaArray" }, +- "anyOf": { "$ref": "#/definitions/schemaArray" }, +- "oneOf": { "$ref": "#/definitions/schemaArray" }, +- "not": { "$ref": "#" } +- }, +- "default": true +-} +diff --git a/src/poetry/core/_vendor/jsonschema/schemas/vocabularies.json b/src/poetry/core/_vendor/jsonschema/schemas/vocabularies.json +deleted file mode 100644 +index bca1705..0000000 +--- a/src/poetry/core/_vendor/jsonschema/schemas/vocabularies.json ++++ /dev/null +@@ -1 +0,0 @@ +-{"https://json-schema.org/draft/2020-12/meta/content": {"$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://json-schema.org/draft/2020-12/meta/content", "$vocabulary": {"https://json-schema.org/draft/2020-12/vocab/content": true}, "$dynamicAnchor": "meta", "title": "Content vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"contentEncoding": {"type": "string"}, "contentMediaType": {"type": "string"}, "contentSchema": {"$dynamicRef": "#meta"}}}, "https://json-schema.org/draft/2020-12/meta/unevaluated": {"$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://json-schema.org/draft/2020-12/meta/unevaluated", "$vocabulary": {"https://json-schema.org/draft/2020-12/vocab/unevaluated": true}, "$dynamicAnchor": "meta", "title": "Unevaluated applicator vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"unevaluatedItems": {"$dynamicRef": "#meta"}, "unevaluatedProperties": {"$dynamicRef": "#meta"}}}, "https://json-schema.org/draft/2020-12/meta/format-annotation": {"$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://json-schema.org/draft/2020-12/meta/format-annotation", "$vocabulary": {"https://json-schema.org/draft/2020-12/vocab/format-annotation": true}, "$dynamicAnchor": "meta", "title": "Format vocabulary meta-schema for annotation results", "type": ["object", "boolean"], "properties": {"format": {"type": "string"}}}, "https://json-schema.org/draft/2020-12/meta/applicator": {"$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://json-schema.org/draft/2020-12/meta/applicator", "$vocabulary": {"https://json-schema.org/draft/2020-12/vocab/applicator": true}, "$dynamicAnchor": "meta", "title": "Applicator vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"prefixItems": {"$ref": "#/$defs/schemaArray"}, "items": {"$dynamicRef": "#meta"}, "contains": {"$dynamicRef": "#meta"}, "additionalProperties": {"$dynamicRef": "#meta"}, "properties": {"type": "object", "additionalProperties": {"$dynamicRef": "#meta"}, "default": {}}, "patternProperties": {"type": "object", "additionalProperties": {"$dynamicRef": "#meta"}, "propertyNames": {"format": "regex"}, "default": {}}, "dependentSchemas": {"type": "object", "additionalProperties": {"$dynamicRef": "#meta"}, "default": {}}, "propertyNames": {"$dynamicRef": "#meta"}, "if": {"$dynamicRef": "#meta"}, "then": {"$dynamicRef": "#meta"}, "else": {"$dynamicRef": "#meta"}, "allOf": {"$ref": 
"#/$defs/schemaArray"}, "anyOf": {"$ref": "#/$defs/schemaArray"}, "oneOf": {"$ref": "#/$defs/schemaArray"}, "not": {"$dynamicRef": "#meta"}}, "$defs": {"schemaArray": {"type": "array", "minItems": 1, "items": {"$dynamicRef": "#meta"}}}}, "https://json-schema.org/draft/2020-12/meta/meta-data": {"$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://json-schema.org/draft/2020-12/meta/meta-data", "$vocabulary": {"https://json-schema.org/draft/2020-12/vocab/meta-data": true}, "$dynamicAnchor": "meta", "title": "Meta-data vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"title": {"type": "string"}, "description": {"type": "string"}, "default": true, "deprecated": {"type": "boolean", "default": false}, "readOnly": {"type": "boolean", "default": false}, "writeOnly": {"type": "boolean", "default": false}, "examples": {"type": "array", "items": true}}}, "https://json-schema.org/draft/2020-12/meta/core": {"$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://json-schema.org/draft/2020-12/meta/core", "$vocabulary": {"https://json-schema.org/draft/2020-12/vocab/core": true}, "$dynamicAnchor": "meta", "title": "Core vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"$id": {"$ref": "#/$defs/uriReferenceString", "$comment": "Non-empty fragments not allowed.", "pattern": "^[^#]*#?$"}, "$schema": {"$ref": "#/$defs/uriString"}, "$ref": {"$ref": "#/$defs/uriReferenceString"}, "$anchor": {"$ref": "#/$defs/anchorString"}, "$dynamicRef": {"$ref": "#/$defs/uriReferenceString"}, "$dynamicAnchor": {"$ref": "#/$defs/anchorString"}, "$vocabulary": {"type": "object", "propertyNames": {"$ref": "#/$defs/uriString"}, "additionalProperties": {"type": "boolean"}}, "$comment": {"type": "string"}, "$defs": {"type": "object", "additionalProperties": {"$dynamicRef": "#meta"}}}, "$defs": {"anchorString": {"type": "string", "pattern": "^[A-Za-z_][-A-Za-z0-9._]*$"}, "uriString": {"type": "string", "format": "uri"}, "uriReferenceString": {"type": "string", "format": "uri-reference"}}}, "https://json-schema.org/draft/2020-12/meta/validation": {"$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://json-schema.org/draft/2020-12/meta/validation", "$vocabulary": {"https://json-schema.org/draft/2020-12/vocab/validation": true}, "$dynamicAnchor": "meta", "title": "Validation vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"type": {"anyOf": [{"$ref": "#/$defs/simpleTypes"}, {"type": "array", "items": {"$ref": "#/$defs/simpleTypes"}, "minItems": 1, "uniqueItems": true}]}, "const": true, "enum": {"type": "array", "items": true}, "multipleOf": {"type": "number", "exclusiveMinimum": 0}, "maximum": {"type": "number"}, "exclusiveMaximum": {"type": "number"}, "minimum": {"type": "number"}, "exclusiveMinimum": {"type": "number"}, "maxLength": {"$ref": "#/$defs/nonNegativeInteger"}, "minLength": {"$ref": "#/$defs/nonNegativeIntegerDefault0"}, "pattern": {"type": "string", "format": "regex"}, "maxItems": {"$ref": "#/$defs/nonNegativeInteger"}, "minItems": {"$ref": "#/$defs/nonNegativeIntegerDefault0"}, "uniqueItems": {"type": "boolean", "default": false}, "maxContains": {"$ref": "#/$defs/nonNegativeInteger"}, "minContains": {"$ref": "#/$defs/nonNegativeInteger", "default": 1}, "maxProperties": {"$ref": "#/$defs/nonNegativeInteger"}, "minProperties": {"$ref": "#/$defs/nonNegativeIntegerDefault0"}, "required": {"$ref": "#/$defs/stringArray"}, "dependentRequired": {"type": "object", "additionalProperties": {"$ref": 
"#/$defs/stringArray"}}}, "$defs": {"nonNegativeInteger": {"type": "integer", "minimum": 0}, "nonNegativeIntegerDefault0": {"$ref": "#/$defs/nonNegativeInteger", "default": 0}, "simpleTypes": {"enum": ["array", "boolean", "integer", "null", "number", "object", "string"]}, "stringArray": {"type": "array", "items": {"type": "string"}, "uniqueItems": true, "default": []}}}, "https://json-schema.org/draft/2019-09/meta/content": {"$schema": "https://json-schema.org/draft/2019-09/schema", "$id": "https://json-schema.org/draft/2019-09/meta/content", "$vocabulary": {"https://json-schema.org/draft/2019-09/vocab/content": true}, "$recursiveAnchor": true, "title": "Content vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"contentMediaType": {"type": "string"}, "contentEncoding": {"type": "string"}, "contentSchema": {"$recursiveRef": "#"}}}, "https://json-schema.org/draft/2019-09/meta/applicator": {"$schema": "https://json-schema.org/draft/2019-09/schema", "$id": "https://json-schema.org/draft/2019-09/meta/applicator", "$vocabulary": {"https://json-schema.org/draft/2019-09/vocab/applicator": true}, "$recursiveAnchor": true, "title": "Applicator vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"additionalItems": {"$recursiveRef": "#"}, "unevaluatedItems": {"$recursiveRef": "#"}, "items": {"anyOf": [{"$recursiveRef": "#"}, {"$ref": "#/$defs/schemaArray"}]}, "contains": {"$recursiveRef": "#"}, "additionalProperties": {"$recursiveRef": "#"}, "unevaluatedProperties": {"$recursiveRef": "#"}, "properties": {"type": "object", "additionalProperties": {"$recursiveRef": "#"}, "default": {}}, "patternProperties": {"type": "object", "additionalProperties": {"$recursiveRef": "#"}, "propertyNames": {"format": "regex"}, "default": {}}, "dependentSchemas": {"type": "object", "additionalProperties": {"$recursiveRef": "#"}}, "propertyNames": {"$recursiveRef": "#"}, "if": {"$recursiveRef": "#"}, "then": {"$recursiveRef": "#"}, "else": {"$recursiveRef": "#"}, "allOf": {"$ref": "#/$defs/schemaArray"}, "anyOf": {"$ref": "#/$defs/schemaArray"}, "oneOf": {"$ref": "#/$defs/schemaArray"}, "not": {"$recursiveRef": "#"}}, "$defs": {"schemaArray": {"type": "array", "minItems": 1, "items": {"$recursiveRef": "#"}}}}, "https://json-schema.org/draft/2019-09/meta/meta-data": {"$schema": "https://json-schema.org/draft/2019-09/schema", "$id": "https://json-schema.org/draft/2019-09/meta/meta-data", "$vocabulary": {"https://json-schema.org/draft/2019-09/vocab/meta-data": true}, "$recursiveAnchor": true, "title": "Meta-data vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"title": {"type": "string"}, "description": {"type": "string"}, "default": true, "deprecated": {"type": "boolean", "default": false}, "readOnly": {"type": "boolean", "default": false}, "writeOnly": {"type": "boolean", "default": false}, "examples": {"type": "array", "items": true}}}, "https://json-schema.org/draft/2019-09/meta/core": {"$schema": "https://json-schema.org/draft/2019-09/schema", "$id": "https://json-schema.org/draft/2019-09/meta/core", "$vocabulary": {"https://json-schema.org/draft/2019-09/vocab/core": true}, "$recursiveAnchor": true, "title": "Core vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"$id": {"type": "string", "format": "uri-reference", "$comment": "Non-empty fragments not allowed.", "pattern": "^[^#]*#?$"}, "$schema": {"type": "string", "format": "uri"}, "$anchor": {"type": "string", "pattern": "^[A-Za-z][-A-Za-z0-9.:_]*$"}, "$ref": {"type": "string", 
"format": "uri-reference"}, "$recursiveRef": {"type": "string", "format": "uri-reference"}, "$recursiveAnchor": {"type": "boolean", "default": false}, "$vocabulary": {"type": "object", "propertyNames": {"type": "string", "format": "uri"}, "additionalProperties": {"type": "boolean"}}, "$comment": {"type": "string"}, "$defs": {"type": "object", "additionalProperties": {"$recursiveRef": "#"}, "default": {}}}}, "https://json-schema.org/draft/2019-09/meta/validation": {"$schema": "https://json-schema.org/draft/2019-09/schema", "$id": "https://json-schema.org/draft/2019-09/meta/validation", "$vocabulary": {"https://json-schema.org/draft/2019-09/vocab/validation": true}, "$recursiveAnchor": true, "title": "Validation vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"multipleOf": {"type": "number", "exclusiveMinimum": 0}, "maximum": {"type": "number"}, "exclusiveMaximum": {"type": "number"}, "minimum": {"type": "number"}, "exclusiveMinimum": {"type": "number"}, "maxLength": {"$ref": "#/$defs/nonNegativeInteger"}, "minLength": {"$ref": "#/$defs/nonNegativeIntegerDefault0"}, "pattern": {"type": "string", "format": "regex"}, "maxItems": {"$ref": "#/$defs/nonNegativeInteger"}, "minItems": {"$ref": "#/$defs/nonNegativeIntegerDefault0"}, "uniqueItems": {"type": "boolean", "default": false}, "maxContains": {"$ref": "#/$defs/nonNegativeInteger"}, "minContains": {"$ref": "#/$defs/nonNegativeInteger", "default": 1}, "maxProperties": {"$ref": "#/$defs/nonNegativeInteger"}, "minProperties": {"$ref": "#/$defs/nonNegativeIntegerDefault0"}, "required": {"$ref": "#/$defs/stringArray"}, "dependentRequired": {"type": "object", "additionalProperties": {"$ref": "#/$defs/stringArray"}}, "const": true, "enum": {"type": "array", "items": true}, "type": {"anyOf": [{"$ref": "#/$defs/simpleTypes"}, {"type": "array", "items": {"$ref": "#/$defs/simpleTypes"}, "minItems": 1, "uniqueItems": true}]}}, "$defs": {"nonNegativeInteger": {"type": "integer", "minimum": 0}, "nonNegativeIntegerDefault0": {"$ref": "#/$defs/nonNegativeInteger", "default": 0}, "simpleTypes": {"enum": ["array", "boolean", "integer", "null", "number", "object", "string"]}, "stringArray": {"type": "array", "items": {"type": "string"}, "uniqueItems": true, "default": []}}}, "https://json-schema.org/draft/2019-09/meta/hyper-schema": {"$schema": "https://json-schema.org/draft/2019-09/hyper-schema", "$id": "https://json-schema.org/draft/2019-09/meta/hyper-schema", "$vocabulary": {"https://json-schema.org/draft/2019-09/vocab/hyper-schema": true}, "$recursiveAnchor": true, "title": "JSON Hyper-Schema Vocabulary Schema", "type": ["object", "boolean"], "properties": {"base": {"type": "string", "format": "uri-template"}, "links": {"type": "array", "items": {"$ref": "https://json-schema.org/draft/2019-09/links"}}}, "links": [{"rel": "self", "href": "{+%24id}"}]}, "https://json-schema.org/draft/2019-09/meta/format": {"$schema": "https://json-schema.org/draft/2019-09/schema", "$id": "https://json-schema.org/draft/2019-09/meta/format", "$vocabulary": {"https://json-schema.org/draft/2019-09/vocab/format": true}, "$recursiveAnchor": true, "title": "Format vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"format": {"type": "string"}}}} +diff --git a/src/poetry/core/_vendor/jsonschema/validators.py b/src/poetry/core/_vendor/jsonschema/validators.py +deleted file mode 100644 +index 4d6ced0..0000000 +--- a/src/poetry/core/_vendor/jsonschema/validators.py ++++ /dev/null +@@ -1,1112 +0,0 @@ +-""" +-Creation and extension of validators, 
with implementations for existing drafts. +-""" +-from __future__ import annotations +- +-from collections import deque +-from collections.abc import Sequence +-from functools import lru_cache +-from operator import methodcaller +-from urllib.parse import unquote, urldefrag, urljoin, urlsplit +-from urllib.request import urlopen +-from warnings import warn +-import contextlib +-import json +-import reprlib +-import typing +-import warnings +- +-import attr +- +-from jsonschema import ( +- _format, +- _legacy_validators, +- _types, +- _utils, +- _validators, +- exceptions, +-) +- +-_VALIDATORS: dict[str, typing.Any] = {} +-_META_SCHEMAS = _utils.URIDict() +-_VOCABULARIES: list[tuple[str, typing.Any]] = [] +- +- +-def __getattr__(name): +- if name == "ErrorTree": +- warnings.warn( +- "Importing ErrorTree from jsonschema.validators is deprecated. " +- "Instead import it from jsonschema.exceptions.", +- DeprecationWarning, +- stacklevel=2, +- ) +- from jsonschema.exceptions import ErrorTree +- return ErrorTree +- elif name == "validators": +- warnings.warn( +- "Accessing jsonschema.validators.validators is deprecated. " +- "Use jsonschema.validators.validator_for with a given schema.", +- DeprecationWarning, +- stacklevel=2, +- ) +- return _VALIDATORS +- elif name == "meta_schemas": +- warnings.warn( +- "Accessing jsonschema.validators.meta_schemas is deprecated. " +- "Use jsonschema.validators.validator_for with a given schema.", +- DeprecationWarning, +- stacklevel=2, +- ) +- return _META_SCHEMAS +- raise AttributeError(f"module {__name__} has no attribute {name}") +- +- +-def validates(version): +- """ +- Register the decorated validator for a ``version`` of the specification. +- +- Registered validators and their meta schemas will be considered when +- parsing :kw:`$schema` keywords' URIs. +- +- Arguments: +- +- version (str): +- +- An identifier to use as the version's name +- +- Returns: +- +- collections.abc.Callable: +- +- a class decorator to decorate the validator with the version +- """ +- +- def _validates(cls): +- _VALIDATORS[version] = cls +- meta_schema_id = cls.ID_OF(cls.META_SCHEMA) +- _META_SCHEMAS[meta_schema_id] = cls +- return cls +- return _validates +- +- +-def _id_of(schema): +- """ +- Return the ID of a schema for recent JSON Schema drafts. +- """ +- if schema is True or schema is False: +- return "" +- return schema.get("$id", "") +- +- +-def _store_schema_list(): +- if not _VOCABULARIES: +- _VOCABULARIES.extend(_utils.load_schema("vocabularies").items()) +- return [ +- (id, validator.META_SCHEMA) for id, validator in _META_SCHEMAS.items() +- ] + _VOCABULARIES +- +- +-def create( +- meta_schema, +- validators=(), +- version=None, +- type_checker=_types.draft202012_type_checker, +- format_checker=_format.draft202012_format_checker, +- id_of=_id_of, +- applicable_validators=methodcaller("items"), +-): +- """ +- Create a new validator class. +- +- Arguments: +- +- meta_schema (collections.abc.Mapping): +- +- the meta schema for the new validator class +- +- validators (collections.abc.Mapping): +- +- a mapping from names to callables, where each callable will +- validate the schema property with the given name. +- +- Each callable should take 4 arguments: +- +- 1. a validator instance, +- 2. the value of the property being validated within the +- instance +- 3. the instance +- 4. the schema +- +- version (str): +- +- an identifier for the version that this validator class will +- validate. 
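A minimal sketch of create() with a single invented keyword ("positive" and the urn:example meta-schema id are made up for illustration and belong to no draft):

    from jsonschema import exceptions, validators

    def positive(validator, value, instance, schema):
        # Keyword callables receive (validator, keyword value, instance,
        # schema) and yield ValidationErrors, per the contract above.
        if value and validator.is_type(instance, "number") and instance <= 0:
            yield exceptions.ValidationError(f"{instance!r} is not positive")

    MyValidator = validators.create(
        meta_schema={"$id": "urn:example:meta"},  # invented meta-schema
        validators={"positive": positive},
    )
    print(MyValidator({"positive": True}).is_valid(-5))  # False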
If provided, the returned validator class will +- have its ``__name__`` set to include the version, and also +- will have `jsonschema.validators.validates` automatically +- called for the given version. +- +- type_checker (jsonschema.TypeChecker): +- +- a type checker, used when applying the :kw:`type` keyword. +- +- If unprovided, a `jsonschema.TypeChecker` will be created +- with a set of default types typical of JSON Schema drafts. +- +- format_checker (jsonschema.FormatChecker): +- +- a format checker, used when applying the :kw:`format` keyword. +- +- If unprovided, a `jsonschema.FormatChecker` will be created +- with a set of default formats typical of JSON Schema drafts. +- +- id_of (collections.abc.Callable): +- +- A function that given a schema, returns its ID. +- +- applicable_validators (collections.abc.Callable): +- +- A function that given a schema, returns the list of +- applicable validators (validation keywords and callables) +- which will be used to validate the instance. +- +- Returns: +- +- a new `jsonschema.protocols.Validator` class +- """ +- # preemptively don't shadow the `Validator.format_checker` local +- format_checker_arg = format_checker +- +- @attr.s +- class Validator: +- +- VALIDATORS = dict(validators) +- META_SCHEMA = dict(meta_schema) +- TYPE_CHECKER = type_checker +- FORMAT_CHECKER = format_checker_arg +- ID_OF = staticmethod(id_of) +- +- schema = attr.ib(repr=reprlib.repr) +- resolver = attr.ib(default=None, repr=False) +- format_checker = attr.ib(default=None) +- +- def __attrs_post_init__(self): +- if self.resolver is None: +- self.resolver = RefResolver.from_schema( +- self.schema, +- id_of=id_of, +- ) +- +- @classmethod +- def check_schema(cls, schema): +- for error in cls(cls.META_SCHEMA).iter_errors(schema): +- raise exceptions.SchemaError.create_from(error) +- +- def evolve(self, **changes): +- schema = changes.setdefault("schema", self.schema) +- NewValidator = validator_for(schema, default=Validator) +- +- # Essentially reproduces attr.evolve, but may involve instantiating +- # a different class than this one. +- for field in attr.fields(Validator): +- if not field.init: +- continue +- attr_name = field.name # To deal with private attributes. +- init_name = attr_name if attr_name[0] != "_" else attr_name[1:] +- if init_name not in changes: +- changes[init_name] = getattr(self, attr_name) +- +- return NewValidator(**changes) +- +- def iter_errors(self, instance, _schema=None): +- if _schema is not None: +- warnings.warn( +- ( +- "Passing a schema to Validator.iter_errors " +- "is deprecated and will be removed in a future " +- "release. Call validator.evolve(schema=new_schema)." +- "iter_errors(...) instead." 
+- ), +- DeprecationWarning, +- stacklevel=2, +- ) +- else: +- _schema = self.schema +- +- if _schema is True: +- return +- elif _schema is False: +- yield exceptions.ValidationError( +- f"False schema does not allow {instance!r}", +- validator=None, +- validator_value=None, +- instance=instance, +- schema=_schema, +- ) +- return +- +- scope = id_of(_schema) +- if scope: +- self.resolver.push_scope(scope) +- try: +- for k, v in applicable_validators(_schema): +- validator = self.VALIDATORS.get(k) +- if validator is None: +- continue +- +- errors = validator(self, v, instance, _schema) or () +- for error in errors: +- # set details if not already set by the called fn +- error._set( +- validator=k, +- validator_value=v, +- instance=instance, +- schema=_schema, +- type_checker=self.TYPE_CHECKER, +- ) +- if k not in {"if", "$ref"}: +- error.schema_path.appendleft(k) +- yield error +- finally: +- if scope: +- self.resolver.pop_scope() +- +- def descend(self, instance, schema, path=None, schema_path=None): +- for error in self.evolve(schema=schema).iter_errors(instance): +- if path is not None: +- error.path.appendleft(path) +- if schema_path is not None: +- error.schema_path.appendleft(schema_path) +- yield error +- +- def validate(self, *args, **kwargs): +- for error in self.iter_errors(*args, **kwargs): +- raise error +- +- def is_type(self, instance, type): +- try: +- return self.TYPE_CHECKER.is_type(instance, type) +- except exceptions.UndefinedTypeCheck: +- raise exceptions.UnknownType(type, instance, self.schema) +- +- def is_valid(self, instance, _schema=None): +- if _schema is not None: +- warnings.warn( +- ( +- "Passing a schema to Validator.is_valid is deprecated " +- "and will be removed in a future release. Call " +- "validator.evolve(schema=new_schema).is_valid(...) " +- "instead." +- ), +- DeprecationWarning, +- stacklevel=2, +- ) +- self = self.evolve(schema=_schema) +- +- error = next(self.iter_errors(instance), None) +- return error is None +- +- if version is not None: +- safe = version.title().replace(" ", "").replace("-", "") +- Validator.__name__ = Validator.__qualname__ = f"{safe}Validator" +- Validator = validates(version)(Validator) +- +- return Validator +- +- +-def extend( +- validator, +- validators=(), +- version=None, +- type_checker=None, +- format_checker=None, +-): +- """ +- Create a new validator class by extending an existing one. +- +- Arguments: +- +- validator (jsonschema.protocols.Validator): +- +- an existing validator class +- +- validators (collections.abc.Mapping): +- +- a mapping of new validator callables to extend with, whose +- structure is as in `create`. +- +- .. note:: +- +- Any validator callables with the same name as an +- existing one will (silently) replace the old validator +- callable entirely, effectively overriding any validation +- done in the "parent" validator class. +- +- If you wish to instead extend the behavior of a parent's +- validator callable, delegate and call it directly in +- the new validator function by retrieving it using +- ``OldValidator.VALIDATORS["validation_keyword_name"]``. +- +- version (str): +- +- a version for the new validator class +- +- type_checker (jsonschema.TypeChecker): +- +- a type checker, used when applying the :kw:`type` keyword. +- +- If unprovided, the type checker of the extended +- `jsonschema.protocols.Validator` will be carried along. +- +- format_checker (jsonschema.FormatChecker): +- +- a format checker, used when applying the :kw:`format` keyword. 
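In the same spirit, a sketch of extend() adding one invented keyword ("even" is made up for illustration) on top of Draft202012Validator, which is defined later in this module:

    from jsonschema import Draft202012Validator, exceptions, validators

    def even(validator, value, instance, schema):
        if value and validator.is_type(instance, "integer") and instance % 2:
            yield exceptions.ValidationError(f"{instance!r} is not even")

    EvenValidator = validators.extend(
        Draft202012Validator, validators={"even": even},
    )
    checker = EvenValidator({"type": "integer", "even": True})
    print(checker.is_valid(3))  # False: fails the added keyword
    print(checker.is_valid(4))  # True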
+- +- If unprovided, the format checker of the extended +- `jsonschema.protocols.Validator` will be carried along. +- +- Returns: +- +- a new `jsonschema.protocols.Validator` class extending the one +- provided +- +- .. note:: Meta Schemas +- +- The new validator class will have its parent's meta schema. +- +- If you wish to change or extend the meta schema in the new +- validator class, modify ``META_SCHEMA`` directly on the returned +- class. Note that no implicit copying is done, so a copy should +- likely be made before modifying it, in order to not affect the +- old validator. +- """ +- +- all_validators = dict(validator.VALIDATORS) +- all_validators.update(validators) +- +- if type_checker is None: +- type_checker = validator.TYPE_CHECKER +- if format_checker is None: +- format_checker = validator.FORMAT_CHECKER +- return create( +- meta_schema=validator.META_SCHEMA, +- validators=all_validators, +- version=version, +- type_checker=type_checker, +- format_checker=format_checker, +- id_of=validator.ID_OF, +- ) +- +- +-Draft3Validator = create( +- meta_schema=_utils.load_schema("draft3"), +- validators={ +- "$ref": _validators.ref, +- "additionalItems": _validators.additionalItems, +- "additionalProperties": _validators.additionalProperties, +- "dependencies": _legacy_validators.dependencies_draft3, +- "disallow": _legacy_validators.disallow_draft3, +- "divisibleBy": _validators.multipleOf, +- "enum": _validators.enum, +- "extends": _legacy_validators.extends_draft3, +- "format": _validators.format, +- "items": _legacy_validators.items_draft3_draft4, +- "maxItems": _validators.maxItems, +- "maxLength": _validators.maxLength, +- "maximum": _legacy_validators.maximum_draft3_draft4, +- "minItems": _validators.minItems, +- "minLength": _validators.minLength, +- "minimum": _legacy_validators.minimum_draft3_draft4, +- "pattern": _validators.pattern, +- "patternProperties": _validators.patternProperties, +- "properties": _legacy_validators.properties_draft3, +- "type": _legacy_validators.type_draft3, +- "uniqueItems": _validators.uniqueItems, +- }, +- type_checker=_types.draft3_type_checker, +- format_checker=_format.draft3_format_checker, +- version="draft3", +- id_of=lambda schema: schema.get("id", ""), +- applicable_validators=_legacy_validators.ignore_ref_siblings, +-) +- +-Draft4Validator = create( +- meta_schema=_utils.load_schema("draft4"), +- validators={ +- "$ref": _validators.ref, +- "additionalItems": _validators.additionalItems, +- "additionalProperties": _validators.additionalProperties, +- "allOf": _validators.allOf, +- "anyOf": _validators.anyOf, +- "dependencies": _legacy_validators.dependencies_draft4_draft6_draft7, +- "enum": _validators.enum, +- "format": _validators.format, +- "items": _legacy_validators.items_draft3_draft4, +- "maxItems": _validators.maxItems, +- "maxLength": _validators.maxLength, +- "maxProperties": _validators.maxProperties, +- "maximum": _legacy_validators.maximum_draft3_draft4, +- "minItems": _validators.minItems, +- "minLength": _validators.minLength, +- "minProperties": _validators.minProperties, +- "minimum": _legacy_validators.minimum_draft3_draft4, +- "multipleOf": _validators.multipleOf, +- "not": _validators.not_, +- "oneOf": _validators.oneOf, +- "pattern": _validators.pattern, +- "patternProperties": _validators.patternProperties, +- "properties": _validators.properties, +- "required": _validators.required, +- "type": _validators.type, +- "uniqueItems": _validators.uniqueItems, +- }, +- type_checker=_types.draft4_type_checker, +- 
format_checker=_format.draft4_format_checker, +- version="draft4", +- id_of=lambda schema: schema.get("id", ""), +- applicable_validators=_legacy_validators.ignore_ref_siblings, +-) +- +-Draft6Validator = create( +- meta_schema=_utils.load_schema("draft6"), +- validators={ +- "$ref": _validators.ref, +- "additionalItems": _validators.additionalItems, +- "additionalProperties": _validators.additionalProperties, +- "allOf": _validators.allOf, +- "anyOf": _validators.anyOf, +- "const": _validators.const, +- "contains": _legacy_validators.contains_draft6_draft7, +- "dependencies": _legacy_validators.dependencies_draft4_draft6_draft7, +- "enum": _validators.enum, +- "exclusiveMaximum": _validators.exclusiveMaximum, +- "exclusiveMinimum": _validators.exclusiveMinimum, +- "format": _validators.format, +- "items": _legacy_validators.items_draft6_draft7_draft201909, +- "maxItems": _validators.maxItems, +- "maxLength": _validators.maxLength, +- "maxProperties": _validators.maxProperties, +- "maximum": _validators.maximum, +- "minItems": _validators.minItems, +- "minLength": _validators.minLength, +- "minProperties": _validators.minProperties, +- "minimum": _validators.minimum, +- "multipleOf": _validators.multipleOf, +- "not": _validators.not_, +- "oneOf": _validators.oneOf, +- "pattern": _validators.pattern, +- "patternProperties": _validators.patternProperties, +- "properties": _validators.properties, +- "propertyNames": _validators.propertyNames, +- "required": _validators.required, +- "type": _validators.type, +- "uniqueItems": _validators.uniqueItems, +- }, +- type_checker=_types.draft6_type_checker, +- format_checker=_format.draft6_format_checker, +- version="draft6", +- applicable_validators=_legacy_validators.ignore_ref_siblings, +-) +- +-Draft7Validator = create( +- meta_schema=_utils.load_schema("draft7"), +- validators={ +- "$ref": _validators.ref, +- "additionalItems": _validators.additionalItems, +- "additionalProperties": _validators.additionalProperties, +- "allOf": _validators.allOf, +- "anyOf": _validators.anyOf, +- "const": _validators.const, +- "contains": _legacy_validators.contains_draft6_draft7, +- "dependencies": _legacy_validators.dependencies_draft4_draft6_draft7, +- "enum": _validators.enum, +- "exclusiveMaximum": _validators.exclusiveMaximum, +- "exclusiveMinimum": _validators.exclusiveMinimum, +- "format": _validators.format, +- "if": _validators.if_, +- "items": _legacy_validators.items_draft6_draft7_draft201909, +- "maxItems": _validators.maxItems, +- "maxLength": _validators.maxLength, +- "maxProperties": _validators.maxProperties, +- "maximum": _validators.maximum, +- "minItems": _validators.minItems, +- "minLength": _validators.minLength, +- "minProperties": _validators.minProperties, +- "minimum": _validators.minimum, +- "multipleOf": _validators.multipleOf, +- "not": _validators.not_, +- "oneOf": _validators.oneOf, +- "pattern": _validators.pattern, +- "patternProperties": _validators.patternProperties, +- "properties": _validators.properties, +- "propertyNames": _validators.propertyNames, +- "required": _validators.required, +- "type": _validators.type, +- "uniqueItems": _validators.uniqueItems, +- }, +- type_checker=_types.draft7_type_checker, +- format_checker=_format.draft7_format_checker, +- version="draft7", +- applicable_validators=_legacy_validators.ignore_ref_siblings, +-) +- +-Draft201909Validator = create( +- meta_schema=_utils.load_schema("draft2019-09"), +- validators={ +- "$recursiveRef": _legacy_validators.recursiveRef, +- "$ref": _validators.ref, +- 
"additionalItems": _validators.additionalItems, +- "additionalProperties": _validators.additionalProperties, +- "allOf": _validators.allOf, +- "anyOf": _validators.anyOf, +- "const": _validators.const, +- "contains": _validators.contains, +- "dependentRequired": _validators.dependentRequired, +- "dependentSchemas": _validators.dependentSchemas, +- "enum": _validators.enum, +- "exclusiveMaximum": _validators.exclusiveMaximum, +- "exclusiveMinimum": _validators.exclusiveMinimum, +- "format": _validators.format, +- "if": _validators.if_, +- "items": _legacy_validators.items_draft6_draft7_draft201909, +- "maxItems": _validators.maxItems, +- "maxLength": _validators.maxLength, +- "maxProperties": _validators.maxProperties, +- "maximum": _validators.maximum, +- "minItems": _validators.minItems, +- "minLength": _validators.minLength, +- "minProperties": _validators.minProperties, +- "minimum": _validators.minimum, +- "multipleOf": _validators.multipleOf, +- "not": _validators.not_, +- "oneOf": _validators.oneOf, +- "pattern": _validators.pattern, +- "patternProperties": _validators.patternProperties, +- "properties": _validators.properties, +- "propertyNames": _validators.propertyNames, +- "required": _validators.required, +- "type": _validators.type, +- "unevaluatedItems": _validators.unevaluatedItems, +- "unevaluatedProperties": _validators.unevaluatedProperties, +- "uniqueItems": _validators.uniqueItems, +- }, +- type_checker=_types.draft201909_type_checker, +- format_checker=_format.draft201909_format_checker, +- version="draft2019-09", +-) +- +-Draft202012Validator = create( +- meta_schema=_utils.load_schema("draft2020-12"), +- validators={ +- "$dynamicRef": _validators.dynamicRef, +- "$ref": _validators.ref, +- "additionalItems": _validators.additionalItems, +- "additionalProperties": _validators.additionalProperties, +- "allOf": _validators.allOf, +- "anyOf": _validators.anyOf, +- "const": _validators.const, +- "contains": _validators.contains, +- "dependentRequired": _validators.dependentRequired, +- "dependentSchemas": _validators.dependentSchemas, +- "enum": _validators.enum, +- "exclusiveMaximum": _validators.exclusiveMaximum, +- "exclusiveMinimum": _validators.exclusiveMinimum, +- "format": _validators.format, +- "if": _validators.if_, +- "items": _validators.items, +- "maxItems": _validators.maxItems, +- "maxLength": _validators.maxLength, +- "maxProperties": _validators.maxProperties, +- "maximum": _validators.maximum, +- "minItems": _validators.minItems, +- "minLength": _validators.minLength, +- "minProperties": _validators.minProperties, +- "minimum": _validators.minimum, +- "multipleOf": _validators.multipleOf, +- "not": _validators.not_, +- "oneOf": _validators.oneOf, +- "pattern": _validators.pattern, +- "patternProperties": _validators.patternProperties, +- "prefixItems": _validators.prefixItems, +- "properties": _validators.properties, +- "propertyNames": _validators.propertyNames, +- "required": _validators.required, +- "type": _validators.type, +- "unevaluatedItems": _validators.unevaluatedItems, +- "unevaluatedProperties": _validators.unevaluatedProperties, +- "uniqueItems": _validators.uniqueItems, +- }, +- type_checker=_types.draft202012_type_checker, +- format_checker=_format.draft202012_format_checker, +- version="draft2020-12", +-) +- +-_LATEST_VERSION = Draft202012Validator +- +- +-class RefResolver(object): +- """ +- Resolve JSON References. 
+- +- Arguments: +- +- base_uri (str): +- +- The URI of the referring document +- +- referrer: +- +- The actual referring document +- +- store (dict): +- +- A mapping from URIs to documents to cache +- +- cache_remote (bool): +- +- Whether remote refs should be cached after first resolution +- +- handlers (dict): +- +- A mapping from URI schemes to functions that should be used +- to retrieve them +- +- urljoin_cache (:func:`functools.lru_cache`): +- +- A cache that will be used for caching the results of joining +- the resolution scope to subscopes. +- +- remote_cache (:func:`functools.lru_cache`): +- +- A cache that will be used for caching the results of +- resolved remote URLs. +- +- Attributes: +- +- cache_remote (bool): +- +- Whether remote refs should be cached after first resolution +- """ +- +- def __init__( +- self, +- base_uri, +- referrer, +- store=(), +- cache_remote=True, +- handlers=(), +- urljoin_cache=None, +- remote_cache=None, +- ): +- if urljoin_cache is None: +- urljoin_cache = lru_cache(1024)(urljoin) +- if remote_cache is None: +- remote_cache = lru_cache(1024)(self.resolve_from_url) +- +- self.referrer = referrer +- self.cache_remote = cache_remote +- self.handlers = dict(handlers) +- +- self._scopes_stack = [base_uri] +- self.store = _utils.URIDict(_store_schema_list()) +- self.store.update(store) +- self.store[base_uri] = referrer +- +- self._urljoin_cache = urljoin_cache +- self._remote_cache = remote_cache +- +- @classmethod +- def from_schema(cls, schema, id_of=_id_of, *args, **kwargs): +- """ +- Construct a resolver from a JSON schema object. +- +- Arguments: +- +- schema: +- +- the referring schema +- +- Returns: +- +- `RefResolver` +- """ +- +- return cls(base_uri=id_of(schema), referrer=schema, *args, **kwargs) +- +- def push_scope(self, scope): +- """ +- Enter a given sub-scope. +- +- Treats further dereferences as being performed underneath the +- given scope. +- """ +- self._scopes_stack.append( +- self._urljoin_cache(self.resolution_scope, scope), +- ) +- +- def pop_scope(self): +- """ +- Exit the most recently entered scope. +- +- Treats further dereferences as being performed underneath the +- original scope. +- +- Don't call this method more times than `push_scope` has been +- called. +- """ +- try: +- self._scopes_stack.pop() +- except IndexError: +- raise exceptions.RefResolutionError( +- "Failed to pop the scope from an empty stack. " +- "`pop_scope()` should only be called once for every " +- "`push_scope()`", +- ) +- +- @property +- def resolution_scope(self): +- """ +- Retrieve the current resolution scope. +- """ +- return self._scopes_stack[-1] +- +- @property +- def base_uri(self): +- """ +- Retrieve the current base URI, not including any fragment. +- """ +- uri, _ = urldefrag(self.resolution_scope) +- return uri +- +- @contextlib.contextmanager +- def in_scope(self, scope): +- """ +- Temporarily enter the given scope for the duration of the context. +- """ +- warnings.warn( +- "jsonschema.RefResolver.in_scope is deprecated and will be " +- "removed in a future release.", +- DeprecationWarning, +- stacklevel=3, +- ) +- self.push_scope(scope) +- try: +- yield +- finally: +- self.pop_scope() +- +- @contextlib.contextmanager +- def resolving(self, ref): +- """ +- Resolve the given ``ref`` and enter its resolution scope. +- +- Exits the scope on exit of this context manager.
+- +- Arguments: +- +- ref (str): +- +- The reference to resolve +- """ +- +- url, resolved = self.resolve(ref) +- self.push_scope(url) +- try: +- yield resolved +- finally: +- self.pop_scope() +- +- def _find_in_referrer(self, key): +- return self._get_subschemas_cache()[key] +- +- @lru_cache() # noqa: B019 +- def _get_subschemas_cache(self): +- cache = {key: [] for key in _SUBSCHEMAS_KEYWORDS} +- for keyword, subschema in _search_schema( +- self.referrer, _match_subschema_keywords, +- ): +- cache[keyword].append(subschema) +- return cache +- +- @lru_cache() # noqa: B019 +- def _find_in_subschemas(self, url): +- subschemas = self._get_subschemas_cache()["$id"] +- if not subschemas: +- return None +- uri, fragment = urldefrag(url) +- for subschema in subschemas: +- target_uri = self._urljoin_cache( +- self.resolution_scope, subschema["$id"], +- ) +- if target_uri.rstrip("/") == uri.rstrip("/"): +- if fragment: +- subschema = self.resolve_fragment(subschema, fragment) +- return url, subschema +- return None +- +- def resolve(self, ref): +- """ +- Resolve the given reference. +- """ +- url = self._urljoin_cache(self.resolution_scope, ref).rstrip("/") +- +- match = self._find_in_subschemas(url) +- if match is not None: +- return match +- +- return url, self._remote_cache(url) +- +- def resolve_from_url(self, url): +- """ +- Resolve the given URL. +- """ +- url, fragment = urldefrag(url) +- if url: +- try: +- document = self.store[url] +- except KeyError: +- try: +- document = self.resolve_remote(url) +- except Exception as exc: +- raise exceptions.RefResolutionError(exc) +- else: +- document = self.referrer +- +- return self.resolve_fragment(document, fragment) +- +- def resolve_fragment(self, document, fragment): +- """ +- Resolve a ``fragment`` within the referenced ``document``. +- +- Arguments: +- +- document: +- +- The referent document +- +- fragment (str): +- +- a URI fragment to resolve within it +- """ +- +- fragment = fragment.lstrip("/") +- +- if not fragment: +- return document +- +- if document is self.referrer: +- find = self._find_in_referrer +- else: +- +- def find(key): +- yield from _search_schema(document, _match_keyword(key)) +- +- for keyword in ["$anchor", "$dynamicAnchor"]: +- for subschema in find(keyword): +- if fragment == subschema[keyword]: +- return subschema +- for keyword in ["id", "$id"]: +- for subschema in find(keyword): +- if "#" + fragment == subschema[keyword]: +- return subschema +- +- # Resolve via path +- parts = unquote(fragment).split("/") if fragment else [] +- for part in parts: +- part = part.replace("~1", "/").replace("~0", "~") +- +- if isinstance(document, Sequence): +- # Array indexes should be turned into integers +- try: +- part = int(part) +- except ValueError: +- pass +- try: +- document = document[part] +- except (TypeError, LookupError): +- raise exceptions.RefResolutionError( +- f"Unresolvable JSON pointer: {fragment!r}", +- ) +- +- return document +- +- def resolve_remote(self, uri): +- """ +- Resolve a remote ``uri``. +- +- If called directly, does not check the store first, but after +- retrieving the document at the specified URI it will be saved in +- the store if :attr:`cache_remote` is True. +- +- .. note:: +- +- If the requests_ library is present, ``jsonschema`` will use it to +- request the remote ``uri``, so that the correct encoding is +- detected and used. +- +- If it isn't, or if the scheme of the ``uri`` is not ``http`` or +- ``https``, UTF-8 is assumed. 
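
The pointer walk at the end of resolve_fragment above follows RFC 6901: percent-decode the fragment, split on "/", unescape "~1" before "~0" (so "~01" decodes to "~1", not "/"), and coerce parts to integers when indexing arrays. A minimal standalone sketch of just that walk (hypothetical helper, not part of the vendored module, and it skips the $anchor/$id search the real method tries first):

    from urllib.parse import unquote

    def resolve_pointer(document, fragment):
        # Mirrors only the path-resolution branch of RefResolver.resolve_fragment.
        parts = unquote(fragment).lstrip("/").split("/") if fragment else []
        for part in parts:
            part = part.replace("~1", "/").replace("~0", "~")  # order matters
            if isinstance(document, list):
                part = int(part)  # array indexes are integers
            document = document[part]
        return document

    assert resolve_pointer({"a": [{"b": 1}]}, "/a/0/b") == 1
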
+- +- Arguments: +- +- uri (str): +- +- The URI to resolve +- +- Returns: +- +- The retrieved document +- +- .. _requests: https://pypi.org/project/requests/ +- """ +- try: +- import requests +- except ImportError: +- requests = None +- +- scheme = urlsplit(uri).scheme +- +- if scheme in self.handlers: +- result = self.handlers[scheme](uri) +- elif scheme in ["http", "https"] and requests: +- # Requests has support for detecting the correct encoding of +- # json over http +- result = requests.get(uri).json() +- else: +- # Otherwise, pass off to urllib and assume utf-8 +- with urlopen(uri) as url: +- result = json.loads(url.read().decode("utf-8")) +- +- if self.cache_remote: +- self.store[uri] = result +- return result +- +- +-_SUBSCHEMAS_KEYWORDS = ("$id", "id", "$anchor", "$dynamicAnchor") +- +- +-def _match_keyword(keyword): +- +- def matcher(value): +- if keyword in value: +- yield value +- +- return matcher +- +- +-def _match_subschema_keywords(value): +- for keyword in _SUBSCHEMAS_KEYWORDS: +- if keyword in value: +- yield keyword, value +- +- +-def _search_schema(schema, matcher): +- """Breadth-first search routine.""" +- values = deque([schema]) +- while values: +- value = values.pop() +- if not isinstance(value, dict): +- continue +- yield from matcher(value) +- values.extendleft(value.values()) +- +- +-def validate(instance, schema, cls=None, *args, **kwargs): +- """ +- Validate an instance under the given schema. +- +- >>> validate([2, 3, 4], {"maxItems": 2}) +- Traceback (most recent call last): +- ... +- ValidationError: [2, 3, 4] is too long +- +- :func:`validate` will first verify that the provided schema is +- itself valid, since not doing so can lead to less obvious error +- messages and fail in less obvious or consistent ways. +- +- If you know you have a valid schema already, especially if you +- intend to validate multiple instances with the same schema, you +- likely would prefer using the `Validator.validate` method directly +- on a specific validator (e.g. ``Draft7Validator.validate``). +- +- +- Arguments: +- +- instance: +- +- The instance to validate +- +- schema: +- +- The schema to validate with +- +- cls (Validator): +- +- The class that will be used to validate the instance. +- +- If the ``cls`` argument is not provided, two things will happen +- in accordance with the specification. First, if the schema has a +- :kw:`$schema` keyword containing a known meta-schema [#]_ then the +- proper validator will be used. The specification recommends that +- all schemas contain :kw:`$schema` properties for this reason. If no +- :kw:`$schema` property is found, the default validator class is the +- latest released draft. +- +- Any other provided positional and keyword arguments will be passed +- on when instantiating the ``cls``. +- +- Raises: +- +- `jsonschema.exceptions.ValidationError` if the instance +- is invalid +- +- `jsonschema.exceptions.SchemaError` if the schema itself +- is invalid +- +- .. rubric:: Footnotes +- .. [#] known by a validator registered with +- `jsonschema.validators.validates` +- """ +- if cls is None: +- cls = validator_for(schema) +- +- cls.check_schema(schema) +- validator = cls(schema, *args, **kwargs) +- error = exceptions.best_match(validator.iter_errors(instance)) +- if error is not None: +- raise error +- +- +-def validator_for(schema, default=_LATEST_VERSION): +- """ +- Retrieve the validator class appropriate for validating the given schema. 
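
As the docstrings note, validate() re-checks the schema on every call, while validator_for() dispatches on the $schema keyword. Both are part of jsonschema's public API; a short sketch of how they combine when many instances share one schema:

    from jsonschema import validators

    schema = {"$schema": "http://json-schema.org/draft-07/schema#", "maxItems": 2}

    cls = validators.validator_for(schema)  # Draft7Validator, chosen via $schema
    cls.check_schema(schema)                # verify the schema itself just once
    validator = cls(schema)
    for instance in ([1], [1, 2], [1, 2, 3]):
        print(instance, validator.is_valid(instance))
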
+- +- Uses the :kw:`$schema` keyword that should be present in the given +- schema to look up the appropriate validator class. +- +- Arguments: +- +- schema (collections.abc.Mapping or bool): +- +- the schema to look at +- +- default: +- +- the default to return if the appropriate validator class +- cannot be determined. +- +- If unprovided, the default is to return the latest supported +- draft. +- """ +- if schema is True or schema is False or "$schema" not in schema: +- return default +- if schema["$schema"] not in _META_SCHEMAS: +- warn( +- ( +- "The metaschema specified by $schema was not found. " +- "Using the latest draft to validate, but this will raise " +- "an error in the future." +- ), +- DeprecationWarning, +- stacklevel=2, +- ) +- return _META_SCHEMAS.get(schema["$schema"], _LATEST_VERSION) +diff --git a/src/poetry/core/_vendor/lark/LICENSE b/src/poetry/core/_vendor/lark/LICENSE +deleted file mode 100644 +index e9bacec..0000000 +--- a/src/poetry/core/_vendor/lark/LICENSE ++++ /dev/null +@@ -1,19 +0,0 @@ +-Copyright © 2017 Erez Shinan +- +-Permission is hereby granted, free of charge, to any person obtaining a copy of +-this software and associated documentation files (the "Software"), to deal in +-the Software without restriction, including without limitation the rights to +-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +-the Software, and to permit persons to whom the Software is furnished to do so, +-subject to the following conditions: +- +-The above copyright notice and this permission notice shall be included in all +-copies or substantial portions of the Software. +- +-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +- +diff --git a/src/poetry/core/_vendor/lark/__init__.py b/src/poetry/core/_vendor/lark/__init__.py +deleted file mode 100644 +index 99af3b3..0000000 +--- a/src/poetry/core/_vendor/lark/__init__.py ++++ /dev/null +@@ -1,9 +0,0 @@ +-from .utils import logger +-from .tree import Tree, ParseTree +-from .visitors import Transformer, Visitor, v_args, Discard, Transformer_NonRecursive +-from .exceptions import (ParseError, LexError, GrammarError, UnexpectedToken, +- UnexpectedInput, UnexpectedCharacters, UnexpectedEOF, LarkError) +-from .lexer import Token +-from .lark import Lark +- +-__version__: str = "1.1.2" +diff --git a/src/poetry/core/_vendor/lark/__pyinstaller/__init__.py b/src/poetry/core/_vendor/lark/__pyinstaller/__init__.py +deleted file mode 100644 +index fa02fc9..0000000 +--- a/src/poetry/core/_vendor/lark/__pyinstaller/__init__.py ++++ /dev/null +@@ -1,6 +0,0 @@ +-# For usage of lark with PyInstaller. 
See https://pyinstaller-sample-hook.readthedocs.io/en/latest/index.html +- +-import os +- +-def get_hook_dirs(): +- return [os.path.dirname(__file__)] +\ No newline at end of file +diff --git a/src/poetry/core/_vendor/lark/__pyinstaller/hook-lark.py b/src/poetry/core/_vendor/lark/__pyinstaller/hook-lark.py +deleted file mode 100644 +index cf3d8e3..0000000 +--- a/src/poetry/core/_vendor/lark/__pyinstaller/hook-lark.py ++++ /dev/null +@@ -1,14 +0,0 @@ +-#----------------------------------------------------------------------------- +-# Copyright (c) 2017-2020, PyInstaller Development Team. +-# +-# Distributed under the terms of the GNU General Public License (version 2 +-# or later) with exception for distributing the bootloader. +-# +-# The full license is in the file COPYING.txt, distributed with this software. +-# +-# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception) +-#----------------------------------------------------------------------------- +- +-from PyInstaller.utils.hooks import collect_data_files +- +-datas = collect_data_files('lark') +diff --git a/src/poetry/core/_vendor/lark/ast_utils.py b/src/poetry/core/_vendor/lark/ast_utils.py +deleted file mode 100644 +index faa17d0..0000000 +--- a/src/poetry/core/_vendor/lark/ast_utils.py ++++ /dev/null +@@ -1,59 +0,0 @@ +-""" +- Module of utilities for transforming a lark.Tree into a custom Abstract Syntax Tree +-""" +- +-import inspect, re +-import types +-from typing import Optional, Callable +- +-from lark import Transformer, v_args +- +-class Ast: +- """Abstract class +- +- Subclasses will be collected by `create_transformer()` +- """ +- pass +- +-class AsList: +- """Abstract class +- +- Subclasses will be instantiated with the parse results as a single list, instead of as arguments. +- """ +- +-class WithMeta: +- """Abstract class +- +- Subclasses will be instantiated with the Meta instance of the tree. (see ``v_args`` for more detail) +- """ +- pass +- +-def camel_to_snake(name): +- return re.sub(r'(?<!^)(?=[A-Z])', '_', name).lower() +- +-def create_transformer(ast_module: types.ModuleType, transformer: Optional[Transformer]=None, decorator_factory: Callable=v_args) -> Transformer: +- """Collects `Ast` subclasses from the given module, and creates a Lark transformer that builds the AST. +- +- For each class, we create a corresponding rule in the transformer, with a matching name. +- CamelCase names will be converted into snake_case. Example: "CodeBlock" -> "code_block". +- +- Classes starting with an underscore (`_`) will be skipped. +- +- Parameters: +- ast_module: A Python module containing all the subclasses of ``ast_utils.Ast`` +- transformer (Optional[Transformer]): An initial transformer. Its attributes may be overwritten. +- decorator_factory (Callable): An optional callable accepting two booleans, inline, and meta, +- and returning a decorator for the methods of ``transformer``. (default: ``v_args``).
+- """ +- t = transformer or Transformer() +- +- for name, obj in inspect.getmembers(ast_module): +- if not name.startswith('_') and inspect.isclass(obj): +- if issubclass(obj, Ast): +- wrapper = decorator_factory(inline=not issubclass(obj, AsList), meta=issubclass(obj, WithMeta)) +- obj = wrapper(obj).__get__(t) +- setattr(t, camel_to_snake(name), obj) +- +- return t +diff --git a/src/poetry/core/_vendor/lark/common.py b/src/poetry/core/_vendor/lark/common.py +deleted file mode 100644 +index d716add..0000000 +--- a/src/poetry/core/_vendor/lark/common.py ++++ /dev/null +@@ -1,82 +0,0 @@ +-from copy import deepcopy +-import sys +-from types import ModuleType +-from typing import Callable, Collection, Dict, Optional, TYPE_CHECKING +- +-if TYPE_CHECKING: +- from .lark import PostLex +- from .lexer import Lexer +- from typing import Union, Type +- if sys.version_info >= (3, 8): +- from typing import Literal +- else: +- from typing_extensions import Literal +- if sys.version_info >= (3, 10): +- from typing import TypeAlias +- else: +- from typing_extensions import TypeAlias +- +-from .utils import Serialize +-from .lexer import TerminalDef, Token +- +-###{standalone +- +-_ParserArgType: 'TypeAlias' = 'Literal["earley", "lalr", "cyk", "auto"]' +-_LexerArgType: 'TypeAlias' = 'Union[Literal["auto", "basic", "contextual", "dynamic", "dynamic_complete"], Type[Lexer]]' +-_Callback = Callable[[Token], Token] +- +-class LexerConf(Serialize): +- __serialize_fields__ = 'terminals', 'ignore', 'g_regex_flags', 'use_bytes', 'lexer_type' +- __serialize_namespace__ = TerminalDef, +- +- terminals: Collection[TerminalDef] +- re_module: ModuleType +- ignore: Collection[str] +- postlex: 'Optional[PostLex]' +- callbacks: Dict[str, _Callback] +- g_regex_flags: int +- skip_validation: bool +- use_bytes: bool +- lexer_type: Optional[_LexerArgType] +- +- def __init__(self, terminals: Collection[TerminalDef], re_module: ModuleType, ignore: Collection[str]=(), postlex: 'Optional[PostLex]'=None, callbacks: Optional[Dict[str, _Callback]]=None, g_regex_flags: int=0, skip_validation: bool=False, use_bytes: bool=False): +- self.terminals = terminals +- self.terminals_by_name = {t.name: t for t in self.terminals} +- assert len(self.terminals) == len(self.terminals_by_name) +- self.ignore = ignore +- self.postlex = postlex +- self.callbacks = callbacks or {} +- self.g_regex_flags = g_regex_flags +- self.re_module = re_module +- self.skip_validation = skip_validation +- self.use_bytes = use_bytes +- self.lexer_type = None +- +- def _deserialize(self): +- self.terminals_by_name = {t.name: t for t in self.terminals} +- +- def __deepcopy__(self, memo=None): +- return type(self)( +- deepcopy(self.terminals, memo), +- self.re_module, +- deepcopy(self.ignore, memo), +- deepcopy(self.postlex, memo), +- deepcopy(self.callbacks, memo), +- deepcopy(self.g_regex_flags, memo), +- deepcopy(self.skip_validation, memo), +- deepcopy(self.use_bytes, memo), +- ) +- +- +-class ParserConf(Serialize): +- __serialize_fields__ = 'rules', 'start', 'parser_type' +- +- def __init__(self, rules, callbacks, start): +- assert isinstance(start, list) +- self.rules = rules +- self.callbacks = callbacks +- self.start = start +- +- self.parser_type = None +- +-###} +diff --git a/src/poetry/core/_vendor/lark/exceptions.py b/src/poetry/core/_vendor/lark/exceptions.py +deleted file mode 100644 +index da982e3..0000000 +--- a/src/poetry/core/_vendor/lark/exceptions.py ++++ /dev/null +@@ -1,292 +0,0 @@ +-from .utils import logger, NO_VALUE +-from typing import 
Mapping, Iterable, Callable, Union, TypeVar, Tuple, Any, List, Set, Optional, Collection, TYPE_CHECKING +- +-if TYPE_CHECKING: +- from .lexer import Token +- from .parsers.lalr_interactive_parser import InteractiveParser +- from .tree import Tree +- +-###{standalone +- +-class LarkError(Exception): +- pass +- +- +-class ConfigurationError(LarkError, ValueError): +- pass +- +- +-def assert_config(value, options: Collection, msg='Got %r, expected one of %s'): +- if value not in options: +- raise ConfigurationError(msg % (value, options)) +- +- +-class GrammarError(LarkError): +- pass +- +- +-class ParseError(LarkError): +- pass +- +- +-class LexError(LarkError): +- pass +- +-T = TypeVar('T') +- +-class UnexpectedInput(LarkError): +- """UnexpectedInput Error. +- +- Used as a base class for the following exceptions: +- +- - ``UnexpectedCharacters``: The lexer encountered an unexpected string +- - ``UnexpectedToken``: The parser received an unexpected token +- - ``UnexpectedEOF``: The parser expected a token, but the input ended +- +- After catching one of these exceptions, you may call the following helper methods to create a nicer error message. +- """ +- line: int +- column: int +- pos_in_stream = None +- state: Any +- _terminals_by_name = None +- +- def get_context(self, text: str, span: int=40) -> str: +- """Returns a pretty string pinpointing the error in the text, +- with ``span`` characters of context around it. +- +- Note: +- The parser doesn't hold a copy of the text it has to parse, +- so you have to provide it again +- """ +- assert self.pos_in_stream is not None, self +- pos = self.pos_in_stream +- start = max(pos - span, 0) +- end = pos + span +- if not isinstance(text, bytes): +- before = text[start:pos].rsplit('\n', 1)[-1] +- after = text[pos:end].split('\n', 1)[0] +- return before + after + '\n' + ' ' * len(before.expandtabs()) + '^\n' +- else: +- before = text[start:pos].rsplit(b'\n', 1)[-1] +- after = text[pos:end].split(b'\n', 1)[0] +- return (before + after + b'\n' + b' ' * len(before.expandtabs()) + b'^\n').decode("ascii", "backslashreplace") +- +- def match_examples(self, parse_fn: 'Callable[[str], Tree]', +- examples: Union[Mapping[T, Iterable[str]], Iterable[Tuple[T, Iterable[str]]]], +- token_type_match_fallback: bool=False, +- use_accepts: bool=True +- ) -> Optional[T]: +- """Allows you to detect what's wrong in the input text by matching +- against example errors. +- +- Given a parser instance and a dictionary mapping labels to +- collections of malformed syntax examples, it'll return the label for the +- example that best matches the current error. The function will +- iterate the dictionary until it finds a matching error, and +- return the corresponding value. +- +- For an example usage, see `examples/error_reporting_lalr.py` +- +- Parameters: +- parse_fn: parse function (usually ``lark_instance.parse``) +- examples: dictionary of ``{label: [example strings]}``. +- use_accepts: Recommended to keep this as ``use_accepts=True``.
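
As that docstring describes, match_examples() replays known-bad inputs through the parser and compares the resulting parser states. A sketch of the intended usage (grammar and labels invented for illustration; the shape follows lark's examples/error_reporting_lalr.py):

    from lark import Lark, UnexpectedInput

    parser = Lark(r'''start: "a" "b"''', parser="lalr")

    error_examples = {
        "input ended too early": ["a"],
        "stray character after 'a'": ["ax"],
    }

    try:
        parser.parse("ax")
    except UnexpectedInput as u:
        # Returns the label whose example fails in the same parser state.
        print(u.match_examples(parser.parse, error_examples))
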
+- """ +- assert self.state is not None, "Not supported for this exception" +- +- if isinstance(examples, Mapping): +- examples = examples.items() +- +- candidate = (None, False) +- for i, (label, example) in enumerate(examples): +- assert not isinstance(example, str), "Expecting a list" +- +- for j, malformed in enumerate(example): +- try: +- parse_fn(malformed) +- except UnexpectedInput as ut: +- if ut.state == self.state: +- if ( +- use_accepts +- and isinstance(self, UnexpectedToken) +- and isinstance(ut, UnexpectedToken) +- and ut.accepts != self.accepts +- ): +- logger.debug("Different accepts with same state[%d]: %s != %s at example [%s][%s]" % +- (self.state, self.accepts, ut.accepts, i, j)) +- continue +- if ( +- isinstance(self, (UnexpectedToken, UnexpectedEOF)) +- and isinstance(ut, (UnexpectedToken, UnexpectedEOF)) +- ): +- if ut.token == self.token: # Try exact match first +- logger.debug("Exact Match at example [%s][%s]" % (i, j)) +- return label +- +- if token_type_match_fallback: +- # Fallback to token types match +- if (ut.token.type == self.token.type) and not candidate[-1]: +- logger.debug("Token Type Fallback at example [%s][%s]" % (i, j)) +- candidate = label, True +- +- if candidate[0] is None: +- logger.debug("Same State match at example [%s][%s]" % (i, j)) +- candidate = label, False +- +- return candidate[0] +- +- def _format_expected(self, expected): +- if self._terminals_by_name: +- d = self._terminals_by_name +- expected = [d[t_name].user_repr() if t_name in d else t_name for t_name in expected] +- return "Expected one of: \n\t* %s\n" % '\n\t* '.join(expected) +- +- +-class UnexpectedEOF(ParseError, UnexpectedInput): +- """An exception that is raised by the parser, when the input ends while it still expects a token. +- """ +- expected: 'List[Token]' +- +- def __init__(self, expected, state=None, terminals_by_name=None): +- super(UnexpectedEOF, self).__init__() +- +- self.expected = expected +- self.state = state +- from .lexer import Token +- self.token = Token("", "") # , line=-1, column=-1, pos_in_stream=-1) +- self.pos_in_stream = -1 +- self.line = -1 +- self.column = -1 +- self._terminals_by_name = terminals_by_name +- +- +- def __str__(self): +- message = "Unexpected end-of-input. " +- message += self._format_expected(self.expected) +- return message +- +- +-class UnexpectedCharacters(LexError, UnexpectedInput): +- """An exception that is raised by the lexer, when it cannot match the next +- string of characters to any of its terminals. 
+- """ +- +- allowed: Set[str] +- considered_tokens: Set[Any] +- +- def __init__(self, seq, lex_pos, line, column, allowed=None, considered_tokens=None, state=None, token_history=None, +- terminals_by_name=None, considered_rules=None): +- super(UnexpectedCharacters, self).__init__() +- +- # TODO considered_tokens and allowed can be figured out using state +- self.line = line +- self.column = column +- self.pos_in_stream = lex_pos +- self.state = state +- self._terminals_by_name = terminals_by_name +- +- self.allowed = allowed +- self.considered_tokens = considered_tokens +- self.considered_rules = considered_rules +- self.token_history = token_history +- +- if isinstance(seq, bytes): +- self.char = seq[lex_pos:lex_pos + 1].decode("ascii", "backslashreplace") +- else: +- self.char = seq[lex_pos] +- self._context = self.get_context(seq) +- +- +- def __str__(self): +- message = "No terminal matches '%s' in the current parser context, at line %d col %d" % (self.char, self.line, self.column) +- message += '\n\n' + self._context +- if self.allowed: +- message += self._format_expected(self.allowed) +- if self.token_history: +- message += '\nPrevious tokens: %s\n' % ', '.join(repr(t) for t in self.token_history) +- return message +- +- +-class UnexpectedToken(ParseError, UnexpectedInput): +- """An exception that is raised by the parser, when the token it received +- doesn't match any valid step forward. +- +- Parameters: +- token: The mismatched token +- expected: The set of expected tokens +- considered_rules: Which rules were considered, to deduce the expected tokens +- state: A value representing the parser state. Do not rely on its value or type. +- interactive_parser: An instance of ``InteractiveParser``, that is initialized to the point of failture, +- and can be used for debugging and error handling. +- +- Note: These parameters are available as attributes of the instance. +- """ +- +- expected: Set[str] +- considered_rules: Set[str] +- interactive_parser: 'InteractiveParser' +- +- def __init__(self, token, expected, considered_rules=None, state=None, interactive_parser=None, terminals_by_name=None, token_history=None): +- super(UnexpectedToken, self).__init__() +- +- # TODO considered_rules and expected can be figured out using state +- self.line = getattr(token, 'line', '?') +- self.column = getattr(token, 'column', '?') +- self.pos_in_stream = getattr(token, 'start_pos', None) +- self.state = state +- +- self.token = token +- self.expected = expected # XXX deprecate? 
`accepts` is better +- self._accepts = NO_VALUE +- self.considered_rules = considered_rules +- self.interactive_parser = interactive_parser +- self._terminals_by_name = terminals_by_name +- self.token_history = token_history +- +- +- @property +- def accepts(self) -> Set[str]: +- if self._accepts is NO_VALUE: +- self._accepts = self.interactive_parser and self.interactive_parser.accepts() +- return self._accepts +- +- def __str__(self): +- message = ("Unexpected token %r at line %s, column %s.\n%s" +- % (self.token, self.line, self.column, self._format_expected(self.accepts or self.expected))) +- if self.token_history: +- message += "Previous tokens: %r\n" % self.token_history +- +- return message +- +- +- +-class VisitError(LarkError): +- """VisitError is raised when visitors are interrupted by an exception +- +- It provides the following attributes for inspection: +- +- Parameters: +- rule: the name of the visit rule that failed +- obj: the tree-node or token that was being processed +- orig_exc: the exception that caused it to fail +- +- Note: These parameters are available as attributes +- """ +- +- obj: 'Union[Tree, Token]' +- orig_exc: Exception +- +- def __init__(self, rule, obj, orig_exc): +- message = 'Error trying to process rule "%s":\n\n%s' % (rule, orig_exc) +- super(VisitError, self).__init__(message) +- +- self.rule = rule +- self.obj = obj +- self.orig_exc = orig_exc +- +- +-class MissingVariableError(LarkError): +- pass +- +-###} +diff --git a/src/poetry/core/_vendor/lark/grammar.py b/src/poetry/core/_vendor/lark/grammar.py +deleted file mode 100644 +index 4f4fa90..0000000 +--- a/src/poetry/core/_vendor/lark/grammar.py ++++ /dev/null +@@ -1,122 +0,0 @@ +-from typing import Optional, Tuple, ClassVar +- +-from .utils import Serialize +- +-###{standalone +-TOKEN_DEFAULT_PRIORITY = 0 +- +- +-class Symbol(Serialize): +- __slots__ = ('name',) +- +- name: str +- is_term: ClassVar[bool] = NotImplemented +- +- def __init__(self, name: str) -> None: +- self.name = name +- +- def __eq__(self, other): +- assert isinstance(other, Symbol), other +- return self.is_term == other.is_term and self.name == other.name +- +- def __ne__(self, other): +- return not (self == other) +- +- def __hash__(self): +- return hash(self.name) +- +- def __repr__(self): +- return '%s(%r)' % (type(self).__name__, self.name) +- +- fullrepr = property(__repr__) +- +- def renamed(self, f): +- return type(self)(f(self.name)) +- +- +-class Terminal(Symbol): +- __serialize_fields__ = 'name', 'filter_out' +- +- is_term: ClassVar[bool] = True +- +- def __init__(self, name, filter_out=False): +- self.name = name +- self.filter_out = filter_out +- +- @property +- def fullrepr(self): +- return '%s(%r, %r)' % (type(self).__name__, self.name, self.filter_out) +- +- def renamed(self, f): +- return type(self)(f(self.name), self.filter_out) +- +- +-class NonTerminal(Symbol): +- __serialize_fields__ = 'name', +- +- is_term: ClassVar[bool] = False +- +- +-class RuleOptions(Serialize): +- __serialize_fields__ = 'keep_all_tokens', 'expand1', 'priority', 'template_source', 'empty_indices' +- +- keep_all_tokens: bool +- expand1: bool +- priority: Optional[int] +- template_source: Optional[str] +- empty_indices: Tuple[bool, ...]
+- +- def __init__(self, keep_all_tokens: bool=False, expand1: bool=False, priority: Optional[int]=None, template_source: Optional[str]=None, empty_indices: Tuple[bool, ...]=()) -> None: +- self.keep_all_tokens = keep_all_tokens +- self.expand1 = expand1 +- self.priority = priority +- self.template_source = template_source +- self.empty_indices = empty_indices +- +- def __repr__(self): +- return 'RuleOptions(%r, %r, %r, %r)' % ( +- self.keep_all_tokens, +- self.expand1, +- self.priority, +- self.template_source +- ) +- +- +-class Rule(Serialize): +- """ +- origin : a symbol +- expansion : a list of symbols +- order : index of this expansion amongst all rules of the same name +- """ +- __slots__ = ('origin', 'expansion', 'alias', 'options', 'order', '_hash') +- +- __serialize_fields__ = 'origin', 'expansion', 'order', 'alias', 'options' +- __serialize_namespace__ = Terminal, NonTerminal, RuleOptions +- +- def __init__(self, origin, expansion, order=0, alias=None, options=None): +- self.origin = origin +- self.expansion = expansion +- self.alias = alias +- self.order = order +- self.options = options or RuleOptions() +- self._hash = hash((self.origin, tuple(self.expansion))) +- +- def _deserialize(self): +- self._hash = hash((self.origin, tuple(self.expansion))) +- +- def __str__(self): +- return '<%s : %s>' % (self.origin.name, ' '.join(x.name for x in self.expansion)) +- +- def __repr__(self): +- return 'Rule(%r, %r, %r, %r)' % (self.origin, self.expansion, self.alias, self.options) +- +- def __hash__(self): +- return self._hash +- +- def __eq__(self, other): +- if not isinstance(other, Rule): +- return False +- return self.origin == other.origin and self.expansion == other.expansion +- +- +-###} +diff --git a/src/poetry/core/_vendor/lark/grammars/common.lark b/src/poetry/core/_vendor/lark/grammars/common.lark +deleted file mode 100644 +index e15b163..0000000 +--- a/src/poetry/core/_vendor/lark/grammars/common.lark ++++ /dev/null +@@ -1,59 +0,0 @@ +-// Basic terminals for common use +- +- +-// +-// Numbers +-// +- +-DIGIT: "0".."9" +-HEXDIGIT: "a".."f"|"A".."F"|DIGIT +- +-INT: DIGIT+ +-SIGNED_INT: ["+"|"-"] INT +-DECIMAL: INT "." INT? | "." INT +- +-// float = /-?\d+(\.\d+)?([eE][+-]?\d+)?/ +-_EXP: ("e"|"E") SIGNED_INT +-FLOAT: INT _EXP | DECIMAL _EXP? +-SIGNED_FLOAT: ["+"|"-"] FLOAT +- +-NUMBER: FLOAT | INT +-SIGNED_NUMBER: ["+"|"-"] NUMBER +- +-// +-// Strings +-// +-_STRING_INNER: /.*?/ +-_STRING_ESC_INNER: _STRING_INNER /(?<!\\)(\\\\)*?/ +- +-ESCAPED_STRING : "\"" _STRING_ESC_INNER "\"" +- +- +-// +-// Names (Variables) +-// +-LCASE_LETTER: "a".."z" +-UCASE_LETTER: "A".."Z" +- +-LETTER: UCASE_LETTER | LCASE_LETTER +-WORD: LETTER+ +- +-CNAME: ("_"|LETTER) ("_"|LETTER|DIGIT)* +- +- +-// +-// Whitespace +-// +-WS_INLINE: (" "|/\t/)+ +-WS: /[ \t\f\r\n]/+ +- +-CR : /\r/ +-LF : /\n/ +-NEWLINE: (CR? LF)+ +- +- +-// Comments +-SH_COMMENT: /#[^\n]*/ +-CPP_COMMENT: /\/\/[^\n]*/ +-C_COMMENT: "/*" /(.|\n)*?/ "*/" +-SQL_COMMENT: /--[^\n]*/ +- +-COMMENT: C_COMMENT | CPP_COMMENT | SH_COMMENT | SQL_COMMENT +diff --git a/src/poetry/core/_vendor/lark/grammars/lark.lark b/src/poetry/core/_vendor/lark/grammars/lark.lark +deleted file mode 100644 +--- a/src/poetry/core/_vendor/lark/grammars/lark.lark ++++ /dev/null +-statement: "%ignore" expansions -> ignore +- | "%import" import_path ["->" name] -> import +- | "%import" import_path name_list -> multi_import +- | "%override" rule -> override_rule +- | "%declare" name+ -> declare +- +-!import_path: "."? name ("." name)* +-name_list: "(" name ("," name)* ")" +- +-?expansions: alias (_VBAR alias)* +- +-?alias: expansion ["->" RULE] +- +-?expansion: expr* +- +-?expr: atom [OP | "~" NUMBER [".." NUMBER]] +- +-?atom: "(" expansions ")" +- | "[" expansions "]" -> maybe +- | value +- +-?value: STRING ".." STRING -> literal_range +- | name +- | (REGEXP | STRING) -> literal +- | name "{" value ("," value)* "}" -> template_usage +- +-name: RULE +- | TOKEN +- +-_VBAR: _NL? "|" +-OP: /[+*]|[?](?![a-z])/ +-RULE: /!?[_?]?[a-z][_a-z0-9]*/ +-TOKEN: /_?[A-Z][_A-Z0-9]*/ +-STRING: _STRING "i"?
+-REGEXP: /\/(?!\/)(\\\/|\\\\|[^\/])*?\/[imslux]*/ +-_NL: /(\r?\n)+\s*/ +- +-%import common.ESCAPED_STRING -> _STRING +-%import common.SIGNED_INT -> NUMBER +-%import common.WS_INLINE +- +-COMMENT: /\s*/ "//" /[^\n]/* +- +-%ignore WS_INLINE +-%ignore COMMENT +diff --git a/src/poetry/core/_vendor/lark/grammars/python.lark b/src/poetry/core/_vendor/lark/grammars/python.lark +deleted file mode 100644 +index 24d68fa..0000000 +--- a/src/poetry/core/_vendor/lark/grammars/python.lark ++++ /dev/null +@@ -1,253 +0,0 @@ +-// Python 3 grammar for Lark +- +-// This grammar should parse all python 3.x code successfully. +- +-// Adapted from: https://docs.python.org/3/reference/grammar.html +- +-// Start symbols for the grammar: +-// single_input is a single interactive statement; +-// file_input is a module or sequence of commands read from an input file; +-// eval_input is the input for the eval() functions. +-// NB: compound_stmt in single_input is followed by extra NEWLINE! +-// +- +-single_input: _NEWLINE | simple_stmt | compound_stmt _NEWLINE +-file_input: (_NEWLINE | stmt)* +-eval_input: testlist _NEWLINE* +- +-decorator: "@" dotted_name [ "(" [arguments] ")" ] _NEWLINE +-decorators: decorator+ +-decorated: decorators (classdef | funcdef | async_funcdef) +- +-async_funcdef: "async" funcdef +-funcdef: "def" NAME "(" [parameters] ")" ["->" test] ":" suite +- +-parameters: paramvalue ("," paramvalue)* ["," SLASH ("," paramvalue)*] ["," [starparams | kwparams]] +- | starparams +- | kwparams +- +-SLASH: "/" // Otherwise it will completely disappear and be indistinguishable in the result +-starparams: (starparam | starguard) poststarparams +-starparam: "*" typedparam +-starguard: "*" +-poststarparams: ("," paramvalue)* ["," kwparams] +-kwparams: "**" typedparam ","? +- +-?paramvalue: typedparam ("=" test)? +-?typedparam: NAME (":" test)? +- +- +-lambdef: "lambda" [lambda_params] ":" test +-lambdef_nocond: "lambda" [lambda_params] ":" test_nocond +-lambda_params: lambda_paramvalue ("," lambda_paramvalue)* ["," [lambda_starparams | lambda_kwparams]] +- | lambda_starparams +- | lambda_kwparams +-?lambda_paramvalue: NAME ("=" test)? +-lambda_starparams: "*" [NAME] ("," lambda_paramvalue)* ["," [lambda_kwparams]] +-lambda_kwparams: "**" NAME ","? +- +- +-?stmt: simple_stmt | compound_stmt +-?simple_stmt: small_stmt (";" small_stmt)* [";"] _NEWLINE +-?small_stmt: (expr_stmt | assign_stmt | del_stmt | pass_stmt | flow_stmt | import_stmt | global_stmt | nonlocal_stmt | assert_stmt) +-expr_stmt: testlist_star_expr +-assign_stmt: annassign | augassign | assign +- +-annassign: testlist_star_expr ":" test ["=" test] +-assign: testlist_star_expr ("=" (yield_expr|testlist_star_expr))+ +-augassign: testlist_star_expr augassign_op (yield_expr|testlist) +-!augassign_op: "+=" | "-=" | "*=" | "@=" | "/=" | "%=" | "&=" | "|=" | "^=" | "<<=" | ">>=" | "**=" | "//=" +-?testlist_star_expr: test_or_star_expr +- | test_or_star_expr ("," test_or_star_expr)+ ","? -> tuple +- | test_or_star_expr "," -> tuple +- +-// For normal and annotated assignments, additional restrictions enforced by the interpreter +-del_stmt: "del" exprlist +-pass_stmt: "pass" +-?flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt +-break_stmt: "break" +-continue_stmt: "continue" +-return_stmt: "return" [testlist] +-yield_stmt: yield_expr +-raise_stmt: "raise" [test ["from" test]] +-import_stmt: import_name | import_from +-import_name: "import" dotted_as_names +-// note below: the ("." | "...") is necessary because "..."
is tokenized as ELLIPSIS +-import_from: "from" (dots? dotted_name | dots) "import" ("*" | "(" import_as_names ")" | import_as_names) +-!dots: "."+ +-import_as_name: NAME ["as" NAME] +-dotted_as_name: dotted_name ["as" NAME] +-import_as_names: import_as_name ("," import_as_name)* [","] +-dotted_as_names: dotted_as_name ("," dotted_as_name)* +-dotted_name: NAME ("." NAME)* +-global_stmt: "global" NAME ("," NAME)* +-nonlocal_stmt: "nonlocal" NAME ("," NAME)* +-assert_stmt: "assert" test ["," test] +- +-?compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt +-async_stmt: "async" (funcdef | with_stmt | for_stmt) +-if_stmt: "if" test ":" suite elifs ["else" ":" suite] +-elifs: elif_* +-elif_: "elif" test ":" suite +-while_stmt: "while" test ":" suite ["else" ":" suite] +-for_stmt: "for" exprlist "in" testlist ":" suite ["else" ":" suite] +-try_stmt: "try" ":" suite except_clauses ["else" ":" suite] [finally] +- | "try" ":" suite finally -> try_finally +-finally: "finally" ":" suite +-except_clauses: except_clause+ +-except_clause: "except" [test ["as" NAME]] ":" suite +- +-with_stmt: "with" with_items ":" suite +-with_items: with_item ("," with_item)* +-with_item: test ["as" expr] +-// NB compile.c makes sure that the default except clause is last +-suite: simple_stmt | _NEWLINE _INDENT stmt+ _DEDENT +- +-?test: or_test ("if" or_test "else" test)? +- | lambdef +- | assign_expr +- +-assign_expr: NAME ":=" test +- +-?test_nocond: or_test | lambdef_nocond +- +-?or_test: and_test ("or" and_test)* +-?and_test: not_test_ ("and" not_test_)* +-?not_test_: "not" not_test_ -> not_test +- | comparison +-?comparison: expr (comp_op expr)* +-star_expr: "*" expr +- +-?expr: or_expr +-?or_expr: xor_expr ("|" xor_expr)* +-?xor_expr: and_expr ("^" and_expr)* +-?and_expr: shift_expr ("&" shift_expr)* +-?shift_expr: arith_expr (_shift_op arith_expr)* +-?arith_expr: term (_add_op term)* +-?term: factor (_mul_op factor)* +-?factor: _unary_op factor | power +- +-!_unary_op: "+"|"-"|"~" +-!_add_op: "+"|"-" +-!_shift_op: "<<"|">>" +-!_mul_op: "*"|"@"|"/"|"%"|"//" +-// <> isn't actually a valid comparison operator in Python. It's here for the +-// sake of a __future__ import described in PEP 401 (which really works :-) +-!comp_op: "<"|">"|"=="|">="|"<="|"<>"|"!="|"in"|"not" "in"|"is"|"is" "not" +- +-?power: await_expr ("**" factor)? +-?await_expr: AWAIT? atom_expr +-AWAIT: "await" +- +-?atom_expr: atom_expr "(" [arguments] ")" -> funccall +- | atom_expr "[" subscriptlist "]" -> getitem +- | atom_expr "." NAME -> getattr +- | atom +- +-?atom: "(" yield_expr ")" +- | "(" _tuple_inner? ")" -> tuple +- | "(" comprehension{test_or_star_expr} ")" -> tuple_comprehension +- | "[" _testlist_comp? "]" -> list +- | "[" comprehension{test_or_star_expr} "]" -> list_comprehension +- | "{" _dict_exprlist? "}" -> dict +- | "{" comprehension{key_value} "}" -> dict_comprehension +- | "{" _set_exprlist "}" -> set +- | "{" comprehension{test} "}" -> set_comprehension +- | NAME -> var +- | number +- | string_concat +- | "(" test ")" +- | "..." 
-> ellipsis +- | "None" -> const_none +- | "True" -> const_true +- | "False" -> const_false +- +- +-?string_concat: string+ +- +-_testlist_comp: test | _tuple_inner +-_tuple_inner: test_or_star_expr (("," test_or_star_expr)+ [","] | ",") +- +- +-?test_or_star_expr: test +- | star_expr +- +-?subscriptlist: subscript +- | subscript (("," subscript)+ [","] | ",") -> subscript_tuple +-?subscript: test | ([test] ":" [test] [sliceop]) -> slice +-sliceop: ":" [test] +-?exprlist: (expr|star_expr) +- | (expr|star_expr) (("," (expr|star_expr))+ [","]|",") +-?testlist: test | testlist_tuple +-testlist_tuple: test (("," test)+ [","] | ",") +-_dict_exprlist: (key_value | "**" expr) ("," (key_value | "**" expr))* [","] +- +-key_value: test ":" test +- +-_set_exprlist: test_or_star_expr ("," test_or_star_expr)* [","] +- +-classdef: "class" NAME ["(" [arguments] ")"] ":" suite +- +- +- +-arguments: argvalue ("," argvalue)* ("," [ starargs | kwargs])? +- | starargs +- | kwargs +- | comprehension{test} +- +-starargs: stararg ("," stararg)* ("," argvalue)* ["," kwargs] +-stararg: "*" test +-kwargs: "**" test ("," argvalue)* +- +-?argvalue: test ("=" test)? +- +- +-comprehension{comp_result}: comp_result comp_fors [comp_if] +-comp_fors: comp_for+ +-comp_for: [ASYNC] "for" exprlist "in" or_test +-ASYNC: "async" +-?comp_if: "if" test_nocond +- +-// not used in grammar, but may appear in "node" passed from Parser to Compiler +-encoding_decl: NAME +- +-yield_expr: "yield" [testlist] +- | "yield" "from" test -> yield_from +- +-number: DEC_NUMBER | HEX_NUMBER | BIN_NUMBER | OCT_NUMBER | FLOAT_NUMBER | IMAG_NUMBER +-string: STRING | LONG_STRING +- +-// Other terminals +- +-_NEWLINE: ( /\r?\n[\t ]*/ | COMMENT )+ +- +-%ignore /[\t \f]+/ // WS +-%ignore /\\[\t \f]*\r?\n/ // LINE_CONT +-%ignore COMMENT +-%declare _INDENT _DEDENT +- +- +-// Python terminals +- +-NAME: /[^\W\d]\w*/ +-COMMENT: /#[^\n]*/ +- +-STRING: /([ubf]?r?|r[ubf])("(?!"").*?(?<!\\)(\\\\)*?"|'(?!'').*?(?<!\\)(\\\\)*?')/i +-LONG_STRING: /([ubf]?r?|r[ubf])(""".*?(?<!\\)(\\\\)*?"""|'''.*?(?<!\\)(\\\\)*?''')/is +diff --git a/src/poetry/core/_vendor/lark/indenter.py b/src/poetry/core/_vendor/lark/indenter.py +deleted file mode 100644 +--- a/src/poetry/core/_vendor/lark/indenter.py ++++ /dev/null +-"Provides Indentation services for languages with indentation similar to Python" +- +-from abc import ABC, abstractmethod +-from typing import List, Iterator +- +-from .exceptions import LarkError +-from .lark import PostLex +-from .lexer import Token +- +-###{standalone +- +-class DedentError(LarkError): +- pass +- +-class Indenter(PostLex, ABC): +- paren_level: int +- indent_level: List[int] +- +- def __init__(self) ->
+- @property +- def always_accept(self): +- return (self.NL_type,) +- +- @property +- @abstractmethod +- def NL_type(self) -> str: +- raise NotImplementedError() +- +- @property +- @abstractmethod +- def OPEN_PAREN_types(self) -> List[str]: +- raise NotImplementedError() +- +- @property +- @abstractmethod +- def CLOSE_PAREN_types(self) -> List[str]: +- raise NotImplementedError() +- +- @property +- @abstractmethod +- def INDENT_type(self) -> str: +- raise NotImplementedError() +- +- @property +- @abstractmethod +- def DEDENT_type(self) -> str: +- raise NotImplementedError() +- +- @property +- @abstractmethod +- def tab_len(self) -> int: +- raise NotImplementedError() +- +- +-class PythonIndenter(Indenter): +- NL_type = '_NEWLINE' +- OPEN_PAREN_types = ['LPAR', 'LSQB', 'LBRACE'] +- CLOSE_PAREN_types = ['RPAR', 'RSQB', 'RBRACE'] +- INDENT_type = '_INDENT' +- DEDENT_type = '_DEDENT' +- tab_len = 8 +- +-###} +diff --git a/src/poetry/core/_vendor/lark/lark.py b/src/poetry/core/_vendor/lark/lark.py +deleted file mode 100644 +index afebdd1..0000000 +--- a/src/poetry/core/_vendor/lark/lark.py ++++ /dev/null +@@ -1,628 +0,0 @@ +-from abc import ABC, abstractmethod +-import sys, os, pickle, hashlib +-import tempfile +-from typing import ( +- TypeVar, Type, List, Dict, Iterator, Callable, Union, Optional, Sequence, +- Tuple, Iterable, IO, Any, TYPE_CHECKING, Collection +-) +-if TYPE_CHECKING: +- from .parsers.lalr_interactive_parser import InteractiveParser +- from .tree import ParseTree +- from .visitors import Transformer +- if sys.version_info >= (3, 8): +- from typing import Literal +- else: +- from typing_extensions import Literal +- +-from .exceptions import ConfigurationError, assert_config, UnexpectedInput +-from .utils import Serialize, SerializeMemoizer, FS, isascii, logger +-from .load_grammar import load_grammar, FromPackageLoader, Grammar, verify_used_files, PackageResource +-from .tree import Tree +-from .common import LexerConf, ParserConf, _ParserArgType, _LexerArgType +- +-from .lexer import Lexer, BasicLexer, TerminalDef, LexerThread, Token +-from .parse_tree_builder import ParseTreeBuilder +-from .parser_frontends import _validate_frontend_args, _get_lexer_callbacks, _deserialize_parsing_frontend, _construct_parsing_frontend +-from .grammar import Rule +- +-import re +-try: +- import regex # type: ignore +-except ImportError: +- regex = None +- +- +-###{standalone +- +- +-class PostLex(ABC): +- @abstractmethod +- def process(self, stream: Iterator[Token]) -> Iterator[Token]: +- return stream +- +- always_accept: Iterable[str] = () +- +-class LarkOptions(Serialize): +- """Specifies the options for Lark +- +- """ +- +- start: List[str] +- debug: bool +- transformer: 'Optional[Transformer]' +- propagate_positions: Union[bool, str] +- maybe_placeholders: bool +- cache: Union[bool, str] +- regex: bool +- g_regex_flags: int +- keep_all_tokens: bool +- tree_class: Any +- parser: _ParserArgType +- lexer: _LexerArgType +- ambiguity: 'Literal["auto", "resolve", "explicit", "forest"]' +- postlex: Optional[PostLex] +- priority: 'Optional[Literal["auto", "normal", "invert"]]' +- lexer_callbacks: Dict[str, Callable[[Token], Token]] +- use_bytes: bool +- edit_terminals: Optional[Callable[[TerminalDef], TerminalDef]] +- import_paths: 'List[Union[str, Callable[[Union[None, str, PackageResource], str], Tuple[str, str]]]]' +- source_path: Optional[str] +- +- OPTIONS_DOC = """ +- **=== General Options ===** +- +- start +- The start symbol. 
Either a string, or a list of strings for multiple possible starts (Default: "start") +- debug +- Display debug information and extra warnings. Use only when debugging (Default: ``False``) +- When used with Earley, it generates a forest graph as "sppf.png", if 'dot' is installed. +- transformer +- Applies the transformer to every parse tree (equivalent to applying it after the parse, but faster) +- propagate_positions +- Propagates (line, column, end_line, end_column) attributes into all tree branches. +- Accepts ``False``, ``True``, or a callable, which will filter which nodes to ignore when propagating. +- maybe_placeholders +- When ``True``, the ``[]`` operator returns ``None`` when not matched. +- When ``False``, ``[]`` behaves like the ``?`` operator, and returns no value at all. +- (default= ``True``) +- cache +- Cache the results of the Lark grammar analysis, for x2 to x3 faster loading. LALR only for now. +- +- - When ``False``, does nothing (default) +- - When ``True``, caches to a temporary file in the local directory +- - When given a string, caches to the path pointed by the string +- regex +- When True, uses the ``regex`` module instead of the stdlib ``re``. +- g_regex_flags +- Flags that are applied to all terminals (both regex and strings) +- keep_all_tokens +- Prevent the tree builder from automagically removing "punctuation" tokens (Default: ``False``) +- tree_class +- Lark will produce trees comprised of instances of this class instead of the default ``lark.Tree``. +- +- **=== Algorithm Options ===** +- +- parser +- Decides which parser engine to use. Accepts "earley" or "lalr". (Default: "earley"). +- (there is also a "cyk" option for legacy) +- lexer +- Decides whether or not to use a lexer stage +- +- - "auto" (default): Choose for me based on the parser +- - "basic": Use a basic lexer +- - "contextual": Stronger lexer (only works with parser="lalr") +- - "dynamic": Flexible and powerful (only with parser="earley") +- - "dynamic_complete": Same as dynamic, but tries *every* variation of tokenizing possible. +- ambiguity +- Decides how to handle ambiguity in the parse. Only relevant if parser="earley" +- +- - "resolve": The parser will automatically choose the simplest derivation +- (it chooses consistently: greedy for tokens, non-greedy for rules) +- - "explicit": The parser will return all derivations wrapped in "_ambig" tree nodes (i.e. a forest). +- - "forest": The parser will return the root of the shared packed parse forest. +- +- **=== Misc. / Domain Specific Options ===** +- +- postlex +- Lexer post-processing (Default: ``None``) Only works with the basic and contextual lexers. +- priority +- How priorities should be evaluated - "auto", ``None``, "normal", "invert" (Default: "auto") +- lexer_callbacks +- Dictionary of callbacks for the lexer. May alter tokens during lexing. Use with caution. +- use_bytes +- Accept an input of type ``bytes`` instead of ``str``. +- edit_terminals +- A callback for editing the terminals before parse. +- import_paths +- A List of either paths or loader functions to specify from where grammars are imported +- source_path +- Override the source of from where the grammar was loaded. Useful for relative imports and unconventional grammar loading +- **=== End of Options ===** +- """ +- if __doc__: +- __doc__ += OPTIONS_DOC +- +- +- # Adding a new option needs to be done in multiple places: +- # - In the dictionary below. This is the primary truth of which options `Lark.__init__` accepts +- # - In the docstring above. 
It is used both for the docstring of `LarkOptions` and `Lark`, and in readthedocs +- # - As an attribute of `LarkOptions` above +- # - Potentially in `_LOAD_ALLOWED_OPTIONS` below this class, when the option doesn't change how the grammar is loaded +- # - Potentially in `lark.tools.__init__`, if it makes sense, and it can easily be passed as a cmd argument +- _defaults: Dict[str, Any] = { +- 'debug': False, +- 'keep_all_tokens': False, +- 'tree_class': None, +- 'cache': False, +- 'postlex': None, +- 'parser': 'earley', +- 'lexer': 'auto', +- 'transformer': None, +- 'start': 'start', +- 'priority': 'auto', +- 'ambiguity': 'auto', +- 'regex': False, +- 'propagate_positions': False, +- 'lexer_callbacks': {}, +- 'maybe_placeholders': True, +- 'edit_terminals': None, +- 'g_regex_flags': 0, +- 'use_bytes': False, +- 'import_paths': [], +- 'source_path': None, +- '_plugins': {}, +- } +- +- def __init__(self, options_dict): +- o = dict(options_dict) +- +- options = {} +- for name, default in self._defaults.items(): +- if name in o: +- value = o.pop(name) +- if isinstance(default, bool) and name not in ('cache', 'use_bytes', 'propagate_positions'): +- value = bool(value) +- else: +- value = default +- +- options[name] = value +- +- if isinstance(options['start'], str): +- options['start'] = [options['start']] +- +- self.__dict__['options'] = options +- +- +- assert_config(self.parser, ('earley', 'lalr', 'cyk', None)) +- +- if self.parser == 'earley' and self.transformer: +- raise ConfigurationError('Cannot specify an embedded transformer when using the Earley algorithm. ' +- 'Please use your transformer on the resulting parse tree, or use a different algorithm (i.e. LALR)') +- +- if o: +- raise ConfigurationError("Unknown options: %s" % o.keys()) +- +- def __getattr__(self, name): +- try: +- return self.__dict__['options'][name] +- except KeyError as e: +- raise AttributeError(e) +- +- def __setattr__(self, name, value): +- assert_config(name, self.options.keys(), "%r isn't a valid option. Expected one of: %s") +- self.options[name] = value +- +- def serialize(self, memo): +- return self.options +- +- @classmethod +- def deserialize(cls, data, memo): +- return cls(data) +- +- +-# Options that can be passed to the Lark parser, even when it was loaded from cache/standalone. +-# These options are only used outside of `load_grammar`. +-_LOAD_ALLOWED_OPTIONS = {'postlex', 'transformer', 'lexer_callbacks', 'use_bytes', 'debug', 'g_regex_flags', 'regex', 'propagate_positions', 'tree_class', '_plugins'} +- +-_VALID_PRIORITY_OPTIONS = ('auto', 'normal', 'invert', None) +-_VALID_AMBIGUITY_OPTIONS = ('auto', 'resolve', 'explicit', 'forest') +- +- +-_T = TypeVar('_T', bound="Lark") +- +-class Lark(Serialize): +- """Main interface for the library. +- +- It's mostly a thin wrapper for the many different parsers, and for the tree constructor. +- +- Parameters: +- grammar: a string or file-object containing the grammar spec (using Lark's ebnf syntax) +- options: a dictionary controlling various aspects of Lark. +- +- Example: +- >>> Lark(r'''start: "foo" ''') +- Lark(...) 
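
For orientation, a minimal end-to-end use of the class (grammar invented for illustration; the options correspond to LarkOptions above):

    from lark import Lark

    parser = Lark(r'''
        start: pair+
        pair: WORD ":" WORD
        %import common.WORD
        %ignore " "
    ''', parser="lalr")

    print(parser.parse("alpha:beta gamma:delta").pretty())
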
+- """ +- +- source_path: str +- source_grammar: str +- grammar: 'Grammar' +- options: LarkOptions +- lexer: Lexer +- terminals: List[TerminalDef] +- +- def __init__(self, grammar: 'Union[Grammar, str, IO[str]]', **options) -> None: +- self.options = LarkOptions(options) +- +- # Set regex or re module +- use_regex = self.options.regex +- if use_regex: +- if regex: +- re_module = regex +- else: +- raise ImportError('`regex` module must be installed if calling `Lark(regex=True)`.') +- else: +- re_module = re +- +- # Some, but not all file-like objects have a 'name' attribute +- if self.options.source_path is None: +- try: +- self.source_path = grammar.name +- except AttributeError: +- self.source_path = '' +- else: +- self.source_path = self.options.source_path +- +- # Drain file-like objects to get their contents +- try: +- read = grammar.read +- except AttributeError: +- pass +- else: +- grammar = read() +- +- cache_fn = None +- cache_md5 = None +- if isinstance(grammar, str): +- self.source_grammar = grammar +- if self.options.use_bytes: +- if not isascii(grammar): +- raise ConfigurationError("Grammar must be ascii only, when use_bytes=True") +- +- if self.options.cache: +- if self.options.parser != 'lalr': +- raise ConfigurationError("cache only works with parser='lalr' for now") +- +- unhashable = ('transformer', 'postlex', 'lexer_callbacks', 'edit_terminals', '_plugins') +- options_str = ''.join(k+str(v) for k, v in options.items() if k not in unhashable) +- from . import __version__ +- s = grammar + options_str + __version__ + str(sys.version_info[:2]) +- cache_md5 = hashlib.md5(s.encode('utf8')).hexdigest() +- +- if isinstance(self.options.cache, str): +- cache_fn = self.options.cache +- else: +- if self.options.cache is not True: +- raise ConfigurationError("cache argument must be bool or str") +- +- cache_fn = tempfile.gettempdir() + '/.lark_cache_%s_%s_%s.tmp' % (cache_md5, *sys.version_info[:2]) +- +- if FS.exists(cache_fn): +- logger.debug('Loading grammar from cache: %s', cache_fn) +- # Remove options that aren't relevant for loading from cache +- for name in (set(options) - _LOAD_ALLOWED_OPTIONS): +- del options[name] +- with FS.open(cache_fn, 'rb') as f: +- old_options = self.options +- try: +- file_md5 = f.readline().rstrip(b'\n') +- cached_used_files = pickle.load(f) +- if file_md5 == cache_md5.encode('utf8') and verify_used_files(cached_used_files): +- cached_parser_data = pickle.load(f) +- self._load(cached_parser_data, **options) +- return +- except Exception: # We should probably narrow done which errors we catch here. +- logger.exception("Failed to load Lark from cache: %r. We will try to carry on." % cache_fn) +- +- # In theory, the Lark instance might have been messed up by the call to `_load`. +- # In practice the only relevant thing that might have been overriden should be `options` +- self.options = old_options +- +- +- # Parse the grammar file and compose the grammars +- self.grammar, used_files = load_grammar(grammar, self.source_path, self.options.import_paths, self.options.keep_all_tokens) +- else: +- assert isinstance(grammar, Grammar) +- self.grammar = grammar +- +- +- if self.options.lexer == 'auto': +- if self.options.parser == 'lalr': +- self.options.lexer = 'contextual' +- elif self.options.parser == 'earley': +- if self.options.postlex is not None: +- logger.info("postlex can't be used with the dynamic lexer, so we use 'basic' instead. 
" +- "Consider using lalr with contextual instead of earley") +- self.options.lexer = 'basic' +- else: +- self.options.lexer = 'dynamic' +- elif self.options.parser == 'cyk': +- self.options.lexer = 'basic' +- else: +- assert False, self.options.parser +- lexer = self.options.lexer +- if isinstance(lexer, type): +- assert issubclass(lexer, Lexer) # XXX Is this really important? Maybe just ensure interface compliance +- else: +- assert_config(lexer, ('basic', 'contextual', 'dynamic', 'dynamic_complete')) +- if self.options.postlex is not None and 'dynamic' in lexer: +- raise ConfigurationError("Can't use postlex with a dynamic lexer. Use basic or contextual instead") +- +- if self.options.ambiguity == 'auto': +- if self.options.parser == 'earley': +- self.options.ambiguity = 'resolve' +- else: +- assert_config(self.options.parser, ('earley', 'cyk'), "%r doesn't support disambiguation. Use one of these parsers instead: %s") +- +- if self.options.priority == 'auto': +- self.options.priority = 'normal' +- +- if self.options.priority not in _VALID_PRIORITY_OPTIONS: +- raise ConfigurationError("invalid priority option: %r. Must be one of %r" % (self.options.priority, _VALID_PRIORITY_OPTIONS)) +- if self.options.ambiguity not in _VALID_AMBIGUITY_OPTIONS: +- raise ConfigurationError("invalid ambiguity option: %r. Must be one of %r" % (self.options.ambiguity, _VALID_AMBIGUITY_OPTIONS)) +- +- if self.options.parser is None: +- terminals_to_keep = '*' +- elif self.options.postlex is not None: +- terminals_to_keep = set(self.options.postlex.always_accept) +- else: +- terminals_to_keep = set() +- +- # Compile the EBNF grammar into BNF +- self.terminals, self.rules, self.ignore_tokens = self.grammar.compile(self.options.start, terminals_to_keep) +- +- if self.options.edit_terminals: +- for t in self.terminals: +- self.options.edit_terminals(t) +- +- self._terminals_dict = {t.name: t for t in self.terminals} +- +- # If the user asked to invert the priorities, negate them all here. +- if self.options.priority == 'invert': +- for rule in self.rules: +- if rule.options.priority is not None: +- rule.options.priority = -rule.options.priority +- for term in self.terminals: +- term.priority = -term.priority +- # Else, if the user asked to disable priorities, strip them from the +- # rules and terminals. This allows the Earley parsers to skip an extra forest walk +- # for improved performance, if you don't need them (or didn't specify any). +- elif self.options.priority is None: +- for rule in self.rules: +- if rule.options.priority is not None: +- rule.options.priority = None +- for term in self.terminals: +- term.priority = 0 +- +- # TODO Deprecate lexer_callbacks? 
+- self.lexer_conf = LexerConf( +- self.terminals, re_module, self.ignore_tokens, self.options.postlex, +- self.options.lexer_callbacks, self.options.g_regex_flags, use_bytes=self.options.use_bytes +- ) +- +- if self.options.parser: +- self.parser = self._build_parser() +- elif lexer: +- self.lexer = self._build_lexer() +- +- if cache_fn: +- logger.debug('Saving grammar to cache: %s', cache_fn) +- with FS.open(cache_fn, 'wb') as f: +- assert cache_md5 is not None +- f.write(cache_md5.encode('utf8') + b'\n') +- pickle.dump(used_files, f) +- self.save(f, _LOAD_ALLOWED_OPTIONS) +- +- if __doc__: +- __doc__ += "\n\n" + LarkOptions.OPTIONS_DOC +- +- __serialize_fields__ = 'parser', 'rules', 'options' +- +- def _build_lexer(self, dont_ignore=False): +- lexer_conf = self.lexer_conf +- if dont_ignore: +- from copy import copy +- lexer_conf = copy(lexer_conf) +- lexer_conf.ignore = () +- return BasicLexer(lexer_conf) +- +- def _prepare_callbacks(self): +- self._callbacks = {} +- # we don't need these callbacks if we aren't building a tree +- if self.options.ambiguity != 'forest': +- self._parse_tree_builder = ParseTreeBuilder( +- self.rules, +- self.options.tree_class or Tree, +- self.options.propagate_positions, +- self.options.parser != 'lalr' and self.options.ambiguity == 'explicit', +- self.options.maybe_placeholders +- ) +- self._callbacks = self._parse_tree_builder.create_callback(self.options.transformer) +- self._callbacks.update(_get_lexer_callbacks(self.options.transformer, self.terminals)) +- +- def _build_parser(self): +- self._prepare_callbacks() +- _validate_frontend_args(self.options.parser, self.options.lexer) +- parser_conf = ParserConf(self.rules, self._callbacks, self.options.start) +- return _construct_parsing_frontend( +- self.options.parser, +- self.options.lexer, +- self.lexer_conf, +- parser_conf, +- options=self.options +- ) +- +- def save(self, f, exclude_options: Collection[str] = ()): +- """Saves the instance into the given file object +- +- Useful for caching and multiprocessing. +- """ +- data, m = self.memo_serialize([TerminalDef, Rule]) +- if exclude_options: +- data["options"] = {n: v for n, v in data["options"].items() if n not in exclude_options} +- pickle.dump({'data': data, 'memo': m}, f, protocol=pickle.HIGHEST_PROTOCOL) +- +- @classmethod +- def load(cls, f): +- """Loads an instance from the given file object +- +- Useful for caching and multiprocessing. 
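(Editor's note: a minimal save/load round-trip sketch, not part of the vendored file; the file name is illustrative, and parser='lalr' is assumed since that is what the caching path above supports.)

from lark import Lark

parser = Lark(r'''start: "a"+''', parser='lalr')
with open('parser.pkl', 'wb') as f:
    parser.save(f)            # serialize the compiled parser
with open('parser.pkl', 'rb') as f:
    parser2 = Lark.load(f)    # restore it without re-analyzing the grammar
assert parser2.parse('aaa').data == 'start'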
+- """ +- inst = cls.__new__(cls) +- return inst._load(f) +- +- def _deserialize_lexer_conf(self, data, memo, options): +- lexer_conf = LexerConf.deserialize(data['lexer_conf'], memo) +- lexer_conf.callbacks = options.lexer_callbacks or {} +- lexer_conf.re_module = regex if options.regex else re +- lexer_conf.use_bytes = options.use_bytes +- lexer_conf.g_regex_flags = options.g_regex_flags +- lexer_conf.skip_validation = True +- lexer_conf.postlex = options.postlex +- return lexer_conf +- +- def _load(self, f, **kwargs): +- if isinstance(f, dict): +- d = f +- else: +- d = pickle.load(f) +- memo_json = d['memo'] +- data = d['data'] +- +- assert memo_json +- memo = SerializeMemoizer.deserialize(memo_json, {'Rule': Rule, 'TerminalDef': TerminalDef}, {}) +- options = dict(data['options']) +- if (set(kwargs) - _LOAD_ALLOWED_OPTIONS) & set(LarkOptions._defaults): +- raise ConfigurationError("Some options are not allowed when loading a Parser: {}" +- .format(set(kwargs) - _LOAD_ALLOWED_OPTIONS)) +- options.update(kwargs) +- self.options = LarkOptions.deserialize(options, memo) +- self.rules = [Rule.deserialize(r, memo) for r in data['rules']] +- self.source_path = '' +- _validate_frontend_args(self.options.parser, self.options.lexer) +- self.lexer_conf = self._deserialize_lexer_conf(data['parser'], memo, self.options) +- self.terminals = self.lexer_conf.terminals +- self._prepare_callbacks() +- self._terminals_dict = {t.name: t for t in self.terminals} +- self.parser = _deserialize_parsing_frontend( +- data['parser'], +- memo, +- self.lexer_conf, +- self._callbacks, +- self.options, # Not all, but multiple attributes are used +- ) +- return self +- +- @classmethod +- def _load_from_dict(cls, data, memo, **kwargs): +- inst = cls.__new__(cls) +- return inst._load({'data': data, 'memo': memo}, **kwargs) +- +- @classmethod +- def open(cls: Type[_T], grammar_filename: str, rel_to: Optional[str]=None, **options) -> _T: +- """Create an instance of Lark with the grammar given by its filename +- +- If ``rel_to`` is provided, the function will find the grammar filename in relation to it. +- +- Example: +- +- >>> Lark.open("grammar_file.lark", rel_to=__file__, parser="lalr") +- Lark(...) +- +- """ +- if rel_to: +- basepath = os.path.dirname(rel_to) +- grammar_filename = os.path.join(basepath, grammar_filename) +- with open(grammar_filename, encoding='utf8') as f: +- return cls(f, **options) +- +- @classmethod +- def open_from_package(cls: Type[_T], package: str, grammar_path: str, search_paths: 'Sequence[str]'=[""], **options) -> _T: +- """Create an instance of Lark with the grammar loaded from within the package `package`. +- This allows grammar loading from zipapps. +- +- Imports in the grammar will use the `package` and `search_paths` provided, through `FromPackageLoader` +- +- Example: +- +- Lark.open_from_package(__name__, "example.lark", ("grammars",), parser=...) +- """ +- package_loader = FromPackageLoader(package, search_paths) +- full_path, text = package_loader(None, grammar_path) +- options.setdefault('source_path', full_path) +- options.setdefault('import_paths', []) +- options['import_paths'].append(package_loader) +- return cls(text, **options) +- +- def __repr__(self): +- return 'Lark(open(%r), parser=%r, lexer=%r, ...)' % (self.source_path, self.options.parser, self.options.lexer) +- +- +- def lex(self, text: str, dont_ignore: bool=False) -> Iterator[Token]: +- """Only lex (and postlex) the text, without parsing it. 
Only relevant when lexer='basic'
+-
+- When dont_ignore=True, the lexer will return all tokens, even those marked for %ignore.
+-
+- :raises UnexpectedCharacters: In case the lexer cannot find a suitable match.
+- """
+- if not hasattr(self, 'lexer') or dont_ignore:
+- lexer = self._build_lexer(dont_ignore)
+- else:
+- lexer = self.lexer
+- lexer_thread = LexerThread.from_text(lexer, text)
+- stream = lexer_thread.lex(None)
+- if self.options.postlex:
+- return self.options.postlex.process(stream)
+- return stream
+-
+- def get_terminal(self, name: str) -> TerminalDef:
+- """Get information about a terminal"""
+- return self._terminals_dict[name]
+-
+- def parse_interactive(self, text: Optional[str]=None, start: Optional[str]=None) -> 'InteractiveParser':
+- """Start an interactive parsing session.
+-
+- Parameters:
+- text (str, optional): Text to be parsed. Required for ``resume_parse()``.
+- start (str, optional): Start symbol
+-
+- Returns:
+- A new InteractiveParser instance.
+-
+- See Also: ``Lark.parse()``
+- """
+- return self.parser.parse_interactive(text, start=start)
+-
+- def parse(self, text: str, start: Optional[str]=None, on_error: 'Optional[Callable[[UnexpectedInput], bool]]'=None) -> 'ParseTree':
+- """Parse the given text, according to the options provided.
+-
+- Parameters:
+- text (str): Text to be parsed.
+- start (str, optional): Required if Lark was given multiple possible start symbols (using the start option).
+- on_error (function, optional): if provided, will be called on UnexpectedToken error. Return true to resume parsing.
+- LALR only. See examples/advanced/error_handling.py for an example of how to use on_error.
+-
+- Returns:
+- If a transformer is supplied to ``__init__``, returns whatever is the
+- result of the transformation. Otherwise, returns a Tree instance.
+-
+- :raises UnexpectedInput: On a parse error, one of these sub-exceptions will be raised:
+- ``UnexpectedCharacters``, ``UnexpectedToken``, or ``UnexpectedEOF``.
+- For convenience, these sub-exceptions also inherit from ``ParserError`` and ``LexerError``.
+-
+- """
+- return self.parser.parse(text, start=start, on_error=on_error)
+-
+-
+-###}
+diff --git a/src/poetry/core/_vendor/lark/lexer.py b/src/poetry/core/_vendor/lark/lexer.py
+deleted file mode 100644
+index ec71a12..0000000
+--- a/src/poetry/core/_vendor/lark/lexer.py
++++ /dev/null
+@@ -1,541 +0,0 @@
+-# Lexer Implementation
+-
+-from abc import abstractmethod, ABC
+-import re
+-from contextlib import suppress
+-from typing import (
+- TypeVar, Type, List, Dict, Iterator, Collection, Callable, Optional, FrozenSet, Any,
+- Pattern as REPattern, ClassVar, TYPE_CHECKING
+-)
+-from types import ModuleType
+-if TYPE_CHECKING:
+- from .common import LexerConf
+-
+-from .utils import classify, get_regexp_width, Serialize
+-from .exceptions import UnexpectedCharacters, LexError, UnexpectedToken
+-from .grammar import TOKEN_DEFAULT_PRIORITY
+-
+-###{standalone
+-from copy import copy
+-
+-
+-class Pattern(Serialize, ABC):
+-
+- value: str
+- flags: Collection[str]
+- raw: Optional[str]
+- type: ClassVar[str]
+-
+- def __init__(self, value: str, flags: Collection[str]=(), raw: Optional[str]=None) -> None:
+- self.value = value
+- self.flags = frozenset(flags)
+- self.raw = raw
+-
+- def __repr__(self):
+- return repr(self.to_regexp())
+-
+- # Pattern Hashing assumes all subclasses have a different priority!
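(Editor's note: a small sketch of the hashing and equality contract noted above, not part of the vendored file, assuming the classes are importable as in the published `lark` package.)

from lark.lexer import PatternStr, PatternRE

# type(self) participates in __hash__ and __eq__, so a string literal "a"
# and a regexp /a/ with the same .value never compare as equal.
assert PatternStr('a') == PatternStr('a')
assert PatternStr('a') != PatternRE('a')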
+- def __hash__(self):
+- return hash((type(self), self.value, self.flags))
+-
+- def __eq__(self, other):
+- return type(self) == type(other) and self.value == other.value and self.flags == other.flags
+-
+- @abstractmethod
+- def to_regexp(self) -> str:
+- raise NotImplementedError()
+-
+- @property
+- @abstractmethod
+- def min_width(self) -> int:
+- raise NotImplementedError()
+-
+- @property
+- @abstractmethod
+- def max_width(self) -> int:
+- raise NotImplementedError()
+-
+- def _get_flags(self, value):
+- for f in self.flags:
+- value = ('(?%s:%s)' % (f, value))
+- return value
+-
+-
+-class PatternStr(Pattern):
+- __serialize_fields__ = 'value', 'flags'
+-
+- type: ClassVar[str] = "str"
+-
+- def to_regexp(self) -> str:
+- return self._get_flags(re.escape(self.value))
+-
+- @property
+- def min_width(self) -> int:
+- return len(self.value)
+-
+- @property
+- def max_width(self) -> int:
+- return len(self.value)
+-
+-
+-class PatternRE(Pattern):
+- __serialize_fields__ = 'value', 'flags', '_width'
+-
+- type: ClassVar[str] = "re"
+-
+- def to_regexp(self) -> str:
+- return self._get_flags(self.value)
+-
+- _width = None
+- def _get_width(self):
+- if self._width is None:
+- self._width = get_regexp_width(self.to_regexp())
+- return self._width
+-
+- @property
+- def min_width(self) -> int:
+- return self._get_width()[0]
+-
+- @property
+- def max_width(self) -> int:
+- return self._get_width()[1]
+-
+-
+-class TerminalDef(Serialize):
+- __serialize_fields__ = 'name', 'pattern', 'priority'
+- __serialize_namespace__ = PatternStr, PatternRE
+-
+- name: str
+- pattern: Pattern
+- priority: int
+-
+- def __init__(self, name: str, pattern: Pattern, priority: int=TOKEN_DEFAULT_PRIORITY) -> None:
+- assert isinstance(pattern, Pattern), pattern
+- self.name = name
+- self.pattern = pattern
+- self.priority = priority
+-
+- def __repr__(self):
+- return '%s(%r, %r)' % (type(self).__name__, self.name, self.pattern)
+-
+- def user_repr(self) -> str:
+- if self.name.startswith('__'): # We represent a generated terminal
+- return self.pattern.raw or self.name
+- else:
+- return self.name
+-
+-_T = TypeVar('_T', bound="Token")
+-
+-class Token(str):
+- """A string with meta-information, produced by the lexer.
+-
+- When parsing text, the resulting chunks of the input that haven't been discarded
+- will end up in the tree as Token instances. The Token class inherits from Python's ``str``,
+- so normal string comparisons and operations will work as expected.
+-
+- Attributes:
+- type: Name of the token (as specified in grammar)
+- value: Value of the token (redundant, as ``token.value == token`` will always be true)
+- start_pos: The index of the token in the text
+- line: The line of the token in the text (starting with 1)
+- column: The column of the token in the text (starting with 1)
+- end_line: The line where the token ends
+- end_column: The next column after the end of the token. For example,
+- if the token is a single character with a column value of 4,
+- end_column will be 5.
+- end_pos: the index where the token ends (basically ``start_pos + len(token)``) +- """ +- __slots__ = ('type', 'start_pos', 'value', 'line', 'column', 'end_line', 'end_column', 'end_pos') +- +- type: str +- start_pos: int +- value: Any +- line: int +- column: int +- end_line: int +- end_column: int +- end_pos: int +- +- def __new__(cls, type_, value, start_pos=None, line=None, column=None, end_line=None, end_column=None, end_pos=None): +- inst = super(Token, cls).__new__(cls, value) +- inst.type = type_ +- inst.start_pos = start_pos +- inst.value = value +- inst.line = line +- inst.column = column +- inst.end_line = end_line +- inst.end_column = end_column +- inst.end_pos = end_pos +- return inst +- +- def update(self, type_: Optional[str]=None, value: Optional[Any]=None) -> 'Token': +- return Token.new_borrow_pos( +- type_ if type_ is not None else self.type, +- value if value is not None else self.value, +- self +- ) +- +- @classmethod +- def new_borrow_pos(cls: Type[_T], type_: str, value: Any, borrow_t: 'Token') -> _T: +- return cls(type_, value, borrow_t.start_pos, borrow_t.line, borrow_t.column, borrow_t.end_line, borrow_t.end_column, borrow_t.end_pos) +- +- def __reduce__(self): +- return (self.__class__, (self.type, self.value, self.start_pos, self.line, self.column)) +- +- def __repr__(self): +- return 'Token(%r, %r)' % (self.type, self.value) +- +- def __deepcopy__(self, memo): +- return Token(self.type, self.value, self.start_pos, self.line, self.column) +- +- def __eq__(self, other): +- if isinstance(other, Token) and self.type != other.type: +- return False +- +- return str.__eq__(self, other) +- +- __hash__ = str.__hash__ +- +- +-class LineCounter: +- __slots__ = 'char_pos', 'line', 'column', 'line_start_pos', 'newline_char' +- +- def __init__(self, newline_char): +- self.newline_char = newline_char +- self.char_pos = 0 +- self.line = 1 +- self.column = 1 +- self.line_start_pos = 0 +- +- def __eq__(self, other): +- if not isinstance(other, LineCounter): +- return NotImplemented +- +- return self.char_pos == other.char_pos and self.newline_char == other.newline_char +- +- def feed(self, token: Token, test_newline=True): +- """Consume a token and calculate the new line & column. +- +- As an optional optimization, set test_newline=False if token doesn't contain a newline. 
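(Editor's note: a worked example of the bookkeeping below, not part of the vendored file, assuming the published `lark` package; feed() only uses count, rindex, and len, so a plain str stands in for a Token here.)

from lark.lexer import LineCounter

lc = LineCounter('\n')
lc.feed('ab\ncd')
# one newline seen: line 1 -> 2; line_start_pos becomes 3; char_pos becomes 5
assert (lc.line, lc.column, lc.char_pos) == (2, 3, 5)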
+- """ +- if test_newline: +- newlines = token.count(self.newline_char) +- if newlines: +- self.line += newlines +- self.line_start_pos = self.char_pos + token.rindex(self.newline_char) + 1 +- +- self.char_pos += len(token) +- self.column = self.char_pos - self.line_start_pos + 1 +- +- +-class UnlessCallback: +- def __init__(self, scanner): +- self.scanner = scanner +- +- def __call__(self, t): +- res = self.scanner.match(t.value, 0) +- if res: +- _value, t.type = res +- return t +- +- +-class CallChain: +- def __init__(self, callback1, callback2, cond): +- self.callback1 = callback1 +- self.callback2 = callback2 +- self.cond = cond +- +- def __call__(self, t): +- t2 = self.callback1(t) +- return self.callback2(t) if self.cond(t2) else t2 +- +- +-def _get_match(re_, regexp, s, flags): +- m = re_.match(regexp, s, flags) +- if m: +- return m.group(0) +- +-def _create_unless(terminals, g_regex_flags, re_, use_bytes): +- tokens_by_type = classify(terminals, lambda t: type(t.pattern)) +- assert len(tokens_by_type) <= 2, tokens_by_type.keys() +- embedded_strs = set() +- callback = {} +- for retok in tokens_by_type.get(PatternRE, []): +- unless = [] +- for strtok in tokens_by_type.get(PatternStr, []): +- if strtok.priority != retok.priority: +- continue +- s = strtok.pattern.value +- if s == _get_match(re_, retok.pattern.to_regexp(), s, g_regex_flags): +- unless.append(strtok) +- if strtok.pattern.flags <= retok.pattern.flags: +- embedded_strs.add(strtok) +- if unless: +- callback[retok.name] = UnlessCallback(Scanner(unless, g_regex_flags, re_, match_whole=True, use_bytes=use_bytes)) +- +- new_terminals = [t for t in terminals if t not in embedded_strs] +- return new_terminals, callback +- +- +-class Scanner: +- def __init__(self, terminals, g_regex_flags, re_, use_bytes, match_whole=False): +- self.terminals = terminals +- self.g_regex_flags = g_regex_flags +- self.re_ = re_ +- self.use_bytes = use_bytes +- self.match_whole = match_whole +- +- self.allowed_types = {t.name for t in self.terminals} +- +- self._mres = self._build_mres(terminals, len(terminals)) +- +- def _build_mres(self, terminals, max_size): +- # Python sets an unreasonable group limit (currently 100) in its re module +- # Worse, the only way to know we reached it is by catching an AssertionError! +- # This function recursively tries less and less groups until it's successful. +- postfix = '$' if self.match_whole else '' +- mres = [] +- while terminals: +- pattern = u'|'.join(u'(?P<%s>%s)' % (t.name, t.pattern.to_regexp() + postfix) for t in terminals[:max_size]) +- if self.use_bytes: +- pattern = pattern.encode('latin-1') +- try: +- mre = self.re_.compile(pattern, self.g_regex_flags) +- except AssertionError: # Yes, this is what Python provides us.. :/ +- return self._build_mres(terminals, max_size//2) +- +- mres.append((mre, {i: n for n, i in mre.groupindex.items()})) +- terminals = terminals[max_size:] +- return mres +- +- def match(self, text, pos): +- for mre, type_from_index in self._mres: +- m = mre.match(text, pos) +- if m: +- return m.group(0), type_from_index[m.lastindex] +- +- +-def _regexp_has_newline(r: str): +- r"""Expressions that may indicate newlines in a regexp: +- - newlines (\n) +- - escaped newline (\\n) +- - anything but ([^...]) +- - any-char (.) when the flag (?s) exists +- - spaces (\s) +- """ +- return '\n' in r or '\\n' in r or '\\s' in r or '[^' in r or ('(?s' in r and '.' 
in r)
+-
+-
+-class LexerState:
+- """Represents the current state of the lexer as it scans the text
+- (Lexer objects are only instantiated per grammar, not per text)
+- """
+-
+- __slots__ = 'text', 'line_ctr', 'last_token'
+-
+- def __init__(self, text, line_ctr=None, last_token=None):
+- self.text = text
+- self.line_ctr = line_ctr or LineCounter(b'\n' if isinstance(text, bytes) else '\n')
+- self.last_token = last_token
+-
+- def __eq__(self, other):
+- if not isinstance(other, LexerState):
+- return NotImplemented
+-
+- return self.text is other.text and self.line_ctr == other.line_ctr and self.last_token == other.last_token
+-
+- def __copy__(self):
+- return type(self)(self.text, copy(self.line_ctr), self.last_token)
+-
+-
+-class LexerThread:
+- """A thread that ties a lexer instance and a lexer state, to be used by the parser
+- """
+-
+- def __init__(self, lexer: 'Lexer', lexer_state: LexerState):
+- self.lexer = lexer
+- self.state = lexer_state
+-
+- @classmethod
+- def from_text(cls, lexer: 'Lexer', text: str):
+- return cls(lexer, LexerState(text))
+-
+- def lex(self, parser_state):
+- return self.lexer.lex(self.state, parser_state)
+-
+- def __copy__(self):
+- return type(self)(self.lexer, copy(self.state))
+-
+- _Token = Token
+-
+-
+-_Callback = Callable[[Token], Token]
+-
+-class Lexer(ABC):
+- """Lexer interface
+-
+- Method Signatures:
+- lex(self, lexer_state, parser_state) -> Iterator[Token]
+- """
+- @abstractmethod
+- def lex(self, lexer_state: LexerState, parser_state: Any) -> Iterator[Token]:
+- return NotImplemented
+-
+- def make_lexer_state(self, text):
+- "Deprecated"
+- return LexerState(text)
+-
+-
+-class BasicLexer(Lexer):
+-
+- terminals: Collection[TerminalDef]
+- ignore_types: FrozenSet[str]
+- newline_types: FrozenSet[str]
+- user_callbacks: Dict[str, _Callback]
+- callback: Dict[str, _Callback]
+- re: ModuleType
+-
+- def __init__(self, conf: 'LexerConf') -> None:
+- terminals = list(conf.terminals)
+- assert all(isinstance(t, TerminalDef) for t in terminals), terminals
+-
+- self.re = conf.re_module
+-
+- if not conf.skip_validation:
+- # Sanitization
+- for t in terminals:
+- try:
+- self.re.compile(t.pattern.to_regexp(), conf.g_regex_flags)
+- except self.re.error:
+- raise LexError("Cannot compile token %s: %s" % (t.name, t.pattern))
+-
+- if t.pattern.min_width == 0:
+- raise LexError("Lexer does not allow zero-width terminals. 
(%s: %s)" % (t.name, t.pattern)) +- +- if not (set(conf.ignore) <= {t.name for t in terminals}): +- raise LexError("Ignore terminals are not defined: %s" % (set(conf.ignore) - {t.name for t in terminals})) +- +- # Init +- self.newline_types = frozenset(t.name for t in terminals if _regexp_has_newline(t.pattern.to_regexp())) +- self.ignore_types = frozenset(conf.ignore) +- +- terminals.sort(key=lambda x: (-x.priority, -x.pattern.max_width, -len(x.pattern.value), x.name)) +- self.terminals = terminals +- self.user_callbacks = conf.callbacks +- self.g_regex_flags = conf.g_regex_flags +- self.use_bytes = conf.use_bytes +- self.terminals_by_name = conf.terminals_by_name +- +- self._scanner = None +- +- def _build_scanner(self): +- terminals, self.callback = _create_unless(self.terminals, self.g_regex_flags, self.re, self.use_bytes) +- assert all(self.callback.values()) +- +- for type_, f in self.user_callbacks.items(): +- if type_ in self.callback: +- # Already a callback there, probably UnlessCallback +- self.callback[type_] = CallChain(self.callback[type_], f, lambda t: t.type == type_) +- else: +- self.callback[type_] = f +- +- self._scanner = Scanner(terminals, self.g_regex_flags, self.re, self.use_bytes) +- +- @property +- def scanner(self): +- if self._scanner is None: +- self._build_scanner() +- return self._scanner +- +- def match(self, text, pos): +- return self.scanner.match(text, pos) +- +- def lex(self, state: LexerState, parser_state: Any) -> Iterator[Token]: +- with suppress(EOFError): +- while True: +- yield self.next_token(state, parser_state) +- +- def next_token(self, lex_state: LexerState, parser_state: Any=None) -> Token: +- line_ctr = lex_state.line_ctr +- while line_ctr.char_pos < len(lex_state.text): +- res = self.match(lex_state.text, line_ctr.char_pos) +- if not res: +- allowed = self.scanner.allowed_types - self.ignore_types +- if not allowed: +- allowed = {""} +- raise UnexpectedCharacters(lex_state.text, line_ctr.char_pos, line_ctr.line, line_ctr.column, +- allowed=allowed, token_history=lex_state.last_token and [lex_state.last_token], +- state=parser_state, terminals_by_name=self.terminals_by_name) +- +- value, type_ = res +- +- if type_ not in self.ignore_types: +- t = Token(type_, value, line_ctr.char_pos, line_ctr.line, line_ctr.column) +- line_ctr.feed(value, type_ in self.newline_types) +- t.end_line = line_ctr.line +- t.end_column = line_ctr.column +- t.end_pos = line_ctr.char_pos +- if t.type in self.callback: +- t = self.callback[t.type](t) +- if not isinstance(t, Token): +- raise LexError("Callbacks must return a token (returned %r)" % t) +- lex_state.last_token = t +- return t +- else: +- if type_ in self.callback: +- t2 = Token(type_, value, line_ctr.char_pos, line_ctr.line, line_ctr.column) +- self.callback[type_](t2) +- line_ctr.feed(value, type_ in self.newline_types) +- +- # EOF +- raise EOFError(self) +- +- +-class ContextualLexer(Lexer): +- +- lexers: Dict[str, BasicLexer] +- root_lexer: BasicLexer +- +- def __init__(self, conf: 'LexerConf', states: Dict[str, Collection[str]], always_accept: Collection[str]=()) -> None: +- terminals = list(conf.terminals) +- terminals_by_name = conf.terminals_by_name +- +- trad_conf = copy(conf) +- trad_conf.terminals = terminals +- +- lexer_by_tokens: Dict[FrozenSet[str], BasicLexer] = {} +- self.lexers = {} +- for state, accepts in states.items(): +- key = frozenset(accepts) +- try: +- lexer = lexer_by_tokens[key] +- except KeyError: +- accepts = set(accepts) | set(conf.ignore) | set(always_accept) +- lexer_conf = 
copy(trad_conf) +- lexer_conf.terminals = [terminals_by_name[n] for n in accepts if n in terminals_by_name] +- lexer = BasicLexer(lexer_conf) +- lexer_by_tokens[key] = lexer +- +- self.lexers[state] = lexer +- +- assert trad_conf.terminals is terminals +- self.root_lexer = BasicLexer(trad_conf) +- +- def lex(self, lexer_state: LexerState, parser_state: Any) -> Iterator[Token]: +- try: +- while True: +- lexer = self.lexers[parser_state.position] +- yield lexer.next_token(lexer_state, parser_state) +- except EOFError: +- pass +- except UnexpectedCharacters as e: +- # In the contextual lexer, UnexpectedCharacters can mean that the terminal is defined, but not in the current context. +- # This tests the input against the global context, to provide a nicer error. +- try: +- last_token = lexer_state.last_token # Save last_token. Calling root_lexer.next_token will change this to the wrong token +- token = self.root_lexer.next_token(lexer_state, parser_state) +- raise UnexpectedToken(token, e.allowed, state=parser_state, token_history=[last_token], terminals_by_name=self.root_lexer.terminals_by_name) +- except UnexpectedCharacters: +- raise e # Raise the original UnexpectedCharacters. The root lexer raises it with the wrong expected set. +- +-###} +diff --git a/src/poetry/core/_vendor/lark/load_grammar.py b/src/poetry/core/_vendor/lark/load_grammar.py +deleted file mode 100644 +index fcdd9d0..0000000 +--- a/src/poetry/core/_vendor/lark/load_grammar.py ++++ /dev/null +@@ -1,1409 +0,0 @@ +-"""Parses and creates Grammar objects""" +-import hashlib +-import os.path +-import sys +-from collections import namedtuple +-from copy import copy, deepcopy +-import pkgutil +-from ast import literal_eval +-from contextlib import suppress +-from typing import List, Tuple, Union, Callable, Dict, Optional, Sequence +- +-from .utils import bfs, logger, classify_bool, is_id_continue, is_id_start, bfs_all_unique, small_factors +-from .lexer import Token, TerminalDef, PatternStr, PatternRE +- +-from .parse_tree_builder import ParseTreeBuilder +-from .parser_frontends import ParsingFrontend +-from .common import LexerConf, ParserConf +-from .grammar import RuleOptions, Rule, Terminal, NonTerminal, Symbol, TOKEN_DEFAULT_PRIORITY +-from .utils import classify, dedup_list +-from .exceptions import GrammarError, UnexpectedCharacters, UnexpectedToken, ParseError, UnexpectedInput +- +-from .tree import Tree, SlottedTree as ST +-from .visitors import Transformer, Visitor, v_args, Transformer_InPlace, Transformer_NonRecursive +-inline_args = v_args(inline=True) +- +-__path__ = os.path.dirname(__file__) +-IMPORT_PATHS = ['grammars'] +- +-EXT = '.lark' +- +-_RE_FLAGS = 'imslux' +- +-_EMPTY = Symbol('__empty__') +- +-_TERMINAL_NAMES = { +- '.' : 'DOT', +- ',' : 'COMMA', +- ':' : 'COLON', +- ';' : 'SEMICOLON', +- '+' : 'PLUS', +- '-' : 'MINUS', +- '*' : 'STAR', +- '/' : 'SLASH', +- '\\' : 'BACKSLASH', +- '|' : 'VBAR', +- '?' : 'QMARK', +- '!' 
: 'BANG', +- '@' : 'AT', +- '#' : 'HASH', +- '$' : 'DOLLAR', +- '%' : 'PERCENT', +- '^' : 'CIRCUMFLEX', +- '&' : 'AMPERSAND', +- '_' : 'UNDERSCORE', +- '<' : 'LESSTHAN', +- '>' : 'MORETHAN', +- '=' : 'EQUAL', +- '"' : 'DBLQUOTE', +- '\'' : 'QUOTE', +- '`' : 'BACKQUOTE', +- '~' : 'TILDE', +- '(' : 'LPAR', +- ')' : 'RPAR', +- '{' : 'LBRACE', +- '}' : 'RBRACE', +- '[' : 'LSQB', +- ']' : 'RSQB', +- '\n' : 'NEWLINE', +- '\r\n' : 'CRLF', +- '\t' : 'TAB', +- ' ' : 'SPACE', +-} +- +-# Grammar Parser +-TERMINALS = { +- '_LPAR': r'\(', +- '_RPAR': r'\)', +- '_LBRA': r'\[', +- '_RBRA': r'\]', +- '_LBRACE': r'\{', +- '_RBRACE': r'\}', +- 'OP': '[+*]|[?](?![a-z])', +- '_COLON': ':', +- '_COMMA': ',', +- '_OR': r'\|', +- '_DOT': r'\.(?!\.)', +- '_DOTDOT': r'\.\.', +- 'TILDE': '~', +- 'RULE_MODIFIERS': '(!|![?]?|[?]!?)(?=[_a-z])', +- 'RULE': '_?[a-z][_a-z0-9]*', +- 'TERMINAL': '_?[A-Z][_A-Z0-9]*', +- 'STRING': r'"(\\"|\\\\|[^"\n])*?"i?', +- 'REGEXP': r'/(?!/)(\\/|\\\\|[^/])*?/[%s]*' % _RE_FLAGS, +- '_NL': r'(\r?\n)+\s*', +- '_NL_OR': r'(\r?\n)+\s*\|', +- 'WS': r'[ \t]+', +- 'COMMENT': r'\s*//[^\n]*', +- 'BACKSLASH': r'\\[ ]*\n', +- '_TO': '->', +- '_IGNORE': r'%ignore', +- '_OVERRIDE': r'%override', +- '_DECLARE': r'%declare', +- '_EXTEND': r'%extend', +- '_IMPORT': r'%import', +- 'NUMBER': r'[+-]?\d+', +-} +- +-RULES = { +- 'start': ['_list'], +- '_list': ['_item', '_list _item'], +- '_item': ['rule', 'term', 'ignore', 'import', 'declare', 'override', 'extend', '_NL'], +- +- 'rule': ['rule_modifiers RULE template_params priority _COLON expansions _NL'], +- 'rule_modifiers': ['RULE_MODIFIERS', +- ''], +- 'priority': ['_DOT NUMBER', +- ''], +- 'template_params': ['_LBRACE _template_params _RBRACE', +- ''], +- '_template_params': ['RULE', +- '_template_params _COMMA RULE'], +- 'expansions': ['_expansions'], +- '_expansions': ['alias', +- '_expansions _OR alias', +- '_expansions _NL_OR alias'], +- +- '?alias': ['expansion _TO nonterminal', 'expansion'], +- 'expansion': ['_expansion'], +- +- '_expansion': ['', '_expansion expr'], +- +- '?expr': ['atom', +- 'atom OP', +- 'atom TILDE NUMBER', +- 'atom TILDE NUMBER _DOTDOT NUMBER', +- ], +- +- '?atom': ['_LPAR expansions _RPAR', +- 'maybe', +- 'value'], +- +- 'value': ['terminal', +- 'nonterminal', +- 'literal', +- 'range', +- 'template_usage'], +- +- 'terminal': ['TERMINAL'], +- 'nonterminal': ['RULE'], +- +- '?name': ['RULE', 'TERMINAL'], +- '?symbol': ['terminal', 'nonterminal'], +- +- 'maybe': ['_LBRA expansions _RBRA'], +- 'range': ['STRING _DOTDOT STRING'], +- +- 'template_usage': ['nonterminal _LBRACE _template_args _RBRACE'], +- '_template_args': ['value', +- '_template_args _COMMA value'], +- +- 'term': ['TERMINAL _COLON expansions _NL', +- 'TERMINAL _DOT NUMBER _COLON expansions _NL'], +- 'override': ['_OVERRIDE rule', +- '_OVERRIDE term'], +- 'extend': ['_EXTEND rule', +- '_EXTEND term'], +- 'ignore': ['_IGNORE expansions _NL'], +- 'declare': ['_DECLARE _declare_args _NL'], +- 'import': ['_IMPORT _import_path _NL', +- '_IMPORT _import_path _LPAR name_list _RPAR _NL', +- '_IMPORT _import_path _TO name _NL'], +- +- '_import_path': ['import_lib', 'import_rel'], +- 'import_lib': ['_import_args'], +- 'import_rel': ['_DOT _import_args'], +- '_import_args': ['name', '_import_args _DOT name'], +- +- 'name_list': ['_name_list'], +- '_name_list': ['name', '_name_list _COMMA name'], +- +- '_declare_args': ['symbol', '_declare_args symbol'], +- 'literal': ['REGEXP', 'STRING'], +-} +- +- +-# Value 5 keeps the number of states in the lalr parser somewhat minimal 
+-# It isn't optimal, but close to it. See PR #949
+-SMALL_FACTOR_THRESHOLD = 5
+-# The threshold that determines whether a repeat via ~ is split up into different rules
+-# 50 is chosen since it keeps the number of states low and therefore lalr analysis time low,
+-# while not being too aggressive and unnecessarily creating rules that might create shift/reduce conflicts.
+-# (See PR #949)
+-REPEAT_BREAK_THRESHOLD = 50
+-
+-
+-class FindRuleSize(Transformer):
+- def __init__(self, keep_all_tokens):
+- self.keep_all_tokens = keep_all_tokens
+-
+- def _will_not_get_removed(self, sym):
+- if isinstance(sym, NonTerminal):
+- return not sym.name.startswith('_')
+- if isinstance(sym, Terminal):
+- return self.keep_all_tokens or not sym.filter_out
+- if sym is _EMPTY:
+- return False
+- assert False, sym
+-
+- def _args_as_int(self, args):
+- for a in args:
+- if isinstance(a, int):
+- yield a
+- elif isinstance(a, Symbol):
+- yield 1 if self._will_not_get_removed(a) else 0
+- else:
+- assert False
+-
+- def expansion(self, args):
+- return sum(self._args_as_int(args))
+-
+- def expansions(self, args):
+- return max(self._args_as_int(args))
+-
+-
+-@inline_args
+-class EBNF_to_BNF(Transformer_InPlace):
+- def __init__(self):
+- self.new_rules = []
+- self.rules_cache = {}
+- self.prefix = 'anon'
+- self.i = 0
+- self.rule_options = None
+-
+- def _name_rule(self, inner):
+- new_name = '__%s_%s_%d' % (self.prefix, inner, self.i)
+- self.i += 1
+- return new_name
+-
+- def _add_rule(self, key, name, expansions):
+- t = NonTerminal(name)
+- self.new_rules.append((name, expansions, self.rule_options))
+- self.rules_cache[key] = t
+- return t
+-
+- def _add_recurse_rule(self, type_, expr):
+- try:
+- return self.rules_cache[expr]
+- except KeyError:
+- new_name = self._name_rule(type_)
+- t = NonTerminal(new_name)
+- tree = ST('expansions', [
+- ST('expansion', [expr]),
+- ST('expansion', [t, expr])
+- ])
+- return self._add_rule(expr, new_name, tree)
+-
+- def _add_repeat_rule(self, a, b, target, atom):
+- """Generate a rule that repeats target ``a`` times, and repeats atom ``b`` times.
+-
+- When called recursively (into target), it repeats atom for x(n) times, where:
+- x(0) = 1
+- x(n) = a(n) * x(n-1) + b
+-
+- Example rule when a=3, b=4:
+-
+- new_rule: target target target atom atom atom atom
+-
+- """
+- key = (a, b, target, atom)
+- try:
+- return self.rules_cache[key]
+- except KeyError:
+- new_name = self._name_rule('repeat_a%d_b%d' % (a, b))
+- tree = ST('expansions', [ST('expansion', [target] * a + [atom] * b)])
+- return self._add_rule(key, new_name, tree)
+-
+- def _add_repeat_opt_rule(self, a, b, target, target_opt, atom):
+- """Creates a rule that matches atom 0 to (a*n+b)-1 times. 
+-
+- When target matches atom n times, and target_opt matches atom 0 to n-1 times,
+-
+- First we generate target * i followed by target_opt, for i from 0 to a-1
+- These match 0 to n*a - 1 times atom
+-
+- Then we generate target * a followed by atom * i, for i from 0 to b-1
+- These match n*a to n*a + b-1 times atom
+-
+- The created rule will not have any shift/reduce conflicts so that it can be used with lalr
+-
+- Example rule when a=3, b=4:
+-
+- new_rule: target_opt
+- | target target_opt
+- | target target target_opt
+-
+- | target target target
+- | target target target atom
+- | target target target atom atom
+- | target target target atom atom atom
+-
+- """
+- key = (a, b, target, atom, "opt")
+- try:
+- return self.rules_cache[key]
+- except KeyError:
+- new_name = self._name_rule('repeat_a%d_b%d_opt' % (a, b))
+- tree = ST('expansions', [
+- ST('expansion', [target]*i + [target_opt]) for i in range(a)
+- ] + [
+- ST('expansion', [target]*a + [atom]*i) for i in range(b)
+- ])
+- return self._add_rule(key, new_name, tree)
+-
+- def _generate_repeats(self, rule, mn, mx):
+- """Generates a rule tree that repeats ``rule`` exactly between ``mn`` and ``mx`` times.
+- """
+- # For a small number of repeats, we can take the naive approach
+- if mx < REPEAT_BREAK_THRESHOLD:
+- return ST('expansions', [ST('expansion', [rule] * n) for n in range(mn, mx + 1)])
+-
+- # For large repeat values, we break the repetition into sub-rules.
+- # We treat ``rule~mn..mx`` as ``rule~mn rule~0..(diff=mx-mn)``.
+- # We then use small_factors to split mn and diff up into values [(a, b), ...]
+- # These values are used with the help of _add_repeat_rule and _add_repeat_opt_rule
+- # to generate a complete rule/expression that matches the corresponding number of repeats
+- mn_target = rule
+- for a, b in small_factors(mn, SMALL_FACTOR_THRESHOLD):
+- mn_target = self._add_repeat_rule(a, b, mn_target, rule)
+- if mx == mn:
+- return mn_target
+-
+- diff = mx - mn + 1 # We add one because _add_repeat_opt_rule generates rules that match one less
+- diff_factors = small_factors(diff, SMALL_FACTOR_THRESHOLD)
+- diff_target = rule # Match rule 1 times
+- diff_opt_target = ST('expansion', []) # match rule 0 times (i.e. up to 1-1 = 0 times)
+- for a, b in diff_factors[:-1]:
+- diff_opt_target = self._add_repeat_opt_rule(a, b, diff_target, diff_opt_target, rule)
+- diff_target = self._add_repeat_rule(a, b, diff_target, rule)
+-
+- a, b = diff_factors[-1]
+- diff_opt_target = self._add_repeat_opt_rule(a, b, diff_target, diff_opt_target, rule)
+-
+- return ST('expansions', [ST('expansion', [mn_target] + [diff_opt_target])])
+-
+- def expr(self, rule, op, *args):
+- if op.value == '?':
+- empty = ST('expansion', [])
+- return ST('expansions', [rule, empty])
+- elif op.value == '+':
+- # a : b c+ d
+- # -->
+- # a : b _c d
+- # _c : _c c | c;
+- return self._add_recurse_rule('plus', rule)
+- elif op.value == '*':
+- # a : b c* d
+- # -->
+- # a : b _c?
d +- # _c : _c c | c; +- new_name = self._add_recurse_rule('star', rule) +- return ST('expansions', [new_name, ST('expansion', [])]) +- elif op.value == '~': +- if len(args) == 1: +- mn = mx = int(args[0]) +- else: +- mn, mx = map(int, args) +- if mx < mn or mn < 0: +- raise GrammarError("Bad Range for %s (%d..%d isn't allowed)" % (rule, mn, mx)) +- +- return self._generate_repeats(rule, mn, mx) +- +- assert False, op +- +- def maybe(self, rule): +- keep_all_tokens = self.rule_options and self.rule_options.keep_all_tokens +- rule_size = FindRuleSize(keep_all_tokens).transform(rule) +- empty = ST('expansion', [_EMPTY] * rule_size) +- return ST('expansions', [rule, empty]) +- +- +-class SimplifyRule_Visitor(Visitor): +- +- @staticmethod +- def _flatten(tree): +- while tree.expand_kids_by_data(tree.data): +- pass +- +- def expansion(self, tree): +- # rules_list unpacking +- # a : b (c|d) e +- # --> +- # a : b c e | b d e +- # +- # In AST terms: +- # expansion(b, expansions(c, d), e) +- # --> +- # expansions( expansion(b, c, e), expansion(b, d, e) ) +- +- self._flatten(tree) +- +- for i, child in enumerate(tree.children): +- if isinstance(child, Tree) and child.data == 'expansions': +- tree.data = 'expansions' +- tree.children = [self.visit(ST('expansion', [option if i == j else other +- for j, other in enumerate(tree.children)])) +- for option in dedup_list(child.children)] +- self._flatten(tree) +- break +- +- def alias(self, tree): +- rule, alias_name = tree.children +- if rule.data == 'expansions': +- aliases = [] +- for child in tree.children[0].children: +- aliases.append(ST('alias', [child, alias_name])) +- tree.data = 'expansions' +- tree.children = aliases +- +- def expansions(self, tree): +- self._flatten(tree) +- # Ensure all children are unique +- if len(set(tree.children)) != len(tree.children): +- tree.children = dedup_list(tree.children) # dedup is expensive, so try to minimize its use +- +- +-class RuleTreeToText(Transformer): +- def expansions(self, x): +- return x +- +- def expansion(self, symbols): +- return symbols, None +- +- def alias(self, x): +- (expansion, _alias), alias = x +- assert _alias is None, (alias, expansion, '-', _alias) # Double alias not allowed +- return expansion, alias.name +- +- +-class PrepareAnonTerminals(Transformer_InPlace): +- """Create a unique list of anonymous terminals. 
Attempt to give meaningful names to them when we add them"""
+-
+- def __init__(self, terminals):
+- self.terminals = terminals
+- self.term_set = {td.name for td in self.terminals}
+- self.term_reverse = {td.pattern: td for td in terminals}
+- self.i = 0
+- self.rule_options = None
+-
+- @inline_args
+- def pattern(self, p):
+- value = p.value
+- if p in self.term_reverse and p.flags != self.term_reverse[p].pattern.flags:
+- raise GrammarError(u'Conflicting flags for the same terminal: %s' % p)
+-
+- term_name = None
+-
+- if isinstance(p, PatternStr):
+- try:
+- # If already defined, use the user-defined terminal name
+- term_name = self.term_reverse[p].name
+- except KeyError:
+- # Try to assign an indicative anon-terminal name
+- try:
+- term_name = _TERMINAL_NAMES[value]
+- except KeyError:
+- if value and is_id_continue(value) and is_id_start(value[0]) and value.upper() not in self.term_set:
+- term_name = value.upper()
+-
+- if term_name in self.term_set:
+- term_name = None
+-
+- elif isinstance(p, PatternRE):
+- if p in self.term_reverse: # Kind of a weird placement
+- term_name = self.term_reverse[p].name
+- else:
+- assert False, p
+-
+- if term_name is None:
+- term_name = '__ANON_%d' % self.i
+- self.i += 1
+-
+- if term_name not in self.term_set:
+- assert p not in self.term_reverse
+- self.term_set.add(term_name)
+- termdef = TerminalDef(term_name, p)
+- self.term_reverse[p] = termdef
+- self.terminals.append(termdef)
+-
+- filter_out = False if self.rule_options and self.rule_options.keep_all_tokens else isinstance(p, PatternStr)
+-
+- return Terminal(term_name, filter_out=filter_out)
+-
+-
+-class _ReplaceSymbols(Transformer_InPlace):
+- """Helper for ApplyTemplates"""
+-
+- def __init__(self):
+- self.names = {}
+-
+- def value(self, c):
+- if len(c) == 1 and isinstance(c[0], Symbol) and c[0].name in self.names:
+- return self.names[c[0].name]
+- return self.__default__('value', c, None)
+-
+- def template_usage(self, c):
+- name = c[0].name
+- if name in self.names:
+- return self.__default__('template_usage', [self.names[name]] + c[1:], None)
+- return self.__default__('template_usage', c, None)
+-
+-
+-class ApplyTemplates(Transformer_InPlace):
+- """Apply the templates, creating new rules that represent the used templates"""
+-
+- def __init__(self, rule_defs):
+- self.rule_defs = rule_defs
+- self.replacer = _ReplaceSymbols()
+- self.created_templates = set()
+-
+- def template_usage(self, c):
+- name = c[0].name
+- args = c[1:]
+- result_name = "%s{%s}" % (name, ",".join(a.name for a in args))
+- if result_name not in self.created_templates:
+- self.created_templates.add(result_name)
+- (_n, params, tree, options) ,= (t for t in self.rule_defs if t[0] == name)
+- assert len(params) == len(args), args
+- result_tree = deepcopy(tree)
+- self.replacer.names = dict(zip(params, args))
+- self.replacer.transform(result_tree)
+- self.rule_defs.append((result_name, [], result_tree, deepcopy(options)))
+- return NonTerminal(result_name)
+-
+-
+-def _rfind(s, choices):
+- return max(s.rfind(c) for c in choices)
+-
+-
+-def eval_escaping(s):
+- w = ''
+- i = iter(s)
+- for n in i:
+- w += n
+- if n == '\\':
+- try:
+- n2 = next(i)
+- except StopIteration:
+- raise GrammarError("Literal ended unexpectedly (bad escaping): `%r`" % s)
+- if n2 == '\\':
+- w += '\\\\'
+- elif n2 not in 'Uuxnftr':
+- w += '\\'
+- w += n2
+- w = w.replace('\\"', '"').replace("'", "\\'")
+-
+- to_eval = "u'''%s'''" % w
+- try:
+- s = literal_eval(to_eval)
+- except SyntaxError as e:
+- raise 
GrammarError(s, e) +- +- return s +- +- +-def _literal_to_pattern(literal): +- assert isinstance(literal, Token) +- v = literal.value +- flag_start = _rfind(v, '/"')+1 +- assert flag_start > 0 +- flags = v[flag_start:] +- assert all(f in _RE_FLAGS for f in flags), flags +- +- if literal.type == 'STRING' and '\n' in v: +- raise GrammarError('You cannot put newlines in string literals') +- +- if literal.type == 'REGEXP' and '\n' in v and 'x' not in flags: +- raise GrammarError('You can only use newlines in regular expressions ' +- 'with the `x` (verbose) flag') +- +- v = v[:flag_start] +- assert v[0] == v[-1] and v[0] in '"/' +- x = v[1:-1] +- +- s = eval_escaping(x) +- +- if s == "": +- raise GrammarError("Empty terminals are not allowed (%s)" % literal) +- +- if literal.type == 'STRING': +- s = s.replace('\\\\', '\\') +- return PatternStr(s, flags, raw=literal.value) +- elif literal.type == 'REGEXP': +- return PatternRE(s, flags, raw=literal.value) +- else: +- assert False, 'Invariant failed: literal.type not in ["STRING", "REGEXP"]' +- +- +-@inline_args +-class PrepareLiterals(Transformer_InPlace): +- def literal(self, literal): +- return ST('pattern', [_literal_to_pattern(literal)]) +- +- def range(self, start, end): +- assert start.type == end.type == 'STRING' +- start = start.value[1:-1] +- end = end.value[1:-1] +- assert len(eval_escaping(start)) == len(eval_escaping(end)) == 1 +- regexp = '[%s-%s]' % (start, end) +- return ST('pattern', [PatternRE(regexp)]) +- +- +-def _make_joined_pattern(regexp, flags_set): +- return PatternRE(regexp, ()) +- +-class TerminalTreeToPattern(Transformer_NonRecursive): +- def pattern(self, ps): +- p ,= ps +- return p +- +- def expansion(self, items): +- assert items +- if len(items) == 1: +- return items[0] +- +- pattern = ''.join(i.to_regexp() for i in items) +- return _make_joined_pattern(pattern, {i.flags for i in items}) +- +- def expansions(self, exps): +- if len(exps) == 1: +- return exps[0] +- +- # Do a bit of sorting to make sure that the longest option is returned +- # (Python's re module otherwise prefers just 'l' when given (l|ll) and both could match) +- exps.sort(key=lambda x: (-x.max_width, -x.min_width, -len(x.value))) +- +- pattern = '(?:%s)' % ('|'.join(i.to_regexp() for i in exps)) +- return _make_joined_pattern(pattern, {i.flags for i in exps}) +- +- def expr(self, args): +- inner, op = args[:2] +- if op == '~': +- if len(args) == 3: +- op = "{%d}" % int(args[2]) +- else: +- mn, mx = map(int, args[2:]) +- if mx < mn: +- raise GrammarError("Bad Range for %s (%d..%d isn't allowed)" % (inner, mn, mx)) +- op = "{%d,%d}" % (mn, mx) +- else: +- assert len(args) == 2 +- return PatternRE('(?:%s)%s' % (inner.to_regexp(), op), inner.flags) +- +- def maybe(self, expr): +- return self.expr(expr + ['?']) +- +- def alias(self, t): +- raise GrammarError("Aliasing not allowed in terminals (You used -> in the wrong place)") +- +- def value(self, v): +- return v[0] +- +- +-class ValidateSymbols(Transformer_InPlace): +- def value(self, v): +- v ,= v +- assert isinstance(v, (Tree, Symbol)) +- return v +- +- +-def nr_deepcopy_tree(t): +- """Deepcopy tree `t` without recursion""" +- return Transformer_NonRecursive(False).transform(t) +- +- +-class Grammar: +- +- term_defs: List[Tuple[str, Tuple[Tree, int]]] +- rule_defs: List[Tuple[str, Tuple[str, ...], Tree, RuleOptions]] +- ignore: List[str] +- +- def __init__(self, rule_defs: List[Tuple[str, Tuple[str, ...], Tree, RuleOptions]], term_defs: List[Tuple[str, Tuple[Tree, int]]], ignore: List[str]) -> None: 
+- self.term_defs = term_defs
+- self.rule_defs = rule_defs
+- self.ignore = ignore
+-
+- def compile(self, start, terminals_to_keep):
+- # We change the trees in-place (to support huge grammars)
+- # So deepcopy allows calling compile more than once.
+- term_defs = [(n, (nr_deepcopy_tree(t), p)) for n, (t, p) in self.term_defs]
+- rule_defs = [(n, p, nr_deepcopy_tree(t), o) for n, p, t, o in self.rule_defs]
+-
+- # ===================
+- # Compile Terminals
+- # ===================
+-
+- # Convert terminal-trees to strings/regexps
+-
+- for name, (term_tree, priority) in term_defs:
+- if term_tree is None: # Terminal added through %declare
+- continue
+- expansions = list(term_tree.find_data('expansion'))
+- if len(expansions) == 1 and not expansions[0].children:
+- raise GrammarError("Terminals cannot be empty (%s)" % name)
+-
+- transformer = PrepareLiterals() * TerminalTreeToPattern()
+- terminals = [TerminalDef(name, transformer.transform(term_tree), priority)
+- for name, (term_tree, priority) in term_defs if term_tree]
+-
+- # =================
+- # Compile Rules
+- # =================
+-
+- # 1. Pre-process terminals
+- anon_tokens_transf = PrepareAnonTerminals(terminals)
+- transformer = PrepareLiterals() * ValidateSymbols() * anon_tokens_transf # Adds to terminals
+-
+- # 2. Inline Templates
+-
+- transformer *= ApplyTemplates(rule_defs)
+-
+- # 3. Convert EBNF to BNF (and apply step 1 & 2)
+- ebnf_to_bnf = EBNF_to_BNF()
+- rules = []
+- i = 0
+- while i < len(rule_defs): # We have to do it like this because rule_defs might grow due to templates
+- name, params, rule_tree, options = rule_defs[i]
+- i += 1
+- if len(params) != 0: # Don't transform templates
+- continue
+- rule_options = RuleOptions(keep_all_tokens=True) if options and options.keep_all_tokens else None
+- ebnf_to_bnf.rule_options = rule_options
+- ebnf_to_bnf.prefix = name
+- anon_tokens_transf.rule_options = rule_options
+- tree = transformer.transform(rule_tree)
+- res = ebnf_to_bnf.transform(tree)
+- rules.append((name, res, options))
+- rules += ebnf_to_bnf.new_rules
+-
+- assert len(rules) == len({name for name, _t, _o in rules}), "Whoops, name collision"
+-
+- # 4.
Compile tree to Rule objects
+- rule_tree_to_text = RuleTreeToText()
+-
+- simplify_rule = SimplifyRule_Visitor()
+- compiled_rules = []
+- for rule_content in rules:
+- name, tree, options = rule_content
+- simplify_rule.visit(tree)
+- expansions = rule_tree_to_text.transform(tree)
+-
+- for i, (expansion, alias) in enumerate(expansions):
+- if alias and name.startswith('_'):
+- raise GrammarError("Rule %s is marked for expansion (it starts with an underscore) and isn't allowed to have aliases (alias=%s)"% (name, alias))
+-
+- empty_indices = [x==_EMPTY for x in expansion]
+- if any(empty_indices):
+- exp_options = copy(options) or RuleOptions()
+- exp_options.empty_indices = empty_indices
+- expansion = [x for x in expansion if x!=_EMPTY]
+- else:
+- exp_options = options
+-
+- for sym in expansion:
+- assert isinstance(sym, Symbol)
+- if sym.is_term and exp_options and exp_options.keep_all_tokens:
+- sym.filter_out = False
+- rule = Rule(NonTerminal(name), expansion, i, alias, exp_options)
+- compiled_rules.append(rule)
+-
+- # Remove duplicates of empty rules, throw error for non-empty duplicates
+- if len(set(compiled_rules)) != len(compiled_rules):
+- duplicates = classify(compiled_rules, lambda x: x)
+- for dups in duplicates.values():
+- if len(dups) > 1:
+- if dups[0].expansion:
+- raise GrammarError("Rules defined twice: %s\n\n(Might happen due to colliding expansion of optionals: [] or ?)"
+- % ''.join('\n * %s' % i for i in dups))
+-
+- # Empty rule; assert all other attributes are equal
+- assert len({(r.alias, r.order, r.options) for r in dups}) == len(dups)
+-
+- # Remove duplicates
+- compiled_rules = list(set(compiled_rules))
+-
+- # Filter out unused rules
+- while True:
+- c = len(compiled_rules)
+- used_rules = {s for r in compiled_rules
+- for s in r.expansion
+- if isinstance(s, NonTerminal)
+- and s != r.origin}
+- used_rules |= {NonTerminal(s) for s in start}
+- compiled_rules, unused = classify_bool(compiled_rules, lambda r: r.origin in used_rules)
+- for r in unused:
+- logger.debug("Unused rule: %s", r)
+- if len(compiled_rules) == c:
+- break
+-
+- # Filter out unused terminals
+- if terminals_to_keep != '*':
+- used_terms = {t.name for r in compiled_rules
+- for t in r.expansion
+- if isinstance(t, Terminal)}
+- terminals, unused = classify_bool(terminals, lambda t: t.name in used_terms or t.name in self.ignore or t.name in terminals_to_keep)
+- if unused:
+- logger.debug("Unused terminals: %s", [t.name for t in unused])
+-
+- return terminals, compiled_rules, self.ignore
+-
+-
+-PackageResource = namedtuple('PackageResource', 'pkg_name path')
+-
+-
+-class FromPackageLoader:
+- """
+- Provides a simple way of creating custom import loaders that load from packages via ``pkgutil.get_data`` instead of using `open`.
+- This allows them to be compatible even from within zip files.
+-
+- Relative imports are handled, so you can just freely use them.
+-
+- pkg_name: The name of the package. You can probably provide `__name__` most of the time
+- search_paths: All the paths that will be searched on absolute imports.
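(Editor's note: a usage sketch for the loader described above, not part of the vendored file; the package layout and grammar file name are illustrative.)

from lark import Lark

# Loads "my_grammar.lark" from the "grammars/" directory of the calling package;
# open_from_package routes any %import in that grammar through a FromPackageLoader,
# so this also works when the package lives inside a zipapp.
parser = Lark.open_from_package(__name__, 'my_grammar.lark', ('grammars',), parser='lalr')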
+- """ +- +- pkg_name: str +- search_paths: Sequence[str] +- +- def __init__(self, pkg_name: str, search_paths: Sequence[str]=("", )) -> None: +- self.pkg_name = pkg_name +- self.search_paths = search_paths +- +- def __repr__(self): +- return "%s(%r, %r)" % (type(self).__name__, self.pkg_name, self.search_paths) +- +- def __call__(self, base_path: Union[None, str, PackageResource], grammar_path: str) -> Tuple[PackageResource, str]: +- if base_path is None: +- to_try = self.search_paths +- else: +- # Check whether or not the importing grammar was loaded by this module. +- if not isinstance(base_path, PackageResource) or base_path.pkg_name != self.pkg_name: +- # Technically false, but FileNotFound doesn't exist in python2.7, and this message should never reach the end user anyway +- raise IOError() +- to_try = [base_path.path] +- +- err = None +- for path in to_try: +- full_path = os.path.join(path, grammar_path) +- try: +- text: Optional[bytes] = pkgutil.get_data(self.pkg_name, full_path) +- except IOError as e: +- err = e +- continue +- else: +- return PackageResource(self.pkg_name, full_path), (text.decode() if text else '') +- +- raise IOError('Cannot find grammar in given paths') from err +- +- +-stdlib_loader = FromPackageLoader('lark', IMPORT_PATHS) +- +- +- +-def resolve_term_references(term_dict): +- # TODO Solve with transitive closure (maybe) +- +- while True: +- changed = False +- for name, token_tree in term_dict.items(): +- if token_tree is None: # Terminal added through %declare +- continue +- for exp in token_tree.find_data('value'): +- item ,= exp.children +- if isinstance(item, NonTerminal): +- raise GrammarError("Rules aren't allowed inside terminals (%s in %s)" % (item, name)) +- elif isinstance(item, Terminal): +- try: +- term_value = term_dict[item.name] +- except KeyError: +- raise GrammarError("Terminal used but not defined: %s" % item.name) +- assert term_value is not None +- exp.children[0] = term_value +- changed = True +- else: +- assert isinstance(item, Tree) +- if not changed: +- break +- +- for name, term in term_dict.items(): +- if term: # Not just declared +- for child in term.children: +- ids = [id(x) for x in child.iter_subtrees()] +- if id(term) in ids: +- raise GrammarError("Recursion in terminal '%s' (recursion is only allowed in rules, not terminals)" % name) +- +- +- +-def symbol_from_strcase(s): +- assert isinstance(s, str) +- return Terminal(s, filter_out=s.startswith('_')) if s.isupper() else NonTerminal(s) +- +-@inline_args +-class PrepareGrammar(Transformer_InPlace): +- def terminal(self, name): +- return Terminal(str(name), filter_out=name.startswith('_')) +- +- def nonterminal(self, name): +- return NonTerminal(name.value) +- +- +-def _find_used_symbols(tree): +- assert tree.data == 'expansions' +- return {t.name for x in tree.find_data('expansion') +- for t in x.scan_values(lambda t: isinstance(t, Symbol))} +- +- +-def _get_parser(): +- try: +- return _get_parser.cache +- except AttributeError: +- terminals = [TerminalDef(name, PatternRE(value)) for name, value in TERMINALS.items()] +- +- rules = [(name.lstrip('?'), x, RuleOptions(expand1=name.startswith('?'))) +- for name, x in RULES.items()] +- rules = [Rule(NonTerminal(r), [symbol_from_strcase(s) for s in x.split()], i, None, o) +- for r, xs, o in rules for i, x in enumerate(xs)] +- +- callback = ParseTreeBuilder(rules, ST).create_callback() +- import re +- lexer_conf = LexerConf(terminals, re, ['WS', 'COMMENT', 'BACKSLASH']) +- parser_conf = ParserConf(rules, callback, ['start']) +- 
lexer_conf.lexer_type = 'basic' +- parser_conf.parser_type = 'lalr' +- _get_parser.cache = ParsingFrontend(lexer_conf, parser_conf, None) +- return _get_parser.cache +- +-GRAMMAR_ERRORS = [ +- ('Incorrect type of value', ['a: 1\n']), +- ('Unclosed parenthesis', ['a: (\n']), +- ('Unmatched closing parenthesis', ['a: )\n', 'a: [)\n', 'a: (]\n']), +- ('Expecting rule or terminal definition (missing colon)', ['a\n', 'A\n', 'a->\n', 'A->\n', 'a A\n']), +- ('Illegal name for rules or terminals', ['Aa:\n']), +- ('Alias expects lowercase name', ['a: -> "a"\n']), +- ('Unexpected colon', ['a::\n', 'a: b:\n', 'a: B:\n', 'a: "a":\n']), +- ('Misplaced operator', ['a: b??', 'a: b(?)', 'a:+\n', 'a:?\n', 'a:*\n', 'a:|*\n']), +- ('Expecting option ("|") or a new rule or terminal definition', ['a:a\n()\n']), +- ('Terminal names cannot contain dots', ['A.B\n']), +- ('Expecting rule or terminal definition', ['"a"\n']), +- ('%import expects a name', ['%import "a"\n']), +- ('%ignore expects a value', ['%ignore %import\n']), +- ] +- +-def _translate_parser_exception(parse, e): +- error = e.match_examples(parse, GRAMMAR_ERRORS, use_accepts=True) +- if error: +- return error +- elif 'STRING' in e.expected: +- return "Expecting a value" +- +-def _parse_grammar(text, name, start='start'): +- try: +- tree = _get_parser().parse(text + '\n', start) +- except UnexpectedCharacters as e: +- context = e.get_context(text) +- raise GrammarError("Unexpected input at line %d column %d in %s: \n\n%s" % +- (e.line, e.column, name, context)) +- except UnexpectedToken as e: +- context = e.get_context(text) +- error = _translate_parser_exception(_get_parser().parse, e) +- if error: +- raise GrammarError("%s, at line %s column %s\n\n%s" % (error, e.line, e.column, context)) +- raise +- +- return PrepareGrammar().transform(tree) +- +- +-def _error_repr(error): +- if isinstance(error, UnexpectedToken): +- error2 = _translate_parser_exception(_get_parser().parse, error) +- if error2: +- return error2 +- expected = ', '.join(error.accepts or error.expected) +- return "Unexpected token %r. 
Expected one of: {%s}" % (str(error.token), expected) +- else: +- return str(error) +- +-def _search_interactive_parser(interactive_parser, predicate): +- def expand(node): +- path, p = node +- for choice in p.choices(): +- t = Token(choice, '') +- try: +- new_p = p.feed_token(t) +- except ParseError: # Illegal +- pass +- else: +- yield path + (choice,), new_p +- +- for path, p in bfs_all_unique([((), interactive_parser)], expand): +- if predicate(p): +- return path, p +- +-def find_grammar_errors(text: str, start: str='start') -> List[Tuple[UnexpectedInput, str]]: +- errors = [] +- def on_error(e): +- errors.append((e, _error_repr(e))) +- +- # recover to a new line +- token_path, _ = _search_interactive_parser(e.interactive_parser.as_immutable(), lambda p: '_NL' in p.choices()) +- for token_type in token_path: +- e.interactive_parser.feed_token(Token(token_type, '')) +- e.interactive_parser.feed_token(Token('_NL', '\n')) +- return True +- +- _tree = _get_parser().parse(text + '\n', start, on_error=on_error) +- +- errors_by_line = classify(errors, lambda e: e[0].line) +- errors = [el[0] for el in errors_by_line.values()] # already sorted +- +- for e in errors: +- e[0].interactive_parser = None +- return errors +- +- +-def _get_mangle(prefix, aliases, base_mangle=None): +- def mangle(s): +- if s in aliases: +- s = aliases[s] +- else: +- if s[0] == '_': +- s = '_%s__%s' % (prefix, s[1:]) +- else: +- s = '%s__%s' % (prefix, s) +- if base_mangle is not None: +- s = base_mangle(s) +- return s +- return mangle +- +-def _mangle_definition_tree(exp, mangle): +- if mangle is None: +- return exp +- exp = deepcopy(exp) # TODO: is this needed? +- for t in exp.iter_subtrees(): +- for i, c in enumerate(t.children): +- if isinstance(c, Symbol): +- t.children[i] = c.renamed(mangle) +- +- return exp +- +-def _make_rule_tuple(modifiers_tree, name, params, priority_tree, expansions): +- if modifiers_tree.children: +- m ,= modifiers_tree.children +- expand1 = '?' in m +- keep_all_tokens = '!' 
in m +- else: +- keep_all_tokens = False +- expand1 = False +- +- if priority_tree.children: +- p ,= priority_tree.children +- priority = int(p) +- else: +- priority = None +- +- if params is not None: +- params = [t.value for t in params.children] # For the grammar parser +- +- return name, params, expansions, RuleOptions(keep_all_tokens, expand1, priority=priority, +- template_source=(name if params else None)) +- +- +-class Definition: +- def __init__(self, is_term, tree, params=(), options=None): +- self.is_term = is_term +- self.tree = tree +- self.params = tuple(params) +- self.options = options +- +-class GrammarBuilder: +- +- global_keep_all_tokens: bool +- import_paths: List[Union[str, Callable]] +- used_files: Dict[str, str] +- +- _definitions: Dict[str, Definition] +- _ignore_names: List[str] +- +- def __init__(self, global_keep_all_tokens: bool=False, import_paths: Optional[List[Union[str, Callable]]]=None, used_files: Optional[Dict[str, str]]=None) -> None: +- self.global_keep_all_tokens = global_keep_all_tokens +- self.import_paths = import_paths or [] +- self.used_files = used_files or {} +- +- self._definitions: Dict[str, Definition] = {} +- self._ignore_names: List[str] = [] +- +- def _grammar_error(self, is_term, msg, *names): +- args = {} +- for i, name in enumerate(names, start=1): +- postfix = '' if i == 1 else str(i) +- args['name' + postfix] = name +- args['type' + postfix] = lowercase_type = ("rule", "terminal")[is_term] +- args['Type' + postfix] = lowercase_type.title() +- raise GrammarError(msg.format(**args)) +- +- def _check_options(self, is_term, options): +- if is_term: +- if options is None: +- options = 1 +- elif not isinstance(options, int): +- raise GrammarError("Terminal require a single int as 'options' (e.g. priority), got %s" % (type(options),)) +- else: +- if options is None: +- options = RuleOptions() +- elif not isinstance(options, RuleOptions): +- raise GrammarError("Rules require a RuleOptions instance as 'options'") +- if self.global_keep_all_tokens: +- options.keep_all_tokens = True +- return options +- +- +- def _define(self, name, is_term, exp, params=(), options=None, *, override=False): +- if name in self._definitions: +- if not override: +- self._grammar_error(is_term, "{Type} '{name}' defined more than once", name) +- elif override: +- self._grammar_error(is_term, "Cannot override a nonexisting {type} {name}", name) +- +- if name.startswith('__'): +- self._grammar_error(is_term, 'Names starting with double-underscore are reserved (Error at {name})', name) +- +- self._definitions[name] = Definition(is_term, exp, params, self._check_options(is_term, options)) +- +- def _extend(self, name, is_term, exp, params=(), options=None): +- if name not in self._definitions: +- self._grammar_error(is_term, "Can't extend {type} {name} as it wasn't defined before", name) +- +- d = self._definitions[name] +- +- if is_term != d.is_term: +- self._grammar_error(is_term, "Cannot extend {type} {name} - one is a terminal, while the other is not.", name) +- if tuple(params) != d.params: +- self._grammar_error(is_term, "Cannot extend {type} with different parameters: {name}", name) +- +- if d.tree is None: +- self._grammar_error(is_term, "Can't extend {type} {name} - it is abstract.", name) +- +- # TODO: think about what to do with 'options' +- base = d.tree +- +- assert isinstance(base, Tree) and base.data == 'expansions' +- base.children.insert(0, exp) +- +- def _ignore(self, exp_or_name): +- if isinstance(exp_or_name, str): +- 
self._ignore_names.append(exp_or_name) +- else: +- assert isinstance(exp_or_name, Tree) +- t = exp_or_name +- if t.data == 'expansions' and len(t.children) == 1: +- t2 ,= t.children +- if t2.data=='expansion' and len(t2.children) == 1: +- item ,= t2.children +- if item.data == 'value': +- item ,= item.children +- if isinstance(item, Terminal): +- # Keep terminal name, no need to create a new definition +- self._ignore_names.append(item.name) +- return +- +- name = '__IGNORE_%d'% len(self._ignore_names) +- self._ignore_names.append(name) +- self._definitions[name] = Definition(True, t, options=TOKEN_DEFAULT_PRIORITY) +- +- def _unpack_import(self, stmt, grammar_name): +- if len(stmt.children) > 1: +- path_node, arg1 = stmt.children +- else: +- path_node, = stmt.children +- arg1 = None +- +- if isinstance(arg1, Tree): # Multi import +- dotted_path = tuple(path_node.children) +- names = arg1.children +- aliases = dict(zip(names, names)) # Can't have aliased multi import, so all aliases will be the same as names +- else: # Single import +- dotted_path = tuple(path_node.children[:-1]) +- if not dotted_path: +- name ,= path_node.children +- raise GrammarError("Nothing was imported from grammar `%s`" % name) +- name = path_node.children[-1] # Get name from dotted path +- aliases = {name.value: (arg1 or name).value} # Aliases if exist +- +- if path_node.data == 'import_lib': # Import from library +- base_path = None +- else: # Relative import +- if grammar_name == '': # Import relative to script file path if grammar is coded in script +- try: +- base_file = os.path.abspath(sys.modules['__main__'].__file__) +- except AttributeError: +- base_file = None +- else: +- base_file = grammar_name # Import relative to grammar file path if external grammar file +- if base_file: +- if isinstance(base_file, PackageResource): +- base_path = PackageResource(base_file.pkg_name, os.path.split(base_file.path)[0]) +- else: +- base_path = os.path.split(base_file)[0] +- else: +- base_path = os.path.abspath(os.path.curdir) +- +- return dotted_path, base_path, aliases +- +- def _unpack_definition(self, tree, mangle): +- +- if tree.data == 'rule': +- name, params, exp, opts = _make_rule_tuple(*tree.children) +- is_term = False +- else: +- name = tree.children[0].value +- params = () # TODO terminal templates +- opts = int(tree.children[1]) if len(tree.children) == 3 else TOKEN_DEFAULT_PRIORITY # priority +- exp = tree.children[-1] +- is_term = True +- +- if mangle is not None: +- params = tuple(mangle(p) for p in params) +- name = mangle(name) +- +- exp = _mangle_definition_tree(exp, mangle) +- return name, is_term, exp, params, opts +- +- +- def load_grammar(self, grammar_text: str, grammar_name: str="", mangle: Optional[Callable[[str], str]]=None) -> None: +- tree = _parse_grammar(grammar_text, grammar_name) +- +- imports: Dict[Tuple[str, ...], Tuple[Optional[str], Dict[str, str]]] = {} +- +- for stmt in tree.children: +- if stmt.data == 'import': +- dotted_path, base_path, aliases = self._unpack_import(stmt, grammar_name) +- try: +- import_base_path, import_aliases = imports[dotted_path] +- assert base_path == import_base_path, 'Inconsistent base_path for %s.' 
% '.'.join(dotted_path) +- import_aliases.update(aliases) +- except KeyError: +- imports[dotted_path] = base_path, aliases +- +- for dotted_path, (base_path, aliases) in imports.items(): +- self.do_import(dotted_path, base_path, aliases, mangle) +- +- for stmt in tree.children: +- if stmt.data in ('term', 'rule'): +- self._define(*self._unpack_definition(stmt, mangle)) +- elif stmt.data == 'override': +- r ,= stmt.children +- self._define(*self._unpack_definition(r, mangle), override=True) +- elif stmt.data == 'extend': +- r ,= stmt.children +- self._extend(*self._unpack_definition(r, mangle)) +- elif stmt.data == 'ignore': +- # if mangle is not None, we shouldn't apply ignore, since we aren't in a toplevel grammar +- if mangle is None: +- self._ignore(*stmt.children) +- elif stmt.data == 'declare': +- for symbol in stmt.children: +- assert isinstance(symbol, Symbol), symbol +- is_term = isinstance(symbol, Terminal) +- if mangle is None: +- name = symbol.name +- else: +- name = mangle(symbol.name) +- self._define(name, is_term, None) +- elif stmt.data == 'import': +- pass +- else: +- assert False, stmt +- +- +- term_defs = { name: d.tree +- for name, d in self._definitions.items() +- if d.is_term +- } +- resolve_term_references(term_defs) +- +- +- def _remove_unused(self, used): +- def rule_dependencies(symbol): +- try: +- d = self._definitions[symbol] +- except KeyError: +- return [] +- if d.is_term: +- return [] +- return _find_used_symbols(d.tree) - set(d.params) +- +- _used = set(bfs(used, rule_dependencies)) +- self._definitions = {k: v for k, v in self._definitions.items() if k in _used} +- +- +- def do_import(self, dotted_path: Tuple[str, ...], base_path: Optional[str], aliases: Dict[str, str], base_mangle: Optional[Callable[[str], str]]=None) -> None: +- assert dotted_path +- mangle = _get_mangle('__'.join(dotted_path), aliases, base_mangle) +- grammar_path = os.path.join(*dotted_path) + EXT +- to_try = self.import_paths + ([base_path] if base_path is not None else []) + [stdlib_loader] +- for source in to_try: +- try: +- if callable(source): +- joined_path, text = source(base_path, grammar_path) +- else: +- joined_path = os.path.join(source, grammar_path) +- with open(joined_path, encoding='utf8') as f: +- text = f.read() +- except IOError: +- continue +- else: +- h = hashlib.md5(text.encode('utf8')).hexdigest() +- if self.used_files.get(joined_path, h) != h: +- raise RuntimeError("Grammar file was changed during importing") +- self.used_files[joined_path] = h +- +- gb = GrammarBuilder(self.global_keep_all_tokens, self.import_paths, self.used_files) +- gb.load_grammar(text, joined_path, mangle) +- gb._remove_unused(map(mangle, aliases)) +- for name in gb._definitions: +- if name in self._definitions: +- raise GrammarError("Cannot import '%s' from '%s': Symbol already defined." % (name, grammar_path)) +- +- self._definitions.update(**gb._definitions) +- break +- else: +- # Search failed. Make Python throw a nice error. 
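# [Editor's note: illustrative annotation, not a patch line] The bare open()
# below runs only after every loader has failed (import_paths, then base_path,
# then stdlib_loader); it is expected to raise FileNotFoundError, whose message
# names the missing grammar file. A runnable sketch of the machinery this
# serves, assuming only that the `lark` package is installed: the
# `%import common.WORD` below resolves through stdlib_loader, the
# FromPackageLoader over lark's bundled grammars defined earlier in this file.
from lark import Lark
parser = Lark('start: WORD\n%import common.WORD\n%import common.WS\n%ignore WS')
print(parser.parse('hello').children)   # -> [Token('WORD', 'hello')]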
+- open(grammar_path, encoding='utf8') +- assert False, "Couldn't import grammar %s, but a corresponding file was found at a place where lark doesn't search for it" % (dotted_path,) +- +- +- def validate(self) -> None: +- for name, d in self._definitions.items(): +- params = d.params +- exp = d.tree +- +- for i, p in enumerate(params): +- if p in self._definitions: +- raise GrammarError("Template Parameter conflicts with rule %s (in template %s)" % (p, name)) +- if p in params[:i]: +- raise GrammarError("Duplicate Template Parameter %s (in template %s)" % (p, name)) +- +- if exp is None: # Remaining checks don't apply to abstract rules/terminals (created with %declare) +- continue +- +- for temp in exp.find_data('template_usage'): +- sym = temp.children[0].name +- args = temp.children[1:] +- if sym not in params: +- if sym not in self._definitions: +- self._grammar_error(d.is_term, "Template '%s' used but not defined (in {type} {name})" % sym, name) +- if len(args) != len(self._definitions[sym].params): +- expected, actual = len(self._definitions[sym].params), len(args) +- self._grammar_error(d.is_term, "Wrong number of template arguments used for {name} " +- "(expected %s, got %s) (in {type2} {name2})" % (expected, actual), sym, name) +- +- for sym in _find_used_symbols(exp): +- if sym not in self._definitions and sym not in params: +- self._grammar_error(d.is_term, "{Type} '{name}' used but not defined (in {type2} {name2})", sym, name) +- +- if not set(self._definitions).issuperset(self._ignore_names): +- raise GrammarError("Terminals %s were marked to ignore but were not defined!" % (set(self._ignore_names) - set(self._definitions))) +- +- def build(self) -> Grammar: +- self.validate() +- rule_defs = [] +- term_defs = [] +- for name, d in self._definitions.items(): +- (params, exp, options) = d.params, d.tree, d.options +- if d.is_term: +- assert len(params) == 0 +- term_defs.append((name, (exp, options))) +- else: +- rule_defs.append((name, params, exp, options)) +- # resolve_term_references(term_defs) +- return Grammar(rule_defs, term_defs, self._ignore_names) +- +- +-def verify_used_files(file_hashes): +- for path, old in file_hashes.items(): +- text = None +- if isinstance(path, str) and os.path.exists(path): +- with open(path, encoding='utf8') as f: +- text = f.read() +- elif isinstance(path, PackageResource): +- with suppress(IOError): +- text = pkgutil.get_data(*path).decode('utf-8') +- if text is None: # We don't know how to load the path. ignore it. 
+- continue +- +- current = hashlib.md5(text.encode()).hexdigest() +- if old != current: +- logger.info("File %r changed, rebuilding Parser" % path) +- return False +- return True +- +-def list_grammar_imports(grammar, import_paths=[]): +- "Returns a list of paths to the lark grammars imported by the given grammar (recursively)" +- builder = GrammarBuilder(False, import_paths) +- builder.load_grammar(grammar, '') +- return list(builder.used_files.keys()) +- +-def load_grammar(grammar, source, import_paths, global_keep_all_tokens): +- builder = GrammarBuilder(global_keep_all_tokens, import_paths) +- builder.load_grammar(grammar, source) +- return builder.build(), builder.used_files +diff --git a/src/poetry/core/_vendor/lark/parse_tree_builder.py b/src/poetry/core/_vendor/lark/parse_tree_builder.py +deleted file mode 100644 +index 888cc73..0000000 +--- a/src/poetry/core/_vendor/lark/parse_tree_builder.py ++++ /dev/null +@@ -1,385 +0,0 @@ +-from typing import List +- +-from .exceptions import GrammarError, ConfigurationError +-from .lexer import Token +-from .tree import Tree +-from .visitors import Transformer_InPlace +-from .visitors import _vargs_meta, _vargs_meta_inline +- +-###{standalone +-from functools import partial, wraps +-from itertools import repeat, product +- +- +-class ExpandSingleChild: +- def __init__(self, node_builder): +- self.node_builder = node_builder +- +- def __call__(self, children): +- if len(children) == 1: +- return children[0] +- else: +- return self.node_builder(children) +- +- +- +-class PropagatePositions: +- def __init__(self, node_builder, node_filter=None): +- self.node_builder = node_builder +- self.node_filter = node_filter +- +- def __call__(self, children): +- res = self.node_builder(children) +- +- if isinstance(res, Tree): +- # Calculate positions while the tree is streaming, according to the rule: +- # - nodes start at the start of their first child's container, +- # and end at the end of their last child's container. +- # Containers are nodes that take up space in text, but have been inlined in the tree. +- +- res_meta = res.meta +- +- first_meta = self._pp_get_meta(children) +- if first_meta is not None: +- if not hasattr(res_meta, 'line'): +- # meta was already set, probably because the rule has been inlined (e.g. 
`?rule`) +- res_meta.line = getattr(first_meta, 'container_line', first_meta.line) +- res_meta.column = getattr(first_meta, 'container_column', first_meta.column) +- res_meta.start_pos = getattr(first_meta, 'container_start_pos', first_meta.start_pos) +- res_meta.empty = False +- +- res_meta.container_line = getattr(first_meta, 'container_line', first_meta.line) +- res_meta.container_column = getattr(first_meta, 'container_column', first_meta.column) +- +- last_meta = self._pp_get_meta(reversed(children)) +- if last_meta is not None: +- if not hasattr(res_meta, 'end_line'): +- res_meta.end_line = getattr(last_meta, 'container_end_line', last_meta.end_line) +- res_meta.end_column = getattr(last_meta, 'container_end_column', last_meta.end_column) +- res_meta.end_pos = getattr(last_meta, 'container_end_pos', last_meta.end_pos) +- res_meta.empty = False +- +- res_meta.container_end_line = getattr(last_meta, 'container_end_line', last_meta.end_line) +- res_meta.container_end_column = getattr(last_meta, 'container_end_column', last_meta.end_column) +- +- return res +- +- def _pp_get_meta(self, children): +- for c in children: +- if self.node_filter is not None and not self.node_filter(c): +- continue +- if isinstance(c, Tree): +- if not c.meta.empty: +- return c.meta +- elif isinstance(c, Token): +- return c +- +-def make_propagate_positions(option): +- if callable(option): +- return partial(PropagatePositions, node_filter=option) +- elif option is True: +- return PropagatePositions +- elif option is False: +- return None +- +- raise ConfigurationError('Invalid option for propagate_positions: %r' % option) +- +- +-class ChildFilter: +- def __init__(self, to_include, append_none, node_builder): +- self.node_builder = node_builder +- self.to_include = to_include +- self.append_none = append_none +- +- def __call__(self, children): +- filtered = [] +- +- for i, to_expand, add_none in self.to_include: +- if add_none: +- filtered += [None] * add_none +- if to_expand: +- filtered += children[i].children +- else: +- filtered.append(children[i]) +- +- if self.append_none: +- filtered += [None] * self.append_none +- +- return self.node_builder(filtered) +- +- +-class ChildFilterLALR(ChildFilter): +- """Optimized childfilter for LALR (assumes no duplication in parse tree, so it's safe to change it)""" +- +- def __call__(self, children): +- filtered = [] +- for i, to_expand, add_none in self.to_include: +- if add_none: +- filtered += [None] * add_none +- if to_expand: +- if filtered: +- filtered += children[i].children +- else: # Optimize for left-recursion +- filtered = children[i].children +- else: +- filtered.append(children[i]) +- +- if self.append_none: +- filtered += [None] * self.append_none +- +- return self.node_builder(filtered) +- +- +-class ChildFilterLALR_NoPlaceholders(ChildFilter): +- "Optimized childfilter for LALR (assumes no duplication in parse tree, so it's safe to change it)" +- def __init__(self, to_include, node_builder): +- self.node_builder = node_builder +- self.to_include = to_include +- +- def __call__(self, children): +- filtered = [] +- for i, to_expand in self.to_include: +- if to_expand: +- if filtered: +- filtered += children[i].children +- else: # Optimize for left-recursion +- filtered = children[i].children +- else: +- filtered.append(children[i]) +- return self.node_builder(filtered) +- +- +-def _should_expand(sym): +- return not sym.is_term and sym.name.startswith('_') +- +- +-def maybe_create_child_filter(expansion, keep_all_tokens, ambiguous, _empty_indices: 
List[bool]): +- # Prepare empty_indices as: How many Nones to insert at each index? +- if _empty_indices: +- assert _empty_indices.count(False) == len(expansion) +- s = ''.join(str(int(b)) for b in _empty_indices) +- empty_indices = [len(ones) for ones in s.split('0')] +- assert len(empty_indices) == len(expansion)+1, (empty_indices, len(expansion)) +- else: +- empty_indices = [0] * (len(expansion)+1) +- +- to_include = [] +- nones_to_add = 0 +- for i, sym in enumerate(expansion): +- nones_to_add += empty_indices[i] +- if keep_all_tokens or not (sym.is_term and sym.filter_out): +- to_include.append((i, _should_expand(sym), nones_to_add)) +- nones_to_add = 0 +- +- nones_to_add += empty_indices[len(expansion)] +- +- if _empty_indices or len(to_include) < len(expansion) or any(to_expand for i, to_expand,_ in to_include): +- if _empty_indices or ambiguous: +- return partial(ChildFilter if ambiguous else ChildFilterLALR, to_include, nones_to_add) +- else: +- # LALR without placeholders +- return partial(ChildFilterLALR_NoPlaceholders, [(i, x) for i,x,_ in to_include]) +- +- +-class AmbiguousExpander: +- """Deal with the case where we're expanding children ('_rule') into a parent but the children +- are ambiguous. i.e. (parent->_ambig->_expand_this_rule). In this case, make the parent itself +- ambiguous with as many copies as their are ambiguous children, and then copy the ambiguous children +- into the right parents in the right places, essentially shifting the ambiguity up the tree.""" +- def __init__(self, to_expand, tree_class, node_builder): +- self.node_builder = node_builder +- self.tree_class = tree_class +- self.to_expand = to_expand +- +- def __call__(self, children): +- def _is_ambig_tree(t): +- return hasattr(t, 'data') and t.data == '_ambig' +- +- # -- When we're repeatedly expanding ambiguities we can end up with nested ambiguities. +- # All children of an _ambig node should be a derivation of that ambig node, hence +- # it is safe to assume that if we see an _ambig node nested within an ambig node +- # it is safe to simply expand it into the parent _ambig node as an alternative derivation. +- ambiguous = [] +- for i, child in enumerate(children): +- if _is_ambig_tree(child): +- if i in self.to_expand: +- ambiguous.append(i) +- +- child.expand_kids_by_data('_ambig') +- +- if not ambiguous: +- return self.node_builder(children) +- +- expand = [iter(child.children) if i in ambiguous else repeat(child) for i, child in enumerate(children)] +- return self.tree_class('_ambig', [self.node_builder(list(f[0])) for f in product(zip(*expand))]) +- +- +-def maybe_create_ambiguous_expander(tree_class, expansion, keep_all_tokens): +- to_expand = [i for i, sym in enumerate(expansion) +- if keep_all_tokens or ((not (sym.is_term and sym.filter_out)) and _should_expand(sym))] +- if to_expand: +- return partial(AmbiguousExpander, to_expand, tree_class) +- +- +-class AmbiguousIntermediateExpander: +- """ +- Propagate ambiguous intermediate nodes and their derivations up to the +- current rule. +- +- In general, converts +- +- rule +- _iambig +- _inter +- someChildren1 +- ... +- _inter +- someChildren2 +- ... +- someChildren3 +- ... +- +- to +- +- _ambig +- rule +- someChildren1 +- ... +- someChildren3 +- ... +- rule +- someChildren2 +- ... +- someChildren3 +- ... +- rule +- childrenFromNestedIambigs +- ... +- someChildren3 +- ... +- ... +- +- propagating up any nested '_iambig' nodes along the way. 
+- """ +- +- def __init__(self, tree_class, node_builder): +- self.node_builder = node_builder +- self.tree_class = tree_class +- +- def __call__(self, children): +- def _is_iambig_tree(child): +- return hasattr(child, 'data') and child.data == '_iambig' +- +- def _collapse_iambig(children): +- """ +- Recursively flatten the derivations of the parent of an '_iambig' +- node. Returns a list of '_inter' nodes guaranteed not +- to contain any nested '_iambig' nodes, or None if children does +- not contain an '_iambig' node. +- """ +- +- # Due to the structure of the SPPF, +- # an '_iambig' node can only appear as the first child +- if children and _is_iambig_tree(children[0]): +- iambig_node = children[0] +- result = [] +- for grandchild in iambig_node.children: +- collapsed = _collapse_iambig(grandchild.children) +- if collapsed: +- for child in collapsed: +- child.children += children[1:] +- result += collapsed +- else: +- new_tree = self.tree_class('_inter', grandchild.children + children[1:]) +- result.append(new_tree) +- return result +- +- collapsed = _collapse_iambig(children) +- if collapsed: +- processed_nodes = [self.node_builder(c.children) for c in collapsed] +- return self.tree_class('_ambig', processed_nodes) +- +- return self.node_builder(children) +- +- +- +-def inplace_transformer(func): +- @wraps(func) +- def f(children): +- # function name in a Transformer is a rule name. +- tree = Tree(func.__name__, children) +- return func(tree) +- return f +- +- +-def apply_visit_wrapper(func, name, wrapper): +- if wrapper is _vargs_meta or wrapper is _vargs_meta_inline: +- raise NotImplementedError("Meta args not supported for internal transformer") +- +- @wraps(func) +- def f(children): +- return wrapper(func, name, children, None) +- return f +- +- +-class ParseTreeBuilder: +- def __init__(self, rules, tree_class, propagate_positions=False, ambiguous=False, maybe_placeholders=False): +- self.tree_class = tree_class +- self.propagate_positions = propagate_positions +- self.ambiguous = ambiguous +- self.maybe_placeholders = maybe_placeholders +- +- self.rule_builders = list(self._init_builders(rules)) +- +- def _init_builders(self, rules): +- propagate_positions = make_propagate_positions(self.propagate_positions) +- +- for rule in rules: +- options = rule.options +- keep_all_tokens = options.keep_all_tokens +- expand_single_child = options.expand1 +- +- wrapper_chain = list(filter(None, [ +- (expand_single_child and not rule.alias) and ExpandSingleChild, +- maybe_create_child_filter(rule.expansion, keep_all_tokens, self.ambiguous, options.empty_indices if self.maybe_placeholders else None), +- propagate_positions, +- self.ambiguous and maybe_create_ambiguous_expander(self.tree_class, rule.expansion, keep_all_tokens), +- self.ambiguous and partial(AmbiguousIntermediateExpander, self.tree_class) +- ])) +- +- yield rule, wrapper_chain +- +- def create_callback(self, transformer=None): +- callbacks = {} +- +- default_handler = getattr(transformer, '__default__', None) +- if default_handler: +- def default_callback(data, children): +- return default_handler(data, children, None) +- else: +- default_callback = self.tree_class +- +- for rule, wrapper_chain in self.rule_builders: +- +- user_callback_name = rule.alias or rule.options.template_source or rule.origin.name +- try: +- f = getattr(transformer, user_callback_name) +- wrapper = getattr(f, 'visit_wrapper', None) +- if wrapper is not None: +- f = apply_visit_wrapper(f, user_callback_name, wrapper) +- elif isinstance(transformer, 
Transformer_InPlace): +- f = inplace_transformer(f) +- except AttributeError: +- f = partial(default_callback, user_callback_name) +- +- for w in wrapper_chain: +- f = w(f) +- +- if rule in callbacks: +- raise GrammarError("Rule '%s' already exists" % (rule,)) +- +- callbacks[rule] = f +- +- return callbacks +- +-###} +diff --git a/src/poetry/core/_vendor/lark/parser_frontends.py b/src/poetry/core/_vendor/lark/parser_frontends.py +deleted file mode 100644 +index e162edf..0000000 +--- a/src/poetry/core/_vendor/lark/parser_frontends.py ++++ /dev/null +@@ -1,245 +0,0 @@ +-from typing import Any, Callable, Dict, Tuple +- +-from .exceptions import ConfigurationError, GrammarError, assert_config +-from .utils import get_regexp_width, Serialize +-from .parsers.grammar_analysis import GrammarAnalyzer +-from .lexer import LexerThread, BasicLexer, ContextualLexer, Lexer +-from .parsers import earley, xearley, cyk +-from .parsers.lalr_parser import LALR_Parser +-from .tree import Tree +-from .common import LexerConf, ParserConf, _ParserArgType, _LexerArgType +- +-###{standalone +- +-def _wrap_lexer(lexer_class): +- future_interface = getattr(lexer_class, '__future_interface__', False) +- if future_interface: +- return lexer_class +- else: +- class CustomLexerWrapper(Lexer): +- def __init__(self, lexer_conf): +- self.lexer = lexer_class(lexer_conf) +- def lex(self, lexer_state, parser_state): +- return self.lexer.lex(lexer_state.text) +- return CustomLexerWrapper +- +- +-def _deserialize_parsing_frontend(data, memo, lexer_conf, callbacks, options): +- parser_conf = ParserConf.deserialize(data['parser_conf'], memo) +- cls = (options and options._plugins.get('LALR_Parser')) or LALR_Parser +- parser = cls.deserialize(data['parser'], memo, callbacks, options.debug) +- parser_conf.callbacks = callbacks +- return ParsingFrontend(lexer_conf, parser_conf, options, parser=parser) +- +- +-_parser_creators: 'Dict[str, Callable[[LexerConf, Any, Any], Any]]' = {} +- +- +-class ParsingFrontend(Serialize): +- __serialize_fields__ = 'lexer_conf', 'parser_conf', 'parser' +- +- def __init__(self, lexer_conf, parser_conf, options, parser=None): +- self.parser_conf = parser_conf +- self.lexer_conf = lexer_conf +- self.options = options +- +- # Set-up parser +- if parser: # From cache +- self.parser = parser +- else: +- create_parser = _parser_creators.get(parser_conf.parser_type) +- assert create_parser is not None, "{} is not supported in standalone mode".format( +- parser_conf.parser_type +- ) +- self.parser = create_parser(lexer_conf, parser_conf, options) +- +- # Set-up lexer +- lexer_type = lexer_conf.lexer_type +- self.skip_lexer = False +- if lexer_type in ('dynamic', 'dynamic_complete'): +- assert lexer_conf.postlex is None +- self.skip_lexer = True +- return +- +- try: +- create_lexer = { +- 'basic': create_basic_lexer, +- 'contextual': create_contextual_lexer, +- }[lexer_type] +- except KeyError: +- assert issubclass(lexer_type, Lexer), lexer_type +- self.lexer = _wrap_lexer(lexer_type)(lexer_conf) +- else: +- self.lexer = create_lexer(lexer_conf, self.parser, lexer_conf.postlex, options) +- +- if lexer_conf.postlex: +- self.lexer = PostLexConnector(self.lexer, lexer_conf.postlex) +- +- def _verify_start(self, start=None): +- if start is None: +- start_decls = self.parser_conf.start +- if len(start_decls) > 1: +- raise ConfigurationError("Lark initialized with more than 1 possible start rule. 
Must specify which start rule to parse", start_decls) +- start ,= start_decls +- elif start not in self.parser_conf.start: +- raise ConfigurationError("Unknown start rule %s. Must be one of %r" % (start, self.parser_conf.start)) +- return start +- +- def _make_lexer_thread(self, text): +- cls = (self.options and self.options._plugins.get('LexerThread')) or LexerThread +- return text if self.skip_lexer else cls.from_text(self.lexer, text) +- +- def parse(self, text, start=None, on_error=None): +- chosen_start = self._verify_start(start) +- kw = {} if on_error is None else {'on_error': on_error} +- stream = self._make_lexer_thread(text) +- return self.parser.parse(stream, chosen_start, **kw) +- +- def parse_interactive(self, text=None, start=None): +- chosen_start = self._verify_start(start) +- if self.parser_conf.parser_type != 'lalr': +- raise ConfigurationError("parse_interactive() currently only works with parser='lalr' ") +- stream = self._make_lexer_thread(text) +- return self.parser.parse_interactive(stream, chosen_start) +- +- +-def _validate_frontend_args(parser, lexer) -> None: +- assert_config(parser, ('lalr', 'earley', 'cyk')) +- if not isinstance(lexer, type): # not custom lexer? +- expected = { +- 'lalr': ('basic', 'contextual'), +- 'earley': ('basic', 'dynamic', 'dynamic_complete'), +- 'cyk': ('basic', ), +- }[parser] +- assert_config(lexer, expected, 'Parser %r does not support lexer %%r, expected one of %%s' % parser) +- +- +-def _get_lexer_callbacks(transformer, terminals): +- result = {} +- for terminal in terminals: +- callback = getattr(transformer, terminal.name, None) +- if callback is not None: +- result[terminal.name] = callback +- return result +- +-class PostLexConnector: +- def __init__(self, lexer, postlexer): +- self.lexer = lexer +- self.postlexer = postlexer +- +- def lex(self, lexer_state, parser_state): +- i = self.lexer.lex(lexer_state, parser_state) +- return self.postlexer.process(i) +- +- +- +-def create_basic_lexer(lexer_conf, parser, postlex, options): +- cls = (options and options._plugins.get('BasicLexer')) or BasicLexer +- return cls(lexer_conf) +- +-def create_contextual_lexer(lexer_conf, parser, postlex, options): +- cls = (options and options._plugins.get('ContextualLexer')) or ContextualLexer +- states = {idx:list(t.keys()) for idx, t in parser._parse_table.states.items()} +- always_accept = postlex.always_accept if postlex else () +- return cls(lexer_conf, states, always_accept=always_accept) +- +-def create_lalr_parser(lexer_conf, parser_conf, options=None): +- debug = options.debug if options else False +- cls = (options and options._plugins.get('LALR_Parser')) or LALR_Parser +- return cls(parser_conf, debug=debug) +- +-_parser_creators['lalr'] = create_lalr_parser +- +-###} +- +-class EarleyRegexpMatcher: +- def __init__(self, lexer_conf): +- self.regexps = {} +- for t in lexer_conf.terminals: +- regexp = t.pattern.to_regexp() +- try: +- width = get_regexp_width(regexp)[0] +- except ValueError: +- raise GrammarError("Bad regexp in token %s: %s" % (t.name, regexp)) +- else: +- if width == 0: +- raise GrammarError("Dynamic Earley doesn't allow zero-width regexps", t) +- if lexer_conf.use_bytes: +- regexp = regexp.encode('utf-8') +- +- self.regexps[t.name] = lexer_conf.re_module.compile(regexp, lexer_conf.g_regex_flags) +- +- def match(self, term, text, index=0): +- return self.regexps[term.name].match(text, index) +- +- +-def create_earley_parser__dynamic(lexer_conf, parser_conf, options=None, **kw): +- if lexer_conf.callbacks: +- raise 
GrammarError("Earley's dynamic lexer doesn't support lexer_callbacks.") +- +- earley_matcher = EarleyRegexpMatcher(lexer_conf) +- return xearley.Parser(lexer_conf, parser_conf, earley_matcher.match, **kw) +- +-def _match_earley_basic(term, token): +- return term.name == token.type +- +-def create_earley_parser__basic(lexer_conf, parser_conf, options, **kw): +- return earley.Parser(lexer_conf, parser_conf, _match_earley_basic, **kw) +- +-def create_earley_parser(lexer_conf, parser_conf, options): +- resolve_ambiguity = options.ambiguity == 'resolve' +- debug = options.debug if options else False +- tree_class = options.tree_class or Tree if options.ambiguity != 'forest' else None +- +- extra = {} +- if lexer_conf.lexer_type == 'dynamic': +- f = create_earley_parser__dynamic +- elif lexer_conf.lexer_type == 'dynamic_complete': +- extra['complete_lex'] =True +- f = create_earley_parser__dynamic +- else: +- f = create_earley_parser__basic +- +- return f(lexer_conf, parser_conf, options, resolve_ambiguity=resolve_ambiguity, debug=debug, tree_class=tree_class, **extra) +- +- +- +-class CYK_FrontEnd: +- def __init__(self, lexer_conf, parser_conf, options=None): +- self._analysis = GrammarAnalyzer(parser_conf) +- self.parser = cyk.Parser(parser_conf.rules) +- +- self.callbacks = parser_conf.callbacks +- +- def parse(self, lexer_thread, start): +- tokens = list(lexer_thread.lex(None)) +- tree = self.parser.parse(tokens, start) +- return self._transform(tree) +- +- def _transform(self, tree): +- subtrees = list(tree.iter_subtrees()) +- for subtree in subtrees: +- subtree.children = [self._apply_callback(c) if isinstance(c, Tree) else c for c in subtree.children] +- +- return self._apply_callback(tree) +- +- def _apply_callback(self, tree): +- return self.callbacks[tree.rule](tree.children) +- +- +-_parser_creators['earley'] = create_earley_parser +-_parser_creators['cyk'] = CYK_FrontEnd +- +- +-def _construct_parsing_frontend( +- parser_type: _ParserArgType, +- lexer_type: _LexerArgType, +- lexer_conf, +- parser_conf, +- options +-): +- assert isinstance(lexer_conf, LexerConf) +- assert isinstance(parser_conf, ParserConf) +- parser_conf.parser_type = parser_type +- lexer_conf.lexer_type = lexer_type +- return ParsingFrontend(lexer_conf, parser_conf, options) +diff --git a/src/poetry/core/_vendor/lark/parsers/cyk.py b/src/poetry/core/_vendor/lark/parsers/cyk.py +deleted file mode 100644 +index 82818cc..0000000 +--- a/src/poetry/core/_vendor/lark/parsers/cyk.py ++++ /dev/null +@@ -1,345 +0,0 @@ +-"""This module implements a CYK parser.""" +- +-# Author: https://github.com/ehudt (2018) +-# +-# Adapted by Erez +- +- +-from collections import defaultdict +-import itertools +- +-from ..exceptions import ParseError +-from ..lexer import Token +-from ..tree import Tree +-from ..grammar import Terminal as T, NonTerminal as NT, Symbol +- +-try: +- xrange +-except NameError: +- xrange = range +- +-def match(t, s): +- assert isinstance(t, T) +- return t.name == s.type +- +- +-class Rule: +- """Context-free grammar rule.""" +- +- def __init__(self, lhs, rhs, weight, alias): +- super(Rule, self).__init__() +- assert isinstance(lhs, NT), lhs +- assert all(isinstance(x, NT) or isinstance(x, T) for x in rhs), rhs +- self.lhs = lhs +- self.rhs = rhs +- self.weight = weight +- self.alias = alias +- +- def __str__(self): +- return '%s -> %s' % (str(self.lhs), ' '.join(str(x) for x in self.rhs)) +- +- def __repr__(self): +- return str(self) +- +- def __hash__(self): +- return hash((self.lhs, tuple(self.rhs))) +- +- def 
__eq__(self, other): +- return self.lhs == other.lhs and self.rhs == other.rhs +- +- def __ne__(self, other): +- return not (self == other) +- +- +-class Grammar: +- """Context-free grammar.""" +- +- def __init__(self, rules): +- self.rules = frozenset(rules) +- +- def __eq__(self, other): +- return self.rules == other.rules +- +- def __str__(self): +- return '\n' + '\n'.join(sorted(repr(x) for x in self.rules)) + '\n' +- +- def __repr__(self): +- return str(self) +- +- +-# Parse tree data structures +-class RuleNode: +- """A node in the parse tree, which also contains the full rhs rule.""" +- +- def __init__(self, rule, children, weight=0): +- self.rule = rule +- self.children = children +- self.weight = weight +- +- def __repr__(self): +- return 'RuleNode(%s, [%s])' % (repr(self.rule.lhs), ', '.join(str(x) for x in self.children)) +- +- +- +-class Parser: +- """Parser wrapper.""" +- +- def __init__(self, rules): +- super(Parser, self).__init__() +- self.orig_rules = {rule: rule for rule in rules} +- rules = [self._to_rule(rule) for rule in rules] +- self.grammar = to_cnf(Grammar(rules)) +- +- def _to_rule(self, lark_rule): +- """Converts a lark rule, (lhs, rhs, callback, options), to a Rule.""" +- assert isinstance(lark_rule.origin, NT) +- assert all(isinstance(x, Symbol) for x in lark_rule.expansion) +- return Rule( +- lark_rule.origin, lark_rule.expansion, +- weight=lark_rule.options.priority if lark_rule.options.priority else 0, +- alias=lark_rule) +- +- def parse(self, tokenized, start): # pylint: disable=invalid-name +- """Parses input, which is a list of tokens.""" +- assert start +- start = NT(start) +- +- table, trees = _parse(tokenized, self.grammar) +- # Check if the parse succeeded. +- if all(r.lhs != start for r in table[(0, len(tokenized) - 1)]): +- raise ParseError('Parsing failed.') +- parse = trees[(0, len(tokenized) - 1)][start] +- return self._to_tree(revert_cnf(parse)) +- +- def _to_tree(self, rule_node): +- """Converts a RuleNode parse tree to a lark Tree.""" +- orig_rule = self.orig_rules[rule_node.rule.alias] +- children = [] +- for child in rule_node.children: +- if isinstance(child, RuleNode): +- children.append(self._to_tree(child)) +- else: +- assert isinstance(child.name, Token) +- children.append(child.name) +- t = Tree(orig_rule.origin, children) +- t.rule=orig_rule +- return t +- +- +-def print_parse(node, indent=0): +- if isinstance(node, RuleNode): +- print(' ' * (indent * 2) + str(node.rule.lhs)) +- for child in node.children: +- print_parse(child, indent + 1) +- else: +- print(' ' * (indent * 2) + str(node.s)) +- +- +-def _parse(s, g): +- """Parses sentence 's' using CNF grammar 'g'.""" +- # The CYK table. Indexed with a 2-tuple: (start pos, end pos) +- table = defaultdict(set) +- # Top-level structure is similar to the CYK table. Each cell is a dict from +- # rule name to the best (lightest) tree for that rule. 
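# [Editor's note: illustrative annotation, not a patch line] Concretely: once
# the chart is filled, trees[(0, len(s) - 1)] maps every nonterminal that
# derives the whole input to its lightest RuleNode; weights come from rule
# priorities, and the lower total weight wins in the updates below.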
+- trees = defaultdict(dict) +- # Populate base case with existing terminal production rules +- for i, w in enumerate(s): +- for terminal, rules in g.terminal_rules.items(): +- if match(terminal, w): +- for rule in rules: +- table[(i, i)].add(rule) +- if (rule.lhs not in trees[(i, i)] or +- rule.weight < trees[(i, i)][rule.lhs].weight): +- trees[(i, i)][rule.lhs] = RuleNode(rule, [T(w)], weight=rule.weight) +- +- # Iterate over lengths of sub-sentences +- for l in xrange(2, len(s) + 1): +- # Iterate over sub-sentences with the given length +- for i in xrange(len(s) - l + 1): +- # Choose partition of the sub-sentence in [1, l) +- for p in xrange(i + 1, i + l): +- span1 = (i, p - 1) +- span2 = (p, i + l - 1) +- for r1, r2 in itertools.product(table[span1], table[span2]): +- for rule in g.nonterminal_rules.get((r1.lhs, r2.lhs), []): +- table[(i, i + l - 1)].add(rule) +- r1_tree = trees[span1][r1.lhs] +- r2_tree = trees[span2][r2.lhs] +- rule_total_weight = rule.weight + r1_tree.weight + r2_tree.weight +- if (rule.lhs not in trees[(i, i + l - 1)] +- or rule_total_weight < trees[(i, i + l - 1)][rule.lhs].weight): +- trees[(i, i + l - 1)][rule.lhs] = RuleNode(rule, [r1_tree, r2_tree], weight=rule_total_weight) +- return table, trees +- +- +-# This section implements context-free grammar converter to Chomsky normal form. +-# It also implements a conversion of parse trees from its CNF to the original +-# grammar. +-# Overview: +-# Applies the following operations in this order: +-# * TERM: Eliminates non-solitary terminals from all rules +-# * BIN: Eliminates rules with more than 2 symbols on their right-hand-side. +-# * UNIT: Eliminates non-terminal unit rules +-# +-# The following grammar characteristics aren't featured: +-# * Start symbol appears on RHS +-# * Empty rules (epsilon rules) +- +- +-class CnfWrapper: +- """CNF wrapper for grammar. +- +- Validates that the input grammar is CNF and provides helper data structures. +- """ +- +- def __init__(self, grammar): +- super(CnfWrapper, self).__init__() +- self.grammar = grammar +- self.rules = grammar.rules +- self.terminal_rules = defaultdict(list) +- self.nonterminal_rules = defaultdict(list) +- for r in self.rules: +- # Validate that the grammar is CNF and populate auxiliary data structures. 
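# [Editor's note: illustrative annotation, not a patch line] CNF admits
# exactly two rule shapes, and the loop below buckets rules by shape so the
# parser can look them up in O(1):
#     A -> t      (one terminal)        goes into terminal_rules[t]
#     A -> B C    (two nonterminals)    goes into nonterminal_rules[(B, C)]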
+-            assert isinstance(r.lhs, NT), r
+-            if len(r.rhs) not in [1, 2]:
+-                raise ParseError("CYK doesn't support empty rules")
+-            if len(r.rhs) == 1 and isinstance(r.rhs[0], T):
+-                self.terminal_rules[r.rhs[0]].append(r)
+-            elif len(r.rhs) == 2 and all(isinstance(x, NT) for x in r.rhs):
+-                self.nonterminal_rules[tuple(r.rhs)].append(r)
+-            else:
+-                assert False, r
+-
+-    def __eq__(self, other):
+-        return self.grammar == other.grammar
+-
+-    def __repr__(self):
+-        return repr(self.grammar)
+-
+-
+-class UnitSkipRule(Rule):
+-    """A rule that records NTs that were skipped during transformation."""
+-
+-    def __init__(self, lhs, rhs, skipped_rules, weight, alias):
+-        super(UnitSkipRule, self).__init__(lhs, rhs, weight, alias)
+-        self.skipped_rules = skipped_rules
+-
+-    def __eq__(self, other):
+-        return isinstance(other, type(self)) and self.skipped_rules == other.skipped_rules
+-
+-    __hash__ = Rule.__hash__
+-
+-
+-def build_unit_skiprule(unit_rule, target_rule):
+-    skipped_rules = []
+-    if isinstance(unit_rule, UnitSkipRule):
+-        skipped_rules += unit_rule.skipped_rules
+-    skipped_rules.append(target_rule)
+-    if isinstance(target_rule, UnitSkipRule):
+-        skipped_rules += target_rule.skipped_rules
+-    return UnitSkipRule(unit_rule.lhs, target_rule.rhs, skipped_rules,
+-                        weight=unit_rule.weight + target_rule.weight, alias=unit_rule.alias)
+-
+-
+-def get_any_nt_unit_rule(g):
+-    """Returns a non-terminal unit rule from 'g', or None if there is none."""
+-    for rule in g.rules:
+-        if len(rule.rhs) == 1 and isinstance(rule.rhs[0], NT):
+-            return rule
+-    return None
+-
+-
+-def _remove_unit_rule(g, rule):
+-    """Removes 'rule' from 'g' without changing the language produced by 'g'."""
+-    new_rules = [x for x in g.rules if x != rule]
+-    refs = [x for x in g.rules if x.lhs == rule.rhs[0]]
+-    new_rules += [build_unit_skiprule(rule, ref) for ref in refs]
+-    return Grammar(new_rules)
+-
+-
+-def _split(rule):
+-    """Splits a rule whose len(rhs) > 2 into shorter rules."""
+-    rule_str = str(rule.lhs) + '__' + '_'.join(str(x) for x in rule.rhs)
+-    rule_name = '__SP_%s' % (rule_str) + '_%d'
+-    yield Rule(rule.lhs, [rule.rhs[0], NT(rule_name % 1)], weight=rule.weight, alias=rule.alias)
+-    for i in xrange(1, len(rule.rhs) - 2):
+-        yield Rule(NT(rule_name % i), [rule.rhs[i], NT(rule_name % (i + 1))], weight=0, alias='Split')
+-    yield Rule(NT(rule_name % (len(rule.rhs) - 2)), rule.rhs[-2:], weight=0, alias='Split')
+-
+-
+-def _term(g):
+-    """Applies the TERM rule on 'g' (see top comment)."""
+-    all_t = {x for rule in g.rules for x in rule.rhs if isinstance(x, T)}
+-    t_rules = {t: Rule(NT('__T_%s' % str(t)), [t], weight=0, alias='Term') for t in all_t}
+-    new_rules = []
+-    for rule in g.rules:
+-        if len(rule.rhs) > 1 and any(isinstance(x, T) for x in rule.rhs):
+-            new_rhs = [t_rules[x].lhs if isinstance(x, T) else x for x in rule.rhs]
+-            new_rules.append(Rule(rule.lhs, new_rhs, weight=rule.weight, alias=rule.alias))
+-            new_rules.extend(v for k, v in t_rules.items() if k in rule.rhs)
+-        else:
+-            new_rules.append(rule)
+-    return Grammar(new_rules)
+-
+-
+-def _bin(g):
+-    """Applies the BIN rule to 'g' (see top comment)."""
+-    new_rules = []
+-    for rule in g.rules:
+-        if len(rule.rhs) > 2:
+-            new_rules += _split(rule)
+-        else:
+-            new_rules.append(rule)
+-    return Grammar(new_rules)
+-
+-
+-def _unit(g):
+-    """Applies the UNIT rule to 'g' (see top comment)."""
+-    nt_unit_rule = get_any_nt_unit_rule(g)
+-    while nt_unit_rule:
+-        g = _remove_unit_rule(g, nt_unit_rule)
+-        nt_unit_rule = get_any_nt_unit_rule(g)
+-    return g
+-
+-
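# [Editor's sketch: illustrative annotation, not a patch line] The BIN step
# above in miniature, on rules encoded as plain (lhs, rhs) tuples. All names
# here are hypothetical; this only shows the shape of the transformation.
def bin_split(lhs, rhs, fresh):
    # A rule with len(rhs) > 2 becomes a chain of binary helper rules.
    rules = []
    while len(rhs) > 2:
        helper = fresh()
        rules.append((lhs, [rhs[0], helper]))
        lhs, rhs = helper, rhs[1:]
    rules.append((lhs, list(rhs)))
    return rules

_n = iter(range(100))
fresh = lambda: '__SP_%d' % next(_n)
# 'a -> b c d e' splits into 'a -> b __SP_0', '__SP_0 -> c __SP_1', '__SP_1 -> d e'
assert bin_split('a', ['b', 'c', 'd', 'e'], fresh) == [
    ('a', ['b', '__SP_0']), ('__SP_0', ['c', '__SP_1']), ('__SP_1', ['d', 'e'])]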
+-def to_cnf(g):
+-    """Creates a CNF grammar from a general context-free grammar 'g'."""
+-    g = _unit(_bin(_term(g)))
+-    return CnfWrapper(g)
+-
+-
+-def unroll_unit_skiprule(lhs, orig_rhs, skipped_rules, children, weight, alias):
+-    if not skipped_rules:
+-        return RuleNode(Rule(lhs, orig_rhs, weight=weight, alias=alias), children, weight=weight)
+-    else:
+-        weight = weight - skipped_rules[0].weight
+-        return RuleNode(
+-            Rule(lhs, [skipped_rules[0].lhs], weight=weight, alias=alias), [
+-                unroll_unit_skiprule(skipped_rules[0].lhs, orig_rhs,
+-                                     skipped_rules[1:], children,
+-                                     skipped_rules[0].weight, skipped_rules[0].alias)
+-            ], weight=weight)
+-
+-
+-def revert_cnf(node):
+-    """Reverts a parse tree (RuleNode) to its original non-CNF form (Node)."""
+-    if isinstance(node, T):
+-        return node
+-    # Reverts TERM rule.
+-    if node.rule.lhs.name.startswith('__T_'):
+-        return node.children[0]
+-    else:
+-        children = []
+-        for child in map(revert_cnf, node.children):
+-            # Reverts BIN rule.
+-            if isinstance(child, RuleNode) and child.rule.lhs.name.startswith('__SP_'):
+-                children += child.children
+-            else:
+-                children.append(child)
+-        # Reverts UNIT rule.
+-        if isinstance(node.rule, UnitSkipRule):
+-            return unroll_unit_skiprule(node.rule.lhs, node.rule.rhs,
+-                                        node.rule.skipped_rules, children,
+-                                        node.rule.weight, node.rule.alias)
+-        else:
+-            return RuleNode(node.rule, children)
+diff --git a/src/poetry/core/_vendor/lark/parsers/earley.py b/src/poetry/core/_vendor/lark/parsers/earley.py
+deleted file mode 100644
+index 044f7b0..0000000
+--- a/src/poetry/core/_vendor/lark/parsers/earley.py
++++ /dev/null
+@@ -1,295 +0,0 @@
+-"""This module implements an Earley parser.
+-
+-The core Earley algorithm used here is based on Elizabeth Scott's implementation, here:
+-    https://www.sciencedirect.com/science/article/pii/S1571066108001497
+-
+-That is probably the best reference for understanding the algorithm here.
+-
+-The Earley parser outputs an SPPF-tree as per that document. The SPPF tree format
+-is explained here: https://lark-parser.readthedocs.io/en/latest/_static/sppf/sppf.html
+-"""
+-
+-from collections import deque
+-
+-from ..lexer import Token
+-from ..tree import Tree
+-from ..exceptions import UnexpectedEOF, UnexpectedToken
+-from ..utils import logger
+-from .grammar_analysis import GrammarAnalyzer
+-from ..grammar import NonTerminal
+-from .earley_common import Item
+-from .earley_forest import ForestSumVisitor, SymbolNode, TokenNode, ForestToParseTree
+-
+-class Parser:
+-    def __init__(self, lexer_conf, parser_conf, term_matcher, resolve_ambiguity=True, debug=False, tree_class=Tree):
+-        analysis = GrammarAnalyzer(parser_conf)
+-        self.lexer_conf = lexer_conf
+-        self.parser_conf = parser_conf
+-        self.resolve_ambiguity = resolve_ambiguity
+-        self.debug = debug
+-        self.tree_class = tree_class
+-
+-        self.FIRST = analysis.FIRST
+-        self.NULLABLE = analysis.NULLABLE
+-        self.callbacks = parser_conf.callbacks
+-        self.predictions = {}
+-
+-        ## These could be moved to the grammar analyzer. Pre-computing these is *much* faster than
+-        # the slow 'isupper' in is_terminal.
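# [Editor's note: illustrative annotation, not a patch line] i.e. the two set
# comprehensions below trade one pass over the grammar at construction time
# for O(1) membership tests inside the hot predict/complete/scan loops.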
+-        self.TERMINALS = { sym for r in parser_conf.rules for sym in r.expansion if sym.is_term }
+-        self.NON_TERMINALS = { sym for r in parser_conf.rules for sym in r.expansion if not sym.is_term }
+-
+-        self.forest_sum_visitor = None
+-        for rule in parser_conf.rules:
+-            if rule.origin not in self.predictions:
+-                self.predictions[rule.origin] = [x.rule for x in analysis.expand_rule(rule.origin)]
+-
+-            ## Detect if any rules/terminals have priorities set. If the user specified priority = None, then
+-            # the priorities will be stripped from all rules/terminals before they reach us, allowing us to
+-            # skip the extra tree walk. We'll also skip this if the user just didn't specify priorities
+-            # on any rules/terminals.
+-            if self.forest_sum_visitor is None and rule.options.priority is not None:
+-                self.forest_sum_visitor = ForestSumVisitor
+-
+-        # Check terminals for priorities
+-        # Ignore terminal priorities if the basic lexer is used
+-        if self.lexer_conf.lexer_type != 'basic' and self.forest_sum_visitor is None:
+-            for term in self.lexer_conf.terminals:
+-                if term.priority:
+-                    self.forest_sum_visitor = ForestSumVisitor
+-                    break
+-
+-        self.term_matcher = term_matcher
+-
+-
+-    def predict_and_complete(self, i, to_scan, columns, transitives):
+-        """The core Earley Predictor and Completer.
+-
+-        At each stage of the input, we handle any completed items (things
+-        that matched on the last cycle) and use those to predict what should
+-        come next in the input stream. The completions and any predicted
+-        non-terminals are recursively processed until we reach a set of items
+-        expecting terminals, which can be added to the scan list for the next
+-        scanner cycle."""
+-        # Held Completions (H in E. Scott's paper).
+-        node_cache = {}
+-        held_completions = {}
+-
+-        column = columns[i]
+-        # R (items) = Ei (column.items)
+-        items = deque(column)
+-        while items:
+-            item = items.pop()    # remove an element, A say, from R
+-
+-            ### The Earley completer
+-            if item.is_complete:   ### (item.s == string)
+-                if item.node is None:
+-                    label = (item.s, item.start, i)
+-                    item.node = node_cache[label] if label in node_cache else node_cache.setdefault(label, SymbolNode(*label))
+-                    item.node.add_family(item.s, item.rule, item.start, None, None)
+-
+-                # create_leo_transitives(item.rule.origin, item.start)
+-
+-                ###R Joop Leo right recursion Completer
+-                if item.rule.origin in transitives[item.start]:
+-                    transitive = transitives[item.start][item.s]
+-                    if transitive.previous in transitives[transitive.column]:
+-                        root_transitive = transitives[transitive.column][transitive.previous]
+-                    else:
+-                        root_transitive = transitive
+-
+-                    new_item = Item(transitive.rule, transitive.ptr, transitive.start)
+-                    label = (root_transitive.s, root_transitive.start, i)
+-                    new_item.node = node_cache[label] if label in node_cache else node_cache.setdefault(label, SymbolNode(*label))
+-                    new_item.node.add_path(root_transitive, item.node)
+-                    if new_item.expect in self.TERMINALS:
+-                        # Add (B :: aC.B, h, y) to Q
+-                        to_scan.add(new_item)
+-                    elif new_item not in column:
+-                        # Add (B :: aC.B, h, y) to Ei and R
+-                        column.add(new_item)
+-                        items.append(new_item)
+-                ###R Regular Earley completer
+-                else:
+-                    # Empty has 0 length. If we complete an empty symbol in a particular
+-                    # parse step, we need to be able to use that same empty symbol to complete
+-                    # any predictions that result, that themselves require empty. Avoids
+-                    # infinite recursion on empty symbols.
+-                    # held_completions is 'H' in E. Scott's paper.
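# [Editor's note: illustrative annotation, not a patch line] In other words:
# when a symbol completes with zero width at column i, its finished SPPF node
# is cached in held_completions; any item predicted later in this same cycle
# that expects that symbol is advanced immediately with the cached node,
# instead of waiting for another predict/complete round to rediscover it.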
+- is_empty_item = item.start == i +- if is_empty_item: +- held_completions[item.rule.origin] = item.node +- +- originators = [originator for originator in columns[item.start] if originator.expect is not None and originator.expect == item.s] +- for originator in originators: +- new_item = originator.advance() +- label = (new_item.s, originator.start, i) +- new_item.node = node_cache[label] if label in node_cache else node_cache.setdefault(label, SymbolNode(*label)) +- new_item.node.add_family(new_item.s, new_item.rule, i, originator.node, item.node) +- if new_item.expect in self.TERMINALS: +- # Add (B :: aC.B, h, y) to Q +- to_scan.add(new_item) +- elif new_item not in column: +- # Add (B :: aC.B, h, y) to Ei and R +- column.add(new_item) +- items.append(new_item) +- +- ### The Earley predictor +- elif item.expect in self.NON_TERMINALS: ### (item.s == lr0) +- new_items = [] +- for rule in self.predictions[item.expect]: +- new_item = Item(rule, 0, i) +- new_items.append(new_item) +- +- # Process any held completions (H). +- if item.expect in held_completions: +- new_item = item.advance() +- label = (new_item.s, item.start, i) +- new_item.node = node_cache[label] if label in node_cache else node_cache.setdefault(label, SymbolNode(*label)) +- new_item.node.add_family(new_item.s, new_item.rule, new_item.start, item.node, held_completions[item.expect]) +- new_items.append(new_item) +- +- for new_item in new_items: +- if new_item.expect in self.TERMINALS: +- to_scan.add(new_item) +- elif new_item not in column: +- column.add(new_item) +- items.append(new_item) +- +- def _parse(self, lexer, columns, to_scan, start_symbol=None): +- def is_quasi_complete(item): +- if item.is_complete: +- return True +- +- quasi = item.advance() +- while not quasi.is_complete: +- if quasi.expect not in self.NULLABLE: +- return False +- if quasi.rule.origin == start_symbol and quasi.expect == start_symbol: +- return False +- quasi = quasi.advance() +- return True +- +- # def create_leo_transitives(origin, start): +- # ... # removed at commit 4c1cfb2faf24e8f8bff7112627a00b94d261b420 +- +- def scan(i, token, to_scan): +- """The core Earley Scanner. +- +- This is a custom implementation of the scanner that uses the +- Lark lexer to match tokens. The scan list is built by the +- Earley predictor, based on the previously completed tokens. 
+- This ensures that at each phase of the parse we have a custom +- lexer context, allowing for more complex ambiguities.""" +- next_to_scan = set() +- next_set = set() +- columns.append(next_set) +- transitives.append({}) +- node_cache = {} +- +- for item in set(to_scan): +- if match(item.expect, token): +- new_item = item.advance() +- label = (new_item.s, new_item.start, i) +- # 'terminals' may not contain token.type when using %declare +- # Additionally, token is not always a Token +- # For example, it can be a Tree when using TreeMatcher +- term = terminals.get(token.type) if isinstance(token, Token) else None +- # Set the priority of the token node to 0 so that the +- # terminal priorities do not affect the Tree chosen by +- # ForestSumVisitor after the basic lexer has already +- # "used up" the terminal priorities +- token_node = TokenNode(token, term, priority=0) +- new_item.node = node_cache[label] if label in node_cache else node_cache.setdefault(label, SymbolNode(*label)) +- new_item.node.add_family(new_item.s, item.rule, new_item.start, item.node, token_node) +- +- if new_item.expect in self.TERMINALS: +- # add (B ::= Aai+1.B, h, y) to Q' +- next_to_scan.add(new_item) +- else: +- # add (B ::= Aa+1.B, h, y) to Ei+1 +- next_set.add(new_item) +- +- if not next_set and not next_to_scan: +- expect = {i.expect.name for i in to_scan} +- raise UnexpectedToken(token, expect, considered_rules=set(to_scan), state=frozenset(i.s for i in to_scan)) +- +- return next_to_scan +- +- +- # Define parser functions +- match = self.term_matcher +- +- terminals = self.lexer_conf.terminals_by_name +- +- # Cache for nodes & tokens created in a particular parse step. +- transitives = [{}] +- +- ## The main Earley loop. +- # Run the Prediction/Completion cycle for any Items in the current Earley set. +- # Completions will be added to the SPPF tree, and predictions will be recursively +- # processed down to terminals/empty nodes to be added to the scanner for the next +- # step. +- expects = {i.expect for i in to_scan} +- i = 0 +- for token in lexer.lex(expects): +- self.predict_and_complete(i, to_scan, columns, transitives) +- +- to_scan = scan(i, token, to_scan) +- i += 1 +- +- expects.clear() +- expects |= {i.expect for i in to_scan} +- +- self.predict_and_complete(i, to_scan, columns, transitives) +- +- ## Column is now the final column in the parse. +- assert i == len(columns)-1 +- return to_scan +- +- def parse(self, lexer, start): +- assert start, start +- start_symbol = NonTerminal(start) +- +- columns = [set()] +- to_scan = set() # The scan buffer. 'Q' in E.Scott's paper. +- +- ## Predict for the start_symbol. +- # Add predicted items to the first Earley set (for the predictor) if they +- # result in a non-terminal, or the scanner if they result in a terminal. +- for rule in self.predictions[start_symbol]: +- item = Item(rule, 0, 0) +- if item.expect in self.TERMINALS: +- to_scan.add(item) +- else: +- columns[0].add(item) +- +- to_scan = self._parse(lexer, columns, to_scan, start_symbol) +- +- # If the parse was successful, the start +- # symbol should have been completed in the last step of the Earley cycle, and will be in +- # this column. Find the item for the start_symbol, which is the root of the SPPF tree. 
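
Seen from the public API, the SPPF machinery above is what makes explicit ambiguity possible. A hedged usage sketch, assuming `lark` is installed; the grammar and input are illustrative:

```python
from lark import Lark

parser = Lark(r"""
    start: e
    e: e "+" e
     | "a"
""", parser="earley", ambiguity="explicit")

# "a+a+a" has two bracketings; the returned tree contains an `_ambig` node.
print(parser.parse("a+a+a").pretty())
```
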
+-        solutions = [n.node for n in columns[-1] if n.is_complete and n.node is not None and n.s == start_symbol and n.start == 0]
+-        if not solutions:
+-            expected_terminals = [t.expect.name for t in to_scan]
+-            raise UnexpectedEOF(expected_terminals, state=frozenset(i.s for i in to_scan))
+-
+-        if self.debug:
+-            from .earley_forest import ForestToPyDotVisitor
+-            try:
+-                debug_walker = ForestToPyDotVisitor()
+-            except ImportError:
+-                logger.warning("Cannot find dependency 'pydot', will not generate sppf debug image")
+-            else:
+-                debug_walker.visit(solutions[0], "sppf.png")
+-
+-
+-        if len(solutions) > 1:
+-            assert False, 'Earley should not generate multiple start symbol items!'
+-
+-        if self.tree_class is not None:
+-            # Perform our SPPF -> AST conversion
+-            transformer = ForestToParseTree(self.tree_class, self.callbacks, self.forest_sum_visitor and self.forest_sum_visitor(), self.resolve_ambiguity)
+-            return transformer.transform(solutions[0])
+-
+-        # return the root of the SPPF
+-        return solutions[0]
+diff --git a/src/poetry/core/_vendor/lark/parsers/earley_common.py b/src/poetry/core/_vendor/lark/parsers/earley_common.py
+deleted file mode 100644
+index 844ff10..0000000
+--- a/src/poetry/core/_vendor/lark/parsers/earley_common.py
++++ /dev/null
+@@ -1,42 +0,0 @@
+-"""This module implements useful building blocks for the Earley parser
+-"""
+-
+-
+-class Item:
+-    "An Earley Item, the atom of the algorithm."
+-
+-    __slots__ = ('s', 'rule', 'ptr', 'start', 'is_complete', 'expect', 'previous', 'node', '_hash')
+-    def __init__(self, rule, ptr, start):
+-        self.is_complete = len(rule.expansion) == ptr
+-        self.rule = rule    # rule
+-        self.ptr = ptr      # ptr
+-        self.start = start  # j
+-        self.node = None    # w
+-        if self.is_complete:
+-            self.s = rule.origin
+-            self.expect = None
+-            self.previous = rule.expansion[ptr - 1] if ptr > 0 and len(rule.expansion) else None
+-        else:
+-            self.s = (rule, ptr)
+-            self.expect = rule.expansion[ptr]
+-            self.previous = rule.expansion[ptr - 1] if ptr > 0 and len(rule.expansion) else None
+-        self._hash = hash((self.s, self.start))
+-
+-    def advance(self):
+-        return Item(self.rule, self.ptr + 1, self.start)
+-
+-    def __eq__(self, other):
+-        return self is other or (self.s == other.s and self.start == other.start)
+-
+-    def __hash__(self):
+-        return self._hash
+-
+-    def __repr__(self):
+-        before = ( expansion.name for expansion in self.rule.expansion[:self.ptr] )
+-        after = ( expansion.name for expansion in self.rule.expansion[self.ptr:] )
+-        symbol = "{} ::= {}* {}".format(self.rule.origin.name, ' '.join(before), ' '.join(after))
+-        return '%s (%d)' % (symbol, self.start)
+-
+-
+-# class TransitiveItem(Item):
+-#   ... # removed at commit 4c1cfb2faf24e8f8bff7112627a00b94d261b420
\ No newline at end of file
+diff --git a/src/poetry/core/_vendor/lark/parsers/earley_forest.py b/src/poetry/core/_vendor/lark/parsers/earley_forest.py
+deleted file mode 100644
+index 2d602a3..0000000
+--- a/src/poetry/core/_vendor/lark/parsers/earley_forest.py
++++ /dev/null
+@@ -1,804 +0,0 @@
+-"""This module implements an SPPF implementation
+-
+-This is used as the primary output mechanism for the Earley parser
+-in order to store complex ambiguities.
+-
+-A full reference with more details is here:
+-http://www.bramvandersanden.com/post/2014/06/shared-packed-parse-forest/
+-"""
+-
+-from random import randint
+-from collections import deque
+-from operator import attrgetter
+-from importlib import import_module
+-from functools import partial
+-
+-from ..parse_tree_builder import AmbiguousIntermediateExpander
+-from ..visitors import Discard
+-from ..lexer import Token
+-from ..utils import logger
+-from ..tree import Tree
+-
+-class ForestNode:
+-    pass
+-
+-class SymbolNode(ForestNode):
+-    """
+-    A Symbol Node represents a symbol (or Intermediate LR0).
+-
+-    Symbol nodes are keyed by the symbol (s). For intermediate nodes
+-    s will be an LR0, stored as a tuple of (rule, ptr). For completed symbol
+-    nodes, s will be a string representing the non-terminal origin (i.e.
+-    the left hand side of the rule).
+-
+-    The children of a Symbol or Intermediate Node will always be Packed Nodes,
+-    with each Packed Node child representing a single derivation of a production.
+-
+-    Hence a Symbol Node with a single child is unambiguous.
+-
+-    Parameters:
+-        s: A Symbol, or a tuple of (rule, ptr) for an intermediate node.
+-        start: The index of the start of the substring matched by this symbol (inclusive).
+-        end: The index of the end of the substring matched by this symbol (exclusive).
+-
+-    Properties:
+-        is_intermediate: True if this node is an intermediate node.
+-        priority: The priority of the node's symbol.
+-    """
+-    __slots__ = ('s', 'start', 'end', '_children', 'paths', 'paths_loaded', 'priority', 'is_intermediate', '_hash')
+-    def __init__(self, s, start, end):
+-        self.s = s
+-        self.start = start
+-        self.end = end
+-        self._children = set()
+-        self.paths = set()
+-        self.paths_loaded = False
+-
+-        ### We use inf here as it can be safely negated without resorting to conditionals,
+-        #   unlike None or float('NaN'), and sorts appropriately.
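
A quick, illustrative check of the claim in that comment (this snippet is not part of the vendored code):

```python
print(-float('-inf'))                       # inf: negation needs no special case
print(sorted([0.0, float('-inf'), 1.0]))    # [-inf, 0.0, 1.0]: sorts first
nan = float('nan')
print(nan < 0.0, nan > 0.0)                 # False False: NaN breaks ordering
```
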
+- self.priority = float('-inf') +- self.is_intermediate = isinstance(s, tuple) +- self._hash = hash((self.s, self.start, self.end)) +- +- def add_family(self, lr0, rule, start, left, right): +- self._children.add(PackedNode(self, lr0, rule, start, left, right)) +- +- def add_path(self, transitive, node): +- self.paths.add((transitive, node)) +- +- def load_paths(self): +- for transitive, node in self.paths: +- if transitive.next_titem is not None: +- vn = SymbolNode(transitive.next_titem.s, transitive.next_titem.start, self.end) +- vn.add_path(transitive.next_titem, node) +- self.add_family(transitive.reduction.rule.origin, transitive.reduction.rule, transitive.reduction.start, transitive.reduction.node, vn) +- else: +- self.add_family(transitive.reduction.rule.origin, transitive.reduction.rule, transitive.reduction.start, transitive.reduction.node, node) +- self.paths_loaded = True +- +- @property +- def is_ambiguous(self): +- """Returns True if this node is ambiguous.""" +- return len(self.children) > 1 +- +- @property +- def children(self): +- """Returns a list of this node's children sorted from greatest to +- least priority.""" +- if not self.paths_loaded: self.load_paths() +- return sorted(self._children, key=attrgetter('sort_key')) +- +- def __iter__(self): +- return iter(self._children) +- +- def __eq__(self, other): +- if not isinstance(other, SymbolNode): +- return False +- return self is other or (type(self.s) == type(other.s) and self.s == other.s and self.start == other.start and self.end is other.end) +- +- def __hash__(self): +- return self._hash +- +- def __repr__(self): +- if self.is_intermediate: +- rule = self.s[0] +- ptr = self.s[1] +- before = ( expansion.name for expansion in rule.expansion[:ptr] ) +- after = ( expansion.name for expansion in rule.expansion[ptr:] ) +- symbol = "{} ::= {}* {}".format(rule.origin.name, ' '.join(before), ' '.join(after)) +- else: +- symbol = self.s.name +- return "({}, {}, {}, {})".format(symbol, self.start, self.end, self.priority) +- +-class PackedNode(ForestNode): +- """ +- A Packed Node represents a single derivation in a symbol node. +- +- Parameters: +- rule: The rule associated with this node. +- parent: The parent of this node. +- left: The left child of this node. ``None`` if one does not exist. +- right: The right child of this node. ``None`` if one does not exist. +- priority: The priority of this node. +- """ +- __slots__ = ('parent', 's', 'rule', 'start', 'left', 'right', 'priority', '_hash') +- def __init__(self, parent, s, rule, start, left, right): +- self.parent = parent +- self.s = s +- self.start = start +- self.rule = rule +- self.left = left +- self.right = right +- self.priority = float('-inf') +- self._hash = hash((self.left, self.right)) +- +- @property +- def is_empty(self): +- return self.left is None and self.right is None +- +- @property +- def sort_key(self): +- """ +- Used to sort PackedNode children of SymbolNodes. +- A SymbolNode has multiple PackedNodes if it matched +- ambiguously. Hence, we use the sort order to identify +- the order in which ambiguous children should be considered. 
+- """ +- return self.is_empty, -self.priority, self.rule.order +- +- @property +- def children(self): +- """Returns a list of this node's children.""" +- return [x for x in [self.left, self.right] if x is not None] +- +- def __iter__(self): +- yield self.left +- yield self.right +- +- def __eq__(self, other): +- if not isinstance(other, PackedNode): +- return False +- return self is other or (self.left == other.left and self.right == other.right) +- +- def __hash__(self): +- return self._hash +- +- def __repr__(self): +- if isinstance(self.s, tuple): +- rule = self.s[0] +- ptr = self.s[1] +- before = ( expansion.name for expansion in rule.expansion[:ptr] ) +- after = ( expansion.name for expansion in rule.expansion[ptr:] ) +- symbol = "{} ::= {}* {}".format(rule.origin.name, ' '.join(before), ' '.join(after)) +- else: +- symbol = self.s.name +- return "({}, {}, {}, {})".format(symbol, self.start, self.priority, self.rule.order) +- +-class TokenNode(ForestNode): +- """ +- A Token Node represents a matched terminal and is always a leaf node. +- +- Parameters: +- token: The Token associated with this node. +- term: The TerminalDef matched by the token. +- priority: The priority of this node. +- """ +- __slots__ = ('token', 'term', 'priority', '_hash') +- def __init__(self, token, term, priority=None): +- self.token = token +- self.term = term +- if priority is not None: +- self.priority = priority +- else: +- self.priority = term.priority if term is not None else 0 +- self._hash = hash(token) +- +- def __eq__(self, other): +- if not isinstance(other, TokenNode): +- return False +- return self is other or (self.token == other.token) +- +- def __hash__(self): +- return self._hash +- +- def __repr__(self): +- return repr(self.token) +- +-class ForestVisitor: +- """ +- An abstract base class for building forest visitors. +- +- This class performs a controllable depth-first walk of an SPPF. +- The visitor will not enter cycles and will backtrack if one is encountered. +- Subclasses are notified of cycles through the ``on_cycle`` method. +- +- Behavior for visit events is defined by overriding the +- ``visit*node*`` functions. +- +- The walk is controlled by the return values of the ``visit*node_in`` +- methods. Returning a node(s) will schedule them to be visited. The visitor +- will begin to backtrack if no nodes are returned. +- +- Parameters: +- single_visit: If ``True``, non-Token nodes will only be visited once. +- """ +- +- def __init__(self, single_visit=False): +- self.single_visit = single_visit +- +- def visit_token_node(self, node): +- """Called when a ``Token`` is visited. ``Token`` nodes are always leaves.""" +- pass +- +- def visit_symbol_node_in(self, node): +- """Called when a symbol node is visited. Nodes that are returned +- will be scheduled to be visited. If ``visit_intermediate_node_in`` +- is not implemented, this function will be called for intermediate +- nodes as well.""" +- pass +- +- def visit_symbol_node_out(self, node): +- """Called after all nodes returned from a corresponding ``visit_symbol_node_in`` +- call have been visited. If ``visit_intermediate_node_out`` +- is not implemented, this function will be called for intermediate +- nodes as well.""" +- pass +- +- def visit_packed_node_in(self, node): +- """Called when a packed node is visited. Nodes that are returned +- will be scheduled to be visited. 
""" +- pass +- +- def visit_packed_node_out(self, node): +- """Called after all nodes returned from a corresponding ``visit_packed_node_in`` +- call have been visited.""" +- pass +- +- def on_cycle(self, node, path): +- """Called when a cycle is encountered. +- +- Parameters: +- node: The node that causes a cycle. +- path: The list of nodes being visited: nodes that have been +- entered but not exited. The first element is the root in a forest +- visit, and the last element is the node visited most recently. +- ``path`` should be treated as read-only. +- """ +- pass +- +- def get_cycle_in_path(self, node, path): +- """A utility function for use in ``on_cycle`` to obtain a slice of +- ``path`` that only contains the nodes that make up the cycle.""" +- index = len(path) - 1 +- while id(path[index]) != id(node): +- index -= 1 +- return path[index:] +- +- def visit(self, root): +- # Visiting is a list of IDs of all symbol/intermediate nodes currently in +- # the stack. It serves two purposes: to detect when we 'recurse' in and out +- # of a symbol/intermediate so that we can process both up and down. Also, +- # since the SPPF can have cycles it allows us to detect if we're trying +- # to recurse into a node that's already on the stack (infinite recursion). +- visiting = set() +- +- # set of all nodes that have been visited +- visited = set() +- +- # a list of nodes that are currently being visited +- # used for the `on_cycle` callback +- path = [] +- +- # We do not use recursion here to walk the Forest due to the limited +- # stack size in python. Therefore input_stack is essentially our stack. +- input_stack = deque([root]) +- +- # It is much faster to cache these as locals since they are called +- # many times in large parses. +- vpno = getattr(self, 'visit_packed_node_out') +- vpni = getattr(self, 'visit_packed_node_in') +- vsno = getattr(self, 'visit_symbol_node_out') +- vsni = getattr(self, 'visit_symbol_node_in') +- vino = getattr(self, 'visit_intermediate_node_out', vsno) +- vini = getattr(self, 'visit_intermediate_node_in', vsni) +- vtn = getattr(self, 'visit_token_node') +- oc = getattr(self, 'on_cycle') +- +- while input_stack: +- current = next(reversed(input_stack)) +- try: +- next_node = next(current) +- except StopIteration: +- input_stack.pop() +- continue +- except TypeError: +- ### If the current object is not an iterator, pass through to Token/SymbolNode +- pass +- else: +- if next_node is None: +- continue +- +- if id(next_node) in visiting: +- oc(next_node, path) +- continue +- +- input_stack.append(next_node) +- continue +- +- if isinstance(current, TokenNode): +- vtn(current.token) +- input_stack.pop() +- continue +- +- current_id = id(current) +- if current_id in visiting: +- if isinstance(current, PackedNode): +- vpno(current) +- elif current.is_intermediate: +- vino(current) +- else: +- vsno(current) +- input_stack.pop() +- path.pop() +- visiting.remove(current_id) +- visited.add(current_id) +- elif self.single_visit and current_id in visited: +- input_stack.pop() +- else: +- visiting.add(current_id) +- path.append(current) +- if isinstance(current, PackedNode): +- next_node = vpni(current) +- elif current.is_intermediate: +- next_node = vini(current) +- else: +- next_node = vsni(current) +- if next_node is None: +- continue +- +- if not isinstance(next_node, ForestNode): +- next_node = iter(next_node) +- elif id(next_node) in visiting: +- oc(next_node, path) +- continue +- +- input_stack.append(next_node) +- +-class ForestTransformer(ForestVisitor): +- """The base 
class for a bottom-up forest transformation. Most users will +- want to use ``TreeForestTransformer`` instead as it has a friendlier +- interface and covers most use cases. +- +- Transformations are applied via inheritance and overriding of the +- ``transform*node`` methods. +- +- ``transform_token_node`` receives a ``Token`` as an argument. +- All other methods receive the node that is being transformed and +- a list of the results of the transformations of that node's children. +- The return value of these methods are the resulting transformations. +- +- If ``Discard`` is raised in a node's transformation, no data from that node +- will be passed to its parent's transformation. +- """ +- +- def __init__(self): +- super(ForestTransformer, self).__init__() +- # results of transformations +- self.data = dict() +- # used to track parent nodes +- self.node_stack = deque() +- +- def transform(self, root): +- """Perform a transformation on an SPPF.""" +- self.node_stack.append('result') +- self.data['result'] = [] +- self.visit(root) +- assert len(self.data['result']) <= 1 +- if self.data['result']: +- return self.data['result'][0] +- +- def transform_symbol_node(self, node, data): +- """Transform a symbol node.""" +- return node +- +- def transform_intermediate_node(self, node, data): +- """Transform an intermediate node.""" +- return node +- +- def transform_packed_node(self, node, data): +- """Transform a packed node.""" +- return node +- +- def transform_token_node(self, node): +- """Transform a ``Token``.""" +- return node +- +- def visit_symbol_node_in(self, node): +- self.node_stack.append(id(node)) +- self.data[id(node)] = [] +- return node.children +- +- def visit_packed_node_in(self, node): +- self.node_stack.append(id(node)) +- self.data[id(node)] = [] +- return node.children +- +- def visit_token_node(self, node): +- transformed = self.transform_token_node(node) +- if transformed is not Discard: +- self.data[self.node_stack[-1]].append(transformed) +- +- def _visit_node_out_helper(self, node, method): +- self.node_stack.pop() +- transformed = method(node, self.data[id(node)]) +- if transformed is not Discard: +- self.data[self.node_stack[-1]].append(transformed) +- del self.data[id(node)] +- +- def visit_symbol_node_out(self, node): +- self._visit_node_out_helper(node, self.transform_symbol_node) +- +- def visit_intermediate_node_out(self, node): +- self._visit_node_out_helper(node, self.transform_intermediate_node) +- +- def visit_packed_node_out(self, node): +- self._visit_node_out_helper(node, self.transform_packed_node) +- +- +-class ForestSumVisitor(ForestVisitor): +- """ +- A visitor for prioritizing ambiguous parts of the Forest. +- +- This visitor is used when support for explicit priorities on +- rules is requested (whether normal, or invert). It walks the +- forest (or subsets thereof) and cascades properties upwards +- from the leaves. +- +- It would be ideal to do this during parsing, however this would +- require processing each Earley item multiple times. That's +- a big performance drawback; so running a forest walk is the +- lesser of two evils: there can be significantly more Earley +- items created during parsing than there are SPPF nodes in the +- final tree. 
+-    """
+-    def __init__(self):
+-        super(ForestSumVisitor, self).__init__(single_visit=True)
+-
+-    def visit_packed_node_in(self, node):
+-        yield node.left
+-        yield node.right
+-
+-    def visit_symbol_node_in(self, node):
+-        return iter(node.children)
+-
+-    def visit_packed_node_out(self, node):
+-        priority = node.rule.options.priority if not node.parent.is_intermediate and node.rule.options.priority else 0
+-        priority += getattr(node.right, 'priority', 0)
+-        priority += getattr(node.left, 'priority', 0)
+-        node.priority = priority
+-
+-    def visit_symbol_node_out(self, node):
+-        node.priority = max(child.priority for child in node.children)
+-
+-class PackedData():
+-    """Used in transformations of packed nodes to distinguish the data
+-    that comes from the left child and the right child.
+-    """
+-
+-    class _NoData():
+-        pass
+-
+-    NO_DATA = _NoData()
+-
+-    def __init__(self, node, data):
+-        self.left = self.NO_DATA
+-        self.right = self.NO_DATA
+-        if data:
+-            if node.left is not None:
+-                self.left = data[0]
+-                if len(data) > 1:
+-                    self.right = data[1]
+-            else:
+-                self.right = data[0]
+-
+-class ForestToParseTree(ForestTransformer):
+-    """Used by the earley parser when ambiguity equals 'resolve' or
+-    'explicit'. Transforms an SPPF into an (ambiguous) parse tree.
+-
+-    Parameters:
+-        tree_class: The tree class to use for construction
+-        callbacks: A dictionary of rules to functions that output a tree
+-        prioritizer: A ``ForestVisitor`` that manipulates the priorities of ForestNodes
+-        resolve_ambiguity: If True, ambiguities will be resolved based on
+-                priorities. Otherwise, `_ambig` nodes will be in the resulting tree.
+-        use_cache: If True, the results of packed node transformations will be cached.
+-    """
+-
+-    def __init__(self, tree_class=Tree, callbacks=dict(), prioritizer=ForestSumVisitor(), resolve_ambiguity=True, use_cache=True):
+-        super(ForestToParseTree, self).__init__()
+-        self.tree_class = tree_class
+-        self.callbacks = callbacks
+-        self.prioritizer = prioritizer
+-        self.resolve_ambiguity = resolve_ambiguity
+-        self._use_cache = use_cache
+-        self._cache = {}
+-        self._on_cycle_retreat = False
+-        self._cycle_node = None
+-        self._successful_visits = set()
+-
+-    def visit(self, root):
+-        if self.prioritizer:
+-            self.prioritizer.visit(root)
+-        super(ForestToParseTree, self).visit(root)
+-        self._cache = {}
+-
+-    def on_cycle(self, node, path):
+-        logger.debug("Cycle encountered in the SPPF at node: %s. "
+-                "As infinite ambiguities cannot be represented in a tree, "
+-                "this family of derivations will be discarded.", node)
+-        self._cycle_node = node
+-        self._on_cycle_retreat = True
+-
+-    def _check_cycle(self, node):
+-        if self._on_cycle_retreat:
+-            if id(node) == id(self._cycle_node) or id(node) in self._successful_visits:
+-                self._cycle_node = None
+-                self._on_cycle_retreat = False
+-            else:
+-                return Discard
+-
+-    def _collapse_ambig(self, children):
+-        new_children = []
+-        for child in children:
+-            if hasattr(child, 'data') and child.data == '_ambig':
+-                new_children += child.children
+-            else:
+-                new_children.append(child)
+-        return new_children
+-
+-    def _call_rule_func(self, node, data):
+-        # called when transforming children of symbol nodes
+-        # data is a list of trees or tokens that correspond to the
+-        # symbol's rule expansion
+-        return self.callbacks[node.rule](data)
+-
+-    def _call_ambig_func(self, node, data):
+-        # called when transforming a symbol node
+-        # data is a list of trees where each tree's data is
+-        # equal to the name of the symbol or one of its aliases.
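
What `ForestSumVisitor` and `ForestToParseTree` accomplish together, seen from the user's side: declared rule priorities cascade up the forest and decide which derivation survives ambiguity resolution. A hedged sketch assuming `lark` is installed, with an illustrative grammar:

```python
from lark import Lark

parser = Lark(r"""
    start: a | b
    a.2: "x"    // rule priority 2
    b.1: "x"    // rule priority 1
""", parser="earley", ambiguity="resolve")

# Both rules match "x"; priority propagation picks the `a` derivation.
print(parser.parse("x").pretty())
```
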
+- if len(data) > 1: +- return self.tree_class('_ambig', data) +- elif data: +- return data[0] +- return Discard +- +- def transform_symbol_node(self, node, data): +- if id(node) not in self._successful_visits: +- return Discard +- r = self._check_cycle(node) +- if r is Discard: +- return r +- self._successful_visits.remove(id(node)) +- data = self._collapse_ambig(data) +- return self._call_ambig_func(node, data) +- +- def transform_intermediate_node(self, node, data): +- if id(node) not in self._successful_visits: +- return Discard +- r = self._check_cycle(node) +- if r is Discard: +- return r +- self._successful_visits.remove(id(node)) +- if len(data) > 1: +- children = [self.tree_class('_inter', c) for c in data] +- return self.tree_class('_iambig', children) +- return data[0] +- +- def transform_packed_node(self, node, data): +- r = self._check_cycle(node) +- if r is Discard: +- return r +- if self.resolve_ambiguity and id(node.parent) in self._successful_visits: +- return Discard +- if self._use_cache and id(node) in self._cache: +- return self._cache[id(node)] +- children = [] +- assert len(data) <= 2 +- data = PackedData(node, data) +- if data.left is not PackedData.NO_DATA: +- if node.left.is_intermediate and isinstance(data.left, list): +- children += data.left +- else: +- children.append(data.left) +- if data.right is not PackedData.NO_DATA: +- children.append(data.right) +- if node.parent.is_intermediate: +- return self._cache.setdefault(id(node), children) +- return self._cache.setdefault(id(node), self._call_rule_func(node, children)) +- +- def visit_symbol_node_in(self, node): +- super(ForestToParseTree, self).visit_symbol_node_in(node) +- if self._on_cycle_retreat: +- return +- return node.children +- +- def visit_packed_node_in(self, node): +- self._on_cycle_retreat = False +- to_visit = super(ForestToParseTree, self).visit_packed_node_in(node) +- if not self.resolve_ambiguity or id(node.parent) not in self._successful_visits: +- if not self._use_cache or id(node) not in self._cache: +- return to_visit +- +- def visit_packed_node_out(self, node): +- super(ForestToParseTree, self).visit_packed_node_out(node) +- if not self._on_cycle_retreat: +- self._successful_visits.add(id(node.parent)) +- +-def handles_ambiguity(func): +- """Decorator for methods of subclasses of ``TreeForestTransformer``. +- Denotes that the method should receive a list of transformed derivations.""" +- func.handles_ambiguity = True +- return func +- +-class TreeForestTransformer(ForestToParseTree): +- """A ``ForestTransformer`` with a tree ``Transformer``-like interface. +- By default, it will construct a tree. +- +- Methods provided via inheritance are called based on the rule/symbol +- names of nodes in the forest. +- +- Methods that act on rules will receive a list of the results of the +- transformations of the rule's children. By default, trees and tokens. +- +- Methods that act on tokens will receive a token. +- +- Alternatively, methods that act on rules may be annotated with +- ``handles_ambiguity``. In this case, the function will receive a list +- of all the transformations of all the derivations of the rule. +- By default, a list of trees where each tree.data is equal to the +- rule name or one of its aliases. +- +- Non-tree transformations are made possible by override of +- ``__default__``, ``__default_token__``, and ``__default_ambig__``. +- +- Note: +- Tree shaping features such as inlined rules and token filtering are +- not built into the transformation. 
Positions are also not propagated. +- +- Parameters: +- tree_class: The tree class to use for construction +- prioritizer: A ``ForestVisitor`` that manipulates the priorities of nodes in the SPPF. +- resolve_ambiguity: If True, ambiguities will be resolved based on priorities. +- use_cache (bool): If True, caches the results of some transformations, +- potentially improving performance when ``resolve_ambiguity==False``. +- Only use if you know what you are doing: i.e. All transformation +- functions are pure and referentially transparent. +- """ +- +- def __init__(self, tree_class=Tree, prioritizer=ForestSumVisitor(), resolve_ambiguity=True, use_cache=False): +- super(TreeForestTransformer, self).__init__(tree_class, dict(), prioritizer, resolve_ambiguity, use_cache) +- +- def __default__(self, name, data): +- """Default operation on tree (for override). +- +- Returns a tree with name with data as children. +- """ +- return self.tree_class(name, data) +- +- def __default_ambig__(self, name, data): +- """Default operation on ambiguous rule (for override). +- +- Wraps data in an '_ambig_' node if it contains more than +- one element. +- """ +- if len(data) > 1: +- return self.tree_class('_ambig', data) +- elif data: +- return data[0] +- return Discard +- +- def __default_token__(self, node): +- """Default operation on ``Token`` (for override). +- +- Returns ``node``. +- """ +- return node +- +- def transform_token_node(self, node): +- return getattr(self, node.type, self.__default_token__)(node) +- +- def _call_rule_func(self, node, data): +- name = node.rule.alias or node.rule.options.template_source or node.rule.origin.name +- user_func = getattr(self, name, self.__default__) +- if user_func == self.__default__ or hasattr(user_func, 'handles_ambiguity'): +- user_func = partial(self.__default__, name) +- if not self.resolve_ambiguity: +- wrapper = partial(AmbiguousIntermediateExpander, self.tree_class) +- user_func = wrapper(user_func) +- return user_func(data) +- +- def _call_ambig_func(self, node, data): +- name = node.s.name +- user_func = getattr(self, name, self.__default_ambig__) +- if user_func == self.__default_ambig__ or not hasattr(user_func, 'handles_ambiguity'): +- user_func = partial(self.__default_ambig__, name) +- return user_func(data) +- +-class ForestToPyDotVisitor(ForestVisitor): +- """ +- A Forest visitor which writes the SPPF to a PNG. +- +- The SPPF can get really large, really quickly because +- of the amount of meta-data it stores, so this is probably +- only useful for trivial trees and learning how the SPPF +- is structured. 
+- """ +- def __init__(self, rankdir="TB"): +- super(ForestToPyDotVisitor, self).__init__(single_visit=True) +- self.pydot = import_module('pydot') +- self.graph = self.pydot.Dot(graph_type='digraph', rankdir=rankdir) +- +- def visit(self, root, filename): +- super(ForestToPyDotVisitor, self).visit(root) +- try: +- self.graph.write_png(filename) +- except FileNotFoundError as e: +- logger.error("Could not write png: ", e) +- +- def visit_token_node(self, node): +- graph_node_id = str(id(node)) +- graph_node_label = "\"{}\"".format(node.value.replace('"', '\\"')) +- graph_node_color = 0x808080 +- graph_node_style = "\"filled,rounded\"" +- graph_node_shape = "diamond" +- graph_node = self.pydot.Node(graph_node_id, style=graph_node_style, fillcolor="#{:06x}".format(graph_node_color), shape=graph_node_shape, label=graph_node_label) +- self.graph.add_node(graph_node) +- +- def visit_packed_node_in(self, node): +- graph_node_id = str(id(node)) +- graph_node_label = repr(node) +- graph_node_color = 0x808080 +- graph_node_style = "filled" +- graph_node_shape = "diamond" +- graph_node = self.pydot.Node(graph_node_id, style=graph_node_style, fillcolor="#{:06x}".format(graph_node_color), shape=graph_node_shape, label=graph_node_label) +- self.graph.add_node(graph_node) +- yield node.left +- yield node.right +- +- def visit_packed_node_out(self, node): +- graph_node_id = str(id(node)) +- graph_node = self.graph.get_node(graph_node_id)[0] +- for child in [node.left, node.right]: +- if child is not None: +- child_graph_node_id = str(id(child)) +- child_graph_node = self.graph.get_node(child_graph_node_id)[0] +- self.graph.add_edge(self.pydot.Edge(graph_node, child_graph_node)) +- else: +- #### Try and be above the Python object ID range; probably impl. specific, but maybe this is okay. 
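
As the deleted `parse()` method earlier in this diff shows, this visitor is reached through the parser's `debug` flag. A hedged sketch, assuming `lark` and `pydot` are installed:

```python
from lark import Lark

parser = Lark('start: "a"+', parser="earley", debug=True)
parser.parse("aaa")   # with pydot importable, the SPPF is rendered to sppf.png
```
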
+- child_graph_node_id = str(randint(100000000000000000000000000000,123456789012345678901234567890)) +- child_graph_node_style = "invis" +- child_graph_node = self.pydot.Node(child_graph_node_id, style=child_graph_node_style, label="None") +- child_edge_style = "invis" +- self.graph.add_node(child_graph_node) +- self.graph.add_edge(self.pydot.Edge(graph_node, child_graph_node, style=child_edge_style)) +- +- def visit_symbol_node_in(self, node): +- graph_node_id = str(id(node)) +- graph_node_label = repr(node) +- graph_node_color = 0x808080 +- graph_node_style = "\"filled\"" +- if node.is_intermediate: +- graph_node_shape = "ellipse" +- else: +- graph_node_shape = "rectangle" +- graph_node = self.pydot.Node(graph_node_id, style=graph_node_style, fillcolor="#{:06x}".format(graph_node_color), shape=graph_node_shape, label=graph_node_label) +- self.graph.add_node(graph_node) +- return iter(node.children) +- +- def visit_symbol_node_out(self, node): +- graph_node_id = str(id(node)) +- graph_node = self.graph.get_node(graph_node_id)[0] +- for child in node.children: +- child_graph_node_id = str(id(child)) +- child_graph_node = self.graph.get_node(child_graph_node_id)[0] +- self.graph.add_edge(self.pydot.Edge(graph_node, child_graph_node)) +diff --git a/src/poetry/core/_vendor/lark/parsers/grammar_analysis.py b/src/poetry/core/_vendor/lark/parsers/grammar_analysis.py +deleted file mode 100644 +index b526e47..0000000 +--- a/src/poetry/core/_vendor/lark/parsers/grammar_analysis.py ++++ /dev/null +@@ -1,185 +0,0 @@ +-from collections import Counter, defaultdict +- +-from ..utils import bfs, fzset, classify +-from ..exceptions import GrammarError +-from ..grammar import Rule, Terminal, NonTerminal +- +- +-class RulePtr: +- __slots__ = ('rule', 'index') +- +- def __init__(self, rule, index): +- assert isinstance(rule, Rule) +- assert index <= len(rule.expansion) +- self.rule = rule +- self.index = index +- +- def __repr__(self): +- before = [x.name for x in self.rule.expansion[:self.index]] +- after = [x.name for x in self.rule.expansion[self.index:]] +- return '<%s : %s * %s>' % (self.rule.origin.name, ' '.join(before), ' '.join(after)) +- +- @property +- def next(self): +- return self.rule.expansion[self.index] +- +- def advance(self, sym): +- assert self.next == sym +- return RulePtr(self.rule, self.index+1) +- +- @property +- def is_satisfied(self): +- return self.index == len(self.rule.expansion) +- +- def __eq__(self, other): +- return self.rule == other.rule and self.index == other.index +- def __hash__(self): +- return hash((self.rule, self.index)) +- +- +-# state generation ensures no duplicate LR0ItemSets +-class LR0ItemSet: +- __slots__ = ('kernel', 'closure', 'transitions', 'lookaheads') +- +- def __init__(self, kernel, closure): +- self.kernel = fzset(kernel) +- self.closure = fzset(closure) +- self.transitions = {} +- self.lookaheads = defaultdict(set) +- +- def __repr__(self): +- return '{%s | %s}' % (', '.join([repr(r) for r in self.kernel]), ', '.join([repr(r) for r in self.closure])) +- +- +-def update_set(set1, set2): +- if not set2 or set1 > set2: +- return False +- +- copy = set(set1) +- set1 |= set2 +- return set1 != copy +- +-def calculate_sets(rules): +- """Calculate FOLLOW sets. +- +- Adapted from: http://lara.epfl.ch/w/cc09:algorithm_for_first_and_follow_sets""" +- symbols = {sym for rule in rules for sym in rule.expansion} | {rule.origin for rule in rules} +- +- # foreach grammar rule X ::= Y(1) ... 
Y(k) +- # if k=0 or {Y(1),...,Y(k)} subset of NULLABLE then +- # NULLABLE = NULLABLE union {X} +- # for i = 1 to k +- # if i=1 or {Y(1),...,Y(i-1)} subset of NULLABLE then +- # FIRST(X) = FIRST(X) union FIRST(Y(i)) +- # for j = i+1 to k +- # if i=k or {Y(i+1),...Y(k)} subset of NULLABLE then +- # FOLLOW(Y(i)) = FOLLOW(Y(i)) union FOLLOW(X) +- # if i+1=j or {Y(i+1),...,Y(j-1)} subset of NULLABLE then +- # FOLLOW(Y(i)) = FOLLOW(Y(i)) union FIRST(Y(j)) +- # until none of NULLABLE,FIRST,FOLLOW changed in last iteration +- +- NULLABLE = set() +- FIRST = {} +- FOLLOW = {} +- for sym in symbols: +- FIRST[sym]={sym} if sym.is_term else set() +- FOLLOW[sym]=set() +- +- # Calculate NULLABLE and FIRST +- changed = True +- while changed: +- changed = False +- +- for rule in rules: +- if set(rule.expansion) <= NULLABLE: +- if update_set(NULLABLE, {rule.origin}): +- changed = True +- +- for i, sym in enumerate(rule.expansion): +- if set(rule.expansion[:i]) <= NULLABLE: +- if update_set(FIRST[rule.origin], FIRST[sym]): +- changed = True +- else: +- break +- +- # Calculate FOLLOW +- changed = True +- while changed: +- changed = False +- +- for rule in rules: +- for i, sym in enumerate(rule.expansion): +- if i==len(rule.expansion)-1 or set(rule.expansion[i+1:]) <= NULLABLE: +- if update_set(FOLLOW[sym], FOLLOW[rule.origin]): +- changed = True +- +- for j in range(i+1, len(rule.expansion)): +- if set(rule.expansion[i+1:j]) <= NULLABLE: +- if update_set(FOLLOW[sym], FIRST[rule.expansion[j]]): +- changed = True +- +- return FIRST, FOLLOW, NULLABLE +- +- +-class GrammarAnalyzer: +- def __init__(self, parser_conf, debug=False): +- self.debug = debug +- +- root_rules = {start: Rule(NonTerminal('$root_' + start), [NonTerminal(start), Terminal('$END')]) +- for start in parser_conf.start} +- +- rules = parser_conf.rules + list(root_rules.values()) +- self.rules_by_origin = classify(rules, lambda r: r.origin) +- +- if len(rules) != len(set(rules)): +- duplicates = [item for item, count in Counter(rules).items() if count > 1] +- raise GrammarError("Rules defined twice: %s" % ', '.join(str(i) for i in duplicates)) +- +- for r in rules: +- for sym in r.expansion: +- if not (sym.is_term or sym in self.rules_by_origin): +- raise GrammarError("Using an undefined rule: %s" % sym) +- +- self.start_states = {start: self.expand_rule(root_rule.origin) +- for start, root_rule in root_rules.items()} +- +- self.end_states = {start: fzset({RulePtr(root_rule, len(root_rule.expansion))}) +- for start, root_rule in root_rules.items()} +- +- lr0_root_rules = {start: Rule(NonTerminal('$root_' + start), [NonTerminal(start)]) +- for start in parser_conf.start} +- +- lr0_rules = parser_conf.rules + list(lr0_root_rules.values()) +- assert(len(lr0_rules) == len(set(lr0_rules))) +- +- self.lr0_rules_by_origin = classify(lr0_rules, lambda r: r.origin) +- +- # cache RulePtr(r, 0) in r (no duplicate RulePtr objects) +- self.lr0_start_states = {start: LR0ItemSet([RulePtr(root_rule, 0)], self.expand_rule(root_rule.origin, self.lr0_rules_by_origin)) +- for start, root_rule in lr0_root_rules.items()} +- +- self.FIRST, self.FOLLOW, self.NULLABLE = calculate_sets(rules) +- +- def expand_rule(self, source_rule, rules_by_origin=None): +- "Returns all init_ptrs accessible by rule (recursive)" +- +- if rules_by_origin is None: +- rules_by_origin = self.rules_by_origin +- +- init_ptrs = set() +- def _expand_rule(rule): +- assert not rule.is_term, rule +- +- for r in rules_by_origin[rule]: +- init_ptr = RulePtr(r, 0) +- init_ptrs.add(init_ptr) +- +- if 
r.expansion: # if not empty rule +- new_r = init_ptr.next +- if not new_r.is_term: +- yield new_r +- +- for _ in bfs([source_rule], _expand_rule): +- pass +- +- return fzset(init_ptrs) +diff --git a/src/poetry/core/_vendor/lark/parsers/lalr_analysis.py b/src/poetry/core/_vendor/lark/parsers/lalr_analysis.py +deleted file mode 100644 +index f6a993b..0000000 +--- a/src/poetry/core/_vendor/lark/parsers/lalr_analysis.py ++++ /dev/null +@@ -1,304 +0,0 @@ +-"""This module builds a LALR(1) transition-table for lalr_parser.py +- +-For now, shift/reduce conflicts are automatically resolved as shifts. +-""" +- +-# Author: Erez Shinan (2017) +-# Email : erezshin@gmail.com +- +-from collections import defaultdict +- +-from ..utils import classify, classify_bool, bfs, fzset, Enumerator, logger +-from ..exceptions import GrammarError +- +-from .grammar_analysis import GrammarAnalyzer, Terminal, LR0ItemSet +-from ..grammar import Rule +- +-###{standalone +- +-class Action: +- def __init__(self, name): +- self.name = name +- def __str__(self): +- return self.name +- def __repr__(self): +- return str(self) +- +-Shift = Action('Shift') +-Reduce = Action('Reduce') +- +- +-class ParseTable: +- def __init__(self, states, start_states, end_states): +- self.states = states +- self.start_states = start_states +- self.end_states = end_states +- +- def serialize(self, memo): +- tokens = Enumerator() +- rules = Enumerator() +- +- states = { +- state: {tokens.get(token): ((1, arg.serialize(memo)) if action is Reduce else (0, arg)) +- for token, (action, arg) in actions.items()} +- for state, actions in self.states.items() +- } +- +- return { +- 'tokens': tokens.reversed(), +- 'states': states, +- 'start_states': self.start_states, +- 'end_states': self.end_states, +- } +- +- @classmethod +- def deserialize(cls, data, memo): +- tokens = data['tokens'] +- states = { +- state: {tokens[token]: ((Reduce, Rule.deserialize(arg, memo)) if action==1 else (Shift, arg)) +- for token, (action, arg) in actions.items()} +- for state, actions in data['states'].items() +- } +- return cls(states, data['start_states'], data['end_states']) +- +- +-class IntParseTable(ParseTable): +- +- @classmethod +- def from_ParseTable(cls, parse_table): +- enum = list(parse_table.states) +- state_to_idx = {s:i for i,s in enumerate(enum)} +- int_states = {} +- +- for s, la in parse_table.states.items(): +- la = {k:(v[0], state_to_idx[v[1]]) if v[0] is Shift else v +- for k,v in la.items()} +- int_states[ state_to_idx[s] ] = la +- +- +- start_states = {start:state_to_idx[s] for start, s in parse_table.start_states.items()} +- end_states = {start:state_to_idx[s] for start, s in parse_table.end_states.items()} +- return cls(int_states, start_states, end_states) +- +-###} +- +- +-# digraph and traverse, see The Theory and Practice of Compiler Writing +- +-# computes F(x) = G(x) union (union { G(y) | x R y }) +-# X: nodes +-# R: relation (function mapping node -> list of nodes that satisfy the relation) +-# G: set valued function +-def digraph(X, R, G): +- F = {} +- S = [] +- N = {} +- for x in X: +- N[x] = 0 +- for x in X: +- # this is always true for the first iteration, but N[x] may be updated in traverse below +- if N[x] == 0: +- traverse(x, S, N, X, R, G, F) +- return F +- +-# x: single node +-# S: stack +-# N: weights +-# X: nodes +-# R: relation (see above) +-# G: set valued function +-# F: set valued function we are computing (map of input -> output) +-def traverse(x, S, N, X, R, G, F): +- S.append(x) +- d = len(S) +- N[x] = d +- F[x] = G[x] +- 
for y in R[x]: +- if N[y] == 0: +- traverse(y, S, N, X, R, G, F) +- n_x = N[x] +- assert(n_x > 0) +- n_y = N[y] +- assert(n_y != 0) +- if (n_y > 0) and (n_y < n_x): +- N[x] = n_y +- F[x].update(F[y]) +- if N[x] == d: +- f_x = F[x] +- while True: +- z = S.pop() +- N[z] = -1 +- F[z] = f_x +- if z == x: +- break +- +- +-class LALR_Analyzer(GrammarAnalyzer): +- def __init__(self, parser_conf, debug=False): +- GrammarAnalyzer.__init__(self, parser_conf, debug) +- self.nonterminal_transitions = [] +- self.directly_reads = defaultdict(set) +- self.reads = defaultdict(set) +- self.includes = defaultdict(set) +- self.lookback = defaultdict(set) +- +- +- def compute_lr0_states(self): +- self.lr0_states = set() +- # map of kernels to LR0ItemSets +- cache = {} +- +- def step(state): +- _, unsat = classify_bool(state.closure, lambda rp: rp.is_satisfied) +- +- d = classify(unsat, lambda rp: rp.next) +- for sym, rps in d.items(): +- kernel = fzset({rp.advance(sym) for rp in rps}) +- new_state = cache.get(kernel, None) +- if new_state is None: +- closure = set(kernel) +- for rp in kernel: +- if not rp.is_satisfied and not rp.next.is_term: +- closure |= self.expand_rule(rp.next, self.lr0_rules_by_origin) +- new_state = LR0ItemSet(kernel, closure) +- cache[kernel] = new_state +- +- state.transitions[sym] = new_state +- yield new_state +- +- self.lr0_states.add(state) +- +- for _ in bfs(self.lr0_start_states.values(), step): +- pass +- +- def compute_reads_relations(self): +- # handle start state +- for root in self.lr0_start_states.values(): +- assert(len(root.kernel) == 1) +- for rp in root.kernel: +- assert(rp.index == 0) +- self.directly_reads[(root, rp.next)] = set([ Terminal('$END') ]) +- +- for state in self.lr0_states: +- seen = set() +- for rp in state.closure: +- if rp.is_satisfied: +- continue +- s = rp.next +- # if s is a not a nonterminal +- if s not in self.lr0_rules_by_origin: +- continue +- if s in seen: +- continue +- seen.add(s) +- nt = (state, s) +- self.nonterminal_transitions.append(nt) +- dr = self.directly_reads[nt] +- r = self.reads[nt] +- next_state = state.transitions[s] +- for rp2 in next_state.closure: +- if rp2.is_satisfied: +- continue +- s2 = rp2.next +- # if s2 is a terminal +- if s2 not in self.lr0_rules_by_origin: +- dr.add(s2) +- if s2 in self.NULLABLE: +- r.add((next_state, s2)) +- +- def compute_includes_lookback(self): +- for nt in self.nonterminal_transitions: +- state, nonterminal = nt +- includes = [] +- lookback = self.lookback[nt] +- for rp in state.closure: +- if rp.rule.origin != nonterminal: +- continue +- # traverse the states for rp(.rule) +- state2 = state +- for i in range(rp.index, len(rp.rule.expansion)): +- s = rp.rule.expansion[i] +- nt2 = (state2, s) +- state2 = state2.transitions[s] +- if nt2 not in self.reads: +- continue +- for j in range(i + 1, len(rp.rule.expansion)): +- if not rp.rule.expansion[j] in self.NULLABLE: +- break +- else: +- includes.append(nt2) +- # state2 is at the final state for rp.rule +- if rp.index == 0: +- for rp2 in state2.closure: +- if (rp2.rule == rp.rule) and rp2.is_satisfied: +- lookback.add((state2, rp2.rule)) +- for nt2 in includes: +- self.includes[nt2].add(nt) +- +- def compute_lookaheads(self): +- read_sets = digraph(self.nonterminal_transitions, self.reads, self.directly_reads) +- follow_sets = digraph(self.nonterminal_transitions, self.includes, read_sets) +- +- for nt, lookbacks in self.lookback.items(): +- for state, rule in lookbacks: +- for s in follow_sets[nt]: +- state.lookaheads[s].add(rule) +- +- def 
compute_lalr1_states(self): +- m = {} +- reduce_reduce = [] +- for state in self.lr0_states: +- actions = {} +- for la, next_state in state.transitions.items(): +- actions[la] = (Shift, next_state.closure) +- for la, rules in state.lookaheads.items(): +- if len(rules) > 1: +- # Try to resolve conflict based on priority +- p = [(r.options.priority or 0, r) for r in rules] +- p.sort(key=lambda r: r[0], reverse=True) +- best, second_best = p[:2] +- if best[0] > second_best[0]: +- rules = [best[1]] +- else: +- reduce_reduce.append((state, la, rules)) +- if la in actions: +- if self.debug: +- logger.warning('Shift/Reduce conflict for terminal %s: (resolving as shift)', la.name) +- logger.warning(' * %s', list(rules)[0]) +- else: +- actions[la] = (Reduce, list(rules)[0]) +- m[state] = { k.name: v for k, v in actions.items() } +- +- if reduce_reduce: +- msgs = [] +- for state, la, rules in reduce_reduce: +- msg = 'Reduce/Reduce collision in %s between the following rules: %s' % (la, ''.join([ '\n\t- ' + str(r) for r in rules ])) +- if self.debug: +- msg += '\n collision occurred in state: {%s\n }' % ''.join(['\n\t' + str(x) for x in state.closure]) +- msgs.append(msg) +- raise GrammarError('\n\n'.join(msgs)) +- +- states = { k.closure: v for k, v in m.items() } +- +- # compute end states +- end_states = {} +- for state in states: +- for rp in state: +- for start in self.lr0_start_states: +- if rp.rule.origin.name == ('$root_' + start) and rp.is_satisfied: +- assert(not start in end_states) +- end_states[start] = state +- +- _parse_table = ParseTable(states, { start: state.closure for start, state in self.lr0_start_states.items() }, end_states) +- +- if self.debug: +- self.parse_table = _parse_table +- else: +- self.parse_table = IntParseTable.from_ParseTable(_parse_table) +- +- def compute_lalr(self): +- self.compute_lr0_states() +- self.compute_reads_relations() +- self.compute_includes_lookback() +- self.compute_lookaheads() +- self.compute_lalr1_states() +diff --git a/src/poetry/core/_vendor/lark/parsers/lalr_interactive_parser.py b/src/poetry/core/_vendor/lark/parsers/lalr_interactive_parser.py +deleted file mode 100644 +index c9658da..0000000 +--- a/src/poetry/core/_vendor/lark/parsers/lalr_interactive_parser.py ++++ /dev/null +@@ -1,149 +0,0 @@ +-# This module provides a LALR interactive parser, which is used for debugging and error handling +- +-from typing import Iterator, List +-from copy import copy +-import warnings +- +-from lark.exceptions import UnexpectedToken +-from lark.lexer import Token, LexerThread +- +- +-class InteractiveParser: +- """InteractiveParser gives you advanced control over parsing and error handling when parsing with LALR. +- +- For a simpler interface, see the ``on_error`` argument to ``Lark.parse()``. +- """ +- def __init__(self, parser, parser_state, lexer_thread: LexerThread): +- self.parser = parser +- self.parser_state = parser_state +- self.lexer_thread = lexer_thread +- self.result = None +- +- @property +- def lexer_state(self) -> LexerThread: +- warnings.warn("lexer_state will be removed in subsequent releases. Use lexer_thread instead.", DeprecationWarning) +- return self.lexer_thread +- +- def feed_token(self, token: Token): +- """Feed the parser with a token, and advance it to the next state, as if it received it from the lexer. +- +- Note that ``token`` has to be an instance of ``Token``. 
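
A hedged end-to-end sketch of the interactive API defined in this file, assuming `lark` is installed; the grammar and input are illustrative:

```python
from lark import Lark

parser = Lark('start: "a" "b"', parser="lalr")
ip = parser.parse_interactive("ab")
ip.exhaust_lexer()      # feed every token the lexer produces
tree = ip.feed_eof()    # then feed the '$END' token to finish the parse
print(tree.pretty())
```
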
+- """ +- return self.parser_state.feed_token(token, token.type == '$END') +- +- def iter_parse(self) -> Iterator[Token]: +- """Step through the different stages of the parse, by reading tokens from the lexer +- and feeding them to the parser, one per iteration. +- +- Returns an iterator of the tokens it encounters. +- +- When the parse is over, the resulting tree can be found in ``InteractiveParser.result``. +- """ +- for token in self.lexer_thread.lex(self.parser_state): +- yield token +- self.result = self.feed_token(token) +- +- def exhaust_lexer(self) -> List[Token]: +- """Try to feed the rest of the lexer state into the interactive parser. +- +- Note that this modifies the instance in place and does not feed an '$END' Token +- """ +- return list(self.iter_parse()) +- +- +- def feed_eof(self, last_token=None): +- """Feed a '$END' Token. Borrows from 'last_token' if given.""" +- eof = Token.new_borrow_pos('$END', '', last_token) if last_token is not None else self.lexer_thread._Token('$END', '', 0, 1, 1) +- return self.feed_token(eof) +- +- +- def __copy__(self): +- """Create a new interactive parser with a separate state. +- +- Calls to feed_token() won't affect the old instance, and vice-versa. +- """ +- return type(self)( +- self.parser, +- copy(self.parser_state), +- copy(self.lexer_thread), +- ) +- +- def copy(self): +- return copy(self) +- +- def __eq__(self, other): +- if not isinstance(other, InteractiveParser): +- return False +- +- return self.parser_state == other.parser_state and self.lexer_thread == other.lexer_thread +- +- def as_immutable(self): +- """Convert to an ``ImmutableInteractiveParser``.""" +- p = copy(self) +- return ImmutableInteractiveParser(p.parser, p.parser_state, p.lexer_thread) +- +- def pretty(self): +- """Print the output of ``choices()`` in a way that's easier to read.""" +- out = ["Parser choices:"] +- for k, v in self.choices().items(): +- out.append('\t- %s -> %r' % (k, v)) +- out.append('stack size: %s' % len(self.parser_state.state_stack)) +- return '\n'.join(out) +- +- def choices(self): +- """Returns a dictionary of token types, matched to their action in the parser. +- +- Only returns token types that are accepted by the current state. +- +- Updated by ``feed_token()``. +- """ +- return self.parser_state.parse_conf.parse_table.states[self.parser_state.position] +- +- def accepts(self): +- """Returns the set of possible tokens that will advance the parser into a new valid state.""" +- accepts = set() +- for t in self.choices(): +- if t.isupper(): # is terminal? +- new_cursor = copy(self) +- try: +- new_cursor.feed_token(self.lexer_thread._Token(t, '')) +- except UnexpectedToken: +- pass +- else: +- accepts.add(t) +- return accepts +- +- def resume_parse(self): +- """Resume automated parsing from the current state.""" +- return self.parser.parse_from_state(self.parser_state) +- +- +- +-class ImmutableInteractiveParser(InteractiveParser): +- """Same as ``InteractiveParser``, but operations create a new instance instead +- of changing it in-place. +- """ +- +- result = None +- +- def __hash__(self): +- return hash((self.parser_state, self.lexer_thread)) +- +- def feed_token(self, token): +- c = copy(self) +- c.result = InteractiveParser.feed_token(c, token) +- return c +- +- def exhaust_lexer(self): +- """Try to feed the rest of the lexer state into the parser. 
+- +- Note that this returns a new ImmutableInteractiveParser and does not feed an '$END' Token""" +- cursor = self.as_mutable() +- cursor.exhaust_lexer() +- return cursor.as_immutable() +- +- def as_mutable(self): +- """Convert to an ``InteractiveParser``.""" +- p = copy(self) +- return InteractiveParser(p.parser, p.parser_state, p.lexer_thread) +- +diff --git a/src/poetry/core/_vendor/lark/parsers/lalr_parser.py b/src/poetry/core/_vendor/lark/parsers/lalr_parser.py +deleted file mode 100644 +index 2837b29..0000000 +--- a/src/poetry/core/_vendor/lark/parsers/lalr_parser.py ++++ /dev/null +@@ -1,200 +0,0 @@ +-"""This module implements a LALR(1) Parser +-""" +-# Author: Erez Shinan (2017) +-# Email : erezshin@gmail.com +-from copy import deepcopy, copy +-from ..exceptions import UnexpectedInput, UnexpectedToken +-from ..lexer import Token +-from ..utils import Serialize +- +-from .lalr_analysis import LALR_Analyzer, Shift, Reduce, IntParseTable +-from .lalr_interactive_parser import InteractiveParser +-from lark.exceptions import UnexpectedCharacters, UnexpectedInput, UnexpectedToken +- +-###{standalone +- +-class LALR_Parser(Serialize): +- def __init__(self, parser_conf, debug=False): +- analysis = LALR_Analyzer(parser_conf, debug=debug) +- analysis.compute_lalr() +- callbacks = parser_conf.callbacks +- +- self._parse_table = analysis.parse_table +- self.parser_conf = parser_conf +- self.parser = _Parser(analysis.parse_table, callbacks, debug) +- +- @classmethod +- def deserialize(cls, data, memo, callbacks, debug=False): +- inst = cls.__new__(cls) +- inst._parse_table = IntParseTable.deserialize(data, memo) +- inst.parser = _Parser(inst._parse_table, callbacks, debug) +- return inst +- +- def serialize(self, memo): +- return self._parse_table.serialize(memo) +- +- def parse_interactive(self, lexer, start): +- return self.parser.parse(lexer, start, start_interactive=True) +- +- def parse(self, lexer, start, on_error=None): +- try: +- return self.parser.parse(lexer, start) +- except UnexpectedInput as e: +- if on_error is None: +- raise +- +- while True: +- if isinstance(e, UnexpectedCharacters): +- s = e.interactive_parser.lexer_thread.state +- p = s.line_ctr.char_pos +- +- if not on_error(e): +- raise e +- +- if isinstance(e, UnexpectedCharacters): +- # If user didn't change the character position, then we should +- if p == s.line_ctr.char_pos: +- s.line_ctr.feed(s.text[p:p+1]) +- +- try: +- return e.interactive_parser.resume_parse() +- except UnexpectedToken as e2: +- if (isinstance(e, UnexpectedToken) +- and e.token.type == e2.token.type == '$END' +- and e.interactive_parser == e2.interactive_parser): +- # Prevent infinite loop +- raise e2 +- e = e2 +- except UnexpectedCharacters as e2: +- e = e2 +- +- +-class ParseConf: +- __slots__ = 'parse_table', 'callbacks', 'start', 'start_state', 'end_state', 'states' +- +- def __init__(self, parse_table, callbacks, start): +- self.parse_table = parse_table +- +- self.start_state = self.parse_table.start_states[start] +- self.end_state = self.parse_table.end_states[start] +- self.states = self.parse_table.states +- +- self.callbacks = callbacks +- self.start = start +- +- +-class ParserState: +- __slots__ = 'parse_conf', 'lexer', 'state_stack', 'value_stack' +- +- def __init__(self, parse_conf, lexer, state_stack=None, value_stack=None): +- self.parse_conf = parse_conf +- self.lexer = lexer +- self.state_stack = state_stack or [self.parse_conf.start_state] +- self.value_stack = value_stack or [] +- +- @property +- def position(self): +- return 
self.state_stack[-1] +- +- # Necessary for match_examples() to work +- def __eq__(self, other): +- if not isinstance(other, ParserState): +- return NotImplemented +- return len(self.state_stack) == len(other.state_stack) and self.position == other.position +- +- def __copy__(self): +- return type(self)( +- self.parse_conf, +- self.lexer, # XXX copy +- copy(self.state_stack), +- deepcopy(self.value_stack), +- ) +- +- def copy(self): +- return copy(self) +- +- def feed_token(self, token, is_end=False): +- state_stack = self.state_stack +- value_stack = self.value_stack +- states = self.parse_conf.states +- end_state = self.parse_conf.end_state +- callbacks = self.parse_conf.callbacks +- +- while True: +- state = state_stack[-1] +- try: +- action, arg = states[state][token.type] +- except KeyError: +- expected = {s for s in states[state].keys() if s.isupper()} +- raise UnexpectedToken(token, expected, state=self, interactive_parser=None) +- +- assert arg != end_state +- +- if action is Shift: +- # shift once and return +- assert not is_end +- state_stack.append(arg) +- value_stack.append(token if token.type not in callbacks else callbacks[token.type](token)) +- return +- else: +- # reduce+shift as many times as necessary +- rule = arg +- size = len(rule.expansion) +- if size: +- s = value_stack[-size:] +- del state_stack[-size:] +- del value_stack[-size:] +- else: +- s = [] +- +- value = callbacks[rule](s) +- +- _action, new_state = states[state_stack[-1]][rule.origin.name] +- assert _action is Shift +- state_stack.append(new_state) +- value_stack.append(value) +- +- if is_end and state_stack[-1] == end_state: +- return value_stack[-1] +- +-class _Parser: +- def __init__(self, parse_table, callbacks, debug=False): +- self.parse_table = parse_table +- self.callbacks = callbacks +- self.debug = debug +- +- def parse(self, lexer, start, value_stack=None, state_stack=None, start_interactive=False): +- parse_conf = ParseConf(self.parse_table, self.callbacks, start) +- parser_state = ParserState(parse_conf, lexer, state_stack, value_stack) +- if start_interactive: +- return InteractiveParser(self, parser_state, parser_state.lexer) +- return self.parse_from_state(parser_state) +- +- +- def parse_from_state(self, state): +- # Main LALR-parser loop +- try: +- token = None +- for token in state.lexer.lex(state): +- state.feed_token(token) +- +- end_token = Token.new_borrow_pos('$END', '', token) if token else Token('$END', '', 0, 1, 1) +- return state.feed_token(end_token, True) +- except UnexpectedInput as e: +- try: +- e.interactive_parser = InteractiveParser(self, state, state.lexer) +- except NameError: +- pass +- raise e +- except Exception as e: +- if self.debug: +- print("") +- print("STATE STACK DUMP") +- print("----------------") +- for i, s in enumerate(state.state_stack): +- print('%d)' % i , s) +- print("") +- +- raise +-###} +- +diff --git a/src/poetry/core/_vendor/lark/parsers/xearley.py b/src/poetry/core/_vendor/lark/parsers/xearley.py +deleted file mode 100644 +index 343e5c0..0000000 +--- a/src/poetry/core/_vendor/lark/parsers/xearley.py ++++ /dev/null +@@ -1,159 +0,0 @@ +-"""This module implements an experimental Earley parser with a dynamic lexer +- +-The core Earley algorithm used here is based on Elizabeth Scott's implementation, here: +- https://www.sciencedirect.com/science/article/pii/S1571066108001497 +- +-That is probably the best reference for understanding the algorithm here. +- +-The Earley parser outputs an SPPF-tree as per that document. 
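[Editor's note: for orientation, this dynamic-lexer Earley parser is reached through the public API by passing lexer="dynamic" (or "dynamic_complete", which sets complete_lex=True). A minimal sketch, assuming a regular `lark` install; the grammar is illustrative, not part of the patch.]

from lark import Lark

parser = Lark(r"""
    start: WORD+
    WORD: /\w+/
    %ignore " "
""", parser="earley", lexer="dynamic")

print(parser.parse("scan me lazily").pretty())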
The SPPF tree format +-is better documented here: +- http://www.bramvandersanden.com/post/2014/06/shared-packed-parse-forest/ +- +-Instead of running a lexer beforehand, or using a costy char-by-char method, this parser +-uses regular expressions by necessity, achieving high-performance while maintaining all of +-Earley's power in parsing any CFG. +-""" +- +-from collections import defaultdict +- +-from ..tree import Tree +-from ..exceptions import UnexpectedCharacters +-from ..lexer import Token +-from ..grammar import Terminal +-from .earley import Parser as BaseParser +-from .earley_forest import SymbolNode, TokenNode +- +- +-class Parser(BaseParser): +- def __init__(self, lexer_conf, parser_conf, term_matcher, resolve_ambiguity=True, complete_lex = False, debug=False, tree_class=Tree): +- BaseParser.__init__(self, lexer_conf, parser_conf, term_matcher, resolve_ambiguity, debug, tree_class) +- self.ignore = [Terminal(t) for t in lexer_conf.ignore] +- self.complete_lex = complete_lex +- +- def _parse(self, stream, columns, to_scan, start_symbol=None): +- +- def scan(i, to_scan): +- """The core Earley Scanner. +- +- This is a custom implementation of the scanner that uses the +- Lark lexer to match tokens. The scan list is built by the +- Earley predictor, based on the previously completed tokens. +- This ensures that at each phase of the parse we have a custom +- lexer context, allowing for more complex ambiguities.""" +- +- node_cache = {} +- +- # 1) Loop the expectations and ask the lexer to match. +- # Since regexp is forward looking on the input stream, and we only +- # want to process tokens when we hit the point in the stream at which +- # they complete, we push all tokens into a buffer (delayed_matches), to +- # be held possibly for a later parse step when we reach the point in the +- # input stream at which they complete. +- for item in set(to_scan): +- m = match(item.expect, stream, i) +- if m: +- t = Token(item.expect.name, m.group(0), i, text_line, text_column) +- delayed_matches[m.end()].append( (item, i, t) ) +- +- if self.complete_lex: +- s = m.group(0) +- for j in range(1, len(s)): +- m = match(item.expect, s[:-j]) +- if m: +- t = Token(item.expect.name, m.group(0), i, text_line, text_column) +- delayed_matches[i+m.end()].append( (item, i, t) ) +- +- # XXX The following 3 lines were commented out for causing a bug. See issue #768 +- # # Remove any items that successfully matched in this pass from the to_scan buffer. +- # # This ensures we don't carry over tokens that already matched, if we're ignoring below. +- # to_scan.remove(item) +- +- # 3) Process any ignores. This is typically used for e.g. whitespace. +- # We carry over any unmatched items from the to_scan buffer to be matched again after +- # the ignore. This should allow us to use ignored symbols in non-terminals to implement +- # e.g. mandatory spacing. +- for x in self.ignore: +- m = match(x, stream, i) +- if m: +- # Carry over any items still in the scan buffer, to past the end of the ignored items. +- delayed_matches[m.end()].extend([(item, i, None) for item in to_scan ]) +- +- # If we're ignoring up to the end of the file, # carry over the start symbol if it already completed. +- delayed_matches[m.end()].extend([(item, i, None) for item in columns[i] if item.is_complete and item.s == start_symbol]) +- +- next_to_scan = set() +- next_set = set() +- columns.append(next_set) +- transitives.append({}) +- +- ## 4) Process Tokens from delayed_matches. +- # This is the core of the Earley scanner. 
Create an SPPF node for each Token, +- # and create the symbol node in the SPPF tree. Advance the item that completed, +- # and add the resulting new item to either the Earley set (for processing by the +- # completer/predictor) or the to_scan buffer for the next parse step. +- for item, start, token in delayed_matches[i+1]: +- if token is not None: +- token.end_line = text_line +- token.end_column = text_column + 1 +- token.end_pos = i + 1 +- +- new_item = item.advance() +- label = (new_item.s, new_item.start, i) +- token_node = TokenNode(token, terminals[token.type]) +- new_item.node = node_cache[label] if label in node_cache else node_cache.setdefault(label, SymbolNode(*label)) +- new_item.node.add_family(new_item.s, item.rule, new_item.start, item.node, token_node) +- else: +- new_item = item +- +- if new_item.expect in self.TERMINALS: +- # add (B ::= Aai+1.B, h, y) to Q' +- next_to_scan.add(new_item) +- else: +- # add (B ::= Aa+1.B, h, y) to Ei+1 +- next_set.add(new_item) +- +- del delayed_matches[i+1] # No longer needed, so unburden memory +- +- if not next_set and not delayed_matches and not next_to_scan: +- considered_rules = list(sorted(to_scan, key=lambda key: key.rule.origin.name)) +- raise UnexpectedCharacters(stream, i, text_line, text_column, {item.expect.name for item in to_scan}, +- set(to_scan), state=frozenset(i.s for i in to_scan), +- considered_rules=considered_rules +- ) +- +- return next_to_scan +- +- +- delayed_matches = defaultdict(list) +- match = self.term_matcher +- terminals = self.lexer_conf.terminals_by_name +- +- # Cache for nodes & tokens created in a particular parse step. +- transitives = [{}] +- +- text_line = 1 +- text_column = 1 +- +- ## The main Earley loop. +- # Run the Prediction/Completion cycle for any Items in the current Earley set. +- # Completions will be added to the SPPF tree, and predictions will be recursively +- # processed down to terminals/empty nodes to be added to the scanner for the next +- # step. +- i = 0 +- for token in stream: +- self.predict_and_complete(i, to_scan, columns, transitives) +- +- to_scan = scan(i, to_scan) +- +- if token == '\n': +- text_line += 1 +- text_column = 1 +- else: +- text_column += 1 +- i += 1 +- +- self.predict_and_complete(i, to_scan, columns, transitives) +- +- ## Column is now the final column in the parse. 
+- assert i == len(columns)-1 +- return to_scan +diff --git a/src/poetry/core/_vendor/lark/py.typed b/src/poetry/core/_vendor/lark/py.typed +deleted file mode 100644 +index e69de29..0000000 +diff --git a/src/poetry/core/_vendor/lark/reconstruct.py b/src/poetry/core/_vendor/lark/reconstruct.py +deleted file mode 100644 +index c574668..0000000 +--- a/src/poetry/core/_vendor/lark/reconstruct.py ++++ /dev/null +@@ -1,107 +0,0 @@ +-"""Reconstruct text from a tree, based on Lark grammar""" +- +-from typing import List, Dict, Union, Callable, Iterable, Optional +-import unicodedata +- +-from .lark import Lark +-from .tree import Tree, ParseTree +-from .visitors import Transformer_InPlace +-from .lexer import Token, PatternStr, TerminalDef +-from .grammar import Terminal, NonTerminal, Symbol +- +-from .tree_matcher import TreeMatcher, is_discarded_terminal +-from .utils import is_id_continue +- +-def is_iter_empty(i): +- try: +- _ = next(i) +- return False +- except StopIteration: +- return True +- +- +-class WriteTokensTransformer(Transformer_InPlace): +- "Inserts discarded tokens into their correct place, according to the rules of grammar" +- +- tokens: Dict[str, TerminalDef] +- term_subs: Dict[str, Callable[[Symbol], str]] +- +- def __init__(self, tokens: Dict[str, TerminalDef], term_subs: Dict[str, Callable[[Symbol], str]]) -> None: +- self.tokens = tokens +- self.term_subs = term_subs +- +- def __default__(self, data, children, meta): +- if not getattr(meta, 'match_tree', False): +- return Tree(data, children) +- +- iter_args = iter(children) +- to_write = [] +- for sym in meta.orig_expansion: +- if is_discarded_terminal(sym): +- try: +- v = self.term_subs[sym.name](sym) +- except KeyError: +- t = self.tokens[sym.name] +- if not isinstance(t.pattern, PatternStr): +- raise NotImplementedError("Reconstructing regexps not supported yet: %s" % t) +- +- v = t.pattern.value +- to_write.append(v) +- else: +- x = next(iter_args) +- if isinstance(x, list): +- to_write += x +- else: +- if isinstance(x, Token): +- assert Terminal(x.type) == sym, x +- else: +- assert NonTerminal(x.data) == sym, (sym, x) +- to_write.append(x) +- +- assert is_iter_empty(iter_args) +- return to_write +- +- +-class Reconstructor(TreeMatcher): +- """ +- A Reconstructor that will, given a full parse Tree, generate source code. +- +- Note: +- The reconstructor cannot generate values from regexps. If you need to produce discarded +- regexes, such as newlines, use `term_subs` and provide default values for them. 
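[Editor's note: a minimal round-trip sketch of the Reconstructor described here, assuming a regular `lark` install; note the maybe_placeholders=False requirement enforced by the tree_matcher module later in this patch. The grammar and input are illustrative.]

from lark import Lark
from lark.reconstruct import Reconstructor

parser = Lark(r"""
    start: WORD ("," WORD)*
    WORD: /\w+/
""", parser="lalr", maybe_placeholders=False)

tree = parser.parse("a,b,c")
print(Reconstructor(parser).reconstruct(tree))   # prints: a,b,c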
+-
+-    Parameters:
+-        parser: a Lark instance
+-        term_subs: a dictionary of [Terminal name as str] to [output text as str]
+-    """
+-
+-    write_tokens: WriteTokensTransformer
+-
+-    def __init__(self, parser: Lark, term_subs: Optional[Dict[str, Callable[[Symbol], str]]]=None) -> None:
+-        TreeMatcher.__init__(self, parser)
+-
+-        self.write_tokens = WriteTokensTransformer({t.name:t for t in self.tokens}, term_subs or {})
+-
+-    def _reconstruct(self, tree):
+-        unreduced_tree = self.match_tree(tree, tree.data)
+-
+-        res = self.write_tokens.transform(unreduced_tree)
+-        for item in res:
+-            if isinstance(item, Tree):
+-                # TODO use orig_expansion.rulename to support templates
+-                yield from self._reconstruct(item)
+-            else:
+-                yield item
+-
+-    def reconstruct(self, tree: ParseTree, postproc: Optional[Callable[[Iterable[str]], Iterable[str]]]=None, insert_spaces: bool=True) -> str:
+-        x = self._reconstruct(tree)
+-        if postproc:
+-            x = postproc(x)
+-        y = []
+-        prev_item = ''
+-        for item in x:
+-            if insert_spaces and prev_item and item and is_id_continue(prev_item[-1]) and is_id_continue(item[0]):
+-                y.append(' ')
+-            y.append(item)
+-            prev_item = item
+-        return ''.join(y)
+diff --git a/src/poetry/core/_vendor/lark/tools/__init__.py b/src/poetry/core/_vendor/lark/tools/__init__.py
+deleted file mode 100644
+index 6b0bd6a..0000000
+--- a/src/poetry/core/_vendor/lark/tools/__init__.py
++++ /dev/null
+@@ -1,64 +0,0 @@
+-import sys
+-from argparse import ArgumentParser, FileType
+-from textwrap import indent
+-from logging import DEBUG, INFO, WARN, ERROR
+-from typing import Optional
+-import warnings
+-
+-from lark import Lark, logger
+-
+-lalr_argparser = ArgumentParser(add_help=False, epilog='Look at the Lark documentation for more info on the options')
+-
+-flags = [
+-    ('d', 'debug'),
+-    'keep_all_tokens',
+-    'regex',
+-    'propagate_positions',
+-    'maybe_placeholders',
+-    'use_bytes'
+-]
+-
+-options = ['start', 'lexer']
+-
+-lalr_argparser.add_argument('-v', '--verbose', action='count', default=0, help="Increase Logger output level, up to three times")
+-lalr_argparser.add_argument('-s', '--start', action='append', default=[])
+-lalr_argparser.add_argument('-l', '--lexer', default='contextual', choices=('basic', 'contextual'))
+-encoding: Optional[str] = 'utf-8' if sys.version_info > (3, 4) else None
+-lalr_argparser.add_argument('-o', '--out', type=FileType('w', encoding=encoding), default=sys.stdout, help='the output file (default=stdout)')
+-lalr_argparser.add_argument('grammar_file', type=FileType('r', encoding=encoding), help='A valid .lark file')
+-
+-for flag in flags:
+-    if isinstance(flag, tuple):
+-        options.append(flag[1])
+-        lalr_argparser.add_argument('-' + flag[0], '--' + flag[1], action='store_true')
+-    elif isinstance(flag, str):
+-        options.append(flag)
+-        lalr_argparser.add_argument('--' + flag, action='store_true')
+-    else:
+-        raise NotImplementedError("flags must only contain strings or tuples of strings")
+-
+-
+-def build_lalr(namespace):
+-    logger.setLevel((ERROR, WARN, INFO, DEBUG)[min(namespace.verbose, 3)])
+-    if len(namespace.start) == 0:
+-        namespace.start.append('start')
+-    kwargs = {n: getattr(namespace, n) for n in options}
+-    return Lark(namespace.grammar_file, parser='lalr', **kwargs), namespace.out
+-
+-
+-def showwarning_as_comment(message, category, filename, lineno, file=None, line=None):
+-    # Based on warnings._showwarnmsg_impl
+-    text = warnings.formatwarning(message, category, filename, lineno, line)
+-    text = indent(text, '# ')
+-    if file is None:
+-        file = sys.stderr
+- if file is None: +- return +- try: +- file.write(text) +- except OSError: +- pass +- +- +-def make_warnings_comments(): +- warnings.showwarning = showwarning_as_comment +diff --git a/src/poetry/core/_vendor/lark/tools/nearley.py b/src/poetry/core/_vendor/lark/tools/nearley.py +deleted file mode 100644 +index 1fc27d5..0000000 +--- a/src/poetry/core/_vendor/lark/tools/nearley.py ++++ /dev/null +@@ -1,202 +0,0 @@ +-"Converts Nearley grammars to Lark" +- +-import os.path +-import sys +-import codecs +-import argparse +- +- +-from lark import Lark, Transformer, v_args +- +-nearley_grammar = r""" +- start: (ruledef|directive)+ +- +- directive: "@" NAME (STRING|NAME) +- | "@" JS -> js_code +- ruledef: NAME "->" expansions +- | NAME REGEXP "->" expansions -> macro +- expansions: expansion ("|" expansion)* +- +- expansion: expr+ js +- +- ?expr: item (":" /[+*?]/)? +- +- ?item: rule|string|regexp|null +- | "(" expansions ")" +- +- rule: NAME +- string: STRING +- regexp: REGEXP +- null: "null" +- JS: /{%.*?%}/s +- js: JS? +- +- NAME: /[a-zA-Z_$]\w*/ +- COMMENT: /#[^\n]*/ +- REGEXP: /\[.*?\]/ +- +- STRING: _STRING "i"? +- +- %import common.ESCAPED_STRING -> _STRING +- %import common.WS +- %ignore WS +- %ignore COMMENT +- +- """ +- +-nearley_grammar_parser = Lark(nearley_grammar, parser='earley', lexer='basic') +- +-def _get_rulename(name): +- name = {'_': '_ws_maybe', '__': '_ws'}.get(name, name) +- return 'n_' + name.replace('$', '__DOLLAR__').lower() +- +-@v_args(inline=True) +-class NearleyToLark(Transformer): +- def __init__(self): +- self._count = 0 +- self.extra_rules = {} +- self.extra_rules_rev = {} +- self.alias_js_code = {} +- +- def _new_function(self, code): +- name = 'alias_%d' % self._count +- self._count += 1 +- +- self.alias_js_code[name] = code +- return name +- +- def _extra_rule(self, rule): +- if rule in self.extra_rules_rev: +- return self.extra_rules_rev[rule] +- +- name = 'xrule_%d' % len(self.extra_rules) +- assert name not in self.extra_rules +- self.extra_rules[name] = rule +- self.extra_rules_rev[rule] = name +- return name +- +- def rule(self, name): +- return _get_rulename(name) +- +- def ruledef(self, name, exps): +- return '!%s: %s' % (_get_rulename(name), exps) +- +- def expr(self, item, op): +- rule = '(%s)%s' % (item, op) +- return self._extra_rule(rule) +- +- def regexp(self, r): +- return '/%s/' % r +- +- def null(self): +- return '' +- +- def string(self, s): +- return self._extra_rule(s) +- +- def expansion(self, *x): +- x, js = x[:-1], x[-1] +- if js.children: +- js_code ,= js.children +- js_code = js_code[2:-2] +- alias = '-> ' + self._new_function(js_code) +- else: +- alias = '' +- return ' '.join(x) + alias +- +- def expansions(self, *x): +- return '%s' % ('\n |'.join(x)) +- +- def start(self, *rules): +- return '\n'.join(filter(None, rules)) +- +-def _nearley_to_lark(g, builtin_path, n2l, js_code, folder_path, includes): +- rule_defs = [] +- +- tree = nearley_grammar_parser.parse(g) +- for statement in tree.children: +- if statement.data == 'directive': +- directive, arg = statement.children +- if directive in ('builtin', 'include'): +- folder = builtin_path if directive == 'builtin' else folder_path +- path = os.path.join(folder, arg[1:-1]) +- if path not in includes: +- includes.add(path) +- with codecs.open(path, encoding='utf8') as f: +- text = f.read() +- rule_defs += _nearley_to_lark(text, builtin_path, n2l, js_code, os.path.abspath(os.path.dirname(path)), includes) +- else: +- assert False, directive +- elif statement.data == 'js_code': +- code ,= 
statement.children +- code = code[2:-2] +- js_code.append(code) +- elif statement.data == 'macro': +- pass # TODO Add support for macros! +- elif statement.data == 'ruledef': +- rule_defs.append(n2l.transform(statement)) +- else: +- raise Exception("Unknown statement: %s" % statement) +- +- return rule_defs +- +- +-def create_code_for_nearley_grammar(g, start, builtin_path, folder_path, es6=False): +- import js2py +- +- emit_code = [] +- def emit(x=None): +- if x: +- emit_code.append(x) +- emit_code.append('\n') +- +- js_code = ['function id(x) {return x[0];}'] +- n2l = NearleyToLark() +- rule_defs = _nearley_to_lark(g, builtin_path, n2l, js_code, folder_path, set()) +- lark_g = '\n'.join(rule_defs) +- lark_g += '\n'+'\n'.join('!%s: %s' % item for item in n2l.extra_rules.items()) +- +- emit('from lark import Lark, Transformer') +- emit() +- emit('grammar = ' + repr(lark_g)) +- emit() +- +- for alias, code in n2l.alias_js_code.items(): +- js_code.append('%s = (%s);' % (alias, code)) +- +- if es6: +- emit(js2py.translate_js6('\n'.join(js_code))) +- else: +- emit(js2py.translate_js('\n'.join(js_code))) +- emit('class TransformNearley(Transformer):') +- for alias in n2l.alias_js_code: +- emit(" %s = var.get('%s').to_python()" % (alias, alias)) +- emit(" __default__ = lambda self, n, c, m: c if c else None") +- +- emit() +- emit('parser = Lark(grammar, start="n_%s", maybe_placeholders=False)' % start) +- emit('def parse(text):') +- emit(' return TransformNearley().transform(parser.parse(text))') +- +- return ''.join(emit_code) +- +-def main(fn, start, nearley_lib, es6=False): +- with codecs.open(fn, encoding='utf8') as f: +- grammar = f.read() +- return create_code_for_nearley_grammar(grammar, start, os.path.join(nearley_lib, 'builtin'), os.path.abspath(os.path.dirname(fn)), es6=es6) +- +-def get_arg_parser(): +- parser = argparse.ArgumentParser(description='Reads a Nearley grammar (with js functions), and outputs an equivalent lark parser.') +- parser.add_argument('nearley_grammar', help='Path to the file containing the nearley grammar') +- parser.add_argument('start_rule', help='Rule within the nearley grammar to make the base rule') +- parser.add_argument('nearley_lib', help='Path to root directory of nearley codebase (used for including builtins)') +- parser.add_argument('--es6', help='Enable experimental ES6 support', action='store_true') +- return parser +- +-if __name__ == '__main__': +- parser = get_arg_parser() +- if len(sys.argv) == 1: +- parser.print_help(sys.stderr) +- sys.exit(1) +- args = parser.parse_args() +- print(main(fn=args.nearley_grammar, start=args.start_rule, nearley_lib=args.nearley_lib, es6=args.es6)) +diff --git a/src/poetry/core/_vendor/lark/tools/serialize.py b/src/poetry/core/_vendor/lark/tools/serialize.py +deleted file mode 100644 +index 6154024..0000000 +--- a/src/poetry/core/_vendor/lark/tools/serialize.py ++++ /dev/null +@@ -1,34 +0,0 @@ +-import codecs +-import sys +-import json +- +-from lark import Lark +-from lark.grammar import RuleOptions, Rule +-from lark.lexer import TerminalDef +-from lark.tools import lalr_argparser, build_lalr +- +-import argparse +- +-argparser = argparse.ArgumentParser(prog='python -m lark.tools.serialize', parents=[lalr_argparser], +- description="Lark Serialization Tool - Stores Lark's internal state & LALR analysis as a JSON file", +- epilog='Look at the Lark documentation for more info on the options') +- +- +-def serialize(lark_inst, outfile): +- data, memo = lark_inst.memo_serialize([TerminalDef, Rule]) +- 
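# [Editor's note, not part of the patch: the tool above is run as a module. A
# typical invocation, with a hypothetical grammar file name, would be
#     python -m lark.tools.serialize my_grammar.lark -o parser.json
# where the grammar_file positional and -o/--out come from lalr_argparser.]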
outfile.write('{\n') +- outfile.write(' "data": %s,\n' % json.dumps(data)) +- outfile.write(' "memo": %s\n' % json.dumps(memo)) +- outfile.write('}\n') +- +- +-def main(): +- if len(sys.argv)==1: +- argparser.print_help(sys.stderr) +- sys.exit(1) +- ns = argparser.parse_args() +- serialize(*build_lalr(ns)) +- +- +-if __name__ == '__main__': +- main() +diff --git a/src/poetry/core/_vendor/lark/tools/standalone.py b/src/poetry/core/_vendor/lark/tools/standalone.py +deleted file mode 100644 +index 9989f87..0000000 +--- a/src/poetry/core/_vendor/lark/tools/standalone.py ++++ /dev/null +@@ -1,190 +0,0 @@ +-###{standalone +-# +-# +-# Lark Stand-alone Generator Tool +-# ---------------------------------- +-# Generates a stand-alone LALR(1) parser +-# +-# Git: https://github.com/erezsh/lark +-# Author: Erez Shinan (erezshin@gmail.com) +-# +-# +-# >>> LICENSE +-# +-# This tool and its generated code use a separate license from Lark, +-# and are subject to the terms of the Mozilla Public License, v. 2.0. +-# If a copy of the MPL was not distributed with this +-# file, You can obtain one at https://mozilla.org/MPL/2.0/. +-# +-# If you wish to purchase a commercial license for this tool and its +-# generated code, you may contact me via email or otherwise. +-# +-# If MPL2 is incompatible with your free or open-source project, +-# contact me and we'll work it out. +-# +-# +- +-from abc import ABC, abstractmethod +-from collections.abc import Sequence +-from types import ModuleType +-from typing import ( +- TypeVar, Generic, Type, Tuple, List, Dict, Iterator, Collection, Callable, Optional, FrozenSet, Any, +- Union, Iterable, IO, TYPE_CHECKING, +- Pattern as REPattern, ClassVar, Set, Mapping +-) +-###} +- +-import sys +-import token, tokenize +-import os +-from os import path +-from collections import defaultdict +-from functools import partial +-from argparse import ArgumentParser +- +-import lark +-from lark.tools import lalr_argparser, build_lalr, make_warnings_comments +- +- +-from lark.grammar import Rule +-from lark.lexer import TerminalDef +- +-_dir = path.dirname(__file__) +-_larkdir = path.join(_dir, path.pardir) +- +- +-EXTRACT_STANDALONE_FILES = [ +- 'tools/standalone.py', +- 'exceptions.py', +- 'utils.py', +- 'tree.py', +- 'visitors.py', +- 'grammar.py', +- 'lexer.py', +- 'common.py', +- 'parse_tree_builder.py', +- 'parsers/lalr_parser.py', +- 'parsers/lalr_analysis.py', +- 'parser_frontends.py', +- 'lark.py', +- 'indenter.py', +-] +- +-def extract_sections(lines): +- section = None +- text = [] +- sections = defaultdict(list) +- for line in lines: +- if line.startswith('###'): +- if line[3] == '{': +- section = line[4:].strip() +- elif line[3] == '}': +- sections[section] += text +- section = None +- text = [] +- else: +- raise ValueError(line) +- elif section: +- text.append(line) +- +- return {name: ''.join(text) for name, text in sections.items()} +- +- +-def strip_docstrings(line_gen): +- """ Strip comments and docstrings from a file. 
+- Based on code from: https://stackoverflow.com/questions/1769332/script-to-remove-python-comments-docstrings +- """ +- res = [] +- +- prev_toktype = token.INDENT +- last_lineno = -1 +- last_col = 0 +- +- tokgen = tokenize.generate_tokens(line_gen) +- for toktype, ttext, (slineno, scol), (elineno, ecol), ltext in tokgen: +- if slineno > last_lineno: +- last_col = 0 +- if scol > last_col: +- res.append(" " * (scol - last_col)) +- if toktype == token.STRING and prev_toktype == token.INDENT: +- # Docstring +- res.append("#--") +- elif toktype == tokenize.COMMENT: +- # Comment +- res.append("##\n") +- else: +- res.append(ttext) +- prev_toktype = toktype +- last_col = ecol +- last_lineno = elineno +- +- return ''.join(res) +- +- +-def gen_standalone(lark_inst, output=None, out=sys.stdout, compress=False): +- if output is None: +- output = partial(print, file=out) +- +- import pickle, zlib, base64 +- def compressed_output(obj): +- s = pickle.dumps(obj, pickle.HIGHEST_PROTOCOL) +- c = zlib.compress(s) +- output(repr(base64.b64encode(c))) +- +- def output_decompress(name): +- output('%(name)s = pickle.loads(zlib.decompress(base64.b64decode(%(name)s)))' % locals()) +- +- output('# The file was automatically generated by Lark v%s' % lark.__version__) +- output('__version__ = "%s"' % lark.__version__) +- output() +- +- for i, pyfile in enumerate(EXTRACT_STANDALONE_FILES): +- with open(os.path.join(_larkdir, pyfile)) as f: +- code = extract_sections(f)['standalone'] +- if i: # if not this file +- code = strip_docstrings(partial(next, iter(code.splitlines(True)))) +- output(code) +- +- data, m = lark_inst.memo_serialize([TerminalDef, Rule]) +- output('import pickle, zlib, base64') +- if compress: +- output('DATA = (') +- compressed_output(data) +- output(')') +- output_decompress('DATA') +- output('MEMO = (') +- compressed_output(m) +- output(')') +- output_decompress('MEMO') +- else: +- output('DATA = (') +- output(data) +- output(')') +- output('MEMO = (') +- output(m) +- output(')') +- +- +- output('Shift = 0') +- output('Reduce = 1') +- output("def Lark_StandAlone(**kwargs):") +- output(" return Lark._load_from_dict(DATA, MEMO, **kwargs)") +- +- +- +- +-def main(): +- make_warnings_comments() +- parser = ArgumentParser(prog="prog='python -m lark.tools.standalone'", description="Lark Stand-alone Generator Tool", +- parents=[lalr_argparser], epilog='Look at the Lark documentation for more info on the options') +- parser.add_argument('-c', '--compress', action='store_true', default=0, help="Enable compression") +- if len(sys.argv) == 1: +- parser.print_help(sys.stderr) +- sys.exit(1) +- ns = parser.parse_args() +- +- lark_inst, out = build_lalr(ns) +- gen_standalone(lark_inst, out=out, compress=ns.compress) +- +-if __name__ == '__main__': +- main() +\ No newline at end of file +diff --git a/src/poetry/core/_vendor/lark/tree.py b/src/poetry/core/_vendor/lark/tree.py +deleted file mode 100644 +index 51c962d..0000000 +--- a/src/poetry/core/_vendor/lark/tree.py ++++ /dev/null +@@ -1,262 +0,0 @@ +-import sys +-from copy import deepcopy +- +-from typing import List, Callable, Iterator, Union, Optional, Generic, TypeVar, Any, TYPE_CHECKING +- +-if TYPE_CHECKING: +- from .lexer import TerminalDef, Token +- if sys.version_info >= (3, 8): +- from typing import Literal +- else: +- from typing_extensions import Literal +- +-###{standalone +-from collections import OrderedDict +- +-class Meta: +- +- empty: bool +- line: int +- column: int +- start_pos: int +- end_line: int +- end_column: int +- end_pos: int +- 
orig_expansion: 'List[TerminalDef]' +- match_tree: bool +- +- def __init__(self): +- self.empty = True +- +- +-_Leaf_T = TypeVar("_Leaf_T") +-Branch = Union[_Leaf_T, 'Tree[_Leaf_T]'] +- +- +-class Tree(Generic[_Leaf_T]): +- """The main tree class. +- +- Creates a new tree, and stores "data" and "children" in attributes of the same name. +- Trees can be hashed and compared. +- +- Parameters: +- data: The name of the rule or alias +- children: List of matched sub-rules and terminals +- meta: Line & Column numbers (if ``propagate_positions`` is enabled). +- meta attributes: line, column, start_pos, end_line, end_column, end_pos +- """ +- +- data: str +- children: 'List[Branch[_Leaf_T]]' +- +- def __init__(self, data: str, children: 'List[Branch[_Leaf_T]]', meta: Optional[Meta]=None) -> None: +- self.data = data +- self.children = children +- self._meta = meta +- +- @property +- def meta(self) -> Meta: +- if self._meta is None: +- self._meta = Meta() +- return self._meta +- +- def __repr__(self): +- return 'Tree(%r, %r)' % (self.data, self.children) +- +- def _pretty_label(self): +- return self.data +- +- def _pretty(self, level, indent_str): +- if len(self.children) == 1 and not isinstance(self.children[0], Tree): +- return [indent_str*level, self._pretty_label(), '\t', '%s' % (self.children[0],), '\n'] +- +- l = [indent_str*level, self._pretty_label(), '\n'] +- for n in self.children: +- if isinstance(n, Tree): +- l += n._pretty(level+1, indent_str) +- else: +- l += [indent_str*(level+1), '%s' % (n,), '\n'] +- +- return l +- +- def pretty(self, indent_str: str=' ') -> str: +- """Returns an indented string representation of the tree. +- +- Great for debugging. +- """ +- return ''.join(self._pretty(0, indent_str)) +- +- def __rich__(self, parent:'rich.tree.Tree'=None) -> 'rich.tree.Tree': +- """Returns a tree widget for the 'rich' library. +- +- Example: +- :: +- from rich import print +- from lark import Tree +- +- tree = Tree('root', ['node1', 'node2']) +- print(tree) +- """ +- return self._rich(parent) +- +- def _rich(self, parent): +- if parent: +- tree = parent.add(f'[bold]{self.data}[/bold]') +- else: +- import rich.tree +- tree = rich.tree.Tree(self.data) +- +- for c in self.children: +- if isinstance(c, Tree): +- c._rich(tree) +- else: +- tree.add(f'[green]{c}[/green]') +- +- return tree +- +- def __eq__(self, other): +- try: +- return self.data == other.data and self.children == other.children +- except AttributeError: +- return False +- +- def __ne__(self, other): +- return not (self == other) +- +- def __hash__(self) -> int: +- return hash((self.data, tuple(self.children))) +- +- def iter_subtrees(self) -> 'Iterator[Tree[_Leaf_T]]': +- """Depth-first iteration. +- +- Iterates over all the subtrees, never returning to the same node twice (Lark's parse-tree is actually a DAG). 
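[Editor's note: a small sketch of the traversal helpers documented here and just below (iter_subtrees, find_data, scan_values), assuming a regular `lark` install; the tree is hand-built for illustration and not part of the patch.]

from lark import Tree, Token

t = Tree("start", [Tree("pair", [Token("WORD", "a"), Token("WORD", "b")])])

print([st.data for st in t.iter_subtrees()])   # ['pair', 'start'] - children before parents
print(next(t.find_data("pair")).children)      # the two WORD tokens
print(list(t.scan_values(lambda v: isinstance(v, Token))))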
+- """ +- queue = [self] +- subtrees = OrderedDict() +- for subtree in queue: +- subtrees[id(subtree)] = subtree +- # Reason for type ignore https://github.com/python/mypy/issues/10999 +- queue += [c for c in reversed(subtree.children) # type: ignore[misc] +- if isinstance(c, Tree) and id(c) not in subtrees] +- +- del queue +- return reversed(list(subtrees.values())) +- +- def find_pred(self, pred: 'Callable[[Tree[_Leaf_T]], bool]') -> 'Iterator[Tree[_Leaf_T]]': +- """Returns all nodes of the tree that evaluate pred(node) as true.""" +- return filter(pred, self.iter_subtrees()) +- +- def find_data(self, data: str) -> 'Iterator[Tree[_Leaf_T]]': +- """Returns all nodes of the tree whose data equals the given data.""" +- return self.find_pred(lambda t: t.data == data) +- +-###} +- +- def expand_kids_by_data(self, *data_values): +- """Expand (inline) children with any of the given data values. Returns True if anything changed""" +- changed = False +- for i in range(len(self.children)-1, -1, -1): +- child = self.children[i] +- if isinstance(child, Tree) and child.data in data_values: +- self.children[i:i+1] = child.children +- changed = True +- return changed +- +- +- def scan_values(self, pred: 'Callable[[Branch[_Leaf_T]], bool]') -> Iterator[_Leaf_T]: +- """Return all values in the tree that evaluate pred(value) as true. +- +- This can be used to find all the tokens in the tree. +- +- Example: +- >>> all_tokens = tree.scan_values(lambda v: isinstance(v, Token)) +- """ +- for c in self.children: +- if isinstance(c, Tree): +- for t in c.scan_values(pred): +- yield t +- else: +- if pred(c): +- yield c +- +- def iter_subtrees_topdown(self): +- """Breadth-first iteration. +- +- Iterates over all the subtrees, return nodes in order like pretty() does. +- """ +- stack = [self] +- while stack: +- node = stack.pop() +- if not isinstance(node, Tree): +- continue +- yield node +- for child in reversed(node.children): +- stack.append(child) +- +- def __deepcopy__(self, memo): +- return type(self)(self.data, deepcopy(self.children, memo), meta=self._meta) +- +- def copy(self) -> 'Tree[_Leaf_T]': +- return type(self)(self.data, self.children) +- +- def set(self, data: str, children: 'List[Branch[_Leaf_T]]') -> None: +- self.data = data +- self.children = children +- +- +-ParseTree = Tree['Token'] +- +- +-class SlottedTree(Tree): +- __slots__ = 'data', 'children', 'rule', '_meta' +- +- +-def pydot__tree_to_png(tree: Tree, filename: str, rankdir: 'Literal["TB", "LR", "BT", "RL"]'="LR", **kwargs) -> None: +- graph = pydot__tree_to_graph(tree, rankdir, **kwargs) +- graph.write_png(filename) +- +- +-def pydot__tree_to_dot(tree: Tree, filename, rankdir="LR", **kwargs): +- graph = pydot__tree_to_graph(tree, rankdir, **kwargs) +- graph.write(filename) +- +- +-def pydot__tree_to_graph(tree: Tree, rankdir="LR", **kwargs): +- """Creates a colorful image that represents the tree (data+children, without meta) +- +- Possible values for `rankdir` are "TB", "LR", "BT", "RL", corresponding to +- directed graphs drawn from top to bottom, from left to right, from bottom to +- top, and from right to left, respectively. +- +- `kwargs` can be any graph attribute (e. g. `dpi=200`). For a list of +- possible attributes, see https://www.graphviz.org/doc/info/attrs.html. 
+- """ +- +- import pydot # type: ignore[import] +- graph = pydot.Dot(graph_type='digraph', rankdir=rankdir, **kwargs) +- +- i = [0] +- +- def new_leaf(leaf): +- node = pydot.Node(i[0], label=repr(leaf)) +- i[0] += 1 +- graph.add_node(node) +- return node +- +- def _to_pydot(subtree): +- color = hash(subtree.data) & 0xffffff +- color |= 0x808080 +- +- subnodes = [_to_pydot(child) if isinstance(child, Tree) else new_leaf(child) +- for child in subtree.children] +- node = pydot.Node(i[0], style="filled", fillcolor="#%x" % color, label=subtree.data) +- i[0] += 1 +- graph.add_node(node) +- +- for subnode in subnodes: +- graph.add_edge(pydot.Edge(node, subnode)) +- +- return node +- +- _to_pydot(tree) +- return graph +diff --git a/src/poetry/core/_vendor/lark/tree_matcher.py b/src/poetry/core/_vendor/lark/tree_matcher.py +deleted file mode 100644 +index fe6bd5f..0000000 +--- a/src/poetry/core/_vendor/lark/tree_matcher.py ++++ /dev/null +@@ -1,186 +0,0 @@ +-"""Tree matcher based on Lark grammar""" +- +-import re +-from collections import defaultdict +- +-from . import Tree, Token +-from .common import ParserConf +-from .parsers import earley +-from .grammar import Rule, Terminal, NonTerminal +- +- +-def is_discarded_terminal(t): +- return t.is_term and t.filter_out +- +- +-class _MakeTreeMatch: +- def __init__(self, name, expansion): +- self.name = name +- self.expansion = expansion +- +- def __call__(self, args): +- t = Tree(self.name, args) +- t.meta.match_tree = True +- t.meta.orig_expansion = self.expansion +- return t +- +- +-def _best_from_group(seq, group_key, cmp_key): +- d = {} +- for item in seq: +- key = group_key(item) +- if key in d: +- v1 = cmp_key(item) +- v2 = cmp_key(d[key]) +- if v2 > v1: +- d[key] = item +- else: +- d[key] = item +- return list(d.values()) +- +- +-def _best_rules_from_group(rules): +- rules = _best_from_group(rules, lambda r: r, lambda r: -len(r.expansion)) +- rules.sort(key=lambda r: len(r.expansion)) +- return rules +- +- +-def _match(term, token): +- if isinstance(token, Tree): +- name, _args = parse_rulename(term.name) +- return token.data == name +- elif isinstance(token, Token): +- return term == Terminal(token.type) +- assert False, (term, token) +- +- +-def make_recons_rule(origin, expansion, old_expansion): +- return Rule(origin, expansion, alias=_MakeTreeMatch(origin.name, old_expansion)) +- +- +-def make_recons_rule_to_term(origin, term): +- return make_recons_rule(origin, [Terminal(term.name)], [term]) +- +- +-def parse_rulename(s): +- "Parse rule names that may contain a template syntax (like rule{a, b, ...})" +- name, args_str = re.match(r'(\w+)(?:{(.+)})?', s).groups() +- args = args_str and [a.strip() for a in args_str.split(',')] +- return name, args +- +- +- +-class ChildrenLexer: +- def __init__(self, children): +- self.children = children +- +- def lex(self, parser_state): +- return self.children +- +-class TreeMatcher: +- """Match the elements of a tree node, based on an ontology +- provided by a Lark grammar. +- +- Supports templates and inlined rules (`rule{a, b,..}` and `_rule`) +- +- Initiialize with an instance of Lark. +- """ +- +- def __init__(self, parser): +- # XXX TODO calling compile twice returns different results! 
+-        assert parser.options.maybe_placeholders == False
+-        # XXX TODO: we just ignore the potential existence of a postlexer
+-        self.tokens, rules, _extra = parser.grammar.compile(parser.options.start, set())
+-
+-        self.rules_for_root = defaultdict(list)
+-
+-        self.rules = list(self._build_recons_rules(rules))
+-        self.rules.reverse()
+-
+-        # Choose the best rule from each group of {rule => [rule.alias]}, since we only really need one derivation.
+-        self.rules = _best_rules_from_group(self.rules)
+-
+-        self.parser = parser
+-        self._parser_cache = {}
+-
+-    def _build_recons_rules(self, rules):
+-        "Convert tree-parsing/construction rules to tree-matching rules"
+-        expand1s = {r.origin for r in rules if r.options.expand1}
+-
+-        aliases = defaultdict(list)
+-        for r in rules:
+-            if r.alias:
+-                aliases[r.origin].append(r.alias)
+-
+-        rule_names = {r.origin for r in rules}
+-        nonterminals = {sym for sym in rule_names
+-                        if sym.name.startswith('_') or sym in expand1s or sym in aliases}
+-
+-        seen = set()
+-        for r in rules:
+-            recons_exp = [sym if sym in nonterminals else Terminal(sym.name)
+-                          for sym in r.expansion if not is_discarded_terminal(sym)]
+-
+-            # Skip self-recursive constructs
+-            if recons_exp == [r.origin] and r.alias is None:
+-                continue
+-
+-            sym = NonTerminal(r.alias) if r.alias else r.origin
+-            rule = make_recons_rule(sym, recons_exp, r.expansion)
+-
+-            if sym in expand1s and len(recons_exp) != 1:
+-                self.rules_for_root[sym.name].append(rule)
+-
+-                if sym.name not in seen:
+-                    yield make_recons_rule_to_term(sym, sym)
+-                    seen.add(sym.name)
+-            else:
+-                if sym.name.startswith('_') or sym in expand1s:
+-                    yield rule
+-                else:
+-                    self.rules_for_root[sym.name].append(rule)
+-
+-        for origin, rule_aliases in aliases.items():
+-            for alias in rule_aliases:
+-                yield make_recons_rule_to_term(origin, NonTerminal(alias))
+-            yield make_recons_rule_to_term(origin, origin)
+-
+-    def match_tree(self, tree, rulename):
+-        """Match the elements of `tree` to the symbols of rule `rulename`.
+-
+-        Parameters:
+-            tree (Tree): the tree node to match
+-            rulename (str): The expected full rule name (including template args)
+-
+-        Returns:
+-            Tree: an unreduced tree that matches `rulename`
+-
+-        Raises:
+-            UnexpectedToken: If no match was found.
+-
+-        Note:
+-            It's the callers' responsibility to match the tree recursively.
+-        """
+-        if rulename:
+-            # validate
+-            name, _args = parse_rulename(rulename)
+-            assert tree.data == name
+-        else:
+-            rulename = tree.data
+-
+-        # TODO: ambiguity?
+-        try:
+-            parser = self._parser_cache[rulename]
+-        except KeyError:
+-            rules = self.rules + _best_rules_from_group(self.rules_for_root[rulename])
+-
+-            # TODO pass callbacks through dict, instead of alias?
+-            callbacks = {rule: rule.alias for rule in rules}
+-            conf = ParserConf(rules, callbacks, [rulename])
+-            parser = earley.Parser(self.parser.lexer_conf, conf, _match, resolve_ambiguity=True)
+-            self._parser_cache[rulename] = parser
+-
+-        # find a full derivation
+-        unreduced_tree = parser.parse(ChildrenLexer(tree.children), rulename)
+-        assert unreduced_tree.data == rulename
+-        return unreduced_tree
+diff --git a/src/poetry/core/_vendor/lark/tree_templates.py b/src/poetry/core/_vendor/lark/tree_templates.py
+deleted file mode 100644
+index b551509..0000000
+--- a/src/poetry/core/_vendor/lark/tree_templates.py
++++ /dev/null
+@@ -1,170 +0,0 @@
+-"""This module defines utilities for matching and translating tree templates.
+-
+-A tree template is a tree that contains nodes that are template variables.
+- +-""" +- +-from typing import Union, Optional, Mapping, Dict, Tuple, Iterator +- +-from lark import Tree, Transformer +-from lark.exceptions import MissingVariableError +- +-TreeOrCode = Union[Tree[str], str] +-_TEMPLATE_MARKER = '$' +- +- +-class TemplateConf: +- """Template Configuration +- +- Allows customization for different uses of Template +- """ +- +- def __init__(self, parse=None): +- self._parse = parse +- +- def test_var(self, var: Union[Tree[str], str]) -> Optional[str]: +- """Given a tree node, if it is a template variable return its name. Otherwise, return None. +- +- This method may be overridden for customization +- +- Parameters: +- var: Tree | str - The tree node to test +- +- """ +- if isinstance(var, str): +- return _get_template_name(var) +- +- if ( +- isinstance(var, Tree) +- and var.data == "var" +- and len(var.children) > 0 +- and isinstance(var.children[0], str) +- ): +- return _get_template_name(var.children[0]) +- +- return None +- +- def _get_tree(self, template: TreeOrCode) -> Tree[str]: +- if isinstance(template, str): +- assert self._parse +- template = self._parse(template) +- +- assert isinstance(template, Tree) +- return template +- +- def __call__(self, template: Tree[str]) -> 'Template': +- return Template(template, conf=self) +- +- def _match_tree_template(self, template: TreeOrCode, tree: TreeOrCode) -> Optional[Dict[str, TreeOrCode]]: +- template_var = self.test_var(template) +- if template_var: +- return {template_var: tree} +- +- if isinstance(template, str): +- if template == tree: +- return {} +- return None +- +- assert isinstance(template, Tree) and isinstance(tree, Tree), f"template={template} tree={tree}" +- +- if template.data == tree.data and len(template.children) == len(tree.children): +- res = {} +- for t1, t2 in zip(template.children, tree.children): +- matches = self._match_tree_template(t1, t2) +- if matches is None: +- return None +- +- res.update(matches) +- +- return res +- +- return None +- +- +-class _ReplaceVars(Transformer[str, Tree[str]]): +- def __init__(self, conf: TemplateConf, vars: Mapping[str, Tree[str]]) -> None: +- super().__init__() +- self._conf = conf +- self._vars = vars +- +- def __default__(self, data, children, meta) -> Tree[str]: +- tree = super().__default__(data, children, meta) +- +- var = self._conf.test_var(tree) +- if var: +- try: +- return self._vars[var] +- except KeyError: +- raise MissingVariableError(f"No mapping for template variable ({var})") +- return tree +- +- +-class Template: +- """Represents a tree templates, tied to a specific configuration +- +- A tree template is a tree that contains nodes that are template variables. +- Those variables will match any tree. +- (future versions may support annotations on the variables, to allow more complex templates) +- """ +- +- def __init__(self, tree: Tree[str], conf: TemplateConf = TemplateConf()): +- self.conf = conf +- self.tree = conf._get_tree(tree) +- +- def match(self, tree: TreeOrCode) -> Optional[Dict[str, TreeOrCode]]: +- """Match a tree template to a tree. +- +- A tree template without variables will only match ``tree`` if it is equal to the template. +- +- Parameters: +- tree (Tree): The tree to match to the template +- +- Returns: +- Optional[Dict[str, Tree]]: If match is found, returns a dictionary mapping +- template variable names to their matching tree nodes. +- If no match was found, returns None. 
+- """ +- tree = self.conf._get_tree(tree) +- return self.conf._match_tree_template(self.tree, tree) +- +- def search(self, tree: TreeOrCode) -> Iterator[Tuple[Tree[str], Dict[str, TreeOrCode]]]: +- """Search for all occurances of the tree template inside ``tree``. +- """ +- tree = self.conf._get_tree(tree) +- for subtree in tree.iter_subtrees(): +- res = self.match(subtree) +- if res: +- yield subtree, res +- +- def apply_vars(self, vars: Mapping[str, Tree[str]]) -> Tree[str]: +- """Apply vars to the template tree +- """ +- return _ReplaceVars(self.conf, vars).transform(self.tree) +- +- +-def translate(t1: Template, t2: Template, tree: TreeOrCode): +- """Search tree and translate each occurrance of t1 into t2. +- """ +- tree = t1.conf._get_tree(tree) # ensure it's a tree, parse if necessary and possible +- for subtree, vars in t1.search(tree): +- res = t2.apply_vars(vars) +- subtree.set(res.data, res.children) +- return tree +- +- +-class TemplateTranslator: +- """Utility class for translating a collection of patterns +- """ +- +- def __init__(self, translations: Mapping[Template, Template]): +- assert all(isinstance(k, Template) and isinstance(v, Template) for k, v in translations.items()) +- self.translations = translations +- +- def translate(self, tree: Tree[str]): +- for k, v in self.translations.items(): +- tree = translate(k, v, tree) +- return tree +- +- +-def _get_template_name(value: str) -> Optional[str]: +- return value.lstrip(_TEMPLATE_MARKER) if value.startswith(_TEMPLATE_MARKER) else None +diff --git a/src/poetry/core/_vendor/lark/utils.py b/src/poetry/core/_vendor/lark/utils.py +deleted file mode 100644 +index d7bab6f..0000000 +--- a/src/poetry/core/_vendor/lark/utils.py ++++ /dev/null +@@ -1,322 +0,0 @@ +-import unicodedata +-import os +-from functools import reduce +-from collections import deque +- +-###{standalone +-import sys, re +-import logging +-logger: logging.Logger = logging.getLogger("lark") +-logger.addHandler(logging.StreamHandler()) +-# Set to highest level, since we have some warnings amongst the code +-# By default, we should not output any log messages +-logger.setLevel(logging.CRITICAL) +- +- +-NO_VALUE = object() +- +- +-def classify(seq, key=None, value=None): +- d = {} +- for item in seq: +- k = key(item) if (key is not None) else item +- v = value(item) if (value is not None) else item +- if k in d: +- d[k].append(v) +- else: +- d[k] = [v] +- return d +- +- +-def _deserialize(data, namespace, memo): +- if isinstance(data, dict): +- if '__type__' in data: # Object +- class_ = namespace[data['__type__']] +- return class_.deserialize(data, memo) +- elif '@' in data: +- return memo[data['@']] +- return {key:_deserialize(value, namespace, memo) for key, value in data.items()} +- elif isinstance(data, list): +- return [_deserialize(value, namespace, memo) for value in data] +- return data +- +- +-class Serialize: +- """Safe-ish serialization interface that doesn't rely on Pickle +- +- Attributes: +- __serialize_fields__ (List[str]): Fields (aka attributes) to serialize. +- __serialize_namespace__ (list): List of classes that deserialization is allowed to instantiate. +- Should include all field types that aren't builtin types. 
+- """ +- +- def memo_serialize(self, types_to_memoize): +- memo = SerializeMemoizer(types_to_memoize) +- return self.serialize(memo), memo.serialize() +- +- def serialize(self, memo=None): +- if memo and memo.in_types(self): +- return {'@': memo.memoized.get(self)} +- +- fields = getattr(self, '__serialize_fields__') +- res = {f: _serialize(getattr(self, f), memo) for f in fields} +- res['__type__'] = type(self).__name__ +- if hasattr(self, '_serialize'): +- self._serialize(res, memo) +- return res +- +- @classmethod +- def deserialize(cls, data, memo): +- namespace = getattr(cls, '__serialize_namespace__', []) +- namespace = {c.__name__:c for c in namespace} +- +- fields = getattr(cls, '__serialize_fields__') +- +- if '@' in data: +- return memo[data['@']] +- +- inst = cls.__new__(cls) +- for f in fields: +- try: +- setattr(inst, f, _deserialize(data[f], namespace, memo)) +- except KeyError as e: +- raise KeyError("Cannot find key for class", cls, e) +- +- if hasattr(inst, '_deserialize'): +- inst._deserialize() +- +- return inst +- +- +-class SerializeMemoizer(Serialize): +- "A version of serialize that memoizes objects to reduce space" +- +- __serialize_fields__ = 'memoized', +- +- def __init__(self, types_to_memoize): +- self.types_to_memoize = tuple(types_to_memoize) +- self.memoized = Enumerator() +- +- def in_types(self, value): +- return isinstance(value, self.types_to_memoize) +- +- def serialize(self): +- return _serialize(self.memoized.reversed(), None) +- +- @classmethod +- def deserialize(cls, data, namespace, memo): +- return _deserialize(data, namespace, memo) +- +- +-try: +- import regex # type: ignore +-except ImportError: +- regex = None +- +-import sre_parse +-import sre_constants +-categ_pattern = re.compile(r'\\p{[A-Za-z_]+}') +- +-def get_regexp_width(expr): +- if regex: +- # Since `sre_parse` cannot deal with Unicode categories of the form `\p{Mn}`, we replace these with +- # a simple letter, which makes no difference as we are only trying to get the possible lengths of the regex +- # match here below. +- regexp_final = re.sub(categ_pattern, 'A', expr) +- else: +- if re.search(categ_pattern, expr): +- raise ImportError('`regex` module must be installed in order to use Unicode categories.', expr) +- regexp_final = expr +- try: +- return [int(x) for x in sre_parse.parse(regexp_final).getwidth()] +- except sre_constants.error: +- if not regex: +- raise ValueError(expr) +- else: +- # sre_parse does not support the new features in regex. To not completely fail in that case, +- # we manually test for the most important info (whether the empty string is matched) +- c = regex.compile(regexp_final) +- if c.match('') is None: +- # MAXREPEAT is a none pickable subclass of int, therefore needs to be converted to enable caching +- return 1, int(sre_constants.MAXREPEAT) +- else: +- return 0, int(sre_constants.MAXREPEAT) +- +-###} +- +- +-_ID_START = 'Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Mn', 'Mc', 'Pc' +-_ID_CONTINUE = _ID_START + ('Nd', 'Nl',) +- +-def _test_unicode_category(s, categories): +- if len(s) != 1: +- return all(_test_unicode_category(char, categories) for char in s) +- return s == '_' or unicodedata.category(s) in categories +- +-def is_id_continue(s): +- """ +- Checks if all characters in `s` are alphanumeric characters (Unicode standard, so diacritics, indian vowels, non-latin +- numbers, etc. all pass). Synonymous with a Python `ID_CONTINUE` identifier. See PEP 3131 for details. 
+- """ +- return _test_unicode_category(s, _ID_CONTINUE) +- +-def is_id_start(s): +- """ +- Checks if all characters in `s` are alphabetic characters (Unicode standard, so diacritics, indian vowels, non-latin +- numbers, etc. all pass). Synonymous with a Python `ID_START` identifier. See PEP 3131 for details. +- """ +- return _test_unicode_category(s, _ID_START) +- +- +-def dedup_list(l): +- """Given a list (l) will removing duplicates from the list, +- preserving the original order of the list. Assumes that +- the list entries are hashable.""" +- dedup = set() +- return [x for x in l if not (x in dedup or dedup.add(x))] +- +- +-class Enumerator(Serialize): +- def __init__(self): +- self.enums = {} +- +- def get(self, item): +- if item not in self.enums: +- self.enums[item] = len(self.enums) +- return self.enums[item] +- +- def __len__(self): +- return len(self.enums) +- +- def reversed(self): +- r = {v: k for k, v in self.enums.items()} +- assert len(r) == len(self.enums) +- return r +- +- +- +-def combine_alternatives(lists): +- """ +- Accepts a list of alternatives, and enumerates all their possible concatinations. +- +- Examples: +- >>> combine_alternatives([range(2), [4,5]]) +- [[0, 4], [0, 5], [1, 4], [1, 5]] +- +- >>> combine_alternatives(["abc", "xy", '$']) +- [['a', 'x', '$'], ['a', 'y', '$'], ['b', 'x', '$'], ['b', 'y', '$'], ['c', 'x', '$'], ['c', 'y', '$']] +- +- >>> combine_alternatives([]) +- [[]] +- """ +- if not lists: +- return [[]] +- assert all(l for l in lists), lists +- init = [[x] for x in lists[0]] +- return reduce(lambda a,b: [i+[j] for i in a for j in b], lists[1:], init) +- +- +-try: +- import atomicwrites +-except ImportError: +- atomicwrites = None # type: ignore[assigment] +- +-class FS: +- exists = staticmethod(os.path.exists) +- +- @staticmethod +- def open(name, mode="r", **kwargs): +- if atomicwrites and "w" in mode: +- return atomicwrites.atomic_write(name, mode=mode, overwrite=True, **kwargs) +- else: +- return open(name, mode, **kwargs) +- +- +- +-def isascii(s): +- """ str.isascii only exists in python3.7+ """ +- try: +- return s.isascii() +- except AttributeError: +- try: +- s.encode('ascii') +- return True +- except (UnicodeDecodeError, UnicodeEncodeError): +- return False +- +- +-class fzset(frozenset): +- def __repr__(self): +- return '{%s}' % ', '.join(map(repr, self)) +- +- +-def classify_bool(seq, pred): +- true_elems = [] +- false_elems = [] +- +- for elem in seq: +- if pred(elem): +- true_elems.append(elem) +- else: +- false_elems.append(elem) +- +- return true_elems, false_elems +- +- +-def bfs(initial, expand): +- open_q = deque(list(initial)) +- visited = set(open_q) +- while open_q: +- node = open_q.popleft() +- yield node +- for next_node in expand(node): +- if next_node not in visited: +- visited.add(next_node) +- open_q.append(next_node) +- +-def bfs_all_unique(initial, expand): +- "bfs, but doesn't keep track of visited (aka seen), because there can be no repetitions" +- open_q = deque(list(initial)) +- while open_q: +- node = open_q.popleft() +- yield node +- open_q += expand(node) +- +- +-def _serialize(value, memo): +- if isinstance(value, Serialize): +- return value.serialize(memo) +- elif isinstance(value, list): +- return [_serialize(elem, memo) for elem in value] +- elif isinstance(value, frozenset): +- return list(value) # TODO reversible? 
+- elif isinstance(value, dict): +- return {key:_serialize(elem, memo) for key, elem in value.items()} +- # assert value is None or isinstance(value, (int, float, str, tuple)), value +- return value +- +- +- +- +-def small_factors(n, max_factor): +- """ +- Splits n up into smaller factors and summands <= max_factor. +- Returns a list of [(a, b), ...] +- so that the following code returns n: +- +- n = 1 +- for a, b in values: +- n = n * a + b +- +- Currently, we also keep a + b <= max_factor, but that might change +- """ +- assert n >= 0 +- assert max_factor > 2 +- if n <= max_factor: +- return [(n, 0)] +- +- for a in range(max_factor, 1, -1): +- r, b = divmod(n, a) +- if a + b <= max_factor: +- return small_factors(r, max_factor) + [(a, b)] +- assert False, "Failed to factorize %s" % n +diff --git a/src/poetry/core/_vendor/lark/visitors.py b/src/poetry/core/_vendor/lark/visitors.py +deleted file mode 100644 +index 9feced1..0000000 +--- a/src/poetry/core/_vendor/lark/visitors.py ++++ /dev/null +@@ -1,577 +0,0 @@ +-from typing import TypeVar, Tuple, List, Callable, Generic, Type, Union, Optional, Any, cast +-from abc import ABC +- +-from .utils import combine_alternatives +-from .tree import Tree, Branch +-from .exceptions import VisitError, GrammarError +-from .lexer import Token +- +-###{standalone +-from functools import wraps, update_wrapper +-from inspect import getmembers, getmro +- +-_Return_T = TypeVar('_Return_T') +-_Return_V = TypeVar('_Return_V') +-_Leaf_T = TypeVar('_Leaf_T') +-_Leaf_U = TypeVar('_Leaf_U') +-_R = TypeVar('_R') +-_FUNC = Callable[..., _Return_T] +-_DECORATED = Union[_FUNC, type] +- +-class _DiscardType: +- """When the Discard value is returned from a transformer callback, +- that node is discarded and won't appear in the parent. +- +- Example: +- :: +- +- class T(Transformer): +- def ignore_tree(self, children): +- return Discard +- +- def IGNORE_TOKEN(self, token): +- return Discard +- """ +- +- def __repr__(self): +- return "lark.visitors.Discard" +- +-Discard = _DiscardType() +- +-# Transformers +- +-class _Decoratable: +- "Provides support for decorating methods with @v_args" +- +- @classmethod +- def _apply_v_args(cls, visit_wrapper): +- mro = getmro(cls) +- assert mro[0] is cls +- libmembers = {name for _cls in mro[1:] for name, _ in getmembers(_cls)} +- for name, value in getmembers(cls): +- +- # Make sure the function isn't inherited (unless it's overwritten) +- if name.startswith('_') or (name in libmembers and name not in cls.__dict__): +- continue +- if not callable(value): +- continue +- +- # Skip if v_args already applied (at the function level) +- if isinstance(cls.__dict__[name], _VArgsWrapper): +- continue +- +- setattr(cls, name, _VArgsWrapper(cls.__dict__[name], visit_wrapper)) +- return cls +- +- def __class_getitem__(cls, _): +- return cls +- +- +-class Transformer(_Decoratable, ABC, Generic[_Leaf_T, _Return_T]): +- """Transformers visit each node of the tree, and run the appropriate method on it according to the node's data. +- +- Methods are provided by the user via inheritance, and called according to ``tree.data``. +- The returned value from each method replaces the node in the tree structure. +- +- Transformers work bottom-up (or depth-first), starting with the leaves and ending at the root of the tree. +- Transformers can be used to implement map & reduce patterns. Because nodes are reduced from leaf to root, +- at any point the callbacks may assume the children have already been transformed (if applicable). 
+- +- ``Transformer`` can do anything ``Visitor`` can do, but because it reconstructs the tree, +- it is slightly less efficient. +- +- To discard a node, return Discard (``lark.visitors.Discard``). +- +- All these classes implement the transformer interface: +- +- - ``Transformer`` - Recursively transforms the tree. This is the one you probably want. +- - ``Transformer_InPlace`` - Non-recursive. Changes the tree in-place instead of returning new instances +- - ``Transformer_InPlaceRecursive`` - Recursive. Changes the tree in-place instead of returning new instances +- +- Parameters: +- visit_tokens (bool, optional): Should the transformer visit tokens in addition to rules. +- Setting this to ``False`` is slightly faster. Defaults to ``True``. +- (For processing ignored tokens, use the ``lexer_callbacks`` options) +- +- NOTE: A transformer without methods essentially performs a non-memoized partial deepcopy. +- """ +- __visit_tokens__ = True # For backwards compatibility +- +- def __init__(self, visit_tokens: bool=True) -> None: +- self.__visit_tokens__ = visit_tokens +- +- def _call_userfunc(self, tree, new_children=None): +- # Assumes tree is already transformed +- children = new_children if new_children is not None else tree.children +- try: +- f = getattr(self, tree.data) +- except AttributeError: +- return self.__default__(tree.data, children, tree.meta) +- else: +- try: +- wrapper = getattr(f, 'visit_wrapper', None) +- if wrapper is not None: +- return f.visit_wrapper(f, tree.data, children, tree.meta) +- else: +- return f(children) +- except GrammarError: +- raise +- except Exception as e: +- raise VisitError(tree.data, tree, e) +- +- def _call_userfunc_token(self, token): +- try: +- f = getattr(self, token.type) +- except AttributeError: +- return self.__default_token__(token) +- else: +- try: +- return f(token) +- except GrammarError: +- raise +- except Exception as e: +- raise VisitError(token.type, token, e) +- +- def _transform_children(self, children): +- for c in children: +- if isinstance(c, Tree): +- res = self._transform_tree(c) +- elif self.__visit_tokens__ and isinstance(c, Token): +- res = self._call_userfunc_token(c) +- else: +- res = c +- +- if res is not Discard: +- yield res +- +- def _transform_tree(self, tree): +- children = list(self._transform_children(tree.children)) +- return self._call_userfunc(tree, children) +- +- def transform(self, tree: Tree[_Leaf_T]) -> _Return_T: +- "Transform the given tree, and return the final result" +- return self._transform_tree(tree) +- +- def __mul__( +- self: 'Transformer[_Leaf_T, Tree[_Leaf_U]]', +- other: 'Union[Transformer[_Leaf_U, _Return_V], TransformerChain[_Leaf_U, _Return_V,]]' +- ) -> 'TransformerChain[_Leaf_T, _Return_V]': +- """Chain two transformers together, returning a new transformer. +- """ +- return TransformerChain(self, other) +- +- def __default__(self, data, children, meta): +- """Default function that is called if there is no attribute matching ``data`` +- +- Can be overridden. Defaults to creating a new copy of the tree node (i.e. ``return Tree(data, children, meta)``) +- """ +- return Tree(data, children, meta) +- +- def __default_token__(self, token): +- """Default function that is called if there is no attribute matching ``token.type`` +- +- Can be overridden. Defaults to returning the token as-is. +- """ +- return token +- +- +-def merge_transformers(base_transformer=None, **transformers_to_merge): +- """Merge a collection of transformers into the base_transformer, each into its own 'namespace'. 
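The bottom-up contract described above is worth seeing in action: by the time a rule callback runs, its children have already been transformed. A small sketch, assuming the vendored lark matches upstream lark's public API (the grammar and class name are invented for illustration):

from lark import Lark, Transformer

class CalcTransformer(Transformer):
    # Called bottom-up: by the time `add` runs, both children are ints.
    def number(self, children):
        return int(children[0])

    def add(self, children):
        left, right = children
        return left + right

parser = Lark("""
    ?start: add
    add: number "+" number
    number: /[0-9]+/
""")
assert CalcTransformer().transform(parser.parse("2+3")) == 5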
+- +- When called, it will collect the methods from each transformer, and assign them to base_transformer, +- with their name prefixed with the given keyword, as ``prefix__methodname``. +- +- This function is especially useful for processing grammars that import other grammars, +- thereby creating some of their rules in a 'namespace'. (i.e with a consistent name prefix). +- In this case, the key for the transformer should match the name of the imported grammar. +- +- Parameters: +- base_transformer (Transformer, optional): The transformer that all other transformers will be added to. +- **transformers_to_merge: Keyword arguments, in the form of ``name_prefix = transformer``. +- +- Raises: +- AttributeError: In case of a name collision in the merged methods +- +- Example: +- :: +- +- class TBase(Transformer): +- def start(self, children): +- return children[0] + 'bar' +- +- class TImportedGrammar(Transformer): +- def foo(self, children): +- return "foo" +- +- composed_transformer = merge_transformers(TBase(), imported=TImportedGrammar()) +- +- t = Tree('start', [ Tree('imported__foo', []) ]) +- +- assert composed_transformer.transform(t) == 'foobar' +- +- """ +- if base_transformer is None: +- base_transformer = Transformer() +- for prefix, transformer in transformers_to_merge.items(): +- for method_name in dir(transformer): +- method = getattr(transformer, method_name) +- if not callable(method): +- continue +- if method_name.startswith("_") or method_name == "transform": +- continue +- prefixed_method = prefix + "__" + method_name +- if hasattr(base_transformer, prefixed_method): +- raise AttributeError("Cannot merge: method '%s' appears more than once" % prefixed_method) +- +- setattr(base_transformer, prefixed_method, method) +- +- return base_transformer +- +- +-class InlineTransformer(Transformer): # XXX Deprecated +- def _call_userfunc(self, tree, new_children=None): +- # Assumes tree is already transformed +- children = new_children if new_children is not None else tree.children +- try: +- f = getattr(self, tree.data) +- except AttributeError: +- return self.__default__(tree.data, children, tree.meta) +- else: +- return f(*children) +- +- +-class TransformerChain(Generic[_Leaf_T, _Return_T]): +- +- transformers: 'Tuple[Union[Transformer, TransformerChain], ...]' +- +- def __init__(self, *transformers: 'Union[Transformer, TransformerChain]') -> None: +- self.transformers = transformers +- +- def transform(self, tree: Tree[_Leaf_T]) -> _Return_T: +- for t in self.transformers: +- tree = t.transform(tree) +- return cast(_Return_T, tree) +- +- def __mul__( +- self: 'TransformerChain[_Leaf_T, Tree[_Leaf_U]]', +- other: 'Union[Transformer[_Leaf_U, _Return_V], TransformerChain[_Leaf_U, _Return_V]]' +- ) -> 'TransformerChain[_Leaf_T, _Return_V]': +- return TransformerChain(*self.transformers + (other,)) +- +- +-class Transformer_InPlace(Transformer): +- """Same as Transformer, but non-recursive, and changes the tree in-place instead of returning new instances +- +- Useful for huge trees. Conservative in memory. +- """ +- def _transform_tree(self, tree): # Cancel recursion +- return self._call_userfunc(tree) +- +- def transform(self, tree: Tree[_Leaf_T]) -> _Return_T: +- for subtree in tree.iter_subtrees(): +- subtree.children = list(self._transform_children(subtree.children)) +- +- return self._transform_tree(tree) +- +- +-class Transformer_NonRecursive(Transformer): +- """Same as Transformer but non-recursive. +- +- Like Transformer, it doesn't change the original tree. 
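`Transformer.__mul__` composes transformers into a `TransformerChain` that applies them left to right, which pairs naturally with the in-place variants above. A quick sketch of that chaining, again assuming upstream-compatible lark behavior (`ToInts` and `SumAll` are hypothetical):

from lark import Transformer, Tree

class ToInts(Transformer):
    def num(self, children):
        return Tree("num", [int(children[0])])

class SumAll(Transformer):
    def start(self, children):
        return sum(c.children[0] for c in children)

chained = ToInts() * SumAll()  # a TransformerChain; the left side runs first
tree = Tree("start", [Tree("num", ["1"]), Tree("num", ["2"])])
assert chained.transform(tree) == 3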
+- +- Useful for huge trees. +- """ +- +- def transform(self, tree: Tree[_Leaf_T]) -> _Return_T: +- # Tree to postfix +- rev_postfix = [] +- q: List[Branch[_Leaf_T]] = [tree] +- while q: +- t = q.pop() +- rev_postfix.append(t) +- if isinstance(t, Tree): +- q += t.children +- +- # Postfix to tree +- stack: List = [] +- for x in reversed(rev_postfix): +- if isinstance(x, Tree): +- size = len(x.children) +- if size: +- args = stack[-size:] +- del stack[-size:] +- else: +- args = [] +- +- res = self._call_userfunc(x, args) +- if res is not Discard: +- stack.append(res) +- +- elif self.__visit_tokens__ and isinstance(x, Token): +- res = self._call_userfunc_token(x) +- if res is not Discard: +- stack.append(res) +- else: +- stack.append(x) +- +- result, = stack # We should have only one tree remaining +- # There are no guarantees on the type of the value produced by calling a user func for a +- # child will produce. This means type system can't statically know that the final result is +- # _Return_T. As a result a cast is required. +- return cast(_Return_T, result) +- +- +-class Transformer_InPlaceRecursive(Transformer): +- "Same as Transformer, recursive, but changes the tree in-place instead of returning new instances" +- def _transform_tree(self, tree): +- tree.children = list(self._transform_children(tree.children)) +- return self._call_userfunc(tree) +- +- +-# Visitors +- +-class VisitorBase: +- def _call_userfunc(self, tree): +- return getattr(self, tree.data, self.__default__)(tree) +- +- def __default__(self, tree): +- """Default function that is called if there is no attribute matching ``tree.data`` +- +- Can be overridden. Defaults to doing nothing. +- """ +- return tree +- +- def __class_getitem__(cls, _): +- return cls +- +- +-class Visitor(VisitorBase, ABC, Generic[_Leaf_T]): +- """Tree visitor, non-recursive (can handle huge trees). +- +- Visiting a node calls its methods (provided by the user via inheritance) according to ``tree.data`` +- """ +- +- def visit(self, tree: Tree[_Leaf_T]) -> Tree[_Leaf_T]: +- "Visits the tree, starting with the leaves and finally the root (bottom-up)" +- for subtree in tree.iter_subtrees(): +- self._call_userfunc(subtree) +- return tree +- +- def visit_topdown(self, tree: Tree[_Leaf_T]) -> Tree[_Leaf_T]: +- "Visit the tree, starting at the root, and ending at the leaves (top-down)" +- for subtree in tree.iter_subtrees_topdown(): +- self._call_userfunc(subtree) +- return tree +- +- +-class Visitor_Recursive(VisitorBase, Generic[_Leaf_T]): +- """Bottom-up visitor, recursive. +- +- Visiting a node calls its methods (provided by the user via inheritance) according to ``tree.data`` +- +- Slightly faster than the non-recursive version. +- """ +- +- def visit(self, tree: Tree[_Leaf_T]) -> Tree[_Leaf_T]: +- "Visits the tree, starting with the leaves and finally the root (bottom-up)" +- for child in tree.children: +- if isinstance(child, Tree): +- self.visit(child) +- +- self._call_userfunc(tree) +- return tree +- +- def visit_topdown(self,tree: Tree[_Leaf_T]) -> Tree[_Leaf_T]: +- "Visit the tree, starting at the root, and ending at the leaves (top-down)" +- self._call_userfunc(tree) +- +- for child in tree.children: +- if isinstance(child, Tree): +- self.visit_topdown(child) +- +- return tree +- +- +-class Interpreter(_Decoratable, ABC, Generic[_Leaf_T, _Return_T]): +- """Interpreter walks the tree starting at the root. 
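`Transformer_NonRecursive.transform` above avoids recursion by flattening the tree into reverse-postfix order and then rebuilding results with an explicit value stack. A standalone sketch of that strategy, using plain tuples in place of `Tree` nodes and summation as the stand-in user callback:

def eval_postfix(tree):
    # Pass 1: flatten the tree into reverse-postfix order, no recursion.
    rev, q = [], [tree]
    while q:
        t = q.pop()
        rev.append(t)
        if isinstance(t, tuple):      # ("add", child, child) stands in for Tree
            q += t[1:]
    # Pass 2: rebuild bottom-up with an explicit value stack.
    stack = []
    for x in reversed(rev):
        if isinstance(x, tuple):
            n = len(x) - 1
            if n:
                args = stack[-n:]
                del stack[-n:]
            else:
                args = []
            stack.append(sum(args))   # the "user callback" for this sketch
        else:
            stack.append(x)
    (result,) = stack                 # exactly one value must remain
    return result

assert eval_postfix(("add", 1, ("add", 2, 3))) == 6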
+- +- Visits the tree, starting with the root and finally the leaves (top-down) +- +- For each tree node, it calls its methods (provided by user via inheritance) according to ``tree.data``. +- +- Unlike ``Transformer`` and ``Visitor``, the Interpreter doesn't automatically visit its sub-branches. +- The user has to explicitly call ``visit``, ``visit_children``, or use the ``@visit_children_decor``. +- This allows the user to implement branching and loops. +- """ +- +- def visit(self, tree: Tree[_Leaf_T]) -> _Return_T: +- # There are no guarantees on the type of the value produced by calling a user func for a +- # child will produce. So only annotate the public method and use an internal method when +- # visiting child trees. +- return self._visit_tree(tree) +- +- def _visit_tree(self, tree: Tree[_Leaf_T]): +- f = getattr(self, tree.data) +- wrapper = getattr(f, 'visit_wrapper', None) +- if wrapper is not None: +- return f.visit_wrapper(f, tree.data, tree.children, tree.meta) +- else: +- return f(tree) +- +- def visit_children(self, tree: Tree[_Leaf_T]) -> List: +- return [self._visit_tree(child) if isinstance(child, Tree) else child +- for child in tree.children] +- +- def __getattr__(self, name): +- return self.__default__ +- +- def __default__(self, tree): +- return self.visit_children(tree) +- +- +-_InterMethod = Callable[[Type[Interpreter], _Return_T], _R] +- +-def visit_children_decor(func: _InterMethod) -> _InterMethod: +- "See Interpreter" +- @wraps(func) +- def inner(cls, tree): +- values = cls.visit_children(tree) +- return func(cls, values) +- return inner +- +-# Decorators +- +-def _apply_v_args(obj, visit_wrapper): +- try: +- _apply = obj._apply_v_args +- except AttributeError: +- return _VArgsWrapper(obj, visit_wrapper) +- else: +- return _apply(visit_wrapper) +- +- +-class _VArgsWrapper: +- """ +- A wrapper around a Callable. It delegates `__call__` to the Callable. +- If the Callable has a `__get__`, that is also delegate and the resulting function is wrapped. +- Otherwise, we use the original function mirroring the behaviour without a __get__. +- We also have the visit_wrapper attribute to be used by Transformers. +- """ +- base_func: Callable +- +- def __init__(self, func: Callable, visit_wrapper: Callable[[Callable, str, list, Any], Any]): +- if isinstance(func, _VArgsWrapper): +- func = func.base_func +- # https://github.com/python/mypy/issues/708 +- self.base_func = func # type: ignore[assignment] +- self.visit_wrapper = visit_wrapper +- update_wrapper(self, func) +- +- def __call__(self, *args, **kwargs): +- return self.base_func(*args, **kwargs) +- +- def __get__(self, instance, owner=None): +- try: +- g = self.base_func.__get__ +- except AttributeError: +- return self +- else: +- return _VArgsWrapper(g(instance, owner), self.visit_wrapper) +- +- def __set_name__(self, owner, name): +- try: +- f = self.base_func.__set_name__ +- except AttributeError: +- return +- else: +- f(owner, name) +- +- +-def _vargs_inline(f, _data, children, _meta): +- return f(*children) +-def _vargs_meta_inline(f, _data, children, meta): +- return f(meta, *children) +-def _vargs_meta(f, _data, children, meta): +- return f(meta, children) +-def _vargs_tree(f, data, children, meta): +- return f(Tree(data, children, meta)) +- +- +-def v_args(inline: bool = False, meta: bool = False, tree: bool = False, wrapper: Optional[Callable] = None) -> Callable[[_DECORATED], _DECORATED]: +- """A convenience decorator factory for modifying the behavior of user-supplied visitor methods. 
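Unlike `Transformer` and `Visitor`, `Interpreter` leaves traversal to the user, which is exactly what makes short-circuiting control flow possible. A sketch, assuming the vendored `lark.visitors` matches upstream (the `if_`, `true`, and `lit` rule names are invented):

from lark import Tree
from lark.visitors import Interpreter

class IfInterp(Interpreter):
    # Interpreter is top-down and does not auto-visit children, so the
    # untaken branch below is never evaluated.
    def if_(self, tree):
        cond, then, other = tree.children
        return self.visit(then) if self.visit(cond) else self.visit(other)

    def true(self, tree):
        return True

    def lit(self, tree):
        return tree.children[0]

t = Tree("if_", [Tree("true", []), Tree("lit", ["yes"]), Tree("lit", ["no"])])
assert IfInterp().visit(t) == "yes"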
+- +- By default, callback methods of transformers/visitors accept one argument - a list of the node's children. +- +- ``v_args`` can modify this behavior. When used on a transformer/visitor class definition, +- it applies to all the callback methods inside it. +- +- ``v_args`` can be applied to a single method, or to an entire class. When applied to both, +- the options given to the method take precedence. +- +- Parameters: +- inline (bool, optional): Children are provided as ``*args`` instead of a list argument (not recommended for very long lists). +- meta (bool, optional): Provides two arguments: ``children`` and ``meta`` (instead of just the first) +- tree (bool, optional): Provides the entire tree as the argument, instead of the children. +- wrapper (function, optional): Provide a function to decorate all methods. +- +- Example: +- :: +- +- @v_args(inline=True) +- class SolveArith(Transformer): +- def add(self, left, right): +- return left + right +- +- +- class ReverseNotation(Transformer_InPlace): +- @v_args(tree=True) +- def tree_node(self, tree): +- tree.children = tree.children[::-1] +- """ +- if tree and (meta or inline): +- raise ValueError("Visitor functions cannot combine 'tree' with 'meta' or 'inline'.") +- +- func = None +- if meta: +- if inline: +- func = _vargs_meta_inline +- else: +- func = _vargs_meta +- elif inline: +- func = _vargs_inline +- elif tree: +- func = _vargs_tree +- +- if wrapper is not None: +- if func is not None: +- raise ValueError("Cannot use 'wrapper' along with 'tree', 'meta' or 'inline'.") +- func = wrapper +- +- def _visitor_args_dec(obj): +- return _apply_v_args(obj, func) +- return _visitor_args_dec +- +- +-###} +- +- +-# --- Visitor Utilities --- +- +-class CollapseAmbiguities(Transformer): +- """ +- Transforms a tree that contains any number of _ambig nodes into a list of trees, +- each one containing an unambiguous tree. +- +- The length of the resulting list is the product of the length of all _ambig nodes. +- +- Warning: This may quickly explode for highly ambiguous trees. +- +- """ +- def _ambig(self, options): +- return sum(options, []) +- +- def __default__(self, data, children_lists, meta): +- return [Tree(data, children, meta) for children in combine_alternatives(children_lists)] +- +- def __default_token__(self, t): +- return [t] +diff --git a/src/poetry/core/_vendor/packaging/LICENSE b/src/poetry/core/_vendor/packaging/LICENSE +deleted file mode 100644 +index 6f62d44..0000000 +--- a/src/poetry/core/_vendor/packaging/LICENSE ++++ /dev/null +@@ -1,3 +0,0 @@ +-This software is made available under the terms of *either* of the licenses +-found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made +-under the terms of *both* these licenses. +diff --git a/src/poetry/core/_vendor/packaging/LICENSE.APACHE b/src/poetry/core/_vendor/packaging/LICENSE.APACHE +deleted file mode 100644 +index f433b1a..0000000 +--- a/src/poetry/core/_vendor/packaging/LICENSE.APACHE ++++ /dev/null +@@ -1,177 +0,0 @@ +- +- Apache License +- Version 2.0, January 2004 +- http://www.apache.org/licenses/ +- +- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION +- +- 1. Definitions. +- +- "License" shall mean the terms and conditions for use, reproduction, +- and distribution as defined by Sections 1 through 9 of this document. +- +- "Licensor" shall mean the copyright owner or entity authorized by +- the copyright owner that is granting the License. 
+- +- "Legal Entity" shall mean the union of the acting entity and all +- other entities that control, are controlled by, or are under common +- control with that entity. For the purposes of this definition, +- "control" means (i) the power, direct or indirect, to cause the +- direction or management of such entity, whether by contract or +- otherwise, or (ii) ownership of fifty percent (50%) or more of the +- outstanding shares, or (iii) beneficial ownership of such entity. +- +- "You" (or "Your") shall mean an individual or Legal Entity +- exercising permissions granted by this License. +- +- "Source" form shall mean the preferred form for making modifications, +- including but not limited to software source code, documentation +- source, and configuration files. +- +- "Object" form shall mean any form resulting from mechanical +- transformation or translation of a Source form, including but +- not limited to compiled object code, generated documentation, +- and conversions to other media types. +- +- "Work" shall mean the work of authorship, whether in Source or +- Object form, made available under the License, as indicated by a +- copyright notice that is included in or attached to the work +- (an example is provided in the Appendix below). +- +- "Derivative Works" shall mean any work, whether in Source or Object +- form, that is based on (or derived from) the Work and for which the +- editorial revisions, annotations, elaborations, or other modifications +- represent, as a whole, an original work of authorship. For the purposes +- of this License, Derivative Works shall not include works that remain +- separable from, or merely link (or bind by name) to the interfaces of, +- the Work and Derivative Works thereof. +- +- "Contribution" shall mean any work of authorship, including +- the original version of the Work and any modifications or additions +- to that Work or Derivative Works thereof, that is intentionally +- submitted to Licensor for inclusion in the Work by the copyright owner +- or by an individual or Legal Entity authorized to submit on behalf of +- the copyright owner. For the purposes of this definition, "submitted" +- means any form of electronic, verbal, or written communication sent +- to the Licensor or its representatives, including but not limited to +- communication on electronic mailing lists, source code control systems, +- and issue tracking systems that are managed by, or on behalf of, the +- Licensor for the purpose of discussing and improving the Work, but +- excluding communication that is conspicuously marked or otherwise +- designated in writing by the copyright owner as "Not a Contribution." +- +- "Contributor" shall mean Licensor and any individual or Legal Entity +- on behalf of whom a Contribution has been received by Licensor and +- subsequently incorporated within the Work. +- +- 2. Grant of Copyright License. Subject to the terms and conditions of +- this License, each Contributor hereby grants to You a perpetual, +- worldwide, non-exclusive, no-charge, royalty-free, irrevocable +- copyright license to reproduce, prepare Derivative Works of, +- publicly display, publicly perform, sublicense, and distribute the +- Work and such Derivative Works in Source or Object form. +- +- 3. Grant of Patent License. 
Subject to the terms and conditions of +- this License, each Contributor hereby grants to You a perpetual, +- worldwide, non-exclusive, no-charge, royalty-free, irrevocable +- (except as stated in this section) patent license to make, have made, +- use, offer to sell, sell, import, and otherwise transfer the Work, +- where such license applies only to those patent claims licensable +- by such Contributor that are necessarily infringed by their +- Contribution(s) alone or by combination of their Contribution(s) +- with the Work to which such Contribution(s) was submitted. If You +- institute patent litigation against any entity (including a +- cross-claim or counterclaim in a lawsuit) alleging that the Work +- or a Contribution incorporated within the Work constitutes direct +- or contributory patent infringement, then any patent licenses +- granted to You under this License for that Work shall terminate +- as of the date such litigation is filed. +- +- 4. Redistribution. You may reproduce and distribute copies of the +- Work or Derivative Works thereof in any medium, with or without +- modifications, and in Source or Object form, provided that You +- meet the following conditions: +- +- (a) You must give any other recipients of the Work or +- Derivative Works a copy of this License; and +- +- (b) You must cause any modified files to carry prominent notices +- stating that You changed the files; and +- +- (c) You must retain, in the Source form of any Derivative Works +- that You distribute, all copyright, patent, trademark, and +- attribution notices from the Source form of the Work, +- excluding those notices that do not pertain to any part of +- the Derivative Works; and +- +- (d) If the Work includes a "NOTICE" text file as part of its +- distribution, then any Derivative Works that You distribute must +- include a readable copy of the attribution notices contained +- within such NOTICE file, excluding those notices that do not +- pertain to any part of the Derivative Works, in at least one +- of the following places: within a NOTICE text file distributed +- as part of the Derivative Works; within the Source form or +- documentation, if provided along with the Derivative Works; or, +- within a display generated by the Derivative Works, if and +- wherever such third-party notices normally appear. The contents +- of the NOTICE file are for informational purposes only and +- do not modify the License. You may add Your own attribution +- notices within Derivative Works that You distribute, alongside +- or as an addendum to the NOTICE text from the Work, provided +- that such additional attribution notices cannot be construed +- as modifying the License. +- +- You may add Your own copyright statement to Your modifications and +- may provide additional or different license terms and conditions +- for use, reproduction, or distribution of Your modifications, or +- for any such Derivative Works as a whole, provided Your use, +- reproduction, and distribution of the Work otherwise complies with +- the conditions stated in this License. +- +- 5. Submission of Contributions. Unless You explicitly state otherwise, +- any Contribution intentionally submitted for inclusion in the Work +- by You to the Licensor shall be under the terms and conditions of +- this License, without any additional terms or conditions. +- Notwithstanding the above, nothing herein shall supersede or modify +- the terms of any separate license agreement you may have executed +- with Licensor regarding such Contributions. 
+- +- 6. Trademarks. This License does not grant permission to use the trade +- names, trademarks, service marks, or product names of the Licensor, +- except as required for reasonable and customary use in describing the +- origin of the Work and reproducing the content of the NOTICE file. +- +- 7. Disclaimer of Warranty. Unless required by applicable law or +- agreed to in writing, Licensor provides the Work (and each +- Contributor provides its Contributions) on an "AS IS" BASIS, +- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +- implied, including, without limitation, any warranties or conditions +- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A +- PARTICULAR PURPOSE. You are solely responsible for determining the +- appropriateness of using or redistributing the Work and assume any +- risks associated with Your exercise of permissions under this License. +- +- 8. Limitation of Liability. In no event and under no legal theory, +- whether in tort (including negligence), contract, or otherwise, +- unless required by applicable law (such as deliberate and grossly +- negligent acts) or agreed to in writing, shall any Contributor be +- liable to You for damages, including any direct, indirect, special, +- incidental, or consequential damages of any character arising as a +- result of this License or out of the use or inability to use the +- Work (including but not limited to damages for loss of goodwill, +- work stoppage, computer failure or malfunction, or any and all +- other commercial damages or losses), even if such Contributor +- has been advised of the possibility of such damages. +- +- 9. Accepting Warranty or Additional Liability. While redistributing +- the Work or Derivative Works thereof, You may choose to offer, +- and charge a fee for, acceptance of support, warranty, indemnity, +- or other liability obligations and/or rights consistent with this +- License. However, in accepting such obligations, You may act only +- on Your own behalf and on Your sole responsibility, not on behalf +- of any other Contributor, and only if You agree to indemnify, +- defend, and hold each Contributor harmless for any liability +- incurred by, or claims asserted against, such Contributor by reason +- of your accepting any such warranty or additional liability. +- +- END OF TERMS AND CONDITIONS +diff --git a/src/poetry/core/_vendor/packaging/LICENSE.BSD b/src/poetry/core/_vendor/packaging/LICENSE.BSD +deleted file mode 100644 +index 42ce7b7..0000000 +--- a/src/poetry/core/_vendor/packaging/LICENSE.BSD ++++ /dev/null +@@ -1,23 +0,0 @@ +-Copyright (c) Donald Stufft and individual contributors. +-All rights reserved. +- +-Redistribution and use in source and binary forms, with or without +-modification, are permitted provided that the following conditions are met: +- +- 1. Redistributions of source code must retain the above copyright notice, +- this list of conditions and the following disclaimer. +- +- 2. Redistributions in binary form must reproduce the above copyright +- notice, this list of conditions and the following disclaimer in the +- documentation and/or other materials provided with the distribution. +- +-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +-DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +-CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +-OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +diff --git a/src/poetry/core/_vendor/packaging/__about__.py b/src/poetry/core/_vendor/packaging/__about__.py +deleted file mode 100644 +index 3551bc2..0000000 +--- a/src/poetry/core/_vendor/packaging/__about__.py ++++ /dev/null +@@ -1,26 +0,0 @@ +-# This file is dual licensed under the terms of the Apache License, Version +-# 2.0, and the BSD License. See the LICENSE file in the root of this repository +-# for complete details. +- +-__all__ = [ +- "__title__", +- "__summary__", +- "__uri__", +- "__version__", +- "__author__", +- "__email__", +- "__license__", +- "__copyright__", +-] +- +-__title__ = "packaging" +-__summary__ = "Core utilities for Python packages" +-__uri__ = "https://github.com/pypa/packaging" +- +-__version__ = "21.3" +- +-__author__ = "Donald Stufft and individual contributors" +-__email__ = "donald@stufft.io" +- +-__license__ = "BSD-2-Clause or Apache-2.0" +-__copyright__ = "2014-2019 %s" % __author__ +diff --git a/src/poetry/core/_vendor/packaging/__init__.py b/src/poetry/core/_vendor/packaging/__init__.py +deleted file mode 100644 +index 3c50c5d..0000000 +--- a/src/poetry/core/_vendor/packaging/__init__.py ++++ /dev/null +@@ -1,25 +0,0 @@ +-# This file is dual licensed under the terms of the Apache License, Version +-# 2.0, and the BSD License. See the LICENSE file in the root of this repository +-# for complete details. +- +-from .__about__ import ( +- __author__, +- __copyright__, +- __email__, +- __license__, +- __summary__, +- __title__, +- __uri__, +- __version__, +-) +- +-__all__ = [ +- "__title__", +- "__summary__", +- "__uri__", +- "__version__", +- "__author__", +- "__email__", +- "__license__", +- "__copyright__", +-] +diff --git a/src/poetry/core/_vendor/packaging/_manylinux.py b/src/poetry/core/_vendor/packaging/_manylinux.py +deleted file mode 100644 +index 4c379aa..0000000 +--- a/src/poetry/core/_vendor/packaging/_manylinux.py ++++ /dev/null +@@ -1,301 +0,0 @@ +-import collections +-import functools +-import os +-import re +-import struct +-import sys +-import warnings +-from typing import IO, Dict, Iterator, NamedTuple, Optional, Tuple +- +- +-# Python does not provide platform information at sufficient granularity to +-# identify the architecture of the running executable in some cases, so we +-# determine it dynamically by reading the information from the running +-# process. This only applies on Linux, which uses the ELF format. +-class _ELFFileHeader: +- # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header +- class _InvalidELFFileHeader(ValueError): +- """ +- An invalid ELF file header was found. 
+- """ +- +- ELF_MAGIC_NUMBER = 0x7F454C46 +- ELFCLASS32 = 1 +- ELFCLASS64 = 2 +- ELFDATA2LSB = 1 +- ELFDATA2MSB = 2 +- EM_386 = 3 +- EM_S390 = 22 +- EM_ARM = 40 +- EM_X86_64 = 62 +- EF_ARM_ABIMASK = 0xFF000000 +- EF_ARM_ABI_VER5 = 0x05000000 +- EF_ARM_ABI_FLOAT_HARD = 0x00000400 +- +- def __init__(self, file: IO[bytes]) -> None: +- def unpack(fmt: str) -> int: +- try: +- data = file.read(struct.calcsize(fmt)) +- result: Tuple[int, ...] = struct.unpack(fmt, data) +- except struct.error: +- raise _ELFFileHeader._InvalidELFFileHeader() +- return result[0] +- +- self.e_ident_magic = unpack(">I") +- if self.e_ident_magic != self.ELF_MAGIC_NUMBER: +- raise _ELFFileHeader._InvalidELFFileHeader() +- self.e_ident_class = unpack("B") +- if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}: +- raise _ELFFileHeader._InvalidELFFileHeader() +- self.e_ident_data = unpack("B") +- if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}: +- raise _ELFFileHeader._InvalidELFFileHeader() +- self.e_ident_version = unpack("B") +- self.e_ident_osabi = unpack("B") +- self.e_ident_abiversion = unpack("B") +- self.e_ident_pad = file.read(7) +- format_h = "H" +- format_i = "I" +- format_q = "Q" +- format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q +- self.e_type = unpack(format_h) +- self.e_machine = unpack(format_h) +- self.e_version = unpack(format_i) +- self.e_entry = unpack(format_p) +- self.e_phoff = unpack(format_p) +- self.e_shoff = unpack(format_p) +- self.e_flags = unpack(format_i) +- self.e_ehsize = unpack(format_h) +- self.e_phentsize = unpack(format_h) +- self.e_phnum = unpack(format_h) +- self.e_shentsize = unpack(format_h) +- self.e_shnum = unpack(format_h) +- self.e_shstrndx = unpack(format_h) +- +- +-def _get_elf_header() -> Optional[_ELFFileHeader]: +- try: +- with open(sys.executable, "rb") as f: +- elf_header = _ELFFileHeader(f) +- except (OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader): +- return None +- return elf_header +- +- +-def _is_linux_armhf() -> bool: +- # hard-float ABI can be detected from the ELF header of the running +- # process +- # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf +- elf_header = _get_elf_header() +- if elf_header is None: +- return False +- result = elf_header.e_ident_class == elf_header.ELFCLASS32 +- result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB +- result &= elf_header.e_machine == elf_header.EM_ARM +- result &= ( +- elf_header.e_flags & elf_header.EF_ARM_ABIMASK +- ) == elf_header.EF_ARM_ABI_VER5 +- result &= ( +- elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD +- ) == elf_header.EF_ARM_ABI_FLOAT_HARD +- return result +- +- +-def _is_linux_i686() -> bool: +- elf_header = _get_elf_header() +- if elf_header is None: +- return False +- result = elf_header.e_ident_class == elf_header.ELFCLASS32 +- result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB +- result &= elf_header.e_machine == elf_header.EM_386 +- return result +- +- +-def _have_compatible_abi(arch: str) -> bool: +- if arch == "armv7l": +- return _is_linux_armhf() +- if arch == "i686": +- return _is_linux_i686() +- return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"} +- +- +-# If glibc ever changes its major version, we need to know what the last +-# minor version was, so we can build the complete list of all versions. +-# For now, guess what the highest minor version might be, assume it will +-# be 50 for testing. Once this actually happens, update the dictionary +-# with the actual value. 
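The `_ELFFileHeader` logic above boils down to reading the 16-byte `e_ident` prefix of the running interpreter: magic number, word size, and endianness are all the armhf/i686 checks need. A minimal diagnostic sketch of that sniffing (not part of the patch):

import sys

# The first 16 bytes of an ELF file (e_ident) carry the magic number,
# the word size (byte 4) and the endianness (byte 5).
with open(sys.executable, "rb") as f:
    ident = f.read(16)

if ident[:4] == b"\x7fELF":
    ei_class = {1: "32-bit", 2: "64-bit"}.get(ident[4], "unknown")
    ei_data = {1: "little-endian", 2: "big-endian"}.get(ident[5], "unknown")
    print(f"ELF, {ei_class}, {ei_data}")
else:
    print("not an ELF executable (e.g. on macOS or Windows)")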
+-_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50) +- +- +-class _GLibCVersion(NamedTuple): +- major: int +- minor: int +- +- +-def _glibc_version_string_confstr() -> Optional[str]: +- """ +- Primary implementation of glibc_version_string using os.confstr. +- """ +- # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely +- # to be broken or missing. This strategy is used in the standard library +- # platform module. +- # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183 +- try: +- # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17". +- version_string = os.confstr("CS_GNU_LIBC_VERSION") +- assert version_string is not None +- _, version = version_string.split() +- except (AssertionError, AttributeError, OSError, ValueError): +- # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... +- return None +- return version +- +- +-def _glibc_version_string_ctypes() -> Optional[str]: +- """ +- Fallback implementation of glibc_version_string using ctypes. +- """ +- try: +- import ctypes +- except ImportError: +- return None +- +- # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen +- # manpage says, "If filename is NULL, then the returned handle is for the +- # main program". This way we can let the linker do the work to figure out +- # which libc our process is actually using. +- # +- # We must also handle the special case where the executable is not a +- # dynamically linked executable. This can occur when using musl libc, +- # for example. In this situation, dlopen() will error, leading to an +- # OSError. Interestingly, at least in the case of musl, there is no +- # errno set on the OSError. The single string argument used to construct +- # OSError comes from libc itself and is therefore not portable to +- # hard code here. In any case, failure to call dlopen() means we +- # can proceed, so we bail on our attempt. +- try: +- process_namespace = ctypes.CDLL(None) +- except OSError: +- return None +- +- try: +- gnu_get_libc_version = process_namespace.gnu_get_libc_version +- except AttributeError: +- # Symbol doesn't exist -> therefore, we are not linked to +- # glibc. +- return None +- +- # Call gnu_get_libc_version, which returns a string like "2.5" +- gnu_get_libc_version.restype = ctypes.c_char_p +- version_str: str = gnu_get_libc_version() +- # py2 / py3 compatibility: +- if not isinstance(version_str, str): +- version_str = version_str.decode("ascii") +- +- return version_str +- +- +-def _glibc_version_string() -> Optional[str]: +- """Returns glibc version string, or None if not using glibc.""" +- return _glibc_version_string_confstr() or _glibc_version_string_ctypes() +- +- +-def _parse_glibc_version(version_str: str) -> Tuple[int, int]: +- """Parse glibc version. +- +- We use a regexp instead of str.split because we want to discard any +- random junk that might come after the minor version -- this might happen +- in patched/forked versions of glibc (e.g. Linaro's version of glibc +- uses version strings like "2.20-2014.11"). See gh-3588. 
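For reference, the two glibc-detection strategies above condense into one probe: `os.confstr` first, then the `ctypes.CDLL(None)` fallback. This sketch mirrors `_glibc_version_string_confstr` and `_glibc_version_string_ctypes` using only documented stdlib calls:

import ctypes
import os

def glibc_version():
    # Strategy 1: os.confstr, e.g. "glibc 2.17" (fast, may be missing).
    try:
        v = os.confstr("CS_GNU_LIBC_VERSION")
        if v:
            return v.split()[1]
    except (AttributeError, OSError, ValueError):
        pass
    # Strategy 2: dlopen(NULL) and call gnu_get_libc_version directly.
    try:
        libc = ctypes.CDLL(None)
        f = libc.gnu_get_libc_version
        f.restype = ctypes.c_char_p
        return f().decode("ascii")
    except (OSError, AttributeError):
        return None  # statically linked, musl, macOS, Windows, ...

print(glibc_version())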
+-    """
+-    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
+-    if not m:
+-        warnings.warn(
+-            "Expected glibc version with 2 components major.minor,"
+-            " got: %s" % version_str,
+-            RuntimeWarning,
+-        )
+-        return -1, -1
+-    return int(m.group("major")), int(m.group("minor"))
+-
+-
+-@functools.lru_cache()
+-def _get_glibc_version() -> Tuple[int, int]:
+-    version_str = _glibc_version_string()
+-    if version_str is None:
+-        return (-1, -1)
+-    return _parse_glibc_version(version_str)
+-
+-
+-# From PEP 513, PEP 600
+-def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
+-    sys_glibc = _get_glibc_version()
+-    if sys_glibc < version:
+-        return False
+-    # Check for presence of _manylinux module.
+-    try:
+-        import _manylinux  # noqa
+-    except ImportError:
+-        return True
+-    if hasattr(_manylinux, "manylinux_compatible"):
+-        result = _manylinux.manylinux_compatible(version[0], version[1], arch)
+-        if result is not None:
+-            return bool(result)
+-        return True
+-    if version == _GLibCVersion(2, 5):
+-        if hasattr(_manylinux, "manylinux1_compatible"):
+-            return bool(_manylinux.manylinux1_compatible)
+-    if version == _GLibCVersion(2, 12):
+-        if hasattr(_manylinux, "manylinux2010_compatible"):
+-            return bool(_manylinux.manylinux2010_compatible)
+-    if version == _GLibCVersion(2, 17):
+-        if hasattr(_manylinux, "manylinux2014_compatible"):
+-            return bool(_manylinux.manylinux2014_compatible)
+-    return True
+-
+-
+-_LEGACY_MANYLINUX_MAP = {
+-    # CentOS 7 w/ glibc 2.17 (PEP 599)
+-    (2, 17): "manylinux2014",
+-    # CentOS 6 w/ glibc 2.12 (PEP 571)
+-    (2, 12): "manylinux2010",
+-    # CentOS 5 w/ glibc 2.5 (PEP 513)
+-    (2, 5): "manylinux1",
+-}
+-
+-
+-def platform_tags(linux: str, arch: str) -> Iterator[str]:
+-    if not _have_compatible_abi(arch):
+-        return
+-    # Oldest glibc to be supported regardless of architecture is (2, 17).
+-    too_old_glibc2 = _GLibCVersion(2, 16)
+-    if arch in {"x86_64", "i686"}:
+-        # On x86/i686 also oldest glibc to be supported is (2, 5).
+-        too_old_glibc2 = _GLibCVersion(2, 4)
+-    current_glibc = _GLibCVersion(*_get_glibc_version())
+-    glibc_max_list = [current_glibc]
+-    # We can assume compatibility across glibc major versions.
+-    # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
+-    #
+-    # Build a list of maximum glibc versions so that we can
+-    # output the canonical list of all glibc from current_glibc
+-    # down to too_old_glibc2, including all intermediary versions.
+-    for glibc_major in range(current_glibc.major - 1, 1, -1):
+-        glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
+-        glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
+-    for glibc_max in glibc_max_list:
+-        if glibc_max.major == too_old_glibc2.major:
+-            min_minor = too_old_glibc2.minor
+-        else:
+-            # For other glibc major versions oldest supported is (x, 0).
+-            min_minor = -1
+-        for glibc_minor in range(glibc_max.minor, min_minor, -1):
+-            glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
+-            tag = "manylinux_{}_{}".format(*glibc_version)
+-            if _is_compatible(tag, arch, glibc_version):
+-                yield linux.replace("linux", tag)
+-            # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
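The tag walk in `platform_tags` is easiest to see for a single glibc major version: emit `manylinux_{major}_{minor}` tags from the detected version down to the floor, plus the legacy aliases where PEP 513/571/599 defined them. A simplified sketch (it ignores the `_manylinux` override module and the multi-major walk; names are illustrative):

LEGACY = {(2, 17): "manylinux2014", (2, 12): "manylinux2010", (2, 5): "manylinux1"}

def tags_for(arch, major, minor, floor_minor=17):
    # Walk minor versions downwards, emitting PEP 600 tags plus legacy aliases.
    for m in range(minor, floor_minor - 1, -1):
        yield f"manylinux_{major}_{m}_{arch}"
        if (major, m) in LEGACY:
            yield f"{LEGACY[(major, m)]}_{arch}"

print(list(tags_for("x86_64", 2, 19)))
# ['manylinux_2_19_x86_64', 'manylinux_2_18_x86_64',
#  'manylinux_2_17_x86_64', 'manylinux2014_x86_64']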
+- if glibc_version in _LEGACY_MANYLINUX_MAP: +- legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version] +- if _is_compatible(legacy_tag, arch, glibc_version): +- yield linux.replace("linux", legacy_tag) +diff --git a/src/poetry/core/_vendor/packaging/_musllinux.py b/src/poetry/core/_vendor/packaging/_musllinux.py +deleted file mode 100644 +index 8ac3059..0000000 +--- a/src/poetry/core/_vendor/packaging/_musllinux.py ++++ /dev/null +@@ -1,136 +0,0 @@ +-"""PEP 656 support. +- +-This module implements logic to detect if the currently running Python is +-linked against musl, and what musl version is used. +-""" +- +-import contextlib +-import functools +-import operator +-import os +-import re +-import struct +-import subprocess +-import sys +-from typing import IO, Iterator, NamedTuple, Optional, Tuple +- +- +-def _read_unpacked(f: IO[bytes], fmt: str) -> Tuple[int, ...]: +- return struct.unpack(fmt, f.read(struct.calcsize(fmt))) +- +- +-def _parse_ld_musl_from_elf(f: IO[bytes]) -> Optional[str]: +- """Detect musl libc location by parsing the Python executable. +- +- Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca +- ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html +- """ +- f.seek(0) +- try: +- ident = _read_unpacked(f, "16B") +- except struct.error: +- return None +- if ident[:4] != tuple(b"\x7fELF"): # Invalid magic, not ELF. +- return None +- f.seek(struct.calcsize("HHI"), 1) # Skip file type, machine, and version. +- +- try: +- # e_fmt: Format for program header. +- # p_fmt: Format for section header. +- # p_idx: Indexes to find p_type, p_offset, and p_filesz. +- e_fmt, p_fmt, p_idx = { +- 1: ("IIIIHHH", "IIIIIIII", (0, 1, 4)), # 32-bit. +- 2: ("QQQIHHH", "IIQQQQQQ", (0, 2, 5)), # 64-bit. +- }[ident[4]] +- except KeyError: +- return None +- else: +- p_get = operator.itemgetter(*p_idx) +- +- # Find the interpreter section and return its content. +- try: +- _, e_phoff, _, _, _, e_phentsize, e_phnum = _read_unpacked(f, e_fmt) +- except struct.error: +- return None +- for i in range(e_phnum + 1): +- f.seek(e_phoff + e_phentsize * i) +- try: +- p_type, p_offset, p_filesz = p_get(_read_unpacked(f, p_fmt)) +- except struct.error: +- return None +- if p_type != 3: # Not PT_INTERP. +- continue +- f.seek(p_offset) +- interpreter = os.fsdecode(f.read(p_filesz)).strip("\0") +- if "musl" not in interpreter: +- return None +- return interpreter +- return None +- +- +-class _MuslVersion(NamedTuple): +- major: int +- minor: int +- +- +-def _parse_musl_version(output: str) -> Optional[_MuslVersion]: +- lines = [n for n in (n.strip() for n in output.splitlines()) if n] +- if len(lines) < 2 or lines[0][:4] != "musl": +- return None +- m = re.match(r"Version (\d+)\.(\d+)", lines[1]) +- if not m: +- return None +- return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2))) +- +- +-@functools.lru_cache() +-def _get_musl_version(executable: str) -> Optional[_MuslVersion]: +- """Detect currently-running musl runtime version. +- +- This is done by checking the specified executable's dynamic linking +- information, and invoking the loader to parse its output for a version +- string. 
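`_parse_musl_version` above expects the musl loader's stderr banner: a `musl libc (...)` line followed by `Version X.Y`. A standalone sketch of that parse (reimplemented here for illustration):

import re
from typing import NamedTuple, Optional

class MuslVersion(NamedTuple):
    major: int
    minor: int

def parse_musl_version(output: str) -> Optional[MuslVersion]:
    # Keep non-empty lines; the banner must start with "musl".
    lines = [ln.strip() for ln in output.splitlines() if ln.strip()]
    if len(lines) < 2 or lines[0][:4] != "musl":
        return None
    m = re.match(r"Version (\d+)\.(\d+)", lines[1])
    return MuslVersion(int(m.group(1)), int(m.group(2))) if m else None

banner = "musl libc (x86_64)\nVersion 1.2.2\nDynamic Program Loader"
assert parse_musl_version(banner) == MuslVersion(1, 2)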
If the loader is musl, the output would be something like:: +- +- musl libc (x86_64) +- Version 1.2.2 +- Dynamic Program Loader +- """ +- with contextlib.ExitStack() as stack: +- try: +- f = stack.enter_context(open(executable, "rb")) +- except OSError: +- return None +- ld = _parse_ld_musl_from_elf(f) +- if not ld: +- return None +- proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True) +- return _parse_musl_version(proc.stderr) +- +- +-def platform_tags(arch: str) -> Iterator[str]: +- """Generate musllinux tags compatible to the current platform. +- +- :param arch: Should be the part of platform tag after the ``linux_`` +- prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a +- prerequisite for the current platform to be musllinux-compatible. +- +- :returns: An iterator of compatible musllinux tags. +- """ +- sys_musl = _get_musl_version(sys.executable) +- if sys_musl is None: # Python not dynamically linked against musl. +- return +- for minor in range(sys_musl.minor, -1, -1): +- yield f"musllinux_{sys_musl.major}_{minor}_{arch}" +- +- +-if __name__ == "__main__": # pragma: no cover +- import sysconfig +- +- plat = sysconfig.get_platform() +- assert plat.startswith("linux-"), "not linux" +- +- print("plat:", plat) +- print("musl:", _get_musl_version(sys.executable)) +- print("tags:", end=" ") +- for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])): +- print(t, end="\n ") +diff --git a/src/poetry/core/_vendor/packaging/_structures.py b/src/poetry/core/_vendor/packaging/_structures.py +deleted file mode 100644 +index 90a6465..0000000 +--- a/src/poetry/core/_vendor/packaging/_structures.py ++++ /dev/null +@@ -1,61 +0,0 @@ +-# This file is dual licensed under the terms of the Apache License, Version +-# 2.0, and the BSD License. See the LICENSE file in the root of this repository +-# for complete details. +- +- +-class InfinityType: +- def __repr__(self) -> str: +- return "Infinity" +- +- def __hash__(self) -> int: +- return hash(repr(self)) +- +- def __lt__(self, other: object) -> bool: +- return False +- +- def __le__(self, other: object) -> bool: +- return False +- +- def __eq__(self, other: object) -> bool: +- return isinstance(other, self.__class__) +- +- def __gt__(self, other: object) -> bool: +- return True +- +- def __ge__(self, other: object) -> bool: +- return True +- +- def __neg__(self: object) -> "NegativeInfinityType": +- return NegativeInfinity +- +- +-Infinity = InfinityType() +- +- +-class NegativeInfinityType: +- def __repr__(self) -> str: +- return "-Infinity" +- +- def __hash__(self) -> int: +- return hash(repr(self)) +- +- def __lt__(self, other: object) -> bool: +- return True +- +- def __le__(self, other: object) -> bool: +- return True +- +- def __eq__(self, other: object) -> bool: +- return isinstance(other, self.__class__) +- +- def __gt__(self, other: object) -> bool: +- return False +- +- def __ge__(self, other: object) -> bool: +- return False +- +- def __neg__(self: object) -> InfinityType: +- return Infinity +- +- +-NegativeInfinity = NegativeInfinityType() +diff --git a/src/poetry/core/_vendor/packaging/markers.py b/src/poetry/core/_vendor/packaging/markers.py +deleted file mode 100644 +index cb640e8..0000000 +--- a/src/poetry/core/_vendor/packaging/markers.py ++++ /dev/null +@@ -1,304 +0,0 @@ +-# This file is dual licensed under the terms of the Apache License, Version +-# 2.0, and the BSD License. See the LICENSE file in the root of this repository +-# for complete details. 
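The musllinux tag walk above is much simpler than the manylinux one: per PEP 656, compatibility runs from the detected musl minor version straight down to zero. A sketch of that loop in isolation:

def musllinux_tags(arch, major, minor):
    # PEP 656: compatible tags run from the detected minor down to 0.
    for m in range(minor, -1, -1):
        yield f"musllinux_{major}_{m}_{arch}"

assert list(musllinux_tags("x86_64", 1, 2)) == [
    "musllinux_1_2_x86_64",
    "musllinux_1_1_x86_64",
    "musllinux_1_0_x86_64",
]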
+- +-import operator +-import os +-import platform +-import sys +-from typing import Any, Callable, Dict, List, Optional, Tuple, Union +- +-from pyparsing import ( # noqa: N817 +- Forward, +- Group, +- Literal as L, +- ParseException, +- ParseResults, +- QuotedString, +- ZeroOrMore, +- stringEnd, +- stringStart, +-) +- +-from .specifiers import InvalidSpecifier, Specifier +- +-__all__ = [ +- "InvalidMarker", +- "UndefinedComparison", +- "UndefinedEnvironmentName", +- "Marker", +- "default_environment", +-] +- +-Operator = Callable[[str, str], bool] +- +- +-class InvalidMarker(ValueError): +- """ +- An invalid marker was found, users should refer to PEP 508. +- """ +- +- +-class UndefinedComparison(ValueError): +- """ +- An invalid operation was attempted on a value that doesn't support it. +- """ +- +- +-class UndefinedEnvironmentName(ValueError): +- """ +- A name was attempted to be used that does not exist inside of the +- environment. +- """ +- +- +-class Node: +- def __init__(self, value: Any) -> None: +- self.value = value +- +- def __str__(self) -> str: +- return str(self.value) +- +- def __repr__(self) -> str: +- return f"<{self.__class__.__name__}('{self}')>" +- +- def serialize(self) -> str: +- raise NotImplementedError +- +- +-class Variable(Node): +- def serialize(self) -> str: +- return str(self) +- +- +-class Value(Node): +- def serialize(self) -> str: +- return f'"{self}"' +- +- +-class Op(Node): +- def serialize(self) -> str: +- return str(self) +- +- +-VARIABLE = ( +- L("implementation_version") +- | L("platform_python_implementation") +- | L("implementation_name") +- | L("python_full_version") +- | L("platform_release") +- | L("platform_version") +- | L("platform_machine") +- | L("platform_system") +- | L("python_version") +- | L("sys_platform") +- | L("os_name") +- | L("os.name") # PEP-345 +- | L("sys.platform") # PEP-345 +- | L("platform.version") # PEP-345 +- | L("platform.machine") # PEP-345 +- | L("platform.python_implementation") # PEP-345 +- | L("python_implementation") # undocumented setuptools legacy +- | L("extra") # PEP-508 +-) +-ALIASES = { +- "os.name": "os_name", +- "sys.platform": "sys_platform", +- "platform.version": "platform_version", +- "platform.machine": "platform_machine", +- "platform.python_implementation": "platform_python_implementation", +- "python_implementation": "platform_python_implementation", +-} +-VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) +- +-VERSION_CMP = ( +- L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<") +-) +- +-MARKER_OP = VERSION_CMP | L("not in") | L("in") +-MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) +- +-MARKER_VALUE = QuotedString("'") | QuotedString('"') +-MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) +- +-BOOLOP = L("and") | L("or") +- +-MARKER_VAR = VARIABLE | MARKER_VALUE +- +-MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) +-MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) +- +-LPAREN = L("(").suppress() +-RPAREN = L(")").suppress() +- +-MARKER_EXPR = Forward() +-MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) +-MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) +- +-MARKER = stringStart + MARKER_EXPR + stringEnd +- +- +-def _coerce_parse_result(results: Union[ParseResults, List[Any]]) -> List[Any]: +- if isinstance(results, ParseResults): +- return [_coerce_parse_result(i) for i in results] +- else: +- return results +- +- +-def _format_marker( +- marker: Union[List[str], Tuple[Node, ...], str], 
first: Optional[bool] = True +-) -> str: +- +- assert isinstance(marker, (list, tuple, str)) +- +- # Sometimes we have a structure like [[...]] which is a single item list +- # where the single item is itself it's own list. In that case we want skip +- # the rest of this function so that we don't get extraneous () on the +- # outside. +- if ( +- isinstance(marker, list) +- and len(marker) == 1 +- and isinstance(marker[0], (list, tuple)) +- ): +- return _format_marker(marker[0]) +- +- if isinstance(marker, list): +- inner = (_format_marker(m, first=False) for m in marker) +- if first: +- return " ".join(inner) +- else: +- return "(" + " ".join(inner) + ")" +- elif isinstance(marker, tuple): +- return " ".join([m.serialize() for m in marker]) +- else: +- return marker +- +- +-_operators: Dict[str, Operator] = { +- "in": lambda lhs, rhs: lhs in rhs, +- "not in": lambda lhs, rhs: lhs not in rhs, +- "<": operator.lt, +- "<=": operator.le, +- "==": operator.eq, +- "!=": operator.ne, +- ">=": operator.ge, +- ">": operator.gt, +-} +- +- +-def _eval_op(lhs: str, op: Op, rhs: str) -> bool: +- try: +- spec = Specifier("".join([op.serialize(), rhs])) +- except InvalidSpecifier: +- pass +- else: +- return spec.contains(lhs) +- +- oper: Optional[Operator] = _operators.get(op.serialize()) +- if oper is None: +- raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.") +- +- return oper(lhs, rhs) +- +- +-class Undefined: +- pass +- +- +-_undefined = Undefined() +- +- +-def _get_env(environment: Dict[str, str], name: str) -> str: +- value: Union[str, Undefined] = environment.get(name, _undefined) +- +- if isinstance(value, Undefined): +- raise UndefinedEnvironmentName( +- f"{name!r} does not exist in evaluation environment." +- ) +- +- return value +- +- +-def _evaluate_markers(markers: List[Any], environment: Dict[str, str]) -> bool: +- groups: List[List[bool]] = [[]] +- +- for marker in markers: +- assert isinstance(marker, (list, tuple, str)) +- +- if isinstance(marker, list): +- groups[-1].append(_evaluate_markers(marker, environment)) +- elif isinstance(marker, tuple): +- lhs, op, rhs = marker +- +- if isinstance(lhs, Variable): +- lhs_value = _get_env(environment, lhs.value) +- rhs_value = rhs.value +- else: +- lhs_value = lhs.value +- rhs_value = _get_env(environment, rhs.value) +- +- groups[-1].append(_eval_op(lhs_value, op, rhs_value)) +- else: +- assert marker in ["and", "or"] +- if marker == "or": +- groups.append([]) +- +- return any(all(item) for item in groups) +- +- +-def format_full_version(info: "sys._version_info") -> str: +- version = "{0.major}.{0.minor}.{0.micro}".format(info) +- kind = info.releaselevel +- if kind != "final": +- version += kind[0] + str(info.serial) +- return version +- +- +-def default_environment() -> Dict[str, str]: +- iver = format_full_version(sys.implementation.version) +- implementation_name = sys.implementation.name +- return { +- "implementation_name": implementation_name, +- "implementation_version": iver, +- "os_name": os.name, +- "platform_machine": platform.machine(), +- "platform_release": platform.release(), +- "platform_system": platform.system(), +- "platform_version": platform.version(), +- "python_full_version": platform.python_version(), +- "platform_python_implementation": platform.python_implementation(), +- "python_version": ".".join(platform.python_version_tuple()[:2]), +- "sys_platform": sys.platform, +- } +- +- +-class Marker: +- def __init__(self, marker: str) -> None: +- try: +- self._markers = 
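`Marker.evaluate` (begun above) merges any caller-supplied environment over `default_environment()` before handing the parsed structure to `_evaluate_markers`, where `or` opens a new group and the result is `any(all(group))`. Usage, assuming the vendored copy behaves like upstream `packaging.markers`:

from packaging.markers import Marker

m = Marker('python_version >= "3.7" and (os_name == "posix" or os_name == "nt")')
print(m.evaluate())  # evaluated against the running interpreter's environment
# Overrides are merged over default_environment():
print(m.evaluate({"os_name": "java", "python_version": "3.10"}))  # False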
_coerce_parse_result(MARKER.parseString(marker))
+-        except ParseException as e:
+-            raise InvalidMarker(
+-                f"Invalid marker: {marker!r}, parse error at "
+-                f"{marker[e.loc : e.loc + 8]!r}"
+-            )
+-
+-    def __str__(self) -> str:
+-        return _format_marker(self._markers)
+-
+-    def __repr__(self) -> str:
+-        return f"<Marker('{self}')>"
+-
+-    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
+-        """Evaluate a marker.
+-
+-        Return the boolean from evaluating the given marker against the
+-        environment. environment is an optional argument to override all or
+-        part of the determined environment.
+-
+-        The environment is determined from the current Python process.
+-        """
+-        current_environment = default_environment()
+-        if environment is not None:
+-            current_environment.update(environment)
+-
+-        return _evaluate_markers(self._markers, current_environment)
+diff --git a/src/poetry/core/_vendor/packaging/py.typed b/src/poetry/core/_vendor/packaging/py.typed
+deleted file mode 100644
+index e69de29..0000000
+diff --git a/src/poetry/core/_vendor/packaging/requirements.py b/src/poetry/core/_vendor/packaging/requirements.py
+deleted file mode 100644
+index 53f9a3a..0000000
+--- a/src/poetry/core/_vendor/packaging/requirements.py
++++ /dev/null
+@@ -1,146 +0,0 @@
+-# This file is dual licensed under the terms of the Apache License, Version
+-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+-# for complete details.
+-
+-import re
+-import string
+-import urllib.parse
+-from typing import List, Optional as TOptional, Set
+-
+-from pyparsing import ( # noqa
+-    Combine,
+-    Literal as L,
+-    Optional,
+-    ParseException,
+-    Regex,
+-    Word,
+-    ZeroOrMore,
+-    originalTextFor,
+-    stringEnd,
+-    stringStart,
+-)
+-
+-from .markers import MARKER_EXPR, Marker
+-from .specifiers import LegacySpecifier, Specifier, SpecifierSet
+-
+-
+-class InvalidRequirement(ValueError):
+-    """
+-    An invalid requirement was found, users should refer to PEP 508.
+- """ +- +- +-ALPHANUM = Word(string.ascii_letters + string.digits) +- +-LBRACKET = L("[").suppress() +-RBRACKET = L("]").suppress() +-LPAREN = L("(").suppress() +-RPAREN = L(")").suppress() +-COMMA = L(",").suppress() +-SEMICOLON = L(";").suppress() +-AT = L("@").suppress() +- +-PUNCTUATION = Word("-_.") +-IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) +-IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) +- +-NAME = IDENTIFIER("name") +-EXTRA = IDENTIFIER +- +-URI = Regex(r"[^ ]+")("url") +-URL = AT + URI +- +-EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) +-EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") +- +-VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) +-VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) +- +-VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY +-VERSION_MANY = Combine( +- VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False +-)("_raw_spec") +-_VERSION_SPEC = Optional((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY) +-_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "") +- +-VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") +-VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) +- +-MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") +-MARKER_EXPR.setParseAction( +- lambda s, l, t: Marker(s[t._original_start : t._original_end]) +-) +-MARKER_SEPARATOR = SEMICOLON +-MARKER = MARKER_SEPARATOR + MARKER_EXPR +- +-VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) +-URL_AND_MARKER = URL + Optional(MARKER) +- +-NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) +- +-REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd +-# pyparsing isn't thread safe during initialization, so we do it eagerly, see +-# issue #104 +-REQUIREMENT.parseString("x[]") +- +- +-class Requirement: +- """Parse a requirement. +- +- Parse a given requirement string into its parts, such as name, specifier, +- URL, and extras. Raises InvalidRequirement on a badly-formed requirement +- string. +- """ +- +- # TODO: Can we test whether something is contained within a requirement? +- # If so how do we do that? Do we need to test against the _name_ of +- # the thing as well as the version? What about the markers? +- # TODO: Can we normalize the name and extra name? 
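The pyparsing grammar above feeds the `Requirement` class, which splits a PEP 508 string into name, extras, specifier, and marker. Usage, assuming the vendored copy behaves like upstream `packaging.requirements`:

from packaging.requirements import Requirement

req = Requirement('requests[security,socks]>=2.8.1,==2.8.*; python_version < "2.7"')
assert req.name == "requests"
assert req.extras == {"security", "socks"}
assert str(req.specifier) == "==2.8.*,>=2.8.1"  # SpecifierSet sorts on str()
assert str(req.marker) == 'python_version < "2.7"'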
+-
+-    def __init__(self, requirement_string: str) -> None:
+-        try:
+-            req = REQUIREMENT.parseString(requirement_string)
+-        except ParseException as e:
+-            raise InvalidRequirement(
+-                f'Parse error at "{ requirement_string[e.loc : e.loc + 8]!r}": {e.msg}'
+-            )
+-
+-        self.name: str = req.name
+-        if req.url:
+-            parsed_url = urllib.parse.urlparse(req.url)
+-            if parsed_url.scheme == "file":
+-                if urllib.parse.urlunparse(parsed_url) != req.url:
+-                    raise InvalidRequirement("Invalid URL given")
+-            elif not (parsed_url.scheme and parsed_url.netloc) or (
+-                not parsed_url.scheme and not parsed_url.netloc
+-            ):
+-                raise InvalidRequirement(f"Invalid URL: {req.url}")
+-            self.url: TOptional[str] = req.url
+-        else:
+-            self.url = None
+-        self.extras: Set[str] = set(req.extras.asList() if req.extras else [])
+-        self.specifier: SpecifierSet = SpecifierSet(req.specifier)
+-        self.marker: TOptional[Marker] = req.marker if req.marker else None
+-
+-    def __str__(self) -> str:
+-        parts: List[str] = [self.name]
+-
+-        if self.extras:
+-            formatted_extras = ",".join(sorted(self.extras))
+-            parts.append(f"[{formatted_extras}]")
+-
+-        if self.specifier:
+-            parts.append(str(self.specifier))
+-
+-        if self.url:
+-            parts.append(f"@ {self.url}")
+-            if self.marker:
+-                parts.append(" ")
+-
+-        if self.marker:
+-            parts.append(f"; {self.marker}")
+-
+-        return "".join(parts)
+-
+-    def __repr__(self) -> str:
+-        return f"<Requirement('{self}')>"
+diff --git a/src/poetry/core/_vendor/packaging/specifiers.py b/src/poetry/core/_vendor/packaging/specifiers.py
+deleted file mode 100644
+index 0e218a6..0000000
+--- a/src/poetry/core/_vendor/packaging/specifiers.py
++++ /dev/null
+@@ -1,802 +0,0 @@
+-# This file is dual licensed under the terms of the Apache License, Version
+-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+-# for complete details.
+-
+-import abc
+-import functools
+-import itertools
+-import re
+-import warnings
+-from typing import (
+-    Callable,
+-    Dict,
+-    Iterable,
+-    Iterator,
+-    List,
+-    Optional,
+-    Pattern,
+-    Set,
+-    Tuple,
+-    TypeVar,
+-    Union,
+-)
+-
+-from .utils import canonicalize_version
+-from .version import LegacyVersion, Version, parse
+-
+-ParsedVersion = Union[Version, LegacyVersion]
+-UnparsedVersion = Union[Version, LegacyVersion, str]
+-VersionTypeVar = TypeVar("VersionTypeVar", bound=UnparsedVersion)
+-CallableOperator = Callable[[ParsedVersion, str], bool]
+-
+-
+-class InvalidSpecifier(ValueError):
+-    """
+-    An invalid specifier was found, users should refer to PEP 440.
+-    """
+-
+-
+-class BaseSpecifier(metaclass=abc.ABCMeta):
+-    @abc.abstractmethod
+-    def __str__(self) -> str:
+-        """
+-        Returns the str representation of this Specifier like object. This
+-        should be representative of the Specifier itself.
+-        """
+-
+-    @abc.abstractmethod
+-    def __hash__(self) -> int:
+-        """
+-        Returns a hash value for this Specifier like object.
+-        """
+-
+-    @abc.abstractmethod
+-    def __eq__(self, other: object) -> bool:
+-        """
+-        Returns a boolean representing whether or not the two Specifier like
+-        objects are equal.
+-        """
+-
+-    @abc.abstractproperty
+-    def prereleases(self) -> Optional[bool]:
+-        """
+-        Returns whether or not pre-releases as a whole are allowed by this
+-        specifier.
+-        """
+-
+-    @prereleases.setter
+-    def prereleases(self, value: bool) -> None:
+-        """
+-        Sets whether or not pre-releases as a whole are allowed by this
+-        specifier.
+- """ +- +- @abc.abstractmethod +- def contains(self, item: str, prereleases: Optional[bool] = None) -> bool: +- """ +- Determines if the given item is contained within this specifier. +- """ +- +- @abc.abstractmethod +- def filter( +- self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None +- ) -> Iterable[VersionTypeVar]: +- """ +- Takes an iterable of items and filters them so that only items which +- are contained within this specifier are allowed in it. +- """ +- +- +-class _IndividualSpecifier(BaseSpecifier): +- +- _operators: Dict[str, str] = {} +- _regex: Pattern[str] +- +- def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: +- match = self._regex.search(spec) +- if not match: +- raise InvalidSpecifier(f"Invalid specifier: '{spec}'") +- +- self._spec: Tuple[str, str] = ( +- match.group("operator").strip(), +- match.group("version").strip(), +- ) +- +- # Store whether or not this Specifier should accept prereleases +- self._prereleases = prereleases +- +- def __repr__(self) -> str: +- pre = ( +- f", prereleases={self.prereleases!r}" +- if self._prereleases is not None +- else "" +- ) +- +- return f"<{self.__class__.__name__}({str(self)!r}{pre})>" +- +- def __str__(self) -> str: +- return "{}{}".format(*self._spec) +- +- @property +- def _canonical_spec(self) -> Tuple[str, str]: +- return self._spec[0], canonicalize_version(self._spec[1]) +- +- def __hash__(self) -> int: +- return hash(self._canonical_spec) +- +- def __eq__(self, other: object) -> bool: +- if isinstance(other, str): +- try: +- other = self.__class__(str(other)) +- except InvalidSpecifier: +- return NotImplemented +- elif not isinstance(other, self.__class__): +- return NotImplemented +- +- return self._canonical_spec == other._canonical_spec +- +- def _get_operator(self, op: str) -> CallableOperator: +- operator_callable: CallableOperator = getattr( +- self, f"_compare_{self._operators[op]}" +- ) +- return operator_callable +- +- def _coerce_version(self, version: UnparsedVersion) -> ParsedVersion: +- if not isinstance(version, (LegacyVersion, Version)): +- version = parse(version) +- return version +- +- @property +- def operator(self) -> str: +- return self._spec[0] +- +- @property +- def version(self) -> str: +- return self._spec[1] +- +- @property +- def prereleases(self) -> Optional[bool]: +- return self._prereleases +- +- @prereleases.setter +- def prereleases(self, value: bool) -> None: +- self._prereleases = value +- +- def __contains__(self, item: str) -> bool: +- return self.contains(item) +- +- def contains( +- self, item: UnparsedVersion, prereleases: Optional[bool] = None +- ) -> bool: +- +- # Determine if prereleases are to be allowed or not. +- if prereleases is None: +- prereleases = self.prereleases +- +- # Normalize item to a Version or LegacyVersion, this allows us to have +- # a shortcut for ``"2.0" in Specifier(">=2") +- normalized_item = self._coerce_version(item) +- +- # Determine if we should be supporting prereleases in this specifier +- # or not, if we do not support prereleases than we can short circuit +- # logic if this version is a prereleases. +- if normalized_item.is_prerelease and not prereleases: +- return False +- +- # Actually do the comparison to determine if this item is contained +- # within this Specifier or not. 
+-        operator_callable: CallableOperator = self._get_operator(self.operator)
+-        return operator_callable(normalized_item, self.version)
+-
+-    def filter(
+-        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
+-    ) -> Iterable[VersionTypeVar]:
+-
+-        yielded = False
+-        found_prereleases = []
+-
+-        kw = {"prereleases": prereleases if prereleases is not None else True}
+-
+-        # Attempt to iterate over all the values in the iterable and if any of
+-        # them match, yield them.
+-        for version in iterable:
+-            parsed_version = self._coerce_version(version)
+-
+-            if self.contains(parsed_version, **kw):
+-                # If our version is a prerelease, and we were not set to allow
+-                # prereleases, then we'll store it for later in case nothing
+-                # else matches this specifier.
+-                if parsed_version.is_prerelease and not (
+-                    prereleases or self.prereleases
+-                ):
+-                    found_prereleases.append(version)
+-                # Either this is not a prerelease, or we should have been
+-                # accepting prereleases from the beginning.
+-                else:
+-                    yielded = True
+-                    yield version
+-
+-        # Now that we've iterated over everything, determine if we've yielded
+-        # any values, and if we have not and we have any prereleases stored up
+-        # then we will go ahead and yield the prereleases.
+-        if not yielded and found_prereleases:
+-            for version in found_prereleases:
+-                yield version
+-
+-
+-class LegacySpecifier(_IndividualSpecifier):
+-
+-    _regex_str = r"""
+-        (?P<operator>(==|!=|<=|>=|<|>))
+-        \s*
+-        (?P<version>
+-            [^,;\s)]* # Since this is a "legacy" specifier, and the version
+-                      # string can be just about anything, we match everything
+-                      # except for whitespace, a semi-colon for marker support,
+-                      # a closing paren since versions can be enclosed in
+-                      # them, and a comma since it's a version separator.
+-        )
+-        """
+-
+-    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+-
+-    _operators = {
+-        "==": "equal",
+-        "!=": "not_equal",
+-        "<=": "less_than_equal",
+-        ">=": "greater_than_equal",
+-        "<": "less_than",
+-        ">": "greater_than",
+-    }
+-
+-    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
+-        super().__init__(spec, prereleases)
+-
+-        warnings.warn(
+-            "Creating a LegacyVersion has been deprecated and will be "
+-            "removed in the next major release",
+-            DeprecationWarning,
+-        )
+-
+-    def _coerce_version(self, version: UnparsedVersion) -> LegacyVersion:
+-        if not isinstance(version, LegacyVersion):
+-            version = LegacyVersion(str(version))
+-        return version
+-
+-    def _compare_equal(self, prospective: LegacyVersion, spec: str) -> bool:
+-        return prospective == self._coerce_version(spec)
+-
+-    def _compare_not_equal(self, prospective: LegacyVersion, spec: str) -> bool:
+-        return prospective != self._coerce_version(spec)
+-
+-    def _compare_less_than_equal(self, prospective: LegacyVersion, spec: str) -> bool:
+-        return prospective <= self._coerce_version(spec)
+-
+-    def _compare_greater_than_equal(
+-        self, prospective: LegacyVersion, spec: str
+-    ) -> bool:
+-        return prospective >= self._coerce_version(spec)
+-
+-    def _compare_less_than(self, prospective: LegacyVersion, spec: str) -> bool:
+-        return prospective < self._coerce_version(spec)
+-
+-    def _compare_greater_than(self, prospective: LegacyVersion, spec: str) -> bool:
+-        return prospective > self._coerce_version(spec)
+-
+-
+-def _require_version_compare(
+-    fn: Callable[["Specifier", ParsedVersion, str], bool]
+-) -> Callable[["Specifier", ParsedVersion, str], bool]:
+-    @functools.wraps(fn)
+-    def wrapped(self: "Specifier", prospective: ParsedVersion, spec: str) -> bool:
+-        if not isinstance(prospective, Version):
+-            return False
+-        return fn(self, prospective, spec)
+-
+-    return wrapped
+-
+-
+-class Specifier(_IndividualSpecifier):
+-
+-    _regex_str = r"""
+-        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
+-        (?P<version>
+-            (?:
+-                # The identity operators allow for an escape hatch that will
+-                # do an exact string match of the version you wish to install.
+-                # This will not be parsed by PEP 440 and we cannot determine
+-                # any semantic meaning from it. This operator is discouraged
+-                # but included entirely as an escape hatch.
+-                (?<====)  # Only match for the identity operator
+-                \s*
+-                [^\s]*    # We just match everything, except for whitespace
+-                          # since we are only testing for strict identity.
+-            )
+-            |
+-            (?:
+-                # The (non)equality operators allow for wild card and local
+-                # versions to be specified so we have to define these two
+-                # operators separately to enable that.
+-                (?<===|!=)            # Only match for equals and not equals
+-
+-                \s*
+-                v?
+-                (?:[0-9]+!)?          # epoch
+-                [0-9]+(?:\.[0-9]+)*   # release
+-                (?:                   # pre release
+-                    [-_\.]?
+-                    (a|b|c|rc|alpha|beta|pre|preview)
+-                    [-_\.]?
+-                    [0-9]*
+-                )?
+-                (?:                   # post release
+-                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+-                )?
+-
+-                # You cannot use a wild card and a dev or local version
+-                # together so group them with a | and make them optional.
+-                (?:
+-                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
+-                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
+-                    |
+-                    \.\*  # Wild card syntax of .*
+-                )?
+-            )
+-            |
+-            (?:
+-                # The compatible operator requires at least two digits in the
+-                # release segment.
+-                (?<=~=)               # Only match for the compatible operator
+-
+-                \s*
+-                v?
+-                (?:[0-9]+!)?          # epoch
+-                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
+-                (?:                   # pre release
+-                    [-_\.]?
+-                    (a|b|c|rc|alpha|beta|pre|preview)
+-                    [-_\.]?
+-                    [0-9]*
+-                )?
+-                (?:                                   # post release
+-                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+-                )?
+-                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
+-            )
+-            |
+-            (?:
+-                # All other operators only allow a sub set of what the
+-                # (non)equality operators do. Specifically they do not allow
+-                # local versions to be specified nor do they allow the prefix
+-                # matching wild cards.
+-                (?<!==|!=|~=)         # We have special cases for these
+-                                      # operators so we want to make sure they
+-                                      # don't match here.
+-
+-                \s*
+-                v?
+-                (?:[0-9]+!)?          # epoch
+-                [0-9]+(?:\.[0-9]+)*   # release
+-                (?:                   # pre release
+-                    [-_\.]?
+-                    (a|b|c|rc|alpha|beta|pre|preview)
+-                    [-_\.]?
+-                    [0-9]*
+-                )?
+-                (?:                                   # post release
+-                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+-                )?
+-                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
+-            )
+-        )
+-        """
+-
+-    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+-
+-    _operators = {
+-        "~=": "compatible",
+-        "==": "equal",
+-        "!=": "not_equal",
+-        "<=": "less_than_equal",
+-        ">=": "greater_than_equal",
+-        "<": "less_than",
+-        ">": "greater_than",
+-        "===": "arbitrary",
+-    }
+-
+-    @_require_version_compare
+-    def _compare_compatible(self, prospective: ParsedVersion, spec: str) -> bool:
+-
+-        # Compatible releases have an equivalent combination of >= and ==. That
+-        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
+-        # implement this in terms of the other specifiers instead of
+-        # implementing it ourselves. The only thing we need to do is construct
+-        # the other specifiers.
+-
+-        # We want everything but the last item in the version, but we want to
+-        # ignore suffix segments.
+-        prefix = ".".join(
+-            list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
+-        )
+-
+-        # Add the prefix notation to the end of our string
+-        prefix += ".*"
+-
+-        return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
+-            prospective, prefix
+-        )
+-
+-    @_require_version_compare
+-    def _compare_equal(self, prospective: ParsedVersion, spec: str) -> bool:
+-
+-        # We need special logic to handle prefix matching
+-        if spec.endswith(".*"):
+-            # In the case of prefix matching we want to ignore local segment.
+-            prospective = Version(prospective.public)
+-            # Split the spec out by dots, and pretend that there is an implicit
+-            # dot in between a release segment and a pre-release segment.
+-            split_spec = _version_split(spec[:-2])  # Remove the trailing .*
+-
+-            # Split the prospective version out by dots, and pretend that there
+-            # is an implicit dot in between a release segment and a pre-release
+-            # segment.
+-            split_prospective = _version_split(str(prospective))
+-
+-            # Shorten the prospective version to be the same length as the spec
+-            # so that we can determine if the specifier is a prefix of the
+-            # prospective version or not.
+-            shortened_prospective = split_prospective[: len(split_spec)]
+-
+-            # Pad out our two sides with zeros so that they both equal the same
+-            # length.
+-            padded_spec, padded_prospective = _pad_version(
+-                split_spec, shortened_prospective
+-            )
+-
+-            return padded_prospective == padded_spec
+-        else:
+-            # Convert our spec string into a Version
+-            spec_version = Version(spec)
+-
+-            # If the specifier does not have a local segment, then we want to
+-            # act as if the prospective version also does not have a local
+-            # segment.
+-            if not spec_version.local:
+-                prospective = Version(prospective.public)
+-
+-            return prospective == spec_version
+-
+-    @_require_version_compare
+-    def _compare_not_equal(self, prospective: ParsedVersion, spec: str) -> bool:
+-        return not self._compare_equal(prospective, spec)
+-
+-    @_require_version_compare
+-    def _compare_less_than_equal(self, prospective: ParsedVersion, spec: str) -> bool:
+-
+-        # NB: Local version identifiers are NOT permitted in the version
+-        # specifier, so local version labels can be universally removed from
+-        # the prospective version.
+-        return Version(prospective.public) <= Version(spec)
+-
+-    @_require_version_compare
+-    def _compare_greater_than_equal(
+-        self, prospective: ParsedVersion, spec: str
+-    ) -> bool:
+-
+-        # NB: Local version identifiers are NOT permitted in the version
+-        # specifier, so local version labels can be universally removed from
+-        # the prospective version.
+-        return Version(prospective.public) >= Version(spec)
+-
+-    @_require_version_compare
+-    def _compare_less_than(self, prospective: ParsedVersion, spec_str: str) -> bool:
+-
+-        # Convert our spec to a Version instance, since we'll want to work with
+-        # it as a version.
+-        spec = Version(spec_str)
+-
+-        # Check to see if the prospective version is less than the spec
+-        # version. If it's not we can short circuit and just return False now
+-        # instead of doing extra unneeded work.
+-        if not prospective < spec:
+-            return False
+-
+-        # This special case is here so that, unless the specifier itself
+-        # includes is a pre-release version, that we do not accept pre-release
+-        # versions for the version mentioned in the specifier (e.g. <3.1 should
+-        # not match 3.1.dev0, but should match 3.0.dev0).
+-        if not spec.is_prerelease and prospective.is_prerelease:
+-            if Version(prospective.base_version) == Version(spec.base_version):
+-                return False
+-
+-        # If we've gotten to here, it means that prospective version is both
+-        # less than the spec version *and* it's not a pre-release of the same
+-        # version in the spec.
+-        return True
+-
+-    @_require_version_compare
+-    def _compare_greater_than(self, prospective: ParsedVersion, spec_str: str) -> bool:
+-
+-        # Convert our spec to a Version instance, since we'll want to work with
+-        # it as a version.
+-        spec = Version(spec_str)
+-
+-        # Check to see if the prospective version is greater than the spec
+-        # version. If it's not we can short circuit and just return False now
+-        # instead of doing extra unneeded work.
+-        if not prospective > spec:
+-            return False
+-
+-        # This special case is here so that, unless the specifier itself
+-        # includes is a post-release version, that we do not accept
+-        # post-release versions for the version mentioned in the specifier
+-        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
+-        if not spec.is_postrelease and prospective.is_postrelease:
+-            if Version(prospective.base_version) == Version(spec.base_version):
+-                return False
+-
+-        # Ensure that we do not allow a local version of the version mentioned
+-        # in the specifier, which is technically greater than, to match.
+-        if prospective.local is not None:
+-            if Version(prospective.base_version) == Version(spec.base_version):
+-                return False
+-
+-        # If we've gotten to here, it means that prospective version is both
+-        # greater than the spec version *and* it's not a pre-release of the
+-        # same version in the spec.
+-        return True
+-
+-    def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
+-        return str(prospective).lower() == str(spec).lower()
+-
+-    @property
+-    def prereleases(self) -> bool:
+-
+-        # If there is an explicit prereleases set for this, then we'll just
+-        # blindly use that.
+-        if self._prereleases is not None:
+-            return self._prereleases
+-
+-        # Look at all of our specifiers and determine if they are inclusive
+-        # operators, and if they are if they are including an explicit
+-        # prerelease.
+-        operator, version = self._spec
+-        if operator in ["==", ">=", "<=", "~=", "==="]:
+-            # The == specifier can include a trailing .*, if it does we
+-            # want to remove before parsing.
+-            if operator == "==" and version.endswith(".*"):
+-                version = version[:-2]
+-
+-            # Parse the version, and if it is a pre-release than this
+-            # specifier allows pre-releases.
+-            if parse(version).is_prerelease:
+-                return True
+-
+-        return False
+-
+-    @prereleases.setter
+-    def prereleases(self, value: bool) -> None:
+-        self._prereleases = value
+-
+-
+-_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
+-
+-
+-def _version_split(version: str) -> List[str]:
+-    result: List[str] = []
+-    for item in version.split("."):
+-        match = _prefix_regex.search(item)
+-        if match:
+-            result.extend(match.groups())
+-        else:
+-            result.append(item)
+-    return result
+-
+-
+-def _is_not_suffix(segment: str) -> bool:
+-    return not any(
+-        segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
+-    )
+-
+-
+-def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
+-    left_split, right_split = [], []
+-
+-    # Get the release segment of our versions
+-    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
+-    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
+-
+-    # Get the rest of our versions
+-    left_split.append(left[len(left_split[0]) :])
+-    right_split.append(right[len(right_split[0]) :])
+-
+-    # Insert our padding
+-    left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
+-    right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
+-
+-    return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
+-
+-
+-class SpecifierSet(BaseSpecifier):
+-    def __init__(
+-        self, specifiers: str = "", prereleases: Optional[bool] = None
+-    ) -> None:
+-
+-        # Split on , to break each individual specifier into it's own item, and
+-        # strip each item to remove leading/trailing whitespace.
+-        split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
+-
+-        # Parsed each individual specifier, attempting first to make it a
+-        # Specifier and falling back to a LegacySpecifier.
+-        parsed: Set[_IndividualSpecifier] = set()
+-        for specifier in split_specifiers:
+-            try:
+-                parsed.add(Specifier(specifier))
+-            except InvalidSpecifier:
+-                parsed.add(LegacySpecifier(specifier))
+-
+-        # Turn our parsed specifiers into a frozen set and save them for later.
+-        self._specs = frozenset(parsed)
+-
+-        # Store our prereleases value so we can use it later to determine if
+-        # we accept prereleases or not.
+-        self._prereleases = prereleases
+-
+-    def __repr__(self) -> str:
+-        pre = (
+-            f", prereleases={self.prereleases!r}"
+-            if self._prereleases is not None
+-            else ""
+-        )
+-
+-        return f"<SpecifierSet({str(self)!r}{pre})>"
+-
+-    def __str__(self) -> str:
+-        return ",".join(sorted(str(s) for s in self._specs))
+-
+-    def __hash__(self) -> int:
+-        return hash(self._specs)
+-
+-    def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
+-        if isinstance(other, str):
+-            other = SpecifierSet(other)
+-        elif not isinstance(other, SpecifierSet):
+-            return NotImplemented
+-
+-        specifier = SpecifierSet()
+-        specifier._specs = frozenset(self._specs | other._specs)
+-
+-        if self._prereleases is None and other._prereleases is not None:
+-            specifier._prereleases = other._prereleases
+-        elif self._prereleases is not None and other._prereleases is None:
+-            specifier._prereleases = self._prereleases
+-        elif self._prereleases == other._prereleases:
+-            specifier._prereleases = self._prereleases
+-        else:
+-            raise ValueError(
+-                "Cannot combine SpecifierSets with True and False prerelease "
+-                "overrides."
+-            )
+-
+-        return specifier
+-
+-    def __eq__(self, other: object) -> bool:
+-        if isinstance(other, (str, _IndividualSpecifier)):
+-            other = SpecifierSet(str(other))
+-        elif not isinstance(other, SpecifierSet):
+-            return NotImplemented
+-
+-        return self._specs == other._specs
+-
+-    def __len__(self) -> int:
+-        return len(self._specs)
+-
+-    def __iter__(self) -> Iterator[_IndividualSpecifier]:
+-        return iter(self._specs)
+-
+-    @property
+-    def prereleases(self) -> Optional[bool]:
+-
+-        # If we have been given an explicit prerelease modifier, then we'll
+-        # pass that through here.
+-        if self._prereleases is not None:
+-            return self._prereleases
+-
+-        # If we don't have any specifiers, and we don't have a forced value,
+-        # then we'll just return None since we don't know if this should have
+-        # pre-releases or not.
+-        if not self._specs:
+-            return None
+-
+-        # Otherwise we'll see if any of the given specifiers accept
+-        # prereleases, if any of them do we'll return True, otherwise False.
+-        return any(s.prereleases for s in self._specs)
+-
+-    @prereleases.setter
+-    def prereleases(self, value: bool) -> None:
+-        self._prereleases = value
+-
+-    def __contains__(self, item: UnparsedVersion) -> bool:
+-        return self.contains(item)
+-
+-    def contains(
+-        self, item: UnparsedVersion, prereleases: Optional[bool] = None
+-    ) -> bool:
+-
+-        # Ensure that our item is a Version or LegacyVersion instance.
+-        if not isinstance(item, (LegacyVersion, Version)):
+-            item = parse(item)
+-
+-        # Determine if we're forcing a prerelease or not, if we're not forcing
+-        # one for this particular filter call, then we'll use whatever the
+-        # SpecifierSet thinks for whether or not we should support prereleases.
+-        if prereleases is None:
+-            prereleases = self.prereleases
+-
+-        # We can determine if we're going to allow pre-releases by looking to
+-        # see if any of the underlying items supports them. If none of them do
+-        # and this item is a pre-release then we do not allow it and we can
+-        # short circuit that here.
+-        # Note: This means that 1.0.dev1 would not be contained in something
+-        #       like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0
+-        if not prereleases and item.is_prerelease:
+-            return False
+-
+-        # We simply dispatch to the underlying specs here to make sure that the
+-        # given version is contained within all of them.
+-        # Note: This use of all() here means that an empty set of specifiers
+-        #       will always return True, this is an explicit design decision.
+-        return all(s.contains(item, prereleases=prereleases) for s in self._specs)
+-
+-    def filter(
+-        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
+-    ) -> Iterable[VersionTypeVar]:
+-
+-        # Determine if we're forcing a prerelease or not, if we're not forcing
+-        # one for this particular filter call, then we'll use whatever the
+-        # SpecifierSet thinks for whether or not we should support prereleases.
+-        if prereleases is None:
+-            prereleases = self.prereleases
+-
+-        # If we have any specifiers, then we want to wrap our iterable in the
+-        # filter method for each one, this will act as a logical AND amongst
+-        # each specifier.
+-        if self._specs:
+-            for spec in self._specs:
+-                iterable = spec.filter(iterable, prereleases=bool(prereleases))
+-            return iterable
+-        # If we do not have any specifiers, then we need to have a rough filter
+-        # which will filter out any pre-releases, unless there are no final
+-        # releases, and which will filter out LegacyVersion in general.
+-        else:
+-            filtered: List[VersionTypeVar] = []
+-            found_prereleases: List[VersionTypeVar] = []
+-
+-            item: UnparsedVersion
+-            parsed_version: Union[Version, LegacyVersion]
+-
+-            for item in iterable:
+-                # Ensure that we some kind of Version class for this item.
+-                if not isinstance(item, (LegacyVersion, Version)):
+-                    parsed_version = parse(item)
+-                else:
+-                    parsed_version = item
+-
+-                # Filter out any item which is parsed as a LegacyVersion
+-                if isinstance(parsed_version, LegacyVersion):
+-                    continue
+-
+-                # Store any item which is a pre-release for later unless we've
+-                # already found a final version or we are accepting prereleases
+-                if parsed_version.is_prerelease and not prereleases:
+-                    if not filtered:
+-                        found_prereleases.append(item)
+-                else:
+-                    filtered.append(item)
+-
+-            # If we've found no items except for pre-releases, then we'll go
+-            # ahead and use the pre-releases
+-            if not filtered and found_prereleases and prereleases is None:
+-                return found_prereleases
+-
+-            return filtered
+diff --git a/src/poetry/core/_vendor/packaging/tags.py b/src/poetry/core/_vendor/packaging/tags.py
+deleted file mode 100644
+index 9a3d25a..0000000
+--- a/src/poetry/core/_vendor/packaging/tags.py
++++ /dev/null
+@@ -1,487 +0,0 @@
+-# This file is dual licensed under the terms of the Apache License, Version
+-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+-# for complete details.
+-
+-import logging
+-import platform
+-import sys
+-import sysconfig
+-from importlib.machinery import EXTENSION_SUFFIXES
+-from typing import (
+-    Dict,
+-    FrozenSet,
+-    Iterable,
+-    Iterator,
+-    List,
+-    Optional,
+-    Sequence,
+-    Tuple,
+-    Union,
+-    cast,
+-)
+-
+-from . import _manylinux, _musllinux
+-
+-logger = logging.getLogger(__name__)
+-
+-PythonVersion = Sequence[int]
+-MacVersion = Tuple[int, int]
+-
+-INTERPRETER_SHORT_NAMES: Dict[str, str] = {
+-    "python": "py",  # Generic.
+-    "cpython": "cp",
+-    "pypy": "pp",
+-    "ironpython": "ip",
+-    "jython": "jy",
+-}
+-
+-
+-_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32
+-
+-
+-class Tag:
+-    """
+-    A representation of the tag triple for a wheel.
+-
+-    Instances are considered immutable and thus are hashable. Equality checking
+-    is also supported.
+-    """
+-
+-    __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]
+-
+-    def __init__(self, interpreter: str, abi: str, platform: str) -> None:
+-        self._interpreter = interpreter.lower()
+-        self._abi = abi.lower()
+-        self._platform = platform.lower()
+-        # The __hash__ of every single element in a Set[Tag] will be evaluated each time
+-        # that a set calls its `.disjoint()` method, which may be called hundreds of
+-        # times when scanning a page of links for packages with tags matching that
+-        # Set[Tag]. Pre-computing the value here produces significant speedups for
+-        # downstream consumers.
+-        self._hash = hash((self._interpreter, self._abi, self._platform))
+-
+-    @property
+-    def interpreter(self) -> str:
+-        return self._interpreter
+-
+-    @property
+-    def abi(self) -> str:
+-        return self._abi
+-
+-    @property
+-    def platform(self) -> str:
+-        return self._platform
+-
+-    def __eq__(self, other: object) -> bool:
+-        if not isinstance(other, Tag):
+-            return NotImplemented
+-
+-        return (
+-            (self._hash == other._hash)  # Short-circuit ASAP for perf reasons.
+-            and (self._platform == other._platform)
+-            and (self._abi == other._abi)
+-            and (self._interpreter == other._interpreter)
+-        )
+-
+-    def __hash__(self) -> int:
+-        return self._hash
+-
+-    def __str__(self) -> str:
+-        return f"{self._interpreter}-{self._abi}-{self._platform}"
+-
+-    def __repr__(self) -> str:
+-        return f"<{self} @ {id(self)}>"
+-
+-
+-def parse_tag(tag: str) -> FrozenSet[Tag]:
+-    """
+-    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.
+-
+-    Returning a set is required due to the possibility that the tag is a
+-    compressed tag set.
+-    """
+-    tags = set()
+-    interpreters, abis, platforms = tag.split("-")
+-    for interpreter in interpreters.split("."):
+-        for abi in abis.split("."):
+-            for platform_ in platforms.split("."):
+-                tags.add(Tag(interpreter, abi, platform_))
+-    return frozenset(tags)
+-
+-
+-def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
+-    value = sysconfig.get_config_var(name)
+-    if value is None and warn:
+-        logger.debug(
+-            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
+-        )
+-    return value
+-
+-
+-def _normalize_string(string: str) -> str:
+-    return string.replace(".", "_").replace("-", "_")
+-
+-
+-def _abi3_applies(python_version: PythonVersion) -> bool:
+-    """
+-    Determine if the Python version supports abi3.
+-
+-    PEP 384 was first implemented in Python 3.2.
+-    """
+-    return len(python_version) > 1 and tuple(python_version) >= (3, 2)
+-
+-
+-def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
+-    py_version = tuple(py_version)  # To allow for version comparison.
+-    abis = []
+-    version = _version_nodot(py_version[:2])
+-    debug = pymalloc = ucs4 = ""
+-    with_debug = _get_config_var("Py_DEBUG", warn)
+-    has_refcount = hasattr(sys, "gettotalrefcount")
+-    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
+-    # extension modules is the best option.
+-    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
+-    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
+-    if with_debug or (with_debug is None and (has_refcount or has_ext)):
+-        debug = "d"
+-    if py_version < (3, 8):
+-        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
+-        if with_pymalloc or with_pymalloc is None:
+-            pymalloc = "m"
+-        if py_version < (3, 3):
+-            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
+-            if unicode_size == 4 or (
+-                unicode_size is None and sys.maxunicode == 0x10FFFF
+-            ):
+-                ucs4 = "u"
+-    elif debug:
+-        # Debug builds can also load "normal" extension modules.
+-        # We can also assume no UCS-4 or pymalloc requirement.
+-        abis.append(f"cp{version}")
+-    abis.insert(
+-        0,
+-        "cp{version}{debug}{pymalloc}{ucs4}".format(
+-            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
+-        ),
+-    )
+-    return abis
+-
+-
+-def cpython_tags(
+-    python_version: Optional[PythonVersion] = None,
+-    abis: Optional[Iterable[str]] = None,
+-    platforms: Optional[Iterable[str]] = None,
+-    *,
+-    warn: bool = False,
+-) -> Iterator[Tag]:
+-    """
+-    Yields the tags for a CPython interpreter.
+-
+-    The tags consist of:
+-    - cp<python_version>-<abi>-<platform>
+-    - cp<python_version>-abi3-<platform>
+-    - cp<python_version>-none-<platform>
+-    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.
+-
+-    If python_version only specifies a major version then user-provided ABIs and
+-    the 'none' ABI will be used.
+-
+-    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
+-    their normal position and not at the beginning.
+- """ +- if not python_version: +- python_version = sys.version_info[:2] +- +- interpreter = f"cp{_version_nodot(python_version[:2])}" +- +- if abis is None: +- if len(python_version) > 1: +- abis = _cpython_abis(python_version, warn) +- else: +- abis = [] +- abis = list(abis) +- # 'abi3' and 'none' are explicitly handled later. +- for explicit_abi in ("abi3", "none"): +- try: +- abis.remove(explicit_abi) +- except ValueError: +- pass +- +- platforms = list(platforms or platform_tags()) +- for abi in abis: +- for platform_ in platforms: +- yield Tag(interpreter, abi, platform_) +- if _abi3_applies(python_version): +- yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms) +- yield from (Tag(interpreter, "none", platform_) for platform_ in platforms) +- +- if _abi3_applies(python_version): +- for minor_version in range(python_version[1] - 1, 1, -1): +- for platform_ in platforms: +- interpreter = "cp{version}".format( +- version=_version_nodot((python_version[0], minor_version)) +- ) +- yield Tag(interpreter, "abi3", platform_) +- +- +-def _generic_abi() -> Iterator[str]: +- abi = sysconfig.get_config_var("SOABI") +- if abi: +- yield _normalize_string(abi) +- +- +-def generic_tags( +- interpreter: Optional[str] = None, +- abis: Optional[Iterable[str]] = None, +- platforms: Optional[Iterable[str]] = None, +- *, +- warn: bool = False, +-) -> Iterator[Tag]: +- """ +- Yields the tags for a generic interpreter. +- +- The tags consist of: +- - -- +- +- The "none" ABI will be added if it was not explicitly provided. +- """ +- if not interpreter: +- interp_name = interpreter_name() +- interp_version = interpreter_version(warn=warn) +- interpreter = "".join([interp_name, interp_version]) +- if abis is None: +- abis = _generic_abi() +- platforms = list(platforms or platform_tags()) +- abis = list(abis) +- if "none" not in abis: +- abis.append("none") +- for abi in abis: +- for platform_ in platforms: +- yield Tag(interpreter, abi, platform_) +- +- +-def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]: +- """ +- Yields Python versions in descending order. +- +- After the latest version, the major-only version will be yielded, and then +- all previous versions of that major version. +- """ +- if len(py_version) > 1: +- yield f"py{_version_nodot(py_version[:2])}" +- yield f"py{py_version[0]}" +- if len(py_version) > 1: +- for minor in range(py_version[1] - 1, -1, -1): +- yield f"py{_version_nodot((py_version[0], minor))}" +- +- +-def compatible_tags( +- python_version: Optional[PythonVersion] = None, +- interpreter: Optional[str] = None, +- platforms: Optional[Iterable[str]] = None, +-) -> Iterator[Tag]: +- """ +- Yields the sequence of tags that are compatible with a specific version of Python. +- +- The tags consist of: +- - py*-none- +- - -none-any # ... if `interpreter` is provided. 
+-    - py*-none-any
+-    """
+-    if not python_version:
+-        python_version = sys.version_info[:2]
+-    platforms = list(platforms or platform_tags())
+-    for version in _py_interpreter_range(python_version):
+-        for platform_ in platforms:
+-            yield Tag(version, "none", platform_)
+-    if interpreter:
+-        yield Tag(interpreter, "none", "any")
+-    for version in _py_interpreter_range(python_version):
+-        yield Tag(version, "none", "any")
+-
+-
+-def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
+-    if not is_32bit:
+-        return arch
+-
+-    if arch.startswith("ppc"):
+-        return "ppc"
+-
+-    return "i386"
+-
+-
+-def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
+-    formats = [cpu_arch]
+-    if cpu_arch == "x86_64":
+-        if version < (10, 4):
+-            return []
+-        formats.extend(["intel", "fat64", "fat32"])
+-
+-    elif cpu_arch == "i386":
+-        if version < (10, 4):
+-            return []
+-        formats.extend(["intel", "fat32", "fat"])
+-
+-    elif cpu_arch == "ppc64":
+-        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
+-        if version > (10, 5) or version < (10, 4):
+-            return []
+-        formats.append("fat64")
+-
+-    elif cpu_arch == "ppc":
+-        if version > (10, 6):
+-            return []
+-        formats.extend(["fat32", "fat"])
+-
+-    if cpu_arch in {"arm64", "x86_64"}:
+-        formats.append("universal2")
+-
+-    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
+-        formats.append("universal")
+-
+-    return formats
+-
+-
+-def mac_platforms(
+-    version: Optional[MacVersion] = None, arch: Optional[str] = None
+-) -> Iterator[str]:
+-    """
+-    Yields the platform tags for a macOS system.
+-
+-    The `version` parameter is a two-item tuple specifying the macOS version to
+-    generate platform tags for. The `arch` parameter is the CPU architecture to
+-    generate platform tags for. Both parameters default to the appropriate value
+-    for the current system.
+-    """
+-    version_str, _, cpu_arch = platform.mac_ver()
+-    if version is None:
+-        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
+-    else:
+-        version = version
+-    if arch is None:
+-        arch = _mac_arch(cpu_arch)
+-    else:
+-        arch = arch
+-
+-    if (10, 0) <= version and version < (11, 0):
+-        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
+-        # "minor" version number. The major version was always 10.
+-        for minor_version in range(version[1], -1, -1):
+-            compat_version = 10, minor_version
+-            binary_formats = _mac_binary_formats(compat_version, arch)
+-            for binary_format in binary_formats:
+-                yield "macosx_{major}_{minor}_{binary_format}".format(
+-                    major=10, minor=minor_version, binary_format=binary_format
+-                )
+-
+-    if version >= (11, 0):
+-        # Starting with Mac OS 11, each yearly release bumps the major version
+-        # number. The minor versions are now the midyear updates.
+-        for major_version in range(version[0], 10, -1):
+-            compat_version = major_version, 0
+-            binary_formats = _mac_binary_formats(compat_version, arch)
+-            for binary_format in binary_formats:
+-                yield "macosx_{major}_{minor}_{binary_format}".format(
+-                    major=major_version, minor=0, binary_format=binary_format
+-                )
+-
+-    if version >= (11, 0):
+-        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
+-        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
+-        # releases exist.
+-        #
+-        # However, the "universal2" binary format can have a
+-        # macOS version earlier than 11.0 when the x86_64 part of the binary supports
+-        # that version of macOS.
+- if arch == "x86_64": +- for minor_version in range(16, 3, -1): +- compat_version = 10, minor_version +- binary_formats = _mac_binary_formats(compat_version, arch) +- for binary_format in binary_formats: +- yield "macosx_{major}_{minor}_{binary_format}".format( +- major=compat_version[0], +- minor=compat_version[1], +- binary_format=binary_format, +- ) +- else: +- for minor_version in range(16, 3, -1): +- compat_version = 10, minor_version +- binary_format = "universal2" +- yield "macosx_{major}_{minor}_{binary_format}".format( +- major=compat_version[0], +- minor=compat_version[1], +- binary_format=binary_format, +- ) +- +- +-def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]: +- linux = _normalize_string(sysconfig.get_platform()) +- if is_32bit: +- if linux == "linux_x86_64": +- linux = "linux_i686" +- elif linux == "linux_aarch64": +- linux = "linux_armv7l" +- _, arch = linux.split("_", 1) +- yield from _manylinux.platform_tags(linux, arch) +- yield from _musllinux.platform_tags(arch) +- yield linux +- +- +-def _generic_platforms() -> Iterator[str]: +- yield _normalize_string(sysconfig.get_platform()) +- +- +-def platform_tags() -> Iterator[str]: +- """ +- Provides the platform tags for this installation. +- """ +- if platform.system() == "Darwin": +- return mac_platforms() +- elif platform.system() == "Linux": +- return _linux_platforms() +- else: +- return _generic_platforms() +- +- +-def interpreter_name() -> str: +- """ +- Returns the name of the running interpreter. +- """ +- name = sys.implementation.name +- return INTERPRETER_SHORT_NAMES.get(name) or name +- +- +-def interpreter_version(*, warn: bool = False) -> str: +- """ +- Returns the version of the running interpreter. +- """ +- version = _get_config_var("py_version_nodot", warn=warn) +- if version: +- version = str(version) +- else: +- version = _version_nodot(sys.version_info[:2]) +- return version +- +- +-def _version_nodot(version: PythonVersion) -> str: +- return "".join(map(str, version)) +- +- +-def sys_tags(*, warn: bool = False) -> Iterator[Tag]: +- """ +- Returns the sequence of tag triples for the running interpreter. +- +- The order of the sequence corresponds to priority order for the +- interpreter, from most to least important. +- """ +- +- interp_name = interpreter_name() +- if interp_name == "cp": +- yield from cpython_tags(warn=warn) +- else: +- yield from generic_tags() +- +- if interp_name == "pp": +- yield from compatible_tags(interpreter="pp3") +- else: +- yield from compatible_tags() +diff --git a/src/poetry/core/_vendor/packaging/utils.py b/src/poetry/core/_vendor/packaging/utils.py +deleted file mode 100644 +index bab11b8..0000000 +--- a/src/poetry/core/_vendor/packaging/utils.py ++++ /dev/null +@@ -1,136 +0,0 @@ +-# This file is dual licensed under the terms of the Apache License, Version +-# 2.0, and the BSD License. See the LICENSE file in the root of this repository +-# for complete details. +- +-import re +-from typing import FrozenSet, NewType, Tuple, Union, cast +- +-from .tags import Tag, parse_tag +-from .version import InvalidVersion, Version +- +-BuildTag = Union[Tuple[()], Tuple[int, str]] +-NormalizedName = NewType("NormalizedName", str) +- +- +-class InvalidWheelFilename(ValueError): +- """ +- An invalid wheel filename was found, users should refer to PEP 427. +- """ +- +- +-class InvalidSdistFilename(ValueError): +- """ +- An invalid sdist filename was found, users should refer to the packaging user guide. 
+- """ +- +- +-_canonicalize_regex = re.compile(r"[-_.]+") +-# PEP 427: The build number must start with a digit. +-_build_tag_regex = re.compile(r"(\d+)(.*)") +- +- +-def canonicalize_name(name: str) -> NormalizedName: +- # This is taken from PEP 503. +- value = _canonicalize_regex.sub("-", name).lower() +- return cast(NormalizedName, value) +- +- +-def canonicalize_version(version: Union[Version, str]) -> str: +- """ +- This is very similar to Version.__str__, but has one subtle difference +- with the way it handles the release segment. +- """ +- if isinstance(version, str): +- try: +- parsed = Version(version) +- except InvalidVersion: +- # Legacy versions cannot be normalized +- return version +- else: +- parsed = version +- +- parts = [] +- +- # Epoch +- if parsed.epoch != 0: +- parts.append(f"{parsed.epoch}!") +- +- # Release segment +- # NB: This strips trailing '.0's to normalize +- parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in parsed.release))) +- +- # Pre-release +- if parsed.pre is not None: +- parts.append("".join(str(x) for x in parsed.pre)) +- +- # Post-release +- if parsed.post is not None: +- parts.append(f".post{parsed.post}") +- +- # Development release +- if parsed.dev is not None: +- parts.append(f".dev{parsed.dev}") +- +- # Local version segment +- if parsed.local is not None: +- parts.append(f"+{parsed.local}") +- +- return "".join(parts) +- +- +-def parse_wheel_filename( +- filename: str, +-) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]: +- if not filename.endswith(".whl"): +- raise InvalidWheelFilename( +- f"Invalid wheel filename (extension must be '.whl'): {filename}" +- ) +- +- filename = filename[:-4] +- dashes = filename.count("-") +- if dashes not in (4, 5): +- raise InvalidWheelFilename( +- f"Invalid wheel filename (wrong number of parts): {filename}" +- ) +- +- parts = filename.split("-", dashes - 2) +- name_part = parts[0] +- # See PEP 427 for the rules on escaping the project name +- if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None: +- raise InvalidWheelFilename(f"Invalid project name: {filename}") +- name = canonicalize_name(name_part) +- version = Version(parts[1]) +- if dashes == 5: +- build_part = parts[2] +- build_match = _build_tag_regex.match(build_part) +- if build_match is None: +- raise InvalidWheelFilename( +- f"Invalid build number: {build_part} in '{filename}'" +- ) +- build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2))) +- else: +- build = () +- tags = parse_tag(parts[-1]) +- return (name, version, build, tags) +- +- +-def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]: +- if filename.endswith(".tar.gz"): +- file_stem = filename[: -len(".tar.gz")] +- elif filename.endswith(".zip"): +- file_stem = filename[: -len(".zip")] +- else: +- raise InvalidSdistFilename( +- f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):" +- f" {filename}" +- ) +- +- # We are requiring a PEP 440 version, which cannot contain dashes, +- # so we split on the last dash. 
+-    name_part, sep, version_part = file_stem.rpartition("-")
+-    if not sep:
+-        raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")
+-
+-    name = canonicalize_name(name_part)
+-    version = Version(version_part)
+-    return (name, version)
+diff --git a/src/poetry/core/_vendor/packaging/version.py b/src/poetry/core/_vendor/packaging/version.py
+deleted file mode 100644
+index de9a09a..0000000
+--- a/src/poetry/core/_vendor/packaging/version.py
++++ /dev/null
+@@ -1,504 +0,0 @@
+-# This file is dual licensed under the terms of the Apache License, Version
+-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+-# for complete details.
+-
+-import collections
+-import itertools
+-import re
+-import warnings
+-from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union
+-
+-from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
+-
+-__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]
+-
+-InfiniteTypes = Union[InfinityType, NegativeInfinityType]
+-PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
+-SubLocalType = Union[InfiniteTypes, int, str]
+-LocalType = Union[
+-    NegativeInfinityType,
+-    Tuple[
+-        Union[
+-            SubLocalType,
+-            Tuple[SubLocalType, str],
+-            Tuple[NegativeInfinityType, SubLocalType],
+-        ],
+-        ...,
+-    ],
+-]
+-CmpKey = Tuple[
+-    int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
+-]
+-LegacyCmpKey = Tuple[int, Tuple[str, ...]]
+-VersionComparisonMethod = Callable[
+-    [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool
+-]
+-
+-_Version = collections.namedtuple(
+-    "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
+-)
+-
+-
+-def parse(version: str) -> Union["LegacyVersion", "Version"]:
+-    """
+-    Parse the given version string and return either a :class:`Version` object
+-    or a :class:`LegacyVersion` object depending on if the given version is
+-    a valid PEP 440 version or a legacy version.
+-    """
+-    try:
+-        return Version(version)
+-    except InvalidVersion:
+-        return LegacyVersion(version)
+-
+-
+-class InvalidVersion(ValueError):
+-    """
+-    An invalid version was found, users should refer to PEP 440.
+-    """
+-
+-
+-class _BaseVersion:
+-    _key: Union[CmpKey, LegacyCmpKey]
+-
+-    def __hash__(self) -> int:
+-        return hash(self._key)
+-
+-    # Please keep the duplicated `isinstance` check
+-    # in the six comparisons hereunder
+-    # unless you find a way to avoid adding overhead function calls.
+-    def __lt__(self, other: "_BaseVersion") -> bool:
+-        if not isinstance(other, _BaseVersion):
+-            return NotImplemented
+-
+-        return self._key < other._key
+-
+-    def __le__(self, other: "_BaseVersion") -> bool:
+-        if not isinstance(other, _BaseVersion):
+-            return NotImplemented
+-
+-        return self._key <= other._key
+-
+-    def __eq__(self, other: object) -> bool:
+-        if not isinstance(other, _BaseVersion):
+-            return NotImplemented
+-
+-        return self._key == other._key
+-
+-    def __ge__(self, other: "_BaseVersion") -> bool:
+-        if not isinstance(other, _BaseVersion):
+-            return NotImplemented
+-
+-        return self._key >= other._key
+-
+-    def __gt__(self, other: "_BaseVersion") -> bool:
+-        if not isinstance(other, _BaseVersion):
+-            return NotImplemented
+-
+-        return self._key > other._key
+-
+-    def __ne__(self, other: object) -> bool:
+-        if not isinstance(other, _BaseVersion):
+-            return NotImplemented
+-
+-        return self._key != other._key
+-
+-
+-class LegacyVersion(_BaseVersion):
+-    def __init__(self, version: str) -> None:
+-        self._version = str(version)
+-        self._key = _legacy_cmpkey(self._version)
+-
+-        warnings.warn(
+-            "Creating a LegacyVersion has been deprecated and will be "
+-            "removed in the next major release",
+-            DeprecationWarning,
+-        )
+-
+-    def __str__(self) -> str:
+-        return self._version
+-
+-    def __repr__(self) -> str:
+-        return f"<LegacyVersion('{self}')>"
+-
+-    @property
+-    def public(self) -> str:
+-        return self._version
+-
+-    @property
+-    def base_version(self) -> str:
+-        return self._version
+-
+-    @property
+-    def epoch(self) -> int:
+-        return -1
+-
+-    @property
+-    def release(self) -> None:
+-        return None
+-
+-    @property
+-    def pre(self) -> None:
+-        return None
+-
+-    @property
+-    def post(self) -> None:
+-        return None
+-
+-    @property
+-    def dev(self) -> None:
+-        return None
+-
+-    @property
+-    def local(self) -> None:
+-        return None
+-
+-    @property
+-    def is_prerelease(self) -> bool:
+-        return False
+-
+-    @property
+-    def is_postrelease(self) -> bool:
+-        return False
+-
+-    @property
+-    def is_devrelease(self) -> bool:
+-        return False
+-
+-
+-_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
+-
+-_legacy_version_replacement_map = {
+-    "pre": "c",
+-    "preview": "c",
+-    "-": "final-",
+-    "rc": "c",
+-    "dev": "@",
+-}
+-
+-
+-def _parse_version_parts(s: str) -> Iterator[str]:
+-    for part in _legacy_version_component_re.split(s):
+-        part = _legacy_version_replacement_map.get(part, part)
+-
+-        if not part or part == ".":
+-            continue
+-
+-        if part[:1] in "0123456789":
+-            # pad for numeric comparison
+-            yield part.zfill(8)
+-        else:
+-            yield "*" + part
+-
+-    # ensure that alpha/beta/candidate are before final
+-    yield "*final"
+-
+-
+-def _legacy_cmpkey(version: str) -> LegacyCmpKey:
+-
+-    # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch
+-    # greater than or equal to 0. This will effectively put the LegacyVersion,
+-    # which uses the defacto standard originally implemented by setuptools,
+-    # as before all PEP 440 versions.
+-    epoch = -1
+-
+-    # This scheme is taken from pkg_resources.parse_version setuptools prior to
+-    # it's adoption of the packaging library.
+-    parts: List[str] = []
+-    for part in _parse_version_parts(version.lower()):
+-        if part.startswith("*"):
+-            # remove "-" before a prerelease tag
+-            if part < "*final":
+-                while parts and parts[-1] == "*final-":
+-                    parts.pop()
+-
+-            # remove trailing zeros from each series of numeric parts
+-            while parts and parts[-1] == "00000000":
+-                parts.pop()
+-
+-        parts.append(part)
+-
+-    return epoch, tuple(parts)
+-
+-
+-# Deliberately not anchored to the start and end of the string, to make it
+-# easier for 3rd party code to reuse
+-VERSION_PATTERN = r"""
+-    v?
+-    (?:
+-        (?:(?P<epoch>[0-9]+)!)?                           # epoch
+-        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
+-        (?P<pre>                                          # pre-release
+-            [-_\.]?
+-            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+-            [-_\.]?
+-            (?P<pre_n>[0-9]+)?
+-        )?
+-        (?P<post>                                         # post release
+-            (?:-(?P<post_n1>[0-9]+))
+-            |
+-            (?:
+-                [-_\.]?
+-                (?P<post_l>post|rev|r)
+-                [-_\.]?
+-                (?P<post_n2>[0-9]+)?
+-            )
+-        )?
+-        (?P<dev>                                          # dev release
+-            [-_\.]?
+-            (?P<dev_l>dev)
+-            [-_\.]?
+-            (?P<dev_n>[0-9]+)?
+-        )?
+-    )
+-    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+-"""
+-
+-
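+-# For example, "1!2.0.post1.dev2+abc.5" matches with epoch "1", release "2.0",
+-# post_l/post_n2 "post"/"1", dev_n "2" and local "abc.5"; the Version class
+-# below converts these raw groups into typed, comparable fields.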
+-class Version(_BaseVersion):
+-
+-    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+-
+-    def __init__(self, version: str) -> None:
+-
+-        # Validate the version and parse it into pieces
+-        match = self._regex.search(version)
+-        if not match:
+-            raise InvalidVersion(f"Invalid version: '{version}'")
+-
+-        # Store the parsed out pieces of the version
+-        self._version = _Version(
+-            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+-            release=tuple(int(i) for i in match.group("release").split(".")),
+-            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
+-            post=_parse_letter_version(
+-                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
+-            ),
+-            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
+-            local=_parse_local_version(match.group("local")),
+-        )
+-
+-        # Generate a key which will be used for sorting
+-        self._key = _cmpkey(
+-            self._version.epoch,
+-            self._version.release,
+-            self._version.pre,
+-            self._version.post,
+-            self._version.dev,
+-            self._version.local,
+-        )
+-
+-    def __repr__(self) -> str:
+-        return f""
+-
+-    def __str__(self) -> str:
+-        parts = []
+-
+-        # Epoch
+-        if self.epoch != 0:
+-            parts.append(f"{self.epoch}!")
+-
+-        # Release segment
+-        parts.append(".".join(str(x) for x in self.release))
+-
+-        # Pre-release
+-        if self.pre is not None:
+-            parts.append("".join(str(x) for x in self.pre))
+-
+-        # Post-release
+-        if self.post is not None:
+-            parts.append(f".post{self.post}")
+-
+-        # Development release
+-        if self.dev is not None:
+-            parts.append(f".dev{self.dev}")
+-
+-        # Local version segment
+-        if self.local is not None:
+-            parts.append(f"+{self.local}")
+-
+-        return "".join(parts)
+-
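+-    # e.g. str(Version("1.0.0-RC1")) == "1.0.0rc1": __str__ emits the
+-    # normalized form, not the original input.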
+-    @property
+-    def epoch(self) -> int:
+-        _epoch: int = self._version.epoch
+-        return _epoch
+-
+-    @property
+-    def release(self) -> Tuple[int, ...]:
+-        _release: Tuple[int, ...] = self._version.release
+-        return _release
+-
+-    @property
+-    def pre(self) -> Optional[Tuple[str, int]]:
+-        _pre: Optional[Tuple[str, int]] = self._version.pre
+-        return _pre
+-
+-    @property
+-    def post(self) -> Optional[int]:
+-        return self._version.post[1] if self._version.post else None
+-
+-    @property
+-    def dev(self) -> Optional[int]:
+-        return self._version.dev[1] if self._version.dev else None
+-
+-    @property
+-    def local(self) -> Optional[str]:
+-        if self._version.local:
+-            return ".".join(str(x) for x in self._version.local)
+-        else:
+-            return None
+-
+-    @property
+-    def public(self) -> str:
+-        return str(self).split("+", 1)[0]
+-
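+-    # public: e.g. Version("1.2+local.3").public == "1.2" (local part dropped).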
+-    @property
+-    def base_version(self) -> str:
+-        parts = []
+-
+-        # Epoch
+-        if self.epoch != 0:
+-            parts.append(f"{self.epoch}!")
+-
+-        # Release segment
+-        parts.append(".".join(str(x) for x in self.release))
+-
+-        return "".join(parts)
+-
+-    @property
+-    def is_prerelease(self) -> bool:
+-        return self.dev is not None or self.pre is not None
+-
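+-    # is_prerelease: e.g. "1.0a1" and "1.0.dev1" are prereleases, "1.0.post1"
+-    # is not.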
+-    @property
+-    def is_postrelease(self) -> bool:
+-        return self.post is not None
+-
+-    @property
+-    def is_devrelease(self) -> bool:
+-        return self.dev is not None
+-
+-    @property
+-    def major(self) -> int:
+-        return self.release[0] if len(self.release) >= 1 else 0
+-
+-    @property
+-    def minor(self) -> int:
+-        return self.release[1] if len(self.release) >= 2 else 0
+-
+-    @property
+-    def micro(self) -> int:
+-        return self.release[2] if len(self.release) >= 3 else 0
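+-    # e.g. for Version("3.10"): major == 3, minor == 10, micro == 0.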
+-
+-
+-def _parse_letter_version(
+-    letter: str, number: Union[str, bytes, SupportsInt]
+-) -> Optional[Tuple[str, int]]:
+-
+-    if letter:
+-        # We consider there to be an implicit 0 in a pre-release if there is
+-        # not a numeral associated with it.
+-        if number is None:
+-            number = 0
+-
+-        # We normalize any letters to their lower case form
+-        letter = letter.lower()
+-
+-        # We consider some words to be alternate spellings of other words and
+-        # in those cases we want to normalize the spellings to our preferred
+-        # spelling.
+-        if letter == "alpha":
+-            letter = "a"
+-        elif letter == "beta":
+-            letter = "b"
+-        elif letter in ["c", "pre", "preview"]:
+-            letter = "rc"
+-        elif letter in ["rev", "r"]:
+-            letter = "post"
+-
+-        return letter, int(number)
+-    if not letter and number:
+-        # We assume if we are given a number, but we are not given a letter
+-        # then this is using the implicit post release syntax (e.g. 1.0-1)
+-        letter = "post"
+-
+-        return letter, int(number)
+-
+-    return None
+-
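NOTE (reviewer aside): the normalization above ("alpha" -> "a", "c"/"pre"/"preview"
-> "rc", "rev"/"r" -> "post", bare trailing number -> implicit post release) is
observable by round-tripping through the public packaging API, assuming it behaves
like this vendored copy:

    from packaging.version import Version

    assert str(Version("1.0alpha1")) == "1.0a1"
    assert str(Version("1.0preview2")) == "1.0rc2"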
+-
+-_local_version_separators = re.compile(r"[\._-]")
+-
+-
+-def _parse_local_version(local: str) -> Optional[LocalType]:
+-    """
+-    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+-    """
+-    if local is not None:
+-        return tuple(
+-            part.lower() if not part.isdigit() else int(part)
+-            for part in _local_version_separators.split(local)
+-        )
+-    return None
+-
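NOTE (reviewer aside): local segments are split on ".", "_" and "-", numeric parts
become ints for comparison, and the public local property re-joins the parts with
dots (again assuming packaging matches this copy):

    from packaging.version import Version

    assert Version("1.0+ubuntu-1").local == "ubuntu.1"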
+-
+-def _cmpkey(
+-    epoch: int,
+-    release: Tuple[int, ...],
+-    pre: Optional[Tuple[str, int]],
+-    post: Optional[Tuple[str, int]],
+-    dev: Optional[Tuple[str, int]],
+-    local: Optional[Tuple[SubLocalType]],
+-) -> CmpKey:
+-
+-    # When we compare a release version, we want to compare it with all of the
+-    # trailing zeros removed. So we'll use a reverse the list, drop all the now
+-    # leading zeros until we come to something non zero, then take the rest
+-    # re-reverse it back into the correct order and make it a tuple and use
+-    # that for our sorting key.
+-    _release = tuple(
+-        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
+-    )
+-
+-    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+-    # We'll do this by abusing the pre segment, but we _only_ want to do this
+-    # if there is not a pre or a post segment. If we have one of those then
+-    # the normal sorting rules will handle this case correctly.
+-    if pre is None and post is None and dev is not None:
+-        _pre: PrePostDevType = NegativeInfinity
+-    # Versions without a pre-release (except as noted above) should sort after
+-    # those with one.
+-    elif pre is None:
+-        _pre = Infinity
+-    else:
+-        _pre = pre
+-
+-    # Versions without a post segment should sort before those with one.
+-    if post is None:
+-        _post: PrePostDevType = NegativeInfinity
+-
+-    else:
+-        _post = post
+-
+-    # Versions without a development segment should sort after those with one.
+-    if dev is None:
+-        _dev: PrePostDevType = Infinity
+-
+-    else:
+-        _dev = dev
+-
+-    if local is None:
+-        # Versions without a local segment should sort before those with one.
+-        _local: LocalType = NegativeInfinity
+-    else:
+-        # Versions with a local segment need that segment parsed to implement
+-        # the sorting rules in PEP440.
+-        # - Alpha numeric segments sort before numeric segments
+-        # - Alpha numeric segments sort lexicographically
+-        # - Numeric segments sort numerically
+-        # - Shorter versions sort before longer versions when the prefixes
+-        #   match exactly
+-        _local = tuple(
+-            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
+-        )
+-
+-    return epoch, _release, _pre, _post, _dev, _local
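NOTE (reviewer aside): the Infinity/NegativeInfinity placeholders above produce the
full PEP 440 ordering dev < pre < final < final+local < post for an otherwise equal
release, e.g. with the public packaging API:

    from packaging.version import Version

    ordered = sorted(Version(s) for s in ["1.0.post1", "1.0", "1.0a1", "1.0.dev0", "1.0+local"])
    assert [str(v) for v in ordered] == ["1.0.dev0", "1.0a1", "1.0", "1.0+local", "1.0.post1"]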
+diff --git a/src/poetry/core/_vendor/pyparsing/LICENSE b/src/poetry/core/_vendor/pyparsing/LICENSE
+deleted file mode 100644
+index 1bf9852..0000000
+--- a/src/poetry/core/_vendor/pyparsing/LICENSE
++++ /dev/null
+@@ -1,18 +0,0 @@
+-Permission is hereby granted, free of charge, to any person obtaining
+-a copy of this software and associated documentation files (the
+-"Software"), to deal in the Software without restriction, including
+-without limitation the rights to use, copy, modify, merge, publish,
+-distribute, sublicense, and/or sell copies of the Software, and to
+-permit persons to whom the Software is furnished to do so, subject to
+-the following conditions:
+-
+-The above copyright notice and this permission notice shall be
+-included in all copies or substantial portions of the Software.
+-
+-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+diff --git a/src/poetry/core/_vendor/pyparsing/__init__.py b/src/poetry/core/_vendor/pyparsing/__init__.py
+deleted file mode 100644
+index 7802ff1..0000000
+--- a/src/poetry/core/_vendor/pyparsing/__init__.py
++++ /dev/null
+@@ -1,331 +0,0 @@
+-# module pyparsing.py
+-#
+-# Copyright (c) 2003-2022  Paul T. McGuire
+-#
+-# Permission is hereby granted, free of charge, to any person obtaining
+-# a copy of this software and associated documentation files (the
+-# "Software"), to deal in the Software without restriction, including
+-# without limitation the rights to use, copy, modify, merge, publish,
+-# distribute, sublicense, and/or sell copies of the Software, and to
+-# permit persons to whom the Software is furnished to do so, subject to
+-# the following conditions:
+-#
+-# The above copyright notice and this permission notice shall be
+-# included in all copies or substantial portions of the Software.
+-#
+-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+-# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+-# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+-# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+-#
+-
+-__doc__ = """
+-pyparsing module - Classes and methods to define and execute parsing grammars
+-=============================================================================
+-
+-The pyparsing module is an alternative approach to creating and
+-executing simple grammars, vs. the traditional lex/yacc approach, or the
+-use of regular expressions.  With pyparsing, you don't need to learn
+-a new syntax for defining grammars or matching expressions - the parsing
+-module provides a library of classes that you use to construct the
+-grammar directly in Python.
+-
+-Here is a program to parse "Hello, World!" (or any greeting of the form
+-``"<salutation>, <addressee>!"``), built up using :class:`Word`,
+-:class:`Literal`, and :class:`And` elements
+-(the :meth:`'+'` operators create :class:`And` expressions,
+-and the strings are auto-converted to :class:`Literal` expressions)::
+-
+-    from pyparsing import Word, alphas
+-
+-    # define grammar of a greeting
+-    greet = Word(alphas) + "," + Word(alphas) + "!"
+-
+-    hello = "Hello, World!"
+-    print(hello, "->", greet.parse_string(hello))
+-
+-The program outputs the following::
+-
+-    Hello, World! -> ['Hello', ',', 'World', '!']
+-
+-The Python representation of the grammar is quite readable, owing to the
+-self-explanatory class names, and the use of :class:`'+'`,
+-:class:`'|'`, :class:`'^'` and :class:`'&'` operators.
+-
+-The :class:`ParseResults` object returned from
+-:class:`ParserElement.parseString` can be
+-accessed as a nested list, a dictionary, or an object with named
+-attributes.
+-
+-The pyparsing module handles some of the problems that are typically
+-vexing when writing text parsers:
+-
+-  - extra or missing whitespace (the above program will also handle
+-    "Hello,World!", "Hello  ,  World  !", etc.)
+-  - quoted strings
+-  - embedded comments
+-
+-
+-Getting Started -
+------------------
+-Visit the classes :class:`ParserElement` and :class:`ParseResults` to
+-see the base classes that most other pyparsing
+-classes inherit from. Use the docstrings for examples of how to:
+-
+- - construct literal match expressions from :class:`Literal` and
+-   :class:`CaselessLiteral` classes
+- - construct character word-group expressions using the :class:`Word`
+-   class
+- - see how to create repetitive expressions using :class:`ZeroOrMore`
+-   and :class:`OneOrMore` classes
+- - use :class:`'+'`, :class:`'|'`, :class:`'^'`,
+-   and :class:`'&'` operators to combine simple expressions into
+-   more complex ones
+- - associate names with your parsed results using
+-   :class:`ParserElement.setResultsName`
+- - access the parsed data, which is returned as a :class:`ParseResults`
+-   object
+- - find some helpful expression short-cuts like :class:`delimitedList`
+-   and :class:`oneOf`
+- - find more useful common expressions in the :class:`pyparsing_common`
+-   namespace class
+-"""
+-from typing import NamedTuple
+-
+-
+-class version_info(NamedTuple):
+-    major: int
+-    minor: int
+-    micro: int
+-    releaselevel: str
+-    serial: int
+-
+-    @property
+-    def __version__(self):
+-        return (
+-            "{}.{}.{}".format(self.major, self.minor, self.micro)
+-            + (
+-                "{}{}{}".format(
+-                    "r" if self.releaselevel[0] == "c" else "",
+-                    self.releaselevel[0],
+-                    self.serial,
+-                ),
+-                "",
+-            )[self.releaselevel == "final"]
+-        )
+-
+-    def __str__(self):
+-        return "{} {} / {}".format(__name__, self.__version__, __version_time__)
+-
+-    def __repr__(self):
+-        return "{}.{}({})".format(
+-            __name__,
+-            type(self).__name__,
+-            ", ".join("{}={!r}".format(*nv) for nv in zip(self._fields, self)),
+-        )
+-
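NOTE (reviewer aside): in version_info above, __version__ uses an old-style tuple
index, (suffix, "")[condition], as a conditional, so "final" releases drop the
suffix while candidate releases render as "rc" plus the serial:

    assert version_info(3, 0, 9, "final", 0).__version__ == "3.0.9"
    assert version_info(3, 0, 9, "candidate", 1).__version__ == "3.0.9rc1"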
+-
+-__version_info__ = version_info(3, 0, 9, "final", 0)
+-__version_time__ = "05 May 2022 07:02 UTC"
+-__version__ = __version_info__.__version__
+-__versionTime__ = __version_time__
+-__author__ = "Paul McGuire <ptmcg.gm+pyparsing@gmail.com>"
+-
+-from .util import *
+-from .exceptions import *
+-from .actions import *
+-from .core import __diag__, __compat__
+-from .results import *
+-from .core import *
+-from .core import _builtin_exprs as core_builtin_exprs
+-from .helpers import *
+-from .helpers import _builtin_exprs as helper_builtin_exprs
+-
+-from .unicode import unicode_set, UnicodeRangeList, pyparsing_unicode as unicode
+-from .testing import pyparsing_test as testing
+-from .common import (
+-    pyparsing_common as common,
+-    _builtin_exprs as common_builtin_exprs,
+-)
+-
+-# define backward compat synonyms
+-if "pyparsing_unicode" not in globals():
+-    pyparsing_unicode = unicode
+-if "pyparsing_common" not in globals():
+-    pyparsing_common = common
+-if "pyparsing_test" not in globals():
+-    pyparsing_test = testing
+-
+-core_builtin_exprs += common_builtin_exprs + helper_builtin_exprs
+-
+-
+-__all__ = [
+-    "__version__",
+-    "__version_time__",
+-    "__author__",
+-    "__compat__",
+-    "__diag__",
+-    "And",
+-    "AtLineStart",
+-    "AtStringStart",
+-    "CaselessKeyword",
+-    "CaselessLiteral",
+-    "CharsNotIn",
+-    "Combine",
+-    "Dict",
+-    "Each",
+-    "Empty",
+-    "FollowedBy",
+-    "Forward",
+-    "GoToColumn",
+-    "Group",
+-    "IndentedBlock",
+-    "Keyword",
+-    "LineEnd",
+-    "LineStart",
+-    "Literal",
+-    "Located",
+-    "PrecededBy",
+-    "MatchFirst",
+-    "NoMatch",
+-    "NotAny",
+-    "OneOrMore",
+-    "OnlyOnce",
+-    "OpAssoc",
+-    "Opt",
+-    "Optional",
+-    "Or",
+-    "ParseBaseException",
+-    "ParseElementEnhance",
+-    "ParseException",
+-    "ParseExpression",
+-    "ParseFatalException",
+-    "ParseResults",
+-    "ParseSyntaxException",
+-    "ParserElement",
+-    "PositionToken",
+-    "QuotedString",
+-    "RecursiveGrammarException",
+-    "Regex",
+-    "SkipTo",
+-    "StringEnd",
+-    "StringStart",
+-    "Suppress",
+-    "Token",
+-    "TokenConverter",
+-    "White",
+-    "Word",
+-    "WordEnd",
+-    "WordStart",
+-    "ZeroOrMore",
+-    "Char",
+-    "alphanums",
+-    "alphas",
+-    "alphas8bit",
+-    "any_close_tag",
+-    "any_open_tag",
+-    "c_style_comment",
+-    "col",
+-    "common_html_entity",
+-    "counted_array",
+-    "cpp_style_comment",
+-    "dbl_quoted_string",
+-    "dbl_slash_comment",
+-    "delimited_list",
+-    "dict_of",
+-    "empty",
+-    "hexnums",
+-    "html_comment",
+-    "identchars",
+-    "identbodychars",
+-    "java_style_comment",
+-    "line",
+-    "line_end",
+-    "line_start",
+-    "lineno",
+-    "make_html_tags",
+-    "make_xml_tags",
+-    "match_only_at_col",
+-    "match_previous_expr",
+-    "match_previous_literal",
+-    "nested_expr",
+-    "null_debug_action",
+-    "nums",
+-    "one_of",
+-    "printables",
+-    "punc8bit",
+-    "python_style_comment",
+-    "quoted_string",
+-    "remove_quotes",
+-    "replace_with",
+-    "replace_html_entity",
+-    "rest_of_line",
+-    "sgl_quoted_string",
+-    "srange",
+-    "string_end",
+-    "string_start",
+-    "trace_parse_action",
+-    "unicode_string",
+-    "with_attribute",
+-    "indentedBlock",
+-    "original_text_for",
+-    "ungroup",
+-    "infix_notation",
+-    "locatedExpr",
+-    "with_class",
+-    "CloseMatch",
+-    "token_map",
+-    "pyparsing_common",
+-    "pyparsing_unicode",
+-    "unicode_set",
+-    "condition_as_parse_action",
+-    "pyparsing_test",
+-    # pre-PEP8 compatibility names
+-    "__versionTime__",
+-    "anyCloseTag",
+-    "anyOpenTag",
+-    "cStyleComment",
+-    "commonHTMLEntity",
+-    "countedArray",
+-    "cppStyleComment",
+-    "dblQuotedString",
+-    "dblSlashComment",
+-    "delimitedList",
+-    "dictOf",
+-    "htmlComment",
+-    "javaStyleComment",
+-    "lineEnd",
+-    "lineStart",
+-    "makeHTMLTags",
+-    "makeXMLTags",
+-    "matchOnlyAtCol",
+-    "matchPreviousExpr",
+-    "matchPreviousLiteral",
+-    "nestedExpr",
+-    "nullDebugAction",
+-    "oneOf",
+-    "opAssoc",
+-    "pythonStyleComment",
+-    "quotedString",
+-    "removeQuotes",
+-    "replaceHTMLEntity",
+-    "replaceWith",
+-    "restOfLine",
+-    "sglQuotedString",
+-    "stringEnd",
+-    "stringStart",
+-    "traceParseAction",
+-    "unicodeString",
+-    "withAttribute",
+-    "indentedBlock",
+-    "originalTextFor",
+-    "infixNotation",
+-    "locatedExpr",
+-    "withClass",
+-    "tokenMap",
+-    "conditionAsParseAction",
+-    "autoname_elements",
+-]
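NOTE (reviewer aside): the interface deleted here, including the pre-PEP8 camelCase
synonyms, matches the stock pyparsing 3.0.9 distribution, so the docstring example
still runs against an installed copy:

    import pyparsing as pp

    greet = pp.Word(pp.alphas) + "," + pp.Word(pp.alphas) + "!"
    assert greet.parse_string("Hello, World!").as_list() == ["Hello", ",", "World", "!"]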
+diff --git a/src/poetry/core/_vendor/pyparsing/actions.py b/src/poetry/core/_vendor/pyparsing/actions.py
+deleted file mode 100644
+index f72c66e..0000000
+--- a/src/poetry/core/_vendor/pyparsing/actions.py
++++ /dev/null
+@@ -1,207 +0,0 @@
+-# actions.py
+-
+-from .exceptions import ParseException
+-from .util import col
+-
+-
+-class OnlyOnce:
+-    """
+-    Wrapper for parse actions, to ensure they are only called once.
+-    """
+-
+-    def __init__(self, method_call):
+-        from .core import _trim_arity
+-
+-        self.callable = _trim_arity(method_call)
+-        self.called = False
+-
+-    def __call__(self, s, l, t):
+-        if not self.called:
+-            results = self.callable(s, l, t)
+-            self.called = True
+-            return results
+-        raise ParseException(s, l, "OnlyOnce obj called multiple times w/out reset")
+-
+-    def reset(self):
+-        """
+-        Allow the associated parse action to be called once more.
+-        """
+-
+-        self.called = False
+-
+-
+-def match_only_at_col(n):
+-    """
+-    Helper method for defining parse actions that require matching at
+-    a specific column in the input text.
+-    """
+-
+-    def verify_col(strg, locn, toks):
+-        if col(locn, strg) != n:
+-            raise ParseException(strg, locn, "matched token not at column {}".format(n))
+-
+-    return verify_col
+-
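NOTE (reviewer aside): a minimal use of match_only_at_col, assuming stock pyparsing
behavior (the returned parse action raises ParseException unless the match starts
at column n):

    import pyparsing as pp

    word = pp.Word(pp.alphas).set_parse_action(pp.match_only_at_col(1))
    assert word.parse_string("abc")[0] == "abc"   # "abc" starts at column 1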
+-
+-def replace_with(repl_str):
+-    """
+-    Helper method for common parse actions that simply return
+-    a literal value.  Especially useful when used with
+-    :class:`transform_string<ParserElement.transform_string>` ().
+-
+-    Example::
+-
+-        num = Word(nums).set_parse_action(lambda toks: int(toks[0]))
+-        na = one_of("N/A NA").set_parse_action(replace_with(math.nan))
+-        term = na | num
+-
+-        term[1, ...].parse_string("324 234 N/A 234") # -> [324, 234, nan, 234]
+-    """
+-    return lambda s, l, t: [repl_str]
+-
+-
+-def remove_quotes(s, l, t):
+-    """
+-    Helper parse action for removing quotation marks from parsed
+-    quoted strings.
+-
+-    Example::
+-
+-        # by default, quotation marks are included in parsed results
+-        quoted_string.parse_string("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"]
+-
+-        # use remove_quotes to strip quotation marks from parsed results
+-        quoted_string.set_parse_action(remove_quotes)
+-        quoted_string.parse_string("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"]
+-    """
+-    return t[0][1:-1]
+-
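NOTE (reviewer aside): remove_quotes is meant to be attached to quoted_string as in
the docstring above; a compact runnable form against stock pyparsing:

    import pyparsing as pp

    qs = pp.quoted_string.copy().set_parse_action(pp.remove_quotes)
    assert qs.parse_string('"Now is the Winter"')[0] == "Now is the Winter"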
+-
+-def with_attribute(*args, **attr_dict):
+-    """
+-    Helper to create a validating parse action to be used with start
+-    tags created with :class:`make_xml_tags` or
+-    :class:`make_html_tags`. Use ``with_attribute`` to qualify
+-    a starting tag with a required attribute value, to avoid false
+-    matches on common tags such as ``<TD>`` or ``<TABLE>``.
+-
+-    Call ``with_attribute`` with a series of attribute names and
+-    values. Specify the list of filter attributes names and values as:
+-
+-    - keyword arguments, as in ``(align="right")``, or
+-    - as an explicit dict with ``**`` operator, when an attribute
+-      name is also a Python reserved word, as in ``**{"class":"Customer", "align":"right"}``
+-    - a list of name-value tuples, as in ``(("ns1:class", "Customer"), ("ns2:align", "right"))``
+-
+-    For attribute names with a namespace prefix, you must use the second
+-    form. Attribute names are matched insensitive to upper/lower case.
+-
+-    If just testing for ``class`` (with or without a namespace), use
+-    :class:`with_class`.
+-
+-    To verify that the attribute exists, but without specifying a value,
+-    pass ``with_attribute.ANY_VALUE`` as the value.
+-
+-    Example::
+-
+-        html = '''
+-            <div>
+-            Some text
+-            <div type="grid">1 4 0 1 0</div>
+-            <div type="graph">1,3 2,3 1,1</div>
+-            <div>this has no type</div>
+-            </div>
+-
+-        '''
+-        div,div_end = make_html_tags("div")
+-
+-        # only match div tag having a type attribute with value "grid"
+-        div_grid = div().set_parse_action(with_attribute(type="grid"))
+-        grid_expr = div_grid + SkipTo(div | div_end)("body")
+-        for grid_header in grid_expr.search_string(html):
+-            print(grid_header.body)
+-
+-        # construct a match with any div tag having a type attribute, regardless of the value
+-        div_any_type = div().set_parse_action(with_attribute(type=with_attribute.ANY_VALUE))
+-        div_expr = div_any_type + SkipTo(div | div_end)("body")
+-        for div_header in div_expr.search_string(html):
+-            print(div_header.body)
+-
+-    prints::
+-
+-        1 4 0 1 0
+-
+-        1 4 0 1 0
+-        1,3 2,3 1,1
+-    """
+-    if args:
+-        attrs = args[:]
+-    else:
+-        attrs = attr_dict.items()
+-    attrs = [(k, v) for k, v in attrs]
+-
+-    def pa(s, l, tokens):
+-        for attrName, attrValue in attrs:
+-            if attrName not in tokens:
+-                raise ParseException(s, l, "no matching attribute " + attrName)
+-            if attrValue != with_attribute.ANY_VALUE and tokens[attrName] != attrValue:
+-                raise ParseException(
+-                    s,
+-                    l,
+-                    "attribute {!r} has value {!r}, must be {!r}".format(
+-                        attrName, tokens[attrName], attrValue
+-                    ),
+-                )
+-
+-    return pa
+-
+-
+-with_attribute.ANY_VALUE = object()
+-
+-
+-def with_class(classname, namespace=""):
+-    """
+-    Simplified version of :class:`with_attribute` when
+-    matching on a div class - made difficult because ``class`` is
+-    a reserved word in Python.
+-
+-    Example::
+-
+-        html = '''
+-            <div>
+-            Some text
+-            <div class="grid">1 4 0 1 0</div>
+-            <div class="graph">1,3 2,3 1,1</div>
+-            <div>this &lt;div&gt; has no class</div>
+-            </div>
+- +- ''' +- div,div_end = make_html_tags("div") +- div_grid = div().set_parse_action(with_class("grid")) +- +- grid_expr = div_grid + SkipTo(div | div_end)("body") +- for grid_header in grid_expr.search_string(html): +- print(grid_header.body) +- +- div_any_type = div().set_parse_action(with_class(withAttribute.ANY_VALUE)) +- div_expr = div_any_type + SkipTo(div | div_end)("body") +- for div_header in div_expr.search_string(html): +- print(div_header.body) +- +- prints:: +- +- 1 4 0 1 0 +- +- 1 4 0 1 0 +- 1,3 2,3 1,1 +- """ +- classattr = "{}:class".format(namespace) if namespace else "class" +- return with_attribute(**{classattr: classname}) +- +- +-# pre-PEP8 compatibility symbols +-replaceWith = replace_with +-removeQuotes = remove_quotes +-withAttribute = with_attribute +-withClass = with_class +-matchOnlyAtCol = match_only_at_col +diff --git a/src/poetry/core/_vendor/pyparsing/common.py b/src/poetry/core/_vendor/pyparsing/common.py +deleted file mode 100644 +index 1859fb7..0000000 +--- a/src/poetry/core/_vendor/pyparsing/common.py ++++ /dev/null +@@ -1,424 +0,0 @@ +-# common.py +-from .core import * +-from .helpers import delimited_list, any_open_tag, any_close_tag +-from datetime import datetime +- +- +-# some other useful expressions - using lower-case class name since we are really using this as a namespace +-class pyparsing_common: +- """Here are some common low-level expressions that may be useful in +- jump-starting parser development: +- +- - numeric forms (:class:`integers`, :class:`reals`, +- :class:`scientific notation`) +- - common :class:`programming identifiers` +- - network addresses (:class:`MAC`, +- :class:`IPv4`, :class:`IPv6`) +- - ISO8601 :class:`dates` and +- :class:`datetime` +- - :class:`UUID` +- - :class:`comma-separated list` +- - :class:`url` +- +- Parse actions: +- +- - :class:`convertToInteger` +- - :class:`convertToFloat` +- - :class:`convertToDate` +- - :class:`convertToDatetime` +- - :class:`stripHTMLTags` +- - :class:`upcaseTokens` +- - :class:`downcaseTokens` +- +- Example:: +- +- pyparsing_common.number.runTests(''' +- # any int or real number, returned as the appropriate type +- 100 +- -100 +- +100 +- 3.14159 +- 6.02e23 +- 1e-12 +- ''') +- +- pyparsing_common.fnumber.runTests(''' +- # any int or real number, returned as float +- 100 +- -100 +- +100 +- 3.14159 +- 6.02e23 +- 1e-12 +- ''') +- +- pyparsing_common.hex_integer.runTests(''' +- # hex numbers +- 100 +- FF +- ''') +- +- pyparsing_common.fraction.runTests(''' +- # fractions +- 1/2 +- -3/4 +- ''') +- +- pyparsing_common.mixed_integer.runTests(''' +- # mixed fractions +- 1 +- 1/2 +- -3/4 +- 1-3/4 +- ''') +- +- import uuid +- pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) +- pyparsing_common.uuid.runTests(''' +- # uuid +- 12345678-1234-5678-1234-567812345678 +- ''') +- +- prints:: +- +- # any int or real number, returned as the appropriate type +- 100 +- [100] +- +- -100 +- [-100] +- +- +100 +- [100] +- +- 3.14159 +- [3.14159] +- +- 6.02e23 +- [6.02e+23] +- +- 1e-12 +- [1e-12] +- +- # any int or real number, returned as float +- 100 +- [100.0] +- +- -100 +- [-100.0] +- +- +100 +- [100.0] +- +- 3.14159 +- [3.14159] +- +- 6.02e23 +- [6.02e+23] +- +- 1e-12 +- [1e-12] +- +- # hex numbers +- 100 +- [256] +- +- FF +- [255] +- +- # fractions +- 1/2 +- [0.5] +- +- -3/4 +- [-0.75] +- +- # mixed fractions +- 1 +- [1] +- +- 1/2 +- [0.5] +- +- -3/4 +- [-0.75] +- +- 1-3/4 +- [1.75] +- +- # uuid +- 12345678-1234-5678-1234-567812345678 +- [UUID('12345678-1234-5678-1234-567812345678')] +- """ +- +- 
convert_to_integer = token_map(int) +- """ +- Parse action for converting parsed integers to Python int +- """ +- +- convert_to_float = token_map(float) +- """ +- Parse action for converting parsed numbers to Python float +- """ +- +- integer = Word(nums).set_name("integer").set_parse_action(convert_to_integer) +- """expression that parses an unsigned integer, returns an int""" +- +- hex_integer = ( +- Word(hexnums).set_name("hex integer").set_parse_action(token_map(int, 16)) +- ) +- """expression that parses a hexadecimal integer, returns an int""" +- +- signed_integer = ( +- Regex(r"[+-]?\d+") +- .set_name("signed integer") +- .set_parse_action(convert_to_integer) +- ) +- """expression that parses an integer with optional leading sign, returns an int""" +- +- fraction = ( +- signed_integer().set_parse_action(convert_to_float) +- + "/" +- + signed_integer().set_parse_action(convert_to_float) +- ).set_name("fraction") +- """fractional expression of an integer divided by an integer, returns a float""" +- fraction.add_parse_action(lambda tt: tt[0] / tt[-1]) +- +- mixed_integer = ( +- fraction | signed_integer + Opt(Opt("-").suppress() + fraction) +- ).set_name("fraction or mixed integer-fraction") +- """mixed integer of the form 'integer - fraction', with optional leading integer, returns float""" +- mixed_integer.add_parse_action(sum) +- +- real = ( +- Regex(r"[+-]?(?:\d+\.\d*|\.\d+)") +- .set_name("real number") +- .set_parse_action(convert_to_float) +- ) +- """expression that parses a floating point number and returns a float""" +- +- sci_real = ( +- Regex(r"[+-]?(?:\d+(?:[eE][+-]?\d+)|(?:\d+\.\d*|\.\d+)(?:[eE][+-]?\d+)?)") +- .set_name("real number with scientific notation") +- .set_parse_action(convert_to_float) +- ) +- """expression that parses a floating point number with optional +- scientific notation and returns a float""" +- +- # streamlining this expression makes the docs nicer-looking +- number = (sci_real | real | signed_integer).setName("number").streamline() +- """any numeric expression, returns the corresponding Python type""" +- +- fnumber = ( +- Regex(r"[+-]?\d+\.?\d*([eE][+-]?\d+)?") +- .set_name("fnumber") +- .set_parse_action(convert_to_float) +- ) +- """any int or real number, returned as float""" +- +- identifier = Word(identchars, identbodychars).set_name("identifier") +- """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" +- +- ipv4_address = Regex( +- r"(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}" +- ).set_name("IPv4 address") +- "IPv4 address (``0.0.0.0 - 255.255.255.255``)" +- +- _ipv6_part = Regex(r"[0-9a-fA-F]{1,4}").set_name("hex_integer") +- _full_ipv6_address = (_ipv6_part + (":" + _ipv6_part) * 7).set_name( +- "full IPv6 address" +- ) +- _short_ipv6_address = ( +- Opt(_ipv6_part + (":" + _ipv6_part) * (0, 6)) +- + "::" +- + Opt(_ipv6_part + (":" + _ipv6_part) * (0, 6)) +- ).set_name("short IPv6 address") +- _short_ipv6_address.add_condition( +- lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8 +- ) +- _mixed_ipv6_address = ("::ffff:" + ipv4_address).set_name("mixed IPv6 address") +- ipv6_address = Combine( +- (_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).set_name( +- "IPv6 address" +- ) +- ).set_name("IPv6 address") +- "IPv6 address (long, short, or mixed form)" +- +- mac_address = Regex( +- r"[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}" +- ).set_name("MAC address") +- "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' 
delimiters)" +- +- @staticmethod +- def convert_to_date(fmt: str = "%Y-%m-%d"): +- """ +- Helper to create a parse action for converting parsed date string to Python datetime.date +- +- Params - +- - fmt - format to be passed to datetime.strptime (default= ``"%Y-%m-%d"``) +- +- Example:: +- +- date_expr = pyparsing_common.iso8601_date.copy() +- date_expr.setParseAction(pyparsing_common.convertToDate()) +- print(date_expr.parseString("1999-12-31")) +- +- prints:: +- +- [datetime.date(1999, 12, 31)] +- """ +- +- def cvt_fn(ss, ll, tt): +- try: +- return datetime.strptime(tt[0], fmt).date() +- except ValueError as ve: +- raise ParseException(ss, ll, str(ve)) +- +- return cvt_fn +- +- @staticmethod +- def convert_to_datetime(fmt: str = "%Y-%m-%dT%H:%M:%S.%f"): +- """Helper to create a parse action for converting parsed +- datetime string to Python datetime.datetime +- +- Params - +- - fmt - format to be passed to datetime.strptime (default= ``"%Y-%m-%dT%H:%M:%S.%f"``) +- +- Example:: +- +- dt_expr = pyparsing_common.iso8601_datetime.copy() +- dt_expr.setParseAction(pyparsing_common.convertToDatetime()) +- print(dt_expr.parseString("1999-12-31T23:59:59.999")) +- +- prints:: +- +- [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] +- """ +- +- def cvt_fn(s, l, t): +- try: +- return datetime.strptime(t[0], fmt) +- except ValueError as ve: +- raise ParseException(s, l, str(ve)) +- +- return cvt_fn +- +- iso8601_date = Regex( +- r"(?P\d{4})(?:-(?P\d\d)(?:-(?P\d\d))?)?" +- ).set_name("ISO8601 date") +- "ISO8601 date (``yyyy-mm-dd``)" +- +- iso8601_datetime = Regex( +- r"(?P\d{4})-(?P\d\d)-(?P\d\d)[T ](?P\d\d):(?P\d\d)(:(?P\d\d(\.\d*)?)?)?(?PZ|[+-]\d\d:?\d\d)?" +- ).set_name("ISO8601 datetime") +- "ISO8601 datetime (``yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)``) - trailing seconds, milliseconds, and timezone optional; accepts separating ``'T'`` or ``' '``" +- +- uuid = Regex(r"[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}").set_name("UUID") +- "UUID (``xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx``)" +- +- _html_stripper = any_open_tag.suppress() | any_close_tag.suppress() +- +- @staticmethod +- def strip_html_tags(s: str, l: int, tokens: ParseResults): +- """Parse action to remove HTML tags from web page HTML source +- +- Example:: +- +- # strip HTML links from normal text +- text = 'More info at the pyparsing wiki page' +- td, td_end = makeHTMLTags("TD") +- table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end +- print(table_text.parseString(text).body) +- +- Prints:: +- +- More info at the pyparsing wiki page +- """ +- return pyparsing_common._html_stripper.transform_string(tokens[0]) +- +- _commasepitem = ( +- Combine( +- OneOrMore( +- ~Literal(",") +- + ~LineEnd() +- + Word(printables, exclude_chars=",") +- + Opt(White(" \t") + ~FollowedBy(LineEnd() | ",")) +- ) +- ) +- .streamline() +- .set_name("commaItem") +- ) +- comma_separated_list = delimited_list( +- Opt(quoted_string.copy() | _commasepitem, default="") +- ).set_name("comma separated list") +- """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" +- +- upcase_tokens = staticmethod(token_map(lambda t: t.upper())) +- """Parse action to convert tokens to upper case.""" +- +- downcase_tokens = staticmethod(token_map(lambda t: t.lower())) +- """Parse action to convert tokens to lower case.""" +- +- # fmt: off +- url = Regex( +- # https://mathiasbynens.be/demo/url-regex +- # https://gist.github.com/dperini/729294 +- r"^" + +- # protocol identifier (optional) +- # 
short syntax // still required +- r"(?:(?:(?Phttps?|ftp):)?\/\/)" + +- # user:pass BasicAuth (optional) +- r"(?:(?P\S+(?::\S*)?)@)?" + +- r"(?P" + +- # IP address exclusion +- # private & local networks +- r"(?!(?:10|127)(?:\.\d{1,3}){3})" + +- r"(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})" + +- r"(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})" + +- # IP address dotted notation octets +- # excludes loopback network 0.0.0.0 +- # excludes reserved space >= 224.0.0.0 +- # excludes network & broadcast addresses +- # (first & last IP address of each class) +- r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])" + +- r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}" + +- r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))" + +- r"|" + +- # host & domain names, may end with dot +- # can be replaced by a shortest alternative +- # (?![-_])(?:[-\w\u00a1-\uffff]{0,63}[^-_]\.)+ +- r"(?:" + +- r"(?:" + +- r"[a-z0-9\u00a1-\uffff]" + +- r"[a-z0-9\u00a1-\uffff_-]{0,62}" + +- r")?" + +- r"[a-z0-9\u00a1-\uffff]\." + +- r")+" + +- # TLD identifier name, may end with dot +- r"(?:[a-z\u00a1-\uffff]{2,}\.?)" + +- r")" + +- # port number (optional) +- r"(:(?P\d{2,5}))?" + +- # resource path (optional) +- r"(?P\/[^?# ]*)?" + +- # query string (optional) +- r"(\?(?P[^#]*))?" + +- # fragment (optional) +- r"(#(?P\S*))?" + +- r"$" +- ).set_name("url") +- # fmt: on +- +- # pre-PEP8 compatibility names +- convertToInteger = convert_to_integer +- convertToFloat = convert_to_float +- convertToDate = convert_to_date +- convertToDatetime = convert_to_datetime +- stripHTMLTags = strip_html_tags +- upcaseTokens = upcase_tokens +- downcaseTokens = downcase_tokens +- +- +-_builtin_exprs = [ +- v for v in vars(pyparsing_common).values() if isinstance(v, ParserElement) +-] +diff --git a/src/poetry/core/_vendor/pyparsing/core.py b/src/poetry/core/_vendor/pyparsing/core.py +deleted file mode 100644 +index 9acba3f..0000000 +--- a/src/poetry/core/_vendor/pyparsing/core.py ++++ /dev/null +@@ -1,5814 +0,0 @@ +-# +-# core.py +-# +-import os +-import typing +-from typing import ( +- NamedTuple, +- Union, +- Callable, +- Any, +- Generator, +- Tuple, +- List, +- TextIO, +- Set, +- Sequence, +-) +-from abc import ABC, abstractmethod +-from enum import Enum +-import string +-import copy +-import warnings +-import re +-import sys +-from collections.abc import Iterable +-import traceback +-import types +-from operator import itemgetter +-from functools import wraps +-from threading import RLock +-from pathlib import Path +- +-from .util import ( +- _FifoCache, +- _UnboundedCache, +- __config_flags, +- _collapse_string_to_ranges, +- _escape_regex_range_chars, +- _bslash, +- _flatten, +- LRUMemo as _LRUMemo, +- UnboundedMemo as _UnboundedMemo, +-) +-from .exceptions import * +-from .actions import * +-from .results import ParseResults, _ParseResultsWithOffset +-from .unicode import pyparsing_unicode +- +-_MAX_INT = sys.maxsize +-str_type: Tuple[type, ...] = (str, bytes) +- +-# +-# Copyright (c) 2003-2022 Paul T. 
McGuire +-# +-# Permission is hereby granted, free of charge, to any person obtaining +-# a copy of this software and associated documentation files (the +-# "Software"), to deal in the Software without restriction, including +-# without limitation the rights to use, copy, modify, merge, publish, +-# distribute, sublicense, and/or sell copies of the Software, and to +-# permit persons to whom the Software is furnished to do so, subject to +-# the following conditions: +-# +-# The above copyright notice and this permission notice shall be +-# included in all copies or substantial portions of the Software. +-# +-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +-# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +-# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +-# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +-# +- +- +-if sys.version_info >= (3, 8): +- from functools import cached_property +-else: +- +- class cached_property: +- def __init__(self, func): +- self._func = func +- +- def __get__(self, instance, owner=None): +- ret = instance.__dict__[self._func.__name__] = self._func(instance) +- return ret +- +- +-class __compat__(__config_flags): +- """ +- A cross-version compatibility configuration for pyparsing features that will be +- released in a future version. By setting values in this configuration to True, +- those features can be enabled in prior versions for compatibility development +- and testing. +- +- - ``collect_all_And_tokens`` - flag to enable fix for Issue #63 that fixes erroneous grouping +- of results names when an :class:`And` expression is nested within an :class:`Or` or :class:`MatchFirst`; +- maintained for compatibility, but setting to ``False`` no longer restores pre-2.3.1 +- behavior +- """ +- +- _type_desc = "compatibility" +- +- collect_all_And_tokens = True +- +- _all_names = [__ for __ in locals() if not __.startswith("_")] +- _fixed_names = """ +- collect_all_And_tokens +- """.split() +- +- +-class __diag__(__config_flags): +- _type_desc = "diagnostic" +- +- warn_multiple_tokens_in_named_alternation = False +- warn_ungrouped_named_tokens_in_collection = False +- warn_name_set_on_empty_Forward = False +- warn_on_parse_using_empty_Forward = False +- warn_on_assignment_to_Forward = False +- warn_on_multiple_string_args_to_oneof = False +- warn_on_match_first_with_lshift_operator = False +- enable_debug_on_named_expressions = False +- +- _all_names = [__ for __ in locals() if not __.startswith("_")] +- _warning_names = [name for name in _all_names if name.startswith("warn")] +- _debug_names = [name for name in _all_names if name.startswith("enable_debug")] +- +- @classmethod +- def enable_all_warnings(cls) -> None: +- for name in cls._warning_names: +- cls.enable(name) +- +- +-class Diagnostics(Enum): +- """ +- Diagnostic configuration (all default to disabled) +- - ``warn_multiple_tokens_in_named_alternation`` - flag to enable warnings when a results +- name is defined on a :class:`MatchFirst` or :class:`Or` expression with one or more :class:`And` subexpressions +- - ``warn_ungrouped_named_tokens_in_collection`` - flag to enable warnings when a results +- name is defined on a containing expression with ungrouped subexpressions that also +- have results names +- - 
``warn_name_set_on_empty_Forward`` - flag to enable warnings when a :class:`Forward` is defined +- with a results name, but has no contents defined +- - ``warn_on_parse_using_empty_Forward`` - flag to enable warnings when a :class:`Forward` is +- defined in a grammar but has never had an expression attached to it +- - ``warn_on_assignment_to_Forward`` - flag to enable warnings when a :class:`Forward` is defined +- but is overwritten by assigning using ``'='`` instead of ``'<<='`` or ``'<<'`` +- - ``warn_on_multiple_string_args_to_oneof`` - flag to enable warnings when :class:`one_of` is +- incorrectly called with multiple str arguments +- - ``enable_debug_on_named_expressions`` - flag to auto-enable debug on all subsequent +- calls to :class:`ParserElement.set_name` +- +- Diagnostics are enabled/disabled by calling :class:`enable_diag` and :class:`disable_diag`. +- All warnings can be enabled by calling :class:`enable_all_warnings`. +- """ +- +- warn_multiple_tokens_in_named_alternation = 0 +- warn_ungrouped_named_tokens_in_collection = 1 +- warn_name_set_on_empty_Forward = 2 +- warn_on_parse_using_empty_Forward = 3 +- warn_on_assignment_to_Forward = 4 +- warn_on_multiple_string_args_to_oneof = 5 +- warn_on_match_first_with_lshift_operator = 6 +- enable_debug_on_named_expressions = 7 +- +- +-def enable_diag(diag_enum: Diagnostics) -> None: +- """ +- Enable a global pyparsing diagnostic flag (see :class:`Diagnostics`). +- """ +- __diag__.enable(diag_enum.name) +- +- +-def disable_diag(diag_enum: Diagnostics) -> None: +- """ +- Disable a global pyparsing diagnostic flag (see :class:`Diagnostics`). +- """ +- __diag__.disable(diag_enum.name) +- +- +-def enable_all_warnings() -> None: +- """ +- Enable all global pyparsing diagnostic warnings (see :class:`Diagnostics`). 
+- """ +- __diag__.enable_all_warnings() +- +- +-# hide abstract class +-del __config_flags +- +- +-def _should_enable_warnings( +- cmd_line_warn_options: typing.Iterable[str], warn_env_var: typing.Optional[str] +-) -> bool: +- enable = bool(warn_env_var) +- for warn_opt in cmd_line_warn_options: +- w_action, w_message, w_category, w_module, w_line = (warn_opt + "::::").split( +- ":" +- )[:5] +- if not w_action.lower().startswith("i") and ( +- not (w_message or w_category or w_module) or w_module == "pyparsing" +- ): +- enable = True +- elif w_action.lower().startswith("i") and w_module in ("pyparsing", ""): +- enable = False +- return enable +- +- +-if _should_enable_warnings( +- sys.warnoptions, os.environ.get("PYPARSINGENABLEALLWARNINGS") +-): +- enable_all_warnings() +- +- +-# build list of single arg builtins, that can be used as parse actions +-_single_arg_builtins = { +- sum, +- len, +- sorted, +- reversed, +- list, +- tuple, +- set, +- any, +- all, +- min, +- max, +-} +- +-_generatorType = types.GeneratorType +-ParseAction = Union[ +- Callable[[], Any], +- Callable[[ParseResults], Any], +- Callable[[int, ParseResults], Any], +- Callable[[str, int, ParseResults], Any], +-] +-ParseCondition = Union[ +- Callable[[], bool], +- Callable[[ParseResults], bool], +- Callable[[int, ParseResults], bool], +- Callable[[str, int, ParseResults], bool], +-] +-ParseFailAction = Callable[[str, int, "ParserElement", Exception], None] +-DebugStartAction = Callable[[str, int, "ParserElement", bool], None] +-DebugSuccessAction = Callable[ +- [str, int, int, "ParserElement", ParseResults, bool], None +-] +-DebugExceptionAction = Callable[[str, int, "ParserElement", Exception, bool], None] +- +- +-alphas = string.ascii_uppercase + string.ascii_lowercase +-identchars = pyparsing_unicode.Latin1.identchars +-identbodychars = pyparsing_unicode.Latin1.identbodychars +-nums = "0123456789" +-hexnums = nums + "ABCDEFabcdef" +-alphanums = alphas + nums +-printables = "".join([c for c in string.printable if c not in string.whitespace]) +- +-_trim_arity_call_line: traceback.StackSummary = None +- +- +-def _trim_arity(func, max_limit=3): +- """decorator to trim function calls to match the arity of the target""" +- global _trim_arity_call_line +- +- if func in _single_arg_builtins: +- return lambda s, l, t: func(t) +- +- limit = 0 +- found_arity = False +- +- def extract_tb(tb, limit=0): +- frames = traceback.extract_tb(tb, limit=limit) +- frame_summary = frames[-1] +- return [frame_summary[:2]] +- +- # synthesize what would be returned by traceback.extract_stack at the call to +- # user's parse action 'func', so that we don't incur call penalty at parse time +- +- # fmt: off +- LINE_DIFF = 7 +- # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND +- # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!! 
+- _trim_arity_call_line = (_trim_arity_call_line or traceback.extract_stack(limit=2)[-1]) +- pa_call_line_synth = (_trim_arity_call_line[0], _trim_arity_call_line[1] + LINE_DIFF) +- +- def wrapper(*args): +- nonlocal found_arity, limit +- while 1: +- try: +- ret = func(*args[limit:]) +- found_arity = True +- return ret +- except TypeError as te: +- # re-raise TypeErrors if they did not come from our arity testing +- if found_arity: +- raise +- else: +- tb = te.__traceback__ +- trim_arity_type_error = ( +- extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth +- ) +- del tb +- +- if trim_arity_type_error: +- if limit < max_limit: +- limit += 1 +- continue +- +- raise +- # fmt: on +- +- # copy func name to wrapper for sensible debug output +- # (can't use functools.wraps, since that messes with function signature) +- func_name = getattr(func, "__name__", getattr(func, "__class__").__name__) +- wrapper.__name__ = func_name +- wrapper.__doc__ = func.__doc__ +- +- return wrapper +- +- +-def condition_as_parse_action( +- fn: ParseCondition, message: str = None, fatal: bool = False +-) -> ParseAction: +- """ +- Function to convert a simple predicate function that returns ``True`` or ``False`` +- into a parse action. Can be used in places when a parse action is required +- and :class:`ParserElement.add_condition` cannot be used (such as when adding a condition +- to an operator level in :class:`infix_notation`). +- +- Optional keyword arguments: +- +- - ``message`` - define a custom message to be used in the raised exception +- - ``fatal`` - if True, will raise :class:`ParseFatalException` to stop parsing immediately; +- otherwise will raise :class:`ParseException` +- +- """ +- msg = message if message is not None else "failed user-defined condition" +- exc_type = ParseFatalException if fatal else ParseException +- fn = _trim_arity(fn) +- +- @wraps(fn) +- def pa(s, l, t): +- if not bool(fn(s, l, t)): +- raise exc_type(s, l, msg) +- +- return pa +- +- +-def _default_start_debug_action( +- instring: str, loc: int, expr: "ParserElement", cache_hit: bool = False +-): +- cache_hit_str = "*" if cache_hit else "" +- print( +- ( +- "{}Match {} at loc {}({},{})\n {}\n {}^".format( +- cache_hit_str, +- expr, +- loc, +- lineno(loc, instring), +- col(loc, instring), +- line(loc, instring), +- " " * (col(loc, instring) - 1), +- ) +- ) +- ) +- +- +-def _default_success_debug_action( +- instring: str, +- startloc: int, +- endloc: int, +- expr: "ParserElement", +- toks: ParseResults, +- cache_hit: bool = False, +-): +- cache_hit_str = "*" if cache_hit else "" +- print("{}Matched {} -> {}".format(cache_hit_str, expr, toks.as_list())) +- +- +-def _default_exception_debug_action( +- instring: str, +- loc: int, +- expr: "ParserElement", +- exc: Exception, +- cache_hit: bool = False, +-): +- cache_hit_str = "*" if cache_hit else "" +- print( +- "{}Match {} failed, {} raised: {}".format( +- cache_hit_str, expr, type(exc).__name__, exc +- ) +- ) +- +- +-def null_debug_action(*args): +- """'Do-nothing' debug action, to suppress debugging output during parsing.""" +- +- +-class ParserElement(ABC): +- """Abstract base level parser element class.""" +- +- DEFAULT_WHITE_CHARS: str = " \n\t\r" +- verbose_stacktrace: bool = False +- _literalStringClass: typing.Optional[type] = None +- +- @staticmethod +- def set_default_whitespace_chars(chars: str) -> None: +- r""" +- Overrides the default whitespace chars +- +- Example:: +- +- # default whitespace chars are space, and newline +- Word(alphas)[1, ...].parse_string("abc 
def\nghi jkl") # -> ['abc', 'def', 'ghi', 'jkl'] +- +- # change to just treat newline as significant +- ParserElement.set_default_whitespace_chars(" \t") +- Word(alphas)[1, ...].parse_string("abc def\nghi jkl") # -> ['abc', 'def'] +- """ +- ParserElement.DEFAULT_WHITE_CHARS = chars +- +- # update whitespace all parse expressions defined in this module +- for expr in _builtin_exprs: +- if expr.copyDefaultWhiteChars: +- expr.whiteChars = set(chars) +- +- @staticmethod +- def inline_literals_using(cls: type) -> None: +- """ +- Set class to be used for inclusion of string literals into a parser. +- +- Example:: +- +- # default literal class used is Literal +- integer = Word(nums) +- date_str = integer("year") + '/' + integer("month") + '/' + integer("day") +- +- date_str.parse_string("1999/12/31") # -> ['1999', '/', '12', '/', '31'] +- +- +- # change to Suppress +- ParserElement.inline_literals_using(Suppress) +- date_str = integer("year") + '/' + integer("month") + '/' + integer("day") +- +- date_str.parse_string("1999/12/31") # -> ['1999', '12', '31'] +- """ +- ParserElement._literalStringClass = cls +- +- class DebugActions(NamedTuple): +- debug_try: typing.Optional[DebugStartAction] +- debug_match: typing.Optional[DebugSuccessAction] +- debug_fail: typing.Optional[DebugExceptionAction] +- +- def __init__(self, savelist: bool = False): +- self.parseAction: List[ParseAction] = list() +- self.failAction: typing.Optional[ParseFailAction] = None +- self.customName = None +- self._defaultName = None +- self.resultsName = None +- self.saveAsList = savelist +- self.skipWhitespace = True +- self.whiteChars = set(ParserElement.DEFAULT_WHITE_CHARS) +- self.copyDefaultWhiteChars = True +- # used when checking for left-recursion +- self.mayReturnEmpty = False +- self.keepTabs = False +- self.ignoreExprs: List["ParserElement"] = list() +- self.debug = False +- self.streamlined = False +- # optimize exception handling for subclasses that don't advance parse index +- self.mayIndexError = True +- self.errmsg = "" +- # mark results names as modal (report only last) or cumulative (list all) +- self.modalResults = True +- # custom debug actions +- self.debugActions = self.DebugActions(None, None, None) +- # avoid redundant calls to preParse +- self.callPreparse = True +- self.callDuringTry = False +- self.suppress_warnings_: List[Diagnostics] = [] +- +- def suppress_warning(self, warning_type: Diagnostics) -> "ParserElement": +- """ +- Suppress warnings emitted for a particular diagnostic on this expression. +- +- Example:: +- +- base = pp.Forward() +- base.suppress_warning(Diagnostics.warn_on_parse_using_empty_Forward) +- +- # statement would normally raise a warning, but is now suppressed +- print(base.parseString("x")) +- +- """ +- self.suppress_warnings_.append(warning_type) +- return self +- +- def copy(self) -> "ParserElement": +- """ +- Make a copy of this :class:`ParserElement`. Useful for defining +- different parse actions for the same parsing pattern, using copies of +- the original parse element. 
+- +- Example:: +- +- integer = Word(nums).set_parse_action(lambda toks: int(toks[0])) +- integerK = integer.copy().add_parse_action(lambda toks: toks[0] * 1024) + Suppress("K") +- integerM = integer.copy().add_parse_action(lambda toks: toks[0] * 1024 * 1024) + Suppress("M") +- +- print((integerK | integerM | integer)[1, ...].parse_string("5K 100 640K 256M")) +- +- prints:: +- +- [5120, 100, 655360, 268435456] +- +- Equivalent form of ``expr.copy()`` is just ``expr()``:: +- +- integerM = integer().add_parse_action(lambda toks: toks[0] * 1024 * 1024) + Suppress("M") +- """ +- cpy = copy.copy(self) +- cpy.parseAction = self.parseAction[:] +- cpy.ignoreExprs = self.ignoreExprs[:] +- if self.copyDefaultWhiteChars: +- cpy.whiteChars = set(ParserElement.DEFAULT_WHITE_CHARS) +- return cpy +- +- def set_results_name( +- self, name: str, list_all_matches: bool = False, *, listAllMatches: bool = False +- ) -> "ParserElement": +- """ +- Define name for referencing matching tokens as a nested attribute +- of the returned parse results. +- +- Normally, results names are assigned as you would assign keys in a dict: +- any existing value is overwritten by later values. If it is necessary to +- keep all values captured for a particular results name, call ``set_results_name`` +- with ``list_all_matches`` = True. +- +- NOTE: ``set_results_name`` returns a *copy* of the original :class:`ParserElement` object; +- this is so that the client can define a basic element, such as an +- integer, and reference it in multiple places with different names. +- +- You can also set results names using the abbreviated syntax, +- ``expr("name")`` in place of ``expr.set_results_name("name")`` +- - see :class:`__call__`. If ``list_all_matches`` is required, use +- ``expr("name*")``. +- +- Example:: +- +- date_str = (integer.set_results_name("year") + '/' +- + integer.set_results_name("month") + '/' +- + integer.set_results_name("day")) +- +- # equivalent form: +- date_str = integer("year") + '/' + integer("month") + '/' + integer("day") +- """ +- listAllMatches = listAllMatches or list_all_matches +- return self._setResultsName(name, listAllMatches) +- +- def _setResultsName(self, name, listAllMatches=False): +- if name is None: +- return self +- newself = self.copy() +- if name.endswith("*"): +- name = name[:-1] +- listAllMatches = True +- newself.resultsName = name +- newself.modalResults = not listAllMatches +- return newself +- +- def set_break(self, break_flag: bool = True) -> "ParserElement": +- """ +- Method to invoke the Python pdb debugger when this element is +- about to be parsed. Set ``break_flag`` to ``True`` to enable, ``False`` to +- disable. +- """ +- if break_flag: +- _parseMethod = self._parse +- +- def breaker(instring, loc, doActions=True, callPreParse=True): +- import pdb +- +- # this call to pdb.set_trace() is intentional, not a checkin error +- pdb.set_trace() +- return _parseMethod(instring, loc, doActions, callPreParse) +- +- breaker._originalParseMethod = _parseMethod +- self._parse = breaker +- else: +- if hasattr(self._parse, "_originalParseMethod"): +- self._parse = self._parse._originalParseMethod +- return self +- +- def set_parse_action(self, *fns: ParseAction, **kwargs) -> "ParserElement": +- """ +- Define one or more actions to perform when successfully matching parse element definition. +- +- Parse actions can be called to perform data conversions, do extra validation, +- update external data structures, or enhance or replace the parsed tokens. 
+- Each parse action ``fn`` is a callable method with 0-3 arguments, called as +- ``fn(s, loc, toks)`` , ``fn(loc, toks)`` , ``fn(toks)`` , or just ``fn()`` , where: +- +- - s = the original string being parsed (see note below) +- - loc = the location of the matching substring +- - toks = a list of the matched tokens, packaged as a :class:`ParseResults` object +- +- The parsed tokens are passed to the parse action as ParseResults. They can be +- modified in place using list-style append, extend, and pop operations to update +- the parsed list elements; and with dictionary-style item set and del operations +- to add, update, or remove any named results. If the tokens are modified in place, +- it is not necessary to return them with a return statement. +- +- Parse actions can also completely replace the given tokens, with another ``ParseResults`` +- object, or with some entirely different object (common for parse actions that perform data +- conversions). A convenient way to build a new parse result is to define the values +- using a dict, and then create the return value using :class:`ParseResults.from_dict`. +- +- If None is passed as the ``fn`` parse action, all previously added parse actions for this +- expression are cleared. +- +- Optional keyword arguments: +- +- - call_during_try = (default= ``False``) indicate if parse action should be run during +- lookaheads and alternate testing. For parse actions that have side effects, it is +- important to only call the parse action once it is determined that it is being +- called as part of a successful parse. For parse actions that perform additional +- validation, then call_during_try should be passed as True, so that the validation +- code is included in the preliminary "try" parses. +- +- Note: the default parsing behavior is to expand tabs in the input string +- before starting the parsing process. See :class:`parse_string` for more +- information on parsing strings containing ```` s, and suggested +- methods to maintain a consistent view of the parsed string, the parse +- location, and line and column positions within the parsed string. +- +- Example:: +- +- # parse dates in the form YYYY/MM/DD +- +- # use parse action to convert toks from str to int at parse time +- def convert_to_int(toks): +- return int(toks[0]) +- +- # use a parse action to verify that the date is a valid date +- def is_valid_date(instring, loc, toks): +- from datetime import date +- year, month, day = toks[::2] +- try: +- date(year, month, day) +- except ValueError: +- raise ParseException(instring, loc, "invalid date given") +- +- integer = Word(nums) +- date_str = integer + '/' + integer + '/' + integer +- +- # add parse actions +- integer.set_parse_action(convert_to_int) +- date_str.set_parse_action(is_valid_date) +- +- # note that integer fields are now ints, not strings +- date_str.run_tests(''' +- # successful parse - note that integer fields were converted to ints +- 1999/12/31 +- +- # fail - invalid date +- 1999/13/31 +- ''') +- """ +- if list(fns) == [None]: +- self.parseAction = [] +- else: +- if not all(callable(fn) for fn in fns): +- raise TypeError("parse actions must be callable") +- self.parseAction = [_trim_arity(fn) for fn in fns] +- self.callDuringTry = kwargs.get( +- "call_during_try", kwargs.get("callDuringTry", False) +- ) +- return self +- +- def add_parse_action(self, *fns: ParseAction, **kwargs) -> "ParserElement": +- """ +- Add one or more parse actions to expression's list of parse actions. See :class:`set_parse_action`. 
+- +- See examples in :class:`copy`. +- """ +- self.parseAction += [_trim_arity(fn) for fn in fns] +- self.callDuringTry = self.callDuringTry or kwargs.get( +- "call_during_try", kwargs.get("callDuringTry", False) +- ) +- return self +- +- def add_condition(self, *fns: ParseCondition, **kwargs) -> "ParserElement": +- """Add a boolean predicate function to expression's list of parse actions. See +- :class:`set_parse_action` for function call signatures. Unlike ``set_parse_action``, +- functions passed to ``add_condition`` need to return boolean success/fail of the condition. +- +- Optional keyword arguments: +- +- - message = define a custom message to be used in the raised exception +- - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise +- ParseException +- - call_during_try = boolean to indicate if this method should be called during internal tryParse calls, +- default=False +- +- Example:: +- +- integer = Word(nums).set_parse_action(lambda toks: int(toks[0])) +- year_int = integer.copy() +- year_int.add_condition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later") +- date_str = year_int + '/' + integer + '/' + integer +- +- result = date_str.parse_string("1999/12/31") # -> Exception: Only support years 2000 and later (at char 0), +- (line:1, col:1) +- """ +- for fn in fns: +- self.parseAction.append( +- condition_as_parse_action( +- fn, message=kwargs.get("message"), fatal=kwargs.get("fatal", False) +- ) +- ) +- +- self.callDuringTry = self.callDuringTry or kwargs.get( +- "call_during_try", kwargs.get("callDuringTry", False) +- ) +- return self +- +- def set_fail_action(self, fn: ParseFailAction) -> "ParserElement": +- """ +- Define action to perform if parsing fails at this expression. +- Fail acton fn is a callable function that takes the arguments +- ``fn(s, loc, expr, err)`` where: +- +- - s = string being parsed +- - loc = location where expression match was attempted and failed +- - expr = the parse expression that failed +- - err = the exception thrown +- +- The function returns no value. 
It may throw :class:`ParseFatalException` +- if it is desired to stop parsing immediately.""" +- self.failAction = fn +- return self +- +- def _skipIgnorables(self, instring, loc): +- exprsFound = True +- while exprsFound: +- exprsFound = False +- for e in self.ignoreExprs: +- try: +- while 1: +- loc, dummy = e._parse(instring, loc) +- exprsFound = True +- except ParseException: +- pass +- return loc +- +- def preParse(self, instring, loc): +- if self.ignoreExprs: +- loc = self._skipIgnorables(instring, loc) +- +- if self.skipWhitespace: +- instrlen = len(instring) +- white_chars = self.whiteChars +- while loc < instrlen and instring[loc] in white_chars: +- loc += 1 +- +- return loc +- +- def parseImpl(self, instring, loc, doActions=True): +- return loc, [] +- +- def postParse(self, instring, loc, tokenlist): +- return tokenlist +- +- # @profile +- def _parseNoCache( +- self, instring, loc, doActions=True, callPreParse=True +- ) -> Tuple[int, ParseResults]: +- TRY, MATCH, FAIL = 0, 1, 2 +- debugging = self.debug # and doActions) +- len_instring = len(instring) +- +- if debugging or self.failAction: +- # print("Match {} at loc {}({}, {})".format(self, loc, lineno(loc, instring), col(loc, instring))) +- try: +- if callPreParse and self.callPreparse: +- pre_loc = self.preParse(instring, loc) +- else: +- pre_loc = loc +- tokens_start = pre_loc +- if self.debugActions.debug_try: +- self.debugActions.debug_try(instring, tokens_start, self, False) +- if self.mayIndexError or pre_loc >= len_instring: +- try: +- loc, tokens = self.parseImpl(instring, pre_loc, doActions) +- except IndexError: +- raise ParseException(instring, len_instring, self.errmsg, self) +- else: +- loc, tokens = self.parseImpl(instring, pre_loc, doActions) +- except Exception as err: +- # print("Exception raised:", err) +- if self.debugActions.debug_fail: +- self.debugActions.debug_fail( +- instring, tokens_start, self, err, False +- ) +- if self.failAction: +- self.failAction(instring, tokens_start, self, err) +- raise +- else: +- if callPreParse and self.callPreparse: +- pre_loc = self.preParse(instring, loc) +- else: +- pre_loc = loc +- tokens_start = pre_loc +- if self.mayIndexError or pre_loc >= len_instring: +- try: +- loc, tokens = self.parseImpl(instring, pre_loc, doActions) +- except IndexError: +- raise ParseException(instring, len_instring, self.errmsg, self) +- else: +- loc, tokens = self.parseImpl(instring, pre_loc, doActions) +- +- tokens = self.postParse(instring, loc, tokens) +- +- ret_tokens = ParseResults( +- tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults +- ) +- if self.parseAction and (doActions or self.callDuringTry): +- if debugging: +- try: +- for fn in self.parseAction: +- try: +- tokens = fn(instring, tokens_start, ret_tokens) +- except IndexError as parse_action_exc: +- exc = ParseException("exception raised in parse action") +- raise exc from parse_action_exc +- +- if tokens is not None and tokens is not ret_tokens: +- ret_tokens = ParseResults( +- tokens, +- self.resultsName, +- asList=self.saveAsList +- and isinstance(tokens, (ParseResults, list)), +- modal=self.modalResults, +- ) +- except Exception as err: +- # print "Exception raised in user parse action:", err +- if self.debugActions.debug_fail: +- self.debugActions.debug_fail( +- instring, tokens_start, self, err, False +- ) +- raise +- else: +- for fn in self.parseAction: +- try: +- tokens = fn(instring, tokens_start, ret_tokens) +- except IndexError as parse_action_exc: +- exc = ParseException("exception raised in 
parse action") +- raise exc from parse_action_exc +- +- if tokens is not None and tokens is not ret_tokens: +- ret_tokens = ParseResults( +- tokens, +- self.resultsName, +- asList=self.saveAsList +- and isinstance(tokens, (ParseResults, list)), +- modal=self.modalResults, +- ) +- if debugging: +- # print("Matched", self, "->", ret_tokens.as_list()) +- if self.debugActions.debug_match: +- self.debugActions.debug_match( +- instring, tokens_start, loc, self, ret_tokens, False +- ) +- +- return loc, ret_tokens +- +- def try_parse(self, instring: str, loc: int, raise_fatal: bool = False) -> int: +- try: +- return self._parse(instring, loc, doActions=False)[0] +- except ParseFatalException: +- if raise_fatal: +- raise +- raise ParseException(instring, loc, self.errmsg, self) +- +- def can_parse_next(self, instring: str, loc: int) -> bool: +- try: +- self.try_parse(instring, loc) +- except (ParseException, IndexError): +- return False +- else: +- return True +- +- # cache for left-recursion in Forward references +- recursion_lock = RLock() +- recursion_memos: typing.Dict[ +- Tuple[int, "Forward", bool], Tuple[int, Union[ParseResults, Exception]] +- ] = {} +- +- # argument cache for optimizing repeated calls when backtracking through recursive expressions +- packrat_cache = ( +- {} +- ) # this is set later by enabled_packrat(); this is here so that reset_cache() doesn't fail +- packrat_cache_lock = RLock() +- packrat_cache_stats = [0, 0] +- +- # this method gets repeatedly called during backtracking with the same arguments - +- # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression +- def _parseCache( +- self, instring, loc, doActions=True, callPreParse=True +- ) -> Tuple[int, ParseResults]: +- HIT, MISS = 0, 1 +- TRY, MATCH, FAIL = 0, 1, 2 +- lookup = (self, instring, loc, callPreParse, doActions) +- with ParserElement.packrat_cache_lock: +- cache = ParserElement.packrat_cache +- value = cache.get(lookup) +- if value is cache.not_in_cache: +- ParserElement.packrat_cache_stats[MISS] += 1 +- try: +- value = self._parseNoCache(instring, loc, doActions, callPreParse) +- except ParseBaseException as pe: +- # cache a copy of the exception, without the traceback +- cache.set(lookup, pe.__class__(*pe.args)) +- raise +- else: +- cache.set(lookup, (value[0], value[1].copy(), loc)) +- return value +- else: +- ParserElement.packrat_cache_stats[HIT] += 1 +- if self.debug and self.debugActions.debug_try: +- try: +- self.debugActions.debug_try(instring, loc, self, cache_hit=True) +- except TypeError: +- pass +- if isinstance(value, Exception): +- if self.debug and self.debugActions.debug_fail: +- try: +- self.debugActions.debug_fail( +- instring, loc, self, value, cache_hit=True +- ) +- except TypeError: +- pass +- raise value +- +- loc_, result, endloc = value[0], value[1].copy(), value[2] +- if self.debug and self.debugActions.debug_match: +- try: +- self.debugActions.debug_match( +- instring, loc_, endloc, self, result, cache_hit=True +- ) +- except TypeError: +- pass +- +- return loc_, result +- +- _parse = _parseNoCache +- +- @staticmethod +- def reset_cache() -> None: +- ParserElement.packrat_cache.clear() +- ParserElement.packrat_cache_stats[:] = [0] * len( +- ParserElement.packrat_cache_stats +- ) +- ParserElement.recursion_memos.clear() +- +- _packratEnabled = False +- _left_recursion_enabled = False +- +- @staticmethod +- def disable_memoization() -> None: +- """ +- Disables active Packrat or Left Recursion parsing and their memoization +- +- This method 
also works if neither Packrat nor Left Recursion are enabled. +- This makes it safe to call before activating Packrat nor Left Recursion +- to clear any previous settings. +- """ +- ParserElement.reset_cache() +- ParserElement._left_recursion_enabled = False +- ParserElement._packratEnabled = False +- ParserElement._parse = ParserElement._parseNoCache +- +- @staticmethod +- def enable_left_recursion( +- cache_size_limit: typing.Optional[int] = None, *, force=False +- ) -> None: +- """ +- Enables "bounded recursion" parsing, which allows for both direct and indirect +- left-recursion. During parsing, left-recursive :class:`Forward` elements are +- repeatedly matched with a fixed recursion depth that is gradually increased +- until finding the longest match. +- +- Example:: +- +- import pyparsing as pp +- pp.ParserElement.enable_left_recursion() +- +- E = pp.Forward("E") +- num = pp.Word(pp.nums) +- # match `num`, or `num '+' num`, or `num '+' num '+' num`, ... +- E <<= E + '+' - num | num +- +- print(E.parse_string("1+2+3")) +- +- Recursion search naturally memoizes matches of ``Forward`` elements and may +- thus skip reevaluation of parse actions during backtracking. This may break +- programs with parse actions which rely on strict ordering of side-effects. +- +- Parameters: +- +- - cache_size_limit - (default=``None``) - memoize at most this many +- ``Forward`` elements during matching; if ``None`` (the default), +- memoize all ``Forward`` elements. +- +- Bounded Recursion parsing works similar but not identical to Packrat parsing, +- thus the two cannot be used together. Use ``force=True`` to disable any +- previous, conflicting settings. +- """ +- if force: +- ParserElement.disable_memoization() +- elif ParserElement._packratEnabled: +- raise RuntimeError("Packrat and Bounded Recursion are not compatible") +- if cache_size_limit is None: +- ParserElement.recursion_memos = _UnboundedMemo() +- elif cache_size_limit > 0: +- ParserElement.recursion_memos = _LRUMemo(capacity=cache_size_limit) +- else: +- raise NotImplementedError("Memo size of %s" % cache_size_limit) +- ParserElement._left_recursion_enabled = True +- +- @staticmethod +- def enable_packrat(cache_size_limit: int = 128, *, force: bool = False) -> None: +- """ +- Enables "packrat" parsing, which adds memoizing to the parsing logic. +- Repeated parse attempts at the same string location (which happens +- often in many complex grammars) can immediately return a cached value, +- instead of re-executing parsing/validating code. Memoizing is done of +- both valid results and parsing exceptions. +- +- Parameters: +- +- - cache_size_limit - (default= ``128``) - if an integer value is provided +- will limit the size of the packrat cache; if None is passed, then +- the cache size will be unbounded; if 0 is passed, the cache will +- be effectively disabled. +- +- This speedup may break existing programs that use parse actions that +- have side-effects. For this reason, packrat parsing is disabled when +- you first import pyparsing. To activate the packrat feature, your +- program must call the class method :class:`ParserElement.enable_packrat`. +- For best results, call ``enable_packrat()`` immediately after +- importing pyparsing. +- +- Example:: +- +- import pyparsing +- pyparsing.ParserElement.enable_packrat() +- +- Packrat parsing works similar but not identical to Bounded Recursion parsing, +- thus the two cannot be used together. Use ``force=True`` to disable any +- previous, conflicting settings. 
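A short sketch of enabling packrat parsing and inspecting the hit/miss counters kept in ``packrat_cache_stats``, assuming pyparsing 3.x as ``pp``::

    import pyparsing as pp

    pp.ParserElement.enable_packrat()  # default cache size 128; pass None for unbounded

    word = pp.Word(pp.alphas)
    phrase = pp.OneOrMore(word + pp.Opt(","))
    print(phrase.parse_string("one, two, three"))
    # [hits, misses] accumulated by the packrat cache during the parse
    print(pp.ParserElement.packrat_cache_stats)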
+- """ +- if force: +- ParserElement.disable_memoization() +- elif ParserElement._left_recursion_enabled: +- raise RuntimeError("Packrat and Bounded Recursion are not compatible") +- if not ParserElement._packratEnabled: +- ParserElement._packratEnabled = True +- if cache_size_limit is None: +- ParserElement.packrat_cache = _UnboundedCache() +- else: +- ParserElement.packrat_cache = _FifoCache(cache_size_limit) +- ParserElement._parse = ParserElement._parseCache +- +- def parse_string( +- self, instring: str, parse_all: bool = False, *, parseAll: bool = False +- ) -> ParseResults: +- """ +- Parse a string with respect to the parser definition. This function is intended as the primary interface to the +- client code. +- +- :param instring: The input string to be parsed. +- :param parse_all: If set, the entire input string must match the grammar. +- :param parseAll: retained for pre-PEP8 compatibility, will be removed in a future release. +- :raises ParseException: Raised if ``parse_all`` is set and the input string does not match the whole grammar. +- :returns: the parsed data as a :class:`ParseResults` object, which may be accessed as a `list`, a `dict`, or +- an object with attributes if the given parser includes results names. +- +- If the input string is required to match the entire grammar, ``parse_all`` flag must be set to ``True``. This +- is also equivalent to ending the grammar with :class:`StringEnd`(). +- +- To report proper column numbers, ``parse_string`` operates on a copy of the input string where all tabs are +- converted to spaces (8 spaces per tab, as per the default in ``string.expandtabs``). If the input string +- contains tabs and the grammar uses parse actions that use the ``loc`` argument to index into the string +- being parsed, one can ensure a consistent view of the input string by doing one of the following: +- +- - calling ``parse_with_tabs`` on your grammar before calling ``parse_string`` (see :class:`parse_with_tabs`), +- - define your parse action using the full ``(s,loc,toks)`` signature, and reference the input string using the +- parse action's ``s`` argument, or +- - explicitly expand the tabs in your input string before calling ``parse_string``. +- +- Examples: +- +- By default, partial matches are OK. +- +- >>> res = Word('a').parse_string('aaaaabaaa') +- >>> print(res) +- ['aaaaa'] +- +- The parsing behavior varies by the inheriting class of this abstract class. Please refer to the children +- directly to see more examples. +- +- It raises an exception if parse_all flag is set and instring does not match the whole grammar. +- +- >>> res = Word('a').parse_string('aaaaabaaa', parse_all=True) +- Traceback (most recent call last): +- ... 
+- pyparsing.ParseException: Expected end of text, found 'b' (at char 5), (line:1, col:6) +- """ +- parseAll = parse_all or parseAll +- +- ParserElement.reset_cache() +- if not self.streamlined: +- self.streamline() +- for e in self.ignoreExprs: +- e.streamline() +- if not self.keepTabs: +- instring = instring.expandtabs() +- try: +- loc, tokens = self._parse(instring, 0) +- if parseAll: +- loc = self.preParse(instring, loc) +- se = Empty() + StringEnd() +- se._parse(instring, loc) +- except ParseBaseException as exc: +- if ParserElement.verbose_stacktrace: +- raise +- else: +- # catch and re-raise exception from here, clearing out pyparsing internal stack trace +- raise exc.with_traceback(None) +- else: +- return tokens +- +- def scan_string( +- self, +- instring: str, +- max_matches: int = _MAX_INT, +- overlap: bool = False, +- *, +- debug: bool = False, +- maxMatches: int = _MAX_INT, +- ) -> Generator[Tuple[ParseResults, int, int], None, None]: +- """ +- Scan the input string for expression matches. Each match will return the +- matching tokens, start location, and end location. May be called with optional +- ``max_matches`` argument, to clip scanning after 'n' matches are found. If +- ``overlap`` is specified, then overlapping matches will be reported. +- +- Note that the start and end locations are reported relative to the string +- being parsed. See :class:`parse_string` for more information on parsing +- strings with embedded tabs. +- +- Example:: +- +- source = "sldjf123lsdjjkf345sldkjf879lkjsfd987" +- print(source) +- for tokens, start, end in Word(alphas).scan_string(source): +- print(' '*start + '^'*(end-start)) +- print(' '*start + tokens[0]) +- +- prints:: +- +- sldjf123lsdjjkf345sldkjf879lkjsfd987 +- ^^^^^ +- sldjf +- ^^^^^^^ +- lsdjjkf +- ^^^^^^ +- sldkjf +- ^^^^^^ +- lkjsfd +- """ +- maxMatches = min(maxMatches, max_matches) +- if not self.streamlined: +- self.streamline() +- for e in self.ignoreExprs: +- e.streamline() +- +- if not self.keepTabs: +- instring = str(instring).expandtabs() +- instrlen = len(instring) +- loc = 0 +- preparseFn = self.preParse +- parseFn = self._parse +- ParserElement.resetCache() +- matches = 0 +- try: +- while loc <= instrlen and matches < maxMatches: +- try: +- preloc = preparseFn(instring, loc) +- nextLoc, tokens = parseFn(instring, preloc, callPreParse=False) +- except ParseException: +- loc = preloc + 1 +- else: +- if nextLoc > loc: +- matches += 1 +- if debug: +- print( +- { +- "tokens": tokens.asList(), +- "start": preloc, +- "end": nextLoc, +- } +- ) +- yield tokens, preloc, nextLoc +- if overlap: +- nextloc = preparseFn(instring, loc) +- if nextloc > loc: +- loc = nextLoc +- else: +- loc += 1 +- else: +- loc = nextLoc +- else: +- loc = preloc + 1 +- except ParseBaseException as exc: +- if ParserElement.verbose_stacktrace: +- raise +- else: +- # catch and re-raise exception from here, clears out pyparsing internal stack trace +- raise exc.with_traceback(None) +- +- def transform_string(self, instring: str, *, debug: bool = False) -> str: +- """ +- Extension to :class:`scan_string`, to modify matching text with modified tokens that may +- be returned from a parse action. To use ``transform_string``, define a grammar and +- attach a parse action to it that modifies the returned token list. +- Invoking ``transform_string()`` on a target string will then scan for matches, +- and replace the matched text patterns according to the logic in the parse +- action. ``transform_string()`` returns the resulting transformed string. 
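Besides the title-casing example below, ``transform_string`` pairs naturally with ``replace_with``; a sketch assuming pyparsing 3.x as ``pp``::

    import pyparsing as pp

    # replace any run of digits with "<NUM>", leaving all other text untouched
    number = pp.Word(pp.nums).set_parse_action(pp.replace_with("<NUM>"))
    print(number.transform_string("call 555-1234 before 5pm"))
    # -> call <NUM>-<NUM> before <NUM>pm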
+- +- Example:: +- +- wd = Word(alphas) +- wd.set_parse_action(lambda toks: toks[0].title()) +- +- print(wd.transform_string("now is the winter of our discontent made glorious summer by this sun of york.")) +- +- prints:: +- +- Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York. +- """ +- out: List[str] = [] +- lastE = 0 +- # force preservation of s, to minimize unwanted transformation of string, and to +- # keep string locs straight between transform_string and scan_string +- self.keepTabs = True +- try: +- for t, s, e in self.scan_string(instring, debug=debug): +- out.append(instring[lastE:s]) +- if t: +- if isinstance(t, ParseResults): +- out += t.as_list() +- elif isinstance(t, Iterable) and not isinstance(t, str_type): +- out.extend(t) +- else: +- out.append(t) +- lastE = e +- out.append(instring[lastE:]) +- out = [o for o in out if o] +- return "".join([str(s) for s in _flatten(out)]) +- except ParseBaseException as exc: +- if ParserElement.verbose_stacktrace: +- raise +- else: +- # catch and re-raise exception from here, clears out pyparsing internal stack trace +- raise exc.with_traceback(None) +- +- def search_string( +- self, +- instring: str, +- max_matches: int = _MAX_INT, +- *, +- debug: bool = False, +- maxMatches: int = _MAX_INT, +- ) -> ParseResults: +- """ +- Another extension to :class:`scan_string`, simplifying the access to the tokens found +- to match the given parse expression. May be called with optional +- ``max_matches`` argument, to clip searching after 'n' matches are found. +- +- Example:: +- +- # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters +- cap_word = Word(alphas.upper(), alphas.lower()) +- +- print(cap_word.search_string("More than Iron, more than Lead, more than Gold I need Electricity")) +- +- # the sum() builtin can be used to merge results into a single ParseResults object +- print(sum(cap_word.search_string("More than Iron, more than Lead, more than Gold I need Electricity"))) +- +- prints:: +- +- [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']] +- ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity'] +- """ +- maxMatches = min(maxMatches, max_matches) +- try: +- return ParseResults( +- [t for t, s, e in self.scan_string(instring, maxMatches, debug=debug)] +- ) +- except ParseBaseException as exc: +- if ParserElement.verbose_stacktrace: +- raise +- else: +- # catch and re-raise exception from here, clears out pyparsing internal stack trace +- raise exc.with_traceback(None) +- +- def split( +- self, +- instring: str, +- maxsplit: int = _MAX_INT, +- include_separators: bool = False, +- *, +- includeSeparators=False, +- ) -> Generator[str, None, None]: +- """ +- Generator method to split a string using the given expression as a separator. +- May be called with optional ``maxsplit`` argument, to limit the number of splits; +- and the optional ``include_separators`` argument (default= ``False``), if the separating +- matching text should be included in the split results. 
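A sketch of ``split`` with a compound separator expression, assuming pyparsing 3.x as ``pp``; note that whitespace around the separators stays attached to the split pieces::

    import pyparsing as pp

    sep = pp.Keyword("and") | pp.Keyword("or")
    print(list(sep.split("cats and dogs or birds")))
    # -> ['cats ', ' dogs ', ' birds']

    # include_separators=True interleaves the matched separator tokens
    print(list(sep.split("cats and dogs or birds", include_separators=True)))
    # -> ['cats ', 'and', ' dogs ', 'or', ' birds']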
+- +- Example:: +- +- punc = one_of(list(".,;:/-!?")) +- print(list(punc.split("This, this?, this sentence, is badly punctuated!"))) +- +- prints:: +- +- ['This', ' this', '', ' this sentence', ' is badly punctuated', ''] +- """ +- includeSeparators = includeSeparators or include_separators +- last = 0 +- for t, s, e in self.scan_string(instring, max_matches=maxsplit): +- yield instring[last:s] +- if includeSeparators: +- yield t[0] +- last = e +- yield instring[last:] +- +- def __add__(self, other) -> "ParserElement": +- """ +- Implementation of ``+`` operator - returns :class:`And`. Adding strings to a :class:`ParserElement` +- converts them to :class:`Literal`s by default. +- +- Example:: +- +- greet = Word(alphas) + "," + Word(alphas) + "!" +- hello = "Hello, World!" +- print(hello, "->", greet.parse_string(hello)) +- +- prints:: +- +- Hello, World! -> ['Hello', ',', 'World', '!'] +- +- ``...`` may be used as a parse expression as a short form of :class:`SkipTo`. +- +- Literal('start') + ... + Literal('end') +- +- is equivalent to: +- +- Literal('start') + SkipTo('end')("_skipped*") + Literal('end') +- +- Note that the skipped text is returned with '_skipped' as a results name, +- and to support having multiple skips in the same parser, the value returned is +- a list of all skipped text. +- """ +- if other is Ellipsis: +- return _PendingSkip(self) +- +- if isinstance(other, str_type): +- other = self._literalStringClass(other) +- if not isinstance(other, ParserElement): +- raise TypeError( +- "Cannot combine element of type {} with ParserElement".format( +- type(other).__name__ +- ) +- ) +- return And([self, other]) +- +- def __radd__(self, other) -> "ParserElement": +- """ +- Implementation of ``+`` operator when left operand is not a :class:`ParserElement` +- """ +- if other is Ellipsis: +- return SkipTo(self)("_skipped*") + self +- +- if isinstance(other, str_type): +- other = self._literalStringClass(other) +- if not isinstance(other, ParserElement): +- raise TypeError( +- "Cannot combine element of type {} with ParserElement".format( +- type(other).__name__ +- ) +- ) +- return other + self +- +- def __sub__(self, other) -> "ParserElement": +- """ +- Implementation of ``-`` operator, returns :class:`And` with error stop +- """ +- if isinstance(other, str_type): +- other = self._literalStringClass(other) +- if not isinstance(other, ParserElement): +- raise TypeError( +- "Cannot combine element of type {} with ParserElement".format( +- type(other).__name__ +- ) +- ) +- return self + And._ErrorStop() + other +- +- def __rsub__(self, other) -> "ParserElement": +- """ +- Implementation of ``-`` operator when left operand is not a :class:`ParserElement` +- """ +- if isinstance(other, str_type): +- other = self._literalStringClass(other) +- if not isinstance(other, ParserElement): +- raise TypeError( +- "Cannot combine element of type {} with ParserElement".format( +- type(other).__name__ +- ) +- ) +- return other - self +- +- def __mul__(self, other) -> "ParserElement": +- """ +- Implementation of ``*`` operator, allows use of ``expr * 3`` in place of +- ``expr + expr + expr``. Expressions may also be multiplied by a 2-integer +- tuple, similar to ``{min, max}`` multipliers in regular expressions. 
Tuples +- may also include ``None`` as in: +- - ``expr*(n, None)`` or ``expr*(n, )`` is equivalent +- to ``expr*n + ZeroOrMore(expr)`` +- (read as "at least n instances of ``expr``") +- - ``expr*(None, n)`` is equivalent to ``expr*(0, n)`` +- (read as "0 to n instances of ``expr``") +- - ``expr*(None, None)`` is equivalent to ``ZeroOrMore(expr)`` +- - ``expr*(1, None)`` is equivalent to ``OneOrMore(expr)`` +- +- Note that ``expr*(None, n)`` does not raise an exception if +- more than n exprs exist in the input stream; that is, +- ``expr*(None, n)`` does not enforce a maximum number of expr +- occurrences. If this behavior is desired, then write +- ``expr*(None, n) + ~expr`` +- """ +- if other is Ellipsis: +- other = (0, None) +- elif isinstance(other, tuple) and other[:1] == (Ellipsis,): +- other = ((0,) + other[1:] + (None,))[:2] +- +- if isinstance(other, int): +- minElements, optElements = other, 0 +- elif isinstance(other, tuple): +- other = tuple(o if o is not Ellipsis else None for o in other) +- other = (other + (None, None))[:2] +- if other[0] is None: +- other = (0, other[1]) +- if isinstance(other[0], int) and other[1] is None: +- if other[0] == 0: +- return ZeroOrMore(self) +- if other[0] == 1: +- return OneOrMore(self) +- else: +- return self * other[0] + ZeroOrMore(self) +- elif isinstance(other[0], int) and isinstance(other[1], int): +- minElements, optElements = other +- optElements -= minElements +- else: +- raise TypeError( +- "cannot multiply ParserElement and ({}) objects".format( +- ",".join(type(item).__name__ for item in other) +- ) +- ) +- else: +- raise TypeError( +- "cannot multiply ParserElement and {} objects".format( +- type(other).__name__ +- ) +- ) +- +- if minElements < 0: +- raise ValueError("cannot multiply ParserElement by negative value") +- if optElements < 0: +- raise ValueError( +- "second tuple value must be greater or equal to first tuple value" +- ) +- if minElements == optElements == 0: +- return And([]) +- +- if optElements: +- +- def makeOptionalList(n): +- if n > 1: +- return Opt(self + makeOptionalList(n - 1)) +- else: +- return Opt(self) +- +- if minElements: +- if minElements == 1: +- ret = self + makeOptionalList(optElements) +- else: +- ret = And([self] * minElements) + makeOptionalList(optElements) +- else: +- ret = makeOptionalList(optElements) +- else: +- if minElements == 1: +- ret = self +- else: +- ret = And([self] * minElements) +- return ret +- +- def __rmul__(self, other) -> "ParserElement": +- return self.__mul__(other) +- +- def __or__(self, other) -> "ParserElement": +- """ +- Implementation of ``|`` operator - returns :class:`MatchFirst` +- """ +- if other is Ellipsis: +- return _PendingSkip(self, must_skip=True) +- +- if isinstance(other, str_type): +- other = self._literalStringClass(other) +- if not isinstance(other, ParserElement): +- raise TypeError( +- "Cannot combine element of type {} with ParserElement".format( +- type(other).__name__ +- ) +- ) +- return MatchFirst([self, other]) +- +- def __ror__(self, other) -> "ParserElement": +- """ +- Implementation of ``|`` operator when left operand is not a :class:`ParserElement` +- """ +- if isinstance(other, str_type): +- other = self._literalStringClass(other) +- if not isinstance(other, ParserElement): +- raise TypeError( +- "Cannot combine element of type {} with ParserElement".format( +- type(other).__name__ +- ) +- ) +- return other | self +- +- def __xor__(self, other) -> "ParserElement": +- """ +- Implementation of ``^`` operator - returns :class:`Or` +- """ +- if 
isinstance(other, str_type): +- other = self._literalStringClass(other) +- if not isinstance(other, ParserElement): +- raise TypeError( +- "Cannot combine element of type {} with ParserElement".format( +- type(other).__name__ +- ) +- ) +- return Or([self, other]) +- +- def __rxor__(self, other) -> "ParserElement": +- """ +- Implementation of ``^`` operator when left operand is not a :class:`ParserElement` +- """ +- if isinstance(other, str_type): +- other = self._literalStringClass(other) +- if not isinstance(other, ParserElement): +- raise TypeError( +- "Cannot combine element of type {} with ParserElement".format( +- type(other).__name__ +- ) +- ) +- return other ^ self +- +- def __and__(self, other) -> "ParserElement": +- """ +- Implementation of ``&`` operator - returns :class:`Each` +- """ +- if isinstance(other, str_type): +- other = self._literalStringClass(other) +- if not isinstance(other, ParserElement): +- raise TypeError( +- "Cannot combine element of type {} with ParserElement".format( +- type(other).__name__ +- ) +- ) +- return Each([self, other]) +- +- def __rand__(self, other) -> "ParserElement": +- """ +- Implementation of ``&`` operator when left operand is not a :class:`ParserElement` +- """ +- if isinstance(other, str_type): +- other = self._literalStringClass(other) +- if not isinstance(other, ParserElement): +- raise TypeError( +- "Cannot combine element of type {} with ParserElement".format( +- type(other).__name__ +- ) +- ) +- return other & self +- +- def __invert__(self) -> "ParserElement": +- """ +- Implementation of ``~`` operator - returns :class:`NotAny` +- """ +- return NotAny(self) +- +- # disable __iter__ to override legacy use of sequential access to __getitem__ to +- # iterate over a sequence +- __iter__ = None +- +- def __getitem__(self, key): +- """ +- use ``[]`` indexing notation as a short form for expression repetition: +- +- - ``expr[n]`` is equivalent to ``expr*n`` +- - ``expr[m, n]`` is equivalent to ``expr*(m, n)`` +- - ``expr[n, ...]`` or ``expr[n,]`` is equivalent +- to ``expr*n + ZeroOrMore(expr)`` +- (read as "at least n instances of ``expr``") +- - ``expr[..., n]`` is equivalent to ``expr*(0, n)`` +- (read as "0 to n instances of ``expr``") +- - ``expr[...]`` and ``expr[0, ...]`` are equivalent to ``ZeroOrMore(expr)`` +- - ``expr[1, ...]`` is equivalent to ``OneOrMore(expr)`` +- +- ``None`` may be used in place of ``...``. +- +- Note that ``expr[..., n]`` and ``expr[m, n]``do not raise an exception +- if more than ``n`` ``expr``s exist in the input stream. If this behavior is +- desired, then write ``expr[..., n] + ~expr``. +- """ +- +- # convert single arg keys to tuples +- try: +- if isinstance(key, str_type): +- key = (key,) +- iter(key) +- except TypeError: +- key = (key, key) +- +- if len(key) > 2: +- raise TypeError( +- "only 1 or 2 index arguments supported ({}{})".format( +- key[:5], "... [{}]".format(len(key)) if len(key) > 5 else "" +- ) +- ) +- +- # clip to 2 elements +- ret = self * tuple(key[:2]) +- return ret +- +- def __call__(self, name: str = None) -> "ParserElement": +- """ +- Shortcut for :class:`set_results_name`, with ``list_all_matches=False``. +- +- If ``name`` is given with a trailing ``'*'`` character, then ``list_all_matches`` will be +- passed as ``True``. +- +- If ``name` is omitted, same as calling :class:`copy`. 
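A runnable sketch of the ``[]`` repetition shorthand documented above, assuming pyparsing 3.x as ``pp``::

    import pyparsing as pp

    word = pp.Word(pp.alphas)
    print(word[2, 3].parse_string("a b c d", parse_all=False))  # -> ['a', 'b', 'c']
    print(word[1, ...].parse_string("x y z"))                   # -> ['x', 'y', 'z']

    # expr[..., n] stops collecting after n matches but does not forbid more input;
    # append ~expr to enforce a hard maximum
    at_most_two = word[..., 2] + ~word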
+-
+-        Example::
+-
+-            # these are equivalent
+-            userdata = Word(alphas).set_results_name("name") + Word(nums + "-").set_results_name("socsecno")
+-            userdata = Word(alphas)("name") + Word(nums + "-")("socsecno")
+-        """
+-        if name is not None:
+-            return self._setResultsName(name)
+-        else:
+-            return self.copy()
+-
+-    def suppress(self) -> "ParserElement":
+-        """
+-        Suppresses the output of this :class:`ParserElement`; useful to keep punctuation from
+-        cluttering up returned output.
+-        """
+-        return Suppress(self)
+-
+-    def ignore_whitespace(self, recursive: bool = True) -> "ParserElement":
+-        """
+-        Enables the skipping of whitespace before matching the characters in the
+-        :class:`ParserElement`'s defined pattern.
+-
+-        :param recursive: If ``True`` (the default), also enable whitespace skipping in child elements (if any)
+-        """
+-        self.skipWhitespace = True
+-        return self
+-
+-    def leave_whitespace(self, recursive: bool = True) -> "ParserElement":
+-        """
+-        Disables the skipping of whitespace before matching the characters in the
+-        :class:`ParserElement`'s defined pattern. This is normally only used internally by
+-        the pyparsing module, but may be needed in some whitespace-sensitive grammars.
+-
+-        :param recursive: If true (the default), also disable whitespace skipping in child elements (if any)
+-        """
+-        self.skipWhitespace = False
+-        return self
+-
+-    def set_whitespace_chars(
+-        self, chars: Union[Set[str], str], copy_defaults: bool = False
+-    ) -> "ParserElement":
+-        """
+-        Overrides the default whitespace chars
+-        """
+-        self.skipWhitespace = True
+-        self.whiteChars = set(chars)
+-        self.copyDefaultWhiteChars = copy_defaults
+-        return self
+-
+-    def parse_with_tabs(self) -> "ParserElement":
+-        """
+-        Overrides default behavior to expand ``<TAB>`` characters to spaces before parsing the input string.
+-        Must be called before ``parse_string`` when the input grammar contains elements that
+-        match ``<TAB>`` characters.
+-        """
+-        self.keepTabs = True
+-        return self
+-
+-    def ignore(self, other: "ParserElement") -> "ParserElement":
+-        """
+-        Define expression to be ignored (e.g., comments) while doing pattern
+-        matching; may be called repeatedly, to define multiple comment or other
+-        ignorable patterns.
+-
+-        Example::
+-
+-            patt = Word(alphas)[1, ...]
+-            patt.parse_string('ablaj /* comment */ lskjd')
+-            # -> ['ablaj']
+-
+-            patt.ignore(c_style_comment)
+-            patt.parse_string('ablaj /* comment */ lskjd')
+-            # -> ['ablaj', 'lskjd']
+-        """
+-        import typing
+-
+-        if isinstance(other, str_type):
+-            other = Suppress(other)
+-
+-        if isinstance(other, Suppress):
+-            if other not in self.ignoreExprs:
+-                self.ignoreExprs.append(other)
+-        else:
+-            self.ignoreExprs.append(Suppress(other.copy()))
+-        return self
+-
+-    def set_debug_actions(
+-        self,
+-        start_action: DebugStartAction,
+-        success_action: DebugSuccessAction,
+-        exception_action: DebugExceptionAction,
+-    ) -> "ParserElement":
+-        """
+-        Customize display of debugging messages while doing pattern matching:
+-
+-        - ``start_action`` - method to be called when an expression is about to be parsed;
+-          should have the signature ``fn(input_string: str, location: int, expression: ParserElement, cache_hit: bool)``
+-
+-        - ``success_action`` - method to be called when an expression has successfully parsed;
+-          should have the signature ``fn(input_string: str, start_location: int, end_location: int, expression: ParserElement, parsed_tokens: ParseResults, cache_hit: bool)``
+-
+-        - ``exception_action`` - method to be called when expression fails to parse;
+-          should have the signature ``fn(input_string: str, location: int, expression: ParserElement, exception: Exception, cache_hit: bool)``
+-        """
+-        self.debugActions = self.DebugActions(
+-            start_action or _default_start_debug_action,
+-            success_action or _default_success_debug_action,
+-            exception_action or _default_exception_debug_action,
+-        )
+-        self.debug = True
+-        return self
+-
+-    def set_debug(self, flag: bool = True) -> "ParserElement":
+-        """
+-        Enable display of debugging messages while doing pattern matching.
+-        Set ``flag`` to ``True`` to enable, ``False`` to disable.
+-
+-        Example::
+-
+-            wd = Word(alphas).set_name("alphaword")
+-            integer = Word(nums).set_name("numword")
+-            term = wd | integer
+-
+-            # turn on debugging for wd
+-            wd.set_debug()
+-
+-            term[1, ...].parse_string("abc 123 xyz 890")
+-
+-        prints::
+-
+-            Match alphaword at loc 0(1,1)
+-            Matched alphaword -> ['abc']
+-            Match alphaword at loc 3(1,4)
+-            Exception raised:Expected alphaword (at char 4), (line:1, col:5)
+-            Match alphaword at loc 7(1,8)
+-            Matched alphaword -> ['xyz']
+-            Match alphaword at loc 11(1,12)
+-            Exception raised:Expected alphaword (at char 12), (line:1, col:13)
+-            Match alphaword at loc 15(1,16)
+-            Exception raised:Expected alphaword (at char 15), (line:1, col:16)
+-
+-        The output shown is that produced by the default debug actions - custom debug actions can be
+-        specified using :class:`set_debug_actions`. Prior to attempting
+-        to match the ``wd`` expression, the debugging message ``"Match <exprname> at loc <n>(<line>,<col>)"``
+-        is shown. Then if the parse succeeds, a ``"Matched"`` message is shown, or an ``"Exception raised"``
+-        message is shown. Also note the use of :class:`set_name` to assign a human-readable name to the expression,
+-        which makes debugging and exception messages easier to understand - for instance, the default
+-        name created for the :class:`Word` expression without calling ``set_name`` is ``"W:(A-Za-z)"``.
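The three callbacks wired up by ``set_debug_actions`` can be sketched as below, assuming pyparsing 3.x as ``pp`` (the ``trace_*`` names are illustrative; the signatures follow the ones listed in the docstring above)::

    import pyparsing as pp

    def trace_try(instring, loc, expr, cache_hit):
        print("TRY  {} at {}".format(expr, loc))

    def trace_match(instring, start, end, expr, toks, cache_hit):
        print("OK   {} -> {}".format(expr, toks.as_list()))

    def trace_fail(instring, loc, expr, exc, cache_hit):
        print("FAIL {}: {}".format(expr, exc))

    wd = pp.Word(pp.alphas).set_name("word")
    wd.set_debug_actions(trace_try, trace_match, trace_fail)
    wd.parse_string("hello")  # prints a TRY line, then an OK line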
+- """ +- if flag: +- self.set_debug_actions( +- _default_start_debug_action, +- _default_success_debug_action, +- _default_exception_debug_action, +- ) +- else: +- self.debug = False +- return self +- +- @property +- def default_name(self) -> str: +- if self._defaultName is None: +- self._defaultName = self._generateDefaultName() +- return self._defaultName +- +- @abstractmethod +- def _generateDefaultName(self): +- """ +- Child classes must define this method, which defines how the ``default_name`` is set. +- """ +- +- def set_name(self, name: str) -> "ParserElement": +- """ +- Define name for this expression, makes debugging and exception messages clearer. +- Example:: +- Word(nums).parse_string("ABC") # -> Exception: Expected W:(0-9) (at char 0), (line:1, col:1) +- Word(nums).set_name("integer").parse_string("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) +- """ +- self.customName = name +- self.errmsg = "Expected " + self.name +- if __diag__.enable_debug_on_named_expressions: +- self.set_debug() +- return self +- +- @property +- def name(self) -> str: +- # This will use a user-defined name if available, but otherwise defaults back to the auto-generated name +- return self.customName if self.customName is not None else self.default_name +- +- def __str__(self) -> str: +- return self.name +- +- def __repr__(self) -> str: +- return str(self) +- +- def streamline(self) -> "ParserElement": +- self.streamlined = True +- self._defaultName = None +- return self +- +- def recurse(self) -> Sequence["ParserElement"]: +- return [] +- +- def _checkRecursion(self, parseElementList): +- subRecCheckList = parseElementList[:] + [self] +- for e in self.recurse(): +- e._checkRecursion(subRecCheckList) +- +- def validate(self, validateTrace=None) -> None: +- """ +- Check defined expressions for valid structure, check for infinite recursive definitions. +- """ +- self._checkRecursion([]) +- +- def parse_file( +- self, +- file_or_filename: Union[str, Path, TextIO], +- encoding: str = "utf-8", +- parse_all: bool = False, +- *, +- parseAll: bool = False, +- ) -> ParseResults: +- """ +- Execute the parse expression on the given file or filename. +- If a filename is specified (instead of a file object), +- the entire file is opened, read, and closed before parsing. +- """ +- parseAll = parseAll or parse_all +- try: +- file_contents = file_or_filename.read() +- except AttributeError: +- with open(file_or_filename, "r", encoding=encoding) as f: +- file_contents = f.read() +- try: +- return self.parse_string(file_contents, parseAll) +- except ParseBaseException as exc: +- if ParserElement.verbose_stacktrace: +- raise +- else: +- # catch and re-raise exception from here, clears out pyparsing internal stack trace +- raise exc.with_traceback(None) +- +- def __eq__(self, other): +- if self is other: +- return True +- elif isinstance(other, str_type): +- return self.matches(other, parse_all=True) +- elif isinstance(other, ParserElement): +- return vars(self) == vars(other) +- return False +- +- def __hash__(self): +- return id(self) +- +- def matches( +- self, test_string: str, parse_all: bool = True, *, parseAll: bool = True +- ) -> bool: +- """ +- Method for quick testing of a parser against a test string. Good for simple +- inline microtests of sub expressions while building up larger parser. 
+- +- Parameters: +- - ``test_string`` - to test against this expression for a match +- - ``parse_all`` - (default= ``True``) - flag to pass to :class:`parse_string` when running tests +- +- Example:: +- +- expr = Word(nums) +- assert expr.matches("100") +- """ +- parseAll = parseAll and parse_all +- try: +- self.parse_string(str(test_string), parse_all=parseAll) +- return True +- except ParseBaseException: +- return False +- +- def run_tests( +- self, +- tests: Union[str, List[str]], +- parse_all: bool = True, +- comment: typing.Optional[Union["ParserElement", str]] = "#", +- full_dump: bool = True, +- print_results: bool = True, +- failure_tests: bool = False, +- post_parse: Callable[[str, ParseResults], str] = None, +- file: typing.Optional[TextIO] = None, +- with_line_numbers: bool = False, +- *, +- parseAll: bool = True, +- fullDump: bool = True, +- printResults: bool = True, +- failureTests: bool = False, +- postParse: Callable[[str, ParseResults], str] = None, +- ) -> Tuple[bool, List[Tuple[str, Union[ParseResults, Exception]]]]: +- """ +- Execute the parse expression on a series of test strings, showing each +- test, the parsed results or where the parse failed. Quick and easy way to +- run a parse expression against a list of sample strings. +- +- Parameters: +- - ``tests`` - a list of separate test strings, or a multiline string of test strings +- - ``parse_all`` - (default= ``True``) - flag to pass to :class:`parse_string` when running tests +- - ``comment`` - (default= ``'#'``) - expression for indicating embedded comments in the test +- string; pass None to disable comment filtering +- - ``full_dump`` - (default= ``True``) - dump results as list followed by results names in nested outline; +- if False, only dump nested list +- - ``print_results`` - (default= ``True``) prints test output to stdout +- - ``failure_tests`` - (default= ``False``) indicates if these tests are expected to fail parsing +- - ``post_parse`` - (default= ``None``) optional callback for successful parse results; called as +- `fn(test_string, parse_results)` and returns a string to be added to the test output +- - ``file`` - (default= ``None``) optional file-like object to which test output will be written; +- if None, will default to ``sys.stdout`` +- - ``with_line_numbers`` - default= ``False``) show test strings with line and column numbers +- +- Returns: a (success, results) tuple, where success indicates that all tests succeeded +- (or failed if ``failure_tests`` is True), and the results contain a list of lines of each +- test's output +- +- Example:: +- +- number_expr = pyparsing_common.number.copy() +- +- result = number_expr.run_tests(''' +- # unsigned integer +- 100 +- # negative integer +- -100 +- # float with scientific notation +- 6.02e23 +- # integer with scientific notation +- 1e-12 +- ''') +- print("Success" if result[0] else "Failed!") +- +- result = number_expr.run_tests(''' +- # stray character +- 100Z +- # missing leading digit before '.' +- -.100 +- # too many '.' +- 3.14.159 +- ''', failure_tests=True) +- print("Success" if result[0] else "Failed!") +- +- prints:: +- +- # unsigned integer +- 100 +- [100] +- +- # negative integer +- -100 +- [-100] +- +- # float with scientific notation +- 6.02e23 +- [6.02e+23] +- +- # integer with scientific notation +- 1e-12 +- [1e-12] +- +- Success +- +- # stray character +- 100Z +- ^ +- FAIL: Expected end of text (at char 3), (line:1, col:4) +- +- # missing leading digit before '.' 
+- -.100 +- ^ +- FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1) +- +- # too many '.' +- 3.14.159 +- ^ +- FAIL: Expected end of text (at char 4), (line:1, col:5) +- +- Success +- +- Each test string must be on a single line. If you want to test a string that spans multiple +- lines, create a test like this:: +- +- expr.run_tests(r"this is a test\\n of strings that spans \\n 3 lines") +- +- (Note that this is a raw string literal, you must include the leading ``'r'``.) +- """ +- from .testing import pyparsing_test +- +- parseAll = parseAll and parse_all +- fullDump = fullDump and full_dump +- printResults = printResults and print_results +- failureTests = failureTests or failure_tests +- postParse = postParse or post_parse +- if isinstance(tests, str_type): +- line_strip = type(tests).strip +- tests = [line_strip(test_line) for test_line in tests.rstrip().splitlines()] +- if isinstance(comment, str_type): +- comment = Literal(comment) +- if file is None: +- file = sys.stdout +- print_ = file.write +- +- result: Union[ParseResults, Exception] +- allResults = [] +- comments = [] +- success = True +- NL = Literal(r"\n").add_parse_action(replace_with("\n")).ignore(quoted_string) +- BOM = "\ufeff" +- for t in tests: +- if comment is not None and comment.matches(t, False) or comments and not t: +- comments.append( +- pyparsing_test.with_line_numbers(t) if with_line_numbers else t +- ) +- continue +- if not t: +- continue +- out = [ +- "\n" + "\n".join(comments) if comments else "", +- pyparsing_test.with_line_numbers(t) if with_line_numbers else t, +- ] +- comments = [] +- try: +- # convert newline marks to actual newlines, and strip leading BOM if present +- t = NL.transform_string(t.lstrip(BOM)) +- result = self.parse_string(t, parse_all=parseAll) +- except ParseBaseException as pe: +- fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else "" +- out.append(pe.explain()) +- out.append("FAIL: " + str(pe)) +- if ParserElement.verbose_stacktrace: +- out.extend(traceback.format_tb(pe.__traceback__)) +- success = success and failureTests +- result = pe +- except Exception as exc: +- out.append("FAIL-EXCEPTION: {}: {}".format(type(exc).__name__, exc)) +- if ParserElement.verbose_stacktrace: +- out.extend(traceback.format_tb(exc.__traceback__)) +- success = success and failureTests +- result = exc +- else: +- success = success and not failureTests +- if postParse is not None: +- try: +- pp_value = postParse(t, result) +- if pp_value is not None: +- if isinstance(pp_value, ParseResults): +- out.append(pp_value.dump()) +- else: +- out.append(str(pp_value)) +- else: +- out.append(result.dump()) +- except Exception as e: +- out.append(result.dump(full=fullDump)) +- out.append( +- "{} failed: {}: {}".format( +- postParse.__name__, type(e).__name__, e +- ) +- ) +- else: +- out.append(result.dump(full=fullDump)) +- out.append("") +- +- if printResults: +- print_("\n".join(out)) +- +- allResults.append((t, result)) +- +- return success, allResults +- +- def create_diagram( +- self, +- output_html: Union[TextIO, Path, str], +- vertical: int = 3, +- show_results_names: bool = False, +- show_groups: bool = False, +- **kwargs, +- ) -> None: +- """ +- Create a railroad diagram for the parser. 
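A typical ``create_diagram`` call, assuming pyparsing 3.x with the optional diagram extras installed (``pip install pyparsing[diagrams]``); the output filename is illustrative::

    import pyparsing as pp

    greet = pp.Word(pp.alphas)("greeting") + "," + pp.Word(pp.alphas)("name") + "!"
    # writes a self-contained railroad-diagram HTML file
    greet.create_diagram("greet_diagram.html", show_results_names=True)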
+- +- Parameters: +- - output_html (str or file-like object) - output target for generated +- diagram HTML +- - vertical (int) - threshold for formatting multiple alternatives vertically +- instead of horizontally (default=3) +- - show_results_names - bool flag whether diagram should show annotations for +- defined results names +- - show_groups - bool flag whether groups should be highlighted with an unlabeled surrounding box +- Additional diagram-formatting keyword arguments can also be included; +- see railroad.Diagram class. +- """ +- +- try: +- from .diagram import to_railroad, railroad_to_html +- except ImportError as ie: +- raise Exception( +- "must ``pip install pyparsing[diagrams]`` to generate parser railroad diagrams" +- ) from ie +- +- self.streamline() +- +- railroad = to_railroad( +- self, +- vertical=vertical, +- show_results_names=show_results_names, +- show_groups=show_groups, +- diagram_kwargs=kwargs, +- ) +- if isinstance(output_html, (str, Path)): +- with open(output_html, "w", encoding="utf-8") as diag_file: +- diag_file.write(railroad_to_html(railroad)) +- else: +- # we were passed a file-like object, just write to it +- output_html.write(railroad_to_html(railroad)) +- +- setDefaultWhitespaceChars = set_default_whitespace_chars +- inlineLiteralsUsing = inline_literals_using +- setResultsName = set_results_name +- setBreak = set_break +- setParseAction = set_parse_action +- addParseAction = add_parse_action +- addCondition = add_condition +- setFailAction = set_fail_action +- tryParse = try_parse +- canParseNext = can_parse_next +- resetCache = reset_cache +- enableLeftRecursion = enable_left_recursion +- enablePackrat = enable_packrat +- parseString = parse_string +- scanString = scan_string +- searchString = search_string +- transformString = transform_string +- setWhitespaceChars = set_whitespace_chars +- parseWithTabs = parse_with_tabs +- setDebugActions = set_debug_actions +- setDebug = set_debug +- defaultName = default_name +- setName = set_name +- parseFile = parse_file +- runTests = run_tests +- ignoreWhitespace = ignore_whitespace +- leaveWhitespace = leave_whitespace +- +- +-class _PendingSkip(ParserElement): +- # internal placeholder class to hold a place were '...' is added to a parser element, +- # once another ParserElement is added, this placeholder will be replaced with a SkipTo +- def __init__(self, expr: ParserElement, must_skip: bool = False): +- super().__init__() +- self.anchor = expr +- self.must_skip = must_skip +- +- def _generateDefaultName(self): +- return str(self.anchor + Empty()).replace("Empty", "...") +- +- def __add__(self, other) -> "ParserElement": +- skipper = SkipTo(other).set_name("...")("_skipped*") +- if self.must_skip: +- +- def must_skip(t): +- if not t._skipped or t._skipped.as_list() == [""]: +- del t[0] +- t.pop("_skipped", None) +- +- def show_skip(t): +- if t._skipped.as_list()[-1:] == [""]: +- t.pop("_skipped") +- t["_skipped"] = "missing <" + repr(self.anchor) + ">" +- +- return ( +- self.anchor + skipper().add_parse_action(must_skip) +- | skipper().add_parse_action(show_skip) +- ) + other +- +- return self.anchor + skipper + other +- +- def __repr__(self): +- return self.defaultName +- +- def parseImpl(self, *args): +- raise Exception( +- "use of `...` expression without following SkipTo target expression" +- ) +- +- +-class Token(ParserElement): +- """Abstract :class:`ParserElement` subclass, for defining atomic +- matching patterns. 
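A minimal sketch of the subclassing pattern: set ``errmsg`` and implement ``parseImpl`` to return ``(new_loc, tokens)`` or raise a ``ParseException`` (assuming pyparsing 3.x as ``pp``; ``AtSign`` is an illustrative class)::

    import pyparsing as pp

    class AtSign(pp.Token):
        # matches a single literal '@' character
        def __init__(self):
            super().__init__()
            self.errmsg = "Expected '@'"

        def parseImpl(self, instring, loc, doActions=True):
            if loc < len(instring) and instring[loc] == "@":
                return loc + 1, "@"
            raise pp.ParseException(instring, loc, self.errmsg, self)

    handle = AtSign() + pp.Word(pp.alphanums)
    print(handle.parse_string("@octocat"))  # -> ['@', 'octocat']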
+- """ +- +- def __init__(self): +- super().__init__(savelist=False) +- +- def _generateDefaultName(self): +- return type(self).__name__ +- +- +-class Empty(Token): +- """ +- An empty token, will always match. +- """ +- +- def __init__(self): +- super().__init__() +- self.mayReturnEmpty = True +- self.mayIndexError = False +- +- +-class NoMatch(Token): +- """ +- A token that will never match. +- """ +- +- def __init__(self): +- super().__init__() +- self.mayReturnEmpty = True +- self.mayIndexError = False +- self.errmsg = "Unmatchable token" +- +- def parseImpl(self, instring, loc, doActions=True): +- raise ParseException(instring, loc, self.errmsg, self) +- +- +-class Literal(Token): +- """ +- Token to exactly match a specified string. +- +- Example:: +- +- Literal('blah').parse_string('blah') # -> ['blah'] +- Literal('blah').parse_string('blahfooblah') # -> ['blah'] +- Literal('blah').parse_string('bla') # -> Exception: Expected "blah" +- +- For case-insensitive matching, use :class:`CaselessLiteral`. +- +- For keyword matching (force word break before and after the matched string), +- use :class:`Keyword` or :class:`CaselessKeyword`. +- """ +- +- def __init__(self, match_string: str = "", *, matchString: str = ""): +- super().__init__() +- match_string = matchString or match_string +- self.match = match_string +- self.matchLen = len(match_string) +- try: +- self.firstMatchChar = match_string[0] +- except IndexError: +- raise ValueError("null string passed to Literal; use Empty() instead") +- self.errmsg = "Expected " + self.name +- self.mayReturnEmpty = False +- self.mayIndexError = False +- +- # Performance tuning: modify __class__ to select +- # a parseImpl optimized for single-character check +- if self.matchLen == 1 and type(self) is Literal: +- self.__class__ = _SingleCharLiteral +- +- def _generateDefaultName(self): +- return repr(self.match) +- +- def parseImpl(self, instring, loc, doActions=True): +- if instring[loc] == self.firstMatchChar and instring.startswith( +- self.match, loc +- ): +- return loc + self.matchLen, self.match +- raise ParseException(instring, loc, self.errmsg, self) +- +- +-class _SingleCharLiteral(Literal): +- def parseImpl(self, instring, loc, doActions=True): +- if instring[loc] == self.firstMatchChar: +- return loc + 1, self.match +- raise ParseException(instring, loc, self.errmsg, self) +- +- +-ParserElement._literalStringClass = Literal +- +- +-class Keyword(Token): +- """ +- Token to exactly match a specified string as a keyword, that is, +- it must be immediately followed by a non-keyword character. Compare +- with :class:`Literal`: +- +- - ``Literal("if")`` will match the leading ``'if'`` in +- ``'ifAndOnlyIf'``. +- - ``Keyword("if")`` will not; it will only match the leading +- ``'if'`` in ``'if x=1'``, or ``'if(y==2)'`` +- +- Accepts two optional constructor arguments in addition to the +- keyword string: +- +- - ``identChars`` is a string of characters that would be valid +- identifier characters, defaulting to all alphanumerics + "_" and +- "$" +- - ``caseless`` allows case-insensitive matching, default is ``False``. +- +- Example:: +- +- Keyword("start").parse_string("start") # -> ['start'] +- Keyword("start").parse_string("starting") # -> Exception +- +- For case-insensitive matching, use :class:`CaselessKeyword`. 
+- """ +- +- DEFAULT_KEYWORD_CHARS = alphanums + "_$" +- +- def __init__( +- self, +- match_string: str = "", +- ident_chars: typing.Optional[str] = None, +- caseless: bool = False, +- *, +- matchString: str = "", +- identChars: typing.Optional[str] = None, +- ): +- super().__init__() +- identChars = identChars or ident_chars +- if identChars is None: +- identChars = Keyword.DEFAULT_KEYWORD_CHARS +- match_string = matchString or match_string +- self.match = match_string +- self.matchLen = len(match_string) +- try: +- self.firstMatchChar = match_string[0] +- except IndexError: +- raise ValueError("null string passed to Keyword; use Empty() instead") +- self.errmsg = "Expected {} {}".format(type(self).__name__, self.name) +- self.mayReturnEmpty = False +- self.mayIndexError = False +- self.caseless = caseless +- if caseless: +- self.caselessmatch = match_string.upper() +- identChars = identChars.upper() +- self.identChars = set(identChars) +- +- def _generateDefaultName(self): +- return repr(self.match) +- +- def parseImpl(self, instring, loc, doActions=True): +- errmsg = self.errmsg +- errloc = loc +- if self.caseless: +- if instring[loc : loc + self.matchLen].upper() == self.caselessmatch: +- if loc == 0 or instring[loc - 1].upper() not in self.identChars: +- if ( +- loc >= len(instring) - self.matchLen +- or instring[loc + self.matchLen].upper() not in self.identChars +- ): +- return loc + self.matchLen, self.match +- else: +- # followed by keyword char +- errmsg += ", was immediately followed by keyword character" +- errloc = loc + self.matchLen +- else: +- # preceded by keyword char +- errmsg += ", keyword was immediately preceded by keyword character" +- errloc = loc - 1 +- # else no match just raise plain exception +- +- else: +- if ( +- instring[loc] == self.firstMatchChar +- and self.matchLen == 1 +- or instring.startswith(self.match, loc) +- ): +- if loc == 0 or instring[loc - 1] not in self.identChars: +- if ( +- loc >= len(instring) - self.matchLen +- or instring[loc + self.matchLen] not in self.identChars +- ): +- return loc + self.matchLen, self.match +- else: +- # followed by keyword char +- errmsg += ( +- ", keyword was immediately followed by keyword character" +- ) +- errloc = loc + self.matchLen +- else: +- # preceded by keyword char +- errmsg += ", keyword was immediately preceded by keyword character" +- errloc = loc - 1 +- # else no match just raise plain exception +- +- raise ParseException(instring, errloc, errmsg, self) +- +- @staticmethod +- def set_default_keyword_chars(chars) -> None: +- """ +- Overrides the default characters used by :class:`Keyword` expressions. +- """ +- Keyword.DEFAULT_KEYWORD_CHARS = chars +- +- setDefaultKeywordChars = set_default_keyword_chars +- +- +-class CaselessLiteral(Literal): +- """ +- Token to match a specified string, ignoring case of letters. +- Note: the matched results will always be in the case of the given +- match string, NOT the case of the input text. +- +- Example:: +- +- CaselessLiteral("CMD")[1, ...].parse_string("cmd CMD Cmd10") +- # -> ['CMD', 'CMD', 'CMD'] +- +- (Contrast with example for :class:`CaselessKeyword`.) +- """ +- +- def __init__(self, match_string: str = "", *, matchString: str = ""): +- match_string = matchString or match_string +- super().__init__(match_string.upper()) +- # Preserve the defining literal. 
+- self.returnString = match_string +- self.errmsg = "Expected " + self.name +- +- def parseImpl(self, instring, loc, doActions=True): +- if instring[loc : loc + self.matchLen].upper() == self.match: +- return loc + self.matchLen, self.returnString +- raise ParseException(instring, loc, self.errmsg, self) +- +- +-class CaselessKeyword(Keyword): +- """ +- Caseless version of :class:`Keyword`. +- +- Example:: +- +- CaselessKeyword("CMD")[1, ...].parse_string("cmd CMD Cmd10") +- # -> ['CMD', 'CMD'] +- +- (Contrast with example for :class:`CaselessLiteral`.) +- """ +- +- def __init__( +- self, +- match_string: str = "", +- ident_chars: typing.Optional[str] = None, +- *, +- matchString: str = "", +- identChars: typing.Optional[str] = None, +- ): +- identChars = identChars or ident_chars +- match_string = matchString or match_string +- super().__init__(match_string, identChars, caseless=True) +- +- +-class CloseMatch(Token): +- """A variation on :class:`Literal` which matches "close" matches, +- that is, strings with at most 'n' mismatching characters. +- :class:`CloseMatch` takes parameters: +- +- - ``match_string`` - string to be matched +- - ``caseless`` - a boolean indicating whether to ignore casing when comparing characters +- - ``max_mismatches`` - (``default=1``) maximum number of +- mismatches allowed to count as a match +- +- The results from a successful parse will contain the matched text +- from the input string and the following named results: +- +- - ``mismatches`` - a list of the positions within the +- match_string where mismatches were found +- - ``original`` - the original match_string used to compare +- against the input string +- +- If ``mismatches`` is an empty list, then the match was an exact +- match. +- +- Example:: +- +- patt = CloseMatch("ATCATCGAATGGA") +- patt.parse_string("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']}) +- patt.parse_string("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1) +- +- # exact match +- patt.parse_string("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']}) +- +- # close match allowing up to 2 mismatches +- patt = CloseMatch("ATCATCGAATGGA", max_mismatches=2) +- patt.parse_string("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']}) +- """ +- +- def __init__( +- self, +- match_string: str, +- max_mismatches: int = None, +- *, +- maxMismatches: int = 1, +- caseless=False, +- ): +- maxMismatches = max_mismatches if max_mismatches is not None else maxMismatches +- super().__init__() +- self.match_string = match_string +- self.maxMismatches = maxMismatches +- self.errmsg = "Expected {!r} (with up to {} mismatches)".format( +- self.match_string, self.maxMismatches +- ) +- self.caseless = caseless +- self.mayIndexError = False +- self.mayReturnEmpty = False +- +- def _generateDefaultName(self): +- return "{}:{!r}".format(type(self).__name__, self.match_string) +- +- def parseImpl(self, instring, loc, doActions=True): +- start = loc +- instrlen = len(instring) +- maxloc = start + len(self.match_string) +- +- if maxloc <= instrlen: +- match_string = self.match_string +- match_stringloc = 0 +- mismatches = [] +- maxMismatches = self.maxMismatches +- +- for match_stringloc, s_m in enumerate( +- zip(instring[loc:maxloc], match_string) +- ): +- src, mat = s_m +- if self.caseless: +- src, mat = src.lower(), mat.lower() +- +- if src != mat: +- 
mismatches.append(match_stringloc) +- if len(mismatches) > maxMismatches: +- break +- else: +- loc = start + match_stringloc + 1 +- results = ParseResults([instring[start:loc]]) +- results["original"] = match_string +- results["mismatches"] = mismatches +- return loc, results +- +- raise ParseException(instring, loc, self.errmsg, self) +- +- +-class Word(Token): +- """Token for matching words composed of allowed character sets. +- Parameters: +- - ``init_chars`` - string of all characters that should be used to +- match as a word; "ABC" will match "AAA", "ABAB", "CBAC", etc.; +- if ``body_chars`` is also specified, then this is the string of +- initial characters +- - ``body_chars`` - string of characters that +- can be used for matching after a matched initial character as +- given in ``init_chars``; if omitted, same as the initial characters +- (default=``None``) +- - ``min`` - minimum number of characters to match (default=1) +- - ``max`` - maximum number of characters to match (default=0) +- - ``exact`` - exact number of characters to match (default=0) +- - ``as_keyword`` - match as a keyword (default=``False``) +- - ``exclude_chars`` - characters that might be +- found in the input ``body_chars`` string but which should not be +- accepted for matching ;useful to define a word of all +- printables except for one or two characters, for instance +- (default=``None``) +- +- :class:`srange` is useful for defining custom character set strings +- for defining :class:`Word` expressions, using range notation from +- regular expression character sets. +- +- A common mistake is to use :class:`Word` to match a specific literal +- string, as in ``Word("Address")``. Remember that :class:`Word` +- uses the string argument to define *sets* of matchable characters. +- This expression would match "Add", "AAA", "dAred", or any other word +- made up of the characters 'A', 'd', 'r', 'e', and 's'. To match an +- exact literal string, use :class:`Literal` or :class:`Keyword`. +- +- pyparsing includes helper strings for building Words: +- +- - :class:`alphas` +- - :class:`nums` +- - :class:`alphanums` +- - :class:`hexnums` +- - :class:`alphas8bit` (alphabetic characters in ASCII range 128-255 +- - accented, tilded, umlauted, etc.) +- - :class:`punc8bit` (non-alphabetic characters in ASCII range +- 128-255 - currency, symbols, superscripts, diacriticals, etc.) +- - :class:`printables` (any non-whitespace character) +- +- ``alphas``, ``nums``, and ``printables`` are also defined in several +- Unicode sets - see :class:`pyparsing_unicode``. 
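
# ---- editorial aside (not part of the patch) ------------------------------
# A short sketch of the Word-vs-Literal pitfall warned about above:
# Word("Address") defines the *character set* {'A', 'd', 'r', 'e', 's'},
# not the literal text "Address".
from pyparsing import Literal, Word

print(Word("Address").parse_string("dAred"))       # -> ['dAred'] (surprise!)
print(Literal("Address").parse_string("Address"))  # -> ['Address']
# ----------------------------------------------------------------------------
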
+- +- Example:: +- +- # a word composed of digits +- integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9")) +- +- # a word with a leading capital, and zero or more lowercase +- capital_word = Word(alphas.upper(), alphas.lower()) +- +- # hostnames are alphanumeric, with leading alpha, and '-' +- hostname = Word(alphas, alphanums + '-') +- +- # roman numeral (not a strict parser, accepts invalid mix of characters) +- roman = Word("IVXLCDM") +- +- # any string of non-whitespace characters, except for ',' +- csv_value = Word(printables, exclude_chars=",") +- """ +- +- def __init__( +- self, +- init_chars: str = "", +- body_chars: typing.Optional[str] = None, +- min: int = 1, +- max: int = 0, +- exact: int = 0, +- as_keyword: bool = False, +- exclude_chars: typing.Optional[str] = None, +- *, +- initChars: typing.Optional[str] = None, +- bodyChars: typing.Optional[str] = None, +- asKeyword: bool = False, +- excludeChars: typing.Optional[str] = None, +- ): +- initChars = initChars or init_chars +- bodyChars = bodyChars or body_chars +- asKeyword = asKeyword or as_keyword +- excludeChars = excludeChars or exclude_chars +- super().__init__() +- if not initChars: +- raise ValueError( +- "invalid {}, initChars cannot be empty string".format( +- type(self).__name__ +- ) +- ) +- +- initChars = set(initChars) +- self.initChars = initChars +- if excludeChars: +- excludeChars = set(excludeChars) +- initChars -= excludeChars +- if bodyChars: +- bodyChars = set(bodyChars) - excludeChars +- self.initCharsOrig = "".join(sorted(initChars)) +- +- if bodyChars: +- self.bodyCharsOrig = "".join(sorted(bodyChars)) +- self.bodyChars = set(bodyChars) +- else: +- self.bodyCharsOrig = "".join(sorted(initChars)) +- self.bodyChars = set(initChars) +- +- self.maxSpecified = max > 0 +- +- if min < 1: +- raise ValueError( +- "cannot specify a minimum length < 1; use Opt(Word()) if zero-length word is permitted" +- ) +- +- self.minLen = min +- +- if max > 0: +- self.maxLen = max +- else: +- self.maxLen = _MAX_INT +- +- if exact > 0: +- self.maxLen = exact +- self.minLen = exact +- +- self.errmsg = "Expected " + self.name +- self.mayIndexError = False +- self.asKeyword = asKeyword +- +- # see if we can make a regex for this Word +- if " " not in self.initChars | self.bodyChars and (min == 1 and exact == 0): +- if self.bodyChars == self.initChars: +- if max == 0: +- repeat = "+" +- elif max == 1: +- repeat = "" +- else: +- repeat = "{{{},{}}}".format( +- self.minLen, "" if self.maxLen == _MAX_INT else self.maxLen +- ) +- self.reString = "[{}]{}".format( +- _collapse_string_to_ranges(self.initChars), +- repeat, +- ) +- elif len(self.initChars) == 1: +- if max == 0: +- repeat = "*" +- else: +- repeat = "{{0,{}}}".format(max - 1) +- self.reString = "{}[{}]{}".format( +- re.escape(self.initCharsOrig), +- _collapse_string_to_ranges(self.bodyChars), +- repeat, +- ) +- else: +- if max == 0: +- repeat = "*" +- elif max == 2: +- repeat = "" +- else: +- repeat = "{{0,{}}}".format(max - 1) +- self.reString = "[{}][{}]{}".format( +- _collapse_string_to_ranges(self.initChars), +- _collapse_string_to_ranges(self.bodyChars), +- repeat, +- ) +- if self.asKeyword: +- self.reString = r"\b" + self.reString + r"\b" +- +- try: +- self.re = re.compile(self.reString) +- except re.error: +- self.re = None +- else: +- self.re_match = self.re.match +- self.__class__ = _WordRegex +- +- def _generateDefaultName(self): +- def charsAsStr(s): +- max_repr_len = 16 +- s = _collapse_string_to_ranges(s, re_escape=False) +- if len(s) > 
max_repr_len: +- return s[: max_repr_len - 3] + "..." +- else: +- return s +- +- if self.initChars != self.bodyChars: +- base = "W:({}, {})".format( +- charsAsStr(self.initChars), charsAsStr(self.bodyChars) +- ) +- else: +- base = "W:({})".format(charsAsStr(self.initChars)) +- +- # add length specification +- if self.minLen > 1 or self.maxLen != _MAX_INT: +- if self.minLen == self.maxLen: +- if self.minLen == 1: +- return base[2:] +- else: +- return base + "{{{}}}".format(self.minLen) +- elif self.maxLen == _MAX_INT: +- return base + "{{{},...}}".format(self.minLen) +- else: +- return base + "{{{},{}}}".format(self.minLen, self.maxLen) +- return base +- +- def parseImpl(self, instring, loc, doActions=True): +- if instring[loc] not in self.initChars: +- raise ParseException(instring, loc, self.errmsg, self) +- +- start = loc +- loc += 1 +- instrlen = len(instring) +- bodychars = self.bodyChars +- maxloc = start + self.maxLen +- maxloc = min(maxloc, instrlen) +- while loc < maxloc and instring[loc] in bodychars: +- loc += 1 +- +- throwException = False +- if loc - start < self.minLen: +- throwException = True +- elif self.maxSpecified and loc < instrlen and instring[loc] in bodychars: +- throwException = True +- elif self.asKeyword: +- if ( +- start > 0 +- and instring[start - 1] in bodychars +- or loc < instrlen +- and instring[loc] in bodychars +- ): +- throwException = True +- +- if throwException: +- raise ParseException(instring, loc, self.errmsg, self) +- +- return loc, instring[start:loc] +- +- +-class _WordRegex(Word): +- def parseImpl(self, instring, loc, doActions=True): +- result = self.re_match(instring, loc) +- if not result: +- raise ParseException(instring, loc, self.errmsg, self) +- +- loc = result.end() +- return loc, result.group() +- +- +-class Char(_WordRegex): +- """A short-cut class for defining :class:`Word` ``(characters, exact=1)``, +- when defining a match of any single character in a string of +- characters. +- """ +- +- def __init__( +- self, +- charset: str, +- as_keyword: bool = False, +- exclude_chars: typing.Optional[str] = None, +- *, +- asKeyword: bool = False, +- excludeChars: typing.Optional[str] = None, +- ): +- asKeyword = asKeyword or as_keyword +- excludeChars = excludeChars or exclude_chars +- super().__init__( +- charset, exact=1, asKeyword=asKeyword, excludeChars=excludeChars +- ) +- self.reString = "[{}]".format(_collapse_string_to_ranges(self.initChars)) +- if asKeyword: +- self.reString = r"\b{}\b".format(self.reString) +- self.re = re.compile(self.reString) +- self.re_match = self.re.match +- +- +-class Regex(Token): +- r"""Token for matching strings that match a given regular +- expression. Defined with string specifying the regular expression in +- a form recognized by the stdlib Python `re module <https://docs.python.org/3/library/re.html>`_. +- If the given regex contains named groups (defined using ``(?P<name>...)``), +- these will be preserved as named :class:`ParseResults`. +- +- If instead of the Python stdlib ``re`` module you wish to use a different RE module +- (such as the ``regex`` module), you can do so by building your ``Regex`` object with +- a compiled RE that was compiled using ``regex``.
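
# ---- editorial aside (not part of the patch) ------------------------------
# A minimal sketch of the Char shortcut defined just above (before the Regex
# class), assuming pyparsing 3.x; Char(s) is equivalent to Word(s, exact=1).
from pyparsing import Char

vowel = Char("aeiou")                    # 'vowel' is an illustrative name
print(vowel.parse_string("e"))           # -> ['e']
print(vowel[1, ...].parse_string("ea"))  # -> ['e', 'a']
# ----------------------------------------------------------------------------
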
+- +- Example:: +- +- realnum = Regex(r"[+-]?\d+\.\d*") +- # ref: https://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression +- roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})") +- +- # named fields in a regex will be returned as named results +- date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)') +- +- # the Regex class will accept re's compiled using the regex module +- import regex +- parser = pp.Regex(regex.compile(r'[0-9]')) +- """ +- +- def __init__( +- self, +- pattern: Any, +- flags: Union[re.RegexFlag, int] = 0, +- as_group_list: bool = False, +- as_match: bool = False, +- *, +- asGroupList: bool = False, +- asMatch: bool = False, +- ): +- """The parameters ``pattern`` and ``flags`` are passed +- to the ``re.compile()`` function as-is. See the Python +- `re module <https://docs.python.org/3/library/re.html>`_ module for an +- explanation of the acceptable patterns and flags. +- """ +- super().__init__() +- asGroupList = asGroupList or as_group_list +- asMatch = asMatch or as_match +- +- if isinstance(pattern, str_type): +- if not pattern: +- raise ValueError("null string passed to Regex; use Empty() instead") +- +- self._re = None +- self.reString = self.pattern = pattern +- self.flags = flags +- +- elif hasattr(pattern, "pattern") and hasattr(pattern, "match"): +- self._re = pattern +- self.pattern = self.reString = pattern.pattern +- self.flags = flags +- +- else: +- raise TypeError( +- "Regex may only be constructed with a string or a compiled RE object" +- ) +- +- self.errmsg = "Expected " + self.name +- self.mayIndexError = False +- self.asGroupList = asGroupList +- self.asMatch = asMatch +- if self.asGroupList: +- self.parseImpl = self.parseImplAsGroupList +- if self.asMatch: +- self.parseImpl = self.parseImplAsMatch +- +- @cached_property +- def re(self): +- if self._re: +- return self._re +- else: +- try: +- return re.compile(self.pattern, self.flags) +- except re.error: +- raise ValueError( +- "invalid pattern ({!r}) passed to Regex".format(self.pattern) +- ) +- +- @cached_property +- def re_match(self): +- return self.re.match +- +- @cached_property +- def mayReturnEmpty(self): +- return self.re_match("") is not None +- +- def _generateDefaultName(self): +- return "Re:({})".format(repr(self.pattern).replace("\\\\", "\\")) +- +- def parseImpl(self, instring, loc, doActions=True): +- result = self.re_match(instring, loc) +- if not result: +- raise ParseException(instring, loc, self.errmsg, self) +- +- loc = result.end() +- ret = ParseResults(result.group()) +- d = result.groupdict() +- if d: +- for k, v in d.items(): +- ret[k] = v +- return loc, ret +- +- def parseImplAsGroupList(self, instring, loc, doActions=True): +- result = self.re_match(instring, loc) +- if not result: +- raise ParseException(instring, loc, self.errmsg, self) +- +- loc = result.end() +- ret = result.groups() +- return loc, ret +- +- def parseImplAsMatch(self, instring, loc, doActions=True): +- result = self.re_match(instring, loc) +- if not result: +- raise ParseException(instring, loc, self.errmsg, self) +- +- loc = result.end() +- ret = result +- return loc, ret +- +- def sub(self, repl: str) -> ParserElement: +- r""" +- Return :class:`Regex` with an attached parse action to transform the parsed +- result as if called using `re.sub(expr, repl, string) <https://docs.python.org/3/library/re.html#re.sub>`_. +- +- Example:: +- +- make_html = Regex(r"(\w+):(.*?):").sub(r"<\1>\2</\1>") +- print(make_html.transform_string("h1:main title:")) +- # prints "<h1>main title</h1>"
+- """ +- if self.asGroupList: +- raise TypeError("cannot use sub() with Regex(asGroupList=True)") +- +- if self.asMatch and callable(repl): +- raise TypeError("cannot use sub() with a callable with Regex(asMatch=True)") +- +- if self.asMatch: +- +- def pa(tokens): +- return tokens[0].expand(repl) +- +- else: +- +- def pa(tokens): +- return self.re.sub(repl, tokens[0]) +- +- return self.add_parse_action(pa) +- +- +-class QuotedString(Token): +- r""" +- Token for matching strings that are delimited by quoting characters. +- +- Defined with the following parameters: +- +- - ``quote_char`` - string of one or more characters defining the +- quote delimiting string +- - ``esc_char`` - character to escape quotes, typically backslash +- (default= ``None``) +- - ``esc_quote`` - special quote sequence to escape an embedded quote +- string (such as SQL's ``""`` to escape an embedded ``"``) +- (default= ``None``) +- - ``multiline`` - boolean indicating whether quotes can span +- multiple lines (default= ``False``) +- - ``unquote_results`` - boolean indicating whether the matched text +- should be unquoted (default= ``True``) +- - ``end_quote_char`` - string of one or more characters defining the +- end of the quote delimited string (default= ``None`` => same as +- quote_char) +- - ``convert_whitespace_escapes`` - convert escaped whitespace +- (``'\t'``, ``'\n'``, etc.) to actual whitespace +- (default= ``True``) +- +- Example:: +- +- qs = QuotedString('"') +- print(qs.search_string('lsjdf "This is the quote" sldjf')) +- complex_qs = QuotedString('{{', end_quote_char='}}') +- print(complex_qs.search_string('lsjdf {{This is the "quote"}} sldjf')) +- sql_qs = QuotedString('"', esc_quote='""') +- print(sql_qs.search_string('lsjdf "This is the quote with ""embedded"" quotes" sldjf')) +- +- prints:: +- +- [['This is the quote']] +- [['This is the "quote"']] +- [['This is the quote with "embedded" quotes']] +- """ +- ws_map = ((r"\t", "\t"), (r"\n", "\n"), (r"\f", "\f"), (r"\r", "\r")) +- +- def __init__( +- self, +- quote_char: str = "", +- esc_char: typing.Optional[str] = None, +- esc_quote: typing.Optional[str] = None, +- multiline: bool = False, +- unquote_results: bool = True, +- end_quote_char: typing.Optional[str] = None, +- convert_whitespace_escapes: bool = True, +- *, +- quoteChar: str = "", +- escChar: typing.Optional[str] = None, +- escQuote: typing.Optional[str] = None, +- unquoteResults: bool = True, +- endQuoteChar: typing.Optional[str] = None, +- convertWhitespaceEscapes: bool = True, +- ): +- super().__init__() +- escChar = escChar or esc_char +- escQuote = escQuote or esc_quote +- unquoteResults = unquoteResults and unquote_results +- endQuoteChar = endQuoteChar or end_quote_char +- convertWhitespaceEscapes = ( +- convertWhitespaceEscapes and convert_whitespace_escapes +- ) +- quote_char = quoteChar or quote_char +- +- # remove white space from quote chars - wont work anyway +- quote_char = quote_char.strip() +- if not quote_char: +- raise ValueError("quote_char cannot be the empty string") +- +- if endQuoteChar is None: +- endQuoteChar = quote_char +- else: +- endQuoteChar = endQuoteChar.strip() +- if not endQuoteChar: +- raise ValueError("endQuoteChar cannot be the empty string") +- +- self.quoteChar = quote_char +- self.quoteCharLen = len(quote_char) +- self.firstQuoteChar = quote_char[0] +- self.endQuoteChar = endQuoteChar +- self.endQuoteCharLen = len(endQuoteChar) +- self.escChar = escChar +- self.escQuote = escQuote +- self.unquoteResults = unquoteResults +-
self.convertWhitespaceEscapes = convertWhitespaceEscapes +- +- sep = "" +- inner_pattern = "" +- +- if escQuote: +- inner_pattern += r"{}(?:{})".format(sep, re.escape(escQuote)) +- sep = "|" +- +- if escChar: +- inner_pattern += r"{}(?:{}.)".format(sep, re.escape(escChar)) +- sep = "|" +- self.escCharReplacePattern = re.escape(self.escChar) + "(.)" +- +- if len(self.endQuoteChar) > 1: +- inner_pattern += ( +- "{}(?:".format(sep) +- + "|".join( +- "(?:{}(?!{}))".format( +- re.escape(self.endQuoteChar[:i]), +- re.escape(self.endQuoteChar[i:]), +- ) +- for i in range(len(self.endQuoteChar) - 1, 0, -1) +- ) +- + ")" +- ) +- sep = "|" +- +- if multiline: +- self.flags = re.MULTILINE | re.DOTALL +- inner_pattern += r"{}(?:[^{}{}])".format( +- sep, +- _escape_regex_range_chars(self.endQuoteChar[0]), +- (_escape_regex_range_chars(escChar) if escChar is not None else ""), +- ) +- else: +- self.flags = 0 +- inner_pattern += r"{}(?:[^{}\n\r{}])".format( +- sep, +- _escape_regex_range_chars(self.endQuoteChar[0]), +- (_escape_regex_range_chars(escChar) if escChar is not None else ""), +- ) +- +- self.pattern = "".join( +- [ +- re.escape(self.quoteChar), +- "(?:", +- inner_pattern, +- ")*", +- re.escape(self.endQuoteChar), +- ] +- ) +- +- try: +- self.re = re.compile(self.pattern, self.flags) +- self.reString = self.pattern +- self.re_match = self.re.match +- except re.error: +- raise ValueError( +- "invalid pattern {!r} passed to Regex".format(self.pattern) +- ) +- +- self.errmsg = "Expected " + self.name +- self.mayIndexError = False +- self.mayReturnEmpty = True +- +- def _generateDefaultName(self): +- if self.quoteChar == self.endQuoteChar and isinstance(self.quoteChar, str_type): +- return "string enclosed in {!r}".format(self.quoteChar) +- +- return "quoted string, starting with {} ending with {}".format( +- self.quoteChar, self.endQuoteChar +- ) +- +- def parseImpl(self, instring, loc, doActions=True): +- result = ( +- instring[loc] == self.firstQuoteChar +- and self.re_match(instring, loc) +- or None +- ) +- if not result: +- raise ParseException(instring, loc, self.errmsg, self) +- +- loc = result.end() +- ret = result.group() +- +- if self.unquoteResults: +- +- # strip off quotes +- ret = ret[self.quoteCharLen : -self.endQuoteCharLen] +- +- if isinstance(ret, str_type): +- # replace escaped whitespace +- if "\\" in ret and self.convertWhitespaceEscapes: +- for wslit, wschar in self.ws_map: +- ret = ret.replace(wslit, wschar) +- +- # replace escaped characters +- if self.escChar: +- ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret) +- +- # replace escaped quotes +- if self.escQuote: +- ret = ret.replace(self.escQuote, self.endQuoteChar) +- +- return loc, ret +- +- +-class CharsNotIn(Token): +- """Token for matching words composed of characters *not* in a given +- set (will include whitespace in matched characters if not listed in +- the provided exclusion set - see example). Defined with string +- containing all disallowed characters, and an optional minimum, +- maximum, and/or exact length. The default value for ``min`` is +- 1 (a minimum value < 1 is not valid); the default values for +- ``max`` and ``exact`` are 0, meaning no maximum or exact +- length restriction. 
+- +- Example:: +- +- # define a comma-separated-value as anything that is not a ',' +- csv_value = CharsNotIn(',') +- print(delimited_list(csv_value).parse_string("dkls,lsdkjf,s12 34,@!#,213")) +- +- prints:: +- +- ['dkls', 'lsdkjf', 's12 34', '@!#', '213'] +- """ +- +- def __init__( +- self, +- not_chars: str = "", +- min: int = 1, +- max: int = 0, +- exact: int = 0, +- *, +- notChars: str = "", +- ): +- super().__init__() +- self.skipWhitespace = False +- self.notChars = not_chars or notChars +- self.notCharsSet = set(self.notChars) +- +- if min < 1: +- raise ValueError( +- "cannot specify a minimum length < 1; use " +- "Opt(CharsNotIn()) if zero-length char group is permitted" +- ) +- +- self.minLen = min +- +- if max > 0: +- self.maxLen = max +- else: +- self.maxLen = _MAX_INT +- +- if exact > 0: +- self.maxLen = exact +- self.minLen = exact +- +- self.errmsg = "Expected " + self.name +- self.mayReturnEmpty = self.minLen == 0 +- self.mayIndexError = False +- +- def _generateDefaultName(self): +- not_chars_str = _collapse_string_to_ranges(self.notChars) +- if len(not_chars_str) > 16: +- return "!W:({}...)".format(self.notChars[: 16 - 3]) +- else: +- return "!W:({})".format(self.notChars) +- +- def parseImpl(self, instring, loc, doActions=True): +- notchars = self.notCharsSet +- if instring[loc] in notchars: +- raise ParseException(instring, loc, self.errmsg, self) +- +- start = loc +- loc += 1 +- maxlen = min(start + self.maxLen, len(instring)) +- while loc < maxlen and instring[loc] not in notchars: +- loc += 1 +- +- if loc - start < self.minLen: +- raise ParseException(instring, loc, self.errmsg, self) +- +- return loc, instring[start:loc] +- +- +-class White(Token): +- """Special matching class for matching whitespace. Normally, +- whitespace is ignored by pyparsing grammars. This class is included +- when some whitespace structures are significant. Define with +- a string containing the whitespace characters to be matched; default +- is ``" \\t\\r\\n"``. Also takes optional ``min``, +- ``max``, and ``exact`` arguments, as defined for the +- :class:`Word` class. 
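
# ---- editorial aside (not part of the patch) ------------------------------
# The White docstring above carries no usage example; a minimal sketch,
# assuming pyparsing 3.x, where a run of spaces is made significant:
from pyparsing import White, Word, alphas

indented = White(" ", min=4) + Word(alphas)   # 'indented' is illustrative
print(indented.parse_string("    body"))      # -> ['    ', 'body']
# ----------------------------------------------------------------------------
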
+- """ +- +- whiteStrs = { +- " ": "<SP>", +- "\t": "<TAB>", +- "\n": "<LF>", +- "\r": "<CR>", +- "\f": "<FF>", +- "\u00A0": "<NBSP>", +- "\u1680": "<OGHAM_SPACE_MARK>", +- "\u180E": "<MONGOLIAN_VOWEL_SEPARATOR>", +- "\u2000": "<EN_QUAD>", +- "\u2001": "<EM_QUAD>", +- "\u2002": "<EN_SPACE>", +- "\u2003": "<EM_SPACE>", +- "\u2004": "<THREE-PER-EM_SPACE>", +- "\u2005": "<FOUR-PER-EM_SPACE>", +- "\u2006": "<SIX-PER-EM_SPACE>", +- "\u2007": "<FIGURE_SPACE>", +- "\u2008": "<PUNCTUATION_SPACE>", +- "\u2009": "<THIN_SPACE>", +- "\u200A": "<HAIR_SPACE>", +- "\u200B": "<ZERO_WIDTH_SPACE>", +- "\u202F": "<NNBSP>", +- "\u205F": "<MMSP>", +- "\u3000": "<IDEOGRAPHIC_SPACE>", +- } +- +- def __init__(self, ws: str = " \t\r\n", min: int = 1, max: int = 0, exact: int = 0): +- super().__init__() +- self.matchWhite = ws +- self.set_whitespace_chars( +- "".join(c for c in self.whiteStrs if c not in self.matchWhite), +- copy_defaults=True, +- ) +- # self.leave_whitespace() +- self.mayReturnEmpty = True +- self.errmsg = "Expected " + self.name +- +- self.minLen = min +- +- if max > 0: +- self.maxLen = max +- else: +- self.maxLen = _MAX_INT +- +- if exact > 0: +- self.maxLen = exact +- self.minLen = exact +- +- def _generateDefaultName(self): +- return "".join(White.whiteStrs[c] for c in self.matchWhite) +- +- def parseImpl(self, instring, loc, doActions=True): +- if instring[loc] not in self.matchWhite: +- raise ParseException(instring, loc, self.errmsg, self) +- start = loc +- loc += 1 +- maxloc = start + self.maxLen +- maxloc = min(maxloc, len(instring)) +- while loc < maxloc and instring[loc] in self.matchWhite: +- loc += 1 +- +- if loc - start < self.minLen: +- raise ParseException(instring, loc, self.errmsg, self) +- +- return loc, instring[start:loc] +- +- +-class PositionToken(Token): +- def __init__(self): +- super().__init__() +- self.mayReturnEmpty = True +- self.mayIndexError = False +- +- +-class GoToColumn(PositionToken): +- """Token to advance to a specific column of input text; useful for +- tabular report scraping. +- """ +- +- def __init__(self, colno: int): +- super().__init__() +- self.col = colno +- +- def preParse(self, instring, loc): +- if col(loc, instring) != self.col: +- instrlen = len(instring) +- if self.ignoreExprs: +- loc = self._skipIgnorables(instring, loc) +- while ( +- loc < instrlen +- and instring[loc].isspace() +- and col(loc, instring) != self.col +- ): +- loc += 1 +- return loc +- +- def parseImpl(self, instring, loc, doActions=True): +- thiscol = col(loc, instring) +- if thiscol > self.col: +- raise ParseException(instring, loc, "Text not in expected column", self) +- newloc = loc + self.col - thiscol +- ret = instring[loc:newloc] +- return newloc, ret +- +- +-class LineStart(PositionToken): +- r"""Matches if current position is at the beginning of a line within +- the parse string +- +- Example:: +- +- test = '''\ +- AAA this line +- AAA and this line +- AAA but not this one +- B AAA and definitely not this one +- ''' +- +- for t in (LineStart() + 'AAA' + restOfLine).search_string(test): +- print(t) +- +- prints:: +- +- ['AAA', ' this line'] +- ['AAA', ' and this line'] +- +- """ +- +- def __init__(self): +- super().__init__() +- self.leave_whitespace() +- self.orig_whiteChars = set() | self.whiteChars +- self.whiteChars.discard("\n") +- self.skipper = Empty().set_whitespace_chars(self.whiteChars) +- self.errmsg = "Expected start of line" +- +- def preParse(self, instring, loc): +- if loc == 0: +- return loc +- else: +- ret = self.skipper.preParse(instring, loc) +- if "\n" in self.orig_whiteChars: +- while instring[ret : ret + 1] == "\n": +- ret = self.skipper.preParse(instring, ret + 1) +- return ret +- +- def parseImpl(self, instring, loc, doActions=True): +- if col(loc, instring) == 1: +- return loc, [] +- raise ParseException(instring,
loc, self.errmsg, self) +- +- +-class LineEnd(PositionToken): +- """Matches if current position is at the end of a line within the +- parse string +- """ +- +- def __init__(self): +- super().__init__() +- self.whiteChars.discard("\n") +- self.set_whitespace_chars(self.whiteChars, copy_defaults=False) +- self.errmsg = "Expected end of line" +- +- def parseImpl(self, instring, loc, doActions=True): +- if loc < len(instring): +- if instring[loc] == "\n": +- return loc + 1, "\n" +- else: +- raise ParseException(instring, loc, self.errmsg, self) +- elif loc == len(instring): +- return loc + 1, [] +- else: +- raise ParseException(instring, loc, self.errmsg, self) +- +- +-class StringStart(PositionToken): +- """Matches if current position is at the beginning of the parse +- string +- """ +- +- def __init__(self): +- super().__init__() +- self.errmsg = "Expected start of text" +- +- def parseImpl(self, instring, loc, doActions=True): +- if loc != 0: +- # see if entire string up to here is just whitespace and ignoreables +- if loc != self.preParse(instring, 0): +- raise ParseException(instring, loc, self.errmsg, self) +- return loc, [] +- +- +-class StringEnd(PositionToken): +- """ +- Matches if current position is at the end of the parse string +- """ +- +- def __init__(self): +- super().__init__() +- self.errmsg = "Expected end of text" +- +- def parseImpl(self, instring, loc, doActions=True): +- if loc < len(instring): +- raise ParseException(instring, loc, self.errmsg, self) +- elif loc == len(instring): +- return loc + 1, [] +- elif loc > len(instring): +- return loc, [] +- else: +- raise ParseException(instring, loc, self.errmsg, self) +- +- +-class WordStart(PositionToken): +- """Matches if the current position is at the beginning of a +- :class:`Word`, and is not preceded by any character in a given +- set of ``word_chars`` (default= ``printables``). To emulate the +- ``\b`` behavior of regular expressions, use +- ``WordStart(alphanums)``. ``WordStart`` will also match at +- the beginning of the string being parsed, or at the beginning of +- a line. +- """ +- +- def __init__(self, word_chars: str = printables, *, wordChars: str = printables): +- wordChars = word_chars if wordChars == printables else wordChars +- super().__init__() +- self.wordChars = set(wordChars) +- self.errmsg = "Not at the start of a word" +- +- def parseImpl(self, instring, loc, doActions=True): +- if loc != 0: +- if ( +- instring[loc - 1] in self.wordChars +- or instring[loc] not in self.wordChars +- ): +- raise ParseException(instring, loc, self.errmsg, self) +- return loc, [] +- +- +-class WordEnd(PositionToken): +- """Matches if the current position is at the end of a :class:`Word`, +- and is not followed by any character in a given set of ``word_chars`` +- (default= ``printables``). To emulate the ``\b`` behavior of +- regular expressions, use ``WordEnd(alphanums)``. ``WordEnd`` +- will also match at the end of the string being parsed, or at the end +- of a line. 
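
# ---- editorial aside (not part of the patch) ------------------------------
# A sketch of the \b emulation described above for WordStart/WordEnd,
# assuming pyparsing 3.x; reject digits that run straight into letters:
from pyparsing import Word, WordEnd, alphanums, nums

integer = Word(nums) + WordEnd(alphanums)             # like r"\d+\b"
print(integer.search_string("17 tons, 3rd try, 99"))  # -> [['17'], ['99']]
# ----------------------------------------------------------------------------
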
+- """ +- +- def __init__(self, word_chars: str = printables, *, wordChars: str = printables): +- wordChars = word_chars if wordChars == printables else wordChars +- super().__init__() +- self.wordChars = set(wordChars) +- self.skipWhitespace = False +- self.errmsg = "Not at the end of a word" +- +- def parseImpl(self, instring, loc, doActions=True): +- instrlen = len(instring) +- if instrlen > 0 and loc < instrlen: +- if ( +- instring[loc] in self.wordChars +- or instring[loc - 1] not in self.wordChars +- ): +- raise ParseException(instring, loc, self.errmsg, self) +- return loc, [] +- +- +-class ParseExpression(ParserElement): +- """Abstract subclass of ParserElement, for combining and +- post-processing parsed tokens. +- """ +- +- def __init__(self, exprs: typing.Iterable[ParserElement], savelist: bool = False): +- super().__init__(savelist) +- self.exprs: List[ParserElement] +- if isinstance(exprs, _generatorType): +- exprs = list(exprs) +- +- if isinstance(exprs, str_type): +- self.exprs = [self._literalStringClass(exprs)] +- elif isinstance(exprs, ParserElement): +- self.exprs = [exprs] +- elif isinstance(exprs, Iterable): +- exprs = list(exprs) +- # if sequence of strings provided, wrap with Literal +- if any(isinstance(expr, str_type) for expr in exprs): +- exprs = ( +- self._literalStringClass(e) if isinstance(e, str_type) else e +- for e in exprs +- ) +- self.exprs = list(exprs) +- else: +- try: +- self.exprs = list(exprs) +- except TypeError: +- self.exprs = [exprs] +- self.callPreparse = False +- +- def recurse(self) -> Sequence[ParserElement]: +- return self.exprs[:] +- +- def append(self, other) -> ParserElement: +- self.exprs.append(other) +- self._defaultName = None +- return self +- +- def leave_whitespace(self, recursive: bool = True) -> ParserElement: +- """ +- Extends ``leave_whitespace`` defined in base class, and also invokes ``leave_whitespace`` on +- all contained expressions. +- """ +- super().leave_whitespace(recursive) +- +- if recursive: +- self.exprs = [e.copy() for e in self.exprs] +- for e in self.exprs: +- e.leave_whitespace(recursive) +- return self +- +- def ignore_whitespace(self, recursive: bool = True) -> ParserElement: +- """ +- Extends ``ignore_whitespace`` defined in base class, and also invokes ``leave_whitespace`` on +- all contained expressions. 
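
# ---- editorial aside (not part of the patch) ------------------------------
# A minimal sketch of leave_whitespace/ignore_whitespace above, assuming
# pyparsing 3.x; by default, whitespace between tokens is skipped:
from pyparsing import Literal

ab = Literal("a") + Literal("b")
print(ab.parse_string("a   b"))  # -> ['a', 'b']

ab_strict = (Literal("a") + Literal("b")).leave_whitespace()
print(ab_strict.parse_string("ab"))  # -> ['a', 'b']; "a   b" would now fail
# ----------------------------------------------------------------------------
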
+- """ +- super().ignore_whitespace(recursive) +- if recursive: +- self.exprs = [e.copy() for e in self.exprs] +- for e in self.exprs: +- e.ignore_whitespace(recursive) +- return self +- +- def ignore(self, other) -> ParserElement: +- if isinstance(other, Suppress): +- if other not in self.ignoreExprs: +- super().ignore(other) +- for e in self.exprs: +- e.ignore(self.ignoreExprs[-1]) +- else: +- super().ignore(other) +- for e in self.exprs: +- e.ignore(self.ignoreExprs[-1]) +- return self +- +- def _generateDefaultName(self): +- return "{}:({})".format(self.__class__.__name__, str(self.exprs)) +- +- def streamline(self) -> ParserElement: +- if self.streamlined: +- return self +- +- super().streamline() +- +- for e in self.exprs: +- e.streamline() +- +- # collapse nested :class:`And`'s of the form ``And(And(And(a, b), c), d)`` to ``And(a, b, c, d)`` +- # but only if there are no parse actions or resultsNames on the nested And's +- # (likewise for :class:`Or`'s and :class:`MatchFirst`'s) +- if len(self.exprs) == 2: +- other = self.exprs[0] +- if ( +- isinstance(other, self.__class__) +- and not other.parseAction +- and other.resultsName is None +- and not other.debug +- ): +- self.exprs = other.exprs[:] + [self.exprs[1]] +- self._defaultName = None +- self.mayReturnEmpty |= other.mayReturnEmpty +- self.mayIndexError |= other.mayIndexError +- +- other = self.exprs[-1] +- if ( +- isinstance(other, self.__class__) +- and not other.parseAction +- and other.resultsName is None +- and not other.debug +- ): +- self.exprs = self.exprs[:-1] + other.exprs[:] +- self._defaultName = None +- self.mayReturnEmpty |= other.mayReturnEmpty +- self.mayIndexError |= other.mayIndexError +- +- self.errmsg = "Expected " + str(self) +- +- return self +- +- def validate(self, validateTrace=None) -> None: +- tmp = (validateTrace if validateTrace is not None else [])[:] + [self] +- for e in self.exprs: +- e.validate(tmp) +- self._checkRecursion([]) +- +- def copy(self) -> ParserElement: +- ret = super().copy() +- ret.exprs = [e.copy() for e in self.exprs] +- return ret +- +- def _setResultsName(self, name, listAllMatches=False): +- if ( +- __diag__.warn_ungrouped_named_tokens_in_collection +- and Diagnostics.warn_ungrouped_named_tokens_in_collection +- not in self.suppress_warnings_ +- ): +- for e in self.exprs: +- if ( +- isinstance(e, ParserElement) +- and e.resultsName +- and Diagnostics.warn_ungrouped_named_tokens_in_collection +- not in e.suppress_warnings_ +- ): +- warnings.warn( +- "{}: setting results name {!r} on {} expression " +- "collides with {!r} on contained expression".format( +- "warn_ungrouped_named_tokens_in_collection", +- name, +- type(self).__name__, +- e.resultsName, +- ), +- stacklevel=3, +- ) +- +- return super()._setResultsName(name, listAllMatches) +- +- ignoreWhitespace = ignore_whitespace +- leaveWhitespace = leave_whitespace +- +- +-class And(ParseExpression): +- """ +- Requires all given :class:`ParseExpression` s to be found in the given order. +- Expressions may be separated by whitespace. +- May be constructed using the ``'+'`` operator. +- May also be constructed using the ``'-'`` operator, which will +- suppress backtracking. +- +- Example:: +- +- integer = Word(nums) +- name_expr = Word(alphas)[1, ...] 
+- +- expr = And([integer("id"), name_expr("name"), integer("age")]) +- # more easily written as: +- expr = integer("id") + name_expr("name") + integer("age") +- """ +- +- class _ErrorStop(Empty): +- def __init__(self, *args, **kwargs): +- super().__init__(*args, **kwargs) +- self.leave_whitespace() +- +- def _generateDefaultName(self): +- return "-" +- +- def __init__( +- self, exprs_arg: typing.Iterable[ParserElement], savelist: bool = True +- ): +- exprs: List[ParserElement] = list(exprs_arg) +- if exprs and Ellipsis in exprs: +- tmp = [] +- for i, expr in enumerate(exprs): +- if expr is Ellipsis: +- if i < len(exprs) - 1: +- skipto_arg: ParserElement = (Empty() + exprs[i + 1]).exprs[-1] +- tmp.append(SkipTo(skipto_arg)("_skipped*")) +- else: +- raise Exception( +- "cannot construct And with sequence ending in ..." +- ) +- else: +- tmp.append(expr) +- exprs[:] = tmp +- super().__init__(exprs, savelist) +- if self.exprs: +- self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) +- if not isinstance(self.exprs[0], White): +- self.set_whitespace_chars( +- self.exprs[0].whiteChars, +- copy_defaults=self.exprs[0].copyDefaultWhiteChars, +- ) +- self.skipWhitespace = self.exprs[0].skipWhitespace +- else: +- self.skipWhitespace = False +- else: +- self.mayReturnEmpty = True +- self.callPreparse = True +- +- def streamline(self) -> ParserElement: +- # collapse any _PendingSkip's +- if self.exprs: +- if any( +- isinstance(e, ParseExpression) +- and e.exprs +- and isinstance(e.exprs[-1], _PendingSkip) +- for e in self.exprs[:-1] +- ): +- for i, e in enumerate(self.exprs[:-1]): +- if e is None: +- continue +- if ( +- isinstance(e, ParseExpression) +- and e.exprs +- and isinstance(e.exprs[-1], _PendingSkip) +- ): +- e.exprs[-1] = e.exprs[-1] + self.exprs[i + 1] +- self.exprs[i + 1] = None +- self.exprs = [e for e in self.exprs if e is not None] +- +- super().streamline() +- +- # link any IndentedBlocks to the prior expression +- for prev, cur in zip(self.exprs, self.exprs[1:]): +- # traverse cur or any first embedded expr of cur looking for an IndentedBlock +- # (but watch out for recursive grammar) +- seen = set() +- while cur: +- if id(cur) in seen: +- break +- seen.add(id(cur)) +- if isinstance(cur, IndentedBlock): +- prev.add_parse_action( +- lambda s, l, t, cur_=cur: setattr( +- cur_, "parent_anchor", col(l, s) +- ) +- ) +- break +- subs = cur.recurse() +- cur = next(iter(subs), None) +- +- self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) +- return self +- +- def parseImpl(self, instring, loc, doActions=True): +- # pass False as callPreParse arg to _parse for first element, since we already +- # pre-parsed the string as part of our And pre-parsing +- loc, resultlist = self.exprs[0]._parse( +- instring, loc, doActions, callPreParse=False +- ) +- errorStop = False +- for e in self.exprs[1:]: +- # if isinstance(e, And._ErrorStop): +- if type(e) is And._ErrorStop: +- errorStop = True +- continue +- if errorStop: +- try: +- loc, exprtokens = e._parse(instring, loc, doActions) +- except ParseSyntaxException: +- raise +- except ParseBaseException as pe: +- pe.__traceback__ = None +- raise ParseSyntaxException._from_exception(pe) +- except IndexError: +- raise ParseSyntaxException( +- instring, len(instring), self.errmsg, self +- ) +- else: +- loc, exprtokens = e._parse(instring, loc, doActions) +- if exprtokens or exprtokens.haskeys(): +- resultlist += exprtokens +- return loc, resultlist +- +- def __iadd__(self, other): +- if isinstance(other, str_type): +- other = 
self._literalStringClass(other) +- return self.append(other) # And([self, other]) +- +- def _checkRecursion(self, parseElementList): +- subRecCheckList = parseElementList[:] + [self] +- for e in self.exprs: +- e._checkRecursion(subRecCheckList) +- if not e.mayReturnEmpty: +- break +- +- def _generateDefaultName(self): +- inner = " ".join(str(e) for e in self.exprs) +- # strip off redundant inner {}'s +- while len(inner) > 1 and inner[0 :: len(inner) - 1] == "{}": +- inner = inner[1:-1] +- return "{" + inner + "}" +- +- +-class Or(ParseExpression): +- """Requires that at least one :class:`ParseExpression` is found. If +- two expressions match, the expression that matches the longest +- string will be used. May be constructed using the ``'^'`` +- operator. +- +- Example:: +- +- # construct Or using '^' operator +- +- number = Word(nums) ^ Combine(Word(nums) + '.' + Word(nums)) +- print(number.search_string("123 3.1416 789")) +- +- prints:: +- +- [['123'], ['3.1416'], ['789']] +- """ +- +- def __init__(self, exprs: typing.Iterable[ParserElement], savelist: bool = False): +- super().__init__(exprs, savelist) +- if self.exprs: +- self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) +- self.skipWhitespace = all(e.skipWhitespace for e in self.exprs) +- else: +- self.mayReturnEmpty = True +- +- def streamline(self) -> ParserElement: +- super().streamline() +- if self.exprs: +- self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) +- self.saveAsList = any(e.saveAsList for e in self.exprs) +- self.skipWhitespace = all( +- e.skipWhitespace and not isinstance(e, White) for e in self.exprs +- ) +- else: +- self.saveAsList = False +- return self +- +- def parseImpl(self, instring, loc, doActions=True): +- maxExcLoc = -1 +- maxException = None +- matches = [] +- fatals = [] +- if all(e.callPreparse for e in self.exprs): +- loc = self.preParse(instring, loc) +- for e in self.exprs: +- try: +- loc2 = e.try_parse(instring, loc, raise_fatal=True) +- except ParseFatalException as pfe: +- pfe.__traceback__ = None +- pfe.parserElement = e +- fatals.append(pfe) +- maxException = None +- maxExcLoc = -1 +- except ParseException as err: +- if not fatals: +- err.__traceback__ = None +- if err.loc > maxExcLoc: +- maxException = err +- maxExcLoc = err.loc +- except IndexError: +- if len(instring) > maxExcLoc: +- maxException = ParseException( +- instring, len(instring), e.errmsg, self +- ) +- maxExcLoc = len(instring) +- else: +- # save match among all matches, to retry longest to shortest +- matches.append((loc2, e)) +- +- if matches: +- # re-evaluate all matches in descending order of length of match, in case attached actions +- # might change whether or how much they match of the input. 
+- matches.sort(key=itemgetter(0), reverse=True) +- +- if not doActions: +- # no further conditions or parse actions to change the selection of +- # alternative, so the first match will be the best match +- best_expr = matches[0][1] +- return best_expr._parse(instring, loc, doActions) +- +- longest = -1, None +- for loc1, expr1 in matches: +- if loc1 <= longest[0]: +- # already have a longer match than this one will deliver, we are done +- return longest +- +- try: +- loc2, toks = expr1._parse(instring, loc, doActions) +- except ParseException as err: +- err.__traceback__ = None +- if err.loc > maxExcLoc: +- maxException = err +- maxExcLoc = err.loc +- else: +- if loc2 >= loc1: +- return loc2, toks +- # didn't match as much as before +- elif loc2 > longest[0]: +- longest = loc2, toks +- +- if longest != (-1, None): +- return longest +- +- if fatals: +- if len(fatals) > 1: +- fatals.sort(key=lambda e: -e.loc) +- if fatals[0].loc == fatals[1].loc: +- fatals.sort(key=lambda e: (-e.loc, -len(str(e.parserElement)))) +- max_fatal = fatals[0] +- raise max_fatal +- +- if maxException is not None: +- maxException.msg = self.errmsg +- raise maxException +- else: +- raise ParseException( +- instring, loc, "no defined alternatives to match", self +- ) +- +- def __ixor__(self, other): +- if isinstance(other, str_type): +- other = self._literalStringClass(other) +- return self.append(other) # Or([self, other]) +- +- def _generateDefaultName(self): +- return "{" + " ^ ".join(str(e) for e in self.exprs) + "}" +- +- def _setResultsName(self, name, listAllMatches=False): +- if ( +- __diag__.warn_multiple_tokens_in_named_alternation +- and Diagnostics.warn_multiple_tokens_in_named_alternation +- not in self.suppress_warnings_ +- ): +- if any( +- isinstance(e, And) +- and Diagnostics.warn_multiple_tokens_in_named_alternation +- not in e.suppress_warnings_ +- for e in self.exprs +- ): +- warnings.warn( +- "{}: setting results name {!r} on {} expression " +- "will return a list of all parsed tokens in an And alternative, " +- "in prior versions only the first token was returned; enclose " +- "contained argument in Group".format( +- "warn_multiple_tokens_in_named_alternation", +- name, +- type(self).__name__, +- ), +- stacklevel=3, +- ) +- +- return super()._setResultsName(name, listAllMatches) +- +- +-class MatchFirst(ParseExpression): +- """Requires that at least one :class:`ParseExpression` is found. If +- more than one expression matches, the first one listed is the one that will +- match. May be constructed using the ``'|'`` operator. +- +- Example:: +- +- # construct MatchFirst using '|' operator +- +- # watch the order of expressions to match +- number = Word(nums) | Combine(Word(nums) + '.' + Word(nums)) +- print(number.search_string("123 3.1416 789")) # Fail! -> [['123'], ['3'], ['1416'], ['789']] +- +- # put more selective expression first +- number = Combine(Word(nums) + '.' 
+ Word(nums)) | Word(nums) +- print(number.search_string("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']] +- """ +- +- def __init__(self, exprs: typing.Iterable[ParserElement], savelist: bool = False): +- super().__init__(exprs, savelist) +- if self.exprs: +- self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) +- self.skipWhitespace = all(e.skipWhitespace for e in self.exprs) +- else: +- self.mayReturnEmpty = True +- +- def streamline(self) -> ParserElement: +- if self.streamlined: +- return self +- +- super().streamline() +- if self.exprs: +- self.saveAsList = any(e.saveAsList for e in self.exprs) +- self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) +- self.skipWhitespace = all( +- e.skipWhitespace and not isinstance(e, White) for e in self.exprs +- ) +- else: +- self.saveAsList = False +- self.mayReturnEmpty = True +- return self +- +- def parseImpl(self, instring, loc, doActions=True): +- maxExcLoc = -1 +- maxException = None +- +- for e in self.exprs: +- try: +- return e._parse( +- instring, +- loc, +- doActions, +- ) +- except ParseFatalException as pfe: +- pfe.__traceback__ = None +- pfe.parserElement = e +- raise +- except ParseException as err: +- if err.loc > maxExcLoc: +- maxException = err +- maxExcLoc = err.loc +- except IndexError: +- if len(instring) > maxExcLoc: +- maxException = ParseException( +- instring, len(instring), e.errmsg, self +- ) +- maxExcLoc = len(instring) +- +- if maxException is not None: +- maxException.msg = self.errmsg +- raise maxException +- else: +- raise ParseException( +- instring, loc, "no defined alternatives to match", self +- ) +- +- def __ior__(self, other): +- if isinstance(other, str_type): +- other = self._literalStringClass(other) +- return self.append(other) # MatchFirst([self, other]) +- +- def _generateDefaultName(self): +- return "{" + " | ".join(str(e) for e in self.exprs) + "}" +- +- def _setResultsName(self, name, listAllMatches=False): +- if ( +- __diag__.warn_multiple_tokens_in_named_alternation +- and Diagnostics.warn_multiple_tokens_in_named_alternation +- not in self.suppress_warnings_ +- ): +- if any( +- isinstance(e, And) +- and Diagnostics.warn_multiple_tokens_in_named_alternation +- not in e.suppress_warnings_ +- for e in self.exprs +- ): +- warnings.warn( +- "{}: setting results name {!r} on {} expression " +- "will return a list of all parsed tokens in an And alternative, " +- "in prior versions only the first token was returned; enclose " +- "contained argument in Group".format( +- "warn_multiple_tokens_in_named_alternation", +- name, +- type(self).__name__, +- ), +- stacklevel=3, +- ) +- +- return super()._setResultsName(name, listAllMatches) +- +- +-class Each(ParseExpression): +- """Requires all given :class:`ParseExpression` s to be found, but in +- any order. Expressions may be separated by whitespace. +- +- May be constructed using the ``'&'`` operator. 
+- +- Example:: +- +- color = one_of("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN") +- shape_type = one_of("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON") +- integer = Word(nums) +- shape_attr = "shape:" + shape_type("shape") +- posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn") +- color_attr = "color:" + color("color") +- size_attr = "size:" + integer("size") +- +- # use Each (using operator '&') to accept attributes in any order +- # (shape and posn are required, color and size are optional) +- shape_spec = shape_attr & posn_attr & Opt(color_attr) & Opt(size_attr) +- +- shape_spec.run_tests(''' +- shape: SQUARE color: BLACK posn: 100, 120 +- shape: CIRCLE size: 50 color: BLUE posn: 50,80 +- color:GREEN size:20 shape:TRIANGLE posn:20,40 +- ''' +- ) +- +- prints:: +- +- shape: SQUARE color: BLACK posn: 100, 120 +- ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']] +- - color: BLACK +- - posn: ['100', ',', '120'] +- - x: 100 +- - y: 120 +- - shape: SQUARE +- +- +- shape: CIRCLE size: 50 color: BLUE posn: 50,80 +- ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']] +- - color: BLUE +- - posn: ['50', ',', '80'] +- - x: 50 +- - y: 80 +- - shape: CIRCLE +- - size: 50 +- +- +- color: GREEN size: 20 shape: TRIANGLE posn: 20,40 +- ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']] +- - color: GREEN +- - posn: ['20', ',', '40'] +- - x: 20 +- - y: 40 +- - shape: TRIANGLE +- - size: 20 +- """ +- +- def __init__(self, exprs: typing.Iterable[ParserElement], savelist: bool = True): +- super().__init__(exprs, savelist) +- if self.exprs: +- self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) +- else: +- self.mayReturnEmpty = True +- self.skipWhitespace = True +- self.initExprGroups = True +- self.saveAsList = True +- +- def streamline(self) -> ParserElement: +- super().streamline() +- if self.exprs: +- self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) +- else: +- self.mayReturnEmpty = True +- return self +- +- def parseImpl(self, instring, loc, doActions=True): +- if self.initExprGroups: +- self.opt1map = dict( +- (id(e.expr), e) for e in self.exprs if isinstance(e, Opt) +- ) +- opt1 = [e.expr for e in self.exprs if isinstance(e, Opt)] +- opt2 = [ +- e +- for e in self.exprs +- if e.mayReturnEmpty and not isinstance(e, (Opt, Regex, ZeroOrMore)) +- ] +- self.optionals = opt1 + opt2 +- self.multioptionals = [ +- e.expr.set_results_name(e.resultsName, list_all_matches=True) +- for e in self.exprs +- if isinstance(e, _MultipleMatch) +- ] +- self.multirequired = [ +- e.expr.set_results_name(e.resultsName, list_all_matches=True) +- for e in self.exprs +- if isinstance(e, OneOrMore) +- ] +- self.required = [ +- e for e in self.exprs if not isinstance(e, (Opt, ZeroOrMore, OneOrMore)) +- ] +- self.required += self.multirequired +- self.initExprGroups = False +- +- tmpLoc = loc +- tmpReqd = self.required[:] +- tmpOpt = self.optionals[:] +- multis = self.multioptionals[:] +- matchOrder = [] +- +- keepMatching = True +- failed = [] +- fatals = [] +- while keepMatching: +- tmpExprs = tmpReqd + tmpOpt + multis +- failed.clear() +- fatals.clear() +- for e in tmpExprs: +- try: +- tmpLoc = e.try_parse(instring, tmpLoc, raise_fatal=True) +- except ParseFatalException as pfe: +- pfe.__traceback__ = None +- pfe.parserElement = e +- fatals.append(pfe) +- failed.append(e) +- except ParseException: +- failed.append(e) +- else: +- matchOrder.append(self.opt1map.get(id(e), e)) +- if e in 
tmpReqd: +- tmpReqd.remove(e) +- elif e in tmpOpt: +- tmpOpt.remove(e) +- if len(failed) == len(tmpExprs): +- keepMatching = False +- +- # look for any ParseFatalExceptions +- if fatals: +- if len(fatals) > 1: +- fatals.sort(key=lambda e: -e.loc) +- if fatals[0].loc == fatals[1].loc: +- fatals.sort(key=lambda e: (-e.loc, -len(str(e.parserElement)))) +- max_fatal = fatals[0] +- raise max_fatal +- +- if tmpReqd: +- missing = ", ".join([str(e) for e in tmpReqd]) +- raise ParseException( +- instring, +- loc, +- "Missing one or more required elements ({})".format(missing), +- ) +- +- # add any unmatched Opts, in case they have default values defined +- matchOrder += [e for e in self.exprs if isinstance(e, Opt) and e.expr in tmpOpt] +- +- total_results = ParseResults([]) +- for e in matchOrder: +- loc, results = e._parse(instring, loc, doActions) +- total_results += results +- +- return loc, total_results +- +- def _generateDefaultName(self): +- return "{" + " & ".join(str(e) for e in self.exprs) + "}" +- +- +-class ParseElementEnhance(ParserElement): +- """Abstract subclass of :class:`ParserElement`, for combining and +- post-processing parsed tokens. +- """ +- +- def __init__(self, expr: Union[ParserElement, str], savelist: bool = False): +- super().__init__(savelist) +- if isinstance(expr, str_type): +- if issubclass(self._literalStringClass, Token): +- expr = self._literalStringClass(expr) +- elif issubclass(type(self), self._literalStringClass): +- expr = Literal(expr) +- else: +- expr = self._literalStringClass(Literal(expr)) +- self.expr = expr +- if expr is not None: +- self.mayIndexError = expr.mayIndexError +- self.mayReturnEmpty = expr.mayReturnEmpty +- self.set_whitespace_chars( +- expr.whiteChars, copy_defaults=expr.copyDefaultWhiteChars +- ) +- self.skipWhitespace = expr.skipWhitespace +- self.saveAsList = expr.saveAsList +- self.callPreparse = expr.callPreparse +- self.ignoreExprs.extend(expr.ignoreExprs) +- +- def recurse(self) -> Sequence[ParserElement]: +- return [self.expr] if self.expr is not None else [] +- +- def parseImpl(self, instring, loc, doActions=True): +- if self.expr is not None: +- return self.expr._parse(instring, loc, doActions, callPreParse=False) +- else: +- raise ParseException(instring, loc, "No expression defined", self) +- +- def leave_whitespace(self, recursive: bool = True) -> ParserElement: +- super().leave_whitespace(recursive) +- +- if recursive: +- self.expr = self.expr.copy() +- if self.expr is not None: +- self.expr.leave_whitespace(recursive) +- return self +- +- def ignore_whitespace(self, recursive: bool = True) -> ParserElement: +- super().ignore_whitespace(recursive) +- +- if recursive: +- self.expr = self.expr.copy() +- if self.expr is not None: +- self.expr.ignore_whitespace(recursive) +- return self +- +- def ignore(self, other) -> ParserElement: +- if isinstance(other, Suppress): +- if other not in self.ignoreExprs: +- super().ignore(other) +- if self.expr is not None: +- self.expr.ignore(self.ignoreExprs[-1]) +- else: +- super().ignore(other) +- if self.expr is not None: +- self.expr.ignore(self.ignoreExprs[-1]) +- return self +- +- def streamline(self) -> ParserElement: +- super().streamline() +- if self.expr is not None: +- self.expr.streamline() +- return self +- +- def _checkRecursion(self, parseElementList): +- if self in parseElementList: +- raise RecursiveGrammarException(parseElementList + [self]) +- subRecCheckList = parseElementList[:] + [self] +- if self.expr is not None: +- self.expr._checkRecursion(subRecCheckList) +- +- def 
validate(self, validateTrace=None) -> None: +- if validateTrace is None: +- validateTrace = [] +- tmp = validateTrace[:] + [self] +- if self.expr is not None: +- self.expr.validate(tmp) +- self._checkRecursion([]) +- +- def _generateDefaultName(self): +- return "{}:({})".format(self.__class__.__name__, str(self.expr)) +- +- ignoreWhitespace = ignore_whitespace +- leaveWhitespace = leave_whitespace +- +- +-class IndentedBlock(ParseElementEnhance): +- """ +- Expression to match one or more expressions at a given indentation level. +- Useful for parsing text where structure is implied by indentation (like Python source code). +- """ +- +- class _Indent(Empty): +- def __init__(self, ref_col: int): +- super().__init__() +- self.errmsg = "expected indent at column {}".format(ref_col) +- self.add_condition(lambda s, l, t: col(l, s) == ref_col) +- +- class _IndentGreater(Empty): +- def __init__(self, ref_col: int): +- super().__init__() +- self.errmsg = "expected indent at column greater than {}".format(ref_col) +- self.add_condition(lambda s, l, t: col(l, s) > ref_col) +- +- def __init__( +- self, expr: ParserElement, *, recursive: bool = False, grouped: bool = True +- ): +- super().__init__(expr, savelist=True) +- # if recursive: +- # raise NotImplementedError("IndentedBlock with recursive is not implemented") +- self._recursive = recursive +- self._grouped = grouped +- self.parent_anchor = 1 +- +- def parseImpl(self, instring, loc, doActions=True): +- # advance parse position to non-whitespace by using an Empty() +- # this should be the column to be used for all subsequent indented lines +- anchor_loc = Empty().preParse(instring, loc) +- +- # see if self.expr matches at the current location - if not it will raise an exception +- # and no further work is necessary +- self.expr.try_parse(instring, anchor_loc, doActions) +- +- indent_col = col(anchor_loc, instring) +- peer_detect_expr = self._Indent(indent_col) +- +- inner_expr = Empty() + peer_detect_expr + self.expr +- if self._recursive: +- sub_indent = self._IndentGreater(indent_col) +- nested_block = IndentedBlock( +- self.expr, recursive=self._recursive, grouped=self._grouped +- ) +- nested_block.set_debug(self.debug) +- nested_block.parent_anchor = indent_col +- inner_expr += Opt(sub_indent + nested_block) +- +- inner_expr.set_name(f"inner {hex(id(inner_expr))[-4:].upper()}@{indent_col}") +- block = OneOrMore(inner_expr) +- +- trailing_undent = self._Indent(self.parent_anchor) | StringEnd() +- +- if self._grouped: +- wrapper = Group +- else: +- wrapper = lambda expr: expr +- return (wrapper(block) + Optional(trailing_undent)).parseImpl( +- instring, anchor_loc, doActions +- ) +- +- +-class AtStringStart(ParseElementEnhance): +- """Matches if expression matches at the beginning of the parse +- string:: +- +- AtStringStart(Word(nums)).parse_string("123") +- # prints ["123"] +- +- AtStringStart(Word(nums)).parse_string(" 123") +- # raises ParseException +- """ +- +- def __init__(self, expr: Union[ParserElement, str]): +- super().__init__(expr) +- self.callPreparse = False +- +- def parseImpl(self, instring, loc, doActions=True): +- if loc != 0: +- raise ParseException(instring, loc, "not found at string start") +- return super().parseImpl(instring, loc, doActions) +- +- +-class AtLineStart(ParseElementEnhance): +- r"""Matches if an expression matches at the beginning of a line within +- the parse string +- +- Example:: +- +- test = '''\ +- AAA this line +- AAA and this line +- AAA but not this one +- B AAA and definitely not this one +- ''' +- 
+- for t in (AtLineStart('AAA') + restOfLine).search_string(test): +- print(t) +- +- prints:: +- +- ['AAA', ' this line'] +- ['AAA', ' and this line'] +- +- """ +- +- def __init__(self, expr: Union[ParserElement, str]): +- super().__init__(expr) +- self.callPreparse = False +- +- def parseImpl(self, instring, loc, doActions=True): +- if col(loc, instring) != 1: +- raise ParseException(instring, loc, "not found at line start") +- return super().parseImpl(instring, loc, doActions) +- +- +-class FollowedBy(ParseElementEnhance): +- """Lookahead matching of the given parse expression. +- ``FollowedBy`` does *not* advance the parsing position within +- the input string, it only verifies that the specified parse +- expression matches at the current position. ``FollowedBy`` +- always returns a null token list. If any results names are defined +- in the lookahead expression, those *will* be returned for access by +- name. +- +- Example:: +- +- # use FollowedBy to match a label only if it is followed by a ':' +- data_word = Word(alphas) +- label = data_word + FollowedBy(':') +- attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stop_on=label).set_parse_action(' '.join)) +- +- attr_expr[1, ...].parse_string("shape: SQUARE color: BLACK posn: upper left").pprint() +- +- prints:: +- +- [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']] +- """ +- +- def __init__(self, expr: Union[ParserElement, str]): +- super().__init__(expr) +- self.mayReturnEmpty = True +- +- def parseImpl(self, instring, loc, doActions=True): +- # by using self._expr.parse and deleting the contents of the returned ParseResults list +- # we keep any named results that were defined in the FollowedBy expression +- _, ret = self.expr._parse(instring, loc, doActions=doActions) +- del ret[:] +- +- return loc, ret +- +- +-class PrecededBy(ParseElementEnhance): +- """Lookbehind matching of the given parse expression. +- ``PrecededBy`` does not advance the parsing position within the +- input string, it only verifies that the specified parse expression +- matches prior to the current position. ``PrecededBy`` always +- returns a null token list, but if a results name is defined on the +- given expression, it is returned. +- +- Parameters: +- +- - expr - expression that must match prior to the current parse +- location +- - retreat - (default= ``None``) - (int) maximum number of characters +- to lookbehind prior to the current parse location +- +- If the lookbehind expression is a string, :class:`Literal`, +- :class:`Keyword`, or a :class:`Word` or :class:`CharsNotIn` +- with a specified exact or maximum length, then the retreat +- parameter is not required. Otherwise, retreat must be specified to +- give a maximum number of characters to look back from +- the current parse position for a lookbehind match. 
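
# ---- editorial aside (not part of the patch) ------------------------------
# The example below only uses fixed-length lookbehinds; a sketch of the
# variable-length case, where the retreat window described above is
# required (assuming pyparsing 3.x; names are illustrative):
from pyparsing import PrecededBy, Word, alphas, nums

num_after_word = PrecededBy(Word(alphas), retreat=10) + Word(nums)
print(num_after_word.search_string("answer42 7 x9"))  # -> [['42'], ['9']]
# ----------------------------------------------------------------------------
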
+-
+-    Example::
+-
+-        # VB-style variable names with type prefixes
+-        int_var = PrecededBy("#") + pyparsing_common.identifier
+-        str_var = PrecededBy("$") + pyparsing_common.identifier
+-
+-    """
+-
+-    def __init__(
+-        self, expr: Union[ParserElement, str], retreat: typing.Optional[int] = None
+-    ):
+-        super().__init__(expr)
+-        self.expr = self.expr().leave_whitespace()
+-        self.mayReturnEmpty = True
+-        self.mayIndexError = False
+-        self.exact = False
+-        if isinstance(expr, str_type):
+-            retreat = len(expr)
+-            self.exact = True
+-        elif isinstance(expr, (Literal, Keyword)):
+-            retreat = expr.matchLen
+-            self.exact = True
+-        elif isinstance(expr, (Word, CharsNotIn)) and expr.maxLen != _MAX_INT:
+-            retreat = expr.maxLen
+-            self.exact = True
+-        elif isinstance(expr, PositionToken):
+-            retreat = 0
+-            self.exact = True
+-        self.retreat = retreat
+-        self.errmsg = "not preceded by " + str(expr)
+-        self.skipWhitespace = False
+-        self.parseAction.append(lambda s, l, t: t.__delitem__(slice(None, None)))
+-
+-    def parseImpl(self, instring, loc=0, doActions=True):
+-        if self.exact:
+-            if loc < self.retreat:
+-                raise ParseException(instring, loc, self.errmsg)
+-            start = loc - self.retreat
+-            _, ret = self.expr._parse(instring, start)
+-        else:
+-            # retreat specified a maximum lookbehind window, iterate
+-            test_expr = self.expr + StringEnd()
+-            instring_slice = instring[max(0, loc - self.retreat) : loc]
+-            last_expr = ParseException(instring, loc, self.errmsg)
+-            for offset in range(1, min(loc, self.retreat + 1) + 1):
+-                try:
+-                    # print('trying', offset, instring_slice, repr(instring_slice[loc - offset:]))
+-                    _, ret = test_expr._parse(
+-                        instring_slice, len(instring_slice) - offset
+-                    )
+-                except ParseBaseException as pbe:
+-                    last_expr = pbe
+-                else:
+-                    break
+-            else:
+-                raise last_expr
+-        return loc, ret
+-
+-
+-class Located(ParseElementEnhance):
+-    """
+-    Decorates a returned token with its starting and ending
+-    locations in the input string.
+-
+-    This helper adds the following results names:
+-
+-    - ``locn_start`` - location where matched expression begins
+-    - ``locn_end`` - location where matched expression ends
+-    - ``value`` - the actual parsed results
+-
+-    Be careful if the input text contains ``<TAB>`` characters, you
+-    may want to call :class:`ParserElement.parse_with_tabs`
+-
+-    Example::
+-
+-        wd = Word(alphas)
+-        for match in Located(wd).search_string("ljsdf123lksdjjf123lkkjj1222"):
+-            print(match)
+-
+-    prints::
+-
+-        [0, ['ljsdf'], 5]
+-        [8, ['lksdjjf'], 15]
+-        [18, ['lkkjj'], 23]
+-
+-    """
+-
+-    def parseImpl(self, instring, loc, doActions=True):
+-        start = loc
+-        loc, tokens = self.expr._parse(instring, start, doActions, callPreParse=False)
+-        ret_tokens = ParseResults([start, tokens, loc])
+-        ret_tokens["locn_start"] = start
+-        ret_tokens["value"] = tokens
+-        ret_tokens["locn_end"] = loc
+-        if self.resultsName:
+-            # must return as a list, so that the name will be attached to the complete group
+-            return loc, [ret_tokens]
+-        else:
+-            return loc, ret_tokens
+-
+-
+-class NotAny(ParseElementEnhance):
+-    """
+-    Lookahead to disallow matching with the given parse expression.
+-    ``NotAny`` does *not* advance the parsing position within the
+-    input string, it only verifies that the specified parse expression
+-    does *not* match at the current position. Also, ``NotAny`` does
+-    *not* skip over leading whitespace. ``NotAny`` always returns
+-    a null token list. May be constructed using the ``'~'`` operator.
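An editor's aside (not part of the vendored file): the ``'~'`` shorthand builds exactly this class, so the two spellings below are interchangeable::

    import pyparsing as pp

    ~pp.Literal(",")            # operator form
    pp.NotAny(pp.Literal(","))  # equivalent constructor form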
+- +- Example:: +- +- AND, OR, NOT = map(CaselessKeyword, "AND OR NOT".split()) +- +- # take care not to mistake keywords for identifiers +- ident = ~(AND | OR | NOT) + Word(alphas) +- boolean_term = Opt(NOT) + ident +- +- # very crude boolean expression - to support parenthesis groups and +- # operation hierarchy, use infix_notation +- boolean_expr = boolean_term + ((AND | OR) + boolean_term)[...] +- +- # integers that are followed by "." are actually floats +- integer = Word(nums) + ~Char(".") +- """ +- +- def __init__(self, expr: Union[ParserElement, str]): +- super().__init__(expr) +- # do NOT use self.leave_whitespace(), don't want to propagate to exprs +- # self.leave_whitespace() +- self.skipWhitespace = False +- +- self.mayReturnEmpty = True +- self.errmsg = "Found unwanted token, " + str(self.expr) +- +- def parseImpl(self, instring, loc, doActions=True): +- if self.expr.can_parse_next(instring, loc): +- raise ParseException(instring, loc, self.errmsg, self) +- return loc, [] +- +- def _generateDefaultName(self): +- return "~{" + str(self.expr) + "}" +- +- +-class _MultipleMatch(ParseElementEnhance): +- def __init__( +- self, +- expr: ParserElement, +- stop_on: typing.Optional[Union[ParserElement, str]] = None, +- *, +- stopOn: typing.Optional[Union[ParserElement, str]] = None, +- ): +- super().__init__(expr) +- stopOn = stopOn or stop_on +- self.saveAsList = True +- ender = stopOn +- if isinstance(ender, str_type): +- ender = self._literalStringClass(ender) +- self.stopOn(ender) +- +- def stopOn(self, ender) -> ParserElement: +- if isinstance(ender, str_type): +- ender = self._literalStringClass(ender) +- self.not_ender = ~ender if ender is not None else None +- return self +- +- def parseImpl(self, instring, loc, doActions=True): +- self_expr_parse = self.expr._parse +- self_skip_ignorables = self._skipIgnorables +- check_ender = self.not_ender is not None +- if check_ender: +- try_not_ender = self.not_ender.tryParse +- +- # must be at least one (but first see if we are the stopOn sentinel; +- # if so, fail) +- if check_ender: +- try_not_ender(instring, loc) +- loc, tokens = self_expr_parse(instring, loc, doActions) +- try: +- hasIgnoreExprs = not not self.ignoreExprs +- while 1: +- if check_ender: +- try_not_ender(instring, loc) +- if hasIgnoreExprs: +- preloc = self_skip_ignorables(instring, loc) +- else: +- preloc = loc +- loc, tmptokens = self_expr_parse(instring, preloc, doActions) +- if tmptokens or tmptokens.haskeys(): +- tokens += tmptokens +- except (ParseException, IndexError): +- pass +- +- return loc, tokens +- +- def _setResultsName(self, name, listAllMatches=False): +- if ( +- __diag__.warn_ungrouped_named_tokens_in_collection +- and Diagnostics.warn_ungrouped_named_tokens_in_collection +- not in self.suppress_warnings_ +- ): +- for e in [self.expr] + self.expr.recurse(): +- if ( +- isinstance(e, ParserElement) +- and e.resultsName +- and Diagnostics.warn_ungrouped_named_tokens_in_collection +- not in e.suppress_warnings_ +- ): +- warnings.warn( +- "{}: setting results name {!r} on {} expression " +- "collides with {!r} on contained expression".format( +- "warn_ungrouped_named_tokens_in_collection", +- name, +- type(self).__name__, +- e.resultsName, +- ), +- stacklevel=3, +- ) +- +- return super()._setResultsName(name, listAllMatches) +- +- +-class OneOrMore(_MultipleMatch): +- """ +- Repetition of one or more of the given expression. 
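In pyparsing 3 the bracketed repetition shorthand constructs this same class, which several examples below also rely on; a small sketch (editor's addition)::

    import pyparsing as pp

    word = pp.Word(pp.alphas)
    print(type(word[1, ...]).__name__)  # -> OneOrMore, i.e. expr[1, ...] builds OneOrMore(expr)
    print(type(word[...]).__name__)     # -> ZeroOrMore, i.e. expr[...] builds ZeroOrMore(expr)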
+- +- Parameters: +- - expr - expression that must match one or more times +- - stop_on - (default= ``None``) - expression for a terminating sentinel +- (only required if the sentinel would ordinarily match the repetition +- expression) +- +- Example:: +- +- data_word = Word(alphas) +- label = data_word + FollowedBy(':') +- attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).set_parse_action(' '.join)) +- +- text = "shape: SQUARE posn: upper left color: BLACK" +- attr_expr[1, ...].parse_string(text).pprint() # Fail! read 'color' as data instead of next label -> [['shape', 'SQUARE color']] +- +- # use stop_on attribute for OneOrMore to avoid reading label string as part of the data +- attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stop_on=label).set_parse_action(' '.join)) +- OneOrMore(attr_expr).parse_string(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']] +- +- # could also be written as +- (attr_expr * (1,)).parse_string(text).pprint() +- """ +- +- def _generateDefaultName(self): +- return "{" + str(self.expr) + "}..." +- +- +-class ZeroOrMore(_MultipleMatch): +- """ +- Optional repetition of zero or more of the given expression. +- +- Parameters: +- - ``expr`` - expression that must match zero or more times +- - ``stop_on`` - expression for a terminating sentinel +- (only required if the sentinel would ordinarily match the repetition +- expression) - (default= ``None``) +- +- Example: similar to :class:`OneOrMore` +- """ +- +- def __init__( +- self, +- expr: ParserElement, +- stop_on: typing.Optional[Union[ParserElement, str]] = None, +- *, +- stopOn: typing.Optional[Union[ParserElement, str]] = None, +- ): +- super().__init__(expr, stopOn=stopOn or stop_on) +- self.mayReturnEmpty = True +- +- def parseImpl(self, instring, loc, doActions=True): +- try: +- return super().parseImpl(instring, loc, doActions) +- except (ParseException, IndexError): +- return loc, ParseResults([], name=self.resultsName) +- +- def _generateDefaultName(self): +- return "[" + str(self.expr) + "]..." +- +- +-class _NullToken: +- def __bool__(self): +- return False +- +- def __str__(self): +- return "" +- +- +-class Opt(ParseElementEnhance): +- """ +- Optional matching of the given expression. +- +- Parameters: +- - ``expr`` - expression that must match zero or more times +- - ``default`` (optional) - value to be returned if the optional expression is not found. 
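A brief sketch of the ``default`` parameter in isolation (editor's illustration)::

    import pyparsing as pp

    suffix = pp.Opt(pp.Word(pp.nums), default="0")
    print((pp.Word(pp.alphas) + suffix).parse_string("item"))
    # -> ['item', '0'] - the default stands in for the missing number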
+- +- Example:: +- +- # US postal code can be a 5-digit zip, plus optional 4-digit qualifier +- zip = Combine(Word(nums, exact=5) + Opt('-' + Word(nums, exact=4))) +- zip.run_tests(''' +- # traditional ZIP code +- 12345 +- +- # ZIP+4 form +- 12101-0001 +- +- # invalid ZIP +- 98765- +- ''') +- +- prints:: +- +- # traditional ZIP code +- 12345 +- ['12345'] +- +- # ZIP+4 form +- 12101-0001 +- ['12101-0001'] +- +- # invalid ZIP +- 98765- +- ^ +- FAIL: Expected end of text (at char 5), (line:1, col:6) +- """ +- +- __optionalNotMatched = _NullToken() +- +- def __init__( +- self, expr: Union[ParserElement, str], default: Any = __optionalNotMatched +- ): +- super().__init__(expr, savelist=False) +- self.saveAsList = self.expr.saveAsList +- self.defaultValue = default +- self.mayReturnEmpty = True +- +- def parseImpl(self, instring, loc, doActions=True): +- self_expr = self.expr +- try: +- loc, tokens = self_expr._parse(instring, loc, doActions, callPreParse=False) +- except (ParseException, IndexError): +- default_value = self.defaultValue +- if default_value is not self.__optionalNotMatched: +- if self_expr.resultsName: +- tokens = ParseResults([default_value]) +- tokens[self_expr.resultsName] = default_value +- else: +- tokens = [default_value] +- else: +- tokens = [] +- return loc, tokens +- +- def _generateDefaultName(self): +- inner = str(self.expr) +- # strip off redundant inner {}'s +- while len(inner) > 1 and inner[0 :: len(inner) - 1] == "{}": +- inner = inner[1:-1] +- return "[" + inner + "]" +- +- +-Optional = Opt +- +- +-class SkipTo(ParseElementEnhance): +- """ +- Token for skipping over all undefined text until the matched +- expression is found. +- +- Parameters: +- - ``expr`` - target expression marking the end of the data to be skipped +- - ``include`` - if ``True``, the target expression is also parsed +- (the skipped text and target expression are returned as a 2-element +- list) (default= ``False``). 
+-    - ``ignore`` - (default= ``None``) used to define grammars (typically quoted strings and
+-      comments) that might contain false matches to the target expression
+-    - ``fail_on`` - (default= ``None``) define expressions that are not allowed to be
+-      included in the skipped test; if found before the target expression is found,
+-      the :class:`SkipTo` is not a match
+-
+-    Example::
+-
+-        report = '''
+-            Outstanding Issues Report - 1 Jan 2000
+-
+-               # | Severity | Description                               |  Days Open
+-            -----+----------+-------------------------------------------+-----------
+-             101 | Critical | Intermittent system crash                 |          6
+-              94 | Cosmetic | Spelling error on Login ('log|n')         |         14
+-              79 | Minor    | System slow when running too many reports |         47
+-            '''
+-        integer = Word(nums)
+-        SEP = Suppress('|')
+-        # use SkipTo to simply match everything up until the next SEP
+-        # - ignore quoted strings, so that a '|' character inside a quoted string does not match
+-        # - parse action will call token.strip() for each matched token, i.e., the description body
+-        string_data = SkipTo(SEP, ignore=quoted_string)
+-        string_data.set_parse_action(token_map(str.strip))
+-        ticket_expr = (integer("issue_num") + SEP
+-                       + string_data("sev") + SEP
+-                       + string_data("desc") + SEP
+-                       + integer("days_open"))
+-
+-        for tkt in ticket_expr.search_string(report):
+-            print(tkt.dump())
+-
+-    prints::
+-
+-        ['101', 'Critical', 'Intermittent system crash', '6']
+-        - days_open: '6'
+-        - desc: 'Intermittent system crash'
+-        - issue_num: '101'
+-        - sev: 'Critical'
+-        ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14']
+-        - days_open: '14'
+-        - desc: "Spelling error on Login ('log|n')"
+-        - issue_num: '94'
+-        - sev: 'Cosmetic'
+-        ['79', 'Minor', 'System slow when running too many reports', '47']
+-        - days_open: '47'
+-        - desc: 'System slow when running too many reports'
+-        - issue_num: '79'
+-        - sev: 'Minor'
+-    """
+-
+-    def __init__(
+-        self,
+-        other: Union[ParserElement, str],
+-        include: bool = False,
+-        ignore: bool = None,
+-        fail_on: typing.Optional[Union[ParserElement, str]] = None,
+-        *,
+-        failOn: Union[ParserElement, str] = None,
+-    ):
+-        super().__init__(other)
+-        failOn = failOn or fail_on
+-        self.ignoreExpr = ignore
+-        self.mayReturnEmpty = True
+-        self.mayIndexError = False
+-        self.includeMatch = include
+-        self.saveAsList = False
+-        if isinstance(failOn, str_type):
+-            self.failOn = self._literalStringClass(failOn)
+-        else:
+-            self.failOn = failOn
+-        self.errmsg = "No match found for " + str(self.expr)
+-
+-    def parseImpl(self, instring, loc, doActions=True):
+-        startloc = loc
+-        instrlen = len(instring)
+-        self_expr_parse = self.expr._parse
+-        self_failOn_canParseNext = (
+-            self.failOn.canParseNext if self.failOn is not None else None
+-        )
+-        self_ignoreExpr_tryParse = (
+-            self.ignoreExpr.tryParse if self.ignoreExpr is not None else None
+-        )
+-
+-        tmploc = loc
+-        while tmploc <= instrlen:
+-            if self_failOn_canParseNext is not None:
+-                # break if failOn expression matches
+-                if self_failOn_canParseNext(instring, tmploc):
+-                    break
+-
+-            if self_ignoreExpr_tryParse is not None:
+-                # advance past ignore expressions
+-                while 1:
+-                    try:
+-                        tmploc = self_ignoreExpr_tryParse(instring, tmploc)
+-                    except ParseBaseException:
+-                        break
+-
+-            try:
+-                self_expr_parse(instring, tmploc, doActions=False, callPreParse=False)
+-            except (ParseException, IndexError):
+-                # no match, advance loc in string
+-                tmploc += 1
+-            else:
+-                # matched skipto expr, done
+-                break
+-
+-        else:
+-            # ran off the end of the input string without matching skipto
expr, fail +- raise ParseException(instring, loc, self.errmsg, self) +- +- # build up return values +- loc = tmploc +- skiptext = instring[startloc:loc] +- skipresult = ParseResults(skiptext) +- +- if self.includeMatch: +- loc, mat = self_expr_parse(instring, loc, doActions, callPreParse=False) +- skipresult += mat +- +- return loc, skipresult +- +- +-class Forward(ParseElementEnhance): +- """ +- Forward declaration of an expression to be defined later - +- used for recursive grammars, such as algebraic infix notation. +- When the expression is known, it is assigned to the ``Forward`` +- variable using the ``'<<'`` operator. +- +- Note: take care when assigning to ``Forward`` not to overlook +- precedence of operators. +- +- Specifically, ``'|'`` has a lower precedence than ``'<<'``, so that:: +- +- fwd_expr << a | b | c +- +- will actually be evaluated as:: +- +- (fwd_expr << a) | b | c +- +- thereby leaving b and c out as parseable alternatives. It is recommended that you +- explicitly group the values inserted into the ``Forward``:: +- +- fwd_expr << (a | b | c) +- +- Converting to use the ``'<<='`` operator instead will avoid this problem. +- +- See :class:`ParseResults.pprint` for an example of a recursive +- parser created using ``Forward``. +- """ +- +- def __init__(self, other: typing.Optional[Union[ParserElement, str]] = None): +- self.caller_frame = traceback.extract_stack(limit=2)[0] +- super().__init__(other, savelist=False) +- self.lshift_line = None +- +- def __lshift__(self, other): +- if hasattr(self, "caller_frame"): +- del self.caller_frame +- if isinstance(other, str_type): +- other = self._literalStringClass(other) +- self.expr = other +- self.mayIndexError = self.expr.mayIndexError +- self.mayReturnEmpty = self.expr.mayReturnEmpty +- self.set_whitespace_chars( +- self.expr.whiteChars, copy_defaults=self.expr.copyDefaultWhiteChars +- ) +- self.skipWhitespace = self.expr.skipWhitespace +- self.saveAsList = self.expr.saveAsList +- self.ignoreExprs.extend(self.expr.ignoreExprs) +- self.lshift_line = traceback.extract_stack(limit=2)[-2] +- return self +- +- def __ilshift__(self, other): +- return self << other +- +- def __or__(self, other): +- caller_line = traceback.extract_stack(limit=2)[-2] +- if ( +- __diag__.warn_on_match_first_with_lshift_operator +- and caller_line == self.lshift_line +- and Diagnostics.warn_on_match_first_with_lshift_operator +- not in self.suppress_warnings_ +- ): +- warnings.warn( +- "using '<<' operator with '|' is probably an error, use '<<='", +- stacklevel=2, +- ) +- ret = super().__or__(other) +- return ret +- +- def __del__(self): +- # see if we are getting dropped because of '=' reassignment of var instead of '<<=' or '<<' +- if ( +- self.expr is None +- and __diag__.warn_on_assignment_to_Forward +- and Diagnostics.warn_on_assignment_to_Forward not in self.suppress_warnings_ +- ): +- warnings.warn_explicit( +- "Forward defined here but no expression attached later using '<<=' or '<<'", +- UserWarning, +- filename=self.caller_frame.filename, +- lineno=self.caller_frame.lineno, +- ) +- +- def parseImpl(self, instring, loc, doActions=True): +- if ( +- self.expr is None +- and __diag__.warn_on_parse_using_empty_Forward +- and Diagnostics.warn_on_parse_using_empty_Forward +- not in self.suppress_warnings_ +- ): +- # walk stack until parse_string, scan_string, search_string, or transform_string is found +- parse_fns = [ +- "parse_string", +- "scan_string", +- "search_string", +- "transform_string", +- ] +- tb = traceback.extract_stack(limit=200) 
+- for i, frm in enumerate(reversed(tb), start=1): +- if frm.name in parse_fns: +- stacklevel = i + 1 +- break +- else: +- stacklevel = 2 +- warnings.warn( +- "Forward expression was never assigned a value, will not parse any input", +- stacklevel=stacklevel, +- ) +- if not ParserElement._left_recursion_enabled: +- return super().parseImpl(instring, loc, doActions) +- # ## Bounded Recursion algorithm ## +- # Recursion only needs to be processed at ``Forward`` elements, since they are +- # the only ones that can actually refer to themselves. The general idea is +- # to handle recursion stepwise: We start at no recursion, then recurse once, +- # recurse twice, ..., until more recursion offers no benefit (we hit the bound). +- # +- # The "trick" here is that each ``Forward`` gets evaluated in two contexts +- # - to *match* a specific recursion level, and +- # - to *search* the bounded recursion level +- # and the two run concurrently. The *search* must *match* each recursion level +- # to find the best possible match. This is handled by a memo table, which +- # provides the previous match to the next level match attempt. +- # +- # See also "Left Recursion in Parsing Expression Grammars", Medeiros et al. +- # +- # There is a complication since we not only *parse* but also *transform* via +- # actions: We do not want to run the actions too often while expanding. Thus, +- # we expand using `doActions=False` and only run `doActions=True` if the next +- # recursion level is acceptable. +- with ParserElement.recursion_lock: +- memo = ParserElement.recursion_memos +- try: +- # we are parsing at a specific recursion expansion - use it as-is +- prev_loc, prev_result = memo[loc, self, doActions] +- if isinstance(prev_result, Exception): +- raise prev_result +- return prev_loc, prev_result.copy() +- except KeyError: +- act_key = (loc, self, True) +- peek_key = (loc, self, False) +- # we are searching for the best recursion expansion - keep on improving +- # both `doActions` cases must be tracked separately here! 
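The memo-table loop below is easiest to follow with a tiny left-recursive grammar in mind; a minimal sketch (editor's illustration, assuming pyparsing 3.x with left recursion enabled)::

    import pyparsing as pp

    pp.ParserElement.enable_left_recursion()
    expr = pp.Forward()
    expr <<= expr + "+" + pp.Word(pp.nums) | pp.Word(pp.nums)
    print(expr.parse_string("1+2+3"))  # -> ['1', '+', '2', '+', '3']

The memo is seeded with a failure, then successive rounds match "1", "1+2", and "1+2+3", stopping once a round no longer advances the match location.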
+- prev_loc, prev_peek = memo[peek_key] = ( +- loc - 1, +- ParseException( +- instring, loc, "Forward recursion without base case", self +- ), +- ) +- if doActions: +- memo[act_key] = memo[peek_key] +- while True: +- try: +- new_loc, new_peek = super().parseImpl(instring, loc, False) +- except ParseException: +- # we failed before getting any match – do not hide the error +- if isinstance(prev_peek, Exception): +- raise +- new_loc, new_peek = prev_loc, prev_peek +- # the match did not get better: we are done +- if new_loc <= prev_loc: +- if doActions: +- # replace the match for doActions=False as well, +- # in case the action did backtrack +- prev_loc, prev_result = memo[peek_key] = memo[act_key] +- del memo[peek_key], memo[act_key] +- return prev_loc, prev_result.copy() +- del memo[peek_key] +- return prev_loc, prev_peek.copy() +- # the match did get better: see if we can improve further +- else: +- if doActions: +- try: +- memo[act_key] = super().parseImpl(instring, loc, True) +- except ParseException as e: +- memo[peek_key] = memo[act_key] = (new_loc, e) +- raise +- prev_loc, prev_peek = memo[peek_key] = new_loc, new_peek +- +- def leave_whitespace(self, recursive: bool = True) -> ParserElement: +- self.skipWhitespace = False +- return self +- +- def ignore_whitespace(self, recursive: bool = True) -> ParserElement: +- self.skipWhitespace = True +- return self +- +- def streamline(self) -> ParserElement: +- if not self.streamlined: +- self.streamlined = True +- if self.expr is not None: +- self.expr.streamline() +- return self +- +- def validate(self, validateTrace=None) -> None: +- if validateTrace is None: +- validateTrace = [] +- +- if self not in validateTrace: +- tmp = validateTrace[:] + [self] +- if self.expr is not None: +- self.expr.validate(tmp) +- self._checkRecursion([]) +- +- def _generateDefaultName(self): +- # Avoid infinite recursion by setting a temporary _defaultName +- self._defaultName = ": ..." +- +- # Use the string representation of main expression. +- retString = "..." +- try: +- if self.expr is not None: +- retString = str(self.expr)[:1000] +- else: +- retString = "None" +- finally: +- return self.__class__.__name__ + ": " + retString +- +- def copy(self) -> ParserElement: +- if self.expr is not None: +- return super().copy() +- else: +- ret = Forward() +- ret <<= self +- return ret +- +- def _setResultsName(self, name, list_all_matches=False): +- if ( +- __diag__.warn_name_set_on_empty_Forward +- and Diagnostics.warn_name_set_on_empty_Forward +- not in self.suppress_warnings_ +- ): +- if self.expr is None: +- warnings.warn( +- "{}: setting results name {!r} on {} expression " +- "that has no contained expression".format( +- "warn_name_set_on_empty_Forward", name, type(self).__name__ +- ), +- stacklevel=3, +- ) +- +- return super()._setResultsName(name, list_all_matches) +- +- ignoreWhitespace = ignore_whitespace +- leaveWhitespace = leave_whitespace +- +- +-class TokenConverter(ParseElementEnhance): +- """ +- Abstract subclass of :class:`ParseExpression`, for converting parsed results. +- """ +- +- def __init__(self, expr: Union[ParserElement, str], savelist=False): +- super().__init__(expr) # , savelist) +- self.saveAsList = False +- +- +-class Combine(TokenConverter): +- """Converter to concatenate all matching tokens to a single string. +- By default, the matching patterns must also be contiguous in the +- input string; this can be disabled by specifying +- ``'adjacent=False'`` in the constructor. +- +- Example:: +- +- real = Word(nums) + '.' 
+ Word(nums) +- print(real.parse_string('3.1416')) # -> ['3', '.', '1416'] +- # will also erroneously match the following +- print(real.parse_string('3. 1416')) # -> ['3', '.', '1416'] +- +- real = Combine(Word(nums) + '.' + Word(nums)) +- print(real.parse_string('3.1416')) # -> ['3.1416'] +- # no match when there are internal spaces +- print(real.parse_string('3. 1416')) # -> Exception: Expected W:(0123...) +- """ +- +- def __init__( +- self, +- expr: ParserElement, +- join_string: str = "", +- adjacent: bool = True, +- *, +- joinString: typing.Optional[str] = None, +- ): +- super().__init__(expr) +- joinString = joinString if joinString is not None else join_string +- # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself +- if adjacent: +- self.leave_whitespace() +- self.adjacent = adjacent +- self.skipWhitespace = True +- self.joinString = joinString +- self.callPreparse = True +- +- def ignore(self, other) -> ParserElement: +- if self.adjacent: +- ParserElement.ignore(self, other) +- else: +- super().ignore(other) +- return self +- +- def postParse(self, instring, loc, tokenlist): +- retToks = tokenlist.copy() +- del retToks[:] +- retToks += ParseResults( +- ["".join(tokenlist._asStringList(self.joinString))], modal=self.modalResults +- ) +- +- if self.resultsName and retToks.haskeys(): +- return [retToks] +- else: +- return retToks +- +- +-class Group(TokenConverter): +- """Converter to return the matched tokens as a list - useful for +- returning tokens of :class:`ZeroOrMore` and :class:`OneOrMore` expressions. +- +- The optional ``aslist`` argument when set to True will return the +- parsed tokens as a Python list instead of a pyparsing ParseResults. +- +- Example:: +- +- ident = Word(alphas) +- num = Word(nums) +- term = ident | num +- func = ident + Opt(delimited_list(term)) +- print(func.parse_string("fn a, b, 100")) +- # -> ['fn', 'a', 'b', '100'] +- +- func = ident + Group(Opt(delimited_list(term))) +- print(func.parse_string("fn a, b, 100")) +- # -> ['fn', ['a', 'b', '100']] +- """ +- +- def __init__(self, expr: ParserElement, aslist: bool = False): +- super().__init__(expr) +- self.saveAsList = True +- self._asPythonList = aslist +- +- def postParse(self, instring, loc, tokenlist): +- if self._asPythonList: +- return ParseResults.List( +- tokenlist.asList() +- if isinstance(tokenlist, ParseResults) +- else list(tokenlist) +- ) +- else: +- return [tokenlist] +- +- +-class Dict(TokenConverter): +- """Converter to return a repetitive expression as a list, but also +- as a dictionary. Each element can also be referenced using the first +- token in the expression as its key. Useful for tabular report +- scraping when the first column can be used as a item key. +- +- The optional ``asdict`` argument when set to True will return the +- parsed tokens as a Python dict instead of a pyparsing ParseResults. 
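A compact sketch of the auto-assigned names described above (editor's illustration)::

    import pyparsing as pp

    pair = pp.Group(pp.Word(pp.alphas) + pp.Suppress(":") + pp.Word(pp.nums))
    result = pp.Dict(pair[1, ...]).parse_string("x: 1 y: 2")
    print(result["x"], result.as_dict())  # -> 1 {'x': '1', 'y': '2'}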
+-
+-    Example::
+-
+-        data_word = Word(alphas)
+-        label = data_word + FollowedBy(':')
+-
+-        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
+-        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stop_on=label).set_parse_action(' '.join))
+-
+-        # print attributes as plain groups
+-        print(attr_expr[1, ...].parse_string(text).dump())
+-
+-        # instead of OneOrMore(expr), parse using Dict(Group(expr)[1, ...]) - Dict will auto-assign names
+-        result = Dict(Group(attr_expr)[1, ...]).parse_string(text)
+-        print(result.dump())
+-
+-        # access named fields as dict entries, or output as dict
+-        print(result['shape'])
+-        print(result.as_dict())
+-
+-    prints::
+-
+-        ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap']
+-        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
+-        - color: 'light blue'
+-        - posn: 'upper left'
+-        - shape: 'SQUARE'
+-        - texture: 'burlap'
+-        SQUARE
+-        {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'}
+-
+-    See more examples at :class:`ParseResults` of accessing fields by results name.
+-    """
+-
+-    def __init__(self, expr: ParserElement, asdict: bool = False):
+-        super().__init__(expr)
+-        self.saveAsList = True
+-        self._asPythonDict = asdict
+-
+-    def postParse(self, instring, loc, tokenlist):
+-        for i, tok in enumerate(tokenlist):
+-            if len(tok) == 0:
+-                continue
+-
+-            ikey = tok[0]
+-            if isinstance(ikey, int):
+-                ikey = str(ikey).strip()
+-
+-            if len(tok) == 1:
+-                tokenlist[ikey] = _ParseResultsWithOffset("", i)
+-
+-            elif len(tok) == 2 and not isinstance(tok[1], ParseResults):
+-                tokenlist[ikey] = _ParseResultsWithOffset(tok[1], i)
+-
+-            else:
+-                try:
+-                    dictvalue = tok.copy()  # ParseResults(i)
+-                except Exception:
+-                    exc = TypeError(
+-                        "could not extract dict values from parsed results"
+-                        " - Dict expression must contain Grouped expressions"
+-                    )
+-                    raise exc from None
+-
+-                del dictvalue[0]
+-
+-                if len(dictvalue) != 1 or (
+-                    isinstance(dictvalue, ParseResults) and dictvalue.haskeys()
+-                ):
+-                    tokenlist[ikey] = _ParseResultsWithOffset(dictvalue, i)
+-                else:
+-                    tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0], i)
+-
+-        if self._asPythonDict:
+-            return [tokenlist.as_dict()] if self.resultsName else tokenlist.as_dict()
+-        else:
+-            return [tokenlist] if self.resultsName else tokenlist
+-
+-
+-class Suppress(TokenConverter):
+-    """Converter for ignoring the results of a parsed expression.
+-
+-    Example::
+-
+-        source = "a, b, c,d"
+-        wd = Word(alphas)
+-        wd_list1 = wd + (',' + wd)[...]
+-        print(wd_list1.parse_string(source))
+-
+-        # often, delimiters that are useful during parsing are just in the
+-        # way afterward - use Suppress to keep them out of the parsed output
+-        wd_list2 = wd + (Suppress(',') + wd)[...]
+-        print(wd_list2.parse_string(source))
+-
+-        # Skipped text (using '...') can be suppressed as well
+-        source = "lead in START relevant text END trailing text"
+-        start_marker = Keyword("START")
+-        end_marker = Keyword("END")
+-        find_body = Suppress(...) + start_marker + ... + end_marker
+-        print(find_body.parse_string(source))
+-
+-    prints::
+-
+-        ['a', ',', 'b', ',', 'c', ',', 'd']
+-        ['a', 'b', 'c', 'd']
+-        ['START', 'relevant text ', 'END']
+-
+-    (See also :class:`delimited_list`.)
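Note that ``element.suppress()`` wraps an element the same way; a short sketch (editor's addition)::

    import pyparsing as pp

    wd = pp.Word(pp.alphas)
    wd_list = wd + (pp.Literal(",").suppress() + wd)[...]  # same as Suppress(',')
    print(wd_list.parse_string("a, b, c"))  # -> ['a', 'b', 'c']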
+- """ +- +- def __init__(self, expr: Union[ParserElement, str], savelist: bool = False): +- if expr is ...: +- expr = _PendingSkip(NoMatch()) +- super().__init__(expr) +- +- def __add__(self, other) -> "ParserElement": +- if isinstance(self.expr, _PendingSkip): +- return Suppress(SkipTo(other)) + other +- else: +- return super().__add__(other) +- +- def __sub__(self, other) -> "ParserElement": +- if isinstance(self.expr, _PendingSkip): +- return Suppress(SkipTo(other)) - other +- else: +- return super().__sub__(other) +- +- def postParse(self, instring, loc, tokenlist): +- return [] +- +- def suppress(self) -> ParserElement: +- return self +- +- +-def trace_parse_action(f: ParseAction) -> ParseAction: +- """Decorator for debugging parse actions. +- +- When the parse action is called, this decorator will print +- ``">> entering method-name(line:, , )"``. +- When the parse action completes, the decorator will print +- ``"<<"`` followed by the returned value, or any exception that the parse action raised. +- +- Example:: +- +- wd = Word(alphas) +- +- @trace_parse_action +- def remove_duplicate_chars(tokens): +- return ''.join(sorted(set(''.join(tokens)))) +- +- wds = wd[1, ...].set_parse_action(remove_duplicate_chars) +- print(wds.parse_string("slkdjs sld sldd sdlf sdljf")) +- +- prints:: +- +- >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {})) +- < 3: +- thisFunc = paArgs[0].__class__.__name__ + "." + thisFunc +- sys.stderr.write( +- ">>entering {}(line: {!r}, {}, {!r})\n".format(thisFunc, line(l, s), l, t) +- ) +- try: +- ret = f(*paArgs) +- except Exception as exc: +- sys.stderr.write("< str: +- r"""Helper to easily define string ranges for use in :class:`Word` +- construction. Borrows syntax from regexp ``'[]'`` string range +- definitions:: +- +- srange("[0-9]") -> "0123456789" +- srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" +- srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" +- +- The input string must be enclosed in []'s, and the returned string +- is the expanded character set joined into a single string. The +- values enclosed in the []'s may be: +- +- - a single character +- - an escaped character with a leading backslash (such as ``\-`` +- or ``\]``) +- - an escaped hex character with a leading ``'\x'`` +- (``\x21``, which is a ``'!'`` character) (``\0x##`` +- is also supported for backwards compatibility) +- - an escaped octal character with a leading ``'\0'`` +- (``\041``, which is a ``'!'`` character) +- - a range of any of the above, separated by a dash (``'a-z'``, +- etc.) +- - any combination of the above (``'aeiouy'``, +- ``'a-zA-Z0-9_$'``, etc.) +- """ +- _expanded = ( +- lambda p: p +- if not isinstance(p, ParseResults) +- else "".join(chr(c) for c in range(ord(p[0]), ord(p[1]) + 1)) +- ) +- try: +- return "".join(_expanded(part) for part in _reBracketExpr.parse_string(s).body) +- except Exception: +- return "" +- +- +-def token_map(func, *args) -> ParseAction: +- """Helper to define a parse action by mapping a function to all +- elements of a :class:`ParseResults` list. If any additional args are passed, +- they are forwarded to the given function as additional arguments +- after the token, as in +- ``hex_integer = Word(hexnums).set_parse_action(token_map(int, 16))``, +- which will convert the parsed data to an integer using base 16. 
+-
+-    Example (compare the last example to the one in :class:`ParserElement.transform_string`)::
+-
+-        hex_ints = Word(hexnums)[1, ...].set_parse_action(token_map(int, 16))
+-        hex_ints.run_tests('''
+-            00 11 22 aa FF 0a 0d 1a
+-            ''')
+-
+-        upperword = Word(alphas).set_parse_action(token_map(str.upper))
+-        upperword[1, ...].run_tests('''
+-            my kingdom for a horse
+-            ''')
+-
+-        wd = Word(alphas).set_parse_action(token_map(str.title))
+-        wd[1, ...].set_parse_action(' '.join).run_tests('''
+-            now is the winter of our discontent made glorious summer by this sun of york
+-            ''')
+-
+-    prints::
+-
+-        00 11 22 aa FF 0a 0d 1a
+-        [0, 17, 34, 170, 255, 10, 13, 26]
+-
+-        my kingdom for a horse
+-        ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE']
+-
+-        now is the winter of our discontent made glorious summer by this sun of york
+-        ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York']
+-    """
+-
+-    def pa(s, l, t):
+-        return [func(tokn, *args) for tokn in t]
+-
+-    func_name = getattr(func, "__name__", getattr(func, "__class__").__name__)
+-    pa.__name__ = func_name
+-
+-    return pa
+-
+-
+-def autoname_elements() -> None:
+-    """
+-    Utility to simplify mass-naming of parser elements, for
+-    generating railroad diagram with named subdiagrams.
+-    """
+-    for name, var in sys._getframe().f_back.f_locals.items():
+-        if isinstance(var, ParserElement) and not var.customName:
+-            var.set_name(name)
+-
+-
+-dbl_quoted_string = Combine(
+-    Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*') + '"'
+-).set_name("string enclosed in double quotes")
+-
+-sgl_quoted_string = Combine(
+-    Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'"
+-).set_name("string enclosed in single quotes")
+-
+-quoted_string = Combine(
+-    Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*') + '"'
+-    | Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'"
+-).set_name("quotedString using single or double quotes")
+-
+-unicode_string = Combine("u" + quoted_string.copy()).set_name("unicode string literal")
+-
+-
+-alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]")
+-punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]")
+-
+-# build list of built-in expressions, for future reference if a global default value
+-# gets updated
+-_builtin_exprs: List[ParserElement] = [
+-    v for v in vars().values() if isinstance(v, ParserElement)
+-]
+-
+-# backward compatibility names
+-tokenMap = token_map
+-conditionAsParseAction = condition_as_parse_action
+-nullDebugAction = null_debug_action
+-sglQuotedString = sgl_quoted_string
+-dblQuotedString = dbl_quoted_string
+-quotedString = quoted_string
+-unicodeString = unicode_string
+-lineStart = line_start
+-lineEnd = line_end
+-stringStart = string_start
+-stringEnd = string_end
+-traceParseAction = trace_parse_action
+diff --git a/src/poetry/core/_vendor/pyparsing/diagram/__init__.py b/src/poetry/core/_vendor/pyparsing/diagram/__init__.py
+deleted file mode 100644
+index 8986447..0000000
+--- a/src/poetry/core/_vendor/pyparsing/diagram/__init__.py
++++ /dev/null
+@@ -1,642 +0,0 @@
+-import railroad
+-import pyparsing
+-import typing
+-from typing import (
+-    List,
+-    NamedTuple,
+-    Generic,
+-    TypeVar,
+-    Dict,
+-    Callable,
+-    Set,
+-    Iterable,
+-)
+-from jinja2 import Template
+-from io import StringIO
+-import inspect
+-
+-
+-jinja2_template_source = """\
+-<!DOCTYPE html>
+-<html>
+-<head>
+-    {% if not head %}
+-        <style>
+-            .railroad-heading {
+-                font-family: monospace;
+-            }
+-        </style>
+-    {% else %}
+-    {{ head | safe }}
+-    {% endif %}
+-</head>
+-<body>
+-{{ body | safe }}
+-{% for diagram in diagrams %}
+-    <div class="railroad-group">
+-        <h1 class="railroad-heading">{{ diagram.title }}</h1>
+-        <div>{{ diagram.text }}</div>
+-        <div class="railroad-svg">
+-            {{ diagram.svg }}
+-        </div>
+-    </div>
+-{% endfor %}
+-</body>
+-</html>
+-"""
+-
+-template = Template(jinja2_template_source)
+-
+-# Note: ideally this would be a dataclass, but we're supporting Python 3.5+ so we can't do this yet
+-NamedDiagram = NamedTuple(
+-    "NamedDiagram",
+-    [("name", str), ("diagram", typing.Optional[railroad.DiagramItem]), ("index", int)],
+-)
+-"""
+-A simple structure for associating a name with a railroad diagram
+-"""
+-
+-T = TypeVar("T")
+-
+-
+-class EachItem(railroad.Group):
+-    """
+-    Custom railroad item to compose a:
+-    - Group containing a
+-      - OneOrMore containing a
+-        - Choice of the elements in the Each
+-    with the group label indicating that all must be matched
+-    """
+-
+-    all_label = "[ALL]"
+-
+-    def __init__(self, *items):
+-        choice_item = railroad.Choice(len(items) - 1, *items)
+-        one_or_more_item = railroad.OneOrMore(item=choice_item)
+-        super().__init__(one_or_more_item, label=self.all_label)
+-
+-
+-class AnnotatedItem(railroad.Group):
+-    """
+-    Simple subclass of Group that creates an annotation label
+-    """
+-
+-    def __init__(self, label: str, item):
+-        super().__init__(item=item, label="[{}]".format(label) if label else label)
+-
+-
+-class EditablePartial(Generic[T]):
+-    """
+-    Acts like a functools.partial, but can be edited. In other words, it represents a type that hasn't yet been
+-    constructed.
+-    """
+-
+-    # We need this here because the railroad constructors actually transform the data, so can't be called until the
+-    # entire tree is assembled
+-
+-    def __init__(self, func: Callable[..., T], args: list, kwargs: dict):
+-        self.func = func
+-        self.args = args
+-        self.kwargs = kwargs
+-
+-    @classmethod
+-    def from_call(cls, func: Callable[..., T], *args, **kwargs) -> "EditablePartial[T]":
+-        """
+-        If you call this function in the same way that you would call the constructor, it will store the arguments
+-        as you expect. For example EditablePartial.from_call(Fraction, 1, 3)() == Fraction(1, 3)
+-        """
+-        return EditablePartial(func=func, args=list(args), kwargs=kwargs)
+-
+-    @property
+-    def name(self):
+-        return self.kwargs["name"]
+-
+-    def __call__(self) -> T:
+-        """
+-        Evaluate the partial and return the result
+-        """
+-        args = self.args.copy()
+-        kwargs = self.kwargs.copy()
+-
+-        # This is a helpful hack to allow you to specify varargs parameters (e.g. *args) as keyword args (e.g.
+- # args=['list', 'of', 'things']) +- arg_spec = inspect.getfullargspec(self.func) +- if arg_spec.varargs in self.kwargs: +- args += kwargs.pop(arg_spec.varargs) +- +- return self.func(*args, **kwargs) +- +- +-def railroad_to_html(diagrams: List[NamedDiagram], **kwargs) -> str: +- """ +- Given a list of NamedDiagram, produce a single HTML string that visualises those diagrams +- :params kwargs: kwargs to be passed in to the template +- """ +- data = [] +- for diagram in diagrams: +- if diagram.diagram is None: +- continue +- io = StringIO() +- diagram.diagram.writeSvg(io.write) +- title = diagram.name +- if diagram.index == 0: +- title += " (root)" +- data.append({"title": title, "text": "", "svg": io.getvalue()}) +- +- return template.render(diagrams=data, **kwargs) +- +- +-def resolve_partial(partial: "EditablePartial[T]") -> T: +- """ +- Recursively resolves a collection of Partials into whatever type they are +- """ +- if isinstance(partial, EditablePartial): +- partial.args = resolve_partial(partial.args) +- partial.kwargs = resolve_partial(partial.kwargs) +- return partial() +- elif isinstance(partial, list): +- return [resolve_partial(x) for x in partial] +- elif isinstance(partial, dict): +- return {key: resolve_partial(x) for key, x in partial.items()} +- else: +- return partial +- +- +-def to_railroad( +- element: pyparsing.ParserElement, +- diagram_kwargs: typing.Optional[dict] = None, +- vertical: int = 3, +- show_results_names: bool = False, +- show_groups: bool = False, +-) -> List[NamedDiagram]: +- """ +- Convert a pyparsing element tree into a list of diagrams. This is the recommended entrypoint to diagram +- creation if you want to access the Railroad tree before it is converted to HTML +- :param element: base element of the parser being diagrammed +- :param diagram_kwargs: kwargs to pass to the Diagram() constructor +- :param vertical: (optional) - int - limit at which number of alternatives should be +- shown vertically instead of horizontally +- :param show_results_names - bool to indicate whether results name annotations should be +- included in the diagram +- :param show_groups - bool to indicate whether groups should be highlighted with an unlabeled +- surrounding box +- """ +- # Convert the whole tree underneath the root +- lookup = ConverterState(diagram_kwargs=diagram_kwargs or {}) +- _to_diagram_element( +- element, +- lookup=lookup, +- parent=None, +- vertical=vertical, +- show_results_names=show_results_names, +- show_groups=show_groups, +- ) +- +- root_id = id(element) +- # Convert the root if it hasn't been already +- if root_id in lookup: +- if not element.customName: +- lookup[root_id].name = "" +- lookup[root_id].mark_for_extraction(root_id, lookup, force=True) +- +- # Now that we're finished, we can convert from intermediate structures into Railroad elements +- diags = list(lookup.diagrams.values()) +- if len(diags) > 1: +- # collapse out duplicate diags with the same name +- seen = set() +- deduped_diags = [] +- for d in diags: +- # don't extract SkipTo elements, they are uninformative as subdiagrams +- if d.name == "...": +- continue +- if d.name is not None and d.name not in seen: +- seen.add(d.name) +- deduped_diags.append(d) +- resolved = [resolve_partial(partial) for partial in deduped_diags] +- else: +- # special case - if just one diagram, always display it, even if +- # it has no name +- resolved = [resolve_partial(partial) for partial in diags] +- return sorted(resolved, key=lambda diag: diag.index) +- +- +-def _should_vertical( +- 
specification: int, exprs: Iterable[pyparsing.ParserElement] +-) -> bool: +- """ +- Returns true if we should return a vertical list of elements +- """ +- if specification is None: +- return False +- else: +- return len(_visible_exprs(exprs)) >= specification +- +- +-class ElementState: +- """ +- State recorded for an individual pyparsing Element +- """ +- +- # Note: this should be a dataclass, but we have to support Python 3.5 +- def __init__( +- self, +- element: pyparsing.ParserElement, +- converted: EditablePartial, +- parent: EditablePartial, +- number: int, +- name: str = None, +- parent_index: typing.Optional[int] = None, +- ): +- #: The pyparsing element that this represents +- self.element: pyparsing.ParserElement = element +- #: The name of the element +- self.name: typing.Optional[str] = name +- #: The output Railroad element in an unconverted state +- self.converted: EditablePartial = converted +- #: The parent Railroad element, which we store so that we can extract this if it's duplicated +- self.parent: EditablePartial = parent +- #: The order in which we found this element, used for sorting diagrams if this is extracted into a diagram +- self.number: int = number +- #: The index of this inside its parent +- self.parent_index: typing.Optional[int] = parent_index +- #: If true, we should extract this out into a subdiagram +- self.extract: bool = False +- #: If true, all of this element's children have been filled out +- self.complete: bool = False +- +- def mark_for_extraction( +- self, el_id: int, state: "ConverterState", name: str = None, force: bool = False +- ): +- """ +- Called when this instance has been seen twice, and thus should eventually be extracted into a sub-diagram +- :param el_id: id of the element +- :param state: element/diagram state tracker +- :param name: name to use for this element's text +- :param force: If true, force extraction now, regardless of the state of this. Only useful for extracting the +- root element when we know we're finished +- """ +- self.extract = True +- +- # Set the name +- if not self.name: +- if name: +- # Allow forcing a custom name +- self.name = name +- elif self.element.customName: +- self.name = self.element.customName +- else: +- self.name = "" +- +- # Just because this is marked for extraction doesn't mean we can do it yet. We may have to wait for children +- # to be added +- # Also, if this is just a string literal etc, don't bother extracting it +- if force or (self.complete and _worth_extracting(self.element)): +- state.extract_into_diagram(el_id) +- +- +-class ConverterState: +- """ +- Stores some state that persists between recursions into the element tree +- """ +- +- def __init__(self, diagram_kwargs: typing.Optional[dict] = None): +- #: A dictionary mapping ParserElements to state relating to them +- self._element_diagram_states: Dict[int, ElementState] = {} +- #: A dictionary mapping ParserElement IDs to subdiagrams generated from them +- self.diagrams: Dict[int, EditablePartial[NamedDiagram]] = {} +- #: The index of the next unnamed element +- self.unnamed_index: int = 1 +- #: The index of the next element. 
This is used for sorting +- self.index: int = 0 +- #: Shared kwargs that are used to customize the construction of diagrams +- self.diagram_kwargs: dict = diagram_kwargs or {} +- self.extracted_diagram_names: Set[str] = set() +- +- def __setitem__(self, key: int, value: ElementState): +- self._element_diagram_states[key] = value +- +- def __getitem__(self, key: int) -> ElementState: +- return self._element_diagram_states[key] +- +- def __delitem__(self, key: int): +- del self._element_diagram_states[key] +- +- def __contains__(self, key: int): +- return key in self._element_diagram_states +- +- def generate_unnamed(self) -> int: +- """ +- Generate a number used in the name of an otherwise unnamed diagram +- """ +- self.unnamed_index += 1 +- return self.unnamed_index +- +- def generate_index(self) -> int: +- """ +- Generate a number used to index a diagram +- """ +- self.index += 1 +- return self.index +- +- def extract_into_diagram(self, el_id: int): +- """ +- Used when we encounter the same token twice in the same tree. When this +- happens, we replace all instances of that token with a terminal, and +- create a new subdiagram for the token +- """ +- position = self[el_id] +- +- # Replace the original definition of this element with a regular block +- if position.parent: +- ret = EditablePartial.from_call(railroad.NonTerminal, text=position.name) +- if "item" in position.parent.kwargs: +- position.parent.kwargs["item"] = ret +- elif "items" in position.parent.kwargs: +- position.parent.kwargs["items"][position.parent_index] = ret +- +- # If the element we're extracting is a group, skip to its content but keep the title +- if position.converted.func == railroad.Group: +- content = position.converted.kwargs["item"] +- else: +- content = position.converted +- +- self.diagrams[el_id] = EditablePartial.from_call( +- NamedDiagram, +- name=position.name, +- diagram=EditablePartial.from_call( +- railroad.Diagram, content, **self.diagram_kwargs +- ), +- index=position.number, +- ) +- +- del self[el_id] +- +- +-def _worth_extracting(element: pyparsing.ParserElement) -> bool: +- """ +- Returns true if this element is worth having its own sub-diagram. 
Simply, if any of its children +- themselves have children, then its complex enough to extract +- """ +- children = element.recurse() +- return any(child.recurse() for child in children) +- +- +-def _apply_diagram_item_enhancements(fn): +- """ +- decorator to ensure enhancements to a diagram item (such as results name annotations) +- get applied on return from _to_diagram_element (we do this since there are several +- returns in _to_diagram_element) +- """ +- +- def _inner( +- element: pyparsing.ParserElement, +- parent: typing.Optional[EditablePartial], +- lookup: ConverterState = None, +- vertical: int = None, +- index: int = 0, +- name_hint: str = None, +- show_results_names: bool = False, +- show_groups: bool = False, +- ) -> typing.Optional[EditablePartial]: +- +- ret = fn( +- element, +- parent, +- lookup, +- vertical, +- index, +- name_hint, +- show_results_names, +- show_groups, +- ) +- +- # apply annotation for results name, if present +- if show_results_names and ret is not None: +- element_results_name = element.resultsName +- if element_results_name: +- # add "*" to indicate if this is a "list all results" name +- element_results_name += "" if element.modalResults else "*" +- ret = EditablePartial.from_call( +- railroad.Group, item=ret, label=element_results_name +- ) +- +- return ret +- +- return _inner +- +- +-def _visible_exprs(exprs: Iterable[pyparsing.ParserElement]): +- non_diagramming_exprs = ( +- pyparsing.ParseElementEnhance, +- pyparsing.PositionToken, +- pyparsing.And._ErrorStop, +- ) +- return [ +- e +- for e in exprs +- if not (e.customName or e.resultsName or isinstance(e, non_diagramming_exprs)) +- ] +- +- +-@_apply_diagram_item_enhancements +-def _to_diagram_element( +- element: pyparsing.ParserElement, +- parent: typing.Optional[EditablePartial], +- lookup: ConverterState = None, +- vertical: int = None, +- index: int = 0, +- name_hint: str = None, +- show_results_names: bool = False, +- show_groups: bool = False, +-) -> typing.Optional[EditablePartial]: +- """ +- Recursively converts a PyParsing Element to a railroad Element +- :param lookup: The shared converter state that keeps track of useful things +- :param index: The index of this element within the parent +- :param parent: The parent of this element in the output tree +- :param vertical: Controls at what point we make a list of elements vertical. If this is an integer (the default), +- it sets the threshold of the number of items before we go vertical. 
If True, always go vertical, if False, never +- do so +- :param name_hint: If provided, this will override the generated name +- :param show_results_names: bool flag indicating whether to add annotations for results names +- :returns: The converted version of the input element, but as a Partial that hasn't yet been constructed +- :param show_groups: bool flag indicating whether to show groups using bounding box +- """ +- exprs = element.recurse() +- name = name_hint or element.customName or element.__class__.__name__ +- +- # Python's id() is used to provide a unique identifier for elements +- el_id = id(element) +- +- element_results_name = element.resultsName +- +- # Here we basically bypass processing certain wrapper elements if they contribute nothing to the diagram +- if not element.customName: +- if isinstance( +- element, +- ( +- # pyparsing.TokenConverter, +- # pyparsing.Forward, +- pyparsing.Located, +- ), +- ): +- # However, if this element has a useful custom name, and its child does not, we can pass it on to the child +- if exprs: +- if not exprs[0].customName: +- propagated_name = name +- else: +- propagated_name = None +- +- return _to_diagram_element( +- element.expr, +- parent=parent, +- lookup=lookup, +- vertical=vertical, +- index=index, +- name_hint=propagated_name, +- show_results_names=show_results_names, +- show_groups=show_groups, +- ) +- +- # If the element isn't worth extracting, we always treat it as the first time we say it +- if _worth_extracting(element): +- if el_id in lookup: +- # If we've seen this element exactly once before, we are only just now finding out that it's a duplicate, +- # so we have to extract it into a new diagram. +- looked_up = lookup[el_id] +- looked_up.mark_for_extraction(el_id, lookup, name=name_hint) +- ret = EditablePartial.from_call(railroad.NonTerminal, text=looked_up.name) +- return ret +- +- elif el_id in lookup.diagrams: +- # If we have seen the element at least twice before, and have already extracted it into a subdiagram, we +- # just put in a marker element that refers to the sub-diagram +- ret = EditablePartial.from_call( +- railroad.NonTerminal, text=lookup.diagrams[el_id].kwargs["name"] +- ) +- return ret +- +- # Recursively convert child elements +- # Here we find the most relevant Railroad element for matching pyparsing Element +- # We use ``items=[]`` here to hold the place for where the child elements will go once created +- if isinstance(element, pyparsing.And): +- # detect And's created with ``expr*N`` notation - for these use a OneOrMore with a repeat +- # (all will have the same name, and resultsName) +- if not exprs: +- return None +- if len(set((e.name, e.resultsName) for e in exprs)) == 1: +- ret = EditablePartial.from_call( +- railroad.OneOrMore, item="", repeat=str(len(exprs)) +- ) +- elif _should_vertical(vertical, exprs): +- ret = EditablePartial.from_call(railroad.Stack, items=[]) +- else: +- ret = EditablePartial.from_call(railroad.Sequence, items=[]) +- elif isinstance(element, (pyparsing.Or, pyparsing.MatchFirst)): +- if not exprs: +- return None +- if _should_vertical(vertical, exprs): +- ret = EditablePartial.from_call(railroad.Choice, 0, items=[]) +- else: +- ret = EditablePartial.from_call(railroad.HorizontalChoice, items=[]) +- elif isinstance(element, pyparsing.Each): +- if not exprs: +- return None +- ret = EditablePartial.from_call(EachItem, items=[]) +- elif isinstance(element, pyparsing.NotAny): +- ret = EditablePartial.from_call(AnnotatedItem, label="NOT", item="") +- elif isinstance(element, 
pyparsing.FollowedBy): +- ret = EditablePartial.from_call(AnnotatedItem, label="LOOKAHEAD", item="") +- elif isinstance(element, pyparsing.PrecededBy): +- ret = EditablePartial.from_call(AnnotatedItem, label="LOOKBEHIND", item="") +- elif isinstance(element, pyparsing.Group): +- if show_groups: +- ret = EditablePartial.from_call(AnnotatedItem, label="", item="") +- else: +- ret = EditablePartial.from_call(railroad.Group, label="", item="") +- elif isinstance(element, pyparsing.TokenConverter): +- ret = EditablePartial.from_call( +- AnnotatedItem, label=type(element).__name__.lower(), item="" +- ) +- elif isinstance(element, pyparsing.Opt): +- ret = EditablePartial.from_call(railroad.Optional, item="") +- elif isinstance(element, pyparsing.OneOrMore): +- ret = EditablePartial.from_call(railroad.OneOrMore, item="") +- elif isinstance(element, pyparsing.ZeroOrMore): +- ret = EditablePartial.from_call(railroad.ZeroOrMore, item="") +- elif isinstance(element, pyparsing.Group): +- ret = EditablePartial.from_call( +- railroad.Group, item=None, label=element_results_name +- ) +- elif isinstance(element, pyparsing.Empty) and not element.customName: +- # Skip unnamed "Empty" elements +- ret = None +- elif len(exprs) > 1: +- ret = EditablePartial.from_call(railroad.Sequence, items=[]) +- elif len(exprs) > 0 and not element_results_name: +- ret = EditablePartial.from_call(railroad.Group, item="", label=name) +- else: +- terminal = EditablePartial.from_call(railroad.Terminal, element.defaultName) +- ret = terminal +- +- if ret is None: +- return +- +- # Indicate this element's position in the tree so we can extract it if necessary +- lookup[el_id] = ElementState( +- element=element, +- converted=ret, +- parent=parent, +- parent_index=index, +- number=lookup.generate_index(), +- ) +- if element.customName: +- lookup[el_id].mark_for_extraction(el_id, lookup, element.customName) +- +- i = 0 +- for expr in exprs: +- # Add a placeholder index in case we have to extract the child before we even add it to the parent +- if "items" in ret.kwargs: +- ret.kwargs["items"].insert(i, None) +- +- item = _to_diagram_element( +- expr, +- parent=ret, +- lookup=lookup, +- vertical=vertical, +- index=i, +- show_results_names=show_results_names, +- show_groups=show_groups, +- ) +- +- # Some elements don't need to be shown in the diagram +- if item is not None: +- if "item" in ret.kwargs: +- ret.kwargs["item"] = item +- elif "items" in ret.kwargs: +- # If we've already extracted the child, don't touch this index, since it's occupied by a nonterminal +- ret.kwargs["items"][i] = item +- i += 1 +- elif "items" in ret.kwargs: +- # If we're supposed to skip this element, remove it from the parent +- del ret.kwargs["items"][i] +- +- # If all this items children are none, skip this item +- if ret and ( +- ("items" in ret.kwargs and len(ret.kwargs["items"]) == 0) +- or ("item" in ret.kwargs and ret.kwargs["item"] is None) +- ): +- ret = EditablePartial.from_call(railroad.Terminal, name) +- +- # Mark this element as "complete", ie it has all of its children +- if el_id in lookup: +- lookup[el_id].complete = True +- +- if el_id in lookup and lookup[el_id].extract and lookup[el_id].complete: +- lookup.extract_into_diagram(el_id) +- if ret is not None: +- ret = EditablePartial.from_call( +- railroad.NonTerminal, text=lookup.diagrams[el_id].kwargs["name"] +- ) +- +- return ret +diff --git a/src/poetry/core/_vendor/pyparsing/exceptions.py b/src/poetry/core/_vendor/pyparsing/exceptions.py +deleted file mode 100644 +index a38447b..0000000 
+--- a/src/poetry/core/_vendor/pyparsing/exceptions.py ++++ /dev/null +@@ -1,267 +0,0 @@ +-# exceptions.py +- +-import re +-import sys +-import typing +- +-from .util import col, line, lineno, _collapse_string_to_ranges +-from .unicode import pyparsing_unicode as ppu +- +- +-class ExceptionWordUnicode(ppu.Latin1, ppu.LatinA, ppu.LatinB, ppu.Greek, ppu.Cyrillic): +- pass +- +- +-_extract_alphanums = _collapse_string_to_ranges(ExceptionWordUnicode.alphanums) +-_exception_word_extractor = re.compile("([" + _extract_alphanums + "]{1,16})|.") +- +- +-class ParseBaseException(Exception): +- """base exception class for all parsing runtime exceptions""" +- +- # Performance tuning: we construct a *lot* of these, so keep this +- # constructor as small and fast as possible +- def __init__( +- self, +- pstr: str, +- loc: int = 0, +- msg: typing.Optional[str] = None, +- elem=None, +- ): +- self.loc = loc +- if msg is None: +- self.msg = pstr +- self.pstr = "" +- else: +- self.msg = msg +- self.pstr = pstr +- self.parser_element = self.parserElement = elem +- self.args = (pstr, loc, msg) +- +- @staticmethod +- def explain_exception(exc, depth=16): +- """ +- Method to take an exception and translate the Python internal traceback into a list +- of the pyparsing expressions that caused the exception to be raised. +- +- Parameters: +- +- - exc - exception raised during parsing (need not be a ParseException, in support +- of Python exceptions that might be raised in a parse action) +- - depth (default=16) - number of levels back in the stack trace to list expression +- and function names; if None, the full stack trace names will be listed; if 0, only +- the failing input line, marker, and exception string will be shown +- +- Returns a multi-line string listing the ParserElements and/or function names in the +- exception's stack trace. +- """ +- import inspect +- from .core import ParserElement +- +- if depth is None: +- depth = sys.getrecursionlimit() +- ret = [] +- if isinstance(exc, ParseBaseException): +- ret.append(exc.line) +- ret.append(" " * (exc.column - 1) + "^") +- ret.append("{}: {}".format(type(exc).__name__, exc)) +- +- if depth > 0: +- callers = inspect.getinnerframes(exc.__traceback__, context=depth) +- seen = set() +- for i, ff in enumerate(callers[-depth:]): +- frm = ff[0] +- +- f_self = frm.f_locals.get("self", None) +- if isinstance(f_self, ParserElement): +- if frm.f_code.co_name not in ("parseImpl", "_parseNoCache"): +- continue +- if id(f_self) in seen: +- continue +- seen.add(id(f_self)) +- +- self_type = type(f_self) +- ret.append( +- "{}.{} - {}".format( +- self_type.__module__, self_type.__name__, f_self +- ) +- ) +- +- elif f_self is not None: +- self_type = type(f_self) +- ret.append("{}.{}".format(self_type.__module__, self_type.__name__)) +- +- else: +- code = frm.f_code +- if code.co_name in ("wrapper", "<module>"): +- continue +- +- ret.append("{}".format(code.co_name)) +- +- depth -= 1 +- if not depth: +- break +- +- return "\n".join(ret) +- +- @classmethod +- def _from_exception(cls, pe): +- """ +- internal factory method to simplify creating one type of ParseException +- from another - avoids having __init__ signature conflicts among subclasses +- """ +- return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement) +- +- @property +- def line(self) -> str: +- """ +- Return the line of text where the exception occurred. +- """ +- return line(self.loc, self.pstr) +- +- @property +- def lineno(self) -> int: +- """ +- Return the 1-based line number of text where the exception occurred.
+- """ +- return lineno(self.loc, self.pstr) +- +- @property +- def col(self) -> int: +- """ +- Return the 1-based column on the line of text where the exception occurred. +- """ +- return col(self.loc, self.pstr) +- +- @property +- def column(self) -> int: +- """ +- Return the 1-based column on the line of text where the exception occurred. +- """ +- return col(self.loc, self.pstr) +- +- def __str__(self) -> str: +- if self.pstr: +- if self.loc >= len(self.pstr): +- foundstr = ", found end of text" +- else: +- # pull out next word at error location +- found_match = _exception_word_extractor.match(self.pstr, self.loc) +- if found_match is not None: +- found = found_match.group(0) +- else: +- found = self.pstr[self.loc : self.loc + 1] +- foundstr = (", found %r" % found).replace(r"\\", "\\") +- else: +- foundstr = "" +- return "{}{} (at char {}), (line:{}, col:{})".format( +- self.msg, foundstr, self.loc, self.lineno, self.column +- ) +- +- def __repr__(self): +- return str(self) +- +- def mark_input_line(self, marker_string: str = None, *, markerString=">!<") -> str: +- """ +- Extracts the exception line from the input string, and marks +- the location of the exception with a special symbol. +- """ +- markerString = marker_string if marker_string is not None else markerString +- line_str = self.line +- line_column = self.column - 1 +- if markerString: +- line_str = "".join( +- (line_str[:line_column], markerString, line_str[line_column:]) +- ) +- return line_str.strip() +- +- def explain(self, depth=16) -> str: +- """ +- Method to translate the Python internal traceback into a list +- of the pyparsing expressions that caused the exception to be raised. +- +- Parameters: +- +- - depth (default=16) - number of levels back in the stack trace to list expression +- and function names; if None, the full stack trace names will be listed; if 0, only +- the failing input line, marker, and exception string will be shown +- +- Returns a multi-line string listing the ParserElements and/or function names in the +- exception's stack trace. +- +- Example:: +- +- expr = pp.Word(pp.nums) * 3 +- try: +- expr.parse_string("123 456 A789") +- except pp.ParseException as pe: +- print(pe.explain(depth=0)) +- +- prints:: +- +- 123 456 A789 +- ^ +- ParseException: Expected W:(0-9), found 'A' (at char 8), (line:1, col:9) +- +- Note: the diagnostic output will include string representations of the expressions +- that failed to parse. These representations will be more helpful if you use `set_name` to +- give identifiable names to your expressions. Otherwise they will use the default string +- forms, which may be cryptic to read. +- +- Note: pyparsing's default truncation of exception tracebacks may also truncate the +- stack of expressions that are displayed in the ``explain`` output. 
To get the full listing +- of parser expressions, you may have to set ``ParserElement.verbose_stacktrace = True`` +- """ +- return self.explain_exception(self, depth) +- +- markInputline = mark_input_line +- +- +-class ParseException(ParseBaseException): +- """ +- Exception thrown when a parse expression doesn't match the input string +- +- Example:: +- +- try: +- Word(nums).set_name("integer").parse_string("ABC") +- except ParseException as pe: +- print(pe) +- print("column: {}".format(pe.column)) +- +- prints:: +- +- Expected integer (at char 0), (line:1, col:1) +- column: 1 +- +- """ +- +- +-class ParseFatalException(ParseBaseException): +- """ +- User-throwable exception thrown when inconsistent parse content +- is found; stops all parsing immediately +- """ +- +- +-class ParseSyntaxException(ParseFatalException): +- """ +- Just like :class:`ParseFatalException`, but thrown internally +- when an :class:`ErrorStop` ('-' operator) indicates +- that parsing is to stop immediately because an unbacktrackable +- syntax error has been found. +- """ +- +- +-class RecursiveGrammarException(Exception): +- """ +- Exception thrown by :class:`ParserElement.validate` if the +- grammar could be left-recursive; parser may need to enable +- left recursion using :class:`ParserElement.enable_left_recursion` +- """ +- +- def __init__(self, parseElementList): +- self.parseElementTrace = parseElementList +- +- def __str__(self) -> str: +- return "RecursiveGrammarException: {}".format(self.parseElementTrace) +diff --git a/src/poetry/core/_vendor/pyparsing/helpers.py b/src/poetry/core/_vendor/pyparsing/helpers.py +deleted file mode 100644 +index 9588b3b..0000000 +--- a/src/poetry/core/_vendor/pyparsing/helpers.py ++++ /dev/null +@@ -1,1088 +0,0 @@ +-# helpers.py +-import html.entities +-import re +-import typing +- +-from . import __diag__ +-from .core import * +-from .util import _bslash, _flatten, _escape_regex_range_chars +- +- +-# +-# global helpers +-# +-def delimited_list( +- expr: Union[str, ParserElement], +- delim: Union[str, ParserElement] = ",", +- combine: bool = False, +- min: typing.Optional[int] = None, +- max: typing.Optional[int] = None, +- *, +- allow_trailing_delim: bool = False, +-) -> ParserElement: +- """Helper to define a delimited list of expressions - the delimiter +- defaults to ','. By default, the list elements and delimiters can +- have intervening whitespace, and comments, but this can be +- overridden by passing ``combine=True`` in the constructor. If +- ``combine`` is set to ``True``, the matching tokens are +- returned as a single token string, with the delimiters included; +- otherwise, the matching tokens are returned as a list of tokens, +- with the delimiters suppressed. +- +- If ``allow_trailing_delim`` is set to True, then the list may end with +- a delimiter. 
+- +- Example:: +- +- delimited_list(Word(alphas)).parse_string("aa,bb,cc") # -> ['aa', 'bb', 'cc'] +- delimited_list(Word(hexnums), delim=':', combine=True).parse_string("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] +- """ +- if isinstance(expr, str_type): +- expr = ParserElement._literalStringClass(expr) +- +- dlName = "{expr} [{delim} {expr}]...{end}".format( +- expr=str(expr.copy().streamline()), +- delim=str(delim), +- end=" [{}]".format(str(delim)) if allow_trailing_delim else "", +- ) +- +- if not combine: +- delim = Suppress(delim) +- +- if min is not None: +- if min < 1: +- raise ValueError("min must be greater than 0") +- min -= 1 +- if max is not None: +- if min is not None and max <= min: +- raise ValueError("max must be greater than, or equal to min") +- max -= 1 +- delimited_list_expr = expr + (delim + expr)[min, max] +- +- if allow_trailing_delim: +- delimited_list_expr += Opt(delim) +- +- if combine: +- return Combine(delimited_list_expr).set_name(dlName) +- else: +- return delimited_list_expr.set_name(dlName) +- +- +-def counted_array( +- expr: ParserElement, +- int_expr: typing.Optional[ParserElement] = None, +- *, +- intExpr: typing.Optional[ParserElement] = None, +-) -> ParserElement: +- """Helper to define a counted list of expressions. +- +- This helper defines a pattern of the form:: +- +- integer expr expr expr... +- +- where the leading integer tells how many expr expressions follow. +- The matched tokens returns the array of expr tokens as a list - the +- leading count token is suppressed. +- +- If ``int_expr`` is specified, it should be a pyparsing expression +- that produces an integer value. +- +- Example:: +- +- counted_array(Word(alphas)).parse_string('2 ab cd ef') # -> ['ab', 'cd'] +- +- # in this parser, the leading integer value is given in binary, +- # '10' indicating that 2 values are in the array +- binary_constant = Word('01').set_parse_action(lambda t: int(t[0], 2)) +- counted_array(Word(alphas), int_expr=binary_constant).parse_string('10 ab cd ef') # -> ['ab', 'cd'] +- +- # if other fields must be parsed after the count but before the +- # list items, give the fields results names and they will +- # be preserved in the returned ParseResults: +- count_with_metadata = integer + Word(alphas)("type") +- typed_array = counted_array(Word(alphanums), int_expr=count_with_metadata)("items") +- result = typed_array.parse_string("3 bool True True False") +- print(result.dump()) +- +- # prints +- # ['True', 'True', 'False'] +- # - items: ['True', 'True', 'False'] +- # - type: 'bool' +- """ +- intExpr = intExpr or int_expr +- array_expr = Forward() +- +- def count_field_parse_action(s, l, t): +- nonlocal array_expr +- n = t[0] +- array_expr <<= (expr * n) if n else Empty() +- # clear list contents, but keep any named results +- del t[:] +- +- if intExpr is None: +- intExpr = Word(nums).set_parse_action(lambda t: int(t[0])) +- else: +- intExpr = intExpr.copy() +- intExpr.set_name("arrayLen") +- intExpr.add_parse_action(count_field_parse_action, call_during_try=True) +- return (intExpr + array_expr).set_name("(len) " + str(expr) + "...") +- +- +-def match_previous_literal(expr: ParserElement) -> ParserElement: +- """Helper to define an expression that is indirectly defined from +- the tokens matched in a previous expression, that is, it looks for +- a 'repeat' of a previous expression. For example:: +- +- first = Word(nums) +- second = match_previous_literal(first) +- match_expr = first + ":" + second +- +- will match ``"1:1"``, but not ``"1:2"``. 
Because this +- matches a previous literal, will also match the leading +- ``"1:1"`` in ``"1:10"``. If this is not desired, use +- :class:`match_previous_expr`. Do *not* use with packrat parsing +- enabled. +- """ +- rep = Forward() +- +- def copy_token_to_repeater(s, l, t): +- if t: +- if len(t) == 1: +- rep << t[0] +- else: +- # flatten t tokens +- tflat = _flatten(t.as_list()) +- rep << And(Literal(tt) for tt in tflat) +- else: +- rep << Empty() +- +- expr.add_parse_action(copy_token_to_repeater, callDuringTry=True) +- rep.set_name("(prev) " + str(expr)) +- return rep +- +- +-def match_previous_expr(expr: ParserElement) -> ParserElement: +- """Helper to define an expression that is indirectly defined from +- the tokens matched in a previous expression, that is, it looks for +- a 'repeat' of a previous expression. For example:: +- +- first = Word(nums) +- second = match_previous_expr(first) +- match_expr = first + ":" + second +- +- will match ``"1:1"``, but not ``"1:2"``. Because this +- matches by expressions, will *not* match the leading ``"1:1"`` +- in ``"1:10"``; the expressions are evaluated first, and then +- compared, so ``"1"`` is compared with ``"10"``. Do *not* use +- with packrat parsing enabled. +- """ +- rep = Forward() +- e2 = expr.copy() +- rep <<= e2 +- +- def copy_token_to_repeater(s, l, t): +- matchTokens = _flatten(t.as_list()) +- +- def must_match_these_tokens(s, l, t): +- theseTokens = _flatten(t.as_list()) +- if theseTokens != matchTokens: +- raise ParseException( +- s, l, "Expected {}, found{}".format(matchTokens, theseTokens) +- ) +- +- rep.set_parse_action(must_match_these_tokens, callDuringTry=True) +- +- expr.add_parse_action(copy_token_to_repeater, callDuringTry=True) +- rep.set_name("(prev) " + str(expr)) +- return rep +- +- +-def one_of( +- strs: Union[typing.Iterable[str], str], +- caseless: bool = False, +- use_regex: bool = True, +- as_keyword: bool = False, +- *, +- useRegex: bool = True, +- asKeyword: bool = False, +-) -> ParserElement: +- """Helper to quickly define a set of alternative :class:`Literal` s, +- and makes sure to do longest-first testing when there is a conflict, +- regardless of the input order, but returns +- a :class:`MatchFirst` for best performance. 
+- +- Parameters: +- +- - ``strs`` - a string of space-delimited literals, or a collection of +- string literals +- - ``caseless`` - treat all literals as caseless - (default= ``False``) +- - ``use_regex`` - as an optimization, will +- generate a :class:`Regex` object; otherwise, will generate +- a :class:`MatchFirst` object (if ``caseless=True`` or ``asKeyword=True``, or if +- creating a :class:`Regex` raises an exception) - (default= ``True``) +- - ``as_keyword`` - enforce :class:`Keyword`-style matching on the +- generated expressions - (default= ``False``) +- - ``asKeyword`` and ``useRegex`` are retained for pre-PEP8 compatibility, +- but will be removed in a future release +- +- Example:: +- +- comp_oper = one_of("< = > <= >= !=") +- var = Word(alphas) +- number = Word(nums) +- term = var | number +- comparison_expr = term + comp_oper + term +- print(comparison_expr.search_string("B = 12 AA=23 B<=AA AA>12")) +- +- prints:: +- +- [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] +- """ +- asKeyword = asKeyword or as_keyword +- useRegex = useRegex and use_regex +- +- if ( +- isinstance(caseless, str_type) +- and __diag__.warn_on_multiple_string_args_to_oneof +- ): +- warnings.warn( +- "More than one string argument passed to one_of, pass" +- " choices as a list or space-delimited string", +- stacklevel=2, +- ) +- +- if caseless: +- isequal = lambda a, b: a.upper() == b.upper() +- masks = lambda a, b: b.upper().startswith(a.upper()) +- parseElementClass = CaselessKeyword if asKeyword else CaselessLiteral +- else: +- isequal = lambda a, b: a == b +- masks = lambda a, b: b.startswith(a) +- parseElementClass = Keyword if asKeyword else Literal +- +- symbols: List[str] = [] +- if isinstance(strs, str_type): +- symbols = strs.split() +- elif isinstance(strs, Iterable): +- symbols = list(strs) +- else: +- raise TypeError("Invalid argument to one_of, expected string or iterable") +- if not symbols: +- return NoMatch() +- +- # reorder given symbols to take care to avoid masking longer choices with shorter ones +- # (but only if the given symbols are not just single characters) +- if any(len(sym) > 1 for sym in symbols): +- i = 0 +- while i < len(symbols) - 1: +- cur = symbols[i] +- for j, other in enumerate(symbols[i + 1 :]): +- if isequal(other, cur): +- del symbols[i + j + 1] +- break +- elif masks(cur, other): +- del symbols[i + j + 1] +- symbols.insert(i, other) +- break +- else: +- i += 1 +- +- if useRegex: +- re_flags: int = re.IGNORECASE if caseless else 0 +- +- try: +- if all(len(sym) == 1 for sym in symbols): +- # symbols are just single characters, create range regex pattern +- patt = "[{}]".format( +- "".join(_escape_regex_range_chars(sym) for sym in symbols) +- ) +- else: +- patt = "|".join(re.escape(sym) for sym in symbols) +- +- # wrap with \b word break markers if defining as keywords +- if asKeyword: +- patt = r"\b(?:{})\b".format(patt) +- +- ret = Regex(patt, flags=re_flags).set_name(" | ".join(symbols)) +- +- if caseless: +- # add parse action to return symbols as specified, not in random +- # casing as found in input string +- symbol_map = {sym.lower(): sym for sym in symbols} +- ret.add_parse_action(lambda s, l, t: symbol_map[t[0].lower()]) +- +- return ret +- +- except re.error: +- warnings.warn( +- "Exception creating Regex for one_of, building MatchFirst", stacklevel=2 +- ) +- +- # last resort, just use MatchFirst +- return MatchFirst(parseElementClass(sym) for sym in symbols).set_name( +- " | ".join(symbols) +- ) +- +- +-def dict_of(key: 
ParserElement, value: ParserElement) -> ParserElement: +- """Helper to easily and clearly define a dictionary by specifying +- the respective patterns for the key and value. Takes care of +- defining the :class:`Dict`, :class:`ZeroOrMore`, and +- :class:`Group` tokens in the proper order. The key pattern +- can include delimiting markers or punctuation, as long as they are +- suppressed, thereby leaving the significant key text. The value +- pattern can include named results, so that the :class:`Dict` results +- can include named token fields. +- +- Example:: +- +- text = "shape: SQUARE posn: upper left color: light blue texture: burlap" +- attr_expr = (label + Suppress(':') + OneOrMore(data_word, stop_on=label).set_parse_action(' '.join)) +- print(attr_expr[1, ...].parse_string(text).dump()) +- +- attr_label = label +- attr_value = Suppress(':') + OneOrMore(data_word, stop_on=label).set_parse_action(' '.join) +- +- # similar to Dict, but simpler call format +- result = dict_of(attr_label, attr_value).parse_string(text) +- print(result.dump()) +- print(result['shape']) +- print(result.shape) # object attribute access works too +- print(result.as_dict()) +- +- prints:: +- +- [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] +- - color: 'light blue' +- - posn: 'upper left' +- - shape: 'SQUARE' +- - texture: 'burlap' +- SQUARE +- SQUARE +- {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'} +- """ +- return Dict(OneOrMore(Group(key + value))) +- +- +-def original_text_for( +- expr: ParserElement, as_string: bool = True, *, asString: bool = True +-) -> ParserElement: +- """Helper to return the original, untokenized text for a given +- expression. Useful to restore the parsed fields of an HTML start +- tag into the raw tag text itself, or to revert separate tokens with +- intervening whitespace back to the original matching input text. By +- default, returns a string containing the original parsed text. +- +- If the optional ``as_string`` argument is passed as +- ``False``, then the return value is +- a :class:`ParseResults` containing any results names that +- were originally matched, and a single token containing the original +- matched text from the input string. So if the expression passed to +- :class:`original_text_for` contains expressions with defined +- results names, you must set ``as_string`` to ``False`` if you +- want to preserve those results name values. +- +- The ``asString`` pre-PEP8 argument is retained for compatibility, +- but will be removed in a future release.
+- +- Example:: +- +- src = "this is test <b> bold <i>text</i> </b> normal text " +- for tag in ("b", "i"): +- opener, closer = make_html_tags(tag) +- patt = original_text_for(opener + SkipTo(closer) + closer) +- print(patt.search_string(src)[0]) +- +- prints:: +- +- ['<b> bold <i>text</i> </b>'] +- ['<i>text</i>'] +- """ +- asString = asString and as_string +- +- locMarker = Empty().set_parse_action(lambda s, loc, t: loc) +- endlocMarker = locMarker.copy() +- endlocMarker.callPreparse = False +- matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end") +- if asString: +- extractText = lambda s, l, t: s[t._original_start : t._original_end] +- else: +- +- def extractText(s, l, t): +- t[:] = [s[t.pop("_original_start") : t.pop("_original_end")]] +- +- matchExpr.set_parse_action(extractText) +- matchExpr.ignoreExprs = expr.ignoreExprs +- matchExpr.suppress_warning(Diagnostics.warn_ungrouped_named_tokens_in_collection) +- return matchExpr +- +- +-def ungroup(expr: ParserElement) -> ParserElement: +- """Helper to undo pyparsing's default grouping of And expressions, +- even if all but one are non-empty. +- """ +- return TokenConverter(expr).add_parse_action(lambda t: t[0]) +- +- +-def locatedExpr(expr: ParserElement) -> ParserElement: +- """ +- (DEPRECATED - future code should use the Located class) +- Helper to decorate a returned token with its starting and ending +- locations in the input string. +- +- This helper adds the following results names: +- +- - ``locn_start`` - location where matched expression begins +- - ``locn_end`` - location where matched expression ends +- - ``value`` - the actual parsed results +- +- Be careful if the input text contains ``<TAB>`` characters, you +- may want to call :class:`ParserElement.parseWithTabs` +- +- Example:: +- +- wd = Word(alphas) +- for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"): +- print(match) +- +- prints:: +- +- [[0, 'ljsdf', 5]] +- [[8, 'lksdjjf', 15]] +- [[18, 'lkkjj', 23]] +- """ +- locator = Empty().set_parse_action(lambda ss, ll, tt: ll) +- return Group( +- locator("locn_start") +- + expr("value") +- + locator.copy().leaveWhitespace()("locn_end") +- ) +- +- +-def nested_expr( +- opener: Union[str, ParserElement] = "(", +- closer: Union[str, ParserElement] = ")", +- content: typing.Optional[ParserElement] = None, +- ignore_expr: ParserElement = quoted_string(), +- *, +- ignoreExpr: ParserElement = quoted_string(), +-) -> ParserElement: +- """Helper method for defining nested lists enclosed in opening and +- closing delimiters (``"("`` and ``")"`` are the default). +- +- Parameters: +- - ``opener`` - opening character for a nested list +- (default= ``"("``); can also be a pyparsing expression +- - ``closer`` - closing character for a nested list +- (default= ``")"``); can also be a pyparsing expression +- - ``content`` - expression for items within the nested lists +- (default= ``None``) +- - ``ignore_expr`` - expression for ignoring opening and closing delimiters +- (default= :class:`quoted_string`) +- - ``ignoreExpr`` - this pre-PEP8 argument is retained for compatibility +- but will be removed in a future release +- +- If an expression is not provided for the content argument, the +- nested expression will capture all whitespace-delimited content +- between delimiters as a list of separate values. +- +- Use the ``ignore_expr`` argument to define expressions that may +- contain opening or closing characters that should not be treated as +- opening or closing characters for nesting, such as quoted_string or +- a comment expression.
Specify multiple expressions using an +- :class:`Or` or :class:`MatchFirst`. The default is +- :class:`quoted_string`, but if no expressions are to be ignored, then +- pass ``None`` for this argument. +- +- Example:: +- +- data_type = one_of("void int short long char float double") +- decl_data_type = Combine(data_type + Opt(Word('*'))) +- ident = Word(alphas+'_', alphanums+'_') +- number = pyparsing_common.number +- arg = Group(decl_data_type + ident) +- LPAR, RPAR = map(Suppress, "()") +- +- code_body = nested_expr('{', '}', ignore_expr=(quoted_string | c_style_comment)) +- +- c_function = (decl_data_type("type") +- + ident("name") +- + LPAR + Opt(delimited_list(arg), [])("args") + RPAR +- + code_body("body")) +- c_function.ignore(c_style_comment) +- +- source_code = ''' +- int is_odd(int x) { +- return (x%2); +- } +- +- int dec_to_hex(char hchar) { +- if (hchar >= '0' && hchar <= '9') { +- return (ord(hchar)-ord('0')); +- } else { +- return (10+ord(hchar)-ord('A')); +- } +- } +- ''' +- for func in c_function.search_string(source_code): +- print("%(name)s (%(type)s) args: %(args)s" % func) +- +- +- prints:: +- +- is_odd (int) args: [['int', 'x']] +- dec_to_hex (int) args: [['char', 'hchar']] +- """ +- if ignoreExpr != ignore_expr: +- ignoreExpr = ignore_expr if ignoreExpr == quoted_string() else ignoreExpr +- if opener == closer: +- raise ValueError("opening and closing strings cannot be the same") +- if content is None: +- if isinstance(opener, str_type) and isinstance(closer, str_type): +- if len(opener) == 1 and len(closer) == 1: +- if ignoreExpr is not None: +- content = Combine( +- OneOrMore( +- ~ignoreExpr +- + CharsNotIn( +- opener + closer + ParserElement.DEFAULT_WHITE_CHARS, +- exact=1, +- ) +- ) +- ).set_parse_action(lambda t: t[0].strip()) +- else: +- content = empty.copy() + CharsNotIn( +- opener + closer + ParserElement.DEFAULT_WHITE_CHARS +- ).set_parse_action(lambda t: t[0].strip()) +- else: +- if ignoreExpr is not None: +- content = Combine( +- OneOrMore( +- ~ignoreExpr +- + ~Literal(opener) +- + ~Literal(closer) +- + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS, exact=1) +- ) +- ).set_parse_action(lambda t: t[0].strip()) +- else: +- content = Combine( +- OneOrMore( +- ~Literal(opener) +- + ~Literal(closer) +- + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS, exact=1) +- ) +- ).set_parse_action(lambda t: t[0].strip()) +- else: +- raise ValueError( +- "opening and closing arguments must be strings if no content expression is given" +- ) +- ret = Forward() +- if ignoreExpr is not None: +- ret <<= Group( +- Suppress(opener) + ZeroOrMore(ignoreExpr | ret | content) + Suppress(closer) +- ) +- else: +- ret <<= Group(Suppress(opener) + ZeroOrMore(ret | content) + Suppress(closer)) +- ret.set_name("nested %s%s expression" % (opener, closer)) +- return ret +- +- +-def _makeTags(tagStr, xml, suppress_LT=Suppress("<"), suppress_GT=Suppress(">")): +- """Internal helper to construct opening and closing tag expressions, given a tag name""" +- if isinstance(tagStr, str_type): +- resname = tagStr +- tagStr = Keyword(tagStr, caseless=not xml) +- else: +- resname = tagStr.name +- +- tagAttrName = Word(alphas, alphanums + "_-:") +- if xml: +- tagAttrValue = dbl_quoted_string.copy().set_parse_action(remove_quotes) +- openTag = ( +- suppress_LT +- + tagStr("tag") +- + Dict(ZeroOrMore(Group(tagAttrName + Suppress("=") + tagAttrValue))) +- + Opt("/", default=[False])("empty").set_parse_action( +- lambda s, l, t: t[0] == "/" +- ) +- + suppress_GT +- ) +- else: +- tagAttrValue = 
quoted_string.copy().set_parse_action(remove_quotes) | Word( +- printables, exclude_chars=">" +- ) +- openTag = ( +- suppress_LT +- + tagStr("tag") +- + Dict( +- ZeroOrMore( +- Group( +- tagAttrName.set_parse_action(lambda t: t[0].lower()) +- + Opt(Suppress("=") + tagAttrValue) +- ) +- ) +- ) +- + Opt("/", default=[False])("empty").set_parse_action( +- lambda s, l, t: t[0] == "/" +- ) +- + suppress_GT +- ) +- closeTag = Combine(Literal("</") + tagStr + ">", adjacent=False) +- +- openTag.set_name("<%s>" % resname) +- # add start results name in parse action now that ungrouped names are not reported at two levels +- openTag.add_parse_action( +- lambda t: t.__setitem__( +- "start" + "".join(resname.replace(":", " ").title().split()), t.copy() +- ) +- ) +- closeTag = closeTag( +- "end" + "".join(resname.replace(":", " ").title().split()) +- ).set_name("</%s>" % resname) +- openTag.tag = resname +- closeTag.tag = resname +- openTag.tag_body = SkipTo(closeTag()) +- return openTag, closeTag +- +- +-def make_html_tags( +- tag_str: Union[str, ParserElement] +-) -> Tuple[ParserElement, ParserElement]: +- """Helper to construct opening and closing tag expressions for HTML, +- given a tag name. Matches tags in either upper or lower case, +- attributes with namespaces and with quoted or unquoted values. +- +- Example:: +- +- text = '<td>More info at the <a href="https://github.com/pyparsing/pyparsing/wiki">pyparsing</a> wiki page</td>' +- # make_html_tags returns pyparsing expressions for the opening and +- # closing tags as a 2-tuple +- a, a_end = make_html_tags("A") +- link_expr = a + SkipTo(a_end)("link_text") + a_end +- +- for link in link_expr.search_string(text): +- # attributes in the <A> tag (like "href" shown here) are +- # also accessible as named results +- print(link.link_text, '->', link.href) +- +- prints:: +- +- pyparsing -> https://github.com/pyparsing/pyparsing/wiki +- """ +- return _makeTags(tag_str, False) +- +- +-def make_xml_tags( +- tag_str: Union[str, ParserElement] +-) -> Tuple[ParserElement, ParserElement]: +- """Helper to construct opening and closing tag expressions for XML, +- given a tag name. Matches tags only in the given upper/lower case. +- +- Example: similar to :class:`make_html_tags` +- """ +- return _makeTags(tag_str, True) +- +- +-any_open_tag: ParserElement +-any_close_tag: ParserElement +-any_open_tag, any_close_tag = make_html_tags( +- Word(alphas, alphanums + "_:").set_name("any tag") +-) +- +-_htmlEntityMap = {k.rstrip(";"): v for k, v in html.entities.html5.items()} +-common_html_entity = Regex("&(?P<entity>" + "|".join(_htmlEntityMap) + ");").set_name( +- "common HTML entity" +-) +- +- +-def replace_html_entity(t): +- """Helper parser action to replace common HTML entities with their special characters""" +- return _htmlEntityMap.get(t.entity) +- +- +-class OpAssoc(Enum): +- LEFT = 1 +- RIGHT = 2 +- +- +-InfixNotationOperatorArgType = Union[ +- ParserElement, str, Tuple[Union[ParserElement, str], Union[ParserElement, str]] +-] +-InfixNotationOperatorSpec = Union[ +- Tuple[ +- InfixNotationOperatorArgType, +- int, +- OpAssoc, +- typing.Optional[ParseAction], +- ], +- Tuple[ +- InfixNotationOperatorArgType, +- int, +- OpAssoc, +- ], +-] +- +- +-def infix_notation( +- base_expr: ParserElement, +- op_list: List[InfixNotationOperatorSpec], +- lpar: Union[str, ParserElement] = Suppress("("), +- rpar: Union[str, ParserElement] = Suppress(")"), +-) -> ParserElement: +- """Helper method for constructing grammars of expressions made up of +- operators working in a precedence hierarchy. Operators may be unary +- or binary, left- or right-associative.
Parse actions can also be +- attached to operator expressions. The generated parser will also +- recognize the use of parentheses to override operator precedences +- (see example below). +- +- Note: if you define a deep operator list, you may see performance +- issues when using infix_notation. See +- :class:`ParserElement.enable_packrat` for a mechanism to potentially +- improve your parser performance. +- +- Parameters: +- - ``base_expr`` - expression representing the most basic operand to +- be used in the expression +- - ``op_list`` - list of tuples, one for each operator precedence level +- in the expression grammar; each tuple is of the form ``(op_expr, +- num_operands, right_left_assoc, (optional)parse_action)``, where: +- +- - ``op_expr`` is the pyparsing expression for the operator; may also +- be a string, which will be converted to a Literal; if ``num_operands`` +- is 3, ``op_expr`` is a tuple of two expressions, for the two +- operators separating the 3 terms +- - ``num_operands`` is the number of terms for this operator (must be 1, +- 2, or 3) +- - ``right_left_assoc`` is the indicator whether the operator is right +- or left associative, using the pyparsing-defined constants +- ``OpAssoc.RIGHT`` and ``OpAssoc.LEFT``. +- - ``parse_action`` is the parse action to be associated with +- expressions matching this operator expression (the parse action +- tuple member may be omitted); if the parse action is passed +- a tuple or list of functions, this is equivalent to calling +- ``set_parse_action(*fn)`` +- (:class:`ParserElement.set_parse_action`) +- - ``lpar`` - expression for matching left-parentheses; if passed as a +- str, then will be parsed as Suppress(lpar). If lpar is passed as +- an expression (such as ``Literal('(')``), then it will be kept in +- the parsed results, and grouped with them. (default= ``Suppress('(')``) +- - ``rpar`` - expression for matching right-parentheses; if passed as a +- str, then will be parsed as Suppress(rpar). If rpar is passed as +- an expression (such as ``Literal(')')``), then it will be kept in +- the parsed results, and grouped with them. 
(default= ``Suppress(')')``) +- +- Example:: +- +- # simple example of four-function arithmetic with ints and +- # variable names +- integer = pyparsing_common.signed_integer +- varname = pyparsing_common.identifier +- +- arith_expr = infix_notation(integer | varname, +- [ +- ('-', 1, OpAssoc.RIGHT), +- (one_of('* /'), 2, OpAssoc.LEFT), +- (one_of('+ -'), 2, OpAssoc.LEFT), +- ]) +- +- arith_expr.run_tests(''' +- 5+3*6 +- (5+3)*6 +- -2--11 +- ''', full_dump=False) +- +- prints:: +- +- 5+3*6 +- [[5, '+', [3, '*', 6]]] +- +- (5+3)*6 +- [[[5, '+', 3], '*', 6]] +- +- -2--11 +- [[['-', 2], '-', ['-', 11]]] +- """ +- # captive version of FollowedBy that does not do parse actions or capture results names +- class _FB(FollowedBy): +- def parseImpl(self, instring, loc, doActions=True): +- self.expr.try_parse(instring, loc) +- return loc, [] +- +- _FB.__name__ = "FollowedBy>" +- +- ret = Forward() +- if isinstance(lpar, str): +- lpar = Suppress(lpar) +- if isinstance(rpar, str): +- rpar = Suppress(rpar) +- +- # if lpar and rpar are not suppressed, wrap in group +- if not (isinstance(rpar, Suppress) and isinstance(rpar, Suppress)): +- lastExpr = base_expr | Group(lpar + ret + rpar) +- else: +- lastExpr = base_expr | (lpar + ret + rpar) +- +- for i, operDef in enumerate(op_list): +- opExpr, arity, rightLeftAssoc, pa = (operDef + (None,))[:4] +- if isinstance(opExpr, str_type): +- opExpr = ParserElement._literalStringClass(opExpr) +- if arity == 3: +- if not isinstance(opExpr, (tuple, list)) or len(opExpr) != 2: +- raise ValueError( +- "if numterms=3, opExpr must be a tuple or list of two expressions" +- ) +- opExpr1, opExpr2 = opExpr +- term_name = "{}{} term".format(opExpr1, opExpr2) +- else: +- term_name = "{} term".format(opExpr) +- +- if not 1 <= arity <= 3: +- raise ValueError("operator must be unary (1), binary (2), or ternary (3)") +- +- if rightLeftAssoc not in (OpAssoc.LEFT, OpAssoc.RIGHT): +- raise ValueError("operator must indicate right or left associativity") +- +- thisExpr: Forward = Forward().set_name(term_name) +- if rightLeftAssoc is OpAssoc.LEFT: +- if arity == 1: +- matchExpr = _FB(lastExpr + opExpr) + Group(lastExpr + opExpr[1, ...]) +- elif arity == 2: +- if opExpr is not None: +- matchExpr = _FB(lastExpr + opExpr + lastExpr) + Group( +- lastExpr + (opExpr + lastExpr)[1, ...] +- ) +- else: +- matchExpr = _FB(lastExpr + lastExpr) + Group(lastExpr[2, ...]) +- elif arity == 3: +- matchExpr = _FB( +- lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr +- ) + Group(lastExpr + OneOrMore(opExpr1 + lastExpr + opExpr2 + lastExpr)) +- elif rightLeftAssoc is OpAssoc.RIGHT: +- if arity == 1: +- # try to avoid LR with this extra test +- if not isinstance(opExpr, Opt): +- opExpr = Opt(opExpr) +- matchExpr = _FB(opExpr.expr + thisExpr) + Group(opExpr + thisExpr) +- elif arity == 2: +- if opExpr is not None: +- matchExpr = _FB(lastExpr + opExpr + thisExpr) + Group( +- lastExpr + (opExpr + thisExpr)[1, ...] +- ) +- else: +- matchExpr = _FB(lastExpr + thisExpr) + Group( +- lastExpr + thisExpr[1, ...] 
+- ) +- elif arity == 3: +- matchExpr = _FB( +- lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr +- ) + Group(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) +- if pa: +- if isinstance(pa, (tuple, list)): +- matchExpr.set_parse_action(*pa) +- else: +- matchExpr.set_parse_action(pa) +- thisExpr <<= (matchExpr | lastExpr).setName(term_name) +- lastExpr = thisExpr +- ret <<= lastExpr +- return ret +- +- +-def indentedBlock(blockStatementExpr, indentStack, indent=True, backup_stacks=[]): +- """ +- (DEPRECATED - use IndentedBlock class instead) +- Helper method for defining space-delimited indentation blocks, +- such as those used to define block statements in Python source code. +- +- Parameters: +- +- - ``blockStatementExpr`` - expression defining syntax of statement that +- is repeated within the indented block +- - ``indentStack`` - list created by caller to manage indentation stack +- (multiple ``statementWithIndentedBlock`` expressions within a single +- grammar should share a common ``indentStack``) +- - ``indent`` - boolean indicating whether block must be indented beyond +- the current level; set to ``False`` for block of left-most statements +- (default= ``True``) +- +- A valid block must contain at least one ``blockStatement``. +- +- (Note that indentedBlock uses internal parse actions which make it +- incompatible with packrat parsing.) +- +- Example:: +- +- data = ''' +- def A(z): +- A1 +- B = 100 +- G = A2 +- A2 +- A3 +- B +- def BB(a,b,c): +- BB1 +- def BBA(): +- bba1 +- bba2 +- bba3 +- C +- D +- def spam(x,y): +- def eggs(z): +- pass +- ''' +- +- +- indentStack = [1] +- stmt = Forward() +- +- identifier = Word(alphas, alphanums) +- funcDecl = ("def" + identifier + Group("(" + Opt(delimitedList(identifier)) + ")") + ":") +- func_body = indentedBlock(stmt, indentStack) +- funcDef = Group(funcDecl + func_body) +- +- rvalue = Forward() +- funcCall = Group(identifier + "(" + Opt(delimitedList(rvalue)) + ")") +- rvalue << (funcCall | identifier | Word(nums)) +- assignment = Group(identifier + "=" + rvalue) +- stmt << (funcDef | assignment | identifier) +- +- module_body = stmt[1, ...] 
+- +- parseTree = module_body.parseString(data) +- parseTree.pprint() +- +- prints:: +- +- [['def', +- 'A', +- ['(', 'z', ')'], +- ':', +- [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], +- 'B', +- ['def', +- 'BB', +- ['(', 'a', 'b', 'c', ')'], +- ':', +- [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], +- 'C', +- 'D', +- ['def', +- 'spam', +- ['(', 'x', 'y', ')'], +- ':', +- [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] +- """ +- backup_stacks.append(indentStack[:]) +- +- def reset_stack(): +- indentStack[:] = backup_stacks[-1] +- +- def checkPeerIndent(s, l, t): +- if l >= len(s): +- return +- curCol = col(l, s) +- if curCol != indentStack[-1]: +- if curCol > indentStack[-1]: +- raise ParseException(s, l, "illegal nesting") +- raise ParseException(s, l, "not a peer entry") +- +- def checkSubIndent(s, l, t): +- curCol = col(l, s) +- if curCol > indentStack[-1]: +- indentStack.append(curCol) +- else: +- raise ParseException(s, l, "not a subentry") +- +- def checkUnindent(s, l, t): +- if l >= len(s): +- return +- curCol = col(l, s) +- if not (indentStack and curCol in indentStack): +- raise ParseException(s, l, "not an unindent") +- if curCol < indentStack[-1]: +- indentStack.pop() +- +- NL = OneOrMore(LineEnd().set_whitespace_chars("\t ").suppress()) +- INDENT = (Empty() + Empty().set_parse_action(checkSubIndent)).set_name("INDENT") +- PEER = Empty().set_parse_action(checkPeerIndent).set_name("") +- UNDENT = Empty().set_parse_action(checkUnindent).set_name("UNINDENT") +- if indent: +- smExpr = Group( +- Opt(NL) +- + INDENT +- + OneOrMore(PEER + Group(blockStatementExpr) + Opt(NL)) +- + UNDENT +- ) +- else: +- smExpr = Group( +- Opt(NL) +- + OneOrMore(PEER + Group(blockStatementExpr) + Opt(NL)) +- + Opt(UNDENT) +- ) +- +- # add a parse action to remove backup_stack from list of backups +- smExpr.add_parse_action( +- lambda: backup_stacks.pop(-1) and None if backup_stacks else None +- ) +- smExpr.set_fail_action(lambda a, b, c, d: reset_stack()) +- blockStatementExpr.ignore(_bslash + LineEnd()) +- return smExpr.set_name("indented block") +- +- +-# it's easy to get these comment structures wrong - they're very common, so may as well make them available +-c_style_comment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + "*/").set_name( +- "C style comment" +-) +-"Comment of the form ``/* ... */``" +- +-html_comment = Regex(r"<!--[\s\S]*?-->").set_name("HTML comment") +-"Comment of the form ``<!-- ... -->``" +- +-rest_of_line = Regex(r".*").leave_whitespace().set_name("rest of line") +-dbl_slash_comment = Regex(r"//(?:\\\n|[^\n])*").set_name("// comment") +-"Comment of the form ``// ... (to end of line)``" +- +-cpp_style_comment = Combine( +- Regex(r"/\*(?:[^*]|\*(?!/))*") + "*/" | dbl_slash_comment +-).set_name("C++ style comment") +-"Comment of either form :class:`c_style_comment` or :class:`dbl_slash_comment`" +- +-java_style_comment = cpp_style_comment +-"Same as :class:`cpp_style_comment`" +- +-python_style_comment = Regex(r"#.*").set_name("Python style comment") +-"Comment of the form ``# ...
(to end of line)``" +- +- +-# build list of built-in expressions, for future reference if a global default value +-# gets updated +-_builtin_exprs: List[ParserElement] = [ +- v for v in vars().values() if isinstance(v, ParserElement) +-] +- +- +-# pre-PEP8 compatible names +-delimitedList = delimited_list +-countedArray = counted_array +-matchPreviousLiteral = match_previous_literal +-matchPreviousExpr = match_previous_expr +-oneOf = one_of +-dictOf = dict_of +-originalTextFor = original_text_for +-nestedExpr = nested_expr +-makeHTMLTags = make_html_tags +-makeXMLTags = make_xml_tags +-anyOpenTag, anyCloseTag = any_open_tag, any_close_tag +-commonHTMLEntity = common_html_entity +-replaceHTMLEntity = replace_html_entity +-opAssoc = OpAssoc +-infixNotation = infix_notation +-cStyleComment = c_style_comment +-htmlComment = html_comment +-restOfLine = rest_of_line +-dblSlashComment = dbl_slash_comment +-cppStyleComment = cpp_style_comment +-javaStyleComment = java_style_comment +-pythonStyleComment = python_style_comment +diff --git a/src/poetry/core/_vendor/pyparsing/py.typed b/src/poetry/core/_vendor/pyparsing/py.typed +deleted file mode 100644 +index e69de29..0000000 +diff --git a/src/poetry/core/_vendor/pyparsing/results.py b/src/poetry/core/_vendor/pyparsing/results.py +deleted file mode 100644 +index 00c9421..0000000 +--- a/src/poetry/core/_vendor/pyparsing/results.py ++++ /dev/null +@@ -1,760 +0,0 @@ +-# results.py +-from collections.abc import MutableMapping, Mapping, MutableSequence, Iterator +-import pprint +-from weakref import ref as wkref +-from typing import Tuple, Any +- +-str_type: Tuple[type, ...] = (str, bytes) +-_generator_type = type((_ for _ in ())) +- +- +-class _ParseResultsWithOffset: +- __slots__ = ["tup"] +- +- def __init__(self, p1, p2): +- self.tup = (p1, p2) +- +- def __getitem__(self, i): +- return self.tup[i] +- +- def __getstate__(self): +- return self.tup +- +- def __setstate__(self, *args): +- self.tup = args[0] +- +- +-class ParseResults: +- """Structured parse results, to provide multiple means of access to +- the parsed data: +- +- - as a list (``len(results)``) +- - by list index (``results[0], results[1]``, etc.) +- - by attribute (``results.<results_name>`` - see :class:`ParserElement.set_results_name`) +- +- Example:: +- +- integer = Word(nums) +- date_str = (integer.set_results_name("year") + '/' +- + integer.set_results_name("month") + '/' +- + integer.set_results_name("day")) +- # equivalent form: +- # date_str = (integer("year") + '/' +- # + integer("month") + '/' +- # + integer("day")) +- +- # parse_string returns a ParseResults object +- result = date_str.parse_string("1999/12/31") +- +- def test(s, fn=repr): +- print("{} -> {}".format(s, fn(eval(s)))) +- test("list(result)") +- test("result[0]") +- test("result['month']") +- test("result.day") +- test("'month' in result") +- test("'minutes' in result") +- test("result.dump()", str) +- +- prints:: +- +- list(result) -> ['1999', '/', '12', '/', '31'] +- result[0] -> '1999' +- result['month'] -> '12' +- result.day -> '31' +- 'month' in result -> True +- 'minutes' in result -> False +- result.dump() -> ['1999', '/', '12', '/', '31'] +- - day: '31' +- - month: '12' +- - year: '1999' +- """ +- +- _null_values: Tuple[Any, ...]
= (None, [], "", ()) +- +- __slots__ = [ +- "_name", +- "_parent", +- "_all_names", +- "_modal", +- "_toklist", +- "_tokdict", +- "__weakref__", +- ] +- +- class List(list): +- """ +- Simple wrapper class to distinguish parsed list results that should be preserved +- as actual Python lists, instead of being converted to :class:`ParseResults`: +- +- LBRACK, RBRACK = map(pp.Suppress, "[]") +- element = pp.Forward() +- item = ppc.integer +- element_list = LBRACK + pp.delimited_list(element) + RBRACK +- +- # add parse actions to convert from ParseResults to actual Python collection types +- def as_python_list(t): +- return pp.ParseResults.List(t.as_list()) +- element_list.add_parse_action(as_python_list) +- +- element <<= item | element_list +- +- element.run_tests(''' +- 100 +- [2,3,4] +- [[2, 1],3,4] +- [(2, 1),3,4] +- (2,3,4) +- ''', post_parse=lambda s, r: (r[0], type(r[0]))) +- +- prints: +- +- 100 +- (100, <class 'int'>) +- +- [2,3,4] +- ([2, 3, 4], <class 'list'>) +- +- [[2, 1],3,4] +- ([[2, 1], 3, 4], <class 'list'>) +- +- (Used internally by :class:`Group` when `aslist=True`.) +- """ +- +- def __new__(cls, contained=None): +- if contained is None: +- contained = [] +- +- if not isinstance(contained, list): +- raise TypeError( +- "{} may only be constructed with a list," +- " not {}".format(cls.__name__, type(contained).__name__) +- ) +- +- return list.__new__(cls) +- +- def __new__(cls, toklist=None, name=None, **kwargs): +- if isinstance(toklist, ParseResults): +- return toklist +- self = object.__new__(cls) +- self._name = None +- self._parent = None +- self._all_names = set() +- +- if toklist is None: +- self._toklist = [] +- elif isinstance(toklist, (list, _generator_type)): +- self._toklist = ( +- [toklist[:]] +- if isinstance(toklist, ParseResults.List) +- else list(toklist) +- ) +- else: +- self._toklist = [toklist] +- self._tokdict = dict() +- return self +- +- # Performance tuning: we construct a *lot* of these, so keep this +- # constructor as small and fast as possible +- def __init__( +- self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance +- ): +- self._modal = modal +- if name is not None and name != "": +- if isinstance(name, int): +- name = str(name) +- if not modal: +- self._all_names = {name} +- self._name = name +- if toklist not in self._null_values: +- if isinstance(toklist, (str_type, type)): +- toklist = [toklist] +- if asList: +- if isinstance(toklist, ParseResults): +- self[name] = _ParseResultsWithOffset( +- ParseResults(toklist._toklist), 0 +- ) +- else: +- self[name] = _ParseResultsWithOffset( +- ParseResults(toklist[0]), 0 +- ) +- self[name]._name = name +- else: +- try: +- self[name] = toklist[0] +- except (KeyError, TypeError, IndexError): +- if toklist is not self: +- self[name] = toklist +- else: +- self._name = name +- +- def __getitem__(self, i): +- if isinstance(i, (int, slice)): +- return self._toklist[i] +- else: +- if i not in self._all_names: +- return self._tokdict[i][-1][0] +- else: +- return ParseResults([v[0] for v in self._tokdict[i]]) +- +- def __setitem__(self, k, v, isinstance=isinstance): +- if isinstance(v, _ParseResultsWithOffset): +- self._tokdict[k] = self._tokdict.get(k, list()) + [v] +- sub = v[0] +- elif isinstance(k, (int, slice)): +- self._toklist[k] = v +- sub = v +- else: +- self._tokdict[k] = self._tokdict.get(k, list()) + [ +- _ParseResultsWithOffset(v, 0) +- ] +- sub = v +- if isinstance(sub, ParseResults): +- sub._parent = wkref(self) +- +- def __delitem__(self, i): +- if isinstance(i, (int, slice)): +- mylen = len(self._toklist) +- del
self._toklist[i] +- +- # convert int to slice +- if isinstance(i, int): +- if i < 0: +- i += mylen +- i = slice(i, i + 1) +- # get removed indices +- removed = list(range(*i.indices(mylen))) +- removed.reverse() +- # fixup indices in token dictionary +- for name, occurrences in self._tokdict.items(): +- for j in removed: +- for k, (value, position) in enumerate(occurrences): +- occurrences[k] = _ParseResultsWithOffset( +- value, position - (position > j) +- ) +- else: +- del self._tokdict[i] +- +- def __contains__(self, k) -> bool: +- return k in self._tokdict +- +- def __len__(self) -> int: +- return len(self._toklist) +- +- def __bool__(self) -> bool: +- return not not (self._toklist or self._tokdict) +- +- def __iter__(self) -> Iterator: +- return iter(self._toklist) +- +- def __reversed__(self) -> Iterator: +- return iter(self._toklist[::-1]) +- +- def keys(self): +- return iter(self._tokdict) +- +- def values(self): +- return (self[k] for k in self.keys()) +- +- def items(self): +- return ((k, self[k]) for k in self.keys()) +- +- def haskeys(self) -> bool: +- """ +- Since ``keys()`` returns an iterator, this method is helpful in bypassing +- code that looks for the existence of any defined results names.""" +- return bool(self._tokdict) +- +- def pop(self, *args, **kwargs): +- """ +- Removes and returns item at specified index (default= ``last``). +- Supports both ``list`` and ``dict`` semantics for ``pop()``. If +- passed no argument or an integer argument, it will use ``list`` +- semantics and pop tokens from the list of parsed tokens. If passed +- a non-integer argument (most likely a string), it will use ``dict`` +- semantics and pop the corresponding value from any defined results +- names. A second default return value argument is supported, just as in +- ``dict.pop()``. +- +- Example:: +- +- numlist = Word(nums)[...] +- print(numlist.parse_string("0 123 321")) # -> ['0', '123', '321'] +- +- def remove_first(tokens): +- tokens.pop(0) +- numlist.add_parse_action(remove_first) +- print(numlist.parse_string("0 123 321")) # -> ['123', '321'] +- +- label = Word(alphas) +- patt = label("LABEL") + Word(nums)[1, ...] +- print(patt.parse_string("AAB 123 321").dump()) +- +- # Use pop() in a parse action to remove named result (note that corresponding value is not +- # removed from list form of results) +- def remove_LABEL(tokens): +- tokens.pop("LABEL") +- return tokens +- patt.add_parse_action(remove_LABEL) +- print(patt.parse_string("AAB 123 321").dump()) +- +- prints:: +- +- ['AAB', '123', '321'] +- - LABEL: 'AAB' +- +- ['AAB', '123', '321'] +- """ +- if not args: +- args = [-1] +- for k, v in kwargs.items(): +- if k == "default": +- args = (args[0], v) +- else: +- raise TypeError( +- "pop() got an unexpected keyword argument {!r}".format(k) +- ) +- if isinstance(args[0], int) or len(args) == 1 or args[0] in self: +- index = args[0] +- ret = self[index] +- del self[index] +- return ret +- else: +- defaultvalue = args[1] +- return defaultvalue +- +- def get(self, key, default_value=None): +- """ +- Returns named result matching the given key, or if there is no +- such name, then returns the given ``default_value`` or ``None`` if no +- ``default_value`` is specified. +- +- Similar to ``dict.get()``. 
+- +- Example:: +- +- integer = Word(nums) +- date_str = integer("year") + '/' + integer("month") + '/' + integer("day") +- +- result = date_str.parse_string("1999/12/31") +- print(result.get("year")) # -> '1999' +- print(result.get("hour", "not specified")) # -> 'not specified' +- print(result.get("hour")) # -> None +- """ +- if key in self: +- return self[key] +- else: +- return default_value +- +- def insert(self, index, ins_string): +- """ +- Inserts new element at location index in the list of parsed tokens. +- +- Similar to ``list.insert()``. +- +- Example:: +- +- numlist = Word(nums)[...] +- print(numlist.parse_string("0 123 321")) # -> ['0', '123', '321'] +- +- # use a parse action to insert the parse location in the front of the parsed results +- def insert_locn(locn, tokens): +- tokens.insert(0, locn) +- numlist.add_parse_action(insert_locn) +- print(numlist.parse_string("0 123 321")) # -> [0, '0', '123', '321'] +- """ +- self._toklist.insert(index, ins_string) +- # fixup indices in token dictionary +- for name, occurrences in self._tokdict.items(): +- for k, (value, position) in enumerate(occurrences): +- occurrences[k] = _ParseResultsWithOffset( +- value, position + (position > index) +- ) +- +- def append(self, item): +- """ +- Add single element to end of ``ParseResults`` list of elements. +- +- Example:: +- +- numlist = Word(nums)[...] +- print(numlist.parse_string("0 123 321")) # -> ['0', '123', '321'] +- +- # use a parse action to compute the sum of the parsed integers, and add it to the end +- def append_sum(tokens): +- tokens.append(sum(map(int, tokens))) +- numlist.add_parse_action(append_sum) +- print(numlist.parse_string("0 123 321")) # -> ['0', '123', '321', 444] +- """ +- self._toklist.append(item) +- +- def extend(self, itemseq): +- """ +- Add sequence of elements to end of ``ParseResults`` list of elements. +- +- Example:: +- +- patt = Word(alphas)[1, ...] +- +- # use a parse action to append the reverse of the matched strings, to make a palindrome +- def make_palindrome(tokens): +- tokens.extend(reversed([t[::-1] for t in tokens])) +- return ''.join(tokens) +- patt.add_parse_action(make_palindrome) +- print(patt.parse_string("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl' +- """ +- if isinstance(itemseq, ParseResults): +- self.__iadd__(itemseq) +- else: +- self._toklist.extend(itemseq) +- +- def clear(self): +- """ +- Clear all elements and results names. 
+- """ +- del self._toklist[:] +- self._tokdict.clear() +- +- def __getattr__(self, name): +- try: +- return self[name] +- except KeyError: +- if name.startswith("__"): +- raise AttributeError(name) +- return "" +- +- def __add__(self, other) -> "ParseResults": +- ret = self.copy() +- ret += other +- return ret +- +- def __iadd__(self, other) -> "ParseResults": +- if other._tokdict: +- offset = len(self._toklist) +- addoffset = lambda a: offset if a < 0 else a + offset +- otheritems = other._tokdict.items() +- otherdictitems = [ +- (k, _ParseResultsWithOffset(v[0], addoffset(v[1]))) +- for k, vlist in otheritems +- for v in vlist +- ] +- for k, v in otherdictitems: +- self[k] = v +- if isinstance(v[0], ParseResults): +- v[0]._parent = wkref(self) +- +- self._toklist += other._toklist +- self._all_names |= other._all_names +- return self +- +- def __radd__(self, other) -> "ParseResults": +- if isinstance(other, int) and other == 0: +- # useful for merging many ParseResults using sum() builtin +- return self.copy() +- else: +- # this may raise a TypeError - so be it +- return other + self +- +- def __repr__(self) -> str: +- return "{}({!r}, {})".format(type(self).__name__, self._toklist, self.as_dict()) +- +- def __str__(self) -> str: +- return ( +- "[" +- + ", ".join( +- [ +- str(i) if isinstance(i, ParseResults) else repr(i) +- for i in self._toklist +- ] +- ) +- + "]" +- ) +- +- def _asStringList(self, sep=""): +- out = [] +- for item in self._toklist: +- if out and sep: +- out.append(sep) +- if isinstance(item, ParseResults): +- out += item._asStringList() +- else: +- out.append(str(item)) +- return out +- +- def as_list(self) -> list: +- """ +- Returns the parse results as a nested list of matching tokens, all converted to strings. +- +- Example:: +- +- patt = Word(alphas)[1, ...] +- result = patt.parse_string("sldkj lsdkj sldkj") +- # even though the result prints in string-like form, it is actually a pyparsing ParseResults +- print(type(result), result) # -> ['sldkj', 'lsdkj', 'sldkj'] +- +- # Use as_list() to create an actual list +- result_list = result.as_list() +- print(type(result_list), result_list) # -> ['sldkj', 'lsdkj', 'sldkj'] +- """ +- return [ +- res.as_list() if isinstance(res, ParseResults) else res +- for res in self._toklist +- ] +- +- def as_dict(self) -> dict: +- """ +- Returns the named parse results as a nested dictionary. +- +- Example:: +- +- integer = Word(nums) +- date_str = integer("year") + '/' + integer("month") + '/' + integer("day") +- +- result = date_str.parse_string('12/31/1999') +- print(type(result), repr(result)) # -> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]}) +- +- result_dict = result.as_dict() +- print(type(result_dict), repr(result_dict)) # -> {'day': '1999', 'year': '12', 'month': '31'} +- +- # even though a ParseResults supports dict-like access, sometime you just need to have a dict +- import json +- print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable +- print(json.dumps(result.as_dict())) # -> {"month": "31", "day": "1999", "year": "12"} +- """ +- +- def to_item(obj): +- if isinstance(obj, ParseResults): +- return obj.as_dict() if obj.haskeys() else [to_item(v) for v in obj] +- else: +- return obj +- +- return dict((k, to_item(v)) for k, v in self.items()) +- +- def copy(self) -> "ParseResults": +- """ +- Returns a new copy of a :class:`ParseResults` object. 
+-        """
+-        ret = ParseResults(self._toklist)
+-        ret._tokdict = self._tokdict.copy()
+-        ret._parent = self._parent
+-        ret._all_names |= self._all_names
+-        ret._name = self._name
+-        return ret
+-
+-    def get_name(self):
+-        r"""
+-        Returns the results name for this token expression. Useful when several
+-        different expressions might match at a particular location.
+-
+-        Example::
+-
+-            integer = Word(nums)
+-            ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d")
+-            house_number_expr = Suppress('#') + Word(nums, alphanums)
+-            user_data = (Group(house_number_expr)("house_number")
+-                        | Group(ssn_expr)("ssn")
+-                        | Group(integer)("age"))
+-            user_info = user_data[1, ...]
+-
+-            result = user_info.parse_string("22 111-22-3333 #221B")
+-            for item in result:
+-                print(item.get_name(), ':', item[0])
+-
+-        prints::
+-
+-            age : 22
+-            ssn : 111-22-3333
+-            house_number : 221B
+-        """
+-        if self._name:
+-            return self._name
+-        elif self._parent:
+-            par = self._parent()
+-
+-            def find_in_parent(sub):
+-                return next(
+-                    (
+-                        k
+-                        for k, vlist in par._tokdict.items()
+-                        for v, loc in vlist
+-                        if sub is v
+-                    ),
+-                    None,
+-                )
+-
+-            return find_in_parent(self) if par else None
+-        elif (
+-            len(self) == 1
+-            and len(self._tokdict) == 1
+-            and next(iter(self._tokdict.values()))[0][1] in (0, -1)
+-        ):
+-            return next(iter(self._tokdict.keys()))
+-        else:
+-            return None
+-
+-    def dump(self, indent="", full=True, include_list=True, _depth=0) -> str:
+-        """
+-        Diagnostic method for listing out the contents of
+-        a :class:`ParseResults`. Accepts an optional ``indent`` argument so
+-        that this string can be embedded in a nested display of other data.
+-
+-        Example::
+-
+-            integer = Word(nums)
+-            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+-
+-            result = date_str.parse_string('1999/12/31')
+-            print(result.dump())
+-
+-        prints::
+-
+-            ['1999', '/', '12', '/', '31']
+-            - day: '31'
+-            - month: '12'
+-            - year: '1999'
+-        """
+-        out = []
+-        NL = "\n"
+-        out.append(indent + str(self.as_list()) if include_list else "")
+-
+-        if full:
+-            if self.haskeys():
+-                items = sorted((str(k), v) for k, v in self.items())
+-                for k, v in items:
+-                    if out:
+-                        out.append(NL)
+-                    out.append("{}{}- {}: ".format(indent, ("  " * _depth), k))
+-                    if isinstance(v, ParseResults):
+-                        if v:
+-                            out.append(
+-                                v.dump(
+-                                    indent=indent,
+-                                    full=full,
+-                                    include_list=include_list,
+-                                    _depth=_depth + 1,
+-                                )
+-                            )
+-                        else:
+-                            out.append(str(v))
+-                    else:
+-                        out.append(repr(v))
+-            if any(isinstance(vv, ParseResults) for vv in self):
+-                v = self
+-                for i, vv in enumerate(v):
+-                    if isinstance(vv, ParseResults):
+-                        out.append(
+-                            "\n{}{}[{}]:\n{}{}{}".format(
+-                                indent,
+-                                ("  " * (_depth)),
+-                                i,
+-                                indent,
+-                                ("  " * (_depth + 1)),
+-                                vv.dump(
+-                                    indent=indent,
+-                                    full=full,
+-                                    include_list=include_list,
+-                                    _depth=_depth + 1,
+-                                ),
+-                            )
+-                        )
+-                    else:
+-                        out.append(
+-                            "\n%s%s[%d]:\n%s%s%s"
+-                            % (
+-                                indent,
+-                                ("  " * (_depth)),
+-                                i,
+-                                indent,
+-                                ("  " * (_depth + 1)),
+-                                str(vv),
+-                            )
+-                        )
+-
+-        return "".join(out)
+-
+-    def pprint(self, *args, **kwargs):
+-        """
+-        Pretty-printer for parsed results as a list, using the
+-        `pprint <https://docs.python.org/3/library/pprint.html>`_ module.
+-        Accepts additional positional or keyword args as defined for
+-        `pprint.pprint <https://docs.python.org/3/library/pprint.html#pprint.pprint>`_ .
+- +- Example:: +- +- ident = Word(alphas, alphanums) +- num = Word(nums) +- func = Forward() +- term = ident | num | Group('(' + func + ')') +- func <<= ident + Group(Optional(delimited_list(term))) +- result = func.parse_string("fna a,b,(fnb c,d,200),100") +- result.pprint(width=40) +- +- prints:: +- +- ['fna', +- ['a', +- 'b', +- ['(', 'fnb', ['c', 'd', '200'], ')'], +- '100']] +- """ +- pprint.pprint(self.as_list(), *args, **kwargs) +- +- # add support for pickle protocol +- def __getstate__(self): +- return ( +- self._toklist, +- ( +- self._tokdict.copy(), +- self._parent is not None and self._parent() or None, +- self._all_names, +- self._name, +- ), +- ) +- +- def __setstate__(self, state): +- self._toklist, (self._tokdict, par, inAccumNames, self._name) = state +- self._all_names = set(inAccumNames) +- if par is not None: +- self._parent = wkref(par) +- else: +- self._parent = None +- +- def __getnewargs__(self): +- return self._toklist, self._name +- +- def __dir__(self): +- return dir(type(self)) + list(self.keys()) +- +- @classmethod +- def from_dict(cls, other, name=None) -> "ParseResults": +- """ +- Helper classmethod to construct a ``ParseResults`` from a ``dict``, preserving the +- name-value relations as results names. If an optional ``name`` argument is +- given, a nested ``ParseResults`` will be returned. +- """ +- +- def is_iterable(obj): +- try: +- iter(obj) +- except Exception: +- return False +- else: +- return not isinstance(obj, str_type) +- +- ret = cls([]) +- for k, v in other.items(): +- if isinstance(v, Mapping): +- ret += cls.from_dict(v, name=k) +- else: +- ret += cls([v], name=k, asList=is_iterable(v)) +- if name is not None: +- ret = cls([ret], name=name) +- return ret +- +- asList = as_list +- asDict = as_dict +- getName = get_name +- +- +-MutableMapping.register(ParseResults) +-MutableSequence.register(ParseResults) +diff --git a/src/poetry/core/_vendor/pyparsing/testing.py b/src/poetry/core/_vendor/pyparsing/testing.py +deleted file mode 100644 +index 84a0ef1..0000000 +--- a/src/poetry/core/_vendor/pyparsing/testing.py ++++ /dev/null +@@ -1,331 +0,0 @@ +-# testing.py +- +-from contextlib import contextmanager +-import typing +- +-from .core import ( +- ParserElement, +- ParseException, +- Keyword, +- __diag__, +- __compat__, +-) +- +- +-class pyparsing_test: +- """ +- namespace class for classes useful in writing unit tests +- """ +- +- class reset_pyparsing_context: +- """ +- Context manager to be used when writing unit tests that modify pyparsing config values: +- - packrat parsing +- - bounded recursion parsing +- - default whitespace characters. +- - default keyword characters +- - literal string auto-conversion class +- - __diag__ settings +- +- Example:: +- +- with reset_pyparsing_context(): +- # test that literals used to construct a grammar are automatically suppressed +- ParserElement.inlineLiteralsUsing(Suppress) +- +- term = Word(alphas) | Word(nums) +- group = Group('(' + term[...] 
+ ')') +- +- # assert that the '()' characters are not included in the parsed tokens +- self.assertParseAndCheckList(group, "(abc 123 def)", ['abc', '123', 'def']) +- +- # after exiting context manager, literals are converted to Literal expressions again +- """ +- +- def __init__(self): +- self._save_context = {} +- +- def save(self): +- self._save_context["default_whitespace"] = ParserElement.DEFAULT_WHITE_CHARS +- self._save_context["default_keyword_chars"] = Keyword.DEFAULT_KEYWORD_CHARS +- +- self._save_context[ +- "literal_string_class" +- ] = ParserElement._literalStringClass +- +- self._save_context["verbose_stacktrace"] = ParserElement.verbose_stacktrace +- +- self._save_context["packrat_enabled"] = ParserElement._packratEnabled +- if ParserElement._packratEnabled: +- self._save_context[ +- "packrat_cache_size" +- ] = ParserElement.packrat_cache.size +- else: +- self._save_context["packrat_cache_size"] = None +- self._save_context["packrat_parse"] = ParserElement._parse +- self._save_context[ +- "recursion_enabled" +- ] = ParserElement._left_recursion_enabled +- +- self._save_context["__diag__"] = { +- name: getattr(__diag__, name) for name in __diag__._all_names +- } +- +- self._save_context["__compat__"] = { +- "collect_all_And_tokens": __compat__.collect_all_And_tokens +- } +- +- return self +- +- def restore(self): +- # reset pyparsing global state +- if ( +- ParserElement.DEFAULT_WHITE_CHARS +- != self._save_context["default_whitespace"] +- ): +- ParserElement.set_default_whitespace_chars( +- self._save_context["default_whitespace"] +- ) +- +- ParserElement.verbose_stacktrace = self._save_context["verbose_stacktrace"] +- +- Keyword.DEFAULT_KEYWORD_CHARS = self._save_context["default_keyword_chars"] +- ParserElement.inlineLiteralsUsing( +- self._save_context["literal_string_class"] +- ) +- +- for name, value in self._save_context["__diag__"].items(): +- (__diag__.enable if value else __diag__.disable)(name) +- +- ParserElement._packratEnabled = False +- if self._save_context["packrat_enabled"]: +- ParserElement.enable_packrat(self._save_context["packrat_cache_size"]) +- else: +- ParserElement._parse = self._save_context["packrat_parse"] +- ParserElement._left_recursion_enabled = self._save_context[ +- "recursion_enabled" +- ] +- +- __compat__.collect_all_And_tokens = self._save_context["__compat__"] +- +- return self +- +- def copy(self): +- ret = type(self)() +- ret._save_context.update(self._save_context) +- return ret +- +- def __enter__(self): +- return self.save() +- +- def __exit__(self, *args): +- self.restore() +- +- class TestParseResultsAsserts: +- """ +- A mixin class to add parse results assertion methods to normal unittest.TestCase classes. +- """ +- +- def assertParseResultsEquals( +- self, result, expected_list=None, expected_dict=None, msg=None +- ): +- """ +- Unit test assertion to compare a :class:`ParseResults` object with an optional ``expected_list``, +- and compare any defined results names with an optional ``expected_dict``. +- """ +- if expected_list is not None: +- self.assertEqual(expected_list, result.as_list(), msg=msg) +- if expected_dict is not None: +- self.assertEqual(expected_dict, result.as_dict(), msg=msg) +- +- def assertParseAndCheckList( +- self, expr, test_string, expected_list, msg=None, verbose=True +- ): +- """ +- Convenience wrapper assert to test a parser element and input string, and assert that +- the resulting ``ParseResults.asList()`` is equal to the ``expected_list``. 
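+-
+-        A minimal illustrative sketch, written inside a test case that mixes in
+-        ``TestParseResultsAsserts`` (``Word`` and ``nums`` assumed imported from
+-        pyparsing)::
+-
+-            self.assertParseAndCheckList(
+-                Word(nums)[...], "0 123 321", ["0", "123", "321"]
+-            )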
+- """ +- result = expr.parse_string(test_string, parse_all=True) +- if verbose: +- print(result.dump()) +- else: +- print(result.as_list()) +- self.assertParseResultsEquals(result, expected_list=expected_list, msg=msg) +- +- def assertParseAndCheckDict( +- self, expr, test_string, expected_dict, msg=None, verbose=True +- ): +- """ +- Convenience wrapper assert to test a parser element and input string, and assert that +- the resulting ``ParseResults.asDict()`` is equal to the ``expected_dict``. +- """ +- result = expr.parse_string(test_string, parseAll=True) +- if verbose: +- print(result.dump()) +- else: +- print(result.as_list()) +- self.assertParseResultsEquals(result, expected_dict=expected_dict, msg=msg) +- +- def assertRunTestResults( +- self, run_tests_report, expected_parse_results=None, msg=None +- ): +- """ +- Unit test assertion to evaluate output of ``ParserElement.runTests()``. If a list of +- list-dict tuples is given as the ``expected_parse_results`` argument, then these are zipped +- with the report tuples returned by ``runTests`` and evaluated using ``assertParseResultsEquals``. +- Finally, asserts that the overall ``runTests()`` success value is ``True``. +- +- :param run_tests_report: tuple(bool, [tuple(str, ParseResults or Exception)]) returned from runTests +- :param expected_parse_results (optional): [tuple(str, list, dict, Exception)] +- """ +- run_test_success, run_test_results = run_tests_report +- +- if expected_parse_results is not None: +- merged = [ +- (*rpt, expected) +- for rpt, expected in zip(run_test_results, expected_parse_results) +- ] +- for test_string, result, expected in merged: +- # expected should be a tuple containing a list and/or a dict or an exception, +- # and optional failure message string +- # an empty tuple will skip any result validation +- fail_msg = next( +- (exp for exp in expected if isinstance(exp, str)), None +- ) +- expected_exception = next( +- ( +- exp +- for exp in expected +- if isinstance(exp, type) and issubclass(exp, Exception) +- ), +- None, +- ) +- if expected_exception is not None: +- with self.assertRaises( +- expected_exception=expected_exception, msg=fail_msg or msg +- ): +- if isinstance(result, Exception): +- raise result +- else: +- expected_list = next( +- (exp for exp in expected if isinstance(exp, list)), None +- ) +- expected_dict = next( +- (exp for exp in expected if isinstance(exp, dict)), None +- ) +- if (expected_list, expected_dict) != (None, None): +- self.assertParseResultsEquals( +- result, +- expected_list=expected_list, +- expected_dict=expected_dict, +- msg=fail_msg or msg, +- ) +- else: +- # warning here maybe? +- print("no validation for {!r}".format(test_string)) +- +- # do this last, in case some specific test results can be reported instead +- self.assertTrue( +- run_test_success, msg=msg if msg is not None else "failed runTests" +- ) +- +- @contextmanager +- def assertRaisesParseException(self, exc_type=ParseException, msg=None): +- with self.assertRaises(exc_type, msg=msg): +- yield +- +- @staticmethod +- def with_line_numbers( +- s: str, +- start_line: typing.Optional[int] = None, +- end_line: typing.Optional[int] = None, +- expand_tabs: bool = True, +- eol_mark: str = "|", +- mark_spaces: typing.Optional[str] = None, +- mark_control: typing.Optional[str] = None, +- ) -> str: +- """ +- Helpful method for debugging a parser - prints a string with line and column numbers. +- (Line and column numbers are 1-based.) 
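+-
+-        A minimal illustrative sketch::
+-
+-            print(pyparsing_test.with_line_numbers("abc\ndef\n"))
+-
+-        which prints ``abc`` and ``def`` with leading line numbers, a
+-        column-number header, and a trailing ``|`` marking each end of line.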
+-
+-        :param s: str - string to be printed with line and column numbers
+-        :param start_line: int - (optional) starting line number in s to print (default=1)
+-        :param end_line: int - (optional) ending line number in s to print (default=len(s))
+-        :param expand_tabs: bool - (optional) expand tabs to spaces, to match the pyparsing default
+-        :param eol_mark: str - (optional) string to mark the end of lines, helps visualize trailing spaces (default="|")
+-        :param mark_spaces: str - (optional) special character to display in place of spaces
+-        :param mark_control: str - (optional) convert non-printing control characters to a placeholding
+-                                   character; valid values:
+-                                   - "unicode" - replaces control chars with Unicode symbols, such as "␍" and "␊"
+-                                   - any single character string - replace control characters with given string
+-                                   - None (default) - string is displayed as-is
+-
+-        :return: str - input string with leading line numbers and column number headers
+-        """
+-        if expand_tabs:
+-            s = s.expandtabs()
+-        if mark_control is not None:
+-            if mark_control == "unicode":
+-                tbl = str.maketrans(
+-                    {c: u for c, u in zip(range(0, 33), range(0x2400, 0x2433))}
+-                    | {127: 0x2421}
+-                )
+-                eol_mark = ""
+-            else:
+-                tbl = str.maketrans(
+-                    {c: mark_control for c in list(range(0, 32)) + [127]}
+-                )
+-            s = s.translate(tbl)
+-        if mark_spaces is not None and mark_spaces != " ":
+-            if mark_spaces == "unicode":
+-                tbl = str.maketrans({9: 0x2409, 32: 0x2423})
+-                s = s.translate(tbl)
+-            else:
+-                s = s.replace(" ", mark_spaces)
+-        if start_line is None:
+-            start_line = 1
+-        if end_line is None:
+-            end_line = len(s)
+-        end_line = min(end_line, len(s))
+-        start_line = min(max(1, start_line), end_line)
+-
+-        if mark_control != "unicode":
+-            s_lines = s.splitlines()[start_line - 1 : end_line]
+-        else:
+-            s_lines = [line + "␊" for line in s.split("␊")[start_line - 1 : end_line]]
+-        if not s_lines:
+-            return ""
+-
+-        lineno_width = len(str(end_line))
+-        max_line_len = max(len(line) for line in s_lines)
+-        lead = " " * (lineno_width + 1)
+-        if max_line_len >= 99:
+-            header0 = (
+-                lead
+-                + "".join(
+-                    "{}{}".format(" " * 99, (i + 1) % 100)
+-                    for i in range(max(max_line_len // 100, 1))
+-                )
+-                + "\n"
+-            )
+-        else:
+-            header0 = ""
+-        header1 = (
+-            header0
+-            + lead
+-            + "".join(
+-                "         {}".format((i + 1) % 10)
+-                for i in range(-(-max_line_len // 10))
+-            )
+-            + "\n"
+-        )
+-        header2 = lead + "1234567890" * (-(-max_line_len // 10)) + "\n"
+-        return (
+-            header1
+-            + header2
+-            + "\n".join(
+-                "{:{}d}:{}{}".format(i, lineno_width, line, eol_mark)
+-                for i, line in enumerate(s_lines, start=start_line)
+-            )
+-            + "\n"
+-        )
+diff --git a/src/poetry/core/_vendor/pyparsing/unicode.py b/src/poetry/core/_vendor/pyparsing/unicode.py
+deleted file mode 100644
+index 0652620..0000000
+--- a/src/poetry/core/_vendor/pyparsing/unicode.py
++++ /dev/null
+@@ -1,352 +0,0 @@
+-# unicode.py
+-
+-import sys
+-from itertools import filterfalse
+-from typing import List, Tuple, Union
+-
+-
+-class _lazyclassproperty:
+-    def __init__(self, fn):
+-        self.fn = fn
+-        self.__doc__ = fn.__doc__
+-        self.__name__ = fn.__name__
+-
+-    def __get__(self, obj, cls):
+-        if cls is None:
+-            cls = type(obj)
+-        if not hasattr(cls, "_intern") or any(
+-            cls._intern is getattr(superclass, "_intern", [])
+-            for superclass in cls.__mro__[1:]
+-        ):
+-            cls._intern = {}
+-        attrname = self.fn.__name__
+-        if attrname not in cls._intern:
+-            cls._intern[attrname] = self.fn(cls)
+-        return cls._intern[attrname]
+-
+-
+-UnicodeRangeList = List[Union[Tuple[int, int], Tuple[int]]]
+-
+-
+-class unicode_set:
+-    """
+-    A set of Unicode characters, for language-specific strings for
+-    ``alphas``, ``nums``, ``alphanums``, and ``printables``.
+-    A unicode_set is defined by a list of ranges in the Unicode character
+-    set, in a class attribute ``_ranges``. Ranges can be specified using
+-    2-tuples or a 1-tuple, such as::
+-
+-        _ranges = [
+-            (0x0020, 0x007e),
+-            (0x00a0, 0x00ff),
+-            (0x0100,),
+-        ]
+-
+-    Ranges are left- and right-inclusive. A 1-tuple of (x,) is treated as (x, x).
+-
+-    A unicode set can also be defined using multiple inheritance of other unicode sets::
+-
+-        class CJK(Chinese, Japanese, Korean):
+-            pass
+-    """
+-
+-    _ranges: UnicodeRangeList = []
+-
+-    @_lazyclassproperty
+-    def _chars_for_ranges(cls):
+-        ret = []
+-        for cc in cls.__mro__:
+-            if cc is unicode_set:
+-                break
+-            for rr in getattr(cc, "_ranges", ()):
+-                ret.extend(range(rr[0], rr[-1] + 1))
+-        return [chr(c) for c in sorted(set(ret))]
+-
+-    @_lazyclassproperty
+-    def printables(cls):
+-        "all non-whitespace characters in this range"
+-        return "".join(filterfalse(str.isspace, cls._chars_for_ranges))
+-
+-    @_lazyclassproperty
+-    def alphas(cls):
+-        "all alphabetic characters in this range"
+-        return "".join(filter(str.isalpha, cls._chars_for_ranges))
+-
+-    @_lazyclassproperty
+-    def nums(cls):
+-        "all numeric digit characters in this range"
+-        return "".join(filter(str.isdigit, cls._chars_for_ranges))
+-
+-    @_lazyclassproperty
+-    def alphanums(cls):
+-        "all alphanumeric characters in this range"
+-        return cls.alphas + cls.nums
+-
+-    @_lazyclassproperty
+-    def identchars(cls):
+-        "all characters in this range that are valid identifier characters, plus underscore '_'"
+-        return "".join(
+-            sorted(
+-                set(
+-                    "".join(filter(str.isidentifier, cls._chars_for_ranges))
+-                    + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµº"
+-                    + "ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ"
+-                    + "_"
+-                )
+-            )
+-        )
+-
+-    @_lazyclassproperty
+-    def identbodychars(cls):
+-        """
+-        all characters in this range that are valid identifier body characters,
+-        plus the digits 0-9
+-        """
+-        return "".join(
+-            sorted(
+-                set(
+-                    cls.identchars
+-                    + "0123456789"
+-                    + "".join(
+-                        [c for c in cls._chars_for_ranges if ("_" + c).isidentifier()]
+-                    )
+-                )
+-            )
+-        )
+-
+-
+-class pyparsing_unicode(unicode_set):
+-    """
+-    A namespace class for defining common language unicode_sets.
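+-
+-    A small illustrative sketch (assumes ``from pyparsing import Word``)::
+-
+-        greek_word = Word(pyparsing_unicode.Greek.alphas)
+-        greek_word.parse_string("Ελληνικά")  # -> ['Ελληνικά']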
+- """ +- +- # fmt: off +- +- # define ranges in language character sets +- _ranges: UnicodeRangeList = [ +- (0x0020, sys.maxunicode), +- ] +- +- class BasicMultilingualPlane(unicode_set): +- "Unicode set for the Basic Multilingual Plane" +- _ranges: UnicodeRangeList = [ +- (0x0020, 0xFFFF), +- ] +- +- class Latin1(unicode_set): +- "Unicode set for Latin-1 Unicode Character Range" +- _ranges: UnicodeRangeList = [ +- (0x0020, 0x007E), +- (0x00A0, 0x00FF), +- ] +- +- class LatinA(unicode_set): +- "Unicode set for Latin-A Unicode Character Range" +- _ranges: UnicodeRangeList = [ +- (0x0100, 0x017F), +- ] +- +- class LatinB(unicode_set): +- "Unicode set for Latin-B Unicode Character Range" +- _ranges: UnicodeRangeList = [ +- (0x0180, 0x024F), +- ] +- +- class Greek(unicode_set): +- "Unicode set for Greek Unicode Character Ranges" +- _ranges: UnicodeRangeList = [ +- (0x0342, 0x0345), +- (0x0370, 0x0377), +- (0x037A, 0x037F), +- (0x0384, 0x038A), +- (0x038C,), +- (0x038E, 0x03A1), +- (0x03A3, 0x03E1), +- (0x03F0, 0x03FF), +- (0x1D26, 0x1D2A), +- (0x1D5E,), +- (0x1D60,), +- (0x1D66, 0x1D6A), +- (0x1F00, 0x1F15), +- (0x1F18, 0x1F1D), +- (0x1F20, 0x1F45), +- (0x1F48, 0x1F4D), +- (0x1F50, 0x1F57), +- (0x1F59,), +- (0x1F5B,), +- (0x1F5D,), +- (0x1F5F, 0x1F7D), +- (0x1F80, 0x1FB4), +- (0x1FB6, 0x1FC4), +- (0x1FC6, 0x1FD3), +- (0x1FD6, 0x1FDB), +- (0x1FDD, 0x1FEF), +- (0x1FF2, 0x1FF4), +- (0x1FF6, 0x1FFE), +- (0x2129,), +- (0x2719, 0x271A), +- (0xAB65,), +- (0x10140, 0x1018D), +- (0x101A0,), +- (0x1D200, 0x1D245), +- (0x1F7A1, 0x1F7A7), +- ] +- +- class Cyrillic(unicode_set): +- "Unicode set for Cyrillic Unicode Character Range" +- _ranges: UnicodeRangeList = [ +- (0x0400, 0x052F), +- (0x1C80, 0x1C88), +- (0x1D2B,), +- (0x1D78,), +- (0x2DE0, 0x2DFF), +- (0xA640, 0xA672), +- (0xA674, 0xA69F), +- (0xFE2E, 0xFE2F), +- ] +- +- class Chinese(unicode_set): +- "Unicode set for Chinese Unicode Character Range" +- _ranges: UnicodeRangeList = [ +- (0x2E80, 0x2E99), +- (0x2E9B, 0x2EF3), +- (0x31C0, 0x31E3), +- (0x3400, 0x4DB5), +- (0x4E00, 0x9FEF), +- (0xA700, 0xA707), +- (0xF900, 0xFA6D), +- (0xFA70, 0xFAD9), +- (0x16FE2, 0x16FE3), +- (0x1F210, 0x1F212), +- (0x1F214, 0x1F23B), +- (0x1F240, 0x1F248), +- (0x20000, 0x2A6D6), +- (0x2A700, 0x2B734), +- (0x2B740, 0x2B81D), +- (0x2B820, 0x2CEA1), +- (0x2CEB0, 0x2EBE0), +- (0x2F800, 0x2FA1D), +- ] +- +- class Japanese(unicode_set): +- "Unicode set for Japanese Unicode Character Range, combining Kanji, Hiragana, and Katakana ranges" +- _ranges: UnicodeRangeList = [] +- +- class Kanji(unicode_set): +- "Unicode set for Kanji Unicode Character Range" +- _ranges: UnicodeRangeList = [ +- (0x4E00, 0x9FBF), +- (0x3000, 0x303F), +- ] +- +- class Hiragana(unicode_set): +- "Unicode set for Hiragana Unicode Character Range" +- _ranges: UnicodeRangeList = [ +- (0x3041, 0x3096), +- (0x3099, 0x30A0), +- (0x30FC,), +- (0xFF70,), +- (0x1B001,), +- (0x1B150, 0x1B152), +- (0x1F200,), +- ] +- +- class Katakana(unicode_set): +- "Unicode set for Katakana Unicode Character Range" +- _ranges: UnicodeRangeList = [ +- (0x3099, 0x309C), +- (0x30A0, 0x30FF), +- (0x31F0, 0x31FF), +- (0x32D0, 0x32FE), +- (0xFF65, 0xFF9F), +- (0x1B000,), +- (0x1B164, 0x1B167), +- (0x1F201, 0x1F202), +- (0x1F213,), +- ] +- +- class Hangul(unicode_set): +- "Unicode set for Hangul (Korean) Unicode Character Range" +- _ranges: UnicodeRangeList = [ +- (0x1100, 0x11FF), +- (0x302E, 0x302F), +- (0x3131, 0x318E), +- (0x3200, 0x321C), +- (0x3260, 0x327B), +- (0x327E,), +- (0xA960, 0xA97C), +- (0xAC00, 0xD7A3), +- (0xD7B0, 
0xD7C6),
+-            (0xD7CB, 0xD7FB),
+-            (0xFFA0, 0xFFBE),
+-            (0xFFC2, 0xFFC7),
+-            (0xFFCA, 0xFFCF),
+-            (0xFFD2, 0xFFD7),
+-            (0xFFDA, 0xFFDC),
+-        ]
+-
+-    Korean = Hangul
+-
+-    class CJK(Chinese, Japanese, Hangul):
+-        "Unicode set for combined Chinese, Japanese, and Korean (CJK) Unicode Character Range"
+-
+-    class Thai(unicode_set):
+-        "Unicode set for Thai Unicode Character Range"
+-        _ranges: UnicodeRangeList = [
+-            (0x0E01, 0x0E3A),
+-            (0x0E3F, 0x0E5B)
+-        ]
+-
+-    class Arabic(unicode_set):
+-        "Unicode set for Arabic Unicode Character Range"
+-        _ranges: UnicodeRangeList = [
+-            (0x0600, 0x061B),
+-            (0x061E, 0x06FF),
+-            (0x0700, 0x077F),
+-        ]
+-
+-    class Hebrew(unicode_set):
+-        "Unicode set for Hebrew Unicode Character Range"
+-        _ranges: UnicodeRangeList = [
+-            (0x0591, 0x05C7),
+-            (0x05D0, 0x05EA),
+-            (0x05EF, 0x05F4),
+-            (0xFB1D, 0xFB36),
+-            (0xFB38, 0xFB3C),
+-            (0xFB3E,),
+-            (0xFB40, 0xFB41),
+-            (0xFB43, 0xFB44),
+-            (0xFB46, 0xFB4F),
+-        ]
+-
+-    class Devanagari(unicode_set):
+-        "Unicode set for Devanagari Unicode Character Range"
+-        _ranges: UnicodeRangeList = [
+-            (0x0900, 0x097F),
+-            (0xA8E0, 0xA8FF)
+-        ]
+-
+-    # fmt: on
+-
+-
+-pyparsing_unicode.Japanese._ranges = (
+-    pyparsing_unicode.Japanese.Kanji._ranges
+-    + pyparsing_unicode.Japanese.Hiragana._ranges
+-    + pyparsing_unicode.Japanese.Katakana._ranges
+-)
+-
+-pyparsing_unicode.BMP = pyparsing_unicode.BasicMultilingualPlane
+-
+-# add language identifiers using language Unicode
+-pyparsing_unicode.العربية = pyparsing_unicode.Arabic
+-pyparsing_unicode.中文 = pyparsing_unicode.Chinese
+-pyparsing_unicode.кириллица = pyparsing_unicode.Cyrillic
+-pyparsing_unicode.Ελληνικά = pyparsing_unicode.Greek
+-pyparsing_unicode.עִברִית = pyparsing_unicode.Hebrew
+-pyparsing_unicode.日本語 = pyparsing_unicode.Japanese
+-pyparsing_unicode.Japanese.漢字 = pyparsing_unicode.Japanese.Kanji
+-pyparsing_unicode.Japanese.カタカナ = pyparsing_unicode.Japanese.Katakana
+-pyparsing_unicode.Japanese.ひらがな = pyparsing_unicode.Japanese.Hiragana
+-pyparsing_unicode.한국어 = pyparsing_unicode.Korean
+-pyparsing_unicode.ไทย = pyparsing_unicode.Thai
+-pyparsing_unicode.देवनागरी = pyparsing_unicode.Devanagari
+diff --git a/src/poetry/core/_vendor/pyparsing/util.py b/src/poetry/core/_vendor/pyparsing/util.py
+deleted file mode 100644
+index 34ce092..0000000
+--- a/src/poetry/core/_vendor/pyparsing/util.py
++++ /dev/null
+@@ -1,235 +0,0 @@
+-# util.py
+-import warnings
+-import types
+-import collections
+-import itertools
+-from functools import lru_cache
+-from typing import List, Union, Iterable
+-
+-_bslash = chr(92)
+-
+-
+-class __config_flags:
+-    """Internal class for defining compatibility and debugging flags"""
+-
+-    _all_names: List[str] = []
+-    _fixed_names: List[str] = []
+-    _type_desc = "configuration"
+-
+-    @classmethod
+-    def _set(cls, dname, value):
+-        if dname in cls._fixed_names:
+-            warnings.warn(
+-                "{}.{} {} is {} and cannot be overridden".format(
+-                    cls.__name__,
+-                    dname,
+-                    cls._type_desc,
+-                    str(getattr(cls, dname)).upper(),
+-                )
+-            )
+-            return
+-        if dname in cls._all_names:
+-            setattr(cls, dname, value)
+-        else:
+-            raise ValueError("no such {} {!r}".format(cls._type_desc, dname))
+-
+-    enable = classmethod(lambda cls, name: cls._set(name, True))
+-    disable = classmethod(lambda cls, name: cls._set(name, False))
+-
+-
+-@lru_cache(maxsize=128)
+-def col(loc: int, strg: str) -> int:
+-    """
+-    Returns current column within a string, counting newlines as line separators.
+-    The first column is number 1.
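+-
+-    A small illustrative sketch (``lineno`` is defined below)::
+-
+-        s = "ab\ncd"
+-        col(4, s)     # -> 2  ('d' is the second column of the second line)
+-        lineno(4, s)  # -> 2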
+-
+-    Note: the default parsing behavior is to expand tabs in the input string
+-    before starting the parsing process. See
+-    :class:`ParserElement.parseString` for more
+-    information on parsing strings containing ``<TAB>`` s, and suggested
+-    methods to maintain a consistent view of the parsed string, the parse
+-    location, and line and column positions within the parsed string.
+-    """
+-    s = strg
+-    return 1 if 0 < loc < len(s) and s[loc - 1] == "\n" else loc - s.rfind("\n", 0, loc)
+-
+-
+-@lru_cache(maxsize=128)
+-def lineno(loc: int, strg: str) -> int:
+-    """Returns current line number within a string, counting newlines as line separators.
+-    The first line is number 1.
+-
+-    Note - the default parsing behavior is to expand tabs in the input string
+-    before starting the parsing process. See :class:`ParserElement.parseString`
+-    for more information on parsing strings containing ``<TAB>`` s, and
+-    suggested methods to maintain a consistent view of the parsed string, the
+-    parse location, and line and column positions within the parsed string.
+-    """
+-    return strg.count("\n", 0, loc) + 1
+-
+-
+-@lru_cache(maxsize=128)
+-def line(loc: int, strg: str) -> str:
+-    """
+-    Returns the line of text containing loc within a string, counting newlines as line separators.
+-    """
+-    last_cr = strg.rfind("\n", 0, loc)
+-    next_cr = strg.find("\n", loc)
+-    return strg[last_cr + 1 : next_cr] if next_cr >= 0 else strg[last_cr + 1 :]
+-
+-
+-class _UnboundedCache:
+-    def __init__(self):
+-        cache = {}
+-        cache_get = cache.get
+-        self.not_in_cache = not_in_cache = object()
+-
+-        def get(_, key):
+-            return cache_get(key, not_in_cache)
+-
+-        def set_(_, key, value):
+-            cache[key] = value
+-
+-        def clear(_):
+-            cache.clear()
+-
+-        self.size = None
+-        self.get = types.MethodType(get, self)
+-        self.set = types.MethodType(set_, self)
+-        self.clear = types.MethodType(clear, self)
+-
+-
+-class _FifoCache:
+-    def __init__(self, size):
+-        self.not_in_cache = not_in_cache = object()
+-        cache = collections.OrderedDict()
+-        cache_get = cache.get
+-
+-        def get(_, key):
+-            return cache_get(key, not_in_cache)
+-
+-        def set_(_, key, value):
+-            cache[key] = value
+-            while len(cache) > size:
+-                cache.popitem(last=False)
+-
+-        def clear(_):
+-            cache.clear()
+-
+-        self.size = size
+-        self.get = types.MethodType(get, self)
+-        self.set = types.MethodType(set_, self)
+-        self.clear = types.MethodType(clear, self)
+-
+-
+-class LRUMemo:
+-    """
+-    A memoizing mapping that retains `capacity` deleted items
+-
+-    The memo tracks retained items by their access order; once `capacity` items
+-    are retained, the least recently used item is discarded.
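+-
+-    A minimal illustrative sketch of the intended access pattern::
+-
+-        memo = LRUMemo(capacity=2)
+-        memo["k"] = "v"    # stored in the active mapping
+-        del memo["k"]      # moved into the retained memory
+-        memo["k"]          # -> 'v', recovered from the retained memory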
+- """ +- +- def __init__(self, capacity): +- self._capacity = capacity +- self._active = {} +- self._memory = collections.OrderedDict() +- +- def __getitem__(self, key): +- try: +- return self._active[key] +- except KeyError: +- self._memory.move_to_end(key) +- return self._memory[key] +- +- def __setitem__(self, key, value): +- self._memory.pop(key, None) +- self._active[key] = value +- +- def __delitem__(self, key): +- try: +- value = self._active.pop(key) +- except KeyError: +- pass +- else: +- while len(self._memory) >= self._capacity: +- self._memory.popitem(last=False) +- self._memory[key] = value +- +- def clear(self): +- self._active.clear() +- self._memory.clear() +- +- +-class UnboundedMemo(dict): +- """ +- A memoizing mapping that retains all deleted items +- """ +- +- def __delitem__(self, key): +- pass +- +- +-def _escape_regex_range_chars(s: str) -> str: +- # escape these chars: ^-[] +- for c in r"\^-[]": +- s = s.replace(c, _bslash + c) +- s = s.replace("\n", r"\n") +- s = s.replace("\t", r"\t") +- return str(s) +- +- +-def _collapse_string_to_ranges( +- s: Union[str, Iterable[str]], re_escape: bool = True +-) -> str: +- def is_consecutive(c): +- c_int = ord(c) +- is_consecutive.prev, prev = c_int, is_consecutive.prev +- if c_int - prev > 1: +- is_consecutive.value = next(is_consecutive.counter) +- return is_consecutive.value +- +- is_consecutive.prev = 0 +- is_consecutive.counter = itertools.count() +- is_consecutive.value = -1 +- +- def escape_re_range_char(c): +- return "\\" + c if c in r"\^-][" else c +- +- def no_escape_re_range_char(c): +- return c +- +- if not re_escape: +- escape_re_range_char = no_escape_re_range_char +- +- ret = [] +- s = "".join(sorted(set(s))) +- if len(s) > 3: +- for _, chars in itertools.groupby(s, key=is_consecutive): +- first = last = next(chars) +- last = collections.deque( +- itertools.chain(iter([last]), chars), maxlen=1 +- ).pop() +- if first == last: +- ret.append(escape_re_range_char(first)) +- else: +- sep = "" if ord(last) == ord(first) + 1 else "-" +- ret.append( +- "{}{}{}".format( +- escape_re_range_char(first), sep, escape_re_range_char(last) +- ) +- ) +- else: +- ret = [escape_re_range_char(c) for c in s] +- +- return "".join(ret) +- +- +-def _flatten(ll: list) -> list: +- ret = [] +- for i in ll: +- if isinstance(i, list): +- ret.extend(_flatten(i)) +- else: +- ret.append(i) +- return ret +diff --git a/src/poetry/core/_vendor/pyrsistent/LICENSE.mit b/src/poetry/core/_vendor/pyrsistent/LICENSE.mit +deleted file mode 100644 +index 6cbf251..0000000 +--- a/src/poetry/core/_vendor/pyrsistent/LICENSE.mit ++++ /dev/null +@@ -1,22 +0,0 @@ +-Copyright (c) 2022 Tobias Gustafsson +- +-Permission is hereby granted, free of charge, to any person +-obtaining a copy of this software and associated documentation +-files (the "Software"), to deal in the Software without +-restriction, including without limitation the rights to use, +-copy, modify, merge, publish, distribute, sublicense, and/or sell +-copies of the Software, and to permit persons to whom the +-Software is furnished to do so, subject to the following +-conditions: +- +-The above copyright notice and this permission notice shall be +-included in all copies or substantial portions of the Software. +- +-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +-OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +-NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +-HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +-WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +-OTHER DEALINGS IN THE SOFTWARE. +\ No newline at end of file +diff --git a/src/poetry/core/_vendor/pyrsistent/__init__.py b/src/poetry/core/_vendor/pyrsistent/__init__.py +deleted file mode 100644 +index be29965..0000000 +--- a/src/poetry/core/_vendor/pyrsistent/__init__.py ++++ /dev/null +@@ -1,47 +0,0 @@ +-# -*- coding: utf-8 -*- +- +-from pyrsistent._pmap import pmap, m, PMap +- +-from pyrsistent._pvector import pvector, v, PVector +- +-from pyrsistent._pset import pset, s, PSet +- +-from pyrsistent._pbag import pbag, b, PBag +- +-from pyrsistent._plist import plist, l, PList +- +-from pyrsistent._pdeque import pdeque, dq, PDeque +- +-from pyrsistent._checked_types import ( +- CheckedPMap, CheckedPVector, CheckedPSet, InvariantException, CheckedKeyTypeError, +- CheckedValueTypeError, CheckedType, optional) +- +-from pyrsistent._field_common import ( +- field, PTypeError, pset_field, pmap_field, pvector_field) +- +-from pyrsistent._precord import PRecord +- +-from pyrsistent._pclass import PClass, PClassMeta +- +-from pyrsistent._immutable import immutable +- +-from pyrsistent._helpers import freeze, thaw, mutant +- +-from pyrsistent._transformations import inc, discard, rex, ny +- +-from pyrsistent._toolz import get_in +- +- +-__all__ = ('pmap', 'm', 'PMap', +- 'pvector', 'v', 'PVector', +- 'pset', 's', 'PSet', +- 'pbag', 'b', 'PBag', +- 'plist', 'l', 'PList', +- 'pdeque', 'dq', 'PDeque', +- 'CheckedPMap', 'CheckedPVector', 'CheckedPSet', 'InvariantException', 'CheckedKeyTypeError', 'CheckedValueTypeError', 'CheckedType', 'optional', +- 'PRecord', 'field', 'pset_field', 'pmap_field', 'pvector_field', +- 'PClass', 'PClassMeta', +- 'immutable', +- 'freeze', 'thaw', 'mutant', +- 'get_in', +- 'inc', 'discard', 'rex', 'ny') +diff --git a/src/poetry/core/_vendor/pyrsistent/_checked_types.py b/src/poetry/core/_vendor/pyrsistent/_checked_types.py +deleted file mode 100644 +index 8ab8c2a..0000000 +--- a/src/poetry/core/_vendor/pyrsistent/_checked_types.py ++++ /dev/null +@@ -1,542 +0,0 @@ +-from enum import Enum +- +-from abc import abstractmethod, ABCMeta +-from collections.abc import Iterable +- +-from pyrsistent._pmap import PMap, pmap +-from pyrsistent._pset import PSet, pset +-from pyrsistent._pvector import PythonPVector, python_pvector +- +- +-class CheckedType(object): +- """ +- Marker class to enable creation and serialization of checked object graphs. +- """ +- __slots__ = () +- +- @classmethod +- @abstractmethod +- def create(cls, source_data, _factory_fields=None): +- raise NotImplementedError() +- +- @abstractmethod +- def serialize(self, format=None): +- raise NotImplementedError() +- +- +-def _restore_pickle(cls, data): +- return cls.create(data, _factory_fields=set()) +- +- +-class InvariantException(Exception): +- """ +- Exception raised from a :py:class:`CheckedType` when invariant tests fail or when a mandatory +- field is missing. 
+- +- Contains two fields of interest: +- invariant_errors, a tuple of error data for the failing invariants +- missing_fields, a tuple of strings specifying the missing names +- """ +- +- def __init__(self, error_codes=(), missing_fields=(), *args, **kwargs): +- self.invariant_errors = tuple(e() if callable(e) else e for e in error_codes) +- self.missing_fields = missing_fields +- super(InvariantException, self).__init__(*args, **kwargs) +- +- def __str__(self): +- return super(InvariantException, self).__str__() + \ +- ", invariant_errors=[{invariant_errors}], missing_fields=[{missing_fields}]".format( +- invariant_errors=', '.join(str(e) for e in self.invariant_errors), +- missing_fields=', '.join(self.missing_fields)) +- +- +-_preserved_iterable_types = ( +- Enum, +-) +-"""Some types are themselves iterable, but we want to use the type itself and +-not its members for the type specification. This defines a set of such types +-that we explicitly preserve. +- +-Note that strings are not such types because the string inputs we pass in are +-values, not types. +-""" +- +- +-def maybe_parse_user_type(t): +- """Try to coerce a user-supplied type directive into a list of types. +- +- This function should be used in all places where a user specifies a type, +- for consistency. +- +- The policy for what defines valid user input should be clear from the implementation. +- """ +- is_type = isinstance(t, type) +- is_preserved = isinstance(t, type) and issubclass(t, _preserved_iterable_types) +- is_string = isinstance(t, str) +- is_iterable = isinstance(t, Iterable) +- +- if is_preserved: +- return [t] +- elif is_string: +- return [t] +- elif is_type and not is_iterable: +- return [t] +- elif is_iterable: +- # Recur to validate contained types as well. +- ts = t +- return tuple(e for t in ts for e in maybe_parse_user_type(t)) +- else: +- # If this raises because `t` cannot be formatted, so be it. +- raise TypeError( +- 'Type specifications must be types or strings. Input: {}'.format(t) +- ) +- +- +-def maybe_parse_many_user_types(ts): +- # Just a different name to communicate that you're parsing multiple user +- # inputs. `maybe_parse_user_type` handles the iterable case anyway. +- return maybe_parse_user_type(ts) +- +- +-def _store_types(dct, bases, destination_name, source_name): +- maybe_types = maybe_parse_many_user_types([ +- d[source_name] +- for d in ([dct] + [b.__dict__ for b in bases]) if source_name in d +- ]) +- +- dct[destination_name] = maybe_types +- +- +-def _merge_invariant_results(result): +- verdict = True +- data = [] +- for verd, dat in result: +- if not verd: +- verdict = False +- data.append(dat) +- +- return verdict, tuple(data) +- +- +-def wrap_invariant(invariant): +- # Invariant functions may return the outcome of several tests +- # In those cases the results have to be merged before being passed +- # back to the client. +- def f(*args, **kwargs): +- result = invariant(*args, **kwargs) +- if isinstance(result[0], bool): +- return result +- +- return _merge_invariant_results(result) +- +- return f +- +- +-def _all_dicts(bases, seen=None): +- """ +- Yield each class in ``bases`` and each of their base classes. 
+- """ +- if seen is None: +- seen = set() +- for cls in bases: +- if cls in seen: +- continue +- seen.add(cls) +- yield cls.__dict__ +- for b in _all_dicts(cls.__bases__, seen): +- yield b +- +- +-def store_invariants(dct, bases, destination_name, source_name): +- # Invariants are inherited +- invariants = [] +- for ns in [dct] + list(_all_dicts(bases)): +- try: +- invariant = ns[source_name] +- except KeyError: +- continue +- invariants.append(invariant) +- +- if not all(callable(invariant) for invariant in invariants): +- raise TypeError('Invariants must be callable') +- dct[destination_name] = tuple(wrap_invariant(inv) for inv in invariants) +- +- +-class _CheckedTypeMeta(ABCMeta): +- def __new__(mcs, name, bases, dct): +- _store_types(dct, bases, '_checked_types', '__type__') +- store_invariants(dct, bases, '_checked_invariants', '__invariant__') +- +- def default_serializer(self, _, value): +- if isinstance(value, CheckedType): +- return value.serialize() +- return value +- +- dct.setdefault('__serializer__', default_serializer) +- +- dct['__slots__'] = () +- +- return super(_CheckedTypeMeta, mcs).__new__(mcs, name, bases, dct) +- +- +-class CheckedTypeError(TypeError): +- def __init__(self, source_class, expected_types, actual_type, actual_value, *args, **kwargs): +- super(CheckedTypeError, self).__init__(*args, **kwargs) +- self.source_class = source_class +- self.expected_types = expected_types +- self.actual_type = actual_type +- self.actual_value = actual_value +- +- +-class CheckedKeyTypeError(CheckedTypeError): +- """ +- Raised when trying to set a value using a key with a type that doesn't match the declared type. +- +- Attributes: +- source_class -- The class of the collection +- expected_types -- Allowed types +- actual_type -- The non matching type +- actual_value -- Value of the variable with the non matching type +- """ +- pass +- +- +-class CheckedValueTypeError(CheckedTypeError): +- """ +- Raised when trying to set a value using a key with a type that doesn't match the declared type. 
+- +- Attributes: +- source_class -- The class of the collection +- expected_types -- Allowed types +- actual_type -- The non matching type +- actual_value -- Value of the variable with the non matching type +- """ +- pass +- +- +-def _get_class(type_name): +- module_name, class_name = type_name.rsplit('.', 1) +- module = __import__(module_name, fromlist=[class_name]) +- return getattr(module, class_name) +- +- +-def get_type(typ): +- if isinstance(typ, type): +- return typ +- +- return _get_class(typ) +- +- +-def get_types(typs): +- return [get_type(typ) for typ in typs] +- +- +-def _check_types(it, expected_types, source_class, exception_type=CheckedValueTypeError): +- if expected_types: +- for e in it: +- if not any(isinstance(e, get_type(t)) for t in expected_types): +- actual_type = type(e) +- msg = "Type {source_class} can only be used with {expected_types}, not {actual_type}".format( +- source_class=source_class.__name__, +- expected_types=tuple(get_type(et).__name__ for et in expected_types), +- actual_type=actual_type.__name__) +- raise exception_type(source_class, expected_types, actual_type, e, msg) +- +- +-def _invariant_errors(elem, invariants): +- return [data for valid, data in (invariant(elem) for invariant in invariants) if not valid] +- +- +-def _invariant_errors_iterable(it, invariants): +- return sum([_invariant_errors(elem, invariants) for elem in it], []) +- +- +-def optional(*typs): +- """ Convenience function to specify that a value may be of any of the types in type 'typs' or None """ +- return tuple(typs) + (type(None),) +- +- +-def _checked_type_create(cls, source_data, _factory_fields=None, ignore_extra=False): +- if isinstance(source_data, cls): +- return source_data +- +- # Recursively apply create methods of checked types if the types of the supplied data +- # does not match any of the valid types. +- types = get_types(cls._checked_types) +- checked_type = next((t for t in types if issubclass(t, CheckedType)), None) +- if checked_type: +- return cls([checked_type.create(data, ignore_extra=ignore_extra) +- if not any(isinstance(data, t) for t in types) else data +- for data in source_data]) +- +- return cls(source_data) +- +-class CheckedPVector(PythonPVector, CheckedType, metaclass=_CheckedTypeMeta): +- """ +- A CheckedPVector is a PVector which allows specifying type and invariant checks. +- +- >>> class Positives(CheckedPVector): +- ... __type__ = (int, float) +- ... __invariant__ = lambda n: (n >= 0, 'Negative') +- ... 
+- >>> Positives([1, 2, 3]) +- Positives([1, 2, 3]) +- """ +- +- __slots__ = () +- +- def __new__(cls, initial=()): +- if type(initial) == PythonPVector: +- return super(CheckedPVector, cls).__new__(cls, initial._count, initial._shift, initial._root, initial._tail) +- +- return CheckedPVector.Evolver(cls, python_pvector()).extend(initial).persistent() +- +- def set(self, key, value): +- return self.evolver().set(key, value).persistent() +- +- def append(self, val): +- return self.evolver().append(val).persistent() +- +- def extend(self, it): +- return self.evolver().extend(it).persistent() +- +- create = classmethod(_checked_type_create) +- +- def serialize(self, format=None): +- serializer = self.__serializer__ +- return list(serializer(format, v) for v in self) +- +- def __reduce__(self): +- # Pickling support +- return _restore_pickle, (self.__class__, list(self),) +- +- class Evolver(PythonPVector.Evolver): +- __slots__ = ('_destination_class', '_invariant_errors') +- +- def __init__(self, destination_class, vector): +- super(CheckedPVector.Evolver, self).__init__(vector) +- self._destination_class = destination_class +- self._invariant_errors = [] +- +- def _check(self, it): +- _check_types(it, self._destination_class._checked_types, self._destination_class) +- error_data = _invariant_errors_iterable(it, self._destination_class._checked_invariants) +- self._invariant_errors.extend(error_data) +- +- def __setitem__(self, key, value): +- self._check([value]) +- return super(CheckedPVector.Evolver, self).__setitem__(key, value) +- +- def append(self, elem): +- self._check([elem]) +- return super(CheckedPVector.Evolver, self).append(elem) +- +- def extend(self, it): +- it = list(it) +- self._check(it) +- return super(CheckedPVector.Evolver, self).extend(it) +- +- def persistent(self): +- if self._invariant_errors: +- raise InvariantException(error_codes=self._invariant_errors) +- +- result = self._orig_pvector +- if self.is_dirty() or (self._destination_class != type(self._orig_pvector)): +- pv = super(CheckedPVector.Evolver, self).persistent().extend(self._extra_tail) +- result = self._destination_class(pv) +- self._reset(result) +- +- return result +- +- def __repr__(self): +- return self.__class__.__name__ + "({0})".format(self.tolist()) +- +- __str__ = __repr__ +- +- def evolver(self): +- return CheckedPVector.Evolver(self.__class__, self) +- +- +-class CheckedPSet(PSet, CheckedType, metaclass=_CheckedTypeMeta): +- """ +- A CheckedPSet is a PSet which allows specifying type and invariant checks. +- +- >>> class Positives(CheckedPSet): +- ... __type__ = (int, float) +- ... __invariant__ = lambda n: (n >= 0, 'Negative') +- ... 
+- >>> Positives([1, 2, 3]) +- Positives([1, 2, 3]) +- """ +- +- __slots__ = () +- +- def __new__(cls, initial=()): +- if type(initial) is PMap: +- return super(CheckedPSet, cls).__new__(cls, initial) +- +- evolver = CheckedPSet.Evolver(cls, pset()) +- for e in initial: +- evolver.add(e) +- +- return evolver.persistent() +- +- def __repr__(self): +- return self.__class__.__name__ + super(CheckedPSet, self).__repr__()[4:] +- +- def __str__(self): +- return self.__repr__() +- +- def serialize(self, format=None): +- serializer = self.__serializer__ +- return set(serializer(format, v) for v in self) +- +- create = classmethod(_checked_type_create) +- +- def __reduce__(self): +- # Pickling support +- return _restore_pickle, (self.__class__, list(self),) +- +- def evolver(self): +- return CheckedPSet.Evolver(self.__class__, self) +- +- class Evolver(PSet._Evolver): +- __slots__ = ('_destination_class', '_invariant_errors') +- +- def __init__(self, destination_class, original_set): +- super(CheckedPSet.Evolver, self).__init__(original_set) +- self._destination_class = destination_class +- self._invariant_errors = [] +- +- def _check(self, it): +- _check_types(it, self._destination_class._checked_types, self._destination_class) +- error_data = _invariant_errors_iterable(it, self._destination_class._checked_invariants) +- self._invariant_errors.extend(error_data) +- +- def add(self, element): +- self._check([element]) +- self._pmap_evolver[element] = True +- return self +- +- def persistent(self): +- if self._invariant_errors: +- raise InvariantException(error_codes=self._invariant_errors) +- +- if self.is_dirty() or self._destination_class != type(self._original_pset): +- return self._destination_class(self._pmap_evolver.persistent()) +- +- return self._original_pset +- +- +-class _CheckedMapTypeMeta(type): +- def __new__(mcs, name, bases, dct): +- _store_types(dct, bases, '_checked_key_types', '__key_type__') +- _store_types(dct, bases, '_checked_value_types', '__value_type__') +- store_invariants(dct, bases, '_checked_invariants', '__invariant__') +- +- def default_serializer(self, _, key, value): +- sk = key +- if isinstance(key, CheckedType): +- sk = key.serialize() +- +- sv = value +- if isinstance(value, CheckedType): +- sv = value.serialize() +- +- return sk, sv +- +- dct.setdefault('__serializer__', default_serializer) +- +- dct['__slots__'] = () +- +- return super(_CheckedMapTypeMeta, mcs).__new__(mcs, name, bases, dct) +- +-# Marker object +-_UNDEFINED_CHECKED_PMAP_SIZE = object() +- +- +-class CheckedPMap(PMap, CheckedType, metaclass=_CheckedMapTypeMeta): +- """ +- A CheckedPMap is a PMap which allows specifying type and invariant checks. +- +- >>> class IntToFloatMap(CheckedPMap): +- ... __key_type__ = int +- ... __value_type__ = float +- ... __invariant__ = lambda k, v: (int(v) == k, 'Invalid mapping') +- ... 
+- >>> IntToFloatMap({1: 1.5, 2: 2.25}) +- IntToFloatMap({1: 1.5, 2: 2.25}) +- """ +- +- __slots__ = () +- +- def __new__(cls, initial={}, size=_UNDEFINED_CHECKED_PMAP_SIZE): +- if size is not _UNDEFINED_CHECKED_PMAP_SIZE: +- return super(CheckedPMap, cls).__new__(cls, size, initial) +- +- evolver = CheckedPMap.Evolver(cls, pmap()) +- for k, v in initial.items(): +- evolver.set(k, v) +- +- return evolver.persistent() +- +- def evolver(self): +- return CheckedPMap.Evolver(self.__class__, self) +- +- def __repr__(self): +- return self.__class__.__name__ + "({0})".format(str(dict(self))) +- +- __str__ = __repr__ +- +- def serialize(self, format=None): +- serializer = self.__serializer__ +- return dict(serializer(format, k, v) for k, v in self.items()) +- +- @classmethod +- def create(cls, source_data, _factory_fields=None): +- if isinstance(source_data, cls): +- return source_data +- +- # Recursively apply create methods of checked types if the types of the supplied data +- # does not match any of the valid types. +- key_types = get_types(cls._checked_key_types) +- checked_key_type = next((t for t in key_types if issubclass(t, CheckedType)), None) +- value_types = get_types(cls._checked_value_types) +- checked_value_type = next((t for t in value_types if issubclass(t, CheckedType)), None) +- +- if checked_key_type or checked_value_type: +- return cls(dict((checked_key_type.create(key) if checked_key_type and not any(isinstance(key, t) for t in key_types) else key, +- checked_value_type.create(value) if checked_value_type and not any(isinstance(value, t) for t in value_types) else value) +- for key, value in source_data.items())) +- +- return cls(source_data) +- +- def __reduce__(self): +- # Pickling support +- return _restore_pickle, (self.__class__, dict(self),) +- +- class Evolver(PMap._Evolver): +- __slots__ = ('_destination_class', '_invariant_errors') +- +- def __init__(self, destination_class, original_map): +- super(CheckedPMap.Evolver, self).__init__(original_map) +- self._destination_class = destination_class +- self._invariant_errors = [] +- +- def set(self, key, value): +- _check_types([key], self._destination_class._checked_key_types, self._destination_class, CheckedKeyTypeError) +- _check_types([value], self._destination_class._checked_value_types, self._destination_class) +- self._invariant_errors.extend(data for valid, data in (invariant(key, value) +- for invariant in self._destination_class._checked_invariants) +- if not valid) +- +- return super(CheckedPMap.Evolver, self).set(key, value) +- +- def persistent(self): +- if self._invariant_errors: +- raise InvariantException(error_codes=self._invariant_errors) +- +- if self.is_dirty() or type(self._original_pmap) != self._destination_class: +- return self._destination_class(self._buckets_evolver.persistent(), self._size) +- +- return self._original_pmap +diff --git a/src/poetry/core/_vendor/pyrsistent/_field_common.py b/src/poetry/core/_vendor/pyrsistent/_field_common.py +deleted file mode 100644 +index 508dd2f..0000000 +--- a/src/poetry/core/_vendor/pyrsistent/_field_common.py ++++ /dev/null +@@ -1,332 +0,0 @@ +-from pyrsistent._checked_types import ( +- CheckedPMap, +- CheckedPSet, +- CheckedPVector, +- CheckedType, +- InvariantException, +- _restore_pickle, +- get_type, +- maybe_parse_user_type, +- maybe_parse_many_user_types, +-) +-from pyrsistent._checked_types import optional as optional_type +-from pyrsistent._checked_types import wrap_invariant +-import inspect +- +- +-def set_fields(dct, bases, name): +- dct[name] = 
dict(sum([list(b.__dict__.get(name, {}).items()) for b in bases], [])) +- +- for k, v in list(dct.items()): +- if isinstance(v, _PField): +- dct[name][k] = v +- del dct[k] +- +- +-def check_global_invariants(subject, invariants): +- error_codes = tuple(error_code for is_ok, error_code in +- (invariant(subject) for invariant in invariants) if not is_ok) +- if error_codes: +- raise InvariantException(error_codes, (), 'Global invariant failed') +- +- +-def serialize(serializer, format, value): +- if isinstance(value, CheckedType) and serializer is PFIELD_NO_SERIALIZER: +- return value.serialize(format) +- +- return serializer(format, value) +- +- +-def check_type(destination_cls, field, name, value): +- if field.type and not any(isinstance(value, get_type(t)) for t in field.type): +- actual_type = type(value) +- message = "Invalid type for field {0}.{1}, was {2}".format(destination_cls.__name__, name, actual_type.__name__) +- raise PTypeError(destination_cls, name, field.type, actual_type, message) +- +- +-def is_type_cls(type_cls, field_type): +- if type(field_type) is set: +- return True +- types = tuple(field_type) +- if len(types) == 0: +- return False +- return issubclass(get_type(types[0]), type_cls) +- +- +-def is_field_ignore_extra_complaint(type_cls, field, ignore_extra): +- # ignore_extra param has default False value, for speed purpose no need to propagate False +- if not ignore_extra: +- return False +- +- if not is_type_cls(type_cls, field.type): +- return False +- +- return 'ignore_extra' in inspect.signature(field.factory).parameters +- +- +- +-class _PField(object): +- __slots__ = ('type', 'invariant', 'initial', 'mandatory', '_factory', 'serializer') +- +- def __init__(self, type, invariant, initial, mandatory, factory, serializer): +- self.type = type +- self.invariant = invariant +- self.initial = initial +- self.mandatory = mandatory +- self._factory = factory +- self.serializer = serializer +- +- @property +- def factory(self): +- # If no factory is specified and the type is another CheckedType use the factory method of that CheckedType +- if self._factory is PFIELD_NO_FACTORY and len(self.type) == 1: +- typ = get_type(tuple(self.type)[0]) +- if issubclass(typ, CheckedType): +- return typ.create +- +- return self._factory +- +-PFIELD_NO_TYPE = () +-PFIELD_NO_INVARIANT = lambda _: (True, None) +-PFIELD_NO_FACTORY = lambda x: x +-PFIELD_NO_INITIAL = object() +-PFIELD_NO_SERIALIZER = lambda _, value: value +- +- +-def field(type=PFIELD_NO_TYPE, invariant=PFIELD_NO_INVARIANT, initial=PFIELD_NO_INITIAL, +- mandatory=False, factory=PFIELD_NO_FACTORY, serializer=PFIELD_NO_SERIALIZER): +- """ +- Field specification factory for :py:class:`PRecord`. +- +- :param type: a type or iterable with types that are allowed for this field +- :param invariant: a function specifying an invariant that must hold for the field +- :param initial: value of field if not specified when instantiating the record +- :param mandatory: boolean specifying if the field is mandatory or not +- :param factory: function called when field is set. +- :param serializer: function that returns a serialized version of the field +- """ +- +- # NB: We have to check this predicate separately from the predicates in +- # `maybe_parse_user_type` et al. because this one is related to supporting +- # the argspec for `field`, while those are related to supporting the valid +- # ways to specify types. +- +- # Multiple types must be passed in one of the following containers. 
Note +- # that a type that is a subclass of one of these containers, like a +- # `collections.namedtuple`, will work as expected, since we check +- # `isinstance` and not `issubclass`. +- if isinstance(type, (list, set, tuple)): +- types = set(maybe_parse_many_user_types(type)) +- else: +- types = set(maybe_parse_user_type(type)) +- +- invariant_function = wrap_invariant(invariant) if invariant != PFIELD_NO_INVARIANT and callable(invariant) else invariant +- field = _PField(type=types, invariant=invariant_function, initial=initial, +- mandatory=mandatory, factory=factory, serializer=serializer) +- +- _check_field_parameters(field) +- +- return field +- +- +-def _check_field_parameters(field): +- for t in field.type: +- if not isinstance(t, type) and not isinstance(t, str): +- raise TypeError('Type parameter expected, not {0}'.format(type(t))) +- +- if field.initial is not PFIELD_NO_INITIAL and \ +- not callable(field.initial) and \ +- field.type and not any(isinstance(field.initial, t) for t in field.type): +- raise TypeError('Initial has invalid type {0}'.format(type(field.initial))) +- +- if not callable(field.invariant): +- raise TypeError('Invariant must be callable') +- +- if not callable(field.factory): +- raise TypeError('Factory must be callable') +- +- if not callable(field.serializer): +- raise TypeError('Serializer must be callable') +- +- +-class PTypeError(TypeError): +- """ +- Raised when trying to assign a value with a type that doesn't match the declared type. +- +- Attributes: +- source_class -- The class of the record +- field -- Field name +- expected_types -- Types allowed for the field +- actual_type -- The non matching type +- """ +- def __init__(self, source_class, field, expected_types, actual_type, *args, **kwargs): +- super(PTypeError, self).__init__(*args, **kwargs) +- self.source_class = source_class +- self.field = field +- self.expected_types = expected_types +- self.actual_type = actual_type +- +- +-SEQ_FIELD_TYPE_SUFFIXES = { +- CheckedPVector: "PVector", +- CheckedPSet: "PSet", +-} +- +-# Global dictionary to hold auto-generated field types: used for unpickling +-_seq_field_types = {} +- +-def _restore_seq_field_pickle(checked_class, item_type, data): +- """Unpickling function for auto-generated PVec/PSet field types.""" +- type_ = _seq_field_types[checked_class, item_type] +- return _restore_pickle(type_, data) +- +-def _types_to_names(types): +- """Convert a tuple of types to a human-readable string.""" +- return "".join(get_type(typ).__name__.capitalize() for typ in types) +- +-def _make_seq_field_type(checked_class, item_type, item_invariant): +- """Create a subclass of the given checked class with the given item type.""" +- type_ = _seq_field_types.get((checked_class, item_type)) +- if type_ is not None: +- return type_ +- +- class TheType(checked_class): +- __type__ = item_type +- __invariant__ = item_invariant +- +- def __reduce__(self): +- return (_restore_seq_field_pickle, +- (checked_class, item_type, list(self))) +- +- suffix = SEQ_FIELD_TYPE_SUFFIXES[checked_class] +- TheType.__name__ = _types_to_names(TheType._checked_types) + suffix +- _seq_field_types[checked_class, item_type] = TheType +- return TheType +- +-def _sequence_field(checked_class, item_type, optional, initial, +- invariant=PFIELD_NO_INVARIANT, +- item_invariant=PFIELD_NO_INVARIANT): +- """ +- Create checked field for either ``PSet`` or ``PVector``. +- +- :param checked_class: ``CheckedPSet`` or ``CheckedPVector``. +- :param item_type: The required type for the items in the set. 
+- :param optional: If true, ``None`` can be used as a value for +- this field. +- :param initial: Initial value to pass to factory. +- +- :return: A ``field`` containing a checked class. +- """ +- TheType = _make_seq_field_type(checked_class, item_type, item_invariant) +- +- if optional: +- def factory(argument, _factory_fields=None, ignore_extra=False): +- if argument is None: +- return None +- else: +- return TheType.create(argument, _factory_fields=_factory_fields, ignore_extra=ignore_extra) +- else: +- factory = TheType.create +- +- return field(type=optional_type(TheType) if optional else TheType, +- factory=factory, mandatory=True, +- invariant=invariant, +- initial=factory(initial)) +- +- +-def pset_field(item_type, optional=False, initial=(), +- invariant=PFIELD_NO_INVARIANT, +- item_invariant=PFIELD_NO_INVARIANT): +- """ +- Create checked ``PSet`` field. +- +- :param item_type: The required type for the items in the set. +- :param optional: If true, ``None`` can be used as a value for +- this field. +- :param initial: Initial value to pass to factory if no value is given +- for the field. +- +- :return: A ``field`` containing a ``CheckedPSet`` of the given type. +- """ +- return _sequence_field(CheckedPSet, item_type, optional, initial, +- invariant=invariant, +- item_invariant=item_invariant) +- +- +-def pvector_field(item_type, optional=False, initial=(), +- invariant=PFIELD_NO_INVARIANT, +- item_invariant=PFIELD_NO_INVARIANT): +- """ +- Create checked ``PVector`` field. +- +- :param item_type: The required type for the items in the vector. +- :param optional: If true, ``None`` can be used as a value for +- this field. +- :param initial: Initial value to pass to factory if no value is given +- for the field. +- +- :return: A ``field`` containing a ``CheckedPVector`` of the given type. +- """ +- return _sequence_field(CheckedPVector, item_type, optional, initial, +- invariant=invariant, +- item_invariant=item_invariant) +- +- +-_valid = lambda item: (True, "") +- +- +-# Global dictionary to hold auto-generated field types: used for unpickling +-_pmap_field_types = {} +- +-def _restore_pmap_field_pickle(key_type, value_type, data): +- """Unpickling function for auto-generated PMap field types.""" +- type_ = _pmap_field_types[key_type, value_type] +- return _restore_pickle(type_, data) +- +-def _make_pmap_field_type(key_type, value_type): +- """Create a subclass of CheckedPMap with the given key and value types.""" +- type_ = _pmap_field_types.get((key_type, value_type)) +- if type_ is not None: +- return type_ +- +- class TheMap(CheckedPMap): +- __key_type__ = key_type +- __value_type__ = value_type +- +- def __reduce__(self): +- return (_restore_pmap_field_pickle, +- (self.__key_type__, self.__value_type__, dict(self))) +- +- TheMap.__name__ = "{0}To{1}PMap".format( +- _types_to_names(TheMap._checked_key_types), +- _types_to_names(TheMap._checked_value_types)) +- _pmap_field_types[key_type, value_type] = TheMap +- return TheMap +- +- +-def pmap_field(key_type, value_type, optional=False, invariant=PFIELD_NO_INVARIANT): +- """ +- Create a checked ``PMap`` field. +- +- :param key: The required type for the keys of the map. +- :param value: The required type for the values of the map. +- :param optional: If true, ``None`` can be used as a value for +- this field. +- :param invariant: Pass-through to ``field``. +- +- :return: A ``field`` containing a ``CheckedPMap``. 
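The checked-field helpers above (field, pset_field, pvector_field, pmap_field) are easiest to follow in use. A minimal sketch against the upstream pyrsistent package, which this vendored copy mirrors; the Package record and its fields are invented for illustration:

from pyrsistent import PRecord, field, pset_field, pmap_field

class Package(PRecord):
    # type= restricts assignment; mandatory= makes construction fail without it
    name = field(type=str, mandatory=True)
    # invariant callables return (is_ok, error_code) tuples
    version = field(type=str, initial="0.0.0",
                    invariant=lambda v: (v.count(".") == 2, "version must be X.Y.Z"))
    extras = pset_field(str)        # backed by an auto-generated CheckedPSet subclass
    scripts = pmap_field(str, str)  # backed by an auto-generated CheckedPMap subclass

pkg = Package(name="demo")
pkg2 = pkg.set(extras={"dev"})      # the field factory coerces the plain set
assert "dev" in pkg2.extras and len(pkg.extras) == 0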
+- """ +- TheMap = _make_pmap_field_type(key_type, value_type) +- +- if optional: +- def factory(argument): +- if argument is None: +- return None +- else: +- return TheMap.create(argument) +- else: +- factory = TheMap.create +- +- return field(mandatory=True, initial=TheMap(), +- type=optional_type(TheMap) if optional else TheMap, +- factory=factory, invariant=invariant) +diff --git a/src/poetry/core/_vendor/pyrsistent/_helpers.py b/src/poetry/core/_vendor/pyrsistent/_helpers.py +deleted file mode 100644 +index 1320e65..0000000 +--- a/src/poetry/core/_vendor/pyrsistent/_helpers.py ++++ /dev/null +@@ -1,97 +0,0 @@ +-from functools import wraps +-from pyrsistent._pmap import PMap, pmap +-from pyrsistent._pset import PSet, pset +-from pyrsistent._pvector import PVector, pvector +- +-def freeze(o, strict=True): +- """ +- Recursively convert simple Python containers into pyrsistent versions +- of those containers. +- +- - list is converted to pvector, recursively +- - dict is converted to pmap, recursively on values (but not keys) +- - set is converted to pset, but not recursively +- - tuple is converted to tuple, recursively. +- +- If strict == True (default): +- +- - freeze is called on elements of pvectors +- - freeze is called on values of pmaps +- +- Sets and dict keys are not recursively frozen because they do not contain +- mutable data by convention. The main exception to this rule is that +- dict keys and set elements are often instances of mutable objects that +- support hash-by-id, which this function can't convert anyway. +- +- >>> freeze(set([1, 2])) +- pset([1, 2]) +- >>> freeze([1, {'a': 3}]) +- pvector([1, pmap({'a': 3})]) +- >>> freeze((1, [])) +- (1, pvector([])) +- """ +- typ = type(o) +- if typ is dict or (strict and isinstance(o, PMap)): +- return pmap({k: freeze(v, strict) for k, v in o.items()}) +- if typ is list or (strict and isinstance(o, PVector)): +- curried_freeze = lambda x: freeze(x, strict) +- return pvector(map(curried_freeze, o)) +- if typ is tuple: +- curried_freeze = lambda x: freeze(x, strict) +- return tuple(map(curried_freeze, o)) +- if typ is set: +- # impossible to have anything that needs freezing inside a set or pset +- return pset(o) +- return o +- +- +-def thaw(o, strict=True): +- """ +- Recursively convert pyrsistent containers into simple Python containers. +- +- - pvector is converted to list, recursively +- - pmap is converted to dict, recursively on values (but not keys) +- - pset is converted to set, but not recursively +- - tuple is converted to tuple, recursively. +- +- If strict == True (the default): +- +- - thaw is called on elements of lists +- - thaw is called on values in dicts +- +- >>> from pyrsistent import s, m, v +- >>> thaw(s(1, 2)) +- {1, 2} +- >>> thaw(v(1, m(a=3))) +- [1, {'a': 3}] +- >>> thaw((1, v())) +- (1, []) +- """ +- typ = type(o) +- if isinstance(o, PVector) or (strict and typ is list): +- curried_thaw = lambda x: thaw(x, strict) +- return list(map(curried_thaw, o)) +- if isinstance(o, PMap) or (strict and typ is dict): +- return {k: thaw(v, strict) for k, v in o.items()} +- if typ is tuple: +- curried_thaw = lambda x: thaw(x, strict) +- return tuple(map(curried_thaw, o)) +- if isinstance(o, PSet): +- # impossible to thaw inside psets or sets +- return set(o) +- return o +- +- +-def mutant(fn): +- """ +- Convenience decorator to isolate mutation to within the decorated function (with respect +- to the input arguments). 
+- +- All arguments to the decorated function will be frozen so that they are guaranteed not to change. +- The return value is also frozen. +- """ +- @wraps(fn) +- def inner_f(*args, **kwargs): +- return freeze(fn(*[freeze(e) for e in args], **dict(freeze(item) for item in kwargs.items()))) +- +- return inner_f +diff --git a/src/poetry/core/_vendor/pyrsistent/_immutable.py b/src/poetry/core/_vendor/pyrsistent/_immutable.py +deleted file mode 100644 +index 7c75945..0000000 +--- a/src/poetry/core/_vendor/pyrsistent/_immutable.py ++++ /dev/null +@@ -1,103 +0,0 @@ +-import sys +- +- +-def immutable(members='', name='Immutable', verbose=False): +- """ +- Produces a class that either can be used standalone or as a base class for persistent classes. +- +- This is a thin wrapper around a named tuple. +- +- Constructing a type and using it to instantiate objects: +- +- >>> Point = immutable('x, y', name='Point') +- >>> p = Point(1, 2) +- >>> p2 = p.set(x=3) +- >>> p +- Point(x=1, y=2) +- >>> p2 +- Point(x=3, y=2) +- +- Inheriting from a constructed type. In this case no type name needs to be supplied: +- +- >>> class PositivePoint(immutable('x, y')): +- ... __slots__ = tuple() +- ... def __new__(cls, x, y): +- ... if x > 0 and y > 0: +- ... return super(PositivePoint, cls).__new__(cls, x, y) +- ... raise Exception('Coordinates must be positive!') +- ... +- >>> p = PositivePoint(1, 2) +- >>> p.set(x=3) +- PositivePoint(x=3, y=2) +- >>> p.set(y=-3) +- Traceback (most recent call last): +- Exception: Coordinates must be positive! +- +- The persistent class also supports the notion of frozen members. The value of a frozen member +- cannot be updated. For example it could be used to implement an ID that should remain the same +- over time. A frozen member is denoted by a trailing underscore. 
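Looking back at the _helpers module whose deletion ends just above: a minimal sketch of the freeze/thaw round-trip it provided, assuming the upstream pyrsistent distribution is installed (the config dict is an invented example):

from pyrsistent import freeze, thaw, PMap, PVector

config = freeze({"tool": {"name": "demo"}, "deps": ["lark", "jsonschema"]})
assert isinstance(config, PMap)             # dict -> pmap, applied recursively
assert isinstance(config["deps"], PVector)  # list -> pvector
assert thaw(config) == {"tool": {"name": "demo"}, "deps": ["lark", "jsonschema"]}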
+- +- >>> Point = immutable('x, y, id_', name='Point') +- >>> p = Point(1, 2, id_=17) +- >>> p.set(x=3) +- Point(x=3, y=2, id_=17) +- >>> p.set(id_=18) +- Traceback (most recent call last): +- AttributeError: Cannot set frozen members id_ +- """ +- +- if isinstance(members, str): +- members = members.replace(',', ' ').split() +- +- def frozen_member_test(): +- frozen_members = ["'%s'" % f for f in members if f.endswith('_')] +- if frozen_members: +- return """ +- frozen_fields = fields_to_modify & set([{frozen_members}]) +- if frozen_fields: +- raise AttributeError('Cannot set frozen members %s' % ', '.join(frozen_fields)) +- """.format(frozen_members=', '.join(frozen_members)) +- +- return '' +- +- verbose_string = "" +- if sys.version_info < (3, 7): +- # Verbose is no longer supported in Python 3.7 +- verbose_string = ", verbose={verbose}".format(verbose=verbose) +- +- quoted_members = ', '.join("'%s'" % m for m in members) +- template = """ +-class {class_name}(namedtuple('ImmutableBase', [{quoted_members}]{verbose_string})): +- __slots__ = tuple() +- +- def __repr__(self): +- return super({class_name}, self).__repr__().replace('ImmutableBase', self.__class__.__name__) +- +- def set(self, **kwargs): +- if not kwargs: +- return self +- +- fields_to_modify = set(kwargs.keys()) +- if not fields_to_modify <= {member_set}: +- raise AttributeError("'%s' is not a member" % ', '.join(fields_to_modify - {member_set})) +- +- {frozen_member_test} +- +- return self.__class__.__new__(self.__class__, *map(kwargs.pop, [{quoted_members}], self)) +-""".format(quoted_members=quoted_members, +- member_set="set([%s])" % quoted_members if quoted_members else 'set()', +- frozen_member_test=frozen_member_test(), +- verbose_string=verbose_string, +- class_name=name) +- +- if verbose: +- print(template) +- +- from collections import namedtuple +- namespace = dict(namedtuple=namedtuple, __name__='pyrsistent_immutable') +- try: +- exec(template, namespace) +- except SyntaxError as e: +- raise SyntaxError(str(e) + ':\n' + template) from e +- +- return namespace[name] +diff --git a/src/poetry/core/_vendor/pyrsistent/_pbag.py b/src/poetry/core/_vendor/pyrsistent/_pbag.py +deleted file mode 100644 +index 9cf5840..0000000 +--- a/src/poetry/core/_vendor/pyrsistent/_pbag.py ++++ /dev/null +@@ -1,267 +0,0 @@ +-from collections.abc import Container, Iterable, Sized, Hashable +-from functools import reduce +-from pyrsistent._pmap import pmap +- +- +-def _add_to_counters(counters, element): +- return counters.set(element, counters.get(element, 0) + 1) +- +- +-class PBag(object): +- """ +- A persistent bag/multiset type. +- +- Requires elements to be hashable, and allows duplicates, but has no +- ordering. Bags are hashable. +- +- Do not instantiate directly, instead use the factory functions :py:func:`b` +- or :py:func:`pbag` to create an instance. +- +- Some examples: +- +- >>> s = pbag([1, 2, 3, 1]) +- >>> s2 = s.add(4) +- >>> s3 = s2.remove(1) +- >>> s +- pbag([1, 1, 2, 3]) +- >>> s2 +- pbag([1, 1, 2, 3, 4]) +- >>> s3 +- pbag([1, 2, 3, 4]) +- """ +- +- __slots__ = ('_counts', '__weakref__') +- +- def __init__(self, counts): +- self._counts = counts +- +- def add(self, element): +- """ +- Add an element to the bag. +- +- >>> s = pbag([1]) +- >>> s2 = s.add(1) +- >>> s3 = s.add(2) +- >>> s2 +- pbag([1, 1]) +- >>> s3 +- pbag([1, 2]) +- """ +- return PBag(_add_to_counters(self._counts, element)) +- +- def update(self, iterable): +- """ +- Update bag with all elements in iterable. 
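The string-template codegen in immutable() above expands to a namedtuple subclass; a short sketch of the generated behaviour, mirroring the Point example from its docstring (upstream pyrsistent assumed):

from pyrsistent import immutable

Point = immutable('x, y, id_', name='Point')  # trailing underscore marks a frozen member
p = Point(1, 2, id_=17)
assert p.set(x=3) == Point(3, 2, id_=17)      # set() builds a new instance
try:
    p.set(id_=18)                             # frozen members reject updates
except AttributeError as exc:
    print(exc)                                # Cannot set frozen members id_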
+- +- >>> s = pbag([1]) +- >>> s.update([1, 2]) +- pbag([1, 1, 2]) +- """ +- if iterable: +- return PBag(reduce(_add_to_counters, iterable, self._counts)) +- +- return self +- +- def remove(self, element): +- """ +- Remove an element from the bag. +- +- >>> s = pbag([1, 1, 2]) +- >>> s2 = s.remove(1) +- >>> s3 = s.remove(2) +- >>> s2 +- pbag([1, 2]) +- >>> s3 +- pbag([1, 1]) +- """ +- if element not in self._counts: +- raise KeyError(element) +- elif self._counts[element] == 1: +- newc = self._counts.remove(element) +- else: +- newc = self._counts.set(element, self._counts[element] - 1) +- return PBag(newc) +- +- def count(self, element): +- """ +- Return the number of times an element appears. +- +- +- >>> pbag([]).count('non-existent') +- 0 +- >>> pbag([1, 1, 2]).count(1) +- 2 +- """ +- return self._counts.get(element, 0) +- +- def __len__(self): +- """ +- Return the length including duplicates. +- +- >>> len(pbag([1, 1, 2])) +- 3 +- """ +- return sum(self._counts.itervalues()) +- +- def __iter__(self): +- """ +- Return an iterator of all elements, including duplicates. +- +- >>> list(pbag([1, 1, 2])) +- [1, 1, 2] +- >>> list(pbag([1, 2])) +- [1, 2] +- """ +- for elt, count in self._counts.iteritems(): +- for i in range(count): +- yield elt +- +- def __contains__(self, elt): +- """ +- Check if an element is in the bag. +- +- >>> 1 in pbag([1, 1, 2]) +- True +- >>> 0 in pbag([1, 2]) +- False +- """ +- return elt in self._counts +- +- def __repr__(self): +- return "pbag({0})".format(list(self)) +- +- def __eq__(self, other): +- """ +- Check if two bags are equivalent, honoring the number of duplicates, +- and ignoring insertion order. +- +- >>> pbag([1, 1, 2]) == pbag([1, 2]) +- False +- >>> pbag([2, 1, 0]) == pbag([0, 1, 2]) +- True +- """ +- if type(other) is not PBag: +- raise TypeError("Can only compare PBag with PBags") +- return self._counts == other._counts +- +- def __lt__(self, other): +- raise TypeError('PBags are not orderable') +- +- __le__ = __lt__ +- __gt__ = __lt__ +- __ge__ = __lt__ +- +- # Multiset-style operations similar to collections.Counter +- +- def __add__(self, other): +- """ +- Combine elements from two PBags. +- +- >>> pbag([1, 2, 2]) + pbag([2, 3, 3]) +- pbag([1, 2, 2, 2, 3, 3]) +- """ +- if not isinstance(other, PBag): +- return NotImplemented +- result = self._counts.evolver() +- for elem, other_count in other._counts.iteritems(): +- result[elem] = self.count(elem) + other_count +- return PBag(result.persistent()) +- +- def __sub__(self, other): +- """ +- Remove elements from one PBag that are present in another. +- +- >>> pbag([1, 2, 2, 2, 3]) - pbag([2, 3, 3, 4]) +- pbag([1, 2, 2]) +- """ +- if not isinstance(other, PBag): +- return NotImplemented +- result = self._counts.evolver() +- for elem, other_count in other._counts.iteritems(): +- newcount = self.count(elem) - other_count +- if newcount > 0: +- result[elem] = newcount +- elif elem in self: +- result.remove(elem) +- return PBag(result.persistent()) +- +- def __or__(self, other): +- """ +- Union: Keep elements that are present in either of two PBags. 
+- +- >>> pbag([1, 2, 2, 2]) | pbag([2, 3, 3]) +- pbag([1, 2, 2, 2, 3, 3]) +- """ +- if not isinstance(other, PBag): +- return NotImplemented +- result = self._counts.evolver() +- for elem, other_count in other._counts.iteritems(): +- count = self.count(elem) +- newcount = max(count, other_count) +- result[elem] = newcount +- return PBag(result.persistent()) +- +- def __and__(self, other): +- """ +- Intersection: Only keep elements that are present in both PBags. +- +- >>> pbag([1, 2, 2, 2]) & pbag([2, 3, 3]) +- pbag([2]) +- """ +- if not isinstance(other, PBag): +- return NotImplemented +- result = pmap().evolver() +- for elem, count in self._counts.iteritems(): +- newcount = min(count, other.count(elem)) +- if newcount > 0: +- result[elem] = newcount +- return PBag(result.persistent()) +- +- def __hash__(self): +- """ +- Hash based on value of elements. +- +- >>> m = pmap({pbag([1, 2]): "it's here!"}) +- >>> m[pbag([2, 1])] +- "it's here!" +- >>> pbag([1, 1, 2]) in m +- False +- """ +- return hash(self._counts) +- +- +-Container.register(PBag) +-Iterable.register(PBag) +-Sized.register(PBag) +-Hashable.register(PBag) +- +- +-def b(*elements): +- """ +- Construct a persistent bag. +- +- Takes an arbitrary number of arguments to insert into the new persistent +- bag. +- +- >>> b(1, 2, 3, 2) +- pbag([1, 2, 2, 3]) +- """ +- return pbag(elements) +- +- +-def pbag(elements): +- """ +- Convert an iterable to a persistent bag. +- +- Takes an iterable with elements to insert. +- +- >>> pbag([1, 2, 3, 2]) +- pbag([1, 2, 2, 3]) +- """ +- if not elements: +- return _EMPTY_PBAG +- return PBag(reduce(_add_to_counters, elements, pmap())) +- +- +-_EMPTY_PBAG = PBag(pmap()) +- +diff --git a/src/poetry/core/_vendor/pyrsistent/_pclass.py b/src/poetry/core/_vendor/pyrsistent/_pclass.py +deleted file mode 100644 +index fd31a95..0000000 +--- a/src/poetry/core/_vendor/pyrsistent/_pclass.py ++++ /dev/null +@@ -1,262 +0,0 @@ +-from pyrsistent._checked_types import (InvariantException, CheckedType, _restore_pickle, store_invariants) +-from pyrsistent._field_common import ( +- set_fields, check_type, is_field_ignore_extra_complaint, PFIELD_NO_INITIAL, serialize, check_global_invariants +-) +-from pyrsistent._transformations import transform +- +- +-def _is_pclass(bases): +- return len(bases) == 1 and bases[0] == CheckedType +- +- +-class PClassMeta(type): +- def __new__(mcs, name, bases, dct): +- set_fields(dct, bases, name='_pclass_fields') +- store_invariants(dct, bases, '_pclass_invariants', '__invariant__') +- dct['__slots__'] = ('_pclass_frozen',) + tuple(key for key in dct['_pclass_fields']) +- +- # There must only be one __weakref__ entry in the inheritance hierarchy, +- # lets put it on the top level class. +- if _is_pclass(bases): +- dct['__slots__'] += ('__weakref__',) +- +- return super(PClassMeta, mcs).__new__(mcs, name, bases, dct) +- +-_MISSING_VALUE = object() +- +- +-def _check_and_set_attr(cls, field, name, value, result, invariant_errors): +- check_type(cls, field, name, value) +- is_ok, error_code = field.invariant(value) +- if not is_ok: +- invariant_errors.append(error_code) +- else: +- setattr(result, name, value) +- +- +-class PClass(CheckedType, metaclass=PClassMeta): +- """ +- A PClass is a python class with a fixed set of specified fields. PClasses are declared as python classes inheriting +- from PClass. It is defined the same way that PRecords are and behaves like a PRecord in all aspects except that it +- is not a PMap and hence not a collection but rather a plain Python object. 
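To make the PBag multiset arithmetic above concrete, a small sketch (upstream pyrsistent assumed; the tag values are invented):

from pyrsistent import pbag, b

tags = pbag(["py", "py", "rust"])
assert tags.count("py") == 2 and len(tags) == 3
assert tags + b("rust") == pbag(["py", "py", "rust", "rust"])  # counts add up
assert tags - b("py") == b("py", "rust")                       # one occurrence removed
assert (tags & b("py", "go")) == b("py")                       # min of the two counts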
+-
+-
+-    More documentation and examples of PClass usage are available at https://github.com/tobgu/pyrsistent
+-    """
+-    def __new__(cls, **kwargs):    # Support *args?
+-        result = super(PClass, cls).__new__(cls)
+-        factory_fields = kwargs.pop('_factory_fields', None)
+-        ignore_extra = kwargs.pop('ignore_extra', None)
+-        missing_fields = []
+-        invariant_errors = []
+-        for name, field in cls._pclass_fields.items():
+-            if name in kwargs:
+-                if factory_fields is None or name in factory_fields:
+-                    if is_field_ignore_extra_complaint(PClass, field, ignore_extra):
+-                        value = field.factory(kwargs[name], ignore_extra=ignore_extra)
+-                    else:
+-                        value = field.factory(kwargs[name])
+-                else:
+-                    value = kwargs[name]
+-                _check_and_set_attr(cls, field, name, value, result, invariant_errors)
+-                del kwargs[name]
+-            elif field.initial is not PFIELD_NO_INITIAL:
+-                initial = field.initial() if callable(field.initial) else field.initial
+-                _check_and_set_attr(
+-                    cls, field, name, initial, result, invariant_errors)
+-            elif field.mandatory:
+-                missing_fields.append('{0}.{1}'.format(cls.__name__, name))
+-
+-        if invariant_errors or missing_fields:
+-            raise InvariantException(tuple(invariant_errors), tuple(missing_fields), 'Field invariant failed')
+-
+-        if kwargs:
+-            raise AttributeError("'{0}' are not among the specified fields for {1}".format(
+-                ', '.join(kwargs), cls.__name__))
+-
+-        check_global_invariants(result, cls._pclass_invariants)
+-
+-        result._pclass_frozen = True
+-        return result
+-
+-    def set(self, *args, **kwargs):
+-        """
+-        Set a field in the instance. Returns a new instance with the updated value. The original instance remains
+-        unmodified. Accepts key-value pairs or single string representing the field name and a value.
+-
+-        >>> from pyrsistent import PClass, field
+-        >>> class AClass(PClass):
+-        ...     x = field()
+-        ...
+-        >>> a = AClass(x=1)
+-        >>> a2 = a.set(x=2)
+-        >>> a3 = a.set('x', 3)
+-        >>> a
+-        AClass(x=1)
+-        >>> a2
+-        AClass(x=2)
+-        >>> a3
+-        AClass(x=3)
+-        """
+-        if args:
+-            kwargs[args[0]] = args[1]
+-
+-        factory_fields = set(kwargs)
+-
+-        for key in self._pclass_fields:
+-            if key not in kwargs:
+-                value = getattr(self, key, _MISSING_VALUE)
+-                if value is not _MISSING_VALUE:
+-                    kwargs[key] = value
+-
+-        return self.__class__(_factory_fields=factory_fields, **kwargs)
+-
+-    @classmethod
+-    def create(cls, kwargs, _factory_fields=None, ignore_extra=False):
+-        """
+-        Factory method. Will create a new PClass of the current type and assign the values
+-        specified in kwargs.
+-
+-        :param ignore_extra: A boolean which when set to True will ignore any keys which appear in kwargs that are not
+-                             in the set of fields on the PClass.
+-        """
+-        if isinstance(kwargs, cls):
+-            return kwargs
+-
+-        if ignore_extra:
+-            kwargs = {k: kwargs[k] for k in cls._pclass_fields if k in kwargs}
+-
+-        return cls(_factory_fields=_factory_fields, ignore_extra=ignore_extra, **kwargs)
+-
+-    def serialize(self, format=None):
+-        """
+-        Serialize the current PClass using custom serializer functions for fields where
+-        such have been supplied.
+-        """
+-        result = {}
+-        for name in self._pclass_fields:
+-            value = getattr(self, name, _MISSING_VALUE)
+-            if value is not _MISSING_VALUE:
+-                result[name] = serialize(self._pclass_fields[name].serializer, format, value)
+-
+-        return result
+-
+-    def transform(self, *transformations):
+-        """
+-        Apply transformations to the current PClass. For more details on transformations see
+-        the documentation for PMap.
Transformations on PClasses do not support key matching +- since the PClass is not a collection. Apart from that the transformations available +- for other persistent types work as expected. +- """ +- return transform(self, transformations) +- +- def __eq__(self, other): +- if isinstance(other, self.__class__): +- for name in self._pclass_fields: +- if getattr(self, name, _MISSING_VALUE) != getattr(other, name, _MISSING_VALUE): +- return False +- +- return True +- +- return NotImplemented +- +- def __ne__(self, other): +- return not self == other +- +- def __hash__(self): +- # May want to optimize this by caching the hash somehow +- return hash(tuple((key, getattr(self, key, _MISSING_VALUE)) for key in self._pclass_fields)) +- +- def __setattr__(self, key, value): +- if getattr(self, '_pclass_frozen', False): +- raise AttributeError("Can't set attribute, key={0}, value={1}".format(key, value)) +- +- super(PClass, self).__setattr__(key, value) +- +- def __delattr__(self, key): +- raise AttributeError("Can't delete attribute, key={0}, use remove()".format(key)) +- +- def _to_dict(self): +- result = {} +- for key in self._pclass_fields: +- value = getattr(self, key, _MISSING_VALUE) +- if value is not _MISSING_VALUE: +- result[key] = value +- +- return result +- +- def __repr__(self): +- return "{0}({1})".format(self.__class__.__name__, +- ', '.join('{0}={1}'.format(k, repr(v)) for k, v in self._to_dict().items())) +- +- def __reduce__(self): +- # Pickling support +- data = dict((key, getattr(self, key)) for key in self._pclass_fields if hasattr(self, key)) +- return _restore_pickle, (self.__class__, data,) +- +- def evolver(self): +- """ +- Returns an evolver for this object. +- """ +- return _PClassEvolver(self, self._to_dict()) +- +- def remove(self, name): +- """ +- Remove attribute given by name from the current instance. Raises AttributeError if the +- attribute doesn't exist. 
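A compact sketch of the PClass surface defined above, assuming the upstream pyrsistent package (Dependency is an invented example):

from pyrsistent import PClass, field, InvariantException

class Dependency(PClass):
    name = field(type=str, mandatory=True)
    optional = field(type=bool, initial=False)

dep = Dependency(name="jsonschema")
dep2 = dep.set(optional=True)      # a new frozen instance; the original is untouched
assert (dep.optional, dep2.optional) == (False, True)
assert dep2.serialize() == {"name": "jsonschema", "optional": True}
try:
    Dependency()                   # mandatory field missing
except InvariantException as exc:
    assert exc.missing_fields == ('Dependency.name',)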
+- """ +- evolver = self.evolver() +- del evolver[name] +- return evolver.persistent() +- +- +-class _PClassEvolver(object): +- __slots__ = ('_pclass_evolver_original', '_pclass_evolver_data', '_pclass_evolver_data_is_dirty', '_factory_fields') +- +- def __init__(self, original, initial_dict): +- self._pclass_evolver_original = original +- self._pclass_evolver_data = initial_dict +- self._pclass_evolver_data_is_dirty = False +- self._factory_fields = set() +- +- def __getitem__(self, item): +- return self._pclass_evolver_data[item] +- +- def set(self, key, value): +- if self._pclass_evolver_data.get(key, _MISSING_VALUE) is not value: +- self._pclass_evolver_data[key] = value +- self._factory_fields.add(key) +- self._pclass_evolver_data_is_dirty = True +- +- return self +- +- def __setitem__(self, key, value): +- self.set(key, value) +- +- def remove(self, item): +- if item in self._pclass_evolver_data: +- del self._pclass_evolver_data[item] +- self._factory_fields.discard(item) +- self._pclass_evolver_data_is_dirty = True +- return self +- +- raise AttributeError(item) +- +- def __delitem__(self, item): +- self.remove(item) +- +- def persistent(self): +- if self._pclass_evolver_data_is_dirty: +- return self._pclass_evolver_original.__class__(_factory_fields=self._factory_fields, +- **self._pclass_evolver_data) +- +- return self._pclass_evolver_original +- +- def __setattr__(self, key, value): +- if key not in self.__slots__: +- self.set(key, value) +- else: +- super(_PClassEvolver, self).__setattr__(key, value) +- +- def __getattr__(self, item): +- return self[item] +diff --git a/src/poetry/core/_vendor/pyrsistent/_pdeque.py b/src/poetry/core/_vendor/pyrsistent/_pdeque.py +deleted file mode 100644 +index bd11bfa..0000000 +--- a/src/poetry/core/_vendor/pyrsistent/_pdeque.py ++++ /dev/null +@@ -1,376 +0,0 @@ +-from collections.abc import Sequence, Hashable +-from itertools import islice, chain +-from numbers import Integral +-from pyrsistent._plist import plist +- +- +-class PDeque(object): +- """ +- Persistent double ended queue (deque). Allows quick appends and pops in both ends. Implemented +- using two persistent lists. +- +- A maximum length can be specified to create a bounded queue. +- +- Fully supports the Sequence and Hashable protocols including indexing and slicing but +- if you need fast random access go for the PVector instead. +- +- Do not instantiate directly, instead use the factory functions :py:func:`dq` or :py:func:`pdeque` to +- create an instance. 
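And a sketch of the _PClassEvolver pattern above, which batches several updates into a single new instance (names invented, upstream pyrsistent assumed):

from pyrsistent import PClass, field

class Build(PClass):
    target = field(type=str)
    release = field(type=bool)

b1 = Build(target="x86_64", release=False)
ev = b1.evolver()
ev.release = True           # attribute writes mutate only the evolver
ev.target = "aarch64"
b2 = ev.persistent()        # one allocation for both changes
assert (b1.target, b1.release) == ("x86_64", False)
assert (b2.target, b2.release) == ("aarch64", True)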
+-
+-    Some examples:
+-
+-    >>> x = pdeque([1, 2, 3])
+-    >>> x.left
+-    1
+-    >>> x.right
+-    3
+-    >>> x[0] == x.left
+-    True
+-    >>> x[-1] == x.right
+-    True
+-    >>> x.pop()
+-    pdeque([1, 2])
+-    >>> x.pop() == x[:-1]
+-    True
+-    >>> x.popleft()
+-    pdeque([2, 3])
+-    >>> x.append(4)
+-    pdeque([1, 2, 3, 4])
+-    >>> x.appendleft(4)
+-    pdeque([4, 1, 2, 3])
+-
+-    >>> y = pdeque([1, 2, 3], maxlen=3)
+-    >>> y.append(4)
+-    pdeque([2, 3, 4], maxlen=3)
+-    >>> y.appendleft(4)
+-    pdeque([4, 1, 2], maxlen=3)
+-    """
+-    __slots__ = ('_left_list', '_right_list', '_length', '_maxlen', '__weakref__')
+-
+-    def __new__(cls, left_list, right_list, length, maxlen=None):
+-        instance = super(PDeque, cls).__new__(cls)
+-        instance._left_list = left_list
+-        instance._right_list = right_list
+-        instance._length = length
+-
+-        if maxlen is not None:
+-            if not isinstance(maxlen, Integral):
+-                raise TypeError('An integer is required as maxlen')
+-
+-            if maxlen < 0:
+-                raise ValueError("maxlen must be non-negative")
+-
+-        instance._maxlen = maxlen
+-        return instance
+-
+-    @property
+-    def right(self):
+-        """
+-        Rightmost element in deque.
+-        """
+-        return PDeque._tip_from_lists(self._right_list, self._left_list)
+-
+-    @property
+-    def left(self):
+-        """
+-        Leftmost element in deque.
+-        """
+-        return PDeque._tip_from_lists(self._left_list, self._right_list)
+-
+-    @staticmethod
+-    def _tip_from_lists(primary_list, secondary_list):
+-        if primary_list:
+-            return primary_list.first
+-
+-        if secondary_list:
+-            return secondary_list[-1]
+-
+-        raise IndexError('No elements in empty deque')
+-
+-    def __iter__(self):
+-        return chain(self._left_list, self._right_list.reverse())
+-
+-    def __repr__(self):
+-        return "pdeque({0}{1})".format(list(self),
+-                                       ', maxlen={0}'.format(self._maxlen) if self._maxlen is not None else '')
+-    __str__ = __repr__
+-
+-    @property
+-    def maxlen(self):
+-        """
+-        Maximum length of the queue.
+-        """
+-        return self._maxlen
+-
+-    def pop(self, count=1):
+-        """
+-        Return new deque with rightmost element removed. Popping the empty queue
+-        will return the empty queue. An optional count can be given to indicate the
+-        number of elements to pop. Popping with a negative index is the same as
+-        popleft. Executes in amortized O(k) where k is the number of elements to pop.
+-
+-        >>> pdeque([1, 2]).pop()
+-        pdeque([1])
+-        >>> pdeque([1, 2]).pop(2)
+-        pdeque([])
+-        >>> pdeque([1, 2]).pop(-1)
+-        pdeque([2])
+-        """
+-        if count < 0:
+-            return self.popleft(-count)
+-
+-        new_right_list, new_left_list = PDeque._pop_lists(self._right_list, self._left_list, count)
+-        return PDeque(new_left_list, new_right_list, max(self._length - count, 0), self._maxlen)
+-
+-    def popleft(self, count=1):
+-        """
+-        Return new deque with leftmost element removed. Otherwise functionally
+-        equivalent to pop().
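A sketch of the bounded-window behaviour documented above (upstream pyrsistent assumed):

from pyrsistent import pdeque

window = pdeque([], maxlen=3)
for sample in (1, 2, 3, 4):
    window = window.append(sample)  # at maxlen, the oldest element drops off the left
assert list(window) == [2, 3, 4]
assert window.pop(2) == pdeque([2])        # pop() counts from the right
assert window.popleft() == pdeque([3, 4])  # popleft() from the left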
+- +- >>> pdeque([1, 2]).popleft() +- pdeque([2]) +- """ +- if count < 0: +- return self.pop(-count) +- +- new_left_list, new_right_list = PDeque._pop_lists(self._left_list, self._right_list, count) +- return PDeque(new_left_list, new_right_list, max(self._length - count, 0), self._maxlen) +- +- @staticmethod +- def _pop_lists(primary_list, secondary_list, count): +- new_primary_list = primary_list +- new_secondary_list = secondary_list +- +- while count > 0 and (new_primary_list or new_secondary_list): +- count -= 1 +- if new_primary_list.rest: +- new_primary_list = new_primary_list.rest +- elif new_primary_list: +- new_primary_list = new_secondary_list.reverse() +- new_secondary_list = plist() +- else: +- new_primary_list = new_secondary_list.reverse().rest +- new_secondary_list = plist() +- +- return new_primary_list, new_secondary_list +- +- def _is_empty(self): +- return not self._left_list and not self._right_list +- +- def __lt__(self, other): +- if not isinstance(other, PDeque): +- return NotImplemented +- +- return tuple(self) < tuple(other) +- +- def __eq__(self, other): +- if not isinstance(other, PDeque): +- return NotImplemented +- +- if tuple(self) == tuple(other): +- # Sanity check of the length value since it is redundant (there for performance) +- assert len(self) == len(other) +- return True +- +- return False +- +- def __hash__(self): +- return hash(tuple(self)) +- +- def __len__(self): +- return self._length +- +- def append(self, elem): +- """ +- Return new deque with elem as the rightmost element. +- +- >>> pdeque([1, 2]).append(3) +- pdeque([1, 2, 3]) +- """ +- new_left_list, new_right_list, new_length = self._append(self._left_list, self._right_list, elem) +- return PDeque(new_left_list, new_right_list, new_length, self._maxlen) +- +- def appendleft(self, elem): +- """ +- Return new deque with elem as the leftmost element. +- +- >>> pdeque([1, 2]).appendleft(3) +- pdeque([3, 1, 2]) +- """ +- new_right_list, new_left_list, new_length = self._append(self._right_list, self._left_list, elem) +- return PDeque(new_left_list, new_right_list, new_length, self._maxlen) +- +- def _append(self, primary_list, secondary_list, elem): +- if self._maxlen is not None and self._length == self._maxlen: +- if self._maxlen == 0: +- return primary_list, secondary_list, 0 +- new_primary_list, new_secondary_list = PDeque._pop_lists(primary_list, secondary_list, 1) +- return new_primary_list, new_secondary_list.cons(elem), self._length +- +- return primary_list, secondary_list.cons(elem), self._length + 1 +- +- @staticmethod +- def _extend_list(the_list, iterable): +- count = 0 +- for elem in iterable: +- the_list = the_list.cons(elem) +- count += 1 +- +- return the_list, count +- +- def _extend(self, primary_list, secondary_list, iterable): +- new_primary_list, extend_count = PDeque._extend_list(primary_list, iterable) +- new_secondary_list = secondary_list +- current_len = self._length + extend_count +- if self._maxlen is not None and current_len > self._maxlen: +- pop_len = current_len - self._maxlen +- new_secondary_list, new_primary_list = PDeque._pop_lists(new_secondary_list, new_primary_list, pop_len) +- extend_count -= pop_len +- +- return new_primary_list, new_secondary_list, extend_count +- +- def extend(self, iterable): +- """ +- Return new deque with all elements of iterable appended to the right. 
+- +- >>> pdeque([1, 2]).extend([3, 4]) +- pdeque([1, 2, 3, 4]) +- """ +- new_right_list, new_left_list, extend_count = self._extend(self._right_list, self._left_list, iterable) +- return PDeque(new_left_list, new_right_list, self._length + extend_count, self._maxlen) +- +- def extendleft(self, iterable): +- """ +- Return new deque with all elements of iterable appended to the left. +- +- NB! The elements will be inserted in reverse order compared to the order in the iterable. +- +- >>> pdeque([1, 2]).extendleft([3, 4]) +- pdeque([4, 3, 1, 2]) +- """ +- new_left_list, new_right_list, extend_count = self._extend(self._left_list, self._right_list, iterable) +- return PDeque(new_left_list, new_right_list, self._length + extend_count, self._maxlen) +- +- def count(self, elem): +- """ +- Return the number of elements equal to elem present in the queue +- +- >>> pdeque([1, 2, 1]).count(1) +- 2 +- """ +- return self._left_list.count(elem) + self._right_list.count(elem) +- +- def remove(self, elem): +- """ +- Return new deque with first element from left equal to elem removed. If no such element is found +- a ValueError is raised. +- +- >>> pdeque([2, 1, 2]).remove(2) +- pdeque([1, 2]) +- """ +- try: +- return PDeque(self._left_list.remove(elem), self._right_list, self._length - 1) +- except ValueError: +- # Value not found in left list, try the right list +- try: +- # This is severely inefficient with a double reverse, should perhaps implement a remove_last()? +- return PDeque(self._left_list, +- self._right_list.reverse().remove(elem).reverse(), self._length - 1) +- except ValueError as e: +- raise ValueError('{0} not found in PDeque'.format(elem)) from e +- +- def reverse(self): +- """ +- Return reversed deque. +- +- >>> pdeque([1, 2, 3]).reverse() +- pdeque([3, 2, 1]) +- +- Also supports the standard python reverse function. +- +- >>> reversed(pdeque([1, 2, 3])) +- pdeque([3, 2, 1]) +- """ +- return PDeque(self._right_list, self._left_list, self._length) +- __reversed__ = reverse +- +- def rotate(self, steps): +- """ +- Return deque with elements rotated steps steps. +- +- >>> x = pdeque([1, 2, 3]) +- >>> x.rotate(1) +- pdeque([3, 1, 2]) +- >>> x.rotate(-2) +- pdeque([3, 1, 2]) +- """ +- popped_deque = self.pop(steps) +- if steps >= 0: +- return popped_deque.extendleft(islice(self.reverse(), steps)) +- +- return popped_deque.extend(islice(self, -steps)) +- +- def __reduce__(self): +- # Pickling support +- return pdeque, (list(self), self._maxlen) +- +- def __getitem__(self, index): +- if isinstance(index, slice): +- if index.step is not None and index.step != 1: +- # Too difficult, no structural sharing possible +- return pdeque(tuple(self)[index], maxlen=self._maxlen) +- +- result = self +- if index.start is not None: +- result = result.popleft(index.start % self._length) +- if index.stop is not None: +- result = result.pop(self._length - (index.stop % self._length)) +- +- return result +- +- if not isinstance(index, Integral): +- raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__) +- +- if index >= 0: +- return self.popleft(index).left +- +- shifted = len(self) + index +- if shifted < 0: +- raise IndexError( +- "pdeque index {0} out of range {1}".format(index, len(self)), +- ) +- return self.popleft(shifted).left +- +- index = Sequence.index +- +-Sequence.register(PDeque) +-Hashable.register(PDeque) +- +- +-def pdeque(iterable=(), maxlen=None): +- """ +- Return deque containing the elements of iterable. 
If maxlen is specified then
+-    len(iterable) - maxlen elements are discarded from the left if len(iterable) > maxlen.
+-
+-    >>> pdeque([1, 2, 3])
+-    pdeque([1, 2, 3])
+-    >>> pdeque([1, 2, 3, 4], maxlen=2)
+-    pdeque([3, 4], maxlen=2)
+-    """
+-    t = tuple(iterable)
+-    if maxlen is not None:
+-        t = t[-maxlen:]
+-    length = len(t)
+-    pivot = int(length / 2)
+-    left = plist(t[:pivot])
+-    right = plist(t[pivot:], reverse=True)
+-    return PDeque(left, right, length, maxlen)
+-
+-def dq(*elements):
+-    """
+-    Return deque containing all arguments.
+-
+-    >>> dq(1, 2, 3)
+-    pdeque([1, 2, 3])
+-    """
+-    return pdeque(elements)
+diff --git a/src/poetry/core/_vendor/pyrsistent/_plist.py b/src/poetry/core/_vendor/pyrsistent/_plist.py
+deleted file mode 100644
+index bea7f5e..0000000
+--- a/src/poetry/core/_vendor/pyrsistent/_plist.py
++++ /dev/null
+@@ -1,313 +0,0 @@
+-from collections.abc import Sequence, Hashable
+-from numbers import Integral
+-from functools import reduce
+-
+-
+-class _PListBuilder(object):
+-    """
+-    Helper class to allow construction of a list without
+-    having to reverse it in the end.
+-    """
+-    __slots__ = ('_head', '_tail')
+-
+-    def __init__(self):
+-        self._head = _EMPTY_PLIST
+-        self._tail = _EMPTY_PLIST
+-
+-    def _append(self, elem, constructor):
+-        if not self._tail:
+-            self._head = constructor(elem)
+-            self._tail = self._head
+-        else:
+-            self._tail.rest = constructor(elem)
+-            self._tail = self._tail.rest
+-
+-        return self._head
+-
+-    def append_elem(self, elem):
+-        return self._append(elem, lambda e: PList(e, _EMPTY_PLIST))
+-
+-    def append_plist(self, pl):
+-        return self._append(pl, lambda l: l)
+-
+-    def build(self):
+-        return self._head
+-
+-
+-class _PListBase(object):
+-    __slots__ = ('__weakref__',)
+-
+-    # Selected implementations can be taken straight from the Sequence
+-    # class, others are less suitable. Especially those that work with
+-    # index lookups.
+-    count = Sequence.count
+-    index = Sequence.index
+-
+-    def __reduce__(self):
+-        # Pickling support
+-        return plist, (list(self),)
+-
+-    def __len__(self):
+-        """
+-        Return the length of the list, computed by traversing it.
+-
+-        This is obviously O(n) but with the current implementation
+-        where a list is also a node the overhead of storing the length
+-        in every node would be quite significant.
+-        """
+-        return sum(1 for _ in self)
+-
+-    def __repr__(self):
+-        return "plist({0})".format(list(self))
+-    __str__ = __repr__
+-
+-    def cons(self, elem):
+-        """
+-        Return a new list with elem inserted as new head.
+-
+-        >>> plist([1, 2]).cons(3)
+-        plist([3, 1, 2])
+-        """
+-        return PList(elem, self)
+-
+-    def mcons(self, iterable):
+-        """
+-        Return a new list with all elements of iterable repeatedly cons:ed to the current list.
+-        NB! The elements will be inserted in the reverse order of the iterable.
+-        Runs in O(len(iterable)).
+-
+-        >>> plist([1, 2]).mcons([3, 4])
+-        plist([4, 3, 1, 2])
+-        """
+-        head = self
+-        for elem in iterable:
+-            head = head.cons(elem)
+-
+-        return head
+-
+-    def reverse(self):
+-        """
+-        Return a reversed version of list. Runs in O(n) where n is the length of the list.
+-
+-        >>> plist([1, 2, 3]).reverse()
+-        plist([3, 2, 1])
+-
+-        Also supports the standard reversed function.
+-
+-        >>> reversed(plist([1, 2, 3]))
+-        plist([3, 2, 1])
+-        """
+-        result = plist()
+-        head = self
+-        while head:
+-            result = result.cons(head.first)
+-            head = head.rest
+-
+-        return result
+-    __reversed__ = reverse
+-
+-    def split(self, index):
+-        """
+-        Split the list at position specified by index. Returns a tuple containing the
+-        list up until index and the list after the index. Runs in O(index).
+-
+-        >>> plist([1, 2, 3, 4]).split(2)
+-        (plist([1, 2]), plist([3, 4]))
+-        """
+-        lb = _PListBuilder()
+-        right_list = self
+-        i = 0
+-        while right_list and i < index:
+-            lb.append_elem(right_list.first)
+-            right_list = right_list.rest
+-            i += 1
+-
+-        if not right_list:
+-            # Just a small optimization in the cases where no split occurred
+-            return self, _EMPTY_PLIST
+-
+-        return lb.build(), right_list
+-
+-    def __iter__(self):
+-        li = self
+-        while li:
+-            yield li.first
+-            li = li.rest
+-
+-    def __lt__(self, other):
+-        if not isinstance(other, _PListBase):
+-            return NotImplemented
+-
+-        return tuple(self) < tuple(other)
+-
+-    def __eq__(self, other):
+-        """
+-        Traverses the lists, checking equality of elements.
+-
+-        This is an O(n) operation, but preserves the standard semantics of list equality.
+-        """
+-        if not isinstance(other, _PListBase):
+-            return NotImplemented
+-
+-        self_head = self
+-        other_head = other
+-        while self_head and other_head:
+-            if not self_head.first == other_head.first:
+-                return False
+-            self_head = self_head.rest
+-            other_head = other_head.rest
+-
+-        return not self_head and not other_head
+-
+-    def __getitem__(self, index):
+-        # Don't use this data structure if you plan to do a lot of indexing, it is
+-        # very inefficient! Use a PVector instead!
+-
+-        if isinstance(index, slice):
+-            if index.start is not None and index.stop is None and (index.step is None or index.step == 1):
+-                return self._drop(index.start)
+-
+-            # Take the easy way out for all other slicing cases, not much structural reuse possible anyway
+-            return plist(tuple(self)[index])
+-
+-        if not isinstance(index, Integral):
+-            raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)
+-
+-        if index < 0:
+-            # NB: O(n)!
+-            index += len(self)
+-
+-        try:
+-            return self._drop(index).first
+-        except AttributeError as e:
+-            raise IndexError("PList index out of range") from e
+-
+-    def _drop(self, count):
+-        if count < 0:
+-            raise IndexError("PList index out of range")
+-
+-        head = self
+-        while count > 0:
+-            head = head.rest
+-            count -= 1
+-
+-        return head
+-
+-    def __hash__(self):
+-        return hash(tuple(self))
+-
+-    def remove(self, elem):
+-        """
+-        Return new list with first element equal to elem removed. O(k) where k is the position
+-        of the element that is removed.
+-
+-        Raises ValueError if no matching element is found.
+-
+-        >>> plist([1, 2, 1]).remove(1)
+-        plist([2, 1])
+-        """
+-
+-        builder = _PListBuilder()
+-        head = self
+-        while head:
+-            if head.first == elem:
+-                return builder.append_plist(head.rest)
+-
+-            builder.append_elem(head.first)
+-            head = head.rest
+-
+-        raise ValueError('{0} not found in PList'.format(elem))
+-
+-
+-class PList(_PListBase):
+-    """
+-    Classical Lisp style singly linked list. Adding elements to the head using cons is O(1).
+-    Element access is O(k) where k is the position of the element in the list. Taking the
+-    length of the list is O(n).
+-
+-    Fully supports the Sequence and Hashable protocols including indexing and slicing but
+-    if you need fast random access go for the PVector instead.
+-
+-    Do not instantiate directly, instead use the factory functions :py:func:`l` or :py:func:`plist` to
+-    create an instance.
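A sketch of the cons/structural-sharing behaviour implemented above (upstream pyrsistent assumed):

from pyrsistent import plist, l

xs = plist([2, 3])
ys = xs.cons(1)                        # O(1) prepend; the tail is shared, not copied
assert ys == l(1, 2, 3) and ys.rest is xs
assert ys.split(1) == (l(1), l(2, 3))
assert ys.remove(2) == l(1, 3)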
+-
+-    Some examples:
+-
+-    >>> x = plist([1, 2])
+-    >>> y = x.cons(3)
+-    >>> x
+-    plist([1, 2])
+-    >>> y
+-    plist([3, 1, 2])
+-    >>> y.first
+-    3
+-    >>> y.rest == x
+-    True
+-    >>> y[:2]
+-    plist([3, 1])
+-    """
+-    __slots__ = ('first', 'rest')
+-
+-    def __new__(cls, first, rest):
+-        instance = super(PList, cls).__new__(cls)
+-        instance.first = first
+-        instance.rest = rest
+-        return instance
+-
+-    def __bool__(self):
+-        return True
+-    __nonzero__ = __bool__
+-
+-
+-Sequence.register(PList)
+-Hashable.register(PList)
+-
+-
+-class _EmptyPList(_PListBase):
+-    __slots__ = ()
+-
+-    def __bool__(self):
+-        return False
+-    __nonzero__ = __bool__
+-
+-    @property
+-    def first(self):
+-        raise AttributeError("Empty PList has no first")
+-
+-    @property
+-    def rest(self):
+-        return self
+-
+-
+-Sequence.register(_EmptyPList)
+-Hashable.register(_EmptyPList)
+-
+-_EMPTY_PLIST = _EmptyPList()
+-
+-
+-def plist(iterable=(), reverse=False):
+-    """
+-    Creates a new persistent list containing all elements of iterable.
+-    Optional parameter reverse specifies if the elements should be inserted in
+-    reverse order or not.
+-
+-    >>> plist([1, 2, 3])
+-    plist([1, 2, 3])
+-    >>> plist([1, 2, 3], reverse=True)
+-    plist([3, 2, 1])
+-    """
+-    if not reverse:
+-        iterable = list(iterable)
+-        iterable.reverse()
+-
+-    return reduce(lambda pl, elem: pl.cons(elem), iterable, _EMPTY_PLIST)
+-
+-
+-def l(*elements):
+-    """
+-    Creates a new persistent list containing all arguments.
+-
+-    >>> l(1, 2, 3)
+-    plist([1, 2, 3])
+-    """
+-    return plist(elements)
+diff --git a/src/poetry/core/_vendor/pyrsistent/_pmap.py b/src/poetry/core/_vendor/pyrsistent/_pmap.py
+deleted file mode 100644
+index 056d478..0000000
+--- a/src/poetry/core/_vendor/pyrsistent/_pmap.py
++++ /dev/null
+@@ -1,461 +0,0 @@
+-from collections.abc import Mapping, Hashable
+-from itertools import chain
+-from pyrsistent._pvector import pvector
+-from pyrsistent._transformations import transform
+-
+-
+-class PMap(object):
+-    """
+-    Persistent map/dict. Tries to follow the same naming conventions as the built in dict where feasible.
+-
+-    Do not instantiate directly, instead use the factory functions :py:func:`m` or :py:func:`pmap` to
+-    create an instance.
+-
+-    Was originally written as a very close copy of the Clojure equivalent but was later rewritten to
+-    more closely resemble the python dict. This means that a sparse vector (a PVector) of buckets is
+-    used. The keys are hashed and the elements inserted at position hash % len(bucket_vector). Whenever
+-    the map size exceeds 2/3 of the containing vector's size the map is reallocated to a vector of double
+-    the size. This is done to avoid excessive hash collisions.
+-
+-    This structure corresponds most closely to the built in dict type and is intended as a replacement. Where the
+-    semantics are the same (more or less) the same function names have been used but for some cases it is not possible,
+-    for example assignments and deletion of values.
+-
+-    PMap implements the Mapping protocol and is Hashable. It also supports dot-notation for
+-    element access.
+-
+-    Random access and insert is log32(n) where n is the size of the map.
+- +- The following are examples of some common operations on persistent maps +- +- >>> m1 = m(a=1, b=3) +- >>> m2 = m1.set('c', 3) +- >>> m3 = m2.remove('a') +- >>> m1 == {'a': 1, 'b': 3} +- True +- >>> m2 == {'a': 1, 'b': 3, 'c': 3} +- True +- >>> m3 == {'b': 3, 'c': 3} +- True +- >>> m3['c'] +- 3 +- >>> m3.c +- 3 +- """ +- __slots__ = ('_size', '_buckets', '__weakref__', '_cached_hash') +- +- def __new__(cls, size, buckets): +- self = super(PMap, cls).__new__(cls) +- self._size = size +- self._buckets = buckets +- return self +- +- @staticmethod +- def _get_bucket(buckets, key): +- index = hash(key) % len(buckets) +- bucket = buckets[index] +- return index, bucket +- +- @staticmethod +- def _getitem(buckets, key): +- _, bucket = PMap._get_bucket(buckets, key) +- if bucket: +- for k, v in bucket: +- if k == key: +- return v +- +- raise KeyError(key) +- +- def __getitem__(self, key): +- return PMap._getitem(self._buckets, key) +- +- @staticmethod +- def _contains(buckets, key): +- _, bucket = PMap._get_bucket(buckets, key) +- if bucket: +- for k, _ in bucket: +- if k == key: +- return True +- +- return False +- +- return False +- +- def __contains__(self, key): +- return self._contains(self._buckets, key) +- +- get = Mapping.get +- +- def __iter__(self): +- return self.iterkeys() +- +- def __getattr__(self, key): +- try: +- return self[key] +- except KeyError as e: +- raise AttributeError( +- "{0} has no attribute '{1}'".format(type(self).__name__, key) +- ) from e +- +- def iterkeys(self): +- for k, _ in self.iteritems(): +- yield k +- +- # These are more efficient implementations compared to the original +- # methods that are based on the keys iterator and then calls the +- # accessor functions to access the value for the corresponding key +- def itervalues(self): +- for _, v in self.iteritems(): +- yield v +- +- def iteritems(self): +- for bucket in self._buckets: +- if bucket: +- for k, v in bucket: +- yield k, v +- +- def values(self): +- return pvector(self.itervalues()) +- +- def keys(self): +- return pvector(self.iterkeys()) +- +- def items(self): +- return pvector(self.iteritems()) +- +- def __len__(self): +- return self._size +- +- def __repr__(self): +- return 'pmap({0})'.format(str(dict(self))) +- +- def __eq__(self, other): +- if self is other: +- return True +- if not isinstance(other, Mapping): +- return NotImplemented +- if len(self) != len(other): +- return False +- if isinstance(other, PMap): +- if (hasattr(self, '_cached_hash') and hasattr(other, '_cached_hash') +- and self._cached_hash != other._cached_hash): +- return False +- if self._buckets == other._buckets: +- return True +- return dict(self.iteritems()) == dict(other.iteritems()) +- elif isinstance(other, dict): +- return dict(self.iteritems()) == other +- return dict(self.iteritems()) == dict(other.items()) +- +- __ne__ = Mapping.__ne__ +- +- def __lt__(self, other): +- raise TypeError('PMaps are not orderable') +- +- __le__ = __lt__ +- __gt__ = __lt__ +- __ge__ = __lt__ +- +- def __str__(self): +- return self.__repr__() +- +- def __hash__(self): +- if not hasattr(self, '_cached_hash'): +- self._cached_hash = hash(frozenset(self.iteritems())) +- return self._cached_hash +- +- def set(self, key, val): +- """ +- Return a new PMap with key and val inserted. 
+-
+-        >>> m1 = m(a=1, b=2)
+-        >>> m2 = m1.set('a', 3)
+-        >>> m3 = m1.set('c', 4)
+-        >>> m1 == {'a': 1, 'b': 2}
+-        True
+-        >>> m2 == {'a': 3, 'b': 2}
+-        True
+-        >>> m3 == {'a': 1, 'b': 2, 'c': 4}
+-        True
+-        """
+-        return self.evolver().set(key, val).persistent()
+-
+-    def remove(self, key):
+-        """
+-        Return a new PMap without the element specified by key. Raises KeyError if the element
+-        is not present.
+-
+-        >>> m1 = m(a=1, b=2)
+-        >>> m1.remove('a')
+-        pmap({'b': 2})
+-        """
+-        return self.evolver().remove(key).persistent()
+-
+-    def discard(self, key):
+-        """
+-        Return a new PMap without the element specified by key. Returns reference to itself
+-        if element is not present.
+-
+-        >>> m1 = m(a=1, b=2)
+-        >>> m1.discard('a')
+-        pmap({'b': 2})
+-        >>> m1 is m1.discard('c')
+-        True
+-        """
+-        try:
+-            return self.remove(key)
+-        except KeyError:
+-            return self
+-
+-    def update(self, *maps):
+-        """
+-        Return a new PMap with the items in Mappings inserted. If the same key is present in multiple
+-        maps the rightmost (last) value is inserted.
+-
+-        >>> m1 = m(a=1, b=2)
+-        >>> m1.update(m(a=2, c=3), {'a': 17, 'd': 35}) == {'a': 17, 'b': 2, 'c': 3, 'd': 35}
+-        True
+-        """
+-        return self.update_with(lambda l, r: r, *maps)
+-
+-    def update_with(self, update_fn, *maps):
+-        """
+-        Return a new PMap with the items in Mappings maps inserted. If the same key is present in multiple
+-        maps the values will be merged using update_fn going from left to right.
+-
+-        >>> from operator import add
+-        >>> m1 = m(a=1, b=2)
+-        >>> m1.update_with(add, m(a=2)) == {'a': 3, 'b': 2}
+-        True
+-
+-        The reverse behaviour of the regular merge. Keep the leftmost element instead of the rightmost.
+-
+-        >>> m1 = m(a=1)
+-        >>> m1.update_with(lambda l, r: l, m(a=2), {'a':3})
+-        pmap({'a': 1})
+-        """
+-        evolver = self.evolver()
+-        for map in maps:
+-            for key, value in map.items():
+-                evolver.set(key, update_fn(evolver[key], value) if key in evolver else value)
+-
+-        return evolver.persistent()
+-
+-    def __add__(self, other):
+-        return self.update(other)
+-
+-    __or__ = __add__
+-
+-    def __reduce__(self):
+-        # Pickling support
+-        return pmap, (dict(self),)
+-
+-    def transform(self, *transformations):
+-        """
+-        Transform arbitrarily complex combinations of PVectors and PMaps. A transformation
+-        consists of two parts. One match expression that specifies which elements to transform
+-        and one transformation function that performs the actual transformation.
+-
+-        >>> from pyrsistent import freeze, ny
+-        >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'},
+-        ...                                   {'author': 'Steve', 'content': 'A slightly longer article'}],
+-        ...                      'weather': {'temperature': '11C', 'wind': '5m/s'}})
+-        >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c)
+-        >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c)
+-        >>> very_short_news.articles[0].content
+-        'A short article'
+-        >>> very_short_news.articles[1].content
+-        'A slightly long...'
+- +- When nothing has been transformed the original data structure is kept +- +- >>> short_news is news_paper +- True +- >>> very_short_news is news_paper +- False +- >>> very_short_news.articles[0] is news_paper.articles[0] +- True +- """ +- return transform(self, transformations) +- +- def copy(self): +- return self +- +- class _Evolver(object): +- __slots__ = ('_buckets_evolver', '_size', '_original_pmap') +- +- def __init__(self, original_pmap): +- self._original_pmap = original_pmap +- self._buckets_evolver = original_pmap._buckets.evolver() +- self._size = original_pmap._size +- +- def __getitem__(self, key): +- return PMap._getitem(self._buckets_evolver, key) +- +- def __setitem__(self, key, val): +- self.set(key, val) +- +- def set(self, key, val): +- if len(self._buckets_evolver) < 0.67 * self._size: +- self._reallocate(2 * len(self._buckets_evolver)) +- +- kv = (key, val) +- index, bucket = PMap._get_bucket(self._buckets_evolver, key) +- if bucket: +- for k, v in bucket: +- if k == key: +- if v is not val: +- new_bucket = [(k2, v2) if k2 != k else (k2, val) for k2, v2 in bucket] +- self._buckets_evolver[index] = new_bucket +- +- return self +- +- new_bucket = [kv] +- new_bucket.extend(bucket) +- self._buckets_evolver[index] = new_bucket +- self._size += 1 +- else: +- self._buckets_evolver[index] = [kv] +- self._size += 1 +- +- return self +- +- def _reallocate(self, new_size): +- new_list = new_size * [None] +- buckets = self._buckets_evolver.persistent() +- for k, v in chain.from_iterable(x for x in buckets if x): +- index = hash(k) % new_size +- if new_list[index]: +- new_list[index].append((k, v)) +- else: +- new_list[index] = [(k, v)] +- +- # A reallocation should always result in a dirty buckets evolver to avoid +- # possible loss of elements when doing the reallocation. +- self._buckets_evolver = pvector().evolver() +- self._buckets_evolver.extend(new_list) +- +- def is_dirty(self): +- return self._buckets_evolver.is_dirty() +- +- def persistent(self): +- if self.is_dirty(): +- self._original_pmap = PMap(self._size, self._buckets_evolver.persistent()) +- +- return self._original_pmap +- +- def __len__(self): +- return self._size +- +- def __contains__(self, key): +- return PMap._contains(self._buckets_evolver, key) +- +- def __delitem__(self, key): +- self.remove(key) +- +- def remove(self, key): +- index, bucket = PMap._get_bucket(self._buckets_evolver, key) +- +- if bucket: +- new_bucket = [(k, v) for (k, v) in bucket if k != key] +- if len(bucket) > len(new_bucket): +- self._buckets_evolver[index] = new_bucket if new_bucket else None +- self._size -= 1 +- return self +- +- raise KeyError('{0}'.format(key)) +- +- def evolver(self): +- """ +- Create a new evolver for this pmap. For a discussion on evolvers in general see the +- documentation for the pvector evolver. +- +- Create the evolver and perform various mutating updates to it: +- +- >>> m1 = m(a=1, b=2) +- >>> e = m1.evolver() +- >>> e['c'] = 3 +- >>> len(e) +- 3 +- >>> del e['a'] +- +- The underlying pmap remains the same: +- +- >>> m1 == {'a': 1, 'b': 2} +- True +- +- The changes are kept in the evolver. An updated pmap can be created using the +- persistent() function on the evolver. +- +- >>> m2 = e.persistent() +- >>> m2 == {'b': 2, 'c': 3} +- True +- +- The new pmap will share data with the original pmap in the same way that would have +- been done if only using operations on the pmap. 
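Besides the evolver shown above, update()/update_with() from earlier in this file fold several maps together in one pass; a short sketch (upstream pyrsistent assumed):

from operator import add
from pyrsistent import m

base = m(a=1, b=2)
assert base.update(m(a=2), {'a': 17, 'd': 5}) == {'a': 17, 'b': 2, 'd': 5}  # rightmost value wins
assert base.update_with(add, m(a=2)) == {'a': 3, 'b': 2}                    # conflicts merged by fn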
+- """ +- return self._Evolver(self) +- +-Mapping.register(PMap) +-Hashable.register(PMap) +- +- +-def _turbo_mapping(initial, pre_size): +- if pre_size: +- size = pre_size +- else: +- try: +- size = 2 * len(initial) or 8 +- except Exception: +- # Guess we can't figure out the length. Give up on length hinting, +- # we can always reallocate later. +- size = 8 +- +- buckets = size * [None] +- +- if not isinstance(initial, Mapping): +- # Make a dictionary of the initial data if it isn't already, +- # that will save us some job further down since we can assume no +- # key collisions +- initial = dict(initial) +- +- for k, v in initial.items(): +- h = hash(k) +- index = h % size +- bucket = buckets[index] +- +- if bucket: +- bucket.append((k, v)) +- else: +- buckets[index] = [(k, v)] +- +- return PMap(len(initial), pvector().extend(buckets)) +- +- +-_EMPTY_PMAP = _turbo_mapping({}, 0) +- +- +-def pmap(initial={}, pre_size=0): +- """ +- Create new persistent map, inserts all elements in initial into the newly created map. +- The optional argument pre_size may be used to specify an initial size of the underlying bucket vector. This +- may have a positive performance impact in the cases where you know beforehand that a large number of elements +- will be inserted into the map eventually since it will reduce the number of reallocations required. +- +- >>> pmap({'a': 13, 'b': 14}) == {'a': 13, 'b': 14} +- True +- """ +- if not initial and pre_size == 0: +- return _EMPTY_PMAP +- +- return _turbo_mapping(initial, pre_size) +- +- +-def m(**kwargs): +- """ +- Creates a new persistent map. Inserts all key value arguments into the newly created map. +- +- >>> m(a=13, b=14) == {'a': 13, 'b': 14} +- True +- """ +- return pmap(kwargs) +diff --git a/src/poetry/core/_vendor/pyrsistent/_precord.py b/src/poetry/core/_vendor/pyrsistent/_precord.py +deleted file mode 100644 +index 1ee8198..0000000 +--- a/src/poetry/core/_vendor/pyrsistent/_precord.py ++++ /dev/null +@@ -1,167 +0,0 @@ +-from pyrsistent._checked_types import CheckedType, _restore_pickle, InvariantException, store_invariants +-from pyrsistent._field_common import ( +- set_fields, check_type, is_field_ignore_extra_complaint, PFIELD_NO_INITIAL, serialize, check_global_invariants +-) +-from pyrsistent._pmap import PMap, pmap +- +- +-class _PRecordMeta(type): +- def __new__(mcs, name, bases, dct): +- set_fields(dct, bases, name='_precord_fields') +- store_invariants(dct, bases, '_precord_invariants', '__invariant__') +- +- dct['_precord_mandatory_fields'] = \ +- set(name for name, field in dct['_precord_fields'].items() if field.mandatory) +- +- dct['_precord_initial_values'] = \ +- dict((k, field.initial) for k, field in dct['_precord_fields'].items() if field.initial is not PFIELD_NO_INITIAL) +- +- +- dct['__slots__'] = () +- +- return super(_PRecordMeta, mcs).__new__(mcs, name, bases, dct) +- +- +-class PRecord(PMap, CheckedType, metaclass=_PRecordMeta): +- """ +- A PRecord is a PMap with a fixed set of specified fields. Records are declared as python classes inheriting +- from PRecord. Because it is a PMap it has full support for all Mapping methods such as iteration and element +- access using subscript notation. +- +- More documentation and examples of PRecord usage is available at https://github.com/tobgu/pyrsistent +- """ +- def __new__(cls, **kwargs): +- # Hack total! If these two special attributes exist that means we can create +- # ourselves. Otherwise we need to go through the Evolver to create the structures +- # for us. 
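A sketch of declaring the PRecord described above (upstream pyrsistent assumed; Source is an invented record):

from pyrsistent import PRecord, field

class Source(PRecord):
    url = field(type=str, mandatory=True)
    ref = field(type=str, initial="HEAD")

src = Source(url="https://example.com/repo.git")
assert src["ref"] == "HEAD"              # a PRecord is a PMap, so Mapping access works
src2 = src.set(ref="v1.0", url=src.url)  # set() applies several fields in one atomic update
assert dict(src2) == {"url": "https://example.com/repo.git", "ref": "v1.0"}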
+- if '_precord_size' in kwargs and '_precord_buckets' in kwargs: +- return super(PRecord, cls).__new__(cls, kwargs['_precord_size'], kwargs['_precord_buckets']) +- +- factory_fields = kwargs.pop('_factory_fields', None) +- ignore_extra = kwargs.pop('_ignore_extra', False) +- +- initial_values = kwargs +- if cls._precord_initial_values: +- initial_values = dict((k, v() if callable(v) else v) +- for k, v in cls._precord_initial_values.items()) +- initial_values.update(kwargs) +- +- e = _PRecordEvolver(cls, pmap(pre_size=len(cls._precord_fields)), _factory_fields=factory_fields, _ignore_extra=ignore_extra) +- for k, v in initial_values.items(): +- e[k] = v +- +- return e.persistent() +- +- def set(self, *args, **kwargs): +- """ +- Set a field in the record. This set function differs slightly from that in the PMap +- class. First of all it accepts key-value pairs. Second it accepts multiple key-value +- pairs to perform one, atomic, update of multiple fields. +- """ +- +- # The PRecord set() can accept kwargs since all fields that have been declared are +- # valid python identifiers. Also allow multiple fields to be set in one operation. +- if args: +- return super(PRecord, self).set(args[0], args[1]) +- +- return self.update(kwargs) +- +- def evolver(self): +- """ +- Returns an evolver of this object. +- """ +- return _PRecordEvolver(self.__class__, self) +- +- def __repr__(self): +- return "{0}({1})".format(self.__class__.__name__, +- ', '.join('{0}={1}'.format(k, repr(v)) for k, v in self.items())) +- +- @classmethod +- def create(cls, kwargs, _factory_fields=None, ignore_extra=False): +- """ +- Factory method. Will create a new PRecord of the current type and assign the values +- specified in kwargs. +- +- :param ignore_extra: A boolean which when set to True will ignore any keys which appear in kwargs that are not +- in the set of fields on the PRecord. +- """ +- if isinstance(kwargs, cls): +- return kwargs +- +- if ignore_extra: +- kwargs = {k: kwargs[k] for k in cls._precord_fields if k in kwargs} +- +- return cls(_factory_fields=_factory_fields, _ignore_extra=ignore_extra, **kwargs) +- +- def __reduce__(self): +- # Pickling support +- return _restore_pickle, (self.__class__, dict(self),) +- +- def serialize(self, format=None): +- """ +- Serialize the current PRecord using custom serializer functions for fields where +- such have been supplied. 
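A short sketch of declaring and updating a record, based on the `PRecord` docstrings above; note that `field()` is part of pyrsistent's public API but does not appear in this diff:

```python
from pyrsistent import PRecord, field

class Point(PRecord):
    x = field(type=int, mandatory=True)
    y = field(type=int, initial=0)

p = Point(x=1)              # y falls back to its declared initial value
p2 = p.set(x=2, y=5)        # one atomic update of several fields
assert (p['x'], p['y']) == (1, 0)
assert p2 == Point.create({'x': 2, 'y': 5})
```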
+- """ +- return dict((k, serialize(self._precord_fields[k].serializer, format, v)) for k, v in self.items()) +- +- +-class _PRecordEvolver(PMap._Evolver): +- __slots__ = ('_destination_cls', '_invariant_error_codes', '_missing_fields', '_factory_fields', '_ignore_extra') +- +- def __init__(self, cls, original_pmap, _factory_fields=None, _ignore_extra=False): +- super(_PRecordEvolver, self).__init__(original_pmap) +- self._destination_cls = cls +- self._invariant_error_codes = [] +- self._missing_fields = [] +- self._factory_fields = _factory_fields +- self._ignore_extra = _ignore_extra +- +- def __setitem__(self, key, original_value): +- self.set(key, original_value) +- +- def set(self, key, original_value): +- field = self._destination_cls._precord_fields.get(key) +- if field: +- if self._factory_fields is None or field in self._factory_fields: +- try: +- if is_field_ignore_extra_complaint(PRecord, field, self._ignore_extra): +- value = field.factory(original_value, ignore_extra=self._ignore_extra) +- else: +- value = field.factory(original_value) +- except InvariantException as e: +- self._invariant_error_codes += e.invariant_errors +- self._missing_fields += e.missing_fields +- return self +- else: +- value = original_value +- +- check_type(self._destination_cls, field, key, value) +- +- is_ok, error_code = field.invariant(value) +- if not is_ok: +- self._invariant_error_codes.append(error_code) +- +- return super(_PRecordEvolver, self).set(key, value) +- else: +- raise AttributeError("'{0}' is not among the specified fields for {1}".format(key, self._destination_cls.__name__)) +- +- def persistent(self): +- cls = self._destination_cls +- is_dirty = self.is_dirty() +- pm = super(_PRecordEvolver, self).persistent() +- if is_dirty or not isinstance(pm, cls): +- result = cls(_precord_buckets=pm._buckets, _precord_size=pm._size) +- else: +- result = pm +- +- if cls._precord_mandatory_fields: +- self._missing_fields += tuple('{0}.{1}'.format(cls.__name__, f) for f +- in (cls._precord_mandatory_fields - set(result.keys()))) +- +- if self._invariant_error_codes or self._missing_fields: +- raise InvariantException(tuple(self._invariant_error_codes), tuple(self._missing_fields), +- 'Field invariant failed') +- +- check_global_invariants(result, cls._precord_invariants) +- +- return result +diff --git a/src/poetry/core/_vendor/pyrsistent/_pset.py b/src/poetry/core/_vendor/pyrsistent/_pset.py +deleted file mode 100644 +index 4fae827..0000000 +--- a/src/poetry/core/_vendor/pyrsistent/_pset.py ++++ /dev/null +@@ -1,227 +0,0 @@ +-from collections.abc import Set, Hashable +-import sys +-from pyrsistent._pmap import pmap +- +- +-class PSet(object): +- """ +- Persistent set implementation. Built on top of the persistent map. The set supports all operations +- in the Set protocol and is Hashable. +- +- Do not instantiate directly, instead use the factory functions :py:func:`s` or :py:func:`pset` +- to create an instance. +- +- Random access and insert is log32(n) where n is the size of the set. 
+- +- Some examples: +- +- >>> s = pset([1, 2, 3, 1]) +- >>> s2 = s.add(4) +- >>> s3 = s2.remove(2) +- >>> s +- pset([1, 2, 3]) +- >>> s2 +- pset([1, 2, 3, 4]) +- >>> s3 +- pset([1, 3, 4]) +- """ +- __slots__ = ('_map', '__weakref__') +- +- def __new__(cls, m): +- self = super(PSet, cls).__new__(cls) +- self._map = m +- return self +- +- def __contains__(self, element): +- return element in self._map +- +- def __iter__(self): +- return iter(self._map) +- +- def __len__(self): +- return len(self._map) +- +- def __repr__(self): +- if not self: +- return 'p' + str(set(self)) +- +- return 'pset([{0}])'.format(str(set(self))[1:-1]) +- +- def __str__(self): +- return self.__repr__() +- +- def __hash__(self): +- return hash(self._map) +- +- def __reduce__(self): +- # Pickling support +- return pset, (list(self),) +- +- @classmethod +- def _from_iterable(cls, it, pre_size=8): +- return PSet(pmap(dict((k, True) for k in it), pre_size=pre_size)) +- +- def add(self, element): +- """ +- Return a new PSet with element added +- +- >>> s1 = s(1, 2) +- >>> s1.add(3) +- pset([1, 2, 3]) +- """ +- return self.evolver().add(element).persistent() +- +- def update(self, iterable): +- """ +- Return a new PSet with elements in iterable added +- +- >>> s1 = s(1, 2) +- >>> s1.update([3, 4, 4]) +- pset([1, 2, 3, 4]) +- """ +- e = self.evolver() +- for element in iterable: +- e.add(element) +- +- return e.persistent() +- +- def remove(self, element): +- """ +- Return a new PSet with element removed. Raises KeyError if element is not present. +- +- >>> s1 = s(1, 2) +- >>> s1.remove(2) +- pset([1]) +- """ +- if element in self._map: +- return self.evolver().remove(element).persistent() +- +- raise KeyError("Element '%s' not present in PSet" % repr(element)) +- +- def discard(self, element): +- """ +- Return a new PSet with element removed. Returns itself if element is not present. +- """ +- if element in self._map: +- return self.evolver().remove(element).persistent() +- +- return self +- +- class _Evolver(object): +- __slots__ = ('_original_pset', '_pmap_evolver') +- +- def __init__(self, original_pset): +- self._original_pset = original_pset +- self._pmap_evolver = original_pset._map.evolver() +- +- def add(self, element): +- self._pmap_evolver[element] = True +- return self +- +- def remove(self, element): +- del self._pmap_evolver[element] +- return self +- +- def is_dirty(self): +- return self._pmap_evolver.is_dirty() +- +- def persistent(self): +- if not self.is_dirty(): +- return self._original_pset +- +- return PSet(self._pmap_evolver.persistent()) +- +- def __len__(self): +- return len(self._pmap_evolver) +- +- def copy(self): +- return self +- +- def evolver(self): +- """ +- Create a new evolver for this pset. For a discussion on evolvers in general see the +- documentation for the pvector evolver. +- +- Create the evolver and perform various mutating updates to it: +- +- >>> s1 = s(1, 2, 3) +- >>> e = s1.evolver() +- >>> _ = e.add(4) +- >>> len(e) +- 4 +- >>> _ = e.remove(1) +- +- The underlying pset remains the same: +- +- >>> s1 +- pset([1, 2, 3]) +- +- The changes are kept in the evolver. An updated pmap can be created using the +- persistent() function on the evolver. +- +- >>> s2 = e.persistent() +- >>> s2 +- pset([2, 3, 4]) +- +- The new pset will share data with the original pset in the same way that would have +- been done if only using operations on the pset. +- """ +- return PSet._Evolver(self) +- +- # All the operations and comparisons you would expect on a set. 
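A quick sketch of the copy-on-write behavior described above; the standard set operators used here are the `Set` mixin assignments that follow just below:

```python
from pyrsistent import s

s1 = s(1, 2, 3)
s2 = s1.add(4).remove(2)   # each call returns a new pset
union = s1 | s(3, 4)

assert s1 == s(1, 2, 3)    # the original set is never mutated
assert s2 == s(1, 3, 4)
assert union == s(1, 2, 3, 4)
```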
+- # +- # This is not very beautiful. If we avoid inheriting from PSet we can use the +- # __slots__ concepts (which requires a new style class) and hopefully save some memory. +- __le__ = Set.__le__ +- __lt__ = Set.__lt__ +- __gt__ = Set.__gt__ +- __ge__ = Set.__ge__ +- __eq__ = Set.__eq__ +- __ne__ = Set.__ne__ +- +- __and__ = Set.__and__ +- __or__ = Set.__or__ +- __sub__ = Set.__sub__ +- __xor__ = Set.__xor__ +- +- issubset = __le__ +- issuperset = __ge__ +- union = __or__ +- intersection = __and__ +- difference = __sub__ +- symmetric_difference = __xor__ +- +- isdisjoint = Set.isdisjoint +- +-Set.register(PSet) +-Hashable.register(PSet) +- +-_EMPTY_PSET = PSet(pmap()) +- +- +-def pset(iterable=(), pre_size=8): +- """ +- Creates a persistent set from iterable. Optionally takes a sizing parameter equivalent to that +- used for :py:func:`pmap`. +- +- >>> s1 = pset([1, 2, 3, 2]) +- >>> s1 +- pset([1, 2, 3]) +- """ +- if not iterable: +- return _EMPTY_PSET +- +- return PSet._from_iterable(iterable, pre_size=pre_size) +- +- +-def s(*elements): +- """ +- Create a persistent set. +- +- Takes an arbitrary number of arguments to insert into the new set. +- +- >>> s1 = s(1, 2, 3, 2) +- >>> s1 +- pset([1, 2, 3]) +- """ +- return pset(elements) +diff --git a/src/poetry/core/_vendor/pyrsistent/_pvector.py b/src/poetry/core/_vendor/pyrsistent/_pvector.py +deleted file mode 100644 +index 2aff0e8..0000000 +--- a/src/poetry/core/_vendor/pyrsistent/_pvector.py ++++ /dev/null +@@ -1,711 +0,0 @@ +-from abc import abstractmethod, ABCMeta +-from collections.abc import Sequence, Hashable +-from numbers import Integral +-import operator +-from pyrsistent._transformations import transform +- +- +-def _bitcount(val): +- return bin(val).count("1") +- +-BRANCH_FACTOR = 32 +-BIT_MASK = BRANCH_FACTOR - 1 +-SHIFT = _bitcount(BIT_MASK) +- +- +-def compare_pvector(v, other, operator): +- return operator(v.tolist(), other.tolist() if isinstance(other, PVector) else other) +- +- +-def _index_or_slice(index, stop): +- if stop is None: +- return index +- +- return slice(index, stop) +- +- +-class PythonPVector(object): +- """ +- Support structure for PVector that implements structural sharing for vectors using a trie. +- """ +- __slots__ = ('_count', '_shift', '_root', '_tail', '_tail_offset', '__weakref__') +- +- def __new__(cls, count, shift, root, tail): +- self = super(PythonPVector, cls).__new__(cls) +- self._count = count +- self._shift = shift +- self._root = root +- self._tail = tail +- +- # Derived attribute stored for performance +- self._tail_offset = self._count - len(self._tail) +- return self +- +- def __len__(self): +- return self._count +- +- def __getitem__(self, index): +- if isinstance(index, slice): +- # There are more conditions than the below where it would be OK to +- # return ourselves, implement those... 
+- if index.start is None and index.stop is None and index.step is None: +- return self +- +- # This is a bit nasty realizing the whole structure as a list before +- # slicing it but it is the fastest way I've found to date, and it's easy :-) +- return _EMPTY_PVECTOR.extend(self.tolist()[index]) +- +- if index < 0: +- index += self._count +- +- return PythonPVector._node_for(self, index)[index & BIT_MASK] +- +- def __add__(self, other): +- return self.extend(other) +- +- def __repr__(self): +- return 'pvector({0})'.format(str(self.tolist())) +- +- def __str__(self): +- return self.__repr__() +- +- def __iter__(self): +- # This is kind of lazy and will produce some memory overhead but it is the fasted method +- # by far of those tried since it uses the speed of the built in python list directly. +- return iter(self.tolist()) +- +- def __ne__(self, other): +- return not self.__eq__(other) +- +- def __eq__(self, other): +- return self is other or (hasattr(other, '__len__') and self._count == len(other)) and compare_pvector(self, other, operator.eq) +- +- def __gt__(self, other): +- return compare_pvector(self, other, operator.gt) +- +- def __lt__(self, other): +- return compare_pvector(self, other, operator.lt) +- +- def __ge__(self, other): +- return compare_pvector(self, other, operator.ge) +- +- def __le__(self, other): +- return compare_pvector(self, other, operator.le) +- +- def __mul__(self, times): +- if times <= 0 or self is _EMPTY_PVECTOR: +- return _EMPTY_PVECTOR +- +- if times == 1: +- return self +- +- return _EMPTY_PVECTOR.extend(times * self.tolist()) +- +- __rmul__ = __mul__ +- +- def _fill_list(self, node, shift, the_list): +- if shift: +- shift -= SHIFT +- for n in node: +- self._fill_list(n, shift, the_list) +- else: +- the_list.extend(node) +- +- def tolist(self): +- """ +- The fastest way to convert the vector into a python list. +- """ +- the_list = [] +- self._fill_list(self._root, self._shift, the_list) +- the_list.extend(self._tail) +- return the_list +- +- def _totuple(self): +- """ +- Returns the content as a python tuple. +- """ +- return tuple(self.tolist()) +- +- def __hash__(self): +- # Taking the easy way out again... 
+- return hash(self._totuple()) +- +- def transform(self, *transformations): +- return transform(self, transformations) +- +- def __reduce__(self): +- # Pickling support +- return pvector, (self.tolist(),) +- +- def mset(self, *args): +- if len(args) % 2: +- raise TypeError("mset expected an even number of arguments") +- +- evolver = self.evolver() +- for i in range(0, len(args), 2): +- evolver[args[i]] = args[i+1] +- +- return evolver.persistent() +- +- class Evolver(object): +- __slots__ = ('_count', '_shift', '_root', '_tail', '_tail_offset', '_dirty_nodes', +- '_extra_tail', '_cached_leafs', '_orig_pvector') +- +- def __init__(self, v): +- self._reset(v) +- +- def __getitem__(self, index): +- if not isinstance(index, Integral): +- raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__) +- +- if index < 0: +- index += self._count + len(self._extra_tail) +- +- if self._count <= index < self._count + len(self._extra_tail): +- return self._extra_tail[index - self._count] +- +- return PythonPVector._node_for(self, index)[index & BIT_MASK] +- +- def _reset(self, v): +- self._count = v._count +- self._shift = v._shift +- self._root = v._root +- self._tail = v._tail +- self._tail_offset = v._tail_offset +- self._dirty_nodes = {} +- self._cached_leafs = {} +- self._extra_tail = [] +- self._orig_pvector = v +- +- def append(self, element): +- self._extra_tail.append(element) +- return self +- +- def extend(self, iterable): +- self._extra_tail.extend(iterable) +- return self +- +- def set(self, index, val): +- self[index] = val +- return self +- +- def __setitem__(self, index, val): +- if not isinstance(index, Integral): +- raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__) +- +- if index < 0: +- index += self._count + len(self._extra_tail) +- +- if 0 <= index < self._count: +- node = self._cached_leafs.get(index >> SHIFT) +- if node: +- node[index & BIT_MASK] = val +- elif index >= self._tail_offset: +- if id(self._tail) not in self._dirty_nodes: +- self._tail = list(self._tail) +- self._dirty_nodes[id(self._tail)] = True +- self._cached_leafs[index >> SHIFT] = self._tail +- self._tail[index & BIT_MASK] = val +- else: +- self._root = self._do_set(self._shift, self._root, index, val) +- elif self._count <= index < self._count + len(self._extra_tail): +- self._extra_tail[index - self._count] = val +- elif index == self._count + len(self._extra_tail): +- self._extra_tail.append(val) +- else: +- raise IndexError("Index out of range: %s" % (index,)) +- +- def _do_set(self, level, node, i, val): +- if id(node) in self._dirty_nodes: +- ret = node +- else: +- ret = list(node) +- self._dirty_nodes[id(ret)] = True +- +- if level == 0: +- ret[i & BIT_MASK] = val +- self._cached_leafs[i >> SHIFT] = ret +- else: +- sub_index = (i >> level) & BIT_MASK # >>> +- ret[sub_index] = self._do_set(level - SHIFT, node[sub_index], i, val) +- +- return ret +- +- def delete(self, index): +- del self[index] +- return self +- +- def __delitem__(self, key): +- if self._orig_pvector: +- # All structural sharing bets are off, base evolver on _extra_tail only +- l = PythonPVector(self._count, self._shift, self._root, self._tail).tolist() +- l.extend(self._extra_tail) +- self._reset(_EMPTY_PVECTOR) +- self._extra_tail = l +- +- del self._extra_tail[key] +- +- def persistent(self): +- result = self._orig_pvector +- if self.is_dirty(): +- result = PythonPVector(self._count, self._shift, self._root, self._tail).extend(self._extra_tail) +- self._reset(result) +- 
+- return result +- +- def __len__(self): +- return self._count + len(self._extra_tail) +- +- def is_dirty(self): +- return bool(self._dirty_nodes or self._extra_tail) +- +- def evolver(self): +- return PythonPVector.Evolver(self) +- +- def set(self, i, val): +- # This method could be implemented by a call to mset() but doing so would cause +- # a ~5 X performance penalty on PyPy (considered the primary platform for this implementation +- # of PVector) so we're keeping this implementation for now. +- +- if not isinstance(i, Integral): +- raise TypeError("'%s' object cannot be interpreted as an index" % type(i).__name__) +- +- if i < 0: +- i += self._count +- +- if 0 <= i < self._count: +- if i >= self._tail_offset: +- new_tail = list(self._tail) +- new_tail[i & BIT_MASK] = val +- return PythonPVector(self._count, self._shift, self._root, new_tail) +- +- return PythonPVector(self._count, self._shift, self._do_set(self._shift, self._root, i, val), self._tail) +- +- if i == self._count: +- return self.append(val) +- +- raise IndexError("Index out of range: %s" % (i,)) +- +- def _do_set(self, level, node, i, val): +- ret = list(node) +- if level == 0: +- ret[i & BIT_MASK] = val +- else: +- sub_index = (i >> level) & BIT_MASK # >>> +- ret[sub_index] = self._do_set(level - SHIFT, node[sub_index], i, val) +- +- return ret +- +- @staticmethod +- def _node_for(pvector_like, i): +- if 0 <= i < pvector_like._count: +- if i >= pvector_like._tail_offset: +- return pvector_like._tail +- +- node = pvector_like._root +- for level in range(pvector_like._shift, 0, -SHIFT): +- node = node[(i >> level) & BIT_MASK] # >>> +- +- return node +- +- raise IndexError("Index out of range: %s" % (i,)) +- +- def _create_new_root(self): +- new_shift = self._shift +- +- # Overflow root? 
+- if (self._count >> SHIFT) > (1 << self._shift): # >>> +- new_root = [self._root, self._new_path(self._shift, self._tail)] +- new_shift += SHIFT +- else: +- new_root = self._push_tail(self._shift, self._root, self._tail) +- +- return new_root, new_shift +- +- def append(self, val): +- if len(self._tail) < BRANCH_FACTOR: +- new_tail = list(self._tail) +- new_tail.append(val) +- return PythonPVector(self._count + 1, self._shift, self._root, new_tail) +- +- # Full tail, push into tree +- new_root, new_shift = self._create_new_root() +- return PythonPVector(self._count + 1, new_shift, new_root, [val]) +- +- def _new_path(self, level, node): +- if level == 0: +- return node +- +- return [self._new_path(level - SHIFT, node)] +- +- def _mutating_insert_tail(self): +- self._root, self._shift = self._create_new_root() +- self._tail = [] +- +- def _mutating_fill_tail(self, offset, sequence): +- max_delta_len = BRANCH_FACTOR - len(self._tail) +- delta = sequence[offset:offset + max_delta_len] +- self._tail.extend(delta) +- delta_len = len(delta) +- self._count += delta_len +- return offset + delta_len +- +- def _mutating_extend(self, sequence): +- offset = 0 +- sequence_len = len(sequence) +- while offset < sequence_len: +- offset = self._mutating_fill_tail(offset, sequence) +- if len(self._tail) == BRANCH_FACTOR: +- self._mutating_insert_tail() +- +- self._tail_offset = self._count - len(self._tail) +- +- def extend(self, obj): +- # Mutates the new vector directly for efficiency but that's only an +- # implementation detail, once it is returned it should be considered immutable +- l = obj.tolist() if isinstance(obj, PythonPVector) else list(obj) +- if l: +- new_vector = self.append(l[0]) +- new_vector._mutating_extend(l[1:]) +- return new_vector +- +- return self +- +- def _push_tail(self, level, parent, tail_node): +- """ +- if parent is leaf, insert node, +- else does it map to an existing child? -> +- node_to_insert = push node one more level +- else alloc new path +- +- return node_to_insert placed in copy of parent +- """ +- ret = list(parent) +- +- if level == SHIFT: +- ret.append(tail_node) +- return ret +- +- sub_index = ((self._count - 1) >> level) & BIT_MASK # >>> +- if len(parent) > sub_index: +- ret[sub_index] = self._push_tail(level - SHIFT, parent[sub_index], tail_node) +- return ret +- +- ret.append(self._new_path(level - SHIFT, tail_node)) +- return ret +- +- def index(self, value, *args, **kwargs): +- return self.tolist().index(value, *args, **kwargs) +- +- def count(self, value): +- return self.tolist().count(value) +- +- def delete(self, index, stop=None): +- l = self.tolist() +- del l[_index_or_slice(index, stop)] +- return _EMPTY_PVECTOR.extend(l) +- +- def remove(self, value): +- l = self.tolist() +- l.remove(value) +- return _EMPTY_PVECTOR.extend(l) +- +-class PVector(metaclass=ABCMeta): +- """ +- Persistent vector implementation. Meant as a replacement for the cases where you would normally +- use a Python list. +- +- Do not instantiate directly, instead use the factory functions :py:func:`v` and :py:func:`pvector` to +- create an instance. +- +- Heavily influenced by the persistent vector available in Clojure. Initially this was more or +- less just a port of the Java code for the Clojure vector. It has since been modified and to +- some extent optimized for usage in Python. +- +- The vector is organized as a trie, any mutating method will return a new vector that contains the changes. No +- updates are done to the original vector. 
Structural sharing between vectors are applied where possible to save +- space and to avoid making complete copies. +- +- This structure corresponds most closely to the built in list type and is intended as a replacement. Where the +- semantics are the same (more or less) the same function names have been used but for some cases it is not possible, +- for example assignments. +- +- The PVector implements the Sequence protocol and is Hashable. +- +- Inserts are amortized O(1). Random access is log32(n) where n is the size of the vector. +- +- The following are examples of some common operations on persistent vectors: +- +- >>> p = v(1, 2, 3) +- >>> p2 = p.append(4) +- >>> p3 = p2.extend([5, 6, 7]) +- >>> p +- pvector([1, 2, 3]) +- >>> p2 +- pvector([1, 2, 3, 4]) +- >>> p3 +- pvector([1, 2, 3, 4, 5, 6, 7]) +- >>> p3[5] +- 6 +- >>> p.set(1, 99) +- pvector([1, 99, 3]) +- >>> +- """ +- +- @abstractmethod +- def __len__(self): +- """ +- >>> len(v(1, 2, 3)) +- 3 +- """ +- +- @abstractmethod +- def __getitem__(self, index): +- """ +- Get value at index. Full slicing support. +- +- >>> v1 = v(5, 6, 7, 8) +- >>> v1[2] +- 7 +- >>> v1[1:3] +- pvector([6, 7]) +- """ +- +- @abstractmethod +- def __add__(self, other): +- """ +- >>> v1 = v(1, 2) +- >>> v2 = v(3, 4) +- >>> v1 + v2 +- pvector([1, 2, 3, 4]) +- """ +- +- @abstractmethod +- def __mul__(self, times): +- """ +- >>> v1 = v(1, 2) +- >>> 3 * v1 +- pvector([1, 2, 1, 2, 1, 2]) +- """ +- +- @abstractmethod +- def __hash__(self): +- """ +- >>> v1 = v(1, 2, 3) +- >>> v2 = v(1, 2, 3) +- >>> hash(v1) == hash(v2) +- True +- """ +- +- @abstractmethod +- def evolver(self): +- """ +- Create a new evolver for this pvector. The evolver acts as a mutable view of the vector +- with "transaction like" semantics. No part of the underlying vector i updated, it is still +- fully immutable. Furthermore multiple evolvers created from the same pvector do not +- interfere with each other. +- +- You may want to use an evolver instead of working directly with the pvector in the +- following cases: +- +- * Multiple updates are done to the same vector and the intermediate results are of no +- interest. In this case using an evolver may be a more efficient and easier to work with. +- * You need to pass a vector into a legacy function or a function that you have no control +- over which performs in place mutations of lists. In this case pass an evolver instance +- instead and then create a new pvector from the evolver once the function returns. +- +- The following example illustrates a typical workflow when working with evolvers. It also +- displays most of the API (which i kept small by design, you should not be tempted to +- use evolvers in excess ;-)). +- +- Create the evolver and perform various mutating updates to it: +- +- >>> v1 = v(1, 2, 3, 4, 5) +- >>> e = v1.evolver() +- >>> e[1] = 22 +- >>> _ = e.append(6) +- >>> _ = e.extend([7, 8, 9]) +- >>> e[8] += 1 +- >>> len(e) +- 9 +- +- The underlying pvector remains the same: +- +- >>> v1 +- pvector([1, 2, 3, 4, 5]) +- +- The changes are kept in the evolver. An updated pvector can be created using the +- persistent() function on the evolver. +- +- >>> v2 = e.persistent() +- >>> v2 +- pvector([1, 22, 3, 4, 5, 6, 7, 8, 10]) +- +- The new pvector will share data with the original pvector in the same way that would have +- been done if only using operations on the pvector. +- """ +- +- @abstractmethod +- def mset(self, *args): +- """ +- Return a new vector with elements in specified positions replaced by values (multi set). 
+- +- Elements on even positions in the argument list are interpreted as indexes while +- elements on odd positions are considered values. +- +- >>> v1 = v(1, 2, 3) +- >>> v1.mset(0, 11, 2, 33) +- pvector([11, 2, 33]) +- """ +- +- @abstractmethod +- def set(self, i, val): +- """ +- Return a new vector with element at position i replaced with val. The original vector remains unchanged. +- +- Setting a value one step beyond the end of the vector is equal to appending. Setting beyond that will +- result in an IndexError. +- +- >>> v1 = v(1, 2, 3) +- >>> v1.set(1, 4) +- pvector([1, 4, 3]) +- >>> v1.set(3, 4) +- pvector([1, 2, 3, 4]) +- >>> v1.set(-1, 4) +- pvector([1, 2, 4]) +- """ +- +- @abstractmethod +- def append(self, val): +- """ +- Return a new vector with val appended. +- +- >>> v1 = v(1, 2) +- >>> v1.append(3) +- pvector([1, 2, 3]) +- """ +- +- @abstractmethod +- def extend(self, obj): +- """ +- Return a new vector with all values in obj appended to it. Obj may be another +- PVector or any other Iterable. +- +- >>> v1 = v(1, 2, 3) +- >>> v1.extend([4, 5]) +- pvector([1, 2, 3, 4, 5]) +- """ +- +- @abstractmethod +- def index(self, value, *args, **kwargs): +- """ +- Return first index of value. Additional indexes may be supplied to limit the search to a +- sub range of the vector. +- +- >>> v1 = v(1, 2, 3, 4, 3) +- >>> v1.index(3) +- 2 +- >>> v1.index(3, 3, 5) +- 4 +- """ +- +- @abstractmethod +- def count(self, value): +- """ +- Return the number of times that value appears in the vector. +- +- >>> v1 = v(1, 4, 3, 4) +- >>> v1.count(4) +- 2 +- """ +- +- @abstractmethod +- def transform(self, *transformations): +- """ +- Transform arbitrarily complex combinations of PVectors and PMaps. A transformation +- consists of two parts. One match expression that specifies which elements to transform +- and one transformation function that performs the actual transformation. +- +- >>> from pyrsistent import freeze, ny +- >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'}, +- ... {'author': 'Steve', 'content': 'A slightly longer article'}], +- ... 'weather': {'temperature': '11C', 'wind': '5m/s'}}) +- >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c) +- >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c) +- >>> very_short_news.articles[0].content +- 'A short article' +- >>> very_short_news.articles[1].content +- 'A slightly long...' +- +- When nothing has been transformed the original data structure is kept +- +- >>> short_news is news_paper +- True +- >>> very_short_news is news_paper +- False +- >>> very_short_news.articles[0] is news_paper.articles[0] +- True +- """ +- +- @abstractmethod +- def delete(self, index, stop=None): +- """ +- Delete a portion of the vector by index or range. +- +- >>> v1 = v(1, 2, 3, 4, 5) +- >>> v1.delete(1) +- pvector([1, 3, 4, 5]) +- >>> v1.delete(1, 3) +- pvector([1, 4, 5]) +- """ +- +- @abstractmethod +- def remove(self, value): +- """ +- Remove the first occurrence of a value from the vector. +- +- >>> v1 = v(1, 2, 3, 2, 1) +- >>> v2 = v1.remove(1) +- >>> v2 +- pvector([2, 3, 2, 1]) +- >>> v2.remove(1) +- pvector([2, 3, 2]) +- """ +- +- +-_EMPTY_PVECTOR = PythonPVector(0, SHIFT, [], []) +-PVector.register(PythonPVector) +-Sequence.register(PVector) +-Hashable.register(PVector) +- +-def python_pvector(iterable=()): +- """ +- Create a new persistent vector containing the elements in iterable. 
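The evolver described above is the idiomatic way to batch many updates into one new vector; a minimal sketch mirroring the removed docstrings:

```python
from pyrsistent import v

v1 = v(1, 2, 3)
e = v1.evolver()           # edits below accumulate in the evolver only
e[0] = 10
e.append(4)
e.extend([5, 6])
v2 = e.persistent()

assert v1 == v(1, 2, 3)    # original vector unchanged
assert v2 == v(10, 2, 3, 4, 5, 6)
```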
+- +- >>> v1 = pvector([1, 2, 3]) +- >>> v1 +- pvector([1, 2, 3]) +- """ +- return _EMPTY_PVECTOR.extend(iterable) +- +-try: +- # Use the C extension as underlying trie implementation if it is available +- import os +- if os.environ.get('PYRSISTENT_NO_C_EXTENSION'): +- pvector = python_pvector +- else: +- from pvectorc import pvector +- PVector.register(type(pvector())) +-except ImportError: +- pvector = python_pvector +- +- +-def v(*elements): +- """ +- Create a new persistent vector containing all parameters to this function. +- +- >>> v1 = v(1, 2, 3) +- >>> v1 +- pvector([1, 2, 3]) +- """ +- return pvector(elements) +diff --git a/src/poetry/core/_vendor/pyrsistent/_toolz.py b/src/poetry/core/_vendor/pyrsistent/_toolz.py +deleted file mode 100644 +index a7faed1..0000000 +--- a/src/poetry/core/_vendor/pyrsistent/_toolz.py ++++ /dev/null +@@ -1,83 +0,0 @@ +-""" +-Functionality copied from the toolz package to avoid having +-to add toolz as a dependency. +- +-See https://github.com/pytoolz/toolz/. +- +-toolz is relased under BSD licence. Below is the licence text +-from toolz as it appeared when copying the code. +- +--------------------------------------------------------------- +- +-Copyright (c) 2013 Matthew Rocklin +- +-All rights reserved. +- +-Redistribution and use in source and binary forms, with or without +-modification, are permitted provided that the following conditions are met: +- +- a. Redistributions of source code must retain the above copyright notice, +- this list of conditions and the following disclaimer. +- b. Redistributions in binary form must reproduce the above copyright +- notice, this list of conditions and the following disclaimer in the +- documentation and/or other materials provided with the distribution. +- c. Neither the name of toolz nor the names of its contributors +- may be used to endorse or promote products derived from this software +- without specific prior written permission. +- +- +-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +-ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR +-ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +-CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +-LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +-OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +-DAMAGE. +-""" +-import operator +-from functools import reduce +- +- +-def get_in(keys, coll, default=None, no_default=False): +- """ +- NB: This is a straight copy of the get_in implementation found in +- the toolz library (https://github.com/pytoolz/toolz/). It works +- with persistent data structures as well as the corresponding +- datastructures from the stdlib. +- +- Returns coll[i0][i1]...[iX] where [i0, i1, ..., iX]==keys. +- +- If coll[i0][i1]...[iX] cannot be found, returns ``default``, unless +- ``no_default`` is specified, then it raises KeyError or IndexError. +- +- ``get_in`` is a generalization of ``operator.getitem`` for nested data +- structures such as dictionaries and lists. +- >>> from pyrsistent import freeze +- >>> transaction = freeze({'name': 'Alice', +- ... 
'purchase': {'items': ['Apple', 'Orange'], +- ... 'costs': [0.50, 1.25]}, +- ... 'credit card': '5555-1234-1234-1234'}) +- >>> get_in(['purchase', 'items', 0], transaction) +- 'Apple' +- >>> get_in(['name'], transaction) +- 'Alice' +- >>> get_in(['purchase', 'total'], transaction) +- >>> get_in(['purchase', 'items', 'apple'], transaction) +- >>> get_in(['purchase', 'items', 10], transaction) +- >>> get_in(['purchase', 'total'], transaction, 0) +- 0 +- >>> get_in(['y'], {}, no_default=True) +- Traceback (most recent call last): +- ... +- KeyError: 'y' +- """ +- try: +- return reduce(operator.getitem, keys, coll) +- except (KeyError, IndexError, TypeError): +- if no_default: +- raise +- return default +diff --git a/src/poetry/core/_vendor/pyrsistent/_transformations.py b/src/poetry/core/_vendor/pyrsistent/_transformations.py +deleted file mode 100644 +index 7544843..0000000 +--- a/src/poetry/core/_vendor/pyrsistent/_transformations.py ++++ /dev/null +@@ -1,139 +0,0 @@ +-import re +-try: +- from inspect import Parameter, signature +-except ImportError: +- signature = None +- from inspect import getfullargspec +- +- +-_EMPTY_SENTINEL = object() +- +- +-def inc(x): +- """ Add one to the current value """ +- return x + 1 +- +- +-def dec(x): +- """ Subtract one from the current value """ +- return x - 1 +- +- +-def discard(evolver, key): +- """ Discard the element and returns a structure without the discarded elements """ +- try: +- del evolver[key] +- except KeyError: +- pass +- +- +-# Matchers +-def rex(expr): +- """ Regular expression matcher to use together with transform functions """ +- r = re.compile(expr) +- return lambda key: isinstance(key, str) and r.match(key) +- +- +-def ny(_): +- """ Matcher that matches any value """ +- return True +- +- +-# Support functions +-def _chunks(l, n): +- for i in range(0, len(l), n): +- yield l[i:i + n] +- +- +-def transform(structure, transformations): +- r = structure +- for path, command in _chunks(transformations, 2): +- r = _do_to_path(r, path, command) +- return r +- +- +-def _do_to_path(structure, path, command): +- if not path: +- return command(structure) if callable(command) else command +- +- kvs = _get_keys_and_values(structure, path[0]) +- return _update_structure(structure, kvs, path[1:], command) +- +- +-def _items(structure): +- try: +- return structure.items() +- except AttributeError: +- # Support wider range of structures by adding a transform_items() or similar? +- return list(enumerate(structure)) +- +- +-def _get(structure, key, default): +- try: +- if hasattr(structure, '__getitem__'): +- return structure[key] +- +- return getattr(structure, key) +- +- except (IndexError, KeyError): +- return default +- +- +-def _get_keys_and_values(structure, key_spec): +- if callable(key_spec): +- # Support predicates as callable objects in the path +- arity = _get_arity(key_spec) +- if arity == 1: +- # Unary predicates are called with the "key" of the path +- # - eg a key in a mapping, an index in a sequence. +- return [(k, v) for k, v in _items(structure) if key_spec(k)] +- elif arity == 2: +- # Binary predicates are called with the key and the corresponding +- # value. +- return [(k, v) for k, v in _items(structure) if key_spec(k, v)] +- else: +- # Other arities are an error. +- raise ValueError( +- "callable in transform path must take 1 or 2 arguments" +- ) +- +- # Non-callables are used as-is as a key. 
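Putting the pieces of this module together, a small sketch of path matchers and `discard` (all names are exported by pyrsistent; behavior as implemented above):

```python
from pyrsistent import freeze, ny, discard

data = freeze({'users': {'alice': {'age': 30}, 'bob': {'age': 25}},
               'tmp': {'cache': True}})

older = data.transform(['users', ny, 'age'], lambda a: a + 1)  # ny matches any key
no_tmp = data.transform(['tmp'], discard)                      # discard deletes the subtree

assert older['users']['alice']['age'] == 31
assert 'tmp' not in no_tmp
```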
+- return [(key_spec, _get(structure, key_spec, _EMPTY_SENTINEL))] +- +- +-if signature is None: +- def _get_arity(f): +- argspec = getfullargspec(f) +- return len(argspec.args) - len(argspec.defaults or ()) +-else: +- def _get_arity(f): +- return sum( +- 1 +- for p +- in signature(f).parameters.values() +- if p.default is Parameter.empty +- and p.kind in (Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD) +- ) +- +- +-def _update_structure(structure, kvs, path, command): +- from pyrsistent._pmap import pmap +- e = structure.evolver() +- if not path and command is discard: +- # Do this in reverse to avoid index problems with vectors. See #92. +- for k, v in reversed(kvs): +- discard(e, k) +- else: +- for k, v in kvs: +- is_empty = False +- if v is _EMPTY_SENTINEL: +- # Allow expansion of structure but make sure to cover the case +- # when an empty pmap is added as leaf node. See #154. +- is_empty = True +- v = pmap() +- +- result = _do_to_path(v, path, command) +- if result is not v or is_empty: +- e[k] = result +- +- return e.persistent() +diff --git a/src/poetry/core/_vendor/pyrsistent/py.typed b/src/poetry/core/_vendor/pyrsistent/py.typed +deleted file mode 100644 +index e69de29..0000000 +diff --git a/src/poetry/core/_vendor/pyrsistent/typing.py b/src/poetry/core/_vendor/pyrsistent/typing.py +deleted file mode 100644 +index 6a86c83..0000000 +--- a/src/poetry/core/_vendor/pyrsistent/typing.py ++++ /dev/null +@@ -1,80 +0,0 @@ +-"""Helpers for use with type annotation. +- +-Use the empty classes in this module when annotating the types of Pyrsistent +-objects, instead of using the actual collection class. +- +-For example, +- +- from pyrsistent import pvector +- from pyrsistent.typing import PVector +- +- myvector: PVector[str] = pvector(['a', 'b', 'c']) +- +-""" +-from __future__ import absolute_import +- +-try: +- from typing import Container +- from typing import Hashable +- from typing import Generic +- from typing import Iterable +- from typing import Mapping +- from typing import Sequence +- from typing import Sized +- from typing import TypeVar +- +- __all__ = [ +- 'CheckedPMap', +- 'CheckedPSet', +- 'CheckedPVector', +- 'PBag', +- 'PDeque', +- 'PList', +- 'PMap', +- 'PSet', +- 'PVector', +- ] +- +- T = TypeVar('T') +- KT = TypeVar('KT') +- VT = TypeVar('VT') +- +- class CheckedPMap(Mapping[KT, VT], Hashable): +- pass +- +- # PSet.add and PSet.discard have different type signatures than that of Set. +- class CheckedPSet(Generic[T], Hashable): +- pass +- +- class CheckedPVector(Sequence[T], Hashable): +- pass +- +- class PBag(Container[T], Iterable[T], Sized, Hashable): +- pass +- +- class PDeque(Sequence[T], Hashable): +- pass +- +- class PList(Sequence[T], Hashable): +- pass +- +- class PMap(Mapping[KT, VT], Hashable): +- pass +- +- # PSet.add and PSet.discard have different type signatures than that of Set. 
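As the module docstring above notes, these empty generics exist purely for annotations; a minimal sketch:

```python
from pyrsistent import pmap, pvector
from pyrsistent.typing import PMap, PVector

counts: PMap[str, int] = pmap({'a': 1})    # annotate with the typing helper,
names: PVector[str] = pvector(['x', 'y'])  # not with the concrete class
```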
+- class PSet(Generic[T], Hashable): +- pass +- +- class PVector(Sequence[T], Hashable): +- pass +- +- class PVectorEvolver(Generic[T]): +- pass +- +- class PMapEvolver(Generic[KT, VT]): +- pass +- +- class PSetEvolver(Generic[T]): +- pass +-except ImportError: +- pass +diff --git a/src/poetry/core/_vendor/tomlkit/LICENSE b/src/poetry/core/_vendor/tomlkit/LICENSE +deleted file mode 100644 +index 44cf2b3..0000000 +--- a/src/poetry/core/_vendor/tomlkit/LICENSE ++++ /dev/null +@@ -1,20 +0,0 @@ +-Copyright (c) 2018 Sébastien Eustace +- +-Permission is hereby granted, free of charge, to any person obtaining +-a copy of this software and associated documentation files (the +-"Software"), to deal in the Software without restriction, including +-without limitation the rights to use, copy, modify, merge, publish, +-distribute, sublicense, and/or sell copies of the Software, and to +-permit persons to whom the Software is furnished to do so, subject to +-the following conditions: +- +-The above copyright notice and this permission notice shall be +-included in all copies or substantial portions of the Software. +- +-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +diff --git a/src/poetry/core/_vendor/tomlkit/__init__.py b/src/poetry/core/_vendor/tomlkit/__init__.py +deleted file mode 100644 +index ade3eea..0000000 +--- a/src/poetry/core/_vendor/tomlkit/__init__.py ++++ /dev/null +@@ -1,55 +0,0 @@ +-from tomlkit.api import TOMLDocument +-from tomlkit.api import aot +-from tomlkit.api import array +-from tomlkit.api import boolean +-from tomlkit.api import comment +-from tomlkit.api import date +-from tomlkit.api import datetime +-from tomlkit.api import document +-from tomlkit.api import dump +-from tomlkit.api import dumps +-from tomlkit.api import float_ +-from tomlkit.api import inline_table +-from tomlkit.api import integer +-from tomlkit.api import item +-from tomlkit.api import key +-from tomlkit.api import key_value +-from tomlkit.api import load +-from tomlkit.api import loads +-from tomlkit.api import nl +-from tomlkit.api import parse +-from tomlkit.api import string +-from tomlkit.api import table +-from tomlkit.api import time +-from tomlkit.api import value +-from tomlkit.api import ws +- +- +-__version__ = "0.11.4" +-__all__ = [ +- "aot", +- "array", +- "boolean", +- "comment", +- "date", +- "datetime", +- "document", +- "dump", +- "dumps", +- "float_", +- "inline_table", +- "integer", +- "item", +- "key", +- "key_value", +- "load", +- "loads", +- "nl", +- "parse", +- "string", +- "table", +- "time", +- "TOMLDocument", +- "value", +- "ws", +-] +diff --git a/src/poetry/core/_vendor/tomlkit/_compat.py b/src/poetry/core/_vendor/tomlkit/_compat.py +deleted file mode 100644 +index f1d3bcc..0000000 +--- a/src/poetry/core/_vendor/tomlkit/_compat.py ++++ /dev/null +@@ -1,22 +0,0 @@ +-import contextlib +-import sys +- +-from typing import Any +-from typing import List +-from typing import Optional +- +- +-PY38 = sys.version_info >= (3, 8) +- +- +-def decode(string: Any, encodings: Optional[List[str]] = None): +- if not isinstance(string, bytes): +- return string +- +- encodings 
= encodings or ["utf-8", "latin1", "ascii"] +- +- for encoding in encodings: +- with contextlib.suppress(UnicodeEncodeError, UnicodeDecodeError): +- return string.decode(encoding) +- +- return string.decode(encodings[0], errors="ignore") +diff --git a/src/poetry/core/_vendor/tomlkit/_utils.py b/src/poetry/core/_vendor/tomlkit/_utils.py +deleted file mode 100644 +index 07ed7ba..0000000 +--- a/src/poetry/core/_vendor/tomlkit/_utils.py ++++ /dev/null +@@ -1,155 +0,0 @@ +-import re +- +-from collections.abc import Mapping +-from datetime import date +-from datetime import datetime +-from datetime import time +-from datetime import timedelta +-from datetime import timezone +-from typing import Collection +-from typing import Union +- +-from tomlkit._compat import decode +- +- +-RFC_3339_LOOSE = re.compile( +- "^" +- r"(([0-9]+)-(\d{2})-(\d{2}))?" # Date +- "(" +- "([Tt ])?" # Separator +- r"(\d{2}):(\d{2}):(\d{2})(\.([0-9]+))?" # Time +- r"(([Zz])|([\+|\-]([01][0-9]|2[0-3]):([0-5][0-9])))?" # Timezone +- ")?" +- "$" +-) +- +-RFC_3339_DATETIME = re.compile( +- "^" +- "([0-9]+)-(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01])" # Date +- "[Tt ]" # Separator +- r"([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9]|60)(\.([0-9]+))?" # Time +- r"(([Zz])|([\+|\-]([01][0-9]|2[0-3]):([0-5][0-9])))?" # Timezone +- "$" +-) +- +-RFC_3339_DATE = re.compile("^([0-9]+)-(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01])$") +- +-RFC_3339_TIME = re.compile( +- r"^([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9]|60)(\.([0-9]+))?$" +-) +- +-_utc = timezone(timedelta(), "UTC") +- +- +-def parse_rfc3339(string: str) -> Union[datetime, date, time]: +- m = RFC_3339_DATETIME.match(string) +- if m: +- year = int(m.group(1)) +- month = int(m.group(2)) +- day = int(m.group(3)) +- hour = int(m.group(4)) +- minute = int(m.group(5)) +- second = int(m.group(6)) +- microsecond = 0 +- +- if m.group(7): +- microsecond = int((f"{m.group(8):<06s}")[:6]) +- +- if m.group(9): +- # Timezone +- tz = m.group(9) +- if tz.upper() == "Z": +- tzinfo = _utc +- else: +- sign = m.group(11)[0] +- hour_offset, minute_offset = int(m.group(12)), int(m.group(13)) +- offset = timedelta(seconds=hour_offset * 3600 + minute_offset * 60) +- if sign == "-": +- offset = -offset +- +- tzinfo = timezone(offset, f"{sign}{m.group(12)}:{m.group(13)}") +- +- return datetime( +- year, month, day, hour, minute, second, microsecond, tzinfo=tzinfo +- ) +- else: +- return datetime(year, month, day, hour, minute, second, microsecond) +- +- m = RFC_3339_DATE.match(string) +- if m: +- year = int(m.group(1)) +- month = int(m.group(2)) +- day = int(m.group(3)) +- +- return date(year, month, day) +- +- m = RFC_3339_TIME.match(string) +- if m: +- hour = int(m.group(1)) +- minute = int(m.group(2)) +- second = int(m.group(3)) +- microsecond = 0 +- +- if m.group(4): +- microsecond = int((f"{m.group(5):<06s}")[:6]) +- +- return time(hour, minute, second, microsecond) +- +- raise ValueError("Invalid RFC 339 string") +- +- +-# https://toml.io/en/v1.0.0#string +-CONTROL_CHARS = frozenset(chr(c) for c in range(0x20)) | {chr(0x7F)} +-_escaped = { +- "b": "\b", +- "t": "\t", +- "n": "\n", +- "f": "\f", +- "r": "\r", +- '"': '"', +- "\\": "\\", +-} +-_compact_escapes = { +- **{v: f"\\{k}" for k, v in _escaped.items()}, +- '"""': '""\\"', +-} +-_basic_escapes = CONTROL_CHARS | {'"'} +- +- +-def _unicode_escape(seq: str) -> str: +- return "".join(f"\\u{ord(c):04x}" for c in seq) +- +- +-def escape_string(s: str, escape_sequences: Collection[str] = _basic_escapes) -> str: +- s = decode(s) +- +- res = [] +- start = 0 
+- +- def flush(inc=1): +- if start != i: +- res.append(s[start:i]) +- +- return i + inc +- +- i = 0 +- while i < len(s): +- for seq in escape_sequences: +- seq_len = len(seq) +- if s[i:].startswith(seq): +- start = flush(seq_len) +- res.append(_compact_escapes.get(seq) or _unicode_escape(seq)) +- i += seq_len - 1 # fast-forward escape sequence +- i += 1 +- +- flush() +- +- return "".join(res) +- +- +-def merge_dicts(d1: dict, d2: dict) -> dict: +- for k, v in d2.items(): +- if k in d1 and isinstance(d1[k], dict) and isinstance(v, Mapping): +- merge_dicts(d1[k], v) +- else: +- d1[k] = d2[k] +diff --git a/src/poetry/core/_vendor/tomlkit/api.py b/src/poetry/core/_vendor/tomlkit/api.py +deleted file mode 100644 +index 49573b5..0000000 +--- a/src/poetry/core/_vendor/tomlkit/api.py ++++ /dev/null +@@ -1,286 +0,0 @@ +-import datetime as _datetime +- +-from collections.abc import Mapping +-from typing import IO +-from typing import Iterable +-from typing import Tuple +-from typing import Union +- +-from tomlkit._utils import parse_rfc3339 +-from tomlkit.container import Container +-from tomlkit.exceptions import UnexpectedCharError +-from tomlkit.items import AoT +-from tomlkit.items import Array +-from tomlkit.items import Bool +-from tomlkit.items import Comment +-from tomlkit.items import Date +-from tomlkit.items import DateTime +-from tomlkit.items import DottedKey +-from tomlkit.items import Float +-from tomlkit.items import InlineTable +-from tomlkit.items import Integer +-from tomlkit.items import Item as _Item +-from tomlkit.items import Key +-from tomlkit.items import SingleKey +-from tomlkit.items import String +-from tomlkit.items import StringType as _StringType +-from tomlkit.items import Table +-from tomlkit.items import Time +-from tomlkit.items import Trivia +-from tomlkit.items import Whitespace +-from tomlkit.items import item +-from tomlkit.parser import Parser +-from tomlkit.toml_document import TOMLDocument +- +- +-def loads(string: Union[str, bytes]) -> TOMLDocument: +- """ +- Parses a string into a TOMLDocument. +- +- Alias for parse(). +- """ +- return parse(string) +- +- +-def dumps(data: Mapping, sort_keys: bool = False) -> str: +- """ +- Dumps a TOMLDocument into a string. +- """ +- if not isinstance(data, Container) and isinstance(data, Mapping): +- data = item(dict(data), _sort_keys=sort_keys) +- +- try: +- # data should be a `Container` (and therefore implement `as_string`) +- # for all type safe invocations of this function +- return data.as_string() # type: ignore[attr-defined] +- except AttributeError as ex: +- msg = f"Expecting Mapping or TOML Container, {type(data)} given" +- raise TypeError(msg) from ex +- +- +-def load(fp: IO) -> TOMLDocument: +- """ +- Load toml document from a file-like object. +- """ +- return parse(fp.read()) +- +- +-def dump(data: Mapping, fp: IO[str], *, sort_keys: bool = False) -> None: +- """ +- Dump a TOMLDocument into a writable file stream. +- +- :param data: a dict-like object to dump +- :param sort_keys: if true, sort the keys in alphabetic order +- """ +- fp.write(dumps(data, sort_keys=sort_keys)) +- +- +-def parse(string: Union[str, bytes]) -> TOMLDocument: +- """ +- Parses a string or bytes into a TOMLDocument. +- """ +- return Parser(string).parse() +- +- +-def document() -> TOMLDocument: +- """ +- Returns a new TOMLDocument instance. 
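A round-trip sketch of the `parse`/`dumps` pair defined above, assuming tomlkit's documented style-preserving behavior:

```python
import tomlkit

source = '# build settings\nname = "demo"\n'
doc = tomlkit.parse(source)
doc["name"] = "renamed"

out = tomlkit.dumps(doc)
assert "# build settings" in out   # comments survive the round trip
assert 'name = "renamed"' in out
```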
+- """ +- return TOMLDocument() +- +- +-# Items +-def integer(raw: Union[str, int]) -> Integer: +- """Create an integer item from a number or string.""" +- return item(int(raw)) +- +- +-def float_(raw: Union[str, float]) -> Float: +- """Create an float item from a number or string.""" +- return item(float(raw)) +- +- +-def boolean(raw: str) -> Bool: +- """Turn `true` or `false` into a boolean item.""" +- return item(raw == "true") +- +- +-def string( +- raw: str, +- *, +- literal: bool = False, +- multiline: bool = False, +- escape: bool = True, +-) -> String: +- """Create a string item. +- +- By default, this function will create *single line basic* strings, but +- boolean flags (e.g. ``literal=True`` and/or ``multiline=True``) +- can be used for personalization. +- +- For more information, please check the spec: `https://toml.io/en/v1.0.0#string`_. +- +- Common escaping rules will be applied for basic strings. +- This can be controlled by explicitly setting ``escape=False``. +- Please note that, if you disable escaping, you will have to make sure that +- the given strings don't contain any forbidden character or sequence. +- """ +- type_ = _StringType.select(literal, multiline) +- return String.from_raw(raw, type_, escape) +- +- +-def date(raw: str) -> Date: +- """Create a TOML date.""" +- value = parse_rfc3339(raw) +- if not isinstance(value, _datetime.date): +- raise ValueError("date() only accepts date strings.") +- +- return item(value) +- +- +-def time(raw: str) -> Time: +- """Create a TOML time.""" +- value = parse_rfc3339(raw) +- if not isinstance(value, _datetime.time): +- raise ValueError("time() only accepts time strings.") +- +- return item(value) +- +- +-def datetime(raw: str) -> DateTime: +- """Create a TOML datetime.""" +- value = parse_rfc3339(raw) +- if not isinstance(value, _datetime.datetime): +- raise ValueError("datetime() only accepts datetime strings.") +- +- return item(value) +- +- +-def array(raw: str = None) -> Array: +- """Create an array item for its string representation. +- +- :Example: +- +- >>> array("[1, 2, 3]") # Create from a string +- [1, 2, 3] +- >>> a = array() +- >>> a.extend([1, 2, 3]) # Create from a list +- >>> a +- [1, 2, 3] +- """ +- if raw is None: +- raw = "[]" +- +- return value(raw) +- +- +-def table(is_super_table: bool = False) -> Table: +- """Create an empty table. +- +- :param is_super_table: if true, the table is a super table +- +- :Example: +- +- >>> doc = document() +- >>> foo = table(True) +- >>> bar = table() +- >>> bar.update({'x': 1}) +- >>> foo.append('bar', bar) +- >>> doc.append('foo', foo) +- >>> print(doc.as_string()) +- [foo.bar] +- x = 1 +- """ +- return Table(Container(), Trivia(), False, is_super_table) +- +- +-def inline_table() -> InlineTable: +- """Create an inline table. +- +- :Example: +- +- >>> table = inline_table() +- >>> table.update({'x': 1, 'y': 2}) +- >>> print(table.as_string()) +- {x = 1, y = 2} +- """ +- return InlineTable(Container(), Trivia(), new=True) +- +- +-def aot() -> AoT: +- """Create an array of table. +- +- :Example: +- +- >>> doc = document() +- >>> aot = aot() +- >>> aot.append(item({'x': 1})) +- >>> doc.append('foo', aot) +- >>> print(doc.as_string()) +- [[foo]] +- x = 1 +- """ +- return AoT([]) +- +- +-def key(k: Union[str, Iterable[str]]) -> Key: +- """Create a key from a string. When a list of string is given, +- it will create a dotted key. 
+- +- :Example: +- +- >>> doc = document() +- >>> doc.append(key('foo'), 1) +- >>> doc.append(key(['bar', 'baz']), 2) +- >>> print(doc.as_string()) +- foo = 1 +- bar.baz = 2 +- """ +- if isinstance(k, str): +- return SingleKey(k) +- return DottedKey([key(_k) for _k in k]) +- +- +-def value(raw: str) -> _Item: +- """Parse a simple value from a string. +- +- :Example: +- +- >>> value("1") +- 1 +- >>> value("true") +- True +- >>> value("[1, 2, 3]") +- [1, 2, 3] +- """ +- parser = Parser(raw) +- v = parser._parse_value() +- if not parser.end(): +- raise parser.parse_error(UnexpectedCharError, char=parser._current) +- return v +- +- +-def key_value(src: str) -> Tuple[Key, _Item]: +- """Parse a key-value pair from a string. +- +- :Example: +- +- >>> key_value("foo = 1") +- (Key('foo'), 1) +- """ +- return Parser(src)._parse_key_value() +- +- +-def ws(src: str) -> Whitespace: +- """Create a whitespace from a string.""" +- return Whitespace(src, fixed=True) +- +- +-def nl() -> Whitespace: +- """Create a newline item.""" +- return ws("\n") +- +- +-def comment(string: str) -> Comment: +- """Create a comment item.""" +- return Comment(Trivia(comment_ws=" ", comment="# " + string)) +diff --git a/src/poetry/core/_vendor/tomlkit/container.py b/src/poetry/core/_vendor/tomlkit/container.py +deleted file mode 100644 +index 1a7e541..0000000 +--- a/src/poetry/core/_vendor/tomlkit/container.py ++++ /dev/null +@@ -1,907 +0,0 @@ +-import copy +- +-from typing import Any +-from typing import Dict +-from typing import Iterator +-from typing import List +-from typing import Optional +-from typing import Tuple +-from typing import Union +- +-from tomlkit._compat import decode +-from tomlkit._utils import merge_dicts +-from tomlkit.exceptions import KeyAlreadyPresent +-from tomlkit.exceptions import NonExistentKey +-from tomlkit.exceptions import TOMLKitError +-from tomlkit.items import AoT +-from tomlkit.items import Comment +-from tomlkit.items import Item +-from tomlkit.items import Key +-from tomlkit.items import Null +-from tomlkit.items import SingleKey +-from tomlkit.items import Table +-from tomlkit.items import Trivia +-from tomlkit.items import Whitespace +-from tomlkit.items import _CustomDict +-from tomlkit.items import item as _item +- +- +-_NOT_SET = object() +- +- +-class Container(_CustomDict): +- """ +- A container for items within a TOMLDocument. +- +- This class implements the `dict` interface with copy/deepcopy protocol. 
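`value()` and `key_value()` give direct access to the parser for TOML fragments, as their docstrings above show:

```python
import tomlkit

arr = tomlkit.value("[1, 2, 3]")             # parse a bare TOML value
assert arr == [1, 2, 3]

key, val = tomlkit.key_value("answer = 42")  # parse a single pair
assert val == 42                             # items compare equal to plain values
```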
+- """ +- +- def __init__(self, parsed: bool = False) -> None: +- self._map: Dict[Key, int] = {} +- self._body: List[Tuple[Optional[Key], Item]] = [] +- self._parsed = parsed +- self._table_keys = [] +- +- @property +- def body(self) -> List[Tuple[Optional[Key], Item]]: +- return self._body +- +- def unwrap(self) -> str: +- unwrapped = {} +- for k, v in self.items(): +- if k is None: +- continue +- +- if isinstance(k, Key): +- k = k.key +- +- if isinstance(v, Item): +- v = v.unwrap() +- +- if k in unwrapped: +- merge_dicts(unwrapped[k], v) +- else: +- unwrapped[k] = v +- +- return unwrapped +- +- @property +- def value(self) -> Dict[Any, Any]: +- d = {} +- for k, v in self._body: +- if k is None: +- continue +- +- k = k.key +- v = v.value +- +- if isinstance(v, Container): +- v = v.value +- +- if k in d: +- merge_dicts(d[k], v) +- else: +- d[k] = v +- +- return d +- +- def parsing(self, parsing: bool) -> None: +- self._parsed = parsing +- +- for _, v in self._body: +- if isinstance(v, Table): +- v.value.parsing(parsing) +- elif isinstance(v, AoT): +- for t in v.body: +- t.value.parsing(parsing) +- +- def add( +- self, key: Union[Key, Item, str], item: Optional[Item] = None +- ) -> "Container": +- """ +- Adds an item to the current Container. +- +- :Example: +- +- >>> # add a key-value pair +- >>> doc.add('key', 'value') +- >>> # add a comment or whitespace or newline +- >>> doc.add(comment('# comment')) +- """ +- if item is None: +- if not isinstance(key, (Comment, Whitespace)): +- raise ValueError( +- "Non comment/whitespace items must have an associated key" +- ) +- +- key, item = None, key +- +- return self.append(key, item) +- +- def _handle_dotted_key(self, key: Key, value: Item) -> None: +- names = tuple(iter(key)) +- name = names[0] +- name._dotted = True +- if name in self: +- if not isinstance(value, Table): +- table = Table(Container(True), Trivia(), False, is_super_table=True) +- _table = table +- for i, _name in enumerate(names[1:]): +- if i == len(names) - 2: +- _name.sep = key.sep +- +- _table.append(_name, value) +- else: +- _name._dotted = True +- _table.append( +- _name, +- Table( +- Container(True), +- Trivia(), +- False, +- is_super_table=i < len(names) - 2, +- ), +- ) +- +- _table = _table[_name] +- +- value = table +- +- self.append(name, value) +- +- return +- else: +- table = Table(Container(True), Trivia(), False, is_super_table=True) +- self.append(name, table) +- +- for i, _name in enumerate(names[1:]): +- if i == len(names) - 2: +- _name.sep = key.sep +- +- table.append(_name, value) +- else: +- _name._dotted = True +- if _name in table.value: +- table = table.value[_name] +- else: +- table.append( +- _name, +- Table( +- Container(True), +- Trivia(), +- False, +- is_super_table=i < len(names) - 2, +- ), +- ) +- +- table = table[_name] +- +- def append(self, key: Union[Key, str, None], item: Item) -> "Container": +- """Similar to :meth:`add` but both key and value must be given.""" +- if not isinstance(key, Key) and key is not None: +- key = SingleKey(key) +- +- if not isinstance(item, Item): +- item = _item(item) +- +- if key is not None and key.is_multi(): +- self._handle_dotted_key(key, item) +- return self +- +- if isinstance(item, (AoT, Table)) and item.name is None: +- item.name = key.key +- +- prev = self._previous_item() +- prev_ws = isinstance(prev, Whitespace) or ends_with_whitespace(prev) +- if isinstance(item, Table): +- if item.name != key.key: +- item.invalidate_display_name() +- if self._body and not (self._parsed or item.trivia.indent or prev_ws): 
+- item.trivia.indent = "\n" +- +- if isinstance(item, AoT) and self._body and not self._parsed: +- item.invalidate_display_name() +- if item and not ("\n" in item[0].trivia.indent or prev_ws): +- item[0].trivia.indent = "\n" + item[0].trivia.indent +- +- if key is not None and key in self: +- current_idx = self._map[key] +- if isinstance(current_idx, tuple): +- current_body_element = self._body[current_idx[-1]] +- else: +- current_body_element = self._body[current_idx] +- +- current = current_body_element[1] +- +- if isinstance(item, Table): +- if not isinstance(current, (Table, AoT)): +- raise KeyAlreadyPresent(key) +- +- if item.is_aot_element(): +- # New AoT element found later on +- # Adding it to the current AoT +- if not isinstance(current, AoT): +- current = AoT([current, item], parsed=self._parsed) +- +- self._replace(key, key, current) +- else: +- current.append(item) +- +- return self +- elif current.is_aot(): +- if not item.is_aot_element(): +- # Tried to define a table after an AoT with the same name. +- raise KeyAlreadyPresent(key) +- +- current.append(item) +- +- return self +- elif current.is_super_table(): +- if item.is_super_table(): +- # We need to merge both super tables +- if ( +- self._table_keys[-1] != current_body_element[0] +- or key.is_dotted() +- or current_body_element[0].is_dotted() +- ): +- if not isinstance(current_idx, tuple): +- current_idx = (current_idx,) +- +- self._map[key] = current_idx + (len(self._body),) +- self._body.append((key, item)) +- self._table_keys.append(key) +- +- # Building a temporary proxy to check for errors +- OutOfOrderTableProxy(self, self._map[key]) +- +- return self +- +- # Create a new element to replace the old one +- current = copy.deepcopy(current) +- for k, v in item.value.body: +- current.append(k, v) +- self._body[ +- current_idx[-1] +- if isinstance(current_idx, tuple) +- else current_idx +- ] = (current_body_element[0], current) +- +- return self +- elif current_body_element[0].is_dotted(): +- raise TOMLKitError("Redefinition of an existing table") +- elif not item.is_super_table(): +- raise KeyAlreadyPresent(key) +- elif isinstance(item, AoT): +- if not isinstance(current, AoT): +- # Tried to define an AoT after a table with the same name. 
+- raise KeyAlreadyPresent(key) +- +- for table in item.body: +- current.append(table) +- +- return self +- else: +- raise KeyAlreadyPresent(key) +- +- is_table = isinstance(item, (Table, AoT)) +- if key is not None and self._body and not self._parsed: +- # If there is already at least one table in the current container +- # and the given item is not a table, we need to find the last +- # item that is not a table and insert after it +- # If no such item exists, insert at the top of the table +- key_after = None +- for i, (k, v) in enumerate(self._body): +- if isinstance(v, Null): +- continue # Null elements are inserted after deletion +- +- if isinstance(v, Whitespace) and not v.is_fixed(): +- continue +- +- if not is_table and isinstance(v, (Table, AoT)): +- break +- +- key_after = k or i # last scalar, Array or InlineTable value +- +- if key_after is not None: +- if isinstance(key_after, int): +- if key_after + 1 < len(self._body): +- return self._insert_at(key_after + 1, key, item) +- else: +- previous_item = self._body[-1][1] +- if not ( +- isinstance(previous_item, Whitespace) +- or ends_with_whitespace(previous_item) +- or is_table +- or "\n" in previous_item.trivia.trail +- ): +- previous_item.trivia.trail += "\n" +- else: +- return self._insert_after(key_after, key, item) +- else: +- return self._insert_at(0, key, item) +- +- if key in self._map: +- current_idx = self._map[key] +- if isinstance(current_idx, tuple): +- current_idx = current_idx[-1] +- +- current = self._body[current_idx][1] +- if key is not None and not isinstance(current, Table): +- raise KeyAlreadyPresent(key) +- +- # Adding sub tables to a currently existing table +- if not isinstance(current_idx, tuple): +- current_idx = (current_idx,) +- +- self._map[key] = current_idx + (len(self._body),) +- else: +- self._map[key] = len(self._body) +- +- self._body.append((key, item)) +- if item.is_table(): +- self._table_keys.append(key) +- +- if key is not None: +- dict.__setitem__(self, key.key, item.value) +- +- return self +- +- def _remove_at(self, idx: int) -> None: +- key = self._body[idx][0] +- index = self._map.get(key) +- if index is None: +- raise NonExistentKey(key) +- self._body[idx] = (None, Null()) +- +- if isinstance(index, tuple): +- index = list(index) +- index.remove(idx) +- if len(index) == 1: +- index = index.pop() +- else: +- index = tuple(index) +- self._map[key] = index +- else: +- dict.__delitem__(self, key.key) +- self._map.pop(key) +- +- def remove(self, key: Union[Key, str]) -> "Container": +- """Remove a key from the container.""" +- if not isinstance(key, Key): +- key = SingleKey(key) +- +- idx = self._map.pop(key, None) +- if idx is None: +- raise NonExistentKey(key) +- +- if isinstance(idx, tuple): +- for i in idx: +- self._body[i] = (None, Null()) +- else: +- self._body[idx] = (None, Null()) +- +- dict.__delitem__(self, key.key) +- +- return self +- +- def _insert_after( +- self, key: Union[Key, str], other_key: Union[Key, str], item: Any +- ) -> "Container": +- if key is None: +- raise ValueError("Key cannot be null in insert_after()") +- +- if key not in self: +- raise NonExistentKey(key) +- +- if not isinstance(key, Key): +- key = SingleKey(key) +- +- if not isinstance(other_key, Key): +- other_key = SingleKey(other_key) +- +- item = _item(item) +- +- idx = self._map[key] +- # Insert after the max index if there are many. 
+-        if isinstance(idx, tuple):
+-            idx = max(idx)
+-        current_item = self._body[idx][1]
+-        if "\n" not in current_item.trivia.trail:
+-            current_item.trivia.trail += "\n"
+-
+-        # Increment indices after the current index
+-        for k, v in self._map.items():
+-            if isinstance(v, tuple):
+-                new_indices = []
+-                for v_ in v:
+-                    if v_ > idx:
+-                        v_ = v_ + 1
+-
+-                    new_indices.append(v_)
+-
+-                self._map[k] = tuple(new_indices)
+-            elif v > idx:
+-                self._map[k] = v + 1
+-
+-        self._map[other_key] = idx + 1
+-        self._body.insert(idx + 1, (other_key, item))
+-
+-        if key is not None:
+-            dict.__setitem__(self, other_key.key, item.value)
+-
+-        return self
+-
+-    def _insert_at(self, idx: int, key: Union[Key, str], item: Any) -> "Container":
+-        if idx > len(self._body) - 1:
+-            raise ValueError(f"Unable to insert at position {idx}")
+-
+-        if not isinstance(key, Key):
+-            key = SingleKey(key)
+-
+-        item = _item(item)
+-
+-        if idx > 0:
+-            previous_item = self._body[idx - 1][1]
+-            if not (
+-                isinstance(previous_item, Whitespace)
+-                or ends_with_whitespace(previous_item)
+-                or isinstance(item, (AoT, Table))
+-                or "\n" in previous_item.trivia.trail
+-            ):
+-                previous_item.trivia.trail += "\n"
+-
+-        # Increment indices after the current index
+-        for k, v in self._map.items():
+-            if isinstance(v, tuple):
+-                new_indices = []
+-                for v_ in v:
+-                    if v_ >= idx:
+-                        v_ = v_ + 1
+-
+-                    new_indices.append(v_)
+-
+-                self._map[k] = tuple(new_indices)
+-            elif v >= idx:
+-                self._map[k] = v + 1
+-
+-        self._map[key] = idx
+-        self._body.insert(idx, (key, item))
+-
+-        if key is not None:
+-            dict.__setitem__(self, key.key, item.value)
+-
+-        return self
+-
+-    def item(self, key: Union[Key, str]) -> Item:
+-        """Get an item for the given key."""
+-        if not isinstance(key, Key):
+-            key = SingleKey(key)
+-
+-        idx = self._map.get(key, None)
+-        if idx is None:
+-            raise NonExistentKey(key)
+-
+-        if isinstance(idx, tuple):
+-            # The item we are getting is an out of order table
+-            # so we need a proxy to retrieve the proper objects
+-            # from the parent container
+-            return OutOfOrderTableProxy(self, idx)
+-
+-        return self._body[idx][1]
+-
+-    def last_item(self) -> Optional[Item]:
+-        """Get the last item."""
+-        if self._body:
+-            return self._body[-1][1]
+-
+-    def as_string(self) -> str:
+-        """Render as TOML string."""
+-        s = ""
+-        for k, v in self._body:
+-            if k is not None:
+-                if isinstance(v, Table):
+-                    s += self._render_table(k, v)
+-                elif isinstance(v, AoT):
+-                    s += self._render_aot(k, v)
+-                else:
+-                    s += self._render_simple_item(k, v)
+-            else:
+-                s += self._render_simple_item(k, v)
+-
+-        return s
+-
+-    def _render_table(
+-        self, key: Key, table: Table, prefix: Optional[str] = None
+-    ) -> str:
+-        cur = ""
+-
+-        if table.display_name is not None:
+-            _key = table.display_name
+-        else:
+-            _key = key.as_string()
+-
+-        if prefix is not None:
+-            _key = prefix + "." + _key
+-
+-        if not table.is_super_table() or (
+-            any(
+-                not isinstance(v, (Table, AoT, Whitespace, Null))
+-                for _, v in table.value.body
+-            )
+-            and not key.is_dotted()
+-        ):
+-            open_, close = "[", "]"
+-            if table.is_aot_element():
+-                open_, close = "[[", "]]"
+-
+-            newline_in_table_trivia = (
+-                "\n" if "\n" not in table.trivia.trail and len(table.value) > 0 else ""
+-            )
+-            cur += (
+-                f"{table.trivia.indent}"
+-                f"{open_}"
+-                f"{decode(_key)}"
+-                f"{close}"
+-                f"{table.trivia.comment_ws}"
+-                f"{decode(table.trivia.comment)}"
+-                f"{table.trivia.trail}"
+-                f"{newline_in_table_trivia}"
+-            )
+-        elif table.trivia.indent == "\n":
+-            cur += table.trivia.indent
+-
+-        for k, v in table.value.body:
+-            if isinstance(v, Table):
+-                if v.is_super_table():
+-                    if k.is_dotted() and not key.is_dotted():
+-                        # Dotted key inside table
+-                        cur += self._render_table(k, v)
+-                    else:
+-                        cur += self._render_table(k, v, prefix=_key)
+-                else:
+-                    cur += self._render_table(k, v, prefix=_key)
+-            elif isinstance(v, AoT):
+-                cur += self._render_aot(k, v, prefix=_key)
+-            else:
+-                cur += self._render_simple_item(
+-                    k, v, prefix=_key if key.is_dotted() else None
+-                )
+-
+-        return cur
+-
+-    def _render_aot(self, key, aot, prefix=None):
+-        _key = key.as_string()
+-        if prefix is not None:
+-            _key = prefix + "." + _key
+-
+-        cur = ""
+-        _key = decode(_key)
+-        for table in aot.body:
+-            cur += self._render_aot_table(table, prefix=_key)
+-
+-        return cur
+-
+-    def _render_aot_table(self, table: Table, prefix: Optional[str] = None) -> str:
+-        cur = ""
+-
+-        _key = prefix or ""
+-
+-        if not table.is_super_table():
+-            open_, close = "[[", "]]"
+-
+-            cur += (
+-                f"{table.trivia.indent}"
+-                f"{open_}"
+-                f"{decode(_key)}"
+-                f"{close}"
+-                f"{table.trivia.comment_ws}"
+-                f"{decode(table.trivia.comment)}"
+-                f"{table.trivia.trail}"
+-            )
+-
+-        for k, v in table.value.body:
+-            if isinstance(v, Table):
+-                if v.is_super_table():
+-                    if k.is_dotted():
+-                        # Dotted key inside table
+-                        cur += self._render_table(k, v)
+-                    else:
+-                        cur += self._render_table(k, v, prefix=_key)
+-                else:
+-                    cur += self._render_table(k, v, prefix=_key)
+-            elif isinstance(v, AoT):
+-                cur += self._render_aot(k, v, prefix=_key)
+-            else:
+-                cur += self._render_simple_item(k, v)
+-
+-        return cur
+-
+-    def _render_simple_item(self, key, item, prefix=None):
+-        if key is None:
+-            return item.as_string()
+-
+-        _key = key.as_string()
+-        if prefix is not None:
+-            _key = prefix + "." + _key
+-
+-        return (
+-            f"{item.trivia.indent}"
+-            f"{decode(_key)}"
+-            f"{key.sep}"
+-            f"{decode(item.as_string())}"
+-            f"{item.trivia.comment_ws}"
+-            f"{decode(item.trivia.comment)}"
+-            f"{item.trivia.trail}"
+-        )
+-
+-    def __len__(self) -> int:
+-        return dict.__len__(self)
+-
+-    def __iter__(self) -> Iterator[str]:
+-        return iter(dict.keys(self))
+-
+-    # Dictionary methods
+-    def __getitem__(self, key: Union[Key, str]) -> Union[Item, "Container"]:
+-        if not isinstance(key, Key):
+-            key = SingleKey(key)
+-
+-        idx = self._map.get(key, None)
+-        if idx is None:
+-            raise NonExistentKey(key)
+-
+-        if isinstance(idx, tuple):
+-            # The item we are getting is an out of order table
+-            # so we need a proxy to retrieve the proper objects
+-            # from the parent container
+-            return OutOfOrderTableProxy(self, idx)
+-
+-        item = self._body[idx][1]
+-        if item.is_boolean():
+-            return item.value
+-
+-        return item
+-
+-    def __setitem__(self, key: Union[Key, str], value: Any) -> None:
+-        if key is not None and key in self:
+-            old_key = next(filter(lambda k: k == key, self._map))
+-            self._replace(old_key, key, value)
+-        else:
+-            self.append(key, value)
+-
+-    def __delitem__(self, key: Union[Key, str]) -> None:
+-        self.remove(key)
+-
+-    def setdefault(self, key: Union[Key, str], default: Any) -> Any:
+-        super().setdefault(key, default=default)
+-        return self[key]
+-
+-    def _replace(
+-        self, key: Union[Key, str], new_key: Union[Key, str], value: Item
+-    ) -> None:
+-        if not isinstance(key, Key):
+-            key = SingleKey(key)
+-
+-        idx = self._map.get(key, None)
+-        if idx is None:
+-            raise NonExistentKey(key)
+-
+-        self._replace_at(idx, new_key, value)
+-
+-    def _replace_at(
+-        self, idx: Union[int, Tuple[int]], new_key: Union[Key, str], value: Item
+-    ) -> None:
+-        value = _item(value)
+-
+-        if isinstance(idx, tuple):
+-            for i in idx[1:]:
+-                self._body[i] = (None, Null())
+-
+-            idx = idx[0]
+-
+-        k, v = self._body[idx]
+-        if not isinstance(new_key, Key):
+-            if (
+-                isinstance(value, (AoT, Table)) != isinstance(v, (AoT, Table))
+-                or new_key != k.key
+-            ):
+-                new_key = SingleKey(new_key)
+-            else:  # Inherit the sep of the old key
+-                new_key = k
+-
+-        del self._map[k]
+-        self._map[new_key] = idx
+-        if new_key != k:
+-            dict.__delitem__(self, k)
+-
+-        if isinstance(value, (AoT, Table)) != isinstance(v, (AoT, Table)):
+-            # new tables should appear after all non-table values
+-            self.remove(k)
+-            for i in range(idx, len(self._body)):
+-                if isinstance(self._body[i][1], (AoT, Table)):
+-                    self._insert_at(i, new_key, value)
+-                    idx = i
+-                    break
+-            else:
+-                idx = -1
+-                self.append(new_key, value)
+-        else:
+-            # Copying trivia
+-            if not isinstance(value, (Whitespace, AoT)):
+-                value.trivia.indent = v.trivia.indent
+-                value.trivia.comment_ws = value.trivia.comment_ws or v.trivia.comment_ws
+-                value.trivia.comment = value.trivia.comment or v.trivia.comment
+-                value.trivia.trail = v.trivia.trail
+-            self._body[idx] = (new_key, value)
+-
+-        if hasattr(value, "invalidate_display_name"):
+-            value.invalidate_display_name()  # type: ignore[attr-defined]
+-
+-        if isinstance(value, Table):
+-            # Insert a cosmetic new line for tables if:
+-            # - it does not have it yet OR is not followed by one
+-            # - it is not the last item
+-            last, _ = self._previous_item_with_index()
+-            idx = last if idx < 0 else idx
+-            has_ws = ends_with_whitespace(value)
+-            next_ws = idx < last and isinstance(self._body[idx + 1][1], Whitespace)
+-            if idx < last and not (next_ws or has_ws):
+-                value.append(None, Whitespace("\n"))
+-
+-        dict.__setitem__(self, new_key.key, value.value)
+-
+-    def __str__(self) -> str:
+-        return str(self.value)
+-
+-    def __repr__(self) -> str:
+-        return repr(self.value)
+-
+-    def __eq__(self, other: dict) -> bool:
+-        if not isinstance(other, dict):
+-            return NotImplemented
+-
+-        return self.value == other
+-
+-    def _getstate(self, protocol):
+-        return (self._parsed,)
+-
+-    def __reduce__(self):
+-        return self.__reduce_ex__(2)
+-
+-    def __reduce_ex__(self, protocol):
+-        return (
+-            self.__class__,
+-            self._getstate(protocol),
+-            (self._map, self._body, self._parsed, self._table_keys),
+-        )
+-
+-    def __setstate__(self, state):
+-        self._map = state[0]
+-        self._body = state[1]
+-        self._parsed = state[2]
+-        self._table_keys = state[3]
+-
+-        for key, item in self._body:
+-            if key is not None:
+-                dict.__setitem__(self, key.key, item.value)
+-
+-    def copy(self) -> "Container":
+-        return copy.copy(self)
+-
+-    def __copy__(self) -> "Container":
+-        c = self.__class__(self._parsed)
+-        for k, v in dict.items(self):
+-            dict.__setitem__(c, k, v)
+-
+-        c._body += self.body
+-        c._map.update(self._map)
+-
+-        return c
+-
+-    def _previous_item_with_index(
+-        self, idx: Optional[int] = None, ignore=(Null,)
+-    ) -> Optional[Tuple[int, Item]]:
+-        """Find the immediate previous item before index ``idx``"""
+-        if idx is None or idx > len(self._body):
+-            idx = len(self._body)
+-        for i in range(idx - 1, -1, -1):
+-            v = self._body[i][-1]
+-            if not isinstance(v, ignore):
+-                return i, v
+-        return None
+-
+-    def _previous_item(
+-        self, idx: Optional[int] = None, ignore=(Null,)
+-    ) -> Optional[Item]:
+-        """Find the immediate previous item before index ``idx``.
+-        If ``idx`` is not given, the last item is returned.
+-        """
+-        prev = self._previous_item_with_index(idx, ignore)
+-        return prev[-1] if prev else None
+-
+-
+-class OutOfOrderTableProxy(_CustomDict):
+-    def __init__(self, container: Container, indices: Tuple[int]) -> None:
+-        self._container = container
+-        self._internal_container = Container(True)
+-        self._tables = []
+-        self._tables_map = {}
+-
+-        for i in indices:
+-            _, item = self._container._body[i]
+-
+-            if isinstance(item, Table):
+-                self._tables.append(item)
+-                table_idx = len(self._tables) - 1
+-                for k, v in item.value.body:
+-                    self._internal_container.append(k, v)
+-                    self._tables_map[k] = table_idx
+-                    if k is not None:
+-                        dict.__setitem__(self, k.key, v)
+-
+-    def unwrap(self) -> str:
+-        return self._internal_container.unwrap()
+-
+-    @property
+-    def value(self):
+-        return self._internal_container.value
+-
+-    def __getitem__(self, key: Union[Key, str]) -> Any:
+-        if key not in self._internal_container:
+-            raise NonExistentKey(key)
+-
+-        return self._internal_container[key]
+-
+-    def __setitem__(self, key: Union[Key, str], item: Any) -> None:
+-        if key in self._tables_map:
+-            table = self._tables[self._tables_map[key]]
+-            table[key] = item
+-        elif self._tables:
+-            table = self._tables[0]
+-            table[key] = item
+-        else:
+-            self._container[key] = item
+-
+-        self._internal_container[key] = item
+-        if key is not None:
+-            dict.__setitem__(self, key, item)
+-
+-    def _remove_table(self, table: Table) -> None:
+-        """Remove table from the parent container"""
+-        self._tables.remove(table)
+-        for idx, item in enumerate(self._container._body):
+-            if item[1] is table:
+-                self._container._remove_at(idx)
+-                break
+-
+-    def __delitem__(self, key: Union[Key, str]) -> None:
+-        if key in self._tables_map:
+-            table = self._tables[self._tables_map[key]]
+-            del table[key]
+-            if not table and len(self._tables) > 1:
+-                self._remove_table(table)
+-            del self._tables_map[key]
+-        else:
+-            raise NonExistentKey(key)
+-
+-        del self._internal_container[key]
+-        if key is not None:
+-            dict.__delitem__(self, key)
+-
+-    def __iter__(self) -> Iterator[str]:
+-        return iter(dict.keys(self))
+-
+-    def __len__(self) -> int:
+-        return dict.__len__(self)
+-
+-    def setdefault(self, key: Union[Key, str], default: Any) -> Any:
+-        super().setdefault(key, default=default)
+-        return self[key]
+-
+-
+-def ends_with_whitespace(it: Any) -> bool:
+-    """Returns ``True`` if the given item ``it`` is a ``Table`` or ``AoT`` object
+-    ending with a ``Whitespace``.
+-    """
+-    return (
+-        isinstance(it, Table) and isinstance(it.value._previous_item(), Whitespace)
+-    ) or (isinstance(it, AoT) and len(it) > 0 and isinstance(it[-1], Whitespace))
+diff --git a/src/poetry/core/_vendor/tomlkit/exceptions.py b/src/poetry/core/_vendor/tomlkit/exceptions.py
+deleted file mode 100644
+index 3147ca2..0000000
+--- a/src/poetry/core/_vendor/tomlkit/exceptions.py
++++ /dev/null
+@@ -1,227 +0,0 @@
+-from typing import Collection
+-from typing import Optional
+-
+-
+-class TOMLKitError(Exception):
+-
+-    pass
+-
+-
+-class ParseError(ValueError, TOMLKitError):
+-    """
+-    This error occurs when the parser encounters a syntax error
+-    in the TOML being parsed. The error references the line and
+-    location within the line where the error was encountered.
+-    """
+-
+-    def __init__(self, line: int, col: int, message: Optional[str] = None) -> None:
+-        self._line = line
+-        self._col = col
+-
+-        if message is None:
+-            message = "TOML parse error"
+-
+-        super().__init__(f"{message} at line {self._line} col {self._col}")
+-
+-    @property
+-    def line(self):
+-        return self._line
+-
+-    @property
+-    def col(self):
+-        return self._col
+-
+-
+-class MixedArrayTypesError(ParseError):
+-    """
+-    An array was found that had two or more element types.
+-    """
+-
+-    def __init__(self, line: int, col: int) -> None:
+-        message = "Mixed types found in array"
+-
+-        super().__init__(line, col, message=message)
+-
+-
+-class InvalidNumberError(ParseError):
+-    """
+-    A numeric field was improperly specified.
+-    """
+-
+-    def __init__(self, line: int, col: int) -> None:
+-        message = "Invalid number"
+-
+-        super().__init__(line, col, message=message)
+-
+-
+-class InvalidDateTimeError(ParseError):
+-    """
+-    A datetime field was improperly specified.
+-    """
+-
+-    def __init__(self, line: int, col: int) -> None:
+-        message = "Invalid datetime"
+-
+-        super().__init__(line, col, message=message)
+-
+-
+-class InvalidDateError(ParseError):
+-    """
+-    A date field was improperly specified.
+-    """
+-
+-    def __init__(self, line: int, col: int) -> None:
+-        message = "Invalid date"
+-
+-        super().__init__(line, col, message=message)
+-
+-
+-class InvalidTimeError(ParseError):
+-    """
+-    A date field was improperly specified.
+-    """
+-
+-    def __init__(self, line: int, col: int) -> None:
+-        message = "Invalid time"
+-
+-        super().__init__(line, col, message=message)
+-
+-
+-class InvalidNumberOrDateError(ParseError):
+-    """
+-    A numeric or date field was improperly specified.
+-    """
+-
+-    def __init__(self, line: int, col: int) -> None:
+-        message = "Invalid number or date format"
+-
+-        super().__init__(line, col, message=message)
+-
+-
+-class InvalidUnicodeValueError(ParseError):
+-    """
+-    A unicode code was improperly specified.
+- """ +- +- def __init__(self, line: int, col: int) -> None: +- message = "Invalid unicode value" +- +- super().__init__(line, col, message=message) +- +- +-class UnexpectedCharError(ParseError): +- """ +- An unexpected character was found during parsing. +- """ +- +- def __init__(self, line: int, col: int, char: str) -> None: +- message = f"Unexpected character: {repr(char)}" +- +- super().__init__(line, col, message=message) +- +- +-class EmptyKeyError(ParseError): +- """ +- An empty key was found during parsing. +- """ +- +- def __init__(self, line: int, col: int) -> None: +- message = "Empty key" +- +- super().__init__(line, col, message=message) +- +- +-class EmptyTableNameError(ParseError): +- """ +- An empty table name was found during parsing. +- """ +- +- def __init__(self, line: int, col: int) -> None: +- message = "Empty table name" +- +- super().__init__(line, col, message=message) +- +- +-class InvalidCharInStringError(ParseError): +- """ +- The string being parsed contains an invalid character. +- """ +- +- def __init__(self, line: int, col: int, char: str) -> None: +- message = f"Invalid character {repr(char)} in string" +- +- super().__init__(line, col, message=message) +- +- +-class UnexpectedEofError(ParseError): +- """ +- The TOML being parsed ended before the end of a statement. +- """ +- +- def __init__(self, line: int, col: int) -> None: +- message = "Unexpected end of file" +- +- super().__init__(line, col, message=message) +- +- +-class InternalParserError(ParseError): +- """ +- An error that indicates a bug in the parser. +- """ +- +- def __init__(self, line: int, col: int, message: Optional[str] = None) -> None: +- msg = "Internal parser error" +- if message: +- msg += f" ({message})" +- +- super().__init__(line, col, message=msg) +- +- +-class NonExistentKey(KeyError, TOMLKitError): +- """ +- A non-existent key was used. +- """ +- +- def __init__(self, key): +- message = f'Key "{key}" does not exist.' +- +- super().__init__(message) +- +- +-class KeyAlreadyPresent(TOMLKitError): +- """ +- An already present key was used. +- """ +- +- def __init__(self, key): +- key = getattr(key, "key", key) +- message = f'Key "{key}" already exists.' +- +- super().__init__(message) +- +- +-class InvalidControlChar(ParseError): +- def __init__(self, line: int, col: int, char: int, type: str) -> None: +- display_code = "\\u00" +- +- if char < 16: +- display_code += "0" +- +- display_code += hex(char)[2:] +- +- message = ( +- "Control characters (codes less than 0x1f and 0x7f)" +- f" are not allowed in {type}, " +- f"use {display_code} instead" +- ) +- +- super().__init__(line, col, message=message) +- +- +-class InvalidStringError(ValueError, TOMLKitError): +- def __init__(self, value: str, invalid_sequences: Collection[str], delimiter: str): +- repr_ = repr(value)[1:-1] +- super().__init__( +- f"Invalid string: {delimiter}{repr_}{delimiter}. " +- f"The character sequences {invalid_sequences} are invalid." 
+- ) +diff --git a/src/poetry/core/_vendor/tomlkit/items.py b/src/poetry/core/_vendor/tomlkit/items.py +deleted file mode 100644 +index f51becc..0000000 +--- a/src/poetry/core/_vendor/tomlkit/items.py ++++ /dev/null +@@ -1,1940 +0,0 @@ +-import abc +-import copy +-import re +-import string +- +-from datetime import date +-from datetime import datetime +-from datetime import time +-from datetime import tzinfo +-from enum import Enum +-from typing import TYPE_CHECKING +-from typing import Any +-from typing import Collection +-from typing import Dict +-from typing import Iterable +-from typing import Iterator +-from typing import List +-from typing import Optional +-from typing import Sequence +-from typing import TypeVar +-from typing import Union +-from typing import cast +-from typing import overload +- +-from tomlkit._compat import PY38 +-from tomlkit._compat import decode +-from tomlkit._utils import CONTROL_CHARS +-from tomlkit._utils import escape_string +-from tomlkit.exceptions import InvalidStringError +- +- +-if TYPE_CHECKING: # pragma: no cover +- # Define _CustomList and _CustomDict as a workaround for: +- # https://github.com/python/mypy/issues/11427 +- # +- # According to this issue, the typeshed contains a "lie" +- # (it adds MutableSequence to the ancestry of list and MutableMapping to +- # the ancestry of dict) which completely messes with the type inference for +- # Table, InlineTable, Array and Container. +- # +- # Importing from builtins is preferred over simple assignment, see issues: +- # https://github.com/python/mypy/issues/8715 +- # https://github.com/python/mypy/issues/10068 +- from builtins import dict as _CustomDict # noqa: N812, TC004 +- from builtins import list as _CustomList # noqa: N812, TC004 +- +- # Allow type annotations but break circular imports +- from tomlkit import container +-else: +- from collections.abc import MutableMapping +- from collections.abc import MutableSequence +- +- class _CustomList(MutableSequence, list): +- """Adds MutableSequence mixin while pretending to be a builtin list""" +- +- class _CustomDict(MutableMapping, dict): +- """Adds MutableMapping mixin while pretending to be a builtin dict""" +- +- +-ItemT = TypeVar("ItemT", bound="Item") +- +- +-@overload +-def item( +- value: bool, _parent: Optional["Item"] = ..., _sort_keys: bool = ... +-) -> "Bool": +- ... +- +- +-@overload +-def item( +- value: int, _parent: Optional["Item"] = ..., _sort_keys: bool = ... +-) -> "Integer": +- ... +- +- +-@overload +-def item( +- value: float, _parent: Optional["Item"] = ..., _sort_keys: bool = ... +-) -> "Float": +- ... +- +- +-@overload +-def item( +- value: str, _parent: Optional["Item"] = ..., _sort_keys: bool = ... +-) -> "String": +- ... +- +- +-@overload +-def item( +- value: datetime, _parent: Optional["Item"] = ..., _sort_keys: bool = ... +-) -> "DateTime": +- ... +- +- +-@overload +-def item( +- value: date, _parent: Optional["Item"] = ..., _sort_keys: bool = ... +-) -> "Date": +- ... +- +- +-@overload +-def item( +- value: time, _parent: Optional["Item"] = ..., _sort_keys: bool = ... +-) -> "Time": +- ... +- +- +-@overload +-def item( +- value: Sequence[dict], _parent: Optional["Item"] = ..., _sort_keys: bool = ... +-) -> "AoT": +- ... +- +- +-@overload +-def item( +- value: Sequence, _parent: Optional["Item"] = ..., _sort_keys: bool = ... +-) -> "Array": +- ... +- +- +-@overload +-def item(value: dict, _parent: "Array" = ..., _sort_keys: bool = ...) -> "InlineTable": +- ... 
+-
+-
+-@overload
+-def item(
+-    value: dict, _parent: Optional["Item"] = ..., _sort_keys: bool = ...
+-) -> "Table":
+-    ...
+-
+-
+-@overload
+-def item(
+-    value: ItemT, _parent: Optional["Item"] = ..., _sort_keys: bool = ...
+-) -> ItemT:
+-    ...
+-
+-
+-def item(
+-    value: Any, _parent: Optional["Item"] = None, _sort_keys: bool = False
+-) -> "Item":
+-    """Create a TOML item from a Python object.
+-
+-    :Example:
+-
+-    >>> item(42)
+-    42
+-    >>> item([1, 2, 3])
+-    [1, 2, 3]
+-    >>> item({'a': 1, 'b': 2})
+-    a = 1
+-    b = 2
+-    """
+-
+-    from tomlkit.container import Container
+-
+-    if isinstance(value, Item):
+-        return value
+-
+-    if isinstance(value, bool):
+-        return Bool(value, Trivia())
+-    elif isinstance(value, int):
+-        return Integer(value, Trivia(), str(value))
+-    elif isinstance(value, float):
+-        return Float(value, Trivia(), str(value))
+-    elif isinstance(value, dict):
+-        table_constructor = InlineTable if isinstance(_parent, Array) else Table
+-        val = table_constructor(Container(), Trivia(), False)
+-        for k, v in sorted(
+-            value.items(),
+-            key=lambda i: (isinstance(i[1], dict), i[0] if _sort_keys else 1),
+-        ):
+-            val[k] = item(v, _parent=val, _sort_keys=_sort_keys)
+-        only_child = val[next(iter(value))] if len(value) == 1 else None
+-        if table_constructor is Table and isinstance(only_child, (AoT, Table)):
+-            # The table becomes super table if the only child is a table or AoT.
+-            val._is_super_table = True
+-
+-        return val
+-    elif isinstance(value, (list, tuple)):
+-        if value and all(isinstance(v, dict) for v in value):
+-            a = AoT([])
+-            table_constructor = Table
+-        else:
+-            a = Array([], Trivia())
+-            table_constructor = InlineTable
+-
+-        for v in value:
+-            if isinstance(v, dict):
+-                table = table_constructor(Container(), Trivia(), True)
+-
+-                for k, _v in sorted(
+-                    v.items(),
+-                    key=lambda i: (isinstance(i[1], dict), i[0] if _sort_keys else 1),
+-                ):
+-                    i = item(_v, _parent=a, _sort_keys=_sort_keys)
+-                    if isinstance(table, InlineTable):
+-                        i.trivia.trail = ""
+-
+-                    table[k] = i
+-
+-                v = table
+-
+-            a.append(v)
+-
+-        return a
+-    elif isinstance(value, str):
+-        return String.from_raw(value)
+-    elif isinstance(value, datetime):
+-        return DateTime(
+-            value.year,
+-            value.month,
+-            value.day,
+-            value.hour,
+-            value.minute,
+-            value.second,
+-            value.microsecond,
+-            value.tzinfo,
+-            Trivia(),
+-            value.isoformat().replace("+00:00", "Z"),
+-        )
+-    elif isinstance(value, date):
+-        return Date(value.year, value.month, value.day, Trivia(), value.isoformat())
+-    elif isinstance(value, time):
+-        return Time(
+-            value.hour,
+-            value.minute,
+-            value.second,
+-            value.microsecond,
+-            value.tzinfo,
+-            Trivia(),
+-            value.isoformat(),
+-        )
+-
+-    raise ValueError(f"Invalid type {type(value)}")
+-
+-
+-class StringType(Enum):
+-    # Single Line Basic
+-    SLB = '"'
+-    # Multi Line Basic
+-    MLB = '"""'
+-    # Single Line Literal
+-    SLL = "'"
+-    # Multi Line Literal
+-    MLL = "'''"
+-
+-    @classmethod
+-    def select(cls, literal=False, multiline=False) -> "StringType":
+-        return {
+-            (False, False): cls.SLB,
+-            (False, True): cls.MLB,
+-            (True, False): cls.SLL,
+-            (True, True): cls.MLL,
+-        }[(literal, multiline)]
+-
+-    @property
+-    def escaped_sequences(self) -> Collection[str]:
+-        # https://toml.io/en/v1.0.0#string
+-        escaped_in_basic = CONTROL_CHARS | {"\\"}
+-        allowed_in_multiline = {"\n", "\r"}
+-        return {
+-            StringType.SLB: escaped_in_basic | {'"'},
+-            StringType.MLB: (escaped_in_basic | {'"""'}) - allowed_in_multiline,
+-            StringType.SLL: (),
+-            StringType.MLL: (),
+-        }[self]
+-
+-    @property
+-    def invalid_sequences(self) -> Collection[str]:
+-        # https://toml.io/en/v1.0.0#string
+-        forbidden_in_literal = CONTROL_CHARS - {"\t"}
+-        allowed_in_multiline = {"\n", "\r"}
+-        return {
+-            StringType.SLB: (),
+-            StringType.MLB: (),
+-            StringType.SLL: forbidden_in_literal | {"'"},
+-            StringType.MLL: (forbidden_in_literal | {"'''"}) - allowed_in_multiline,
+-        }[self]
+-
+-    @property
+-    def unit(self) -> str:
+-        return self.value[0]
+-
+-    def is_basic(self) -> bool:
+-        return self in {StringType.SLB, StringType.MLB}
+-
+-    def is_literal(self) -> bool:
+-        return self in {StringType.SLL, StringType.MLL}
+-
+-    def is_singleline(self) -> bool:
+-        return self in {StringType.SLB, StringType.SLL}
+-
+-    def is_multiline(self) -> bool:
+-        return self in {StringType.MLB, StringType.MLL}
+-
+-    def toggle(self) -> "StringType":
+-        return {
+-            StringType.SLB: StringType.MLB,
+-            StringType.MLB: StringType.SLB,
+-            StringType.SLL: StringType.MLL,
+-            StringType.MLL: StringType.SLL,
+-        }[self]
+-
+-
+-class BoolType(Enum):
+-    TRUE = "true"
+-    FALSE = "false"
+-
+-    def __bool__(self):
+-        return {BoolType.TRUE: True, BoolType.FALSE: False}[self]
+-
+-    def __iter__(self):
+-        return iter(self.value)
+-
+-    def __len__(self):
+-        return len(self.value)
+-
+-
+-class Trivia:
+-    """
+-    Trivia information (aka metadata).
+-    """
+-
+-    def __init__(
+-        self,
+-        indent: str = None,
+-        comment_ws: str = None,
+-        comment: str = None,
+-        trail: str = None,
+-    ) -> None:
+-        # Whitespace before a value.
+-        self.indent = indent or ""
+-        # Whitespace after a value, but before a comment.
+-        self.comment_ws = comment_ws or ""
+-        # Comment, starting with # character, or empty string if no comment.
+-        self.comment = comment or ""
+-        # Trailing newline.
+-        if trail is None:
+-            trail = "\n"
+-
+-        self.trail = trail
+-
+-    def copy(self) -> "Trivia":
+-        return type(self)(self.indent, self.comment_ws, self.comment, self.trail)
+-
+-
+-class KeyType(Enum):
+-    """
+-    The type of a Key.
+-
+-    Keys can be bare (unquoted), or quoted using basic ("), or literal (')
+-    quotes following the same escaping rules as single-line StringType.
+- """ +- +- Bare = "" +- Basic = '"' +- Literal = "'" +- +- +-class Key(abc.ABC): +- """Base class for a key""" +- +- sep: str +- _original: str +- _keys: List["SingleKey"] +- _dotted: bool +- key: str +- +- @abc.abstractmethod +- def __hash__(self) -> int: +- pass +- +- @abc.abstractmethod +- def __eq__(self, __o: object) -> bool: +- pass +- +- def is_dotted(self) -> bool: +- """If the key is followed by other keys""" +- return self._dotted +- +- def __iter__(self) -> Iterator["SingleKey"]: +- return iter(self._keys) +- +- def concat(self, other: "Key") -> "DottedKey": +- """Concatenate keys into a dotted key""" +- keys = self._keys + other._keys +- return DottedKey(keys, sep=self.sep) +- +- def is_multi(self) -> bool: +- """Check if the key contains multiple keys""" +- return len(self._keys) > 1 +- +- def as_string(self) -> str: +- """The TOML representation""" +- return self._original +- +- def __str__(self) -> str: +- return self.as_string() +- +- def __repr__(self) -> str: +- return f"" +- +- +-class SingleKey(Key): +- """A single key""" +- +- def __init__( +- self, +- k: str, +- t: Optional[KeyType] = None, +- sep: Optional[str] = None, +- original: Optional[str] = None, +- ) -> None: +- if t is None: +- if not k or any( +- c not in string.ascii_letters + string.digits + "-" + "_" for c in k +- ): +- t = KeyType.Basic +- else: +- t = KeyType.Bare +- +- self.t = t +- if sep is None: +- sep = " = " +- +- self.sep = sep +- self.key = k +- if original is None: +- key_str = escape_string(k) if t == KeyType.Basic else k +- original = f"{t.value}{key_str}{t.value}" +- +- self._original = original +- self._keys = [self] +- self._dotted = False +- +- @property +- def delimiter(self) -> str: +- """The delimiter: double quote/single quote/none""" +- return self.t.value +- +- def is_bare(self) -> bool: +- """Check if the key is bare""" +- return self.t == KeyType.Bare +- +- def __hash__(self) -> int: +- return hash(self.key) +- +- def __eq__(self, other: Any) -> bool: +- if isinstance(other, Key): +- return isinstance(other, SingleKey) and self.key == other.key +- +- return self.key == other +- +- +-class DottedKey(Key): +- def __init__( +- self, +- keys: Iterable[Key], +- sep: Optional[str] = None, +- original: Optional[str] = None, +- ) -> None: +- self._keys = list(keys) +- if original is None: +- original = ".".join(k.as_string() for k in self._keys) +- +- self.sep = " = " if sep is None else sep +- self._original = original +- self._dotted = False +- self.key = ".".join(k.key for k in self._keys) +- +- def __hash__(self) -> int: +- return hash(tuple(self._keys)) +- +- def __eq__(self, __o: object) -> bool: +- return isinstance(__o, DottedKey) and self._keys == __o._keys +- +- +-class Item: +- """ +- An item within a TOML document. 
+- """ +- +- def __init__(self, trivia: Trivia) -> None: +- self._trivia = trivia +- +- @property +- def trivia(self) -> Trivia: +- """The trivia element associated with this item""" +- return self._trivia +- +- @property +- def discriminant(self) -> int: +- raise NotImplementedError() +- +- def as_string(self) -> str: +- """The TOML representation""" +- raise NotImplementedError() +- +- def unwrap(self): +- """Returns as pure python object (ppo)""" +- raise NotImplementedError() +- +- # Helpers +- +- def comment(self, comment: str) -> "Item": +- """Attach a comment to this item""" +- if not comment.strip().startswith("#"): +- comment = "# " + comment +- +- self._trivia.comment_ws = " " +- self._trivia.comment = comment +- +- return self +- +- def indent(self, indent: int) -> "Item": +- """Indent this item with given number of spaces""" +- if self._trivia.indent.startswith("\n"): +- self._trivia.indent = "\n" + " " * indent +- else: +- self._trivia.indent = " " * indent +- +- return self +- +- def is_boolean(self) -> bool: +- return isinstance(self, Bool) +- +- def is_table(self) -> bool: +- return isinstance(self, Table) +- +- def is_inline_table(self) -> bool: +- return isinstance(self, InlineTable) +- +- def is_aot(self) -> bool: +- return isinstance(self, AoT) +- +- def _getstate(self, protocol=3): +- return (self._trivia,) +- +- def __reduce__(self): +- return self.__reduce_ex__(2) +- +- def __reduce_ex__(self, protocol): +- return self.__class__, self._getstate(protocol) +- +- +-class Whitespace(Item): +- """ +- A whitespace literal. +- """ +- +- def __init__(self, s: str, fixed: bool = False) -> None: +- self._s = s +- self._fixed = fixed +- +- @property +- def s(self) -> str: +- return self._s +- +- @property +- def value(self) -> str: +- """The wrapped string of the whitespace""" +- return self._s +- +- @property +- def trivia(self) -> Trivia: +- raise RuntimeError("Called trivia on a Whitespace variant.") +- +- @property +- def discriminant(self) -> int: +- return 0 +- +- def is_fixed(self) -> bool: +- """If the whitespace is fixed, it can't be merged or discarded from the output.""" +- return self._fixed +- +- def as_string(self) -> str: +- return self._s +- +- def __repr__(self) -> str: +- return f"<{self.__class__.__name__} {repr(self._s)}>" +- +- def _getstate(self, protocol=3): +- return self._s, self._fixed +- +- +-class Comment(Item): +- """ +- A comment literal. +- """ +- +- @property +- def discriminant(self) -> int: +- return 1 +- +- def as_string(self) -> str: +- return ( +- f"{self._trivia.indent}{decode(self._trivia.comment)}{self._trivia.trail}" +- ) +- +- def __str__(self) -> str: +- return f"{self._trivia.indent}{decode(self._trivia.comment)}" +- +- +-class Integer(int, Item): +- """ +- An integer literal. 
+- """ +- +- def __new__(cls, value: int, trivia: Trivia, raw: str) -> "Integer": +- return super().__new__(cls, value) +- +- def __init__(self, _: int, trivia: Trivia, raw: str) -> None: +- super().__init__(trivia) +- +- self._raw = raw +- self._sign = False +- +- if re.match(r"^[+\-]\d+$", raw): +- self._sign = True +- +- def unwrap(self) -> int: +- return int(self) +- +- @property +- def discriminant(self) -> int: +- return 2 +- +- @property +- def value(self) -> int: +- """The wrapped integer value""" +- return self +- +- def as_string(self) -> str: +- return self._raw +- +- def __add__(self, other): +- return self._new(int(self._raw) + other) +- +- def __radd__(self, other): +- result = super().__radd__(other) +- +- if isinstance(other, Integer): +- return self._new(result) +- +- return result +- +- def __sub__(self, other): +- result = super().__sub__(other) +- +- return self._new(result) +- +- def __rsub__(self, other): +- result = super().__rsub__(other) +- +- if isinstance(other, Integer): +- return self._new(result) +- +- return result +- +- def _new(self, result): +- raw = str(result) +- if self._sign: +- sign = "+" if result >= 0 else "-" +- raw = sign + raw +- +- return Integer(result, self._trivia, raw) +- +- def _getstate(self, protocol=3): +- return int(self), self._trivia, self._raw +- +- +-class Float(float, Item): +- """ +- A float literal. +- """ +- +- def __new__(cls, value: float, trivia: Trivia, raw: str) -> Integer: +- return super().__new__(cls, value) +- +- def __init__(self, _: float, trivia: Trivia, raw: str) -> None: +- super().__init__(trivia) +- +- self._raw = raw +- self._sign = False +- +- if re.match(r"^[+\-].+$", raw): +- self._sign = True +- +- def unwrap(self) -> float: +- return float(self) +- +- @property +- def discriminant(self) -> int: +- return 3 +- +- @property +- def value(self) -> float: +- """The wrapped float value""" +- return self +- +- def as_string(self) -> str: +- return self._raw +- +- def __add__(self, other): +- result = super().__add__(other) +- +- return self._new(result) +- +- def __radd__(self, other): +- result = super().__radd__(other) +- +- if isinstance(other, Float): +- return self._new(result) +- +- return result +- +- def __sub__(self, other): +- result = super().__sub__(other) +- +- return self._new(result) +- +- def __rsub__(self, other): +- result = super().__rsub__(other) +- +- if isinstance(other, Float): +- return self._new(result) +- +- return result +- +- def _new(self, result): +- raw = str(result) +- +- if self._sign: +- sign = "+" if result >= 0 else "-" +- raw = sign + raw +- +- return Float(result, self._trivia, raw) +- +- def _getstate(self, protocol=3): +- return float(self), self._trivia, self._raw +- +- +-class Bool(Item): +- """ +- A boolean literal. 
+- """ +- +- def __init__(self, t: int, trivia: Trivia) -> None: +- super().__init__(trivia) +- +- self._value = bool(t) +- +- def unwrap(self) -> bool: +- return bool(self) +- +- @property +- def discriminant(self) -> int: +- return 4 +- +- @property +- def value(self) -> bool: +- """The wrapped boolean value""" +- return self._value +- +- def as_string(self) -> str: +- return str(self._value).lower() +- +- def _getstate(self, protocol=3): +- return self._value, self._trivia +- +- def __bool__(self): +- return self._value +- +- __nonzero__ = __bool__ +- +- def __eq__(self, other): +- if not isinstance(other, bool): +- return NotImplemented +- +- return other == self._value +- +- def __hash__(self): +- return hash(self._value) +- +- def __repr__(self): +- return repr(self._value) +- +- +-class DateTime(Item, datetime): +- """ +- A datetime literal. +- """ +- +- def __new__( +- cls, +- year: int, +- month: int, +- day: int, +- hour: int, +- minute: int, +- second: int, +- microsecond: int, +- tzinfo: Optional[tzinfo], +- *_: Any, +- **kwargs: Any, +- ) -> datetime: +- return datetime.__new__( +- cls, +- year, +- month, +- day, +- hour, +- minute, +- second, +- microsecond, +- tzinfo=tzinfo, +- **kwargs, +- ) +- +- def __init__( +- self, +- year: int, +- month: int, +- day: int, +- hour: int, +- minute: int, +- second: int, +- microsecond: int, +- tzinfo: Optional[tzinfo], +- trivia: Optional[Trivia] = None, +- raw: Optional[str] = None, +- **kwargs: Any, +- ) -> None: +- super().__init__(trivia or Trivia()) +- +- self._raw = raw or self.isoformat() +- +- def unwrap(self) -> datetime: +- ( +- year, +- month, +- day, +- hour, +- minute, +- second, +- microsecond, +- tzinfo, +- _, +- _, +- ) = self._getstate() +- return datetime(year, month, day, hour, minute, second, microsecond, tzinfo) +- +- @property +- def discriminant(self) -> int: +- return 5 +- +- @property +- def value(self) -> datetime: +- return self +- +- def as_string(self) -> str: +- return self._raw +- +- def __add__(self, other): +- if PY38: +- result = datetime( +- self.year, +- self.month, +- self.day, +- self.hour, +- self.minute, +- self.second, +- self.microsecond, +- self.tzinfo, +- ).__add__(other) +- else: +- result = super().__add__(other) +- +- return self._new(result) +- +- def __sub__(self, other): +- if PY38: +- result = datetime( +- self.year, +- self.month, +- self.day, +- self.hour, +- self.minute, +- self.second, +- self.microsecond, +- self.tzinfo, +- ).__sub__(other) +- else: +- result = super().__sub__(other) +- +- if isinstance(result, datetime): +- result = self._new(result) +- +- return result +- +- def replace(self, *args: Any, **kwargs: Any) -> datetime: +- return self._new(super().replace(*args, **kwargs)) +- +- def astimezone(self, tz: tzinfo) -> datetime: +- result = super().astimezone(tz) +- if PY38: +- return result +- return self._new(result) +- +- def _new(self, result) -> "DateTime": +- raw = result.isoformat() +- +- return DateTime( +- result.year, +- result.month, +- result.day, +- result.hour, +- result.minute, +- result.second, +- result.microsecond, +- result.tzinfo, +- self._trivia, +- raw, +- ) +- +- def _getstate(self, protocol=3): +- return ( +- self.year, +- self.month, +- self.day, +- self.hour, +- self.minute, +- self.second, +- self.microsecond, +- self.tzinfo, +- self._trivia, +- self._raw, +- ) +- +- +-class Date(Item, date): +- """ +- A date literal. 
+- """ +- +- def __new__(cls, year: int, month: int, day: int, *_: Any) -> date: +- return date.__new__(cls, year, month, day) +- +- def __init__( +- self, year: int, month: int, day: int, trivia: Trivia, raw: str +- ) -> None: +- super().__init__(trivia) +- +- self._raw = raw +- +- def unwrap(self) -> date: +- (year, month, day, _, _) = self._getstate() +- return date(year, month, day) +- +- @property +- def discriminant(self) -> int: +- return 6 +- +- @property +- def value(self) -> date: +- return self +- +- def as_string(self) -> str: +- return self._raw +- +- def __add__(self, other): +- if PY38: +- result = date(self.year, self.month, self.day).__add__(other) +- else: +- result = super().__add__(other) +- +- return self._new(result) +- +- def __sub__(self, other): +- if PY38: +- result = date(self.year, self.month, self.day).__sub__(other) +- else: +- result = super().__sub__(other) +- +- if isinstance(result, date): +- result = self._new(result) +- +- return result +- +- def replace(self, *args: Any, **kwargs: Any) -> date: +- return self._new(super().replace(*args, **kwargs)) +- +- def _new(self, result): +- raw = result.isoformat() +- +- return Date(result.year, result.month, result.day, self._trivia, raw) +- +- def _getstate(self, protocol=3): +- return (self.year, self.month, self.day, self._trivia, self._raw) +- +- +-class Time(Item, time): +- """ +- A time literal. +- """ +- +- def __new__( +- cls, +- hour: int, +- minute: int, +- second: int, +- microsecond: int, +- tzinfo: Optional[tzinfo], +- *_: Any, +- ) -> time: +- return time.__new__(cls, hour, minute, second, microsecond, tzinfo) +- +- def __init__( +- self, +- hour: int, +- minute: int, +- second: int, +- microsecond: int, +- tzinfo: Optional[tzinfo], +- trivia: Trivia, +- raw: str, +- ) -> None: +- super().__init__(trivia) +- +- self._raw = raw +- +- def unwrap(self) -> datetime: +- (hour, minute, second, microsecond, tzinfo, _, _) = self._getstate() +- return time(hour, minute, second, microsecond, tzinfo) +- +- @property +- def discriminant(self) -> int: +- return 7 +- +- @property +- def value(self) -> time: +- return self +- +- def as_string(self) -> str: +- return self._raw +- +- def replace(self, *args: Any, **kwargs: Any) -> time: +- return self._new(super().replace(*args, **kwargs)) +- +- def _new(self, result): +- raw = result.isoformat() +- +- return Time( +- result.hour, +- result.minute, +- result.second, +- result.microsecond, +- result.tzinfo, +- self._trivia, +- raw, +- ) +- +- def _getstate(self, protocol: int = 3) -> tuple: +- return ( +- self.hour, +- self.minute, +- self.second, +- self.microsecond, +- self.tzinfo, +- self._trivia, +- self._raw, +- ) +- +- +-class _ArrayItemGroup: +- __slots__ = ("value", "indent", "comma", "comment") +- +- def __init__( +- self, +- value: Optional[Item] = None, +- indent: Optional[Whitespace] = None, +- comma: Optional[Whitespace] = None, +- comment: Optional[Comment] = None, +- ) -> None: +- self.value = value +- self.indent = indent +- self.comma = comma +- self.comment = comment +- +- def __iter__(self) -> Iterator[Item]: +- return filter( +- lambda x: x is not None, (self.indent, self.value, self.comma, self.comment) +- ) +- +- def __repr__(self) -> str: +- return repr(tuple(self)) +- +- def is_whitespace(self) -> bool: +- return self.value is None and self.comment is None +- +- def __bool__(self) -> bool: +- try: +- next(iter(self)) +- except StopIteration: +- return False +- return True +- +- +-class Array(Item, _CustomList): +- """ +- An array literal +- 
""" +- +- def __init__( +- self, value: List[Item], trivia: Trivia, multiline: bool = False +- ) -> None: +- super().__init__(trivia) +- list.__init__( +- self, +- [v.value for v in value if not isinstance(v, (Whitespace, Comment, Null))], +- ) +- self._index_map: Dict[int, int] = {} +- self._value = self._group_values(value) +- self._multiline = multiline +- self._reindex() +- +- def _group_values(self, value: List[Item]) -> List[_ArrayItemGroup]: +- """Group the values into (indent, value, comma, comment) tuples""" +- groups = [] +- this_group = _ArrayItemGroup() +- for item in value: +- if isinstance(item, Whitespace): +- if "," not in item.s: +- groups.append(this_group) +- this_group = _ArrayItemGroup(indent=item) +- else: +- if this_group.value is None: +- # when comma is met and no value is provided, add a dummy Null +- this_group.value = Null() +- this_group.comma = item +- elif isinstance(item, Comment): +- if this_group.value is None: +- this_group.value = Null() +- this_group.comment = item +- elif this_group.value is None: +- this_group.value = item +- else: +- groups.append(this_group) +- this_group = _ArrayItemGroup(value=item) +- groups.append(this_group) +- return [group for group in groups if group] +- +- def unwrap(self) -> str: +- unwrapped = [] +- for v in self: +- if isinstance(v, Item): +- unwrapped.append(v.unwrap()) +- else: +- unwrapped.append(v) +- return unwrapped +- +- @property +- def discriminant(self) -> int: +- return 8 +- +- @property +- def value(self) -> list: +- return self +- +- def _iter_items(self) -> Iterator[Item]: +- for v in self._value: +- yield from v +- +- def multiline(self, multiline: bool) -> "Array": +- """Change the array to display in multiline or not. +- +- :Example: +- +- >>> a = item([1, 2, 3]) +- >>> print(a.as_string()) +- [1, 2, 3] +- >>> print(a.multiline(True).as_string()) +- [ +- 1, +- 2, +- 3, +- ] +- """ +- self._multiline = multiline +- +- return self +- +- def as_string(self) -> str: +- if not self._multiline or not self._value: +- return f'[{"".join(v.as_string() for v in self._iter_items())}]' +- +- s = "[\n" +- s += "".join( +- self.trivia.indent +- + " " * 4 +- + v.value.as_string() +- + ("," if not isinstance(v.value, Null) else "") +- + (v.comment.as_string() if v.comment is not None else "") +- + "\n" +- for v in self._value +- if v.value is not None +- ) +- s += self.trivia.indent + "]" +- +- return s +- +- def _reindex(self) -> None: +- self._index_map.clear() +- index = 0 +- for i, v in enumerate(self._value): +- if v.value is None or isinstance(v.value, Null): +- continue +- self._index_map[index] = i +- index += 1 +- +- def add_line( +- self, +- *items: Any, +- indent: str = " ", +- comment: Optional[str] = None, +- add_comma: bool = True, +- newline: bool = True, +- ) -> None: +- """Add multiple items in a line to control the format precisely. +- When add_comma is True, only accept actual values and +- ", " will be added between values automatically. 
+-
+-        :Example:
+-
+-        >>> a = array()
+-        >>> a.add_line(1, 2, 3)
+-        >>> a.add_line(4, 5, 6)
+-        >>> a.add_line(indent="")
+-        >>> print(a.as_string())
+-        [
+-        1, 2, 3,
+-        4, 5, 6,
+-        ]
+-        """
+-        new_values: List[Item] = []
+-        first_indent = f"\n{indent}" if newline else indent
+-        if first_indent:
+-            new_values.append(Whitespace(first_indent))
+-        whitespace = ""
+-        data_values = []
+-        for i, el in enumerate(items):
+-            it = item(el, _parent=self)
+-            if isinstance(it, Comment) or add_comma and isinstance(el, Whitespace):
+-                raise ValueError(f"item type {type(it)} is not allowed in add_line")
+-            if not isinstance(it, Whitespace):
+-                if whitespace:
+-                    new_values.append(Whitespace(whitespace))
+-                    whitespace = ""
+-                new_values.append(it)
+-                data_values.append(it.value)
+-                if add_comma:
+-                    new_values.append(Whitespace(","))
+-                    if i != len(items) - 1:
+-                        new_values.append(Whitespace(" "))
+-            elif "," not in it.s:
+-                whitespace += it.s
+-            else:
+-                new_values.append(it)
+-        if whitespace:
+-            new_values.append(Whitespace(whitespace))
+-        if comment:
+-            indent = " " if items else ""
+-            new_values.append(
+-                Comment(Trivia(indent=indent, comment=f"# {comment}", trail=""))
+-            )
+-        list.extend(self, data_values)
+-        if len(self._value) > 0:
+-            last_item = self._value[-1]
+-            last_value_item = next(
+-                (
+-                    v
+-                    for v in self._value[::-1]
+-                    if v.value is not None and not isinstance(v.value, Null)
+-                ),
+-                None,
+-            )
+-            if last_value_item is not None:
+-                last_value_item.comma = Whitespace(",")
+-            if last_item.is_whitespace():
+-                self._value[-1:-1] = self._group_values(new_values)
+-            else:
+-                self._value.extend(self._group_values(new_values))
+-        else:
+-            self._value.extend(self._group_values(new_values))
+-        self._reindex()
+-
+-    def clear(self) -> None:
+-        """Clear the array."""
+-        list.clear(self)
+-        self._index_map.clear()
+-        self._value.clear()
+-
+-    def __len__(self) -> int:
+-        return list.__len__(self)
+-
+-    def __getitem__(self, key: Union[int, slice]) -> Any:
+-        return list.__getitem__(self, key)
+-
+-    def __setitem__(self, key: Union[int, slice], value: Any) -> Any:
+-        it = item(value, _parent=self)
+-        list.__setitem__(self, key, it.value)
+-        if isinstance(key, slice):
+-            raise ValueError("slice assignment is not supported")
+-        if key < 0:
+-            key += len(self)
+-        self._value[self._index_map[key]].value = it
+-
+-    def insert(self, pos: int, value: Any) -> None:
+-        it = item(value, _parent=self)
+-        length = len(self)
+-        if not isinstance(it, (Comment, Whitespace)):
+-            list.insert(self, pos, it.value)
+-        if pos < 0:
+-            pos += length
+-            if pos < 0:
+-                pos = 0
+-
+-        idx = 0  # insert position of the self._value list
+-        default_indent = " "
+-        if pos < length:
+-            try:
+-                idx = self._index_map[pos]
+-            except KeyError as e:
+-                raise IndexError("list index out of range") from e
+-        else:
+-            idx = len(self._value)
+-            if idx >= 1 and self._value[idx - 1].is_whitespace():
+-                # The last item is a pure whitespace(\n ), insert before it
+-                idx -= 1
+-                if (
+-                    self._value[idx].indent is not None
+-                    and "\n" in self._value[idx].indent.s
+-                ):
+-                    default_indent = "\n    "
+-        indent: Optional[Item] = None
+-        comma: Optional[Item] = Whitespace(",") if pos < length else None
+-        if idx < len(self._value) and not self._value[idx].is_whitespace():
+-            # Prefer to copy the indentation from the item after
+-            indent = self._value[idx].indent
+-        if idx > 0:
+-            last_item = self._value[idx - 1]
+-            if indent is None:
+-                indent = last_item.indent
+-            if not isinstance(last_item.value, Null) and "\n" in default_indent:
+-                # Copy the comma from the last item if 1) it contains a value and
+-                # 2) the array is multiline
+-                comma = last_item.comma
+-            if last_item.comma is None and not isinstance(last_item.value, Null):
+-                # Add comma to the last item to separate it from the following items.
+-                last_item.comma = Whitespace(",")
+-        if indent is None and (idx > 0 or "\n" in default_indent):
+-            # apply default indent if it isn't the first item or the array is multiline.
+-            indent = Whitespace(default_indent)
+-        new_item = _ArrayItemGroup(value=it, indent=indent, comma=comma)
+-        self._value.insert(idx, new_item)
+-        self._reindex()
+-
+-    def __delitem__(self, key: Union[int, slice]):
+-        length = len(self)
+-        list.__delitem__(self, key)
+-
+-        if isinstance(key, slice):
+-            indices_to_remove = list(
+-                range(key.start or 0, key.stop or length, key.step or 1)
+-            )
+-        else:
+-            indices_to_remove = [length + key if key < 0 else key]
+-        for i in sorted(indices_to_remove, reverse=True):
+-            try:
+-                idx = self._index_map[i]
+-            except KeyError as e:
+-                if not isinstance(key, slice):
+-                    raise IndexError("list index out of range") from e
+-            else:
+-                del self._value[idx]
+-                if (
+-                    idx == 0
+-                    and len(self._value) > 0
+-                    and "\n" not in self._value[idx].indent.s
+-                ):
+-                    # Remove the indentation of the first item if not newline
+-                    self._value[idx].indent = None
+-        if len(self._value) > 0:
+-            v = self._value[-1]
+-            if not v.is_whitespace():
+-                # remove the comma of the last item
+-                v.comma = None
+-
+-        self._reindex()
+-
+-    def __str__(self):
+-        return str([v.value.value for v in self._iter_items() if v.value is not None])
+-
+-    def _getstate(self, protocol=3):
+-        return list(self._iter_items()), self._trivia, self._multiline
+-
+-
+-AT = TypeVar("AT", bound="AbstractTable")
+-
+-
+-class AbstractTable(Item, _CustomDict):
+-    """Common behaviour of both :class:`Table` and :class:`InlineTable`"""
+-
+-    def __init__(self, value: "container.Container", trivia: Trivia):
+-        Item.__init__(self, trivia)
+-
+-        self._value = value
+-
+-        for k, v in self._value.body:
+-            if k is not None:
+-                dict.__setitem__(self, k.key, v)
+-
+-    def unwrap(self):
+-        unwrapped = {}
+-        for k, v in self.items():
+-            if isinstance(k, Key):
+-                k = k.key
+-            if isinstance(v, Item):
+-                v = v.unwrap()
+-            unwrapped[k] = v
+-
+-        return unwrapped
+-
+-    @property
+-    def value(self) -> "container.Container":
+-        return self._value
+-
+-    @overload
+-    def append(self: AT, key: None, value: Union[Comment, Whitespace]) -> AT:
+-        ...
+-
+-    @overload
+-    def append(self: AT, key: Union[Key, str], value: Any) -> AT:
+-        ...
+-
+-    def append(self, key, value):
+-        raise NotImplementedError
+-
+-    @overload
+-    def add(self: AT, value: Union[Comment, Whitespace]) -> AT:
+-        ...
+-
+-    @overload
+-    def add(self: AT, key: Union[Key, str], value: Any) -> AT:
+-        ...
+- +- def add(self, key, value=None): +- if value is None: +- if not isinstance(key, (Comment, Whitespace)): +- msg = "Non comment/whitespace items must have an associated key" +- raise ValueError(msg) +- +- key, value = None, key +- +- return self.append(key, value) +- +- def remove(self: AT, key: Union[Key, str]) -> AT: +- self._value.remove(key) +- +- if isinstance(key, Key): +- key = key.key +- +- if key is not None: +- dict.__delitem__(self, key) +- +- return self +- +- def setdefault(self, key: Union[Key, str], default: Any) -> Any: +- super().setdefault(key, default) +- return self[key] +- +- def __str__(self): +- return str(self.value) +- +- def copy(self: AT) -> AT: +- return copy.copy(self) +- +- def __repr__(self) -> str: +- return repr(self.value) +- +- def __iter__(self) -> Iterator[str]: +- return iter(self._value) +- +- def __len__(self) -> int: +- return len(self._value) +- +- def __delitem__(self, key: Union[Key, str]) -> None: +- self.remove(key) +- +- def __getitem__(self, key: Union[Key, str]) -> Item: +- return cast(Item, self._value[key]) +- +- def __setitem__(self, key: Union[Key, str], value: Any) -> None: +- if not isinstance(value, Item): +- value = item(value) +- +- is_replace = key in self +- self._value[key] = value +- +- if key is not None: +- dict.__setitem__(self, key, value) +- +- if is_replace: +- return +- m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent) +- if not m: +- return +- +- indent = m.group(1) +- +- if not isinstance(value, Whitespace): +- m = re.match("(?s)^([^ ]*)(.*)$", value.trivia.indent) +- if not m: +- value.trivia.indent = indent +- else: +- value.trivia.indent = m.group(1) + indent + m.group(2) +- +- +-class Table(AbstractTable): +- """ +- A table literal. +- """ +- +- def __init__( +- self, +- value: "container.Container", +- trivia: Trivia, +- is_aot_element: bool, +- is_super_table: bool = False, +- name: Optional[str] = None, +- display_name: Optional[str] = None, +- ) -> None: +- super().__init__(value, trivia) +- +- self.name = name +- self.display_name = display_name +- self._is_aot_element = is_aot_element +- self._is_super_table = is_super_table +- +- @property +- def discriminant(self) -> int: +- return 9 +- +- def __copy__(self) -> "Table": +- return type(self)( +- self._value.copy(), +- self._trivia.copy(), +- self._is_aot_element, +- self._is_super_table, +- self.name, +- self.display_name, +- ) +- +- def append(self, key, _item): +- """ +- Appends a (key, item) to the table. 
+- """ +- if not isinstance(_item, Item): +- _item = item(_item) +- +- self._value.append(key, _item) +- +- if isinstance(key, Key): +- key = next(iter(key)).key +- _item = self._value[key] +- +- if key is not None: +- dict.__setitem__(self, key, _item) +- +- m = re.match(r"(?s)^[^ ]*([ ]+).*$", self._trivia.indent) +- if not m: +- return self +- +- indent = m.group(1) +- +- if not isinstance(_item, Whitespace): +- m = re.match("(?s)^([^ ]*)(.*)$", _item.trivia.indent) +- if not m: +- _item.trivia.indent = indent +- else: +- _item.trivia.indent = m.group(1) + indent + m.group(2) +- +- return self +- +- def raw_append(self, key: Union[Key, str], _item: Any) -> "Table": +- """Similar to :meth:`append` but does not copy indentation.""" +- if not isinstance(_item, Item): +- _item = item(_item) +- +- self._value.append(key, _item) +- +- if isinstance(key, Key): +- key = next(iter(key)).key +- _item = self._value[key] +- +- if key is not None: +- dict.__setitem__(self, key, _item) +- +- return self +- +- def is_aot_element(self) -> bool: +- """True if the table is the direct child of an AOT element.""" +- return self._is_aot_element +- +- def is_super_table(self) -> bool: +- """A super table is the intermediate parent of a nested table as in [a.b.c]. +- If true, it won't appear in the TOML representation.""" +- return self._is_super_table +- +- def as_string(self) -> str: +- return self._value.as_string() +- +- # Helpers +- +- def indent(self, indent: int) -> "Table": +- """Indent the table with given number of spaces.""" +- super().indent(indent) +- +- m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent) +- if not m: +- indent_str = "" +- else: +- indent_str = m.group(1) +- +- for _, item in self._value.body: +- if not isinstance(item, Whitespace): +- item.trivia.indent = indent_str + item.trivia.indent +- +- return self +- +- def invalidate_display_name(self): +- self.display_name = None +- +- for child in self.values(): +- if hasattr(child, "invalidate_display_name"): +- child.invalidate_display_name() +- +- def _getstate(self, protocol: int = 3) -> tuple: +- return ( +- self._value, +- self._trivia, +- self._is_aot_element, +- self._is_super_table, +- self.name, +- self.display_name, +- ) +- +- +-class InlineTable(AbstractTable): +- """ +- An inline table literal. +- """ +- +- def __init__( +- self, value: "container.Container", trivia: Trivia, new: bool = False +- ) -> None: +- super().__init__(value, trivia) +- +- self._new = new +- +- @property +- def discriminant(self) -> int: +- return 10 +- +- def append(self, key, _item): +- """ +- Appends a (key, item) to the table. +- """ +- if not isinstance(_item, Item): +- _item = item(_item) +- +- if not isinstance(_item, (Whitespace, Comment)): +- if not _item.trivia.indent and len(self._value) > 0 and not self._new: +- _item.trivia.indent = " " +- if _item.trivia.comment: +- _item.trivia.comment = "" +- +- self._value.append(key, _item) +- +- if isinstance(key, Key): +- key = key.key +- +- if key is not None: +- dict.__setitem__(self, key, _item) +- +- return self +- +- def as_string(self) -> str: +- buf = "{" +- for i, (k, v) in enumerate(self._value.body): +- if k is None: +- if i == len(self._value.body) - 1: +- if self._new: +- buf = buf.rstrip(", ") +- else: +- buf = buf.rstrip(",") +- +- buf += v.as_string() +- +- continue +- +- v_trivia_trail = v.trivia.trail.replace("\n", "") +- buf += ( +- f"{v.trivia.indent}" +- f'{k.as_string() + ("." 
if k.is_dotted() else "")}' +- f"{k.sep}" +- f"{v.as_string()}" +- f"{v.trivia.comment}" +- f"{v_trivia_trail}" +- ) +- +- if i != len(self._value.body) - 1: +- buf += "," +- if self._new: +- buf += " " +- +- buf += "}" +- +- return buf +- +- def __setitem__(self, key: Union[Key, str], value: Any) -> None: +- if hasattr(value, "trivia") and value.trivia.comment: +- value.trivia.comment = "" +- super().__setitem__(key, value) +- +- def __copy__(self) -> "InlineTable": +- return type(self)(self._value.copy(), self._trivia.copy(), self._new) +- +- def _getstate(self, protocol: int = 3) -> tuple: +- return (self._value, self._trivia) +- +- +-class String(str, Item): +- """ +- A string literal. +- """ +- +- def __new__(cls, t, value, original, trivia): +- return super().__new__(cls, value) +- +- def __init__(self, t: StringType, _: str, original: str, trivia: Trivia) -> None: +- super().__init__(trivia) +- +- self._t = t +- self._original = original +- +- def unwrap(self) -> str: +- return str(self) +- +- @property +- def discriminant(self) -> int: +- return 11 +- +- @property +- def value(self) -> str: +- return self +- +- def as_string(self) -> str: +- return f"{self._t.value}{decode(self._original)}{self._t.value}" +- +- def __add__(self, other): +- result = super().__add__(other) +- +- return self._new(result) +- +- def __sub__(self, other): +- result = super().__sub__(other) +- +- return self._new(result) +- +- def _new(self, result): +- return String(self._t, result, result, self._trivia) +- +- def _getstate(self, protocol=3): +- return self._t, str(self), self._original, self._trivia +- +- @classmethod +- def from_raw(cls, value: str, type_=StringType.SLB, escape=True) -> "String": +- value = decode(value) +- +- invalid = type_.invalid_sequences +- if any(c in value for c in invalid): +- raise InvalidStringError(value, invalid, type_.value) +- +- escaped = type_.escaped_sequences +- string_value = escape_string(value, escaped) if escape and escaped else value +- +- return cls(type_, decode(value), string_value, Trivia()) +- +- +-class AoT(Item, _CustomList): +- """ +- An array of table literal +- """ +- +- def __init__( +- self, body: List[Table], name: Optional[str] = None, parsed: bool = False +- ) -> None: +- self.name = name +- self._body: List[Table] = [] +- self._parsed = parsed +- +- super().__init__(Trivia(trail="")) +- +- for table in body: +- self.append(table) +- +- def unwrap(self) -> str: +- unwrapped = [] +- for t in self._body: +- if isinstance(t, Item): +- unwrapped.append(t.unwrap()) +- else: +- unwrapped.append(t) +- return unwrapped +- +- @property +- def body(self) -> List[Table]: +- return self._body +- +- @property +- def discriminant(self) -> int: +- return 12 +- +- @property +- def value(self) -> List[Dict[Any, Any]]: +- return [v.value for v in self._body] +- +- def __len__(self) -> int: +- return len(self._body) +- +- @overload +- def __getitem__(self, key: slice) -> List[Table]: +- ... +- +- @overload +- def __getitem__(self, key: int) -> Table: +- ... 
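The ``InlineTable.as_string`` logic above is what serializes ``{key = value}``
tables. A small sketch of driving it through tomlkit's public helpers, assuming an
installed tomlkit (the table name and values are illustrative only):

    import tomlkit

    tbl = tomlkit.inline_table()
    tbl["name"] = "example"
    tbl["version"] = "0.1.0"

    doc = tomlkit.document()
    doc["package"] = tbl
    print(tomlkit.dumps(doc))
    # package = {name = "example", version = "0.1.0"}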
+-
+-    def __getitem__(self, key):
+-        return self._body[key]
+-
+-    def __setitem__(self, key: Union[slice, int], value: Any) -> None:
+-        raise NotImplementedError
+-
+-    def __delitem__(self, key: Union[slice, int]) -> None:
+-        del self._body[key]
+-        list.__delitem__(self, key)
+-
+-    def insert(self, index: int, value: dict) -> None:
+-        value = item(value, _parent=self)
+-        if not isinstance(value, Table):
+-            raise ValueError(f"Unsupported insert value type: {type(value)}")
+-        length = len(self)
+-        if index < 0:
+-            index += length
+-        if index < 0:
+-            index = 0
+-        elif index >= length:
+-            index = length
+-        m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent)
+-        if m:
+-            indent = m.group(1)
+-
+-            m = re.match("(?s)^([^ ]*)(.*)$", value.trivia.indent)
+-            if not m:
+-                value.trivia.indent = indent
+-            else:
+-                value.trivia.indent = m.group(1) + indent + m.group(2)
+-        prev_table = self._body[index - 1] if 0 < index and length else None
+-        next_table = self._body[index + 1] if index < length - 1 else None
+-        if not self._parsed:
+-            if prev_table and "\n" not in value.trivia.indent:
+-                value.trivia.indent = "\n" + value.trivia.indent
+-            if next_table and "\n" not in next_table.trivia.indent:
+-                next_table.trivia.indent = "\n" + next_table.trivia.indent
+-        self._body.insert(index, value)
+-        list.insert(self, index, value)
+-
+-    def invalidate_display_name(self):
+-        """Call ``invalidate_display_name`` on the contained tables"""
+-        for child in self:
+-            if hasattr(child, "invalidate_display_name"):
+-                child.invalidate_display_name()
+-
+-    def as_string(self) -> str:
+-        b = ""
+-        for table in self._body:
+-            b += table.as_string()
+-
+-        return b
+-
+-    def __repr__(self) -> str:
+-        return f"<AoT {self.value}>"
+-
+-    def _getstate(self, protocol=3):
+-        return self._body, self.name, self._parsed
+-
+-
+-class Null(Item):
+-    """
+-    A null item.
+- """ +- +- def __init__(self) -> None: +- pass +- +- def unwrap(self) -> str: +- return None +- +- @property +- def discriminant(self) -> int: +- return -1 +- +- @property +- def value(self) -> None: +- return None +- +- def as_string(self) -> str: +- return "" +- +- def _getstate(self, protocol=3) -> tuple: +- return () +diff --git a/src/poetry/core/_vendor/tomlkit/parser.py b/src/poetry/core/_vendor/tomlkit/parser.py +deleted file mode 100644 +index e390579..0000000 +--- a/src/poetry/core/_vendor/tomlkit/parser.py ++++ /dev/null +@@ -1,1132 +0,0 @@ +-import re +-import string +- +-from typing import List +-from typing import Optional +-from typing import Tuple +-from typing import Type +-from typing import Union +- +-from tomlkit._compat import decode +-from tomlkit._utils import RFC_3339_LOOSE +-from tomlkit._utils import _escaped +-from tomlkit._utils import parse_rfc3339 +-from tomlkit.container import Container +-from tomlkit.exceptions import EmptyKeyError +-from tomlkit.exceptions import EmptyTableNameError +-from tomlkit.exceptions import InternalParserError +-from tomlkit.exceptions import InvalidCharInStringError +-from tomlkit.exceptions import InvalidControlChar +-from tomlkit.exceptions import InvalidDateError +-from tomlkit.exceptions import InvalidDateTimeError +-from tomlkit.exceptions import InvalidNumberError +-from tomlkit.exceptions import InvalidTimeError +-from tomlkit.exceptions import InvalidUnicodeValueError +-from tomlkit.exceptions import ParseError +-from tomlkit.exceptions import UnexpectedCharError +-from tomlkit.exceptions import UnexpectedEofError +-from tomlkit.items import AoT +-from tomlkit.items import Array +-from tomlkit.items import Bool +-from tomlkit.items import BoolType +-from tomlkit.items import Comment +-from tomlkit.items import Date +-from tomlkit.items import DateTime +-from tomlkit.items import Float +-from tomlkit.items import InlineTable +-from tomlkit.items import Integer +-from tomlkit.items import Item +-from tomlkit.items import Key +-from tomlkit.items import KeyType +-from tomlkit.items import Null +-from tomlkit.items import SingleKey +-from tomlkit.items import String +-from tomlkit.items import StringType +-from tomlkit.items import Table +-from tomlkit.items import Time +-from tomlkit.items import Trivia +-from tomlkit.items import Whitespace +-from tomlkit.source import Source +-from tomlkit.toml_char import TOMLChar +-from tomlkit.toml_document import TOMLDocument +- +- +-CTRL_I = 0x09 # Tab +-CTRL_J = 0x0A # Line feed +-CTRL_M = 0x0D # Carriage return +-CTRL_CHAR_LIMIT = 0x1F +-CHR_DEL = 0x7F +- +- +-class Parser: +- """ +- Parser for TOML documents. +- """ +- +- def __init__(self, string: str) -> None: +- # Input to parse +- self._src = Source(decode(string)) +- +- self._aot_stack: List[Key] = [] +- +- @property +- def _state(self): +- return self._src.state +- +- @property +- def _idx(self): +- return self._src.idx +- +- @property +- def _current(self): +- return self._src.current +- +- @property +- def _marker(self): +- return self._src.marker +- +- def extract(self) -> str: +- """ +- Extracts the value between marker and index +- """ +- return self._src.extract() +- +- def inc(self, exception: Optional[Type[ParseError]] = None) -> bool: +- """ +- Increments the parser if the end of the input has not been reached. +- Returns whether or not it was able to advance. 
+- """ +- return self._src.inc(exception=exception) +- +- def inc_n(self, n: int, exception: Optional[Type[ParseError]] = None) -> bool: +- """ +- Increments the parser by n characters +- if the end of the input has not been reached. +- """ +- return self._src.inc_n(n=n, exception=exception) +- +- def consume(self, chars, min=0, max=-1): +- """ +- Consume chars until min/max is satisfied is valid. +- """ +- return self._src.consume(chars=chars, min=min, max=max) +- +- def end(self) -> bool: +- """ +- Returns True if the parser has reached the end of the input. +- """ +- return self._src.end() +- +- def mark(self) -> None: +- """ +- Sets the marker to the index's current position +- """ +- self._src.mark() +- +- def parse_error(self, exception=ParseError, *args, **kwargs): +- """ +- Creates a generic "parse error" at the current position. +- """ +- return self._src.parse_error(exception, *args, **kwargs) +- +- def parse(self) -> TOMLDocument: +- body = TOMLDocument(True) +- +- # Take all keyvals outside of tables/AoT's. +- while not self.end(): +- # Break out if a table is found +- if self._current == "[": +- break +- +- # Otherwise, take and append one KV +- item = self._parse_item() +- if not item: +- break +- +- key, value = item +- if (key is not None and key.is_multi()) or not self._merge_ws(value, body): +- # We actually have a table +- try: +- body.append(key, value) +- except Exception as e: +- raise self.parse_error(ParseError, str(e)) from e +- +- self.mark() +- +- while not self.end(): +- key, value = self._parse_table() +- if isinstance(value, Table) and value.is_aot_element(): +- # This is just the first table in an AoT. Parse the rest of the array +- # along with it. +- value = self._parse_aot(value, key) +- +- try: +- body.append(key, value) +- except Exception as e: +- raise self.parse_error(ParseError, str(e)) from e +- +- body.parsing(False) +- +- return body +- +- def _merge_ws(self, item: Item, container: Container) -> bool: +- """ +- Merges the given Item with the last one currently in the given Container if +- both are whitespace items. +- +- Returns True if the items were merged. +- """ +- last = container.last_item() +- if not last: +- return False +- +- if not isinstance(item, Whitespace) or not isinstance(last, Whitespace): +- return False +- +- start = self._idx - (len(last.s) + len(item.s)) +- container.body[-1] = ( +- container.body[-1][0], +- Whitespace(self._src[start : self._idx]), +- ) +- +- return True +- +- def _is_child(self, parent: Key, child: Key) -> bool: +- """ +- Returns whether a key is strictly a child of another key. +- AoT siblings are not considered children of one another. +- """ +- parent_parts = tuple(parent) +- child_parts = tuple(child) +- +- if parent_parts == child_parts: +- return False +- +- return parent_parts == child_parts[: len(parent_parts)] +- +- def _parse_item(self) -> Optional[Tuple[Optional[Key], Item]]: +- """ +- Attempts to parse the next item and returns it, along with its key +- if the item is value-like. +- """ +- self.mark() +- with self._state as state: +- while True: +- c = self._current +- if c == "\n": +- # Found a newline; Return all whitespace found up to this point. +- self.inc() +- +- return None, Whitespace(self.extract()) +- elif c in " \t\r": +- # Skip whitespace. 
+- if not self.inc(): +- return None, Whitespace(self.extract()) +- elif c == "#": +- # Found a comment, parse it +- indent = self.extract() +- cws, comment, trail = self._parse_comment_trail() +- +- return None, Comment(Trivia(indent, cws, comment, trail)) +- elif c == "[": +- # Found a table, delegate to the calling function. +- return +- else: +- # Beginning of a KV pair. +- # Return to beginning of whitespace so it gets included +- # as indentation for the KV about to be parsed. +- state.restore = True +- break +- +- return self._parse_key_value(True) +- +- def _parse_comment_trail(self, parse_trail: bool = True) -> Tuple[str, str, str]: +- """ +- Returns (comment_ws, comment, trail) +- If there is no comment, comment_ws and comment will +- simply be empty. +- """ +- if self.end(): +- return "", "", "" +- +- comment = "" +- comment_ws = "" +- self.mark() +- +- while True: +- c = self._current +- +- if c == "\n": +- break +- elif c == "#": +- comment_ws = self.extract() +- +- self.mark() +- self.inc() # Skip # +- +- # The comment itself +- while not self.end() and not self._current.is_nl(): +- code = ord(self._current) +- if code == CHR_DEL or code <= CTRL_CHAR_LIMIT and code != CTRL_I: +- raise self.parse_error(InvalidControlChar, code, "comments") +- +- if not self.inc(): +- break +- +- comment = self.extract() +- self.mark() +- +- break +- elif c in " \t\r": +- self.inc() +- else: +- raise self.parse_error(UnexpectedCharError, c) +- +- if self.end(): +- break +- +- trail = "" +- if parse_trail: +- while self._current.is_spaces() and self.inc(): +- pass +- +- if self._current == "\r": +- self.inc() +- +- if self._current == "\n": +- self.inc() +- +- if self._idx != self._marker or self._current.is_ws(): +- trail = self.extract() +- +- return comment_ws, comment, trail +- +- def _parse_key_value(self, parse_comment: bool = False) -> Tuple[Key, Item]: +- # Leading indent +- self.mark() +- +- while self._current.is_spaces() and self.inc(): +- pass +- +- indent = self.extract() +- +- # Key +- key = self._parse_key() +- +- self.mark() +- +- found_equals = self._current == "=" +- while self._current.is_kv_sep() and self.inc(): +- if self._current == "=": +- if found_equals: +- raise self.parse_error(UnexpectedCharError, "=") +- else: +- found_equals = True +- if not found_equals: +- raise self.parse_error(UnexpectedCharError, self._current) +- +- if not key.sep: +- key.sep = self.extract() +- else: +- key.sep += self.extract() +- +- # Value +- val = self._parse_value() +- # Comment +- if parse_comment: +- cws, comment, trail = self._parse_comment_trail() +- meta = val.trivia +- if not meta.comment_ws: +- meta.comment_ws = cws +- +- meta.comment = comment +- meta.trail = trail +- else: +- val.trivia.trail = "" +- +- val.trivia.indent = indent +- +- return key, val +- +- def _parse_key(self) -> Key: +- """ +- Parses a Key at the current position; +- WS before the key must be exhausted first at the callsite. +- """ +- self.mark() +- while self._current.is_spaces() and self.inc(): +- # Skip any leading whitespace +- pass +- if self._current in "\"'": +- return self._parse_quoted_key() +- else: +- return self._parse_bare_key() +- +- def _parse_quoted_key(self) -> Key: +- """ +- Parses a key enclosed in either single or double quotes. 
+- """ +- # Extract the leading whitespace +- original = self.extract() +- quote_style = self._current +- key_type = next((t for t in KeyType if t.value == quote_style), None) +- +- if key_type is None: +- raise RuntimeError("Should not have entered _parse_quoted_key()") +- +- key_str = self._parse_string( +- StringType.SLB if key_type == KeyType.Basic else StringType.SLL +- ) +- if key_str._t.is_multiline(): +- raise self.parse_error(UnexpectedCharError, key_str._t.value) +- original += key_str.as_string() +- self.mark() +- while self._current.is_spaces() and self.inc(): +- pass +- original += self.extract() +- key = SingleKey(str(key_str), t=key_type, sep="", original=original) +- if self._current == ".": +- self.inc() +- key = key.concat(self._parse_key()) +- +- return key +- +- def _parse_bare_key(self) -> Key: +- """ +- Parses a bare key. +- """ +- while ( +- self._current.is_bare_key_char() or self._current.is_spaces() +- ) and self.inc(): +- pass +- +- original = self.extract() +- key = original.strip() +- if not key: +- # Empty key +- raise self.parse_error(EmptyKeyError) +- +- if " " in key: +- # Bare key with spaces in it +- raise self.parse_error(ParseError, f'Invalid key "{key}"') +- +- key = SingleKey(key, KeyType.Bare, "", original) +- +- if self._current == ".": +- self.inc() +- key = key.concat(self._parse_key()) +- +- return key +- +- def _parse_value(self) -> Item: +- """ +- Attempts to parse a value at the current position. +- """ +- self.mark() +- c = self._current +- trivia = Trivia() +- +- if c == StringType.SLB.value: +- return self._parse_basic_string() +- elif c == StringType.SLL.value: +- return self._parse_literal_string() +- elif c == BoolType.TRUE.value[0]: +- return self._parse_true() +- elif c == BoolType.FALSE.value[0]: +- return self._parse_false() +- elif c == "[": +- return self._parse_array() +- elif c == "{": +- return self._parse_inline_table() +- elif c in "+-" or self._peek(4) in { +- "+inf", +- "-inf", +- "inf", +- "+nan", +- "-nan", +- "nan", +- }: +- # Number +- while self._current not in " \t\n\r#,]}" and self.inc(): +- pass +- +- raw = self.extract() +- +- item = self._parse_number(raw, trivia) +- if item is not None: +- return item +- +- raise self.parse_error(InvalidNumberError) +- elif c in string.digits: +- # Integer, Float, Date, Time or DateTime +- while self._current not in " \t\n\r#,]}" and self.inc(): +- pass +- +- raw = self.extract() +- +- m = RFC_3339_LOOSE.match(raw) +- if m: +- if m.group(1) and m.group(5): +- # datetime +- try: +- dt = parse_rfc3339(raw) +- return DateTime( +- dt.year, +- dt.month, +- dt.day, +- dt.hour, +- dt.minute, +- dt.second, +- dt.microsecond, +- dt.tzinfo, +- trivia, +- raw, +- ) +- except ValueError: +- raise self.parse_error(InvalidDateTimeError) +- +- if m.group(1): +- try: +- dt = parse_rfc3339(raw) +- date = Date(dt.year, dt.month, dt.day, trivia, raw) +- self.mark() +- while self._current not in "\t\n\r#,]}" and self.inc(): +- pass +- +- time_raw = self.extract() +- if not time_raw.strip(): +- trivia.comment_ws = time_raw +- return date +- +- dt = parse_rfc3339(raw + time_raw) +- return DateTime( +- dt.year, +- dt.month, +- dt.day, +- dt.hour, +- dt.minute, +- dt.second, +- dt.microsecond, +- dt.tzinfo, +- trivia, +- raw + time_raw, +- ) +- except ValueError: +- raise self.parse_error(InvalidDateError) +- +- if m.group(5): +- try: +- t = parse_rfc3339(raw) +- return Time( +- t.hour, +- t.minute, +- t.second, +- t.microsecond, +- t.tzinfo, +- trivia, +- raw, +- ) +- except ValueError: +- raise 
self.parse_error(InvalidTimeError) +- +- item = self._parse_number(raw, trivia) +- if item is not None: +- return item +- +- raise self.parse_error(InvalidNumberError) +- else: +- raise self.parse_error(UnexpectedCharError, c) +- +- def _parse_true(self): +- return self._parse_bool(BoolType.TRUE) +- +- def _parse_false(self): +- return self._parse_bool(BoolType.FALSE) +- +- def _parse_bool(self, style: BoolType) -> Bool: +- with self._state: +- style = BoolType(style) +- +- # only keep parsing for bool if the characters match the style +- # try consuming rest of chars in style +- for c in style: +- self.consume(c, min=1, max=1) +- +- return Bool(style, Trivia()) +- +- def _parse_array(self) -> Array: +- # Consume opening bracket, EOF here is an issue (middle of array) +- self.inc(exception=UnexpectedEofError) +- +- elems: List[Item] = [] +- prev_value = None +- while True: +- # consume whitespace +- mark = self._idx +- self.consume(TOMLChar.SPACES + TOMLChar.NL) +- indent = self._src[mark : self._idx] +- newline = set(TOMLChar.NL) & set(indent) +- if newline: +- elems.append(Whitespace(indent)) +- continue +- +- # consume comment +- if self._current == "#": +- cws, comment, trail = self._parse_comment_trail(parse_trail=False) +- elems.append(Comment(Trivia(indent, cws, comment, trail))) +- continue +- +- # consume indent +- if indent: +- elems.append(Whitespace(indent)) +- continue +- +- # consume value +- if not prev_value: +- try: +- elems.append(self._parse_value()) +- prev_value = True +- continue +- except UnexpectedCharError: +- pass +- +- # consume comma +- if prev_value and self._current == ",": +- self.inc(exception=UnexpectedEofError) +- elems.append(Whitespace(",")) +- prev_value = False +- continue +- +- # consume closing bracket +- if self._current == "]": +- # consume closing bracket, EOF here doesn't matter +- self.inc() +- break +- +- raise self.parse_error(UnexpectedCharError, self._current) +- +- try: +- res = Array(elems, Trivia()) +- except ValueError: +- pass +- else: +- return res +- +- def _parse_inline_table(self) -> InlineTable: +- # consume opening bracket, EOF here is an issue (middle of array) +- self.inc(exception=UnexpectedEofError) +- +- elems = Container(True) +- trailing_comma = None +- while True: +- # consume leading whitespace +- mark = self._idx +- self.consume(TOMLChar.SPACES) +- raw = self._src[mark : self._idx] +- if raw: +- elems.add(Whitespace(raw)) +- +- if not trailing_comma: +- # None: empty inline table +- # False: previous key-value pair was not followed by a comma +- if self._current == "}": +- # consume closing bracket, EOF here doesn't matter +- self.inc() +- break +- +- if ( +- trailing_comma is False +- or trailing_comma is None +- and self._current == "," +- ): +- # Either the previous key-value pair was not followed by a comma +- # or the table has an unexpected leading comma. 
+- raise self.parse_error(UnexpectedCharError, self._current) +- else: +- # True: previous key-value pair was followed by a comma +- if self._current == "}" or self._current == ",": +- raise self.parse_error(UnexpectedCharError, self._current) +- +- key, val = self._parse_key_value(False) +- elems.add(key, val) +- +- # consume trailing whitespace +- mark = self._idx +- self.consume(TOMLChar.SPACES) +- raw = self._src[mark : self._idx] +- if raw: +- elems.add(Whitespace(raw)) +- +- # consume trailing comma +- trailing_comma = self._current == "," +- if trailing_comma: +- # consume closing bracket, EOF here is an issue (middle of inline table) +- self.inc(exception=UnexpectedEofError) +- +- return InlineTable(elems, Trivia()) +- +- def _parse_number(self, raw: str, trivia: Trivia) -> Optional[Item]: +- # Leading zeros are not allowed +- sign = "" +- if raw.startswith(("+", "-")): +- sign = raw[0] +- raw = raw[1:] +- +- if len(raw) > 1 and ( +- raw.startswith("0") +- and not raw.startswith(("0.", "0o", "0x", "0b", "0e")) +- or sign +- and raw.startswith(".") +- ): +- return +- +- if raw.startswith(("0o", "0x", "0b")) and sign: +- return +- +- digits = "[0-9]" +- base = 10 +- if raw.startswith("0b"): +- digits = "[01]" +- base = 2 +- elif raw.startswith("0o"): +- digits = "[0-7]" +- base = 8 +- elif raw.startswith("0x"): +- digits = "[0-9a-f]" +- base = 16 +- +- # Underscores should be surrounded by digits +- clean = re.sub(f"(?i)(?<={digits})_(?={digits})", "", raw).lower() +- +- if "_" in clean: +- return +- +- if ( +- clean.endswith(".") +- or not clean.startswith("0x") +- and clean.split("e", 1)[0].endswith(".") +- ): +- return +- +- try: +- return Integer(int(sign + clean, base), trivia, sign + raw) +- except ValueError: +- try: +- return Float(float(sign + clean), trivia, sign + raw) +- except ValueError: +- return +- +- def _parse_literal_string(self) -> String: +- with self._state: +- return self._parse_string(StringType.SLL) +- +- def _parse_basic_string(self) -> String: +- with self._state: +- return self._parse_string(StringType.SLB) +- +- def _parse_escaped_char(self, multiline): +- if multiline and self._current.is_ws(): +- # When the last non-whitespace character on a line is +- # a \, it will be trimmed along with all whitespace +- # (including newlines) up to the next non-whitespace +- # character or closing delimiter. 
+- # """\ +- # hello \ +- # world""" +- tmp = "" +- while self._current.is_ws(): +- tmp += self._current +- # consume the whitespace, EOF here is an issue +- # (middle of string) +- self.inc(exception=UnexpectedEofError) +- continue +- +- # the escape followed by whitespace must have a newline +- # before any other chars +- if "\n" not in tmp: +- raise self.parse_error(InvalidCharInStringError, self._current) +- +- return "" +- +- if self._current in _escaped: +- c = _escaped[self._current] +- +- # consume this char, EOF here is an issue (middle of string) +- self.inc(exception=UnexpectedEofError) +- +- return c +- +- if self._current in {"u", "U"}: +- # this needs to be a unicode +- u, ue = self._peek_unicode(self._current == "U") +- if u is not None: +- # consume the U char and the unicode value +- self.inc_n(len(ue) + 1) +- +- return u +- +- raise self.parse_error(InvalidUnicodeValueError) +- +- raise self.parse_error(InvalidCharInStringError, self._current) +- +- def _parse_string(self, delim: StringType) -> String: +- # only keep parsing for string if the current character matches the delim +- if self._current != delim.unit: +- raise self.parse_error( +- InternalParserError, +- f"Invalid character for string type {delim}", +- ) +- +- # consume the opening/first delim, EOF here is an issue +- # (middle of string or middle of delim) +- self.inc(exception=UnexpectedEofError) +- +- if self._current == delim.unit: +- # consume the closing/second delim, we do not care if EOF occurs as +- # that would simply imply an empty single line string +- if not self.inc() or self._current != delim.unit: +- # Empty string +- return String(delim, "", "", Trivia()) +- +- # consume the third delim, EOF here is an issue (middle of string) +- self.inc(exception=UnexpectedEofError) +- +- delim = delim.toggle() # convert delim to multi delim +- +- self.mark() # to extract the original string with whitespace and all +- value = "" +- +- # A newline immediately following the opening delimiter will be trimmed. +- if delim.is_multiline() and self._current == "\n": +- # consume the newline, EOF here is an issue (middle of string) +- self.inc(exception=UnexpectedEofError) +- +- escaped = False # whether the previous key was ESCAPE +- while True: +- code = ord(self._current) +- if ( +- delim.is_singleline() +- and not escaped +- and (code == CHR_DEL or code <= CTRL_CHAR_LIMIT and code != CTRL_I) +- ) or ( +- delim.is_multiline() +- and not escaped +- and ( +- code == CHR_DEL +- or code <= CTRL_CHAR_LIMIT +- and code not in [CTRL_I, CTRL_J, CTRL_M] +- ) +- ): +- raise self.parse_error(InvalidControlChar, code, "strings") +- elif not escaped and self._current == delim.unit: +- # try to process current as a closing delim +- original = self.extract() +- +- close = "" +- if delim.is_multiline(): +- # Consume the delimiters to see if we are at the end of the string +- close = "" +- while self._current == delim.unit: +- close += self._current +- self.inc() +- +- if len(close) < 3: +- # Not a triple quote, leave in result as-is. 
+- # Adding back the characters we already consumed +- value += close +- continue +- +- if len(close) == 3: +- # We are at the end of the string +- return String(delim, value, original, Trivia()) +- +- if len(close) >= 6: +- raise self.parse_error(InvalidCharInStringError, self._current) +- +- value += close[:-3] +- original += close[:-3] +- +- return String(delim, value, original, Trivia()) +- else: +- # consume the closing delim, we do not care if EOF occurs as +- # that would simply imply the end of self._src +- self.inc() +- +- return String(delim, value, original, Trivia()) +- elif delim.is_basic() and escaped: +- # attempt to parse the current char as an escaped value, an exception +- # is raised if this fails +- value += self._parse_escaped_char(delim.is_multiline()) +- +- # no longer escaped +- escaped = False +- elif delim.is_basic() and self._current == "\\": +- # the next char is being escaped +- escaped = True +- +- # consume this char, EOF here is an issue (middle of string) +- self.inc(exception=UnexpectedEofError) +- else: +- # this is either a literal string where we keep everything as is, +- # or this is not a special escaped char in a basic string +- value += self._current +- +- # consume this char, EOF here is an issue (middle of string) +- self.inc(exception=UnexpectedEofError) +- +- def _parse_table( +- self, parent_name: Optional[Key] = None, parent: Optional[Table] = None +- ) -> Tuple[Key, Union[Table, AoT]]: +- """ +- Parses a table element. +- """ +- if self._current != "[": +- raise self.parse_error( +- InternalParserError, "_parse_table() called on non-bracket character." +- ) +- +- indent = self.extract() +- self.inc() # Skip opening bracket +- +- if self.end(): +- raise self.parse_error(UnexpectedEofError) +- +- is_aot = False +- if self._current == "[": +- if not self.inc(): +- raise self.parse_error(UnexpectedEofError) +- +- is_aot = True +- try: +- key = self._parse_key() +- except EmptyKeyError: +- raise self.parse_error(EmptyTableNameError) from None +- if self.end(): +- raise self.parse_error(UnexpectedEofError) +- elif self._current != "]": +- raise self.parse_error(UnexpectedCharError, self._current) +- elif not key.key.strip(): +- raise self.parse_error(EmptyTableNameError) +- +- key.sep = "" +- full_key = key +- name_parts = tuple(key) +- if any(" " in part.key.strip() and part.is_bare() for part in name_parts): +- raise self.parse_error( +- ParseError, f'Invalid table name "{full_key.as_string()}"' +- ) +- +- missing_table = False +- if parent_name: +- parent_name_parts = tuple(parent_name) +- else: +- parent_name_parts = () +- +- if len(name_parts) > len(parent_name_parts) + 1: +- missing_table = True +- +- name_parts = name_parts[len(parent_name_parts) :] +- +- values = Container(True) +- +- self.inc() # Skip closing bracket +- if is_aot: +- # TODO: Verify close bracket +- self.inc() +- +- cws, comment, trail = self._parse_comment_trail() +- +- result = Null() +- table = Table( +- values, +- Trivia(indent, cws, comment, trail), +- is_aot, +- name=name_parts[0].key if name_parts else key.key, +- display_name=full_key.as_string(), +- ) +- +- if len(name_parts) > 1: +- if missing_table: +- # Missing super table +- # i.e. 
a table initialized like this: [foo.bar] +- # without initializing [foo] +- # +- # So we have to create the parent tables +- table = Table( +- Container(True), +- Trivia(indent, cws, comment, trail), +- is_aot and name_parts[0] in self._aot_stack, +- is_super_table=True, +- name=name_parts[0].key, +- ) +- +- result = table +- key = name_parts[0] +- +- for i, _name in enumerate(name_parts[1:]): +- child = table.get( +- _name, +- Table( +- Container(True), +- Trivia(indent, cws, comment, trail), +- is_aot and i == len(name_parts) - 2, +- is_super_table=i < len(name_parts) - 2, +- name=_name.key, +- display_name=full_key.as_string() +- if i == len(name_parts) - 2 +- else None, +- ), +- ) +- +- if is_aot and i == len(name_parts) - 2: +- table.raw_append(_name, AoT([child], name=table.name, parsed=True)) +- else: +- table.raw_append(_name, child) +- +- table = child +- values = table.value +- else: +- if name_parts: +- key = name_parts[0] +- +- while not self.end(): +- item = self._parse_item() +- if item: +- _key, item = item +- if not self._merge_ws(item, values): +- table.raw_append(_key, item) +- else: +- if self._current == "[": +- _, key_next = self._peek_table() +- +- if self._is_child(full_key, key_next): +- key_next, table_next = self._parse_table(full_key, table) +- +- table.raw_append(key_next, table_next) +- +- # Picking up any sibling +- while not self.end(): +- _, key_next = self._peek_table() +- +- if not self._is_child(full_key, key_next): +- break +- +- key_next, table_next = self._parse_table(full_key, table) +- +- table.raw_append(key_next, table_next) +- +- break +- else: +- raise self.parse_error( +- InternalParserError, +- "_parse_item() returned None on a non-bracket character.", +- ) +- +- if isinstance(result, Null): +- result = table +- +- if is_aot and (not self._aot_stack or full_key != self._aot_stack[-1]): +- result = self._parse_aot(result, full_key) +- +- return key, result +- +- def _peek_table(self) -> Tuple[bool, Key]: +- """ +- Peeks ahead non-intrusively by cloning then restoring the +- initial state of the parser. +- +- Returns the name of the table about to be parsed, +- as well as whether it is part of an AoT. +- """ +- # we always want to restore after exiting this scope +- with self._state(save_marker=True, restore=True): +- if self._current != "[": +- raise self.parse_error( +- InternalParserError, +- "_peek_table() entered on non-bracket character", +- ) +- +- # AoT +- self.inc() +- is_aot = False +- if self._current == "[": +- self.inc() +- is_aot = True +- try: +- return is_aot, self._parse_key() +- except EmptyKeyError: +- raise self.parse_error(EmptyTableNameError) from None +- +- def _parse_aot(self, first: Table, name_first: Key) -> AoT: +- """ +- Parses all siblings of the provided table first and bundles them into +- an AoT. +- """ +- payload = [first] +- self._aot_stack.append(name_first) +- while not self.end(): +- is_aot_next, name_next = self._peek_table() +- if is_aot_next and name_next == name_first: +- _, table = self._parse_table(name_first) +- payload.append(table) +- else: +- break +- +- self._aot_stack.pop() +- +- return AoT(payload, parsed=True) +- +- def _peek(self, n: int) -> str: +- """ +- Peeks ahead n characters. +- +- n is the max number of characters that will be peeked. 
+- """ +- # we always want to restore after exiting this scope +- with self._state(restore=True): +- buf = "" +- for _ in range(n): +- if self._current not in " \t\n\r#,]}" + self._src.EOF: +- buf += self._current +- self.inc() +- continue +- +- break +- return buf +- +- def _peek_unicode(self, is_long: bool) -> Tuple[Optional[str], Optional[str]]: +- """ +- Peeks ahead non-intrusively by cloning then restoring the +- initial state of the parser. +- +- Returns the unicode value is it's a valid one else None. +- """ +- # we always want to restore after exiting this scope +- with self._state(save_marker=True, restore=True): +- if self._current not in {"u", "U"}: +- raise self.parse_error( +- InternalParserError, "_peek_unicode() entered on non-unicode value" +- ) +- +- self.inc() # Dropping prefix +- self.mark() +- +- if is_long: +- chars = 8 +- else: +- chars = 4 +- +- if not self.inc_n(chars): +- value, extracted = None, None +- else: +- extracted = self.extract() +- +- if extracted[0].lower() == "d" and extracted[1].strip("01234567"): +- return None, None +- +- try: +- value = chr(int(extracted, 16)) +- except (ValueError, OverflowError): +- value = None +- +- return value, extracted +diff --git a/src/poetry/core/_vendor/tomlkit/py.typed b/src/poetry/core/_vendor/tomlkit/py.typed +deleted file mode 100644 +index e69de29..0000000 +diff --git a/src/poetry/core/_vendor/tomlkit/source.py b/src/poetry/core/_vendor/tomlkit/source.py +deleted file mode 100644 +index d1a53cd..0000000 +--- a/src/poetry/core/_vendor/tomlkit/source.py ++++ /dev/null +@@ -1,181 +0,0 @@ +-from copy import copy +-from typing import Any +-from typing import Optional +-from typing import Tuple +-from typing import Type +- +-from tomlkit.exceptions import ParseError +-from tomlkit.exceptions import UnexpectedCharError +-from tomlkit.toml_char import TOMLChar +- +- +-class _State: +- def __init__( +- self, +- source: "Source", +- save_marker: Optional[bool] = False, +- restore: Optional[bool] = False, +- ) -> None: +- self._source = source +- self._save_marker = save_marker +- self.restore = restore +- +- def __enter__(self) -> "_State": +- # Entering this context manager - save the state +- self._chars = copy(self._source._chars) +- self._idx = self._source._idx +- self._current = self._source._current +- self._marker = self._source._marker +- +- return self +- +- def __exit__(self, exception_type, exception_val, trace): +- # Exiting this context manager - restore the prior state +- if self.restore or exception_type: +- self._source._chars = self._chars +- self._source._idx = self._idx +- self._source._current = self._current +- if self._save_marker: +- self._source._marker = self._marker +- +- +-class _StateHandler: +- """ +- State preserver for the Parser. 
+- """ +- +- def __init__(self, source: "Source") -> None: +- self._source = source +- self._states = [] +- +- def __call__(self, *args, **kwargs): +- return _State(self._source, *args, **kwargs) +- +- def __enter__(self) -> None: +- state = self() +- self._states.append(state) +- return state.__enter__() +- +- def __exit__(self, exception_type, exception_val, trace): +- state = self._states.pop() +- return state.__exit__(exception_type, exception_val, trace) +- +- +-class Source(str): +- EOF = TOMLChar("\0") +- +- def __init__(self, _: str) -> None: +- super().__init__() +- +- # Collection of TOMLChars +- self._chars = iter([(i, TOMLChar(c)) for i, c in enumerate(self)]) +- +- self._idx = 0 +- self._marker = 0 +- self._current = TOMLChar("") +- +- self._state = _StateHandler(self) +- +- self.inc() +- +- def reset(self): +- # initialize both idx and current +- self.inc() +- +- # reset marker +- self.mark() +- +- @property +- def state(self) -> _StateHandler: +- return self._state +- +- @property +- def idx(self) -> int: +- return self._idx +- +- @property +- def current(self) -> TOMLChar: +- return self._current +- +- @property +- def marker(self) -> int: +- return self._marker +- +- def extract(self) -> str: +- """ +- Extracts the value between marker and index +- """ +- return self[self._marker : self._idx] +- +- def inc(self, exception: Optional[Type[ParseError]] = None) -> bool: +- """ +- Increments the parser if the end of the input has not been reached. +- Returns whether or not it was able to advance. +- """ +- try: +- self._idx, self._current = next(self._chars) +- +- return True +- except StopIteration: +- self._idx = len(self) +- self._current = self.EOF +- if exception: +- raise self.parse_error(exception) +- +- return False +- +- def inc_n(self, n: int, exception: Optional[Type[ParseError]] = None) -> bool: +- """ +- Increments the parser by n characters +- if the end of the input has not been reached. +- """ +- return all(self.inc(exception=exception) for _ in range(n)) +- +- def consume(self, chars, min=0, max=-1): +- """ +- Consume chars until min/max is satisfied is valid. +- """ +- while self.current in chars and max != 0: +- min -= 1 +- max -= 1 +- if not self.inc(): +- break +- +- # failed to consume minimum number of characters +- if min > 0: +- raise self.parse_error(UnexpectedCharError, self.current) +- +- def end(self) -> bool: +- """ +- Returns True if the parser has reached the end of the input. +- """ +- return self._current is self.EOF +- +- def mark(self) -> None: +- """ +- Sets the marker to the index's current position +- """ +- self._marker = self._idx +- +- def parse_error( +- self, +- exception: Type[ParseError] = ParseError, +- *args: Any, +- **kwargs: Any, +- ) -> ParseError: +- """ +- Creates a generic "parse error" at the current position. 
+- """ +- line, col = self._to_linecol() +- +- return exception(line, col, *args, **kwargs) +- +- def _to_linecol(self) -> Tuple[int, int]: +- cur = 0 +- for i, line in enumerate(self.splitlines()): +- if cur + len(line) + 1 > self.idx: +- return (i + 1, self.idx - cur) +- +- cur += len(line) + 1 +- +- return len(self.splitlines()), 0 +diff --git a/src/poetry/core/_vendor/tomlkit/toml_char.py b/src/poetry/core/_vendor/tomlkit/toml_char.py +deleted file mode 100644 +index b4bb411..0000000 +--- a/src/poetry/core/_vendor/tomlkit/toml_char.py ++++ /dev/null +@@ -1,52 +0,0 @@ +-import string +- +- +-class TOMLChar(str): +- def __init__(self, c): +- super().__init__() +- +- if len(self) > 1: +- raise ValueError("A TOML character must be of length 1") +- +- BARE = string.ascii_letters + string.digits + "-_" +- KV = "= \t" +- NUMBER = string.digits + "+-_.e" +- SPACES = " \t" +- NL = "\n\r" +- WS = SPACES + NL +- +- def is_bare_key_char(self) -> bool: +- """ +- Whether the character is a valid bare key name or not. +- """ +- return self in self.BARE +- +- def is_kv_sep(self) -> bool: +- """ +- Whether the character is a valid key/value separator or not. +- """ +- return self in self.KV +- +- def is_int_float_char(self) -> bool: +- """ +- Whether the character if a valid integer or float value character or not. +- """ +- return self in self.NUMBER +- +- def is_ws(self) -> bool: +- """ +- Whether the character is a whitespace character or not. +- """ +- return self in self.WS +- +- def is_nl(self) -> bool: +- """ +- Whether the character is a new line character or not. +- """ +- return self in self.NL +- +- def is_spaces(self) -> bool: +- """ +- Whether the character is a space or not +- """ +- return self in self.SPACES +diff --git a/src/poetry/core/_vendor/tomlkit/toml_document.py b/src/poetry/core/_vendor/tomlkit/toml_document.py +deleted file mode 100644 +index 71fac2e..0000000 +--- a/src/poetry/core/_vendor/tomlkit/toml_document.py ++++ /dev/null +@@ -1,7 +0,0 @@ +-from tomlkit.container import Container +- +- +-class TOMLDocument(Container): +- """ +- A TOML document. +- """ +diff --git a/src/poetry/core/_vendor/tomlkit/toml_file.py b/src/poetry/core/_vendor/tomlkit/toml_file.py +deleted file mode 100644 +index d05a62f..0000000 +--- a/src/poetry/core/_vendor/tomlkit/toml_file.py ++++ /dev/null +@@ -1,48 +0,0 @@ +-import os +-import re +- +-from tomlkit.api import loads +-from tomlkit.toml_document import TOMLDocument +- +- +-class TOMLFile: +- """ +- Represents a TOML file. +- +- :param path: path to the TOML file +- """ +- +- def __init__(self, path: str) -> None: +- self._path = path +- self._linesep = os.linesep +- +- def read(self) -> TOMLDocument: +- """Read the file content as a :class:`tomlkit.toml_document.TOMLDocument`.""" +- with open(self._path, encoding="utf-8", newline="") as f: +- content = f.read() +- +- # check if consistent line endings +- num_newline = content.count("\n") +- if num_newline > 0: +- num_win_eol = content.count("\r\n") +- if num_win_eol == num_newline: +- self._linesep = "\r\n" +- elif num_win_eol == 0: +- self._linesep = "\n" +- else: +- self._linesep = "mixed" +- +- return loads(content) +- +- def write(self, data: TOMLDocument) -> None: +- """Write the TOMLDocument to the file.""" +- content = data.as_string() +- +- # apply linesep +- if self._linesep == "\n": +- content = content.replace("\r\n", "\n") +- elif self._linesep == "\r\n": +- content = re.sub(r"(? 
0) and (alen >= elen - num_tv_tuples): +- return +- raise TypeError(f"Too {'many' if alen > elen else 'few'} parameters for {cls};" +- f" actual {alen}, expected {elen}") +- +- +-if sys.version_info >= (3, 10): +- def _should_collect_from_parameters(t): +- return isinstance( +- t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType) +- ) +-elif sys.version_info >= (3, 9): +- def _should_collect_from_parameters(t): +- return isinstance(t, (typing._GenericAlias, _types.GenericAlias)) +-else: +- def _should_collect_from_parameters(t): +- return isinstance(t, typing._GenericAlias) and not t._special +- +- +-def _collect_type_vars(types, typevar_types=None): +- """Collect all type variable contained in types in order of +- first appearance (lexicographic order). For example:: +- +- _collect_type_vars((T, List[S, T])) == (T, S) +- """ +- if typevar_types is None: +- typevar_types = typing.TypeVar +- tvars = [] +- for t in types: +- if ( +- isinstance(t, typevar_types) and +- t not in tvars and +- not _is_unpack(t) +- ): +- tvars.append(t) +- if _should_collect_from_parameters(t): +- tvars.extend([t for t in t.__parameters__ if t not in tvars]) +- return tuple(tvars) +- +- +-NoReturn = typing.NoReturn +- +-# Some unconstrained type variables. These are used by the container types. +-# (These are not for export.) +-T = typing.TypeVar('T') # Any type. +-KT = typing.TypeVar('KT') # Key type. +-VT = typing.TypeVar('VT') # Value type. +-T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers. +-T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant. +- +-ClassVar = typing.ClassVar +- +-# On older versions of typing there is an internal class named "Final". +-# 3.8+ +-if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7): +- Final = typing.Final +-# 3.7 +-else: +- class _FinalForm(typing._SpecialForm, _root=True): +- +- def __repr__(self): +- return 'typing_extensions.' + self._name +- +- def __getitem__(self, parameters): +- item = typing._type_check(parameters, +- f'{self._name} accepts only a single type.') +- return typing._GenericAlias(self, (item,)) +- +- Final = _FinalForm('Final', +- doc="""A special typing construct to indicate that a name +- cannot be re-assigned or overridden in a subclass. +- For example: +- +- MAX_SIZE: Final = 9000 +- MAX_SIZE += 1 # Error reported by type checker +- +- class Connection: +- TIMEOUT: Final[int] = 10 +- class FastConnector(Connection): +- TIMEOUT = 1 # Error reported by type checker +- +- There is no runtime checking of these properties.""") +- +-if sys.version_info >= (3, 11): +- final = typing.final +-else: +- # @final exists in 3.8+, but we backport it for all versions +- # before 3.11 to keep support for the __final__ attribute. +- # See https://bugs.python.org/issue46342 +- def final(f): +- """This decorator can be used to indicate to type checkers that +- the decorated method cannot be overridden, and decorated class +- cannot be subclassed. For example: +- +- class Base: +- @final +- def done(self) -> None: +- ... +- class Sub(Base): +- def done(self) -> None: # Error reported by type checker +- ... +- @final +- class Leaf: +- ... +- class Other(Leaf): # Error reported by type checker +- ... +- +- There is no runtime checking of these properties. The decorator +- sets the ``__final__`` attribute to ``True`` on the decorated object +- to allow runtime introspection. 
+- """ +- try: +- f.__final__ = True +- except (AttributeError, TypeError): +- # Skip the attribute silently if it is not writable. +- # AttributeError happens if the object has __slots__ or a +- # read-only property, TypeError if it's a builtin class. +- pass +- return f +- +- +-def IntVar(name): +- return typing.TypeVar(name) +- +- +-# 3.8+: +-if hasattr(typing, 'Literal'): +- Literal = typing.Literal +-# 3.7: +-else: +- class _LiteralForm(typing._SpecialForm, _root=True): +- +- def __repr__(self): +- return 'typing_extensions.' + self._name +- +- def __getitem__(self, parameters): +- return typing._GenericAlias(self, parameters) +- +- Literal = _LiteralForm('Literal', +- doc="""A type that can be used to indicate to type checkers +- that the corresponding value has a value literally equivalent +- to the provided parameter. For example: +- +- var: Literal[4] = 4 +- +- The type checker understands that 'var' is literally equal to +- the value 4 and no other value. +- +- Literal[...] cannot be subclassed. There is no runtime +- checking verifying that the parameter is actually a value +- instead of a type.""") +- +- +-_overload_dummy = typing._overload_dummy # noqa +- +- +-if hasattr(typing, "get_overloads"): # 3.11+ +- overload = typing.overload +- get_overloads = typing.get_overloads +- clear_overloads = typing.clear_overloads +-else: +- # {module: {qualname: {firstlineno: func}}} +- _overload_registry = collections.defaultdict( +- functools.partial(collections.defaultdict, dict) +- ) +- +- def overload(func): +- """Decorator for overloaded functions/methods. +- +- In a stub file, place two or more stub definitions for the same +- function in a row, each decorated with @overload. For example: +- +- @overload +- def utf8(value: None) -> None: ... +- @overload +- def utf8(value: bytes) -> bytes: ... +- @overload +- def utf8(value: str) -> bytes: ... +- +- In a non-stub file (i.e. a regular .py file), do the same but +- follow it with an implementation. The implementation should *not* +- be decorated with @overload. For example: +- +- @overload +- def utf8(value: None) -> None: ... +- @overload +- def utf8(value: bytes) -> bytes: ... +- @overload +- def utf8(value: str) -> bytes: ... +- def utf8(value): +- # implementation goes here +- +- The overloads for a function can be retrieved at runtime using the +- get_overloads() function. +- """ +- # classmethod and staticmethod +- f = getattr(func, "__func__", func) +- try: +- _overload_registry[f.__module__][f.__qualname__][ +- f.__code__.co_firstlineno +- ] = func +- except AttributeError: +- # Not a normal function; ignore. +- pass +- return _overload_dummy +- +- def get_overloads(func): +- """Return all defined overloads for *func* as a sequence.""" +- # classmethod and staticmethod +- f = getattr(func, "__func__", func) +- if f.__module__ not in _overload_registry: +- return [] +- mod_dict = _overload_registry[f.__module__] +- if f.__qualname__ not in mod_dict: +- return [] +- return list(mod_dict[f.__qualname__].values()) +- +- def clear_overloads(): +- """Clear all overloads in the registry.""" +- _overload_registry.clear() +- +- +-# This is not a real generic class. Don't use outside annotations. +-Type = typing.Type +- +-# Various ABCs mimicking those in collections.abc. +-# A few are simply re-exported for completeness. 
+- +- +-Awaitable = typing.Awaitable +-Coroutine = typing.Coroutine +-AsyncIterable = typing.AsyncIterable +-AsyncIterator = typing.AsyncIterator +-Deque = typing.Deque +-ContextManager = typing.ContextManager +-AsyncContextManager = typing.AsyncContextManager +-DefaultDict = typing.DefaultDict +- +-# 3.7.2+ +-if hasattr(typing, 'OrderedDict'): +- OrderedDict = typing.OrderedDict +-# 3.7.0-3.7.2 +-else: +- OrderedDict = typing._alias(collections.OrderedDict, (KT, VT)) +- +-Counter = typing.Counter +-ChainMap = typing.ChainMap +-AsyncGenerator = typing.AsyncGenerator +-NewType = typing.NewType +-Text = typing.Text +-TYPE_CHECKING = typing.TYPE_CHECKING +- +- +-_PROTO_WHITELIST = ['Callable', 'Awaitable', +- 'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator', +- 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', +- 'ContextManager', 'AsyncContextManager'] +- +- +-def _get_protocol_attrs(cls): +- attrs = set() +- for base in cls.__mro__[:-1]: # without object +- if base.__name__ in ('Protocol', 'Generic'): +- continue +- annotations = getattr(base, '__annotations__', {}) +- for attr in list(base.__dict__.keys()) + list(annotations.keys()): +- if (not attr.startswith('_abc_') and attr not in ( +- '__abstractmethods__', '__annotations__', '__weakref__', +- '_is_protocol', '_is_runtime_protocol', '__dict__', +- '__args__', '__slots__', +- '__next_in_mro__', '__parameters__', '__origin__', +- '__orig_bases__', '__extra__', '__tree_hash__', +- '__doc__', '__subclasshook__', '__init__', '__new__', +- '__module__', '_MutableMapping__marker', '_gorg')): +- attrs.add(attr) +- return attrs +- +- +-def _is_callable_members_only(cls): +- return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls)) +- +- +-def _maybe_adjust_parameters(cls): +- """Helper function used in Protocol.__init_subclass__ and _TypedDictMeta.__new__. +- +- The contents of this function are very similar +- to logic found in typing.Generic.__init_subclass__ +- on the CPython main branch. +- """ +- tvars = [] +- if '__orig_bases__' in cls.__dict__: +- tvars = typing._collect_type_vars(cls.__orig_bases__) +- # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn]. +- # If found, tvars must be a subset of it. +- # If not found, tvars is it. +- # Also check for and reject plain Generic, +- # and reject multiple Generic[...] and/or Protocol[...]. +- gvars = None +- for base in cls.__orig_bases__: +- if (isinstance(base, typing._GenericAlias) and +- base.__origin__ in (typing.Generic, Protocol)): +- # for error messages +- the_base = base.__origin__.__name__ +- if gvars is not None: +- raise TypeError( +- "Cannot inherit from Generic[...]" +- " and/or Protocol[...] multiple types.") +- gvars = base.__parameters__ +- if gvars is None: +- gvars = tvars +- else: +- tvarset = set(tvars) +- gvarset = set(gvars) +- if not tvarset <= gvarset: +- s_vars = ', '.join(str(t) for t in tvars if t not in gvarset) +- s_args = ', '.join(str(g) for g in gvars) +- raise TypeError(f"Some type variables ({s_vars}) are" +- f" not listed in {the_base}[{s_args}]") +- tvars = gvars +- cls.__parameters__ = tuple(tvars) +- +- +-# 3.8+ +-if hasattr(typing, 'Protocol'): +- Protocol = typing.Protocol +-# 3.7 +-else: +- +- def _no_init(self, *args, **kwargs): +- if type(self)._is_protocol: +- raise TypeError('Protocols cannot be instantiated') +- +- class _ProtocolMeta(abc.ABCMeta): +- # This metaclass is a bit unfortunate and exists only because of the lack +- # of __instancehook__. 
+- def __instancecheck__(cls, instance): +- # We need this method for situations where attributes are +- # assigned in __init__. +- if ((not getattr(cls, '_is_protocol', False) or +- _is_callable_members_only(cls)) and +- issubclass(instance.__class__, cls)): +- return True +- if cls._is_protocol: +- if all(hasattr(instance, attr) and +- (not callable(getattr(cls, attr, None)) or +- getattr(instance, attr) is not None) +- for attr in _get_protocol_attrs(cls)): +- return True +- return super().__instancecheck__(instance) +- +- class Protocol(metaclass=_ProtocolMeta): +- # There is quite a lot of overlapping code with typing.Generic. +- # Unfortunately it is hard to avoid this while these live in two different +- # modules. The duplicated code will be removed when Protocol is moved to typing. +- """Base class for protocol classes. Protocol classes are defined as:: +- +- class Proto(Protocol): +- def meth(self) -> int: +- ... +- +- Such classes are primarily used with static type checkers that recognize +- structural subtyping (static duck-typing), for example:: +- +- class C: +- def meth(self) -> int: +- return 0 +- +- def func(x: Proto) -> int: +- return x.meth() +- +- func(C()) # Passes static type check +- +- See PEP 544 for details. Protocol classes decorated with +- @typing_extensions.runtime act as simple-minded runtime protocol that checks +- only the presence of given attributes, ignoring their type signatures. +- +- Protocol classes can be generic, they are defined as:: +- +- class GenProto(Protocol[T]): +- def meth(self) -> T: +- ... +- """ +- __slots__ = () +- _is_protocol = True +- +- def __new__(cls, *args, **kwds): +- if cls is Protocol: +- raise TypeError("Type Protocol cannot be instantiated; " +- "it can only be used as a base class") +- return super().__new__(cls) +- +- @typing._tp_cache +- def __class_getitem__(cls, params): +- if not isinstance(params, tuple): +- params = (params,) +- if not params and cls is not typing.Tuple: +- raise TypeError( +- f"Parameter list to {cls.__qualname__}[...] cannot be empty") +- msg = "Parameters to generic types must be types." +- params = tuple(typing._type_check(p, msg) for p in params) # noqa +- if cls is Protocol: +- # Generic can only be subscripted with unique type variables. +- if not all(isinstance(p, typing.TypeVar) for p in params): +- i = 0 +- while isinstance(params[i], typing.TypeVar): +- i += 1 +- raise TypeError( +- "Parameters to Protocol[...] must all be type variables." +- f" Parameter {i + 1} is {params[i]}") +- if len(set(params)) != len(params): +- raise TypeError( +- "Parameters to Protocol[...] must all be unique") +- else: +- # Subscripting a regular Generic subclass. +- _check_generic(cls, params, len(cls.__parameters__)) +- return typing._GenericAlias(cls, params) +- +- def __init_subclass__(cls, *args, **kwargs): +- if '__orig_bases__' in cls.__dict__: +- error = typing.Generic in cls.__orig_bases__ +- else: +- error = typing.Generic in cls.__bases__ +- if error: +- raise TypeError("Cannot inherit from plain Generic") +- _maybe_adjust_parameters(cls) +- +- # Determine if this is a protocol or a concrete subclass. +- if not cls.__dict__.get('_is_protocol', None): +- cls._is_protocol = any(b is Protocol for b in cls.__bases__) +- +- # Set (or override) the protocol subclass hook. 
+- def _proto_hook(other): +- if not cls.__dict__.get('_is_protocol', None): +- return NotImplemented +- if not getattr(cls, '_is_runtime_protocol', False): +- if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: +- return NotImplemented +- raise TypeError("Instance and class checks can only be used with" +- " @runtime protocols") +- if not _is_callable_members_only(cls): +- if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: +- return NotImplemented +- raise TypeError("Protocols with non-method members" +- " don't support issubclass()") +- if not isinstance(other, type): +- # Same error as for issubclass(1, int) +- raise TypeError('issubclass() arg 1 must be a class') +- for attr in _get_protocol_attrs(cls): +- for base in other.__mro__: +- if attr in base.__dict__: +- if base.__dict__[attr] is None: +- return NotImplemented +- break +- annotations = getattr(base, '__annotations__', {}) +- if (isinstance(annotations, typing.Mapping) and +- attr in annotations and +- isinstance(other, _ProtocolMeta) and +- other._is_protocol): +- break +- else: +- return NotImplemented +- return True +- if '__subclasshook__' not in cls.__dict__: +- cls.__subclasshook__ = _proto_hook +- +- # We have nothing more to do for non-protocols. +- if not cls._is_protocol: +- return +- +- # Check consistency of bases. +- for base in cls.__bases__: +- if not (base in (object, typing.Generic) or +- base.__module__ == 'collections.abc' and +- base.__name__ in _PROTO_WHITELIST or +- isinstance(base, _ProtocolMeta) and base._is_protocol): +- raise TypeError('Protocols can only inherit from other' +- f' protocols, got {repr(base)}') +- cls.__init__ = _no_init +- +- +-# 3.8+ +-if hasattr(typing, 'runtime_checkable'): +- runtime_checkable = typing.runtime_checkable +-# 3.7 +-else: +- def runtime_checkable(cls): +- """Mark a protocol class as a runtime protocol, so that it +- can be used with isinstance() and issubclass(). Raise TypeError +- if applied to a non-protocol class. +- +- This allows a simple-minded structural check very similar to the +- one-offs in collections.abc such as Hashable. +- """ +- if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol: +- raise TypeError('@runtime_checkable can be only applied to protocol classes,' +- f' got {cls!r}') +- cls._is_runtime_protocol = True +- return cls +- +- +-# Exists for backwards compatibility. +-runtime = runtime_checkable +- +- +-# 3.8+ +-if hasattr(typing, 'SupportsIndex'): +- SupportsIndex = typing.SupportsIndex +-# 3.7 +-else: +- @runtime_checkable +- class SupportsIndex(Protocol): +- __slots__ = () +- +- @abc.abstractmethod +- def __index__(self) -> int: +- pass +- +- +-if hasattr(typing, "Required"): +- # The standard library TypedDict in Python 3.8 does not store runtime information +- # about which (if any) keys are optional. See https://bugs.python.org/issue38834 +- # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" +- # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059 +- # The standard library TypedDict below Python 3.11 does not store runtime +- # information about optional and required keys when using Required or NotRequired. +- # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11. 
+- TypedDict = typing.TypedDict +- _TypedDictMeta = typing._TypedDictMeta +- is_typeddict = typing.is_typeddict +-else: +- def _check_fails(cls, other): +- try: +- if sys._getframe(1).f_globals['__name__'] not in ['abc', +- 'functools', +- 'typing']: +- # Typed dicts are only for static structural subtyping. +- raise TypeError('TypedDict does not support instance and class checks') +- except (AttributeError, ValueError): +- pass +- return False +- +- def _dict_new(*args, **kwargs): +- if not args: +- raise TypeError('TypedDict.__new__(): not enough arguments') +- _, args = args[0], args[1:] # allow the "cls" keyword be passed +- return dict(*args, **kwargs) +- +- _dict_new.__text_signature__ = '($cls, _typename, _fields=None, /, **kwargs)' +- +- def _typeddict_new(*args, total=True, **kwargs): +- if not args: +- raise TypeError('TypedDict.__new__(): not enough arguments') +- _, args = args[0], args[1:] # allow the "cls" keyword be passed +- if args: +- typename, args = args[0], args[1:] # allow the "_typename" keyword be passed +- elif '_typename' in kwargs: +- typename = kwargs.pop('_typename') +- import warnings +- warnings.warn("Passing '_typename' as keyword argument is deprecated", +- DeprecationWarning, stacklevel=2) +- else: +- raise TypeError("TypedDict.__new__() missing 1 required positional " +- "argument: '_typename'") +- if args: +- try: +- fields, = args # allow the "_fields" keyword be passed +- except ValueError: +- raise TypeError('TypedDict.__new__() takes from 2 to 3 ' +- f'positional arguments but {len(args) + 2} ' +- 'were given') +- elif '_fields' in kwargs and len(kwargs) == 1: +- fields = kwargs.pop('_fields') +- import warnings +- warnings.warn("Passing '_fields' as keyword argument is deprecated", +- DeprecationWarning, stacklevel=2) +- else: +- fields = None +- +- if fields is None: +- fields = kwargs +- elif kwargs: +- raise TypeError("TypedDict takes either a dict or keyword arguments," +- " but not both") +- +- ns = {'__annotations__': dict(fields)} +- try: +- # Setting correct module is necessary to make typed dict classes pickleable. +- ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__') +- except (AttributeError, ValueError): +- pass +- +- return _TypedDictMeta(typename, (), ns, total=total) +- +- _typeddict_new.__text_signature__ = ('($cls, _typename, _fields=None,' +- ' /, *, total=True, **kwargs)') +- +- class _TypedDictMeta(type): +- def __init__(cls, name, bases, ns, total=True): +- super().__init__(name, bases, ns) +- +- def __new__(cls, name, bases, ns, total=True): +- # Create new typed dict class object. +- # This method is called directly when TypedDict is subclassed, +- # or via _typeddict_new when TypedDict is instantiated. This way +- # TypedDict supports all three syntaxes described in its docstring. +- # Subclasses and instances of TypedDict return actual dictionaries +- # via _dict_new. +- ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new +- # Don't insert typing.Generic into __bases__ here, +- # or Generic.__init_subclass__ will raise TypeError +- # in the super().__new__() call. +- # Instead, monkey-patch __bases__ onto the class after it's been created. 
+- tp_dict = super().__new__(cls, name, (dict,), ns) +- +- if any(issubclass(base, typing.Generic) for base in bases): +- tp_dict.__bases__ = (typing.Generic, dict) +- _maybe_adjust_parameters(tp_dict) +- +- annotations = {} +- own_annotations = ns.get('__annotations__', {}) +- msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" +- own_annotations = { +- n: typing._type_check(tp, msg) for n, tp in own_annotations.items() +- } +- required_keys = set() +- optional_keys = set() +- +- for base in bases: +- annotations.update(base.__dict__.get('__annotations__', {})) +- required_keys.update(base.__dict__.get('__required_keys__', ())) +- optional_keys.update(base.__dict__.get('__optional_keys__', ())) +- +- annotations.update(own_annotations) +- for annotation_key, annotation_type in own_annotations.items(): +- annotation_origin = get_origin(annotation_type) +- if annotation_origin is Annotated: +- annotation_args = get_args(annotation_type) +- if annotation_args: +- annotation_type = annotation_args[0] +- annotation_origin = get_origin(annotation_type) +- +- if annotation_origin is Required: +- required_keys.add(annotation_key) +- elif annotation_origin is NotRequired: +- optional_keys.add(annotation_key) +- elif total: +- required_keys.add(annotation_key) +- else: +- optional_keys.add(annotation_key) +- +- tp_dict.__annotations__ = annotations +- tp_dict.__required_keys__ = frozenset(required_keys) +- tp_dict.__optional_keys__ = frozenset(optional_keys) +- if not hasattr(tp_dict, '__total__'): +- tp_dict.__total__ = total +- return tp_dict +- +- __instancecheck__ = __subclasscheck__ = _check_fails +- +- TypedDict = _TypedDictMeta('TypedDict', (dict,), {}) +- TypedDict.__module__ = __name__ +- TypedDict.__doc__ = \ +- """A simple typed name space. At runtime it is equivalent to a plain dict. +- +- TypedDict creates a dictionary type that expects all of its +- instances to have a certain set of keys, with each key +- associated with a value of a consistent type. This expectation +- is not checked at runtime but is only enforced by type checkers. +- Usage:: +- +- class Point2D(TypedDict): +- x: int +- y: int +- label: str +- +- a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK +- b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check +- +- assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first') +- +- The type info can be accessed via the Point2D.__annotations__ dict, and +- the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets. +- TypedDict supports two additional equivalent forms:: +- +- Point2D = TypedDict('Point2D', x=int, y=int, label=str) +- Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) +- +- The class syntax is only supported in Python 3.6+, while two other +- syntax forms work for Python 2.7 and 3.2+ +- """ +- +- if hasattr(typing, "_TypedDictMeta"): +- _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta) +- else: +- _TYPEDDICT_TYPES = (_TypedDictMeta,) +- +- def is_typeddict(tp): +- """Check if an annotation is a TypedDict class +- +- For example:: +- class Film(TypedDict): +- title: str +- year: int +- +- is_typeddict(Film) # => True +- is_typeddict(Union[list, str]) # => False +- """ +- return isinstance(tp, tuple(_TYPEDDICT_TYPES)) +- +- +-if hasattr(typing, "assert_type"): +- assert_type = typing.assert_type +- +-else: +- def assert_type(__val, __typ): +- """Assert (to the type checker) that the value is of the given type. 
+- +- When the type checker encounters a call to assert_type(), it +- emits an error if the value is not of the specified type:: +- +- def greet(name: str) -> None: +- assert_type(name, str) # ok +- assert_type(name, int) # type checker error +- +- At runtime this returns the first argument unchanged and otherwise +- does nothing. +- """ +- return __val +- +- +-if hasattr(typing, "Required"): +- get_type_hints = typing.get_type_hints +-else: +- import functools +- import types +- +- # replaces _strip_annotations() +- def _strip_extras(t): +- """Strips Annotated, Required and NotRequired from a given type.""" +- if isinstance(t, _AnnotatedAlias): +- return _strip_extras(t.__origin__) +- if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired): +- return _strip_extras(t.__args__[0]) +- if isinstance(t, typing._GenericAlias): +- stripped_args = tuple(_strip_extras(a) for a in t.__args__) +- if stripped_args == t.__args__: +- return t +- return t.copy_with(stripped_args) +- if hasattr(types, "GenericAlias") and isinstance(t, types.GenericAlias): +- stripped_args = tuple(_strip_extras(a) for a in t.__args__) +- if stripped_args == t.__args__: +- return t +- return types.GenericAlias(t.__origin__, stripped_args) +- if hasattr(types, "UnionType") and isinstance(t, types.UnionType): +- stripped_args = tuple(_strip_extras(a) for a in t.__args__) +- if stripped_args == t.__args__: +- return t +- return functools.reduce(operator.or_, stripped_args) +- +- return t +- +- def get_type_hints(obj, globalns=None, localns=None, include_extras=False): +- """Return type hints for an object. +- +- This is often the same as obj.__annotations__, but it handles +- forward references encoded as string literals, adds Optional[t] if a +- default value equal to None is set and recursively replaces all +- 'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T' +- (unless 'include_extras=True'). +- +- The argument may be a module, class, method, or function. The annotations +- are returned as a dictionary. For classes, annotations include also +- inherited members. +- +- TypeError is raised if the argument is not of a type that can contain +- annotations, and an empty dictionary is returned if no annotations are +- present. +- +- BEWARE -- the behavior of globalns and localns is counterintuitive +- (unless you are familiar with how eval() and exec() work). The +- search order is locals first, then globals. +- +- - If no dict arguments are passed, an attempt is made to use the +- globals from obj (or the respective module's globals for classes), +- and these are also used as the locals. If the object does not appear +- to have globals, an empty dictionary is used. +- +- - If one dict argument is passed, it is used for both globals and +- locals. +- +- - If two dict arguments are passed, they specify globals and +- locals, respectively. +- """ +- if hasattr(typing, "Annotated"): +- hint = typing.get_type_hints( +- obj, globalns=globalns, localns=localns, include_extras=True +- ) +- else: +- hint = typing.get_type_hints(obj, globalns=globalns, localns=localns) +- if include_extras: +- return hint +- return {k: _strip_extras(t) for k, t in hint.items()} +- +- +-# Python 3.9+ has PEP 593 (Annotated) +-if hasattr(typing, 'Annotated'): +- Annotated = typing.Annotated +- # Not exported and not a public API, but needed for get_origin() and get_args() +- # to work. 
+- _AnnotatedAlias = typing._AnnotatedAlias +-# 3.7-3.8 +-else: +- class _AnnotatedAlias(typing._GenericAlias, _root=True): +- """Runtime representation of an annotated type. +- +- At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't' +- with extra annotations. The alias behaves like a normal typing alias, +- instantiating is the same as instantiating the underlying type, binding +- it to types is also the same. +- """ +- def __init__(self, origin, metadata): +- if isinstance(origin, _AnnotatedAlias): +- metadata = origin.__metadata__ + metadata +- origin = origin.__origin__ +- super().__init__(origin, origin) +- self.__metadata__ = metadata +- +- def copy_with(self, params): +- assert len(params) == 1 +- new_type = params[0] +- return _AnnotatedAlias(new_type, self.__metadata__) +- +- def __repr__(self): +- return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, " +- f"{', '.join(repr(a) for a in self.__metadata__)}]") +- +- def __reduce__(self): +- return operator.getitem, ( +- Annotated, (self.__origin__,) + self.__metadata__ +- ) +- +- def __eq__(self, other): +- if not isinstance(other, _AnnotatedAlias): +- return NotImplemented +- if self.__origin__ != other.__origin__: +- return False +- return self.__metadata__ == other.__metadata__ +- +- def __hash__(self): +- return hash((self.__origin__, self.__metadata__)) +- +- class Annotated: +- """Add context specific metadata to a type. +- +- Example: Annotated[int, runtime_check.Unsigned] indicates to the +- hypothetical runtime_check module that this type is an unsigned int. +- Every other consumer of this type can ignore this metadata and treat +- this type as int. +- +- The first argument to Annotated must be a valid type (and will be in +- the __origin__ field), the remaining arguments are kept as a tuple in +- the __extra__ field. +- +- Details: +- +- - It's an error to call `Annotated` with less than two arguments. +- - Nested Annotated are flattened:: +- +- Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3] +- +- - Instantiating an annotated type is equivalent to instantiating the +- underlying type:: +- +- Annotated[C, Ann1](5) == C(5) +- +- - Annotated can be used as a generic type alias:: +- +- Optimized = Annotated[T, runtime.Optimize()] +- Optimized[int] == Annotated[int, runtime.Optimize()] +- +- OptimizedList = Annotated[List[T], runtime.Optimize()] +- OptimizedList[int] == Annotated[List[int], runtime.Optimize()] +- """ +- +- __slots__ = () +- +- def __new__(cls, *args, **kwargs): +- raise TypeError("Type Annotated cannot be instantiated.") +- +- @typing._tp_cache +- def __class_getitem__(cls, params): +- if not isinstance(params, tuple) or len(params) < 2: +- raise TypeError("Annotated[...] should be used " +- "with at least two arguments (a type and an " +- "annotation).") +- allowed_special_forms = (ClassVar, Final) +- if get_origin(params[0]) in allowed_special_forms: +- origin = params[0] +- else: +- msg = "Annotated[t, ...]: t must be a type." +- origin = typing._type_check(params[0], msg) +- metadata = tuple(params[1:]) +- return _AnnotatedAlias(origin, metadata) +- +- def __init_subclass__(cls, *args, **kwargs): +- raise TypeError( +- f"Cannot subclass {cls.__module__}.Annotated" +- ) +- +-# Python 3.8 has get_origin() and get_args() but those implementations aren't +-# Annotated-aware, so we can't use those. Python 3.9's versions don't support +-# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do. 
+-if sys.version_info[:2] >= (3, 10): +- get_origin = typing.get_origin +- get_args = typing.get_args +-# 3.7-3.9 +-else: +- try: +- # 3.9+ +- from typing import _BaseGenericAlias +- except ImportError: +- _BaseGenericAlias = typing._GenericAlias +- try: +- # 3.9+ +- from typing import GenericAlias as _typing_GenericAlias +- except ImportError: +- _typing_GenericAlias = typing._GenericAlias +- +- def get_origin(tp): +- """Get the unsubscripted version of a type. +- +- This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar +- and Annotated. Return None for unsupported types. Examples:: +- +- get_origin(Literal[42]) is Literal +- get_origin(int) is None +- get_origin(ClassVar[int]) is ClassVar +- get_origin(Generic) is Generic +- get_origin(Generic[T]) is Generic +- get_origin(Union[T, int]) is Union +- get_origin(List[Tuple[T, T]][int]) == list +- get_origin(P.args) is P +- """ +- if isinstance(tp, _AnnotatedAlias): +- return Annotated +- if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias, _BaseGenericAlias, +- ParamSpecArgs, ParamSpecKwargs)): +- return tp.__origin__ +- if tp is typing.Generic: +- return typing.Generic +- return None +- +- def get_args(tp): +- """Get type arguments with all substitutions performed. +- +- For unions, basic simplifications used by Union constructor are performed. +- Examples:: +- get_args(Dict[str, int]) == (str, int) +- get_args(int) == () +- get_args(Union[int, Union[T, int], str][int]) == (int, str) +- get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) +- get_args(Callable[[], T][int]) == ([], int) +- """ +- if isinstance(tp, _AnnotatedAlias): +- return (tp.__origin__,) + tp.__metadata__ +- if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias)): +- if getattr(tp, "_special", False): +- return () +- res = tp.__args__ +- if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis: +- res = (list(res[:-1]), res[-1]) +- return res +- return () +- +- +-# 3.10+ +-if hasattr(typing, 'TypeAlias'): +- TypeAlias = typing.TypeAlias +-# 3.9 +-elif sys.version_info[:2] >= (3, 9): +- class _TypeAliasForm(typing._SpecialForm, _root=True): +- def __repr__(self): +- return 'typing_extensions.' + self._name +- +- @_TypeAliasForm +- def TypeAlias(self, parameters): +- """Special marker indicating that an assignment should +- be recognized as a proper type alias definition by type +- checkers. +- +- For example:: +- +- Predicate: TypeAlias = Callable[..., bool] +- +- It's invalid when used anywhere except as in the example above. +- """ +- raise TypeError(f"{self} is not subscriptable") +-# 3.7-3.8 +-else: +- class _TypeAliasForm(typing._SpecialForm, _root=True): +- def __repr__(self): +- return 'typing_extensions.' + self._name +- +- TypeAlias = _TypeAliasForm('TypeAlias', +- doc="""Special marker indicating that an assignment should +- be recognized as a proper type alias definition by type +- checkers. +- +- For example:: +- +- Predicate: TypeAlias = Callable[..., bool] +- +- It's invalid when used anywhere except as in the example +- above.""") +- +- +-# Python 3.10+ has PEP 612 +-if hasattr(typing, 'ParamSpecArgs'): +- ParamSpecArgs = typing.ParamSpecArgs +- ParamSpecKwargs = typing.ParamSpecKwargs +-# 3.7-3.9 +-else: +- class _Immutable: +- """Mixin to indicate that object should not be copied.""" +- __slots__ = () +- +- def __copy__(self): +- return self +- +- def __deepcopy__(self, memo): +- return self +- +- class ParamSpecArgs(_Immutable): +- """The args for a ParamSpec object. 
+- +- Given a ParamSpec object P, P.args is an instance of ParamSpecArgs. +- +- ParamSpecArgs objects have a reference back to their ParamSpec: +- +- P.args.__origin__ is P +- +- This type is meant for runtime introspection and has no special meaning to +- static type checkers. +- """ +- def __init__(self, origin): +- self.__origin__ = origin +- +- def __repr__(self): +- return f"{self.__origin__.__name__}.args" +- +- def __eq__(self, other): +- if not isinstance(other, ParamSpecArgs): +- return NotImplemented +- return self.__origin__ == other.__origin__ +- +- class ParamSpecKwargs(_Immutable): +- """The kwargs for a ParamSpec object. +- +- Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs. +- +- ParamSpecKwargs objects have a reference back to their ParamSpec: +- +- P.kwargs.__origin__ is P +- +- This type is meant for runtime introspection and has no special meaning to +- static type checkers. +- """ +- def __init__(self, origin): +- self.__origin__ = origin +- +- def __repr__(self): +- return f"{self.__origin__.__name__}.kwargs" +- +- def __eq__(self, other): +- if not isinstance(other, ParamSpecKwargs): +- return NotImplemented +- return self.__origin__ == other.__origin__ +- +-# 3.10+ +-if hasattr(typing, 'ParamSpec'): +- ParamSpec = typing.ParamSpec +-# 3.7-3.9 +-else: +- +- # Inherits from list as a workaround for Callable checks in Python < 3.9.2. +- class ParamSpec(list): +- """Parameter specification variable. +- +- Usage:: +- +- P = ParamSpec('P') +- +- Parameter specification variables exist primarily for the benefit of static +- type checkers. They are used to forward the parameter types of one +- callable to another callable, a pattern commonly found in higher order +- functions and decorators. They are only valid when used in ``Concatenate``, +- or s the first argument to ``Callable``. In Python 3.10 and higher, +- they are also supported in user-defined Generics at runtime. +- See class Generic for more information on generic types. An +- example for annotating a decorator:: +- +- T = TypeVar('T') +- P = ParamSpec('P') +- +- def add_logging(f: Callable[P, T]) -> Callable[P, T]: +- '''A type-safe decorator to add logging to a function.''' +- def inner(*args: P.args, **kwargs: P.kwargs) -> T: +- logging.info(f'{f.__name__} was called') +- return f(*args, **kwargs) +- return inner +- +- @add_logging +- def add_two(x: float, y: float) -> float: +- '''Add two numbers together.''' +- return x + y +- +- Parameter specification variables defined with covariant=True or +- contravariant=True can be used to declare covariant or contravariant +- generic types. These keyword arguments are valid, but their actual semantics +- are yet to be decided. See PEP 612 for details. +- +- Parameter specification variables can be introspected. e.g.: +- +- P.__name__ == 'T' +- P.__bound__ == None +- P.__covariant__ == False +- P.__contravariant__ == False +- +- Note that only parameter specification variables defined in global scope can +- be pickled. +- """ +- +- # Trick Generic __parameters__. 
+- __class__ = typing.TypeVar +- +- @property +- def args(self): +- return ParamSpecArgs(self) +- +- @property +- def kwargs(self): +- return ParamSpecKwargs(self) +- +- def __init__(self, name, *, bound=None, covariant=False, contravariant=False): +- super().__init__([self]) +- self.__name__ = name +- self.__covariant__ = bool(covariant) +- self.__contravariant__ = bool(contravariant) +- if bound: +- self.__bound__ = typing._type_check(bound, 'Bound must be a type.') +- else: +- self.__bound__ = None +- +- # for pickling: +- try: +- def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') +- except (AttributeError, ValueError): +- def_mod = None +- if def_mod != 'typing_extensions': +- self.__module__ = def_mod +- +- def __repr__(self): +- if self.__covariant__: +- prefix = '+' +- elif self.__contravariant__: +- prefix = '-' +- else: +- prefix = '~' +- return prefix + self.__name__ +- +- def __hash__(self): +- return object.__hash__(self) +- +- def __eq__(self, other): +- return self is other +- +- def __reduce__(self): +- return self.__name__ +- +- # Hack to get typing._type_check to pass. +- def __call__(self, *args, **kwargs): +- pass +- +- +-# 3.7-3.9 +-if not hasattr(typing, 'Concatenate'): +- # Inherits from list as a workaround for Callable checks in Python < 3.9.2. +- class _ConcatenateGenericAlias(list): +- +- # Trick Generic into looking into this for __parameters__. +- __class__ = typing._GenericAlias +- +- # Flag in 3.8. +- _special = False +- +- def __init__(self, origin, args): +- super().__init__(args) +- self.__origin__ = origin +- self.__args__ = args +- +- def __repr__(self): +- _type_repr = typing._type_repr +- return (f'{_type_repr(self.__origin__)}' +- f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]') +- +- def __hash__(self): +- return hash((self.__origin__, self.__args__)) +- +- # Hack to get typing._type_check to pass in Generic. +- def __call__(self, *args, **kwargs): +- pass +- +- @property +- def __parameters__(self): +- return tuple( +- tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec)) +- ) +- +- +-# 3.7-3.9 +-@typing._tp_cache +-def _concatenate_getitem(self, parameters): +- if parameters == (): +- raise TypeError("Cannot take a Concatenate of no types.") +- if not isinstance(parameters, tuple): +- parameters = (parameters,) +- if not isinstance(parameters[-1], ParamSpec): +- raise TypeError("The last parameter to Concatenate should be a " +- "ParamSpec variable.") +- msg = "Concatenate[arg, ...]: each arg must be a type." +- parameters = tuple(typing._type_check(p, msg) for p in parameters) +- return _ConcatenateGenericAlias(self, parameters) +- +- +-# 3.10+ +-if hasattr(typing, 'Concatenate'): +- Concatenate = typing.Concatenate +- _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa +-# 3.9 +-elif sys.version_info[:2] >= (3, 9): +- @_TypeAliasForm +- def Concatenate(self, parameters): +- """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a +- higher order function which adds, removes or transforms parameters of a +- callable. +- +- For example:: +- +- Callable[Concatenate[int, P], int] +- +- See PEP 612 for detailed information. +- """ +- return _concatenate_getitem(self, parameters) +-# 3.7-8 +-else: +- class _ConcatenateForm(typing._SpecialForm, _root=True): +- def __repr__(self): +- return 'typing_extensions.' 
+ self._name +- +- def __getitem__(self, parameters): +- return _concatenate_getitem(self, parameters) +- +- Concatenate = _ConcatenateForm( +- 'Concatenate', +- doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a +- higher order function which adds, removes or transforms parameters of a +- callable. +- +- For example:: +- +- Callable[Concatenate[int, P], int] +- +- See PEP 612 for detailed information. +- """) +- +-# 3.10+ +-if hasattr(typing, 'TypeGuard'): +- TypeGuard = typing.TypeGuard +-# 3.9 +-elif sys.version_info[:2] >= (3, 9): +- class _TypeGuardForm(typing._SpecialForm, _root=True): +- def __repr__(self): +- return 'typing_extensions.' + self._name +- +- @_TypeGuardForm +- def TypeGuard(self, parameters): +- """Special typing form used to annotate the return type of a user-defined +- type guard function. ``TypeGuard`` only accepts a single type argument. +- At runtime, functions marked this way should return a boolean. +- +- ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static +- type checkers to determine a more precise type of an expression within a +- program's code flow. Usually type narrowing is done by analyzing +- conditional code flow and applying the narrowing to a block of code. The +- conditional expression here is sometimes referred to as a "type guard". +- +- Sometimes it would be convenient to use a user-defined boolean function +- as a type guard. Such a function should use ``TypeGuard[...]`` as its +- return type to alert static type checkers to this intention. +- +- Using ``-> TypeGuard`` tells the static type checker that for a given +- function: +- +- 1. The return value is a boolean. +- 2. If the return value is ``True``, the type of its argument +- is the type inside ``TypeGuard``. +- +- For example:: +- +- def is_str(val: Union[str, float]): +- # "isinstance" type guard +- if isinstance(val, str): +- # Type of ``val`` is narrowed to ``str`` +- ... +- else: +- # Else, type of ``val`` is narrowed to ``float``. +- ... +- +- Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower +- form of ``TypeA`` (it can even be a wider form) and this may lead to +- type-unsafe results. The main reason is to allow for things like +- narrowing ``List[object]`` to ``List[str]`` even though the latter is not +- a subtype of the former, since ``List`` is invariant. The responsibility of +- writing type-safe type guards is left to the user. +- +- ``TypeGuard`` also works with type variables. For more information, see +- PEP 647 (User-Defined Type Guards). +- """ +- item = typing._type_check(parameters, f'{self} accepts only a single type.') +- return typing._GenericAlias(self, (item,)) +-# 3.7-3.8 +-else: +- class _TypeGuardForm(typing._SpecialForm, _root=True): +- +- def __repr__(self): +- return 'typing_extensions.' + self._name +- +- def __getitem__(self, parameters): +- item = typing._type_check(parameters, +- f'{self._name} accepts only a single type') +- return typing._GenericAlias(self, (item,)) +- +- TypeGuard = _TypeGuardForm( +- 'TypeGuard', +- doc="""Special typing form used to annotate the return type of a user-defined +- type guard function. ``TypeGuard`` only accepts a single type argument. +- At runtime, functions marked this way should return a boolean. +- +- ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static +- type checkers to determine a more precise type of an expression within a +- program's code flow. 
Usually type narrowing is done by analyzing +- conditional code flow and applying the narrowing to a block of code. The +- conditional expression here is sometimes referred to as a "type guard". +- +- Sometimes it would be convenient to use a user-defined boolean function +- as a type guard. Such a function should use ``TypeGuard[...]`` as its +- return type to alert static type checkers to this intention. +- +- Using ``-> TypeGuard`` tells the static type checker that for a given +- function: +- +- 1. The return value is a boolean. +- 2. If the return value is ``True``, the type of its argument +- is the type inside ``TypeGuard``. +- +- For example:: +- +- def is_str(val: Union[str, float]): +- # "isinstance" type guard +- if isinstance(val, str): +- # Type of ``val`` is narrowed to ``str`` +- ... +- else: +- # Else, type of ``val`` is narrowed to ``float``. +- ... +- +- Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower +- form of ``TypeA`` (it can even be a wider form) and this may lead to +- type-unsafe results. The main reason is to allow for things like +- narrowing ``List[object]`` to ``List[str]`` even though the latter is not +- a subtype of the former, since ``List`` is invariant. The responsibility of +- writing type-safe type guards is left to the user. +- +- ``TypeGuard`` also works with type variables. For more information, see +- PEP 647 (User-Defined Type Guards). +- """) +- +- +-# Vendored from cpython typing._SpecialFrom +-class _SpecialForm(typing._Final, _root=True): +- __slots__ = ('_name', '__doc__', '_getitem') +- +- def __init__(self, getitem): +- self._getitem = getitem +- self._name = getitem.__name__ +- self.__doc__ = getitem.__doc__ +- +- def __getattr__(self, item): +- if item in {'__name__', '__qualname__'}: +- return self._name +- +- raise AttributeError(item) +- +- def __mro_entries__(self, bases): +- raise TypeError(f"Cannot subclass {self!r}") +- +- def __repr__(self): +- return f'typing_extensions.{self._name}' +- +- def __reduce__(self): +- return self._name +- +- def __call__(self, *args, **kwds): +- raise TypeError(f"Cannot instantiate {self!r}") +- +- def __or__(self, other): +- return typing.Union[self, other] +- +- def __ror__(self, other): +- return typing.Union[other, self] +- +- def __instancecheck__(self, obj): +- raise TypeError(f"{self} cannot be used with isinstance()") +- +- def __subclasscheck__(self, cls): +- raise TypeError(f"{self} cannot be used with issubclass()") +- +- @typing._tp_cache +- def __getitem__(self, parameters): +- return self._getitem(self, parameters) +- +- +-if hasattr(typing, "LiteralString"): +- LiteralString = typing.LiteralString +-else: +- @_SpecialForm +- def LiteralString(self, params): +- """Represents an arbitrary literal string. +- +- Example:: +- +- from typing_extensions import LiteralString +- +- def query(sql: LiteralString) -> ...: +- ... +- +- query("SELECT * FROM table") # ok +- query(f"SELECT * FROM {input()}") # not ok +- +- See PEP 675 for details. +- +- """ +- raise TypeError(f"{self} is not subscriptable") +- +- +-if hasattr(typing, "Self"): +- Self = typing.Self +-else: +- @_SpecialForm +- def Self(self, params): +- """Used to spell the type of "self" in classes. +- +- Example:: +- +- from typing import Self +- +- class ReturnsSelf: +- def parse(self, data: bytes) -> Self: +- ... 
+- return self +- +- """ +- +- raise TypeError(f"{self} is not subscriptable") +- +- +-if hasattr(typing, "Never"): +- Never = typing.Never +-else: +- @_SpecialForm +- def Never(self, params): +- """The bottom type, a type that has no members. +- +- This can be used to define a function that should never be +- called, or a function that never returns:: +- +- from typing_extensions import Never +- +- def never_call_me(arg: Never) -> None: +- pass +- +- def int_or_str(arg: int | str) -> None: +- never_call_me(arg) # type checker error +- match arg: +- case int(): +- print("It's an int") +- case str(): +- print("It's a str") +- case _: +- never_call_me(arg) # ok, arg is of type Never +- +- """ +- +- raise TypeError(f"{self} is not subscriptable") +- +- +-if hasattr(typing, 'Required'): +- Required = typing.Required +- NotRequired = typing.NotRequired +-elif sys.version_info[:2] >= (3, 9): +- class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): +- def __repr__(self): +- return 'typing_extensions.' + self._name +- +- @_ExtensionsSpecialForm +- def Required(self, parameters): +- """A special typing construct to mark a key of a total=False TypedDict +- as required. For example: +- +- class Movie(TypedDict, total=False): +- title: Required[str] +- year: int +- +- m = Movie( +- title='The Matrix', # typechecker error if key is omitted +- year=1999, +- ) +- +- There is no runtime checking that a required key is actually provided +- when instantiating a related TypedDict. +- """ +- item = typing._type_check(parameters, f'{self._name} accepts only a single type.') +- return typing._GenericAlias(self, (item,)) +- +- @_ExtensionsSpecialForm +- def NotRequired(self, parameters): +- """A special typing construct to mark a key of a TypedDict as +- potentially missing. For example: +- +- class Movie(TypedDict): +- title: str +- year: NotRequired[int] +- +- m = Movie( +- title='The Matrix', # typechecker error if key is omitted +- year=1999, +- ) +- """ +- item = typing._type_check(parameters, f'{self._name} accepts only a single type.') +- return typing._GenericAlias(self, (item,)) +- +-else: +- class _RequiredForm(typing._SpecialForm, _root=True): +- def __repr__(self): +- return 'typing_extensions.' + self._name +- +- def __getitem__(self, parameters): +- item = typing._type_check(parameters, +- f'{self._name} accepts only a single type.') +- return typing._GenericAlias(self, (item,)) +- +- Required = _RequiredForm( +- 'Required', +- doc="""A special typing construct to mark a key of a total=False TypedDict +- as required. For example: +- +- class Movie(TypedDict, total=False): +- title: Required[str] +- year: int +- +- m = Movie( +- title='The Matrix', # typechecker error if key is omitted +- year=1999, +- ) +- +- There is no runtime checking that a required key is actually provided +- when instantiating a related TypedDict. +- """) +- NotRequired = _RequiredForm( +- 'NotRequired', +- doc="""A special typing construct to mark a key of a TypedDict as +- potentially missing. For example: +- +- class Movie(TypedDict): +- title: str +- year: NotRequired[int] +- +- m = Movie( +- title='The Matrix', # typechecker error if key is omitted +- year=1999, +- ) +- """) +- +- +-if hasattr(typing, "Unpack"): # 3.11+ +- Unpack = typing.Unpack +-elif sys.version_info[:2] >= (3, 9): +- class _UnpackSpecialForm(typing._SpecialForm, _root=True): +- def __repr__(self): +- return 'typing_extensions.' 
+ self._name +- +- class _UnpackAlias(typing._GenericAlias, _root=True): +- __class__ = typing.TypeVar +- +- @_UnpackSpecialForm +- def Unpack(self, parameters): +- """A special typing construct to unpack a variadic type. For example: +- +- Shape = TypeVarTuple('Shape') +- Batch = NewType('Batch', int) +- +- def add_batch_axis( +- x: Array[Unpack[Shape]] +- ) -> Array[Batch, Unpack[Shape]]: ... +- +- """ +- item = typing._type_check(parameters, f'{self._name} accepts only a single type.') +- return _UnpackAlias(self, (item,)) +- +- def _is_unpack(obj): +- return isinstance(obj, _UnpackAlias) +- +-else: +- class _UnpackAlias(typing._GenericAlias, _root=True): +- __class__ = typing.TypeVar +- +- class _UnpackForm(typing._SpecialForm, _root=True): +- def __repr__(self): +- return 'typing_extensions.' + self._name +- +- def __getitem__(self, parameters): +- item = typing._type_check(parameters, +- f'{self._name} accepts only a single type.') +- return _UnpackAlias(self, (item,)) +- +- Unpack = _UnpackForm( +- 'Unpack', +- doc="""A special typing construct to unpack a variadic type. For example: +- +- Shape = TypeVarTuple('Shape') +- Batch = NewType('Batch', int) +- +- def add_batch_axis( +- x: Array[Unpack[Shape]] +- ) -> Array[Batch, Unpack[Shape]]: ... +- +- """) +- +- def _is_unpack(obj): +- return isinstance(obj, _UnpackAlias) +- +- +-if hasattr(typing, "TypeVarTuple"): # 3.11+ +- TypeVarTuple = typing.TypeVarTuple +-else: +- class TypeVarTuple: +- """Type variable tuple. +- +- Usage:: +- +- Ts = TypeVarTuple('Ts') +- +- In the same way that a normal type variable is a stand-in for a single +- type such as ``int``, a type variable *tuple* is a stand-in for a *tuple* +- type such as ``Tuple[int, str]``. +- +- Type variable tuples can be used in ``Generic`` declarations. +- Consider the following example:: +- +- class Array(Generic[*Ts]): ... +- +- The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``, +- where ``T1`` and ``T2`` are type variables. To use these type variables +- as type parameters of ``Array``, we must *unpack* the type variable tuple using +- the star operator: ``*Ts``. The signature of ``Array`` then behaves +- as if we had simply written ``class Array(Generic[T1, T2]): ...``. +- In contrast to ``Generic[T1, T2]``, however, ``Generic[*Shape]`` allows +- us to parameterise the class with an *arbitrary* number of type parameters. +- +- Type variable tuples can be used anywhere a normal ``TypeVar`` can. +- This includes class definitions, as shown above, as well as function +- signatures and variable annotations:: +- +- class Array(Generic[*Ts]): +- +- def __init__(self, shape: Tuple[*Ts]): +- self._shape: Tuple[*Ts] = shape +- +- def get_shape(self) -> Tuple[*Ts]: +- return self._shape +- +- shape = (Height(480), Width(640)) +- x: Array[Height, Width] = Array(shape) +- y = abs(x) # Inferred type is Array[Height, Width] +- z = x + x # ... is Array[Height, Width] +- x.get_shape() # ... is tuple[Height, Width] +- +- """ +- +- # Trick Generic __parameters__. 
+- __class__ = typing.TypeVar +- +- def __iter__(self): +- yield self.__unpacked__ +- +- def __init__(self, name): +- self.__name__ = name +- +- # for pickling: +- try: +- def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') +- except (AttributeError, ValueError): +- def_mod = None +- if def_mod != 'typing_extensions': +- self.__module__ = def_mod +- +- self.__unpacked__ = Unpack[self] +- +- def __repr__(self): +- return self.__name__ +- +- def __hash__(self): +- return object.__hash__(self) +- +- def __eq__(self, other): +- return self is other +- +- def __reduce__(self): +- return self.__name__ +- +- def __init_subclass__(self, *args, **kwds): +- if '_root' not in kwds: +- raise TypeError("Cannot subclass special typing classes") +- +- +-if hasattr(typing, "reveal_type"): +- reveal_type = typing.reveal_type +-else: +- def reveal_type(__obj: T) -> T: +- """Reveal the inferred type of a variable. +- +- When a static type checker encounters a call to ``reveal_type()``, +- it will emit the inferred type of the argument:: +- +- x: int = 1 +- reveal_type(x) +- +- Running a static type checker (e.g., ``mypy``) on this example +- will produce output similar to 'Revealed type is "builtins.int"'. +- +- At runtime, the function prints the runtime type of the +- argument and returns it unchanged. +- +- """ +- print(f"Runtime type is {type(__obj).__name__!r}", file=sys.stderr) +- return __obj +- +- +-if hasattr(typing, "assert_never"): +- assert_never = typing.assert_never +-else: +- def assert_never(__arg: Never) -> Never: +- """Assert to the type checker that a line of code is unreachable. +- +- Example:: +- +- def int_or_str(arg: int | str) -> None: +- match arg: +- case int(): +- print("It's an int") +- case str(): +- print("It's a str") +- case _: +- assert_never(arg) +- +- If a type checker finds that a call to assert_never() is +- reachable, it will emit an error. +- +- At runtime, this throws an exception when called. +- +- """ +- raise AssertionError("Expected code to be unreachable") +- +- +-if hasattr(typing, 'dataclass_transform'): +- dataclass_transform = typing.dataclass_transform +-else: +- def dataclass_transform( +- *, +- eq_default: bool = True, +- order_default: bool = False, +- kw_only_default: bool = False, +- field_specifiers: typing.Tuple[ +- typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]], +- ... +- ] = (), +- **kwargs: typing.Any, +- ) -> typing.Callable[[T], T]: +- """Decorator that marks a function, class, or metaclass as providing +- dataclass-like behavior. +- +- Example: +- +- from typing_extensions import dataclass_transform +- +- _T = TypeVar("_T") +- +- # Used on a decorator function +- @dataclass_transform() +- def create_model(cls: type[_T]) -> type[_T]: +- ... +- return cls +- +- @create_model +- class CustomerModel: +- id: int +- name: str +- +- # Used on a base class +- @dataclass_transform() +- class ModelBase: ... +- +- class CustomerModel(ModelBase): +- id: int +- name: str +- +- # Used on a metaclass +- @dataclass_transform() +- class ModelMeta(type): ... +- +- class ModelBase(metaclass=ModelMeta): ... +- +- class CustomerModel(ModelBase): +- id: int +- name: str +- +- Each of the ``CustomerModel`` classes defined in this example will now +- behave similarly to a dataclass created with the ``@dataclasses.dataclass`` +- decorator. For example, the type checker will synthesize an ``__init__`` +- method. 
+- +- The arguments to this decorator can be used to customize this behavior: +- - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be +- True or False if it is omitted by the caller. +- - ``order_default`` indicates whether the ``order`` parameter is +- assumed to be True or False if it is omitted by the caller. +- - ``kw_only_default`` indicates whether the ``kw_only`` parameter is +- assumed to be True or False if it is omitted by the caller. +- - ``field_specifiers`` specifies a static list of supported classes +- or functions that describe fields, similar to ``dataclasses.field()``. +- +- At runtime, this decorator records its arguments in the +- ``__dataclass_transform__`` attribute on the decorated object. +- +- See PEP 681 for details. +- +- """ +- def decorator(cls_or_fn): +- cls_or_fn.__dataclass_transform__ = { +- "eq_default": eq_default, +- "order_default": order_default, +- "kw_only_default": kw_only_default, +- "field_specifiers": field_specifiers, +- "kwargs": kwargs, +- } +- return cls_or_fn +- return decorator +- +- +-# We have to do some monkey patching to deal with the dual nature of +-# Unpack/TypeVarTuple: +-# - We want Unpack to be a kind of TypeVar so it gets accepted in +-# Generic[Unpack[Ts]] +-# - We want it to *not* be treated as a TypeVar for the purposes of +-# counting generic parameters, so that when we subscript a generic, +-# the runtime doesn't try to substitute the Unpack with the subscripted type. +-if not hasattr(typing, "TypeVarTuple"): +- typing._collect_type_vars = _collect_type_vars +- typing._check_generic = _check_generic +- +- +-# Backport typing.NamedTuple as it exists in Python 3.11. +-# In 3.11, the ability to define generic `NamedTuple`s was supported. +-# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8. 
+-if sys.version_info >= (3, 11): +- NamedTuple = typing.NamedTuple +-else: +- def _caller(): +- try: +- return sys._getframe(2).f_globals.get('__name__', '__main__') +- except (AttributeError, ValueError): # For platforms without _getframe() +- return None +- +- def _make_nmtuple(name, types, module, defaults=()): +- fields = [n for n, t in types] +- annotations = {n: typing._type_check(t, f"field {n} annotation must be a type") +- for n, t in types} +- nm_tpl = collections.namedtuple(name, fields, +- defaults=defaults, module=module) +- nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations +- # The `_field_types` attribute was removed in 3.9; +- # in earlier versions, it is the same as the `__annotations__` attribute +- if sys.version_info < (3, 9): +- nm_tpl._field_types = annotations +- return nm_tpl +- +- _prohibited_namedtuple_fields = typing._prohibited +- _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'}) +- +- class _NamedTupleMeta(type): +- def __new__(cls, typename, bases, ns): +- assert _NamedTuple in bases +- for base in bases: +- if base is not _NamedTuple and base is not typing.Generic: +- raise TypeError( +- 'can only inherit from a NamedTuple type and Generic') +- bases = tuple(tuple if base is _NamedTuple else base for base in bases) +- types = ns.get('__annotations__', {}) +- default_names = [] +- for field_name in types: +- if field_name in ns: +- default_names.append(field_name) +- elif default_names: +- raise TypeError(f"Non-default namedtuple field {field_name} " +- f"cannot follow default field" +- f"{'s' if len(default_names) > 1 else ''} " +- f"{', '.join(default_names)}") +- nm_tpl = _make_nmtuple( +- typename, types.items(), +- defaults=[ns[n] for n in default_names], +- module=ns['__module__'] +- ) +- nm_tpl.__bases__ = bases +- if typing.Generic in bases: +- class_getitem = typing.Generic.__class_getitem__.__func__ +- nm_tpl.__class_getitem__ = classmethod(class_getitem) +- # update from user namespace without overriding special namedtuple attributes +- for key in ns: +- if key in _prohibited_namedtuple_fields: +- raise AttributeError("Cannot overwrite NamedTuple attribute " + key) +- elif key not in _special_namedtuple_fields and key not in nm_tpl._fields: +- setattr(nm_tpl, key, ns[key]) +- if typing.Generic in bases: +- nm_tpl.__init_subclass__() +- return nm_tpl +- +- def NamedTuple(__typename, __fields=None, **kwargs): +- if __fields is None: +- __fields = kwargs.items() +- elif kwargs: +- raise TypeError("Either list of fields or keywords" +- " can be provided to NamedTuple, not both") +- return _make_nmtuple(__typename, __fields, module=_caller()) +- +- NamedTuple.__doc__ = typing.NamedTuple.__doc__ +- _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {}) +- +- # On 3.8+, alter the signature so that it matches typing.NamedTuple. +- # The signature of typing.NamedTuple on >=3.8 is invalid syntax in Python 3.7, +- # so just leave the signature as it is on 3.7. 
+-    if sys.version_info >= (3, 8):
+-        NamedTuple.__text_signature__ = '(typename, fields=None, /, **kwargs)'
+-
+-    def _namedtuple_mro_entries(bases):
+-        assert NamedTuple in bases
+-        return (_NamedTuple,)
+-
+-    NamedTuple.__mro_entries__ = _namedtuple_mro_entries
+diff --git a/src/poetry/core/_vendor/vendor.txt b/src/poetry/core/_vendor/vendor.txt
+deleted file mode 100644
+index 23d0097..0000000
+--- a/src/poetry/core/_vendor/vendor.txt
++++ /dev/null
+@@ -1,9 +0,0 @@
+-attrs==22.1.0; python_version >= "3.7"
+-jsonschema==4.10.0; python_version >= "3.7"
+-lark==1.1.2
+-packaging==21.3; python_version >= "3.6"
+-pkgutil-resolve-name==1.3.10; python_version < "3.9" and python_version >= "3.7"
+-pyparsing==3.0.9; python_full_version >= "3.6.8" and python_version >= "3.6"
+-pyrsistent==0.18.1; python_version >= "3.7"
+-tomlkit==0.11.4; python_version >= "3.6" and python_version < "4.0"
+-typing-extensions==4.3.0; python_version >= "3.7"
+diff --git a/src/poetry/core/json/__init__.py b/src/poetry/core/json/__init__.py
+index c46a8d2..84ec3c8 100644
+--- a/src/poetry/core/json/__init__.py
++++ b/src/poetry/core/json/__init__.py
+@@ -3,25 +3,22 @@ from __future__ import annotations
  import json
--import os
+ import os
--from io import open
+from importlib import resources
- from typing import List
-
- from jsonschema import Draft7Validator
+ from typing import Any
-SCHEMA_DIR = os.path.join(os.path.dirname(__file__), "schemas")
-
- class ValidationError(ValueError):
-     pass
- def validate_object(obj, schema_name):  # type: (dict, str) -> List[str]
+ def validate_object(obj: dict[str, Any], schema_name: str) -> list[str]:
--    schema_file = os.path.join(SCHEMA_DIR, f"{schema_name}.json")
-
--    if not os.path.exists(schema_file):
+     try:
+         schema = json.loads(
-+            resources.read_text(
-+                f"{__name__}.schemas", "{}.json".format(schema_name)
-+            )
++            resources.read_text(f"{__name__}.schemas", f"{schema_name}.json")
+         )
+     except Exception:
-         raise ValueError("Schema {} does not exist.".format(schema_name))
+         raise ValueError(f"Schema {schema_name} does not exist.")
--    with open(schema_file, encoding="utf-8") as f:
-         schema = json.loads(f.read())
-
-     validator = Draft7Validator(schema)
-     validation_errors = sorted(validator.iter_errors(obj), key=lambda e: e.path)
+ from jsonschema import Draft7Validator
+     validator = Draft7Validator(schema)
+diff --git a/src/poetry/core/_vendor/lark/parsers/__init__.py b/src/poetry/core/json/schemas/__init__.py
+similarity index 100%
+rename from src/poetry/core/_vendor/lark/parsers/__init__.py
+rename to src/poetry/core/json/schemas/__init__.py
+diff --git a/src/poetry/core/_vendor/attr/py.typed b/src/poetry/core/spdx/data/__init__.py
+similarity index 100%
+rename from src/poetry/core/_vendor/attr/py.typed
+rename to src/poetry/core/spdx/data/__init__.py
+diff --git a/src/poetry/core/spdx/helpers.py b/src/poetry/core/spdx/helpers.py
+index 00d4bc6..1dde745 100644
+--- a/src/poetry/core/spdx/helpers.py
++++ b/src/poetry/core/spdx/helpers.py
+@@ -4,6 +4,8 @@ import functools import 
os --from io import open +from importlib import resources - from typing import Dict - from typing import Optional - - from .license import License - from .updater import Updater - -- - _licenses = None # type: Optional[Dict[str, License]] - ++ + from poetry.core.spdx.license import License -@@ -31,10 +29,7 @@ def load_licenses(): # type: () -> None - _licenses = {} +@@ -19,11 +21,9 @@ def license_by_id(identifier: str) -> License: + @functools.lru_cache() + def _load_licenses() -> dict[str, License]: ++ from . import __name__ + licenses = {} - licenses_file = os.path.join(os.path.dirname(__file__), "data", "licenses.json") - - with open(licenses_file, encoding="utf-8") as f: @@ -91,80 +44899,33 @@ index 713aa30..490c7a9 100644 for name, license_info in data.items(): license = License(name, license_info[0], license_info[1], license_info[2]) -diff --git a/poetry/core/spdx/data/__init__.py b/poetry/core/spdx/data/__init__.py -new file mode 100644 -index 0000000..e69de29 -diff --git a/poetry/core/version/grammars/__init__.py b/poetry/core/version/grammars/__init__.py -index e69de29..e88bac8 100644 ---- a/poetry/core/version/grammars/__init__.py -+++ b/poetry/core/version/grammars/__init__.py -@@ -0,0 +1,12 @@ +diff --git a/src/poetry/core/version/grammars/__init__.py b/src/poetry/core/version/grammars/__init__.py +index caf504b..971104e 100644 +--- a/src/poetry/core/version/grammars/__init__.py ++++ b/src/poetry/core/version/grammars/__init__.py +@@ -1,9 +1,16 @@ + from __future__ import annotations + +import sys + -+from pathlib import Path -+ -+ + from pathlib import Path + + +-GRAMMAR_DIR = Path(__file__).parent +if getattr(sys, "oxidized", False): -+ parents = 4 if sys.platform.startswith("win") else 5 -+ __path_assets__ = ( -+ Path(__path__[0]).parents[parents] / "assets" / "core" / "version" / "grammars" ++ GRAMMAR_DIR = ( ++ Path(__path__[0]).parents[4] / "assets" / "core" / "version" / "grammars" + ) +else: -+ __path_assets__ = Path(__path__[0]) -diff --git a/poetry/core/version/markers.py b/poetry/core/version/markers.py -index ac6c2b6..5eb5d95 100644 ---- a/poetry/core/version/markers.py -+++ b/poetry/core/version/markers.py -@@ -1,4 +1,3 @@ --import os - import re - - from typing import TYPE_CHECKING -@@ -12,6 +11,8 @@ from lark import Lark - from lark import Token - from lark import Tree - -+from .grammars import __path_assets__ -+ - - if TYPE_CHECKING: - from poetry.core.semver import VersionTypes # noqa -@@ -49,7 +50,7 @@ ALIASES = { - "python_implementation": "platform_python_implementation", - } - _parser = Lark.open( -- os.path.join(os.path.dirname(__file__), "grammars", "markers.lark"), parser="lalr" -+ __path_assets__ / "markers.lark", parser="lalr" - ) - - -diff --git a/poetry/core/version/requirements.py b/poetry/core/version/requirements.py -index c9601a0..d0029fc 100644 ---- a/poetry/core/version/requirements.py -+++ b/poetry/core/version/requirements.py -@@ -5,8 +5,6 @@ from __future__ import absolute_import - from __future__ import division - from __future__ import print_function - --import os -- - from lark import Lark - from lark import UnexpectedCharacters - from lark import UnexpectedToken -@@ -14,6 +12,7 @@ from lark import UnexpectedToken - from poetry.core.semver import parse_constraint - from poetry.core.semver.exceptions import ParseConstraintError - -+from .grammars import __path_assets__ - from .markers import _compact_markers - - -@@ -30,7 +29,7 @@ class InvalidRequirement(ValueError): - - - _parser = Lark.open( -- os.path.join(os.path.dirname(__file__), 
"grammars", "pep508.lark"), parser="lalr" -+ __path_assets__ / "pep508.lark", parser="lalr" - ) ++ GRAMMAR_DIR = Path(__path__[0]) + GRAMMAR_PEP_508_CONSTRAINTS = GRAMMAR_DIR / "pep508.lark" +diff --git a/vendors/deps.txt b/vendors/deps.txt +new file mode 100644 +index 0000000..5d242f5 +--- /dev/null ++++ b/vendors/deps.txt +@@ -0,0 +1,2 @@ ++packaging==21.3 ++tomlkit==0.11.4 diff --git a/patches/poetry.patch b/patches/poetry.patch index a3851088..b510064f 100644 --- a/patches/poetry.patch +++ b/patches/poetry.patch @@ -1,173 +1,263 @@ -diff --git a/poetry/console/__init__.py b/poetry/console/__init__.py -index c0c25738..e69de29b 100644 ---- a/poetry/console/__init__.py -+++ b/poetry/console/__init__.py -@@ -1,5 +0,0 @@ --from .application import Application -- -- --def main(): -- return Application().run() -diff --git a/poetry/console/commands/env/info.py b/poetry/console/commands/env/info.py -index 301d88f9..b80ee500 100644 ---- a/poetry/console/commands/env/info.py -+++ b/poetry/console/commands/env/info.py -@@ -54,7 +54,7 @@ class EnvInfoCommand(Command): - [ - "Platform: {}".format(env.platform), - "OS: {}".format(env.os), -- "Python: {}".format(env.base), -+ "Python: {}".format(env.python), - ] - ) - ) -diff --git a/poetry/console/commands/init.py b/poetry/console/commands/init.py -index af72318c..1d616682 100644 ---- a/poetry/console/commands/init.py -+++ b/poetry/console/commands/init.py -@@ -66,7 +66,7 @@ The init command creates a basic pyproject.toml file in the +diff --git a/pyproject.toml b/pyproject.toml +index 626531b1..caa6d6b0 100644 +--- a/pyproject.toml ++++ b/pyproject.toml +@@ -54,7 +54,7 @@ html5lib = "^1.0" + importlib-metadata = { version = "^4.4", python = "<3.10" } + jsonschema = "^4.10.0" + # keyring uses calver, so version is unclamped +-keyring = ">=21.2.0" ++keyring = "~22.3.0" + # packaging uses calver, so version is unclamped + packaging = ">=20.4" + pexpect = "^4.7.0" +diff --git a/src/poetry/__version__.py b/src/poetry/__version__.py +index 13fa08fc..4ab11ba1 100644 +--- a/src/poetry/__version__.py ++++ b/src/poetry/__version__.py +@@ -1,16 +1 @@ +-from __future__ import annotations +- +-from typing import TYPE_CHECKING +- +-from poetry.utils._compat import metadata +- +- +-if TYPE_CHECKING: +- from collections.abc import Callable +- +- +-# The metadata.version that we import for Python 3.7 is untyped, work around +-# that. 
+-version: Callable[[str], str] = metadata.version +- +-__version__ = version("poetry") ++__version__ = "1.2.0rc2" +diff --git a/src/poetry/console/application.py b/src/poetry/console/application.py +index 2b6bd633..151043d5 100644 +--- a/src/poetry/console/application.py ++++ b/src/poetry/console/application.py +@@ -79,13 +79,13 @@ COMMANDS = [ + "env remove", + "env use", + # Self commands +- "self add", +- "self install", +- "self lock", +- "self remove", +- "self update", +- "self show", +- "self show plugins", ++ # "self add", ++ # "self install", ++ # "self lock", ++ # "self remove", ++ # "self update", ++ # "self show", ++ # "self show plugins", + # Source commands + "source add", + "source remove", +diff --git a/src/poetry/console/commands/init.py b/src/poetry/console/commands/init.py +index e2ebf9cf..ef4c98f1 100644 +--- a/src/poetry/console/commands/init.py ++++ b/src/poetry/console/commands/init.py +@@ -75,7 +75,7 @@ The init command creates a basic pyproject.toml file in the from poetry.core.vcs.git import GitConfig + from poetry.layouts import layout - from poetry.utils._compat import Path - from poetry.utils.env import SystemEnv + from poetry.utils.env import SystemEnv, InterpreterLookup pyproject = PyProjectTOML(Path.cwd() / "pyproject.toml") -@@ -141,7 +141,8 @@ The init command creates a basic pyproject.toml file in the +@@ -147,7 +147,8 @@ The init command creates a basic pyproject.toml file in the python = self.option("python") if not python: - current_env = SystemEnv(Path(sys.executable)) -+ executable, py_minor, py_patch = InterpreterLookup.find() -+ current_env = SystemEnv(executable) - default_python = "^{}".format( - ".".join(str(v) for v in current_env.version_info[:2]) ++ executable, _, _ = InterpreterLookup.find() ++ current_env = SystemEnv(Path(executable)) + default_python = "^" + ".".join( + str(v) for v in current_env.version_info[:2] ) -diff --git a/poetry/console/commands/new.py b/poetry/console/commands/new.py -index 481b0577..e77fad50 100644 ---- a/poetry/console/commands/new.py -+++ b/poetry/console/commands/new.py -@@ -24,7 +24,7 @@ class NewCommand(Command): +diff --git a/src/poetry/console/commands/new.py b/src/poetry/console/commands/new.py +index cde571aa..0a25b2a0 100644 +--- a/src/poetry/console/commands/new.py ++++ b/src/poetry/console/commands/new.py +@@ -32,7 +32,7 @@ class NewCommand(Command): from poetry.core.vcs.git import GitConfig + from poetry.layouts import layout - from poetry.utils._compat import Path - from poetry.utils.env import SystemEnv + from poetry.utils.env import SystemEnv, InterpreterLookup if self.option("src"): - layout_ = layout("src") -@@ -54,7 +54,8 @@ class NewCommand(Command): + layout_cls = layout("src") +@@ -65,7 +65,8 @@ class NewCommand(Command): if author_email: - author += " <{}>".format(author_email) + author += f" <{author_email}>" - current_env = SystemEnv(Path(sys.executable)) -+ executable, py_minor, py_patch = InterpreterLookup.find() -+ current_env = SystemEnv(executable) - default_python = "^{}".format( - ".".join(str(v) for v in current_env.version_info[:2]) - ) -diff --git a/poetry/console/commands/self/self.py b/poetry/console/commands/self/self.py -index 3e5cafa9..ff1abefe 100644 ---- a/poetry/console/commands/self/self.py -+++ b/poetry/console/commands/self/self.py -@@ -1,5 +1,4 @@ - from ..command import Command --from .update import SelfUpdateCommand - - - class SelfCommand(Command): -@@ -7,7 +6,7 @@ class SelfCommand(Command): - name = "self" - description = "Interact with Poetry directly." 
- -- commands = [SelfUpdateCommand()] -+ commands = [] - - def handle(self): - return self.call("help", self._config.name) -diff --git a/poetry/repositories/installed_repository.py b/poetry/repositories/installed_repository.py -index 1320fdd6..03513103 100644 ---- a/poetry/repositories/installed_repository.py -+++ b/poetry/repositories/installed_repository.py -@@ -9,10 +9,11 @@ from poetry.utils._compat import Path - from poetry.utils._compat import metadata - from poetry.utils.env import Env - -+from . import __path__ - from .repository import Repository ++ executable, _, _ = InterpreterLookup.find() ++ current_env = SystemEnv(Path(executable)) + default_python = "^" + ".".join(str(v) for v in current_env.version_info[:2]) + + layout_ = layout_cls( +diff --git a/src/poetry/json/__init__.py b/src/poetry/json/__init__.py +index 1e237794..a5849e00 100644 +--- a/src/poetry/json/__init__.py ++++ b/src/poetry/json/__init__.py +@@ -1,26 +1,19 @@ + from __future__ import annotations + import json +-import os --_VENDORS = Path(__file__).parent.parent.joinpath("_vendor") -+_VENDORS = Path(__path__[0]).parent.joinpath("_vendor") +-from pathlib import Path ++from importlib import resources + from typing import Any + import jsonschema - try: -diff --git a/poetry/utils/_compat.py b/poetry/utils/_compat.py -index 937f9b30..40e41514 100644 ---- a/poetry/utils/_compat.py -+++ b/poetry/utils/_compat.py -@@ -1,5 +1,5 @@ - import sys +-from poetry.core.json import SCHEMA_DIR as CORE_SCHEMA_DIR +- +- +-SCHEMA_DIR = os.path.join(os.path.dirname(__file__), "schemas") - -+import importlib_metadata as metadata - try: - from functools32 import lru_cache -@@ -13,10 +13,7 @@ except ImportError: + class ValidationError(ValueError): + pass + + def validate_object(obj: dict[str, Any]) -> list[str]: +- schema_file = Path(SCHEMA_DIR, "poetry.json") +- schema = json.loads(schema_file.read_text(encoding="utf-8")) ++ schema = json.loads(resources.read_text(f"{__name__}.schemas", "poetry.json")) + + validator = jsonschema.Draft7Validator(schema) + validation_errors = sorted( +@@ -39,7 +32,7 @@ def validate_object(obj: dict[str, Any]) -> list[str]: + errors.append(message) + + core_schema = json.loads( +- Path(CORE_SCHEMA_DIR, "poetry-schema.json").read_text(encoding="utf-8") ++ resources.read_text(f"poetry.core.json.schemas", "poetry-schema.json") + ) + + if core_schema["additionalProperties"]: +diff --git a/src/poetry/json/schemas/__init__.py b/src/poetry/json/schemas/__init__.py +new file mode 100644 +index 00000000..e69de29b +diff --git a/src/poetry/repositories/installed_repository.py b/src/poetry/repositories/installed_repository.py +index 228df353..087d01de 100644 +--- a/src/poetry/repositories/installed_repository.py ++++ b/src/poetry/repositories/installed_repository.py +@@ -20,9 +20,6 @@ if TYPE_CHECKING: + from poetry.utils.env import Env + + +-_VENDORS = Path(__file__).parent.parent.joinpath("_vendor") +- +- try: - import zipfile as zipp + FileNotFoundError + except NameError: +@@ -281,13 +278,6 @@ class InstalledRepository(Repository): + if name in seen: + continue + +- try: +- path.relative_to(_VENDORS) +- except ValueError: +- pass +- else: +- continue - -- from importlib import metadata - except ImportError: + package = cls.create_package_from_distribution(distribution, env) + + if with_dependencies: +diff --git a/src/poetry/utils/_compat.py b/src/poetry/utils/_compat.py +index a1c81582..b06ce4fb 100644 +--- a/src/poetry/utils/_compat.py ++++ b/src/poetry/utils/_compat.py +@@ -4,14 +4,7 @@ import sys + + from 
contextlib import suppress + +- +-# TODO: use try/except ImportError when +-# https://github.com/python/mypy/issues/1393 is fixed +-if sys.version_info < (3, 10): +- # compatibility for python <3.10 - import importlib_metadata as metadata - import zipp +-else: +- from importlib import metadata ++import importlib_metadata as metadata - try: -diff --git a/poetry/utils/env.py b/poetry/utils/env.py -index 0b91aa22..1f533f44 100644 ---- a/poetry/utils/env.py -+++ b/poetry/utils/env.py -@@ -7,10 +7,10 @@ import re - import shutil - import sys - import sysconfig --import textwrap + WINDOWS = sys.platform == "win32" + +diff --git a/src/poetry/utils/env.py b/src/poetry/utils/env.py +index ab645bd7..3b8e444b 100644 +--- a/src/poetry/utils/env.py ++++ b/src/poetry/utils/env.py +@@ -16,6 +16,7 @@ import warnings from contextlib import contextmanager from copy import deepcopy +from importlib import resources - from typing import Any - from typing import Dict - from typing import List -@@ -18,7 +18,6 @@ from typing import Optional - from typing import Tuple - from typing import Union - --import packaging.tags - import tomlkit - import virtualenv - -@@ -28,6 +27,8 @@ from packaging.tags import interpreter_name + from pathlib import Path + from subprocess import CalledProcessError + from typing import TYPE_CHECKING +@@ -30,6 +31,7 @@ from packaging.tags import Tag + from packaging.tags import interpreter_name from packaging.tags import interpreter_version from packaging.tags import sys_tags - -+from poetry import __path__ as __pkgpath__ -+from poetry.core.packages import Package - from poetry.core.semver import parse_constraint ++from poetry.core.packages.package import Package + from poetry.core.semver.helpers import parse_constraint from poetry.core.semver.version import Version from poetry.core.toml.file import TOMLFile -@@ -43,6 +44,8 @@ from poetry.utils._compat import subprocess - from poetry.utils.helpers import is_dir_writable +@@ -46,6 +48,8 @@ from poetry.utils.helpers import is_dir_writable from poetry.utils.helpers import paths_csv + from poetry.utils.helpers import remove_directory +from . 
import __name__ as _pkg + + if TYPE_CHECKING: + from collections.abc import Iterable +@@ -59,29 +63,7 @@ if TYPE_CHECKING: + from poetry.poetry import Poetry + + +-GET_SYS_TAGS = f""" +-import importlib.util +-import json +-import sys +- +-from pathlib import Path +- +-spec = importlib.util.spec_from_file_location( +- "packaging", Path(r"{packaging.__file__}") +-) +-packaging = importlib.util.module_from_spec(spec) +-sys.modules[spec.name] = packaging +- +-spec = importlib.util.spec_from_file_location( +- "packaging.tags", Path(r"{packaging.tags.__file__}") +-) +-packaging_tags = importlib.util.module_from_spec(spec) +-spec.loader.exec_module(packaging_tags) +- +-print( +- json.dumps([(t.interpreter, t.abi, t.platform) for t in packaging_tags.sys_tags()]) +-) +-""" ++GET_SYS_TAGS = resources.read_text(_pkg, "packaging_tags.py.template") + + GET_ENVIRONMENT_INFO = """\ - import json -@@ -466,7 +469,10 @@ class EnvManager(object): +@@ -660,7 +642,10 @@ class EnvManager: if self._env is not None and not reload: return self._env @@ -177,11 +267,11 @@ index 0b91aa22..1f533f44 100644 + ".".join(str(c) for c in sys.version_info[:2]) + ) - venv_path = self._poetry.config.get("virtualenvs.path") - if venv_path is None: -@@ -679,8 +685,7 @@ class EnvManager(object): - if not name: + venv_path = self._poetry.config.virtualenvs_path + +@@ -861,8 +846,7 @@ class EnvManager: name = self._poetry.package.name + assert name is not None - python_patch = ".".join([str(v) for v in sys.version_info[:3]]) - python_minor = ".".join([str(v) for v in sys.version_info[:2]]) @@ -189,7 +279,7 @@ index 0b91aa22..1f533f44 100644 if executable: python_patch = decode( subprocess.check_output( -@@ -697,7 +702,10 @@ class EnvManager(object): +@@ -875,7 +859,10 @@ class EnvManager: python_minor = ".".join(python_patch.split(".")[:2]) supported_python = self._poetry.package.python_constraint @@ -201,30 +291,27 @@ index 0b91aa22..1f533f44 100644 # The currently activated or chosen Python version # is not compatible with the Python constraint specified # for the project. -@@ -709,60 +717,7 @@ class EnvManager(object): +@@ -887,53 +874,7 @@ class EnvManager: self._poetry.package.python_versions, python_patch ) -- io.write_line( -- "The currently activated Python version {} " -- "is not supported by the project ({}).\n" -- "Trying to find and use a compatible version. ".format( -- python_patch, self._poetry.package.python_versions -- ) +- io.write_error_line( +- f"The currently activated Python version {python_patch} is not" +- f" supported by the project ({self._poetry.package.python_versions}).\n" +- "Trying to find and use a compatible version. 
" - ) - -- for python_to_try in reversed( -- sorted( -- self._poetry.package.AVAILABLE_PYTHONS, -- key=lambda v: (v.startswith("3"), -len(v), v), -- ) +- for python_to_try in sorted( +- self._poetry.package.AVAILABLE_PYTHONS, +- key=lambda v: (v.startswith("3"), -len(v), v), +- reverse=True, - ): - if len(python_to_try) == 1: -- if not parse_constraint("^{}.0".format(python_to_try)).allows_any( +- if not parse_constraint(f"^{python_to_try}.0").allows_any( - supported_python - ): - continue -- elif not supported_python.allows_all( +- elif not supported_python.allows_any( - parse_constraint(python_to_try + ".*") - ): - continue @@ -232,17 +319,13 @@ index 0b91aa22..1f533f44 100644 - python = "python" + python_to_try - - if io.is_debug(): -- io.write_line("Trying {}".format(python)) +- io.write_line(f"Trying {python}") - - try: - python_patch = decode( - subprocess.check_output( - list_to_shell_command( -- [ -- python, -- "-c", -- "\"import sys; print('.'.join([str(s) for s in sys.version_info[:3]]))\"", -- ] +- [python, "-c", GET_PYTHON_VERSION_ONELINER] - ), - stderr=subprocess.STDOUT, - shell=True, @@ -255,7 +338,7 @@ index 0b91aa22..1f533f44 100644 - continue - - if supported_python.allows(Version.parse(python_patch)): -- io.write_line("Using {} ({})".format(python, python_patch)) +- io.write_line(f"Using {python} ({python_patch})") - executable = python - python_minor = ".".join(python_patch.split(".")[:2]) - break @@ -263,25 +346,10 @@ index 0b91aa22..1f533f44 100644 if not executable: raise NoCompatiblePythonVersionFound( -@@ -792,6 +747,14 @@ class EnvManager(object): +@@ -990,22 +931,6 @@ class EnvManager: + prompt=venv_prompt, ) - self.build_venv(venv, executable=executable) -+ -+ if not root_venv: -+ envs = tomlkit.document() -+ envs_file = TOMLFile(venv_path / self.ENVS_FILE) -+ if envs_file.exists(): -+ envs = envs_file.read() -+ envs[name] = {"minor": python_minor, "patch": python_patch} -+ envs_file.write(envs) - else: - if force: - if not env.is_sane(): -@@ -808,22 +771,6 @@ class EnvManager(object): - elif io.is_very_verbose(): - io.write_line("Virtualenv {} already exists.".format(name)) - - # venv detection: - # stdlib venv may symlink sys.executable, so we can't use realpath. - # but others can symlink *to* the venv Python, @@ -296,126 +364,131 @@ index 0b91aa22..1f533f44 100644 - p_venv = os.path.normcase(str(venv)) - if any(p.startswith(p_venv) for p in paths): - # Running properly in the virtualenv, don't need to do anything -- return SystemEnv(Path(sys.prefix), Path(self.get_base_prefix())) +- return self.get_system_env() - return VirtualEnv(venv) @classmethod -@@ -837,7 +784,7 @@ class EnvManager(object): - "--no-download", - "--no-periodic-update", - "--python", -- executable or sys.executable, -+ executable or "python", - str(path), - ] - ) -@@ -879,9 +826,13 @@ class EnvManager(object): +@@ -1050,7 +975,7 @@ class EnvManager: + "--no-download", + "--no-periodic-update", + "--python", +- executable or sys.executable, ++ executable or "python", + ] + + if prompt is not None: +@@ -1114,9 +1039,13 @@ class EnvManager: want to retrieve Poetry's custom virtual environment (e.g. plugin installation or self update). 
""" - prefix, base_prefix = Path(sys.prefix), Path(cls.get_base_prefix()) -- env = SystemEnv(prefix) +- env: Env = SystemEnv(prefix) - if not naive: + pydef_executable, _, _ = InterpreterLookup.find() + prefix, base_prefix = ( + Path(pydef_executable) if pydef_executable else None, + Path(cls.get_base_prefix()) + ) -+ env = SystemEnv(prefix) if prefix else NullEnv() ++ env: Env = SystemEnv(prefix) if prefix else NullEnv() + if not naive and prefix: if prefix.joinpath("poetry_env").exists(): env = GenericEnv(base_prefix, child_env=env) else: -@@ -1266,96 +1217,50 @@ class SystemEnv(Env): +@@ -1542,97 +1471,53 @@ class SystemEnv(Env): A system (i.e. not a virtualenv) Python environment. """ - @property -- def python(self): # type: () -> str +- def python(self) -> str: - return sys.executable -+ def __init__(self, path, base=None, auto_path=True): ++ def __init__(self, path: Path, base: Path | None = None, auto_path: bool = True) -> None: + self._is_windows = sys.platform == "win32" + if auto_path and path: -+ path = Path(self._run([str(path), "-"], input_=GET_BASE_PREFIX).strip()) ++ path = Path( ++ self._run( ++ [str(path), "-W", "ignore", "-"], ++ input_=GET_BASE_PREFIX ++ ).strip() ++ ) + super().__init__(path, base=base) @property -- def sys_path(self): # type: () -> List[str] + def sys_path(self) -> list[str]: - return sys.path -- -- def get_version_info(self): # type: () -> Tuple[int] -- return sys.version_info -- -- def get_python_implementation(self): # type: () -> str ++ output = self.run_python_script(GET_SYS_PATH) ++ return json.loads(output) + + def get_version_info(self) -> tuple[Any, ...]: +- return tuple(sys.version_info) ++ output = self.run_python_script(GET_PYTHON_VERSION) ++ return tuple([int(s) for s in output.strip().split(".")]) + + def get_python_implementation(self) -> str: - return platform.python_implementation() -- -- def get_pip_command(self): # type: () -> List[str] ++ return self.marker_env["platform_python_implementation"] + +- def get_pip_command(self, embedded: bool = False) -> list[str]: - # If we're not in a venv, assume the interpreter we're running on - # has a pip and use that -- return [sys.executable, "-m", "pip"] -- -- def get_paths(self): # type: () -> Dict[str, str] +- return [sys.executable, self.pip_embedded if embedded else self.pip] ++ def get_marker_env(self) -> dict[str, Any]: ++ output = self.run_python_script(GET_ENVIRONMENT_INFO) ++ return json.loads(output) + + def get_paths(self) -> dict[str, str]: - # We can't use sysconfig.get_paths() because - # on some distributions it does not return the proper paths - # (those used by pip for instance). We go through distutils - # to get the proper ones. 
- import site - -- from distutils.command.install import SCHEME_KEYS # noqa +- from distutils.command.install import SCHEME_KEYS - from distutils.core import Distribution - - d = Distribution() - d.parse_config_files() -- obj = d.get_command_obj("install", create=True) +- with warnings.catch_warnings(): +- warnings.filterwarnings("ignore", "setup.py install is deprecated") +- obj = d.get_command_obj("install", create=True) +- assert obj is not None - obj.finalize_options() -+ def sys_path(self): -+ output = self.run("python", "-", input_=GET_SYS_PATH) -+ return json.loads(output) - +- - paths = sysconfig.get_paths().copy() - for key in SCHEME_KEYS: - if key == "headers": - # headers is not a path returned by sysconfig.get_paths() - continue -+ def get_version_info(self): -+ output = self.run("python", "-", input_=GET_PYTHON_VERSION) -+ return tuple([int(s) for s in output.strip().split(".")]) - -- paths[key] = getattr(obj, "install_{}".format(key)) -+ def get_python_implementation(self): -+ return self.marker_env["platform_python_implementation"] - -- if site.check_enableusersite() and hasattr(obj, "install_usersite"): -- paths["usersite"] = getattr(obj, "install_usersite") -- paths["userbase"] = getattr(obj, "install_userbase") -+ def get_marker_env(self): -+ output = self.run("python", "-", input_=GET_ENVIRONMENT_INFO) -+ return json.loads(output) - +- +- paths[key] = getattr(obj, f"install_{key}") +- +- if site.check_enableusersite(): +- usersite = getattr(obj, "install_usersite", None) +- userbase = getattr(obj, "install_userbase", None) +- if usersite is not None and userbase is not None: +- paths["usersite"] = usersite +- paths["userbase"] = userbase +- - return paths -+ def get_paths(self): -+ output = self.run("python", "-", input_=GET_PATHS) ++ output = self.run_python_script(GET_PATHS) + return json.loads(output) - def get_supported_tags(self): # type: () -> List[Tag] + def get_supported_tags(self) -> list[Tag]: - return list(sys_tags()) - -- def get_marker_env(self): # type: () -> Dict[str, Any] +- def get_marker_env(self) -> dict[str, Any]: - if hasattr(sys, "implementation"): - info = sys.implementation.version -- iver = "{0.major}.{0.minor}.{0.micro}".format(info) +- iver = f"{info.major}.{info.minor}.{info.micro}" - kind = info.releaselevel - if kind != "final": - iver += kind[0] + str(info.serial) -+ script = resources.read_text(_pkg, "packaging_tags.py.template") - +- - implementation_name = sys.implementation.name - else: - iver = "0" - implementation_name = "" -+ output = self.run("python", "-", input_=script) - +- - return { - "implementation_name": implementation_name, - "implementation_version": iver, @@ -426,21 +499,22 @@ index 0b91aa22..1f533f44 100644 - "platform_version": platform.version(), - "python_full_version": platform.python_version(), - "platform_python_implementation": platform.python_implementation(), -- "python_version": ".".join( -- v for v in platform.python_version().split(".")[:2] -- ), +- "python_version": ".".join(platform.python_version().split(".")[:2]), - "sys_platform": sys.platform, - "version_info": sys.version_info, -- # Extra information - "interpreter_name": interpreter_name(), - "interpreter_version": interpreter_version(), - } ++ output = self.run_python_script(GET_SYS_TAGS) + return [Tag(*t) for t in json.loads(output)] -- def get_pip_version(self): # type: () -> Version +- def get_pip_version(self) -> Version: - from pip import __version__ -+ def get_pip_command(self): -+ return [self.python, "-m", "pip"] ++ def get_pip_command(self, 
embedded: bool = False) -> list[str]: ++ return [ ++ self._bin(self._executable), ++ self.pip_embedded if embedded else self.pip, ++ ] - return Version.parse(__version__) + def get_pip_version(self): @@ -450,80 +524,55 @@ index 0b91aa22..1f533f44 100644 + return Version.parse("0.0") + return Version.parse(m.group(1)) -- def is_venv(self): # type: () -> bool -+ def is_venv(self): + def is_venv(self) -> bool: return self._path != self._base - - -@@ -1394,30 +1299,7 @@ class VirtualEnv(Env): - return [self._bin(self._pip_executable)] - - def get_supported_tags(self): # type: () -> List[Tag] -- file_path = Path(packaging.tags.__file__) -- if file_path.suffix == ".pyc": -- # Python 2 -- file_path = file_path.with_suffix(".py") -- -- with file_path.open(encoding="utf-8") as f: -- script = decode(f.read()) -- -- script = script.replace( -- "from ._typing import TYPE_CHECKING, cast", -- "TYPE_CHECKING = False\ncast = lambda type_, value: value", -- ) -- script = script.replace( -- "from ._typing import MYPY_CHECK_RUNNING, cast", -- "MYPY_CHECK_RUNNING = False\ncast = lambda type_, value: value", -- ) -- -- script += textwrap.dedent( -- """ -- import json -- -- print(json.dumps([(t.interpreter, t.abi, t.platform) for t in sys_tags()])) -- """ -- ) -+ script = resources.read_text(_pkg, "packaging_tags.py.template") - - output = self.run_python_script(script) - -@@ -1574,11 +1456,98 @@ class NullEnv(SystemEnv): +@@ -1853,16 +1738,100 @@ class NullEnv(SystemEnv): if path is None: path = Path(sys.prefix) -- super(NullEnv, self).__init__(path, base=base) +- super().__init__(path, base=base) + super().__init__(path, base=base, auto_path=False) self._execute = execute - self.executed = [] + self.executed: list[list[str]] = [] + @property -+ def python(self): # type: () -> str ++ def python(self) -> str: + return sys.executable + + @property -+ def sys_path(self): # type: () -> List[str] ++ def sys_path(self) -> list[str]: + return sys.path + -+ def get_version_info(self): # type: () -> Tuple[int] -+ return sys.version_info ++ def get_version_info(self) -> tuple[Any, ...]: ++ return tuple(sys.version_info) + -+ def get_python_implementation(self): # type: () -> str ++ def get_python_implementation(self) -> str: + return platform.python_implementation() + -+ def get_paths(self): # type: () -> Dict[str, str] + def get_pip_command(self, embedded: bool = False) -> list[str]: +- return [ +- self._bin(self._executable), +- self.pip_embedded if embedded else self.pip, +- ] ++ return [sys.executable, self.pip_embedded if embedded else self.pip] ++ ++ def get_paths(self) -> dict[str, str]: + # We can't use sysconfig.get_paths() because + # on some distributions it does not return the proper paths + # (those used by pip for instance). We go through distutils + # to get the proper ones. 
+ import site + -+ from distutils.command.install import SCHEME_KEYS # noqa ++ from distutils.command.install import SCHEME_KEYS + from distutils.core import Distribution + + d = Distribution() + d.parse_config_files() -+ obj = d.get_command_obj("install", create=True) ++ with warnings.catch_warnings(): ++ warnings.filterwarnings("ignore", "setup.py install is deprecated") ++ obj = d.get_command_obj("install", create=True) ++ assert obj is not None + obj.finalize_options() + + paths = sysconfig.get_paths().copy() @@ -532,21 +581,24 @@ index 0b91aa22..1f533f44 100644 + # headers is not a path returned by sysconfig.get_paths() + continue + -+ paths[key] = getattr(obj, "install_{}".format(key)) ++ paths[key] = getattr(obj, f"install_{key}") + -+ if site.check_enableusersite() and hasattr(obj, "install_usersite"): -+ paths["usersite"] = getattr(obj, "install_usersite") -+ paths["userbase"] = getattr(obj, "install_userbase") ++ if site.check_enableusersite(): ++ usersite = getattr(obj, "install_usersite", None) ++ userbase = getattr(obj, "install_userbase", None) ++ if usersite is not None and userbase is not None: ++ paths["usersite"] = usersite ++ paths["userbase"] = userbase + + return paths + -+ def get_supported_tags(self): # type: () -> List[Tag] ++ def get_supported_tags(self) -> list[Tag]: + return list(sys_tags()) + -+ def get_marker_env(self): # type: () -> Dict[str, Any] ++ def get_marker_env(self) -> dict[str, Any]: + if hasattr(sys, "implementation"): + info = sys.implementation.version -+ iver = "{0.major}.{0.minor}.{0.micro}".format(info) ++ iver = f"{info.major}.{info.minor}.{info.micro}" + kind = info.releaselevel + if kind != "final": + iver += kind[0] + str(info.serial) @@ -566,41 +618,23 @@ index 0b91aa22..1f533f44 100644 + "platform_version": platform.version(), + "python_full_version": platform.python_version(), + "platform_python_implementation": platform.python_implementation(), -+ "python_version": ".".join( -+ v for v in platform.python_version().split(".")[:2] -+ ), ++ "python_version": ".".join(platform.python_version().split(".")[:2]), + "sys_platform": sys.platform, + "version_info": sys.version_info, -+ # Extra information + "interpreter_name": interpreter_name(), + "interpreter_version": interpreter_version(), + } + -+ def get_pip_version(self): # type: () -> Version ++ def get_pip_version(self) -> Version: + from pip import __version__ + + return Version.parse(__version__) -+ -+ def is_venv(self): # type: () -> bool -+ return self._path != self._base -+ - def get_pip_command(self): # type: () -> List[str] - return [self._bin("python"), "-m", "pip"] - -@@ -1624,6 +1593,10 @@ class MockEnv(NullEnv): - self._mock_marker_env = marker_env - self._supported_tags = supported_tags -+ @property -+ def python(self): # type: () -> str -+ return self._base -+ - @property - def platform(self): # type: () -> str - return self._platform -@@ -1662,3 +1635,70 @@ class MockEnv(NullEnv): + def _run(self, cmd: list[str], **kwargs: Any) -> int | str: + self.executed.append(cmd) +@@ -2009,3 +1978,57 @@ class MockEnv(NullEnv): - def is_venv(self): # type: () -> bool + def is_venv(self) -> bool: return self._is_venv + + @@ -611,15 +645,10 @@ index 0b91aa22..1f533f44 100644 + python_patch = decode( + subprocess.check_output( + list_to_shell_command( -+ [ -+ executable, -+ "-c", -+ "\"import sys; print('.'.join([str(s) for s in sys.version_info[:3]]))\"", -+ ] ++ [executable, "-c", GET_PYTHON_VERSION_ONELINER] + ), -+ stderr=subprocess.STDOUT, -+ shell=True, -+ ).strip() ++ shell=True ++ 
) + ) + except CalledProcessError: + return False, None, None @@ -645,179 +674,537 @@ index 0b91aa22..1f533f44 100644 + if match: + return guess, minor, patch + -+ for python_to_try in reversed( -+ sorted( -+ Package.AVAILABLE_PYTHONS, -+ key=lambda v: (v.startswith("3"), -len(v), v), -+ ) ++ for python_to_try in sorted( ++ Package.AVAILABLE_PYTHONS, ++ key=lambda v: ( ++ v.startswith("3"), ++ len(v) == 1, ++ int(v.split(".")[0]) * 100 + int((v.split(".") + ["0"])[1]) ++ ), ++ reverse=True + ): -+ if constraint: -+ if len(python_to_try) == 1: -+ if not parse_constraint("^{}.0".format(python_to_try)).allows_any( -+ constraint -+ ): -+ continue -+ elif not constraint.allows_all( -+ parse_constraint(python_to_try + ".*") -+ ): -+ continue -+ -+ guess = "python" + python_to_try ++ guess = f"python{python_to_try}" + match, minor, patch = cls._version_check(guess, constraint) + if match: + executable = guess + break + + return executable, minor, patch -diff --git a/poetry/utils/packaging_tags.py.template b/poetry/utils/packaging_tags.py.template +diff --git a/src/poetry/utils/packaging_tags.py.template b/src/poetry/utils/packaging_tags.py.template new file mode 100644 -index 00000000..15804593 +index 00000000..4e98b8b3 --- /dev/null -+++ b/poetry/utils/packaging_tags.py.template -@@ -0,0 +1,872 @@ ++++ b/src/poetry/utils/packaging_tags.py.template +@@ -0,0 +1,905 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + -+from __future__ import absolute_import -+ -+import distutils.util -+ -+try: -+ from importlib.machinery import EXTENSION_SUFFIXES -+except ImportError: # pragma: no cover -+ import imp -+ -+ EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()] -+ del imp -+import collections +import json +import logging -+import os +import platform ++import sys ++import sysconfig ++from importlib.machinery import EXTENSION_SUFFIXES ++from typing import ( ++ Dict, ++ FrozenSet, ++ Iterable, ++ Iterator, ++ List, ++ Optional, ++ Sequence, ++ Tuple, ++ Union, ++ cast, ++) ++ ++import collections ++import contextlib ++import functools ++import operator ++import os +import re +import struct ++import subprocess +import sys -+import sysconfig +import warnings ++from typing import IO, Dict, Iterator, NamedTuple, Optional, Tuple + -+TYPE_CHECKING = False -+cast = lambda type_, value: value -+ -+if TYPE_CHECKING: # pragma: no cover -+ from typing import ( -+ IO, -+ Dict, -+ FrozenSet, -+ Iterable, -+ Iterator, -+ List, -+ Optional, -+ Sequence, -+ Tuple, -+ Union, -+ ) + -+ PythonVersion = Sequence[int] -+ MacVersion = Tuple[int, int] -+ GlibcVersion = Tuple[int, int] ++# Python does not provide platform information at sufficient granularity to ++# identify the architecture of the running executable in some cases, so we ++# determine it dynamically by reading the information from the running ++# process. This only applies on Linux, which uses the ELF format. ++class _ELFFileHeader: ++ # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header ++ class _InvalidELFFileHeader(ValueError): ++ """ ++ An invalid ELF file header was found. 
++ """ ++ ++ ELF_MAGIC_NUMBER = 0x7F454C46 ++ ELFCLASS32 = 1 ++ ELFCLASS64 = 2 ++ ELFDATA2LSB = 1 ++ ELFDATA2MSB = 2 ++ EM_386 = 3 ++ EM_S390 = 22 ++ EM_ARM = 40 ++ EM_X86_64 = 62 ++ EF_ARM_ABIMASK = 0xFF000000 ++ EF_ARM_ABI_VER5 = 0x05000000 ++ EF_ARM_ABI_FLOAT_HARD = 0x00000400 + ++ def __init__(self, file: IO[bytes]) -> None: ++ def unpack(fmt: str) -> int: ++ try: ++ data = file.read(struct.calcsize(fmt)) ++ result: Tuple[int, ...] = struct.unpack(fmt, data) ++ except struct.error: ++ raise _ELFFileHeader._InvalidELFFileHeader() ++ return result[0] + -+logger = logging.getLogger(__name__) ++ self.e_ident_magic = unpack(">I") ++ if self.e_ident_magic != self.ELF_MAGIC_NUMBER: ++ raise _ELFFileHeader._InvalidELFFileHeader() ++ self.e_ident_class = unpack("B") ++ if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}: ++ raise _ELFFileHeader._InvalidELFFileHeader() ++ self.e_ident_data = unpack("B") ++ if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}: ++ raise _ELFFileHeader._InvalidELFFileHeader() ++ self.e_ident_version = unpack("B") ++ self.e_ident_osabi = unpack("B") ++ self.e_ident_abiversion = unpack("B") ++ self.e_ident_pad = file.read(7) ++ format_h = "H" ++ format_i = "I" ++ format_q = "Q" ++ format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q ++ self.e_type = unpack(format_h) ++ self.e_machine = unpack(format_h) ++ self.e_version = unpack(format_i) ++ self.e_entry = unpack(format_p) ++ self.e_phoff = unpack(format_p) ++ self.e_shoff = unpack(format_p) ++ self.e_flags = unpack(format_i) ++ self.e_ehsize = unpack(format_h) ++ self.e_phentsize = unpack(format_h) ++ self.e_phnum = unpack(format_h) ++ self.e_shentsize = unpack(format_h) ++ self.e_shnum = unpack(format_h) ++ self.e_shstrndx = unpack(format_h) + -+INTERPRETER_SHORT_NAMES = { -+ "python": "py", # Generic. 
-+ "cpython": "cp", -+ "pypy": "pp", -+ "ironpython": "ip", -+ "jython": "jy", -+} # type: Dict[str, str] + ++def _get_elf_header() -> Optional[_ELFFileHeader]: ++ try: ++ with open(sys.executable, "rb") as f: ++ elf_header = _ELFFileHeader(f) ++ except (OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader): ++ return None ++ return elf_header + -+_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32 + ++def _is_linux_armhf() -> bool: ++ # hard-float ABI can be detected from the ELF header of the running ++ # process ++ # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf ++ elf_header = _get_elf_header() ++ if elf_header is None: ++ return False ++ result = elf_header.e_ident_class == elf_header.ELFCLASS32 ++ result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB ++ result &= elf_header.e_machine == elf_header.EM_ARM ++ result &= ( ++ elf_header.e_flags & elf_header.EF_ARM_ABIMASK ++ ) == elf_header.EF_ARM_ABI_VER5 ++ result &= ( ++ elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD ++ ) == elf_header.EF_ARM_ABI_FLOAT_HARD ++ return result ++ ++ ++def _is_linux_i686() -> bool: ++ elf_header = _get_elf_header() ++ if elf_header is None: ++ return False ++ result = elf_header.e_ident_class == elf_header.ELFCLASS32 ++ result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB ++ result &= elf_header.e_machine == elf_header.EM_386 ++ return result ++ ++ ++def _have_compatible_abi(arch: str) -> bool: ++ if arch == "armv7l": ++ return _is_linux_armhf() ++ if arch == "i686": ++ return _is_linux_i686() ++ return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"} + -+_LEGACY_MANYLINUX_MAP = { -+ # CentOS 7 w/ glibc 2.17 (PEP 599) -+ (2, 17): "manylinux2014", -+ # CentOS 6 w/ glibc 2.12 (PEP 571) -+ (2, 12): "manylinux2010", -+ # CentOS 5 w/ glibc 2.5 (PEP 513) -+ (2, 5): "manylinux1", -+} + +# If glibc ever changes its major version, we need to know what the last +# minor version was, so we can build the complete list of all versions. +# For now, guess what the highest minor version might be, assume it will +# be 50 for testing. Once this actually happens, update the dictionary +# with the actual value. -+_LAST_GLIBC_MINOR = collections.defaultdict(lambda: 50) # type: Dict[int, int] -+glibcVersion = collections.namedtuple("Version", ["major", "minor"]) ++_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50) + + -+class Tag(object): -+ """ -+ A representation of the tag triple for a wheel. ++class _GLibCVersion(NamedTuple): ++ major: int ++ minor: int + -+ Instances are considered immutable and thus are hashable. Equality checking -+ is also supported. ++ ++def _glibc_version_string_confstr() -> Optional[str]: ++ """ ++ Primary implementation of glibc_version_string using os.confstr. + """ ++ # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely ++ # to be broken or missing. This strategy is used in the standard library ++ # platform module. ++ # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183 ++ try: ++ # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17". ++ version_string = os.confstr("CS_GNU_LIBC_VERSION") ++ assert version_string is not None ++ _, version = version_string.split() ++ except (AssertionError, AttributeError, OSError, ValueError): ++ # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... 
++ return None ++ return version + -+ __slots__ = ["_interpreter", "_abi", "_platform", "_hash"] + -+ def __init__(self, interpreter, abi, platform): -+ # type: (str, str, str) -> None -+ self._interpreter = interpreter.lower() -+ self._abi = abi.lower() -+ self._platform = platform.lower() -+ # The __hash__ of every single element in a Set[Tag] will be evaluated each time -+ # that a set calls its `.disjoint()` method, which may be called hundreds of -+ # times when scanning a page of links for packages with tags matching that ++def _glibc_version_string_ctypes() -> Optional[str]: ++ """ ++ Fallback implementation of glibc_version_string using ctypes. ++ """ ++ try: ++ import ctypes ++ except ImportError: ++ return None ++ ++ # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen ++ # manpage says, "If filename is NULL, then the returned handle is for the ++ # main program". This way we can let the linker do the work to figure out ++ # which libc our process is actually using. ++ # ++ # We must also handle the special case where the executable is not a ++ # dynamically linked executable. This can occur when using musl libc, ++ # for example. In this situation, dlopen() will error, leading to an ++ # OSError. Interestingly, at least in the case of musl, there is no ++ # errno set on the OSError. The single string argument used to construct ++ # OSError comes from libc itself and is therefore not portable to ++ # hard code here. In any case, failure to call dlopen() means we ++ # can proceed, so we bail on our attempt. ++ try: ++ process_namespace = ctypes.CDLL(None) ++ except OSError: ++ return None ++ ++ try: ++ gnu_get_libc_version = process_namespace.gnu_get_libc_version ++ except AttributeError: ++ # Symbol doesn't exist -> therefore, we are not linked to ++ # glibc. ++ return None ++ ++ # Call gnu_get_libc_version, which returns a string like "2.5" ++ gnu_get_libc_version.restype = ctypes.c_char_p ++ version_str: str = gnu_get_libc_version() ++ # py2 / py3 compatibility: ++ if not isinstance(version_str, str): ++ version_str = version_str.decode("ascii") ++ ++ return version_str ++ ++ ++def _glibc_version_string() -> Optional[str]: ++ """Returns glibc version string, or None if not using glibc.""" ++ return _glibc_version_string_confstr() or _glibc_version_string_ctypes() ++ ++ ++def _parse_glibc_version(version_str: str) -> Tuple[int, int]: ++ """Parse glibc version. ++ ++ We use a regexp instead of str.split because we want to discard any ++ random junk that might come after the minor version -- this might happen ++ in patched/forked versions of glibc (e.g. Linaro's version of glibc ++ uses version strings like "2.20-2014.11"). See gh-3588. ++ """ ++ m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str) ++ if not m: ++ warnings.warn( ++ "Expected glibc version with 2 components major.minor," ++ " got: %s" % version_str, ++ RuntimeWarning, ++ ) ++ return -1, -1 ++ return int(m.group("major")), int(m.group("minor")) ++ ++ ++@functools.lru_cache() ++def _get_glibc_version() -> Tuple[int, int]: ++ version_str = _glibc_version_string() ++ if version_str is None: ++ return (-1, -1) ++ return _parse_glibc_version(version_str) ++ ++ ++# From PEP 513, PEP 600 ++def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool: ++ sys_glibc = _get_glibc_version() ++ if sys_glibc < version: ++ return False ++ # Check for presence of _manylinux module. 
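# Standalone sketch (not from the patch) of the override order implemented
# below: PEP 600 allows a distro to ship a _manylinux module that can veto
# or force compatibility before the plain glibc version comparison is used.
def manylinux_override(major: int, minor: int, arch: str):
    try:
        import _manylinux  # usually absent; shipped by some distros
    except ImportError:
        return None  # no override installed, fall back to the glibc check
    check = getattr(_manylinux, "manylinux_compatible", None)
    if check is None:
        return None  # only the legacy manylinuxN_compatible flags may exist
    result = check(major, minor, arch)
    return None if result is None else bool(result)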
++ try: ++ import _manylinux # noqa ++ except ImportError: ++ return True ++ if hasattr(_manylinux, "manylinux_compatible"): ++ result = _manylinux.manylinux_compatible(version[0], version[1], arch) ++ if result is not None: ++ return bool(result) ++ return True ++ if version == _GLibCVersion(2, 5): ++ if hasattr(_manylinux, "manylinux1_compatible"): ++ return bool(_manylinux.manylinux1_compatible) ++ if version == _GLibCVersion(2, 12): ++ if hasattr(_manylinux, "manylinux2010_compatible"): ++ return bool(_manylinux.manylinux2010_compatible) ++ if version == _GLibCVersion(2, 17): ++ if hasattr(_manylinux, "manylinux2014_compatible"): ++ return bool(_manylinux.manylinux2014_compatible) ++ return True ++ ++ ++_LEGACY_MANYLINUX_MAP = { ++ # CentOS 7 w/ glibc 2.17 (PEP 599) ++ (2, 17): "manylinux2014", ++ # CentOS 6 w/ glibc 2.12 (PEP 571) ++ (2, 12): "manylinux2010", ++ # CentOS 5 w/ glibc 2.5 (PEP 513) ++ (2, 5): "manylinux1", ++} ++ ++ ++def _manylinux_platform_tags(linux: str, arch: str) -> Iterator[str]: ++ if not _have_compatible_abi(arch): ++ return ++ # Oldest glibc to be supported regardless of architecture is (2, 17). ++ too_old_glibc2 = _GLibCVersion(2, 16) ++ if arch in {"x86_64", "i686"}: ++ # On x86/i686 also oldest glibc to be supported is (2, 5). ++ too_old_glibc2 = _GLibCVersion(2, 4) ++ current_glibc = _GLibCVersion(*_get_glibc_version()) ++ glibc_max_list = [current_glibc] ++ # We can assume compatibility across glibc major versions. ++ # https://sourceware.org/bugzilla/show_bug.cgi?id=24636 ++ # ++ # Build a list of maximum glibc versions so that we can ++ # output the canonical list of all glibc from current_glibc ++ # down to too_old_glibc2, including all intermediary versions. ++ for glibc_major in range(current_glibc.major - 1, 1, -1): ++ glibc_minor = _LAST_GLIBC_MINOR[glibc_major] ++ glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor)) ++ for glibc_max in glibc_max_list: ++ if glibc_max.major == too_old_glibc2.major: ++ min_minor = too_old_glibc2.minor ++ else: ++ # For other glibc major versions oldest supported is (x, 0). ++ min_minor = -1 ++ for glibc_minor in range(glibc_max.minor, min_minor, -1): ++ glibc_version = _GLibCVersion(glibc_max.major, glibc_minor) ++ tag = "manylinux_{}_{}".format(*glibc_version) ++ if _is_compatible(tag, arch, glibc_version): ++ yield linux.replace("linux", tag) ++ # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags. ++ if glibc_version in _LEGACY_MANYLINUX_MAP: ++ legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version] ++ if _is_compatible(legacy_tag, arch, glibc_version): ++ yield linux.replace("linux", legacy_tag) ++ ++ ++def _read_unpacked(f: IO[bytes], fmt: str) -> Tuple[int, ...]: ++ return struct.unpack(fmt, f.read(struct.calcsize(fmt))) ++ ++ ++def _parse_ld_musl_from_elf(f: IO[bytes]) -> Optional[str]: ++ """Detect musl libc location by parsing the Python executable. ++ ++ Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca ++ ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html ++ """ ++ f.seek(0) ++ try: ++ ident = _read_unpacked(f, "16B") ++ except struct.error: ++ return None ++ if ident[:4] != tuple(b"\x7fELF"): # Invalid magic, not ELF. ++ return None ++ f.seek(struct.calcsize("HHI"), 1) # Skip file type, machine, and version. ++ ++ try: ++ # e_fmt: Format for program header. ++ # p_fmt: Format for section header. ++ # p_idx: Indexes to find p_type, p_offset, and p_filesz. ++ e_fmt, p_fmt, p_idx = { ++ 1: ("IIIIHHH", "IIIIIIII", (0, 1, 4)), # 32-bit. 
++ 2: ("QQQIHHH", "IIQQQQQQ", (0, 2, 5)), # 64-bit. ++ }[ident[4]] ++ except KeyError: ++ return None ++ else: ++ p_get = operator.itemgetter(*p_idx) ++ ++ # Find the interpreter section and return its content. ++ try: ++ _, e_phoff, _, _, _, e_phentsize, e_phnum = _read_unpacked(f, e_fmt) ++ except struct.error: ++ return None ++ for i in range(e_phnum + 1): ++ f.seek(e_phoff + e_phentsize * i) ++ try: ++ p_type, p_offset, p_filesz = p_get(_read_unpacked(f, p_fmt)) ++ except struct.error: ++ return None ++ if p_type != 3: # Not PT_INTERP. ++ continue ++ f.seek(p_offset) ++ interpreter = os.fsdecode(f.read(p_filesz)).strip("\0") ++ if "musl" not in interpreter: ++ return None ++ return interpreter ++ return None ++ ++ ++class _MuslVersion(NamedTuple): ++ major: int ++ minor: int ++ ++ ++def _parse_musl_version(output: str) -> Optional[_MuslVersion]: ++ lines = [n for n in (n.strip() for n in output.splitlines()) if n] ++ if len(lines) < 2 or lines[0][:4] != "musl": ++ return None ++ m = re.match(r"Version (\d+)\.(\d+)", lines[1]) ++ if not m: ++ return None ++ return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2))) ++ ++ ++@functools.lru_cache() ++def _get_musl_version(executable: str) -> Optional[_MuslVersion]: ++ """Detect currently-running musl runtime version. ++ ++ This is done by checking the specified executable's dynamic linking ++ information, and invoking the loader to parse its output for a version ++ string. If the loader is musl, the output would be something like:: ++ ++ musl libc (x86_64) ++ Version 1.2.2 ++ Dynamic Program Loader ++ """ ++ with contextlib.ExitStack() as stack: ++ try: ++ f = stack.enter_context(open(executable, "rb")) ++ except OSError: ++ return None ++ ld = _parse_ld_musl_from_elf(f) ++ if not ld: ++ return None ++ proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True) ++ return _parse_musl_version(proc.stderr) ++ ++ ++def _musllinux_platform_tags(arch: str) -> Iterator[str]: ++ """Generate musllinux tags compatible to the current platform. ++ ++ :param arch: Should be the part of platform tag after the ``linux_`` ++ prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a ++ prerequisite for the current platform to be musllinux-compatible. ++ ++ :returns: An iterator of compatible musllinux tags. ++ """ ++ sys_musl = _get_musl_version(sys.executable) ++ if sys_musl is None: # Python not dynamically linked against musl. ++ return ++ for minor in range(sys_musl.minor, -1, -1): ++ yield f"musllinux_{sys_musl.major}_{minor}_{arch}" ++ ++ ++logger = logging.getLogger(__name__) ++ ++PythonVersion = Sequence[int] ++MacVersion = Tuple[int, int] ++ ++INTERPRETER_SHORT_NAMES: Dict[str, str] = { ++ "python": "py", # Generic. ++ "cpython": "cp", ++ "pypy": "pp", ++ "ironpython": "ip", ++ "jython": "jy", ++} ++ ++ ++_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32 ++ ++ ++class Tag: ++ """ ++ A representation of the tag triple for a wheel. ++ ++ Instances are considered immutable and thus are hashable. Equality checking ++ is also supported. 
++ """ ++ ++ __slots__ = ["_interpreter", "_abi", "_platform", "_hash"] ++ ++ def __init__(self, interpreter: str, abi: str, platform: str) -> None: ++ self._interpreter = interpreter.lower() ++ self._abi = abi.lower() ++ self._platform = platform.lower() ++ # The __hash__ of every single element in a Set[Tag] will be evaluated each time ++ # that a set calls its `.disjoint()` method, which may be called hundreds of ++ # times when scanning a page of links for packages with tags matching that + # Set[Tag]. Pre-computing the value here produces significant speedups for + # downstream consumers. + self._hash = hash((self._interpreter, self._abi, self._platform)) + + @property -+ def interpreter(self): -+ # type: () -> str ++ def interpreter(self) -> str: + return self._interpreter + + @property -+ def abi(self): -+ # type: () -> str ++ def abi(self) -> str: + return self._abi + + @property -+ def platform(self): -+ # type: () -> str ++ def platform(self) -> str: + return self._platform + -+ def __eq__(self, other): -+ # type: (object) -> bool ++ def __eq__(self, other: object) -> bool: + if not isinstance(other, Tag): + return NotImplemented + + return ( -+ (self.platform == other.platform) -+ and (self.abi == other.abi) -+ and (self.interpreter == other.interpreter) ++ (self._hash == other._hash) # Short-circuit ASAP for perf reasons. ++ and (self._platform == other._platform) ++ and (self._abi == other._abi) ++ and (self._interpreter == other._interpreter) + ) + -+ def __hash__(self): -+ # type: () -> int ++ def __hash__(self) -> int: + return self._hash + -+ def __str__(self): -+ # type: () -> str -+ return "{}-{}-{}".format(self._interpreter, self._abi, self._platform) ++ def __str__(self) -> str: ++ return f"{self._interpreter}-{self._abi}-{self._platform}" + -+ def __repr__(self): -+ # type: () -> str -+ return "<{self} @ {self_id}>".format(self=self, self_id=id(self)) ++ def __repr__(self) -> str: ++ return f"<{self} @ {id(self)}>" + + -+def parse_tag(tag): -+ # type: (str) -> FrozenSet[Tag] ++def parse_tag(tag: str) -> FrozenSet[Tag]: + """ + Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. + @@ -833,24 +1220,7 @@ index 00000000..15804593 + return frozenset(tags) + + -+def _warn_keyword_parameter(func_name, kwargs): -+ # type: (str, Dict[str, bool]) -> bool -+ """ -+ Backwards-compatibility with Python 2.7 to allow treating 'warn' as keyword-only. -+ """ -+ if not kwargs: -+ return False -+ elif len(kwargs) > 1 or "warn" not in kwargs: -+ kwargs.pop("warn", None) -+ arg = next(iter(kwargs.keys())) -+ raise TypeError( -+ "{}() got an unexpected keyword argument {!r}".format(func_name, arg) -+ ) -+ return kwargs["warn"] -+ -+ -+def _get_config_var(name, warn=False): -+ # type: (str, bool) -> Union[int, str, None] ++def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]: + value = sysconfig.get_config_var(name) + if value is None and warn: + logger.debug( @@ -859,13 +1229,11 @@ index 00000000..15804593 + return value + + -+def _normalize_string(string): -+ # type: (str) -> str ++def _normalize_string(string: str) -> str: + return string.replace(".", "_").replace("-", "_") + + -+def _abi3_applies(python_version): -+ # type: (PythonVersion) -> bool ++def _abi3_applies(python_version: PythonVersion) -> bool: + """ + Determine if the Python version supports abi3. 
+ @@ -874,8 +1242,7 @@ index 00000000..15804593 + return len(python_version) > 1 and tuple(python_version) >= (3, 2) + + -+def _cpython_abis(py_version, warn=False): -+ # type: (PythonVersion, bool) -> List[str] ++def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]: + py_version = tuple(py_version) # To allow for version comparison. + abis = [] + version = _version_nodot(py_version[:2]) @@ -901,7 +1268,7 @@ index 00000000..15804593 + elif debug: + # Debug builds can also load "normal" extension modules. + # We can also assume no UCS-4 or pymalloc requirement. -+ abis.append("cp{version}".format(version=version)) ++ abis.append(f"cp{version}") + abis.insert( + 0, + "cp{version}{debug}{pymalloc}{ucs4}".format( @@ -912,12 +1279,12 @@ index 00000000..15804593 + + +def cpython_tags( -+ python_version=None, # type: Optional[PythonVersion] -+ abis=None, # type: Optional[Iterable[str]] -+ platforms=None, # type: Optional[Iterable[str]] -+ **kwargs # type: bool -+): -+ # type: (...) -> Iterator[Tag] ++ python_version: Optional[PythonVersion] = None, ++ abis: Optional[Iterable[str]] = None, ++ platforms: Optional[Iterable[str]] = None, ++ *, ++ warn: bool = False, ++) -> Iterator[Tag]: + """ + Yields the tags for a CPython interpreter. + @@ -933,11 +1300,10 @@ index 00000000..15804593 + If 'abi3' or 'none' are specified in 'abis' then they will be yielded at + their normal position and not at the beginning. + """ -+ warn = _warn_keyword_parameter("cpython_tags", kwargs) + if not python_version: + python_version = sys.version_info[:2] + -+ interpreter = "cp{}".format(_version_nodot(python_version[:2])) ++ interpreter = f"cp{_version_nodot(python_version[:2])}" + + if abis is None: + if len(python_version) > 1: @@ -952,15 +1318,13 @@ index 00000000..15804593 + except ValueError: + pass + -+ platforms = list(platforms or _platform_tags()) ++ platforms = list(platforms or platform_tags()) + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + if _abi3_applies(python_version): -+ for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms): -+ yield tag -+ for tag in (Tag(interpreter, "none", platform_) for platform_ in platforms): -+ yield tag ++ yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms) ++ yield from (Tag(interpreter, "none", platform_) for platform_ in platforms) + + if _abi3_applies(python_version): + for minor_version in range(python_version[1] - 1, 1, -1): @@ -971,20 +1335,19 @@ index 00000000..15804593 + yield Tag(interpreter, "abi3", platform_) + + -+def _generic_abi(): -+ # type: () -> Iterator[str] ++def _generic_abi() -> Iterator[str]: + abi = sysconfig.get_config_var("SOABI") + if abi: + yield _normalize_string(abi) + + +def generic_tags( -+ interpreter=None, # type: Optional[str] -+ abis=None, # type: Optional[Iterable[str]] -+ platforms=None, # type: Optional[Iterable[str]] -+ **kwargs # type: bool -+): -+ # type: (...) -> Iterator[Tag] ++ interpreter: Optional[str] = None, ++ abis: Optional[Iterable[str]] = None, ++ platforms: Optional[Iterable[str]] = None, ++ *, ++ warn: bool = False, ++) -> Iterator[Tag]: + """ + Yields the tags for a generic interpreter. + @@ -993,14 +1356,13 @@ index 00000000..15804593 + + The "none" ABI will be added if it was not explicitly provided. 
+ """ -+ warn = _warn_keyword_parameter("generic_tags", kwargs) + if not interpreter: + interp_name = interpreter_name() + interp_version = interpreter_version(warn=warn) + interpreter = "".join([interp_name, interp_version]) + if abis is None: + abis = _generic_abi() -+ platforms = list(platforms or _platform_tags()) ++ platforms = list(platforms or platform_tags()) + abis = list(abis) + if "none" not in abis: + abis.append("none") @@ -1009,8 +1371,7 @@ index 00000000..15804593 + yield Tag(interpreter, abi, platform_) + + -+def _py_interpreter_range(py_version): -+ # type: (PythonVersion) -> Iterator[str] ++def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]: + """ + Yields Python versions in descending order. + @@ -1018,19 +1379,18 @@ index 00000000..15804593 + all previous versions of that major version. + """ + if len(py_version) > 1: -+ yield "py{version}".format(version=_version_nodot(py_version[:2])) -+ yield "py{major}".format(major=py_version[0]) ++ yield f"py{_version_nodot(py_version[:2])}" ++ yield f"py{py_version[0]}" + if len(py_version) > 1: + for minor in range(py_version[1] - 1, -1, -1): -+ yield "py{version}".format(version=_version_nodot((py_version[0], minor))) ++ yield f"py{_version_nodot((py_version[0], minor))}" + + +def compatible_tags( -+ python_version=None, # type: Optional[PythonVersion] -+ interpreter=None, # type: Optional[str] -+ platforms=None, # type: Optional[Iterable[str]] -+): -+ # type: (...) -> Iterator[Tag] ++ python_version: Optional[PythonVersion] = None, ++ interpreter: Optional[str] = None, ++ platforms: Optional[Iterable[str]] = None, ++) -> Iterator[Tag]: + """ + Yields the sequence of tags that are compatible with a specific version of Python. + @@ -1041,7 +1401,7 @@ index 00000000..15804593 + """ + if not python_version: + python_version = sys.version_info[:2] -+ platforms = list(platforms or _platform_tags()) ++ platforms = list(platforms or platform_tags()) + for version in _py_interpreter_range(python_version): + for platform_ in platforms: + yield Tag(version, "none", platform_) @@ -1051,8 +1411,7 @@ index 00000000..15804593 + yield Tag(version, "none", "any") + + -+def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER): -+ # type: (str, bool) -> str ++def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str: + if not is_32bit: + return arch + @@ -1062,8 +1421,7 @@ index 00000000..15804593 + return "i386" + + -+def _mac_binary_formats(version, cpu_arch): -+ # type: (MacVersion, str) -> List[str] ++def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]: + formats = [cpu_arch] + if cpu_arch == "x86_64": + if version < (10, 4): @@ -1095,8 +1453,9 @@ index 00000000..15804593 + return formats + + -+def mac_platforms(version=None, arch=None): -+ # type: (Optional[MacVersion], Optional[str]) -> Iterator[str] ++def mac_platforms( ++ version: Optional[MacVersion] = None, arch: Optional[str] = None ++) -> Iterator[str]: + """ + Yields the platform tags for a macOS system. + @@ -1105,7 +1464,7 @@ index 00000000..15804593 + generate platform tags for. Both parameters default to the appropriate value + for the current system. 
+ """ -+ version_str, _, cpu_arch = platform.mac_ver() # type: ignore ++ version_str, _, cpu_arch = platform.mac_ver() + if version is None: + version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) + else: @@ -1166,445 +1525,82 @@ index 00000000..15804593 + ) + + -+# From PEP 513, PEP 600 -+def _is_manylinux_compatible(name, arch, glibc_version): -+ # type: (str, str, GlibcVersion) -> bool -+ sys_glibc = _get_glibc_version() -+ if sys_glibc < glibc_version: -+ return False -+ # Check for presence of _manylinux module. -+ try: -+ import _manylinux # noqa -+ except ImportError: -+ pass -+ else: -+ if hasattr(_manylinux, "manylinux_compatible"): -+ result = _manylinux.manylinux_compatible( -+ glibc_version[0], glibc_version[1], arch -+ ) -+ if result is not None: -+ return bool(result) -+ else: -+ if glibc_version == (2, 5): -+ if hasattr(_manylinux, "manylinux1_compatible"): -+ return bool(_manylinux.manylinux1_compatible) -+ if glibc_version == (2, 12): -+ if hasattr(_manylinux, "manylinux2010_compatible"): -+ return bool(_manylinux.manylinux2010_compatible) -+ if glibc_version == (2, 17): -+ if hasattr(_manylinux, "manylinux2014_compatible"): -+ return bool(_manylinux.manylinux2014_compatible) -+ return True ++def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]: ++ linux = _normalize_string(sysconfig.get_platform()) ++ if is_32bit: ++ if linux == "linux_x86_64": ++ linux = "linux_i686" ++ elif linux == "linux_aarch64": ++ linux = "linux_armv7l" ++ _, arch = linux.split("_", 1) ++ yield from _manylinux_platform_tags(linux, arch) ++ yield from _musllinux_platform_tags(arch) ++ yield linux + + -+def _glibc_version_string(): -+ # type: () -> Optional[str] -+ # Returns glibc version string, or None if not using glibc. -+ return _glibc_version_string_confstr() or _glibc_version_string_ctypes() ++def _generic_platforms() -> Iterator[str]: ++ yield _normalize_string(sysconfig.get_platform()) + + -+def _glibc_version_string_confstr(): -+ # type: () -> Optional[str] ++def platform_tags() -> Iterator[str]: + """ -+ Primary implementation of glibc_version_string using os.confstr. ++ Provides the platform tags for this installation. + """ -+ # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely -+ # to be broken or missing. This strategy is used in the standard library -+ # platform module. -+ # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183 -+ try: -+ # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17". -+ version_string = os.confstr( # type: ignore[attr-defined] # noqa: F821 -+ "CS_GNU_LIBC_VERSION" -+ ) -+ assert version_string is not None -+ _, version = version_string.split() # type: Tuple[str, str] -+ except (AssertionError, AttributeError, OSError, ValueError): -+ # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... -+ return None -+ return version ++ if platform.system() == "Darwin": ++ return mac_platforms() ++ elif platform.system() == "Linux": ++ return _linux_platforms() ++ else: ++ return _generic_platforms() + + -+def _glibc_version_string_ctypes(): -+ # type: () -> Optional[str] ++def interpreter_name() -> str: + """ -+ Fallback implementation of glibc_version_string using ctypes. ++ Returns the name of the running interpreter. 
+ """ -+ try: -+ import ctypes -+ except ImportError: -+ return None ++ name = sys.implementation.name ++ return INTERPRETER_SHORT_NAMES.get(name) or name + -+ # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen -+ # manpage says, "If filename is NULL, then the returned handle is for the -+ # main program". This way we can let the linker do the work to figure out -+ # which libc our process is actually using. -+ # -+ # We must also handle the special case where the executable is not a -+ # dynamically linked executable. This can occur when using musl libc, -+ # for example. In this situation, dlopen() will error, leading to an -+ # OSError. Interestingly, at least in the case of musl, there is no -+ # errno set on the OSError. The single string argument used to construct -+ # OSError comes from libc itself and is therefore not portable to -+ # hard code here. In any case, failure to call dlopen() means we -+ # can proceed, so we bail on our attempt. -+ try: -+ # Note: typeshed is wrong here so we are ignoring this line. -+ process_namespace = ctypes.CDLL(None) # type: ignore -+ except OSError: -+ return None + -+ try: -+ gnu_get_libc_version = process_namespace.gnu_get_libc_version -+ except AttributeError: -+ # Symbol doesn't exist -> therefore, we are not linked to -+ # glibc. -+ return None ++def interpreter_version(*, warn: bool = False) -> str: ++ """ ++ Returns the version of the running interpreter. ++ """ ++ version = _get_config_var("py_version_nodot", warn=warn) ++ if version: ++ version = str(version) ++ else: ++ version = _version_nodot(sys.version_info[:2]) ++ return version + -+ # Call gnu_get_libc_version, which returns a string like "2.5" -+ gnu_get_libc_version.restype = ctypes.c_char_p -+ version_str = gnu_get_libc_version() # type: str -+ # py2 / py3 compatibility: -+ if not isinstance(version_str, str): -+ version_str = version_str.decode("ascii") + -+ return version_str ++def _version_nodot(version: PythonVersion) -> str: ++ return "".join(map(str, version)) + + -+def _parse_glibc_version(version_str): -+ # type: (str) -> Tuple[int, int] -+ # Parse glibc version. -+ # -+ # We use a regexp instead of str.split because we want to discard any -+ # random junk that might come after the minor version -- this might happen -+ # in patched/forked versions of glibc (e.g. Linaro's version of glibc -+ # uses version strings like "2.20-2014.11"). See gh-3588. -+ m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str) -+ if not m: -+ warnings.warn( -+ "Expected glibc version with 2 components major.minor," -+ " got: %s" % version_str, -+ RuntimeWarning, -+ ) -+ return -1, -1 -+ return (int(m.group("major")), int(m.group("minor"))) -+ -+ -+_glibc_version = [] # type: List[Tuple[int, int]] -+ -+ -+def _get_glibc_version(): -+ # type: () -> Tuple[int, int] -+ if _glibc_version: -+ return _glibc_version[0] -+ version_str = _glibc_version_string() -+ if version_str is None: -+ _glibc_version.append((-1, -1)) -+ else: -+ _glibc_version.append(_parse_glibc_version(version_str)) -+ return _glibc_version[0] -+ -+ -+# Python does not provide platform information at sufficient granularity to -+# identify the architecture of the running executable in some cases, so we -+# determine it dynamically by reading the information from the running -+# process. This only applies on Linux, which uses the ELF format. 
-+class _ELFFileHeader(object): -+ # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header -+ class _InvalidELFFileHeader(ValueError): -+ """ -+ An invalid ELF file header was found. -+ """ -+ -+ ELF_MAGIC_NUMBER = 0x7F454C46 -+ ELFCLASS32 = 1 -+ ELFCLASS64 = 2 -+ ELFDATA2LSB = 1 -+ ELFDATA2MSB = 2 -+ EM_386 = 3 -+ EM_S390 = 22 -+ EM_ARM = 40 -+ EM_X86_64 = 62 -+ EF_ARM_ABIMASK = 0xFF000000 -+ EF_ARM_ABI_VER5 = 0x05000000 -+ EF_ARM_ABI_FLOAT_HARD = 0x00000400 -+ -+ def __init__(self, file): -+ # type: (IO[bytes]) -> None -+ def unpack(fmt): -+ # type: (str) -> int -+ try: -+ (result,) = struct.unpack( -+ fmt, file.read(struct.calcsize(fmt)) -+ ) # type: (int, ) -+ except struct.error: -+ raise _ELFFileHeader._InvalidELFFileHeader() -+ return result -+ -+ self.e_ident_magic = unpack(">I") -+ if self.e_ident_magic != self.ELF_MAGIC_NUMBER: -+ raise _ELFFileHeader._InvalidELFFileHeader() -+ self.e_ident_class = unpack("B") -+ if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}: -+ raise _ELFFileHeader._InvalidELFFileHeader() -+ self.e_ident_data = unpack("B") -+ if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}: -+ raise _ELFFileHeader._InvalidELFFileHeader() -+ self.e_ident_version = unpack("B") -+ self.e_ident_osabi = unpack("B") -+ self.e_ident_abiversion = unpack("B") -+ self.e_ident_pad = file.read(7) -+ format_h = "H" -+ format_i = "I" -+ format_q = "Q" -+ format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q -+ self.e_type = unpack(format_h) -+ self.e_machine = unpack(format_h) -+ self.e_version = unpack(format_i) -+ self.e_entry = unpack(format_p) -+ self.e_phoff = unpack(format_p) -+ self.e_shoff = unpack(format_p) -+ self.e_flags = unpack(format_i) -+ self.e_ehsize = unpack(format_h) -+ self.e_phentsize = unpack(format_h) -+ self.e_phnum = unpack(format_h) -+ self.e_shentsize = unpack(format_h) -+ self.e_shnum = unpack(format_h) -+ self.e_shstrndx = unpack(format_h) -+ -+ -+def _get_elf_header(): -+ # type: () -> Optional[_ELFFileHeader] -+ try: -+ with open(sys.executable, "rb") as f: -+ elf_header = _ELFFileHeader(f) -+ except (IOError, OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader): -+ return None -+ return elf_header -+ -+ -+def _is_linux_armhf(): -+ # type: () -> bool -+ # hard-float ABI can be detected from the ELF header of the running -+ # process -+ # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf -+ elf_header = _get_elf_header() -+ if elf_header is None: -+ return False -+ result = elf_header.e_ident_class == elf_header.ELFCLASS32 -+ result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB -+ result &= elf_header.e_machine == elf_header.EM_ARM -+ result &= ( -+ elf_header.e_flags & elf_header.EF_ARM_ABIMASK -+ ) == elf_header.EF_ARM_ABI_VER5 -+ result &= ( -+ elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD -+ ) == elf_header.EF_ARM_ABI_FLOAT_HARD -+ return result -+ -+ -+def _is_linux_i686(): -+ # type: () -> bool -+ elf_header = _get_elf_header() -+ if elf_header is None: -+ return False -+ result = elf_header.e_ident_class == elf_header.ELFCLASS32 -+ result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB -+ result &= elf_header.e_machine == elf_header.EM_386 -+ return result -+ -+ -+def _have_compatible_manylinux_abi(arch): -+ # type: (str) -> bool -+ if arch == "armv7l": -+ return _is_linux_armhf() -+ if arch == "i686": -+ return _is_linux_i686() -+ return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"} -+ -+ -+def _manylinux_tags(linux, arch): -+ # type: (str, 
str) -> Iterator[str] -+ # Oldest glibc to be supported regardless of architecture is (2, 17). -+ too_old_glibc2 = glibcVersion(2, 16) -+ if arch in {"x86_64", "i686"}: -+ # On x86/i686 also oldest glibc to be supported is (2, 5). -+ too_old_glibc2 = glibcVersion(2, 4) -+ current_glibc = glibcVersion(*_get_glibc_version()) -+ glibc_max_list = [current_glibc] -+ # We can assume compatibility across glibc major versions. -+ # https://sourceware.org/bugzilla/show_bug.cgi?id=24636 -+ # -+ # Build a list of maximum glibc versions so that we can -+ # output the canonical list of all glibc from current_glibc -+ # down to too_old_glibc2, including all intermediary versions. -+ for glibc_major in range(current_glibc.major - 1, 1, -1): -+ glibc_max_list.append(glibcVersion(glibc_major, _LAST_GLIBC_MINOR[glibc_major])) -+ for glibc_max in glibc_max_list: -+ if glibc_max.major == too_old_glibc2.major: -+ min_minor = too_old_glibc2.minor -+ else: -+ # For other glibc major versions oldest supported is (x, 0). -+ min_minor = -1 -+ for glibc_minor in range(glibc_max.minor, min_minor, -1): -+ glibc_version = (glibc_max.major, glibc_minor) -+ tag = "manylinux_{}_{}".format(*glibc_version) -+ if _is_manylinux_compatible(tag, arch, glibc_version): -+ yield linux.replace("linux", tag) -+ # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags. -+ if glibc_version in _LEGACY_MANYLINUX_MAP: -+ legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version] -+ if _is_manylinux_compatible(legacy_tag, arch, glibc_version): -+ yield linux.replace("linux", legacy_tag) -+ -+ -+def _linux_platforms(is_32bit=_32_BIT_INTERPRETER): -+ # type: (bool) -> Iterator[str] -+ linux = _normalize_string(distutils.util.get_platform()) -+ if is_32bit: -+ if linux == "linux_x86_64": -+ linux = "linux_i686" -+ elif linux == "linux_aarch64": -+ linux = "linux_armv7l" -+ _, arch = linux.split("_", 1) -+ if _have_compatible_manylinux_abi(arch): -+ for tag in _manylinux_tags(linux, arch): -+ yield tag -+ yield linux -+ -+ -+def _generic_platforms(): -+ # type: () -> Iterator[str] -+ yield _normalize_string(distutils.util.get_platform()) -+ -+ -+def _platform_tags(): -+ # type: () -> Iterator[str] -+ """ -+ Provides the platform tags for this installation. -+ """ -+ if platform.system() == "Darwin": -+ return mac_platforms() -+ elif platform.system() == "Linux": -+ return _linux_platforms() -+ else: -+ return _generic_platforms() -+ -+ -+def interpreter_name(): -+ # type: () -> str -+ """ -+ Returns the name of the running interpreter. -+ """ -+ try: -+ name = sys.implementation.name # type: ignore -+ except AttributeError: # pragma: no cover -+ # Python 2.7 compatibility. -+ name = platform.python_implementation().lower() -+ return INTERPRETER_SHORT_NAMES.get(name) or name -+ -+ -+def interpreter_version(**kwargs): -+ # type: (bool) -> str -+ """ -+ Returns the version of the running interpreter. -+ """ -+ warn = _warn_keyword_parameter("interpreter_version", kwargs) -+ version = _get_config_var("py_version_nodot", warn=warn) -+ if version: -+ version = str(version) -+ else: -+ version = _version_nodot(sys.version_info[:2]) -+ return version -+ -+ -+def _version_nodot(version): -+ # type: (PythonVersion) -> str -+ return "".join(map(str, version)) -+ -+ -+def sys_tags(**kwargs): -+ # type: (bool) -> Iterator[Tag] ++def sys_tags(*, warn: bool = False) -> Iterator[Tag]: + """ + Returns the sequence of tag triples for the running interpreter. 
+ + The order of the sequence corresponds to priority order for the + interpreter, from most to least important. + """ -+ warn = _warn_keyword_parameter("sys_tags", kwargs) + + interp_name = interpreter_name() + if interp_name == "cp": -+ for tag in cpython_tags(warn=warn): -+ yield tag ++ yield from cpython_tags(warn=warn) + else: -+ for tag in generic_tags(): -+ yield tag -+ -+ for tag in compatible_tags(): -+ yield tag ++ yield from generic_tags() + ++ if interp_name == "pp": ++ yield from compatible_tags(interpreter="pp3") ++ else: ++ yield from compatible_tags() + -+if __name__ == "__main__": -+ print(json.dumps([(t.interpreter, t.abi, t.platform) for t in sys_tags()])) -diff --git a/pyproject.toml b/pyproject.toml -index 1017157d..0ff512a9 100644 ---- a/pyproject.toml -+++ b/pyproject.toml -@@ -57,7 +57,7 @@ functools32 = { version = "^3.2.3", python = "~2.7" } - keyring = [ - { version = "^18.0.1", python = "~2.7" }, - { version = "^20.0.1", python = "~3.5" }, -- { version = ">=21.2.0", python = "^3.6" } -+ { version = "~22.3.0", python = "^3.6" } - ] - # Use subprocess32 for Python 2.7 - subprocess32 = { version = "^3.5", python = "~2.7" } -diff --git a/tests/config/test_config.py b/tests/config/test_config.py -index 4bd0cd04..d79e8631 100644 ---- a/tests/config/test_config.py -+++ b/tests/config/test_config.py -@@ -1,5 +1,7 @@ - import os - -+from pathlib import Path + - import pytest - - -@@ -11,7 +13,7 @@ def test_config_get_default_value(config, name, value): - - - def test_config_get_processes_depended_on_values(config): -- assert os.path.join("/foo", "virtualenvs") == config.get("virtualenvs.path") -+ assert str(Path.cwd() / ".pypoetrycache" / "virtualenvs") == config.get("virtualenvs.path") - - - @pytest.mark.parametrize( -diff --git a/tests/conftest.py b/tests/conftest.py -index e2b73936..22741344 100644 ---- a/tests/conftest.py -+++ b/tests/conftest.py -@@ -4,6 +4,7 @@ import shutil - import sys - import tempfile - -+from pathlib import Path - from typing import Any - from typing import Dict - -@@ -56,7 +57,7 @@ class Config(BaseConfig): - @pytest.fixture - def config_source(): - source = DictConfigSource() -- source.add_property("cache-dir", "/foo") -+ source.add_property("cache-dir", str(Path.cwd() / ".pypoetrycache")) - - return source - -diff --git a/tests/console/commands/self/__init__.py b/tests/console/commands/self/__init__.py -deleted file mode 100644 -index e69de29b..00000000 ++print( ++ json.dumps([(t.interpreter, t.abi, t.platform) for t in sys_tags()]) ++) diff --git a/tests/console/commands/self/fixtures/poetry-1.0.5-darwin.sha256sum b/tests/console/commands/self/fixtures/poetry-1.0.5-darwin.sha256sum deleted file mode 100644 index 3229630a..00000000 @@ -1642,128 +1638,699 @@ zoU8w684v0|UdHbJKbN|T(EGQ($MMGm-rVrp<_6vdl-g465O4@M1RMhY3W2`? 
LE~wAj04M+eXZ{jT -diff --git a/tests/console/commands/self/test_update.py b/tests/console/commands/self/test_update.py +diff --git a/tests/console/commands/self/test_add_plugins.py b/tests/console/commands/self/test_add_plugins.py deleted file mode 100644 -index 5b86d446..00000000 ---- a/tests/console/commands/self/test_update.py +index e8447a32..00000000 +--- a/tests/console/commands/self/test_add_plugins.py +++ /dev/null -@@ -1,159 +0,0 @@ --import os +@@ -1,307 +0,0 @@ +-from __future__ import annotations +- +-from typing import TYPE_CHECKING - -import pytest - --from poetry.__version__ import __version__ -from poetry.core.packages.package import Package --from poetry.core.semver.version import Version +- +-from poetry.console.commands.self.self_command import SelfCommand -from poetry.factory import Factory --from poetry.repositories.installed_repository import InstalledRepository --from poetry.repositories.pool import Pool --from poetry.repositories.repository import Repository --from poetry.utils._compat import WINDOWS --from poetry.utils._compat import Path --from poetry.utils.env import EnvManager +-from tests.console.commands.self.utils import get_self_command_dependencies - - --FIXTURES = Path(__file__).parent.joinpath("fixtures") +-if TYPE_CHECKING: +- from cleo.testers.command_tester import CommandTester +- +- from tests.helpers import TestRepository +- from tests.types import CommandTesterFactory - - -@pytest.fixture() --def tester(command_tester_factory): -- return command_tester_factory("self update") +-def tester(command_tester_factory: CommandTesterFactory) -> CommandTester: +- return command_tester_factory("self add") - - --def test_self_update_should_install_all_necessary_elements( -- tester, http, mocker, environ, tmp_dir +-def assert_plugin_add_result( +- tester: CommandTester, +- expected: str, +- constraint: str | dict[str, str], +-) -> None: +- assert tester.io.fetch_output() == expected +- dependencies = get_self_command_dependencies() +- +- assert "poetry-plugin" in dependencies +- assert dependencies["poetry-plugin"] == constraint +- +- +-def test_add_no_constraint( +- tester: CommandTester, +- repo: TestRepository, -): -- os.environ["POETRY_HOME"] = tmp_dir +- repo.add_package(Package("poetry-plugin", "0.1.0")) +- +- tester.execute("poetry-plugin") - -- command = tester._command +- expected = """\ +-Using version ^0.1.0 for poetry-plugin - -- version = Version.parse(__version__).next_minor.text -- repository = Repository() -- repository.add_package(Package("poetry", version)) +-Updating dependencies +-Resolving dependencies... - -- pool = Pool() -- pool.add_repository(repository) +-Writing lock file - -- command._pool = pool -- mocker.patch.object(command, "_check_recommended_installation", return_value=None) -- mocker.patch.object( -- command, "_get_release_name", return_value="poetry-{}-darwin".format(version) +-Package operations: 1 install, 0 updates, 0 removals +- +- • Installing poetry-plugin (0.1.0) +-""" +- assert_plugin_add_result(tester, expected, "^0.1.0") +- +- +-def test_add_with_constraint( +- tester: CommandTester, +- repo: TestRepository, +-): +- repo.add_package(Package("poetry-plugin", "0.1.0")) +- repo.add_package(Package("poetry-plugin", "0.2.0")) +- +- tester.execute("poetry-plugin@^0.2.0") +- +- expected = """ +-Updating dependencies +-Resolving dependencies... 
+- +-Writing lock file +- +-Package operations: 1 install, 0 updates, 0 removals +- +- • Installing poetry-plugin (0.2.0) +-""" +- +- assert_plugin_add_result(tester, expected, "^0.2.0") +- +- +-def test_add_with_git_constraint( +- tester: CommandTester, +- repo: TestRepository, +-): +- repo.add_package(Package("pendulum", "2.0.5")) +- +- tester.execute("git+https://github.com/demo/poetry-plugin.git") +- +- expected = """ +-Updating dependencies +-Resolving dependencies... +- +-Writing lock file +- +-Package operations: 2 installs, 0 updates, 0 removals +- +- • Installing pendulum (2.0.5) +- • Installing poetry-plugin (0.1.2 9cf87a2) +-""" +- +- assert_plugin_add_result( +- tester, expected, {"git": "https://github.com/demo/poetry-plugin.git"} +- ) +- +- +-def test_add_with_git_constraint_with_extras( +- tester: CommandTester, +- repo: TestRepository, +-): +- repo.add_package(Package("pendulum", "2.0.5")) +- repo.add_package(Package("tomlkit", "0.7.0")) +- +- tester.execute("git+https://github.com/demo/poetry-plugin.git[foo]") +- +- expected = """ +-Updating dependencies +-Resolving dependencies... +- +-Writing lock file +- +-Package operations: 3 installs, 0 updates, 0 removals +- +- • Installing pendulum (2.0.5) +- • Installing tomlkit (0.7.0) +- • Installing poetry-plugin (0.1.2 9cf87a2) +-""" +- +- assert_plugin_add_result( +- tester, +- expected, +- { +- "git": "https://github.com/demo/poetry-plugin.git", +- "extras": ["foo"], +- }, - ) -- mocker.patch("subprocess.check_output", return_value=b"Python 3.8.2") - -- http.register_uri( -- "GET", -- command.BASE_URL + "/{}/poetry-{}-darwin.sha256sum".format(version, version), -- body=FIXTURES.joinpath("poetry-1.0.5-darwin.sha256sum").read_bytes(), +- +-@pytest.mark.parametrize( +- "url, rev", +- [ +- ("git+https://github.com/demo/poetry-plugin2.git#subdirectory=subdir", None), +- ( +- "git+https://github.com/demo/poetry-plugin2.git@master#subdirectory=subdir", +- "master", +- ), +- ], +-) +-def test_add_with_git_constraint_with_subdirectory( +- url: str, +- rev: str | None, +- tester: CommandTester, +- repo: TestRepository, +-): +- repo.add_package(Package("pendulum", "2.0.5")) +- +- tester.execute(url) +- +- expected = """ +-Updating dependencies +-Resolving dependencies... +- +-Writing lock file +- +-Package operations: 2 installs, 0 updates, 0 removals +- +- • Installing pendulum (2.0.5) +- • Installing poetry-plugin (0.1.2 9cf87a2) +-""" +- +- constraint = { +- "git": "https://github.com/demo/poetry-plugin2.git", +- "subdirectory": "subdir", +- } +- +- if rev: +- constraint["rev"] = rev +- +- assert_plugin_add_result( +- tester, +- expected, +- constraint, - ) -- http.register_uri( -- "GET", -- command.BASE_URL + "/{}/poetry-{}-darwin.tar.gz".format(version, version), -- body=FIXTURES.joinpath("poetry-1.0.5-darwin.tar.gz").read_bytes(), +- +- +-def test_add_existing_plugin_warns_about_no_operation( +- tester: CommandTester, +- repo: TestRepository, +- installed: TestRepository, +-): +- SelfCommand.get_default_system_pyproject_file().write_text( +- f"""\ +-[tool.poetry] +-name = "poetry-instance" +-version = "1.2.0" +-description = "Python dependency management and packaging made easy." 
+-authors = [] +- +-[tool.poetry.dependencies] +-python = "^3.6" +- +-[tool.poetry.group.{SelfCommand.ADDITIONAL_PACKAGE_GROUP}.dependencies] +-poetry-plugin = "^1.2.3" +-""", +- encoding="utf-8", - ) - -- tester.execute() +- installed.add_package(Package("poetry-plugin", "1.2.3")) - -- bin_ = Path(tmp_dir).joinpath("bin") -- lib = Path(tmp_dir).joinpath("lib") -- assert bin_.exists() +- repo.add_package(Package("poetry-plugin", "1.2.3")) - -- script = bin_.joinpath("poetry") -- assert script.exists() +- tester.execute("poetry-plugin") - -- expected_script = """\ --# -*- coding: utf-8 -*- --import glob --import sys --import os +- expected = f"""\ +-The following packages are already present in the pyproject.toml and will be\ +- skipped: +- +- • poetry-plugin +-{tester.command._hint_update_packages} +-Nothing to add. +-""" +- +- assert tester.io.fetch_output() == expected +- +- +-def test_add_existing_plugin_updates_if_requested( +- tester: CommandTester, +- repo: TestRepository, +- installed: TestRepository, +-): +- SelfCommand.get_default_system_pyproject_file().write_text( +- f"""\ +-[tool.poetry] +-name = "poetry-instance" +-version = "1.2.0" +-description = "Python dependency management and packaging made easy." +-authors = [] +- +-[tool.poetry.dependencies] +-python = "^3.6" +- +-[tool.poetry.group.{SelfCommand.ADDITIONAL_PACKAGE_GROUP}.dependencies] +-poetry-plugin = "^1.2.3" +-""", +- encoding="utf-8", +- ) +- +- installed.add_package(Package("poetry-plugin", "1.2.3")) +- +- repo.add_package(Package("poetry-plugin", "1.2.3")) +- repo.add_package(Package("poetry-plugin", "2.3.4")) +- +- tester.execute("poetry-plugin@latest") +- +- expected = """\ +-Using version ^2.3.4 for poetry-plugin +- +-Updating dependencies +-Resolving dependencies... +- +-Writing lock file +- +-Package operations: 0 installs, 1 update, 0 removals +- +- • Updating poetry-plugin (1.2.3 -> 2.3.4) +-""" +- +- assert_plugin_add_result(tester, expected, "^2.3.4") +- +- +-def test_adding_a_plugin_can_update_poetry_dependencies_if_needed( +- tester: CommandTester, +- repo: TestRepository, +- installed: TestRepository, +-): +- poetry_package = Package("poetry", "1.2.0") +- poetry_package.add_dependency(Factory.create_dependency("tomlkit", "^0.7.0")) +- +- plugin_package = Package("poetry-plugin", "1.2.3") +- plugin_package.add_dependency(Factory.create_dependency("tomlkit", "^0.7.2")) +- +- installed.add_package(poetry_package) +- installed.add_package(Package("tomlkit", "0.7.1")) +- +- repo.add_package(plugin_package) +- repo.add_package(Package("tomlkit", "0.7.1")) +- repo.add_package(Package("tomlkit", "0.7.2")) +- +- tester.execute("poetry-plugin") +- +- expected = """\ +-Using version ^1.2.3 for poetry-plugin +- +-Updating dependencies +-Resolving dependencies... 
+- +-Writing lock file +- +-Package operations: 1 install, 1 update, 0 removals +- +- • Updating tomlkit (0.7.1 -> 0.7.2) +- • Installing poetry-plugin (1.2.3) +-""" +- +- assert_plugin_add_result(tester, expected, "^1.2.3") +diff --git a/tests/console/commands/self/test_remove_plugins.py b/tests/console/commands/self/test_remove_plugins.py +deleted file mode 100644 +index 644e5748..00000000 +--- a/tests/console/commands/self/test_remove_plugins.py ++++ /dev/null +@@ -1,108 +0,0 @@ +-from __future__ import annotations +- +-from typing import TYPE_CHECKING +- +-import pytest +-import tomlkit +- +-from poetry.core.packages.dependency import Dependency +-from poetry.core.packages.package import Package +-from poetry.core.packages.project_package import ProjectPackage +- +-from poetry.__version__ import __version__ +-from poetry.console.commands.self.self_command import SelfCommand +-from poetry.factory import Factory +-from tests.console.commands.self.utils import get_self_command_dependencies +- +- +-if TYPE_CHECKING: +- from cleo.testers.command_tester import CommandTester +- +- from poetry.repositories import Repository +- from tests.types import CommandTesterFactory +- +- +-@pytest.fixture() +-def tester(command_tester_factory: CommandTesterFactory) -> CommandTester: +- return command_tester_factory("self remove") +- +- +-@pytest.fixture(autouse=True) +-def install_plugin(installed: Repository) -> None: +- package = ProjectPackage("poetry-instance", __version__) +- plugin = Package("poetry-plugin", "1.2.3") +- +- package.add_dependency( +- Dependency(plugin.name, "^1.2.3", groups=[SelfCommand.ADDITIONAL_PACKAGE_GROUP]) +- ) +- content = Factory.create_pyproject_from_package(package) +- system_pyproject_file = SelfCommand.get_default_system_pyproject_file() +- system_pyproject_file.write_text(content.as_string(), encoding="utf-8") +- +- lock_content = { +- "package": [ +- { +- "name": "poetry-plugin", +- "version": "1.2.3", +- "category": "main", +- "optional": False, +- "platform": "*", +- "python-versions": "*", +- "checksum": [], +- }, +- ], +- "metadata": { +- "python-versions": "^3.6", +- "platform": "*", +- "content-hash": "123456789", +- "hashes": {"poetry-plugin": []}, +- }, +- } +- system_pyproject_file.parent.joinpath("poetry.lock").write_text( +- tomlkit.dumps(lock_content), encoding="utf-8" +- ) +- +- installed.add_package(plugin) +- +- +-def test_remove_installed_package(tester: CommandTester): +- tester.execute("poetry-plugin") +- +- expected = """\ +-Updating dependencies +-Resolving dependencies... +- +-Writing lock file +- +-Package operations: 0 installs, 0 updates, 1 removal +- +- • Removing poetry-plugin (1.2.3) +-""" +- assert tester.io.fetch_output() == expected +- +- dependencies = get_self_command_dependencies() +- +- assert "poetry-plugin" not in dependencies +- assert not dependencies +- +- +-def test_remove_installed_package_dry_run(tester: CommandTester): +- tester.execute("poetry-plugin --dry-run") +- +- expected = f"""\ +-Updating dependencies +-Resolving dependencies... 
+- +-Package operations: 0 installs, 0 updates, 1 removal, 1 skipped +- +- • Removing poetry-plugin (1.2.3) +- • Removing poetry-plugin (1.2.3) +- • Installing poetry ({__version__}): Skipped for the following reason: Already \ +-installed +-""" +- +- assert tester.io.fetch_output() == expected +- +- dependencies = get_self_command_dependencies() +- +- assert "poetry-plugin" in dependencies +diff --git a/tests/console/commands/self/test_show_plugins.py b/tests/console/commands/self/test_show_plugins.py +deleted file mode 100644 +index 3ce27786..00000000 +--- a/tests/console/commands/self/test_show_plugins.py ++++ /dev/null +@@ -1,220 +0,0 @@ +-from __future__ import annotations +- +-from pathlib import Path +-from typing import TYPE_CHECKING +-from typing import Any +-from typing import Callable +- +-import pytest +- +-from poetry.core.packages.dependency import Dependency +-from poetry.core.packages.package import Package +- +-from poetry.plugins.application_plugin import ApplicationPlugin +-from poetry.plugins.plugin import Plugin +-from poetry.utils._compat import metadata +- +- +-if TYPE_CHECKING: +- from os import PathLike +- +- from cleo.io.io import IO +- from cleo.testers.command_tester import CommandTester +- from pytest_mock import MockerFixture +- +- from poetry.plugins.base_plugin import BasePlugin +- from poetry.poetry import Poetry +- from poetry.repositories import Repository +- from poetry.utils.env import Env +- from tests.helpers import PoetryTestApplication +- from tests.types import CommandTesterFactory +- +- +-class DoNothingPlugin(Plugin): +- def activate(self, poetry: Poetry, io: IO) -> None: +- pass +- +- +-class EntryPoint(metadata.EntryPoint): +- def load(self) -> type[BasePlugin]: +- if self.group == ApplicationPlugin.group: +- return ApplicationPlugin +- +- return DoNothingPlugin +- +- +-@pytest.fixture() +-def tester(command_tester_factory: CommandTesterFactory) -> CommandTester: +- return command_tester_factory("self show plugins") +- +- +-@pytest.fixture() +-def plugin_package_requires_dist() -> list[str]: +- return [] +- +- +-@pytest.fixture() +-def plugin_package(plugin_package_requires_dist: list[str]) -> Package: +- package = Package("poetry-plugin", "1.2.3") +- +- for requirement in plugin_package_requires_dist: +- package.add_dependency(Dependency.create_from_pep_508(requirement)) +- +- return package +- +- +-@pytest.fixture() +-def plugin_distro(plugin_package: Package, tmp_dir: str) -> metadata.Distribution: +- class MockDistribution(metadata.Distribution): +- def read_text(self, filename: str) -> str | None: +- if filename == "METADATA": +- return "\n".join( +- [ +- f"Name: {plugin_package.name}", +- f"Version: {plugin_package.version}", +- *[ +- f"Requires-Dist: {dep.to_pep_508()}" +- for dep in plugin_package.requires +- ], +- ] +- ) +- return None +- +- def locate_file(self, path: PathLike[str]) -> PathLike[str]: +- return Path(tmp_dir, path) +- +- return MockDistribution() +- +- +-@pytest.fixture +-def entry_point_name() -> str: +- return "poetry-plugin" +- +- +-@pytest.fixture +-def entry_point_values_by_group() -> dict[str, list[str]]: +- return {} - --lib = os.path.normpath(os.path.join(os.path.realpath(__file__), "../..", "lib")) --vendors = os.path.join(lib, "poetry", "_vendor") --current_vendors = os.path.join( -- vendors, "py{}".format(".".join(str(v) for v in sys.version_info[:2])) +- +-@pytest.fixture +-def entry_points( +- entry_point_name: str, +- entry_point_values_by_group: dict[str, list[str]], +- plugin_distro: 
metadata.Distribution, +-) -> Callable[[...], list[metadata.EntryPoint]]: +- by_group = { +- key: [ +- EntryPoint(name=entry_point_name, group=key, value=value)._for( +- plugin_distro +- ) +- for value in values +- ] +- for key, values in entry_point_values_by_group.items() +- } +- +- def _entry_points(**params: Any) -> list[metadata.EntryPoint]: +- group = params.get("group") +- +- if group not in by_group: +- return [] +- +- return by_group.get(group) +- +- return _entry_points +- +- +-@pytest.fixture(autouse=True) +-def mock_metadata_entry_points( +- plugin_package: Package, +- plugin_distro: metadata.Distribution, +- installed: Repository, +- mocker: MockerFixture, +- tmp_venv: Env, +- entry_points: Callable[[...], metadata.EntryPoint], +-) -> None: +- installed.add_package(plugin_package) +- +- mocker.patch.object( +- tmp_venv.site_packages, "find_distribution", return_value=plugin_distro +- ) +- mocker.patch.object(metadata, "entry_points", entry_points) +- +- +-@pytest.mark.parametrize("entry_point_name", ["poetry-plugin", "not-package-name"]) +-@pytest.mark.parametrize( +- "entry_point_values_by_group", +- [ +- { +- ApplicationPlugin.group: ["FirstApplicationPlugin"], +- Plugin.group: ["FirstPlugin"], +- } +- ], -) --sys.path.insert(0, lib) --sys.path.insert(0, current_vendors) +-def test_show_displays_installed_plugins( +- app: PoetryTestApplication, +- tester: CommandTester, +-): +- tester.execute("") - --if __name__ == "__main__": -- from poetry.console import main -- main() +- expected = """ +- • poetry-plugin (1.2.3) +- 1 plugin and 1 application plugin -""" -- if not WINDOWS: -- expected_script = "#!/usr/bin/env python\n" + expected_script - -- assert expected_script == script.read_text() +- assert tester.io.fetch_output() == expected - -- if WINDOWS: -- bat = bin_.joinpath("poetry.bat") -- expected_bat = '@echo off\r\npython "{}" %*\r\n'.format( -- str(script).replace(os.environ.get("USERPROFILE", ""), "%USERPROFILE%") -- ) -- assert bat.exists() -- with bat.open(newline="") as f: -- assert expected_bat == f.read() - -- assert lib.exists() -- assert lib.joinpath("poetry").exists() +-@pytest.mark.parametrize( +- "entry_point_values_by_group", +- [ +- { +- ApplicationPlugin.group: [ +- "FirstApplicationPlugin", +- "SecondApplicationPlugin", +- ], +- Plugin.group: ["FirstPlugin", "SecondPlugin"], +- } +- ], +-) +-def test_show_displays_installed_plugins_with_multiple_plugins( +- app: PoetryTestApplication, +- tester: CommandTester, +-): +- tester.execute("") - +- expected = """ +- • poetry-plugin (1.2.3) +- 2 plugins and 2 application plugins +-""" - --def test_self_update_can_update_from_recommended_installation( -- tester, http, mocker, environ, tmp_venv +- assert tester.io.fetch_output() == expected +- +- +-@pytest.mark.parametrize( +- "plugin_package_requires_dist", [["foo (>=1.2.3)", "bar (<4.5.6)"]] +-) +-@pytest.mark.parametrize( +- "entry_point_values_by_group", +- [ +- { +- ApplicationPlugin.group: ["FirstApplicationPlugin"], +- Plugin.group: ["FirstPlugin"], +- } +- ], +-) +-def test_show_displays_installed_plugins_with_dependencies( +- app: PoetryTestApplication, +- tester: CommandTester, -): -- mocker.patch.object(EnvManager, "get_system_env", return_value=tmp_venv) -- target_script = tmp_venv.path.parent.joinpath("venv/bin/poetry") -- if WINDOWS: -- target_script = tmp_venv.path.parent.joinpath("venv/Scripts/poetry.exe") +- tester.execute("") +- +- expected = """ +- • poetry-plugin (1.2.3) +- 1 plugin and 1 application plugin +- +- Dependencies +- - foo (>=1.2.3) 
+- - bar (<4.5.6) +-""" +- +- assert tester.io.fetch_output() == expected +diff --git a/tests/console/commands/self/test_update.py b/tests/console/commands/self/test_update.py +deleted file mode 100644 +index 1c20920d..00000000 +--- a/tests/console/commands/self/test_update.py ++++ /dev/null +@@ -1,66 +0,0 @@ +-from __future__ import annotations +- +-from pathlib import Path +-from typing import TYPE_CHECKING +- +-import pytest +- +-from poetry.core.packages.package import Package +-from poetry.core.semver.version import Version +- +-from poetry.__version__ import __version__ +-from poetry.factory import Factory +- +- +-if TYPE_CHECKING: +- from cleo.testers.command_tester import CommandTester - -- target_script.parent.mkdir(parents=True, exist_ok=True) -- target_script.touch() +- from tests.helpers import TestRepository +- from tests.types import CommandTesterFactory +- +-FIXTURES = Path(__file__).parent.joinpath("fixtures") +- +- +-@pytest.fixture() +-def tester(command_tester_factory: CommandTesterFactory) -> CommandTester: +- return command_tester_factory("self update") - -- command = tester._command -- command._data_dir = tmp_venv.path.parent - -- new_version = Version.parse(__version__).next_minor.text +-def test_self_update_can_update_from_recommended_installation( +- tester: CommandTester, +- repo: TestRepository, +- installed: TestRepository, +-): +- new_version = Version.parse(__version__).next_minor().text - - old_poetry = Package("poetry", __version__) - old_poetry.add_dependency(Factory.create_dependency("cleo", "^0.8.2")) @@ -1771,349 +2338,205 @@ index 5b86d446..00000000 - new_poetry = Package("poetry", new_version) - new_poetry.add_dependency(Factory.create_dependency("cleo", "^1.0.0")) - -- installed_repository = Repository() -- installed_repository.add_package(old_poetry) -- installed_repository.add_package(Package("cleo", "0.8.2")) +- installed.add_package(old_poetry) +- installed.add_package(Package("cleo", "0.8.2")) - -- repository = Repository() -- repository.add_package(new_poetry) -- repository.add_package(Package("cleo", "1.0.0")) -- -- pool = Pool() -- pool.add_repository(repository) -- -- command._pool = pool -- -- mocker.patch.object(InstalledRepository, "load", return_value=installed_repository) +- repo.add_package(new_poetry) +- repo.add_package(Package("cleo", "1.0.0")) - - tester.execute() - -- expected_output = """\ --Updating Poetry to {} +- expected_output = f"""\ +-Updating Poetry version ... +- +-Using version ^{new_version} for poetry - -Updating dependencies -Resolving dependencies... - --Package operations: 0 installs, 2 updates, 0 removals -- -- - Updating cleo (0.8.2 -> 1.0.0) -- - Updating poetry ({} -> {}) +-Writing lock file - --Updating the poetry script +-Package operations: 0 installs, 2 updates, 0 removals - --Poetry (1.2.0) is installed now. Great! 
--""".format( -- new_version, __version__, new_version -- ) +- • Updating cleo (0.8.2 -> 1.0.0) +- • Updating poetry ({__version__} -> {new_version}) +-""" - - assert tester.io.fetch_output() == expected_output -diff --git a/tests/console/commands/test_config.py b/tests/console/commands/test_config.py -index fa0bea4a..df86d165 100644 ---- a/tests/console/commands/test_config.py -+++ b/tests/console/commands/test_config.py -@@ -1,6 +1,8 @@ - import json - import os - -+from pathlib import Path -+ - import pytest - - from poetry.config.config_source import ConfigSource -@@ -28,14 +30,16 @@ def test_show_config_with_local_config_file_empty(tester, mocker): - def test_list_displays_default_value_if_not_set(tester, config): - tester.execute("--list") - -- expected = """cache-dir = "/foo" -+ expected = """cache-dir = "{cache_dir}" - experimental.new-installer = true - installer.parallel = true - virtualenvs.create = true - virtualenvs.in-project = null --virtualenvs.path = {path} # /foo{sep}virtualenvs -+virtualenvs.path = {path} # {cache_dir}{sep}virtualenvs - """.format( -- path=json.dumps(os.path.join("{cache-dir}", "virtualenvs")), sep=os.path.sep -+ path=json.dumps(os.path.join("{cache-dir}", "virtualenvs")), -+ sep=os.path.sep, -+ cache_dir=str(Path.cwd() / ".pypoetrycache") - ) - - assert expected == tester.io.fetch_output() -@@ -46,14 +50,16 @@ def test_list_displays_set_get_setting(tester, config): - - tester.execute("--list") - -- expected = """cache-dir = "/foo" -+ expected = """cache-dir = "{cache_dir}" - experimental.new-installer = true - installer.parallel = true - virtualenvs.create = false - virtualenvs.in-project = null --virtualenvs.path = {path} # /foo{sep}virtualenvs -+virtualenvs.path = {path} # {cache_dir}{sep}virtualenvs - """.format( -- path=json.dumps(os.path.join("{cache-dir}", "virtualenvs")), sep=os.path.sep -+ path=json.dumps(os.path.join("{cache-dir}", "virtualenvs")), -+ sep=os.path.sep, -+ cache_dir=str(Path.cwd() / ".pypoetrycache") +diff --git a/tests/repositories/test_installed_repository.py b/tests/repositories/test_installed_repository.py +index 87dfe183..904a11cf 100644 +--- a/tests/repositories/test_installed_repository.py ++++ b/tests/repositories/test_installed_repository.py +@@ -80,11 +80,6 @@ def mock_git_info(mocker: MockerFixture) -> None: ) - assert 0 == config.set_config_source.call_count -@@ -86,14 +92,16 @@ def test_list_displays_set_get_local_setting(tester, config): - - tester.execute("--list") - -- expected = """cache-dir = "/foo" -+ expected = """cache-dir = "{cache_dir}" - experimental.new-installer = true - installer.parallel = true - virtualenvs.create = false - virtualenvs.in-project = null --virtualenvs.path = {path} # /foo{sep}virtualenvs -+virtualenvs.path = {path} # {cache_dir}{sep}virtualenvs - """.format( -- path=json.dumps(os.path.join("{cache-dir}", "virtualenvs")), sep=os.path.sep -+ path=json.dumps(os.path.join("{cache-dir}", "virtualenvs")), -+ sep=os.path.sep, -+ cache_dir=str(Path.cwd() / ".pypoetrycache") - ) - assert 1 == config.set_config_source.call_count -diff --git a/tests/helpers.py b/tests/helpers.py -index f380bb8d..1de03111 100644 ---- a/tests/helpers.py -+++ b/tests/helpers.py -@@ -1,7 +1,7 @@ - import os - import shutil - --from poetry.console import Application -+from poetry.console.application import Application - from poetry.core.masonry.utils.helpers import escape_name - from poetry.core.masonry.utils.helpers import escape_version - from poetry.core.packages import Dependency -diff --git 
a/tests/installation/test_chef.py b/tests/installation/test_chef.py -index 9fcbbea1..7d0d98b1 100644 ---- a/tests/installation/test_chef.py -+++ b/tests/installation/test_chef.py -@@ -19,14 +19,16 @@ def test_get_cached_archive_for_link(config, mocker): - ), - ) +-@pytest.fixture(autouse=True) +-def mock_installed_repository_vendors(mocker: MockerFixture) -> None: +- mocker.patch("poetry.repositories.installed_repository._VENDORS", str(VENDOR_DIR)) +- +- + @pytest.fixture + def repository(mocker: MockerFixture, env: MockEnv) -> InstalledRepository: + mocker.patch( +@@ -104,7 +99,7 @@ def get_package_from_repository( -+ cwd = Path.cwd() / ".pypoetrycache" -+ - mocker.patch.object( - chef, - "get_cached_archives_for_link", - return_value=[ -- Link("file:///foo/demo-0.1.0-py2.py3-none-any"), -- Link("file:///foo/demo-0.1.0.tar.gz"), -- Link("file:///foo/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl"), -- Link("file:///foo/demo-0.1.0-cp37-cp37-macosx_10_15_x86_64.whl"), -+ Link(f"file:///{cwd}demo-0.1.0-py2.py3-none-any"), -+ Link(f"file:///{cwd}demo-0.1.0.tar.gz"), -+ Link(f"file:///{cwd}demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl"), -+ Link(f"file:///{cwd}demo-0.1.0-cp37-cp37-macosx_10_15_x86_64.whl"), - ], - ) -@@ -34,7 +36,7 @@ def test_get_cached_archive_for_link(config, mocker): - Link("https://files.python-poetry.org/demo-0.1.0.tar.gz") - ) + def test_load_successful(repository: InstalledRepository): +- assert len(repository.packages) == len(INSTALLED_RESULTS) - 1 ++ assert len(repository.packages) == len(INSTALLED_RESULTS) -- assert Link("file:///foo/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl") == archive -+ assert Link(f"file:///{cwd}demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl") == archive + def test_load_successful_with_invalid_distribution( +@@ -119,7 +114,7 @@ def test_load_successful_with_invalid_distribution( + repository_with_invalid_distribution = InstalledRepository.load(env) - def test_get_cached_archives_for_link(config, mocker): -@@ -68,11 +70,13 @@ def test_get_cache_directory_for_link(config): - ), + assert ( +- len(repository_with_invalid_distribution.packages) == len(INSTALLED_RESULTS) - 1 ++ len(repository_with_invalid_distribution.packages) == len(INSTALLED_RESULTS) ) + assert len(caplog.messages) == 1 + +@@ -128,11 +123,6 @@ def test_load_successful_with_invalid_distribution( + assert str(invalid_dist_info) in message -+ cwd = Path.cwd() / ".pypoetrycache" -+ - directory = chef.get_cache_directory_for_link( - Link("https://files.python-poetry.org/poetry-1.1.0.tar.gz") - ) - expected = Path( -- "/foo/artifacts/ba/63/13/283a3b3b7f95f05e9e6f84182d276f7bb0951d5b0cc24422b33f7a4648" -+ f"{cwd}/artifacts/ba/63/13/283a3b3b7f95f05e9e6f84182d276f7bb0951d5b0cc24422b33f7a4648" - ) - assert expected == directory +-def test_load_ensure_isolation(repository: InstalledRepository): +- package = get_package_from_repository("attrs", repository) +- assert package is None +- +- + def test_load_standard_package(repository: InstalledRepository): + cleo = get_package_from_repository("cleo", repository) + assert cleo is not None diff --git a/tests/utils/test_env.py b/tests/utils/test_env.py -index 369cf7c9..eea60119 100644 +index 32fb5cb0..2c7f58b1 100644 --- a/tests/utils/test_env.py +++ b/tests/utils/test_env.py -@@ -150,7 +150,7 @@ def test_activate_activates_non_existing_virtualenv_no_envs_file( +@@ -220,7 +220,7 @@ def test_activate_activates_non_existing_virtualenv_no_envs_file( ) mocker.patch( - "poetry.utils._compat.subprocess.Popen.communicate", + "subprocess.Popen.communicate", 
- side_effect=[("/prefix", None), ("/prefix", None)], + side_effect=[("/prefix", None), ("/prefix", None), ("/prefix", None)], ) m = mocker.patch("poetry.utils.env.EnvManager.build_venv", side_effect=build_venv) -@@ -399,21 +399,21 @@ def test_deactivate_non_activated_but_existing( - - ( - Path(tmp_dir) -- / "{}-py{}".format(venv_name, ".".join(str(c) for c in sys.version_info[:2])) -+ / "{}-py{}".format(venv_name, "3.9") - ).mkdir() - - config.merge({"virtualenvs": {"path": str(tmp_dir)}}) +@@ -522,7 +522,7 @@ def test_deactivate_non_activated_but_existing( mocker.patch( - "poetry.utils._compat.subprocess.check_output", + "subprocess.check_output", - side_effect=check_output_wrapper(), -+ side_effect=check_output_wrapper(Version.parse("3.9.5")), ++ side_effect=check_output_wrapper(Version.parse("3.10.5")), ) manager.deactivate(NullIO()) - env = manager.get() - - assert env.path == Path(tmp_dir) / "{}-py{}".format( -- venv_name, ".".join(str(c) for c in sys.version_info[:2]) -+ venv_name, "3.9" - ) - assert Path("/prefix") - -@@ -423,8 +423,8 @@ def test_deactivate_activated(tmp_dir, manager, poetry, config, mocker): - del os.environ["VIRTUAL_ENV"] - - venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent)) -- version = Version.parse(".".join(str(c) for c in sys.version_info[:3])) -- other_version = Version.parse("3.4") if version.major == 2 else version.next_minor -+ version = Version.parse("3.9.5") -+ other_version = Version.parse("3.4.0") - ( - Path(tmp_dir) / "{}-py{}.{}".format(venv_name, version.major, version.minor) - ).mkdir() -@@ -445,7 +445,7 @@ def test_deactivate_activated(tmp_dir, manager, poetry, config, mocker): +@@ -562,7 +562,7 @@ def test_deactivate_activated( mocker.patch( - "poetry.utils._compat.subprocess.check_output", + "subprocess.check_output", - side_effect=check_output_wrapper(), -+ side_effect=check_output_wrapper(Version.parse("3.9.5")), ++ side_effect=check_output_wrapper(Version.parse("3.10.5")), ) manager.deactivate(NullIO()) -@@ -647,7 +647,7 @@ def test_create_venv_tries_to_find_a_compatible_python_executable_using_generic_ +@@ -858,7 +858,7 @@ def test_create_venv_tries_to_find_a_compatible_python_executable_using_generic_ mocker.patch("sys.version_info", (2, 7, 16)) mocker.patch( - "poetry.utils._compat.subprocess.check_output", + "subprocess.check_output", - side_effect=check_output_wrapper(Version.parse("3.7.5")), + side_effect=["3.7.5", "3.7.5", "2.7.16", "3.7.5"], ) m = mocker.patch( "poetry.utils.env.EnvManager.build_venv", side_effect=lambda *args, **kwargs: "" -@@ -656,7 +656,7 @@ def test_create_venv_tries_to_find_a_compatible_python_executable_using_generic_ - manager.create_venv(NullIO()) - - m.assert_called_with( -- Path("/foo/virtualenvs/{}-py3.7".format(venv_name)), executable="python3" -+ Path("{}/virtualenvs/{}-py3.7".format(config.get("cache-dir"), venv_name)), executable="python3" - ) - - -@@ -671,7 +671,15 @@ def test_create_venv_tries_to_find_a_compatible_python_executable_using_specific +@@ -893,7 +893,15 @@ def test_create_venv_tries_to_find_a_compatible_python_executable_using_specific + venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent)) mocker.patch("sys.version_info", (2, 7, 16)) - mocker.patch( -- "poetry.utils._compat.subprocess.check_output", side_effect=["3.5.3", "3.9.0"] -+ "poetry.utils._compat.subprocess.check_output", side_effect=[ -+ "3.5.3", -+ "3.9.0", -+ "3.5.3", -+ "3.5.3", -+ "2.7.16", -+ "3.5.3", -+ "3.9.0" -+ ] - ) +- mocker.patch("subprocess.check_output", 
side_effect=["3.5.3", "3.9.0"]) ++ mocker.patch("subprocess.check_output", side_effect=[ ++ "3.5.3", ++ "3.10.0", ++ "3.5.3", ++ "3.5.3", ++ "2.7.16", ++ "3.5.3", ++ "3.10.0" ++ ]) m = mocker.patch( "poetry.utils.env.EnvManager.build_venv", side_effect=lambda *args, **kwargs: "" -@@ -680,7 +688,7 @@ def test_create_venv_tries_to_find_a_compatible_python_executable_using_specific + ) +@@ -901,15 +909,15 @@ def test_create_venv_tries_to_find_a_compatible_python_executable_using_specific manager.create_venv(NullIO()) m.assert_called_with( -- Path("/foo/virtualenvs/{}-py3.9".format(venv_name)), executable="python3.9" -+ Path("{}/virtualenvs/{}-py3.9".format(config.get("cache-dir"), venv_name)), executable="python3.9" +- config_virtualenvs_path / f"{venv_name}-py3.9", +- executable="python3.9", ++ config_virtualenvs_path / f"{venv_name}-py3.10", ++ executable="python3.10", + flags={ + "always-copy": False, + "system-site-packages": False, + "no-pip": False, + "no-setuptools": False, + }, +- prompt="simple-project-py3.9", ++ prompt="simple-project-py3.10", ) -@@ -693,7 +701,7 @@ def test_create_venv_fails_if_no_compatible_python_version_could_be_found( +@@ -921,7 +929,7 @@ def test_create_venv_fails_if_no_compatible_python_version_could_be_found( + poetry.package.python_versions = "^4.8" - mocker.patch( -- "poetry.utils._compat.subprocess.check_output", side_effect=["", "", "", ""] -+ "poetry.utils._compat.subprocess.check_output", side_effect=["" for _ in range(4 + 2 * 13)] - ) +- mocker.patch("subprocess.check_output", side_effect=["", "", "", ""]) ++ mocker.patch("subprocess.check_output", side_effect=lambda *args, **kwargs: "") m = mocker.patch( "poetry.utils.env.EnvManager.build_venv", side_effect=lambda *args, **kwargs: "" -@@ -720,7 +728,7 @@ def test_create_venv_does_not_try_to_find_compatible_versions_with_executable( + ) +@@ -947,7 +955,7 @@ def test_create_venv_does_not_try_to_find_compatible_versions_with_executable( poetry.package.python_versions = "^4.8" -- mocker.patch("poetry.utils._compat.subprocess.check_output", side_effect=["3.8.0"]) -+ mocker.patch("poetry.utils._compat.subprocess.check_output", side_effect=["3.8.0" for _ in range(1 + 12)]) +- mocker.patch("subprocess.check_output", side_effect=["3.8.0"]) ++ mocker.patch("subprocess.check_output", side_effect=["3.8.0" for _ in range(1 + 12)]) m = mocker.patch( "poetry.utils.env.EnvManager.build_venv", side_effect=lambda *args, **kwargs: "" ) -@@ -750,10 +758,9 @@ def test_create_venv_uses_patch_version_to_detect_compatibility( +@@ -981,10 +989,9 @@ def test_create_venv_uses_patch_version_to_detect_compatibility( ) venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent)) - mocker.patch("sys.version_info", (version.major, version.minor, version.patch + 1)) -- check_output = mocker.patch( -+ mocker.patch( - "poetry.utils._compat.subprocess.check_output", + check_output = mocker.patch( + "subprocess.check_output", - side_effect=check_output_wrapper(Version.parse("3.6.9")), + side_effect=["2.7.16" for _ in range(3)] + [f"{version.major}.{version.minor}.{version.patch + 1}"], ) m = mocker.patch( "poetry.utils.env.EnvManager.build_venv", side_effect=lambda *args, **kwargs: "" -@@ -761,14 +768,13 @@ def test_create_venv_uses_patch_version_to_detect_compatibility( +@@ -992,10 +999,9 @@ def test_create_venv_uses_patch_version_to_detect_compatibility( manager.create_venv(NullIO()) - assert not check_output.called m.assert_called_with( - Path( -- "/foo/virtualenvs/{}-py{}.{}".format( -- venv_name, version.major, 
version.minor -+ "{}/virtualenvs/{}-py{}.{}".format( -+ config.get("cache-dir"), venv_name, version.major, version.minor - ) - ), + config_virtualenvs_path / f"{venv_name}-py{version.major}.{version.minor}", - executable=None, + executable="python3", - ) - - -@@ -801,8 +807,8 @@ def test_create_venv_uses_patch_version_to_detect_compatibility_with_executable( - assert check_output.called - m.assert_called_with( - Path( -- "/foo/virtualenvs/{}-py{}.{}".format( -- venv_name, version.major, version.minor - 1 -+ "{}/virtualenvs/{}-py{}.{}".format( -+ config.get("cache-dir"), venv_name, version.major, version.minor - 1 - ) - ), - executable="python{}.{}".format(version.major, version.minor - 1), -@@ -830,7 +836,7 @@ def test_activate_with_in_project_setting_does_not_fail_if_no_venvs_dir( + flags={ + "always-copy": False, + "system-site-packages": False, +@@ -1100,7 +1106,7 @@ def test_activate_with_in_project_setting_does_not_fail_if_no_venvs_dir( ) mocker.patch( - "poetry.utils._compat.subprocess.Popen.communicate", + "subprocess.Popen.communicate", - side_effect=[("/prefix", None), ("/prefix", None)], + side_effect=[("/prefix", None), ("/prefix", None), ("/prefix", None)], ) m = mocker.patch("poetry.utils.env.EnvManager.build_venv") -@@ -843,7 +849,7 @@ def test_activate_with_in_project_setting_does_not_fail_if_no_venvs_dir( +@@ -1123,7 +1129,7 @@ def test_activate_with_in_project_setting_does_not_fail_if_no_venvs_dir( def test_system_env_has_correct_paths(): @@ -2122,14 +2545,15 @@ index 369cf7c9..eea60119 100644 paths = env.paths -@@ -853,17 +859,6 @@ def test_system_env_has_correct_paths(): +@@ -1133,18 +1139,6 @@ def test_system_env_has_correct_paths(): assert env.site_packages.path == Path(paths["purelib"]) -@pytest.mark.parametrize( -- ("enabled",), [(True,), (False,)], +- "enabled", +- [True, False], -) --def test_system_env_usersite(mocker, enabled): +-def test_system_env_usersite(mocker: MockerFixture, enabled: bool): - mocker.patch("site.check_enableusersite", return_value=enabled) - env = SystemEnv(Path(sys.prefix)) - assert (enabled and env.usersite is not None) or ( @@ -2137,6 +2561,24 @@ index 369cf7c9..eea60119 100644 - ) - - - def test_venv_has_correct_paths(tmp_venv): + def test_venv_has_correct_paths(tmp_venv: VirtualEnv): paths = tmp_venv.paths +@@ -1392,7 +1386,7 @@ def test_build_environment_called_build_script_specified( + assert env == ephemeral_env + assert env.executed == [ + [ +- "python", ++ env.python, + env.pip_embedded, + "install", + "--disable-pip-version-check", +@@ -1446,7 +1440,7 @@ def test_create_venv_project_name_empty_sets_correct_prompt( + + m.assert_called_with( + config_virtualenvs_path / f"{venv_name}-py3.7", +- executable="python3", ++ executable="python", + flags={ + "always-copy": False, + "system-site-packages": False, diff --git a/patches/requests.patch b/patches/requests.patch index 3f1ab9aa..8d025b30 100644 --- a/patches/requests.patch +++ b/patches/requests.patch @@ -1,17 +1,17 @@ diff --git a/requests/adapters.py b/requests/adapters.py -index fa4d9b3c..cfe4a344 100644 +index d3b2d5bb..64ce2d4a 100644 --- a/requests/adapters.py +++ b/requests/adapters.py -@@ -39,6 +39,7 @@ from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError, - ProxyError, RetryError, InvalidSchema, InvalidProxyURL, - InvalidURL) +@@ -28,6 +28,7 @@ from urllib3.util import parse_url + from urllib3.util.retry import Retry + from .auth import _basic_auth_str +from .certs import contents as ca_bundle_contents - - try: - from urllib3.contrib.socks import 
SOCKSProxyManager -@@ -215,21 +216,24 @@ class HTTPAdapter(BaseAdapter): - if url.lower().startswith('https') and verify: + from .compat import basestring, urlparse + from .cookies import extract_cookies_to_jar + from .exceptions import ( +@@ -251,15 +252,16 @@ class HTTPAdapter(BaseAdapter): + if url.lower().startswith("https") and verify: cert_loc = None + cert_data = None @@ -26,10 +26,12 @@ index fa4d9b3c..cfe4a344 100644 - if not cert_loc or not os.path.exists(cert_loc): + if not cert_data and (not cert_loc or not os.path.exists(cert_loc)): - raise IOError("Could not find a suitable TLS CA certificate bundle, " - "invalid path: {}".format(cert_loc)) + raise OSError( + f"Could not find a suitable TLS CA certificate bundle, " + f"invalid path: {cert_loc}" +@@ -267,7 +269,9 @@ class HTTPAdapter(BaseAdapter): - conn.cert_reqs = 'CERT_REQUIRED' + conn.cert_reqs = "CERT_REQUIRED" - if not os.path.isdir(cert_loc): + if cert_data: @@ -38,8 +40,8 @@ index fa4d9b3c..cfe4a344 100644 conn.ca_certs = cert_loc else: conn.ca_cert_dir = cert_loc -@@ -237,6 +241,7 @@ class HTTPAdapter(BaseAdapter): - conn.cert_reqs = 'CERT_NONE' +@@ -275,6 +279,7 @@ class HTTPAdapter(BaseAdapter): + conn.cert_reqs = "CERT_NONE" conn.ca_certs = None conn.ca_cert_dir = None + conn.ca_cert_data = None @@ -47,24 +49,15 @@ index fa4d9b3c..cfe4a344 100644 if cert: if not isinstance(cert, basestring): diff --git a/requests/certs.py b/requests/certs.py -index d1a378d7..8788b087 100644 +index be422c3e..61b7d0bd 100644 --- a/requests/certs.py +++ b/requests/certs.py -@@ -12,7 +12,7 @@ If you are packaging Requests, e.g., for a Linux distribution or a managed +@@ -11,7 +11,7 @@ If you are packaging Requests, e.g., for a Linux distribution or a managed environment, you can change the definition of where() to return a separately packaged CA bundle. 
""" -from certifi import where +from certifi import contents, where - if __name__ == '__main__': + if __name__ == "__main__": print(where()) -diff --git a/requirements-dev.txt b/requirements-dev.txt -index effb0c81..e19c95ca 100644 ---- a/requirements-dev.txt -+++ b/requirements-dev.txt -@@ -6,3 +6,4 @@ httpbin==0.7.0 - Flask>=1.0,<2.0 - trustme - wheel -+MarkupSafe==2.0.1 diff --git a/patches/virtualenv.patch b/patches/virtualenv.patch index f488cce6..5c383325 100644 --- a/patches/virtualenv.patch +++ b/patches/virtualenv.patch @@ -13,17 +13,17 @@ index f8aceb2..837bb1c 100644 *wheel-store* diff --git a/setup.cfg b/setup.cfg -index ccd2f8d..e16bf22 100644 +index 8836aba..9e559dc 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,5 +1,6 @@ [metadata] name = virtualenv -+version = 20.14.1 ++version = 20.16.3 description = Virtual Python Environment builder long_description = file: README.md long_description_content_type = text/markdown -@@ -112,6 +113,7 @@ virtualenv.activation.fish = *.fish +@@ -103,6 +104,7 @@ virtualenv.activation.fish = *.fish virtualenv.activation.nushell = *.nu virtualenv.activation.powershell = *.ps1 virtualenv.seed.wheels.embed = *.whl @@ -48,20 +48,17 @@ index cddd1d3..1b5dbe4 100644 -) +setup() diff --git a/src/virtualenv/__init__.py b/src/virtualenv/__init__.py -index 5f74e3e..300b12e 100644 +index e40e8b7..619ddf3 100644 --- a/src/virtualenv/__init__.py +++ b/src/virtualenv/__init__.py -@@ -1,5 +1,18 @@ - from __future__ import absolute_import, unicode_literals - +@@ -1,3 +1,15 @@ +import sys + +from pathlib import Path + +__path_pack__ = Path(__path__[0]) +if getattr(sys, "oxidized", False): -+ parents = 1 if sys.platform.startswith("win") else 2 -+ __path_assets__ = __path_pack__.parents[parents] / "assets" / "virtualenv" ++ __path_assets__ = __path_pack__.parents[1] / "assets" / "virtualenv" +else: + __path_assets__ = None + @@ -105,10 +102,10 @@ index 0000000..56d7087 + +setattr(_res_patch_target, "finder", finder) diff --git a/src/virtualenv/activation/python/__init__.py b/src/virtualenv/activation/python/__init__.py -index 9e57912..4f3d719 100644 +index ad38201..e54a7f6 100644 --- a/src/virtualenv/activation/python/__init__.py +++ b/src/virtualenv/activation/python/__init__.py -@@ -12,7 +12,17 @@ from ..via_template import ViaTemplateActivator +@@ -8,7 +8,17 @@ from ..via_template import ViaTemplateActivator class PythonActivator(ViaTemplateActivator): def templates(self): @@ -126,18 +123,16 @@ index 9e57912..4f3d719 100644 + return generated def replacements(self, creator, dest_folder): - replacements = super(PythonActivator, self).replacements(creator, dest_folder) + replacements = super().replacements(creator, dest_folder) diff --git a/src/virtualenv/activation/python/activate_this.py b/src/virtualenv/activation/python/activate_this.py.template similarity index 100% rename from src/virtualenv/activation/python/activate_this.py rename to src/virtualenv/activation/python/activate_this.py.template diff --git a/src/virtualenv/create/__init__.py b/src/virtualenv/create/__init__.py -index 01e6d4f..9e76424 100644 +index e69de29..7e40c92 100644 --- a/src/virtualenv/create/__init__.py +++ b/src/virtualenv/create/__init__.py -@@ -1 +1,10 @@ - from __future__ import absolute_import, unicode_literals -+ +@@ -0,0 +1,8 @@ +from pathlib import Path + +from virtualenv import __path_assets__ @@ -147,18 +142,16 @@ index 01e6d4f..9e76424 100644 + Path(__path__[0]) +) diff --git a/src/virtualenv/create/creator.py b/src/virtualenv/create/creator.py -index 6363f8b..9b00461 100644 
+index a95b6e2..159c8ff 100644 --- a/src/virtualenv/create/creator.py +++ b/src/virtualenv/create/creator.py -@@ -12,6 +12,7 @@ from textwrap import dedent - - from six import add_metaclass +@@ -8,15 +8,17 @@ from ast import literal_eval + from collections import OrderedDict + from pathlib import Path +from virtualenv import __path_pack__ from virtualenv.discovery.cached_py_info import LogCmd - from virtualenv.info import WIN_CPYTHON_2 - from virtualenv.util.path import Path, safe_delete -@@ -19,10 +20,11 @@ from virtualenv.util.six import ensure_str, ensure_text + from virtualenv.util.path import safe_delete from virtualenv.util.subprocess import run_cmd from virtualenv.version import __version__ @@ -171,24 +164,24 @@ index 6363f8b..9b00461 100644 +DEBUG_SCRIPT = _PATH_ASSETS / "debug.py" - class CreatorMeta(object): + class CreatorMeta: diff --git a/src/virtualenv/create/via_global_ref/_virtualenv.py b/src/virtualenv/create/via_global_ref/_virtualenv.py.template similarity index 100% rename from src/virtualenv/create/via_global_ref/_virtualenv.py rename to src/virtualenv/create/via_global_ref/_virtualenv.py.template diff --git a/src/virtualenv/create/via_global_ref/api.py b/src/virtualenv/create/via_global_ref/api.py -index 6f296f4..f90eb04 100644 +index 7a4086f..005dd68 100644 --- a/src/virtualenv/create/via_global_ref/api.py +++ b/src/virtualenv/create/via_global_ref/api.py -@@ -3,6 +3,7 @@ from __future__ import absolute_import, unicode_literals +@@ -1,6 +1,7 @@ import logging import os from abc import ABCMeta +from importlib.resources import read_text + from pathlib import Path - from six import add_metaclass - -@@ -100,9 +101,9 @@ class ViaGlobalRefApi(Creator): + from virtualenv.info import fs_supports_symlink +@@ -94,9 +95,9 @@ class ViaGlobalRefApi(Creator, metaclass=ABCMeta): def env_patch_text(self): """Patch the distutils package to not be derailed by its configuration files""" @@ -200,29 +193,29 @@ index 6f296f4..f90eb04 100644 + return text.replace('"__SCRIPT_DIR__"', repr(os.path.relpath(str(self.script_dir), str(self.purelib)))) def _args(self): - return super(ViaGlobalRefApi, self)._args() + [("global", self.enable_system_site_package)] + return super()._args() + [("global", self.enable_system_site_package)] diff --git a/src/virtualenv/create/via_global_ref/builtin/python2/python2.py b/src/virtualenv/create/via_global_ref/builtin/python2/python2.py -index cacd56e..c29fbf4 100644 +index 9b963b3..e1e64a2 100644 --- a/src/virtualenv/create/via_global_ref/builtin/python2/python2.py +++ b/src/virtualenv/create/via_global_ref/builtin/python2/python2.py -@@ -3,6 +3,7 @@ from __future__ import absolute_import, unicode_literals +@@ -1,6 +1,7 @@ import abc import json import os +from importlib.resources import read_text + from pathlib import Path - from six import add_metaclass - -@@ -15,8 +16,6 @@ from virtualenv.util.zipapp import read as read_from_zipapp + from virtualenv.create.describe import Python2Supports +@@ -10,8 +11,6 @@ from virtualenv.util.zipapp import read as read_from_zipapp from ..via_global_self_do import ViaGlobalRefVirtualenvBuiltin -HERE = Path(os.path.abspath(__file__)).parent - - @add_metaclass(abc.ABCMeta) - class Python2(ViaGlobalRefVirtualenvBuiltin, Python2Supports): -@@ -34,10 +33,7 @@ class Python2(ViaGlobalRefVirtualenvBuiltin, Python2Supports): + class Python2(ViaGlobalRefVirtualenvBuiltin, Python2Supports, metaclass=abc.ABCMeta): + def create(self): +@@ -28,10 +27,7 @@ class Python2(ViaGlobalRefVirtualenvBuiltin, Python2Supports, metaclass=abc.ABCM 
site_py = site_py_in / "site.py" custom_site = get_custom_site() @@ -231,10 +224,10 @@ index cacd56e..c29fbf4 100644 - else: - custom_site_text = custom_site.read_text() + custom_site_text = read_text(custom_site, "site.py.template") - expected = json.dumps([os.path.relpath(ensure_text(str(i)), ensure_text(str(site_py))) for i in self.libs]) + expected = json.dumps([os.path.relpath(str(i), str(site_py)) for i in self.libs]) custom_site_text = custom_site_text.replace("___EXPECTED_SITE_PACKAGES___", expected) -@@ -108,4 +104,5 @@ class Python2(ViaGlobalRefVirtualenvBuiltin, Python2Supports): +@@ -101,4 +97,5 @@ class Python2(ViaGlobalRefVirtualenvBuiltin, Python2Supports, metaclass=abc.ABCM def get_custom_site(): @@ -246,12 +239,10 @@ similarity index 100% rename from src/virtualenv/create/via_global_ref/builtin/python2/site.py rename to src/virtualenv/create/via_global_ref/builtin/python2/site.py.template diff --git a/src/virtualenv/discovery/__init__.py b/src/virtualenv/discovery/__init__.py -index 01e6d4f..babf6ad 100644 +index e69de29..84bc7a4 100644 --- a/src/virtualenv/discovery/__init__.py +++ b/src/virtualenv/discovery/__init__.py -@@ -1 +1,10 @@ - from __future__ import absolute_import, unicode_literals -+ +@@ -0,0 +1,8 @@ +from pathlib import Path + +from virtualenv import __path_assets__ @@ -261,10 +252,10 @@ index 01e6d4f..babf6ad 100644 + Path(__path__[0]) +) diff --git a/src/virtualenv/discovery/builtin.py b/src/virtualenv/discovery/builtin.py -index 52f7398..0f1844c 100644 +index 40320d3..7cc5687 100644 --- a/src/virtualenv/discovery/builtin.py +++ b/src/virtualenv/discovery/builtin.py -@@ -97,8 +97,8 @@ def propose_interpreters(spec, try_first_with, app_data, env=None): +@@ -91,8 +91,8 @@ def propose_interpreters(spec, try_first_with, app_data, env=None): if spec.is_abs: return else: @@ -276,32 +267,32 @@ index 52f7398..0f1844c 100644 # 3. otherwise fallback to platform default logic if IS_WIN: diff --git a/src/virtualenv/discovery/cached_py_info.py b/src/virtualenv/discovery/cached_py_info.py -index 31beff5..35a0249 100644 +index 22ad249..75fbf70 100644 --- a/src/virtualenv/discovery/cached_py_info.py +++ b/src/virtualenv/discovery/cached_py_info.py -@@ -19,6 +19,8 @@ from virtualenv.util.path import Path - from virtualenv.util.six import ensure_text - from virtualenv.util.subprocess import Popen, subprocess +@@ -19,6 +19,8 @@ from virtualenv.app_data import AppDataDisabled + from virtualenv.discovery.py_info import PythonInfo + from virtualenv.util.subprocess import subprocess +from . import _PATH_ASSETS + _CACHE = OrderedDict() _CACHE[Path(sys.executable)] = PythonInfo() -@@ -82,7 +84,7 @@ def _get_via_file_cache(cls, app_data, path, exe, env): +@@ -89,7 +91,7 @@ def gen_cookie(): def _run_subprocess(cls, exe, app_data, env): - py_info_script = Path(os.path.abspath(__file__)).parent / "py_info.py" + py_info_script = _PATH_ASSETS / "py_info.py" - with app_data.ensure_extracted(py_info_script) as py_info_script: - cmd = [exe, str(py_info_script)] - # prevent sys.prefix from leaking into the child process - see https://bugs.python.org/issue22490 + # Cookies allow to split the serialized stdout output generated by the script collecting the info from the output + # generated by something else. The right way to deal with it is to create an anonymous pipe and pass its descriptor + # to the child and output to it. 
But AFAIK all of them are either not cross-platform or too big to implement and are diff --git a/src/virtualenv/info.py b/src/virtualenv/info.py -index 7d5e86d..f416921 100644 +index a4fc4bf..752108b 100644 --- a/src/virtualenv/info.py +++ b/src/virtualenv/info.py -@@ -6,6 +6,8 @@ import platform +@@ -4,13 +4,15 @@ import platform import sys import tempfile @@ -310,36 +301,33 @@ index 7d5e86d..f416921 100644 IMPLEMENTATION = platform.python_implementation() IS_PYPY = IMPLEMENTATION == "PyPy" IS_CPYTHON = IMPLEMENTATION == "CPython" -@@ -13,8 +15,8 @@ PY3 = sys.version_info[0] == 3 - PY2 = sys.version_info[0] == 2 IS_WIN = sys.platform == "win32" IS_MAC_ARM64 = sys.platform == "darwin" and platform.machine() == "arm64" -ROOT = os.path.realpath(os.path.join(os.path.abspath(__file__), os.path.pardir, os.path.pardir)) -IS_ZIPAPP = os.path.isfile(ROOT) +ROOT = os.path.realpath(__path_pack__.parent) +IS_ZIPAPP = False - WIN_CPYTHON_2 = IS_CPYTHON and IS_WIN and PY2 _CAN_SYMLINK = _FS_CASE_SENSITIVE = _CFG_DIR = _DATA_DIR = None + diff --git a/src/virtualenv/run/__init__.py b/src/virtualenv/run/__init__.py -index e8e7ab1..6bca684 100644 +index 6d22b71..23c146f 100644 --- a/src/virtualenv/run/__init__.py +++ b/src/virtualenv/run/__init__.py -@@ -130,7 +130,7 @@ def add_version_flag(parser): +@@ -128,7 +128,7 @@ def add_version_flag(parser): parser.add_argument( "--version", action="version", -- version="%(prog)s {} from {}".format(__version__, virtualenv.__file__), -+ version="%(prog)s {} from {}".format(__version__, virtualenv.__path_pack__), +- version=f"%(prog)s {__version__} from {virtualenv.__file__}", ++ version=f"%(prog)s {__version__} from {virtualenv.__path_pack__}", help="display the version of the virtualenv package and its location, then exit", ) diff --git a/src/virtualenv/run/plugin/activators.py b/src/virtualenv/run/plugin/activators.py -index 8180981..036b2c4 100644 +index 320cae7..74df18b 100644 --- a/src/virtualenv/run/plugin/activators.py +++ b/src/virtualenv/run/plugin/activators.py -@@ -2,11 +2,30 @@ from __future__ import absolute_import, unicode_literals - +@@ -1,10 +1,29 @@ from argparse import ArgumentTypeError from collections import OrderedDict +from importlib.metadata import EntryPoint @@ -370,13 +358,13 @@ index 8180981..036b2c4 100644 self.default = None possible = OrderedDict( diff --git a/src/virtualenv/run/plugin/base.py b/src/virtualenv/run/plugin/base.py -index 048c76a..0d3a04a 100644 +index 3eb8ab3..1e9f0f2 100644 --- a/src/virtualenv/run/plugin/base.py +++ b/src/virtualenv/run/plugin/base.py -@@ -3,14 +3,7 @@ from __future__ import absolute_import, unicode_literals +@@ -1,14 +1,6 @@ import sys from collections import OrderedDict - +- -if sys.version_info >= (3, 8): - from importlib.metadata import entry_points - @@ -388,8 +376,8 @@ index 048c76a..0d3a04a 100644 +from importlib.metadata import entry_points - class PluginLoader(object): -@@ -19,16 +12,13 @@ class PluginLoader(object): + class PluginLoader: +@@ -17,16 +9,13 @@ class PluginLoader: @classmethod def entry_points_for(cls, key): @@ -413,18 +401,16 @@ index 048c76a..0d3a04a 100644 class ComponentBuilder(PluginLoader): diff --git a/src/virtualenv/run/plugin/creators.py b/src/virtualenv/run/plugin/creators.py -index ef4177a..0a59e32 100644 +index 8953064..2514cdb 100644 --- a/src/virtualenv/run/plugin/creators.py +++ b/src/virtualenv/run/plugin/creators.py -@@ -1,6 +1,7 @@ - from __future__ import absolute_import, unicode_literals - +@@ -1,4 +1,5 @@ from collections import OrderedDict, defaultdict, 
namedtuple +from importlib.metadata import EntryPoint from virtualenv.create.describe import Describe from virtualenv.create.via_global_ref.builtin.builtin_way import VirtualenvBuiltin -@@ -11,6 +12,28 @@ CreatorInfo = namedtuple("CreatorInfo", ["key_to_class", "key_to_meta", "describ +@@ -9,6 +10,28 @@ CreatorInfo = namedtuple("CreatorInfo", ["key_to_class", "key_to_meta", "describ class CreatorSelector(ComponentBuilder): @@ -452,21 +438,19 @@ index ef4177a..0a59e32 100644 + def __init__(self, interpreter, parser): creators, self.key_to_meta, self.describe, self.builtin_key = self.for_interpreter(interpreter) - super(CreatorSelector, self).__init__(interpreter, parser, "creator", creators) + super().__init__(interpreter, parser, "creator", creators) diff --git a/src/virtualenv/run/plugin/discovery.py b/src/virtualenv/run/plugin/discovery.py -index ac9b7f5..17c0693 100644 +index 13f39ed..4be9f98 100644 --- a/src/virtualenv/run/plugin/discovery.py +++ b/src/virtualenv/run/plugin/discovery.py -@@ -1,10 +1,22 @@ - from __future__ import absolute_import, unicode_literals - +@@ -1,8 +1,21 @@ +from importlib.metadata import EntryPoint + from .base import PluginLoader class Discovery(PluginLoader): -- """ """ + """Discovery plugins""" + _ENTRY_POINTS = { + "virtualenv.discovery": [ + EntryPoint( @@ -482,12 +466,10 @@ index ac9b7f5..17c0693 100644 def get_discover(parser, args): diff --git a/src/virtualenv/run/plugin/seeders.py b/src/virtualenv/run/plugin/seeders.py -index d182c6f..ee3e5bc 100644 +index 1a51d2e..0837758 100644 --- a/src/virtualenv/run/plugin/seeders.py +++ b/src/virtualenv/run/plugin/seeders.py -@@ -1,9 +1,24 @@ - from __future__ import absolute_import, unicode_literals - +@@ -1,7 +1,22 @@ +from importlib.metadata import EntryPoint + from .base import ComponentBuilder @@ -509,17 +491,16 @@ index d182c6f..ee3e5bc 100644 + def __init__(self, interpreter, parser): possible = self.options("virtualenv.seed") - super(SeederSelector, self).__init__(interpreter, parser, "seeder", possible) + super().__init__(interpreter, parser, "seeder", possible) diff --git a/src/virtualenv/seed/wheels/embed/__init__.py b/src/virtualenv/seed/wheels/embed/__init__.py -index 5efbd7e..c294118 100644 +index b377ad2..e18e45c 100644 --- a/src/virtualenv/seed/wheels/embed/__init__.py +++ b/src/virtualenv/seed/wheels/embed/__init__.py -@@ -1,9 +1,14 @@ - from __future__ import absolute_import, unicode_literals +@@ -1,8 +1,13 @@ + from pathlib import Path +from virtualenv import __path_assets__ from virtualenv.seed.wheels.util import Wheel - from virtualenv.util.path import Path -BUNDLE_FOLDER = Path(__file__).absolute().parent +if __path_assets__: @@ -529,31 +510,29 @@ index 5efbd7e..c294118 100644 + BUNDLE_SUPPORT = { "3.11": { - "pip": "pip-22.0.4-py3-none-any.whl", + "pip": "pip-22.2.2-py3-none-any.whl", diff --git a/src/virtualenv/version.py b/src/virtualenv/version.py new file mode 100644 -index 0000000..f059df5 +index 0000000..a7a4f30 --- /dev/null +++ b/src/virtualenv/version.py @@ -0,0 +1 @@ -+__version__ = "20.14.1" ++__version__ = "20.16.3" diff --git a/tests/unit/create/test_interpreters.py b/tests/unit/create/test_interpreters.py -index 59a0991..76d40c7 100644 +index 5d36b23..65d5308 100644 --- a/tests/unit/create/test_interpreters.py +++ b/tests/unit/create/test_interpreters.py -@@ -29,6 +29,4 @@ def test_failed_to_find_implementation(of_id, mocker): +@@ -27,4 +27,4 @@ def test_failed_to_find_implementation(of_id, mocker): mocker.patch("virtualenv.run.plugin.creators.CreatorSelector._OPTIONS", 
return_value={}) with pytest.raises(RuntimeError) as context: cli_run(["-p", of_id]) -- assert repr(context.value) == repr( -- RuntimeError("No virtualenv implementation for {}".format(PythonInfo.current_system())), -- ) +- assert repr(context.value) == repr(RuntimeError(f"No virtualenv implementation for {PythonInfo.current_system()}")) + assert repr(context.value).startswith('RuntimeError("No virtualenv implementation for') diff --git a/tests/unit/discovery/py_info/test_py_info.py b/tests/unit/discovery/py_info/test_py_info.py -index c82e506..429294c 100644 +index 9d3d762..3a7d020 100644 --- a/tests/unit/discovery/py_info/test_py_info.py +++ b/tests/unit/discovery/py_info/test_py_info.py -@@ -37,20 +37,20 @@ def test_bad_exe_py_info_raise(tmp_path, session_app_data): +@@ -35,20 +35,20 @@ def test_bad_exe_py_info_raise(tmp_path, session_app_data): assert exe in msg @@ -589,10 +568,10 @@ index c82e506..429294c 100644 @pytest.mark.parametrize( diff --git a/tests/unit/test_run.py b/tests/unit/test_run.py -index 53849f5..a6da734 100644 +index 513a76e..d71d0a7 100644 --- a/tests/unit/test_run.py +++ b/tests/unit/test_run.py -@@ -30,9 +30,6 @@ def test_version(capsys): +@@ -28,9 +28,6 @@ def test_version(capsys): assert not extra assert __version__ in content diff --git a/pyoxidizer.bzl b/pyoxidizer.bzl index 6cdc733b..9524d6b5 100644 --- a/pyoxidizer.bzl +++ b/pyoxidizer.bzl @@ -1,14 +1,23 @@ +def force_fs_libs(policy, resource): + for lib in ["certifi", "poetry-plugin-export"]: + if ( + resource.name == lib or + resource.name.startswith("{}.".format(lib)) or ( + hasattr(resource, "package") and + resource.package == lib + ) + ): + resource.add_location = "filesystem-relative:lib" + def make_exe(): - dist = default_python_distribution(python_version="3.9") + dist = default_python_distribution(python_version="3.10") policy = dist.make_python_packaging_policy() policy.resources_location_fallback = "filesystem-relative:lib" + policy.register_resource_callback(force_fs_libs) config = dist.make_python_interpreter_config() - if not VARS.get("WIN_BUILD"): - config.module_search_paths = ["$ORIGIN/../lib"] - else: - config.module_search_paths = ["$ORIGIN/lib"] + config.module_search_paths = ["$ORIGIN/lib"] config.run_module = "poetry.console.application" exe = dist.to_python_executable( @@ -21,18 +30,25 @@ def make_exe(): # skip patched packages if resource.name == "poetry.core" or resource.name.startswith("poetry.core."): continue - if resource.name == "requests" or resource.name.startswith("requests."): - continue - if resource.name == "virtualenv" or resource.name.startswith("virtualenv."): - continue + for lib in ["virtualenv"]: + if ( + resource.name == lib or + resource.name.startswith("{}.".format(lib)) or ( + hasattr(resource, "package") and + resource.package == lib + ) + ): + continue # skip wheels if resource.name.endswith(".whl"): continue exe.add_python_resource(resource) - exe.add_python_resources(exe.read_package_root("vendor/poetry-core", ["poetry"])) + exe.add_python_resources(exe.read_package_root("vendor/poetry-core/src", ["poetry"])) + exe.add_python_resources(exe.pip_install(["-r", "vendor/poetry-core/vendors/deps.txt"])) exe.add_python_resources(exe.pip_install(["./vendor/importlib_metadata"])) - exe.add_python_resources(exe.pip_install(["./vendor/requests"])) + exe.add_python_resources(exe.pip_install(["./vendor/jsonschema"])) + exe.add_python_resources(exe.pip_install(["./vendor/lark"])) for resource in exe.pip_install(["./vendor/virtualenv"]): # skip wheels @@ -47,10 +63,7 @@ 
def make_embedded_resources(exe): def make_install(exe): files = FileManifest() - if not VARS.get("WIN_BUILD"): - entrypoint = "bin" - else: - entrypoint = "." + entrypoint = "." files.add_python_resource(entrypoint, exe) return files diff --git a/vendor/importlib_metadata/.coveragerc b/vendor/importlib_metadata/.coveragerc new file mode 100644 index 00000000..1b8c50e9 --- /dev/null +++ b/vendor/importlib_metadata/.coveragerc @@ -0,0 +1,13 @@ +[run] +omit = + # leading `*/` for pytest-dev/pytest-cov#456 + */.tox/* + */pep517-build-env-* + tests/* + prepare/* + */_itertools.py + exercises.py + */pip-run-* + +[report] +show_missing = True diff --git a/vendor/importlib_metadata/.editorconfig b/vendor/importlib_metadata/.editorconfig new file mode 100644 index 00000000..304196f8 --- /dev/null +++ b/vendor/importlib_metadata/.editorconfig @@ -0,0 +1,19 @@ +root = true + +[*] +charset = utf-8 +indent_style = tab +indent_size = 4 +insert_final_newline = true +end_of_line = lf + +[*.py] +indent_style = space +max_line_length = 88 + +[*.{yml,yaml}] +indent_style = space +indent_size = 2 + +[*.rst] +indent_style = space diff --git a/vendor/importlib_metadata/.flake8 b/vendor/importlib_metadata/.flake8 new file mode 100644 index 00000000..542d2986 --- /dev/null +++ b/vendor/importlib_metadata/.flake8 @@ -0,0 +1,10 @@ +[flake8] +max-line-length = 88 + +# jaraco/skeleton#34 +max-complexity = 10 + +extend-ignore = + # Black creates whitespace before colon + E203 +enable-extensions = U4 diff --git a/vendor/importlib_metadata/.github/dependabot.yml b/vendor/importlib_metadata/.github/dependabot.yml new file mode 100644 index 00000000..89ff3396 --- /dev/null +++ b/vendor/importlib_metadata/.github/dependabot.yml @@ -0,0 +1,8 @@ +version: 2 +updates: + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "daily" + allow: + - dependency-type: "all" diff --git a/vendor/importlib_metadata/.github/workflows/main.yml b/vendor/importlib_metadata/.github/workflows/main.yml new file mode 100644 index 00000000..19b03429 --- /dev/null +++ b/vendor/importlib_metadata/.github/workflows/main.yml @@ -0,0 +1,87 @@ +name: tests + +on: [push, pull_request] + +jobs: + test: + strategy: + matrix: + python: + # Build on pre-releases until stable, then stable releases. 
+ # actions/setup-python#213 + - ~3.7.0-0 + - ~3.10.0-0 + - ~3.11.0-0 + platform: + - ubuntu-latest + - macos-latest + - windows-latest + runs-on: ${{ matrix.platform }} + steps: + - uses: actions/checkout@v3 + with: + # fetch all branches and tags (to get tags for versioning) + # ref actions/checkout#448 + fetch-depth: 0 + - name: Setup Python + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python }} + - name: Install tox + run: | + python -m pip install tox + - name: Run tests + run: tox + + check: # This job does nothing and is only used for the branch protection + if: always() + + needs: + - test + + runs-on: ubuntu-latest + + steps: + - name: Decide whether the needed jobs succeeded or failed + uses: re-actors/alls-green@release/v1 + with: + jobs: ${{ toJSON(needs) }} + + diffcov: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: 3.9 + - name: Install tox + run: | + python -m pip install tox + - name: Evaluate coverage + run: tox + env: + TOXENV: diffcov + + release: + needs: + - check + if: github.event_name == 'push' && contains(github.ref, 'refs/tags/') + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + - name: Setup Python + uses: actions/setup-python@v3 + with: + python-version: "3.10" + - name: Install tox + run: | + python -m pip install tox + - name: Release + run: tox -e release + env: + TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/vendor/importlib_metadata/.gitlab-ci.yml b/vendor/importlib_metadata/.gitlab-ci.yml deleted file mode 100644 index a8d1bd28..00000000 --- a/vendor/importlib_metadata/.gitlab-ci.yml +++ /dev/null @@ -1,50 +0,0 @@ -image: quay.io/python-devs/ci-image - -stages: - - test - - qa - - docs - - codecov - - deploy - -qa: - script: - - tox -e qa - -tests: - script: - - tox -e py27,py35,py36,py37,py38 - -coverage: - script: - - tox -e py27-cov,py35-cov,py36-cov,py37-cov,py38-cov - artifacts: - paths: - - coverage.xml - -benchmark: - script: - - tox -e perf - -diffcov: - script: - - tox -e py27-diffcov,py35-diffcov,py36-diffcov,py37-diffcov,py38-diffcov - -docs: - script: - - tox -e docs - -codecov: - stage: codecov - dependencies: - - coverage - script: - - codecov - when: on_success - -release: - stage: deploy - only: - - /^v\d+\.\d+(\.\d+)?([abc]\d*)?$/ - script: - - tox -e release diff --git a/vendor/importlib_metadata/.pre-commit-config.yaml b/vendor/importlib_metadata/.pre-commit-config.yaml new file mode 100644 index 00000000..edf6f55f --- /dev/null +++ b/vendor/importlib_metadata/.pre-commit-config.yaml @@ -0,0 +1,5 @@ +repos: +- repo: https://github.com/psf/black + rev: 22.1.0 + hooks: + - id: black diff --git a/vendor/importlib_metadata/.readthedocs.yml b/vendor/importlib_metadata/.readthedocs.yml index 8ae44684..cc698548 100644 --- a/vendor/importlib_metadata/.readthedocs.yml +++ b/vendor/importlib_metadata/.readthedocs.yml @@ -1,5 +1,6 @@ +version: 2 python: - version: 3 - extra_requirements: - - docs - pip_install: true + install: + - path: . + extra_requirements: + - docs diff --git a/vendor/importlib_metadata/CHANGES.rst b/vendor/importlib_metadata/CHANGES.rst new file mode 100644 index 00000000..84684eec --- /dev/null +++ b/vendor/importlib_metadata/CHANGES.rst @@ -0,0 +1,736 @@ +v4.12.0 +======= + +* py-93259: Now raise ``ValueError`` when ``None`` or an empty + string are passed to ``Distribution.from_name`` (and other + callers). 
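A minimal sketch of the stricter validation described in the entry above, assuming only that ``importlib_metadata`` 4.12.0 is importable; the name ``some-pkg`` is a hypothetical placeholder::

    import importlib_metadata

    # v4.12.0: None or an empty string is rejected up front with
    # ValueError instead of failing later in a confusing way.
    try:
        importlib_metadata.Distribution.from_name("")
    except ValueError as exc:
        print("rejected empty name:", exc)

    # A well-formed but absent name still raises PackageNotFoundError.
    try:
        importlib_metadata.version("some-pkg")  # hypothetical name
    except importlib_metadata.PackageNotFoundError:
        print("some-pkg is not installed")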
+ +v4.11.4 +======= + +* #379: In ``PathDistribution._name_from_stem``, avoid including + parts of the extension in the result. +* #381: In ``PathDistribution._normalized_name``, ensure names + loaded from the stem of the filename are also normalized, ensuring + duplicate entry points by packages varying only by non-normalized + name are hidden. + +v4.11.3 +======= + +* #372: Removed cast of path items in FastPath, not needed. + +v4.11.2 +======= + +* #369: Fixed bug where ``EntryPoint.extras`` was returning + match objects and not the extras strings. + +v4.11.1 +======= + +* #367: In ``Distribution.requires`` for egg-info, if ``requires.txt`` + is empty, return an empty list. + +v4.11.0 +======= + +* bpo-46246: Added ``__slots__`` to ``EntryPoints``. + +v4.10.2 +======= + +* #365 and bpo-46546: Avoid leaking ``method_name`` in + ``DeprecatedList``. + +v4.10.1 +======= + +v2.1.3 +======= + +* #361: Avoid potential REDoS in ``EntryPoint.pattern``. + +v4.10.0 +======= + +* #354: Removed ``Distribution._local`` factory. This + functionality was created as a demonstration of the + possible implementation. Now, the + `pep517 `_ package + provides this functionality directly through + `pep517.meta.load `_. + +v4.9.0 +====== + +* Require Python 3.7 or later. + +v4.8.3 +====== + +* #357: Fixed requirement generation from egg-info when a + URL requirement is given. + +v4.8.2 +====== + +v2.1.2 +====== + +* #353: Fixed discovery of distributions when path is empty. + +v4.8.1 +====== + +* #348: Restored support for ``EntryPoint`` access by item, + deprecating support in the process. Users are advised + to use direct member access instead of item-based access:: + + - ep[0] -> ep.name + - ep[1] -> ep.value + - ep[2] -> ep.group + - ep[:] -> ep.name, ep.value, ep.group + +v4.8.0 +====== + +* #337: Rewrote ``EntryPoint`` as a simple class, still + immutable and still with the attributes, but without any + expectation for ``namedtuple`` functionality such as + ``_asdict``. + +v4.7.1 +====== + +* #344: Fixed regression in ``packages_distributions`` when + neither top-level.txt nor a files manifest is present. + +v4.7.0 +====== + +* #330: In ``packages_distributions``, now infer top-level + names from ``.files()`` when a ``top-level.txt`` + (Setuptools-specific metadata) is not present. + +v4.6.4 +====== + +* #334: Correct ``SimplePath`` protocol to match ``pathlib`` + protocol for ``__truediv__``. + +v4.6.3 +====== + +* Moved workaround for #327 to ``_compat`` module. + +v4.6.2 +====== + +* bpo-44784: Avoid errors in test suite when + DeprecationWarnings are treated as errors. + +v4.6.1 +====== + +* #327: Deprecation warnings now honor call stack variance + on PyPy. + +v4.6.0 +====== + +* #326: Performance tests now rely on + `pytest-perf `_. + To disable these tests, which require network access + and a git checkout, pass ``-p no:perf`` to pytest. + +v4.5.0 +====== + +* #319: Remove ``SelectableGroups`` deprecation exception + for flake8. + +v4.4.0 +====== + +* #300: Restore compatibility in the result from + ``Distribution.entry_points`` (``EntryPoints``) to honor + expectations in older implementations and issuing + deprecation warnings for these cases: + + - ``EntryPoints`` objects are once again mutable, allowing + for ``sort()`` and other list-based mutation operations. + Avoid deprecation warnings by casting to a + mutable sequence (e.g. + ``list(dist.entry_points).sort()``). + + - ``EntryPoints`` results once again allow + for access by index. 
To avoid deprecation warnings, + cast the result to a Sequence first + (e.g. ``tuple(dist.entry_points)[0]``). + +v4.3.1 +====== + +* #320: Fix issue where normalized name for eggs was + incorrectly solicited, leading to metadata being + unavailable for eggs. + +v4.3.0 +====== + +* #317: De-duplication of distributions no longer requires + loading the full metadata for ``PathDistribution`` objects, + entry point loading performance by ~10x. + +v4.2.0 +====== + +* Prefer f-strings to ``.format`` calls. + +v4.1.0 +====== + +* #312: Add support for metadata 2.2 (``Dynamic`` field). + +* #315: Add ``SimplePath`` protocol for interface clarity + in ``PathDistribution``. + +v4.0.1 +====== + +* #306: Clearer guidance about compatibility in readme. + +v4.0.0 +====== + +* #304: ``PackageMetadata`` as returned by ``metadata()`` + and ``Distribution.metadata()`` now provides normalized + metadata honoring PEP 566: + + - If a long description is provided in the payload of the + RFC 822 value, it can be retrieved as the ``Description`` + field. + - Any multi-line values in the metadata will be returned as + such. + - For any multi-line values, line continuation characters + are removed. This backward-incompatible change means + that any projects relying on the RFC 822 line continuation + characters being present must be tolerant to them having + been removed. + - Add a ``json`` property that provides the metadata + converted to a JSON-compatible form per PEP 566. + + +v3.10.1 +======= + +* Minor tweaks from CPython. + +v3.10.0 +======= + +* #295: Internal refactoring to unify section parsing logic. + +v3.9.1 +====== + +* #296: Exclude 'prepare' package. +* #297: Fix ValueError when entry points contains comments. + +v3.9.0 +====== + +* Use of Mapping (dict) interfaces on ``SelectableGroups`` + is now flagged as deprecated. Instead, users are advised + to use the select interface for future compatibility. + + Suppress the warning with this filter: + ``ignore:SelectableGroups dict interface``. + + Or with this invocation in the Python environment: + ``warnings.filterwarnings('ignore', 'SelectableGroups dict interface')``. + + Preferably, switch to the ``select`` interface introduced + in 3.7.0. See the + `entry points documentation `_ and changelog for the 3.6 + release below for more detail. + + For some use-cases, especially those that rely on + ``importlib.metadata`` in Python 3.8 and 3.9 or + those relying on older ``importlib_metadata`` (especially + on Python 3.5 and earlier), + `backports.entry_points_selectable `_ + was created to ease the transition. Please have a look + at that project if simply relying on importlib_metadata 3.6+ + is not straightforward. Background in #298. + +* #283: Entry point parsing no longer relies on ConfigParser + and instead uses a custom, one-pass parser to load the + config, resulting in a ~20% performance improvement when + loading entry points. + +v3.8.2 +====== + +* #293: Re-enabled lazy evaluation of path lookup through + a FreezableDefaultDict. + +v3.8.1 +====== + +* #293: Workaround for error in distribution search. + +v3.8.0 +====== + +* #290: Add mtime-based caching for ``FastPath`` and its + lookups, dramatically increasing performance for repeated + distribution lookups. + +v3.7.3 +====== + +* Docs enhancements and cleanup following review in + `GH-24782 `_. + +v3.7.2 +====== + +* Cleaned up cruft in entry_points docstring. + +v3.7.1 +====== + +* Internal refactoring to facilitate ``entry_points() -> dict`` + deprecation. 
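The ``select`` interface referenced throughout the entries above, as a minimal sketch assuming ``importlib_metadata`` 3.6 or later; ``console_scripts`` is simply a commonly present group::

    import importlib_metadata

    # Preferred, future-compatible selection by keyword (v3.6+),
    # avoiding the deprecated dict-style group access.
    for ep in importlib_metadata.entry_points(group="console_scripts"):
        print(ep.name, "->", ep.value)

    # The same filtering applied to an already-fetched collection.
    eps = importlib_metadata.entry_points()
    scripts = eps.select(group="console_scripts")
    print(sorted(scripts.names))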
+ +v3.7.0 +====== + +* #131: Added ``packages_distributions`` to conveniently + resolve a top-level package or module to its distribution(s). + +v3.6.0 +====== + +* #284: Introduces new ``EntryPoints`` object, a tuple of + ``EntryPoint`` objects but with convenience properties for + selecting and inspecting the results: + + - ``.select()`` accepts ``group`` or ``name`` keyword + parameters and returns a new ``EntryPoints`` tuple + with only those that match the selection. + - ``.groups`` property presents all of the group names. + - ``.names`` property presents the names of the entry points. + - Item access (e.g. ``eps[name]``) retrieves a single + entry point by name. + + ``entry_points`` now accepts "selection parameters", + same as ``EntryPoint.select()``. + + ``entry_points()`` now provides a future-compatible + ``SelectableGroups`` object that supplies the above interface + (except item access) but remains a dict for compatibility. + + In the future, ``entry_points()`` will return an + ``EntryPoints`` object for all entry points. + + If passing selection parameters to ``entry_points``, the + future behavior is invoked and an ``EntryPoints`` is the + result. + +* #284: Construction of entry points using + ``dict([EntryPoint, ...])`` is now deprecated and raises + an appropriate DeprecationWarning and will be removed in + a future version. + +* #300: ``Distribution.entry_points`` now presents as an + ``EntryPoints`` object and access by index is no longer + allowed. If access by index is required, cast to a sequence + first. + +v3.5.0 +====== + +* #280: ``entry_points`` now only returns entry points for + unique distributions (by name). + +v3.4.0 +====== + +* #10: Project now declares itself as being typed. +* #272: Additional performance enhancements to distribution + discovery. +* #111: For PyPA projects, add test ensuring that + ``MetadataPathFinder._search_paths`` honors the needed + interface. Method is still private. + +v3.3.0 +====== + +* #265: ``EntryPoint`` objects now expose a ``.dist`` object + referencing the ``Distribution`` when constructed from a + Distribution. + +v3.2.0 +====== + +* The object returned by ``metadata()`` now has a + formally-defined protocol called ``PackageMetadata`` + with declared support for the ``.get_all()`` method. + Fixes #126. + +v3.1.1 +====== + +v2.1.1 +====== + +* #261: Restored compatibility for package discovery for + metadata without version in the name and for legacy + eggs. + +v3.1.0 +====== + +* Merge with 2.1.0. + +v2.1.0 +====== + +* #253: When querying for package metadata, the lookup + now honors + `package normalization rules `_. + +v3.0.0 +====== + +* Require Python 3.6 or later. + +v2.0.0 +====== + +* ``importlib_metadata`` no longer presents a + ``__version__`` attribute. Consumers wishing to + resolve the version of the package should query it + directly with + ``importlib_metadata.version('importlib-metadata')``. + Closes #71. + +v1.7.0 +====== + +* ``PathNotFoundError`` now has a custom ``__str__`` + mentioning "package metadata" being missing to help + guide users to the cause when the package is installed + but no metadata is present. Closes #124. + +v1.6.1 +====== + +* Added ``Distribution._local()`` as a provisional + demonstration of how to load metadata for a local + package. Implicitly requires that + `pep517 `_ is + installed. Ref #42. +* Ensure inputs to FastPath are Unicode. Closes #121. +* Tests now rely on ``importlib.resources.files`` (and + backport) instead of the older ``path`` function. 
+* Support any iterable from ``find_distributions``. + Closes #122. + +v1.6.0 +====== + +* Added ``module`` and ``attr`` attributes to ``EntryPoint`` + +v1.5.2 +====== + +* Fix redundant entries from ``FastPath.zip_children``. + Closes #117. + +v1.5.1 +====== + +* Improve reliability and consistency of compatibility + imports for contextlib and pathlib when running tests. + Closes #116. + +v1.5.0 +====== + +* Additional performance optimizations in FastPath now + saves an additional 20% on a typical call. +* Correct for issue where PyOxidizer finder has no + ``__module__`` attribute. Closes #110. + +v1.4.0 +====== + +* Through careful optimization, ``distribution()`` is + 3-4x faster. Thanks to Antony Lee for the + contribution. Closes #95. + +* When searching through ``sys.path``, if any error + occurs attempting to list a path entry, that entry + is skipped, making the system much more lenient + to errors. Closes #94. + +v1.3.0 +====== + +* Improve custom finders documentation. Closes #105. + +v1.2.0 +====== + +* Once again, drop support for Python 3.4. Ref #104. + +v1.1.3 +====== + +* Restored support for Python 3.4 due to improper version + compatibility declarations in the v1.1.0 and v1.1.1 + releases. Closes #104. + +v1.1.2 +====== + +* Repaired project metadata to correctly declare the + ``python_requires`` directive. Closes #103. + +v1.1.1 +====== + +* Fixed ``repr(EntryPoint)`` on PyPy 3 also. Closes #102. + +v1.1.0 +====== + +* Dropped support for Python 3.4. +* EntryPoints are now pickleable. Closes #96. +* Fixed ``repr(EntryPoint)`` on PyPy 2. Closes #97. + +v1.0.0 +====== + +* Project adopts semver for versioning. + +* Removed compatibility shim introduced in 0.23. + +* For better compatibility with the stdlib implementation and to + avoid the same distributions being discovered by the stdlib and + backport implementations, the backport now disables the + stdlib DistributionFinder during initialization (import time). + Closes #91 and closes #100. + +0.23 +==== + +* Added a compatibility shim to prevent failures on beta releases + of Python before the signature changed to accept the + "context" parameter on find_distributions. This workaround + will have a limited lifespan, not to extend beyond release of + Python 3.8 final. + +0.22 +==== + +* Renamed ``package`` parameter to ``distribution_name`` + as `recommended `_ + in the following functions: ``distribution``, ``metadata``, + ``version``, ``files``, and ``requires``. This + backward-incompatible change is expected to have little impact + as these functions are assumed to be primarily used with + positional parameters. + +0.21 +==== + +* ``importlib.metadata`` now exposes the ``DistributionFinder`` + metaclass and references it in the docs for extending the + search algorithm. +* Add ``Distribution.at`` for constructing a Distribution object + from a known metadata directory on the file system. Closes #80. +* Distribution finders now receive a context object that + supplies ``.path`` and ``.name`` properties. This change + introduces a fundamental backward incompatibility for + any projects implementing a ``find_distributions`` method + on a ``MetaPathFinder``. This new layer of abstraction + allows this context to be supplied directly or constructed + on demand and opens the opportunity for a + ``find_distributions`` method to solicit additional + context from the caller. Closes #85. + +0.20 +==== + +* Clarify in the docs that calls to ``.files`` could return + ``None`` when the metadata is not present. Closes #69. 
+* Return all requirements and not just the first for dist-info + packages. Closes #67. + +0.19 +==== + +* Restrain over-eager egg metadata resolution. +* Add support for entry points with colons in the name. Closes #75. + +0.18 +==== + +* Parse entry points case sensitively. Closes #68 +* Add a version constraint on the backport configparser package. Closes #66 + +0.17 +==== + +* Fix a permission problem in the tests on Windows. + +0.16 +==== + +* Don't crash if there exists an EGG-INFO directory on sys.path. + +0.15 +==== + +* Fix documentation. + +0.14 +==== + +* Removed ``local_distribution`` function from the API. + **This backward-incompatible change removes this + behavior summarily**. Projects should remove their + reliance on this behavior. A replacement behavior is + under review in the `pep517 project + `_. Closes #42. + +0.13 +==== + +* Update docstrings to match PEP 8. Closes #63. +* Merged modules into one module. Closes #62. + +0.12 +==== + +* Add support for eggs. !65; Closes #19. + +0.11 +==== + +* Support generic zip files (not just wheels). Closes #59 +* Support zip files with multiple distributions in them. Closes #60 +* Fully expose the public API in ``importlib_metadata.__all__``. + +0.10 +==== + +* The ``Distribution`` ABC is now officially part of the public API. + Closes #37. +* Fixed support for older single file egg-info formats. Closes #43. +* Fixed a testing bug when ``$CWD`` has spaces in the path. Closes #50. +* Add Python 3.8 to the ``tox`` testing matrix. + +0.9 +=== + +* Fixed issue where entry points without an attribute would raise an + Exception. Closes #40. +* Removed unused ``name`` parameter from ``entry_points()``. Closes #44. +* ``DistributionFinder`` classes must now be instantiated before + being placed on ``sys.meta_path``. + +0.8 +=== + +* This library can now discover/enumerate all installed packages. **This + backward-incompatible change alters the protocol finders must + implement to support distribution package discovery.** Closes #24. +* The signature of ``find_distributions()`` on custom installer finders + should now accept two parameters, ``name`` and ``path`` and + these parameters must supply defaults. +* The ``entry_points()`` method no longer accepts a package name + but instead returns all entry points in a dictionary keyed by the + ``EntryPoint.group``. The ``resolve`` method has been removed. Instead, + call ``EntryPoint.load()``, which has the same semantics as + ``pkg_resources`` and ``entrypoints``. **This is a backward incompatible + change.** +* Metadata is now always returned as Unicode text regardless of + Python version. Closes #29. +* This library can now discover metadata for a 'local' package (found + in the current-working directory). Closes #27. +* Added ``files()`` function for resolving files from a distribution. +* Added a new ``requires()`` function, which returns the requirements + for a package suitable for parsing by + ``packaging.requirements.Requirement``. Closes #18. +* The top-level ``read_text()`` function has been removed. Use + ``PackagePath.read_text()`` on instances returned by the ``files()`` + function. **This is a backward incompatible change.** +* Release dates are now automatically injected into the changelog + based on SCM tags. + +0.7 +=== + +* Fixed issue where packages with dashes in their names would + not be discovered. Closes #21. +* Distribution lookup is now case-insensitive. Closes #20. +* Wheel distributions can no longer be discovered by their module + name. 
Like Path distributions, they must be indicated by their + distribution package name. + +0.6 +=== + +* Removed ``importlib_metadata.distribution`` function. Now + the public interface is primarily the utility functions exposed + in ``importlib_metadata.__all__``. Closes #14. +* Added two new utility functions ``read_text`` and + ``metadata``. + +0.5 +=== + +* Updated README and removed details about Distribution + class, now considered private. Closes #15. +* Added test suite support for Python 3.4+. +* Fixed SyntaxErrors on Python 3.4 and 3.5. !12 +* Fixed errors on Windows joining Path elements. !15 + +0.4 +=== + +* Housekeeping. + +0.3 +=== + +* Added usage documentation. Closes #8 +* Add support for getting metadata from wheels on ``sys.path``. Closes #9 + +0.2 +=== + +* Added ``importlib_metadata.entry_points()``. Closes #1 +* Added ``importlib_metadata.resolve()``. Closes #12 +* Add support for Python 2.7. Closes #4 + +0.1 +=== + +* Initial release. + + +.. + Local Variables: + mode: change-log-mode + indent-tabs-mode: nil + sentence-end-double-space: t + fill-column: 78 + coding: utf-8 + End: diff --git a/vendor/importlib_metadata/LICENSE b/vendor/importlib_metadata/LICENSE index be7e092b..d6456956 100644 --- a/vendor/importlib_metadata/LICENSE +++ b/vendor/importlib_metadata/LICENSE @@ -1,13 +1,202 @@ -Copyright 2017-2019 Jason R. Coombs, Barry Warsaw -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ -http://www.apache.org/licenses/LICENSE-2.0 + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/importlib_metadata/MANIFEST.in b/vendor/importlib_metadata/MANIFEST.in deleted file mode 100644 index 3fcf6d63..00000000 --- a/vendor/importlib_metadata/MANIFEST.in +++ /dev/null @@ -1,5 +0,0 @@ -include *.py MANIFEST.in LICENSE README.rst -global-include *.txt *.rst *.ini *.cfg *.toml *.whl *.egg -exclude .gitignore -prune build -prune .tox diff --git a/vendor/importlib_metadata/README.rst b/vendor/importlib_metadata/README.rst index 2bdd4b8a..78a6af71 100644 --- a/vendor/importlib_metadata/README.rst +++ b/vendor/importlib_metadata/README.rst @@ -1,9 +1,51 @@ -========================= - ``importlib_metadata`` -========================= +.. image:: https://img.shields.io/pypi/v/importlib_metadata.svg + :target: `PyPI link`_ -``importlib_metadata`` is a library to access the metadata for a Python -package. It is intended to be ported to Python 3.8. +.. image:: https://img.shields.io/pypi/pyversions/importlib_metadata.svg + :target: `PyPI link`_ + +.. 
_PyPI link: https://pypi.org/project/importlib_metadata + +.. image:: https://github.com/python/importlib_metadata/workflows/tests/badge.svg + :target: https://github.com/python/importlib_metadata/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. image:: https://readthedocs.org/projects/importlib-metadata/badge/?version=latest + :target: https://importlib-metadata.readthedocs.io/en/latest/?badge=latest + +.. image:: https://img.shields.io/badge/skeleton-2022-informational + :target: https://blog.jaraco.com/skeleton + + +Library to access the metadata for a Python package. + +This package supplies third-party access to the functionality of +`importlib.metadata `_ +including improvements added to subsequent Python versions. + + +Compatibility +============= + +New features are introduced in this third-party library and later merged +into CPython. The following table indicates which versions of this library +were contributed to different versions in the standard library: + +.. list-table:: + :header-rows: 1 + + * - importlib_metadata + - stdlib + * - 4.8 + - 3.11 + * - 4.4 + - 3.10 + * - 1.4 + - 3.8 Usage @@ -30,7 +72,7 @@ tools (or other conforming packages). It does not support: Project details =============== - * Project home: https://gitlab.com/python-devs/importlib_metadata - * Report bugs at: https://gitlab.com/python-devs/importlib_metadata/issues - * Code hosting: https://gitlab.com/python-devs/importlib_metadata.git - * Documentation: http://importlib_metadata.readthedocs.io/ + * Project home: https://github.com/python/importlib_metadata + * Report bugs at: https://github.com/python/importlib_metadata/issues + * Code hosting: https://github.com/python/importlib_metadata + * Documentation: https://importlib_metadata.readthedocs.io/ diff --git a/vendor/importlib_metadata/codecov.yml b/vendor/importlib_metadata/codecov.yml deleted file mode 100644 index 66c7f4bd..00000000 --- a/vendor/importlib_metadata/codecov.yml +++ /dev/null @@ -1,2 +0,0 @@ -codecov: - token: 5eb1bc45-1b7f-43e6-8bc1-f2b02833dba9 diff --git a/vendor/importlib_metadata/conftest.py b/vendor/importlib_metadata/conftest.py new file mode 100644 index 00000000..ab6c8cae --- /dev/null +++ b/vendor/importlib_metadata/conftest.py @@ -0,0 +1,25 @@ +import sys + + +collect_ignore = [ + # this module fails mypy tests because 'setup.py' matches './setup.py' + 'prepare/example/setup.py', +] + + +def pytest_configure(): + remove_importlib_metadata() + + +def remove_importlib_metadata(): + """ + Because pytest imports importlib_metadata, the coverage + reports are broken (#322). So work around the issue by + undoing the changes made by pytest's import of + importlib_metadata (if any). 
+ """ + if sys.meta_path[-1].__class__.__name__ == 'MetadataPathFinder': + del sys.meta_path[-1] + for mod in list(sys.modules): + if mod.startswith('importlib_metadata'): + del sys.modules[mod] diff --git a/vendor/importlib_metadata/coverage.ini b/vendor/importlib_metadata/coverage.ini deleted file mode 100644 index b4d3102f..00000000 --- a/vendor/importlib_metadata/coverage.ini +++ /dev/null @@ -1,24 +0,0 @@ -[run] -branch = true -parallel = true -omit = - setup* - .tox/*/lib/python* - */tests/*.py - */testing/*.py - /usr/local/* - */mod.py -plugins = - coverplug - -[report] -exclude_lines = - pragma: nocover - raise NotImplementedError - raise AssertionError - assert\s - nocoverpy${PYV} - -[paths] -source = - importlib_metadata diff --git a/vendor/importlib_metadata/coverplug.py b/vendor/importlib_metadata/coverplug.py deleted file mode 100644 index 0b0c7cb5..00000000 --- a/vendor/importlib_metadata/coverplug.py +++ /dev/null @@ -1,21 +0,0 @@ -"""Coverage plugin to add exclude lines based on the Python version.""" - -import sys - -from coverage import CoveragePlugin - - -class MyConfigPlugin(CoveragePlugin): - def configure(self, config): - opt_name = 'report:exclude_lines' - exclude_lines = config.get_option(opt_name) - # Python >= 3.6 has os.PathLike. - if sys.version_info >= (3, 6): - exclude_lines.append('pragma: >=36') - else: - exclude_lines.append('pragma: <=35') - config.set_option(opt_name, exclude_lines) - - -def coverage_init(reg, options): - reg.add_configurer(MyConfigPlugin()) diff --git a/vendor/importlib_metadata/importlib_metadata/docs/__init__.py b/vendor/importlib_metadata/docs/__init__.py similarity index 100% rename from vendor/importlib_metadata/importlib_metadata/docs/__init__.py rename to vendor/importlib_metadata/docs/__init__.py diff --git a/vendor/importlib_metadata/docs/api.rst b/vendor/importlib_metadata/docs/api.rst new file mode 100644 index 00000000..02b389ba --- /dev/null +++ b/vendor/importlib_metadata/docs/api.rst @@ -0,0 +1,11 @@ +============= +API Reference +============= + +``importlib_metadata`` module +----------------------------- + +.. 
automodule:: importlib_metadata
+   :members:
+   :undoc-members:
+   :show-inheritance:
diff --git a/vendor/importlib_metadata/docs/conf.py b/vendor/importlib_metadata/docs/conf.py
new file mode 100644
index 00000000..ec2bfe59
--- /dev/null
+++ b/vendor/importlib_metadata/docs/conf.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python3
+
+extensions = ['sphinx.ext.autodoc', 'jaraco.packaging.sphinx', 'rst.linker']
+
+master_doc = "index"
+
+link_files = {
+    '../CHANGES.rst': dict(
+        using=dict(GH='https://github.com'),
+        replace=[
+            dict(
+                pattern=r'(Issue #|\B#)(?P<issue>\d+)',
+                url='{package_url}/issues/{issue}',
+            ),
+            dict(
+                pattern=r'(?m:^((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n)',
+                with_scm='{text}\n{rev[timestamp]:%d %b %Y}\n',
+            ),
+            dict(
+                pattern=r'PEP[- ](?P<pep_number>\d+)',
+                url='https://peps.python.org/pep-{pep_number:0>4}/',
+            ),
+            dict(
+                pattern=r'(Python #|py-)(?P<python>\d+)',
+                url='https://github.com/python/cpython/issues/{python}',
+            ),
+        ],
+    )
+}
+
+# Be strict about any broken references:
+nitpicky = True
+
+# Include Python intersphinx mapping to prevent failures
+# jaraco/skeleton#51
+extensions += ['sphinx.ext.intersphinx']
+intersphinx_mapping = {
+    'python': ('https://docs.python.org/3', None),
+}
+
+intersphinx_mapping.update(
+    importlib_resources=(
+        'https://importlib-resources.readthedocs.io/en/latest/',
+        None,
+    ),
+)
+
+# Workaround for #316
+nitpick_ignore = [
+    ('py:class', 'importlib_metadata.EntryPoints'),
+    ('py:class', 'importlib_metadata.SelectableGroups'),
+    ('py:class', 'importlib_metadata._meta._T'),
+]
diff --git a/vendor/importlib_metadata/docs/history.rst b/vendor/importlib_metadata/docs/history.rst
new file mode 100644
index 00000000..8e217503
--- /dev/null
+++ b/vendor/importlib_metadata/docs/history.rst
@@ -0,0 +1,8 @@
+:tocdepth: 2
+
+.. _changes:
+
+History
+*******
+
+.. include:: ../CHANGES (links).rst
diff --git a/vendor/importlib_metadata/docs/index.rst b/vendor/importlib_metadata/docs/index.rst
new file mode 100644
index 00000000..1ebbf345
--- /dev/null
+++ b/vendor/importlib_metadata/docs/index.rst
@@ -0,0 +1,49 @@
+Welcome to |project| documentation!
+===================================
+
+``importlib_metadata`` is a library which provides an API for accessing an
+installed package's metadata (see :pep:`566`), such as its entry points or its
+top-level name. This functionality intends to replace most uses of
+``pkg_resources`` `entry point API`_ and `metadata API`_. Along with
+:mod:`importlib.resources` and newer (backported as
+:doc:`importlib_resources <importlib_resources:index>`), this package can
+eliminate the need to use the older and less efficient ``pkg_resources``
+package.
+
+``importlib_metadata`` supplies a backport of :mod:`importlib.metadata`,
+enabling early access to features of future Python versions and making
+functionality available for older Python versions. Users are encouraged to
+use the Python standard library where suitable and fall back to
+this library for future compatibility. Developers looking for detailed API
+descriptions should refer to the standard library documentation.
+
+The documentation here includes a general :ref:`usage <using>` guide.
+
+
+.. toctree::
+   :maxdepth: 1
+
+   using
+   api
+   migration
+   history
+
+
+Project details
+===============
+
+ * Project home: https://github.com/python/importlib_metadata
+ * Report bugs at: https://github.com/python/importlib_metadata/issues
+ * Code hosting: https://github.com/python/importlib_metadata
+ * Documentation: https://importlib_metadata.readthedocs.io/
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
+
+.. _`entry point API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#entry-points
+.. _`metadata API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#metadata-api
diff --git a/vendor/importlib_metadata/docs/migration.rst b/vendor/importlib_metadata/docs/migration.rst
new file mode 100644
index 00000000..3c700778
--- /dev/null
+++ b/vendor/importlib_metadata/docs/migration.rst
@@ -0,0 +1,84 @@
+.. _migration:
+
+=================
+ Migration guide
+=================
+
+The following guide will help you migrate common ``pkg_resources``
+APIs to ``importlib_metadata``. ``importlib_metadata`` aims to
+replace the following ``pkg_resources`` APIs:
+
+* ``pkg_resources.iter_entry_points()``
+* ``pkg_resources.require()``
+* convenience functions
+* ``pkg_resources.find_distributions()``
+* ``pkg_resources.get_distribution()``
+
+Other functionality from ``pkg_resources`` is replaced by other
+packages such as
+`importlib_resources <https://importlib-resources.readthedocs.io/en/latest/>`_
+and `packaging <https://packaging.pypa.io/en/latest/>`_.
+
+
+pkg_resources.iter_entry_points()
+=================================
+
+``importlib_metadata`` provides :ref:`entry-points`.
+
+Compatibility note: entry points provided by importlib_metadata
+do not have the following implicit behaviors found in those
+from ``pkg_resources``:
+
+- Each EntryPoint is not automatically validated to match. To
+  ensure each one is validated, invoke any property on the
+  object (e.g. ``ep.name``).
+
+- When invoking ``EntryPoint.load()``, no checks are performed
+  to ensure the declared extras are installed. If this behavior
+  is desired/required, it is left to the user to perform the
+  check and install any dependencies. See
+  `importlib_metadata#368 <https://github.com/python/importlib_metadata/issues/368>`_
+  for more details.
+
+pkg_resources.require()
+=======================
+
+``importlib_metadata`` does not provide support for dynamically
+discovering or requiring distributions, nor does it provide any
+support for managing the "working set". Furthermore,
+``importlib_metadata`` assumes that only one version of a given
+distribution is discoverable at any time (no support for multi-version
+installs). Any project that requires the above behavior needs to
+provide that behavior independently.
+
+``importlib_metadata`` does aim to resolve metadata concerns late
+such that any dynamic changes to package availability should be
+reflected immediately.
+
+Convenience functions
+=====================
+
+In addition to the support for direct access to ``Distribution``
+objects (below), ``importlib_metadata`` presents some top-level
+functions for easy access to the most common metadata:
+
+- :ref:`metadata` queries the metadata fields from the distribution.
+- :ref:`version` provides quick access to the distribution version.
+- :ref:`requirements` presents the requirements of the distribution.
+- :ref:`files` provides file-like access to the data blobs backing
+  the metadata.
+
+pkg_resources.find_distributions()
+==================================
+
+``importlib_metadata`` provides functionality
+similar to ``find_distributions()``.
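+
+As a hedged editorial sketch (not part of the vendored docs), the
+equivalent discovery calls side by side; the search path ``'.'`` is a
+hypothetical example value::
+
+    import pkg_resources
+    dists_old = pkg_resources.find_distributions('.')         # old
+
+    import importlib_metadata
+    dists_new = importlib_metadata.distributions(path=['.'])  # new
+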
Both ``distributions(...)`` and +``Distribution.discover(...)`` return an iterable of :ref:`distributions` +matching the indicated parameters. + +pkg_resources.get_distribution() +================================= + +Similar to ``distributions``, the ``distribution()`` function provides +access to a single distribution by name. + diff --git a/vendor/importlib_metadata/docs/using.rst b/vendor/importlib_metadata/docs/using.rst new file mode 100644 index 00000000..8bd92f62 --- /dev/null +++ b/vendor/importlib_metadata/docs/using.rst @@ -0,0 +1,341 @@ +.. _using: + +================================= + Using :mod:`!importlib_metadata` +================================= + +``importlib_metadata`` is a library that provides for access to installed +package metadata. Built in part on Python's import system, this library +intends to replace similar functionality in the `entry point +API`_ and `metadata API`_ of ``pkg_resources``. Along with +:mod:`importlib.resources` (with new features backported to +:doc:`importlib_resources `), +this package can eliminate the need to use the older +and less efficient +``pkg_resources`` package. + +By "installed package" we generally mean a third-party package installed into +Python's ``site-packages`` directory via tools such as `pip +`_. Specifically, +it means a package with either a discoverable ``dist-info`` or ``egg-info`` +directory, and metadata defined by :pep:`566` or its older specifications. +By default, package metadata can live on the file system or in zip archives on +:data:`sys.path`. Through an extension mechanism, the metadata can live almost +anywhere. + + +.. seealso:: + + https://importlib-metadata.readthedocs.io/ + The documentation for ``importlib_metadata``, which supplies a + backport of ``importlib.metadata``. + + +Overview +======== + +Let's say you wanted to get the version string for a package you've installed +using ``pip``. We start by creating a virtual environment and installing +something into it:: + + $ python3 -m venv example + $ source example/bin/activate + (example) $ pip install importlib_metadata + (example) $ pip install wheel + +You can get the version string for ``wheel`` by running the following:: + + (example) $ python + >>> from importlib_metadata import version + >>> version('wheel') + '0.32.3' + +You can also get a collection of entry points selectable by properties of the EntryPoint (typically 'group' or 'name'), such as +``console_scripts``, ``distutils.commands`` and others. Each group contains a +collection of :ref:`EntryPoint ` objects. + +You can get the :ref:`metadata for a distribution `:: + + >>> list(metadata('wheel')) + ['Metadata-Version', 'Name', 'Version', 'Summary', 'Home-page', 'Author', 'Author-email', 'Maintainer', 'Maintainer-email', 'License', 'Project-URL', 'Project-URL', 'Project-URL', 'Keywords', 'Platform', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Requires-Python', 'Provides-Extra', 'Requires-Dist', 'Requires-Dist'] + +You can also get a :ref:`distribution's version number `, list its +:ref:`constituent files `, and get a list of the distribution's +:ref:`requirements`. + + +Functional API +============== + +This package provides the following functionality via its public API. + + +.. _entry-points: + +Entry points +------------ + +The ``entry_points()`` function returns a collection of entry points. 
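+
+A minimal migration sketch (editorial addition, not from the vendored
+docs; the group name ``myapp.plugins`` is a hypothetical example)::
+
+    import pkg_resources
+    for ep in pkg_resources.iter_entry_points('myapp.plugins'):   # old
+        plugin = ep.load()
+
+    from importlib_metadata import entry_points
+    for ep in entry_points(group='myapp.plugins'):                # new
+        plugin = ep.load()
+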
+Entry points are represented by ``EntryPoint`` instances; +each ``EntryPoint`` has a ``.name``, ``.group``, and ``.value`` attributes and +a ``.load()`` method to resolve the value. There are also ``.module``, +``.attr``, and ``.extras`` attributes for getting the components of the +``.value`` attribute. + +Query all entry points:: + + >>> eps = entry_points() + +The ``entry_points()`` function returns an ``EntryPoints`` object, +a collection of all ``EntryPoint`` objects with ``names`` and ``groups`` +attributes for convenience:: + + >>> sorted(eps.groups) + ['console_scripts', 'distutils.commands', 'distutils.setup_keywords', 'egg_info.writers', 'setuptools.installation'] + +``EntryPoints`` has a ``select`` method to select entry points +matching specific properties. Select entry points in the +``console_scripts`` group:: + + >>> scripts = eps.select(group='console_scripts') + +Equivalently, since ``entry_points`` passes keyword arguments +through to select:: + + >>> scripts = entry_points(group='console_scripts') + +Pick out a specific script named "wheel" (found in the wheel project):: + + >>> 'wheel' in scripts.names + True + >>> wheel = scripts['wheel'] + +Equivalently, query for that entry point during selection:: + + >>> (wheel,) = entry_points(group='console_scripts', name='wheel') + >>> (wheel,) = entry_points().select(group='console_scripts', name='wheel') + +Inspect the resolved entry point:: + + >>> wheel + EntryPoint(name='wheel', value='wheel.cli:main', group='console_scripts') + >>> wheel.module + 'wheel.cli' + >>> wheel.attr + 'main' + >>> wheel.extras + [] + >>> main = wheel.load() + >>> main + + +The ``group`` and ``name`` are arbitrary values defined by the package author +and usually a client will wish to resolve all entry points for a particular +group. Read `the setuptools docs +`_ +for more information on entry points, their definition, and usage. + +*Compatibility Note* + +The "selectable" entry points were introduced in ``importlib_metadata`` +3.6 and Python 3.10. Prior to those changes, ``entry_points`` accepted +no parameters and always returned a dictionary of entry points, keyed +by group. For compatibility, if no parameters are passed to entry_points, +a ``SelectableGroups`` object is returned, implementing that dict +interface. In the future, calling ``entry_points`` with no parameters +will return an ``EntryPoints`` object. Users should rely on the selection +interface to retrieve entry points by group. + + +.. _metadata: + +Distribution metadata +--------------------- + +Every distribution includes some metadata, which you can extract using the +``metadata()`` function:: + + >>> wheel_metadata = metadata('wheel') + +The keys of the returned data structure, a ``PackageMetadata``, +name the metadata keywords, and +the values are returned unparsed from the distribution metadata:: + + >>> wheel_metadata['Requires-Python'] + '>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*' + +``PackageMetadata`` also presents a ``json`` attribute that returns +all the metadata in a JSON-compatible form per PEP 566:: + + >>> wheel_metadata.json['requires_python'] + '>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*' + +.. note:: + + The actual type of the object returned by ``metadata()`` is an + implementation detail and should be accessed only through the interface + described by the + `PackageMetadata protocol `. + + +.. 
_version:
+
+Distribution versions
+---------------------
+
+The ``version()`` function is the quickest way to get a distribution's version
+number, as a string::
+
+    >>> version('wheel')
+    '0.32.3'
+
+
+.. _files:
+
+Distribution files
+------------------
+
+You can also get the full set of files contained within a distribution. The
+``files()`` function takes a distribution package name and returns all of the
+files installed by this distribution. Each file object returned is a
+``PackagePath``, a :class:`pathlib.PurePath` derived object with additional
+``dist``, ``size``, and ``hash`` properties as indicated by the metadata. For
+example::
+
+    >>> util = [p for p in files('wheel') if 'util.py' in str(p)][0]
+    >>> util
+    PackagePath('wheel/util.py')
+    >>> util.size
+    859
+    >>> util.dist
+
+    >>> util.hash
+
+
+Once you have the file, you can also read its contents::
+
+    >>> print(util.read_text())
+    import base64
+    import sys
+    ...
+    def as_bytes(s):
+        if isinstance(s, text_type):
+            return s.encode('utf-8')
+        return s
+
+You can also use the ``locate`` method to get the absolute path to the
+file::
+
+    >>> util.locate()  # doctest: +SKIP
+    PosixPath('/home/gustav/example/lib/site-packages/wheel/util.py')
+
+In the case where the metadata file listing files
+(RECORD or SOURCES.txt) is missing, ``files()`` will
+return ``None``. The caller may wish to wrap calls to
+``files()`` in `always_iterable
+<https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.always_iterable>`_
+or otherwise guard against this condition if the target
+distribution is not known to have the metadata present.
+
+.. _requirements:
+
+Distribution requirements
+-------------------------
+
+To get the full set of requirements for a distribution, use the ``requires()``
+function::
+
+    >>> requires('wheel')
+    ["pytest (>=3.0.0) ; extra == 'test'", "pytest-cov ; extra == 'test'"]
+
+
+Package distributions
+---------------------
+
+A convenience method to resolve the distribution or
+distributions (in the case of a namespace package) for top-level
+Python packages or modules::
+
+    >>> packages_distributions()
+    {'importlib_metadata': ['importlib-metadata'], 'yaml': ['PyYAML'], 'jaraco': ['jaraco.classes', 'jaraco.functools'], ...}
+
+.. _distributions:
+
+Distributions
+=============
+
+While the above API is the most common and convenient usage, you can get all
+of that information from the ``Distribution`` class. A ``Distribution`` is an
+abstract object that represents the metadata for a Python package. You can
+get the ``Distribution`` instance::
+
+    >>> from importlib_metadata import distribution
+    >>> dist = distribution('wheel')
+
+Thus, an alternative way to get the version number is through the
+``Distribution`` instance::
+
+    >>> dist.version
+    '0.32.3'
+
+There are all kinds of additional metadata available on the ``Distribution``
+instance::
+
+    >>> dist.metadata['Requires-Python']
+    '>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*'
+    >>> dist.metadata['License']
+    'MIT'
+
+The full set of available metadata is not described here. See :pep:`566`
+for additional details.
+
+
+Distribution Discovery
+======================
+
+By default, this package provides built-in support for discovery of metadata
+for file system and zip file packages. This metadata finder search defaults to
+``sys.path``, but varies slightly in how it interprets those values from how
+other import machinery does. In particular:
+
+- ``importlib_metadata`` does not honor :class:`bytes` objects on ``sys.path``.
+- ``importlib_metadata`` will incidentally honor :py:class:`pathlib.Path` objects on ``sys.path`` even though such values will be ignored for imports. + + +Extending the search algorithm +============================== + +Because package metadata is not available through :data:`sys.path` searches, or +package loaders directly, the metadata for a package is found through import +system `finders`_. To find a distribution package's metadata, +``importlib.metadata`` queries the list of :term:`meta path finders ` on +:data:`sys.meta_path`. + +By default ``importlib_metadata`` installs a finder for distribution packages +found on the file system. This finder doesn't actually find any *packages*, +but it can find the packages' metadata. + +The abstract class :py:class:`importlib.abc.MetaPathFinder` defines the +interface expected of finders by Python's import system. +``importlib_metadata`` extends this protocol by looking for an optional +``find_distributions`` callable on the finders from +:data:`sys.meta_path` and presents this extended interface as the +``DistributionFinder`` abstract base class, which defines this abstract +method:: + + @abc.abstractmethod + def find_distributions(context=DistributionFinder.Context()): + """Return an iterable of all Distribution instances capable of + loading the metadata for packages for the indicated ``context``. + """ + +The ``DistributionFinder.Context`` object provides ``.path`` and ``.name`` +properties indicating the path to search and name to match and may +supply other relevant context. + +What this means in practice is that to support finding distribution package +metadata in locations other than the file system, subclass +``Distribution`` and implement the abstract methods. Then from +a custom finder, return instances of this derived ``Distribution`` in the +``find_distributions()`` method. + + +.. _`entry point API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#entry-points +.. _`metadata API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#metadata-api +.. 
_`finders`: https://docs.python.org/3/reference/import.html#finders-and-loaders diff --git a/vendor/importlib_metadata/exercises.py b/vendor/importlib_metadata/exercises.py new file mode 100644 index 00000000..c88fa983 --- /dev/null +++ b/vendor/importlib_metadata/exercises.py @@ -0,0 +1,45 @@ +from pytest_perf.deco import extras + + +@extras('perf') +def discovery_perf(): + "discovery" + import importlib_metadata # end warmup + + importlib_metadata.distribution('ipython') + + +def entry_points_perf(): + "entry_points()" + import importlib_metadata # end warmup + + importlib_metadata.entry_points() + + +@extras('perf') +def cached_distribution_perf(): + "cached distribution" + import importlib_metadata + + importlib_metadata.distribution('ipython') # end warmup + importlib_metadata.distribution('ipython') + + +@extras('perf') +def uncached_distribution_perf(): + "uncached distribution" + import importlib + import importlib_metadata + + # end warmup + importlib.invalidate_caches() + importlib_metadata.distribution('ipython') + + +def entrypoint_regexp_perf(): + import importlib_metadata + import re + + input = '0' + ' ' * 2**10 + '0' # end warmup + + re.match(importlib_metadata.EntryPoint.pattern, input) diff --git a/vendor/importlib_metadata/importlib_metadata/__init__.py b/vendor/importlib_metadata/importlib_metadata/__init__.py index 0d8feb51..9a5d8d2f 100644 --- a/vendor/importlib_metadata/importlib_metadata/__init__.py +++ b/vendor/importlib_metadata/importlib_metadata/__init__.py @@ -1,85 +1,178 @@ -from __future__ import unicode_literals, absolute_import - -import io import os import re import abc import csv import sys import zipp +import email +import pathlib import operator +import textwrap +import warnings import functools import itertools import posixpath import collections +from . import _adapters, _meta +from ._collections import FreezableDefaultDict, Pair from ._compat import ( - install, NullFinder, - ConfigParser, - suppress, - map, - FileNotFoundError, - IsADirectoryError, - NotADirectoryError, - PermissionError, - pathlib, - ModuleNotFoundError, - MetaPathFinder, - email_message_from_string, - PyPy_repr, - unique_ordered, - str, - ) + install, + pypy_partial, +) +from ._functools import method_cache, pass_none +from ._itertools import always_iterable, unique_everseen +from ._meta import PackageMetadata, SimplePath + +from contextlib import suppress from importlib import import_module +from importlib.abc import MetaPathFinder from itertools import starmap - - -__metaclass__ = type +from typing import List, Mapping, Optional, Union __all__ = [ 'Distribution', 'DistributionFinder', + 'PackageMetadata', 'PackageNotFoundError', 'distribution', 'distributions', 'entry_points', 'files', 'metadata', + 'packages_distributions', 'requires', 'version', - ] +] class PackageNotFoundError(ModuleNotFoundError): """The package was not found.""" def __str__(self): - tmpl = "No package metadata was found for {self.name}" - return tmpl.format(**locals()) + return f"No package metadata was found for {self.name}" @property def name(self): - name, = self.args + (name,) = self.args return name -class EntryPoint( - PyPy_repr, - collections.namedtuple('EntryPointBase', 'name value group')): +class Sectioned: + """ + A simple entry point config parser for performance + + >>> for item in Sectioned.read(Sectioned._sample): + ... 
print(item)
+    Pair(name='sec1', value='# comments ignored')
+    Pair(name='sec1', value='a = 1')
+    Pair(name='sec1', value='b = 2')
+    Pair(name='sec2', value='a = 2')
+
+    >>> res = Sectioned.section_pairs(Sectioned._sample)
+    >>> item = next(res)
+    >>> item.name
+    'sec1'
+    >>> item.value
+    Pair(name='a', value='1')
+    >>> item = next(res)
+    >>> item.value
+    Pair(name='b', value='2')
+    >>> item = next(res)
+    >>> item.name
+    'sec2'
+    >>> item.value
+    Pair(name='a', value='2')
+    >>> list(res)
+    []
+    """
+
+    _sample = textwrap.dedent(
+        """
+        [sec1]
+        # comments ignored
+        a = 1
+        b = 2
+
+        [sec2]
+        a = 2
+        """
+    ).lstrip()
+
+    @classmethod
+    def section_pairs(cls, text):
+        return (
+            section._replace(value=Pair.parse(section.value))
+            for section in cls.read(text, filter_=cls.valid)
+            if section.name is not None
+        )
+
+    @staticmethod
+    def read(text, filter_=None):
+        lines = filter(filter_, map(str.strip, text.splitlines()))
+        name = None
+        for value in lines:
+            section_match = value.startswith('[') and value.endswith(']')
+            if section_match:
+                name = value.strip('[]')
+                continue
+            yield Pair(name, value)
+
+    @staticmethod
+    def valid(line):
+        return line and not line.startswith('#')
+
+
+class DeprecatedTuple:
+    """
+    Provide subscript item access for backward compatibility.
+
+    >>> recwarn = getfixture('recwarn')
+    >>> ep = EntryPoint(name='name', value='value', group='group')
+    >>> ep[:]
+    ('name', 'value', 'group')
+    >>> ep[0]
+    'name'
+    >>> len(recwarn)
+    1
+    """
+
+    _warn = functools.partial(
+        warnings.warn,
+        "EntryPoint tuple interface is deprecated. Access members by name.",
+        DeprecationWarning,
+        stacklevel=pypy_partial(2),
+    )
+
+    def __getitem__(self, item):
+        self._warn()
+        return self._key()[item]
+
+
+class EntryPoint(DeprecatedTuple):
     """An entry point as defined by Python packaging conventions.
 
     See `the packaging docs on entry points
     <https://packaging.python.org/specifications/entry-points/>`_
     for more information.
+
+    >>> ep = EntryPoint(
+    ...     name=None, group=None, value='package.module:attr [extra1, extra2]')
+    >>> ep.module
+    'package.module'
+    >>> ep.attr
+    'attr'
+    >>> ep.extras
+    ['extra1', 'extra2']
     """
 
     pattern = re.compile(
         r'(?P<module>[\w.]+)\s*'
-        r'(:\s*(?P<attr>[\w.]+))?\s*'
-        r'(?P<extras>\[.*\])?\s*$'
-        )
+        r'(:\s*(?P<attr>[\w.]+)\s*)?'
+        r'((?P<extras>\[.*\])\s*)?$'
+    )
     """
     A regular expression describing the syntax for an entry point,
     which might look like:
@@ -96,6 +189,11 @@ class EntryPoint(
     following the attr, and following any extras.
     """
+
+    dist: Optional['Distribution'] = None
+
+    def __init__(self, name, value, group):
+        vars(self).update(name=name, value=value, group=group)
+
     def load(self):
         """Load the entry point from its definition. If only a module
         is indicated by the value, return that module.
Otherwise, @@ -119,39 +217,292 @@ def attr(self): @property def extras(self): match = self.pattern.match(self.value) - return list(re.finditer(r'\w+', match.group('extras') or '')) + return re.findall(r'\w+', match.group('extras') or '') - @classmethod - def _from_config(cls, config): - return [ - cls(name, value, group) - for group in config.sections() - for name, value in config.items(group) - ] - - @classmethod - def _from_text(cls, text): - config = ConfigParser(delimiters='=') - # case sensitive: https://stackoverflow.com/q/1611799/812183 - config.optionxform = str - try: - config.read_string(text) - except AttributeError: # pragma: nocover - # Python 2 has no read_string - config.readfp(io.StringIO(text)) - return EntryPoint._from_config(config) + def _for(self, dist): + vars(self).update(dist=dist) + return self def __iter__(self): """ - Supply iter so one may construct dicts of EntryPoints easily. + Supply iter so one may construct dicts of EntryPoints by name. """ + msg = ( + "Construction of dict of EntryPoints is deprecated in " + "favor of EntryPoints." + ) + warnings.warn(msg, DeprecationWarning) return iter((self.name, self)) - def __reduce__(self): + def matches(self, **params): + """ + EntryPoint matches the given parameters. + + >>> ep = EntryPoint(group='foo', name='bar', value='bing:bong [extra1, extra2]') + >>> ep.matches(group='foo') + True + >>> ep.matches(name='bar', value='bing:bong [extra1, extra2]') + True + >>> ep.matches(group='foo', name='other') + False + >>> ep.matches() + True + >>> ep.matches(extras=['extra1', 'extra2']) + True + >>> ep.matches(module='bing') + True + >>> ep.matches(attr='bong') + True + """ + attrs = (getattr(self, param) for param in params) + return all(map(operator.eq, params.values(), attrs)) + + def _key(self): + return self.name, self.value, self.group + + def __lt__(self, other): + return self._key() < other._key() + + def __eq__(self, other): + return self._key() == other._key() + + def __setattr__(self, name, value): + raise AttributeError("EntryPoint objects are immutable.") + + def __repr__(self): return ( - self.__class__, - (self.name, self.value, self.group), + f'EntryPoint(name={self.name!r}, value={self.value!r}, ' + f'group={self.group!r})' + ) + + def __hash__(self): + return hash(self._key()) + + +class DeprecatedList(list): + """ + Allow an otherwise immutable object to implement mutability + for compatibility. + + >>> recwarn = getfixture('recwarn') + >>> dl = DeprecatedList(range(3)) + >>> dl[0] = 1 + >>> dl.append(3) + >>> del dl[3] + >>> dl.reverse() + >>> dl.sort() + >>> dl.extend([4]) + >>> dl.pop(-1) + 4 + >>> dl.remove(1) + >>> dl += [5] + >>> dl + [6] + [1, 2, 5, 6] + >>> dl + (6,) + [1, 2, 5, 6] + >>> dl.insert(0, 0) + >>> dl + [0, 1, 2, 5] + >>> dl == [0, 1, 2, 5] + True + >>> dl == (0, 1, 2, 5) + True + >>> len(recwarn) + 1 + """ + + __slots__ = () + + _warn = functools.partial( + warnings.warn, + "EntryPoints list interface is deprecated. 
Cast to list if needed.", + DeprecationWarning, + stacklevel=pypy_partial(2), + ) + + def _wrap_deprecated_method(method_name: str): # type: ignore + def wrapped(self, *args, **kwargs): + self._warn() + return getattr(super(), method_name)(*args, **kwargs) + + return method_name, wrapped + + locals().update( + map( + _wrap_deprecated_method, + '__setitem__ __delitem__ append reverse extend pop remove ' + '__iadd__ insert sort'.split(), + ) + ) + + def __add__(self, other): + if not isinstance(other, tuple): + self._warn() + other = tuple(other) + return self.__class__(tuple(self) + other) + + def __eq__(self, other): + if not isinstance(other, tuple): + self._warn() + other = tuple(other) + + return tuple(self).__eq__(other) + + +class EntryPoints(DeprecatedList): + """ + An immutable collection of selectable EntryPoint objects. + """ + + __slots__ = () + + def __getitem__(self, name): # -> EntryPoint: + """ + Get the EntryPoint in self matching name. + """ + if isinstance(name, int): + warnings.warn( + "Accessing entry points by index is deprecated. " + "Cast to tuple if needed.", + DeprecationWarning, + stacklevel=2, ) + return super().__getitem__(name) + try: + return next(iter(self.select(name=name))) + except StopIteration: + raise KeyError(name) + + def select(self, **params): + """ + Select entry points from self that match the + given parameters (typically group and/or name). + """ + return EntryPoints(ep for ep in self if ep.matches(**params)) + + @property + def names(self): + """ + Return the set of all names of all entry points. + """ + return {ep.name for ep in self} + + @property + def groups(self): + """ + Return the set of all groups of all entry points. + + For coverage while SelectableGroups is present. + >>> EntryPoints().groups + set() + """ + return {ep.group for ep in self} + + @classmethod + def _from_text_for(cls, text, dist): + return cls(ep._for(dist) for ep in cls._from_text(text)) + + @staticmethod + def _from_text(text): + return ( + EntryPoint(name=item.value.name, value=item.value.value, group=item.name) + for item in Sectioned.section_pairs(text or '') + ) + + +class Deprecated: + """ + Compatibility add-in for mapping to indicate that + mapping behavior is deprecated. + + >>> recwarn = getfixture('recwarn') + >>> class DeprecatedDict(Deprecated, dict): pass + >>> dd = DeprecatedDict(foo='bar') + >>> dd.get('baz', None) + >>> dd['foo'] + 'bar' + >>> list(dd) + ['foo'] + >>> list(dd.keys()) + ['foo'] + >>> 'foo' in dd + True + >>> list(dd.values()) + ['bar'] + >>> len(recwarn) + 1 + """ + + _warn = functools.partial( + warnings.warn, + "SelectableGroups dict interface is deprecated. Use select.", + DeprecationWarning, + stacklevel=pypy_partial(2), + ) + + def __getitem__(self, name): + self._warn() + return super().__getitem__(name) + + def get(self, name, default=None): + self._warn() + return super().get(name, default) + + def __iter__(self): + self._warn() + return super().__iter__() + + def __contains__(self, *args): + self._warn() + return super().__contains__(*args) + + def keys(self): + self._warn() + return super().keys() + + def values(self): + self._warn() + return super().values() + + +class SelectableGroups(Deprecated, dict): + """ + A backward- and forward-compatible result from + entry_points that fully implements the dict interface. 
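+
+    An illustrative sketch (editorial addition, not in the vendored
+    source) of loading entry points and selecting by group:
+
+    >>> eps = [EntryPoint(name='a', value='m:f', group='g')]
+    >>> SelectableGroups.load(eps).select(group='g').names
+    {'a'}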
+ """ + + @classmethod + def load(cls, eps): + by_group = operator.attrgetter('group') + ordered = sorted(eps, key=by_group) + grouped = itertools.groupby(ordered, by_group) + return cls((group, EntryPoints(eps)) for group, eps in grouped) + + @property + def _all(self): + """ + Reconstruct a list of all entrypoints from the groups. + """ + groups = super(Deprecated, self).values() + return EntryPoints(itertools.chain.from_iterable(groups)) + + @property + def groups(self): + return self._all.groups + + @property + def names(self): + """ + for coverage: + >>> SelectableGroups().names + set() + """ + return self._all.names + + def select(self, **params): + if not params: + return self + return self._all.select(**params) class PackagePath(pathlib.PurePosixPath): @@ -175,7 +526,7 @@ def __init__(self, spec): self.mode, _, self.value = spec.partition('=') def __repr__(self): - return ''.format(self.mode, self.value) + return f'' class Distribution: @@ -197,7 +548,7 @@ def locate_file(self, path): """ @classmethod - def from_name(cls, name): + def from_name(cls, name: str): """Return the Distribution for the given package name. :param name: The name of the distribution package to search for. @@ -205,13 +556,13 @@ def from_name(cls, name): package, if found. :raises PackageNotFoundError: When the named package's distribution metadata cannot be found. + :raises ValueError: When an invalid value is supplied for name. """ - for resolver in cls._discover_resolvers(): - dists = resolver(DistributionFinder.Context(name=name)) - dist = next(iter(dists), None) - if dist is not None: - return dist - else: + if not name: + raise ValueError("A distribution name is required.") + try: + return next(cls.discover(name=name)) + except StopIteration: raise PackageNotFoundError(name) @classmethod @@ -233,7 +584,7 @@ def discover(cls, **kwargs): itertools.chain.from_iterable( resolver(context) for resolver in cls._discover_resolvers() - ) + ) ) @staticmethod @@ -249,24 +600,12 @@ def at(path): def _discover_resolvers(): """Search the meta_path for resolvers.""" declared = ( - getattr(finder, 'find_distributions', None) - for finder in sys.meta_path - ) + getattr(finder, 'find_distributions', None) for finder in sys.meta_path + ) return filter(None, declared) - @classmethod - def _local(cls, root='.'): - from pep517 import build, meta - system = build.compat_system(root) - builder = functools.partial( - meta.build, - source_dir=root, - system=system, - ) - return PathDistribution(zipp.Path(meta.build_as_zip(builder))) - @property - def metadata(self): + def metadata(self) -> _meta.PackageMetadata: """Return the parsed metadata for this Distribution. The returned object will have keys that name the various bits of @@ -279,8 +618,18 @@ def metadata(self): # effect is to just end up using the PathDistribution's self._path # (which points to the egg-info file) attribute unchanged. 
or self.read_text('') - ) - return email_message_from_string(text) + ) + return _adapters.Message(email.message_from_string(text)) + + @property + def name(self): + """Return the 'Name' metadata for the distribution package.""" + return self.metadata['Name'] + + @property + def _normalized_name(self): + """Return a normalized version of the name.""" + return Prepared.normalize(self.name) @property def version(self): @@ -289,7 +638,7 @@ def version(self): @property def entry_points(self): - return EntryPoint._from_text(self.read_text('entry_points.txt')) + return EntryPoints._from_text_for(self.read_text('entry_points.txt'), self) @property def files(self): @@ -302,7 +651,6 @@ def files(self): missing. Result may be empty if the metadata exists but is empty. """ - file_lines = self._read_files_distinfo() or self._read_files_egginfo() def make_file(name, hash=None, size_str=None): result = PackagePath(name) @@ -311,7 +659,11 @@ def make_file(name, hash=None, size_str=None): result.dist = self return result - return file_lines and list(starmap(make_file, csv.reader(file_lines))) + @pass_none + def make_files(lines): + return list(starmap(make_file, csv.reader(lines))) + + return make_files(self._read_files_distinfo() or self._read_files_egginfo()) def _read_files_distinfo(self): """ @@ -339,27 +691,11 @@ def _read_dist_info_reqs(self): def _read_egg_info_reqs(self): source = self.read_text('requires.txt') - return source and self._deps_from_requires_text(source) + return pass_none(self._deps_from_requires_text)(source) @classmethod def _deps_from_requires_text(cls, source): - section_pairs = cls._read_sections(source.splitlines()) - sections = { - section: list(map(operator.itemgetter('line'), results)) - for section, results in - itertools.groupby(section_pairs, operator.itemgetter('section')) - } - return cls._convert_egg_info_reqs_to_simple_reqs(sections) - - @staticmethod - def _read_sections(lines): - section = None - for line in filter(None, lines): - section_match = re.match(r'\[(.*)\]$', line) - if section_match: - section = section_match.group(1) - continue - yield locals() + return cls._convert_egg_info_reqs_to_simple_reqs(Sectioned.read(source)) @staticmethod def _convert_egg_info_reqs_to_simple_reqs(sections): @@ -372,20 +708,29 @@ def _convert_egg_info_reqs_to_simple_reqs(sections): requirement. This method converts the former to the latter. See _test_deps_from_requires_text for an example. """ + def make_condition(name): - return name and 'extra == "{name}"'.format(name=name) + return name and f'extra == "{name}"' - def parse_condition(section): + def quoted_marker(section): section = section or '' extra, sep, markers = section.partition(':') if extra and markers: - markers = '({markers})'.format(markers=markers) + markers = f'({markers})' conditions = list(filter(None, [markers, make_condition(extra)])) return '; ' + ' and '.join(conditions) if conditions else '' - for section, deps in sections.items(): - for dep in deps: - yield dep + parse_condition(section) + def url_req_space(req): + """ + PEP 508 requires a space between the url_spec and the quoted_marker. + Ref python/importlib_metadata#357. + """ + # '@' is uniquely indicative of a url_req. + return ' ' * ('@' in req) + + for section in sections: + space = url_req_space(section.value) + yield section.value + space + quoted_marker(section.name) class DistributionFinder(MetaPathFinder): @@ -417,10 +762,11 @@ def __init__(self, **kwargs): @property def path(self): """ - The path that a distribution finder should search. 
+ The sequence of directory path that a distribution finder + should search. - Typically refers to Python package paths and defaults - to ``sys.path``. + Typically refers to Python installed package paths such as + "site-packages" directories and defaults to ``sys.path``. """ return vars(self).get('path', sys.path) @@ -439,18 +785,24 @@ class FastPath: """ Micro-optimized class for searching a path for children. + + >>> FastPath('').children() + ['...'] """ + @functools.lru_cache() # type: ignore + def __new__(cls, root): + return super().__new__(cls) + def __init__(self, root): - self.root = str(root) - self.base = os.path.basename(self.root).lower() + self.root = root def joinpath(self, child): return pathlib.Path(self.root, child) def children(self): with suppress(Exception): - return os.listdir(self.root or '') + return os.listdir(self.root or '.') with suppress(Exception): return self.zip_children() return [] @@ -460,48 +812,90 @@ def zip_children(self): names = zip_path.root.namelist() self.joinpath = zip_path.joinpath - return unique_ordered( - child.split(posixpath.sep, 1)[0] - for child in names - ) - - def is_egg(self, search): - base = self.base - return ( - base == search.versionless_egg_name - or base.startswith(search.prefix) - and base.endswith('.egg')) + return dict.fromkeys(child.split(posixpath.sep, 1)[0] for child in names) def search(self, name): - for child in self.children(): - n_low = child.lower() - if (n_low in name.exact_matches - or n_low.startswith(name.prefix) - and n_low.endswith(name.suffixes) - # legacy case: - or self.is_egg(name) and n_low == 'egg-info'): - yield self.joinpath(child) + return self.lookup(self.mtime).search(name) + + @property + def mtime(self): + with suppress(OSError): + return os.stat(self.root).st_mtime + self.lookup.cache_clear() + + @method_cache + def lookup(self, mtime): + return Lookup(self) + + +class Lookup: + def __init__(self, path: FastPath): + base = os.path.basename(path.root).lower() + base_is_egg = base.endswith(".egg") + self.infos = FreezableDefaultDict(list) + self.eggs = FreezableDefaultDict(list) + + for child in path.children(): + low = child.lower() + if low.endswith((".dist-info", ".egg-info")): + # rpartition is faster than splitext and suitable for this purpose. + name = low.rpartition(".")[0].partition("-")[0] + normalized = Prepared.normalize(name) + self.infos[normalized].append(path.joinpath(child)) + elif base_is_egg and low == "egg-info": + name = base.rpartition(".")[0].partition("-")[0] + legacy_normalized = Prepared.legacy_normalize(name) + self.eggs[legacy_normalized].append(path.joinpath(child)) + + self.infos.freeze() + self.eggs.freeze() + + def search(self, prepared): + infos = ( + self.infos[prepared.normalized] + if prepared + else itertools.chain.from_iterable(self.infos.values()) + ) + eggs = ( + self.eggs[prepared.legacy_normalized] + if prepared + else itertools.chain.from_iterable(self.eggs.values()) + ) + return itertools.chain(infos, eggs) class Prepared: """ A prepared search for metadata on a possibly-named package. 
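+
+    An illustrative sketch (editorial, not part of the vendored source)
+    of the two normalization schemes defined below:
+
+    >>> Prepared.normalize('importlib-metadata')
+    'importlib_metadata'
+    >>> Prepared.legacy_normalize('Importlib--Metadata')
+    'importlib__metadata'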
""" - normalized = '' - prefix = '' - suffixes = '.dist-info', '.egg-info' - exact_matches = [''][:0] - versionless_egg_name = '' + + normalized = None + legacy_normalized = None def __init__(self, name): self.name = name if name is None: return - self.normalized = name.lower().replace('-', '_') - self.prefix = self.normalized + '-' - self.exact_matches = [ - self.normalized + suffix for suffix in self.suffixes] - self.versionless_egg_name = self.normalized + '.egg' + self.normalized = self.normalize(name) + self.legacy_normalized = self.legacy_normalize(name) + + @staticmethod + def normalize(name): + """ + PEP 503 normalization plus dashes as underscores. + """ + return re.sub(r"[-_.]+", "-", name).lower().replace('-', '_') + + @staticmethod + def legacy_normalize(name): + """ + Normalize the package name as found in the convention in + older packaging tools versions and specs. + """ + return name.lower().replace('-', '_') + + def __bool__(self): + return bool(self.name) @install @@ -527,30 +921,67 @@ def find_distributions(self, context=DistributionFinder.Context()): @classmethod def _search_paths(cls, name, paths): """Find metadata directories in paths heuristically.""" + prepared = Prepared(name) return itertools.chain.from_iterable( - path.search(Prepared(name)) - for path in map(FastPath, paths) - ) + path.search(prepared) for path in map(FastPath, paths) + ) + + def invalidate_caches(cls): + FastPath.__new__.cache_clear() class PathDistribution(Distribution): - def __init__(self, path): - """Construct a distribution from a path to the metadata directory. + def __init__(self, path: SimplePath): + """Construct a distribution. - :param path: A pathlib.Path or similar object supporting - .joinpath(), __div__, .parent, and .read_text(). + :param path: SimplePath indicating the metadata directory. """ self._path = path def read_text(self, filename): - with suppress(FileNotFoundError, IsADirectoryError, KeyError, - NotADirectoryError, PermissionError): + with suppress( + FileNotFoundError, + IsADirectoryError, + KeyError, + NotADirectoryError, + PermissionError, + ): return self._path.joinpath(filename).read_text(encoding='utf-8') + read_text.__doc__ = Distribution.read_text.__doc__ def locate_file(self, path): return self._path.parent / path + @property + def _normalized_name(self): + """ + Performance optimization: where possible, resolve the + normalized name from the file system path. + """ + stem = os.path.basename(str(self._path)) + return ( + pass_none(Prepared.normalize)(self._name_from_stem(stem)) + or super()._normalized_name + ) + + @staticmethod + def _name_from_stem(stem): + """ + >>> PathDistribution._name_from_stem('foo-3.0.egg-info') + 'foo' + >>> PathDistribution._name_from_stem('CherryPy-3.0.dist-info') + 'CherryPy' + >>> PathDistribution._name_from_stem('face.egg-info') + 'face' + >>> PathDistribution._name_from_stem('foo.bar') + """ + filename, ext = os.path.splitext(stem) + if ext not in ('.dist-info', '.egg-info'): + return + name, sep, rest = filename.partition('-') + return name + def distribution(distribution_name): """Get the ``Distribution`` instance for the named package. @@ -569,11 +1000,11 @@ def distributions(**kwargs): return Distribution.discover(**kwargs) -def metadata(distribution_name): +def metadata(distribution_name) -> _meta.PackageMetadata: """Get the metadata for the named package. :param distribution_name: The name of the distribution package to query. - :return: An email.Message containing the parsed metadata. 
+ :return: A PackageMetadata containing the parsed metadata. """ return Distribution.from_name(distribution_name).metadata @@ -588,20 +1019,36 @@ def version(distribution_name): return distribution(distribution_name).version -def entry_points(): +_unique = functools.partial( + unique_everseen, + key=operator.attrgetter('_normalized_name'), +) +""" +Wrapper for ``distributions`` to return unique distributions by name. +""" + + +def entry_points(**params) -> Union[EntryPoints, SelectableGroups]: """Return EntryPoint objects for all installed packages. - :return: EntryPoint objects for all installed packages. + Pass selection parameters (group or name) to filter the + result to entry points matching those properties (see + EntryPoints.select()). + + For compatibility, returns ``SelectableGroups`` object unless + selection parameters are supplied. In the future, this function + will return ``EntryPoints`` instead of ``SelectableGroups`` + even when no selection parameters are supplied. + + For maximum future compatibility, pass selection parameters + or invoke ``.select`` with parameters on the result. + + :return: EntryPoints or SelectableGroups for all installed packages. """ eps = itertools.chain.from_iterable( - dist.entry_points for dist in distributions()) - by_group = operator.attrgetter('group') - ordered = sorted(eps, key=by_group) - grouped = itertools.groupby(ordered, by_group) - return { - group: tuple(eps) - for group, eps in grouped - } + dist.entry_points for dist in _unique(distributions()) + ) + return SelectableGroups.load(eps).select(**params) def files(distribution_name): @@ -618,9 +1065,35 @@ def requires(distribution_name): Return a list of requirements for the named package. :return: An iterator of requirements, suitable for - packaging.requirement.Requirement. + packaging.requirement.Requirement. """ return distribution(distribution_name).requires -__version__ = "1.7.0" +def packages_distributions() -> Mapping[str, List[str]]: + """ + Return a mapping of top-level packages to their + distributions. + + >>> import collections.abc + >>> pkgs = packages_distributions() + >>> all(isinstance(dist, collections.abc.Sequence) for dist in pkgs.values()) + True + """ + pkg_to_dist = collections.defaultdict(list) + for dist in distributions(): + for pkg in _top_level_declared(dist) or _top_level_inferred(dist): + pkg_to_dist[pkg].append(dist.metadata['Name']) + return dict(pkg_to_dist) + + +def _top_level_declared(dist): + return (dist.read_text('top_level.txt') or '').split() + + +def _top_level_inferred(dist): + return { + f.parts[0] if len(f.parts) > 1 else f.with_suffix('').name + for f in always_iterable(dist.files) + if f.suffix == ".py" + } diff --git a/vendor/importlib_metadata/importlib_metadata/_adapters.py b/vendor/importlib_metadata/importlib_metadata/_adapters.py new file mode 100644 index 00000000..aa460d3e --- /dev/null +++ b/vendor/importlib_metadata/importlib_metadata/_adapters.py @@ -0,0 +1,68 @@ +import re +import textwrap +import email.message + +from ._text import FoldedCase + + +class Message(email.message.Message): + multiple_use_keys = set( + map( + FoldedCase, + [ + 'Classifier', + 'Obsoletes-Dist', + 'Platform', + 'Project-URL', + 'Provides-Dist', + 'Provides-Extra', + 'Requires-Dist', + 'Requires-External', + 'Supported-Platform', + 'Dynamic', + ], + ) + ) + """ + Keys that may be indicated multiple times per PEP 566. 
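+
+    An editorial sketch (not in the vendored source): keys in this set
+    come back as lists from the ``json`` property:
+
+    >>> raw = email.message_from_string(
+    ...     'Name: foo\nClassifier: A\nClassifier: B\n')
+    >>> Message(raw).json['classifier']
+    ['A', 'B']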
+ """ + + def __new__(cls, orig: email.message.Message): + res = super().__new__(cls) + vars(res).update(vars(orig)) + return res + + def __init__(self, *args, **kwargs): + self._headers = self._repair_headers() + + # suppress spurious error from mypy + def __iter__(self): + return super().__iter__() + + def _repair_headers(self): + def redent(value): + "Correct for RFC822 indentation" + if not value or '\n' not in value: + return value + return textwrap.dedent(' ' * 8 + value) + + headers = [(key, redent(value)) for key, value in vars(self)['_headers']] + if self._payload: + headers.append(('Description', self.get_payload())) + return headers + + @property + def json(self): + """ + Convert PackageMetadata to a JSON-compatible format + per PEP 0566. + """ + + def transform(key): + value = self.get_all(key) if key in self.multiple_use_keys else self[key] + if key == 'Keywords': + value = re.split(r'\s+', value) + tk = key.lower().replace('-', '_') + return tk, value + + return dict(map(transform, map(FoldedCase, self))) diff --git a/vendor/importlib_metadata/importlib_metadata/_collections.py b/vendor/importlib_metadata/importlib_metadata/_collections.py new file mode 100644 index 00000000..cf0954e1 --- /dev/null +++ b/vendor/importlib_metadata/importlib_metadata/_collections.py @@ -0,0 +1,30 @@ +import collections + + +# from jaraco.collections 3.3 +class FreezableDefaultDict(collections.defaultdict): + """ + Often it is desirable to prevent the mutation of + a default dict after its initial construction, such + as to prevent mutation during iteration. + + >>> dd = FreezableDefaultDict(list) + >>> dd[0].append('1') + >>> dd.freeze() + >>> dd[1] + [] + >>> len(dd) + 1 + """ + + def __missing__(self, key): + return getattr(self, '_frozen', super().__missing__)(key) + + def freeze(self): + self._frozen = lambda key: self.default_factory() + + +class Pair(collections.namedtuple('Pair', 'name value')): + @classmethod + def parse(cls, text): + return cls(*map(str.strip, text.split("=", 1))) diff --git a/vendor/importlib_metadata/importlib_metadata/_compat.py b/vendor/importlib_metadata/importlib_metadata/_compat.py index 303d4a22..3d78566e 100644 --- a/vendor/importlib_metadata/importlib_metadata/_compat.py +++ b/vendor/importlib_metadata/importlib_metadata/_compat.py @@ -1,59 +1,15 @@ -from __future__ import absolute_import, unicode_literals - -import io -import abc import sys -import email - - -if sys.version_info > (3,): # pragma: nocover - import builtins - from configparser import ConfigParser - import contextlib - FileNotFoundError = builtins.FileNotFoundError - IsADirectoryError = builtins.IsADirectoryError - NotADirectoryError = builtins.NotADirectoryError - PermissionError = builtins.PermissionError - map = builtins.map - from itertools import filterfalse -else: # pragma: nocover - from backports.configparser import ConfigParser - from itertools import imap as map # type: ignore - from itertools import ifilterfalse as filterfalse - import contextlib2 as contextlib - FileNotFoundError = IOError, OSError - IsADirectoryError = IOError, OSError - NotADirectoryError = IOError, OSError - PermissionError = IOError, OSError - -str = type('') - -suppress = contextlib.suppress - -if sys.version_info > (3, 5): # pragma: nocover - import pathlib -else: # pragma: nocover - import pathlib2 as pathlib - -try: - ModuleNotFoundError = builtins.FileNotFoundError -except (NameError, AttributeError): # pragma: nocover - ModuleNotFoundError = ImportError # type: ignore +import platform -if sys.version_info 
>= (3,): # pragma: nocover - from importlib.abc import MetaPathFinder -else: # pragma: nocover - class MetaPathFinder(object): - __metaclass__ = abc.ABCMeta +__all__ = ['install', 'NullFinder', 'Protocol'] -__metaclass__ = type -__all__ = [ - 'install', 'NullFinder', 'MetaPathFinder', 'ModuleNotFoundError', - 'pathlib', 'ConfigParser', 'map', 'suppress', 'FileNotFoundError', - 'NotADirectoryError', 'email_message_from_string', - ] +try: + from typing import Protocol +except ImportError: # pragma: no cover + # Python 3.7 compatibility + from typing_extensions import Protocol # type: ignore def install(cls): @@ -77,11 +33,12 @@ def disable_stdlib_finder(): See #91 for more background for rationale on this sketchy behavior. """ + def matches(finder): - return ( - getattr(finder, '__module__', None) == '_frozen_importlib_external' - and hasattr(finder, 'find_distributions') - ) + return getattr( + finder, '__module__', None + ) == '_frozen_importlib_external' and hasattr(finder, 'find_distributions') + for finder in filter(matches, sys.meta_path): # pragma: nocover del finder.find_distributions @@ -91,6 +48,7 @@ class NullFinder: A "Finder" (aka "MetaClassFinder") that never finds any modules, but may find distributions. """ + @staticmethod def find_spec(*args, **kwargs): return None @@ -104,49 +62,11 @@ def find_spec(*args, **kwargs): find_module = find_spec -def py2_message_from_string(text): # nocoverpy3 - # Work around https://bugs.python.org/issue25545 where - # email.message_from_string cannot handle Unicode on Python 2. - io_buffer = io.StringIO(text) - return email.message_from_file(io_buffer) - - -email_message_from_string = ( - py2_message_from_string - if sys.version_info < (3,) else - email.message_from_string - ) - - -class PyPy_repr: - """ - Override repr for EntryPoint objects on PyPy to avoid __iter__ access. - Ref #97, #102. +def pypy_partial(val): """ - affected = hasattr(sys, 'pypy_version_info') - - def __compat_repr__(self): # pragma: nocover - def make_param(name): - value = getattr(self, name) - return '{name}={value!r}'.format(**locals()) - params = ', '.join(map(make_param, self._fields)) - return 'EntryPoint({params})'.format(**locals()) - - if affected: # pragma: nocover - __repr__ = __compat_repr__ - del affected - + Adjust for variable stacklevel on partial under PyPy. -# from itertools recipes -def unique_everseen(iterable): # pragma: nocover - "List unique elements, preserving order. Remember all elements ever seen." - seen = set() - seen_add = seen.add - - for element in filterfalse(seen.__contains__, iterable): - seen_add(element) - yield element - - -unique_ordered = ( - unique_everseen if sys.version_info < (3, 7) else dict.fromkeys) + Workaround for #327. + """ + is_pypy = platform.python_implementation() == 'PyPy' + return val + is_pypy diff --git a/vendor/importlib_metadata/importlib_metadata/_functools.py b/vendor/importlib_metadata/importlib_metadata/_functools.py new file mode 100644 index 00000000..71f66bd0 --- /dev/null +++ b/vendor/importlib_metadata/importlib_metadata/_functools.py @@ -0,0 +1,104 @@ +import types +import functools + + +# from jaraco.functools 3.3 +def method_cache(method, cache_wrapper=None): + """ + Wrap lru_cache to support storing the cache data in the object instances. + + Abstracts the common paradigm where the method explicitly saves an + underscore-prefixed protected property on first call and returns that + subsequently. + + >>> class MyClass: + ... calls = 0 + ... + ... @method_cache + ... def method(self, value): + ... 
self.calls += 1 + ... return value + + >>> a = MyClass() + >>> a.method(3) + 3 + >>> for x in range(75): + ... res = a.method(x) + >>> a.calls + 75 + + Note that the apparent behavior will be exactly like that of lru_cache + except that the cache is stored on each instance, so values in one + instance will not flush values from another, and when an instance is + deleted, so are the cached values for that instance. + + >>> b = MyClass() + >>> for x in range(35): + ... res = b.method(x) + >>> b.calls + 35 + >>> a.method(0) + 0 + >>> a.calls + 75 + + Note that if method had been decorated with ``functools.lru_cache()``, + a.calls would have been 76 (due to the cached value of 0 having been + flushed by the 'b' instance). + + Clear the cache with ``.cache_clear()`` + + >>> a.method.cache_clear() + + Same for a method that hasn't yet been called. + + >>> c = MyClass() + >>> c.method.cache_clear() + + Another cache wrapper may be supplied: + + >>> cache = functools.lru_cache(maxsize=2) + >>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache) + >>> a = MyClass() + >>> a.method2() + 3 + + Caution - do not subsequently wrap the method with another decorator, such + as ``@property``, which changes the semantics of the function. + + See also + http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/ + for another implementation and additional justification. + """ + cache_wrapper = cache_wrapper or functools.lru_cache() + + def wrapper(self, *args, **kwargs): + # it's the first call, replace the method with a cached, bound method + bound_method = types.MethodType(method, self) + cached_method = cache_wrapper(bound_method) + setattr(self, method.__name__, cached_method) + return cached_method(*args, **kwargs) + + # Support cache clear even before cache has been created. + wrapper.cache_clear = lambda: None + + return wrapper + + +# From jaraco.functools 3.3 +def pass_none(func): + """ + Wrap func so it's not called if its first param is None + + >>> print_text = pass_none(print) + >>> print_text('text') + text + >>> print_text(None) + """ + + @functools.wraps(func) + def wrapper(param, *args, **kwargs): + if param is not None: + return func(param, *args, **kwargs) + + return wrapper diff --git a/vendor/importlib_metadata/importlib_metadata/_itertools.py b/vendor/importlib_metadata/importlib_metadata/_itertools.py new file mode 100644 index 00000000..d4ca9b91 --- /dev/null +++ b/vendor/importlib_metadata/importlib_metadata/_itertools.py @@ -0,0 +1,73 @@ +from itertools import filterfalse + + +def unique_everseen(iterable, key=None): + "List unique elements, preserving order. Remember all elements ever seen." 
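The vendored ``unique_everseen`` follows the classic itertools recipe, extended with an optional ``key`` callable: deduplication happens on ``key(element)`` while the first-seen elements themselves are yielded. A minimal sketch of the ``key`` form, assuming the vendored module path::

    from importlib_metadata._itertools import unique_everseen

    rows = [{'name': 'Lark'}, {'name': 'lark'}, {'name': 'poetry'}]
    deduped = list(unique_everseen(rows, key=lambda r: r['name'].lower()))
    # the first spelling seen wins: [{'name': 'Lark'}, {'name': 'poetry'}]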
+ # unique_everseen('AAAABBBCCDAABBB') --> A B C D + # unique_everseen('ABBCcAD', str.lower) --> A B C D + seen = set() + seen_add = seen.add + if key is None: + for element in filterfalse(seen.__contains__, iterable): + seen_add(element) + yield element + else: + for element in iterable: + k = key(element) + if k not in seen: + seen_add(k) + yield element + + +# copied from more_itertools 8.8 +def always_iterable(obj, base_type=(str, bytes)): + """If *obj* is iterable, return an iterator over its items:: + + >>> obj = (1, 2, 3) + >>> list(always_iterable(obj)) + [1, 2, 3] + + If *obj* is not iterable, return a one-item iterable containing *obj*:: + + >>> obj = 1 + >>> list(always_iterable(obj)) + [1] + + If *obj* is ``None``, return an empty iterable: + + >>> obj = None + >>> list(always_iterable(None)) + [] + + By default, binary and text strings are not considered iterable:: + + >>> obj = 'foo' + >>> list(always_iterable(obj)) + ['foo'] + + If *base_type* is set, objects for which ``isinstance(obj, base_type)`` + returns ``True`` won't be considered iterable. + + >>> obj = {'a': 1} + >>> list(always_iterable(obj)) # Iterate over the dict's keys + ['a'] + >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit + [{'a': 1}] + + Set *base_type* to ``None`` to avoid any special handling and treat objects + Python considers iterable as iterable: + + >>> obj = 'foo' + >>> list(always_iterable(obj, base_type=None)) + ['f', 'o', 'o'] + """ + if obj is None: + return iter(()) + + if (base_type is not None) and isinstance(obj, base_type): + return iter((obj,)) + + try: + return iter(obj) + except TypeError: + return iter((obj,)) diff --git a/vendor/importlib_metadata/importlib_metadata/_meta.py b/vendor/importlib_metadata/importlib_metadata/_meta.py new file mode 100644 index 00000000..37ee43e6 --- /dev/null +++ b/vendor/importlib_metadata/importlib_metadata/_meta.py @@ -0,0 +1,48 @@ +from ._compat import Protocol +from typing import Any, Dict, Iterator, List, TypeVar, Union + + +_T = TypeVar("_T") + + +class PackageMetadata(Protocol): + def __len__(self) -> int: + ... # pragma: no cover + + def __contains__(self, item: str) -> bool: + ... # pragma: no cover + + def __getitem__(self, key: str) -> str: + ... # pragma: no cover + + def __iter__(self) -> Iterator[str]: + ... # pragma: no cover + + def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]: + """ + Return all values associated with a possibly multi-valued key. + """ + + @property + def json(self) -> Dict[str, Union[str, List[str]]]: + """ + A JSON-compatible form of the metadata. + """ + + +class SimplePath(Protocol): + """ + A minimal subset of pathlib.Path required by PathDistribution. + """ + + def joinpath(self) -> 'SimplePath': + ... # pragma: no cover + + def __truediv__(self) -> 'SimplePath': + ... # pragma: no cover + + def parent(self) -> 'SimplePath': + ... # pragma: no cover + + def read_text(self) -> str: + ... # pragma: no cover diff --git a/vendor/importlib_metadata/importlib_metadata/_text.py b/vendor/importlib_metadata/importlib_metadata/_text.py new file mode 100644 index 00000000..c88cfbb2 --- /dev/null +++ b/vendor/importlib_metadata/importlib_metadata/_text.py @@ -0,0 +1,99 @@ +import re + +from ._functools import method_cache + + +# from jaraco.text 3.5 +class FoldedCase(str): + """ + A case insensitive string class; behaves just like str + except compares equal when the only variation is case. 
+ + >>> s = FoldedCase('hello world') + + >>> s == 'Hello World' + True + + >>> 'Hello World' == s + True + + >>> s != 'Hello World' + False + + >>> s.index('O') + 4 + + >>> s.split('O') + ['hell', ' w', 'rld'] + + >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta'])) + ['alpha', 'Beta', 'GAMMA'] + + Sequence membership is straightforward. + + >>> "Hello World" in [s] + True + >>> s in ["Hello World"] + True + + You may test for set inclusion, but candidate and elements + must both be folded. + + >>> FoldedCase("Hello World") in {s} + True + >>> s in {FoldedCase("Hello World")} + True + + String inclusion works as long as the FoldedCase object + is on the right. + + >>> "hello" in FoldedCase("Hello World") + True + + But not if the FoldedCase object is on the left: + + >>> FoldedCase('hello') in 'Hello World' + False + + In that case, use in_: + + >>> FoldedCase('hello').in_('Hello World') + True + + >>> FoldedCase('hello') > FoldedCase('Hello') + False + """ + + def __lt__(self, other): + return self.lower() < other.lower() + + def __gt__(self, other): + return self.lower() > other.lower() + + def __eq__(self, other): + return self.lower() == other.lower() + + def __ne__(self, other): + return self.lower() != other.lower() + + def __hash__(self): + return hash(self.lower()) + + def __contains__(self, other): + return super().lower().__contains__(other.lower()) + + def in_(self, other): + "Does self appear in other?" + return self in FoldedCase(other) + + # cache lower since it's likely to be called frequently. + @method_cache + def lower(self): + return super().lower() + + def index(self, sub): + return self.lower().index(sub.lower()) + + def split(self, splitter=' ', maxsplit=0): + pattern = re.compile(re.escape(splitter), re.I) + return pattern.split(self, maxsplit) diff --git a/vendor/importlib_metadata/importlib_metadata/docs/changelog.rst b/vendor/importlib_metadata/importlib_metadata/docs/changelog.rst deleted file mode 100644 index 0455e667..00000000 --- a/vendor/importlib_metadata/importlib_metadata/docs/changelog.rst +++ /dev/null @@ -1,297 +0,0 @@ -========================= - importlib_metadata NEWS -========================= - -v1.7.0 -====== - -* ``PathNotFoundError`` now has a custom ``__str__`` - mentioning "package metadata" being missing to help - guide users to the cause when the package is installed - but no metadata is present. Closes #124. - -v1.6.1 -====== - -* Added ``Distribution._local()`` as a provisional - demonstration of how to load metadata for a local - package. Implicitly requires that - `pep517 `_ is - installed. Ref #42. -* Ensure inputs to FastPath are Unicode. Closes #121. -* Tests now rely on ``importlib.resources.files`` (and - backport) instead of the older ``path`` function. -* Support any iterable from ``find_distributions``. - Closes #122. - -v1.6.0 -====== - -* Added ``module`` and ``attr`` attributes to ``EntryPoint`` - -v1.5.2 -====== - -* Fix redundant entries from ``FastPath.zip_children``. - Closes #117. - -v1.5.1 -====== - -* Improve reliability and consistency of compatibility - imports for contextlib and pathlib when running tests. - Closes #116. - -v1.5.0 -====== - -* Additional performance optimizations in FastPath now - saves an additional 20% on a typical call. -* Correct for issue where PyOxidizer finder has no - ``__module__`` attribute. Closes #110. - -v1.4.0 -====== - -* Through careful optimization, ``distribution()`` is - 3-4x faster. Thanks to Antony Lee for the - contribution. Closes #95. 
- -* When searching through ``sys.path``, if any error - occurs attempting to list a path entry, that entry - is skipped, making the system much more lenient - to errors. Closes #94. - -v1.3.0 -====== - -* Improve custom finders documentation. Closes #105. - -v1.2.0 -====== - -* Once again, drop support for Python 3.4. Ref #104. - -v1.1.3 -====== - -* Restored support for Python 3.4 due to improper version - compatibility declarations in the v1.1.0 and v1.1.1 - releases. Closes #104. - -v1.1.2 -====== - -* Repaired project metadata to correctly declare the - ``python_requires`` directive. Closes #103. - -v1.1.1 -====== - -* Fixed ``repr(EntryPoint)`` on PyPy 3 also. Closes #102. - -v1.1.0 -====== - -* Dropped support for Python 3.4. -* EntryPoints are now pickleable. Closes #96. -* Fixed ``repr(EntryPoint)`` on PyPy 2. Closes #97. - -v1.0.0 -====== - -* Project adopts semver for versioning. - -* Removed compatibility shim introduced in 0.23. - -* For better compatibility with the stdlib implementation and to - avoid the same distributions being discovered by the stdlib and - backport implementations, the backport now disables the - stdlib DistributionFinder during initialization (import time). - Closes #91 and closes #100. - -0.23 -==== -* Added a compatibility shim to prevent failures on beta releases - of Python before the signature changed to accept the - "context" parameter on find_distributions. This workaround - will have a limited lifespan, not to extend beyond release of - Python 3.8 final. - -0.22 -==== -* Renamed ``package`` parameter to ``distribution_name`` - as `recommended `_ - in the following functions: ``distribution``, ``metadata``, - ``version``, ``files``, and ``requires``. This - backward-incompatible change is expected to have little impact - as these functions are assumed to be primarily used with - positional parameters. - -0.21 -==== -* ``importlib.metadata`` now exposes the ``DistributionFinder`` - metaclass and references it in the docs for extending the - search algorithm. -* Add ``Distribution.at`` for constructing a Distribution object - from a known metadata directory on the file system. Closes #80. -* Distribution finders now receive a context object that - supplies ``.path`` and ``.name`` properties. This change - introduces a fundamental backward incompatibility for - any projects implementing a ``find_distributions`` method - on a ``MetaPathFinder``. This new layer of abstraction - allows this context to be supplied directly or constructed - on demand and opens the opportunity for a - ``find_distributions`` method to solicit additional - context from the caller. Closes #85. - -0.20 -==== -* Clarify in the docs that calls to ``.files`` could return - ``None`` when the metadata is not present. Closes #69. -* Return all requirements and not just the first for dist-info - packages. Closes #67. - -0.19 -==== -* Restrain over-eager egg metadata resolution. -* Add support for entry points with colons in the name. Closes #75. - -0.18 -==== -* Parse entry points case sensitively. Closes #68 -* Add a version constraint on the backport configparser package. Closes #66 - -0.17 -==== -* Fix a permission problem in the tests on Windows. - -0.16 -==== -* Don't crash if there exists an EGG-INFO directory on sys.path. - -0.15 -==== -* Fix documentation. - -0.14 -==== -* Removed ``local_distribution`` function from the API. - **This backward-incompatible change removes this - behavior summarily**. Projects should remove their - reliance on this behavior. 
A replacement behavior is - under review in the `pep517 project - `_. Closes #42. - -0.13 -==== -* Update docstrings to match PEP 8. Closes #63. -* Merged modules into one module. Closes #62. - -0.12 -==== -* Add support for eggs. !65; Closes #19. - -0.11 -==== -* Support generic zip files (not just wheels). Closes #59 -* Support zip files with multiple distributions in them. Closes #60 -* Fully expose the public API in ``importlib_metadata.__all__``. - -0.10 -==== -* The ``Distribution`` ABC is now officially part of the public API. - Closes #37. -* Fixed support for older single file egg-info formats. Closes #43. -* Fixed a testing bug when ``$CWD`` has spaces in the path. Closes #50. -* Add Python 3.8 to the ``tox`` testing matrix. - -0.9 -=== -* Fixed issue where entry points without an attribute would raise an - Exception. Closes #40. -* Removed unused ``name`` parameter from ``entry_points()``. Closes #44. -* ``DistributionFinder`` classes must now be instantiated before - being placed on ``sys.meta_path``. - -0.8 -=== -* This library can now discover/enumerate all installed packages. **This - backward-incompatible change alters the protocol finders must - implement to support distribution package discovery.** Closes #24. -* The signature of ``find_distributions()`` on custom installer finders - should now accept two parameters, ``name`` and ``path`` and - these parameters must supply defaults. -* The ``entry_points()`` method no longer accepts a package name - but instead returns all entry points in a dictionary keyed by the - ``EntryPoint.group``. The ``resolve`` method has been removed. Instead, - call ``EntryPoint.load()``, which has the same semantics as - ``pkg_resources`` and ``entrypoints``. **This is a backward incompatible - change.** -* Metadata is now always returned as Unicode text regardless of - Python version. Closes #29. -* This library can now discover metadata for a 'local' package (found - in the current-working directory). Closes #27. -* Added ``files()`` function for resolving files from a distribution. -* Added a new ``requires()`` function, which returns the requirements - for a package suitable for parsing by - ``packaging.requirements.Requirement``. Closes #18. -* The top-level ``read_text()`` function has been removed. Use - ``PackagePath.read_text()`` on instances returned by the ``files()`` - function. **This is a backward incompatible change.** -* Release dates are now automatically injected into the changelog - based on SCM tags. - -0.7 -=== -* Fixed issue where packages with dashes in their names would - not be discovered. Closes #21. -* Distribution lookup is now case-insensitive. Closes #20. -* Wheel distributions can no longer be discovered by their module - name. Like Path distributions, they must be indicated by their - distribution package name. - -0.6 -=== -* Removed ``importlib_metadata.distribution`` function. Now - the public interface is primarily the utility functions exposed - in ``importlib_metadata.__all__``. Closes #14. -* Added two new utility functions ``read_text`` and - ``metadata``. - -0.5 -=== -* Updated README and removed details about Distribution - class, now considered private. Closes #15. -* Added test suite support for Python 3.4+. -* Fixed SyntaxErrors on Python 3.4 and 3.5. !12 -* Fixed errors on Windows joining Path elements. !15 - -0.4 -=== -* Housekeeping. - -0.3 -=== -* Added usage documentation. Closes #8 -* Add support for getting metadata from wheels on ``sys.path``. 
Closes #9 - -0.2 -=== -* Added ``importlib_metadata.entry_points()``. Closes #1 -* Added ``importlib_metadata.resolve()``. Closes #12 -* Add support for Python 2.7. Closes #4 - -0.1 -=== -* Initial release. - - -.. - Local Variables: - mode: change-log-mode - indent-tabs-mode: nil - sentence-end-double-space: t - fill-column: 78 - coding: utf-8 - End: diff --git a/vendor/importlib_metadata/importlib_metadata/docs/conf.py b/vendor/importlib_metadata/importlib_metadata/docs/conf.py deleted file mode 100644 index 129a7a4e..00000000 --- a/vendor/importlib_metadata/importlib_metadata/docs/conf.py +++ /dev/null @@ -1,185 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# -# importlib_metadata documentation build configuration file, created by -# sphinx-quickstart on Thu Nov 30 10:21:00 2017. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -# import os -# import sys -# sys.path.insert(0, os.path.abspath('.')) - - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -# -# needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'rst.linker', - 'sphinx.ext.autodoc', - 'sphinx.ext.coverage', - 'sphinx.ext.doctest', - 'sphinx.ext.intersphinx', - 'sphinx.ext.viewcode', - ] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# -# source_suffix = ['.rst', '.md'] -source_suffix = '.rst' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = 'importlib_metadata' -copyright = '2017-2019, Jason R. Coombs, Barry Warsaw' -author = 'Jason R. Coombs, Barry Warsaw' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = '0.1' -# The full version, including alpha/beta/rc tags. -release = '0.1' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This patterns also effect to html_static_path and html_extra_path -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# If true, `todo` and `todoList` produce output, else they produce nothing. 
-todo_include_todos = False - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'default' - -# Custom sidebar templates, must be a dictionary that maps document names -# to template names. -# -# This is required for the alabaster theme -# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars -html_sidebars = { - '**': [ - 'relations.html', # needs 'show_related': True theme option to display - 'searchbox.html', - ] - } - - -# -- Options for HTMLHelp output ------------------------------------------ - -# Output file base name for HTML help builder. -htmlhelp_basename = 'importlib_metadatadoc' - - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # - # 'papersize': 'letterpaper', - - # The font size ('10pt', '11pt' or '12pt'). - # - # 'pointsize': '10pt', - - # Additional stuff for the LaTeX preamble. - # - # 'preamble': '', - - # Latex figure (float) alignment - # - # 'figure_align': 'htbp', - } - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, 'importlib_metadata.tex', - 'importlib\\_metadata Documentation', - 'Brett Cannon, Barry Warsaw', 'manual'), - ] - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'importlib_metadata', 'importlib_metadata Documentation', - [author], 1) - ] - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - (master_doc, 'importlib_metadata', 'importlib_metadata Documentation', - author, 'importlib_metadata', 'One line description of project.', - 'Miscellaneous'), - ] - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - 'python': ('https://docs.python.org/3', None), - 'importlib_resources': ( - 'https://importlib-resources.readthedocs.io/en/latest/', None - ), - } - - -# For rst.linker, inject release dates into changelog.rst -link_files = { - 'changelog.rst': dict( - replace=[ - dict( - pattern=r'^(?m)((?Pv?\d+(\.\d+){1,2}))\n[-=]+\n', - with_scm='{text}\n{rev[timestamp]:%Y-%m-%d}\n\n', - ), - ], - ), - } diff --git a/vendor/importlib_metadata/importlib_metadata/docs/index.rst b/vendor/importlib_metadata/importlib_metadata/docs/index.rst deleted file mode 100644 index 530197cf..00000000 --- a/vendor/importlib_metadata/importlib_metadata/docs/index.rst +++ /dev/null @@ -1,50 +0,0 @@ -=============================== - Welcome to importlib_metadata -=============================== - -``importlib_metadata`` is a library which provides an API for accessing an -installed package's metadata (see :pep:`566`), such as its entry points or its top-level -name. This functionality intends to replace most uses of ``pkg_resources`` -`entry point API`_ and `metadata API`_. 
Along with :mod:`importlib.resources` in -Python 3.7 and newer (backported as :doc:`importlib_resources ` for older -versions of Python), this can eliminate the need to use the older and less -efficient ``pkg_resources`` package. - -``importlib_metadata`` is a backport of Python 3.8's standard library -:doc:`importlib.metadata ` module for Python 2.7, and 3.4 through 3.7. Users of -Python 3.8 and beyond are encouraged to use the standard library module. -When imported on Python 3.8 and later, ``importlib_metadata`` replaces the -DistributionFinder behavior from the stdlib, but leaves the API in tact. -Developers looking for detailed API descriptions should refer to the Python -3.8 standard library documentation. - -The documentation here includes a general :ref:`usage ` guide. - - -.. toctree:: - :maxdepth: 2 - :caption: Contents: - - using.rst - changelog (links).rst - - -Project details -=============== - - * Project home: https://gitlab.com/python-devs/importlib_metadata - * Report bugs at: https://gitlab.com/python-devs/importlib_metadata/issues - * Code hosting: https://gitlab.com/python-devs/importlib_metadata.git - * Documentation: http://importlib_metadata.readthedocs.io/ - - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` - - -.. _`entry point API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#entry-points -.. _`metadata API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#metadata-api diff --git a/vendor/importlib_metadata/importlib_metadata/docs/using.rst b/vendor/importlib_metadata/importlib_metadata/docs/using.rst deleted file mode 100644 index 11965147..00000000 --- a/vendor/importlib_metadata/importlib_metadata/docs/using.rst +++ /dev/null @@ -1,260 +0,0 @@ -.. _using: - -================================= - Using :mod:`!importlib_metadata` -================================= - -``importlib_metadata`` is a library that provides for access to installed -package metadata. Built in part on Python's import system, this library -intends to replace similar functionality in the `entry point -API`_ and `metadata API`_ of ``pkg_resources``. Along with -:mod:`importlib.resources` in Python 3.7 -and newer (backported as :doc:`importlib_resources ` for older versions of -Python), this can eliminate the need to use the older and less efficient -``pkg_resources`` package. - -By "installed package" we generally mean a third-party package installed into -Python's ``site-packages`` directory via tools such as `pip -`_. Specifically, -it means a package with either a discoverable ``dist-info`` or ``egg-info`` -directory, and metadata defined by :pep:`566` or its older specifications. -By default, package metadata can live on the file system or in zip archives on -:data:`sys.path`. Through an extension mechanism, the metadata can live almost -anywhere. - - -Overview -======== - -Let's say you wanted to get the version string for a package you've installed -using ``pip``. We start by creating a virtual environment and installing -something into it:: - - $ python3 -m venv example - $ source example/bin/activate - (example) $ pip install importlib_metadata - (example) $ pip install wheel - -You can get the version string for ``wheel`` by running the following:: - - (example) $ python - >>> from importlib_metadata import version - >>> version('wheel') - '0.32.3' - -You can also get the set of entry points keyed by group, such as -``console_scripts``, ``distutils.commands`` and others. 
Each group contains a -sequence of :ref:`EntryPoint ` objects. - -You can get the :ref:`metadata for a distribution `:: - - >>> list(metadata('wheel')) - ['Metadata-Version', 'Name', 'Version', 'Summary', 'Home-page', 'Author', 'Author-email', 'Maintainer', 'Maintainer-email', 'License', 'Project-URL', 'Project-URL', 'Project-URL', 'Keywords', 'Platform', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Requires-Python', 'Provides-Extra', 'Requires-Dist', 'Requires-Dist'] - -You can also get a :ref:`distribution's version number `, list its -:ref:`constituent files `, and get a list of the distribution's -:ref:`requirements`. - - -Functional API -============== - -This package provides the following functionality via its public API. - - -.. _entry-points: - -Entry points ------------- - -The ``entry_points()`` function returns a dictionary of all entry points, -keyed by group. Entry points are represented by ``EntryPoint`` instances; -each ``EntryPoint`` has a ``.name``, ``.group``, and ``.value`` attributes and -a ``.load()`` method to resolve the value. There are also ``.module``, -``.attr``, and ``.extras`` attributes for getting the components of the -``.value`` attribute:: - - >>> eps = entry_points() - >>> list(eps) - ['console_scripts', 'distutils.commands', 'distutils.setup_keywords', 'egg_info.writers', 'setuptools.installation'] - >>> scripts = eps['console_scripts'] - >>> wheel = [ep for ep in scripts if ep.name == 'wheel'][0] - >>> wheel - EntryPoint(name='wheel', value='wheel.cli:main', group='console_scripts') - >>> wheel.module - 'wheel.cli' - >>> wheel.attr - 'main' - >>> wheel.extras - [] - >>> main = wheel.load() - >>> main - - -The ``group`` and ``name`` are arbitrary values defined by the package author -and usually a client will wish to resolve all entry points for a particular -group. Read `the setuptools docs -`_ -for more information on entry points, their definition, and usage. - - -.. _metadata: - -Distribution metadata ---------------------- - -Every distribution includes some metadata, which you can extract using the -``metadata()`` function:: - - >>> wheel_metadata = metadata('wheel') - -The keys of the returned data structure [#f1]_ name the metadata keywords, and -their values are returned unparsed from the distribution metadata:: - - >>> wheel_metadata['Requires-Python'] - '>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*' - - -.. _version: - -Distribution versions ---------------------- - -The ``version()`` function is the quickest way to get a distribution's version -number, as a string:: - - >>> version('wheel') - '0.32.3' - - -.. _files: - -Distribution files ------------------- - -You can also get the full set of files contained within a distribution. The -``files()`` function takes a distribution package name and returns all of the -files installed by this distribution. Each file object returned is a -``PackagePath``, a :class:`pathlib.Path` derived object with additional ``dist``, -``size``, and ``hash`` properties as indicated by the metadata. For example:: - - >>> util = [p for p in files('wheel') if 'util.py' in str(p)][0] - >>> util - PackagePath('wheel/util.py') - >>> util.size - 859 - >>> util.dist - - >>> util.hash - - -Once you have the file, you can also read its contents:: - - >>> print(util.read_text()) - import base64 - import sys - ... 
- def as_bytes(s): - if isinstance(s, text_type): - return s.encode('utf-8') - return s - -In the case where the metadata file listing files -(RECORD or SOURCES.txt) is missing, ``files()`` will -return ``None``. The caller may wish to wrap calls to -``files()`` in `always_iterable -`_ -or otherwise guard against this condition if the target -distribution is not known to have the metadata present. - -.. _requirements: - -Distribution requirements -------------------------- - -To get the full set of requirements for a distribution, use the ``requires()`` -function:: - - >>> requires('wheel') - ["pytest (>=3.0.0) ; extra == 'test'", "pytest-cov ; extra == 'test'"] - - -Distributions -============= - -While the above API is the most common and convenient usage, you can get all -of that information from the ``Distribution`` class. A ``Distribution`` is an -abstract object that represents the metadata for a Python package. You can -get the ``Distribution`` instance:: - - >>> from importlib_metadata import distribution - >>> dist = distribution('wheel') - -Thus, an alternative way to get the version number is through the -``Distribution`` instance:: - - >>> dist.version - '0.32.3' - -There are all kinds of additional metadata available on the ``Distribution`` -instance:: - - >>> d.metadata['Requires-Python'] - '>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*' - >>> d.metadata['License'] - 'MIT' - -The full set of available metadata is not described here. See :pep:`566` -for additional details. - - -Extending the search algorithm -============================== - -Because package metadata is not available through :data:`sys.path` searches, or -package loaders directly, the metadata for a package is found through import -system `finders`_. To find a distribution package's metadata, -``importlib.metadata`` queries the list of :term:`meta path finders ` on -:data:`sys.meta_path`. - -By default ``importlib_metadata`` installs a finder for distribution packages -found on the file system. This finder doesn't actually find any *packages*, -but it can find the packages' metadata. - -The abstract class :py:class:`importlib.abc.MetaPathFinder` defines the -interface expected of finders by Python's import system. -``importlib_metadata`` extends this protocol by looking for an optional -``find_distributions`` callable on the finders from -:data:`sys.meta_path` and presents this extended interface as the -``DistributionFinder`` abstract base class, which defines this abstract -method:: - - @abc.abstractmethod - def find_distributions(context=DistributionFinder.Context()): - """Return an iterable of all Distribution instances capable of - loading the metadata for packages for the indicated ``context``. - """ - -The ``DistributionFinder.Context`` object provides ``.path`` and ``.name`` -properties indicating the path to search and name to match and may -supply other relevant context. - -What this means in practice is that to support finding distribution package -metadata in locations other than the file system, subclass -``Distribution`` and implement the abstract methods. Then from -a custom finder, return instances of this derived ``Distribution`` in the -``find_distributions()`` method. - - -.. _`entry point API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#entry-points -.. _`metadata API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#metadata-api -.. _`finders`: https://docs.python.org/3/reference/import.html#finders-and-loaders - - -.. rubric:: Footnotes - -.. 
[#f1] Technically, the returned distribution metadata object is an - :class:`email.message.EmailMessage` - instance, but this is an implementation detail, and not part of the - stable API. You should only use dictionary-like methods and syntax - to access the metadata contents. diff --git a/vendor/poetry-core/poetry/core/_vendor/attr/py.typed b/vendor/importlib_metadata/importlib_metadata/py.typed similarity index 100% rename from vendor/poetry-core/poetry/core/_vendor/attr/py.typed rename to vendor/importlib_metadata/importlib_metadata/py.typed diff --git a/vendor/importlib_metadata/importlib_metadata/tests/fixtures.py b/vendor/importlib_metadata/importlib_metadata/tests/fixtures.py deleted file mode 100644 index 20982fa1..00000000 --- a/vendor/importlib_metadata/importlib_metadata/tests/fixtures.py +++ /dev/null @@ -1,232 +0,0 @@ -from __future__ import unicode_literals - -import os -import sys -import shutil -import tempfile -import textwrap -import test.support - -from .._compat import pathlib, contextlib - - -__metaclass__ = type - - -@contextlib.contextmanager -def tempdir(): - tmpdir = tempfile.mkdtemp() - try: - yield pathlib.Path(tmpdir) - finally: - shutil.rmtree(tmpdir) - - -@contextlib.contextmanager -def save_cwd(): - orig = os.getcwd() - try: - yield - finally: - os.chdir(orig) - - -@contextlib.contextmanager -def tempdir_as_cwd(): - with tempdir() as tmp: - with save_cwd(): - os.chdir(str(tmp)) - yield tmp - - -@contextlib.contextmanager -def install_finder(finder): - sys.meta_path.append(finder) - try: - yield - finally: - sys.meta_path.remove(finder) - - -class Fixtures: - def setUp(self): - self.fixtures = contextlib.ExitStack() - self.addCleanup(self.fixtures.close) - - -class SiteDir(Fixtures): - def setUp(self): - super(SiteDir, self).setUp() - self.site_dir = self.fixtures.enter_context(tempdir()) - - -class OnSysPath(Fixtures): - @staticmethod - @contextlib.contextmanager - def add_sys_path(dir): - sys.path[:0] = [str(dir)] - try: - yield - finally: - sys.path.remove(str(dir)) - - def setUp(self): - super(OnSysPath, self).setUp() - self.fixtures.enter_context(self.add_sys_path(self.site_dir)) - - -class DistInfoPkg(OnSysPath, SiteDir): - files = { - "distinfo_pkg-1.0.0.dist-info": { - "METADATA": """ - Name: distinfo-pkg - Author: Steven Ma - Version: 1.0.0 - Requires-Dist: wheel >= 1.0 - Requires-Dist: pytest; extra == 'test' - """, - "RECORD": "mod.py,sha256=abc,20\n", - "entry_points.txt": """ - [entries] - main = mod:main - ns:sub = mod:main - """ - }, - "mod.py": """ - def main(): - print("hello world") - """, - } - - def setUp(self): - super(DistInfoPkg, self).setUp() - build_files(DistInfoPkg.files, self.site_dir) - - -class DistInfoPkgOffPath(SiteDir): - def setUp(self): - super(DistInfoPkgOffPath, self).setUp() - build_files(DistInfoPkg.files, self.site_dir) - - -class EggInfoPkg(OnSysPath, SiteDir): - files = { - "egginfo_pkg.egg-info": { - "PKG-INFO": """ - Name: egginfo-pkg - Author: Steven Ma - License: Unknown - Version: 1.0.0 - Classifier: Intended Audience :: Developers - Classifier: Topic :: Software Development :: Libraries - """, - "SOURCES.txt": """ - mod.py - egginfo_pkg.egg-info/top_level.txt - """, - "entry_points.txt": """ - [entries] - main = mod:main - """, - "requires.txt": """ - wheel >= 1.0; python_version >= "2.7" - [test] - pytest - """, - "top_level.txt": "mod\n" - }, - "mod.py": """ - def main(): - print("hello world") - """, - } - - def setUp(self): - super(EggInfoPkg, self).setUp() - build_files(EggInfoPkg.files, 
prefix=self.site_dir) - - -class EggInfoFile(OnSysPath, SiteDir): - files = { - "egginfo_file.egg-info": """ - Metadata-Version: 1.0 - Name: egginfo_file - Version: 0.1 - Summary: An example package - Home-page: www.example.com - Author: Eric Haffa-Vee - Author-email: eric@example.coms - License: UNKNOWN - Description: UNKNOWN - Platform: UNKNOWN - """, - } - - def setUp(self): - super(EggInfoFile, self).setUp() - build_files(EggInfoFile.files, prefix=self.site_dir) - - -class LocalPackage: - files = { - "setup.py": """ - import setuptools - setuptools.setup(name="local-pkg", version="2.0.1") - """, - } - - def setUp(self): - self.fixtures = contextlib.ExitStack() - self.addCleanup(self.fixtures.close) - self.fixtures.enter_context(tempdir_as_cwd()) - build_files(self.files) - - -def build_files(file_defs, prefix=pathlib.Path()): - """Build a set of files/directories, as described by the - - file_defs dictionary. Each key/value pair in the dictionary is - interpreted as a filename/contents pair. If the contents value is a - dictionary, a directory is created, and the dictionary interpreted - as the files within it, recursively. - - For example: - - {"README.txt": "A README file", - "foo": { - "__init__.py": "", - "bar": { - "__init__.py": "", - }, - "baz.py": "# Some code", - } - } - """ - for name, contents in file_defs.items(): - full_name = prefix / name - if isinstance(contents, dict): - full_name.mkdir() - build_files(contents, prefix=full_name) - else: - if isinstance(contents, bytes): - with full_name.open('wb') as f: - f.write(contents) - else: - with full_name.open('w') as f: - f.write(DALS(contents)) - - -class FileBuilder: - def unicode_filename(self): - return test.support.FS_NONASCII or \ - self.skip("File system does not support non-ascii.") - - -def DALS(str): - "Dedent and left-strip" - return textwrap.dedent(str).lstrip() - - -class NullFinder: - def find_module(self, name): - pass diff --git a/vendor/importlib_metadata/importlib_metadata/tests/test_api.py b/vendor/importlib_metadata/importlib_metadata/tests/test_api.py deleted file mode 100644 index aa346ddb..00000000 --- a/vendor/importlib_metadata/importlib_metadata/tests/test_api.py +++ /dev/null @@ -1,176 +0,0 @@ -import re -import textwrap -import unittest - -from . import fixtures -from .. import ( - Distribution, PackageNotFoundError, __version__, distribution, - entry_points, files, metadata, requires, version, - ) - -try: - from collections.abc import Iterator -except ImportError: - from collections import Iterator # noqa: F401 - -try: - from builtins import str as text -except ImportError: - from __builtin__ import unicode as text - - -class APITests( - fixtures.EggInfoPkg, - fixtures.DistInfoPkg, - fixtures.EggInfoFile, - unittest.TestCase): - - version_pattern = r'\d+\.\d+(\.\d)?' 
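Note that the deletion of ``importlib_metadata/tests/`` here is a relocation, not a loss of coverage: later hunks in this diff rename ``importlib_metadata/tests/__init__.py`` and the example wheels into a top-level ``tests/`` package and add a new ``tests/fixtures.py``, matching the upstream 4.x layout. The loose ``version_pattern`` above accepts two- or three-component versions; for example::

    import re

    version_pattern = r'\d+\.\d+(\.\d)?'
    assert re.match(version_pattern, '1.0')
    assert re.match(version_pattern, '4.12.0')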
- - def test_retrieves_version_of_self(self): - pkg_version = version('egginfo-pkg') - assert isinstance(pkg_version, text) - assert re.match(self.version_pattern, pkg_version) - - def test_retrieves_version_of_distinfo_pkg(self): - pkg_version = version('distinfo-pkg') - assert isinstance(pkg_version, text) - assert re.match(self.version_pattern, pkg_version) - - def test_for_name_does_not_exist(self): - with self.assertRaises(PackageNotFoundError): - distribution('does-not-exist') - - def test_for_top_level(self): - self.assertEqual( - distribution('egginfo-pkg').read_text('top_level.txt').strip(), - 'mod') - - def test_read_text(self): - top_level = [ - path for path in files('egginfo-pkg') - if path.name == 'top_level.txt' - ][0] - self.assertEqual(top_level.read_text(), 'mod\n') - - def test_entry_points(self): - entries = dict(entry_points()['entries']) - ep = entries['main'] - self.assertEqual(ep.value, 'mod:main') - self.assertEqual(ep.extras, []) - - def test_metadata_for_this_package(self): - md = metadata('egginfo-pkg') - assert md['author'] == 'Steven Ma' - assert md['LICENSE'] == 'Unknown' - assert md['Name'] == 'egginfo-pkg' - classifiers = md.get_all('Classifier') - assert 'Topic :: Software Development :: Libraries' in classifiers - - def test_importlib_metadata_version(self): - assert re.match(self.version_pattern, __version__) - - @staticmethod - def _test_files(files): - root = files[0].root - for file in files: - assert file.root == root - assert not file.hash or file.hash.value - assert not file.hash or file.hash.mode == 'sha256' - assert not file.size or file.size >= 0 - assert file.locate().exists() - assert isinstance(file.read_binary(), bytes) - if file.name.endswith('.py'): - file.read_text() - - def test_file_hash_repr(self): - try: - assertRegex = self.assertRegex - except AttributeError: - # Python 2 - assertRegex = self.assertRegexpMatches - - util = [ - p for p in files('distinfo-pkg') - if p.name == 'mod.py' - ][0] - assertRegex( - repr(util.hash), - '') - - def test_files_dist_info(self): - self._test_files(files('distinfo-pkg')) - - def test_files_egg_info(self): - self._test_files(files('egginfo-pkg')) - - def test_version_egg_info_file(self): - self.assertEqual(version('egginfo-file'), '0.1') - - def test_requires_egg_info_file(self): - requirements = requires('egginfo-file') - self.assertIsNone(requirements) - - def test_requires_egg_info(self): - deps = requires('egginfo-pkg') - assert len(deps) == 2 - assert any( - dep == 'wheel >= 1.0; python_version >= "2.7"' - for dep in deps - ) - - def test_requires_dist_info(self): - deps = requires('distinfo-pkg') - assert len(deps) == 2 - assert all(deps) - assert 'wheel >= 1.0' in deps - assert "pytest; extra == 'test'" in deps - - def test_more_complex_deps_requires_text(self): - requires = textwrap.dedent(""" - dep1 - dep2 - - [:python_version < "3"] - dep3 - - [extra1] - dep4 - - [extra2:python_version < "3"] - dep5 - """) - deps = sorted(Distribution._deps_from_requires_text(requires)) - expected = [ - 'dep1', - 'dep2', - 'dep3; python_version < "3"', - 'dep4; extra == "extra1"', - 'dep5; (python_version < "3") and extra == "extra2"', - ] - # It's important that the environment marker expression be - # wrapped in parentheses to avoid the following 'and' binding more - # tightly than some other part of the environment expression. 
- - assert deps == expected - - -class OffSysPathTests(fixtures.DistInfoPkgOffPath, unittest.TestCase): - def test_find_distributions_specified_path(self): - dists = Distribution.discover(path=[str(self.site_dir)]) - assert any( - dist.metadata['Name'] == 'distinfo-pkg' - for dist in dists - ) - - def test_distribution_at_pathlib(self): - """Demonstrate how to load metadata direct from a directory. - """ - dist_info_path = self.site_dir / 'distinfo_pkg-1.0.0.dist-info' - dist = Distribution.at(dist_info_path) - assert dist.version == '1.0.0' - - def test_distribution_at_str(self): - dist_info_path = self.site_dir / 'distinfo_pkg-1.0.0.dist-info' - dist = Distribution.at(str(dist_info_path)) - assert dist.version == '1.0.0' diff --git a/vendor/importlib_metadata/importlib_metadata/tests/test_integration.py b/vendor/importlib_metadata/importlib_metadata/tests/test_integration.py deleted file mode 100644 index cbb940bd..00000000 --- a/vendor/importlib_metadata/importlib_metadata/tests/test_integration.py +++ /dev/null @@ -1,54 +0,0 @@ -# coding: utf-8 - -from __future__ import unicode_literals - -import unittest -import packaging.requirements -import packaging.version - -from . import fixtures -from .. import ( - Distribution, - _compat, - version, - ) - - -class IntegrationTests(fixtures.DistInfoPkg, unittest.TestCase): - - def test_package_spec_installed(self): - """ - Illustrate the recommended procedure to determine if - a specified version of a package is installed. - """ - def is_installed(package_spec): - req = packaging.requirements.Requirement(package_spec) - return version(req.name) in req.specifier - - assert is_installed('distinfo-pkg==1.0') - assert is_installed('distinfo-pkg>=1.0,<2.0') - assert not is_installed('distinfo-pkg<1.0') - - -class FinderTests(fixtures.Fixtures, unittest.TestCase): - - def test_finder_without_module(self): - class ModuleFreeFinder(fixtures.NullFinder): - """ - A finder without an __module__ attribute - """ - def __getattribute__(self, name): - if name == '__module__': - raise AttributeError(name) - return super().__getattribute__(name) - - self.fixtures.enter_context( - fixtures.install_finder(ModuleFreeFinder())) - _compat.disable_stdlib_finder() - - -class LocalProjectTests(fixtures.LocalPackage, unittest.TestCase): - def test_find_local(self): - dist = Distribution._local() - assert dist.metadata['Name'] == 'local-pkg' - assert dist.version == '2.0.1' diff --git a/vendor/importlib_metadata/importlib_metadata/tests/test_main.py b/vendor/importlib_metadata/importlib_metadata/tests/test_main.py deleted file mode 100644 index 4ffdd5d6..00000000 --- a/vendor/importlib_metadata/importlib_metadata/tests/test_main.py +++ /dev/null @@ -1,285 +0,0 @@ -# coding: utf-8 -from __future__ import unicode_literals - -import re -import json -import pickle -import textwrap -import unittest -import importlib -import importlib_metadata -import pyfakefs.fake_filesystem_unittest as ffs - -from . import fixtures -from .. import ( - Distribution, EntryPoint, MetadataPathFinder, - PackageNotFoundError, distributions, - entry_points, metadata, version, - ) - -try: - from builtins import str as text -except ImportError: - from __builtin__ import unicode as text - - -class BasicTests(fixtures.DistInfoPkg, unittest.TestCase): - version_pattern = r'\d+\.\d+(\.\d)?' 
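The lookups exercised in the deleted tests below remain the core ``Distribution`` API in the 4.x vendored copy. A quick sketch of the same checks outside the test harness, assuming a distribution named ``wheel`` is installed::

    from importlib_metadata import Distribution, PackageNotFoundError

    dist = Distribution.from_name('wheel')
    print(dist.version)

    try:
        Distribution.from_name('does-not-exist')
    except PackageNotFoundError as exc:
        print(exc)  # the message mentions missing package metadata (see #124)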
- - def test_retrieves_version_of_self(self): - dist = Distribution.from_name('distinfo-pkg') - assert isinstance(dist.version, text) - assert re.match(self.version_pattern, dist.version) - - def test_for_name_does_not_exist(self): - with self.assertRaises(PackageNotFoundError): - Distribution.from_name('does-not-exist') - - def test_package_not_found_mentions_metadata(self): - """ - When a package is not found, that could indicate that the - packgae is not installed or that it is installed without - metadata. Ensure the exception mentions metadata to help - guide users toward the cause. See #124. - """ - with self.assertRaises(PackageNotFoundError) as ctx: - Distribution.from_name('does-not-exist') - - assert "metadata" in str(ctx.exception) - - def test_new_style_classes(self): - self.assertIsInstance(Distribution, type) - self.assertIsInstance(MetadataPathFinder, type) - - -class ImportTests(fixtures.DistInfoPkg, unittest.TestCase): - def test_import_nonexistent_module(self): - # Ensure that the MetadataPathFinder does not crash an import of a - # non-existent module. - with self.assertRaises(ImportError): - importlib.import_module('does_not_exist') - - def test_resolve(self): - entries = dict(entry_points()['entries']) - ep = entries['main'] - self.assertEqual(ep.load().__name__, "main") - - def test_entrypoint_with_colon_in_name(self): - entries = dict(entry_points()['entries']) - ep = entries['ns:sub'] - self.assertEqual(ep.value, 'mod:main') - - def test_resolve_without_attr(self): - ep = EntryPoint( - name='ep', - value='importlib_metadata', - group='grp', - ) - assert ep.load() is importlib_metadata - - -class NameNormalizationTests( - fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase): - @staticmethod - def pkg_with_dashes(site_dir): - """ - Create minimal metadata for a package with dashes - in the name (and thus underscores in the filename). - """ - metadata_dir = site_dir / 'my_pkg.dist-info' - metadata_dir.mkdir() - metadata = metadata_dir / 'METADATA' - with metadata.open('w') as strm: - strm.write('Version: 1.0\n') - return 'my-pkg' - - def test_dashes_in_dist_name_found_as_underscores(self): - """ - For a package with a dash in the name, the dist-info metadata - uses underscores in the name. Ensure the metadata loads. - """ - pkg_name = self.pkg_with_dashes(self.site_dir) - assert version(pkg_name) == '1.0' - - @staticmethod - def pkg_with_mixed_case(site_dir): - """ - Create minimal metadata for a package with mixed case - in the name. - """ - metadata_dir = site_dir / 'CherryPy.dist-info' - metadata_dir.mkdir() - metadata = metadata_dir / 'METADATA' - with metadata.open('w') as strm: - strm.write('Version: 1.0\n') - return 'CherryPy' - - def test_dist_name_found_as_any_case(self): - """ - Ensure the metadata loads when queried with any case. - """ - pkg_name = self.pkg_with_mixed_case(self.site_dir) - assert version(pkg_name) == '1.0' - assert version(pkg_name.lower()) == '1.0' - assert version(pkg_name.upper()) == '1.0' - - -class NonASCIITests(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase): - @staticmethod - def pkg_with_non_ascii_description(site_dir): - """ - Create minimal metadata for a package with non-ASCII in - the description. 
- """ - metadata_dir = site_dir / 'portend.dist-info' - metadata_dir.mkdir() - metadata = metadata_dir / 'METADATA' - with metadata.open('w', encoding='utf-8') as fp: - fp.write('Description: pôrˈtend\n') - return 'portend' - - @staticmethod - def pkg_with_non_ascii_description_egg_info(site_dir): - """ - Create minimal metadata for an egg-info package with - non-ASCII in the description. - """ - metadata_dir = site_dir / 'portend.dist-info' - metadata_dir.mkdir() - metadata = metadata_dir / 'METADATA' - with metadata.open('w', encoding='utf-8') as fp: - fp.write(textwrap.dedent(""" - Name: portend - - pôrˈtend - """).lstrip()) - return 'portend' - - def test_metadata_loads(self): - pkg_name = self.pkg_with_non_ascii_description(self.site_dir) - meta = metadata(pkg_name) - assert meta['Description'] == 'pôrˈtend' - - def test_metadata_loads_egg_info(self): - pkg_name = self.pkg_with_non_ascii_description_egg_info(self.site_dir) - meta = metadata(pkg_name) - assert meta.get_payload() == 'pôrˈtend\n' - - -class DiscoveryTests(fixtures.EggInfoPkg, - fixtures.DistInfoPkg, - unittest.TestCase): - - def test_package_discovery(self): - dists = list(distributions()) - assert all( - isinstance(dist, Distribution) - for dist in dists - ) - assert any( - dist.metadata['Name'] == 'egginfo-pkg' - for dist in dists - ) - assert any( - dist.metadata['Name'] == 'distinfo-pkg' - for dist in dists - ) - - def test_invalid_usage(self): - with self.assertRaises(ValueError): - list(distributions(context='something', name='else')) - - -class DirectoryTest(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase): - def test_egg_info(self): - # make an `EGG-INFO` directory that's unrelated - self.site_dir.joinpath('EGG-INFO').mkdir() - # used to crash with `IsADirectoryError` - with self.assertRaises(PackageNotFoundError): - version('unknown-package') - - def test_egg(self): - egg = self.site_dir.joinpath('foo-3.6.egg') - egg.mkdir() - with self.add_sys_path(egg): - with self.assertRaises(PackageNotFoundError): - version('foo') - - -class MissingSysPath(fixtures.OnSysPath, unittest.TestCase): - site_dir = '/does-not-exist' - - def test_discovery(self): - """ - Discovering distributions should succeed even if - there is an invalid path on sys.path. - """ - importlib_metadata.distributions() - - -class InaccessibleSysPath(fixtures.OnSysPath, ffs.TestCase): - site_dir = '/access-denied' - - def setUp(self): - super(InaccessibleSysPath, self).setUp() - self.setUpPyfakefs() - self.fs.create_dir(self.site_dir, perm_bits=000) - - def test_discovery(self): - """ - Discovering distributions should succeed even if - there is an invalid path on sys.path. 
- """ - list(importlib_metadata.distributions()) - - -class TestEntryPoints(unittest.TestCase): - def __init__(self, *args): - super(TestEntryPoints, self).__init__(*args) - self.ep = importlib_metadata.EntryPoint('name', 'value', 'group') - - def test_entry_point_pickleable(self): - revived = pickle.loads(pickle.dumps(self.ep)) - assert revived == self.ep - - def test_immutable(self): - """EntryPoints should be immutable""" - with self.assertRaises(AttributeError): - self.ep.name = 'badactor' - - def test_repr(self): - assert 'EntryPoint' in repr(self.ep) - assert 'name=' in repr(self.ep) - assert "'name'" in repr(self.ep) - - def test_hashable(self): - """EntryPoints should be hashable""" - hash(self.ep) - - def test_json_dump(self): - """ - json should not expect to be able to dump an EntryPoint - """ - with self.assertRaises(Exception): - json.dumps(self.ep) - - def test_module(self): - assert self.ep.module == 'value' - - def test_attr(self): - assert self.ep.attr is None - - -class FileSystem( - fixtures.OnSysPath, fixtures.SiteDir, fixtures.FileBuilder, - unittest.TestCase): - def test_unicode_dir_on_sys_path(self): - """ - Ensure a Unicode subdirectory of a directory on sys.path - does not crash. - """ - fixtures.build_files( - {self.unicode_filename(): {}}, - prefix=self.site_dir, - ) - list(distributions()) diff --git a/vendor/importlib_metadata/importlib_metadata/tests/test_zip.py b/vendor/importlib_metadata/importlib_metadata/tests/test_zip.py deleted file mode 100644 index 4aae933d..00000000 --- a/vendor/importlib_metadata/importlib_metadata/tests/test_zip.py +++ /dev/null @@ -1,80 +0,0 @@ -import sys -import unittest - -from .. import ( - distribution, entry_points, files, PackageNotFoundError, - version, distributions, - ) - -try: - from importlib import resources - getattr(resources, 'files') - getattr(resources, 'as_file') -except (ImportError, AttributeError): - import importlib_resources as resources - -try: - from contextlib import ExitStack -except ImportError: - from contextlib2 import ExitStack - - -class TestZip(unittest.TestCase): - root = 'importlib_metadata.tests.data' - - def _fixture_on_path(self, filename): - pkg_file = resources.files(self.root).joinpath(filename) - file = self.resources.enter_context(resources.as_file(pkg_file)) - assert file.name.startswith('example-'), file.name - sys.path.insert(0, str(file)) - self.resources.callback(sys.path.pop, 0) - - def setUp(self): - # Find the path to the example-*.whl so we can add it to the front of - # sys.path, where we'll then try to find the metadata thereof. 
- self.resources = ExitStack() - self.addCleanup(self.resources.close) - self._fixture_on_path('example-21.12-py3-none-any.whl') - - def test_zip_version(self): - self.assertEqual(version('example'), '21.12') - - def test_zip_version_does_not_match(self): - with self.assertRaises(PackageNotFoundError): - version('definitely-not-installed') - - def test_zip_entry_points(self): - scripts = dict(entry_points()['console_scripts']) - entry_point = scripts['example'] - self.assertEqual(entry_point.value, 'example:main') - entry_point = scripts['Example'] - self.assertEqual(entry_point.value, 'example:main') - - def test_missing_metadata(self): - self.assertIsNone(distribution('example').read_text('does not exist')) - - def test_case_insensitive(self): - self.assertEqual(version('Example'), '21.12') - - def test_files(self): - for file in files('example'): - path = str(file.dist.locate_file(file)) - assert '.whl/' in path, path - - def test_one_distribution(self): - dists = list(distributions(path=sys.path[:1])) - assert len(dists) == 1 - - -class TestEgg(TestZip): - def setUp(self): - # Find the path to the example-*.egg so we can add it to the front of - # sys.path, where we'll then try to find the metadata thereof. - self.resources = ExitStack() - self.addCleanup(self.resources.close) - self._fixture_on_path('example-21.12-py3.6.egg') - - def test_files(self): - for file in files('example'): - path = str(file.dist.locate_file(file)) - assert '.egg/' in path, path diff --git a/vendor/importlib_metadata/mypy.ini b/vendor/importlib_metadata/mypy.ini new file mode 100644 index 00000000..976ba029 --- /dev/null +++ b/vendor/importlib_metadata/mypy.ini @@ -0,0 +1,2 @@ +[mypy] +ignore_missing_imports = True diff --git a/vendor/importlib_metadata/prepare/example/setup.py b/vendor/importlib_metadata/prepare/example/setup.py index 8663ad38..479488a0 100644 --- a/vendor/importlib_metadata/prepare/example/setup.py +++ b/vendor/importlib_metadata/prepare/example/setup.py @@ -1,4 +1,5 @@ from setuptools import setup + setup( name='example', version='21.12', @@ -6,5 +7,5 @@ packages=['example'], entry_points={ 'console_scripts': ['example = example:main', 'Example=example:main'], - }, - ) + }, +) diff --git a/vendor/importlib_metadata/prepare/example2/example2/__init__.py b/vendor/importlib_metadata/prepare/example2/example2/__init__.py new file mode 100644 index 00000000..de645c2e --- /dev/null +++ b/vendor/importlib_metadata/prepare/example2/example2/__init__.py @@ -0,0 +1,2 @@ +def main(): + return "example" diff --git a/vendor/importlib_metadata/prepare/example2/pyproject.toml b/vendor/importlib_metadata/prepare/example2/pyproject.toml new file mode 100644 index 00000000..011f4751 --- /dev/null +++ b/vendor/importlib_metadata/prepare/example2/pyproject.toml @@ -0,0 +1,10 @@ +[build-system] +build-backend = 'trampolim' +requires = ['trampolim'] + +[project] +name = 'example2' +version = '1.0.0' + +[project.scripts] +example = 'example2:main' diff --git a/vendor/importlib_metadata/pyproject.toml b/vendor/importlib_metadata/pyproject.toml index e5c3a6a4..b240064d 100644 --- a/vendor/importlib_metadata/pyproject.toml +++ b/vendor/importlib_metadata/pyproject.toml @@ -1,2 +1,18 @@ [build-system] -requires = ["setuptools>=30.3", "wheel", "setuptools_scm"] +requires = ["setuptools>=56"] +build-backend = "setuptools.build_meta" + +[tool.black] +skip-string-normalization = true + +[tool.pytest-enabler.black] +addopts = "--black" + +[tool.pytest-enabler.mypy] +addopts = "--mypy" + +[tool.pytest-enabler.flake8] 
+addopts = "--flake8" + +[tool.pytest-enabler.cov] +addopts = "--cov" diff --git a/vendor/importlib_metadata/pytest.ini b/vendor/importlib_metadata/pytest.ini new file mode 100644 index 00000000..80e98cc9 --- /dev/null +++ b/vendor/importlib_metadata/pytest.ini @@ -0,0 +1,17 @@ +[pytest] +norecursedirs=dist build .tox .eggs +addopts=--doctest-modules +doctest_optionflags=ALLOW_UNICODE ELLIPSIS +filterwarnings= + # Suppress deprecation warning in flake8 + ignore:SelectableGroups dict interface is deprecated::flake8 + + # shopkeep/pytest-black#55 + ignore: is not using a cooperative constructor:pytest.PytestDeprecationWarning + ignore:The \(fspath. py.path.local\) argument to BlackItem is deprecated.:pytest.PytestDeprecationWarning + ignore:BlackItem is an Item subclass and should not be a collector:pytest.PytestWarning + + # tholo/pytest-flake8#83 + ignore: is not using a cooperative constructor:pytest.PytestDeprecationWarning + ignore:The \(fspath. py.path.local\) argument to Flake8Item is deprecated.:pytest.PytestDeprecationWarning + ignore:Flake8Item is an Item subclass and should not be a collector:pytest.PytestWarning diff --git a/vendor/importlib_metadata/setup.cfg b/vendor/importlib_metadata/setup.cfg index d989cd0f..062f10b4 100644 --- a/vendor/importlib_metadata/setup.cfg +++ b/vendor/importlib_metadata/setup.cfg @@ -1,60 +1,65 @@ [metadata] name = importlib_metadata -author = Barry Warsaw -author_email = barry@python.org -url = http://importlib-metadata.readthedocs.io/ +version = 4.12.0 +author = Jason R. Coombs +author_email = jaraco@jaraco.com description = Read metadata from Python packages -long_description = file: README.rst -license = Apache Software License +long_description = file:README.rst +url = https://github.com/python/importlib_metadata classifiers = - Development Status :: 3 - Alpha - Intended Audience :: Developers - License :: OSI Approved :: Apache Software License - Topic :: Software Development :: Libraries - Programming Language :: Python :: 3 - Programming Language :: Python :: 2 + Development Status :: 5 - Production/Stable + Intended Audience :: Developers + License :: OSI Approved :: Apache Software License + Programming Language :: Python :: 3 + Programming Language :: Python :: 3 :: Only [options] -python_requires = >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.* +packages = find_namespace: +include_package_data = true +python_requires = >=3.7 install_requires = - zipp>=0.5 - pathlib2; python_version < '3' - contextlib2; python_version < '3' - configparser>=3.5; python_version < '3' -packages = find: - -[options.package_data] -* = *.zip, *.file, *.txt, *.toml -importlib_metadata = - docs/* - docs/_static/* -importlib_metadata.tests.data = - *.egg - *.whl - -[mypy] -ignore_missing_imports = True -# XXX We really should use the default `True` value here, but it causes too -# many warnings, so for now just disable it. E.g. a package's __spec__ is -# defined as Optional[ModuleSpec] so we can't just blindly pull attributes off -# of that attribute. The real fix is to add conditionals or asserts proving -# that package.__spec__ is not None. 
-strict_optional = False - -[mypy-importlib_metadata.docs.*] -ignore_errors: True - -[mypy-importlib_metadata.tests.*] -ignore_errors: True - -[wheel] -universal=1 + zipp>=0.5 + typing-extensions>=3.6.4; python_version < "3.8" + +[options.packages.find] +exclude = + build* + dist* + docs* + tests* + prepare* [options.extras_require] testing = - importlib_resources>=1.3; python_version < "3.9" - packaging - pep517 + # upstream + pytest >= 6 + pytest-checkdocs >= 2.4 + pytest-flake8 + pytest-black >= 0.3.7; \ + # workaround for jaraco/skeleton#22 + python_implementation != "PyPy" + pytest-cov + pytest-mypy >= 0.9.1; \ + # workaround for jaraco/skeleton#22 + python_implementation != "PyPy" + pytest-enabler >= 1.3 + + # local + importlib_resources>=1.3; python_version < "3.9" + packaging + pyfakefs + flufl.flake8 + pytest-perf >= 0.9.2 + docs = - sphinx - rst.linker + # upstream + sphinx + jaraco.packaging >= 9 + rst.linker >= 1.9 + + # local + +perf = + ipython + +[options.entry_points] diff --git a/vendor/importlib_metadata/setup.py b/vendor/importlib_metadata/setup.py deleted file mode 100644 index 0c9823c5..00000000 --- a/vendor/importlib_metadata/setup.py +++ /dev/null @@ -1,3 +0,0 @@ -from setuptools import setup - -setup(use_scm_version=False) diff --git a/vendor/importlib_metadata/importlib_metadata/tests/__init__.py b/vendor/importlib_metadata/tests/__init__.py similarity index 100% rename from vendor/importlib_metadata/importlib_metadata/tests/__init__.py rename to vendor/importlib_metadata/tests/__init__.py diff --git a/vendor/importlib_metadata/importlib_metadata/tests/data/__init__.py b/vendor/importlib_metadata/tests/data/__init__.py similarity index 100% rename from vendor/importlib_metadata/importlib_metadata/tests/data/__init__.py rename to vendor/importlib_metadata/tests/data/__init__.py diff --git a/vendor/importlib_metadata/importlib_metadata/tests/data/example-21.12-py3-none-any.whl b/vendor/importlib_metadata/tests/data/example-21.12-py3-none-any.whl similarity index 100% rename from vendor/importlib_metadata/importlib_metadata/tests/data/example-21.12-py3-none-any.whl rename to vendor/importlib_metadata/tests/data/example-21.12-py3-none-any.whl diff --git a/vendor/importlib_metadata/importlib_metadata/tests/data/example-21.12-py3.6.egg b/vendor/importlib_metadata/tests/data/example-21.12-py3.6.egg similarity index 100% rename from vendor/importlib_metadata/importlib_metadata/tests/data/example-21.12-py3.6.egg rename to vendor/importlib_metadata/tests/data/example-21.12-py3.6.egg diff --git a/vendor/importlib_metadata/tests/data/example2-1.0.0-py3-none-any.whl b/vendor/importlib_metadata/tests/data/example2-1.0.0-py3-none-any.whl new file mode 100644 index 00000000..5ca93657 Binary files /dev/null and b/vendor/importlib_metadata/tests/data/example2-1.0.0-py3-none-any.whl differ diff --git a/vendor/importlib_metadata/tests/fixtures.py b/vendor/importlib_metadata/tests/fixtures.py new file mode 100644 index 00000000..6d9a9d2b --- /dev/null +++ b/vendor/importlib_metadata/tests/fixtures.py @@ -0,0 +1,312 @@ +import os +import sys +import copy +import shutil +import pathlib +import tempfile +import textwrap +import functools +import contextlib + +from .py39compat import FS_NONASCII +from typing import Dict, Union + +try: + from importlib import resources # type: ignore + + getattr(resources, 'files') + getattr(resources, 'as_file') +except (ImportError, AttributeError): + import importlib_resources as resources # type: ignore + + +@contextlib.contextmanager +def tempdir(): + 
tmpdir = tempfile.mkdtemp() + try: + yield pathlib.Path(tmpdir) + finally: + shutil.rmtree(tmpdir) + + +@contextlib.contextmanager +def save_cwd(): + orig = os.getcwd() + try: + yield + finally: + os.chdir(orig) + + +@contextlib.contextmanager +def tempdir_as_cwd(): + with tempdir() as tmp: + with save_cwd(): + os.chdir(str(tmp)) + yield tmp + + +@contextlib.contextmanager +def install_finder(finder): + sys.meta_path.append(finder) + try: + yield + finally: + sys.meta_path.remove(finder) + + +class Fixtures: + def setUp(self): + self.fixtures = contextlib.ExitStack() + self.addCleanup(self.fixtures.close) + + +class SiteDir(Fixtures): + def setUp(self): + super().setUp() + self.site_dir = self.fixtures.enter_context(tempdir()) + + +class OnSysPath(Fixtures): + @staticmethod + @contextlib.contextmanager + def add_sys_path(dir): + sys.path[:0] = [str(dir)] + try: + yield + finally: + sys.path.remove(str(dir)) + + def setUp(self): + super().setUp() + self.fixtures.enter_context(self.add_sys_path(self.site_dir)) + + +# Except for python/mypy#731, prefer to define +# FilesDef = Dict[str, Union['FilesDef', str]] +FilesDef = Dict[str, Union[Dict[str, Union[Dict[str, str], str]], str]] + + +class DistInfoPkg(OnSysPath, SiteDir): + files: FilesDef = { + "distinfo_pkg-1.0.0.dist-info": { + "METADATA": """ + Name: distinfo-pkg + Author: Steven Ma + Version: 1.0.0 + Requires-Dist: wheel >= 1.0 + Requires-Dist: pytest; extra == 'test' + Keywords: sample package + + Once upon a time + There was a distinfo pkg + """, + "RECORD": "mod.py,sha256=abc,20\n", + "entry_points.txt": """ + [entries] + main = mod:main + ns:sub = mod:main + """, + }, + "mod.py": """ + def main(): + print("hello world") + """, + } + + def setUp(self): + super().setUp() + build_files(DistInfoPkg.files, self.site_dir) + + def make_uppercase(self): + """ + Rewrite metadata with everything uppercase. 
+ """ + shutil.rmtree(self.site_dir / "distinfo_pkg-1.0.0.dist-info") + files = copy.deepcopy(DistInfoPkg.files) + info = files["distinfo_pkg-1.0.0.dist-info"] + info["METADATA"] = info["METADATA"].upper() + build_files(files, self.site_dir) + + +class DistInfoPkgWithDot(OnSysPath, SiteDir): + files: FilesDef = { + "pkg_dot-1.0.0.dist-info": { + "METADATA": """ + Name: pkg.dot + Version: 1.0.0 + """, + }, + } + + def setUp(self): + super().setUp() + build_files(DistInfoPkgWithDot.files, self.site_dir) + + +class DistInfoPkgWithDotLegacy(OnSysPath, SiteDir): + files: FilesDef = { + "pkg.dot-1.0.0.dist-info": { + "METADATA": """ + Name: pkg.dot + Version: 1.0.0 + """, + }, + "pkg.lot.egg-info": { + "METADATA": """ + Name: pkg.lot + Version: 1.0.0 + """, + }, + } + + def setUp(self): + super().setUp() + build_files(DistInfoPkgWithDotLegacy.files, self.site_dir) + + +class DistInfoPkgOffPath(SiteDir): + def setUp(self): + super().setUp() + build_files(DistInfoPkg.files, self.site_dir) + + +class EggInfoPkg(OnSysPath, SiteDir): + files: FilesDef = { + "egginfo_pkg.egg-info": { + "PKG-INFO": """ + Name: egginfo-pkg + Author: Steven Ma + License: Unknown + Version: 1.0.0 + Classifier: Intended Audience :: Developers + Classifier: Topic :: Software Development :: Libraries + Keywords: sample package + Description: Once upon a time + There was an egginfo package + """, + "SOURCES.txt": """ + mod.py + egginfo_pkg.egg-info/top_level.txt + """, + "entry_points.txt": """ + [entries] + main = mod:main + """, + "requires.txt": """ + wheel >= 1.0; python_version >= "2.7" + [test] + pytest + """, + "top_level.txt": "mod\n", + }, + "mod.py": """ + def main(): + print("hello world") + """, + } + + def setUp(self): + super().setUp() + build_files(EggInfoPkg.files, prefix=self.site_dir) + + +class EggInfoFile(OnSysPath, SiteDir): + files: FilesDef = { + "egginfo_file.egg-info": """ + Metadata-Version: 1.0 + Name: egginfo_file + Version: 0.1 + Summary: An example package + Home-page: www.example.com + Author: Eric Haffa-Vee + Author-email: eric@example.coms + License: UNKNOWN + Description: UNKNOWN + Platform: UNKNOWN + """, + } + + def setUp(self): + super().setUp() + build_files(EggInfoFile.files, prefix=self.site_dir) + + +def build_files(file_defs, prefix=pathlib.Path()): + """Build a set of files/directories, as described by the + + file_defs dictionary. Each key/value pair in the dictionary is + interpreted as a filename/contents pair. If the contents value is a + dictionary, a directory is created, and the dictionary interpreted + as the files within it, recursively. 
+ + For example: + + {"README.txt": "A README file", + "foo": { + "__init__.py": "", + "bar": { + "__init__.py": "", + }, + "baz.py": "# Some code", + } + } + """ + for name, contents in file_defs.items(): + full_name = prefix / name + if isinstance(contents, dict): + full_name.mkdir() + build_files(contents, prefix=full_name) + else: + if isinstance(contents, bytes): + with full_name.open('wb') as f: + f.write(contents) + else: + with full_name.open('w', encoding='utf-8') as f: + f.write(DALS(contents)) + + +class FileBuilder: + def unicode_filename(self): + return FS_NONASCII or self.skip("File system does not support non-ascii.") + + +def DALS(str): + "Dedent and left-strip" + return textwrap.dedent(str).lstrip() + + +class NullFinder: + def find_module(self, name): + pass + + +class ZipFixtures: + root = 'tests.data' + + def _fixture_on_path(self, filename): + pkg_file = resources.files(self.root).joinpath(filename) + file = self.resources.enter_context(resources.as_file(pkg_file)) + assert file.name.startswith('example'), file.name + sys.path.insert(0, str(file)) + self.resources.callback(sys.path.pop, 0) + + def setUp(self): + # Add self.zip_name to the front of sys.path. + self.resources = contextlib.ExitStack() + self.addCleanup(self.resources.close) + + +def parameterize(*args_set): + """Run test method with a series of parameters.""" + + def wrapper(func): + @functools.wraps(func) + def _inner(self): + for args in args_set: + with self.subTest(**args): + func(self, **args) + + return _inner + + return wrapper diff --git a/vendor/importlib_metadata/tests/py39compat.py b/vendor/importlib_metadata/tests/py39compat.py new file mode 100644 index 00000000..926dcad9 --- /dev/null +++ b/vendor/importlib_metadata/tests/py39compat.py @@ -0,0 +1,4 @@ +try: + from test.support.os_helper import FS_NONASCII +except ImportError: + from test.support import FS_NONASCII # noqa diff --git a/vendor/importlib_metadata/tests/test_api.py b/vendor/importlib_metadata/tests/test_api.py new file mode 100644 index 00000000..3f75cebb --- /dev/null +++ b/vendor/importlib_metadata/tests/test_api.py @@ -0,0 +1,342 @@ +import re +import textwrap +import unittest +import warnings +import importlib +import contextlib + +from . import fixtures +from importlib_metadata import ( + Distribution, + PackageNotFoundError, + distribution, + entry_points, + files, + metadata, + requires, + version, +) + + +@contextlib.contextmanager +def suppress_known_deprecation(): + with warnings.catch_warnings(record=True) as ctx: + warnings.simplefilter('default', category=DeprecationWarning) + yield ctx + + +class APITests( + fixtures.EggInfoPkg, + fixtures.DistInfoPkg, + fixtures.DistInfoPkgWithDot, + fixtures.EggInfoFile, + unittest.TestCase, +): + + version_pattern = r'\d+\.\d+(\.\d)?' 
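The ``version_pattern`` class attribute above drives several of the assertions that follow. A minimal standalone sketch of what that pattern accepts (the sample version strings are invented for illustration, not taken from the test suite):

    import re

    version_pattern = r'\d+\.\d+(\.\d)?'

    # re.match anchors at the start of the string only, so two- and
    # three-component versions match, and trailing text is tolerated.
    assert re.match(version_pattern, '4.12')
    assert re.match(version_pattern, '1.0.0')
    assert re.match(version_pattern, '1.0.0rc1')  # matches the '1.0.0' prefix
    assert not re.match(version_pattern, 'v1.0')  # leading 'v' fails at position 0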
+ + def test_retrieves_version_of_self(self): + pkg_version = version('egginfo-pkg') + assert isinstance(pkg_version, str) + assert re.match(self.version_pattern, pkg_version) + + def test_retrieves_version_of_distinfo_pkg(self): + pkg_version = version('distinfo-pkg') + assert isinstance(pkg_version, str) + assert re.match(self.version_pattern, pkg_version) + + def test_for_name_does_not_exist(self): + with self.assertRaises(PackageNotFoundError): + distribution('does-not-exist') + + def test_name_normalization(self): + names = 'pkg.dot', 'pkg_dot', 'pkg-dot', 'pkg..dot', 'Pkg.Dot' + for name in names: + with self.subTest(name): + assert distribution(name).metadata['Name'] == 'pkg.dot' + + def test_prefix_not_matched(self): + prefixes = 'p', 'pkg', 'pkg.' + for prefix in prefixes: + with self.subTest(prefix): + with self.assertRaises(PackageNotFoundError): + distribution(prefix) + + def test_for_top_level(self): + self.assertEqual( + distribution('egginfo-pkg').read_text('top_level.txt').strip(), 'mod' + ) + + def test_read_text(self): + top_level = [ + path for path in files('egginfo-pkg') if path.name == 'top_level.txt' + ][0] + self.assertEqual(top_level.read_text(), 'mod\n') + + def test_entry_points(self): + eps = entry_points() + assert 'entries' in eps.groups + entries = eps.select(group='entries') + assert 'main' in entries.names + ep = entries['main'] + self.assertEqual(ep.value, 'mod:main') + self.assertEqual(ep.extras, []) + + def test_entry_points_distribution(self): + entries = entry_points(group='entries') + for entry in ("main", "ns:sub"): + ep = entries[entry] + self.assertIn(ep.dist.name, ('distinfo-pkg', 'egginfo-pkg')) + self.assertEqual(ep.dist.version, "1.0.0") + + def test_entry_points_unique_packages_normalized(self): + """ + Entry points should only be exposed for the first package + on sys.path with a given name (even when normalized). + """ + alt_site_dir = self.fixtures.enter_context(fixtures.tempdir()) + self.fixtures.enter_context(self.add_sys_path(alt_site_dir)) + alt_pkg = { + "DistInfo_pkg-1.1.0.dist-info": { + "METADATA": """ + Name: distinfo-pkg + Version: 1.1.0 + """, + "entry_points.txt": """ + [entries] + main = mod:altmain + """, + }, + } + fixtures.build_files(alt_pkg, alt_site_dir) + entries = entry_points(group='entries') + assert not any( + ep.dist.name == 'distinfo-pkg' and ep.dist.version == '1.0.0' + for ep in entries + ) + # ns:sub doesn't exist in alt_pkg + assert 'ns:sub' not in entries.names + + def test_entry_points_missing_name(self): + with self.assertRaises(KeyError): + entry_points(group='entries')['missing'] + + def test_entry_points_missing_group(self): + assert entry_points(group='missing') == () + + def test_entry_points_dict_construction(self): + """ + Prior versions of entry_points() returned simple lists and + allowed casting those lists into maps by name using ``dict()``. + Capture this now deprecated use-case. + """ + with suppress_known_deprecation() as caught: + eps = dict(entry_points(group='entries')) + + assert 'main' in eps + assert eps['main'] == entry_points(group='entries')['main'] + + # check warning + expected = next(iter(caught)) + assert expected.category is DeprecationWarning + assert "Construction of dict of EntryPoints is deprecated" in str(expected) + + def test_entry_points_by_index(self): + """ + Prior versions of Distribution.entry_points would return a + tuple that allowed access by index. + Capture this now deprecated use-case + See python/importlib_metadata#300 and bpo-44246. 
+ """ + eps = distribution('distinfo-pkg').entry_points + with suppress_known_deprecation() as caught: + eps[0] + + # check warning + expected = next(iter(caught)) + assert expected.category is DeprecationWarning + assert "Accessing entry points by index is deprecated" in str(expected) + + def test_entry_points_groups_getitem(self): + """ + Prior versions of entry_points() returned a dict. Ensure + that callers using '.__getitem__()' are supported but warned to + migrate. + """ + with suppress_known_deprecation(): + entry_points()['entries'] == entry_points(group='entries') + + with self.assertRaises(KeyError): + entry_points()['missing'] + + def test_entry_points_groups_get(self): + """ + Prior versions of entry_points() returned a dict. Ensure + that callers using '.get()' are supported but warned to + migrate. + """ + with suppress_known_deprecation(): + entry_points().get('missing', 'default') == 'default' + entry_points().get('entries', 'default') == entry_points()['entries'] + entry_points().get('missing', ()) == () + + def test_entry_points_allows_no_attributes(self): + ep = entry_points().select(group='entries', name='main') + with self.assertRaises(AttributeError): + ep.foo = 4 + + def test_metadata_for_this_package(self): + md = metadata('egginfo-pkg') + assert md['author'] == 'Steven Ma' + assert md['LICENSE'] == 'Unknown' + assert md['Name'] == 'egginfo-pkg' + classifiers = md.get_all('Classifier') + assert 'Topic :: Software Development :: Libraries' in classifiers + + def test_importlib_metadata_version(self): + resolved = version('importlib-metadata') + assert re.match(self.version_pattern, resolved) + + @staticmethod + def _test_files(files): + root = files[0].root + for file in files: + assert file.root == root + assert not file.hash or file.hash.value + assert not file.hash or file.hash.mode == 'sha256' + assert not file.size or file.size >= 0 + assert file.locate().exists() + assert isinstance(file.read_binary(), bytes) + if file.name.endswith('.py'): + file.read_text() + + def test_file_hash_repr(self): + util = [p for p in files('distinfo-pkg') if p.name == 'mod.py'][0] + self.assertRegex(repr(util.hash), '') + + def test_files_dist_info(self): + self._test_files(files('distinfo-pkg')) + + def test_files_egg_info(self): + self._test_files(files('egginfo-pkg')) + + def test_version_egg_info_file(self): + self.assertEqual(version('egginfo-file'), '0.1') + + def test_requires_egg_info_file(self): + requirements = requires('egginfo-file') + self.assertIsNone(requirements) + + def test_requires_egg_info(self): + deps = requires('egginfo-pkg') + assert len(deps) == 2 + assert any(dep == 'wheel >= 1.0; python_version >= "2.7"' for dep in deps) + + def test_requires_egg_info_empty(self): + fixtures.build_files( + { + 'requires.txt': '', + }, + self.site_dir.joinpath('egginfo_pkg.egg-info'), + ) + deps = requires('egginfo-pkg') + assert deps == [] + + def test_requires_dist_info(self): + deps = requires('distinfo-pkg') + assert len(deps) == 2 + assert all(deps) + assert 'wheel >= 1.0' in deps + assert "pytest; extra == 'test'" in deps + + def test_more_complex_deps_requires_text(self): + requires = textwrap.dedent( + """ + dep1 + dep2 + + [:python_version < "3"] + dep3 + + [extra1] + dep4 + dep6@ git+https://example.com/python/dep.git@v1.0.0 + + [extra2:python_version < "3"] + dep5 + """ + ) + deps = sorted(Distribution._deps_from_requires_text(requires)) + expected = [ + 'dep1', + 'dep2', + 'dep3; python_version < "3"', + 'dep4; extra == "extra1"', + 'dep5; (python_version < 
"3") and extra == "extra2"', + 'dep6@ git+https://example.com/python/dep.git@v1.0.0 ; extra == "extra1"', + ] + # It's important that the environment marker expression be + # wrapped in parentheses to avoid the following 'and' binding more + # tightly than some other part of the environment expression. + + assert deps == expected + + def test_as_json(self): + md = metadata('distinfo-pkg').json + assert 'name' in md + assert md['keywords'] == ['sample', 'package'] + desc = md['description'] + assert desc.startswith('Once upon a time\nThere was') + assert len(md['requires_dist']) == 2 + + def test_as_json_egg_info(self): + md = metadata('egginfo-pkg').json + assert 'name' in md + assert md['keywords'] == ['sample', 'package'] + desc = md['description'] + assert desc.startswith('Once upon a time\nThere was') + assert len(md['classifier']) == 2 + + def test_as_json_odd_case(self): + self.make_uppercase() + md = metadata('distinfo-pkg').json + assert 'name' in md + assert len(md['requires_dist']) == 2 + assert md['keywords'] == ['SAMPLE', 'PACKAGE'] + + +class LegacyDots(fixtures.DistInfoPkgWithDotLegacy, unittest.TestCase): + def test_name_normalization(self): + names = 'pkg.dot', 'pkg_dot', 'pkg-dot', 'pkg..dot', 'Pkg.Dot' + for name in names: + with self.subTest(name): + assert distribution(name).metadata['Name'] == 'pkg.dot' + + def test_name_normalization_versionless_egg_info(self): + names = 'pkg.lot', 'pkg_lot', 'pkg-lot', 'pkg..lot', 'Pkg.Lot' + for name in names: + with self.subTest(name): + assert distribution(name).metadata['Name'] == 'pkg.lot' + + +class OffSysPathTests(fixtures.DistInfoPkgOffPath, unittest.TestCase): + def test_find_distributions_specified_path(self): + dists = Distribution.discover(path=[str(self.site_dir)]) + assert any(dist.metadata['Name'] == 'distinfo-pkg' for dist in dists) + + def test_distribution_at_pathlib(self): + """Demonstrate how to load metadata direct from a directory.""" + dist_info_path = self.site_dir / 'distinfo_pkg-1.0.0.dist-info' + dist = Distribution.at(dist_info_path) + assert dist.version == '1.0.0' + + def test_distribution_at_str(self): + dist_info_path = self.site_dir / 'distinfo_pkg-1.0.0.dist-info' + dist = Distribution.at(str(dist_info_path)) + assert dist.version == '1.0.0' + + +class InvalidateCache(unittest.TestCase): + def test_invalidate_cache(self): + # No externally observable behavior, but ensures test coverage... + importlib.invalidate_caches() diff --git a/vendor/importlib_metadata/tests/test_integration.py b/vendor/importlib_metadata/tests/test_integration.py new file mode 100644 index 00000000..c382a506 --- /dev/null +++ b/vendor/importlib_metadata/tests/test_integration.py @@ -0,0 +1,67 @@ +import unittest +import packaging.requirements +import packaging.version + +from . import fixtures +from importlib_metadata import ( + MetadataPathFinder, + _compat, + distributions, + version, +) + + +class IntegrationTests(fixtures.DistInfoPkg, unittest.TestCase): + def test_package_spec_installed(self): + """ + Illustrate the recommended procedure to determine if + a specified version of a package is installed. 
+ """ + + def is_installed(package_spec): + req = packaging.requirements.Requirement(package_spec) + return version(req.name) in req.specifier + + assert is_installed('distinfo-pkg==1.0') + assert is_installed('distinfo-pkg>=1.0,<2.0') + assert not is_installed('distinfo-pkg<1.0') + + +class FinderTests(fixtures.Fixtures, unittest.TestCase): + def test_finder_without_module(self): + class ModuleFreeFinder(fixtures.NullFinder): + """ + A finder without an __module__ attribute + """ + + def __getattribute__(self, name): + if name == '__module__': + raise AttributeError(name) + return super().__getattribute__(name) + + self.fixtures.enter_context(fixtures.install_finder(ModuleFreeFinder())) + _compat.disable_stdlib_finder() + + +class DistSearch(unittest.TestCase): + def test_search_dist_dirs(self): + """ + Pip needs the _search_paths interface to locate + distribution metadata dirs. Protect it for PyPA + use-cases (only). Ref python/importlib_metadata#111. + """ + res = MetadataPathFinder._search_paths('any-name', []) + assert list(res) == [] + + def test_interleaved_discovery(self): + """ + When the search is cached, it is + possible for searches to be interleaved, so make sure + those use-cases are safe. + + Ref #293 + """ + dists = distributions() + next(dists) + version('importlib_metadata') + next(dists) diff --git a/vendor/importlib_metadata/tests/test_main.py b/vendor/importlib_metadata/tests/test_main.py new file mode 100644 index 00000000..921f5d9c --- /dev/null +++ b/vendor/importlib_metadata/tests/test_main.py @@ -0,0 +1,335 @@ +import re +import json +import pickle +import unittest +import warnings +import importlib +import importlib_metadata +import pyfakefs.fake_filesystem_unittest as ffs + +from . import fixtures +from importlib_metadata import ( + Distribution, + EntryPoint, + MetadataPathFinder, + PackageNotFoundError, + _unique, + distributions, + entry_points, + metadata, + packages_distributions, + version, +) + + +class BasicTests(fixtures.DistInfoPkg, unittest.TestCase): + version_pattern = r'\d+\.\d+(\.\d)?' + + def test_retrieves_version_of_self(self): + dist = Distribution.from_name('distinfo-pkg') + assert isinstance(dist.version, str) + assert re.match(self.version_pattern, dist.version) + + def test_for_name_does_not_exist(self): + with self.assertRaises(PackageNotFoundError): + Distribution.from_name('does-not-exist') + + def test_package_not_found_mentions_metadata(self): + """ + When a package is not found, that could indicate that the + packgae is not installed or that it is installed without + metadata. Ensure the exception mentions metadata to help + guide users toward the cause. See #124. + """ + with self.assertRaises(PackageNotFoundError) as ctx: + Distribution.from_name('does-not-exist') + + assert "metadata" in str(ctx.exception) + + def test_new_style_classes(self): + self.assertIsInstance(Distribution, type) + self.assertIsInstance(MetadataPathFinder, type) + + @fixtures.parameterize( + dict(name=None), + dict(name=''), + ) + def test_invalid_inputs_to_from_name(self, name): + with self.assertRaises(Exception): + Distribution.from_name(name) + + +class ImportTests(fixtures.DistInfoPkg, unittest.TestCase): + def test_import_nonexistent_module(self): + # Ensure that the MetadataPathFinder does not crash an import of a + # non-existent module. 
+ with self.assertRaises(ImportError): + importlib.import_module('does_not_exist') + + def test_resolve(self): + ep = entry_points(group='entries')['main'] + self.assertEqual(ep.load().__name__, "main") + + def test_entrypoint_with_colon_in_name(self): + ep = entry_points(group='entries')['ns:sub'] + self.assertEqual(ep.value, 'mod:main') + + def test_resolve_without_attr(self): + ep = EntryPoint( + name='ep', + value='importlib_metadata', + group='grp', + ) + assert ep.load() is importlib_metadata + + +class NameNormalizationTests(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase): + @staticmethod + def make_pkg(name): + """ + Create minimal metadata for a dist-info package with + the indicated name on the file system. + """ + return { + f'{name}.dist-info': { + 'METADATA': 'VERSION: 1.0\n', + }, + } + + def test_dashes_in_dist_name_found_as_underscores(self): + """ + For a package with a dash in the name, the dist-info metadata + uses underscores in the name. Ensure the metadata loads. + """ + fixtures.build_files(self.make_pkg('my_pkg'), self.site_dir) + assert version('my-pkg') == '1.0' + + def test_dist_name_found_as_any_case(self): + """ + Ensure the metadata loads when queried with any case. + """ + pkg_name = 'CherryPy' + fixtures.build_files(self.make_pkg(pkg_name), self.site_dir) + assert version(pkg_name) == '1.0' + assert version(pkg_name.lower()) == '1.0' + assert version(pkg_name.upper()) == '1.0' + + def test_unique_distributions(self): + """ + Two distributions varying only by non-normalized name on + the file system should resolve as the same. + """ + fixtures.build_files(self.make_pkg('abc'), self.site_dir) + before = list(_unique(distributions())) + + alt_site_dir = self.fixtures.enter_context(fixtures.tempdir()) + self.fixtures.enter_context(self.add_sys_path(alt_site_dir)) + fixtures.build_files(self.make_pkg('ABC'), alt_site_dir) + after = list(_unique(distributions())) + + assert len(after) == len(before) + + +class NonASCIITests(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase): + @staticmethod + def pkg_with_non_ascii_description(site_dir): + """ + Create minimal metadata for a package with non-ASCII in + the description. + """ + contents = { + 'portend.dist-info': { + 'METADATA': 'Description: pôrˈtend', + }, + } + fixtures.build_files(contents, site_dir) + return 'portend' + + @staticmethod + def pkg_with_non_ascii_description_egg_info(site_dir): + """ + Create minimal metadata for an egg-info package with + non-ASCII in the description. 
+ """ + contents = { + 'portend.dist-info': { + 'METADATA': """ + Name: portend + + pôrˈtend""", + }, + } + fixtures.build_files(contents, site_dir) + return 'portend' + + def test_metadata_loads(self): + pkg_name = self.pkg_with_non_ascii_description(self.site_dir) + meta = metadata(pkg_name) + assert meta['Description'] == 'pôrˈtend' + + def test_metadata_loads_egg_info(self): + pkg_name = self.pkg_with_non_ascii_description_egg_info(self.site_dir) + meta = metadata(pkg_name) + assert meta['Description'] == 'pôrˈtend' + + +class DiscoveryTests(fixtures.EggInfoPkg, fixtures.DistInfoPkg, unittest.TestCase): + def test_package_discovery(self): + dists = list(distributions()) + assert all(isinstance(dist, Distribution) for dist in dists) + assert any(dist.metadata['Name'] == 'egginfo-pkg' for dist in dists) + assert any(dist.metadata['Name'] == 'distinfo-pkg' for dist in dists) + + def test_invalid_usage(self): + with self.assertRaises(ValueError): + list(distributions(context='something', name='else')) + + +class DirectoryTest(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase): + def test_egg_info(self): + # make an `EGG-INFO` directory that's unrelated + self.site_dir.joinpath('EGG-INFO').mkdir() + # used to crash with `IsADirectoryError` + with self.assertRaises(PackageNotFoundError): + version('unknown-package') + + def test_egg(self): + egg = self.site_dir.joinpath('foo-3.6.egg') + egg.mkdir() + with self.add_sys_path(egg): + with self.assertRaises(PackageNotFoundError): + version('foo') + + +class MissingSysPath(fixtures.OnSysPath, unittest.TestCase): + site_dir = '/does-not-exist' + + def test_discovery(self): + """ + Discovering distributions should succeed even if + there is an invalid path on sys.path. + """ + importlib_metadata.distributions() + + +class InaccessibleSysPath(fixtures.OnSysPath, ffs.TestCase): + site_dir = '/access-denied' + + def setUp(self): + super().setUp() + self.setUpPyfakefs() + self.fs.create_dir(self.site_dir, perm_bits=000) + + def test_discovery(self): + """ + Discovering distributions should succeed even if + there is an invalid path on sys.path. + """ + list(importlib_metadata.distributions()) + + +class TestEntryPoints(unittest.TestCase): + def __init__(self, *args): + super().__init__(*args) + self.ep = importlib_metadata.EntryPoint( + name='name', value='value', group='group' + ) + + def test_entry_point_pickleable(self): + revived = pickle.loads(pickle.dumps(self.ep)) + assert revived == self.ep + + def test_positional_args(self): + """ + Capture legacy (namedtuple) construction, discouraged. + """ + EntryPoint('name', 'value', 'group') + + def test_immutable(self): + """EntryPoints should be immutable""" + with self.assertRaises(AttributeError): + self.ep.name = 'badactor' + + def test_repr(self): + assert 'EntryPoint' in repr(self.ep) + assert 'name=' in repr(self.ep) + assert "'name'" in repr(self.ep) + + def test_hashable(self): + """EntryPoints should be hashable""" + hash(self.ep) + + def test_json_dump(self): + """ + json should not expect to be able to dump an EntryPoint + """ + with self.assertRaises(Exception): + with warnings.catch_warnings(record=True): + json.dumps(self.ep) + + def test_module(self): + assert self.ep.module == 'value' + + def test_attr(self): + assert self.ep.attr is None + + def test_sortable(self): + """ + EntryPoint objects are sortable, but result is undefined. 
+ """ + sorted( + [ + EntryPoint(name='b', value='val', group='group'), + EntryPoint(name='a', value='val', group='group'), + ] + ) + + +class FileSystem( + fixtures.OnSysPath, fixtures.SiteDir, fixtures.FileBuilder, unittest.TestCase +): + def test_unicode_dir_on_sys_path(self): + """ + Ensure a Unicode subdirectory of a directory on sys.path + does not crash. + """ + fixtures.build_files( + {self.unicode_filename(): {}}, + prefix=self.site_dir, + ) + list(distributions()) + + +class PackagesDistributionsPrebuiltTest(fixtures.ZipFixtures, unittest.TestCase): + def test_packages_distributions_example(self): + self._fixture_on_path('example-21.12-py3-none-any.whl') + assert packages_distributions()['example'] == ['example'] + + def test_packages_distributions_example2(self): + """ + Test packages_distributions on a wheel built + by trampolim. + """ + self._fixture_on_path('example2-1.0.0-py3-none-any.whl') + assert packages_distributions()['example2'] == ['example2'] + + +class PackagesDistributionsTest( + fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase +): + def test_packages_distributions_neither_toplevel_nor_files(self): + """ + Test a package built without 'top-level.txt' or a file list. + """ + fixtures.build_files( + { + 'trim_example-1.0.0.dist-info': { + 'METADATA': """ + Name: trim_example + Version: 1.0.0 + """, + } + }, + prefix=self.site_dir, + ) + packages_distributions() diff --git a/vendor/importlib_metadata/tests/test_zip.py b/vendor/importlib_metadata/tests/test_zip.py new file mode 100644 index 00000000..01aba6df --- /dev/null +++ b/vendor/importlib_metadata/tests/test_zip.py @@ -0,0 +1,62 @@ +import sys +import unittest + +from . import fixtures +from importlib_metadata import ( + PackageNotFoundError, + distribution, + distributions, + entry_points, + files, + version, +) + + +class TestZip(fixtures.ZipFixtures, unittest.TestCase): + def setUp(self): + super().setUp() + self._fixture_on_path('example-21.12-py3-none-any.whl') + + def test_zip_version(self): + self.assertEqual(version('example'), '21.12') + + def test_zip_version_does_not_match(self): + with self.assertRaises(PackageNotFoundError): + version('definitely-not-installed') + + def test_zip_entry_points(self): + scripts = entry_points(group='console_scripts') + entry_point = scripts['example'] + self.assertEqual(entry_point.value, 'example:main') + entry_point = scripts['Example'] + self.assertEqual(entry_point.value, 'example:main') + + def test_missing_metadata(self): + self.assertIsNone(distribution('example').read_text('does not exist')) + + def test_case_insensitive(self): + self.assertEqual(version('Example'), '21.12') + + def test_files(self): + for file in files('example'): + path = str(file.dist.locate_file(file)) + assert '.whl/' in path, path + + def test_one_distribution(self): + dists = list(distributions(path=sys.path[:1])) + assert len(dists) == 1 + + +class TestEgg(TestZip): + def setUp(self): + super().setUp() + self._fixture_on_path('example-21.12-py3.6.egg') + + def test_files(self): + for file in files('example'): + path = str(file.dist.locate_file(file)) + assert '.egg/' in path, path + + def test_normalized_name(self): + dist = distribution('example') + assert dist._normalized_name == 'example' diff --git a/vendor/importlib_metadata/tox.ini b/vendor/importlib_metadata/tox.ini index b2775cd1..a0ce7c61 100644 --- a/vendor/importlib_metadata/tox.ini +++ b/vendor/importlib_metadata/tox.ini @@ -1,97 +1,50 @@ [tox] -envlist = {py27,py35,py36,py37,py38}{,-cov,-diffcov},qa,docs,perf 
-skip_missing_interpreters = True +envlist = python minversion = 3.2 -# Ensure that a late version of pip is used even on tox-venv. -requires = - tox-pip-version>=0.0.6 +# https://github.com/jaraco/skeleton/issues/6 +tox_pip_extensions_ext_venv_update = true +toxworkdir={env:TOX_WORK_DIR:.tox} + [testenv] -pip_version = pip -commands = - !cov,!diffcov: python -m unittest discover {posargs} - cov,diffcov: python -m coverage run {[coverage]rc} -m unittest discover {posargs} - cov,diffcov: python -m coverage combine {[coverage]rc} - cov: python -m coverage html {[coverage]rc} - cov: python -m coverage xml {[coverage]rc} - cov: python -m coverage report -m {[coverage]rc} --fail-under=100 - diffcov: python -m coverage xml {[coverage]rc} - diffcov: diff-cover coverage.xml --html-report diffcov.html - diffcov: diff-cover coverage.xml --fail-under=100 -usedevelop = True -passenv = - PYTHON* - LANG* - LC_* - PYV deps = - cov,diffcov: coverage>=4.5 - diffcov: diff_cover - pyfakefs -setenv = - cov: COVERAGE_PROCESS_START={[coverage]rcfile} - cov: COVERAGE_OPTIONS="-p" - cov: COVERAGE_FILE={toxinidir}/.coverage - py27: PYV=2 - py35,py36,py37,py38: PYV=3 - # workaround deprecation warnings in pip's vendored packages - PYTHONWARNINGS=ignore:Using or importing the ABCs:DeprecationWarning:pip._vendor -extras = - testing - - -[testenv:qa] -basepython = python3.7 commands = - python -m flake8 importlib_metadata - mypy importlib_metadata -deps = - mypy - flake8 - flufl.flake8 -extras = + pytest {posargs} +passenv = + HOME +usedevelop = True +extras = testing [testenv:docs] -basepython = python3 -commands = - sphinx-build importlib_metadata/docs build/sphinx/html extras = - docs - + docs + testing +changedir = docs +commands = + python -m sphinx -W --keep-going . {toxinidir}/build/html -[testenv:perf] -use_develop = False +[testenv:diffcov] deps = - ipython + diff-cover commands = - python -m timeit -s 'import importlib_metadata' -- 'importlib_metadata.distribution("ipython")' - + pytest {posargs} --cov-report xml + diff-cover coverage.xml --compare-branch=origin/main --html-report diffcov.html + diff-cover coverage.xml --compare-branch=origin/main --fail-under=100 [testenv:release] -basepython = python3 +skip_install = True deps = - twine - wheel - setuptools - keyring - setuptools_scm + build + twine>=3 + jaraco.develop>=7.1 passenv = - TWINE_PASSWORD + TWINE_PASSWORD + GITHUB_TOKEN setenv = - TWINE_USERNAME = {env:TWINE_USERNAME:__token__} + TWINE_USERNAME = {env:TWINE_USERNAME:__token__} commands = - python setup.py sdist bdist_wheel - python -m twine {posargs} upload dist/* - - -[coverage] -rcfile = {toxinidir}/coverage.ini -rc = --rcfile="{[coverage]rcfile}" - - -[flake8] -hang-closing = True -jobs = 1 -max-line-length = 79 -enable-extensions = U4 + python -c "import shutil; shutil.rmtree('dist', ignore_errors=True)" + python -m build + python -m twine upload dist/* + python -m jaraco.develop.create-github-release diff --git a/vendor/jsonschema/.coveragerc b/vendor/jsonschema/.coveragerc new file mode 100644 index 00000000..5c6eb7ee --- /dev/null +++ b/vendor/jsonschema/.coveragerc @@ -0,0 +1,10 @@ +# vim: filetype=dosini: +[run] +branch = True +source = jsonschema +omit = */jsonschema/_reflect.py,*/jsonschema/__main__.py,*/jsonschema/benchmarks/*,*/jsonschema/tests/fuzz_validate.py + +[report] +exclude_lines = + pragma: no cover + if TYPE_CHECKING: diff --git a/vendor/jsonschema/.flake8 b/vendor/jsonschema/.flake8 new file mode 100644 index 00000000..6a6b5cfe --- /dev/null +++ 
b/vendor/jsonschema/.flake8 @@ -0,0 +1,10 @@ +[flake8] +ban-relative-imports = true +inline-quotes = " +exclude = + jsonschema/__init__.py + jsonschema/_reflect.py +ignore = + B008, # Barring function calls in default args. Ha, no. + B306, # See https://github.com/PyCQA/flake8-bugbear/issues/131 + W503, # (flake8 default) old PEP8 boolean operator line breaks diff --git a/vendor/jsonschema/.github/FUNDING.yml b/vendor/jsonschema/.github/FUNDING.yml new file mode 100644 index 00000000..39a1618f --- /dev/null +++ b/vendor/jsonschema/.github/FUNDING.yml @@ -0,0 +1,4 @@ +# These are supported funding model platforms + +github: "Julian" +tidelift: "pypi/jsonschema" diff --git a/vendor/jsonschema/.github/SECURITY.md b/vendor/jsonschema/.github/SECURITY.md new file mode 100644 index 00000000..fd524e94 --- /dev/null +++ b/vendor/jsonschema/.github/SECURITY.md @@ -0,0 +1,21 @@ +# Security Policy + +## Supported Versions + +In general, only the latest released ``jsonschema`` version is supported +and will receive updates. + +## Reporting a Vulnerability + +To report a security vulnerability, please send an email to +``Julian+Security@GrayVines.com`` with subject line ``SECURITY +(jsonschema)``. + +I will do my best to respond within 48 hours to acknowledge the message +and discuss further steps. + +If the vulnerability is accepted, an advisory will be sent out via +GitHub's security advisory functionality. + +For non-sensitive discussion related to this policy itself, feel free to +open an issue on the issue tracker. diff --git a/vendor/jsonschema/.github/workflows/ci.yml b/vendor/jsonschema/.github/workflows/ci.yml new file mode 100644 index 00000000..f12bf32d --- /dev/null +++ b/vendor/jsonschema/.github/workflows/ci.yml @@ -0,0 +1,191 @@ +name: CI + +on: + push: + pull_request: + release: + types: [published] + schedule: + # Daily at 3:21 + - cron: '21 3 * * *' + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: "3.10" + - uses: pre-commit/action@v3.0.0 + + ci: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [macos-latest, ubuntu-latest, windows-latest] + python-version: + - name: pypy-3.8 + toxenv: pypy3-noextra-build + - name: pypy-3.8 + toxenv: pypy3-noextra-tests + - name: pypy-3.8 + toxenv: pypy3-format-build + - name: pypy-3.8 + toxenv: pypy3-format-tests + - name: pypy-3.8 + toxenv: pypy3-formatnongpl-build + - name: pypy-3.8 + toxenv: pypy3-formatnongpl-tests + - name: 3.7 + toxenv: py37-noextra-build + - name: 3.7 + toxenv: py37-noextra-tests + - name: 3.7 + toxenv: py37-format-build + - name: 3.7 + toxenv: py37-format-tests + - name: 3.7 + toxenv: py37-formatnongpl-build + - name: 3.7 + toxenv: py37-formatnongpl-tests + - name: 3.8 + toxenv: py38-noextra-build + - name: 3.8 + toxenv: py38-noextra-tests + - name: 3.8 + toxenv: py38-format-build + - name: 3.8 + toxenv: py38-format-tests + - name: 3.8 + toxenv: py38-formatnongpl-build + - name: 3.8 + toxenv: py38-formatnongpl-tests + - name: 3.9 + toxenv: py39-noextra-build + - name: 3.9 + toxenv: py39-noextra-tests + - name: 3.9 + toxenv: py39-format-build + - name: 3.9 + toxenv: py39-format-tests + - name: 3.9 + toxenv: py39-formatnongpl-build + - name: 3.9 + toxenv: py39-formatnongpl-tests + - name: "3.10" + toxenv: py310-noextra-build + - name: "3.10" + toxenv: py310-noextra-tests + - name: "3.10" + toxenv: py310-format-build + - name: "3.10" + toxenv: py310-format-tests + - name: "3.10" + toxenv: 
py310-formatnongpl-build + - name: "3.10" + toxenv: py310-formatnongpl-tests + - name: "3.11.0-beta - 3.11.0" + toxenv: py311-noextra-build + - name: "3.11.0-beta - 3.11.0" + toxenv: py311-noextra-tests + - name: "3.11.0-beta - 3.11.0" + toxenv: py311-format-build + - name: "3.11.0-beta - 3.11.0" + toxenv: py311-format-tests + - name: "3.11.0-beta - 3.11.0" + toxenv: py311-formatnongpl-build + - name: "3.11.0-beta - 3.11.0" + toxenv: py311-formatnongpl-tests + - name: "3.10" + toxenv: docs-dirhtml + - name: "3.10" + toxenv: docs-doctest + - name: "3.10" + toxenv: docs-linkcheck + - name: "3.10" + toxenv: docs-spelling + - name: "3.10" + toxenv: docs-style + - name: "3.10" + toxenv: readme + - name: "3.10" + toxenv: safety + - name: "3.10" + toxenv: secrets + - name: "3.10" + toxenv: style + - name: "3.10" + toxenv: typing + exclude: + - os: windows-latest + python-version: + name: "3.10" + toxenv: readme + - os: windows-latest + python-version: + name: "3.10" + toxenv: docs-linkcheck + - os: windows-latest + python-version: + name: "3.10" + toxenv: docs-style + + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version.name }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version.name }} + - name: Install dependencies + run: > + sudo apt-get update && + sudo apt-get install -y libenchant-dev libxml2-dev libxslt-dev + if: runner.os == 'Linux' && startsWith(matrix.python-version.toxenv, 'docs-') + - name: Install dependencies + run: brew install enchant + if: runner.os == 'macOS' && startsWith(matrix.python-version.toxenv, 'docs-') + - name: Install tox + run: python -m pip install tox + - name: Enable UTF-8 on Windows + run: echo "PYTHONUTF8=1" >> $env:GITHUB_ENV + if: runner.os == 'Windows' && startsWith(matrix.python-version.toxenv, 'py') + - name: Run tox + run: python -m tox -e "${{ matrix.python-version.toxenv }}" + + packaging: + needs: ci + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - uses: actions/setup-python@v4 + with: + python-version: "3.10" + - name: Install dependencies + run: python -m pip install build + - name: Create packages + run: python -m build . 
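The ``python -m build .`` step above produces both an sdist and a wheel under ``dist/``, which the steps that follow upload and (on tags) publish. A rough local equivalent of the packaging job, as a sketch (the subprocess calls are illustrative; CI runs these as separate shell steps):

    import pathlib
    import subprocess

    # Mirror the "Install dependencies" and "Create packages" steps.
    subprocess.run(["python", "-m", "pip", "install", "build"], check=True)
    subprocess.run(["python", "-m", "build", "."], check=True)

    # Everything under dist/ is what the workflow uploads: typically one
    # .tar.gz (sdist) and one .whl (wheel).
    print(sorted(p.name for p in pathlib.Path("dist").iterdir()))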
+ - uses: actions/upload-artifact@v3 + with: + name: dist + path: dist + - name: Publish package + if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags') + uses: pypa/gh-action-pypi-publish@master + with: + user: __token__ + password: ${{ secrets.pypi_password }} + - name: Create Release Notes + if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags') + uses: actions/github-script@v6 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + await github.request(`POST /repos/${{ github.repository }}/releases`, { + tag_name: "${{ github.ref }}", + generate_release_notes: true + }); diff --git a/vendor/jsonschema/.github/workflows/coverage.yml b/vendor/jsonschema/.github/workflows/coverage.yml new file mode 100644 index 00000000..4af51b7d --- /dev/null +++ b/vendor/jsonschema/.github/workflows/coverage.yml @@ -0,0 +1,25 @@ +name: Coverage + +on: + push: + pull_request: + release: + types: [published] + +jobs: + coverage: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v3 + with: + python-version: "3.10" + - name: Install tox + run: python -m pip install tox + - name: Collect & Upload Coverage + # codecov.io is too flaky to fail for this right now + continue-on-error: true + run: python -m tox -e py310-format-codecov + env: + CODECOV_TOKEN: 2b38dae1-41c4-4435-a29d-79a1299e5617 diff --git a/vendor/jsonschema/.github/workflows/fuzz.yml b/vendor/jsonschema/.github/workflows/fuzz.yml new file mode 100644 index 00000000..dcc49a38 --- /dev/null +++ b/vendor/jsonschema/.github/workflows/fuzz.yml @@ -0,0 +1,30 @@ +name: CIFuzz + +on: + pull_request: + branches: + - main + +jobs: + Fuzzing: + runs-on: ubuntu-latest + steps: + - name: Build Fuzzers + id: build + uses: google/oss-fuzz/infra/cifuzz/actions/build_fuzzers@master + with: + oss-fuzz-project-name: 'jsonschema' + language: python + continue-on-error: true + - name: Run Fuzzers + if: steps.build.outcome == 'success' + uses: google/oss-fuzz/infra/cifuzz/actions/run_fuzzers@master + with: + oss-fuzz-project-name: 'jsonschema' + fuzz-seconds: 30 + - name: Upload Crash + uses: actions/upload-artifact@v3 + if: failure() && steps.build.outcome == 'success' + with: + name: artifacts + path: ./out/artifacts diff --git a/vendor/jsonschema/.gitignore b/vendor/jsonschema/.gitignore new file mode 100644 index 00000000..ec414962 --- /dev/null +++ b/vendor/jsonschema/.gitignore @@ -0,0 +1,157 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/ + +# User defined +_cache +_static +_templates diff --git a/vendor/jsonschema/.pre-commit-config.yaml b/vendor/jsonschema/.pre-commit-config.yaml new file mode 100644 index 00000000..5cec24c7 --- /dev/null +++ b/vendor/jsonschema/.pre-commit-config.yaml @@ -0,0 +1,22 @@ +exclude: json/ + +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.3.0 + hooks: + - id: check-ast + - id: check-docstring-first + - id: check-json + - id: check-toml + - id: check-vcs-permalinks + - id: check-yaml + - id: debug-statements + exclude: '^jsonschema/tests/_suite.py$' + - id: end-of-file-fixer + - id: mixed-line-ending + args: [--fix, lf] + - id: trailing-whitespace + - repo: https://github.com/PyCQA/isort + rev: 5.10.1 + hooks: + - id: isort diff --git a/vendor/jsonschema/.pre-commit-hooks.yaml b/vendor/jsonschema/.pre-commit-hooks.yaml new file mode 100644 index 00000000..f8067154 --- /dev/null +++ b/vendor/jsonschema/.pre-commit-hooks.yaml @@ -0,0 +1,6 @@ +- id: jsonschema + name: jsonschema + description: json schema validation + language: python + pass_filenames: false + entry: jsonschema diff --git a/vendor/jsonschema/.readthedocs.yml b/vendor/jsonschema/.readthedocs.yml new file mode 100644 index 00000000..57501664 --- /dev/null +++ b/vendor/jsonschema/.readthedocs.yml @@ -0,0 +1,15 @@ +version: 2 + +sphinx: + builder: dirhtml + configuration: docs/conf.py + fail_on_warning: true + +formats: all + +python: + version: 3.8 + install: + - requirements: docs/requirements.txt + - method: pip + path: . diff --git a/vendor/jsonschema/CHANGELOG.rst b/vendor/jsonschema/CHANGELOG.rst new file mode 100644 index 00000000..61668ae4 --- /dev/null +++ b/vendor/jsonschema/CHANGELOG.rst @@ -0,0 +1,397 @@ +v4.10.3 +------- + +* ``jsonschema.validators.validator_for`` now properly uses the explicitly + provided default validator even if the ``$schema`` URI is not found. + +v4.10.2 +------- + +* Fix a second place where subclasses may have added attrs attributes (#982). + +v4.10.1 +------- + +* Fix Validator.evolve (and APIs like ``iter_errors`` which call it) for cases + where the validator class has been subclassed. Doing so wasn't intended to be + public API, but given it didn't warn or raise an error it's of course + understandable. The next release however will make it warn (and a future one + will make it error). If you need help migrating usage of inheriting from a + validator class feel free to open a discussion and I'll try to give some + guidance (#982). + +v4.10.0 +------- + +* Add support for referencing schemas with ``$ref`` across different versions + of the specification than the referrer's + +v4.9.1 +------ + +* Update some documentation examples to use newer validator releases in their + sample code. + +v4.9.0 +------ + +* Fix relative ``$ref`` resolution when the base URI is a URN or other scheme + (#544). +* ``pkgutil.resolve_name`` is now used to retrieve validators + provided on the command line. This function is only available on + 3.9+, so 3.7 and 3.8 (which are still supported) now rely on the + `pkgutil_resolve_name `_ + backport package. Note however that the CLI itself is due + to be deprecated shortly in favor of `check-jsonschema + `_. + +v4.8.0 +------ + +* ``best_match`` no longer traverses into ``anyOf`` and ``oneOf`` when all of + the errors within them seem equally applicable. This should lead to clearer + error messages in some cases where no branches were matched. 
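The ``best_match`` change described in v4.8.0 above is easiest to see with a short sketch (the schema and instance are invented; ``exceptions.best_match`` is jsonschema's public error-selection helper):

    from jsonschema import Draft202012Validator
    from jsonschema.exceptions import best_match

    schema = {"anyOf": [{"type": "string"}, {"type": "integer"}]}
    validator = Draft202012Validator(schema)

    # 3.5 fails both anyOf branches equally, so best_match reports the
    # anyOf failure itself rather than descending into one branch.
    error = best_match(validator.iter_errors(3.5))
    print(error.message)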
+ +v4.7.2 +------ + +* Also have ``best_match`` handle cases where the ``type`` validator is an + array. + +v4.7.1 +------ + +* Minor tweak of the PyPI hyperlink names + +v4.7.0 +------ + +* Enhance ``best_match`` to prefer errors from branches of the schema which + match the instance's type (#728) + +v4.6.2 +------ + +* Fix a number of minor typos in docstrings, mostly private ones (#969) + +v4.6.1 +------ + +* Gut the (incomplete) implementation of ``recursiveRef`` on draft 2019. It + needs completing, but for now can lead to recursion errors (e.g. #847). + +v4.6.0 +------ + +* Fix ``unevaluatedProperties`` and ``unevaluatedItems`` for types they should + ignore (#949) +* ``jsonschema`` now uses `hatch `_ for its build + process. This should be completely transparent to end-users (and only matters + to contributors). + +v4.5.1 +------ + +* Revert changes to ``$dynamicRef`` which caused a performance regression + in v4.5.0 + +v4.5.0 +------ + +* Validator classes for each version now maintain references to the correct + corresponding format checker (#905) +* Development has moved to a `GitHub organization + `_. + No functional behavior changes are expected from the change. + +v4.4.0 +------ + +* Add ``mypy`` support (#892) +* Add support for Python 3.11 + +v4.3.3 +------ + +* Properly report deprecation warnings at the right stack level (#899) + +v4.3.2 +------ + +* Additional performance improvements for resolving refs (#896) + +v4.3.1 +------ + +* Resolving refs has had performance improvements (#893) + +v4.3.0 +------ + +* Fix undesired fallback to brute force container uniqueness check on + certain input types (#893) +* Implement a PEP544 Protocol for validator classes (#890) + +v4.2.1 +------ + +* Pin ``importlib.resources`` from below (#877) + +v4.2.0 +------ + +* Use ``importlib.resources`` to load schemas (#873) +* Ensure all elements of arrays are verified for uniqueness by ``uniqueItems`` + (#866) + +v4.1.2 +------ + +* Fix ``dependentSchemas`` to properly consider non-object instances to be + valid (#850) + +v4.1.1 +------ + +* Fix ``prefixItems`` not indicating which item was invalid within the instance + path (#862) + +v4.1.0 +------ + +* Add Python 3.10 to the list of supported Python versions + +v4.0.1 +------ + +* Fix the declaration of minimum supported Python version (#846) + +v4.0.0 +------ + +* Partial support for Draft 2020-12 (as well as 2019-09). + Thanks to Thomas Schmidt and Harald Nezbeda. +* ``False`` and ``0`` are now properly considered non-equal even + recursively within a container (#686). As part of this change, + ``uniqueItems`` validation may be *slower* in some cases. Please feel + free to report any significant performance regressions, though in + some cases they may be difficult to address given the specification + requirement. +* The CLI has been improved, and in particular now supports a ``--output`` + option (with ``plain`` (default) or ``pretty`` arguments) to control the + output format. Future work may add additional machine-parsable output + formats. +* Code surrounding ``DEFAULT_TYPES`` and the legacy mechanism for + specifying types to validators have been removed, as per the deprecation + policy. Validators should use the ``TypeChecker`` object to customize + the set of Python types corresponding to JSON Schema types. 
+* Validation errors now have a ``json_path`` attribute, describing their + location in JSON path format +* Support for the IP address and domain name formats has been improved +* Support for Python 2 and 3.6 has been dropped, with ``python_requires`` + properly set. +* ``multipleOf`` could overflow when given sufficiently large numbers. Now, + when an overflow occurs, ``jsonschema`` will fall back to using fraction + division (#746). +* ``jsonschema.__version__``, ``jsonschema.validators.validators``, + ``jsonschema.validators.meta_schemas`` and + ``jsonschema.RefResolver.in_scope`` have been deprecated, as has + passing a second-argument schema to ``Validator.iter_errors`` and + ``Validator.is_valid``. + +v3.2.0 +------ + +* Added a ``format_nongpl`` setuptools extra, which installs only ``format`` + dependencies that are non-GPL (#619). + +v3.1.1 +------ + +* Temporarily revert the switch to ``js-regex`` until #611 and #612 are + resolved. + +v3.1.0 +------ + +* Regular expressions throughout schemas now respect the ECMA 262 dialect, as + recommended by the specification (#609). + +v3.0.2 +------ + +* Fixed a bug where ``0`` and ``False`` were considered equal by + ``const`` and ``enum`` (#575). + +v3.0.1 +------ + +* Fixed a bug where extending validators did not preserve their notion + of which validator property contains ``$id`` information. + +v3.0.0 +------ + +* Support for Draft 6 and Draft 7 +* Draft 7 is now the default +* New ``TypeChecker`` object for more complex type definitions (and overrides) +* Falling back to isodate for the date-time format checker is no longer + attempted, in accordance with the specification + +v2.6.0 +------ + +* Support for Python 2.6 has been dropped. +* Improve a few error messages for ``uniqueItems`` (#224) and + ``additionalProperties`` (#317) +* Fixed an issue with ``ErrorTree``'s handling of multiple errors (#288) + +v2.5.0 +------ + +* Improved performance on CPython by adding caching around ref resolution + (#203) + +v2.4.0 +------ + +* Added a CLI (#134) +* Added absolute path and absolute schema path to errors (#120) +* Added ``relevance`` +* Meta-schemas are now loaded via ``pkgutil`` + +v2.3.0 +------ + +* Added ``by_relevance`` and ``best_match`` (#91) +* Fixed ``format`` to allow adding formats for non-strings (#125) +* Fixed the ``uri`` format to reject URI references (#131) + +v2.2.0 +------ + +* Compile the host name regex (#127) +* Allow arbitrary objects to be types (#129) + +v2.1.0 +------ + +* Support RFC 3339 datetimes in conformance with the spec +* Fixed error paths for additionalItems + items (#122) +* Fixed wording for min / maxProperties (#117) + + +v2.0.0 +------ + +* Added ``create`` and ``extend`` to ``jsonschema.validators`` +* Removed ``ValidatorMixin`` +* Fixed array indices ref resolution (#95) +* Fixed unknown scheme defragmenting and handling (#102) + + +v1.3.0 +------ + +* Better error tracebacks (#83) +* Raise exceptions in ``ErrorTree``\s for keys not in the instance (#92) +* __cause__ (#93) + + +v1.2.0 +------ + +* More attributes for ValidationError (#86) +* Added ``ValidatorMixin.descend`` +* Fixed bad ``RefResolutionError`` message (#82) + + +v1.1.0 +------ + +* Canonicalize URIs (#70) +* Allow attaching exceptions to ``format`` errors (#77) + + +v1.0.0 +------ + +* Support for Draft 4 +* Support for format +* Longs are ints too! 
+* Fixed a number of issues with ``$ref`` support (#66) +* Draft4Validator is now the default +* ``ValidationError.path`` is now in sequential order +* Added ``ValidatorMixin`` + + +v0.8.0 +------ + +* Full support for JSON References +* ``validates`` for registering new validators +* Documentation +* Bugfixes + + * uniqueItems not so unique (#34) + * Improper any (#47) + + +v0.7 +---- + +* Partial support for (JSON Pointer) ``$ref`` +* Deprecations + + * ``Validator`` is replaced by ``Draft3Validator`` with a slightly different interface + * ``validator(meta_validate=False)`` + + +v0.6 +---- + +* Bugfixes + + * Issue #30 - Wrong behavior for the dependencies property validation + * Fixed a miswritten test + + +v0.5 +---- + +* Bugfixes + + * Issue #17 - require path for error objects + * Issue #18 - multiple type validation for non-objects + + +v0.4 +---- + +* Preliminary support for programmatic access to error details (Issue #5). There are certainly some corner cases that don't do the right thing yet, but this works mostly. + + In order to make this happen (and also to clean things up a bit), a number of deprecations are necessary: + + * ``stop_on_error`` is deprecated in ``Validator.__init__``. Use ``Validator.iter_errors()`` instead. + * ``number_types`` and ``string_types`` are deprecated there as well. Use ``types={"number" : ..., "string" : ...}`` instead. + * ``meta_validate`` is also deprecated, and instead is now accepted as an argument to ``validate``, ``iter_errors`` and ``is_valid``. + +* A bugfix or two + + +v0.3 +---- + +* Default for unknown types and properties is now to *not* error (consistent with the schema). +* Python 3 support +* Removed dependency on SecureTypes now that the hash bug has been resolved. +* "Numerous bug fixes" -- most notably, a divisibleBy error for floats and a bunch of missing typechecks for irrelevant properties. diff --git a/vendor/jsonschema/CONTRIBUTING.rst b/vendor/jsonschema/CONTRIBUTING.rst new file mode 100644 index 00000000..c051d569 --- /dev/null +++ b/vendor/jsonschema/CONTRIBUTING.rst @@ -0,0 +1,60 @@ +============================ +Contributing to `jsonschema` +============================ + +Found a bug? +------------ + +If you suspect you may have found a security-related vulnerability, please follow the instructions in `the security policy `_. + +Otherwise, it is extremely helpful if you first search to see whether your bug has been `previously reported on the Issues tab `_. + +If it doesn't appear to be a known issue, please `file a new one `_, and include a **title and clear description**, along with as much relevant information as possible. +Including a *minimal*, *self-sufficient* bit of code (often an instance and schema) is the fastest way to get attention, along with a description of the behavior you expect and, if you're able, a link to the part of the specification that covers the behavior you believe is incorrect. + +Pull requests to fix your issue are of course very welcome. + + +Fixing a Bug? +------------- + +Please open a new GitHub pull request with the change, along with new tests. + +Ensure the PR description clearly describes the problem and solution. Include the relevant issue number if applicable. + +Continuous integration via GitHub actions should run to indicate whether your change passes both the test suite and the linters. +Please ensure it passes, or indicate in a comment if you believe it fails spuriously. + + +Adding New Functionality?
+------------------------- + +Please discuss any larger changes ahead of time for the sake of your own time! + +Improvements are very welcome, but large pull requests, disruptive ones, or backwards incompatible ones, can lead to long back-and-forth discussions. + +You're welcome to suggest a change in an issue and thereby get some initial feedback before embarking on an effort that may not get merged. + + +Improving the Documentation? +---------------------------- + +Writing good documentation is challenging both to prioritize and to do well. + +Any help you can offer would be great, especially if you're a beginner who's struggled to understand a part of the library. + +Documentation is written in `Sphinx-flavored reStructuredText `_, so you'll want to familiarize yourself a bit with Sphinx. + +Feel free to file issues or pull requests. + + +Have a Question? +---------------- + +Please do not use the issue tracker for questions; it's reserved for things believed to be bugs, or for new functionality. + +There is a `discussions tab `_ where general questions can be asked. + +Answers on it are best-effort. + +Any help you can offer to answer others' questions is of course very welcome as well. diff --git a/vendor/poetry-core/poetry/core/_vendor/jsonschema/COPYING b/vendor/jsonschema/COPYING similarity index 100% rename from vendor/poetry-core/poetry/core/_vendor/jsonschema/COPYING rename to vendor/jsonschema/COPYING diff --git a/vendor/jsonschema/README.rst b/vendor/jsonschema/README.rst new file mode 100644 index 00000000..0003c502 --- /dev/null +++ b/vendor/jsonschema/README.rst @@ -0,0 +1,148 @@ +========== +jsonschema +========== + +|PyPI| |Pythons| |CI| |ReadTheDocs| |Precommit| |Zenodo| + +.. |PyPI| image:: https://img.shields.io/pypi/v/jsonschema.svg + :alt: PyPI version + :target: https://pypi.org/project/jsonschema/ + +.. |Pythons| image:: https://img.shields.io/pypi/pyversions/jsonschema.svg + :alt: Supported Python versions + :target: https://pypi.org/project/jsonschema/ + +.. |CI| image:: https://github.com/python-jsonschema/jsonschema/workflows/CI/badge.svg + :alt: Build status + :target: https://github.com/python-jsonschema/jsonschema/actions?query=workflow%3ACI + +.. |ReadTheDocs| image:: https://readthedocs.org/projects/python-jsonschema/badge/?version=stable&style=flat + :alt: ReadTheDocs status + :target: https://python-jsonschema.readthedocs.io/en/stable/ + +.. |Precommit| image:: https://results.pre-commit.ci/badge/github/python-jsonschema/jsonschema/main.svg + :alt: pre-commit.ci status + :target: https://results.pre-commit.ci/latest/github/python-jsonschema/jsonschema/main + +.. |Zenodo| image:: https://zenodo.org/badge/3072629.svg + :target: https://zenodo.org/badge/latestdoi/3072629 + + +``jsonschema`` is an implementation of the `JSON Schema `_ specification for Python. + +.. code-block:: python + + >>> from jsonschema import validate + + >>> # A sample schema, like what we'd get from json.load() + >>> schema = { + ... "type" : "object", + ... "properties" : { + ... "price" : {"type" : "number"}, + ... "name" : {"type" : "string"}, + ... }, + ... } + + >>> # If no exception is raised by validate(), the instance is valid. + >>> validate(instance={"name" : "Eggs", "price" : 34.99}, schema=schema) + + >>> validate( + ... instance={"name" : "Eggs", "price" : "Invalid"}, schema=schema, + ... ) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + ValidationError: 'Invalid' is not of type 'number' + +It can also be used from the console: + +..
code-block:: bash + + $ jsonschema --instance sample.json sample.schema + +Features +-------- + +* Partial support for + `Draft 2020-12 `_ and + `Draft 2019-09 `_, + except for ``dynamicRef`` / ``recursiveRef`` and ``$vocabulary`` (in-progress). + Full support for + `Draft 7 `_, + `Draft 6 `_, + `Draft 4 `_ + and + `Draft 3 `_ + +* `Lazy validation `_ + that can iteratively report *all* validation errors. + +* `Programmatic querying `_ + of which properties or items failed validation. + + +Installation +------------ + +``jsonschema`` is available on `PyPI `_. You can install using `pip `_: + +.. code-block:: bash + + $ pip install jsonschema + + +Running the Test Suite +---------------------- + +If you have ``tox`` installed (perhaps via ``pip install tox`` or your +package manager), running ``tox`` in the directory of your source +checkout will run ``jsonschema``'s test suite on all of the versions +of Python ``jsonschema`` supports. If you don't have all of the +versions that ``jsonschema`` is tested under, you'll likely want to run +using ``tox``'s ``--skip-missing-interpreters`` option. + +Of course you're also free to just run the tests on a single version with your +favorite test runner. The tests live in the ``jsonschema.tests`` package. + + +Benchmarks +---------- + +``jsonschema``'s benchmarks make use of `pyperf +`_. Running them can be done via:: + + $ tox -e perf + + +Community +--------- + +The JSON Schema specification has `a Slack +`_, with an `invite link on its home page +`_. Many folks knowledgeable on authoring +schemas can be found there. + +Otherwise, asking questions on Stack Overflow is another means of +getting help if you're stuck. + + +About +----- + +I'm Julian Berman. + +``jsonschema`` is on `GitHub `_. + +Get in touch, via GitHub or otherwise, if you've got something to contribute, +it'd be most welcome! + +You can also generally find me on Libera (nick: ``Julian``) in various +channels, including ``#python``. + +If you feel overwhelmingly grateful, you can also `sponsor me +`_. + +And for companies who appreciate ``jsonschema`` and its continued support +and growth, ``jsonschema`` is also now supportable via `TideLift +`_. diff --git a/vendor/jsonschema/codecov.yml b/vendor/jsonschema/codecov.yml new file mode 100644 index 00000000..640bd899 --- /dev/null +++ b/vendor/jsonschema/codecov.yml @@ -0,0 +1,5 @@ +coverage: + status: + patch: + default: + target: 100% diff --git a/vendor/jsonschema/docs/Makefile b/vendor/jsonschema/docs/Makefile new file mode 100644 index 00000000..f6315dfa --- /dev/null +++ b/vendor/jsonschema/docs/Makefile @@ -0,0 +1,227 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +PYTHON = python +PAPER = +BUILDDIR = _build +SOURCEDIR = $(dir $(abspath $(lastword $(MAKEFILE_LIST)))) + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) $(SOURCEDIR) +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
+ +.PHONY: help +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " applehelp to make an Apple Help Book" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " epub3 to make an epub3" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation" + @echo " coverage to run coverage check of the documentation (if enabled)" + @echo " spelling to run a spell check of the documentation" + @echo " dummy to check syntax errors of document sources" + +.PHONY: clean +clean: + rm -rf $(BUILDDIR)/* + +.PHONY: html +html: + $(PYTHON) -m sphinx -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +.PHONY: dirhtml +dirhtml: + $(PYTHON) -m sphinx -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +.PHONY: singlehtml +singlehtml: + $(PYTHON) -m sphinx -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +.PHONY: json +json: + $(PYTHON) -m sphinx -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +.PHONY: htmlhelp +htmlhelp: + $(PYTHON) -m sphinx -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +.PHONY: qthelp +qthelp: + $(PYTHON) -m sphinx -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/jsonschema.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/jsonschema.qhc" + +.PHONY: applehelp +applehelp: + $(PYTHON) -m sphinx -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp + @echo + @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." + @echo "N.B. You won't be able to view it unless you put it in" \ + "~/Library/Documentation/Help or install it in your application" \ + "bundle." + +.PHONY: devhelp +devhelp: + $(PYTHON) -m sphinx -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." 
+ @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/jsonschema" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/jsonschema" + @echo "# devhelp" + +.PHONY: epub +epub: + $(PYTHON) -m sphinx -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +.PHONY: epub3 +epub3: + $(PYTHON) -m sphinx -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3 + @echo + @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3." + +.PHONY: latex +latex: + $(PYTHON) -m sphinx -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +.PHONY: latexpdf +latexpdf: + $(PYTHON) -m sphinx -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +.PHONY: latexpdfja +latexpdfja: + $(PYTHON) -m sphinx -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +.PHONY: text +text: + $(PYTHON) -m sphinx -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +.PHONY: man +man: + $(PYTHON) -m sphinx -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +.PHONY: texinfo +texinfo: + $(PYTHON) -m sphinx -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +.PHONY: info +info: + $(PYTHON) -m sphinx -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +.PHONY: gettext +gettext: + $(PYTHON) -m sphinx -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +.PHONY: changes +changes: + $(PYTHON) -m sphinx -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +.PHONY: linkcheck +linkcheck: + $(PYTHON) -m sphinx -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +.PHONY: doctest +doctest: + $(PYTHON) -m sphinx -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +.PHONY: coverage +coverage: + $(PYTHON) -m sphinx -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage + @echo "Testing of coverage in the sources finished, look at the " \ + "results in $(BUILDDIR)/coverage/python.txt." + +.PHONY: xml +xml: + $(PYTHON) -m sphinx -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +.PHONY: pseudoxml +pseudoxml: + $(PYTHON) -m sphinx -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. 
The pseudo-XML files are in $(BUILDDIR)/pseudoxml." + +.PHONY: spelling +spelling: + $(PYTHON) -m sphinx -b spelling $(ALLSPHINXOPTS) $(BUILDDIR)/spelling + @echo + @echo "Build finished. The spelling files are in $(BUILDDIR)/spelling." + +.PHONY: dummy +dummy: + $(PYTHON) -m sphinx -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy + @echo + @echo "Build finished. Dummy builder generates no files." diff --git a/vendor/jsonschema/docs/conf.py b/vendor/jsonschema/docs/conf.py new file mode 100644 index 00000000..d4199181 --- /dev/null +++ b/vendor/jsonschema/docs/conf.py @@ -0,0 +1,254 @@ +from importlib import metadata +import os +import re +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +ext_paths = [os.path.abspath(os.path.pardir), os.path.dirname(__file__)] +sys.path = ext_paths + sys.path + +# -- General configuration ------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +# needs_sphinx = "1.0" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named "sphinx.ext.*") or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosectionlabel", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.intersphinx", + "sphinx.ext.napoleon", + "sphinx.ext.viewcode", + "sphinx_autodoc_typehints", + "sphinx_json_schema_spec", + "sphinxcontrib.spelling", +] + +cache_path = "_cache" + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix of source filenames. +source_suffix = ".rst" + +# The encoding of source files. +# source_encoding = "utf-8-sig" + +# The master toctree document. +master_doc = "index" + +# General information about the project. +project = "jsonschema" +author = "Julian Berman" +copyright = "2013, " + author + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# version: The short X.Y version +# release: The full version, including alpha/beta/rc tags. +release = metadata.version("jsonschema") +version = release.partition("-")[0] + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = "" +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = "%B %d, %Y" + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build", "_cache", "_static", "_templates"] + +# The reST default role (used for this markup: `text`) to use for all documents +default_role = "any" + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +doctest_global_setup = """ +from jsonschema import * +""" + +intersphinx_mapping = { + "python": ("https://docs.python.org/3", None), +} + + +# -- Options for HTML output ----------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. 
+html_theme = "furo" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +# html_static_path = ["_static"] + +# If not "", a "Last updated on:" timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = "%b %d, %Y" + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = "" + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Output file base name for HTML help builder. +htmlhelp_basename = "jsonschemadoc" + + +# -- Options for LaTeX output ---------------------------------------------- + +# Grouping the document tree into LaTeX files. List of tuples (source +# start file, target name, title, author, documentclass [howto/manual]). +latex_documents = [ + ("index", "jsonschema.tex", "jsonschema Documentation", author, "manual"), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. 
+# latex_domain_indices = True + + +# -- Options for manual page output ---------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [("index", "jsonschema", "jsonschema Documentation", [author], 1)] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output -------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + "index", + "jsonschema", + "jsonschema Documentation", + author, + "jsonschema", + "One line description of project.", + "Miscellaneous", + ), +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: "footnote", "no", or "inline". +# texinfo_show_urls = "footnote" + +# -- Options for the linkcheck builder ------------------------------------ + + +def entire_domain(host): + return r"http.?://" + re.escape(host) + r"($|/.*)" + + +linkcheck_ignore = [ + entire_domain("img.shields.io"), + "https://github.com/python-jsonschema/jsonschema/actions", + "https://github.com/python-jsonschema/jsonschema/workflows/CI/badge.svg", +] + +# -- Options for sphinxcontrib-autosectionlabel --------------------------- + +autosectionlabel_prefix_document = True + +# -- Options for sphinxcontrib-spelling ----------------------------------- + +spelling_word_list_filename = "spelling-wordlist.txt" diff --git a/vendor/jsonschema/docs/creating.rst b/vendor/jsonschema/docs/creating.rst new file mode 100644 index 00000000..810293f9 --- /dev/null +++ b/vendor/jsonschema/docs/creating.rst @@ -0,0 +1,34 @@ +.. currentmodule:: jsonschema.validators + +.. _creating-validators: + +======================================= +Creating or Extending Validator Classes +======================================= + +.. autofunction:: create + +.. autofunction:: extend + +.. autofunction:: validator_for + +.. autofunction:: validates + + +Creating Validation Errors +-------------------------- + +Any validating function that validates against a subschema should call ``descend``, rather than ``iter_errors``. If it recurses into the instance or schema, it should pass one or both of the ``path`` or ``schema_path`` arguments to ``descend`` in order to properly maintain where in the instance or schema respectively the error occurred. + +The Validator Protocol +---------------------- + +``jsonschema`` defines a `protocol `, `jsonschema.protocols.Validator`, which can be used in type annotations to describe the type of a validator object. + +For full details, see `validator-protocol`. diff --git a/vendor/jsonschema/docs/errors.rst b/vendor/jsonschema/docs/errors.rst new file mode 100644 index 00000000..f3d3fd0a --- /dev/null +++ b/vendor/jsonschema/docs/errors.rst @@ -0,0 +1,405 @@ +========================== +Handling Validation Errors +========================== + +.. currentmodule:: jsonschema.exceptions + +When an invalid instance is encountered, a `ValidationError` will be raised or returned, depending on which method or function is used.
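+ +As a quick sketch of the two styles (the schema and instance here are illustrative only): + +.. code-block:: python + + from jsonschema import Draft202012Validator + from jsonschema.exceptions import ValidationError + + validator = Draft202012Validator({"type": "integer"}) + + # validate() raises on the first error it encounters... + try: + validator.validate("spam") + except ValidationError as error: + print(error.message) + + # ...while iter_errors() lazily returns each error instead. + for error in validator.iter_errors("spam"): + print(error.message) + +..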
autoexception:: ValidationError + + The information carried by an error roughly breaks down into: + + =============== ================= ======================== + What Happened Why Did It Happen What Was Being Validated + =============== ================= ======================== + `message` `context` `instance` + + `cause` `json_path` + + `path` + + `schema` + + `schema_path` + + `validator` + + `validator_value` + =============== ================= ======================== + + + .. attribute:: message + + A human readable message explaining the error. + + .. attribute:: validator + + The name of the failed `keyword + `_. + + .. attribute:: validator_value + + The associated value for the failed keyword in the schema. + + .. attribute:: schema + + The full schema that this error came from. This is potentially a + subschema from within the schema that was passed in originally, + or even an entirely different schema if a :kw:`$ref` was + followed. + + .. attribute:: relative_schema_path + + A `collections.deque` containing the path to the failed keyword + within the schema. + + .. attribute:: absolute_schema_path + + A `collections.deque` containing the path to the failed + keyword within the schema, but always relative to the + *original* schema as opposed to any subschema (i.e. the one + originally passed into a validator class, *not* `schema`\). + + .. attribute:: schema_path + + Same as `relative_schema_path`. + + .. attribute:: relative_path + + A `collections.deque` containing the path to the + offending element within the instance. The deque can be empty if + the error happened at the root of the instance. + + .. attribute:: absolute_path + + A `collections.deque` containing the path to the + offending element within the instance. The absolute path + is always relative to the *original* instance that was + validated (i.e. the one passed into a validation method, *not* + `instance`\). The deque can be empty if the error happened + at the root of the instance. + + .. attribute:: json_path + + A `JSON path `_ + to the offending element within the instance. + + .. attribute:: path + + Same as `relative_path`. + + .. attribute:: instance + + The instance that was being validated. This will differ from + the instance originally passed into ``validate`` if the + validator object was in the process of validating a (possibly + nested) element within the top-level instance. The path within + the top-level instance (i.e. `ValidationError.path`) could + be used to find this object, but it is provided for convenience. + + .. attribute:: context + + If the error was caused by errors in subschemas, the list of errors + from the subschemas will be available on this property. The + `schema_path` and `path` of these errors will be relative + to the parent error. + + .. attribute:: cause + + If the error was caused by a *non*-validation error, the + exception object will be here. Currently this is only used + for the exception raised by a failed format checker in + `jsonschema.FormatChecker.check`. + + .. attribute:: parent + + A validation error which this error is the `context` of. + ``None`` if there wasn't one. + + +In case an invalid schema itself is encountered, a `SchemaError` is +raised. + +.. autoexception:: SchemaError + + The same attributes are present as for `ValidationError`\s. + + +These attributes can be clarified with a short example: + +.. 
testcode:: + + schema = { + "items": { + "anyOf": [ + {"type": "string", "maxLength": 2}, + {"type": "integer", "minimum": 5} + ] + } + } + instance = [{}, 3, "foo"] + v = Draft202012Validator(schema) + errors = sorted(v.iter_errors(instance), key=lambda e: e.path) + +The error messages in this situation are not very helpful on their own. + +.. testcode:: + + for error in errors: + print(error.message) + +outputs: + +.. testoutput:: + + {} is not valid under any of the given schemas + 3 is not valid under any of the given schemas + 'foo' is not valid under any of the given schemas + +If we look at `ValidationError.path` on each of the errors, we can find +out which elements in the instance correspond to each of the errors. In +this example, `ValidationError.path` will have only one element, which +will be the index in our list. + +.. testcode:: + + for error in errors: + print(list(error.path)) + +.. testoutput:: + + [0] + [1] + [2] + +Since our schema contained nested subschemas, it can be helpful to look at +the specific part of the instance and subschema that caused each of the errors. +This can be seen with the `ValidationError.instance` and +`ValidationError.schema` attributes. + +With keywords like :kw:`anyOf`, the `ValidationError.context` +attribute can be used to see the sub-errors which caused the failure. Since +these errors actually came from two separate subschemas, it can be helpful to +look at the `ValidationError.schema_path` attribute as well to see where +exactly in the schema each of these errors come from. In the case of sub-errors +from the `ValidationError.context` attribute, this path will be relative +to the `ValidationError.schema_path` of the parent error. + +.. testcode:: + + for error in errors: + for suberror in sorted(error.context, key=lambda e: e.schema_path): + print(list(suberror.schema_path), suberror.message, sep=", ") + +.. testoutput:: + + [0, 'type'], {} is not of type 'string' + [1, 'type'], {} is not of type 'integer' + [0, 'type'], 3 is not of type 'string' + [1, 'minimum'], 3 is less than the minimum of 5 + [0, 'maxLength'], 'foo' is too long + [1, 'type'], 'foo' is not of type 'integer' + +The string representation of an error combines some of these attributes for +easier debugging. + +.. testcode:: + + print(errors[1]) + +.. testoutput:: + + 3 is not valid under any of the given schemas + + Failed validating 'anyOf' in schema['items']: + {'anyOf': [{'maxLength': 2, 'type': 'string'}, + {'minimum': 5, 'type': 'integer'}]} + + On instance[1]: + 3 + + +ErrorTrees +---------- + +If you want to programmatically query which validation keywords +failed when validating a given instance, you may want to do so using +`jsonschema.exceptions.ErrorTree` objects. + +.. autoclass:: jsonschema.exceptions.ErrorTree + :members: + :special-members: + :exclude-members: __dict__,__weakref__ + + .. attribute:: errors + + The mapping of validator keywords to the error objects (usually + `jsonschema.exceptions.ValidationError`\s) at this level + of the tree. + +Consider the following example: + +.. testcode:: + + schema = { + "type" : "array", + "items" : {"type" : "number", "enum" : [1, 2, 3]}, + "minItems" : 3, + } + instance = ["spam", 2] + +For clarity's sake, the given instance has three errors under this schema: + +.. testcode:: + + v = Draft202012Validator(schema) + for error in sorted(v.iter_errors(["spam", 2]), key=str): + print(error.message) + +.. 
testoutput:: + + 'spam' is not of type 'number' + 'spam' is not one of [1, 2, 3] + ['spam', 2] is too short + +Let's construct an `jsonschema.exceptions.ErrorTree` so that we +can query the errors a bit more easily than by just iterating over the +error objects. + +.. testcode:: + + tree = ErrorTree(v.iter_errors(instance)) + +As you can see, `jsonschema.exceptions.ErrorTree` takes an +iterable of `ValidationError`\s when constructing a tree so +you can directly pass it the return value of a validator object's +`jsonschema.protocols.Validator.iter_errors` method. + +`ErrorTree`\s support a number of useful operations. The first one we +might want to perform is to check whether a given element in our instance +failed validation. We do so using the :keyword:`in` operator: + +.. doctest:: + + >>> 0 in tree + True + + >>> 1 in tree + False + +The interpretation here is that the 0th index into the instance (``"spam"``) +did have an error (in fact it had 2), while the 1th index (``2``) did not (i.e. +it was valid). + +If we want to see which errors a child had, we index into the tree and look at +the `ErrorTree.errors` attribute. + +.. doctest:: + + >>> sorted(tree[0].errors) + ['enum', 'type'] + +Here we see that the :kw:`enum` and :kw:`type` keywords failed for +index ``0``. In fact `ErrorTree.errors` is a dict, whose values are the +`ValidationError`\s, so we can get at those directly if we want them. + +.. doctest:: + + >>> print(tree[0].errors["type"].message) + 'spam' is not of type 'number' + +Of course this means that if we want to know if a given validation +keyword failed for a given index, we check for its presence in +`ErrorTree.errors`: + +.. doctest:: + + >>> "enum" in tree[0].errors + True + + >>> "minimum" in tree[0].errors + False + +Finally, if you were paying close enough attention, you'll notice that +we haven't seen our :kw:`minItems` error appear anywhere yet. This is +because :kw:`minItems` is an error that applies globally to the instance +itself. So it appears in the root node of the tree. + +.. doctest:: + + >>> "minItems" in tree.errors + True + +That's all you need to know to use error trees. + +To summarize, each tree contains child trees that can be accessed by +indexing the tree to get the corresponding child tree for a given +index into the instance. Each tree and child has a `ErrorTree.errors` +attribute, a dict, that maps the failed validation keyword to the +corresponding validation error. + + +best_match and relevance +------------------------ + +The `best_match` function is a simple but useful function for attempting +to guess the most relevant error in a given bunch. + +.. doctest:: + + >>> from jsonschema import Draft202012Validator + >>> from jsonschema.exceptions import best_match + + >>> schema = { + ... "type": "array", + ... "minItems": 3, + ... } + >>> print(best_match(Draft202012Validator(schema).iter_errors(11)).message) + 11 is not of type 'array' + + +.. autofunction:: best_match + + +.. function:: relevance(validation_error) + + A key function that sorts errors based on heuristic relevance. + + If you want to sort a bunch of errors entirely, you can use + this function to do so. Using this function as a key to e.g. + `sorted` or `max` will cause more relevant errors to be + considered greater than less relevant ones. + + Within the different validation keywords that can fail, this + function considers :kw:`anyOf` and :kw:`oneOf` to be *weak* + validation errors, and will sort them lower than other errors at the + same level in the instance. 
+ + If you want to change the set of weak [or strong] validation + keywords, you can create a custom version of this function with + `by_relevance` and provide a different set of each. + +.. doctest:: + + >>> schema = { + ... "properties": { + ... "name": {"type": "string"}, + ... "phones": { + ... "properties": { + ... "home": {"type": "string"} + ... }, + ... }, + ... }, + ... } + >>> instance = {"name": 123, "phones": {"home": [123]}} + >>> errors = Draft202012Validator(schema).iter_errors(instance) + >>> [ + ... e.path[-1] + ... for e in sorted(errors, key=exceptions.relevance) + ... ] + ['home', 'name'] + + +.. autofunction:: by_relevance diff --git a/vendor/jsonschema/docs/faq.rst b/vendor/jsonschema/docs/faq.rst new file mode 100644 index 00000000..629f9c50 --- /dev/null +++ b/vendor/jsonschema/docs/faq.rst @@ -0,0 +1,291 @@ +========================== +Frequently Asked Questions +========================== + + +My schema specifies format validation. Why do invalid instances seem valid? +--------------------------------------------------------------------------- + +The :kw:`format` keyword can be a bit of a stumbling block for new users working with JSON Schema. + +In a schema such as: + +.. code-block:: json + + {"type": "string", "format": "date"} + +JSON Schema specifications have historically differentiated between the :kw:`format` keyword and other keywords. In particular, the :kw:`format` keyword was specified to be *informational* as much as it is a tool for validation. + +In other words, for many use cases, schema authors may wish to use values for the :kw:`format` keyword but have no expectation that they be validated alongside the other required assertions in a schema. + +Of course this does not represent all or even most use cases -- many schema authors *do* wish to assert that instances conform fully, even to the specific format mentioned. + +In drafts prior to ``draft2019-09``, the decision on whether to automatically enable :kw:`format` validation was left up to validation implementations such as this one. + +This library made the choice to leave it off by default, for two reasons: + + * for forward compatibility and implementation complexity reasons -- if :kw:`format` validation were on by default, and a future draft of JSON Schema introduced a hard-to-implement format, either the implementation of that format would block releases of this library until it were implemented, or the behavior surrounding :kw:`format` would need to be even more complex than simply defaulting to be on. It therefore was safer to start with it off, and defend against the expectation that a given format would always automatically work. + + * because a common use of JSON Schema is portability across languages (and therefore across implementations of JSON Schema), it seemed better that users confront this point about :kw:`format` validation directly, and therefore remember to check that any *other* implementations they were using also had :kw:`format` validation explicitly enabled. + +As of ``draft2019-09`` however, this off-by-default behavior is now *required* for all validators. + +Difficult as this may sound for new users, it at least means they can expect the same behavior that has always been implemented here from any other implementation they encounter.
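+ +As a concrete sketch of opting in to :kw:`format` validation (the instance and schema here are illustrative): + +.. code-block:: python + + from jsonschema import FormatChecker, validate + from jsonschema.exceptions import ValidationError + + schema = {"type": "string", "format": "date"} + + # Without a format checker, format is informational only, so this passes. + validate(instance="not-a-date", schema=schema) + + # With one, the same instance is rejected. + try: + validate(instance="not-a-date", schema=schema, format_checker=FormatChecker()) + except ValidationError as error: + print(error.message) + +..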
seealso:: + + `Draft 2019-09's release notes on format `_ + + for upstream details on the behavior of format and how it has changed in ``draft2019-09`` + + `validating formats` + + for details on how to enable format validation + + `jsonschema.FormatChecker` + + the object which implements format validation + + +How do I configure a base URI for ``$ref`` resolution using local files? +------------------------------------------------------------------------ + +`jsonschema` supports loading schemas from the filesystem. + +The most common mistake when configuring a :class:`~jsonschema.RefResolver` to retrieve schemas from the local filesystem is to give it a base URI which points to a directory, but forget to add a trailing slash. + +For example, given a directory ``/tmp/foo/`` with ``bar/schema.json`` within it, you should use something like: + +.. code-block:: python + + from pathlib import Path + + import jsonschema.validators + + path = Path("/tmp/foo") + resolver = jsonschema.validators.RefResolver( + base_uri=f"{path.as_uri()}/", + referrer=True, + ) + jsonschema.validate( + instance={}, + schema={"$ref": "bar/schema.json"}, + resolver=resolver, + ) + +Note that: + + * the base URI has a trailing slash, even though `pathlib.PurePath.as_uri` does not add it! + * any relative refs are now resolved relative to the provided directory + +If you forget the trailing slash, you'll find references are resolved a directory too high. + +You're likely familiar with this behavior from your browser. If you visit a page at ``https://example.com/foo``, then links on it like ``<a href="bar">`` take you to ``https://example.com/bar``, not ``https://example.com/foo/bar``. For this reason many sites will redirect ``https://example.com/foo`` to ``https://example.com/foo/``, i.e. add the trailing slash, so that relative links on the page will keep the last path component. + +There are, in summary, two ways to do this properly: + +* Remember to include a trailing slash, so your base URI is ``file:///foo/bar/`` rather than ``file:///foo/bar``, as shown above +* Use a file within the directory as your base URI rather than the directory itself, i.e. ``file:///foo/bar/baz.json``, which will of course cause ``baz.json`` to be dropped from the path when resolving relative URIs + +Why doesn't my schema's default property set the default on my instance? +------------------------------------------------------------------------ + +The basic answer is that the specification does not require that :kw:`default` actually do anything. + +For an inkling as to *why* it doesn't actually do anything, consider that none of the other keywords modify the instance either. More importantly, having :kw:`default` modify the instance can produce quite peculiar things. It's perfectly valid (and perhaps even useful) to have a default that is not valid under the schema it lives in! So an instance modified by the default would pass validation the first time, but fail the second! + +Still, filling in defaults is useful. `jsonschema` allows you to `define your own validator classes and callables `, so you can easily create a `jsonschema.protocols.Validator` that does do default setting. Here's some code to get you started. (In this code, we add the default properties to each object *before* the properties are validated, so the default values themselves will need to be valid under the schema.) + ..
testcode:: + + from jsonschema import Draft202012Validator, validators + + + def extend_with_default(validator_class): + validate_properties = validator_class.VALIDATORS["properties"] + + def set_defaults(validator, properties, instance, schema): + for property, subschema in properties.items(): + if "default" in subschema: + instance.setdefault(property, subschema["default"]) + + for error in validate_properties( + validator, properties, instance, schema, + ): + yield error + + return validators.extend( + validator_class, {"properties" : set_defaults}, + ) + + + DefaultValidatingValidator = extend_with_default(Draft202012Validator) + + + # Example usage: + obj = {} + schema = {'properties': {'foo': {'default': 'bar'}}} + # Note jsonschema.validate(obj, schema, cls=DefaultValidatingValidator) + # will not work because the metaschema contains `default` keywords. + DefaultValidatingValidator(schema).validate(obj) + assert obj == {'foo': 'bar'} + + +See the above-linked document for more info on how this works, +but basically, it just extends the :kw:`properties` keyword on a +`jsonschema.Draft202012Validator` to then go ahead and update all the +defaults. + +.. note:: + + If you're interested in a more interesting solution to a larger + class of these types of transformations, keep an eye on `Seep + `_, which is an experimental + data transformation and extraction library written on top of + `jsonschema`. + + +.. hint:: + + The above code can provide default values for an entire object and + all of its properties, but only if your schema provides a default + value for the object itself, like so: + + .. testcode:: + + schema = { + "type": "object", + "properties": { + "outer-object": { + "type": "object", + "properties" : { + "inner-object": { + "type": "string", + "default": "INNER-DEFAULT" + } + }, + "default": {} # <-- MUST PROVIDE DEFAULT OBJECT + } + } + } + + obj = {} + DefaultValidatingValidator(schema).validate(obj) + assert obj == {'outer-object': {'inner-object': 'INNER-DEFAULT'}} + + ...but if you don't provide a default value for your object, then + it won't be instantiated at all, much less populated with default + properties. + + .. testcode:: + + del schema["properties"]["outer-object"]["default"] + obj2 = {} + DefaultValidatingValidator(schema).validate(obj2) + assert obj2 == {} # whoops + + +How do jsonschema version numbers work? +--------------------------------------- + +``jsonschema`` tries to follow the `Semantic Versioning +`_ specification. + +This means broadly that no backwards-incompatible changes should be made +in minor releases (and certainly not in dot releases). + +The full picture requires defining what constitutes a +backwards-incompatible change. + +The following are simple examples of things considered public API, +and therefore should *not* be changed without bumping a major version +number: + + * module names and contents, when not marked private by Python + convention (a single leading underscore) + + * function and object signature (parameter order and name) + +The following are *not* considered public API and may change without +notice: + + * the exact wording and contents of error messages; typical reasons + to rely on this seem to involve downstream tests in packages using + `jsonschema`. These use cases are encouraged to use the extensive + introspection provided in `jsonschema.exceptions.ValidationError`\s + instead to make meaningful assertions about what failed rather than + relying on *how* what failed is explained to a human. 
+ + * the order in which validation errors are returned or raised + + * the contents of the ``jsonschema.tests`` package + + * the contents of the ``jsonschema.benchmarks`` package + + * the specific non-zero error codes presented by the command line + interface + + * the exact representation of errors presented by the command line + interface, other than that errors represented by the plain outputter + will be reported one per line + + * anything marked private + +With the exception of the last two of those, flippant changes are +avoided, but changes can and will be made if there is improvement to be +had. Feel free to open an issue ticket if there is a specific issue or +question worth raising. diff --git a/vendor/jsonschema/docs/index.rst b/vendor/jsonschema/docs/index.rst new file mode 100644 index 00000000..e4d72527 --- /dev/null +++ b/vendor/jsonschema/docs/index.rst @@ -0,0 +1,22 @@ +.. module:: jsonschema +.. include:: ../README.rst + + +Contents +-------- + +.. toctree:: + :maxdepth: 2 + + validate + errors + references + creating + faq + + +Indices and tables +================== + +* `genindex` +* `search` diff --git a/vendor/jsonschema/docs/make.bat b/vendor/jsonschema/docs/make.bat new file mode 100644 index 00000000..fcb914ff --- /dev/null +++ b/vendor/jsonschema/docs/make.bat @@ -0,0 +1,190 @@ +@ECHO OFF + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set BUILDDIR=_build +set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . +set I18NSPHINXOPTS=%SPHINXOPTS% . +if NOT "%PAPER%" == "" ( + set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% + set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% +) + +if "%1" == "" goto help + +if "%1" == "help" ( + :help + echo.Please use `make ^` where ^ is one of + echo. html to make standalone HTML files + echo. dirhtml to make HTML files named index.html in directories + echo. singlehtml to make a single large HTML file + echo. pickle to make pickle files + echo. json to make JSON files + echo. htmlhelp to make HTML files and a HTML help project + echo. qthelp to make HTML files and a qthelp project + echo. devhelp to make HTML files and a Devhelp project + echo. epub to make an epub + echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter + echo. text to make text files + echo. man to make manual pages + echo. texinfo to make Texinfo files + echo. gettext to make PO message catalogs + echo. changes to make an overview over all changed/added/deprecated items + echo. linkcheck to check all external links for integrity + echo. doctest to run all doctests embedded in the documentation if enabled + goto end +) + +if "%1" == "clean" ( + for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i + del /q /s %BUILDDIR%\* + goto end +) + +if "%1" == "html" ( + %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/html. + goto end +) + +if "%1" == "dirhtml" ( + %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. + goto end +) + +if "%1" == "singlehtml" ( + %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. + goto end +) + +if "%1" == "pickle" ( + %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle + if errorlevel 1 exit /b 1 + echo. 
+ echo.Build finished; now you can process the pickle files. + goto end +) + +if "%1" == "json" ( + %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the JSON files. + goto end +) + +if "%1" == "htmlhelp" ( + %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run HTML Help Workshop with the ^ +.hhp project file in %BUILDDIR%/htmlhelp. + goto end +) + +if "%1" == "qthelp" ( + %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run "qcollectiongenerator" with the ^ +.qhcp project file in %BUILDDIR%/qthelp, like this: + echo.^> qcollectiongenerator %BUILDDIR%\qthelp\jsonschema.qhcp + echo.To view the help file: + echo.^> assistant -collectionFile %BUILDDIR%\qthelp\jsonschema.ghc + goto end +) + +if "%1" == "devhelp" ( + %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. + goto end +) + +if "%1" == "epub" ( + %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The epub file is in %BUILDDIR%/epub. + goto end +) + +if "%1" == "latex" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "text" ( + %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The text files are in %BUILDDIR%/text. + goto end +) + +if "%1" == "man" ( + %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The manual pages are in %BUILDDIR%/man. + goto end +) + +if "%1" == "texinfo" ( + %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. + goto end +) + +if "%1" == "gettext" ( + %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The message catalogs are in %BUILDDIR%/locale. + goto end +) + +if "%1" == "changes" ( + %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes + if errorlevel 1 exit /b 1 + echo. + echo.The overview file is in %BUILDDIR%/changes. + goto end +) + +if "%1" == "linkcheck" ( + %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck + if errorlevel 1 exit /b 1 + echo. + echo.Link check complete; look for any errors in the above output ^ +or in %BUILDDIR%/linkcheck/output.txt. + goto end +) + +if "%1" == "doctest" ( + %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest + if errorlevel 1 exit /b 1 + echo. + echo.Testing of doctests in the sources finished, look at the ^ +results in %BUILDDIR%/doctest/output.txt. + goto end +) + +:end diff --git a/vendor/jsonschema/docs/references.rst b/vendor/jsonschema/docs/references.rst new file mode 100644 index 00000000..9f242994 --- /dev/null +++ b/vendor/jsonschema/docs/references.rst @@ -0,0 +1,13 @@ +========================= +Resolving JSON References +========================= + + +.. currentmodule:: jsonschema + +.. autoclass:: RefResolver + :members: + +.. autoexception:: RefResolutionError + + A JSON reference failed to resolve. 
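+ +A small usage sketch (the URIs and schemas here are invented for illustration): + +.. code-block:: python + + from jsonschema import Draft202012Validator + from jsonschema.validators import RefResolver + + child = {"$id": "https://example.com/child.json", "type": "integer"} + schema = {"$id": "https://example.com/base.json", "$ref": "child.json"} + + # Pre-populating the resolver's store means no network retrieval is needed. + resolver = RefResolver.from_schema(schema, store={child["$id"]: child}) + + # Passes: 12 is an integer per the referenced child schema. + Draft202012Validator(schema, resolver=resolver).validate(12)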
diff --git a/vendor/jsonschema/docs/requirements.in b/vendor/jsonschema/docs/requirements.in new file mode 100644 index 00000000..f9ee77e7 --- /dev/null +++ b/vendor/jsonschema/docs/requirements.in @@ -0,0 +1,7 @@ +file:.#egg=jsonschema +furo +lxml +sphinx +sphinx-autodoc-typehints +sphinx-json-schema-spec +sphinxcontrib-spelling diff --git a/vendor/jsonschema/docs/requirements.txt b/vendor/jsonschema/docs/requirements.txt new file mode 100644 index 00000000..f28c7d81 --- /dev/null +++ b/vendor/jsonschema/docs/requirements.txt @@ -0,0 +1,86 @@ +# +# This file is autogenerated by pip-compile with python 3.10 +# To update, run: +# +# pip-compile docs/requirements.in +# +alabaster==0.7.12 + # via sphinx +attrs==22.1.0 + # via jsonschema +babel==2.10.3 + # via sphinx +beautifulsoup4==4.11.1 + # via furo +certifi==2022.6.15 + # via requests +charset-normalizer==2.1.0 + # via requests +docutils==0.19 + # via sphinx +furo==2022.6.21 + # via -r docs/requirements.in +idna==3.3 + # via requests +imagesize==1.4.1 + # via sphinx +jinja2==3.1.2 + # via sphinx +file:.#egg=jsonschema + # via -r docs/requirements.in +lxml==4.9.1 + # via + # -r docs/requirements.in + # sphinx-json-schema-spec +markupsafe==2.1.1 + # via jinja2 +packaging==21.3 + # via sphinx +pyenchant==3.2.2 + # via sphinxcontrib-spelling +pygments==2.13.0 + # via + # furo + # sphinx +pyparsing==3.0.9 + # via packaging +pyrsistent==0.18.1 + # via jsonschema +pytz==2022.2.1 + # via babel +requests==2.28.1 + # via sphinx +snowballstemmer==2.2.0 + # via sphinx +soupsieve==2.3.2.post1 + # via beautifulsoup4 +sphinx==5.1.1 + # via + # -r docs/requirements.in + # furo + # sphinx-autodoc-typehints + # sphinx-basic-ng + # sphinx-json-schema-spec + # sphinxcontrib-spelling +sphinx-autodoc-typehints==1.19.2 + # via -r docs/requirements.in +sphinx-basic-ng==0.0.1a12 + # via furo +sphinx-json-schema-spec==2.2.2 + # via -r docs/requirements.in +sphinxcontrib-applehelp==1.0.2 + # via sphinx +sphinxcontrib-devhelp==1.0.2 + # via sphinx +sphinxcontrib-htmlhelp==2.0.0 + # via sphinx +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-qthelp==1.0.3 + # via sphinx +sphinxcontrib-serializinghtml==1.1.5 + # via sphinx +sphinxcontrib-spelling==7.6.0 + # via -r docs/requirements.in +urllib3==1.26.11 + # via requests diff --git a/vendor/jsonschema/docs/spelling-wordlist.txt b/vendor/jsonschema/docs/spelling-wordlist.txt new file mode 100644 index 00000000..ab420316 --- /dev/null +++ b/vendor/jsonschema/docs/spelling-wordlist.txt @@ -0,0 +1,45 @@ +# this appears to be misinterpreting Napoleon types as prose, sigh... +Validator +TypeChecker +UnknownType +ValidationError + +# 0th, sigh... +th +callables +deque +dereferences +filesystem +hostname +implementers +indices +# ipv4/6, sigh... +ipv +iterable +iteratively +jsonschema +majorly +metaschema +online +outputter +pre +programmatically +recurses +regex +repr +sensical +subschema +subschemas +subscopes +uri +validator +validators +versioned +schemas + +Zac +HD + +Berman +Libera +GPL diff --git a/vendor/jsonschema/docs/validate.rst b/vendor/jsonschema/docs/validate.rst new file mode 100644 index 00000000..a3eebd7b --- /dev/null +++ b/vendor/jsonschema/docs/validate.rst @@ -0,0 +1,288 @@ +================= +Schema Validation +================= + + +.. currentmodule:: jsonschema + + +The Basics +---------- + +The simplest way to validate an instance under a given schema is to use the +:func:`validate` function. + +.. autofunction:: validate + +.. 
[#] For information on creating JSON schemas to validate
+    your data, there is a good introduction to JSON Schema
+    fundamentals underway at `Understanding JSON Schema
+    <https://json-schema.org/understanding-json-schema/>`_
+
+.. _validator-protocol:
+
+The Validator Protocol
+----------------------
+
+`jsonschema` defines a protocol that all validator classes should adhere
+to.
+
+.. autoclass:: jsonschema.protocols.Validator
+    :members:
+
+All of the `versioned validators <versioned-validators>` that are included
+with `jsonschema` adhere to the protocol, and implementers of validator
+classes that extend or complement the ones included should adhere to it as
+well. For more information see `creating-validators`.
+
+Type Checking
+-------------
+
+To handle JSON Schema's :kw:`type` keyword, a `Validator` uses
+an associated `TypeChecker`. The type checker provides an immutable
+mapping between names of types and functions that can test if an instance is
+of that type. The defaults are suitable for most users -- each of the
+`versioned validators <versioned-validators>` that are included with
+`jsonschema` has a `TypeChecker` that can correctly handle its respective
+version.
+
+.. seealso:: `validating-types`
+
+    For an example of providing a custom type check.
+
+.. autoclass:: TypeChecker
+    :members:
+
+.. autoexception:: jsonschema.exceptions.UndefinedTypeCheck
+
+    Raised when trying to remove a type check that is not known to this
+    TypeChecker, or when calling `jsonschema.TypeChecker.is_type`
+    directly.
+
+.. _validating-types:
+
+Validating With Additional Types
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Occasionally it can be useful to provide additional or alternate types when
+validating JSON Schema's :kw:`type` keyword.
+
+`jsonschema` tries to strike a balance between performance in the common
+case and generality. For instance, JSON Schema defines a ``number`` type, which
+can be validated with a schema such as ``{"type" : "number"}``. By default,
+this will accept instances of Python `numbers.Number`. This includes in
+particular `int`\s and `float`\s, along with
+`decimal.Decimal` objects, `complex` numbers, etc. For
+``integer`` and ``object``, however, rather than checking for
+`numbers.Integral` and `collections.abc.Mapping`,
+`jsonschema` simply checks for `int` and `dict`, since the
+more general instance checks can introduce significant slowdown, especially
+given how common validating these types is.
+
+If you *do* want the generality, or just want to add a few specific additional
+types as being acceptable for a validator object, then you should update an
+existing `TypeChecker` or create a new one. You may then create a new
+`Validator` via `jsonschema.validators.extend`.
+
+.. testcode::
+
+    from jsonschema import Draft202012Validator, validators
+
+    class MyInteger(object):
+        pass
+
+    def is_my_int(checker, instance):
+        return (
+            Draft202012Validator.TYPE_CHECKER.is_type(instance, "number") or
+            isinstance(instance, MyInteger)
+        )
+
+    type_checker = Draft202012Validator.TYPE_CHECKER.redefine(
+        "number", is_my_int,
+    )
+
+    CustomValidator = validators.extend(
+        Draft202012Validator,
+        type_checker=type_checker,
+    )
+    validator = CustomValidator(schema={"type" : "number"})
+
+
+.. autoexception:: jsonschema.exceptions.UnknownType
+
+.. _versioned-validators:
+
+Versioned Validators
+--------------------
+
+`jsonschema` ships with validator classes for various versions of
+the JSON Schema specification. For details on the methods and attributes
+that each validator class provides see the `Validator` protocol,
+which each included validator class implements.
+
+.. autoclass:: Draft202012Validator
+
+.. autoclass:: Draft201909Validator
+
+.. autoclass:: Draft7Validator
+
+.. autoclass:: Draft6Validator
+
+.. autoclass:: Draft4Validator
+
+.. autoclass:: Draft3Validator
+
+
+For example, if you wanted to validate a schema you created against the
+Draft 2020-12 meta-schema, you could use:
+
+.. testcode::
+
+    from jsonschema import Draft202012Validator
+
+    schema = {
+        "$schema": Draft202012Validator.META_SCHEMA["$id"],
+
+        "type": "object",
+        "properties": {
+            "name": {"type": "string"},
+            "email": {"type": "string"},
+        },
+        "required": ["email"]
+    }
+    Draft202012Validator.check_schema(schema)
+
+
+.. _validating formats:
+
+Validating Formats
+------------------
+
+JSON Schema defines the :kw:`format` keyword which can be used to check
+if primitive types (``string``\s, ``number``\s, ``boolean``\s) conform to
+well-defined formats. By default, no validation is enforced, but optionally,
+validation can be enabled by hooking a format-checking object into a
+`Validator`.
+
+.. doctest::
+
+    >>> validate("127.0.0.1", {"format" : "ipv4"})
+    >>> validate(
+    ...     instance="-12",
+    ...     schema={"format" : "ipv4"},
+    ...     format_checker=draft202012_format_checker,
+    ... )
+    Traceback (most recent call last):
+        ...
+    ValidationError: "-12" is not a "ipv4"
+
+.. autoclass:: FormatChecker
+    :members:
+    :exclude-members: cls_checks
+
+    .. attribute:: checkers
+
+        A mapping of currently known formats to tuples of functions that
+        validate them and errors that should be caught. New checkers can be
+        added and removed either per-instance or globally for all checkers
+        using the `FormatChecker.checks` or `FormatChecker.cls_checks`
+        decorators respectively.
+
+    .. classmethod:: cls_checks(format, raises=())
+
+        Register a decorated function as *globally* validating a new format.
+
+        Any instance created after this function is called will pick up the
+        supplied checker.
+
+        :argument str format: the format that the decorated function will check
+        :argument Exception raises: the exception(s) raised
+            by the decorated function when an invalid instance is
+            found. The exception object will be accessible as the
+            `jsonschema.exceptions.ValidationError.cause` attribute
+            of the resulting validation error.
+
+
+.. autoexception:: FormatError
+    :members:
+
+
+There are a number of default checkers that `FormatChecker`\s know how
+to validate. Their names can be viewed by inspecting the
+`FormatChecker.checkers` attribute. Certain checkers will only be
+available if an appropriate package is available for use. The easiest way to
+ensure you have what is needed is to install ``jsonschema`` using the
+``format`` or ``format_nongpl`` collection of optional dependencies -- e.g.
+
+.. code-block:: sh
+
+    $ pip install jsonschema[format]
+
+which will install all of the below dependencies for all formats.
+
+Or if you want to install MIT-license compatible dependencies only:
+
+.. code-block:: sh
+
+    $ pip install jsonschema[format_nongpl]
+
+The non-GPL extra is intended to not install any direct dependencies
+that are GPL (but end-users should of course do their own verification).
+At the moment, it supports all the available checkers except for ``iri`` and
+``iri-reference``.
+
+The more specific list of available checkers, along with their requirements
+(if any), is given below.
+
+.. note::
+
+    If the following packages are not installed when using a checker
+    that requires them, validation will succeed without throwing an error,
+    as specified by the JSON Schema specification.
+
+========================= ====================
+Checker                   Notes
+========================= ====================
+``color``                 requires webcolors_
+``date``
+``date-time``             requires rfc3339-validator_
+``duration``              requires isoduration_
+``email``
+``hostname``              requires fqdn_
+``idn-hostname``          requires idna_
+``ipv4``
+``ipv6``                  OS must have `socket.inet_pton` function
+``iri``                   requires rfc3987_
+``iri-reference``         requires rfc3987_
+``json-pointer``          requires jsonpointer_
+``regex``
+``relative-json-pointer`` requires jsonpointer_
+``time``                  requires rfc3339-validator_
+``uri``                   requires rfc3987_ or rfc3986-validator_
+``uri-reference``         requires rfc3987_ or rfc3986-validator_
+``uri-template``          requires uri-template_
+========================= ====================
+
+
+.. _fqdn: https://pypi.org/pypi/fqdn/
+.. _idna: https://pypi.org/pypi/idna/
+.. _isoduration: https://pypi.org/pypi/isoduration/
+.. _jsonpointer: https://pypi.org/pypi/jsonpointer/
+.. _rfc3339-validator: https://pypi.org/project/rfc3339-validator/
+.. _rfc3986-validator: https://pypi.org/project/rfc3986-validator/
+.. _rfc3987: https://pypi.org/pypi/rfc3987/
+.. _rfc5322: https://tools.ietf.org/html/rfc5322#section-3.4.1
+.. _uri-template: https://pypi.org/pypi/uri-template/
+.. _webcolors: https://pypi.org/pypi/webcolors/
+
+.. note::
+
+    Since in most cases "validating" an email address is really an
+    attempt to confirm that mail sent to it will reach the intended
+    recipient, and since many valid email addresses are incorrectly
+    rejected (and many invalid ones incorrectly accepted) in the wild,
+    the ``email`` format keyword only provides a sanity check, not full
+    rfc5322_ validation.
+
+    The same applies to the ``idn-email`` format.
diff --git a/vendor/jsonschema/json/.github/CODEOWNERS b/vendor/jsonschema/json/.github/CODEOWNERS
new file mode 100644
index 00000000..15f4a2db
--- /dev/null
+++ b/vendor/jsonschema/json/.github/CODEOWNERS
@@ -0,0 +1,2 @@
+# Ping the entire test suite team by default.
+* @json-schema-org/test-suite-team diff --git a/vendor/jsonschema/json/.github/workflows/ci.yml b/vendor/jsonschema/json/.github/workflows/ci.yml new file mode 100644 index 00000000..a826069b --- /dev/null +++ b/vendor/jsonschema/json/.github/workflows/ci.yml @@ -0,0 +1,25 @@ +name: Test Suite Sanity Checking + +on: + push: + pull_request: + release: + types: [published] + schedule: + # Daily at 6:42, arbitrarily as a time that's possibly non-busy + - cron: '42 6 * * *' + +jobs: + ci: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.x' + - name: Install tox + run: python -m pip install tox + - name: Run the sanity checks + run: python -m tox diff --git a/vendor/jsonschema/json/.gitignore b/vendor/jsonschema/json/.gitignore new file mode 100644 index 00000000..68bc17f9 --- /dev/null +++ b/vendor/jsonschema/json/.gitignore @@ -0,0 +1,160 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ diff --git a/vendor/poetry-core/poetry/core/_vendor/jsonschema/LICENSE b/vendor/jsonschema/json/LICENSE similarity index 100% rename from vendor/poetry-core/poetry/core/_vendor/jsonschema/LICENSE rename to vendor/jsonschema/json/LICENSE diff --git a/vendor/jsonschema/json/README.md b/vendor/jsonschema/json/README.md new file mode 100644 index 00000000..88b65c99 --- /dev/null +++ b/vendor/jsonschema/json/README.md @@ -0,0 +1,339 @@ +# JSON Schema Test Suite + +[![Contributor Covenant](https://img.shields.io/badge/Contributor%20Covenant-2.1-4baaaa.svg)](https://github.com/json-schema-org/.github/blob/main/CODE_OF_CONDUCT.md) +[![Project Status: Active – The project has reached a stable, usable state and is being actively developed.](https://www.repostatus.org/badges/latest/active.svg)](https://www.repostatus.org/#active) +[![Financial Contributors on Open Collective](https://opencollective.com/json-schema/all/badge.svg?label=financial+contributors)](https://opencollective.com/json-schema) + +[![DOI](https://zenodo.org/badge/5952934.svg)](https://zenodo.org/badge/latestdoi/5952934) +[![Build Status](https://github.com/json-schema-org/JSON-Schema-Test-Suite/workflows/Test%20Suite%20Sanity%20Checking/badge.svg)](https://github.com/json-schema-org/JSON-Schema-Test-Suite/actions?query=workflow%3A%22Test+Suite+Sanity+Checking%22) + +This repository contains a set of JSON objects that implementers of JSON Schema validation libraries can use to test their validators. + +It is meant to be language agnostic and should require only a JSON parser. +The conversion of the JSON objects into tests within a specific language and test framework of choice is left to be done by the validator implementer. + +## Coverage + +All JSON Schema specification releases should be well covered by this suite, including drafts 2020-12, 2019-09, 07, 06, 04 and 03. +Drafts 04 and 03 are considered "frozen" in that less effort is put in to backport new tests to these versions. + +Additional coverage is always welcome, particularly for bugs encountered in real-world implementations. +If you see anything missing or incorrect, please feel free to [file an issue](https://github.com/json-schema-org/JSON-Schema-Test-Suite/issues) or [submit a PR](https://github.com/json-schema-org/JSON-Schema-Test-Suite). + +## Introduction to the Test Suite Structure + +The tests in this suite are contained in the `tests` directory at the root of this repository. +Inside that directory is a subdirectory for each released version of the specification. + +The structure and contents of each file in these directories is described below. 
+ +In addition to the version-specific subdirectories, two additional directories are present: + +1. `draft-next/`: containing tests for the next version of the specification whilst it is in development +2. `latest/`: a symbolic link which points to the directory which is the most recent release (which may be useful for implementations providing specific entry points for validating against the latest version of the specification) + +Inside each version directory there are a number of `.json` files each containing a collection of related tests. +Often the grouping is by property under test, but not always. +In addition to the `.json` files, each version directory contains one or more special subdirectories whose purpose is [described below](#subdirectories-within-each-draft), and which contain additional `.json` files. + +Each `.json` file consists of a single JSON array of test cases. + +### Terminology + +For clarity, we first define this document's usage of some testing terminology: + +| term | definition | +|-----------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| **test suite** | the entirety of the contents of this repository, containing tests for multiple different releases of the JSON Schema specification | +| **test case** | a single schema, along with a description and an array of *test*s | +| **test** | within a *test case*, a single test example, containing a description, instance and a boolean indicating whether the instance is valid under the test case schema | +| **test runner** | a program, external to this repository and authored by a user of this suite, which is executing each of the tests in the suite | + +An example illustrating this structure is immediately below, and a JSON Schema containing a formal definition of the contents of test cases can be found [alongside this README](./test-schema.json). + +### Sample Test Case + +Here is a single *test case*, containing one or more tests: + +```json +{ + "description": "The test case description", + "schema": { "type": "string" }, + "tests": [ + { + "description": "a test with a valid instance", + "data": "a string", + "valid": true + }, + { + "description": "a test with an invalid instance", + "data": 15, + "valid": false + } + ] +} +``` + +### Subdirectories Within Each Draft + +There is currently only one additional subdirectory that may exist within each draft test directory. + +This is: + +1. `optional/`: Contains tests that are considered optional. + +Note, the `optional/` subdirectory today conflates many reasons why a test may be optional -- it may be because tests within a particular file are indeed not required by the specification but still potentially useful to an implementer, or it may be because tests within it only apply to programming languages with particular functionality (in +which case they are not truly optional in such a language). +In the future this directory structure will be made richer to reflect these differences more clearly. + +## Using the Suite to Test a Validator Implementation + +The test suite structure was described [above](#introduction-to-the-test-suite-structure). + +If you are authoring a new validator implementation, or adding support for an additional version of the specification, this section describes: + +1. How to implement a test runner which passes tests to your validator +2. 
Assumptions the suite makes about how the test runner will configure your validator
+3. Invariants the test suite claims to hold for its tests
+
+### How to Implement a Test Runner
+
+Presented here is a possible implementation of a test runner.
+The precise steps described do not need to be followed exactly, but the results of your own procedure should produce the same effects.
+A compact Python sketch of such a runner appears after the assumptions below.
+
+To test a specific version:
+
+* Load any remote references [described below](#additional-assumptions) and configure your implementation to retrieve them via their URIs
+* Walk the filesystem tree for that version's subdirectory and for each `.json` file found:
+
+  * if the file is located in the root of the version directory:
+
+    * for each test case present in the file:
+
+      * load the schema from the `"schema"` property
+      * load (or log) the test case description from the `"description"` property for debugging or outputting
+      * for each test in the `"tests"` property:
+
+        * load the instance to be tested from the `"data"` property
+        * load (or log) the individual test description from the `"description"` property for debugging or outputting
+        * use the schema loaded above to validate whether the instance is considered valid under your implementation
+        * if the result from your implementation matches the value found in the `"valid"` property, your implementation correctly implements the specific example
+        * if the result does not match, or your implementation errors or crashes, your implementation does not correctly implement the specific example
+
+  * otherwise it is located in a special subdirectory as described above.
+    Follow the additional assumptions and restrictions for the containing subdirectory, then run the test case as above.
+
+If your implementation supports multiple versions, run the above procedure for each version supported, configuring your implementation as appropriate to call each version individually.
+
+### Additional Assumptions
+
+1. The suite, notably in its `refRemote.json` file in each draft, expects a number of remote references to be configured.
+   These are JSON documents, identified by URI, which are used by the suite to test the behavior of the `$ref` keyword (and related keywords).
+   Depending on your implementation, you may configure how to "register" these *either*:
+
+   * by directly retrieving them off the filesystem from the `remotes/` directory, in which case you should load each schema with a retrieval URI of `http://localhost:1234` followed by the relative path from the remotes directory -- e.g. a `$ref` to `http://localhost:1234/foo/bar/baz.json` is expected to resolve to the contents of the file at `remotes/foo/bar/baz.json`
+
+   * or alternatively, by executing `bin/jsonschema_suite remotes` using the executable in the `bin/` directory, which will output a JSON object containing all of the remotes combined, e.g.:
+
+     ```
+     $ bin/jsonschema_suite remotes
+     ```
+     ```json
+     {
+         "http://localhost:1234/baseUriChange/folderInteger.json": {
+             "type": "integer"
+         },
+         "http://localhost:1234/baseUriChangeFolder/folderInteger.json": {
+             "type": "integer"
+         }
+     }
+     ```
+
+2. Test cases found within [special subdirectories](#subdirectories-within-each-draft) may require additional configuration to run.
+   In particular, tests within the `optional/format` subdirectory may require implementations to change the way they treat the `"format"` keyword (particularly on older drafts which did not have a notion of vocabularies).
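The compact sketch promised above: this is a simplified illustration of the runner procedure, not a reference implementation. It assumes a local checkout of this repository, uses Python's `jsonschema` library purely as an example validator, and covers only the root-level files of a single draft directory; `optional/` and remote-reference cases need the extra configuration described in the assumptions.

```python
import json
from pathlib import Path

from jsonschema import Draft7Validator

# Assumption for illustration: running from a checkout of this repo,
# testing draft-07 only. refRemote.json cases will additionally need
# the remotes described above to be registered.
version_dir = Path("tests/draft7")

for case_file in sorted(version_dir.glob("*.json")):  # root-level files only
    for case in json.loads(case_file.read_text()):
        validator = Draft7Validator(case["schema"])
        for test in case["tests"]:
            got = validator.is_valid(test["data"])
            marker = "ok  " if got == test["valid"] else "FAIL"
            print(f"{marker} {case['description']} -> {test['description']}")
```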
+ +### Invariants & Guarantees + +The test suite guarantees a number of things about tests it defines. +Any deviation from the below is generally considered a bug. +If you suspect one, please [file an issue](https://github.com/json-schema-org/JSON-Schema-Test-Suite/issues/new): + +1. All files containing test cases are valid JSON. +2. The contents of the `"schema"` property in a test case are always valid + JSON Schemas under the corresponding specification. + + The rationale behind this is that we are testing instances in a test's `"data"` element, and not the schema itself. + A number of tests *do* test the validity of a schema itself, but do so by representing the schema as an instance inside a test, with the associated meta-schema in the `"schema"` property (via the `"$ref"` keyword): + + ```json + { + "description": "Test the \"type\" schema keyword", + "schema": { + "$ref": "https://json-schema.org/draft/2019-09/schema" + }, + "tests": [ + { + "description": "Valid: string", + "data": { + "type": "string" + }, + "valid": true + }, + { + "description": "Invalid: null", + "data": { + "type": null + }, + "valid": false + } + ] + } + ``` + See below for some [known limitations](#known-limitations). + +## Known Limitations + +This suite expresses its assertions about the behavior of an implementation *within* JSON Schema itself. +Each test is the application of a schema to a particular instance. +This means that the suite of tests can test against any behavior a schema can describe, and conversely cannot test against any behavior which a schema is incapable of representing, even if the behavior is mandated by the specification. + +For example, a schema can require that a string is a _URI-reference_ and even that it matches a certain pattern, but even though the specification contains [recommendations about URIs being normalized](https://json-schema.org/draft/2020-12/json-schema-core.html#name-the-id-keyword), a JSON schema cannot today represent this assertion within the core vocabularies of the specifications, so no test covers this behavior. 
+ +## Who Uses the Test Suite + +This suite is being used by: + +### Clojure + +* [jinx](https://github.com/juxt/jinx) +* [json-schema](https://github.com/tatut/json-schema) + +### Coffeescript + +* [jsck](https://github.com/pandastrike/jsck) + +### Common Lisp + +* [json-schema](https://github.com/fisxoj/json-schema) + +### C++ + +* [Modern C++ JSON schema validator](https://github.com/pboettch/json-schema-validator) + +### Dart + +* [json\_schema](https://github.com/patefacio/json_schema) + +### Elixir + +* [ex\_json\_schema](https://github.com/jonasschmidt/ex_json_schema) + +### Erlang + +* [jesse](https://github.com/for-GET/jesse) + +### Go + +* [gojsonschema](https://github.com/sigu-399/gojsonschema) +* [validate-json](https://github.com/cesanta/validate-json) + +### Haskell + +* [aeson-schema](https://github.com/timjb/aeson-schema) +* [hjsonschema](https://github.com/seagreen/hjsonschema) + +### Java + +* [json-schema-validator](https://github.com/daveclayton/json-schema-validator) +* [everit-org/json-schema](https://github.com/everit-org/json-schema) +* [networknt/json-schema-validator](https://github.com/networknt/json-schema-validator) +* [Justify](https://github.com/leadpony/justify) +* [Snow](https://github.com/ssilverman/snowy-json) +* [jsonschemafriend](https://github.com/jimblackler/jsonschemafriend) + +### JavaScript + +* [json-schema-benchmark](https://github.com/Muscula/json-schema-benchmark) +* [direct-schema](https://github.com/IreneKnapp/direct-schema) +* [is-my-json-valid](https://github.com/mafintosh/is-my-json-valid) +* [jassi](https://github.com/iclanzan/jassi) +* [JaySchema](https://github.com/natesilva/jayschema) +* [json-schema-valid](https://github.com/ericgj/json-schema-valid) +* [Jsonary](https://github.com/jsonary-js/jsonary) +* [jsonschema](https://github.com/tdegrunt/jsonschema) +* [request-validator](https://github.com/bugventure/request-validator) +* [skeemas](https://github.com/Prestaul/skeemas) +* [tv4](https://github.com/geraintluff/tv4) +* [z-schema](https://github.com/zaggino/z-schema) +* [jsen](https://github.com/bugventure/jsen) +* [ajv](https://github.com/epoberezkin/ajv) +* [djv](https://github.com/korzio/djv) + +### Node.js + +For node.js developers, the suite is also available as an [npm](https://www.npmjs.com/package/@json-schema-org/tests) package. + +Node-specific support is maintained in a [separate repository](https://github.com/json-schema-org/json-schema-test-suite-npm) which also welcomes your contributions! 
+ +### .NET + +* [Newtonsoft.Json.Schema](https://github.com/JamesNK/Newtonsoft.Json.Schema) +* [Manatee.Json](https://github.com/gregsdennis/Manatee.Json) + +### Perl + +* [Test::JSON::Schema::Acceptance](https://github.com/karenetheridge/Test-JSON-Schema-Acceptance) (a wrapper of this test suite) +* [JSON::Schema::Modern](https://github.com/karenetheridge/JSON-Schema-Modern) +* [JSON::Schema::Tiny](https://github.com/karenetheridge/JSON-Schema-Tiny) + +### PHP + +* [opis/json-schema](https://github.com/opis/json-schema) +* [json-schema](https://github.com/justinrainbow/json-schema) +* [json-guard](https://github.com/thephpleague/json-guard) + +### PostgreSQL + +* [postgres-json-schema](https://github.com/gavinwahl/postgres-json-schema) +* [is\_jsonb\_valid](https://github.com/furstenheim/is_jsonb_valid) + +### Python + +* [jsonschema](https://github.com/Julian/jsonschema) +* [fastjsonschema](https://github.com/seznam/python-fastjsonschema) +* [hypothesis-jsonschema](https://github.com/Zac-HD/hypothesis-jsonschema) +* [jschon](https://github.com/marksparkza/jschon) +* [python-experimental, OpenAPI Generator](https://github.com/OpenAPITools/openapi-generator/blob/master/docs/generators/python-experimental.md) + +### Ruby + +* [json-schema](https://github.com/hoxworth/json-schema) +* [json\_schemer](https://github.com/davishmcclurg/json_schemer) + +### Rust + +* [jsonschema](https://github.com/Stranger6667/jsonschema-rs) +* [valico](https://github.com/rustless/valico) + +### Scala + +* [typed-json](https://github.com/frawa/typed-json) + +### Swift + +* [JSONSchema](https://github.com/kylef/JSONSchema.swift) + +If you use it as well, please fork and send a pull request adding yourself to +the list :). + +## Contributing + +If you see something missing or incorrect, a pull request is most welcome! + +There are some sanity checks in place for testing the test suite. You can run +them with `bin/jsonschema_suite check` or `tox`. They will be run automatically +by [GitHub Actions](https://github.com/json-schema-org/JSON-Schema-Test-Suite/actions?query=workflow%3A%22Test+Suite+Sanity+Checking%22) +as well. diff --git a/vendor/jsonschema/json/bin/jsonschema_suite b/vendor/jsonschema/json/bin/jsonschema_suite new file mode 100755 index 00000000..8cc28508 --- /dev/null +++ b/vendor/jsonschema/json/bin/jsonschema_suite @@ -0,0 +1,350 @@ +#! /usr/bin/env python3 +from pathlib import Path +from urllib.parse import urljoin +import argparse +import json +import os +import random +import shutil +import sys +import textwrap +import unittest +import warnings + +try: + import jsonschema.validators +except ImportError: + jsonschema = None + VALIDATORS = {} +else: + VALIDATORS = { + "draft3": jsonschema.validators.Draft3Validator, + "draft4": jsonschema.validators.Draft4Validator, + "draft6": jsonschema.validators.Draft6Validator, + "draft7": jsonschema.validators.Draft7Validator, + "draft2019-09": jsonschema.validators.Draft201909Validator, + "draft2020-12": jsonschema.validators.Draft202012Validator, + "latest": jsonschema.validators.Draft202012Validator, + } + + +ROOT_DIR = Path(__file__).parent.parent +SUITE_ROOT_DIR = ROOT_DIR / "tests" + +REMOTES_DIR = ROOT_DIR / "remotes" +REMOTES_BASE_URL = "http://localhost:1234/" + +TESTSUITE_SCHEMA = json.loads((ROOT_DIR / "test-schema.json").read_text()) + + +def files(paths): + """ + Each test file in the provided paths, as an array of test cases. 
+ """ + for path in paths: + yield path, json.loads(path.read_text()) + + +def cases(paths): + """ + Each test case within each file in the provided paths. + """ + for _, test_file in files(paths): + yield from test_file + + +def tests(paths): + """ + Each individual test within all cases within the provided paths. + """ + for case in cases(paths): + for test in case["tests"]: + test["schema"] = case["schema"] + yield test + + +def collect(root_dir): + """ + All of the test file paths within the given root directory, recursively. + """ + return root_dir.glob("**/*.json") + + +def url_for_path(path): + """ + Return the assumed remote URL for a file in the remotes/ directory. + + Tests in the refRemote.json file reference this URL, and assume the + corresponding contents are available at the URL. + """ + + return urljoin( + REMOTES_BASE_URL, + str(path.relative_to(REMOTES_DIR)).replace("\\", "/") # Windows... + ) + + +class SanityTests(unittest.TestCase): + @classmethod + def setUpClass(cls): + print(f"Looking for tests in {SUITE_ROOT_DIR}") + print(f"Looking for remotes in {REMOTES_DIR}") + + cls.test_files = list(collect(SUITE_ROOT_DIR)) + assert cls.test_files, "Didn't find the test files!" + print(f"Found {len(cls.test_files)} test files") + + cls.remote_files = list(collect(REMOTES_DIR)) + assert cls.remote_files, "Didn't find the remote files!" + print(f"Found {len(cls.remote_files)} remote files") + + def assertUnique(self, iterable): + """ + Assert that the elements of an iterable are unique. + """ + + seen, duplicated = set(), set() + for each in iterable: + if each in seen: + duplicated.add(each) + seen.add(each) + self.assertFalse(duplicated, "Elements are not unique.") + + def assertFollowsDescriptionStyle(self, description): + """ + Instead of saying "test that X frobs" or "X should frob" use "X frobs". + + See e.g. https://jml.io/pages/test-docstrings.html + + This test isn't comprehensive (it doesn't catch all the extra + verbiage there), but it's just to catch whatever it manages to + cover. + """ + + message = ( + "In descriptions, don't say 'Test that X frobs' or 'X should " + "frob' or 'X should be valid'. Just say 'X frobs' or 'X is " + "valid'. It's shorter, and the test suite is entirely about " + "what *should* be already. " + "See https://jml.io/pages/test-docstrings.html for help." + ) + self.assertNotRegex(description, r"\bshould\b", message) + self.assertNotRegex(description, r"(?i)\btest(s)? that\b", message) + + def test_all_test_files_are_valid_json(self): + """ + All test files contain valid JSON. + """ + for path in self.test_files: + with self.subTest(path=path): + try: + json.loads(path.read_text()) + except ValueError as error: + self.fail(f"{path} contains invalid JSON ({error})") + + def test_all_remote_files_are_valid_json(self): + """ + All remote files contain valid JSON. + """ + for path in self.remote_files: + with self.subTest(path=path): + try: + json.loads(path.read_text()) + except ValueError as error: + self.fail(f"{path} contains invalid JSON ({error})") + + def test_all_case_descriptions_have_reasonable_length(self): + """ + All cases have reasonably long descriptions. + """ + for case in cases(self.test_files): + with self.subTest(description=case["description"]): + self.assertLess( + len(case["description"]), + 150, + "Description is too long (keep it to less than 150 chars)." + ) + + def test_all_test_descriptions_have_reasonable_length(self): + """ + All tests have reasonably long descriptions. 
+ """ + for count, test in enumerate(tests(self.test_files)): + with self.subTest(description=test["description"]): + self.assertLess( + len(test["description"]), + 70, + "Description is too long (keep it to less than 70 chars)." + ) + print(f"Found {count} tests.") + + def test_all_case_descriptions_are_unique(self): + """ + All cases have unique descriptions in their files. + """ + for path, cases in files(self.test_files): + with self.subTest(path=path): + self.assertUnique(case["description"] for case in cases) + + def test_all_test_descriptions_are_unique(self): + """ + All test cases have unique test descriptions in their tests. + """ + for count, case in enumerate(cases(self.test_files)): + with self.subTest(description=case["description"]): + self.assertUnique( + test["description"] for test in case["tests"] + ) + print(f"Found {count} test cases.") + + def test_case_descriptions_do_not_use_modal_verbs(self): + for case in cases(self.test_files): + with self.subTest(description=case["description"]): + self.assertFollowsDescriptionStyle(case["description"]) + + def test_test_descriptions_do_not_use_modal_verbs(self): + for test in tests(self.test_files): + with self.subTest(description=test["description"]): + self.assertFollowsDescriptionStyle(test["description"]) + + @unittest.skipIf(jsonschema is None, "Validation library not present!") + def test_all_schemas_are_valid(self): + """ + All schemas are valid under their metaschemas. + """ + for version in SUITE_ROOT_DIR.iterdir(): + if not version.is_dir(): + continue + + Validator = VALIDATORS.get(version.name) + if Validator is not None: + test_files = collect(version) + for case in cases(test_files): + with self.subTest(case=case): + try: + Validator.check_schema(case["schema"]) + except jsonschema.SchemaError: + self.fail( + "Found an invalid schema." + "See the traceback for details on why." + ) + else: + warnings.warn(f"No schema validator for {version.name}") + + @unittest.skipIf(jsonschema is None, "Validation library not present!") + def test_suites_are_valid(self): + """ + All test files are valid under test-schema.json. + """ + Validator = jsonschema.validators.validator_for(TESTSUITE_SCHEMA) + validator = Validator(TESTSUITE_SCHEMA) + for path, cases in files(self.test_files): + with self.subTest(path=path): + try: + validator.validate(cases) + except jsonschema.ValidationError as error: + self.fail(str(error)) + + +def main(arguments): + if arguments.command == "check": + suite = unittest.TestLoader().loadTestsFromTestCase(SanityTests) + result = unittest.TextTestRunner().run(suite) + sys.exit(not result.wasSuccessful()) + elif arguments.command == "flatten": + selected_cases = [case for case in cases(collect(arguments.version))] + + if arguments.randomize: + random.shuffle(selected_cases) + + json.dump(selected_cases, sys.stdout, indent=4, sort_keys=True) + elif arguments.command == "remotes": + remotes = { + url_for_path(path): json.loads(path.read_text()) + for path in collect(REMOTES_DIR) + } + json.dump(remotes, sys.stdout, indent=4, sort_keys=True) + elif arguments.command == "dump_remotes": + if arguments.update: + shutil.rmtree(arguments.out_dir, ignore_errors=True) + + try: + shutil.copytree(REMOTES_DIR, arguments.out_dir) + except FileExistsError: + print(f"{arguments.out_dir} already exists. Aborting.") + sys.exit(1) + elif arguments.command == "serve": + try: + import flask + except ImportError: + print(textwrap.dedent(""" + The Flask library is required to serve the remote schemas. 
+
+                You can install it by running `pip install Flask`.
+
+                Alternatively, see the `jsonschema_suite remotes` or
+                `jsonschema_suite dump_remotes` commands to create static files
+                that can be served with your own web server.
+            """.strip("\n")))
+            sys.exit(1)
+
+        app = flask.Flask(__name__)
+
+        @app.route("/<path:path>")
+        def serve_path(path):
+            return flask.send_from_directory(REMOTES_DIR, path)
+
+        app.run(port=1234)
+
+
+parser = argparse.ArgumentParser(
+    description="JSON Schema Test Suite utilities",
+)
+subparsers = parser.add_subparsers(
+    help="utility commands", dest="command", metavar="COMMAND"
+)
+subparsers.required = True
+
+check = subparsers.add_parser("check", help="Sanity check the test suite.")
+
+flatten = subparsers.add_parser(
+    "flatten",
+    help="Output a flattened file containing a selected version's test cases."
+)
+flatten.add_argument(
+    "--randomize",
+    action="store_true",
+    help="Randomize the order of the outputted cases.",
+)
+flatten.add_argument(
+    "version", help="The directory containing the version to output",
+)
+
+remotes = subparsers.add_parser(
+    "remotes",
+    help="Output the expected URLs and their associated schemas for remote "
+         "ref tests as a JSON object."
+)
+
+dump_remotes = subparsers.add_parser(
+    "dump_remotes", help="Dump the remote ref schemas into a file tree",
+)
+dump_remotes.add_argument(
+    "--update",
+    action="store_true",
+    help="Update the remotes in an existing directory.",
+)
+dump_remotes.add_argument(
+    "--out-dir",
+    default=REMOTES_DIR,
+    type=os.path.abspath,
+    help="The output directory to create as the root of the file tree",
+)
+
+serve = subparsers.add_parser(
+    "serve",
+    help="Start a webserver to serve schemas used by remote ref tests."
+)
+
+if __name__ == "__main__":
+    main(parser.parse_args())
diff --git a/vendor/jsonschema/json/package.json b/vendor/jsonschema/json/package.json
new file mode 100644
index 00000000..75da9e29
--- /dev/null
+++ b/vendor/jsonschema/json/package.json
@@ -0,0 +1,12 @@
+{
+  "name": "json-schema-test-suite",
+  "version": "0.1.0",
+  "description": "A language agnostic test suite for the JSON Schema specifications",
+  "repository": "github:json-schema-org/JSON-Schema-Test-Suite",
+  "keywords": [
+    "json-schema",
+    "tests"
+  ],
+  "author": "http://json-schema.org",
+  "license": "MIT"
+}
diff --git a/vendor/jsonschema/json/remotes/baseUriChange/folderInteger.json b/vendor/jsonschema/json/remotes/baseUriChange/folderInteger.json
new file mode 100644
index 00000000..8b50ea30
--- /dev/null
+++ b/vendor/jsonschema/json/remotes/baseUriChange/folderInteger.json
@@ -0,0 +1,3 @@
+{
+    "type": "integer"
+}
diff --git a/vendor/jsonschema/json/remotes/baseUriChangeFolder/folderInteger.json b/vendor/jsonschema/json/remotes/baseUriChangeFolder/folderInteger.json
new file mode 100644
index 00000000..8b50ea30
--- /dev/null
+++ b/vendor/jsonschema/json/remotes/baseUriChangeFolder/folderInteger.json
@@ -0,0 +1,3 @@
+{
+    "type": "integer"
+}
diff --git a/vendor/jsonschema/json/remotes/baseUriChangeFolderInSubschema/folderInteger.json b/vendor/jsonschema/json/remotes/baseUriChangeFolderInSubschema/folderInteger.json
new file mode 100644
index 00000000..8b50ea30
--- /dev/null
+++ b/vendor/jsonschema/json/remotes/baseUriChangeFolderInSubschema/folderInteger.json
@@ -0,0 +1,3 @@
+{
+    "type": "integer"
+}
diff --git a/vendor/jsonschema/json/remotes/draft-next/format-assertion-false.json b/vendor/jsonschema/json/remotes/draft-next/format-assertion-false.json
new file mode 100644
index 00000000..1a55fedf
--- /dev/null
+++ 
b/vendor/jsonschema/json/remotes/draft-next/format-assertion-false.json @@ -0,0 +1,12 @@ +{ + "$id": "http://localhost:1234/draft-next/format-assertion-false.json", + "$schema": "https://json-schema.org/draft/next/schema", + "$vocabulary": { + "https://json-schema.org/draft/next/vocab/core": true, + "https://json-schema.org/draft/next/vocab/format-assertion": false + }, + "allOf": [ + { "$ref": "https://json-schema.org/draft/next/schema/meta/core" }, + { "$ref": "https://json-schema.org/draft/next/schema/meta/format-assertion" } + ] +} diff --git a/vendor/jsonschema/json/remotes/draft-next/format-assertion-true.json b/vendor/jsonschema/json/remotes/draft-next/format-assertion-true.json new file mode 100644 index 00000000..5862dfea --- /dev/null +++ b/vendor/jsonschema/json/remotes/draft-next/format-assertion-true.json @@ -0,0 +1,12 @@ +{ + "$id": "http://localhost:1234/draft-next/format-assertion-true.json", + "$schema": "https://json-schema.org/draft/next/schema", + "$vocabulary": { + "https://json-schema.org/draft/next/vocab/core": true, + "https://json-schema.org/draft/next/vocab/format-assertion": true + }, + "allOf": [ + { "$ref": "https://json-schema.org/draft/next/schema/meta/core" }, + { "$ref": "https://json-schema.org/draft/next/schema/meta/format-assertion" } + ] +} diff --git a/vendor/jsonschema/json/remotes/draft-next/metaschema-no-validation.json b/vendor/jsonschema/json/remotes/draft-next/metaschema-no-validation.json new file mode 100644 index 00000000..2b50c058 --- /dev/null +++ b/vendor/jsonschema/json/remotes/draft-next/metaschema-no-validation.json @@ -0,0 +1,11 @@ +{ + "$id": "http://localhost:1234/draft-next/metaschema-no-validation.json", + "$vocabulary": { + "https://json-schema.org/draft/next/vocab/applicator": true, + "https://json-schema.org/draft/next/vocab/core": true + }, + "allOf": [ + { "$ref": "https://json-schema.org/draft/next/meta/applicator" }, + { "$ref": "https://json-schema.org/draft/next/meta/core" } + ] +} diff --git a/vendor/jsonschema/json/remotes/draft2019-09/dependentRequired.json b/vendor/jsonschema/json/remotes/draft2019-09/dependentRequired.json new file mode 100644 index 00000000..0d691d96 --- /dev/null +++ b/vendor/jsonschema/json/remotes/draft2019-09/dependentRequired.json @@ -0,0 +1,7 @@ +{ + "$id": "http://localhost:1234/draft2019-09/dependentRequired.json", + "$schema": "https://json-schema.org/draft/2019-09/schema", + "dependentRequired": { + "foo": ["bar"] + } +} diff --git a/vendor/jsonschema/json/remotes/draft2019-09/ignore-prefixItems.json b/vendor/jsonschema/json/remotes/draft2019-09/ignore-prefixItems.json new file mode 100644 index 00000000..b5ef3928 --- /dev/null +++ b/vendor/jsonschema/json/remotes/draft2019-09/ignore-prefixItems.json @@ -0,0 +1,7 @@ +{ + "$id": "http://localhost:1234/draft2019-09/ignore-prefixItems.json", + "$schema": "https://json-schema.org/draft/2019-09/schema", + "prefixItems": [ + {"type": "string"} + ] +} diff --git a/vendor/jsonschema/json/remotes/draft2019-09/metaschema-no-validation.json b/vendor/jsonschema/json/remotes/draft2019-09/metaschema-no-validation.json new file mode 100644 index 00000000..9a549447 --- /dev/null +++ b/vendor/jsonschema/json/remotes/draft2019-09/metaschema-no-validation.json @@ -0,0 +1,11 @@ +{ + "$id": "http://localhost:1234/draft2019-09/metaschema-no-validation.json", + "$vocabulary": { + "https://json-schema.org/draft/2019-09/vocab/applicator": true, + "https://json-schema.org/draft/2019-09/vocab/core": true + }, + "allOf": [ + { "$ref": 
"https://json-schema.org/draft/2019-09/meta/applicator" }, + { "$ref": "https://json-schema.org/draft/2019-09/meta/core" } + ] +} diff --git a/vendor/jsonschema/json/remotes/draft2020-12/format-assertion-false.json b/vendor/jsonschema/json/remotes/draft2020-12/format-assertion-false.json new file mode 100644 index 00000000..f1c64d22 --- /dev/null +++ b/vendor/jsonschema/json/remotes/draft2020-12/format-assertion-false.json @@ -0,0 +1,12 @@ +{ + "$id": "http://localhost:1234/draft2020-12/format-assertion-false.json", + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$vocabulary": { + "https://json-schema.org/draft/2020-12/vocab/core": true, + "https://json-schema.org/draft/2020-12/vocab/format-assertion": false + }, + "allOf": [ + { "$ref": "https://json-schema.org/draft/2020-12/schema/meta/core" }, + { "$ref": "https://json-schema.org/draft/2020-12/schema/meta/format-assertion" } + ] +} diff --git a/vendor/jsonschema/json/remotes/draft2020-12/format-assertion-true.json b/vendor/jsonschema/json/remotes/draft2020-12/format-assertion-true.json new file mode 100644 index 00000000..e3d12c81 --- /dev/null +++ b/vendor/jsonschema/json/remotes/draft2020-12/format-assertion-true.json @@ -0,0 +1,12 @@ +{ + "$id": "http://localhost:1234/draft2020-12/format-assertion-true.json", + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$vocabulary": { + "https://json-schema.org/draft/2020-12/vocab/core": true, + "https://json-schema.org/draft/2020-12/vocab/format-assertion": true + }, + "allOf": [ + { "$ref": "https://json-schema.org/draft/2020-12/schema/meta/core" }, + { "$ref": "https://json-schema.org/draft/2020-12/schema/meta/format-assertion" } + ] +} diff --git a/vendor/jsonschema/json/remotes/draft2020-12/metaschema-no-validation.json b/vendor/jsonschema/json/remotes/draft2020-12/metaschema-no-validation.json new file mode 100644 index 00000000..d71f440a --- /dev/null +++ b/vendor/jsonschema/json/remotes/draft2020-12/metaschema-no-validation.json @@ -0,0 +1,11 @@ +{ + "$id": "http://localhost:1234/draft2020-12/metaschema-no-validation.json", + "$vocabulary": { + "https://json-schema.org/draft/2020-12/vocab/applicator": true, + "https://json-schema.org/draft/2020-12/vocab/core": true + }, + "allOf": [ + { "$ref": "https://json-schema.org/draft/2020-12/meta/applicator" }, + { "$ref": "https://json-schema.org/draft/2020-12/meta/core" } + ] +} diff --git a/vendor/jsonschema/json/remotes/draft2020-12/prefixItems.json b/vendor/jsonschema/json/remotes/draft2020-12/prefixItems.json new file mode 100644 index 00000000..acd8293c --- /dev/null +++ b/vendor/jsonschema/json/remotes/draft2020-12/prefixItems.json @@ -0,0 +1,7 @@ +{ + "$id": "http://localhost:1234/draft2020-12/prefixItems.json", + "$schema": "https://json-schema.org/draft/2020-12/schema", + "prefixItems": [ + {"type": "string"} + ] +} diff --git a/vendor/jsonschema/json/remotes/draft7/ignore-dependentRequired.json b/vendor/jsonschema/json/remotes/draft7/ignore-dependentRequired.json new file mode 100644 index 00000000..0ea927b5 --- /dev/null +++ b/vendor/jsonschema/json/remotes/draft7/ignore-dependentRequired.json @@ -0,0 +1,7 @@ +{ + "$id": "http://localhost:1234/draft7/integer.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "dependentRequired": { + "foo": ["bar"] + } +} \ No newline at end of file diff --git a/vendor/jsonschema/json/remotes/extendible-dynamic-ref.json b/vendor/jsonschema/json/remotes/extendible-dynamic-ref.json new file mode 100644 index 00000000..d0bcd37d --- /dev/null +++ 
b/vendor/jsonschema/json/remotes/extendible-dynamic-ref.json @@ -0,0 +1,20 @@ +{ + "description": "extendible array", + "$id": "http://localhost:1234/extendible-dynamic-ref.json", + "type": "object", + "properties": { + "elements": { + "type": "array", + "items": { + "$dynamicRef": "#elements" + } + } + }, + "required": ["elements"], + "additionalProperties": false, + "$defs": { + "elements": { + "$dynamicAnchor": "elements" + } + } +} diff --git a/vendor/jsonschema/json/remotes/integer.json b/vendor/jsonschema/json/remotes/integer.json new file mode 100644 index 00000000..8b50ea30 --- /dev/null +++ b/vendor/jsonschema/json/remotes/integer.json @@ -0,0 +1,3 @@ +{ + "type": "integer" +} diff --git a/vendor/jsonschema/json/remotes/locationIndependentIdentifier.json b/vendor/jsonschema/json/remotes/locationIndependentIdentifier.json new file mode 100644 index 00000000..96b17c3f --- /dev/null +++ b/vendor/jsonschema/json/remotes/locationIndependentIdentifier.json @@ -0,0 +1,11 @@ +{ + "$defs": { + "refToInteger": { + "$ref": "#foo" + }, + "A": { + "$anchor": "foo", + "type": "integer" + } + } +} diff --git a/vendor/jsonschema/json/remotes/locationIndependentIdentifierDraft4.json b/vendor/jsonschema/json/remotes/locationIndependentIdentifierDraft4.json new file mode 100644 index 00000000..eeff1eb2 --- /dev/null +++ b/vendor/jsonschema/json/remotes/locationIndependentIdentifierDraft4.json @@ -0,0 +1,11 @@ +{ + "definitions": { + "refToInteger": { + "$ref": "#foo" + }, + "A": { + "id": "#foo", + "type": "integer" + } + } +} diff --git a/vendor/jsonschema/json/remotes/locationIndependentIdentifierPre2019.json b/vendor/jsonschema/json/remotes/locationIndependentIdentifierPre2019.json new file mode 100644 index 00000000..e72815cd --- /dev/null +++ b/vendor/jsonschema/json/remotes/locationIndependentIdentifierPre2019.json @@ -0,0 +1,11 @@ +{ + "definitions": { + "refToInteger": { + "$ref": "#foo" + }, + "A": { + "$id": "#foo", + "type": "integer" + } + } +} diff --git a/vendor/jsonschema/json/remotes/name-defs.json b/vendor/jsonschema/json/remotes/name-defs.json new file mode 100644 index 00000000..1dab4a43 --- /dev/null +++ b/vendor/jsonschema/json/remotes/name-defs.json @@ -0,0 +1,15 @@ +{ + "$defs": { + "orNull": { + "anyOf": [ + { + "type": "null" + }, + { + "$ref": "#" + } + ] + } + }, + "type": "string" +} diff --git a/vendor/jsonschema/json/remotes/name.json b/vendor/jsonschema/json/remotes/name.json new file mode 100644 index 00000000..fceacb80 --- /dev/null +++ b/vendor/jsonschema/json/remotes/name.json @@ -0,0 +1,15 @@ +{ + "definitions": { + "orNull": { + "anyOf": [ + { + "type": "null" + }, + { + "$ref": "#" + } + ] + } + }, + "type": "string" +} diff --git a/vendor/jsonschema/json/remotes/nested/foo-ref-string.json b/vendor/jsonschema/json/remotes/nested/foo-ref-string.json new file mode 100644 index 00000000..9cd2527e --- /dev/null +++ b/vendor/jsonschema/json/remotes/nested/foo-ref-string.json @@ -0,0 +1,6 @@ +{ + "type": "object", + "properties": { + "foo": {"$ref": "string.json"} + } +} diff --git a/vendor/jsonschema/json/remotes/nested/string.json b/vendor/jsonschema/json/remotes/nested/string.json new file mode 100644 index 00000000..c2d48c06 --- /dev/null +++ b/vendor/jsonschema/json/remotes/nested/string.json @@ -0,0 +1,3 @@ +{ + "type": "string" +} diff --git a/vendor/jsonschema/json/remotes/ref-and-definitions.json b/vendor/jsonschema/json/remotes/ref-and-definitions.json new file mode 100644 index 00000000..e0ee802a --- /dev/null +++ 
b/vendor/jsonschema/json/remotes/ref-and-definitions.json @@ -0,0 +1,11 @@ +{ + "$id": "http://localhost:1234/ref-and-definitions.json", + "definitions": { + "inner": { + "properties": { + "bar": { "type": "string" } + } + } + }, + "allOf": [ { "$ref": "#/definitions/inner" } ] +} diff --git a/vendor/jsonschema/json/remotes/ref-and-defs.json b/vendor/jsonschema/json/remotes/ref-and-defs.json new file mode 100644 index 00000000..85d06c39 --- /dev/null +++ b/vendor/jsonschema/json/remotes/ref-and-defs.json @@ -0,0 +1,11 @@ +{ + "$id": "http://localhost:1234/ref-and-defs.json", + "$defs": { + "inner": { + "properties": { + "bar": { "type": "string" } + } + } + }, + "$ref": "#/$defs/inner" +} diff --git a/vendor/jsonschema/json/remotes/subSchemas-defs.json b/vendor/jsonschema/json/remotes/subSchemas-defs.json new file mode 100644 index 00000000..50b7b6dc --- /dev/null +++ b/vendor/jsonschema/json/remotes/subSchemas-defs.json @@ -0,0 +1,10 @@ +{ + "$defs": { + "integer": { + "type": "integer" + }, + "refToInteger": { + "$ref": "#/$defs/integer" + } + } +} diff --git a/vendor/jsonschema/json/remotes/subSchemas.json b/vendor/jsonschema/json/remotes/subSchemas.json new file mode 100644 index 00000000..9f8030bc --- /dev/null +++ b/vendor/jsonschema/json/remotes/subSchemas.json @@ -0,0 +1,8 @@ +{ + "integer": { + "type": "integer" + }, + "refToInteger": { + "$ref": "#/integer" + } +} diff --git a/vendor/jsonschema/json/remotes/tree.json b/vendor/jsonschema/json/remotes/tree.json new file mode 100644 index 00000000..a12d98b0 --- /dev/null +++ b/vendor/jsonschema/json/remotes/tree.json @@ -0,0 +1,16 @@ +{ + "description": "tree schema, extensible", + "$id": "http://localhost:1234/tree.json", + "$dynamicAnchor": "node", + + "type": "object", + "properties": { + "data": true, + "children": { + "type": "array", + "items": { + "$dynamicRef": "#node" + } + } + } +} diff --git a/vendor/jsonschema/json/test-schema.json b/vendor/jsonschema/json/test-schema.json new file mode 100644 index 00000000..5d250317 --- /dev/null +++ b/vendor/jsonschema/json/test-schema.json @@ -0,0 +1,60 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "description": "A schema for files contained within this suite", + + "type": "array", + "minItems": 1, + "items": { + "description": "An individual test case, containing multiple tests of a single schema's behavior", + + "type": "object", + "required": [ "description", "schema", "tests" ], + "properties": { + "description": { + "description": "The test case description", + "type": "string" + }, + "comment": { + "description": "Any additional comments about the test case", + "type": "string" + }, + "schema": { + "description": "A valid JSON Schema (one written for the corresponding version directory that the file sits within)." + }, + "tests": { + "description": "A set of related tests all using the same schema", + "type": "array", + "items": { "$ref": "#/$defs/test" }, + "minItems": 1 + } + }, + "additionalProperties": false + }, + + "$defs": { + "test": { + "description": "A single test", + + "type": "object", + "required": [ "description", "data", "valid" ], + "properties": { + "description": { + "description": "The test description, briefly explaining which behavior it exercises", + "type": "string" + }, + "comment": { + "description": "Any additional comments about the test", + "type": "string" + }, + "data": { + "description": "The instance which should be validated against the schema in \"schema\"." 
+ }, + "valid": { + "description": "Whether the validation process of this instance should consider the instance valid or not", + "type": "boolean" + } + }, + "additionalProperties": false + } + } +} diff --git a/vendor/jsonschema/json/tests/draft-next/additionalProperties.json b/vendor/jsonschema/json/tests/draft-next/additionalProperties.json new file mode 100644 index 00000000..0f8e1627 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/additionalProperties.json @@ -0,0 +1,147 @@ +[ + { + "description": + "additionalProperties being false does not allow other properties", + "schema": { + "properties": {"foo": {}, "bar": {}}, + "patternProperties": { "^v": {} }, + "additionalProperties": false + }, + "tests": [ + { + "description": "no additional properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "an additional property is invalid", + "data": {"foo" : 1, "bar" : 2, "quux" : "boom"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [1, 2, 3], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobarbaz", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + }, + { + "description": "patternProperties are not additional properties", + "data": {"foo":1, "vroom": 2}, + "valid": true + } + ] + }, + { + "description": "non-ASCII pattern with additionalProperties", + "schema": { + "patternProperties": {"^á": {}}, + "additionalProperties": false + }, + "tests": [ + { + "description": "matching the pattern is valid", + "data": {"ármányos": 2}, + "valid": true + }, + { + "description": "not matching the pattern is invalid", + "data": {"élmény": 2}, + "valid": false + } + ] + }, + { + "description": "additionalProperties with schema", + "schema": { + "properties": {"foo": {}, "bar": {}}, + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "no additional properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "an additional valid property is valid", + "data": {"foo" : 1, "bar" : 2, "quux" : true}, + "valid": true + }, + { + "description": "an additional invalid property is invalid", + "data": {"foo" : 1, "bar" : 2, "quux" : 12}, + "valid": false + } + ] + }, + { + "description": + "additionalProperties can exist by itself", + "schema": { + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "an additional valid property is valid", + "data": {"foo" : true}, + "valid": true + }, + { + "description": "an additional invalid property is invalid", + "data": {"foo" : 1}, + "valid": false + } + ] + }, + { + "description": "additionalProperties are allowed by default", + "schema": {"properties": {"foo": {}, "bar": {}}}, + "tests": [ + { + "description": "additional properties are allowed", + "data": {"foo": 1, "bar": 2, "quux": true}, + "valid": true + } + ] + }, + { + "description": "additionalProperties does not look in applicators", + "schema": { + "allOf": [ + {"properties": {"foo": {}}} + ], + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "properties defined in allOf are not examined", + "data": {"foo": 1, "bar": true}, + "valid": false + } + ] + }, + { + "description": "additionalProperties with null valued instance properties", + "schema": { + "additionalProperties": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foo": null}, + "valid": true + } + ] + } +] diff --git 
a/vendor/jsonschema/json/tests/draft-next/allOf.json b/vendor/jsonschema/json/tests/draft-next/allOf.json new file mode 100644 index 00000000..ec9319e1 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/allOf.json @@ -0,0 +1,294 @@ +[ + { + "description": "allOf", + "schema": { + "allOf": [ + { + "properties": { + "bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "allOf", + "data": {"foo": "baz", "bar": 2}, + "valid": true + }, + { + "description": "mismatch second", + "data": {"foo": "baz"}, + "valid": false + }, + { + "description": "mismatch first", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "wrong type", + "data": {"foo": "baz", "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "allOf with base schema", + "schema": { + "properties": {"bar": {"type": "integer"}}, + "required": ["bar"], + "allOf" : [ + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + }, + { + "properties": { + "baz": {"type": "null"} + }, + "required": ["baz"] + } + ] + }, + "tests": [ + { + "description": "valid", + "data": {"foo": "quux", "bar": 2, "baz": null}, + "valid": true + }, + { + "description": "mismatch base schema", + "data": {"foo": "quux", "baz": null}, + "valid": false + }, + { + "description": "mismatch first allOf", + "data": {"bar": 2, "baz": null}, + "valid": false + }, + { + "description": "mismatch second allOf", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "mismatch both", + "data": {"bar": 2}, + "valid": false + } + ] + }, + { + "description": "allOf simple types", + "schema": { + "allOf": [ + {"maximum": 30}, + {"minimum": 20} + ] + }, + "tests": [ + { + "description": "valid", + "data": 25, + "valid": true + }, + { + "description": "mismatch one", + "data": 35, + "valid": false + } + ] + }, + { + "description": "allOf with boolean schemas, all true", + "schema": {"allOf": [true, true]}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "allOf with boolean schemas, some false", + "schema": {"allOf": [true, false]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "allOf with boolean schemas, all false", + "schema": {"allOf": [false, false]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "allOf with one empty schema", + "schema": { + "allOf": [ + {} + ] + }, + "tests": [ + { + "description": "any data is valid", + "data": 1, + "valid": true + } + ] + }, + { + "description": "allOf with two empty schemas", + "schema": { + "allOf": [ + {}, + {} + ] + }, + "tests": [ + { + "description": "any data is valid", + "data": 1, + "valid": true + } + ] + }, + { + "description": "allOf with the first empty schema", + "schema": { + "allOf": [ + {}, + { "type": "number" } + ] + }, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "allOf with the last empty schema", + "schema": { + "allOf": [ + { "type": "number" }, + {} + ] + }, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + }, 
+ { + "description": "nested allOf, to check validation semantics", + "schema": { + "allOf": [ + { + "allOf": [ + { + "type": "null" + } + ] + } + ] + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "anything non-null is invalid", + "data": 123, + "valid": false + } + ] + }, + { + "description": "allOf combined with anyOf, oneOf", + "schema": { + "allOf": [ { "multipleOf": 2 } ], + "anyOf": [ { "multipleOf": 3 } ], + "oneOf": [ { "multipleOf": 5 } ] + }, + "tests": [ + { + "description": "allOf: false, anyOf: false, oneOf: false", + "data": 1, + "valid": false + }, + { + "description": "allOf: false, anyOf: false, oneOf: true", + "data": 5, + "valid": false + }, + { + "description": "allOf: false, anyOf: true, oneOf: false", + "data": 3, + "valid": false + }, + { + "description": "allOf: false, anyOf: true, oneOf: true", + "data": 15, + "valid": false + }, + { + "description": "allOf: true, anyOf: false, oneOf: false", + "data": 2, + "valid": false + }, + { + "description": "allOf: true, anyOf: false, oneOf: true", + "data": 10, + "valid": false + }, + { + "description": "allOf: true, anyOf: true, oneOf: false", + "data": 6, + "valid": false + }, + { + "description": "allOf: true, anyOf: true, oneOf: true", + "data": 30, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/anchor.json b/vendor/jsonschema/json/tests/draft-next/anchor.json new file mode 100644 index 00000000..9744a3c6 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/anchor.json @@ -0,0 +1,225 @@ +[ + { + "description": "Location-independent identifier", + "schema": { + "$ref": "#foo", + "$defs": { + "A": { + "$anchor": "foo", + "type": "integer" + } + } + }, + "tests": [ + { + "data": 1, + "description": "match", + "valid": true + }, + { + "data": "a", + "description": "mismatch", + "valid": false + } + ] + }, + { + "description": "Location-independent identifier with absolute URI", + "schema": { + "$ref": "http://localhost:1234/bar#foo", + "$defs": { + "A": { + "$id": "http://localhost:1234/bar", + "$anchor": "foo", + "type": "integer" + } + } + }, + "tests": [ + { + "data": 1, + "description": "match", + "valid": true + }, + { + "data": "a", + "description": "mismatch", + "valid": false + } + ] + }, + { + "description": "Location-independent identifier with base URI change in subschema", + "schema": { + "$id": "http://localhost:1234/root", + "$ref": "http://localhost:1234/nested.json#foo", + "$defs": { + "A": { + "$id": "nested.json", + "$defs": { + "B": { + "$anchor": "foo", + "type": "integer" + } + } + } + } + }, + "tests": [ + { + "data": 1, + "description": "match", + "valid": true + }, + { + "data": "a", + "description": "mismatch", + "valid": false + } + ] + }, + { + "description": "$anchor inside an enum is not a real identifier", + "comment": "the implementation must not be confused by an $anchor buried in the enum", + "schema": { + "$defs": { + "anchor_in_enum": { + "enum": [ + { + "$anchor": "my_anchor", + "type": "null" + } + ] + }, + "real_identifier_in_schema": { + "$anchor": "my_anchor", + "type": "string" + }, + "zzz_anchor_in_const": { + "const": { + "$anchor": "my_anchor", + "type": "null" + } + } + }, + "anyOf": [ + { "$ref": "#/$defs/anchor_in_enum" }, + { "$ref": "#my_anchor" } + ] + }, + "tests": [ + { + "description": "exact match to enum, and type matches", + "data": { + "$anchor": "my_anchor", + "type": "null" + }, + "valid": true + }, + { + "description": "in implementations that strip 
$anchor, this may match either $def", + "data": { + "type": "null" + }, + "valid": false + }, + { + "description": "match $ref to $anchor", + "data": "a string to match #/$defs/anchor_in_enum", + "valid": true + }, + { + "description": "no match on enum or $ref to $anchor", + "data": 1, + "valid": false + } + ] + }, + { + "description": "same $anchor with different base uri", + "schema": { + "$id": "http://localhost:1234/foobar", + "$defs": { + "A": { + "$id": "child1", + "allOf": [ + { + "$id": "child2", + "$anchor": "my_anchor", + "type": "number" + }, + { + "$anchor": "my_anchor", + "type": "string" + } + ] + } + }, + "$ref": "child1#my_anchor" + }, + "tests": [ + { + "description": "$ref resolves to /$defs/A/allOf/1", + "data": "a", + "valid": true + }, + { + "description": "$ref does not resolve to /$defs/A/allOf/0", + "data": 1, + "valid": false + } + ] + }, + { + "description": "non-schema object containing an $anchor property", + "schema": { + "$defs": { + "const_not_anchor": { + "const": { + "$anchor": "not_a_real_anchor" + } + } + }, + "if": { + "const": "skip not_a_real_anchor" + }, + "then": true, + "else" : { + "$ref": "#/$defs/const_not_anchor" + } + }, + "tests": [ + { + "description": "skip traversing definition for a valid result", + "data": "skip not_a_real_anchor", + "valid": true + }, + { + "description": "const at const_not_anchor does not match", + "data": 1, + "valid": false + } + ] + }, + { + "description": "invalid anchors", + "schema": { "$ref": "https://json-schema.org/draft/next/schema" }, + "tests": [ + { + "description": "MUST start with a letter (and not #)", + "data": { "$anchor" : "#foo" }, + "valid": false + }, + { + "description": "JSON pointers are not valid", + "data": { "$anchor" : "/a/b" }, + "valid": false + }, + { + "description": "invalid with valid beginning", + "data": { "$anchor" : "foo#something" }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/anyOf.json b/vendor/jsonschema/json/tests/draft-next/anyOf.json new file mode 100644 index 00000000..ab5eb386 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/anyOf.json @@ -0,0 +1,189 @@ +[ + { + "description": "anyOf", + "schema": { + "anyOf": [ + { + "type": "integer" + }, + { + "minimum": 2 + } + ] + }, + "tests": [ + { + "description": "first anyOf valid", + "data": 1, + "valid": true + }, + { + "description": "second anyOf valid", + "data": 2.5, + "valid": true + }, + { + "description": "both anyOf valid", + "data": 3, + "valid": true + }, + { + "description": "neither anyOf valid", + "data": 1.5, + "valid": false + } + ] + }, + { + "description": "anyOf with base schema", + "schema": { + "type": "string", + "anyOf" : [ + { + "maxLength": 2 + }, + { + "minLength": 4 + } + ] + }, + "tests": [ + { + "description": "mismatch base schema", + "data": 3, + "valid": false + }, + { + "description": "one anyOf valid", + "data": "foobar", + "valid": true + }, + { + "description": "both anyOf invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "anyOf with boolean schemas, all true", + "schema": {"anyOf": [true, true]}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "anyOf with boolean schemas, some true", + "schema": {"anyOf": [true, false]}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "anyOf with boolean schemas, all false", + "schema": {"anyOf": [false, false]}, + "tests": [ + { + 
"description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "anyOf complex types", + "schema": { + "anyOf": [ + { + "properties": { + "bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "first anyOf valid (complex)", + "data": {"bar": 2}, + "valid": true + }, + { + "description": "second anyOf valid (complex)", + "data": {"foo": "baz"}, + "valid": true + }, + { + "description": "both anyOf valid (complex)", + "data": {"foo": "baz", "bar": 2}, + "valid": true + }, + { + "description": "neither anyOf valid (complex)", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "anyOf with one empty schema", + "schema": { + "anyOf": [ + { "type": "number" }, + {} + ] + }, + "tests": [ + { + "description": "string is valid", + "data": "foo", + "valid": true + }, + { + "description": "number is valid", + "data": 123, + "valid": true + } + ] + }, + { + "description": "nested anyOf, to check validation semantics", + "schema": { + "anyOf": [ + { + "anyOf": [ + { + "type": "null" + } + ] + } + ] + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "anything non-null is invalid", + "data": 123, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/boolean_schema.json b/vendor/jsonschema/json/tests/draft-next/boolean_schema.json new file mode 100644 index 00000000..6d40f23f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/boolean_schema.json @@ -0,0 +1,104 @@ +[ + { + "description": "boolean schema 'true'", + "schema": true, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "string is valid", + "data": "foo", + "valid": true + }, + { + "description": "boolean true is valid", + "data": true, + "valid": true + }, + { + "description": "boolean false is valid", + "data": false, + "valid": true + }, + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "object is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + }, + { + "description": "array is valid", + "data": ["foo"], + "valid": true + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "boolean schema 'false'", + "schema": false, + "tests": [ + { + "description": "number is invalid", + "data": 1, + "valid": false + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + }, + { + "description": "boolean true is invalid", + "data": true, + "valid": false + }, + { + "description": "boolean false is invalid", + "data": false, + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + }, + { + "description": "object is invalid", + "data": {"foo": "bar"}, + "valid": false + }, + { + "description": "empty object is invalid", + "data": {}, + "valid": false + }, + { + "description": "array is invalid", + "data": ["foo"], + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/const.json b/vendor/jsonschema/json/tests/draft-next/const.json new file mode 100644 index 00000000..1c2cafcc --- /dev/null +++ 
b/vendor/jsonschema/json/tests/draft-next/const.json @@ -0,0 +1,342 @@ +[ + { + "description": "const validation", + "schema": {"const": 2}, + "tests": [ + { + "description": "same value is valid", + "data": 2, + "valid": true + }, + { + "description": "another value is invalid", + "data": 5, + "valid": false + }, + { + "description": "another type is invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "const with object", + "schema": {"const": {"foo": "bar", "baz": "bax"}}, + "tests": [ + { + "description": "same object is valid", + "data": {"foo": "bar", "baz": "bax"}, + "valid": true + }, + { + "description": "same object with different property order is valid", + "data": {"baz": "bax", "foo": "bar"}, + "valid": true + }, + { + "description": "another object is invalid", + "data": {"foo": "bar"}, + "valid": false + }, + { + "description": "another type is invalid", + "data": [1, 2], + "valid": false + } + ] + }, + { + "description": "const with array", + "schema": {"const": [{ "foo": "bar" }]}, + "tests": [ + { + "description": "same array is valid", + "data": [{"foo": "bar"}], + "valid": true + }, + { + "description": "another array item is invalid", + "data": [2], + "valid": false + }, + { + "description": "array with additional items is invalid", + "data": [1, 2, 3], + "valid": false + } + ] + }, + { + "description": "const with null", + "schema": {"const": null}, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "not null is invalid", + "data": 0, + "valid": false + } + ] + }, + { + "description": "const with false does not match 0", + "schema": {"const": false}, + "tests": [ + { + "description": "false is valid", + "data": false, + "valid": true + }, + { + "description": "integer zero is invalid", + "data": 0, + "valid": false + }, + { + "description": "float zero is invalid", + "data": 0.0, + "valid": false + } + ] + }, + { + "description": "const with true does not match 1", + "schema": {"const": true}, + "tests": [ + { + "description": "true is valid", + "data": true, + "valid": true + }, + { + "description": "integer one is invalid", + "data": 1, + "valid": false + }, + { + "description": "float one is invalid", + "data": 1.0, + "valid": false + } + ] + }, + { + "description": "const with [false] does not match [0]", + "schema": {"const": [false]}, + "tests": [ + { + "description": "[false] is valid", + "data": [false], + "valid": true + }, + { + "description": "[0] is invalid", + "data": [0], + "valid": false + }, + { + "description": "[0.0] is invalid", + "data": [0.0], + "valid": false + } + ] + }, + { + "description": "const with [true] does not match [1]", + "schema": {"const": [true]}, + "tests": [ + { + "description": "[true] is valid", + "data": [true], + "valid": true + }, + { + "description": "[1] is invalid", + "data": [1], + "valid": false + }, + { + "description": "[1.0] is invalid", + "data": [1.0], + "valid": false + } + ] + }, + { + "description": "const with {\"a\": false} does not match {\"a\": 0}", + "schema": {"const": {"a": false}}, + "tests": [ + { + "description": "{\"a\": false} is valid", + "data": {"a": false}, + "valid": true + }, + { + "description": "{\"a\": 0} is invalid", + "data": {"a": 0}, + "valid": false + }, + { + "description": "{\"a\": 0.0} is invalid", + "data": {"a": 0.0}, + "valid": false + } + ] + }, + { + "description": "const with {\"a\": true} does not match {\"a\": 1}", + "schema": {"const": {"a": true}}, + "tests": [ + { + "description": "{\"a\": 
true} is valid", + "data": {"a": true}, + "valid": true + }, + { + "description": "{\"a\": 1} is invalid", + "data": {"a": 1}, + "valid": false + }, + { + "description": "{\"a\": 1.0} is invalid", + "data": {"a": 1.0}, + "valid": false + } + ] + }, + { + "description": "const with 0 does not match other zero-like types", + "schema": {"const": 0}, + "tests": [ + { + "description": "false is invalid", + "data": false, + "valid": false + }, + { + "description": "integer zero is valid", + "data": 0, + "valid": true + }, + { + "description": "float zero is valid", + "data": 0.0, + "valid": true + }, + { + "description": "empty object is invalid", + "data": {}, + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + }, + { + "description": "empty string is invalid", + "data": "", + "valid": false + } + ] + }, + { + "description": "const with 1 does not match true", + "schema": {"const": 1}, + "tests": [ + { + "description": "true is invalid", + "data": true, + "valid": false + }, + { + "description": "integer one is valid", + "data": 1, + "valid": true + }, + { + "description": "float one is valid", + "data": 1.0, + "valid": true + } + ] + }, + { + "description": "const with -2.0 matches integer and float types", + "schema": {"const": -2.0}, + "tests": [ + { + "description": "integer -2 is valid", + "data": -2, + "valid": true + }, + { + "description": "integer 2 is invalid", + "data": 2, + "valid": false + }, + { + "description": "float -2.0 is valid", + "data": -2.0, + "valid": true + }, + { + "description": "float 2.0 is invalid", + "data": 2.0, + "valid": false + }, + { + "description": "float -2.00001 is invalid", + "data": -2.00001, + "valid": false + } + ] + }, + { + "description": "float and integers are equal up to 64-bit representation limits", + "schema": {"const": 9007199254740992}, + "tests": [ + { + "description": "integer is valid", + "data": 9007199254740992, + "valid": true + }, + { + "description": "integer minus one is invalid", + "data": 9007199254740991, + "valid": false + }, + { + "description": "float is valid", + "data": 9007199254740992.0, + "valid": true + }, + { + "description": "float minus one is invalid", + "data": 9007199254740991.0, + "valid": false + } + ] + }, + { + "description": "nul characters in strings", + "schema": { "const": "hello\u0000there" }, + "tests": [ + { + "description": "match string with nul", + "data": "hello\u0000there", + "valid": true + }, + { + "description": "do not match string lacking nul", + "data": "hellothere", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/contains.json b/vendor/jsonschema/json/tests/draft-next/contains.json new file mode 100644 index 00000000..4645a141 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/contains.json @@ -0,0 +1,256 @@ +[ + { + "description": "contains keyword validation", + "schema": { + "contains": { "minimum": 5 } + }, + "tests": [ + { + "description": "array with item matching schema (5) is valid", + "data": [3, 4, 5], + "valid": true + }, + { + "description": "array with item matching schema (6) is valid", + "data": [3, 4, 6], + "valid": true + }, + { + "description": "array with two items matching schema (5, 6) is valid", + "data": [3, 4, 5, 6], + "valid": true + }, + { + "description": "array without items matching schema is invalid", + "data": [2, 3, 4], + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + }, + { + "description": "object with property 
matching schema (5) is valid", + "data": { "a": 3, "b": 4, "c": 5 }, + "valid": true + }, + { + "description": "object with property matching schema (6) is valid", + "data": { "a": 3, "b": 4, "c": 6 }, + "valid": true + }, + { + "description": "object with two properties matching schema (5, 6) is valid", + "data": { "a": 3, "b": 4, "c": 5, "d": 6 }, + "valid": true + }, + { + "description": "object without properties matching schema is invalid", + "data": { "a": 2, "b": 3, "c": 4 }, + "valid": false + }, + { + "description": "empty object is invalid", + "data": {}, + "valid": false + }, + { + "description": "not array or object is valid", + "data": 42, + "valid": true + } + ] + }, + { + "description": "contains keyword with const keyword", + "schema": { + "contains": { "const": 5 } + }, + "tests": [ + { + "description": "array with item 5 is valid", + "data": [3, 4, 5], + "valid": true + }, + { + "description": "array with two items 5 is valid", + "data": [3, 4, 5, 5], + "valid": true + }, + { + "description": "array without item 5 is invalid", + "data": [1, 2, 3, 4], + "valid": false + }, + { + "description": "object with property 5 is valid", + "data": { "a": 3, "b": 4, "c": 5 }, + "valid": true + }, + { + "description": "object with two properties 5 is valid", + "data": { "a": 3, "b": 4, "c": 5, "d": 5 }, + "valid": true + }, + { + "description": "object without property 5 is invalid", + "data": { "a": 1, "b": 2, "c": 3, "d": 4 }, + "valid": false + } + ] + }, + { + "description": "contains keyword with boolean schema true", + "schema": { "contains": true }, + "tests": [ + { + "description": "any non-empty array is valid", + "data": ["foo"], + "valid": true + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + }, + { + "description": "any non-empty object is valid", + "data": { "a": "foo" }, + "valid": true + }, + { + "description": "empty object is invalid", + "data": {}, + "valid": false + } + ] + }, + { + "description": "contains keyword with boolean schema false", + "schema": { "contains": false }, + "tests": [ + { + "description": "any non-empty array is invalid", + "data": ["foo"], + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + }, + { + "description": "any non-empty object is invalid", + "data": ["foo"], + "valid": false + }, + { + "description": "empty object is invalid", + "data": {}, + "valid": false + }, + { + "description": "non-arrays/objects are valid", + "data": "contains does not apply to strings", + "valid": true + } + ] + }, + { + "description": "items + contains", + "schema": { + "additionalProperties": { "multipleOf": 2 }, + "items": { "multipleOf": 2 }, + "contains": { "multipleOf": 3 } + }, + "tests": [ + { + "description": "matches items, does not match contains", + "data": [2, 4, 8], + "valid": false + }, + { + "description": "does not match items, matches contains", + "data": [3, 6, 9], + "valid": false + }, + { + "description": "matches both items and contains", + "data": [6, 12], + "valid": true + }, + { + "description": "matches neither items nor contains", + "data": [1, 5], + "valid": false + }, + { + "description": "matches additionalProperties, does not match contains", + "data": { "a": 2, "b": 4, "c": 8 }, + "valid": false + }, + { + "description": "does not match additionalProperties, matches contains", + "data": { "a": 3, "b": 6, "c": 9 }, + "valid": false + }, + { + "description": "matches both additionalProperties and contains", + "data": { "a": 6, "b": 12 }, + 
"valid": true + }, + { + "description": "matches neither additionalProperties nor contains", + "data": { "a": 1, "b": 5 }, + "valid": false + } + ] + }, + { + "description": "contains with false if subschema", + "schema": { + "contains": { + "if": false, + "else": true + } + }, + "tests": [ + { + "description": "any non-empty array is valid", + "data": ["foo"], + "valid": true + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + }, + { + "description": "any non-empty object is valid", + "data": { "a": "foo" }, + "valid": true + }, + { + "description": "empty object is invalid", + "data": {}, + "valid": false + } + ] + }, + { + "description": "contains with null instance elements", + "schema": { + "contains": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null items", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/content.json b/vendor/jsonschema/json/tests/draft-next/content.json new file mode 100644 index 00000000..44688e82 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/content.json @@ -0,0 +1,127 @@ +[ + { + "description": "validation of string-encoded content based on media type", + "schema": { + "contentMediaType": "application/json" + }, + "tests": [ + { + "description": "a valid JSON document", + "data": "{\"foo\": \"bar\"}", + "valid": true + }, + { + "description": "an invalid JSON document; validates true", + "data": "{:}", + "valid": true + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + } + ] + }, + { + "description": "validation of binary string-encoding", + "schema": { + "contentEncoding": "base64" + }, + "tests": [ + { + "description": "a valid base64 string", + "data": "eyJmb28iOiAiYmFyIn0K", + "valid": true + }, + { + "description": "an invalid base64 string (% is not a valid character); validates true", + "data": "eyJmb28iOi%iYmFyIn0K", + "valid": true + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + } + ] + }, + { + "description": "validation of binary-encoded media type documents", + "schema": { + "contentMediaType": "application/json", + "contentEncoding": "base64" + }, + "tests": [ + { + "description": "a valid base64-encoded JSON document", + "data": "eyJmb28iOiAiYmFyIn0K", + "valid": true + }, + { + "description": "a validly-encoded invalid JSON document; validates true", + "data": "ezp9Cg==", + "valid": true + }, + { + "description": "an invalid base64 string that is valid JSON; validates true", + "data": "{}", + "valid": true + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + } + ] + }, + { + "description": "validation of binary-encoded media type documents with schema", + "schema": { + "contentMediaType": "application/json", + "contentEncoding": "base64", + "contentSchema": { "required": ["foo"], "properties": { "foo": { "type": "string" } } } + }, + "tests": [ + { + "description": "a valid base64-encoded JSON document", + "data": "eyJmb28iOiAiYmFyIn0K", + "valid": true + }, + { + "description": "another valid base64-encoded JSON document", + "data": "eyJib28iOiAyMCwgImZvbyI6ICJiYXoifQ==", + "valid": true + }, + { + "description": "an invalid base64-encoded JSON document; validates true", + "data": "eyJib28iOiAyMH0=", + "valid": true + }, + { + "description": "an empty object as a base64-encoded JSON document; validates true", + "data": "e30=", + "valid": true + }, + { + "description": "an empty array as a base64-encoded JSON document", + "data": 
"W10=", + "valid": true + }, + { + "description": "a validly-encoded invalid JSON document; validates true", + "data": "ezp9Cg==", + "valid": true + }, + { + "description": "an invalid base64 string that is valid JSON; validates true", + "data": "{}", + "valid": true + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/default.json b/vendor/jsonschema/json/tests/draft-next/default.json new file mode 100644 index 00000000..289a9b66 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/default.json @@ -0,0 +1,79 @@ +[ + { + "description": "invalid type for default", + "schema": { + "properties": { + "foo": { + "type": "integer", + "default": [] + } + } + }, + "tests": [ + { + "description": "valid when property is specified", + "data": {"foo": 13}, + "valid": true + }, + { + "description": "still valid when the invalid default is used", + "data": {}, + "valid": true + } + ] + }, + { + "description": "invalid string value for default", + "schema": { + "properties": { + "bar": { + "type": "string", + "minLength": 4, + "default": "bad" + } + } + }, + "tests": [ + { + "description": "valid when property is specified", + "data": {"bar": "good"}, + "valid": true + }, + { + "description": "still valid when the invalid default is used", + "data": {}, + "valid": true + } + ] + }, + { + "description": "the default keyword does not do anything if the property is missing", + "schema": { + "type": "object", + "properties": { + "alpha": { + "type": "number", + "maximum": 3, + "default": 5 + } + } + }, + "tests": [ + { + "description": "an explicit property value is checked against maximum (passing)", + "data": { "alpha": 1 }, + "valid": true + }, + { + "description": "an explicit property value is checked against maximum (failing)", + "data": { "alpha": 5 }, + "valid": false + }, + { + "description": "missing properties are not filled in with the default", + "data": {}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/defs.json b/vendor/jsonschema/json/tests/draft-next/defs.json new file mode 100644 index 00000000..12821646 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/defs.json @@ -0,0 +1,20 @@ +[ + { + "description": "validate definition against metaschema", + "schema": { + "$ref": "https://json-schema.org/draft/next/schema" + }, + "tests": [ + { + "description": "valid definition schema", + "data": {"$defs": {"foo": {"type": "integer"}}}, + "valid": true + }, + { + "description": "invalid definition schema", + "data": {"$defs": {"foo": {"type": 1}}}, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/dependentRequired.json b/vendor/jsonschema/json/tests/draft-next/dependentRequired.json new file mode 100644 index 00000000..c817120d --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/dependentRequired.json @@ -0,0 +1,142 @@ +[ + { + "description": "single dependency", + "schema": {"dependentRequired": {"bar": ["foo"]}}, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependant", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "with dependency", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["bar"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { 
+ "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "empty dependents", + "schema": {"dependentRequired": {"bar": []}}, + "tests": [ + { + "description": "empty object", + "data": {}, + "valid": true + }, + { + "description": "object with one property", + "data": {"bar": 2}, + "valid": true + }, + { + "description": "non-object is valid", + "data": 1, + "valid": true + } + ] + }, + { + "description": "multiple dependents required", + "schema": {"dependentRequired": {"quux": ["foo", "bar"]}}, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependants", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "with dependencies", + "data": {"foo": 1, "bar": 2, "quux": 3}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"foo": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing other dependency", + "data": {"bar": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing both dependencies", + "data": {"quux": 1}, + "valid": false + } + ] + }, + { + "description": "dependencies with escaped characters", + "schema": { + "dependentRequired": { + "foo\nbar": ["foo\rbar"], + "foo\"bar": ["foo'bar"] + } + }, + "tests": [ + { + "description": "CRLF", + "data": { + "foo\nbar": 1, + "foo\rbar": 2 + }, + "valid": true + }, + { + "description": "quoted quotes", + "data": { + "foo'bar": 1, + "foo\"bar": 2 + }, + "valid": true + }, + { + "description": "CRLF missing dependent", + "data": { + "foo\nbar": 1, + "foo": 2 + }, + "valid": false + }, + { + "description": "quoted quotes missing dependent", + "data": { + "foo\"bar": 2 + }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/dependentSchemas.json b/vendor/jsonschema/json/tests/draft-next/dependentSchemas.json new file mode 100644 index 00000000..2ba1a757 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/dependentSchemas.json @@ -0,0 +1,129 @@ +[ + { + "description": "single dependency", + "schema": { + "dependentSchemas": { + "bar": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "integer"} + } + } + } + }, + "tests": [ + { + "description": "valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "no dependency", + "data": {"foo": "quux"}, + "valid": true + }, + { + "description": "wrong type", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "wrong type other", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + }, + { + "description": "wrong type both", + "data": {"foo": "quux", "bar": "quux"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["bar"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "boolean subschemas", + "schema": { + "dependentSchemas": { + "foo": true, + "bar": false + } + }, + "tests": [ + { + "description": "object with property having schema true is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "object with property having schema false is invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "object with both properties is invalid", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + }, + { + 
"description": "dependencies with escaped characters", + "schema": { + "dependentSchemas": { + "foo\tbar": {"minProperties": 4}, + "foo'bar": {"required": ["foo\"bar"]} + } + }, + "tests": [ + { + "description": "quoted tab", + "data": { + "foo\tbar": 1, + "a": 2, + "b": 3, + "c": 4 + }, + "valid": true + }, + { + "description": "quoted quote", + "data": { + "foo'bar": {"foo\"bar": 1} + }, + "valid": false + }, + { + "description": "quoted tab invalid under dependent schema", + "data": { + "foo\tbar": 1, + "a": 2 + }, + "valid": false + }, + { + "description": "quoted quote invalid under dependent schema", + "data": {"foo'bar": 1}, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/dynamicRef.json b/vendor/jsonschema/json/tests/draft-next/dynamicRef.json new file mode 100644 index 00000000..fdb0a1ab --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/dynamicRef.json @@ -0,0 +1,476 @@ +[ + { + "description": "A $dynamicRef to a $dynamicAnchor in the same schema resource behaves like a normal $ref to an $anchor", + "schema": { + "$id": "https://test.json-schema.org/dynamicRef-dynamicAnchor-same-schema/root", + "type": "array", + "items": { "$dynamicRef": "#items" }, + "$defs": { + "foo": { + "$dynamicAnchor": "items", + "type": "string" + } + } + }, + "tests": [ + { + "description": "An array of strings is valid", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "An array containing non-strings is invalid", + "data": ["foo", 42], + "valid": false + } + ] + }, + { + "description": "A $ref to a $dynamicAnchor in the same schema resource behaves like a normal $ref to an $anchor", + "schema": { + "$id": "https://test.json-schema.org/ref-dynamicAnchor-same-schema/root", + "type": "array", + "items": { "$ref": "#items" }, + "$defs": { + "foo": { + "$dynamicAnchor": "items", + "type": "string" + } + } + }, + "tests": [ + { + "description": "An array of strings is valid", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "An array containing non-strings is invalid", + "data": ["foo", 42], + "valid": false + } + ] + }, + { + "description": "A $dynamicRef resolves to the first $dynamicAnchor still in scope that is encountered when the schema is evaluated", + "schema": { + "$id": "https://test.json-schema.org/typical-dynamic-resolution/root", + "$ref": "list", + "$defs": { + "foo": { + "$dynamicAnchor": "items", + "type": "string" + }, + "list": { + "$id": "list", + "type": "array", + "items": { "$dynamicRef": "#items" } + } + } + }, + "tests": [ + { + "description": "An array of strings is valid", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "An array containing non-strings is invalid", + "data": ["foo", 42], + "valid": false + } + ] + }, + { + "description": "A $dynamicRef with intermediate scopes that don't include a matching $dynamicAnchor does not affect dynamic scope resolution", + "schema": { + "$id": "https://test.json-schema.org/dynamic-resolution-with-intermediate-scopes/root", + "$ref": "intermediate-scope", + "$defs": { + "foo": { + "$dynamicAnchor": "items", + "type": "string" + }, + "intermediate-scope": { + "$id": "intermediate-scope", + "$ref": "list" + }, + "list": { + "$id": "list", + "type": "array", + "items": { "$dynamicRef": "#items" } + } + } + }, + "tests": [ + { + "description": "An array of strings is valid", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "An array containing non-strings is invalid", + "data": ["foo", 42], + "valid": false + } + ] + }, + { 
+ "description": "An $anchor with the same name as a $dynamicAnchor is not used for dynamic scope resolution", + "schema": { + "$id": "https://test.json-schema.org/dynamic-resolution-ignores-anchors/root", + "$ref": "list", + "$defs": { + "foo": { + "$anchor": "items", + "type": "string" + }, + "list": { + "$id": "list", + "type": "array", + "items": { "$dynamicRef": "#items" }, + "$defs": { + "items": { + "$dynamicAnchor": "items" + } + } + } + } + }, + "tests": [ + { + "description": "Any array is valid", + "data": ["foo", 42], + "valid": true + } + ] + }, + { + "description": "A $dynamicRef that initially resolves to a schema with a matching $dynamicAnchor resolves to the first $dynamicAnchor in the dynamic scope", + "schema": { + "$id": "https://test.json-schema.org/relative-dynamic-reference/root", + "$dynamicAnchor": "meta", + "type": "object", + "properties": { + "foo": { "const": "pass" } + }, + "$ref": "extended", + "$defs": { + "extended": { + "$id": "extended", + "$dynamicAnchor": "meta", + "type": "object", + "properties": { + "bar": { "$ref": "bar" } + } + }, + "bar": { + "$id": "bar", + "type": "object", + "properties": { + "baz": { "$dynamicRef": "extended#meta" } + } + } + } + }, + "tests": [ + { + "description": "The recursive part is valid against the root", + "data": { + "foo": "pass", + "bar": { + "baz": { "foo": "pass" } + } + }, + "valid": true + }, + { + "description": "The recursive part is not valid against the root", + "data": { + "foo": "pass", + "bar": { + "baz": { "foo": "fail" } + } + }, + "valid": false + } + ] + }, + { + "description": "multiple dynamic paths to the $dynamicRef keyword", + "schema": { + "$id": "https://test.json-schema.org/dynamic-ref-with-multiple-paths/main", + "$defs": { + "inner": { + "$id": "inner", + "$dynamicAnchor": "foo", + "title": "inner", + "additionalProperties": { + "$dynamicRef": "#foo" + } + } + }, + "if": { + "propertyNames": { + "pattern": "^[a-m]" + } + }, + "then": { + "title": "any type of node", + "$id": "anyLeafNode", + "$dynamicAnchor": "foo", + "$ref": "inner" + }, + "else": { + "title": "integer node", + "$id": "integerNode", + "$dynamicAnchor": "foo", + "type": [ "object", "integer" ], + "$ref": "inner" + } + }, + "tests": [ + { + "description": "recurse to anyLeafNode - floats are allowed", + "data": { "alpha": 1.1 }, + "valid": true + }, + { + "description": "recurse to integerNode - floats are not allowed", + "data": { "november": 1.1 }, + "valid": false + } + ] + }, + { + "description": "after leaving a dynamic scope, it is not used by a $dynamicRef", + "schema": { + "$id": "https://test.json-schema.org/dynamic-ref-leaving-dynamic-scope/main", + "if": { + "$id": "first_scope", + "$defs": { + "thingy": { + "$comment": "this is first_scope#thingy", + "$dynamicAnchor": "thingy", + "type": "number" + } + } + }, + "then": { + "$id": "second_scope", + "$ref": "start", + "$defs": { + "thingy": { + "$comment": "this is second_scope#thingy, the final destination of the $dynamicRef", + "$dynamicAnchor": "thingy", + "type": "null" + } + } + }, + "$defs": { + "start": { + "$comment": "this is the landing spot from $ref", + "$id": "start", + "$dynamicRef": "inner_scope#thingy" + }, + "thingy": { + "$comment": "this is the first stop for the $dynamicRef", + "$id": "inner_scope", + "$dynamicAnchor": "thingy", + "type": "string" + } + } + }, + "tests": [ + { + "description": "string matches /$defs/thingy, but the $dynamicRef does not stop here", + "data": "a string", + "valid": false + }, + { + "description": "first_scope is 
not in dynamic scope for the $dynamicRef", + "data": 42, + "valid": false + }, + { + "description": "/then/$defs/thingy is the final stop for the $dynamicRef", + "data": null, + "valid": true + } + ] + }, + { + "description": "strict-tree schema, guards against misspelled properties", + "schema": { + "$id": "http://localhost:1234/strict-tree.json", + "$dynamicAnchor": "node", + + "$ref": "tree.json", + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "instance with misspelled field", + "data": { + "children": [{ + "daat": 1 + }] + }, + "valid": false + }, + { + "description": "instance with correct field", + "data": { + "children": [{ + "data": 1 + }] + }, + "valid": true + } + ] + }, + { + "description": "tests for implementation dynamic anchor and reference link", + "schema": { + "$id": "http://localhost:1234/strict-extendible.json", + "$ref": "extendible-dynamic-ref.json", + "$defs": { + "elements": { + "$dynamicAnchor": "elements", + "properties": { + "a": true + }, + "required": ["a"], + "additionalProperties": false + } + } + }, + "tests": [ + { + "description": "incorrect parent schema", + "data": { + "a": true + }, + "valid": false + }, + { + "description": "incorrect extended schema", + "data": { + "elements": [ + { "b": 1 } + ] + }, + "valid": false + }, + { + "description": "correct extended schema", + "data": { + "elements": [ + { "a": 1 } + ] + }, + "valid": true + } + ] + }, + { + "description": "$ref and $dynamicAnchor are independent of order - $defs first", + "schema": { + "$id": "http://localhost:1234/strict-extendible-allof-defs-first.json", + "allOf": [ + { + "$ref": "extendible-dynamic-ref.json" + }, + { + "$defs": { + "elements": { + "$dynamicAnchor": "elements", + "properties": { + "a": true + }, + "required": ["a"], + "additionalProperties": false + } + } + } + ] + }, + "tests": [ + { + "description": "incorrect parent schema", + "data": { + "a": true + }, + "valid": false + }, + { + "description": "incorrect extended schema", + "data": { + "elements": [ + { "b": 1 } + ] + }, + "valid": false + }, + { + "description": "correct extended schema", + "data": { + "elements": [ + { "a": 1 } + ] + }, + "valid": true + } + ] + }, + { + "description": "$ref and $dynamicAnchor are independent of order - $ref first", + "schema": { + "$id": "http://localhost:1234/strict-extendible-allof-ref-first.json", + "allOf": [ + { + "$defs": { + "elements": { + "$dynamicAnchor": "elements", + "properties": { + "a": true + }, + "required": ["a"], + "additionalProperties": false + } + } + }, + { + "$ref": "extendible-dynamic-ref.json" + } + ] + }, + "tests": [ + { + "description": "incorrect parent schema", + "data": { + "a": true + }, + "valid": false + }, + { + "description": "incorrect extended schema", + "data": { + "elements": [ + { "b": 1 } + ] + }, + "valid": false + }, + { + "description": "correct extended schema", + "data": { + "elements": [ + { "a": 1 } + ] + }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/enum.json b/vendor/jsonschema/json/tests/draft-next/enum.json new file mode 100644 index 00000000..f085097b --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/enum.json @@ -0,0 +1,236 @@ +[ + { + "description": "simple enum validation", + "schema": {"enum": [1, 2, 3]}, + "tests": [ + { + "description": "one of the enum is valid", + "data": 1, + "valid": true + }, + { + "description": "something else is invalid", + "data": 4, + "valid": false + } + ] + }, + { + "description": "heterogeneous enum validation", + 
"schema": {"enum": [6, "foo", [], true, {"foo": 12}]}, + "tests": [ + { + "description": "one of the enum is valid", + "data": [], + "valid": true + }, + { + "description": "something else is invalid", + "data": null, + "valid": false + }, + { + "description": "objects are deep compared", + "data": {"foo": false}, + "valid": false + }, + { + "description": "valid object matches", + "data": {"foo": 12}, + "valid": true + }, + { + "description": "extra properties in object is invalid", + "data": {"foo": 12, "boo": 42}, + "valid": false + } + ] + }, + { + "description": "heterogeneous enum-with-null validation", + "schema": { "enum": [6, null] }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "number is valid", + "data": 6, + "valid": true + }, + { + "description": "something else is invalid", + "data": "test", + "valid": false + } + ] + }, + { + "description": "enums in properties", + "schema": { + "type":"object", + "properties": { + "foo": {"enum":["foo"]}, + "bar": {"enum":["bar"]} + }, + "required": ["bar"] + }, + "tests": [ + { + "description": "both properties are valid", + "data": {"foo":"foo", "bar":"bar"}, + "valid": true + }, + { + "description": "wrong foo value", + "data": {"foo":"foot", "bar":"bar"}, + "valid": false + }, + { + "description": "wrong bar value", + "data": {"foo":"foo", "bar":"bart"}, + "valid": false + }, + { + "description": "missing optional property is valid", + "data": {"bar":"bar"}, + "valid": true + }, + { + "description": "missing required property is invalid", + "data": {"foo":"foo"}, + "valid": false + }, + { + "description": "missing all properties is invalid", + "data": {}, + "valid": false + } + ] + }, + { + "description": "enum with escaped characters", + "schema": { + "enum": ["foo\nbar", "foo\rbar"] + }, + "tests": [ + { + "description": "member 1 is valid", + "data": "foo\nbar", + "valid": true + }, + { + "description": "member 2 is valid", + "data": "foo\rbar", + "valid": true + }, + { + "description": "another string is invalid", + "data": "abc", + "valid": false + } + ] + }, + { + "description": "enum with false does not match 0", + "schema": {"enum": [false]}, + "tests": [ + { + "description": "false is valid", + "data": false, + "valid": true + }, + { + "description": "integer zero is invalid", + "data": 0, + "valid": false + }, + { + "description": "float zero is invalid", + "data": 0.0, + "valid": false + } + ] + }, + { + "description": "enum with true does not match 1", + "schema": {"enum": [true]}, + "tests": [ + { + "description": "true is valid", + "data": true, + "valid": true + }, + { + "description": "integer one is invalid", + "data": 1, + "valid": false + }, + { + "description": "float one is invalid", + "data": 1.0, + "valid": false + } + ] + }, + { + "description": "enum with 0 does not match false", + "schema": {"enum": [0]}, + "tests": [ + { + "description": "false is invalid", + "data": false, + "valid": false + }, + { + "description": "integer zero is valid", + "data": 0, + "valid": true + }, + { + "description": "float zero is valid", + "data": 0.0, + "valid": true + } + ] + }, + { + "description": "enum with 1 does not match true", + "schema": {"enum": [1]}, + "tests": [ + { + "description": "true is invalid", + "data": true, + "valid": false + }, + { + "description": "integer one is valid", + "data": 1, + "valid": true + }, + { + "description": "float one is valid", + "data": 1.0, + "valid": true + } + ] + }, + { + "description": "nul characters in strings", + 
"schema": { "enum": [ "hello\u0000there" ] }, + "tests": [ + { + "description": "match string with nul", + "data": "hello\u0000there", + "valid": true + }, + { + "description": "do not match string lacking nul", + "data": "hellothere", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/exclusiveMaximum.json b/vendor/jsonschema/json/tests/draft-next/exclusiveMaximum.json new file mode 100644 index 00000000..dc3cd709 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/exclusiveMaximum.json @@ -0,0 +1,30 @@ +[ + { + "description": "exclusiveMaximum validation", + "schema": { + "exclusiveMaximum": 3.0 + }, + "tests": [ + { + "description": "below the exclusiveMaximum is valid", + "data": 2.2, + "valid": true + }, + { + "description": "boundary point is invalid", + "data": 3.0, + "valid": false + }, + { + "description": "above the exclusiveMaximum is invalid", + "data": 3.5, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/exclusiveMinimum.json b/vendor/jsonschema/json/tests/draft-next/exclusiveMinimum.json new file mode 100644 index 00000000..b38d7ece --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/exclusiveMinimum.json @@ -0,0 +1,30 @@ +[ + { + "description": "exclusiveMinimum validation", + "schema": { + "exclusiveMinimum": 1.1 + }, + "tests": [ + { + "description": "above the exclusiveMinimum is valid", + "data": 1.2, + "valid": true + }, + { + "description": "boundary point is invalid", + "data": 1.1, + "valid": false + }, + { + "description": "below the exclusiveMinimum is invalid", + "data": 0.6, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/format.json b/vendor/jsonschema/json/tests/draft-next/format.json new file mode 100644 index 00000000..a4b51d28 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/format.json @@ -0,0 +1,686 @@ +[ + { + "description": "email format", + "schema": { "format": "email" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "idn-email format", + "schema": { "format": "idn-email" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "regex format", + "schema": { "format": "regex" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": 
true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "ipv4 format", + "schema": { "format": "ipv4" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "ipv6 format", + "schema": { "format": "ipv6" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "idn-hostname format", + "schema": { "format": "idn-hostname" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "hostname format", + "schema": { "format": "hostname" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "date format", + "schema": { "format": "date" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + 
"data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "date-time format", + "schema": { "format": "date-time" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "time format", + "schema": { "format": "time" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "json-pointer format", + "schema": { "format": "json-pointer" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "relative-json-pointer format", + "schema": { "format": "relative-json-pointer" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "iri format", + "schema": { "format": "iri" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] 
+ }, + { + "description": "iri-reference format", + "schema": { "format": "iri-reference" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "uri format", + "schema": { "format": "uri" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "uri-reference format", + "schema": { "format": "uri-reference" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "uri-template format", + "schema": { "format": "uri-template" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "uuid format", + "schema": { "format": "uuid" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "duration format", + "schema": { "format": "duration" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats 
ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/id.json b/vendor/jsonschema/json/tests/draft-next/id.json new file mode 100644 index 00000000..db7afaf2 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/id.json @@ -0,0 +1,289 @@ +[ + { + "description": "Invalid use of fragments in location-independent $id", + "schema": { + "$ref": "https://json-schema.org/draft/next/schema" + }, + "tests": [ + { + "description": "Identifier name", + "data": { + "$ref": "#foo", + "$defs": { + "A": { + "$id": "#foo", + "type": "integer" + } + } + }, + "valid": false + }, + { + "description": "Identifier name and no ref", + "data": { + "$defs": { + "A": { "$id": "#foo" } + } + }, + "valid": false + }, + { + "description": "Identifier path", + "data": { + "$ref": "#/a/b", + "$defs": { + "A": { + "$id": "#/a/b", + "type": "integer" + } + } + }, + "valid": false + }, + { + "description": "Identifier name with absolute URI", + "data": { + "$ref": "http://localhost:1234/bar#foo", + "$defs": { + "A": { + "$id": "http://localhost:1234/bar#foo", + "type": "integer" + } + } + }, + "valid": false + }, + { + "description": "Identifier path with absolute URI", + "data": { + "$ref": "http://localhost:1234/bar#/a/b", + "$defs": { + "A": { + "$id": "http://localhost:1234/bar#/a/b", + "type": "integer" + } + } + }, + "valid": false + }, + { + "description": "Identifier name with base URI change in subschema", + "data": { + "$id": "http://localhost:1234/root", + "$ref": "http://localhost:1234/nested.json#foo", + "$defs": { + "A": { + "$id": "nested.json", + "$defs": { + "B": { + "$id": "#foo", + "type": "integer" + } + } + } + } + }, + "valid": false + }, + { + "description": "Identifier path with base URI change in subschema", + "data": { + "$id": "http://localhost:1234/root", + "$ref": "http://localhost:1234/nested.json#/a/b", + "$defs": { + "A": { + "$id": "nested.json", + "$defs": { + "B": { + "$id": "#/a/b", + "type": "integer" + } + } + } + } + }, + "valid": false + } + ] + }, + { + "description": "Valid use of empty fragments in location-independent $id", + "comment": "These are allowed but discouraged", + "schema": { + "$ref": "https://json-schema.org/draft/next/schema" + }, + "tests": [ + { + "description": "Identifier name with absolute URI", + "data": { + "$ref": "http://localhost:1234/bar", + "$defs": { + "A": { + "$id": "http://localhost:1234/bar#", + "type": "integer" + } + } + }, + "valid": true + }, + { + "description": "Identifier name with base URI change in subschema", + "data": { + "$id": "http://localhost:1234/root", + "$ref": "http://localhost:1234/nested.json#/$defs/B", + "$defs": { + "A": { + "$id": "nested.json", + "$defs": { + "B": { + "$id": "#", + "type": "integer" + } + } + } + } + }, + "valid": true + } + ] + }, + { + "description": "Unnormalized $ids are allowed but discouraged", + "schema": { + "$ref": "https://json-schema.org/draft/next/schema" + }, + "tests": [ + { + "description": "Unnormalized identifier", + "data": { + "$ref": "http://localhost:1234/foo/baz", + "$defs": { + "A": { + "$id": "http://localhost:1234/foo/bar/../baz", + "type": 
"integer" + } + } + }, + "valid": true + }, + { + "description": "Unnormalized identifier and no ref", + "data": { + "$defs": { + "A": { + "$id": "http://localhost:1234/foo/bar/../baz", + "type": "integer" + } + } + }, + "valid": true + }, + { + "description": "Unnormalized identifier with empty fragment", + "data": { + "$ref": "http://localhost:1234/foo/baz", + "$defs": { + "A": { + "$id": "http://localhost:1234/foo/bar/../baz#", + "type": "integer" + } + } + }, + "valid": true + }, + { + "description": "Unnormalized identifier with empty fragment and no ref", + "data": { + "$defs": { + "A": { + "$id": "http://localhost:1234/foo/bar/../baz#", + "type": "integer" + } + } + }, + "valid": true + } + ] + }, + { + "description": "$id inside an enum is not a real identifier", + "comment": "the implementation must not be confused by an $id buried in the enum", + "schema": { + "$defs": { + "id_in_enum": { + "enum": [ + { + "$id": "https://localhost:1234/id/my_identifier.json", + "type": "null" + } + ] + }, + "real_id_in_schema": { + "$id": "https://localhost:1234/id/my_identifier.json", + "type": "string" + }, + "zzz_id_in_const": { + "const": { + "$id": "https://localhost:1234/id/my_identifier.json", + "type": "null" + } + } + }, + "anyOf": [ + { "$ref": "#/$defs/id_in_enum" }, + { "$ref": "https://localhost:1234/id/my_identifier.json" } + ] + }, + "tests": [ + { + "description": "exact match to enum, and type matches", + "data": { + "$id": "https://localhost:1234/id/my_identifier.json", + "type": "null" + }, + "valid": true + }, + { + "description": "match $ref to $id", + "data": "a string to match #/$defs/id_in_enum", + "valid": true + }, + { + "description": "no match on enum or $ref to $id", + "data": 1, + "valid": false + } + ] + }, + { + "description": "non-schema object containing an $id property", + "schema": { + "$defs": { + "const_not_id": { + "const": { + "$id": "not_a_real_id" + } + } + }, + "if": { + "const": "skip not_a_real_id" + }, + "then": true, + "else" : { + "$ref": "#/$defs/const_not_id" + } + }, + "tests": [ + { + "description": "skip traversing definition for a valid result", + "data": "skip not_a_real_id", + "valid": true + }, + { + "description": "const at const_not_id does not match", + "data": 1, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/if-then-else.json b/vendor/jsonschema/json/tests/draft-next/if-then-else.json new file mode 100644 index 00000000..284e9191 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/if-then-else.json @@ -0,0 +1,258 @@ +[ + { + "description": "ignore if without then or else", + "schema": { + "if": { + "const": 0 + } + }, + "tests": [ + { + "description": "valid when valid against lone if", + "data": 0, + "valid": true + }, + { + "description": "valid when invalid against lone if", + "data": "hello", + "valid": true + } + ] + }, + { + "description": "ignore then without if", + "schema": { + "then": { + "const": 0 + } + }, + "tests": [ + { + "description": "valid when valid against lone then", + "data": 0, + "valid": true + }, + { + "description": "valid when invalid against lone then", + "data": "hello", + "valid": true + } + ] + }, + { + "description": "ignore else without if", + "schema": { + "else": { + "const": 0 + } + }, + "tests": [ + { + "description": "valid when valid against lone else", + "data": 0, + "valid": true + }, + { + "description": "valid when invalid against lone else", + "data": "hello", + "valid": true + } + ] + }, + { + "description": "if and then without else", + 
"schema": { + "if": { + "exclusiveMaximum": 0 + }, + "then": { + "minimum": -10 + } + }, + "tests": [ + { + "description": "valid through then", + "data": -1, + "valid": true + }, + { + "description": "invalid through then", + "data": -100, + "valid": false + }, + { + "description": "valid when if test fails", + "data": 3, + "valid": true + } + ] + }, + { + "description": "if and else without then", + "schema": { + "if": { + "exclusiveMaximum": 0 + }, + "else": { + "multipleOf": 2 + } + }, + "tests": [ + { + "description": "valid when if test passes", + "data": -1, + "valid": true + }, + { + "description": "valid through else", + "data": 4, + "valid": true + }, + { + "description": "invalid through else", + "data": 3, + "valid": false + } + ] + }, + { + "description": "validate against correct branch, then vs else", + "schema": { + "if": { + "exclusiveMaximum": 0 + }, + "then": { + "minimum": -10 + }, + "else": { + "multipleOf": 2 + } + }, + "tests": [ + { + "description": "valid through then", + "data": -1, + "valid": true + }, + { + "description": "invalid through then", + "data": -100, + "valid": false + }, + { + "description": "valid through else", + "data": 4, + "valid": true + }, + { + "description": "invalid through else", + "data": 3, + "valid": false + } + ] + }, + { + "description": "non-interference across combined schemas", + "schema": { + "allOf": [ + { + "if": { + "exclusiveMaximum": 0 + } + }, + { + "then": { + "minimum": -10 + } + }, + { + "else": { + "multipleOf": 2 + } + } + ] + }, + "tests": [ + { + "description": "valid, but would have been invalid through then", + "data": -100, + "valid": true + }, + { + "description": "valid, but would have been invalid through else", + "data": 3, + "valid": true + } + ] + }, + { + "description": "if with boolean schema true", + "schema": { + "if": true, + "then": { "const": "then" }, + "else": { "const": "else" } + }, + "tests": [ + { + "description": "boolean schema true in if always chooses the then path (valid)", + "data": "then", + "valid": true + }, + { + "description": "boolean schema true in if always chooses the then path (invalid)", + "data": "else", + "valid": false + } + ] + }, + { + "description": "if with boolean schema false", + "schema": { + "if": false, + "then": { "const": "then" }, + "else": { "const": "else" } + }, + "tests": [ + { + "description": "boolean schema false in if always chooses the else path (invalid)", + "data": "then", + "valid": false + }, + { + "description": "boolean schema false in if always chooses the else path (valid)", + "data": "else", + "valid": true + } + ] + }, + { + "description": "if appears at the end when serialized (keyword processing sequence)", + "schema": { + "then": { "const": "yes" }, + "else": { "const": "other" }, + "if": { "maxLength": 4 } + }, + "tests": [ + { + "description": "yes redirects to then and passes", + "data": "yes", + "valid": true + }, + { + "description": "other redirects to else and passes", + "data": "other", + "valid": true + }, + { + "description": "no redirects to then and fails", + "data": "no", + "valid": false + }, + { + "description": "invalid redirects to else and fails", + "data": "invalid", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/infinite-loop-detection.json b/vendor/jsonschema/json/tests/draft-next/infinite-loop-detection.json new file mode 100644 index 00000000..9c3c3627 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/infinite-loop-detection.json @@ -0,0 +1,36 @@ +[ + { + "description": 
"evaluating the same schema location against the same data location twice is not a sign of an infinite loop", + "schema": { + "$defs": { + "int": { "type": "integer" } + }, + "allOf": [ + { + "properties": { + "foo": { + "$ref": "#/$defs/int" + } + } + }, + { + "additionalProperties": { + "$ref": "#/$defs/int" + } + } + ] + }, + "tests": [ + { + "description": "passing case", + "data": { "foo": 1 }, + "valid": true + }, + { + "description": "failing case", + "data": { "foo": "a string" }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/items.json b/vendor/jsonschema/json/tests/draft-next/items.json new file mode 100644 index 00000000..38ab0e0a --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/items.json @@ -0,0 +1,271 @@ +[ + { + "description": "a schema given for items", + "schema": { + "items": {"type": "integer"} + }, + "tests": [ + { + "description": "valid items", + "data": [ 1, 2, 3 ], + "valid": true + }, + { + "description": "wrong type of items", + "data": [1, "x"], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": {"foo" : "bar"}, + "valid": true + }, + { + "description": "JavaScript pseudo-array is valid", + "data": { + "0": "invalid", + "length": 1 + }, + "valid": true + } + ] + }, + { + "description": "items with boolean schema (true)", + "schema": {"items": true}, + "tests": [ + { + "description": "any array is valid", + "data": [ 1, "foo", true ], + "valid": true + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "items with boolean schema (false)", + "schema": {"items": false}, + "tests": [ + { + "description": "any non-empty array is invalid", + "data": [ 1, "foo", true ], + "valid": false + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "items and subitems", + "schema": { + "$defs": { + "item": { + "type": "array", + "items": false, + "prefixItems": [ + { "$ref": "#/$defs/sub-item" }, + { "$ref": "#/$defs/sub-item" } + ] + }, + "sub-item": { + "type": "object", + "required": ["foo"] + } + }, + "type": "array", + "items": false, + "prefixItems": [ + { "$ref": "#/$defs/item" }, + { "$ref": "#/$defs/item" }, + { "$ref": "#/$defs/item" } + ] + }, + "tests": [ + { + "description": "valid items", + "data": [ + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": true + }, + { + "description": "too many items", + "data": [ + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "too many sub-items", + "data": [ + [ {"foo": null}, {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "wrong item", + "data": [ + {"foo": null}, + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "wrong sub-item", + "data": [ + [ {}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "fewer items is valid", + "data": [ + [ {"foo": null} ], + [ {"foo": null} ] + ], + "valid": true + } + ] + }, + { + "description": "nested items", + "schema": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "array", + 
"items": { + "type": "number" + } + } + } + } + }, + "tests": [ + { + "description": "valid nested array", + "data": [[[[1]], [[2],[3]]], [[[4], [5], [6]]]], + "valid": true + }, + { + "description": "nested array with invalid type", + "data": [[[["1"]], [[2],[3]]], [[[4], [5], [6]]]], + "valid": false + }, + { + "description": "not deep enough", + "data": [[[1], [2],[3]], [[4], [5], [6]]], + "valid": false + } + ] + }, + { + "description": "prefixItems with no additional items allowed", + "schema": { + "prefixItems": [{}, {}, {}], + "items": false + }, + "tests": [ + { + "description": "empty array", + "data": [ ], + "valid": true + }, + { + "description": "fewer number of items present (1)", + "data": [ 1 ], + "valid": true + }, + { + "description": "fewer number of items present (2)", + "data": [ 1, 2 ], + "valid": true + }, + { + "description": "equal number of items present", + "data": [ 1, 2, 3 ], + "valid": true + }, + { + "description": "additional items are not permitted", + "data": [ 1, 2, 3, 4 ], + "valid": false + } + ] + }, + { + "description": "items does not look in applicators, valid case", + "schema": { + "allOf": [ + { "prefixItems": [ { "minimum": 3 } ] } + ], + "items": { "minimum": 5 } + }, + "tests": [ + { + "description": "prefixItems in allOf does not constrain items, invalid case", + "data": [ 3, 5 ], + "valid": false + }, + { + "description": "prefixItems in allOf does not constrain items, valid case", + "data": [ 5, 5 ], + "valid": true + } + ] + }, + { + "description": "prefixItems validation adjusts the starting index for items", + "schema": { + "prefixItems": [ { "type": "string" } ], + "items": { "type": "integer" } + }, + "tests": [ + { + "description": "valid items", + "data": [ "x", 2, 3 ], + "valid": true + }, + { + "description": "wrong type of second item", + "data": [ "x", "y" ], + "valid": false + } + ] + }, + { + "description": "items with null instance elements", + "schema": { + "items": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/maxContains.json b/vendor/jsonschema/json/tests/draft-next/maxContains.json new file mode 100644 index 00000000..7ed1dcf8 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/maxContains.json @@ -0,0 +1,148 @@ +[ + { + "description": "maxContains without contains is ignored", + "schema": { + "maxContains": 1 + }, + "tests": [ + { + "description": "one item valid against lone maxContains", + "data": [1], + "valid": true + }, + { + "description": "two items still valid against lone maxContains", + "data": [1, 2], + "valid": true + }, + { + "description": "one property valid against lone maxContains", + "data": { "a": 1 }, + "valid": true + }, + { + "description": "two properties still valid against lone maxContains", + "data": { "a": 1, "b": 2 }, + "valid": true + } + ] + }, + { + "description": "maxContains with contains", + "schema": { + "contains": { "const": 1 }, + "maxContains": 1 + }, + "tests": [ + { + "description": "empty array", + "data": [], + "valid": false + }, + { + "description": "all elements match, valid maxContains", + "data": [1], + "valid": true + }, + { + "description": "all elements match, invalid maxContains", + "data": [1, 1], + "valid": false + }, + { + "description": "some elements match, valid maxContains", + "data": [1, 2], + "valid": true + }, + { + "description": "some elements match, invalid maxContains", + "data": [1, 2, 1], + "valid": false + 
}, + { + "description": "empty object", + "data": {}, + "valid": false + }, + { + "description": "all properties match, valid maxContains", + "data": { "a": 1 }, + "valid": true + }, + { + "description": "all properties match, invalid maxContains", + "data": { "a": 1, "b": 1 }, + "valid": false + }, + { + "description": "some properties match, valid maxContains", + "data": { "a": 1, "b": 2 }, + "valid": true + }, + { + "description": "some properties match, invalid maxContains", + "data": { "a": 1, "b": 2, "c": 1 }, + "valid": false + } + ] + }, + { + "description": "maxContains with contains, value with a decimal", + "schema": { + "contains": {"const": 1}, + "maxContains": 1.0 + }, + "tests": [ + { + "description": "one element matches, valid maxContains", + "data": [ 1 ], + "valid": true + }, + { + "description": "too many elements match, invalid maxContains", + "data": [ 1, 1 ], + "valid": false + } + ] + }, + { + "description": "minContains < maxContains", + "schema": { + "contains": { "const": 1 }, + "minContains": 1, + "maxContains": 3 + }, + "tests": [ + { + "description": "array with actual < minContains < maxContains", + "data": [], + "valid": false + }, + { + "description": "array with minContains < actual < maxContains", + "data": [1, 1], + "valid": true + }, + { + "description": "array with minContains < maxContains < actual", + "data": [1, 1, 1, 1], + "valid": false + }, + { + "description": "object with actual < minContains < maxContains", + "data": {}, + "valid": false + }, + { + "description": "object with minContains < actual < maxContains", + "data": { "a": 1, "b": 1 }, + "valid": true + }, + { + "description": "object with minContains < maxContains < actual", + "data": { "a": 1, "b": 1, "c": 1, "d": 1 }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/maxItems.json b/vendor/jsonschema/json/tests/draft-next/maxItems.json new file mode 100644 index 00000000..f0c36ab2 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/maxItems.json @@ -0,0 +1,44 @@ +[ + { + "description": "maxItems validation", + "schema": {"maxItems": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": [1], + "valid": true + }, + { + "description": "exact length is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "too long is invalid", + "data": [1, 2, 3], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": "foobar", + "valid": true + } + ] + }, + { + "description": "maxItems validation with a decimal", + "schema": {"maxItems": 2.0}, + "tests": [ + { + "description": "shorter is valid", + "data": [1], + "valid": true + }, + { + "description": "too long is invalid", + "data": [1, 2, 3], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/maxLength.json b/vendor/jsonschema/json/tests/draft-next/maxLength.json new file mode 100644 index 00000000..748b4daa --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/maxLength.json @@ -0,0 +1,49 @@ +[ + { + "description": "maxLength validation", + "schema": {"maxLength": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": "f", + "valid": true + }, + { + "description": "exact length is valid", + "data": "fo", + "valid": true + }, + { + "description": "too long is invalid", + "data": "foo", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + }, + { + "description": "two supplementary Unicode code points is long enough", + "data": "\uD83D\uDCA9\uD83D\uDCA9", + "valid": 
true + } + ] + }, + { + "description": "maxLength validation with a decimal", + "schema": {"maxLength": 2.0}, + "tests": [ + { + "description": "shorter is valid", + "data": "f", + "valid": true + }, + { + "description": "too long is invalid", + "data": "foo", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/maxProperties.json b/vendor/jsonschema/json/tests/draft-next/maxProperties.json new file mode 100644 index 00000000..acec1420 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/maxProperties.json @@ -0,0 +1,70 @@ +[ + { + "description": "maxProperties validation", + "schema": {"maxProperties": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "exact length is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "too long is invalid", + "data": {"foo": 1, "bar": 2, "baz": 3}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [1, 2, 3], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "maxProperties validation with a decimal", + "schema": {"maxProperties": 2.0}, + "tests": [ + { + "description": "shorter is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "too long is invalid", + "data": {"foo": 1, "bar": 2, "baz": 3}, + "valid": false + } + ] + }, + { + "description": "maxProperties = 0 means the object is empty", + "schema": { "maxProperties": 0 }, + "tests": [ + { + "description": "no properties is valid", + "data": {}, + "valid": true + }, + { + "description": "one property is invalid", + "data": { "foo": 1 }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/maximum.json b/vendor/jsonschema/json/tests/draft-next/maximum.json new file mode 100644 index 00000000..6844a39e --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/maximum.json @@ -0,0 +1,54 @@ +[ + { + "description": "maximum validation", + "schema": {"maximum": 3.0}, + "tests": [ + { + "description": "below the maximum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "boundary point is valid", + "data": 3.0, + "valid": true + }, + { + "description": "above the maximum is invalid", + "data": 3.5, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "maximum validation with unsigned integer", + "schema": {"maximum": 300}, + "tests": [ + { + "description": "below the maximum is invalid", + "data": 299.97, + "valid": true + }, + { + "description": "boundary point integer is valid", + "data": 300, + "valid": true + }, + { + "description": "boundary point float is valid", + "data": 300.00, + "valid": true + }, + { + "description": "above the maximum is invalid", + "data": 300.5, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/minContains.json b/vendor/jsonschema/json/tests/draft-next/minContains.json new file mode 100644 index 00000000..b83d1bdd --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/minContains.json @@ -0,0 +1,216 @@ +[ + { + "description": "minContains without contains is ignored", + "schema": { + "minContains": 1 + }, + "tests": [ + { + "description": "one item valid against lone minContains", + "data": [1], + "valid": true + }, + { + "description": "zero items still valid against 
lone minContains", + "data": [], + "valid": true + } + ] + }, + { + "description": "minContains=1 with contains", + "schema": { + "contains": { "const": 1 }, + "minContains": 1 + }, + "tests": [ + { + "description": "empty data", + "data": [], + "valid": false + }, + { + "description": "no elements match", + "data": [2], + "valid": false + }, + { + "description": "single element matches, valid minContains", + "data": [1], + "valid": true + }, + { + "description": "some elements match, valid minContains", + "data": [1, 2], + "valid": true + }, + { + "description": "all elements match, valid minContains", + "data": [1, 1], + "valid": true + } + ] + }, + { + "description": "minContains=2 with contains", + "schema": { + "contains": { "const": 1 }, + "minContains": 2 + }, + "tests": [ + { + "description": "empty data", + "data": [], + "valid": false + }, + { + "description": "all elements match, invalid minContains", + "data": [1], + "valid": false + }, + { + "description": "some elements match, invalid minContains", + "data": [1, 2], + "valid": false + }, + { + "description": "all elements match, valid minContains (exactly as needed)", + "data": [1, 1], + "valid": true + }, + { + "description": "all elements match, valid minContains (more than needed)", + "data": [1, 1, 1], + "valid": true + }, + { + "description": "some elements match, valid minContains", + "data": [1, 2, 1], + "valid": true + } + ] + }, + { + "description": "minContains=2 with contains with a decimal value", + "schema": { + "contains": {"const": 1}, + "minContains": 2.0 + }, + "tests": [ + { + "description": "one element matches, invalid minContains", + "data": [ 1 ], + "valid": false + }, + { + "description": "both elements match, valid minContains", + "data": [ 1, 1 ], + "valid": true + } + ] + }, + { + "description": "maxContains = minContains", + "schema": { + "contains": { "const": 1 }, + "maxContains": 2, + "minContains": 2 + }, + "tests": [ + { + "description": "empty data", + "data": [], + "valid": false + }, + { + "description": "all elements match, invalid minContains", + "data": [1], + "valid": false + }, + { + "description": "all elements match, invalid maxContains", + "data": [1, 1, 1], + "valid": false + }, + { + "description": "all elements match, valid maxContains and minContains", + "data": [1, 1], + "valid": true + } + ] + }, + { + "description": "maxContains < minContains", + "schema": { + "contains": { "const": 1 }, + "maxContains": 1, + "minContains": 3 + }, + "tests": [ + { + "description": "empty data", + "data": [], + "valid": false + }, + { + "description": "invalid minContains", + "data": [1], + "valid": false + }, + { + "description": "invalid maxContains", + "data": [1, 1, 1], + "valid": false + }, + { + "description": "invalid maxContains and minContains", + "data": [1, 1], + "valid": false + } + ] + }, + { + "description": "minContains = 0", + "schema": { + "contains": { "const": 1 }, + "minContains": 0 + }, + "tests": [ + { + "description": "empty data", + "data": [], + "valid": true + }, + { + "description": "minContains = 0 makes contains always pass", + "data": [2], + "valid": true + } + ] + }, + { + "description": "minContains = 0 with maxContains", + "schema": { + "contains": {"const": 1}, + "minContains": 0, + "maxContains": 1 + }, + "tests": [ + { + "description": "empty data", + "data": [ ], + "valid": true + }, + { + "description": "not more than maxContains", + "data": [ 1 ], + "valid": true + }, + { + "description": "too many", + "data": [ 1, 1 ], + "valid": false + } + ] + } +] diff 
--git a/vendor/jsonschema/json/tests/draft-next/minItems.json b/vendor/jsonschema/json/tests/draft-next/minItems.json new file mode 100644 index 00000000..d3b18720 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/minItems.json @@ -0,0 +1,44 @@ +[ + { + "description": "minItems validation", + "schema": {"minItems": 1}, + "tests": [ + { + "description": "longer is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "exact length is valid", + "data": [1], + "valid": true + }, + { + "description": "too short is invalid", + "data": [], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": "", + "valid": true + } + ] + }, + { + "description": "minItems validation with a decimal", + "schema": {"minItems": 1.0}, + "tests": [ + { + "description": "longer is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "too short is invalid", + "data": [], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/minLength.json b/vendor/jsonschema/json/tests/draft-next/minLength.json new file mode 100644 index 00000000..64db9480 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/minLength.json @@ -0,0 +1,49 @@ +[ + { + "description": "minLength validation", + "schema": {"minLength": 2}, + "tests": [ + { + "description": "longer is valid", + "data": "foo", + "valid": true + }, + { + "description": "exact length is valid", + "data": "fo", + "valid": true + }, + { + "description": "too short is invalid", + "data": "f", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 1, + "valid": true + }, + { + "description": "one supplementary Unicode code point is not long enough", + "data": "\uD83D\uDCA9", + "valid": false + } + ] + }, + { + "description": "minLength validation with a decimal", + "schema": {"minLength": 2.0}, + "tests": [ + { + "description": "longer is valid", + "data": "foo", + "valid": true + }, + { + "description": "too short is invalid", + "data": "f", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/minProperties.json b/vendor/jsonschema/json/tests/draft-next/minProperties.json new file mode 100644 index 00000000..9f74f789 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/minProperties.json @@ -0,0 +1,54 @@ +[ + { + "description": "minProperties validation", + "schema": {"minProperties": 1}, + "tests": [ + { + "description": "longer is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "exact length is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "too short is invalid", + "data": {}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores strings", + "data": "", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "minProperties validation with a decimal", + "schema": {"minProperties": 1.0}, + "tests": [ + { + "description": "longer is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "too short is invalid", + "data": {}, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/minimum.json b/vendor/jsonschema/json/tests/draft-next/minimum.json new file mode 100644 index 00000000..21ae50e0 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/minimum.json @@ -0,0 +1,69 @@ +[ + { + "description": "minimum validation", + "schema": {"minimum": 1.1}, + "tests": [ + 
{ + "description": "above the minimum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "boundary point is valid", + "data": 1.1, + "valid": true + }, + { + "description": "below the minimum is invalid", + "data": 0.6, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "minimum validation with signed integer", + "schema": {"minimum": -2}, + "tests": [ + { + "description": "negative above the minimum is valid", + "data": -1, + "valid": true + }, + { + "description": "positive above the minimum is valid", + "data": 0, + "valid": true + }, + { + "description": "boundary point is valid", + "data": -2, + "valid": true + }, + { + "description": "boundary point with float is valid", + "data": -2.0, + "valid": true + }, + { + "description": "float below the minimum is invalid", + "data": -2.0001, + "valid": false + }, + { + "description": "int below the minimum is invalid", + "data": -3, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/multipleOf.json b/vendor/jsonschema/json/tests/draft-next/multipleOf.json new file mode 100644 index 00000000..25c25a91 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/multipleOf.json @@ -0,0 +1,71 @@ +[ + { + "description": "by int", + "schema": {"multipleOf": 2}, + "tests": [ + { + "description": "int by int", + "data": 10, + "valid": true + }, + { + "description": "int by int fail", + "data": 7, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "by number", + "schema": {"multipleOf": 1.5}, + "tests": [ + { + "description": "zero is multiple of anything", + "data": 0, + "valid": true + }, + { + "description": "4.5 is multiple of 1.5", + "data": 4.5, + "valid": true + }, + { + "description": "35 is not multiple of 1.5", + "data": 35, + "valid": false + } + ] + }, + { + "description": "by small number", + "schema": {"multipleOf": 0.0001}, + "tests": [ + { + "description": "0.0075 is multiple of 0.0001", + "data": 0.0075, + "valid": true + }, + { + "description": "0.00751 is not multiple of 0.0001", + "data": 0.00751, + "valid": false + } + ] + }, + { + "description": "float division = inf", + "schema": {"type": "integer", "multipleOf": 0.123456789}, + "tests": [ + { + "description": "always invalid, but naive implementations may raise an overflow error", + "data": 1e308, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/not.json b/vendor/jsonschema/json/tests/draft-next/not.json new file mode 100644 index 00000000..98de0eda --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/not.json @@ -0,0 +1,117 @@ +[ + { + "description": "not", + "schema": { + "not": {"type": "integer"} + }, + "tests": [ + { + "description": "allowed", + "data": "foo", + "valid": true + }, + { + "description": "disallowed", + "data": 1, + "valid": false + } + ] + }, + { + "description": "not multiple types", + "schema": { + "not": {"type": ["integer", "boolean"]} + }, + "tests": [ + { + "description": "valid", + "data": "foo", + "valid": true + }, + { + "description": "mismatch", + "data": 1, + "valid": false + }, + { + "description": "other mismatch", + "data": true, + "valid": false + } + ] + }, + { + "description": "not more complex schema", + "schema": { + "not": { + "type": "object", + "properties": { + "foo": { + "type": "string" + } + } + } + 
}, + "tests": [ + { + "description": "match", + "data": 1, + "valid": true + }, + { + "description": "other match", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "mismatch", + "data": {"foo": "bar"}, + "valid": false + } + ] + }, + { + "description": "forbidden property", + "schema": { + "properties": { + "foo": { + "not": {} + } + } + }, + "tests": [ + { + "description": "property present", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "property absent", + "data": {"bar": 1, "baz": 2}, + "valid": true + } + ] + }, + { + "description": "not with boolean schema true", + "schema": {"not": true}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "not with boolean schema false", + "schema": {"not": false}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/oneOf.json b/vendor/jsonschema/json/tests/draft-next/oneOf.json new file mode 100644 index 00000000..eeb7ae86 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/oneOf.json @@ -0,0 +1,274 @@ +[ + { + "description": "oneOf", + "schema": { + "oneOf": [ + { + "type": "integer" + }, + { + "minimum": 2 + } + ] + }, + "tests": [ + { + "description": "first oneOf valid", + "data": 1, + "valid": true + }, + { + "description": "second oneOf valid", + "data": 2.5, + "valid": true + }, + { + "description": "both oneOf valid", + "data": 3, + "valid": false + }, + { + "description": "neither oneOf valid", + "data": 1.5, + "valid": false + } + ] + }, + { + "description": "oneOf with base schema", + "schema": { + "type": "string", + "oneOf" : [ + { + "minLength": 2 + }, + { + "maxLength": 4 + } + ] + }, + "tests": [ + { + "description": "mismatch base schema", + "data": 3, + "valid": false + }, + { + "description": "one oneOf valid", + "data": "foobar", + "valid": true + }, + { + "description": "both oneOf valid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf with boolean schemas, all true", + "schema": {"oneOf": [true, true, true]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf with boolean schemas, one true", + "schema": {"oneOf": [true, false, false]}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "oneOf with boolean schemas, more than one true", + "schema": {"oneOf": [true, true, false]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf with boolean schemas, all false", + "schema": {"oneOf": [false, false, false]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf complex types", + "schema": { + "oneOf": [ + { + "properties": { + "bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "first oneOf valid (complex)", + "data": {"bar": 2}, + "valid": true + }, + { + "description": "second oneOf valid (complex)", + "data": {"foo": "baz"}, + "valid": true + }, + { + "description": "both oneOf valid (complex)", + "data": {"foo": "baz", "bar": 2}, + "valid": false + }, + { + "description": "neither oneOf valid (complex)", + "data": {"foo": 2, "bar": 
"quux"}, + "valid": false + } + ] + }, + { + "description": "oneOf with empty schema", + "schema": { + "oneOf": [ + { "type": "number" }, + {} + ] + }, + "tests": [ + { + "description": "one valid - valid", + "data": "foo", + "valid": true + }, + { + "description": "both valid - invalid", + "data": 123, + "valid": false + } + ] + }, + { + "description": "oneOf with required", + "schema": { + "type": "object", + "oneOf": [ + { "required": ["foo", "bar"] }, + { "required": ["foo", "baz"] } + ] + }, + "tests": [ + { + "description": "both invalid - invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "first valid - valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "second valid - valid", + "data": {"foo": 1, "baz": 3}, + "valid": true + }, + { + "description": "both valid - invalid", + "data": {"foo": 1, "bar": 2, "baz" : 3}, + "valid": false + } + ] + }, + { + "description": "oneOf with missing optional property", + "schema": { + "oneOf": [ + { + "properties": { + "bar": true, + "baz": true + }, + "required": ["bar"] + }, + { + "properties": { + "foo": true + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "first oneOf valid", + "data": {"bar": 8}, + "valid": true + }, + { + "description": "second oneOf valid", + "data": {"foo": "foo"}, + "valid": true + }, + { + "description": "both oneOf valid", + "data": {"foo": "foo", "bar": 8}, + "valid": false + }, + { + "description": "neither oneOf valid", + "data": {"baz": "quux"}, + "valid": false + } + ] + }, + { + "description": "nested oneOf, to check validation semantics", + "schema": { + "oneOf": [ + { + "oneOf": [ + { + "type": "null" + } + ] + } + ] + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "anything non-null is invalid", + "data": 123, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/bignum.json b/vendor/jsonschema/json/tests/draft-next/optional/bignum.json new file mode 100644 index 00000000..94b4a4e6 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/bignum.json @@ -0,0 +1,93 @@ +[ + { + "description": "integer", + "schema": { "type": "integer" }, + "tests": [ + { + "description": "a bignum is an integer", + "data": 12345678910111213141516171819202122232425262728293031, + "valid": true + }, + { + "description": "a negative bignum is an integer", + "data": -12345678910111213141516171819202122232425262728293031, + "valid": true + } + ] + }, + { + "description": "number", + "schema": { "type": "number" }, + "tests": [ + { + "description": "a bignum is a number", + "data": 98249283749234923498293171823948729348710298301928331, + "valid": true + }, + { + "description": "a negative bignum is a number", + "data": -98249283749234923498293171823948729348710298301928331, + "valid": true + } + ] + }, + { + "description": "string", + "schema": { "type": "string" }, + "tests": [ + { + "description": "a bignum is not a string", + "data": 98249283749234923498293171823948729348710298301928331, + "valid": false + } + ] + }, + { + "description": "maximum integer comparison", + "schema": { "maximum": 18446744073709551615 }, + "tests": [ + { + "description": "comparison works for high numbers", + "data": 18446744073709551600, + "valid": true + } + ] + }, + { + "description": "float comparison with high precision", + "schema": { + "exclusiveMaximum": 972783798187987123879878123.18878137 + }, + "tests": [ + { + "description": "comparison works for 
high numbers", + "data": 972783798187987123879878123.188781371, + "valid": false + } + ] + }, + { + "description": "minimum integer comparison", + "schema": { "minimum": -18446744073709551615 }, + "tests": [ + { + "description": "comparison works for very negative numbers", + "data": -18446744073709551600, + "valid": true + } + ] + }, + { + "description": "float comparison with high precision on negative numbers", + "schema": { + "exclusiveMinimum": -972783798187987123879878123.18878137 + }, + "tests": [ + { + "description": "comparison works for very negative numbers", + "data": -972783798187987123879878123.188781371, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/dependencies-compatibility.json b/vendor/jsonschema/json/tests/draft-next/optional/dependencies-compatibility.json new file mode 100644 index 00000000..6eafaf05 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/dependencies-compatibility.json @@ -0,0 +1,269 @@ +[ + { + "description": "single dependency", + "schema": {"dependencies": {"bar": ["foo"]}}, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependant", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "with dependency", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["bar"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "empty dependents", + "schema": {"dependencies": {"bar": []}}, + "tests": [ + { + "description": "empty object", + "data": {}, + "valid": true + }, + { + "description": "object with one property", + "data": {"bar": 2}, + "valid": true + }, + { + "description": "non-object is valid", + "data": 1, + "valid": true + } + ] + }, + { + "description": "multiple dependents required", + "schema": {"dependencies": {"quux": ["foo", "bar"]}}, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependants", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "with dependencies", + "data": {"foo": 1, "bar": 2, "quux": 3}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"foo": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing other dependency", + "data": {"bar": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing both dependencies", + "data": {"quux": 1}, + "valid": false + } + ] + }, + { + "description": "dependencies with escaped characters", + "schema": { + "dependencies": { + "foo\nbar": ["foo\rbar"], + "foo\"bar": ["foo'bar"] + } + }, + "tests": [ + { + "description": "CRLF", + "data": { + "foo\nbar": 1, + "foo\rbar": 2 + }, + "valid": true + }, + { + "description": "quoted quotes", + "data": { + "foo'bar": 1, + "foo\"bar": 2 + }, + "valid": true + }, + { + "description": "CRLF missing dependent", + "data": { + "foo\nbar": 1, + "foo": 2 + }, + "valid": false + }, + { + "description": "quoted quotes missing dependent", + "data": { + "foo\"bar": 2 + }, + "valid": false + } + ] + }, + { + "description": "single schema dependency", + "schema": { + "dependencies": { + "bar": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "integer"} + } + } + } + }, + "tests": [ + { + 
"description": "valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "no dependency", + "data": {"foo": "quux"}, + "valid": true + }, + { + "description": "wrong type", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "wrong type other", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + }, + { + "description": "wrong type both", + "data": {"foo": "quux", "bar": "quux"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["bar"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "boolean subschemas", + "schema": { + "dependencies": { + "foo": true, + "bar": false + } + }, + "tests": [ + { + "description": "object with property having schema true is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "object with property having schema false is invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "object with both properties is invalid", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "schema dependencies with escaped characters", + "schema": { + "dependencies": { + "foo\tbar": {"minProperties": 4}, + "foo'bar": {"required": ["foo\"bar"]} + } + }, + "tests": [ + { + "description": "quoted tab", + "data": { + "foo\tbar": 1, + "a": 2, + "b": 3, + "c": 4 + }, + "valid": true + }, + { + "description": "quoted quote", + "data": { + "foo'bar": {"foo\"bar": 1} + }, + "valid": false + }, + { + "description": "quoted tab invalid under dependent schema", + "data": { + "foo\tbar": 1, + "a": 2 + }, + "valid": false + }, + { + "description": "quoted quote invalid under dependent schema", + "data": {"foo'bar": 1}, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/ecmascript-regex.json b/vendor/jsonschema/json/tests/draft-next/optional/ecmascript-regex.json new file mode 100644 index 00000000..e273c95f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/ecmascript-regex.json @@ -0,0 +1,563 @@ +[ + { + "description": "ECMA 262 regex $ does not match trailing newline", + "schema": { + "type": "string", + "pattern": "^abc$" + }, + "tests": [ + { + "description": "matches in Python, but not in ECMA 262", + "data": "abc\\n", + "valid": false + }, + { + "description": "matches", + "data": "abc", + "valid": true + } + ] + }, + { + "description": "ECMA 262 regex converts \\t to horizontal tab", + "schema": { + "type": "string", + "pattern": "^\\t$" + }, + "tests": [ + { + "description": "does not match", + "data": "\\t", + "valid": false + }, + { + "description": "matches", + "data": "\u0009", + "valid": true + } + ] + }, + { + "description": "ECMA 262 regex escapes control codes with \\c and upper letter", + "schema": { + "type": "string", + "pattern": "^\\cC$" + }, + "tests": [ + { + "description": "does not match", + "data": "\\cC", + "valid": false + }, + { + "description": "matches", + "data": "\u0003", + "valid": true + } + ] + }, + { + "description": "ECMA 262 regex escapes control codes with \\c and lower letter", + "schema": { + "type": "string", + "pattern": "^\\cc$" + }, + "tests": [ + { + "description": "does not match", + "data": "\\cc", + "valid": false + }, + { + "description": "matches", + "data": "\u0003", + "valid": true + } + ] + }, 
+ { + "description": "ECMA 262 \\d matches ascii digits only", + "schema": { + "type": "string", + "pattern": "^\\d$" + }, + "tests": [ + { + "description": "ASCII zero matches", + "data": "0", + "valid": true + }, + { + "description": "NKO DIGIT ZERO does not match (unlike e.g. Python)", + "data": "߀", + "valid": false + }, + { + "description": "NKO DIGIT ZERO (as \\u escape) does not match", + "data": "\u07c0", + "valid": false + } + ] + }, + { + "description": "ECMA 262 \\D matches everything but ascii digits", + "schema": { + "type": "string", + "pattern": "^\\D$" + }, + "tests": [ + { + "description": "ASCII zero does not match", + "data": "0", + "valid": false + }, + { + "description": "NKO DIGIT ZERO matches (unlike e.g. Python)", + "data": "߀", + "valid": true + }, + { + "description": "NKO DIGIT ZERO (as \\u escape) matches", + "data": "\u07c0", + "valid": true + } + ] + }, + { + "description": "ECMA 262 \\w matches ascii letters only", + "schema": { + "type": "string", + "pattern": "^\\w$" + }, + "tests": [ + { + "description": "ASCII 'a' matches", + "data": "a", + "valid": true + }, + { + "description": "latin-1 e-acute does not match (unlike e.g. Python)", + "data": "é", + "valid": false + } + ] + }, + { + "description": "ECMA 262 \\W matches everything but ascii letters", + "schema": { + "type": "string", + "pattern": "^\\W$" + }, + "tests": [ + { + "description": "ASCII 'a' does not match", + "data": "a", + "valid": false + }, + { + "description": "latin-1 e-acute matches (unlike e.g. Python)", + "data": "é", + "valid": true + } + ] + }, + { + "description": "ECMA 262 \\s matches whitespace", + "schema": { + "type": "string", + "pattern": "^\\s$" + }, + "tests": [ + { + "description": "ASCII space matches", + "data": " ", + "valid": true + }, + { + "description": "Character tabulation matches", + "data": "\t", + "valid": true + }, + { + "description": "Line tabulation matches", + "data": "\u000b", + "valid": true + }, + { + "description": "Form feed matches", + "data": "\u000c", + "valid": true + }, + { + "description": "latin-1 non-breaking-space matches", + "data": "\u00a0", + "valid": true + }, + { + "description": "zero-width whitespace matches", + "data": "\ufeff", + "valid": true + }, + { + "description": "line feed matches (line terminator)", + "data": "\u000a", + "valid": true + }, + { + "description": "paragraph separator matches (line terminator)", + "data": "\u2029", + "valid": true + }, + { + "description": "EM SPACE matches (Space_Separator)", + "data": "\u2003", + "valid": true + }, + { + "description": "Non-whitespace control does not match", + "data": "\u0001", + "valid": false + }, + { + "description": "Non-whitespace does not match", + "data": "\u2013", + "valid": false + } + ] + }, + { + "description": "ECMA 262 \\S matches everything but whitespace", + "schema": { + "type": "string", + "pattern": "^\\S$" + }, + "tests": [ + { + "description": "ASCII space does not match", + "data": " ", + "valid": false + }, + { + "description": "Character tabulation does not match", + "data": "\t", + "valid": false + }, + { + "description": "Line tabulation does not match", + "data": "\u000b", + "valid": false + }, + { + "description": "Form feed does not match", + "data": "\u000c", + "valid": false + }, + { + "description": "latin-1 non-breaking-space does not match", + "data": "\u00a0", + "valid": false + }, + { + "description": "zero-width whitespace does not match", + "data": "\ufeff", + "valid": false + }, + { + "description": "line feed does not match (line 
terminator)", + "data": "\u000a", + "valid": false + }, + { + "description": "paragraph separator does not match (line terminator)", + "data": "\u2029", + "valid": false + }, + { + "description": "EM SPACE does not match (Space_Separator)", + "data": "\u2003", + "valid": false + }, + { + "description": "Non-whitespace control matches", + "data": "\u0001", + "valid": true + }, + { + "description": "Non-whitespace matches", + "data": "\u2013", + "valid": true + } + ] + }, + { + "description": "patterns always use unicode semantics with pattern", + "schema": { "pattern": "\\p{Letter}cole" }, + "tests": [ + { + "description": "ascii character in json string", + "data": "Les hivers de mon enfance etaient des saisons longues, longues. Nous vivions en trois lieux: l'ecole, l'eglise et la patinoire; mais la vraie vie etait sur la patinoire.", + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'école, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": true + }, + { + "description": "unicode character in hex format in string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'\u00e9cole, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": true + }, + { + "description": "unicode matching is case-sensitive", + "data": "LES HIVERS DE MON ENFANCE ÉTAIENT DES SAISONS LONGUES, LONGUES. NOUS VIVIONS EN TROIS LIEUX: L'ÉCOLE, L'ÉGLISE ET LA PATINOIRE; MAIS LA VRAIE VIE ÉTAIT SUR LA PATINOIRE.", + "valid": false + } + ] + }, + { + "description": "\\w in patterns matches [A-Za-z0-9_], not unicode letters", + "schema": { "pattern": "\\wcole" }, + "tests": [ + { + "description": "ascii character in json string", + "data": "Les hivers de mon enfance etaient des saisons longues, longues. Nous vivions en trois lieux: l'ecole, l'eglise et la patinoire; mais la vraie vie etait sur la patinoire.", + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'école, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'\u00e9cole, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "unicode matching is case-sensitive", + "data": "LES HIVERS DE MON ENFANCE ÉTAIENT DES SAISONS LONGUES, LONGUES. NOUS VIVIONS EN TROIS LIEUX: L'ÉCOLE, L'ÉGLISE ET LA PATINOIRE; MAIS LA VRAIE VIE ÉTAIT SUR LA PATINOIRE.", + "valid": false + } + ] + }, + { + "description": "pattern with ASCII ranges", + "schema": { "pattern": "[a-z]cole" }, + "tests": [ + { + "description": "literal unicode character in json string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'école, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. 
Nous vivions en trois lieux: l'\u00e9cole, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "ascii characters match", + "data": "Les hivers de mon enfance etaient des saisons longues, longues. Nous vivions en trois lieux: l'ecole, l'eglise et la patinoire; mais la vraie vie etait sur la patinoire.", + "valid": true + } + ] + }, + { + "description": "\\d in pattern matches [0-9], not unicode digits", + "schema": { "pattern": "^\\d+$" }, + "tests": [ + { + "description": "ascii digits", + "data": "42", + "valid": true + }, + { + "description": "ascii non-digits", + "data": "-%#", + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": "৪২", + "valid": false + } + ] + }, + { + "description": "\\a is not an ECMA 262 control escape", + "schema": { "$ref": "https://json-schema.org/draft/next/schema" }, + "tests": [ + { + "description": "when used as a pattern", + "data": { "pattern": "\\a" }, + "valid": false + } + ] + }, + { + "description": "pattern with non-ASCII digits", + "schema": { "pattern": "^\\p{digit}+$" }, + "tests": [ + { + "description": "ascii digits", + "data": "42", + "valid": true + }, + { + "description": "ascii non-digits", + "data": "-%#", + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": "৪২", + "valid": true + } + ] + }, + { + "description": "patterns always use unicode semantics with patternProperties", + "schema": { + "type": "object", + "patternProperties": { + "\\p{Letter}cole": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii character in json string", + "data": { "l'ecole": "pas de vraie vie" }, + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": { "l'école": "pas de vraie vie" }, + "valid": true + }, + { + "description": "unicode character in hex format in string", + "data": { "l'\u00e9cole": "pas de vraie vie" }, + "valid": true + }, + { + "description": "unicode matching is case-sensitive", + "data": { "L'ÉCOLE": "PAS DE VRAIE VIE" }, + "valid": false + } + ] + }, + { + "description": "\\w in patternProperties matches [A-Za-z0-9_], not unicode letters", + "schema": { + "type": "object", + "patternProperties": { + "\\wcole": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii character in json string", + "data": { "l'ecole": "pas de vraie vie" }, + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": { "l'école": "pas de vraie vie" }, + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": { "l'\u00e9cole": "pas de vraie vie" }, + "valid": false + }, + { + "description": "unicode matching is case-sensitive", + "data": { "L'ÉCOLE": "PAS DE VRAIE VIE" }, + "valid": false + } + ] + }, + { + "description": "patternProperties with ASCII ranges", + "schema": { + "type": "object", + "patternProperties": { + "[a-z]cole": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "literal unicode character in json string", + "data": { "l'école": "pas de vraie vie" }, + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": { "l'\u00e9cole": "pas de vraie vie" }, + "valid": false + }, + { + "description": "ascii characters match", + "data": { "l'ecole": "pas de vraie vie" }, + "valid": true + } + ] + }, + { + "description": "\\d in 
patternProperties matches [0-9], not unicode digits", + "schema": { + "type": "object", + "patternProperties": { + "^\\d+$": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii digits", + "data": { "42": "life, the universe, and everything" }, + "valid": true + }, + { + "description": "ascii non-digits", + "data": { "-%#": "spending the year dead for tax reasons" }, + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": { "৪২": "khajit has wares if you have coin" }, + "valid": false + } + ] + }, + { + "description": "patternProperties with non-ASCII digits", + "schema": { + "type": "object", + "patternProperties": { + "^\\p{digit}+$": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii digits", + "data": { "42": "life, the universe, and everything" }, + "valid": true + }, + { + "description": "ascii non-digits", + "data": { "-%#": "spending the year dead for tax reasons" }, + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": { "৪২": "khajit has wares if you have coin" }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/float-overflow.json b/vendor/jsonschema/json/tests/draft-next/optional/float-overflow.json new file mode 100644 index 00000000..52ff9827 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/float-overflow.json @@ -0,0 +1,13 @@ +[ + { + "description": "all integers are multiples of 0.5, if overflow is handled", + "schema": {"type": "integer", "multipleOf": 0.5}, + "tests": [ + { + "description": "valid if optional overflow handling is implemented", + "data": 1e308, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/format-assertion.json b/vendor/jsonschema/json/tests/draft-next/optional/format-assertion.json new file mode 100644 index 00000000..ede922a2 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/format-assertion.json @@ -0,0 +1,42 @@ +[ + { + "description": "schema that uses custom metaschema with format-assertion: false", + "schema": { + "$id": "https://schema/using/format-assertion/false", + "$schema": "http://localhost:1234/draft-next/format-assertion-false.json", + "format": "ipv4" + }, + "tests": [ + { + "description": "format-assertion: false: valid string", + "data": "127.0.0.1", + "valid": true + }, + { + "description": "format-assertion: false: invalid string", + "data": "not-an-ipv4", + "valid": false + } + ] + }, + { + "description": "schema that uses custom metaschema with format-assertion: true", + "schema": { + "$id": "https://schema/using/format-assertion/true", + "$schema": "http://localhost:1234/draft-next/format-assertion-true.json", + "format": "ipv4" + }, + "tests": [ + { + "description": "format-assertion: true: valid string", + "data": "127.0.0.1", + "valid": true + }, + { + "description": "format-assertion: true: invalid string", + "data": "not-an-ipv4", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/format/date-time.json b/vendor/jsonschema/json/tests/draft-next/optional/format/date-time.json new file mode 100644 index 00000000..09112737 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/format/date-time.json @@ -0,0 +1,133 @@ +[ + { + "description": "validation of date-time strings", + "schema": { "format": "date-time" }, + "tests": [ + { + "description": "all string formats 
ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid date-time string", + "data": "1963-06-19T08:30:06.283185Z", + "valid": true + }, + { + "description": "a valid date-time string without second fraction", + "data": "1963-06-19T08:30:06Z", + "valid": true + }, + { + "description": "a valid date-time string with plus offset", + "data": "1937-01-01T12:00:27.87+00:20", + "valid": true + }, + { + "description": "a valid date-time string with minus offset", + "data": "1990-12-31T15:59:50.123-08:00", + "valid": true + }, + { + "description": "a valid date-time with a leap second, UTC", + "data": "1998-12-31T23:59:60Z", + "valid": true + }, + { + "description": "a valid date-time with a leap second, with minus offset", + "data": "1998-12-31T15:59:60.123-08:00", + "valid": true + }, + { + "description": "an invalid date-time past leap second, UTC", + "data": "1998-12-31T23:59:61Z", + "valid": false + }, + { + "description": "an invalid date-time with leap second on a wrong minute, UTC", + "data": "1998-12-31T23:58:60Z", + "valid": false + }, + { + "description": "an invalid date-time with leap second on a wrong hour, UTC", + "data": "1998-12-31T22:59:60Z", + "valid": false + }, + { + "description": "an invalid day in date-time string", + "data": "1990-02-31T15:59:59.123-08:00", + "valid": false + }, + { + "description": "an invalid offset in date-time string", + "data": "1990-12-31T15:59:59-24:00", + "valid": false + }, + { + "description": "an invalid closing Z after time-zone offset", + "data": "1963-06-19T08:30:06.28123+01:00Z", + "valid": false + }, + { + "description": "an invalid date-time string", + "data": "06/19/1963 08:30:06 PST", + "valid": false + }, + { + "description": "case-insensitive T and Z", + "data": "1963-06-19t08:30:06.283185z", + "valid": true + }, + { + "description": "only RFC3339 not all of ISO 8601 are valid", + "data": "2013-350T01:01:01", + "valid": false + }, + { + "description": "invalid non-padded month dates", + "data": "1963-6-19T08:30:06.283185Z", + "valid": false + }, + { + "description": "invalid non-padded day dates", + "data": "1963-06-1T08:30:06.283185Z", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4) in date portion", + "data": "1963-06-1৪T00:00:00Z", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4) in time portion", + "data": "1963-06-11T0৪:00:00Z", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/format/date.json b/vendor/jsonschema/json/tests/draft-next/optional/format/date.json new file mode 100644 index 00000000..06c9ea0f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/format/date.json @@ -0,0 +1,223 @@ +[ + { + "description": "validation of date strings", + "schema": { "format": "date" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore 
objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid date string", + "data": "1963-06-19", + "valid": true + }, + { + "description": "a valid date string with 31 days in January", + "data": "2020-01-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in January", + "data": "2020-01-32", + "valid": false + }, + { + "description": "a valid date string with 28 days in February (normal)", + "data": "2021-02-28", + "valid": true + }, + { + "description": "a invalid date string with 29 days in February (normal)", + "data": "2021-02-29", + "valid": false + }, + { + "description": "a valid date string with 29 days in February (leap)", + "data": "2020-02-29", + "valid": true + }, + { + "description": "a invalid date string with 30 days in February (leap)", + "data": "2020-02-30", + "valid": false + }, + { + "description": "a valid date string with 31 days in March", + "data": "2020-03-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in March", + "data": "2020-03-32", + "valid": false + }, + { + "description": "a valid date string with 30 days in April", + "data": "2020-04-30", + "valid": true + }, + { + "description": "a invalid date string with 31 days in April", + "data": "2020-04-31", + "valid": false + }, + { + "description": "a valid date string with 31 days in May", + "data": "2020-05-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in May", + "data": "2020-05-32", + "valid": false + }, + { + "description": "a valid date string with 30 days in June", + "data": "2020-06-30", + "valid": true + }, + { + "description": "a invalid date string with 31 days in June", + "data": "2020-06-31", + "valid": false + }, + { + "description": "a valid date string with 31 days in July", + "data": "2020-07-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in July", + "data": "2020-07-32", + "valid": false + }, + { + "description": "a valid date string with 31 days in August", + "data": "2020-08-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in August", + "data": "2020-08-32", + "valid": false + }, + { + "description": "a valid date string with 30 days in September", + "data": "2020-09-30", + "valid": true + }, + { + "description": "a invalid date string with 31 days in September", + "data": "2020-09-31", + "valid": false + }, + { + "description": "a valid date string with 31 days in October", + "data": "2020-10-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in October", + "data": "2020-10-32", + "valid": false + }, + { + "description": "a valid date string with 30 days in November", + "data": "2020-11-30", + "valid": true + }, + { + "description": "a invalid date string with 31 days in November", + "data": "2020-11-31", + "valid": false + }, + { + "description": "a valid date string with 31 days in December", + "data": "2020-12-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in December", + "data": "2020-12-32", + "valid": false + }, + { + "description": "a invalid date string with invalid month", + "data": "2020-13-01", + "valid": false + }, + { + "description": "an invalid 
date string", + "data": "06/19/1963", + "valid": false + }, + { + "description": "only RFC3339 not all of ISO 8601 are valid", + "data": "2013-350", + "valid": false + }, + { + "description": "non-padded month dates are not valid", + "data": "1998-1-20", + "valid": false + }, + { + "description": "non-padded day dates are not valid", + "data": "1998-01-1", + "valid": false + }, + { + "description": "invalid month", + "data": "1998-13-01", + "valid": false + }, + { + "description": "invalid month-day combination", + "data": "1998-04-31", + "valid": false + }, + { + "description": "2021 is not a leap year", + "data": "2021-02-29", + "valid": false + }, + { + "description": "2020 is a leap year", + "data": "2020-02-29", + "valid": true + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4)", + "data": "1963-06-1৪", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/format/duration.json b/vendor/jsonschema/json/tests/draft-next/optional/format/duration.json new file mode 100644 index 00000000..a6acdc1c --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/format/duration.json @@ -0,0 +1,128 @@ +[ + { + "description": "validation of duration strings", + "schema": { "format": "duration" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid duration string", + "data": "P4DT12H30M5S", + "valid": true + }, + { + "description": "an invalid duration string", + "data": "PT1D", + "valid": false + }, + { + "description": "no elements present", + "data": "P", + "valid": false + }, + { + "description": "no time elements present", + "data": "P1YT", + "valid": false + }, + { + "description": "no date or time elements present", + "data": "PT", + "valid": false + }, + { + "description": "elements out of order", + "data": "P2D1Y", + "valid": false + }, + { + "description": "missing time separator", + "data": "P1D2H", + "valid": false + }, + { + "description": "time element in the date position", + "data": "P2S", + "valid": false + }, + { + "description": "four years duration", + "data": "P4Y", + "valid": true + }, + { + "description": "zero time, in seconds", + "data": "PT0S", + "valid": true + }, + { + "description": "zero time, in days", + "data": "P0D", + "valid": true + }, + { + "description": "one month duration", + "data": "P1M", + "valid": true + }, + { + "description": "one minute duration", + "data": "PT1M", + "valid": true + }, + { + "description": "one and a half days, in hours", + "data": "PT36H", + "valid": true + }, + { + "description": "one and a half days, in days and hours", + "data": "P1DT12H", + "valid": true + }, + { + "description": "two weeks", + "data": "P2W", + "valid": true + }, + { + "description": "weeks cannot be combined with other units", + "data": "P1Y2W", + "valid": false + }, + { + "description": "invalid non-ASCII '২' (a Bengali 2)", + "data": "P২Y", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/format/email.json 
b/vendor/jsonschema/json/tests/draft-next/optional/format/email.json new file mode 100644 index 00000000..5ce1c708 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/format/email.json @@ -0,0 +1,118 @@ +[ + { + "description": "validation of e-mail addresses", + "schema": { "format": "email" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid e-mail address", + "data": "joe.bloggs@example.com", + "valid": true + }, + { + "description": "an invalid e-mail address", + "data": "2962", + "valid": false + }, + { + "description": "tilde in local part is valid", + "data": "te~st@example.com", + "valid": true + }, + { + "description": "tilde before local part is valid", + "data": "~test@example.com", + "valid": true + }, + { + "description": "tilde after local part is valid", + "data": "test~@example.com", + "valid": true + }, + { + "description": "a quoted string with a space in the local part is valid", + "data": "\"joe bloggs\"@example.com", + "valid": true + }, + { + "description": "a quoted string with a double dot in the local part is valid", + "data": "\"joe..bloggs\"@example.com", + "valid": true + }, + { + "description": "a quoted string with a @ in the local part is valid", + "data": "\"joe@bloggs\"@example.com", + "valid": true + }, + { + "description": "an IPv4-address-literal after the @ is valid", + "data": "joe.bloggs@[127.0.0.1]", + "valid": true + }, + { + "description": "an IPv6-address-literal after the @ is valid", + "data": "joe.bloggs@[IPv6:::1]", + "valid": true + }, + { + "description": "dot before local part is not valid", + "data": ".test@example.com", + "valid": false + }, + { + "description": "dot after local part is not valid", + "data": "test.@example.com", + "valid": false + }, + { + "description": "two separated dots inside local part are valid", + "data": "te.s.t@example.com", + "valid": true + }, + { + "description": "two subsequent dots inside local part are not valid", + "data": "te..st@example.com", + "valid": false + }, + { + "description": "an invalid domain", + "data": "joe.bloggs@invalid=domain.com", + "valid": false + }, + { + "description": "an invalid IPv4-address-literal", + "data": "joe.bloggs@[127.0.0.300]", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/format/hostname.json b/vendor/jsonschema/json/tests/draft-next/optional/format/hostname.json new file mode 100644 index 00000000..8a67fda8 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/format/hostname.json @@ -0,0 +1,98 @@ +[ + { + "description": "validation of host names", + "schema": { "format": "hostname" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], 
+ "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid host name", + "data": "www.example.com", + "valid": true + }, + { + "description": "a valid punycoded IDN hostname", + "data": "xn--4gbwdl.xn--wgbh1c", + "valid": true + }, + { + "description": "a host name starting with an illegal character", + "data": "-a-host-name-that-starts-with--", + "valid": false + }, + { + "description": "a host name containing illegal characters", + "data": "not_a_valid_host_name", + "valid": false + }, + { + "description": "a host name with a component too long", + "data": "a-vvvvvvvvvvvvvvvveeeeeeeeeeeeeeeerrrrrrrrrrrrrrrryyyyyyyyyyyyyyyy-long-host-name-component", + "valid": false + }, + { + "description": "starts with hyphen", + "data": "-hostname", + "valid": false + }, + { + "description": "ends with hyphen", + "data": "hostname-", + "valid": false + }, + { + "description": "starts with underscore", + "data": "_hostname", + "valid": false + }, + { + "description": "ends with underscore", + "data": "hostname_", + "valid": false + }, + { + "description": "contains underscore", + "data": "host_name", + "valid": false + }, + { + "description": "maximum label length", + "data": "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.com", + "valid": true + }, + { + "description": "exceeds maximum label length", + "data": "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijkl.com", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/format/idn-email.json b/vendor/jsonschema/json/tests/draft-next/optional/format/idn-email.json new file mode 100644 index 00000000..6e213745 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/format/idn-email.json @@ -0,0 +1,58 @@ +[ + { + "description": "validation of an internationalized e-mail addresses", + "schema": { "format": "idn-email" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid idn e-mail (example@example.test in Hangul)", + "data": "실례@실례.테스트", + "valid": true + }, + { + "description": "an invalid idn e-mail address", + "data": "2962", + "valid": false + }, + { + "description": "a valid e-mail address", + "data": "joe.bloggs@example.com", + "valid": true + }, + { + "description": "an invalid e-mail address", + "data": "2962", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/format/idn-hostname.json b/vendor/jsonschema/json/tests/draft-next/optional/format/idn-hostname.json new file mode 100644 index 00000000..6c8f86a3 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/format/idn-hostname.json @@ -0,0 +1,304 @@ +[ + { + "description": "validation of internationalized host names", + "schema": { "format": "idn-hostname" }, + "tests": [ + { + "description": "all string formats ignore integers", + 
"data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid host name (example.test in Hangul)", + "data": "실례.테스트", + "valid": true + }, + { + "description": "illegal first char U+302E Hangul single dot tone mark", + "data": "〮실례.테스트", + "valid": false + }, + { + "description": "contains illegal char U+302E Hangul single dot tone mark", + "data": "실〮례.테스트", + "valid": false + }, + { + "description": "a host name with a component too long", + "data": "실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실례례테스트례례례례례례례례례례례례례례례례례테스트례례례례례례례례례례례례례례례례례례례테스트례례례례례례례례례례례례테스트례례실례.테스트", + "valid": false + }, + { + "description": "invalid label, correct Punycode", + "comment": "https://tools.ietf.org/html/rfc5890#section-2.3.2.1 https://tools.ietf.org/html/rfc5891#section-4.4 https://tools.ietf.org/html/rfc3492#section-7.1", + "data": "-> $1.00 <--", + "valid": false + }, + { + "description": "valid Chinese Punycode", + "comment": "https://tools.ietf.org/html/rfc5890#section-2.3.2.1 https://tools.ietf.org/html/rfc5891#section-4.4", + "data": "xn--ihqwcrb4cv8a8dqg056pqjye", + "valid": true + }, + { + "description": "invalid Punycode", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.4 https://tools.ietf.org/html/rfc5890#section-2.3.2.1", + "data": "xn--X", + "valid": false + }, + { + "description": "U-label contains \"--\" in the 3rd and 4th position", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.1 https://tools.ietf.org/html/rfc5890#section-2.3.2.1", + "data": "XN--aa---o47jg78q", + "valid": false + }, + { + "description": "U-label starts with a dash", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.1", + "data": "-hello", + "valid": false + }, + { + "description": "U-label ends with a dash", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.1", + "data": "hello-", + "valid": false + }, + { + "description": "U-label starts and ends with a dash", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.1", + "data": "-hello-", + "valid": false + }, + { + "description": "Begins with a Spacing Combining Mark", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.2", + "data": "\u0903hello", + "valid": false + }, + { + "description": "Begins with a Nonspacing Mark", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.2", + "data": "\u0300hello", + "valid": false + }, + { + "description": "Begins with an Enclosing Mark", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.2", + "data": "\u0488hello", + "valid": false + }, + { + "description": "Exceptions that are PVALID, left-to-right chars", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.2 https://tools.ietf.org/html/rfc5892#section-2.6", + "data": "\u00df\u03c2\u0f0b\u3007", + "valid": true + }, + { + "description": "Exceptions that are PVALID, right-to-left chars", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.2 https://tools.ietf.org/html/rfc5892#section-2.6", + "data": "\u06fd\u06fe", + "valid": true + }, + { + "description": "Exceptions that are DISALLOWED, 
right-to-left chars", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.2 https://tools.ietf.org/html/rfc5892#section-2.6", + "data": "\u0640\u07fa", + "valid": false + }, + { + "description": "Exceptions that are DISALLOWED, left-to-right chars", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.2 https://tools.ietf.org/html/rfc5892#section-2.6 Note: The two combining marks (U+302E and U+302F) are in the middle and not at the start", + "data": "\u3031\u3032\u3033\u3034\u3035\u302e\u302f\u303b", + "valid": false + }, + { + "description": "MIDDLE DOT with no preceding 'l'", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3", + "data": "a\u00b7l", + "valid": false + }, + { + "description": "MIDDLE DOT with nothing preceding", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3", + "data": "\u00b7l", + "valid": false + }, + { + "description": "MIDDLE DOT with no following 'l'", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3", + "data": "l\u00b7a", + "valid": false + }, + { + "description": "MIDDLE DOT with nothing following", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3", + "data": "l\u00b7", + "valid": false + }, + { + "description": "MIDDLE DOT with surrounding 'l's", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3", + "data": "l\u00b7l", + "valid": true + }, + { + "description": "Greek KERAIA not followed by Greek", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.4", + "data": "\u03b1\u0375S", + "valid": false + }, + { + "description": "Greek KERAIA not followed by anything", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.4", + "data": "\u03b1\u0375", + "valid": false + }, + { + "description": "Greek KERAIA followed by Greek", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.4", + "data": "\u03b1\u0375\u03b2", + "valid": true + }, + { + "description": "Hebrew GERESH not preceded by Hebrew", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.5", + "data": "A\u05f3\u05d1", + "valid": false + }, + { + "description": "Hebrew GERESH not preceded by anything", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.5", + "data": "\u05f3\u05d1", + "valid": false + }, + { + "description": "Hebrew GERESH preceded by Hebrew", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.5", + "data": "\u05d0\u05f3\u05d1", + "valid": true + }, + { + "description": "Hebrew GERSHAYIM not preceded by Hebrew", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.6", + "data": "A\u05f4\u05d1", + "valid": false + }, + { + "description": "Hebrew GERSHAYIM not preceded by anything", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.6", + "data": "\u05f4\u05d1", + "valid": false + }, + { + "description": "Hebrew GERSHAYIM preceded by Hebrew", + "comment": 
"https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.6", + "data": "\u05d0\u05f4\u05d1", + "valid": true + }, + { + "description": "KATAKANA MIDDLE DOT with no Hiragana, Katakana, or Han", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7", + "data": "def\u30fbabc", + "valid": false + }, + { + "description": "KATAKANA MIDDLE DOT with no other characters", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7", + "data": "\u30fb", + "valid": false + }, + { + "description": "KATAKANA MIDDLE DOT with Hiragana", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7", + "data": "\u30fb\u3041", + "valid": true + }, + { + "description": "KATAKANA MIDDLE DOT with Katakana", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7", + "data": "\u30fb\u30a1", + "valid": true + }, + { + "description": "KATAKANA MIDDLE DOT with Han", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7", + "data": "\u30fb\u4e08", + "valid": true + }, + { + "description": "Arabic-Indic digits mixed with Extended Arabic-Indic digits", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.8", + "data": "\u0660\u06f0", + "valid": false + }, + { + "description": "Arabic-Indic digits not mixed with Extended Arabic-Indic digits", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.8", + "data": "\u0628\u0660\u0628", + "valid": true + }, + { + "description": "Extended Arabic-Indic digits not mixed with Arabic-Indic digits", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.9", + "data": "\u06f00", + "valid": true + }, + { + "description": "ZERO WIDTH JOINER not preceded by Virama", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.2 https://www.unicode.org/review/pr-37.pdf", + "data": "\u0915\u200d\u0937", + "valid": false + }, + { + "description": "ZERO WIDTH JOINER not preceded by anything", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.2 https://www.unicode.org/review/pr-37.pdf", + "data": "\u200d\u0937", + "valid": false + }, + { + "description": "ZERO WIDTH JOINER preceded by Virama", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.2 https://www.unicode.org/review/pr-37.pdf", + "data": "\u0915\u094d\u200d\u0937", + "valid": true + }, + { + "description": "ZERO WIDTH NON-JOINER preceded by Virama", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.1", + "data": "\u0915\u094d\u200c\u0937", + "valid": true + }, + { + "description": "ZERO WIDTH NON-JOINER not preceded by Virama but matches regexp", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.1 https://www.w3.org/TR/alreq/#h_disjoining_enforcement", + "data": "\u0628\u064a\u200c\u0628\u064a", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/format/ipv4.json 
b/vendor/jsonschema/json/tests/draft-next/optional/format/ipv4.json new file mode 100644 index 00000000..4706581f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/format/ipv4.json @@ -0,0 +1,84 @@ +[ + { + "description": "validation of IP addresses", + "schema": { "format": "ipv4" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid IP address", + "data": "192.168.0.1", + "valid": true + }, + { + "description": "an IP address with too many components", + "data": "127.0.0.0.1", + "valid": false + }, + { + "description": "an IP address with out-of-range values", + "data": "256.256.256.256", + "valid": false + }, + { + "description": "an IP address without 4 components", + "data": "127.0", + "valid": false + }, + { + "description": "an IP address as an integer", + "data": "0x7f000001", + "valid": false + }, + { + "description": "an IP address as an integer (decimal)", + "data": "2130706433", + "valid": false + }, + { + "description": "invalid leading zeroes, as they are treated as octals", + "comment": "see https://sick.codes/universal-netmask-npm-package-used-by-270000-projects-vulnerable-to-octal-input-data-server-side-request-forgery-remote-file-inclusion-local-file-inclusion-and-more-cve-2021-28918/", + "data": "087.10.0.1", + "valid": false + }, + { + "description": "value without leading zero is valid", + "data": "87.10.0.1", + "valid": true + }, + { + "description": "invalid non-ASCII '২' (a Bengali 2)", + "data": "1২7.0.0.1", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/format/ipv6.json b/vendor/jsonschema/json/tests/draft-next/optional/format/ipv6.json new file mode 100644 index 00000000..94368f2a --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/format/ipv6.json @@ -0,0 +1,208 @@ +[ + { + "description": "validation of IPv6 addresses", + "schema": { "format": "ipv6" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid IPv6 address", + "data": "::1", + "valid": true + }, + { + "description": "an IPv6 address with out-of-range values", + "data": "12345::", + "valid": false + }, + { + "description": "trailing 4 hex symbols is valid", + "data": "::abef", + "valid": true + }, + { + "description": "trailing 5 hex symbols is invalid", + "data": "::abcef", + "valid": false + }, + { + "description": "an IPv6 address with too many components", + "data": "1:1:1:1:1:1:1:1:1:1:1:1:1:1:1:1", + "valid": false + }, + { + 
"description": "an IPv6 address containing illegal characters", + "data": "::laptop", + "valid": false + }, + { + "description": "no digits is valid", + "data": "::", + "valid": true + }, + { + "description": "leading colons is valid", + "data": "::42:ff:1", + "valid": true + }, + { + "description": "trailing colons is valid", + "data": "d6::", + "valid": true + }, + { + "description": "missing leading octet is invalid", + "data": ":2:3:4:5:6:7:8", + "valid": false + }, + { + "description": "missing trailing octet is invalid", + "data": "1:2:3:4:5:6:7:", + "valid": false + }, + { + "description": "missing leading octet with omitted octets later", + "data": ":2:3:4::8", + "valid": false + }, + { + "description": "single set of double colons in the middle is valid", + "data": "1:d6::42", + "valid": true + }, + { + "description": "two sets of double colons is invalid", + "data": "1::d6::42", + "valid": false + }, + { + "description": "mixed format with the ipv4 section as decimal octets", + "data": "1::d6:192.168.0.1", + "valid": true + }, + { + "description": "mixed format with double colons between the sections", + "data": "1:2::192.168.0.1", + "valid": true + }, + { + "description": "mixed format with ipv4 section with octet out of range", + "data": "1::2:192.168.256.1", + "valid": false + }, + { + "description": "mixed format with ipv4 section with a hex octet", + "data": "1::2:192.168.ff.1", + "valid": false + }, + { + "description": "mixed format with leading double colons (ipv4-mapped ipv6 address)", + "data": "::ffff:192.168.0.1", + "valid": true + }, + { + "description": "triple colons is invalid", + "data": "1:2:3:4:5:::8", + "valid": false + }, + { + "description": "8 octets", + "data": "1:2:3:4:5:6:7:8", + "valid": true + }, + { + "description": "insufficient octets without double colons", + "data": "1:2:3:4:5:6:7", + "valid": false + }, + { + "description": "no colons is invalid", + "data": "1", + "valid": false + }, + { + "description": "ipv4 is not ipv6", + "data": "127.0.0.1", + "valid": false + }, + { + "description": "ipv4 segment must have 4 octets", + "data": "1:2:3:4:1.2.3", + "valid": false + }, + { + "description": "leading whitespace is invalid", + "data": " ::1", + "valid": false + }, + { + "description": "trailing whitespace is invalid", + "data": "::1 ", + "valid": false + }, + { + "description": "netmask is not a part of ipv6 address", + "data": "fe80::/64", + "valid": false + }, + { + "description": "zone id is not a part of ipv6 address", + "data": "fe80::a%eth1", + "valid": false + }, + { + "description": "a long valid ipv6", + "data": "1000:1000:1000:1000:1000:1000:255.255.255.255", + "valid": true + }, + { + "description": "a long invalid ipv6, below length limit, first", + "data": "100:100:100:100:100:100:255.255.255.255.255", + "valid": false + }, + { + "description": "a long invalid ipv6, below length limit, second", + "data": "100:100:100:100:100:100:100:255.255.255.255", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4)", + "data": "1:2:3:4:5:6:7:৪", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4) in the IPv4 portion", + "data": "1:2::192.16৪.0.1", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/format/iri-reference.json b/vendor/jsonschema/json/tests/draft-next/optional/format/iri-reference.json new file mode 100644 index 00000000..c6b4c22a --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/format/iri-reference.json @@ -0,0 +1,73 
@@ +[ + { + "description": "validation of IRI References", + "schema": { "format": "iri-reference" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid IRI", + "data": "http://ƒøø.ßår/?∂éœ=πîx#πîüx", + "valid": true + }, + { + "description": "a valid protocol-relative IRI Reference", + "data": "//ƒøø.ßår/?∂éœ=πîx#πîüx", + "valid": true + }, + { + "description": "a valid relative IRI Reference", + "data": "/âππ", + "valid": true + }, + { + "description": "an invalid IRI Reference", + "data": "\\\\WINDOWS\\filëßåré", + "valid": false + }, + { + "description": "a valid IRI Reference", + "data": "âππ", + "valid": true + }, + { + "description": "a valid IRI fragment", + "data": "#ƒrägmênt", + "valid": true + }, + { + "description": "an invalid IRI fragment", + "data": "#ƒräg\\mênt", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/format/iri.json b/vendor/jsonschema/json/tests/draft-next/optional/format/iri.json new file mode 100644 index 00000000..a0d12aed --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/format/iri.json @@ -0,0 +1,83 @@ +[ + { + "description": "validation of IRIs", + "schema": { "format": "iri" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid IRI with anchor tag", + "data": "http://ƒøø.ßår/?∂éœ=πîx#πîüx", + "valid": true + }, + { + "description": "a valid IRI with anchor tag and parentheses", + "data": "http://ƒøø.com/blah_(wîkïpédiå)_blah#ßité-1", + "valid": true + }, + { + "description": "a valid IRI with URL-encoded stuff", + "data": "http://ƒøø.ßår/?q=Test%20URL-encoded%20stuff", + "valid": true + }, + { + "description": "a valid IRI with many special characters", + "data": "http://-.~_!$&'()*+,;=:%40:80%2f::::::@example.com", + "valid": true + }, + { + "description": "a valid IRI based on IPv6", + "data": "http://[2001:0db8:85a3:0000:0000:8a2e:0370:7334]", + "valid": true + }, + { + "description": "an invalid IRI based on IPv6", + "data": "http://2001:0db8:85a3:0000:0000:8a2e:0370:7334", + "valid": false + }, + { + "description": "an invalid relative IRI Reference", + "data": "/abc", + "valid": false + }, + { + "description": "an invalid IRI", + "data": "\\\\WINDOWS\\filëßåré", + "valid": false + }, + { + "description": "an invalid IRI though valid IRI reference", + "data": "âππ", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/format/json-pointer.json 
b/vendor/jsonschema/json/tests/draft-next/optional/format/json-pointer.json new file mode 100644 index 00000000..a0346b57 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/format/json-pointer.json @@ -0,0 +1,198 @@ +[ + { + "description": "validation of JSON-pointers (JSON String Representation)", + "schema": { "format": "json-pointer" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid JSON-pointer", + "data": "/foo/bar~0/baz~1/%a", + "valid": true + }, + { + "description": "not a valid JSON-pointer (~ not escaped)", + "data": "/foo/bar~", + "valid": false + }, + { + "description": "valid JSON-pointer with empty segment", + "data": "/foo//bar", + "valid": true + }, + { + "description": "valid JSON-pointer with the last empty segment", + "data": "/foo/bar/", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #1", + "data": "", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #2", + "data": "/foo", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #3", + "data": "/foo/0", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #4", + "data": "/", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #5", + "data": "/a~1b", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #6", + "data": "/c%d", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #7", + "data": "/e^f", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #8", + "data": "/g|h", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #9", + "data": "/i\\j", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #10", + "data": "/k\"l", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #11", + "data": "/ ", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #12", + "data": "/m~0n", + "valid": true + }, + { + "description": "valid JSON-pointer used adding to the last array position", + "data": "/foo/-", + "valid": true + }, + { + "description": "valid JSON-pointer (- used as object member name)", + "data": "/foo/-/bar", + "valid": true + }, + { + "description": "valid JSON-pointer (multiple escaped characters)", + "data": "/~1~0~0~1~1", + "valid": true + }, + { + "description": "valid JSON-pointer (escaped with fraction part) #1", + "data": "/~1.1", + "valid": true + }, + { + "description": "valid JSON-pointer (escaped with fraction part) #2", + "data": "/~0.1", + "valid": true + }, + { + "description": "not a valid JSON-pointer (URI Fragment Identifier) #1", + "data": "#", + "valid": false + }, + { + "description": "not a valid JSON-pointer (URI Fragment Identifier) #2", + "data": "#/", + "valid": false + }, + { + "description": "not a valid JSON-pointer (URI Fragment Identifier) #3", + 
"data": "#a", + "valid": false + }, + { + "description": "not a valid JSON-pointer (some escaped, but not all) #1", + "data": "/~0~", + "valid": false + }, + { + "description": "not a valid JSON-pointer (some escaped, but not all) #2", + "data": "/~0/~", + "valid": false + }, + { + "description": "not a valid JSON-pointer (wrong escape character) #1", + "data": "/~2", + "valid": false + }, + { + "description": "not a valid JSON-pointer (wrong escape character) #2", + "data": "/~-1", + "valid": false + }, + { + "description": "not a valid JSON-pointer (multiple characters not escaped)", + "data": "/~~", + "valid": false + }, + { + "description": "not a valid JSON-pointer (isn't empty nor starts with /) #1", + "data": "a", + "valid": false + }, + { + "description": "not a valid JSON-pointer (isn't empty nor starts with /) #2", + "data": "0", + "valid": false + }, + { + "description": "not a valid JSON-pointer (isn't empty nor starts with /) #3", + "data": "a/a", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/format/regex.json b/vendor/jsonschema/json/tests/draft-next/optional/format/regex.json new file mode 100644 index 00000000..34491770 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/format/regex.json @@ -0,0 +1,48 @@ +[ + { + "description": "validation of regular expressions", + "schema": { "format": "regex" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid regular expression", + "data": "([abc])+\\s+$", + "valid": true + }, + { + "description": "a regular expression with unclosed parens is invalid", + "data": "^(abc]", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/format/relative-json-pointer.json b/vendor/jsonschema/json/tests/draft-next/optional/format/relative-json-pointer.json new file mode 100644 index 00000000..9309986f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/format/relative-json-pointer.json @@ -0,0 +1,83 @@ +[ + { + "description": "validation of Relative JSON Pointers (RJP)", + "schema": { "format": "relative-json-pointer" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid upwards RJP", + "data": "1", + "valid": true + }, + { + "description": "a valid downwards RJP", + "data": "0/foo/bar", + "valid": true + }, + { + "description": "a valid up and then down RJP, with array index", + "data": "2/0/baz/1/zip", + "valid": true + }, + { + 
"description": "a valid RJP taking the member or index name", + "data": "0#", + "valid": true + }, + { + "description": "an invalid RJP that is a valid JSON Pointer", + "data": "/foo/bar", + "valid": false + }, + { + "description": "negative prefix", + "data": "-1/foo/bar", + "valid": false + }, + { + "description": "## is not a valid json-pointer", + "data": "0##", + "valid": false + }, + { + "description": "zero cannot be followed by other digits, plus json-pointer", + "data": "01/a", + "valid": false + }, + { + "description": "zero cannot be followed by other digits, plus octothorpe", + "data": "01#", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/format/time.json b/vendor/jsonschema/json/tests/draft-next/optional/format/time.json new file mode 100644 index 00000000..31425871 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/format/time.json @@ -0,0 +1,198 @@ +[ + { + "description": "validation of time strings", + "schema": { "format": "time" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid time string", + "data": "08:30:06Z", + "valid": true + }, + { + "description": "a valid time string with leap second, Zulu", + "data": "23:59:60Z", + "valid": true + }, + { + "description": "invalid leap second, Zulu (wrong hour)", + "data": "22:59:60Z", + "valid": false + }, + { + "description": "invalid leap second, Zulu (wrong minute)", + "data": "23:58:60Z", + "valid": false + }, + { + "description": "valid leap second, zero time-offset", + "data": "23:59:60+00:00", + "valid": true + }, + { + "description": "invalid leap second, zero time-offset (wrong hour)", + "data": "22:59:60+00:00", + "valid": false + }, + { + "description": "invalid leap second, zero time-offset (wrong minute)", + "data": "23:58:60+00:00", + "valid": false + }, + { + "description": "valid leap second, positive time-offset", + "data": "01:29:60+01:30", + "valid": true + }, + { + "description": "valid leap second, large positive time-offset", + "data": "23:29:60+23:30", + "valid": true + }, + { + "description": "invalid leap second, positive time-offset (wrong hour)", + "data": "23:59:60+01:00", + "valid": false + }, + { + "description": "invalid leap second, positive time-offset (wrong minute)", + "data": "23:59:60+00:30", + "valid": false + }, + { + "description": "valid leap second, negative time-offset", + "data": "15:59:60-08:00", + "valid": true + }, + { + "description": "valid leap second, large negative time-offset", + "data": "00:29:60-23:30", + "valid": true + }, + { + "description": "invalid leap second, negative time-offset (wrong hour)", + "data": "23:59:60-01:00", + "valid": false + }, + { + "description": "invalid leap second, negative time-offset (wrong minute)", + "data": "23:59:60-00:30", + "valid": false + }, + { + "description": "a valid time string with second fraction", + "data": "23:20:50.52Z", + "valid": true + }, + { + "description": "a valid time string with precise second fraction", + 
"data": "08:30:06.283185Z", + "valid": true + }, + { + "description": "a valid time string with plus offset", + "data": "08:30:06+00:20", + "valid": true + }, + { + "description": "a valid time string with minus offset", + "data": "08:30:06-08:00", + "valid": true + }, + { + "description": "a valid time string with case-insensitive Z", + "data": "08:30:06z", + "valid": true + }, + { + "description": "an invalid time string with invalid hour", + "data": "24:00:00Z", + "valid": false + }, + { + "description": "an invalid time string with invalid minute", + "data": "00:60:00Z", + "valid": false + }, + { + "description": "an invalid time string with invalid second", + "data": "00:00:61Z", + "valid": false + }, + { + "description": "an invalid time string with invalid leap second (wrong hour)", + "data": "22:59:60Z", + "valid": false + }, + { + "description": "an invalid time string with invalid leap second (wrong minute)", + "data": "23:58:60Z", + "valid": false + }, + { + "description": "an invalid time string with invalid time numoffset hour", + "data": "01:02:03+24:00", + "valid": false + }, + { + "description": "an invalid time string with invalid time numoffset minute", + "data": "01:02:03+00:60", + "valid": false + }, + { + "description": "an invalid time string with invalid time with both Z and numoffset", + "data": "01:02:03Z+00:30", + "valid": false + }, + { + "description": "an invalid offset indicator", + "data": "08:30:06 PST", + "valid": false + }, + { + "description": "only RFC3339 not all of ISO 8601 are valid", + "data": "01:01:01,1111", + "valid": false + }, + { + "description": "no time offset", + "data": "12:00:00", + "valid": false + }, + { + "description": "invalid non-ASCII '২' (a Bengali 2)", + "data": "1২:00:00Z", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/format/uri-reference.json b/vendor/jsonschema/json/tests/draft-next/optional/format/uri-reference.json new file mode 100644 index 00000000..7cdf228d --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/format/uri-reference.json @@ -0,0 +1,73 @@ +[ + { + "description": "validation of URI References", + "schema": { "format": "uri-reference" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid URI", + "data": "http://foo.bar/?baz=qux#quux", + "valid": true + }, + { + "description": "a valid protocol-relative URI Reference", + "data": "//foo.bar/?baz=qux#quux", + "valid": true + }, + { + "description": "a valid relative URI Reference", + "data": "/abc", + "valid": true + }, + { + "description": "an invalid URI Reference", + "data": "\\\\WINDOWS\\fileshare", + "valid": false + }, + { + "description": "a valid URI Reference", + "data": "abc", + "valid": true + }, + { + "description": "a valid URI fragment", + "data": "#fragment", + "valid": true + }, + { + "description": "an invalid URI fragment", + "data": "#frag\\ment", + "valid": false + } + ] + } +] diff --git 
a/vendor/jsonschema/json/tests/draft-next/optional/format/uri-template.json b/vendor/jsonschema/json/tests/draft-next/optional/format/uri-template.json new file mode 100644 index 00000000..df355c55 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/format/uri-template.json @@ -0,0 +1,58 @@ +[ + { + "description": "format: uri-template", + "schema": { "format": "uri-template" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid uri-template", + "data": "http://example.com/dictionary/{term:1}/{term}", + "valid": true + }, + { + "description": "an invalid uri-template", + "data": "http://example.com/dictionary/{term:1}/{term", + "valid": false + }, + { + "description": "a valid uri-template without variables", + "data": "http://example.com/dictionary", + "valid": true + }, + { + "description": "a valid relative uri-template", + "data": "dictionary/{term:1}/{term}", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/format/uri.json b/vendor/jsonschema/json/tests/draft-next/optional/format/uri.json new file mode 100644 index 00000000..792d71a0 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/format/uri.json @@ -0,0 +1,108 @@ +[ + { + "description": "validation of URIs", + "schema": { "format": "uri" }, + "tests": [ + { + "description": "a valid URL with anchor tag", + "data": "http://foo.bar/?baz=qux#quux", + "valid": true + }, + { + "description": "a valid URL with anchor tag and parentheses", + "data": "http://foo.com/blah_(wikipedia)_blah#cite-1", + "valid": true + }, + { + "description": "a valid URL with URL-encoded stuff", + "data": "http://foo.bar/?q=Test%20URL-encoded%20stuff", + "valid": true + }, + { + "description": "a valid puny-coded URL ", + "data": "http://xn--nw2a.xn--j6w193g/", + "valid": true + }, + { + "description": "a valid URL with many special characters", + "data": "http://-.~_!$&'()*+,;=:%40:80%2f::::::@example.com", + "valid": true + }, + { + "description": "a valid URL based on IPv4", + "data": "http://223.255.255.254", + "valid": true + }, + { + "description": "a valid URL with ftp scheme", + "data": "ftp://ftp.is.co.za/rfc/rfc1808.txt", + "valid": true + }, + { + "description": "a valid URL for a simple text file", + "data": "http://www.ietf.org/rfc/rfc2396.txt", + "valid": true + }, + { + "description": "a valid URL ", + "data": "ldap://[2001:db8::7]/c=GB?objectClass?one", + "valid": true + }, + { + "description": "a valid mailto URI", + "data": "mailto:John.Doe@example.com", + "valid": true + }, + { + "description": "a valid newsgroup URI", + "data": "news:comp.infosystems.www.servers.unix", + "valid": true + }, + { + "description": "a valid tel URI", + "data": "tel:+1-816-555-1212", + "valid": true + }, + { + "description": "a valid URN", + "data": "urn:oasis:names:specification:docbook:dtd:xml:4.1.2", + "valid": true + }, + { + "description": "an invalid protocol-relative URI Reference", + "data": "//foo.bar/?baz=qux#quux", + "valid": 
false + }, + { + "description": "an invalid relative URI Reference", + "data": "/abc", + "valid": false + }, + { + "description": "an invalid URI", + "data": "\\\\WINDOWS\\fileshare", + "valid": false + }, + { + "description": "an invalid URI though valid URI reference", + "data": "abc", + "valid": false + }, + { + "description": "an invalid URI with spaces", + "data": "http:// shouldfail.com", + "valid": false + }, + { + "description": "an invalid URI with spaces and missing scheme", + "data": ":// should fail", + "valid": false + }, + { + "description": "an invalid URI with comma in scheme", + "data": "bar,baz:foo", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/format/uuid.json b/vendor/jsonschema/json/tests/draft-next/optional/format/uuid.json new file mode 100644 index 00000000..e54cbc0f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/format/uuid.json @@ -0,0 +1,85 @@ +[ + { + "description": "uuid format", + "schema": { + "format": "uuid" + }, + "tests": [ + { + "description": "all upper-case", + "data": "2EB8AA08-AA98-11EA-B4AA-73B441D16380", + "valid": true + }, + { + "description": "all lower-case", + "data": "2eb8aa08-aa98-11ea-b4aa-73b441d16380", + "valid": true + }, + { + "description": "mixed case", + "data": "2eb8aa08-AA98-11ea-B4Aa-73B441D16380", + "valid": true + }, + { + "description": "all zeroes is valid", + "data": "00000000-0000-0000-0000-000000000000", + "valid": true + }, + { + "description": "wrong length", + "data": "2eb8aa08-aa98-11ea-b4aa-73b441d1638", + "valid": false + }, + { + "description": "missing section", + "data": "2eb8aa08-aa98-11ea-73b441d16380", + "valid": false + }, + { + "description": "bad characters (not hex)", + "data": "2eb8aa08-aa98-11ea-b4ga-73b441d16380", + "valid": false + }, + { + "description": "no dashes", + "data": "2eb8aa08aa9811eab4aa73b441d16380", + "valid": false + }, + { + "description": "too few dashes", + "data": "2eb8aa08aa98-11ea-b4aa73b441d16380", + "valid": false + }, + { + "description": "too many dashes", + "data": "2eb8-aa08-aa98-11ea-b4aa73b44-1d16380", + "valid": false + }, + { + "description": "dashes in the wrong spot", + "data": "2eb8aa08aa9811eab4aa73b441d16380----", + "valid": false + }, + { + "description": "valid version 4", + "data": "98d80576-482e-427f-8434-7f86890ab222", + "valid": true + }, + { + "description": "valid version 5", + "data": "99c17cbb-656f-564a-940f-1a4568f03487", + "valid": true + }, + { + "description": "hypothetical version 6", + "data": "99c17cbb-656f-664a-940f-1a4568f03487", + "valid": true + }, + { + "description": "hypothetical version 15", + "data": "99c17cbb-656f-f64a-940f-1a4568f03487", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/non-bmp-regex.json b/vendor/jsonschema/json/tests/draft-next/optional/non-bmp-regex.json new file mode 100644 index 00000000..dd67af2b --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/non-bmp-regex.json @@ -0,0 +1,82 @@ +[ + { + "description": "Proper UTF-16 surrogate pair handling: pattern", + "comment": "Optional because .Net doesn't correctly handle 32-bit Unicode characters", + "schema": { "pattern": "^🐲*$" }, + "tests": [ + { + "description": "matches empty", + "data": "", + "valid": true + }, + { + "description": "matches single", + "data": "🐲", + "valid": true + }, + { + "description": "matches two", + "data": "🐲🐲", + "valid": true + }, + { + "description": "doesn't match one", + "data": "🐉", + "valid": false +
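
A note on the uuid cases above: they deliberately accept any version and variant nibble (the "hypothetical version 6" and "version 15" entries) as well as the nil UUID, so a plain 8-4-4-4-12 hex-digit shape check is sufficient. A rough sketch, with is_uuid as a hypothetical helper rather than the vendored checker:

    import re

    # 8-4-4-4-12 hex digits, case-insensitive; no version or variant
    # restriction, matching the "hypothetical version" cases above.
    UUID_RE = re.compile(
        r"^[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}$", re.IGNORECASE
    )

    def is_uuid(value: str) -> bool:
        return UUID_RE.match(value) is not None

    assert is_uuid("00000000-0000-0000-0000-000000000000")
    assert not is_uuid("2eb8aa08aa9811eab4aa73b441d16380")  # no dashes
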
}, + { + "description": "doesn't match two", + "data": "🐉🐉", + "valid": false + }, + { + "description": "doesn't match one ASCII", + "data": "D", + "valid": false + }, + { + "description": "doesn't match two ASCII", + "data": "DD", + "valid": false + } + ] + }, + { + "description": "Proper UTF-16 surrogate pair handling: patternProperties", + "comment": "Optional because .Net doesn't correctly handle 32-bit Unicode characters", + "schema": { + "patternProperties": { + "^🐲*$": { + "type": "integer" + } + } + }, + "tests": [ + { + "description": "matches empty", + "data": { "": 1 }, + "valid": true + }, + { + "description": "matches single", + "data": { "🐲": 1 }, + "valid": true + }, + { + "description": "matches two", + "data": { "🐲🐲": 1 }, + "valid": true + }, + { + "description": "doesn't match one", + "data": { "🐲": "hello" }, + "valid": false + }, + { + "description": "doesn't match two", + "data": { "🐲🐲": "hello" }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/optional/refOfUnknownKeyword.json b/vendor/jsonschema/json/tests/draft-next/optional/refOfUnknownKeyword.json new file mode 100644 index 00000000..5b150df8 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/optional/refOfUnknownKeyword.json @@ -0,0 +1,44 @@ +[ + { + "description": "reference of a root arbitrary keyword ", + "schema": { + "unknown-keyword": {"type": "integer"}, + "properties": { + "bar": {"$ref": "#/unknown-keyword"} + } + }, + "tests": [ + { + "description": "match", + "data": {"bar": 3}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": true}, + "valid": false + } + ] + }, + { + "description": "reference of an arbitrary keyword of a sub-schema", + "schema": { + "properties": { + "foo": {"unknown-keyword": {"type": "integer"}}, + "bar": {"$ref": "#/properties/foo/unknown-keyword"} + } + }, + "tests": [ + { + "description": "match", + "data": {"bar": 3}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": true}, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/pattern.json b/vendor/jsonschema/json/tests/draft-next/pattern.json new file mode 100644 index 00000000..92db0f97 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/pattern.json @@ -0,0 +1,59 @@ +[ + { + "description": "pattern validation", + "schema": {"pattern": "^a*$"}, + "tests": [ + { + "description": "a matching pattern is valid", + "data": "aaa", + "valid": true + }, + { + "description": "a non-matching pattern is invalid", + "data": "abc", + "valid": false + }, + { + "description": "ignores booleans", + "data": true, + "valid": true + }, + { + "description": "ignores integers", + "data": 123, + "valid": true + }, + { + "description": "ignores floats", + "data": 1.0, + "valid": true + }, + { + "description": "ignores objects", + "data": {}, + "valid": true + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores null", + "data": null, + "valid": true + } + ] + }, + { + "description": "pattern is not anchored", + "schema": {"pattern": "a+"}, + "tests": [ + { + "description": "matches a substring", + "data": "xxaayy", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/patternProperties.json b/vendor/jsonschema/json/tests/draft-next/patternProperties.json new file mode 100644 index 00000000..c276e647 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/patternProperties.json @@ -0,0 +1,171 @@ +[ + { +
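
Two behaviors are pinned down just above: pattern performs an unanchored search ("matches a substring"), and regexes operate on code points, so a non-BMP character such as U+1F432 counts as a single character even though it is a surrogate pair in UTF-16. Python's re module exhibits both directly; a small illustration under those assumptions:

    import re

    # Unanchored: "a+" matches the substring "aa" inside "xxaayy".
    assert re.search("a+", "xxaayy") is not None

    # Python strings are sequences of code points, so "🐲" is one
    # character and "^🐲*$" behaves as the tests expect; UTF-16-based
    # engines must pair surrogates to get the same result.
    assert re.search("^🐲*$", "🐲🐲") is not None
    assert re.search("^🐲*$", "🐉") is None
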
"description": + "patternProperties validates properties matching a regex", + "schema": { + "patternProperties": { + "f.*o": {"type": "integer"} + } + }, + "tests": [ + { + "description": "a single valid match is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "multiple valid matches is valid", + "data": {"foo": 1, "foooooo" : 2}, + "valid": true + }, + { + "description": "a single invalid match is invalid", + "data": {"foo": "bar", "fooooo": 2}, + "valid": false + }, + { + "description": "multiple invalid matches is invalid", + "data": {"foo": "bar", "foooooo" : "baz"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["foo"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foo", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "multiple simultaneous patternProperties are validated", + "schema": { + "patternProperties": { + "a*": {"type": "integer"}, + "aaa*": {"maximum": 20} + } + }, + "tests": [ + { + "description": "a single valid match is valid", + "data": {"a": 21}, + "valid": true + }, + { + "description": "a simultaneous match is valid", + "data": {"aaaa": 18}, + "valid": true + }, + { + "description": "multiple matches is valid", + "data": {"a": 21, "aaaa": 18}, + "valid": true + }, + { + "description": "an invalid due to one is invalid", + "data": {"a": "bar"}, + "valid": false + }, + { + "description": "an invalid due to the other is invalid", + "data": {"aaaa": 31}, + "valid": false + }, + { + "description": "an invalid due to both is invalid", + "data": {"aaa": "foo", "aaaa": 31}, + "valid": false + } + ] + }, + { + "description": "regexes are not anchored by default and are case sensitive", + "schema": { + "patternProperties": { + "[0-9]{2,}": { "type": "boolean" }, + "X_": { "type": "string" } + } + }, + "tests": [ + { + "description": "non recognized members are ignored", + "data": { "answer 1": "42" }, + "valid": true + }, + { + "description": "recognized members are accounted for", + "data": { "a31b": null }, + "valid": false + }, + { + "description": "regexes are case sensitive", + "data": { "a_x_3": 3 }, + "valid": true + }, + { + "description": "regexes are case sensitive, 2", + "data": { "a_X_3": 3 }, + "valid": false + } + ] + }, + { + "description": "patternProperties with boolean schemas", + "schema": { + "patternProperties": { + "f.*": true, + "b.*": false + } + }, + "tests": [ + { + "description": "object with property matching schema true is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "object with property matching schema false is invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "object with both properties is invalid", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "object with a property matching both true and false is invalid", + "data": {"foobar":1}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "patternProperties with null valued instance properties", + "schema": { + "patternProperties": { + "^.*bar$": {"type": "null"} + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foobar": null}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/prefixItems.json b/vendor/jsonschema/json/tests/draft-next/prefixItems.json new file mode 100644 index 00000000..aab1e365 --- /dev/null +++ 
b/vendor/jsonschema/json/tests/draft-next/prefixItems.json @@ -0,0 +1,98 @@ +[ + { + "description": "a schema given for prefixItems", + "schema": { + "prefixItems": [ + {"type": "integer"}, + {"type": "string"} + ] + }, + "tests": [ + { + "description": "correct types", + "data": [ 1, "foo" ], + "valid": true + }, + { + "description": "wrong types", + "data": [ "foo", 1 ], + "valid": false + }, + { + "description": "incomplete array of items", + "data": [ 1 ], + "valid": true + }, + { + "description": "array with additional items", + "data": [ 1, "foo", true ], + "valid": true + }, + { + "description": "empty array", + "data": [ ], + "valid": true + }, + { + "description": "JavaScript pseudo-array is valid", + "data": { + "0": "invalid", + "1": "valid", + "length": 2 + }, + "valid": true + } + ] + }, + { + "description": "prefixItems with boolean schemas", + "schema": { + "prefixItems": [true, false] + }, + "tests": [ + { + "description": "array with one item is valid", + "data": [ 1 ], + "valid": true + }, + { + "description": "array with two items is invalid", + "data": [ 1, "foo" ], + "valid": false + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "additional items are allowed by default", + "schema": {"prefixItems": [{"type": "integer"}]}, + "tests": [ + { + "description": "only the first item is validated", + "data": [1, "foo", false], + "valid": true + } + ] + }, + { + "description": "prefixItems with null instance elements", + "schema": { + "prefixItems": [ + { + "type": "null" + } + ] + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/properties.json b/vendor/jsonschema/json/tests/draft-next/properties.json new file mode 100644 index 00000000..5b971ca0 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/properties.json @@ -0,0 +1,236 @@ +[ + { + "description": "object properties validation", + "schema": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "both properties present and valid is valid", + "data": {"foo": 1, "bar": "baz"}, + "valid": true + }, + { + "description": "one property invalid is invalid", + "data": {"foo": 1, "bar": {}}, + "valid": false + }, + { + "description": "both properties invalid is invalid", + "data": {"foo": [], "bar": {}}, + "valid": false + }, + { + "description": "doesn't invalidate other properties", + "data": {"quux": []}, + "valid": true + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": + "properties, patternProperties, additionalProperties interaction", + "schema": { + "properties": { + "foo": {"type": "array", "maxItems": 3}, + "bar": {"type": "array"} + }, + "patternProperties": {"f.o": {"minItems": 2}}, + "additionalProperties": {"type": "integer"} + }, + "tests": [ + { + "description": "property validates property", + "data": {"foo": [1, 2]}, + "valid": true + }, + { + "description": "property invalidates property", + "data": {"foo": [1, 2, 3, 4]}, + "valid": false + }, + { + "description": "patternProperty invalidates property", + "data": {"foo": []}, + "valid": false + }, + { + "description": "patternProperty validates nonproperty", + "data": {"fxo": [1, 2]}, + "valid": true + }, + { + "description": "patternProperty invalidates nonproperty", + 
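
The prefixItems cases above validate positionally and ignore non-arrays entirely, which is why the "JavaScript pseudo-array" object passes. The core check is a zip over the shorter of the instance and the prefix list; a hedged sketch with check_prefix_items and validate as hypothetical names:

    def check_prefix_items(instance, prefix_schemas, validate):
        # Non-arrays are ignored; items beyond the prefix are allowed
        # unless "items" or "unevaluatedItems" says otherwise.
        if not isinstance(instance, list):
            return True
        return all(
            validate(item, schema)
            for item, schema in zip(instance, prefix_schemas)
        )
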
"data": {"fxo": []}, + "valid": false + }, + { + "description": "additionalProperty ignores property", + "data": {"bar": []}, + "valid": true + }, + { + "description": "additionalProperty validates others", + "data": {"quux": 3}, + "valid": true + }, + { + "description": "additionalProperty invalidates others", + "data": {"quux": "foo"}, + "valid": false + } + ] + }, + { + "description": "properties with boolean schema", + "schema": { + "properties": { + "foo": true, + "bar": false + } + }, + "tests": [ + { + "description": "no property present is valid", + "data": {}, + "valid": true + }, + { + "description": "only 'true' property present is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "only 'false' property present is invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "both properties present is invalid", + "data": {"foo": 1, "bar": 2}, + "valid": false + } + ] + }, + { + "description": "properties with escaped characters", + "schema": { + "properties": { + "foo\nbar": {"type": "number"}, + "foo\"bar": {"type": "number"}, + "foo\\bar": {"type": "number"}, + "foo\rbar": {"type": "number"}, + "foo\tbar": {"type": "number"}, + "foo\fbar": {"type": "number"} + } + }, + "tests": [ + { + "description": "object with all numbers is valid", + "data": { + "foo\nbar": 1, + "foo\"bar": 1, + "foo\\bar": 1, + "foo\rbar": 1, + "foo\tbar": 1, + "foo\fbar": 1 + }, + "valid": true + }, + { + "description": "object with strings is invalid", + "data": { + "foo\nbar": "1", + "foo\"bar": "1", + "foo\\bar": "1", + "foo\rbar": "1", + "foo\tbar": "1", + "foo\fbar": "1" + }, + "valid": false + } + ] + }, + { + "description": "properties with null valued instance properties", + "schema": { + "properties": { + "foo": {"type": "null"} + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foo": null}, + "valid": true + } + ] + }, + { + "description": "properties whose names are Javascript object property names", + "comment": "Ensure JS implementations don't universally consider e.g. 
__proto__ to always be present in an object.", + "schema": { + "properties": { + "__proto__": {"type": "number"}, + "toString": { + "properties": { "length": { "type": "string" } } + }, + "constructor": {"type": "number"} + } + }, + "tests": [ + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + }, + { + "description": "none of the properties mentioned", + "data": {}, + "valid": true + }, + { + "description": "__proto__ not valid", + "data": { "__proto__": "foo" }, + "valid": false + }, + { + "description": "toString not valid", + "data": { "toString": { "length": 37 } }, + "valid": false + }, + { + "description": "constructor not valid", + "data": { "constructor": { "length": 37 } }, + "valid": false + }, + { + "description": "all present and valid", + "data": { + "__proto__": 12, + "toString": { "length": "foo" }, + "constructor": 37 + }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/propertyNames.json b/vendor/jsonschema/json/tests/draft-next/propertyNames.json new file mode 100644 index 00000000..8423690d --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/propertyNames.json @@ -0,0 +1,78 @@ +[ + { + "description": "propertyNames validation", + "schema": { + "propertyNames": {"maxLength": 3} + }, + "tests": [ + { + "description": "all property names valid", + "data": { + "f": {}, + "foo": {} + }, + "valid": true + }, + { + "description": "some property names invalid", + "data": { + "foo": {}, + "foobar": {} + }, + "valid": false + }, + { + "description": "object without properties is valid", + "data": {}, + "valid": true + }, + { + "description": "ignores arrays", + "data": [1, 2, 3, 4], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "propertyNames with boolean schema true", + "schema": {"propertyNames": true}, + "tests": [ + { + "description": "object with any properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "propertyNames with boolean schema false", + "schema": {"propertyNames": false}, + "tests": [ + { + "description": "object with any properties is invalid", + "data": {"foo": 1}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/ref.json b/vendor/jsonschema/json/tests/draft-next/ref.json new file mode 100644 index 00000000..189eb41a --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/ref.json @@ -0,0 +1,834 @@ +[ + { + "description": "root pointer ref", + "schema": { + "properties": { + "foo": {"$ref": "#"} + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "match", + "data": {"foo": false}, + "valid": true + }, + { + "description": "recursive match", + "data": {"foo": {"foo": false}}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": false}, + "valid": false + }, + { + "description": "recursive mismatch", + "data": {"foo": {"bar": false}}, + "valid": false + } + ] + }, + { + "description": "relative pointer ref to object", + "schema": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"$ref": "#/properties/foo"} + } + }, + "tests": [ + { + "description": "match", + 
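
The interaction group in properties.json above encodes the key rule: additionalProperties applies only to members matched by neither properties nor patternProperties. A sketch of that bookkeeping, assuming a hypothetical validate callback that treats the boolean schemas true/false as match-all and match-none (a real validator would also collect annotations):

    import re

    def check_object_keywords(instance, schema, validate):
        if not isinstance(instance, dict):
            return True
        props = schema.get("properties", {})
        patterns = schema.get("patternProperties", {})
        additional = schema.get("additionalProperties", True)
        for key, value in instance.items():
            claimed = False
            if key in props:
                claimed = True
                if not validate(value, props[key]):
                    return False
            for pattern, subschema in patterns.items():
                if re.search(pattern, key):  # unanchored, like "pattern"
                    claimed = True
                    if not validate(value, subschema):
                        return False
            # Only unclaimed keys fall through to additionalProperties,
            # so {"quux": 3} is checked against {"type": "integer"}.
            if not claimed and not validate(value, additional):
                return False
        return True
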
"data": {"bar": 3}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": true}, + "valid": false + } + ] + }, + { + "description": "relative pointer ref to array", + "schema": { + "prefixItems": [ + {"type": "integer"}, + {"$ref": "#/prefixItems/0"} + ] + }, + "tests": [ + { + "description": "match array", + "data": [1, 2], + "valid": true + }, + { + "description": "mismatch array", + "data": [1, "foo"], + "valid": false + } + ] + }, + { + "description": "escaped pointer ref", + "schema": { + "$defs": { + "tilde~field": {"type": "integer"}, + "slash/field": {"type": "integer"}, + "percent%field": {"type": "integer"} + }, + "properties": { + "tilde": {"$ref": "#/$defs/tilde~0field"}, + "slash": {"$ref": "#/$defs/slash~1field"}, + "percent": {"$ref": "#/$defs/percent%25field"} + } + }, + "tests": [ + { + "description": "slash invalid", + "data": {"slash": "aoeu"}, + "valid": false + }, + { + "description": "tilde invalid", + "data": {"tilde": "aoeu"}, + "valid": false + }, + { + "description": "percent invalid", + "data": {"percent": "aoeu"}, + "valid": false + }, + { + "description": "slash valid", + "data": {"slash": 123}, + "valid": true + }, + { + "description": "tilde valid", + "data": {"tilde": 123}, + "valid": true + }, + { + "description": "percent valid", + "data": {"percent": 123}, + "valid": true + } + ] + }, + { + "description": "nested refs", + "schema": { + "$defs": { + "a": {"type": "integer"}, + "b": {"$ref": "#/$defs/a"}, + "c": {"$ref": "#/$defs/b"} + }, + "$ref": "#/$defs/c" + }, + "tests": [ + { + "description": "nested ref valid", + "data": 5, + "valid": true + }, + { + "description": "nested ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "ref applies alongside sibling keywords", + "schema": { + "$defs": { + "reffed": { + "type": "array" + } + }, + "properties": { + "foo": { + "$ref": "#/$defs/reffed", + "maxItems": 2 + } + } + }, + "tests": [ + { + "description": "ref valid, maxItems valid", + "data": { "foo": [] }, + "valid": true + }, + { + "description": "ref valid, maxItems invalid", + "data": { "foo": [1, 2, 3] }, + "valid": false + }, + { + "description": "ref invalid", + "data": { "foo": "string" }, + "valid": false + } + ] + }, + { + "description": "remote ref, containing refs itself", + "schema": { + "$ref": "https://json-schema.org/draft/next/schema" + }, + "tests": [ + { + "description": "remote ref valid", + "data": {"minLength": 1}, + "valid": true + }, + { + "description": "remote ref invalid", + "data": {"minLength": -1}, + "valid": false + } + ] + }, + { + "description": "property named $ref that is not a reference", + "schema": { + "properties": { + "$ref": {"type": "string"} + } + }, + "tests": [ + { + "description": "property named $ref valid", + "data": {"$ref": "a"}, + "valid": true + }, + { + "description": "property named $ref invalid", + "data": {"$ref": 2}, + "valid": false + } + ] + }, + { + "description": "property named $ref, containing an actual $ref", + "schema": { + "properties": { + "$ref": {"$ref": "#/$defs/is-string"} + }, + "$defs": { + "is-string": { + "type": "string" + } + } + }, + "tests": [ + { + "description": "property named $ref valid", + "data": {"$ref": "a"}, + "valid": true + }, + { + "description": "property named $ref invalid", + "data": {"$ref": 2}, + "valid": false + } + ] + }, + { + "description": "$ref to boolean schema true", + "schema": { + "$ref": "#/$defs/bool", + "$defs": { + "bool": true + } + }, + "tests": [ + { + "description": "any value is valid", + "data": 
"foo", + "valid": true + } + ] + }, + { + "description": "$ref to boolean schema false", + "schema": { + "$ref": "#/$defs/bool", + "$defs": { + "bool": false + } + }, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "Recursive references between schemas", + "schema": { + "$id": "http://localhost:1234/tree", + "description": "tree of nodes", + "type": "object", + "properties": { + "meta": {"type": "string"}, + "nodes": { + "type": "array", + "items": {"$ref": "node"} + } + }, + "required": ["meta", "nodes"], + "$defs": { + "node": { + "$id": "http://localhost:1234/node", + "description": "node", + "type": "object", + "properties": { + "value": {"type": "number"}, + "subtree": {"$ref": "tree"} + }, + "required": ["value"] + } + } + }, + "tests": [ + { + "description": "valid tree", + "data": { + "meta": "root", + "nodes": [ + { + "value": 1, + "subtree": { + "meta": "child", + "nodes": [ + {"value": 1.1}, + {"value": 1.2} + ] + } + }, + { + "value": 2, + "subtree": { + "meta": "child", + "nodes": [ + {"value": 2.1}, + {"value": 2.2} + ] + } + } + ] + }, + "valid": true + }, + { + "description": "invalid tree", + "data": { + "meta": "root", + "nodes": [ + { + "value": 1, + "subtree": { + "meta": "child", + "nodes": [ + {"value": "string is invalid"}, + {"value": 1.2} + ] + } + }, + { + "value": 2, + "subtree": { + "meta": "child", + "nodes": [ + {"value": 2.1}, + {"value": 2.2} + ] + } + } + ] + }, + "valid": false + } + ] + }, + { + "description": "refs with quote", + "schema": { + "properties": { + "foo\"bar": {"$ref": "#/$defs/foo%22bar"} + }, + "$defs": { + "foo\"bar": {"type": "number"} + } + }, + "tests": [ + { + "description": "object with numbers is valid", + "data": { + "foo\"bar": 1 + }, + "valid": true + }, + { + "description": "object with strings is invalid", + "data": { + "foo\"bar": "1" + }, + "valid": false + } + ] + }, + { + "description": "ref creates new scope when adjacent to keywords", + "schema": { + "$defs": { + "A": { + "unevaluatedProperties": false + } + }, + "properties": { + "prop1": { + "type": "string" + } + }, + "$ref": "#/$defs/A" + }, + "tests": [ + { + "description": "referenced subschema doesn't see annotations from properties", + "data": { + "prop1": "match" + }, + "valid": false + } + ] + }, + { + "description": "naive replacement of $ref with its destination is not correct", + "schema": { + "$defs": { + "a_string": { "type": "string" } + }, + "enum": [ + { "$ref": "#/$defs/a_string" } + ] + }, + "tests": [ + { + "description": "do not evaluate the $ref inside the enum, matching any string", + "data": "this is a string", + "valid": false + }, + { + "description": "do not evaluate the $ref inside the enum, definition exact match", + "data": { "type": "string" }, + "valid": false + }, + { + "description": "match the enum exactly", + "data": { "$ref": "#/$defs/a_string" }, + "valid": true + } + ] + }, + { + "description": "refs with relative uris and defs", + "schema": { + "$id": "http://example.com/schema-relative-uri-defs1.json", + "properties": { + "foo": { + "$id": "schema-relative-uri-defs2.json", + "$defs": { + "inner": { + "properties": { + "bar": { "type": "string" } + } + } + }, + "$ref": "#/$defs/inner" + } + }, + "$ref": "schema-relative-uri-defs2.json" + }, + "tests": [ + { + "description": "invalid on inner field", + "data": { + "foo": { + "bar": 1 + }, + "bar": "a" + }, + "valid": false + }, + { + "description": "invalid on outer field", + "data": { + "foo": { + "bar": 
"a" + }, + "bar": 1 + }, + "valid": false + }, + { + "description": "valid on both fields", + "data": { + "foo": { + "bar": "a" + }, + "bar": "a" + }, + "valid": true + } + ] + }, + { + "description": "relative refs with absolute uris and defs", + "schema": { + "$id": "http://example.com/schema-refs-absolute-uris-defs1.json", + "properties": { + "foo": { + "$id": "http://example.com/schema-refs-absolute-uris-defs2.json", + "$defs": { + "inner": { + "properties": { + "bar": { "type": "string" } + } + } + }, + "$ref": "#/$defs/inner" + } + }, + "$ref": "schema-refs-absolute-uris-defs2.json" + }, + "tests": [ + { + "description": "invalid on inner field", + "data": { + "foo": { + "bar": 1 + }, + "bar": "a" + }, + "valid": false + }, + { + "description": "invalid on outer field", + "data": { + "foo": { + "bar": "a" + }, + "bar": 1 + }, + "valid": false + }, + { + "description": "valid on both fields", + "data": { + "foo": { + "bar": "a" + }, + "bar": "a" + }, + "valid": true + } + ] + }, + { + "description": "$id must be resolved against nearest parent, not just immediate parent", + "schema": { + "$id": "http://example.com/a.json", + "$defs": { + "x": { + "$id": "http://example.com/b/c.json", + "not": { + "$defs": { + "y": { + "$id": "d.json", + "type": "number" + } + } + } + } + }, + "allOf": [ + { + "$ref": "http://example.com/b/d.json" + } + ] + }, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "non-number is invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "order of evaluation: $id and $ref", + "schema": { + "$comment": "$id must be evaluated before $ref to get the proper $ref destination", + "$id": "/ref-and-id1/base.json", + "$ref": "int.json", + "$defs": { + "bigint": { + "$comment": "canonical uri: /ref-and-id1/int.json", + "$id": "int.json", + "maximum": 10 + }, + "smallint": { + "$comment": "canonical uri: /ref-and-id1-int.json", + "$id": "/ref-and-id1-int.json", + "maximum": 2 + } + } + }, + "tests": [ + { + "description": "data is valid against first definition", + "data": 5, + "valid": true + }, + { + "description": "data is invalid against first definition", + "data": 50, + "valid": false + } + ] + }, + { + "description": "order of evaluation: $id and $anchor and $ref", + "schema": { + "$comment": "$id must be evaluated before $ref to get the proper $ref destination", + "$id": "/ref-and-id2/base.json", + "$ref": "#bigint", + "$defs": { + "bigint": { + "$comment": "canonical uri: /ref-and-id2/base.json/$defs/bigint; another valid uri for this location: /ref-and-id2/base.json#bigint", + "$anchor": "bigint", + "maximum": 10 + }, + "smallint": { + "$comment": "canonical uri: /ref-and-id2#/$defs/smallint; another valid uri for this location: /ref-and-id2/#bigint", + "$id": "/ref-and-id2/", + "$anchor": "bigint", + "maximum": 2 + } + } + }, + "tests": [ + { + "description": "data is valid against first definition", + "data": 5, + "valid": true + }, + { + "description": "data is invalid against first definition", + "data": 50, + "valid": false + } + ] + }, + { + "description": "simple URN base URI with $ref via the URN", + "schema": { + "$comment": "URIs do not have to have HTTP(s) schemes", + "$id": "urn:uuid:deadbeef-1234-ffff-ffff-4321feebdaed", + "minimum": 30, + "properties": { + "foo": {"$ref": "urn:uuid:deadbeef-1234-ffff-ffff-4321feebdaed"} + } + }, + "tests": [ + { + "description": "valid under the URN IDed schema", + "data": {"foo": 37}, + "valid": true + }, + { + "description": "invalid 
under the URN IDed schema", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "simple URN base URI with JSON pointer", + "schema": { + "$comment": "URIs do not have to have HTTP(s) schemes", + "$id": "urn:uuid:deadbeef-1234-00ff-ff00-4321feebdaed", + "properties": { + "foo": {"$ref": "#/$defs/bar"} + }, + "$defs": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with NSS", + "schema": { + "$comment": "RFC 8141 §2.2", + "$id": "urn:example:1/406/47452/2", + "properties": { + "foo": {"$ref": "#/$defs/bar"} + }, + "$defs": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with r-component", + "schema": { + "$comment": "RFC 8141 §2.3.1", + "$id": "urn:example:foo-bar-baz-qux?+CCResolve:cc=uk", + "properties": { + "foo": {"$ref": "#/$defs/bar"} + }, + "$defs": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with q-component", + "schema": { + "$comment": "RFC 8141 §2.3.2", + "$id": "urn:example:weather?=op=map&lat=39.56&lon=-104.85&datetime=1969-07-21T02:56:15Z", + "properties": { + "foo": {"$ref": "#/$defs/bar"} + }, + "$defs": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with f-component", + "schema": { + "$comment": "RFC 8141 §2.3.3, but we don't allow fragments", + "$ref": "https://json-schema.org/draft/next/schema" + }, + "tests": [ + { + "description": "is invalid", + "data": {"$id": "urn:example:foo-bar-baz-qux#somepart"}, + "valid": false + } + ] + }, + { + "description": "URN base URI with URN and JSON pointer ref", + "schema": { + "$id": "urn:uuid:deadbeef-1234-0000-0000-4321feebdaed", + "properties": { + "foo": {"$ref": "urn:uuid:deadbeef-1234-0000-0000-4321feebdaed#/$defs/bar"} + }, + "$defs": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with URN and anchor ref", + "schema": { + "$id": "urn:uuid:deadbeef-1234-ff00-00ff-4321feebdaed", + "properties": { + "foo": {"$ref": "urn:uuid:deadbeef-1234-ff00-00ff-4321feebdaed#something"} + }, + "$defs": { + "bar": { + "$anchor": "something", + "type": "string" + } + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/refRemote.json b/vendor/jsonschema/json/tests/draft-next/refRemote.json new file mode 100644 index 00000000..a8440396 --- /dev/null +++ 
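
The "escaped pointer ref" group in ref.json above layers two escape schemes: the URI fragment is percent-decoded first (%25 becomes %), then each JSON Pointer token unescapes ~1 to / and ~0 to ~, in that order (so the order of the two replaces below matters). A minimal sketch without error handling; resolve_pointer is our name, not an API from the vendored code:

    from urllib.parse import unquote

    def resolve_pointer(document, pointer: str):
        # e.g. "#/$defs/slash~1field" -> document["$defs"]["slash/field"]
        result = document
        for token in unquote(pointer).lstrip("#/").split("/"):
            token = token.replace("~1", "/").replace("~0", "~")
            if isinstance(result, list):
                result = result[int(token)]
            else:
                result = result[token]
        return result
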
b/vendor/jsonschema/json/tests/draft-next/refRemote.json @@ -0,0 +1,233 @@ +[ + { + "description": "remote ref", + "schema": {"$ref": "http://localhost:1234/integer.json"}, + "tests": [ + { + "description": "remote ref valid", + "data": 1, + "valid": true + }, + { + "description": "remote ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "fragment within remote ref", + "schema": {"$ref": "http://localhost:1234/subSchemas-defs.json#/$defs/integer"}, + "tests": [ + { + "description": "remote fragment valid", + "data": 1, + "valid": true + }, + { + "description": "remote fragment invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "ref within remote ref", + "schema": { + "$ref": "http://localhost:1234/subSchemas-defs.json#/$defs/refToInteger" + }, + "tests": [ + { + "description": "ref within ref valid", + "data": 1, + "valid": true + }, + { + "description": "ref within ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "base URI change", + "schema": { + "$id": "http://localhost:1234/", + "items": { + "$id": "baseUriChange/", + "items": {"$ref": "folderInteger.json"} + } + }, + "tests": [ + { + "description": "base URI change ref valid", + "data": [[1]], + "valid": true + }, + { + "description": "base URI change ref invalid", + "data": [["a"]], + "valid": false + } + ] + }, + { + "description": "base URI change - change folder", + "schema": { + "$id": "http://localhost:1234/scope_change_defs1.json", + "type" : "object", + "properties": {"list": {"$ref": "baseUriChangeFolder/"}}, + "$defs": { + "baz": { + "$id": "baseUriChangeFolder/", + "type": "array", + "items": {"$ref": "folderInteger.json"} + } + } + }, + "tests": [ + { + "description": "number is valid", + "data": {"list": [1]}, + "valid": true + }, + { + "description": "string is invalid", + "data": {"list": ["a"]}, + "valid": false + } + ] + }, + { + "description": "base URI change - change folder in subschema", + "schema": { + "$id": "http://localhost:1234/scope_change_defs2.json", + "type" : "object", + "properties": {"list": {"$ref": "baseUriChangeFolderInSubschema/#/$defs/bar"}}, + "$defs": { + "baz": { + "$id": "baseUriChangeFolderInSubschema/", + "$defs": { + "bar": { + "type": "array", + "items": {"$ref": "folderInteger.json"} + } + } + } + } + }, + "tests": [ + { + "description": "number is valid", + "data": {"list": [1]}, + "valid": true + }, + { + "description": "string is invalid", + "data": {"list": ["a"]}, + "valid": false + } + ] + }, + { + "description": "root ref in remote ref", + "schema": { + "$id": "http://localhost:1234/object", + "type": "object", + "properties": { + "name": {"$ref": "name-defs.json#/$defs/orNull"} + } + }, + "tests": [ + { + "description": "string is valid", + "data": { + "name": "foo" + }, + "valid": true + }, + { + "description": "null is valid", + "data": { + "name": null + }, + "valid": true + }, + { + "description": "object is invalid", + "data": { + "name": { + "name": null + } + }, + "valid": false + } + ] + }, + { + "description": "remote ref with ref to defs", + "schema": { + "$id": "http://localhost:1234/schema-remote-ref-ref-defs1.json", + "$ref": "ref-and-defs.json" + }, + "tests": [ + { + "description": "invalid", + "data": { + "bar": 1 + }, + "valid": false + }, + { + "description": "valid", + "data": { + "bar": "a" + }, + "valid": true + } + ] + }, + { + "description": "Location-independent identifier in remote ref", + "schema": { + "$ref": 
"http://localhost:1234/locationIndependentIdentifier.json#/$defs/refToInteger" + }, + "tests": [ + { + "description": "integer is valid", + "data": 1, + "valid": true + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "retrieved nested refs resolve relative to their URI not $id", + "schema": { + "$id": "http://localhost:1234/some-id", + "properties": { + "name": {"$ref": "nested/foo-ref-string.json"} + } + }, + "tests": [ + { + "description": "number is invalid", + "data": { + "name": {"foo": 1} + }, + "valid": false + }, + { + "description": "string is valid", + "data": { + "name": {"foo": "a"} + }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/required.json b/vendor/jsonschema/json/tests/draft-next/required.json new file mode 100644 index 00000000..8d8087af --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/required.json @@ -0,0 +1,151 @@ +[ + { + "description": "required validation", + "schema": { + "properties": { + "foo": {}, + "bar": {} + }, + "required": ["foo"] + }, + "tests": [ + { + "description": "present required property is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "non-present required property is invalid", + "data": {"bar": 1}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores strings", + "data": "", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "required default validation", + "schema": { + "properties": { + "foo": {} + } + }, + "tests": [ + { + "description": "not required by default", + "data": {}, + "valid": true + } + ] + }, + { + "description": "required with empty array", + "schema": { + "properties": { + "foo": {} + }, + "required": [] + }, + "tests": [ + { + "description": "property not required", + "data": {}, + "valid": true + } + ] + }, + { + "description": "required with escaped characters", + "schema": { + "required": [ + "foo\nbar", + "foo\"bar", + "foo\\bar", + "foo\rbar", + "foo\tbar", + "foo\fbar" + ] + }, + "tests": [ + { + "description": "object with all properties present is valid", + "data": { + "foo\nbar": 1, + "foo\"bar": 1, + "foo\\bar": 1, + "foo\rbar": 1, + "foo\tbar": 1, + "foo\fbar": 1 + }, + "valid": true + }, + { + "description": "object with some properties missing is invalid", + "data": { + "foo\nbar": "1", + "foo\"bar": "1" + }, + "valid": false + } + ] + }, + { + "description": "required properties whose names are Javascript object property names", + "comment": "Ensure JS implementations don't universally consider e.g. 
__proto__ to always be present in an object.", + "schema": { "required": ["__proto__", "toString", "constructor"] }, + "tests": [ + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + }, + { + "description": "none of the properties mentioned", + "data": {}, + "valid": false + }, + { + "description": "__proto__ present", + "data": { "__proto__": "foo" }, + "valid": false + }, + { + "description": "toString present", + "data": { "toString": { "length": 37 } }, + "valid": false + }, + { + "description": "constructor present", + "data": { "constructor": { "length": 37 } }, + "valid": false + }, + { + "description": "all present", + "data": { + "__proto__": 12, + "toString": { "length": "foo" }, + "constructor": 37 + }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/type.json b/vendor/jsonschema/json/tests/draft-next/type.json new file mode 100644 index 00000000..83046470 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/type.json @@ -0,0 +1,474 @@ +[ + { + "description": "integer type matches integers", + "schema": {"type": "integer"}, + "tests": [ + { + "description": "an integer is an integer", + "data": 1, + "valid": true + }, + { + "description": "a float with zero fractional part is an integer", + "data": 1.0, + "valid": true + }, + { + "description": "a float is not an integer", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an integer", + "data": "foo", + "valid": false + }, + { + "description": "a string is still not an integer, even if it looks like one", + "data": "1", + "valid": false + }, + { + "description": "an object is not an integer", + "data": {}, + "valid": false + }, + { + "description": "an array is not an integer", + "data": [], + "valid": false + }, + { + "description": "a boolean is not an integer", + "data": true, + "valid": false + }, + { + "description": "null is not an integer", + "data": null, + "valid": false + } + ] + }, + { + "description": "number type matches numbers", + "schema": {"type": "number"}, + "tests": [ + { + "description": "an integer is a number", + "data": 1, + "valid": true + }, + { + "description": "a float with zero fractional part is a number (and an integer)", + "data": 1.0, + "valid": true + }, + { + "description": "a float is a number", + "data": 1.1, + "valid": true + }, + { + "description": "a string is not a number", + "data": "foo", + "valid": false + }, + { + "description": "a string is still not a number, even if it looks like one", + "data": "1", + "valid": false + }, + { + "description": "an object is not a number", + "data": {}, + "valid": false + }, + { + "description": "an array is not a number", + "data": [], + "valid": false + }, + { + "description": "a boolean is not a number", + "data": true, + "valid": false + }, + { + "description": "null is not a number", + "data": null, + "valid": false + } + ] + }, + { + "description": "string type matches strings", + "schema": {"type": "string"}, + "tests": [ + { + "description": "1 is not a string", + "data": 1, + "valid": false + }, + { + "description": "a float is not a string", + "data": 1.1, + "valid": false + }, + { + "description": "a string is a string", + "data": "foo", + "valid": true + }, + { + "description": "a string is still a string, even if it looks like a number", + "data": "1", + "valid": true + }, + { + "description": "an empty string is still a string", + "data": "", + "valid": true + }, + { + 
"description": "an object is not a string", + "data": {}, + "valid": false + }, + { + "description": "an array is not a string", + "data": [], + "valid": false + }, + { + "description": "a boolean is not a string", + "data": true, + "valid": false + }, + { + "description": "null is not a string", + "data": null, + "valid": false + } + ] + }, + { + "description": "object type matches objects", + "schema": {"type": "object"}, + "tests": [ + { + "description": "an integer is not an object", + "data": 1, + "valid": false + }, + { + "description": "a float is not an object", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an object", + "data": "foo", + "valid": false + }, + { + "description": "an object is an object", + "data": {}, + "valid": true + }, + { + "description": "an array is not an object", + "data": [], + "valid": false + }, + { + "description": "a boolean is not an object", + "data": true, + "valid": false + }, + { + "description": "null is not an object", + "data": null, + "valid": false + } + ] + }, + { + "description": "array type matches arrays", + "schema": {"type": "array"}, + "tests": [ + { + "description": "an integer is not an array", + "data": 1, + "valid": false + }, + { + "description": "a float is not an array", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an array", + "data": "foo", + "valid": false + }, + { + "description": "an object is not an array", + "data": {}, + "valid": false + }, + { + "description": "an array is an array", + "data": [], + "valid": true + }, + { + "description": "a boolean is not an array", + "data": true, + "valid": false + }, + { + "description": "null is not an array", + "data": null, + "valid": false + } + ] + }, + { + "description": "boolean type matches booleans", + "schema": {"type": "boolean"}, + "tests": [ + { + "description": "an integer is not a boolean", + "data": 1, + "valid": false + }, + { + "description": "zero is not a boolean", + "data": 0, + "valid": false + }, + { + "description": "a float is not a boolean", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not a boolean", + "data": "foo", + "valid": false + }, + { + "description": "an empty string is not a boolean", + "data": "", + "valid": false + }, + { + "description": "an object is not a boolean", + "data": {}, + "valid": false + }, + { + "description": "an array is not a boolean", + "data": [], + "valid": false + }, + { + "description": "true is a boolean", + "data": true, + "valid": true + }, + { + "description": "false is a boolean", + "data": false, + "valid": true + }, + { + "description": "null is not a boolean", + "data": null, + "valid": false + } + ] + }, + { + "description": "null type matches only the null object", + "schema": {"type": "null"}, + "tests": [ + { + "description": "an integer is not null", + "data": 1, + "valid": false + }, + { + "description": "a float is not null", + "data": 1.1, + "valid": false + }, + { + "description": "zero is not null", + "data": 0, + "valid": false + }, + { + "description": "a string is not null", + "data": "foo", + "valid": false + }, + { + "description": "an empty string is not null", + "data": "", + "valid": false + }, + { + "description": "an object is not null", + "data": {}, + "valid": false + }, + { + "description": "an array is not null", + "data": [], + "valid": false + }, + { + "description": "true is not null", + "data": true, + "valid": false + }, + { + "description": "false is not null", + "data": false, + "valid": false + }, + { 
+ "description": "null is null", + "data": null, + "valid": true + } + ] + }, + { + "description": "multiple types can be specified in an array", + "schema": {"type": ["integer", "string"]}, + "tests": [ + { + "description": "an integer is valid", + "data": 1, + "valid": true + }, + { + "description": "a string is valid", + "data": "foo", + "valid": true + }, + { + "description": "a float is invalid", + "data": 1.1, + "valid": false + }, + { + "description": "an object is invalid", + "data": {}, + "valid": false + }, + { + "description": "an array is invalid", + "data": [], + "valid": false + }, + { + "description": "a boolean is invalid", + "data": true, + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + } + ] + }, + { + "description": "type as array with one item", + "schema": { + "type": ["string"] + }, + "tests": [ + { + "description": "string is valid", + "data": "foo", + "valid": true + }, + { + "description": "number is invalid", + "data": 123, + "valid": false + } + ] + }, + { + "description": "type: array or object", + "schema": { + "type": ["array", "object"] + }, + "tests": [ + { + "description": "array is valid", + "data": [1,2,3], + "valid": true + }, + { + "description": "object is valid", + "data": {"foo": 123}, + "valid": true + }, + { + "description": "number is invalid", + "data": 123, + "valid": false + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + } + ] + }, + { + "description": "type: array, object or null", + "schema": { + "type": ["array", "object", "null"] + }, + "tests": [ + { + "description": "array is valid", + "data": [1,2,3], + "valid": true + }, + { + "description": "object is valid", + "data": {"foo": 123}, + "valid": true + }, + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "number is invalid", + "data": 123, + "valid": false + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/unevaluatedItems.json b/vendor/jsonschema/json/tests/draft-next/unevaluatedItems.json new file mode 100644 index 00000000..14c8d959 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/unevaluatedItems.json @@ -0,0 +1,644 @@ +[ + { + "description": "unevaluatedItems true", + "schema": { "unevaluatedItems": true }, + "tests": [ + { + "description": "with no unevaluated items", + "data": [], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo"], + "valid": true + } + ] + }, + { + "description": "unevaluatedItems false", + "schema": { "unevaluatedItems": false }, + "tests": [ + { + "description": "with no unevaluated items", + "data": [], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems as schema", + "schema": { "unevaluatedItems": { "type": "string" } }, + "tests": [ + { + "description": "with no unevaluated items", + "data": [], + "valid": true + }, + { + "description": "with valid unevaluated items", + "data": ["foo"], + "valid": true + }, + { + "description": "with invalid unevaluated items", + "data": [42], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with uniform items", + "schema": { + "items": { "type": "string" }, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "unevaluatedItems doesn't apply", + 
"data": ["foo", "bar"], + "valid": true + } + ] + }, + { + "description": "unevaluatedItems with tuple", + "schema": { + "prefixItems": [ + { "type": "string" } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": ["foo"], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo", "bar"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with items", + "schema": { + "prefixItems": [ + { "type": "string" } + ], + "items": true, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "unevaluatedItems doesn't apply", + "data": ["foo", 42], + "valid": true + } + ] + }, + { + "description": "unevaluatedItems with nested tuple", + "schema": { + "prefixItems": [ + { "type": "string" } + ], + "allOf": [ + { + "prefixItems": [ + true, + { "type": "number" } + ] + } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": ["foo", 42], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo", 42, true], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with nested items", + "schema": { + "unevaluatedItems": {"type": "boolean"}, + "anyOf": [ + { "items": {"type": "string"} }, + true + ] + }, + "tests": [ + { + "description": "with only (valid) additional items", + "data": [true, false], + "valid": true + }, + { + "description": "with no additional items", + "data": ["yes", "no"], + "valid": true + }, + { + "description": "with invalid additional item", + "data": ["yes", false], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with nested prefixItems and items", + "schema": { + "allOf": [ + { + "prefixItems": [ + { "type": "string" } + ], + "items": true + } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no additional items", + "data": ["foo"], + "valid": true + }, + { + "description": "with additional items", + "data": ["foo", 42, true], + "valid": true + } + ] + }, + { + "description": "unevaluatedItems with nested unevaluatedItems", + "schema": { + "allOf": [ + { + "prefixItems": [ + { "type": "string" } + ] + }, + { "unevaluatedItems": true } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no additional items", + "data": ["foo"], + "valid": true + }, + { + "description": "with additional items", + "data": ["foo", 42, true], + "valid": true + } + ] + }, + { + "description": "unevaluatedItems with anyOf", + "schema": { + "prefixItems": [ + { "const": "foo" } + ], + "anyOf": [ + { + "prefixItems": [ + true, + { "const": "bar" } + ] + }, + { + "prefixItems": [ + true, + true, + { "const": "baz" } + ] + } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "when one schema matches and has no unevaluated items", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "when one schema matches and has unevaluated items", + "data": ["foo", "bar", 42], + "valid": false + }, + { + "description": "when two schemas match and has no unevaluated items", + "data": ["foo", "bar", "baz"], + "valid": true + }, + { + "description": "when two schemas match and has unevaluated items", + "data": ["foo", "bar", "baz", 42], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with oneOf", + "schema": { + "prefixItems": [ + { "const": "foo" } + ], + "oneOf": [ + { + "prefixItems": [ + true, + { "const": "bar" } + ] + }, + { + "prefixItems": [ + true, + { "const": "baz" } + ] + } + ], 
+ "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo", "bar", 42], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with not", + "schema": { + "prefixItems": [ + { "const": "foo" } + ], + "not": { + "not": { + "prefixItems": [ + true, + { "const": "bar" } + ] + } + }, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with unevaluated items", + "data": ["foo", "bar"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with if/then/else", + "schema": { + "prefixItems": [ + { "const": "foo" } + ], + "if": { + "prefixItems": [ + true, + { "const": "bar" } + ] + }, + "then": { + "prefixItems": [ + true, + true, + { "const": "then" } + ] + }, + "else": { + "prefixItems": [ + true, + true, + true, + { "const": "else" } + ] + }, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "when if matches and it has no unevaluated items", + "data": ["foo", "bar", "then"], + "valid": true + }, + { + "description": "when if matches and it has unevaluated items", + "data": ["foo", "bar", "then", "else"], + "valid": false + }, + { + "description": "when if doesn't match and it has no unevaluated items", + "data": ["foo", 42, 42, "else"], + "valid": true + }, + { + "description": "when if doesn't match and it has unevaluated items", + "data": ["foo", 42, 42, "else", 42], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with boolean schemas", + "schema": { + "allOf": [true], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": [], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with $ref", + "schema": { + "$ref": "#/$defs/bar", + "prefixItems": [ + { "type": "string" } + ], + "unevaluatedItems": false, + "$defs": { + "bar": { + "prefixItems": [ + true, + { "type": "string" } + ] + } + } + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo", "bar", "baz"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems can't see inside cousins", + "schema": { + "allOf": [ + { + "prefixItems": [ true ] + }, + { "unevaluatedItems": false } + ] + }, + "tests": [ + { + "description": "always fails", + "data": [ 1 ], + "valid": false + } + ] + }, + { + "description": "item is evaluated in an uncle schema to unevaluatedItems", + "schema": { + "properties": { + "foo": { + "prefixItems": [ + { "type": "string" } + ], + "unevaluatedItems": false + } + }, + "anyOf": [ + { + "properties": { + "foo": { + "prefixItems": [ + true, + { "type": "string" } + ] + } + } + } + ] + }, + "tests": [ + { + "description": "no extra items", + "data": { + "foo": [ + "test" + ] + }, + "valid": true + }, + { + "description": "uncle keyword evaluation is not significant", + "data": { + "foo": [ + "test", + "test" + ] + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedItems depends on adjacent contains", + "schema": { + "prefixItems": [true], + "contains": {"type": "string"}, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "second item is evaluated by contains", + "data": [ 1, "foo" ], + "valid": true + }, + { + "description": "contains fails, second item is not evaluated", + "data": 
[ 1, 2 ], + "valid": false + }, + { + "description": "contains passes, second item is not evaluated", + "data": [ 1, 2, "foo" ], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems depends on multiple nested contains", + "schema": { + "allOf": [ + { "contains": { "multipleOf": 2 } }, + { "contains": { "multipleOf": 3 } } + ], + "unevaluatedItems": { "multipleOf": 5 } + }, + "tests": [ + { + "description": "5 not evaluated, passes unevaluatedItems", + "data": [ 2, 3, 4, 5, 6 ], + "valid": true + }, + { + "description": "7 not evaluated, fails unevaluatedItems", + "data": [ 2, 3, 4, 7, 8 ], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems and contains interact to control item dependency relationship", + "schema": { + "if": { + "contains": {"const": "a"} + }, + "then": { + "if": { + "contains": {"const": "b"} + }, + "then": { + "if": { + "contains": {"const": "c"} + } + } + }, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "empty array is valid", + "data": [], + "valid": true + }, + { + "description": "only a's are valid", + "data": [ "a", "a" ], + "valid": true + }, + { + "description": "a's and b's are valid", + "data": [ "a", "b", "a", "b", "a" ], + "valid": true + }, + { + "description": "a's, b's and c's are valid", + "data": [ "c", "a", "c", "c", "b", "a" ], + "valid": true + }, + { + "description": "only b's are invalid", + "data": [ "b", "b" ], + "valid": false + }, + { + "description": "only c's are invalid", + "data": [ "c", "c" ], + "valid": false + }, + { + "description": "only b's and c's are invalid", + "data": [ "c", "b", "c", "b", "c" ], + "valid": false + }, + { + "description": "only a's and c's are invalid", + "data": [ "c", "a", "c", "a", "c" ], + "valid": false + } + ] + }, + { + "description": "non-array instances are valid", + "schema": {"unevaluatedItems": false}, + "tests": [ + { + "description": "ignores booleans", + "data": true, + "valid": true + }, + { + "description": "ignores integers", + "data": 123, + "valid": true + }, + { + "description": "ignores floats", + "data": 1.0, + "valid": true + }, + { + "description": "ignores objects", + "data": {}, + "valid": true + }, + { + "description": "ignores strings", + "data": "foo", + "valid": true + }, + { + "description": "ignores null", + "data": null, + "valid": true + } + ] + }, + { + "description": "unevaluatedItems with null instance elements", + "schema": { + "unevaluatedItems": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/unevaluatedProperties.json b/vendor/jsonschema/json/tests/draft-next/unevaluatedProperties.json new file mode 100644 index 00000000..0d5c4b83 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/unevaluatedProperties.json @@ -0,0 +1,1411 @@ +[ + { + "description": "unevaluatedProperties true", + "schema": { + "type": "object", + "unevaluatedProperties": true + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": {}, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + } + ] + }, + { + "description": "unevaluatedProperties schema", + "schema": { + "type": "object", + "unevaluatedProperties": { + "type": "string", + "minLength": 3 + } + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": {}, + "valid": true + }, + { + "description": "with valid unevaluated 
properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with invalid unevaluated properties", + "data": { + "foo": "fo" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties false", + "schema": { + "type": "object", + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": {}, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with adjacent properties", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with adjacent patternProperties", + "schema": { + "type": "object", + "patternProperties": { + "^foo": { "type": "string" } + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with adjacent additionalProperties", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "additionalProperties": true, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no additional properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with additional properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + } + ] + }, + { + "description": "unevaluatedProperties with nested properties", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "properties": { + "bar": { "type": "string" } + } + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no additional properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "with additional properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with nested patternProperties", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "patternProperties": { + "^bar": { "type": "string" } + } + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no additional properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "with additional properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with nested additionalProperties", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "additionalProperties": true + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no additional properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with additional properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + } + ] + }, + 
{ + "description": "unevaluatedProperties with nested unevaluatedProperties", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "unevaluatedProperties": true + } + ], + "unevaluatedProperties": { + "type": "string", + "maxLength": 2 + } + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + } + ] + }, + { + "description": "unevaluatedProperties with anyOf", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "anyOf": [ + { + "properties": { + "bar": { "const": "bar" } + }, + "required": ["bar"] + }, + { + "properties": { + "baz": { "const": "baz" } + }, + "required": ["baz"] + }, + { + "properties": { + "quux": { "const": "quux" } + }, + "required": ["quux"] + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "when one matches and has no unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "when one matches and has unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "not-baz" + }, + "valid": false + }, + { + "description": "when two match and has no unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz" + }, + "valid": true + }, + { + "description": "when two match and has unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz", + "quux": "not-quux" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with oneOf", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "oneOf": [ + { + "properties": { + "bar": { "const": "bar" } + }, + "required": ["bar"] + }, + { + "properties": { + "baz": { "const": "baz" } + }, + "required": ["baz"] + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar", + "quux": "quux" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with not", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "not": { + "not": { + "properties": { + "bar": { "const": "bar" } + }, + "required": ["bar"] + } + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with if/then/else", + "schema": { + "type": "object", + "if": { + "properties": { + "foo": { "const": "then" } + }, + "required": ["foo"] + }, + "then": { + "properties": { + "bar": { "type": "string" } + }, + "required": ["bar"] + }, + "else": { + "properties": { + "baz": { "type": "string" } + }, + "required": ["baz"] + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "when if is true and has no unevaluated properties", + "data": { + "foo": "then", + "bar": "bar" + }, + "valid": true + }, + { + "description": "when if is true and has unevaluated properties", + "data": { + "foo": "then", + "bar": "bar", + "baz": "baz" + }, + "valid": false + }, + { + "description": "when if is false and has no unevaluated properties", + 
"data": { + "baz": "baz" + }, + "valid": true + }, + { + "description": "when if is false and has unevaluated properties", + "data": { + "foo": "else", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with if/then/else, then not defined", + "schema": { + "type": "object", + "if": { + "properties": { + "foo": { "const": "then" } + }, + "required": ["foo"] + }, + "else": { + "properties": { + "baz": { "type": "string" } + }, + "required": ["baz"] + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "when if is true and has no unevaluated properties", + "data": { + "foo": "then", + "bar": "bar" + }, + "valid": false + }, + { + "description": "when if is true and has unevaluated properties", + "data": { + "foo": "then", + "bar": "bar", + "baz": "baz" + }, + "valid": false + }, + { + "description": "when if is false and has no unevaluated properties", + "data": { + "baz": "baz" + }, + "valid": true + }, + { + "description": "when if is false and has unevaluated properties", + "data": { + "foo": "else", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with if/then/else, else not defined", + "schema": { + "type": "object", + "if": { + "properties": { + "foo": { "const": "then" } + }, + "required": ["foo"] + }, + "then": { + "properties": { + "bar": { "type": "string" } + }, + "required": ["bar"] + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "when if is true and has no unevaluated properties", + "data": { + "foo": "then", + "bar": "bar" + }, + "valid": true + }, + { + "description": "when if is true and has unevaluated properties", + "data": { + "foo": "then", + "bar": "bar", + "baz": "baz" + }, + "valid": false + }, + { + "description": "when if is false and has no unevaluated properties", + "data": { + "baz": "baz" + }, + "valid": false + }, + { + "description": "when if is false and has unevaluated properties", + "data": { + "foo": "else", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with dependentSchemas", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "dependentSchemas": { + "foo": { + "properties": { + "bar": { "const": "bar" } + }, + "required": ["bar"] + } + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with boolean schemas", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [true], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with $ref", + "schema": { + "type": "object", + "$ref": "#/$defs/bar", + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": false, + "$defs": { + "bar": { + "properties": { + "bar": { "type": "string" } + } + } + } + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + 
"data": { + "foo": "foo", + "bar": "bar", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties can't see inside cousins", + "schema": { + "allOf": [ + { + "properties": { + "foo": true + } + }, + { + "unevaluatedProperties": false + } + ] + }, + "tests": [ + { + "description": "always fails", + "data": { + "foo": 1 + }, + "valid": false + } + ] + }, + { + "description": "nested unevaluatedProperties, outer false, inner true, properties outside", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "unevaluatedProperties": true + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + } + ] + }, + { + "description": "nested unevaluatedProperties, outer false, inner true, properties inside", + "schema": { + "type": "object", + "allOf": [ + { + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": true + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + } + ] + }, + { + "description": "nested unevaluatedProperties, outer true, inner false, properties outside", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "unevaluatedProperties": false + } + ], + "unevaluatedProperties": true + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": false + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "nested unevaluatedProperties, outer true, inner false, properties inside", + "schema": { + "type": "object", + "allOf": [ + { + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": false + } + ], + "unevaluatedProperties": true + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "cousin unevaluatedProperties, true and false, true with properties", + "schema": { + "type": "object", + "allOf": [ + { + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": true + }, + { + "unevaluatedProperties": false + } + ] + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": false + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "cousin unevaluatedProperties, true and false, false with properties", + "schema": { + "type": "object", + "allOf": [ + { + "unevaluatedProperties": true + }, + { + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": false + } + ] + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": 
"with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "property is evaluated in an uncle schema to unevaluatedProperties", + "comment": "see https://stackoverflow.com/questions/66936884/deeply-nested-unevaluatedproperties-and-their-expectations", + "schema": { + "type": "object", + "properties": { + "foo": { + "type": "object", + "properties": { + "bar": { + "type": "string" + } + }, + "unevaluatedProperties": false + } + }, + "anyOf": [ + { + "properties": { + "foo": { + "properties": { + "faz": { + "type": "string" + } + } + } + } + } + ] + }, + "tests": [ + { + "description": "no extra properties", + "data": { + "foo": { + "bar": "test" + } + }, + "valid": true + }, + { + "description": "uncle keyword evaluation is not significant", + "data": { + "foo": { + "bar": "test", + "faz": "test" + } + }, + "valid": false + } + ] + }, + { + "description": "in-place applicator siblings, allOf has unevaluated", + "schema": { + "type": "object", + "allOf": [ + { + "properties": { + "foo": true + }, + "unevaluatedProperties": false + } + ], + "anyOf": [ + { + "properties": { + "bar": true + } + } + ] + }, + "tests": [ + { + "description": "base case: both properties present", + "data": { + "foo": 1, + "bar": 1 + }, + "valid": false + }, + { + "description": "in place applicator siblings, bar is missing", + "data": { + "foo": 1 + }, + "valid": true + }, + { + "description": "in place applicator siblings, foo is missing", + "data": { + "bar": 1 + }, + "valid": false + } + ] + }, + { + "description": "in-place applicator siblings, anyOf has unevaluated", + "schema": { + "type": "object", + "allOf": [ + { + "properties": { + "foo": true + } + } + ], + "anyOf": [ + { + "properties": { + "bar": true + }, + "unevaluatedProperties": false + } + ] + }, + "tests": [ + { + "description": "base case: both properties present", + "data": { + "foo": 1, + "bar": 1 + }, + "valid": false + }, + { + "description": "in place applicator siblings, bar is missing", + "data": { + "foo": 1 + }, + "valid": false + }, + { + "description": "in place applicator siblings, foo is missing", + "data": { + "bar": 1 + }, + "valid": true + } + ] + }, + { + "description": "unevaluatedProperties + single cyclic ref", + "schema": { + "type": "object", + "properties": { + "x": { "$ref": "#" } + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "Empty is valid", + "data": {}, + "valid": true + }, + { + "description": "Single is valid", + "data": { "x": {} }, + "valid": true + }, + { + "description": "Unevaluated on 1st level is invalid", + "data": { "x": {}, "y": {} }, + "valid": false + }, + { + "description": "Nested is valid", + "data": { "x": { "x": {} } }, + "valid": true + }, + { + "description": "Unevaluated on 2nd level is invalid", + "data": { "x": { "x": {}, "y": {} } }, + "valid": false + }, + { + "description": "Deep nested is valid", + "data": { "x": { "x": { "x": {} } } }, + "valid": true + }, + { + "description": "Unevaluated on 3rd level is invalid", + "data": { "x": { "x": { "x": {}, "y": {} } } }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties + ref inside allOf / oneOf", + "schema": { + "$defs": { + "one": { + "properties": { "a": true } + }, + "two": { + "required": ["x"], + "properties": { "x": true } + } + }, + "allOf": [ + { "$ref": "#/$defs/one" }, + { "properties": { "b": true } }, + { + "oneOf": [ + { "$ref": "#/$defs/two" }, + { + "required": ["y"], + "properties": { "y": true } + 
} + ] + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "Empty is invalid (no x or y)", + "data": {}, + "valid": false + }, + { + "description": "a and b are invalid (no x or y)", + "data": { "a": 1, "b": 1 }, + "valid": false + }, + { + "description": "x and y are invalid", + "data": { "x": 1, "y": 1 }, + "valid": false + }, + { + "description": "a and x are valid", + "data": { "a": 1, "x": 1 }, + "valid": true + }, + { + "description": "a and y are valid", + "data": { "a": 1, "y": 1 }, + "valid": true + }, + { + "description": "a and b and x are valid", + "data": { "a": 1, "b": 1, "x": 1 }, + "valid": true + }, + { + "description": "a and b and y are valid", + "data": { "a": 1, "b": 1, "y": 1 }, + "valid": true + }, + { + "description": "a and b and x and y are invalid", + "data": { "a": 1, "b": 1, "x": 1, "y": 1 }, + "valid": false + } + ] + }, + { + "description": "dynamic evaluation inside nested refs", + "schema": { + "$defs": { + "one": { + "oneOf": [ + { "$ref": "#/$defs/two" }, + { "required": ["b"], "properties": { "b": true } }, + { "required": ["xx"], "patternProperties": { "x": true } }, + { "required": ["all"], "unevaluatedProperties": true } + ] + }, + "two": { + "oneOf": [ + { "required": ["c"], "properties": { "c": true } }, + { "required": ["d"], "properties": { "d": true } } + ] + } + }, + "oneOf": [ + { "$ref": "#/$defs/one" }, + { "required": ["a"], "properties": { "a": true } } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "Empty is invalid", + "data": {}, + "valid": false + }, + { + "description": "a is valid", + "data": { "a": 1 }, + "valid": true + }, + { + "description": "b is valid", + "data": { "b": 1 }, + "valid": true + }, + { + "description": "c is valid", + "data": { "c": 1 }, + "valid": true + }, + { + "description": "d is valid", + "data": { "d": 1 }, + "valid": true + }, + { + "description": "a + b is invalid", + "data": { "a": 1, "b": 1 }, + "valid": false + }, + { + "description": "a + c is invalid", + "data": { "a": 1, "c": 1 }, + "valid": false + }, + { + "description": "a + d is invalid", + "data": { "a": 1, "d": 1 }, + "valid": false + }, + { + "description": "b + c is invalid", + "data": { "b": 1, "c": 1 }, + "valid": false + }, + { + "description": "b + d is invalid", + "data": { "b": 1, "d": 1 }, + "valid": false + }, + { + "description": "c + d is invalid", + "data": { "c": 1, "d": 1 }, + "valid": false + }, + { + "description": "xx is valid", + "data": { "xx": 1 }, + "valid": true + }, + { + "description": "xx + foox is valid", + "data": { "xx": 1, "foox": 1 }, + "valid": true + }, + { + "description": "xx + foo is invalid", + "data": { "xx": 1, "foo": 1 }, + "valid": false + }, + { + "description": "xx + a is invalid", + "data": { "xx": 1, "a": 1 }, + "valid": false + }, + { + "description": "xx + b is invalid", + "data": { "xx": 1, "b": 1 }, + "valid": false + }, + { + "description": "xx + c is invalid", + "data": { "xx": 1, "c": 1 }, + "valid": false + }, + { + "description": "xx + d is invalid", + "data": { "xx": 1, "d": 1 }, + "valid": false + }, + { + "description": "all is valid", + "data": { "all": 1 }, + "valid": true + }, + { + "description": "all + foo is valid", + "data": { "all": 1, "foo": 1 }, + "valid": true + }, + { + "description": "all + a is invalid", + "data": { "all": 1, "a": 1 }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties depends on adjacent contains", + "schema": { + "properties": { + "foo": { "type": "number" } + }, + "contains":
{ "type": "string" }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "bar is evaluated by contains", + "data": { "foo": 1, "bar": "foo" }, + "valid": true + }, + { + "description": "contains fails, bar is not evaluated", + "data": { "foo": 1, "bar": 2 }, + "valid": false + }, + { + "description": "contains passes, bar is not evaluated", + "data": { "foo": 1, "bar": 2, "baz": "foo" }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties depends on multiple nested contains", + "schema": { + "allOf": [ + { "contains": { "multipleOf": 2 } }, + { "contains": { "multipleOf": 3 } } + ], + "unevaluatedProperties": { "multipleOf": 5 } + }, + "tests": [ + { + "description": "5 not evaluated, passes unevaluatedItems", + "data": { "a": 2, "b": 3, "c": 4, "d": 5, "e": 6 }, + "valid": true + }, + { + "description": "7 not evaluated, fails unevaluatedItems", + "data": { "a": 2, "b": 3, "c": 4, "d": 7, "e": 8 }, + "valid": false + } + ] + }, + { + "description": "non-object instances are valid", + "schema": {"unevaluatedProperties": false}, + "tests": [ + { + "description": "ignores booleans", + "data": true, + "valid": true + }, + { + "description": "ignores integers", + "data": 123, + "valid": true + }, + { + "description": "ignores floats", + "data": 1.0, + "valid": true + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores strings", + "data": "foo", + "valid": true + }, + { + "description": "ignores null", + "data": null, + "valid": true + } + ] + }, + { + "description": "unevaluatedProperties with null valued instance properties", + "schema": { + "unevaluatedProperties": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null valued properties", + "data": {"foo": null}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/uniqueItems.json b/vendor/jsonschema/json/tests/draft-next/uniqueItems.json new file mode 100644 index 00000000..85c619d9 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/uniqueItems.json @@ -0,0 +1,404 @@ +[ + { + "description": "uniqueItems validation", + "schema": {"uniqueItems": true}, + "tests": [ + { + "description": "unique array of integers is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "non-unique array of integers is invalid", + "data": [1, 1], + "valid": false + }, + { + "description": "non-unique array of more than two integers is invalid", + "data": [1, 2, 1], + "valid": false + }, + { + "description": "numbers are unique if mathematically unequal", + "data": [1.0, 1.00, 1], + "valid": false + }, + { + "description": "false is not equal to zero", + "data": [0, false], + "valid": true + }, + { + "description": "true is not equal to one", + "data": [1, true], + "valid": true + }, + { + "description": "unique array of strings is valid", + "data": ["foo", "bar", "baz"], + "valid": true + }, + { + "description": "non-unique array of strings is invalid", + "data": ["foo", "bar", "foo"], + "valid": false + }, + { + "description": "unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "baz"}], + "valid": true + }, + { + "description": "non-unique array of objects is invalid", + "data": [{"foo": "bar"}, {"foo": "bar"}], + "valid": false + }, + { + "description": "unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : false}}} + ], + "valid": true + }, + { + "description": "non-unique array of nested objects is 
invalid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : true}}} + ], + "valid": false + }, + { + "description": "unique array of arrays is valid", + "data": [["foo"], ["bar"]], + "valid": true + }, + { + "description": "non-unique array of arrays is invalid", + "data": [["foo"], ["foo"]], + "valid": false + }, + { + "description": "non-unique array of more than two arrays is invalid", + "data": [["foo"], ["bar"], ["foo"]], + "valid": false + }, + { + "description": "1 and true are unique", + "data": [1, true], + "valid": true + }, + { + "description": "0 and false are unique", + "data": [0, false], + "valid": true + }, + { + "description": "[1] and [true] are unique", + "data": [[1], [true]], + "valid": true + }, + { + "description": "[0] and [false] are unique", + "data": [[0], [false]], + "valid": true + }, + { + "description": "nested [1] and [true] are unique", + "data": [[[1], "foo"], [[true], "foo"]], + "valid": true + }, + { + "description": "nested [0] and [false] are unique", + "data": [[[0], "foo"], [[false], "foo"]], + "valid": true + }, + { + "description": "unique heterogeneous types are valid", + "data": [{}, [1], true, null, 1, "{}"], + "valid": true + }, + { + "description": "non-unique heterogeneous types are invalid", + "data": [{}, [1], true, null, {}, 1], + "valid": false + }, + { + "description": "different objects are unique", + "data": [{"a": 1, "b": 2}, {"a": 2, "b": 1}], + "valid": true + }, + { + "description": "objects are non-unique despite key order", + "data": [{"a": 1, "b": 2}, {"b": 2, "a": 1}], + "valid": false + }, + { + "description": "{\"a\": false} and {\"a\": 0} are unique", + "data": [{"a": false}, {"a": 0}], + "valid": true + }, + { + "description": "{\"a\": true} and {\"a\": 1} are unique", + "data": [{"a": true}, {"a": 1}], + "valid": true + } + ] + }, + { + "description": "uniqueItems with an array of items", + "schema": { + "prefixItems": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": true + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is not valid", + "data": [false, false], + "valid": false + }, + { + "description": "[true, true] from items array is not valid", + "data": [true, true], + "valid": false + }, + { + "description": "unique array extended from [false, true] is valid", + "data": [false, true, "foo", "bar"], + "valid": true + }, + { + "description": "unique array extended from [true, false] is valid", + "data": [true, false, "foo", "bar"], + "valid": true + }, + { + "description": "non-unique array extended from [false, true] is not valid", + "data": [false, true, "foo", "foo"], + "valid": false + }, + { + "description": "non-unique array extended from [true, false] is not valid", + "data": [true, false, "foo", "foo"], + "valid": false + } + ] + }, + { + "description": "uniqueItems with an array of items and additionalItems=false", + "schema": { + "prefixItems": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": true, + "items": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is not valid", + 
"data": [false, false], + "valid": false + }, + { + "description": "[true, true] from items array is not valid", + "data": [true, true], + "valid": false + }, + { + "description": "extra items are invalid even if unique", + "data": [false, true, null], + "valid": false + } + ] + }, + { + "description": "uniqueItems=false validation", + "schema": { "uniqueItems": false }, + "tests": [ + { + "description": "unique array of integers is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "non-unique array of integers is valid", + "data": [1, 1], + "valid": true + }, + { + "description": "numbers are unique if mathematically unequal", + "data": [1.0, 1.00, 1], + "valid": true + }, + { + "description": "false is not equal to zero", + "data": [0, false], + "valid": true + }, + { + "description": "true is not equal to one", + "data": [1, true], + "valid": true + }, + { + "description": "unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "baz"}], + "valid": true + }, + { + "description": "non-unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "bar"}], + "valid": true + }, + { + "description": "unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : false}}} + ], + "valid": true + }, + { + "description": "non-unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : true}}} + ], + "valid": true + }, + { + "description": "unique array of arrays is valid", + "data": [["foo"], ["bar"]], + "valid": true + }, + { + "description": "non-unique array of arrays is valid", + "data": [["foo"], ["foo"]], + "valid": true + }, + { + "description": "1 and true are unique", + "data": [1, true], + "valid": true + }, + { + "description": "0 and false are unique", + "data": [0, false], + "valid": true + }, + { + "description": "unique heterogeneous types are valid", + "data": [{}, [1], true, null, 1], + "valid": true + }, + { + "description": "non-unique heterogeneous types are valid", + "data": [{}, [1], true, null, {}, 1], + "valid": true + } + ] + }, + { + "description": "uniqueItems=false with an array of items", + "schema": { + "prefixItems": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is valid", + "data": [false, false], + "valid": true + }, + { + "description": "[true, true] from items array is valid", + "data": [true, true], + "valid": true + }, + { + "description": "unique array extended from [false, true] is valid", + "data": [false, true, "foo", "bar"], + "valid": true + }, + { + "description": "unique array extended from [true, false] is valid", + "data": [true, false, "foo", "bar"], + "valid": true + }, + { + "description": "non-unique array extended from [false, true] is valid", + "data": [false, true, "foo", "foo"], + "valid": true + }, + { + "description": "non-unique array extended from [true, false] is valid", + "data": [true, false, "foo", "foo"], + "valid": true + } + ] + }, + { + "description": "uniqueItems=false with an array of items and additionalItems=false", + "schema": { + "prefixItems": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": false, + "items": false + }, + "tests": [ + { + 
"description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is valid", + "data": [false, false], + "valid": true + }, + { + "description": "[true, true] from items array is valid", + "data": [true, true], + "valid": true + }, + { + "description": "extra items are invalid even if unique", + "data": [false, true, null], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/unknownKeyword.json b/vendor/jsonschema/json/tests/draft-next/unknownKeyword.json new file mode 100644 index 00000000..e46657d8 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/unknownKeyword.json @@ -0,0 +1,56 @@ +[ + { + "description": "$id inside an unknown keyword is not a real identifier", + "comment": "the implementation must not be confused by an $id in locations we do not know how to parse", + "schema": { + "$defs": { + "id_in_unknown0": { + "not": { + "array_of_schemas": [ + { + "$id": "https://localhost:1234/unknownKeyword/my_identifier.json", + "type": "null" + } + ] + } + }, + "real_id_in_schema": { + "$id": "https://localhost:1234/unknownKeyword/my_identifier.json", + "type": "string" + }, + "id_in_unknown1": { + "not": { + "object_of_schemas": { + "foo": { + "$id": "https://localhost:1234/unknownKeyword/my_identifier.json", + "type": "integer" + } + } + } + } + }, + "anyOf": [ + { "$ref": "#/$defs/id_in_unknown0" }, + { "$ref": "#/$defs/id_in_unknown1" }, + { "$ref": "https://localhost:1234/unknownKeyword/my_identifier.json" } + ] + }, + "tests": [ + { + "description": "type matches second anyOf, which has a real schema in it", + "data": "a string", + "valid": true + }, + { + "description": "type matches non-schema in first anyOf", + "data": null, + "valid": false + }, + { + "description": "type matches non-schema in third anyOf", + "data": 1, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft-next/vocabulary.json b/vendor/jsonschema/json/tests/draft-next/vocabulary.json new file mode 100644 index 00000000..65b81ea1 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft-next/vocabulary.json @@ -0,0 +1,38 @@ +[ + { + "description": "schema that uses custom metaschema with with no validation vocabulary", + "schema": { + "$id": "https://schema/using/no/validation", + "$schema": "http://localhost:1234/draft-next/metaschema-no-validation.json", + "properties": { + "badProperty": false, + "numberProperty": { + "minimum": 10 + } + } + }, + "tests": [ + { + "description": "applicator vocabulary still works", + "data": { + "badProperty": "this property should not exist" + }, + "valid": false + }, + { + "description": "no validation: valid number", + "data": { + "numberProperty": 20 + }, + "valid": true + }, + { + "description": "no validation: invalid number, but it still validates", + "data": { + "numberProperty": 1 + }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/additionalItems.json b/vendor/jsonschema/json/tests/draft2019-09/additionalItems.json new file mode 100644 index 00000000..deb44fd3 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/additionalItems.json @@ -0,0 +1,164 @@ +[ + { + "description": "additionalItems as schema", + "schema": { + "items": [{}], + "additionalItems": {"type": "integer"} + }, + "tests": [ + { + "description": "additional items match schema", + "data": [ 
null, 2, 3, 4 ], + "valid": true + }, + { + "description": "additional items do not match schema", + "data": [ null, 2, 3, "foo" ], + "valid": false + } + ] + }, + { + "description": "when items is schema, additionalItems does nothing", + "schema": { + "items": {}, + "additionalItems": false + }, + "tests": [ + { + "description": "all items match schema", + "data": [ 1, 2, 3, 4, 5 ], + "valid": true + } + ] + }, + { + "description": "array of items with no additionalItems permitted", + "schema": { + "items": [{}, {}, {}], + "additionalItems": false + }, + "tests": [ + { + "description": "empty array", + "data": [ ], + "valid": true + }, + { + "description": "fewer number of items present (1)", + "data": [ 1 ], + "valid": true + }, + { + "description": "fewer number of items present (2)", + "data": [ 1, 2 ], + "valid": true + }, + { + "description": "equal number of items present", + "data": [ 1, 2, 3 ], + "valid": true + }, + { + "description": "additional items are not permitted", + "data": [ 1, 2, 3, 4 ], + "valid": false + } + ] + }, + { + "description": "additionalItems as false without items", + "schema": {"additionalItems": false}, + "tests": [ + { + "description": + "items defaults to empty schema so everything is valid", + "data": [ 1, 2, 3, 4, 5 ], + "valid": true + }, + { + "description": "ignores non-arrays", + "data": {"foo" : "bar"}, + "valid": true + } + ] + }, + { + "description": "additionalItems are allowed by default", + "schema": {"items": [{"type": "integer"}]}, + "tests": [ + { + "description": "only the first item is validated", + "data": [1, "foo", false], + "valid": true + } + ] + }, + { + "description": "additionalItems does not look in applicators, valid case", + "schema": { + "allOf": [ + { "items": [ { "type": "integer" } ] } + ], + "additionalItems": { "type": "boolean" } + }, + "tests": [ + { + "description": "items defined in allOf are not examined", + "data": [ 1, null ], + "valid": true + } + ] + }, + { + "description": "additionalItems does not look in applicators, invalid case", + "schema": { + "allOf": [ + { "items": [ { "type": "integer" }, { "type": "string" } ] } + ], + "items": [ {"type": "integer" } ], + "additionalItems": { "type": "boolean" } + }, + "tests": [ + { + "description": "items defined in allOf are not examined", + "data": [ 1, "hello" ], + "valid": false + } + ] + }, + { + "description": "items validation adjusts the starting index for additionalItems", + "schema": { + "items": [ { "type": "string" } ], + "additionalItems": { "type": "integer" } + }, + "tests": [ + { + "description": "valid items", + "data": [ "x", 2, 3 ], + "valid": true + }, + { + "description": "wrong type of second item", + "data": [ "x", "y" ], + "valid": false + } + ] + }, + { + "description": "additionalItems with null instance elements", + "schema": { + "additionalItems": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/additionalProperties.json b/vendor/jsonschema/json/tests/draft2019-09/additionalProperties.json new file mode 100644 index 00000000..0f8e1627 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/additionalProperties.json @@ -0,0 +1,147 @@ +[ + { + "description": + "additionalProperties being false does not allow other properties", + "schema": { + "properties": {"foo": {}, "bar": {}}, + "patternProperties": { "^v": {} }, + "additionalProperties": false + }, + "tests": [ + { + "description": "no 
additional properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "an additional property is invalid", + "data": {"foo" : 1, "bar" : 2, "quux" : "boom"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [1, 2, 3], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobarbaz", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + }, + { + "description": "patternProperties are not additional properties", + "data": {"foo":1, "vroom": 2}, + "valid": true + } + ] + }, + { + "description": "non-ASCII pattern with additionalProperties", + "schema": { + "patternProperties": {"^á": {}}, + "additionalProperties": false + }, + "tests": [ + { + "description": "matching the pattern is valid", + "data": {"ármányos": 2}, + "valid": true + }, + { + "description": "not matching the pattern is invalid", + "data": {"élmény": 2}, + "valid": false + } + ] + }, + { + "description": "additionalProperties with schema", + "schema": { + "properties": {"foo": {}, "bar": {}}, + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "no additional properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "an additional valid property is valid", + "data": {"foo" : 1, "bar" : 2, "quux" : true}, + "valid": true + }, + { + "description": "an additional invalid property is invalid", + "data": {"foo" : 1, "bar" : 2, "quux" : 12}, + "valid": false + } + ] + }, + { + "description": + "additionalProperties can exist by itself", + "schema": { + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "an additional valid property is valid", + "data": {"foo" : true}, + "valid": true + }, + { + "description": "an additional invalid property is invalid", + "data": {"foo" : 1}, + "valid": false + } + ] + }, + { + "description": "additionalProperties are allowed by default", + "schema": {"properties": {"foo": {}, "bar": {}}}, + "tests": [ + { + "description": "additional properties are allowed", + "data": {"foo": 1, "bar": 2, "quux": true}, + "valid": true + } + ] + }, + { + "description": "additionalProperties does not look in applicators", + "schema": { + "allOf": [ + {"properties": {"foo": {}}} + ], + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "properties defined in allOf are not examined", + "data": {"foo": 1, "bar": true}, + "valid": false + } + ] + }, + { + "description": "additionalProperties with null valued instance properties", + "schema": { + "additionalProperties": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foo": null}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/allOf.json b/vendor/jsonschema/json/tests/draft2019-09/allOf.json new file mode 100644 index 00000000..ec9319e1 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/allOf.json @@ -0,0 +1,294 @@ +[ + { + "description": "allOf", + "schema": { + "allOf": [ + { + "properties": { + "bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "allOf", + "data": {"foo": "baz", "bar": 2}, + "valid": true + }, + { + "description": "mismatch second", + "data": {"foo": "baz"}, + "valid": false + }, + { + "description": "mismatch first", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "wrong 
type", + "data": {"foo": "baz", "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "allOf with base schema", + "schema": { + "properties": {"bar": {"type": "integer"}}, + "required": ["bar"], + "allOf" : [ + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + }, + { + "properties": { + "baz": {"type": "null"} + }, + "required": ["baz"] + } + ] + }, + "tests": [ + { + "description": "valid", + "data": {"foo": "quux", "bar": 2, "baz": null}, + "valid": true + }, + { + "description": "mismatch base schema", + "data": {"foo": "quux", "baz": null}, + "valid": false + }, + { + "description": "mismatch first allOf", + "data": {"bar": 2, "baz": null}, + "valid": false + }, + { + "description": "mismatch second allOf", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "mismatch both", + "data": {"bar": 2}, + "valid": false + } + ] + }, + { + "description": "allOf simple types", + "schema": { + "allOf": [ + {"maximum": 30}, + {"minimum": 20} + ] + }, + "tests": [ + { + "description": "valid", + "data": 25, + "valid": true + }, + { + "description": "mismatch one", + "data": 35, + "valid": false + } + ] + }, + { + "description": "allOf with boolean schemas, all true", + "schema": {"allOf": [true, true]}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "allOf with boolean schemas, some false", + "schema": {"allOf": [true, false]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "allOf with boolean schemas, all false", + "schema": {"allOf": [false, false]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "allOf with one empty schema", + "schema": { + "allOf": [ + {} + ] + }, + "tests": [ + { + "description": "any data is valid", + "data": 1, + "valid": true + } + ] + }, + { + "description": "allOf with two empty schemas", + "schema": { + "allOf": [ + {}, + {} + ] + }, + "tests": [ + { + "description": "any data is valid", + "data": 1, + "valid": true + } + ] + }, + { + "description": "allOf with the first empty schema", + "schema": { + "allOf": [ + {}, + { "type": "number" } + ] + }, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "allOf with the last empty schema", + "schema": { + "allOf": [ + { "type": "number" }, + {} + ] + }, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "nested allOf, to check validation semantics", + "schema": { + "allOf": [ + { + "allOf": [ + { + "type": "null" + } + ] + } + ] + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "anything non-null is invalid", + "data": 123, + "valid": false + } + ] + }, + { + "description": "allOf combined with anyOf, oneOf", + "schema": { + "allOf": [ { "multipleOf": 2 } ], + "anyOf": [ { "multipleOf": 3 } ], + "oneOf": [ { "multipleOf": 5 } ] + }, + "tests": [ + { + "description": "allOf: false, anyOf: false, oneOf: false", + "data": 1, + "valid": false + }, + { + "description": "allOf: false, anyOf: false, oneOf: true", + "data": 5, + "valid": false + }, + { + "description": "allOf: false, 
anyOf: true, oneOf: false", + "data": 3, + "valid": false + }, + { + "description": "allOf: false, anyOf: true, oneOf: true", + "data": 15, + "valid": false + }, + { + "description": "allOf: true, anyOf: false, oneOf: false", + "data": 2, + "valid": false + }, + { + "description": "allOf: true, anyOf: false, oneOf: true", + "data": 10, + "valid": false + }, + { + "description": "allOf: true, anyOf: true, oneOf: false", + "data": 6, + "valid": false + }, + { + "description": "allOf: true, anyOf: true, oneOf: true", + "data": 30, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/anchor.json b/vendor/jsonschema/json/tests/draft2019-09/anchor.json new file mode 100644 index 00000000..7a9e32d3 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/anchor.json @@ -0,0 +1,226 @@ +[ + { + "description": "Location-independent identifier", + "schema": { + "$ref": "#foo", + "$defs": { + "A": { + "$anchor": "foo", + "type": "integer" + } + } + }, + "tests": [ + { + "data": 1, + "description": "match", + "valid": true + }, + { + "data": "a", + "description": "mismatch", + "valid": false + } + ] + }, + { + "description": "Location-independent identifier with absolute URI", + "schema": { + "$ref": "http://localhost:1234/bar#foo", + "$defs": { + "A": { + "$id": "http://localhost:1234/bar", + "$anchor": "foo", + "type": "integer" + } + } + }, + "tests": [ + { + "data": 1, + "description": "match", + "valid": true + }, + { + "data": "a", + "description": "mismatch", + "valid": false + } + ] + }, + { + "description": "Location-independent identifier with base URI change in subschema", + "schema": { + "$id": "http://localhost:1234/root", + "$ref": "http://localhost:1234/nested.json#foo", + "$defs": { + "A": { + "$id": "nested.json", + "$defs": { + "B": { + "$anchor": "foo", + "type": "integer" + } + } + } + } + }, + "tests": [ + { + "data": 1, + "description": "match", + "valid": true + }, + { + "data": "a", + "description": "mismatch", + "valid": false + } + ] + }, + { + "description": "$anchor inside an enum is not a real identifier", + "comment": "the implementation must not be confused by an $anchor buried in the enum", + "schema": { + "$defs": { + "anchor_in_enum": { + "enum": [ + { + "$anchor": "my_anchor", + "type": "null" + } + ] + }, + "real_identifier_in_schema": { + "$anchor": "my_anchor", + "type": "string" + }, + "zzz_anchor_in_const": { + "const": { + "$anchor": "my_anchor", + "type": "null" + } + } + }, + "anyOf": [ + { "$ref": "#/$defs/anchor_in_enum" }, + { "$ref": "#my_anchor" } + ] + }, + "tests": [ + { + "description": "exact match to enum, and type matches", + "data": { + "$anchor": "my_anchor", + "type": "null" + }, + "valid": true + }, + { + "description": "in implementations that strip $anchor, this may match either $def", + "data": { + "type": "null" + }, + "valid": false + }, + { + "description": "match $ref to $anchor", + "data": "a string to match #/$defs/anchor_in_enum", + "valid": true + }, + { + "description": "no match on enum or $ref to $anchor", + "data": 1, + "valid": false + } + ] + }, + { + "description": "same $anchor with different base uri", + "schema": { + "$id": "http://localhost:1234/foobar", + "$defs": { + "A": { + "$id": "child1", + "allOf": [ + { + "$id": "child2", + "$anchor": "my_anchor", + "type": "number" + }, + { + "$anchor": "my_anchor", + "type": "string" + } + ] + } + }, + "$ref": "child1#my_anchor" + }, + "tests": [ + { + "description": "$ref resolves to /$defs/A/allOf/1", + "data": "a", + "valid": true + }, + { 
+ "description": "$ref does not resolve to /$defs/A/allOf/0", + "data": 1, + "valid": false + } + ] + }, + { + "description": "non-schema object containing an $anchor property", + "schema": { + "$defs": { + "const_not_anchor": { + "const": { + "$anchor": "not_a_real_anchor" + } + } + }, + "if": { + "const": "skip not_a_real_anchor" + }, + "then": true, + "else" : { + "$ref": "#/$defs/const_not_anchor" + } + }, + "tests": [ + { + "description": "skip traversing definition for a valid result", + "data": "skip not_a_real_anchor", + "valid": true + }, + { + "description": "const at const_not_anchor does not match", + "data": 1, + "valid": false + } + ] + }, + { + "description": "invalid anchors", + "comment": "Section 8.2.3", + "schema": { "$ref": "https://json-schema.org/draft/2019-09/schema" }, + "tests": [ + { + "description": "MUST start with a letter (and not #)", + "data": { "$anchor" : "#foo" }, + "valid": false + }, + { + "description": "JSON pointers are not valid", + "data": { "$anchor" : "/a/b" }, + "valid": false + }, + { + "description": "invalid with valid beginning", + "data": { "$anchor" : "foo#something" }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/anyOf.json b/vendor/jsonschema/json/tests/draft2019-09/anyOf.json new file mode 100644 index 00000000..ab5eb386 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/anyOf.json @@ -0,0 +1,189 @@ +[ + { + "description": "anyOf", + "schema": { + "anyOf": [ + { + "type": "integer" + }, + { + "minimum": 2 + } + ] + }, + "tests": [ + { + "description": "first anyOf valid", + "data": 1, + "valid": true + }, + { + "description": "second anyOf valid", + "data": 2.5, + "valid": true + }, + { + "description": "both anyOf valid", + "data": 3, + "valid": true + }, + { + "description": "neither anyOf valid", + "data": 1.5, + "valid": false + } + ] + }, + { + "description": "anyOf with base schema", + "schema": { + "type": "string", + "anyOf" : [ + { + "maxLength": 2 + }, + { + "minLength": 4 + } + ] + }, + "tests": [ + { + "description": "mismatch base schema", + "data": 3, + "valid": false + }, + { + "description": "one anyOf valid", + "data": "foobar", + "valid": true + }, + { + "description": "both anyOf invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "anyOf with boolean schemas, all true", + "schema": {"anyOf": [true, true]}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "anyOf with boolean schemas, some true", + "schema": {"anyOf": [true, false]}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "anyOf with boolean schemas, all false", + "schema": {"anyOf": [false, false]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "anyOf complex types", + "schema": { + "anyOf": [ + { + "properties": { + "bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "first anyOf valid (complex)", + "data": {"bar": 2}, + "valid": true + }, + { + "description": "second anyOf valid (complex)", + "data": {"foo": "baz"}, + "valid": true + }, + { + "description": "both anyOf valid (complex)", + "data": {"foo": "baz", "bar": 2}, + "valid": true + }, + { + "description": "neither anyOf valid (complex)", + "data": {"foo": 2, "bar": 
"quux"}, + "valid": false + } + ] + }, + { + "description": "anyOf with one empty schema", + "schema": { + "anyOf": [ + { "type": "number" }, + {} + ] + }, + "tests": [ + { + "description": "string is valid", + "data": "foo", + "valid": true + }, + { + "description": "number is valid", + "data": 123, + "valid": true + } + ] + }, + { + "description": "nested anyOf, to check validation semantics", + "schema": { + "anyOf": [ + { + "anyOf": [ + { + "type": "null" + } + ] + } + ] + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "anything non-null is invalid", + "data": 123, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/boolean_schema.json b/vendor/jsonschema/json/tests/draft2019-09/boolean_schema.json new file mode 100644 index 00000000..6d40f23f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/boolean_schema.json @@ -0,0 +1,104 @@ +[ + { + "description": "boolean schema 'true'", + "schema": true, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "string is valid", + "data": "foo", + "valid": true + }, + { + "description": "boolean true is valid", + "data": true, + "valid": true + }, + { + "description": "boolean false is valid", + "data": false, + "valid": true + }, + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "object is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + }, + { + "description": "array is valid", + "data": ["foo"], + "valid": true + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "boolean schema 'false'", + "schema": false, + "tests": [ + { + "description": "number is invalid", + "data": 1, + "valid": false + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + }, + { + "description": "boolean true is invalid", + "data": true, + "valid": false + }, + { + "description": "boolean false is invalid", + "data": false, + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + }, + { + "description": "object is invalid", + "data": {"foo": "bar"}, + "valid": false + }, + { + "description": "empty object is invalid", + "data": {}, + "valid": false + }, + { + "description": "array is invalid", + "data": ["foo"], + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/const.json b/vendor/jsonschema/json/tests/draft2019-09/const.json new file mode 100644 index 00000000..1c2cafcc --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/const.json @@ -0,0 +1,342 @@ +[ + { + "description": "const validation", + "schema": {"const": 2}, + "tests": [ + { + "description": "same value is valid", + "data": 2, + "valid": true + }, + { + "description": "another value is invalid", + "data": 5, + "valid": false + }, + { + "description": "another type is invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "const with object", + "schema": {"const": {"foo": "bar", "baz": "bax"}}, + "tests": [ + { + "description": "same object is valid", + "data": {"foo": "bar", "baz": "bax"}, + "valid": true + }, + { + "description": "same object with different property order is valid", + "data": {"baz": "bax", "foo": "bar"}, + "valid": true + }, + 
{ + "description": "another object is invalid", + "data": {"foo": "bar"}, + "valid": false + }, + { + "description": "another type is invalid", + "data": [1, 2], + "valid": false + } + ] + }, + { + "description": "const with array", + "schema": {"const": [{ "foo": "bar" }]}, + "tests": [ + { + "description": "same array is valid", + "data": [{"foo": "bar"}], + "valid": true + }, + { + "description": "another array item is invalid", + "data": [2], + "valid": false + }, + { + "description": "array with additional items is invalid", + "data": [1, 2, 3], + "valid": false + } + ] + }, + { + "description": "const with null", + "schema": {"const": null}, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "not null is invalid", + "data": 0, + "valid": false + } + ] + }, + { + "description": "const with false does not match 0", + "schema": {"const": false}, + "tests": [ + { + "description": "false is valid", + "data": false, + "valid": true + }, + { + "description": "integer zero is invalid", + "data": 0, + "valid": false + }, + { + "description": "float zero is invalid", + "data": 0.0, + "valid": false + } + ] + }, + { + "description": "const with true does not match 1", + "schema": {"const": true}, + "tests": [ + { + "description": "true is valid", + "data": true, + "valid": true + }, + { + "description": "integer one is invalid", + "data": 1, + "valid": false + }, + { + "description": "float one is invalid", + "data": 1.0, + "valid": false + } + ] + }, + { + "description": "const with [false] does not match [0]", + "schema": {"const": [false]}, + "tests": [ + { + "description": "[false] is valid", + "data": [false], + "valid": true + }, + { + "description": "[0] is invalid", + "data": [0], + "valid": false + }, + { + "description": "[0.0] is invalid", + "data": [0.0], + "valid": false + } + ] + }, + { + "description": "const with [true] does not match [1]", + "schema": {"const": [true]}, + "tests": [ + { + "description": "[true] is valid", + "data": [true], + "valid": true + }, + { + "description": "[1] is invalid", + "data": [1], + "valid": false + }, + { + "description": "[1.0] is invalid", + "data": [1.0], + "valid": false + } + ] + }, + { + "description": "const with {\"a\": false} does not match {\"a\": 0}", + "schema": {"const": {"a": false}}, + "tests": [ + { + "description": "{\"a\": false} is valid", + "data": {"a": false}, + "valid": true + }, + { + "description": "{\"a\": 0} is invalid", + "data": {"a": 0}, + "valid": false + }, + { + "description": "{\"a\": 0.0} is invalid", + "data": {"a": 0.0}, + "valid": false + } + ] + }, + { + "description": "const with {\"a\": true} does not match {\"a\": 1}", + "schema": {"const": {"a": true}}, + "tests": [ + { + "description": "{\"a\": true} is valid", + "data": {"a": true}, + "valid": true + }, + { + "description": "{\"a\": 1} is invalid", + "data": {"a": 1}, + "valid": false + }, + { + "description": "{\"a\": 1.0} is invalid", + "data": {"a": 1.0}, + "valid": false + } + ] + }, + { + "description": "const with 0 does not match other zero-like types", + "schema": {"const": 0}, + "tests": [ + { + "description": "false is invalid", + "data": false, + "valid": false + }, + { + "description": "integer zero is valid", + "data": 0, + "valid": true + }, + { + "description": "float zero is valid", + "data": 0.0, + "valid": true + }, + { + "description": "empty object is invalid", + "data": {}, + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false 
+ }, + { + "description": "empty string is invalid", + "data": "", + "valid": false + } + ] + }, + { + "description": "const with 1 does not match true", + "schema": {"const": 1}, + "tests": [ + { + "description": "true is invalid", + "data": true, + "valid": false + }, + { + "description": "integer one is valid", + "data": 1, + "valid": true + }, + { + "description": "float one is valid", + "data": 1.0, + "valid": true + } + ] + }, + { + "description": "const with -2.0 matches integer and float types", + "schema": {"const": -2.0}, + "tests": [ + { + "description": "integer -2 is valid", + "data": -2, + "valid": true + }, + { + "description": "integer 2 is invalid", + "data": 2, + "valid": false + }, + { + "description": "float -2.0 is valid", + "data": -2.0, + "valid": true + }, + { + "description": "float 2.0 is invalid", + "data": 2.0, + "valid": false + }, + { + "description": "float -2.00001 is invalid", + "data": -2.00001, + "valid": false + } + ] + }, + { + "description": "float and integers are equal up to 64-bit representation limits", + "schema": {"const": 9007199254740992}, + "tests": [ + { + "description": "integer is valid", + "data": 9007199254740992, + "valid": true + }, + { + "description": "integer minus one is invalid", + "data": 9007199254740991, + "valid": false + }, + { + "description": "float is valid", + "data": 9007199254740992.0, + "valid": true + }, + { + "description": "float minus one is invalid", + "data": 9007199254740991.0, + "valid": false + } + ] + }, + { + "description": "nul characters in strings", + "schema": { "const": "hello\u0000there" }, + "tests": [ + { + "description": "match string with nul", + "data": "hello\u0000there", + "valid": true + }, + { + "description": "do not match string lacking nul", + "data": "hellothere", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/contains.json b/vendor/jsonschema/json/tests/draft2019-09/contains.json new file mode 100644 index 00000000..2b1a5152 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/contains.json @@ -0,0 +1,165 @@ +[ + { + "description": "contains keyword validation", + "schema": { + "contains": {"minimum": 5} + }, + "tests": [ + { + "description": "array with item matching schema (5) is valid", + "data": [3, 4, 5], + "valid": true + }, + { + "description": "array with item matching schema (6) is valid", + "data": [3, 4, 6], + "valid": true + }, + { + "description": "array with two items matching schema (5, 6) is valid", + "data": [3, 4, 5, 6], + "valid": true + }, + { + "description": "array without items matching schema is invalid", + "data": [2, 3, 4], + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + }, + { + "description": "not array is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "contains keyword with const keyword", + "schema": { + "contains": { "const": 5 } + }, + "tests": [ + { + "description": "array with item 5 is valid", + "data": [3, 4, 5], + "valid": true + }, + { + "description": "array with two items 5 is valid", + "data": [3, 4, 5, 5], + "valid": true + }, + { + "description": "array without item 5 is invalid", + "data": [1, 2, 3, 4], + "valid": false + } + ] + }, + { + "description": "contains keyword with boolean schema true", + "schema": {"contains": true}, + "tests": [ + { + "description": "any non-empty array is valid", + "data": ["foo"], + "valid": true + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + } + ] + 
}, + { + "description": "contains keyword with boolean schema false", + "schema": {"contains": false}, + "tests": [ + { + "description": "any non-empty array is invalid", + "data": ["foo"], + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + }, + { + "description": "non-arrays are valid", + "data": "contains does not apply to strings", + "valid": true + } + ] + }, + { + "description": "items + contains", + "schema": { + "items": { "multipleOf": 2 }, + "contains": { "multipleOf": 3 } + }, + "tests": [ + { + "description": "matches items, does not match contains", + "data": [ 2, 4, 8 ], + "valid": false + }, + { + "description": "does not match items, matches contains", + "data": [ 3, 6, 9 ], + "valid": false + }, + { + "description": "matches both items and contains", + "data": [ 6, 12 ], + "valid": true + }, + { + "description": "matches neither items nor contains", + "data": [ 1, 5 ], + "valid": false + } + ] + }, + { + "description": "contains with false if subschema", + "schema": { + "contains": { + "if": false, + "else": true + } + }, + "tests": [ + { + "description": "any non-empty array is valid", + "data": ["foo"], + "valid": true + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + } + ] + }, + { + "description": "contains with null instance elements", + "schema": { + "contains": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null items", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/content.json b/vendor/jsonschema/json/tests/draft2019-09/content.json new file mode 100644 index 00000000..44688e82 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/content.json @@ -0,0 +1,127 @@ +[ + { + "description": "validation of string-encoded content based on media type", + "schema": { + "contentMediaType": "application/json" + }, + "tests": [ + { + "description": "a valid JSON document", + "data": "{\"foo\": \"bar\"}", + "valid": true + }, + { + "description": "an invalid JSON document; validates true", + "data": "{:}", + "valid": true + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + } + ] + }, + { + "description": "validation of binary string-encoding", + "schema": { + "contentEncoding": "base64" + }, + "tests": [ + { + "description": "a valid base64 string", + "data": "eyJmb28iOiAiYmFyIn0K", + "valid": true + }, + { + "description": "an invalid base64 string (% is not a valid character); validates true", + "data": "eyJmb28iOi%iYmFyIn0K", + "valid": true + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + } + ] + }, + { + "description": "validation of binary-encoded media type documents", + "schema": { + "contentMediaType": "application/json", + "contentEncoding": "base64" + }, + "tests": [ + { + "description": "a valid base64-encoded JSON document", + "data": "eyJmb28iOiAiYmFyIn0K", + "valid": true + }, + { + "description": "a validly-encoded invalid JSON document; validates true", + "data": "ezp9Cg==", + "valid": true + }, + { + "description": "an invalid base64 string that is valid JSON; validates true", + "data": "{}", + "valid": true + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + } + ] + }, + { + "description": "validation of binary-encoded media type documents with schema", + "schema": { + "contentMediaType": "application/json", + "contentEncoding": "base64", + "contentSchema": { "required": ["foo"], "properties": { 
"foo": { "type": "string" } } } + }, + "tests": [ + { + "description": "a valid base64-encoded JSON document", + "data": "eyJmb28iOiAiYmFyIn0K", + "valid": true + }, + { + "description": "another valid base64-encoded JSON document", + "data": "eyJib28iOiAyMCwgImZvbyI6ICJiYXoifQ==", + "valid": true + }, + { + "description": "an invalid base64-encoded JSON document; validates true", + "data": "eyJib28iOiAyMH0=", + "valid": true + }, + { + "description": "an empty object as a base64-encoded JSON document; validates true", + "data": "e30=", + "valid": true + }, + { + "description": "an empty array as a base64-encoded JSON document", + "data": "W10=", + "valid": true + }, + { + "description": "a validly-encoded invalid JSON document; validates true", + "data": "ezp9Cg==", + "valid": true + }, + { + "description": "an invalid base64 string that is valid JSON; validates true", + "data": "{}", + "valid": true + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/default.json b/vendor/jsonschema/json/tests/draft2019-09/default.json new file mode 100644 index 00000000..289a9b66 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/default.json @@ -0,0 +1,79 @@ +[ + { + "description": "invalid type for default", + "schema": { + "properties": { + "foo": { + "type": "integer", + "default": [] + } + } + }, + "tests": [ + { + "description": "valid when property is specified", + "data": {"foo": 13}, + "valid": true + }, + { + "description": "still valid when the invalid default is used", + "data": {}, + "valid": true + } + ] + }, + { + "description": "invalid string value for default", + "schema": { + "properties": { + "bar": { + "type": "string", + "minLength": 4, + "default": "bad" + } + } + }, + "tests": [ + { + "description": "valid when property is specified", + "data": {"bar": "good"}, + "valid": true + }, + { + "description": "still valid when the invalid default is used", + "data": {}, + "valid": true + } + ] + }, + { + "description": "the default keyword does not do anything if the property is missing", + "schema": { + "type": "object", + "properties": { + "alpha": { + "type": "number", + "maximum": 3, + "default": 5 + } + } + }, + "tests": [ + { + "description": "an explicit property value is checked against maximum (passing)", + "data": { "alpha": 1 }, + "valid": true + }, + { + "description": "an explicit property value is checked against maximum (failing)", + "data": { "alpha": 5 }, + "valid": false + }, + { + "description": "missing properties are not filled in with the default", + "data": {}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/defs.json b/vendor/jsonschema/json/tests/draft2019-09/defs.json new file mode 100644 index 00000000..70e9dc0b --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/defs.json @@ -0,0 +1,18 @@ +[ + { + "description": "validate definition against metaschema", + "schema": {"$ref": "https://json-schema.org/draft/2019-09/schema"}, + "tests": [ + { + "description": "valid definition schema", + "data": {"$defs": {"foo": {"type": "integer"}}}, + "valid": true + }, + { + "description": "invalid definition schema", + "data": {"$defs": {"foo": {"type": 1}}}, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/dependentRequired.json b/vendor/jsonschema/json/tests/draft2019-09/dependentRequired.json new file mode 100644 index 00000000..c817120d --- /dev/null +++ 
b/vendor/jsonschema/json/tests/draft2019-09/dependentRequired.json @@ -0,0 +1,142 @@ +[ + { + "description": "single dependency", + "schema": {"dependentRequired": {"bar": ["foo"]}}, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependant", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "with dependency", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["bar"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "empty dependents", + "schema": {"dependentRequired": {"bar": []}}, + "tests": [ + { + "description": "empty object", + "data": {}, + "valid": true + }, + { + "description": "object with one property", + "data": {"bar": 2}, + "valid": true + }, + { + "description": "non-object is valid", + "data": 1, + "valid": true + } + ] + }, + { + "description": "multiple dependents required", + "schema": {"dependentRequired": {"quux": ["foo", "bar"]}}, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependants", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "with dependencies", + "data": {"foo": 1, "bar": 2, "quux": 3}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"foo": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing other dependency", + "data": {"bar": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing both dependencies", + "data": {"quux": 1}, + "valid": false + } + ] + }, + { + "description": "dependencies with escaped characters", + "schema": { + "dependentRequired": { + "foo\nbar": ["foo\rbar"], + "foo\"bar": ["foo'bar"] + } + }, + "tests": [ + { + "description": "CRLF", + "data": { + "foo\nbar": 1, + "foo\rbar": 2 + }, + "valid": true + }, + { + "description": "quoted quotes", + "data": { + "foo'bar": 1, + "foo\"bar": 2 + }, + "valid": true + }, + { + "description": "CRLF missing dependent", + "data": { + "foo\nbar": 1, + "foo": 2 + }, + "valid": false + }, + { + "description": "quoted quotes missing dependent", + "data": { + "foo\"bar": 2 + }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/dependentSchemas.json b/vendor/jsonschema/json/tests/draft2019-09/dependentSchemas.json new file mode 100644 index 00000000..2ba1a757 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/dependentSchemas.json @@ -0,0 +1,129 @@ +[ + { + "description": "single dependency", + "schema": { + "dependentSchemas": { + "bar": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "integer"} + } + } + } + }, + "tests": [ + { + "description": "valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "no dependency", + "data": {"foo": "quux"}, + "valid": true + }, + { + "description": "wrong type", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "wrong type other", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + }, + { + "description": "wrong type both", + "data": {"foo": "quux", "bar": "quux"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["bar"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": 
true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "boolean subschemas", + "schema": { + "dependentSchemas": { + "foo": true, + "bar": false + } + }, + "tests": [ + { + "description": "object with property having schema true is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "object with property having schema false is invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "object with both properties is invalid", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "dependencies with escaped characters", + "schema": { + "dependentSchemas": { + "foo\tbar": {"minProperties": 4}, + "foo'bar": {"required": ["foo\"bar"]} + } + }, + "tests": [ + { + "description": "quoted tab", + "data": { + "foo\tbar": 1, + "a": 2, + "b": 3, + "c": 4 + }, + "valid": true + }, + { + "description": "quoted quote", + "data": { + "foo'bar": {"foo\"bar": 1} + }, + "valid": false + }, + { + "description": "quoted tab invalid under dependent schema", + "data": { + "foo\tbar": 1, + "a": 2 + }, + "valid": false + }, + { + "description": "quoted quote invalid under dependent schema", + "data": {"foo'bar": 1}, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/enum.json b/vendor/jsonschema/json/tests/draft2019-09/enum.json new file mode 100644 index 00000000..f085097b --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/enum.json @@ -0,0 +1,236 @@ +[ + { + "description": "simple enum validation", + "schema": {"enum": [1, 2, 3]}, + "tests": [ + { + "description": "one of the enum is valid", + "data": 1, + "valid": true + }, + { + "description": "something else is invalid", + "data": 4, + "valid": false + } + ] + }, + { + "description": "heterogeneous enum validation", + "schema": {"enum": [6, "foo", [], true, {"foo": 12}]}, + "tests": [ + { + "description": "one of the enum is valid", + "data": [], + "valid": true + }, + { + "description": "something else is invalid", + "data": null, + "valid": false + }, + { + "description": "objects are deep compared", + "data": {"foo": false}, + "valid": false + }, + { + "description": "valid object matches", + "data": {"foo": 12}, + "valid": true + }, + { + "description": "extra properties in object is invalid", + "data": {"foo": 12, "boo": 42}, + "valid": false + } + ] + }, + { + "description": "heterogeneous enum-with-null validation", + "schema": { "enum": [6, null] }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "number is valid", + "data": 6, + "valid": true + }, + { + "description": "something else is invalid", + "data": "test", + "valid": false + } + ] + }, + { + "description": "enums in properties", + "schema": { + "type":"object", + "properties": { + "foo": {"enum":["foo"]}, + "bar": {"enum":["bar"]} + }, + "required": ["bar"] + }, + "tests": [ + { + "description": "both properties are valid", + "data": {"foo":"foo", "bar":"bar"}, + "valid": true + }, + { + "description": "wrong foo value", + "data": {"foo":"foot", "bar":"bar"}, + "valid": false + }, + { + "description": "wrong bar value", + "data": {"foo":"foo", "bar":"bart"}, + "valid": false + }, + { + "description": "missing optional property is valid", + "data": {"bar":"bar"}, + "valid": true + }, + { + "description": "missing required property is invalid", + "data": {"foo":"foo"}, + 
"valid": false + }, + { + "description": "missing all properties is invalid", + "data": {}, + "valid": false + } + ] + }, + { + "description": "enum with escaped characters", + "schema": { + "enum": ["foo\nbar", "foo\rbar"] + }, + "tests": [ + { + "description": "member 1 is valid", + "data": "foo\nbar", + "valid": true + }, + { + "description": "member 2 is valid", + "data": "foo\rbar", + "valid": true + }, + { + "description": "another string is invalid", + "data": "abc", + "valid": false + } + ] + }, + { + "description": "enum with false does not match 0", + "schema": {"enum": [false]}, + "tests": [ + { + "description": "false is valid", + "data": false, + "valid": true + }, + { + "description": "integer zero is invalid", + "data": 0, + "valid": false + }, + { + "description": "float zero is invalid", + "data": 0.0, + "valid": false + } + ] + }, + { + "description": "enum with true does not match 1", + "schema": {"enum": [true]}, + "tests": [ + { + "description": "true is valid", + "data": true, + "valid": true + }, + { + "description": "integer one is invalid", + "data": 1, + "valid": false + }, + { + "description": "float one is invalid", + "data": 1.0, + "valid": false + } + ] + }, + { + "description": "enum with 0 does not match false", + "schema": {"enum": [0]}, + "tests": [ + { + "description": "false is invalid", + "data": false, + "valid": false + }, + { + "description": "integer zero is valid", + "data": 0, + "valid": true + }, + { + "description": "float zero is valid", + "data": 0.0, + "valid": true + } + ] + }, + { + "description": "enum with 1 does not match true", + "schema": {"enum": [1]}, + "tests": [ + { + "description": "true is invalid", + "data": true, + "valid": false + }, + { + "description": "integer one is valid", + "data": 1, + "valid": true + }, + { + "description": "float one is valid", + "data": 1.0, + "valid": true + } + ] + }, + { + "description": "nul characters in strings", + "schema": { "enum": [ "hello\u0000there" ] }, + "tests": [ + { + "description": "match string with nul", + "data": "hello\u0000there", + "valid": true + }, + { + "description": "do not match string lacking nul", + "data": "hellothere", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/exclusiveMaximum.json b/vendor/jsonschema/json/tests/draft2019-09/exclusiveMaximum.json new file mode 100644 index 00000000..dc3cd709 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/exclusiveMaximum.json @@ -0,0 +1,30 @@ +[ + { + "description": "exclusiveMaximum validation", + "schema": { + "exclusiveMaximum": 3.0 + }, + "tests": [ + { + "description": "below the exclusiveMaximum is valid", + "data": 2.2, + "valid": true + }, + { + "description": "boundary point is invalid", + "data": 3.0, + "valid": false + }, + { + "description": "above the exclusiveMaximum is invalid", + "data": 3.5, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/exclusiveMinimum.json b/vendor/jsonschema/json/tests/draft2019-09/exclusiveMinimum.json new file mode 100644 index 00000000..b38d7ece --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/exclusiveMinimum.json @@ -0,0 +1,30 @@ +[ + { + "description": "exclusiveMinimum validation", + "schema": { + "exclusiveMinimum": 1.1 + }, + "tests": [ + { + "description": "above the exclusiveMinimum is valid", + "data": 1.2, + "valid": true + }, + { + "description": "boundary point is invalid", + "data": 1.1, + 
"valid": false + }, + { + "description": "below the exclusiveMinimum is invalid", + "data": 0.6, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/format.json b/vendor/jsonschema/json/tests/draft2019-09/format.json new file mode 100644 index 00000000..a4b51d28 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/format.json @@ -0,0 +1,686 @@ +[ + { + "description": "email format", + "schema": { "format": "email" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "idn-email format", + "schema": { "format": "idn-email" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "regex format", + "schema": { "format": "regex" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "ipv4 format", + "schema": { "format": "ipv4" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "ipv6 format", + "schema": { "format": "ipv6" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true 
+ }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "idn-hostname format", + "schema": { "format": "idn-hostname" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "hostname format", + "schema": { "format": "hostname" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "date format", + "schema": { "format": "date" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "date-time format", + "schema": { "format": "date-time" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "time format", + "schema": { "format": "time" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "json-pointer format", + 
"schema": { "format": "json-pointer" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "relative-json-pointer format", + "schema": { "format": "relative-json-pointer" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "iri format", + "schema": { "format": "iri" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "iri-reference format", + "schema": { "format": "iri-reference" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "uri format", + "schema": { "format": "uri" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "uri-reference format", + "schema": { "format": "uri-reference" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 
13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "uri-template format", + "schema": { "format": "uri-template" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "uuid format", + "schema": { "format": "uuid" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "duration format", + "schema": { "format": "duration" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/id.json b/vendor/jsonschema/json/tests/draft2019-09/id.json new file mode 100644 index 00000000..0d825ea4 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/id.json @@ -0,0 +1,289 @@ +[ + { + "description": "Invalid use of fragments in location-independent $id", + "schema": { + "$ref": "https://json-schema.org/draft/2019-09/schema" + }, + "tests": [ + { + "description": "Identifier name", + "data": { + "$ref": "#foo", + "$defs": { + "A": { + "$id": "#foo", + "type": "integer" + } + } + }, + "valid": false + }, + { + "description": "Identifier name and no ref", + "data": { + "$defs": { + "A": { "$id": "#foo" } + } + }, + "valid": false + }, + { + "description": "Identifier path", + "data": { + "$ref": "#/a/b", + "$defs": { + "A": { + "$id": "#/a/b", + "type": "integer" + } + } + }, + "valid": false + }, + { + "description": "Identifier name with absolute URI", + "data": { + "$ref": "http://localhost:1234/bar#foo", + "$defs": { + "A": { + "$id": "http://localhost:1234/bar#foo", + "type": "integer" + } + } + }, + "valid": false + }, + { + "description": "Identifier path with 
absolute URI", + "data": { + "$ref": "http://localhost:1234/bar#/a/b", + "$defs": { + "A": { + "$id": "http://localhost:1234/bar#/a/b", + "type": "integer" + } + } + }, + "valid": false + }, + { + "description": "Identifier name with base URI change in subschema", + "data": { + "$id": "http://localhost:1234/root", + "$ref": "http://localhost:1234/nested.json#foo", + "$defs": { + "A": { + "$id": "nested.json", + "$defs": { + "B": { + "$id": "#foo", + "type": "integer" + } + } + } + } + }, + "valid": false + }, + { + "description": "Identifier path with base URI change in subschema", + "data": { + "$id": "http://localhost:1234/root", + "$ref": "http://localhost:1234/nested.json#/a/b", + "$defs": { + "A": { + "$id": "nested.json", + "$defs": { + "B": { + "$id": "#/a/b", + "type": "integer" + } + } + } + } + }, + "valid": false + } + ] + }, + { + "description": "Valid use of empty fragments in location-independent $id", + "comment": "These are allowed but discouraged", + "schema": { + "$ref": "https://json-schema.org/draft/2019-09/schema" + }, + "tests": [ + { + "description": "Identifier name with absolute URI", + "data": { + "$ref": "http://localhost:1234/bar", + "$defs": { + "A": { + "$id": "http://localhost:1234/bar#", + "type": "integer" + } + } + }, + "valid": true + }, + { + "description": "Identifier name with base URI change in subschema", + "data": { + "$id": "http://localhost:1234/root", + "$ref": "http://localhost:1234/nested.json#/$defs/B", + "$defs": { + "A": { + "$id": "nested.json", + "$defs": { + "B": { + "$id": "#", + "type": "integer" + } + } + } + } + }, + "valid": true + } + ] + }, + { + "description": "Unnormalized $ids are allowed but discouraged", + "schema": { + "$ref": "https://json-schema.org/draft/2019-09/schema" + }, + "tests": [ + { + "description": "Unnormalized identifier", + "data": { + "$ref": "http://localhost:1234/foo/baz", + "$defs": { + "A": { + "$id": "http://localhost:1234/foo/bar/../baz", + "type": "integer" + } + } + }, + "valid": true + }, + { + "description": "Unnormalized identifier and no ref", + "data": { + "$defs": { + "A": { + "$id": "http://localhost:1234/foo/bar/../baz", + "type": "integer" + } + } + }, + "valid": true + }, + { + "description": "Unnormalized identifier with empty fragment", + "data": { + "$ref": "http://localhost:1234/foo/baz", + "$defs": { + "A": { + "$id": "http://localhost:1234/foo/bar/../baz#", + "type": "integer" + } + } + }, + "valid": true + }, + { + "description": "Unnormalized identifier with empty fragment and no ref", + "data": { + "$defs": { + "A": { + "$id": "http://localhost:1234/foo/bar/../baz#", + "type": "integer" + } + } + }, + "valid": true + } + ] + }, + { + "description": "$id inside an enum is not a real identifier", + "comment": "the implementation must not be confused by an $id buried in the enum", + "schema": { + "$defs": { + "id_in_enum": { + "enum": [ + { + "$id": "https://localhost:1234/id/my_identifier.json", + "type": "null" + } + ] + }, + "real_id_in_schema": { + "$id": "https://localhost:1234/id/my_identifier.json", + "type": "string" + }, + "zzz_id_in_const": { + "const": { + "$id": "https://localhost:1234/id/my_identifier.json", + "type": "null" + } + } + }, + "anyOf": [ + { "$ref": "#/$defs/id_in_enum" }, + { "$ref": "https://localhost:1234/id/my_identifier.json" } + ] + }, + "tests": [ + { + "description": "exact match to enum, and type matches", + "data": { + "$id": "https://localhost:1234/id/my_identifier.json", + "type": "null" + }, + "valid": true + }, + { + "description": "match $ref to 
$id", + "data": "a string to match #/$defs/id_in_enum", + "valid": true + }, + { + "description": "no match on enum or $ref to $id", + "data": 1, + "valid": false + } + ] + }, + { + "description": "non-schema object containing an $id property", + "schema": { + "$defs": { + "const_not_id": { + "const": { + "$id": "not_a_real_id" + } + } + }, + "if": { + "const": "skip not_a_real_id" + }, + "then": true, + "else" : { + "$ref": "#/$defs/const_not_id" + } + }, + "tests": [ + { + "description": "skip traversing definition for a valid result", + "data": "skip not_a_real_id", + "valid": true + }, + { + "description": "const at const_not_id does not match", + "data": 1, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/if-then-else.json b/vendor/jsonschema/json/tests/draft2019-09/if-then-else.json new file mode 100644 index 00000000..284e9191 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/if-then-else.json @@ -0,0 +1,258 @@ +[ + { + "description": "ignore if without then or else", + "schema": { + "if": { + "const": 0 + } + }, + "tests": [ + { + "description": "valid when valid against lone if", + "data": 0, + "valid": true + }, + { + "description": "valid when invalid against lone if", + "data": "hello", + "valid": true + } + ] + }, + { + "description": "ignore then without if", + "schema": { + "then": { + "const": 0 + } + }, + "tests": [ + { + "description": "valid when valid against lone then", + "data": 0, + "valid": true + }, + { + "description": "valid when invalid against lone then", + "data": "hello", + "valid": true + } + ] + }, + { + "description": "ignore else without if", + "schema": { + "else": { + "const": 0 + } + }, + "tests": [ + { + "description": "valid when valid against lone else", + "data": 0, + "valid": true + }, + { + "description": "valid when invalid against lone else", + "data": "hello", + "valid": true + } + ] + }, + { + "description": "if and then without else", + "schema": { + "if": { + "exclusiveMaximum": 0 + }, + "then": { + "minimum": -10 + } + }, + "tests": [ + { + "description": "valid through then", + "data": -1, + "valid": true + }, + { + "description": "invalid through then", + "data": -100, + "valid": false + }, + { + "description": "valid when if test fails", + "data": 3, + "valid": true + } + ] + }, + { + "description": "if and else without then", + "schema": { + "if": { + "exclusiveMaximum": 0 + }, + "else": { + "multipleOf": 2 + } + }, + "tests": [ + { + "description": "valid when if test passes", + "data": -1, + "valid": true + }, + { + "description": "valid through else", + "data": 4, + "valid": true + }, + { + "description": "invalid through else", + "data": 3, + "valid": false + } + ] + }, + { + "description": "validate against correct branch, then vs else", + "schema": { + "if": { + "exclusiveMaximum": 0 + }, + "then": { + "minimum": -10 + }, + "else": { + "multipleOf": 2 + } + }, + "tests": [ + { + "description": "valid through then", + "data": -1, + "valid": true + }, + { + "description": "invalid through then", + "data": -100, + "valid": false + }, + { + "description": "valid through else", + "data": 4, + "valid": true + }, + { + "description": "invalid through else", + "data": 3, + "valid": false + } + ] + }, + { + "description": "non-interference across combined schemas", + "schema": { + "allOf": [ + { + "if": { + "exclusiveMaximum": 0 + } + }, + { + "then": { + "minimum": -10 + } + }, + { + "else": { + "multipleOf": 2 + } + } + ] + }, + "tests": [ + { + "description": "valid, but would have been 
invalid through then", + "data": -100, + "valid": true + }, + { + "description": "valid, but would have been invalid through else", + "data": 3, + "valid": true + } + ] + }, + { + "description": "if with boolean schema true", + "schema": { + "if": true, + "then": { "const": "then" }, + "else": { "const": "else" } + }, + "tests": [ + { + "description": "boolean schema true in if always chooses the then path (valid)", + "data": "then", + "valid": true + }, + { + "description": "boolean schema true in if always chooses the then path (invalid)", + "data": "else", + "valid": false + } + ] + }, + { + "description": "if with boolean schema false", + "schema": { + "if": false, + "then": { "const": "then" }, + "else": { "const": "else" } + }, + "tests": [ + { + "description": "boolean schema false in if always chooses the else path (invalid)", + "data": "then", + "valid": false + }, + { + "description": "boolean schema false in if always chooses the else path (valid)", + "data": "else", + "valid": true + } + ] + }, + { + "description": "if appears at the end when serialized (keyword processing sequence)", + "schema": { + "then": { "const": "yes" }, + "else": { "const": "other" }, + "if": { "maxLength": 4 } + }, + "tests": [ + { + "description": "yes redirects to then and passes", + "data": "yes", + "valid": true + }, + { + "description": "other redirects to else and passes", + "data": "other", + "valid": true + }, + { + "description": "no redirects to then and fails", + "data": "no", + "valid": false + }, + { + "description": "invalid redirects to else and fails", + "data": "invalid", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/infinite-loop-detection.json b/vendor/jsonschema/json/tests/draft2019-09/infinite-loop-detection.json new file mode 100644 index 00000000..9c3c3627 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/infinite-loop-detection.json @@ -0,0 +1,36 @@ +[ + { + "description": "evaluating the same schema location against the same data location twice is not a sign of an infinite loop", + "schema": { + "$defs": { + "int": { "type": "integer" } + }, + "allOf": [ + { + "properties": { + "foo": { + "$ref": "#/$defs/int" + } + } + }, + { + "additionalProperties": { + "$ref": "#/$defs/int" + } + } + ] + }, + "tests": [ + { + "description": "passing case", + "data": { "foo": 1 }, + "valid": true + }, + { + "description": "failing case", + "data": { "foo": "a string" }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/items.json b/vendor/jsonschema/json/tests/draft2019-09/items.json new file mode 100644 index 00000000..e46a25b2 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/items.json @@ -0,0 +1,282 @@ +[ + { + "description": "a schema given for items", + "schema": { + "items": {"type": "integer"} + }, + "tests": [ + { + "description": "valid items", + "data": [ 1, 2, 3 ], + "valid": true + }, + { + "description": "wrong type of items", + "data": [1, "x"], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": {"foo" : "bar"}, + "valid": true + }, + { + "description": "JavaScript pseudo-array is valid", + "data": { + "0": "invalid", + "length": 1 + }, + "valid": true + } + ] + }, + { + "description": "an array of schemas for items", + "schema": { + "items": [ + {"type": "integer"}, + {"type": "string"} + ] + }, + "tests": [ + { + "description": "correct types", + "data": [ 1, "foo" ], + "valid": true + }, + { + "description": "wrong types", + "data": [ "foo", 1 ], + 
"valid": false + }, + { + "description": "incomplete array of items", + "data": [ 1 ], + "valid": true + }, + { + "description": "array with additional items", + "data": [ 1, "foo", true ], + "valid": true + }, + { + "description": "empty array", + "data": [ ], + "valid": true + }, + { + "description": "JavaScript pseudo-array is valid", + "data": { + "0": "invalid", + "1": "valid", + "length": 2 + }, + "valid": true + } + ] + }, + { + "description": "items with boolean schema (true)", + "schema": {"items": true}, + "tests": [ + { + "description": "any array is valid", + "data": [ 1, "foo", true ], + "valid": true + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "items with boolean schema (false)", + "schema": {"items": false}, + "tests": [ + { + "description": "any non-empty array is invalid", + "data": [ 1, "foo", true ], + "valid": false + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "items with boolean schemas", + "schema": { + "items": [true, false] + }, + "tests": [ + { + "description": "array with one item is valid", + "data": [ 1 ], + "valid": true + }, + { + "description": "array with two items is invalid", + "data": [ 1, "foo" ], + "valid": false + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "items and subitems", + "schema": { + "$defs": { + "item": { + "type": "array", + "additionalItems": false, + "items": [ + { "$ref": "#/$defs/sub-item" }, + { "$ref": "#/$defs/sub-item" } + ] + }, + "sub-item": { + "type": "object", + "required": ["foo"] + } + }, + "type": "array", + "additionalItems": false, + "items": [ + { "$ref": "#/$defs/item" }, + { "$ref": "#/$defs/item" }, + { "$ref": "#/$defs/item" } + ] + }, + "tests": [ + { + "description": "valid items", + "data": [ + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": true + }, + { + "description": "too many items", + "data": [ + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "too many sub-items", + "data": [ + [ {"foo": null}, {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "wrong item", + "data": [ + {"foo": null}, + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "wrong sub-item", + "data": [ + [ {}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "fewer items is valid", + "data": [ + [ {"foo": null} ], + [ {"foo": null} ] + ], + "valid": true + } + ] + }, + { + "description": "nested items", + "schema": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "number" + } + } + } + } + }, + "tests": [ + { + "description": "valid nested array", + "data": [[[[1]], [[2],[3]]], [[[4], [5], [6]]]], + "valid": true + }, + { + "description": "nested array with invalid type", + "data": [[[["1"]], [[2],[3]]], [[[4], [5], [6]]]], + "valid": false + }, + { + "description": "not deep enough", + "data": [[[1], [2],[3]], [[4], [5], [6]]], + "valid": false + } + ] + }, + { + "description": 
"single-form items with null instance elements", + "schema": { + "items": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + }, + { + "description": "array-form items with null instance elements", + "schema": { + "items": [ + { + "type": "null" + } + ] + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/maxContains.json b/vendor/jsonschema/json/tests/draft2019-09/maxContains.json new file mode 100644 index 00000000..61c967de --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/maxContains.json @@ -0,0 +1,98 @@ +[ + { + "description": "maxContains without contains is ignored", + "schema": { + "maxContains": 1 + }, + "tests": [ + { + "description": "one item valid against lone maxContains", + "data": [ 1 ], + "valid": true + }, + { + "description": "two items still valid against lone maxContains", + "data": [ 1, 2 ], + "valid": true + } + ] + }, + { + "description": "maxContains with contains", + "schema": { + "contains": {"const": 1}, + "maxContains": 1 + }, + "tests": [ + { + "description": "empty data", + "data": [ ], + "valid": false + }, + { + "description": "all elements match, valid maxContains", + "data": [ 1 ], + "valid": true + }, + { + "description": "all elements match, invalid maxContains", + "data": [ 1, 1 ], + "valid": false + }, + { + "description": "some elements match, valid maxContains", + "data": [ 1, 2 ], + "valid": true + }, + { + "description": "some elements match, invalid maxContains", + "data": [ 1, 2, 1 ], + "valid": false + } + ] + }, + { + "description": "maxContains with contains, value with a decimal", + "schema": { + "contains": {"const": 1}, + "maxContains": 1.0 + }, + "tests": [ + { + "description": "one element matches, valid maxContains", + "data": [ 1 ], + "valid": true + }, + { + "description": "too many elements match, invalid maxContains", + "data": [ 1, 1 ], + "valid": false + } + ] + }, + { + "description": "minContains < maxContains", + "schema": { + "contains": {"const": 1}, + "minContains": 1, + "maxContains": 3 + }, + "tests": [ + { + "description": "actual < minContains < maxContains", + "data": [ ], + "valid": false + }, + { + "description": "minContains < actual < maxContains", + "data": [ 1, 1 ], + "valid": true + }, + { + "description": "minContains < maxContains < actual", + "data": [ 1, 1, 1, 1 ], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/maxItems.json b/vendor/jsonschema/json/tests/draft2019-09/maxItems.json new file mode 100644 index 00000000..f0c36ab2 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/maxItems.json @@ -0,0 +1,44 @@ +[ + { + "description": "maxItems validation", + "schema": {"maxItems": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": [1], + "valid": true + }, + { + "description": "exact length is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "too long is invalid", + "data": [1, 2, 3], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": "foobar", + "valid": true + } + ] + }, + { + "description": "maxItems validation with a decimal", + "schema": {"maxItems": 2.0}, + "tests": [ + { + "description": "shorter is valid", + "data": [1], + "valid": true + }, + { + "description": "too long is invalid", + "data": [1, 2, 3], + "valid": false + } + ] + } +] diff --git 
a/vendor/jsonschema/json/tests/draft2019-09/maxLength.json b/vendor/jsonschema/json/tests/draft2019-09/maxLength.json new file mode 100644 index 00000000..748b4daa --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/maxLength.json @@ -0,0 +1,49 @@ +[ + { + "description": "maxLength validation", + "schema": {"maxLength": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": "f", + "valid": true + }, + { + "description": "exact length is valid", + "data": "fo", + "valid": true + }, + { + "description": "too long is invalid", + "data": "foo", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + }, + { + "description": "two supplementary Unicode code points are long enough", + "data": "\uD83D\uDCA9\uD83D\uDCA9", + "valid": true + } + ] + }, + { + "description": "maxLength validation with a decimal", + "schema": {"maxLength": 2.0}, + "tests": [ + { + "description": "shorter is valid", + "data": "f", + "valid": true + }, + { + "description": "too long is invalid", + "data": "foo", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/maxProperties.json b/vendor/jsonschema/json/tests/draft2019-09/maxProperties.json new file mode 100644 index 00000000..acec1420 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/maxProperties.json @@ -0,0 +1,70 @@ +[ + { + "description": "maxProperties validation", + "schema": {"maxProperties": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "exact length is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "too long is invalid", + "data": {"foo": 1, "bar": 2, "baz": 3}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [1, 2, 3], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "maxProperties validation with a decimal", + "schema": {"maxProperties": 2.0}, + "tests": [ + { + "description": "shorter is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "too long is invalid", + "data": {"foo": 1, "bar": 2, "baz": 3}, + "valid": false + } + ] + }, + { + "description": "maxProperties = 0 means the object is empty", + "schema": { "maxProperties": 0 }, + "tests": [ + { + "description": "no properties is valid", + "data": {}, + "valid": true + }, + { + "description": "one property is invalid", + "data": { "foo": 1 }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/maximum.json b/vendor/jsonschema/json/tests/draft2019-09/maximum.json new file mode 100644 index 00000000..6844a39e --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/maximum.json @@ -0,0 +1,54 @@ +[ + { + "description": "maximum validation", + "schema": {"maximum": 3.0}, + "tests": [ + { + "description": "below the maximum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "boundary point is valid", + "data": 3.0, + "valid": true + }, + { + "description": "above the maximum is invalid", + "data": 3.5, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "maximum validation with unsigned integer", + "schema": {"maximum": 300}, + "tests": [ + { + "description": "below the maximum is valid", + "data": 299.97, + "valid": true + }, + { + "description":
"boundary point integer is valid", + "data": 300, + "valid": true + }, + { + "description": "boundary point float is valid", + "data": 300.00, + "valid": true + }, + { + "description": "above the maximum is invalid", + "data": 300.5, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/minContains.json b/vendor/jsonschema/json/tests/draft2019-09/minContains.json new file mode 100644 index 00000000..1d3b5a5c --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/minContains.json @@ -0,0 +1,216 @@ +[ + { + "description": "minContains without contains is ignored", + "schema": { + "minContains": 1 + }, + "tests": [ + { + "description": "one item valid against lone minContains", + "data": [ 1 ], + "valid": true + }, + { + "description": "zero items still valid against lone minContains", + "data": [], + "valid": true + } + ] + }, + { + "description": "minContains=1 with contains", + "schema": { + "contains": {"const": 1}, + "minContains": 1 + }, + "tests": [ + { + "description": "empty data", + "data": [ ], + "valid": false + }, + { + "description": "no elements match", + "data": [ 2 ], + "valid": false + }, + { + "description": "single element matches, valid minContains", + "data": [ 1 ], + "valid": true + }, + { + "description": "some elements match, valid minContains", + "data": [ 1, 2 ], + "valid": true + }, + { + "description": "all elements match, valid minContains", + "data": [ 1, 1 ], + "valid": true + } + ] + }, + { + "description": "minContains=2 with contains", + "schema": { + "contains": {"const": 1}, + "minContains": 2 + }, + "tests": [ + { + "description": "empty data", + "data": [ ], + "valid": false + }, + { + "description": "all elements match, invalid minContains", + "data": [ 1 ], + "valid": false + }, + { + "description": "some elements match, invalid minContains", + "data": [ 1, 2 ], + "valid": false + }, + { + "description": "all elements match, valid minContains (exactly as needed)", + "data": [ 1, 1 ], + "valid": true + }, + { + "description": "all elements match, valid minContains (more than needed)", + "data": [ 1, 1, 1 ], + "valid": true + }, + { + "description": "some elements match, valid minContains", + "data": [ 1, 2, 1 ], + "valid": true + } + ] + }, + { + "description": "minContains=2 with contains with a decimal value", + "schema": { + "contains": {"const": 1}, + "minContains": 2.0 + }, + "tests": [ + { + "description": "one element matches, invalid minContains", + "data": [ 1 ], + "valid": false + }, + { + "description": "both elements match, valid minContains", + "data": [ 1, 1 ], + "valid": true + } + ] + }, + { + "description": "maxContains = minContains", + "schema": { + "contains": {"const": 1}, + "maxContains": 2, + "minContains": 2 + }, + "tests": [ + { + "description": "empty data", + "data": [ ], + "valid": false + }, + { + "description": "all elements match, invalid minContains", + "data": [ 1 ], + "valid": false + }, + { + "description": "all elements match, invalid maxContains", + "data": [ 1, 1, 1 ], + "valid": false + }, + { + "description": "all elements match, valid maxContains and minContains", + "data": [ 1, 1 ], + "valid": true + } + ] + }, + { + "description": "maxContains < minContains", + "schema": { + "contains": {"const": 1}, + "maxContains": 1, + "minContains": 3 + }, + "tests": [ + { + "description": "empty data", + "data": [ ], + "valid": false + }, + { + "description": "invalid minContains", + "data": [ 1 ], + "valid": false + }, + { + "description": "invalid maxContains", + "data": [ 1, 1, 1 
], + "valid": false + }, + { + "description": "invalid maxContains and minContains", + "data": [ 1, 1 ], + "valid": false + } + ] + }, + { + "description": "minContains = 0 with no maxContains", + "schema": { + "contains": {"const": 1}, + "minContains": 0 + }, + "tests": [ + { + "description": "empty data", + "data": [ ], + "valid": true + }, + { + "description": "minContains = 0 makes contains always pass", + "data": [ 2 ], + "valid": true + } + ] + }, + { + "description": "minContains = 0 with maxContains", + "schema": { + "contains": {"const": 1}, + "minContains": 0, + "maxContains": 1 + }, + "tests": [ + { + "description": "empty data", + "data": [ ], + "valid": true + }, + { + "description": "not more than maxContains", + "data": [ 1 ], + "valid": true + }, + { + "description": "too many", + "data": [ 1, 1 ], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/minItems.json b/vendor/jsonschema/json/tests/draft2019-09/minItems.json new file mode 100644 index 00000000..d3b18720 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/minItems.json @@ -0,0 +1,44 @@ +[ + { + "description": "minItems validation", + "schema": {"minItems": 1}, + "tests": [ + { + "description": "longer is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "exact length is valid", + "data": [1], + "valid": true + }, + { + "description": "too short is invalid", + "data": [], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": "", + "valid": true + } + ] + }, + { + "description": "minItems validation with a decimal", + "schema": {"minItems": 1.0}, + "tests": [ + { + "description": "longer is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "too short is invalid", + "data": [], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/minLength.json b/vendor/jsonschema/json/tests/draft2019-09/minLength.json new file mode 100644 index 00000000..64db9480 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/minLength.json @@ -0,0 +1,49 @@ +[ + { + "description": "minLength validation", + "schema": {"minLength": 2}, + "tests": [ + { + "description": "longer is valid", + "data": "foo", + "valid": true + }, + { + "description": "exact length is valid", + "data": "fo", + "valid": true + }, + { + "description": "too short is invalid", + "data": "f", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 1, + "valid": true + }, + { + "description": "one supplementary Unicode code point is not long enough", + "data": "\uD83D\uDCA9", + "valid": false + } + ] + }, + { + "description": "minLength validation with a decimal", + "schema": {"minLength": 2.0}, + "tests": [ + { + "description": "longer is valid", + "data": "foo", + "valid": true + }, + { + "description": "too short is invalid", + "data": "f", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/minProperties.json b/vendor/jsonschema/json/tests/draft2019-09/minProperties.json new file mode 100644 index 00000000..9f74f789 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/minProperties.json @@ -0,0 +1,54 @@ +[ + { + "description": "minProperties validation", + "schema": {"minProperties": 1}, + "tests": [ + { + "description": "longer is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "exact length is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "too short is invalid", + "data": {}, + "valid": false + }, + { + 
"description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores strings", + "data": "", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "minProperties validation with a decimal", + "schema": {"minProperties": 1.0}, + "tests": [ + { + "description": "longer is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "too short is invalid", + "data": {}, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/minimum.json b/vendor/jsonschema/json/tests/draft2019-09/minimum.json new file mode 100644 index 00000000..21ae50e0 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/minimum.json @@ -0,0 +1,69 @@ +[ + { + "description": "minimum validation", + "schema": {"minimum": 1.1}, + "tests": [ + { + "description": "above the minimum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "boundary point is valid", + "data": 1.1, + "valid": true + }, + { + "description": "below the minimum is invalid", + "data": 0.6, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "minimum validation with signed integer", + "schema": {"minimum": -2}, + "tests": [ + { + "description": "negative above the minimum is valid", + "data": -1, + "valid": true + }, + { + "description": "positive above the minimum is valid", + "data": 0, + "valid": true + }, + { + "description": "boundary point is valid", + "data": -2, + "valid": true + }, + { + "description": "boundary point with float is valid", + "data": -2.0, + "valid": true + }, + { + "description": "float below the minimum is invalid", + "data": -2.0001, + "valid": false + }, + { + "description": "int below the minimum is invalid", + "data": -3, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/multipleOf.json b/vendor/jsonschema/json/tests/draft2019-09/multipleOf.json new file mode 100644 index 00000000..25c25a91 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/multipleOf.json @@ -0,0 +1,71 @@ +[ + { + "description": "by int", + "schema": {"multipleOf": 2}, + "tests": [ + { + "description": "int by int", + "data": 10, + "valid": true + }, + { + "description": "int by int fail", + "data": 7, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "by number", + "schema": {"multipleOf": 1.5}, + "tests": [ + { + "description": "zero is multiple of anything", + "data": 0, + "valid": true + }, + { + "description": "4.5 is multiple of 1.5", + "data": 4.5, + "valid": true + }, + { + "description": "35 is not multiple of 1.5", + "data": 35, + "valid": false + } + ] + }, + { + "description": "by small number", + "schema": {"multipleOf": 0.0001}, + "tests": [ + { + "description": "0.0075 is multiple of 0.0001", + "data": 0.0075, + "valid": true + }, + { + "description": "0.00751 is not multiple of 0.0001", + "data": 0.00751, + "valid": false + } + ] + }, + { + "description": "float division = inf", + "schema": {"type": "integer", "multipleOf": 0.123456789}, + "tests": [ + { + "description": "always invalid, but naive implementations may raise an overflow error", + "data": 1e308, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/not.json 
b/vendor/jsonschema/json/tests/draft2019-09/not.json new file mode 100644 index 00000000..98de0eda --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/not.json @@ -0,0 +1,117 @@ +[ + { + "description": "not", + "schema": { + "not": {"type": "integer"} + }, + "tests": [ + { + "description": "allowed", + "data": "foo", + "valid": true + }, + { + "description": "disallowed", + "data": 1, + "valid": false + } + ] + }, + { + "description": "not multiple types", + "schema": { + "not": {"type": ["integer", "boolean"]} + }, + "tests": [ + { + "description": "valid", + "data": "foo", + "valid": true + }, + { + "description": "mismatch", + "data": 1, + "valid": false + }, + { + "description": "other mismatch", + "data": true, + "valid": false + } + ] + }, + { + "description": "not more complex schema", + "schema": { + "not": { + "type": "object", + "properties": { + "foo": { + "type": "string" + } + } + } + }, + "tests": [ + { + "description": "match", + "data": 1, + "valid": true + }, + { + "description": "other match", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "mismatch", + "data": {"foo": "bar"}, + "valid": false + } + ] + }, + { + "description": "forbidden property", + "schema": { + "properties": { + "foo": { + "not": {} + } + } + }, + "tests": [ + { + "description": "property present", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "property absent", + "data": {"bar": 1, "baz": 2}, + "valid": true + } + ] + }, + { + "description": "not with boolean schema true", + "schema": {"not": true}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "not with boolean schema false", + "schema": {"not": false}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/oneOf.json b/vendor/jsonschema/json/tests/draft2019-09/oneOf.json new file mode 100644 index 00000000..eeb7ae86 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/oneOf.json @@ -0,0 +1,274 @@ +[ + { + "description": "oneOf", + "schema": { + "oneOf": [ + { + "type": "integer" + }, + { + "minimum": 2 + } + ] + }, + "tests": [ + { + "description": "first oneOf valid", + "data": 1, + "valid": true + }, + { + "description": "second oneOf valid", + "data": 2.5, + "valid": true + }, + { + "description": "both oneOf valid", + "data": 3, + "valid": false + }, + { + "description": "neither oneOf valid", + "data": 1.5, + "valid": false + } + ] + }, + { + "description": "oneOf with base schema", + "schema": { + "type": "string", + "oneOf" : [ + { + "minLength": 2 + }, + { + "maxLength": 4 + } + ] + }, + "tests": [ + { + "description": "mismatch base schema", + "data": 3, + "valid": false + }, + { + "description": "one oneOf valid", + "data": "foobar", + "valid": true + }, + { + "description": "both oneOf valid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf with boolean schemas, all true", + "schema": {"oneOf": [true, true, true]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf with boolean schemas, one true", + "schema": {"oneOf": [true, false, false]}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "oneOf with boolean schemas, more than one true", + "schema": {"oneOf": [true, true, false]}, + "tests": [ + { + "description": 
"any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf with boolean schemas, all false", + "schema": {"oneOf": [false, false, false]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf complex types", + "schema": { + "oneOf": [ + { + "properties": { + "bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "first oneOf valid (complex)", + "data": {"bar": 2}, + "valid": true + }, + { + "description": "second oneOf valid (complex)", + "data": {"foo": "baz"}, + "valid": true + }, + { + "description": "both oneOf valid (complex)", + "data": {"foo": "baz", "bar": 2}, + "valid": false + }, + { + "description": "neither oneOf valid (complex)", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "oneOf with empty schema", + "schema": { + "oneOf": [ + { "type": "number" }, + {} + ] + }, + "tests": [ + { + "description": "one valid - valid", + "data": "foo", + "valid": true + }, + { + "description": "both valid - invalid", + "data": 123, + "valid": false + } + ] + }, + { + "description": "oneOf with required", + "schema": { + "type": "object", + "oneOf": [ + { "required": ["foo", "bar"] }, + { "required": ["foo", "baz"] } + ] + }, + "tests": [ + { + "description": "both invalid - invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "first valid - valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "second valid - valid", + "data": {"foo": 1, "baz": 3}, + "valid": true + }, + { + "description": "both valid - invalid", + "data": {"foo": 1, "bar": 2, "baz" : 3}, + "valid": false + } + ] + }, + { + "description": "oneOf with missing optional property", + "schema": { + "oneOf": [ + { + "properties": { + "bar": true, + "baz": true + }, + "required": ["bar"] + }, + { + "properties": { + "foo": true + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "first oneOf valid", + "data": {"bar": 8}, + "valid": true + }, + { + "description": "second oneOf valid", + "data": {"foo": "foo"}, + "valid": true + }, + { + "description": "both oneOf valid", + "data": {"foo": "foo", "bar": 8}, + "valid": false + }, + { + "description": "neither oneOf valid", + "data": {"baz": "quux"}, + "valid": false + } + ] + }, + { + "description": "nested oneOf, to check validation semantics", + "schema": { + "oneOf": [ + { + "oneOf": [ + { + "type": "null" + } + ] + } + ] + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "anything non-null is invalid", + "data": 123, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/bignum.json b/vendor/jsonschema/json/tests/draft2019-09/optional/bignum.json new file mode 100644 index 00000000..94b4a4e6 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/bignum.json @@ -0,0 +1,93 @@ +[ + { + "description": "integer", + "schema": { "type": "integer" }, + "tests": [ + { + "description": "a bignum is an integer", + "data": 12345678910111213141516171819202122232425262728293031, + "valid": true + }, + { + "description": "a negative bignum is an integer", + "data": -12345678910111213141516171819202122232425262728293031, + "valid": true + } + ] + }, + { + "description": "number", + "schema": { "type": "number" }, + "tests": [ 
+ { + "description": "a bignum is a number", + "data": 98249283749234923498293171823948729348710298301928331, + "valid": true + }, + { + "description": "a negative bignum is a number", + "data": -98249283749234923498293171823948729348710298301928331, + "valid": true + } + ] + }, + { + "description": "string", + "schema": { "type": "string" }, + "tests": [ + { + "description": "a bignum is not a string", + "data": 98249283749234923498293171823948729348710298301928331, + "valid": false + } + ] + }, + { + "description": "maximum integer comparison", + "schema": { "maximum": 18446744073709551615 }, + "tests": [ + { + "description": "comparison works for high numbers", + "data": 18446744073709551600, + "valid": true + } + ] + }, + { + "description": "float comparison with high precision", + "schema": { + "exclusiveMaximum": 972783798187987123879878123.18878137 + }, + "tests": [ + { + "description": "comparison works for high numbers", + "data": 972783798187987123879878123.188781371, + "valid": false + } + ] + }, + { + "description": "minimum integer comparison", + "schema": { "minimum": -18446744073709551615 }, + "tests": [ + { + "description": "comparison works for very negative numbers", + "data": -18446744073709551600, + "valid": true + } + ] + }, + { + "description": "float comparison with high precision on negative numbers", + "schema": { + "exclusiveMinimum": -972783798187987123879878123.18878137 + }, + "tests": [ + { + "description": "comparison works for very negative numbers", + "data": -972783798187987123879878123.188781371, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/cross-draft.json b/vendor/jsonschema/json/tests/draft2019-09/optional/cross-draft.json new file mode 100644 index 00000000..efd3f87d --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/cross-draft.json @@ -0,0 +1,41 @@ +[ + { + "description": "refs to future drafts are processed as future drafts", + "schema": { + "$schema": "https://json-schema.org/draft/2019-09/schema", + "type": "array", + "$ref": "http://localhost:1234/draft2020-12/prefixItems.json" + }, + "tests": [ + { + "description": "first item not a string is invalid", + "comment": "if the implementation is not processing the $ref as a 2020-12 schema, this test will fail", + "data": [1, 2, 3], + "valid": false + }, + { + "description": "first item is a string is valid", + "data": ["a string", 1, 2, 3], + "valid": true + } + ] + }, + { + "description": "refs to historic drafts are processed as historic drafts", + "schema": { + "type": "object", + "allOf": [ + { "properties": { "foo": true } }, + { "$ref": "http://localhost:1234/draft7/ignore-dependentRequired.json" } + ] + }, + "tests": [ + { + "description": "missing bar is valid", + "comment": "if the implementation is not processing the $ref as a draft 7 schema, this test will fail", + "data": {"foo": "any value"}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/dependencies-compatibility.json b/vendor/jsonschema/json/tests/draft2019-09/optional/dependencies-compatibility.json new file mode 100644 index 00000000..6eafaf05 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/dependencies-compatibility.json @@ -0,0 +1,269 @@ +[ + { + "description": "single dependency", + "schema": {"dependencies": {"bar": ["foo"]}}, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependant", + "data": {"foo": 1}, + "valid": true + }, + { + 
"description": "with dependency", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["bar"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "empty dependents", + "schema": {"dependencies": {"bar": []}}, + "tests": [ + { + "description": "empty object", + "data": {}, + "valid": true + }, + { + "description": "object with one property", + "data": {"bar": 2}, + "valid": true + }, + { + "description": "non-object is valid", + "data": 1, + "valid": true + } + ] + }, + { + "description": "multiple dependents required", + "schema": {"dependencies": {"quux": ["foo", "bar"]}}, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependants", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "with dependencies", + "data": {"foo": 1, "bar": 2, "quux": 3}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"foo": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing other dependency", + "data": {"bar": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing both dependencies", + "data": {"quux": 1}, + "valid": false + } + ] + }, + { + "description": "dependencies with escaped characters", + "schema": { + "dependencies": { + "foo\nbar": ["foo\rbar"], + "foo\"bar": ["foo'bar"] + } + }, + "tests": [ + { + "description": "CRLF", + "data": { + "foo\nbar": 1, + "foo\rbar": 2 + }, + "valid": true + }, + { + "description": "quoted quotes", + "data": { + "foo'bar": 1, + "foo\"bar": 2 + }, + "valid": true + }, + { + "description": "CRLF missing dependent", + "data": { + "foo\nbar": 1, + "foo": 2 + }, + "valid": false + }, + { + "description": "quoted quotes missing dependent", + "data": { + "foo\"bar": 2 + }, + "valid": false + } + ] + }, + { + "description": "single schema dependency", + "schema": { + "dependencies": { + "bar": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "integer"} + } + } + } + }, + "tests": [ + { + "description": "valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "no dependency", + "data": {"foo": "quux"}, + "valid": true + }, + { + "description": "wrong type", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "wrong type other", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + }, + { + "description": "wrong type both", + "data": {"foo": "quux", "bar": "quux"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["bar"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "boolean subschemas", + "schema": { + "dependencies": { + "foo": true, + "bar": false + } + }, + "tests": [ + { + "description": "object with property having schema true is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "object with property having schema false is invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "object with both properties is invalid", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": 
true + } + ] + }, + { + "description": "schema dependencies with escaped characters", + "schema": { + "dependencies": { + "foo\tbar": {"minProperties": 4}, + "foo'bar": {"required": ["foo\"bar"]} + } + }, + "tests": [ + { + "description": "quoted tab", + "data": { + "foo\tbar": 1, + "a": 2, + "b": 3, + "c": 4 + }, + "valid": true + }, + { + "description": "quoted quote", + "data": { + "foo'bar": {"foo\"bar": 1} + }, + "valid": false + }, + { + "description": "quoted tab invalid under dependent schema", + "data": { + "foo\tbar": 1, + "a": 2 + }, + "valid": false + }, + { + "description": "quoted quote invalid under dependent schema", + "data": {"foo'bar": 1}, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/ecmascript-regex.json b/vendor/jsonschema/json/tests/draft2019-09/optional/ecmascript-regex.json new file mode 100644 index 00000000..c4886aaa --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/ecmascript-regex.json @@ -0,0 +1,552 @@ +[ + { + "description": "ECMA 262 regex $ does not match trailing newline", + "schema": { + "type": "string", + "pattern": "^abc$" + }, + "tests": [ + { + "description": "matches in Python, but not in ECMA 262", + "data": "abc\\n", + "valid": false + }, + { + "description": "matches", + "data": "abc", + "valid": true + } + ] + }, + { + "description": "ECMA 262 regex converts \\t to horizontal tab", + "schema": { + "type": "string", + "pattern": "^\\t$" + }, + "tests": [ + { + "description": "does not match", + "data": "\\t", + "valid": false + }, + { + "description": "matches", + "data": "\u0009", + "valid": true + } + ] + }, + { + "description": "ECMA 262 regex escapes control codes with \\c and upper letter", + "schema": { + "type": "string", + "pattern": "^\\cC$" + }, + "tests": [ + { + "description": "does not match", + "data": "\\cC", + "valid": false + }, + { + "description": "matches", + "data": "\u0003", + "valid": true + } + ] + }, + { + "description": "ECMA 262 regex escapes control codes with \\c and lower letter", + "schema": { + "type": "string", + "pattern": "^\\cc$" + }, + "tests": [ + { + "description": "does not match", + "data": "\\cc", + "valid": false + }, + { + "description": "matches", + "data": "\u0003", + "valid": true + } + ] + }, + { + "description": "ECMA 262 \\d matches ascii digits only", + "schema": { + "type": "string", + "pattern": "^\\d$" + }, + "tests": [ + { + "description": "ASCII zero matches", + "data": "0", + "valid": true + }, + { + "description": "NKO DIGIT ZERO does not match (unlike e.g. Python)", + "data": "߀", + "valid": false + }, + { + "description": "NKO DIGIT ZERO (as \\u escape) does not match", + "data": "\u07c0", + "valid": false + } + ] + }, + { + "description": "ECMA 262 \\D matches everything but ascii digits", + "schema": { + "type": "string", + "pattern": "^\\D$" + }, + "tests": [ + { + "description": "ASCII zero does not match", + "data": "0", + "valid": false + }, + { + "description": "NKO DIGIT ZERO matches (unlike e.g. Python)", + "data": "߀", + "valid": true + }, + { + "description": "NKO DIGIT ZERO (as \\u escape) matches", + "data": "\u07c0", + "valid": true + } + ] + }, + { + "description": "ECMA 262 \\w matches ascii letters only", + "schema": { + "type": "string", + "pattern": "^\\w$" + }, + "tests": [ + { + "description": "ASCII 'a' matches", + "data": "a", + "valid": true + }, + { + "description": "latin-1 e-acute does not match (unlike e.g. 
Python)", + "data": "é", + "valid": false + } + ] + }, + { + "description": "ECMA 262 \\W matches everything but ascii letters", + "schema": { + "type": "string", + "pattern": "^\\W$" + }, + "tests": [ + { + "description": "ASCII 'a' does not match", + "data": "a", + "valid": false + }, + { + "description": "latin-1 e-acute matches (unlike e.g. Python)", + "data": "é", + "valid": true + } + ] + }, + { + "description": "ECMA 262 \\s matches whitespace", + "schema": { + "type": "string", + "pattern": "^\\s$" + }, + "tests": [ + { + "description": "ASCII space matches", + "data": " ", + "valid": true + }, + { + "description": "Character tabulation matches", + "data": "\t", + "valid": true + }, + { + "description": "Line tabulation matches", + "data": "\u000b", + "valid": true + }, + { + "description": "Form feed matches", + "data": "\u000c", + "valid": true + }, + { + "description": "latin-1 non-breaking-space matches", + "data": "\u00a0", + "valid": true + }, + { + "description": "zero-width whitespace matches", + "data": "\ufeff", + "valid": true + }, + { + "description": "line feed matches (line terminator)", + "data": "\u000a", + "valid": true + }, + { + "description": "paragraph separator matches (line terminator)", + "data": "\u2029", + "valid": true + }, + { + "description": "EM SPACE matches (Space_Separator)", + "data": "\u2003", + "valid": true + }, + { + "description": "Non-whitespace control does not match", + "data": "\u0001", + "valid": false + }, + { + "description": "Non-whitespace does not match", + "data": "\u2013", + "valid": false + } + ] + }, + { + "description": "ECMA 262 \\S matches everything but whitespace", + "schema": { + "type": "string", + "pattern": "^\\S$" + }, + "tests": [ + { + "description": "ASCII space does not match", + "data": " ", + "valid": false + }, + { + "description": "Character tabulation does not match", + "data": "\t", + "valid": false + }, + { + "description": "Line tabulation does not match", + "data": "\u000b", + "valid": false + }, + { + "description": "Form feed does not match", + "data": "\u000c", + "valid": false + }, + { + "description": "latin-1 non-breaking-space does not match", + "data": "\u00a0", + "valid": false + }, + { + "description": "zero-width whitespace does not match", + "data": "\ufeff", + "valid": false + }, + { + "description": "line feed does not match (line terminator)", + "data": "\u000a", + "valid": false + }, + { + "description": "paragraph separator does not match (line terminator)", + "data": "\u2029", + "valid": false + }, + { + "description": "EM SPACE does not match (Space_Separator)", + "data": "\u2003", + "valid": false + }, + { + "description": "Non-whitespace control matches", + "data": "\u0001", + "valid": true + }, + { + "description": "Non-whitespace matches", + "data": "\u2013", + "valid": true + } + ] + }, + { + "description": "patterns always use unicode semantics with pattern", + "schema": { "pattern": "\\p{Letter}cole" }, + "tests": [ + { + "description": "ascii character in json string", + "data": "Les hivers de mon enfance etaient des saisons longues, longues. Nous vivions en trois lieux: l'ecole, l'eglise et la patinoire; mais la vraie vie etait sur la patinoire.", + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. 
Nous vivions en trois lieux: l'école, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": true + }, + { + "description": "unicode character in hex format in string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'\u00e9cole, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": true + }, + { + "description": "unicode matching is case-sensitive", + "data": "LES HIVERS DE MON ENFANCE ÉTAIENT DES SAISONS LONGUES, LONGUES. NOUS VIVIONS EN TROIS LIEUX: L'ÉCOLE, L'ÉGLISE ET LA PATINOIRE; MAIS LA VRAIE VIE ÉTAIT SUR LA PATINOIRE.", + "valid": false + } + ] + }, + { + "description": "\\w in patterns matches [A-Za-z0-9_], not unicode letters", + "schema": { "pattern": "\\wcole" }, + "tests": [ + { + "description": "ascii character in json string", + "data": "Les hivers de mon enfance etaient des saisons longues, longues. Nous vivions en trois lieux: l'ecole, l'eglise et la patinoire; mais la vraie vie etait sur la patinoire.", + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'école, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'\u00e9cole, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "unicode matching is case-sensitive", + "data": "LES HIVERS DE MON ENFANCE ÉTAIENT DES SAISONS LONGUES, LONGUES. NOUS VIVIONS EN TROIS LIEUX: L'ÉCOLE, L'ÉGLISE ET LA PATINOIRE; MAIS LA VRAIE VIE ÉTAIT SUR LA PATINOIRE.", + "valid": false + } + ] + }, + { + "description": "pattern with ASCII ranges", + "schema": { "pattern": "[a-z]cole" }, + "tests": [ + { + "description": "literal unicode character in json string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'école, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'\u00e9cole, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "ascii characters match", + "data": "Les hivers de mon enfance etaient des saisons longues, longues. 
Nous vivions en trois lieux: l'ecole, l'eglise et la patinoire; mais la vraie vie etait sur la patinoire.", + "valid": true + } + ] + }, + { + "description": "\\d in pattern matches [0-9], not unicode digits", + "schema": { "pattern": "^\\d+$" }, + "tests": [ + { + "description": "ascii digits", + "data": "42", + "valid": true + }, + { + "description": "ascii non-digits", + "data": "-%#", + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": "৪২", + "valid": false + } + ] + }, + { + "description": "pattern with non-ASCII digits", + "schema": { "pattern": "^\\p{digit}+$" }, + "tests": [ + { + "description": "ascii digits", + "data": "42", + "valid": true + }, + { + "description": "ascii non-digits", + "data": "-%#", + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": "৪২", + "valid": true + } + ] + }, + { + "description": "patterns always use unicode semantics with patternProperties", + "schema": { + "type": "object", + "patternProperties": { + "\\p{Letter}cole": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii character in json string", + "data": { "l'ecole": "pas de vraie vie" }, + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": { "l'école": "pas de vraie vie" }, + "valid": true + }, + { + "description": "unicode character in hex format in string", + "data": { "l'\u00e9cole": "pas de vraie vie" }, + "valid": true + }, + { + "description": "unicode matching is case-sensitive", + "data": { "L'ÉCOLE": "PAS DE VRAIE VIE" }, + "valid": false + } + ] + }, + { + "description": "\\w in patternProperties matches [A-Za-z0-9_], not unicode letters", + "schema": { + "type": "object", + "patternProperties": { + "\\wcole": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii character in json string", + "data": { "l'ecole": "pas de vraie vie" }, + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": { "l'école": "pas de vraie vie" }, + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": { "l'\u00e9cole": "pas de vraie vie" }, + "valid": false + }, + { + "description": "unicode matching is case-sensitive", + "data": { "L'ÉCOLE": "PAS DE VRAIE VIE" }, + "valid": false + } + ] + }, + { + "description": "patternProperties with ASCII ranges", + "schema": { + "type": "object", + "patternProperties": { + "[a-z]cole": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "literal unicode character in json string", + "data": { "l'école": "pas de vraie vie" }, + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": { "l'\u00e9cole": "pas de vraie vie" }, + "valid": false + }, + { + "description": "ascii characters match", + "data": { "l'ecole": "pas de vraie vie" }, + "valid": true + } + ] + }, + { + "description": "\\d in patternProperties matches [0-9], not unicode digits", + "schema": { + "type": "object", + "patternProperties": { + "^\\d+$": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii digits", + "data": { "42": "life, the universe, and everything" }, + "valid": true + }, + { + "description": "ascii non-digits", + "data": { "-%#": "spending the year dead for tax reasons" }, + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + 
"data": { "৪২": "khajit has wares if you have coin" }, + "valid": false + } + ] + }, + { + "description": "patternProperties with non-ASCII digits", + "schema": { + "type": "object", + "patternProperties": { + "^\\p{digit}+$": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii digits", + "data": { "42": "life, the universe, and everything" }, + "valid": true + }, + { + "description": "ascii non-digits", + "data": { "-%#": "spending the year dead for tax reasons" }, + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": { "৪২": "khajit has wares if you have coin" }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/float-overflow.json b/vendor/jsonschema/json/tests/draft2019-09/optional/float-overflow.json new file mode 100644 index 00000000..52ff9827 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/float-overflow.json @@ -0,0 +1,13 @@ +[ + { + "description": "all integers are multiples of 0.5, if overflow is handled", + "schema": {"type": "integer", "multipleOf": 0.5}, + "tests": [ + { + "description": "valid if optional overflow handling is implemented", + "data": 1e308, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/format/date-time.json b/vendor/jsonschema/json/tests/draft2019-09/optional/format/date-time.json new file mode 100644 index 00000000..09112737 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/format/date-time.json @@ -0,0 +1,133 @@ +[ + { + "description": "validation of date-time strings", + "schema": { "format": "date-time" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid date-time string", + "data": "1963-06-19T08:30:06.283185Z", + "valid": true + }, + { + "description": "a valid date-time string without second fraction", + "data": "1963-06-19T08:30:06Z", + "valid": true + }, + { + "description": "a valid date-time string with plus offset", + "data": "1937-01-01T12:00:27.87+00:20", + "valid": true + }, + { + "description": "a valid date-time string with minus offset", + "data": "1990-12-31T15:59:50.123-08:00", + "valid": true + }, + { + "description": "a valid date-time with a leap second, UTC", + "data": "1998-12-31T23:59:60Z", + "valid": true + }, + { + "description": "a valid date-time with a leap second, with minus offset", + "data": "1998-12-31T15:59:60.123-08:00", + "valid": true + }, + { + "description": "an invalid date-time past leap second, UTC", + "data": "1998-12-31T23:59:61Z", + "valid": false + }, + { + "description": "an invalid date-time with leap second on a wrong minute, UTC", + "data": "1998-12-31T23:58:60Z", + "valid": false + }, + { + "description": "an invalid date-time with leap second on a wrong hour, UTC", + "data": "1998-12-31T22:59:60Z", + "valid": false + }, + { + "description": "an invalid day in date-time string", + "data": "1990-02-31T15:59:59.123-08:00", + "valid": 
false + }, + { + "description": "an invalid offset in date-time string", + "data": "1990-12-31T15:59:59-24:00", + "valid": false + }, + { + "description": "an invalid closing Z after time-zone offset", + "data": "1963-06-19T08:30:06.28123+01:00Z", + "valid": false + }, + { + "description": "an invalid date-time string", + "data": "06/19/1963 08:30:06 PST", + "valid": false + }, + { + "description": "case-insensitive T and Z", + "data": "1963-06-19t08:30:06.283185z", + "valid": true + }, + { + "description": "only RFC3339 not all of ISO 8601 are valid", + "data": "2013-350T01:01:01", + "valid": false + }, + { + "description": "invalid non-padded month dates", + "data": "1963-6-19T08:30:06.283185Z", + "valid": false + }, + { + "description": "invalid non-padded day dates", + "data": "1963-06-1T08:30:06.283185Z", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4) in date portion", + "data": "1963-06-1৪T00:00:00Z", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4) in time portion", + "data": "1963-06-11T0৪:00:00Z", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/format/date.json b/vendor/jsonschema/json/tests/draft2019-09/optional/format/date.json new file mode 100644 index 00000000..06c9ea0f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/format/date.json @@ -0,0 +1,223 @@ +[ + { + "description": "validation of date strings", + "schema": { "format": "date" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid date string", + "data": "1963-06-19", + "valid": true + }, + { + "description": "a valid date string with 31 days in January", + "data": "2020-01-31", + "valid": true + }, + { + "description": "an invalid date string with 32 days in January", + "data": "2020-01-32", + "valid": false + }, + { + "description": "a valid date string with 28 days in February (normal)", + "data": "2021-02-28", + "valid": true + }, + { + "description": "an invalid date string with 29 days in February (normal)", + "data": "2021-02-29", + "valid": false + }, + { + "description": "a valid date string with 29 days in February (leap)", + "data": "2020-02-29", + "valid": true + }, + { + "description": "an invalid date string with 30 days in February (leap)", + "data": "2020-02-30", + "valid": false + }, + { + "description": "a valid date string with 31 days in March", + "data": "2020-03-31", + "valid": true + }, + { + "description": "an invalid date string with 32 days in March", + "data": "2020-03-32", + "valid": false + }, + { + "description": "a valid date string with 30 days in April", + "data": "2020-04-30", + "valid": true + }, + { + "description": "an invalid date string with 31 days in April", + "data": "2020-04-31", + "valid": false + }, + { + "description": "a valid date string with 31 days in May", + "data": "2020-05-31", + "valid": true + }, + { + "description": "an invalid date string with 32 days in May", + "data": "2020-05-32", + 
"valid": false + }, + { + "description": "a valid date string with 30 days in June", + "data": "2020-06-30", + "valid": true + }, + { + "description": "a invalid date string with 31 days in June", + "data": "2020-06-31", + "valid": false + }, + { + "description": "a valid date string with 31 days in July", + "data": "2020-07-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in July", + "data": "2020-07-32", + "valid": false + }, + { + "description": "a valid date string with 31 days in August", + "data": "2020-08-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in August", + "data": "2020-08-32", + "valid": false + }, + { + "description": "a valid date string with 30 days in September", + "data": "2020-09-30", + "valid": true + }, + { + "description": "a invalid date string with 31 days in September", + "data": "2020-09-31", + "valid": false + }, + { + "description": "a valid date string with 31 days in October", + "data": "2020-10-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in October", + "data": "2020-10-32", + "valid": false + }, + { + "description": "a valid date string with 30 days in November", + "data": "2020-11-30", + "valid": true + }, + { + "description": "a invalid date string with 31 days in November", + "data": "2020-11-31", + "valid": false + }, + { + "description": "a valid date string with 31 days in December", + "data": "2020-12-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in December", + "data": "2020-12-32", + "valid": false + }, + { + "description": "a invalid date string with invalid month", + "data": "2020-13-01", + "valid": false + }, + { + "description": "an invalid date string", + "data": "06/19/1963", + "valid": false + }, + { + "description": "only RFC3339 not all of ISO 8601 are valid", + "data": "2013-350", + "valid": false + }, + { + "description": "non-padded month dates are not valid", + "data": "1998-1-20", + "valid": false + }, + { + "description": "non-padded day dates are not valid", + "data": "1998-01-1", + "valid": false + }, + { + "description": "invalid month", + "data": "1998-13-01", + "valid": false + }, + { + "description": "invalid month-day combination", + "data": "1998-04-31", + "valid": false + }, + { + "description": "2021 is not a leap year", + "data": "2021-02-29", + "valid": false + }, + { + "description": "2020 is a leap year", + "data": "2020-02-29", + "valid": true + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4)", + "data": "1963-06-1৪", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/format/duration.json b/vendor/jsonschema/json/tests/draft2019-09/optional/format/duration.json new file mode 100644 index 00000000..a6acdc1c --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/format/duration.json @@ -0,0 +1,128 @@ +[ + { + "description": "validation of duration strings", + "schema": { "format": "duration" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string 
formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid duration string", + "data": "P4DT12H30M5S", + "valid": true + }, + { + "description": "an invalid duration string", + "data": "PT1D", + "valid": false + }, + { + "description": "no elements present", + "data": "P", + "valid": false + }, + { + "description": "no time elements present", + "data": "P1YT", + "valid": false + }, + { + "description": "no date or time elements present", + "data": "PT", + "valid": false + }, + { + "description": "elements out of order", + "data": "P2D1Y", + "valid": false + }, + { + "description": "missing time separator", + "data": "P1D2H", + "valid": false + }, + { + "description": "time element in the date position", + "data": "P2S", + "valid": false + }, + { + "description": "four years duration", + "data": "P4Y", + "valid": true + }, + { + "description": "zero time, in seconds", + "data": "PT0S", + "valid": true + }, + { + "description": "zero time, in days", + "data": "P0D", + "valid": true + }, + { + "description": "one month duration", + "data": "P1M", + "valid": true + }, + { + "description": "one minute duration", + "data": "PT1M", + "valid": true + }, + { + "description": "one and a half days, in hours", + "data": "PT36H", + "valid": true + }, + { + "description": "one and a half days, in days and hours", + "data": "P1DT12H", + "valid": true + }, + { + "description": "two weeks", + "data": "P2W", + "valid": true + }, + { + "description": "weeks cannot be combined with other units", + "data": "P1Y2W", + "valid": false + }, + { + "description": "invalid non-ASCII '২' (a Bengali 2)", + "data": "P২Y", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/format/email.json b/vendor/jsonschema/json/tests/draft2019-09/optional/format/email.json new file mode 100644 index 00000000..d6761a46 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/format/email.json @@ -0,0 +1,83 @@ +[ + { + "description": "validation of e-mail addresses", + "schema": { "format": "email" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid e-mail address", + "data": "joe.bloggs@example.com", + "valid": true + }, + { + "description": "an invalid e-mail address", + "data": "2962", + "valid": false + }, + { + "description": "tilde in local part is valid", + "data": "te~st@example.com", + "valid": true + }, + { + "description": "tilde before local part is valid", + "data": "~test@example.com", + "valid": true + }, + { + "description": "tilde after local part is valid", + "data": "test~@example.com", + "valid": true + }, + { + "description": "dot before local part is not valid", + "data": ".test@example.com", + "valid": false + }, + { + "description": "dot after local part is not valid", + "data": "test.@example.com", + "valid": false + }, + { + "description": "two separated dots inside local part are valid", + "data": "te.s.t@example.com", + "valid": true + }, + { + "description": "two 
subsequent dots inside local part are not valid", + "data": "te..st@example.com", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/format/hostname.json b/vendor/jsonschema/json/tests/draft2019-09/optional/format/hostname.json new file mode 100644 index 00000000..8a67fda8 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/format/hostname.json @@ -0,0 +1,98 @@ +[ + { + "description": "validation of host names", + "schema": { "format": "hostname" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid host name", + "data": "www.example.com", + "valid": true + }, + { + "description": "a valid punycoded IDN hostname", + "data": "xn--4gbwdl.xn--wgbh1c", + "valid": true + }, + { + "description": "a host name starting with an illegal character", + "data": "-a-host-name-that-starts-with--", + "valid": false + }, + { + "description": "a host name containing illegal characters", + "data": "not_a_valid_host_name", + "valid": false + }, + { + "description": "a host name with a component too long", + "data": "a-vvvvvvvvvvvvvvvveeeeeeeeeeeeeeeerrrrrrrrrrrrrrrryyyyyyyyyyyyyyyy-long-host-name-component", + "valid": false + }, + { + "description": "starts with hyphen", + "data": "-hostname", + "valid": false + }, + { + "description": "ends with hyphen", + "data": "hostname-", + "valid": false + }, + { + "description": "starts with underscore", + "data": "_hostname", + "valid": false + }, + { + "description": "ends with underscore", + "data": "hostname_", + "valid": false + }, + { + "description": "contains underscore", + "data": "host_name", + "valid": false + }, + { + "description": "maximum label length", + "data": "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.com", + "valid": true + }, + { + "description": "exceeds maximum label length", + "data": "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijkl.com", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/format/idn-email.json b/vendor/jsonschema/json/tests/draft2019-09/optional/format/idn-email.json new file mode 100644 index 00000000..6e213745 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/format/idn-email.json @@ -0,0 +1,58 @@ +[ + { + "description": "validation of internationalized e-mail addresses", + "schema": { "format": "idn-email" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid idn
e-mail (example@example.test in Hangul)", + "data": "실례@실례.테스트", + "valid": true + }, + { + "description": "an invalid idn e-mail address", + "data": "2962", + "valid": false + }, + { + "description": "a valid e-mail address", + "data": "joe.bloggs@example.com", + "valid": true + }, + { + "description": "an invalid e-mail address", + "data": "2962", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/format/idn-hostname.json b/vendor/jsonschema/json/tests/draft2019-09/optional/format/idn-hostname.json new file mode 100644 index 00000000..6c8f86a3 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/format/idn-hostname.json @@ -0,0 +1,304 @@ +[ + { + "description": "validation of internationalized host names", + "schema": { "format": "idn-hostname" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid host name (example.test in Hangul)", + "data": "실례.테스트", + "valid": true + }, + { + "description": "illegal first char U+302E Hangul single dot tone mark", + "data": "〮실례.테스트", + "valid": false + }, + { + "description": "contains illegal char U+302E Hangul single dot tone mark", + "data": "실〮례.테스트", + "valid": false + }, + { + "description": "a host name with a component too long", + "data": "실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실례례테스트례례례례례례례례례례례례례례례례례테스트례례례례례례례례례례례례례례례례례례례테스트례례례례례례례례례례례례테스트례례실례.테스트", + "valid": false + }, + { + "description": "invalid label, correct Punycode", + "comment": "https://tools.ietf.org/html/rfc5890#section-2.3.2.1 https://tools.ietf.org/html/rfc5891#section-4.4 https://tools.ietf.org/html/rfc3492#section-7.1", + "data": "-> $1.00 <--", + "valid": false + }, + { + "description": "valid Chinese Punycode", + "comment": "https://tools.ietf.org/html/rfc5890#section-2.3.2.1 https://tools.ietf.org/html/rfc5891#section-4.4", + "data": "xn--ihqwcrb4cv8a8dqg056pqjye", + "valid": true + }, + { + "description": "invalid Punycode", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.4 https://tools.ietf.org/html/rfc5890#section-2.3.2.1", + "data": "xn--X", + "valid": false + }, + { + "description": "U-label contains \"--\" in the 3rd and 4th position", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.1 https://tools.ietf.org/html/rfc5890#section-2.3.2.1", + "data": "XN--aa---o47jg78q", + "valid": false + }, + { + "description": "U-label starts with a dash", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.1", + "data": "-hello", + "valid": false + }, + { + "description": "U-label ends with a dash", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.1", + "data": "hello-", + "valid": false + }, + { + "description": "U-label starts and ends with a dash", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.1", + "data": "-hello-", + "valid": false + }, + { + "description": "Begins with a Spacing Combining Mark", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.2", + "data": 
"\u0903hello", + "valid": false + }, + { + "description": "Begins with a Nonspacing Mark", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.2", + "data": "\u0300hello", + "valid": false + }, + { + "description": "Begins with an Enclosing Mark", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.2", + "data": "\u0488hello", + "valid": false + }, + { + "description": "Exceptions that are PVALID, left-to-right chars", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.2 https://tools.ietf.org/html/rfc5892#section-2.6", + "data": "\u00df\u03c2\u0f0b\u3007", + "valid": true + }, + { + "description": "Exceptions that are PVALID, right-to-left chars", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.2 https://tools.ietf.org/html/rfc5892#section-2.6", + "data": "\u06fd\u06fe", + "valid": true + }, + { + "description": "Exceptions that are DISALLOWED, right-to-left chars", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.2 https://tools.ietf.org/html/rfc5892#section-2.6", + "data": "\u0640\u07fa", + "valid": false + }, + { + "description": "Exceptions that are DISALLOWED, left-to-right chars", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.2 https://tools.ietf.org/html/rfc5892#section-2.6 Note: The two combining marks (U+302E and U+302F) are in the middle and not at the start", + "data": "\u3031\u3032\u3033\u3034\u3035\u302e\u302f\u303b", + "valid": false + }, + { + "description": "MIDDLE DOT with no preceding 'l'", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3", + "data": "a\u00b7l", + "valid": false + }, + { + "description": "MIDDLE DOT with nothing preceding", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3", + "data": "\u00b7l", + "valid": false + }, + { + "description": "MIDDLE DOT with no following 'l'", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3", + "data": "l\u00b7a", + "valid": false + }, + { + "description": "MIDDLE DOT with nothing following", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3", + "data": "l\u00b7", + "valid": false + }, + { + "description": "MIDDLE DOT with surrounding 'l's", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3", + "data": "l\u00b7l", + "valid": true + }, + { + "description": "Greek KERAIA not followed by Greek", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.4", + "data": "\u03b1\u0375S", + "valid": false + }, + { + "description": "Greek KERAIA not followed by anything", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.4", + "data": "\u03b1\u0375", + "valid": false + }, + { + "description": "Greek KERAIA followed by Greek", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.4", + "data": "\u03b1\u0375\u03b2", + "valid": true + }, + { + "description": "Hebrew GERESH not preceded by Hebrew", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.5", + "data": "A\u05f3\u05d1", + "valid": false + }, + { + "description": "Hebrew GERESH not preceded by anything", + "comment": 
"https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.5", + "data": "\u05f3\u05d1", + "valid": false + }, + { + "description": "Hebrew GERESH preceded by Hebrew", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.5", + "data": "\u05d0\u05f3\u05d1", + "valid": true + }, + { + "description": "Hebrew GERSHAYIM not preceded by Hebrew", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.6", + "data": "A\u05f4\u05d1", + "valid": false + }, + { + "description": "Hebrew GERSHAYIM not preceded by anything", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.6", + "data": "\u05f4\u05d1", + "valid": false + }, + { + "description": "Hebrew GERSHAYIM preceded by Hebrew", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.6", + "data": "\u05d0\u05f4\u05d1", + "valid": true + }, + { + "description": "KATAKANA MIDDLE DOT with no Hiragana, Katakana, or Han", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7", + "data": "def\u30fbabc", + "valid": false + }, + { + "description": "KATAKANA MIDDLE DOT with no other characters", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7", + "data": "\u30fb", + "valid": false + }, + { + "description": "KATAKANA MIDDLE DOT with Hiragana", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7", + "data": "\u30fb\u3041", + "valid": true + }, + { + "description": "KATAKANA MIDDLE DOT with Katakana", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7", + "data": "\u30fb\u30a1", + "valid": true + }, + { + "description": "KATAKANA MIDDLE DOT with Han", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7", + "data": "\u30fb\u4e08", + "valid": true + }, + { + "description": "Arabic-Indic digits mixed with Extended Arabic-Indic digits", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.8", + "data": "\u0660\u06f0", + "valid": false + }, + { + "description": "Arabic-Indic digits not mixed with Extended Arabic-Indic digits", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.8", + "data": "\u0628\u0660\u0628", + "valid": true + }, + { + "description": "Extended Arabic-Indic digits not mixed with Arabic-Indic digits", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.9", + "data": "\u06f00", + "valid": true + }, + { + "description": "ZERO WIDTH JOINER not preceded by Virama", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.2 https://www.unicode.org/review/pr-37.pdf", + "data": "\u0915\u200d\u0937", + "valid": false + }, + { + "description": "ZERO WIDTH JOINER not preceded by anything", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.2 https://www.unicode.org/review/pr-37.pdf", + "data": "\u200d\u0937", + "valid": false + }, + { + "description": "ZERO WIDTH JOINER 
preceded by Virama", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.2 https://www.unicode.org/review/pr-37.pdf", + "data": "\u0915\u094d\u200d\u0937", + "valid": true + }, + { + "description": "ZERO WIDTH NON-JOINER preceded by Virama", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.1", + "data": "\u0915\u094d\u200c\u0937", + "valid": true + }, + { + "description": "ZERO WIDTH NON-JOINER not preceded by Virama but matches regexp", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.1 https://www.w3.org/TR/alreq/#h_disjoining_enforcement", + "data": "\u0628\u064a\u200c\u0628\u064a", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/format/ipv4.json b/vendor/jsonschema/json/tests/draft2019-09/optional/format/ipv4.json new file mode 100644 index 00000000..4706581f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/format/ipv4.json @@ -0,0 +1,84 @@ +[ + { + "description": "validation of IP addresses", + "schema": { "format": "ipv4" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid IP address", + "data": "192.168.0.1", + "valid": true + }, + { + "description": "an IP address with too many components", + "data": "127.0.0.0.1", + "valid": false + }, + { + "description": "an IP address with out-of-range values", + "data": "256.256.256.256", + "valid": false + }, + { + "description": "an IP address without 4 components", + "data": "127.0", + "valid": false + }, + { + "description": "an IP address as an integer", + "data": "0x7f000001", + "valid": false + }, + { + "description": "an IP address as an integer (decimal)", + "data": "2130706433", + "valid": false + }, + { + "description": "invalid leading zeroes, as they are treated as octals", + "comment": "see https://sick.codes/universal-netmask-npm-package-used-by-270000-projects-vulnerable-to-octal-input-data-server-side-request-forgery-remote-file-inclusion-local-file-inclusion-and-more-cve-2021-28918/", + "data": "087.10.0.1", + "valid": false + }, + { + "description": "value without leading zero is valid", + "data": "87.10.0.1", + "valid": true + }, + { + "description": "invalid non-ASCII '২' (a Bengali 2)", + "data": "1২7.0.0.1", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/format/ipv6.json b/vendor/jsonschema/json/tests/draft2019-09/optional/format/ipv6.json new file mode 100644 index 00000000..94368f2a --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/format/ipv6.json @@ -0,0 +1,208 @@ +[ + { + "description": "validation of IPv6 addresses", + "schema": { "format": "ipv6" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, 
+ { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid IPv6 address", + "data": "::1", + "valid": true + }, + { + "description": "an IPv6 address with out-of-range values", + "data": "12345::", + "valid": false + }, + { + "description": "trailing 4 hex symbols is valid", + "data": "::abef", + "valid": true + }, + { + "description": "trailing 5 hex symbols is invalid", + "data": "::abcef", + "valid": false + }, + { + "description": "an IPv6 address with too many components", + "data": "1:1:1:1:1:1:1:1:1:1:1:1:1:1:1:1", + "valid": false + }, + { + "description": "an IPv6 address containing illegal characters", + "data": "::laptop", + "valid": false + }, + { + "description": "no digits is valid", + "data": "::", + "valid": true + }, + { + "description": "leading colons is valid", + "data": "::42:ff:1", + "valid": true + }, + { + "description": "trailing colons is valid", + "data": "d6::", + "valid": true + }, + { + "description": "missing leading octet is invalid", + "data": ":2:3:4:5:6:7:8", + "valid": false + }, + { + "description": "missing trailing octet is invalid", + "data": "1:2:3:4:5:6:7:", + "valid": false + }, + { + "description": "missing leading octet with omitted octets later", + "data": ":2:3:4::8", + "valid": false + }, + { + "description": "single set of double colons in the middle is valid", + "data": "1:d6::42", + "valid": true + }, + { + "description": "two sets of double colons is invalid", + "data": "1::d6::42", + "valid": false + }, + { + "description": "mixed format with the ipv4 section as decimal octets", + "data": "1::d6:192.168.0.1", + "valid": true + }, + { + "description": "mixed format with double colons between the sections", + "data": "1:2::192.168.0.1", + "valid": true + }, + { + "description": "mixed format with ipv4 section with octet out of range", + "data": "1::2:192.168.256.1", + "valid": false + }, + { + "description": "mixed format with ipv4 section with a hex octet", + "data": "1::2:192.168.ff.1", + "valid": false + }, + { + "description": "mixed format with leading double colons (ipv4-mapped ipv6 address)", + "data": "::ffff:192.168.0.1", + "valid": true + }, + { + "description": "triple colons is invalid", + "data": "1:2:3:4:5:::8", + "valid": false + }, + { + "description": "8 octets", + "data": "1:2:3:4:5:6:7:8", + "valid": true + }, + { + "description": "insufficient octets without double colons", + "data": "1:2:3:4:5:6:7", + "valid": false + }, + { + "description": "no colons is invalid", + "data": "1", + "valid": false + }, + { + "description": "ipv4 is not ipv6", + "data": "127.0.0.1", + "valid": false + }, + { + "description": "ipv4 segment must have 4 octets", + "data": "1:2:3:4:1.2.3", + "valid": false + }, + { + "description": "leading whitespace is invalid", + "data": " ::1", + "valid": false + }, + { + "description": "trailing whitespace is invalid", + "data": "::1 ", + "valid": false + }, + { + "description": "netmask is not a part of ipv6 address", + "data": "fe80::/64", + "valid": false + }, + { + "description": "zone id is not a part of ipv6 address", + "data": "fe80::a%eth1", + "valid": false + }, + { + "description": "a long valid ipv6", + "data": 
"1000:1000:1000:1000:1000:1000:255.255.255.255", + "valid": true + }, + { + "description": "a long invalid ipv6, below length limit, first", + "data": "100:100:100:100:100:100:255.255.255.255.255", + "valid": false + }, + { + "description": "a long invalid ipv6, below length limit, second", + "data": "100:100:100:100:100:100:100:255.255.255.255", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4)", + "data": "1:2:3:4:5:6:7:৪", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4) in the IPv4 portion", + "data": "1:2::192.16৪.0.1", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/format/iri-reference.json b/vendor/jsonschema/json/tests/draft2019-09/optional/format/iri-reference.json new file mode 100644 index 00000000..c6b4c22a --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/format/iri-reference.json @@ -0,0 +1,73 @@ +[ + { + "description": "validation of IRI References", + "schema": { "format": "iri-reference" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid IRI", + "data": "http://ƒøø.ßår/?∂éœ=πîx#πîüx", + "valid": true + }, + { + "description": "a valid protocol-relative IRI Reference", + "data": "//ƒøø.ßår/?∂éœ=πîx#πîüx", + "valid": true + }, + { + "description": "a valid relative IRI Reference", + "data": "/âππ", + "valid": true + }, + { + "description": "an invalid IRI Reference", + "data": "\\\\WINDOWS\\filëßåré", + "valid": false + }, + { + "description": "a valid IRI Reference", + "data": "âππ", + "valid": true + }, + { + "description": "a valid IRI fragment", + "data": "#Æ’rägmênt", + "valid": true + }, + { + "description": "an invalid IRI fragment", + "data": "#Æ’räg\\mênt", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/format/iri.json b/vendor/jsonschema/json/tests/draft2019-09/optional/format/iri.json new file mode 100644 index 00000000..a0d12aed --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/format/iri.json @@ -0,0 +1,83 @@ +[ + { + "description": "validation of IRIs", + "schema": { "format": "iri" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid IRI with anchor tag", + "data": "http://ƒøø.ßår/?∂éœ=πîx#πîüx", + "valid": true + }, + { + "description": "a valid IRI with anchor tag and parentheses", + "data": "http://ƒøø.com/blah_(wîkïpédiÃ¥)_blah#ßité-1", + "valid": true + }, + { + 
"description": "a valid IRI with URL-encoded stuff", + "data": "http://ƒøø.ßår/?q=Test%20URL-encoded%20stuff", + "valid": true + }, + { + "description": "a valid IRI with many special characters", + "data": "http://-.~_!$&'()*+,;=:%40:80%2f::::::@example.com", + "valid": true + }, + { + "description": "a valid IRI based on IPv6", + "data": "http://[2001:0db8:85a3:0000:0000:8a2e:0370:7334]", + "valid": true + }, + { + "description": "an invalid IRI based on IPv6", + "data": "http://2001:0db8:85a3:0000:0000:8a2e:0370:7334", + "valid": false + }, + { + "description": "an invalid relative IRI Reference", + "data": "/abc", + "valid": false + }, + { + "description": "an invalid IRI", + "data": "\\\\WINDOWS\\filëßåré", + "valid": false + }, + { + "description": "an invalid IRI though valid IRI reference", + "data": "âππ", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/format/json-pointer.json b/vendor/jsonschema/json/tests/draft2019-09/optional/format/json-pointer.json new file mode 100644 index 00000000..a0346b57 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/format/json-pointer.json @@ -0,0 +1,198 @@ +[ + { + "description": "validation of JSON-pointers (JSON String Representation)", + "schema": { "format": "json-pointer" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid JSON-pointer", + "data": "/foo/bar~0/baz~1/%a", + "valid": true + }, + { + "description": "not a valid JSON-pointer (~ not escaped)", + "data": "/foo/bar~", + "valid": false + }, + { + "description": "valid JSON-pointer with empty segment", + "data": "/foo//bar", + "valid": true + }, + { + "description": "valid JSON-pointer with the last empty segment", + "data": "/foo/bar/", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #1", + "data": "", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #2", + "data": "/foo", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #3", + "data": "/foo/0", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #4", + "data": "/", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #5", + "data": "/a~1b", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #6", + "data": "/c%d", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #7", + "data": "/e^f", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #8", + "data": "/g|h", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #9", + "data": "/i\\j", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #10", + "data": "/k\"l", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #11", + "data": "/ ", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 
6901 #12", + "data": "/m~0n", + "valid": true + }, + { + "description": "valid JSON-pointer used for adding to the last array position", + "data": "/foo/-", + "valid": true + }, + { + "description": "valid JSON-pointer (- used as object member name)", + "data": "/foo/-/bar", + "valid": true + }, + { + "description": "valid JSON-pointer (multiple escaped characters)", + "data": "/~1~0~0~1~1", + "valid": true + }, + { + "description": "valid JSON-pointer (escaped with fraction part) #1", + "data": "/~1.1", + "valid": true + }, + { + "description": "valid JSON-pointer (escaped with fraction part) #2", + "data": "/~0.1", + "valid": true + }, + { + "description": "not a valid JSON-pointer (URI Fragment Identifier) #1", + "data": "#", + "valid": false + }, + { + "description": "not a valid JSON-pointer (URI Fragment Identifier) #2", + "data": "#/", + "valid": false + }, + { + "description": "not a valid JSON-pointer (URI Fragment Identifier) #3", + "data": "#a", + "valid": false + }, + { + "description": "not a valid JSON-pointer (some escaped, but not all) #1", + "data": "/~0~", + "valid": false + }, + { + "description": "not a valid JSON-pointer (some escaped, but not all) #2", + "data": "/~0/~", + "valid": false + }, + { + "description": "not a valid JSON-pointer (wrong escape character) #1", + "data": "/~2", + "valid": false + }, + { + "description": "not a valid JSON-pointer (wrong escape character) #2", + "data": "/~-1", + "valid": false + }, + { + "description": "not a valid JSON-pointer (multiple characters not escaped)", + "data": "/~~", + "valid": false + }, + { + "description": "not a valid JSON-pointer (neither empty nor starting with /) #1", + "data": "a", + "valid": false + }, + { + "description": "not a valid JSON-pointer (neither empty nor starting with /) #2", + "data": "0", + "valid": false + }, + { + "description": "not a valid JSON-pointer (neither empty nor starting with /) #3", + "data": "a/a", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/format/regex.json b/vendor/jsonschema/json/tests/draft2019-09/optional/format/regex.json new file mode 100644 index 00000000..34491770 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/format/regex.json @@ -0,0 +1,48 @@ +[ + { + "description": "validation of regular expressions", + "schema": { "format": "regex" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid regular expression", + "data": "([abc])+\\s+$", + "valid": true + }, + { + "description": "a regular expression with unclosed parens is invalid", + "data": "^(abc]", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/format/relative-json-pointer.json b/vendor/jsonschema/json/tests/draft2019-09/optional/format/relative-json-pointer.json new file mode 100644 index 00000000..9309986f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/format/relative-json-pointer.json @@ -0,0 +1,83 @@ +[ + { + "description": "validation
of Relative JSON Pointers (RJP)", + "schema": { "format": "relative-json-pointer" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid upwards RJP", + "data": "1", + "valid": true + }, + { + "description": "a valid downwards RJP", + "data": "0/foo/bar", + "valid": true + }, + { + "description": "a valid up and then down RJP, with array index", + "data": "2/0/baz/1/zip", + "valid": true + }, + { + "description": "a valid RJP taking the member or index name", + "data": "0#", + "valid": true + }, + { + "description": "an invalid RJP that is a valid JSON Pointer", + "data": "/foo/bar", + "valid": false + }, + { + "description": "negative prefix", + "data": "-1/foo/bar", + "valid": false + }, + { + "description": "## is not a valid json-pointer", + "data": "0##", + "valid": false + }, + { + "description": "zero cannot be followed by other digits, plus json-pointer", + "data": "01/a", + "valid": false + }, + { + "description": "zero cannot be followed by other digits, plus octothorpe", + "data": "01#", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/format/time.json b/vendor/jsonschema/json/tests/draft2019-09/optional/format/time.json new file mode 100644 index 00000000..31425871 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/format/time.json @@ -0,0 +1,198 @@ +[ + { + "description": "validation of time strings", + "schema": { "format": "time" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid time string", + "data": "08:30:06Z", + "valid": true + }, + { + "description": "a valid time string with leap second, Zulu", + "data": "23:59:60Z", + "valid": true + }, + { + "description": "invalid leap second, Zulu (wrong hour)", + "data": "22:59:60Z", + "valid": false + }, + { + "description": "invalid leap second, Zulu (wrong minute)", + "data": "23:58:60Z", + "valid": false + }, + { + "description": "valid leap second, zero time-offset", + "data": "23:59:60+00:00", + "valid": true + }, + { + "description": "invalid leap second, zero time-offset (wrong hour)", + "data": "22:59:60+00:00", + "valid": false + }, + { + "description": "invalid leap second, zero time-offset (wrong minute)", + "data": "23:58:60+00:00", + "valid": false + }, + { + "description": "valid leap second, positive time-offset", + "data": "01:29:60+01:30", + "valid": true + }, + { + "description": "valid leap second, large positive time-offset", + "data": 
"23:29:60+23:30", + "valid": true + }, + { + "description": "invalid leap second, positive time-offset (wrong hour)", + "data": "23:59:60+01:00", + "valid": false + }, + { + "description": "invalid leap second, positive time-offset (wrong minute)", + "data": "23:59:60+00:30", + "valid": false + }, + { + "description": "valid leap second, negative time-offset", + "data": "15:59:60-08:00", + "valid": true + }, + { + "description": "valid leap second, large negative time-offset", + "data": "00:29:60-23:30", + "valid": true + }, + { + "description": "invalid leap second, negative time-offset (wrong hour)", + "data": "23:59:60-01:00", + "valid": false + }, + { + "description": "invalid leap second, negative time-offset (wrong minute)", + "data": "23:59:60-00:30", + "valid": false + }, + { + "description": "a valid time string with second fraction", + "data": "23:20:50.52Z", + "valid": true + }, + { + "description": "a valid time string with precise second fraction", + "data": "08:30:06.283185Z", + "valid": true + }, + { + "description": "a valid time string with plus offset", + "data": "08:30:06+00:20", + "valid": true + }, + { + "description": "a valid time string with minus offset", + "data": "08:30:06-08:00", + "valid": true + }, + { + "description": "a valid time string with case-insensitive Z", + "data": "08:30:06z", + "valid": true + }, + { + "description": "an invalid time string with invalid hour", + "data": "24:00:00Z", + "valid": false + }, + { + "description": "an invalid time string with invalid minute", + "data": "00:60:00Z", + "valid": false + }, + { + "description": "an invalid time string with invalid second", + "data": "00:00:61Z", + "valid": false + }, + { + "description": "an invalid time string with invalid leap second (wrong hour)", + "data": "22:59:60Z", + "valid": false + }, + { + "description": "an invalid time string with invalid leap second (wrong minute)", + "data": "23:58:60Z", + "valid": false + }, + { + "description": "an invalid time string with invalid time numoffset hour", + "data": "01:02:03+24:00", + "valid": false + }, + { + "description": "an invalid time string with invalid time numoffset minute", + "data": "01:02:03+00:60", + "valid": false + }, + { + "description": "an invalid time string with invalid time with both Z and numoffset", + "data": "01:02:03Z+00:30", + "valid": false + }, + { + "description": "an invalid offset indicator", + "data": "08:30:06 PST", + "valid": false + }, + { + "description": "only RFC3339 not all of ISO 8601 are valid", + "data": "01:01:01,1111", + "valid": false + }, + { + "description": "no time offset", + "data": "12:00:00", + "valid": false + }, + { + "description": "invalid non-ASCII '২' (a Bengali 2)", + "data": "1২:00:00Z", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/format/unknown.json b/vendor/jsonschema/json/tests/draft2019-09/optional/format/unknown.json new file mode 100644 index 00000000..12339ae5 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/format/unknown.json @@ -0,0 +1,43 @@ +[ + { + "description": "unknown format", + "schema": { "format": "unknown" }, + "tests": [ + { + "description": "unknown formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "unknown formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "unknown formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "unknown formats ignore arrays", + "data": [], + "valid": true + }, + { + 
"description": "unknown formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "unknown formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "unknown formats ignore strings", + "data": "string", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/format/uri-reference.json b/vendor/jsonschema/json/tests/draft2019-09/optional/format/uri-reference.json new file mode 100644 index 00000000..7cdf228d --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/format/uri-reference.json @@ -0,0 +1,73 @@ +[ + { + "description": "validation of URI References", + "schema": { "format": "uri-reference" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid URI", + "data": "http://foo.bar/?baz=qux#quux", + "valid": true + }, + { + "description": "a valid protocol-relative URI Reference", + "data": "//foo.bar/?baz=qux#quux", + "valid": true + }, + { + "description": "a valid relative URI Reference", + "data": "/abc", + "valid": true + }, + { + "description": "an invalid URI Reference", + "data": "\\\\WINDOWS\\fileshare", + "valid": false + }, + { + "description": "a valid URI Reference", + "data": "abc", + "valid": true + }, + { + "description": "a valid URI fragment", + "data": "#fragment", + "valid": true + }, + { + "description": "an invalid URI fragment", + "data": "#frag\\ment", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/format/uri-template.json b/vendor/jsonschema/json/tests/draft2019-09/optional/format/uri-template.json new file mode 100644 index 00000000..df355c55 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/format/uri-template.json @@ -0,0 +1,58 @@ +[ + { + "description": "format: uri-template", + "schema": { "format": "uri-template" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid uri-template", + "data": "http://example.com/dictionary/{term:1}/{term}", + "valid": true + }, + { + "description": "an invalid uri-template", + "data": "http://example.com/dictionary/{term:1}/{term", + "valid": false + }, + { + "description": "a valid uri-template without variables", + "data": "http://example.com/dictionary", + "valid": true + }, + { + "description": "a valid relative uri-template", + "data": "dictionary/{term:1}/{term}", + "valid": true + } + ] + } +] diff --git 
a/vendor/jsonschema/json/tests/draft2019-09/optional/format/uri.json b/vendor/jsonschema/json/tests/draft2019-09/optional/format/uri.json new file mode 100644 index 00000000..792d71a0 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/format/uri.json @@ -0,0 +1,108 @@ +[ + { + "description": "validation of URIs", + "schema": { "format": "uri" }, + "tests": [ + { + "description": "a valid URL with anchor tag", + "data": "http://foo.bar/?baz=qux#quux", + "valid": true + }, + { + "description": "a valid URL with anchor tag and parentheses", + "data": "http://foo.com/blah_(wikipedia)_blah#cite-1", + "valid": true + }, + { + "description": "a valid URL with URL-encoded stuff", + "data": "http://foo.bar/?q=Test%20URL-encoded%20stuff", + "valid": true + }, + { + "description": "a valid puny-coded URL", + "data": "http://xn--nw2a.xn--j6w193g/", + "valid": true + }, + { + "description": "a valid URL with many special characters", + "data": "http://-.~_!$&'()*+,;=:%40:80%2f::::::@example.com", + "valid": true + }, + { + "description": "a valid URL based on IPv4", + "data": "http://223.255.255.254", + "valid": true + }, + { + "description": "a valid URL with ftp scheme", + "data": "ftp://ftp.is.co.za/rfc/rfc1808.txt", + "valid": true + }, + { + "description": "a valid URL for a simple text file", + "data": "http://www.ietf.org/rfc/rfc2396.txt", + "valid": true + }, + { + "description": "a valid URL", + "data": "ldap://[2001:db8::7]/c=GB?objectClass?one", + "valid": true + }, + { + "description": "a valid mailto URI", + "data": "mailto:John.Doe@example.com", + "valid": true + }, + { + "description": "a valid newsgroup URI", + "data": "news:comp.infosystems.www.servers.unix", + "valid": true + }, + { + "description": "a valid tel URI", + "data": "tel:+1-816-555-1212", + "valid": true + }, + { + "description": "a valid URN", + "data": "urn:oasis:names:specification:docbook:dtd:xml:4.1.2", + "valid": true + }, + { + "description": "an invalid protocol-relative URI Reference", + "data": "//foo.bar/?baz=qux#quux", + "valid": false + }, + { + "description": "an invalid relative URI Reference", + "data": "/abc", + "valid": false + }, + { + "description": "an invalid URI", + "data": "\\\\WINDOWS\\fileshare", + "valid": false + }, + { + "description": "an invalid URI though valid URI reference", + "data": "abc", + "valid": false + }, + { + "description": "an invalid URI with spaces", + "data": "http:// shouldfail.com", + "valid": false + }, + { + "description": "an invalid URI with spaces and missing scheme", + "data": ":// should fail", + "valid": false + }, + { + "description": "an invalid URI with comma in scheme", + "data": "bar,baz:foo", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/format/uuid.json b/vendor/jsonschema/json/tests/draft2019-09/optional/format/uuid.json new file mode 100644 index 00000000..e54cbc0f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/format/uuid.json @@ -0,0 +1,85 @@ +[ + { + "description": "uuid format", + "schema": { + "format": "uuid" + }, + "tests": [ + { + "description": "all upper-case", + "data": "2EB8AA08-AA98-11EA-B4AA-73B441D16380", + "valid": true + }, + { + "description": "all lower-case", + "data": "2eb8aa08-aa98-11ea-b4aa-73b441d16380", + "valid": true + }, + { + "description": "mixed case", + "data": "2eb8aa08-AA98-11ea-B4Aa-73B441D16380", + "valid": true + }, + { + "description": "all zeroes is valid", + "data": "00000000-0000-0000-0000-000000000000", + "valid": true +
}, + { + "description": "wrong length", + "data": "2eb8aa08-aa98-11ea-b4aa-73b441d1638", + "valid": false + }, + { + "description": "missing section", + "data": "2eb8aa08-aa98-11ea-73b441d16380", + "valid": false + }, + { + "description": "bad characters (not hex)", + "data": "2eb8aa08-aa98-11ea-b4ga-73b441d16380", + "valid": false + }, + { + "description": "no dashes", + "data": "2eb8aa08aa9811eab4aa73b441d16380", + "valid": false + }, + { + "description": "too few dashes", + "data": "2eb8aa08aa98-11ea-b4aa73b441d16380", + "valid": false + }, + { + "description": "too many dashes", + "data": "2eb8-aa08-aa98-11ea-b4aa73b44-1d16380", + "valid": false + }, + { + "description": "dashes in the wrong spot", + "data": "2eb8aa08aa9811eab4aa73b441d16380----", + "valid": false + }, + { + "description": "valid version 4", + "data": "98d80576-482e-427f-8434-7f86890ab222", + "valid": true + }, + { + "description": "valid version 5", + "data": "99c17cbb-656f-564a-940f-1a4568f03487", + "valid": true + }, + { + "description": "hypothetical version 6", + "data": "99c17cbb-656f-664a-940f-1a4568f03487", + "valid": true + }, + { + "description": "hypothetical version 15", + "data": "99c17cbb-656f-f64a-940f-1a4568f03487", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/non-bmp-regex.json b/vendor/jsonschema/json/tests/draft2019-09/optional/non-bmp-regex.json new file mode 100644 index 00000000..dd67af2b --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/non-bmp-regex.json @@ -0,0 +1,82 @@ +[ + { + "description": "Proper UTF-16 surrogate pair handling: pattern", + "comment": "Optional because .Net doesn't correctly handle 32-bit Unicode characters", + "schema": { "pattern": "^🐲*$" }, + "tests": [ + { + "description": "matches empty", + "data": "", + "valid": true + }, + { + "description": "matches single", + "data": "🐲", + "valid": true + }, + { + "description": "matches two", + "data": "🐲🐲", + "valid": true + }, + { + "description": "doesn't match one", + "data": "🐉", + "valid": false + }, + { + "description": "doesn't match two", + "data": "🐉🐉", + "valid": false + }, + { + "description": "doesn't match one ASCII", + "data": "D", + "valid": false + }, + { + "description": "doesn't match two ASCII", + "data": "DD", + "valid": false + } + ] + }, + { + "description": "Proper UTF-16 surrogate pair handling: patternProperties", + "comment": "Optional because .Net doesn't correctly handle 32-bit Unicode characters", + "schema": { + "patternProperties": { + "^🐲*$": { + "type": "integer" + } + } + }, + "tests": [ + { + "description": "matches empty", + "data": { "": 1 }, + "valid": true + }, + { + "description": "matches single", + "data": { "🐲": 1 }, + "valid": true + }, + { + "description": "matches two", + "data": { "🐲🐲": 1 }, + "valid": true + }, + { + "description": "doesn't match one", + "data": { "🐲": "hello" }, + "valid": false + }, + { + "description": "doesn't match two", + "data": { "🐲🐲": "hello" }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/optional/refOfUnknownKeyword.json b/vendor/jsonschema/json/tests/draft2019-09/optional/refOfUnknownKeyword.json new file mode 100644 index 00000000..5b150df8 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/optional/refOfUnknownKeyword.json @@ -0,0 +1,44 @@ +[ + { + "description": "reference of a root arbitrary keyword", + "schema": { + "unknown-keyword": {"type": "integer"}, + "properties": { + "bar": {"$ref":
"#/unknown-keyword"} + } + }, + "tests": [ + { + "description": "match", + "data": {"bar": 3}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": true}, + "valid": false + } + ] + }, + { + "description": "reference of an arbitrary keyword of a sub-schema", + "schema": { + "properties": { + "foo": {"unknown-keyword": {"type": "integer"}}, + "bar": {"$ref": "#/properties/foo/unknown-keyword"} + } + }, + "tests": [ + { + "description": "match", + "data": {"bar": 3}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": true}, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/pattern.json b/vendor/jsonschema/json/tests/draft2019-09/pattern.json new file mode 100644 index 00000000..92db0f97 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/pattern.json @@ -0,0 +1,59 @@ +[ + { + "description": "pattern validation", + "schema": {"pattern": "^a*$"}, + "tests": [ + { + "description": "a matching pattern is valid", + "data": "aaa", + "valid": true + }, + { + "description": "a non-matching pattern is invalid", + "data": "abc", + "valid": false + }, + { + "description": "ignores booleans", + "data": true, + "valid": true + }, + { + "description": "ignores integers", + "data": 123, + "valid": true + }, + { + "description": "ignores floats", + "data": 1.0, + "valid": true + }, + { + "description": "ignores objects", + "data": {}, + "valid": true + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores null", + "data": null, + "valid": true + } + ] + }, + { + "description": "pattern is not anchored", + "schema": {"pattern": "a+"}, + "tests": [ + { + "description": "matches a substring", + "data": "xxaayy", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/patternProperties.json b/vendor/jsonschema/json/tests/draft2019-09/patternProperties.json new file mode 100644 index 00000000..c276e647 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/patternProperties.json @@ -0,0 +1,171 @@ +[ + { + "description": + "patternProperties validates properties matching a regex", + "schema": { + "patternProperties": { + "f.*o": {"type": "integer"} + } + }, + "tests": [ + { + "description": "a single valid match is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "multiple valid matches is valid", + "data": {"foo": 1, "foooooo" : 2}, + "valid": true + }, + { + "description": "a single invalid match is invalid", + "data": {"foo": "bar", "fooooo": 2}, + "valid": false + }, + { + "description": "multiple invalid matches is invalid", + "data": {"foo": "bar", "foooooo" : "baz"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["foo"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foo", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "multiple simultaneous patternProperties are validated", + "schema": { + "patternProperties": { + "a*": {"type": "integer"}, + "aaa*": {"maximum": 20} + } + }, + "tests": [ + { + "description": "a single valid match is valid", + "data": {"a": 21}, + "valid": true + }, + { + "description": "a simultaneous match is valid", + "data": {"aaaa": 18}, + "valid": true + }, + { + "description": "multiple matches is valid", + "data": {"a": 21, "aaaa": 18}, + "valid": true + }, + { + "description": "an invalid due to one is invalid", + "data": {"a": "bar"}, + "valid": 
false + }, + { + "description": "an invalid due to the other is invalid", + "data": {"aaaa": 31}, + "valid": false + }, + { + "description": "an invalid due to both is invalid", + "data": {"aaa": "foo", "aaaa": 31}, + "valid": false + } + ] + }, + { + "description": "regexes are not anchored by default and are case sensitive", + "schema": { + "patternProperties": { + "[0-9]{2,}": { "type": "boolean" }, + "X_": { "type": "string" } + } + }, + "tests": [ + { + "description": "non recognized members are ignored", + "data": { "answer 1": "42" }, + "valid": true + }, + { + "description": "recognized members are accounted for", + "data": { "a31b": null }, + "valid": false + }, + { + "description": "regexes are case sensitive", + "data": { "a_x_3": 3 }, + "valid": true + }, + { + "description": "regexes are case sensitive, 2", + "data": { "a_X_3": 3 }, + "valid": false + } + ] + }, + { + "description": "patternProperties with boolean schemas", + "schema": { + "patternProperties": { + "f.*": true, + "b.*": false + } + }, + "tests": [ + { + "description": "object with property matching schema true is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "object with property matching schema false is invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "object with both properties is invalid", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "object with a property matching both true and false is invalid", + "data": {"foobar":1}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "patternProperties with null valued instance properties", + "schema": { + "patternProperties": { + "^.*bar$": {"type": "null"} + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foobar": null}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/properties.json b/vendor/jsonschema/json/tests/draft2019-09/properties.json new file mode 100644 index 00000000..5b971ca0 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/properties.json @@ -0,0 +1,236 @@ +[ + { + "description": "object properties validation", + "schema": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "both properties present and valid is valid", + "data": {"foo": 1, "bar": "baz"}, + "valid": true + }, + { + "description": "one property invalid is invalid", + "data": {"foo": 1, "bar": {}}, + "valid": false + }, + { + "description": "both properties invalid is invalid", + "data": {"foo": [], "bar": {}}, + "valid": false + }, + { + "description": "doesn't invalidate other properties", + "data": {"quux": []}, + "valid": true + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": + "properties, patternProperties, additionalProperties interaction", + "schema": { + "properties": { + "foo": {"type": "array", "maxItems": 3}, + "bar": {"type": "array"} + }, + "patternProperties": {"f.o": {"minItems": 2}}, + "additionalProperties": {"type": "integer"} + }, + "tests": [ + { + "description": "property validates property", + "data": {"foo": [1, 2]}, + "valid": true + }, + { + "description": "property invalidates property", + "data": {"foo": [1, 2, 3, 4]}, + "valid": false + }, + { + "description": "patternProperty invalidates property", + "data": {"foo": 
[]}, + "valid": false + }, + { + "description": "patternProperty validates nonproperty", + "data": {"fxo": [1, 2]}, + "valid": true + }, + { + "description": "patternProperty invalidates nonproperty", + "data": {"fxo": []}, + "valid": false + }, + { + "description": "additionalProperty ignores property", + "data": {"bar": []}, + "valid": true + }, + { + "description": "additionalProperty validates others", + "data": {"quux": 3}, + "valid": true + }, + { + "description": "additionalProperty invalidates others", + "data": {"quux": "foo"}, + "valid": false + } + ] + }, + { + "description": "properties with boolean schema", + "schema": { + "properties": { + "foo": true, + "bar": false + } + }, + "tests": [ + { + "description": "no property present is valid", + "data": {}, + "valid": true + }, + { + "description": "only 'true' property present is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "only 'false' property present is invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "both properties present is invalid", + "data": {"foo": 1, "bar": 2}, + "valid": false + } + ] + }, + { + "description": "properties with escaped characters", + "schema": { + "properties": { + "foo\nbar": {"type": "number"}, + "foo\"bar": {"type": "number"}, + "foo\\bar": {"type": "number"}, + "foo\rbar": {"type": "number"}, + "foo\tbar": {"type": "number"}, + "foo\fbar": {"type": "number"} + } + }, + "tests": [ + { + "description": "object with all numbers is valid", + "data": { + "foo\nbar": 1, + "foo\"bar": 1, + "foo\\bar": 1, + "foo\rbar": 1, + "foo\tbar": 1, + "foo\fbar": 1 + }, + "valid": true + }, + { + "description": "object with strings is invalid", + "data": { + "foo\nbar": "1", + "foo\"bar": "1", + "foo\\bar": "1", + "foo\rbar": "1", + "foo\tbar": "1", + "foo\fbar": "1" + }, + "valid": false + } + ] + }, + { + "description": "properties with null valued instance properties", + "schema": { + "properties": { + "foo": {"type": "null"} + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foo": null}, + "valid": true + } + ] + }, + { + "description": "properties whose names are Javascript object property names", + "comment": "Ensure JS implementations don't universally consider e.g. 
__proto__ to always be present in an object.", + "schema": { + "properties": { + "__proto__": {"type": "number"}, + "toString": { + "properties": { "length": { "type": "string" } } + }, + "constructor": {"type": "number"} + } + }, + "tests": [ + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + }, + { + "description": "none of the properties mentioned", + "data": {}, + "valid": true + }, + { + "description": "__proto__ not valid", + "data": { "__proto__": "foo" }, + "valid": false + }, + { + "description": "toString not valid", + "data": { "toString": { "length": 37 } }, + "valid": false + }, + { + "description": "constructor not valid", + "data": { "constructor": { "length": 37 } }, + "valid": false + }, + { + "description": "all present and valid", + "data": { + "__proto__": 12, + "toString": { "length": "foo" }, + "constructor": 37 + }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/propertyNames.json b/vendor/jsonschema/json/tests/draft2019-09/propertyNames.json new file mode 100644 index 00000000..f0788e64 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/propertyNames.json @@ -0,0 +1,107 @@ +[ + { + "description": "propertyNames validation", + "schema": { + "propertyNames": {"maxLength": 3} + }, + "tests": [ + { + "description": "all property names valid", + "data": { + "f": {}, + "foo": {} + }, + "valid": true + }, + { + "description": "some property names invalid", + "data": { + "foo": {}, + "foobar": {} + }, + "valid": false + }, + { + "description": "object without properties is valid", + "data": {}, + "valid": true + }, + { + "description": "ignores arrays", + "data": [1, 2, 3, 4], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "propertyNames validation with pattern", + "schema": { + "propertyNames": { "pattern": "^a+$" } + }, + "tests": [ + { + "description": "matching property names valid", + "data": { + "a": {}, + "aa": {}, + "aaa": {} + }, + "valid": true + }, + { + "description": "non-matching property name is invalid", + "data": { + "aaA": {} + }, + "valid": false + }, + { + "description": "object without properties is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "propertyNames with boolean schema true", + "schema": {"propertyNames": true}, + "tests": [ + { + "description": "object with any properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "propertyNames with boolean schema false", + "schema": {"propertyNames": false}, + "tests": [ + { + "description": "object with any properties is invalid", + "data": {"foo": 1}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/recursiveRef.json b/vendor/jsonschema/json/tests/draft2019-09/recursiveRef.json new file mode 100644 index 00000000..ebb098c4 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/recursiveRef.json @@ -0,0 +1,399 @@ +[ + { + "description": "$recursiveRef without $recursiveAnchor works like $ref", + "schema": { + "properties": { + "foo": { "$recursiveRef": "#" } + }, + "additionalProperties": false + }, + "tests": [ + { + 
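The interaction group above pins down the precedence rules these fixtures encode: properties and patternProperties both apply whenever a key matches both, and additionalProperties only sees keys matched by neither. A minimal sketch of checking those rules, assuming the vendored jsonschema package (which provides Draft201909Validator) is importable:

    from jsonschema import Draft201909Validator

    # Mirrors the "properties, patternProperties, additionalProperties
    # interaction" schema from the fixtures above.
    schema = {
        "properties": {
            "foo": {"type": "array", "maxItems": 3},
            "bar": {"type": "array"},
        },
        "patternProperties": {"f.o": {"minItems": 2}},
        "additionalProperties": {"type": "integer"},
    }
    v = Draft201909Validator(schema)

    assert v.is_valid({"foo": [1, 2]})       # passes "properties" and "f.o"
    assert not v.is_valid({"foo": []})       # "f.o" pattern demands minItems 2
    assert v.is_valid({"quux": 3})           # unmatched key: additionalProperties
    assert not v.is_valid({"quux": "foo"})   # additional values must be integers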
"description": "match", + "data": {"foo": false}, + "valid": true + }, + { + "description": "recursive match", + "data": { "foo": { "foo": false } }, + "valid": true + }, + { + "description": "mismatch", + "data": { "bar": false }, + "valid": false + }, + { + "description": "recursive mismatch", + "data": { "foo": { "bar": false } }, + "valid": false + } + ] + }, + { + "description": "$recursiveRef without using nesting", + "schema": { + "$id": "http://localhost:4242/recursiveRef2/schema.json", + "$defs": { + "myobject": { + "$id": "myobject.json", + "$recursiveAnchor": true, + "anyOf": [ + { "type": "string" }, + { + "type": "object", + "additionalProperties": { "$recursiveRef": "#" } + } + ] + } + }, + "anyOf": [ + { "type": "integer" }, + { "$ref": "#/$defs/myobject" } + ] + }, + "tests": [ + { + "description": "integer matches at the outer level", + "data": 1, + "valid": true + }, + { + "description": "single level match", + "data": { "foo": "hi" }, + "valid": true + }, + { + "description": "integer does not match as a property value", + "data": { "foo": 1 }, + "valid": false + }, + { + "description": "two levels, properties match with inner definition", + "data": { "foo": { "bar": "hi" } }, + "valid": true + }, + { + "description": "two levels, no match", + "data": { "foo": { "bar": 1 } }, + "valid": false + } + ] + }, + { + "description": "$recursiveRef with nesting", + "schema": { + "$id": "http://localhost:4242/recursiveRef3/schema.json", + "$recursiveAnchor": true, + "$defs": { + "myobject": { + "$id": "myobject.json", + "$recursiveAnchor": true, + "anyOf": [ + { "type": "string" }, + { + "type": "object", + "additionalProperties": { "$recursiveRef": "#" } + } + ] + } + }, + "anyOf": [ + { "type": "integer" }, + { "$ref": "#/$defs/myobject" } + ] + }, + "tests": [ + { + "description": "integer matches at the outer level", + "data": 1, + "valid": true + }, + { + "description": "single level match", + "data": { "foo": "hi" }, + "valid": true + }, + { + "description": "integer now matches as a property value", + "data": { "foo": 1 }, + "valid": true + }, + { + "description": "two levels, properties match with inner definition", + "data": { "foo": { "bar": "hi" } }, + "valid": true + }, + { + "description": "two levels, properties match with $recursiveRef", + "data": { "foo": { "bar": 1 } }, + "valid": true + } + ] + }, + { + "description": "$recursiveRef with $recursiveAnchor: false works like $ref", + "schema": { + "$id": "http://localhost:4242/recursiveRef4/schema.json", + "$recursiveAnchor": false, + "$defs": { + "myobject": { + "$id": "myobject.json", + "$recursiveAnchor": false, + "anyOf": [ + { "type": "string" }, + { + "type": "object", + "additionalProperties": { "$recursiveRef": "#" } + } + ] + } + }, + "anyOf": [ + { "type": "integer" }, + { "$ref": "#/$defs/myobject" } + ] + }, + "tests": [ + { + "description": "integer matches at the outer level", + "data": 1, + "valid": true + }, + { + "description": "single level match", + "data": { "foo": "hi" }, + "valid": true + }, + { + "description": "integer does not match as a property value", + "data": { "foo": 1 }, + "valid": false + }, + { + "description": "two levels, properties match with inner definition", + "data": { "foo": { "bar": "hi" } }, + "valid": true + }, + { + "description": "two levels, integer does not match as a property value", + "data": { "foo": { "bar": 1 } }, + "valid": false + } + ] + }, + { + "description": "$recursiveRef with no $recursiveAnchor works like $ref", + "schema": { + "$id": 
"http://localhost:4242/recursiveRef5/schema.json", + "$defs": { + "myobject": { + "$id": "myobject.json", + "$recursiveAnchor": false, + "anyOf": [ + { "type": "string" }, + { + "type": "object", + "additionalProperties": { "$recursiveRef": "#" } + } + ] + } + }, + "anyOf": [ + { "type": "integer" }, + { "$ref": "#/$defs/myobject" } + ] + }, + "tests": [ + { + "description": "integer matches at the outer level", + "data": 1, + "valid": true + }, + { + "description": "single level match", + "data": { "foo": "hi" }, + "valid": true + }, + { + "description": "integer does not match as a property value", + "data": { "foo": 1 }, + "valid": false + }, + { + "description": "two levels, properties match with inner definition", + "data": { "foo": { "bar": "hi" } }, + "valid": true + }, + { + "description": "two levels, integer does not match as a property value", + "data": { "foo": { "bar": 1 } }, + "valid": false + } + ] + }, + { + "description": "$recursiveRef with no $recursiveAnchor in the initial target schema resource", + "schema": { + "$id": "http://localhost:4242/recursiveRef6/base.json", + "$recursiveAnchor": true, + "anyOf": [ + { "type": "boolean" }, + { + "type": "object", + "additionalProperties": { + "$id": "http://localhost:4242/recursiveRef6/inner.json", + "$comment": "there is no $recursiveAnchor: true here, so we do NOT recurse to the base", + "anyOf": [ + { "type": "integer" }, + { "type": "object", "additionalProperties": { "$recursiveRef": "#" } } + ] + } + } + ] + }, + "tests": [ + { + "description": "leaf node does not match; no recursion", + "data": { "foo": true }, + "valid": false + }, + { + "description": "leaf node matches: recursion uses the inner schema", + "data": { "foo": { "bar": 1 } }, + "valid": true + }, + { + "description": "leaf node does not match: recursion uses the inner schema", + "data": { "foo": { "bar": true } }, + "valid": false + } + ] + }, + { + "description": "$recursiveRef with no $recursiveAnchor in the outer schema resource", + "schema": { + "$id": "http://localhost:4242/recursiveRef7/base.json", + "anyOf": [ + { "type": "boolean" }, + { + "type": "object", + "additionalProperties": { + "$id": "http://localhost:4242/recursiveRef7/inner.json", + "$recursiveAnchor": true, + "anyOf": [ + { "type": "integer" }, + { "type": "object", "additionalProperties": { "$recursiveRef": "#" } } + ] + } + } + ] + }, + "tests": [ + { + "description": "leaf node does not match; no recursion", + "data": { "foo": true }, + "valid": false + }, + { + "description": "leaf node matches: recursion only uses inner schema", + "data": { "foo": { "bar": 1 } }, + "valid": true + }, + { + "description": "leaf node does not match: recursion only uses inner schema", + "data": { "foo": { "bar": true } }, + "valid": false + } + ] + }, + { + "description": "multiple dynamic paths to the $recursiveRef keyword", + "schema": { + "$id": "recursiveRef8_main.json", + "$defs": { + "inner": { + "$id": "recursiveRef8_inner.json", + "$recursiveAnchor": true, + "title": "inner", + "additionalProperties": { + "$recursiveRef": "#" + } + } + }, + "if": { + "propertyNames": { + "pattern": "^[a-m]" + } + }, + "then": { + "title": "any type of node", + "$id": "recursiveRef8_anyLeafNode.json", + "$recursiveAnchor": true, + "$ref": "recursiveRef8_inner.json" + }, + "else": { + "title": "integer node", + "$id": "recursiveRef8_integerNode.json", + "$recursiveAnchor": true, + "type": [ "object", "integer" ], + "$ref": "recursiveRef8_inner.json" + } + }, + "tests": [ + { + "description": "recurse to 
anyLeafNode - floats are allowed", + "data": { "alpha": 1.1 }, + "valid": true + }, + { + "description": "recurse to integerNode - floats are not allowed", + "data": { "november": 1.1 }, + "valid": false + } + ] + }, + { + "description": "dynamic $recursiveRef destination (not predictable at schema compile time)", + "schema": { + "$id": "main.json", + "$defs": { + "inner": { + "$id": "inner.json", + "$recursiveAnchor": true, + "title": "inner", + "additionalProperties": { + "$recursiveRef": "#" + } + } + + }, + "if": { "propertyNames": { "pattern": "^[a-m]" } }, + "then": { + "title": "any type of node", + "$id": "anyLeafNode.json", + "$recursiveAnchor": true, + "$ref": "main.json#/$defs/inner" + }, + "else": { + "title": "integer node", + "$id": "integerNode.json", + "$recursiveAnchor": true, + "type": [ "object", "integer" ], + "$ref": "main.json#/$defs/inner" + } + }, + "tests": [ + { + "description": "numeric node", + "data": { "alpha": 1.1 }, + "valid": true + }, + { + "description": "integer node", + "data": { "november": 1.1 }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/ref.json b/vendor/jsonschema/json/tests/draft2019-09/ref.json new file mode 100644 index 00000000..6d1e90e0 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/ref.json @@ -0,0 +1,834 @@ +[ + { + "description": "root pointer ref", + "schema": { + "properties": { + "foo": {"$ref": "#"} + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "match", + "data": {"foo": false}, + "valid": true + }, + { + "description": "recursive match", + "data": {"foo": {"foo": false}}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": false}, + "valid": false + }, + { + "description": "recursive mismatch", + "data": {"foo": {"bar": false}}, + "valid": false + } + ] + }, + { + "description": "relative pointer ref to object", + "schema": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"$ref": "#/properties/foo"} + } + }, + "tests": [ + { + "description": "match", + "data": {"bar": 3}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": true}, + "valid": false + } + ] + }, + { + "description": "relative pointer ref to array", + "schema": { + "items": [ + {"type": "integer"}, + {"$ref": "#/items/0"} + ] + }, + "tests": [ + { + "description": "match array", + "data": [1, 2], + "valid": true + }, + { + "description": "mismatch array", + "data": [1, "foo"], + "valid": false + } + ] + }, + { + "description": "escaped pointer ref", + "schema": { + "$defs": { + "tilde~field": {"type": "integer"}, + "slash/field": {"type": "integer"}, + "percent%field": {"type": "integer"} + }, + "properties": { + "tilde": {"$ref": "#/$defs/tilde~0field"}, + "slash": {"$ref": "#/$defs/slash~1field"}, + "percent": {"$ref": "#/$defs/percent%25field"} + } + }, + "tests": [ + { + "description": "slash invalid", + "data": {"slash": "aoeu"}, + "valid": false + }, + { + "description": "tilde invalid", + "data": {"tilde": "aoeu"}, + "valid": false + }, + { + "description": "percent invalid", + "data": {"percent": "aoeu"}, + "valid": false + }, + { + "description": "slash valid", + "data": {"slash": 123}, + "valid": true + }, + { + "description": "tilde valid", + "data": {"tilde": 123}, + "valid": true + }, + { + "description": "percent valid", + "data": {"percent": 123}, + "valid": true + } + ] + }, + { + "description": "nested refs", + "schema": { + "$defs": { + "a": {"type": "integer"}, + "b": {"$ref": "#/$defs/a"}, + "c": {"$ref": 
"#/$defs/b"} + }, + "$ref": "#/$defs/c" + }, + "tests": [ + { + "description": "nested ref valid", + "data": 5, + "valid": true + }, + { + "description": "nested ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "ref applies alongside sibling keywords", + "schema": { + "$defs": { + "reffed": { + "type": "array" + } + }, + "properties": { + "foo": { + "$ref": "#/$defs/reffed", + "maxItems": 2 + } + } + }, + "tests": [ + { + "description": "ref valid, maxItems valid", + "data": { "foo": [] }, + "valid": true + }, + { + "description": "ref valid, maxItems invalid", + "data": { "foo": [1, 2, 3] }, + "valid": false + }, + { + "description": "ref invalid", + "data": { "foo": "string" }, + "valid": false + } + ] + }, + { + "description": "remote ref, containing refs itself", + "schema": { + "$ref": "https://json-schema.org/draft/2019-09/schema" + }, + "tests": [ + { + "description": "remote ref valid", + "data": {"minLength": 1}, + "valid": true + }, + { + "description": "remote ref invalid", + "data": {"minLength": -1}, + "valid": false + } + ] + }, + { + "description": "property named $ref that is not a reference", + "schema": { + "properties": { + "$ref": {"type": "string"} + } + }, + "tests": [ + { + "description": "property named $ref valid", + "data": {"$ref": "a"}, + "valid": true + }, + { + "description": "property named $ref invalid", + "data": {"$ref": 2}, + "valid": false + } + ] + }, + { + "description": "property named $ref, containing an actual $ref", + "schema": { + "properties": { + "$ref": {"$ref": "#/$defs/is-string"} + }, + "$defs": { + "is-string": { + "type": "string" + } + } + }, + "tests": [ + { + "description": "property named $ref valid", + "data": {"$ref": "a"}, + "valid": true + }, + { + "description": "property named $ref invalid", + "data": {"$ref": 2}, + "valid": false + } + ] + }, + { + "description": "$ref to boolean schema true", + "schema": { + "$ref": "#/$defs/bool", + "$defs": { + "bool": true + } + }, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "$ref to boolean schema false", + "schema": { + "$ref": "#/$defs/bool", + "$defs": { + "bool": false + } + }, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "Recursive references between schemas", + "schema": { + "$id": "http://localhost:1234/tree", + "description": "tree of nodes", + "type": "object", + "properties": { + "meta": {"type": "string"}, + "nodes": { + "type": "array", + "items": {"$ref": "node"} + } + }, + "required": ["meta", "nodes"], + "$defs": { + "node": { + "$id": "http://localhost:1234/node", + "description": "node", + "type": "object", + "properties": { + "value": {"type": "number"}, + "subtree": {"$ref": "tree"} + }, + "required": ["value"] + } + } + }, + "tests": [ + { + "description": "valid tree", + "data": { + "meta": "root", + "nodes": [ + { + "value": 1, + "subtree": { + "meta": "child", + "nodes": [ + {"value": 1.1}, + {"value": 1.2} + ] + } + }, + { + "value": 2, + "subtree": { + "meta": "child", + "nodes": [ + {"value": 2.1}, + {"value": 2.2} + ] + } + } + ] + }, + "valid": true + }, + { + "description": "invalid tree", + "data": { + "meta": "root", + "nodes": [ + { + "value": 1, + "subtree": { + "meta": "child", + "nodes": [ + {"value": "string is invalid"}, + {"value": 1.2} + ] + } + }, + { + "value": 2, + "subtree": { + "meta": "child", + "nodes": [ + {"value": 2.1}, + {"value": 2.2} + ] + } + } + ] + 
}, + "valid": false + } + ] + }, + { + "description": "refs with quote", + "schema": { + "properties": { + "foo\"bar": {"$ref": "#/$defs/foo%22bar"} + }, + "$defs": { + "foo\"bar": {"type": "number"} + } + }, + "tests": [ + { + "description": "object with numbers is valid", + "data": { + "foo\"bar": 1 + }, + "valid": true + }, + { + "description": "object with strings is invalid", + "data": { + "foo\"bar": "1" + }, + "valid": false + } + ] + }, + { + "description": "ref creates new scope when adjacent to keywords", + "schema": { + "$defs": { + "A": { + "unevaluatedProperties": false + } + }, + "properties": { + "prop1": { + "type": "string" + } + }, + "$ref": "#/$defs/A" + }, + "tests": [ + { + "description": "referenced subschema doesn't see annotations from properties", + "data": { + "prop1": "match" + }, + "valid": false + } + ] + }, + { + "description": "naive replacement of $ref with its destination is not correct", + "schema": { + "$defs": { + "a_string": { "type": "string" } + }, + "enum": [ + { "$ref": "#/$defs/a_string" } + ] + }, + "tests": [ + { + "description": "do not evaluate the $ref inside the enum, matching any string", + "data": "this is a string", + "valid": false + }, + { + "description": "do not evaluate the $ref inside the enum, definition exact match", + "data": { "type": "string" }, + "valid": false + }, + { + "description": "match the enum exactly", + "data": { "$ref": "#/$defs/a_string" }, + "valid": true + } + ] + }, + { + "description": "refs with relative uris and defs", + "schema": { + "$id": "http://example.com/schema-relative-uri-defs1.json", + "properties": { + "foo": { + "$id": "schema-relative-uri-defs2.json", + "$defs": { + "inner": { + "properties": { + "bar": { "type": "string" } + } + } + }, + "$ref": "#/$defs/inner" + } + }, + "$ref": "schema-relative-uri-defs2.json" + }, + "tests": [ + { + "description": "invalid on inner field", + "data": { + "foo": { + "bar": 1 + }, + "bar": "a" + }, + "valid": false + }, + { + "description": "invalid on outer field", + "data": { + "foo": { + "bar": "a" + }, + "bar": 1 + }, + "valid": false + }, + { + "description": "valid on both fields", + "data": { + "foo": { + "bar": "a" + }, + "bar": "a" + }, + "valid": true + } + ] + }, + { + "description": "relative refs with absolute uris and defs", + "schema": { + "$id": "http://example.com/schema-refs-absolute-uris-defs1.json", + "properties": { + "foo": { + "$id": "http://example.com/schema-refs-absolute-uris-defs2.json", + "$defs": { + "inner": { + "properties": { + "bar": { "type": "string" } + } + } + }, + "$ref": "#/$defs/inner" + } + }, + "$ref": "schema-refs-absolute-uris-defs2.json" + }, + "tests": [ + { + "description": "invalid on inner field", + "data": { + "foo": { + "bar": 1 + }, + "bar": "a" + }, + "valid": false + }, + { + "description": "invalid on outer field", + "data": { + "foo": { + "bar": "a" + }, + "bar": 1 + }, + "valid": false + }, + { + "description": "valid on both fields", + "data": { + "foo": { + "bar": "a" + }, + "bar": "a" + }, + "valid": true + } + ] + }, + { + "description": "$id must be resolved against nearest parent, not just immediate parent", + "schema": { + "$id": "http://example.com/a.json", + "$defs": { + "x": { + "$id": "http://example.com/b/c.json", + "not": { + "$defs": { + "y": { + "$id": "d.json", + "type": "number" + } + } + } + } + }, + "allOf": [ + { + "$ref": "http://example.com/b/d.json" + } + ] + }, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "non-number is 
invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "order of evaluation: $id and $ref", + "schema": { + "$comment": "$id must be evaluated before $ref to get the proper $ref destination", + "$id": "/ref-and-id1/base.json", + "$ref": "int.json", + "$defs": { + "bigint": { + "$comment": "canonical uri: /ref-and-id1/int.json", + "$id": "int.json", + "maximum": 10 + }, + "smallint": { + "$comment": "canonical uri: /ref-and-id1-int.json", + "$id": "/ref-and-id1-int.json", + "maximum": 2 + } + } + }, + "tests": [ + { + "description": "data is valid against first definition", + "data": 5, + "valid": true + }, + { + "description": "data is invalid against first definition", + "data": 50, + "valid": false + } + ] + }, + { + "description": "order of evaluation: $id and $anchor and $ref", + "schema": { + "$comment": "$id must be evaluated before $ref to get the proper $ref destination", + "$id": "/ref-and-id2/base.json", + "$ref": "#bigint", + "$defs": { + "bigint": { + "$comment": "canonical uri: /ref-and-id2/base.json/$defs/bigint; another valid uri for this location: /ref-and-id2/base.json#bigint", + "$anchor": "bigint", + "maximum": 10 + }, + "smallint": { + "$comment": "canonical uri: /ref-and-id2#/$defs/smallint; another valid uri for this location: /ref-and-id2/#bigint", + "$id": "/ref-and-id2/", + "$anchor": "bigint", + "maximum": 2 + } + } + }, + "tests": [ + { + "description": "data is valid against first definition", + "data": 5, + "valid": true + }, + { + "description": "data is invalid against first definition", + "data": 50, + "valid": false + } + ] + }, + { + "description": "simple URN base URI with $ref via the URN", + "schema": { + "$comment": "URIs do not have to have HTTP(s) schemes", + "$id": "urn:uuid:deadbeef-1234-ffff-ffff-4321feebdaed", + "minimum": 30, + "properties": { + "foo": {"$ref": "urn:uuid:deadbeef-1234-ffff-ffff-4321feebdaed"} + } + }, + "tests": [ + { + "description": "valid under the URN IDed schema", + "data": {"foo": 37}, + "valid": true + }, + { + "description": "invalid under the URN IDed schema", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "simple URN base URI with JSON pointer", + "schema": { + "$comment": "URIs do not have to have HTTP(s) schemes", + "$id": "urn:uuid:deadbeef-1234-00ff-ff00-4321feebdaed", + "properties": { + "foo": {"$ref": "#/$defs/bar"} + }, + "$defs": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with NSS", + "schema": { + "$comment": "RFC 8141 §2.2", + "$id": "urn:example:1/406/47452/2", + "properties": { + "foo": {"$ref": "#/$defs/bar"} + }, + "$defs": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with r-component", + "schema": { + "$comment": "RFC 8141 §2.3.1", + "$id": "urn:example:foo-bar-baz-qux?+CCResolve:cc=uk", + "properties": { + "foo": {"$ref": "#/$defs/bar"} + }, + "$defs": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + 
"description": "URN base URI with q-component", + "schema": { + "$comment": "RFC 8141 §2.3.2", + "$id": "urn:example:weather?=op=map&lat=39.56&lon=-104.85&datetime=1969-07-21T02:56:15Z", + "properties": { + "foo": {"$ref": "#/$defs/bar"} + }, + "$defs": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with f-component", + "schema": { + "$comment": "RFC 8141 §2.3.3, but we don't allow fragments", + "$ref": "https://json-schema.org/draft/2019-09/schema" + }, + "tests": [ + { + "description": "is invalid", + "data": {"$id": "urn:example:foo-bar-baz-qux#somepart"}, + "valid": false + } + ] + }, + { + "description": "URN base URI with URN and JSON pointer ref", + "schema": { + "$id": "urn:uuid:deadbeef-1234-0000-0000-4321feebdaed", + "properties": { + "foo": {"$ref": "urn:uuid:deadbeef-1234-0000-0000-4321feebdaed#/$defs/bar"} + }, + "$defs": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with URN and anchor ref", + "schema": { + "$id": "urn:uuid:deadbeef-1234-ff00-00ff-4321feebdaed", + "properties": { + "foo": {"$ref": "urn:uuid:deadbeef-1234-ff00-00ff-4321feebdaed#something"} + }, + "$defs": { + "bar": { + "$anchor": "something", + "type": "string" + } + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/refRemote.json b/vendor/jsonschema/json/tests/draft2019-09/refRemote.json new file mode 100644 index 00000000..a8440396 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/refRemote.json @@ -0,0 +1,233 @@ +[ + { + "description": "remote ref", + "schema": {"$ref": "http://localhost:1234/integer.json"}, + "tests": [ + { + "description": "remote ref valid", + "data": 1, + "valid": true + }, + { + "description": "remote ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "fragment within remote ref", + "schema": {"$ref": "http://localhost:1234/subSchemas-defs.json#/$defs/integer"}, + "tests": [ + { + "description": "remote fragment valid", + "data": 1, + "valid": true + }, + { + "description": "remote fragment invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "ref within remote ref", + "schema": { + "$ref": "http://localhost:1234/subSchemas-defs.json#/$defs/refToInteger" + }, + "tests": [ + { + "description": "ref within ref valid", + "data": 1, + "valid": true + }, + { + "description": "ref within ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "base URI change", + "schema": { + "$id": "http://localhost:1234/", + "items": { + "$id": "baseUriChange/", + "items": {"$ref": "folderInteger.json"} + } + }, + "tests": [ + { + "description": "base URI change ref valid", + "data": [[1]], + "valid": true + }, + { + "description": "base URI change ref invalid", + "data": [["a"]], + "valid": false + } + ] + }, + { + "description": "base URI change - change folder", + "schema": { + "$id": "http://localhost:1234/scope_change_defs1.json", + "type" : 
"object", + "properties": {"list": {"$ref": "baseUriChangeFolder/"}}, + "$defs": { + "baz": { + "$id": "baseUriChangeFolder/", + "type": "array", + "items": {"$ref": "folderInteger.json"} + } + } + }, + "tests": [ + { + "description": "number is valid", + "data": {"list": [1]}, + "valid": true + }, + { + "description": "string is invalid", + "data": {"list": ["a"]}, + "valid": false + } + ] + }, + { + "description": "base URI change - change folder in subschema", + "schema": { + "$id": "http://localhost:1234/scope_change_defs2.json", + "type" : "object", + "properties": {"list": {"$ref": "baseUriChangeFolderInSubschema/#/$defs/bar"}}, + "$defs": { + "baz": { + "$id": "baseUriChangeFolderInSubschema/", + "$defs": { + "bar": { + "type": "array", + "items": {"$ref": "folderInteger.json"} + } + } + } + } + }, + "tests": [ + { + "description": "number is valid", + "data": {"list": [1]}, + "valid": true + }, + { + "description": "string is invalid", + "data": {"list": ["a"]}, + "valid": false + } + ] + }, + { + "description": "root ref in remote ref", + "schema": { + "$id": "http://localhost:1234/object", + "type": "object", + "properties": { + "name": {"$ref": "name-defs.json#/$defs/orNull"} + } + }, + "tests": [ + { + "description": "string is valid", + "data": { + "name": "foo" + }, + "valid": true + }, + { + "description": "null is valid", + "data": { + "name": null + }, + "valid": true + }, + { + "description": "object is invalid", + "data": { + "name": { + "name": null + } + }, + "valid": false + } + ] + }, + { + "description": "remote ref with ref to defs", + "schema": { + "$id": "http://localhost:1234/schema-remote-ref-ref-defs1.json", + "$ref": "ref-and-defs.json" + }, + "tests": [ + { + "description": "invalid", + "data": { + "bar": 1 + }, + "valid": false + }, + { + "description": "valid", + "data": { + "bar": "a" + }, + "valid": true + } + ] + }, + { + "description": "Location-independent identifier in remote ref", + "schema": { + "$ref": "http://localhost:1234/locationIndependentIdentifier.json#/$defs/refToInteger" + }, + "tests": [ + { + "description": "integer is valid", + "data": 1, + "valid": true + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "retrieved nested refs resolve relative to their URI not $id", + "schema": { + "$id": "http://localhost:1234/some-id", + "properties": { + "name": {"$ref": "nested/foo-ref-string.json"} + } + }, + "tests": [ + { + "description": "number is invalid", + "data": { + "name": {"foo": 1} + }, + "valid": false + }, + { + "description": "string is valid", + "data": { + "name": {"foo": "a"} + }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/required.json b/vendor/jsonschema/json/tests/draft2019-09/required.json new file mode 100644 index 00000000..8d8087af --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/required.json @@ -0,0 +1,151 @@ +[ + { + "description": "required validation", + "schema": { + "properties": { + "foo": {}, + "bar": {} + }, + "required": ["foo"] + }, + "tests": [ + { + "description": "present required property is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "non-present required property is invalid", + "data": {"bar": 1}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores strings", + "data": "", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { 
+ "description": "required default validation", + "schema": { + "properties": { + "foo": {} + } + }, + "tests": [ + { + "description": "not required by default", + "data": {}, + "valid": true + } + ] + }, + { + "description": "required with empty array", + "schema": { + "properties": { + "foo": {} + }, + "required": [] + }, + "tests": [ + { + "description": "property not required", + "data": {}, + "valid": true + } + ] + }, + { + "description": "required with escaped characters", + "schema": { + "required": [ + "foo\nbar", + "foo\"bar", + "foo\\bar", + "foo\rbar", + "foo\tbar", + "foo\fbar" + ] + }, + "tests": [ + { + "description": "object with all properties present is valid", + "data": { + "foo\nbar": 1, + "foo\"bar": 1, + "foo\\bar": 1, + "foo\rbar": 1, + "foo\tbar": 1, + "foo\fbar": 1 + }, + "valid": true + }, + { + "description": "object with some properties missing is invalid", + "data": { + "foo\nbar": "1", + "foo\"bar": "1" + }, + "valid": false + } + ] + }, + { + "description": "required properties whose names are Javascript object property names", + "comment": "Ensure JS implementations don't universally consider e.g. __proto__ to always be present in an object.", + "schema": { "required": ["__proto__", "toString", "constructor"] }, + "tests": [ + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + }, + { + "description": "none of the properties mentioned", + "data": {}, + "valid": false + }, + { + "description": "__proto__ present", + "data": { "__proto__": "foo" }, + "valid": false + }, + { + "description": "toString present", + "data": { "toString": { "length": 37 } }, + "valid": false + }, + { + "description": "constructor present", + "data": { "constructor": { "length": 37 } }, + "valid": false + }, + { + "description": "all present", + "data": { + "__proto__": 12, + "toString": { "length": "foo" }, + "constructor": 37 + }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/type.json b/vendor/jsonschema/json/tests/draft2019-09/type.json new file mode 100644 index 00000000..83046470 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/type.json @@ -0,0 +1,474 @@ +[ + { + "description": "integer type matches integers", + "schema": {"type": "integer"}, + "tests": [ + { + "description": "an integer is an integer", + "data": 1, + "valid": true + }, + { + "description": "a float with zero fractional part is an integer", + "data": 1.0, + "valid": true + }, + { + "description": "a float is not an integer", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an integer", + "data": "foo", + "valid": false + }, + { + "description": "a string is still not an integer, even if it looks like one", + "data": "1", + "valid": false + }, + { + "description": "an object is not an integer", + "data": {}, + "valid": false + }, + { + "description": "an array is not an integer", + "data": [], + "valid": false + }, + { + "description": "a boolean is not an integer", + "data": true, + "valid": false + }, + { + "description": "null is not an integer", + "data": null, + "valid": false + } + ] + }, + { + "description": "number type matches numbers", + "schema": {"type": "number"}, + "tests": [ + { + "description": "an integer is a number", + "data": 1, + "valid": true + }, + { + "description": "a float with zero fractional part is a number (and an integer)", + "data": 1.0, + "valid": true + }, + { + "description": "a float is a number", 
+ "data": 1.1, + "valid": true + }, + { + "description": "a string is not a number", + "data": "foo", + "valid": false + }, + { + "description": "a string is still not a number, even if it looks like one", + "data": "1", + "valid": false + }, + { + "description": "an object is not a number", + "data": {}, + "valid": false + }, + { + "description": "an array is not a number", + "data": [], + "valid": false + }, + { + "description": "a boolean is not a number", + "data": true, + "valid": false + }, + { + "description": "null is not a number", + "data": null, + "valid": false + } + ] + }, + { + "description": "string type matches strings", + "schema": {"type": "string"}, + "tests": [ + { + "description": "1 is not a string", + "data": 1, + "valid": false + }, + { + "description": "a float is not a string", + "data": 1.1, + "valid": false + }, + { + "description": "a string is a string", + "data": "foo", + "valid": true + }, + { + "description": "a string is still a string, even if it looks like a number", + "data": "1", + "valid": true + }, + { + "description": "an empty string is still a string", + "data": "", + "valid": true + }, + { + "description": "an object is not a string", + "data": {}, + "valid": false + }, + { + "description": "an array is not a string", + "data": [], + "valid": false + }, + { + "description": "a boolean is not a string", + "data": true, + "valid": false + }, + { + "description": "null is not a string", + "data": null, + "valid": false + } + ] + }, + { + "description": "object type matches objects", + "schema": {"type": "object"}, + "tests": [ + { + "description": "an integer is not an object", + "data": 1, + "valid": false + }, + { + "description": "a float is not an object", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an object", + "data": "foo", + "valid": false + }, + { + "description": "an object is an object", + "data": {}, + "valid": true + }, + { + "description": "an array is not an object", + "data": [], + "valid": false + }, + { + "description": "a boolean is not an object", + "data": true, + "valid": false + }, + { + "description": "null is not an object", + "data": null, + "valid": false + } + ] + }, + { + "description": "array type matches arrays", + "schema": {"type": "array"}, + "tests": [ + { + "description": "an integer is not an array", + "data": 1, + "valid": false + }, + { + "description": "a float is not an array", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an array", + "data": "foo", + "valid": false + }, + { + "description": "an object is not an array", + "data": {}, + "valid": false + }, + { + "description": "an array is an array", + "data": [], + "valid": true + }, + { + "description": "a boolean is not an array", + "data": true, + "valid": false + }, + { + "description": "null is not an array", + "data": null, + "valid": false + } + ] + }, + { + "description": "boolean type matches booleans", + "schema": {"type": "boolean"}, + "tests": [ + { + "description": "an integer is not a boolean", + "data": 1, + "valid": false + }, + { + "description": "zero is not a boolean", + "data": 0, + "valid": false + }, + { + "description": "a float is not a boolean", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not a boolean", + "data": "foo", + "valid": false + }, + { + "description": "an empty string is not a boolean", + "data": "", + "valid": false + }, + { + "description": "an object is not a boolean", + "data": {}, + "valid": false + }, + { + "description": "an 
array is not a boolean", + "data": [], + "valid": false + }, + { + "description": "true is a boolean", + "data": true, + "valid": true + }, + { + "description": "false is a boolean", + "data": false, + "valid": true + }, + { + "description": "null is not a boolean", + "data": null, + "valid": false + } + ] + }, + { + "description": "null type matches only the null object", + "schema": {"type": "null"}, + "tests": [ + { + "description": "an integer is not null", + "data": 1, + "valid": false + }, + { + "description": "a float is not null", + "data": 1.1, + "valid": false + }, + { + "description": "zero is not null", + "data": 0, + "valid": false + }, + { + "description": "a string is not null", + "data": "foo", + "valid": false + }, + { + "description": "an empty string is not null", + "data": "", + "valid": false + }, + { + "description": "an object is not null", + "data": {}, + "valid": false + }, + { + "description": "an array is not null", + "data": [], + "valid": false + }, + { + "description": "true is not null", + "data": true, + "valid": false + }, + { + "description": "false is not null", + "data": false, + "valid": false + }, + { + "description": "null is null", + "data": null, + "valid": true + } + ] + }, + { + "description": "multiple types can be specified in an array", + "schema": {"type": ["integer", "string"]}, + "tests": [ + { + "description": "an integer is valid", + "data": 1, + "valid": true + }, + { + "description": "a string is valid", + "data": "foo", + "valid": true + }, + { + "description": "a float is invalid", + "data": 1.1, + "valid": false + }, + { + "description": "an object is invalid", + "data": {}, + "valid": false + }, + { + "description": "an array is invalid", + "data": [], + "valid": false + }, + { + "description": "a boolean is invalid", + "data": true, + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + } + ] + }, + { + "description": "type as array with one item", + "schema": { + "type": ["string"] + }, + "tests": [ + { + "description": "string is valid", + "data": "foo", + "valid": true + }, + { + "description": "number is invalid", + "data": 123, + "valid": false + } + ] + }, + { + "description": "type: array or object", + "schema": { + "type": ["array", "object"] + }, + "tests": [ + { + "description": "array is valid", + "data": [1,2,3], + "valid": true + }, + { + "description": "object is valid", + "data": {"foo": 123}, + "valid": true + }, + { + "description": "number is invalid", + "data": 123, + "valid": false + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + } + ] + }, + { + "description": "type: array, object or null", + "schema": { + "type": ["array", "object", "null"] + }, + "tests": [ + { + "description": "array is valid", + "data": [1,2,3], + "valid": true + }, + { + "description": "object is valid", + "data": {"foo": 123}, + "valid": true + }, + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "number is invalid", + "data": 123, + "valid": false + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/unevaluatedItems.json b/vendor/jsonschema/json/tests/draft2019-09/unevaluatedItems.json new file mode 100644 index 00000000..fa7ad7ca --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/unevaluatedItems.json @@ -0,0 +1,534 @@ +[ + { + 
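Two details in the type.json fixtures above are easy to miss: 1.0 satisfies "integer" (from draft 6 on, integer is defined by value rather than lexical form), and booleans never satisfy "integer" or "number", even in Python where bool subclasses int. Assuming the vendored jsonschema package:

    from jsonschema import Draft201909Validator

    integer = Draft201909Validator({"type": "integer"})
    assert integer.is_valid(1)
    assert integer.is_valid(1.0)       # zero fractional part counts as integer
    assert not integer.is_valid(1.1)
    assert not integer.is_valid(True)  # bool rejected despite subclassing int

    multi = Draft201909Validator({"type": ["integer", "string"]})
    assert multi.is_valid("foo")
    assert not multi.is_valid(1.1)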
"description": "unevaluatedItems true", + "schema": { "unevaluatedItems": true }, + "tests": [ + { + "description": "with no unevaluated items", + "data": [], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo"], + "valid": true + } + ] + }, + { + "description": "unevaluatedItems false", + "schema": { "unevaluatedItems": false }, + "tests": [ + { + "description": "with no unevaluated items", + "data": [], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems as schema", + "schema": { "unevaluatedItems": { "type": "string" } }, + "tests": [ + { + "description": "with no unevaluated items", + "data": [], + "valid": true + }, + { + "description": "with valid unevaluated items", + "data": ["foo"], + "valid": true + }, + { + "description": "with invalid unevaluated items", + "data": [42], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with uniform items", + "schema": { + "items": { "type": "string" }, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "unevaluatedItems doesn't apply", + "data": ["foo", "bar"], + "valid": true + } + ] + }, + { + "description": "unevaluatedItems with tuple", + "schema": { + "items": [ + { "type": "string" } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": ["foo"], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo", "bar"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with additionalItems", + "schema": { + "items": [ + { "type": "string" } + ], + "additionalItems": true, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "unevaluatedItems doesn't apply", + "data": ["foo", 42], + "valid": true + } + ] + }, + { + "description": "unevaluatedItems with nested tuple", + "schema": { + "items": [ + { "type": "string" } + ], + "allOf": [ + { + "items": [ + true, + { "type": "number" } + ] + } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": ["foo", 42], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo", 42, true], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with nested items", + "schema": { + "unevaluatedItems": {"type": "boolean"}, + "anyOf": [ + { "items": {"type": "string"} }, + true + ] + }, + "tests": [ + { + "description": "with only (valid) additional items", + "data": [true, false], + "valid": true + }, + { + "description": "with no additional items", + "data": ["yes", "no"], + "valid": true + }, + { + "description": "with invalid additional item", + "data": ["yes", false], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with nested items and additionalItems", + "schema": { + "allOf": [ + { + "items": [ + { "type": "string" } + ], + "additionalItems": true + } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no additional items", + "data": ["foo"], + "valid": true + }, + { + "description": "with additional items", + "data": ["foo", 42, true], + "valid": true + } + ] + }, + { + "description": "unevaluatedItems with nested unevaluatedItems", + "schema": { + "allOf": [ + { + "items": [ + { "type": "string" } + ] + }, + { "unevaluatedItems": true } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no additional items", + "data": ["foo"], + "valid": true + }, + { + 
"description": "with additional items", + "data": ["foo", 42, true], + "valid": true + } + ] + }, + { + "description": "unevaluatedItems with anyOf", + "schema": { + "items": [ + { "const": "foo" } + ], + "anyOf": [ + { + "items": [ + true, + { "const": "bar" } + ] + }, + { + "items": [ + true, + true, + { "const": "baz" } + ] + } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "when one schema matches and has no unevaluated items", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "when one schema matches and has unevaluated items", + "data": ["foo", "bar", 42], + "valid": false + }, + { + "description": "when two schemas match and has no unevaluated items", + "data": ["foo", "bar", "baz"], + "valid": true + }, + { + "description": "when two schemas match and has unevaluated items", + "data": ["foo", "bar", "baz", 42], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with oneOf", + "schema": { + "items": [ + { "const": "foo" } + ], + "oneOf": [ + { + "items": [ + true, + { "const": "bar" } + ] + }, + { + "items": [ + true, + { "const": "baz" } + ] + } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo", "bar", 42], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with not", + "schema": { + "items": [ + { "const": "foo" } + ], + "not": { + "not": { + "items": [ + true, + { "const": "bar" } + ] + } + }, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with unevaluated items", + "data": ["foo", "bar"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with if/then/else", + "schema": { + "items": [ { "const": "foo" } ], + "if": { + "items": [ + true, + { "const": "bar" } + ] + }, + "then": { + "items": [ + true, + true, + { "const": "then" } + ] + }, + "else": { + "items": [ + true, + true, + true, + { "const": "else" } + ] + }, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "when if matches and it has no unevaluated items", + "data": ["foo", "bar", "then"], + "valid": true + }, + { + "description": "when if matches and it has unevaluated items", + "data": ["foo", "bar", "then", "else"], + "valid": false + }, + { + "description": "when if doesn't match and it has no unevaluated items", + "data": ["foo", 42, 42, "else"], + "valid": true + }, + { + "description": "when if doesn't match and it has unevaluated items", + "data": ["foo", 42, 42, "else", 42], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with boolean schemas", + "schema": { + "allOf": [true], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": [], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with $ref", + "schema": { + "$ref": "#/$defs/bar", + "items": [ + { "type": "string" } + ], + "unevaluatedItems": false, + "$defs": { + "bar": { + "items": [ + true, + { "type": "string" } + ] + } + } + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo", "bar", "baz"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems can't see inside cousins", + "schema": { + "allOf": [ + { + "items": [ true ] + }, + { "unevaluatedItems": 
false } + ] + }, + "tests": [ + { + "description": "always fails", + "data": [ 1 ], + "valid": false + } + ] + }, + { + "description": "item is evaluated in an uncle schema to unevaluatedItems", + "schema": { + "properties": { + "foo": { + "items": [ + { "type": "string" } + ], + "unevaluatedItems": false + } + }, + "anyOf": [ + { + "properties": { + "foo": { + "items": [ + true, + { "type": "string" } + ] + } + } + } + ] + }, + "tests": [ + { + "description": "no extra items", + "data": { + "foo": [ + "test" + ] + }, + "valid": true + }, + { + "description": "uncle keyword evaluation is not significant", + "data": { + "foo": [ + "test", + "test" + ] + }, + "valid": false + } + ] + }, + { + "description": "non-array instances are valid", + "schema": {"unevaluatedItems": false}, + "tests": [ + { + "description": "ignores booleans", + "data": true, + "valid": true + }, + { + "description": "ignores integers", + "data": 123, + "valid": true + }, + { + "description": "ignores floats", + "data": 1.0, + "valid": true + }, + { + "description": "ignores objects", + "data": {}, + "valid": true + }, + { + "description": "ignores strings", + "data": "foo", + "valid": true + }, + { + "description": "ignores null", + "data": null, + "valid": true + } + ] + }, + { + "description": "unevaluatedItems with null instance elements", + "schema": { + "unevaluatedItems": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/unevaluatedProperties.json b/vendor/jsonschema/json/tests/draft2019-09/unevaluatedProperties.json new file mode 100644 index 00000000..e39b21d2 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/unevaluatedProperties.json @@ -0,0 +1,1362 @@ +[ + { + "description": "unevaluatedProperties true", + "schema": { + "type": "object", + "unevaluatedProperties": true + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": {}, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + } + ] + }, + { + "description": "unevaluatedProperties schema", + "schema": { + "type": "object", + "unevaluatedProperties": { + "type": "string", + "minLength": 3 + } + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": {}, + "valid": true + }, + { + "description": "with valid unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with invalid unevaluated properties", + "data": { + "foo": "fo" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties false", + "schema": { + "type": "object", + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": {}, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with adjacent properties", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with adjacent patternProperties", + "schema": { + "type": "object", 
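The contrast the unevaluatedItems fixtures above rely on: in draft 2019-09 the array (tuple) form of items only evaluates the prefix positions, leaving the rest for unevaluatedItems, while the single-schema form evaluates every position, so unevaluatedItems has nothing left to check. A sketch, same assumption about the vendored package:

    from jsonschema import Draft201909Validator

    tuple_form = Draft201909Validator({
        "items": [{"type": "string"}],   # 2019-09 tuple form
        "unevaluatedItems": False,
    })
    assert tuple_form.is_valid(["foo"])
    assert not tuple_form.is_valid(["foo", "bar"])  # index 1 is unevaluated

    uniform = Draft201909Validator({
        "items": {"type": "string"},     # uniform form evaluates every index
        "unevaluatedItems": False,
    })
    assert uniform.is_valid(["foo", "bar"])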
+ "patternProperties": { + "^foo": { "type": "string" } + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with adjacent additionalProperties", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "additionalProperties": true, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no additional properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with additional properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + } + ] + }, + { + "description": "unevaluatedProperties with nested properties", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "properties": { + "bar": { "type": "string" } + } + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no additional properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "with additional properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with nested patternProperties", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "patternProperties": { + "^bar": { "type": "string" } + } + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no additional properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "with additional properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with nested additionalProperties", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "additionalProperties": true + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no additional properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with additional properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + } + ] + }, + { + "description": "unevaluatedProperties with nested unevaluatedProperties", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "unevaluatedProperties": true + } + ], + "unevaluatedProperties": { + "type": "string", + "maxLength": 2 + } + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + } + ] + }, + { + "description": "unevaluatedProperties with anyOf", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "anyOf": [ + { + "properties": { + "bar": { "const": "bar" } + }, + "required": ["bar"] + }, + { + "properties": { + "baz": { "const": "baz" } + }, + "required": ["baz"] + }, + { + "properties": { + "quux": { "const": "quux" } + }, + "required": ["quux"] + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "when one matches and has no unevaluated 
properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "when one matches and has unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "not-baz" + }, + "valid": false + }, + { + "description": "when two match and has no unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz" + }, + "valid": true + }, + { + "description": "when two match and has unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz", + "quux": "not-quux" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with oneOf", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "oneOf": [ + { + "properties": { + "bar": { "const": "bar" } + }, + "required": ["bar"] + }, + { + "properties": { + "baz": { "const": "baz" } + }, + "required": ["baz"] + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar", + "quux": "quux" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with not", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "not": { + "not": { + "properties": { + "bar": { "const": "bar" } + }, + "required": ["bar"] + } + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with if/then/else", + "schema": { + "type": "object", + "if": { + "properties": { + "foo": { "const": "then" } + }, + "required": ["foo"] + }, + "then": { + "properties": { + "bar": { "type": "string" } + }, + "required": ["bar"] + }, + "else": { + "properties": { + "baz": { "type": "string" } + }, + "required": ["baz"] + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "when if is true and has no unevaluated properties", + "data": { + "foo": "then", + "bar": "bar" + }, + "valid": true + }, + { + "description": "when if is true and has unevaluated properties", + "data": { + "foo": "then", + "bar": "bar", + "baz": "baz" + }, + "valid": false + }, + { + "description": "when if is false and has no unevaluated properties", + "data": { + "baz": "baz" + }, + "valid": true + }, + { + "description": "when if is false and has unevaluated properties", + "data": { + "foo": "else", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with if/then/else, then not defined", + "schema": { + "type": "object", + "if": { + "properties": { + "foo": { "const": "then" } + }, + "required": ["foo"] + }, + "else": { + "properties": { + "baz": { "type": "string" } + }, + "required": ["baz"] + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "when if is true and has no unevaluated properties", + "data": { + "foo": "then", + "bar": "bar" + }, + "valid": false + }, + { + "description": "when if is true and has unevaluated properties", + "data": { + "foo": "then", + "bar": "bar", + "baz": "baz" + }, + "valid": false + }, + { + "description": "when if is false and has no unevaluated properties", + "data": { + "baz": "baz" + }, + "valid": true + }, + { + "description": "when if is false and has unevaluated properties", + "data": { + "foo": "else", + "baz": 
"baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with if/then/else, else not defined", + "schema": { + "type": "object", + "if": { + "properties": { + "foo": { "const": "then" } + }, + "required": ["foo"] + }, + "then": { + "properties": { + "bar": { "type": "string" } + }, + "required": ["bar"] + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "when if is true and has no unevaluated properties", + "data": { + "foo": "then", + "bar": "bar" + }, + "valid": true + }, + { + "description": "when if is true and has unevaluated properties", + "data": { + "foo": "then", + "bar": "bar", + "baz": "baz" + }, + "valid": false + }, + { + "description": "when if is false and has no unevaluated properties", + "data": { + "baz": "baz" + }, + "valid": false + }, + { + "description": "when if is false and has unevaluated properties", + "data": { + "foo": "else", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with dependentSchemas", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "dependentSchemas": { + "foo": { + "properties": { + "bar": { "const": "bar" } + }, + "required": ["bar"] + } + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with boolean schemas", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [true], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with $ref", + "schema": { + "type": "object", + "$ref": "#/$defs/bar", + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": false, + "$defs": { + "bar": { + "properties": { + "bar": { "type": "string" } + } + } + } + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties can't see inside cousins", + "schema": { + "allOf": [ + { + "properties": { + "foo": true + } + }, + { + "unevaluatedProperties": false + } + ] + }, + "tests": [ + { + "description": "always fails", + "data": { + "foo": 1 + }, + "valid": false + } + ] + }, + { + "description": "nested unevaluatedProperties, outer false, inner true, properties outside", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "unevaluatedProperties": true + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + } + ] + }, + { + "description": "nested unevaluatedProperties, outer false, inner true, properties inside", + "schema": { + "type": "object", + "allOf": [ + { + 
"properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": true + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + } + ] + }, + { + "description": "nested unevaluatedProperties, outer true, inner false, properties outside", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "unevaluatedProperties": false + } + ], + "unevaluatedProperties": true + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": false + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "nested unevaluatedProperties, outer true, inner false, properties inside", + "schema": { + "type": "object", + "allOf": [ + { + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": false + } + ], + "unevaluatedProperties": true + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "cousin unevaluatedProperties, true and false, true with properties", + "schema": { + "type": "object", + "allOf": [ + { + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": true + }, + { + "unevaluatedProperties": false + } + ] + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": false + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "cousin unevaluatedProperties, true and false, false with properties", + "schema": { + "type": "object", + "allOf": [ + { + "unevaluatedProperties": true + }, + { + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": false + } + ] + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "property is evaluated in an uncle schema to unevaluatedProperties", + "comment": "see https://stackoverflow.com/questions/66936884/deeply-nested-unevaluatedproperties-and-their-expectations", + "schema": { + "type": "object", + "properties": { + "foo": { + "type": "object", + "properties": { + "bar": { + "type": "string" + } + }, + "unevaluatedProperties": false + } + }, + "anyOf": [ + { + "properties": { + "foo": { + "properties": { + "faz": { + "type": "string" + } + } + } + } + } + ] + }, + "tests": [ + { + "description": "no extra properties", + "data": { + "foo": { + "bar": "test" + } + }, + "valid": true + }, + { + "description": "uncle keyword evaluation is not significant", + "data": { + "foo": { + "bar": "test", + "faz": "test" + } + }, + "valid": false + } + ] + }, + { + "description": "in-place applicator siblings, allOf has unevaluated", + "schema": { + "type": "object", + "allOf": [ + { + "properties": { + "foo": true + }, 
+ "unevaluatedProperties": false + } + ], + "anyOf": [ + { + "properties": { + "bar": true + } + } + ] + }, + "tests": [ + { + "description": "base case: both properties present", + "data": { + "foo": 1, + "bar": 1 + }, + "valid": false + }, + { + "description": "in place applicator siblings, bar is missing", + "data": { + "foo": 1 + }, + "valid": true + }, + { + "description": "in place applicator siblings, foo is missing", + "data": { + "bar": 1 + }, + "valid": false + } + ] + }, + { + "description": "in-place applicator siblings, anyOf has unevaluated", + "schema": { + "type": "object", + "allOf": [ + { + "properties": { + "foo": true + } + } + ], + "anyOf": [ + { + "properties": { + "bar": true + }, + "unevaluatedProperties": false + } + ] + }, + "tests": [ + { + "description": "base case: both properties present", + "data": { + "foo": 1, + "bar": 1 + }, + "valid": false + }, + { + "description": "in place applicator siblings, bar is missing", + "data": { + "foo": 1 + }, + "valid": false + }, + { + "description": "in place applicator siblings, foo is missing", + "data": { + "bar": 1 + }, + "valid": true + } + ] + }, + { + "description": "unevaluatedProperties + single cyclic ref", + "schema": { + "type": "object", + "properties": { + "x": { "$ref": "#" } + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "Empty is valid", + "data": {}, + "valid": true + }, + { + "description": "Single is valid", + "data": { "x": {} }, + "valid": true + }, + { + "description": "Unevaluated on 1st level is invalid", + "data": { "x": {}, "y": {} }, + "valid": false + }, + { + "description": "Nested is valid", + "data": { "x": { "x": {} } }, + "valid": true + }, + { + "description": "Unevaluated on 2nd level is invalid", + "data": { "x": { "x": {}, "y": {} } }, + "valid": false + }, + { + "description": "Deep nested is valid", + "data": { "x": { "x": { "x": {} } } }, + "valid": true + }, + { + "description": "Unevaluated on 3rd level is invalid", + "data": { "x": { "x": { "x": {}, "y": {} } } }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties + ref inside allOf / oneOf", + "schema": { + "$defs": { + "one": { + "properties": { "a": true } + }, + "two": { + "required": ["x"], + "properties": { "x": true } + } + }, + "allOf": [ + { "$ref": "#/$defs/one" }, + { "properties": { "b": true } }, + { + "oneOf": [ + { "$ref": "#/$defs/two" }, + { + "required": ["y"], + "properties": { "y": true } + } + ] + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "Empty is invalid (no x or y)", + "data": {}, + "valid": false + }, + { + "description": "a and b are invalid (no x or y)", + "data": { "a": 1, "b": 1 }, + "valid": false + }, + { + "description": "x and y are invalid", + "data": { "x": 1, "y": 1 }, + "valid": false + }, + { + "description": "a and x are valid", + "data": { "a": 1, "x": 1 }, + "valid": true + }, + { + "description": "a and y are valid", + "data": { "a": 1, "y": 1 }, + "valid": true + }, + { + "description": "a and b and x are valid", + "data": { "a": 1, "b": 1, "x": 1 }, + "valid": true + }, + { + "description": "a and b and y are valid", + "data": { "a": 1, "b": 1, "y": 1 }, + "valid": true + }, + { + "description": "a and b and x and y are invalid", + "data": { "a": 1, "b": 1, "x": 1, "y": 1 }, + "valid": false + } + ] + }, + { + "description": "dynamic evalation inside nested refs", + "schema": { + "$defs": { + "one": { + "oneOf": [ + { "$ref": "#/$defs/two" }, + { "required": ["b"], "properties": { "b": true } }, + 
{ "required": ["xx"], "patternProperties": { "x": true } }, + { "required": ["all"], "unevaluatedProperties": true } + ] + }, + "two": { + "oneOf": [ + { "required": ["c"], "properties": { "c": true } }, + { "required": ["d"], "properties": { "d": true } } + ] + } + }, + "oneOf": [ + { "$ref": "#/$defs/one" }, + { "required": ["a"], "properties": { "a": true } } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "Empty is invalid", + "data": {}, + "valid": false + }, + { + "description": "a is valid", + "data": { "a": 1 }, + "valid": true + }, + { + "description": "b is valid", + "data": { "b": 1 }, + "valid": true + }, + { + "description": "c is valid", + "data": { "c": 1 }, + "valid": true + }, + { + "description": "d is valid", + "data": { "d": 1 }, + "valid": true + }, + { + "description": "a + b is invalid", + "data": { "a": 1, "b": 1 }, + "valid": false + }, + { + "description": "a + c is invalid", + "data": { "a": 1, "c": 1 }, + "valid": false + }, + { + "description": "a + d is invalid", + "data": { "a": 1, "d": 1 }, + "valid": false + }, + { + "description": "b + c is invalid", + "data": { "b": 1, "c": 1 }, + "valid": false + }, + { + "description": "b + d is invalid", + "data": { "b": 1, "d": 1 }, + "valid": false + }, + { + "description": "c + d is invalid", + "data": { "c": 1, "d": 1 }, + "valid": false + }, + { + "description": "xx is valid", + "data": { "xx": 1 }, + "valid": true + }, + { + "description": "xx + foox is valid", + "data": { "xx": 1, "foox": 1 }, + "valid": true + }, + { + "description": "xx + foo is invalid", + "data": { "xx": 1, "foo": 1 }, + "valid": false + }, + { + "description": "xx + a is invalid", + "data": { "xx": 1, "a": 1 }, + "valid": false + }, + { + "description": "xx + b is invalid", + "data": { "xx": 1, "b": 1 }, + "valid": false + }, + { + "description": "xx + c is invalid", + "data": { "xx": 1, "c": 1 }, + "valid": false + }, + { + "description": "xx + d is invalid", + "data": { "xx": 1, "d": 1 }, + "valid": false + }, + { + "description": "all is valid", + "data": { "all": 1 }, + "valid": true + }, + { + "description": "all + foo is valid", + "data": { "all": 1, "foo": 1 }, + "valid": true + }, + { + "description": "all + a is invalid", + "data": { "all": 1, "a": 1 }, + "valid": false + } + ] + }, + { + "description": "non-object instances are valid", + "schema": {"unevaluatedProperties": false}, + "tests": [ + { + "description": "ignores booleans", + "data": true, + "valid": true + }, + { + "description": "ignores integers", + "data": 123, + "valid": true + }, + { + "description": "ignores floats", + "data": 1.0, + "valid": true + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores strings", + "data": "foo", + "valid": true + }, + { + "description": "ignores null", + "data": null, + "valid": true + } + ] + }, + { + "description": "unevaluatedProperties with null valued instance properties", + "schema": { + "unevaluatedProperties": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null valued properties", + "data": {"foo": null}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/uniqueItems.json b/vendor/jsonschema/json/tests/draft2019-09/uniqueItems.json new file mode 100644 index 00000000..2ccf666d --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/uniqueItems.json @@ -0,0 +1,404 @@ +[ + { + "description": "uniqueItems validation", + "schema": {"uniqueItems": true}, + "tests": [ + { + 
"description": "unique array of integers is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "non-unique array of integers is invalid", + "data": [1, 1], + "valid": false + }, + { + "description": "non-unique array of more than two integers is invalid", + "data": [1, 2, 1], + "valid": false + }, + { + "description": "numbers are unique if mathematically unequal", + "data": [1.0, 1.00, 1], + "valid": false + }, + { + "description": "false is not equal to zero", + "data": [0, false], + "valid": true + }, + { + "description": "true is not equal to one", + "data": [1, true], + "valid": true + }, + { + "description": "unique array of strings is valid", + "data": ["foo", "bar", "baz"], + "valid": true + }, + { + "description": "non-unique array of strings is invalid", + "data": ["foo", "bar", "foo"], + "valid": false + }, + { + "description": "unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "baz"}], + "valid": true + }, + { + "description": "non-unique array of objects is invalid", + "data": [{"foo": "bar"}, {"foo": "bar"}], + "valid": false + }, + { + "description": "unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : false}}} + ], + "valid": true + }, + { + "description": "non-unique array of nested objects is invalid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : true}}} + ], + "valid": false + }, + { + "description": "unique array of arrays is valid", + "data": [["foo"], ["bar"]], + "valid": true + }, + { + "description": "non-unique array of arrays is invalid", + "data": [["foo"], ["foo"]], + "valid": false + }, + { + "description": "non-unique array of more than two arrays is invalid", + "data": [["foo"], ["bar"], ["foo"]], + "valid": false + }, + { + "description": "1 and true are unique", + "data": [1, true], + "valid": true + }, + { + "description": "0 and false are unique", + "data": [0, false], + "valid": true + }, + { + "description": "[1] and [true] are unique", + "data": [[1], [true]], + "valid": true + }, + { + "description": "[0] and [false] are unique", + "data": [[0], [false]], + "valid": true + }, + { + "description": "nested [1] and [true] are unique", + "data": [[[1], "foo"], [[true], "foo"]], + "valid": true + }, + { + "description": "nested [0] and [false] are unique", + "data": [[[0], "foo"], [[false], "foo"]], + "valid": true + }, + { + "description": "unique heterogeneous types are valid", + "data": [{}, [1], true, null, 1, "{}"], + "valid": true + }, + { + "description": "non-unique heterogeneous types are invalid", + "data": [{}, [1], true, null, {}, 1], + "valid": false + }, + { + "description": "different objects are unique", + "data": [{"a": 1, "b": 2}, {"a": 2, "b": 1}], + "valid": true + }, + { + "description": "objects are non-unique despite key order", + "data": [{"a": 1, "b": 2}, {"b": 2, "a": 1}], + "valid": false + }, + { + "description": "{\"a\": false} and {\"a\": 0} are unique", + "data": [{"a": false}, {"a": 0}], + "valid": true + }, + { + "description": "{\"a\": true} and {\"a\": 1} are unique", + "data": [{"a": true}, {"a": 1}], + "valid": true + } + ] + }, + { + "description": "uniqueItems with an array of items", + "schema": { + "items": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": true + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, 
false], + "valid": true + }, + { + "description": "[false, false] from items array is not valid", + "data": [false, false], + "valid": false + }, + { + "description": "[true, true] from items array is not valid", + "data": [true, true], + "valid": false + }, + { + "description": "unique array extended from [false, true] is valid", + "data": [false, true, "foo", "bar"], + "valid": true + }, + { + "description": "unique array extended from [true, false] is valid", + "data": [true, false, "foo", "bar"], + "valid": true + }, + { + "description": "non-unique array extended from [false, true] is not valid", + "data": [false, true, "foo", "foo"], + "valid": false + }, + { + "description": "non-unique array extended from [true, false] is not valid", + "data": [true, false, "foo", "foo"], + "valid": false + } + ] + }, + { + "description": "uniqueItems with an array of items and additionalItems=false", + "schema": { + "items": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": true, + "additionalItems": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is not valid", + "data": [false, false], + "valid": false + }, + { + "description": "[true, true] from items array is not valid", + "data": [true, true], + "valid": false + }, + { + "description": "extra items are invalid even if unique", + "data": [false, true, null], + "valid": false + } + ] + }, + { + "description": "uniqueItems=false validation", + "schema": { "uniqueItems": false }, + "tests": [ + { + "description": "unique array of integers is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "non-unique array of integers is valid", + "data": [1, 1], + "valid": true + }, + { + "description": "numbers are unique if mathematically unequal", + "data": [1.0, 1.00, 1], + "valid": true + }, + { + "description": "false is not equal to zero", + "data": [0, false], + "valid": true + }, + { + "description": "true is not equal to one", + "data": [1, true], + "valid": true + }, + { + "description": "unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "baz"}], + "valid": true + }, + { + "description": "non-unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "bar"}], + "valid": true + }, + { + "description": "unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : false}}} + ], + "valid": true + }, + { + "description": "non-unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : true}}} + ], + "valid": true + }, + { + "description": "unique array of arrays is valid", + "data": [["foo"], ["bar"]], + "valid": true + }, + { + "description": "non-unique array of arrays is valid", + "data": [["foo"], ["foo"]], + "valid": true + }, + { + "description": "1 and true are unique", + "data": [1, true], + "valid": true + }, + { + "description": "0 and false are unique", + "data": [0, false], + "valid": true + }, + { + "description": "unique heterogeneous types are valid", + "data": [{}, [1], true, null, 1], + "valid": true + }, + { + "description": "non-unique heterogeneous types are valid", + "data": [{}, [1], true, null, {}, 1], + "valid": true + } + ] + }, + { + "description": "uniqueItems=false with an array of items", + "schema": { + 
"items": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is valid", + "data": [false, false], + "valid": true + }, + { + "description": "[true, true] from items array is valid", + "data": [true, true], + "valid": true + }, + { + "description": "unique array extended from [false, true] is valid", + "data": [false, true, "foo", "bar"], + "valid": true + }, + { + "description": "unique array extended from [true, false] is valid", + "data": [true, false, "foo", "bar"], + "valid": true + }, + { + "description": "non-unique array extended from [false, true] is valid", + "data": [false, true, "foo", "foo"], + "valid": true + }, + { + "description": "non-unique array extended from [true, false] is valid", + "data": [true, false, "foo", "foo"], + "valid": true + } + ] + }, + { + "description": "uniqueItems=false with an array of items and additionalItems=false", + "schema": { + "items": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": false, + "additionalItems": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is valid", + "data": [false, false], + "valid": true + }, + { + "description": "[true, true] from items array is valid", + "data": [true, true], + "valid": true + }, + { + "description": "extra items are invalid even if unique", + "data": [false, true, null], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/unknownKeyword.json b/vendor/jsonschema/json/tests/draft2019-09/unknownKeyword.json new file mode 100644 index 00000000..e46657d8 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2019-09/unknownKeyword.json @@ -0,0 +1,56 @@ +[ + { + "description": "$id inside an unknown keyword is not a real identifier", + "comment": "the implementation must not be confused by an $id in locations we do not know how to parse", + "schema": { + "$defs": { + "id_in_unknown0": { + "not": { + "array_of_schemas": [ + { + "$id": "https://localhost:1234/unknownKeyword/my_identifier.json", + "type": "null" + } + ] + } + }, + "real_id_in_schema": { + "$id": "https://localhost:1234/unknownKeyword/my_identifier.json", + "type": "string" + }, + "id_in_unknown1": { + "not": { + "object_of_schemas": { + "foo": { + "$id": "https://localhost:1234/unknownKeyword/my_identifier.json", + "type": "integer" + } + } + } + } + }, + "anyOf": [ + { "$ref": "#/$defs/id_in_unknown0" }, + { "$ref": "#/$defs/id_in_unknown1" }, + { "$ref": "https://localhost:1234/unknownKeyword/my_identifier.json" } + ] + }, + "tests": [ + { + "description": "type matches second anyOf, which has a real schema in it", + "data": "a string", + "valid": true + }, + { + "description": "type matches non-schema in first anyOf", + "data": null, + "valid": false + }, + { + "description": "type matches non-schema in third anyOf", + "data": 1, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2019-09/vocabulary.json b/vendor/jsonschema/json/tests/draft2019-09/vocabulary.json new file mode 100644 index 00000000..982e673d --- /dev/null 
+++ b/vendor/jsonschema/json/tests/draft2019-09/vocabulary.json @@ -0,0 +1,38 @@ +[ + { + "description": "schema that uses custom metaschema with no validation vocabulary", + "schema": { + "$id": "https://schema/using/no/validation", + "$schema": "http://localhost:1234/draft2019-09/metaschema-no-validation.json", + "properties": { + "badProperty": false, + "numberProperty": { + "minimum": 10 + } + } + }, + "tests": [ + { + "description": "applicator vocabulary still works", + "data": { + "badProperty": "this property should not exist" + }, + "valid": false + }, + { + "description": "no validation: valid number", + "data": { + "numberProperty": 20 + }, + "valid": true + }, + { + "description": "no validation: invalid number, but it still validates", + "data": { + "numberProperty": 1 + }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/additionalProperties.json b/vendor/jsonschema/json/tests/draft2020-12/additionalProperties.json new file mode 100644 index 00000000..0f8e1627 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/additionalProperties.json @@ -0,0 +1,147 @@ +[ + { + "description": + "additionalProperties being false does not allow other properties", + "schema": { + "properties": {"foo": {}, "bar": {}}, + "patternProperties": { "^v": {} }, + "additionalProperties": false + }, + "tests": [ + { + "description": "no additional properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "an additional property is invalid", + "data": {"foo" : 1, "bar" : 2, "quux" : "boom"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [1, 2, 3], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobarbaz", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + }, + { + "description": "patternProperties are not additional properties", + "data": {"foo":1, "vroom": 2}, + "valid": true + } + ] + }, + { + "description": "non-ASCII pattern with additionalProperties", + "schema": { + "patternProperties": {"^á": {}}, + "additionalProperties": false + }, + "tests": [ + { + "description": "matching the pattern is valid", + "data": {"ármányos": 2}, + "valid": true + }, + { + "description": "not matching the pattern is invalid", + "data": {"élmény": 2}, + "valid": false + } + ] + }, + { + "description": "additionalProperties with schema", + "schema": { + "properties": {"foo": {}, "bar": {}}, + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "no additional properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "an additional valid property is valid", + "data": {"foo" : 1, "bar" : 2, "quux" : true}, + "valid": true + }, + { + "description": "an additional invalid property is invalid", + "data": {"foo" : 1, "bar" : 2, "quux" : 12}, + "valid": false + } + ] + }, + { + "description": + "additionalProperties can exist by itself", + "schema": { + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "an additional valid property is valid", + "data": {"foo" : true}, + "valid": true + }, + { + "description": "an additional invalid property is invalid", + "data": {"foo" : 1}, + "valid": false + } + ] + }, + { + "description": "additionalProperties are allowed by default", + "schema": {"properties": {"foo": {}, "bar": {}}}, + "tests": [ + { + "description": "additional properties are allowed", + "data": {"foo": 1, "bar": 2, "quux": true}, + "valid": true + } +
] + }, + { + "description": "additionalProperties does not look in applicators", + "schema": { + "allOf": [ + {"properties": {"foo": {}}} + ], + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "properties defined in allOf are not examined", + "data": {"foo": 1, "bar": true}, + "valid": false + } + ] + }, + { + "description": "additionalProperties with null valued instance properties", + "schema": { + "additionalProperties": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foo": null}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/allOf.json b/vendor/jsonschema/json/tests/draft2020-12/allOf.json new file mode 100644 index 00000000..ec9319e1 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/allOf.json @@ -0,0 +1,294 @@ +[ + { + "description": "allOf", + "schema": { + "allOf": [ + { + "properties": { + "bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "allOf", + "data": {"foo": "baz", "bar": 2}, + "valid": true + }, + { + "description": "mismatch second", + "data": {"foo": "baz"}, + "valid": false + }, + { + "description": "mismatch first", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "wrong type", + "data": {"foo": "baz", "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "allOf with base schema", + "schema": { + "properties": {"bar": {"type": "integer"}}, + "required": ["bar"], + "allOf" : [ + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + }, + { + "properties": { + "baz": {"type": "null"} + }, + "required": ["baz"] + } + ] + }, + "tests": [ + { + "description": "valid", + "data": {"foo": "quux", "bar": 2, "baz": null}, + "valid": true + }, + { + "description": "mismatch base schema", + "data": {"foo": "quux", "baz": null}, + "valid": false + }, + { + "description": "mismatch first allOf", + "data": {"bar": 2, "baz": null}, + "valid": false + }, + { + "description": "mismatch second allOf", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "mismatch both", + "data": {"bar": 2}, + "valid": false + } + ] + }, + { + "description": "allOf simple types", + "schema": { + "allOf": [ + {"maximum": 30}, + {"minimum": 20} + ] + }, + "tests": [ + { + "description": "valid", + "data": 25, + "valid": true + }, + { + "description": "mismatch one", + "data": 35, + "valid": false + } + ] + }, + { + "description": "allOf with boolean schemas, all true", + "schema": {"allOf": [true, true]}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "allOf with boolean schemas, some false", + "schema": {"allOf": [true, false]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "allOf with boolean schemas, all false", + "schema": {"allOf": [false, false]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "allOf with one empty schema", + "schema": { + "allOf": [ + {} + ] + }, + "tests": [ + { + "description": "any data is valid", + "data": 1, + "valid": true + } + ] + }, + { + "description": "allOf with two empty schemas", + "schema": { + "allOf": [ + {}, + {} + ] + }, + "tests": [ + { + "description": "any data is valid", + "data": 1, + 
"valid": true + } + ] + }, + { + "description": "allOf with the first empty schema", + "schema": { + "allOf": [ + {}, + { "type": "number" } + ] + }, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "allOf with the last empty schema", + "schema": { + "allOf": [ + { "type": "number" }, + {} + ] + }, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "nested allOf, to check validation semantics", + "schema": { + "allOf": [ + { + "allOf": [ + { + "type": "null" + } + ] + } + ] + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "anything non-null is invalid", + "data": 123, + "valid": false + } + ] + }, + { + "description": "allOf combined with anyOf, oneOf", + "schema": { + "allOf": [ { "multipleOf": 2 } ], + "anyOf": [ { "multipleOf": 3 } ], + "oneOf": [ { "multipleOf": 5 } ] + }, + "tests": [ + { + "description": "allOf: false, anyOf: false, oneOf: false", + "data": 1, + "valid": false + }, + { + "description": "allOf: false, anyOf: false, oneOf: true", + "data": 5, + "valid": false + }, + { + "description": "allOf: false, anyOf: true, oneOf: false", + "data": 3, + "valid": false + }, + { + "description": "allOf: false, anyOf: true, oneOf: true", + "data": 15, + "valid": false + }, + { + "description": "allOf: true, anyOf: false, oneOf: false", + "data": 2, + "valid": false + }, + { + "description": "allOf: true, anyOf: false, oneOf: true", + "data": 10, + "valid": false + }, + { + "description": "allOf: true, anyOf: true, oneOf: false", + "data": 6, + "valid": false + }, + { + "description": "allOf: true, anyOf: true, oneOf: true", + "data": 30, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/anchor.json b/vendor/jsonschema/json/tests/draft2020-12/anchor.json new file mode 100644 index 00000000..8a6a5eaf --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/anchor.json @@ -0,0 +1,226 @@ +[ + { + "description": "Location-independent identifier", + "schema": { + "$ref": "#foo", + "$defs": { + "A": { + "$anchor": "foo", + "type": "integer" + } + } + }, + "tests": [ + { + "data": 1, + "description": "match", + "valid": true + }, + { + "data": "a", + "description": "mismatch", + "valid": false + } + ] + }, + { + "description": "Location-independent identifier with absolute URI", + "schema": { + "$ref": "http://localhost:1234/bar#foo", + "$defs": { + "A": { + "$id": "http://localhost:1234/bar", + "$anchor": "foo", + "type": "integer" + } + } + }, + "tests": [ + { + "data": 1, + "description": "match", + "valid": true + }, + { + "data": "a", + "description": "mismatch", + "valid": false + } + ] + }, + { + "description": "Location-independent identifier with base URI change in subschema", + "schema": { + "$id": "http://localhost:1234/root", + "$ref": "http://localhost:1234/nested.json#foo", + "$defs": { + "A": { + "$id": "nested.json", + "$defs": { + "B": { + "$anchor": "foo", + "type": "integer" + } + } + } + } + }, + "tests": [ + { + "data": 1, + "description": "match", + "valid": true + }, + { + "data": "a", + "description": "mismatch", + "valid": false + } + ] + }, + { + "description": "$anchor inside an enum is not a real identifier", + "comment": "the implementation must not be confused by an 
$anchor buried in the enum", + "schema": { + "$defs": { + "anchor_in_enum": { + "enum": [ + { + "$anchor": "my_anchor", + "type": "null" + } + ] + }, + "real_identifier_in_schema": { + "$anchor": "my_anchor", + "type": "string" + }, + "zzz_anchor_in_const": { + "const": { + "$anchor": "my_anchor", + "type": "null" + } + } + }, + "anyOf": [ + { "$ref": "#/$defs/anchor_in_enum" }, + { "$ref": "#my_anchor" } + ] + }, + "tests": [ + { + "description": "exact match to enum, and type matches", + "data": { + "$anchor": "my_anchor", + "type": "null" + }, + "valid": true + }, + { + "description": "in implementations that strip $anchor, this may match either $def", + "data": { + "type": "null" + }, + "valid": false + }, + { + "description": "match $ref to $anchor", + "data": "a string to match #/$defs/anchor_in_enum", + "valid": true + }, + { + "description": "no match on enum or $ref to $anchor", + "data": 1, + "valid": false + } + ] + }, + { + "description": "same $anchor with different base uri", + "schema": { + "$id": "http://localhost:1234/foobar", + "$defs": { + "A": { + "$id": "child1", + "allOf": [ + { + "$id": "child2", + "$anchor": "my_anchor", + "type": "number" + }, + { + "$anchor": "my_anchor", + "type": "string" + } + ] + } + }, + "$ref": "child1#my_anchor" + }, + "tests": [ + { + "description": "$ref resolves to /$defs/A/allOf/1", + "data": "a", + "valid": true + }, + { + "description": "$ref does not resolve to /$defs/A/allOf/0", + "data": 1, + "valid": false + } + ] + }, + { + "description": "non-schema object containing an $anchor property", + "schema": { + "$defs": { + "const_not_anchor": { + "const": { + "$anchor": "not_a_real_anchor" + } + } + }, + "if": { + "const": "skip not_a_real_anchor" + }, + "then": true, + "else" : { + "$ref": "#/$defs/const_not_anchor" + } + }, + "tests": [ + { + "description": "skip traversing definition for a valid result", + "data": "skip not_a_real_anchor", + "valid": true + }, + { + "description": "const at const_not_anchor does not match", + "data": 1, + "valid": false + } + ] + }, + { + "description": "invalid anchors", + "comment": "Section 8.2.2", + "schema": { "$ref": "https://json-schema.org/draft/2020-12/schema" }, + "tests": [ + { + "description": "MUST start with a letter (and not #)", + "data": { "$anchor" : "#foo" }, + "valid": false + }, + { + "description": "JSON pointers are not valid", + "data": { "$anchor" : "/a/b" }, + "valid": false + }, + { + "description": "invalid with valid beginning", + "data": { "$anchor" : "foo#something" }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/anyOf.json b/vendor/jsonschema/json/tests/draft2020-12/anyOf.json new file mode 100644 index 00000000..ab5eb386 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/anyOf.json @@ -0,0 +1,189 @@ +[ + { + "description": "anyOf", + "schema": { + "anyOf": [ + { + "type": "integer" + }, + { + "minimum": 2 + } + ] + }, + "tests": [ + { + "description": "first anyOf valid", + "data": 1, + "valid": true + }, + { + "description": "second anyOf valid", + "data": 2.5, + "valid": true + }, + { + "description": "both anyOf valid", + "data": 3, + "valid": true + }, + { + "description": "neither anyOf valid", + "data": 1.5, + "valid": false + } + ] + }, + { + "description": "anyOf with base schema", + "schema": { + "type": "string", + "anyOf" : [ + { + "maxLength": 2 + }, + { + "minLength": 4 + } + ] + }, + "tests": [ + { + "description": "mismatch base schema", + "data": 3, + "valid": false + }, + { + "description": "one 
anyOf valid", + "data": "foobar", + "valid": true + }, + { + "description": "both anyOf invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "anyOf with boolean schemas, all true", + "schema": {"anyOf": [true, true]}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "anyOf with boolean schemas, some true", + "schema": {"anyOf": [true, false]}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "anyOf with boolean schemas, all false", + "schema": {"anyOf": [false, false]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "anyOf complex types", + "schema": { + "anyOf": [ + { + "properties": { + "bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "first anyOf valid (complex)", + "data": {"bar": 2}, + "valid": true + }, + { + "description": "second anyOf valid (complex)", + "data": {"foo": "baz"}, + "valid": true + }, + { + "description": "both anyOf valid (complex)", + "data": {"foo": "baz", "bar": 2}, + "valid": true + }, + { + "description": "neither anyOf valid (complex)", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "anyOf with one empty schema", + "schema": { + "anyOf": [ + { "type": "number" }, + {} + ] + }, + "tests": [ + { + "description": "string is valid", + "data": "foo", + "valid": true + }, + { + "description": "number is valid", + "data": 123, + "valid": true + } + ] + }, + { + "description": "nested anyOf, to check validation semantics", + "schema": { + "anyOf": [ + { + "anyOf": [ + { + "type": "null" + } + ] + } + ] + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "anything non-null is invalid", + "data": 123, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/boolean_schema.json b/vendor/jsonschema/json/tests/draft2020-12/boolean_schema.json new file mode 100644 index 00000000..6d40f23f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/boolean_schema.json @@ -0,0 +1,104 @@ +[ + { + "description": "boolean schema 'true'", + "schema": true, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "string is valid", + "data": "foo", + "valid": true + }, + { + "description": "boolean true is valid", + "data": true, + "valid": true + }, + { + "description": "boolean false is valid", + "data": false, + "valid": true + }, + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "object is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + }, + { + "description": "array is valid", + "data": ["foo"], + "valid": true + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "boolean schema 'false'", + "schema": false, + "tests": [ + { + "description": "number is invalid", + "data": 1, + "valid": false + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + }, + { + "description": "boolean true is invalid", + "data": true, + "valid": false + }, + { + "description": "boolean false is invalid", + "data": false, + 
"valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + }, + { + "description": "object is invalid", + "data": {"foo": "bar"}, + "valid": false + }, + { + "description": "empty object is invalid", + "data": {}, + "valid": false + }, + { + "description": "array is invalid", + "data": ["foo"], + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/const.json b/vendor/jsonschema/json/tests/draft2020-12/const.json new file mode 100644 index 00000000..1c2cafcc --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/const.json @@ -0,0 +1,342 @@ +[ + { + "description": "const validation", + "schema": {"const": 2}, + "tests": [ + { + "description": "same value is valid", + "data": 2, + "valid": true + }, + { + "description": "another value is invalid", + "data": 5, + "valid": false + }, + { + "description": "another type is invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "const with object", + "schema": {"const": {"foo": "bar", "baz": "bax"}}, + "tests": [ + { + "description": "same object is valid", + "data": {"foo": "bar", "baz": "bax"}, + "valid": true + }, + { + "description": "same object with different property order is valid", + "data": {"baz": "bax", "foo": "bar"}, + "valid": true + }, + { + "description": "another object is invalid", + "data": {"foo": "bar"}, + "valid": false + }, + { + "description": "another type is invalid", + "data": [1, 2], + "valid": false + } + ] + }, + { + "description": "const with array", + "schema": {"const": [{ "foo": "bar" }]}, + "tests": [ + { + "description": "same array is valid", + "data": [{"foo": "bar"}], + "valid": true + }, + { + "description": "another array item is invalid", + "data": [2], + "valid": false + }, + { + "description": "array with additional items is invalid", + "data": [1, 2, 3], + "valid": false + } + ] + }, + { + "description": "const with null", + "schema": {"const": null}, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "not null is invalid", + "data": 0, + "valid": false + } + ] + }, + { + "description": "const with false does not match 0", + "schema": {"const": false}, + "tests": [ + { + "description": "false is valid", + "data": false, + "valid": true + }, + { + "description": "integer zero is invalid", + "data": 0, + "valid": false + }, + { + "description": "float zero is invalid", + "data": 0.0, + "valid": false + } + ] + }, + { + "description": "const with true does not match 1", + "schema": {"const": true}, + "tests": [ + { + "description": "true is valid", + "data": true, + "valid": true + }, + { + "description": "integer one is invalid", + "data": 1, + "valid": false + }, + { + "description": "float one is invalid", + "data": 1.0, + "valid": false + } + ] + }, + { + "description": "const with [false] does not match [0]", + "schema": {"const": [false]}, + "tests": [ + { + "description": "[false] is valid", + "data": [false], + "valid": true + }, + { + "description": "[0] is invalid", + "data": [0], + "valid": false + }, + { + "description": "[0.0] is invalid", + "data": [0.0], + "valid": false + } + ] + }, + { + "description": "const with [true] does not match [1]", + "schema": {"const": [true]}, + "tests": [ + { + "description": "[true] is valid", + "data": [true], + "valid": true + }, + { + "description": "[1] is invalid", + "data": [1], + "valid": false + }, + { + 
"description": "[1.0] is invalid", + "data": [1.0], + "valid": false + } + ] + }, + { + "description": "const with {\"a\": false} does not match {\"a\": 0}", + "schema": {"const": {"a": false}}, + "tests": [ + { + "description": "{\"a\": false} is valid", + "data": {"a": false}, + "valid": true + }, + { + "description": "{\"a\": 0} is invalid", + "data": {"a": 0}, + "valid": false + }, + { + "description": "{\"a\": 0.0} is invalid", + "data": {"a": 0.0}, + "valid": false + } + ] + }, + { + "description": "const with {\"a\": true} does not match {\"a\": 1}", + "schema": {"const": {"a": true}}, + "tests": [ + { + "description": "{\"a\": true} is valid", + "data": {"a": true}, + "valid": true + }, + { + "description": "{\"a\": 1} is invalid", + "data": {"a": 1}, + "valid": false + }, + { + "description": "{\"a\": 1.0} is invalid", + "data": {"a": 1.0}, + "valid": false + } + ] + }, + { + "description": "const with 0 does not match other zero-like types", + "schema": {"const": 0}, + "tests": [ + { + "description": "false is invalid", + "data": false, + "valid": false + }, + { + "description": "integer zero is valid", + "data": 0, + "valid": true + }, + { + "description": "float zero is valid", + "data": 0.0, + "valid": true + }, + { + "description": "empty object is invalid", + "data": {}, + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + }, + { + "description": "empty string is invalid", + "data": "", + "valid": false + } + ] + }, + { + "description": "const with 1 does not match true", + "schema": {"const": 1}, + "tests": [ + { + "description": "true is invalid", + "data": true, + "valid": false + }, + { + "description": "integer one is valid", + "data": 1, + "valid": true + }, + { + "description": "float one is valid", + "data": 1.0, + "valid": true + } + ] + }, + { + "description": "const with -2.0 matches integer and float types", + "schema": {"const": -2.0}, + "tests": [ + { + "description": "integer -2 is valid", + "data": -2, + "valid": true + }, + { + "description": "integer 2 is invalid", + "data": 2, + "valid": false + }, + { + "description": "float -2.0 is valid", + "data": -2.0, + "valid": true + }, + { + "description": "float 2.0 is invalid", + "data": 2.0, + "valid": false + }, + { + "description": "float -2.00001 is invalid", + "data": -2.00001, + "valid": false + } + ] + }, + { + "description": "float and integers are equal up to 64-bit representation limits", + "schema": {"const": 9007199254740992}, + "tests": [ + { + "description": "integer is valid", + "data": 9007199254740992, + "valid": true + }, + { + "description": "integer minus one is invalid", + "data": 9007199254740991, + "valid": false + }, + { + "description": "float is valid", + "data": 9007199254740992.0, + "valid": true + }, + { + "description": "float minus one is invalid", + "data": 9007199254740991.0, + "valid": false + } + ] + }, + { + "description": "nul characters in strings", + "schema": { "const": "hello\u0000there" }, + "tests": [ + { + "description": "match string with nul", + "data": "hello\u0000there", + "valid": true + }, + { + "description": "do not match string lacking nul", + "data": "hellothere", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/contains.json b/vendor/jsonschema/json/tests/draft2020-12/contains.json new file mode 100644 index 00000000..2b1a5152 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/contains.json @@ -0,0 +1,165 @@ +[ + { + "description": "contains keyword validation", 
+ "schema": { + "contains": {"minimum": 5} + }, + "tests": [ + { + "description": "array with item matching schema (5) is valid", + "data": [3, 4, 5], + "valid": true + }, + { + "description": "array with item matching schema (6) is valid", + "data": [3, 4, 6], + "valid": true + }, + { + "description": "array with two items matching schema (5, 6) is valid", + "data": [3, 4, 5, 6], + "valid": true + }, + { + "description": "array without items matching schema is invalid", + "data": [2, 3, 4], + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + }, + { + "description": "not array is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "contains keyword with const keyword", + "schema": { + "contains": { "const": 5 } + }, + "tests": [ + { + "description": "array with item 5 is valid", + "data": [3, 4, 5], + "valid": true + }, + { + "description": "array with two items 5 is valid", + "data": [3, 4, 5, 5], + "valid": true + }, + { + "description": "array without item 5 is invalid", + "data": [1, 2, 3, 4], + "valid": false + } + ] + }, + { + "description": "contains keyword with boolean schema true", + "schema": {"contains": true}, + "tests": [ + { + "description": "any non-empty array is valid", + "data": ["foo"], + "valid": true + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + } + ] + }, + { + "description": "contains keyword with boolean schema false", + "schema": {"contains": false}, + "tests": [ + { + "description": "any non-empty array is invalid", + "data": ["foo"], + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + }, + { + "description": "non-arrays are valid", + "data": "contains does not apply to strings", + "valid": true + } + ] + }, + { + "description": "items + contains", + "schema": { + "items": { "multipleOf": 2 }, + "contains": { "multipleOf": 3 } + }, + "tests": [ + { + "description": "matches items, does not match contains", + "data": [ 2, 4, 8 ], + "valid": false + }, + { + "description": "does not match items, matches contains", + "data": [ 3, 6, 9 ], + "valid": false + }, + { + "description": "matches both items and contains", + "data": [ 6, 12 ], + "valid": true + }, + { + "description": "matches neither items nor contains", + "data": [ 1, 5 ], + "valid": false + } + ] + }, + { + "description": "contains with false if subschema", + "schema": { + "contains": { + "if": false, + "else": true + } + }, + "tests": [ + { + "description": "any non-empty array is valid", + "data": ["foo"], + "valid": true + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + } + ] + }, + { + "description": "contains with null instance elements", + "schema": { + "contains": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null items", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/content.json b/vendor/jsonschema/json/tests/draft2020-12/content.json new file mode 100644 index 00000000..44688e82 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/content.json @@ -0,0 +1,127 @@ +[ + { + "description": "validation of string-encoded content based on media type", + "schema": { + "contentMediaType": "application/json" + }, + "tests": [ + { + "description": "a valid JSON document", + "data": "{\"foo\": \"bar\"}", + "valid": true + }, + { + "description": "an invalid JSON document; validates true", + "data": "{:}", + "valid": true + }, + 
{ + "description": "ignores non-strings", + "data": 100, + "valid": true + } + ] + }, + { + "description": "validation of binary string-encoding", + "schema": { + "contentEncoding": "base64" + }, + "tests": [ + { + "description": "a valid base64 string", + "data": "eyJmb28iOiAiYmFyIn0K", + "valid": true + }, + { + "description": "an invalid base64 string (% is not a valid character); validates true", + "data": "eyJmb28iOi%iYmFyIn0K", + "valid": true + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + } + ] + }, + { + "description": "validation of binary-encoded media type documents", + "schema": { + "contentMediaType": "application/json", + "contentEncoding": "base64" + }, + "tests": [ + { + "description": "a valid base64-encoded JSON document", + "data": "eyJmb28iOiAiYmFyIn0K", + "valid": true + }, + { + "description": "a validly-encoded invalid JSON document; validates true", + "data": "ezp9Cg==", + "valid": true + }, + { + "description": "an invalid base64 string that is valid JSON; validates true", + "data": "{}", + "valid": true + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + } + ] + }, + { + "description": "validation of binary-encoded media type documents with schema", + "schema": { + "contentMediaType": "application/json", + "contentEncoding": "base64", + "contentSchema": { "required": ["foo"], "properties": { "foo": { "type": "string" } } } + }, + "tests": [ + { + "description": "a valid base64-encoded JSON document", + "data": "eyJmb28iOiAiYmFyIn0K", + "valid": true + }, + { + "description": "another valid base64-encoded JSON document", + "data": "eyJib28iOiAyMCwgImZvbyI6ICJiYXoifQ==", + "valid": true + }, + { + "description": "an invalid base64-encoded JSON document; validates true", + "data": "eyJib28iOiAyMH0=", + "valid": true + }, + { + "description": "an empty object as a base64-encoded JSON document; validates true", + "data": "e30=", + "valid": true + }, + { + "description": "an empty array as a base64-encoded JSON document", + "data": "W10=", + "valid": true + }, + { + "description": "a validly-encoded invalid JSON document; validates true", + "data": "ezp9Cg==", + "valid": true + }, + { + "description": "an invalid base64 string that is valid JSON; validates true", + "data": "{}", + "valid": true + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/default.json b/vendor/jsonschema/json/tests/draft2020-12/default.json new file mode 100644 index 00000000..289a9b66 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/default.json @@ -0,0 +1,79 @@ +[ + { + "description": "invalid type for default", + "schema": { + "properties": { + "foo": { + "type": "integer", + "default": [] + } + } + }, + "tests": [ + { + "description": "valid when property is specified", + "data": {"foo": 13}, + "valid": true + }, + { + "description": "still valid when the invalid default is used", + "data": {}, + "valid": true + } + ] + }, + { + "description": "invalid string value for default", + "schema": { + "properties": { + "bar": { + "type": "string", + "minLength": 4, + "default": "bad" + } + } + }, + "tests": [ + { + "description": "valid when property is specified", + "data": {"bar": "good"}, + "valid": true + }, + { + "description": "still valid when the invalid default is used", + "data": {}, + "valid": true + } + ] + }, + { + "description": "the default keyword does not do anything if the property is missing", + "schema": { 
+ "type": "object", + "properties": { + "alpha": { + "type": "number", + "maximum": 3, + "default": 5 + } + } + }, + "tests": [ + { + "description": "an explicit property value is checked against maximum (passing)", + "data": { "alpha": 1 }, + "valid": true + }, + { + "description": "an explicit property value is checked against maximum (failing)", + "data": { "alpha": 5 }, + "valid": false + }, + { + "description": "missing properties are not filled in with the default", + "data": {}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/defs.json b/vendor/jsonschema/json/tests/draft2020-12/defs.json new file mode 100644 index 00000000..c738be27 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/defs.json @@ -0,0 +1,20 @@ +[ + { + "description": "validate definition against metaschema", + "schema": { + "$ref": "https://json-schema.org/draft/2020-12/schema" + }, + "tests": [ + { + "description": "valid definition schema", + "data": {"$defs": {"foo": {"type": "integer"}}}, + "valid": true + }, + { + "description": "invalid definition schema", + "data": {"$defs": {"foo": {"type": 1}}}, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/dependentRequired.json b/vendor/jsonschema/json/tests/draft2020-12/dependentRequired.json new file mode 100644 index 00000000..c817120d --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/dependentRequired.json @@ -0,0 +1,142 @@ +[ + { + "description": "single dependency", + "schema": {"dependentRequired": {"bar": ["foo"]}}, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependant", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "with dependency", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["bar"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "empty dependents", + "schema": {"dependentRequired": {"bar": []}}, + "tests": [ + { + "description": "empty object", + "data": {}, + "valid": true + }, + { + "description": "object with one property", + "data": {"bar": 2}, + "valid": true + }, + { + "description": "non-object is valid", + "data": 1, + "valid": true + } + ] + }, + { + "description": "multiple dependents required", + "schema": {"dependentRequired": {"quux": ["foo", "bar"]}}, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependants", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "with dependencies", + "data": {"foo": 1, "bar": 2, "quux": 3}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"foo": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing other dependency", + "data": {"bar": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing both dependencies", + "data": {"quux": 1}, + "valid": false + } + ] + }, + { + "description": "dependencies with escaped characters", + "schema": { + "dependentRequired": { + "foo\nbar": ["foo\rbar"], + "foo\"bar": ["foo'bar"] + } + }, + "tests": [ + { + "description": "CRLF", + "data": { + "foo\nbar": 1, + "foo\rbar": 2 + }, + "valid": true + }, + { + "description": "quoted quotes", + "data": { + "foo'bar": 1, + 
"foo\"bar": 2 + }, + "valid": true + }, + { + "description": "CRLF missing dependent", + "data": { + "foo\nbar": 1, + "foo": 2 + }, + "valid": false + }, + { + "description": "quoted quotes missing dependent", + "data": { + "foo\"bar": 2 + }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/dependentSchemas.json b/vendor/jsonschema/json/tests/draft2020-12/dependentSchemas.json new file mode 100644 index 00000000..2ba1a757 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/dependentSchemas.json @@ -0,0 +1,129 @@ +[ + { + "description": "single dependency", + "schema": { + "dependentSchemas": { + "bar": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "integer"} + } + } + } + }, + "tests": [ + { + "description": "valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "no dependency", + "data": {"foo": "quux"}, + "valid": true + }, + { + "description": "wrong type", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "wrong type other", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + }, + { + "description": "wrong type both", + "data": {"foo": "quux", "bar": "quux"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["bar"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "boolean subschemas", + "schema": { + "dependentSchemas": { + "foo": true, + "bar": false + } + }, + "tests": [ + { + "description": "object with property having schema true is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "object with property having schema false is invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "object with both properties is invalid", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "dependencies with escaped characters", + "schema": { + "dependentSchemas": { + "foo\tbar": {"minProperties": 4}, + "foo'bar": {"required": ["foo\"bar"]} + } + }, + "tests": [ + { + "description": "quoted tab", + "data": { + "foo\tbar": 1, + "a": 2, + "b": 3, + "c": 4 + }, + "valid": true + }, + { + "description": "quoted quote", + "data": { + "foo'bar": {"foo\"bar": 1} + }, + "valid": false + }, + { + "description": "quoted tab invalid under dependent schema", + "data": { + "foo\tbar": 1, + "a": 2 + }, + "valid": false + }, + { + "description": "quoted quote invalid under dependent schema", + "data": {"foo'bar": 1}, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/dynamicRef.json b/vendor/jsonschema/json/tests/draft2020-12/dynamicRef.json new file mode 100644 index 00000000..c0a40c49 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/dynamicRef.json @@ -0,0 +1,619 @@ +[ + { + "description": "A $dynamicRef to a $dynamicAnchor in the same schema resource behaves like a normal $ref to an $anchor", + "schema": { + "$id": "https://test.json-schema.org/dynamicRef-dynamicAnchor-same-schema/root", + "type": "array", + "items": { "$dynamicRef": "#items" }, + "$defs": { + "foo": { + "$dynamicAnchor": "items", + "type": "string" + } + } + }, + "tests": [ + { + "description": "An array of strings is valid", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "An array containing non-strings is 
invalid", + "data": ["foo", 42], + "valid": false + } + ] + }, + { + "description": "A $dynamicRef to an $anchor in the same schema resource behaves like a normal $ref to an $anchor", + "schema": { + "$id": "https://test.json-schema.org/dynamicRef-anchor-same-schema/root", + "type": "array", + "items": { "$dynamicRef": "#items" }, + "$defs": { + "foo": { + "$anchor": "items", + "type": "string" + } + } + }, + "tests": [ + { + "description": "An array of strings is valid", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "An array containing non-strings is invalid", + "data": ["foo", 42], + "valid": false + } + ] + }, + { + "description": "A $ref to a $dynamicAnchor in the same schema resource behaves like a normal $ref to an $anchor", + "schema": { + "$id": "https://test.json-schema.org/ref-dynamicAnchor-same-schema/root", + "type": "array", + "items": { "$ref": "#items" }, + "$defs": { + "foo": { + "$dynamicAnchor": "items", + "type": "string" + } + } + }, + "tests": [ + { + "description": "An array of strings is valid", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "An array containing non-strings is invalid", + "data": ["foo", 42], + "valid": false + } + ] + }, + { + "description": "A $dynamicRef resolves to the first $dynamicAnchor still in scope that is encountered when the schema is evaluated", + "schema": { + "$id": "https://test.json-schema.org/typical-dynamic-resolution/root", + "$ref": "list", + "$defs": { + "foo": { + "$dynamicAnchor": "items", + "type": "string" + }, + "list": { + "$id": "list", + "type": "array", + "items": { "$dynamicRef": "#items" }, + "$defs": { + "items": { + "$comment": "This is only needed to satisfy the bookending requirement", + "$dynamicAnchor": "items" + } + } + } + } + }, + "tests": [ + { + "description": "An array of strings is valid", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "An array containing non-strings is invalid", + "data": ["foo", 42], + "valid": false + } + ] + }, + { + "description": "A $dynamicRef with intermediate scopes that don't include a matching $dynamicAnchor does not affect dynamic scope resolution", + "schema": { + "$id": "https://test.json-schema.org/dynamic-resolution-with-intermediate-scopes/root", + "$ref": "intermediate-scope", + "$defs": { + "foo": { + "$dynamicAnchor": "items", + "type": "string" + }, + "intermediate-scope": { + "$id": "intermediate-scope", + "$ref": "list" + }, + "list": { + "$id": "list", + "type": "array", + "items": { "$dynamicRef": "#items" }, + "$defs": { + "items": { + "$comment": "This is only needed to satisfy the bookending requirement", + "$dynamicAnchor": "items" + } + } + } + } + }, + "tests": [ + { + "description": "An array of strings is valid", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "An array containing non-strings is invalid", + "data": ["foo", 42], + "valid": false + } + ] + }, + { + "description": "An $anchor with the same name as a $dynamicAnchor is not used for dynamic scope resolution", + "schema": { + "$id": "https://test.json-schema.org/dynamic-resolution-ignores-anchors/root", + "$ref": "list", + "$defs": { + "foo": { + "$anchor": "items", + "type": "string" + }, + "list": { + "$id": "list", + "type": "array", + "items": { "$dynamicRef": "#items" }, + "$defs": { + "items": { + "$comment": "This is only needed to satisfy the bookending requirement", + "$dynamicAnchor": "items" + } + } + } + } + }, + "tests": [ + { + "description": "Any array is valid", + "data": ["foo", 42], + "valid": 
true + } + ] + }, + { + "description": "A $dynamicRef without a matching $dynamicAnchor in the same schema resource behaves like a normal $ref to $anchor", + "schema": { + "$id": "https://test.json-schema.org/dynamic-resolution-without-bookend/root", + "$ref": "list", + "$defs": { + "foo": { + "$dynamicAnchor": "items", + "type": "string" + }, + "list": { + "$id": "list", + "type": "array", + "items": { "$dynamicRef": "#items" }, + "$defs": { + "items": { + "$comment": "This is only needed to give the reference somewhere to resolve to when it behaves like $ref", + "$anchor": "items" + } + } + } + } + }, + "tests": [ + { + "description": "Any array is valid", + "data": ["foo", 42], + "valid": true + } + ] + }, + { + "description": "A $dynamicRef with a non-matching $dynamicAnchor in the same schema resource behaves like a normal $ref to $anchor", + "schema": { + "$id": "https://test.json-schema.org/unmatched-dynamic-anchor/root", + "$ref": "list", + "$defs": { + "foo": { + "$dynamicAnchor": "items", + "type": "string" + }, + "list": { + "$id": "list", + "type": "array", + "items": { "$dynamicRef": "#items" }, + "$defs": { + "items": { + "$comment": "This is only needed to give the reference somewhere to resolve to when it behaves like $ref", + "$anchor": "items", + "$dynamicAnchor": "foo" + } + } + } + } + }, + "tests": [ + { + "description": "Any array is valid", + "data": ["foo", 42], + "valid": true + } + ] + }, + { + "description": "A $dynamicRef that initially resolves to a schema with a matching $dynamicAnchor resolves to the first $dynamicAnchor in the dynamic scope", + "schema": { + "$id": "https://test.json-schema.org/relative-dynamic-reference/root", + "$dynamicAnchor": "meta", + "type": "object", + "properties": { + "foo": { "const": "pass" } + }, + "$ref": "extended", + "$defs": { + "extended": { + "$id": "extended", + "$dynamicAnchor": "meta", + "type": "object", + "properties": { + "bar": { "$ref": "bar" } + } + }, + "bar": { + "$id": "bar", + "type": "object", + "properties": { + "baz": { "$dynamicRef": "extended#meta" } + } + } + } + }, + "tests": [ + { + "description": "The recursive part is valid against the root", + "data": { + "foo": "pass", + "bar": { + "baz": { "foo": "pass" } + } + }, + "valid": true + }, + { + "description": "The recursive part is not valid against the root", + "data": { + "foo": "pass", + "bar": { + "baz": { "foo": "fail" } + } + }, + "valid": false + } + ] + }, + { + "description": "A $dynamicRef that initially resolves to a schema without a matching $dynamicAnchor behaves like a normal $ref to $anchor", + "schema": { + "$id": "https://test.json-schema.org/relative-dynamic-reference-without-bookend/root", + "$dynamicAnchor": "meta", + "type": "object", + "properties": { + "foo": { "const": "pass" } + }, + "$ref": "extended", + "$defs": { + "extended": { + "$id": "extended", + "$anchor": "meta", + "type": "object", + "properties": { + "bar": { "$ref": "bar" } + } + }, + "bar": { + "$id": "bar", + "type": "object", + "properties": { + "baz": { "$dynamicRef": "extended#meta" } + } + } + } + }, + "tests": [ + { + "description": "The recursive part doesn't need to validate against the root", + "data": { + "foo": "pass", + "bar": { + "baz": { "foo": "fail" } + } + }, + "valid": true + } + ] + }, + { + "description": "multiple dynamic paths to the $dynamicRef keyword", + "schema": { + "$id": "https://test.json-schema.org/dynamic-ref-with-multiple-paths/main", + "$defs": { + "inner": { + "$id": "inner", + "$dynamicAnchor": "foo", + "title": "inner", + 
"additionalProperties": { + "$dynamicRef": "#foo" + } + } + }, + "if": { + "propertyNames": { + "pattern": "^[a-m]" + } + }, + "then": { + "title": "any type of node", + "$id": "anyLeafNode", + "$dynamicAnchor": "foo", + "$ref": "inner" + }, + "else": { + "title": "integer node", + "$id": "integerNode", + "$dynamicAnchor": "foo", + "type": [ "object", "integer" ], + "$ref": "inner" + } + }, + "tests": [ + { + "description": "recurse to anyLeafNode - floats are allowed", + "data": { "alpha": 1.1 }, + "valid": true + }, + { + "description": "recurse to integerNode - floats are not allowed", + "data": { "november": 1.1 }, + "valid": false + } + ] + }, + { + "description": "after leaving a dynamic scope, it is not used by a $dynamicRef", + "schema": { + "$id": "https://test.json-schema.org/dynamic-ref-leaving-dynamic-scope/main", + "if": { + "$id": "first_scope", + "$defs": { + "thingy": { + "$comment": "this is first_scope#thingy", + "$dynamicAnchor": "thingy", + "type": "number" + } + } + }, + "then": { + "$id": "second_scope", + "$ref": "start", + "$defs": { + "thingy": { + "$comment": "this is second_scope#thingy, the final destination of the $dynamicRef", + "$dynamicAnchor": "thingy", + "type": "null" + } + } + }, + "$defs": { + "start": { + "$comment": "this is the landing spot from $ref", + "$id": "start", + "$dynamicRef": "inner_scope#thingy" + }, + "thingy": { + "$comment": "this is the first stop for the $dynamicRef", + "$id": "inner_scope", + "$dynamicAnchor": "thingy", + "type": "string" + } + } + }, + "tests": [ + { + "description": "string matches /$defs/thingy, but the $dynamicRef does not stop here", + "data": "a string", + "valid": false + }, + { + "description": "first_scope is not in dynamic scope for the $dynamicRef", + "data": 42, + "valid": false + }, + { + "description": "/then/$defs/thingy is the final stop for the $dynamicRef", + "data": null, + "valid": true + } + ] + }, + { + "description": "strict-tree schema, guards against misspelled properties", + "schema": { + "$id": "http://localhost:1234/strict-tree.json", + "$dynamicAnchor": "node", + + "$ref": "tree.json", + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "instance with misspelled field", + "data": { + "children": [{ + "daat": 1 + }] + }, + "valid": false + }, + { + "description": "instance with correct field", + "data": { + "children": [{ + "data": 1 + }] + }, + "valid": true + } + ] + }, + { + "description": "tests for implementation dynamic anchor and reference link", + "schema": { + "$id": "http://localhost:1234/strict-extendible.json", + "$ref": "extendible-dynamic-ref.json", + "$defs": { + "elements": { + "$dynamicAnchor": "elements", + "properties": { + "a": true + }, + "required": ["a"], + "additionalProperties": false + } + } + }, + "tests": [ + { + "description": "incorrect parent schema", + "data": { + "a": true + }, + "valid": false + }, + { + "description": "incorrect extended schema", + "data": { + "elements": [ + { "b": 1 } + ] + }, + "valid": false + }, + { + "description": "correct extended schema", + "data": { + "elements": [ + { "a": 1 } + ] + }, + "valid": true + } + ] + }, + { + "description": "$ref and $dynamicAnchor are independent of order - $defs first", + "schema": { + "$id": "http://localhost:1234/strict-extendible-allof-defs-first.json", + "allOf": [ + { + "$ref": "extendible-dynamic-ref.json" + }, + { + "$defs": { + "elements": { + "$dynamicAnchor": "elements", + "properties": { + "a": true + }, + "required": ["a"], + "additionalProperties": false + } + } + 
} + ] + }, + "tests": [ + { + "description": "incorrect parent schema", + "data": { + "a": true + }, + "valid": false + }, + { + "description": "incorrect extended schema", + "data": { + "elements": [ + { "b": 1 } + ] + }, + "valid": false + }, + { + "description": "correct extended schema", + "data": { + "elements": [ + { "a": 1 } + ] + }, + "valid": true + } + ] + }, + { + "description": "$ref and $dynamicAnchor are independent of order - $ref first", + "schema": { + "$id": "http://localhost:1234/strict-extendible-allof-ref-first.json", + "allOf": [ + { + "$defs": { + "elements": { + "$dynamicAnchor": "elements", + "properties": { + "a": true + }, + "required": ["a"], + "additionalProperties": false + } + } + }, + { + "$ref": "extendible-dynamic-ref.json" + } + ] + }, + "tests": [ + { + "description": "incorrect parent schema", + "data": { + "a": true + }, + "valid": false + }, + { + "description": "incorrect extended schema", + "data": { + "elements": [ + { "b": 1 } + ] + }, + "valid": false + }, + { + "description": "correct extended schema", + "data": { + "elements": [ + { "a": 1 } + ] + }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/enum.json b/vendor/jsonschema/json/tests/draft2020-12/enum.json new file mode 100644 index 00000000..f085097b --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/enum.json @@ -0,0 +1,236 @@ +[ + { + "description": "simple enum validation", + "schema": {"enum": [1, 2, 3]}, + "tests": [ + { + "description": "one of the enum is valid", + "data": 1, + "valid": true + }, + { + "description": "something else is invalid", + "data": 4, + "valid": false + } + ] + }, + { + "description": "heterogeneous enum validation", + "schema": {"enum": [6, "foo", [], true, {"foo": 12}]}, + "tests": [ + { + "description": "one of the enum is valid", + "data": [], + "valid": true + }, + { + "description": "something else is invalid", + "data": null, + "valid": false + }, + { + "description": "objects are deep compared", + "data": {"foo": false}, + "valid": false + }, + { + "description": "valid object matches", + "data": {"foo": 12}, + "valid": true + }, + { + "description": "extra properties in object is invalid", + "data": {"foo": 12, "boo": 42}, + "valid": false + } + ] + }, + { + "description": "heterogeneous enum-with-null validation", + "schema": { "enum": [6, null] }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "number is valid", + "data": 6, + "valid": true + }, + { + "description": "something else is invalid", + "data": "test", + "valid": false + } + ] + }, + { + "description": "enums in properties", + "schema": { + "type":"object", + "properties": { + "foo": {"enum":["foo"]}, + "bar": {"enum":["bar"]} + }, + "required": ["bar"] + }, + "tests": [ + { + "description": "both properties are valid", + "data": {"foo":"foo", "bar":"bar"}, + "valid": true + }, + { + "description": "wrong foo value", + "data": {"foo":"foot", "bar":"bar"}, + "valid": false + }, + { + "description": "wrong bar value", + "data": {"foo":"foo", "bar":"bart"}, + "valid": false + }, + { + "description": "missing optional property is valid", + "data": {"bar":"bar"}, + "valid": true + }, + { + "description": "missing required property is invalid", + "data": {"foo":"foo"}, + "valid": false + }, + { + "description": "missing all properties is invalid", + "data": {}, + "valid": false + } + ] + }, + { + "description": "enum with escaped characters", + "schema": { + "enum": ["foo\nbar", 
"foo\rbar"] + }, + "tests": [ + { + "description": "member 1 is valid", + "data": "foo\nbar", + "valid": true + }, + { + "description": "member 2 is valid", + "data": "foo\rbar", + "valid": true + }, + { + "description": "another string is invalid", + "data": "abc", + "valid": false + } + ] + }, + { + "description": "enum with false does not match 0", + "schema": {"enum": [false]}, + "tests": [ + { + "description": "false is valid", + "data": false, + "valid": true + }, + { + "description": "integer zero is invalid", + "data": 0, + "valid": false + }, + { + "description": "float zero is invalid", + "data": 0.0, + "valid": false + } + ] + }, + { + "description": "enum with true does not match 1", + "schema": {"enum": [true]}, + "tests": [ + { + "description": "true is valid", + "data": true, + "valid": true + }, + { + "description": "integer one is invalid", + "data": 1, + "valid": false + }, + { + "description": "float one is invalid", + "data": 1.0, + "valid": false + } + ] + }, + { + "description": "enum with 0 does not match false", + "schema": {"enum": [0]}, + "tests": [ + { + "description": "false is invalid", + "data": false, + "valid": false + }, + { + "description": "integer zero is valid", + "data": 0, + "valid": true + }, + { + "description": "float zero is valid", + "data": 0.0, + "valid": true + } + ] + }, + { + "description": "enum with 1 does not match true", + "schema": {"enum": [1]}, + "tests": [ + { + "description": "true is invalid", + "data": true, + "valid": false + }, + { + "description": "integer one is valid", + "data": 1, + "valid": true + }, + { + "description": "float one is valid", + "data": 1.0, + "valid": true + } + ] + }, + { + "description": "nul characters in strings", + "schema": { "enum": [ "hello\u0000there" ] }, + "tests": [ + { + "description": "match string with nul", + "data": "hello\u0000there", + "valid": true + }, + { + "description": "do not match string lacking nul", + "data": "hellothere", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/exclusiveMaximum.json b/vendor/jsonschema/json/tests/draft2020-12/exclusiveMaximum.json new file mode 100644 index 00000000..dc3cd709 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/exclusiveMaximum.json @@ -0,0 +1,30 @@ +[ + { + "description": "exclusiveMaximum validation", + "schema": { + "exclusiveMaximum": 3.0 + }, + "tests": [ + { + "description": "below the exclusiveMaximum is valid", + "data": 2.2, + "valid": true + }, + { + "description": "boundary point is invalid", + "data": 3.0, + "valid": false + }, + { + "description": "above the exclusiveMaximum is invalid", + "data": 3.5, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/exclusiveMinimum.json b/vendor/jsonschema/json/tests/draft2020-12/exclusiveMinimum.json new file mode 100644 index 00000000..b38d7ece --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/exclusiveMinimum.json @@ -0,0 +1,30 @@ +[ + { + "description": "exclusiveMinimum validation", + "schema": { + "exclusiveMinimum": 1.1 + }, + "tests": [ + { + "description": "above the exclusiveMinimum is valid", + "data": 1.2, + "valid": true + }, + { + "description": "boundary point is invalid", + "data": 1.1, + "valid": false + }, + { + "description": "below the exclusiveMinimum is invalid", + "data": 0.6, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + } +] diff 
--git a/vendor/jsonschema/json/tests/draft2020-12/format.json b/vendor/jsonschema/json/tests/draft2020-12/format.json new file mode 100644 index 00000000..a4b51d28 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/format.json @@ -0,0 +1,686 @@ +[ + { + "description": "email format", + "schema": { "format": "email" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "idn-email format", + "schema": { "format": "idn-email" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "regex format", + "schema": { "format": "regex" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "ipv4 format", + "schema": { "format": "ipv4" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "ipv6 format", + "schema": { "format": "ipv6" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + 
"description": "idn-hostname format", + "schema": { "format": "idn-hostname" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "hostname format", + "schema": { "format": "hostname" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "date format", + "schema": { "format": "date" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "date-time format", + "schema": { "format": "date-time" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "time format", + "schema": { "format": "time" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "json-pointer format", + "schema": { "format": "json-pointer" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + 
"data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "relative-json-pointer format", + "schema": { "format": "relative-json-pointer" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "iri format", + "schema": { "format": "iri" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "iri-reference format", + "schema": { "format": "iri-reference" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "uri format", + "schema": { "format": "uri" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "uri-reference format", + "schema": { "format": "uri-reference" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + 
}, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "uri-template format", + "schema": { "format": "uri-template" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "uuid format", + "schema": { "format": "uuid" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "duration format", + "schema": { "format": "duration" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/id.json b/vendor/jsonschema/json/tests/draft2020-12/id.json new file mode 100644 index 00000000..37fb4297 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/id.json @@ -0,0 +1,289 @@ +[ + { + "description": "Invalid use of fragments in location-independent $id", + "schema": { + "$ref": "https://json-schema.org/draft/2020-12/schema" + }, + "tests": [ + { + "description": "Identifier name", + "data": { + "$ref": "#foo", + "$defs": { + "A": { + "$id": "#foo", + "type": "integer" + } + } + }, + "valid": false + }, + { + "description": "Identifier name and no ref", + "data": { + "$defs": { + "A": { "$id": "#foo" } + } + }, + "valid": false + }, + { + "description": "Identifier path", + "data": { + "$ref": "#/a/b", + "$defs": { + "A": { + "$id": "#/a/b", + "type": "integer" + } + } + }, + "valid": false + }, + { + "description": "Identifier name with absolute URI", + "data": { + "$ref": "http://localhost:1234/bar#foo", + "$defs": { + "A": { + "$id": "http://localhost:1234/bar#foo", + "type": "integer" + } + } + }, + "valid": false + }, + { + "description": "Identifier path with absolute URI", + "data": { + "$ref": "http://localhost:1234/bar#/a/b", + "$defs": { + "A": { + "$id": "http://localhost:1234/bar#/a/b", + "type": "integer" + } + } + }, + "valid": false + }, + { + 
"description": "Identifier name with base URI change in subschema", + "data": { + "$id": "http://localhost:1234/root", + "$ref": "http://localhost:1234/nested.json#foo", + "$defs": { + "A": { + "$id": "nested.json", + "$defs": { + "B": { + "$id": "#foo", + "type": "integer" + } + } + } + } + }, + "valid": false + }, + { + "description": "Identifier path with base URI change in subschema", + "data": { + "$id": "http://localhost:1234/root", + "$ref": "http://localhost:1234/nested.json#/a/b", + "$defs": { + "A": { + "$id": "nested.json", + "$defs": { + "B": { + "$id": "#/a/b", + "type": "integer" + } + } + } + } + }, + "valid": false + } + ] + }, + { + "description": "Valid use of empty fragments in location-independent $id", + "comment": "These are allowed but discouraged", + "schema": { + "$ref": "https://json-schema.org/draft/2020-12/schema" + }, + "tests": [ + { + "description": "Identifier name with absolute URI", + "data": { + "$ref": "http://localhost:1234/bar", + "$defs": { + "A": { + "$id": "http://localhost:1234/bar#", + "type": "integer" + } + } + }, + "valid": true + }, + { + "description": "Identifier name with base URI change in subschema", + "data": { + "$id": "http://localhost:1234/root", + "$ref": "http://localhost:1234/nested.json#/$defs/B", + "$defs": { + "A": { + "$id": "nested.json", + "$defs": { + "B": { + "$id": "#", + "type": "integer" + } + } + } + } + }, + "valid": true + } + ] + }, + { + "description": "Unnormalized $ids are allowed but discouraged", + "schema": { + "$ref": "https://json-schema.org/draft/2020-12/schema" + }, + "tests": [ + { + "description": "Unnormalized identifier", + "data": { + "$ref": "http://localhost:1234/foo/baz", + "$defs": { + "A": { + "$id": "http://localhost:1234/foo/bar/../baz", + "type": "integer" + } + } + }, + "valid": true + }, + { + "description": "Unnormalized identifier and no ref", + "data": { + "$defs": { + "A": { + "$id": "http://localhost:1234/foo/bar/../baz", + "type": "integer" + } + } + }, + "valid": true + }, + { + "description": "Unnormalized identifier with empty fragment", + "data": { + "$ref": "http://localhost:1234/foo/baz", + "$defs": { + "A": { + "$id": "http://localhost:1234/foo/bar/../baz#", + "type": "integer" + } + } + }, + "valid": true + }, + { + "description": "Unnormalized identifier with empty fragment and no ref", + "data": { + "$defs": { + "A": { + "$id": "http://localhost:1234/foo/bar/../baz#", + "type": "integer" + } + } + }, + "valid": true + } + ] + }, + { + "description": "$id inside an enum is not a real identifier", + "comment": "the implementation must not be confused by an $id buried in the enum", + "schema": { + "$defs": { + "id_in_enum": { + "enum": [ + { + "$id": "https://localhost:1234/id/my_identifier.json", + "type": "null" + } + ] + }, + "real_id_in_schema": { + "$id": "https://localhost:1234/id/my_identifier.json", + "type": "string" + }, + "zzz_id_in_const": { + "const": { + "$id": "https://localhost:1234/id/my_identifier.json", + "type": "null" + } + } + }, + "anyOf": [ + { "$ref": "#/$defs/id_in_enum" }, + { "$ref": "https://localhost:1234/id/my_identifier.json" } + ] + }, + "tests": [ + { + "description": "exact match to enum, and type matches", + "data": { + "$id": "https://localhost:1234/id/my_identifier.json", + "type": "null" + }, + "valid": true + }, + { + "description": "match $ref to $id", + "data": "a string to match #/$defs/id_in_enum", + "valid": true + }, + { + "description": "no match on enum or $ref to $id", + "data": 1, + "valid": false + } + ] + }, + { + "description": 
"non-schema object containing an $id property", + "schema": { + "$defs": { + "const_not_id": { + "const": { + "$id": "not_a_real_id" + } + } + }, + "if": { + "const": "skip not_a_real_id" + }, + "then": true, + "else" : { + "$ref": "#/$defs/const_not_id" + } + }, + "tests": [ + { + "description": "skip traversing definition for a valid result", + "data": "skip not_a_real_id", + "valid": true + }, + { + "description": "const at const_not_id does not match", + "data": 1, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/if-then-else.json b/vendor/jsonschema/json/tests/draft2020-12/if-then-else.json new file mode 100644 index 00000000..284e9191 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/if-then-else.json @@ -0,0 +1,258 @@ +[ + { + "description": "ignore if without then or else", + "schema": { + "if": { + "const": 0 + } + }, + "tests": [ + { + "description": "valid when valid against lone if", + "data": 0, + "valid": true + }, + { + "description": "valid when invalid against lone if", + "data": "hello", + "valid": true + } + ] + }, + { + "description": "ignore then without if", + "schema": { + "then": { + "const": 0 + } + }, + "tests": [ + { + "description": "valid when valid against lone then", + "data": 0, + "valid": true + }, + { + "description": "valid when invalid against lone then", + "data": "hello", + "valid": true + } + ] + }, + { + "description": "ignore else without if", + "schema": { + "else": { + "const": 0 + } + }, + "tests": [ + { + "description": "valid when valid against lone else", + "data": 0, + "valid": true + }, + { + "description": "valid when invalid against lone else", + "data": "hello", + "valid": true + } + ] + }, + { + "description": "if and then without else", + "schema": { + "if": { + "exclusiveMaximum": 0 + }, + "then": { + "minimum": -10 + } + }, + "tests": [ + { + "description": "valid through then", + "data": -1, + "valid": true + }, + { + "description": "invalid through then", + "data": -100, + "valid": false + }, + { + "description": "valid when if test fails", + "data": 3, + "valid": true + } + ] + }, + { + "description": "if and else without then", + "schema": { + "if": { + "exclusiveMaximum": 0 + }, + "else": { + "multipleOf": 2 + } + }, + "tests": [ + { + "description": "valid when if test passes", + "data": -1, + "valid": true + }, + { + "description": "valid through else", + "data": 4, + "valid": true + }, + { + "description": "invalid through else", + "data": 3, + "valid": false + } + ] + }, + { + "description": "validate against correct branch, then vs else", + "schema": { + "if": { + "exclusiveMaximum": 0 + }, + "then": { + "minimum": -10 + }, + "else": { + "multipleOf": 2 + } + }, + "tests": [ + { + "description": "valid through then", + "data": -1, + "valid": true + }, + { + "description": "invalid through then", + "data": -100, + "valid": false + }, + { + "description": "valid through else", + "data": 4, + "valid": true + }, + { + "description": "invalid through else", + "data": 3, + "valid": false + } + ] + }, + { + "description": "non-interference across combined schemas", + "schema": { + "allOf": [ + { + "if": { + "exclusiveMaximum": 0 + } + }, + { + "then": { + "minimum": -10 + } + }, + { + "else": { + "multipleOf": 2 + } + } + ] + }, + "tests": [ + { + "description": "valid, but would have been invalid through then", + "data": -100, + "valid": true + }, + { + "description": "valid, but would have been invalid through else", + "data": 3, + "valid": true + } + ] + }, + { + "description": 
"if with boolean schema true", + "schema": { + "if": true, + "then": { "const": "then" }, + "else": { "const": "else" } + }, + "tests": [ + { + "description": "boolean schema true in if always chooses the then path (valid)", + "data": "then", + "valid": true + }, + { + "description": "boolean schema true in if always chooses the then path (invalid)", + "data": "else", + "valid": false + } + ] + }, + { + "description": "if with boolean schema false", + "schema": { + "if": false, + "then": { "const": "then" }, + "else": { "const": "else" } + }, + "tests": [ + { + "description": "boolean schema false in if always chooses the else path (invalid)", + "data": "then", + "valid": false + }, + { + "description": "boolean schema false in if always chooses the else path (valid)", + "data": "else", + "valid": true + } + ] + }, + { + "description": "if appears at the end when serialized (keyword processing sequence)", + "schema": { + "then": { "const": "yes" }, + "else": { "const": "other" }, + "if": { "maxLength": 4 } + }, + "tests": [ + { + "description": "yes redirects to then and passes", + "data": "yes", + "valid": true + }, + { + "description": "other redirects to else and passes", + "data": "other", + "valid": true + }, + { + "description": "no redirects to then and fails", + "data": "no", + "valid": false + }, + { + "description": "invalid redirects to else and fails", + "data": "invalid", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/infinite-loop-detection.json b/vendor/jsonschema/json/tests/draft2020-12/infinite-loop-detection.json new file mode 100644 index 00000000..9c3c3627 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/infinite-loop-detection.json @@ -0,0 +1,36 @@ +[ + { + "description": "evaluating the same schema location against the same data location twice is not a sign of an infinite loop", + "schema": { + "$defs": { + "int": { "type": "integer" } + }, + "allOf": [ + { + "properties": { + "foo": { + "$ref": "#/$defs/int" + } + } + }, + { + "additionalProperties": { + "$ref": "#/$defs/int" + } + } + ] + }, + "tests": [ + { + "description": "passing case", + "data": { "foo": 1 }, + "valid": true + }, + { + "description": "failing case", + "data": { "foo": "a string" }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/items.json b/vendor/jsonschema/json/tests/draft2020-12/items.json new file mode 100644 index 00000000..38ab0e0a --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/items.json @@ -0,0 +1,271 @@ +[ + { + "description": "a schema given for items", + "schema": { + "items": {"type": "integer"} + }, + "tests": [ + { + "description": "valid items", + "data": [ 1, 2, 3 ], + "valid": true + }, + { + "description": "wrong type of items", + "data": [1, "x"], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": {"foo" : "bar"}, + "valid": true + }, + { + "description": "JavaScript pseudo-array is valid", + "data": { + "0": "invalid", + "length": 1 + }, + "valid": true + } + ] + }, + { + "description": "items with boolean schema (true)", + "schema": {"items": true}, + "tests": [ + { + "description": "any array is valid", + "data": [ 1, "foo", true ], + "valid": true + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "items with boolean schema (false)", + "schema": {"items": false}, + "tests": [ + { + "description": "any non-empty array is invalid", + "data": [ 1, "foo", true ], + "valid": false + }, 
+ { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "items and subitems", + "schema": { + "$defs": { + "item": { + "type": "array", + "items": false, + "prefixItems": [ + { "$ref": "#/$defs/sub-item" }, + { "$ref": "#/$defs/sub-item" } + ] + }, + "sub-item": { + "type": "object", + "required": ["foo"] + } + }, + "type": "array", + "items": false, + "prefixItems": [ + { "$ref": "#/$defs/item" }, + { "$ref": "#/$defs/item" }, + { "$ref": "#/$defs/item" } + ] + }, + "tests": [ + { + "description": "valid items", + "data": [ + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": true + }, + { + "description": "too many items", + "data": [ + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "too many sub-items", + "data": [ + [ {"foo": null}, {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "wrong item", + "data": [ + {"foo": null}, + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "wrong sub-item", + "data": [ + [ {}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "fewer items is valid", + "data": [ + [ {"foo": null} ], + [ {"foo": null} ] + ], + "valid": true + } + ] + }, + { + "description": "nested items", + "schema": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "number" + } + } + } + } + }, + "tests": [ + { + "description": "valid nested array", + "data": [[[[1]], [[2],[3]]], [[[4], [5], [6]]]], + "valid": true + }, + { + "description": "nested array with invalid type", + "data": [[[["1"]], [[2],[3]]], [[[4], [5], [6]]]], + "valid": false + }, + { + "description": "not deep enough", + "data": [[[1], [2],[3]], [[4], [5], [6]]], + "valid": false + } + ] + }, + { + "description": "prefixItems with no additional items allowed", + "schema": { + "prefixItems": [{}, {}, {}], + "items": false + }, + "tests": [ + { + "description": "empty array", + "data": [ ], + "valid": true + }, + { + "description": "fewer number of items present (1)", + "data": [ 1 ], + "valid": true + }, + { + "description": "fewer number of items present (2)", + "data": [ 1, 2 ], + "valid": true + }, + { + "description": "equal number of items present", + "data": [ 1, 2, 3 ], + "valid": true + }, + { + "description": "additional items are not permitted", + "data": [ 1, 2, 3, 4 ], + "valid": false + } + ] + }, + { + "description": "items does not look in applicators, valid case", + "schema": { + "allOf": [ + { "prefixItems": [ { "minimum": 3 } ] } + ], + "items": { "minimum": 5 } + }, + "tests": [ + { + "description": "prefixItems in allOf does not constrain items, invalid case", + "data": [ 3, 5 ], + "valid": false + }, + { + "description": "prefixItems in allOf does not constrain items, valid case", + "data": [ 5, 5 ], + "valid": true + } + ] + }, + { + "description": "prefixItems validation adjusts the starting index for items", + "schema": { + "prefixItems": [ { "type": "string" } ], + "items": { "type": "integer" } + }, + "tests": [ + { + "description": "valid items", + "data": [ "x", 2, 3 ], + "valid": true + }, + 
{ + "description": "wrong type of second item", + "data": [ "x", "y" ], + "valid": false + } + ] + }, + { + "description": "items with null instance elements", + "schema": { + "items": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/maxContains.json b/vendor/jsonschema/json/tests/draft2020-12/maxContains.json new file mode 100644 index 00000000..61c967de --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/maxContains.json @@ -0,0 +1,98 @@ +[ + { + "description": "maxContains without contains is ignored", + "schema": { + "maxContains": 1 + }, + "tests": [ + { + "description": "one item valid against lone maxContains", + "data": [ 1 ], + "valid": true + }, + { + "description": "two items still valid against lone maxContains", + "data": [ 1, 2 ], + "valid": true + } + ] + }, + { + "description": "maxContains with contains", + "schema": { + "contains": {"const": 1}, + "maxContains": 1 + }, + "tests": [ + { + "description": "empty data", + "data": [ ], + "valid": false + }, + { + "description": "all elements match, valid maxContains", + "data": [ 1 ], + "valid": true + }, + { + "description": "all elements match, invalid maxContains", + "data": [ 1, 1 ], + "valid": false + }, + { + "description": "some elements match, valid maxContains", + "data": [ 1, 2 ], + "valid": true + }, + { + "description": "some elements match, invalid maxContains", + "data": [ 1, 2, 1 ], + "valid": false + } + ] + }, + { + "description": "maxContains with contains, value with a decimal", + "schema": { + "contains": {"const": 1}, + "maxContains": 1.0 + }, + "tests": [ + { + "description": "one element matches, valid maxContains", + "data": [ 1 ], + "valid": true + }, + { + "description": "too many elements match, invalid maxContains", + "data": [ 1, 1 ], + "valid": false + } + ] + }, + { + "description": "minContains < maxContains", + "schema": { + "contains": {"const": 1}, + "minContains": 1, + "maxContains": 3 + }, + "tests": [ + { + "description": "actual < minContains < maxContains", + "data": [ ], + "valid": false + }, + { + "description": "minContains < actual < maxContains", + "data": [ 1, 1 ], + "valid": true + }, + { + "description": "minContains < maxContains < actual", + "data": [ 1, 1, 1, 1 ], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/maxItems.json b/vendor/jsonschema/json/tests/draft2020-12/maxItems.json new file mode 100644 index 00000000..f0c36ab2 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/maxItems.json @@ -0,0 +1,44 @@ +[ + { + "description": "maxItems validation", + "schema": {"maxItems": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": [1], + "valid": true + }, + { + "description": "exact length is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "too long is invalid", + "data": [1, 2, 3], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": "foobar", + "valid": true + } + ] + }, + { + "description": "maxItems validation with a decimal", + "schema": {"maxItems": 2.0}, + "tests": [ + { + "description": "shorter is valid", + "data": [1], + "valid": true + }, + { + "description": "too long is invalid", + "data": [1, 2, 3], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/maxLength.json b/vendor/jsonschema/json/tests/draft2020-12/maxLength.json new file mode 100644 index 00000000..748b4daa --- 
/dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/maxLength.json @@ -0,0 +1,49 @@ +[ + { + "description": "maxLength validation", + "schema": {"maxLength": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": "f", + "valid": true + }, + { + "description": "exact length is valid", + "data": "fo", + "valid": true + }, + { + "description": "too long is invalid", + "data": "foo", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + }, + { + "description": "two supplementary Unicode code points is long enough", + "data": "\uD83D\uDCA9\uD83D\uDCA9", + "valid": true + } + ] + }, + { + "description": "maxLength validation with a decimal", + "schema": {"maxLength": 2.0}, + "tests": [ + { + "description": "shorter is valid", + "data": "f", + "valid": true + }, + { + "description": "too long is invalid", + "data": "foo", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/maxProperties.json b/vendor/jsonschema/json/tests/draft2020-12/maxProperties.json new file mode 100644 index 00000000..acec1420 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/maxProperties.json @@ -0,0 +1,70 @@ +[ + { + "description": "maxProperties validation", + "schema": {"maxProperties": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "exact length is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "too long is invalid", + "data": {"foo": 1, "bar": 2, "baz": 3}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [1, 2, 3], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "maxProperties validation with a decimal", + "schema": {"maxProperties": 2.0}, + "tests": [ + { + "description": "shorter is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "too long is invalid", + "data": {"foo": 1, "bar": 2, "baz": 3}, + "valid": false + } + ] + }, + { + "description": "maxProperties = 0 means the object is empty", + "schema": { "maxProperties": 0 }, + "tests": [ + { + "description": "no properties is valid", + "data": {}, + "valid": true + }, + { + "description": "one property is invalid", + "data": { "foo": 1 }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/maximum.json b/vendor/jsonschema/json/tests/draft2020-12/maximum.json new file mode 100644 index 00000000..6844a39e --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/maximum.json @@ -0,0 +1,54 @@ +[ + { + "description": "maximum validation", + "schema": {"maximum": 3.0}, + "tests": [ + { + "description": "below the maximum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "boundary point is valid", + "data": 3.0, + "valid": true + }, + { + "description": "above the maximum is invalid", + "data": 3.5, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "maximum validation with unsigned integer", + "schema": {"maximum": 300}, + "tests": [ + { + "description": "below the maximum is valid", + "data": 299.97, + "valid": true + }, + { + "description": "boundary point integer is valid", + "data": 300, + "valid": true + }, + { + "description": "boundary point float is valid", + "data": 300.00, + "valid": true + }, + {
+ "description": "above the maximum is invalid", + "data": 300.5, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/minContains.json b/vendor/jsonschema/json/tests/draft2020-12/minContains.json new file mode 100644 index 00000000..851e262d --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/minContains.json @@ -0,0 +1,216 @@ +[ + { + "description": "minContains without contains is ignored", + "schema": { + "minContains": 1 + }, + "tests": [ + { + "description": "one item valid against lone minContains", + "data": [ 1 ], + "valid": true + }, + { + "description": "zero items still valid against lone minContains", + "data": [], + "valid": true + } + ] + }, + { + "description": "minContains=1 with contains", + "schema": { + "contains": {"const": 1}, + "minContains": 1 + }, + "tests": [ + { + "description": "empty data", + "data": [ ], + "valid": false + }, + { + "description": "no elements match", + "data": [ 2 ], + "valid": false + }, + { + "description": "single element matches, valid minContains", + "data": [ 1 ], + "valid": true + }, + { + "description": "some elements match, valid minContains", + "data": [ 1, 2 ], + "valid": true + }, + { + "description": "all elements match, valid minContains", + "data": [ 1, 1 ], + "valid": true + } + ] + }, + { + "description": "minContains=2 with contains", + "schema": { + "contains": {"const": 1}, + "minContains": 2 + }, + "tests": [ + { + "description": "empty data", + "data": [ ], + "valid": false + }, + { + "description": "all elements match, invalid minContains", + "data": [ 1 ], + "valid": false + }, + { + "description": "some elements match, invalid minContains", + "data": [ 1, 2 ], + "valid": false + }, + { + "description": "all elements match, valid minContains (exactly as needed)", + "data": [ 1, 1 ], + "valid": true + }, + { + "description": "all elements match, valid minContains (more than needed)", + "data": [ 1, 1, 1 ], + "valid": true + }, + { + "description": "some elements match, valid minContains", + "data": [ 1, 2, 1 ], + "valid": true + } + ] + }, + { + "description": "minContains=2 with contains with a decimal value", + "schema": { + "contains": {"const": 1}, + "minContains": 2.0 + }, + "tests": [ + { + "description": "one element matches, invalid minContains", + "data": [ 1 ], + "valid": false + }, + { + "description": "both elements match, valid minContains", + "data": [ 1, 1 ], + "valid": true + } + ] + }, + { + "description": "maxContains = minContains", + "schema": { + "contains": {"const": 1}, + "maxContains": 2, + "minContains": 2 + }, + "tests": [ + { + "description": "empty data", + "data": [ ], + "valid": false + }, + { + "description": "all elements match, invalid minContains", + "data": [ 1 ], + "valid": false + }, + { + "description": "all elements match, invalid maxContains", + "data": [ 1, 1, 1 ], + "valid": false + }, + { + "description": "all elements match, valid maxContains and minContains", + "data": [ 1, 1 ], + "valid": true + } + ] + }, + { + "description": "maxContains < minContains", + "schema": { + "contains": {"const": 1}, + "maxContains": 1, + "minContains": 3 + }, + "tests": [ + { + "description": "empty data", + "data": [ ], + "valid": false + }, + { + "description": "invalid minContains", + "data": [ 1 ], + "valid": false + }, + { + "description": "invalid maxContains", + "data": [ 1, 1, 1 ], + "valid": false + }, + { + "description": "invalid maxContains and minContains", + "data": [ 1, 1 ], + "valid": false + } + ] + }, + { + "description": "minContains 
= 0", + "schema": { + "contains": {"const": 1}, + "minContains": 0 + }, + "tests": [ + { + "description": "empty data", + "data": [ ], + "valid": true + }, + { + "description": "minContains = 0 makes contains always pass", + "data": [ 2 ], + "valid": true + } + ] + }, + { + "description": "minContains = 0 with maxContains", + "schema": { + "contains": {"const": 1}, + "minContains": 0, + "maxContains": 1 + }, + "tests": [ + { + "description": "empty data", + "data": [ ], + "valid": true + }, + { + "description": "not more than maxContains", + "data": [ 1 ], + "valid": true + }, + { + "description": "too many", + "data": [ 1, 1 ], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/minItems.json b/vendor/jsonschema/json/tests/draft2020-12/minItems.json new file mode 100644 index 00000000..d3b18720 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/minItems.json @@ -0,0 +1,44 @@ +[ + { + "description": "minItems validation", + "schema": {"minItems": 1}, + "tests": [ + { + "description": "longer is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "exact length is valid", + "data": [1], + "valid": true + }, + { + "description": "too short is invalid", + "data": [], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": "", + "valid": true + } + ] + }, + { + "description": "minItems validation with a decimal", + "schema": {"minItems": 1.0}, + "tests": [ + { + "description": "longer is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "too short is invalid", + "data": [], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/minLength.json b/vendor/jsonschema/json/tests/draft2020-12/minLength.json new file mode 100644 index 00000000..64db9480 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/minLength.json @@ -0,0 +1,49 @@ +[ + { + "description": "minLength validation", + "schema": {"minLength": 2}, + "tests": [ + { + "description": "longer is valid", + "data": "foo", + "valid": true + }, + { + "description": "exact length is valid", + "data": "fo", + "valid": true + }, + { + "description": "too short is invalid", + "data": "f", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 1, + "valid": true + }, + { + "description": "one supplementary Unicode code point is not long enough", + "data": "\uD83D\uDCA9", + "valid": false + } + ] + }, + { + "description": "minLength validation with a decimal", + "schema": {"minLength": 2.0}, + "tests": [ + { + "description": "longer is valid", + "data": "foo", + "valid": true + }, + { + "description": "too short is invalid", + "data": "f", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/minProperties.json b/vendor/jsonschema/json/tests/draft2020-12/minProperties.json new file mode 100644 index 00000000..9f74f789 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/minProperties.json @@ -0,0 +1,54 @@ +[ + { + "description": "minProperties validation", + "schema": {"minProperties": 1}, + "tests": [ + { + "description": "longer is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "exact length is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "too short is invalid", + "data": {}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores strings", + "data": "", + "valid": true + }, + { + "description": "ignores other 
non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "minProperties validation with a decimal", + "schema": {"minProperties": 1.0}, + "tests": [ + { + "description": "longer is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "too short is invalid", + "data": {}, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/minimum.json b/vendor/jsonschema/json/tests/draft2020-12/minimum.json new file mode 100644 index 00000000..21ae50e0 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/minimum.json @@ -0,0 +1,69 @@ +[ + { + "description": "minimum validation", + "schema": {"minimum": 1.1}, + "tests": [ + { + "description": "above the minimum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "boundary point is valid", + "data": 1.1, + "valid": true + }, + { + "description": "below the minimum is invalid", + "data": 0.6, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "minimum validation with signed integer", + "schema": {"minimum": -2}, + "tests": [ + { + "description": "negative above the minimum is valid", + "data": -1, + "valid": true + }, + { + "description": "positive above the minimum is valid", + "data": 0, + "valid": true + }, + { + "description": "boundary point is valid", + "data": -2, + "valid": true + }, + { + "description": "boundary point with float is valid", + "data": -2.0, + "valid": true + }, + { + "description": "float below the minimum is invalid", + "data": -2.0001, + "valid": false + }, + { + "description": "int below the minimum is invalid", + "data": -3, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/multipleOf.json b/vendor/jsonschema/json/tests/draft2020-12/multipleOf.json new file mode 100644 index 00000000..25c25a91 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/multipleOf.json @@ -0,0 +1,71 @@ +[ + { + "description": "by int", + "schema": {"multipleOf": 2}, + "tests": [ + { + "description": "int by int", + "data": 10, + "valid": true + }, + { + "description": "int by int fail", + "data": 7, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "by number", + "schema": {"multipleOf": 1.5}, + "tests": [ + { + "description": "zero is multiple of anything", + "data": 0, + "valid": true + }, + { + "description": "4.5 is multiple of 1.5", + "data": 4.5, + "valid": true + }, + { + "description": "35 is not multiple of 1.5", + "data": 35, + "valid": false + } + ] + }, + { + "description": "by small number", + "schema": {"multipleOf": 0.0001}, + "tests": [ + { + "description": "0.0075 is multiple of 0.0001", + "data": 0.0075, + "valid": true + }, + { + "description": "0.00751 is not multiple of 0.0001", + "data": 0.00751, + "valid": false + } + ] + }, + { + "description": "float division = inf", + "schema": {"type": "integer", "multipleOf": 0.123456789}, + "tests": [ + { + "description": "always invalid, but naive implementations may raise an overflow error", + "data": 1e308, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/not.json b/vendor/jsonschema/json/tests/draft2020-12/not.json new file mode 100644 index 00000000..98de0eda --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/not.json @@ -0,0 +1,117 @@ +[ + { + 
"description": "not", + "schema": { + "not": {"type": "integer"} + }, + "tests": [ + { + "description": "allowed", + "data": "foo", + "valid": true + }, + { + "description": "disallowed", + "data": 1, + "valid": false + } + ] + }, + { + "description": "not multiple types", + "schema": { + "not": {"type": ["integer", "boolean"]} + }, + "tests": [ + { + "description": "valid", + "data": "foo", + "valid": true + }, + { + "description": "mismatch", + "data": 1, + "valid": false + }, + { + "description": "other mismatch", + "data": true, + "valid": false + } + ] + }, + { + "description": "not more complex schema", + "schema": { + "not": { + "type": "object", + "properties": { + "foo": { + "type": "string" + } + } + } + }, + "tests": [ + { + "description": "match", + "data": 1, + "valid": true + }, + { + "description": "other match", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "mismatch", + "data": {"foo": "bar"}, + "valid": false + } + ] + }, + { + "description": "forbidden property", + "schema": { + "properties": { + "foo": { + "not": {} + } + } + }, + "tests": [ + { + "description": "property present", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "property absent", + "data": {"bar": 1, "baz": 2}, + "valid": true + } + ] + }, + { + "description": "not with boolean schema true", + "schema": {"not": true}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "not with boolean schema false", + "schema": {"not": false}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/oneOf.json b/vendor/jsonschema/json/tests/draft2020-12/oneOf.json new file mode 100644 index 00000000..eeb7ae86 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/oneOf.json @@ -0,0 +1,274 @@ +[ + { + "description": "oneOf", + "schema": { + "oneOf": [ + { + "type": "integer" + }, + { + "minimum": 2 + } + ] + }, + "tests": [ + { + "description": "first oneOf valid", + "data": 1, + "valid": true + }, + { + "description": "second oneOf valid", + "data": 2.5, + "valid": true + }, + { + "description": "both oneOf valid", + "data": 3, + "valid": false + }, + { + "description": "neither oneOf valid", + "data": 1.5, + "valid": false + } + ] + }, + { + "description": "oneOf with base schema", + "schema": { + "type": "string", + "oneOf" : [ + { + "minLength": 2 + }, + { + "maxLength": 4 + } + ] + }, + "tests": [ + { + "description": "mismatch base schema", + "data": 3, + "valid": false + }, + { + "description": "one oneOf valid", + "data": "foobar", + "valid": true + }, + { + "description": "both oneOf valid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf with boolean schemas, all true", + "schema": {"oneOf": [true, true, true]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf with boolean schemas, one true", + "schema": {"oneOf": [true, false, false]}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "oneOf with boolean schemas, more than one true", + "schema": {"oneOf": [true, true, false]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf with boolean schemas, all false", + "schema": {"oneOf": [false, false, false]}, + "tests": [ + { 
+ "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf complex types", + "schema": { + "oneOf": [ + { + "properties": { + "bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "first oneOf valid (complex)", + "data": {"bar": 2}, + "valid": true + }, + { + "description": "second oneOf valid (complex)", + "data": {"foo": "baz"}, + "valid": true + }, + { + "description": "both oneOf valid (complex)", + "data": {"foo": "baz", "bar": 2}, + "valid": false + }, + { + "description": "neither oneOf valid (complex)", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "oneOf with empty schema", + "schema": { + "oneOf": [ + { "type": "number" }, + {} + ] + }, + "tests": [ + { + "description": "one valid - valid", + "data": "foo", + "valid": true + }, + { + "description": "both valid - invalid", + "data": 123, + "valid": false + } + ] + }, + { + "description": "oneOf with required", + "schema": { + "type": "object", + "oneOf": [ + { "required": ["foo", "bar"] }, + { "required": ["foo", "baz"] } + ] + }, + "tests": [ + { + "description": "both invalid - invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "first valid - valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "second valid - valid", + "data": {"foo": 1, "baz": 3}, + "valid": true + }, + { + "description": "both valid - invalid", + "data": {"foo": 1, "bar": 2, "baz" : 3}, + "valid": false + } + ] + }, + { + "description": "oneOf with missing optional property", + "schema": { + "oneOf": [ + { + "properties": { + "bar": true, + "baz": true + }, + "required": ["bar"] + }, + { + "properties": { + "foo": true + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "first oneOf valid", + "data": {"bar": 8}, + "valid": true + }, + { + "description": "second oneOf valid", + "data": {"foo": "foo"}, + "valid": true + }, + { + "description": "both oneOf valid", + "data": {"foo": "foo", "bar": 8}, + "valid": false + }, + { + "description": "neither oneOf valid", + "data": {"baz": "quux"}, + "valid": false + } + ] + }, + { + "description": "nested oneOf, to check validation semantics", + "schema": { + "oneOf": [ + { + "oneOf": [ + { + "type": "null" + } + ] + } + ] + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "anything non-null is invalid", + "data": 123, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/bignum.json b/vendor/jsonschema/json/tests/draft2020-12/optional/bignum.json new file mode 100644 index 00000000..94b4a4e6 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/bignum.json @@ -0,0 +1,93 @@ +[ + { + "description": "integer", + "schema": { "type": "integer" }, + "tests": [ + { + "description": "a bignum is an integer", + "data": 12345678910111213141516171819202122232425262728293031, + "valid": true + }, + { + "description": "a negative bignum is an integer", + "data": -12345678910111213141516171819202122232425262728293031, + "valid": true + } + ] + }, + { + "description": "number", + "schema": { "type": "number" }, + "tests": [ + { + "description": "a bignum is a number", + "data": 98249283749234923498293171823948729348710298301928331, + "valid": true + }, + { + "description": "a negative bignum is a number", + "data": 
-98249283749234923498293171823948729348710298301928331, + "valid": true + } + ] + }, + { + "description": "string", + "schema": { "type": "string" }, + "tests": [ + { + "description": "a bignum is not a string", + "data": 98249283749234923498293171823948729348710298301928331, + "valid": false + } + ] + }, + { + "description": "maximum integer comparison", + "schema": { "maximum": 18446744073709551615 }, + "tests": [ + { + "description": "comparison works for high numbers", + "data": 18446744073709551600, + "valid": true + } + ] + }, + { + "description": "float comparison with high precision", + "schema": { + "exclusiveMaximum": 972783798187987123879878123.18878137 + }, + "tests": [ + { + "description": "comparison works for high numbers", + "data": 972783798187987123879878123.188781371, + "valid": false + } + ] + }, + { + "description": "minimum integer comparison", + "schema": { "minimum": -18446744073709551615 }, + "tests": [ + { + "description": "comparison works for very negative numbers", + "data": -18446744073709551600, + "valid": true + } + ] + }, + { + "description": "float comparison with high precision on negative numbers", + "schema": { + "exclusiveMinimum": -972783798187987123879878123.18878137 + }, + "tests": [ + { + "description": "comparison works for very negative numbers", + "data": -972783798187987123879878123.188781371, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/cross-draft.json b/vendor/jsonschema/json/tests/draft2020-12/optional/cross-draft.json new file mode 100644 index 00000000..5113bd64 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/cross-draft.json @@ -0,0 +1,18 @@ +[ + { + "description": "refs to historic drafts are processed as historic drafts", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "array", + "$ref": "http://localhost:1234/draft2019-09/ignore-prefixItems.json" + }, + "tests": [ + { + "description": "first item not a string is valid", + "comment": "if the implementation is not processing the $ref as a 2019-09 schema, this test will fail", + "data": [1, 2, 3], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/dependencies-compatibility.json b/vendor/jsonschema/json/tests/draft2020-12/optional/dependencies-compatibility.json new file mode 100644 index 00000000..6eafaf05 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/dependencies-compatibility.json @@ -0,0 +1,269 @@ +[ + { + "description": "single dependency", + "schema": {"dependencies": {"bar": ["foo"]}}, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependant", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "with dependency", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["bar"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "empty dependents", + "schema": {"dependencies": {"bar": []}}, + "tests": [ + { + "description": "empty object", + "data": {}, + "valid": true + }, + { + "description": "object with one property", + "data": {"bar": 2}, + "valid": true + }, + { + "description": "non-object is valid", + "data": 1, + "valid": true + } + ] + 
}, + { + "description": "multiple dependents required", + "schema": {"dependencies": {"quux": ["foo", "bar"]}}, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependants", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "with dependencies", + "data": {"foo": 1, "bar": 2, "quux": 3}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"foo": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing other dependency", + "data": {"bar": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing both dependencies", + "data": {"quux": 1}, + "valid": false + } + ] + }, + { + "description": "dependencies with escaped characters", + "schema": { + "dependencies": { + "foo\nbar": ["foo\rbar"], + "foo\"bar": ["foo'bar"] + } + }, + "tests": [ + { + "description": "CRLF", + "data": { + "foo\nbar": 1, + "foo\rbar": 2 + }, + "valid": true + }, + { + "description": "quoted quotes", + "data": { + "foo'bar": 1, + "foo\"bar": 2 + }, + "valid": true + }, + { + "description": "CRLF missing dependent", + "data": { + "foo\nbar": 1, + "foo": 2 + }, + "valid": false + }, + { + "description": "quoted quotes missing dependent", + "data": { + "foo\"bar": 2 + }, + "valid": false + } + ] + }, + { + "description": "single schema dependency", + "schema": { + "dependencies": { + "bar": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "integer"} + } + } + } + }, + "tests": [ + { + "description": "valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "no dependency", + "data": {"foo": "quux"}, + "valid": true + }, + { + "description": "wrong type", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "wrong type other", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + }, + { + "description": "wrong type both", + "data": {"foo": "quux", "bar": "quux"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["bar"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "boolean subschemas", + "schema": { + "dependencies": { + "foo": true, + "bar": false + } + }, + "tests": [ + { + "description": "object with property having schema true is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "object with property having schema false is invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "object with both properties is invalid", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "schema dependencies with escaped characters", + "schema": { + "dependencies": { + "foo\tbar": {"minProperties": 4}, + "foo'bar": {"required": ["foo\"bar"]} + } + }, + "tests": [ + { + "description": "quoted tab", + "data": { + "foo\tbar": 1, + "a": 2, + "b": 3, + "c": 4 + }, + "valid": true + }, + { + "description": "quoted quote", + "data": { + "foo'bar": {"foo\"bar": 1} + }, + "valid": false + }, + { + "description": "quoted tab invalid under dependent schema", + "data": { + "foo\tbar": 1, + "a": 2 + }, + "valid": false + }, + { + "description": "quoted quote invalid under dependent schema", + "data": {"foo'bar": 1}, + "valid": false + } + ] + } +] diff --git 
a/vendor/jsonschema/json/tests/draft2020-12/optional/ecmascript-regex.json b/vendor/jsonschema/json/tests/draft2020-12/optional/ecmascript-regex.json new file mode 100644 index 00000000..3ab9aafd --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/ecmascript-regex.json @@ -0,0 +1,563 @@ +[ + { + "description": "ECMA 262 regex $ does not match trailing newline", + "schema": { + "type": "string", + "pattern": "^abc$" + }, + "tests": [ + { + "description": "matches in Python, but not in ECMA 262", + "data": "abc\\n", + "valid": false + }, + { + "description": "matches", + "data": "abc", + "valid": true + } + ] + }, + { + "description": "ECMA 262 regex converts \\t to horizontal tab", + "schema": { + "type": "string", + "pattern": "^\\t$" + }, + "tests": [ + { + "description": "does not match", + "data": "\\t", + "valid": false + }, + { + "description": "matches", + "data": "\u0009", + "valid": true + } + ] + }, + { + "description": "ECMA 262 regex escapes control codes with \\c and upper letter", + "schema": { + "type": "string", + "pattern": "^\\cC$" + }, + "tests": [ + { + "description": "does not match", + "data": "\\cC", + "valid": false + }, + { + "description": "matches", + "data": "\u0003", + "valid": true + } + ] + }, + { + "description": "ECMA 262 regex escapes control codes with \\c and lower letter", + "schema": { + "type": "string", + "pattern": "^\\cc$" + }, + "tests": [ + { + "description": "does not match", + "data": "\\cc", + "valid": false + }, + { + "description": "matches", + "data": "\u0003", + "valid": true + } + ] + }, + { + "description": "ECMA 262 \\d matches ascii digits only", + "schema": { + "type": "string", + "pattern": "^\\d$" + }, + "tests": [ + { + "description": "ASCII zero matches", + "data": "0", + "valid": true + }, + { + "description": "NKO DIGIT ZERO does not match (unlike e.g. Python)", + "data": "߀", + "valid": false + }, + { + "description": "NKO DIGIT ZERO (as \\u escape) does not match", + "data": "\u07c0", + "valid": false + } + ] + }, + { + "description": "ECMA 262 \\D matches everything but ascii digits", + "schema": { + "type": "string", + "pattern": "^\\D$" + }, + "tests": [ + { + "description": "ASCII zero does not match", + "data": "0", + "valid": false + }, + { + "description": "NKO DIGIT ZERO matches (unlike e.g. Python)", + "data": "߀", + "valid": true + }, + { + "description": "NKO DIGIT ZERO (as \\u escape) matches", + "data": "\u07c0", + "valid": true + } + ] + }, + { + "description": "ECMA 262 \\w matches ascii letters only", + "schema": { + "type": "string", + "pattern": "^\\w$" + }, + "tests": [ + { + "description": "ASCII 'a' matches", + "data": "a", + "valid": true + }, + { + "description": "latin-1 e-acute does not match (unlike e.g. Python)", + "data": "é", + "valid": false + } + ] + }, + { + "description": "ECMA 262 \\W matches everything but ascii letters", + "schema": { + "type": "string", + "pattern": "^\\W$" + }, + "tests": [ + { + "description": "ASCII 'a' does not match", + "data": "a", + "valid": false + }, + { + "description": "latin-1 e-acute matches (unlike e.g. 
Python)", + "data": "é", + "valid": true + } + ] + }, + { + "description": "ECMA 262 \\s matches whitespace", + "schema": { + "type": "string", + "pattern": "^\\s$" + }, + "tests": [ + { + "description": "ASCII space matches", + "data": " ", + "valid": true + }, + { + "description": "Character tabulation matches", + "data": "\t", + "valid": true + }, + { + "description": "Line tabulation matches", + "data": "\u000b", + "valid": true + }, + { + "description": "Form feed matches", + "data": "\u000c", + "valid": true + }, + { + "description": "latin-1 non-breaking-space matches", + "data": "\u00a0", + "valid": true + }, + { + "description": "zero-width whitespace matches", + "data": "\ufeff", + "valid": true + }, + { + "description": "line feed matches (line terminator)", + "data": "\u000a", + "valid": true + }, + { + "description": "paragraph separator matches (line terminator)", + "data": "\u2029", + "valid": true + }, + { + "description": "EM SPACE matches (Space_Separator)", + "data": "\u2003", + "valid": true + }, + { + "description": "Non-whitespace control does not match", + "data": "\u0001", + "valid": false + }, + { + "description": "Non-whitespace does not match", + "data": "\u2013", + "valid": false + } + ] + }, + { + "description": "ECMA 262 \\S matches everything but whitespace", + "schema": { + "type": "string", + "pattern": "^\\S$" + }, + "tests": [ + { + "description": "ASCII space does not match", + "data": " ", + "valid": false + }, + { + "description": "Character tabulation does not match", + "data": "\t", + "valid": false + }, + { + "description": "Line tabulation does not match", + "data": "\u000b", + "valid": false + }, + { + "description": "Form feed does not match", + "data": "\u000c", + "valid": false + }, + { + "description": "latin-1 non-breaking-space does not match", + "data": "\u00a0", + "valid": false + }, + { + "description": "zero-width whitespace does not match", + "data": "\ufeff", + "valid": false + }, + { + "description": "line feed does not match (line terminator)", + "data": "\u000a", + "valid": false + }, + { + "description": "paragraph separator does not match (line terminator)", + "data": "\u2029", + "valid": false + }, + { + "description": "EM SPACE does not match (Space_Separator)", + "data": "\u2003", + "valid": false + }, + { + "description": "Non-whitespace control matches", + "data": "\u0001", + "valid": true + }, + { + "description": "Non-whitespace matches", + "data": "\u2013", + "valid": true + } + ] + }, + { + "description": "patterns always use unicode semantics with pattern", + "schema": { "pattern": "\\p{Letter}cole" }, + "tests": [ + { + "description": "ascii character in json string", + "data": "Les hivers de mon enfance etaient des saisons longues, longues. Nous vivions en trois lieux: l'ecole, l'eglise et la patinoire; mais la vraie vie etait sur la patinoire.", + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'école, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": true + }, + { + "description": "unicode character in hex format in string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. 
Nous vivions en trois lieux: l'\u00e9cole, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": true + }, + { + "description": "unicode matching is case-sensitive", + "data": "LES HIVERS DE MON ENFANCE ÉTAIENT DES SAISONS LONGUES, LONGUES. NOUS VIVIONS EN TROIS LIEUX: L'ÉCOLE, L'ÉGLISE ET LA PATINOIRE; MAIS LA VRAIE VIE ÉTAIT SUR LA PATINOIRE.", + "valid": false + } + ] + }, + { + "description": "\\w in patterns matches [A-Za-z0-9_], not unicode letters", + "schema": { "pattern": "\\wcole" }, + "tests": [ + { + "description": "ascii character in json string", + "data": "Les hivers de mon enfance etaient des saisons longues, longues. Nous vivions en trois lieux: l'ecole, l'eglise et la patinoire; mais la vraie vie etait sur la patinoire.", + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'école, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'\u00e9cole, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "unicode matching is case-sensitive", + "data": "LES HIVERS DE MON ENFANCE ÉTAIENT DES SAISONS LONGUES, LONGUES. NOUS VIVIONS EN TROIS LIEUX: L'ÉCOLE, L'ÉGLISE ET LA PATINOIRE; MAIS LA VRAIE VIE ÉTAIT SUR LA PATINOIRE.", + "valid": false + } + ] + }, + { + "description": "pattern with ASCII ranges", + "schema": { "pattern": "[a-z]cole" }, + "tests": [ + { + "description": "literal unicode character in json string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'école, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'\u00e9cole, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "ascii characters match", + "data": "Les hivers de mon enfance etaient des saisons longues, longues. 
Nous vivions en trois lieux: l'ecole, l'eglise et la patinoire; mais la vraie vie etait sur la patinoire.", + "valid": true + } + ] + }, + { + "description": "\\d in pattern matches [0-9], not unicode digits", + "schema": { "pattern": "^\\d+$" }, + "tests": [ + { + "description": "ascii digits", + "data": "42", + "valid": true + }, + { + "description": "ascii non-digits", + "data": "-%#", + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": "৪২", + "valid": false + } + ] + }, + { + "description": "\\a is not an ECMA 262 control escape", + "schema": { "$ref": "https://json-schema.org/draft/2020-12/schema" }, + "tests": [ + { + "description": "when used as a pattern", + "data": { "pattern": "\\a" }, + "valid": false + } + ] + }, + { + "description": "pattern with non-ASCII digits", + "schema": { "pattern": "^\\p{digit}+$" }, + "tests": [ + { + "description": "ascii digits", + "data": "42", + "valid": true + }, + { + "description": "ascii non-digits", + "data": "-%#", + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": "৪২", + "valid": true + } + ] + }, + { + "description": "patterns always use unicode semantics with patternProperties", + "schema": { + "type": "object", + "patternProperties": { + "\\p{Letter}cole": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii character in json string", + "data": { "l'ecole": "pas de vraie vie" }, + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": { "l'école": "pas de vraie vie" }, + "valid": true + }, + { + "description": "unicode character in hex format in string", + "data": { "l'\u00e9cole": "pas de vraie vie" }, + "valid": true + }, + { + "description": "unicode matching is case-sensitive", + "data": { "L'ÉCOLE": "PAS DE VRAIE VIE" }, + "valid": false + } + ] + }, + { + "description": "\\w in patternProperties matches [A-Za-z0-9_], not unicode letters", + "schema": { + "type": "object", + "patternProperties": { + "\\wcole": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii character in json string", + "data": { "l'ecole": "pas de vraie vie" }, + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": { "l'école": "pas de vraie vie" }, + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": { "l'\u00e9cole": "pas de vraie vie" }, + "valid": false + }, + { + "description": "unicode matching is case-sensitive", + "data": { "L'ÉCOLE": "PAS DE VRAIE VIE" }, + "valid": false + } + ] + }, + { + "description": "patternProperties with ASCII ranges", + "schema": { + "type": "object", + "patternProperties": { + "[a-z]cole": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "literal unicode character in json string", + "data": { "l'école": "pas de vraie vie" }, + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": { "l'\u00e9cole": "pas de vraie vie" }, + "valid": false + }, + { + "description": "ascii characters match", + "data": { "l'ecole": "pas de vraie vie" }, + "valid": true + } + ] + }, + { + "description": "\\d in patternProperties matches [0-9], not unicode digits", + "schema": { + "type": "object", + "patternProperties": { + "^\\d+$": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii digits", + "data": { "42": "life, the 
universe, and everything" }, + "valid": true + }, + { + "description": "ascii non-digits", + "data": { "-%#": "spending the year dead for tax reasons" }, + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": { "৪২": "khajit has wares if you have coin" }, + "valid": false + } + ] + }, + { + "description": "patternProperties with non-ASCII digits", + "schema": { + "type": "object", + "patternProperties": { + "^\\p{digit}+$": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii digits", + "data": { "42": "life, the universe, and everything" }, + "valid": true + }, + { + "description": "ascii non-digits", + "data": { "-%#": "spending the year dead for tax reasons" }, + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": { "৪২": "khajit has wares if you have coin" }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/float-overflow.json b/vendor/jsonschema/json/tests/draft2020-12/optional/float-overflow.json new file mode 100644 index 00000000..52ff9827 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/float-overflow.json @@ -0,0 +1,13 @@ +[ + { + "description": "all integers are multiples of 0.5, if overflow is handled", + "schema": {"type": "integer", "multipleOf": 0.5}, + "tests": [ + { + "description": "valid if optional overflow handling is implemented", + "data": 1e308, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/format-assertion.json b/vendor/jsonschema/json/tests/draft2020-12/optional/format-assertion.json new file mode 100644 index 00000000..03400370 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/format-assertion.json @@ -0,0 +1,42 @@ +[ + { + "description": "schema that uses custom metaschema with format-assertion: false", + "schema": { + "$id": "https://schema/using/format-assertion/false", + "$schema": "http://localhost:1234/draft2020-12/format-assertion-false.json", + "format": "ipv4" + }, + "tests": [ + { + "description": "format-assertion: false: valid string", + "data": "127.0.0.1", + "valid": true + }, + { + "description": "format-assertion: false: invalid string", + "data": "not-an-ipv4", + "valid": false + } + ] + }, + { + "description": "schema that uses custom metaschema with format-assertion: true", + "schema": { + "$id": "https://schema/using/format-assertion/true", + "$schema": "http://localhost:1234/draft2020-12/format-assertion-true.json", + "format": "ipv4" + }, + "tests": [ + { + "description": "format-assertion: true: valid string", + "data": "127.0.0.1", + "valid": true + }, + { + "description": "format-assertion: true: invalid string", + "data": "not-an-ipv4", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/format/date-time.json b/vendor/jsonschema/json/tests/draft2020-12/optional/format/date-time.json new file mode 100644 index 00000000..09112737 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/format/date-time.json @@ -0,0 +1,133 @@ +[ + { + "description": "validation of date-time strings", + "schema": { "format": "date-time" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + 
"valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid date-time string", + "data": "1963-06-19T08:30:06.283185Z", + "valid": true + }, + { + "description": "a valid date-time string without second fraction", + "data": "1963-06-19T08:30:06Z", + "valid": true + }, + { + "description": "a valid date-time string with plus offset", + "data": "1937-01-01T12:00:27.87+00:20", + "valid": true + }, + { + "description": "a valid date-time string with minus offset", + "data": "1990-12-31T15:59:50.123-08:00", + "valid": true + }, + { + "description": "a valid date-time with a leap second, UTC", + "data": "1998-12-31T23:59:60Z", + "valid": true + }, + { + "description": "a valid date-time with a leap second, with minus offset", + "data": "1998-12-31T15:59:60.123-08:00", + "valid": true + }, + { + "description": "an invalid date-time past leap second, UTC", + "data": "1998-12-31T23:59:61Z", + "valid": false + }, + { + "description": "an invalid date-time with leap second on a wrong minute, UTC", + "data": "1998-12-31T23:58:60Z", + "valid": false + }, + { + "description": "an invalid date-time with leap second on a wrong hour, UTC", + "data": "1998-12-31T22:59:60Z", + "valid": false + }, + { + "description": "an invalid day in date-time string", + "data": "1990-02-31T15:59:59.123-08:00", + "valid": false + }, + { + "description": "an invalid offset in date-time string", + "data": "1990-12-31T15:59:59-24:00", + "valid": false + }, + { + "description": "an invalid closing Z after time-zone offset", + "data": "1963-06-19T08:30:06.28123+01:00Z", + "valid": false + }, + { + "description": "an invalid date-time string", + "data": "06/19/1963 08:30:06 PST", + "valid": false + }, + { + "description": "case-insensitive T and Z", + "data": "1963-06-19t08:30:06.283185z", + "valid": true + }, + { + "description": "only RFC3339 not all of ISO 8601 are valid", + "data": "2013-350T01:01:01", + "valid": false + }, + { + "description": "invalid non-padded month dates", + "data": "1963-6-19T08:30:06.283185Z", + "valid": false + }, + { + "description": "invalid non-padded day dates", + "data": "1963-06-1T08:30:06.283185Z", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4) in date portion", + "data": "1963-06-1৪T00:00:00Z", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4) in time portion", + "data": "1963-06-11T0৪:00:00Z", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/format/date.json b/vendor/jsonschema/json/tests/draft2020-12/optional/format/date.json new file mode 100644 index 00000000..06c9ea0f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/format/date.json @@ -0,0 +1,223 @@ +[ + { + "description": "validation of date strings", + "schema": { "format": "date" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + 
"valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid date string", + "data": "1963-06-19", + "valid": true + }, + { + "description": "a valid date string with 31 days in January", + "data": "2020-01-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in January", + "data": "2020-01-32", + "valid": false + }, + { + "description": "a valid date string with 28 days in February (normal)", + "data": "2021-02-28", + "valid": true + }, + { + "description": "a invalid date string with 29 days in February (normal)", + "data": "2021-02-29", + "valid": false + }, + { + "description": "a valid date string with 29 days in February (leap)", + "data": "2020-02-29", + "valid": true + }, + { + "description": "a invalid date string with 30 days in February (leap)", + "data": "2020-02-30", + "valid": false + }, + { + "description": "a valid date string with 31 days in March", + "data": "2020-03-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in March", + "data": "2020-03-32", + "valid": false + }, + { + "description": "a valid date string with 30 days in April", + "data": "2020-04-30", + "valid": true + }, + { + "description": "a invalid date string with 31 days in April", + "data": "2020-04-31", + "valid": false + }, + { + "description": "a valid date string with 31 days in May", + "data": "2020-05-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in May", + "data": "2020-05-32", + "valid": false + }, + { + "description": "a valid date string with 30 days in June", + "data": "2020-06-30", + "valid": true + }, + { + "description": "a invalid date string with 31 days in June", + "data": "2020-06-31", + "valid": false + }, + { + "description": "a valid date string with 31 days in July", + "data": "2020-07-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in July", + "data": "2020-07-32", + "valid": false + }, + { + "description": "a valid date string with 31 days in August", + "data": "2020-08-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in August", + "data": "2020-08-32", + "valid": false + }, + { + "description": "a valid date string with 30 days in September", + "data": "2020-09-30", + "valid": true + }, + { + "description": "a invalid date string with 31 days in September", + "data": "2020-09-31", + "valid": false + }, + { + "description": "a valid date string with 31 days in October", + "data": "2020-10-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in October", + "data": "2020-10-32", + "valid": false + }, + { + "description": "a valid date string with 30 days in November", + "data": "2020-11-30", + "valid": true + }, + { + "description": "a invalid date string with 31 days in November", + "data": "2020-11-31", + "valid": false + }, + { + "description": "a valid date string with 31 days in December", + "data": "2020-12-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in December", + "data": "2020-12-32", + "valid": false + }, + { + "description": "a invalid date string with invalid month", + "data": "2020-13-01", + "valid": false + }, + { + "description": "an invalid date string", + "data": "06/19/1963", + "valid": false + }, + { + "description": "only RFC3339 not all of ISO 8601 are valid", + "data": "2013-350", + "valid": false + }, + { + "description": "non-padded month dates 
are not valid", + "data": "1998-1-20", + "valid": false + }, + { + "description": "non-padded day dates are not valid", + "data": "1998-01-1", + "valid": false + }, + { + "description": "invalid month", + "data": "1998-13-01", + "valid": false + }, + { + "description": "invalid month-day combination", + "data": "1998-04-31", + "valid": false + }, + { + "description": "2021 is not a leap year", + "data": "2021-02-29", + "valid": false + }, + { + "description": "2020 is a leap year", + "data": "2020-02-29", + "valid": true + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4)", + "data": "1963-06-1৪", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/format/duration.json b/vendor/jsonschema/json/tests/draft2020-12/optional/format/duration.json new file mode 100644 index 00000000..a6acdc1c --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/format/duration.json @@ -0,0 +1,128 @@ +[ + { + "description": "validation of duration strings", + "schema": { "format": "duration" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid duration string", + "data": "P4DT12H30M5S", + "valid": true + }, + { + "description": "an invalid duration string", + "data": "PT1D", + "valid": false + }, + { + "description": "no elements present", + "data": "P", + "valid": false + }, + { + "description": "no time elements present", + "data": "P1YT", + "valid": false + }, + { + "description": "no date or time elements present", + "data": "PT", + "valid": false + }, + { + "description": "elements out of order", + "data": "P2D1Y", + "valid": false + }, + { + "description": "missing time separator", + "data": "P1D2H", + "valid": false + }, + { + "description": "time element in the date position", + "data": "P2S", + "valid": false + }, + { + "description": "four years duration", + "data": "P4Y", + "valid": true + }, + { + "description": "zero time, in seconds", + "data": "PT0S", + "valid": true + }, + { + "description": "zero time, in days", + "data": "P0D", + "valid": true + }, + { + "description": "one month duration", + "data": "P1M", + "valid": true + }, + { + "description": "one minute duration", + "data": "PT1M", + "valid": true + }, + { + "description": "one and a half days, in hours", + "data": "PT36H", + "valid": true + }, + { + "description": "one and a half days, in days and hours", + "data": "P1DT12H", + "valid": true + }, + { + "description": "two weeks", + "data": "P2W", + "valid": true + }, + { + "description": "weeks cannot be combined with other units", + "data": "P1Y2W", + "valid": false + }, + { + "description": "invalid non-ASCII '২' (a Bengali 2)", + "data": "P২Y", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/format/email.json b/vendor/jsonschema/json/tests/draft2020-12/optional/format/email.json new file mode 100644 index 00000000..5ce1c708 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/format/email.json @@ -0,0 
+1,118 @@ +[ + { + "description": "validation of e-mail addresses", + "schema": { "format": "email" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid e-mail address", + "data": "joe.bloggs@example.com", + "valid": true + }, + { + "description": "an invalid e-mail address", + "data": "2962", + "valid": false + }, + { + "description": "tilde in local part is valid", + "data": "te~st@example.com", + "valid": true + }, + { + "description": "tilde before local part is valid", + "data": "~test@example.com", + "valid": true + }, + { + "description": "tilde after local part is valid", + "data": "test~@example.com", + "valid": true + }, + { + "description": "a quoted string with a space in the local part is valid", + "data": "\"joe bloggs\"@example.com", + "valid": true + }, + { + "description": "a quoted string with a double dot in the local part is valid", + "data": "\"joe..bloggs\"@example.com", + "valid": true + }, + { + "description": "a quoted string with a @ in the local part is valid", + "data": "\"joe@bloggs\"@example.com", + "valid": true + }, + { + "description": "an IPv4-address-literal after the @ is valid", + "data": "joe.bloggs@[127.0.0.1]", + "valid": true + }, + { + "description": "an IPv6-address-literal after the @ is valid", + "data": "joe.bloggs@[IPv6:::1]", + "valid": true + }, + { + "description": "dot before local part is not valid", + "data": ".test@example.com", + "valid": false + }, + { + "description": "dot after local part is not valid", + "data": "test.@example.com", + "valid": false + }, + { + "description": "two separated dots inside local part are valid", + "data": "te.s.t@example.com", + "valid": true + }, + { + "description": "two subsequent dots inside local part are not valid", + "data": "te..st@example.com", + "valid": false + }, + { + "description": "an invalid domain", + "data": "joe.bloggs@invalid=domain.com", + "valid": false + }, + { + "description": "an invalid IPv4-address-literal", + "data": "joe.bloggs@[127.0.0.300]", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/format/hostname.json b/vendor/jsonschema/json/tests/draft2020-12/optional/format/hostname.json new file mode 100644 index 00000000..8a67fda8 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/format/hostname.json @@ -0,0 +1,98 @@ +[ + { + "description": "validation of host names", + "schema": { "format": "hostname" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": 
true + }, + { + "description": "a valid host name", + "data": "www.example.com", + "valid": true + }, + { + "description": "a valid punycoded IDN hostname", + "data": "xn--4gbwdl.xn--wgbh1c", + "valid": true + }, + { + "description": "a host name starting with an illegal character", + "data": "-a-host-name-that-starts-with--", + "valid": false + }, + { + "description": "a host name containing illegal characters", + "data": "not_a_valid_host_name", + "valid": false + }, + { + "description": "a host name with a component too long", + "data": "a-vvvvvvvvvvvvvvvveeeeeeeeeeeeeeeerrrrrrrrrrrrrrrryyyyyyyyyyyyyyyy-long-host-name-component", + "valid": false + }, + { + "description": "starts with hyphen", + "data": "-hostname", + "valid": false + }, + { + "description": "ends with hyphen", + "data": "hostname-", + "valid": false + }, + { + "description": "starts with underscore", + "data": "_hostname", + "valid": false + }, + { + "description": "ends with underscore", + "data": "hostname_", + "valid": false + }, + { + "description": "contains underscore", + "data": "host_name", + "valid": false + }, + { + "description": "maximum label length", + "data": "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.com", + "valid": true + }, + { + "description": "exceeds maximum label length", + "data": "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijkl.com", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/format/idn-email.json b/vendor/jsonschema/json/tests/draft2020-12/optional/format/idn-email.json new file mode 100644 index 00000000..6e213745 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/format/idn-email.json @@ -0,0 +1,58 @@ +[ + { + "description": "validation of an internationalized e-mail addresses", + "schema": { "format": "idn-email" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid idn e-mail (example@example.test in Hangul)", + "data": "실례@실례.테스트", + "valid": true + }, + { + "description": "an invalid idn e-mail address", + "data": "2962", + "valid": false + }, + { + "description": "a valid e-mail address", + "data": "joe.bloggs@example.com", + "valid": true + }, + { + "description": "an invalid e-mail address", + "data": "2962", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/format/idn-hostname.json b/vendor/jsonschema/json/tests/draft2020-12/optional/format/idn-hostname.json new file mode 100644 index 00000000..6c8f86a3 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/format/idn-hostname.json @@ -0,0 +1,304 @@ +[ + { + "description": "validation of internationalized host names", + "schema": { "format": "idn-hostname" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + 
"data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid host name (example.test in Hangul)", + "data": "실례.테스트", + "valid": true + }, + { + "description": "illegal first char U+302E Hangul single dot tone mark", + "data": "〮실례.테스트", + "valid": false + }, + { + "description": "contains illegal char U+302E Hangul single dot tone mark", + "data": "실〮례.테스트", + "valid": false + }, + { + "description": "a host name with a component too long", + "data": "실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실례례테스트례례례례례례례례례례례례례례례례례테스트례례례례례례례례례례례례례례례례례례례테스트례례례례례례례례례례례례테스트례례실례.테스트", + "valid": false + }, + { + "description": "invalid label, correct Punycode", + "comment": "https://tools.ietf.org/html/rfc5890#section-2.3.2.1 https://tools.ietf.org/html/rfc5891#section-4.4 https://tools.ietf.org/html/rfc3492#section-7.1", + "data": "-> $1.00 <--", + "valid": false + }, + { + "description": "valid Chinese Punycode", + "comment": "https://tools.ietf.org/html/rfc5890#section-2.3.2.1 https://tools.ietf.org/html/rfc5891#section-4.4", + "data": "xn--ihqwcrb4cv8a8dqg056pqjye", + "valid": true + }, + { + "description": "invalid Punycode", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.4 https://tools.ietf.org/html/rfc5890#section-2.3.2.1", + "data": "xn--X", + "valid": false + }, + { + "description": "U-label contains \"--\" in the 3rd and 4th position", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.1 https://tools.ietf.org/html/rfc5890#section-2.3.2.1", + "data": "XN--aa---o47jg78q", + "valid": false + }, + { + "description": "U-label starts with a dash", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.1", + "data": "-hello", + "valid": false + }, + { + "description": "U-label ends with a dash", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.1", + "data": "hello-", + "valid": false + }, + { + "description": "U-label starts and ends with a dash", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.1", + "data": "-hello-", + "valid": false + }, + { + "description": "Begins with a Spacing Combining Mark", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.2", + "data": "\u0903hello", + "valid": false + }, + { + "description": "Begins with a Nonspacing Mark", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.2", + "data": "\u0300hello", + "valid": false + }, + { + "description": "Begins with an Enclosing Mark", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.2", + "data": "\u0488hello", + "valid": false + }, + { + "description": "Exceptions that are PVALID, left-to-right chars", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.2 https://tools.ietf.org/html/rfc5892#section-2.6", + "data": "\u00df\u03c2\u0f0b\u3007", + "valid": true + }, + { + "description": "Exceptions that are PVALID, right-to-left chars", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.2 https://tools.ietf.org/html/rfc5892#section-2.6", + "data": "\u06fd\u06fe", + "valid": true + }, + { + "description": "Exceptions that are DISALLOWED, right-to-left chars", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.2 https://tools.ietf.org/html/rfc5892#section-2.6", + "data": "\u0640\u07fa", + "valid": false + }, + { 
+ "description": "Exceptions that are DISALLOWED, left-to-right chars", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.2 https://tools.ietf.org/html/rfc5892#section-2.6 Note: The two combining marks (U+302E and U+302F) are in the middle and not at the start", + "data": "\u3031\u3032\u3033\u3034\u3035\u302e\u302f\u303b", + "valid": false + }, + { + "description": "MIDDLE DOT with no preceding 'l'", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3", + "data": "a\u00b7l", + "valid": false + }, + { + "description": "MIDDLE DOT with nothing preceding", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3", + "data": "\u00b7l", + "valid": false + }, + { + "description": "MIDDLE DOT with no following 'l'", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3", + "data": "l\u00b7a", + "valid": false + }, + { + "description": "MIDDLE DOT with nothing following", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3", + "data": "l\u00b7", + "valid": false + }, + { + "description": "MIDDLE DOT with surrounding 'l's", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3", + "data": "l\u00b7l", + "valid": true + }, + { + "description": "Greek KERAIA not followed by Greek", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.4", + "data": "\u03b1\u0375S", + "valid": false + }, + { + "description": "Greek KERAIA not followed by anything", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.4", + "data": "\u03b1\u0375", + "valid": false + }, + { + "description": "Greek KERAIA followed by Greek", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.4", + "data": "\u03b1\u0375\u03b2", + "valid": true + }, + { + "description": "Hebrew GERESH not preceded by Hebrew", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.5", + "data": "A\u05f3\u05d1", + "valid": false + }, + { + "description": "Hebrew GERESH not preceded by anything", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.5", + "data": "\u05f3\u05d1", + "valid": false + }, + { + "description": "Hebrew GERESH preceded by Hebrew", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.5", + "data": "\u05d0\u05f3\u05d1", + "valid": true + }, + { + "description": "Hebrew GERSHAYIM not preceded by Hebrew", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.6", + "data": "A\u05f4\u05d1", + "valid": false + }, + { + "description": "Hebrew GERSHAYIM not preceded by anything", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.6", + "data": "\u05f4\u05d1", + "valid": false + }, + { + "description": "Hebrew GERSHAYIM preceded by Hebrew", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.6", + "data": "\u05d0\u05f4\u05d1", + "valid": true + }, + { + "description": "KATAKANA MIDDLE DOT with no 
Hiragana, Katakana, or Han", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7", + "data": "def\u30fbabc", + "valid": false + }, + { + "description": "KATAKANA MIDDLE DOT with no other characters", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7", + "data": "\u30fb", + "valid": false + }, + { + "description": "KATAKANA MIDDLE DOT with Hiragana", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7", + "data": "\u30fb\u3041", + "valid": true + }, + { + "description": "KATAKANA MIDDLE DOT with Katakana", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7", + "data": "\u30fb\u30a1", + "valid": true + }, + { + "description": "KATAKANA MIDDLE DOT with Han", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7", + "data": "\u30fb\u4e08", + "valid": true + }, + { + "description": "Arabic-Indic digits mixed with Extended Arabic-Indic digits", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.8", + "data": "\u0660\u06f0", + "valid": false + }, + { + "description": "Arabic-Indic digits not mixed with Extended Arabic-Indic digits", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.8", + "data": "\u0628\u0660\u0628", + "valid": true + }, + { + "description": "Extended Arabic-Indic digits not mixed with Arabic-Indic digits", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.9", + "data": "\u06f00", + "valid": true + }, + { + "description": "ZERO WIDTH JOINER not preceded by Virama", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.2 https://www.unicode.org/review/pr-37.pdf", + "data": "\u0915\u200d\u0937", + "valid": false + }, + { + "description": "ZERO WIDTH JOINER not preceded by anything", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.2 https://www.unicode.org/review/pr-37.pdf", + "data": "\u200d\u0937", + "valid": false + }, + { + "description": "ZERO WIDTH JOINER preceded by Virama", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.2 https://www.unicode.org/review/pr-37.pdf", + "data": "\u0915\u094d\u200d\u0937", + "valid": true + }, + { + "description": "ZERO WIDTH NON-JOINER preceded by Virama", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.1", + "data": "\u0915\u094d\u200c\u0937", + "valid": true + }, + { + "description": "ZERO WIDTH NON-JOINER not preceded by Virama but matches regexp", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.1 https://www.w3.org/TR/alreq/#h_disjoining_enforcement", + "data": "\u0628\u064a\u200c\u0628\u064a", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/format/ipv4.json b/vendor/jsonschema/json/tests/draft2020-12/optional/format/ipv4.json new file mode 100644 index 00000000..4706581f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/format/ipv4.json @@ -0,0 +1,84 @@ +[ + { + 
"description": "validation of IP addresses", + "schema": { "format": "ipv4" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid IP address", + "data": "192.168.0.1", + "valid": true + }, + { + "description": "an IP address with too many components", + "data": "127.0.0.0.1", + "valid": false + }, + { + "description": "an IP address with out-of-range values", + "data": "256.256.256.256", + "valid": false + }, + { + "description": "an IP address without 4 components", + "data": "127.0", + "valid": false + }, + { + "description": "an IP address as an integer", + "data": "0x7f000001", + "valid": false + }, + { + "description": "an IP address as an integer (decimal)", + "data": "2130706433", + "valid": false + }, + { + "description": "invalid leading zeroes, as they are treated as octals", + "comment": "see https://sick.codes/universal-netmask-npm-package-used-by-270000-projects-vulnerable-to-octal-input-data-server-side-request-forgery-remote-file-inclusion-local-file-inclusion-and-more-cve-2021-28918/", + "data": "087.10.0.1", + "valid": false + }, + { + "description": "value without leading zero is valid", + "data": "87.10.0.1", + "valid": true + }, + { + "description": "invalid non-ASCII '২' (a Bengali 2)", + "data": "1২7.0.0.1", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/format/ipv6.json b/vendor/jsonschema/json/tests/draft2020-12/optional/format/ipv6.json new file mode 100644 index 00000000..94368f2a --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/format/ipv6.json @@ -0,0 +1,208 @@ +[ + { + "description": "validation of IPv6 addresses", + "schema": { "format": "ipv6" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid IPv6 address", + "data": "::1", + "valid": true + }, + { + "description": "an IPv6 address with out-of-range values", + "data": "12345::", + "valid": false + }, + { + "description": "trailing 4 hex symbols is valid", + "data": "::abef", + "valid": true + }, + { + "description": "trailing 5 hex symbols is invalid", + "data": "::abcef", + "valid": false + }, + { + "description": "an IPv6 address with too many components", + "data": "1:1:1:1:1:1:1:1:1:1:1:1:1:1:1:1", + "valid": false + }, + { + "description": "an IPv6 address containing illegal characters", + "data": "::laptop", + "valid": false + }, + { + "description": "no digits is valid", + "data": "::", + "valid": true + }, + { + "description": "leading 
colons is valid", + "data": "::42:ff:1", + "valid": true + }, + { + "description": "trailing colons is valid", + "data": "d6::", + "valid": true + }, + { + "description": "missing leading octet is invalid", + "data": ":2:3:4:5:6:7:8", + "valid": false + }, + { + "description": "missing trailing octet is invalid", + "data": "1:2:3:4:5:6:7:", + "valid": false + }, + { + "description": "missing leading octet with omitted octets later", + "data": ":2:3:4::8", + "valid": false + }, + { + "description": "single set of double colons in the middle is valid", + "data": "1:d6::42", + "valid": true + }, + { + "description": "two sets of double colons is invalid", + "data": "1::d6::42", + "valid": false + }, + { + "description": "mixed format with the ipv4 section as decimal octets", + "data": "1::d6:192.168.0.1", + "valid": true + }, + { + "description": "mixed format with double colons between the sections", + "data": "1:2::192.168.0.1", + "valid": true + }, + { + "description": "mixed format with ipv4 section with octet out of range", + "data": "1::2:192.168.256.1", + "valid": false + }, + { + "description": "mixed format with ipv4 section with a hex octet", + "data": "1::2:192.168.ff.1", + "valid": false + }, + { + "description": "mixed format with leading double colons (ipv4-mapped ipv6 address)", + "data": "::ffff:192.168.0.1", + "valid": true + }, + { + "description": "triple colons is invalid", + "data": "1:2:3:4:5:::8", + "valid": false + }, + { + "description": "8 octets", + "data": "1:2:3:4:5:6:7:8", + "valid": true + }, + { + "description": "insufficient octets without double colons", + "data": "1:2:3:4:5:6:7", + "valid": false + }, + { + "description": "no colons is invalid", + "data": "1", + "valid": false + }, + { + "description": "ipv4 is not ipv6", + "data": "127.0.0.1", + "valid": false + }, + { + "description": "ipv4 segment must have 4 octets", + "data": "1:2:3:4:1.2.3", + "valid": false + }, + { + "description": "leading whitespace is invalid", + "data": " ::1", + "valid": false + }, + { + "description": "trailing whitespace is invalid", + "data": "::1 ", + "valid": false + }, + { + "description": "netmask is not a part of ipv6 address", + "data": "fe80::/64", + "valid": false + }, + { + "description": "zone id is not a part of ipv6 address", + "data": "fe80::a%eth1", + "valid": false + }, + { + "description": "a long valid ipv6", + "data": "1000:1000:1000:1000:1000:1000:255.255.255.255", + "valid": true + }, + { + "description": "a long invalid ipv6, below length limit, first", + "data": "100:100:100:100:100:100:255.255.255.255.255", + "valid": false + }, + { + "description": "a long invalid ipv6, below length limit, second", + "data": "100:100:100:100:100:100:100:255.255.255.255", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4)", + "data": "1:2:3:4:5:6:7:৪", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4) in the IPv4 portion", + "data": "1:2::192.16৪.0.1", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/format/iri-reference.json b/vendor/jsonschema/json/tests/draft2020-12/optional/format/iri-reference.json new file mode 100644 index 00000000..c6b4c22a --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/format/iri-reference.json @@ -0,0 +1,73 @@ +[ + { + "description": "validation of IRI References", + "schema": { "format": "iri-reference" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { 
+ "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid IRI", + "data": "http://ƒøø.ßår/?∂éœ=πîx#πîüx", + "valid": true + }, + { + "description": "a valid protocol-relative IRI Reference", + "data": "//ƒøø.ßår/?∂éœ=πîx#πîüx", + "valid": true + }, + { + "description": "a valid relative IRI Reference", + "data": "/âππ", + "valid": true + }, + { + "description": "an invalid IRI Reference", + "data": "\\\\WINDOWS\\filëßåré", + "valid": false + }, + { + "description": "a valid IRI Reference", + "data": "âππ", + "valid": true + }, + { + "description": "a valid IRI fragment", + "data": "#Æ’rägmênt", + "valid": true + }, + { + "description": "an invalid IRI fragment", + "data": "#Æ’räg\\mênt", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/format/iri.json b/vendor/jsonschema/json/tests/draft2020-12/optional/format/iri.json new file mode 100644 index 00000000..a0d12aed --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/format/iri.json @@ -0,0 +1,83 @@ +[ + { + "description": "validation of IRIs", + "schema": { "format": "iri" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid IRI with anchor tag", + "data": "http://ƒøø.ßår/?∂éœ=πîx#πîüx", + "valid": true + }, + { + "description": "a valid IRI with anchor tag and parentheses", + "data": "http://ƒøø.com/blah_(wîkïpédiÃ¥)_blah#ßité-1", + "valid": true + }, + { + "description": "a valid IRI with URL-encoded stuff", + "data": "http://ƒøø.ßår/?q=Test%20URL-encoded%20stuff", + "valid": true + }, + { + "description": "a valid IRI with many special characters", + "data": "http://-.~_!$&'()*+,;=:%40:80%2f::::::@example.com", + "valid": true + }, + { + "description": "a valid IRI based on IPv6", + "data": "http://[2001:0db8:85a3:0000:0000:8a2e:0370:7334]", + "valid": true + }, + { + "description": "an invalid IRI based on IPv6", + "data": "http://2001:0db8:85a3:0000:0000:8a2e:0370:7334", + "valid": false + }, + { + "description": "an invalid relative IRI Reference", + "data": "/abc", + "valid": false + }, + { + "description": "an invalid IRI", + "data": "\\\\WINDOWS\\filëßåré", + "valid": false + }, + { + "description": "an invalid IRI though valid IRI reference", + "data": "âππ", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/format/json-pointer.json b/vendor/jsonschema/json/tests/draft2020-12/optional/format/json-pointer.json new file mode 100644 index 00000000..a0346b57 --- /dev/null +++ 
b/vendor/jsonschema/json/tests/draft2020-12/optional/format/json-pointer.json @@ -0,0 +1,198 @@ +[ + { + "description": "validation of JSON-pointers (JSON String Representation)", + "schema": { "format": "json-pointer" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid JSON-pointer", + "data": "/foo/bar~0/baz~1/%a", + "valid": true + }, + { + "description": "not a valid JSON-pointer (~ not escaped)", + "data": "/foo/bar~", + "valid": false + }, + { + "description": "valid JSON-pointer with empty segment", + "data": "/foo//bar", + "valid": true + }, + { + "description": "valid JSON-pointer with the last empty segment", + "data": "/foo/bar/", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #1", + "data": "", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #2", + "data": "/foo", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #3", + "data": "/foo/0", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #4", + "data": "/", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #5", + "data": "/a~1b", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #6", + "data": "/c%d", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #7", + "data": "/e^f", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #8", + "data": "/g|h", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #9", + "data": "/i\\j", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #10", + "data": "/k\"l", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #11", + "data": "/ ", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #12", + "data": "/m~0n", + "valid": true + }, + { + "description": "valid JSON-pointer used adding to the last array position", + "data": "/foo/-", + "valid": true + }, + { + "description": "valid JSON-pointer (- used as object member name)", + "data": "/foo/-/bar", + "valid": true + }, + { + "description": "valid JSON-pointer (multiple escaped characters)", + "data": "/~1~0~0~1~1", + "valid": true + }, + { + "description": "valid JSON-pointer (escaped with fraction part) #1", + "data": "/~1.1", + "valid": true + }, + { + "description": "valid JSON-pointer (escaped with fraction part) #2", + "data": "/~0.1", + "valid": true + }, + { + "description": "not a valid JSON-pointer (URI Fragment Identifier) #1", + "data": "#", + "valid": false + }, + { + "description": "not a valid JSON-pointer (URI Fragment Identifier) #2", + "data": "#/", + "valid": false + }, + { + "description": "not a valid JSON-pointer (URI Fragment Identifier) #3", + "data": "#a", + "valid": false + }, + { + "description": "not a valid JSON-pointer (some escaped, but not all) #1", + "data": "/~0~", + 
"valid": false + }, + { + "description": "not a valid JSON-pointer (some escaped, but not all) #2", + "data": "/~0/~", + "valid": false + }, + { + "description": "not a valid JSON-pointer (wrong escape character) #1", + "data": "/~2", + "valid": false + }, + { + "description": "not a valid JSON-pointer (wrong escape character) #2", + "data": "/~-1", + "valid": false + }, + { + "description": "not a valid JSON-pointer (multiple characters not escaped)", + "data": "/~~", + "valid": false + }, + { + "description": "not a valid JSON-pointer (isn't empty nor starts with /) #1", + "data": "a", + "valid": false + }, + { + "description": "not a valid JSON-pointer (isn't empty nor starts with /) #2", + "data": "0", + "valid": false + }, + { + "description": "not a valid JSON-pointer (isn't empty nor starts with /) #3", + "data": "a/a", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/format/regex.json b/vendor/jsonschema/json/tests/draft2020-12/optional/format/regex.json new file mode 100644 index 00000000..34491770 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/format/regex.json @@ -0,0 +1,48 @@ +[ + { + "description": "validation of regular expressions", + "schema": { "format": "regex" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid regular expression", + "data": "([abc])+\\s+$", + "valid": true + }, + { + "description": "a regular expression with unclosed parens is invalid", + "data": "^(abc]", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/format/relative-json-pointer.json b/vendor/jsonschema/json/tests/draft2020-12/optional/format/relative-json-pointer.json new file mode 100644 index 00000000..9309986f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/format/relative-json-pointer.json @@ -0,0 +1,83 @@ +[ + { + "description": "validation of Relative JSON Pointers (RJP)", + "schema": { "format": "relative-json-pointer" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid upwards RJP", + "data": "1", + "valid": true + }, + { + "description": "a valid downwards RJP", + "data": "0/foo/bar", + "valid": true + }, + { + "description": "a valid up and then down RJP, with array index", + "data": "2/0/baz/1/zip", + "valid": true + }, + { + "description": "a valid RJP taking the member or index name", + "data": "0#", + "valid": true + }, + { + "description": "an invalid 
RJP that is a valid JSON Pointer", + "data": "/foo/bar", + "valid": false + }, + { + "description": "negative prefix", + "data": "-1/foo/bar", + "valid": false + }, + { + "description": "## is not a valid json-pointer", + "data": "0##", + "valid": false + }, + { + "description": "zero cannot be followed by other digits, plus json-pointer", + "data": "01/a", + "valid": false + }, + { + "description": "zero cannot be followed by other digits, plus octothorpe", + "data": "01#", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/format/time.json b/vendor/jsonschema/json/tests/draft2020-12/optional/format/time.json new file mode 100644 index 00000000..31425871 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/format/time.json @@ -0,0 +1,198 @@ +[ + { + "description": "validation of time strings", + "schema": { "format": "time" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid time string", + "data": "08:30:06Z", + "valid": true + }, + { + "description": "a valid time string with leap second, Zulu", + "data": "23:59:60Z", + "valid": true + }, + { + "description": "invalid leap second, Zulu (wrong hour)", + "data": "22:59:60Z", + "valid": false + }, + { + "description": "invalid leap second, Zulu (wrong minute)", + "data": "23:58:60Z", + "valid": false + }, + { + "description": "valid leap second, zero time-offset", + "data": "23:59:60+00:00", + "valid": true + }, + { + "description": "invalid leap second, zero time-offset (wrong hour)", + "data": "22:59:60+00:00", + "valid": false + }, + { + "description": "invalid leap second, zero time-offset (wrong minute)", + "data": "23:58:60+00:00", + "valid": false + }, + { + "description": "valid leap second, positive time-offset", + "data": "01:29:60+01:30", + "valid": true + }, + { + "description": "valid leap second, large positive time-offset", + "data": "23:29:60+23:30", + "valid": true + }, + { + "description": "invalid leap second, positive time-offset (wrong hour)", + "data": "23:59:60+01:00", + "valid": false + }, + { + "description": "invalid leap second, positive time-offset (wrong minute)", + "data": "23:59:60+00:30", + "valid": false + }, + { + "description": "valid leap second, negative time-offset", + "data": "15:59:60-08:00", + "valid": true + }, + { + "description": "valid leap second, large negative time-offset", + "data": "00:29:60-23:30", + "valid": true + }, + { + "description": "invalid leap second, negative time-offset (wrong hour)", + "data": "23:59:60-01:00", + "valid": false + }, + { + "description": "invalid leap second, negative time-offset (wrong minute)", + "data": "23:59:60-00:30", + "valid": false + }, + { + "description": "a valid time string with second fraction", + "data": "23:20:50.52Z", + "valid": true + }, + { + "description": "a valid time string with precise second fraction", + "data": "08:30:06.283185Z", + "valid": true + }, + { + "description": "a valid time string with plus offset", + "data": 
"08:30:06+00:20", + "valid": true + }, + { + "description": "a valid time string with minus offset", + "data": "08:30:06-08:00", + "valid": true + }, + { + "description": "a valid time string with case-insensitive Z", + "data": "08:30:06z", + "valid": true + }, + { + "description": "an invalid time string with invalid hour", + "data": "24:00:00Z", + "valid": false + }, + { + "description": "an invalid time string with invalid minute", + "data": "00:60:00Z", + "valid": false + }, + { + "description": "an invalid time string with invalid second", + "data": "00:00:61Z", + "valid": false + }, + { + "description": "an invalid time string with invalid leap second (wrong hour)", + "data": "22:59:60Z", + "valid": false + }, + { + "description": "an invalid time string with invalid leap second (wrong minute)", + "data": "23:58:60Z", + "valid": false + }, + { + "description": "an invalid time string with invalid time numoffset hour", + "data": "01:02:03+24:00", + "valid": false + }, + { + "description": "an invalid time string with invalid time numoffset minute", + "data": "01:02:03+00:60", + "valid": false + }, + { + "description": "an invalid time string with invalid time with both Z and numoffset", + "data": "01:02:03Z+00:30", + "valid": false + }, + { + "description": "an invalid offset indicator", + "data": "08:30:06 PST", + "valid": false + }, + { + "description": "only RFC3339 not all of ISO 8601 are valid", + "data": "01:01:01,1111", + "valid": false + }, + { + "description": "no time offset", + "data": "12:00:00", + "valid": false + }, + { + "description": "invalid non-ASCII '২' (a Bengali 2)", + "data": "1২:00:00Z", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/format/unknown.json b/vendor/jsonschema/json/tests/draft2020-12/optional/format/unknown.json new file mode 100644 index 00000000..12339ae5 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/format/unknown.json @@ -0,0 +1,43 @@ +[ + { + "description": "unknown format", + "schema": { "format": "unknown" }, + "tests": [ + { + "description": "unknown formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "unknown formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "unknown formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "unknown formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "unknown formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "unknown formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "unknown formats ignore strings", + "data": "string", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/format/uri-reference.json b/vendor/jsonschema/json/tests/draft2020-12/optional/format/uri-reference.json new file mode 100644 index 00000000..7cdf228d --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/format/uri-reference.json @@ -0,0 +1,73 @@ +[ + { + "description": "validation of URI References", + "schema": { "format": "uri-reference" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + 
"description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid URI", + "data": "http://foo.bar/?baz=qux#quux", + "valid": true + }, + { + "description": "a valid protocol-relative URI Reference", + "data": "//foo.bar/?baz=qux#quux", + "valid": true + }, + { + "description": "a valid relative URI Reference", + "data": "/abc", + "valid": true + }, + { + "description": "an invalid URI Reference", + "data": "\\\\WINDOWS\\fileshare", + "valid": false + }, + { + "description": "a valid URI Reference", + "data": "abc", + "valid": true + }, + { + "description": "a valid URI fragment", + "data": "#fragment", + "valid": true + }, + { + "description": "an invalid URI fragment", + "data": "#frag\\ment", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/format/uri-template.json b/vendor/jsonschema/json/tests/draft2020-12/optional/format/uri-template.json new file mode 100644 index 00000000..df355c55 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/format/uri-template.json @@ -0,0 +1,58 @@ +[ + { + "description": "format: uri-template", + "schema": { "format": "uri-template" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid uri-template", + "data": "http://example.com/dictionary/{term:1}/{term}", + "valid": true + }, + { + "description": "an invalid uri-template", + "data": "http://example.com/dictionary/{term:1}/{term", + "valid": false + }, + { + "description": "a valid uri-template without variables", + "data": "http://example.com/dictionary", + "valid": true + }, + { + "description": "a valid relative uri-template", + "data": "dictionary/{term:1}/{term}", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/format/uri.json b/vendor/jsonschema/json/tests/draft2020-12/optional/format/uri.json new file mode 100644 index 00000000..792d71a0 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/format/uri.json @@ -0,0 +1,108 @@ +[ + { + "description": "validation of URIs", + "schema": { "format": "uri" }, + "tests": [ + { + "description": "a valid URL with anchor tag", + "data": "http://foo.bar/?baz=qux#quux", + "valid": true + }, + { + "description": "a valid URL with anchor tag and parentheses", + "data": "http://foo.com/blah_(wikipedia)_blah#cite-1", + "valid": true + }, + { + "description": "a valid URL with URL-encoded stuff", + "data": "http://foo.bar/?q=Test%20URL-encoded%20stuff", + "valid": true + }, + { + "description": "a valid puny-coded URL ", + "data": "http://xn--nw2a.xn--j6w193g/", + "valid": true + }, + { + "description": "a valid URL with many special characters", + "data": "http://-.~_!$&'()*+,;=:%40:80%2f::::::@example.com", + "valid": true + }, + { + "description": "a valid URL based on IPv4", + "data": "http://223.255.255.254", + "valid": true + }, + { + 
"description": "a valid URL with ftp scheme", + "data": "ftp://ftp.is.co.za/rfc/rfc1808.txt", + "valid": true + }, + { + "description": "a valid URL for a simple text file", + "data": "http://www.ietf.org/rfc/rfc2396.txt", + "valid": true + }, + { + "description": "a valid URL ", + "data": "ldap://[2001:db8::7]/c=GB?objectClass?one", + "valid": true + }, + { + "description": "a valid mailto URI", + "data": "mailto:John.Doe@example.com", + "valid": true + }, + { + "description": "a valid newsgroup URI", + "data": "news:comp.infosystems.www.servers.unix", + "valid": true + }, + { + "description": "a valid tel URI", + "data": "tel:+1-816-555-1212", + "valid": true + }, + { + "description": "a valid URN", + "data": "urn:oasis:names:specification:docbook:dtd:xml:4.1.2", + "valid": true + }, + { + "description": "an invalid protocol-relative URI Reference", + "data": "//foo.bar/?baz=qux#quux", + "valid": false + }, + { + "description": "an invalid relative URI Reference", + "data": "/abc", + "valid": false + }, + { + "description": "an invalid URI", + "data": "\\\\WINDOWS\\fileshare", + "valid": false + }, + { + "description": "an invalid URI though valid URI reference", + "data": "abc", + "valid": false + }, + { + "description": "an invalid URI with spaces", + "data": "http:// shouldfail.com", + "valid": false + }, + { + "description": "an invalid URI with spaces and missing scheme", + "data": ":// should fail", + "valid": false + }, + { + "description": "an invalid URI with comma in scheme", + "data": "bar,baz:foo", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/format/uuid.json b/vendor/jsonschema/json/tests/draft2020-12/optional/format/uuid.json new file mode 100644 index 00000000..e54cbc0f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/format/uuid.json @@ -0,0 +1,85 @@ +[ + { + "description": "uuid format", + "schema": { + "format": "uuid" + }, + "tests": [ + { + "description": "all upper-case", + "data": "2EB8AA08-AA98-11EA-B4AA-73B441D16380", + "valid": true + }, + { + "description": "all lower-case", + "data": "2eb8aa08-aa98-11ea-b4aa-73b441d16380", + "valid": true + }, + { + "description": "mixed case", + "data": "2eb8aa08-AA98-11ea-B4Aa-73B441D16380", + "valid": true + }, + { + "description": "all zeroes is valid", + "data": "00000000-0000-0000-0000-000000000000", + "valid": true + }, + { + "description": "wrong length", + "data": "2eb8aa08-aa98-11ea-b4aa-73b441d1638", + "valid": false + }, + { + "description": "missing section", + "data": "2eb8aa08-aa98-11ea-73b441d16380", + "valid": false + }, + { + "description": "bad characters (not hex)", + "data": "2eb8aa08-aa98-11ea-b4ga-73b441d16380", + "valid": false + }, + { + "description": "no dashes", + "data": "2eb8aa08aa9811eab4aa73b441d16380", + "valid": false + }, + { + "description": "too few dashes", + "data": "2eb8aa08aa98-11ea-b4aa73b441d16380", + "valid": false + }, + { + "description": "too many dashes", + "data": "2eb8-aa08-aa98-11ea-b4aa73b44-1d16380", + "valid": false + }, + { + "description": "dashes in the wrong spot", + "data": "2eb8aa08aa9811eab4aa73b441d16380----", + "valid": false + }, + { + "description": "valid version 4", + "data": "98d80576-482e-427f-8434-7f86890ab222", + "valid": true + }, + { + "description": "valid version 5", + "data": "99c17cbb-656f-564a-940f-1a4568f03487", + "valid": true + }, + { + "description": "hypothetical version 6", + "data": "99c17cbb-656f-664a-940f-1a4568f03487", + "valid": true + }, + { + "description": 
"hypothetical version 15", + "data": "99c17cbb-656f-f64a-940f-1a4568f03487", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/non-bmp-regex.json b/vendor/jsonschema/json/tests/draft2020-12/optional/non-bmp-regex.json new file mode 100644 index 00000000..dd67af2b --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/non-bmp-regex.json @@ -0,0 +1,82 @@ +[ + { + "description": "Proper UTF-16 surrogate pair handling: pattern", + "comment": "Optional because .Net doesn't correctly handle 32-bit Unicode characters", + "schema": { "pattern": "^ðŸ²*$" }, + "tests": [ + { + "description": "matches empty", + "data": "", + "valid": true + }, + { + "description": "matches single", + "data": "ðŸ²", + "valid": true + }, + { + "description": "matches two", + "data": "ðŸ²ðŸ²", + "valid": true + }, + { + "description": "doesn't match one", + "data": "ðŸ‰", + "valid": false + }, + { + "description": "doesn't match two", + "data": "ðŸ‰ðŸ‰", + "valid": false + }, + { + "description": "doesn't match one ASCII", + "data": "D", + "valid": false + }, + { + "description": "doesn't match two ASCII", + "data": "DD", + "valid": false + } + ] + }, + { + "description": "Proper UTF-16 surrogate pair handling: patternProperties", + "comment": "Optional because .Net doesn't correctly handle 32-bit Unicode characters", + "schema": { + "patternProperties": { + "^ðŸ²*$": { + "type": "integer" + } + } + }, + "tests": [ + { + "description": "matches empty", + "data": { "": 1 }, + "valid": true + }, + { + "description": "matches single", + "data": { "ðŸ²": 1 }, + "valid": true + }, + { + "description": "matches two", + "data": { "ðŸ²ðŸ²": 1 }, + "valid": true + }, + { + "description": "doesn't match one", + "data": { "ðŸ²": "hello" }, + "valid": false + }, + { + "description": "doesn't match two", + "data": { "ðŸ²ðŸ²": "hello" }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/optional/refOfUnknownKeyword.json b/vendor/jsonschema/json/tests/draft2020-12/optional/refOfUnknownKeyword.json new file mode 100644 index 00000000..5b150df8 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/optional/refOfUnknownKeyword.json @@ -0,0 +1,44 @@ +[ + { + "description": "reference of a root arbitrary keyword ", + "schema": { + "unknown-keyword": {"type": "integer"}, + "properties": { + "bar": {"$ref": "#/unknown-keyword"} + } + }, + "tests": [ + { + "description": "match", + "data": {"bar": 3}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": true}, + "valid": false + } + ] + }, + { + "description": "reference of an arbitrary keyword of a sub-schema", + "schema": { + "properties": { + "foo": {"unknown-keyword": {"type": "integer"}}, + "bar": {"$ref": "#/properties/foo/unknown-keyword"} + } + }, + "tests": [ + { + "description": "match", + "data": {"bar": 3}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": true}, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/pattern.json b/vendor/jsonschema/json/tests/draft2020-12/pattern.json new file mode 100644 index 00000000..92db0f97 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/pattern.json @@ -0,0 +1,59 @@ +[ + { + "description": "pattern validation", + "schema": {"pattern": "^a*$"}, + "tests": [ + { + "description": "a matching pattern is valid", + "data": "aaa", + "valid": true + }, + { + "description": "a non-matching pattern is invalid", + "data": "abc", + "valid": false + }, + { + 
"description": "ignores booleans", + "data": true, + "valid": true + }, + { + "description": "ignores integers", + "data": 123, + "valid": true + }, + { + "description": "ignores floats", + "data": 1.0, + "valid": true + }, + { + "description": "ignores objects", + "data": {}, + "valid": true + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores null", + "data": null, + "valid": true + } + ] + }, + { + "description": "pattern is not anchored", + "schema": {"pattern": "a+"}, + "tests": [ + { + "description": "matches a substring", + "data": "xxaayy", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/patternProperties.json b/vendor/jsonschema/json/tests/draft2020-12/patternProperties.json new file mode 100644 index 00000000..c276e647 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/patternProperties.json @@ -0,0 +1,171 @@ +[ + { + "description": + "patternProperties validates properties matching a regex", + "schema": { + "patternProperties": { + "f.*o": {"type": "integer"} + } + }, + "tests": [ + { + "description": "a single valid match is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "multiple valid matches is valid", + "data": {"foo": 1, "foooooo" : 2}, + "valid": true + }, + { + "description": "a single invalid match is invalid", + "data": {"foo": "bar", "fooooo": 2}, + "valid": false + }, + { + "description": "multiple invalid matches is invalid", + "data": {"foo": "bar", "foooooo" : "baz"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["foo"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foo", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "multiple simultaneous patternProperties are validated", + "schema": { + "patternProperties": { + "a*": {"type": "integer"}, + "aaa*": {"maximum": 20} + } + }, + "tests": [ + { + "description": "a single valid match is valid", + "data": {"a": 21}, + "valid": true + }, + { + "description": "a simultaneous match is valid", + "data": {"aaaa": 18}, + "valid": true + }, + { + "description": "multiple matches is valid", + "data": {"a": 21, "aaaa": 18}, + "valid": true + }, + { + "description": "an invalid due to one is invalid", + "data": {"a": "bar"}, + "valid": false + }, + { + "description": "an invalid due to the other is invalid", + "data": {"aaaa": 31}, + "valid": false + }, + { + "description": "an invalid due to both is invalid", + "data": {"aaa": "foo", "aaaa": 31}, + "valid": false + } + ] + }, + { + "description": "regexes are not anchored by default and are case sensitive", + "schema": { + "patternProperties": { + "[0-9]{2,}": { "type": "boolean" }, + "X_": { "type": "string" } + } + }, + "tests": [ + { + "description": "non recognized members are ignored", + "data": { "answer 1": "42" }, + "valid": true + }, + { + "description": "recognized members are accounted for", + "data": { "a31b": null }, + "valid": false + }, + { + "description": "regexes are case sensitive", + "data": { "a_x_3": 3 }, + "valid": true + }, + { + "description": "regexes are case sensitive, 2", + "data": { "a_X_3": 3 }, + "valid": false + } + ] + }, + { + "description": "patternProperties with boolean schemas", + "schema": { + "patternProperties": { + "f.*": true, + "b.*": false + } + }, + "tests": [ + { + "description": "object with property matching schema true is valid", + "data": {"foo": 1}, + "valid": 
true + }, + { + "description": "object with property matching schema false is invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "object with both properties is invalid", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "object with a property matching both true and false is invalid", + "data": {"foobar":1}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "patternProperties with null valued instance properties", + "schema": { + "patternProperties": { + "^.*bar$": {"type": "null"} + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foobar": null}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/prefixItems.json b/vendor/jsonschema/json/tests/draft2020-12/prefixItems.json new file mode 100644 index 00000000..aab1e365 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/prefixItems.json @@ -0,0 +1,98 @@ +[ + { + "description": "a schema given for prefixItems", + "schema": { + "prefixItems": [ + {"type": "integer"}, + {"type": "string"} + ] + }, + "tests": [ + { + "description": "correct types", + "data": [ 1, "foo" ], + "valid": true + }, + { + "description": "wrong types", + "data": [ "foo", 1 ], + "valid": false + }, + { + "description": "incomplete array of items", + "data": [ 1 ], + "valid": true + }, + { + "description": "array with additional items", + "data": [ 1, "foo", true ], + "valid": true + }, + { + "description": "empty array", + "data": [ ], + "valid": true + }, + { + "description": "JavaScript pseudo-array is valid", + "data": { + "0": "invalid", + "1": "valid", + "length": 2 + }, + "valid": true + } + ] + }, + { + "description": "prefixItems with boolean schemas", + "schema": { + "prefixItems": [true, false] + }, + "tests": [ + { + "description": "array with one item is valid", + "data": [ 1 ], + "valid": true + }, + { + "description": "array with two items is invalid", + "data": [ 1, "foo" ], + "valid": false + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "additional items are allowed by default", + "schema": {"prefixItems": [{"type": "integer"}]}, + "tests": [ + { + "description": "only the first item is validated", + "data": [1, "foo", false], + "valid": true + } + ] + }, + { + "description": "prefixItems with null instance elements", + "schema": { + "prefixItems": [ + { + "type": "null" + } + ] + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/properties.json b/vendor/jsonschema/json/tests/draft2020-12/properties.json new file mode 100644 index 00000000..5b971ca0 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/properties.json @@ -0,0 +1,236 @@ +[ + { + "description": "object properties validation", + "schema": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "both properties present and valid is valid", + "data": {"foo": 1, "bar": "baz"}, + "valid": true + }, + { + "description": "one property invalid is invalid", + "data": {"foo": 1, "bar": {}}, + "valid": false + }, + { + "description": "both properties invalid is invalid", + "data": {"foo": [], "bar": {}}, + "valid": false + }, + { + "description": "doesn't invalidate other properties", + "data": {"quux": []}, + "valid": true + }, + { + 
"description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": + "properties, patternProperties, additionalProperties interaction", + "schema": { + "properties": { + "foo": {"type": "array", "maxItems": 3}, + "bar": {"type": "array"} + }, + "patternProperties": {"f.o": {"minItems": 2}}, + "additionalProperties": {"type": "integer"} + }, + "tests": [ + { + "description": "property validates property", + "data": {"foo": [1, 2]}, + "valid": true + }, + { + "description": "property invalidates property", + "data": {"foo": [1, 2, 3, 4]}, + "valid": false + }, + { + "description": "patternProperty invalidates property", + "data": {"foo": []}, + "valid": false + }, + { + "description": "patternProperty validates nonproperty", + "data": {"fxo": [1, 2]}, + "valid": true + }, + { + "description": "patternProperty invalidates nonproperty", + "data": {"fxo": []}, + "valid": false + }, + { + "description": "additionalProperty ignores property", + "data": {"bar": []}, + "valid": true + }, + { + "description": "additionalProperty validates others", + "data": {"quux": 3}, + "valid": true + }, + { + "description": "additionalProperty invalidates others", + "data": {"quux": "foo"}, + "valid": false + } + ] + }, + { + "description": "properties with boolean schema", + "schema": { + "properties": { + "foo": true, + "bar": false + } + }, + "tests": [ + { + "description": "no property present is valid", + "data": {}, + "valid": true + }, + { + "description": "only 'true' property present is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "only 'false' property present is invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "both properties present is invalid", + "data": {"foo": 1, "bar": 2}, + "valid": false + } + ] + }, + { + "description": "properties with escaped characters", + "schema": { + "properties": { + "foo\nbar": {"type": "number"}, + "foo\"bar": {"type": "number"}, + "foo\\bar": {"type": "number"}, + "foo\rbar": {"type": "number"}, + "foo\tbar": {"type": "number"}, + "foo\fbar": {"type": "number"} + } + }, + "tests": [ + { + "description": "object with all numbers is valid", + "data": { + "foo\nbar": 1, + "foo\"bar": 1, + "foo\\bar": 1, + "foo\rbar": 1, + "foo\tbar": 1, + "foo\fbar": 1 + }, + "valid": true + }, + { + "description": "object with strings is invalid", + "data": { + "foo\nbar": "1", + "foo\"bar": "1", + "foo\\bar": "1", + "foo\rbar": "1", + "foo\tbar": "1", + "foo\fbar": "1" + }, + "valid": false + } + ] + }, + { + "description": "properties with null valued instance properties", + "schema": { + "properties": { + "foo": {"type": "null"} + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foo": null}, + "valid": true + } + ] + }, + { + "description": "properties whose names are Javascript object property names", + "comment": "Ensure JS implementations don't universally consider e.g. 
__proto__ to always be present in an object.", + "schema": { + "properties": { + "__proto__": {"type": "number"}, + "toString": { + "properties": { "length": { "type": "string" } } + }, + "constructor": {"type": "number"} + } + }, + "tests": [ + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + }, + { + "description": "none of the properties mentioned", + "data": {}, + "valid": true + }, + { + "description": "__proto__ not valid", + "data": { "__proto__": "foo" }, + "valid": false + }, + { + "description": "toString not valid", + "data": { "toString": { "length": 37 } }, + "valid": false + }, + { + "description": "constructor not valid", + "data": { "constructor": { "length": 37 } }, + "valid": false + }, + { + "description": "all present and valid", + "data": { + "__proto__": 12, + "toString": { "length": "foo" }, + "constructor": 37 + }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/propertyNames.json b/vendor/jsonschema/json/tests/draft2020-12/propertyNames.json new file mode 100644 index 00000000..8423690d --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/propertyNames.json @@ -0,0 +1,78 @@ +[ + { + "description": "propertyNames validation", + "schema": { + "propertyNames": {"maxLength": 3} + }, + "tests": [ + { + "description": "all property names valid", + "data": { + "f": {}, + "foo": {} + }, + "valid": true + }, + { + "description": "some property names invalid", + "data": { + "foo": {}, + "foobar": {} + }, + "valid": false + }, + { + "description": "object without properties is valid", + "data": {}, + "valid": true + }, + { + "description": "ignores arrays", + "data": [1, 2, 3, 4], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "propertyNames with boolean schema true", + "schema": {"propertyNames": true}, + "tests": [ + { + "description": "object with any properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "propertyNames with boolean schema false", + "schema": {"propertyNames": false}, + "tests": [ + { + "description": "object with any properties is invalid", + "data": {"foo": 1}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/ref.json b/vendor/jsonschema/json/tests/draft2020-12/ref.json new file mode 100644 index 00000000..d6300472 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/ref.json @@ -0,0 +1,834 @@ +[ + { + "description": "root pointer ref", + "schema": { + "properties": { + "foo": {"$ref": "#"} + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "match", + "data": {"foo": false}, + "valid": true + }, + { + "description": "recursive match", + "data": {"foo": {"foo": false}}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": false}, + "valid": false + }, + { + "description": "recursive mismatch", + "data": {"foo": {"bar": false}}, + "valid": false + } + ] + }, + { + "description": "relative pointer ref to object", + "schema": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"$ref": "#/properties/foo"} + } + }, + "tests": [ + { + "description": 
"match", + "data": {"bar": 3}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": true}, + "valid": false + } + ] + }, + { + "description": "relative pointer ref to array", + "schema": { + "prefixItems": [ + {"type": "integer"}, + {"$ref": "#/prefixItems/0"} + ] + }, + "tests": [ + { + "description": "match array", + "data": [1, 2], + "valid": true + }, + { + "description": "mismatch array", + "data": [1, "foo"], + "valid": false + } + ] + }, + { + "description": "escaped pointer ref", + "schema": { + "$defs": { + "tilde~field": {"type": "integer"}, + "slash/field": {"type": "integer"}, + "percent%field": {"type": "integer"} + }, + "properties": { + "tilde": {"$ref": "#/$defs/tilde~0field"}, + "slash": {"$ref": "#/$defs/slash~1field"}, + "percent": {"$ref": "#/$defs/percent%25field"} + } + }, + "tests": [ + { + "description": "slash invalid", + "data": {"slash": "aoeu"}, + "valid": false + }, + { + "description": "tilde invalid", + "data": {"tilde": "aoeu"}, + "valid": false + }, + { + "description": "percent invalid", + "data": {"percent": "aoeu"}, + "valid": false + }, + { + "description": "slash valid", + "data": {"slash": 123}, + "valid": true + }, + { + "description": "tilde valid", + "data": {"tilde": 123}, + "valid": true + }, + { + "description": "percent valid", + "data": {"percent": 123}, + "valid": true + } + ] + }, + { + "description": "nested refs", + "schema": { + "$defs": { + "a": {"type": "integer"}, + "b": {"$ref": "#/$defs/a"}, + "c": {"$ref": "#/$defs/b"} + }, + "$ref": "#/$defs/c" + }, + "tests": [ + { + "description": "nested ref valid", + "data": 5, + "valid": true + }, + { + "description": "nested ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "ref applies alongside sibling keywords", + "schema": { + "$defs": { + "reffed": { + "type": "array" + } + }, + "properties": { + "foo": { + "$ref": "#/$defs/reffed", + "maxItems": 2 + } + } + }, + "tests": [ + { + "description": "ref valid, maxItems valid", + "data": { "foo": [] }, + "valid": true + }, + { + "description": "ref valid, maxItems invalid", + "data": { "foo": [1, 2, 3] }, + "valid": false + }, + { + "description": "ref invalid", + "data": { "foo": "string" }, + "valid": false + } + ] + }, + { + "description": "remote ref, containing refs itself", + "schema": { + "$ref": "https://json-schema.org/draft/2020-12/schema" + }, + "tests": [ + { + "description": "remote ref valid", + "data": {"minLength": 1}, + "valid": true + }, + { + "description": "remote ref invalid", + "data": {"minLength": -1}, + "valid": false + } + ] + }, + { + "description": "property named $ref that is not a reference", + "schema": { + "properties": { + "$ref": {"type": "string"} + } + }, + "tests": [ + { + "description": "property named $ref valid", + "data": {"$ref": "a"}, + "valid": true + }, + { + "description": "property named $ref invalid", + "data": {"$ref": 2}, + "valid": false + } + ] + }, + { + "description": "property named $ref, containing an actual $ref", + "schema": { + "properties": { + "$ref": {"$ref": "#/$defs/is-string"} + }, + "$defs": { + "is-string": { + "type": "string" + } + } + }, + "tests": [ + { + "description": "property named $ref valid", + "data": {"$ref": "a"}, + "valid": true + }, + { + "description": "property named $ref invalid", + "data": {"$ref": 2}, + "valid": false + } + ] + }, + { + "description": "$ref to boolean schema true", + "schema": { + "$ref": "#/$defs/bool", + "$defs": { + "bool": true + } + }, + "tests": [ + { + "description": "any value is 
valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "$ref to boolean schema false", + "schema": { + "$ref": "#/$defs/bool", + "$defs": { + "bool": false + } + }, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "Recursive references between schemas", + "schema": { + "$id": "http://localhost:1234/tree", + "description": "tree of nodes", + "type": "object", + "properties": { + "meta": {"type": "string"}, + "nodes": { + "type": "array", + "items": {"$ref": "node"} + } + }, + "required": ["meta", "nodes"], + "$defs": { + "node": { + "$id": "http://localhost:1234/node", + "description": "node", + "type": "object", + "properties": { + "value": {"type": "number"}, + "subtree": {"$ref": "tree"} + }, + "required": ["value"] + } + } + }, + "tests": [ + { + "description": "valid tree", + "data": { + "meta": "root", + "nodes": [ + { + "value": 1, + "subtree": { + "meta": "child", + "nodes": [ + {"value": 1.1}, + {"value": 1.2} + ] + } + }, + { + "value": 2, + "subtree": { + "meta": "child", + "nodes": [ + {"value": 2.1}, + {"value": 2.2} + ] + } + } + ] + }, + "valid": true + }, + { + "description": "invalid tree", + "data": { + "meta": "root", + "nodes": [ + { + "value": 1, + "subtree": { + "meta": "child", + "nodes": [ + {"value": "string is invalid"}, + {"value": 1.2} + ] + } + }, + { + "value": 2, + "subtree": { + "meta": "child", + "nodes": [ + {"value": 2.1}, + {"value": 2.2} + ] + } + } + ] + }, + "valid": false + } + ] + }, + { + "description": "refs with quote", + "schema": { + "properties": { + "foo\"bar": {"$ref": "#/$defs/foo%22bar"} + }, + "$defs": { + "foo\"bar": {"type": "number"} + } + }, + "tests": [ + { + "description": "object with numbers is valid", + "data": { + "foo\"bar": 1 + }, + "valid": true + }, + { + "description": "object with strings is invalid", + "data": { + "foo\"bar": "1" + }, + "valid": false + } + ] + }, + { + "description": "ref creates new scope when adjacent to keywords", + "schema": { + "$defs": { + "A": { + "unevaluatedProperties": false + } + }, + "properties": { + "prop1": { + "type": "string" + } + }, + "$ref": "#/$defs/A" + }, + "tests": [ + { + "description": "referenced subschema doesn't see annotations from properties", + "data": { + "prop1": "match" + }, + "valid": false + } + ] + }, + { + "description": "naive replacement of $ref with its destination is not correct", + "schema": { + "$defs": { + "a_string": { "type": "string" } + }, + "enum": [ + { "$ref": "#/$defs/a_string" } + ] + }, + "tests": [ + { + "description": "do not evaluate the $ref inside the enum, matching any string", + "data": "this is a string", + "valid": false + }, + { + "description": "do not evaluate the $ref inside the enum, definition exact match", + "data": { "type": "string" }, + "valid": false + }, + { + "description": "match the enum exactly", + "data": { "$ref": "#/$defs/a_string" }, + "valid": true + } + ] + }, + { + "description": "refs with relative uris and defs", + "schema": { + "$id": "http://example.com/schema-relative-uri-defs1.json", + "properties": { + "foo": { + "$id": "schema-relative-uri-defs2.json", + "$defs": { + "inner": { + "properties": { + "bar": { "type": "string" } + } + } + }, + "$ref": "#/$defs/inner" + } + }, + "$ref": "schema-relative-uri-defs2.json" + }, + "tests": [ + { + "description": "invalid on inner field", + "data": { + "foo": { + "bar": 1 + }, + "bar": "a" + }, + "valid": false + }, + { + "description": "invalid on outer field", + "data": { + 
"foo": { + "bar": "a" + }, + "bar": 1 + }, + "valid": false + }, + { + "description": "valid on both fields", + "data": { + "foo": { + "bar": "a" + }, + "bar": "a" + }, + "valid": true + } + ] + }, + { + "description": "relative refs with absolute uris and defs", + "schema": { + "$id": "http://example.com/schema-refs-absolute-uris-defs1.json", + "properties": { + "foo": { + "$id": "http://example.com/schema-refs-absolute-uris-defs2.json", + "$defs": { + "inner": { + "properties": { + "bar": { "type": "string" } + } + } + }, + "$ref": "#/$defs/inner" + } + }, + "$ref": "schema-refs-absolute-uris-defs2.json" + }, + "tests": [ + { + "description": "invalid on inner field", + "data": { + "foo": { + "bar": 1 + }, + "bar": "a" + }, + "valid": false + }, + { + "description": "invalid on outer field", + "data": { + "foo": { + "bar": "a" + }, + "bar": 1 + }, + "valid": false + }, + { + "description": "valid on both fields", + "data": { + "foo": { + "bar": "a" + }, + "bar": "a" + }, + "valid": true + } + ] + }, + { + "description": "$id must be resolved against nearest parent, not just immediate parent", + "schema": { + "$id": "http://example.com/a.json", + "$defs": { + "x": { + "$id": "http://example.com/b/c.json", + "not": { + "$defs": { + "y": { + "$id": "d.json", + "type": "number" + } + } + } + } + }, + "allOf": [ + { + "$ref": "http://example.com/b/d.json" + } + ] + }, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "non-number is invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "order of evaluation: $id and $ref", + "schema": { + "$comment": "$id must be evaluated before $ref to get the proper $ref destination", + "$id": "/ref-and-id1/base.json", + "$ref": "int.json", + "$defs": { + "bigint": { + "$comment": "canonical uri: /ref-and-id1/int.json", + "$id": "int.json", + "maximum": 10 + }, + "smallint": { + "$comment": "canonical uri: /ref-and-id1-int.json", + "$id": "/ref-and-id1-int.json", + "maximum": 2 + } + } + }, + "tests": [ + { + "description": "data is valid against first definition", + "data": 5, + "valid": true + }, + { + "description": "data is invalid against first definition", + "data": 50, + "valid": false + } + ] + }, + { + "description": "order of evaluation: $id and $anchor and $ref", + "schema": { + "$comment": "$id must be evaluated before $ref to get the proper $ref destination", + "$id": "/ref-and-id2/base.json", + "$ref": "#bigint", + "$defs": { + "bigint": { + "$comment": "canonical uri: /ref-and-id2/base.json/$defs/bigint; another valid uri for this location: /ref-and-id2/base.json#bigint", + "$anchor": "bigint", + "maximum": 10 + }, + "smallint": { + "$comment": "canonical uri: /ref-and-id2#/$defs/smallint; another valid uri for this location: /ref-and-id2/#bigint", + "$id": "/ref-and-id2/", + "$anchor": "bigint", + "maximum": 2 + } + } + }, + "tests": [ + { + "description": "data is valid against first definition", + "data": 5, + "valid": true + }, + { + "description": "data is invalid against first definition", + "data": 50, + "valid": false + } + ] + }, + { + "description": "simple URN base URI with $ref via the URN", + "schema": { + "$comment": "URIs do not have to have HTTP(s) schemes", + "$id": "urn:uuid:deadbeef-1234-ffff-ffff-4321feebdaed", + "minimum": 30, + "properties": { + "foo": {"$ref": "urn:uuid:deadbeef-1234-ffff-ffff-4321feebdaed"} + } + }, + "tests": [ + { + "description": "valid under the URN IDed schema", + "data": {"foo": 37}, + "valid": true + }, + { + 
"description": "invalid under the URN IDed schema", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "simple URN base URI with JSON pointer", + "schema": { + "$comment": "URIs do not have to have HTTP(s) schemes", + "$id": "urn:uuid:deadbeef-1234-00ff-ff00-4321feebdaed", + "properties": { + "foo": {"$ref": "#/$defs/bar"} + }, + "$defs": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with NSS", + "schema": { + "$comment": "RFC 8141 §2.2", + "$id": "urn:example:1/406/47452/2", + "properties": { + "foo": {"$ref": "#/$defs/bar"} + }, + "$defs": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with r-component", + "schema": { + "$comment": "RFC 8141 §2.3.1", + "$id": "urn:example:foo-bar-baz-qux?+CCResolve:cc=uk", + "properties": { + "foo": {"$ref": "#/$defs/bar"} + }, + "$defs": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with q-component", + "schema": { + "$comment": "RFC 8141 §2.3.2", + "$id": "urn:example:weather?=op=map&lat=39.56&lon=-104.85&datetime=1969-07-21T02:56:15Z", + "properties": { + "foo": {"$ref": "#/$defs/bar"} + }, + "$defs": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with f-component", + "schema": { + "$comment": "RFC 8141 §2.3.3, but we don't allow fragments", + "$ref": "https://json-schema.org/draft/2020-12/schema" + }, + "tests": [ + { + "description": "is invalid", + "data": {"$id": "urn:example:foo-bar-baz-qux#somepart"}, + "valid": false + } + ] + }, + { + "description": "URN base URI with URN and JSON pointer ref", + "schema": { + "$id": "urn:uuid:deadbeef-1234-0000-0000-4321feebdaed", + "properties": { + "foo": {"$ref": "urn:uuid:deadbeef-1234-0000-0000-4321feebdaed#/$defs/bar"} + }, + "$defs": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with URN and anchor ref", + "schema": { + "$id": "urn:uuid:deadbeef-1234-ff00-00ff-4321feebdaed", + "properties": { + "foo": {"$ref": "urn:uuid:deadbeef-1234-ff00-00ff-4321feebdaed#something"} + }, + "$defs": { + "bar": { + "$anchor": "something", + "type": "string" + } + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/refRemote.json b/vendor/jsonschema/json/tests/draft2020-12/refRemote.json new file mode 100644 index 00000000..a8440396 --- /dev/null +++ 
b/vendor/jsonschema/json/tests/draft2020-12/refRemote.json @@ -0,0 +1,233 @@ +[ + { + "description": "remote ref", + "schema": {"$ref": "http://localhost:1234/integer.json"}, + "tests": [ + { + "description": "remote ref valid", + "data": 1, + "valid": true + }, + { + "description": "remote ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "fragment within remote ref", + "schema": {"$ref": "http://localhost:1234/subSchemas-defs.json#/$defs/integer"}, + "tests": [ + { + "description": "remote fragment valid", + "data": 1, + "valid": true + }, + { + "description": "remote fragment invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "ref within remote ref", + "schema": { + "$ref": "http://localhost:1234/subSchemas-defs.json#/$defs/refToInteger" + }, + "tests": [ + { + "description": "ref within ref valid", + "data": 1, + "valid": true + }, + { + "description": "ref within ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "base URI change", + "schema": { + "$id": "http://localhost:1234/", + "items": { + "$id": "baseUriChange/", + "items": {"$ref": "folderInteger.json"} + } + }, + "tests": [ + { + "description": "base URI change ref valid", + "data": [[1]], + "valid": true + }, + { + "description": "base URI change ref invalid", + "data": [["a"]], + "valid": false + } + ] + }, + { + "description": "base URI change - change folder", + "schema": { + "$id": "http://localhost:1234/scope_change_defs1.json", + "type" : "object", + "properties": {"list": {"$ref": "baseUriChangeFolder/"}}, + "$defs": { + "baz": { + "$id": "baseUriChangeFolder/", + "type": "array", + "items": {"$ref": "folderInteger.json"} + } + } + }, + "tests": [ + { + "description": "number is valid", + "data": {"list": [1]}, + "valid": true + }, + { + "description": "string is invalid", + "data": {"list": ["a"]}, + "valid": false + } + ] + }, + { + "description": "base URI change - change folder in subschema", + "schema": { + "$id": "http://localhost:1234/scope_change_defs2.json", + "type" : "object", + "properties": {"list": {"$ref": "baseUriChangeFolderInSubschema/#/$defs/bar"}}, + "$defs": { + "baz": { + "$id": "baseUriChangeFolderInSubschema/", + "$defs": { + "bar": { + "type": "array", + "items": {"$ref": "folderInteger.json"} + } + } + } + } + }, + "tests": [ + { + "description": "number is valid", + "data": {"list": [1]}, + "valid": true + }, + { + "description": "string is invalid", + "data": {"list": ["a"]}, + "valid": false + } + ] + }, + { + "description": "root ref in remote ref", + "schema": { + "$id": "http://localhost:1234/object", + "type": "object", + "properties": { + "name": {"$ref": "name-defs.json#/$defs/orNull"} + } + }, + "tests": [ + { + "description": "string is valid", + "data": { + "name": "foo" + }, + "valid": true + }, + { + "description": "null is valid", + "data": { + "name": null + }, + "valid": true + }, + { + "description": "object is invalid", + "data": { + "name": { + "name": null + } + }, + "valid": false + } + ] + }, + { + "description": "remote ref with ref to defs", + "schema": { + "$id": "http://localhost:1234/schema-remote-ref-ref-defs1.json", + "$ref": "ref-and-defs.json" + }, + "tests": [ + { + "description": "invalid", + "data": { + "bar": 1 + }, + "valid": false + }, + { + "description": "valid", + "data": { + "bar": "a" + }, + "valid": true + } + ] + }, + { + "description": "Location-independent identifier in remote ref", + "schema": { + "$ref": 
"http://localhost:1234/locationIndependentIdentifier.json#/$defs/refToInteger" + }, + "tests": [ + { + "description": "integer is valid", + "data": 1, + "valid": true + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "retrieved nested refs resolve relative to their URI not $id", + "schema": { + "$id": "http://localhost:1234/some-id", + "properties": { + "name": {"$ref": "nested/foo-ref-string.json"} + } + }, + "tests": [ + { + "description": "number is invalid", + "data": { + "name": {"foo": 1} + }, + "valid": false + }, + { + "description": "string is valid", + "data": { + "name": {"foo": "a"} + }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/required.json b/vendor/jsonschema/json/tests/draft2020-12/required.json new file mode 100644 index 00000000..8d8087af --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/required.json @@ -0,0 +1,151 @@ +[ + { + "description": "required validation", + "schema": { + "properties": { + "foo": {}, + "bar": {} + }, + "required": ["foo"] + }, + "tests": [ + { + "description": "present required property is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "non-present required property is invalid", + "data": {"bar": 1}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores strings", + "data": "", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "required default validation", + "schema": { + "properties": { + "foo": {} + } + }, + "tests": [ + { + "description": "not required by default", + "data": {}, + "valid": true + } + ] + }, + { + "description": "required with empty array", + "schema": { + "properties": { + "foo": {} + }, + "required": [] + }, + "tests": [ + { + "description": "property not required", + "data": {}, + "valid": true + } + ] + }, + { + "description": "required with escaped characters", + "schema": { + "required": [ + "foo\nbar", + "foo\"bar", + "foo\\bar", + "foo\rbar", + "foo\tbar", + "foo\fbar" + ] + }, + "tests": [ + { + "description": "object with all properties present is valid", + "data": { + "foo\nbar": 1, + "foo\"bar": 1, + "foo\\bar": 1, + "foo\rbar": 1, + "foo\tbar": 1, + "foo\fbar": 1 + }, + "valid": true + }, + { + "description": "object with some properties missing is invalid", + "data": { + "foo\nbar": "1", + "foo\"bar": "1" + }, + "valid": false + } + ] + }, + { + "description": "required properties whose names are Javascript object property names", + "comment": "Ensure JS implementations don't universally consider e.g. 
__proto__ to always be present in an object.", + "schema": { "required": ["__proto__", "toString", "constructor"] }, + "tests": [ + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + }, + { + "description": "none of the properties mentioned", + "data": {}, + "valid": false + }, + { + "description": "__proto__ present", + "data": { "__proto__": "foo" }, + "valid": false + }, + { + "description": "toString present", + "data": { "toString": { "length": 37 } }, + "valid": false + }, + { + "description": "constructor present", + "data": { "constructor": { "length": 37 } }, + "valid": false + }, + { + "description": "all present", + "data": { + "__proto__": 12, + "toString": { "length": "foo" }, + "constructor": 37 + }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/type.json b/vendor/jsonschema/json/tests/draft2020-12/type.json new file mode 100644 index 00000000..83046470 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/type.json @@ -0,0 +1,474 @@ +[ + { + "description": "integer type matches integers", + "schema": {"type": "integer"}, + "tests": [ + { + "description": "an integer is an integer", + "data": 1, + "valid": true + }, + { + "description": "a float with zero fractional part is an integer", + "data": 1.0, + "valid": true + }, + { + "description": "a float is not an integer", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an integer", + "data": "foo", + "valid": false + }, + { + "description": "a string is still not an integer, even if it looks like one", + "data": "1", + "valid": false + }, + { + "description": "an object is not an integer", + "data": {}, + "valid": false + }, + { + "description": "an array is not an integer", + "data": [], + "valid": false + }, + { + "description": "a boolean is not an integer", + "data": true, + "valid": false + }, + { + "description": "null is not an integer", + "data": null, + "valid": false + } + ] + }, + { + "description": "number type matches numbers", + "schema": {"type": "number"}, + "tests": [ + { + "description": "an integer is a number", + "data": 1, + "valid": true + }, + { + "description": "a float with zero fractional part is a number (and an integer)", + "data": 1.0, + "valid": true + }, + { + "description": "a float is a number", + "data": 1.1, + "valid": true + }, + { + "description": "a string is not a number", + "data": "foo", + "valid": false + }, + { + "description": "a string is still not a number, even if it looks like one", + "data": "1", + "valid": false + }, + { + "description": "an object is not a number", + "data": {}, + "valid": false + }, + { + "description": "an array is not a number", + "data": [], + "valid": false + }, + { + "description": "a boolean is not a number", + "data": true, + "valid": false + }, + { + "description": "null is not a number", + "data": null, + "valid": false + } + ] + }, + { + "description": "string type matches strings", + "schema": {"type": "string"}, + "tests": [ + { + "description": "1 is not a string", + "data": 1, + "valid": false + }, + { + "description": "a float is not a string", + "data": 1.1, + "valid": false + }, + { + "description": "a string is a string", + "data": "foo", + "valid": true + }, + { + "description": "a string is still a string, even if it looks like a number", + "data": "1", + "valid": true + }, + { + "description": "an empty string is still a string", + "data": "", + "valid": true + }, 
+ { + "description": "an object is not a string", + "data": {}, + "valid": false + }, + { + "description": "an array is not a string", + "data": [], + "valid": false + }, + { + "description": "a boolean is not a string", + "data": true, + "valid": false + }, + { + "description": "null is not a string", + "data": null, + "valid": false + } + ] + }, + { + "description": "object type matches objects", + "schema": {"type": "object"}, + "tests": [ + { + "description": "an integer is not an object", + "data": 1, + "valid": false + }, + { + "description": "a float is not an object", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an object", + "data": "foo", + "valid": false + }, + { + "description": "an object is an object", + "data": {}, + "valid": true + }, + { + "description": "an array is not an object", + "data": [], + "valid": false + }, + { + "description": "a boolean is not an object", + "data": true, + "valid": false + }, + { + "description": "null is not an object", + "data": null, + "valid": false + } + ] + }, + { + "description": "array type matches arrays", + "schema": {"type": "array"}, + "tests": [ + { + "description": "an integer is not an array", + "data": 1, + "valid": false + }, + { + "description": "a float is not an array", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an array", + "data": "foo", + "valid": false + }, + { + "description": "an object is not an array", + "data": {}, + "valid": false + }, + { + "description": "an array is an array", + "data": [], + "valid": true + }, + { + "description": "a boolean is not an array", + "data": true, + "valid": false + }, + { + "description": "null is not an array", + "data": null, + "valid": false + } + ] + }, + { + "description": "boolean type matches booleans", + "schema": {"type": "boolean"}, + "tests": [ + { + "description": "an integer is not a boolean", + "data": 1, + "valid": false + }, + { + "description": "zero is not a boolean", + "data": 0, + "valid": false + }, + { + "description": "a float is not a boolean", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not a boolean", + "data": "foo", + "valid": false + }, + { + "description": "an empty string is not a boolean", + "data": "", + "valid": false + }, + { + "description": "an object is not a boolean", + "data": {}, + "valid": false + }, + { + "description": "an array is not a boolean", + "data": [], + "valid": false + }, + { + "description": "true is a boolean", + "data": true, + "valid": true + }, + { + "description": "false is a boolean", + "data": false, + "valid": true + }, + { + "description": "null is not a boolean", + "data": null, + "valid": false + } + ] + }, + { + "description": "null type matches only the null object", + "schema": {"type": "null"}, + "tests": [ + { + "description": "an integer is not null", + "data": 1, + "valid": false + }, + { + "description": "a float is not null", + "data": 1.1, + "valid": false + }, + { + "description": "zero is not null", + "data": 0, + "valid": false + }, + { + "description": "a string is not null", + "data": "foo", + "valid": false + }, + { + "description": "an empty string is not null", + "data": "", + "valid": false + }, + { + "description": "an object is not null", + "data": {}, + "valid": false + }, + { + "description": "an array is not null", + "data": [], + "valid": false + }, + { + "description": "true is not null", + "data": true, + "valid": false + }, + { + "description": "false is not null", + "data": false, + "valid": false + 
}, + { + "description": "null is null", + "data": null, + "valid": true + } + ] + }, + { + "description": "multiple types can be specified in an array", + "schema": {"type": ["integer", "string"]}, + "tests": [ + { + "description": "an integer is valid", + "data": 1, + "valid": true + }, + { + "description": "a string is valid", + "data": "foo", + "valid": true + }, + { + "description": "a float is invalid", + "data": 1.1, + "valid": false + }, + { + "description": "an object is invalid", + "data": {}, + "valid": false + }, + { + "description": "an array is invalid", + "data": [], + "valid": false + }, + { + "description": "a boolean is invalid", + "data": true, + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + } + ] + }, + { + "description": "type as array with one item", + "schema": { + "type": ["string"] + }, + "tests": [ + { + "description": "string is valid", + "data": "foo", + "valid": true + }, + { + "description": "number is invalid", + "data": 123, + "valid": false + } + ] + }, + { + "description": "type: array or object", + "schema": { + "type": ["array", "object"] + }, + "tests": [ + { + "description": "array is valid", + "data": [1,2,3], + "valid": true + }, + { + "description": "object is valid", + "data": {"foo": 123}, + "valid": true + }, + { + "description": "number is invalid", + "data": 123, + "valid": false + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + } + ] + }, + { + "description": "type: array, object or null", + "schema": { + "type": ["array", "object", "null"] + }, + "tests": [ + { + "description": "array is valid", + "data": [1,2,3], + "valid": true + }, + { + "description": "object is valid", + "data": {"foo": 123}, + "valid": true + }, + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "number is invalid", + "data": 123, + "valid": false + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/unevaluatedItems.json b/vendor/jsonschema/json/tests/draft2020-12/unevaluatedItems.json new file mode 100644 index 00000000..14c8d959 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/unevaluatedItems.json @@ -0,0 +1,644 @@ +[ + { + "description": "unevaluatedItems true", + "schema": { "unevaluatedItems": true }, + "tests": [ + { + "description": "with no unevaluated items", + "data": [], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo"], + "valid": true + } + ] + }, + { + "description": "unevaluatedItems false", + "schema": { "unevaluatedItems": false }, + "tests": [ + { + "description": "with no unevaluated items", + "data": [], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems as schema", + "schema": { "unevaluatedItems": { "type": "string" } }, + "tests": [ + { + "description": "with no unevaluated items", + "data": [], + "valid": true + }, + { + "description": "with valid unevaluated items", + "data": ["foo"], + "valid": true + }, + { + "description": "with invalid unevaluated items", + "data": [42], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with uniform items", + "schema": { + "items": { "type": "string" }, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "unevaluatedItems 
doesn't apply", + "data": ["foo", "bar"], + "valid": true + } + ] + }, + { + "description": "unevaluatedItems with tuple", + "schema": { + "prefixItems": [ + { "type": "string" } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": ["foo"], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo", "bar"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with items", + "schema": { + "prefixItems": [ + { "type": "string" } + ], + "items": true, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "unevaluatedItems doesn't apply", + "data": ["foo", 42], + "valid": true + } + ] + }, + { + "description": "unevaluatedItems with nested tuple", + "schema": { + "prefixItems": [ + { "type": "string" } + ], + "allOf": [ + { + "prefixItems": [ + true, + { "type": "number" } + ] + } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": ["foo", 42], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo", 42, true], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with nested items", + "schema": { + "unevaluatedItems": {"type": "boolean"}, + "anyOf": [ + { "items": {"type": "string"} }, + true + ] + }, + "tests": [ + { + "description": "with only (valid) additional items", + "data": [true, false], + "valid": true + }, + { + "description": "with no additional items", + "data": ["yes", "no"], + "valid": true + }, + { + "description": "with invalid additional item", + "data": ["yes", false], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with nested prefixItems and items", + "schema": { + "allOf": [ + { + "prefixItems": [ + { "type": "string" } + ], + "items": true + } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no additional items", + "data": ["foo"], + "valid": true + }, + { + "description": "with additional items", + "data": ["foo", 42, true], + "valid": true + } + ] + }, + { + "description": "unevaluatedItems with nested unevaluatedItems", + "schema": { + "allOf": [ + { + "prefixItems": [ + { "type": "string" } + ] + }, + { "unevaluatedItems": true } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no additional items", + "data": ["foo"], + "valid": true + }, + { + "description": "with additional items", + "data": ["foo", 42, true], + "valid": true + } + ] + }, + { + "description": "unevaluatedItems with anyOf", + "schema": { + "prefixItems": [ + { "const": "foo" } + ], + "anyOf": [ + { + "prefixItems": [ + true, + { "const": "bar" } + ] + }, + { + "prefixItems": [ + true, + true, + { "const": "baz" } + ] + } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "when one schema matches and has no unevaluated items", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "when one schema matches and has unevaluated items", + "data": ["foo", "bar", 42], + "valid": false + }, + { + "description": "when two schemas match and has no unevaluated items", + "data": ["foo", "bar", "baz"], + "valid": true + }, + { + "description": "when two schemas match and has unevaluated items", + "data": ["foo", "bar", "baz", 42], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with oneOf", + "schema": { + "prefixItems": [ + { "const": "foo" } + ], + "oneOf": [ + { + "prefixItems": [ + true, + { "const": "bar" } + ] + }, + { + "prefixItems": [ + true, + { "const": 
"baz" } + ] + } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo", "bar", 42], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with not", + "schema": { + "prefixItems": [ + { "const": "foo" } + ], + "not": { + "not": { + "prefixItems": [ + true, + { "const": "bar" } + ] + } + }, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with unevaluated items", + "data": ["foo", "bar"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with if/then/else", + "schema": { + "prefixItems": [ + { "const": "foo" } + ], + "if": { + "prefixItems": [ + true, + { "const": "bar" } + ] + }, + "then": { + "prefixItems": [ + true, + true, + { "const": "then" } + ] + }, + "else": { + "prefixItems": [ + true, + true, + true, + { "const": "else" } + ] + }, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "when if matches and it has no unevaluated items", + "data": ["foo", "bar", "then"], + "valid": true + }, + { + "description": "when if matches and it has unevaluated items", + "data": ["foo", "bar", "then", "else"], + "valid": false + }, + { + "description": "when if doesn't match and it has no unevaluated items", + "data": ["foo", 42, 42, "else"], + "valid": true + }, + { + "description": "when if doesn't match and it has unevaluated items", + "data": ["foo", 42, 42, "else", 42], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with boolean schemas", + "schema": { + "allOf": [true], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": [], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with $ref", + "schema": { + "$ref": "#/$defs/bar", + "prefixItems": [ + { "type": "string" } + ], + "unevaluatedItems": false, + "$defs": { + "bar": { + "prefixItems": [ + true, + { "type": "string" } + ] + } + } + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo", "bar", "baz"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems can't see inside cousins", + "schema": { + "allOf": [ + { + "prefixItems": [ true ] + }, + { "unevaluatedItems": false } + ] + }, + "tests": [ + { + "description": "always fails", + "data": [ 1 ], + "valid": false + } + ] + }, + { + "description": "item is evaluated in an uncle schema to unevaluatedItems", + "schema": { + "properties": { + "foo": { + "prefixItems": [ + { "type": "string" } + ], + "unevaluatedItems": false + } + }, + "anyOf": [ + { + "properties": { + "foo": { + "prefixItems": [ + true, + { "type": "string" } + ] + } + } + } + ] + }, + "tests": [ + { + "description": "no extra items", + "data": { + "foo": [ + "test" + ] + }, + "valid": true + }, + { + "description": "uncle keyword evaluation is not significant", + "data": { + "foo": [ + "test", + "test" + ] + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedItems depends on adjacent contains", + "schema": { + "prefixItems": [true], + "contains": {"type": "string"}, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "second item is evaluated by contains", + "data": [ 1, "foo" ], + "valid": true + }, + { + "description": "contains fails, second item is not 
evaluated", + "data": [ 1, 2 ], + "valid": false + }, + { + "description": "contains passes, second item is not evaluated", + "data": [ 1, 2, "foo" ], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems depends on multiple nested contains", + "schema": { + "allOf": [ + { "contains": { "multipleOf": 2 } }, + { "contains": { "multipleOf": 3 } } + ], + "unevaluatedItems": { "multipleOf": 5 } + }, + "tests": [ + { + "description": "5 not evaluated, passes unevaluatedItems", + "data": [ 2, 3, 4, 5, 6 ], + "valid": true + }, + { + "description": "7 not evaluated, fails unevaluatedItems", + "data": [ 2, 3, 4, 7, 8 ], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems and contains interact to control item dependency relationship", + "schema": { + "if": { + "contains": {"const": "a"} + }, + "then": { + "if": { + "contains": {"const": "b"} + }, + "then": { + "if": { + "contains": {"const": "c"} + } + } + }, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "empty array is valid", + "data": [], + "valid": true + }, + { + "description": "only a's are valid", + "data": [ "a", "a" ], + "valid": true + }, + { + "description": "a's and b's are valid", + "data": [ "a", "b", "a", "b", "a" ], + "valid": true + }, + { + "description": "a's, b's and c's are valid", + "data": [ "c", "a", "c", "c", "b", "a" ], + "valid": true + }, + { + "description": "only b's are invalid", + "data": [ "b", "b" ], + "valid": false + }, + { + "description": "only c's are invalid", + "data": [ "c", "c" ], + "valid": false + }, + { + "description": "only b's and c's are invalid", + "data": [ "c", "b", "c", "b", "c" ], + "valid": false + }, + { + "description": "only a's and c's are invalid", + "data": [ "c", "a", "c", "a", "c" ], + "valid": false + } + ] + }, + { + "description": "non-array instances are valid", + "schema": {"unevaluatedItems": false}, + "tests": [ + { + "description": "ignores booleans", + "data": true, + "valid": true + }, + { + "description": "ignores integers", + "data": 123, + "valid": true + }, + { + "description": "ignores floats", + "data": 1.0, + "valid": true + }, + { + "description": "ignores objects", + "data": {}, + "valid": true + }, + { + "description": "ignores strings", + "data": "foo", + "valid": true + }, + { + "description": "ignores null", + "data": null, + "valid": true + } + ] + }, + { + "description": "unevaluatedItems with null instance elements", + "schema": { + "unevaluatedItems": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/unevaluatedProperties.json b/vendor/jsonschema/json/tests/draft2020-12/unevaluatedProperties.json new file mode 100644 index 00000000..e39b21d2 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/unevaluatedProperties.json @@ -0,0 +1,1362 @@ +[ + { + "description": "unevaluatedProperties true", + "schema": { + "type": "object", + "unevaluatedProperties": true + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": {}, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + } + ] + }, + { + "description": "unevaluatedProperties schema", + "schema": { + "type": "object", + "unevaluatedProperties": { + "type": "string", + "minLength": 3 + } + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": {}, + "valid": true + }, + { + "description": "with 
valid unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with invalid unevaluated properties", + "data": { + "foo": "fo" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties false", + "schema": { + "type": "object", + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": {}, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with adjacent properties", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with adjacent patternProperties", + "schema": { + "type": "object", + "patternProperties": { + "^foo": { "type": "string" } + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with adjacent additionalProperties", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "additionalProperties": true, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no additional properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with additional properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + } + ] + }, + { + "description": "unevaluatedProperties with nested properties", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "properties": { + "bar": { "type": "string" } + } + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no additional properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "with additional properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with nested patternProperties", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "patternProperties": { + "^bar": { "type": "string" } + } + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no additional properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "with additional properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with nested additionalProperties", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "additionalProperties": true + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no additional properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with additional properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": 
true + } + ] + }, + { + "description": "unevaluatedProperties with nested unevaluatedProperties", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "unevaluatedProperties": true + } + ], + "unevaluatedProperties": { + "type": "string", + "maxLength": 2 + } + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + } + ] + }, + { + "description": "unevaluatedProperties with anyOf", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "anyOf": [ + { + "properties": { + "bar": { "const": "bar" } + }, + "required": ["bar"] + }, + { + "properties": { + "baz": { "const": "baz" } + }, + "required": ["baz"] + }, + { + "properties": { + "quux": { "const": "quux" } + }, + "required": ["quux"] + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "when one matches and has no unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "when one matches and has unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "not-baz" + }, + "valid": false + }, + { + "description": "when two match and has no unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz" + }, + "valid": true + }, + { + "description": "when two match and has unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz", + "quux": "not-quux" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with oneOf", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "oneOf": [ + { + "properties": { + "bar": { "const": "bar" } + }, + "required": ["bar"] + }, + { + "properties": { + "baz": { "const": "baz" } + }, + "required": ["baz"] + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar", + "quux": "quux" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with not", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "not": { + "not": { + "properties": { + "bar": { "const": "bar" } + }, + "required": ["bar"] + } + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with if/then/else", + "schema": { + "type": "object", + "if": { + "properties": { + "foo": { "const": "then" } + }, + "required": ["foo"] + }, + "then": { + "properties": { + "bar": { "type": "string" } + }, + "required": ["bar"] + }, + "else": { + "properties": { + "baz": { "type": "string" } + }, + "required": ["baz"] + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "when if is true and has no unevaluated properties", + "data": { + "foo": "then", + "bar": "bar" + }, + "valid": true + }, + { + "description": "when if is true and has unevaluated properties", + "data": { + "foo": "then", + "bar": "bar", + "baz": "baz" + }, + "valid": false + }, + { + "description": "when if is false and has no unevaluated 
properties", + "data": { + "baz": "baz" + }, + "valid": true + }, + { + "description": "when if is false and has unevaluated properties", + "data": { + "foo": "else", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with if/then/else, then not defined", + "schema": { + "type": "object", + "if": { + "properties": { + "foo": { "const": "then" } + }, + "required": ["foo"] + }, + "else": { + "properties": { + "baz": { "type": "string" } + }, + "required": ["baz"] + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "when if is true and has no unevaluated properties", + "data": { + "foo": "then", + "bar": "bar" + }, + "valid": false + }, + { + "description": "when if is true and has unevaluated properties", + "data": { + "foo": "then", + "bar": "bar", + "baz": "baz" + }, + "valid": false + }, + { + "description": "when if is false and has no unevaluated properties", + "data": { + "baz": "baz" + }, + "valid": true + }, + { + "description": "when if is false and has unevaluated properties", + "data": { + "foo": "else", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with if/then/else, else not defined", + "schema": { + "type": "object", + "if": { + "properties": { + "foo": { "const": "then" } + }, + "required": ["foo"] + }, + "then": { + "properties": { + "bar": { "type": "string" } + }, + "required": ["bar"] + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "when if is true and has no unevaluated properties", + "data": { + "foo": "then", + "bar": "bar" + }, + "valid": true + }, + { + "description": "when if is true and has unevaluated properties", + "data": { + "foo": "then", + "bar": "bar", + "baz": "baz" + }, + "valid": false + }, + { + "description": "when if is false and has no unevaluated properties", + "data": { + "baz": "baz" + }, + "valid": false + }, + { + "description": "when if is false and has unevaluated properties", + "data": { + "foo": "else", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with dependentSchemas", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "dependentSchemas": { + "foo": { + "properties": { + "bar": { "const": "bar" } + }, + "required": ["bar"] + } + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with boolean schemas", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [true], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with $ref", + "schema": { + "type": "object", + "$ref": "#/$defs/bar", + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": false, + "$defs": { + "bar": { + "properties": { + "bar": { "type": "string" } + } + } + } + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "with unevaluated 
properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties can't see inside cousins", + "schema": { + "allOf": [ + { + "properties": { + "foo": true + } + }, + { + "unevaluatedProperties": false + } + ] + }, + "tests": [ + { + "description": "always fails", + "data": { + "foo": 1 + }, + "valid": false + } + ] + }, + { + "description": "nested unevaluatedProperties, outer false, inner true, properties outside", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "unevaluatedProperties": true + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + } + ] + }, + { + "description": "nested unevaluatedProperties, outer false, inner true, properties inside", + "schema": { + "type": "object", + "allOf": [ + { + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": true + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + } + ] + }, + { + "description": "nested unevaluatedProperties, outer true, inner false, properties outside", + "schema": { + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "unevaluatedProperties": false + } + ], + "unevaluatedProperties": true + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": false + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "nested unevaluatedProperties, outer true, inner false, properties inside", + "schema": { + "type": "object", + "allOf": [ + { + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": false + } + ], + "unevaluatedProperties": true + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "cousin unevaluatedProperties, true and false, true with properties", + "schema": { + "type": "object", + "allOf": [ + { + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": true + }, + { + "unevaluatedProperties": false + } + ] + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": false + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "cousin unevaluatedProperties, true and false, false with properties", + "schema": { + "type": "object", + "allOf": [ + { + "unevaluatedProperties": true + }, + { + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": false + } + ] + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + 
"description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "property is evaluated in an uncle schema to unevaluatedProperties", + "comment": "see https://stackoverflow.com/questions/66936884/deeply-nested-unevaluatedproperties-and-their-expectations", + "schema": { + "type": "object", + "properties": { + "foo": { + "type": "object", + "properties": { + "bar": { + "type": "string" + } + }, + "unevaluatedProperties": false + } + }, + "anyOf": [ + { + "properties": { + "foo": { + "properties": { + "faz": { + "type": "string" + } + } + } + } + } + ] + }, + "tests": [ + { + "description": "no extra properties", + "data": { + "foo": { + "bar": "test" + } + }, + "valid": true + }, + { + "description": "uncle keyword evaluation is not significant", + "data": { + "foo": { + "bar": "test", + "faz": "test" + } + }, + "valid": false + } + ] + }, + { + "description": "in-place applicator siblings, allOf has unevaluated", + "schema": { + "type": "object", + "allOf": [ + { + "properties": { + "foo": true + }, + "unevaluatedProperties": false + } + ], + "anyOf": [ + { + "properties": { + "bar": true + } + } + ] + }, + "tests": [ + { + "description": "base case: both properties present", + "data": { + "foo": 1, + "bar": 1 + }, + "valid": false + }, + { + "description": "in place applicator siblings, bar is missing", + "data": { + "foo": 1 + }, + "valid": true + }, + { + "description": "in place applicator siblings, foo is missing", + "data": { + "bar": 1 + }, + "valid": false + } + ] + }, + { + "description": "in-place applicator siblings, anyOf has unevaluated", + "schema": { + "type": "object", + "allOf": [ + { + "properties": { + "foo": true + } + } + ], + "anyOf": [ + { + "properties": { + "bar": true + }, + "unevaluatedProperties": false + } + ] + }, + "tests": [ + { + "description": "base case: both properties present", + "data": { + "foo": 1, + "bar": 1 + }, + "valid": false + }, + { + "description": "in place applicator siblings, bar is missing", + "data": { + "foo": 1 + }, + "valid": false + }, + { + "description": "in place applicator siblings, foo is missing", + "data": { + "bar": 1 + }, + "valid": true + } + ] + }, + { + "description": "unevaluatedProperties + single cyclic ref", + "schema": { + "type": "object", + "properties": { + "x": { "$ref": "#" } + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "Empty is valid", + "data": {}, + "valid": true + }, + { + "description": "Single is valid", + "data": { "x": {} }, + "valid": true + }, + { + "description": "Unevaluated on 1st level is invalid", + "data": { "x": {}, "y": {} }, + "valid": false + }, + { + "description": "Nested is valid", + "data": { "x": { "x": {} } }, + "valid": true + }, + { + "description": "Unevaluated on 2nd level is invalid", + "data": { "x": { "x": {}, "y": {} } }, + "valid": false + }, + { + "description": "Deep nested is valid", + "data": { "x": { "x": { "x": {} } } }, + "valid": true + }, + { + "description": "Unevaluated on 3rd level is invalid", + "data": { "x": { "x": { "x": {}, "y": {} } } }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties + ref inside allOf / oneOf", + "schema": { + "$defs": { + "one": { + "properties": { "a": true } + }, + "two": { + "required": ["x"], + "properties": { "x": true } + } + }, + "allOf": [ + { "$ref": "#/$defs/one" }, + { "properties": { "b": true } }, + { + "oneOf": [ + { "$ref": "#/$defs/two" }, + { + "required": ["y"], + "properties": 
{ "y": true } + } + ] + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "Empty is invalid (no x or y)", + "data": {}, + "valid": false + }, + { + "description": "a and b are invalid (no x or y)", + "data": { "a": 1, "b": 1 }, + "valid": false + }, + { + "description": "x and y are invalid", + "data": { "x": 1, "y": 1 }, + "valid": false + }, + { + "description": "a and x are valid", + "data": { "a": 1, "x": 1 }, + "valid": true + }, + { + "description": "a and y are valid", + "data": { "a": 1, "y": 1 }, + "valid": true + }, + { + "description": "a and b and x are valid", + "data": { "a": 1, "b": 1, "x": 1 }, + "valid": true + }, + { + "description": "a and b and y are valid", + "data": { "a": 1, "b": 1, "y": 1 }, + "valid": true + }, + { + "description": "a and b and x and y are invalid", + "data": { "a": 1, "b": 1, "x": 1, "y": 1 }, + "valid": false + } + ] + }, + { + "description": "dynamic evalation inside nested refs", + "schema": { + "$defs": { + "one": { + "oneOf": [ + { "$ref": "#/$defs/two" }, + { "required": ["b"], "properties": { "b": true } }, + { "required": ["xx"], "patternProperties": { "x": true } }, + { "required": ["all"], "unevaluatedProperties": true } + ] + }, + "two": { + "oneOf": [ + { "required": ["c"], "properties": { "c": true } }, + { "required": ["d"], "properties": { "d": true } } + ] + } + }, + "oneOf": [ + { "$ref": "#/$defs/one" }, + { "required": ["a"], "properties": { "a": true } } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "Empty is invalid", + "data": {}, + "valid": false + }, + { + "description": "a is valid", + "data": { "a": 1 }, + "valid": true + }, + { + "description": "b is valid", + "data": { "b": 1 }, + "valid": true + }, + { + "description": "c is valid", + "data": { "c": 1 }, + "valid": true + }, + { + "description": "d is valid", + "data": { "d": 1 }, + "valid": true + }, + { + "description": "a + b is invalid", + "data": { "a": 1, "b": 1 }, + "valid": false + }, + { + "description": "a + c is invalid", + "data": { "a": 1, "c": 1 }, + "valid": false + }, + { + "description": "a + d is invalid", + "data": { "a": 1, "d": 1 }, + "valid": false + }, + { + "description": "b + c is invalid", + "data": { "b": 1, "c": 1 }, + "valid": false + }, + { + "description": "b + d is invalid", + "data": { "b": 1, "d": 1 }, + "valid": false + }, + { + "description": "c + d is invalid", + "data": { "c": 1, "d": 1 }, + "valid": false + }, + { + "description": "xx is valid", + "data": { "xx": 1 }, + "valid": true + }, + { + "description": "xx + foox is valid", + "data": { "xx": 1, "foox": 1 }, + "valid": true + }, + { + "description": "xx + foo is invalid", + "data": { "xx": 1, "foo": 1 }, + "valid": false + }, + { + "description": "xx + a is invalid", + "data": { "xx": 1, "a": 1 }, + "valid": false + }, + { + "description": "xx + b is invalid", + "data": { "xx": 1, "b": 1 }, + "valid": false + }, + { + "description": "xx + c is invalid", + "data": { "xx": 1, "c": 1 }, + "valid": false + }, + { + "description": "xx + d is invalid", + "data": { "xx": 1, "d": 1 }, + "valid": false + }, + { + "description": "all is valid", + "data": { "all": 1 }, + "valid": true + }, + { + "description": "all + foo is valid", + "data": { "all": 1, "foo": 1 }, + "valid": true + }, + { + "description": "all + a is invalid", + "data": { "all": 1, "a": 1 }, + "valid": false + } + ] + }, + { + "description": "non-object instances are valid", + "schema": {"unevaluatedProperties": false}, + "tests": [ + { + "description": 
"ignores booleans", + "data": true, + "valid": true + }, + { + "description": "ignores integers", + "data": 123, + "valid": true + }, + { + "description": "ignores floats", + "data": 1.0, + "valid": true + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores strings", + "data": "foo", + "valid": true + }, + { + "description": "ignores null", + "data": null, + "valid": true + } + ] + }, + { + "description": "unevaluatedProperties with null valued instance properties", + "schema": { + "unevaluatedProperties": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null valued properties", + "data": {"foo": null}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/uniqueItems.json b/vendor/jsonschema/json/tests/draft2020-12/uniqueItems.json new file mode 100644 index 00000000..85c619d9 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/uniqueItems.json @@ -0,0 +1,404 @@ +[ + { + "description": "uniqueItems validation", + "schema": {"uniqueItems": true}, + "tests": [ + { + "description": "unique array of integers is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "non-unique array of integers is invalid", + "data": [1, 1], + "valid": false + }, + { + "description": "non-unique array of more than two integers is invalid", + "data": [1, 2, 1], + "valid": false + }, + { + "description": "numbers are unique if mathematically unequal", + "data": [1.0, 1.00, 1], + "valid": false + }, + { + "description": "false is not equal to zero", + "data": [0, false], + "valid": true + }, + { + "description": "true is not equal to one", + "data": [1, true], + "valid": true + }, + { + "description": "unique array of strings is valid", + "data": ["foo", "bar", "baz"], + "valid": true + }, + { + "description": "non-unique array of strings is invalid", + "data": ["foo", "bar", "foo"], + "valid": false + }, + { + "description": "unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "baz"}], + "valid": true + }, + { + "description": "non-unique array of objects is invalid", + "data": [{"foo": "bar"}, {"foo": "bar"}], + "valid": false + }, + { + "description": "unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : false}}} + ], + "valid": true + }, + { + "description": "non-unique array of nested objects is invalid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : true}}} + ], + "valid": false + }, + { + "description": "unique array of arrays is valid", + "data": [["foo"], ["bar"]], + "valid": true + }, + { + "description": "non-unique array of arrays is invalid", + "data": [["foo"], ["foo"]], + "valid": false + }, + { + "description": "non-unique array of more than two arrays is invalid", + "data": [["foo"], ["bar"], ["foo"]], + "valid": false + }, + { + "description": "1 and true are unique", + "data": [1, true], + "valid": true + }, + { + "description": "0 and false are unique", + "data": [0, false], + "valid": true + }, + { + "description": "[1] and [true] are unique", + "data": [[1], [true]], + "valid": true + }, + { + "description": "[0] and [false] are unique", + "data": [[0], [false]], + "valid": true + }, + { + "description": "nested [1] and [true] are unique", + "data": [[[1], "foo"], [[true], "foo"]], + "valid": true + }, + { + "description": "nested [0] and [false] are unique", + "data": [[[0], "foo"], [[false], "foo"]], + "valid": true + }, + { + "description": 
"unique heterogeneous types are valid", + "data": [{}, [1], true, null, 1, "{}"], + "valid": true + }, + { + "description": "non-unique heterogeneous types are invalid", + "data": [{}, [1], true, null, {}, 1], + "valid": false + }, + { + "description": "different objects are unique", + "data": [{"a": 1, "b": 2}, {"a": 2, "b": 1}], + "valid": true + }, + { + "description": "objects are non-unique despite key order", + "data": [{"a": 1, "b": 2}, {"b": 2, "a": 1}], + "valid": false + }, + { + "description": "{\"a\": false} and {\"a\": 0} are unique", + "data": [{"a": false}, {"a": 0}], + "valid": true + }, + { + "description": "{\"a\": true} and {\"a\": 1} are unique", + "data": [{"a": true}, {"a": 1}], + "valid": true + } + ] + }, + { + "description": "uniqueItems with an array of items", + "schema": { + "prefixItems": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": true + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is not valid", + "data": [false, false], + "valid": false + }, + { + "description": "[true, true] from items array is not valid", + "data": [true, true], + "valid": false + }, + { + "description": "unique array extended from [false, true] is valid", + "data": [false, true, "foo", "bar"], + "valid": true + }, + { + "description": "unique array extended from [true, false] is valid", + "data": [true, false, "foo", "bar"], + "valid": true + }, + { + "description": "non-unique array extended from [false, true] is not valid", + "data": [false, true, "foo", "foo"], + "valid": false + }, + { + "description": "non-unique array extended from [true, false] is not valid", + "data": [true, false, "foo", "foo"], + "valid": false + } + ] + }, + { + "description": "uniqueItems with an array of items and additionalItems=false", + "schema": { + "prefixItems": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": true, + "items": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is not valid", + "data": [false, false], + "valid": false + }, + { + "description": "[true, true] from items array is not valid", + "data": [true, true], + "valid": false + }, + { + "description": "extra items are invalid even if unique", + "data": [false, true, null], + "valid": false + } + ] + }, + { + "description": "uniqueItems=false validation", + "schema": { "uniqueItems": false }, + "tests": [ + { + "description": "unique array of integers is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "non-unique array of integers is valid", + "data": [1, 1], + "valid": true + }, + { + "description": "numbers are unique if mathematically unequal", + "data": [1.0, 1.00, 1], + "valid": true + }, + { + "description": "false is not equal to zero", + "data": [0, false], + "valid": true + }, + { + "description": "true is not equal to one", + "data": [1, true], + "valid": true + }, + { + "description": "unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "baz"}], + "valid": true + }, + { + "description": "non-unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "bar"}], + 
"valid": true + }, + { + "description": "unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : false}}} + ], + "valid": true + }, + { + "description": "non-unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : true}}} + ], + "valid": true + }, + { + "description": "unique array of arrays is valid", + "data": [["foo"], ["bar"]], + "valid": true + }, + { + "description": "non-unique array of arrays is valid", + "data": [["foo"], ["foo"]], + "valid": true + }, + { + "description": "1 and true are unique", + "data": [1, true], + "valid": true + }, + { + "description": "0 and false are unique", + "data": [0, false], + "valid": true + }, + { + "description": "unique heterogeneous types are valid", + "data": [{}, [1], true, null, 1], + "valid": true + }, + { + "description": "non-unique heterogeneous types are valid", + "data": [{}, [1], true, null, {}, 1], + "valid": true + } + ] + }, + { + "description": "uniqueItems=false with an array of items", + "schema": { + "prefixItems": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is valid", + "data": [false, false], + "valid": true + }, + { + "description": "[true, true] from items array is valid", + "data": [true, true], + "valid": true + }, + { + "description": "unique array extended from [false, true] is valid", + "data": [false, true, "foo", "bar"], + "valid": true + }, + { + "description": "unique array extended from [true, false] is valid", + "data": [true, false, "foo", "bar"], + "valid": true + }, + { + "description": "non-unique array extended from [false, true] is valid", + "data": [false, true, "foo", "foo"], + "valid": true + }, + { + "description": "non-unique array extended from [true, false] is valid", + "data": [true, false, "foo", "foo"], + "valid": true + } + ] + }, + { + "description": "uniqueItems=false with an array of items and additionalItems=false", + "schema": { + "prefixItems": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": false, + "items": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is valid", + "data": [false, false], + "valid": true + }, + { + "description": "[true, true] from items array is valid", + "data": [true, true], + "valid": true + }, + { + "description": "extra items are invalid even if unique", + "data": [false, true, null], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/unknownKeyword.json b/vendor/jsonschema/json/tests/draft2020-12/unknownKeyword.json new file mode 100644 index 00000000..e46657d8 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/unknownKeyword.json @@ -0,0 +1,56 @@ +[ + { + "description": "$id inside an unknown keyword is not a real identifier", + "comment": "the implementation must not be confused by an $id in locations we do not know how to parse", + "schema": { + "$defs": { + "id_in_unknown0": { + "not": { + "array_of_schemas": [ + { + 
"$id": "https://localhost:1234/unknownKeyword/my_identifier.json", + "type": "null" + } + ] + } + }, + "real_id_in_schema": { + "$id": "https://localhost:1234/unknownKeyword/my_identifier.json", + "type": "string" + }, + "id_in_unknown1": { + "not": { + "object_of_schemas": { + "foo": { + "$id": "https://localhost:1234/unknownKeyword/my_identifier.json", + "type": "integer" + } + } + } + } + }, + "anyOf": [ + { "$ref": "#/$defs/id_in_unknown0" }, + { "$ref": "#/$defs/id_in_unknown1" }, + { "$ref": "https://localhost:1234/unknownKeyword/my_identifier.json" } + ] + }, + "tests": [ + { + "description": "type matches second anyOf, which has a real schema in it", + "data": "a string", + "valid": true + }, + { + "description": "type matches non-schema in first anyOf", + "data": null, + "valid": false + }, + { + "description": "type matches non-schema in third anyOf", + "data": 1, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft2020-12/vocabulary.json b/vendor/jsonschema/json/tests/draft2020-12/vocabulary.json new file mode 100644 index 00000000..d84f8f17 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft2020-12/vocabulary.json @@ -0,0 +1,38 @@ +[ + { + "description": "schema that uses custom metaschema with with no validation vocabulary", + "schema": { + "$id": "https://schema/using/no/validation", + "$schema": "http://localhost:1234/draft2020-12/metaschema-no-validation.json", + "properties": { + "badProperty": false, + "numberProperty": { + "minimum": 10 + } + } + }, + "tests": [ + { + "description": "applicator vocabulary still works", + "data": { + "badProperty": "this property should not exist" + }, + "valid": false + }, + { + "description": "no validation: valid number", + "data": { + "numberProperty": 20 + }, + "valid": true + }, + { + "description": "no validation: invalid number, but it still validates", + "data": { + "numberProperty": 1 + }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/additionalItems.json b/vendor/jsonschema/json/tests/draft3/additionalItems.json new file mode 100644 index 00000000..0cb66870 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/additionalItems.json @@ -0,0 +1,128 @@ +[ + { + "description": "additionalItems as schema", + "schema": { + "items": [], + "additionalItems": {"type": "integer"} + }, + "tests": [ + { + "description": "additional items match schema", + "data": [ 1, 2, 3, 4 ], + "valid": true + }, + { + "description": "additional items do not match schema", + "data": [ 1, 2, 3, "foo" ], + "valid": false + } + ] + }, + { + "description": "when items is schema, additionalItems does nothing", + "schema": { + "items": {}, + "additionalItems": false + }, + "tests": [ + { + "description": "all items match schema", + "data": [ 1, 2, 3, 4, 5 ], + "valid": true + } + ] + }, + { + "description": "array of items with no additionalItems permitted", + "schema": { + "items": [{}, {}, {}], + "additionalItems": false + }, + "tests": [ + { + "description": "empty array", + "data": [ ], + "valid": true + }, + { + "description": "fewer number of items present (1)", + "data": [ 1 ], + "valid": true + }, + { + "description": "fewer number of items present (2)", + "data": [ 1, 2 ], + "valid": true + }, + { + "description": "equal number of items present", + "data": [ 1, 2, 3 ], + "valid": true + }, + { + "description": "additional items are not permitted", + "data": [ 1, 2, 3, 4 ], + "valid": false + } + ] + }, + { + "description": "additionalItems as false without items", + "schema": 
{"additionalItems": false}, + "tests": [ + { + "description": + "items defaults to empty schema so everything is valid", + "data": [ 1, 2, 3, 4, 5 ], + "valid": true + }, + { + "description": "ignores non-arrays", + "data": {"foo" : "bar"}, + "valid": true + } + ] + }, + { + "description": "additionalItems are allowed by default", + "schema": {"items": [{"type": "integer"}]}, + "tests": [ + { + "description": "only the first item is validated", + "data": [1, "foo", false], + "valid": true + } + ] + }, + { + "description": "additionalItems does not look in applicators", + "schema": { + "extends": [ + { "items": [ { "type": "integer" } ] } + ], + "additionalItems": { "type": "boolean" } + }, + "tests": [ + { + "description": "items defined in extends are not examined", + "data": [ 1, null ], + "valid": true + } + ] + }, + { + "description": "additionalItems with null instance elements", + "schema": { + "additionalItems": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/additionalProperties.json b/vendor/jsonschema/json/tests/draft3/additionalProperties.json new file mode 100644 index 00000000..af7bfc6f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/additionalProperties.json @@ -0,0 +1,147 @@ +[ + { + "description": + "additionalProperties being false does not allow other properties", + "schema": { + "properties": {"foo": {}, "bar": {}}, + "patternProperties": { "^v": {} }, + "additionalProperties": false + }, + "tests": [ + { + "description": "no additional properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "an additional property is invalid", + "data": {"foo" : 1, "bar" : 2, "quux" : "boom"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [1, 2, 3], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobarbaz", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + }, + { + "description": "patternProperties are not additional properties", + "data": {"foo":1, "vroom": 2}, + "valid": true + } + ] + }, + { + "description": "non-ASCII pattern with additionalProperties", + "schema": { + "patternProperties": {"^á": {}}, + "additionalProperties": false + }, + "tests": [ + { + "description": "matching the pattern is valid", + "data": {"ármányos": 2}, + "valid": true + }, + { + "description": "not matching the pattern is invalid", + "data": {"élmény": 2}, + "valid": false + } + ] + }, + { + "description": "additionalProperties with schema", + "schema": { + "properties": {"foo": {}, "bar": {}}, + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "no additional properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "an additional valid property is valid", + "data": {"foo" : 1, "bar" : 2, "quux" : true}, + "valid": true + }, + { + "description": "an additional invalid property is invalid", + "data": {"foo" : 1, "bar" : 2, "quux" : 12}, + "valid": false + } + ] + }, + { + "description": + "additionalProperties can exist by itself", + "schema": { + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "an additional valid property is valid", + "data": {"foo" : true}, + "valid": true + }, + { + "description": "an additional invalid property is invalid", + "data": {"foo" : 1}, + "valid": false + } + ] + }, + { + "description": 
"additionalProperties are allowed by default", + "schema": {"properties": {"foo": {}, "bar": {}}}, + "tests": [ + { + "description": "additional properties are allowed", + "data": {"foo": 1, "bar": 2, "quux": true}, + "valid": true + } + ] + }, + { + "description": "additionalProperties does not look in applicators", + "schema": { + "extends": [ + {"properties": {"foo": {}}} + ], + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "properties defined in extends are not examined", + "data": {"foo": 1, "bar": true}, + "valid": false + } + ] + }, + { + "description": "additionalProperties with null valued instance properties", + "schema": { + "additionalProperties": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foo": null}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/default.json b/vendor/jsonschema/json/tests/draft3/default.json new file mode 100644 index 00000000..289a9b66 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/default.json @@ -0,0 +1,79 @@ +[ + { + "description": "invalid type for default", + "schema": { + "properties": { + "foo": { + "type": "integer", + "default": [] + } + } + }, + "tests": [ + { + "description": "valid when property is specified", + "data": {"foo": 13}, + "valid": true + }, + { + "description": "still valid when the invalid default is used", + "data": {}, + "valid": true + } + ] + }, + { + "description": "invalid string value for default", + "schema": { + "properties": { + "bar": { + "type": "string", + "minLength": 4, + "default": "bad" + } + } + }, + "tests": [ + { + "description": "valid when property is specified", + "data": {"bar": "good"}, + "valid": true + }, + { + "description": "still valid when the invalid default is used", + "data": {}, + "valid": true + } + ] + }, + { + "description": "the default keyword does not do anything if the property is missing", + "schema": { + "type": "object", + "properties": { + "alpha": { + "type": "number", + "maximum": 3, + "default": 5 + } + } + }, + "tests": [ + { + "description": "an explicit property value is checked against maximum (passing)", + "data": { "alpha": 1 }, + "valid": true + }, + { + "description": "an explicit property value is checked against maximum (failing)", + "data": { "alpha": 5 }, + "valid": false + }, + { + "description": "missing properties are not filled in with the default", + "data": {}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/dependencies.json b/vendor/jsonschema/json/tests/draft3/dependencies.json new file mode 100644 index 00000000..0ffa6bf4 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/dependencies.json @@ -0,0 +1,123 @@ +[ + { + "description": "dependencies", + "schema": { + "dependencies": {"bar": "foo"} + }, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependant", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "with dependency", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["bar"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "multiple dependencies", + "schema": { + "dependencies": {"quux": ["foo", "bar"]} + }, + "tests": 
[ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependants", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "with dependencies", + "data": {"foo": 1, "bar": 2, "quux": 3}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"foo": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing other dependency", + "data": {"bar": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing both dependencies", + "data": {"quux": 1}, + "valid": false + } + ] + }, + { + "description": "multiple dependencies subschema", + "schema": { + "dependencies": { + "bar": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "integer"} + } + } + } + }, + "tests": [ + { + "description": "valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "no dependency", + "data": {"foo": "quux"}, + "valid": true + }, + { + "description": "wrong type", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "wrong type other", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + }, + { + "description": "wrong type both", + "data": {"foo": "quux", "bar": "quux"}, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/disallow.json b/vendor/jsonschema/json/tests/draft3/disallow.json new file mode 100644 index 00000000..a5c9d90c --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/disallow.json @@ -0,0 +1,80 @@ +[ + { + "description": "disallow", + "schema": { + "disallow": "integer" + }, + "tests": [ + { + "description": "allowed", + "data": "foo", + "valid": true + }, + { + "description": "disallowed", + "data": 1, + "valid": false + } + ] + }, + { + "description": "multiple disallow", + "schema": { + "disallow": ["integer", "boolean"] + }, + "tests": [ + { + "description": "valid", + "data": "foo", + "valid": true + }, + { + "description": "mismatch", + "data": 1, + "valid": false + }, + { + "description": "other mismatch", + "data": true, + "valid": false + } + ] + }, + { + "description": "multiple disallow subschema", + "schema": { + "disallow": + ["string", + { + "type": "object", + "properties": { + "foo": { + "type": "string" + } + } + }] + }, + "tests": [ + { + "description": "match", + "data": 1, + "valid": true + }, + { + "description": "other match", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "mismatch", + "data": "foo", + "valid": false + }, + { + "description": "other mismatch", + "data": {"foo": "bar"}, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/divisibleBy.json b/vendor/jsonschema/json/tests/draft3/divisibleBy.json new file mode 100644 index 00000000..ef7cc148 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/divisibleBy.json @@ -0,0 +1,60 @@ +[ + { + "description": "by int", + "schema": {"divisibleBy": 2}, + "tests": [ + { + "description": "int by int", + "data": 10, + "valid": true + }, + { + "description": "int by int fail", + "data": 7, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "by number", + "schema": {"divisibleBy": 1.5}, + "tests": [ + { + "description": "zero is divisible by anything (except 0)", + "data": 0, + "valid": true + }, + { + "description": "4.5 is divisible by 1.5", + "data": 4.5, + "valid": true + }, + { + "description": "35 is not divisible by 1.5", + "data": 35, + "valid": false + } + ] + }, + { + "description": "by small number", 
+ "schema": {"divisibleBy": 0.0001}, + "tests": [ + { + "description": "0.0075 is divisible by 0.0001", + "data": 0.0075, + "valid": true + }, + { + "description": "0.00751 is not divisible by 0.0001", + "data": 0.00751, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/enum.json b/vendor/jsonschema/json/tests/draft3/enum.json new file mode 100644 index 00000000..5a1ab3b6 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/enum.json @@ -0,0 +1,118 @@ +[ + { + "description": "simple enum validation", + "schema": {"enum": [1, 2, 3]}, + "tests": [ + { + "description": "one of the enum is valid", + "data": 1, + "valid": true + }, + { + "description": "something else is invalid", + "data": 4, + "valid": false + } + ] + }, + { + "description": "heterogeneous enum validation", + "schema": {"enum": [6, "foo", [], true, {"foo": 12}]}, + "tests": [ + { + "description": "one of the enum is valid", + "data": [], + "valid": true + }, + { + "description": "something else is invalid", + "data": null, + "valid": false + }, + { + "description": "objects are deep compared", + "data": {"foo": false}, + "valid": false + } + ] + }, + { + "description": "heterogeneous enum-with-null validation", + "schema": { "enum": [6, null] }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "number is valid", + "data": 6, + "valid": true + }, + { + "description": "something else is invalid", + "data": "test", + "valid": false + } + ] + }, + { + "description": "enums in properties", + "schema": { + "type":"object", + "properties": { + "foo": {"enum":["foo"]}, + "bar": {"enum":["bar"], "required":true} + } + }, + "tests": [ + { + "description": "both properties are valid", + "data": {"foo":"foo", "bar":"bar"}, + "valid": true + }, + { + "description": "wrong foo value", + "data": {"foo":"foot", "bar":"bar"}, + "valid": false + }, + { + "description": "wrong bar value", + "data": {"foo":"foo", "bar":"bart"}, + "valid": false + }, + { + "description": "missing optional property is valid", + "data": {"bar":"bar"}, + "valid": true + }, + { + "description": "missing required property is invalid", + "data": {"foo":"foo"}, + "valid": false + }, + { + "description": "missing all properties is invalid", + "data": {}, + "valid": false + } + ] + }, + { + "description": "nul characters in strings", + "schema": { "enum": [ "hello\u0000there" ] }, + "tests": [ + { + "description": "match string with nul", + "data": "hello\u0000there", + "valid": true + }, + { + "description": "do not match string lacking nul", + "data": "hellothere", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/extends.json b/vendor/jsonschema/json/tests/draft3/extends.json new file mode 100644 index 00000000..909bce57 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/extends.json @@ -0,0 +1,94 @@ +[ + { + "description": "extends", + "schema": { + "properties": {"bar": {"type": "integer", "required": true}}, + "extends": { + "properties": { + "foo": {"type": "string", "required": true} + } + } + }, + "tests": [ + { + "description": "extends", + "data": {"foo": "baz", "bar": 2}, + "valid": true + }, + { + "description": "mismatch extends", + "data": {"foo": "baz"}, + "valid": false + }, + { + "description": "mismatch extended", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "wrong type", + "data": {"foo": "baz", "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "multiple extends", + "schema": { + 
"properties": {"bar": {"type": "integer", "required": true}}, + "extends" : [ + { + "properties": { + "foo": {"type": "string", "required": true} + } + }, + { + "properties": { + "baz": {"type": "null", "required": true} + } + } + ] + }, + "tests": [ + { + "description": "valid", + "data": {"foo": "quux", "bar": 2, "baz": null}, + "valid": true + }, + { + "description": "mismatch first extends", + "data": {"bar": 2, "baz": null}, + "valid": false + }, + { + "description": "mismatch second extends", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "mismatch both", + "data": {"bar": 2}, + "valid": false + } + ] + }, + { + "description": "extends simple types", + "schema": { + "minimum": 20, + "extends": {"maximum": 30} + }, + "tests": [ + { + "description": "valid", + "data": 25, + "valid": true + }, + { + "description": "mismatch extends", + "data": 35, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/format.json b/vendor/jsonschema/json/tests/draft3/format.json new file mode 100644 index 00000000..a5447c90 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/format.json @@ -0,0 +1,362 @@ +[ + { + "description": "email format", + "schema": { "format": "email" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "ip-address format", + "schema": { "format": "ip-address" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "ipv6 format", + "schema": { "format": "ipv6" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "host-name format", + "schema": { "format": "host-name" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all 
string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "date-time format", + "schema": { "format": "date-time" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "regex format", + "schema": { "format": "regex" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "date format", + "schema": { "format": "date" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "time format", + "schema": { "format": "time" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "color format", + "schema": { "format": "color" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + 
"description": "uri format", + "schema": { "format": "uri" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/infinite-loop-detection.json b/vendor/jsonschema/json/tests/draft3/infinite-loop-detection.json new file mode 100644 index 00000000..090f49a0 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/infinite-loop-detection.json @@ -0,0 +1,32 @@ +[ + { + "description": "evaluating the same schema location against the same data location twice is not a sign of an infinite loop", + "schema": { + "definitions": { + "int": { "type": "integer" } + }, + "properties": { + "foo": { + "$ref": "#/definitions/int" + } + }, + "extends": { + "additionalProperties": { + "$ref": "#/definitions/int" + } + } + }, + "tests": [ + { + "description": "passing case", + "data": { "foo": 1 }, + "valid": true + }, + { + "description": "failing case", + "data": { "foo": "a string" }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/items.json b/vendor/jsonschema/json/tests/draft3/items.json new file mode 100644 index 00000000..e8bda222 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/items.json @@ -0,0 +1,78 @@ +[ + { + "description": "a schema given for items", + "schema": { + "items": {"type": "integer"} + }, + "tests": [ + { + "description": "valid items", + "data": [ 1, 2, 3 ], + "valid": true + }, + { + "description": "wrong type of items", + "data": [1, "x"], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": {"foo" : "bar"}, + "valid": true + } + ] + }, + { + "description": "an array of schemas for items", + "schema": { + "items": [ + {"type": "integer"}, + {"type": "string"} + ] + }, + "tests": [ + { + "description": "correct types", + "data": [ 1, "foo" ], + "valid": true + }, + { + "description": "wrong types", + "data": [ "foo", 1 ], + "valid": false + } + ] + }, + { + "description": "items with null instance elements", + "schema": { + "items": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + }, + { + "description": "array-form items with null instance elements", + "schema": { + "items": [ + { + "type": "null" + } + ] + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/maxItems.json b/vendor/jsonschema/json/tests/draft3/maxItems.json new file mode 100644 index 00000000..3b53a6b3 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/maxItems.json @@ -0,0 +1,28 @@ +[ + { + "description": "maxItems validation", + "schema": {"maxItems": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": [1], + "valid": true + }, + { + "description": "exact length is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "too long is invalid", + "data": [1, 2, 3], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": "foobar", + "valid": 
true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/maxLength.json b/vendor/jsonschema/json/tests/draft3/maxLength.json new file mode 100644 index 00000000..4de42bca --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/maxLength.json @@ -0,0 +1,33 @@ +[ + { + "description": "maxLength validation", + "schema": {"maxLength": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": "f", + "valid": true + }, + { + "description": "exact length is valid", + "data": "fo", + "valid": true + }, + { + "description": "too long is invalid", + "data": "foo", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 10, + "valid": true + }, + { + "description": "two supplementary Unicode code points is long enough", + "data": "\uD83D\uDCA9\uD83D\uDCA9", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/maximum.json b/vendor/jsonschema/json/tests/draft3/maximum.json new file mode 100644 index 00000000..ccb79c6c --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/maximum.json @@ -0,0 +1,99 @@ +[ + { + "description": "maximum validation", + "schema": {"maximum": 3.0}, + "tests": [ + { + "description": "below the maximum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "boundary point is valid", + "data": 3.0, + "valid": true + }, + { + "description": "above the maximum is invalid", + "data": 3.5, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "maximum validation with unsigned integer", + "schema": {"maximum": 300}, + "tests": [ + { + "description": "below the maximum is valid", + "data": 299.97, + "valid": true + }, + { + "description": "boundary point integer is valid", + "data": 300, + "valid": true + }, + { + "description": "boundary point float is valid", + "data": 300.00, + "valid": true + }, + { + "description": "above the maximum is invalid", + "data": 300.5, + "valid": false + } + ] + }, + { + "description": "maximum validation (explicit false exclusivity)", + "schema": {"maximum": 3.0, "exclusiveMaximum": false}, + "tests": [ + { + "description": "below the maximum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "boundary point is valid", + "data": 3.0, + "valid": true + }, + { + "description": "above the maximum is invalid", + "data": 3.5, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "exclusiveMaximum validation", + "schema": { + "maximum": 3.0, + "exclusiveMaximum": true + }, + "tests": [ + { + "description": "below the maximum is still valid", + "data": 2.2, + "valid": true + }, + { + "description": "boundary point is invalid", + "data": 3.0, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/minItems.json b/vendor/jsonschema/json/tests/draft3/minItems.json new file mode 100644 index 00000000..ed511881 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/minItems.json @@ -0,0 +1,28 @@ +[ + { + "description": "minItems validation", + "schema": {"minItems": 1}, + "tests": [ + { + "description": "longer is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "exact length is valid", + "data": [1], + "valid": true + }, + { + "description": "too short is invalid", + "data": [], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": "", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/minLength.json 
b/vendor/jsonschema/json/tests/draft3/minLength.json new file mode 100644 index 00000000..3f09158d --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/minLength.json @@ -0,0 +1,33 @@ +[ + { + "description": "minLength validation", + "schema": {"minLength": 2}, + "tests": [ + { + "description": "longer is valid", + "data": "foo", + "valid": true + }, + { + "description": "exact length is valid", + "data": "fo", + "valid": true + }, + { + "description": "too short is invalid", + "data": "f", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 1, + "valid": true + }, + { + "description": "one supplementary Unicode code point is not long enough", + "data": "\uD83D\uDCA9", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/minimum.json b/vendor/jsonschema/json/tests/draft3/minimum.json new file mode 100644 index 00000000..d579536e --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/minimum.json @@ -0,0 +1,88 @@ +[ + { + "description": "minimum validation", + "schema": {"minimum": 1.1}, + "tests": [ + { + "description": "above the minimum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "boundary point is valid", + "data": 1.1, + "valid": true + }, + { + "description": "below the minimum is invalid", + "data": 0.6, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "exclusiveMinimum validation", + "schema": { + "minimum": 1.1, + "exclusiveMinimum": true + }, + "tests": [ + { + "description": "above the minimum is still valid", + "data": 1.2, + "valid": true + }, + { + "description": "boundary point is invalid", + "data": 1.1, + "valid": false + } + ] + }, + { + "description": "minimum validation with signed integer", + "schema": {"minimum": -2}, + "tests": [ + { + "description": "negative above the minimum is valid", + "data": -1, + "valid": true + }, + { + "description": "positive above the minimum is valid", + "data": 0, + "valid": true + }, + { + "description": "boundary point is valid", + "data": -2, + "valid": true + }, + { + "description": "boundary point with float is valid", + "data": -2.0, + "valid": true + }, + { + "description": "float below the minimum is invalid", + "data": -2.0001, + "valid": false + }, + { + "description": "int below the minimum is invalid", + "data": -3, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/optional/bignum.json b/vendor/jsonschema/json/tests/draft3/optional/bignum.json new file mode 100644 index 00000000..1bc8eb21 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/optional/bignum.json @@ -0,0 +1,95 @@ +[ + { + "description": "integer", + "schema": { "type": "integer" }, + "tests": [ + { + "description": "a bignum is an integer", + "data": 12345678910111213141516171819202122232425262728293031, + "valid": true + }, + { + "description": "a negative bignum is an integer", + "data": -12345678910111213141516171819202122232425262728293031, + "valid": true + } + ] + }, + { + "description": "number", + "schema": { "type": "number" }, + "tests": [ + { + "description": "a bignum is a number", + "data": 98249283749234923498293171823948729348710298301928331, + "valid": true + }, + { + "description": "a negative bignum is a number", + "data": -98249283749234923498293171823948729348710298301928331, + "valid": true + } + ] + }, + { + "description": "string", + "schema": { "type": "string" }, + 
"tests": [ + { + "description": "a bignum is not a string", + "data": 98249283749234923498293171823948729348710298301928331, + "valid": false + } + ] + }, + { + "description": "maximum integer comparison", + "schema": { "maximum": 18446744073709551615 }, + "tests": [ + { + "description": "comparison works for high numbers", + "data": 18446744073709551600, + "valid": true + } + ] + }, + { + "description": "float comparison with high precision", + "schema": { + "maximum": 972783798187987123879878123.18878137, + "exclusiveMaximum": true + }, + "tests": [ + { + "description": "comparison works for high numbers", + "data": 972783798187987123879878123.188781371, + "valid": false + } + ] + }, + { + "description": "minimum integer comparison", + "schema": { "minimum": -18446744073709551615 }, + "tests": [ + { + "description": "comparison works for very negative numbers", + "data": -18446744073709551600, + "valid": true + } + ] + }, + { + "description": "float comparison with high precision on negative numbers", + "schema": { + "minimum": -972783798187987123879878123.18878137, + "exclusiveMinimum": true + }, + "tests": [ + { + "description": "comparison works for very negative numbers", + "data": -972783798187987123879878123.188781371, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/optional/ecmascript-regex.json b/vendor/jsonschema/json/tests/draft3/optional/ecmascript-regex.json new file mode 100644 index 00000000..03fe9772 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/optional/ecmascript-regex.json @@ -0,0 +1,18 @@ +[ + { + "description": "ECMA 262 regex dialect recognition", + "schema": { "format": "regex" }, + "tests": [ + { + "description": "[^] is a valid regex", + "data": "[^]", + "valid": true + }, + { + "description": "ECMA 262 has no support for lookbehind", + "data": "(?<=foo)bar", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/optional/format/color.json b/vendor/jsonschema/json/tests/draft3/optional/format/color.json new file mode 100644 index 00000000..0c0b5348 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/optional/format/color.json @@ -0,0 +1,38 @@ +[ + { + "description": "validation of CSS colors", + "schema": { "format": "color" }, + "tests": [ + { + "description": "a valid CSS color name", + "data": "fuchsia", + "valid": true + }, + { + "description": "a valid six-digit CSS color code", + "data": "#CC8899", + "valid": true + }, + { + "description": "a valid three-digit CSS color code", + "data": "#C89", + "valid": true + }, + { + "description": "an invalid CSS color code", + "data": "#00332520", + "valid": false + }, + { + "description": "an invalid CSS color name", + "data": "puce", + "valid": false + }, + { + "description": "a CSS color name containing invalid characters", + "data": "light_grayish_red-violet", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/optional/format/date-time.json b/vendor/jsonschema/json/tests/draft3/optional/format/date-time.json new file mode 100644 index 00000000..1f1e6fb3 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/optional/format/date-time.json @@ -0,0 +1,38 @@ +[ + { + "description": "validation of date-time strings", + "schema": { "format": "date-time" }, + "tests": [ + { + "description": "a valid date-time string", + "data": "1963-06-19T08:30:06.283185Z", + "valid": true + }, + { + "description": "an invalid date-time string", + "data": "06/19/1963 08:30:06 PST", + "valid": false + }, + { + "description": 
"case-insensitive T and Z", + "data": "1963-06-19t08:30:06.283185z", + "valid": true + }, + { + "description": "only RFC3339 not all of ISO 8601 are valid", + "data": "2013-350T01:01:01", + "valid": false + }, + { + "description": "invalid non-padded month dates", + "data": "1963-6-19T08:30:06.283185Z", + "valid": false + }, + { + "description": "invalid non-padded day dates", + "data": "1963-06-1T08:30:06.283185Z", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/optional/format/date.json b/vendor/jsonschema/json/tests/draft3/optional/format/date.json new file mode 100644 index 00000000..796bc463 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/optional/format/date.json @@ -0,0 +1,168 @@ +[ + { + "description": "validation of date strings", + "schema": { "format": "date" }, + "tests": [ + { + "description": "a valid date string", + "data": "1963-06-19", + "valid": true + }, + { + "description": "a valid date string with 31 days in January", + "data": "2020-01-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in January", + "data": "2020-01-32", + "valid": false + }, + { + "description": "a valid date string with 28 days in February (normal)", + "data": "2021-02-28", + "valid": true + }, + { + "description": "a invalid date string with 29 days in February (normal)", + "data": "2021-02-29", + "valid": false + }, + { + "description": "a valid date string with 29 days in February (leap)", + "data": "2020-02-29", + "valid": true + }, + { + "description": "a invalid date string with 30 days in February (leap)", + "data": "2020-02-30", + "valid": false + }, + { + "description": "a valid date string with 31 days in March", + "data": "2020-03-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in March", + "data": "2020-03-32", + "valid": false + }, + { + "description": "a valid date string with 30 days in April", + "data": "2020-04-30", + "valid": true + }, + { + "description": "a invalid date string with 31 days in April", + "data": "2020-04-31", + "valid": false + }, + { + "description": "a valid date string with 31 days in May", + "data": "2020-05-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in May", + "data": "2020-05-32", + "valid": false + }, + { + "description": "a valid date string with 30 days in June", + "data": "2020-06-30", + "valid": true + }, + { + "description": "a invalid date string with 31 days in June", + "data": "2020-06-31", + "valid": false + }, + { + "description": "a valid date string with 31 days in July", + "data": "2020-07-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in July", + "data": "2020-07-32", + "valid": false + }, + { + "description": "a valid date string with 31 days in August", + "data": "2020-08-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in August", + "data": "2020-08-32", + "valid": false + }, + { + "description": "a valid date string with 30 days in September", + "data": "2020-09-30", + "valid": true + }, + { + "description": "a invalid date string with 31 days in September", + "data": "2020-09-31", + "valid": false + }, + { + "description": "a valid date string with 31 days in October", + "data": "2020-10-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in October", + "data": "2020-10-32", + "valid": false + }, + { + "description": "a valid date string with 30 days in November", + "data": "2020-11-30", + 
"valid": true + }, + { + "description": "a invalid date string with 31 days in November", + "data": "2020-11-31", + "valid": false + }, + { + "description": "a valid date string with 31 days in December", + "data": "2020-12-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in December", + "data": "2020-12-32", + "valid": false + }, + { + "description": "a invalid date string with invalid month", + "data": "2020-13-01", + "valid": false + }, + { + "description": "an invalid date string", + "data": "06/19/1963", + "valid": false + }, + { + "description": "only RFC3339 not all of ISO 8601 are valid", + "data": "2013-350", + "valid": false + }, + { + "description": "invalidates non-padded month dates", + "data": "1998-1-20", + "valid": false + }, + { + "description": "invalidates non-padded day dates", + "data": "1998-01-1", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/optional/format/email.json b/vendor/jsonschema/json/tests/draft3/optional/format/email.json new file mode 100644 index 00000000..059615ad --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/optional/format/email.json @@ -0,0 +1,53 @@ +[ + { + "description": "validation of e-mail addresses", + "schema": { "format": "email" }, + "tests": [ + { + "description": "a valid e-mail address", + "data": "joe.bloggs@example.com", + "valid": true + }, + { + "description": "an invalid e-mail address", + "data": "2962", + "valid": false + }, + { + "description": "tilde in local part is valid", + "data": "te~st@example.com", + "valid": true + }, + { + "description": "tilde before local part is valid", + "data": "~test@example.com", + "valid": true + }, + { + "description": "tilde after local part is valid", + "data": "test~@example.com", + "valid": true + }, + { + "description": "dot before local part is not valid", + "data": ".test@example.com", + "valid": false + }, + { + "description": "dot after local part is not valid", + "data": "test.@example.com", + "valid": false + }, + { + "description": "two separated dots inside local part are valid", + "data": "te.s.t@example.com", + "valid": true + }, + { + "description": "two subsequent dots inside local part are not valid", + "data": "te..st@example.com", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/optional/format/host-name.json b/vendor/jsonschema/json/tests/draft3/optional/format/host-name.json new file mode 100644 index 00000000..d418f376 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/optional/format/host-name.json @@ -0,0 +1,63 @@ +[ + { + "description": "validation of host names", + "schema": { "format": "host-name" }, + "tests": [ + { + "description": "a valid host name", + "data": "www.example.com", + "valid": true + }, + { + "description": "a host name starting with an illegal character", + "data": "-a-host-name-that-starts-with--", + "valid": false + }, + { + "description": "a host name containing illegal characters", + "data": "not_a_valid_host_name", + "valid": false + }, + { + "description": "a host name with a component too long", + "data": "a-vvvvvvvvvvvvvvvveeeeeeeeeeeeeeeerrrrrrrrrrrrrrrryyyyyyyyyyyyyyyy-long-host-name-component", + "valid": false + }, + { + "description": "starts with hyphen", + "data": "-hostname", + "valid": false + }, + { + "description": "ends with hyphen", + "data": "hostname-", + "valid": false + }, + { + "description": "starts with underscore", + "data": "_hostname", + "valid": false + }, + { + "description": "ends with underscore", + 
"data": "hostname_", + "valid": false + }, + { + "description": "contains underscore", + "data": "host_name", + "valid": false + }, + { + "description": "maximum label length", + "data": "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.com", + "valid": true + }, + { + "description": "exceeds maximum label length", + "data": "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijkl.com", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/optional/format/ip-address.json b/vendor/jsonschema/json/tests/draft3/optional/format/ip-address.json new file mode 100644 index 00000000..91cac9fa --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/optional/format/ip-address.json @@ -0,0 +1,23 @@ +[ + { + "description": "validation of IP addresses", + "schema": { "format": "ip-address" }, + "tests": [ + { + "description": "a valid IP address", + "data": "192.168.0.1", + "valid": true + }, + { + "description": "an IP address with too many components", + "data": "127.0.0.0.1", + "valid": false + }, + { + "description": "an IP address with out-of-range values", + "data": "256.256.256.256", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/optional/format/ipv6.json b/vendor/jsonschema/json/tests/draft3/optional/format/ipv6.json new file mode 100644 index 00000000..c3ef3790 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/optional/format/ipv6.json @@ -0,0 +1,68 @@ +[ + { + "description": "validation of IPv6 addresses", + "schema": { "format": "ipv6" }, + "tests": [ + { + "description": "a valid IPv6 address", + "data": "::1", + "valid": true + }, + { + "description": "an IPv6 address with out-of-range values", + "data": "12345::", + "valid": false + }, + { + "description": "an IPv6 address with too many components", + "data": "1:1:1:1:1:1:1:1:1:1:1:1:1:1:1:1", + "valid": false + }, + { + "description": "an IPv6 address containing illegal characters", + "data": "::laptop", + "valid": false + }, + { + "description": "no digits is valid", + "data": "::", + "valid": true + }, + { + "description": "leading colons is valid", + "data": "::1", + "valid": true + }, + { + "description": "trailing colons is valid", + "data": "d6::", + "valid": true + }, + { + "description": "two sets of double colons is invalid", + "data": "1::d6::42", + "valid": false + }, + { + "description": "mixed format with the ipv4 section as decimal octets", + "data": "1::d6:192.168.0.1", + "valid": true + }, + { + "description": "mixed format with double colons between the sections", + "data": "1:2::192.168.0.1", + "valid": true + }, + { + "description": "mixed format with ipv4 section with octet out of range", + "data": "1::2:192.168.256.1", + "valid": false + }, + { + "description": "mixed format with ipv4 section with a hex octet", + "data": "1::2:192.168.ff.1", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/optional/format/regex.json b/vendor/jsonschema/json/tests/draft3/optional/format/regex.json new file mode 100644 index 00000000..8a377638 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/optional/format/regex.json @@ -0,0 +1,18 @@ +[ + { + "description": "validation of regular expressions", + "schema": { "format": "regex" }, + "tests": [ + { + "description": "a valid regular expression", + "data": "([abc])+\\s+$", + "valid": true + }, + { + "description": "a regular expression with unclosed parens is invalid", + "data": "^(abc]", + "valid": false + } + ] + } +] diff --git 
a/vendor/jsonschema/json/tests/draft3/optional/format/time.json b/vendor/jsonschema/json/tests/draft3/optional/format/time.json new file mode 100644 index 00000000..36c823e6 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/optional/format/time.json @@ -0,0 +1,18 @@ +[ + { + "description": "validation of time strings", + "schema": { "format": "time" }, + "tests": [ + { + "description": "a valid time string", + "data": "08:30:06", + "valid": true + }, + { + "description": "an invalid time string", + "data": "8:30 AM", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/optional/format/uri.json b/vendor/jsonschema/json/tests/draft3/optional/format/uri.json new file mode 100644 index 00000000..f024b624 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/optional/format/uri.json @@ -0,0 +1,28 @@ +[ + { + "description": "validation of URIs", + "schema": { "format": "uri" }, + "tests": [ + { + "description": "a valid URI", + "data": "http://foo.bar/?baz=qux#quux", + "valid": true + }, + { + "description": "an invalid protocol-relative URI Reference", + "data": "//foo.bar/?baz=qux#quux", + "valid": false + }, + { + "description": "an invalid URI", + "data": "\\\\WINDOWS\\fileshare", + "valid": false + }, + { + "description": "an invalid URI though valid URI reference", + "data": "abc", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/optional/non-bmp-regex.json b/vendor/jsonschema/json/tests/draft3/optional/non-bmp-regex.json new file mode 100644 index 00000000..dd67af2b --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/optional/non-bmp-regex.json @@ -0,0 +1,82 @@ +[ + { + "description": "Proper UTF-16 surrogate pair handling: pattern", + "comment": "Optional because .Net doesn't correctly handle 32-bit Unicode characters", + "schema": { "pattern": "^🐲*$" }, + "tests": [ + { + "description": "matches empty", + "data": "", + "valid": true + }, + { + "description": "matches single", + "data": "🐲", + "valid": true + }, + { + "description": "matches two", + "data": "🐲🐲", + "valid": true + }, + { + "description": "doesn't match one", + "data": "🐉", + "valid": false + }, + { + "description": "doesn't match two", + "data": "🐉🐉", + "valid": false + }, + { + "description": "doesn't match one ASCII", + "data": "D", + "valid": false + }, + { + "description": "doesn't match two ASCII", + "data": "DD", + "valid": false + } + ] + }, + { + "description": "Proper UTF-16 surrogate pair handling: patternProperties", + "comment": "Optional because .Net doesn't correctly handle 32-bit Unicode characters", + "schema": { + "patternProperties": { + "^🐲*$": { + "type": "integer" + } + } + }, + "tests": [ + { + "description": "matches empty", + "data": { "": 1 }, + "valid": true + }, + { + "description": "matches single", + "data": { "🐲": 1 }, + "valid": true + }, + { + "description": "matches two", + "data": { "🐲🐲": 1 }, + "valid": true + }, + { + "description": "doesn't match one", + "data": { "🐲": "hello" }, + "valid": false + }, + { + "description": "doesn't match two", + "data": { "🐲🐲": "hello" }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/optional/zeroTerminatedFloats.json b/vendor/jsonschema/json/tests/draft3/optional/zeroTerminatedFloats.json new file mode 100644 index 00000000..9b50ea27 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/optional/zeroTerminatedFloats.json @@ -0,0 +1,15 @@ +[ + { + "description": "some languages do not distinguish between different
types of numeric value", + "schema": { + "type": "integer" + }, + "tests": [ + { + "description": "a float is not an integer even without fractional part", + "data": 1.0, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/pattern.json b/vendor/jsonschema/json/tests/draft3/pattern.json new file mode 100644 index 00000000..92db0f97 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/pattern.json @@ -0,0 +1,59 @@ +[ + { + "description": "pattern validation", + "schema": {"pattern": "^a*$"}, + "tests": [ + { + "description": "a matching pattern is valid", + "data": "aaa", + "valid": true + }, + { + "description": "a non-matching pattern is invalid", + "data": "abc", + "valid": false + }, + { + "description": "ignores booleans", + "data": true, + "valid": true + }, + { + "description": "ignores integers", + "data": 123, + "valid": true + }, + { + "description": "ignores floats", + "data": 1.0, + "valid": true + }, + { + "description": "ignores objects", + "data": {}, + "valid": true + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores null", + "data": null, + "valid": true + } + ] + }, + { + "description": "pattern is not anchored", + "schema": {"pattern": "a+"}, + "tests": [ + { + "description": "matches a substring", + "data": "xxaayy", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/patternProperties.json b/vendor/jsonschema/json/tests/draft3/patternProperties.json new file mode 100644 index 00000000..b0f2a8e4 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/patternProperties.json @@ -0,0 +1,130 @@ +[ + { + "description": + "patternProperties validates properties matching a regex", + "schema": { + "patternProperties": { + "f.*o": {"type": "integer"} + } + }, + "tests": [ + { + "description": "a single valid match is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "multiple valid matches is valid", + "data": {"foo": 1, "foooooo" : 2}, + "valid": true + }, + { + "description": "a single invalid match is invalid", + "data": {"foo": "bar", "fooooo": 2}, + "valid": false + }, + { + "description": "multiple invalid matches is invalid", + "data": {"foo": "bar", "foooooo" : "baz"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "multiple simultaneous patternProperties are validated", + "schema": { + "patternProperties": { + "a*": {"type": "integer"}, + "aaa*": {"maximum": 20} + } + }, + "tests": [ + { + "description": "a single valid match is valid", + "data": {"a": 21}, + "valid": true + }, + { + "description": "a simultaneous match is valid", + "data": {"aaaa": 18}, + "valid": true + }, + { + "description": "multiple matches is valid", + "data": {"a": 21, "aaaa": 18}, + "valid": true + }, + { + "description": "an invalid due to one is invalid", + "data": {"a": "bar"}, + "valid": false + }, + { + "description": "an invalid due to the other is invalid", + "data": {"aaaa": 31}, + "valid": false + }, + { + "description": "an invalid due to both is invalid", + "data": {"aaa": "foo", "aaaa": 31}, + "valid": false + } + ] + }, + { + "description": "regexes are not anchored by default and are case sensitive", + "schema": { + "patternProperties": { + "[0-9]{2,}": { "type": "boolean" }, + "X_": { "type": "string" } + } + }, + "tests": [ + { + "description": "non recognized members are ignored", + 
"data": { "answer 1": "42" }, + "valid": true + }, + { + "description": "recognized members are accounted for", + "data": { "a31b": null }, + "valid": false + }, + { + "description": "regexes are case sensitive", + "data": { "a_x_3": 3 }, + "valid": true + }, + { + "description": "regexes are case sensitive, 2", + "data": { "a_X_3": 3 }, + "valid": false + } + ] + }, + { + "description": "patternProperties with null valued instance properties", + "schema": { + "patternProperties": { + "^.*bar$": {"type": "null"} + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foobar": null}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/properties.json b/vendor/jsonschema/json/tests/draft3/properties.json new file mode 100644 index 00000000..cd238011 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/properties.json @@ -0,0 +1,112 @@ +[ + { + "description": "object properties validation", + "schema": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "both properties present and valid is valid", + "data": {"foo": 1, "bar": "baz"}, + "valid": true + }, + { + "description": "one property invalid is invalid", + "data": {"foo": 1, "bar": {}}, + "valid": false + }, + { + "description": "both properties invalid is invalid", + "data": {"foo": [], "bar": {}}, + "valid": false + }, + { + "description": "doesn't invalidate other properties", + "data": {"quux": []}, + "valid": true + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": + "properties, patternProperties, additionalProperties interaction", + "schema": { + "properties": { + "foo": {"type": "array", "maxItems": 3}, + "bar": {"type": "array"} + }, + "patternProperties": {"f.o": {"minItems": 2}}, + "additionalProperties": {"type": "integer"} + }, + "tests": [ + { + "description": "property validates property", + "data": {"foo": [1, 2]}, + "valid": true + }, + { + "description": "property invalidates property", + "data": {"foo": [1, 2, 3, 4]}, + "valid": false + }, + { + "description": "patternProperty invalidates property", + "data": {"foo": []}, + "valid": false + }, + { + "description": "patternProperty validates nonproperty", + "data": {"fxo": [1, 2]}, + "valid": true + }, + { + "description": "patternProperty invalidates nonproperty", + "data": {"fxo": []}, + "valid": false + }, + { + "description": "additionalProperty ignores property", + "data": {"bar": []}, + "valid": true + }, + { + "description": "additionalProperty validates others", + "data": {"quux": 3}, + "valid": true + }, + { + "description": "additionalProperty invalidates others", + "data": {"quux": "foo"}, + "valid": false + } + ] + }, + { + "description": "properties with null valued instance properties", + "schema": { + "properties": { + "foo": {"type": "null"} + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foo": null}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/ref.json b/vendor/jsonschema/json/tests/draft3/ref.json new file mode 100644 index 00000000..924db767 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/ref.json @@ -0,0 +1,278 @@ +[ + { + "description": "root pointer ref", + "schema": { + "properties": { + "foo": {"$ref": "#"} + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "match", + "data": {"foo": false}, + 
"valid": true + }, + { + "description": "recursive match", + "data": {"foo": {"foo": false}}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": false}, + "valid": false + }, + { + "description": "recursive mismatch", + "data": {"foo": {"bar": false}}, + "valid": false + } + ] + }, + { + "description": "relative pointer ref to object", + "schema": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"$ref": "#/properties/foo"} + } + }, + "tests": [ + { + "description": "match", + "data": {"bar": 3}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": true}, + "valid": false + } + ] + }, + { + "description": "relative pointer ref to array", + "schema": { + "items": [ + {"type": "integer"}, + {"$ref": "#/items/0"} + ] + }, + "tests": [ + { + "description": "match array", + "data": [1, 2], + "valid": true + }, + { + "description": "mismatch array", + "data": [1, "foo"], + "valid": false + } + ] + }, + { + "description": "escaped pointer ref", + "schema": { + "definitions": { + "tilde~field": {"type": "integer"}, + "slash/field": {"type": "integer"}, + "percent%field": {"type": "integer"} + }, + "properties": { + "tilde": {"$ref": "#/definitions/tilde~0field"}, + "slash": {"$ref": "#/definitions/slash~1field"}, + "percent": {"$ref": "#/definitions/percent%25field"} + } + }, + "tests": [ + { + "description": "slash invalid", + "data": {"slash": "aoeu"}, + "valid": false + }, + { + "description": "tilde invalid", + "data": {"tilde": "aoeu"}, + "valid": false + }, + { + "description": "percent invalid", + "data": {"percent": "aoeu"}, + "valid": false + }, + { + "description": "slash valid", + "data": {"slash": 123}, + "valid": true + }, + { + "description": "tilde valid", + "data": {"tilde": 123}, + "valid": true + }, + { + "description": "percent valid", + "data": {"percent": 123}, + "valid": true + } + ] + }, + { + "description": "nested refs", + "schema": { + "definitions": { + "a": {"type": "integer"}, + "b": {"$ref": "#/definitions/a"}, + "c": {"$ref": "#/definitions/b"} + }, + "$ref": "#/definitions/c" + }, + "tests": [ + { + "description": "nested ref valid", + "data": 5, + "valid": true + }, + { + "description": "nested ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "ref overrides any sibling keywords", + "schema": { + "definitions": { + "reffed": { + "type": "array" + } + }, + "properties": { + "foo": { + "$ref": "#/definitions/reffed", + "maxItems": 2 + } + } + }, + "tests": [ + { + "description": "remote ref valid", + "data": { "foo": [] }, + "valid": true + }, + { + "description": "remote ref valid, maxItems ignored", + "data": { "foo": [ 1, 2, 3] }, + "valid": true + }, + { + "description": "ref invalid", + "data": { "foo": "string" }, + "valid": false + } + ] + }, + { + "description": "property named $ref, containing an actual $ref", + "schema": { + "properties": { + "$ref": {"$ref": "#/definitions/is-string"} + }, + "definitions": { + "is-string": { + "type": "string" + } + } + }, + "tests": [ + { + "description": "property named $ref valid", + "data": {"$ref": "a"}, + "valid": true + }, + { + "description": "property named $ref invalid", + "data": {"$ref": 2}, + "valid": false + } + ] + }, + { + "description": "$ref prevents a sibling id from changing the base uri", + "schema": { + "id": "http://localhost:1234/sibling_id/base/", + "definitions": { + "foo": { + "id": "http://localhost:1234/sibling_id/foo.json", + "type": "string" + }, + "base_foo": { + "$comment": "this canonical uri is 
http://localhost:1234/sibling_id/base/foo.json", + "id": "foo.json", + "type": "number" + } + }, + "allOf": [ + { + "$comment": "$ref resolves to http://localhost:1234/sibling_id/base/foo.json, not http://localhost:1234/sibling_id/foo.json", + "id": "http://localhost:1234/sibling_id/", + "$ref": "foo.json" + } + ] + }, + "tests": [ + { + "description": "$ref resolves to /definitions/base_foo, data does not validate", + "data": "a", + "valid": false + }, + { + "description": "$ref resolves to /definitions/base_foo, data validates", + "data": 1, + "valid": true + } + ] + }, + { + "description": "remote ref, containing refs itself", + "schema": {"$ref": "http://json-schema.org/draft-03/schema#"}, + "tests": [ + { + "description": "remote ref valid", + "data": {"items": {"type": "integer"}}, + "valid": true + }, + { + "description": "remote ref invalid", + "data": {"items": {"type": 1}}, + "valid": false + } + ] + }, + { + "description": "naive replacement of $ref with its destination is not correct", + "schema": { + "definitions": { + "a_string": { "type": "string" } + }, + "enum": [ + { "$ref": "#/definitions/a_string" } + ] + }, + "tests": [ + { + "description": "do not evaluate the $ref inside the enum, matching any string", + "data": "this is a string", + "valid": false + }, + { + "description": "match the enum exactly", + "data": { "$ref": "#/definitions/a_string" }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/refRemote.json b/vendor/jsonschema/json/tests/draft3/refRemote.json new file mode 100644 index 00000000..de0cb43a --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/refRemote.json @@ -0,0 +1,74 @@ +[ + { + "description": "remote ref", + "schema": {"$ref": "http://localhost:1234/integer.json"}, + "tests": [ + { + "description": "remote ref valid", + "data": 1, + "valid": true + }, + { + "description": "remote ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "fragment within remote ref", + "schema": {"$ref": "http://localhost:1234/subSchemas.json#/integer"}, + "tests": [ + { + "description": "remote fragment valid", + "data": 1, + "valid": true + }, + { + "description": "remote fragment invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "ref within remote ref", + "schema": { + "$ref": "http://localhost:1234/subSchemas.json#/refToInteger" + }, + "tests": [ + { + "description": "ref within ref valid", + "data": 1, + "valid": true + }, + { + "description": "ref within ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "change resolution scope", + "schema": { + "id": "http://localhost:1234/", + "items": { + "id": "baseUriChange/", + "items": {"$ref": "folderInteger.json"} + } + }, + "tests": [ + { + "description": "changed scope ref valid", + "data": [[1]], + "valid": true + }, + { + "description": "changed scope ref invalid", + "data": [["a"]], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/required.json b/vendor/jsonschema/json/tests/draft3/required.json new file mode 100644 index 00000000..aaaf0242 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/required.json @@ -0,0 +1,53 @@ +[ + { + "description": "required validation", + "schema": { + "properties": { + "foo": {"required" : true}, + "bar": {} + } + }, + "tests": [ + { + "description": "present required property is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "non-present required property is invalid", + "data": {"bar": 1}, + "valid": false + 
} + ] + }, + { + "description": "required default validation", + "schema": { + "properties": { + "foo": {} + } + }, + "tests": [ + { + "description": "not required by default", + "data": {}, + "valid": true + } + ] + }, + { + "description": "required explicitly false validation", + "schema": { + "properties": { + "foo": {"required": false} + } + }, + "tests": [ + { + "description": "not required if required is false", + "data": {}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/type.json b/vendor/jsonschema/json/tests/draft3/type.json new file mode 100644 index 00000000..8447bc8e --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/type.json @@ -0,0 +1,493 @@ +[ + { + "description": "integer type matches integers", + "schema": {"type": "integer"}, + "tests": [ + { + "description": "an integer is an integer", + "data": 1, + "valid": true + }, + { + "description": "a float is not an integer", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an integer", + "data": "foo", + "valid": false + }, + { + "description": "a string is still not an integer, even if it looks like one", + "data": "1", + "valid": false + }, + { + "description": "an object is not an integer", + "data": {}, + "valid": false + }, + { + "description": "an array is not an integer", + "data": [], + "valid": false + }, + { + "description": "a boolean is not an integer", + "data": true, + "valid": false + }, + { + "description": "null is not an integer", + "data": null, + "valid": false + } + ] + }, + { + "description": "number type matches numbers", + "schema": {"type": "number"}, + "tests": [ + { + "description": "an integer is a number", + "data": 1, + "valid": true + }, + { + "description": "a float with zero fractional part is a number", + "data": 1.0, + "valid": true + }, + { + "description": "a float is a number", + "data": 1.1, + "valid": true + }, + { + "description": "a string is not a number", + "data": "foo", + "valid": false + }, + { + "description": "a string is still not a number, even if it looks like one", + "data": "1", + "valid": false + }, + { + "description": "an object is not a number", + "data": {}, + "valid": false + }, + { + "description": "an array is not a number", + "data": [], + "valid": false + }, + { + "description": "a boolean is not a number", + "data": true, + "valid": false + }, + { + "description": "null is not a number", + "data": null, + "valid": false + } + ] + }, + { + "description": "string type matches strings", + "schema": {"type": "string"}, + "tests": [ + { + "description": "1 is not a string", + "data": 1, + "valid": false + }, + { + "description": "a float is not a string", + "data": 1.1, + "valid": false + }, + { + "description": "a string is a string", + "data": "foo", + "valid": true + }, + { + "description": "a string is still a string, even if it looks like a number", + "data": "1", + "valid": true + }, + { + "description": "an object is not a string", + "data": {}, + "valid": false + }, + { + "description": "an array is not a string", + "data": [], + "valid": false + }, + { + "description": "a boolean is not a string", + "data": true, + "valid": false + }, + { + "description": "null is not a string", + "data": null, + "valid": false + } + ] + }, + { + "description": "object type matches objects", + "schema": {"type": "object"}, + "tests": [ + { + "description": "an integer is not an object", + "data": 1, + "valid": false + }, + { + "description": "a float is not an object", + "data": 1.1, + "valid": false + }, + { + 
"description": "a string is not an object", + "data": "foo", + "valid": false + }, + { + "description": "an object is an object", + "data": {}, + "valid": true + }, + { + "description": "an array is not an object", + "data": [], + "valid": false + }, + { + "description": "a boolean is not an object", + "data": true, + "valid": false + }, + { + "description": "null is not an object", + "data": null, + "valid": false + } + ] + }, + { + "description": "array type matches arrays", + "schema": {"type": "array"}, + "tests": [ + { + "description": "an integer is not an array", + "data": 1, + "valid": false + }, + { + "description": "a float is not an array", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an array", + "data": "foo", + "valid": false + }, + { + "description": "an object is not an array", + "data": {}, + "valid": false + }, + { + "description": "an array is an array", + "data": [], + "valid": true + }, + { + "description": "a boolean is not an array", + "data": true, + "valid": false + }, + { + "description": "null is not an array", + "data": null, + "valid": false + } + ] + }, + { + "description": "boolean type matches booleans", + "schema": {"type": "boolean"}, + "tests": [ + { + "description": "an integer is not a boolean", + "data": 1, + "valid": false + }, + { + "description": "a float is not a boolean", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not a boolean", + "data": "foo", + "valid": false + }, + { + "description": "an object is not a boolean", + "data": {}, + "valid": false + }, + { + "description": "an array is not a boolean", + "data": [], + "valid": false + }, + { + "description": "a boolean is a boolean", + "data": true, + "valid": true + }, + { + "description": "null is not a boolean", + "data": null, + "valid": false + } + ] + }, + { + "description": "null type matches only the null object", + "schema": {"type": "null"}, + "tests": [ + { + "description": "an integer is not null", + "data": 1, + "valid": false + }, + { + "description": "a float is not null", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not null", + "data": "foo", + "valid": false + }, + { + "description": "an object is not null", + "data": {}, + "valid": false + }, + { + "description": "an array is not null", + "data": [], + "valid": false + }, + { + "description": "a boolean is not null", + "data": true, + "valid": false + }, + { + "description": "null is null", + "data": null, + "valid": true + } + ] + }, + { + "description": "any type matches any type", + "schema": {"type": "any"}, + "tests": [ + { + "description": "any type includes integers", + "data": 1, + "valid": true + }, + { + "description": "any type includes float", + "data": 1.1, + "valid": true + }, + { + "description": "any type includes string", + "data": "foo", + "valid": true + }, + { + "description": "any type includes object", + "data": {}, + "valid": true + }, + { + "description": "any type includes array", + "data": [], + "valid": true + }, + { + "description": "any type includes boolean", + "data": true, + "valid": true + }, + { + "description": "any type includes null", + "data": null, + "valid": true + } + ] + }, + { + "description": "multiple types can be specified in an array", + "schema": {"type": ["integer", "string"]}, + "tests": [ + { + "description": "an integer is valid", + "data": 1, + "valid": true + }, + { + "description": "a string is valid", + "data": "foo", + "valid": true + }, + { + "description": "a float is invalid", + "data": 
1.1, + "valid": false + }, + { + "description": "an object is invalid", + "data": {}, + "valid": false + }, + { + "description": "an array is invalid", + "data": [], + "valid": false + }, + { + "description": "a boolean is invalid", + "data": true, + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + } + ] + }, + { + "description": "types can include schemas", + "schema": { + "type": [ + "array", + {"type": "object"} + ] + }, + "tests": [ + { + "description": "an integer is invalid", + "data": 1, + "valid": false + }, + { + "description": "a string is invalid", + "data": "foo", + "valid": false + }, + { + "description": "a float is invalid", + "data": 1.1, + "valid": false + }, + { + "description": "an object is valid", + "data": {}, + "valid": true + }, + { + "description": "an array is valid", + "data": [], + "valid": true + }, + { + "description": "a boolean is invalid", + "data": true, + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + } + ] + }, + { + "description": "applies a nested schema", + "schema": { + "type": [ + "integer", + { + "properties": { + "foo": {"type": "null"} + } + } + ] + }, + "tests": [ + { + "description": "an integer is valid", + "data": 1, + "valid": true + }, + { + "description": "an object is valid only if it is fully valid", + "data": {"foo": null}, + "valid": true + }, + { + "description": "an object is invalid otherwise", + "data": {"foo": "bar"}, + "valid": false + } + ] + }, + { + "description": "types from separate schemas are merged", + "schema": { + "type": [ + {"type": ["string"]}, + {"type": ["array", "null"]} + ] + }, + "tests": [ + { + "description": "an integer is invalid", + "data": 1, + "valid": false + }, + { + "description": "a string is valid", + "data": "foo", + "valid": true + }, + { + "description": "an array is valid", + "data": [1, 2, 3], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft3/uniqueItems.json b/vendor/jsonschema/json/tests/draft3/uniqueItems.json new file mode 100644 index 00000000..c48c6a06 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft3/uniqueItems.json @@ -0,0 +1,374 @@ +[ + { + "description": "uniqueItems validation", + "schema": {"uniqueItems": true}, + "tests": [ + { + "description": "unique array of integers is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "non-unique array of integers is invalid", + "data": [1, 1], + "valid": false + }, + { + "description": "non-unique array of more than two integers is invalid", + "data": [1, 2, 1], + "valid": false + }, + { + "description": "numbers are unique if mathematically unequal", + "data": [1.0, 1.00, 1], + "valid": false + }, + { + "description": "unique array of strings is valid", + "data": ["foo", "bar", "baz"], + "valid": true + }, + { + "description": "non-unique array of strings is invalid", + "data": ["foo", "bar", "foo"], + "valid": false + }, + { + "description": "unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "baz"}], + "valid": true + }, + { + "description": "non-unique array of objects is invalid", + "data": [{"foo": "bar"}, {"foo": "bar"}], + "valid": false + }, + { + "description": "unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : false}}} + ], + "valid": true + }, + { + "description": "non-unique array of nested objects is invalid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : true}}} + ], 
+ "valid": false + }, + { + "description": "unique array of arrays is valid", + "data": [["foo"], ["bar"]], + "valid": true + }, + { + "description": "non-unique array of arrays is invalid", + "data": [["foo"], ["foo"]], + "valid": false + }, + { + "description": "non-unique array of more than two arrays is invalid", + "data": [["foo"], ["bar"], ["foo"]], + "valid": false + }, + { + "description": "1 and true are unique", + "data": [1, true], + "valid": true + }, + { + "description": "0 and false are unique", + "data": [0, false], + "valid": true + }, + { + "description": "[1] and [true] are unique", + "data": [[1], [true]], + "valid": true + }, + { + "description": "[0] and [false] are unique", + "data": [[0], [false]], + "valid": true + }, + { + "description": "nested [1] and [true] are unique", + "data": [[[1], "foo"], [[true], "foo"]], + "valid": true + }, + { + "description": "nested [0] and [false] are unique", + "data": [[[0], "foo"], [[false], "foo"]], + "valid": true + }, + { + "description": "unique heterogeneous types are valid", + "data": [{}, [1], true, null, 1], + "valid": true + }, + { + "description": "non-unique heterogeneous types are invalid", + "data": [{}, [1], true, null, {}, 1], + "valid": false + }, + { + "description": "{\"a\": false} and {\"a\": 0} are unique", + "data": [{"a": false}, {"a": 0}], + "valid": true + }, + { + "description": "{\"a\": true} and {\"a\": 1} are unique", + "data": [{"a": true}, {"a": 1}], + "valid": true + } + ] + }, + { + "description": "uniqueItems with an array of items", + "schema": { + "items": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": true + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is not valid", + "data": [false, false], + "valid": false + }, + { + "description": "[true, true] from items array is not valid", + "data": [true, true], + "valid": false + }, + { + "description": "unique array extended from [false, true] is valid", + "data": [false, true, "foo", "bar"], + "valid": true + }, + { + "description": "unique array extended from [true, false] is valid", + "data": [true, false, "foo", "bar"], + "valid": true + }, + { + "description": "non-unique array extended from [false, true] is not valid", + "data": [false, true, "foo", "foo"], + "valid": false + }, + { + "description": "non-unique array extended from [true, false] is not valid", + "data": [true, false, "foo", "foo"], + "valid": false + } + ] + }, + { + "description": "uniqueItems with an array of items and additionalItems=false", + "schema": { + "items": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": true, + "additionalItems": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is not valid", + "data": [false, false], + "valid": false + }, + { + "description": "[true, true] from items array is not valid", + "data": [true, true], + "valid": false + }, + { + "description": "extra items are invalid even if unique", + "data": [false, true, null], + "valid": false + } + ] + }, + { + "description": "uniqueItems=false validation", + "schema": { "uniqueItems": 
false }, + "tests": [ + { + "description": "unique array of integers is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "non-unique array of integers is valid", + "data": [1, 1], + "valid": true + }, + { + "description": "numbers are unique if mathematically unequal", + "data": [1.0, 1.00, 1], + "valid": true + }, + { + "description": "unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "baz"}], + "valid": true + }, + { + "description": "non-unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "bar"}], + "valid": true + }, + { + "description": "unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : false}}} + ], + "valid": true + }, + { + "description": "non-unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : true}}} + ], + "valid": true + }, + { + "description": "unique array of arrays is valid", + "data": [["foo"], ["bar"]], + "valid": true + }, + { + "description": "non-unique array of arrays is valid", + "data": [["foo"], ["foo"]], + "valid": true + }, + { + "description": "1 and true are unique", + "data": [1, true], + "valid": true + }, + { + "description": "0 and false are unique", + "data": [0, false], + "valid": true + }, + { + "description": "unique heterogeneous types are valid", + "data": [{}, [1], true, null, 1], + "valid": true + }, + { + "description": "non-unique heterogeneous types are valid", + "data": [{}, [1], true, null, {}, 1], + "valid": true + } + ] + }, + { + "description": "uniqueItems=false with an array of items", + "schema": { + "items": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is valid", + "data": [false, false], + "valid": true + }, + { + "description": "[true, true] from items array is valid", + "data": [true, true], + "valid": true + }, + { + "description": "unique array extended from [false, true] is valid", + "data": [false, true, "foo", "bar"], + "valid": true + }, + { + "description": "unique array extended from [true, false] is valid", + "data": [true, false, "foo", "bar"], + "valid": true + }, + { + "description": "non-unique array extended from [false, true] is valid", + "data": [false, true, "foo", "foo"], + "valid": true + }, + { + "description": "non-unique array extended from [true, false] is valid", + "data": [true, false, "foo", "foo"], + "valid": true + } + ] + }, + { + "description": "uniqueItems=false with an array of items and additionalItems=false", + "schema": { + "items": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": false, + "additionalItems": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is valid", + "data": [false, false], + "valid": true + }, + { + "description": "[true, true] from items array is valid", + "data": [true, true], + "valid": true + }, + { + "description": "extra items are invalid even if unique", + "data": [false, true, null], + "valid": false + } + ] + } +] diff 
--git a/vendor/jsonschema/json/tests/draft4/additionalItems.json b/vendor/jsonschema/json/tests/draft4/additionalItems.json new file mode 100644 index 00000000..deb44fd3 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/additionalItems.json @@ -0,0 +1,164 @@ +[ + { + "description": "additionalItems as schema", + "schema": { + "items": [{}], + "additionalItems": {"type": "integer"} + }, + "tests": [ + { + "description": "additional items match schema", + "data": [ null, 2, 3, 4 ], + "valid": true + }, + { + "description": "additional items do not match schema", + "data": [ null, 2, 3, "foo" ], + "valid": false + } + ] + }, + { + "description": "when items is schema, additionalItems does nothing", + "schema": { + "items": {}, + "additionalItems": false + }, + "tests": [ + { + "description": "all items match schema", + "data": [ 1, 2, 3, 4, 5 ], + "valid": true + } + ] + }, + { + "description": "array of items with no additionalItems permitted", + "schema": { + "items": [{}, {}, {}], + "additionalItems": false + }, + "tests": [ + { + "description": "empty array", + "data": [ ], + "valid": true + }, + { + "description": "fewer number of items present (1)", + "data": [ 1 ], + "valid": true + }, + { + "description": "fewer number of items present (2)", + "data": [ 1, 2 ], + "valid": true + }, + { + "description": "equal number of items present", + "data": [ 1, 2, 3 ], + "valid": true + }, + { + "description": "additional items are not permitted", + "data": [ 1, 2, 3, 4 ], + "valid": false + } + ] + }, + { + "description": "additionalItems as false without items", + "schema": {"additionalItems": false}, + "tests": [ + { + "description": + "items defaults to empty schema so everything is valid", + "data": [ 1, 2, 3, 4, 5 ], + "valid": true + }, + { + "description": "ignores non-arrays", + "data": {"foo" : "bar"}, + "valid": true + } + ] + }, + { + "description": "additionalItems are allowed by default", + "schema": {"items": [{"type": "integer"}]}, + "tests": [ + { + "description": "only the first item is validated", + "data": [1, "foo", false], + "valid": true + } + ] + }, + { + "description": "additionalItems does not look in applicators, valid case", + "schema": { + "allOf": [ + { "items": [ { "type": "integer" } ] } + ], + "additionalItems": { "type": "boolean" } + }, + "tests": [ + { + "description": "items defined in allOf are not examined", + "data": [ 1, null ], + "valid": true + } + ] + }, + { + "description": "additionalItems does not look in applicators, invalid case", + "schema": { + "allOf": [ + { "items": [ { "type": "integer" }, { "type": "string" } ] } + ], + "items": [ {"type": "integer" } ], + "additionalItems": { "type": "boolean" } + }, + "tests": [ + { + "description": "items defined in allOf are not examined", + "data": [ 1, "hello" ], + "valid": false + } + ] + }, + { + "description": "items validation adjusts the starting index for additionalItems", + "schema": { + "items": [ { "type": "string" } ], + "additionalItems": { "type": "integer" } + }, + "tests": [ + { + "description": "valid items", + "data": [ "x", 2, 3 ], + "valid": true + }, + { + "description": "wrong type of second item", + "data": [ "x", "y" ], + "valid": false + } + ] + }, + { + "description": "additionalItems with null instance elements", + "schema": { + "additionalItems": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/additionalProperties.json 
b/vendor/jsonschema/json/tests/draft4/additionalProperties.json new file mode 100644 index 00000000..0f8e1627 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/additionalProperties.json @@ -0,0 +1,147 @@ +[ + { + "description": + "additionalProperties being false does not allow other properties", + "schema": { + "properties": {"foo": {}, "bar": {}}, + "patternProperties": { "^v": {} }, + "additionalProperties": false + }, + "tests": [ + { + "description": "no additional properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "an additional property is invalid", + "data": {"foo" : 1, "bar" : 2, "quux" : "boom"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [1, 2, 3], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobarbaz", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + }, + { + "description": "patternProperties are not additional properties", + "data": {"foo":1, "vroom": 2}, + "valid": true + } + ] + }, + { + "description": "non-ASCII pattern with additionalProperties", + "schema": { + "patternProperties": {"^á": {}}, + "additionalProperties": false + }, + "tests": [ + { + "description": "matching the pattern is valid", + "data": {"ármányos": 2}, + "valid": true + }, + { + "description": "not matching the pattern is invalid", + "data": {"élmény": 2}, + "valid": false + } + ] + }, + { + "description": "additionalProperties with schema", + "schema": { + "properties": {"foo": {}, "bar": {}}, + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "no additional properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "an additional valid property is valid", + "data": {"foo" : 1, "bar" : 2, "quux" : true}, + "valid": true + }, + { + "description": "an additional invalid property is invalid", + "data": {"foo" : 1, "bar" : 2, "quux" : 12}, + "valid": false + } + ] + }, + { + "description": + "additionalProperties can exist by itself", + "schema": { + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "an additional valid property is valid", + "data": {"foo" : true}, + "valid": true + }, + { + "description": "an additional invalid property is invalid", + "data": {"foo" : 1}, + "valid": false + } + ] + }, + { + "description": "additionalProperties are allowed by default", + "schema": {"properties": {"foo": {}, "bar": {}}}, + "tests": [ + { + "description": "additional properties are allowed", + "data": {"foo": 1, "bar": 2, "quux": true}, + "valid": true + } + ] + }, + { + "description": "additionalProperties does not look in applicators", + "schema": { + "allOf": [ + {"properties": {"foo": {}}} + ], + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "properties defined in allOf are not examined", + "data": {"foo": 1, "bar": true}, + "valid": false + } + ] + }, + { + "description": "additionalProperties with null valued instance properties", + "schema": { + "additionalProperties": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foo": null}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/allOf.json b/vendor/jsonschema/json/tests/draft4/allOf.json new file mode 100644 index 00000000..fc7dec59 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/allOf.json @@ -0,0 +1,261 @@ +[ + { + "description": "allOf", + "schema": { + "allOf": [ + { + "properties": { + 
"bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "allOf", + "data": {"foo": "baz", "bar": 2}, + "valid": true + }, + { + "description": "mismatch second", + "data": {"foo": "baz"}, + "valid": false + }, + { + "description": "mismatch first", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "wrong type", + "data": {"foo": "baz", "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "allOf with base schema", + "schema": { + "properties": {"bar": {"type": "integer"}}, + "required": ["bar"], + "allOf" : [ + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + }, + { + "properties": { + "baz": {"type": "null"} + }, + "required": ["baz"] + } + ] + }, + "tests": [ + { + "description": "valid", + "data": {"foo": "quux", "bar": 2, "baz": null}, + "valid": true + }, + { + "description": "mismatch base schema", + "data": {"foo": "quux", "baz": null}, + "valid": false + }, + { + "description": "mismatch first allOf", + "data": {"bar": 2, "baz": null}, + "valid": false + }, + { + "description": "mismatch second allOf", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "mismatch both", + "data": {"bar": 2}, + "valid": false + } + ] + }, + { + "description": "allOf simple types", + "schema": { + "allOf": [ + {"maximum": 30}, + {"minimum": 20} + ] + }, + "tests": [ + { + "description": "valid", + "data": 25, + "valid": true + }, + { + "description": "mismatch one", + "data": 35, + "valid": false + } + ] + }, + { + "description": "allOf with one empty schema", + "schema": { + "allOf": [ + {} + ] + }, + "tests": [ + { + "description": "any data is valid", + "data": 1, + "valid": true + } + ] + }, + { + "description": "allOf with two empty schemas", + "schema": { + "allOf": [ + {}, + {} + ] + }, + "tests": [ + { + "description": "any data is valid", + "data": 1, + "valid": true + } + ] + }, + { + "description": "allOf with the first empty schema", + "schema": { + "allOf": [ + {}, + { "type": "number" } + ] + }, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "allOf with the last empty schema", + "schema": { + "allOf": [ + { "type": "number" }, + {} + ] + }, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "nested allOf, to check validation semantics", + "schema": { + "allOf": [ + { + "allOf": [ + { + "type": "null" + } + ] + } + ] + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "anything non-null is invalid", + "data": 123, + "valid": false + } + ] + }, + { + "description": "allOf combined with anyOf, oneOf", + "schema": { + "allOf": [ { "multipleOf": 2 } ], + "anyOf": [ { "multipleOf": 3 } ], + "oneOf": [ { "multipleOf": 5 } ] + }, + "tests": [ + { + "description": "allOf: false, anyOf: false, oneOf: false", + "data": 1, + "valid": false + }, + { + "description": "allOf: false, anyOf: false, oneOf: true", + "data": 5, + "valid": false + }, + { + "description": "allOf: false, anyOf: true, oneOf: false", + "data": 3, + "valid": false + }, + { + "description": "allOf: false, anyOf: true, oneOf: true", + "data": 15, + "valid": false + }, + { 
+ "description": "allOf: true, anyOf: false, oneOf: false", + "data": 2, + "valid": false + }, + { + "description": "allOf: true, anyOf: false, oneOf: true", + "data": 10, + "valid": false + }, + { + "description": "allOf: true, anyOf: true, oneOf: false", + "data": 6, + "valid": false + }, + { + "description": "allOf: true, anyOf: true, oneOf: true", + "data": 30, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/anyOf.json b/vendor/jsonschema/json/tests/draft4/anyOf.json new file mode 100644 index 00000000..09cc3c2f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/anyOf.json @@ -0,0 +1,156 @@ +[ + { + "description": "anyOf", + "schema": { + "anyOf": [ + { + "type": "integer" + }, + { + "minimum": 2 + } + ] + }, + "tests": [ + { + "description": "first anyOf valid", + "data": 1, + "valid": true + }, + { + "description": "second anyOf valid", + "data": 2.5, + "valid": true + }, + { + "description": "both anyOf valid", + "data": 3, + "valid": true + }, + { + "description": "neither anyOf valid", + "data": 1.5, + "valid": false + } + ] + }, + { + "description": "anyOf with base schema", + "schema": { + "type": "string", + "anyOf" : [ + { + "maxLength": 2 + }, + { + "minLength": 4 + } + ] + }, + "tests": [ + { + "description": "mismatch base schema", + "data": 3, + "valid": false + }, + { + "description": "one anyOf valid", + "data": "foobar", + "valid": true + }, + { + "description": "both anyOf invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "anyOf complex types", + "schema": { + "anyOf": [ + { + "properties": { + "bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "first anyOf valid (complex)", + "data": {"bar": 2}, + "valid": true + }, + { + "description": "second anyOf valid (complex)", + "data": {"foo": "baz"}, + "valid": true + }, + { + "description": "both anyOf valid (complex)", + "data": {"foo": "baz", "bar": 2}, + "valid": true + }, + { + "description": "neither anyOf valid (complex)", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "anyOf with one empty schema", + "schema": { + "anyOf": [ + { "type": "number" }, + {} + ] + }, + "tests": [ + { + "description": "string is valid", + "data": "foo", + "valid": true + }, + { + "description": "number is valid", + "data": 123, + "valid": true + } + ] + }, + { + "description": "nested anyOf, to check validation semantics", + "schema": { + "anyOf": [ + { + "anyOf": [ + { + "type": "null" + } + ] + } + ] + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "anything non-null is invalid", + "data": 123, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/default.json b/vendor/jsonschema/json/tests/draft4/default.json new file mode 100644 index 00000000..289a9b66 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/default.json @@ -0,0 +1,79 @@ +[ + { + "description": "invalid type for default", + "schema": { + "properties": { + "foo": { + "type": "integer", + "default": [] + } + } + }, + "tests": [ + { + "description": "valid when property is specified", + "data": {"foo": 13}, + "valid": true + }, + { + "description": "still valid when the invalid default is used", + "data": {}, + "valid": true + } + ] + }, + { + "description": "invalid string value for default", + "schema": { + "properties": { + "bar": { + "type": 
"string", + "minLength": 4, + "default": "bad" + } + } + }, + "tests": [ + { + "description": "valid when property is specified", + "data": {"bar": "good"}, + "valid": true + }, + { + "description": "still valid when the invalid default is used", + "data": {}, + "valid": true + } + ] + }, + { + "description": "the default keyword does not do anything if the property is missing", + "schema": { + "type": "object", + "properties": { + "alpha": { + "type": "number", + "maximum": 3, + "default": 5 + } + } + }, + "tests": [ + { + "description": "an explicit property value is checked against maximum (passing)", + "data": { "alpha": 1 }, + "valid": true + }, + { + "description": "an explicit property value is checked against maximum (failing)", + "data": { "alpha": 5 }, + "valid": false + }, + { + "description": "missing properties are not filled in with the default", + "data": {}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/definitions.json b/vendor/jsonschema/json/tests/draft4/definitions.json new file mode 100644 index 00000000..482823be --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/definitions.json @@ -0,0 +1,26 @@ +[ + { + "description": "validate definition against metaschema", + "schema": {"$ref": "http://json-schema.org/draft-04/schema#"}, + "tests": [ + { + "description": "valid definition schema", + "data": { + "definitions": { + "foo": {"type": "integer"} + } + }, + "valid": true + }, + { + "description": "invalid definition schema", + "data": { + "definitions": { + "foo": {"type": 1} + } + }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/dependencies.json b/vendor/jsonschema/json/tests/draft4/dependencies.json new file mode 100644 index 00000000..51eeddf3 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/dependencies.json @@ -0,0 +1,194 @@ +[ + { + "description": "dependencies", + "schema": { + "dependencies": {"bar": ["foo"]} + }, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependant", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "with dependency", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["bar"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "multiple dependencies", + "schema": { + "dependencies": {"quux": ["foo", "bar"]} + }, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependants", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "with dependencies", + "data": {"foo": 1, "bar": 2, "quux": 3}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"foo": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing other dependency", + "data": {"bar": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing both dependencies", + "data": {"quux": 1}, + "valid": false + } + ] + }, + { + "description": "multiple dependencies subschema", + "schema": { + "dependencies": { + "bar": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "integer"} + } + } + } + }, + "tests": [ + { + "description": "valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "no 
dependency", + "data": {"foo": "quux"}, + "valid": true + }, + { + "description": "wrong type", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "wrong type other", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + }, + { + "description": "wrong type both", + "data": {"foo": "quux", "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "dependencies with escaped characters", + "schema": { + "dependencies": { + "foo\nbar": ["foo\rbar"], + "foo\tbar": { + "minProperties": 4 + }, + "foo'bar": {"required": ["foo\"bar"]}, + "foo\"bar": ["foo'bar"] + } + }, + "tests": [ + { + "description": "valid object 1", + "data": { + "foo\nbar": 1, + "foo\rbar": 2 + }, + "valid": true + }, + { + "description": "valid object 2", + "data": { + "foo\tbar": 1, + "a": 2, + "b": 3, + "c": 4 + }, + "valid": true + }, + { + "description": "valid object 3", + "data": { + "foo'bar": 1, + "foo\"bar": 2 + }, + "valid": true + }, + { + "description": "invalid object 1", + "data": { + "foo\nbar": 1, + "foo": 2 + }, + "valid": false + }, + { + "description": "invalid object 2", + "data": { + "foo\tbar": 1, + "a": 2 + }, + "valid": false + }, + { + "description": "invalid object 3", + "data": { + "foo'bar": 1 + }, + "valid": false + }, + { + "description": "invalid object 4", + "data": { + "foo\"bar": 2 + }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/enum.json b/vendor/jsonschema/json/tests/draft4/enum.json new file mode 100644 index 00000000..f085097b --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/enum.json @@ -0,0 +1,236 @@ +[ + { + "description": "simple enum validation", + "schema": {"enum": [1, 2, 3]}, + "tests": [ + { + "description": "one of the enum is valid", + "data": 1, + "valid": true + }, + { + "description": "something else is invalid", + "data": 4, + "valid": false + } + ] + }, + { + "description": "heterogeneous enum validation", + "schema": {"enum": [6, "foo", [], true, {"foo": 12}]}, + "tests": [ + { + "description": "one of the enum is valid", + "data": [], + "valid": true + }, + { + "description": "something else is invalid", + "data": null, + "valid": false + }, + { + "description": "objects are deep compared", + "data": {"foo": false}, + "valid": false + }, + { + "description": "valid object matches", + "data": {"foo": 12}, + "valid": true + }, + { + "description": "extra properties in object is invalid", + "data": {"foo": 12, "boo": 42}, + "valid": false + } + ] + }, + { + "description": "heterogeneous enum-with-null validation", + "schema": { "enum": [6, null] }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "number is valid", + "data": 6, + "valid": true + }, + { + "description": "something else is invalid", + "data": "test", + "valid": false + } + ] + }, + { + "description": "enums in properties", + "schema": { + "type":"object", + "properties": { + "foo": {"enum":["foo"]}, + "bar": {"enum":["bar"]} + }, + "required": ["bar"] + }, + "tests": [ + { + "description": "both properties are valid", + "data": {"foo":"foo", "bar":"bar"}, + "valid": true + }, + { + "description": "wrong foo value", + "data": {"foo":"foot", "bar":"bar"}, + "valid": false + }, + { + "description": "wrong bar value", + "data": {"foo":"foo", "bar":"bart"}, + "valid": false + }, + { + "description": "missing optional property is valid", + "data": {"bar":"bar"}, + "valid": true + }, + { + "description": "missing required property is invalid", + "data": {"foo":"foo"}, + 
"valid": false + }, + { + "description": "missing all properties is invalid", + "data": {}, + "valid": false + } + ] + }, + { + "description": "enum with escaped characters", + "schema": { + "enum": ["foo\nbar", "foo\rbar"] + }, + "tests": [ + { + "description": "member 1 is valid", + "data": "foo\nbar", + "valid": true + }, + { + "description": "member 2 is valid", + "data": "foo\rbar", + "valid": true + }, + { + "description": "another string is invalid", + "data": "abc", + "valid": false + } + ] + }, + { + "description": "enum with false does not match 0", + "schema": {"enum": [false]}, + "tests": [ + { + "description": "false is valid", + "data": false, + "valid": true + }, + { + "description": "integer zero is invalid", + "data": 0, + "valid": false + }, + { + "description": "float zero is invalid", + "data": 0.0, + "valid": false + } + ] + }, + { + "description": "enum with true does not match 1", + "schema": {"enum": [true]}, + "tests": [ + { + "description": "true is valid", + "data": true, + "valid": true + }, + { + "description": "integer one is invalid", + "data": 1, + "valid": false + }, + { + "description": "float one is invalid", + "data": 1.0, + "valid": false + } + ] + }, + { + "description": "enum with 0 does not match false", + "schema": {"enum": [0]}, + "tests": [ + { + "description": "false is invalid", + "data": false, + "valid": false + }, + { + "description": "integer zero is valid", + "data": 0, + "valid": true + }, + { + "description": "float zero is valid", + "data": 0.0, + "valid": true + } + ] + }, + { + "description": "enum with 1 does not match true", + "schema": {"enum": [1]}, + "tests": [ + { + "description": "true is invalid", + "data": true, + "valid": false + }, + { + "description": "integer one is valid", + "data": 1, + "valid": true + }, + { + "description": "float one is valid", + "data": 1.0, + "valid": true + } + ] + }, + { + "description": "nul characters in strings", + "schema": { "enum": [ "hello\u0000there" ] }, + "tests": [ + { + "description": "match string with nul", + "data": "hello\u0000there", + "valid": true + }, + { + "description": "do not match string lacking nul", + "data": "hellothere", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/format.json b/vendor/jsonschema/json/tests/draft4/format.json new file mode 100644 index 00000000..5bd83cc6 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/format.json @@ -0,0 +1,218 @@ +[ + { + "description": "email format", + "schema": { "format": "email" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "ipv4 format", + "schema": { "format": "ipv4" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + 
"valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "ipv6 format", + "schema": { "format": "ipv6" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "hostname format", + "schema": { "format": "hostname" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "date-time format", + "schema": { "format": "date-time" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "uri format", + "schema": { "format": "uri" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/id.json b/vendor/jsonschema/json/tests/draft4/id.json new file mode 100644 index 00000000..1c91d33e --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/id.json @@ -0,0 +1,53 @@ +[ + { + "description": "id inside an enum is not a real identifier", + "comment": "the implementation must not be confused by an id buried in the enum", + "schema": { + "definitions": { + "id_in_enum": { + "enum": [ + { + "id": "https://localhost:1234/my_identifier.json", + "type": "null" + } + ] + }, + "real_id_in_schema": { + "id": "https://localhost:1234/my_identifier.json", + "type": "string" + }, + "zzz_id_in_const": { + "const": { + "id": 
"https://localhost:1234/my_identifier.json", + "type": "null" + } + } + }, + "anyOf": [ + { "$ref": "#/definitions/id_in_enum" }, + { "$ref": "https://localhost:1234/my_identifier.json" } + ] + }, + "tests": [ + { + "description": "exact match to enum, and type matches", + "data": { + "id": "https://localhost:1234/my_identifier.json", + "type": "null" + }, + "valid": true + }, + { + "description": "match $ref to id", + "data": "a string to match #/definitions/id_in_enum", + "valid": true + }, + { + "description": "no match on enum or $ref to id", + "data": 1, + "valid": false + } + ] + } + +] diff --git a/vendor/jsonschema/json/tests/draft4/infinite-loop-detection.json b/vendor/jsonschema/json/tests/draft4/infinite-loop-detection.json new file mode 100644 index 00000000..f98c74fc --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/infinite-loop-detection.json @@ -0,0 +1,36 @@ +[ + { + "description": "evaluating the same schema location against the same data location twice is not a sign of an infinite loop", + "schema": { + "definitions": { + "int": { "type": "integer" } + }, + "allOf": [ + { + "properties": { + "foo": { + "$ref": "#/definitions/int" + } + } + }, + { + "additionalProperties": { + "$ref": "#/definitions/int" + } + } + ] + }, + "tests": [ + { + "description": "passing case", + "data": { "foo": 1 }, + "valid": true + }, + { + "description": "failing case", + "data": { "foo": "a string" }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/items.json b/vendor/jsonschema/json/tests/draft4/items.json new file mode 100644 index 00000000..16ea0704 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/items.json @@ -0,0 +1,227 @@ +[ + { + "description": "a schema given for items", + "schema": { + "items": {"type": "integer"} + }, + "tests": [ + { + "description": "valid items", + "data": [ 1, 2, 3 ], + "valid": true + }, + { + "description": "wrong type of items", + "data": [1, "x"], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": {"foo" : "bar"}, + "valid": true + }, + { + "description": "JavaScript pseudo-array is valid", + "data": { + "0": "invalid", + "length": 1 + }, + "valid": true + } + ] + }, + { + "description": "an array of schemas for items", + "schema": { + "items": [ + {"type": "integer"}, + {"type": "string"} + ] + }, + "tests": [ + { + "description": "correct types", + "data": [ 1, "foo" ], + "valid": true + }, + { + "description": "wrong types", + "data": [ "foo", 1 ], + "valid": false + }, + { + "description": "incomplete array of items", + "data": [ 1 ], + "valid": true + }, + { + "description": "array with additional items", + "data": [ 1, "foo", true ], + "valid": true + }, + { + "description": "empty array", + "data": [ ], + "valid": true + }, + { + "description": "JavaScript pseudo-array is valid", + "data": { + "0": "invalid", + "1": "valid", + "length": 2 + }, + "valid": true + } + ] + }, + { + "description": "items and subitems", + "schema": { + "definitions": { + "item": { + "type": "array", + "additionalItems": false, + "items": [ + { "$ref": "#/definitions/sub-item" }, + { "$ref": "#/definitions/sub-item" } + ] + }, + "sub-item": { + "type": "object", + "required": ["foo"] + } + }, + "type": "array", + "additionalItems": false, + "items": [ + { "$ref": "#/definitions/item" }, + { "$ref": "#/definitions/item" }, + { "$ref": "#/definitions/item" } + ] + }, + "tests": [ + { + "description": "valid items", + "data": [ + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ 
{"foo": null}, {"foo": null} ] + ], + "valid": true + }, + { + "description": "too many items", + "data": [ + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "too many sub-items", + "data": [ + [ {"foo": null}, {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "wrong item", + "data": [ + {"foo": null}, + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "wrong sub-item", + "data": [ + [ {}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "fewer items is valid", + "data": [ + [ {"foo": null} ], + [ {"foo": null} ] + ], + "valid": true + } + ] + }, + { + "description": "nested items", + "schema": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "number" + } + } + } + } + }, + "tests": [ + { + "description": "valid nested array", + "data": [[[[1]], [[2],[3]]], [[[4], [5], [6]]]], + "valid": true + }, + { + "description": "nested array with invalid type", + "data": [[[["1"]], [[2],[3]]], [[[4], [5], [6]]]], + "valid": false + }, + { + "description": "not deep enough", + "data": [[[1], [2],[3]], [[4], [5], [6]]], + "valid": false + } + ] + }, + { + "description": "items with null instance elements", + "schema": { + "items": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + }, + { + "description": "array-form items with null instance elements", + "schema": { + "items": [ + { + "type": "null" + } + ] + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/maxItems.json b/vendor/jsonschema/json/tests/draft4/maxItems.json new file mode 100644 index 00000000..3b53a6b3 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/maxItems.json @@ -0,0 +1,28 @@ +[ + { + "description": "maxItems validation", + "schema": {"maxItems": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": [1], + "valid": true + }, + { + "description": "exact length is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "too long is invalid", + "data": [1, 2, 3], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": "foobar", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/maxLength.json b/vendor/jsonschema/json/tests/draft4/maxLength.json new file mode 100644 index 00000000..811d35b2 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/maxLength.json @@ -0,0 +1,33 @@ +[ + { + "description": "maxLength validation", + "schema": {"maxLength": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": "f", + "valid": true + }, + { + "description": "exact length is valid", + "data": "fo", + "valid": true + }, + { + "description": "too long is invalid", + "data": "foo", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + }, + { + "description": "two supplementary Unicode code points is long enough", + "data": "\uD83D\uDCA9\uD83D\uDCA9", + "valid": true + } + ] + } +] diff --git 
a/vendor/jsonschema/json/tests/draft4/maxProperties.json b/vendor/jsonschema/json/tests/draft4/maxProperties.json new file mode 100644 index 00000000..aa7209f5 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/maxProperties.json @@ -0,0 +1,54 @@ +[ + { + "description": "maxProperties validation", + "schema": {"maxProperties": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "exact length is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "too long is invalid", + "data": {"foo": 1, "bar": 2, "baz": 3}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [1, 2, 3], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "maxProperties = 0 means the object is empty", + "schema": { "maxProperties": 0 }, + "tests": [ + { + "description": "no properties is valid", + "data": {}, + "valid": true + }, + { + "description": "one property is invalid", + "data": { "foo": 1 }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/maximum.json b/vendor/jsonschema/json/tests/draft4/maximum.json new file mode 100644 index 00000000..ccb79c6c --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/maximum.json @@ -0,0 +1,99 @@ +[ + { + "description": "maximum validation", + "schema": {"maximum": 3.0}, + "tests": [ + { + "description": "below the maximum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "boundary point is valid", + "data": 3.0, + "valid": true + }, + { + "description": "above the maximum is invalid", + "data": 3.5, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "maximum validation with unsigned integer", + "schema": {"maximum": 300}, + "tests": [ + { + "description": "below the maximum is invalid", + "data": 299.97, + "valid": true + }, + { + "description": "boundary point integer is valid", + "data": 300, + "valid": true + }, + { + "description": "boundary point float is valid", + "data": 300.00, + "valid": true + }, + { + "description": "above the maximum is invalid", + "data": 300.5, + "valid": false + } + ] + }, + { + "description": "maximum validation (explicit false exclusivity)", + "schema": {"maximum": 3.0, "exclusiveMaximum": false}, + "tests": [ + { + "description": "below the maximum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "boundary point is valid", + "data": 3.0, + "valid": true + }, + { + "description": "above the maximum is invalid", + "data": 3.5, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "exclusiveMaximum validation", + "schema": { + "maximum": 3.0, + "exclusiveMaximum": true + }, + "tests": [ + { + "description": "below the maximum is still valid", + "data": 2.2, + "valid": true + }, + { + "description": "boundary point is invalid", + "data": 3.0, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/minItems.json b/vendor/jsonschema/json/tests/draft4/minItems.json new file mode 100644 index 00000000..ed511881 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/minItems.json @@ -0,0 +1,28 @@ +[ + { + "description": "minItems validation", + "schema": {"minItems": 1}, + "tests": [ + { + "description": "longer is valid", 
+ "data": [1, 2], + "valid": true + }, + { + "description": "exact length is valid", + "data": [1], + "valid": true + }, + { + "description": "too short is invalid", + "data": [], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": "", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/minLength.json b/vendor/jsonschema/json/tests/draft4/minLength.json new file mode 100644 index 00000000..3f09158d --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/minLength.json @@ -0,0 +1,33 @@ +[ + { + "description": "minLength validation", + "schema": {"minLength": 2}, + "tests": [ + { + "description": "longer is valid", + "data": "foo", + "valid": true + }, + { + "description": "exact length is valid", + "data": "fo", + "valid": true + }, + { + "description": "too short is invalid", + "data": "f", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 1, + "valid": true + }, + { + "description": "one supplementary Unicode code point is not long enough", + "data": "\uD83D\uDCA9", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/minProperties.json b/vendor/jsonschema/json/tests/draft4/minProperties.json new file mode 100644 index 00000000..49a0726e --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/minProperties.json @@ -0,0 +1,38 @@ +[ + { + "description": "minProperties validation", + "schema": {"minProperties": 1}, + "tests": [ + { + "description": "longer is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "exact length is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "too short is invalid", + "data": {}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores strings", + "data": "", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/minimum.json b/vendor/jsonschema/json/tests/draft4/minimum.json new file mode 100644 index 00000000..22d310e1 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/minimum.json @@ -0,0 +1,114 @@ +[ + { + "description": "minimum validation", + "schema": {"minimum": 1.1}, + "tests": [ + { + "description": "above the minimum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "boundary point is valid", + "data": 1.1, + "valid": true + }, + { + "description": "below the minimum is invalid", + "data": 0.6, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "minimum validation (explicit false exclusivity)", + "schema": {"minimum": 1.1, "exclusiveMinimum": false}, + "tests": [ + { + "description": "above the minimum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "boundary point is valid", + "data": 1.1, + "valid": true + }, + { + "description": "below the minimum is invalid", + "data": 0.6, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "exclusiveMinimum validation", + "schema": { + "minimum": 1.1, + "exclusiveMinimum": true + }, + "tests": [ + { + "description": "above the minimum is still valid", + "data": 1.2, + "valid": true + }, + { + "description": "boundary point is invalid", + "data": 1.1, + "valid": false + } + ] + }, + { + "description": "minimum validation with signed integer", + "schema": {"minimum": -2}, + "tests": 
[ + { + "description": "negative above the minimum is valid", + "data": -1, + "valid": true + }, + { + "description": "positive above the minimum is valid", + "data": 0, + "valid": true + }, + { + "description": "boundary point is valid", + "data": -2, + "valid": true + }, + { + "description": "boundary point with float is valid", + "data": -2.0, + "valid": true + }, + { + "description": "float below the minimum is invalid", + "data": -2.0001, + "valid": false + }, + { + "description": "int below the minimum is invalid", + "data": -3, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/multipleOf.json b/vendor/jsonschema/json/tests/draft4/multipleOf.json new file mode 100644 index 00000000..9abeb975 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/multipleOf.json @@ -0,0 +1,71 @@ +[ + { + "description": "by int", + "schema": {"multipleOf": 2}, + "tests": [ + { + "description": "int by int", + "data": 10, + "valid": true + }, + { + "description": "int by int fail", + "data": 7, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "by number", + "schema": {"multipleOf": 1.5}, + "tests": [ + { + "description": "zero is multiple of anything", + "data": 0, + "valid": true + }, + { + "description": "4.5 is multiple of 1.5", + "data": 4.5, + "valid": true + }, + { + "description": "35 is not multiple of 1.5", + "data": 35, + "valid": false + } + ] + }, + { + "description": "by small number", + "schema": {"multipleOf": 0.0001}, + "tests": [ + { + "description": "0.0075 is multiple of 0.0001", + "data": 0.0075, + "valid": true + }, + { + "description": "0.00751 is not multiple of 0.0001", + "data": 0.00751, + "valid": false + } + ] + }, + { + "description": "float division = inf", + "schema": {"type": "integer", "multipleOf": 0.123456789}, + "tests": [ + { + "description": "invalid, but naive implementations may raise an overflow error", + "data": 1e308, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/not.json b/vendor/jsonschema/json/tests/draft4/not.json new file mode 100644 index 00000000..cbb7f46b --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/not.json @@ -0,0 +1,96 @@ +[ + { + "description": "not", + "schema": { + "not": {"type": "integer"} + }, + "tests": [ + { + "description": "allowed", + "data": "foo", + "valid": true + }, + { + "description": "disallowed", + "data": 1, + "valid": false + } + ] + }, + { + "description": "not multiple types", + "schema": { + "not": {"type": ["integer", "boolean"]} + }, + "tests": [ + { + "description": "valid", + "data": "foo", + "valid": true + }, + { + "description": "mismatch", + "data": 1, + "valid": false + }, + { + "description": "other mismatch", + "data": true, + "valid": false + } + ] + }, + { + "description": "not more complex schema", + "schema": { + "not": { + "type": "object", + "properties": { + "foo": { + "type": "string" + } + } + } + }, + "tests": [ + { + "description": "match", + "data": 1, + "valid": true + }, + { + "description": "other match", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "mismatch", + "data": {"foo": "bar"}, + "valid": false + } + ] + }, + { + "description": "forbidden property", + "schema": { + "properties": { + "foo": { + "not": {} + } + } + }, + "tests": [ + { + "description": "property present", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + 
"description": "property absent", + "data": {"bar": 1, "baz": 2}, + "valid": true + } + ] + } + +] diff --git a/vendor/jsonschema/json/tests/draft4/oneOf.json b/vendor/jsonschema/json/tests/draft4/oneOf.json new file mode 100644 index 00000000..fb63b089 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/oneOf.json @@ -0,0 +1,230 @@ +[ + { + "description": "oneOf", + "schema": { + "oneOf": [ + { + "type": "integer" + }, + { + "minimum": 2 + } + ] + }, + "tests": [ + { + "description": "first oneOf valid", + "data": 1, + "valid": true + }, + { + "description": "second oneOf valid", + "data": 2.5, + "valid": true + }, + { + "description": "both oneOf valid", + "data": 3, + "valid": false + }, + { + "description": "neither oneOf valid", + "data": 1.5, + "valid": false + } + ] + }, + { + "description": "oneOf with base schema", + "schema": { + "type": "string", + "oneOf" : [ + { + "minLength": 2 + }, + { + "maxLength": 4 + } + ] + }, + "tests": [ + { + "description": "mismatch base schema", + "data": 3, + "valid": false + }, + { + "description": "one oneOf valid", + "data": "foobar", + "valid": true + }, + { + "description": "both oneOf valid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf complex types", + "schema": { + "oneOf": [ + { + "properties": { + "bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "first oneOf valid (complex)", + "data": {"bar": 2}, + "valid": true + }, + { + "description": "second oneOf valid (complex)", + "data": {"foo": "baz"}, + "valid": true + }, + { + "description": "both oneOf valid (complex)", + "data": {"foo": "baz", "bar": 2}, + "valid": false + }, + { + "description": "neither oneOf valid (complex)", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "oneOf with empty schema", + "schema": { + "oneOf": [ + { "type": "number" }, + {} + ] + }, + "tests": [ + { + "description": "one valid - valid", + "data": "foo", + "valid": true + }, + { + "description": "both valid - invalid", + "data": 123, + "valid": false + } + ] + }, + { + "description": "oneOf with required", + "schema": { + "type": "object", + "oneOf": [ + { "required": ["foo", "bar"] }, + { "required": ["foo", "baz"] } + ] + }, + "tests": [ + { + "description": "both invalid - invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "first valid - valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "second valid - valid", + "data": {"foo": 1, "baz": 3}, + "valid": true + }, + { + "description": "both valid - invalid", + "data": {"foo": 1, "bar": 2, "baz" : 3}, + "valid": false + } + ] + }, + { + "description": "oneOf with missing optional property", + "schema": { + "oneOf": [ + { + "properties": { + "bar": {}, + "baz": {} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "first oneOf valid", + "data": {"bar": 8}, + "valid": true + }, + { + "description": "second oneOf valid", + "data": {"foo": "foo"}, + "valid": true + }, + { + "description": "both oneOf valid", + "data": {"foo": "foo", "bar": 8}, + "valid": false + }, + { + "description": "neither oneOf valid", + "data": {"baz": "quux"}, + "valid": false + } + ] + }, + { + "description": "nested oneOf, to check validation semantics", + "schema": { + "oneOf": [ + { + "oneOf": [ + { + "type": "null" + } + ] + } + ] + }, 
+ "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "anything non-null is invalid", + "data": 123, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/optional/bignum.json b/vendor/jsonschema/json/tests/draft4/optional/bignum.json new file mode 100644 index 00000000..1bc8eb21 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/optional/bignum.json @@ -0,0 +1,95 @@ +[ + { + "description": "integer", + "schema": { "type": "integer" }, + "tests": [ + { + "description": "a bignum is an integer", + "data": 12345678910111213141516171819202122232425262728293031, + "valid": true + }, + { + "description": "a negative bignum is an integer", + "data": -12345678910111213141516171819202122232425262728293031, + "valid": true + } + ] + }, + { + "description": "number", + "schema": { "type": "number" }, + "tests": [ + { + "description": "a bignum is a number", + "data": 98249283749234923498293171823948729348710298301928331, + "valid": true + }, + { + "description": "a negative bignum is a number", + "data": -98249283749234923498293171823948729348710298301928331, + "valid": true + } + ] + }, + { + "description": "string", + "schema": { "type": "string" }, + "tests": [ + { + "description": "a bignum is not a string", + "data": 98249283749234923498293171823948729348710298301928331, + "valid": false + } + ] + }, + { + "description": "maximum integer comparison", + "schema": { "maximum": 18446744073709551615 }, + "tests": [ + { + "description": "comparison works for high numbers", + "data": 18446744073709551600, + "valid": true + } + ] + }, + { + "description": "float comparison with high precision", + "schema": { + "maximum": 972783798187987123879878123.18878137, + "exclusiveMaximum": true + }, + "tests": [ + { + "description": "comparison works for high numbers", + "data": 972783798187987123879878123.188781371, + "valid": false + } + ] + }, + { + "description": "minimum integer comparison", + "schema": { "minimum": -18446744073709551615 }, + "tests": [ + { + "description": "comparison works for very negative numbers", + "data": -18446744073709551600, + "valid": true + } + ] + }, + { + "description": "float comparison with high precision on negative numbers", + "schema": { + "minimum": -972783798187987123879878123.18878137, + "exclusiveMinimum": true + }, + "tests": [ + { + "description": "comparison works for very negative numbers", + "data": -972783798187987123879878123.188781371, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/optional/ecmascript-regex.json b/vendor/jsonschema/json/tests/draft4/optional/ecmascript-regex.json new file mode 100644 index 00000000..c431baca --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/optional/ecmascript-regex.json @@ -0,0 +1,552 @@ +[ + { + "description": "ECMA 262 regex $ does not match trailing newline", + "schema": { + "type": "string", + "pattern": "^abc$" + }, + "tests": [ + { + "description": "matches in Python, but not in ECMA 262", + "data": "abc\\n", + "valid": false + }, + { + "description": "matches", + "data": "abc", + "valid": true + } + ] + }, + { + "description": "ECMA 262 regex converts \\t to horizontal tab", + "schema": { + "type": "string", + "pattern": "^\\t$" + }, + "tests": [ + { + "description": "does not match", + "data": "\\t", + "valid": false + }, + { + "description": "matches", + "data": "\u0009", + "valid": true + } + ] + }, + { + "description": "ECMA 262 regex escapes control codes with \\c and upper letter", + 
"schema": { + "type": "string", + "pattern": "^\\cC$" + }, + "tests": [ + { + "description": "does not match", + "data": "\\cC", + "valid": false + }, + { + "description": "matches", + "data": "\u0003", + "valid": true + } + ] + }, + { + "description": "ECMA 262 regex escapes control codes with \\c and lower letter", + "schema": { + "type": "string", + "pattern": "^\\cc$" + }, + "tests": [ + { + "description": "does not match", + "data": "\\cc", + "valid": false + }, + { + "description": "matches", + "data": "\u0003", + "valid": true + } + ] + }, + { + "description": "ECMA 262 \\d matches ascii digits only", + "schema": { + "type": "string", + "pattern": "^\\d$" + }, + "tests": [ + { + "description": "ASCII zero matches", + "data": "0", + "valid": true + }, + { + "description": "NKO DIGIT ZERO does not match (unlike e.g. Python)", + "data": "߀", + "valid": false + }, + { + "description": "NKO DIGIT ZERO (as \\u escape) does not match", + "data": "\u07c0", + "valid": false + } + ] + }, + { + "description": "ECMA 262 \\D matches everything but ascii digits", + "schema": { + "type": "string", + "pattern": "^\\D$" + }, + "tests": [ + { + "description": "ASCII zero does not match", + "data": "0", + "valid": false + }, + { + "description": "NKO DIGIT ZERO matches (unlike e.g. Python)", + "data": "߀", + "valid": true + }, + { + "description": "NKO DIGIT ZERO (as \\u escape) matches", + "data": "\u07c0", + "valid": true + } + ] + }, + { + "description": "ECMA 262 \\w matches ascii letters only", + "schema": { + "type": "string", + "pattern": "^\\w$" + }, + "tests": [ + { + "description": "ASCII 'a' matches", + "data": "a", + "valid": true + }, + { + "description": "latin-1 e-acute does not match (unlike e.g. Python)", + "data": "é", + "valid": false + } + ] + }, + { + "description": "ECMA 262 \\W matches everything but ascii letters", + "schema": { + "type": "string", + "pattern": "^\\W$" + }, + "tests": [ + { + "description": "ASCII 'a' does not match", + "data": "a", + "valid": false + }, + { + "description": "latin-1 e-acute matches (unlike e.g. 
Python)", + "data": "é", + "valid": true + } + ] + }, + { + "description": "ECMA 262 \\s matches whitespace", + "schema": { + "type": "string", + "pattern": "^\\s$" + }, + "tests": [ + { + "description": "ASCII space matches", + "data": " ", + "valid": true + }, + { + "description": "Character tabulation matches", + "data": "\t", + "valid": true + }, + { + "description": "Line tabulation matches", + "data": "\u000b", + "valid": true + }, + { + "description": "Form feed matches", + "data": "\u000c", + "valid": true + }, + { + "description": "latin-1 non-breaking-space matches", + "data": "\u00a0", + "valid": true + }, + { + "description": "zero-width whitespace matches", + "data": "\ufeff", + "valid": true + }, + { + "description": "line feed matches (line terminator)", + "data": "\u000a", + "valid": true + }, + { + "description": "paragraph separator matches (line terminator)", + "data": "\u2029", + "valid": true + }, + { + "description": "EM SPACE matches (Space_Separator)", + "data": "\u2003", + "valid": true + }, + { + "description": "Non-whitespace control does not match", + "data": "\u0001", + "valid": false + }, + { + "description": "Non-whitespace does not match", + "data": "\u2013", + "valid": false + } + ] + }, + { + "description": "ECMA 262 \\S matches everything but whitespace", + "schema": { + "type": "string", + "pattern": "^\\S$" + }, + "tests": [ + { + "description": "ASCII space does not match", + "data": " ", + "valid": false + }, + { + "description": "Character tabulation does not match", + "data": "\t", + "valid": false + }, + { + "description": "Line tabulation does not match", + "data": "\u000b", + "valid": false + }, + { + "description": "Form feed does not match", + "data": "\u000c", + "valid": false + }, + { + "description": "latin-1 non-breaking-space does not match", + "data": "\u00a0", + "valid": false + }, + { + "description": "zero-width whitespace does not match", + "data": "\ufeff", + "valid": false + }, + { + "description": "line feed does not match (line terminator)", + "data": "\u000a", + "valid": false + }, + { + "description": "paragraph separator does not match (line terminator)", + "data": "\u2029", + "valid": false + }, + { + "description": "EM SPACE does not match (Space_Separator)", + "data": "\u2003", + "valid": false + }, + { + "description": "Non-whitespace control matches", + "data": "\u0001", + "valid": true + }, + { + "description": "Non-whitespace matches", + "data": "\u2013", + "valid": true + } + ] + }, + { + "description": "patterns always use unicode semantics with pattern", + "schema": { "pattern": "\\p{Letter}cole" }, + "tests": [ + { + "description": "ascii character in json string", + "data": "Les hivers de mon enfance etaient des saisons longues, longues. Nous vivions en trois lieux: l'ecole, l'eglise et la patinoire; mais la vraie vie etait sur la patinoire.", + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'école, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": true + }, + { + "description": "unicode character in hex format in string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. 
Nous vivions en trois lieux: l'\u00e9cole, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": true + }, + { + "description": "unicode matching is case-sensitive", + "data": "LES HIVERS DE MON ENFANCE ÉTAIENT DES SAISONS LONGUES, LONGUES. NOUS VIVIONS EN TROIS LIEUX: L'ÉCOLE, L'ÉGLISE ET LA PATINOIRE; MAIS LA VRAIE VIE ÉTAIT SUR LA PATINOIRE.", + "valid": false + } + ] + }, + { + "description": "\\w in patterns matches [A-Za-z0-9_], not unicode letters", + "schema": { "pattern": "\\wcole" }, + "tests": [ + { + "description": "ascii character in json string", + "data": "Les hivers de mon enfance etaient des saisons longues, longues. Nous vivions en trois lieux: l'ecole, l'eglise et la patinoire; mais la vraie vie etait sur la patinoire.", + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'école, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'\u00e9cole, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "unicode matching is case-sensitive", + "data": "LES HIVERS DE MON ENFANCE ÉTAIENT DES SAISONS LONGUES, LONGUES. NOUS VIVIONS EN TROIS LIEUX: L'ÉCOLE, L'ÉGLISE ET LA PATINOIRE; MAIS LA VRAIE VIE ÉTAIT SUR LA PATINOIRE.", + "valid": false + } + ] + }, + { + "description": "pattern with ASCII ranges", + "schema": { "pattern": "[a-z]cole" }, + "tests": [ + { + "description": "literal unicode character in json string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'école, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'\u00e9cole, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "ascii characters match", + "data": "Les hivers de mon enfance etaient des saisons longues, longues. 
Nous vivions en trois lieux: l'ecole, l'eglise et la patinoire; mais la vraie vie etait sur la patinoire.", + "valid": true + } + ] + }, + { + "description": "\\d in pattern matches [0-9], not unicode digits", + "schema": { "pattern": "^\\d+$" }, + "tests": [ + { + "description": "ascii digits", + "data": "42", + "valid": true + }, + { + "description": "ascii non-digits", + "data": "-%#", + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": "৪২", + "valid": false + } + ] + }, + { + "description": "pattern with non-ASCII digits", + "schema": { "pattern": "^\\p{digit}+$" }, + "tests": [ + { + "description": "ascii digits", + "data": "42", + "valid": true + }, + { + "description": "ascii non-digits", + "data": "-%#", + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": "৪২", + "valid": true + } + ] + }, + { + "description": "patterns always use unicode semantics with patternProperties", + "schema": { + "type": "object", + "patternProperties": { + "\\p{Letter}cole": {} + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii character in json string", + "data": { "l'ecole": "pas de vraie vie" }, + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": { "l'école": "pas de vraie vie" }, + "valid": true + }, + { + "description": "unicode character in hex format in string", + "data": { "l'\u00e9cole": "pas de vraie vie" }, + "valid": true + }, + { + "description": "unicode matching is case-sensitive", + "data": { "L'ÉCOLE": "PAS DE VRAIE VIE" }, + "valid": false + } + ] + }, + { + "description": "\\w in patternProperties matches [A-Za-z0-9_], not unicode letters", + "schema": { + "type": "object", + "patternProperties": { + "\\wcole": {} + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii character in json string", + "data": { "l'ecole": "pas de vraie vie" }, + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": { "l'école": "pas de vraie vie" }, + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": { "l'\u00e9cole": "pas de vraie vie" }, + "valid": false + }, + { + "description": "unicode matching is case-sensitive", + "data": { "L'ÉCOLE": "PAS DE VRAIE VIE" }, + "valid": false + } + ] + }, + { + "description": "patternProperties with ASCII ranges", + "schema": { + "type": "object", + "patternProperties": { + "[a-z]cole": {} + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "literal unicode character in json string", + "data": { "l'école": "pas de vraie vie" }, + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": { "l'\u00e9cole": "pas de vraie vie" }, + "valid": false + }, + { + "description": "ascii characters match", + "data": { "l'ecole": "pas de vraie vie" }, + "valid": true + } + ] + }, + { + "description": "\\d in patternProperties matches [0-9], not unicode digits", + "schema": { + "type": "object", + "patternProperties": { + "^\\d+$": {} + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii digits", + "data": { "42": "life, the universe, and everything" }, + "valid": true + }, + { + "description": "ascii non-digits", + "data": { "-%#": "spending the year dead for tax reasons" }, + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": { 
"৪২": "khajit has wares if you have coin" }, + "valid": false + } + ] + }, + { + "description": "patternProperties with non-ASCII digits", + "schema": { + "type": "object", + "patternProperties": { + "^\\p{digit}+$": {} + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii digits", + "data": { "42": "life, the universe, and everything" }, + "valid": true + }, + { + "description": "ascii non-digits", + "data": { "-%#": "spending the year dead for tax reasons" }, + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": { "৪২": "khajit has wares if you have coin" }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/optional/float-overflow.json b/vendor/jsonschema/json/tests/draft4/optional/float-overflow.json new file mode 100644 index 00000000..47fd5baa --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/optional/float-overflow.json @@ -0,0 +1,13 @@ +[ + { + "description": "all integers are multiples of 0.5, if overflow is handled", + "schema": {"type": "number", "multipleOf": 0.5}, + "tests": [ + { + "description": "valid if optional overflow handling is implemented", + "data": 1e308, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/optional/format/date-time.json b/vendor/jsonschema/json/tests/draft4/optional/format/date-time.json new file mode 100644 index 00000000..09112737 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/optional/format/date-time.json @@ -0,0 +1,133 @@ +[ + { + "description": "validation of date-time strings", + "schema": { "format": "date-time" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid date-time string", + "data": "1963-06-19T08:30:06.283185Z", + "valid": true + }, + { + "description": "a valid date-time string without second fraction", + "data": "1963-06-19T08:30:06Z", + "valid": true + }, + { + "description": "a valid date-time string with plus offset", + "data": "1937-01-01T12:00:27.87+00:20", + "valid": true + }, + { + "description": "a valid date-time string with minus offset", + "data": "1990-12-31T15:59:50.123-08:00", + "valid": true + }, + { + "description": "a valid date-time with a leap second, UTC", + "data": "1998-12-31T23:59:60Z", + "valid": true + }, + { + "description": "a valid date-time with a leap second, with minus offset", + "data": "1998-12-31T15:59:60.123-08:00", + "valid": true + }, + { + "description": "an invalid date-time past leap second, UTC", + "data": "1998-12-31T23:59:61Z", + "valid": false + }, + { + "description": "an invalid date-time with leap second on a wrong minute, UTC", + "data": "1998-12-31T23:58:60Z", + "valid": false + }, + { + "description": "an invalid date-time with leap second on a wrong hour, UTC", + "data": "1998-12-31T22:59:60Z", + "valid": false + }, + { + "description": "an invalid day in date-time string", + "data": "1990-02-31T15:59:59.123-08:00", + "valid": false + }, + { + "description": "an invalid 
offset in date-time string", + "data": "1990-12-31T15:59:59-24:00", + "valid": false + }, + { + "description": "an invalid closing Z after time-zone offset", + "data": "1963-06-19T08:30:06.28123+01:00Z", + "valid": false + }, + { + "description": "an invalid date-time string", + "data": "06/19/1963 08:30:06 PST", + "valid": false + }, + { + "description": "case-insensitive T and Z", + "data": "1963-06-19t08:30:06.283185z", + "valid": true + }, + { + "description": "only RFC3339 not all of ISO 8601 are valid", + "data": "2013-350T01:01:01", + "valid": false + }, + { + "description": "invalid non-padded month dates", + "data": "1963-6-19T08:30:06.283185Z", + "valid": false + }, + { + "description": "invalid non-padded day dates", + "data": "1963-06-1T08:30:06.283185Z", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4) in date portion", + "data": "1963-06-1৪T00:00:00Z", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4) in time portion", + "data": "1963-06-11T0৪:00:00Z", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/optional/format/email.json b/vendor/jsonschema/json/tests/draft4/optional/format/email.json new file mode 100644 index 00000000..d6761a46 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/optional/format/email.json @@ -0,0 +1,83 @@ +[ + { + "description": "validation of e-mail addresses", + "schema": { "format": "email" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid e-mail address", + "data": "joe.bloggs@example.com", + "valid": true + }, + { + "description": "an invalid e-mail address", + "data": "2962", + "valid": false + }, + { + "description": "tilde in local part is valid", + "data": "te~st@example.com", + "valid": true + }, + { + "description": "tilde before local part is valid", + "data": "~test@example.com", + "valid": true + }, + { + "description": "tilde after local part is valid", + "data": "test~@example.com", + "valid": true + }, + { + "description": "dot before local part is not valid", + "data": ".test@example.com", + "valid": false + }, + { + "description": "dot after local part is not valid", + "data": "test.@example.com", + "valid": false + }, + { + "description": "two separated dots inside local part are valid", + "data": "te.s.t@example.com", + "valid": true + }, + { + "description": "two subsequent dots inside local part are not valid", + "data": "te..st@example.com", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/optional/format/hostname.json b/vendor/jsonschema/json/tests/draft4/optional/format/hostname.json new file mode 100644 index 00000000..8a67fda8 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/optional/format/hostname.json @@ -0,0 +1,98 @@ +[ + { + "description": "validation of host names", + "schema": { "format": "hostname" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all 
string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid host name", + "data": "www.example.com", + "valid": true + }, + { + "description": "a valid punycoded IDN hostname", + "data": "xn--4gbwdl.xn--wgbh1c", + "valid": true + }, + { + "description": "a host name starting with an illegal character", + "data": "-a-host-name-that-starts-with--", + "valid": false + }, + { + "description": "a host name containing illegal characters", + "data": "not_a_valid_host_name", + "valid": false + }, + { + "description": "a host name with a component too long", + "data": "a-vvvvvvvvvvvvvvvveeeeeeeeeeeeeeeerrrrrrrrrrrrrrrryyyyyyyyyyyyyyyy-long-host-name-component", + "valid": false + }, + { + "description": "starts with hyphen", + "data": "-hostname", + "valid": false + }, + { + "description": "ends with hyphen", + "data": "hostname-", + "valid": false + }, + { + "description": "starts with underscore", + "data": "_hostname", + "valid": false + }, + { + "description": "ends with underscore", + "data": "hostname_", + "valid": false + }, + { + "description": "contains underscore", + "data": "host_name", + "valid": false + }, + { + "description": "maximum label length", + "data": "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.com", + "valid": true + }, + { + "description": "exceeds maximum label length", + "data": "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijkl.com", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/optional/format/ipv4.json b/vendor/jsonschema/json/tests/draft4/optional/format/ipv4.json new file mode 100644 index 00000000..4706581f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/optional/format/ipv4.json @@ -0,0 +1,84 @@ +[ + { + "description": "validation of IP addresses", + "schema": { "format": "ipv4" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid IP address", + "data": "192.168.0.1", + "valid": true + }, + { + "description": "an IP address with too many components", + "data": "127.0.0.0.1", + "valid": false + }, + { + "description": "an IP address with out-of-range values", + "data": "256.256.256.256", + "valid": false + }, + { + "description": "an IP address without 4 components", + "data": "127.0", + "valid": false + }, + { + "description": "an IP address as an integer", + "data": "0x7f000001", + "valid": false + }, + { + "description": "an IP address as an integer (decimal)", + "data": "2130706433", + "valid": false + }, + { + "description": "invalid leading zeroes, as they are treated as octals", + "comment": "see 
https://sick.codes/universal-netmask-npm-package-used-by-270000-projects-vulnerable-to-octal-input-data-server-side-request-forgery-remote-file-inclusion-local-file-inclusion-and-more-cve-2021-28918/", + "data": "087.10.0.1", + "valid": false + }, + { + "description": "value without leading zero is valid", + "data": "87.10.0.1", + "valid": true + }, + { + "description": "invalid non-ASCII '২' (a Bengali 2)", + "data": "1২7.0.0.1", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/optional/format/ipv6.json b/vendor/jsonschema/json/tests/draft4/optional/format/ipv6.json new file mode 100644 index 00000000..94368f2a --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/optional/format/ipv6.json @@ -0,0 +1,208 @@ +[ + { + "description": "validation of IPv6 addresses", + "schema": { "format": "ipv6" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid IPv6 address", + "data": "::1", + "valid": true + }, + { + "description": "an IPv6 address with out-of-range values", + "data": "12345::", + "valid": false + }, + { + "description": "trailing 4 hex symbols is valid", + "data": "::abef", + "valid": true + }, + { + "description": "trailing 5 hex symbols is invalid", + "data": "::abcef", + "valid": false + }, + { + "description": "an IPv6 address with too many components", + "data": "1:1:1:1:1:1:1:1:1:1:1:1:1:1:1:1", + "valid": false + }, + { + "description": "an IPv6 address containing illegal characters", + "data": "::laptop", + "valid": false + }, + { + "description": "no digits is valid", + "data": "::", + "valid": true + }, + { + "description": "leading colons is valid", + "data": "::42:ff:1", + "valid": true + }, + { + "description": "trailing colons is valid", + "data": "d6::", + "valid": true + }, + { + "description": "missing leading octet is invalid", + "data": ":2:3:4:5:6:7:8", + "valid": false + }, + { + "description": "missing trailing octet is invalid", + "data": "1:2:3:4:5:6:7:", + "valid": false + }, + { + "description": "missing leading octet with omitted octets later", + "data": ":2:3:4::8", + "valid": false + }, + { + "description": "single set of double colons in the middle is valid", + "data": "1:d6::42", + "valid": true + }, + { + "description": "two sets of double colons is invalid", + "data": "1::d6::42", + "valid": false + }, + { + "description": "mixed format with the ipv4 section as decimal octets", + "data": "1::d6:192.168.0.1", + "valid": true + }, + { + "description": "mixed format with double colons between the sections", + "data": "1:2::192.168.0.1", + "valid": true + }, + { + "description": "mixed format with ipv4 section with octet out of range", + "data": "1::2:192.168.256.1", + "valid": false + }, + { + "description": "mixed format with ipv4 section with a hex octet", + "data": "1::2:192.168.ff.1", + "valid": false + }, + { + "description": "mixed format with leading double colons (ipv4-mapped ipv6 address)", + "data": "::ffff:192.168.0.1", + "valid": true + }, + { + "description": 
"triple colons is invalid", + "data": "1:2:3:4:5:::8", + "valid": false + }, + { + "description": "8 octets", + "data": "1:2:3:4:5:6:7:8", + "valid": true + }, + { + "description": "insufficient octets without double colons", + "data": "1:2:3:4:5:6:7", + "valid": false + }, + { + "description": "no colons is invalid", + "data": "1", + "valid": false + }, + { + "description": "ipv4 is not ipv6", + "data": "127.0.0.1", + "valid": false + }, + { + "description": "ipv4 segment must have 4 octets", + "data": "1:2:3:4:1.2.3", + "valid": false + }, + { + "description": "leading whitespace is invalid", + "data": " ::1", + "valid": false + }, + { + "description": "trailing whitespace is invalid", + "data": "::1 ", + "valid": false + }, + { + "description": "netmask is not a part of ipv6 address", + "data": "fe80::/64", + "valid": false + }, + { + "description": "zone id is not a part of ipv6 address", + "data": "fe80::a%eth1", + "valid": false + }, + { + "description": "a long valid ipv6", + "data": "1000:1000:1000:1000:1000:1000:255.255.255.255", + "valid": true + }, + { + "description": "a long invalid ipv6, below length limit, first", + "data": "100:100:100:100:100:100:255.255.255.255.255", + "valid": false + }, + { + "description": "a long invalid ipv6, below length limit, second", + "data": "100:100:100:100:100:100:100:255.255.255.255", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4)", + "data": "1:2:3:4:5:6:7:৪", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4) in the IPv4 portion", + "data": "1:2::192.16৪.0.1", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/optional/format/unknown.json b/vendor/jsonschema/json/tests/draft4/optional/format/unknown.json new file mode 100644 index 00000000..12339ae5 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/optional/format/unknown.json @@ -0,0 +1,43 @@ +[ + { + "description": "unknown format", + "schema": { "format": "unknown" }, + "tests": [ + { + "description": "unknown formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "unknown formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "unknown formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "unknown formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "unknown formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "unknown formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "unknown formats ignore strings", + "data": "string", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/optional/format/uri.json b/vendor/jsonschema/json/tests/draft4/optional/format/uri.json new file mode 100644 index 00000000..792d71a0 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/optional/format/uri.json @@ -0,0 +1,108 @@ +[ + { + "description": "validation of URIs", + "schema": { "format": "uri" }, + "tests": [ + { + "description": "a valid URL with anchor tag", + "data": "http://foo.bar/?baz=qux#quux", + "valid": true + }, + { + "description": "a valid URL with anchor tag and parentheses", + "data": "http://foo.com/blah_(wikipedia)_blah#cite-1", + "valid": true + }, + { + "description": "a valid URL with URL-encoded stuff", + "data": "http://foo.bar/?q=Test%20URL-encoded%20stuff", + "valid": true + }, + { + "description": "a valid puny-coded URL ", + "data": "http://xn--nw2a.xn--j6w193g/", + "valid": true + }, + { 
+ "description": "a valid URL with many special characters", + "data": "http://-.~_!$&'()*+,;=:%40:80%2f::::::@example.com", + "valid": true + }, + { + "description": "a valid URL based on IPv4", + "data": "http://223.255.255.254", + "valid": true + }, + { + "description": "a valid URL with ftp scheme", + "data": "ftp://ftp.is.co.za/rfc/rfc1808.txt", + "valid": true + }, + { + "description": "a valid URL for a simple text file", + "data": "http://www.ietf.org/rfc/rfc2396.txt", + "valid": true + }, + { + "description": "a valid URL ", + "data": "ldap://[2001:db8::7]/c=GB?objectClass?one", + "valid": true + }, + { + "description": "a valid mailto URI", + "data": "mailto:John.Doe@example.com", + "valid": true + }, + { + "description": "a valid newsgroup URI", + "data": "news:comp.infosystems.www.servers.unix", + "valid": true + }, + { + "description": "a valid tel URI", + "data": "tel:+1-816-555-1212", + "valid": true + }, + { + "description": "a valid URN", + "data": "urn:oasis:names:specification:docbook:dtd:xml:4.1.2", + "valid": true + }, + { + "description": "an invalid protocol-relative URI Reference", + "data": "//foo.bar/?baz=qux#quux", + "valid": false + }, + { + "description": "an invalid relative URI Reference", + "data": "/abc", + "valid": false + }, + { + "description": "an invalid URI", + "data": "\\\\WINDOWS\\fileshare", + "valid": false + }, + { + "description": "an invalid URI though valid URI reference", + "data": "abc", + "valid": false + }, + { + "description": "an invalid URI with spaces", + "data": "http:// shouldfail.com", + "valid": false + }, + { + "description": "an invalid URI with spaces and missing scheme", + "data": ":// should fail", + "valid": false + }, + { + "description": "an invalid URI with comma in scheme", + "data": "bar,baz:foo", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/optional/non-bmp-regex.json b/vendor/jsonschema/json/tests/draft4/optional/non-bmp-regex.json new file mode 100644 index 00000000..dd67af2b --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/optional/non-bmp-regex.json @@ -0,0 +1,82 @@ +[ + { + "description": "Proper UTF-16 surrogate pair handling: pattern", + "comment": "Optional because .Net doesn't correctly handle 32-bit Unicode characters", + "schema": { "pattern": "^ðŸ²*$" }, + "tests": [ + { + "description": "matches empty", + "data": "", + "valid": true + }, + { + "description": "matches single", + "data": "ðŸ²", + "valid": true + }, + { + "description": "matches two", + "data": "ðŸ²ðŸ²", + "valid": true + }, + { + "description": "doesn't match one", + "data": "ðŸ‰", + "valid": false + }, + { + "description": "doesn't match two", + "data": "ðŸ‰ðŸ‰", + "valid": false + }, + { + "description": "doesn't match one ASCII", + "data": "D", + "valid": false + }, + { + "description": "doesn't match two ASCII", + "data": "DD", + "valid": false + } + ] + }, + { + "description": "Proper UTF-16 surrogate pair handling: patternProperties", + "comment": "Optional because .Net doesn't correctly handle 32-bit Unicode characters", + "schema": { + "patternProperties": { + "^ðŸ²*$": { + "type": "integer" + } + } + }, + "tests": [ + { + "description": "matches empty", + "data": { "": 1 }, + "valid": true + }, + { + "description": "matches single", + "data": { "ðŸ²": 1 }, + "valid": true + }, + { + "description": "matches two", + "data": { "ðŸ²ðŸ²": 1 }, + "valid": true + }, + { + "description": "doesn't match one", + "data": { "ðŸ²": "hello" }, + "valid": false + }, + { + "description": "doesn't match 
two", + "data": { "ðŸ²ðŸ²": "hello" }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/optional/zeroTerminatedFloats.json b/vendor/jsonschema/json/tests/draft4/optional/zeroTerminatedFloats.json new file mode 100644 index 00000000..9b50ea27 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/optional/zeroTerminatedFloats.json @@ -0,0 +1,15 @@ +[ + { + "description": "some languages do not distinguish between different types of numeric value", + "schema": { + "type": "integer" + }, + "tests": [ + { + "description": "a float is not an integer even without fractional part", + "data": 1.0, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/pattern.json b/vendor/jsonschema/json/tests/draft4/pattern.json new file mode 100644 index 00000000..92db0f97 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/pattern.json @@ -0,0 +1,59 @@ +[ + { + "description": "pattern validation", + "schema": {"pattern": "^a*$"}, + "tests": [ + { + "description": "a matching pattern is valid", + "data": "aaa", + "valid": true + }, + { + "description": "a non-matching pattern is invalid", + "data": "abc", + "valid": false + }, + { + "description": "ignores booleans", + "data": true, + "valid": true + }, + { + "description": "ignores integers", + "data": 123, + "valid": true + }, + { + "description": "ignores floats", + "data": 1.0, + "valid": true + }, + { + "description": "ignores objects", + "data": {}, + "valid": true + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores null", + "data": null, + "valid": true + } + ] + }, + { + "description": "pattern is not anchored", + "schema": {"pattern": "a+"}, + "tests": [ + { + "description": "matches a substring", + "data": "xxaayy", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/patternProperties.json b/vendor/jsonschema/json/tests/draft4/patternProperties.json new file mode 100644 index 00000000..51c8af3d --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/patternProperties.json @@ -0,0 +1,135 @@ +[ + { + "description": + "patternProperties validates properties matching a regex", + "schema": { + "patternProperties": { + "f.*o": {"type": "integer"} + } + }, + "tests": [ + { + "description": "a single valid match is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "multiple valid matches is valid", + "data": {"foo": 1, "foooooo" : 2}, + "valid": true + }, + { + "description": "a single invalid match is invalid", + "data": {"foo": "bar", "fooooo": 2}, + "valid": false + }, + { + "description": "multiple invalid matches is invalid", + "data": {"foo": "bar", "foooooo" : "baz"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores strings", + "data": "", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "multiple simultaneous patternProperties are validated", + "schema": { + "patternProperties": { + "a*": {"type": "integer"}, + "aaa*": {"maximum": 20} + } + }, + "tests": [ + { + "description": "a single valid match is valid", + "data": {"a": 21}, + "valid": true + }, + { + "description": "a simultaneous match is valid", + "data": {"aaaa": 18}, + "valid": true + }, + { + "description": "multiple matches is valid", + "data": {"a": 21, "aaaa": 18}, + "valid": true + }, + { + "description": "an invalid due to one is invalid", + "data": {"a": 
"bar"}, + "valid": false + }, + { + "description": "an invalid due to the other is invalid", + "data": {"aaaa": 31}, + "valid": false + }, + { + "description": "an invalid due to both is invalid", + "data": {"aaa": "foo", "aaaa": 31}, + "valid": false + } + ] + }, + { + "description": "regexes are not anchored by default and are case sensitive", + "schema": { + "patternProperties": { + "[0-9]{2,}": { "type": "boolean" }, + "X_": { "type": "string" } + } + }, + "tests": [ + { + "description": "non recognized members are ignored", + "data": { "answer 1": "42" }, + "valid": true + }, + { + "description": "recognized members are accounted for", + "data": { "a31b": null }, + "valid": false + }, + { + "description": "regexes are case sensitive", + "data": { "a_x_3": 3 }, + "valid": true + }, + { + "description": "regexes are case sensitive, 2", + "data": { "a_X_3": 3 }, + "valid": false + } + ] + }, + { + "description": "patternProperties with null valued instance properties", + "schema": { + "patternProperties": { + "^.*bar$": {"type": "null"} + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foobar": null}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/properties.json b/vendor/jsonschema/json/tests/draft4/properties.json new file mode 100644 index 00000000..195159e6 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/properties.json @@ -0,0 +1,205 @@ +[ + { + "description": "object properties validation", + "schema": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "both properties present and valid is valid", + "data": {"foo": 1, "bar": "baz"}, + "valid": true + }, + { + "description": "one property invalid is invalid", + "data": {"foo": 1, "bar": {}}, + "valid": false + }, + { + "description": "both properties invalid is invalid", + "data": {"foo": [], "bar": {}}, + "valid": false + }, + { + "description": "doesn't invalidate other properties", + "data": {"quux": []}, + "valid": true + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": + "properties, patternProperties, additionalProperties interaction", + "schema": { + "properties": { + "foo": {"type": "array", "maxItems": 3}, + "bar": {"type": "array"} + }, + "patternProperties": {"f.o": {"minItems": 2}}, + "additionalProperties": {"type": "integer"} + }, + "tests": [ + { + "description": "property validates property", + "data": {"foo": [1, 2]}, + "valid": true + }, + { + "description": "property invalidates property", + "data": {"foo": [1, 2, 3, 4]}, + "valid": false + }, + { + "description": "patternProperty invalidates property", + "data": {"foo": []}, + "valid": false + }, + { + "description": "patternProperty validates nonproperty", + "data": {"fxo": [1, 2]}, + "valid": true + }, + { + "description": "patternProperty invalidates nonproperty", + "data": {"fxo": []}, + "valid": false + }, + { + "description": "additionalProperty ignores property", + "data": {"bar": []}, + "valid": true + }, + { + "description": "additionalProperty validates others", + "data": {"quux": 3}, + "valid": true + }, + { + "description": "additionalProperty invalidates others", + "data": {"quux": "foo"}, + "valid": false + } + ] + }, + { + "description": "properties with escaped characters", + "schema": { + "properties": { + "foo\nbar": {"type": "number"}, + "foo\"bar": {"type": "number"}, 
+ "foo\\bar": {"type": "number"}, + "foo\rbar": {"type": "number"}, + "foo\tbar": {"type": "number"}, + "foo\fbar": {"type": "number"} + } + }, + "tests": [ + { + "description": "object with all numbers is valid", + "data": { + "foo\nbar": 1, + "foo\"bar": 1, + "foo\\bar": 1, + "foo\rbar": 1, + "foo\tbar": 1, + "foo\fbar": 1 + }, + "valid": true + }, + { + "description": "object with strings is invalid", + "data": { + "foo\nbar": "1", + "foo\"bar": "1", + "foo\\bar": "1", + "foo\rbar": "1", + "foo\tbar": "1", + "foo\fbar": "1" + }, + "valid": false + } + ] + }, + { + "description": "properties with null valued instance properties", + "schema": { + "properties": { + "foo": {"type": "null"} + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foo": null}, + "valid": true + } + ] + }, + { + "description": "properties whose names are Javascript object property names", + "comment": "Ensure JS implementations don't universally consider e.g. __proto__ to always be present in an object.", + "schema": { + "properties": { + "__proto__": {"type": "number"}, + "toString": { + "properties": { "length": { "type": "string" } } + }, + "constructor": {"type": "number"} + } + }, + "tests": [ + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + }, + { + "description": "none of the properties mentioned", + "data": {}, + "valid": true + }, + { + "description": "__proto__ not valid", + "data": { "__proto__": "foo" }, + "valid": false + }, + { + "description": "toString not valid", + "data": { "toString": { "length": 37 } }, + "valid": false + }, + { + "description": "constructor not valid", + "data": { "constructor": { "length": 37 } }, + "valid": false + }, + { + "description": "all present and valid", + "data": { + "__proto__": 12, + "toString": { "length": "foo" }, + "constructor": 37 + }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/ref.json b/vendor/jsonschema/json/tests/draft4/ref.json new file mode 100644 index 00000000..b714fb0a --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/ref.json @@ -0,0 +1,507 @@ +[ + { + "description": "root pointer ref", + "schema": { + "properties": { + "foo": {"$ref": "#"} + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "match", + "data": {"foo": false}, + "valid": true + }, + { + "description": "recursive match", + "data": {"foo": {"foo": false}}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": false}, + "valid": false + }, + { + "description": "recursive mismatch", + "data": {"foo": {"bar": false}}, + "valid": false + } + ] + }, + { + "description": "relative pointer ref to object", + "schema": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"$ref": "#/properties/foo"} + } + }, + "tests": [ + { + "description": "match", + "data": {"bar": 3}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": true}, + "valid": false + } + ] + }, + { + "description": "relative pointer ref to array", + "schema": { + "items": [ + {"type": "integer"}, + {"$ref": "#/items/0"} + ] + }, + "tests": [ + { + "description": "match array", + "data": [1, 2], + "valid": true + }, + { + "description": "mismatch array", + "data": [1, "foo"], + "valid": false + } + ] + }, + { + "description": "escaped pointer ref", + "schema": { + "definitions": { + "tilde~field": {"type": "integer"}, + "slash/field": {"type": "integer"}, + "percent%field": {"type": 
"integer"} + }, + "properties": { + "tilde": {"$ref": "#/definitions/tilde~0field"}, + "slash": {"$ref": "#/definitions/slash~1field"}, + "percent": {"$ref": "#/definitions/percent%25field"} + } + }, + "tests": [ + { + "description": "slash invalid", + "data": {"slash": "aoeu"}, + "valid": false + }, + { + "description": "tilde invalid", + "data": {"tilde": "aoeu"}, + "valid": false + }, + { + "description": "percent invalid", + "data": {"percent": "aoeu"}, + "valid": false + }, + { + "description": "slash valid", + "data": {"slash": 123}, + "valid": true + }, + { + "description": "tilde valid", + "data": {"tilde": 123}, + "valid": true + }, + { + "description": "percent valid", + "data": {"percent": 123}, + "valid": true + } + ] + }, + { + "description": "nested refs", + "schema": { + "definitions": { + "a": {"type": "integer"}, + "b": {"$ref": "#/definitions/a"}, + "c": {"$ref": "#/definitions/b"} + }, + "allOf": [{ "$ref": "#/definitions/c" }] + }, + "tests": [ + { + "description": "nested ref valid", + "data": 5, + "valid": true + }, + { + "description": "nested ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "ref overrides any sibling keywords", + "schema": { + "definitions": { + "reffed": { + "type": "array" + } + }, + "properties": { + "foo": { + "$ref": "#/definitions/reffed", + "maxItems": 2 + } + } + }, + "tests": [ + { + "description": "ref valid", + "data": { "foo": [] }, + "valid": true + }, + { + "description": "ref valid, maxItems ignored", + "data": { "foo": [ 1, 2, 3] }, + "valid": true + }, + { + "description": "ref invalid", + "data": { "foo": "string" }, + "valid": false + } + ] + }, + { + "description": "$ref prevents a sibling id from changing the base uri", + "schema": { + "id": "http://localhost:1234/sibling_id/base/", + "definitions": { + "foo": { + "id": "http://localhost:1234/sibling_id/foo.json", + "type": "string" + }, + "base_foo": { + "$comment": "this canonical uri is http://localhost:1234/sibling_id/base/foo.json", + "id": "foo.json", + "type": "number" + } + }, + "allOf": [ + { + "$comment": "$ref resolves to http://localhost:1234/sibling_id/base/foo.json, not http://localhost:1234/sibling_id/foo.json", + "id": "http://localhost:1234/sibling_id/", + "$ref": "foo.json" + } + ] + }, + "tests": [ + { + "description": "$ref resolves to /definitions/base_foo, data does not validate", + "data": "a", + "valid": false + }, + { + "description": "$ref resolves to /definitions/base_foo, data validates", + "data": 1, + "valid": true + } + ] + }, + { + "description": "remote ref, containing refs itself", + "schema": {"$ref": "http://json-schema.org/draft-04/schema#"}, + "tests": [ + { + "description": "remote ref valid", + "data": {"minLength": 1}, + "valid": true + }, + { + "description": "remote ref invalid", + "data": {"minLength": -1}, + "valid": false + } + ] + }, + { + "description": "property named $ref that is not a reference", + "schema": { + "properties": { + "$ref": {"type": "string"} + } + }, + "tests": [ + { + "description": "property named $ref valid", + "data": {"$ref": "a"}, + "valid": true + }, + { + "description": "property named $ref invalid", + "data": {"$ref": 2}, + "valid": false + } + ] + }, + { + "description": "property named $ref, containing an actual $ref", + "schema": { + "properties": { + "$ref": {"$ref": "#/definitions/is-string"} + }, + "definitions": { + "is-string": { + "type": "string" + } + } + }, + "tests": [ + { + "description": "property named $ref valid", + "data": {"$ref": "a"}, + "valid": true + 
}, + { + "description": "property named $ref invalid", + "data": {"$ref": 2}, + "valid": false + } + ] + }, + { + "description": "Recursive references between schemas", + "schema": { + "id": "http://localhost:1234/tree", + "description": "tree of nodes", + "type": "object", + "properties": { + "meta": {"type": "string"}, + "nodes": { + "type": "array", + "items": {"$ref": "node"} + } + }, + "required": ["meta", "nodes"], + "definitions": { + "node": { + "id": "http://localhost:1234/node", + "description": "node", + "type": "object", + "properties": { + "value": {"type": "number"}, + "subtree": {"$ref": "tree"} + }, + "required": ["value"] + } + } + }, + "tests": [ + { + "description": "valid tree", + "data": { + "meta": "root", + "nodes": [ + { + "value": 1, + "subtree": { + "meta": "child", + "nodes": [ + {"value": 1.1}, + {"value": 1.2} + ] + } + }, + { + "value": 2, + "subtree": { + "meta": "child", + "nodes": [ + {"value": 2.1}, + {"value": 2.2} + ] + } + } + ] + }, + "valid": true + }, + { + "description": "invalid tree", + "data": { + "meta": "root", + "nodes": [ + { + "value": 1, + "subtree": { + "meta": "child", + "nodes": [ + {"value": "string is invalid"}, + {"value": 1.2} + ] + } + }, + { + "value": 2, + "subtree": { + "meta": "child", + "nodes": [ + {"value": 2.1}, + {"value": 2.2} + ] + } + } + ] + }, + "valid": false + } + ] + }, + { + "description": "refs with quote", + "schema": { + "properties": { + "foo\"bar": {"$ref": "#/definitions/foo%22bar"} + }, + "definitions": { + "foo\"bar": {"type": "number"} + } + }, + "tests": [ + { + "description": "object with numbers is valid", + "data": { + "foo\"bar": 1 + }, + "valid": true + }, + { + "description": "object with strings is invalid", + "data": { + "foo\"bar": "1" + }, + "valid": false + } + ] + }, + { + "description": "Location-independent identifier", + "schema": { + "allOf": [{ + "$ref": "#foo" + }], + "definitions": { + "A": { + "id": "#foo", + "type": "integer" + } + } + }, + "tests": [ + { + "data": 1, + "description": "match", + "valid": true + }, + { + "data": "a", + "description": "mismatch", + "valid": false + } + ] + }, + { + "description": "Location-independent identifier with base URI change in subschema", + "schema": { + "id": "http://localhost:1234/root", + "allOf": [{ + "$ref": "http://localhost:1234/nested.json#foo" + }], + "definitions": { + "A": { + "id": "nested.json", + "definitions": { + "B": { + "id": "#foo", + "type": "integer" + } + } + } + } + }, + "tests": [ + { + "data": 1, + "description": "match", + "valid": true + }, + { + "data": "a", + "description": "mismatch", + "valid": false + } + ] + }, + { + "description": "naive replacement of $ref with its destination is not correct", + "schema": { + "definitions": { + "a_string": { "type": "string" } + }, + "enum": [ + { "$ref": "#/definitions/a_string" } + ] + }, + "tests": [ + { + "description": "do not evaluate the $ref inside the enum, matching any string", + "data": "this is a string", + "valid": false + }, + { + "description": "match the enum exactly", + "data": { "$ref": "#/definitions/a_string" }, + "valid": true + } + ] + }, + { + "description": "id must be resolved against nearest parent, not just immediate parent", + "schema": { + "id": "http://example.com/a.json", + "definitions": { + "x": { + "id": "http://example.com/b/c.json", + "not": { + "definitions": { + "y": { + "id": "d.json", + "type": "number" + } + } + } + } + }, + "allOf": [ + { + "$ref": "http://example.com/b/d.json" + } + ] + }, + "tests": [ + { + "description": "number is 
valid", + "data": 1, + "valid": true + }, + { + "description": "non-number is invalid", + "data": "a", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/refRemote.json b/vendor/jsonschema/json/tests/draft4/refRemote.json new file mode 100644 index 00000000..412c9ff8 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/refRemote.json @@ -0,0 +1,189 @@ +[ + { + "description": "remote ref", + "schema": {"$ref": "http://localhost:1234/integer.json"}, + "tests": [ + { + "description": "remote ref valid", + "data": 1, + "valid": true + }, + { + "description": "remote ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "fragment within remote ref", + "schema": {"$ref": "http://localhost:1234/subSchemas.json#/integer"}, + "tests": [ + { + "description": "remote fragment valid", + "data": 1, + "valid": true + }, + { + "description": "remote fragment invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "ref within remote ref", + "schema": { + "$ref": "http://localhost:1234/subSchemas.json#/refToInteger" + }, + "tests": [ + { + "description": "ref within ref valid", + "data": 1, + "valid": true + }, + { + "description": "ref within ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "base URI change", + "schema": { + "id": "http://localhost:1234/", + "items": { + "id": "baseUriChange/", + "items": {"$ref": "folderInteger.json"} + } + }, + "tests": [ + { + "description": "base URI change ref valid", + "data": [[1]], + "valid": true + }, + { + "description": "base URI change ref invalid", + "data": [["a"]], + "valid": false + } + ] + }, + { + "description": "base URI change - change folder", + "schema": { + "id": "http://localhost:1234/scope_change_defs1.json", + "type" : "object", + "properties": { + "list": {"$ref": "#/definitions/baz"} + }, + "definitions": { + "baz": { + "id": "baseUriChangeFolder/", + "type": "array", + "items": {"$ref": "folderInteger.json"} + } + } + }, + "tests": [ + { + "description": "number is valid", + "data": {"list": [1]}, + "valid": true + }, + { + "description": "string is invalid", + "data": {"list": ["a"]}, + "valid": false + } + ] + }, + { + "description": "base URI change - change folder in subschema", + "schema": { + "id": "http://localhost:1234/scope_change_defs2.json", + "type" : "object", + "properties": { + "list": {"$ref": "#/definitions/baz/definitions/bar"} + }, + "definitions": { + "baz": { + "id": "baseUriChangeFolderInSubschema/", + "definitions": { + "bar": { + "type": "array", + "items": {"$ref": "folderInteger.json"} + } + } + } + } + }, + "tests": [ + { + "description": "number is valid", + "data": {"list": [1]}, + "valid": true + }, + { + "description": "string is invalid", + "data": {"list": ["a"]}, + "valid": false + } + ] + }, + { + "description": "root ref in remote ref", + "schema": { + "id": "http://localhost:1234/object", + "type": "object", + "properties": { + "name": {"$ref": "name.json#/definitions/orNull"} + } + }, + "tests": [ + { + "description": "string is valid", + "data": { + "name": "foo" + }, + "valid": true + }, + { + "description": "null is valid", + "data": { + "name": null + }, + "valid": true + }, + { + "description": "object is invalid", + "data": { + "name": { + "name": null + } + }, + "valid": false + } + ] + }, + { + "description": "Location-independent identifier in remote ref", + "schema": { + "$ref": "http://localhost:1234/locationIndependentIdentifierDraft4.json#/definitions/refToInteger" + }, + "tests": [ + 
{ + "description": "integer is valid", + "data": 1, + "valid": true + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/required.json b/vendor/jsonschema/json/tests/draft4/required.json new file mode 100644 index 00000000..6ccfdc2d --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/required.json @@ -0,0 +1,135 @@ +[ + { + "description": "required validation", + "schema": { + "properties": { + "foo": {}, + "bar": {} + }, + "required": ["foo"] + }, + "tests": [ + { + "description": "present required property is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "non-present required property is invalid", + "data": {"bar": 1}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores strings", + "data": "", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "required default validation", + "schema": { + "properties": { + "foo": {} + } + }, + "tests": [ + { + "description": "not required by default", + "data": {}, + "valid": true + } + ] + }, + { + "description": "required with escaped characters", + "schema": { + "required": [ + "foo\nbar", + "foo\"bar", + "foo\\bar", + "foo\rbar", + "foo\tbar", + "foo\fbar" + ] + }, + "tests": [ + { + "description": "object with all properties present is valid", + "data": { + "foo\nbar": 1, + "foo\"bar": 1, + "foo\\bar": 1, + "foo\rbar": 1, + "foo\tbar": 1, + "foo\fbar": 1 + }, + "valid": true + }, + { + "description": "object with some properties missing is invalid", + "data": { + "foo\nbar": "1", + "foo\"bar": "1" + }, + "valid": false + } + ] + }, + { + "description": "required properties whose names are Javascript object property names", + "comment": "Ensure JS implementations don't universally consider e.g. 
__proto__ to always be present in an object.", + "schema": { "required": ["__proto__", "toString", "constructor"] }, + "tests": [ + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + }, + { + "description": "none of the properties mentioned", + "data": {}, + "valid": false + }, + { + "description": "__proto__ present", + "data": { "__proto__": "foo" }, + "valid": false + }, + { + "description": "toString present", + "data": { "toString": { "length": 37 } }, + "valid": false + }, + { + "description": "constructor present", + "data": { "constructor": { "length": 37 } }, + "valid": false + }, + { + "description": "all present", + "data": { + "__proto__": 12, + "toString": { "length": "foo" }, + "constructor": 37 + }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/type.json b/vendor/jsonschema/json/tests/draft4/type.json new file mode 100644 index 00000000..df46677b --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/type.json @@ -0,0 +1,469 @@ +[ + { + "description": "integer type matches integers", + "schema": {"type": "integer"}, + "tests": [ + { + "description": "an integer is an integer", + "data": 1, + "valid": true + }, + { + "description": "a float is not an integer", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an integer", + "data": "foo", + "valid": false + }, + { + "description": "a string is still not an integer, even if it looks like one", + "data": "1", + "valid": false + }, + { + "description": "an object is not an integer", + "data": {}, + "valid": false + }, + { + "description": "an array is not an integer", + "data": [], + "valid": false + }, + { + "description": "a boolean is not an integer", + "data": true, + "valid": false + }, + { + "description": "null is not an integer", + "data": null, + "valid": false + } + ] + }, + { + "description": "number type matches numbers", + "schema": {"type": "number"}, + "tests": [ + { + "description": "an integer is a number", + "data": 1, + "valid": true + }, + { + "description": "a float with zero fractional part is a number", + "data": 1.0, + "valid": true + }, + { + "description": "a float is a number", + "data": 1.1, + "valid": true + }, + { + "description": "a string is not a number", + "data": "foo", + "valid": false + }, + { + "description": "a string is still not a number, even if it looks like one", + "data": "1", + "valid": false + }, + { + "description": "an object is not a number", + "data": {}, + "valid": false + }, + { + "description": "an array is not a number", + "data": [], + "valid": false + }, + { + "description": "a boolean is not a number", + "data": true, + "valid": false + }, + { + "description": "null is not a number", + "data": null, + "valid": false + } + ] + }, + { + "description": "string type matches strings", + "schema": {"type": "string"}, + "tests": [ + { + "description": "1 is not a string", + "data": 1, + "valid": false + }, + { + "description": "a float is not a string", + "data": 1.1, + "valid": false + }, + { + "description": "a string is a string", + "data": "foo", + "valid": true + }, + { + "description": "a string is still a string, even if it looks like a number", + "data": "1", + "valid": true + }, + { + "description": "an empty string is still a string", + "data": "", + "valid": true + }, + { + "description": "an object is not a string", + "data": {}, + "valid": false + }, + { + "description": "an array is not a string", + 
"data": [], + "valid": false + }, + { + "description": "a boolean is not a string", + "data": true, + "valid": false + }, + { + "description": "null is not a string", + "data": null, + "valid": false + } + ] + }, + { + "description": "object type matches objects", + "schema": {"type": "object"}, + "tests": [ + { + "description": "an integer is not an object", + "data": 1, + "valid": false + }, + { + "description": "a float is not an object", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an object", + "data": "foo", + "valid": false + }, + { + "description": "an object is an object", + "data": {}, + "valid": true + }, + { + "description": "an array is not an object", + "data": [], + "valid": false + }, + { + "description": "a boolean is not an object", + "data": true, + "valid": false + }, + { + "description": "null is not an object", + "data": null, + "valid": false + } + ] + }, + { + "description": "array type matches arrays", + "schema": {"type": "array"}, + "tests": [ + { + "description": "an integer is not an array", + "data": 1, + "valid": false + }, + { + "description": "a float is not an array", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an array", + "data": "foo", + "valid": false + }, + { + "description": "an object is not an array", + "data": {}, + "valid": false + }, + { + "description": "an array is an array", + "data": [], + "valid": true + }, + { + "description": "a boolean is not an array", + "data": true, + "valid": false + }, + { + "description": "null is not an array", + "data": null, + "valid": false + } + ] + }, + { + "description": "boolean type matches booleans", + "schema": {"type": "boolean"}, + "tests": [ + { + "description": "an integer is not a boolean", + "data": 1, + "valid": false + }, + { + "description": "zero is not a boolean", + "data": 0, + "valid": false + }, + { + "description": "a float is not a boolean", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not a boolean", + "data": "foo", + "valid": false + }, + { + "description": "an empty string is not a boolean", + "data": "", + "valid": false + }, + { + "description": "an object is not a boolean", + "data": {}, + "valid": false + }, + { + "description": "an array is not a boolean", + "data": [], + "valid": false + }, + { + "description": "true is a boolean", + "data": true, + "valid": true + }, + { + "description": "false is a boolean", + "data": false, + "valid": true + }, + { + "description": "null is not a boolean", + "data": null, + "valid": false + } + ] + }, + { + "description": "null type matches only the null object", + "schema": {"type": "null"}, + "tests": [ + { + "description": "an integer is not null", + "data": 1, + "valid": false + }, + { + "description": "a float is not null", + "data": 1.1, + "valid": false + }, + { + "description": "zero is not null", + "data": 0, + "valid": false + }, + { + "description": "a string is not null", + "data": "foo", + "valid": false + }, + { + "description": "an empty string is not null", + "data": "", + "valid": false + }, + { + "description": "an object is not null", + "data": {}, + "valid": false + }, + { + "description": "an array is not null", + "data": [], + "valid": false + }, + { + "description": "true is not null", + "data": true, + "valid": false + }, + { + "description": "false is not null", + "data": false, + "valid": false + }, + { + "description": "null is null", + "data": null, + "valid": true + } + ] + }, + { + "description": "multiple types can be 
specified in an array", + "schema": {"type": ["integer", "string"]}, + "tests": [ + { + "description": "an integer is valid", + "data": 1, + "valid": true + }, + { + "description": "a string is valid", + "data": "foo", + "valid": true + }, + { + "description": "a float is invalid", + "data": 1.1, + "valid": false + }, + { + "description": "an object is invalid", + "data": {}, + "valid": false + }, + { + "description": "an array is invalid", + "data": [], + "valid": false + }, + { + "description": "a boolean is invalid", + "data": true, + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + } + ] + }, + { + "description": "type as array with one item", + "schema": { + "type": ["string"] + }, + "tests": [ + { + "description": "string is valid", + "data": "foo", + "valid": true + }, + { + "description": "number is invalid", + "data": 123, + "valid": false + } + ] + }, + { + "description": "type: array or object", + "schema": { + "type": ["array", "object"] + }, + "tests": [ + { + "description": "array is valid", + "data": [1,2,3], + "valid": true + }, + { + "description": "object is valid", + "data": {"foo": 123}, + "valid": true + }, + { + "description": "number is invalid", + "data": 123, + "valid": false + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + } + ] + }, + { + "description": "type: array, object or null", + "schema": { + "type": ["array", "object", "null"] + }, + "tests": [ + { + "description": "array is valid", + "data": [1,2,3], + "valid": true + }, + { + "description": "object is valid", + "data": {"foo": 123}, + "valid": true + }, + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "number is invalid", + "data": 123, + "valid": false + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft4/uniqueItems.json b/vendor/jsonschema/json/tests/draft4/uniqueItems.json new file mode 100644 index 00000000..2ccf666d --- /dev/null +++ b/vendor/jsonschema/json/tests/draft4/uniqueItems.json @@ -0,0 +1,404 @@ +[ + { + "description": "uniqueItems validation", + "schema": {"uniqueItems": true}, + "tests": [ + { + "description": "unique array of integers is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "non-unique array of integers is invalid", + "data": [1, 1], + "valid": false + }, + { + "description": "non-unique array of more than two integers is invalid", + "data": [1, 2, 1], + "valid": false + }, + { + "description": "numbers are unique if mathematically unequal", + "data": [1.0, 1.00, 1], + "valid": false + }, + { + "description": "false is not equal to zero", + "data": [0, false], + "valid": true + }, + { + "description": "true is not equal to one", + "data": [1, true], + "valid": true + }, + { + "description": "unique array of strings is valid", + "data": ["foo", "bar", "baz"], + "valid": true + }, + { + "description": "non-unique array of strings is invalid", + "data": ["foo", "bar", "foo"], + "valid": false + }, + { + "description": "unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "baz"}], + "valid": true + }, + { + "description": "non-unique array of objects is invalid", + "data": [{"foo": "bar"}, {"foo": "bar"}], + "valid": false + }, + { + "description": "unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": 
{"bar" : {"baz" : false}}} + ], + "valid": true + }, + { + "description": "non-unique array of nested objects is invalid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : true}}} + ], + "valid": false + }, + { + "description": "unique array of arrays is valid", + "data": [["foo"], ["bar"]], + "valid": true + }, + { + "description": "non-unique array of arrays is invalid", + "data": [["foo"], ["foo"]], + "valid": false + }, + { + "description": "non-unique array of more than two arrays is invalid", + "data": [["foo"], ["bar"], ["foo"]], + "valid": false + }, + { + "description": "1 and true are unique", + "data": [1, true], + "valid": true + }, + { + "description": "0 and false are unique", + "data": [0, false], + "valid": true + }, + { + "description": "[1] and [true] are unique", + "data": [[1], [true]], + "valid": true + }, + { + "description": "[0] and [false] are unique", + "data": [[0], [false]], + "valid": true + }, + { + "description": "nested [1] and [true] are unique", + "data": [[[1], "foo"], [[true], "foo"]], + "valid": true + }, + { + "description": "nested [0] and [false] are unique", + "data": [[[0], "foo"], [[false], "foo"]], + "valid": true + }, + { + "description": "unique heterogeneous types are valid", + "data": [{}, [1], true, null, 1, "{}"], + "valid": true + }, + { + "description": "non-unique heterogeneous types are invalid", + "data": [{}, [1], true, null, {}, 1], + "valid": false + }, + { + "description": "different objects are unique", + "data": [{"a": 1, "b": 2}, {"a": 2, "b": 1}], + "valid": true + }, + { + "description": "objects are non-unique despite key order", + "data": [{"a": 1, "b": 2}, {"b": 2, "a": 1}], + "valid": false + }, + { + "description": "{\"a\": false} and {\"a\": 0} are unique", + "data": [{"a": false}, {"a": 0}], + "valid": true + }, + { + "description": "{\"a\": true} and {\"a\": 1} are unique", + "data": [{"a": true}, {"a": 1}], + "valid": true + } + ] + }, + { + "description": "uniqueItems with an array of items", + "schema": { + "items": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": true + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is not valid", + "data": [false, false], + "valid": false + }, + { + "description": "[true, true] from items array is not valid", + "data": [true, true], + "valid": false + }, + { + "description": "unique array extended from [false, true] is valid", + "data": [false, true, "foo", "bar"], + "valid": true + }, + { + "description": "unique array extended from [true, false] is valid", + "data": [true, false, "foo", "bar"], + "valid": true + }, + { + "description": "non-unique array extended from [false, true] is not valid", + "data": [false, true, "foo", "foo"], + "valid": false + }, + { + "description": "non-unique array extended from [true, false] is not valid", + "data": [true, false, "foo", "foo"], + "valid": false + } + ] + }, + { + "description": "uniqueItems with an array of items and additionalItems=false", + "schema": { + "items": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": true, + "additionalItems": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": 
[true, false], + "valid": true + }, + { + "description": "[false, false] from items array is not valid", + "data": [false, false], + "valid": false + }, + { + "description": "[true, true] from items array is not valid", + "data": [true, true], + "valid": false + }, + { + "description": "extra items are invalid even if unique", + "data": [false, true, null], + "valid": false + } + ] + }, + { + "description": "uniqueItems=false validation", + "schema": { "uniqueItems": false }, + "tests": [ + { + "description": "unique array of integers is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "non-unique array of integers is valid", + "data": [1, 1], + "valid": true + }, + { + "description": "numbers are unique if mathematically unequal", + "data": [1.0, 1.00, 1], + "valid": true + }, + { + "description": "false is not equal to zero", + "data": [0, false], + "valid": true + }, + { + "description": "true is not equal to one", + "data": [1, true], + "valid": true + }, + { + "description": "unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "baz"}], + "valid": true + }, + { + "description": "non-unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "bar"}], + "valid": true + }, + { + "description": "unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : false}}} + ], + "valid": true + }, + { + "description": "non-unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : true}}} + ], + "valid": true + }, + { + "description": "unique array of arrays is valid", + "data": [["foo"], ["bar"]], + "valid": true + }, + { + "description": "non-unique array of arrays is valid", + "data": [["foo"], ["foo"]], + "valid": true + }, + { + "description": "1 and true are unique", + "data": [1, true], + "valid": true + }, + { + "description": "0 and false are unique", + "data": [0, false], + "valid": true + }, + { + "description": "unique heterogeneous types are valid", + "data": [{}, [1], true, null, 1], + "valid": true + }, + { + "description": "non-unique heterogeneous types are valid", + "data": [{}, [1], true, null, {}, 1], + "valid": true + } + ] + }, + { + "description": "uniqueItems=false with an array of items", + "schema": { + "items": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is valid", + "data": [false, false], + "valid": true + }, + { + "description": "[true, true] from items array is valid", + "data": [true, true], + "valid": true + }, + { + "description": "unique array extended from [false, true] is valid", + "data": [false, true, "foo", "bar"], + "valid": true + }, + { + "description": "unique array extended from [true, false] is valid", + "data": [true, false, "foo", "bar"], + "valid": true + }, + { + "description": "non-unique array extended from [false, true] is valid", + "data": [false, true, "foo", "foo"], + "valid": true + }, + { + "description": "non-unique array extended from [true, false] is valid", + "data": [true, false, "foo", "foo"], + "valid": true + } + ] + }, + { + "description": "uniqueItems=false with an array of items and additionalItems=false", + "schema": { + "items": [{"type": "boolean"}, 
{"type": "boolean"}], + "uniqueItems": false, + "additionalItems": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is valid", + "data": [false, false], + "valid": true + }, + { + "description": "[true, true] from items array is valid", + "data": [true, true], + "valid": true + }, + { + "description": "extra items are invalid even if unique", + "data": [false, true, null], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/additionalItems.json b/vendor/jsonschema/json/tests/draft6/additionalItems.json new file mode 100644 index 00000000..deb44fd3 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/additionalItems.json @@ -0,0 +1,164 @@ +[ + { + "description": "additionalItems as schema", + "schema": { + "items": [{}], + "additionalItems": {"type": "integer"} + }, + "tests": [ + { + "description": "additional items match schema", + "data": [ null, 2, 3, 4 ], + "valid": true + }, + { + "description": "additional items do not match schema", + "data": [ null, 2, 3, "foo" ], + "valid": false + } + ] + }, + { + "description": "when items is schema, additionalItems does nothing", + "schema": { + "items": {}, + "additionalItems": false + }, + "tests": [ + { + "description": "all items match schema", + "data": [ 1, 2, 3, 4, 5 ], + "valid": true + } + ] + }, + { + "description": "array of items with no additionalItems permitted", + "schema": { + "items": [{}, {}, {}], + "additionalItems": false + }, + "tests": [ + { + "description": "empty array", + "data": [ ], + "valid": true + }, + { + "description": "fewer number of items present (1)", + "data": [ 1 ], + "valid": true + }, + { + "description": "fewer number of items present (2)", + "data": [ 1, 2 ], + "valid": true + }, + { + "description": "equal number of items present", + "data": [ 1, 2, 3 ], + "valid": true + }, + { + "description": "additional items are not permitted", + "data": [ 1, 2, 3, 4 ], + "valid": false + } + ] + }, + { + "description": "additionalItems as false without items", + "schema": {"additionalItems": false}, + "tests": [ + { + "description": + "items defaults to empty schema so everything is valid", + "data": [ 1, 2, 3, 4, 5 ], + "valid": true + }, + { + "description": "ignores non-arrays", + "data": {"foo" : "bar"}, + "valid": true + } + ] + }, + { + "description": "additionalItems are allowed by default", + "schema": {"items": [{"type": "integer"}]}, + "tests": [ + { + "description": "only the first item is validated", + "data": [1, "foo", false], + "valid": true + } + ] + }, + { + "description": "additionalItems does not look in applicators, valid case", + "schema": { + "allOf": [ + { "items": [ { "type": "integer" } ] } + ], + "additionalItems": { "type": "boolean" } + }, + "tests": [ + { + "description": "items defined in allOf are not examined", + "data": [ 1, null ], + "valid": true + } + ] + }, + { + "description": "additionalItems does not look in applicators, invalid case", + "schema": { + "allOf": [ + { "items": [ { "type": "integer" }, { "type": "string" } ] } + ], + "items": [ {"type": "integer" } ], + "additionalItems": { "type": "boolean" } + }, + "tests": [ + { + "description": "items defined in allOf are not examined", + "data": [ 1, "hello" ], + "valid": false + } + ] + }, + { + "description": "items validation adjusts the 
starting index for additionalItems", + "schema": { + "items": [ { "type": "string" } ], + "additionalItems": { "type": "integer" } + }, + "tests": [ + { + "description": "valid items", + "data": [ "x", 2, 3 ], + "valid": true + }, + { + "description": "wrong type of second item", + "data": [ "x", "y" ], + "valid": false + } + ] + }, + { + "description": "additionalItems with null instance elements", + "schema": { + "additionalItems": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/additionalProperties.json b/vendor/jsonschema/json/tests/draft6/additionalProperties.json new file mode 100644 index 00000000..0f8e1627 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/additionalProperties.json @@ -0,0 +1,147 @@ +[ + { + "description": + "additionalProperties being false does not allow other properties", + "schema": { + "properties": {"foo": {}, "bar": {}}, + "patternProperties": { "^v": {} }, + "additionalProperties": false + }, + "tests": [ + { + "description": "no additional properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "an additional property is invalid", + "data": {"foo" : 1, "bar" : 2, "quux" : "boom"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [1, 2, 3], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobarbaz", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + }, + { + "description": "patternProperties are not additional properties", + "data": {"foo":1, "vroom": 2}, + "valid": true + } + ] + }, + { + "description": "non-ASCII pattern with additionalProperties", + "schema": { + "patternProperties": {"^á": {}}, + "additionalProperties": false + }, + "tests": [ + { + "description": "matching the pattern is valid", + "data": {"ármányos": 2}, + "valid": true + }, + { + "description": "not matching the pattern is invalid", + "data": {"élmény": 2}, + "valid": false + } + ] + }, + { + "description": "additionalProperties with schema", + "schema": { + "properties": {"foo": {}, "bar": {}}, + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "no additional properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "an additional valid property is valid", + "data": {"foo" : 1, "bar" : 2, "quux" : true}, + "valid": true + }, + { + "description": "an additional invalid property is invalid", + "data": {"foo" : 1, "bar" : 2, "quux" : 12}, + "valid": false + } + ] + }, + { + "description": + "additionalProperties can exist by itself", + "schema": { + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "an additional valid property is valid", + "data": {"foo" : true}, + "valid": true + }, + { + "description": "an additional invalid property is invalid", + "data": {"foo" : 1}, + "valid": false + } + ] + }, + { + "description": "additionalProperties are allowed by default", + "schema": {"properties": {"foo": {}, "bar": {}}}, + "tests": [ + { + "description": "additional properties are allowed", + "data": {"foo": 1, "bar": 2, "quux": true}, + "valid": true + } + ] + }, + { + "description": "additionalProperties does not look in applicators", + "schema": { + "allOf": [ + {"properties": {"foo": {}}} + ], + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "properties defined in allOf are not examined", 
+ "data": {"foo": 1, "bar": true}, + "valid": false + } + ] + }, + { + "description": "additionalProperties with null valued instance properties", + "schema": { + "additionalProperties": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foo": null}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/allOf.json b/vendor/jsonschema/json/tests/draft6/allOf.json new file mode 100644 index 00000000..ec9319e1 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/allOf.json @@ -0,0 +1,294 @@ +[ + { + "description": "allOf", + "schema": { + "allOf": [ + { + "properties": { + "bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "allOf", + "data": {"foo": "baz", "bar": 2}, + "valid": true + }, + { + "description": "mismatch second", + "data": {"foo": "baz"}, + "valid": false + }, + { + "description": "mismatch first", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "wrong type", + "data": {"foo": "baz", "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "allOf with base schema", + "schema": { + "properties": {"bar": {"type": "integer"}}, + "required": ["bar"], + "allOf" : [ + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + }, + { + "properties": { + "baz": {"type": "null"} + }, + "required": ["baz"] + } + ] + }, + "tests": [ + { + "description": "valid", + "data": {"foo": "quux", "bar": 2, "baz": null}, + "valid": true + }, + { + "description": "mismatch base schema", + "data": {"foo": "quux", "baz": null}, + "valid": false + }, + { + "description": "mismatch first allOf", + "data": {"bar": 2, "baz": null}, + "valid": false + }, + { + "description": "mismatch second allOf", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "mismatch both", + "data": {"bar": 2}, + "valid": false + } + ] + }, + { + "description": "allOf simple types", + "schema": { + "allOf": [ + {"maximum": 30}, + {"minimum": 20} + ] + }, + "tests": [ + { + "description": "valid", + "data": 25, + "valid": true + }, + { + "description": "mismatch one", + "data": 35, + "valid": false + } + ] + }, + { + "description": "allOf with boolean schemas, all true", + "schema": {"allOf": [true, true]}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "allOf with boolean schemas, some false", + "schema": {"allOf": [true, false]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "allOf with boolean schemas, all false", + "schema": {"allOf": [false, false]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "allOf with one empty schema", + "schema": { + "allOf": [ + {} + ] + }, + "tests": [ + { + "description": "any data is valid", + "data": 1, + "valid": true + } + ] + }, + { + "description": "allOf with two empty schemas", + "schema": { + "allOf": [ + {}, + {} + ] + }, + "tests": [ + { + "description": "any data is valid", + "data": 1, + "valid": true + } + ] + }, + { + "description": "allOf with the first empty schema", + "schema": { + "allOf": [ + {}, + { "type": "number" } + ] + }, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "string is invalid", + "data": "foo", + 
"valid": false + } + ] + }, + { + "description": "allOf with the last empty schema", + "schema": { + "allOf": [ + { "type": "number" }, + {} + ] + }, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "nested allOf, to check validation semantics", + "schema": { + "allOf": [ + { + "allOf": [ + { + "type": "null" + } + ] + } + ] + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "anything non-null is invalid", + "data": 123, + "valid": false + } + ] + }, + { + "description": "allOf combined with anyOf, oneOf", + "schema": { + "allOf": [ { "multipleOf": 2 } ], + "anyOf": [ { "multipleOf": 3 } ], + "oneOf": [ { "multipleOf": 5 } ] + }, + "tests": [ + { + "description": "allOf: false, anyOf: false, oneOf: false", + "data": 1, + "valid": false + }, + { + "description": "allOf: false, anyOf: false, oneOf: true", + "data": 5, + "valid": false + }, + { + "description": "allOf: false, anyOf: true, oneOf: false", + "data": 3, + "valid": false + }, + { + "description": "allOf: false, anyOf: true, oneOf: true", + "data": 15, + "valid": false + }, + { + "description": "allOf: true, anyOf: false, oneOf: false", + "data": 2, + "valid": false + }, + { + "description": "allOf: true, anyOf: false, oneOf: true", + "data": 10, + "valid": false + }, + { + "description": "allOf: true, anyOf: true, oneOf: false", + "data": 6, + "valid": false + }, + { + "description": "allOf: true, anyOf: true, oneOf: true", + "data": 30, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/anyOf.json b/vendor/jsonschema/json/tests/draft6/anyOf.json new file mode 100644 index 00000000..ab5eb386 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/anyOf.json @@ -0,0 +1,189 @@ +[ + { + "description": "anyOf", + "schema": { + "anyOf": [ + { + "type": "integer" + }, + { + "minimum": 2 + } + ] + }, + "tests": [ + { + "description": "first anyOf valid", + "data": 1, + "valid": true + }, + { + "description": "second anyOf valid", + "data": 2.5, + "valid": true + }, + { + "description": "both anyOf valid", + "data": 3, + "valid": true + }, + { + "description": "neither anyOf valid", + "data": 1.5, + "valid": false + } + ] + }, + { + "description": "anyOf with base schema", + "schema": { + "type": "string", + "anyOf" : [ + { + "maxLength": 2 + }, + { + "minLength": 4 + } + ] + }, + "tests": [ + { + "description": "mismatch base schema", + "data": 3, + "valid": false + }, + { + "description": "one anyOf valid", + "data": "foobar", + "valid": true + }, + { + "description": "both anyOf invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "anyOf with boolean schemas, all true", + "schema": {"anyOf": [true, true]}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "anyOf with boolean schemas, some true", + "schema": {"anyOf": [true, false]}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "anyOf with boolean schemas, all false", + "schema": {"anyOf": [false, false]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "anyOf complex types", + "schema": { + "anyOf": [ + { + "properties": { + "bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + 
"properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "first anyOf valid (complex)", + "data": {"bar": 2}, + "valid": true + }, + { + "description": "second anyOf valid (complex)", + "data": {"foo": "baz"}, + "valid": true + }, + { + "description": "both anyOf valid (complex)", + "data": {"foo": "baz", "bar": 2}, + "valid": true + }, + { + "description": "neither anyOf valid (complex)", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "anyOf with one empty schema", + "schema": { + "anyOf": [ + { "type": "number" }, + {} + ] + }, + "tests": [ + { + "description": "string is valid", + "data": "foo", + "valid": true + }, + { + "description": "number is valid", + "data": 123, + "valid": true + } + ] + }, + { + "description": "nested anyOf, to check validation semantics", + "schema": { + "anyOf": [ + { + "anyOf": [ + { + "type": "null" + } + ] + } + ] + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "anything non-null is invalid", + "data": 123, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/boolean_schema.json b/vendor/jsonschema/json/tests/draft6/boolean_schema.json new file mode 100644 index 00000000..6d40f23f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/boolean_schema.json @@ -0,0 +1,104 @@ +[ + { + "description": "boolean schema 'true'", + "schema": true, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "string is valid", + "data": "foo", + "valid": true + }, + { + "description": "boolean true is valid", + "data": true, + "valid": true + }, + { + "description": "boolean false is valid", + "data": false, + "valid": true + }, + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "object is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + }, + { + "description": "array is valid", + "data": ["foo"], + "valid": true + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "boolean schema 'false'", + "schema": false, + "tests": [ + { + "description": "number is invalid", + "data": 1, + "valid": false + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + }, + { + "description": "boolean true is invalid", + "data": true, + "valid": false + }, + { + "description": "boolean false is invalid", + "data": false, + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + }, + { + "description": "object is invalid", + "data": {"foo": "bar"}, + "valid": false + }, + { + "description": "empty object is invalid", + "data": {}, + "valid": false + }, + { + "description": "array is invalid", + "data": ["foo"], + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/const.json b/vendor/jsonschema/json/tests/draft6/const.json new file mode 100644 index 00000000..1c2cafcc --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/const.json @@ -0,0 +1,342 @@ +[ + { + "description": "const validation", + "schema": {"const": 2}, + "tests": [ + { + "description": "same value is valid", + "data": 2, + "valid": true + }, + { + "description": "another value is invalid", + "data": 5, + "valid": false + }, 
+ { + "description": "another type is invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "const with object", + "schema": {"const": {"foo": "bar", "baz": "bax"}}, + "tests": [ + { + "description": "same object is valid", + "data": {"foo": "bar", "baz": "bax"}, + "valid": true + }, + { + "description": "same object with different property order is valid", + "data": {"baz": "bax", "foo": "bar"}, + "valid": true + }, + { + "description": "another object is invalid", + "data": {"foo": "bar"}, + "valid": false + }, + { + "description": "another type is invalid", + "data": [1, 2], + "valid": false + } + ] + }, + { + "description": "const with array", + "schema": {"const": [{ "foo": "bar" }]}, + "tests": [ + { + "description": "same array is valid", + "data": [{"foo": "bar"}], + "valid": true + }, + { + "description": "another array item is invalid", + "data": [2], + "valid": false + }, + { + "description": "array with additional items is invalid", + "data": [1, 2, 3], + "valid": false + } + ] + }, + { + "description": "const with null", + "schema": {"const": null}, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "not null is invalid", + "data": 0, + "valid": false + } + ] + }, + { + "description": "const with false does not match 0", + "schema": {"const": false}, + "tests": [ + { + "description": "false is valid", + "data": false, + "valid": true + }, + { + "description": "integer zero is invalid", + "data": 0, + "valid": false + }, + { + "description": "float zero is invalid", + "data": 0.0, + "valid": false + } + ] + }, + { + "description": "const with true does not match 1", + "schema": {"const": true}, + "tests": [ + { + "description": "true is valid", + "data": true, + "valid": true + }, + { + "description": "integer one is invalid", + "data": 1, + "valid": false + }, + { + "description": "float one is invalid", + "data": 1.0, + "valid": false + } + ] + }, + { + "description": "const with [false] does not match [0]", + "schema": {"const": [false]}, + "tests": [ + { + "description": "[false] is valid", + "data": [false], + "valid": true + }, + { + "description": "[0] is invalid", + "data": [0], + "valid": false + }, + { + "description": "[0.0] is invalid", + "data": [0.0], + "valid": false + } + ] + }, + { + "description": "const with [true] does not match [1]", + "schema": {"const": [true]}, + "tests": [ + { + "description": "[true] is valid", + "data": [true], + "valid": true + }, + { + "description": "[1] is invalid", + "data": [1], + "valid": false + }, + { + "description": "[1.0] is invalid", + "data": [1.0], + "valid": false + } + ] + }, + { + "description": "const with {\"a\": false} does not match {\"a\": 0}", + "schema": {"const": {"a": false}}, + "tests": [ + { + "description": "{\"a\": false} is valid", + "data": {"a": false}, + "valid": true + }, + { + "description": "{\"a\": 0} is invalid", + "data": {"a": 0}, + "valid": false + }, + { + "description": "{\"a\": 0.0} is invalid", + "data": {"a": 0.0}, + "valid": false + } + ] + }, + { + "description": "const with {\"a\": true} does not match {\"a\": 1}", + "schema": {"const": {"a": true}}, + "tests": [ + { + "description": "{\"a\": true} is valid", + "data": {"a": true}, + "valid": true + }, + { + "description": "{\"a\": 1} is invalid", + "data": {"a": 1}, + "valid": false + }, + { + "description": "{\"a\": 1.0} is invalid", + "data": {"a": 1.0}, + "valid": false + } + ] + }, + { + "description": "const with 0 does not match other zero-like 
types", + "schema": {"const": 0}, + "tests": [ + { + "description": "false is invalid", + "data": false, + "valid": false + }, + { + "description": "integer zero is valid", + "data": 0, + "valid": true + }, + { + "description": "float zero is valid", + "data": 0.0, + "valid": true + }, + { + "description": "empty object is invalid", + "data": {}, + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + }, + { + "description": "empty string is invalid", + "data": "", + "valid": false + } + ] + }, + { + "description": "const with 1 does not match true", + "schema": {"const": 1}, + "tests": [ + { + "description": "true is invalid", + "data": true, + "valid": false + }, + { + "description": "integer one is valid", + "data": 1, + "valid": true + }, + { + "description": "float one is valid", + "data": 1.0, + "valid": true + } + ] + }, + { + "description": "const with -2.0 matches integer and float types", + "schema": {"const": -2.0}, + "tests": [ + { + "description": "integer -2 is valid", + "data": -2, + "valid": true + }, + { + "description": "integer 2 is invalid", + "data": 2, + "valid": false + }, + { + "description": "float -2.0 is valid", + "data": -2.0, + "valid": true + }, + { + "description": "float 2.0 is invalid", + "data": 2.0, + "valid": false + }, + { + "description": "float -2.00001 is invalid", + "data": -2.00001, + "valid": false + } + ] + }, + { + "description": "float and integers are equal up to 64-bit representation limits", + "schema": {"const": 9007199254740992}, + "tests": [ + { + "description": "integer is valid", + "data": 9007199254740992, + "valid": true + }, + { + "description": "integer minus one is invalid", + "data": 9007199254740991, + "valid": false + }, + { + "description": "float is valid", + "data": 9007199254740992.0, + "valid": true + }, + { + "description": "float minus one is invalid", + "data": 9007199254740991.0, + "valid": false + } + ] + }, + { + "description": "nul characters in strings", + "schema": { "const": "hello\u0000there" }, + "tests": [ + { + "description": "match string with nul", + "data": "hello\u0000there", + "valid": true + }, + { + "description": "do not match string lacking nul", + "data": "hellothere", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/contains.json b/vendor/jsonschema/json/tests/draft6/contains.json new file mode 100644 index 00000000..bd93654f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/contains.json @@ -0,0 +1,144 @@ +[ + { + "description": "contains keyword validation", + "schema": { + "contains": {"minimum": 5} + }, + "tests": [ + { + "description": "array with item matching schema (5) is valid", + "data": [3, 4, 5], + "valid": true + }, + { + "description": "array with item matching schema (6) is valid", + "data": [3, 4, 6], + "valid": true + }, + { + "description": "array with two items matching schema (5, 6) is valid", + "data": [3, 4, 5, 6], + "valid": true + }, + { + "description": "array without items matching schema is invalid", + "data": [2, 3, 4], + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + }, + { + "description": "not array is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "contains keyword with const keyword", + "schema": { + "contains": { "const": 5 } + }, + "tests": [ + { + "description": "array with item 5 is valid", + "data": [3, 4, 5], + "valid": true + }, + { + "description": "array with two items 5 is valid", + "data": [3, 4, 
5, 5], + "valid": true + }, + { + "description": "array without item 5 is invalid", + "data": [1, 2, 3, 4], + "valid": false + } + ] + }, + { + "description": "contains keyword with boolean schema true", + "schema": {"contains": true}, + "tests": [ + { + "description": "any non-empty array is valid", + "data": ["foo"], + "valid": true + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + } + ] + }, + { + "description": "contains keyword with boolean schema false", + "schema": {"contains": false}, + "tests": [ + { + "description": "any non-empty array is invalid", + "data": ["foo"], + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + }, + { + "description": "non-arrays are valid", + "data": "contains does not apply to strings", + "valid": true + } + ] + }, + { + "description": "items + contains", + "schema": { + "items": { "multipleOf": 2 }, + "contains": { "multipleOf": 3 } + }, + "tests": [ + { + "description": "matches items, does not match contains", + "data": [ 2, 4, 8 ], + "valid": false + }, + { + "description": "does not match items, matches contains", + "data": [ 3, 6, 9 ], + "valid": false + }, + { + "description": "matches both items and contains", + "data": [ 6, 12 ], + "valid": true + }, + { + "description": "matches neither items nor contains", + "data": [ 1, 5 ], + "valid": false + } + ] + }, + { + "description": "contains with null instance elements", + "schema": { + "contains": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null items", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/default.json b/vendor/jsonschema/json/tests/draft6/default.json new file mode 100644 index 00000000..289a9b66 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/default.json @@ -0,0 +1,79 @@ +[ + { + "description": "invalid type for default", + "schema": { + "properties": { + "foo": { + "type": "integer", + "default": [] + } + } + }, + "tests": [ + { + "description": "valid when property is specified", + "data": {"foo": 13}, + "valid": true + }, + { + "description": "still valid when the invalid default is used", + "data": {}, + "valid": true + } + ] + }, + { + "description": "invalid string value for default", + "schema": { + "properties": { + "bar": { + "type": "string", + "minLength": 4, + "default": "bad" + } + } + }, + "tests": [ + { + "description": "valid when property is specified", + "data": {"bar": "good"}, + "valid": true + }, + { + "description": "still valid when the invalid default is used", + "data": {}, + "valid": true + } + ] + }, + { + "description": "the default keyword does not do anything if the property is missing", + "schema": { + "type": "object", + "properties": { + "alpha": { + "type": "number", + "maximum": 3, + "default": 5 + } + } + }, + "tests": [ + { + "description": "an explicit property value is checked against maximum (passing)", + "data": { "alpha": 1 }, + "valid": true + }, + { + "description": "an explicit property value is checked against maximum (failing)", + "data": { "alpha": 5 }, + "valid": false + }, + { + "description": "missing properties are not filled in with the default", + "data": {}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/definitions.json b/vendor/jsonschema/json/tests/draft6/definitions.json new file mode 100644 index 00000000..d772fde3 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/definitions.json @@ -0,0 +1,26 @@ +[ + { + 
"description": "validate definition against metaschema", + "schema": {"$ref": "http://json-schema.org/draft-06/schema#"}, + "tests": [ + { + "description": "valid definition schema", + "data": { + "definitions": { + "foo": {"type": "integer"} + } + }, + "valid": true + }, + { + "description": "invalid definition schema", + "data": { + "definitions": { + "foo": {"type": 1} + } + }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/dependencies.json b/vendor/jsonschema/json/tests/draft6/dependencies.json new file mode 100644 index 00000000..a5e54282 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/dependencies.json @@ -0,0 +1,248 @@ +[ + { + "description": "dependencies", + "schema": { + "dependencies": {"bar": ["foo"]} + }, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependant", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "with dependency", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["bar"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "dependencies with empty array", + "schema": { + "dependencies": {"bar": []} + }, + "tests": [ + { + "description": "empty object", + "data": {}, + "valid": true + }, + { + "description": "object with one property", + "data": {"bar": 2}, + "valid": true + }, + { + "description": "non-object is valid", + "data": 1, + "valid": true + } + ] + }, + { + "description": "multiple dependencies", + "schema": { + "dependencies": {"quux": ["foo", "bar"]} + }, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependants", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "with dependencies", + "data": {"foo": 1, "bar": 2, "quux": 3}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"foo": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing other dependency", + "data": {"bar": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing both dependencies", + "data": {"quux": 1}, + "valid": false + } + ] + }, + { + "description": "multiple dependencies subschema", + "schema": { + "dependencies": { + "bar": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "integer"} + } + } + } + }, + "tests": [ + { + "description": "valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "no dependency", + "data": {"foo": "quux"}, + "valid": true + }, + { + "description": "wrong type", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "wrong type other", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + }, + { + "description": "wrong type both", + "data": {"foo": "quux", "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "dependencies with boolean subschemas", + "schema": { + "dependencies": { + "foo": true, + "bar": false + } + }, + "tests": [ + { + "description": "object with property having schema true is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "object with property having schema false is invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "object with both properties is invalid", + 
"data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "dependencies with escaped characters", + "schema": { + "dependencies": { + "foo\nbar": ["foo\rbar"], + "foo\tbar": { + "minProperties": 4 + }, + "foo'bar": {"required": ["foo\"bar"]}, + "foo\"bar": ["foo'bar"] + } + }, + "tests": [ + { + "description": "valid object 1", + "data": { + "foo\nbar": 1, + "foo\rbar": 2 + }, + "valid": true + }, + { + "description": "valid object 2", + "data": { + "foo\tbar": 1, + "a": 2, + "b": 3, + "c": 4 + }, + "valid": true + }, + { + "description": "valid object 3", + "data": { + "foo'bar": 1, + "foo\"bar": 2 + }, + "valid": true + }, + { + "description": "invalid object 1", + "data": { + "foo\nbar": 1, + "foo": 2 + }, + "valid": false + }, + { + "description": "invalid object 2", + "data": { + "foo\tbar": 1, + "a": 2 + }, + "valid": false + }, + { + "description": "invalid object 3", + "data": { + "foo'bar": 1 + }, + "valid": false + }, + { + "description": "invalid object 4", + "data": { + "foo\"bar": 2 + }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/enum.json b/vendor/jsonschema/json/tests/draft6/enum.json new file mode 100644 index 00000000..f085097b --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/enum.json @@ -0,0 +1,236 @@ +[ + { + "description": "simple enum validation", + "schema": {"enum": [1, 2, 3]}, + "tests": [ + { + "description": "one of the enum is valid", + "data": 1, + "valid": true + }, + { + "description": "something else is invalid", + "data": 4, + "valid": false + } + ] + }, + { + "description": "heterogeneous enum validation", + "schema": {"enum": [6, "foo", [], true, {"foo": 12}]}, + "tests": [ + { + "description": "one of the enum is valid", + "data": [], + "valid": true + }, + { + "description": "something else is invalid", + "data": null, + "valid": false + }, + { + "description": "objects are deep compared", + "data": {"foo": false}, + "valid": false + }, + { + "description": "valid object matches", + "data": {"foo": 12}, + "valid": true + }, + { + "description": "extra properties in object is invalid", + "data": {"foo": 12, "boo": 42}, + "valid": false + } + ] + }, + { + "description": "heterogeneous enum-with-null validation", + "schema": { "enum": [6, null] }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "number is valid", + "data": 6, + "valid": true + }, + { + "description": "something else is invalid", + "data": "test", + "valid": false + } + ] + }, + { + "description": "enums in properties", + "schema": { + "type":"object", + "properties": { + "foo": {"enum":["foo"]}, + "bar": {"enum":["bar"]} + }, + "required": ["bar"] + }, + "tests": [ + { + "description": "both properties are valid", + "data": {"foo":"foo", "bar":"bar"}, + "valid": true + }, + { + "description": "wrong foo value", + "data": {"foo":"foot", "bar":"bar"}, + "valid": false + }, + { + "description": "wrong bar value", + "data": {"foo":"foo", "bar":"bart"}, + "valid": false + }, + { + "description": "missing optional property is valid", + "data": {"bar":"bar"}, + "valid": true + }, + { + "description": "missing required property is invalid", + "data": {"foo":"foo"}, + "valid": false + }, + { + "description": "missing all properties is invalid", + "data": {}, + "valid": false + } + ] + }, + { + "description": "enum with escaped characters", + "schema": { + "enum": ["foo\nbar", "foo\rbar"] + }, 
+ "tests": [ + { + "description": "member 1 is valid", + "data": "foo\nbar", + "valid": true + }, + { + "description": "member 2 is valid", + "data": "foo\rbar", + "valid": true + }, + { + "description": "another string is invalid", + "data": "abc", + "valid": false + } + ] + }, + { + "description": "enum with false does not match 0", + "schema": {"enum": [false]}, + "tests": [ + { + "description": "false is valid", + "data": false, + "valid": true + }, + { + "description": "integer zero is invalid", + "data": 0, + "valid": false + }, + { + "description": "float zero is invalid", + "data": 0.0, + "valid": false + } + ] + }, + { + "description": "enum with true does not match 1", + "schema": {"enum": [true]}, + "tests": [ + { + "description": "true is valid", + "data": true, + "valid": true + }, + { + "description": "integer one is invalid", + "data": 1, + "valid": false + }, + { + "description": "float one is invalid", + "data": 1.0, + "valid": false + } + ] + }, + { + "description": "enum with 0 does not match false", + "schema": {"enum": [0]}, + "tests": [ + { + "description": "false is invalid", + "data": false, + "valid": false + }, + { + "description": "integer zero is valid", + "data": 0, + "valid": true + }, + { + "description": "float zero is valid", + "data": 0.0, + "valid": true + } + ] + }, + { + "description": "enum with 1 does not match true", + "schema": {"enum": [1]}, + "tests": [ + { + "description": "true is invalid", + "data": true, + "valid": false + }, + { + "description": "integer one is valid", + "data": 1, + "valid": true + }, + { + "description": "float one is valid", + "data": 1.0, + "valid": true + } + ] + }, + { + "description": "nul characters in strings", + "schema": { "enum": [ "hello\u0000there" ] }, + "tests": [ + { + "description": "match string with nul", + "data": "hello\u0000there", + "valid": true + }, + { + "description": "do not match string lacking nul", + "data": "hellothere", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/exclusiveMaximum.json b/vendor/jsonschema/json/tests/draft6/exclusiveMaximum.json new file mode 100644 index 00000000..dc3cd709 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/exclusiveMaximum.json @@ -0,0 +1,30 @@ +[ + { + "description": "exclusiveMaximum validation", + "schema": { + "exclusiveMaximum": 3.0 + }, + "tests": [ + { + "description": "below the exclusiveMaximum is valid", + "data": 2.2, + "valid": true + }, + { + "description": "boundary point is invalid", + "data": 3.0, + "valid": false + }, + { + "description": "above the exclusiveMaximum is invalid", + "data": 3.5, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/exclusiveMinimum.json b/vendor/jsonschema/json/tests/draft6/exclusiveMinimum.json new file mode 100644 index 00000000..b38d7ece --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/exclusiveMinimum.json @@ -0,0 +1,30 @@ +[ + { + "description": "exclusiveMinimum validation", + "schema": { + "exclusiveMinimum": 1.1 + }, + "tests": [ + { + "description": "above the exclusiveMinimum is valid", + "data": 1.2, + "valid": true + }, + { + "description": "boundary point is invalid", + "data": 1.1, + "valid": false + }, + { + "description": "below the exclusiveMinimum is invalid", + "data": 0.6, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + } +] diff --git 
a/vendor/jsonschema/json/tests/draft6/format.json b/vendor/jsonschema/json/tests/draft6/format.json new file mode 100644 index 00000000..2df2a9f0 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/format.json @@ -0,0 +1,326 @@ +[ + { + "description": "email format", + "schema": { "format": "email" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "ipv4 format", + "schema": { "format": "ipv4" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "ipv6 format", + "schema": { "format": "ipv6" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "hostname format", + "schema": { "format": "hostname" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "date-time format", + "schema": { "format": "date-time" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": 
"json-pointer format", + "schema": { "format": "json-pointer" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "uri format", + "schema": { "format": "uri" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "uri-reference format", + "schema": { "format": "uri-reference" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "uri-template format", + "schema": { "format": "uri-template" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/id.json b/vendor/jsonschema/json/tests/draft6/id.json new file mode 100644 index 00000000..03d30fcb --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/id.json @@ -0,0 +1,134 @@ +[ + { + "description": "id inside an enum is not a real identifier", + "comment": "the implementation must not be confused by an id buried in the enum", + "schema": { + "definitions": { + "id_in_enum": { + "enum": [ + { + "$id": "https://localhost:1234/id/my_identifier.json", + "type": "null" + } + ] + }, + "real_id_in_schema": { + "$id": "https://localhost:1234/id/my_identifier.json", + "type": "string" + }, + "zzz_id_in_const": { + "const": { + "$id": "https://localhost:1234/id/my_identifier.json", + "type": "null" + } + } + }, + "anyOf": [ + { "$ref": "#/definitions/id_in_enum" }, + { "$ref": "https://localhost:1234/id/my_identifier.json" } + ] + }, + "tests": [ + { 
+ "description": "exact match to enum, and type matches", + "data": { + "$id": "https://localhost:1234/id/my_identifier.json", + "type": "null" + }, + "valid": true + }, + { + "description": "match $ref to id", + "data": "a string to match #/definitions/id_in_enum", + "valid": true + }, + { + "description": "no match on enum or $ref to id", + "data": 1, + "valid": false + } + ] + }, + { + "description": "non-schema object containing a plain-name $id property", + "schema": { + "definitions": { + "const_not_anchor": { + "const": { + "$id": "#not_a_real_anchor" + } + } + }, + "oneOf": [ + { + "const": "skip not_a_real_anchor" + }, + { + "allOf": [ + { + "not": { + "const": "skip not_a_real_anchor" + } + }, + { + "$ref": "#/definitions/const_not_anchor" + } + ] + } + ] + }, + "tests": [ + { + "description": "skip traversing definition for a valid result", + "data": "skip not_a_real_anchor", + "valid": true + }, + { + "description": "const at const_not_anchor does not match", + "data": 1, + "valid": false + } + ] + }, + { + "description": "non-schema object containing an $id property", + "schema": { + "definitions": { + "const_not_id": { + "const": { + "$id": "not_a_real_id" + } + } + }, + "oneOf": [ + { + "const":"skip not_a_real_id" + }, + { + "allOf": [ + { + "not": { + "const": "skip not_a_real_id" + } + }, + { + "$ref": "#/definitions/const_not_id" + } + ] + } + ] + }, + "tests": [ + { + "description": "skip traversing definition for a valid result", + "data": "skip not_a_real_id", + "valid": true + }, + { + "description": "const at const_not_id does not match", + "data": 1, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/infinite-loop-detection.json b/vendor/jsonschema/json/tests/draft6/infinite-loop-detection.json new file mode 100644 index 00000000..f98c74fc --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/infinite-loop-detection.json @@ -0,0 +1,36 @@ +[ + { + "description": "evaluating the same schema location against the same data location twice is not a sign of an infinite loop", + "schema": { + "definitions": { + "int": { "type": "integer" } + }, + "allOf": [ + { + "properties": { + "foo": { + "$ref": "#/definitions/int" + } + } + }, + { + "additionalProperties": { + "$ref": "#/definitions/int" + } + } + ] + }, + "tests": [ + { + "description": "passing case", + "data": { "foo": 1 }, + "valid": true + }, + { + "description": "failing case", + "data": { "foo": "a string" }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/items.json b/vendor/jsonschema/json/tests/draft6/items.json new file mode 100644 index 00000000..7ed6781b --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/items.json @@ -0,0 +1,282 @@ +[ + { + "description": "a schema given for items", + "schema": { + "items": {"type": "integer"} + }, + "tests": [ + { + "description": "valid items", + "data": [ 1, 2, 3 ], + "valid": true + }, + { + "description": "wrong type of items", + "data": [1, "x"], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": {"foo" : "bar"}, + "valid": true + }, + { + "description": "JavaScript pseudo-array is valid", + "data": { + "0": "invalid", + "length": 1 + }, + "valid": true + } + ] + }, + { + "description": "an array of schemas for items", + "schema": { + "items": [ + {"type": "integer"}, + {"type": "string"} + ] + }, + "tests": [ + { + "description": "correct types", + "data": [ 1, "foo" ], + "valid": true + }, + { + "description": "wrong types", + "data": [ "foo", 1 ], + "valid": false + }, 
+ { + "description": "incomplete array of items", + "data": [ 1 ], + "valid": true + }, + { + "description": "array with additional items", + "data": [ 1, "foo", true ], + "valid": true + }, + { + "description": "empty array", + "data": [ ], + "valid": true + }, + { + "description": "JavaScript pseudo-array is valid", + "data": { + "0": "invalid", + "1": "valid", + "length": 2 + }, + "valid": true + } + ] + }, + { + "description": "items with boolean schema (true)", + "schema": {"items": true}, + "tests": [ + { + "description": "any array is valid", + "data": [ 1, "foo", true ], + "valid": true + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "items with boolean schema (false)", + "schema": {"items": false}, + "tests": [ + { + "description": "any non-empty array is invalid", + "data": [ 1, "foo", true ], + "valid": false + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "items with boolean schemas", + "schema": { + "items": [true, false] + }, + "tests": [ + { + "description": "array with one item is valid", + "data": [ 1 ], + "valid": true + }, + { + "description": "array with two items is invalid", + "data": [ 1, "foo" ], + "valid": false + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "items and subitems", + "schema": { + "definitions": { + "item": { + "type": "array", + "additionalItems": false, + "items": [ + { "$ref": "#/definitions/sub-item" }, + { "$ref": "#/definitions/sub-item" } + ] + }, + "sub-item": { + "type": "object", + "required": ["foo"] + } + }, + "type": "array", + "additionalItems": false, + "items": [ + { "$ref": "#/definitions/item" }, + { "$ref": "#/definitions/item" }, + { "$ref": "#/definitions/item" } + ] + }, + "tests": [ + { + "description": "valid items", + "data": [ + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": true + }, + { + "description": "too many items", + "data": [ + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "too many sub-items", + "data": [ + [ {"foo": null}, {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "wrong item", + "data": [ + {"foo": null}, + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "wrong sub-item", + "data": [ + [ {}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "fewer items is valid", + "data": [ + [ {"foo": null} ], + [ {"foo": null} ] + ], + "valid": true + } + ] + }, + { + "description": "nested items", + "schema": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "number" + } + } + } + } + }, + "tests": [ + { + "description": "valid nested array", + "data": [[[[1]], [[2],[3]]], [[[4], [5], [6]]]], + "valid": true + }, + { + "description": "nested array with invalid type", + "data": [[[["1"]], [[2],[3]]], [[[4], [5], [6]]]], + "valid": false + }, + { + "description": "not deep enough", + "data": [[[1], [2],[3]], [[4], [5], [6]]], + "valid": false + } + ] + }, + { + 
"description": "single-form items with null instance elements", + "schema": { + "items": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + }, + { + "description": "array-form items with null instance elements", + "schema": { + "items": [ + { + "type": "null" + } + ] + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/maxItems.json b/vendor/jsonschema/json/tests/draft6/maxItems.json new file mode 100644 index 00000000..f0c36ab2 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/maxItems.json @@ -0,0 +1,44 @@ +[ + { + "description": "maxItems validation", + "schema": {"maxItems": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": [1], + "valid": true + }, + { + "description": "exact length is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "too long is invalid", + "data": [1, 2, 3], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": "foobar", + "valid": true + } + ] + }, + { + "description": "maxItems validation with a decimal", + "schema": {"maxItems": 2.0}, + "tests": [ + { + "description": "shorter is valid", + "data": [1], + "valid": true + }, + { + "description": "too long is invalid", + "data": [1, 2, 3], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/maxLength.json b/vendor/jsonschema/json/tests/draft6/maxLength.json new file mode 100644 index 00000000..748b4daa --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/maxLength.json @@ -0,0 +1,49 @@ +[ + { + "description": "maxLength validation", + "schema": {"maxLength": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": "f", + "valid": true + }, + { + "description": "exact length is valid", + "data": "fo", + "valid": true + }, + { + "description": "too long is invalid", + "data": "foo", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + }, + { + "description": "two supplementary Unicode code points is long enough", + "data": "\uD83D\uDCA9\uD83D\uDCA9", + "valid": true + } + ] + }, + { + "description": "maxLength validation with a decimal", + "schema": {"maxLength": 2.0}, + "tests": [ + { + "description": "shorter is valid", + "data": "f", + "valid": true + }, + { + "description": "too long is invalid", + "data": "foo", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/maxProperties.json b/vendor/jsonschema/json/tests/draft6/maxProperties.json new file mode 100644 index 00000000..acec1420 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/maxProperties.json @@ -0,0 +1,70 @@ +[ + { + "description": "maxProperties validation", + "schema": {"maxProperties": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "exact length is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "too long is invalid", + "data": {"foo": 1, "bar": 2, "baz": 3}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [1, 2, 3], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "maxProperties validation with a decimal", + "schema": {"maxProperties": 2.0}, + "tests": [ + { + "description": "shorter 
is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "too long is invalid", + "data": {"foo": 1, "bar": 2, "baz": 3}, + "valid": false + } + ] + }, + { + "description": "maxProperties = 0 means the object is empty", + "schema": { "maxProperties": 0 }, + "tests": [ + { + "description": "no properties is valid", + "data": {}, + "valid": true + }, + { + "description": "one property is invalid", + "data": { "foo": 1 }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/maximum.json b/vendor/jsonschema/json/tests/draft6/maximum.json new file mode 100644 index 00000000..6844a39e --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/maximum.json @@ -0,0 +1,54 @@ +[ + { + "description": "maximum validation", + "schema": {"maximum": 3.0}, + "tests": [ + { + "description": "below the maximum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "boundary point is valid", + "data": 3.0, + "valid": true + }, + { + "description": "above the maximum is invalid", + "data": 3.5, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "maximum validation with unsigned integer", + "schema": {"maximum": 300}, + "tests": [ + { + "description": "below the maximum is invalid", + "data": 299.97, + "valid": true + }, + { + "description": "boundary point integer is valid", + "data": 300, + "valid": true + }, + { + "description": "boundary point float is valid", + "data": 300.00, + "valid": true + }, + { + "description": "above the maximum is invalid", + "data": 300.5, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/minItems.json b/vendor/jsonschema/json/tests/draft6/minItems.json new file mode 100644 index 00000000..d3b18720 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/minItems.json @@ -0,0 +1,44 @@ +[ + { + "description": "minItems validation", + "schema": {"minItems": 1}, + "tests": [ + { + "description": "longer is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "exact length is valid", + "data": [1], + "valid": true + }, + { + "description": "too short is invalid", + "data": [], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": "", + "valid": true + } + ] + }, + { + "description": "minItems validation with a decimal", + "schema": {"minItems": 1.0}, + "tests": [ + { + "description": "longer is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "too short is invalid", + "data": [], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/minLength.json b/vendor/jsonschema/json/tests/draft6/minLength.json new file mode 100644 index 00000000..64db9480 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/minLength.json @@ -0,0 +1,49 @@ +[ + { + "description": "minLength validation", + "schema": {"minLength": 2}, + "tests": [ + { + "description": "longer is valid", + "data": "foo", + "valid": true + }, + { + "description": "exact length is valid", + "data": "fo", + "valid": true + }, + { + "description": "too short is invalid", + "data": "f", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 1, + "valid": true + }, + { + "description": "one supplementary Unicode code point is not long enough", + "data": "\uD83D\uDCA9", + "valid": false + } + ] + }, + { + "description": "minLength validation with a decimal", + "schema": {"minLength": 2.0}, + "tests": [ + { + "description": "longer is valid", + "data": "foo", + "valid": 
true + }, + { + "description": "too short is invalid", + "data": "f", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/minProperties.json b/vendor/jsonschema/json/tests/draft6/minProperties.json new file mode 100644 index 00000000..9f74f789 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/minProperties.json @@ -0,0 +1,54 @@ +[ + { + "description": "minProperties validation", + "schema": {"minProperties": 1}, + "tests": [ + { + "description": "longer is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "exact length is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "too short is invalid", + "data": {}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores strings", + "data": "", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "minProperties validation with a decimal", + "schema": {"minProperties": 1.0}, + "tests": [ + { + "description": "longer is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "too short is invalid", + "data": {}, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/minimum.json b/vendor/jsonschema/json/tests/draft6/minimum.json new file mode 100644 index 00000000..21ae50e0 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/minimum.json @@ -0,0 +1,69 @@ +[ + { + "description": "minimum validation", + "schema": {"minimum": 1.1}, + "tests": [ + { + "description": "above the minimum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "boundary point is valid", + "data": 1.1, + "valid": true + }, + { + "description": "below the minimum is invalid", + "data": 0.6, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "minimum validation with signed integer", + "schema": {"minimum": -2}, + "tests": [ + { + "description": "negative above the minimum is valid", + "data": -1, + "valid": true + }, + { + "description": "positive above the minimum is valid", + "data": 0, + "valid": true + }, + { + "description": "boundary point is valid", + "data": -2, + "valid": true + }, + { + "description": "boundary point with float is valid", + "data": -2.0, + "valid": true + }, + { + "description": "float below the minimum is invalid", + "data": -2.0001, + "valid": false + }, + { + "description": "int below the minimum is invalid", + "data": -3, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/multipleOf.json b/vendor/jsonschema/json/tests/draft6/multipleOf.json new file mode 100644 index 00000000..25c25a91 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/multipleOf.json @@ -0,0 +1,71 @@ +[ + { + "description": "by int", + "schema": {"multipleOf": 2}, + "tests": [ + { + "description": "int by int", + "data": 10, + "valid": true + }, + { + "description": "int by int fail", + "data": 7, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "by number", + "schema": {"multipleOf": 1.5}, + "tests": [ + { + "description": "zero is multiple of anything", + "data": 0, + "valid": true + }, + { + "description": "4.5 is multiple of 1.5", + "data": 4.5, + "valid": true + }, + { + "description": "35 is not 
multiple of 1.5", + "data": 35, + "valid": false + } + ] + }, + { + "description": "by small number", + "schema": {"multipleOf": 0.0001}, + "tests": [ + { + "description": "0.0075 is multiple of 0.0001", + "data": 0.0075, + "valid": true + }, + { + "description": "0.00751 is not multiple of 0.0001", + "data": 0.00751, + "valid": false + } + ] + }, + { + "description": "float division = inf", + "schema": {"type": "integer", "multipleOf": 0.123456789}, + "tests": [ + { + "description": "always invalid, but naive implementations may raise an overflow error", + "data": 1e308, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/not.json b/vendor/jsonschema/json/tests/draft6/not.json new file mode 100644 index 00000000..98de0eda --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/not.json @@ -0,0 +1,117 @@ +[ + { + "description": "not", + "schema": { + "not": {"type": "integer"} + }, + "tests": [ + { + "description": "allowed", + "data": "foo", + "valid": true + }, + { + "description": "disallowed", + "data": 1, + "valid": false + } + ] + }, + { + "description": "not multiple types", + "schema": { + "not": {"type": ["integer", "boolean"]} + }, + "tests": [ + { + "description": "valid", + "data": "foo", + "valid": true + }, + { + "description": "mismatch", + "data": 1, + "valid": false + }, + { + "description": "other mismatch", + "data": true, + "valid": false + } + ] + }, + { + "description": "not more complex schema", + "schema": { + "not": { + "type": "object", + "properties": { + "foo": { + "type": "string" + } + } + } + }, + "tests": [ + { + "description": "match", + "data": 1, + "valid": true + }, + { + "description": "other match", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "mismatch", + "data": {"foo": "bar"}, + "valid": false + } + ] + }, + { + "description": "forbidden property", + "schema": { + "properties": { + "foo": { + "not": {} + } + } + }, + "tests": [ + { + "description": "property present", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "property absent", + "data": {"bar": 1, "baz": 2}, + "valid": true + } + ] + }, + { + "description": "not with boolean schema true", + "schema": {"not": true}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "not with boolean schema false", + "schema": {"not": false}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/oneOf.json b/vendor/jsonschema/json/tests/draft6/oneOf.json new file mode 100644 index 00000000..eeb7ae86 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/oneOf.json @@ -0,0 +1,274 @@ +[ + { + "description": "oneOf", + "schema": { + "oneOf": [ + { + "type": "integer" + }, + { + "minimum": 2 + } + ] + }, + "tests": [ + { + "description": "first oneOf valid", + "data": 1, + "valid": true + }, + { + "description": "second oneOf valid", + "data": 2.5, + "valid": true + }, + { + "description": "both oneOf valid", + "data": 3, + "valid": false + }, + { + "description": "neither oneOf valid", + "data": 1.5, + "valid": false + } + ] + }, + { + "description": "oneOf with base schema", + "schema": { + "type": "string", + "oneOf" : [ + { + "minLength": 2 + }, + { + "maxLength": 4 + } + ] + }, + "tests": [ + { + "description": "mismatch base schema", + "data": 3, + "valid": false + }, + { + "description": "one oneOf valid", + "data": "foobar", + "valid": true + }, + { + 
"description": "both oneOf valid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf with boolean schemas, all true", + "schema": {"oneOf": [true, true, true]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf with boolean schemas, one true", + "schema": {"oneOf": [true, false, false]}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "oneOf with boolean schemas, more than one true", + "schema": {"oneOf": [true, true, false]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf with boolean schemas, all false", + "schema": {"oneOf": [false, false, false]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf complex types", + "schema": { + "oneOf": [ + { + "properties": { + "bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "first oneOf valid (complex)", + "data": {"bar": 2}, + "valid": true + }, + { + "description": "second oneOf valid (complex)", + "data": {"foo": "baz"}, + "valid": true + }, + { + "description": "both oneOf valid (complex)", + "data": {"foo": "baz", "bar": 2}, + "valid": false + }, + { + "description": "neither oneOf valid (complex)", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "oneOf with empty schema", + "schema": { + "oneOf": [ + { "type": "number" }, + {} + ] + }, + "tests": [ + { + "description": "one valid - valid", + "data": "foo", + "valid": true + }, + { + "description": "both valid - invalid", + "data": 123, + "valid": false + } + ] + }, + { + "description": "oneOf with required", + "schema": { + "type": "object", + "oneOf": [ + { "required": ["foo", "bar"] }, + { "required": ["foo", "baz"] } + ] + }, + "tests": [ + { + "description": "both invalid - invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "first valid - valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "second valid - valid", + "data": {"foo": 1, "baz": 3}, + "valid": true + }, + { + "description": "both valid - invalid", + "data": {"foo": 1, "bar": 2, "baz" : 3}, + "valid": false + } + ] + }, + { + "description": "oneOf with missing optional property", + "schema": { + "oneOf": [ + { + "properties": { + "bar": true, + "baz": true + }, + "required": ["bar"] + }, + { + "properties": { + "foo": true + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "first oneOf valid", + "data": {"bar": 8}, + "valid": true + }, + { + "description": "second oneOf valid", + "data": {"foo": "foo"}, + "valid": true + }, + { + "description": "both oneOf valid", + "data": {"foo": "foo", "bar": 8}, + "valid": false + }, + { + "description": "neither oneOf valid", + "data": {"baz": "quux"}, + "valid": false + } + ] + }, + { + "description": "nested oneOf, to check validation semantics", + "schema": { + "oneOf": [ + { + "oneOf": [ + { + "type": "null" + } + ] + } + ] + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "anything non-null is invalid", + "data": 123, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/optional/bignum.json 
b/vendor/jsonschema/json/tests/draft6/optional/bignum.json new file mode 100644 index 00000000..94b4a4e6 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/optional/bignum.json @@ -0,0 +1,93 @@ +[ + { + "description": "integer", + "schema": { "type": "integer" }, + "tests": [ + { + "description": "a bignum is an integer", + "data": 12345678910111213141516171819202122232425262728293031, + "valid": true + }, + { + "description": "a negative bignum is an integer", + "data": -12345678910111213141516171819202122232425262728293031, + "valid": true + } + ] + }, + { + "description": "number", + "schema": { "type": "number" }, + "tests": [ + { + "description": "a bignum is a number", + "data": 98249283749234923498293171823948729348710298301928331, + "valid": true + }, + { + "description": "a negative bignum is a number", + "data": -98249283749234923498293171823948729348710298301928331, + "valid": true + } + ] + }, + { + "description": "string", + "schema": { "type": "string" }, + "tests": [ + { + "description": "a bignum is not a string", + "data": 98249283749234923498293171823948729348710298301928331, + "valid": false + } + ] + }, + { + "description": "maximum integer comparison", + "schema": { "maximum": 18446744073709551615 }, + "tests": [ + { + "description": "comparison works for high numbers", + "data": 18446744073709551600, + "valid": true + } + ] + }, + { + "description": "float comparison with high precision", + "schema": { + "exclusiveMaximum": 972783798187987123879878123.18878137 + }, + "tests": [ + { + "description": "comparison works for high numbers", + "data": 972783798187987123879878123.188781371, + "valid": false + } + ] + }, + { + "description": "minimum integer comparison", + "schema": { "minimum": -18446744073709551615 }, + "tests": [ + { + "description": "comparison works for very negative numbers", + "data": -18446744073709551600, + "valid": true + } + ] + }, + { + "description": "float comparison with high precision on negative numbers", + "schema": { + "exclusiveMinimum": -972783798187987123879878123.18878137 + }, + "tests": [ + { + "description": "comparison works for very negative numbers", + "data": -972783798187987123879878123.188781371, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/optional/ecmascript-regex.json b/vendor/jsonschema/json/tests/draft6/optional/ecmascript-regex.json new file mode 100644 index 00000000..c4886aaa --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/optional/ecmascript-regex.json @@ -0,0 +1,552 @@ +[ + { + "description": "ECMA 262 regex $ does not match trailing newline", + "schema": { + "type": "string", + "pattern": "^abc$" + }, + "tests": [ + { + "description": "matches in Python, but not in ECMA 262", + "data": "abc\\n", + "valid": false + }, + { + "description": "matches", + "data": "abc", + "valid": true + } + ] + }, + { + "description": "ECMA 262 regex converts \\t to horizontal tab", + "schema": { + "type": "string", + "pattern": "^\\t$" + }, + "tests": [ + { + "description": "does not match", + "data": "\\t", + "valid": false + }, + { + "description": "matches", + "data": "\u0009", + "valid": true + } + ] + }, + { + "description": "ECMA 262 regex escapes control codes with \\c and upper letter", + "schema": { + "type": "string", + "pattern": "^\\cC$" + }, + "tests": [ + { + "description": "does not match", + "data": "\\cC", + "valid": false + }, + { + "description": "matches", + "data": "\u0003", + "valid": true + } + ] + }, + { + "description": "ECMA 262 regex escapes control codes with \\c 
and lower letter", + "schema": { + "type": "string", + "pattern": "^\\cc$" + }, + "tests": [ + { + "description": "does not match", + "data": "\\cc", + "valid": false + }, + { + "description": "matches", + "data": "\u0003", + "valid": true + } + ] + }, + { + "description": "ECMA 262 \\d matches ascii digits only", + "schema": { + "type": "string", + "pattern": "^\\d$" + }, + "tests": [ + { + "description": "ASCII zero matches", + "data": "0", + "valid": true + }, + { + "description": "NKO DIGIT ZERO does not match (unlike e.g. Python)", + "data": "߀", + "valid": false + }, + { + "description": "NKO DIGIT ZERO (as \\u escape) does not match", + "data": "\u07c0", + "valid": false + } + ] + }, + { + "description": "ECMA 262 \\D matches everything but ascii digits", + "schema": { + "type": "string", + "pattern": "^\\D$" + }, + "tests": [ + { + "description": "ASCII zero does not match", + "data": "0", + "valid": false + }, + { + "description": "NKO DIGIT ZERO matches (unlike e.g. Python)", + "data": "߀", + "valid": true + }, + { + "description": "NKO DIGIT ZERO (as \\u escape) matches", + "data": "\u07c0", + "valid": true + } + ] + }, + { + "description": "ECMA 262 \\w matches ascii letters only", + "schema": { + "type": "string", + "pattern": "^\\w$" + }, + "tests": [ + { + "description": "ASCII 'a' matches", + "data": "a", + "valid": true + }, + { + "description": "latin-1 e-acute does not match (unlike e.g. Python)", + "data": "é", + "valid": false + } + ] + }, + { + "description": "ECMA 262 \\W matches everything but ascii letters", + "schema": { + "type": "string", + "pattern": "^\\W$" + }, + "tests": [ + { + "description": "ASCII 'a' does not match", + "data": "a", + "valid": false + }, + { + "description": "latin-1 e-acute matches (unlike e.g. Python)", + "data": "é", + "valid": true + } + ] + }, + { + "description": "ECMA 262 \\s matches whitespace", + "schema": { + "type": "string", + "pattern": "^\\s$" + }, + "tests": [ + { + "description": "ASCII space matches", + "data": " ", + "valid": true + }, + { + "description": "Character tabulation matches", + "data": "\t", + "valid": true + }, + { + "description": "Line tabulation matches", + "data": "\u000b", + "valid": true + }, + { + "description": "Form feed matches", + "data": "\u000c", + "valid": true + }, + { + "description": "latin-1 non-breaking-space matches", + "data": "\u00a0", + "valid": true + }, + { + "description": "zero-width whitespace matches", + "data": "\ufeff", + "valid": true + }, + { + "description": "line feed matches (line terminator)", + "data": "\u000a", + "valid": true + }, + { + "description": "paragraph separator matches (line terminator)", + "data": "\u2029", + "valid": true + }, + { + "description": "EM SPACE matches (Space_Separator)", + "data": "\u2003", + "valid": true + }, + { + "description": "Non-whitespace control does not match", + "data": "\u0001", + "valid": false + }, + { + "description": "Non-whitespace does not match", + "data": "\u2013", + "valid": false + } + ] + }, + { + "description": "ECMA 262 \\S matches everything but whitespace", + "schema": { + "type": "string", + "pattern": "^\\S$" + }, + "tests": [ + { + "description": "ASCII space does not match", + "data": " ", + "valid": false + }, + { + "description": "Character tabulation does not match", + "data": "\t", + "valid": false + }, + { + "description": "Line tabulation does not match", + "data": "\u000b", + "valid": false + }, + { + "description": "Form feed does not match", + "data": "\u000c", + "valid": false + }, + { + 
"description": "latin-1 non-breaking-space does not match", + "data": "\u00a0", + "valid": false + }, + { + "description": "zero-width whitespace does not match", + "data": "\ufeff", + "valid": false + }, + { + "description": "line feed does not match (line terminator)", + "data": "\u000a", + "valid": false + }, + { + "description": "paragraph separator does not match (line terminator)", + "data": "\u2029", + "valid": false + }, + { + "description": "EM SPACE does not match (Space_Separator)", + "data": "\u2003", + "valid": false + }, + { + "description": "Non-whitespace control matches", + "data": "\u0001", + "valid": true + }, + { + "description": "Non-whitespace matches", + "data": "\u2013", + "valid": true + } + ] + }, + { + "description": "patterns always use unicode semantics with pattern", + "schema": { "pattern": "\\p{Letter}cole" }, + "tests": [ + { + "description": "ascii character in json string", + "data": "Les hivers de mon enfance etaient des saisons longues, longues. Nous vivions en trois lieux: l'ecole, l'eglise et la patinoire; mais la vraie vie etait sur la patinoire.", + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'école, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": true + }, + { + "description": "unicode character in hex format in string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'\u00e9cole, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": true + }, + { + "description": "unicode matching is case-sensitive", + "data": "LES HIVERS DE MON ENFANCE ÉTAIENT DES SAISONS LONGUES, LONGUES. NOUS VIVIONS EN TROIS LIEUX: L'ÉCOLE, L'ÉGLISE ET LA PATINOIRE; MAIS LA VRAIE VIE ÉTAIT SUR LA PATINOIRE.", + "valid": false + } + ] + }, + { + "description": "\\w in patterns matches [A-Za-z0-9_], not unicode letters", + "schema": { "pattern": "\\wcole" }, + "tests": [ + { + "description": "ascii character in json string", + "data": "Les hivers de mon enfance etaient des saisons longues, longues. Nous vivions en trois lieux: l'ecole, l'eglise et la patinoire; mais la vraie vie etait sur la patinoire.", + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'école, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'\u00e9cole, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "unicode matching is case-sensitive", + "data": "LES HIVERS DE MON ENFANCE ÉTAIENT DES SAISONS LONGUES, LONGUES. NOUS VIVIONS EN TROIS LIEUX: L'ÉCOLE, L'ÉGLISE ET LA PATINOIRE; MAIS LA VRAIE VIE ÉTAIT SUR LA PATINOIRE.", + "valid": false + } + ] + }, + { + "description": "pattern with ASCII ranges", + "schema": { "pattern": "[a-z]cole" }, + "tests": [ + { + "description": "literal unicode character in json string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. 
Nous vivions en trois lieux: l'école, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'\u00e9cole, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "ascii characters match", + "data": "Les hivers de mon enfance etaient des saisons longues, longues. Nous vivions en trois lieux: l'ecole, l'eglise et la patinoire; mais la vraie vie etait sur la patinoire.", + "valid": true + } + ] + }, + { + "description": "\\d in pattern matches [0-9], not unicode digits", + "schema": { "pattern": "^\\d+$" }, + "tests": [ + { + "description": "ascii digits", + "data": "42", + "valid": true + }, + { + "description": "ascii non-digits", + "data": "-%#", + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": "৪২", + "valid": false + } + ] + }, + { + "description": "pattern with non-ASCII digits", + "schema": { "pattern": "^\\p{digit}+$" }, + "tests": [ + { + "description": "ascii digits", + "data": "42", + "valid": true + }, + { + "description": "ascii non-digits", + "data": "-%#", + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": "৪২", + "valid": true + } + ] + }, + { + "description": "patterns always use unicode semantics with patternProperties", + "schema": { + "type": "object", + "patternProperties": { + "\\p{Letter}cole": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii character in json string", + "data": { "l'ecole": "pas de vraie vie" }, + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": { "l'école": "pas de vraie vie" }, + "valid": true + }, + { + "description": "unicode character in hex format in string", + "data": { "l'\u00e9cole": "pas de vraie vie" }, + "valid": true + }, + { + "description": "unicode matching is case-sensitive", + "data": { "L'ÉCOLE": "PAS DE VRAIE VIE" }, + "valid": false + } + ] + }, + { + "description": "\\w in patternProperties matches [A-Za-z0-9_], not unicode letters", + "schema": { + "type": "object", + "patternProperties": { + "\\wcole": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii character in json string", + "data": { "l'ecole": "pas de vraie vie" }, + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": { "l'école": "pas de vraie vie" }, + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": { "l'\u00e9cole": "pas de vraie vie" }, + "valid": false + }, + { + "description": "unicode matching is case-sensitive", + "data": { "L'ÉCOLE": "PAS DE VRAIE VIE" }, + "valid": false + } + ] + }, + { + "description": "patternProperties with ASCII ranges", + "schema": { + "type": "object", + "patternProperties": { + "[a-z]cole": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "literal unicode character in json string", + "data": { "l'école": "pas de vraie vie" }, + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": { "l'\u00e9cole": "pas de vraie vie" }, + "valid": false + }, + { + "description": "ascii characters match", + "data": { "l'ecole": "pas de vraie vie" }, + "valid": true + } + ] + }, + { + 
"description": "\\d in patternProperties matches [0-9], not unicode digits", + "schema": { + "type": "object", + "patternProperties": { + "^\\d+$": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii digits", + "data": { "42": "life, the universe, and everything" }, + "valid": true + }, + { + "description": "ascii non-digits", + "data": { "-%#": "spending the year dead for tax reasons" }, + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": { "৪২": "khajit has wares if you have coin" }, + "valid": false + } + ] + }, + { + "description": "patternProperties with non-ASCII digits", + "schema": { + "type": "object", + "patternProperties": { + "^\\p{digit}+$": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii digits", + "data": { "42": "life, the universe, and everything" }, + "valid": true + }, + { + "description": "ascii non-digits", + "data": { "-%#": "spending the year dead for tax reasons" }, + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": { "৪২": "khajit has wares if you have coin" }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/optional/float-overflow.json b/vendor/jsonschema/json/tests/draft6/optional/float-overflow.json new file mode 100644 index 00000000..52ff9827 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/optional/float-overflow.json @@ -0,0 +1,13 @@ +[ + { + "description": "all integers are multiples of 0.5, if overflow is handled", + "schema": {"type": "integer", "multipleOf": 0.5}, + "tests": [ + { + "description": "valid if optional overflow handling is implemented", + "data": 1e308, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/optional/format/date-time.json b/vendor/jsonschema/json/tests/draft6/optional/format/date-time.json new file mode 100644 index 00000000..09112737 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/optional/format/date-time.json @@ -0,0 +1,133 @@ +[ + { + "description": "validation of date-time strings", + "schema": { "format": "date-time" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid date-time string", + "data": "1963-06-19T08:30:06.283185Z", + "valid": true + }, + { + "description": "a valid date-time string without second fraction", + "data": "1963-06-19T08:30:06Z", + "valid": true + }, + { + "description": "a valid date-time string with plus offset", + "data": "1937-01-01T12:00:27.87+00:20", + "valid": true + }, + { + "description": "a valid date-time string with minus offset", + "data": "1990-12-31T15:59:50.123-08:00", + "valid": true + }, + { + "description": "a valid date-time with a leap second, UTC", + "data": "1998-12-31T23:59:60Z", + "valid": true + }, + { + "description": "a valid date-time with a leap second, with minus offset", + "data": "1998-12-31T15:59:60.123-08:00", + "valid": true + }, + { + 
"description": "an invalid date-time past leap second, UTC", + "data": "1998-12-31T23:59:61Z", + "valid": false + }, + { + "description": "an invalid date-time with leap second on a wrong minute, UTC", + "data": "1998-12-31T23:58:60Z", + "valid": false + }, + { + "description": "an invalid date-time with leap second on a wrong hour, UTC", + "data": "1998-12-31T22:59:60Z", + "valid": false + }, + { + "description": "an invalid day in date-time string", + "data": "1990-02-31T15:59:59.123-08:00", + "valid": false + }, + { + "description": "an invalid offset in date-time string", + "data": "1990-12-31T15:59:59-24:00", + "valid": false + }, + { + "description": "an invalid closing Z after time-zone offset", + "data": "1963-06-19T08:30:06.28123+01:00Z", + "valid": false + }, + { + "description": "an invalid date-time string", + "data": "06/19/1963 08:30:06 PST", + "valid": false + }, + { + "description": "case-insensitive T and Z", + "data": "1963-06-19t08:30:06.283185z", + "valid": true + }, + { + "description": "only RFC3339 not all of ISO 8601 are valid", + "data": "2013-350T01:01:01", + "valid": false + }, + { + "description": "invalid non-padded month dates", + "data": "1963-6-19T08:30:06.283185Z", + "valid": false + }, + { + "description": "invalid non-padded day dates", + "data": "1963-06-1T08:30:06.283185Z", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4) in date portion", + "data": "1963-06-1৪T00:00:00Z", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4) in time portion", + "data": "1963-06-11T0৪:00:00Z", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/optional/format/email.json b/vendor/jsonschema/json/tests/draft6/optional/format/email.json new file mode 100644 index 00000000..d6761a46 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/optional/format/email.json @@ -0,0 +1,83 @@ +[ + { + "description": "validation of e-mail addresses", + "schema": { "format": "email" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid e-mail address", + "data": "joe.bloggs@example.com", + "valid": true + }, + { + "description": "an invalid e-mail address", + "data": "2962", + "valid": false + }, + { + "description": "tilde in local part is valid", + "data": "te~st@example.com", + "valid": true + }, + { + "description": "tilde before local part is valid", + "data": "~test@example.com", + "valid": true + }, + { + "description": "tilde after local part is valid", + "data": "test~@example.com", + "valid": true + }, + { + "description": "dot before local part is not valid", + "data": ".test@example.com", + "valid": false + }, + { + "description": "dot after local part is not valid", + "data": "test.@example.com", + "valid": false + }, + { + "description": "two separated dots inside local part are valid", + "data": "te.s.t@example.com", + "valid": true + }, + { + "description": "two subsequent dots inside local part are not valid", + "data": "te..st@example.com", 
+ "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/optional/format/hostname.json b/vendor/jsonschema/json/tests/draft6/optional/format/hostname.json new file mode 100644 index 00000000..8a67fda8 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/optional/format/hostname.json @@ -0,0 +1,98 @@ +[ + { + "description": "validation of host names", + "schema": { "format": "hostname" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid host name", + "data": "www.example.com", + "valid": true + }, + { + "description": "a valid punycoded IDN hostname", + "data": "xn--4gbwdl.xn--wgbh1c", + "valid": true + }, + { + "description": "a host name starting with an illegal character", + "data": "-a-host-name-that-starts-with--", + "valid": false + }, + { + "description": "a host name containing illegal characters", + "data": "not_a_valid_host_name", + "valid": false + }, + { + "description": "a host name with a component too long", + "data": "a-vvvvvvvvvvvvvvvveeeeeeeeeeeeeeeerrrrrrrrrrrrrrrryyyyyyyyyyyyyyyy-long-host-name-component", + "valid": false + }, + { + "description": "starts with hyphen", + "data": "-hostname", + "valid": false + }, + { + "description": "ends with hyphen", + "data": "hostname-", + "valid": false + }, + { + "description": "starts with underscore", + "data": "_hostname", + "valid": false + }, + { + "description": "ends with underscore", + "data": "hostname_", + "valid": false + }, + { + "description": "contains underscore", + "data": "host_name", + "valid": false + }, + { + "description": "maximum label length", + "data": "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.com", + "valid": true + }, + { + "description": "exceeds maximum label length", + "data": "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijkl.com", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/optional/format/ipv4.json b/vendor/jsonschema/json/tests/draft6/optional/format/ipv4.json new file mode 100644 index 00000000..4706581f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/optional/format/ipv4.json @@ -0,0 +1,84 @@ +[ + { + "description": "validation of IP addresses", + "schema": { "format": "ipv4" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid IP address", + "data": "192.168.0.1", + "valid": true + }, + { + "description": "an IP address with too many components", + "data": "127.0.0.0.1", + "valid": false + }, + 
{ + "description": "an IP address with out-of-range values", + "data": "256.256.256.256", + "valid": false + }, + { + "description": "an IP address without 4 components", + "data": "127.0", + "valid": false + }, + { + "description": "an IP address as an integer", + "data": "0x7f000001", + "valid": false + }, + { + "description": "an IP address as an integer (decimal)", + "data": "2130706433", + "valid": false + }, + { + "description": "invalid leading zeroes, as they are treated as octals", + "comment": "see https://sick.codes/universal-netmask-npm-package-used-by-270000-projects-vulnerable-to-octal-input-data-server-side-request-forgery-remote-file-inclusion-local-file-inclusion-and-more-cve-2021-28918/", + "data": "087.10.0.1", + "valid": false + }, + { + "description": "value without leading zero is valid", + "data": "87.10.0.1", + "valid": true + }, + { + "description": "invalid non-ASCII '২' (a Bengali 2)", + "data": "1২7.0.0.1", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/optional/format/ipv6.json b/vendor/jsonschema/json/tests/draft6/optional/format/ipv6.json new file mode 100644 index 00000000..94368f2a --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/optional/format/ipv6.json @@ -0,0 +1,208 @@ +[ + { + "description": "validation of IPv6 addresses", + "schema": { "format": "ipv6" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid IPv6 address", + "data": "::1", + "valid": true + }, + { + "description": "an IPv6 address with out-of-range values", + "data": "12345::", + "valid": false + }, + { + "description": "trailing 4 hex symbols is valid", + "data": "::abef", + "valid": true + }, + { + "description": "trailing 5 hex symbols is invalid", + "data": "::abcef", + "valid": false + }, + { + "description": "an IPv6 address with too many components", + "data": "1:1:1:1:1:1:1:1:1:1:1:1:1:1:1:1", + "valid": false + }, + { + "description": "an IPv6 address containing illegal characters", + "data": "::laptop", + "valid": false + }, + { + "description": "no digits is valid", + "data": "::", + "valid": true + }, + { + "description": "leading colons is valid", + "data": "::42:ff:1", + "valid": true + }, + { + "description": "trailing colons is valid", + "data": "d6::", + "valid": true + }, + { + "description": "missing leading octet is invalid", + "data": ":2:3:4:5:6:7:8", + "valid": false + }, + { + "description": "missing trailing octet is invalid", + "data": "1:2:3:4:5:6:7:", + "valid": false + }, + { + "description": "missing leading octet with omitted octets later", + "data": ":2:3:4::8", + "valid": false + }, + { + "description": "single set of double colons in the middle is valid", + "data": "1:d6::42", + "valid": true + }, + { + "description": "two sets of double colons is invalid", + "data": "1::d6::42", + "valid": false + }, + { + "description": "mixed format with the ipv4 section as decimal octets", + "data": "1::d6:192.168.0.1", + "valid": true + }, + { + "description": "mixed format with 
double colons between the sections", + "data": "1:2::192.168.0.1", + "valid": true + }, + { + "description": "mixed format with ipv4 section with octet out of range", + "data": "1::2:192.168.256.1", + "valid": false + }, + { + "description": "mixed format with ipv4 section with a hex octet", + "data": "1::2:192.168.ff.1", + "valid": false + }, + { + "description": "mixed format with leading double colons (ipv4-mapped ipv6 address)", + "data": "::ffff:192.168.0.1", + "valid": true + }, + { + "description": "triple colons is invalid", + "data": "1:2:3:4:5:::8", + "valid": false + }, + { + "description": "8 octets", + "data": "1:2:3:4:5:6:7:8", + "valid": true + }, + { + "description": "insufficient octets without double colons", + "data": "1:2:3:4:5:6:7", + "valid": false + }, + { + "description": "no colons is invalid", + "data": "1", + "valid": false + }, + { + "description": "ipv4 is not ipv6", + "data": "127.0.0.1", + "valid": false + }, + { + "description": "ipv4 segment must have 4 octets", + "data": "1:2:3:4:1.2.3", + "valid": false + }, + { + "description": "leading whitespace is invalid", + "data": " ::1", + "valid": false + }, + { + "description": "trailing whitespace is invalid", + "data": "::1 ", + "valid": false + }, + { + "description": "netmask is not a part of ipv6 address", + "data": "fe80::/64", + "valid": false + }, + { + "description": "zone id is not a part of ipv6 address", + "data": "fe80::a%eth1", + "valid": false + }, + { + "description": "a long valid ipv6", + "data": "1000:1000:1000:1000:1000:1000:255.255.255.255", + "valid": true + }, + { + "description": "a long invalid ipv6, below length limit, first", + "data": "100:100:100:100:100:100:255.255.255.255.255", + "valid": false + }, + { + "description": "a long invalid ipv6, below length limit, second", + "data": "100:100:100:100:100:100:100:255.255.255.255", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4)", + "data": "1:2:3:4:5:6:7:৪", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4) in the IPv4 portion", + "data": "1:2::192.16৪.0.1", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/optional/format/json-pointer.json b/vendor/jsonschema/json/tests/draft6/optional/format/json-pointer.json new file mode 100644 index 00000000..a0346b57 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/optional/format/json-pointer.json @@ -0,0 +1,198 @@ +[ + { + "description": "validation of JSON-pointers (JSON String Representation)", + "schema": { "format": "json-pointer" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid JSON-pointer", + "data": "/foo/bar~0/baz~1/%a", + "valid": true + }, + { + "description": "not a valid JSON-pointer (~ not escaped)", + "data": "/foo/bar~", + "valid": false + }, + { + "description": "valid JSON-pointer with empty segment", + "data": "/foo//bar", + "valid": true + }, + { + "description": "valid JSON-pointer with the last empty segment", + "data": 
"/foo/bar/", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #1", + "data": "", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #2", + "data": "/foo", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #3", + "data": "/foo/0", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #4", + "data": "/", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #5", + "data": "/a~1b", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #6", + "data": "/c%d", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #7", + "data": "/e^f", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #8", + "data": "/g|h", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #9", + "data": "/i\\j", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #10", + "data": "/k\"l", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #11", + "data": "/ ", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #12", + "data": "/m~0n", + "valid": true + }, + { + "description": "valid JSON-pointer used adding to the last array position", + "data": "/foo/-", + "valid": true + }, + { + "description": "valid JSON-pointer (- used as object member name)", + "data": "/foo/-/bar", + "valid": true + }, + { + "description": "valid JSON-pointer (multiple escaped characters)", + "data": "/~1~0~0~1~1", + "valid": true + }, + { + "description": "valid JSON-pointer (escaped with fraction part) #1", + "data": "/~1.1", + "valid": true + }, + { + "description": "valid JSON-pointer (escaped with fraction part) #2", + "data": "/~0.1", + "valid": true + }, + { + "description": "not a valid JSON-pointer (URI Fragment Identifier) #1", + "data": "#", + "valid": false + }, + { + "description": "not a valid JSON-pointer (URI Fragment Identifier) #2", + "data": "#/", + "valid": false + }, + { + "description": "not a valid JSON-pointer (URI Fragment Identifier) #3", + "data": "#a", + "valid": false + }, + { + "description": "not a valid JSON-pointer (some escaped, but not all) #1", + "data": "/~0~", + "valid": false + }, + { + "description": "not a valid JSON-pointer (some escaped, but not all) #2", + "data": "/~0/~", + "valid": false + }, + { + "description": "not a valid JSON-pointer (wrong escape character) #1", + "data": "/~2", + "valid": false + }, + { + "description": "not a valid JSON-pointer (wrong escape character) #2", + "data": "/~-1", + "valid": false + }, + { + "description": "not a valid JSON-pointer (multiple characters not escaped)", + "data": "/~~", + "valid": false + }, + { + "description": "not a valid JSON-pointer (isn't empty nor starts with /) #1", + "data": "a", + "valid": false + }, + { + "description": "not a valid JSON-pointer (isn't empty nor starts with /) #2", + "data": "0", + "valid": false + }, + { + "description": "not a valid JSON-pointer (isn't empty nor starts with /) #3", + "data": "a/a", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/optional/format/unknown.json b/vendor/jsonschema/json/tests/draft6/optional/format/unknown.json new file mode 100644 index 00000000..12339ae5 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/optional/format/unknown.json @@ -0,0 +1,43 @@ +[ + { + "description": "unknown 
format", + "schema": { "format": "unknown" }, + "tests": [ + { + "description": "unknown formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "unknown formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "unknown formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "unknown formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "unknown formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "unknown formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "unknown formats ignore strings", + "data": "string", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/optional/format/uri-reference.json b/vendor/jsonschema/json/tests/draft6/optional/format/uri-reference.json new file mode 100644 index 00000000..7cdf228d --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/optional/format/uri-reference.json @@ -0,0 +1,73 @@ +[ + { + "description": "validation of URI References", + "schema": { "format": "uri-reference" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid URI", + "data": "http://foo.bar/?baz=qux#quux", + "valid": true + }, + { + "description": "a valid protocol-relative URI Reference", + "data": "//foo.bar/?baz=qux#quux", + "valid": true + }, + { + "description": "a valid relative URI Reference", + "data": "/abc", + "valid": true + }, + { + "description": "an invalid URI Reference", + "data": "\\\\WINDOWS\\fileshare", + "valid": false + }, + { + "description": "a valid URI Reference", + "data": "abc", + "valid": true + }, + { + "description": "a valid URI fragment", + "data": "#fragment", + "valid": true + }, + { + "description": "an invalid URI fragment", + "data": "#frag\\ment", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/optional/format/uri-template.json b/vendor/jsonschema/json/tests/draft6/optional/format/uri-template.json new file mode 100644 index 00000000..df355c55 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/optional/format/uri-template.json @@ -0,0 +1,58 @@ +[ + { + "description": "format: uri-template", + "schema": { "format": "uri-template" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid uri-template", + "data": "http://example.com/dictionary/{term:1}/{term}", + "valid": true + }, + { + "description": "an invalid uri-template", + "data": 
"http://example.com/dictionary/{term:1}/{term", + "valid": false + }, + { + "description": "a valid uri-template without variables", + "data": "http://example.com/dictionary", + "valid": true + }, + { + "description": "a valid relative uri-template", + "data": "dictionary/{term:1}/{term}", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/optional/format/uri.json b/vendor/jsonschema/json/tests/draft6/optional/format/uri.json new file mode 100644 index 00000000..792d71a0 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/optional/format/uri.json @@ -0,0 +1,108 @@ +[ + { + "description": "validation of URIs", + "schema": { "format": "uri" }, + "tests": [ + { + "description": "a valid URL with anchor tag", + "data": "http://foo.bar/?baz=qux#quux", + "valid": true + }, + { + "description": "a valid URL with anchor tag and parentheses", + "data": "http://foo.com/blah_(wikipedia)_blah#cite-1", + "valid": true + }, + { + "description": "a valid URL with URL-encoded stuff", + "data": "http://foo.bar/?q=Test%20URL-encoded%20stuff", + "valid": true + }, + { + "description": "a valid puny-coded URL ", + "data": "http://xn--nw2a.xn--j6w193g/", + "valid": true + }, + { + "description": "a valid URL with many special characters", + "data": "http://-.~_!$&'()*+,;=:%40:80%2f::::::@example.com", + "valid": true + }, + { + "description": "a valid URL based on IPv4", + "data": "http://223.255.255.254", + "valid": true + }, + { + "description": "a valid URL with ftp scheme", + "data": "ftp://ftp.is.co.za/rfc/rfc1808.txt", + "valid": true + }, + { + "description": "a valid URL for a simple text file", + "data": "http://www.ietf.org/rfc/rfc2396.txt", + "valid": true + }, + { + "description": "a valid URL ", + "data": "ldap://[2001:db8::7]/c=GB?objectClass?one", + "valid": true + }, + { + "description": "a valid mailto URI", + "data": "mailto:John.Doe@example.com", + "valid": true + }, + { + "description": "a valid newsgroup URI", + "data": "news:comp.infosystems.www.servers.unix", + "valid": true + }, + { + "description": "a valid tel URI", + "data": "tel:+1-816-555-1212", + "valid": true + }, + { + "description": "a valid URN", + "data": "urn:oasis:names:specification:docbook:dtd:xml:4.1.2", + "valid": true + }, + { + "description": "an invalid protocol-relative URI Reference", + "data": "//foo.bar/?baz=qux#quux", + "valid": false + }, + { + "description": "an invalid relative URI Reference", + "data": "/abc", + "valid": false + }, + { + "description": "an invalid URI", + "data": "\\\\WINDOWS\\fileshare", + "valid": false + }, + { + "description": "an invalid URI though valid URI reference", + "data": "abc", + "valid": false + }, + { + "description": "an invalid URI with spaces", + "data": "http:// shouldfail.com", + "valid": false + }, + { + "description": "an invalid URI with spaces and missing scheme", + "data": ":// should fail", + "valid": false + }, + { + "description": "an invalid URI with comma in scheme", + "data": "bar,baz:foo", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/optional/non-bmp-regex.json b/vendor/jsonschema/json/tests/draft6/optional/non-bmp-regex.json new file mode 100644 index 00000000..dd67af2b --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/optional/non-bmp-regex.json @@ -0,0 +1,82 @@ +[ + { + "description": "Proper UTF-16 surrogate pair handling: pattern", + "comment": "Optional because .Net doesn't correctly handle 32-bit Unicode characters", + "schema": { "pattern": "^ðŸ²*$" }, + "tests": [ + { + 
"description": "matches empty", + "data": "", + "valid": true + }, + { + "description": "matches single", + "data": "ðŸ²", + "valid": true + }, + { + "description": "matches two", + "data": "ðŸ²ðŸ²", + "valid": true + }, + { + "description": "doesn't match one", + "data": "ðŸ‰", + "valid": false + }, + { + "description": "doesn't match two", + "data": "ðŸ‰ðŸ‰", + "valid": false + }, + { + "description": "doesn't match one ASCII", + "data": "D", + "valid": false + }, + { + "description": "doesn't match two ASCII", + "data": "DD", + "valid": false + } + ] + }, + { + "description": "Proper UTF-16 surrogate pair handling: patternProperties", + "comment": "Optional because .Net doesn't correctly handle 32-bit Unicode characters", + "schema": { + "patternProperties": { + "^ðŸ²*$": { + "type": "integer" + } + } + }, + "tests": [ + { + "description": "matches empty", + "data": { "": 1 }, + "valid": true + }, + { + "description": "matches single", + "data": { "ðŸ²": 1 }, + "valid": true + }, + { + "description": "matches two", + "data": { "ðŸ²ðŸ²": 1 }, + "valid": true + }, + { + "description": "doesn't match one", + "data": { "ðŸ²": "hello" }, + "valid": false + }, + { + "description": "doesn't match two", + "data": { "ðŸ²ðŸ²": "hello" }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/pattern.json b/vendor/jsonschema/json/tests/draft6/pattern.json new file mode 100644 index 00000000..92db0f97 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/pattern.json @@ -0,0 +1,59 @@ +[ + { + "description": "pattern validation", + "schema": {"pattern": "^a*$"}, + "tests": [ + { + "description": "a matching pattern is valid", + "data": "aaa", + "valid": true + }, + { + "description": "a non-matching pattern is invalid", + "data": "abc", + "valid": false + }, + { + "description": "ignores booleans", + "data": true, + "valid": true + }, + { + "description": "ignores integers", + "data": 123, + "valid": true + }, + { + "description": "ignores floats", + "data": 1.0, + "valid": true + }, + { + "description": "ignores objects", + "data": {}, + "valid": true + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores null", + "data": null, + "valid": true + } + ] + }, + { + "description": "pattern is not anchored", + "schema": {"pattern": "a+"}, + "tests": [ + { + "description": "matches a substring", + "data": "xxaayy", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/patternProperties.json b/vendor/jsonschema/json/tests/draft6/patternProperties.json new file mode 100644 index 00000000..c276e647 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/patternProperties.json @@ -0,0 +1,171 @@ +[ + { + "description": + "patternProperties validates properties matching a regex", + "schema": { + "patternProperties": { + "f.*o": {"type": "integer"} + } + }, + "tests": [ + { + "description": "a single valid match is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "multiple valid matches is valid", + "data": {"foo": 1, "foooooo" : 2}, + "valid": true + }, + { + "description": "a single invalid match is invalid", + "data": {"foo": "bar", "fooooo": 2}, + "valid": false + }, + { + "description": "multiple invalid matches is invalid", + "data": {"foo": "bar", "foooooo" : "baz"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["foo"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foo", + "valid": true + }, + { + "description": "ignores 
other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "multiple simultaneous patternProperties are validated", + "schema": { + "patternProperties": { + "a*": {"type": "integer"}, + "aaa*": {"maximum": 20} + } + }, + "tests": [ + { + "description": "a single valid match is valid", + "data": {"a": 21}, + "valid": true + }, + { + "description": "a simultaneous match is valid", + "data": {"aaaa": 18}, + "valid": true + }, + { + "description": "multiple matches is valid", + "data": {"a": 21, "aaaa": 18}, + "valid": true + }, + { + "description": "an invalid due to one is invalid", + "data": {"a": "bar"}, + "valid": false + }, + { + "description": "an invalid due to the other is invalid", + "data": {"aaaa": 31}, + "valid": false + }, + { + "description": "an invalid due to both is invalid", + "data": {"aaa": "foo", "aaaa": 31}, + "valid": false + } + ] + }, + { + "description": "regexes are not anchored by default and are case sensitive", + "schema": { + "patternProperties": { + "[0-9]{2,}": { "type": "boolean" }, + "X_": { "type": "string" } + } + }, + "tests": [ + { + "description": "non recognized members are ignored", + "data": { "answer 1": "42" }, + "valid": true + }, + { + "description": "recognized members are accounted for", + "data": { "a31b": null }, + "valid": false + }, + { + "description": "regexes are case sensitive", + "data": { "a_x_3": 3 }, + "valid": true + }, + { + "description": "regexes are case sensitive, 2", + "data": { "a_X_3": 3 }, + "valid": false + } + ] + }, + { + "description": "patternProperties with boolean schemas", + "schema": { + "patternProperties": { + "f.*": true, + "b.*": false + } + }, + "tests": [ + { + "description": "object with property matching schema true is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "object with property matching schema false is invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "object with both properties is invalid", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "object with a property matching both true and false is invalid", + "data": {"foobar":1}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "patternProperties with null valued instance properties", + "schema": { + "patternProperties": { + "^.*bar$": {"type": "null"} + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foobar": null}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/properties.json b/vendor/jsonschema/json/tests/draft6/properties.json new file mode 100644 index 00000000..5b971ca0 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/properties.json @@ -0,0 +1,236 @@ +[ + { + "description": "object properties validation", + "schema": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "both properties present and valid is valid", + "data": {"foo": 1, "bar": "baz"}, + "valid": true + }, + { + "description": "one property invalid is invalid", + "data": {"foo": 1, "bar": {}}, + "valid": false + }, + { + "description": "both properties invalid is invalid", + "data": {"foo": [], "bar": {}}, + "valid": false + }, + { + "description": "doesn't invalidate other properties", + "data": {"quux": []}, + "valid": true + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 
12, + "valid": true + } + ] + }, + { + "description": + "properties, patternProperties, additionalProperties interaction", + "schema": { + "properties": { + "foo": {"type": "array", "maxItems": 3}, + "bar": {"type": "array"} + }, + "patternProperties": {"f.o": {"minItems": 2}}, + "additionalProperties": {"type": "integer"} + }, + "tests": [ + { + "description": "property validates property", + "data": {"foo": [1, 2]}, + "valid": true + }, + { + "description": "property invalidates property", + "data": {"foo": [1, 2, 3, 4]}, + "valid": false + }, + { + "description": "patternProperty invalidates property", + "data": {"foo": []}, + "valid": false + }, + { + "description": "patternProperty validates nonproperty", + "data": {"fxo": [1, 2]}, + "valid": true + }, + { + "description": "patternProperty invalidates nonproperty", + "data": {"fxo": []}, + "valid": false + }, + { + "description": "additionalProperty ignores property", + "data": {"bar": []}, + "valid": true + }, + { + "description": "additionalProperty validates others", + "data": {"quux": 3}, + "valid": true + }, + { + "description": "additionalProperty invalidates others", + "data": {"quux": "foo"}, + "valid": false + } + ] + }, + { + "description": "properties with boolean schema", + "schema": { + "properties": { + "foo": true, + "bar": false + } + }, + "tests": [ + { + "description": "no property present is valid", + "data": {}, + "valid": true + }, + { + "description": "only 'true' property present is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "only 'false' property present is invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "both properties present is invalid", + "data": {"foo": 1, "bar": 2}, + "valid": false + } + ] + }, + { + "description": "properties with escaped characters", + "schema": { + "properties": { + "foo\nbar": {"type": "number"}, + "foo\"bar": {"type": "number"}, + "foo\\bar": {"type": "number"}, + "foo\rbar": {"type": "number"}, + "foo\tbar": {"type": "number"}, + "foo\fbar": {"type": "number"} + } + }, + "tests": [ + { + "description": "object with all numbers is valid", + "data": { + "foo\nbar": 1, + "foo\"bar": 1, + "foo\\bar": 1, + "foo\rbar": 1, + "foo\tbar": 1, + "foo\fbar": 1 + }, + "valid": true + }, + { + "description": "object with strings is invalid", + "data": { + "foo\nbar": "1", + "foo\"bar": "1", + "foo\\bar": "1", + "foo\rbar": "1", + "foo\tbar": "1", + "foo\fbar": "1" + }, + "valid": false + } + ] + }, + { + "description": "properties with null valued instance properties", + "schema": { + "properties": { + "foo": {"type": "null"} + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foo": null}, + "valid": true + } + ] + }, + { + "description": "properties whose names are Javascript object property names", + "comment": "Ensure JS implementations don't universally consider e.g. 
__proto__ to always be present in an object.", + "schema": { + "properties": { + "__proto__": {"type": "number"}, + "toString": { + "properties": { "length": { "type": "string" } } + }, + "constructor": {"type": "number"} + } + }, + "tests": [ + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + }, + { + "description": "none of the properties mentioned", + "data": {}, + "valid": true + }, + { + "description": "__proto__ not valid", + "data": { "__proto__": "foo" }, + "valid": false + }, + { + "description": "toString not valid", + "data": { "toString": { "length": 37 } }, + "valid": false + }, + { + "description": "constructor not valid", + "data": { "constructor": { "length": 37 } }, + "valid": false + }, + { + "description": "all present and valid", + "data": { + "__proto__": 12, + "toString": { "length": "foo" }, + "constructor": 37 + }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/propertyNames.json b/vendor/jsonschema/json/tests/draft6/propertyNames.json new file mode 100644 index 00000000..f0788e64 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/propertyNames.json @@ -0,0 +1,107 @@ +[ + { + "description": "propertyNames validation", + "schema": { + "propertyNames": {"maxLength": 3} + }, + "tests": [ + { + "description": "all property names valid", + "data": { + "f": {}, + "foo": {} + }, + "valid": true + }, + { + "description": "some property names invalid", + "data": { + "foo": {}, + "foobar": {} + }, + "valid": false + }, + { + "description": "object without properties is valid", + "data": {}, + "valid": true + }, + { + "description": "ignores arrays", + "data": [1, 2, 3, 4], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "propertyNames validation with pattern", + "schema": { + "propertyNames": { "pattern": "^a+$" } + }, + "tests": [ + { + "description": "matching property names valid", + "data": { + "a": {}, + "aa": {}, + "aaa": {} + }, + "valid": true + }, + { + "description": "non-matching property name is invalid", + "data": { + "aaA": {} + }, + "valid": false + }, + { + "description": "object without properties is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "propertyNames with boolean schema true", + "schema": {"propertyNames": true}, + "tests": [ + { + "description": "object with any properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "propertyNames with boolean schema false", + "schema": {"propertyNames": false}, + "tests": [ + { + "description": "object with any properties is invalid", + "data": {"foo": 1}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/ref.json b/vendor/jsonschema/json/tests/draft6/ref.json new file mode 100644 index 00000000..1724f81f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/ref.json @@ -0,0 +1,786 @@ +[ + { + "description": "root pointer ref", + "schema": { + "properties": { + "foo": {"$ref": "#"} + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "match", + "data": {"foo": false}, + "valid": true + }, + { + "description": "recursive match", + 
"data": {"foo": {"foo": false}}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": false}, + "valid": false + }, + { + "description": "recursive mismatch", + "data": {"foo": {"bar": false}}, + "valid": false + } + ] + }, + { + "description": "relative pointer ref to object", + "schema": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"$ref": "#/properties/foo"} + } + }, + "tests": [ + { + "description": "match", + "data": {"bar": 3}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": true}, + "valid": false + } + ] + }, + { + "description": "relative pointer ref to array", + "schema": { + "items": [ + {"type": "integer"}, + {"$ref": "#/items/0"} + ] + }, + "tests": [ + { + "description": "match array", + "data": [1, 2], + "valid": true + }, + { + "description": "mismatch array", + "data": [1, "foo"], + "valid": false + } + ] + }, + { + "description": "escaped pointer ref", + "schema": { + "definitions": { + "tilde~field": {"type": "integer"}, + "slash/field": {"type": "integer"}, + "percent%field": {"type": "integer"} + }, + "properties": { + "tilde": {"$ref": "#/definitions/tilde~0field"}, + "slash": {"$ref": "#/definitions/slash~1field"}, + "percent": {"$ref": "#/definitions/percent%25field"} + } + }, + "tests": [ + { + "description": "slash invalid", + "data": {"slash": "aoeu"}, + "valid": false + }, + { + "description": "tilde invalid", + "data": {"tilde": "aoeu"}, + "valid": false + }, + { + "description": "percent invalid", + "data": {"percent": "aoeu"}, + "valid": false + }, + { + "description": "slash valid", + "data": {"slash": 123}, + "valid": true + }, + { + "description": "tilde valid", + "data": {"tilde": 123}, + "valid": true + }, + { + "description": "percent valid", + "data": {"percent": 123}, + "valid": true + } + ] + }, + { + "description": "nested refs", + "schema": { + "definitions": { + "a": {"type": "integer"}, + "b": {"$ref": "#/definitions/a"}, + "c": {"$ref": "#/definitions/b"} + }, + "allOf": [{ "$ref": "#/definitions/c" }] + }, + "tests": [ + { + "description": "nested ref valid", + "data": 5, + "valid": true + }, + { + "description": "nested ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "ref overrides any sibling keywords", + "schema": { + "definitions": { + "reffed": { + "type": "array" + } + }, + "properties": { + "foo": { + "$ref": "#/definitions/reffed", + "maxItems": 2 + } + } + }, + "tests": [ + { + "description": "ref valid", + "data": { "foo": [] }, + "valid": true + }, + { + "description": "ref valid, maxItems ignored", + "data": { "foo": [ 1, 2, 3] }, + "valid": true + }, + { + "description": "ref invalid", + "data": { "foo": "string" }, + "valid": false + } + ] + }, + { + "description": "$ref prevents a sibling $id from changing the base uri", + "schema": { + "$id": "http://localhost:1234/sibling_id/base/", + "definitions": { + "foo": { + "$id": "http://localhost:1234/sibling_id/foo.json", + "type": "string" + }, + "base_foo": { + "$comment": "this canonical uri is http://localhost:1234/sibling_id/base/foo.json", + "$id": "foo.json", + "type": "number" + } + }, + "allOf": [ + { + "$comment": "$ref resolves to http://localhost:1234/sibling_id/base/foo.json, not http://localhost:1234/sibling_id/foo.json", + "$id": "http://localhost:1234/sibling_id/", + "$ref": "foo.json" + } + ] + }, + "tests": [ + { + "description": "$ref resolves to /definitions/base_foo, data does not validate", + "data": "a", + "valid": false + }, + { + "description": "$ref resolves to 
/definitions/base_foo, data validates", + "data": 1, + "valid": true + } + ] + }, + { + "description": "remote ref, containing refs itself", + "schema": {"$ref": "http://json-schema.org/draft-06/schema#"}, + "tests": [ + { + "description": "remote ref valid", + "data": {"minLength": 1}, + "valid": true + }, + { + "description": "remote ref invalid", + "data": {"minLength": -1}, + "valid": false + } + ] + }, + { + "description": "property named $ref that is not a reference", + "schema": { + "properties": { + "$ref": {"type": "string"} + } + }, + "tests": [ + { + "description": "property named $ref valid", + "data": {"$ref": "a"}, + "valid": true + }, + { + "description": "property named $ref invalid", + "data": {"$ref": 2}, + "valid": false + } + ] + }, + { + "description": "property named $ref, containing an actual $ref", + "schema": { + "properties": { + "$ref": {"$ref": "#/definitions/is-string"} + }, + "definitions": { + "is-string": { + "type": "string" + } + } + }, + "tests": [ + { + "description": "property named $ref valid", + "data": {"$ref": "a"}, + "valid": true + }, + { + "description": "property named $ref invalid", + "data": {"$ref": 2}, + "valid": false + } + ] + }, + { + "description": "$ref to boolean schema true", + "schema": { + "allOf": [{ "$ref": "#/definitions/bool" }], + "definitions": { + "bool": true + } + }, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "$ref to boolean schema false", + "schema": { + "allOf": [{ "$ref": "#/definitions/bool" }], + "definitions": { + "bool": false + } + }, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "Recursive references between schemas", + "schema": { + "$id": "http://localhost:1234/tree", + "description": "tree of nodes", + "type": "object", + "properties": { + "meta": {"type": "string"}, + "nodes": { + "type": "array", + "items": {"$ref": "node"} + } + }, + "required": ["meta", "nodes"], + "definitions": { + "node": { + "$id": "http://localhost:1234/node", + "description": "node", + "type": "object", + "properties": { + "value": {"type": "number"}, + "subtree": {"$ref": "tree"} + }, + "required": ["value"] + } + } + }, + "tests": [ + { + "description": "valid tree", + "data": { + "meta": "root", + "nodes": [ + { + "value": 1, + "subtree": { + "meta": "child", + "nodes": [ + {"value": 1.1}, + {"value": 1.2} + ] + } + }, + { + "value": 2, + "subtree": { + "meta": "child", + "nodes": [ + {"value": 2.1}, + {"value": 2.2} + ] + } + } + ] + }, + "valid": true + }, + { + "description": "invalid tree", + "data": { + "meta": "root", + "nodes": [ + { + "value": 1, + "subtree": { + "meta": "child", + "nodes": [ + {"value": "string is invalid"}, + {"value": 1.2} + ] + } + }, + { + "value": 2, + "subtree": { + "meta": "child", + "nodes": [ + {"value": 2.1}, + {"value": 2.2} + ] + } + } + ] + }, + "valid": false + } + ] + }, + { + "description": "refs with quote", + "schema": { + "properties": { + "foo\"bar": {"$ref": "#/definitions/foo%22bar"} + }, + "definitions": { + "foo\"bar": {"type": "number"} + } + }, + "tests": [ + { + "description": "object with numbers is valid", + "data": { + "foo\"bar": 1 + }, + "valid": true + }, + { + "description": "object with strings is invalid", + "data": { + "foo\"bar": "1" + }, + "valid": false + } + ] + }, + { + "description": "Location-independent identifier", + "schema": { + "allOf": [{ + "$ref": "#foo" + }], + "definitions": { + "A": { + "$id": 
"#foo", + "type": "integer" + } + } + }, + "tests": [ + { + "data": 1, + "description": "match", + "valid": true + }, + { + "data": "a", + "description": "mismatch", + "valid": false + } + ] + }, + { + "description": "Location-independent identifier with base URI change in subschema", + "schema": { + "$id": "http://localhost:1234/root", + "allOf": [{ + "$ref": "http://localhost:1234/nested.json#foo" + }], + "definitions": { + "A": { + "$id": "nested.json", + "definitions": { + "B": { + "$id": "#foo", + "type": "integer" + } + } + } + } + }, + "tests": [ + { + "data": 1, + "description": "match", + "valid": true + }, + { + "data": "a", + "description": "mismatch", + "valid": false + } + ] + }, + { + "description": "naive replacement of $ref with its destination is not correct", + "schema": { + "definitions": { + "a_string": { "type": "string" } + }, + "enum": [ + { "$ref": "#/definitions/a_string" } + ] + }, + "tests": [ + { + "description": "do not evaluate the $ref inside the enum, matching any string", + "data": "this is a string", + "valid": false + }, + { + "description": "do not evaluate the $ref inside the enum, definition exact match", + "data": { "type": "string" }, + "valid": false + }, + { + "description": "match the enum exactly", + "data": { "$ref": "#/definitions/a_string" }, + "valid": true + } + ] + }, + { + "description": "refs with relative uris and defs", + "schema": { + "$id": "http://example.com/schema-relative-uri-defs1.json", + "properties": { + "foo": { + "$id": "schema-relative-uri-defs2.json", + "definitions": { + "inner": { + "properties": { + "bar": { "type": "string" } + } + } + }, + "allOf": [ { "$ref": "#/definitions/inner" } ] + } + }, + "allOf": [ { "$ref": "schema-relative-uri-defs2.json" } ] + }, + "tests": [ + { + "description": "invalid on inner field", + "data": { + "foo": { + "bar": 1 + }, + "bar": "a" + }, + "valid": false + }, + { + "description": "invalid on outer field", + "data": { + "foo": { + "bar": "a" + }, + "bar": 1 + }, + "valid": false + }, + { + "description": "valid on both fields", + "data": { + "foo": { + "bar": "a" + }, + "bar": "a" + }, + "valid": true + } + ] + }, + { + "description": "relative refs with absolute uris and defs", + "schema": { + "$id": "http://example.com/schema-refs-absolute-uris-defs1.json", + "properties": { + "foo": { + "$id": "http://example.com/schema-refs-absolute-uris-defs2.json", + "definitions": { + "inner": { + "properties": { + "bar": { "type": "string" } + } + } + }, + "allOf": [ { "$ref": "#/definitions/inner" } ] + } + }, + "allOf": [ { "$ref": "schema-refs-absolute-uris-defs2.json" } ] + }, + "tests": [ + { + "description": "invalid on inner field", + "data": { + "foo": { + "bar": 1 + }, + "bar": "a" + }, + "valid": false + }, + { + "description": "invalid on outer field", + "data": { + "foo": { + "bar": "a" + }, + "bar": 1 + }, + "valid": false + }, + { + "description": "valid on both fields", + "data": { + "foo": { + "bar": "a" + }, + "bar": "a" + }, + "valid": true + } + ] + }, + { + "description": "simple URN base URI with $ref via the URN", + "schema": { + "$comment": "URIs do not have to have HTTP(s) schemes", + "$id": "urn:uuid:deadbeef-1234-ffff-ffff-4321feebdaed", + "minimum": 30, + "properties": { + "foo": {"$ref": "urn:uuid:deadbeef-1234-ffff-ffff-4321feebdaed"} + } + }, + "tests": [ + { + "description": "valid under the URN IDed schema", + "data": {"foo": 37}, + "valid": true + }, + { + "description": "invalid under the URN IDed schema", + "data": {"foo": 12}, + "valid": false + } + ] + }, 
+ { + "description": "simple URN base URI with JSON pointer", + "schema": { + "$comment": "URIs do not have to have HTTP(s) schemes", + "$id": "urn:uuid:deadbeef-1234-00ff-ff00-4321feebdaed", + "properties": { + "foo": {"$ref": "#/definitions/bar"} + }, + "definitions": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with NSS", + "schema": { + "$comment": "RFC 8141 §2.2", + "$id": "urn:example:1/406/47452/2", + "properties": { + "foo": {"$ref": "#/definitions/bar"} + }, + "definitions": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with r-component", + "schema": { + "$comment": "RFC 8141 §2.3.1", + "$id": "urn:example:foo-bar-baz-qux?+CCResolve:cc=uk", + "properties": { + "foo": {"$ref": "#/definitions/bar"} + }, + "definitions": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with q-component", + "schema": { + "$comment": "RFC 8141 §2.3.2", + "$id": "urn:example:weather?=op=map&lat=39.56&lon=-104.85&datetime=1969-07-21T02:56:15Z", + "properties": { + "foo": {"$ref": "#/definitions/bar"} + }, + "definitions": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with URN and JSON pointer ref", + "schema": { + "$id": "urn:uuid:deadbeef-1234-0000-0000-4321feebdaed", + "properties": { + "foo": {"$ref": "urn:uuid:deadbeef-1234-0000-0000-4321feebdaed#/definitions/bar"} + }, + "definitions": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with URN and anchor ref", + "schema": { + "$id": "urn:uuid:deadbeef-1234-ff00-00ff-4321feebdaed", + "properties": { + "foo": {"$ref": "urn:uuid:deadbeef-1234-ff00-00ff-4321feebdaed#something"} + }, + "definitions": { + "bar": { + "$id": "#something", + "type": "string" + } + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/refRemote.json b/vendor/jsonschema/json/tests/draft6/refRemote.json new file mode 100644 index 00000000..c2b20024 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/refRemote.json @@ -0,0 +1,239 @@ +[ + { + "description": "remote ref", + "schema": {"$ref": "http://localhost:1234/integer.json"}, + "tests": [ + { + "description": "remote ref valid", + "data": 1, + "valid": true + }, + { + "description": "remote ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "fragment within remote 
ref", + "schema": {"$ref": "http://localhost:1234/subSchemas.json#/integer"}, + "tests": [ + { + "description": "remote fragment valid", + "data": 1, + "valid": true + }, + { + "description": "remote fragment invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "ref within remote ref", + "schema": { + "$ref": "http://localhost:1234/subSchemas.json#/refToInteger" + }, + "tests": [ + { + "description": "ref within ref valid", + "data": 1, + "valid": true + }, + { + "description": "ref within ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "base URI change", + "schema": { + "$id": "http://localhost:1234/", + "items": { + "$id": "baseUriChange/", + "items": {"$ref": "folderInteger.json"} + } + }, + "tests": [ + { + "description": "base URI change ref valid", + "data": [[1]], + "valid": true + }, + { + "description": "base URI change ref invalid", + "data": [["a"]], + "valid": false + } + ] + }, + { + "description": "base URI change - change folder", + "schema": { + "$id": "http://localhost:1234/scope_change_defs1.json", + "type" : "object", + "properties": { + "list": {"$ref": "#/definitions/baz"} + }, + "definitions": { + "baz": { + "$id": "baseUriChangeFolder/", + "type": "array", + "items": {"$ref": "folderInteger.json"} + } + } + }, + "tests": [ + { + "description": "number is valid", + "data": {"list": [1]}, + "valid": true + }, + { + "description": "string is invalid", + "data": {"list": ["a"]}, + "valid": false + } + ] + }, + { + "description": "base URI change - change folder in subschema", + "schema": { + "$id": "http://localhost:1234/scope_change_defs2.json", + "type" : "object", + "properties": { + "list": {"$ref": "#/definitions/baz/definitions/bar"} + }, + "definitions": { + "baz": { + "$id": "baseUriChangeFolderInSubschema/", + "definitions": { + "bar": { + "type": "array", + "items": {"$ref": "folderInteger.json"} + } + } + } + } + }, + "tests": [ + { + "description": "number is valid", + "data": {"list": [1]}, + "valid": true + }, + { + "description": "string is invalid", + "data": {"list": ["a"]}, + "valid": false + } + ] + }, + { + "description": "root ref in remote ref", + "schema": { + "$id": "http://localhost:1234/object", + "type": "object", + "properties": { + "name": {"$ref": "name.json#/definitions/orNull"} + } + }, + "tests": [ + { + "description": "string is valid", + "data": { + "name": "foo" + }, + "valid": true + }, + { + "description": "null is valid", + "data": { + "name": null + }, + "valid": true + }, + { + "description": "object is invalid", + "data": { + "name": { + "name": null + } + }, + "valid": false + } + ] + }, + { + "description": "remote ref with ref to definitions", + "schema": { + "$id": "http://localhost:1234/schema-remote-ref-ref-defs1.json", + "allOf": [ + { "$ref": "ref-and-definitions.json" } + ] + }, + "tests": [ + { + "description": "invalid", + "data": { + "bar": 1 + }, + "valid": false + }, + { + "description": "valid", + "data": { + "bar": "a" + }, + "valid": true + } + ] + }, + { + "description": "Location-independent identifier in remote ref", + "schema": { + "$ref": "http://localhost:1234/locationIndependentIdentifierPre2019.json#/definitions/refToInteger" + }, + "tests": [ + { + "description": "integer is valid", + "data": 1, + "valid": true + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "retrieved nested refs resolve relative to their URI not $id", + "schema": { + "$id": "http://localhost:1234/some-id", + 
"properties": { + "name": {"$ref": "nested/foo-ref-string.json"} + } + }, + "tests": [ + { + "description": "number is invalid", + "data": { + "name": {"foo": 1} + }, + "valid": false + }, + { + "description": "string is valid", + "data": { + "name": {"foo": "a"} + }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/required.json b/vendor/jsonschema/json/tests/draft6/required.json new file mode 100644 index 00000000..8d8087af --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/required.json @@ -0,0 +1,151 @@ +[ + { + "description": "required validation", + "schema": { + "properties": { + "foo": {}, + "bar": {} + }, + "required": ["foo"] + }, + "tests": [ + { + "description": "present required property is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "non-present required property is invalid", + "data": {"bar": 1}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores strings", + "data": "", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "required default validation", + "schema": { + "properties": { + "foo": {} + } + }, + "tests": [ + { + "description": "not required by default", + "data": {}, + "valid": true + } + ] + }, + { + "description": "required with empty array", + "schema": { + "properties": { + "foo": {} + }, + "required": [] + }, + "tests": [ + { + "description": "property not required", + "data": {}, + "valid": true + } + ] + }, + { + "description": "required with escaped characters", + "schema": { + "required": [ + "foo\nbar", + "foo\"bar", + "foo\\bar", + "foo\rbar", + "foo\tbar", + "foo\fbar" + ] + }, + "tests": [ + { + "description": "object with all properties present is valid", + "data": { + "foo\nbar": 1, + "foo\"bar": 1, + "foo\\bar": 1, + "foo\rbar": 1, + "foo\tbar": 1, + "foo\fbar": 1 + }, + "valid": true + }, + { + "description": "object with some properties missing is invalid", + "data": { + "foo\nbar": "1", + "foo\"bar": "1" + }, + "valid": false + } + ] + }, + { + "description": "required properties whose names are Javascript object property names", + "comment": "Ensure JS implementations don't universally consider e.g. 
__proto__ to always be present in an object.", + "schema": { "required": ["__proto__", "toString", "constructor"] }, + "tests": [ + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + }, + { + "description": "none of the properties mentioned", + "data": {}, + "valid": false + }, + { + "description": "__proto__ present", + "data": { "__proto__": "foo" }, + "valid": false + }, + { + "description": "toString present", + "data": { "toString": { "length": 37 } }, + "valid": false + }, + { + "description": "constructor present", + "data": { "constructor": { "length": 37 } }, + "valid": false + }, + { + "description": "all present", + "data": { + "__proto__": 12, + "toString": { "length": "foo" }, + "constructor": 37 + }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/type.json b/vendor/jsonschema/json/tests/draft6/type.json new file mode 100644 index 00000000..83046470 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/type.json @@ -0,0 +1,474 @@ +[ + { + "description": "integer type matches integers", + "schema": {"type": "integer"}, + "tests": [ + { + "description": "an integer is an integer", + "data": 1, + "valid": true + }, + { + "description": "a float with zero fractional part is an integer", + "data": 1.0, + "valid": true + }, + { + "description": "a float is not an integer", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an integer", + "data": "foo", + "valid": false + }, + { + "description": "a string is still not an integer, even if it looks like one", + "data": "1", + "valid": false + }, + { + "description": "an object is not an integer", + "data": {}, + "valid": false + }, + { + "description": "an array is not an integer", + "data": [], + "valid": false + }, + { + "description": "a boolean is not an integer", + "data": true, + "valid": false + }, + { + "description": "null is not an integer", + "data": null, + "valid": false + } + ] + }, + { + "description": "number type matches numbers", + "schema": {"type": "number"}, + "tests": [ + { + "description": "an integer is a number", + "data": 1, + "valid": true + }, + { + "description": "a float with zero fractional part is a number (and an integer)", + "data": 1.0, + "valid": true + }, + { + "description": "a float is a number", + "data": 1.1, + "valid": true + }, + { + "description": "a string is not a number", + "data": "foo", + "valid": false + }, + { + "description": "a string is still not a number, even if it looks like one", + "data": "1", + "valid": false + }, + { + "description": "an object is not a number", + "data": {}, + "valid": false + }, + { + "description": "an array is not a number", + "data": [], + "valid": false + }, + { + "description": "a boolean is not a number", + "data": true, + "valid": false + }, + { + "description": "null is not a number", + "data": null, + "valid": false + } + ] + }, + { + "description": "string type matches strings", + "schema": {"type": "string"}, + "tests": [ + { + "description": "1 is not a string", + "data": 1, + "valid": false + }, + { + "description": "a float is not a string", + "data": 1.1, + "valid": false + }, + { + "description": "a string is a string", + "data": "foo", + "valid": true + }, + { + "description": "a string is still a string, even if it looks like a number", + "data": "1", + "valid": true + }, + { + "description": "an empty string is still a string", + "data": "", + "valid": true + }, + { + 
"description": "an object is not a string", + "data": {}, + "valid": false + }, + { + "description": "an array is not a string", + "data": [], + "valid": false + }, + { + "description": "a boolean is not a string", + "data": true, + "valid": false + }, + { + "description": "null is not a string", + "data": null, + "valid": false + } + ] + }, + { + "description": "object type matches objects", + "schema": {"type": "object"}, + "tests": [ + { + "description": "an integer is not an object", + "data": 1, + "valid": false + }, + { + "description": "a float is not an object", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an object", + "data": "foo", + "valid": false + }, + { + "description": "an object is an object", + "data": {}, + "valid": true + }, + { + "description": "an array is not an object", + "data": [], + "valid": false + }, + { + "description": "a boolean is not an object", + "data": true, + "valid": false + }, + { + "description": "null is not an object", + "data": null, + "valid": false + } + ] + }, + { + "description": "array type matches arrays", + "schema": {"type": "array"}, + "tests": [ + { + "description": "an integer is not an array", + "data": 1, + "valid": false + }, + { + "description": "a float is not an array", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an array", + "data": "foo", + "valid": false + }, + { + "description": "an object is not an array", + "data": {}, + "valid": false + }, + { + "description": "an array is an array", + "data": [], + "valid": true + }, + { + "description": "a boolean is not an array", + "data": true, + "valid": false + }, + { + "description": "null is not an array", + "data": null, + "valid": false + } + ] + }, + { + "description": "boolean type matches booleans", + "schema": {"type": "boolean"}, + "tests": [ + { + "description": "an integer is not a boolean", + "data": 1, + "valid": false + }, + { + "description": "zero is not a boolean", + "data": 0, + "valid": false + }, + { + "description": "a float is not a boolean", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not a boolean", + "data": "foo", + "valid": false + }, + { + "description": "an empty string is not a boolean", + "data": "", + "valid": false + }, + { + "description": "an object is not a boolean", + "data": {}, + "valid": false + }, + { + "description": "an array is not a boolean", + "data": [], + "valid": false + }, + { + "description": "true is a boolean", + "data": true, + "valid": true + }, + { + "description": "false is a boolean", + "data": false, + "valid": true + }, + { + "description": "null is not a boolean", + "data": null, + "valid": false + } + ] + }, + { + "description": "null type matches only the null object", + "schema": {"type": "null"}, + "tests": [ + { + "description": "an integer is not null", + "data": 1, + "valid": false + }, + { + "description": "a float is not null", + "data": 1.1, + "valid": false + }, + { + "description": "zero is not null", + "data": 0, + "valid": false + }, + { + "description": "a string is not null", + "data": "foo", + "valid": false + }, + { + "description": "an empty string is not null", + "data": "", + "valid": false + }, + { + "description": "an object is not null", + "data": {}, + "valid": false + }, + { + "description": "an array is not null", + "data": [], + "valid": false + }, + { + "description": "true is not null", + "data": true, + "valid": false + }, + { + "description": "false is not null", + "data": false, + "valid": false + }, + { 
+ "description": "null is null", + "data": null, + "valid": true + } + ] + }, + { + "description": "multiple types can be specified in an array", + "schema": {"type": ["integer", "string"]}, + "tests": [ + { + "description": "an integer is valid", + "data": 1, + "valid": true + }, + { + "description": "a string is valid", + "data": "foo", + "valid": true + }, + { + "description": "a float is invalid", + "data": 1.1, + "valid": false + }, + { + "description": "an object is invalid", + "data": {}, + "valid": false + }, + { + "description": "an array is invalid", + "data": [], + "valid": false + }, + { + "description": "a boolean is invalid", + "data": true, + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + } + ] + }, + { + "description": "type as array with one item", + "schema": { + "type": ["string"] + }, + "tests": [ + { + "description": "string is valid", + "data": "foo", + "valid": true + }, + { + "description": "number is invalid", + "data": 123, + "valid": false + } + ] + }, + { + "description": "type: array or object", + "schema": { + "type": ["array", "object"] + }, + "tests": [ + { + "description": "array is valid", + "data": [1,2,3], + "valid": true + }, + { + "description": "object is valid", + "data": {"foo": 123}, + "valid": true + }, + { + "description": "number is invalid", + "data": 123, + "valid": false + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + } + ] + }, + { + "description": "type: array, object or null", + "schema": { + "type": ["array", "object", "null"] + }, + "tests": [ + { + "description": "array is valid", + "data": [1,2,3], + "valid": true + }, + { + "description": "object is valid", + "data": {"foo": 123}, + "valid": true + }, + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "number is invalid", + "data": 123, + "valid": false + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/uniqueItems.json b/vendor/jsonschema/json/tests/draft6/uniqueItems.json new file mode 100644 index 00000000..2ccf666d --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/uniqueItems.json @@ -0,0 +1,404 @@ +[ + { + "description": "uniqueItems validation", + "schema": {"uniqueItems": true}, + "tests": [ + { + "description": "unique array of integers is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "non-unique array of integers is invalid", + "data": [1, 1], + "valid": false + }, + { + "description": "non-unique array of more than two integers is invalid", + "data": [1, 2, 1], + "valid": false + }, + { + "description": "numbers are unique if mathematically unequal", + "data": [1.0, 1.00, 1], + "valid": false + }, + { + "description": "false is not equal to zero", + "data": [0, false], + "valid": true + }, + { + "description": "true is not equal to one", + "data": [1, true], + "valid": true + }, + { + "description": "unique array of strings is valid", + "data": ["foo", "bar", "baz"], + "valid": true + }, + { + "description": "non-unique array of strings is invalid", + "data": ["foo", "bar", "foo"], + "valid": false + }, + { + "description": "unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "baz"}], + "valid": true + }, + { + "description": "non-unique array of objects is invalid", + "data": [{"foo": "bar"}, {"foo": "bar"}], + "valid": false + }, + { 
+ "description": "unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : false}}} + ], + "valid": true + }, + { + "description": "non-unique array of nested objects is invalid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : true}}} + ], + "valid": false + }, + { + "description": "unique array of arrays is valid", + "data": [["foo"], ["bar"]], + "valid": true + }, + { + "description": "non-unique array of arrays is invalid", + "data": [["foo"], ["foo"]], + "valid": false + }, + { + "description": "non-unique array of more than two arrays is invalid", + "data": [["foo"], ["bar"], ["foo"]], + "valid": false + }, + { + "description": "1 and true are unique", + "data": [1, true], + "valid": true + }, + { + "description": "0 and false are unique", + "data": [0, false], + "valid": true + }, + { + "description": "[1] and [true] are unique", + "data": [[1], [true]], + "valid": true + }, + { + "description": "[0] and [false] are unique", + "data": [[0], [false]], + "valid": true + }, + { + "description": "nested [1] and [true] are unique", + "data": [[[1], "foo"], [[true], "foo"]], + "valid": true + }, + { + "description": "nested [0] and [false] are unique", + "data": [[[0], "foo"], [[false], "foo"]], + "valid": true + }, + { + "description": "unique heterogeneous types are valid", + "data": [{}, [1], true, null, 1, "{}"], + "valid": true + }, + { + "description": "non-unique heterogeneous types are invalid", + "data": [{}, [1], true, null, {}, 1], + "valid": false + }, + { + "description": "different objects are unique", + "data": [{"a": 1, "b": 2}, {"a": 2, "b": 1}], + "valid": true + }, + { + "description": "objects are non-unique despite key order", + "data": [{"a": 1, "b": 2}, {"b": 2, "a": 1}], + "valid": false + }, + { + "description": "{\"a\": false} and {\"a\": 0} are unique", + "data": [{"a": false}, {"a": 0}], + "valid": true + }, + { + "description": "{\"a\": true} and {\"a\": 1} are unique", + "data": [{"a": true}, {"a": 1}], + "valid": true + } + ] + }, + { + "description": "uniqueItems with an array of items", + "schema": { + "items": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": true + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is not valid", + "data": [false, false], + "valid": false + }, + { + "description": "[true, true] from items array is not valid", + "data": [true, true], + "valid": false + }, + { + "description": "unique array extended from [false, true] is valid", + "data": [false, true, "foo", "bar"], + "valid": true + }, + { + "description": "unique array extended from [true, false] is valid", + "data": [true, false, "foo", "bar"], + "valid": true + }, + { + "description": "non-unique array extended from [false, true] is not valid", + "data": [false, true, "foo", "foo"], + "valid": false + }, + { + "description": "non-unique array extended from [true, false] is not valid", + "data": [true, false, "foo", "foo"], + "valid": false + } + ] + }, + { + "description": "uniqueItems with an array of items and additionalItems=false", + "schema": { + "items": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": true, + "additionalItems": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", 
+ "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is not valid", + "data": [false, false], + "valid": false + }, + { + "description": "[true, true] from items array is not valid", + "data": [true, true], + "valid": false + }, + { + "description": "extra items are invalid even if unique", + "data": [false, true, null], + "valid": false + } + ] + }, + { + "description": "uniqueItems=false validation", + "schema": { "uniqueItems": false }, + "tests": [ + { + "description": "unique array of integers is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "non-unique array of integers is valid", + "data": [1, 1], + "valid": true + }, + { + "description": "numbers are unique if mathematically unequal", + "data": [1.0, 1.00, 1], + "valid": true + }, + { + "description": "false is not equal to zero", + "data": [0, false], + "valid": true + }, + { + "description": "true is not equal to one", + "data": [1, true], + "valid": true + }, + { + "description": "unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "baz"}], + "valid": true + }, + { + "description": "non-unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "bar"}], + "valid": true + }, + { + "description": "unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : false}}} + ], + "valid": true + }, + { + "description": "non-unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : true}}} + ], + "valid": true + }, + { + "description": "unique array of arrays is valid", + "data": [["foo"], ["bar"]], + "valid": true + }, + { + "description": "non-unique array of arrays is valid", + "data": [["foo"], ["foo"]], + "valid": true + }, + { + "description": "1 and true are unique", + "data": [1, true], + "valid": true + }, + { + "description": "0 and false are unique", + "data": [0, false], + "valid": true + }, + { + "description": "unique heterogeneous types are valid", + "data": [{}, [1], true, null, 1], + "valid": true + }, + { + "description": "non-unique heterogeneous types are valid", + "data": [{}, [1], true, null, {}, 1], + "valid": true + } + ] + }, + { + "description": "uniqueItems=false with an array of items", + "schema": { + "items": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is valid", + "data": [false, false], + "valid": true + }, + { + "description": "[true, true] from items array is valid", + "data": [true, true], + "valid": true + }, + { + "description": "unique array extended from [false, true] is valid", + "data": [false, true, "foo", "bar"], + "valid": true + }, + { + "description": "unique array extended from [true, false] is valid", + "data": [true, false, "foo", "bar"], + "valid": true + }, + { + "description": "non-unique array extended from [false, true] is valid", + "data": [false, true, "foo", "foo"], + "valid": true + }, + { + "description": "non-unique array extended from [true, false] is valid", + "data": [true, false, "foo", "foo"], + "valid": true + } + ] + }, + { + "description": 
"uniqueItems=false with an array of items and additionalItems=false", + "schema": { + "items": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": false, + "additionalItems": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is valid", + "data": [false, false], + "valid": true + }, + { + "description": "[true, true] from items array is valid", + "data": [true, true], + "valid": true + }, + { + "description": "extra items are invalid even if unique", + "data": [false, true, null], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft6/unknownKeyword.json b/vendor/jsonschema/json/tests/draft6/unknownKeyword.json new file mode 100644 index 00000000..1f58d97e --- /dev/null +++ b/vendor/jsonschema/json/tests/draft6/unknownKeyword.json @@ -0,0 +1,56 @@ +[ + { + "description": "$id inside an unknown keyword is not a real identifier", + "comment": "the implementation must not be confused by an $id in locations we do not know how to parse", + "schema": { + "definitions": { + "id_in_unknown0": { + "not": { + "array_of_schemas": [ + { + "$id": "https://localhost:1234/unknownKeyword/my_identifier.json", + "type": "null" + } + ] + } + }, + "real_id_in_schema": { + "$id": "https://localhost:1234/unknownKeyword/my_identifier.json", + "type": "string" + }, + "id_in_unknown1": { + "not": { + "object_of_schemas": { + "foo": { + "$id": "https://localhost:1234/unknownKeyword/my_identifier.json", + "type": "integer" + } + } + } + } + }, + "anyOf": [ + { "$ref": "#/definitions/id_in_unknown0" }, + { "$ref": "#/definitions/id_in_unknown1" }, + { "$ref": "https://localhost:1234/unknownKeyword/my_identifier.json" } + ] + }, + "tests": [ + { + "description": "type matches second anyOf, which has a real schema in it", + "data": "a string", + "valid": true + }, + { + "description": "type matches non-schema in first anyOf", + "data": null, + "valid": false + }, + { + "description": "type matches non-schema in third anyOf", + "data": 1, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/additionalItems.json b/vendor/jsonschema/json/tests/draft7/additionalItems.json new file mode 100644 index 00000000..deb44fd3 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/additionalItems.json @@ -0,0 +1,164 @@ +[ + { + "description": "additionalItems as schema", + "schema": { + "items": [{}], + "additionalItems": {"type": "integer"} + }, + "tests": [ + { + "description": "additional items match schema", + "data": [ null, 2, 3, 4 ], + "valid": true + }, + { + "description": "additional items do not match schema", + "data": [ null, 2, 3, "foo" ], + "valid": false + } + ] + }, + { + "description": "when items is schema, additionalItems does nothing", + "schema": { + "items": {}, + "additionalItems": false + }, + "tests": [ + { + "description": "all items match schema", + "data": [ 1, 2, 3, 4, 5 ], + "valid": true + } + ] + }, + { + "description": "array of items with no additionalItems permitted", + "schema": { + "items": [{}, {}, {}], + "additionalItems": false + }, + "tests": [ + { + "description": "empty array", + "data": [ ], + "valid": true + }, + { + "description": "fewer number of items present (1)", + "data": [ 1 ], + "valid": true + }, + { + "description": "fewer number of items present (2)", + "data": [ 1, 2 ], 
+ "valid": true + }, + { + "description": "equal number of items present", + "data": [ 1, 2, 3 ], + "valid": true + }, + { + "description": "additional items are not permitted", + "data": [ 1, 2, 3, 4 ], + "valid": false + } + ] + }, + { + "description": "additionalItems as false without items", + "schema": {"additionalItems": false}, + "tests": [ + { + "description": + "items defaults to empty schema so everything is valid", + "data": [ 1, 2, 3, 4, 5 ], + "valid": true + }, + { + "description": "ignores non-arrays", + "data": {"foo" : "bar"}, + "valid": true + } + ] + }, + { + "description": "additionalItems are allowed by default", + "schema": {"items": [{"type": "integer"}]}, + "tests": [ + { + "description": "only the first item is validated", + "data": [1, "foo", false], + "valid": true + } + ] + }, + { + "description": "additionalItems does not look in applicators, valid case", + "schema": { + "allOf": [ + { "items": [ { "type": "integer" } ] } + ], + "additionalItems": { "type": "boolean" } + }, + "tests": [ + { + "description": "items defined in allOf are not examined", + "data": [ 1, null ], + "valid": true + } + ] + }, + { + "description": "additionalItems does not look in applicators, invalid case", + "schema": { + "allOf": [ + { "items": [ { "type": "integer" }, { "type": "string" } ] } + ], + "items": [ {"type": "integer" } ], + "additionalItems": { "type": "boolean" } + }, + "tests": [ + { + "description": "items defined in allOf are not examined", + "data": [ 1, "hello" ], + "valid": false + } + ] + }, + { + "description": "items validation adjusts the starting index for additionalItems", + "schema": { + "items": [ { "type": "string" } ], + "additionalItems": { "type": "integer" } + }, + "tests": [ + { + "description": "valid items", + "data": [ "x", 2, 3 ], + "valid": true + }, + { + "description": "wrong type of second item", + "data": [ "x", "y" ], + "valid": false + } + ] + }, + { + "description": "additionalItems with null instance elements", + "schema": { + "additionalItems": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/additionalProperties.json b/vendor/jsonschema/json/tests/draft7/additionalProperties.json new file mode 100644 index 00000000..0f8e1627 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/additionalProperties.json @@ -0,0 +1,147 @@ +[ + { + "description": + "additionalProperties being false does not allow other properties", + "schema": { + "properties": {"foo": {}, "bar": {}}, + "patternProperties": { "^v": {} }, + "additionalProperties": false + }, + "tests": [ + { + "description": "no additional properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "an additional property is invalid", + "data": {"foo" : 1, "bar" : 2, "quux" : "boom"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [1, 2, 3], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobarbaz", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + }, + { + "description": "patternProperties are not additional properties", + "data": {"foo":1, "vroom": 2}, + "valid": true + } + ] + }, + { + "description": "non-ASCII pattern with additionalProperties", + "schema": { + "patternProperties": {"^á": {}}, + "additionalProperties": false + }, + "tests": [ + { + "description": "matching the pattern is valid", + "data": {"ármányos": 2}, + 
"valid": true + }, + { + "description": "not matching the pattern is invalid", + "data": {"élmény": 2}, + "valid": false + } + ] + }, + { + "description": "additionalProperties with schema", + "schema": { + "properties": {"foo": {}, "bar": {}}, + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "no additional properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "an additional valid property is valid", + "data": {"foo" : 1, "bar" : 2, "quux" : true}, + "valid": true + }, + { + "description": "an additional invalid property is invalid", + "data": {"foo" : 1, "bar" : 2, "quux" : 12}, + "valid": false + } + ] + }, + { + "description": + "additionalProperties can exist by itself", + "schema": { + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "an additional valid property is valid", + "data": {"foo" : true}, + "valid": true + }, + { + "description": "an additional invalid property is invalid", + "data": {"foo" : 1}, + "valid": false + } + ] + }, + { + "description": "additionalProperties are allowed by default", + "schema": {"properties": {"foo": {}, "bar": {}}}, + "tests": [ + { + "description": "additional properties are allowed", + "data": {"foo": 1, "bar": 2, "quux": true}, + "valid": true + } + ] + }, + { + "description": "additionalProperties does not look in applicators", + "schema": { + "allOf": [ + {"properties": {"foo": {}}} + ], + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "properties defined in allOf are not examined", + "data": {"foo": 1, "bar": true}, + "valid": false + } + ] + }, + { + "description": "additionalProperties with null valued instance properties", + "schema": { + "additionalProperties": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foo": null}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/allOf.json b/vendor/jsonschema/json/tests/draft7/allOf.json new file mode 100644 index 00000000..ec9319e1 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/allOf.json @@ -0,0 +1,294 @@ +[ + { + "description": "allOf", + "schema": { + "allOf": [ + { + "properties": { + "bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "allOf", + "data": {"foo": "baz", "bar": 2}, + "valid": true + }, + { + "description": "mismatch second", + "data": {"foo": "baz"}, + "valid": false + }, + { + "description": "mismatch first", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "wrong type", + "data": {"foo": "baz", "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "allOf with base schema", + "schema": { + "properties": {"bar": {"type": "integer"}}, + "required": ["bar"], + "allOf" : [ + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + }, + { + "properties": { + "baz": {"type": "null"} + }, + "required": ["baz"] + } + ] + }, + "tests": [ + { + "description": "valid", + "data": {"foo": "quux", "bar": 2, "baz": null}, + "valid": true + }, + { + "description": "mismatch base schema", + "data": {"foo": "quux", "baz": null}, + "valid": false + }, + { + "description": "mismatch first allOf", + "data": {"bar": 2, "baz": null}, + "valid": false + }, + { + "description": "mismatch second allOf", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "mismatch both", + 
"data": {"bar": 2}, + "valid": false + } + ] + }, + { + "description": "allOf simple types", + "schema": { + "allOf": [ + {"maximum": 30}, + {"minimum": 20} + ] + }, + "tests": [ + { + "description": "valid", + "data": 25, + "valid": true + }, + { + "description": "mismatch one", + "data": 35, + "valid": false + } + ] + }, + { + "description": "allOf with boolean schemas, all true", + "schema": {"allOf": [true, true]}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "allOf with boolean schemas, some false", + "schema": {"allOf": [true, false]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "allOf with boolean schemas, all false", + "schema": {"allOf": [false, false]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "allOf with one empty schema", + "schema": { + "allOf": [ + {} + ] + }, + "tests": [ + { + "description": "any data is valid", + "data": 1, + "valid": true + } + ] + }, + { + "description": "allOf with two empty schemas", + "schema": { + "allOf": [ + {}, + {} + ] + }, + "tests": [ + { + "description": "any data is valid", + "data": 1, + "valid": true + } + ] + }, + { + "description": "allOf with the first empty schema", + "schema": { + "allOf": [ + {}, + { "type": "number" } + ] + }, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "allOf with the last empty schema", + "schema": { + "allOf": [ + { "type": "number" }, + {} + ] + }, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "nested allOf, to check validation semantics", + "schema": { + "allOf": [ + { + "allOf": [ + { + "type": "null" + } + ] + } + ] + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "anything non-null is invalid", + "data": 123, + "valid": false + } + ] + }, + { + "description": "allOf combined with anyOf, oneOf", + "schema": { + "allOf": [ { "multipleOf": 2 } ], + "anyOf": [ { "multipleOf": 3 } ], + "oneOf": [ { "multipleOf": 5 } ] + }, + "tests": [ + { + "description": "allOf: false, anyOf: false, oneOf: false", + "data": 1, + "valid": false + }, + { + "description": "allOf: false, anyOf: false, oneOf: true", + "data": 5, + "valid": false + }, + { + "description": "allOf: false, anyOf: true, oneOf: false", + "data": 3, + "valid": false + }, + { + "description": "allOf: false, anyOf: true, oneOf: true", + "data": 15, + "valid": false + }, + { + "description": "allOf: true, anyOf: false, oneOf: false", + "data": 2, + "valid": false + }, + { + "description": "allOf: true, anyOf: false, oneOf: true", + "data": 10, + "valid": false + }, + { + "description": "allOf: true, anyOf: true, oneOf: false", + "data": 6, + "valid": false + }, + { + "description": "allOf: true, anyOf: true, oneOf: true", + "data": 30, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/anyOf.json b/vendor/jsonschema/json/tests/draft7/anyOf.json new file mode 100644 index 00000000..ab5eb386 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/anyOf.json @@ -0,0 +1,189 @@ +[ + { + "description": "anyOf", + "schema": { + "anyOf": 
[ + { + "type": "integer" + }, + { + "minimum": 2 + } + ] + }, + "tests": [ + { + "description": "first anyOf valid", + "data": 1, + "valid": true + }, + { + "description": "second anyOf valid", + "data": 2.5, + "valid": true + }, + { + "description": "both anyOf valid", + "data": 3, + "valid": true + }, + { + "description": "neither anyOf valid", + "data": 1.5, + "valid": false + } + ] + }, + { + "description": "anyOf with base schema", + "schema": { + "type": "string", + "anyOf" : [ + { + "maxLength": 2 + }, + { + "minLength": 4 + } + ] + }, + "tests": [ + { + "description": "mismatch base schema", + "data": 3, + "valid": false + }, + { + "description": "one anyOf valid", + "data": "foobar", + "valid": true + }, + { + "description": "both anyOf invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "anyOf with boolean schemas, all true", + "schema": {"anyOf": [true, true]}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "anyOf with boolean schemas, some true", + "schema": {"anyOf": [true, false]}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "anyOf with boolean schemas, all false", + "schema": {"anyOf": [false, false]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "anyOf complex types", + "schema": { + "anyOf": [ + { + "properties": { + "bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "first anyOf valid (complex)", + "data": {"bar": 2}, + "valid": true + }, + { + "description": "second anyOf valid (complex)", + "data": {"foo": "baz"}, + "valid": true + }, + { + "description": "both anyOf valid (complex)", + "data": {"foo": "baz", "bar": 2}, + "valid": true + }, + { + "description": "neither anyOf valid (complex)", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "anyOf with one empty schema", + "schema": { + "anyOf": [ + { "type": "number" }, + {} + ] + }, + "tests": [ + { + "description": "string is valid", + "data": "foo", + "valid": true + }, + { + "description": "number is valid", + "data": 123, + "valid": true + } + ] + }, + { + "description": "nested anyOf, to check validation semantics", + "schema": { + "anyOf": [ + { + "anyOf": [ + { + "type": "null" + } + ] + } + ] + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "anything non-null is invalid", + "data": 123, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/boolean_schema.json b/vendor/jsonschema/json/tests/draft7/boolean_schema.json new file mode 100644 index 00000000..6d40f23f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/boolean_schema.json @@ -0,0 +1,104 @@ +[ + { + "description": "boolean schema 'true'", + "schema": true, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "string is valid", + "data": "foo", + "valid": true + }, + { + "description": "boolean true is valid", + "data": true, + "valid": true + }, + { + "description": "boolean false is valid", + "data": false, + "valid": true + }, + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "object is valid", + "data": {"foo": "bar"}, + 
"valid": true + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + }, + { + "description": "array is valid", + "data": ["foo"], + "valid": true + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "boolean schema 'false'", + "schema": false, + "tests": [ + { + "description": "number is invalid", + "data": 1, + "valid": false + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + }, + { + "description": "boolean true is invalid", + "data": true, + "valid": false + }, + { + "description": "boolean false is invalid", + "data": false, + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + }, + { + "description": "object is invalid", + "data": {"foo": "bar"}, + "valid": false + }, + { + "description": "empty object is invalid", + "data": {}, + "valid": false + }, + { + "description": "array is invalid", + "data": ["foo"], + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/const.json b/vendor/jsonschema/json/tests/draft7/const.json new file mode 100644 index 00000000..1c2cafcc --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/const.json @@ -0,0 +1,342 @@ +[ + { + "description": "const validation", + "schema": {"const": 2}, + "tests": [ + { + "description": "same value is valid", + "data": 2, + "valid": true + }, + { + "description": "another value is invalid", + "data": 5, + "valid": false + }, + { + "description": "another type is invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "const with object", + "schema": {"const": {"foo": "bar", "baz": "bax"}}, + "tests": [ + { + "description": "same object is valid", + "data": {"foo": "bar", "baz": "bax"}, + "valid": true + }, + { + "description": "same object with different property order is valid", + "data": {"baz": "bax", "foo": "bar"}, + "valid": true + }, + { + "description": "another object is invalid", + "data": {"foo": "bar"}, + "valid": false + }, + { + "description": "another type is invalid", + "data": [1, 2], + "valid": false + } + ] + }, + { + "description": "const with array", + "schema": {"const": [{ "foo": "bar" }]}, + "tests": [ + { + "description": "same array is valid", + "data": [{"foo": "bar"}], + "valid": true + }, + { + "description": "another array item is invalid", + "data": [2], + "valid": false + }, + { + "description": "array with additional items is invalid", + "data": [1, 2, 3], + "valid": false + } + ] + }, + { + "description": "const with null", + "schema": {"const": null}, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "not null is invalid", + "data": 0, + "valid": false + } + ] + }, + { + "description": "const with false does not match 0", + "schema": {"const": false}, + "tests": [ + { + "description": "false is valid", + "data": false, + "valid": true + }, + { + "description": "integer zero is invalid", + "data": 0, + "valid": false + }, + { + "description": "float zero is invalid", + "data": 0.0, + "valid": false + } + ] + }, + { + "description": "const with true does not match 1", + "schema": {"const": true}, + "tests": [ + { + "description": "true is valid", + "data": true, + "valid": true + }, + { + "description": "integer one is invalid", + "data": 1, + "valid": false + }, + { + "description": "float one is invalid", + "data": 1.0, + "valid": false + } 
+ ] + }, + { + "description": "const with [false] does not match [0]", + "schema": {"const": [false]}, + "tests": [ + { + "description": "[false] is valid", + "data": [false], + "valid": true + }, + { + "description": "[0] is invalid", + "data": [0], + "valid": false + }, + { + "description": "[0.0] is invalid", + "data": [0.0], + "valid": false + } + ] + }, + { + "description": "const with [true] does not match [1]", + "schema": {"const": [true]}, + "tests": [ + { + "description": "[true] is valid", + "data": [true], + "valid": true + }, + { + "description": "[1] is invalid", + "data": [1], + "valid": false + }, + { + "description": "[1.0] is invalid", + "data": [1.0], + "valid": false + } + ] + }, + { + "description": "const with {\"a\": false} does not match {\"a\": 0}", + "schema": {"const": {"a": false}}, + "tests": [ + { + "description": "{\"a\": false} is valid", + "data": {"a": false}, + "valid": true + }, + { + "description": "{\"a\": 0} is invalid", + "data": {"a": 0}, + "valid": false + }, + { + "description": "{\"a\": 0.0} is invalid", + "data": {"a": 0.0}, + "valid": false + } + ] + }, + { + "description": "const with {\"a\": true} does not match {\"a\": 1}", + "schema": {"const": {"a": true}}, + "tests": [ + { + "description": "{\"a\": true} is valid", + "data": {"a": true}, + "valid": true + }, + { + "description": "{\"a\": 1} is invalid", + "data": {"a": 1}, + "valid": false + }, + { + "description": "{\"a\": 1.0} is invalid", + "data": {"a": 1.0}, + "valid": false + } + ] + }, + { + "description": "const with 0 does not match other zero-like types", + "schema": {"const": 0}, + "tests": [ + { + "description": "false is invalid", + "data": false, + "valid": false + }, + { + "description": "integer zero is valid", + "data": 0, + "valid": true + }, + { + "description": "float zero is valid", + "data": 0.0, + "valid": true + }, + { + "description": "empty object is invalid", + "data": {}, + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + }, + { + "description": "empty string is invalid", + "data": "", + "valid": false + } + ] + }, + { + "description": "const with 1 does not match true", + "schema": {"const": 1}, + "tests": [ + { + "description": "true is invalid", + "data": true, + "valid": false + }, + { + "description": "integer one is valid", + "data": 1, + "valid": true + }, + { + "description": "float one is valid", + "data": 1.0, + "valid": true + } + ] + }, + { + "description": "const with -2.0 matches integer and float types", + "schema": {"const": -2.0}, + "tests": [ + { + "description": "integer -2 is valid", + "data": -2, + "valid": true + }, + { + "description": "integer 2 is invalid", + "data": 2, + "valid": false + }, + { + "description": "float -2.0 is valid", + "data": -2.0, + "valid": true + }, + { + "description": "float 2.0 is invalid", + "data": 2.0, + "valid": false + }, + { + "description": "float -2.00001 is invalid", + "data": -2.00001, + "valid": false + } + ] + }, + { + "description": "float and integers are equal up to 64-bit representation limits", + "schema": {"const": 9007199254740992}, + "tests": [ + { + "description": "integer is valid", + "data": 9007199254740992, + "valid": true + }, + { + "description": "integer minus one is invalid", + "data": 9007199254740991, + "valid": false + }, + { + "description": "float is valid", + "data": 9007199254740992.0, + "valid": true + }, + { + "description": "float minus one is invalid", + "data": 9007199254740991.0, + "valid": false + } + ] + }, + { + 
"description": "nul characters in strings", + "schema": { "const": "hello\u0000there" }, + "tests": [ + { + "description": "match string with nul", + "data": "hello\u0000there", + "valid": true + }, + { + "description": "do not match string lacking nul", + "data": "hellothere", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/contains.json b/vendor/jsonschema/json/tests/draft7/contains.json new file mode 100644 index 00000000..2b1a5152 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/contains.json @@ -0,0 +1,165 @@ +[ + { + "description": "contains keyword validation", + "schema": { + "contains": {"minimum": 5} + }, + "tests": [ + { + "description": "array with item matching schema (5) is valid", + "data": [3, 4, 5], + "valid": true + }, + { + "description": "array with item matching schema (6) is valid", + "data": [3, 4, 6], + "valid": true + }, + { + "description": "array with two items matching schema (5, 6) is valid", + "data": [3, 4, 5, 6], + "valid": true + }, + { + "description": "array without items matching schema is invalid", + "data": [2, 3, 4], + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + }, + { + "description": "not array is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "contains keyword with const keyword", + "schema": { + "contains": { "const": 5 } + }, + "tests": [ + { + "description": "array with item 5 is valid", + "data": [3, 4, 5], + "valid": true + }, + { + "description": "array with two items 5 is valid", + "data": [3, 4, 5, 5], + "valid": true + }, + { + "description": "array without item 5 is invalid", + "data": [1, 2, 3, 4], + "valid": false + } + ] + }, + { + "description": "contains keyword with boolean schema true", + "schema": {"contains": true}, + "tests": [ + { + "description": "any non-empty array is valid", + "data": ["foo"], + "valid": true + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + } + ] + }, + { + "description": "contains keyword with boolean schema false", + "schema": {"contains": false}, + "tests": [ + { + "description": "any non-empty array is invalid", + "data": ["foo"], + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + }, + { + "description": "non-arrays are valid", + "data": "contains does not apply to strings", + "valid": true + } + ] + }, + { + "description": "items + contains", + "schema": { + "items": { "multipleOf": 2 }, + "contains": { "multipleOf": 3 } + }, + "tests": [ + { + "description": "matches items, does not match contains", + "data": [ 2, 4, 8 ], + "valid": false + }, + { + "description": "does not match items, matches contains", + "data": [ 3, 6, 9 ], + "valid": false + }, + { + "description": "matches both items and contains", + "data": [ 6, 12 ], + "valid": true + }, + { + "description": "matches neither items nor contains", + "data": [ 1, 5 ], + "valid": false + } + ] + }, + { + "description": "contains with false if subschema", + "schema": { + "contains": { + "if": false, + "else": true + } + }, + "tests": [ + { + "description": "any non-empty array is valid", + "data": ["foo"], + "valid": true + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + } + ] + }, + { + "description": "contains with null instance elements", + "schema": { + "contains": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null items", + "data": [ null ], + "valid": true + } + ] + } +] diff 
--git a/vendor/jsonschema/json/tests/draft7/default.json b/vendor/jsonschema/json/tests/draft7/default.json new file mode 100644 index 00000000..289a9b66 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/default.json @@ -0,0 +1,79 @@ +[ + { + "description": "invalid type for default", + "schema": { + "properties": { + "foo": { + "type": "integer", + "default": [] + } + } + }, + "tests": [ + { + "description": "valid when property is specified", + "data": {"foo": 13}, + "valid": true + }, + { + "description": "still valid when the invalid default is used", + "data": {}, + "valid": true + } + ] + }, + { + "description": "invalid string value for default", + "schema": { + "properties": { + "bar": { + "type": "string", + "minLength": 4, + "default": "bad" + } + } + }, + "tests": [ + { + "description": "valid when property is specified", + "data": {"bar": "good"}, + "valid": true + }, + { + "description": "still valid when the invalid default is used", + "data": {}, + "valid": true + } + ] + }, + { + "description": "the default keyword does not do anything if the property is missing", + "schema": { + "type": "object", + "properties": { + "alpha": { + "type": "number", + "maximum": 3, + "default": 5 + } + } + }, + "tests": [ + { + "description": "an explicit property value is checked against maximum (passing)", + "data": { "alpha": 1 }, + "valid": true + }, + { + "description": "an explicit property value is checked against maximum (failing)", + "data": { "alpha": 5 }, + "valid": false + }, + { + "description": "missing properties are not filled in with the default", + "data": {}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/definitions.json b/vendor/jsonschema/json/tests/draft7/definitions.json new file mode 100644 index 00000000..afe396e4 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/definitions.json @@ -0,0 +1,26 @@ +[ + { + "description": "validate definition against metaschema", + "schema": {"$ref": "http://json-schema.org/draft-07/schema#"}, + "tests": [ + { + "description": "valid definition schema", + "data": { + "definitions": { + "foo": {"type": "integer"} + } + }, + "valid": true + }, + { + "description": "invalid definition schema", + "data": { + "definitions": { + "foo": {"type": 1} + } + }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/dependencies.json b/vendor/jsonschema/json/tests/draft7/dependencies.json new file mode 100644 index 00000000..a5e54282 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/dependencies.json @@ -0,0 +1,248 @@ +[ + { + "description": "dependencies", + "schema": { + "dependencies": {"bar": ["foo"]} + }, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependant", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "with dependency", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["bar"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "dependencies with empty array", + "schema": { + "dependencies": {"bar": []} + }, + "tests": [ + { + "description": "empty object", + "data": {}, + "valid": true + }, + { + "description": "object with one property", + "data": {"bar": 2}, + "valid": true + }, + { + 
"description": "non-object is valid", + "data": 1, + "valid": true + } + ] + }, + { + "description": "multiple dependencies", + "schema": { + "dependencies": {"quux": ["foo", "bar"]} + }, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependants", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "with dependencies", + "data": {"foo": 1, "bar": 2, "quux": 3}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"foo": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing other dependency", + "data": {"bar": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing both dependencies", + "data": {"quux": 1}, + "valid": false + } + ] + }, + { + "description": "multiple dependencies subschema", + "schema": { + "dependencies": { + "bar": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "integer"} + } + } + } + }, + "tests": [ + { + "description": "valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "no dependency", + "data": {"foo": "quux"}, + "valid": true + }, + { + "description": "wrong type", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "wrong type other", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + }, + { + "description": "wrong type both", + "data": {"foo": "quux", "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "dependencies with boolean subschemas", + "schema": { + "dependencies": { + "foo": true, + "bar": false + } + }, + "tests": [ + { + "description": "object with property having schema true is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "object with property having schema false is invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "object with both properties is invalid", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "dependencies with escaped characters", + "schema": { + "dependencies": { + "foo\nbar": ["foo\rbar"], + "foo\tbar": { + "minProperties": 4 + }, + "foo'bar": {"required": ["foo\"bar"]}, + "foo\"bar": ["foo'bar"] + } + }, + "tests": [ + { + "description": "valid object 1", + "data": { + "foo\nbar": 1, + "foo\rbar": 2 + }, + "valid": true + }, + { + "description": "valid object 2", + "data": { + "foo\tbar": 1, + "a": 2, + "b": 3, + "c": 4 + }, + "valid": true + }, + { + "description": "valid object 3", + "data": { + "foo'bar": 1, + "foo\"bar": 2 + }, + "valid": true + }, + { + "description": "invalid object 1", + "data": { + "foo\nbar": 1, + "foo": 2 + }, + "valid": false + }, + { + "description": "invalid object 2", + "data": { + "foo\tbar": 1, + "a": 2 + }, + "valid": false + }, + { + "description": "invalid object 3", + "data": { + "foo'bar": 1 + }, + "valid": false + }, + { + "description": "invalid object 4", + "data": { + "foo\"bar": 2 + }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/enum.json b/vendor/jsonschema/json/tests/draft7/enum.json new file mode 100644 index 00000000..f085097b --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/enum.json @@ -0,0 +1,236 @@ +[ + { + "description": "simple enum validation", + "schema": {"enum": [1, 2, 3]}, + "tests": [ + { + "description": "one of the enum is valid", + "data": 1, + "valid": true + }, + { + "description": "something else is invalid", + "data": 4, + "valid": false + } + 
] + }, + { + "description": "heterogeneous enum validation", + "schema": {"enum": [6, "foo", [], true, {"foo": 12}]}, + "tests": [ + { + "description": "one of the enum is valid", + "data": [], + "valid": true + }, + { + "description": "something else is invalid", + "data": null, + "valid": false + }, + { + "description": "objects are deep compared", + "data": {"foo": false}, + "valid": false + }, + { + "description": "valid object matches", + "data": {"foo": 12}, + "valid": true + }, + { + "description": "extra properties in object is invalid", + "data": {"foo": 12, "boo": 42}, + "valid": false + } + ] + }, + { + "description": "heterogeneous enum-with-null validation", + "schema": { "enum": [6, null] }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "number is valid", + "data": 6, + "valid": true + }, + { + "description": "something else is invalid", + "data": "test", + "valid": false + } + ] + }, + { + "description": "enums in properties", + "schema": { + "type":"object", + "properties": { + "foo": {"enum":["foo"]}, + "bar": {"enum":["bar"]} + }, + "required": ["bar"] + }, + "tests": [ + { + "description": "both properties are valid", + "data": {"foo":"foo", "bar":"bar"}, + "valid": true + }, + { + "description": "wrong foo value", + "data": {"foo":"foot", "bar":"bar"}, + "valid": false + }, + { + "description": "wrong bar value", + "data": {"foo":"foo", "bar":"bart"}, + "valid": false + }, + { + "description": "missing optional property is valid", + "data": {"bar":"bar"}, + "valid": true + }, + { + "description": "missing required property is invalid", + "data": {"foo":"foo"}, + "valid": false + }, + { + "description": "missing all properties is invalid", + "data": {}, + "valid": false + } + ] + }, + { + "description": "enum with escaped characters", + "schema": { + "enum": ["foo\nbar", "foo\rbar"] + }, + "tests": [ + { + "description": "member 1 is valid", + "data": "foo\nbar", + "valid": true + }, + { + "description": "member 2 is valid", + "data": "foo\rbar", + "valid": true + }, + { + "description": "another string is invalid", + "data": "abc", + "valid": false + } + ] + }, + { + "description": "enum with false does not match 0", + "schema": {"enum": [false]}, + "tests": [ + { + "description": "false is valid", + "data": false, + "valid": true + }, + { + "description": "integer zero is invalid", + "data": 0, + "valid": false + }, + { + "description": "float zero is invalid", + "data": 0.0, + "valid": false + } + ] + }, + { + "description": "enum with true does not match 1", + "schema": {"enum": [true]}, + "tests": [ + { + "description": "true is valid", + "data": true, + "valid": true + }, + { + "description": "integer one is invalid", + "data": 1, + "valid": false + }, + { + "description": "float one is invalid", + "data": 1.0, + "valid": false + } + ] + }, + { + "description": "enum with 0 does not match false", + "schema": {"enum": [0]}, + "tests": [ + { + "description": "false is invalid", + "data": false, + "valid": false + }, + { + "description": "integer zero is valid", + "data": 0, + "valid": true + }, + { + "description": "float zero is valid", + "data": 0.0, + "valid": true + } + ] + }, + { + "description": "enum with 1 does not match true", + "schema": {"enum": [1]}, + "tests": [ + { + "description": "true is invalid", + "data": true, + "valid": false + }, + { + "description": "integer one is valid", + "data": 1, + "valid": true + }, + { + "description": "float one is valid", + "data": 1.0, + "valid": true + } 
+ ] + }, + { + "description": "nul characters in strings", + "schema": { "enum": [ "hello\u0000there" ] }, + "tests": [ + { + "description": "match string with nul", + "data": "hello\u0000there", + "valid": true + }, + { + "description": "do not match string lacking nul", + "data": "hellothere", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/exclusiveMaximum.json b/vendor/jsonschema/json/tests/draft7/exclusiveMaximum.json new file mode 100644 index 00000000..dc3cd709 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/exclusiveMaximum.json @@ -0,0 +1,30 @@ +[ + { + "description": "exclusiveMaximum validation", + "schema": { + "exclusiveMaximum": 3.0 + }, + "tests": [ + { + "description": "below the exclusiveMaximum is valid", + "data": 2.2, + "valid": true + }, + { + "description": "boundary point is invalid", + "data": 3.0, + "valid": false + }, + { + "description": "above the exclusiveMaximum is invalid", + "data": 3.5, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/exclusiveMinimum.json b/vendor/jsonschema/json/tests/draft7/exclusiveMinimum.json new file mode 100644 index 00000000..b38d7ece --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/exclusiveMinimum.json @@ -0,0 +1,30 @@ +[ + { + "description": "exclusiveMinimum validation", + "schema": { + "exclusiveMinimum": 1.1 + }, + "tests": [ + { + "description": "above the exclusiveMinimum is valid", + "data": 1.2, + "valid": true + }, + { + "description": "boundary point is invalid", + "data": 1.1, + "valid": false + }, + { + "description": "below the exclusiveMinimum is invalid", + "data": 0.6, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/format.json b/vendor/jsonschema/json/tests/draft7/format.json new file mode 100644 index 00000000..e2447d60 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/format.json @@ -0,0 +1,614 @@ +[ + { + "description": "email format", + "schema": { "format": "email" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "idn-email format", + "schema": { "format": "idn-email" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "regex format", + "schema": { "format": "regex" }, + "tests": [ + { + "description": "all string formats ignore integers", + 
"data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "ipv4 format", + "schema": { "format": "ipv4" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "ipv6 format", + "schema": { "format": "ipv6" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "idn-hostname format", + "schema": { "format": "idn-hostname" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "hostname format", + "schema": { "format": "hostname" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "date format", + "schema": { "format": "date" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats 
ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "date-time format", + "schema": { "format": "date-time" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "time format", + "schema": { "format": "time" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "json-pointer format", + "schema": { "format": "json-pointer" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "relative-json-pointer format", + "schema": { "format": "relative-json-pointer" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "iri format", + "schema": { "format": "iri" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + 
"valid": true + } + ] + }, + { + "description": "iri-reference format", + "schema": { "format": "iri-reference" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "uri format", + "schema": { "format": "uri" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "uri-reference format", + "schema": { "format": "uri-reference" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + }, + { + "description": "uri-template format", + "schema": { "format": "uri-template" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/id.json b/vendor/jsonschema/json/tests/draft7/id.json new file mode 100644 index 00000000..6be81b8d --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/id.json @@ -0,0 +1,114 @@ +[ + { + "description": "id inside an enum is not a real identifier", + "comment": "the implementation must not be confused by an id buried in the enum", + "schema": { + "definitions": { + "id_in_enum": { + "enum": [ + { + "$id": "https://localhost:1234/id/my_identifier.json", + "type": "null" + } + ] + }, + "real_id_in_schema": { + "$id": "https://localhost:1234/id/my_identifier.json", + "type": "string" + }, + "zzz_id_in_const": { + "const": { + "$id": "https://localhost:1234/id/my_identifier.json", + "type": "null" + } + } + }, + "anyOf": [ + { "$ref": "#/definitions/id_in_enum" }, + { "$ref": 
"https://localhost:1234/id/my_identifier.json" } + ] + }, + "tests": [ + { + "description": "exact match to enum, and type matches", + "data": { + "$id": "https://localhost:1234/id/my_identifier.json", + "type": "null" + }, + "valid": true + }, + { + "description": "match $ref to id", + "data": "a string to match #/definitions/id_in_enum", + "valid": true + }, + { + "description": "no match on enum or $ref to id", + "data": 1, + "valid": false + } + ] + }, + { + "description": "non-schema object containing a plain-name $id property", + "schema": { + "definitions": { + "const_not_anchor": { + "const": { + "$id": "#not_a_real_anchor" + } + } + }, + "if": { + "const": "skip not_a_real_anchor" + }, + "then": true, + "else" : { + "$ref": "#/definitions/const_not_anchor" + } + }, + "tests": [ + { + "description": "skip traversing definition for a valid result", + "data": "skip not_a_real_anchor", + "valid": true + }, + { + "description": "const at const_not_anchor does not match", + "data": 1, + "valid": false + } + ] + }, + { + "description": "non-schema object containing an $id property", + "schema": { + "definitions": { + "const_not_id": { + "const": { + "$id": "not_a_real_id" + } + } + }, + "if": { + "const": "skip not_a_real_id" + }, + "then": true, + "else" : { + "$ref": "#/definitions/const_not_id" + } + }, + "tests": [ + { + "description": "skip traversing definition for a valid result", + "data": "skip not_a_real_id", + "valid": true + }, + { + "description": "const at const_not_id does not match", + "data": 1, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/if-then-else.json b/vendor/jsonschema/json/tests/draft7/if-then-else.json new file mode 100644 index 00000000..284e9191 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/if-then-else.json @@ -0,0 +1,258 @@ +[ + { + "description": "ignore if without then or else", + "schema": { + "if": { + "const": 0 + } + }, + "tests": [ + { + "description": "valid when valid against lone if", + "data": 0, + "valid": true + }, + { + "description": "valid when invalid against lone if", + "data": "hello", + "valid": true + } + ] + }, + { + "description": "ignore then without if", + "schema": { + "then": { + "const": 0 + } + }, + "tests": [ + { + "description": "valid when valid against lone then", + "data": 0, + "valid": true + }, + { + "description": "valid when invalid against lone then", + "data": "hello", + "valid": true + } + ] + }, + { + "description": "ignore else without if", + "schema": { + "else": { + "const": 0 + } + }, + "tests": [ + { + "description": "valid when valid against lone else", + "data": 0, + "valid": true + }, + { + "description": "valid when invalid against lone else", + "data": "hello", + "valid": true + } + ] + }, + { + "description": "if and then without else", + "schema": { + "if": { + "exclusiveMaximum": 0 + }, + "then": { + "minimum": -10 + } + }, + "tests": [ + { + "description": "valid through then", + "data": -1, + "valid": true + }, + { + "description": "invalid through then", + "data": -100, + "valid": false + }, + { + "description": "valid when if test fails", + "data": 3, + "valid": true + } + ] + }, + { + "description": "if and else without then", + "schema": { + "if": { + "exclusiveMaximum": 0 + }, + "else": { + "multipleOf": 2 + } + }, + "tests": [ + { + "description": "valid when if test passes", + "data": -1, + "valid": true + }, + { + "description": "valid through else", + "data": 4, + "valid": true + }, + { + "description": "invalid through else", + "data": 3, + 
"valid": false + } + ] + }, + { + "description": "validate against correct branch, then vs else", + "schema": { + "if": { + "exclusiveMaximum": 0 + }, + "then": { + "minimum": -10 + }, + "else": { + "multipleOf": 2 + } + }, + "tests": [ + { + "description": "valid through then", + "data": -1, + "valid": true + }, + { + "description": "invalid through then", + "data": -100, + "valid": false + }, + { + "description": "valid through else", + "data": 4, + "valid": true + }, + { + "description": "invalid through else", + "data": 3, + "valid": false + } + ] + }, + { + "description": "non-interference across combined schemas", + "schema": { + "allOf": [ + { + "if": { + "exclusiveMaximum": 0 + } + }, + { + "then": { + "minimum": -10 + } + }, + { + "else": { + "multipleOf": 2 + } + } + ] + }, + "tests": [ + { + "description": "valid, but would have been invalid through then", + "data": -100, + "valid": true + }, + { + "description": "valid, but would have been invalid through else", + "data": 3, + "valid": true + } + ] + }, + { + "description": "if with boolean schema true", + "schema": { + "if": true, + "then": { "const": "then" }, + "else": { "const": "else" } + }, + "tests": [ + { + "description": "boolean schema true in if always chooses the then path (valid)", + "data": "then", + "valid": true + }, + { + "description": "boolean schema true in if always chooses the then path (invalid)", + "data": "else", + "valid": false + } + ] + }, + { + "description": "if with boolean schema false", + "schema": { + "if": false, + "then": { "const": "then" }, + "else": { "const": "else" } + }, + "tests": [ + { + "description": "boolean schema false in if always chooses the else path (invalid)", + "data": "then", + "valid": false + }, + { + "description": "boolean schema false in if always chooses the else path (valid)", + "data": "else", + "valid": true + } + ] + }, + { + "description": "if appears at the end when serialized (keyword processing sequence)", + "schema": { + "then": { "const": "yes" }, + "else": { "const": "other" }, + "if": { "maxLength": 4 } + }, + "tests": [ + { + "description": "yes redirects to then and passes", + "data": "yes", + "valid": true + }, + { + "description": "other redirects to else and passes", + "data": "other", + "valid": true + }, + { + "description": "no redirects to then and fails", + "data": "no", + "valid": false + }, + { + "description": "invalid redirects to else and fails", + "data": "invalid", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/infinite-loop-detection.json b/vendor/jsonschema/json/tests/draft7/infinite-loop-detection.json new file mode 100644 index 00000000..f98c74fc --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/infinite-loop-detection.json @@ -0,0 +1,36 @@ +[ + { + "description": "evaluating the same schema location against the same data location twice is not a sign of an infinite loop", + "schema": { + "definitions": { + "int": { "type": "integer" } + }, + "allOf": [ + { + "properties": { + "foo": { + "$ref": "#/definitions/int" + } + } + }, + { + "additionalProperties": { + "$ref": "#/definitions/int" + } + } + ] + }, + "tests": [ + { + "description": "passing case", + "data": { "foo": 1 }, + "valid": true + }, + { + "description": "failing case", + "data": { "foo": "a string" }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/items.json b/vendor/jsonschema/json/tests/draft7/items.json new file mode 100644 index 00000000..7ed6781b --- /dev/null +++ 
b/vendor/jsonschema/json/tests/draft7/items.json @@ -0,0 +1,282 @@ +[ + { + "description": "a schema given for items", + "schema": { + "items": {"type": "integer"} + }, + "tests": [ + { + "description": "valid items", + "data": [ 1, 2, 3 ], + "valid": true + }, + { + "description": "wrong type of items", + "data": [1, "x"], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": {"foo" : "bar"}, + "valid": true + }, + { + "description": "JavaScript pseudo-array is valid", + "data": { + "0": "invalid", + "length": 1 + }, + "valid": true + } + ] + }, + { + "description": "an array of schemas for items", + "schema": { + "items": [ + {"type": "integer"}, + {"type": "string"} + ] + }, + "tests": [ + { + "description": "correct types", + "data": [ 1, "foo" ], + "valid": true + }, + { + "description": "wrong types", + "data": [ "foo", 1 ], + "valid": false + }, + { + "description": "incomplete array of items", + "data": [ 1 ], + "valid": true + }, + { + "description": "array with additional items", + "data": [ 1, "foo", true ], + "valid": true + }, + { + "description": "empty array", + "data": [ ], + "valid": true + }, + { + "description": "JavaScript pseudo-array is valid", + "data": { + "0": "invalid", + "1": "valid", + "length": 2 + }, + "valid": true + } + ] + }, + { + "description": "items with boolean schema (true)", + "schema": {"items": true}, + "tests": [ + { + "description": "any array is valid", + "data": [ 1, "foo", true ], + "valid": true + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "items with boolean schema (false)", + "schema": {"items": false}, + "tests": [ + { + "description": "any non-empty array is invalid", + "data": [ 1, "foo", true ], + "valid": false + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "items with boolean schemas", + "schema": { + "items": [true, false] + }, + "tests": [ + { + "description": "array with one item is valid", + "data": [ 1 ], + "valid": true + }, + { + "description": "array with two items is invalid", + "data": [ 1, "foo" ], + "valid": false + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "items and subitems", + "schema": { + "definitions": { + "item": { + "type": "array", + "additionalItems": false, + "items": [ + { "$ref": "#/definitions/sub-item" }, + { "$ref": "#/definitions/sub-item" } + ] + }, + "sub-item": { + "type": "object", + "required": ["foo"] + } + }, + "type": "array", + "additionalItems": false, + "items": [ + { "$ref": "#/definitions/item" }, + { "$ref": "#/definitions/item" }, + { "$ref": "#/definitions/item" } + ] + }, + "tests": [ + { + "description": "valid items", + "data": [ + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": true + }, + { + "description": "too many items", + "data": [ + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "too many sub-items", + "data": [ + [ {"foo": null}, {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "wrong item", + "data": [ + {"foo": null}, + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "wrong 
sub-item", + "data": [ + [ {}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "fewer items is valid", + "data": [ + [ {"foo": null} ], + [ {"foo": null} ] + ], + "valid": true + } + ] + }, + { + "description": "nested items", + "schema": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "number" + } + } + } + } + }, + "tests": [ + { + "description": "valid nested array", + "data": [[[[1]], [[2],[3]]], [[[4], [5], [6]]]], + "valid": true + }, + { + "description": "nested array with invalid type", + "data": [[[["1"]], [[2],[3]]], [[[4], [5], [6]]]], + "valid": false + }, + { + "description": "not deep enough", + "data": [[[1], [2],[3]], [[4], [5], [6]]], + "valid": false + } + ] + }, + { + "description": "single-form items with null instance elements", + "schema": { + "items": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + }, + { + "description": "array-form items with null instance elements", + "schema": { + "items": [ + { + "type": "null" + } + ] + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/maxItems.json b/vendor/jsonschema/json/tests/draft7/maxItems.json new file mode 100644 index 00000000..f0c36ab2 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/maxItems.json @@ -0,0 +1,44 @@ +[ + { + "description": "maxItems validation", + "schema": {"maxItems": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": [1], + "valid": true + }, + { + "description": "exact length is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "too long is invalid", + "data": [1, 2, 3], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": "foobar", + "valid": true + } + ] + }, + { + "description": "maxItems validation with a decimal", + "schema": {"maxItems": 2.0}, + "tests": [ + { + "description": "shorter is valid", + "data": [1], + "valid": true + }, + { + "description": "too long is invalid", + "data": [1, 2, 3], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/maxLength.json b/vendor/jsonschema/json/tests/draft7/maxLength.json new file mode 100644 index 00000000..748b4daa --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/maxLength.json @@ -0,0 +1,49 @@ +[ + { + "description": "maxLength validation", + "schema": {"maxLength": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": "f", + "valid": true + }, + { + "description": "exact length is valid", + "data": "fo", + "valid": true + }, + { + "description": "too long is invalid", + "data": "foo", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + }, + { + "description": "two supplementary Unicode code points is long enough", + "data": "\uD83D\uDCA9\uD83D\uDCA9", + "valid": true + } + ] + }, + { + "description": "maxLength validation with a decimal", + "schema": {"maxLength": 2.0}, + "tests": [ + { + "description": "shorter is valid", + "data": "f", + "valid": true + }, + { + "description": "too long is invalid", + "data": "foo", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/maxProperties.json b/vendor/jsonschema/json/tests/draft7/maxProperties.json new file mode 100644 index 00000000..acec1420 --- 
/dev/null +++ b/vendor/jsonschema/json/tests/draft7/maxProperties.json @@ -0,0 +1,70 @@ +[ + { + "description": "maxProperties validation", + "schema": {"maxProperties": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "exact length is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "too long is invalid", + "data": {"foo": 1, "bar": 2, "baz": 3}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [1, 2, 3], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "maxProperties validation with a decimal", + "schema": {"maxProperties": 2.0}, + "tests": [ + { + "description": "shorter is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "too long is invalid", + "data": {"foo": 1, "bar": 2, "baz": 3}, + "valid": false + } + ] + }, + { + "description": "maxProperties = 0 means the object is empty", + "schema": { "maxProperties": 0 }, + "tests": [ + { + "description": "no properties is valid", + "data": {}, + "valid": true + }, + { + "description": "one property is invalid", + "data": { "foo": 1 }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/maximum.json b/vendor/jsonschema/json/tests/draft7/maximum.json new file mode 100644 index 00000000..6844a39e --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/maximum.json @@ -0,0 +1,54 @@ +[ + { + "description": "maximum validation", + "schema": {"maximum": 3.0}, + "tests": [ + { + "description": "below the maximum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "boundary point is valid", + "data": 3.0, + "valid": true + }, + { + "description": "above the maximum is invalid", + "data": 3.5, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "maximum validation with unsigned integer", + "schema": {"maximum": 300}, + "tests": [ + { + "description": "below the maximum is valid", + "data": 299.97, + "valid": true + }, + { + "description": "boundary point integer is valid", + "data": 300, + "valid": true + }, + { + "description": "boundary point float is valid", + "data": 300.00, + "valid": true + }, + { + "description": "above the maximum is invalid", + "data": 300.5, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/minItems.json b/vendor/jsonschema/json/tests/draft7/minItems.json new file mode 100644 index 00000000..d3b18720 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/minItems.json @@ -0,0 +1,44 @@ +[ + { + "description": "minItems validation", + "schema": {"minItems": 1}, + "tests": [ + { + "description": "longer is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "exact length is valid", + "data": [1], + "valid": true + }, + { + "description": "too short is invalid", + "data": [], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": "", + "valid": true + } + ] + }, + { + "description": "minItems validation with a decimal", + "schema": {"minItems": 1.0}, + "tests": [ + { + "description": "longer is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "too short is invalid", + "data": [], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/minLength.json 
b/vendor/jsonschema/json/tests/draft7/minLength.json new file mode 100644 index 00000000..64db9480 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/minLength.json @@ -0,0 +1,49 @@ +[ + { + "description": "minLength validation", + "schema": {"minLength": 2}, + "tests": [ + { + "description": "longer is valid", + "data": "foo", + "valid": true + }, + { + "description": "exact length is valid", + "data": "fo", + "valid": true + }, + { + "description": "too short is invalid", + "data": "f", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 1, + "valid": true + }, + { + "description": "one supplementary Unicode code point is not long enough", + "data": "\uD83D\uDCA9", + "valid": false + } + ] + }, + { + "description": "minLength validation with a decimal", + "schema": {"minLength": 2.0}, + "tests": [ + { + "description": "longer is valid", + "data": "foo", + "valid": true + }, + { + "description": "too short is invalid", + "data": "f", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/minProperties.json b/vendor/jsonschema/json/tests/draft7/minProperties.json new file mode 100644 index 00000000..9f74f789 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/minProperties.json @@ -0,0 +1,54 @@ +[ + { + "description": "minProperties validation", + "schema": {"minProperties": 1}, + "tests": [ + { + "description": "longer is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "exact length is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "too short is invalid", + "data": {}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores strings", + "data": "", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "minProperties validation with a decimal", + "schema": {"minProperties": 1.0}, + "tests": [ + { + "description": "longer is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "too short is invalid", + "data": {}, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/minimum.json b/vendor/jsonschema/json/tests/draft7/minimum.json new file mode 100644 index 00000000..21ae50e0 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/minimum.json @@ -0,0 +1,69 @@ +[ + { + "description": "minimum validation", + "schema": {"minimum": 1.1}, + "tests": [ + { + "description": "above the minimum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "boundary point is valid", + "data": 1.1, + "valid": true + }, + { + "description": "below the minimum is invalid", + "data": 0.6, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "minimum validation with signed integer", + "schema": {"minimum": -2}, + "tests": [ + { + "description": "negative above the minimum is valid", + "data": -1, + "valid": true + }, + { + "description": "positive above the minimum is valid", + "data": 0, + "valid": true + }, + { + "description": "boundary point is valid", + "data": -2, + "valid": true + }, + { + "description": "boundary point with float is valid", + "data": -2.0, + "valid": true + }, + { + "description": "float below the minimum is invalid", + "data": -2.0001, + "valid": false + }, + { + "description": "int below the minimum is invalid", + "data": -3, + "valid": false + }, + { + "description": "ignores 
non-numbers", + "data": "x", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/multipleOf.json b/vendor/jsonschema/json/tests/draft7/multipleOf.json new file mode 100644 index 00000000..25c25a91 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/multipleOf.json @@ -0,0 +1,71 @@ +[ + { + "description": "by int", + "schema": {"multipleOf": 2}, + "tests": [ + { + "description": "int by int", + "data": 10, + "valid": true + }, + { + "description": "int by int fail", + "data": 7, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "by number", + "schema": {"multipleOf": 1.5}, + "tests": [ + { + "description": "zero is multiple of anything", + "data": 0, + "valid": true + }, + { + "description": "4.5 is multiple of 1.5", + "data": 4.5, + "valid": true + }, + { + "description": "35 is not multiple of 1.5", + "data": 35, + "valid": false + } + ] + }, + { + "description": "by small number", + "schema": {"multipleOf": 0.0001}, + "tests": [ + { + "description": "0.0075 is multiple of 0.0001", + "data": 0.0075, + "valid": true + }, + { + "description": "0.00751 is not multiple of 0.0001", + "data": 0.00751, + "valid": false + } + ] + }, + { + "description": "float division = inf", + "schema": {"type": "integer", "multipleOf": 0.123456789}, + "tests": [ + { + "description": "always invalid, but naive implementations may raise an overflow error", + "data": 1e308, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/not.json b/vendor/jsonschema/json/tests/draft7/not.json new file mode 100644 index 00000000..98de0eda --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/not.json @@ -0,0 +1,117 @@ +[ + { + "description": "not", + "schema": { + "not": {"type": "integer"} + }, + "tests": [ + { + "description": "allowed", + "data": "foo", + "valid": true + }, + { + "description": "disallowed", + "data": 1, + "valid": false + } + ] + }, + { + "description": "not multiple types", + "schema": { + "not": {"type": ["integer", "boolean"]} + }, + "tests": [ + { + "description": "valid", + "data": "foo", + "valid": true + }, + { + "description": "mismatch", + "data": 1, + "valid": false + }, + { + "description": "other mismatch", + "data": true, + "valid": false + } + ] + }, + { + "description": "not more complex schema", + "schema": { + "not": { + "type": "object", + "properties": { + "foo": { + "type": "string" + } + } + } + }, + "tests": [ + { + "description": "match", + "data": 1, + "valid": true + }, + { + "description": "other match", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "mismatch", + "data": {"foo": "bar"}, + "valid": false + } + ] + }, + { + "description": "forbidden property", + "schema": { + "properties": { + "foo": { + "not": {} + } + } + }, + "tests": [ + { + "description": "property present", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "property absent", + "data": {"bar": 1, "baz": 2}, + "valid": true + } + ] + }, + { + "description": "not with boolean schema true", + "schema": {"not": true}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "not with boolean schema false", + "schema": {"not": false}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/oneOf.json b/vendor/jsonschema/json/tests/draft7/oneOf.json new file mode 
100644 index 00000000..eeb7ae86 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/oneOf.json @@ -0,0 +1,274 @@ +[ + { + "description": "oneOf", + "schema": { + "oneOf": [ + { + "type": "integer" + }, + { + "minimum": 2 + } + ] + }, + "tests": [ + { + "description": "first oneOf valid", + "data": 1, + "valid": true + }, + { + "description": "second oneOf valid", + "data": 2.5, + "valid": true + }, + { + "description": "both oneOf valid", + "data": 3, + "valid": false + }, + { + "description": "neither oneOf valid", + "data": 1.5, + "valid": false + } + ] + }, + { + "description": "oneOf with base schema", + "schema": { + "type": "string", + "oneOf" : [ + { + "minLength": 2 + }, + { + "maxLength": 4 + } + ] + }, + "tests": [ + { + "description": "mismatch base schema", + "data": 3, + "valid": false + }, + { + "description": "one oneOf valid", + "data": "foobar", + "valid": true + }, + { + "description": "both oneOf valid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf with boolean schemas, all true", + "schema": {"oneOf": [true, true, true]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf with boolean schemas, one true", + "schema": {"oneOf": [true, false, false]}, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "oneOf with boolean schemas, more than one true", + "schema": {"oneOf": [true, true, false]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf with boolean schemas, all false", + "schema": {"oneOf": [false, false, false]}, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf complex types", + "schema": { + "oneOf": [ + { + "properties": { + "bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "first oneOf valid (complex)", + "data": {"bar": 2}, + "valid": true + }, + { + "description": "second oneOf valid (complex)", + "data": {"foo": "baz"}, + "valid": true + }, + { + "description": "both oneOf valid (complex)", + "data": {"foo": "baz", "bar": 2}, + "valid": false + }, + { + "description": "neither oneOf valid (complex)", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "oneOf with empty schema", + "schema": { + "oneOf": [ + { "type": "number" }, + {} + ] + }, + "tests": [ + { + "description": "one valid - valid", + "data": "foo", + "valid": true + }, + { + "description": "both valid - invalid", + "data": 123, + "valid": false + } + ] + }, + { + "description": "oneOf with required", + "schema": { + "type": "object", + "oneOf": [ + { "required": ["foo", "bar"] }, + { "required": ["foo", "baz"] } + ] + }, + "tests": [ + { + "description": "both invalid - invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "first valid - valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "second valid - valid", + "data": {"foo": 1, "baz": 3}, + "valid": true + }, + { + "description": "both valid - invalid", + "data": {"foo": 1, "bar": 2, "baz" : 3}, + "valid": false + } + ] + }, + { + "description": "oneOf with missing optional property", + "schema": { + "oneOf": [ + { + "properties": { + "bar": true, + "baz": true + }, + 
"required": ["bar"] + }, + { + "properties": { + "foo": true + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "first oneOf valid", + "data": {"bar": 8}, + "valid": true + }, + { + "description": "second oneOf valid", + "data": {"foo": "foo"}, + "valid": true + }, + { + "description": "both oneOf valid", + "data": {"foo": "foo", "bar": 8}, + "valid": false + }, + { + "description": "neither oneOf valid", + "data": {"baz": "quux"}, + "valid": false + } + ] + }, + { + "description": "nested oneOf, to check validation semantics", + "schema": { + "oneOf": [ + { + "oneOf": [ + { + "type": "null" + } + ] + } + ] + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "anything non-null is invalid", + "data": 123, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/bignum.json b/vendor/jsonschema/json/tests/draft7/optional/bignum.json new file mode 100644 index 00000000..94b4a4e6 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/bignum.json @@ -0,0 +1,93 @@ +[ + { + "description": "integer", + "schema": { "type": "integer" }, + "tests": [ + { + "description": "a bignum is an integer", + "data": 12345678910111213141516171819202122232425262728293031, + "valid": true + }, + { + "description": "a negative bignum is an integer", + "data": -12345678910111213141516171819202122232425262728293031, + "valid": true + } + ] + }, + { + "description": "number", + "schema": { "type": "number" }, + "tests": [ + { + "description": "a bignum is a number", + "data": 98249283749234923498293171823948729348710298301928331, + "valid": true + }, + { + "description": "a negative bignum is a number", + "data": -98249283749234923498293171823948729348710298301928331, + "valid": true + } + ] + }, + { + "description": "string", + "schema": { "type": "string" }, + "tests": [ + { + "description": "a bignum is not a string", + "data": 98249283749234923498293171823948729348710298301928331, + "valid": false + } + ] + }, + { + "description": "maximum integer comparison", + "schema": { "maximum": 18446744073709551615 }, + "tests": [ + { + "description": "comparison works for high numbers", + "data": 18446744073709551600, + "valid": true + } + ] + }, + { + "description": "float comparison with high precision", + "schema": { + "exclusiveMaximum": 972783798187987123879878123.18878137 + }, + "tests": [ + { + "description": "comparison works for high numbers", + "data": 972783798187987123879878123.188781371, + "valid": false + } + ] + }, + { + "description": "minimum integer comparison", + "schema": { "minimum": -18446744073709551615 }, + "tests": [ + { + "description": "comparison works for very negative numbers", + "data": -18446744073709551600, + "valid": true + } + ] + }, + { + "description": "float comparison with high precision on negative numbers", + "schema": { + "exclusiveMinimum": -972783798187987123879878123.18878137 + }, + "tests": [ + { + "description": "comparison works for very negative numbers", + "data": -972783798187987123879878123.188781371, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/content.json b/vendor/jsonschema/json/tests/draft7/optional/content.json new file mode 100644 index 00000000..3f5a7430 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/content.json @@ -0,0 +1,77 @@ +[ + { + "description": "validation of string-encoded content based on media type", + "schema": { + "contentMediaType": "application/json" + }, + 
"tests": [ + { + "description": "a valid JSON document", + "data": "{\"foo\": \"bar\"}", + "valid": true + }, + { + "description": "an invalid JSON document", + "data": "{:}", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + } + ] + }, + { + "description": "validation of binary string-encoding", + "schema": { + "contentEncoding": "base64" + }, + "tests": [ + { + "description": "a valid base64 string", + "data": "eyJmb28iOiAiYmFyIn0K", + "valid": true + }, + { + "description": "an invalid base64 string (% is not a valid character)", + "data": "eyJmb28iOi%iYmFyIn0K", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + } + ] + }, + { + "description": "validation of binary-encoded media type documents", + "schema": { + "contentMediaType": "application/json", + "contentEncoding": "base64" + }, + "tests": [ + { + "description": "a valid base64-encoded JSON document", + "data": "eyJmb28iOiAiYmFyIn0K", + "valid": true + }, + { + "description": "a validly-encoded invalid JSON document", + "data": "ezp9Cg==", + "valid": false + }, + { + "description": "an invalid base64 string that is valid JSON", + "data": "{}", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/cross-draft.json b/vendor/jsonschema/json/tests/draft7/optional/cross-draft.json new file mode 100644 index 00000000..8ff53736 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/cross-draft.json @@ -0,0 +1,25 @@ +[ + { + "description": "refs to future drafts are processed as future drafts", + "schema": { + "type": "object", + "allOf": [ + { "properties": { "foo": true } }, + { "$ref": "http://localhost:1234/draft2019-09/dependentRequired.json" } + ] + }, + "tests": [ + { + "description": "missing bar is invalid", + "comment": "if the implementation is not processing the $ref as a 2019-09 schema, this test will fail", + "data": {"foo": "any value"}, + "valid": false + }, + { + "description": "present bar is valid", + "data": {"foo": "any value", "bar": "also any value"}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/ecmascript-regex.json b/vendor/jsonschema/json/tests/draft7/optional/ecmascript-regex.json new file mode 100644 index 00000000..c4886aaa --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/ecmascript-regex.json @@ -0,0 +1,552 @@ +[ + { + "description": "ECMA 262 regex $ does not match trailing newline", + "schema": { + "type": "string", + "pattern": "^abc$" + }, + "tests": [ + { + "description": "matches in Python, but not in ECMA 262", + "data": "abc\\n", + "valid": false + }, + { + "description": "matches", + "data": "abc", + "valid": true + } + ] + }, + { + "description": "ECMA 262 regex converts \\t to horizontal tab", + "schema": { + "type": "string", + "pattern": "^\\t$" + }, + "tests": [ + { + "description": "does not match", + "data": "\\t", + "valid": false + }, + { + "description": "matches", + "data": "\u0009", + "valid": true + } + ] + }, + { + "description": "ECMA 262 regex escapes control codes with \\c and upper letter", + "schema": { + "type": "string", + "pattern": "^\\cC$" + }, + "tests": [ + { + "description": "does not match", + "data": "\\cC", + "valid": false + }, + { + "description": "matches", + "data": "\u0003", + "valid": true + } + ] + }, + { + "description": "ECMA 262 regex escapes control codes with \\c and lower 
letter", + "schema": { + "type": "string", + "pattern": "^\\cc$" + }, + "tests": [ + { + "description": "does not match", + "data": "\\cc", + "valid": false + }, + { + "description": "matches", + "data": "\u0003", + "valid": true + } + ] + }, + { + "description": "ECMA 262 \\d matches ascii digits only", + "schema": { + "type": "string", + "pattern": "^\\d$" + }, + "tests": [ + { + "description": "ASCII zero matches", + "data": "0", + "valid": true + }, + { + "description": "NKO DIGIT ZERO does not match (unlike e.g. Python)", + "data": "߀", + "valid": false + }, + { + "description": "NKO DIGIT ZERO (as \\u escape) does not match", + "data": "\u07c0", + "valid": false + } + ] + }, + { + "description": "ECMA 262 \\D matches everything but ascii digits", + "schema": { + "type": "string", + "pattern": "^\\D$" + }, + "tests": [ + { + "description": "ASCII zero does not match", + "data": "0", + "valid": false + }, + { + "description": "NKO DIGIT ZERO matches (unlike e.g. Python)", + "data": "߀", + "valid": true + }, + { + "description": "NKO DIGIT ZERO (as \\u escape) matches", + "data": "\u07c0", + "valid": true + } + ] + }, + { + "description": "ECMA 262 \\w matches ascii letters only", + "schema": { + "type": "string", + "pattern": "^\\w$" + }, + "tests": [ + { + "description": "ASCII 'a' matches", + "data": "a", + "valid": true + }, + { + "description": "latin-1 e-acute does not match (unlike e.g. Python)", + "data": "é", + "valid": false + } + ] + }, + { + "description": "ECMA 262 \\W matches everything but ascii letters", + "schema": { + "type": "string", + "pattern": "^\\W$" + }, + "tests": [ + { + "description": "ASCII 'a' does not match", + "data": "a", + "valid": false + }, + { + "description": "latin-1 e-acute matches (unlike e.g. Python)", + "data": "é", + "valid": true + } + ] + }, + { + "description": "ECMA 262 \\s matches whitespace", + "schema": { + "type": "string", + "pattern": "^\\s$" + }, + "tests": [ + { + "description": "ASCII space matches", + "data": " ", + "valid": true + }, + { + "description": "Character tabulation matches", + "data": "\t", + "valid": true + }, + { + "description": "Line tabulation matches", + "data": "\u000b", + "valid": true + }, + { + "description": "Form feed matches", + "data": "\u000c", + "valid": true + }, + { + "description": "latin-1 non-breaking-space matches", + "data": "\u00a0", + "valid": true + }, + { + "description": "zero-width whitespace matches", + "data": "\ufeff", + "valid": true + }, + { + "description": "line feed matches (line terminator)", + "data": "\u000a", + "valid": true + }, + { + "description": "paragraph separator matches (line terminator)", + "data": "\u2029", + "valid": true + }, + { + "description": "EM SPACE matches (Space_Separator)", + "data": "\u2003", + "valid": true + }, + { + "description": "Non-whitespace control does not match", + "data": "\u0001", + "valid": false + }, + { + "description": "Non-whitespace does not match", + "data": "\u2013", + "valid": false + } + ] + }, + { + "description": "ECMA 262 \\S matches everything but whitespace", + "schema": { + "type": "string", + "pattern": "^\\S$" + }, + "tests": [ + { + "description": "ASCII space does not match", + "data": " ", + "valid": false + }, + { + "description": "Character tabulation does not match", + "data": "\t", + "valid": false + }, + { + "description": "Line tabulation does not match", + "data": "\u000b", + "valid": false + }, + { + "description": "Form feed does not match", + "data": "\u000c", + "valid": false + }, + { + "description": "latin-1 
non-breaking-space does not match", + "data": "\u00a0", + "valid": false + }, + { + "description": "zero-width whitespace does not match", + "data": "\ufeff", + "valid": false + }, + { + "description": "line feed does not match (line terminator)", + "data": "\u000a", + "valid": false + }, + { + "description": "paragraph separator does not match (line terminator)", + "data": "\u2029", + "valid": false + }, + { + "description": "EM SPACE does not match (Space_Separator)", + "data": "\u2003", + "valid": false + }, + { + "description": "Non-whitespace control matches", + "data": "\u0001", + "valid": true + }, + { + "description": "Non-whitespace matches", + "data": "\u2013", + "valid": true + } + ] + }, + { + "description": "patterns always use unicode semantics with pattern", + "schema": { "pattern": "\\p{Letter}cole" }, + "tests": [ + { + "description": "ascii character in json string", + "data": "Les hivers de mon enfance etaient des saisons longues, longues. Nous vivions en trois lieux: l'ecole, l'eglise et la patinoire; mais la vraie vie etait sur la patinoire.", + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'école, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": true + }, + { + "description": "unicode character in hex format in string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'\u00e9cole, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": true + }, + { + "description": "unicode matching is case-sensitive", + "data": "LES HIVERS DE MON ENFANCE ÉTAIENT DES SAISONS LONGUES, LONGUES. NOUS VIVIONS EN TROIS LIEUX: L'ÉCOLE, L'ÉGLISE ET LA PATINOIRE; MAIS LA VRAIE VIE ÉTAIT SUR LA PATINOIRE.", + "valid": false + } + ] + }, + { + "description": "\\w in patterns matches [A-Za-z0-9_], not unicode letters", + "schema": { "pattern": "\\wcole" }, + "tests": [ + { + "description": "ascii character in json string", + "data": "Les hivers de mon enfance etaient des saisons longues, longues. Nous vivions en trois lieux: l'ecole, l'eglise et la patinoire; mais la vraie vie etait sur la patinoire.", + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'école, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'\u00e9cole, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "unicode matching is case-sensitive", + "data": "LES HIVERS DE MON ENFANCE ÉTAIENT DES SAISONS LONGUES, LONGUES. NOUS VIVIONS EN TROIS LIEUX: L'ÉCOLE, L'ÉGLISE ET LA PATINOIRE; MAIS LA VRAIE VIE ÉTAIT SUR LA PATINOIRE.", + "valid": false + } + ] + }, + { + "description": "pattern with ASCII ranges", + "schema": { "pattern": "[a-z]cole" }, + "tests": [ + { + "description": "literal unicode character in json string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. 
Nous vivions en trois lieux: l'école, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'\u00e9cole, l'église et la patinoire; mais la vraie vie était sur la patinoire.", + "valid": false + }, + { + "description": "ascii characters match", + "data": "Les hivers de mon enfance etaient des saisons longues, longues. Nous vivions en trois lieux: l'ecole, l'eglise et la patinoire; mais la vraie vie etait sur la patinoire.", + "valid": true + } + ] + }, + { + "description": "\\d in pattern matches [0-9], not unicode digits", + "schema": { "pattern": "^\\d+$" }, + "tests": [ + { + "description": "ascii digits", + "data": "42", + "valid": true + }, + { + "description": "ascii non-digits", + "data": "-%#", + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": "৪২", + "valid": false + } + ] + }, + { + "description": "pattern with non-ASCII digits", + "schema": { "pattern": "^\\p{digit}+$" }, + "tests": [ + { + "description": "ascii digits", + "data": "42", + "valid": true + }, + { + "description": "ascii non-digits", + "data": "-%#", + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": "৪২", + "valid": true + } + ] + }, + { + "description": "patterns always use unicode semantics with patternProperties", + "schema": { + "type": "object", + "patternProperties": { + "\\p{Letter}cole": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii character in json string", + "data": { "l'ecole": "pas de vraie vie" }, + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": { "l'école": "pas de vraie vie" }, + "valid": true + }, + { + "description": "unicode character in hex format in string", + "data": { "l'\u00e9cole": "pas de vraie vie" }, + "valid": true + }, + { + "description": "unicode matching is case-sensitive", + "data": { "L'ÉCOLE": "PAS DE VRAIE VIE" }, + "valid": false + } + ] + }, + { + "description": "\\w in patternProperties matches [A-Za-z0-9_], not unicode letters", + "schema": { + "type": "object", + "patternProperties": { + "\\wcole": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii character in json string", + "data": { "l'ecole": "pas de vraie vie" }, + "valid": true + }, + { + "description": "literal unicode character in json string", + "data": { "l'école": "pas de vraie vie" }, + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": { "l'\u00e9cole": "pas de vraie vie" }, + "valid": false + }, + { + "description": "unicode matching is case-sensitive", + "data": { "L'ÉCOLE": "PAS DE VRAIE VIE" }, + "valid": false + } + ] + }, + { + "description": "patternProperties with ASCII ranges", + "schema": { + "type": "object", + "patternProperties": { + "[a-z]cole": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "literal unicode character in json string", + "data": { "l'école": "pas de vraie vie" }, + "valid": false + }, + { + "description": "unicode character in hex format in string", + "data": { "l'\u00e9cole": "pas de vraie vie" }, + "valid": false + }, + { + "description": "ascii characters match", + "data": { "l'ecole": "pas de vraie vie" }, + "valid": true + } + ] + }, + { + 
"description": "\\d in patternProperties matches [0-9], not unicode digits", + "schema": { + "type": "object", + "patternProperties": { + "^\\d+$": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii digits", + "data": { "42": "life, the universe, and everything" }, + "valid": true + }, + { + "description": "ascii non-digits", + "data": { "-%#": "spending the year dead for tax reasons" }, + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": { "৪২": "khajit has wares if you have coin" }, + "valid": false + } + ] + }, + { + "description": "patternProperties with non-ASCII digits", + "schema": { + "type": "object", + "patternProperties": { + "^\\p{digit}+$": true + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "ascii digits", + "data": { "42": "life, the universe, and everything" }, + "valid": true + }, + { + "description": "ascii non-digits", + "data": { "-%#": "spending the year dead for tax reasons" }, + "valid": false + }, + { + "description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)", + "data": { "৪২": "khajit has wares if you have coin" }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/float-overflow.json b/vendor/jsonschema/json/tests/draft7/optional/float-overflow.json new file mode 100644 index 00000000..52ff9827 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/float-overflow.json @@ -0,0 +1,13 @@ +[ + { + "description": "all integers are multiples of 0.5, if overflow is handled", + "schema": {"type": "integer", "multipleOf": 0.5}, + "tests": [ + { + "description": "valid if optional overflow handling is implemented", + "data": 1e308, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/format/date-time.json b/vendor/jsonschema/json/tests/draft7/optional/format/date-time.json new file mode 100644 index 00000000..09112737 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/format/date-time.json @@ -0,0 +1,133 @@ +[ + { + "description": "validation of date-time strings", + "schema": { "format": "date-time" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid date-time string", + "data": "1963-06-19T08:30:06.283185Z", + "valid": true + }, + { + "description": "a valid date-time string without second fraction", + "data": "1963-06-19T08:30:06Z", + "valid": true + }, + { + "description": "a valid date-time string with plus offset", + "data": "1937-01-01T12:00:27.87+00:20", + "valid": true + }, + { + "description": "a valid date-time string with minus offset", + "data": "1990-12-31T15:59:50.123-08:00", + "valid": true + }, + { + "description": "a valid date-time with a leap second, UTC", + "data": "1998-12-31T23:59:60Z", + "valid": true + }, + { + "description": "a valid date-time with a leap second, with minus offset", + "data": "1998-12-31T15:59:60.123-08:00", + "valid": true + }, + { + 
"description": "an invalid date-time past leap second, UTC", + "data": "1998-12-31T23:59:61Z", + "valid": false + }, + { + "description": "an invalid date-time with leap second on a wrong minute, UTC", + "data": "1998-12-31T23:58:60Z", + "valid": false + }, + { + "description": "an invalid date-time with leap second on a wrong hour, UTC", + "data": "1998-12-31T22:59:60Z", + "valid": false + }, + { + "description": "an invalid day in date-time string", + "data": "1990-02-31T15:59:59.123-08:00", + "valid": false + }, + { + "description": "an invalid offset in date-time string", + "data": "1990-12-31T15:59:59-24:00", + "valid": false + }, + { + "description": "an invalid closing Z after time-zone offset", + "data": "1963-06-19T08:30:06.28123+01:00Z", + "valid": false + }, + { + "description": "an invalid date-time string", + "data": "06/19/1963 08:30:06 PST", + "valid": false + }, + { + "description": "case-insensitive T and Z", + "data": "1963-06-19t08:30:06.283185z", + "valid": true + }, + { + "description": "only RFC3339 not all of ISO 8601 are valid", + "data": "2013-350T01:01:01", + "valid": false + }, + { + "description": "invalid non-padded month dates", + "data": "1963-6-19T08:30:06.283185Z", + "valid": false + }, + { + "description": "invalid non-padded day dates", + "data": "1963-06-1T08:30:06.283185Z", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4) in date portion", + "data": "1963-06-1৪T00:00:00Z", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4) in time portion", + "data": "1963-06-11T0৪:00:00Z", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/format/date.json b/vendor/jsonschema/json/tests/draft7/optional/format/date.json new file mode 100644 index 00000000..06c9ea0f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/format/date.json @@ -0,0 +1,223 @@ +[ + { + "description": "validation of date strings", + "schema": { "format": "date" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid date string", + "data": "1963-06-19", + "valid": true + }, + { + "description": "a valid date string with 31 days in January", + "data": "2020-01-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in January", + "data": "2020-01-32", + "valid": false + }, + { + "description": "a valid date string with 28 days in February (normal)", + "data": "2021-02-28", + "valid": true + }, + { + "description": "a invalid date string with 29 days in February (normal)", + "data": "2021-02-29", + "valid": false + }, + { + "description": "a valid date string with 29 days in February (leap)", + "data": "2020-02-29", + "valid": true + }, + { + "description": "a invalid date string with 30 days in February (leap)", + "data": "2020-02-30", + "valid": false + }, + { + "description": "a valid date string with 31 days in March", + "data": "2020-03-31", + "valid": true + }, + { + "description": "a invalid date string with 32 
days in March", + "data": "2020-03-32", + "valid": false + }, + { + "description": "a valid date string with 30 days in April", + "data": "2020-04-30", + "valid": true + }, + { + "description": "a invalid date string with 31 days in April", + "data": "2020-04-31", + "valid": false + }, + { + "description": "a valid date string with 31 days in May", + "data": "2020-05-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in May", + "data": "2020-05-32", + "valid": false + }, + { + "description": "a valid date string with 30 days in June", + "data": "2020-06-30", + "valid": true + }, + { + "description": "a invalid date string with 31 days in June", + "data": "2020-06-31", + "valid": false + }, + { + "description": "a valid date string with 31 days in July", + "data": "2020-07-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in July", + "data": "2020-07-32", + "valid": false + }, + { + "description": "a valid date string with 31 days in August", + "data": "2020-08-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in August", + "data": "2020-08-32", + "valid": false + }, + { + "description": "a valid date string with 30 days in September", + "data": "2020-09-30", + "valid": true + }, + { + "description": "a invalid date string with 31 days in September", + "data": "2020-09-31", + "valid": false + }, + { + "description": "a valid date string with 31 days in October", + "data": "2020-10-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in October", + "data": "2020-10-32", + "valid": false + }, + { + "description": "a valid date string with 30 days in November", + "data": "2020-11-30", + "valid": true + }, + { + "description": "a invalid date string with 31 days in November", + "data": "2020-11-31", + "valid": false + }, + { + "description": "a valid date string with 31 days in December", + "data": "2020-12-31", + "valid": true + }, + { + "description": "a invalid date string with 32 days in December", + "data": "2020-12-32", + "valid": false + }, + { + "description": "a invalid date string with invalid month", + "data": "2020-13-01", + "valid": false + }, + { + "description": "an invalid date string", + "data": "06/19/1963", + "valid": false + }, + { + "description": "only RFC3339 not all of ISO 8601 are valid", + "data": "2013-350", + "valid": false + }, + { + "description": "non-padded month dates are not valid", + "data": "1998-1-20", + "valid": false + }, + { + "description": "non-padded day dates are not valid", + "data": "1998-01-1", + "valid": false + }, + { + "description": "invalid month", + "data": "1998-13-01", + "valid": false + }, + { + "description": "invalid month-day combination", + "data": "1998-04-31", + "valid": false + }, + { + "description": "2021 is not a leap year", + "data": "2021-02-29", + "valid": false + }, + { + "description": "2020 is a leap year", + "data": "2020-02-29", + "valid": true + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4)", + "data": "1963-06-1৪", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/format/email.json b/vendor/jsonschema/json/tests/draft7/optional/format/email.json new file mode 100644 index 00000000..d6761a46 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/format/email.json @@ -0,0 +1,83 @@ +[ + { + "description": "validation of e-mail addresses", + "schema": { "format": "email" }, + "tests": [ + { + "description": "all string formats ignore 
integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid e-mail address", + "data": "joe.bloggs@example.com", + "valid": true + }, + { + "description": "an invalid e-mail address", + "data": "2962", + "valid": false + }, + { + "description": "tilde in local part is valid", + "data": "te~st@example.com", + "valid": true + }, + { + "description": "tilde before local part is valid", + "data": "~test@example.com", + "valid": true + }, + { + "description": "tilde after local part is valid", + "data": "test~@example.com", + "valid": true + }, + { + "description": "dot before local part is not valid", + "data": ".test@example.com", + "valid": false + }, + { + "description": "dot after local part is not valid", + "data": "test.@example.com", + "valid": false + }, + { + "description": "two separated dots inside local part are valid", + "data": "te.s.t@example.com", + "valid": true + }, + { + "description": "two subsequent dots inside local part are not valid", + "data": "te..st@example.com", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/format/hostname.json b/vendor/jsonschema/json/tests/draft7/optional/format/hostname.json new file mode 100644 index 00000000..8a67fda8 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/format/hostname.json @@ -0,0 +1,98 @@ +[ + { + "description": "validation of host names", + "schema": { "format": "hostname" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid host name", + "data": "www.example.com", + "valid": true + }, + { + "description": "a valid punycoded IDN hostname", + "data": "xn--4gbwdl.xn--wgbh1c", + "valid": true + }, + { + "description": "a host name starting with an illegal character", + "data": "-a-host-name-that-starts-with--", + "valid": false + }, + { + "description": "a host name containing illegal characters", + "data": "not_a_valid_host_name", + "valid": false + }, + { + "description": "a host name with a component too long", + "data": "a-vvvvvvvvvvvvvvvveeeeeeeeeeeeeeeerrrrrrrrrrrrrrrryyyyyyyyyyyyyyyy-long-host-name-component", + "valid": false + }, + { + "description": "starts with hyphen", + "data": "-hostname", + "valid": false + }, + { + "description": "ends with hyphen", + "data": "hostname-", + "valid": false + }, + { + "description": "starts with underscore", + "data": "_hostname", + "valid": false + }, + { + "description": "ends with underscore", + "data": "hostname_", + "valid": false + }, + { + "description": "contains underscore", + "data": 
"host_name", + "valid": false + }, + { + "description": "maximum label length", + "data": "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.com", + "valid": true + }, + { + "description": "exceeds maximum label length", + "data": "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijkl.com", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/format/idn-email.json b/vendor/jsonschema/json/tests/draft7/optional/format/idn-email.json new file mode 100644 index 00000000..6e213745 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/format/idn-email.json @@ -0,0 +1,58 @@ +[ + { + "description": "validation of an internationalized e-mail addresses", + "schema": { "format": "idn-email" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid idn e-mail (example@example.test in Hangul)", + "data": "실례@실례.테스트", + "valid": true + }, + { + "description": "an invalid idn e-mail address", + "data": "2962", + "valid": false + }, + { + "description": "a valid e-mail address", + "data": "joe.bloggs@example.com", + "valid": true + }, + { + "description": "an invalid e-mail address", + "data": "2962", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/format/idn-hostname.json b/vendor/jsonschema/json/tests/draft7/optional/format/idn-hostname.json new file mode 100644 index 00000000..6c8f86a3 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/format/idn-hostname.json @@ -0,0 +1,304 @@ +[ + { + "description": "validation of internationalized host names", + "schema": { "format": "idn-hostname" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid host name (example.test in Hangul)", + "data": "실례.테스트", + "valid": true + }, + { + "description": "illegal first char U+302E Hangul single dot tone mark", + "data": "〮실례.테스트", + "valid": false + }, + { + "description": "contains illegal char U+302E Hangul single dot tone mark", + "data": "실〮례.테스트", + "valid": false + }, + { + "description": "a host name with a component too long", + "data": "실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실례례테스트례례례례례례례례례례례례례례례례례테스트례례례례례례례례례례례례례례례례례례례테스트례례례례례례례례례례례례테스트례례실례.테스트", + "valid": false + }, + { + "description": "invalid label, correct Punycode", + "comment": "https://tools.ietf.org/html/rfc5890#section-2.3.2.1 https://tools.ietf.org/html/rfc5891#section-4.4 https://tools.ietf.org/html/rfc3492#section-7.1", + 
"data": "-> $1.00 <--", + "valid": false + }, + { + "description": "valid Chinese Punycode", + "comment": "https://tools.ietf.org/html/rfc5890#section-2.3.2.1 https://tools.ietf.org/html/rfc5891#section-4.4", + "data": "xn--ihqwcrb4cv8a8dqg056pqjye", + "valid": true + }, + { + "description": "invalid Punycode", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.4 https://tools.ietf.org/html/rfc5890#section-2.3.2.1", + "data": "xn--X", + "valid": false + }, + { + "description": "U-label contains \"--\" in the 3rd and 4th position", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.1 https://tools.ietf.org/html/rfc5890#section-2.3.2.1", + "data": "XN--aa---o47jg78q", + "valid": false + }, + { + "description": "U-label starts with a dash", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.1", + "data": "-hello", + "valid": false + }, + { + "description": "U-label ends with a dash", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.1", + "data": "hello-", + "valid": false + }, + { + "description": "U-label starts and ends with a dash", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.1", + "data": "-hello-", + "valid": false + }, + { + "description": "Begins with a Spacing Combining Mark", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.2", + "data": "\u0903hello", + "valid": false + }, + { + "description": "Begins with a Nonspacing Mark", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.2", + "data": "\u0300hello", + "valid": false + }, + { + "description": "Begins with an Enclosing Mark", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.2", + "data": "\u0488hello", + "valid": false + }, + { + "description": "Exceptions that are PVALID, left-to-right chars", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.2 https://tools.ietf.org/html/rfc5892#section-2.6", + "data": "\u00df\u03c2\u0f0b\u3007", + "valid": true + }, + { + "description": "Exceptions that are PVALID, right-to-left chars", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.2 https://tools.ietf.org/html/rfc5892#section-2.6", + "data": "\u06fd\u06fe", + "valid": true + }, + { + "description": "Exceptions that are DISALLOWED, right-to-left chars", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.2 https://tools.ietf.org/html/rfc5892#section-2.6", + "data": "\u0640\u07fa", + "valid": false + }, + { + "description": "Exceptions that are DISALLOWED, left-to-right chars", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.2 https://tools.ietf.org/html/rfc5892#section-2.6 Note: The two combining marks (U+302E and U+302F) are in the middle and not at the start", + "data": "\u3031\u3032\u3033\u3034\u3035\u302e\u302f\u303b", + "valid": false + }, + { + "description": "MIDDLE DOT with no preceding 'l'", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3", + "data": "a\u00b7l", + "valid": false + }, + { + "description": "MIDDLE DOT with nothing preceding", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3", + "data": "\u00b7l", + "valid": false + }, + { + "description": "MIDDLE DOT with no following 'l'", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3", + "data": "l\u00b7a", + "valid": false + }, + { + "description": "MIDDLE DOT with nothing following", + "comment": 
"https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3", + "data": "l\u00b7", + "valid": false + }, + { + "description": "MIDDLE DOT with surrounding 'l's", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3", + "data": "l\u00b7l", + "valid": true + }, + { + "description": "Greek KERAIA not followed by Greek", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.4", + "data": "\u03b1\u0375S", + "valid": false + }, + { + "description": "Greek KERAIA not followed by anything", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.4", + "data": "\u03b1\u0375", + "valid": false + }, + { + "description": "Greek KERAIA followed by Greek", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.4", + "data": "\u03b1\u0375\u03b2", + "valid": true + }, + { + "description": "Hebrew GERESH not preceded by Hebrew", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.5", + "data": "A\u05f3\u05d1", + "valid": false + }, + { + "description": "Hebrew GERESH not preceded by anything", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.5", + "data": "\u05f3\u05d1", + "valid": false + }, + { + "description": "Hebrew GERESH preceded by Hebrew", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.5", + "data": "\u05d0\u05f3\u05d1", + "valid": true + }, + { + "description": "Hebrew GERSHAYIM not preceded by Hebrew", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.6", + "data": "A\u05f4\u05d1", + "valid": false + }, + { + "description": "Hebrew GERSHAYIM not preceded by anything", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.6", + "data": "\u05f4\u05d1", + "valid": false + }, + { + "description": "Hebrew GERSHAYIM preceded by Hebrew", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.6", + "data": "\u05d0\u05f4\u05d1", + "valid": true + }, + { + "description": "KATAKANA MIDDLE DOT with no Hiragana, Katakana, or Han", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7", + "data": "def\u30fbabc", + "valid": false + }, + { + "description": "KATAKANA MIDDLE DOT with no other characters", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7", + "data": "\u30fb", + "valid": false + }, + { + "description": "KATAKANA MIDDLE DOT with Hiragana", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7", + "data": "\u30fb\u3041", + "valid": true + }, + { + "description": "KATAKANA MIDDLE DOT with Katakana", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7", + "data": "\u30fb\u30a1", + "valid": true + }, + { + "description": "KATAKANA MIDDLE DOT with Han", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7", + "data": "\u30fb\u4e08", + "valid": true + }, + { + 
"description": "Arabic-Indic digits mixed with Extended Arabic-Indic digits", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.8", + "data": "\u0660\u06f0", + "valid": false + }, + { + "description": "Arabic-Indic digits not mixed with Extended Arabic-Indic digits", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.8", + "data": "\u0628\u0660\u0628", + "valid": true + }, + { + "description": "Extended Arabic-Indic digits not mixed with Arabic-Indic digits", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.9", + "data": "\u06f00", + "valid": true + }, + { + "description": "ZERO WIDTH JOINER not preceded by Virama", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.2 https://www.unicode.org/review/pr-37.pdf", + "data": "\u0915\u200d\u0937", + "valid": false + }, + { + "description": "ZERO WIDTH JOINER not preceded by anything", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.2 https://www.unicode.org/review/pr-37.pdf", + "data": "\u200d\u0937", + "valid": false + }, + { + "description": "ZERO WIDTH JOINER preceded by Virama", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.2 https://www.unicode.org/review/pr-37.pdf", + "data": "\u0915\u094d\u200d\u0937", + "valid": true + }, + { + "description": "ZERO WIDTH NON-JOINER preceded by Virama", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.1", + "data": "\u0915\u094d\u200c\u0937", + "valid": true + }, + { + "description": "ZERO WIDTH NON-JOINER not preceded by Virama but matches regexp", + "comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.1 https://www.w3.org/TR/alreq/#h_disjoining_enforcement", + "data": "\u0628\u064a\u200c\u0628\u064a", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/format/ipv4.json b/vendor/jsonschema/json/tests/draft7/optional/format/ipv4.json new file mode 100644 index 00000000..4706581f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/format/ipv4.json @@ -0,0 +1,84 @@ +[ + { + "description": "validation of IP addresses", + "schema": { "format": "ipv4" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid IP address", + "data": "192.168.0.1", + "valid": true + }, + { + "description": "an IP address with too many components", + "data": "127.0.0.0.1", + "valid": false + }, + { + "description": "an IP address with out-of-range values", + "data": "256.256.256.256", + "valid": false + }, + { + "description": "an IP address without 4 components", + "data": "127.0", + "valid": false + }, + { + "description": "an IP address as an 
integer", + "data": "0x7f000001", + "valid": false + }, + { + "description": "an IP address as an integer (decimal)", + "data": "2130706433", + "valid": false + }, + { + "description": "invalid leading zeroes, as they are treated as octals", + "comment": "see https://sick.codes/universal-netmask-npm-package-used-by-270000-projects-vulnerable-to-octal-input-data-server-side-request-forgery-remote-file-inclusion-local-file-inclusion-and-more-cve-2021-28918/", + "data": "087.10.0.1", + "valid": false + }, + { + "description": "value without leading zero is valid", + "data": "87.10.0.1", + "valid": true + }, + { + "description": "invalid non-ASCII '২' (a Bengali 2)", + "data": "1২7.0.0.1", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/format/ipv6.json b/vendor/jsonschema/json/tests/draft7/optional/format/ipv6.json new file mode 100644 index 00000000..94368f2a --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/format/ipv6.json @@ -0,0 +1,208 @@ +[ + { + "description": "validation of IPv6 addresses", + "schema": { "format": "ipv6" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid IPv6 address", + "data": "::1", + "valid": true + }, + { + "description": "an IPv6 address with out-of-range values", + "data": "12345::", + "valid": false + }, + { + "description": "trailing 4 hex symbols is valid", + "data": "::abef", + "valid": true + }, + { + "description": "trailing 5 hex symbols is invalid", + "data": "::abcef", + "valid": false + }, + { + "description": "an IPv6 address with too many components", + "data": "1:1:1:1:1:1:1:1:1:1:1:1:1:1:1:1", + "valid": false + }, + { + "description": "an IPv6 address containing illegal characters", + "data": "::laptop", + "valid": false + }, + { + "description": "no digits is valid", + "data": "::", + "valid": true + }, + { + "description": "leading colons is valid", + "data": "::42:ff:1", + "valid": true + }, + { + "description": "trailing colons is valid", + "data": "d6::", + "valid": true + }, + { + "description": "missing leading octet is invalid", + "data": ":2:3:4:5:6:7:8", + "valid": false + }, + { + "description": "missing trailing octet is invalid", + "data": "1:2:3:4:5:6:7:", + "valid": false + }, + { + "description": "missing leading octet with omitted octets later", + "data": ":2:3:4::8", + "valid": false + }, + { + "description": "single set of double colons in the middle is valid", + "data": "1:d6::42", + "valid": true + }, + { + "description": "two sets of double colons is invalid", + "data": "1::d6::42", + "valid": false + }, + { + "description": "mixed format with the ipv4 section as decimal octets", + "data": "1::d6:192.168.0.1", + "valid": true + }, + { + "description": "mixed format with double colons between the sections", + "data": "1:2::192.168.0.1", + "valid": true + }, + { + "description": "mixed format with ipv4 section with octet out of range", + "data": "1::2:192.168.256.1", + "valid": false + }, + { + "description": "mixed format 
with ipv4 section with a hex octet", + "data": "1::2:192.168.ff.1", + "valid": false + }, + { + "description": "mixed format with leading double colons (ipv4-mapped ipv6 address)", + "data": "::ffff:192.168.0.1", + "valid": true + }, + { + "description": "triple colons is invalid", + "data": "1:2:3:4:5:::8", + "valid": false + }, + { + "description": "8 octets", + "data": "1:2:3:4:5:6:7:8", + "valid": true + }, + { + "description": "insufficient octets without double colons", + "data": "1:2:3:4:5:6:7", + "valid": false + }, + { + "description": "no colons is invalid", + "data": "1", + "valid": false + }, + { + "description": "ipv4 is not ipv6", + "data": "127.0.0.1", + "valid": false + }, + { + "description": "ipv4 segment must have 4 octets", + "data": "1:2:3:4:1.2.3", + "valid": false + }, + { + "description": "leading whitespace is invalid", + "data": " ::1", + "valid": false + }, + { + "description": "trailing whitespace is invalid", + "data": "::1 ", + "valid": false + }, + { + "description": "netmask is not a part of ipv6 address", + "data": "fe80::/64", + "valid": false + }, + { + "description": "zone id is not a part of ipv6 address", + "data": "fe80::a%eth1", + "valid": false + }, + { + "description": "a long valid ipv6", + "data": "1000:1000:1000:1000:1000:1000:255.255.255.255", + "valid": true + }, + { + "description": "a long invalid ipv6, below length limit, first", + "data": "100:100:100:100:100:100:255.255.255.255.255", + "valid": false + }, + { + "description": "a long invalid ipv6, below length limit, second", + "data": "100:100:100:100:100:100:100:255.255.255.255", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4)", + "data": "1:2:3:4:5:6:7:৪", + "valid": false + }, + { + "description": "invalid non-ASCII '৪' (a Bengali 4) in the IPv4 portion", + "data": "1:2::192.16৪.0.1", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/format/iri-reference.json b/vendor/jsonschema/json/tests/draft7/optional/format/iri-reference.json new file mode 100644 index 00000000..c6b4c22a --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/format/iri-reference.json @@ -0,0 +1,73 @@ +[ + { + "description": "validation of IRI References", + "schema": { "format": "iri-reference" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid IRI", + "data": "http://ƒøø.ßår/?∂éœ=πîx#πîüx", + "valid": true + }, + { + "description": "a valid protocol-relative IRI Reference", + "data": "//ƒøø.ßår/?∂éœ=πîx#πîüx", + "valid": true + }, + { + "description": "a valid relative IRI Reference", + "data": "/âππ", + "valid": true + }, + { + "description": "an invalid IRI Reference", + "data": "\\\\WINDOWS\\filëßåré", + "valid": false + }, + { + "description": "a valid IRI Reference", + "data": "âππ", + "valid": true + }, + { + "description": "a valid IRI fragment", + "data": "#ƒrägmênt", + "valid": true + }, + { + "description": "an invalid IRI fragment", + "data": "#ƒräg\\mênt", + "valid":
false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/format/iri.json b/vendor/jsonschema/json/tests/draft7/optional/format/iri.json new file mode 100644 index 00000000..a0d12aed --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/format/iri.json @@ -0,0 +1,83 @@ +[ + { + "description": "validation of IRIs", + "schema": { "format": "iri" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid IRI with anchor tag", + "data": "http://ƒøø.ßår/?∂éœ=πîx#πîüx", + "valid": true + }, + { + "description": "a valid IRI with anchor tag and parentheses", + "data": "http://ƒøø.com/blah_(wîkïpédiÃ¥)_blah#ßité-1", + "valid": true + }, + { + "description": "a valid IRI with URL-encoded stuff", + "data": "http://ƒøø.ßår/?q=Test%20URL-encoded%20stuff", + "valid": true + }, + { + "description": "a valid IRI with many special characters", + "data": "http://-.~_!$&'()*+,;=:%40:80%2f::::::@example.com", + "valid": true + }, + { + "description": "a valid IRI based on IPv6", + "data": "http://[2001:0db8:85a3:0000:0000:8a2e:0370:7334]", + "valid": true + }, + { + "description": "an invalid IRI based on IPv6", + "data": "http://2001:0db8:85a3:0000:0000:8a2e:0370:7334", + "valid": false + }, + { + "description": "an invalid relative IRI Reference", + "data": "/abc", + "valid": false + }, + { + "description": "an invalid IRI", + "data": "\\\\WINDOWS\\filëßåré", + "valid": false + }, + { + "description": "an invalid IRI though valid IRI reference", + "data": "âππ", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/format/json-pointer.json b/vendor/jsonschema/json/tests/draft7/optional/format/json-pointer.json new file mode 100644 index 00000000..a0346b57 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/format/json-pointer.json @@ -0,0 +1,198 @@ +[ + { + "description": "validation of JSON-pointers (JSON String Representation)", + "schema": { "format": "json-pointer" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid JSON-pointer", + "data": "/foo/bar~0/baz~1/%a", + "valid": true + }, + { + "description": "not a valid JSON-pointer (~ not escaped)", + "data": "/foo/bar~", + "valid": false + }, + { + "description": "valid JSON-pointer with empty segment", + "data": "/foo//bar", + "valid": true + }, + { + "description": "valid JSON-pointer with the last empty segment", + "data": "/foo/bar/", + "valid": true + }, + { + "description": "valid 
JSON-pointer as stated in RFC 6901 #1", + "data": "", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #2", + "data": "/foo", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #3", + "data": "/foo/0", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #4", + "data": "/", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #5", + "data": "/a~1b", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #6", + "data": "/c%d", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #7", + "data": "/e^f", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #8", + "data": "/g|h", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #9", + "data": "/i\\j", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #10", + "data": "/k\"l", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #11", + "data": "/ ", + "valid": true + }, + { + "description": "valid JSON-pointer as stated in RFC 6901 #12", + "data": "/m~0n", + "valid": true + }, + { + "description": "valid JSON-pointer used adding to the last array position", + "data": "/foo/-", + "valid": true + }, + { + "description": "valid JSON-pointer (- used as object member name)", + "data": "/foo/-/bar", + "valid": true + }, + { + "description": "valid JSON-pointer (multiple escaped characters)", + "data": "/~1~0~0~1~1", + "valid": true + }, + { + "description": "valid JSON-pointer (escaped with fraction part) #1", + "data": "/~1.1", + "valid": true + }, + { + "description": "valid JSON-pointer (escaped with fraction part) #2", + "data": "/~0.1", + "valid": true + }, + { + "description": "not a valid JSON-pointer (URI Fragment Identifier) #1", + "data": "#", + "valid": false + }, + { + "description": "not a valid JSON-pointer (URI Fragment Identifier) #2", + "data": "#/", + "valid": false + }, + { + "description": "not a valid JSON-pointer (URI Fragment Identifier) #3", + "data": "#a", + "valid": false + }, + { + "description": "not a valid JSON-pointer (some escaped, but not all) #1", + "data": "/~0~", + "valid": false + }, + { + "description": "not a valid JSON-pointer (some escaped, but not all) #2", + "data": "/~0/~", + "valid": false + }, + { + "description": "not a valid JSON-pointer (wrong escape character) #1", + "data": "/~2", + "valid": false + }, + { + "description": "not a valid JSON-pointer (wrong escape character) #2", + "data": "/~-1", + "valid": false + }, + { + "description": "not a valid JSON-pointer (multiple characters not escaped)", + "data": "/~~", + "valid": false + }, + { + "description": "not a valid JSON-pointer (isn't empty nor starts with /) #1", + "data": "a", + "valid": false + }, + { + "description": "not a valid JSON-pointer (isn't empty nor starts with /) #2", + "data": "0", + "valid": false + }, + { + "description": "not a valid JSON-pointer (isn't empty nor starts with /) #3", + "data": "a/a", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/format/regex.json b/vendor/jsonschema/json/tests/draft7/optional/format/regex.json new file mode 100644 index 00000000..34491770 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/format/regex.json @@ -0,0 +1,48 @@ +[ + { + "description": "validation of regular expressions", + "schema": { "format": "regex" }, + 
"tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid regular expression", + "data": "([abc])+\\s+$", + "valid": true + }, + { + "description": "a regular expression with unclosed parens is invalid", + "data": "^(abc]", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/format/relative-json-pointer.json b/vendor/jsonschema/json/tests/draft7/optional/format/relative-json-pointer.json new file mode 100644 index 00000000..9309986f --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/format/relative-json-pointer.json @@ -0,0 +1,83 @@ +[ + { + "description": "validation of Relative JSON Pointers (RJP)", + "schema": { "format": "relative-json-pointer" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid upwards RJP", + "data": "1", + "valid": true + }, + { + "description": "a valid downwards RJP", + "data": "0/foo/bar", + "valid": true + }, + { + "description": "a valid up and then down RJP, with array index", + "data": "2/0/baz/1/zip", + "valid": true + }, + { + "description": "a valid RJP taking the member or index name", + "data": "0#", + "valid": true + }, + { + "description": "an invalid RJP that is a valid JSON Pointer", + "data": "/foo/bar", + "valid": false + }, + { + "description": "negative prefix", + "data": "-1/foo/bar", + "valid": false + }, + { + "description": "## is not a valid json-pointer", + "data": "0##", + "valid": false + }, + { + "description": "zero cannot be followed by other digits, plus json-pointer", + "data": "01/a", + "valid": false + }, + { + "description": "zero cannot be followed by other digits, plus octothorpe", + "data": "01#", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/format/time.json b/vendor/jsonschema/json/tests/draft7/optional/format/time.json new file mode 100644 index 00000000..31425871 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/format/time.json @@ -0,0 +1,198 @@ +[ + { + "description": "validation of time strings", + "schema": { "format": "time" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": 
"all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid time string", + "data": "08:30:06Z", + "valid": true + }, + { + "description": "a valid time string with leap second, Zulu", + "data": "23:59:60Z", + "valid": true + }, + { + "description": "invalid leap second, Zulu (wrong hour)", + "data": "22:59:60Z", + "valid": false + }, + { + "description": "invalid leap second, Zulu (wrong minute)", + "data": "23:58:60Z", + "valid": false + }, + { + "description": "valid leap second, zero time-offset", + "data": "23:59:60+00:00", + "valid": true + }, + { + "description": "invalid leap second, zero time-offset (wrong hour)", + "data": "22:59:60+00:00", + "valid": false + }, + { + "description": "invalid leap second, zero time-offset (wrong minute)", + "data": "23:58:60+00:00", + "valid": false + }, + { + "description": "valid leap second, positive time-offset", + "data": "01:29:60+01:30", + "valid": true + }, + { + "description": "valid leap second, large positive time-offset", + "data": "23:29:60+23:30", + "valid": true + }, + { + "description": "invalid leap second, positive time-offset (wrong hour)", + "data": "23:59:60+01:00", + "valid": false + }, + { + "description": "invalid leap second, positive time-offset (wrong minute)", + "data": "23:59:60+00:30", + "valid": false + }, + { + "description": "valid leap second, negative time-offset", + "data": "15:59:60-08:00", + "valid": true + }, + { + "description": "valid leap second, large negative time-offset", + "data": "00:29:60-23:30", + "valid": true + }, + { + "description": "invalid leap second, negative time-offset (wrong hour)", + "data": "23:59:60-01:00", + "valid": false + }, + { + "description": "invalid leap second, negative time-offset (wrong minute)", + "data": "23:59:60-00:30", + "valid": false + }, + { + "description": "a valid time string with second fraction", + "data": "23:20:50.52Z", + "valid": true + }, + { + "description": "a valid time string with precise second fraction", + "data": "08:30:06.283185Z", + "valid": true + }, + { + "description": "a valid time string with plus offset", + "data": "08:30:06+00:20", + "valid": true + }, + { + "description": "a valid time string with minus offset", + "data": "08:30:06-08:00", + "valid": true + }, + { + "description": "a valid time string with case-insensitive Z", + "data": "08:30:06z", + "valid": true + }, + { + "description": "an invalid time string with invalid hour", + "data": "24:00:00Z", + "valid": false + }, + { + "description": "an invalid time string with invalid minute", + "data": "00:60:00Z", + "valid": false + }, + { + "description": "an invalid time string with invalid second", + "data": "00:00:61Z", + "valid": false + }, + { + "description": "an invalid time string with invalid leap second (wrong hour)", + "data": "22:59:60Z", + "valid": false + }, + { + "description": "an invalid time string with invalid leap second (wrong minute)", + "data": "23:58:60Z", + "valid": false + }, + { + "description": "an invalid time string with invalid time numoffset hour", + "data": "01:02:03+24:00", + "valid": false + }, + { + "description": "an invalid time string with invalid time numoffset minute", + "data": "01:02:03+00:60", + "valid": false + }, + { + "description": "an invalid time string with invalid time with both Z and numoffset", + "data": "01:02:03Z+00:30", + "valid": false + }, + { + "description": "an invalid offset indicator", + 
"data": "08:30:06 PST", + "valid": false + }, + { + "description": "only RFC3339 not all of ISO 8601 are valid", + "data": "01:01:01,1111", + "valid": false + }, + { + "description": "no time offset", + "data": "12:00:00", + "valid": false + }, + { + "description": "invalid non-ASCII '২' (a Bengali 2)", + "data": "1২:00:00Z", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/format/unknown.json b/vendor/jsonschema/json/tests/draft7/optional/format/unknown.json new file mode 100644 index 00000000..12339ae5 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/format/unknown.json @@ -0,0 +1,43 @@ +[ + { + "description": "unknown format", + "schema": { "format": "unknown" }, + "tests": [ + { + "description": "unknown formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "unknown formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "unknown formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "unknown formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "unknown formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "unknown formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "unknown formats ignore strings", + "data": "string", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/format/uri-reference.json b/vendor/jsonschema/json/tests/draft7/optional/format/uri-reference.json new file mode 100644 index 00000000..7cdf228d --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/format/uri-reference.json @@ -0,0 +1,73 @@ +[ + { + "description": "validation of URI References", + "schema": { "format": "uri-reference" }, + "tests": [ + { + "description": "all string formats ignore integers", + "data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid URI", + "data": "http://foo.bar/?baz=qux#quux", + "valid": true + }, + { + "description": "a valid protocol-relative URI Reference", + "data": "//foo.bar/?baz=qux#quux", + "valid": true + }, + { + "description": "a valid relative URI Reference", + "data": "/abc", + "valid": true + }, + { + "description": "an invalid URI Reference", + "data": "\\\\WINDOWS\\fileshare", + "valid": false + }, + { + "description": "a valid URI Reference", + "data": "abc", + "valid": true + }, + { + "description": "a valid URI fragment", + "data": "#fragment", + "valid": true + }, + { + "description": "an invalid URI fragment", + "data": "#frag\\ment", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/format/uri-template.json b/vendor/jsonschema/json/tests/draft7/optional/format/uri-template.json new file mode 100644 index 00000000..df355c55 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/format/uri-template.json @@ -0,0 +1,58 @@ +[ + { + "description": "format: uri-template", + "schema": { "format": "uri-template" }, + "tests": [ + { + "description": "all string formats ignore integers", + 
"data": 12, + "valid": true + }, + { + "description": "all string formats ignore floats", + "data": 13.7, + "valid": true + }, + { + "description": "all string formats ignore objects", + "data": {}, + "valid": true + }, + { + "description": "all string formats ignore arrays", + "data": [], + "valid": true + }, + { + "description": "all string formats ignore booleans", + "data": false, + "valid": true + }, + { + "description": "all string formats ignore nulls", + "data": null, + "valid": true + }, + { + "description": "a valid uri-template", + "data": "http://example.com/dictionary/{term:1}/{term}", + "valid": true + }, + { + "description": "an invalid uri-template", + "data": "http://example.com/dictionary/{term:1}/{term", + "valid": false + }, + { + "description": "a valid uri-template without variables", + "data": "http://example.com/dictionary", + "valid": true + }, + { + "description": "a valid relative uri-template", + "data": "dictionary/{term:1}/{term}", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/format/uri.json b/vendor/jsonschema/json/tests/draft7/optional/format/uri.json new file mode 100644 index 00000000..792d71a0 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/format/uri.json @@ -0,0 +1,108 @@ +[ + { + "description": "validation of URIs", + "schema": { "format": "uri" }, + "tests": [ + { + "description": "a valid URL with anchor tag", + "data": "http://foo.bar/?baz=qux#quux", + "valid": true + }, + { + "description": "a valid URL with anchor tag and parentheses", + "data": "http://foo.com/blah_(wikipedia)_blah#cite-1", + "valid": true + }, + { + "description": "a valid URL with URL-encoded stuff", + "data": "http://foo.bar/?q=Test%20URL-encoded%20stuff", + "valid": true + }, + { + "description": "a valid puny-coded URL ", + "data": "http://xn--nw2a.xn--j6w193g/", + "valid": true + }, + { + "description": "a valid URL with many special characters", + "data": "http://-.~_!$&'()*+,;=:%40:80%2f::::::@example.com", + "valid": true + }, + { + "description": "a valid URL based on IPv4", + "data": "http://223.255.255.254", + "valid": true + }, + { + "description": "a valid URL with ftp scheme", + "data": "ftp://ftp.is.co.za/rfc/rfc1808.txt", + "valid": true + }, + { + "description": "a valid URL for a simple text file", + "data": "http://www.ietf.org/rfc/rfc2396.txt", + "valid": true + }, + { + "description": "a valid URL ", + "data": "ldap://[2001:db8::7]/c=GB?objectClass?one", + "valid": true + }, + { + "description": "a valid mailto URI", + "data": "mailto:John.Doe@example.com", + "valid": true + }, + { + "description": "a valid newsgroup URI", + "data": "news:comp.infosystems.www.servers.unix", + "valid": true + }, + { + "description": "a valid tel URI", + "data": "tel:+1-816-555-1212", + "valid": true + }, + { + "description": "a valid URN", + "data": "urn:oasis:names:specification:docbook:dtd:xml:4.1.2", + "valid": true + }, + { + "description": "an invalid protocol-relative URI Reference", + "data": "//foo.bar/?baz=qux#quux", + "valid": false + }, + { + "description": "an invalid relative URI Reference", + "data": "/abc", + "valid": false + }, + { + "description": "an invalid URI", + "data": "\\\\WINDOWS\\fileshare", + "valid": false + }, + { + "description": "an invalid URI though valid URI reference", + "data": "abc", + "valid": false + }, + { + "description": "an invalid URI with spaces", + "data": "http:// shouldfail.com", + "valid": false + }, + { + "description": "an invalid URI with spaces and missing 
scheme", + "data": ":// should fail", + "valid": false + }, + { + "description": "an invalid URI with comma in scheme", + "data": "bar,baz:foo", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/optional/non-bmp-regex.json b/vendor/jsonschema/json/tests/draft7/optional/non-bmp-regex.json new file mode 100644 index 00000000..dd67af2b --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/optional/non-bmp-regex.json @@ -0,0 +1,82 @@ +[ + { + "description": "Proper UTF-16 surrogate pair handling: pattern", + "comment": "Optional because .Net doesn't correctly handle 32-bit Unicode characters", + "schema": { "pattern": "^ðŸ²*$" }, + "tests": [ + { + "description": "matches empty", + "data": "", + "valid": true + }, + { + "description": "matches single", + "data": "ðŸ²", + "valid": true + }, + { + "description": "matches two", + "data": "ðŸ²ðŸ²", + "valid": true + }, + { + "description": "doesn't match one", + "data": "ðŸ‰", + "valid": false + }, + { + "description": "doesn't match two", + "data": "ðŸ‰ðŸ‰", + "valid": false + }, + { + "description": "doesn't match one ASCII", + "data": "D", + "valid": false + }, + { + "description": "doesn't match two ASCII", + "data": "DD", + "valid": false + } + ] + }, + { + "description": "Proper UTF-16 surrogate pair handling: patternProperties", + "comment": "Optional because .Net doesn't correctly handle 32-bit Unicode characters", + "schema": { + "patternProperties": { + "^ðŸ²*$": { + "type": "integer" + } + } + }, + "tests": [ + { + "description": "matches empty", + "data": { "": 1 }, + "valid": true + }, + { + "description": "matches single", + "data": { "ðŸ²": 1 }, + "valid": true + }, + { + "description": "matches two", + "data": { "ðŸ²ðŸ²": 1 }, + "valid": true + }, + { + "description": "doesn't match one", + "data": { "ðŸ²": "hello" }, + "valid": false + }, + { + "description": "doesn't match two", + "data": { "ðŸ²ðŸ²": "hello" }, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/pattern.json b/vendor/jsonschema/json/tests/draft7/pattern.json new file mode 100644 index 00000000..92db0f97 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/pattern.json @@ -0,0 +1,59 @@ +[ + { + "description": "pattern validation", + "schema": {"pattern": "^a*$"}, + "tests": [ + { + "description": "a matching pattern is valid", + "data": "aaa", + "valid": true + }, + { + "description": "a non-matching pattern is invalid", + "data": "abc", + "valid": false + }, + { + "description": "ignores booleans", + "data": true, + "valid": true + }, + { + "description": "ignores integers", + "data": 123, + "valid": true + }, + { + "description": "ignores floats", + "data": 1.0, + "valid": true + }, + { + "description": "ignores objects", + "data": {}, + "valid": true + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores null", + "data": null, + "valid": true + } + ] + }, + { + "description": "pattern is not anchored", + "schema": {"pattern": "a+"}, + "tests": [ + { + "description": "matches a substring", + "data": "xxaayy", + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/patternProperties.json b/vendor/jsonschema/json/tests/draft7/patternProperties.json new file mode 100644 index 00000000..c276e647 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/patternProperties.json @@ -0,0 +1,171 @@ +[ + { + "description": + "patternProperties validates properties matching a regex", + "schema": { + "patternProperties": { + "f.*o": 
{"type": "integer"} + } + }, + "tests": [ + { + "description": "a single valid match is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "multiple valid matches is valid", + "data": {"foo": 1, "foooooo" : 2}, + "valid": true + }, + { + "description": "a single invalid match is invalid", + "data": {"foo": "bar", "fooooo": 2}, + "valid": false + }, + { + "description": "multiple invalid matches is invalid", + "data": {"foo": "bar", "foooooo" : "baz"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["foo"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foo", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "multiple simultaneous patternProperties are validated", + "schema": { + "patternProperties": { + "a*": {"type": "integer"}, + "aaa*": {"maximum": 20} + } + }, + "tests": [ + { + "description": "a single valid match is valid", + "data": {"a": 21}, + "valid": true + }, + { + "description": "a simultaneous match is valid", + "data": {"aaaa": 18}, + "valid": true + }, + { + "description": "multiple matches is valid", + "data": {"a": 21, "aaaa": 18}, + "valid": true + }, + { + "description": "an invalid due to one is invalid", + "data": {"a": "bar"}, + "valid": false + }, + { + "description": "an invalid due to the other is invalid", + "data": {"aaaa": 31}, + "valid": false + }, + { + "description": "an invalid due to both is invalid", + "data": {"aaa": "foo", "aaaa": 31}, + "valid": false + } + ] + }, + { + "description": "regexes are not anchored by default and are case sensitive", + "schema": { + "patternProperties": { + "[0-9]{2,}": { "type": "boolean" }, + "X_": { "type": "string" } + } + }, + "tests": [ + { + "description": "non recognized members are ignored", + "data": { "answer 1": "42" }, + "valid": true + }, + { + "description": "recognized members are accounted for", + "data": { "a31b": null }, + "valid": false + }, + { + "description": "regexes are case sensitive", + "data": { "a_x_3": 3 }, + "valid": true + }, + { + "description": "regexes are case sensitive, 2", + "data": { "a_X_3": 3 }, + "valid": false + } + ] + }, + { + "description": "patternProperties with boolean schemas", + "schema": { + "patternProperties": { + "f.*": true, + "b.*": false + } + }, + "tests": [ + { + "description": "object with property matching schema true is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "object with property matching schema false is invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "object with both properties is invalid", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "object with a property matching both true and false is invalid", + "data": {"foobar":1}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "patternProperties with null valued instance properties", + "schema": { + "patternProperties": { + "^.*bar$": {"type": "null"} + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foobar": null}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/properties.json b/vendor/jsonschema/json/tests/draft7/properties.json new file mode 100644 index 00000000..5b971ca0 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/properties.json @@ -0,0 +1,236 @@ +[ + { + "description": "object properties validation", + "schema": { + 
"properties": { + "foo": {"type": "integer"}, + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "both properties present and valid is valid", + "data": {"foo": 1, "bar": "baz"}, + "valid": true + }, + { + "description": "one property invalid is invalid", + "data": {"foo": 1, "bar": {}}, + "valid": false + }, + { + "description": "both properties invalid is invalid", + "data": {"foo": [], "bar": {}}, + "valid": false + }, + { + "description": "doesn't invalidate other properties", + "data": {"quux": []}, + "valid": true + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": + "properties, patternProperties, additionalProperties interaction", + "schema": { + "properties": { + "foo": {"type": "array", "maxItems": 3}, + "bar": {"type": "array"} + }, + "patternProperties": {"f.o": {"minItems": 2}}, + "additionalProperties": {"type": "integer"} + }, + "tests": [ + { + "description": "property validates property", + "data": {"foo": [1, 2]}, + "valid": true + }, + { + "description": "property invalidates property", + "data": {"foo": [1, 2, 3, 4]}, + "valid": false + }, + { + "description": "patternProperty invalidates property", + "data": {"foo": []}, + "valid": false + }, + { + "description": "patternProperty validates nonproperty", + "data": {"fxo": [1, 2]}, + "valid": true + }, + { + "description": "patternProperty invalidates nonproperty", + "data": {"fxo": []}, + "valid": false + }, + { + "description": "additionalProperty ignores property", + "data": {"bar": []}, + "valid": true + }, + { + "description": "additionalProperty validates others", + "data": {"quux": 3}, + "valid": true + }, + { + "description": "additionalProperty invalidates others", + "data": {"quux": "foo"}, + "valid": false + } + ] + }, + { + "description": "properties with boolean schema", + "schema": { + "properties": { + "foo": true, + "bar": false + } + }, + "tests": [ + { + "description": "no property present is valid", + "data": {}, + "valid": true + }, + { + "description": "only 'true' property present is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "only 'false' property present is invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "both properties present is invalid", + "data": {"foo": 1, "bar": 2}, + "valid": false + } + ] + }, + { + "description": "properties with escaped characters", + "schema": { + "properties": { + "foo\nbar": {"type": "number"}, + "foo\"bar": {"type": "number"}, + "foo\\bar": {"type": "number"}, + "foo\rbar": {"type": "number"}, + "foo\tbar": {"type": "number"}, + "foo\fbar": {"type": "number"} + } + }, + "tests": [ + { + "description": "object with all numbers is valid", + "data": { + "foo\nbar": 1, + "foo\"bar": 1, + "foo\\bar": 1, + "foo\rbar": 1, + "foo\tbar": 1, + "foo\fbar": 1 + }, + "valid": true + }, + { + "description": "object with strings is invalid", + "data": { + "foo\nbar": "1", + "foo\"bar": "1", + "foo\\bar": "1", + "foo\rbar": "1", + "foo\tbar": "1", + "foo\fbar": "1" + }, + "valid": false + } + ] + }, + { + "description": "properties with null valued instance properties", + "schema": { + "properties": { + "foo": {"type": "null"} + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foo": null}, + "valid": true + } + ] + }, + { + "description": "properties whose names are Javascript object property names", + "comment": "Ensure JS implementations 
don't universally consider e.g. __proto__ to always be present in an object.", + "schema": { + "properties": { + "__proto__": {"type": "number"}, + "toString": { + "properties": { "length": { "type": "string" } } + }, + "constructor": {"type": "number"} + } + }, + "tests": [ + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + }, + { + "description": "none of the properties mentioned", + "data": {}, + "valid": true + }, + { + "description": "__proto__ not valid", + "data": { "__proto__": "foo" }, + "valid": false + }, + { + "description": "toString not valid", + "data": { "toString": { "length": 37 } }, + "valid": false + }, + { + "description": "constructor not valid", + "data": { "constructor": { "length": 37 } }, + "valid": false + }, + { + "description": "all present and valid", + "data": { + "__proto__": 12, + "toString": { "length": "foo" }, + "constructor": 37 + }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/propertyNames.json b/vendor/jsonschema/json/tests/draft7/propertyNames.json new file mode 100644 index 00000000..f0788e64 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/propertyNames.json @@ -0,0 +1,107 @@ +[ + { + "description": "propertyNames validation", + "schema": { + "propertyNames": {"maxLength": 3} + }, + "tests": [ + { + "description": "all property names valid", + "data": { + "f": {}, + "foo": {} + }, + "valid": true + }, + { + "description": "some property names invalid", + "data": { + "foo": {}, + "foobar": {} + }, + "valid": false + }, + { + "description": "object without properties is valid", + "data": {}, + "valid": true + }, + { + "description": "ignores arrays", + "data": [1, 2, 3, 4], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "propertyNames validation with pattern", + "schema": { + "propertyNames": { "pattern": "^a+$" } + }, + "tests": [ + { + "description": "matching property names valid", + "data": { + "a": {}, + "aa": {}, + "aaa": {} + }, + "valid": true + }, + { + "description": "non-matching property name is invalid", + "data": { + "aaA": {} + }, + "valid": false + }, + { + "description": "object without properties is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "propertyNames with boolean schema true", + "schema": {"propertyNames": true}, + "tests": [ + { + "description": "object with any properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "propertyNames with boolean schema false", + "schema": {"propertyNames": false}, + "tests": [ + { + "description": "object with any properties is invalid", + "data": {"foo": 1}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/ref.json b/vendor/jsonschema/json/tests/draft7/ref.json new file mode 100644 index 00000000..ab67f0e3 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/ref.json @@ -0,0 +1,822 @@ +[ + { + "description": "root pointer ref", + "schema": { + "properties": { + "foo": {"$ref": "#"} + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "match", + "data": {"foo": false}, + "valid": true + }, + { + 
"description": "recursive match", + "data": {"foo": {"foo": false}}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": false}, + "valid": false + }, + { + "description": "recursive mismatch", + "data": {"foo": {"bar": false}}, + "valid": false + } + ] + }, + { + "description": "relative pointer ref to object", + "schema": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"$ref": "#/properties/foo"} + } + }, + "tests": [ + { + "description": "match", + "data": {"bar": 3}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": true}, + "valid": false + } + ] + }, + { + "description": "relative pointer ref to array", + "schema": { + "items": [ + {"type": "integer"}, + {"$ref": "#/items/0"} + ] + }, + "tests": [ + { + "description": "match array", + "data": [1, 2], + "valid": true + }, + { + "description": "mismatch array", + "data": [1, "foo"], + "valid": false + } + ] + }, + { + "description": "escaped pointer ref", + "schema": { + "definitions": { + "tilde~field": {"type": "integer"}, + "slash/field": {"type": "integer"}, + "percent%field": {"type": "integer"} + }, + "properties": { + "tilde": {"$ref": "#/definitions/tilde~0field"}, + "slash": {"$ref": "#/definitions/slash~1field"}, + "percent": {"$ref": "#/definitions/percent%25field"} + } + }, + "tests": [ + { + "description": "slash invalid", + "data": {"slash": "aoeu"}, + "valid": false + }, + { + "description": "tilde invalid", + "data": {"tilde": "aoeu"}, + "valid": false + }, + { + "description": "percent invalid", + "data": {"percent": "aoeu"}, + "valid": false + }, + { + "description": "slash valid", + "data": {"slash": 123}, + "valid": true + }, + { + "description": "tilde valid", + "data": {"tilde": 123}, + "valid": true + }, + { + "description": "percent valid", + "data": {"percent": 123}, + "valid": true + } + ] + }, + { + "description": "nested refs", + "schema": { + "definitions": { + "a": {"type": "integer"}, + "b": {"$ref": "#/definitions/a"}, + "c": {"$ref": "#/definitions/b"} + }, + "allOf": [{ "$ref": "#/definitions/c" }] + }, + "tests": [ + { + "description": "nested ref valid", + "data": 5, + "valid": true + }, + { + "description": "nested ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "ref overrides any sibling keywords", + "schema": { + "definitions": { + "reffed": { + "type": "array" + } + }, + "properties": { + "foo": { + "$ref": "#/definitions/reffed", + "maxItems": 2 + } + } + }, + "tests": [ + { + "description": "ref valid", + "data": { "foo": [] }, + "valid": true + }, + { + "description": "ref valid, maxItems ignored", + "data": { "foo": [ 1, 2, 3] }, + "valid": true + }, + { + "description": "ref invalid", + "data": { "foo": "string" }, + "valid": false + } + ] + }, + { + "description": "$ref prevents a sibling $id from changing the base uri", + "schema": { + "$id": "http://localhost:1234/sibling_id/base/", + "definitions": { + "foo": { + "$id": "http://localhost:1234/sibling_id/foo.json", + "type": "string" + }, + "base_foo": { + "$comment": "this canonical uri is http://localhost:1234/sibling_id/base/foo.json", + "$id": "foo.json", + "type": "number" + } + }, + "allOf": [ + { + "$comment": "$ref resolves to http://localhost:1234/sibling_id/base/foo.json, not http://localhost:1234/sibling_id/foo.json", + "$id": "http://localhost:1234/sibling_id/", + "$ref": "foo.json" + } + ] + }, + "tests": [ + { + "description": "$ref resolves to /definitions/base_foo, data does not validate", + "data": "a", + "valid": false + }, + { + 
"description": "$ref resolves to /definitions/base_foo, data validates", + "data": 1, + "valid": true + } + ] + }, + { + "description": "remote ref, containing refs itself", + "schema": {"$ref": "http://json-schema.org/draft-07/schema#"}, + "tests": [ + { + "description": "remote ref valid", + "data": {"minLength": 1}, + "valid": true + }, + { + "description": "remote ref invalid", + "data": {"minLength": -1}, + "valid": false + } + ] + }, + { + "description": "property named $ref that is not a reference", + "schema": { + "properties": { + "$ref": {"type": "string"} + } + }, + "tests": [ + { + "description": "property named $ref valid", + "data": {"$ref": "a"}, + "valid": true + }, + { + "description": "property named $ref invalid", + "data": {"$ref": 2}, + "valid": false + } + ] + }, + { + "description": "property named $ref, containing an actual $ref", + "schema": { + "properties": { + "$ref": {"$ref": "#/definitions/is-string"} + }, + "definitions": { + "is-string": { + "type": "string" + } + } + }, + "tests": [ + { + "description": "property named $ref valid", + "data": {"$ref": "a"}, + "valid": true + }, + { + "description": "property named $ref invalid", + "data": {"$ref": 2}, + "valid": false + } + ] + }, + { + "description": "$ref to boolean schema true", + "schema": { + "allOf": [{ "$ref": "#/definitions/bool" }], + "definitions": { + "bool": true + } + }, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "$ref to boolean schema false", + "schema": { + "allOf": [{ "$ref": "#/definitions/bool" }], + "definitions": { + "bool": false + } + }, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "Recursive references between schemas", + "schema": { + "$id": "http://localhost:1234/tree", + "description": "tree of nodes", + "type": "object", + "properties": { + "meta": {"type": "string"}, + "nodes": { + "type": "array", + "items": {"$ref": "node"} + } + }, + "required": ["meta", "nodes"], + "definitions": { + "node": { + "$id": "http://localhost:1234/node", + "description": "node", + "type": "object", + "properties": { + "value": {"type": "number"}, + "subtree": {"$ref": "tree"} + }, + "required": ["value"] + } + } + }, + "tests": [ + { + "description": "valid tree", + "data": { + "meta": "root", + "nodes": [ + { + "value": 1, + "subtree": { + "meta": "child", + "nodes": [ + {"value": 1.1}, + {"value": 1.2} + ] + } + }, + { + "value": 2, + "subtree": { + "meta": "child", + "nodes": [ + {"value": 2.1}, + {"value": 2.2} + ] + } + } + ] + }, + "valid": true + }, + { + "description": "invalid tree", + "data": { + "meta": "root", + "nodes": [ + { + "value": 1, + "subtree": { + "meta": "child", + "nodes": [ + {"value": "string is invalid"}, + {"value": 1.2} + ] + } + }, + { + "value": 2, + "subtree": { + "meta": "child", + "nodes": [ + {"value": 2.1}, + {"value": 2.2} + ] + } + } + ] + }, + "valid": false + } + ] + }, + { + "description": "refs with quote", + "schema": { + "properties": { + "foo\"bar": {"$ref": "#/definitions/foo%22bar"} + }, + "definitions": { + "foo\"bar": {"type": "number"} + } + }, + "tests": [ + { + "description": "object with numbers is valid", + "data": { + "foo\"bar": 1 + }, + "valid": true + }, + { + "description": "object with strings is invalid", + "data": { + "foo\"bar": "1" + }, + "valid": false + } + ] + }, + { + "description": "Location-independent identifier", + "schema": { + "allOf": [{ + "$ref": "#foo" + }], + 
"definitions": { + "A": { + "$id": "#foo", + "type": "integer" + } + } + }, + "tests": [ + { + "data": 1, + "description": "match", + "valid": true + }, + { + "data": "a", + "description": "mismatch", + "valid": false + } + ] + }, + { + "description": "Location-independent identifier with base URI change in subschema", + "schema": { + "$id": "http://localhost:1234/root", + "allOf": [{ + "$ref": "http://localhost:1234/nested.json#foo" + }], + "definitions": { + "A": { + "$id": "nested.json", + "definitions": { + "B": { + "$id": "#foo", + "type": "integer" + } + } + } + } + }, + "tests": [ + { + "data": 1, + "description": "match", + "valid": true + }, + { + "data": "a", + "description": "mismatch", + "valid": false + } + ] + }, + { + "description": "naive replacement of $ref with its destination is not correct", + "schema": { + "definitions": { + "a_string": { "type": "string" } + }, + "enum": [ + { "$ref": "#/definitions/a_string" } + ] + }, + "tests": [ + { + "description": "do not evaluate the $ref inside the enum, matching any string", + "data": "this is a string", + "valid": false + }, + { + "description": "do not evaluate the $ref inside the enum, definition exact match", + "data": { "type": "string" }, + "valid": false + }, + { + "description": "match the enum exactly", + "data": { "$ref": "#/definitions/a_string" }, + "valid": true + } + ] + }, + { + "description": "refs with relative uris and defs", + "schema": { + "$id": "http://example.com/schema-relative-uri-defs1.json", + "properties": { + "foo": { + "$id": "schema-relative-uri-defs2.json", + "definitions": { + "inner": { + "properties": { + "bar": { "type": "string" } + } + } + }, + "allOf": [ { "$ref": "#/definitions/inner" } ] + } + }, + "allOf": [ { "$ref": "schema-relative-uri-defs2.json" } ] + }, + "tests": [ + { + "description": "invalid on inner field", + "data": { + "foo": { + "bar": 1 + }, + "bar": "a" + }, + "valid": false + }, + { + "description": "invalid on outer field", + "data": { + "foo": { + "bar": "a" + }, + "bar": 1 + }, + "valid": false + }, + { + "description": "valid on both fields", + "data": { + "foo": { + "bar": "a" + }, + "bar": "a" + }, + "valid": true + } + ] + }, + { + "description": "relative refs with absolute uris and defs", + "schema": { + "$id": "http://example.com/schema-refs-absolute-uris-defs1.json", + "properties": { + "foo": { + "$id": "http://example.com/schema-refs-absolute-uris-defs2.json", + "definitions": { + "inner": { + "properties": { + "bar": { "type": "string" } + } + } + }, + "allOf": [ { "$ref": "#/definitions/inner" } ] + } + }, + "allOf": [ { "$ref": "schema-refs-absolute-uris-defs2.json" } ] + }, + "tests": [ + { + "description": "invalid on inner field", + "data": { + "foo": { + "bar": 1 + }, + "bar": "a" + }, + "valid": false + }, + { + "description": "invalid on outer field", + "data": { + "foo": { + "bar": "a" + }, + "bar": 1 + }, + "valid": false + }, + { + "description": "valid on both fields", + "data": { + "foo": { + "bar": "a" + }, + "bar": "a" + }, + "valid": true + } + ] + }, + { + "description": "$id must be resolved against nearest parent, not just immediate parent", + "schema": { + "$id": "http://example.com/a.json", + "definitions": { + "x": { + "$id": "http://example.com/b/c.json", + "not": { + "definitions": { + "y": { + "$id": "d.json", + "type": "number" + } + } + } + } + }, + "allOf": [ + { + "$ref": "http://example.com/b/d.json" + } + ] + }, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": 
"non-number is invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "simple URN base URI with $ref via the URN", + "schema": { + "$comment": "URIs do not have to have HTTP(s) schemes", + "$id": "urn:uuid:deadbeef-1234-ffff-ffff-4321feebdaed", + "minimum": 30, + "properties": { + "foo": {"$ref": "urn:uuid:deadbeef-1234-ffff-ffff-4321feebdaed"} + } + }, + "tests": [ + { + "description": "valid under the URN IDed schema", + "data": {"foo": 37}, + "valid": true + }, + { + "description": "invalid under the URN IDed schema", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "simple URN base URI with JSON pointer", + "schema": { + "$comment": "URIs do not have to have HTTP(s) schemes", + "$id": "urn:uuid:deadbeef-1234-00ff-ff00-4321feebdaed", + "properties": { + "foo": {"$ref": "#/definitions/bar"} + }, + "definitions": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with NSS", + "schema": { + "$comment": "RFC 8141 §2.2", + "$id": "urn:example:1/406/47452/2", + "properties": { + "foo": {"$ref": "#/definitions/bar"} + }, + "definitions": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with r-component", + "schema": { + "$comment": "RFC 8141 §2.3.1", + "$id": "urn:example:foo-bar-baz-qux?+CCResolve:cc=uk", + "properties": { + "foo": {"$ref": "#/definitions/bar"} + }, + "definitions": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with q-component", + "schema": { + "$comment": "RFC 8141 §2.3.2", + "$id": "urn:example:weather?=op=map&lat=39.56&lon=-104.85&datetime=1969-07-21T02:56:15Z", + "properties": { + "foo": {"$ref": "#/definitions/bar"} + }, + "definitions": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with URN and JSON pointer ref", + "schema": { + "$id": "urn:uuid:deadbeef-1234-0000-0000-4321feebdaed", + "properties": { + "foo": {"$ref": "urn:uuid:deadbeef-1234-0000-0000-4321feebdaed#/definitions/bar"} + }, + "definitions": { + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + "valid": false + } + ] + }, + { + "description": "URN base URI with URN and anchor ref", + "schema": { + "$id": "urn:uuid:deadbeef-1234-ff00-00ff-4321feebdaed", + "properties": { + "foo": {"$ref": "urn:uuid:deadbeef-1234-ff00-00ff-4321feebdaed#something"} + }, + "definitions": { + "bar": { + "$id": "#something", + "type": "string" + } + } + }, + "tests": [ + { + "description": "a string is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "a non-string is invalid", + "data": {"foo": 12}, + 
"valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/refRemote.json b/vendor/jsonschema/json/tests/draft7/refRemote.json new file mode 100644 index 00000000..c2b20024 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/refRemote.json @@ -0,0 +1,239 @@ +[ + { + "description": "remote ref", + "schema": {"$ref": "http://localhost:1234/integer.json"}, + "tests": [ + { + "description": "remote ref valid", + "data": 1, + "valid": true + }, + { + "description": "remote ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "fragment within remote ref", + "schema": {"$ref": "http://localhost:1234/subSchemas.json#/integer"}, + "tests": [ + { + "description": "remote fragment valid", + "data": 1, + "valid": true + }, + { + "description": "remote fragment invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "ref within remote ref", + "schema": { + "$ref": "http://localhost:1234/subSchemas.json#/refToInteger" + }, + "tests": [ + { + "description": "ref within ref valid", + "data": 1, + "valid": true + }, + { + "description": "ref within ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "base URI change", + "schema": { + "$id": "http://localhost:1234/", + "items": { + "$id": "baseUriChange/", + "items": {"$ref": "folderInteger.json"} + } + }, + "tests": [ + { + "description": "base URI change ref valid", + "data": [[1]], + "valid": true + }, + { + "description": "base URI change ref invalid", + "data": [["a"]], + "valid": false + } + ] + }, + { + "description": "base URI change - change folder", + "schema": { + "$id": "http://localhost:1234/scope_change_defs1.json", + "type" : "object", + "properties": { + "list": {"$ref": "#/definitions/baz"} + }, + "definitions": { + "baz": { + "$id": "baseUriChangeFolder/", + "type": "array", + "items": {"$ref": "folderInteger.json"} + } + } + }, + "tests": [ + { + "description": "number is valid", + "data": {"list": [1]}, + "valid": true + }, + { + "description": "string is invalid", + "data": {"list": ["a"]}, + "valid": false + } + ] + }, + { + "description": "base URI change - change folder in subschema", + "schema": { + "$id": "http://localhost:1234/scope_change_defs2.json", + "type" : "object", + "properties": { + "list": {"$ref": "#/definitions/baz/definitions/bar"} + }, + "definitions": { + "baz": { + "$id": "baseUriChangeFolderInSubschema/", + "definitions": { + "bar": { + "type": "array", + "items": {"$ref": "folderInteger.json"} + } + } + } + } + }, + "tests": [ + { + "description": "number is valid", + "data": {"list": [1]}, + "valid": true + }, + { + "description": "string is invalid", + "data": {"list": ["a"]}, + "valid": false + } + ] + }, + { + "description": "root ref in remote ref", + "schema": { + "$id": "http://localhost:1234/object", + "type": "object", + "properties": { + "name": {"$ref": "name.json#/definitions/orNull"} + } + }, + "tests": [ + { + "description": "string is valid", + "data": { + "name": "foo" + }, + "valid": true + }, + { + "description": "null is valid", + "data": { + "name": null + }, + "valid": true + }, + { + "description": "object is invalid", + "data": { + "name": { + "name": null + } + }, + "valid": false + } + ] + }, + { + "description": "remote ref with ref to definitions", + "schema": { + "$id": "http://localhost:1234/schema-remote-ref-ref-defs1.json", + "allOf": [ + { "$ref": "ref-and-definitions.json" } + ] + }, + "tests": [ + { + "description": "invalid", + "data": { + "bar": 1 + }, + "valid": false + }, + { + 
"description": "valid", + "data": { + "bar": "a" + }, + "valid": true + } + ] + }, + { + "description": "Location-independent identifier in remote ref", + "schema": { + "$ref": "http://localhost:1234/locationIndependentIdentifierPre2019.json#/definitions/refToInteger" + }, + "tests": [ + { + "description": "integer is valid", + "data": 1, + "valid": true + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "retrieved nested refs resolve relative to their URI not $id", + "schema": { + "$id": "http://localhost:1234/some-id", + "properties": { + "name": {"$ref": "nested/foo-ref-string.json"} + } + }, + "tests": [ + { + "description": "number is invalid", + "data": { + "name": {"foo": 1} + }, + "valid": false + }, + { + "description": "string is valid", + "data": { + "name": {"foo": "a"} + }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/required.json b/vendor/jsonschema/json/tests/draft7/required.json new file mode 100644 index 00000000..8d8087af --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/required.json @@ -0,0 +1,151 @@ +[ + { + "description": "required validation", + "schema": { + "properties": { + "foo": {}, + "bar": {} + }, + "required": ["foo"] + }, + "tests": [ + { + "description": "present required property is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "non-present required property is invalid", + "data": {"bar": 1}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores strings", + "data": "", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "required default validation", + "schema": { + "properties": { + "foo": {} + } + }, + "tests": [ + { + "description": "not required by default", + "data": {}, + "valid": true + } + ] + }, + { + "description": "required with empty array", + "schema": { + "properties": { + "foo": {} + }, + "required": [] + }, + "tests": [ + { + "description": "property not required", + "data": {}, + "valid": true + } + ] + }, + { + "description": "required with escaped characters", + "schema": { + "required": [ + "foo\nbar", + "foo\"bar", + "foo\\bar", + "foo\rbar", + "foo\tbar", + "foo\fbar" + ] + }, + "tests": [ + { + "description": "object with all properties present is valid", + "data": { + "foo\nbar": 1, + "foo\"bar": 1, + "foo\\bar": 1, + "foo\rbar": 1, + "foo\tbar": 1, + "foo\fbar": 1 + }, + "valid": true + }, + { + "description": "object with some properties missing is invalid", + "data": { + "foo\nbar": "1", + "foo\"bar": "1" + }, + "valid": false + } + ] + }, + { + "description": "required properties whose names are Javascript object property names", + "comment": "Ensure JS implementations don't universally consider e.g. 
__proto__ to always be present in an object.", + "schema": { "required": ["__proto__", "toString", "constructor"] }, + "tests": [ + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + }, + { + "description": "none of the properties mentioned", + "data": {}, + "valid": false + }, + { + "description": "__proto__ present", + "data": { "__proto__": "foo" }, + "valid": false + }, + { + "description": "toString present", + "data": { "toString": { "length": 37 } }, + "valid": false + }, + { + "description": "constructor present", + "data": { "constructor": { "length": 37 } }, + "valid": false + }, + { + "description": "all present", + "data": { + "__proto__": 12, + "toString": { "length": "foo" }, + "constructor": 37 + }, + "valid": true + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/type.json b/vendor/jsonschema/json/tests/draft7/type.json new file mode 100644 index 00000000..83046470 --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/type.json @@ -0,0 +1,474 @@ +[ + { + "description": "integer type matches integers", + "schema": {"type": "integer"}, + "tests": [ + { + "description": "an integer is an integer", + "data": 1, + "valid": true + }, + { + "description": "a float with zero fractional part is an integer", + "data": 1.0, + "valid": true + }, + { + "description": "a float is not an integer", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an integer", + "data": "foo", + "valid": false + }, + { + "description": "a string is still not an integer, even if it looks like one", + "data": "1", + "valid": false + }, + { + "description": "an object is not an integer", + "data": {}, + "valid": false + }, + { + "description": "an array is not an integer", + "data": [], + "valid": false + }, + { + "description": "a boolean is not an integer", + "data": true, + "valid": false + }, + { + "description": "null is not an integer", + "data": null, + "valid": false + } + ] + }, + { + "description": "number type matches numbers", + "schema": {"type": "number"}, + "tests": [ + { + "description": "an integer is a number", + "data": 1, + "valid": true + }, + { + "description": "a float with zero fractional part is a number (and an integer)", + "data": 1.0, + "valid": true + }, + { + "description": "a float is a number", + "data": 1.1, + "valid": true + }, + { + "description": "a string is not a number", + "data": "foo", + "valid": false + }, + { + "description": "a string is still not a number, even if it looks like one", + "data": "1", + "valid": false + }, + { + "description": "an object is not a number", + "data": {}, + "valid": false + }, + { + "description": "an array is not a number", + "data": [], + "valid": false + }, + { + "description": "a boolean is not a number", + "data": true, + "valid": false + }, + { + "description": "null is not a number", + "data": null, + "valid": false + } + ] + }, + { + "description": "string type matches strings", + "schema": {"type": "string"}, + "tests": [ + { + "description": "1 is not a string", + "data": 1, + "valid": false + }, + { + "description": "a float is not a string", + "data": 1.1, + "valid": false + }, + { + "description": "a string is a string", + "data": "foo", + "valid": true + }, + { + "description": "a string is still a string, even if it looks like a number", + "data": "1", + "valid": true + }, + { + "description": "an empty string is still a string", + "data": "", + "valid": true + }, + { + 
"description": "an object is not a string", + "data": {}, + "valid": false + }, + { + "description": "an array is not a string", + "data": [], + "valid": false + }, + { + "description": "a boolean is not a string", + "data": true, + "valid": false + }, + { + "description": "null is not a string", + "data": null, + "valid": false + } + ] + }, + { + "description": "object type matches objects", + "schema": {"type": "object"}, + "tests": [ + { + "description": "an integer is not an object", + "data": 1, + "valid": false + }, + { + "description": "a float is not an object", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an object", + "data": "foo", + "valid": false + }, + { + "description": "an object is an object", + "data": {}, + "valid": true + }, + { + "description": "an array is not an object", + "data": [], + "valid": false + }, + { + "description": "a boolean is not an object", + "data": true, + "valid": false + }, + { + "description": "null is not an object", + "data": null, + "valid": false + } + ] + }, + { + "description": "array type matches arrays", + "schema": {"type": "array"}, + "tests": [ + { + "description": "an integer is not an array", + "data": 1, + "valid": false + }, + { + "description": "a float is not an array", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an array", + "data": "foo", + "valid": false + }, + { + "description": "an object is not an array", + "data": {}, + "valid": false + }, + { + "description": "an array is an array", + "data": [], + "valid": true + }, + { + "description": "a boolean is not an array", + "data": true, + "valid": false + }, + { + "description": "null is not an array", + "data": null, + "valid": false + } + ] + }, + { + "description": "boolean type matches booleans", + "schema": {"type": "boolean"}, + "tests": [ + { + "description": "an integer is not a boolean", + "data": 1, + "valid": false + }, + { + "description": "zero is not a boolean", + "data": 0, + "valid": false + }, + { + "description": "a float is not a boolean", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not a boolean", + "data": "foo", + "valid": false + }, + { + "description": "an empty string is not a boolean", + "data": "", + "valid": false + }, + { + "description": "an object is not a boolean", + "data": {}, + "valid": false + }, + { + "description": "an array is not a boolean", + "data": [], + "valid": false + }, + { + "description": "true is a boolean", + "data": true, + "valid": true + }, + { + "description": "false is a boolean", + "data": false, + "valid": true + }, + { + "description": "null is not a boolean", + "data": null, + "valid": false + } + ] + }, + { + "description": "null type matches only the null object", + "schema": {"type": "null"}, + "tests": [ + { + "description": "an integer is not null", + "data": 1, + "valid": false + }, + { + "description": "a float is not null", + "data": 1.1, + "valid": false + }, + { + "description": "zero is not null", + "data": 0, + "valid": false + }, + { + "description": "a string is not null", + "data": "foo", + "valid": false + }, + { + "description": "an empty string is not null", + "data": "", + "valid": false + }, + { + "description": "an object is not null", + "data": {}, + "valid": false + }, + { + "description": "an array is not null", + "data": [], + "valid": false + }, + { + "description": "true is not null", + "data": true, + "valid": false + }, + { + "description": "false is not null", + "data": false, + "valid": false + }, + { 
+ "description": "null is null", + "data": null, + "valid": true + } + ] + }, + { + "description": "multiple types can be specified in an array", + "schema": {"type": ["integer", "string"]}, + "tests": [ + { + "description": "an integer is valid", + "data": 1, + "valid": true + }, + { + "description": "a string is valid", + "data": "foo", + "valid": true + }, + { + "description": "a float is invalid", + "data": 1.1, + "valid": false + }, + { + "description": "an object is invalid", + "data": {}, + "valid": false + }, + { + "description": "an array is invalid", + "data": [], + "valid": false + }, + { + "description": "a boolean is invalid", + "data": true, + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + } + ] + }, + { + "description": "type as array with one item", + "schema": { + "type": ["string"] + }, + "tests": [ + { + "description": "string is valid", + "data": "foo", + "valid": true + }, + { + "description": "number is invalid", + "data": 123, + "valid": false + } + ] + }, + { + "description": "type: array or object", + "schema": { + "type": ["array", "object"] + }, + "tests": [ + { + "description": "array is valid", + "data": [1,2,3], + "valid": true + }, + { + "description": "object is valid", + "data": {"foo": 123}, + "valid": true + }, + { + "description": "number is invalid", + "data": 123, + "valid": false + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + } + ] + }, + { + "description": "type: array, object or null", + "schema": { + "type": ["array", "object", "null"] + }, + "tests": [ + { + "description": "array is valid", + "data": [1,2,3], + "valid": true + }, + { + "description": "object is valid", + "data": {"foo": 123}, + "valid": true + }, + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "number is invalid", + "data": 123, + "valid": false + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/uniqueItems.json b/vendor/jsonschema/json/tests/draft7/uniqueItems.json new file mode 100644 index 00000000..2ccf666d --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/uniqueItems.json @@ -0,0 +1,404 @@ +[ + { + "description": "uniqueItems validation", + "schema": {"uniqueItems": true}, + "tests": [ + { + "description": "unique array of integers is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "non-unique array of integers is invalid", + "data": [1, 1], + "valid": false + }, + { + "description": "non-unique array of more than two integers is invalid", + "data": [1, 2, 1], + "valid": false + }, + { + "description": "numbers are unique if mathematically unequal", + "data": [1.0, 1.00, 1], + "valid": false + }, + { + "description": "false is not equal to zero", + "data": [0, false], + "valid": true + }, + { + "description": "true is not equal to one", + "data": [1, true], + "valid": true + }, + { + "description": "unique array of strings is valid", + "data": ["foo", "bar", "baz"], + "valid": true + }, + { + "description": "non-unique array of strings is invalid", + "data": ["foo", "bar", "foo"], + "valid": false + }, + { + "description": "unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "baz"}], + "valid": true + }, + { + "description": "non-unique array of objects is invalid", + "data": [{"foo": "bar"}, {"foo": "bar"}], + "valid": false + }, + { 
+ "description": "unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : false}}} + ], + "valid": true + }, + { + "description": "non-unique array of nested objects is invalid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : true}}} + ], + "valid": false + }, + { + "description": "unique array of arrays is valid", + "data": [["foo"], ["bar"]], + "valid": true + }, + { + "description": "non-unique array of arrays is invalid", + "data": [["foo"], ["foo"]], + "valid": false + }, + { + "description": "non-unique array of more than two arrays is invalid", + "data": [["foo"], ["bar"], ["foo"]], + "valid": false + }, + { + "description": "1 and true are unique", + "data": [1, true], + "valid": true + }, + { + "description": "0 and false are unique", + "data": [0, false], + "valid": true + }, + { + "description": "[1] and [true] are unique", + "data": [[1], [true]], + "valid": true + }, + { + "description": "[0] and [false] are unique", + "data": [[0], [false]], + "valid": true + }, + { + "description": "nested [1] and [true] are unique", + "data": [[[1], "foo"], [[true], "foo"]], + "valid": true + }, + { + "description": "nested [0] and [false] are unique", + "data": [[[0], "foo"], [[false], "foo"]], + "valid": true + }, + { + "description": "unique heterogeneous types are valid", + "data": [{}, [1], true, null, 1, "{}"], + "valid": true + }, + { + "description": "non-unique heterogeneous types are invalid", + "data": [{}, [1], true, null, {}, 1], + "valid": false + }, + { + "description": "different objects are unique", + "data": [{"a": 1, "b": 2}, {"a": 2, "b": 1}], + "valid": true + }, + { + "description": "objects are non-unique despite key order", + "data": [{"a": 1, "b": 2}, {"b": 2, "a": 1}], + "valid": false + }, + { + "description": "{\"a\": false} and {\"a\": 0} are unique", + "data": [{"a": false}, {"a": 0}], + "valid": true + }, + { + "description": "{\"a\": true} and {\"a\": 1} are unique", + "data": [{"a": true}, {"a": 1}], + "valid": true + } + ] + }, + { + "description": "uniqueItems with an array of items", + "schema": { + "items": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": true + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is not valid", + "data": [false, false], + "valid": false + }, + { + "description": "[true, true] from items array is not valid", + "data": [true, true], + "valid": false + }, + { + "description": "unique array extended from [false, true] is valid", + "data": [false, true, "foo", "bar"], + "valid": true + }, + { + "description": "unique array extended from [true, false] is valid", + "data": [true, false, "foo", "bar"], + "valid": true + }, + { + "description": "non-unique array extended from [false, true] is not valid", + "data": [false, true, "foo", "foo"], + "valid": false + }, + { + "description": "non-unique array extended from [true, false] is not valid", + "data": [true, false, "foo", "foo"], + "valid": false + } + ] + }, + { + "description": "uniqueItems with an array of items and additionalItems=false", + "schema": { + "items": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": true, + "additionalItems": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", 
+ "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is not valid", + "data": [false, false], + "valid": false + }, + { + "description": "[true, true] from items array is not valid", + "data": [true, true], + "valid": false + }, + { + "description": "extra items are invalid even if unique", + "data": [false, true, null], + "valid": false + } + ] + }, + { + "description": "uniqueItems=false validation", + "schema": { "uniqueItems": false }, + "tests": [ + { + "description": "unique array of integers is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "non-unique array of integers is valid", + "data": [1, 1], + "valid": true + }, + { + "description": "numbers are unique if mathematically unequal", + "data": [1.0, 1.00, 1], + "valid": true + }, + { + "description": "false is not equal to zero", + "data": [0, false], + "valid": true + }, + { + "description": "true is not equal to one", + "data": [1, true], + "valid": true + }, + { + "description": "unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "baz"}], + "valid": true + }, + { + "description": "non-unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "bar"}], + "valid": true + }, + { + "description": "unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : false}}} + ], + "valid": true + }, + { + "description": "non-unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : true}}} + ], + "valid": true + }, + { + "description": "unique array of arrays is valid", + "data": [["foo"], ["bar"]], + "valid": true + }, + { + "description": "non-unique array of arrays is valid", + "data": [["foo"], ["foo"]], + "valid": true + }, + { + "description": "1 and true are unique", + "data": [1, true], + "valid": true + }, + { + "description": "0 and false are unique", + "data": [0, false], + "valid": true + }, + { + "description": "unique heterogeneous types are valid", + "data": [{}, [1], true, null, 1], + "valid": true + }, + { + "description": "non-unique heterogeneous types are valid", + "data": [{}, [1], true, null, {}, 1], + "valid": true + } + ] + }, + { + "description": "uniqueItems=false with an array of items", + "schema": { + "items": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is valid", + "data": [false, false], + "valid": true + }, + { + "description": "[true, true] from items array is valid", + "data": [true, true], + "valid": true + }, + { + "description": "unique array extended from [false, true] is valid", + "data": [false, true, "foo", "bar"], + "valid": true + }, + { + "description": "unique array extended from [true, false] is valid", + "data": [true, false, "foo", "bar"], + "valid": true + }, + { + "description": "non-unique array extended from [false, true] is valid", + "data": [false, true, "foo", "foo"], + "valid": true + }, + { + "description": "non-unique array extended from [true, false] is valid", + "data": [true, false, "foo", "foo"], + "valid": true + } + ] + }, + { + "description": 
"uniqueItems=false with an array of items and additionalItems=false", + "schema": { + "items": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": false, + "additionalItems": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is valid", + "data": [false, false], + "valid": true + }, + { + "description": "[true, true] from items array is valid", + "data": [true, true], + "valid": true + }, + { + "description": "extra items are invalid even if unique", + "data": [false, true, null], + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/draft7/unknownKeyword.json b/vendor/jsonschema/json/tests/draft7/unknownKeyword.json new file mode 100644 index 00000000..1f58d97e --- /dev/null +++ b/vendor/jsonschema/json/tests/draft7/unknownKeyword.json @@ -0,0 +1,56 @@ +[ + { + "description": "$id inside an unknown keyword is not a real identifier", + "comment": "the implementation must not be confused by an $id in locations we do not know how to parse", + "schema": { + "definitions": { + "id_in_unknown0": { + "not": { + "array_of_schemas": [ + { + "$id": "https://localhost:1234/unknownKeyword/my_identifier.json", + "type": "null" + } + ] + } + }, + "real_id_in_schema": { + "$id": "https://localhost:1234/unknownKeyword/my_identifier.json", + "type": "string" + }, + "id_in_unknown1": { + "not": { + "object_of_schemas": { + "foo": { + "$id": "https://localhost:1234/unknownKeyword/my_identifier.json", + "type": "integer" + } + } + } + } + }, + "anyOf": [ + { "$ref": "#/definitions/id_in_unknown0" }, + { "$ref": "#/definitions/id_in_unknown1" }, + { "$ref": "https://localhost:1234/unknownKeyword/my_identifier.json" } + ] + }, + "tests": [ + { + "description": "type matches second anyOf, which has a real schema in it", + "data": "a string", + "valid": true + }, + { + "description": "type matches non-schema in first anyOf", + "data": null, + "valid": false + }, + { + "description": "type matches non-schema in third anyOf", + "data": 1, + "valid": false + } + ] + } +] diff --git a/vendor/jsonschema/json/tests/latest b/vendor/jsonschema/json/tests/latest new file mode 120000 index 00000000..9a4784dd --- /dev/null +++ b/vendor/jsonschema/json/tests/latest @@ -0,0 +1 @@ +draft2020-12 \ No newline at end of file diff --git a/vendor/jsonschema/json/tox.ini b/vendor/jsonschema/json/tox.ini new file mode 100644 index 00000000..ec180a91 --- /dev/null +++ b/vendor/jsonschema/json/tox.ini @@ -0,0 +1,9 @@ +[tox] +minversion = 1.6 +envlist = sanity +skipsdist = True + +[testenv:sanity] +# used just for validating the structure of the test case files themselves +deps = jsonschema==4.6.1 +commands = {envpython} bin/jsonschema_suite check diff --git a/vendor/jsonschema/jsonschema/__init__.py b/vendor/jsonschema/jsonschema/__init__.py new file mode 100644 index 00000000..75f29469 --- /dev/null +++ b/vendor/jsonschema/jsonschema/__init__.py @@ -0,0 +1,58 @@ +""" +An implementation of JSON Schema for Python + +The main functionality is provided by the validator classes for each of the +supported JSON Schema versions. + +Most commonly, `validate` is the quickest way to simply validate a given +instance under a schema, and will create a validator for you. 
+""" +import warnings + +from jsonschema._format import ( + FormatChecker, + draft3_format_checker, + draft4_format_checker, + draft6_format_checker, + draft7_format_checker, + draft201909_format_checker, + draft202012_format_checker, +) +from jsonschema._types import TypeChecker +from jsonschema.exceptions import ( + ErrorTree, + FormatError, + RefResolutionError, + SchemaError, + ValidationError, +) +from jsonschema.protocols import Validator +from jsonschema.validators import ( + Draft3Validator, + Draft4Validator, + Draft6Validator, + Draft7Validator, + Draft201909Validator, + Draft202012Validator, + RefResolver, + validate, +) + + +def __getattr__(name): + if name == "__version__": + warnings.warn( + "Accessing jsonschema.__version__ is deprecated and will be " + "removed in a future release. Use importlib.metadata directly " + "to query for jsonschema's version.", + DeprecationWarning, + stacklevel=2, + ) + + try: + from importlib import metadata + except ImportError: + import importlib_metadata as metadata + + return metadata.version("jsonschema") + raise AttributeError(f"module {__name__} has no attribute {name}") diff --git a/vendor/jsonschema/jsonschema/__main__.py b/vendor/jsonschema/jsonschema/__main__.py new file mode 100644 index 00000000..fdc21e23 --- /dev/null +++ b/vendor/jsonschema/jsonschema/__main__.py @@ -0,0 +1,3 @@ +from jsonschema.cli import main + +main() diff --git a/vendor/jsonschema/jsonschema/__version__.py b/vendor/jsonschema/jsonschema/__version__.py new file mode 100644 index 00000000..d32a48d9 --- /dev/null +++ b/vendor/jsonschema/jsonschema/__version__.py @@ -0,0 +1 @@ +__version__ = "4.10.3" diff --git a/vendor/jsonschema/jsonschema/_format.py b/vendor/jsonschema/jsonschema/_format.py new file mode 100644 index 00000000..da4bb799 --- /dev/null +++ b/vendor/jsonschema/jsonschema/_format.py @@ -0,0 +1,498 @@ +from __future__ import annotations + +from contextlib import suppress +from uuid import UUID +import datetime +import ipaddress +import re +import typing + +from jsonschema.exceptions import FormatError + +_FormatCheckCallable = typing.Callable[[object], bool] +_F = typing.TypeVar("_F", bound=_FormatCheckCallable) +_RaisesType = typing.Union[ + typing.Type[Exception], typing.Tuple[typing.Type[Exception], ...], +] + + +class FormatChecker(object): + """ + A ``format`` property checker. + + JSON Schema does not mandate that the ``format`` property actually do any + validation. If validation is desired however, instances of this class can + be hooked into validators to enable format validation. + + `FormatChecker` objects always return ``True`` when asked about + formats that they do not know how to validate. + + To check a custom format using a function that takes an instance and + returns a ``bool``, use the `FormatChecker.checks` or + `FormatChecker.cls_checks` decorators. + + Arguments: + + formats (~collections.abc.Iterable): + + The known formats to validate. This argument can be used to + limit which formats will be used during validation. + """ + + checkers: dict[ + str, + tuple[_FormatCheckCallable, _RaisesType], + ] = {} + + def __init__(self, formats: typing.Iterable[str] | None = None): + if formats is None: + self.checkers = self.checkers.copy() + else: + self.checkers = dict((k, self.checkers[k]) for k in formats) + + def __repr__(self): + return "".format(sorted(self.checkers)) + + def checks( + self, format: str, raises: _RaisesType = (), + ) -> typing.Callable[[_F], _F]: + """ + Register a decorated function as validating a new format. 
+ + Arguments: + + format (str): + + The format that the decorated function will check. + + raises (Exception): + + The exception(s) raised by the decorated function when an + invalid instance is found. + + The exception object will be accessible as the + `jsonschema.exceptions.ValidationError.cause` attribute of the + resulting validation error. + """ + + def _checks(func: _F) -> _F: + self.checkers[format] = (func, raises) + return func + + return _checks + + @classmethod + def cls_checks( + cls, format: str, raises: _RaisesType = (), + ) -> typing.Callable[[_F], _F]: + def _checks(func: _F) -> _F: + cls.checkers[format] = (func, raises) + return func + + return _checks + + def check(self, instance: object, format: str) -> None: + """ + Check whether the instance conforms to the given format. + + Arguments: + + instance (*any primitive type*, i.e. str, number, bool): + + The instance to check + + format (str): + + The format that instance should conform to + + + Raises: + + FormatError: if the instance does not conform to ``format`` + """ + + if format not in self.checkers: + return + + func, raises = self.checkers[format] + result, cause = None, None + try: + result = func(instance) + except raises as e: + cause = e + if not result: + raise FormatError(f"{instance!r} is not a {format!r}", cause=cause) + + def conforms(self, instance: object, format: str) -> bool: + """ + Check whether the instance conforms to the given format. + + Arguments: + + instance (*any primitive type*, i.e. str, number, bool): + + The instance to check + + format (str): + + The format that instance should conform to + + Returns: + + bool: whether it conformed + """ + + try: + self.check(instance, format) + except FormatError: + return False + else: + return True + + +draft3_format_checker = FormatChecker() +draft4_format_checker = FormatChecker() +draft6_format_checker = FormatChecker() +draft7_format_checker = FormatChecker() +draft201909_format_checker = FormatChecker() +draft202012_format_checker = FormatChecker() + +_draft_checkers: dict[str, FormatChecker] = dict( + draft3=draft3_format_checker, + draft4=draft4_format_checker, + draft6=draft6_format_checker, + draft7=draft7_format_checker, + draft201909=draft201909_format_checker, + draft202012=draft202012_format_checker, +) + + +def _checks_drafts( + name=None, + draft3=None, + draft4=None, + draft6=None, + draft7=None, + draft201909=None, + draft202012=None, + raises=(), +) -> typing.Callable[[_F], _F]: + draft3 = draft3 or name + draft4 = draft4 or name + draft6 = draft6 or name + draft7 = draft7 or name + draft201909 = draft201909 or name + draft202012 = draft202012 or name + + def wrap(func: _F) -> _F: + if draft3: + func = _draft_checkers["draft3"].checks(draft3, raises)(func) + if draft4: + func = _draft_checkers["draft4"].checks(draft4, raises)(func) + if draft6: + func = _draft_checkers["draft6"].checks(draft6, raises)(func) + if draft7: + func = _draft_checkers["draft7"].checks(draft7, raises)(func) + if draft201909: + func = _draft_checkers["draft201909"].checks(draft201909, raises)( + func, + ) + if draft202012: + func = _draft_checkers["draft202012"].checks(draft202012, raises)( + func, + ) + + # Oy. This is bad global state, but relied upon for now, until + # deprecation. 
See #519 and test_format_checkers_come_with_defaults + FormatChecker.cls_checks( + draft202012 or draft201909 or draft7 or draft6 or draft4 or draft3, + raises, + )(func) + return func + + return wrap + + +@_checks_drafts(name="idn-email") +@_checks_drafts(name="email") +def is_email(instance: object) -> bool: + if not isinstance(instance, str): + return True + return "@" in instance + + +@_checks_drafts( + draft3="ip-address", + draft4="ipv4", + draft6="ipv4", + draft7="ipv4", + draft201909="ipv4", + draft202012="ipv4", + raises=ipaddress.AddressValueError, +) +def is_ipv4(instance: object) -> bool: + if not isinstance(instance, str): + return True + return bool(ipaddress.IPv4Address(instance)) + + +@_checks_drafts(name="ipv6", raises=ipaddress.AddressValueError) +def is_ipv6(instance: object) -> bool: + if not isinstance(instance, str): + return True + address = ipaddress.IPv6Address(instance) + return not getattr(address, "scope_id", "") + + +with suppress(ImportError): + from fqdn import FQDN + + @_checks_drafts( + draft3="host-name", + draft4="hostname", + draft6="hostname", + draft7="hostname", + draft201909="hostname", + draft202012="hostname", + ) + def is_host_name(instance: object) -> bool: + if not isinstance(instance, str): + return True + return FQDN(instance).is_valid + + +with suppress(ImportError): + # The built-in `idna` codec only implements RFC 3890, so we go elsewhere. + import idna + + @_checks_drafts( + draft7="idn-hostname", + draft201909="idn-hostname", + draft202012="idn-hostname", + raises=(idna.IDNAError, UnicodeError), + ) + def is_idn_host_name(instance: object) -> bool: + if not isinstance(instance, str): + return True + idna.encode(instance) + return True + + +try: + import rfc3987 +except ImportError: + with suppress(ImportError): + from rfc3986_validator import validate_rfc3986 + + @_checks_drafts(name="uri") + def is_uri(instance: object) -> bool: + if not isinstance(instance, str): + return True + return validate_rfc3986(instance, rule="URI") + + @_checks_drafts( + draft6="uri-reference", + draft7="uri-reference", + draft201909="uri-reference", + draft202012="uri-reference", + raises=ValueError, + ) + def is_uri_reference(instance: object) -> bool: + if not isinstance(instance, str): + return True + return validate_rfc3986(instance, rule="URI_reference") + +else: + + @_checks_drafts( + draft7="iri", + draft201909="iri", + draft202012="iri", + raises=ValueError, + ) + def is_iri(instance: object) -> bool: + if not isinstance(instance, str): + return True + return rfc3987.parse(instance, rule="IRI") + + @_checks_drafts( + draft7="iri-reference", + draft201909="iri-reference", + draft202012="iri-reference", + raises=ValueError, + ) + def is_iri_reference(instance: object) -> bool: + if not isinstance(instance, str): + return True + return rfc3987.parse(instance, rule="IRI_reference") + + @_checks_drafts(name="uri", raises=ValueError) + def is_uri(instance: object) -> bool: + if not isinstance(instance, str): + return True + return rfc3987.parse(instance, rule="URI") + + @_checks_drafts( + draft6="uri-reference", + draft7="uri-reference", + draft201909="uri-reference", + draft202012="uri-reference", + raises=ValueError, + ) + def is_uri_reference(instance: object) -> bool: + if not isinstance(instance, str): + return True + return rfc3987.parse(instance, rule="URI_reference") + + +with suppress(ImportError): + from rfc3339_validator import validate_rfc3339 + + @_checks_drafts(name="date-time") + def is_datetime(instance: object) -> bool: + if not 
isinstance(instance, str): + return True + return validate_rfc3339(instance.upper()) + + @_checks_drafts( + draft7="time", + draft201909="time", + draft202012="time", + ) + def is_time(instance: object) -> bool: + if not isinstance(instance, str): + return True + return is_datetime("1970-01-01T" + instance) + + +@_checks_drafts(name="regex", raises=re.error) +def is_regex(instance: object) -> bool: + if not isinstance(instance, str): + return True + return bool(re.compile(instance)) + + +@_checks_drafts( + draft3="date", + draft7="date", + draft201909="date", + draft202012="date", + raises=ValueError, +) +def is_date(instance: object) -> bool: + if not isinstance(instance, str): + return True + return bool(instance.isascii() and datetime.date.fromisoformat(instance)) + + +@_checks_drafts(draft3="time", raises=ValueError) +def is_draft3_time(instance: object) -> bool: + if not isinstance(instance, str): + return True + return bool(datetime.datetime.strptime(instance, "%H:%M:%S")) + + +with suppress(ImportError): + from webcolors import CSS21_NAMES_TO_HEX + import webcolors + + def is_css_color_code(instance: object) -> bool: + return webcolors.normalize_hex(instance) + + @_checks_drafts(draft3="color", raises=(ValueError, TypeError)) + def is_css21_color(instance: object) -> bool: + if ( + not isinstance(instance, str) + or instance.lower() in CSS21_NAMES_TO_HEX + ): + return True + return is_css_color_code(instance) + + +with suppress(ImportError): + import jsonpointer + + @_checks_drafts( + draft6="json-pointer", + draft7="json-pointer", + draft201909="json-pointer", + draft202012="json-pointer", + raises=jsonpointer.JsonPointerException, + ) + def is_json_pointer(instance: object) -> bool: + if not isinstance(instance, str): + return True + return bool(jsonpointer.JsonPointer(instance)) + + # TODO: I don't want to maintain this, so it + # needs to go either into jsonpointer (pending + # https://github.com/stefankoegl/python-json-pointer/issues/34) or + # into a new external library. 
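The `relative-json-pointer` checker defined next implements the draft grammar: a non-negative integer with no leading zeros, followed by either `#` or an ordinary JSON pointer. A rough sketch of the resulting behaviour, assuming the optional `jsonpointer` dependency is installed so the format is registered at all:

```python
from jsonschema import FormatChecker

checker = FormatChecker()
for candidate in ["0", "1/foo/bar", "0#", "01/foo", "1#/"]:
    print(candidate, checker.conforms(candidate, "relative-json-pointer"))
# "0", "1/foo/bar" and "0#" conform; "01/foo" fails on the leading zero,
# and "1#/" fails because "#/" is neither "#" nor a valid JSON pointer.
```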
+ @_checks_drafts( + draft7="relative-json-pointer", + draft201909="relative-json-pointer", + draft202012="relative-json-pointer", + raises=jsonpointer.JsonPointerException, + ) + def is_relative_json_pointer(instance: object) -> bool: + # Definition taken from: + # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3 + if not isinstance(instance, str): + return True + non_negative_integer, rest = [], "" + for i, character in enumerate(instance): + if character.isdigit(): + # digits with a leading "0" are not allowed + if i > 0 and int(instance[i - 1]) == 0: + return False + + non_negative_integer.append(character) + continue + + if not non_negative_integer: + return False + + rest = instance[i:] + break + return (rest == "#") or bool(jsonpointer.JsonPointer(rest)) + + +with suppress(ImportError): + import uri_template + + @_checks_drafts( + draft6="uri-template", + draft7="uri-template", + draft201909="uri-template", + draft202012="uri-template", + ) + def is_uri_template(instance: object) -> bool: + if not isinstance(instance, str): + return True + return uri_template.validate(instance) + + +with suppress(ImportError): + import isoduration + + @_checks_drafts( + draft201909="duration", + draft202012="duration", + raises=isoduration.DurationParsingException, + ) + def is_duration(instance: object) -> bool: + if not isinstance(instance, str): + return True + return bool(isoduration.parse_duration(instance)) + + +@_checks_drafts( + draft201909="uuid", + draft202012="uuid", + raises=ValueError, +) +def is_uuid(instance: object) -> bool: + if not isinstance(instance, str): + return True + UUID(instance) + return all(instance[position] == "-" for position in (8, 13, 18, 23)) diff --git a/vendor/jsonschema/jsonschema/_legacy_validators.py b/vendor/jsonschema/jsonschema/_legacy_validators.py new file mode 100644 index 00000000..fa450537 --- /dev/null +++ b/vendor/jsonschema/jsonschema/_legacy_validators.py @@ -0,0 +1,228 @@ +from jsonschema import _utils +from jsonschema.exceptions import ValidationError + + +def ignore_ref_siblings(schema): + """ + Ignore siblings of ``$ref`` if it is present. + + Otherwise, return all keywords. + + Suitable for use with `create`'s ``applicable_validators`` argument. + """ + ref = schema.get("$ref") + if ref is not None: + return [("$ref", ref)] + else: + return schema.items() + + +def dependencies_draft3(validator, dependencies, instance, schema): + if not validator.is_type(instance, "object"): + return + + for property, dependency in dependencies.items(): + if property not in instance: + continue + + if validator.is_type(dependency, "object"): + yield from validator.descend( + instance, dependency, schema_path=property, + ) + elif validator.is_type(dependency, "string"): + if dependency not in instance: + message = f"{dependency!r} is a dependency of {property!r}" + yield ValidationError(message) + else: + for each in dependency: + if each not in instance: + message = f"{each!r} is a dependency of {property!r}" + yield ValidationError(message) + + +def dependencies_draft4_draft6_draft7( + validator, + dependencies, + instance, + schema, +): + """ + Support for the ``dependencies`` keyword from pre-draft 2019-09. + + In later drafts, the keyword was split into separate + ``dependentRequired`` and ``dependentSchemas`` validators. 
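To make the split described in that docstring concrete, here is a sketch of the same constraints spelled both ways; the property names are invented for illustration:

```python
from jsonschema import Draft7Validator, Draft201909Validator

# Pre-2019-09: one keyword covers both the array and the schema form.
draft7_schema = {
    "dependencies": {
        "credit_card": ["billing_address"],     # array form
        "shipping": {"required": ["address"]},  # schema form
    },
}

# 2019-09 and later: each form gets its own keyword.
draft2019_schema = {
    "dependentRequired": {"credit_card": ["billing_address"]},
    "dependentSchemas": {"shipping": {"required": ["address"]}},
}

instance = {"credit_card": "4111", "shipping": True}
print(Draft7Validator(draft7_schema).is_valid(instance))          # False
print(Draft201909Validator(draft2019_schema).is_valid(instance))  # False
```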
+ """ + if not validator.is_type(instance, "object"): + return + + for property, dependency in dependencies.items(): + if property not in instance: + continue + + if validator.is_type(dependency, "array"): + for each in dependency: + if each not in instance: + message = f"{each!r} is a dependency of {property!r}" + yield ValidationError(message) + else: + yield from validator.descend( + instance, dependency, schema_path=property, + ) + + +def disallow_draft3(validator, disallow, instance, schema): + for disallowed in _utils.ensure_list(disallow): + if validator.evolve(schema={"type": [disallowed]}).is_valid(instance): + message = f"{disallowed!r} is disallowed for {instance!r}" + yield ValidationError(message) + + +def extends_draft3(validator, extends, instance, schema): + if validator.is_type(extends, "object"): + yield from validator.descend(instance, extends) + return + for index, subschema in enumerate(extends): + yield from validator.descend(instance, subschema, schema_path=index) + + +def items_draft3_draft4(validator, items, instance, schema): + if not validator.is_type(instance, "array"): + return + + if validator.is_type(items, "object"): + for index, item in enumerate(instance): + yield from validator.descend(item, items, path=index) + else: + for (index, item), subschema in zip(enumerate(instance), items): + yield from validator.descend( + item, subschema, path=index, schema_path=index, + ) + + +def items_draft6_draft7_draft201909(validator, items, instance, schema): + if not validator.is_type(instance, "array"): + return + + if validator.is_type(items, "array"): + for (index, item), subschema in zip(enumerate(instance), items): + yield from validator.descend( + item, subschema, path=index, schema_path=index, + ) + else: + for index, item in enumerate(instance): + yield from validator.descend(item, items, path=index) + + +def minimum_draft3_draft4(validator, minimum, instance, schema): + if not validator.is_type(instance, "number"): + return + + if schema.get("exclusiveMinimum", False): + failed = instance <= minimum + cmp = "less than or equal to" + else: + failed = instance < minimum + cmp = "less than" + + if failed: + message = f"{instance!r} is {cmp} the minimum of {minimum!r}" + yield ValidationError(message) + + +def maximum_draft3_draft4(validator, maximum, instance, schema): + if not validator.is_type(instance, "number"): + return + + if schema.get("exclusiveMaximum", False): + failed = instance >= maximum + cmp = "greater than or equal to" + else: + failed = instance > maximum + cmp = "greater than" + + if failed: + message = f"{instance!r} is {cmp} the maximum of {maximum!r}" + yield ValidationError(message) + + +def properties_draft3(validator, properties, instance, schema): + if not validator.is_type(instance, "object"): + return + + for property, subschema in properties.items(): + if property in instance: + yield from validator.descend( + instance[property], + subschema, + path=property, + schema_path=property, + ) + elif subschema.get("required", False): + error = ValidationError(f"{property!r} is a required property") + error._set( + validator="required", + validator_value=subschema["required"], + instance=instance, + schema=schema, + ) + error.path.appendleft(property) + error.schema_path.extend([property, "required"]) + yield error + + +def type_draft3(validator, types, instance, schema): + types = _utils.ensure_list(types) + + all_errors = [] + for index, type in enumerate(types): + if validator.is_type(type, "object"): + errors = 
list(validator.descend(instance, type, schema_path=index)) + if not errors: + return + all_errors.extend(errors) + else: + if validator.is_type(instance, type): + return + else: + reprs = [] + for type in types: + try: + reprs.append(repr(type["name"])) + except Exception: + reprs.append(repr(type)) + yield ValidationError( + f"{instance!r} is not of type {', '.join(reprs)}", + context=all_errors, + ) + + +def contains_draft6_draft7(validator, contains, instance, schema): + if not validator.is_type(instance, "array"): + return + + if not any( + validator.evolve(schema=contains).is_valid(element) + for element in instance + ): + yield ValidationError( + f"None of {instance!r} are valid under the given schema", + ) + + +def recursiveRef(validator, recursiveRef, instance, schema): + lookup_url, target = validator.resolver.resolution_scope, validator.schema + + for each in reversed(validator.resolver._scopes_stack[1:]): + lookup_url, next_target = validator.resolver.resolve(each) + if next_target.get("$recursiveAnchor"): + target = next_target + else: + break + + fragment = recursiveRef.lstrip("#") + subschema = validator.resolver.resolve_fragment(target, fragment) + # FIXME: This is gutted (and not calling .descend) because it can trigger + # recursion errors, so there's a bug here. Re-enable the tests to + # see it. + subschema + return [] diff --git a/vendor/jsonschema/jsonschema/_types.py b/vendor/jsonschema/jsonschema/_types.py new file mode 100644 index 00000000..9d59eb32 --- /dev/null +++ b/vendor/jsonschema/jsonschema/_types.py @@ -0,0 +1,217 @@ +from __future__ import annotations + +import numbers +import typing + +from pyrsistent import pmap +import attr + +from jsonschema.exceptions import UndefinedTypeCheck + + +# unfortunately, the type of pmap is generic, and if used as the attr.ib +# converter, the generic type is presented to mypy, which then fails to match +# the concrete type of a type checker mapping +# this "do nothing" wrapper presents the correct information to mypy +def _typed_pmap_converter( + init_val: typing.Mapping[ + str, + typing.Callable[["TypeChecker", typing.Any], bool], + ], +) -> typing.Mapping[str, typing.Callable[["TypeChecker", typing.Any], bool]]: + return typing.cast( + typing.Mapping[ + str, + typing.Callable[["TypeChecker", typing.Any], bool], + ], + pmap(init_val), + ) + + +def is_array(checker, instance): + return isinstance(instance, list) + + +def is_bool(checker, instance): + return isinstance(instance, bool) + + +def is_integer(checker, instance): + # bool inherits from int, so ensure bools aren't reported as ints + if isinstance(instance, bool): + return False + return isinstance(instance, int) + + +def is_null(checker, instance): + return instance is None + + +def is_number(checker, instance): + # bool inherits from int, so ensure bools aren't reported as ints + if isinstance(instance, bool): + return False + return isinstance(instance, numbers.Number) + + +def is_object(checker, instance): + return isinstance(instance, dict) + + +def is_string(checker, instance): + return isinstance(instance, str) + + +def is_any(checker, instance): + return True + + +@attr.s(frozen=True) +class TypeChecker(object): + """ + A ``type`` property checker. + + A `TypeChecker` performs type checking for a `Validator`. Type + checks to perform are updated using `TypeChecker.redefine` or + `TypeChecker.redefine_many` and removed via `TypeChecker.remove`. + Each of these return a new `TypeChecker` object. 
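Since `redefine`, `redefine_many` and `remove` all return fresh checkers, customizing type checking never mutates the stock validators. A minimal sketch under that assumption, treating bytes as an acceptable "string":

```python
from jsonschema import Draft202012Validator, validators

def is_bytes_or_str(checker, instance):
    return isinstance(instance, (str, bytes))

# redefine() hands back a brand-new TypeChecker; the stock one is untouched.
checker = Draft202012Validator.TYPE_CHECKER.redefine("string", is_bytes_or_str)
BytesValidator = validators.extend(Draft202012Validator, type_checker=checker)

print(BytesValidator({"type": "string"}).is_valid(b"raw"))        # True
print(Draft202012Validator({"type": "string"}).is_valid(b"raw"))  # False
```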
+ + Arguments: + + type_checkers (dict): + + The initial mapping of types to their checking functions. + """ + + _type_checkers: typing.Mapping[ + str, typing.Callable[["TypeChecker", typing.Any], bool], + ] = attr.ib( + default=pmap(), + converter=_typed_pmap_converter, + ) + + def is_type(self, instance, type): + """ + Check if the instance is of the appropriate type. + + Arguments: + + instance (object): + + The instance to check + + type (str): + + The name of the type that is expected. + + Returns: + + bool: Whether it conformed. + + + Raises: + + `jsonschema.exceptions.UndefinedTypeCheck`: + if type is unknown to this object. + """ + try: + fn = self._type_checkers[type] + except KeyError: + raise UndefinedTypeCheck(type) from None + + return fn(self, instance) + + def redefine(self, type, fn): + """ + Produce a new checker with the given type redefined. + + Arguments: + + type (str): + + The name of the type to check. + + fn (collections.abc.Callable): + + A function taking exactly two parameters - the type + checker calling the function and the instance to check. + The function should return true if instance is of this + type and false otherwise. + + Returns: + + A new `TypeChecker` instance. + """ + return self.redefine_many({type: fn}) + + def redefine_many(self, definitions=()): + """ + Produce a new checker with the given types redefined. + + Arguments: + + definitions (dict): + + A dictionary mapping types to their checking functions. + + Returns: + + A new `TypeChecker` instance. + """ + return attr.evolve( + self, type_checkers=self._type_checkers.update(definitions), + ) + + def remove(self, *types): + """ + Produce a new checker with the given types forgotten. + + Arguments: + + types (~collections.abc.Iterable): + + the names of the types to remove. + + Returns: + + A new `TypeChecker` instance + + Raises: + + `jsonschema.exceptions.UndefinedTypeCheck`: + + if any given type is unknown to this object + """ + + checkers = self._type_checkers + for each in types: + try: + checkers = checkers.remove(each) + except KeyError: + raise UndefinedTypeCheck(each) + return attr.evolve(self, type_checkers=checkers) + + +draft3_type_checker = TypeChecker( + { + "any": is_any, + "array": is_array, + "boolean": is_bool, + "integer": is_integer, + "object": is_object, + "null": is_null, + "number": is_number, + "string": is_string, + }, +) +draft4_type_checker = draft3_type_checker.remove("any") +draft6_type_checker = draft4_type_checker.redefine( + "integer", + lambda checker, instance: ( + is_integer(checker, instance) + or isinstance(instance, float) and instance.is_integer() + ), +) +draft7_type_checker = draft6_type_checker +draft201909_type_checker = draft7_type_checker +draft202012_type_checker = draft201909_type_checker diff --git a/vendor/jsonschema/jsonschema/_utils.py b/vendor/jsonschema/jsonschema/_utils.py new file mode 100644 index 00000000..687b95a1 --- /dev/null +++ b/vendor/jsonschema/jsonschema/_utils.py @@ -0,0 +1,343 @@ +from collections.abc import Mapping, MutableMapping, Sequence +from urllib.parse import urlsplit +import itertools +import json +import re +import sys + +from importlib import resources + + +class URIDict(MutableMapping): + """ + Dictionary which uses normalized URIs as keys. 
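The normalization is simply a round-trip through `urlsplit(...).geturl()`, so keys that reparse to the same components land on the same entry, as in this small sketch:

```python
from urllib.parse import urlsplit

# urlsplit lowercases the scheme, so both spellings normalize identically.
print(urlsplit("HTTP://example.com/schema.json").geturl())
print(urlsplit("http://example.com/schema.json").geturl())
# -> http://example.com/schema.json, twice
```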
+ """ + + def normalize(self, uri): + return urlsplit(uri).geturl() + + def __init__(self, *args, **kwargs): + self.store = dict() + self.store.update(*args, **kwargs) + + def __getitem__(self, uri): + return self.store[self.normalize(uri)] + + def __setitem__(self, uri, value): + self.store[self.normalize(uri)] = value + + def __delitem__(self, uri): + del self.store[self.normalize(uri)] + + def __iter__(self): + return iter(self.store) + + def __len__(self): + return len(self.store) + + def __repr__(self): + return repr(self.store) + + +class Unset(object): + """ + An as-of-yet unset attribute or unprovided default parameter. + """ + + def __repr__(self): + return "" + + +def load_schema(name): + """ + Load a schema from ./schemas/``name``.json and return it. + """ + from . import __name__ + return json.loads(resources.read_text(f"{__name__}.schemas", f"{name}.json")) + + +def format_as_index(container, indices): + """ + Construct a single string containing indexing operations for the indices. + + For example for a container ``bar``, [1, 2, "foo"] -> bar[1][2]["foo"] + + Arguments: + + container (str): + + A word to use for the thing being indexed + + indices (sequence): + + The indices to format. + """ + + if not indices: + return container + return f"{container}[{']['.join(repr(index) for index in indices)}]" + + +def find_additional_properties(instance, schema): + """ + Return the set of additional properties for the given ``instance``. + + Weeds out properties that should have been validated by ``properties`` and + / or ``patternProperties``. + + Assumes ``instance`` is dict-like already. + """ + + properties = schema.get("properties", {}) + patterns = "|".join(schema.get("patternProperties", {})) + for property in instance: + if property not in properties: + if patterns and re.search(patterns, property): + continue + yield property + + +def extras_msg(extras): + """ + Create an error message for extra items or properties. + """ + + if len(extras) == 1: + verb = "was" + else: + verb = "were" + return ", ".join(repr(extra) for extra in sorted(extras)), verb + + +def ensure_list(thing): + """ + Wrap ``thing`` in a list if it's a single str. + + Otherwise, return it unchanged. + """ + + if isinstance(thing, str): + return [thing] + return thing + + +def _mapping_equal(one, two): + """ + Check if two mappings are equal using the semantics of `equal`. + """ + if len(one) != len(two): + return False + return all( + key in two and equal(value, two[key]) + for key, value in one.items() + ) + + +def _sequence_equal(one, two): + """ + Check if two sequences are equal using the semantics of `equal`. + """ + if len(one) != len(two): + return False + return all(equal(i, j) for i, j in zip(one, two)) + + +def equal(one, two): + """ + Check if two things are equal evading some Python type hierarchy semantics. + + Specifically in JSON Schema, evade `bool` inheriting from `int`, + recursing into sequences to do the same. + """ + if isinstance(one, str) or isinstance(two, str): + return one == two + if isinstance(one, Sequence) and isinstance(two, Sequence): + return _sequence_equal(one, two) + if isinstance(one, Mapping) and isinstance(two, Mapping): + return _mapping_equal(one, two) + return unbool(one) == unbool(two) + + +def unbool(element, true=object(), false=object()): + """ + A hack to make True and 1 and False and 0 unique for ``uniq``. 
+ """ + + if element is True: + return true + elif element is False: + return false + return element + + +def uniq(container): + """ + Check if all of a container's elements are unique. + + Tries to rely on the container being recursively sortable, or otherwise + falls back on (slow) brute force. + """ + try: + sort = sorted(unbool(i) for i in container) + sliced = itertools.islice(sort, 1, None) + + for i, j in zip(sort, sliced): + if equal(i, j): + return False + + except (NotImplementedError, TypeError): + seen = [] + for e in container: + e = unbool(e) + + for i in seen: + if equal(i, e): + return False + + seen.append(e) + return True + + +def find_evaluated_item_indexes_by_schema(validator, instance, schema): + """ + Get all indexes of items that get evaluated under the current schema + + Covers all keywords related to unevaluatedItems: items, prefixItems, if, + then, else, contains, unevaluatedItems, allOf, oneOf, anyOf + """ + if validator.is_type(schema, "boolean"): + return [] + evaluated_indexes = [] + + if "items" in schema: + return list(range(0, len(instance))) + + if "$ref" in schema: + scope, resolved = validator.resolver.resolve(schema["$ref"]) + validator.resolver.push_scope(scope) + + try: + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, resolved, + ) + finally: + validator.resolver.pop_scope() + + if "prefixItems" in schema: + evaluated_indexes += list(range(0, len(schema["prefixItems"]))) + + if "if" in schema: + if validator.evolve(schema=schema["if"]).is_valid(instance): + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, schema["if"], + ) + if "then" in schema: + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, schema["then"], + ) + else: + if "else" in schema: + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, schema["else"], + ) + + for keyword in ["contains", "unevaluatedItems"]: + if keyword in schema: + for k, v in enumerate(instance): + if validator.evolve(schema=schema[keyword]).is_valid(v): + evaluated_indexes.append(k) + + for keyword in ["allOf", "oneOf", "anyOf"]: + if keyword in schema: + for subschema in schema[keyword]: + errs = list(validator.descend(instance, subschema)) + if not errs: + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, subschema, + ) + + return evaluated_indexes + + +def find_evaluated_property_keys_by_schema(validator, instance, schema): + """ + Get all keys of items that get evaluated under the current schema + + Covers all keywords related to unevaluatedProperties: properties, + additionalProperties, unevaluatedProperties, patternProperties, + dependentSchemas, allOf, oneOf, anyOf, if, then, else + """ + if validator.is_type(schema, "boolean"): + return [] + evaluated_keys = [] + + if "$ref" in schema: + scope, resolved = validator.resolver.resolve(schema["$ref"]) + validator.resolver.push_scope(scope) + + try: + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, resolved, + ) + finally: + validator.resolver.pop_scope() + + for keyword in [ + "properties", "additionalProperties", "unevaluatedProperties", + ]: + if keyword in schema: + if validator.is_type(schema[keyword], "boolean"): + for property, value in instance.items(): + if validator.evolve(schema=schema[keyword]).is_valid( + {property: value}, + ): + evaluated_keys.append(property) + + if validator.is_type(schema[keyword], "object"): + for property, subschema in 
schema[keyword].items(): + if property in instance and validator.evolve( + schema=subschema, + ).is_valid(instance[property]): + evaluated_keys.append(property) + + if "patternProperties" in schema: + for property, value in instance.items(): + for pattern, _ in schema["patternProperties"].items(): + if re.search(pattern, property) and validator.evolve( + schema=schema["patternProperties"], + ).is_valid({property: value}): + evaluated_keys.append(property) + + if "dependentSchemas" in schema: + for property, subschema in schema["dependentSchemas"].items(): + if property not in instance: + continue + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, subschema, + ) + + for keyword in ["allOf", "oneOf", "anyOf"]: + if keyword in schema: + for subschema in schema[keyword]: + errs = list(validator.descend(instance, subschema)) + if not errs: + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, subschema, + ) + + if "if" in schema: + if validator.evolve(schema=schema["if"]).is_valid(instance): + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, schema["if"], + ) + if "then" in schema: + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, schema["then"], + ) + else: + if "else" in schema: + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, schema["else"], + ) + + return evaluated_keys diff --git a/vendor/jsonschema/jsonschema/_validators.py b/vendor/jsonschema/jsonschema/_validators.py new file mode 100644 index 00000000..874e8796 --- /dev/null +++ b/vendor/jsonschema/jsonschema/_validators.py @@ -0,0 +1,467 @@ +from fractions import Fraction +from urllib.parse import urldefrag, urljoin +import re + +from jsonschema._utils import ( + ensure_list, + equal, + extras_msg, + find_additional_properties, + find_evaluated_item_indexes_by_schema, + find_evaluated_property_keys_by_schema, + unbool, + uniq, +) +from jsonschema.exceptions import FormatError, ValidationError + + +def patternProperties(validator, patternProperties, instance, schema): + if not validator.is_type(instance, "object"): + return + + for pattern, subschema in patternProperties.items(): + for k, v in instance.items(): + if re.search(pattern, k): + yield from validator.descend( + v, subschema, path=k, schema_path=pattern, + ) + + +def propertyNames(validator, propertyNames, instance, schema): + if not validator.is_type(instance, "object"): + return + + for property in instance: + yield from validator.descend(instance=property, schema=propertyNames) + + +def additionalProperties(validator, aP, instance, schema): + if not validator.is_type(instance, "object"): + return + + extras = set(find_additional_properties(instance, schema)) + + if validator.is_type(aP, "object"): + for extra in extras: + yield from validator.descend(instance[extra], aP, path=extra) + elif not aP and extras: + if "patternProperties" in schema: + if len(extras) == 1: + verb = "does" + else: + verb = "do" + + joined = ", ".join(repr(each) for each in sorted(extras)) + patterns = ", ".join( + repr(each) for each in sorted(schema["patternProperties"]) + ) + error = f"{joined} {verb} not match any of the regexes: {patterns}" + yield ValidationError(error) + else: + error = "Additional properties are not allowed (%s %s unexpected)" + yield ValidationError(error % extras_msg(extras)) + + +def items(validator, items, instance, schema): + if not validator.is_type(instance, "array"): + return + + prefix = 
len(schema.get("prefixItems", [])) + total = len(instance) + if items is False and total > prefix: + message = f"Expected at most {prefix} items, but found {total}" + yield ValidationError(message) + else: + for index in range(prefix, total): + yield from validator.descend( + instance=instance[index], + schema=items, + path=index, + ) + + +def additionalItems(validator, aI, instance, schema): + if ( + not validator.is_type(instance, "array") + or validator.is_type(schema.get("items", {}), "object") + ): + return + + len_items = len(schema.get("items", [])) + if validator.is_type(aI, "object"): + for index, item in enumerate(instance[len_items:], start=len_items): + yield from validator.descend(item, aI, path=index) + elif not aI and len(instance) > len(schema.get("items", [])): + error = "Additional items are not allowed (%s %s unexpected)" + yield ValidationError( + error % extras_msg(instance[len(schema.get("items", [])):]), + ) + + +def const(validator, const, instance, schema): + if not equal(instance, const): + yield ValidationError(f"{const!r} was expected") + + +def contains(validator, contains, instance, schema): + if not validator.is_type(instance, "array"): + return + + matches = 0 + min_contains = schema.get("minContains", 1) + max_contains = schema.get("maxContains", len(instance)) + + for each in instance: + if validator.evolve(schema=contains).is_valid(each): + matches += 1 + if matches > max_contains: + yield ValidationError( + "Too many items match the given schema " + f"(expected at most {max_contains})", + validator="maxContains", + validator_value=max_contains, + ) + return + + if matches < min_contains: + if not matches: + yield ValidationError( + f"{instance!r} does not contain items " + "matching the given schema", + ) + else: + yield ValidationError( + "Too few items match the given schema (expected at least " + f"{min_contains} but only {matches} matched)", + validator="minContains", + validator_value=min_contains, + ) + + +def exclusiveMinimum(validator, minimum, instance, schema): + if not validator.is_type(instance, "number"): + return + + if instance <= minimum: + yield ValidationError( + f"{instance!r} is less than or equal to " + f"the minimum of {minimum!r}", + ) + + +def exclusiveMaximum(validator, maximum, instance, schema): + if not validator.is_type(instance, "number"): + return + + if instance >= maximum: + yield ValidationError( + f"{instance!r} is greater than or equal " + f"to the maximum of {maximum!r}", + ) + + +def minimum(validator, minimum, instance, schema): + if not validator.is_type(instance, "number"): + return + + if instance < minimum: + message = f"{instance!r} is less than the minimum of {minimum!r}" + yield ValidationError(message) + + +def maximum(validator, maximum, instance, schema): + if not validator.is_type(instance, "number"): + return + + if instance > maximum: + message = f"{instance!r} is greater than the maximum of {maximum!r}" + yield ValidationError(message) + + +def multipleOf(validator, dB, instance, schema): + if not validator.is_type(instance, "number"): + return + + if isinstance(dB, float): + quotient = instance / dB + try: + failed = int(quotient) != quotient + except OverflowError: + # When `instance` is large and `dB` is less than one, + # quotient can overflow to infinity; and then casting to int + # raises an error. + # + # In this case we fall back to Fraction logic, which is + # exact and cannot overflow. 
The performance is also + # acceptable: we try the fast all-float option first, and + # we know that fraction(dB) can have at most a few hundred + # digits in each part. The worst-case slowdown is therefore + # for already-slow enormous integers or Decimals. + failed = (Fraction(instance) / Fraction(dB)).denominator != 1 + else: + failed = instance % dB + + if failed: + yield ValidationError(f"{instance!r} is not a multiple of {dB}") + + +def minItems(validator, mI, instance, schema): + if validator.is_type(instance, "array") and len(instance) < mI: + yield ValidationError(f"{instance!r} is too short") + + +def maxItems(validator, mI, instance, schema): + if validator.is_type(instance, "array") and len(instance) > mI: + yield ValidationError(f"{instance!r} is too long") + + +def uniqueItems(validator, uI, instance, schema): + if ( + uI + and validator.is_type(instance, "array") + and not uniq(instance) + ): + yield ValidationError(f"{instance!r} has non-unique elements") + + +def pattern(validator, patrn, instance, schema): + if ( + validator.is_type(instance, "string") + and not re.search(patrn, instance) + ): + yield ValidationError(f"{instance!r} does not match {patrn!r}") + + +def format(validator, format, instance, schema): + if validator.format_checker is not None: + try: + validator.format_checker.check(instance, format) + except FormatError as error: + yield ValidationError(error.message, cause=error.cause) + + +def minLength(validator, mL, instance, schema): + if validator.is_type(instance, "string") and len(instance) < mL: + yield ValidationError(f"{instance!r} is too short") + + +def maxLength(validator, mL, instance, schema): + if validator.is_type(instance, "string") and len(instance) > mL: + yield ValidationError(f"{instance!r} is too long") + + +def dependentRequired(validator, dependentRequired, instance, schema): + if not validator.is_type(instance, "object"): + return + + for property, dependency in dependentRequired.items(): + if property not in instance: + continue + + for each in dependency: + if each not in instance: + message = f"{each!r} is a dependency of {property!r}" + yield ValidationError(message) + + +def dependentSchemas(validator, dependentSchemas, instance, schema): + if not validator.is_type(instance, "object"): + return + + for property, dependency in dependentSchemas.items(): + if property not in instance: + continue + yield from validator.descend( + instance, dependency, schema_path=property, + ) + + +def enum(validator, enums, instance, schema): + if instance == 0 or instance == 1: + unbooled = unbool(instance) + if all(unbooled != unbool(each) for each in enums): + yield ValidationError(f"{instance!r} is not one of {enums!r}") + elif instance not in enums: + yield ValidationError(f"{instance!r} is not one of {enums!r}") + + +def ref(validator, ref, instance, schema): + resolve = getattr(validator.resolver, "resolve", None) + if resolve is None: + with validator.resolver.resolving(ref) as resolved: + yield from validator.descend(instance, resolved) + else: + scope, resolved = validator.resolver.resolve(ref) + validator.resolver.push_scope(scope) + + try: + yield from validator.descend(instance, resolved) + finally: + validator.resolver.pop_scope() + + +def dynamicRef(validator, dynamicRef, instance, schema): + _, fragment = urldefrag(dynamicRef) + + for url in validator.resolver._scopes_stack: + lookup_url = urljoin(url, dynamicRef) + with validator.resolver.resolving(lookup_url) as subschema: + if ("$dynamicAnchor" in subschema + and fragment == 
subschema["$dynamicAnchor"]): + yield from validator.descend(instance, subschema) + break + else: + with validator.resolver.resolving(dynamicRef) as subschema: + yield from validator.descend(instance, subschema) + + +def type(validator, types, instance, schema): + types = ensure_list(types) + + if not any(validator.is_type(instance, type) for type in types): + reprs = ", ".join(repr(type) for type in types) + yield ValidationError(f"{instance!r} is not of type {reprs}") + + +def properties(validator, properties, instance, schema): + if not validator.is_type(instance, "object"): + return + + for property, subschema in properties.items(): + if property in instance: + yield from validator.descend( + instance[property], + subschema, + path=property, + schema_path=property, + ) + + +def required(validator, required, instance, schema): + if not validator.is_type(instance, "object"): + return + for property in required: + if property not in instance: + yield ValidationError(f"{property!r} is a required property") + + +def minProperties(validator, mP, instance, schema): + if validator.is_type(instance, "object") and len(instance) < mP: + yield ValidationError(f"{instance!r} does not have enough properties") + + +def maxProperties(validator, mP, instance, schema): + if not validator.is_type(instance, "object"): + return + if validator.is_type(instance, "object") and len(instance) > mP: + yield ValidationError(f"{instance!r} has too many properties") + + +def allOf(validator, allOf, instance, schema): + for index, subschema in enumerate(allOf): + yield from validator.descend(instance, subschema, schema_path=index) + + +def anyOf(validator, anyOf, instance, schema): + all_errors = [] + for index, subschema in enumerate(anyOf): + errs = list(validator.descend(instance, subschema, schema_path=index)) + if not errs: + break + all_errors.extend(errs) + else: + yield ValidationError( + f"{instance!r} is not valid under any of the given schemas", + context=all_errors, + ) + + +def oneOf(validator, oneOf, instance, schema): + subschemas = enumerate(oneOf) + all_errors = [] + for index, subschema in subschemas: + errs = list(validator.descend(instance, subschema, schema_path=index)) + if not errs: + first_valid = subschema + break + all_errors.extend(errs) + else: + yield ValidationError( + f"{instance!r} is not valid under any of the given schemas", + context=all_errors, + ) + + more_valid = [ + each for _, each in subschemas + if validator.evolve(schema=each).is_valid(instance) + ] + if more_valid: + more_valid.append(first_valid) + reprs = ", ".join(repr(schema) for schema in more_valid) + yield ValidationError(f"{instance!r} is valid under each of {reprs}") + + +def not_(validator, not_schema, instance, schema): + if validator.evolve(schema=not_schema).is_valid(instance): + message = f"{instance!r} should not be valid under {not_schema!r}" + yield ValidationError(message) + + +def if_(validator, if_schema, instance, schema): + if validator.evolve(schema=if_schema).is_valid(instance): + if "then" in schema: + then = schema["then"] + yield from validator.descend(instance, then, schema_path="then") + elif "else" in schema: + else_ = schema["else"] + yield from validator.descend(instance, else_, schema_path="else") + + +def unevaluatedItems(validator, unevaluatedItems, instance, schema): + if not validator.is_type(instance, "array"): + return + evaluated_item_indexes = find_evaluated_item_indexes_by_schema( + validator, instance, schema, + ) + unevaluated_items = [ + item for index, item in 
enumerate(instance) + if index not in evaluated_item_indexes + ] + if unevaluated_items: + error = "Unevaluated items are not allowed (%s %s unexpected)" + yield ValidationError(error % extras_msg(unevaluated_items)) + + +def unevaluatedProperties(validator, unevaluatedProperties, instance, schema): + if not validator.is_type(instance, "object"): + return + evaluated_property_keys = find_evaluated_property_keys_by_schema( + validator, instance, schema, + ) + unevaluated_property_keys = [] + for property in instance: + if property not in evaluated_property_keys: + for _ in validator.descend( + instance[property], + unevaluatedProperties, + path=property, + schema_path=property, + ): + unevaluated_property_keys.append(property) + + if unevaluated_property_keys: + error = "Unevaluated properties are not allowed (%s %s unexpected)" + yield ValidationError(error % extras_msg(unevaluated_property_keys)) + + +def prefixItems(validator, prefixItems, instance, schema): + if not validator.is_type(instance, "array"): + return + + for (index, item), subschema in zip(enumerate(instance), prefixItems): + yield from validator.descend( + instance=item, + schema=subschema, + schema_path=index, + path=index, + ) diff --git a/vendor/poetry-core/poetry/core/_vendor/jsonschema/benchmarks/__init__.py b/vendor/jsonschema/jsonschema/benchmarks/__init__.py similarity index 100% rename from vendor/poetry-core/poetry/core/_vendor/jsonschema/benchmarks/__init__.py rename to vendor/jsonschema/jsonschema/benchmarks/__init__.py diff --git a/vendor/jsonschema/jsonschema/benchmarks/issue232.py b/vendor/jsonschema/jsonschema/benchmarks/issue232.py new file mode 100644 index 00000000..bf357e91 --- /dev/null +++ b/vendor/jsonschema/jsonschema/benchmarks/issue232.py @@ -0,0 +1,25 @@ +""" +A performance benchmark using the example from issue #232. + +See https://github.com/python-jsonschema/jsonschema/pull/232. +""" +from pathlib import Path + +from pyperf import Runner +from pyrsistent import m + +from jsonschema.tests._suite import Version +import jsonschema + +issue232 = Version( + path=Path(__file__).parent / "issue232", + remotes=m(), + name="issue232", +) + + +if __name__ == "__main__": + issue232.benchmark( + runner=Runner(), + Validator=jsonschema.Draft4Validator, + ) diff --git a/vendor/jsonschema/jsonschema/benchmarks/issue232/issue.json b/vendor/jsonschema/jsonschema/benchmarks/issue232/issue.json new file mode 100644 index 00000000..804c3408 --- /dev/null +++ b/vendor/jsonschema/jsonschema/benchmarks/issue232/issue.json @@ -0,0 +1,2653 @@ +[ + { + "description": "Petstore", + "schema": { + "title": "A JSON Schema for Swagger 2.0 API.", + "id": "http://swagger.io/v2/schema.json#", + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "required": [ + "swagger", + "info", + "paths" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "swagger": { + "type": "string", + "enum": [ + "2.0" + ], + "description": "The Swagger version of this document." + }, + "info": { + "$ref": "#/definitions/info" + }, + "host": { + "type": "string", + "pattern": "^[^{}/ :\\\\]+(?::\\d+)?$", + "description": "The host (name or ip) of the API. Example: 'swagger.io'" + }, + "basePath": { + "type": "string", + "pattern": "^/", + "description": "The base path to the API. Example: '/api'." 
+ }, + "schemes": { + "$ref": "#/definitions/schemesList" + }, + "consumes": { + "description": "A list of MIME types accepted by the API.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "produces": { + "description": "A list of MIME types the API can produce.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "paths": { + "$ref": "#/definitions/paths" + }, + "definitions": { + "$ref": "#/definitions/definitions" + }, + "parameters": { + "$ref": "#/definitions/parameterDefinitions" + }, + "responses": { + "$ref": "#/definitions/responseDefinitions" + }, + "security": { + "$ref": "#/definitions/security" + }, + "securityDefinitions": { + "$ref": "#/definitions/securityDefinitions" + }, + "tags": { + "type": "array", + "items": { + "$ref": "#/definitions/tag" + }, + "uniqueItems": true + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + } + }, + "definitions": { + "info": { + "type": "object", + "description": "General information about the API.", + "required": [ + "version", + "title" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "title": { + "type": "string", + "description": "A unique and precise title of the API." + }, + "version": { + "type": "string", + "description": "A semantic version number of the API." + }, + "description": { + "type": "string", + "description": "A longer description of the API. Should be different from the title. GitHub Flavored Markdown is allowed." + }, + "termsOfService": { + "type": "string", + "description": "The terms of service for the API." + }, + "contact": { + "$ref": "#/definitions/contact" + }, + "license": { + "$ref": "#/definitions/license" + } + } + }, + "contact": { + "type": "object", + "description": "Contact information for the owners of the API.", + "additionalProperties": false, + "properties": { + "name": { + "type": "string", + "description": "The identifying name of the contact person/organization." + }, + "url": { + "type": "string", + "description": "The URL pointing to the contact information.", + "format": "uri" + }, + "email": { + "type": "string", + "description": "The email address of the contact person/organization.", + "format": "email" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "license": { + "type": "object", + "required": [ + "name" + ], + "additionalProperties": false, + "properties": { + "name": { + "type": "string", + "description": "The name of the license type. It's encouraged to use an OSI compatible license." + }, + "url": { + "type": "string", + "description": "The URL pointing to the license.", + "format": "uri" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "paths": { + "type": "object", + "description": "Relative paths to the individual endpoints. They must be relative to the 'basePath'.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + }, + "^/": { + "$ref": "#/definitions/pathItem" + } + }, + "additionalProperties": false + }, + "definitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/schema" + }, + "description": "One or more JSON objects describing the schemas being consumed and produced by the API." 
+ }, + "parameterDefinitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/parameter" + }, + "description": "One or more JSON representations for parameters" + }, + "responseDefinitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/response" + }, + "description": "One or more JSON representations for parameters" + }, + "externalDocs": { + "type": "object", + "additionalProperties": false, + "description": "information about external documentation", + "required": [ + "url" + ], + "properties": { + "description": { + "type": "string" + }, + "url": { + "type": "string", + "format": "uri" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "examples": { + "type": "object", + "additionalProperties": true + }, + "mimeType": { + "type": "string", + "description": "The MIME type of the HTTP message." + }, + "operation": { + "type": "object", + "required": [ + "responses" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "tags": { + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + }, + "summary": { + "type": "string", + "description": "A brief summary of the operation." + }, + "description": { + "type": "string", + "description": "A longer description of the operation, GitHub Flavored Markdown is allowed." + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "operationId": { + "type": "string", + "description": "A unique identifier of the operation." + }, + "produces": { + "description": "A list of MIME types the API can produce.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "consumes": { + "description": "A list of MIME types the API can consume.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "parameters": { + "$ref": "#/definitions/parametersList" + }, + "responses": { + "$ref": "#/definitions/responses" + }, + "schemes": { + "$ref": "#/definitions/schemesList" + }, + "deprecated": { + "type": "boolean", + "default": false + }, + "security": { + "$ref": "#/definitions/security" + } + } + }, + "pathItem": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "$ref": { + "type": "string" + }, + "get": { + "$ref": "#/definitions/operation" + }, + "put": { + "$ref": "#/definitions/operation" + }, + "post": { + "$ref": "#/definitions/operation" + }, + "delete": { + "$ref": "#/definitions/operation" + }, + "options": { + "$ref": "#/definitions/operation" + }, + "head": { + "$ref": "#/definitions/operation" + }, + "patch": { + "$ref": "#/definitions/operation" + }, + "parameters": { + "$ref": "#/definitions/parametersList" + } + } + }, + "responses": { + "type": "object", + "description": "Response objects names can either be any valid HTTP status code or 'default'.", + "minProperties": 1, + "additionalProperties": false, + "patternProperties": { + "^([0-9]{3})$|^(default)$": { + "$ref": "#/definitions/responseValue" + }, + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "not": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + } + }, + "responseValue": { + "oneOf": [ + { + "$ref": "#/definitions/response" + }, + { + "$ref": "#/definitions/jsonReference" + } + ] + }, + "response": { + 
"type": "object", + "required": [ + "description" + ], + "properties": { + "description": { + "type": "string" + }, + "schema": { + "oneOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "$ref": "#/definitions/fileSchema" + } + ] + }, + "headers": { + "$ref": "#/definitions/headers" + }, + "examples": { + "$ref": "#/definitions/examples" + } + }, + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "headers": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/header" + } + }, + "header": { + "type": "object", + "additionalProperties": false, + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "string", + "number", + "integer", + "boolean", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "vendorExtension": { + "description": "Any property starting with x- is valid.", + "additionalProperties": true, + "additionalItems": true + }, + "bodyParameter": { + "type": "object", + "required": [ + "name", + "in", + "schema" + ], + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "body" + ] + }, + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "schema": { + "$ref": "#/definitions/schema" + } + }, + "additionalProperties": false + }, + "headerParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "header" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." 
+ }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "queryParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "query" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "allowEmptyValue": { + "type": "boolean", + "default": false, + "description": "allows sending a parameter by name only or with an empty value." + }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormatWithMulti" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "formDataParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "formData" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. 
GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "allowEmptyValue": { + "type": "boolean", + "default": false, + "description": "allows sending a parameter by name only or with an empty value." + }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array", + "file" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormatWithMulti" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "pathParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "required": [ + "required" + ], + "properties": { + "required": { + "type": "boolean", + "enum": [ + true + ], + "description": "Determines whether or not this parameter is required or optional." + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "path" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." 
+ }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "nonBodyParameter": { + "type": "object", + "required": [ + "name", + "in", + "type" + ], + "oneOf": [ + { + "$ref": "#/definitions/headerParameterSubSchema" + }, + { + "$ref": "#/definitions/formDataParameterSubSchema" + }, + { + "$ref": "#/definitions/queryParameterSubSchema" + }, + { + "$ref": "#/definitions/pathParameterSubSchema" + } + ] + }, + "parameter": { + "oneOf": [ + { + "$ref": "#/definitions/bodyParameter" + }, + { + "$ref": "#/definitions/nonBodyParameter" + } + ] + }, + "schema": { + "type": "object", + "description": "A deterministic version of a JSON Schema object.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "$ref": { + "type": "string" + }, + "format": { + "type": "string" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "multipleOf": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf" + }, + "maximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum" + }, + "exclusiveMaximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum" + }, + "minimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum" + }, + "exclusiveMinimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum" + }, + "maxLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "pattern": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern" + }, + "maxItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "uniqueItems": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems" + }, + "maxProperties": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minProperties": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "required": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/stringArray" + }, + "enum": { + "$ref": 
"http://json-schema.org/draft-04/schema#/properties/enum" + }, + "additionalProperties": { + "anyOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "type": "boolean" + } + ], + "default": {} + }, + "type": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/type" + }, + "items": { + "anyOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "type": "array", + "minItems": 1, + "items": { + "$ref": "#/definitions/schema" + } + } + ], + "default": {} + }, + "allOf": { + "type": "array", + "minItems": 1, + "items": { + "$ref": "#/definitions/schema" + } + }, + "properties": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/schema" + }, + "default": {} + }, + "discriminator": { + "type": "string" + }, + "readOnly": { + "type": "boolean", + "default": false + }, + "xml": { + "$ref": "#/definitions/xml" + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "example": {} + }, + "additionalProperties": false + }, + "fileSchema": { + "type": "object", + "description": "A deterministic version of a JSON Schema object.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "required": [ + "type" + ], + "properties": { + "format": { + "type": "string" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "required": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/stringArray" + }, + "type": { + "type": "string", + "enum": [ + "file" + ] + }, + "readOnly": { + "type": "boolean", + "default": false + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "example": {} + }, + "additionalProperties": false + }, + "primitivesItems": { + "type": "object", + "additionalProperties": false, + "properties": { + "type": { + "type": "string", + "enum": [ + "string", + "number", + "integer", + "boolean", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "security": { + "type": "array", + "items": { + "$ref": "#/definitions/securityRequirement" + }, + "uniqueItems": true + }, + "securityRequirement": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + } + }, + "xml": { + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "type": "string" + }, + "namespace": { + "type": "string" + }, + 
"prefix": { + "type": "string" + }, + "attribute": { + "type": "boolean", + "default": false + }, + "wrapped": { + "type": "boolean", + "default": false + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "tag": { + "type": "object", + "additionalProperties": false, + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "securityDefinitions": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "$ref": "#/definitions/basicAuthenticationSecurity" + }, + { + "$ref": "#/definitions/apiKeySecurity" + }, + { + "$ref": "#/definitions/oauth2ImplicitSecurity" + }, + { + "$ref": "#/definitions/oauth2PasswordSecurity" + }, + { + "$ref": "#/definitions/oauth2ApplicationSecurity" + }, + { + "$ref": "#/definitions/oauth2AccessCodeSecurity" + } + ] + } + }, + "basicAuthenticationSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "basic" + ] + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "apiKeySecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "name", + "in" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "apiKey" + ] + }, + "name": { + "type": "string" + }, + "in": { + "type": "string", + "enum": [ + "header", + "query" + ] + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2ImplicitSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "authorizationUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "implicit" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "authorizationUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2PasswordSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "password" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2ApplicationSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "application" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2AccessCodeSecurity": { + "type": "object", + 
"additionalProperties": false, + "required": [ + "type", + "flow", + "authorizationUrl", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "accessCode" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "authorizationUrl": { + "type": "string", + "format": "uri" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2Scopes": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "mediaTypeList": { + "type": "array", + "items": { + "$ref": "#/definitions/mimeType" + }, + "uniqueItems": true + }, + "parametersList": { + "type": "array", + "description": "The parameters needed to send a valid API call.", + "additionalItems": false, + "items": { + "oneOf": [ + { + "$ref": "#/definitions/parameter" + }, + { + "$ref": "#/definitions/jsonReference" + } + ] + }, + "uniqueItems": true + }, + "schemesList": { + "type": "array", + "description": "The transfer protocol of the API.", + "items": { + "type": "string", + "enum": [ + "http", + "https", + "ws", + "wss" + ] + }, + "uniqueItems": true + }, + "collectionFormat": { + "type": "string", + "enum": [ + "csv", + "ssv", + "tsv", + "pipes" + ], + "default": "csv" + }, + "collectionFormatWithMulti": { + "type": "string", + "enum": [ + "csv", + "ssv", + "tsv", + "pipes", + "multi" + ], + "default": "csv" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "multipleOf": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf" + }, + "maximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum" + }, + "exclusiveMaximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum" + }, + "minimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum" + }, + "exclusiveMinimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum" + }, + "maxLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "pattern": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern" + }, + "maxItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "uniqueItems": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems" + }, + "enum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/enum" + }, + "jsonReference": { + "type": "object", + "required": [ + "$ref" + ], + "additionalProperties": false, + "properties": { + "$ref": { + "type": "string" + } + } + } + } + }, + "tests": [ + { + "description": "Example petsore", + "data": { + "swagger": "2.0", + "info": { + "description": "This is a sample server Petstore server. You can find out more about Swagger at [http://swagger.io](http://swagger.io) or on [irc.freenode.net, #swagger](http://swagger.io/irc/). 
For this sample, you can use the api key `special-key` to test the authorization filters.", + "version": "1.0.0", + "title": "Swagger Petstore", + "termsOfService": "http://swagger.io/terms/", + "contact": { + "email": "apiteam@swagger.io" + }, + "license": { + "name": "Apache 2.0", + "url": "http://www.apache.org/licenses/LICENSE-2.0.html" + } + }, + "host": "petstore.swagger.io", + "basePath": "/v2", + "tags": [ + { + "name": "pet", + "description": "Everything about your Pets", + "externalDocs": { + "description": "Find out more", + "url": "http://swagger.io" + } + }, + { + "name": "store", + "description": "Access to Petstore orders" + }, + { + "name": "user", + "description": "Operations about user", + "externalDocs": { + "description": "Find out more about our store", + "url": "http://swagger.io" + } + } + ], + "schemes": [ + "http" + ], + "paths": { + "/pet": { + "post": { + "tags": [ + "pet" + ], + "summary": "Add a new pet to the store", + "description": "", + "operationId": "addPet", + "consumes": [ + "application/json", + "application/xml" + ], + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "in": "body", + "name": "body", + "description": "Pet object that needs to be added to the store", + "required": true, + "schema": { + "$ref": "#/definitions/Pet" + } + } + ], + "responses": { + "405": { + "description": "Invalid input" + } + }, + "security": [ + { + "petstore_auth": [ + "write:pets", + "read:pets" + ] + } + ] + }, + "put": { + "tags": [ + "pet" + ], + "summary": "Update an existing pet", + "description": "", + "operationId": "updatePet", + "consumes": [ + "application/json", + "application/xml" + ], + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "in": "body", + "name": "body", + "description": "Pet object that needs to be added to the store", + "required": true, + "schema": { + "$ref": "#/definitions/Pet" + } + } + ], + "responses": { + "400": { + "description": "Invalid ID supplied" + }, + "404": { + "description": "Pet not found" + }, + "405": { + "description": "Validation exception" + } + }, + "security": [ + { + "petstore_auth": [ + "write:pets", + "read:pets" + ] + } + ] + } + }, + "/pet/findByStatus": { + "get": { + "tags": [ + "pet" + ], + "summary": "Finds Pets by status", + "description": "Multiple status values can be provided with comma separated strings", + "operationId": "findPetsByStatus", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "name": "status", + "in": "query", + "description": "Status values that need to be considered for filter", + "required": true, + "type": "array", + "items": { + "type": "string", + "enum": [ + "available", + "pending", + "sold" + ], + "default": "available" + }, + "collectionFormat": "multi" + } + ], + "responses": { + "200": { + "description": "successful operation", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/Pet" + } + } + }, + "400": { + "description": "Invalid status value" + } + }, + "security": [ + { + "petstore_auth": [ + "write:pets", + "read:pets" + ] + } + ] + } + }, + "/pet/findByTags": { + "get": { + "tags": [ + "pet" + ], + "summary": "Finds Pets by tags", + "description": "Muliple tags can be provided with comma separated strings. 
Use tag1, tag2, tag3 for testing.", + "operationId": "findPetsByTags", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "name": "tags", + "in": "query", + "description": "Tags to filter by", + "required": true, + "type": "array", + "items": { + "type": "string" + }, + "collectionFormat": "multi" + } + ], + "responses": { + "200": { + "description": "successful operation", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/Pet" + } + } + }, + "400": { + "description": "Invalid tag value" + } + }, + "security": [ + { + "petstore_auth": [ + "write:pets", + "read:pets" + ] + } + ], + "deprecated": true + } + }, + "/pet/{petId}": { + "get": { + "tags": [ + "pet" + ], + "summary": "Find pet by ID", + "description": "Returns a single pet", + "operationId": "getPetById", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "name": "petId", + "in": "path", + "description": "ID of pet to return", + "required": true, + "type": "integer", + "format": "int64" + } + ], + "responses": { + "200": { + "description": "successful operation", + "schema": { + "$ref": "#/definitions/Pet" + } + }, + "400": { + "description": "Invalid ID supplied" + }, + "404": { + "description": "Pet not found" + } + }, + "security": [ + { + "api_key": [] + } + ] + }, + "post": { + "tags": [ + "pet" + ], + "summary": "Updates a pet in the store with form data", + "description": "", + "operationId": "updatePetWithForm", + "consumes": [ + "application/x-www-form-urlencoded" + ], + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "name": "petId", + "in": "path", + "description": "ID of pet that needs to be updated", + "required": true, + "type": "integer", + "format": "int64" + }, + { + "name": "name", + "in": "formData", + "description": "Updated name of the pet", + "required": false, + "type": "string" + }, + { + "name": "status", + "in": "formData", + "description": "Updated status of the pet", + "required": false, + "type": "string" + } + ], + "responses": { + "405": { + "description": "Invalid input" + } + }, + "security": [ + { + "petstore_auth": [ + "write:pets", + "read:pets" + ] + } + ] + }, + "delete": { + "tags": [ + "pet" + ], + "summary": "Deletes a pet", + "description": "", + "operationId": "deletePet", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "name": "api_key", + "in": "header", + "required": false, + "type": "string" + }, + { + "name": "petId", + "in": "path", + "description": "Pet id to delete", + "required": true, + "type": "integer", + "format": "int64" + } + ], + "responses": { + "400": { + "description": "Invalid ID supplied" + }, + "404": { + "description": "Pet not found" + } + }, + "security": [ + { + "petstore_auth": [ + "write:pets", + "read:pets" + ] + } + ] + } + }, + "/pet/{petId}/uploadImage": { + "post": { + "tags": [ + "pet" + ], + "summary": "uploads an image", + "description": "", + "operationId": "uploadFile", + "consumes": [ + "multipart/form-data" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "petId", + "in": "path", + "description": "ID of pet to update", + "required": true, + "type": "integer", + "format": "int64" + }, + { + "name": "additionalMetadata", + "in": "formData", + "description": "Additional data to pass to server", + "required": false, + "type": "string" + }, + { + "name": "file", + "in": "formData", + "description": "file to upload", + "required": false, + "type": "file" + } + 
], + "responses": { + "200": { + "description": "successful operation", + "schema": { + "$ref": "#/definitions/ApiResponse" + } + } + }, + "security": [ + { + "petstore_auth": [ + "write:pets", + "read:pets" + ] + } + ] + } + }, + "/store/inventory": { + "get": { + "tags": [ + "store" + ], + "summary": "Returns pet inventories by status", + "description": "Returns a map of status codes to quantities", + "operationId": "getInventory", + "produces": [ + "application/json" + ], + "parameters": [], + "responses": { + "200": { + "description": "successful operation", + "schema": { + "type": "object", + "additionalProperties": { + "type": "integer", + "format": "int32" + } + } + } + }, + "security": [ + { + "api_key": [] + } + ] + } + }, + "/store/order": { + "post": { + "tags": [ + "store" + ], + "summary": "Place an order for a pet", + "description": "", + "operationId": "placeOrder", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "in": "body", + "name": "body", + "description": "order placed for purchasing the pet", + "required": true, + "schema": { + "$ref": "#/definitions/Order" + } + } + ], + "responses": { + "200": { + "description": "successful operation", + "schema": { + "$ref": "#/definitions/Order" + } + }, + "400": { + "description": "Invalid Order" + } + } + } + }, + "/store/order/{orderId}": { + "get": { + "tags": [ + "store" + ], + "summary": "Find purchase order by ID", + "description": "For valid response try integer IDs with value >= 1 and <= 10. Other values will generated exceptions", + "operationId": "getOrderById", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "name": "orderId", + "in": "path", + "description": "ID of pet that needs to be fetched", + "required": true, + "type": "integer", + "maximum": 10.0, + "minimum": 1.0, + "format": "int64" + } + ], + "responses": { + "200": { + "description": "successful operation", + "schema": { + "$ref": "#/definitions/Order" + } + }, + "400": { + "description": "Invalid ID supplied" + }, + "404": { + "description": "Order not found" + } + } + }, + "delete": { + "tags": [ + "store" + ], + "summary": "Delete purchase order by ID", + "description": "For valid response try integer IDs with positive integer value. 
Negative or non-integer values will generate API errors", + "operationId": "deleteOrder", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "name": "orderId", + "in": "path", + "description": "ID of the order that needs to be deleted", + "required": true, + "type": "integer", + "minimum": 1.0, + "format": "int64" + } + ], + "responses": { + "400": { + "description": "Invalid ID supplied" + }, + "404": { + "description": "Order not found" + } + } + } + }, + "/user": { + "post": { + "tags": [ + "user" + ], + "summary": "Create user", + "description": "This can only be done by the logged in user.", + "operationId": "createUser", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "in": "body", + "name": "body", + "description": "Created user object", + "required": true, + "schema": { + "$ref": "#/definitions/User" + } + } + ], + "responses": { + "default": { + "description": "successful operation" + } + } + } + }, + "/user/createWithArray": { + "post": { + "tags": [ + "user" + ], + "summary": "Creates list of users with given input array", + "description": "", + "operationId": "createUsersWithArrayInput", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "in": "body", + "name": "body", + "description": "List of user object", + "required": true, + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/User" + } + } + } + ], + "responses": { + "default": { + "description": "successful operation" + } + } + } + }, + "/user/createWithList": { + "post": { + "tags": [ + "user" + ], + "summary": "Creates list of users with given input array", + "description": "", + "operationId": "createUsersWithListInput", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "in": "body", + "name": "body", + "description": "List of user object", + "required": true, + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/User" + } + } + } + ], + "responses": { + "default": { + "description": "successful operation" + } + } + } + }, + "/user/login": { + "get": { + "tags": [ + "user" + ], + "summary": "Logs user into the system", + "description": "", + "operationId": "loginUser", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "name": "username", + "in": "query", + "description": "The user name for login", + "required": true, + "type": "string" + }, + { + "name": "password", + "in": "query", + "description": "The password for login in clear text", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "successful operation", + "schema": { + "type": "string" + }, + "headers": { + "X-Rate-Limit": { + "type": "integer", + "format": "int32", + "description": "calls per hour allowed by the user" + }, + "X-Expires-After": { + "type": "string", + "format": "date-time", + "description": "date in UTC when token expires" + } + } + }, + "400": { + "description": "Invalid username/password supplied" + } + } + } + }, + "/user/logout": { + "get": { + "tags": [ + "user" + ], + "summary": "Logs out current logged in user session", + "description": "", + "operationId": "logoutUser", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [], + "responses": { + "default": { + "description": "successful operation" + } + } + } + }, + "/user/{username}": { + "get": { + "tags": [ + "user" + ], + "summary": "Get user by user name", + "description": "", + "operationId": 
"getUserByName", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "name": "username", + "in": "path", + "description": "The name that needs to be fetched. Use user1 for testing. ", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "successful operation", + "schema": { + "$ref": "#/definitions/User" + } + }, + "400": { + "description": "Invalid username supplied" + }, + "404": { + "description": "User not found" + } + } + }, + "put": { + "tags": [ + "user" + ], + "summary": "Updated user", + "description": "This can only be done by the logged in user.", + "operationId": "updateUser", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "name": "username", + "in": "path", + "description": "name that need to be updated", + "required": true, + "type": "string" + }, + { + "in": "body", + "name": "body", + "description": "Updated user object", + "required": true, + "schema": { + "$ref": "#/definitions/User" + } + } + ], + "responses": { + "400": { + "description": "Invalid user supplied" + }, + "404": { + "description": "User not found" + } + } + }, + "delete": { + "tags": [ + "user" + ], + "summary": "Delete user", + "description": "This can only be done by the logged in user.", + "operationId": "deleteUser", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "name": "username", + "in": "path", + "description": "The name that needs to be deleted", + "required": true, + "type": "string" + } + ], + "responses": { + "400": { + "description": "Invalid username supplied" + }, + "404": { + "description": "User not found" + } + } + } + } + }, + "securityDefinitions": { + "petstore_auth": { + "type": "oauth2", + "authorizationUrl": "http://petstore.swagger.io/oauth/dialog", + "flow": "implicit", + "scopes": { + "write:pets": "modify pets in your account", + "read:pets": "read your pets" + } + }, + "api_key": { + "type": "apiKey", + "name": "api_key", + "in": "header" + } + }, + "definitions": { + "Order": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "petId": { + "type": "integer", + "format": "int64" + }, + "quantity": { + "type": "integer", + "format": "int32" + }, + "shipDate": { + "type": "string", + "format": "date-time" + }, + "status": { + "type": "string", + "description": "Order Status", + "enum": [ + "placed", + "approved", + "delivered" + ] + }, + "complete": { + "type": "boolean", + "default": false + } + }, + "xml": { + "name": "Order" + } + }, + "Category": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "name": { + "type": "string" + } + }, + "xml": { + "name": "Category" + } + }, + "User": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "username": { + "type": "string" + }, + "firstName": { + "type": "string" + }, + "lastName": { + "type": "string" + }, + "email": { + "type": "string" + }, + "password": { + "type": "string" + }, + "phone": { + "type": "string" + }, + "userStatus": { + "type": "integer", + "format": "int32", + "description": "User Status" + } + }, + "xml": { + "name": "User" + } + }, + "Tag": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "name": { + "type": "string" + } + }, + "xml": { + "name": "Tag" + } + }, + "Pet": { + "type": "object", + "required": [ + "name", + "photoUrls" + ], + "properties": { + "id": { + "type": 
"integer", + "format": "int64" + }, + "category": { + "$ref": "#/definitions/Category" + }, + "name": { + "type": "string", + "example": "doggie" + }, + "photoUrls": { + "type": "array", + "xml": { + "name": "photoUrl", + "wrapped": true + }, + "items": { + "type": "string" + } + }, + "tags": { + "type": "array", + "xml": { + "name": "tag", + "wrapped": true + }, + "items": { + "$ref": "#/definitions/Tag" + } + }, + "status": { + "type": "string", + "description": "pet status in the store", + "enum": [ + "available", + "pending", + "sold" + ] + } + }, + "xml": { + "name": "Pet" + } + }, + "ApiResponse": { + "type": "object", + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "type": { + "type": "string" + }, + "message": { + "type": "string" + } + } + } + }, + "externalDocs": { + "description": "Find out more about Swagger", + "url": "http://swagger.io" + } + }, + "valid": true + } + ] + } +] diff --git a/vendor/poetry-core/poetry/core/_vendor/jsonschema/benchmarks/json_schema_test_suite.py b/vendor/jsonschema/jsonschema/benchmarks/json_schema_test_suite.py similarity index 93% rename from vendor/poetry-core/poetry/core/_vendor/jsonschema/benchmarks/json_schema_test_suite.py rename to vendor/jsonschema/jsonschema/benchmarks/json_schema_test_suite.py index 5add5051..905fb6a3 100644 --- a/vendor/poetry-core/poetry/core/_vendor/jsonschema/benchmarks/json_schema_test_suite.py +++ b/vendor/jsonschema/jsonschema/benchmarks/json_schema_test_suite.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ A performance benchmark using the official test suite. @@ -9,6 +8,5 @@ from jsonschema.tests._suite import Suite - if __name__ == "__main__": Suite().benchmark(runner=Runner()) diff --git a/vendor/jsonschema/jsonschema/cli.py b/vendor/jsonschema/jsonschema/cli.py new file mode 100644 index 00000000..f19b680c --- /dev/null +++ b/vendor/jsonschema/jsonschema/cli.py @@ -0,0 +1,288 @@ +""" +The ``jsonschema`` command line. 
+""" + +from json import JSONDecodeError +from textwrap import dedent +import argparse +import json +import sys +import traceback + +try: + from importlib import metadata +except ImportError: + import importlib_metadata as metadata # type: ignore + +try: + from pkgutil import resolve_name +except ImportError: + from pkgutil_resolve_name import resolve_name # type: ignore + +import attr + +from jsonschema.exceptions import SchemaError +from jsonschema.validators import RefResolver, validator_for + + +class _CannotLoadFile(Exception): + pass + + +@attr.s +class _Outputter(object): + + _formatter = attr.ib() + _stdout = attr.ib() + _stderr = attr.ib() + + @classmethod + def from_arguments(cls, arguments, stdout, stderr): + if arguments["output"] == "plain": + formatter = _PlainFormatter(arguments["error_format"]) + elif arguments["output"] == "pretty": + formatter = _PrettyFormatter() + return cls(formatter=formatter, stdout=stdout, stderr=stderr) + + def load(self, path): + try: + file = open(path) + except FileNotFoundError: + self.filenotfound_error(path=path, exc_info=sys.exc_info()) + raise _CannotLoadFile() + + with file: + try: + return json.load(file) + except JSONDecodeError: + self.parsing_error(path=path, exc_info=sys.exc_info()) + raise _CannotLoadFile() + + def filenotfound_error(self, **kwargs): + self._stderr.write(self._formatter.filenotfound_error(**kwargs)) + + def parsing_error(self, **kwargs): + self._stderr.write(self._formatter.parsing_error(**kwargs)) + + def validation_error(self, **kwargs): + self._stderr.write(self._formatter.validation_error(**kwargs)) + + def validation_success(self, **kwargs): + self._stdout.write(self._formatter.validation_success(**kwargs)) + + +@attr.s +class _PrettyFormatter(object): + + _ERROR_MSG = dedent( + """\ + ===[{type}]===({path})=== + + {body} + ----------------------------- + """, + ) + _SUCCESS_MSG = "===[SUCCESS]===({path})===\n" + + def filenotfound_error(self, path, exc_info): + return self._ERROR_MSG.format( + path=path, + type="FileNotFoundError", + body="{!r} does not exist.".format(path), + ) + + def parsing_error(self, path, exc_info): + exc_type, exc_value, exc_traceback = exc_info + exc_lines = "".join( + traceback.format_exception(exc_type, exc_value, exc_traceback), + ) + return self._ERROR_MSG.format( + path=path, + type=exc_type.__name__, + body=exc_lines, + ) + + def validation_error(self, instance_path, error): + return self._ERROR_MSG.format( + path=instance_path, + type=error.__class__.__name__, + body=error, + ) + + def validation_success(self, instance_path): + return self._SUCCESS_MSG.format(path=instance_path) + + +@attr.s +class _PlainFormatter(object): + + _error_format = attr.ib() + + def filenotfound_error(self, path, exc_info): + return "{!r} does not exist.\n".format(path) + + def parsing_error(self, path, exc_info): + return "Failed to parse {}: {}\n".format( + "" if path == "" else repr(path), + exc_info[1], + ) + + def validation_error(self, instance_path, error): + return self._error_format.format(file_name=instance_path, error=error) + + def validation_success(self, instance_path): + return "" + + +def _resolve_name_with_default(name): + if "." not in name: + name = "jsonschema." + name + return resolve_name(name) + + +parser = argparse.ArgumentParser( + description="JSON Schema Validation CLI", +) +parser.add_argument( + "-i", "--instance", + action="append", + dest="instances", + help=""" + a path to a JSON instance (i.e. filename.json) to validate (may + be specified multiple times). 
If no instances are provided via this + option, one will be expected on standard input. + """, +) +parser.add_argument( + "-F", "--error-format", + help=""" + the format to use for each validation error message, specified + in a form suitable for str.format. This string will be passed + one formatted object named 'error' for each ValidationError. + Only provide this option when using --output=plain, which is the + default. If this argument is unprovided and --output=plain is + used, a simple default representation will be used. + """, +) +parser.add_argument( + "-o", "--output", + choices=["plain", "pretty"], + default="plain", + help=""" + an output format to use. 'plain' (default) will produce minimal + text with one line for each error, while 'pretty' will produce + more detailed human-readable output on multiple lines. + """, +) +parser.add_argument( + "-V", "--validator", + type=_resolve_name_with_default, + help=""" + the fully qualified object name of a validator to use, or, for + validators that are registered with jsonschema, simply the name + of the class. + """, +) +parser.add_argument( + "--base-uri", + help=""" + a base URI to assign to the provided schema, even if it does not + declare one (via e.g. $id). This option can be used if you wish to + resolve relative references to a particular URI (or local path) + """, +) +parser.add_argument( + "--version", + action="version", + version=metadata.version("jsonschema"), +) +parser.add_argument( + "schema", + help="the path to a JSON Schema to validate with (i.e. schema.json)", +) + + +def parse_args(args): + arguments = vars(parser.parse_args(args=args or ["--help"])) + if arguments["output"] != "plain" and arguments["error_format"]: + raise parser.error( + "--error-format can only be used with --output plain", + ) + if arguments["output"] == "plain" and arguments["error_format"] is None: + arguments["error_format"] = "{error.instance}: {error.message}\n" + return arguments + + +def _validate_instance(instance_path, instance, validator, outputter): + invalid = False + for error in validator.iter_errors(instance): + invalid = True + outputter.validation_error(instance_path=instance_path, error=error) + + if not invalid: + outputter.validation_success(instance_path=instance_path) + return invalid + + +def main(args=sys.argv[1:]): + sys.exit(run(arguments=parse_args(args=args))) + + +def run(arguments, stdout=sys.stdout, stderr=sys.stderr, stdin=sys.stdin): + outputter = _Outputter.from_arguments( + arguments=arguments, + stdout=stdout, + stderr=stderr, + ) + + try: + schema = outputter.load(arguments["schema"]) + except _CannotLoadFile: + return 1 + + if arguments["validator"] is None: + arguments["validator"] = validator_for(schema) + + try: + arguments["validator"].check_schema(schema) + except SchemaError as error: + outputter.validation_error( + instance_path=arguments["schema"], + error=error, + ) + return 1 + + if arguments["instances"]: + load, instances = outputter.load, arguments["instances"] + else: + def load(_): + try: + return json.load(stdin) + except JSONDecodeError: + outputter.parsing_error( + path="", exc_info=sys.exc_info(), + ) + raise _CannotLoadFile() + instances = [""] + + resolver = RefResolver( + base_uri=arguments["base_uri"], + referrer=schema, + ) if arguments["base_uri"] is not None else None + + validator = arguments["validator"](schema, resolver=resolver) + exit_code = 0 + for each in instances: + try: + instance = load(each) + except _CannotLoadFile: + exit_code = 1 + else: + exit_code |= 
_validate_instance( + instance_path=each, + instance=instance, + validator=validator, + outputter=outputter, + ) + + return exit_code diff --git a/vendor/jsonschema/jsonschema/exceptions.py b/vendor/jsonschema/jsonschema/exceptions.py new file mode 100644 index 00000000..d1351c49 --- /dev/null +++ b/vendor/jsonschema/jsonschema/exceptions.py @@ -0,0 +1,394 @@ +""" +Validation errors, and some surrounding helpers. +""" +from __future__ import annotations + +from collections import defaultdict, deque +from pprint import pformat +from textwrap import dedent, indent +import heapq +import itertools + +import attr + +from jsonschema import _utils + +WEAK_MATCHES: frozenset[str] = frozenset(["anyOf", "oneOf"]) +STRONG_MATCHES: frozenset[str] = frozenset() + +_unset = _utils.Unset() + + +class _Error(Exception): + def __init__( + self, + message, + validator=_unset, + path=(), + cause=None, + context=(), + validator_value=_unset, + instance=_unset, + schema=_unset, + schema_path=(), + parent=None, + type_checker=_unset, + ): + super(_Error, self).__init__( + message, + validator, + path, + cause, + context, + validator_value, + instance, + schema, + schema_path, + parent, + ) + self.message = message + self.path = self.relative_path = deque(path) + self.schema_path = self.relative_schema_path = deque(schema_path) + self.context = list(context) + self.cause = self.__cause__ = cause + self.validator = validator + self.validator_value = validator_value + self.instance = instance + self.schema = schema + self.parent = parent + self._type_checker = type_checker + + for error in context: + error.parent = self + + def __repr__(self): + return f"<{self.__class__.__name__}: {self.message!r}>" + + def __str__(self): + essential_for_verbose = ( + self.validator, self.validator_value, self.instance, self.schema, + ) + if any(m is _unset for m in essential_for_verbose): + return self.message + + schema_path = _utils.format_as_index( + container=self._word_for_schema_in_error_message, + indices=list(self.relative_schema_path)[:-1], + ) + instance_path = _utils.format_as_index( + container=self._word_for_instance_in_error_message, + indices=self.relative_path, + ) + prefix = 16 * " " + + return dedent( + f"""\ + {self.message} + + Failed validating {self.validator!r} in {schema_path}: + {indent(pformat(self.schema, width=72), prefix).lstrip()} + + On {instance_path}: + {indent(pformat(self.instance, width=72), prefix).lstrip()} + """.rstrip(), + ) + + @classmethod + def create_from(cls, other): + return cls(**other._contents()) + + @property + def absolute_path(self): + parent = self.parent + if parent is None: + return self.relative_path + + path = deque(self.relative_path) + path.extendleft(reversed(parent.absolute_path)) + return path + + @property + def absolute_schema_path(self): + parent = self.parent + if parent is None: + return self.relative_schema_path + + path = deque(self.relative_schema_path) + path.extendleft(reversed(parent.absolute_schema_path)) + return path + + @property + def json_path(self): + path = "$" + for elem in self.absolute_path: + if isinstance(elem, int): + path += "[" + str(elem) + "]" + else: + path += "." 
+ elem + return path + + def _set(self, type_checker=None, **kwargs): + if type_checker is not None and self._type_checker is _unset: + self._type_checker = type_checker + + for k, v in kwargs.items(): + if getattr(self, k) is _unset: + setattr(self, k, v) + + def _contents(self): + attrs = ( + "message", "cause", "context", "validator", "validator_value", + "path", "schema_path", "instance", "schema", "parent", + ) + return dict((attr, getattr(self, attr)) for attr in attrs) + + def _matches_type(self): + try: + expected = self.schema["type"] + except (KeyError, TypeError): + return False + + if isinstance(expected, str): + return self._type_checker.is_type(self.instance, expected) + + return any( + self._type_checker.is_type(self.instance, expected_type) + for expected_type in expected + ) + + +class ValidationError(_Error): + """ + An instance was invalid under a provided schema. + """ + + _word_for_schema_in_error_message = "schema" + _word_for_instance_in_error_message = "instance" + + +class SchemaError(_Error): + """ + A schema was invalid under its corresponding metaschema. + """ + + _word_for_schema_in_error_message = "metaschema" + _word_for_instance_in_error_message = "schema" + + +@attr.s(hash=True) +class RefResolutionError(Exception): + """ + A ref could not be resolved. + """ + + _cause = attr.ib() + + def __str__(self): + return str(self._cause) + + +class UndefinedTypeCheck(Exception): + """ + A type checker was asked to check a type it did not have registered. + """ + + def __init__(self, type): + self.type = type + + def __str__(self): + return f"Type {self.type!r} is unknown to this type checker" + + +class UnknownType(Exception): + """ + A validator was asked to validate an instance against an unknown type. + """ + + def __init__(self, type, instance, schema): + self.type = type + self.instance = instance + self.schema = schema + + def __str__(self): + prefix = 16 * " " + + return dedent( + f"""\ + Unknown type {self.type!r} for validator with schema: + {indent(pformat(self.schema, width=72), prefix).lstrip()} + + While checking instance: + {indent(pformat(self.instance, width=72), prefix).lstrip()} + """.rstrip(), + ) + + +class FormatError(Exception): + """ + Validating a format failed. + """ + + def __init__(self, message, cause=None): + super(FormatError, self).__init__(message, cause) + self.message = message + self.cause = self.__cause__ = cause + + def __str__(self): + return self.message + + +class ErrorTree(object): + """ + ErrorTrees make it easier to check which validations failed. + """ + + _instance = _unset + + def __init__(self, errors=()): + self.errors = {} + self._contents = defaultdict(self.__class__) + + for error in errors: + container = self + for element in error.path: + container = container[element] + container.errors[error.validator] = error + + container._instance = error.instance + + def __contains__(self, index): + """ + Check whether ``instance[index]`` has any errors. + """ + + return index in self._contents + + def __getitem__(self, index): + """ + Retrieve the child tree one level down at the given ``index``. + + If the index is not in the instance that this tree corresponds + to and is not known by this tree, whatever error would be raised + by ``instance.__getitem__`` will be propagated (usually this is + some subclass of `LookupError`. 
+ """ + + if self._instance is not _unset and index not in self: + self._instance[index] + return self._contents[index] + + def __setitem__(self, index, value): + """ + Add an error to the tree at the given ``index``. + """ + self._contents[index] = value + + def __iter__(self): + """ + Iterate (non-recursively) over the indices in the instance with errors. + """ + + return iter(self._contents) + + def __len__(self): + """ + Return the `total_errors`. + """ + return self.total_errors + + def __repr__(self): + return f"<{self.__class__.__name__} ({len(self)} total errors)>" + + @property + def total_errors(self): + """ + The total number of errors in the entire tree, including children. + """ + + child_errors = sum(len(tree) for _, tree in self._contents.items()) + return len(self.errors) + child_errors + + +def by_relevance(weak=WEAK_MATCHES, strong=STRONG_MATCHES): + """ + Create a key function that can be used to sort errors by relevance. + + Arguments: + weak (set): + a collection of validation keywords to consider to be + "weak". If there are two errors at the same level of the + instance and one is in the set of weak validation keywords, + the other error will take priority. By default, :kw:`anyOf` + and :kw:`oneOf` are considered weak keywords and will be + superseded by other same-level validation errors. + + strong (set): + a collection of validation keywords to consider to be + "strong" + """ + def relevance(error): + validator = error.validator + return ( + -len(error.path), + validator not in weak, + validator in strong, + not error._matches_type(), + ) + return relevance + + +relevance = by_relevance() + + +def best_match(errors, key=relevance): + """ + Try to find an error that appears to be the best match among given errors. + + In general, errors that are higher up in the instance (i.e. for which + `ValidationError.path` is shorter) are considered better matches, + since they indicate "more" is wrong with the instance. + + If the resulting match is either :kw:`oneOf` or :kw:`anyOf`, the + *opposite* assumption is made -- i.e. the deepest error is picked, + since these keywords only need to match once, and any other errors + may not be relevant. + + Arguments: + errors (collections.abc.Iterable): + + the errors to select from. Do not provide a mixture of + errors from different validation attempts (i.e. from + different instances or schemas), since it won't produce + sensical output. + + key (collections.abc.Callable): + + the key to use when sorting errors. See `relevance` and + transitively `by_relevance` for more details (the default is + to sort with the defaults of that function). Changing the + default is only useful if you want to change the function + that rates errors but still want the error context descent + done by this function. + + Returns: + the best matching error, or ``None`` if the iterable was empty + + .. note:: + + This function is a heuristic. Its return value may change for a given + set of inputs from version to version if better heuristics are added. + """ + errors = iter(errors) + best = next(errors, None) + if best is None: + return + best = max(itertools.chain([best], errors), key=key) + + while best.context: + # Calculate the minimum via nsmallest, because we don't recurse if + # all nested errors have the same relevance (i.e. 
if min == max == all) + smallest = heapq.nsmallest(2, best.context, key=key) + if len(smallest) == 2 and key(smallest[0]) == key(smallest[1]): + return best + best = smallest[0] + return best diff --git a/vendor/jsonschema/jsonschema/protocols.py b/vendor/jsonschema/jsonschema/protocols.py new file mode 100644 index 00000000..0e96eff5 --- /dev/null +++ b/vendor/jsonschema/jsonschema/protocols.py @@ -0,0 +1,181 @@ +""" +typing.Protocol classes for jsonschema interfaces. +""" + +# for reference material on Protocols, see +# https://www.python.org/dev/peps/pep-0544/ + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, ClassVar, Iterator +import sys + +# doing these imports with `try ... except ImportError` doesn't pass mypy +# checking because mypy sees `typing._SpecialForm` and +# `typing_extensions._SpecialForm` as incompatible +# +# see: +# https://mypy.readthedocs.io/en/stable/runtime_troubles.html#using-new-additions-to-the-typing-module +# https://github.com/python/mypy/issues/4427 +if sys.version_info >= (3, 8): + from typing import Protocol, runtime_checkable +else: + from typing_extensions import Protocol, runtime_checkable + +# in order for Sphinx to resolve references accurately from type annotations, +# it needs to see names like `jsonschema.TypeChecker` +# therefore, only import at type-checking time (to avoid circular references), +# but use `jsonschema` for any types which will otherwise not be resolvable +if TYPE_CHECKING: + import jsonschema + +from jsonschema.exceptions import ValidationError +from jsonschema.validators import RefResolver + +# For code authors working on the validator protocol, these are the three +# use-cases which should be kept in mind: +# +# 1. As a protocol class, it can be used in type annotations to describe the +# available methods and attributes of a validator +# 2. It is the source of autodoc for the validator documentation +# 3. It is runtime_checkable, meaning that it can be used in isinstance() +# checks. +# +# Since protocols are not base classes, isinstance() checking is limited in +# its capabilities. See docs on runtime_checkable for detail + + +@runtime_checkable +class Validator(Protocol): + """ + The protocol to which all validator classes should adhere. + + :argument schema: the schema that the validator object + will validate with. It is assumed to be valid, and providing + an invalid schema can lead to undefined behavior. See + `Validator.check_schema` to validate a schema first. + :argument resolver: an instance of `jsonschema.RefResolver` that will be + used to resolve :kw:`$ref` properties (JSON references). If + unprovided, one will be created. + :argument format_checker: an instance of `jsonschema.FormatChecker` + whose `jsonschema.FormatChecker.conforms` method will be called to + check and see if instances conform to each :kw:`format` + property present in the schema. If unprovided, no validation + will be done for :kw:`format`. Certain formats require + additional packages to be installed (ipv5, uri, color, date-time). + The required packages can be found at the bottom of this page. + """ + + #: An object representing the validator's meta schema (the schema that + #: describes valid schemas in the given version). + META_SCHEMA: ClassVar[dict] + + #: A mapping of validation keywords (`str`\s) to functions that + #: validate the keyword with that name. For more information see + #: `creating-validators`. 
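The exceptions module above is easiest to follow from the consumer side. A minimal sketch (not part of the diff) of how `best_match`, `ErrorTree`, and `ValidationError.json_path` fit together; the schema and instance are illustrative, and `Draft202012Validator` comes from the vendored `jsonschema.validators`:

```python
from jsonschema import Draft202012Validator
from jsonschema.exceptions import ErrorTree, best_match

schema = {
    "type": "object",
    "properties": {"size": {"type": "integer", "minimum": 0}},
}
validator = Draft202012Validator(schema)
errors = list(validator.iter_errors({"size": -1}))

# best_match() ranks errors using the by_relevance() heuristic above.
print(best_match(errors).message)  # -1 is less than the minimum of 0

# ErrorTree indexes the same errors by their path into the instance.
tree = ErrorTree(errors)
assert "size" in tree
print(tree["size"].errors["minimum"].json_path)  # $.size
```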
+ VALIDATORS: ClassVar[dict] + + #: A `jsonschema.TypeChecker` that will be used when validating + #: :kw:`type` keywords in JSON schemas. + TYPE_CHECKER: ClassVar[jsonschema.TypeChecker] + + #: A `jsonschema.FormatChecker` that will be used when validating + #: :kw:`format` properties in JSON schemas. + FORMAT_CHECKER: ClassVar[jsonschema.FormatChecker] + + #: The schema that was passed in when initializing the object. + schema: dict | bool + + def __init__( + self, + schema: dict | bool, + resolver: RefResolver | None = None, + format_checker: jsonschema.FormatChecker | None = None, + ) -> None: + ... + + @classmethod + def check_schema(cls, schema: dict) -> None: + """ + Validate the given schema against the validator's `META_SCHEMA`. + + :raises: `jsonschema.exceptions.SchemaError` if the schema + is invalid + """ + + def is_type(self, instance: Any, type: str) -> bool: + """ + Check if the instance is of the given (JSON Schema) type. + + :type type: str + :rtype: bool + :raises: `jsonschema.exceptions.UnknownType` if ``type`` + is not a known type. + """ + + def is_valid(self, instance: dict) -> bool: + """ + Check if the instance is valid under the current `schema`. + + :rtype: bool + + >>> schema = {"maxItems" : 2} + >>> Draft202012Validator(schema).is_valid([2, 3, 4]) + False + """ + + def iter_errors(self, instance: dict) -> Iterator[ValidationError]: + r""" + Lazily yield each of the validation errors in the given instance. + + :rtype: an `collections.abc.Iterable` of + `jsonschema.exceptions.ValidationError`\s + + >>> schema = { + ... "type" : "array", + ... "items" : {"enum" : [1, 2, 3]}, + ... "maxItems" : 2, + ... } + >>> v = Draft202012Validator(schema) + >>> for error in sorted(v.iter_errors([2, 3, 4]), key=str): + ... print(error.message) + 4 is not one of [1, 2, 3] + [2, 3, 4] is too long + """ + + def validate(self, instance: dict) -> None: + """ + Check if the instance is valid under the current `schema`. + + :raises: `jsonschema.exceptions.ValidationError` if the + instance is invalid + + >>> schema = {"maxItems" : 2} + >>> Draft202012Validator(schema).validate([2, 3, 4]) + Traceback (most recent call last): + ... + ValidationError: [2, 3, 4] is too long + """ + + def evolve(self, **kwargs) -> "Validator": + """ + Create a new validator like this one, but with given changes. + + Preserves all other attributes, so can be used to e.g. create a + validator with a different schema but with the same :kw:`$ref` + resolution behavior. + + >>> validator = Draft202012Validator({}) + >>> validator.evolve(schema={"type": "number"}) + Draft202012Validator(schema={'type': 'number'}, format_checker=None) + + The returned object satisfies the validator protocol, but may not + be of the same concrete class! In particular this occurs + when a :kw:`$ref` occurs to a schema with a different + :kw:`$schema` than this one (i.e. for a different draft). + + >>> validator.evolve( + ... schema={"$schema": Draft7Validator.META_SCHEMA["$id"]} + ... 
) + Draft7Validator(schema=..., format_checker=None) + """ diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/parsers/__init__.py b/vendor/jsonschema/jsonschema/schemas/__init__.py similarity index 100% rename from vendor/poetry-core/poetry/core/_vendor/lark/parsers/__init__.py rename to vendor/jsonschema/jsonschema/schemas/__init__.py diff --git a/vendor/jsonschema/jsonschema/schemas/draft2019-09.json b/vendor/jsonschema/jsonschema/schemas/draft2019-09.json new file mode 100644 index 00000000..2248a0c8 --- /dev/null +++ b/vendor/jsonschema/jsonschema/schemas/draft2019-09.json @@ -0,0 +1,42 @@ +{ + "$schema": "https://json-schema.org/draft/2019-09/schema", + "$id": "https://json-schema.org/draft/2019-09/schema", + "$vocabulary": { + "https://json-schema.org/draft/2019-09/vocab/core": true, + "https://json-schema.org/draft/2019-09/vocab/applicator": true, + "https://json-schema.org/draft/2019-09/vocab/validation": true, + "https://json-schema.org/draft/2019-09/vocab/meta-data": true, + "https://json-schema.org/draft/2019-09/vocab/format": false, + "https://json-schema.org/draft/2019-09/vocab/content": true + }, + "$recursiveAnchor": true, + + "title": "Core and Validation specifications meta-schema", + "allOf": [ + {"$ref": "meta/core"}, + {"$ref": "meta/applicator"}, + {"$ref": "meta/validation"}, + {"$ref": "meta/meta-data"}, + {"$ref": "meta/format"}, + {"$ref": "meta/content"} + ], + "type": ["object", "boolean"], + "properties": { + "definitions": { + "$comment": "While no longer an official keyword as it is replaced by $defs, this keyword is retained in the meta-schema to prevent incompatible extensions as it remains in common use.", + "type": "object", + "additionalProperties": { "$recursiveRef": "#" }, + "default": {} + }, + "dependencies": { + "$comment": "\"dependencies\" is no longer a keyword, but schema authors should avoid redefining it to facilitate a smooth transition to \"dependentSchemas\" and \"dependentRequired\"", + "type": "object", + "additionalProperties": { + "anyOf": [ + { "$recursiveRef": "#" }, + { "$ref": "meta/validation#/$defs/stringArray" } + ] + } + } + } +} diff --git a/vendor/jsonschema/jsonschema/schemas/draft2020-12.json b/vendor/jsonschema/jsonschema/schemas/draft2020-12.json new file mode 100644 index 00000000..d5e2d31c --- /dev/null +++ b/vendor/jsonschema/jsonschema/schemas/draft2020-12.json @@ -0,0 +1,58 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://json-schema.org/draft/2020-12/schema", + "$vocabulary": { + "https://json-schema.org/draft/2020-12/vocab/core": true, + "https://json-schema.org/draft/2020-12/vocab/applicator": true, + "https://json-schema.org/draft/2020-12/vocab/unevaluated": true, + "https://json-schema.org/draft/2020-12/vocab/validation": true, + "https://json-schema.org/draft/2020-12/vocab/meta-data": true, + "https://json-schema.org/draft/2020-12/vocab/format-annotation": true, + "https://json-schema.org/draft/2020-12/vocab/content": true + }, + "$dynamicAnchor": "meta", + + "title": "Core and Validation specifications meta-schema", + "allOf": [ + {"$ref": "meta/core"}, + {"$ref": "meta/applicator"}, + {"$ref": "meta/unevaluated"}, + {"$ref": "meta/validation"}, + {"$ref": "meta/meta-data"}, + {"$ref": "meta/format-annotation"}, + {"$ref": "meta/content"} + ], + "type": ["object", "boolean"], + "$comment": "This meta-schema also defines keywords that have appeared in previous drafts in order to prevent incompatible extensions as they remain in common use.", + "properties": { + 
"definitions": { + "$comment": "\"definitions\" has been replaced by \"$defs\".", + "type": "object", + "additionalProperties": { "$dynamicRef": "#meta" }, + "deprecated": true, + "default": {} + }, + "dependencies": { + "$comment": "\"dependencies\" has been split and replaced by \"dependentSchemas\" and \"dependentRequired\" in order to serve their differing semantics.", + "type": "object", + "additionalProperties": { + "anyOf": [ + { "$dynamicRef": "#meta" }, + { "$ref": "meta/validation#/$defs/stringArray" } + ] + }, + "deprecated": true, + "default": {} + }, + "$recursiveAnchor": { + "$comment": "\"$recursiveAnchor\" has been replaced by \"$dynamicAnchor\".", + "$ref": "meta/core#/$defs/anchorString", + "deprecated": true + }, + "$recursiveRef": { + "$comment": "\"$recursiveRef\" has been replaced by \"$dynamicRef\".", + "$ref": "meta/core#/$defs/uriReferenceString", + "deprecated": true + } + } +} diff --git a/vendor/jsonschema/jsonschema/schemas/draft3.json b/vendor/jsonschema/jsonschema/schemas/draft3.json new file mode 100644 index 00000000..23d59b64 --- /dev/null +++ b/vendor/jsonschema/jsonschema/schemas/draft3.json @@ -0,0 +1,177 @@ +{ + "$schema" : "http://json-schema.org/draft-03/schema#", + "id" : "http://json-schema.org/draft-03/schema#", + "type" : "object", + + "properties" : { + "type" : { + "type" : ["string", "array"], + "items" : { + "type" : ["string", {"$ref" : "#"}] + }, + "uniqueItems" : true, + "default" : "any" + }, + + "properties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "#", "type" : "object"}, + "default" : {} + }, + + "patternProperties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "#"}, + "default" : {} + }, + + "additionalProperties" : { + "type" : [{"$ref" : "#"}, "boolean"], + "default" : {} + }, + + "items" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "default" : {} + }, + + "additionalItems" : { + "type" : [{"$ref" : "#"}, "boolean"], + "default" : {} + }, + + "required" : { + "type" : "boolean", + "default" : false + }, + + "dependencies" : { + "type" : ["string", "array", "object"], + "additionalProperties" : { + "type" : ["string", "array", {"$ref" : "#"}], + "items" : { + "type" : "string" + } + }, + "default" : {} + }, + + "minimum" : { + "type" : "number" + }, + + "maximum" : { + "type" : "number" + }, + + "exclusiveMinimum" : { + "type" : "boolean", + "default" : false + }, + + "exclusiveMaximum" : { + "type" : "boolean", + "default" : false + }, + + "maxDecimal": { + "minimum": 0, + "type": "number" + }, + + "minItems" : { + "type" : "integer", + "minimum" : 0, + "default" : 0 + }, + + "maxItems" : { + "type" : "integer", + "minimum" : 0 + }, + + "uniqueItems" : { + "type" : "boolean", + "default" : false + }, + + "pattern" : { + "type" : "string", + "format" : "regex" + }, + + "minLength" : { + "type" : "integer", + "minimum" : 0, + "default" : 0 + }, + + "maxLength" : { + "type" : "integer" + }, + + "enum" : { + "type" : "array" + }, + + "default" : { + "type" : "any" + }, + + "title" : { + "type" : "string" + }, + + "description" : { + "type" : "string" + }, + + "format" : { + "type" : "string" + }, + + "divisibleBy" : { + "type" : "number", + "minimum" : 0, + "exclusiveMinimum" : true, + "default" : 1 + }, + + "disallow" : { + "type" : ["string", "array"], + "items" : { + "type" : ["string", {"$ref" : "#"}] + }, + "uniqueItems" : true + }, + + "extends" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "default" : {} + }, + + "id" : { + "type" : 
"string", + "format" : "uri" + }, + + "$ref" : { + "type" : "string", + "format" : "uri" + }, + + "$schema" : { + "type" : "string", + "format" : "uri" + } + }, + + "dependencies" : { + "exclusiveMinimum" : "minimum", + "exclusiveMaximum" : "maximum" + }, + + "default" : {} +} diff --git a/vendor/jsonschema/jsonschema/schemas/draft4.json b/vendor/jsonschema/jsonschema/schemas/draft4.json new file mode 100644 index 00000000..ba0c1170 --- /dev/null +++ b/vendor/jsonschema/jsonschema/schemas/draft4.json @@ -0,0 +1,149 @@ +{ + "id": "http://json-schema.org/draft-04/schema#", + "$schema": "http://json-schema.org/draft-04/schema#", + "description": "Core schema meta-schema", + "definitions": { + "schemaArray": { + "type": "array", + "minItems": 1, + "items": { "$ref": "#" } + }, + "positiveInteger": { + "type": "integer", + "minimum": 0 + }, + "positiveIntegerDefault0": { + "allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ] + }, + "simpleTypes": { + "enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ] + }, + "stringArray": { + "type": "array", + "items": { "type": "string" }, + "minItems": 1, + "uniqueItems": true + } + }, + "type": "object", + "properties": { + "id": { + "format": "uri", + "type": "string" + }, + "$schema": { + "type": "string", + "format": "uri" + }, + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "default": {}, + "multipleOf": { + "type": "number", + "minimum": 0, + "exclusiveMinimum": true + }, + "maximum": { + "type": "number" + }, + "exclusiveMaximum": { + "type": "boolean", + "default": false + }, + "minimum": { + "type": "number" + }, + "exclusiveMinimum": { + "type": "boolean", + "default": false + }, + "maxLength": { "$ref": "#/definitions/positiveInteger" }, + "minLength": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "pattern": { + "type": "string", + "format": "regex" + }, + "additionalItems": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "items": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/schemaArray" } + ], + "default": {} + }, + "maxItems": { "$ref": "#/definitions/positiveInteger" }, + "minItems": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "uniqueItems": { + "type": "boolean", + "default": false + }, + "maxProperties": { "$ref": "#/definitions/positiveInteger" }, + "minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "required": { "$ref": "#/definitions/stringArray" }, + "additionalProperties": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "definitions": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "properties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "patternProperties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "dependencies": { + "type": "object", + "additionalProperties": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/stringArray" } + ] + } + }, + "enum": { + "type": "array" + }, + "type": { + "anyOf": [ + { "$ref": "#/definitions/simpleTypes" }, + { + "type": "array", + "items": { "$ref": "#/definitions/simpleTypes" }, + "minItems": 1, + "uniqueItems": true + } + ] + }, + "format": { "type": "string" }, + "allOf": { "$ref": "#/definitions/schemaArray" }, + "anyOf": { "$ref": "#/definitions/schemaArray" }, + "oneOf": { "$ref": "#/definitions/schemaArray" }, + "not": { "$ref": "#" } + }, + 
"dependencies": { + "exclusiveMaximum": [ "maximum" ], + "exclusiveMinimum": [ "minimum" ] + }, + "default": {} +} diff --git a/vendor/poetry-core/poetry/core/_vendor/jsonschema/schemas/draft6.json b/vendor/jsonschema/jsonschema/schemas/draft6.json similarity index 100% rename from vendor/poetry-core/poetry/core/_vendor/jsonschema/schemas/draft6.json rename to vendor/jsonschema/jsonschema/schemas/draft6.json diff --git a/vendor/poetry-core/poetry/core/_vendor/jsonschema/schemas/draft7.json b/vendor/jsonschema/jsonschema/schemas/draft7.json similarity index 100% rename from vendor/poetry-core/poetry/core/_vendor/jsonschema/schemas/draft7.json rename to vendor/jsonschema/jsonschema/schemas/draft7.json diff --git a/vendor/jsonschema/jsonschema/schemas/vocabularies.json b/vendor/jsonschema/jsonschema/schemas/vocabularies.json new file mode 100644 index 00000000..bca17052 --- /dev/null +++ b/vendor/jsonschema/jsonschema/schemas/vocabularies.json @@ -0,0 +1 @@ +{"https://json-schema.org/draft/2020-12/meta/content": {"$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://json-schema.org/draft/2020-12/meta/content", "$vocabulary": {"https://json-schema.org/draft/2020-12/vocab/content": true}, "$dynamicAnchor": "meta", "title": "Content vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"contentEncoding": {"type": "string"}, "contentMediaType": {"type": "string"}, "contentSchema": {"$dynamicRef": "#meta"}}}, "https://json-schema.org/draft/2020-12/meta/unevaluated": {"$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://json-schema.org/draft/2020-12/meta/unevaluated", "$vocabulary": {"https://json-schema.org/draft/2020-12/vocab/unevaluated": true}, "$dynamicAnchor": "meta", "title": "Unevaluated applicator vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"unevaluatedItems": {"$dynamicRef": "#meta"}, "unevaluatedProperties": {"$dynamicRef": "#meta"}}}, "https://json-schema.org/draft/2020-12/meta/format-annotation": {"$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://json-schema.org/draft/2020-12/meta/format-annotation", "$vocabulary": {"https://json-schema.org/draft/2020-12/vocab/format-annotation": true}, "$dynamicAnchor": "meta", "title": "Format vocabulary meta-schema for annotation results", "type": ["object", "boolean"], "properties": {"format": {"type": "string"}}}, "https://json-schema.org/draft/2020-12/meta/applicator": {"$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://json-schema.org/draft/2020-12/meta/applicator", "$vocabulary": {"https://json-schema.org/draft/2020-12/vocab/applicator": true}, "$dynamicAnchor": "meta", "title": "Applicator vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"prefixItems": {"$ref": "#/$defs/schemaArray"}, "items": {"$dynamicRef": "#meta"}, "contains": {"$dynamicRef": "#meta"}, "additionalProperties": {"$dynamicRef": "#meta"}, "properties": {"type": "object", "additionalProperties": {"$dynamicRef": "#meta"}, "default": {}}, "patternProperties": {"type": "object", "additionalProperties": {"$dynamicRef": "#meta"}, "propertyNames": {"format": "regex"}, "default": {}}, "dependentSchemas": {"type": "object", "additionalProperties": {"$dynamicRef": "#meta"}, "default": {}}, "propertyNames": {"$dynamicRef": "#meta"}, "if": {"$dynamicRef": "#meta"}, "then": {"$dynamicRef": "#meta"}, "else": {"$dynamicRef": "#meta"}, "allOf": {"$ref": "#/$defs/schemaArray"}, "anyOf": {"$ref": "#/$defs/schemaArray"}, "oneOf": {"$ref": 
"#/$defs/schemaArray"}, "not": {"$dynamicRef": "#meta"}}, "$defs": {"schemaArray": {"type": "array", "minItems": 1, "items": {"$dynamicRef": "#meta"}}}}, "https://json-schema.org/draft/2020-12/meta/meta-data": {"$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://json-schema.org/draft/2020-12/meta/meta-data", "$vocabulary": {"https://json-schema.org/draft/2020-12/vocab/meta-data": true}, "$dynamicAnchor": "meta", "title": "Meta-data vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"title": {"type": "string"}, "description": {"type": "string"}, "default": true, "deprecated": {"type": "boolean", "default": false}, "readOnly": {"type": "boolean", "default": false}, "writeOnly": {"type": "boolean", "default": false}, "examples": {"type": "array", "items": true}}}, "https://json-schema.org/draft/2020-12/meta/core": {"$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://json-schema.org/draft/2020-12/meta/core", "$vocabulary": {"https://json-schema.org/draft/2020-12/vocab/core": true}, "$dynamicAnchor": "meta", "title": "Core vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"$id": {"$ref": "#/$defs/uriReferenceString", "$comment": "Non-empty fragments not allowed.", "pattern": "^[^#]*#?$"}, "$schema": {"$ref": "#/$defs/uriString"}, "$ref": {"$ref": "#/$defs/uriReferenceString"}, "$anchor": {"$ref": "#/$defs/anchorString"}, "$dynamicRef": {"$ref": "#/$defs/uriReferenceString"}, "$dynamicAnchor": {"$ref": "#/$defs/anchorString"}, "$vocabulary": {"type": "object", "propertyNames": {"$ref": "#/$defs/uriString"}, "additionalProperties": {"type": "boolean"}}, "$comment": {"type": "string"}, "$defs": {"type": "object", "additionalProperties": {"$dynamicRef": "#meta"}}}, "$defs": {"anchorString": {"type": "string", "pattern": "^[A-Za-z_][-A-Za-z0-9._]*$"}, "uriString": {"type": "string", "format": "uri"}, "uriReferenceString": {"type": "string", "format": "uri-reference"}}}, "https://json-schema.org/draft/2020-12/meta/validation": {"$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://json-schema.org/draft/2020-12/meta/validation", "$vocabulary": {"https://json-schema.org/draft/2020-12/vocab/validation": true}, "$dynamicAnchor": "meta", "title": "Validation vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"type": {"anyOf": [{"$ref": "#/$defs/simpleTypes"}, {"type": "array", "items": {"$ref": "#/$defs/simpleTypes"}, "minItems": 1, "uniqueItems": true}]}, "const": true, "enum": {"type": "array", "items": true}, "multipleOf": {"type": "number", "exclusiveMinimum": 0}, "maximum": {"type": "number"}, "exclusiveMaximum": {"type": "number"}, "minimum": {"type": "number"}, "exclusiveMinimum": {"type": "number"}, "maxLength": {"$ref": "#/$defs/nonNegativeInteger"}, "minLength": {"$ref": "#/$defs/nonNegativeIntegerDefault0"}, "pattern": {"type": "string", "format": "regex"}, "maxItems": {"$ref": "#/$defs/nonNegativeInteger"}, "minItems": {"$ref": "#/$defs/nonNegativeIntegerDefault0"}, "uniqueItems": {"type": "boolean", "default": false}, "maxContains": {"$ref": "#/$defs/nonNegativeInteger"}, "minContains": {"$ref": "#/$defs/nonNegativeInteger", "default": 1}, "maxProperties": {"$ref": "#/$defs/nonNegativeInteger"}, "minProperties": {"$ref": "#/$defs/nonNegativeIntegerDefault0"}, "required": {"$ref": "#/$defs/stringArray"}, "dependentRequired": {"type": "object", "additionalProperties": {"$ref": "#/$defs/stringArray"}}}, "$defs": {"nonNegativeInteger": {"type": "integer", "minimum": 0}, 
"nonNegativeIntegerDefault0": {"$ref": "#/$defs/nonNegativeInteger", "default": 0}, "simpleTypes": {"enum": ["array", "boolean", "integer", "null", "number", "object", "string"]}, "stringArray": {"type": "array", "items": {"type": "string"}, "uniqueItems": true, "default": []}}}, "https://json-schema.org/draft/2019-09/meta/content": {"$schema": "https://json-schema.org/draft/2019-09/schema", "$id": "https://json-schema.org/draft/2019-09/meta/content", "$vocabulary": {"https://json-schema.org/draft/2019-09/vocab/content": true}, "$recursiveAnchor": true, "title": "Content vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"contentMediaType": {"type": "string"}, "contentEncoding": {"type": "string"}, "contentSchema": {"$recursiveRef": "#"}}}, "https://json-schema.org/draft/2019-09/meta/applicator": {"$schema": "https://json-schema.org/draft/2019-09/schema", "$id": "https://json-schema.org/draft/2019-09/meta/applicator", "$vocabulary": {"https://json-schema.org/draft/2019-09/vocab/applicator": true}, "$recursiveAnchor": true, "title": "Applicator vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"additionalItems": {"$recursiveRef": "#"}, "unevaluatedItems": {"$recursiveRef": "#"}, "items": {"anyOf": [{"$recursiveRef": "#"}, {"$ref": "#/$defs/schemaArray"}]}, "contains": {"$recursiveRef": "#"}, "additionalProperties": {"$recursiveRef": "#"}, "unevaluatedProperties": {"$recursiveRef": "#"}, "properties": {"type": "object", "additionalProperties": {"$recursiveRef": "#"}, "default": {}}, "patternProperties": {"type": "object", "additionalProperties": {"$recursiveRef": "#"}, "propertyNames": {"format": "regex"}, "default": {}}, "dependentSchemas": {"type": "object", "additionalProperties": {"$recursiveRef": "#"}}, "propertyNames": {"$recursiveRef": "#"}, "if": {"$recursiveRef": "#"}, "then": {"$recursiveRef": "#"}, "else": {"$recursiveRef": "#"}, "allOf": {"$ref": "#/$defs/schemaArray"}, "anyOf": {"$ref": "#/$defs/schemaArray"}, "oneOf": {"$ref": "#/$defs/schemaArray"}, "not": {"$recursiveRef": "#"}}, "$defs": {"schemaArray": {"type": "array", "minItems": 1, "items": {"$recursiveRef": "#"}}}}, "https://json-schema.org/draft/2019-09/meta/meta-data": {"$schema": "https://json-schema.org/draft/2019-09/schema", "$id": "https://json-schema.org/draft/2019-09/meta/meta-data", "$vocabulary": {"https://json-schema.org/draft/2019-09/vocab/meta-data": true}, "$recursiveAnchor": true, "title": "Meta-data vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"title": {"type": "string"}, "description": {"type": "string"}, "default": true, "deprecated": {"type": "boolean", "default": false}, "readOnly": {"type": "boolean", "default": false}, "writeOnly": {"type": "boolean", "default": false}, "examples": {"type": "array", "items": true}}}, "https://json-schema.org/draft/2019-09/meta/core": {"$schema": "https://json-schema.org/draft/2019-09/schema", "$id": "https://json-schema.org/draft/2019-09/meta/core", "$vocabulary": {"https://json-schema.org/draft/2019-09/vocab/core": true}, "$recursiveAnchor": true, "title": "Core vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"$id": {"type": "string", "format": "uri-reference", "$comment": "Non-empty fragments not allowed.", "pattern": "^[^#]*#?$"}, "$schema": {"type": "string", "format": "uri"}, "$anchor": {"type": "string", "pattern": "^[A-Za-z][-A-Za-z0-9.:_]*$"}, "$ref": {"type": "string", "format": "uri-reference"}, "$recursiveRef": {"type": "string", "format": "uri-reference"}, 
"$recursiveAnchor": {"type": "boolean", "default": false}, "$vocabulary": {"type": "object", "propertyNames": {"type": "string", "format": "uri"}, "additionalProperties": {"type": "boolean"}}, "$comment": {"type": "string"}, "$defs": {"type": "object", "additionalProperties": {"$recursiveRef": "#"}, "default": {}}}}, "https://json-schema.org/draft/2019-09/meta/validation": {"$schema": "https://json-schema.org/draft/2019-09/schema", "$id": "https://json-schema.org/draft/2019-09/meta/validation", "$vocabulary": {"https://json-schema.org/draft/2019-09/vocab/validation": true}, "$recursiveAnchor": true, "title": "Validation vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"multipleOf": {"type": "number", "exclusiveMinimum": 0}, "maximum": {"type": "number"}, "exclusiveMaximum": {"type": "number"}, "minimum": {"type": "number"}, "exclusiveMinimum": {"type": "number"}, "maxLength": {"$ref": "#/$defs/nonNegativeInteger"}, "minLength": {"$ref": "#/$defs/nonNegativeIntegerDefault0"}, "pattern": {"type": "string", "format": "regex"}, "maxItems": {"$ref": "#/$defs/nonNegativeInteger"}, "minItems": {"$ref": "#/$defs/nonNegativeIntegerDefault0"}, "uniqueItems": {"type": "boolean", "default": false}, "maxContains": {"$ref": "#/$defs/nonNegativeInteger"}, "minContains": {"$ref": "#/$defs/nonNegativeInteger", "default": 1}, "maxProperties": {"$ref": "#/$defs/nonNegativeInteger"}, "minProperties": {"$ref": "#/$defs/nonNegativeIntegerDefault0"}, "required": {"$ref": "#/$defs/stringArray"}, "dependentRequired": {"type": "object", "additionalProperties": {"$ref": "#/$defs/stringArray"}}, "const": true, "enum": {"type": "array", "items": true}, "type": {"anyOf": [{"$ref": "#/$defs/simpleTypes"}, {"type": "array", "items": {"$ref": "#/$defs/simpleTypes"}, "minItems": 1, "uniqueItems": true}]}}, "$defs": {"nonNegativeInteger": {"type": "integer", "minimum": 0}, "nonNegativeIntegerDefault0": {"$ref": "#/$defs/nonNegativeInteger", "default": 0}, "simpleTypes": {"enum": ["array", "boolean", "integer", "null", "number", "object", "string"]}, "stringArray": {"type": "array", "items": {"type": "string"}, "uniqueItems": true, "default": []}}}, "https://json-schema.org/draft/2019-09/meta/hyper-schema": {"$schema": "https://json-schema.org/draft/2019-09/hyper-schema", "$id": "https://json-schema.org/draft/2019-09/meta/hyper-schema", "$vocabulary": {"https://json-schema.org/draft/2019-09/vocab/hyper-schema": true}, "$recursiveAnchor": true, "title": "JSON Hyper-Schema Vocabulary Schema", "type": ["object", "boolean"], "properties": {"base": {"type": "string", "format": "uri-template"}, "links": {"type": "array", "items": {"$ref": "https://json-schema.org/draft/2019-09/links"}}}, "links": [{"rel": "self", "href": "{+%24id}"}]}, "https://json-schema.org/draft/2019-09/meta/format": {"$schema": "https://json-schema.org/draft/2019-09/schema", "$id": "https://json-schema.org/draft/2019-09/meta/format", "$vocabulary": {"https://json-schema.org/draft/2019-09/vocab/format": true}, "$recursiveAnchor": true, "title": "Format vocabulary meta-schema", "type": ["object", "boolean"], "properties": {"format": {"type": "string"}}}} diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/tools/__init__.py b/vendor/jsonschema/jsonschema/tests/__init__.py similarity index 100% rename from vendor/poetry-core/poetry/core/_vendor/lark/tools/__init__.py rename to vendor/jsonschema/jsonschema/tests/__init__.py diff --git a/vendor/jsonschema/jsonschema/tests/_helpers.py b/vendor/jsonschema/jsonschema/tests/_helpers.py new 
file mode 100644 index 00000000..754ff830 --- /dev/null +++ b/vendor/jsonschema/jsonschema/tests/_helpers.py @@ -0,0 +1,25 @@ +from urllib.parse import urljoin + + +def issues_url(organization, repository): + return urljoin( + "https://github.com/", f"{organization}/{repository}/issues/", + ) + + +ISSUES_URL = issues_url("python-jsonschema", "jsonschema") +TEST_SUITE_ISSUES_URL = issues_url("json-schema-org", "JSON-Schema-Test-Suite") + + +def bug(issue=None): + message = "A known bug." + if issue is not None: + message += f" See {urljoin(ISSUES_URL, str(issue))}." + return message + + +def test_suite_bug(issue): + return ( + "A known test suite bug. " + f"See {urljoin(TEST_SUITE_ISSUES_URL, str(issue))}." + ) diff --git a/vendor/jsonschema/jsonschema/tests/_suite.py b/vendor/jsonschema/jsonschema/tests/_suite.py new file mode 100644 index 00000000..a2148baf --- /dev/null +++ b/vendor/jsonschema/jsonschema/tests/_suite.py @@ -0,0 +1,241 @@ +""" +Python representations of the JSON Schema Test Suite tests. +""" + +from functools import partial +from pathlib import Path +import json +import os +import re +import subprocess +import sys +import unittest + +import attr + +from jsonschema.validators import _VALIDATORS +import jsonschema + + +def _find_suite(): + root = os.environ.get("JSON_SCHEMA_TEST_SUITE") + if root is not None: + return Path(root) + + root = Path(jsonschema.__file__).parent.parent / "json" + if not root.is_dir(): # pragma: no cover + raise ValueError( + ( + "Can't find the JSON-Schema-Test-Suite directory. " + "Set the 'JSON_SCHEMA_TEST_SUITE' environment " + "variable or run the tests from alongside a checkout " + "of the suite." + ), + ) + return root + + +@attr.s(hash=True) +class Suite(object): + + _root = attr.ib(default=attr.Factory(_find_suite)) + + def _remotes(self): + jsonschema_suite = self._root.joinpath("bin", "jsonschema_suite") + remotes = subprocess.check_output( + [sys.executable, str(jsonschema_suite), "remotes"], + ) + return json.loads(remotes.decode("utf-8")) + + def benchmark(self, runner): # pragma: no cover + for name, Validator in _VALIDATORS.items(): + self.version(name=name).benchmark( + runner=runner, + Validator=Validator, + ) + + def version(self, name): + return Version( + name=name, + path=self._root.joinpath("tests", name), + remotes=self._remotes(), + ) + + +@attr.s(hash=True) +class Version(object): + + _path = attr.ib() + _remotes = attr.ib() + + name = attr.ib() + + def benchmark(self, runner, **kwargs): # pragma: no cover + for suite in self.tests(): + for test in suite: + runner.bench_func( + test.fully_qualified_name, + partial(test.validate_ignoring_errors, **kwargs), + ) + + def tests(self): + return ( + test + for child in self._path.glob("*.json") + for test in self._tests_in( + subject=child.name[:-5], + path=child, + ) + ) + + def format_tests(self): + path = self._path.joinpath("optional", "format") + return ( + test + for child in path.glob("*.json") + for test in self._tests_in( + subject=child.name[:-5], + path=child, + ) + ) + + def optional_tests_of(self, name): + return self._tests_in( + subject=name, + path=self._path.joinpath("optional", name + ".json"), + ) + + def to_unittest_testcase(self, *suites, **kwargs): + name = kwargs.pop("name", "Test" + self.name.title().replace("-", "")) + methods = { + test.method_name: test.to_unittest_method(**kwargs) + for suite in suites + for tests in suite + for test in tests + } + cls = type(name, (unittest.TestCase,), methods) + + try: + cls.__module__ = 
_someone_save_us_the_module_of_the_caller() + except Exception: # pragma: no cover + # We're doing crazy things, so if they go wrong, like a function + # behaving differently on some other interpreter, just make them + # not happen. + pass + + return cls + + def _tests_in(self, subject, path): + for each in json.loads(path.read_text(encoding="utf-8")): + yield ( + _Test( + version=self, + subject=subject, + case_description=each["description"], + schema=each["schema"], + remotes=self._remotes, + **test, + ) for test in each["tests"] + ) + + +@attr.s(hash=True, repr=False) +class _Test(object): + + version = attr.ib() + + subject = attr.ib() + case_description = attr.ib() + description = attr.ib() + + data = attr.ib() + schema = attr.ib(repr=False) + + valid = attr.ib() + + _remotes = attr.ib() + + comment = attr.ib(default=None) + + def __repr__(self): # pragma: no cover + return "<Test {}>".format(self.fully_qualified_name) + + @property + def fully_qualified_name(self): # pragma: no cover + return " > ".join( + [ + self.version.name, + self.subject, + self.case_description, + self.description, + ], + ) + + @property + def method_name(self): + delimiters = r"[\W\- ]+" + return "test_{}_{}_{}".format( + re.sub(delimiters, "_", self.subject), + re.sub(delimiters, "_", self.case_description), + re.sub(delimiters, "_", self.description), + ) + + def to_unittest_method(self, skip=lambda test: None, **kwargs): + if self.valid: + def fn(this): + self.validate(**kwargs) + else: + def fn(this): + with this.assertRaises(jsonschema.ValidationError): + self.validate(**kwargs) + + fn.__name__ = self.method_name + reason = skip(self) + if reason is None: + return fn + elif os.environ.get("JSON_SCHEMA_EXPECTED_FAILURES", "0") != "0": + return unittest.expectedFailure(fn) + else: + return unittest.skip(reason)(fn) + + def validate(self, Validator, **kwargs): + Validator.check_schema(self.schema) + resolver = jsonschema.RefResolver.from_schema( + schema=self.schema, + store=self._remotes, + id_of=Validator.ID_OF, + ) + + # XXX: #693 asks to improve the public API for this, since yeah, it's + # bad. Figures that since it's hard for end-users, we experience + # the pain internally here too. + def prevent_network_access(uri): + raise RuntimeError(f"Tried to access the network: {uri}") + resolver.resolve_remote = prevent_network_access + + validator = Validator(schema=self.schema, resolver=resolver, **kwargs) + if os.environ.get("JSON_SCHEMA_DEBUG", "0") != "0": + breakpoint() + validator.validate(instance=self.data) + + def validate_ignoring_errors(self, Validator): # pragma: no cover + try: + self.validate(Validator=Validator) + except jsonschema.ValidationError: + pass + + +def _someone_save_us_the_module_of_the_caller(): + """ + The FQON of the module 2nd stack frames up from here. + + This is intended to allow us to dynamically return test case classes that + are indistinguishable from being defined in the module that wants them. + + Otherwise, trial will mis-print the FQON, and copy pasting it won't re-run + the class that really is running. + + Save us all, this is all so so so so so terrible. + """ + + return sys._getframe(2).f_globals["__name__"] diff --git a/vendor/jsonschema/jsonschema/tests/fuzz_validate.py b/vendor/jsonschema/jsonschema/tests/fuzz_validate.py new file mode 100644 index 00000000..c12e88bc --- /dev/null +++ b/vendor/jsonschema/jsonschema/tests/fuzz_validate.py @@ -0,0 +1,50 @@ +""" +Fuzzing setup for OSS-Fuzz.
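`_suite.py` above adapts the shared JSON-Schema-Test-Suite to `unittest`. A sketch of how its pieces compose, mirroring the way jsonschema's own test modules consume it (assumes a checkout of the suite is discoverable per `_find_suite`):

```python
import unittest

from jsonschema.tests._suite import Suite
from jsonschema.validators import Draft7Validator

SUITE = Suite()
DRAFT7 = SUITE.version(name="draft7")

# to_unittest_testcase() generates one test method per suite case;
# extra kwargs (here Validator=) flow through to _Test.validate().
TestDraft7 = DRAFT7.to_unittest_testcase(
    DRAFT7.tests(),
    Validator=Draft7Validator,
)

if __name__ == "__main__":
    unittest.main()
```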
+ +See https://github.com/google/oss-fuzz/tree/master/projects/jsonschema for the +other half of the setup here. +""" +import sys + +from hypothesis import given, strategies + +import jsonschema + +PRIM = strategies.one_of( + strategies.booleans(), + strategies.integers(), + strategies.floats(allow_nan=False, allow_infinity=False), + strategies.text(), +) +DICT = strategies.recursive( + base=strategies.one_of( + strategies.booleans(), + strategies.dictionaries(strategies.text(), PRIM), + ), + extend=lambda inner: strategies.dictionaries(strategies.text(), inner), +) + + +@given(obj1=DICT, obj2=DICT) +def test_schemas(obj1, obj2): + try: + jsonschema.validate(instance=obj1, schema=obj2) + except jsonschema.exceptions.ValidationError: + pass + except jsonschema.exceptions.SchemaError: + pass + + +def main(): + atheris.instrument_all() + atheris.Setup( + sys.argv, + test_schemas.hypothesis.fuzz_one_input, + enable_python_coverage=True, + ) + atheris.Fuzz() + + +if __name__ == "__main__": + import atheris + main() diff --git a/vendor/jsonschema/jsonschema/tests/test_cli.py b/vendor/jsonschema/jsonschema/tests/test_cli.py new file mode 100644 index 00000000..b0e08c45 --- /dev/null +++ b/vendor/jsonschema/jsonschema/tests/test_cli.py @@ -0,0 +1,913 @@ +from contextlib import redirect_stderr, redirect_stdout +from io import StringIO +from json import JSONDecodeError +from pathlib import Path +from textwrap import dedent +from unittest import TestCase +import json +import os +import subprocess +import sys +import tempfile + +try: # pragma: no cover + from importlib import metadata +except ImportError: # pragma: no cover + import importlib_metadata as metadata # type: ignore + +from pyrsistent import m + +from jsonschema import Draft4Validator, Draft202012Validator, cli +from jsonschema.exceptions import ( + RefResolutionError, + SchemaError, + ValidationError, +) +from jsonschema.validators import _LATEST_VERSION, validate + + +def fake_validator(*errors): + errors = list(reversed(errors)) + + class FakeValidator(object): + def __init__(self, *args, **kwargs): + pass + + def iter_errors(self, instance): + if errors: + return errors.pop() + return [] # pragma: no cover + + @classmethod + def check_schema(self, schema): + pass + + return FakeValidator + + +def fake_open(all_contents): + def open(path): + contents = all_contents.get(path) + if contents is None: + raise FileNotFoundError(path) + return StringIO(contents) + return open + + +def _message_for(non_json): + try: + json.loads(non_json) + except JSONDecodeError as error: + return str(error) + else: # pragma: no cover + raise RuntimeError("Tried and failed to capture a JSON dump error.") + + +class TestCLI(TestCase): + def run_cli( + self, argv, files=m(), stdin=StringIO(), exit_code=0, **override, + ): + arguments = cli.parse_args(argv) + arguments.update(override) + + self.assertFalse(hasattr(cli, "open")) + cli.open = fake_open(files) + try: + stdout, stderr = StringIO(), StringIO() + actual_exit_code = cli.run( + arguments, + stdin=stdin, + stdout=stdout, + stderr=stderr, + ) + finally: + del cli.open + + self.assertEqual( + actual_exit_code, exit_code, msg=dedent( + """ + Expected an exit code of {} != {}. 
+ + stdout: {} + + stderr: {} + """.format( + exit_code, + actual_exit_code, + stdout.getvalue(), + stderr.getvalue(), + ), + ), + ) + return stdout.getvalue(), stderr.getvalue() + + def assertOutputs(self, stdout="", stderr="", **kwargs): + self.assertEqual( + self.run_cli(**kwargs), + (dedent(stdout), dedent(stderr)), + ) + + def test_invalid_instance(self): + error = ValidationError("I am an error!", instance=12) + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_instance=json.dumps(error.instance), + ), + validator=fake_validator([error]), + + argv=["-i", "some_instance", "some_schema"], + + exit_code=1, + stderr="12: I am an error!\n", + ) + + def test_invalid_instance_pretty_output(self): + error = ValidationError("I am an error!", instance=12) + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_instance=json.dumps(error.instance), + ), + validator=fake_validator([error]), + + argv=["-i", "some_instance", "--output", "pretty", "some_schema"], + + exit_code=1, + stderr="""\ + ===[ValidationError]===(some_instance)=== + + I am an error! + ----------------------------- + """, + ) + + def test_invalid_instance_explicit_plain_output(self): + error = ValidationError("I am an error!", instance=12) + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_instance=json.dumps(error.instance), + ), + validator=fake_validator([error]), + + argv=["--output", "plain", "-i", "some_instance", "some_schema"], + + exit_code=1, + stderr="12: I am an error!\n", + ) + + def test_invalid_instance_multiple_errors(self): + instance = 12 + first = ValidationError("First error", instance=instance) + second = ValidationError("Second error", instance=instance) + + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_instance=json.dumps(instance), + ), + validator=fake_validator([first, second]), + + argv=["-i", "some_instance", "some_schema"], + + exit_code=1, + stderr="""\ + 12: First error + 12: Second error + """, + ) + + def test_invalid_instance_multiple_errors_pretty_output(self): + instance = 12 + first = ValidationError("First error", instance=instance) + second = ValidationError("Second error", instance=instance) + + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_instance=json.dumps(instance), + ), + validator=fake_validator([first, second]), + + argv=["-i", "some_instance", "--output", "pretty", "some_schema"], + + exit_code=1, + stderr="""\ + ===[ValidationError]===(some_instance)=== + + First error + ----------------------------- + ===[ValidationError]===(some_instance)=== + + Second error + ----------------------------- + """, + ) + + def test_multiple_invalid_instances(self): + first_instance = 12 + first_errors = [ + ValidationError("An error", instance=first_instance), + ValidationError("Another error", instance=first_instance), + ] + second_instance = "foo" + second_errors = [ValidationError("BOOM", instance=second_instance)] + + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_first_instance=json.dumps(first_instance), + some_second_instance=json.dumps(second_instance), + ), + validator=fake_validator(first_errors, second_errors), + + argv=[ + "-i", "some_first_instance", + "-i", "some_second_instance", + "some_schema", + ], + + exit_code=1, + stderr="""\ + 12: An error + 12: Another error + foo: 
BOOM + """, + ) + + def test_multiple_invalid_instances_pretty_output(self): + first_instance = 12 + first_errors = [ + ValidationError("An error", instance=first_instance), + ValidationError("Another error", instance=first_instance), + ] + second_instance = "foo" + second_errors = [ValidationError("BOOM", instance=second_instance)] + + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_first_instance=json.dumps(first_instance), + some_second_instance=json.dumps(second_instance), + ), + validator=fake_validator(first_errors, second_errors), + + argv=[ + "--output", "pretty", + "-i", "some_first_instance", + "-i", "some_second_instance", + "some_schema", + ], + + exit_code=1, + stderr="""\ + ===[ValidationError]===(some_first_instance)=== + + An error + ----------------------------- + ===[ValidationError]===(some_first_instance)=== + + Another error + ----------------------------- + ===[ValidationError]===(some_second_instance)=== + + BOOM + ----------------------------- + """, + ) + + def test_custom_error_format(self): + first_instance = 12 + first_errors = [ + ValidationError("An error", instance=first_instance), + ValidationError("Another error", instance=first_instance), + ] + second_instance = "foo" + second_errors = [ValidationError("BOOM", instance=second_instance)] + + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_first_instance=json.dumps(first_instance), + some_second_instance=json.dumps(second_instance), + ), + validator=fake_validator(first_errors, second_errors), + + argv=[ + "--error-format", ":{error.message}._-_.{error.instance}:", + "-i", "some_first_instance", + "-i", "some_second_instance", + "some_schema", + ], + + exit_code=1, + stderr=":An error._-_.12::Another error._-_.12::BOOM._-_.foo:", + ) + + def test_invalid_schema(self): + self.assertOutputs( + files=dict(some_schema='{"type": 12}'), + argv=["some_schema"], + + exit_code=1, + stderr="""\ + 12: 12 is not valid under any of the given schemas + """, + ) + + def test_invalid_schema_pretty_output(self): + schema = {"type": 12} + + with self.assertRaises(SchemaError) as e: + validate(schema=schema, instance="") + error = str(e.exception) + + self.assertOutputs( + files=dict(some_schema=json.dumps(schema)), + argv=["--output", "pretty", "some_schema"], + + exit_code=1, + stderr=( + "===[SchemaError]===(some_schema)===\n\n" + + str(error) + + "\n-----------------------------\n" + ), + ) + + def test_invalid_schema_multiple_errors(self): + self.assertOutputs( + files=dict(some_schema='{"type": 12, "items": 57}'), + argv=["some_schema"], + + exit_code=1, + stderr="""\ + 57: 57 is not of type 'object', 'boolean' + """, + ) + + def test_invalid_schema_multiple_errors_pretty_output(self): + schema = {"type": 12, "items": 57} + + with self.assertRaises(SchemaError) as e: + validate(schema=schema, instance="") + error = str(e.exception) + + self.assertOutputs( + files=dict(some_schema=json.dumps(schema)), + argv=["--output", "pretty", "some_schema"], + + exit_code=1, + stderr=( + "===[SchemaError]===(some_schema)===\n\n" + + str(error) + + "\n-----------------------------\n" + ), + ) + + def test_invalid_schema_with_invalid_instance(self): + """ + "Validating" an instance that's invalid under an invalid schema + just shows the schema error. 
+ """ + self.assertOutputs( + files=dict( + some_schema='{"type": 12, "minimum": 30}', + some_instance="13", + ), + argv=["-i", "some_instance", "some_schema"], + + exit_code=1, + stderr="""\ + 12: 12 is not valid under any of the given schemas + """, + ) + + def test_invalid_schema_with_invalid_instance_pretty_output(self): + instance, schema = 13, {"type": 12, "minimum": 30} + + with self.assertRaises(SchemaError) as e: + validate(schema=schema, instance=instance) + error = str(e.exception) + + self.assertOutputs( + files=dict( + some_schema=json.dumps(schema), + some_instance=json.dumps(instance), + ), + argv=["--output", "pretty", "-i", "some_instance", "some_schema"], + + exit_code=1, + stderr=( + "===[SchemaError]===(some_schema)===\n\n" + + str(error) + + "\n-----------------------------\n" + ), + ) + + def test_invalid_instance_continues_with_the_rest(self): + self.assertOutputs( + files=dict( + some_schema='{"minimum": 30}', + first_instance="not valid JSON!", + second_instance="12", + ), + argv=[ + "-i", "first_instance", + "-i", "second_instance", + "some_schema", + ], + + exit_code=1, + stderr="""\ + Failed to parse 'first_instance': {} + 12: 12 is less than the minimum of 30 + """.format(_message_for("not valid JSON!")), + ) + + def test_custom_error_format_applies_to_schema_errors(self): + instance, schema = 13, {"type": 12, "minimum": 30} + + with self.assertRaises(SchemaError): + validate(schema=schema, instance=instance) + + self.assertOutputs( + files=dict(some_schema=json.dumps(schema)), + + argv=[ + "--error-format", ":{error.message}._-_.{error.instance}:", + "some_schema", + ], + + exit_code=1, + stderr=":12 is not valid under any of the given schemas._-_.12:", + ) + + def test_instance_is_invalid_JSON(self): + instance = "not valid JSON!" + + self.assertOutputs( + files=dict(some_schema="{}", some_instance=instance), + argv=["-i", "some_instance", "some_schema"], + + exit_code=1, + stderr="""\ + Failed to parse 'some_instance': {} + """.format(_message_for(instance)), + ) + + def test_instance_is_invalid_JSON_pretty_output(self): + stdout, stderr = self.run_cli( + files=dict( + some_schema="{}", + some_instance="not valid JSON!", + ), + + argv=["--output", "pretty", "-i", "some_instance", "some_schema"], + + exit_code=1, + ) + self.assertFalse(stdout) + self.assertIn( + "(some_instance)===\n\nTraceback (most recent call last):\n", + stderr, + ) + self.assertNotIn("some_schema", stderr) + + def test_instance_is_invalid_JSON_on_stdin(self): + instance = "not valid JSON!" + + self.assertOutputs( + files=dict(some_schema="{}"), + stdin=StringIO(instance), + + argv=["some_schema"], + + exit_code=1, + stderr="""\ + Failed to parse : {} + """.format(_message_for(instance)), + ) + + def test_instance_is_invalid_JSON_on_stdin_pretty_output(self): + stdout, stderr = self.run_cli( + files=dict(some_schema="{}"), + stdin=StringIO("not valid JSON!"), + + argv=["--output", "pretty", "some_schema"], + + exit_code=1, + ) + self.assertFalse(stdout) + self.assertIn( + "()===\n\nTraceback (most recent call last):\n", + stderr, + ) + self.assertNotIn("some_schema", stderr) + + def test_schema_is_invalid_JSON(self): + schema = "not valid JSON!" 
+ + self.assertOutputs( + files=dict(some_schema=schema), + + argv=["some_schema"], + + exit_code=1, + stderr="""\ + Failed to parse 'some_schema': {} + """.format(_message_for(schema)), + ) + + def test_schema_is_invalid_JSON_pretty_output(self): + stdout, stderr = self.run_cli( + files=dict(some_schema="not valid JSON!"), + + argv=["--output", "pretty", "some_schema"], + + exit_code=1, + ) + self.assertFalse(stdout) + self.assertIn( + "(some_schema)===\n\nTraceback (most recent call last):\n", + stderr, + ) + + def test_schema_and_instance_are_both_invalid_JSON(self): + """ + Only the schema error is reported, as we abort immediately. + """ + schema, instance = "not valid JSON!", "also not valid JSON!" + self.assertOutputs( + files=dict(some_schema=schema, some_instance=instance), + + argv=["some_schema"], + + exit_code=1, + stderr="""\ + Failed to parse 'some_schema': {} + """.format(_message_for(schema)), + ) + + def test_schema_and_instance_are_both_invalid_JSON_pretty_output(self): + """ + Only the schema error is reported, as we abort immediately. + """ + stdout, stderr = self.run_cli( + files=dict( + some_schema="not valid JSON!", + some_instance="also not valid JSON!", + ), + + argv=["--output", "pretty", "-i", "some_instance", "some_schema"], + + exit_code=1, + ) + self.assertFalse(stdout) + self.assertIn( + "(some_schema)===\n\nTraceback (most recent call last):\n", + stderr, + ) + self.assertNotIn("some_instance", stderr) + + def test_instance_does_not_exist(self): + self.assertOutputs( + files=dict(some_schema="{}"), + argv=["-i", "nonexisting_instance", "some_schema"], + + exit_code=1, + stderr="""\ + 'nonexisting_instance' does not exist. + """, + ) + + def test_instance_does_not_exist_pretty_output(self): + self.assertOutputs( + files=dict(some_schema="{}"), + argv=[ + "--output", "pretty", + "-i", "nonexisting_instance", + "some_schema", + ], + + exit_code=1, + stderr="""\ + ===[FileNotFoundError]===(nonexisting_instance)=== + + 'nonexisting_instance' does not exist. + ----------------------------- + """, + ) + + def test_schema_does_not_exist(self): + self.assertOutputs( + argv=["nonexisting_schema"], + + exit_code=1, + stderr="'nonexisting_schema' does not exist.\n", + ) + + def test_schema_does_not_exist_pretty_output(self): + self.assertOutputs( + argv=["--output", "pretty", "nonexisting_schema"], + + exit_code=1, + stderr="""\ + ===[FileNotFoundError]===(nonexisting_schema)=== + + 'nonexisting_schema' does not exist. + ----------------------------- + """, + ) + + def test_neither_instance_nor_schema_exist(self): + self.assertOutputs( + argv=["-i", "nonexisting_instance", "nonexisting_schema"], + + exit_code=1, + stderr="'nonexisting_schema' does not exist.\n", + ) + + def test_neither_instance_nor_schema_exist_pretty_output(self): + self.assertOutputs( + argv=[ + "--output", "pretty", + "-i", "nonexisting_instance", + "nonexisting_schema", + ], + + exit_code=1, + stderr="""\ + ===[FileNotFoundError]===(nonexisting_schema)=== + + 'nonexisting_schema' does not exist. 
+ ----------------------------- + """, + ) + + def test_successful_validation(self): + self.assertOutputs( + files=dict(some_schema="{}", some_instance="{}"), + argv=["-i", "some_instance", "some_schema"], + stdout="", + stderr="", + ) + + def test_successful_validation_pretty_output(self): + self.assertOutputs( + files=dict(some_schema="{}", some_instance="{}"), + argv=["--output", "pretty", "-i", "some_instance", "some_schema"], + stdout="===[SUCCESS]===(some_instance)===\n", + stderr="", + ) + + def test_successful_validation_of_stdin(self): + self.assertOutputs( + files=dict(some_schema="{}"), + stdin=StringIO("{}"), + argv=["some_schema"], + stdout="", + stderr="", + ) + + def test_successful_validation_of_stdin_pretty_output(self): + self.assertOutputs( + files=dict(some_schema="{}"), + stdin=StringIO("{}"), + argv=["--output", "pretty", "some_schema"], + stdout="===[SUCCESS]===(<stdin>)===\n", + stderr="", + ) + + def test_successful_validation_of_just_the_schema(self): + self.assertOutputs( + files=dict(some_schema="{}", some_instance="{}"), + argv=["-i", "some_instance", "some_schema"], + stdout="", + stderr="", + ) + + def test_successful_validation_of_just_the_schema_pretty_output(self): + self.assertOutputs( + files=dict(some_schema="{}", some_instance="{}"), + argv=["--output", "pretty", "-i", "some_instance", "some_schema"], + stdout="===[SUCCESS]===(some_instance)===\n", + stderr="", + ) + + def test_successful_validation_via_explicit_base_uri(self): + ref_schema_file = tempfile.NamedTemporaryFile(delete=False) + ref_schema_file.close() + self.addCleanup(os.remove, ref_schema_file.name) + + ref_path = Path(ref_schema_file.name) + ref_path.write_text('{"definitions": {"num": {"type": "integer"}}}') + + schema = f'{{"$ref": "{ref_path.name}#definitions/num"}}' + + self.assertOutputs( + files=dict(some_schema=schema, some_instance="1"), + argv=[ + "-i", "some_instance", + "--base-uri", ref_path.parent.as_uri() + "/", + "some_schema", + ], + stdout="", + stderr="", + ) + + def test_unsuccessful_validation_via_explicit_base_uri(self): + ref_schema_file = tempfile.NamedTemporaryFile(delete=False) + ref_schema_file.close() + self.addCleanup(os.remove, ref_schema_file.name) + + ref_path = Path(ref_schema_file.name) + ref_path.write_text('{"definitions": {"num": {"type": "integer"}}}') + + schema = f'{{"$ref": "{ref_path.name}#definitions/num"}}' + + self.assertOutputs( + files=dict(some_schema=schema, some_instance='"1"'), + argv=[ + "-i", "some_instance", + "--base-uri", ref_path.parent.as_uri() + "/", + "some_schema", + ], + exit_code=1, + stdout="", + stderr="1: '1' is not of type 'integer'\n", + ) + + def test_nonexistent_file_with_explicit_base_uri(self): + schema = '{"$ref": "someNonexistentFile.json#definitions/num"}' + instance = "1" + + with self.assertRaises(RefResolutionError) as e: + self.assertOutputs( + files=dict( + some_schema=schema, + some_instance=instance, + ), + argv=[ + "-i", "some_instance", + "--base-uri", Path.cwd().as_uri(), + "some_schema", + ], + ) + error = str(e.exception) + self.assertIn(f"{os.sep}someNonexistentFile.json'", error) + + def test_invalid_explicit_base_uri(self): + schema = '{"$ref": "foo.json#definitions/num"}' + instance = "1" + + with self.assertRaises(RefResolutionError) as e: + self.assertOutputs( + files=dict( + some_schema=schema, + some_instance=instance, + ), + argv=[ + "-i", "some_instance", + "--base-uri", "not@UR1", + "some_schema", + ], + ) + error = str(e.exception) + self.assertEqual( + error, "unknown url type: 'foo.json'", + ) + 
+ def test_it_validates_using_the_latest_validator_when_unspecified(self): + # There isn't a better way now I can think of to ensure that the + # latest version was used, given that the call to validator_for + # is hidden inside the CLI, so guard that that's the case, and + # this test will have to be updated when versions change until + # we can think of a better way to ensure this behavior. + self.assertIs(Draft202012Validator, _LATEST_VERSION) + + self.assertOutputs( + files=dict(some_schema='{"const": "check"}', some_instance='"a"'), + argv=["-i", "some_instance", "some_schema"], + exit_code=1, + stdout="", + stderr="a: 'check' was expected\n", + ) + + def test_it_validates_using_draft7_when_specified(self): + """ + Specifically, `const` validation applies for Draft 7. + """ + schema = """ + { + "$schema": "http://json-schema.org/draft-07/schema#", + "const": "check" + } + """ + instance = '"foo"' + self.assertOutputs( + files=dict(some_schema=schema, some_instance=instance), + argv=["-i", "some_instance", "some_schema"], + exit_code=1, + stdout="", + stderr="foo: 'check' was expected\n", + ) + + def test_it_validates_using_draft4_when_specified(self): + """ + Specifically, `const` validation *does not* apply for Draft 4. + """ + schema = """ + { + "$schema": "http://json-schema.org/draft-04/schema#", + "const": "check" + } + """ + instance = '"foo"' + self.assertOutputs( + files=dict(some_schema=schema, some_instance=instance), + argv=["-i", "some_instance", "some_schema"], + stdout="", + stderr="", + ) + + +class TestParser(TestCase): + + FakeValidator = fake_validator() + + def test_find_validator_by_fully_qualified_object_name(self): + arguments = cli.parse_args( + [ + "--validator", + "jsonschema.tests.test_cli.TestParser.FakeValidator", + "--instance", "mem://some/instance", + "mem://some/schema", + ], + ) + self.assertIs(arguments["validator"], self.FakeValidator) + + def test_find_validator_in_jsonschema(self): + arguments = cli.parse_args( + [ + "--validator", "Draft4Validator", + "--instance", "mem://some/instance", + "mem://some/schema", + ], + ) + self.assertIs(arguments["validator"], Draft4Validator) + + def cli_output_for(self, *argv): + stdout, stderr = StringIO(), StringIO() + with redirect_stdout(stdout), redirect_stderr(stderr): + with self.assertRaises(SystemExit): + cli.parse_args(argv) + return stdout.getvalue(), stderr.getvalue() + + def test_unknown_output(self): + stdout, stderr = self.cli_output_for( + "--output", "foo", + "mem://some/schema", + ) + self.assertIn("invalid choice: 'foo'", stderr) + self.assertFalse(stdout) + + def test_useless_error_format(self): + stdout, stderr = self.cli_output_for( + "--output", "pretty", + "--error-format", "foo", + "mem://some/schema", + ) + self.assertIn( + "--error-format can only be used with --output plain", + stderr, + ) + self.assertFalse(stdout) + + +class TestCLIIntegration(TestCase): + def test_license(self): + output = subprocess.check_output( + [sys.executable, "-m", "pip", "show", "jsonschema"], + stderr=subprocess.STDOUT, + ) + self.assertIn(b"License: MIT", output) + + def test_version(self): + version = subprocess.check_output( + [sys.executable, "-m", "jsonschema", "--version"], + stderr=subprocess.STDOUT, + ) + version = version.decode("utf-8").strip() + self.assertEqual(version, metadata.version("jsonschema")) + + def test_no_arguments_shows_usage_notes(self): + output = subprocess.check_output( + [sys.executable, "-m", "jsonschema"], + stderr=subprocess.STDOUT, + ) + output_for_help = 
subprocess.check_output( + [sys.executable, "-m", "jsonschema", "--help"], + stderr=subprocess.STDOUT, + ) + self.assertEqual(output, output_for_help) diff --git a/vendor/jsonschema/jsonschema/tests/test_deprecations.py b/vendor/jsonschema/jsonschema/tests/test_deprecations.py new file mode 100644 index 00000000..58fd050a --- /dev/null +++ b/vendor/jsonschema/jsonschema/tests/test_deprecations.py @@ -0,0 +1,123 @@ +from unittest import TestCase + +from jsonschema import validators + + +class TestDeprecations(TestCase): + def test_version(self): + """ + As of v4.0.0, __version__ is deprecated in favor of importlib.metadata. + """ + + with self.assertWarns(DeprecationWarning) as w: + from jsonschema import __version__ # noqa + + self.assertEqual(w.filename, __file__) + self.assertTrue( + str(w.warning).startswith( + "Accessing jsonschema.__version__ is deprecated", + ), + ) + + def test_validators_ErrorTree(self): + """ + As of v4.0.0, importing ErrorTree from jsonschema.validators is + deprecated in favor of doing so from jsonschema.exceptions. + """ + + with self.assertWarns(DeprecationWarning) as w: + from jsonschema.validators import ErrorTree # noqa + + self.assertEqual(w.filename, __file__) + self.assertTrue( + str(w.warning).startswith( + "Importing ErrorTree from jsonschema.validators is deprecated", + ), + ) + + def test_validators_validators(self): + """ + As of v4.0.0, accessing jsonschema.validators.validators is + deprecated. + """ + + with self.assertWarns(DeprecationWarning) as w: + value = validators.validators + self.assertEqual(value, validators._VALIDATORS) + + self.assertEqual(w.filename, __file__) + self.assertTrue( + str(w.warning).startswith( + "Accessing jsonschema.validators.validators is deprecated", + ), + ) + + def test_validators_meta_schemas(self): + """ + As of v4.0.0, accessing jsonschema.validators.meta_schemas is + deprecated. + """ + + with self.assertWarns(DeprecationWarning) as w: + value = validators.meta_schemas + self.assertEqual(value, validators._META_SCHEMAS) + + self.assertEqual(w.filename, __file__) + self.assertTrue( + str(w.warning).startswith( + "Accessing jsonschema.validators.meta_schemas is deprecated", + ), + ) + + def test_RefResolver_in_scope(self): + """ + As of v4.0.0, RefResolver.in_scope is deprecated. + """ + + resolver = validators.RefResolver.from_schema({}) + with self.assertWarns(DeprecationWarning) as w: + with resolver.in_scope("foo"): + pass + + self.assertEqual(w.filename, __file__) + self.assertTrue( + str(w.warning).startswith( + "jsonschema.RefResolver.in_scope is deprecated ", + ), + ) + + def test_Validator_is_valid_two_arguments(self): + """ + As of v4.0.0, calling is_valid with two arguments (to provide a + different schema) is deprecated. + """ + + validator = validators.Draft7Validator({}) + with self.assertWarns(DeprecationWarning) as w: + result = validator.is_valid("foo", {"type": "number"}) + + self.assertFalse(result) + self.assertEqual(w.filename, __file__) + self.assertTrue( + str(w.warning).startswith( + "Passing a schema to Validator.is_valid is deprecated ", + ), + ) + + def test_Validator_iter_errors_two_arguments(self): + """ + As of v4.0.0, calling iter_errors with two arguments (to provide a + different schema) is deprecated. 
+ """ + + validator = validators.Draft7Validator({}) + with self.assertWarns(DeprecationWarning) as w: + error, = validator.iter_errors("foo", {"type": "number"}) + + self.assertEqual(error.validator, "type") + self.assertEqual(w.filename, __file__) + self.assertTrue( + str(w.warning).startswith( + "Passing a schema to Validator.iter_errors is deprecated ", + ), + ) diff --git a/vendor/jsonschema/jsonschema/tests/test_exceptions.py b/vendor/jsonschema/jsonschema/tests/test_exceptions.py new file mode 100644 index 00000000..521f445d --- /dev/null +++ b/vendor/jsonschema/jsonschema/tests/test_exceptions.py @@ -0,0 +1,587 @@ +from unittest import TestCase +import textwrap + +from jsonschema import exceptions +from jsonschema.validators import _LATEST_VERSION + + +class TestBestMatch(TestCase): + def best_match_of(self, instance, schema): + errors = list(_LATEST_VERSION(schema).iter_errors(instance)) + best = exceptions.best_match(iter(errors)) + reversed_best = exceptions.best_match(reversed(errors)) + self.assertEqual( + best._contents(), + reversed_best._contents(), + f"No consistent best match!\nGot: {best}\n\nThen: {reversed_best}", + ) + return best + + def test_shallower_errors_are_better_matches(self): + schema = { + "properties": { + "foo": { + "minProperties": 2, + "properties": {"bar": {"type": "object"}}, + }, + }, + } + best = self.best_match_of(instance={"foo": {"bar": []}}, schema=schema) + self.assertEqual(best.validator, "minProperties") + + def test_oneOf_and_anyOf_are_weak_matches(self): + """ + A property you *must* match is probably better than one you have to + match a part of. + """ + + schema = { + "minProperties": 2, + "anyOf": [{"type": "string"}, {"type": "number"}], + "oneOf": [{"type": "string"}, {"type": "number"}], + } + best = self.best_match_of(instance={}, schema=schema) + self.assertEqual(best.validator, "minProperties") + + def test_if_the_most_relevant_error_is_anyOf_it_is_traversed(self): + """ + If the most relevant error is an anyOf, then we traverse its context + and select the otherwise *least* relevant error, since in this case + that means the most specific, deep, error inside the instance. + + I.e. since only one of the schemas must match, we look for the most + relevant one. + """ + + schema = { + "properties": { + "foo": { + "anyOf": [ + {"type": "string"}, + {"properties": {"bar": {"type": "array"}}}, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": {"bar": 12}}, schema=schema) + self.assertEqual(best.validator_value, "array") + + def test_no_anyOf_traversal_for_equally_relevant_errors(self): + """ + We don't traverse into an anyOf (as above) if all of its context errors + seem to be equally "wrong" against the instance. + """ + + schema = { + "anyOf": [ + {"type": "string"}, + {"type": "integer"}, + {"type": "object"}, + ], + } + best = self.best_match_of(instance=[], schema=schema) + self.assertEqual(best.validator, "anyOf") + + def test_anyOf_traversal_for_single_equally_relevant_error(self): + """ + We *do* traverse anyOf with a single nested error, even though it is + vacuously equally relevant to itself. 
+ """ + + schema = { + "anyOf": [ + {"type": "string"}, + ], + } + best = self.best_match_of(instance=[], schema=schema) + self.assertEqual(best.validator, "type") + + def test_if_the_most_relevant_error_is_oneOf_it_is_traversed(self): + """ + If the most relevant error is an oneOf, then we traverse its context + and select the otherwise *least* relevant error, since in this case + that means the most specific, deep, error inside the instance. + + I.e. since only one of the schemas must match, we look for the most + relevant one. + """ + + schema = { + "properties": { + "foo": { + "oneOf": [ + {"type": "string"}, + {"properties": {"bar": {"type": "array"}}}, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": {"bar": 12}}, schema=schema) + self.assertEqual(best.validator_value, "array") + + def test_no_oneOf_traversal_for_equally_relevant_errors(self): + """ + We don't traverse into an oneOf (as above) if all of its context errors + seem to be equally "wrong" against the instance. + """ + + schema = { + "oneOf": [ + {"type": "string"}, + {"type": "integer"}, + {"type": "object"}, + ], + } + best = self.best_match_of(instance=[], schema=schema) + self.assertEqual(best.validator, "oneOf") + + def test_oneOf_traversal_for_single_equally_relevant_error(self): + """ + We *do* traverse oneOf with a single nested error, even though it is + vacuously equally relevant to itself. + """ + + schema = { + "oneOf": [ + {"type": "string"}, + ], + } + best = self.best_match_of(instance=[], schema=schema) + self.assertEqual(best.validator, "type") + + def test_if_the_most_relevant_error_is_allOf_it_is_traversed(self): + """ + Now, if the error is allOf, we traverse but select the *most* relevant + error from the context, because all schemas here must match anyways. + """ + + schema = { + "properties": { + "foo": { + "allOf": [ + {"type": "string"}, + {"properties": {"bar": {"type": "array"}}}, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": {"bar": 12}}, schema=schema) + self.assertEqual(best.validator_value, "string") + + def test_nested_context_for_oneOf(self): + """ + We traverse into nested contexts (a oneOf containing an error in a + nested oneOf here). 
+ """ + + schema = { + "properties": { + "foo": { + "oneOf": [ + {"type": "string"}, + { + "oneOf": [ + {"type": "string"}, + { + "properties": { + "bar": {"type": "array"}, + }, + }, + ], + }, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": {"bar": 12}}, schema=schema) + self.assertEqual(best.validator_value, "array") + + def test_it_prioritizes_matching_types(self): + schema = { + "properties": { + "foo": { + "anyOf": [ + {"type": "array", "minItems": 2}, + {"type": "string", "minLength": 10}, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": "bar"}, schema=schema) + self.assertEqual(best.validator, "minLength") + + reordered = { + "properties": { + "foo": { + "anyOf": [ + {"type": "string", "minLength": 10}, + {"type": "array", "minItems": 2}, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": "bar"}, schema=reordered) + self.assertEqual(best.validator, "minLength") + + def test_it_prioritizes_matching_union_types(self): + schema = { + "properties": { + "foo": { + "anyOf": [ + {"type": ["array", "object"], "minItems": 2}, + {"type": ["integer", "string"], "minLength": 10}, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": "bar"}, schema=schema) + self.assertEqual(best.validator, "minLength") + + reordered = { + "properties": { + "foo": { + "anyOf": [ + {"type": "string", "minLength": 10}, + {"type": "array", "minItems": 2}, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": "bar"}, schema=reordered) + self.assertEqual(best.validator, "minLength") + + def test_boolean_schemas(self): + schema = {"properties": {"foo": False}} + best = self.best_match_of(instance={"foo": "bar"}, schema=schema) + self.assertIsNone(best.validator) + + def test_one_error(self): + validator = _LATEST_VERSION({"minProperties": 2}) + error, = validator.iter_errors({}) + self.assertEqual( + exceptions.best_match(validator.iter_errors({})).validator, + "minProperties", + ) + + def test_no_errors(self): + validator = _LATEST_VERSION({}) + self.assertIsNone(exceptions.best_match(validator.iter_errors({}))) + + +class TestByRelevance(TestCase): + def test_short_paths_are_better_matches(self): + shallow = exceptions.ValidationError("Oh no!", path=["baz"]) + deep = exceptions.ValidationError("Oh yes!", path=["foo", "bar"]) + match = max([shallow, deep], key=exceptions.relevance) + self.assertIs(match, shallow) + + match = max([deep, shallow], key=exceptions.relevance) + self.assertIs(match, shallow) + + def test_global_errors_are_even_better_matches(self): + shallow = exceptions.ValidationError("Oh no!", path=[]) + deep = exceptions.ValidationError("Oh yes!", path=["foo"]) + + errors = sorted([shallow, deep], key=exceptions.relevance) + self.assertEqual( + [list(error.path) for error in errors], + [["foo"], []], + ) + + errors = sorted([deep, shallow], key=exceptions.relevance) + self.assertEqual( + [list(error.path) for error in errors], + [["foo"], []], + ) + + def test_weak_keywords_are_lower_priority(self): + weak = exceptions.ValidationError("Oh no!", path=[], validator="a") + normal = exceptions.ValidationError("Oh yes!", path=[], validator="b") + + best_match = exceptions.by_relevance(weak="a") + + match = max([weak, normal], key=best_match) + self.assertIs(match, normal) + + match = max([normal, weak], key=best_match) + self.assertIs(match, normal) + + def test_strong_keywords_are_higher_priority(self): + weak = exceptions.ValidationError("Oh no!", path=[], validator="a") + normal = exceptions.ValidationError("Oh yes!", path=[], validator="b") 
+ strong = exceptions.ValidationError("Oh fine!", path=[], validator="c") + + best_match = exceptions.by_relevance(weak="a", strong="c") + + match = max([weak, normal, strong], key=best_match) + self.assertIs(match, strong) + + match = max([strong, normal, weak], key=best_match) + self.assertIs(match, strong) + + +class TestErrorTree(TestCase): + def test_it_knows_how_many_total_errors_it_contains(self): + # FIXME: #442 + errors = [ + exceptions.ValidationError("Something", validator=i) + for i in range(8) + ] + tree = exceptions.ErrorTree(errors) + self.assertEqual(tree.total_errors, 8) + + def test_it_contains_an_item_if_the_item_had_an_error(self): + errors = [exceptions.ValidationError("a message", path=["bar"])] + tree = exceptions.ErrorTree(errors) + self.assertIn("bar", tree) + + def test_it_does_not_contain_an_item_if_the_item_had_no_error(self): + errors = [exceptions.ValidationError("a message", path=["bar"])] + tree = exceptions.ErrorTree(errors) + self.assertNotIn("foo", tree) + + def test_keywords_that_failed_appear_in_errors_dict(self): + error = exceptions.ValidationError("a message", validator="foo") + tree = exceptions.ErrorTree([error]) + self.assertEqual(tree.errors, {"foo": error}) + + def test_it_creates_a_child_tree_for_each_nested_path(self): + errors = [ + exceptions.ValidationError("a bar message", path=["bar"]), + exceptions.ValidationError("a bar -> 0 message", path=["bar", 0]), + ] + tree = exceptions.ErrorTree(errors) + self.assertIn(0, tree["bar"]) + self.assertNotIn(1, tree["bar"]) + + def test_children_have_their_errors_dicts_built(self): + e1, e2 = ( + exceptions.ValidationError("1", validator="foo", path=["bar", 0]), + exceptions.ValidationError("2", validator="quux", path=["bar", 0]), + ) + tree = exceptions.ErrorTree([e1, e2]) + self.assertEqual(tree["bar"][0].errors, {"foo": e1, "quux": e2}) + + def test_multiple_errors_with_instance(self): + e1, e2 = ( + exceptions.ValidationError( + "1", + validator="foo", + path=["bar", "bar2"], + instance="i1"), + exceptions.ValidationError( + "2", + validator="quux", + path=["foobar", 2], + instance="i2"), + ) + exceptions.ErrorTree([e1, e2]) + + def test_it_does_not_contain_subtrees_that_are_not_in_the_instance(self): + error = exceptions.ValidationError("123", validator="foo", instance=[]) + tree = exceptions.ErrorTree([error]) + + with self.assertRaises(IndexError): + tree[0] + + def test_if_its_in_the_tree_anyhow_it_does_not_raise_an_error(self): + """ + If a keyword refers to a path that isn't in the instance, the + tree still properly returns a subtree for that path. 
+ """ + + error = exceptions.ValidationError( + "a message", validator="foo", instance={}, path=["foo"], + ) + tree = exceptions.ErrorTree([error]) + self.assertIsInstance(tree["foo"], exceptions.ErrorTree) + + def test_repr(self): + e1, e2 = ( + exceptions.ValidationError( + "1", + validator="foo", + path=["bar", "bar2"], + instance="i1"), + exceptions.ValidationError( + "2", + validator="quux", + path=["foobar", 2], + instance="i2"), + ) + tree = exceptions.ErrorTree([e1, e2]) + self.assertEqual(repr(tree), "") + + +class TestErrorInitReprStr(TestCase): + def make_error(self, **kwargs): + defaults = dict( + message="hello", + validator="type", + validator_value="string", + instance=5, + schema={"type": "string"}, + ) + defaults.update(kwargs) + return exceptions.ValidationError(**defaults) + + def assertShows(self, expected, **kwargs): + expected = textwrap.dedent(expected).rstrip("\n") + + error = self.make_error(**kwargs) + message_line, _, rest = str(error).partition("\n") + self.assertEqual(message_line, error.message) + self.assertEqual(rest, expected) + + def test_it_calls_super_and_sets_args(self): + error = self.make_error() + self.assertGreater(len(error.args), 1) + + def test_repr(self): + self.assertEqual( + repr(exceptions.ValidationError(message="Hello!")), + "", + ) + + def test_unset_error(self): + error = exceptions.ValidationError("message") + self.assertEqual(str(error), "message") + + kwargs = { + "validator": "type", + "validator_value": "string", + "instance": 5, + "schema": {"type": "string"}, + } + # Just the message should show if any of the attributes are unset + for attr in kwargs: + k = dict(kwargs) + del k[attr] + error = exceptions.ValidationError("message", **k) + self.assertEqual(str(error), "message") + + def test_empty_paths(self): + self.assertShows( + """ + Failed validating 'type' in schema: + {'type': 'string'} + + On instance: + 5 + """, + path=[], + schema_path=[], + ) + + def test_one_item_paths(self): + self.assertShows( + """ + Failed validating 'type' in schema: + {'type': 'string'} + + On instance[0]: + 5 + """, + path=[0], + schema_path=["items"], + ) + + def test_multiple_item_paths(self): + self.assertShows( + """ + Failed validating 'type' in schema['items'][0]: + {'type': 'string'} + + On instance[0]['a']: + 5 + """, + path=[0, "a"], + schema_path=["items", 0, 1], + ) + + def test_uses_pprint(self): + self.assertShows( + """ + Failed validating 'maxLength' in schema: + {0: 0, + 1: 1, + 2: 2, + 3: 3, + 4: 4, + 5: 5, + 6: 6, + 7: 7, + 8: 8, + 9: 9, + 10: 10, + 11: 11, + 12: 12, + 13: 13, + 14: 14, + 15: 15, + 16: 16, + 17: 17, + 18: 18, + 19: 19} + + On instance: + [0, + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 17, + 18, + 19, + 20, + 21, + 22, + 23, + 24] + """, + instance=list(range(25)), + schema=dict(zip(range(20), range(20))), + validator="maxLength", + ) + + def test_str_works_with_instances_having_overriden_eq_operator(self): + """ + Check for #164 which rendered exceptions unusable when a + `ValidationError` involved instances with an `__eq__` method + that returned truthy values. 
+ """ + + class DontEQMeBro(object): + def __eq__(this, other): # pragma: no cover + self.fail("Don't!") + + def __ne__(this, other): # pragma: no cover + self.fail("Don't!") + + instance = DontEQMeBro() + error = exceptions.ValidationError( + "a message", + validator="foo", + instance=instance, + validator_value="some", + schema="schema", + ) + self.assertIn(repr(instance), str(error)) + + +class TestHashable(TestCase): + def test_hashable(self): + set([exceptions.ValidationError("")]) + set([exceptions.SchemaError("")]) diff --git a/vendor/jsonschema/jsonschema/tests/test_format.py b/vendor/jsonschema/jsonschema/tests/test_format.py new file mode 100644 index 00000000..f6222655 --- /dev/null +++ b/vendor/jsonschema/jsonschema/tests/test_format.py @@ -0,0 +1,107 @@ +""" +Tests for the parts of jsonschema related to the :kw:`format` keyword. +""" + +from unittest import TestCase + +from jsonschema import FormatChecker, FormatError, ValidationError +from jsonschema.validators import Draft4Validator + +BOOM = ValueError("Boom!") +BANG = ZeroDivisionError("Bang!") + + +def boom(thing): + if thing == "bang": + raise BANG + raise BOOM + + +class TestFormatChecker(TestCase): + def test_it_can_validate_no_formats(self): + checker = FormatChecker(formats=()) + self.assertFalse(checker.checkers) + + def test_it_raises_a_key_error_for_unknown_formats(self): + with self.assertRaises(KeyError): + FormatChecker(formats=["o noes"]) + + def test_it_can_register_cls_checkers(self): + original = dict(FormatChecker.checkers) + self.addCleanup(FormatChecker.checkers.pop, "boom") + FormatChecker.cls_checks("boom")(boom) + self.assertEqual( + FormatChecker.checkers, + dict(original, boom=(boom, ())), + ) + + def test_it_can_register_checkers(self): + checker = FormatChecker() + checker.checks("boom")(boom) + self.assertEqual( + checker.checkers, + dict(FormatChecker.checkers, boom=(boom, ())), + ) + + def test_it_catches_registered_errors(self): + checker = FormatChecker() + checker.checks("boom", raises=type(BOOM))(boom) + + with self.assertRaises(FormatError) as cm: + checker.check(instance=12, format="boom") + + self.assertIs(cm.exception.cause, BOOM) + self.assertIs(cm.exception.__cause__, BOOM) + + # Unregistered errors should not be caught + with self.assertRaises(type(BANG)): + checker.check(instance="bang", format="boom") + + def test_format_error_causes_become_validation_error_causes(self): + checker = FormatChecker() + checker.checks("boom", raises=ValueError)(boom) + validator = Draft4Validator({"format": "boom"}, format_checker=checker) + + with self.assertRaises(ValidationError) as cm: + validator.validate("BOOM") + + self.assertIs(cm.exception.cause, BOOM) + self.assertIs(cm.exception.__cause__, BOOM) + + def test_format_checkers_come_with_defaults(self): + # This is bad :/ but relied upon. + # The docs for quite awhile recommended people do things like + # validate(..., format_checker=FormatChecker()) + # We should change that, but we can't without deprecation... 
+ checker = FormatChecker() + with self.assertRaises(FormatError): + checker.check(instance="not-an-ipv4", format="ipv4") + + def test_repr(self): + checker = FormatChecker(formats=()) + checker.checks("foo")(lambda thing: True) # pragma: no cover + checker.checks("bar")(lambda thing: True) # pragma: no cover + checker.checks("baz")(lambda thing: True) # pragma: no cover + self.assertEqual( + repr(checker), + "<FormatChecker checkers=['bar', 'baz', 'foo']>", + ) + + def test_duration_format(self): + try: + from jsonschema._format import is_duration # noqa: F401 + except ImportError: # pragma: no cover + pass + else: + checker = FormatChecker() + self.assertTrue(checker.conforms(1, "duration")) + self.assertTrue(checker.conforms("P4Y", "duration")) + self.assertFalse(checker.conforms("test", "duration")) + + def test_uuid_format(self): + checker = FormatChecker() + self.assertTrue(checker.conforms(1, "uuid")) + self.assertTrue( + checker.conforms("6e6659ec-4503-4428-9f03-2e2ea4d6c278", "uuid"), + ) + self.assertFalse(checker.conforms("test", "uuid")) diff --git a/vendor/jsonschema/jsonschema/tests/test_jsonschema_test_suite.py b/vendor/jsonschema/jsonschema/tests/test_jsonschema_test_suite.py new file mode 100644 index 00000000..a09357f2 --- /dev/null +++ b/vendor/jsonschema/jsonschema/tests/test_jsonschema_test_suite.py @@ -0,0 +1,626 @@ +""" +Test runner for the JSON Schema official test suite + +Tests comprehensive correctness of each draft's validator. + +See https://github.com/json-schema-org/JSON-Schema-Test-Suite for details. +""" + +import sys + +from jsonschema.tests._helpers import bug, test_suite_bug +from jsonschema.tests._suite import Suite +import jsonschema + +SUITE = Suite() +DRAFT3 = SUITE.version(name="draft3") +DRAFT4 = SUITE.version(name="draft4") +DRAFT6 = SUITE.version(name="draft6") +DRAFT7 = SUITE.version(name="draft7") +DRAFT201909 = SUITE.version(name="draft2019-09") +DRAFT202012 = SUITE.version(name="draft2020-12") + + +def skip(message, **kwargs): + def skipper(test): + if all(value == getattr(test, attr) for attr, value in kwargs.items()): + return message + return skipper + + +def missing_format(checker): + def missing_format(test): # pragma: no cover + schema = test.schema + if ( + schema is True + or schema is False + or "format" not in schema + or schema["format"] in checker.checkers + or test.valid + ): + return + + return "Format checker {0!r} not found.".format(schema["format"]) + return missing_format + + +def complex_email_validation(test): + if test.subject != "email": + return + + message = "Complex email validation is (intentionally) unsupported." + return skip( + message=message, + description="an invalid domain", + )(test) or skip( + message=message, + description="an invalid IPv4-address-literal", + )(test) or skip( + message=message, + description="dot after local part is not valid", + )(test) or skip( + message=message, + description="dot before local part is not valid", + )(test) or skip( + message=message, + description="two subsequent dots inside local part are not valid", + )(test) + + +is_narrow_build = sys.maxunicode == 2 ** 16 - 1 +if is_narrow_build: # pragma: no cover + message = "Not running surrogate Unicode case, this Python is narrow."
+ + def narrow_unicode_build(test): # pragma: no cover + return skip( + message=message, + description=( + "one supplementary Unicode code point is not long enough" + ), + )(test) or skip( + message=message, + description="two supplementary Unicode code points is long enough", + )(test) +else: + def narrow_unicode_build(test): # pragma: no cover + return + + +if sys.version_info < (3, 9): # pragma: no cover + message = "Rejecting leading zeros is 3.9+" + allowed_leading_zeros = skip( + message=message, + subject="ipv4", + description="invalid leading zeroes, as they are treated as octals", + ) +else: + def allowed_leading_zeros(test): # pragma: no cover + return + + +def leap_second(test): + message = "Leap seconds are unsupported." + return skip( + message=message, + subject="time", + description="a valid time string with leap second", + )(test) or skip( + message=message, + subject="time", + description="a valid time string with leap second, Zulu", + )(test) or skip( + message=message, + subject="time", + description="a valid time string with leap second with offset", + )(test) or skip( + message=message, + subject="time", + description="valid leap second, positive time-offset", + )(test) or skip( + message=message, + subject="time", + description="valid leap second, negative time-offset", + )(test) or skip( + message=message, + subject="time", + description="valid leap second, large positive time-offset", + )(test) or skip( + message=message, + subject="time", + description="valid leap second, large negative time-offset", + )(test) or skip( + message=message, + subject="time", + description="valid leap second, zero time-offset", + )(test) or skip( + message=message, + subject="date-time", + description="a valid date-time with a leap second, UTC", + )(test) or skip( + message=message, + subject="date-time", + description="a valid date-time with a leap second, with minus offset", + )(test) + + +TestDraft3 = DRAFT3.to_unittest_testcase( + DRAFT3.tests(), + DRAFT3.format_tests(), + DRAFT3.optional_tests_of(name="bignum"), + DRAFT3.optional_tests_of(name="non-bmp-regex"), + DRAFT3.optional_tests_of(name="zeroTerminatedFloats"), + Validator=jsonschema.Draft3Validator, + format_checker=jsonschema.draft3_format_checker, + skip=lambda test: ( + narrow_unicode_build(test) + or missing_format(jsonschema.draft3_format_checker)(test) + or complex_email_validation(test) + or skip( + message=bug(371), + subject="ref", + valid=False, + case_description=( + "$ref prevents a sibling id from changing the base uri" + ), + )(test) + ), +) + + +TestDraft4 = DRAFT4.to_unittest_testcase( + DRAFT4.tests(), + DRAFT4.format_tests(), + DRAFT4.optional_tests_of(name="bignum"), + DRAFT4.optional_tests_of(name="float-overflow"), + DRAFT4.optional_tests_of(name="non-bmp-regex"), + DRAFT4.optional_tests_of(name="zeroTerminatedFloats"), + Validator=jsonschema.Draft4Validator, + format_checker=jsonschema.draft4_format_checker, + skip=lambda test: ( + narrow_unicode_build(test) + or allowed_leading_zeros(test) + or leap_second(test) + or missing_format(jsonschema.draft4_format_checker)(test) + or complex_email_validation(test) + or skip( + message=bug(), + subject="ref", + case_description="Recursive references between schemas", + )(test) + or skip( + message=bug(371), + subject="ref", + case_description=( + "Location-independent identifier with " + "base URI change in subschema" + ), + )(test) + or skip( + message=bug(371), + subject="ref", + case_description=( + "$ref prevents a sibling id from changing the base uri" + 
), + )(test) + or skip( + message=bug(371), + subject="id", + description="match $ref to id", + )(test) + or skip( + message=bug(371), + subject="id", + description="no match on enum or $ref to id", + )(test) + or skip( + message=bug(), + subject="refRemote", + case_description="base URI change - change folder in subschema", + )(test) + or skip( + message=bug(), + subject="ref", + case_description=( + "id must be resolved against nearest parent, " + "not just immediate parent" + ), + )(test) + ), +) + + +TestDraft6 = DRAFT6.to_unittest_testcase( + DRAFT6.tests(), + DRAFT6.format_tests(), + DRAFT6.optional_tests_of(name="bignum"), + DRAFT6.optional_tests_of(name="float-overflow"), + DRAFT6.optional_tests_of(name="non-bmp-regex"), + Validator=jsonschema.Draft6Validator, + format_checker=jsonschema.draft6_format_checker, + skip=lambda test: ( + narrow_unicode_build(test) + or allowed_leading_zeros(test) + or leap_second(test) + or missing_format(jsonschema.draft6_format_checker)(test) + or complex_email_validation(test) + or skip( + message=bug(), + subject="refRemote", + case_description="base URI change - change folder in subschema", + )(test) + or skip( + message=bug(371), + subject="ref", + case_description=( + "$ref prevents a sibling $id from changing the base uri" + ), + )(test) + ), +) + + +TestDraft7 = DRAFT7.to_unittest_testcase( + DRAFT7.tests(), + DRAFT7.format_tests(), + DRAFT7.optional_tests_of(name="bignum"), + DRAFT7.optional_tests_of(name="content"), + DRAFT7.optional_tests_of(name="cross-draft"), + DRAFT7.optional_tests_of(name="float-overflow"), + DRAFT7.optional_tests_of(name="non-bmp-regex"), + Validator=jsonschema.Draft7Validator, + format_checker=jsonschema.draft7_format_checker, + skip=lambda test: ( + narrow_unicode_build(test) + or allowed_leading_zeros(test) + or leap_second(test) + or missing_format(jsonschema.draft7_format_checker)(test) + or complex_email_validation(test) + or skip( + message=bug(), + subject="refRemote", + case_description="base URI change - change folder in subschema", + )(test) + or skip( + message=bug(371), + subject="ref", + case_description=( + "$ref prevents a sibling $id from changing the base uri" + ), + )(test) + or skip( + message=bug(), + subject="ref", + case_description=( + "$id must be resolved against nearest parent, " + "not just immediate parent" + ), + )(test) + or skip( + message=bug(593), + subject="content", + valid=False, + case_description=( + "validation of string-encoded content based on media type" + ), + )(test) + or skip( + message=bug(593), + subject="content", + valid=False, + case_description="validation of binary string-encoding", + )(test) + or skip( + message=bug(593), + subject="content", + valid=False, + case_description=( + "validation of binary-encoded media type documents" + ), + )(test) + ), +) + + +TestDraft201909 = DRAFT201909.to_unittest_testcase( + DRAFT201909.tests(), + DRAFT201909.optional_tests_of(name="bignum"), + DRAFT201909.optional_tests_of(name="cross-draft"), + DRAFT201909.optional_tests_of(name="float-overflow"), + DRAFT201909.optional_tests_of(name="non-bmp-regex"), + DRAFT201909.optional_tests_of(name="refOfUnknownKeyword"), + Validator=jsonschema.Draft201909Validator, + skip=lambda test: ( + skip( + message="unevaluatedItems is different in 2019-09 (needs work).", + subject="unevaluatedItems", + description="uncle keyword evaluation is not significant", + )(test) + or skip( + message="unevaluatedItems is different in 2019-09 (needs work).", + subject="unevaluatedItems", + description="when 
one schema matches and has unevaluated items", + )(test) + or skip( + message="unevaluatedItems is different in 2019-09 (needs work).", + subject="unevaluatedItems", + description="when two schemas match and has unevaluated items", + )(test) + or skip( + message="unevaluatedItems is different in 2019-09 (needs work).", + subject="unevaluatedItems", + description="when if matches and it has unevaluated items", + )(test) + or skip( + message="unevaluatedItems is different in 2019-09 (needs work).", + subject="unevaluatedItems", + case_description="unevaluatedItems with nested tuple", + description="with unevaluated items", + )(test) + or skip( + message="unevaluatedItems is different in 2019-09 (needs work).", + subject="unevaluatedItems", + case_description="unevaluatedItems with not", + description="with unevaluated items", + )(test) + or skip( + message="unevaluatedItems is different in 2019-09 (needs work).", + subject="unevaluatedItems", + case_description="unevaluatedItems with oneOf", + description="with unevaluated items", + )(test) + or skip( + message="unevaluatedItems is different in 2019-09 (needs work).", + subject="unevaluatedItems", + case_description="unevaluatedItems with $ref", + description="with unevaluated items", + )(test) + or skip( + message="unevaluatedItems is different in 2019-09 (needs work).", + subject="unevaluatedItems", + case_description="unevaluatedItems with tuple", + description="with unevaluated items", + )(test) + or skip( + message="unevaluatedItems is different in 2019-09 (needs work).", + subject="unevaluatedItems", + description="when if doesn't match and it has unevaluated items", + )(test) + or skip( + message="recursiveRef support isn't working yet.", + subject="recursiveRef", + case_description=( + "$recursiveRef with no $recursiveAnchor in " + "the initial target schema resource" + ), + description=( + "leaf node does not match: recursion uses the inner schema" + ), + )(test) + or skip( + message="recursiveRef support isn't working yet.", + subject="recursiveRef", + description="leaf node matches: recursion uses the inner schema", + )(test) + or skip( + message="recursiveRef support isn't working yet.", + subject="recursiveRef", + case_description=( + "dynamic $recursiveRef destination (not predictable " + "at schema compile time)" + ), + description="integer node", + )(test) + or skip( + message="recursiveRef support isn't working yet.", + subject="recursiveRef", + case_description=( + "multiple dynamic paths to the $recursiveRef keyword" + ), + description="recurse to integerNode - floats are not allowed", + )(test) + or skip( + message="recursiveRef support isn't working yet.", + subject="recursiveRef", + description="integer does not match as a property value", + )(test) + or skip( + message="recursiveRef support isn't working yet.", + subject="recursiveRef", + description=( + "leaf node does not match: " + "recursion only uses inner schema" + ), + )(test) + or skip( + message="recursiveRef support isn't working yet.", + subject="recursiveRef", + description=( + "leaf node matches: " + "recursion only uses inner schema" + ), + )(test) + or skip( + message="recursiveRef support isn't working yet.", + subject="recursiveRef", + description=( + "two levels, integer does not match as a property value" + ), + )(test) + or skip( + message="recursiveRef support isn't working yet.", + subject="recursiveRef", + description="recursive mismatch", + )(test) + or skip( + message="recursiveRef support isn't working yet.", + subject="recursiveRef", + 
description="two levels, no match", + )(test) + or skip( + message="recursiveRef support isn't working yet.", + subject="id", + case_description=( + "Invalid use of fragments in location-independent $id" + ), + )(test) + or skip( + message="dynamicRef support isn't fully working yet.", + subject="defs", + description="invalid definition schema", + )(test) + or skip( + message="dynamicRef support isn't fully working yet.", + subject="anchor", + case_description="same $anchor with different base uri", + )(test) + or skip( + message=test_suite_bug(574), + subject="vocabulary", + description=( + "no validation: invalid number, but it still validates" + ), + )(test) + or skip( + message=bug(), + subject="ref", + case_description=( + "$id must be resolved against nearest parent, " + "not just immediate parent" + ), + )(test) + ), +) + + +TestDraft201909Format = DRAFT201909.to_unittest_testcase( + DRAFT201909.format_tests(), + Validator=jsonschema.Draft201909Validator, + format_checker=jsonschema.draft201909_format_checker, + skip=lambda test: ( + complex_email_validation(test) + or allowed_leading_zeros(test) + or leap_second(test) + or missing_format(jsonschema.draft201909_format_checker)(test) + or complex_email_validation(test) + ), +) + + +TestDraft202012 = DRAFT202012.to_unittest_testcase( + DRAFT202012.tests(), + DRAFT202012.optional_tests_of(name="bignum"), + DRAFT202012.optional_tests_of(name="cross-draft"), + DRAFT202012.optional_tests_of(name="float-overflow"), + DRAFT202012.optional_tests_of(name="non-bmp-regex"), + DRAFT202012.optional_tests_of(name="refOfUnknownKeyword"), + Validator=jsonschema.Draft202012Validator, + skip=lambda test: ( + narrow_unicode_build(test) + or skip( + message="dynamicRef support isn't fully working yet.", + subject="dynamicRef", + description="The recursive part is not valid against the root", + )(test) + or skip( + message="dynamicRef support isn't fully working yet.", + subject="dynamicRef", + description="incorrect extended schema", + case_description=( + "$ref and $dynamicAnchor are independent of order - " + "$defs first" + ), + )(test) + or skip( + message="dynamicRef support isn't fully working yet.", + subject="dynamicRef", + description="correct extended schema", + case_description=( + "$ref and $dynamicAnchor are independent of order - " + "$defs first" + ), + )(test) + or skip( + message="dynamicRef support isn't fully working yet.", + subject="dynamicRef", + description="correct extended schema", + case_description=( + "$ref and $dynamicAnchor are independent of order - $ref first" + ), + )(test) + or skip( + message="dynamicRef support isn't fully working yet.", + subject="dynamicRef", + description="incorrect extended schema", + case_description=( + "$ref and $dynamicAnchor are independent of order - $ref first" + ), + )(test) + or skip( + message="dynamicRef support isn't fully working yet.", + subject="dynamicRef", + description=( + "/then/$defs/thingy is the final stop for the $dynamicRef" + ), + )(test) + or skip( + message="dynamicRef support isn't fully working yet.", + subject="dynamicRef", + description=( + "string matches /$defs/thingy, but the $dynamicRef " + "does not stop here" + ), + )(test) + or skip( + message="dynamicRef support isn't fully working yet.", + subject="dynamicRef", + description=( + "string matches /$defs/thingy, but the $dynamicRef " + "does not stop here" + ), + )(test) + or skip( + message="dynamicRef support isn't fully working yet.", + subject="dynamicRef", + description="recurse to integerNode - floats are 
not allowed", + )(test) + or skip( + message="dynamicRef support isn't fully working yet.", + subject="defs", + description="invalid definition schema", + )(test) + or skip( + message="dynamicRef support isn't fully working yet.", + subject="anchor", + case_description="same $anchor with different base uri", + )(test) + or skip( + message=test_suite_bug(574), + subject="vocabulary", + description=( + "no validation: invalid number, but it still validates" + ), + )(test) + or skip( + message=bug(), + subject="ref", + case_description=( + "$id must be resolved against nearest parent, " + "not just immediate parent" + ), + )(test) + ), +) + + +TestDraft202012Format = DRAFT202012.to_unittest_testcase( + DRAFT202012.format_tests(), + Validator=jsonschema.Draft202012Validator, + format_checker=jsonschema.draft202012_format_checker, + skip=lambda test: ( + complex_email_validation(test) + or allowed_leading_zeros(test) + or leap_second(test) + or missing_format(jsonschema.draft202012_format_checker)(test) + or complex_email_validation(test) + ), +) diff --git a/vendor/jsonschema/jsonschema/tests/test_types.py b/vendor/jsonschema/jsonschema/tests/test_types.py new file mode 100644 index 00000000..3fd1a700 --- /dev/null +++ b/vendor/jsonschema/jsonschema/tests/test_types.py @@ -0,0 +1,217 @@ +""" +Tests for the `TypeChecker`-based type interface. + +The actual correctness of the type checking is handled in +`test_jsonschema_test_suite`; these tests check that TypeChecker +functions correctly at a more granular level. +""" +from collections import namedtuple +from unittest import TestCase + +from jsonschema import ValidationError, _validators +from jsonschema._types import TypeChecker +from jsonschema.exceptions import UndefinedTypeCheck, UnknownType +from jsonschema.validators import Draft202012Validator, extend + + +def equals_2(checker, instance): + return instance == 2 + + +def is_namedtuple(instance): + return isinstance(instance, tuple) and getattr(instance, "_fields", None) + + +def is_object_or_named_tuple(checker, instance): + if Draft202012Validator.TYPE_CHECKER.is_type(instance, "object"): + return True + return is_namedtuple(instance) + + +class TestTypeChecker(TestCase): + def test_is_type(self): + checker = TypeChecker({"two": equals_2}) + self.assertEqual( + ( + checker.is_type(instance=2, type="two"), + checker.is_type(instance="bar", type="two"), + ), + (True, False), + ) + + def test_is_unknown_type(self): + with self.assertRaises(UndefinedTypeCheck) as e: + TypeChecker().is_type(4, "foobar") + self.assertIn( + "'foobar' is unknown to this type checker", + str(e.exception), + ) + self.assertTrue( + e.exception.__suppress_context__, + msg="Expected the internal KeyError to be hidden.", + ) + + def test_checks_can_be_added_at_init(self): + checker = TypeChecker({"two": equals_2}) + self.assertEqual(checker, TypeChecker().redefine("two", equals_2)) + + def test_redefine_existing_type(self): + self.assertEqual( + TypeChecker().redefine("two", object()).redefine("two", equals_2), + TypeChecker().redefine("two", equals_2), + ) + + def test_remove(self): + self.assertEqual( + TypeChecker({"two": equals_2}).remove("two"), + TypeChecker(), + ) + + def test_remove_unknown_type(self): + with self.assertRaises(UndefinedTypeCheck) as context: + TypeChecker().remove("foobar") + self.assertIn("foobar", str(context.exception)) + + def test_redefine_many(self): + self.assertEqual( + TypeChecker().redefine_many({"foo": int, "bar": str}), + TypeChecker().redefine("foo", int).redefine("bar", str), + ) 
+ + def test_remove_multiple(self): + self.assertEqual( + TypeChecker({"foo": int, "bar": str}).remove("foo", "bar"), + TypeChecker(), + ) + + def test_type_check_can_raise_key_error(self): + """ + Make sure no one writes: + + try: + self._type_checkers[type](...) + except KeyError: + + ignoring the fact that the function itself can raise that. + """ + + error = KeyError("Stuff") + + def raises_keyerror(checker, instance): + raise error + + with self.assertRaises(KeyError) as context: + TypeChecker({"foo": raises_keyerror}).is_type(4, "foo") + + self.assertIs(context.exception, error) + + +class TestCustomTypes(TestCase): + def test_simple_type_can_be_extended(self): + def int_or_str_int(checker, instance): + if not isinstance(instance, (int, str)): + return False + try: + int(instance) + except ValueError: + return False + return True + + CustomValidator = extend( + Draft202012Validator, + type_checker=Draft202012Validator.TYPE_CHECKER.redefine( + "integer", int_or_str_int, + ), + ) + validator = CustomValidator({"type": "integer"}) + + validator.validate(4) + validator.validate("4") + + with self.assertRaises(ValidationError): + validator.validate(4.4) + + with self.assertRaises(ValidationError): + validator.validate("foo") + + def test_object_can_be_extended(self): + schema = {"type": "object"} + + Point = namedtuple("Point", ["x", "y"]) + + type_checker = Draft202012Validator.TYPE_CHECKER.redefine( + "object", is_object_or_named_tuple, + ) + + CustomValidator = extend( + Draft202012Validator, + type_checker=type_checker, + ) + validator = CustomValidator(schema) + + validator.validate(Point(x=4, y=5)) + + def test_object_extensions_require_custom_validators(self): + schema = {"type": "object", "required": ["x"]} + + type_checker = Draft202012Validator.TYPE_CHECKER.redefine( + "object", is_object_or_named_tuple, + ) + + CustomValidator = extend( + Draft202012Validator, + type_checker=type_checker, + ) + validator = CustomValidator(schema) + + Point = namedtuple("Point", ["x", "y"]) + # Cannot handle required + with self.assertRaises(ValidationError): + validator.validate(Point(x=4, y=5)) + + def test_object_extensions_can_handle_custom_validators(self): + schema = { + "type": "object", + "required": ["x"], + "properties": {"x": {"type": "integer"}}, + } + + type_checker = Draft202012Validator.TYPE_CHECKER.redefine( + "object", is_object_or_named_tuple, + ) + + def coerce_named_tuple(fn): + def coerced(validator, value, instance, schema): + if is_namedtuple(instance): + instance = instance._asdict() + return fn(validator, value, instance, schema) + return coerced + + required = coerce_named_tuple(_validators.required) + properties = coerce_named_tuple(_validators.properties) + + CustomValidator = extend( + Draft202012Validator, + type_checker=type_checker, + validators={"required": required, "properties": properties}, + ) + + validator = CustomValidator(schema) + + Point = namedtuple("Point", ["x", "y"]) + # Can now process required and properties + validator.validate(Point(x=4, y=5)) + + with self.assertRaises(ValidationError): + validator.validate(Point(x="not an integer", y=5)) + + # As well as still handle objects. 
+ validator.validate({"x": 4, "y": 5}) + + with self.assertRaises(ValidationError): + validator.validate({"x": "not an integer", "y": 5}) + + def test_unknown_type(self): + with self.assertRaises(UnknownType) as e: + Draft202012Validator({}).is_type(12, "some unknown type") + self.assertIn("'some unknown type'", str(e.exception)) diff --git a/vendor/jsonschema/jsonschema/tests/test_utils.py b/vendor/jsonschema/jsonschema/tests/test_utils.py new file mode 100644 index 00000000..4e542b96 --- /dev/null +++ b/vendor/jsonschema/jsonschema/tests/test_utils.py @@ -0,0 +1,124 @@ +from unittest import TestCase + +from jsonschema._utils import equal + + +class TestEqual(TestCase): + def test_none(self): + self.assertTrue(equal(None, None)) + + +class TestDictEqual(TestCase): + def test_equal_dictionaries(self): + dict_1 = {"a": "b", "c": "d"} + dict_2 = {"c": "d", "a": "b"} + self.assertTrue(equal(dict_1, dict_2)) + + def test_missing_key(self): + dict_1 = {"a": "b", "c": "d"} + dict_2 = {"c": "d", "x": "b"} + self.assertFalse(equal(dict_1, dict_2)) + + def test_additional_key(self): + dict_1 = {"a": "b", "c": "d"} + dict_2 = {"c": "d", "a": "b", "x": "x"} + self.assertFalse(equal(dict_1, dict_2)) + + def test_missing_value(self): + dict_1 = {"a": "b", "c": "d"} + dict_2 = {"c": "d", "a": "x"} + self.assertFalse(equal(dict_1, dict_2)) + + def test_empty_dictionaries(self): + dict_1 = {} + dict_2 = {} + self.assertTrue(equal(dict_1, dict_2)) + + def test_one_none(self): + dict_1 = None + dict_2 = {"a": "b", "c": "d"} + self.assertFalse(equal(dict_1, dict_2)) + + def test_same_item(self): + dict_1 = {"a": "b", "c": "d"} + self.assertTrue(equal(dict_1, dict_1)) + + def test_nested_equal(self): + dict_1 = {"a": {"a": "b", "c": "d"}, "c": "d"} + dict_2 = {"c": "d", "a": {"a": "b", "c": "d"}} + self.assertTrue(equal(dict_1, dict_2)) + + def test_nested_dict_unequal(self): + dict_1 = {"a": {"a": "b", "c": "d"}, "c": "d"} + dict_2 = {"c": "d", "a": {"a": "b", "c": "x"}} + self.assertFalse(equal(dict_1, dict_2)) + + def test_mixed_nested_equal(self): + dict_1 = {"a": ["a", "b", "c", "d"], "c": "d"} + dict_2 = {"c": "d", "a": ["a", "b", "c", "d"]} + self.assertTrue(equal(dict_1, dict_2)) + + def test_nested_list_unequal(self): + dict_1 = {"a": ["a", "b", "c", "d"], "c": "d"} + dict_2 = {"c": "d", "a": ["b", "c", "d", "a"]} + self.assertFalse(equal(dict_1, dict_2)) + + +class TestListEqual(TestCase): + def test_equal_lists(self): + list_1 = ["a", "b", "c"] + list_2 = ["a", "b", "c"] + self.assertTrue(equal(list_1, list_2)) + + def test_unsorted_lists(self): + list_1 = ["a", "b", "c"] + list_2 = ["b", "b", "a"] + self.assertFalse(equal(list_1, list_2)) + + def test_first_list_larger(self): + list_1 = ["a", "b", "c"] + list_2 = ["a", "b"] + self.assertFalse(equal(list_1, list_2)) + + def test_second_list_larger(self): + list_1 = ["a", "b"] + list_2 = ["a", "b", "c"] + self.assertFalse(equal(list_1, list_2)) + + def test_list_with_none_unequal(self): + list_1 = ["a", "b", None] + list_2 = ["a", "b", "c"] + self.assertFalse(equal(list_1, list_2)) + + list_1 = ["a", "b", None] + list_2 = [None, "b", "c"] + self.assertFalse(equal(list_1, list_2)) + + def test_list_with_none_equal(self): + list_1 = ["a", None, "c"] + list_2 = ["a", None, "c"] + self.assertTrue(equal(list_1, list_2)) + + def test_empty_list(self): + list_1 = [] + list_2 = [] + self.assertTrue(equal(list_1, list_2)) + + def test_one_none(self): + list_1 = None + list_2 = [] + self.assertFalse(equal(list_1, list_2)) + + def test_same_list(self): + 
list_1 = ["a", "b", "c"] + self.assertTrue(equal(list_1, list_1)) + + def test_equal_nested_lists(self): + list_1 = ["a", ["b", "c"], "d"] + list_2 = ["a", ["b", "c"], "d"] + self.assertTrue(equal(list_1, list_2)) + + def test_unequal_nested_lists(self): + list_1 = ["a", ["b", "c"], "d"] + list_2 = ["a", [], "c"] + self.assertFalse(equal(list_1, list_2)) diff --git a/vendor/jsonschema/jsonschema/tests/test_validators.py b/vendor/jsonschema/jsonschema/tests/test_validators.py new file mode 100644 index 00000000..4ebc8db7 --- /dev/null +++ b/vendor/jsonschema/jsonschema/tests/test_validators.py @@ -0,0 +1,2221 @@ +from __future__ import annotations + +from collections import deque, namedtuple +from contextlib import contextmanager +from decimal import Decimal +from io import BytesIO +from unittest import TestCase, mock +from urllib.request import pathname2url +import json +import os +import sys +import tempfile +import unittest +import warnings + +import attr + +from jsonschema import ( + FormatChecker, + TypeChecker, + exceptions, + protocols, + validators, +) +from jsonschema.tests._helpers import bug + + +def fail(validator, errors, instance, schema): + for each in errors: + each.setdefault("message", "You told me to fail!") + yield exceptions.ValidationError(**each) + + +class TestCreateAndExtend(TestCase): + def setUp(self): + self.addCleanup( + self.assertEqual, + validators._META_SCHEMAS, + dict(validators._META_SCHEMAS), + ) + + self.meta_schema = {"$id": "some://meta/schema"} + self.validators = {"fail": fail} + self.type_checker = TypeChecker() + self.Validator = validators.create( + meta_schema=self.meta_schema, + validators=self.validators, + type_checker=self.type_checker, + ) + + def test_attrs(self): + self.assertEqual( + ( + self.Validator.VALIDATORS, + self.Validator.META_SCHEMA, + self.Validator.TYPE_CHECKER, + ), ( + self.validators, + self.meta_schema, + self.type_checker, + ), + ) + + def test_init(self): + schema = {"fail": []} + self.assertEqual(self.Validator(schema).schema, schema) + + def test_iter_errors_successful(self): + schema = {"fail": []} + validator = self.Validator(schema) + + errors = list(validator.iter_errors("hello")) + self.assertEqual(errors, []) + + def test_iter_errors_one_error(self): + schema = {"fail": [{"message": "Whoops!"}]} + validator = self.Validator(schema) + + expected_error = exceptions.ValidationError( + "Whoops!", + instance="goodbye", + schema=schema, + validator="fail", + validator_value=[{"message": "Whoops!"}], + schema_path=deque(["fail"]), + ) + + errors = list(validator.iter_errors("goodbye")) + self.assertEqual(len(errors), 1) + self.assertEqual(errors[0]._contents(), expected_error._contents()) + + def test_iter_errors_multiple_errors(self): + schema = { + "fail": [ + {"message": "First"}, + {"message": "Second!", "validator": "asdf"}, + {"message": "Third"}, + ], + } + validator = self.Validator(schema) + + errors = list(validator.iter_errors("goodbye")) + self.assertEqual(len(errors), 3) + + def test_if_a_version_is_provided_it_is_registered(self): + Validator = validators.create( + meta_schema={"$id": "something"}, + version="my version", + ) + self.addCleanup(validators._META_SCHEMAS.pop, "something") + self.assertEqual(Validator.__name__, "MyVersionValidator") + self.assertEqual(Validator.__qualname__, "MyVersionValidator") + + def test_repr(self): + Validator = validators.create( + meta_schema={"$id": "something"}, + version="my version", + ) + self.addCleanup(validators._META_SCHEMAS.pop, "something") + 
self.assertEqual( + repr(Validator({})), + "MyVersionValidator(schema={}, format_checker=None)", + ) + + def test_long_repr(self): + Validator = validators.create( + meta_schema={"$id": "something"}, + version="my version", + ) + self.addCleanup(validators._META_SCHEMAS.pop, "something") + self.assertEqual( + repr(Validator({"a": list(range(1000))})), ( + "MyVersionValidator(schema={'a': [0, 1, 2, 3, 4, 5, ...]}, " + "format_checker=None)" + ), + ) + + def test_repr_no_version(self): + Validator = validators.create(meta_schema={}) + self.assertEqual( + repr(Validator({})), + "Validator(schema={}, format_checker=None)", + ) + + def test_dashes_are_stripped_from_validator_names(self): + Validator = validators.create( + meta_schema={"$id": "something"}, + version="foo-bar", + ) + self.addCleanup(validators._META_SCHEMAS.pop, "something") + self.assertEqual(Validator.__qualname__, "FooBarValidator") + + def test_if_a_version_is_not_provided_it_is_not_registered(self): + original = dict(validators._META_SCHEMAS) + validators.create(meta_schema={"id": "id"}) + self.assertEqual(validators._META_SCHEMAS, original) + + def test_validates_registers_meta_schema_id(self): + meta_schema_key = "meta schema id" + my_meta_schema = {"id": meta_schema_key} + + validators.create( + meta_schema=my_meta_schema, + version="my version", + id_of=lambda s: s.get("id", ""), + ) + self.addCleanup(validators._META_SCHEMAS.pop, meta_schema_key) + + self.assertIn(meta_schema_key, validators._META_SCHEMAS) + + def test_validates_registers_meta_schema_draft6_id(self): + meta_schema_key = "meta schema $id" + my_meta_schema = {"$id": meta_schema_key} + + validators.create( + meta_schema=my_meta_schema, + version="my version", + ) + self.addCleanup(validators._META_SCHEMAS.pop, meta_schema_key) + + self.assertIn(meta_schema_key, validators._META_SCHEMAS) + + def test_create_default_types(self): + Validator = validators.create(meta_schema={}, validators=()) + self.assertTrue( + all( + Validator({}).is_type(instance=instance, type=type) + for type, instance in [ + ("array", []), + ("boolean", True), + ("integer", 12), + ("null", None), + ("number", 12.0), + ("object", {}), + ("string", "foo"), + ] + ), + ) + + def test_extend(self): + original = dict(self.Validator.VALIDATORS) + new = object() + + Extended = validators.extend( + self.Validator, + validators={"new": new}, + ) + self.assertEqual( + ( + Extended.VALIDATORS, + Extended.META_SCHEMA, + Extended.TYPE_CHECKER, + self.Validator.VALIDATORS, + ), ( + dict(original, new=new), + self.Validator.META_SCHEMA, + self.Validator.TYPE_CHECKER, + original, + ), + ) + + def test_extend_idof(self): + """ + Extending a validator preserves its notion of schema IDs. 
+ """ + def id_of(schema): + return schema.get("__test__", self.Validator.ID_OF(schema)) + correct_id = "the://correct/id/" + meta_schema = { + "$id": "the://wrong/id/", + "__test__": correct_id, + } + Original = validators.create( + meta_schema=meta_schema, + validators=self.validators, + type_checker=self.type_checker, + id_of=id_of, + ) + self.assertEqual(Original.ID_OF(Original.META_SCHEMA), correct_id) + + Derived = validators.extend(Original) + self.assertEqual(Derived.ID_OF(Derived.META_SCHEMA), correct_id) + + +class TestValidationErrorMessages(TestCase): + def message_for(self, instance, schema, *args, **kwargs): + cls = kwargs.pop("cls", validators._LATEST_VERSION) + cls.check_schema(schema) + validator = cls(schema, *args, **kwargs) + errors = list(validator.iter_errors(instance)) + self.assertTrue(errors, msg=f"No errors were raised for {instance!r}") + self.assertEqual( + len(errors), + 1, + msg=f"Expected exactly one error, found {errors!r}", + ) + return errors[0].message + + def test_single_type_failure(self): + message = self.message_for(instance=1, schema={"type": "string"}) + self.assertEqual(message, "1 is not of type 'string'") + + def test_single_type_list_failure(self): + message = self.message_for(instance=1, schema={"type": ["string"]}) + self.assertEqual(message, "1 is not of type 'string'") + + def test_multiple_type_failure(self): + types = "string", "object" + message = self.message_for(instance=1, schema={"type": list(types)}) + self.assertEqual(message, "1 is not of type 'string', 'object'") + + def test_object_with_named_type_failure(self): + schema = {"type": [{"name": "Foo", "minimum": 3}]} + message = self.message_for( + instance=1, + schema=schema, + cls=validators.Draft3Validator, + ) + self.assertEqual(message, "1 is not of type 'Foo'") + + def test_minimum(self): + message = self.message_for(instance=1, schema={"minimum": 2}) + self.assertEqual(message, "1 is less than the minimum of 2") + + def test_maximum(self): + message = self.message_for(instance=1, schema={"maximum": 0}) + self.assertEqual(message, "1 is greater than the maximum of 0") + + def test_dependencies_single_element(self): + depend, on = "bar", "foo" + schema = {"dependencies": {depend: on}} + message = self.message_for( + instance={"bar": 2}, + schema=schema, + cls=validators.Draft3Validator, + ) + self.assertEqual(message, "'foo' is a dependency of 'bar'") + + def test_object_without_title_type_failure_draft3(self): + type = {"type": [{"minimum": 3}]} + message = self.message_for( + instance=1, + schema={"type": [type]}, + cls=validators.Draft3Validator, + ) + self.assertEqual( + message, + "1 is not of type {'type': [{'minimum': 3}]}", + ) + + def test_dependencies_list_draft3(self): + depend, on = "bar", "foo" + schema = {"dependencies": {depend: [on]}} + message = self.message_for( + instance={"bar": 2}, + schema=schema, + cls=validators.Draft3Validator, + ) + self.assertEqual(message, "'foo' is a dependency of 'bar'") + + def test_dependencies_list_draft7(self): + depend, on = "bar", "foo" + schema = {"dependencies": {depend: [on]}} + message = self.message_for( + instance={"bar": 2}, + schema=schema, + cls=validators.Draft7Validator, + ) + self.assertEqual(message, "'foo' is a dependency of 'bar'") + + def test_additionalItems_single_failure(self): + message = self.message_for( + instance=[2], + schema={"items": [], "additionalItems": False}, + cls=validators.Draft3Validator, + ) + self.assertIn("(2 was unexpected)", message) + + def 
test_additionalItems_multiple_failures(self): + message = self.message_for( + instance=[1, 2, 3], + schema={"items": [], "additionalItems": False}, + cls=validators.Draft3Validator, + ) + self.assertIn("(1, 2, 3 were unexpected)", message) + + def test_additionalProperties_single_failure(self): + additional = "foo" + schema = {"additionalProperties": False} + message = self.message_for(instance={additional: 2}, schema=schema) + self.assertIn("('foo' was unexpected)", message) + + def test_additionalProperties_multiple_failures(self): + schema = {"additionalProperties": False} + message = self.message_for( + instance=dict.fromkeys(["foo", "bar"]), + schema=schema, + ) + + self.assertIn(repr("foo"), message) + self.assertIn(repr("bar"), message) + self.assertIn("were unexpected)", message) + + def test_const(self): + schema = {"const": 12} + message = self.message_for( + instance={"foo": "bar"}, + schema=schema, + ) + self.assertIn("12 was expected", message) + + def test_contains_draft_6(self): + schema = {"contains": {"const": 12}} + message = self.message_for( + instance=[2, {}, []], + schema=schema, + cls=validators.Draft6Validator, + ) + self.assertEqual( + message, + "None of [2, {}, []] are valid under the given schema", + ) + + def test_invalid_format_default_message(self): + checker = FormatChecker(formats=()) + checker.checks("thing")(lambda value: False) + + schema = {"format": "thing"} + message = self.message_for( + instance="bla", + schema=schema, + format_checker=checker, + ) + + self.assertIn(repr("bla"), message) + self.assertIn(repr("thing"), message) + self.assertIn("is not a", message) + + def test_additionalProperties_false_patternProperties(self): + schema = {"type": "object", + "additionalProperties": False, + "patternProperties": { + "^abc$": {"type": "string"}, + "^def$": {"type": "string"}, + }} + message = self.message_for( + instance={"zebra": 123}, + schema=schema, + cls=validators.Draft4Validator, + ) + self.assertEqual( + message, + "{} does not match any of the regexes: {}, {}".format( + repr("zebra"), repr("^abc$"), repr("^def$"), + ), + ) + message = self.message_for( + instance={"zebra": 123, "fish": 456}, + schema=schema, + cls=validators.Draft4Validator, + ) + self.assertEqual( + message, + "{}, {} do not match any of the regexes: {}, {}".format( + repr("fish"), repr("zebra"), repr("^abc$"), repr("^def$"), + ), + ) + + def test_False_schema(self): + message = self.message_for( + instance="something", + schema=False, + ) + self.assertEqual(message, "False schema does not allow 'something'") + + def test_multipleOf(self): + message = self.message_for( + instance=3, + schema={"multipleOf": 2}, + ) + self.assertEqual(message, "3 is not a multiple of 2") + + def test_minItems(self): + message = self.message_for(instance=[], schema={"minItems": 2}) + self.assertEqual(message, "[] is too short") + + def test_maxItems(self): + message = self.message_for(instance=[1, 2, 3], schema={"maxItems": 2}) + self.assertEqual(message, "[1, 2, 3] is too long") + + def test_prefixItems_with_items(self): + message = self.message_for( + instance=[1, 2, "foo", 5], + schema={"items": False, "prefixItems": [{}, {}]}, + ) + self.assertEqual(message, "Expected at most 2 items, but found 4") + + def test_minLength(self): + message = self.message_for( + instance="", + schema={"minLength": 2}, + ) + self.assertEqual(message, "'' is too short") + + def test_maxLength(self): + message = self.message_for( + instance="abc", + schema={"maxLength": 2}, + ) + self.assertEqual(message, "'abc' 
is too long") + + def test_pattern(self): + message = self.message_for( + instance="bbb", + schema={"pattern": "^a*$"}, + ) + self.assertEqual(message, "'bbb' does not match '^a*$'") + + def test_does_not_contain(self): + message = self.message_for( + instance=[], + schema={"contains": {"type": "string"}}, + ) + self.assertEqual( + message, + "[] does not contain items matching the given schema", + ) + + def test_contains_too_few(self): + message = self.message_for( + instance=["foo", 1], + schema={"contains": {"type": "string"}, "minContains": 2}, + ) + self.assertEqual( + message, + "Too few items match the given schema " + "(expected at least 2 but only 1 matched)", + ) + + def test_contains_too_few_both_constrained(self): + message = self.message_for( + instance=["foo", 1], + schema={ + "contains": {"type": "string"}, + "minContains": 2, + "maxContains": 4, + }, + ) + self.assertEqual( + message, + "Too few items match the given schema (expected at least 2 but " + "only 1 matched)", + ) + + def test_contains_too_many(self): + message = self.message_for( + instance=["foo", "bar", "baz"], + schema={"contains": {"type": "string"}, "maxContains": 2}, + ) + self.assertEqual( + message, + "Too many items match the given schema (expected at most 2)", + ) + + def test_contains_too_many_both_constrained(self): + message = self.message_for( + instance=["foo"] * 5, + schema={ + "contains": {"type": "string"}, + "minContains": 2, + "maxContains": 4, + }, + ) + self.assertEqual( + message, + "Too many items match the given schema (expected at most 4)", + ) + + def test_exclusiveMinimum(self): + message = self.message_for( + instance=3, + schema={"exclusiveMinimum": 5}, + ) + self.assertEqual( + message, + "3 is less than or equal to the minimum of 5", + ) + + def test_exclusiveMaximum(self): + message = self.message_for(instance=3, schema={"exclusiveMaximum": 2}) + self.assertEqual( + message, + "3 is greater than or equal to the maximum of 2", + ) + + def test_required(self): + message = self.message_for(instance={}, schema={"required": ["foo"]}) + self.assertEqual(message, "'foo' is a required property") + + def test_dependentRequired(self): + message = self.message_for( + instance={"foo": {}}, + schema={"dependentRequired": {"foo": ["bar"]}}, + ) + self.assertEqual(message, "'bar' is a dependency of 'foo'") + + def test_minProperties(self): + message = self.message_for(instance={}, schema={"minProperties": 2}) + self.assertEqual(message, "{} does not have enough properties") + + def test_maxProperties(self): + message = self.message_for( + instance={"a": {}, "b": {}, "c": {}}, + schema={"maxProperties": 2}, + ) + self.assertEqual( + message, + "{'a': {}, 'b': {}, 'c': {}} has too many properties", + ) + + def test_oneOf_matches_none(self): + message = self.message_for(instance={}, schema={"oneOf": [False]}) + self.assertEqual( + message, + "{} is not valid under any of the given schemas", + ) + + def test_oneOf_matches_too_many(self): + message = self.message_for(instance={}, schema={"oneOf": [True, True]}) + self.assertEqual(message, "{} is valid under each of True, True") + + def test_unevaluated_items(self): + schema = {"type": "array", "unevaluatedItems": False} + message = self.message_for(instance=["foo", "bar"], schema=schema) + self.assertIn( + message, + "Unevaluated items are not allowed ('bar', 'foo' were unexpected)", + ) + + def test_unevaluated_items_on_invalid_type(self): + schema = {"type": "array", "unevaluatedItems": False} + message = self.message_for(instance="foo", 
schema=schema) + self.assertEqual(message, "'foo' is not of type 'array'") + + def test_unevaluated_properties(self): + schema = {"type": "object", "unevaluatedProperties": False} + message = self.message_for( + instance={ + "foo": "foo", + "bar": "bar", + }, + schema=schema, + ) + self.assertEqual( + message, + "Unevaluated properties are not allowed " + "('bar', 'foo' were unexpected)", + ) + + def test_unevaluated_properties_on_invalid_type(self): + schema = {"type": "object", "unevaluatedProperties": False} + message = self.message_for(instance="foo", schema=schema) + self.assertEqual(message, "'foo' is not of type 'object'") + + +class TestValidationErrorDetails(TestCase): + # TODO: These really need unit tests for each individual keyword, rather + # than just these higher level tests. + def test_anyOf(self): + instance = 5 + schema = { + "anyOf": [ + {"minimum": 20}, + {"type": "string"}, + ], + } + + validator = validators.Draft4Validator(schema) + errors = list(validator.iter_errors(instance)) + self.assertEqual(len(errors), 1) + e = errors[0] + + self.assertEqual(e.validator, "anyOf") + self.assertEqual(e.validator_value, schema["anyOf"]) + self.assertEqual(e.instance, instance) + self.assertEqual(e.schema, schema) + self.assertIsNone(e.parent) + + self.assertEqual(e.path, deque([])) + self.assertEqual(e.relative_path, deque([])) + self.assertEqual(e.absolute_path, deque([])) + self.assertEqual(e.json_path, "$") + + self.assertEqual(e.schema_path, deque(["anyOf"])) + self.assertEqual(e.relative_schema_path, deque(["anyOf"])) + self.assertEqual(e.absolute_schema_path, deque(["anyOf"])) + + self.assertEqual(len(e.context), 2) + + e1, e2 = sorted_errors(e.context) + + self.assertEqual(e1.validator, "minimum") + self.assertEqual(e1.validator_value, schema["anyOf"][0]["minimum"]) + self.assertEqual(e1.instance, instance) + self.assertEqual(e1.schema, schema["anyOf"][0]) + self.assertIs(e1.parent, e) + + self.assertEqual(e1.path, deque([])) + self.assertEqual(e1.absolute_path, deque([])) + self.assertEqual(e1.relative_path, deque([])) + self.assertEqual(e1.json_path, "$") + + self.assertEqual(e1.schema_path, deque([0, "minimum"])) + self.assertEqual(e1.relative_schema_path, deque([0, "minimum"])) + self.assertEqual( + e1.absolute_schema_path, deque(["anyOf", 0, "minimum"]), + ) + + self.assertFalse(e1.context) + + self.assertEqual(e2.validator, "type") + self.assertEqual(e2.validator_value, schema["anyOf"][1]["type"]) + self.assertEqual(e2.instance, instance) + self.assertEqual(e2.schema, schema["anyOf"][1]) + self.assertIs(e2.parent, e) + + self.assertEqual(e2.path, deque([])) + self.assertEqual(e2.relative_path, deque([])) + self.assertEqual(e2.absolute_path, deque([])) + self.assertEqual(e2.json_path, "$") + + self.assertEqual(e2.schema_path, deque([1, "type"])) + self.assertEqual(e2.relative_schema_path, deque([1, "type"])) + self.assertEqual(e2.absolute_schema_path, deque(["anyOf", 1, "type"])) + + self.assertEqual(len(e2.context), 0) + + def test_type(self): + instance = {"foo": 1} + schema = { + "type": [ + {"type": "integer"}, + { + "type": "object", + "properties": {"foo": {"enum": [2]}}, + }, + ], + } + + validator = validators.Draft3Validator(schema) + errors = list(validator.iter_errors(instance)) + self.assertEqual(len(errors), 1) + e = errors[0] + + self.assertEqual(e.validator, "type") + self.assertEqual(e.validator_value, schema["type"]) + self.assertEqual(e.instance, instance) + self.assertEqual(e.schema, schema) + self.assertIsNone(e.parent) + + 
self.assertEqual(e.path, deque([])) + self.assertEqual(e.relative_path, deque([])) + self.assertEqual(e.absolute_path, deque([])) + self.assertEqual(e.json_path, "$") + + self.assertEqual(e.schema_path, deque(["type"])) + self.assertEqual(e.relative_schema_path, deque(["type"])) + self.assertEqual(e.absolute_schema_path, deque(["type"])) + + self.assertEqual(len(e.context), 2) + + e1, e2 = sorted_errors(e.context) + + self.assertEqual(e1.validator, "type") + self.assertEqual(e1.validator_value, schema["type"][0]["type"]) + self.assertEqual(e1.instance, instance) + self.assertEqual(e1.schema, schema["type"][0]) + self.assertIs(e1.parent, e) + + self.assertEqual(e1.path, deque([])) + self.assertEqual(e1.relative_path, deque([])) + self.assertEqual(e1.absolute_path, deque([])) + self.assertEqual(e1.json_path, "$") + + self.assertEqual(e1.schema_path, deque([0, "type"])) + self.assertEqual(e1.relative_schema_path, deque([0, "type"])) + self.assertEqual(e1.absolute_schema_path, deque(["type", 0, "type"])) + + self.assertFalse(e1.context) + + self.assertEqual(e2.validator, "enum") + self.assertEqual(e2.validator_value, [2]) + self.assertEqual(e2.instance, 1) + self.assertEqual(e2.schema, {"enum": [2]}) + self.assertIs(e2.parent, e) + + self.assertEqual(e2.path, deque(["foo"])) + self.assertEqual(e2.relative_path, deque(["foo"])) + self.assertEqual(e2.absolute_path, deque(["foo"])) + self.assertEqual(e2.json_path, "$.foo") + + self.assertEqual( + e2.schema_path, deque([1, "properties", "foo", "enum"]), + ) + self.assertEqual( + e2.relative_schema_path, deque([1, "properties", "foo", "enum"]), + ) + self.assertEqual( + e2.absolute_schema_path, + deque(["type", 1, "properties", "foo", "enum"]), + ) + + self.assertFalse(e2.context) + + def test_single_nesting(self): + instance = {"foo": 2, "bar": [1], "baz": 15, "quux": "spam"} + schema = { + "properties": { + "foo": {"type": "string"}, + "bar": {"minItems": 2}, + "baz": {"maximum": 10, "enum": [2, 4, 6, 8]}, + }, + } + + validator = validators.Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2, e3, e4 = sorted_errors(errors) + + self.assertEqual(e1.path, deque(["bar"])) + self.assertEqual(e2.path, deque(["baz"])) + self.assertEqual(e3.path, deque(["baz"])) + self.assertEqual(e4.path, deque(["foo"])) + + self.assertEqual(e1.relative_path, deque(["bar"])) + self.assertEqual(e2.relative_path, deque(["baz"])) + self.assertEqual(e3.relative_path, deque(["baz"])) + self.assertEqual(e4.relative_path, deque(["foo"])) + + self.assertEqual(e1.absolute_path, deque(["bar"])) + self.assertEqual(e2.absolute_path, deque(["baz"])) + self.assertEqual(e3.absolute_path, deque(["baz"])) + self.assertEqual(e4.absolute_path, deque(["foo"])) + + self.assertEqual(e1.json_path, "$.bar") + self.assertEqual(e2.json_path, "$.baz") + self.assertEqual(e3.json_path, "$.baz") + self.assertEqual(e4.json_path, "$.foo") + + self.assertEqual(e1.validator, "minItems") + self.assertEqual(e2.validator, "enum") + self.assertEqual(e3.validator, "maximum") + self.assertEqual(e4.validator, "type") + + def test_multiple_nesting(self): + instance = [1, {"foo": 2, "bar": {"baz": [1]}}, "quux"] + schema = { + "type": "string", + "items": { + "type": ["string", "object"], + "properties": { + "foo": {"enum": [1, 3]}, + "bar": { + "type": "array", + "properties": { + "bar": {"required": True}, + "baz": {"minItems": 2}, + }, + }, + }, + }, + } + + validator = validators.Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2, e3, e4, e5, e6 = 
sorted_errors(errors) + + self.assertEqual(e1.path, deque([])) + self.assertEqual(e2.path, deque([0])) + self.assertEqual(e3.path, deque([1, "bar"])) + self.assertEqual(e4.path, deque([1, "bar", "bar"])) + self.assertEqual(e5.path, deque([1, "bar", "baz"])) + self.assertEqual(e6.path, deque([1, "foo"])) + + self.assertEqual(e1.json_path, "$") + self.assertEqual(e2.json_path, "$[0]") + self.assertEqual(e3.json_path, "$[1].bar") + self.assertEqual(e4.json_path, "$[1].bar.bar") + self.assertEqual(e5.json_path, "$[1].bar.baz") + self.assertEqual(e6.json_path, "$[1].foo") + + self.assertEqual(e1.schema_path, deque(["type"])) + self.assertEqual(e2.schema_path, deque(["items", "type"])) + self.assertEqual( + list(e3.schema_path), ["items", "properties", "bar", "type"], + ) + self.assertEqual( + list(e4.schema_path), + ["items", "properties", "bar", "properties", "bar", "required"], + ) + self.assertEqual( + list(e5.schema_path), + ["items", "properties", "bar", "properties", "baz", "minItems"], + ) + self.assertEqual( + list(e6.schema_path), ["items", "properties", "foo", "enum"], + ) + + self.assertEqual(e1.validator, "type") + self.assertEqual(e2.validator, "type") + self.assertEqual(e3.validator, "type") + self.assertEqual(e4.validator, "required") + self.assertEqual(e5.validator, "minItems") + self.assertEqual(e6.validator, "enum") + + def test_recursive(self): + schema = { + "definitions": { + "node": { + "anyOf": [{ + "type": "object", + "required": ["name", "children"], + "properties": { + "name": { + "type": "string", + }, + "children": { + "type": "object", + "patternProperties": { + "^.*$": { + "$ref": "#/definitions/node", + }, + }, + }, + }, + }], + }, + }, + "type": "object", + "required": ["root"], + "properties": {"root": {"$ref": "#/definitions/node"}}, + } + + instance = { + "root": { + "name": "root", + "children": { + "a": { + "name": "a", + "children": { + "ab": { + "name": "ab", + # missing "children" + }, + }, + }, + }, + }, + } + validator = validators.Draft4Validator(schema) + + e, = validator.iter_errors(instance) + self.assertEqual(e.absolute_path, deque(["root"])) + self.assertEqual( + e.absolute_schema_path, deque(["properties", "root", "anyOf"]), + ) + self.assertEqual(e.json_path, "$.root") + + e1, = e.context + self.assertEqual(e1.absolute_path, deque(["root", "children", "a"])) + self.assertEqual( + e1.absolute_schema_path, deque( + [ + "properties", + "root", + "anyOf", + 0, + "properties", + "children", + "patternProperties", + "^.*$", + "anyOf", + ], + ), + ) + self.assertEqual(e1.json_path, "$.root.children.a") + + e2, = e1.context + self.assertEqual( + e2.absolute_path, deque( + ["root", "children", "a", "children", "ab"], + ), + ) + self.assertEqual( + e2.absolute_schema_path, deque( + [ + "properties", + "root", + "anyOf", + 0, + "properties", + "children", + "patternProperties", + "^.*$", + "anyOf", + 0, + "properties", + "children", + "patternProperties", + "^.*$", + "anyOf", + ], + ), + ) + self.assertEqual(e2.json_path, "$.root.children.a.children.ab") + + def test_additionalProperties(self): + instance = {"bar": "bar", "foo": 2} + schema = {"additionalProperties": {"type": "integer", "minimum": 5}} + + validator = validators.Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2 = sorted_errors(errors) + + self.assertEqual(e1.path, deque(["bar"])) + self.assertEqual(e2.path, deque(["foo"])) + + self.assertEqual(e1.json_path, "$.bar") + self.assertEqual(e2.json_path, "$.foo") + + self.assertEqual(e1.validator, "type") + 
self.assertEqual(e2.validator, "minimum") + + def test_patternProperties(self): + instance = {"bar": 1, "foo": 2} + schema = { + "patternProperties": { + "bar": {"type": "string"}, + "foo": {"minimum": 5}, + }, + } + + validator = validators.Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2 = sorted_errors(errors) + + self.assertEqual(e1.path, deque(["bar"])) + self.assertEqual(e2.path, deque(["foo"])) + + self.assertEqual(e1.json_path, "$.bar") + self.assertEqual(e2.json_path, "$.foo") + + self.assertEqual(e1.validator, "type") + self.assertEqual(e2.validator, "minimum") + + def test_additionalItems(self): + instance = ["foo", 1] + schema = { + "items": [], + "additionalItems": {"type": "integer", "minimum": 5}, + } + + validator = validators.Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2 = sorted_errors(errors) + + self.assertEqual(e1.path, deque([0])) + self.assertEqual(e2.path, deque([1])) + + self.assertEqual(e1.json_path, "$[0]") + self.assertEqual(e2.json_path, "$[1]") + + self.assertEqual(e1.validator, "type") + self.assertEqual(e2.validator, "minimum") + + def test_additionalItems_with_items(self): + instance = ["foo", "bar", 1] + schema = { + "items": [{}], + "additionalItems": {"type": "integer", "minimum": 5}, + } + + validator = validators.Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2 = sorted_errors(errors) + + self.assertEqual(e1.path, deque([1])) + self.assertEqual(e2.path, deque([2])) + + self.assertEqual(e1.json_path, "$[1]") + self.assertEqual(e2.json_path, "$[2]") + + self.assertEqual(e1.validator, "type") + self.assertEqual(e2.validator, "minimum") + + def test_propertyNames(self): + instance = {"foo": 12} + schema = {"propertyNames": {"not": {"const": "foo"}}} + + validator = validators.Draft7Validator(schema) + error, = validator.iter_errors(instance) + + self.assertEqual(error.validator, "not") + self.assertEqual( + error.message, + "'foo' should not be valid under {'const': 'foo'}", + ) + self.assertEqual(error.path, deque([])) + self.assertEqual(error.json_path, "$") + self.assertEqual(error.schema_path, deque(["propertyNames", "not"])) + + def test_if_then(self): + schema = { + "if": {"const": 12}, + "then": {"const": 13}, + } + + validator = validators.Draft7Validator(schema) + error, = validator.iter_errors(12) + + self.assertEqual(error.validator, "const") + self.assertEqual(error.message, "13 was expected") + self.assertEqual(error.path, deque([])) + self.assertEqual(error.json_path, "$") + self.assertEqual(error.schema_path, deque(["then", "const"])) + + def test_if_else(self): + schema = { + "if": {"const": 12}, + "else": {"const": 13}, + } + + validator = validators.Draft7Validator(schema) + error, = validator.iter_errors(15) + + self.assertEqual(error.validator, "const") + self.assertEqual(error.message, "13 was expected") + self.assertEqual(error.path, deque([])) + self.assertEqual(error.json_path, "$") + self.assertEqual(error.schema_path, deque(["else", "const"])) + + def test_boolean_schema_False(self): + validator = validators.Draft7Validator(False) + error, = validator.iter_errors(12) + + self.assertEqual( + ( + error.message, + error.validator, + error.validator_value, + error.instance, + error.schema, + error.schema_path, + error.json_path, + ), + ( + "False schema does not allow 12", + None, + None, + 12, + False, + deque([]), + "$", + ), + ) + + def test_ref(self): + ref, schema = "someRef", {"additionalProperties": {"type": "integer"}} + validator = 
validators.Draft7Validator( + {"$ref": ref}, + resolver=validators.RefResolver("", {}, store={ref: schema}), + ) + error, = validator.iter_errors({"foo": "notAnInteger"}) + + self.assertEqual( + ( + error.message, + error.validator, + error.validator_value, + error.instance, + error.absolute_path, + error.schema, + error.schema_path, + error.json_path, + ), + ( + "'notAnInteger' is not of type 'integer'", + "type", + "integer", + "notAnInteger", + deque(["foo"]), + {"type": "integer"}, + deque(["additionalProperties", "type"]), + "$.foo", + ), + ) + + def test_prefixItems(self): + schema = {"prefixItems": [{"type": "string"}, {}, {}, {"maximum": 3}]} + validator = validators.Draft202012Validator(schema) + type_error, min_error = validator.iter_errors([1, 2, "foo", 5]) + self.assertEqual( + ( + type_error.message, + type_error.validator, + type_error.validator_value, + type_error.instance, + type_error.absolute_path, + type_error.schema, + type_error.schema_path, + type_error.json_path, + ), + ( + "1 is not of type 'string'", + "type", + "string", + 1, + deque([0]), + {"type": "string"}, + deque(["prefixItems", 0, "type"]), + "$[0]", + ), + ) + self.assertEqual( + ( + min_error.message, + min_error.validator, + min_error.validator_value, + min_error.instance, + min_error.absolute_path, + min_error.schema, + min_error.schema_path, + min_error.json_path, + ), + ( + "5 is greater than the maximum of 3", + "maximum", + 3, + 5, + deque([3]), + {"maximum": 3}, + deque(["prefixItems", 3, "maximum"]), + "$[3]", + ), + ) + + def test_prefixItems_with_items(self): + schema = { + "items": {"type": "string"}, + "prefixItems": [{}], + } + validator = validators.Draft202012Validator(schema) + e1, e2 = validator.iter_errors(["foo", 2, "bar", 4, "baz"]) + self.assertEqual( + ( + e1.message, + e1.validator, + e1.validator_value, + e1.instance, + e1.absolute_path, + e1.schema, + e1.schema_path, + e1.json_path, + ), + ( + "2 is not of type 'string'", + "type", + "string", + 2, + deque([1]), + {"type": "string"}, + deque(["items", "type"]), + "$[1]", + ), + ) + self.assertEqual( + ( + e2.message, + e2.validator, + e2.validator_value, + e2.instance, + e2.absolute_path, + e2.schema, + e2.schema_path, + e2.json_path, + ), + ( + "4 is not of type 'string'", + "type", + "string", + 4, + deque([3]), + {"type": "string"}, + deque(["items", "type"]), + "$[3]", + ), + ) + + def test_contains_too_many(self): + """ + `contains` + `maxContains` produces only one error, even if there are + many more incorrectly matching elements. 
+ """ + schema = {"contains": {"type": "string"}, "maxContains": 2} + validator = validators.Draft202012Validator(schema) + error, = validator.iter_errors(["foo", 2, "bar", 4, "baz", "quux"]) + self.assertEqual( + ( + error.message, + error.validator, + error.validator_value, + error.instance, + error.absolute_path, + error.schema, + error.schema_path, + error.json_path, + ), + ( + "Too many items match the given schema (expected at most 2)", + "maxContains", + 2, + ["foo", 2, "bar", 4, "baz", "quux"], + deque([]), + {"contains": {"type": "string"}, "maxContains": 2}, + deque(["contains"]), + "$", + ), + ) + + def test_contains_too_few(self): + schema = {"contains": {"type": "string"}, "minContains": 2} + validator = validators.Draft202012Validator(schema) + error, = validator.iter_errors(["foo", 2, 4]) + self.assertEqual( + ( + error.message, + error.validator, + error.validator_value, + error.instance, + error.absolute_path, + error.schema, + error.schema_path, + error.json_path, + ), + ( + ( + "Too few items match the given schema " + "(expected at least 2 but only 1 matched)" + ), + "minContains", + 2, + ["foo", 2, 4], + deque([]), + {"contains": {"type": "string"}, "minContains": 2}, + deque(["contains"]), + "$", + ), + ) + + def test_contains_none(self): + schema = {"contains": {"type": "string"}, "minContains": 2} + validator = validators.Draft202012Validator(schema) + error, = validator.iter_errors([2, 4]) + self.assertEqual( + ( + error.message, + error.validator, + error.validator_value, + error.instance, + error.absolute_path, + error.schema, + error.schema_path, + error.json_path, + ), + ( + "[2, 4] does not contain items matching the given schema", + "contains", + {"type": "string"}, + [2, 4], + deque([]), + {"contains": {"type": "string"}, "minContains": 2}, + deque(["contains"]), + "$", + ), + ) + + def test_ref_sibling(self): + schema = { + "$defs": {"foo": {"required": ["bar"]}}, + "properties": { + "aprop": { + "$ref": "#/$defs/foo", + "required": ["baz"], + }, + }, + } + + validator = validators.Draft202012Validator(schema) + e1, e2 = validator.iter_errors({"aprop": {}}) + self.assertEqual( + ( + e1.message, + e1.validator, + e1.validator_value, + e1.instance, + e1.absolute_path, + e1.schema, + e1.schema_path, + e1.relative_schema_path, + e1.json_path, + ), + ( + "'bar' is a required property", + "required", + ["bar"], + {}, + deque(["aprop"]), + {"required": ["bar"]}, + deque(["properties", "aprop", "required"]), + deque(["properties", "aprop", "required"]), + "$.aprop", + ), + ) + self.assertEqual( + ( + e2.message, + e2.validator, + e2.validator_value, + e2.instance, + e2.absolute_path, + e2.schema, + e2.schema_path, + e2.relative_schema_path, + e2.json_path, + ), + ( + "'baz' is a required property", + "required", + ["baz"], + {}, + deque(["aprop"]), + {"$ref": "#/$defs/foo", "required": ["baz"]}, + deque(["properties", "aprop", "required"]), + deque(["properties", "aprop", "required"]), + "$.aprop", + ), + ) + + +class MetaSchemaTestsMixin(object): + # TODO: These all belong upstream + def test_invalid_properties(self): + with self.assertRaises(exceptions.SchemaError): + self.Validator.check_schema({"properties": 12}) + + def test_minItems_invalid_string(self): + with self.assertRaises(exceptions.SchemaError): + # needs to be an integer + self.Validator.check_schema({"minItems": "1"}) + + def test_enum_allows_empty_arrays(self): + """ + Technically, all the spec says is they SHOULD have elements, not MUST. + + See #529. 
+ """ + self.Validator.check_schema({"enum": []}) + + def test_enum_allows_non_unique_items(self): + """ + Technically, all the spec says is they SHOULD be unique, not MUST. + + See #529. + """ + self.Validator.check_schema({"enum": [12, 12]}) + + +class ValidatorTestMixin(MetaSchemaTestsMixin, object): + def test_it_implements_the_validator_protocol(self): + self.assertIsInstance(self.Validator({}), protocols.Validator) + + def test_valid_instances_are_valid(self): + schema, instance = self.valid + self.assertTrue(self.Validator(schema).is_valid(instance)) + + def test_invalid_instances_are_not_valid(self): + schema, instance = self.invalid + self.assertFalse(self.Validator(schema).is_valid(instance)) + + def test_non_existent_properties_are_ignored(self): + self.Validator({object(): object()}).validate(instance=object()) + + def test_it_creates_a_ref_resolver_if_not_provided(self): + self.assertIsInstance( + self.Validator({}).resolver, + validators.RefResolver, + ) + + def test_it_delegates_to_a_ref_resolver(self): + ref, schema = "someCoolRef", {"type": "integer"} + resolver = validators.RefResolver("", {}, store={ref: schema}) + validator = self.Validator({"$ref": ref}, resolver=resolver) + + with self.assertRaises(exceptions.ValidationError): + validator.validate(None) + + def test_evolve(self): + ref, schema = "someCoolRef", {"type": "integer"} + resolver = validators.RefResolver("", {}, store={ref: schema}) + + validator = self.Validator(schema, resolver=resolver) + new = validator.evolve(schema={"type": "string"}) + + expected = self.Validator({"type": "string"}, resolver=resolver) + + self.assertEqual(new, expected) + self.assertNotEqual(new, validator) + + def test_evolve_with_subclass(self): + """ + Subclassing validators isn't supported public API, but some users have + done it, because we don't actually error entirely when it's done :/ + + We need to deprecate doing so first to help as many of these users + ensure they can move to supported APIs, but this test ensures that in + the interim, we haven't broken those users. + """ + + @attr.s + class OhNo(self.Validator): + foo = attr.ib(factory=lambda: [1, 2, 3]) + _bar = attr.ib(default=37) + + validator = OhNo({}, bar=12) + self.assertEqual(validator.foo, [1, 2, 3]) + + new = validator.evolve(schema={"type": "integer"}) + self.assertEqual(new.foo, [1, 2, 3]) + self.assertEqual(new._bar, 12) + + def test_it_delegates_to_a_legacy_ref_resolver(self): + """ + Legacy RefResolvers support only the context manager form of + resolution. 
+ """ + + class LegacyRefResolver(object): + @contextmanager + def resolving(this, ref): + self.assertEqual(ref, "the ref") + yield {"type": "integer"} + + resolver = LegacyRefResolver() + schema = {"$ref": "the ref"} + + with self.assertRaises(exceptions.ValidationError): + self.Validator(schema, resolver=resolver).validate(None) + + def test_is_type_is_true_for_valid_type(self): + self.assertTrue(self.Validator({}).is_type("foo", "string")) + + def test_is_type_is_false_for_invalid_type(self): + self.assertFalse(self.Validator({}).is_type("foo", "array")) + + def test_is_type_evades_bool_inheriting_from_int(self): + self.assertFalse(self.Validator({}).is_type(True, "integer")) + self.assertFalse(self.Validator({}).is_type(True, "number")) + + def test_it_can_validate_with_decimals(self): + schema = {"items": {"type": "number"}} + Validator = validators.extend( + self.Validator, + type_checker=self.Validator.TYPE_CHECKER.redefine( + "number", + lambda checker, thing: isinstance( + thing, (int, float, Decimal), + ) and not isinstance(thing, bool), + ), + ) + + validator = Validator(schema) + validator.validate([1, 1.1, Decimal(1) / Decimal(8)]) + + invalid = ["foo", {}, [], True, None] + self.assertEqual( + [error.instance for error in validator.iter_errors(invalid)], + invalid, + ) + + def test_it_returns_true_for_formats_it_does_not_know_about(self): + validator = self.Validator( + {"format": "carrot"}, format_checker=FormatChecker(), + ) + validator.validate("bugs") + + def test_it_does_not_validate_formats_by_default(self): + validator = self.Validator({}) + self.assertIsNone(validator.format_checker) + + def test_it_validates_formats_if_a_checker_is_provided(self): + checker = FormatChecker() + bad = ValueError("Bad!") + + @checker.checks("foo", raises=ValueError) + def check(value): + if value == "good": + return True + elif value == "bad": + raise bad + else: # pragma: no cover + self.fail("What is {}? [Baby Don't Hurt Me]".format(value)) + + validator = self.Validator( + {"format": "foo"}, format_checker=checker, + ) + + validator.validate("good") + with self.assertRaises(exceptions.ValidationError) as cm: + validator.validate("bad") + + # Make sure original cause is attached + self.assertIs(cm.exception.cause, bad) + + def test_non_string_custom_type(self): + non_string_type = object() + schema = {"type": [non_string_type]} + Crazy = validators.extend( + self.Validator, + type_checker=self.Validator.TYPE_CHECKER.redefine( + non_string_type, + lambda checker, thing: isinstance(thing, int), + ), + ) + Crazy(schema).validate(15) + + def test_it_properly_formats_tuples_in_errors(self): + """ + A tuple instance properly formats validation errors for uniqueItems. 
+ + See #224 + """ + TupleValidator = validators.extend( + self.Validator, + type_checker=self.Validator.TYPE_CHECKER.redefine( + "array", + lambda checker, thing: isinstance(thing, tuple), + ), + ) + with self.assertRaises(exceptions.ValidationError) as e: + TupleValidator({"uniqueItems": True}).validate((1, 1)) + self.assertIn("(1, 1) has non-unique elements", str(e.exception)) + + def test_check_redefined_sequence(self): + """ + Allow array to validate against another defined sequence type + """ + schema = {"type": "array", "uniqueItems": True} + MyMapping = namedtuple("MyMapping", "a, b") + Validator = validators.extend( + self.Validator, + type_checker=self.Validator.TYPE_CHECKER.redefine_many( + { + "array": lambda checker, thing: isinstance( + thing, (list, deque), + ), + "object": lambda checker, thing: isinstance( + thing, (dict, MyMapping), + ), + }, + ), + ) + validator = Validator(schema) + + valid_instances = [ + deque(["a", None, "1", "", True]), + deque([[False], [0]]), + [deque([False]), deque([0])], + [[deque([False])], [deque([0])]], + [[[[[deque([False])]]]], [[[[deque([0])]]]]], + [deque([deque([False])]), deque([deque([0])])], + [MyMapping("a", 0), MyMapping("a", False)], + [ + MyMapping("a", [deque([0])]), + MyMapping("a", [deque([False])]), + ], + [ + MyMapping("a", [MyMapping("a", deque([0]))]), + MyMapping("a", [MyMapping("a", deque([False]))]), + ], + [deque(deque(deque([False]))), deque(deque(deque([0])))], + ] + + for instance in valid_instances: + validator.validate(instance) + + invalid_instances = [ + deque(["a", "b", "a"]), + deque([[False], [False]]), + [deque([False]), deque([False])], + [[deque([False])], [deque([False])]], + [[[[[deque([False])]]]], [[[[deque([False])]]]]], + [deque([deque([False])]), deque([deque([False])])], + [MyMapping("a", False), MyMapping("a", False)], + [ + MyMapping("a", [deque([False])]), + MyMapping("a", [deque([False])]), + ], + [ + MyMapping("a", [MyMapping("a", deque([False]))]), + MyMapping("a", [MyMapping("a", deque([False]))]), + ], + [deque(deque(deque([False]))), deque(deque(deque([False])))], + ] + + for instance in invalid_instances: + with self.assertRaises(exceptions.ValidationError): + validator.validate(instance) + + +class AntiDraft6LeakMixin(object): + """ + Make sure functionality from draft 6 doesn't leak backwards in time. 
+ """ + + def test_True_is_not_a_schema(self): + with self.assertRaises(exceptions.SchemaError) as e: + self.Validator.check_schema(True) + self.assertIn("True is not of type", str(e.exception)) + + def test_False_is_not_a_schema(self): + with self.assertRaises(exceptions.SchemaError) as e: + self.Validator.check_schema(False) + self.assertIn("False is not of type", str(e.exception)) + + @unittest.skip(bug(523)) + def test_True_is_not_a_schema_even_if_you_forget_to_check(self): + resolver = validators.RefResolver("", {}) + with self.assertRaises(Exception) as e: + self.Validator(True, resolver=resolver).validate(12) + self.assertNotIsInstance(e.exception, exceptions.ValidationError) + + @unittest.skip(bug(523)) + def test_False_is_not_a_schema_even_if_you_forget_to_check(self): + resolver = validators.RefResolver("", {}) + with self.assertRaises(Exception) as e: + self.Validator(False, resolver=resolver).validate(12) + self.assertNotIsInstance(e.exception, exceptions.ValidationError) + + +class TestDraft3Validator(AntiDraft6LeakMixin, ValidatorTestMixin, TestCase): + Validator = validators.Draft3Validator + valid: tuple[dict, dict] = ({}, {}) + invalid = {"type": "integer"}, "foo" + + def test_any_type_is_valid_for_type_any(self): + validator = self.Validator({"type": "any"}) + validator.validate(object()) + + def test_any_type_is_redefinable(self): + """ + Sigh, because why not. + """ + Crazy = validators.extend( + self.Validator, + type_checker=self.Validator.TYPE_CHECKER.redefine( + "any", lambda checker, thing: isinstance(thing, int), + ), + ) + validator = Crazy({"type": "any"}) + validator.validate(12) + with self.assertRaises(exceptions.ValidationError): + validator.validate("foo") + + def test_is_type_is_true_for_any_type(self): + self.assertTrue(self.Validator({"type": "any"}).is_valid(object())) + + def test_is_type_does_not_evade_bool_if_it_is_being_tested(self): + self.assertTrue(self.Validator({}).is_type(True, "boolean")) + self.assertTrue(self.Validator({"type": "any"}).is_valid(True)) + + +class TestDraft4Validator(AntiDraft6LeakMixin, ValidatorTestMixin, TestCase): + Validator = validators.Draft4Validator + valid: tuple[dict, dict] = ({}, {}) + invalid = {"type": "integer"}, "foo" + + +class TestDraft6Validator(ValidatorTestMixin, TestCase): + Validator = validators.Draft6Validator + valid: tuple[dict, dict] = ({}, {}) + invalid = {"type": "integer"}, "foo" + + +class TestDraft7Validator(ValidatorTestMixin, TestCase): + Validator = validators.Draft7Validator + valid: tuple[dict, dict] = ({}, {}) + invalid = {"type": "integer"}, "foo" + + +class TestDraft201909Validator(ValidatorTestMixin, TestCase): + Validator = validators.Draft201909Validator + valid: tuple[dict, dict] = ({}, {}) + invalid = {"type": "integer"}, "foo" + + +class TestDraft202012Validator(ValidatorTestMixin, TestCase): + Validator = validators.Draft202012Validator + valid: tuple[dict, dict] = ({}, {}) + invalid = {"type": "integer"}, "foo" + + +class TestValidatorFor(TestCase): + def test_draft_3(self): + schema = {"$schema": "http://json-schema.org/draft-03/schema"} + self.assertIs( + validators.validator_for(schema), + validators.Draft3Validator, + ) + + schema = {"$schema": "http://json-schema.org/draft-03/schema#"} + self.assertIs( + validators.validator_for(schema), + validators.Draft3Validator, + ) + + def test_draft_4(self): + schema = {"$schema": "http://json-schema.org/draft-04/schema"} + self.assertIs( + validators.validator_for(schema), + validators.Draft4Validator, + ) + + schema = {"$schema": 
"http://json-schema.org/draft-04/schema#"} + self.assertIs( + validators.validator_for(schema), + validators.Draft4Validator, + ) + + def test_draft_6(self): + schema = {"$schema": "http://json-schema.org/draft-06/schema"} + self.assertIs( + validators.validator_for(schema), + validators.Draft6Validator, + ) + + schema = {"$schema": "http://json-schema.org/draft-06/schema#"} + self.assertIs( + validators.validator_for(schema), + validators.Draft6Validator, + ) + + def test_draft_7(self): + schema = {"$schema": "http://json-schema.org/draft-07/schema"} + self.assertIs( + validators.validator_for(schema), + validators.Draft7Validator, + ) + + schema = {"$schema": "http://json-schema.org/draft-07/schema#"} + self.assertIs( + validators.validator_for(schema), + validators.Draft7Validator, + ) + + def test_draft_201909(self): + schema = {"$schema": "https://json-schema.org/draft/2019-09/schema"} + self.assertIs( + validators.validator_for(schema), + validators.Draft201909Validator, + ) + + schema = {"$schema": "https://json-schema.org/draft/2019-09/schema#"} + self.assertIs( + validators.validator_for(schema), + validators.Draft201909Validator, + ) + + def test_draft_202012(self): + schema = {"$schema": "https://json-schema.org/draft/2020-12/schema"} + self.assertIs( + validators.validator_for(schema), + validators.Draft202012Validator, + ) + + schema = {"$schema": "https://json-schema.org/draft/2020-12/schema#"} + self.assertIs( + validators.validator_for(schema), + validators.Draft202012Validator, + ) + + def test_True(self): + self.assertIs( + validators.validator_for(True), + validators._LATEST_VERSION, + ) + + def test_False(self): + self.assertIs( + validators.validator_for(False), + validators._LATEST_VERSION, + ) + + def test_custom_validator(self): + Validator = validators.create( + meta_schema={"id": "meta schema id"}, + version="12", + id_of=lambda s: s.get("id", ""), + ) + schema = {"$schema": "meta schema id"} + self.assertIs( + validators.validator_for(schema), + Validator, + ) + + def test_custom_validator_draft6(self): + Validator = validators.create( + meta_schema={"$id": "meta schema $id"}, + version="13", + ) + schema = {"$schema": "meta schema $id"} + self.assertIs( + validators.validator_for(schema), + Validator, + ) + + def test_validator_for_jsonschema_default(self): + self.assertIs(validators.validator_for({}), validators._LATEST_VERSION) + + def test_validator_for_custom_default(self): + self.assertIs(validators.validator_for({}, default=None), None) + + def test_warns_if_meta_schema_specified_was_not_found(self): + with self.assertWarns(DeprecationWarning) as cm: + validators.validator_for(schema={"$schema": "unknownSchema"}) + + self.assertEqual(cm.filename, __file__) + self.assertEqual( + str(cm.warning), + "The metaschema specified by $schema was not found. 
" + "Using the latest draft to validate, but this will raise " + "an error in the future.", + ) + + def test_does_not_warn_if_meta_schema_is_unspecified(self): + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + validators.validator_for(schema={}, default={}) + self.assertFalse(w) + + def test_validator_for_custom_default_with_schema(self): + schema, default = {"$schema": "mailto:foo@example.com"}, object() + self.assertIs(validators.validator_for(schema, default), default) + + +class TestValidate(TestCase): + def assertUses(self, schema, Validator): + result = [] + with mock.patch.object(Validator, "check_schema", result.append): + validators.validate({}, schema) + self.assertEqual(result, [schema]) + + def test_draft3_validator_is_chosen(self): + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-03/schema#"}, + Validator=validators.Draft3Validator, + ) + # Make sure it works without the empty fragment + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-03/schema"}, + Validator=validators.Draft3Validator, + ) + + def test_draft4_validator_is_chosen(self): + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-04/schema#"}, + Validator=validators.Draft4Validator, + ) + # Make sure it works without the empty fragment + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-04/schema"}, + Validator=validators.Draft4Validator, + ) + + def test_draft6_validator_is_chosen(self): + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-06/schema#"}, + Validator=validators.Draft6Validator, + ) + # Make sure it works without the empty fragment + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-06/schema"}, + Validator=validators.Draft6Validator, + ) + + def test_draft7_validator_is_chosen(self): + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-07/schema#"}, + Validator=validators.Draft7Validator, + ) + # Make sure it works without the empty fragment + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-07/schema"}, + Validator=validators.Draft7Validator, + ) + + def test_draft202012_validator_is_chosen(self): + self.assertUses( + schema={ + "$schema": "https://json-schema.org/draft/2020-12/schema#", + }, + Validator=validators.Draft202012Validator, + ) + # Make sure it works without the empty fragment + self.assertUses( + schema={ + "$schema": "https://json-schema.org/draft/2020-12/schema", + }, + Validator=validators.Draft202012Validator, + ) + + def test_draft202012_validator_is_the_default(self): + self.assertUses(schema={}, Validator=validators.Draft202012Validator) + + def test_validation_error_message(self): + with self.assertRaises(exceptions.ValidationError) as e: + validators.validate(12, {"type": "string"}) + self.assertRegex( + str(e.exception), + "(?s)Failed validating '.*' in schema.*On instance", + ) + + def test_schema_error_message(self): + with self.assertRaises(exceptions.SchemaError) as e: + validators.validate(12, {"type": 12}) + self.assertRegex( + str(e.exception), + "(?s)Failed validating '.*' in metaschema.*On schema", + ) + + def test_it_uses_best_match(self): + schema = { + "oneOf": [ + {"type": "number", "minimum": 20}, + {"type": "array"}, + ], + } + with self.assertRaises(exceptions.ValidationError) as e: + validators.validate(12, schema) + self.assertIn("12 is less than the minimum of 20", str(e.exception)) + + +class TestRefResolver(TestCase): + + base_uri = "" + stored_uri = "foo://stored" + 
stored_schema = {"stored": "schema"} + + def setUp(self): + self.referrer = {} + self.store = {self.stored_uri: self.stored_schema} + self.resolver = validators.RefResolver( + self.base_uri, self.referrer, self.store, + ) + + def test_it_does_not_retrieve_schema_urls_from_the_network(self): + ref = validators.Draft3Validator.META_SCHEMA["id"] + with mock.patch.object(self.resolver, "resolve_remote") as patched: + with self.resolver.resolving(ref) as resolved: + pass + self.assertEqual(resolved, validators.Draft3Validator.META_SCHEMA) + self.assertFalse(patched.called) + + def test_it_resolves_local_refs(self): + ref = "#/properties/foo" + self.referrer["properties"] = {"foo": object()} + with self.resolver.resolving(ref) as resolved: + self.assertEqual(resolved, self.referrer["properties"]["foo"]) + + def test_it_resolves_local_refs_with_id(self): + schema = {"id": "http://bar/schema#", "a": {"foo": "bar"}} + resolver = validators.RefResolver.from_schema( + schema, + id_of=lambda schema: schema.get("id", ""), + ) + with resolver.resolving("#/a") as resolved: + self.assertEqual(resolved, schema["a"]) + with resolver.resolving("http://bar/schema#/a") as resolved: + self.assertEqual(resolved, schema["a"]) + + def test_it_retrieves_stored_refs(self): + with self.resolver.resolving(self.stored_uri) as resolved: + self.assertIs(resolved, self.stored_schema) + + self.resolver.store["cached_ref"] = {"foo": 12} + with self.resolver.resolving("cached_ref#/foo") as resolved: + self.assertEqual(resolved, 12) + + def test_it_retrieves_unstored_refs_via_requests(self): + ref = "http://bar#baz" + schema = {"baz": 12} + + if "requests" in sys.modules: + self.addCleanup( + sys.modules.__setitem__, "requests", sys.modules["requests"], + ) + sys.modules["requests"] = ReallyFakeRequests({"http://bar": schema}) + + with self.resolver.resolving(ref) as resolved: + self.assertEqual(resolved, 12) + + def test_it_retrieves_unstored_refs_via_urlopen(self): + ref = "http://bar#baz" + schema = {"baz": 12} + + if "requests" in sys.modules: + self.addCleanup( + sys.modules.__setitem__, "requests", sys.modules["requests"], + ) + sys.modules["requests"] = None + + @contextmanager + def fake_urlopen(url): + self.assertEqual(url, "http://bar") + yield BytesIO(json.dumps(schema).encode("utf8")) + + self.addCleanup(setattr, validators, "urlopen", validators.urlopen) + validators.urlopen = fake_urlopen + + with self.resolver.resolving(ref) as resolved: + pass + self.assertEqual(resolved, 12) + + def test_it_retrieves_local_refs_via_urlopen(self): + with tempfile.NamedTemporaryFile(delete=False, mode="wt") as tempf: + self.addCleanup(os.remove, tempf.name) + json.dump({"foo": "bar"}, tempf) + + ref = "file://{}#foo".format(pathname2url(tempf.name)) + with self.resolver.resolving(ref) as resolved: + self.assertEqual(resolved, "bar") + + def test_it_can_construct_a_base_uri_from_a_schema(self): + schema = {"id": "foo"} + resolver = validators.RefResolver.from_schema( + schema, + id_of=lambda schema: schema.get("id", ""), + ) + self.assertEqual(resolver.base_uri, "foo") + self.assertEqual(resolver.resolution_scope, "foo") + with resolver.resolving("") as resolved: + self.assertEqual(resolved, schema) + with resolver.resolving("#") as resolved: + self.assertEqual(resolved, schema) + with resolver.resolving("foo") as resolved: + self.assertEqual(resolved, schema) + with resolver.resolving("foo#") as resolved: + self.assertEqual(resolved, schema) + + def test_it_can_construct_a_base_uri_from_a_schema_without_id(self): + schema = {} 
+ resolver = validators.RefResolver.from_schema(schema) + self.assertEqual(resolver.base_uri, "") + self.assertEqual(resolver.resolution_scope, "") + with resolver.resolving("") as resolved: + self.assertEqual(resolved, schema) + with resolver.resolving("#") as resolved: + self.assertEqual(resolved, schema) + + def test_custom_uri_scheme_handlers(self): + def handler(url): + self.assertEqual(url, ref) + return schema + + schema = {"foo": "bar"} + ref = "foo://bar" + resolver = validators.RefResolver("", {}, handlers={"foo": handler}) + with resolver.resolving(ref) as resolved: + self.assertEqual(resolved, schema) + + def test_cache_remote_on(self): + response = [object()] + + def handler(url): + try: + return response.pop() + except IndexError: # pragma: no cover + self.fail("Response must not have been cached!") + + ref = "foo://bar" + resolver = validators.RefResolver( + "", {}, cache_remote=True, handlers={"foo": handler}, + ) + with resolver.resolving(ref): + pass + with resolver.resolving(ref): + pass + + def test_cache_remote_off(self): + response = [object()] + + def handler(url): + try: + return response.pop() + except IndexError: # pragma: no cover + self.fail("Handler called twice!") + + ref = "foo://bar" + resolver = validators.RefResolver( + "", {}, cache_remote=False, handlers={"foo": handler}, + ) + with resolver.resolving(ref): + pass + + def test_if_you_give_it_junk_you_get_a_resolution_error(self): + error = ValueError("Oh no! What's this?") + + def handler(url): + raise error + + ref = "foo://bar" + resolver = validators.RefResolver("", {}, handlers={"foo": handler}) + with self.assertRaises(exceptions.RefResolutionError) as err: + with resolver.resolving(ref): + self.fail("Shouldn't get this far!") # pragma: no cover + self.assertEqual(err.exception, exceptions.RefResolutionError(error)) + + def test_helpful_error_message_on_failed_pop_scope(self): + resolver = validators.RefResolver("", {}) + resolver.pop_scope() + with self.assertRaises(exceptions.RefResolutionError) as exc: + resolver.pop_scope() + self.assertIn("Failed to pop the scope", str(exc.exception)) + + +def sorted_errors(errors): + def key(error): + return ( + [str(e) for e in error.path], + [str(e) for e in error.schema_path], + ) + return sorted(errors, key=key) + + +@attr.s +class ReallyFakeRequests(object): + + _responses = attr.ib() + + def get(self, url): + response = self._responses.get(url) + if response is None: # pragma: no cover + raise ValueError("Unknown URL: " + repr(url)) + return _ReallyFakeJSONResponse(json.dumps(response)) + + +@attr.s +class _ReallyFakeJSONResponse(object): + + _response = attr.ib() + + def json(self): + return json.loads(self._response) diff --git a/vendor/jsonschema/jsonschema/validators.py b/vendor/jsonschema/jsonschema/validators.py new file mode 100644 index 00000000..79a8da3a --- /dev/null +++ b/vendor/jsonschema/jsonschema/validators.py @@ -0,0 +1,1120 @@ +""" +Creation and extension of validators, with implementations for existing drafts.
+""" +from __future__ import annotations + +from collections import deque +from collections.abc import Sequence +from functools import lru_cache +from operator import methodcaller +from urllib.parse import unquote, urldefrag, urljoin, urlsplit +from urllib.request import urlopen +from warnings import warn +import contextlib +import json +import reprlib +import typing +import warnings + +import attr + +from jsonschema import ( + _format, + _legacy_validators, + _types, + _utils, + _validators, + exceptions, +) + +_UNSET = _utils.Unset() + +_VALIDATORS: dict[str, typing.Any] = {} +_META_SCHEMAS = _utils.URIDict() +_VOCABULARIES: list[tuple[str, typing.Any]] = [] + + +def __getattr__(name): + if name == "ErrorTree": + warnings.warn( + "Importing ErrorTree from jsonschema.validators is deprecated. " + "Instead import it from jsonschema.exceptions.", + DeprecationWarning, + stacklevel=2, + ) + from jsonschema.exceptions import ErrorTree + return ErrorTree + elif name == "validators": + warnings.warn( + "Accessing jsonschema.validators.validators is deprecated. " + "Use jsonschema.validators.validator_for with a given schema.", + DeprecationWarning, + stacklevel=2, + ) + return _VALIDATORS + elif name == "meta_schemas": + warnings.warn( + "Accessing jsonschema.validators.meta_schemas is deprecated. " + "Use jsonschema.validators.validator_for with a given schema.", + DeprecationWarning, + stacklevel=2, + ) + return _META_SCHEMAS + raise AttributeError(f"module {__name__} has no attribute {name}") + + +def validates(version): + """ + Register the decorated validator for a ``version`` of the specification. + + Registered validators and their meta schemas will be considered when + parsing :kw:`$schema` keywords' URIs. + + Arguments: + + version (str): + + An identifier to use as the version's name + + Returns: + + collections.abc.Callable: + + a class decorator to decorate the validator with the version + """ + + def _validates(cls): + _VALIDATORS[version] = cls + meta_schema_id = cls.ID_OF(cls.META_SCHEMA) + _META_SCHEMAS[meta_schema_id] = cls + return cls + return _validates + + +def _id_of(schema): + """ + Return the ID of a schema for recent JSON Schema drafts. + """ + if schema is True or schema is False: + return "" + return schema.get("$id", "") + + +def _store_schema_list(): + if not _VOCABULARIES: + _VOCABULARIES.extend(_utils.load_schema("vocabularies").items()) + return [ + (id, validator.META_SCHEMA) for id, validator in _META_SCHEMAS.items() + ] + _VOCABULARIES + + +def create( + meta_schema, + validators=(), + version=None, + type_checker=_types.draft202012_type_checker, + format_checker=_format.draft202012_format_checker, + id_of=_id_of, + applicable_validators=methodcaller("items"), +): + """ + Create a new validator class. + + Arguments: + + meta_schema (collections.abc.Mapping): + + the meta schema for the new validator class + + validators (collections.abc.Mapping): + + a mapping from names to callables, where each callable will + validate the schema property with the given name. + + Each callable should take 4 arguments: + + 1. a validator instance, + 2. the value of the property being validated within the + instance + 3. the instance + 4. the schema + + version (str): + + an identifier for the version that this validator class will + validate. If provided, the returned validator class will + have its ``__name__`` set to include the version, and also + will have `jsonschema.validators.validates` automatically + called for the given version. 
+ + type_checker (jsonschema.TypeChecker): + + a type checker, used when applying the :kw:`type` keyword. + + If unprovided, a `jsonschema.TypeChecker` will be created + with a set of default types typical of JSON Schema drafts. + + format_checker (jsonschema.FormatChecker): + + a format checker, used when applying the :kw:`format` keyword. + + If unprovided, a `jsonschema.FormatChecker` will be created + with a set of default formats typical of JSON Schema drafts. + + id_of (collections.abc.Callable): + + A function that given a schema, returns its ID. + + applicable_validators (collections.abc.Callable): + + A function that given a schema, returns the list of + applicable validators (validation keywords and callables) + which will be used to validate the instance. + + Returns: + + a new `jsonschema.protocols.Validator` class + """ + # preemptively don't shadow the `Validator.format_checker` local + format_checker_arg = format_checker + + @attr.s + class Validator: + + VALIDATORS = dict(validators) + META_SCHEMA = dict(meta_schema) + TYPE_CHECKER = type_checker + FORMAT_CHECKER = format_checker_arg + ID_OF = staticmethod(id_of) + + schema = attr.ib(repr=reprlib.repr) + resolver = attr.ib(default=None, repr=False) + format_checker = attr.ib(default=None) + + def __attrs_post_init__(self): + if self.resolver is None: + self.resolver = RefResolver.from_schema( + self.schema, + id_of=id_of, + ) + + @classmethod + def check_schema(cls, schema): + for error in cls(cls.META_SCHEMA).iter_errors(schema): + raise exceptions.SchemaError.create_from(error) + + def evolve(self, **changes): + # Essentially reproduces attr.evolve, but may involve instantiating + # a different class than this one. + cls = self.__class__ + + schema = changes.setdefault("schema", self.schema) + NewValidator = validator_for(schema, default=cls) + + for field in attr.fields(cls): + if not field.init: + continue + attr_name = field.name # To deal with private attributes. + init_name = attr_name if attr_name[0] != "_" else attr_name[1:] + if init_name not in changes: + changes[init_name] = getattr(self, attr_name) + + return NewValidator(**changes) + + def iter_errors(self, instance, _schema=None): + if _schema is not None: + warnings.warn( + ( + "Passing a schema to Validator.iter_errors " + "is deprecated and will be removed in a future " + "release. Call validator.evolve(schema=new_schema)." + "iter_errors(...) instead." 
+ ), + DeprecationWarning, + stacklevel=2, + ) + else: + _schema = self.schema + + if _schema is True: + return + elif _schema is False: + yield exceptions.ValidationError( + f"False schema does not allow {instance!r}", + validator=None, + validator_value=None, + instance=instance, + schema=_schema, + ) + return + + scope = id_of(_schema) + if scope: + self.resolver.push_scope(scope) + try: + for k, v in applicable_validators(_schema): + validator = self.VALIDATORS.get(k) + if validator is None: + continue + + errors = validator(self, v, instance, _schema) or () + for error in errors: + # set details if not already set by the called fn + error._set( + validator=k, + validator_value=v, + instance=instance, + schema=_schema, + type_checker=self.TYPE_CHECKER, + ) + if k not in {"if", "$ref"}: + error.schema_path.appendleft(k) + yield error + finally: + if scope: + self.resolver.pop_scope() + + def descend(self, instance, schema, path=None, schema_path=None): + for error in self.evolve(schema=schema).iter_errors(instance): + if path is not None: + error.path.appendleft(path) + if schema_path is not None: + error.schema_path.appendleft(schema_path) + yield error + + def validate(self, *args, **kwargs): + for error in self.iter_errors(*args, **kwargs): + raise error + + def is_type(self, instance, type): + try: + return self.TYPE_CHECKER.is_type(instance, type) + except exceptions.UndefinedTypeCheck: + raise exceptions.UnknownType(type, instance, self.schema) + + def is_valid(self, instance, _schema=None): + if _schema is not None: + warnings.warn( + ( + "Passing a schema to Validator.is_valid is deprecated " + "and will be removed in a future release. Call " + "validator.evolve(schema=new_schema).is_valid(...) " + "instead." + ), + DeprecationWarning, + stacklevel=2, + ) + self = self.evolve(schema=_schema) + + error = next(self.iter_errors(instance), None) + return error is None + + if version is not None: + safe = version.title().replace(" ", "").replace("-", "") + Validator.__name__ = Validator.__qualname__ = f"{safe}Validator" + Validator = validates(version)(Validator) + + return Validator + + +def extend( + validator, + validators=(), + version=None, + type_checker=None, + format_checker=None, +): + """ + Create a new validator class by extending an existing one. + + Arguments: + + validator (jsonschema.protocols.Validator): + + an existing validator class + + validators (collections.abc.Mapping): + + a mapping of new validator callables to extend with, whose + structure is as in `create`. + + .. note:: + + Any validator callables with the same name as an + existing one will (silently) replace the old validator + callable entirely, effectively overriding any validation + done in the "parent" validator class. + + If you wish to instead extend the behavior of a parent's + validator callable, delegate and call it directly in + the new validator function by retrieving it using + ``OldValidator.VALIDATORS["validation_keyword_name"]``. + + version (str): + + a version for the new validator class + + type_checker (jsonschema.TypeChecker): + + a type checker, used when applying the :kw:`type` keyword. + + If unprovided, the type checker of the extended + `jsonschema.protocols.Validator` will be carried along. + + format_checker (jsonschema.FormatChecker): + + a format checker, used when applying the :kw:`format` keyword. + + If unprovided, the format checker of the extended + `jsonschema.protocols.Validator` will be carried along. 
+ + Returns: + + a new `jsonschema.protocols.Validator` class extending the one + provided + + .. note:: Meta Schemas + + The new validator class will have its parent's meta schema. + + If you wish to change or extend the meta schema in the new + validator class, modify ``META_SCHEMA`` directly on the returned + class. Note that no implicit copying is done, so a copy should + likely be made before modifying it, in order to not affect the + old validator. + """ + + all_validators = dict(validator.VALIDATORS) + all_validators.update(validators) + + if type_checker is None: + type_checker = validator.TYPE_CHECKER + if format_checker is None: + format_checker = validator.FORMAT_CHECKER + return create( + meta_schema=validator.META_SCHEMA, + validators=all_validators, + version=version, + type_checker=type_checker, + format_checker=format_checker, + id_of=validator.ID_OF, + ) + + +Draft3Validator = create( + meta_schema=_utils.load_schema("draft3"), + validators={ + "$ref": _validators.ref, + "additionalItems": _validators.additionalItems, + "additionalProperties": _validators.additionalProperties, + "dependencies": _legacy_validators.dependencies_draft3, + "disallow": _legacy_validators.disallow_draft3, + "divisibleBy": _validators.multipleOf, + "enum": _validators.enum, + "extends": _legacy_validators.extends_draft3, + "format": _validators.format, + "items": _legacy_validators.items_draft3_draft4, + "maxItems": _validators.maxItems, + "maxLength": _validators.maxLength, + "maximum": _legacy_validators.maximum_draft3_draft4, + "minItems": _validators.minItems, + "minLength": _validators.minLength, + "minimum": _legacy_validators.minimum_draft3_draft4, + "pattern": _validators.pattern, + "patternProperties": _validators.patternProperties, + "properties": _legacy_validators.properties_draft3, + "type": _legacy_validators.type_draft3, + "uniqueItems": _validators.uniqueItems, + }, + type_checker=_types.draft3_type_checker, + format_checker=_format.draft3_format_checker, + version="draft3", + id_of=lambda schema: schema.get("id", ""), + applicable_validators=_legacy_validators.ignore_ref_siblings, +) + +Draft4Validator = create( + meta_schema=_utils.load_schema("draft4"), + validators={ + "$ref": _validators.ref, + "additionalItems": _validators.additionalItems, + "additionalProperties": _validators.additionalProperties, + "allOf": _validators.allOf, + "anyOf": _validators.anyOf, + "dependencies": _legacy_validators.dependencies_draft4_draft6_draft7, + "enum": _validators.enum, + "format": _validators.format, + "items": _legacy_validators.items_draft3_draft4, + "maxItems": _validators.maxItems, + "maxLength": _validators.maxLength, + "maxProperties": _validators.maxProperties, + "maximum": _legacy_validators.maximum_draft3_draft4, + "minItems": _validators.minItems, + "minLength": _validators.minLength, + "minProperties": _validators.minProperties, + "minimum": _legacy_validators.minimum_draft3_draft4, + "multipleOf": _validators.multipleOf, + "not": _validators.not_, + "oneOf": _validators.oneOf, + "pattern": _validators.pattern, + "patternProperties": _validators.patternProperties, + "properties": _validators.properties, + "required": _validators.required, + "type": _validators.type, + "uniqueItems": _validators.uniqueItems, + }, + type_checker=_types.draft4_type_checker, + format_checker=_format.draft4_format_checker, + version="draft4", + id_of=lambda schema: schema.get("id", ""), + applicable_validators=_legacy_validators.ignore_ref_siblings, +) + +Draft6Validator = create( + 
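+    # Compared to draft 4 above: draft 6 switches the identifier keyword
+    # from "id" to "$id" (handled by the default id_of), and adds const,
+    # contains, propertyNames and the numeric exclusiveMaximum/exclusiveMinimum.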
meta_schema=_utils.load_schema("draft6"), + validators={ + "$ref": _validators.ref, + "additionalItems": _validators.additionalItems, + "additionalProperties": _validators.additionalProperties, + "allOf": _validators.allOf, + "anyOf": _validators.anyOf, + "const": _validators.const, + "contains": _legacy_validators.contains_draft6_draft7, + "dependencies": _legacy_validators.dependencies_draft4_draft6_draft7, + "enum": _validators.enum, + "exclusiveMaximum": _validators.exclusiveMaximum, + "exclusiveMinimum": _validators.exclusiveMinimum, + "format": _validators.format, + "items": _legacy_validators.items_draft6_draft7_draft201909, + "maxItems": _validators.maxItems, + "maxLength": _validators.maxLength, + "maxProperties": _validators.maxProperties, + "maximum": _validators.maximum, + "minItems": _validators.minItems, + "minLength": _validators.minLength, + "minProperties": _validators.minProperties, + "minimum": _validators.minimum, + "multipleOf": _validators.multipleOf, + "not": _validators.not_, + "oneOf": _validators.oneOf, + "pattern": _validators.pattern, + "patternProperties": _validators.patternProperties, + "properties": _validators.properties, + "propertyNames": _validators.propertyNames, + "required": _validators.required, + "type": _validators.type, + "uniqueItems": _validators.uniqueItems, + }, + type_checker=_types.draft6_type_checker, + format_checker=_format.draft6_format_checker, + version="draft6", + applicable_validators=_legacy_validators.ignore_ref_siblings, +) + +Draft7Validator = create( + meta_schema=_utils.load_schema("draft7"), + validators={ + "$ref": _validators.ref, + "additionalItems": _validators.additionalItems, + "additionalProperties": _validators.additionalProperties, + "allOf": _validators.allOf, + "anyOf": _validators.anyOf, + "const": _validators.const, + "contains": _legacy_validators.contains_draft6_draft7, + "dependencies": _legacy_validators.dependencies_draft4_draft6_draft7, + "enum": _validators.enum, + "exclusiveMaximum": _validators.exclusiveMaximum, + "exclusiveMinimum": _validators.exclusiveMinimum, + "format": _validators.format, + "if": _validators.if_, + "items": _legacy_validators.items_draft6_draft7_draft201909, + "maxItems": _validators.maxItems, + "maxLength": _validators.maxLength, + "maxProperties": _validators.maxProperties, + "maximum": _validators.maximum, + "minItems": _validators.minItems, + "minLength": _validators.minLength, + "minProperties": _validators.minProperties, + "minimum": _validators.minimum, + "multipleOf": _validators.multipleOf, + "not": _validators.not_, + "oneOf": _validators.oneOf, + "pattern": _validators.pattern, + "patternProperties": _validators.patternProperties, + "properties": _validators.properties, + "propertyNames": _validators.propertyNames, + "required": _validators.required, + "type": _validators.type, + "uniqueItems": _validators.uniqueItems, + }, + type_checker=_types.draft7_type_checker, + format_checker=_format.draft7_format_checker, + version="draft7", + applicable_validators=_legacy_validators.ignore_ref_siblings, +) + +Draft201909Validator = create( + meta_schema=_utils.load_schema("draft2019-09"), + validators={ + "$recursiveRef": _legacy_validators.recursiveRef, + "$ref": _validators.ref, + "additionalItems": _validators.additionalItems, + "additionalProperties": _validators.additionalProperties, + "allOf": _validators.allOf, + "anyOf": _validators.anyOf, + "const": _validators.const, + "contains": _validators.contains, + "dependentRequired": _validators.dependentRequired, + 
"dependentSchemas": _validators.dependentSchemas, + "enum": _validators.enum, + "exclusiveMaximum": _validators.exclusiveMaximum, + "exclusiveMinimum": _validators.exclusiveMinimum, + "format": _validators.format, + "if": _validators.if_, + "items": _legacy_validators.items_draft6_draft7_draft201909, + "maxItems": _validators.maxItems, + "maxLength": _validators.maxLength, + "maxProperties": _validators.maxProperties, + "maximum": _validators.maximum, + "minItems": _validators.minItems, + "minLength": _validators.minLength, + "minProperties": _validators.minProperties, + "minimum": _validators.minimum, + "multipleOf": _validators.multipleOf, + "not": _validators.not_, + "oneOf": _validators.oneOf, + "pattern": _validators.pattern, + "patternProperties": _validators.patternProperties, + "properties": _validators.properties, + "propertyNames": _validators.propertyNames, + "required": _validators.required, + "type": _validators.type, + "unevaluatedItems": _validators.unevaluatedItems, + "unevaluatedProperties": _validators.unevaluatedProperties, + "uniqueItems": _validators.uniqueItems, + }, + type_checker=_types.draft201909_type_checker, + format_checker=_format.draft201909_format_checker, + version="draft2019-09", +) + +Draft202012Validator = create( + meta_schema=_utils.load_schema("draft2020-12"), + validators={ + "$dynamicRef": _validators.dynamicRef, + "$ref": _validators.ref, + "additionalItems": _validators.additionalItems, + "additionalProperties": _validators.additionalProperties, + "allOf": _validators.allOf, + "anyOf": _validators.anyOf, + "const": _validators.const, + "contains": _validators.contains, + "dependentRequired": _validators.dependentRequired, + "dependentSchemas": _validators.dependentSchemas, + "enum": _validators.enum, + "exclusiveMaximum": _validators.exclusiveMaximum, + "exclusiveMinimum": _validators.exclusiveMinimum, + "format": _validators.format, + "if": _validators.if_, + "items": _validators.items, + "maxItems": _validators.maxItems, + "maxLength": _validators.maxLength, + "maxProperties": _validators.maxProperties, + "maximum": _validators.maximum, + "minItems": _validators.minItems, + "minLength": _validators.minLength, + "minProperties": _validators.minProperties, + "minimum": _validators.minimum, + "multipleOf": _validators.multipleOf, + "not": _validators.not_, + "oneOf": _validators.oneOf, + "pattern": _validators.pattern, + "patternProperties": _validators.patternProperties, + "prefixItems": _validators.prefixItems, + "properties": _validators.properties, + "propertyNames": _validators.propertyNames, + "required": _validators.required, + "type": _validators.type, + "unevaluatedItems": _validators.unevaluatedItems, + "unevaluatedProperties": _validators.unevaluatedProperties, + "uniqueItems": _validators.uniqueItems, + }, + type_checker=_types.draft202012_type_checker, + format_checker=_format.draft202012_format_checker, + version="draft2020-12", +) + +_LATEST_VERSION = Draft202012Validator + + +class RefResolver(object): + """ + Resolve JSON References. 
+ + Arguments: + + base_uri (str): + + The URI of the referring document + + referrer: + + The actual referring document + + store (dict): + + A mapping from URIs to documents to cache + + cache_remote (bool): + + Whether remote refs should be cached after first resolution + + handlers (dict): + + A mapping from URI schemes to functions that should be used + to retrieve them + + urljoin_cache (:func:`functools.lru_cache`): + + A cache that will be used for caching the results of joining + the resolution scope to subscopes. + + remote_cache (:func:`functools.lru_cache`): + + A cache that will be used for caching the results of + resolved remote URLs. + + Attributes: + + cache_remote (bool): + + Whether remote refs should be cached after first resolution + """ + + def __init__( + self, + base_uri, + referrer, + store=(), + cache_remote=True, + handlers=(), + urljoin_cache=None, + remote_cache=None, + ): + if urljoin_cache is None: + urljoin_cache = lru_cache(1024)(urljoin) + if remote_cache is None: + remote_cache = lru_cache(1024)(self.resolve_from_url) + + self.referrer = referrer + self.cache_remote = cache_remote + self.handlers = dict(handlers) + + self._scopes_stack = [base_uri] + self.store = _utils.URIDict(_store_schema_list()) + self.store.update(store) + self.store[base_uri] = referrer + + self._urljoin_cache = urljoin_cache + self._remote_cache = remote_cache + + @classmethod + def from_schema(cls, schema, id_of=_id_of, *args, **kwargs): + """ + Construct a resolver from a JSON schema object. + + Arguments: + + schema: + + the referring schema + + Returns: + + `RefResolver` + """ + + return cls(base_uri=id_of(schema), referrer=schema, *args, **kwargs) + + def push_scope(self, scope): + """ + Enter a given sub-scope. + + Treats further dereferences as being performed underneath the + given scope. + """ + self._scopes_stack.append( + self._urljoin_cache(self.resolution_scope, scope), + ) + + def pop_scope(self): + """ + Exit the most recent entered scope. + + Treats further dereferences as being performed underneath the + original scope. + + Don't call this method more times than `push_scope` has been + called. + """ + try: + self._scopes_stack.pop() + except IndexError: + raise exceptions.RefResolutionError( + "Failed to pop the scope from an empty stack. " + "`pop_scope()` should only be called once for every " + "`push_scope()`", + ) + + @property + def resolution_scope(self): + """ + Retrieve the current resolution scope. + """ + return self._scopes_stack[-1] + + @property + def base_uri(self): + """ + Retrieve the current base URI, not including any fragment. + """ + uri, _ = urldefrag(self.resolution_scope) + return uri + + @contextlib.contextmanager + def in_scope(self, scope): + """ + Temporarily enter the given scope for the duration of the context. + """ + warnings.warn( + "jsonschema.RefResolver.in_scope is deprecated and will be " + "removed in a future release.", + DeprecationWarning, + stacklevel=3, + ) + self.push_scope(scope) + try: + yield + finally: + self.pop_scope() + + @contextlib.contextmanager + def resolving(self, ref): + """ + Resolve the given ``ref`` and enter its resolution scope. + + Exits the scope on exit of this context manager. 
+ + Arguments: + + ref (str): + + The reference to resolve + """ + + url, resolved = self.resolve(ref) + self.push_scope(url) + try: + yield resolved + finally: + self.pop_scope() + + def _find_in_referrer(self, key): + return self._get_subschemas_cache()[key] + + @lru_cache() # noqa: B019 + def _get_subschemas_cache(self): + cache = {key: [] for key in _SUBSCHEMAS_KEYWORDS} + for keyword, subschema in _search_schema( + self.referrer, _match_subschema_keywords, + ): + cache[keyword].append(subschema) + return cache + + @lru_cache() # noqa: B019 + def _find_in_subschemas(self, url): + subschemas = self._get_subschemas_cache()["$id"] + if not subschemas: + return None + uri, fragment = urldefrag(url) + for subschema in subschemas: + target_uri = self._urljoin_cache( + self.resolution_scope, subschema["$id"], + ) + if target_uri.rstrip("/") == uri.rstrip("/"): + if fragment: + subschema = self.resolve_fragment(subschema, fragment) + return url, subschema + return None + + def resolve(self, ref): + """ + Resolve the given reference. + """ + url = self._urljoin_cache(self.resolution_scope, ref).rstrip("/") + + match = self._find_in_subschemas(url) + if match is not None: + return match + + return url, self._remote_cache(url) + + def resolve_from_url(self, url): + """ + Resolve the given URL. + """ + url, fragment = urldefrag(url) + if url: + try: + document = self.store[url] + except KeyError: + try: + document = self.resolve_remote(url) + except Exception as exc: + raise exceptions.RefResolutionError(exc) + else: + document = self.referrer + + return self.resolve_fragment(document, fragment) + + def resolve_fragment(self, document, fragment): + """ + Resolve a ``fragment`` within the referenced ``document``. + + Arguments: + + document: + + The referent document + + fragment (str): + + a URI fragment to resolve within it + """ + + fragment = fragment.lstrip("/") + + if not fragment: + return document + + if document is self.referrer: + find = self._find_in_referrer + else: + + def find(key): + yield from _search_schema(document, _match_keyword(key)) + + for keyword in ["$anchor", "$dynamicAnchor"]: + for subschema in find(keyword): + if fragment == subschema[keyword]: + return subschema + for keyword in ["id", "$id"]: + for subschema in find(keyword): + if "#" + fragment == subschema[keyword]: + return subschema + + # Resolve via path + parts = unquote(fragment).split("/") if fragment else [] + for part in parts: + part = part.replace("~1", "/").replace("~0", "~") + + if isinstance(document, Sequence): + # Array indexes should be turned into integers + try: + part = int(part) + except ValueError: + pass + try: + document = document[part] + except (TypeError, LookupError): + raise exceptions.RefResolutionError( + f"Unresolvable JSON pointer: {fragment!r}", + ) + + return document + + def resolve_remote(self, uri): + """ + Resolve a remote ``uri``. + + If called directly, does not check the store first, but after + retrieving the document at the specified URI it will be saved in + the store if :attr:`cache_remote` is True. + + .. note:: + + If the requests_ library is present, ``jsonschema`` will use it to + request the remote ``uri``, so that the correct encoding is + detected and used. + + If it isn't, or if the scheme of the ``uri`` is not ``http`` or + ``https``, UTF-8 is assumed. + + Arguments: + + uri (str): + + The URI to resolve + + Returns: + + The retrieved document + + .. 
_requests: https://pypi.org/project/requests/ + """ + try: + import requests + except ImportError: + requests = None + + scheme = urlsplit(uri).scheme + + if scheme in self.handlers: + result = self.handlers[scheme](uri) + elif scheme in ["http", "https"] and requests: + # Requests has support for detecting the correct encoding of + # json over http + result = requests.get(uri).json() + else: + # Otherwise, pass off to urllib and assume utf-8 + with urlopen(uri) as url: + result = json.loads(url.read().decode("utf-8")) + + if self.cache_remote: + self.store[uri] = result + return result + + +_SUBSCHEMAS_KEYWORDS = ("$id", "id", "$anchor", "$dynamicAnchor") + + +def _match_keyword(keyword): + + def matcher(value): + if keyword in value: + yield value + + return matcher + + +def _match_subschema_keywords(value): + for keyword in _SUBSCHEMAS_KEYWORDS: + if keyword in value: + yield keyword, value + + +def _search_schema(schema, matcher): + """Breadth-first search routine.""" + values = deque([schema]) + while values: + value = values.pop() + if not isinstance(value, dict): + continue + yield from matcher(value) + values.extendleft(value.values()) + + +def validate(instance, schema, cls=None, *args, **kwargs): + """ + Validate an instance under the given schema. + + >>> validate([2, 3, 4], {"maxItems": 2}) + Traceback (most recent call last): + ... + ValidationError: [2, 3, 4] is too long + + :func:`validate` will first verify that the provided schema is + itself valid, since not doing so can lead to less obvious error + messages and fail in less obvious or consistent ways. + + If you know you have a valid schema already, especially if you + intend to validate multiple instances with the same schema, you + likely would prefer using the `Validator.validate` method directly + on a specific validator (e.g. ``Draft7Validator.validate``). + + + Arguments: + + instance: + + The instance to validate + + schema: + + The schema to validate with + + cls (Validator): + + The class that will be used to validate the instance. + + If the ``cls`` argument is not provided, two things will happen + in accordance with the specification. First, if the schema has a + :kw:`$schema` keyword containing a known meta-schema [#]_ then the + proper validator will be used. The specification recommends that + all schemas contain :kw:`$schema` properties for this reason. If no + :kw:`$schema` property is found, the default validator class is the + latest released draft. + + Any other provided positional and keyword arguments will be passed + on when instantiating the ``cls``. + + Raises: + + `jsonschema.exceptions.ValidationError` if the instance + is invalid + + `jsonschema.exceptions.SchemaError` if the schema itself + is invalid + + .. rubric:: Footnotes + .. [#] known by a validator registered with + `jsonschema.validators.validates` + """ + if cls is None: + cls = validator_for(schema) + + cls.check_schema(schema) + validator = cls(schema, *args, **kwargs) + error = exceptions.best_match(validator.iter_errors(instance)) + if error is not None: + raise error + + +def validator_for(schema, default=_UNSET): + """ + Retrieve the validator class appropriate for validating the given schema. + + Uses the :kw:`$schema` keyword that should be present in the given + schema to look up the appropriate validator class. + + Arguments: + + schema (collections.abc.Mapping or bool): + + the schema to look at + + default: + + the default to return if the appropriate validator class + cannot be determined. 
+ + If unprovided, the default is to return the latest supported + draft. + """ + + DefaultValidator = _LATEST_VERSION if default is _UNSET else default + + if schema is True or schema is False or "$schema" not in schema: + return DefaultValidator + if schema["$schema"] not in _META_SCHEMAS: + if default is _UNSET: + warn( + ( + "The metaschema specified by $schema was not found. " + "Using the latest draft to validate, but this will raise " + "an error in the future." + ), + DeprecationWarning, + stacklevel=2, + ) + return _META_SCHEMAS.get(schema["$schema"], DefaultValidator) diff --git a/vendor/jsonschema/pyproject.toml b/vendor/jsonschema/pyproject.toml new file mode 100644 index 00000000..46a3e40b --- /dev/null +++ b/vendor/jsonschema/pyproject.toml @@ -0,0 +1,99 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.version] +path = "jsonschema/__version__.py" + +[project] +name = "jsonschema" +description = "An implementation of JSON Schema validation for Python" +readme = "README.rst" +requires-python = ">=3.7" +license = {text = "MIT"} +keywords = ["validation", "data validation", "jsonschema", "json"] +authors = [ + {email = "Julian+jsonschema@GrayVines.com"}, + {name = "Julian Berman"}, +] +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", +] +dynamic = ["version"] + +dependencies = [ + "attrs>=17.4.0", + "pyrsistent>=0.14.0,!=0.17.0,!=0.17.1,!=0.17.2", + + "importlib_metadata;python_version<'3.8'", + "typing_extensions;python_version<'3.8'", + + "importlib_resources>=1.4.0;python_version<'3.9'", + "pkgutil_resolve_name>=1.3.10;python_version<'3.9'", +] + +[project.optional-dependencies] +format = [ + "fqdn", + "idna", + "isoduration", + "jsonpointer>1.13", + "rfc3339-validator", + "rfc3987", + "uri_template", + "webcolors>=1.11", +] +format-nongpl = [ + "fqdn", + "idna", + "isoduration", + "jsonpointer>1.13", + "rfc3339-validator", + "rfc3986-validator>0.1.0", + "uri_template", + "webcolors>=1.11", +] + +[project.scripts] +jsonschema = "jsonschema.cli:main" + +[project.urls] +Homepage = "https://github.com/python-jsonschema/jsonschema" +Documentation = "https://python-jsonschema.readthedocs.io/" +Issues = "https://github.com/python-jsonschema/jsonschema/issues/" +Funding = "https://github.com/sponsors/Julian" +Tidelift = "https://tidelift.com/subscription/pkg/pypi-jsonschema?utm_source=pypi-jsonschema&utm_medium=referral&utm_campaign=pypi-link" +Changelog = "https://github.com/python-jsonschema/jsonschema/blob/main/CHANGELOG.rst" +Source = "https://github.com/python-jsonschema/jsonschema" + +[tool.isort] +from_first = true +include_trailing_comma = true +multi_line_output = 3 + +[tool.mypy] +ignore_missing_imports = true + +[tool.pydocstyle] +match = "(?!(test_|_|compat|cli)).*\\.py" # see PyCQA/pydocstyle#323 +add-select = [ + "D410", # Trailing whitespace plz +] +add-ignore = [ + "D107", # Hah, no + "D200", # 1-line docstrings don't need to be on one line + "D202", # One line is fine. 
+ "D412", # Trailing whitespace plz + "D413", # No trailing whitespace plz +] diff --git a/vendor/jsonschema/tox.ini b/vendor/jsonschema/tox.ini new file mode 100644 index 00000000..8f444804 --- /dev/null +++ b/vendor/jsonschema/tox.ini @@ -0,0 +1,118 @@ +[tox] +envlist = + py{37,38,39,310,311,py3}-{noextra,format,formatnongpl}-{build,tests}, + readme + safety + secrets + style + typing + docs-{dirhtml,doctest,linkcheck,spelling,style} +skipsdist = True + +[testenv] +changedir = {envtmpdir} +passenv = CODECOV* CI PYTHONUTF8 +setenv = + JSON_SCHEMA_TEST_SUITE = {toxinidir}/json + + coverage,codecov: MAYBE_COVERAGE = coverage run -m + coverage,codecov: COVERAGE_RCFILE={toxinidir}/.coveragerc + coverage,codecov: COVERAGE_DEBUG_FILE={envtmpdir}/coverage-debug + coverage,codecov: COVERAGE_FILE={envtmpdir}/coverage-data +whitelist_externals = + mkdir +commands = + noextra: {envpython} -m pip install --disable-pip-version-check {toxinidir} + format,perf: {envpython} -m pip install --disable-pip-version-check '{toxinidir}[format]' + formatnongpl: {envpython} -m pip install --disable-pip-version-check '{toxinidir}[format-nongpl]' + + # Ignore the deprecation warning until pypa/setuptools#3276 is released + tests,coverage,codecov: {envpython} -Werror -W"ignore:module 'sre_constants' is deprecated:DeprecationWarning" -m {env:MAYBE_COVERAGE:} twisted.trial {posargs:jsonschema} + tests: {envpython} -m doctest {toxinidir}/README.rst + + coverage: {envpython} -m coverage report --show-missing + coverage: {envpython} -m coverage html --directory={envtmpdir}/htmlcov + codecov: {envpython} -m coverage xml -o {envtmpdir}/coverage.xml + codecov: codecov --required --disable gcov --file {envtmpdir}/coverage.xml + + perf: {envpython} {toxinidir}/jsonschema/benchmarks/issue232.py --inherit-environ JSON_SCHEMA_TEST_SUITE {posargs:--output {envtmpdir}/bench-issue232.json} + perfsuite: {envpython} {toxinidir}/jsonschema/benchmarks/json_schema_test_suite.py --inherit-environ JSON_SCHEMA_TEST_SUITE {posargs:--output {envtmpdir}/bench-json_schema_test_suite.json} + + build: {envpython} -m build {toxinidir} --outdir {envtmpdir}/dist +deps = + build: build + + perf,perfsuite: pyperf + + tests,coverage,codecov: twisted + + coverage,codecov: coverage + codecov: codecov + +[testenv:bandit] +deps = bandit +commands = {envbindir}/bandit --recursive {toxinidir}/jsonschema + +[testenv:readme] +deps = + build + docutils + twine +commands = + {envpython} -m build --outdir {envtmpdir}/dist {toxinidir} + {envpython} -m twine check {envtmpdir}/dist/* + {envbindir}/rst2html5.py --halt=warning {toxinidir}/CHANGELOG.rst /dev/null + +[testenv:safety] +deps = safety +commands = + {envpython} -m pip install --disable-pip-version-check '{toxinidir}[format]' + {envpython} -m safety check + +[testenv:secrets] +deps = detect-secrets +commands = {envbindir}/detect-secrets scan {toxinidir} + +[testenv:style] +deps = + flake8 + flake8-broken-line + flake8-bugbear + flake8-commas + flake8-quotes + flake8-tidy-imports +commands = + {envpython} -m flake8 {posargs} {toxinidir}/jsonschema {toxinidir}/docs + +[testenv:typing] +skip_install = true +deps = + mypy + pyrsistent + types-attrs + types-requests +commands = {envpython} -m mypy --config {toxinidir}/pyproject.toml {posargs} {toxinidir}/jsonschema + +[testenv:docs-dirhtml] +commands = {envpython} -m sphinx -b dirhtml {toxinidir}/docs/ {envtmpdir}/build {posargs:-a -n -q -T -W} +deps = + -r{toxinidir}/docs/requirements.txt + +[testenv:docs-doctest] +commands = {envpython} -m sphinx -b doctest 
{toxinidir}/docs/ {envtmpdir}/build {posargs:-a -n -q -T -W} +deps = {[testenv:docs-dirhtml]deps} + +[testenv:docs-linkcheck] +commands = {envpython} -m sphinx -b linkcheck {toxinidir}/docs/ {envtmpdir}/build {posargs:-a -n -q -T -W} +deps = {[testenv:docs-dirhtml]deps} + +[testenv:docs-spelling] +commands = {envpython} -m sphinx -b spelling {toxinidir}/docs/ {envtmpdir}/build {posargs:-a -n -T -W} +deps = {[testenv:docs-dirhtml]deps} + +[testenv:docs-style] +commands = doc8 {posargs} {toxinidir}/docs +deps = + doc8 + pygments + pygments-github-lexers diff --git a/vendor/lark/.github/FUNDING.yml b/vendor/lark/.github/FUNDING.yml new file mode 100644 index 00000000..95bb6bb1 --- /dev/null +++ b/vendor/lark/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: lark-parser +patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/vendor/lark/.github/ISSUE_TEMPLATE/bug_report.md b/vendor/lark/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 00000000..8c4dc39e --- /dev/null +++ b/vendor/lark/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,18 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: '' +assignees: '' + +--- + +**Describe the bug** + +A clear and concise description of what the bug is, and what you expected to happen. + +**To Reproduce** + +Provide a short script that reproduces the erroneous behavior. + +If that is impossible, provide clear steps to reproduce the behavior. diff --git a/vendor/lark/.github/ISSUE_TEMPLATE/feature_request.md b/vendor/lark/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 00000000..98a5eaba --- /dev/null +++ b/vendor/lark/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,17 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: enhancement +assignees: '' + +--- + +**Suggestion** +Provide a clear and concise description of what the problem is, and what you would like to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. diff --git a/vendor/lark/.github/ISSUE_TEMPLATE/other.md b/vendor/lark/.github/ISSUE_TEMPLATE/other.md new file mode 100644 index 00000000..1c8fcda2 --- /dev/null +++ b/vendor/lark/.github/ISSUE_TEMPLATE/other.md @@ -0,0 +1,10 @@ +--- +name: Other +about: For any discussion that doesn't fit the templates +title: '' +labels: '' +assignees: '' + +--- + + diff --git a/vendor/lark/.github/ISSUE_TEMPLATE/question.md b/vendor/lark/.github/ISSUE_TEMPLATE/question.md new file mode 100644 index 00000000..481d923a --- /dev/null +++ b/vendor/lark/.github/ISSUE_TEMPLATE/question.md @@ -0,0 +1,18 @@ +--- +name: Question +about: Ask a question about Lark or request help +title: '' +labels: question +assignees: '' + +--- + +**What is your question?** + +Try to be accurate and concise. 
+ +**If you're having trouble with your code or grammar** + +Provide a small script that encapsulates your issue. + +Explain what you're trying to do, and what is obstructing your progress. diff --git a/vendor/lark/.github/workflows/codecov.yml b/vendor/lark/.github/workflows/codecov.yml new file mode 100644 index 00000000..970b8694 --- /dev/null +++ b/vendor/lark/.github/workflows/codecov.yml @@ -0,0 +1,42 @@ +name: Compute coverage and push to Codecov +on: [push] +jobs: + run: + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + env: + OS: ${{ matrix.os }} + PYTHON: '3.7' + steps: + - uses: actions/checkout@master + - name: Download submodules + run: | + git submodule update --init --recursive + git submodule sync -q + git submodule update --init + - name: Setup Python + uses: actions/setup-python@master + with: + python-version: 3.7 + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r test-requirements.txt + - name: Generate coverage report + run: | + pip install pytest + pip install pytest-cov + pytest --cov=./ --cov-report=xml + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} + files: ./coverage.xml + flags: unittests + env_vars: OS,PYTHON + name: codecov-umbrella + fail_ci_if_error: true + path_to_write_report: ./coverage/codecov_report.txt + verbose: true diff --git a/vendor/lark/.github/workflows/mypy.yml b/vendor/lark/.github/workflows/mypy.yml new file mode 100644 index 00000000..f1c667ff --- /dev/null +++ b/vendor/lark/.github/workflows/mypy.yml @@ -0,0 +1,19 @@ +name: Python type check +on: [push, pull_request] +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - name: Download submodules + run: git submodule update --init --recursive + - name: Set up Python + uses: actions/setup-python@v1 + with: + python-version: 3.8 + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install mypy + - name: Lint with mypy + run: mypy -p lark || true diff --git a/vendor/lark/.github/workflows/tests.yml b/vendor/lark/.github/workflows/tests.yml new file mode 100644 index 00000000..a98a00b5 --- /dev/null +++ b/vendor/lark/.github/workflows/tests.yml @@ -0,0 +1,28 @@ +name: Tests +on: [push, pull_request] + +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [3.6, 3.7, 3.8, 3.9, 3.10.0-rc - 3.10, pypy3] + + steps: + - uses: actions/checkout@v2 + - name: Download submodules + run: | + git submodule update --init --recursive + git submodule sync -q + git submodule update --init + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r test-requirements.txt + - name: Run tests + run: | + python -m tests \ No newline at end of file diff --git a/vendor/lark/.gitignore b/vendor/lark/.gitignore new file mode 100644 index 00000000..b30399ed --- /dev/null +++ b/vendor/lark/.gitignore @@ -0,0 +1,14 @@ +*.pyc +*.pyo +/.tox +/lark_parser.egg-info/** +tags +.vscode +.idea +.ropeproject +.cache +.mypy_cache +/dist +/build +docs/_build +docs/examples \ No newline at end of file diff --git a/vendor/lark/.gitmodules b/vendor/lark/.gitmodules new file mode 100644 index 00000000..8412b440 --- /dev/null +++ b/vendor/lark/.gitmodules @@ -0,0 +1,3 @@ +[submodule "tests/test_nearley/nearley"] + 
path = tests/test_nearley/nearley
	url = https://github.com/Hardmath123/nearley
diff --git a/vendor/lark/CHANGELOG.md b/vendor/lark/CHANGELOG.md
new file mode 100644
index 00000000..22f4b8c1
--- /dev/null
+++ b/vendor/lark/CHANGELOG.md
@@ -0,0 +1,13 @@
+v1.0
+
+- `maybe_placeholders` is now True by default
+
+- Renamed TraditionalLexer to BasicLexer, and 'standard' lexer option to 'basic'
+
+- Default priority is now 0, for both terminals and rules (used to be 1 for terminals)
+
+- Discard mechanism is now done by returning Discard, instead of raising it as an exception.
+
+- `use_accepts` in `UnexpectedInput.match_examples()` is now True by default
+
+- `v_args(meta=True)` now gives meta as the first argument. i.e. `(meta, children)`
\ No newline at end of file
diff --git a/vendor/poetry-core/poetry/core/_vendor/lark-parser.LICENSE b/vendor/lark/LICENSE
similarity index 100%
rename from vendor/poetry-core/poetry/core/_vendor/lark-parser.LICENSE
rename to vendor/lark/LICENSE
diff --git a/vendor/lark/MANIFEST.in b/vendor/lark/MANIFEST.in
new file mode 100644
index 00000000..68946b7c
--- /dev/null
+++ b/vendor/lark/MANIFEST.in
@@ -0,0 +1 @@
+include README.md LICENSE docs/* examples/*.py examples/*.png examples/*.lark tests/*.py tests/*.lark tests/grammars/* tests/test_nearley/*.py tests/test_nearley/grammars/*
diff --git a/vendor/lark/README.md b/vendor/lark/README.md
new file mode 100644
index 00000000..220c8614
--- /dev/null
+++ b/vendor/lark/README.md
@@ -0,0 +1,199 @@
+# Lark - a parsing toolkit for Python
+
+Lark is a parsing toolkit for Python, built with a focus on ergonomics, performance and modularity.
+
+Lark can parse all context-free languages. In practice, that means it is capable of parsing almost any programming language out there, and to some degree most natural languages too.
+
+**Who is it for?**
+
+ - **Beginners**: Lark is very friendly for experimentation. It can parse any grammar you throw at it, no matter how complicated or ambiguous, and do so efficiently. It also constructs an annotated parse-tree for you, using only the grammar and an input, and it gives you convenient and flexible tools to process that parse-tree.
+
+ - **Experts**: Lark implements both Earley(SPPF) and LALR(1), and several different lexers, so you can trade off power and speed, according to your requirements. It also provides a variety of sophisticated features and utilities.
+
+**What can it do?**
+
+ - Parse all context-free grammars, and handle any ambiguity gracefully
+ - Build an annotated parse-tree automagically, no construction code required.
+ - Provide first-rate performance in terms of both Big-O complexity and measured run-time (considering that this is Python ;)
+ - Run on every Python interpreter (it's pure-python)
+ - Generate a stand-alone parser (for LALR(1) grammars)
+
+And many more features. Read ahead and find out!
+
+Most importantly, Lark will save you time and prevent you from getting parsing headaches.
+
+### Quick links
+
+- [Documentation @readthedocs](https://lark-parser.readthedocs.io/)
+- [Cheatsheet (PDF)](/docs/_static/lark_cheatsheet.pdf)
+- [Online IDE](https://lark-parser.org/ide)
+- [Tutorial](/docs/json_tutorial.md) for writing a JSON parser.
+- Blog post: [How to write a DSL with Lark](http://blog.erezsh.com/how-to-write-a-dsl-in-python-with-lark/)
+- [Gitter chat](https://gitter.im/lark-parser/Lobby)
+
+### Install Lark
+
+    $ pip install lark --upgrade
+
+Lark has no dependencies.
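+To quickly confirm which version you got (a sanity check of ours, not part of the upstream README):
+
+```python
+import lark
+print(lark.__version__)  # e.g. "1.1.2"
+```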
+
+[![Tests](https://github.com/lark-parser/lark/actions/workflows/tests.yml/badge.svg)](https://github.com/lark-parser/lark/actions/workflows/tests.yml)
+
+### Syntax Highlighting
+
+Lark provides syntax highlighting for its grammar files (\*.lark):
+
+- [Sublime Text & TextMate](https://github.com/lark-parser/lark_syntax)
+- [vscode](https://github.com/lark-parser/vscode-lark)
+- [Intellij & PyCharm](https://github.com/lark-parser/intellij-syntax-highlighting)
+- [Vim](https://github.com/lark-parser/vim-lark-syntax)
+- [Atom](https://github.com/Alhadis/language-grammars)
+
+### Clones
+
+These are implementations of Lark in other languages. They accept Lark grammars, and provide similar utilities.
+
+- [Lerche (Julia)](https://github.com/jamesrhester/Lerche.jl) - an unofficial clone, written entirely in Julia.
+- [Lark.js (Javascript)](https://github.com/lark-parser/lark.js) - a port of the stand-alone LALR(1) parser generator to JavaScript.
+
+### Hello World
+
+Here is a little program to parse "Hello, World!" (Or any other similar phrase):
+
+```python
+from lark import Lark
+
+l = Lark('''start: WORD "," WORD "!"
+
+            %import common.WORD   // imports from terminal library
+            %ignore " "           // Disregard spaces in text
+         ''')
+
+print( l.parse("Hello, World!") )
+```
+
+And the output is:
+
+```python
+Tree(start, [Token(WORD, 'Hello'), Token(WORD, 'World')])
+```
+
+Notice punctuation doesn't appear in the resulting tree. It's automatically filtered away by Lark.
+
+### Fruit flies like bananas
+
+Lark is great at handling ambiguity. Here is the result of parsing the phrase "fruit flies like bananas":
+
+![fruitflies.png](examples/fruitflies.png)
+
+[Read the code here](https://github.com/lark-parser/lark/tree/master/examples/fruitflies.py), and see [more examples here](https://lark-parser.readthedocs.io/en/latest/examples/index.html).
+
+
+## List of main features
+
+ - Builds a parse-tree (AST) automagically, based on the structure of the grammar
+ - **Earley** parser
+   - Can parse all context-free grammars
+   - Full support for ambiguous grammars
+ - **LALR(1)** parser
+   - Fast and light, competitive with PLY
+   - Can generate a stand-alone parser ([read more](docs/tools.md#stand-alone-parser))
+ - **EBNF** grammar
+ - **Unicode** fully supported
+ - Automatic line & column tracking
+ - Interactive parser for advanced parsing flows and debugging
+ - Grammar composition - Import terminals and rules from other grammars
+ - Standard library of terminals (strings, numbers, names, etc.)
+ - Import grammars from Nearley.js ([read more](/docs/tools.md#importing-grammars-from-nearleyjs))
+ - Extensive test suite [![codecov](https://codecov.io/gh/lark-parser/lark/branch/master/graph/badge.svg?token=lPxgVhCVPK)](https://codecov.io/gh/lark-parser/lark)
+ - Type annotations (MyPy support)
+ - And much more!
+
+See the full list of [features here](https://lark-parser.readthedocs.io/en/latest/features.html)
+
+
+### Comparison to other libraries
+
+#### Performance comparison
+
+Lark is the fastest and lightest (lower is better)
+
+![Run-time Comparison](docs/_static/comparison_runtime.png)
+
+![Memory Usage Comparison](docs/_static/comparison_memory.png)
+
+
+Check out the [JSON tutorial](/docs/json_tutorial.md#conclusion) for more details on how the comparison was made.
+
+*Note: I really wanted to add PLY to the benchmark, but I couldn't find a working JSON parser anywhere written in PLY.
If anyone can point me to one that actually works, I would be happy to add it!* + +*Note 2: The parsimonious code has been optimized for this specific test, unlike the other benchmarks (Lark included). Its "real-world" performance may not be as good.* + +#### Feature comparison + +| Library | Algorithm | Grammar | Builds tree? | Supports ambiguity? | Can handle every CFG? | Line/Column tracking | Generates Stand-alone +|:--------|:----------|:----|:--------|:------------|:------------|:----------|:---------- +| **Lark** | Earley/LALR(1) | EBNF | Yes! | Yes! | Yes! | Yes! | Yes! (LALR only) | +| [PLY](http://www.dabeaz.com/ply/) | LALR(1) | BNF | No | No | No | No | No | +| [PyParsing](https://github.com/pyparsing/pyparsing) | PEG | Combinators | No | No | No\* | No | No | +| [Parsley](https://pypi.python.org/pypi/Parsley) | PEG | EBNF | No | No | No\* | No | No | +| [Parsimonious](https://github.com/erikrose/parsimonious) | PEG | EBNF | Yes | No | No\* | No | No | +| [ANTLR](https://github.com/antlr/antlr4) | LL(*) | EBNF | Yes | No | Yes? | Yes | No | + + +(\* *PEGs cannot handle non-deterministic grammars. Also, according to Wikipedia, it remains unanswered whether PEGs can really parse all deterministic CFGs*) + + +### Projects using Lark + + - [Poetry](https://github.com/python-poetry/poetry-core) - A utility for dependency management and packaging + - [tartiflette](https://github.com/dailymotion/tartiflette) - a GraphQL server by Dailymotion + - [PyQuil](https://github.com/rigetti/pyquil) - Python library for quantum programming using Quil + - [Preql](https://github.com/erezsh/preql) - An interpreted relational query language that compiles to SQL + - [Hypothesis](https://github.com/HypothesisWorks/hypothesis) - Library for property-based testing + - [mappyfile](https://github.com/geographika/mappyfile) - a MapFile parser for working with MapServer configuration + - [synapse](https://github.com/vertexproject/synapse) - an intelligence analysis platform + - [Datacube-core](https://github.com/opendatacube/datacube-core) - Open Data Cube analyses continental scale Earth Observation data through time + - [SPFlow](https://github.com/SPFlow/SPFlow) - Library for Sum-Product Networks + - [Torchani](https://github.com/aiqm/torchani) - Accurate Neural Network Potential on PyTorch + - [Command-Block-Assembly](https://github.com/simon816/Command-Block-Assembly) - An assembly language, and C compiler, for Minecraft commands + - [EQL](https://github.com/endgameinc/eql) - Event Query Language + - [Fabric-SDK-Py](https://github.com/hyperledger/fabric-sdk-py) - Hyperledger fabric SDK with Python 3.x + - [required](https://github.com/shezadkhan137/required) - multi-field validation using docstrings + - [miniwdl](https://github.com/chanzuckerberg/miniwdl) - A static analysis toolkit for the Workflow Description Language + - [pytreeview](https://gitlab.com/parmenti/pytreeview) - a lightweight tree-based grammar explorer + - [harmalysis](https://github.com/napulen/harmalysis) - A language for harmonic analysis and music theory + - [gersemi](https://github.com/BlankSpruce/gersemi) - A CMake code formatter + - [MistQL](https://github.com/evinism/mistql) - A query language for JSON-like structures + +Using Lark? Send me a message and I'll add your project! + +## License + +Lark uses the [MIT license](LICENSE). + +(The standalone tool is under MPL2) + +## Contributors + +Lark is currently accepting pull-requests. 
See [How to develop Lark](/docs/how_to_develop.md)
+
+Big thanks to everyone who contributed so far:
+
+
+
+## Sponsor
+
+If you like Lark, and want to see us grow, please consider [sponsoring us!](https://github.com/sponsors/lark-parser)
+
+## Contact the author
+
+Questions about code are best asked on [gitter](https://gitter.im/lark-parser/Lobby) or in the issues.
+
+For anything else, I can be reached by email at erezshin at gmail com.
+
+ -- [Erez](https://github.com/erezsh)
+
+
diff --git a/vendor/lark/docs/Makefile b/vendor/lark/docs/Makefile
new file mode 100644
index 00000000..58127b42
--- /dev/null
+++ b/vendor/lark/docs/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    =
+SPHINXBUILD   = sphinx-build
+SPHINXPROJ    = Lark
+SOURCEDIR     = .
+BUILDDIR      = _build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
\ No newline at end of file
diff --git a/vendor/lark/docs/_static/comparison_memory.png b/vendor/lark/docs/_static/comparison_memory.png
new file mode 100644
index 00000000..1cdd5f30
Binary files /dev/null and b/vendor/lark/docs/_static/comparison_memory.png differ
diff --git a/vendor/lark/docs/_static/comparison_runtime.png b/vendor/lark/docs/_static/comparison_runtime.png
new file mode 100644
index 00000000..bba01820
Binary files /dev/null and b/vendor/lark/docs/_static/comparison_runtime.png differ
diff --git a/vendor/lark/docs/_static/lark_cheatsheet.pdf b/vendor/lark/docs/_static/lark_cheatsheet.pdf
new file mode 100644
index 00000000..4f3226cc
Binary files /dev/null and b/vendor/lark/docs/_static/lark_cheatsheet.pdf differ
diff --git a/vendor/lark/docs/_static/sppf/sppf.html b/vendor/lark/docs/_static/sppf/sppf.html
new file mode 100644
index 00000000..c9c3d218
--- /dev/null
+++ b/vendor/lark/docs/_static/sppf/sppf.html
@@ -0,0 +1,212 @@
+ +
+ + + +

Shared Packed Parse Forest (SPPF)

+ + + + + + +
+ +

+ +

In the last decade there has been a lot of interest in generalized parsing techniques. These techniques can be used to generate a working parser for any context-free grammar. This means that we no longer have to massage our grammar to fit into restricted classes such as LL(k) or LR(k). Supporting all context-free grammars means that grammars can be written in a natural way, and grammars can be combined, since the class of context-free grammars is closed under composition.

+ +

One of the consequences of supporting the whole class of context-free grammars is that ambiguous grammars are also supported. In an ambiguous grammar there are sentences in the language that can be derived in multiple ways. Each derivation results in a distinct parse tree. For each additional ambiguity in the input sentence, the number of derivations might grow exponentially. Therefore generalized parsers output a parse forest, rather than a set of parse trees. In this parse forest, sharing is often used to reduce the total space required to represent all derivation trees: nodes which have the same subtree are shared, and nodes which correspond to different derivations of the same substring are combined. A parse forest where sharing is employed is called a shared packed parse forest (SPPF).

+ +

This article will describe the SPPF data structure in more detail. More information about the generation of the SPPF using the GLL algorithm can be found in the paper GLL parse-tree generation by E. Scott and A. Johnstone. Right Nulled GLR parsers can also be used to generate an SPPF, which is described in the paper Right Nulled GLR Parsers by E. Scott and A. Johnstone.

+ +

There are three types of nodes in an SPPF associated with a GLL parser: symbol nodes, packed nodes, and intermediate nodes. In the visualizations symbol nodes are shown as rectangles with rounded corners, packed nodes are shown as circles, or ovals when the label is visualized, and intermediate nodes are shown as rectangles.

+ +

Symbol nodes have labels of the form $(x,j,i)$ where $x$ is a terminal, nonterminal, or $\varepsilon$ (i.e. $x \in T \cup N \cup \lbrace \varepsilon \rbrace$), and $0 \leq j \leq i \leq m$ with $m$ being the length of the input sentence. The tuple $(j,i)$ is called the extent, and denotes that the symbol $x$ has been matched on the substring from position $j$ up to position $i$. Here $j$ is called the left extent, and $i$ is called the right extent.

+ +

Packed nodes have labels of the form $(t,k)$, where $0 \leq k \leq m$. Here $k$ is called the pivot, and $t$ is of the form $X ::= \alpha \cdot \beta$. The value of $k$ represents that the last symbol of $\alpha$ ends at position $k$ of the input string. Packed nodes are used to represent multiple derivation trees. When multiple derivations are possible with the same extent, starting from the same nonterminal symbol node, a separate packed node is added to the symbol node for each derivation.

+ +

Intermediate nodes are used to binarize the SPPF. They are introduced from the left, and group the children of packed nodes in pairs from the left. The binarization ensures that the size of the SPPF is worst-case cubic in the size of the input sentence. The fact that the SPPF is binarized does not mean that each node in the SPPF has at most two children. A symbol node or intermediate node can still have as many packed node children as there are ambiguities starting from it. Intermediate nodes have labels of the form $(t,j,i)$ where $t$ is a grammar slot, and $(j,i)$ is the extent. There are no intermediate nodes of the shape $(A ::= \alpha \cdot, j,i)$, where the grammar pointer of the grammar slot is at the end of the alternate. These grammar slots are present in the form of symbol nodes.
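As an illustration, the three node kinds and their labels could be encoded as follows. This is only a sketch of the definitions above, not the classes of any particular GLL or Earley implementation:

```python
from dataclasses import dataclass, field
from typing import List, Optional, Union

@dataclass
class PackedNode:                 # label (t, k)
    slot: str                     # grammar slot t, e.g. "E ::= E + . E"
    pivot: int                    # k: where the last symbol of alpha ends
    left: Optional["NodeT"] = None   # symbol or intermediate node (s, j, k)
    right: Optional["NodeT"] = None  # symbol node (x, k, i)

@dataclass
class SymbolNode:                 # label (x, j, i)
    symbol: str                   # terminal, nonterminal or epsilon
    left_extent: int              # j
    right_extent: int             # i
    packed: List[PackedNode] = field(default_factory=list)

@dataclass
class IntermediateNode:           # label (t, j, i)
    slot: str
    left_extent: int
    right_extent: int
    packed: List[PackedNode] = field(default_factory=list)

NodeT = Union[SymbolNode, IntermediateNode]
```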

+ +

Consider the following grammar:

+ +

$\quad S ::= ABCD \quad A ::= a \quad B ::= b \quad C ::= c \quad D ::= d. $

+ +

Then, given input sentence $abcd$, the following SPPF will be the result:

+ + +
+ + + + +
+

SPPF with intermediate nodes

+ +
+ +
+ + +

Suppose that the intermediate nodes had not been added to the SPPF. Then the nonterminal symbol nodes for $A$, $B$, $C$, and $D$ would have been attached to the nonterminal symbol node $S$:

+ + +
+ + + + +
+

SPPF without intermediate nodes

+ +
+ +
+ + +

This example shows how intermediate nodes ensure that the tree is binarized.

+ +

Adding cycles

+ +

Grammars that contain cycles can generate sentences which have infinitely many derivation trees. A context-free grammar is cyclic if there exists a nonterminal $A \in N$ and a derivation $A \overset{+}\Rightarrow A$. Note that a cyclic context-free grammar implies that the context-free grammar is left-recursive, but the converse does not hold. The derivation trees for a cyclic grammar are represented in the finite SPPF by introducing cycles in the graph.

+ +

Consider the following cyclic grammar: +$S ::= SS \mid a \mid \varepsilon$.

+ +

Given input sentence $a$, there are infinitely many derivations. All these derivations are present in the following SPPF:

+ + +
+ + + + +
+

SPPF containing an infinite number of derivations

+ +
+ +
+ + +

Ambiguities

+ +

A parse forest is ambiguous if and only if it contains at least one ambiguity. An ambiguity arises when a symbol node or intermediate node has at least two packed nodes as its children. Such nodes are called ambiguous. Consider for instance the following grammar with input sentence $1+1+1$: +$ E ::= E + E \mid 1 $.

+ +

This gives the following SPPF:

+ + +
+ + + + +
+

SPPF containing an ambiguous root node

+ +
+ +
+ + +

In this SPPF, symbol node $(E,0,5)$ has two packed nodes as children. This means that there are at least two different parse trees starting at this node, the parse trees representing derivations $(E+(E+E))$ and $((E+E)+E)$ respectively.
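This ambiguity can be reproduced with Lark's Earley parser (a small runnable sketch; the nonterminal is lowercased to fit Lark's rule-naming convention), which returns both derivations under an `_ambig` node when asked for explicit ambiguity:

```python
from lark import Lark

# E ::= E + E | 1, written as a Lark rule; ambiguity='explicit' keeps
# both derivations of "1+1+1" in the tree instead of picking one.
parser = Lark('''
    e: e "+" e
     | "1"
''', start='e', ambiguity='explicit')

print(parser.parse("1+1+1").pretty())
```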

+ +

The set of all parse trees present in the SPPF is defined in the following way:

+ +

Start at the root node of the SPPF and walk downwards, choosing one packed node below each visited symbol or intermediate node, and taking all the children of each visited packed node, in a recursive manner.
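In code, that walk might look like the following minimal sketch. The encoding of nodes is invented for the example (it is not the API of any real parser), and it assumes an acyclic forest, since a cyclic SPPF contains infinitely many trees:

```python
from itertools import product

# Encoding for this sketch:
# - a symbol/intermediate node is {'packed': [alternatives]}
# - a packed node is a tuple of child nodes
# - a terminal is a plain string
def trees(node):
    if isinstance(node, str):            # terminal: a leaf
        yield node
    elif isinstance(node, dict):         # choose ONE packed alternative
        for packed in node['packed']:
            yield from trees(packed)
    else:                                # packed node: take ALL children
        for combo in product(*(list(trees(child)) for child in node)):
            yield list(combo)

# The ambiguous forest for "1+1+1" with E ::= E + E | 1:
one = {'packed': [('1',)]}
e_plus_e = {'packed': [(one, '+', one)]}
root = {'packed': [(one, '+', e_plus_e), (e_plus_e, '+', one)]}

print(len(list(trees(root))))  # -> 2 distinct parse trees
```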

+ +

Structural Properties

+ +

There are various structural properties that are useful when reasoning about SPPFs in general. First, note that each symbol node $(x,j,i)$ with $x \in T \cup N \cup \lbrace \varepsilon \rbrace$ is unique, so an SPPF does not contain two symbol nodes $(A,k,l)$ and $(B,m,n)$ with $A = B$, $k = m$, and $l = n$.

+ +

Terminal symbol nodes have no children. These nodes represent the leaves of the parse forest. Nonterminal symbol nodes $(A,j,i)$ have packed node children of the form $(A ::= \gamma \cdot, k)$ with $j \leq k \leq i$, and the number of children is not limited to two.

+ +

Intermediate nodes $(t,j,i)$ have packed node children with labels of the form $(t,k)$, where $j \leq k \leq i$.

+ +

Packed nodes $(t,k)$ have one or two children. The right child is a symbol node $(x,k,i)$ and the left child (if it exists) is a symbol or intermediate node with label $(s,j,k)$, where $j \leq k \leq i$. Packed nodes always have exactly one parent, which is either a symbol node or an intermediate node.

+ +

It is useful to observe that the SPPF is a bipartite graph, with the set of symbol and intermediate nodes on one side and the set of packed nodes on the other. Edges therefore always go from a node of one type to a node of the other type. As a consequence, cycles in the SPPF are always of even length.

+ +

Transformation to an abstract syntax tree

+ +

In the end, we often want a single abstract syntax tree (AST) when parsing an input sentence. In order to arrive at this AST, we need disambiguation techniques to remove undesired parse trees from the SPPF, or to avoid generating undesired parse trees in the first place. Van der Sanden's thesis (2014) describes several SPPF disambiguation filters that remove ambiguities arising in expression grammars. Furthermore, a method is described to integrate parse-time filtering in GLL that tries to avoid embedding undesired parse trees in the SPPF.

+ +

Of course, other transformations might be needed, such as the removal of whitespace and comments from the parse forest.

+ +
+ +
+ + + + +
+
+

+ + © 2016 Bram van der Sanden +

Source: Wayback Machine + copy of the page that used to be at http://www.bramvandersanden.com/post/2014/06/shared-packed-parse-forest/. +

+ +

+
+ +
+ \ No newline at end of file diff --git a/vendor/lark/docs/_static/sppf/sppf_111.svg b/vendor/lark/docs/_static/sppf/sppf_111.svg new file mode 100644 index 00000000..d8af1813 --- /dev/null +++ b/vendor/lark/docs/_static/sppf/sppf_111.svg @@ -0,0 +1,765 @@ + + + +image/svg+xml(E, 0, 5) +(E ::= E + • E ,0,2) +(E, 2, 5) +(E, 4, 5) +(E ::= E + • E ,0,4) +(E, 0, 1) +(+, 1, 2) +(1, 0, 1) +(E ::= E + • E ,2,4) +(E, 2, 3) +(+, 3, 4) +(1, 2, 3) +(1, 4, 5) +(E, 0, 3) + \ No newline at end of file diff --git a/vendor/lark/docs/_static/sppf/sppf_abcd.svg b/vendor/lark/docs/_static/sppf/sppf_abcd.svg new file mode 100644 index 00000000..9ed8c805 --- /dev/null +++ b/vendor/lark/docs/_static/sppf/sppf_abcd.svg @@ -0,0 +1,584 @@ + + + +image/svg+xml(S, 0, 4) +(S ::= A B C • D ,0,3) +(D, 3, 4) +(S ::= A B • C D ,0,2) +(C, 2, 3) +(A, 0, 1) +(B, 1, 2) +(a, 0, 1) +(b, 1, 2) +(c, 2, 3) +(d, 3, 4) + \ No newline at end of file diff --git a/vendor/lark/docs/_static/sppf/sppf_abcd_noint.svg b/vendor/lark/docs/_static/sppf/sppf_abcd_noint.svg new file mode 100644 index 00000000..ab9a46d7 --- /dev/null +++ b/vendor/lark/docs/_static/sppf/sppf_abcd_noint.svg @@ -0,0 +1,522 @@ + + + +image/svg+xml(S, 0, 4) +(A, 0, 1) +(B, 1, 2) +(C, 2, 3) +(D, 3, 4) +(a, 0, 1) +(b, 1, 2) +(c, 2, 3) +(d, 3, 4) + \ No newline at end of file diff --git a/vendor/lark/docs/_static/sppf/sppf_cycle.svg b/vendor/lark/docs/_static/sppf/sppf_cycle.svg new file mode 100644 index 00000000..dcac54d1 --- /dev/null +++ b/vendor/lark/docs/_static/sppf/sppf_cycle.svg @@ -0,0 +1,682 @@ + + + +image/svg+xml(S, 0, 1) +(S ::= a•,0) +(S ::= S S•,1) +(S ::= S S•,0) +(a, 0, 1) +(S ::= S • S ,0,1) +(S, 1, 1) +(S ::= S • S ,0,0) +(S ::= S • S ,0) +(S ::= +ε +•,1) +(S ::= S S•,1) +( +ε +, 1, 1) +(S ::= S • S ,1,1) +(S ::= S • S ,1) +(S ::= S • S ,0) +(S, 0, 0) +(S ::= +ε +•,0) +(S ::= S S•,0) +( +ε +, 0, 0) + \ No newline at end of file diff --git a/vendor/lark/docs/classes.rst b/vendor/lark/docs/classes.rst new file mode 100644 index 00000000..6e88fae4 --- /dev/null +++ b/vendor/lark/docs/classes.rst @@ -0,0 +1,92 @@ +API Reference +============= + +Lark +---- + +.. autoclass:: lark.Lark + :members: open, parse, parse_interactive, lex, save, load, get_terminal, open_from_package + + +Using Unicode character classes with ``regex`` +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Python's builtin ``re`` module has a few persistent known bugs and also won't parse +advanced regex features such as character classes. +With ``pip install lark[regex]``, the ``regex`` module will be +installed alongside lark and can act as a drop-in replacement to ``re``. + +Any instance of Lark instantiated with ``regex=True`` will use the ``regex`` module instead of ``re``. + +For example, we can use character classes to match PEP-3131 compliant Python identifiers: + +:: + + >>> from lark import Lark + >>> g = Lark(r""" + ?start: NAME + NAME: ID_START ID_CONTINUE* + ID_START: /[\p{Lu}\p{Ll}\p{Lt}\p{Lm}\p{Lo}\p{Nl}_]+/ + ID_CONTINUE: ID_START | /[\p{Mn}\p{Mc}\p{Nd}\p{Pc}·]+/ + """, regex=True) + + >>> g.parse('வணக்கம்') + 'வணக்கம்' + + +Tree +---- + +.. autoclass:: lark.Tree + :members: pretty, find_pred, find_data, iter_subtrees, scan_values, + iter_subtrees_topdown, __rich__ + +Token +----- + +.. autoclass:: lark.Token + +Transformer, Visitor & Interpreter +---------------------------------- + +See :doc:`visitors`. + +ForestVisitor, ForestTransformer, & TreeForestTransformer +----------------------------------------------------------- + +See :doc:`forest`.
+ +UnexpectedInput +--------------- + +.. autoclass:: lark.exceptions.UnexpectedInput + :members: get_context, match_examples + +.. autoclass:: lark.exceptions.UnexpectedToken + +.. autoclass:: lark.exceptions.UnexpectedCharacters + +.. autoclass:: lark.exceptions.UnexpectedEOF + +InteractiveParser +----------------- + +.. autoclass:: lark.parsers.lalr_interactive_parser.InteractiveParser + :members: choices, feed_token, copy, pretty, resume_parse, exhaust_lexer, accepts, as_immutable + +.. autoclass:: lark.parsers.lalr_interactive_parser.ImmutableInteractiveParser + :members: choices, feed_token, copy, pretty, resume_parse, exhaust_lexer, accepts, as_mutable + + +ast_utils +--------- + +For an example of using ``ast_utils``, see `/examples/advanced/create_ast.py`_ + +.. autoclass:: lark.ast_utils.Ast + +.. autoclass:: lark.ast_utils.AsList + +.. autofunction:: lark.ast_utils.create_transformer + +.. _/examples/advanced/create_ast.py: examples/advanced/create_ast.html \ No newline at end of file diff --git a/vendor/lark/docs/conf.py b/vendor/lark/docs/conf.py new file mode 100644 index 00000000..c3d89fe4 --- /dev/null +++ b/vendor/lark/docs/conf.py @@ -0,0 +1,185 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# +# Lark documentation build configuration file, created by +# sphinx-quickstart on Sun Aug 16 13:09:41 2020. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +import os +import sys +sys.path.insert(0, os.path.abspath('..')) +autodoc_member_order = 'bysource' + + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.napoleon', + 'sphinx.ext.coverage', + 'recommonmark', + 'sphinx_markdown_tables', + 'sphinx_gallery.gen_gallery' +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = { + '.rst': 'restructuredtext', + '.md': 'markdown' +} + + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = 'Lark' +copyright = '2020, Erez Shinan' +author = 'Erez Shinan' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = '' +# The full version, including alpha/beta/rc tags. +release = '' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. 
+language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This patterns also effect to html_static_path and html_extra_path +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'sphinx_rtd_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# This is required for the alabaster theme +# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars +html_sidebars = { + '**': [ + 'relations.html', # needs 'show_related': True theme option to display + 'searchbox.html', + ] +} + + +# -- Options for HTMLHelp output ------------------------------------------ + +# Output file base name for HTML help builder. +htmlhelp_basename = 'Larkdoc' + + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'Lark.tex', 'Lark Documentation', + 'Erez Shinan', 'manual'), +] + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'lark', 'Lark Documentation', + [author], 7) +] + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'Lark', 'Lark Documentation', + author, 'Lark', 'One line description of project.', + 'Miscellaneous'), +] + +# -- Sphinx gallery config ------------------------------------------- + +sphinx_gallery_conf = { + 'examples_dirs': ['../examples'], + 'gallery_dirs': ['examples'], +} \ No newline at end of file diff --git a/vendor/lark/docs/features.md b/vendor/lark/docs/features.md new file mode 100644 index 00000000..5baa16eb --- /dev/null +++ b/vendor/lark/docs/features.md @@ -0,0 +1,32 @@ +# Features + +## Main Features + - Earley parser, capable of parsing any context-free grammar + - Implements SPPF, for efficient parsing and storing of ambiguous grammars. + - LALR(1) parser, limited in power of expression, but very efficient in space and performance (O(n)). + - Implements a parse-aware lexer that provides a better power of expression than traditional LALR implementations (such as ply). + - EBNF-inspired grammar, with extra features (See: [Grammar Reference](grammar.md)) + - Builds a parse-tree (AST) automagically based on the grammar + - Stand-alone parser generator - create a small independent parser to embed in your project. ([read more](tools.html#stand-alone-parser)) + - Flexible error handling by using an interactive parser interface (LALR only) + - Automatic line & column tracking (for both tokens and matched rules) + - Automatic terminal collision resolution + - Grammar composition - Import terminals and rules from other grammars + - Standard library of terminals (strings, numbers, names, etc.) + - Unicode fully supported + - Extensive test suite + - Type annotations (MyPy support) + - Pure-Python implementation + +[Read more about the parsers](parsers.md) + +## Extra features + + - Import rules and tokens from other Lark grammars, for code reuse and modularity. + - Support for external regex module ([see here](classes.html#using-unicode-character-classes-with-regex)) + - Import grammars from Nearley.js ([read more](tools.html#importing-grammars-from-nearleyjs)) + - CYK parser + - Visualize your parse trees as dot or png files ([see example](https://github.com/lark-parser/lark/blob/master/examples/fruitflies.py)) + - Automatic reconstruction of input from parse-tree (see examples) + - Use Lark grammars in Julia and JavaScript. + diff --git a/vendor/lark/docs/forest.rst b/vendor/lark/docs/forest.rst new file mode 100644 index 00000000..0d843355 --- /dev/null +++ b/vendor/lark/docs/forest.rst @@ -0,0 +1,65 @@ +Working with the SPPF +===================== + +When parsing with Earley, Lark provides the ``ambiguity='forest'`` option +to obtain the shared packed parse forest (SPPF) produced by the parser as +an alternative to it being automatically converted to a tree. + +Lark provides a few tools to facilitate working with the SPPF. Here are some +things to consider when deciding whether or not to use the SPPF. + +**Pros** + +- Efficient storage of highly ambiguous parses +- Precise handling of ambiguities +- Custom rule prioritizers +- Ability to handle infinite ambiguities +- Directly transform forest -> object instead of forest -> tree -> object + +**Cons** + +- More complex than working with a tree +- SPPF may contain nodes corresponding to rules generated internally +- Loss of Lark grammar features: + + - Rules starting with '_' are not inlined in the SPPF + - Rules starting with '?'
are never inlined in the SPPF + - All tokens will appear in the SPPF + +SymbolNode +---------- + +.. autoclass:: lark.parsers.earley_forest.SymbolNode + :members: is_ambiguous, children + +PackedNode +---------- + +.. autoclass:: lark.parsers.earley_forest.PackedNode + :members: children + +ForestVisitor +------------- + +.. autoclass:: lark.parsers.earley_forest.ForestVisitor + :members: visit, visit_symbol_node_in, visit_symbol_node_out, + visit_packed_node_in, visit_packed_node_out, + visit_token_node, on_cycle, get_cycle_in_path + +ForestTransformer +----------------- + +.. autoclass:: lark.parsers.earley_forest.ForestTransformer + :members: transform, transform_symbol_node, transform_intermediate_node, + transform_packed_node, transform_token_node + +TreeForestTransformer +--------------------- + +.. autoclass:: lark.parsers.earley_forest.TreeForestTransformer + :members: __default__, __default_token__, __default_ambig__ + +handles_ambiguity +----------------- + +.. autofunction:: lark.parsers.earley_forest.handles_ambiguity diff --git a/vendor/lark/docs/grammar.md b/vendor/lark/docs/grammar.md new file mode 100644 index 00000000..b27b5652 --- /dev/null +++ b/vendor/lark/docs/grammar.md @@ -0,0 +1,329 @@ +# Grammar Reference + +## Definitions + +A **grammar** is a list of rules and terminals that together define a language. + +Terminals define the alphabet of the language, while rules define its structure. + +In Lark, a terminal may be a string, a regular expression, or a concatenation of these and other terminals. + +Each rule is a list of terminals and rules, whose location and nesting define the structure of the resulting parse-tree. + +A **parsing algorithm** is an algorithm that takes a grammar definition and a sequence of symbols (members of the alphabet), and matches the entirety of the sequence by searching for a structure that is allowed by the grammar. + +### General Syntax and notes + +Grammars in Lark are based on [EBNF](https://en.wikipedia.org/wiki/Extended_Backus–Naur_form) syntax, with several enhancements. + +EBNF is basically a short-hand for common BNF patterns. + +Optionals are expanded: + +```ebnf + a b? c -> (a c | a b c) +``` + +Repetition is extracted into a recursion: + +```ebnf + a: b* -> a: _b_tag + _b_tag: (_b_tag b)? +``` + +And so on. + +Lark grammars are composed of a list of definitions and directives, each on its own line. A definition is either a named rule, or a named terminal, with the following syntax, respectively: + +```c + rule: <EBNF EXPRESSION> + | etc. + + TERM: <EBNF EXPRESSION> // Rules aren't allowed +``` + + +**Comments** start with `//` and last to the end of the line (C++ style). + +Lark begins the parse with the rule 'start', unless specified otherwise in the options. + +Names of rules are always in lowercase, while names of terminals are always in uppercase. This distinction has practical effects on the shape of the generated parse-tree, and on the automatic construction of the lexer (aka tokenizer, or scanner). + + +## Terminals + +Terminals are used to match text into symbols. They can be defined as a combination of literals and other terminals. + +**Syntax:** + +```html + <NAME> [. <priority>] : <literals-and-or-terminals> +``` + +Terminal names must be uppercase. + +Literals can be one of: + +* `"string"` +* `/regular expression+/` +* `"case-insensitive string"i` +* `/re with flags/imulx` +* Literal range: `"a".."z"`, `"1".."9"`, etc. + +Terminals also support grammar operators, such as `|`, `+`, `*` and `?`.
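For instance, the following runnable sketch (the terminal names are made up for the example) builds one terminal out of a string literal, a literal range, another terminal, and the `+` and `|` operators:

```python
from lark import Lark

# HEXNUM concatenates a literal with another terminal; HEXDIGIT combines
# literal ranges with `|`, and is repeated with `+`.
parser = Lark(r'''
    start: HEXNUM+
    HEXNUM: "0x" HEXDIGIT+
    HEXDIGIT: "0".."9" | "a".."f"
    %ignore " "
''')

print(parser.parse("0x1f 0xa0"))
```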
+ +Terminals are a linear construct, and therefore may not contain themselves (recursion isn't allowed). + +### Templates + +Templates are expanded when preprocessing the grammar. + +Definition syntax: + +```ebnf + my_template{param1, param2, ...}: <EBNF EXPRESSION> +``` + +Use syntax: + +```ebnf +some_rule: my_template{arg1, arg2, ...} +``` + +Example: +```ebnf +_separated{x, sep}: x (sep x)* // Define a sequence of 'x sep x sep x ...' + +num_list: "[" _separated{NUMBER, ","} "]" // Will match "[1, 2, 3]" etc. +``` + +### Priority + +Terminals can be assigned a priority to influence lexing. Terminal priorities +are signed integers with a default value of 0. + +When using a lexer, the highest priority terminals are always matched first. + +When using Earley's dynamic lexing, terminal priorities are used to prefer +certain lexings and resolve ambiguity. + +### Regexp Flags + +You can use flags on regexps and strings. For example: + +```perl +SELECT: "select"i //# Will ignore case, and match SELECT or Select, etc. +MULTILINE_TEXT: /.+/s +SIGNED_INTEGER: / + [+-]? # the sign + (0|[1-9][0-9]*) # the digits + /x +``` + +Supported flags are one of: `imslux`. See Python's regex documentation for more details on each one. + +Regexps/strings of different flags can only be concatenated in Python 3.6+. + +#### Notes for when using a lexer: + +When using a lexer (basic or contextual), it is the grammar-author's responsibility to make sure the literals don't collide, or that if they do, they are matched in the desired order. Literals are matched according to the following precedence: + +1. Highest priority first (priority is specified as: TERM.number: ...) +2. Length of match (for regexps, the longest theoretical match is used) +3. Length of literal / pattern definition +4. Name + +**Examples:** +```perl +IF: "if" +INTEGER : /[0-9]+/ +INTEGER2 : ("0".."9")+ //# Same as INTEGER +DECIMAL.2: INTEGER? "." INTEGER //# Will be matched before INTEGER +WHITESPACE: (" " | /\t/ )+ +SQL_SELECT: "select"i +``` + +### Regular expressions & Ambiguity + +Each terminal is eventually compiled to a regular expression. All the operators and references inside it are mapped to their respective expressions. + +For example, in the following grammar, `A1` and `A2` are equivalent: +```perl +A1: "a" | "b" +A2: /a|b/ +``` + +This means that inside terminals, Lark cannot detect or resolve ambiguity, even when using Earley. + +For example, for this grammar: +```perl +start : (A | B)+ +A : "a" | "ab" +B : "b" +``` +We get only one possible derivation, instead of two: + +```bash +>>> p = Lark(g, ambiguity="explicit") +>>> p.parse("ab") +Tree('start', [Token('A', 'ab')]) +``` + +This is happening because Python's regex engine always returns the best matching option. There is no way to access the alternatives. + +If you find yourself in this situation, the recommended solution is to use rules instead. + +Example: + +```python +>>> p = Lark("""start: (a | b)+ +... !a: "a" | "ab" +... !b: "b" +... """, ambiguity="explicit") +>>> print(p.parse("ab").pretty()) +_ambig + start + a ab + start + a a + b b +``` + + +## Rules + +**Syntax:** +```html + <name> : <items-to-match> [-> <alias>] + | ... +``` + +Names of rules and aliases are always in lowercase. + +Rule definitions can be extended to the next line by using the OR operator (signified by a pipe: `|` ). + +An alias is a name for the specific rule alternative. It affects tree construction.
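A minimal sketch of aliases in action (the alias names are invented for the example):

```python
from lark import Lark

# Each alternative gets its own alias, so the resulting tree node is
# named after the alternative that matched, not after the rule.
parser = Lark('''
    start: "a" -> letter_a
         | "b" -> letter_b
''')

print(parser.parse("a").data)  # -> letter_a
```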
+ + +Each item is one of: + +* `rule` +* `TERMINAL` +* `"string literal"` or `/regexp literal/` +* `(item item ..)` - Group items +* `[item item ..]` - Maybe. Same as `(item item ..)?`, but when `maybe_placeholders=True`, generates `None` if there is no match. +* `item?` - Zero or one instances of item ("maybe") +* `item*` - Zero or more instances of item +* `item+` - One or more instances of item +* `item ~ n` - Exactly *n* instances of item +* `item ~ n..m` - Between *n* to *m* instances of item (not recommended for wide ranges, due to performance issues) + +**Examples:** +```perl +hello_world: "hello" "world" +mul: (mul "*")? number //# Left-recursion is allowed and encouraged! +expr: expr operator expr + | value //# Multi-line, belongs to expr + +four_words: word ~ 4 +``` + +### Priority + +Like terminals, rules can be assigned a priority. Rule priorities are signed +integers with a default value of 0. + +When using LALR, the highest priority rules are used to resolve collision errors. + +When using Earley, rule priorities are used to resolve ambiguity. + + +## Directives + +### %ignore + +All occurrences of the terminal will be ignored, and won't be part of the parse. + +Using the `%ignore` directive results in a cleaner grammar. + +It's especially important for the LALR(1) algorithm, because adding whitespace (or comments, or other extraneous elements) explicitly in the grammar harms its predictive abilities, which are based on a lookahead of 1. + +**Syntax:** +```html +%ignore <TERMINAL> +``` +**Examples:** +```perl +%ignore " " + +COMMENT: "#" /[^\n]/* +%ignore COMMENT +``` +### %import + +Allows one to import terminals and rules from other lark grammars. + +When importing rules, all their dependencies will be imported into a namespace, to avoid collisions. It's not possible to override their dependencies (e.g. like you would when inheriting a class). + +**Syntax:** +```html +%import <module>.<TERMINAL> +%import <module>.<rule> +%import <module>.<TERMINAL> -> <NEWTERMINAL> +%import <module>.<rule> -> <newrule> +%import <module> (<TERM1>, <TERM2>, <rule1>, <rule2>) +``` + +If the module path is absolute, Lark will attempt to load it from the built-in directory (which currently contains `common.lark`, `python.lark`, and `unicode.lark`). + +If the module path is relative, such as `.path.to.file`, Lark will attempt to load it from the current working directory. Grammars must have the `.lark` extension. + +The rule or terminal can be imported under another name with the `->` syntax. + +**Example:** +```perl +%import common.NUMBER + +%import .terminals_file (A, B, C) + +%import .rules_file.rulea -> ruleb +``` + +Note that `%ignore` directives cannot be imported. Imported rules will abide by the `%ignore` directives declared in the main grammar. + +### %declare + +Declare a terminal without defining it. Useful for plugins. + +### %override + +Override a rule or terminal, affecting all references to it, even in imported grammars. + +Useful for implementing an inheritance pattern when importing grammars. + +**Example:** +```perl +%import my_grammar (start, number, NUMBER) + +// Add hex support to my_grammar +%override number: NUMBER | /0x\w+/ +``` + +### %extend + +Extend the definition of a rule or terminal, e.g. add a new option on what it can match, like when separated with `|`. + +Useful for splitting up the definition of a complex rule with many different options over multiple files. + +Can also be used to implement a plugin system where a core grammar is extended by others.
+ + +**Example:** +```perl +%import my_grammar (start, NUMBER) + +// Add hex support to my_grammar +%extend NUMBER: /0x\w+/ +``` + +For both `%extend` and `%override`, there is no requirement for a rule/terminal to come from another file, but that is probably the most common use case. diff --git a/vendor/lark/docs/how_to_develop.md b/vendor/lark/docs/how_to_develop.md new file mode 100644 index 00000000..fb38ccf9 --- /dev/null +++ b/vendor/lark/docs/how_to_develop.md @@ -0,0 +1,68 @@ +# How to develop Lark - Guide + +There are many ways you can help the project: + +* Help solve issues +* Improve the documentation +* Write new grammars for Lark's library +* Write a blog post introducing Lark to your audience +* Port Lark to another language +* Help me with code development + +If you're interested in taking one of these on, let me know and I will provide more details and assist you in the process. + + +## Unit Tests + +Lark comes with an extensive set of tests. Many of the tests will run several times, once for each parser configuration. + +To run the tests, just go to the lark project root, and run the command: +```bash +python -m tests +``` + +or + +```bash +pypy -m tests +``` + +For a list of supported interpreters, you can consult the `tox.ini` file. + +You can also run a single unittest using its class and method name, for example: +```bash +## test_package test_class_name.test_function_name +python -m tests TestLalrBasic.test_keep_all_tokens +``` + +### tox + +To run all Unit Tests with tox, +install tox and every Python interpreter from 2.7 up to the latest version supported (consult the file tox.ini). +Then, +run the command `tox` in the root of this project (where the main setup.py file is). + +And, for example, +if you would like to only run the Unit Tests for Python version 2.7, +you can run the command `tox -e py27` + +### pytest + +You can also run the tests using pytest: + +```bash +pytest tests +``` + +### Code Style + +Lark does not follow a predefined code style. +We accept any code style that makes sense, as long as it's Pythonic and easy to read. + +### Using setup.py + +Another way to run the tests is using setup.py: + +```bash +python setup.py test +``` diff --git a/vendor/lark/docs/how_to_use.md b/vendor/lark/docs/how_to_use.md new file mode 100644 index 00000000..08547829 --- /dev/null +++ b/vendor/lark/docs/how_to_use.md @@ -0,0 +1,83 @@ +# How To Use Lark - Guide + +## Work process + +This is the recommended process for working with Lark: + +1. Collect or create input samples that demonstrate key features or behaviors in the language you're trying to parse. + +2. Write a grammar. Aim for a structure that is intuitive, and that imitates how you would explain your language to a fellow human. + +3. Try your grammar in Lark against each input sample. Make sure the resulting parse-trees make sense. + +4. Use Lark's grammar features to [shape the tree](tree_construction.md): Get rid of superfluous rules by inlining them, and use aliases when specific cases need clarification. + + - You can perform steps 1-4 repeatedly, gradually growing your grammar to include more sentences. + +5. Create a transformer to evaluate the parse-tree into a structure you'll be comfortable working with. This may include evaluating literals, merging branches, or even converting the entire tree into your own set of AST classes. (See the sketch below for a minimal example.) + +Of course, some specific use-cases may deviate from this process. Feel free to suggest these cases, and I'll add them to this page.
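As an illustration of step 5, here is a minimal sketch (the grammar and class name are invented for the example) that uses a `Transformer` to evaluate a parse-tree into a plain number:

```python
from lark import Lark, Transformer

parser = Lark('''
    start: NUMBER ("+" NUMBER)*

    %import common.NUMBER
    %ignore " "
''')

class SumAll(Transformer):
    # The children of `start` are the NUMBER tokens; the anonymous "+"
    # tokens are filtered out of the tree automatically.
    def start(self, children):
        return sum(float(tok) for tok in children)

print(SumAll().transform(parser.parse("1 + 2 + 3")))  # -> 6.0
```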
+ +## Getting started + +Browse the [Examples](https://github.com/lark-parser/lark/tree/master/examples) to find a template that suits your purposes. + +Read the tutorials to get a better understanding of how everything works. (links in the [main page](/index)) + +Use the [Cheatsheet (PDF)](https://lark-parser.readthedocs.io/en/latest/_static/lark_cheatsheet.pdf) for quick reference. + +Use the reference pages for more in-depth explanations. (links in the [main page](/index)) + +## Debug + +Grammars may contain non-obvious bugs, usually caused by rules or terminals interfering with each other in subtle ways. + +When trying to debug a misbehaving grammar, the following methodology is recommended: + +1. Create a copy of the grammar, so you can change the parser/grammar without any worries +2. Find the minimal input that creates the error +3. Slowly remove rules from the grammar, while making sure the error still occurs. + +Usually, by the time you get to a minimal grammar, the problem becomes clear. + +But if it doesn't, feel free to ask us on gitter, or even open an issue. Post a reproducing code, with the minimal grammar and input, and we'll do our best to help. + +### LALR + +By default Lark silently resolves Shift/Reduce conflicts as Shift. To enable warnings pass `debug=True`. To get the messages printed you have to configure the `logger` beforehand. For example: + +```python +import logging +from lark import Lark, logger + +logger.setLevel(logging.DEBUG) + +collision_grammar = ''' +start: as as +as: a* +a: "a" +''' +p = Lark(collision_grammar, parser='lalr', debug=True) +``` + +## Tools + +### Stand-alone parser + +Lark can generate a stand-alone LALR(1) parser from a grammar. + +The resulting module provides the same interface as Lark, but with a fixed grammar, and reduced functionality. + +Run using: + +```bash +python -m lark.tools.standalone +``` + +For a play-by-play, read the [tutorial](http://blog.erezsh.com/create-a-stand-alone-lalr1-parser-in-python/) + +### Import Nearley.js grammars + +It is possible to import Nearley grammars into Lark. The Javascript code is translated using Js2Py. + +See the [tools page](tools.md) for more information. diff --git a/vendor/lark/docs/ide/app.html b/vendor/lark/docs/ide/app.html new file mode 100644 index 00000000..48a1505d --- /dev/null +++ b/vendor/lark/docs/ide/app.html @@ -0,0 +1,101 @@ + + + + + + + + + + + + + + + + + + diff --git a/vendor/lark/docs/ide/app.js b/vendor/lark/docs/ide/app.js new file mode 100644 index 00000000..90e54f13 --- /dev/null +++ b/vendor/lark/docs/ide/app.js @@ -0,0 +1,105 @@ +class app { + + constructor(modules, invocation){ + languagePluginLoader.then(() => { + // If you don't require for pre-loaded Python packages, remove this promise below. + window.pyodide.runPythonAsync("import setuptools, micropip").then(()=>{ + window.pyodide.runPythonAsync("micropip.install('lark-parser')").then(()=>{ + this.fetchSources(modules).then(() => { + window.pyodide.runPythonAsync("import " + Object.keys(modules).join("\nimport ") + "\n" + invocation + "\n").then(() => this.initializingComplete()); + }); + }); + }); + }); + } + + loadSources(module, baseURL, files) { + let promises = []; + + for (let f in files) { + promises.push( + new Promise((resolve, reject) => { + let file = files[f]; + let url = (baseURL ? 
baseURL + "/" : "") + file; + + fetch(url, {}).then((response) => { + if (response.status === 200) + return response.text().then((code) => { + let path = ("/lib/python3.7/site-packages/" + module + "/" + file).split("/"); + let lookup = ""; + + for (let i in path) { + if (!path[i]) { + continue; + } + + lookup += (lookup ? "/" : "") + path[i]; + + if (parseInt(i) === path.length - 1) { + window.pyodide._module.FS.writeFile(lookup, code); + console.debug(`fetched ${lookup}`); + } else { + try { + window.pyodide._module.FS.lookupPath(lookup); + } catch { + window.pyodide._module.FS.mkdir(lookup); + console.debug(`created ${lookup}`); + } + } + } + + resolve(); + }); + else + reject(); + }); + }) + ); + } + + return Promise.all(promises); + } + + fetchSources(modules) { + let promises = []; + + for( let module of Object.keys(modules) ) + { + promises.push( + new Promise((resolve, reject) => { + fetch(`${modules[module]}/files.json`, {}).then((response) => { + if (response.status === 200) { + response.text().then((list) => { + let files = JSON.parse(list); + + this.loadSources(module, modules[module], files).then(() => { + resolve(); + }) + }) + } else { + reject(); + } + }) + })); + } + + return Promise.all(promises).then(() => { + for( let module of Object.keys(modules) ) { + window.pyodide.loadedPackages[module] = "default channel"; + } + + window.pyodide.runPython( + 'import importlib as _importlib\n' + + '_importlib.invalidate_caches()\n' + ); + }); + } + + initializingComplete() { + document.body.classList.remove("is-loading") + } +} + +(function () { + window.top.app = new app({"app": "app"}, "import app.app; app.app.start()"); +})(); diff --git a/vendor/lark/docs/ide/app/app.py b/vendor/lark/docs/ide/app/app.py new file mode 100644 index 00000000..146aee98 --- /dev/null +++ b/vendor/lark/docs/ide/app/app.py @@ -0,0 +1,83 @@ +from . import html5 +from .examples import examples + +from lark import Lark +from lark.tree import Tree + + +class App(html5.Div): + def __init__(self): + super().__init__(""" +

+ IDE +

+ +
+ + + + +
+
+
Grammar:
+ +
+
+
Input:
+ +
+
+
+
    +
+
+ """) + self.sinkEvent("onKeyUp", "onChange") + + self.parser = "earley" + + # Pre-load examples + for name, (grammar, input) in examples.items(): + option = html5.Option(name) + option.grammar = grammar + option.input = input + + self.examples.appendChild(option) + + def onChange(self, e): + if html5.utils.doesEventHitWidgetOrChildren(e, self.examples): + example = self.examples.children(self.examples["selectedIndex"]) + self.grammar["value"] = example.grammar.strip() + self.input["value"] = example.input.strip() + self.onKeyUp() + + elif html5.utils.doesEventHitWidgetOrChildren(e, self.parser): + self.parser = self.parser.children(self.parser["selectedIndex"])["value"] + self.onKeyUp() + + def onKeyUp(self, e=None): + l = Lark(self.grammar["value"], parser=self.parser) + + try: + ast = l.parse(self.input["value"]) + except Exception as e: + self.ast.appendChild( + html5.Li(str(e)), replace=True + ) + + print(ast) + traverse = lambda node: html5.Li([node.data, html5.Ul([traverse(c) for c in node.children])] if isinstance(node, Tree) else node) + self.ast.appendChild(traverse(ast), replace=True) + + +def start(): + html5.Body().appendChild( + App() + ) + diff --git a/vendor/lark/docs/ide/app/core.py b/vendor/lark/docs/ide/app/core.py new file mode 100644 index 00000000..6ebe6791 --- /dev/null +++ b/vendor/lark/docs/ide/app/core.py @@ -0,0 +1,3152 @@ +# -*- coding: utf-8 -* + +######################################################################################################################## +# DOM-access functions and variables +######################################################################################################################## + +try: + # Pyodide + from js import window, eval as jseval + document = window.document + +except: + print("Emulation mode") + from xml.dom.minidom import parseString + + jseval = None + window = None + document = parseString("") + + +def domCreateAttribute(tag, ns=None): + """ + Creates a new HTML/SVG/... attribute + :param ns: the namespace. Default: HTML. Possible values: HTML, SVG, XBL, XUL + """ + uri = None + + if ns == "SVG": + uri = "http://www.w3.org/2000/svg" + elif ns == "XBL": + uri = "http://www.mozilla.org/xbl" + elif ns == "XUL": + uri = "http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul" + + if uri: + return document.createAttribute(uri, tag) + + return document.createAttribute(tag) + + +def domCreateElement(tag, ns=None): + """ + Creates a new HTML/SVG/... tag + :param ns: the namespace. Default: HTML. 
Possible values: HTML, SVG, XBL, XUL + """ + uri = None + + if ns == "SVG": + uri = "http://www.w3.org/2000/svg" + elif ns == "XBL": + uri = "http://www.mozilla.org/xbl" + elif ns == "XUL": + uri = "http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul" + + if uri: + return document.createElementNS(uri, tag) + + return document.createElement(tag) + + +def domCreateTextNode(txt=""): + return document.createTextNode(txt) + + +def domGetElementById(idTag): + return document.getElementById(idTag) + + +def domElementFromPoint(x, y): + return document.elementFromPoint(x, y) + + +def domGetElementsByTagName(tag): + items = document.getElementsByTagName(tag) + return [items.item(i) for i in range(0, int(items.length))] #pyodide interprets items.length as float, so convert to int + + +######################################################################################################################## +# HTML Widgets +######################################################################################################################## + +# TextNode ------------------------------------------------------------------------------------------------------------- + +class TextNode(object): + """ + Represents a piece of text inside the DOM. + This is the *only* object not deriving from "Widget", as it does + not support any of its properties. + """ + + def __init__(self, txt=None, *args, **kwargs): + super().__init__() + self._parent = None + self._children = [] + self.element = domCreateTextNode(txt or "") + self._isAttached = False + + def _setText(self, txt): + self.element.data = txt + + def _getText(self): + return self.element.data + + def __str__(self): + return self.element.data + + def onAttach(self): + self._isAttached = True + + def onDetach(self): + self._isAttached = False + + def _setDisabled(self, disabled): + return + + def _getDisabled(self): + return False + + def children(self): + return [] + + +# _WidgetClassWrapper ------------------------------------------------------------------------------------------------- + +class _WidgetClassWrapper(list): + + def __init__(self, targetWidget): + super().__init__() + + self.targetWidget = targetWidget + + def _updateElem(self): + if len(self) == 0: + self.targetWidget.element.removeAttribute("class") + else: + self.targetWidget.element.setAttribute("class", " ".join(self)) + + def append(self, p_object): + list.append(self, p_object) + self._updateElem() + + def clear(self): + list.clear(self) + self._updateElem() + + def remove(self, value): + try: + list.remove(self, value) + except: + pass + self._updateElem() + + def extend(self, iterable): + list.extend(self, iterable) + self._updateElem() + + def insert(self, index, p_object): + list.insert(self, index, p_object) + self._updateElem() + + def pop(self, index=None): + list.pop(self, index) + self._updateElem() + + +# _WidgetDataWrapper --------------------------------------------------------------------------------------------------- + +class _WidgetDataWrapper(dict): + + def __init__(self, targetWidget): + super().__init__() + + self.targetWidget = targetWidget + alldata = targetWidget.element + + for data in dir(alldata.dataset): + dict.__setitem__(self, data, getattr(alldata.dataset, data)) + + def __setitem__(self, key, value): + dict.__setitem__(self, key, value) + self.targetWidget.element.setAttribute(str("data-" + key), value) + + def update(self, E=None, **F): + dict.update(self, E, **F) + if E is not None and "keys" in dir(E): + for key in E: + 
self.targetWidget.element.setAttribute(str("data-" + key), E["data-" + key]) + elif E: + for (key, val) in E: + self.targetWidget.element.setAttribute(str("data-" + key), "data-" + val) + for key in F: + self.targetWidget.element.setAttribute(str("data-" + key), F["data-" + key]) + + +# _WidgetStyleWrapper -------------------------------------------------------------------------------------------------- + +class _WidgetStyleWrapper(dict): + + def __init__(self, targetWidget): + super().__init__() + + self.targetWidget = targetWidget + style = targetWidget.element.style + + for key in dir(style): + # Convert JS-Style-Syntax to CSS Syntax (ie borderTop -> border-top) + realKey = "" + for currChar in key: + if currChar.isupper(): + realKey += "-" + realKey += currChar.lower() + val = style.getPropertyValue(realKey) + if val: + dict.__setitem__(self, realKey, val) + + def __setitem__(self, key, value): + dict.__setitem__(self, key, value) + self.targetWidget.element.style.setProperty(key, value) + + def update(self, E=None, **F): + dict.update(self, E, **F) + if E is not None and "keys" in dir(E): + for key in E: + self.targetWidget.element.style.setProperty(key, E[key]) + elif E: + for (key, val) in E: + self.targetWidget.element.style.setProperty(key, val) + for key in F: + self.targetWidget.element.style.setProperty(key, F[key]) + + +# Widget --------------------------------------------------------------------------------------------------------------- + +class Widget(object): + _tagName = None + _namespace = None + _parserTagName = None + style = [] + + def __init__(self, *args, appendTo=None, style=None, **kwargs): + if "_wrapElem" in kwargs.keys(): + self.element = kwargs["_wrapElem"] + del kwargs["_wrapElem"] + else: + assert self._tagName is not None + self.element = domCreateElement(self._tagName, ns=self._namespace) + + super().__init__() + self._widgetClassWrapper = _WidgetClassWrapper(self) + self.addClass(self.style) + + if style: + self.addClass(style) + + self._children = [] + self._catchedEvents = {} + self._disabledState = 0 + self._isAttached = False + self._parent = None + + self._lastDisplayState = None + + if args: + self.appendChild(*args, **kwargs) + + if appendTo: + appendTo.appendChild(self) + + def sinkEvent(self, *args): + for event_attrName in args: + event = event_attrName.lower() + + if event_attrName in self._catchedEvents or event in ["onattach", "ondetach"]: + continue + + eventFn = getattr(self, event_attrName, None) + assert eventFn and callable(eventFn), "{} must provide a {} method".format(str(self), event_attrName) + + self._catchedEvents[event_attrName] = eventFn + + if event.startswith("on"): + event = event[2:] + + self.element.addEventListener(event, eventFn) + + def unsinkEvent(self, *args): + for event_attrName in args: + event = event_attrName.lower() + + if event_attrName not in self._catchedEvents: + continue + + eventFn = self._catchedEvents[event_attrName] + del self._catchedEvents[event_attrName] + + if event.startswith("on"): + event = event[2:] + + self.element.removeEventListener(event, eventFn) + + def disable(self): + if not self["disabled"]: + self["disabled"] = True + + def enable(self): + if self["disabled"]: + self["disabled"] = False + + def _getDisabled(self): + return bool(self._disabledState) + + def _setDisabled(self, disable): + for child in self._children: + child._setDisabled(disable) + + if disable: + self._disabledState += 1 + self.addClass("is-disabled") + + if isinstance(self, _attrDisabled): + self.element.disabled = True 
+ + elif self._disabledState: + self._disabledState -= 1 + + if not self._disabledState: + self.removeClass("is-disabled") + + if isinstance(self, _attrDisabled): + self.element.disabled = False + + def _getTargetfuncName(self, key, type): + assert type in ["get", "set"] + return "_{}{}{}".format(type, key[0].upper(), key[1:]) + + def __getitem__(self, key): + funcName = self._getTargetfuncName(key, "get") + + if funcName in dir(self): + return getattr(self, funcName)() + + return None + + def __setitem__(self, key, value): + funcName = self._getTargetfuncName(key, "set") + + if funcName in dir(self): + return getattr(self, funcName)(value) + + raise ValueError("{} is no valid attribute for {}".format(key, (self._tagName or str(self)))) + + def __str__(self): + return str(self.__class__.__name__) + + def __iter__(self): + return self._children.__iter__() + + def _getData(self): + """ + Custom data attributes are intended to store custom data private to the page or application, for which there are no more appropriate attributes or elements. + :param name: + :returns: + """ + return _WidgetDataWrapper(self) + + def _getTranslate(self): + """ + Specifies whether an elements attribute values and contents of its children are to be translated when the page is localized, or whether to leave them unchanged. + :returns: True | False + """ + return True if self.element.translate == "yes" else False + + def _setTranslate(self, val): + """ + Specifies whether an elements attribute values and contents of its children are to be translated when the page is localized, or whether to leave them unchanged. + :param val: True | False + """ + self.element.translate = "yes" if val == True else "no" + + def _getTitle(self): + """ + Advisory information associated with the element. + :returns: str + """ + return self.element.title + + def _setTitle(self, val): + """ + Advisory information associated with the element. + :param val: str + """ + self.element.title = val + + def _getTabindex(self): + """ + Specifies whether the element represents an element that is is focusable (that is, an element which is part of the sequence of focusable elements in the document), and the relative order of the element in the sequence of focusable elements in the document. + :returns: number + """ + return self.element.getAttribute("tabindex") + + def _setTabindex(self, val): + """ + Specifies whether the element represents an element that is is focusable (that is, an element which is part of the sequence of focusable elements in the document), and the relative order of the element in the sequence of focusable elements in the document. + :param val: number + """ + self.element.setAttribute("tabindex", val) + + def _getSpellcheck(self): + """ + Specifies whether the element represents an element whose contents are subject to spell checking and grammar checking. + :returns: True | False + """ + return True if self.element.spellcheck == "true" else False + + def _setSpellcheck(self, val): + """ + Specifies whether the element represents an element whose contents are subject to spell checking and grammar checking. + :param val: True | False + """ + self.element.spellcheck = str(val).lower() + + def _getLang(self): + """ + Specifies the primary language for the contents of the element and for any of the elements attributes that contain text. + :returns: language tag e.g. 
de|en|fr|es|it|ru| + """ + return self.element.lang + + def _setLang(self, val): + """ + Specifies the primary language for the contents of the element and for any of the elements attributes that contain text. + :param val: language tag + """ + self.element.lang = val + + def _getHidden(self): + """ + Specifies that the element represents an element that is not yet, or is no longer, relevant. + :returns: True | False + """ + return True if self.element.hasAttribute("hidden") else False + + def _setHidden(self, val): + """ + Specifies that the element represents an element that is not yet, or is no longer, relevant. + :param val: True | False + """ + if val: + self.element.setAttribute("hidden", "") + else: + self.element.removeAttribute("hidden") + + def _getDropzone(self): + """ + Specifies what types of content can be dropped on the element, and instructs the UA about which actions to take with content when it is dropped on the element. + :returns: "copy" | "move" | "link" + """ + return self.element.dropzone + + def _setDropzone(self, val): + """ + Specifies what types of content can be dropped on the element, and instructs the UA about which actions to take with content when it is dropped on the element. + :param val: "copy" | "move" | "link" + """ + self.element.dropzone = val + + def _getDraggable(self): + """ + Specifies whether the element is draggable. + :returns: True | False | "auto" + """ + return (self.element.draggable if str(self.element.draggable) == "auto" else ( + True if str(self.element.draggable).lower() == "true" else False)) + + def _setDraggable(self, val): + """ + Specifies whether the element is draggable. + :param val: True | False | "auto" + """ + self.element.draggable = str(val).lower() + + def _getDir(self): + """ + Specifies the elements text directionality. + :returns: ltr | rtl | auto + """ + return self.element.dir + + def _setDir(self, val): + """ + Specifies the elements text directionality. + :param val: ltr | rtl | auto + """ + self.element.dir = val + + def _getContextmenu(self): + """ + The value of the id attribute on the menu with which to associate the element as a context menu. + :returns: + """ + return self.element.contextmenu + + def _setContextmenu(self, val): + """ + The value of the id attribute on the menu with which to associate the element as a context menu. + :param val: + """ + self.element.contextmenu = val + + def _getContenteditable(self): + """ + Specifies whether the contents of the element are editable. + :returns: True | False + """ + v = self.element.getAttribute("contenteditable") + return str(v).lower() == "true" + + def _setContenteditable(self, val): + """ + Specifies whether the contents of the element are editable. + :param val: True | False + """ + self.element.setAttribute("contenteditable", str(val).lower()) + + def _getAccesskey(self): + """ + A key label or list of key labels with which to associate the element; each key label represents a keyboard shortcut which UAs can use to activate the element or give focus to the element. + :param self: + :returns: + """ + return self.element.accesskey + + def _setAccesskey(self, val): + """ + A key label or list of key labels with which to associate the element; each key label represents a keyboard shortcut which UAs can use to activate the element or give focus to the element. 
+ :param self: + :param val: + """ + self.element.accesskey = val + + def _getId(self): + """ + Specifies a unique id for an element + :param self: + :returns: + """ + return self.element.id + + def _setId(self, val): + """ + Specifies a unique id for an element + :param self: + :param val: + """ + self.element.id = val + + def _getClass(self): + """ + The class attribute specifies one or more classnames for an element. + :returns: + """ + return self._widgetClassWrapper + + def _setClass(self, value): + """ + The class attribute specifies one or more classnames for an element. + :param self: + :param value: + @raise ValueError: + """ + + if value is None: + self.element.setAttribute("class", " ") + elif isinstance(value, str): + self.element.setAttribute("class", value) + elif isinstance(value, list): + self.element.setAttribute("class", " ".join(value)) + else: + raise ValueError("Class must be a str, a List or None") + + def _getStyle(self): + """ + The style attribute specifies an inline style for an element. + :param self: + :returns: + """ + return _WidgetStyleWrapper(self) + + def _getRole(self): + """ + Specifies a role for an element + @param self: + @return: + """ + return self.element.getAttribute("role") + + def _setRole(self, val): + """ + Specifies a role for an element + @param self: + @param val: + """ + self.element.setAttribute("role", val) + + def hide(self): + """ + Hide element, if shown. + :return: + """ + state = self["style"].get("display", "") + + if state != "none": + self._lastDisplayState = state + self["style"]["display"] = "none" + + def show(self): + """ + Show element, if hidden. + :return: + """ + if self._lastDisplayState is not None: + self["style"]["display"] = self._lastDisplayState + self._lastDisplayState = None + + def isHidden(self): + """ + Checks if a widget is hidden. + :return: True if hidden, False otherwise. + """ + return self["style"].get("display", "") == "none" + + def isVisible(self): + """ + Checks if a widget is visible. + :return: True if visible, False otherwise. + """ + return not self.isHidden() + + def onBind(self, widget, name): + """ + Event function that is called on the widget when it is bound to another widget with a name. + This is only done by the HTML parser, a manual binding by the user is not triggered. + """ + return + + def onAttach(self): + self._isAttached = True + + for c in self._children: + c.onAttach() + + def onDetach(self): + self._isAttached = False + for c in self._children: + c.onDetach() + + def __collectChildren(self, *args, **kwargs): + assert not isinstance(self, _isVoid), "<%s> can't have children!" 
% self._tagName
+
+        if kwargs.get("bindTo") is None:
+            kwargs["bindTo"] = self
+
+        widgets = []
+        for arg in args:
+            if isinstance(arg, (str, HtmlAst)):
+                widgets.extend(fromHTML(arg, **kwargs))
+
+            elif isinstance(arg, (list, tuple)):
+                for subarg in arg:
+                    widgets.extend(self.__collectChildren(subarg, **kwargs))
+
+            elif not isinstance(arg, (Widget, TextNode)):
+                widgets.append(TextNode(str(arg)))
+
+            else:
+                widgets.append(arg)
+
+        return widgets
+
+    def insertBefore(self, insert, child, **kwargs):
+        if not child:
+            return self.appendChild(insert)
+
+        assert child in self._children, "{} is not a child of {}".format(child, self)
+
+        toInsert = self.__collectChildren(insert, **kwargs)
+
+        for insert in toInsert:
+            if insert._parent:
+                insert._parent.removeChild(insert)
+
+            self.element.insertBefore(insert.element, child.element)
+            self._children.insert(self._children.index(child), insert)
+
+            insert._parent = self
+            if self._isAttached:
+                insert.onAttach()
+
+        return toInsert
+
+    def prependChild(self, *args, **kwargs):
+        if kwargs.get("replace", False):
+            self.removeAllChildren()
+            del kwargs["replace"]
+
+        toPrepend = self.__collectChildren(*args, **kwargs)
+
+        for child in toPrepend:
+            if child._parent:
+                child._parent._children.remove(child)
+                child._parent = None
+
+            if not self._children:
+                self.appendChild(child)
+            else:
+                self.insertBefore(child, self.children(0))
+
+        return toPrepend
+
+    def appendChild(self, *args, **kwargs):
+        if kwargs.get("replace", False):
+            self.removeAllChildren()
+            del kwargs["replace"]
+
+        toAppend = self.__collectChildren(*args, **kwargs)
+
+        for child in toAppend:
+            if child._parent:
+                child._parent._children.remove(child)
+
+            self._children.append(child)
+            self.element.appendChild(child.element)
+            child._parent = self
+
+            if self._isAttached:
+                child.onAttach()
+
+        return toAppend
+
+    def removeChild(self, child):
+        assert child in self._children, "{} is not a child of {}".format(child, self)
+
+        if child._isAttached:
+            child.onDetach()
+
+        self.element.removeChild(child.element)
+        self._children.remove(child)
+        child._parent = None
+
+    def removeAllChildren(self):
+        """
+        Removes all child widgets of the current widget.
+        """
+        for child in self._children[:]:
+            self.removeChild(child)
+
+    def isParentOf(self, widget):
+        """
+        Checks if an object is the parent of widget.
+
+        :type widget: Widget
+        :param widget: The widget to check for.
+        :return: True, if widget is a child of the object, else False.
+        """
+
+        # You cannot be your own child!
+        if self == widget:
+            return False
+
+        for child in self._children:
+            if child == widget:
+                return True
+
+            if child.isParentOf(widget):
+                return True
+
+        return False
+
+    def isChildOf(self, widget):
+        """
+        Checks if an object is the child of widget.
+
+        :type widget: Widget
+        :param widget: The widget to check for.
+        :return: True, if object is a child of widget, else False.
+        """
+
+        # You cannot be your own parent!
+        if self == widget:
+            return False
+
+        parent = self.parent()
+        while parent:
+            if parent == widget:
+                return True
+
+            # Continue walking up the ancestor chain.
+            parent = parent.parent()
+
+        return False
+
+    def hasClass(self, className):
+        """
+        Determine whether the current widget is assigned the given class.
+
+        :param className: The class name to search for.
+        :type className: str
+        """
+
+        if isinstance(className, str):
+            return className in self["class"]
+        else:
+            raise TypeError()
+
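+    # The class helpers below (addClass/removeClass/toggleClass) all operate on
+    # the same class list that is exposed via the "class" item, so e.g.
+    # widget.addClass("btn", "is-active") and widget["class"].append("btn")
+    # manipulate the same underlying attribute.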
+    def addClass(self, *args):
+        """
+        Adds a class or a list of classes to the current widget.
+        If the widget already has the class, it is ignored.
+
+        :param args: A list of class names. This can also be a list.
+        :type args: list of str | list of list of str
+        """
+
+        for item in args:
+            if isinstance(item, list):
+                self.addClass(*item)
+
+            elif isinstance(item, str):
+                for sitem in item.split(" "):
+                    if not self.hasClass(sitem):
+                        self["class"].append(sitem)
+            else:
+                raise TypeError()
+
+    def removeClass(self, *args):
+        """
+        Removes a class or a list of classes from the current widget.
+
+        :param args: A list of class names. This can also be a list.
+        :type args: list of str | list of list of str
+        """
+
+        for item in args:
+            if isinstance(item, list):
+                # Unpack nested lists, mirroring addClass().
+                self.removeClass(*item)
+
+            elif isinstance(item, str):
+                for sitem in item.split(" "):
+                    if self.hasClass(sitem):
+                        self["class"].remove(sitem)
+            else:
+                raise TypeError()
+
+    def toggleClass(self, on, off=None):
+        """
+        Toggles the class ``on``.
+
+        If the widget currently has the class ``on``, it is removed and, if given,
+        replaced by ``off``.
+
+        :param on: Classname to test for. If ``on`` does not exist, but ``off``, ``off`` is replaced by ``on``.
+        :type on: str
+
+        :param off: Classname to replace if ``on`` existed.
+        :type off: str
+
+        :return: Returns True, if ``on`` was switched, else False.
+        :rtype: bool
+        """
+        if self.hasClass(on):
+            self.removeClass(on)
+
+            if off and not self.hasClass(off):
+                self.addClass(off)
+
+            return False
+
+        if off and self.hasClass(off):
+            self.removeClass(off)
+
+        self.addClass(on)
+        return True
+
+    def onBlur(self, event):
+        pass
+
+    def onChange(self, event):
+        pass
+
+    def onContextMenu(self, event):
+        pass
+
+    def onFocus(self, event):
+        pass
+
+    def onFocusIn(self, event):
+        pass
+
+    def onFocusOut(self, event):
+        pass
+
+    def onFormChange(self, event):
+        pass
+
+    def onFormInput(self, event):
+        pass
+
+    def onInput(self, event):
+        pass
+
+    def onInvalid(self, event):
+        pass
+
+    def onReset(self, event):
+        pass
+
+    def onSelect(self, event):
+        pass
+
+    def onSubmit(self, event):
+        pass
+
+    def onKeyDown(self, event):
+        pass
+
+    def onKeyPress(self, event):
+        pass
+
+    def onKeyUp(self, event):
+        pass
+
+    def onClick(self, event):
+        pass
+
+    def onDblClick(self, event):
+        pass
+
+    def onDrag(self, event):
+        pass
+
+    def onDragEnd(self, event):
+        pass
+
+    def onDragEnter(self, event):
+        pass
+
+    def onDragLeave(self, event):
+        pass
+
+    def onDragOver(self, event):
+        pass
+
+    def onDragStart(self, event):
+        pass
+
+    def onDrop(self, event):
+        pass
+
+    def onMouseDown(self, event):
+        pass
+
+    def onMouseMove(self, event):
+        pass
+
+    def onMouseOut(self, event):
+        pass
+
+    def onMouseOver(self, event):
+        pass
+
+    def onMouseUp(self, event):
+        pass
+
+    def onMouseWheel(self, event):
+        pass
+
+    def onScroll(self, event):
+        pass
+
+    def onTouchStart(self, event):
+        pass
+
+    def onTouchEnd(self, event):
+        pass
+
+    def onTouchMove(self, event):
+        pass
+
+    def onTouchCancel(self, event):
+        pass
+
+    def focus(self):
+        self.element.focus()
+
+    def blur(self):
+        self.element.blur()
+
+    def parent(self):
+        return self._parent
+
+    def children(self, n=None):
+        """
+        Access children of widget.
+
+        If ``n`` is omitted, it returns a list of all child-widgets;
+        Else, it returns the N'th child, or None if it's out of bounds.
+
+        :param n: Optional offset of child widget to return.
+        :type n: int
+
+        :return: Returns all children or only the requested one.
+ :rtype: list | Widget | None + """ + if n is None: + return self._children[:] + + try: + return self._children[n] + except IndexError: + return None + + def sortChildren(self, key): + """ + Sorts our direct children. They are rearranged on DOM level. + Key must be a function accepting one widget as parameter and must return + the key used to sort these widgets. + """ + self._children.sort(key=key) + tmpl = self._children[:] + tmpl.reverse() + for c in tmpl: + self.element.removeChild(c.element) + self.element.insertBefore(c.element, self.element.children.item(0)) + + def fromHTML(self, html, appendTo=None, bindTo=None, replace=False, vars=None, **kwargs): + """ + Parses html and constructs its elements as part of self. + + :param html: HTML code. + :param appendTo: The entity where the HTML code is constructed below. This defaults to self in usual case. + :param bindTo: The entity where the named objects are bound to. This defaults to self in usual case. + :param replace: Clear entire content of appendTo before appending. + :param vars: Deprecated; Same as kwargs. + :param **kwargs: Additional variables provided as a dict for {{placeholders}} inside the HTML + + :return: + """ + if appendTo is None: + appendTo = self + + if bindTo is None: + bindTo = self + + if replace: + appendTo.removeAllChildren() + + # use of vars is deprecated! + if isinstance(vars, dict): + kwargs.update(vars) + + return fromHTML(html, appendTo=appendTo, bindTo=bindTo, **kwargs) + + +######################################################################################################################## +# Attribute Collectors +######################################################################################################################## + +# _attrLabel --------------------------------------------------------------------------------------------------------------- + +class _attrLabel(object): + def _getLabel(self): + return self.element.getAttribute("label") + + def _setLabel(self, val): + self.element.setAttribute("label", val) + + +# _attrCharset -------------------------------------------------------------------------------------------------------------- + +class _attrCharset(object): + def _getCharset(self): + return self.element._attrCharset + + def _setCharset(self, val): + self.element._attrCharset = val + + +# _attrCite ----------------------------------------------------------------------------------------------------------------- + +class _attrCite(object): + def _getCite(self): + return self.element._attrCite + + def _setCite(self, val): + self.element._attrCite = val + + +class _attrDatetime(object): + def _getDatetime(self): + return self.element.datetime + + def _setDatetime(self, val): + self.element.datetime = val + + +# Form ----------------------------------------------------------------------------------------------------------------- + +class _attrForm(object): + def _getForm(self): + return self.element.form + + def _setForm(self, val): + self.element.form = val + + +class _attrAlt(object): + def _getAlt(self): + return self.element.alt + + def _setAlt(self, val): + self.element.alt = val + + +class _attrAutofocus(object): + def _getAutofocus(self): + return True if self.element.hasAttribute("autofocus") else False + + def _setAutofocus(self, val): + if val: + self.element.setAttribute("autofocus", "") + else: + self.element.removeAttribute("autofocus") + + +class _attrDisabled(object): + pass + + +class _attrChecked(object): + def _getChecked(self): + return self.element.checked + + 
def _setChecked(self, val): + self.element.checked = val + + +class _attrIndeterminate(object): + def _getIndeterminate(self): + return self.element.indeterminate + + def _setIndeterminate(self, val): + self.element.indeterminate = val + + +class _attrName(object): + def _getName(self): + return self.element.getAttribute("name") + + def _setName(self, val): + self.element.setAttribute("name", val) + + +class _attrValue(object): + def _getValue(self): + return self.element.value + + def _setValue(self, val): + self.element.value = val + + +class _attrAutocomplete(object): + def _getAutocomplete(self): + return True if self.element.autocomplete == "on" else False + + def _setAutocomplete(self, val): + self.element.autocomplete = "on" if val == True else "off" + + +class _attrRequired(object): + def _getRequired(self): + return True if self.element.hasAttribute("required") else False + + def _setRequired(self, val): + if val: + self.element.setAttribute("required", "") + else: + self.element.removeAttribute("required") + + +class _attrMultiple(object): + def _getMultiple(self): + return True if self.element.hasAttribute("multiple") else False + + def _setMultiple(self, val): + if val: + self.element.setAttribute("multiple", "") + else: + self.element.removeAttribute("multiple") + + +class _attrSize(object): + def _getSize(self): + return self.element.size + + def _setSize(self, val): + self.element.size = val + + +class _attrFor(object): + def _getFor(self): + return self.element.getAttribute("for") + + def _setFor(self, val): + self.element.setAttribute("for", val) + + +class _attrInputs(_attrRequired): + def _getMaxlength(self): + return self.element.maxlength + + def _setMaxlength(self, val): + self.element.maxlength = val + + def _getPlaceholder(self): + return self.element.placeholder + + def _setPlaceholder(self, val): + self.element.placeholder = val + + def _getReadonly(self): + return True if self.element.hasAttribute("readonly") else False + + def _setReadonly(self, val): + if val: + self.element.setAttribute("readonly", "") + else: + self.element.removeAttribute("readonly") + + +class _attrFormhead(object): + def _getFormaction(self): + return self.element.formaction + + def _setFormaction(self, val): + self.element.formaction = val + + def _getFormenctype(self): + return self.element.formenctype + + def _setFormenctype(self, val): + self.element.formenctype = val + + def _getFormmethod(self): + return self.element.formmethod + + def _setFormmethod(self, val): + self.element.formmethod = val + + def _getFormtarget(self): + return self.element.formtarget + + def _setFormtarget(self, val): + self.element.formtarget = val + + def _getFormnovalidate(self): + return True if self.element.hasAttribute("formnovalidate") else False + + def _setFormnovalidate(self, val): + if val: + self.element.setAttribute("formnovalidate", "") + else: + self.element.removeAttribute("formnovalidate") + + +# _attrHref ----------------------------------------------------------------------------------------------------------------- + +class _attrHref(object): + def _getHref(self): + """ + Url of a Page + :param self: + """ + return self.element.href + + def _setHref(self, val): + """ + Url of a Page + :param val: URL + """ + self.element.href = val + + def _getHreflang(self): + return self.element.hreflang + + def _setHreflang(self, val): + self.element.hreflang = val + + +class _attrTarget(object): + def _getTarget(self): + return self.element.target + + def _setTarget(self, val): + self.element.target = val 
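+
+
+# A short illustrative sketch (not part of the original patch): Widget's
+# __getitem__/__setitem__ dispatch item access by name to the _get*/_set*
+# accessors contributed by these mixins, so widget["href"] resolves to
+# _getHref() of _attrHref. The helper below is hypothetical and never called.
+def _exampleAttrDispatch():
+    link = A()                             # A mixes in _attrHref, _attrTarget, ...
+    link["href"] = "https://example.com"   # routed to _attrHref._setHref()
+    link["target"] = "_blank"              # routed to _attrTarget._setTarget()
+    return link["href"]                    # routed to _attrHref._getHref()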
+ + +# _attrMedia ---------------------------------------------------------------------------------------------------------------- + +class _attrType(object): + def _getType(self): + return self.element.type + + def _setType(self, val): + self.element.type = val + + +class _attrMedia(_attrType): + def _getMedia(self): + return self.element.media + + def _setMedia(self, val): + self.element.media = val + + +class _attrDimensions(object): + def _getWidth(self): + return self.element.width + + def _setWidth(self, val): + self.element.width = val + + def _getHeight(self): + return self.element.height + + def _setHeight(self, val): + self.element.height = val + + +class _attrUsemap(object): + def _getUsemap(self): + return self.element.usemap + + def _setUsemap(self, val): + self.element.usemap = val + + +class _attrMultimedia(object): + def _getAutoplay(self): + return True if self.element.hasAttribute("autoplay") else False + + def _setAutoplay(self, val): + if val: + self.element.setAttribute("autoplay", "") + else: + self.element.removeAttribute("autoplay") + + def _getPlaysinline(self): + return True if self.element.hasAttribute("playsinline") else False + + def _setPlaysinline(self, val): + if val: + self.element.setAttribute("playsinline", "") + else: + self.element.removeAttribute("playsinline") + + def _getControls(self): + return True if self.element.hasAttribute("controls") else False + + def _setControls(self, val): + if val: + self.element.setAttribute("controls", "") + else: + self.element.removeAttribute("controls") + + def _getLoop(self): + return True if self.element.hasAttribute("loop") else False + + def _setLoop(self, val): + if val: + self.element.setAttribute("loop", "") + else: + self.element.removeAttribute("loop") + + def _getMuted(self): + return True if self.element.hasAttribute("muted") else False + + def _setMuted(self, val): + if val: + self.element.setAttribute("muted", "") + else: + self.element.removeAttribute("muted") + + def _getPreload(self): + return self.element.preload + + def _setPreload(self, val): + self.element.preload = val + + +# _attrRel ------------------------------------------------------------------------------------------------------------------ + +class _attrRel(object): + def _getRel(self): + return self.element.rel + + def _setRel(self, val): + self.element.rel = val + + +# _attrSrc ------------------------------------------------------------------------------------------------------------------ + +class _attrSrc(object): + def _getSrc(self): + return self.element.src + + def _setSrc(self, val): + self.element.src = val + + +# Svg ------------------------------------------------------------------------------------------------------------------ + +class _attrSvgViewBox(object): + def _getViewbox(self): + viewBox = self.element.viewBox + try: + return " ".join([str(x) for x in [viewBox.baseVal.x, viewBox.baseVal.y, viewBox.baseVal.width, viewBox.baseVal.height]]) + except: + return "" + + def _setViewbox(self, val): + self.element.setAttribute("viewBox", val) + + def _getPreserveaspectratio(self): + return self.element.preserveAspectRatio + + def _setPreserveaspectratio(self, val): + self.element.setAttribute("preserveAspectRatio", val) + + +class _attrSvgDimensions(object): + def _getWidth(self): + return self.element.width + + def _setWidth(self, val): + self.element.setAttribute("width", val) + + def _getHeight(self): + return self.element.height + + def _setHeight(self, val): + self.element.setAttribute("height", val) + + def _getX(self): 
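+        # The accessors of this mixin (x, y, r, rx, ry, cx, cy) mirror the
+        # element's SVG geometry attributes.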
+ return self.element.x + + def _setX(self, val): + self.element.setAttribute("x", val) + + def _getY(self): + return self.element.y + + def _setY(self, val): + self.element.setAttribute("y", val) + + def _getR(self): + return self.element.r + + def _setR(self, val): + self.element.setAttribute("r", val) + + def _getRx(self): + return self.element.rx + + def _setRx(self, val): + self.element.setAttribute("rx", val) + + def _getRy(self): + return self.element.ry + + def _setRy(self, val): + self.element.setAttribute("ry", val) + + def _getCx(self): + return self.element.cx + + def _setCx(self, val): + self.element.setAttribute("cx", val) + + def _getCy(self): + return self.element.cy + + def _setCy(self, val): + self.element.setAttribute("cy", val) + + +class _attrSvgPoints(object): + def _getPoints(self): + return self.element.points + + def _setPoints(self, val): + self.element.setAttribute("points", val) + + def _getX1(self): + return self.element.x1 + + def _setX1(self, val): + self.element.setAttribute("x1", val) + + def _getY1(self): + return self.element.y1 + + def _setY1(self, val): + self.element.setAttribute("y1", val) + + def _getX2(self): + return self.element.x2 + + def _setX2(self, val): + self.element.setAttribute("x2", val) + + def _getY2(self): + return self.element.y2 + + def _setY2(self, val): + self.element.setAttribute("y2", val) + + +class _attrSvgTransform(object): + def _getTransform(self): + return self.element.transform + + def _setTransform(self, val): + self.element.setAttribute("transform", val) + + +class _attrSvgXlink(object): + def _getXlinkhref(self): + return self.element.getAttribute("xlink:href") + + def _setXlinkhref(self, val): + self.element.setAttribute("xlink:href", val) + + +class _attrSvgStyles(object): + def _getFill(self): + return self.element.fill + + def _setFill(self, val): + self.element.setAttribute("fill", val) + + def _getStroke(self): + return self.element.stroke + + def _setStroke(self, val): + self.element.setAttribute("stroke", val) + + +class _isVoid(object): + pass + + +######################################################################################################################## +# HTML Elements +######################################################################################################################## + +# A -------------------------------------------------------------------------------------------------------------------- + +class A(Widget, _attrHref, _attrTarget, _attrMedia, _attrRel, _attrName): + _tagName = "a" + + def _getDownload(self): + """ + The download attribute specifies the path to a download + :returns: filename + """ + return self.element.download + + def _setDownload(self, val): + """ + The download attribute specifies the path to a download + :param val: filename + """ + self.element.download = val + + +# Area ----------------------------------------------------------------------------------------------------------------- + +class Area(A, _attrAlt, _isVoid): + _tagName = "area" + + def _getCoords(self): + return self.element.coords + + def _setCoords(self, val): + self.element.coords = val + + def _getShape(self): + return self.element.shape + + def _setShape(self, val): + self.element.shape = val + + +# Audio ---------------------------------------------------------------------------------------------------------------- + +class Audio(Widget, _attrSrc, _attrMultimedia): + _tagName = "audio" + +class Bdo(Widget): + _tagName = "bdo" + + +# Blockquote 
----------------------------------------------------------------------------------------------------------- + +class Blockquote(Widget): + _tagName = "blockquote" + + def _getBlockquote(self): + return self.element.blockquote + + def _setBlockquote(self, val): + self.element.blockquote = val + + +# Body ----------------------------------------------------------------------------------------------------------------- + +class BodyCls(Widget): + + def __init__(self, *args, **kwargs): + super().__init__(_wrapElem=domGetElementsByTagName("body")[0], *args, **kwargs) + self._isAttached = True + + +_body = None + + +def Body(): + global _body + + if _body is None: + _body = BodyCls() + + return _body + + +# Canvas --------------------------------------------------------------------------------------------------------------- + +class Canvas(Widget, _attrDimensions): + _tagName = "canvas" + + +# Command -------------------------------------------------------------------------------------------------------------- + +class Command(Widget, _attrLabel, _attrType, _attrDisabled, _attrChecked): + _tagName = "command" + + def _getIcon(self): + return self.element.icon + + def _setIcon(self, val): + self.element.icon = val + + def _getRadiogroup(self): + return self.element.radiogroup + + def _setRadiogroup(self, val): + self.element.radiogroup = val + + +# _Del ----------------------------------------------------------------------------------------------------------------- + +class _Del(Widget, _attrCite, _attrDatetime): + _tagName = "_del" + + +# Dialog -------------------------------------------------------------------------------------------------------------- + +class Dialog(Widget): + _tagName = "dialog" + + def _getOpen(self): + return True if self.element.hasAttribute("open") else False + + def _setOpen(self, val): + if val: + self.element.setAttribute("open", "") + else: + self.element.removeAttribute("open") + +# Elements ------------------------------------------------------------------------------------------------------------- + +class Abbr(Widget): + _tagName = "abbr" + + +class Address(Widget): + _tagName = "address" + + +class Article(Widget): + _tagName = "article" + + +class Aside(Widget): + _tagName = "aside" + + +class B(Widget): + _tagName = "b" + + +class Bdi(Widget): + _tagName = "bdi" + + +class Br(Widget, _isVoid): + _tagName = "br" + + +class Caption(Widget): + _tagName = "caption" + + +class Cite(Widget): + _tagName = "cite" + + +class Code(Widget): + _tagName = "code" + + +class Datalist(Widget): + _tagName = "datalist" + + +class Dfn(Widget): + _tagName = "dfn" + + +class Div(Widget): + _tagName = "div" + + +class Em(Widget): + _tagName = "em" + + +class Embed(Widget, _attrSrc, _attrType, _attrDimensions, _isVoid): + _tagName = "embed" + + +class Figcaption(Widget): + _tagName = "figcaption" + + +class Figure(Widget): + _tagName = "figure" + + +class Footer(Widget): + _tagName = "footer" + + +class Header(Widget): + _tagName = "header" + + +class H1(Widget): + _tagName = "h1" + + +class H2(Widget): + _tagName = "h2" + + +class H3(Widget): + _tagName = "h3" + + +class H4(Widget): + _tagName = "h4" + + +class H5(Widget): + _tagName = "h5" + + +class H6(Widget): + _tagName = "h6" + + +class Hr(Widget, _isVoid): + _tagName = "hr" + + +class I(Widget): + _tagName = "i" + + +class Kdb(Widget): + _tagName = "kdb" + + +class Legend(Widget): + _tagName = "legend" + + +class Mark(Widget): + _tagName = "mark" + + +class Noscript(Widget): + _tagName = "noscript" + + +class P(Widget): + 
_tagName = "p" + + +class Rq(Widget): + _tagName = "rq" + + +class Rt(Widget): + _tagName = "rt" + + +class Ruby(Widget): + _tagName = "ruby" + + +class S(Widget): + _tagName = "s" + + +class Samp(Widget): + _tagName = "samp" + + +class Section(Widget): + _tagName = "section" + + +class Small(Widget): + _tagName = "small" + + +class Strong(Widget): + _tagName = "strong" + + +class Sub(Widget): + _tagName = "sub" + + +class Summery(Widget): + _tagName = "summery" + + +class Sup(Widget): + _tagName = "sup" + + +class U(Widget): + _tagName = "u" + + +class Var(Widget): + _tagName = "var" + + +class Wbr(Widget): + _tagName = "wbr" + + +# Form ----------------------------------------------------------------------------------------------------------------- + +class Button(Widget, _attrDisabled, _attrType, _attrForm, _attrAutofocus, _attrName, _attrValue, _attrFormhead): + _tagName = "button" + + +class Fieldset(Widget, _attrDisabled, _attrForm, _attrName): + _tagName = "fieldset" + + +class Form(Widget, _attrDisabled, _attrName, _attrTarget, _attrAutocomplete): + _tagName = "form" + + def _getNovalidate(self): + return True if self.element.hasAttribute("novalidate") else False + + def _setNovalidate(self, val): + if val: + self.element.setAttribute("novalidate", "") + else: + self.element.removeAttribute("novalidate") + + def _getAction(self): + return self.element.action + + def _setAction(self, val): + self.element.action = val + + def _getMethod(self): + return self.element.method + + def _setMethod(self, val): + self.element.method = val + + def _getEnctype(self): + return self.element.enctype + + def _setEnctype(self, val): + self.element.enctype = val + + def _getAccept_attrCharset(self): + return getattr(self.element, "accept-charset") + + def _setAccept_attrCharset(self, val): + self.element.setAttribute("accept-charset", val) + + +class Input(Widget, _attrDisabled, _attrType, _attrForm, _attrAlt, _attrAutofocus, _attrChecked, + _attrIndeterminate, _attrName, _attrDimensions, _attrValue, _attrFormhead, + _attrAutocomplete, _attrInputs, _attrMultiple, _attrSize, _attrSrc, _isVoid): + _tagName = "input" + + def _getAccept(self): + return self.element.accept + + def _setAccept(self, val): + self.element.accept = val + + def _getList(self): + return self.element.list + + def _setList(self, val): + self.element.list = val + + def _getMax(self): + return self.element.max + + def _setMax(self, val): + self.element.max = val + + def _getMin(self): + return self.element.min + + def _setMin(self, val): + self.element.min = val + + def _getPattern(self): + return self.element.pattern + + def _setPattern(self, val): + self.element.pattern = val + + def _getStep(self): + return self.element.step + + def _setStep(self, val): + self.element.step = val + + +class Label(Widget, _attrForm, _attrFor): + _tagName = "label" + autoIdCounter = 0 + + def __init__(self, *args, forElem=None, **kwargs): + super().__init__(*args, **kwargs) + + if forElem: + if not forElem["id"]: + idx = Label.autoIdCounter + Label.autoIdCounter += 1 + forElem["id"] = "label-autoid-for-{}".format(idx) + + self["for"] = forElem["id"] + + +class Optgroup(Widget, _attrDisabled, _attrLabel): + _tagName = "optgroup" + + +class Option(Widget, _attrDisabled, _attrLabel, _attrValue): + _tagName = "option" + + def _getSelected(self): + return True if self.element.selected else False + + def _setSelected(self, val): + if val: + self.element.selected = True + else: + self.element.selected = False + + +class Output(Widget, _attrForm, _attrName, 
_attrFor): + _tagName = "output" + + +class Select(Widget, _attrDisabled, _attrForm, _attrAutofocus, _attrName, _attrRequired, _attrMultiple, _attrSize): + _tagName = "select" + + def _getSelectedIndex(self): + return self.element.selectedIndex + + def _getOptions(self): + return self.element.options + + +class Textarea(Widget, _attrDisabled, _attrForm, _attrAutofocus, _attrName, _attrInputs, _attrValue): + _tagName = "textarea" + + def _getCols(self): + return self.element.cols + + def _setCols(self, val): + self.element.cols = val + + def _getRows(self): + return self.element.rows + + def _setRows(self, val): + self.element.rows = val + + def _getWrap(self): + return self.element.wrap + + def _setWrap(self, val): + self.element.wrap = val + + +# Head ----------------------------------------------------------------------------------------------------------------- + +class HeadCls(Widget): + + def __init__(self, *args, **kwargs): + super().__init__(_wrapElem=domGetElementsByTagName("head")[0], *args, **kwargs) + self._isAttached = True + + +_head = None + + +def Head(): + global _head + if _head is None: + _head = HeadCls() + return _head + + +# Iframe --------------------------------------------------------------------------------------------------------------- + +class Iframe(Widget, _attrSrc, _attrName, _attrDimensions): + _tagName = "iframe" + + def _getSandbox(self): + return self.element.sandbox + + def _setSandbox(self, val): + self.element.sandbox = val + + def _getSrcdoc(self): + return self.element.src + + def _setSrcdoc(self, val): + self.element.src = val + + def _getSeamless(self): + return True if self.element.hasAttribute("seamless") else False + + def _setSeamless(self, val): + if val: + self.element.setAttribute("seamless", "") + else: + self.element.removeAttribute("seamless") + + +# Img ------------------------------------------------------------------------------------------------------------------ + +class Img(Widget, _attrSrc, _attrDimensions, _attrUsemap, _attrAlt, _isVoid): + _tagName = "img" + + def __init__(self, src=None, *args, **kwargs): + super().__init__() + if src: + self["src"] = src + + def _getCrossorigin(self): + return self.element.crossorigin + + def _setCrossorigin(self, val): + self.element.crossorigin = val + + def _getIsmap(self): + return self.element.ismap + + def _setIsmap(self, val): + self.element.ismap = val + + +# Ins ------------------------------------------------------------------------------------------------------------------ + +class Ins(Widget, _attrCite, _attrDatetime): + _tagName = "ins" + + +# Keygen --------------------------------------------------------------------------------------------------------------- + +class Keygen(Form, _attrAutofocus, _attrDisabled): + _tagName = "keygen" + + def _getChallenge(self): + return True if self.element.hasAttribute("challenge") else False + + def _setChallenge(self, val): + if val: + self.element.setAttribute("challenge", "") + else: + self.element.removeAttribute("challenge") + + def _getKeytype(self): + return self.element.keytype + + def _setKeytype(self, val): + self.element.keytype = val + + +# Link ----------------------------------------------------------------------------------------------------------------- + +class Link(Widget, _attrHref, _attrMedia, _attrRel, _isVoid): + _tagName = "link" + + def _getSizes(self): + return self.element.sizes + + def _setSizes(self, val): + self.element.sizes = val + + +# List 
----------------------------------------------------------------------------------------------------------------- + +class Ul(Widget): + _tagName = "ul" + + +class Ol(Widget): + _tagName = "ol" + + +class Li(Widget): + _tagName = "li" + + +class Dl(Widget): + _tagName = "dl" + + +class Dt(Widget): + _tagName = "dt" + + +class Dd(Widget): + _tagName = "dd" + + +# Map ------------------------------------------------------------------------------------------------------------------ + +class Map(Label, _attrType): + _tagName = "map" + + +# Menu ----------------------------------------------------------------------------------------------------------------- + +class Menu(Widget): + _tagName = "menu" + + +# Meta ----------------------------------------------------------------------------------------------------------------- + +class Meta(Widget, _attrName, _attrCharset, _isVoid): + _tagName = "meta" + + def _getContent(self): + return self.element.content + + def _setContent(self, val): + self.element.content = val + + +# Meter ---------------------------------------------------------------------------------------------------------------- + +class Meter(Form, _attrValue): + _tagName = "meter" + + def _getHigh(self): + return self.element.high + + def _setHigh(self, val): + self.element.high = val + + def _getLow(self): + return self.element.low + + def _setLow(self, val): + self.element.low = val + + def _getMax(self): + return self.element.max + + def _setMax(self, val): + self.element.max = val + + def _getMin(self): + return self.element.min + + def _setMin(self, val): + self.element.min = val + + def _getOptimum(self): + return self.element.optimum + + def _setOptimum(self, val): + self.element.optimum = val + + +# Nav ------------------------------------------------------------------------------------------------------------------ + +class Nav(Widget): + _tagName = "nav" + + +# Object ----------------------------------------------------------------------------------------------------------------- + +class Object(Form, _attrType, _attrName, _attrDimensions, _attrUsemap): + _tagName = "object" + + +# Param ----------------------------------------------------------------------------------------------------------------- + +class Param(Widget, _attrName, _attrValue, _isVoid): + _tagName = "param" + + +# Progress ------------------------------------------------------------------------------------------------------------- + +class Progress(Widget, _attrValue): + _tagName = "progress" + + def _getMax(self): + return self.element.max + + def _setMax(self, val): + self.element.max = val + + +# Q -------------------------------------------------------------------------------------------------------------------- + +class Q(Widget, _attrCite): + _tagName = "q" + + +# Script ---------------------------------------------------------------------------------------------------------------- + +class Script(Widget, _attrSrc, _attrCharset): + _tagName = "script" + + def _getAsync(self): + return True if self.element.hasAttribute("async") else False + + def _setAsync(self, val): + if val: + self.element.setAttribute("async", "") + else: + self.element.removeAttribute("async") + + def _getDefer(self): + return True if self.element.hasAttribute("defer") else False + + def _setDefer(self, val): + if val: + self.element.setAttribute("defer", "") + else: + self.element.removeAttribute("defer") + + +# Source --------------------------------------------------------------------------------------------------------------- 
+ +class Source(Widget, _attrMedia, _attrSrc, _isVoid): + _tagName = "source" + + +# Span ----------------------------------------------------------------------------------------------------------------- + +class Span(Widget): + _tagName = "span" + + +# Style ---------------------------------------------------------------------------------------------------------------- + +class Style(Widget, _attrMedia): + _tagName = "style" + + def _getScoped(self): + return True if self.element.hasAttribute("scoped") else False + + def _setScoped(self, val): + if val: + self.element.setAttribute("scoped", "") + else: + self.element.removeAttribute("scoped") + + +# SVG ------------------------------------------------------------------------------------------------------------------ + +class Svg(Widget, _attrSvgViewBox, _attrSvgDimensions, _attrSvgTransform): + _tagName = "svg" + _namespace = "SVG" + + def _getVersion(self): + return self.element.version + + def _setVersion(self, val): + self.element.setAttribute("version", val) + + def _getXmlns(self): + return self.element.xmlns + + def _setXmlns(self, val): + self.element.setAttribute("xmlns", val) + + +class SvgCircle(Widget, _attrSvgTransform, _attrSvgDimensions): + _tagName = "circle" + _namespace = "SVG" + + +class SvgEllipse(Widget, _attrSvgTransform, _attrSvgDimensions): + _tagName = "ellipse" + _namespace = "SVG" + + +class SvgG(Widget, _attrSvgTransform, _attrSvgStyles): + _tagName = "g" + _namespace = "SVG" + + def _getSvgTransform(self): + return self.element.transform + + def _setSvgTransform(self, val): + self.element.setAttribute("transform", val) + + +class SvgImage(Widget, _attrSvgViewBox, _attrSvgDimensions, _attrSvgTransform, _attrSvgXlink): + _tagName = "image" + _namespace = "SVG" + + +class SvgLine(Widget, _attrSvgTransform, _attrSvgPoints): + _tagName = "line" + _namespace = "SVG" + + +class SvgPath(Widget, _attrSvgTransform): + _tagName = "path" + _namespace = "SVG" + + def _getD(self): + return self.element.d + + def _setD(self, val): + self.element.setAttribute("d", val) + + def _getPathLength(self): + return self.element.pathLength + + def _setPathLength(self, val): + self.element.setAttribute("pathLength", val) + + +class SvgPolygon(Widget, _attrSvgTransform, _attrSvgPoints): + _tagName = "polygon" + _namespace = "SVG" + + +class SvgPolyline(Widget, _attrSvgTransform, _attrSvgPoints): + _tagName = "polyline" + _namespace = "SVG" + + +class SvgRect(Widget, _attrSvgDimensions, _attrSvgTransform, _attrSvgStyles): + _tagName = "rect" + _namespace = "SVG" + + +class SvgText(Widget, _attrSvgDimensions, _attrSvgTransform, _attrSvgStyles): + _tagName = "text" + _namespace = "SVG" + + +# Table ---------------------------------------------------------------------------------------------------------------- + + +class Tr(Widget): + _tagName = "tr" + + def _getRowspan(self): + span = self.element.getAttribute("rowspan") + return span if span else 1 + + def _setRowspan(self, span): + assert span >= 1, "span may not be negative" + self.element.setAttribute("rowspan", span) + return self + + +class Td(Widget): + _tagName = "td" + + def _getColspan(self): + span = self.element.getAttribute("colspan") + return span if span else 1 + + def _setColspan(self, span): + assert span >= 1, "span may not be negative" + self.element.setAttribute("colspan", span) + return self + + def _getRowspan(self): + span = self.element.getAttribute("rowspan") + return span if span else 1 + + def _setRowspan(self, span): + assert span >= 1, "span may not be negative" 
+ self.element.setAttribute("rowspan", span) + return self + + +class Th(Td): + _tagName = "th" + + +class Thead(Widget): + _tagName = "thead" + + +class Tbody(Widget): + _tagName = "tbody" + + +class ColWrapper(object): + def __init__(self, parentElem, *args, **kwargs): + super().__init__(*args, **kwargs) + self.parentElem = parentElem + + def __getitem__(self, item): + assert isinstance(item, int), "Invalid col-number. Expected int, got {}".format(str(type(item))) + if item < 0 or item > len(self.parentElem._children): + return None + + return self.parentElem._children[item] + + def __setitem__(self, key, value): + col = self[key] + assert col is not None, "Cannot assign widget to invalid column" + + col.removeAllChildren() + + if isinstance(value, list) or isinstance(value, tuple): + for el in value: + if isinstance(el, Widget) or isinstance(el, TextNode): + col.appendChild(value) + + elif isinstance(value, Widget) or isinstance(value, TextNode): + col.appendChild(value) + + +class RowWrapper(object): + def __init__(self, parentElem, *args, **kwargs): + super().__init__(*args, **kwargs) + self.parentElem = parentElem + + def __getitem__(self, item): + assert isinstance(item, int), "Invalid row-number. Expected int, got {}".format(str(type(item))) + if item < 0 or item > len(self.parentElem._children): + return None + + return ColWrapper(self.parentElem._children[item]) + + +class Table(Widget): + _tagName = "table" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.head = Thead() + self.body = Tbody() + self.appendChild(self.head) + self.appendChild(self.body) + + def prepareRow(self, row): + assert row >= 0, "Cannot create rows with negative index" + + for child in self.body._children: + row -= child["rowspan"] + if row < 0: + return + + while row >= 0: + self.body.appendChild(Tr()) + row -= 1 + + def prepareCol(self, row, col): + assert col >= 0, "Cannot create cols with negative index" + self.prepareRow(row) + + for rowChild in self.body._children: + row -= rowChild["rowspan"] + + if row < 0: + for colChild in rowChild._children: + col -= colChild["colspan"] + if col < 0: + return + + while col >= 0: + rowChild.appendChild(Td()) + col -= 1 + + return + + def prepareGrid(self, rows, cols): + for row in range(self.getRowCount(), self.getRowCount() + rows): + self.prepareCol(row, cols) + + def clear(self): + for row in self.body._children[:]: + + for col in row._children[:]: + row.removeChild(col) + + self.body.removeChild(row) + + def _getCell(self): + return RowWrapper(self.body) + + def getRowCount(self): + cnt = 0 + + for tr in self.body._children: + cnt += tr["rowspan"] + + return cnt + + +# Time ----------------------------------------------------------------------------------------------------------------- + +class Time(Widget, _attrDatetime): + _tagName = "time" + + +# Track ---------------------------------------------------------------------------------------------------------------- + +class Track(Label, _attrSrc, _isVoid): + _tagName = "track" + + def _getKind(self): + return self.element.kind + + def _setKind(self, val): + self.element.kind = val + + def _getSrclang(self): + return self.element.srclang + + def _setSrclang(self, val): + self.element.srclang = val + + def _getDefault(self): + return True if self.element.hasAttribute("default") else False + + def _setDefault(self, val): + if val: + self.element.setAttribute("default", "") + else: + self.element.removeAttribute("default") + + +# Video 
----------------------------------------------------------------------------------------------------------------
+
+class Video(Widget, _attrSrc, _attrDimensions, _attrMultimedia):
+    _tagName = "video"
+
+    def _getPoster(self):
+        return self.element.poster
+
+    def _setPoster(self, val):
+        self.element.poster = val
+
+
+########################################################################################################################
+# Utilities
+########################################################################################################################
+
+def unescape(val, maxLength=0):
+    """
+    Unquotes several HTML-quoted characters in a string.
+
+    :param val: The value to be unescaped.
+    :type val: str
+
+    :param maxLength: Cut-off after maxLength characters.
+        A value of 0 means "unlimited". (default)
+    :type maxLength: int
+
+    :returns: The unquoted string.
+    :rtype: str
+    """
+    val = val \
+        .replace("&lt;", "<") \
+        .replace("&gt;", ">") \
+        .replace("&quot;", "\"") \
+        .replace("&#39;", "'")
+
+    if maxLength > 0:
+        return val[0:maxLength]
+
+    return val
+
+
+def doesEventHitWidgetOrParents(event, widget):
+    """
+    Test if event 'event' hits widget 'widget' (or *any* of its parents)
+    """
+    while widget:
+        if event.target == widget.element:
+            return True
+
+        widget = widget.parent()
+
+    return False
+
+
+def doesEventHitWidgetOrChildren(event, widget):
+    """
+    Test if event 'event' hits widget 'widget' (or *any* of its children)
+    """
+    if event.target == widget.element:
+        return True
+
+    for child in widget._children:
+        if doesEventHitWidgetOrChildren(event, child):
+            return True
+
+    return False
+
+
+def textToHtml(node, text):
+    """
+    Generates html nodes from text by splitting text into content and into
+    line breaks html5.Br.
+
+    :param node: The node where the nodes are appended to.
+    :param text: The text to be inserted.
+    """
+
+    for (i, part) in enumerate(text.split("\n")):
+        if i > 0:
+            node.appendChild(Br())
+
+        node.appendChild(TextNode(part))
+
+
+def parseInt(s, ret=0):
+    """
+    Parses a value as int
+    """
+    if not isinstance(s, str):
+        return int(s)
+    elif s:
+        if s[0] in "+-":
+            ts = s[1:]
+        else:
+            ts = s
+
+        if ts and all([_ in "0123456789" for _ in ts]):
+            return int(s)
+
+    return ret
+
+
+def parseFloat(s, ret=0.0):
+    """
+    Parses a value as float.
+ """ + if not isinstance(s, str): + return float(s) + elif s: + if s[0] in "+-": + ts = s[1:] + else: + ts = s + + if ts and ts.count(".") <= 1 and all([_ in ".0123456789" for _ in ts]): + return float(s) + + return ret + + +######################################################################################################################## +# Keycodes +######################################################################################################################## + +def getKey(event): + """ + Returns the Key Identifier of the given event + + Available Codes: https://www.w3.org/TR/2006/WD-DOM-Level-3-Events-20060413/keyset.html#KeySet-Set + """ + if hasattr(event, "key"): + return event.key + + elif hasattr(event, "keyIdentifier"): + if event.keyIdentifier in ["Esc", "U+001B"]: + return "Escape" + else: + return event.keyIdentifier + + return None + + +def isArrowLeft(event): + return getKey(event) in ["ArrowLeft", "Left"] + +def isArrowUp(event): + return getKey(event) in ["ArrowUp", "Up"] + +def isArrowRight(event): + return getKey(event) in ["ArrowRight", "Right"] + +def isArrowDown(event): + return getKey(event) in ["ArrowDown", "Down"] + +def isEscape(event): + return getKey(event) == "Escape" + +def isReturn(event): + return getKey(event) == "Enter" + +def isControl(event): # The Control (Ctrl) key. + return getKey(event) == "Control" + +def isShift(event): + return getKey(event) == "Shift" + + +######################################################################################################################## +# HTML parser +######################################################################################################################## + +# Global variables required by HTML parser +__tags = None +__domParser = None + + +def registerTag(tagName, widgetClass, override=True): + assert issubclass(widgetClass, Widget), "widgetClass must be a sub-class of Widget!" + global __tags + + if __tags is None: + _buildTags() + + if not override and tagName.lower() in __tags: + return + + attr = [] + + for fname in dir(widgetClass): + if fname.startswith("_set"): + attr.append(fname[4:].lower()) + + __tags[tagName.lower()] = (widgetClass, attr) + + +def tag(cls): + assert issubclass(cls, Widget) + registerTag(cls._parserTagName or cls.__name__, cls) # do NOT check for cls._tagName here!!! + return cls + + +def _buildTags(debug=False): + """ + Generates a dictionary of all to the html5-library + known tags and their associated objects and attributes. + """ + global __tags + + if __tags is not None: + return + + if __tags is None: + __tags = {} + + for cname in globals().keys(): + if cname.startswith("_"): + continue + + cls = globals()[cname] + + try: + if not issubclass(cls, Widget): + continue + except: + continue + + registerTag(cls._parserTagName or cls._tagName or cls.__name__, cls, override=False) + + if debug: + for tag in sorted(__tags.keys()): + print("{}: {}".format(tag, ", ".join(sorted(__tags[tag][1])))) + + +class HtmlAst(list): + pass + + +def parseHTML(html, debug=False): + """ + Parses the provided HTML-code according to the objects defined in the html5-library. + """ + + def convertEncodedText(txt): + """ + Convert HTML-encoded text into decoded string. + + The reason for this function is the handling of HTML entities, which is not + properly supported by native JavaScript. + + We use the browser's DOM parser to to this, according to + https://stackoverflow.com/questions/3700326/decode-amp-back-to-in-javascript + + :param txt: The encoded text. 
+        :return: The decoded text.
+        """
+        global __domParser
+
+        if jseval is None:
+            return txt
+
+        if __domParser is None:
+            __domParser = jseval("new DOMParser")
+
+        dom = __domParser.parseFromString("<!doctype html><body>" + str(txt), "text/html")
+        return dom.body.textContent
+
+    def scanWhite(l):
+        """
+        Scan and return whitespace.
+        """
+
+        ret = ""
+        while l and l[0] in " \t\r\n":
+            ret += l.pop(0)
+
+        return ret
+
+    def scanWord(l):
+        """
+        Scan and return a word.
+        """
+
+        ret = ""
+        while l and l[0] not in " \t\r\n" + "<>=\"'":
+            ret += l.pop(0)
+
+        return ret
+
+    stack = []
+
+    # Obtain tag descriptions, if not already done!
+    global __tags
+
+    if __tags is None:
+        _buildTags(debug=debug)
+
+    # Prepare stack and input
+    stack.append((None, None, HtmlAst()))
+    html = [ch for ch in html]
+
+    # Parse
+    while html:
+        tag = None
+        text = ""
+
+        # Auto-close void elements (_isVoid), e.g. <br/>, <hr/>, etc.
+        while stack and stack[-1][0] and issubclass(__tags[stack[-1][0]][0], _isVoid):
+            stack.pop()
+
+        if not stack:
+            break
+
+        parent = stack[-1][2]
+
+        while html:
+            ch = html.pop(0)
+
+            # Comment
+            if html and ch == "<" and "".join(html[:3]) == "!--":
+                html = html[3:]
+                while html and "".join(html[:3]) != "-->":
+                    html.pop(0)
+
+                html = html[3:]
+
+            # Opening tag
+            elif html and ch == "<" and html[0] != "/":
+                tag = scanWord(html)
+                if tag.lower() in __tags:
+                    break
+
+                text += ch + tag
+
+            # Closing tag
+            elif html and stack[-1][0] and ch == "<" and html[0] == "/":
+                junk = ch
+                junk += html.pop(0)
+
+                tag = scanWord(html)
+                junk += tag
+
+                if stack[-1][0] == tag.lower():
+                    junk += scanWhite(html)
+                    if html and html[0] == ">":
+                        html.pop(0)
+                        stack.pop()
+                        tag = None
+                        break
+
+                text += junk
+                tag = None
+
+            else:
+                text += ch
+
+        # Append plain text (if not only whitespace)
+        if (text and ((len(text) == 1 and text in ["\t", " "])
+                      or not all([ch in " \t\r\n" for ch in text]))):
+            # print("text", text)
+            parent.append(convertEncodedText(text))
+
+        # Create tag
+        if tag:
+            tag = tag.lower()
+            # print("tag", tag)
+
+            elem = (tag, {}, HtmlAst())
+
+            stack.append(elem)
+            parent.append(elem)
+
+            while html:
+                scanWhite(html)
+                if not html:
+                    break
+
+                # End of tag >
+                if html[0] == ">":
+                    html.pop(0)
+                    break
+
+                # Closing tag at end />
+                elif html[0] == "/":
+                    html.pop(0)
+                    scanWhite(html)
+
+                    if html[0] == ">":
+                        stack.pop()
+                        html.pop(0)
+                        break
+
+                val = att = scanWord(html).lower()
+
+                if not att:
+                    html.pop(0)
+                    continue
+
+                if att in __tags[tag][1] or att in ["[name]", "style", "disabled", "hidden"] or att.startswith("data-"):
+                    scanWhite(html)
+                    if html[0] == "=":
+                        html.pop(0)
+                        scanWhite(html)
+
+                        if html[0] in "\"'":
+                            ch = html.pop(0)
+
+                            val = ""
+                            while html and html[0] != ch:
+                                val += html.pop(0)
+
+                            html.pop(0)
+
+                    if att not in elem[1]:
+                        elem[1][att] = val
+                    else:
+                        elem[1][att] += " " + val
+
+                continue
+
+    while stack and stack[-1][0]:
+        stack.pop()
+
+    return stack[0][2]
+
+def fromHTML(html, appendTo=None, bindTo=None, debug=False, vars=None, **kwargs):
+    """
+    Parses the provided HTML code according to the objects defined in the html5-library.
+    html can also be pre-compiled by `parseHTML()` so that it executes faster.
+
+    Constructs all objects as DOM nodes. The first level is chained into appendTo.
+    If no appendTo is provided, appendTo will be set to html5.Body().
+
+    If bindTo is provided, objects are bound to this widget.
+
+    ```python
+    from vi import html5
+
+    div = html5.Div()
+    html5.parse.fromHTML('''
+        <a [name]="myLink">Hello!</a>
+    ''', div)
+
+    div.myLink.appendChild("appended!")
+    ```
+    """
+
+    # Handle defaults
+    if bindTo is None:
+        bindTo = appendTo
+
+    if isinstance(html, str):
+        html = parseHTML(html, debug=debug)
+
+    assert isinstance(html, HtmlAst)
+
+    if isinstance(vars, dict):
+        kwargs.update(vars)
+
+    def replaceVars(txt):
+        for var, val in kwargs.items():
+            txt = txt.replace("{{%s}}" % var, str(val) if val is not None else "")
+
+        return txt
+
+    def interpret(parent, items):
+        ret = []
+
+        for item in items:
+            if isinstance(item, str):
+                txt = TextNode(replaceVars(item))
+
+                if parent:
+                    parent.appendChild(txt)
+
+                ret.append(txt)
+                continue
+
+            tag = item[0]
+            atts = item[1]
+            children = item[2]
+
+            # Special handling for tables: A "thead" and "tbody" are already part of table!
+            if tag in ["thead", "tbody"] and isinstance(parent, Table):
+                wdg = getattr(parent, tag[1:])
+
+            # Usual way: Construct new element and chain it into the parent.
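+            # __tags maps each lower-cased tag name to a (widgetClass, attributes)
+            # tuple, registered by registerTag()/_buildTags(), so __tags[tag][0]()
+            # instantiates the Widget subclass matching the parsed tag.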
+ else: + wdg = __tags[tag][0]() + + for att, val in atts.items(): + val = replaceVars(val) + + if att == "[name]": + # Allow disable binding! + if not bindTo: + continue + + if getattr(bindTo, val, None): + print("Cannot assign name '{}' because it already exists in {}".format(val, bindTo)) + + elif not (any([val.startswith(x) for x in + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "_"]) + and all( + [x in "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789" + "_" + for x in val[1:]])): + print("Cannot assign name '{}' because it contains invalid characters".format(val)) + + else: + setattr(bindTo, val, wdg) + wdg.onBind(bindTo, val) + + if debug: + print("name '{}' assigned to {}".format(val, bindTo)) + + elif att == "class": + # print(tag, att, val.split()) + wdg.addClass(*val.split()) + + elif att == "disabled": + # print(tag, att, val) + if val == "disabled": + wdg.disable() + + elif att == "hidden": + # print(tag, att, val) + if val == "hidden": + wdg.hide() + + elif att == "style": + for dfn in val.split(";"): + if ":" not in dfn: + continue + + att, val = dfn.split(":", 1) + + # print(tag, "style", att.strip(), val.strip()) + wdg["style"][att.strip()] = val.strip() + + elif att.startswith("data-"): + wdg["data"][att[5:]] = val + + else: + wdg[att] = parseInt(val, val) + + interpret(wdg, children) + + if parent and not wdg.parent(): + parent.appendChild(wdg) + + ret.append(wdg) + + return ret + + return interpret(appendTo, html) + + +if __name__ == '__main__': + print(globals()) diff --git a/vendor/lark/docs/ide/app/examples.py b/vendor/lark/docs/ide/app/examples.py new file mode 100644 index 00000000..af9c38c4 --- /dev/null +++ b/vendor/lark/docs/ide/app/examples.py @@ -0,0 +1,150 @@ + +# Examples formattet this way: +# "name": ("grammar", "demo-input") + +examples = { + + # --- hello.lark --- + "hello.lark": (""" +start: WORD "," WORD "!" + +%import common.WORD // imports from terminal library +%ignore " " // Disregard spaces in text +""", "Hello, World!"), + + # --- calc.lark --- +"calc.lark": (""" +?start: sum + | NAME "=" sum -> assign_var + +?sum: product + | sum "+" product -> add + | sum "-" product -> sub + +?product: atom + | product "*" atom -> mul + | product "/" atom -> div + +?atom: NUMBER -> number + | "-" atom -> neg + | NAME -> var + | "(" sum ")" + +%import common.CNAME -> NAME +%import common.NUMBER +%import common.WS_INLINE +%ignore WS_INLINE""", + "1 + 2 * 3 + 4"), + + # --- json.lark --- + "json.lark": (""" +?start: value +?value: object + | array + | string + | SIGNED_NUMBER -> number + | "true" -> true + | "false" -> false + | "null" -> null +array : "[" [value ("," value)*] "]" +object : "{" [pair ("," pair)*] "}" +pair : string ":" value +string : ESCAPED_STRING +%import common.ESCAPED_STRING +%import common.SIGNED_NUMBER +%import common.WS +%ignore WS""", +""" +[ + { + "_id": "5edb875cf3d764da55602437", + "index": 0, + "guid": "3dae2206-5d4d-41fe-b81d-dc8cdba7acaa", + "isActive": false, + "balance": "$2,872.54", + "picture": "http://placehold.it/32x32", + "age": 24, + "eyeColor": "blue", + "name": "Theresa Vargas", + "gender": "female", + "company": "GEEKOL", + "email": "theresavargas@geekol.com", + "phone": "+1 (930) 450-3445", + "address": "418 Herbert Street, Sexton, Florida, 1375", + "about": "Id minim deserunt laborum enim. Veniam commodo incididunt amet aute esse duis veniam occaecat nulla esse aute et deserunt eiusmod. Anim elit ullamco minim magna sint laboris. 
Est consequat quis deserunt excepteur in magna pariatur laborum quis eu. Ex quis tempor elit qui qui et culpa sunt sit esse mollit cupidatat. Fugiat cillum deserunt enim minim irure reprehenderit est. Voluptate nisi quis amet quis incididunt pariatur nostrud Lorem consectetur adipisicing voluptate.\\r\\n", + "registered": "2016-11-19T01:02:42 -01:00", + "latitude": -25.65267, + "longitude": 104.19531, + "tags": [ + "eiusmod", + "reprehenderit", + "anim", + "sunt", + "esse", + "proident", + "esse" + ], + "friends": [ + { + "id": 0, + "name": "Roth Herrera" + }, + { + "id": 1, + "name": "Callie Christian" + }, + { + "id": 2, + "name": "Gracie Whitfield" + } + ], + "greeting": "Hello, Theresa Vargas! You have 6 unread messages.", + "favoriteFruit": "banana" + }, + { + "_id": "5edb875c845eb08161a83e64", + "index": 1, + "guid": "a8ada2c1-e2c7-40d3-96b4-52c93baff7f0", + "isActive": false, + "balance": "$2,717.04", + "picture": "http://placehold.it/32x32", + "age": 23, + "eyeColor": "green", + "name": "Lily Ross", + "gender": "female", + "company": "RODEOMAD", + "email": "lilyross@rodeomad.com", + "phone": "+1 (941) 465-3561", + "address": "525 Beekman Place, Blodgett, Marshall Islands, 3173", + "about": "Aliquip duis proident excepteur eiusmod in quis officia consequat culpa eu et ut. Occaecat reprehenderit tempor mollit do eu magna qui et magna exercitation aliqua. Incididunt exercitation dolor proident eiusmod minim occaecat. Sunt et minim mollit et veniam sint ex. Duis ullamco elit aute eu excepteur reprehenderit officia.\\r\\n", + "registered": "2019-11-02T04:06:42 -01:00", + "latitude": 17.031701, + "longitude": -42.657106, + "tags": [ + "id", + "non", + "culpa", + "reprehenderit", + "esse", + "elit", + "sit" + ], + "friends": [ + { + "id": 0, + "name": "Ursula Maldonado" + }, + { + "id": 1, + "name": "Traci Huff" + }, + { + "id": 2, + "name": "Taylor Holt" + } + ], + "greeting": "Hello, Lily Ross! You have 3 unread messages.", + "favoriteFruit": "strawberry" + } +]""") +} \ No newline at end of file diff --git a/vendor/lark/docs/ide/app/ext.py b/vendor/lark/docs/ide/app/ext.py new file mode 100644 index 00000000..330d032e --- /dev/null +++ b/vendor/lark/docs/ide/app/ext.py @@ -0,0 +1,475 @@ +# -*- coding: utf-8 -*- +from . import core as html5 +from . import utils + +class Button(html5.Button): + + def __init__(self, txt=None, callback=None, className=None, *args, **kwargs): + super().__init__(*args, **kwargs) + self["class"] = "btn" + + if className: + self.addClass(className) + + self["type"] = "button" + + if txt is not None: + self.setText(txt) + + self.callback = callback + self.sinkEvent("onClick") + + def setText(self, txt): + if txt is not None: + self.element.innerHTML = txt + self["title"] = txt + else: + self.element.innerHTML = "" + self["title"] = "" + + def onClick(self, event): + event.stopPropagation() + event.preventDefault() + if self.callback is not None: + self.callback(self) + + +class Input(html5.Input): + def __init__(self, type="text", placeholder=None, callback=None, id=None, focusCallback=None, *args, **kwargs): + """ + + :param type: Input type. Default: "text + :param placeholder: Placeholder text. Default: None + :param callback: Function to be called onChanged: callback(id, value) + :param id: Optional id of the input element. 
Will be passed to callback + :return: + """ + super().__init__(*args, **kwargs) + self["class"] = "input" + self["type"] = type + if placeholder is not None: + self["placeholder"] = placeholder + + self.callback = callback + if id is not None: + self["id"] = id + self.sinkEvent("onChange") + + self.focusCallback = focusCallback + if focusCallback: + self.sinkEvent("onFocus") + + def onChange(self, event): + event.stopPropagation() + event.preventDefault() + if self.callback is not None: + self.callback(self, self["id"], self["value"]) + + def onFocus(self, event): + event.stopPropagation() + event.preventDefault() + if self.focusCallback is not None: + self.focusCallback(self, self["id"], self["value"]) + + def onDetach(self): + super().onDetach() + self.callback = None + + +class Popup(html5.Div): + def __init__(self, title=None, id=None, className=None, icon=None, enableShortcuts=True, closeable=True, *args, **kwargs): + super().__init__(""" +
+            <div class="box" [name]="popupBox">
+                <div class="box-head" [name]="popupHead">
+                    <div class="item" [name]="popupHeadItem">
+                        <div class="item-image">
+                            <i class="i i--small" [name]="popupIcon"></i>
+                        </div>
+                        <div class="item-content">
+                            <div class="item-headline" [name]="popupHeadline"></div>
+                        </div>
+                    </div>
+                </div>
+                <div class="box-body box--content" [name]="popupBody"></div>
+                <div class="box-foot box--content bar" [name]="popupFoot"></div>
+            </div>
+ """) + + self.appendChild = self.popupBody.appendChild + self.fromHTML = lambda *args, **kwargs: self.popupBody.fromHTML(*args, **kwargs) if kwargs.get("bindTo") else self.popupBody.fromHTML(bindTo=self, *args, **kwargs) + + self["class"] = "popup popup--center is-active" + if className: + self.addClass(className) + + if closeable: + closeBtn = Button("×", self.close, className="item-action") + closeBtn.removeClass("btn") + self.popupHeadItem.appendChild(closeBtn) + + if title: + self.popupHeadline.appendChild(title) + + if icon: + self.popupIcon.appendChild(icon[0]) + elif title: + self.popupIcon.appendChild(title[0]) + else: + self.popupIcon.appendChild("Vi") #fixme!!! this _LIBRARY_ is not only used in the Vi... + + # id can be used to pass information to callbacks + self.id = id + + #FIXME: Implement a global overlay! One popupOverlay next to a list of popups. + self.popupOverlay = html5.Div() + self.popupOverlay["class"] = "popup-overlay is-active" + + self.enableShortcuts = enableShortcuts + self.onDocumentKeyDownMethod = None + + self.popupOverlay.appendChild(self) + html5.Body().appendChild(self.popupOverlay) + + #FIXME: Close/Cancel every popup with click on popupCloseBtn without removing the global overlay. + + def onAttach(self): + super(Popup, self).onAttach() + + if self.enableShortcuts: + self.onDocumentKeyDownMethod = self.onDocumentKeyDown # safe reference to method + html5.document.addEventListener("keydown", self.onDocumentKeyDownMethod) + + def onDetach(self): + super(Popup, self).onDetach() + + if self.enableShortcuts: + html5.document.removeEventListener("keydown", self.onDocumentKeyDownMethod) + + def onDocumentKeyDown(self, event): + if html5.isEscape(event): + self.close() + + def close(self, *args, **kwargs): + html5.Body().removeChild(self.popupOverlay) + self.popupOverlay = None + + + +class InputDialog(Popup): + def __init__(self, text, value="", successHandler=None, abortHandler=None, + successLbl="OK", abortLbl="Cancel", placeholder="", *args, **kwargs): + + super().__init__(*args, **kwargs) + self.addClass("popup--inputdialog") + + self.sinkEvent("onKeyDown", "onKeyUp") + + self.successHandler = successHandler + self.abortHandler = abortHandler + + self.fromHTML( + """ +
+            <div class="input-group">
+                <label class="label">{{text}}</label>
+                <input class="input" [name]="inputElem" value="{{value}}" placeholder="{{placeholder}}" />
+            </div>
+ """, + vars={ + "text": text, + "value": value, + "placeholder": placeholder + } + ) + + # Cancel + self.popupFoot.appendChild(Button(abortLbl, self.onCancel, className="btn--cancel btn--danger")) + + # Okay + self.okayBtn = Button(successLbl, self.onOkay, className="btn--okay btn--primary") + if not value: + self.okayBtn.disable() + + self.popupFoot.appendChild(self.okayBtn) + + self.inputElem.focus() + + def onKeyDown(self, event): + if html5.isReturn(event) and self.inputElem["value"]: + event.stopPropagation() + event.preventDefault() + self.onOkay() + + def onKeyUp(self, event): + if self.inputElem["value"]: + self.okayBtn.enable() + else: + self.okayBtn.disable() + + def onDocumentKeyDown(self, event): + if html5.isEscape(event): + event.stopPropagation() + event.preventDefault() + self.onCancel() + + def onOkay(self, *args, **kwargs): + if self.successHandler: + self.successHandler(self, self.inputElem["value"]) + self.close() + + def onCancel(self, *args, **kwargs): + if self.abortHandler: + self.abortHandler(self, self.inputElem["value"]) + self.close() + + +class Alert(Popup): + """ + Just displaying an alerting message box with OK-button. + """ + + def __init__(self, msg, title=None, className=None, okCallback=None, okLabel="OK", icon="!", closeable=True, *args, **kwargs): + super().__init__(title, className=None, icon=icon, closeable=closeable, *args, **kwargs) + self.addClass("popup--alert") + + if className: + self.addClass(className) + + self.okCallback = okCallback + + message = html5.Span() + message.addClass("alert-msg") + self.popupBody.appendChild(message) + + if isinstance(msg, str): + msg = msg.replace("\n", "
") + + message.appendChild(msg, bindTo=False) + + self.sinkEvent("onKeyDown") + + if closeable: + okBtn = Button(okLabel, callback=self.onOkBtnClick) + okBtn.addClass("btn--okay btn--primary") + self.popupFoot.appendChild(okBtn) + + okBtn.focus() + + def drop(self): + self.okCallback = None + self.close() + + def onOkBtnClick(self, sender=None): + if self.okCallback: + self.okCallback(self) + + self.drop() + + def onKeyDown(self, event): + if html5.isReturn(event): + event.stopPropagation() + event.preventDefault() + self.onOkBtnClick() + + +class YesNoDialog(Popup): + def __init__(self, question, title=None, yesCallback=None, noCallback=None, + yesLabel="Yes", noLabel="No", icon="?", + closeable=False, *args, **kwargs): + super().__init__(title, closeable=closeable, icon=icon, *args, **kwargs) + self.addClass("popup--yesnodialog") + + self.yesCallback = yesCallback + self.noCallback = noCallback + + lbl = html5.Span() + lbl["class"].append("question") + self.popupBody.appendChild(lbl) + + if isinstance(question, html5.Widget): + lbl.appendChild(question) + else: + utils.textToHtml(lbl, question) + + if len(noLabel): + btnNo = Button(noLabel, className="btn--no", callback=self.onNoClicked) + #btnNo["class"].append("btn--no") + self.popupFoot.appendChild(btnNo) + + btnYes = Button(yesLabel, callback=self.onYesClicked) + btnYes["class"].append("btn--yes") + self.popupFoot.appendChild(btnYes) + + self.sinkEvent("onKeyDown") + btnYes.focus() + + def onKeyDown(self, event): + if html5.isReturn(event): + event.stopPropagation() + event.preventDefault() + self.onYesClicked() + + def onDocumentKeyDown(self, event): + if html5.isEscape(event): + event.stopPropagation() + event.preventDefault() + self.onNoClicked() + + def drop(self): + self.yesCallback = None + self.noCallback = None + self.close() + + def onYesClicked(self, *args, **kwargs): + if self.yesCallback: + self.yesCallback(self) + + self.drop() + + def onNoClicked(self, *args, **kwargs): + if self.noCallback: + self.noCallback(self) + + self.drop() + + +class SelectDialog(Popup): + + def __init__(self, prompt, items=None, title=None, okBtn="OK", cancelBtn="Cancel", forceSelect=False, + callback=None, *args, **kwargs): + super().__init__(title, *args, **kwargs) + self["class"].append("popup--selectdialog") + + self.callback = callback + self.items = items + assert isinstance(self.items, list) + + # Prompt + if prompt: + lbl = html5.Span() + lbl["class"].append("prompt") + + if isinstance(prompt, html5.Widget): + lbl.appendChild(prompt) + else: + utils.textToHtml(lbl, prompt) + + self.popupBody.appendChild(lbl) + + # Items + if not forceSelect and len(items) <= 3: + for idx, item in enumerate(items): + if isinstance(item, dict): + title = item.get("title") + cssc = item.get("class") + elif isinstance(item, tuple): + title = item[1] + cssc = None + else: + title = item + + btn = Button(title, callback=self.onAnyBtnClick) + btn.idx = idx + + if cssc: + btn.addClass(cssc) + + self.popupBody.appendChild(btn) + else: + self.select = html5.Select() + self.popupBody.appendChild(self.select) + + for idx, item in enumerate(items): + if isinstance(item, dict): + title = item.get("title") + elif isinstance(item, tuple): + title = item[1] + else: + title = item + + opt = html5.Option(title) + opt["value"] = str(idx) + + self.select.appendChild(opt) + + if okBtn: + self.popupFoot.appendChild(Button(okBtn, callback=self.onOkClick)) + + if cancelBtn: + self.popupFoot.appendChild(Button(cancelBtn, callback=self.onCancelClick)) + + def onAnyBtnClick(self, 
sender): + item = self.items[sender.idx] + + if isinstance(item, dict) and item.get("callback") and callable(item["callback"]): + item["callback"](item) + + if self.callback: + self.callback(item) + + self.items = None + self.close() + + def onCancelClick(self, sender=None): + self.close() + + def onOkClick(self, sender=None): + assert self.select["selectedIndex"] >= 0 + item = self.items[int(self.select.children(self.select["selectedIndex"])["value"])] + + if isinstance(item, dict) and item.get("callback") and callable(item["callback"]): + item["callback"](item) + + if self.callback: + self.callback(item) + + self.items = None + self.select = None + self.close() + + +class TextareaDialog(Popup): + def __init__(self, text, value="", successHandler=None, abortHandler=None, successLbl="OK", abortLbl="Cancel", + *args, **kwargs): + super().__init__(*args, **kwargs) + self["class"].append("popup--textareadialog") + + self.successHandler = successHandler + self.abortHandler = abortHandler + + span = html5.Span() + span.element.innerHTML = text + self.popupBody.appendChild(span) + + self.inputElem = html5.Textarea() + self.inputElem["value"] = value + self.popupBody.appendChild(self.inputElem) + + okayBtn = Button(successLbl, self.onOkay) + okayBtn["class"].append("btn--okay") + self.popupFoot.appendChild(okayBtn) + + cancelBtn = Button(abortLbl, self.onCancel) + cancelBtn["class"].append("btn--cancel") + self.popupFoot.appendChild(cancelBtn) + + self.sinkEvent("onKeyDown") + + self.inputElem.focus() + + def onDocumentKeyDown(self, event): + if html5.isEscape(event): + event.stopPropagation() + event.preventDefault() + self.onCancel() + + def onOkay(self, *args, **kwargs): + if self.successHandler: + self.successHandler(self, self.inputElem["value"]) + self.close() + + def onCancel(self, *args, **kwargs): + if self.abortHandler: + self.abortHandler(self, self.inputElem["value"]) + self.close() diff --git a/vendor/lark/docs/ide/app/files.json b/vendor/lark/docs/ide/app/files.json new file mode 100644 index 00000000..b2308999 --- /dev/null +++ b/vendor/lark/docs/ide/app/files.json @@ -0,0 +1,9 @@ +[ + "app.py", + "examples.py", + "html5.py", + "core.py", + "ext.py", + "ignite.py", + "utils.py" +] \ No newline at end of file diff --git a/vendor/lark/docs/ide/app/html5.py b/vendor/lark/docs/ide/app/html5.py new file mode 100644 index 00000000..b62a821b --- /dev/null +++ b/vendor/lark/docs/ide/app/html5.py @@ -0,0 +1,6 @@ +#-*- coding: utf-8 -*- + +from .core import * +from . import ext, utils, ignite + + diff --git a/vendor/lark/docs/ide/app/ignite.py b/vendor/lark/docs/ide/app/ignite.py new file mode 100644 index 00000000..61c10a06 --- /dev/null +++ b/vendor/lark/docs/ide/app/ignite.py @@ -0,0 +1,186 @@ +# -*- coding: utf-8 -*- +from . 
import core as html5 + + +@html5.tag +class Label(html5.Label): + _parserTagName = "ignite-label" + + def __init__(self, *args, **kwargs): + super(Label, self).__init__(style="label ignt-label", *args, **kwargs) + + +@html5.tag +class Input(html5.Input): + _parserTagName = "ignite-input" + + def __init__(self, *args, **kwargs): + super(Input, self).__init__(style="input ignt-input", *args, **kwargs) + + +@html5.tag +class Switch(html5.Div): + _parserTagName = "ignite-switch" + + def __init__(self, *args, **kwargs): + super(Switch, self).__init__(style="switch ignt-switch", *args, **kwargs) + + self.input = html5.Input(style="switch-input") + self.appendChild(self.input) + self.input["type"] = "checkbox" + + switchLabel = html5.Label(forElem=self.input) + switchLabel.addClass("switch-label") + self.appendChild(switchLabel) + + def _setChecked(self, value): + self.input["checked"] = bool(value) + + def _getChecked(self): + return self.input["checked"] + + +@html5.tag +class Check(html5.Input): + _parserTagName = "ignite-check" + + def __init__(self, *args, **kwargs): + super(Check, self).__init__(style="check ignt-check", *args, **kwargs) + + checkInput = html5.Input() + checkInput.addClass("check-input") + checkInput["type"] = "checkbox" + self.appendChild(checkInput) + + checkLabel = html5.Label(forElem=checkInput) + checkLabel.addClass("check-label") + self.appendChild(checkLabel) + + +@html5.tag +class Radio(html5.Div): + _parserTagName = "ignite-radio" + + def __init__(self, *args, **kwargs): + super(Radio, self).__init__(style="radio ignt-radio", *args, **kwargs) + + radioInput = html5.Input() + radioInput.addClass("radio-input") + radioInput["type"] = "radio" + self.appendChild(radioInput) + + radioLabel = html5.Label(forElem=radioInput) + radioLabel.addClass("radio-label") + self.appendChild(radioLabel) + + +@html5.tag +class Select(html5.Select): + _parserTagName = "ignite-select" + + def __init__(self, *args, **kwargs): + super(Select, self).__init__(style="select ignt-select", *args, **kwargs) + + defaultOpt = html5.Option() + defaultOpt["selected"] = True + defaultOpt["disabled"] = True + defaultOpt.element.innerHTML = "" + self.appendChild(defaultOpt) + + +@html5.tag +class Textarea(html5.Textarea): + _parserTagName = "ignite-textarea" + + def __init__(self, *args, **kwargs): + super(Textarea, self).__init__(style="textarea ignt-textarea", *args, **kwargs) + + +@html5.tag +class Progress(html5.Progress): + _parserTagName = "ignite-progress" + + def __init__(self, *args, **kwargs): + super(Progress, self).__init__(style="progress ignt-progress", *args, **kwargs) + + +@html5.tag +class Item(html5.Div): + _parserTagName = "ignite-item" + + def __init__(self, title=None, descr=None, className=None, *args, **kwargs): + super(Item, self).__init__(style="item ignt-item", *args, **kwargs) + if className: + self.addClass(className) + + self.fromHTML(""" +
+            <div class="item-image ignt-item-image" [name]="itemImage">
+            </div>
+            <div class="item-content ignt-item-content" [name]="itemContent">
+                <div class="item-headline ignt-item-headline" [name]="itemHeadline"></div>
+            </div>
+ """) + + if title: + self.itemHeadline.appendChild(html5.TextNode(title)) + + if descr: + self.itemSubline = html5.Div() + self.addClass("item-subline ignt-item-subline") + self.itemSubline.appendChild(html5.TextNode(descr)) + self.appendChild(self.itemSubline) + + +@html5.tag +class Table(html5.Table): + _parserTagName = "ignite-table" + + def __init__(self, *args, **kwargs): + super(Table, self).__init__(*args, **kwargs) + self.head.addClass("ignt-table-head") + self.body.addClass("ignt-table-body") + + def prepareRow(self, row): + assert row >= 0, "Cannot create rows with negative index" + + for child in self.body._children: + row -= child["rowspan"] + if row < 0: + return + + while row >= 0: + tableRow = html5.Tr() + tableRow.addClass("ignt-table-body-row") + self.body.appendChild(tableRow) + row -= 1 + + def prepareCol(self, row, col): + assert col >= 0, "Cannot create cols with negative index" + self.prepareRow(row) + + for rowChild in self.body._children: + row -= rowChild["rowspan"] + + if row < 0: + for colChild in rowChild._children: + col -= colChild["colspan"] + if col < 0: + return + + while col >= 0: + tableCell = html5.Td() + tableCell.addClass("ignt-table-body-cell") + rowChild.appendChild(tableCell) + col -= 1 + + return + def fastGrid( self, rows, cols, createHidden=False ): + colsstr = "".join(['' for i in range(0, cols)]) + tblstr = '' + + for r in range(0, rows): + tblstr += '%s' %("is-hidden" if createHidden else "",colsstr) + tblstr +="" + + self.fromHTML(tblstr) diff --git a/vendor/lark/docs/ide/app/utils.py b/vendor/lark/docs/ide/app/utils.py new file mode 100644 index 00000000..d80f6727 --- /dev/null +++ b/vendor/lark/docs/ide/app/utils.py @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- +from . import core as html5 + +def unescape(val, maxLength = 0): + """ + Unquotes several HTML-quoted characters in a string. + + :param val: The value to be unescaped. + :type val: str + + :param maxLength: Cut-off after maxLength characters. + A value of 0 means "unlimited". (default) + :type maxLength: int + + :returns: The unquoted string. + :rtype: str + """ + val = val \ + .replace("<", "<") \ + .replace(">", ">") \ + .replace(""", "\"") \ + .replace("'", "'") + + if maxLength > 0: + return val[0:maxLength] + + return val + +def doesEventHitWidgetOrParents(event, widget): + """ + Test if event 'event' hits widget 'widget' (or *any* of its parents) + """ + while widget: + if event.target == widget.element: + return widget + + widget = widget.parent() + + return None + +def doesEventHitWidgetOrChildren(event, widget): + """ + Test if event 'event' hits widget 'widget' (or *any* of its children) + """ + if event.target == widget.element: + return widget + + for child in widget.children(): + if doesEventHitWidgetOrChildren(event, child): + return child + + return None + +def textToHtml(node, text): + """ + Generates html nodes from text by splitting text into content and into + line breaks html5.Br. + + :param node: The node where the nodes are appended to. + :param text: The text to be inserted. + """ + + for (i, part) in enumerate(text.split("\n")): + if i > 0: + node.appendChild(html5.Br()) + + node.appendChild(html5.TextNode(part)) + +def parseInt(s, ret = 0): + """ + Parses a value as int + """ + if not isinstance(s, str): + return int(s) + elif s: + if s[0] in "+-": + ts = s[1:] + else: + ts = s + + if ts and all([_ in "0123456789" for _ in ts]): + return int(s) + + return ret + +def parseFloat(s, ret = 0.0): + """ + Parses a value as float. 
+ """ + if not isinstance(s, str): + return float(s) + elif s: + if s[0] in "+-": + ts = s[1:] + else: + ts = s + + if ts and ts.count(".") <= 1 and all([_ in ".0123456789" for _ in ts]): + return float(s) + + return ret diff --git a/vendor/lark/docs/ide/is-loading.gif b/vendor/lark/docs/ide/is-loading.gif new file mode 100644 index 00000000..79a8a679 Binary files /dev/null and b/vendor/lark/docs/ide/is-loading.gif differ diff --git a/vendor/lark/docs/ide/lark-logo.png b/vendor/lark/docs/ide/lark-logo.png new file mode 100644 index 00000000..d87b68f2 Binary files /dev/null and b/vendor/lark/docs/ide/lark-logo.png differ diff --git a/vendor/lark/docs/index.rst b/vendor/lark/docs/index.rst new file mode 100644 index 00000000..ae0c9e6f --- /dev/null +++ b/vendor/lark/docs/index.rst @@ -0,0 +1,121 @@ +.. Lark documentation master file, created by + sphinx-quickstart on Sun Aug 16 13:09:41 2020. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to Lark's documentation! +================================ + +.. toctree:: + :maxdepth: 2 + :caption: Overview + :hidden: + + philosophy + features + parsers + +.. toctree:: + :maxdepth: 2 + :caption: Tutorials & Guides + :hidden: + + json_tutorial + how_to_use + how_to_develop + recipes + examples/index + + +.. toctree:: + :maxdepth: 2 + :caption: Reference + :hidden: + + grammar + tree_construction + classes + visitors + forest + tools + + + +Lark is a modern parsing library for Python. Lark can parse any context-free grammar. + +Lark provides: + +- Advanced grammar language, based on EBNF +- Three parsing algorithms to choose from: Earley, LALR(1) and CYK +- Automatic tree construction, inferred from your grammar +- Fast unicode lexer with regexp support, and automatic line-counting + + +Install Lark +-------------- + +.. code:: bash + + $ pip install lark + +Syntax Highlighting +------------------- + +- `Sublime Text & TextMate`_ +- `Visual Studio Code`_ (Or install through the vscode plugin system) +- `Intellij & PyCharm`_ +- `Vim`_ +- `Atom`_ + +.. _Sublime Text & TextMate: https://github.com/lark-parser/lark_syntax +.. _Visual Studio Code: https://github.com/lark-parser/vscode-lark +.. _Intellij & PyCharm: https://github.com/lark-parser/intellij-syntax-highlighting +.. _Vim: https://github.com/lark-parser/vim-lark-syntax +.. _Atom: https://github.com/Alhadis/language-grammars + +Resources +--------- + +- :doc:`philosophy` +- :doc:`features` +- `Examples`_ +- `Third-party examples`_ +- `Online IDE`_ +- Tutorials + + - `How to write a DSL`_ - Implements a toy LOGO-like language with + an interpreter + - :doc:`json_tutorial` - Teaches you how to use Lark + - Unofficial + + - `Program Synthesis is Possible`_ - Creates a DSL for Z3 + +- Guides + + - :doc:`how_to_use` + - :doc:`how_to_develop` + +- Reference + + - :doc:`grammar` + - :doc:`tree_construction` + - :doc:`visitors` + - :doc:`forest` + - :doc:`classes` + - :doc:`tools` + - `Cheatsheet (PDF)`_ + +- Discussion + + - `Gitter`_ + - `Forum (Google Groups)`_ + + +.. _Examples: https://github.com/lark-parser/lark/tree/master/examples +.. _Third-party examples: https://github.com/ligurio/lark-grammars +.. _Online IDE: https://lark-parser.org/ide +.. _How to write a DSL: http://blog.erezsh.com/how-to-write-a-dsl-in-python-with-lark/ +.. _Program Synthesis is Possible: https://www.cs.cornell.edu/~asampson/blog/minisynth.html +.. _Cheatsheet (PDF): _static/lark_cheatsheet.pdf +.. _Gitter: https://gitter.im/lark-parser/Lobby +.. 
_Forum (Google Groups): https://groups.google.com/forum/#!forum/lark-parser diff --git a/vendor/lark/docs/json_tutorial.md b/vendor/lark/docs/json_tutorial.md new file mode 100644 index 00000000..a46d8f38 --- /dev/null +++ b/vendor/lark/docs/json_tutorial.md @@ -0,0 +1,456 @@ +# JSON parser - Tutorial + +Lark is a parser - a program that accepts a grammar and text, and produces a structured tree that represents that text. +In this tutorial we will write a JSON parser in Lark, and explore Lark's various features in the process. + +It has 5 parts. + + 1. Writing the grammar + 2. Creating the parser + 3. Shaping the tree + 4. Evaluating the tree + 5. Optimizing + +Knowledge assumed: +- Using Python +- A basic understanding of how to use regular expressions + +## Part 1 - The Grammar + +Lark accepts its grammars in a format called [EBNF](https://www.wikiwand.com/en/Extended_Backus%E2%80%93Naur_form). It basically looks like this: + + rule_name : list of rules and TERMINALS to match + | another possible list of items + | etc. + + TERMINAL: "some text to match" + +(*a terminal is a string or a regular expression*) + +The parser will try to match each rule (left-part) by matching its items (right-part) sequentially, trying each alternative (In practice, the parser is predictive so we don't have to try every alternative). + +How to structure those rules is beyond the scope of this tutorial, but often it's enough to follow one's intuition. + +In the case of JSON, the structure is simple: A json document is either a list, or a dictionary, or a string/number/etc. + +The dictionaries and lists are recursive, and contain other json documents (or "values"). + +Let's write this structure in EBNF form: + +```lark + value: dict + | list + | STRING + | NUMBER + | "true" | "false" | "null" + + list : "[" [value ("," value)*] "]" + + dict : "{" [pair ("," pair)*] "}" + pair : STRING ":" value +``` + +A quick explanation of the syntax: + - Parenthesis let us group rules together. + - rule\* means *any amount*. That means, zero or more instances of that rule. + - [rule] means *optional*. That means zero or one instance of that rule. + +Lark also supports the rule+ operator, meaning one or more instances. It also supports the rule? operator which is another way to say *optional*. + +Of course, we still haven't defined "STRING" and "NUMBER". Luckily, both these literals are already defined in Lark's common library: + +```lark + %import common.ESCAPED_STRING -> STRING + %import common.SIGNED_NUMBER -> NUMBER +``` + +The arrow (->) renames the terminals. But that only adds obscurity in this case, so going forward we'll just use their original names. + +We'll also take care of the white-space, which is part of the text, by simply matching and then throwing it away. + +```lark + %import common.WS + %ignore WS +``` + +We tell our parser to ignore whitespace. Otherwise, we'd have to fill our grammar with WS terminals. 
+
+By the way, if you're curious what these terminals signify, they are roughly equivalent to this:
+
+```lark
+    NUMBER : /-?\d+(\.\d+)?([eE][+-]?\d+)?/
+    STRING : /".*?(?<!\\)"/
+```
+
+## Part 2 - Creating the Parser
+
+Once we have our grammar, creating the parser is very simple.
+
+We simply instantiate Lark, and tell it to accept a "value":
+
+```python
+from lark import Lark
+json_parser = Lark(r"""
+    value: dict
+         | list
+         | ESCAPED_STRING
+         | SIGNED_NUMBER
+         | "true" | "false" | "null"
+
+    list : "[" [value ("," value)*] "]"
+
+    dict : "{" [pair ("," pair)*] "}"
+    pair : ESCAPED_STRING ":" value
+
+    %import common.ESCAPED_STRING
+    %import common.SIGNED_NUMBER
+    %import common.WS
+    %ignore WS
+
+    """, start='value')
+```
+
+It's that simple! Let's test it out:
+
+```python
+>>> text = '{"key": ["item0", "item1", 3.14]}'
+>>> json_parser.parse(text)
+Tree(value, [Tree(dict, [Tree(pair, [Token(STRING, "key"), Tree(value, [Tree(list, [Tree(value, [Token(STRING, "item0")]), Tree(value, [Token(STRING, "item1")]), Tree(value, [Token(NUMBER, 3.14)])])])])])])
+>>> print( _.pretty() )
+value
+  dict
+    pair
+      "key"
+      value
+        list
+          value	"item0"
+          value	"item1"
+          value	3.14
+```
+
+As promised, Lark automagically creates a tree that represents the parsed text.
+
+But something is suspiciously missing from the tree. Where are the curly braces, the commas and all the other punctuation literals?
+
+Lark automatically filters out literals from the tree, based on the following criteria:
+
+- Filter out string literals without a name, or with a name that starts with an underscore.
+- Keep regexps, even unnamed ones, unless their name starts with an underscore.
+
+Unfortunately, this means that it will also filter out literals like "true" and "false", and we will lose that information. The next section, "Shaping the tree", deals with this issue, and others.
+
+## Part 3 - Shaping the Tree
+
+We now have a parser that can create a parse tree (or: AST), but the tree has some issues:
+
+1. "true", "false" and "null" are filtered out (test it out yourself!)
+2. It has useless branches, like *value*, that clutter up our view.
+
+I'll present the solution, and then explain it:
+
+```lark
+    ?value: dict
+          | list
+          | string
+          | SIGNED_NUMBER -> number
+          | "true" -> true
+          | "false" -> false
+          | "null" -> null
+
+    ...
+
+    string : ESCAPED_STRING
+```
+
+1. Those little arrows signify *aliases*. An alias is a name for a specific part of the rule. In this case, we will name the *true/false/null* matches, and this way we won't lose the information. We also alias *SIGNED_NUMBER* to mark it for later processing.
+
+2. The question-mark prefixing *value* ("?value") tells the tree-builder to inline this branch if it has only one member. In this case, *value* will always have only one member, and will always be inlined.
+
+3. We turned the *ESCAPED_STRING* terminal into a rule. This way it will appear in the tree as a branch. This is equivalent to aliasing (like we did for the number), but now *string* can also be used elsewhere in the grammar (namely, in the *pair* rule).
+
+Here is the new grammar:
+
+```python
+from lark import Lark
+json_parser = Lark(r"""
+    ?value: dict
+          | list
+          | string
+          | SIGNED_NUMBER -> number
+          | "true" -> true
+          | "false" -> false
+          | "null" -> null
+
+    list : "[" [value ("," value)*] "]"
+
+    dict : "{" [pair ("," pair)*] "}"
+    pair : string ":" value
+
+    string : ESCAPED_STRING
+
+    %import common.ESCAPED_STRING
+    %import common.SIGNED_NUMBER
+    %import common.WS
+    %ignore WS
+
+    """, start='value')
+```
+
+And let's test it out:
+
+```python
+>>> text = '{"key": ["item0", "item1", 3.14, true]}'
+>>> print( json_parser.parse(text).pretty() )
+dict
+  pair
+    string	"key"
+    list
+      string	"item0"
+      string	"item1"
+      number	3.14
+      true
+```
+
+Ah! That is much, much nicer.
+
+## Part 4 - Evaluating the tree
+
+It's nice to have a tree, but what we really want is a JSON object.
+
+The way to do it is to evaluate the tree, using a Transformer.
+
+A transformer is a class with methods corresponding to branch names.
For each branch, the appropriate method will be called with the children of the branch as its argument, and its return value will replace the branch in the tree. + +So let's write a partial transformer, that handles lists and dictionaries: + +```python +from lark import Transformer + +class MyTransformer(Transformer): + def list(self, items): + return list(items) + def pair(self, key_value): + k, v = key_value + return k, v + def dict(self, items): + return dict(items) +``` + +And when we run it, we get this: +```python +>>> tree = json_parser.parse(text) +>>> MyTransformer().transform(tree) +{Tree(string, [Token(ANONRE_1, "key")]): [Tree(string, [Token(ANONRE_1, "item0")]), Tree(string, [Token(ANONRE_1, "item1")]), Tree(number, [Token(ANONRE_0, 3.14)]), Tree(true, [])]} +``` + +This is pretty close. Let's write a full transformer that can handle the terminals too. + +Also, our definitions of list and dict are a bit verbose. We can do better: + +```python +from lark import Transformer + +class TreeToJson(Transformer): + def string(self, s): + (s,) = s + return s[1:-1] + def number(self, n): + (n,) = n + return float(n) + + list = list + pair = tuple + dict = dict + + null = lambda self, _: None + true = lambda self, _: True + false = lambda self, _: False +``` + +And when we run it: + +```python +>>> tree = json_parser.parse(text) +>>> TreeToJson().transform(tree) +{u'key': [u'item0', u'item1', 3.14, True]} +``` +Magic! + +## Part 5 - Optimizing + +### Step 1 - Benchmark + +By now, we have a fully working JSON parser, that can accept a string of JSON, and return its Pythonic representation. + +But how fast is it? + +Now, of course there are JSON libraries for Python written in C, and we can never compete with them. But since this is applicable to any parser you would write in Lark, let's see how far we can take this. + +The first step for optimizing is to have a benchmark. For this benchmark I'm going to take data from [json-generator.com/](http://www.json-generator.com/). I took their default suggestion and changed it to 5000 objects. The result is a 6.6MB sparse JSON file. + +Our first program is going to be just a concatenation of everything we've done so far: + +```python +import sys +from lark import Lark, Transformer + +json_grammar = r""" + ?value: dict + | list + | string + | SIGNED_NUMBER -> number + | "true" -> true + | "false" -> false + | "null" -> null + + list : "[" [value ("," value)*] "]" + + dict : "{" [pair ("," pair)*] "}" + pair : string ":" value + + string : ESCAPED_STRING + + %import common.ESCAPED_STRING + %import common.SIGNED_NUMBER + %import common.WS + %ignore WS + """ + +class TreeToJson(Transformer): + def string(self, s): + (s,) = s + return s[1:-1] + def number(self, n): + (n,) = n + return float(n) + + list = list + pair = tuple + dict = dict + + null = lambda self, _: None + true = lambda self, _: True + false = lambda self, _: False + +json_parser = Lark(json_grammar, start='value', lexer='basic') + +if __name__ == '__main__': + with open(sys.argv[1]) as f: + tree = json_parser.parse(f.read()) + print(TreeToJson().transform(tree)) +``` + +We run it and get this: + + $ time python tutorial_json.py json_data > /dev/null + + real 0m36.257s + user 0m34.735s + sys 0m1.361s + + +That's unsatisfactory time for a 6MB file. Maybe if we were parsing configuration or a small DSL, but we're trying to handle large amount of data here. + +Well, turns out there's quite a bit we can do about it! 
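+
+(As an aside: if the Unix `time` command isn't available, a rough Python-only sketch of the same measurement could look like this — it assumes the program above was saved as `tutorial_json.py`:)
+
+```python
+import sys
+import time
+
+from tutorial_json import json_parser, TreeToJson  # the script shown above
+
+start = time.perf_counter()
+with open(sys.argv[1]) as f:
+    tree = json_parser.parse(f.read())
+result = TreeToJson().transform(tree)
+print("parse + transform took %.2fs" % (time.perf_counter() - start))
+```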
+
+### Step 2 - LALR(1)
+
+So far we've been using the Earley algorithm, which is the default in Lark. Earley is powerful but slow. But it just so happens that our grammar is LR-compatible, and specifically LALR(1)-compatible.
+
+So let's switch to LALR(1) and see what happens:
+
+```python
+json_parser = Lark(json_grammar, start='value', parser='lalr')
+```
+
+    $ time python tutorial_json.py json_data > /dev/null
+
+    real	0m7.554s
+    user	0m7.352s
+    sys	0m0.148s
+
+Ah, that's much better. The resulting JSON is of course exactly the same. You can run it for yourself and see.
+
+It's important to note that not all grammars are LR-compatible, and so you can't always switch to LALR(1). But there's no harm in trying! If Lark lets you build the grammar, it means you're good to go.
+
+### Step 3 - Tree-less LALR(1)
+
+So far, we've built a full parse tree for our JSON, and then transformed it. It's a convenient method, but it's not the most efficient in terms of speed and memory. Luckily, Lark lets us avoid building the tree when parsing with LALR(1).
+
+Here's the way to do it:
+
+```python
+json_parser = Lark(json_grammar, start='value', parser='lalr', transformer=TreeToJson())
+
+if __name__ == '__main__':
+    with open(sys.argv[1]) as f:
+        print( json_parser.parse(f.read()) )
+```
+
+We've used the transformer we've already written, but this time we plug it straight into the parser. Now it can avoid building the parse tree, and just send the data straight into our transformer. The *parse()* method now returns the transformed JSON, instead of a tree.
+
+Let's benchmark it:
+
+    real	0m4.866s
+    user	0m4.722s
+    sys	0m0.121s
+
+That's a measurable improvement! Also, this way is more memory efficient. Check out the benchmark table at the end to see just how much.
+
+As a general practice, it's recommended to work with parse trees, and only skip the tree-builder when your transformer is already working.
+
+### Step 4 - PyPy
+
+PyPy is a JIT engine for running Python, and it's designed to be a drop-in replacement.
+
+Lark is written purely in Python, which makes it very suitable for PyPy.
+
+Let's get some free performance:
+
+    $ time pypy tutorial_json.py json_data > /dev/null
+
+    real	0m1.397s
+    user	0m1.296s
+    sys	0m0.083s
+
+PyPy is awesome!
+
+### Conclusion
+
+We've brought the run-time down from 36 seconds to 1.1 seconds, in a series of small and simple steps.
+
+Now let's compare the benchmarks in a nicely organized table.
+
+I measured memory consumption using a little script called [memusg](https://gist.github.com/netj/526585).
+
+| Code | CPython Time | PyPy Time | CPython Mem | PyPy Mem |
+|:-----|:-------------|:----------|:------------|:---------|
+| Lark - Earley *(with lexer)* | 42s | 4s | 1167M | 608M |
+| Lark - LALR(1) | 8s | 1.53s | 453M | 266M |
+| Lark - LALR(1) tree-less | 4.76s | 1.23s | 70M | 134M |
+| PyParsing ([Parser](https://github.com/pyparsing/pyparsing/blob/master/examples/jsonParser.py)) | 32s | 3.53s | 443M | 225M |
+| funcparserlib ([Parser](https://github.com/vlasovskikh/funcparserlib/blob/master/tests/json.py)) | 8.5s | 1.3s | 483M | 293M |
+| Parsimonious ([Parser](https://gist.github.com/reclosedev/5222560)) | ? | 5.7s | ? | 1545M |
+
+I added a few other parsers for comparison. PyParsing and funcparserlib fare pretty well in their memory usage (they don't build a tree), but they can't compete with the run-time speed of LALR(1).
+
+These benchmarks are for Lark's alpha version.
I already have several optimizations planned that will significantly improve run-time speed. + +Once again, shout-out to PyPy for being so effective. + +## Afterword + +This is the end of the tutorial. I hoped you liked it and learned a little about Lark. + +To see what else you can do with Lark, check out the [examples](/examples). + +Read the documentation here: https://lark-parser.readthedocs.io/en/latest/ diff --git a/vendor/lark/docs/make.bat b/vendor/lark/docs/make.bat new file mode 100644 index 00000000..4f2e2868 --- /dev/null +++ b/vendor/lark/docs/make.bat @@ -0,0 +1,36 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build +set SPHINXPROJ=Lark + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% + +:end +popd diff --git a/vendor/lark/docs/parsers.md b/vendor/lark/docs/parsers.md new file mode 100644 index 00000000..6c99ac59 --- /dev/null +++ b/vendor/lark/docs/parsers.md @@ -0,0 +1,81 @@ +# Parsers +Lark implements the following parsing algorithms: Earley, LALR(1), and CYK + +## Earley + +An [Earley Parser](https://www.wikiwand.com/en/Earley_parser) is a chart parser capable of parsing any context-free grammar at O(n^3), and O(n^2) when the grammar is unambiguous. It can parse most LR grammars at O(n). Most programming languages are LR, and can be parsed at a linear time. + +Lark's Earley implementation runs on top of a skipping chart parser, which allows it to use regular expressions, instead of matching characters one-by-one. This is a huge improvement to Earley that is unique to Lark. This feature is used by default, but can also be requested explicitly using `lexer='dynamic'`. + +It's possible to bypass the dynamic lexing, and use the regular Earley parser with a basic lexer, that tokenizes as an independent first step. Doing so will provide a speed benefit, but will tokenize without using Earley's ambiguity-resolution ability. So choose this only if you know why! Activate with `lexer='basic'` + +**SPPF & Ambiguity resolution** + +Lark implements the Shared Packed Parse Forest data-structure for the Earley parser, in order to reduce the space and computation required to handle ambiguous grammars. + +You can read more about SPPF [here](https://web.archive.org/web/20191229100607/www.bramvandersanden.com/post/2014/06/shared-packed-parse-forest) + +As a result, Lark can efficiently parse and store every ambiguity in the grammar, when using Earley. + +Lark provides the following options to combat ambiguity: + +1) Lark will choose the best derivation for you (default). Users can choose between different disambiguation strategies, and can prioritize (or demote) individual rules over others, using the rule-priority syntax. + +2) Users may choose to receive the set of all possible parse-trees (using ambiguity='explicit'), and choose the best derivation themselves. 
While simple and flexible, it comes at the cost of space and performance, and so it isn't recommended for highly ambiguous grammars, or very long inputs.
+
+3) As an advanced feature, users may use specialized visitors to iterate the SPPF themselves.
+
+**lexer="dynamic_complete"**
+
+Earley's "dynamic" lexer uses regular expressions in order to tokenize the text. It tries every possible combination of terminals, but it matches each terminal exactly once, returning the longest possible match.
+
+That means, for example, that when `lexer="dynamic"` (which is the default), the terminal `/a+/`, when given the text `"aa"`, will return one result, `aa`, even though `a` would also be correct.
+
+This behavior was chosen because it is much faster, and it is usually what you would expect.
+
+Setting `lexer="dynamic_complete"` instructs the lexer to consider every possible regexp match. This ensures that the parser will consider and resolve every ambiguity, even inside the terminals themselves. This lexer provides the same capabilities as scannerless Earley, but with different performance tradeoffs.
+
+Warning: This lexer can be much slower, especially for open-ended terminals such as `/.*/`.
+
+
+## LALR(1)
+
+[LALR(1)](https://www.wikiwand.com/en/LALR_parser) is a very efficient, tried-and-tested parsing algorithm. It's incredibly fast and requires very little memory. It can parse most programming languages (for example, Python and Java).
+
+LALR(1) stands for:
+
+- Left-to-right parsing order
+
+- Rightmost derivation, bottom-up
+
+- Lookahead of 1 token
+
+Lark comes with an efficient implementation that outperforms every other parsing library for Python (including PLY).
+
+Lark extends the traditional YACC-based architecture with a *contextual lexer*, which processes feedback from the parser, making the LALR(1) algorithm stronger than ever.
+
+The contextual lexer communicates with the parser, and uses the parser's lookahead prediction to narrow its choice of terminals. So at each point, the lexer only matches the subgroup of terminals that are legal at that parser state, instead of all of the terminals. It's surprisingly effective at resolving common terminal collisions, and allows one to parse languages that LALR(1) was previously incapable of parsing.
+
+(If you're familiar with YACC, you can think of it as automatic lexer-states)
+
+This is an improvement to LALR(1) that is unique to Lark.
+
+### Grammar constraints in LALR(1)
+
+Due to having only a lookahead of one token, LALR is limited in its ability to choose between rules when they both match the input.
+
+Tips for writing a conforming grammar:
+
+- Try to avoid writing different rules that can match the same sequence of characters.
+
+- For the best performance, prefer left-recursion over right-recursion.
+
+- Consider setting terminal priority only as a last resort.
+
+For a better understanding of these constraints, it's recommended to learn how an SLR parser works. SLR is very similar to LALR but much simpler.
+
+## CYK Parser
+
+A [CYK parser](https://www.wikiwand.com/en/CYK_algorithm) can parse any context-free grammar at O(n^3*|G|).
+
+It's too slow to be practical for simple grammars, but it offers good performance for highly ambiguous grammars.
diff --git a/vendor/lark/docs/philosophy.md b/vendor/lark/docs/philosophy.md
new file mode 100644
index 00000000..ebdbd4fc
--- /dev/null
+++ b/vendor/lark/docs/philosophy.md
@@ -0,0 +1,65 @@
+# Philosophy
+
+Parsers are innately complicated and confusing.
They're difficult to understand, difficult to write, and difficult to use. Even experts on the subject can become baffled by the nuances of these complicated state-machines.
+
+Lark's mission is to make the process of writing them as simple and abstract as possible, by following these design principles:
+
+## Design Principles
+
+1. Readability matters
+
+2. Keep the grammar clean and simple
+
+3. Don't force the user to decide on things that the parser can figure out on its own
+
+4. Usability is more important than performance
+
+5. Performance is still very important
+
+6. Follow the Zen of Python, whenever possible and applicable
+
+
+In accordance with these principles, I arrived at the following design choices:
+
+-----------
+
+## Design Choices
+
+### 1. Separation of code and grammar
+
+Grammars are the de-facto reference for your language, and for the structure of your parse-tree. For any non-trivial language, the conflation of code and grammar always turns out convoluted and difficult to read.
+
+The grammars in Lark are EBNF-inspired, so they are especially easy to read & work with.
+
+### 2. Always build a parse-tree (unless told not to)
+
+Trees are always simpler to work with than state-machines.
+
+1. Trees allow you to see the "state-machine" visually
+
+2. Trees allow your computation to be aware of previous and future states
+
+3. Trees allow you to process the parse in steps, instead of forcing you to do it all at once.
+
+And anyway, every parse-tree can be replayed as a state-machine, so there is no loss of information.
+
+See this answer in more detail [here](https://github.com/erezsh/lark/issues/4).
+
+To improve performance, you can skip building the tree for LALR(1), by providing Lark with a transformer (see the [JSON example](https://github.com/erezsh/lark/blob/master/examples/json_parser.py)).
+
+### 3. Earley is the default
+
+The Earley algorithm can accept *any* context-free grammar you throw at it (i.e. any grammar you can write in EBNF, it can parse). That makes it extremely friendly to beginners, who are not aware of the strange and arbitrary restrictions that LALR(1) places on its grammars.
+
+As the users grow to understand the structure of their grammar, the scope of their target language, and their performance requirements, they may choose to switch over to LALR(1) to gain a huge performance boost, possibly at the cost of some language features.
+
+Both Earley and LALR(1) can use the same grammar, as long as all constraints are satisfied.
+
+In short, "Premature optimization is the root of all evil."
+
+### Other design features
+
+- Automatically resolve terminal collisions whenever possible
+
+- Automatically keep track of line & column numbers
+
diff --git a/vendor/lark/docs/recipes.md b/vendor/lark/docs/recipes.md
new file mode 100644
index 00000000..1aadd047
--- /dev/null
+++ b/vendor/lark/docs/recipes.md
@@ -0,0 +1,174 @@
+# Recipes
+
+A collection of recipes to use Lark and its various features
+
+
+## Use a transformer to parse integer tokens
+
+Transformers are the common interface for processing matched rules and tokens.
+
+They can be used during parsing for better performance.
+
+```python
+from lark import Lark, Transformer
+
+class T(Transformer):
+    def INT(self, tok):
+        "Convert the value of `tok` from string to int, while maintaining line number & column."
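+        # Token.update returns a copy of the token with the new value,
+        # while keeping the original line & column information.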
+ return tok.update(value=int(tok)) + +parser = Lark(""" +start: INT* +%import common.INT +%ignore " " +""", parser="lalr", transformer=T()) + +print(parser.parse('3 14 159')) +``` + +Prints out: + +```python +Tree(start, [Token(INT, 3), Token(INT, 14), Token(INT, 159)]) +``` + + +## Collect all comments with lexer_callbacks + +`lexer_callbacks` can be used to interface with the lexer as it generates tokens. + +It accepts a dictionary of the form + + {TOKEN_TYPE: callback} + +Where callback is of type `f(Token) -> Token` + +It only works with the basic and contextual lexers. + +This has the same effect of using a transformer, but can also process ignored tokens. + +```python +from lark import Lark + +comments = [] + +parser = Lark(""" + start: INT* + + COMMENT: /#.*/ + + %import common (INT, WS) + %ignore COMMENT + %ignore WS +""", parser="lalr", lexer_callbacks={'COMMENT': comments.append}) + +parser.parse(""" +1 2 3 # hello +# world +4 5 6 +""") + +print(comments) +``` + +Prints out: + +```python +[Token(COMMENT, '# hello'), Token(COMMENT, '# world')] +``` + +*Note: We don't have to return a token, because comments are ignored* + +## CollapseAmbiguities + +Parsing ambiguous texts with earley and `ambiguity='explicit'` produces a single tree with `_ambig` nodes to mark where the ambiguity occurred. + +However, it's sometimes more convenient instead to work with a list of all possible unambiguous trees. + +Lark provides a utility transformer for that purpose: + +```python +from lark import Lark, Tree, Transformer +from lark.visitors import CollapseAmbiguities + +grammar = """ + !start: x y + + !x: "a" "b" + | "ab" + | "abc" + + !y: "c" "d" + | "cd" + | "d" + +""" +parser = Lark(grammar, ambiguity='explicit') + +t = parser.parse('abcd') +for x in CollapseAmbiguities().transform(t): + print(x.pretty()) +``` + +This prints out: + + start + x + a + b + y + c + d + + start + x ab + y cd + + start + x abc + y d + +While convenient, this should be used carefully, as highly ambiguous trees will soon create an exponential explosion of such unambiguous derivations. + + +## Keeping track of parents when visiting + +The following visitor assigns a `parent` attribute for every node in the tree. + +If your tree nodes aren't unique (if there is a shared Tree instance), the assert will fail. + +```python +class Parent(Visitor): + def __default__(self, tree): + for subtree in tree.children: + if isinstance(subtree, Tree): + assert not hasattr(subtree, 'parent') + subtree.parent = tree +``` + + +## Unwinding VisitError after a transformer/visitor exception + +Errors that happen inside visitors and transformers get wrapped inside a `VisitError` exception. + +This can often be inconvenient, if you wish the actual error to propagate upwards, or if you want to catch it. + +But, it's easy to unwrap it at the point of calling the transformer, by catching it and raising the `VisitError.orig_exc` attribute. 
+ +For example: +```python +from lark import Lark, Transformer +from lark.visitors import VisitError + +tree = Lark('start: "a"').parse('a') + +class T(Transformer): + def start(self, x): + raise KeyError("Original Exception") + +t = T() +try: + print( t.transform(tree)) +except VisitError as e: + raise e.orig_exc +``` \ No newline at end of file diff --git a/vendor/lark/docs/requirements.txt b/vendor/lark/docs/requirements.txt new file mode 100644 index 00000000..ed224a60 --- /dev/null +++ b/vendor/lark/docs/requirements.txt @@ -0,0 +1,3 @@ +# https://docs.readthedocs.io/en/stable/guides/specifying-dependencies.html#specifying-a-requirements-file +sphinx-gallery +sphinx_markdown_tables \ No newline at end of file diff --git a/vendor/lark/docs/tools.md b/vendor/lark/docs/tools.md new file mode 100644 index 00000000..ee9d2cfc --- /dev/null +++ b/vendor/lark/docs/tools.md @@ -0,0 +1,71 @@ +# Tools (Stand-alone, Nearley) + +## Stand-alone parser + +Lark can generate a stand-alone LALR(1) parser from a grammar. + +The resulting module provides the same interface as Lark, but with a fixed grammar, and reduced functionality. + +Run using: + +```bash +python -m lark.tools.standalone +``` + +For a play-by-play, read the [tutorial](http://blog.erezsh.com/create-a-stand-alone-lalr1-parser-in-python/) + + +## Importing grammars from Nearley.js + +Lark comes with a tool to convert grammars from [Nearley](https://github.com/Hardmath123/nearley), a popular Earley library for Javascript. It uses [Js2Py](https://github.com/PiotrDabkowski/Js2Py) to convert and run the Javascript postprocessing code segments. + +#### Requirements + +1. Install Lark with the `nearley` component: +```bash +pip install lark[nearley] +``` + +2. Acquire a copy of the Nearley codebase. This can be done using: +```bash +git clone https://github.com/Hardmath123/nearley +``` + +#### Usage + +The tool can be run using: + +```bash +python -m lark.tools.nearley +``` + +Here's an example of how to import nearley's calculator example into Lark: + +```bash +git clone https://github.com/Hardmath123/nearley +python -m lark.tools.nearley nearley/examples/calculator/arithmetic.ne main ./nearley > ncalc.py +``` + +You can use the output as a regular python module: + +```python +>>> import ncalc +>>> ncalc.parse('sin(pi/4) ^ e') +0.38981434460254655 +``` + +The Nearley converter also supports an experimental converter for newer JavaScript (ES6+), using the `--es6` flag: + +```bash +git clone https://github.com/Hardmath123/nearley +python -m lark.tools.nearley nearley/examples/calculator/arithmetic.ne main nearley --es6 > ncalc.py +``` + +#### Notes + +- Lark currently cannot import templates from Nearley + +- Lark currently cannot export grammars to Nearley + +These might get added in the future, if enough users ask for them. + diff --git a/vendor/lark/docs/tree_construction.md b/vendor/lark/docs/tree_construction.md new file mode 100644 index 00000000..360b1eca --- /dev/null +++ b/vendor/lark/docs/tree_construction.md @@ -0,0 +1,153 @@ +# Tree Construction Reference + + +Lark builds a tree automatically based on the structure of the grammar, where each rule that is matched becomes a branch (node) in the tree, and its children are its matches, in the order of matching. + +For example, the rule `node: child1 child2` will create a tree node with two children. If it is matched as part of another rule (i.e. if it isn't the root), the new rule's tree node will become its parent. 
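+
+(A minimal sketch to see this in action — the grammar below is invented for illustration, not taken from these docs:)
+
+```python
+from lark import Lark
+
+# `pair` matches two sub-rules, so it becomes a branch with two children.
+parser = Lark(r"""
+    pair: key value
+    key: WORD
+    value: WORD
+
+    %import common.WORD
+    %ignore " "
+""", start="pair")
+
+print(parser.parse("hello world").pretty())
+# pair
+#   key     hello
+#   value   world
+```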
+ +Using `item+` or `item*` will result in a list of items, equivalent to writing `item item item ..`. + +Using `item?` will return the item if it matched, or nothing. + +If `maybe_placeholders=True` (the default), then using `[item]` will return the item if it matched, or the value `None`, if it didn't. + +If `maybe_placeholders=False`, then `[]` behaves like `()?`. + +## Terminals + +Terminals are always values in the tree, never branches. + +Lark filters out certain types of terminals by default, considering them punctuation: + +- Terminals that won't appear in the tree are: + + - Unnamed literals (like `"keyword"` or `"+"`) + - Terminals whose name starts with an underscore (like `_DIGIT`) + +- Terminals that *will* appear in the tree are: + + - Unnamed regular expressions (like `/[0-9]/`) + - Named terminals whose name starts with a letter (like `DIGIT`) + +Note: Terminals composed of literals and other terminals always include the entire match without filtering any part. + +**Example:** +``` +start: PNAME pname + +PNAME: "(" NAME ")" +pname: "(" NAME ")" + +NAME: /\w+/ +%ignore /\s+/ +``` +Lark will parse "(Hello) (World)" as: + + start + (Hello) + pname World + +Rules prefixed with `!` will retain all their literals regardless. + + + + +**Example:** + +```perl + expr: "(" expr ")" + | NAME+ + + NAME: /\w+/ + + %ignore " " +``` + +Lark will parse "((hello world))" as: + + expr + expr + expr + "hello" + "world" + +The brackets do not appear in the tree by design. The words appear because they are matched by a named terminal. + + +## Shaping the tree + +Users can alter the automatic construction of the tree using a collection of grammar features. + + +* Rules whose name begins with an underscore will be inlined into their containing rule. + +**Example:** + +```perl + start: "(" _greet ")" + _greet: /\w+/ /\w+/ +``` + +Lark will parse "(hello world)" as: + + start + "hello" + "world" + + +* Rules that receive a question mark (?) at the beginning of their definition, will be inlined if they have a single child, after filtering. + +**Example:** + +```ruby + start: greet greet + ?greet: "(" /\w+/ ")" + | /\w+/ /\w+/ +``` + +Lark will parse "hello world (planet)" as: + + start + greet + "hello" + "world" + "planet" + +* Rules that begin with an exclamation mark will keep all their terminals (they won't get filtered). + +```perl + !expr: "(" expr ")" + | NAME+ + NAME: /\w+/ + %ignore " " +``` + +Will parse "((hello world))" as: + + expr + ( + expr + ( + expr + hello + world + ) + ) + +Using the `!` prefix is usually a "code smell", and may point to a flaw in your grammar design. + +* Aliases - options in a rule can receive an alias. It will be then used as the branch name for the option, instead of the rule name. + +**Example:** + +```ruby + start: greet greet + greet: "hello" + | "world" -> planet +``` + +Lark will parse "hello world" as: + + start + greet + planet diff --git a/vendor/lark/docs/visitors.rst b/vendor/lark/docs/visitors.rst new file mode 100644 index 00000000..e6f9c190 --- /dev/null +++ b/vendor/lark/docs/visitors.rst @@ -0,0 +1,120 @@ +Transformers & Visitors +======================= + +Transformers & Visitors provide a convenient interface to process the +parse-trees that Lark returns. + +They are used by inheriting from the correct class (visitor or transformer), +and implementing methods corresponding to the rule you wish to process. Each +method accepts the children as an argument. 
That can be modified using the +``v_args`` decorator, which allows one to inline the arguments (akin to ``*args``), +or add the tree ``meta`` property as an argument. + +See: `visitors.py`_ + +.. _visitors.py: https://github.com/lark-parser/lark/blob/master/lark/visitors.py + +Visitor +------- + +Visitors visit each node of the tree, and run the appropriate method on it according to the node's data. + +They work bottom-up, starting with the leaves and ending at the root of the tree. + +There are two classes that implement the visitor interface: + +- ``Visitor``: Visit every node (without recursion) +- ``Visitor_Recursive``: Visit every node using recursion. Slightly faster. + +Example: + :: + + class IncreaseAllNumbers(Visitor): + def number(self, tree): + assert tree.data == "number" + tree.children[0] += 1 + + IncreaseAllNumbers().visit(parse_tree) + +.. autoclass:: lark.visitors.Visitor + :members: visit, visit_topdown, __default__ + +.. autoclass:: lark.visitors.Visitor_Recursive + :members: visit, visit_topdown, __default__ + +Interpreter +----------- + +.. autoclass:: lark.visitors.Interpreter + + +Example: + :: + + class IncreaseSomeOfTheNumbers(Interpreter): + def number(self, tree): + tree.children[0] += 1 + + def skip(self, tree): + # skip this subtree. don't change any number node inside it. + pass + + IncreaseSomeOfTheNumbers().visit(parse_tree) + +Transformer +----------- + +.. autoclass:: lark.visitors.Transformer + :members: transform, __default__, __default_token__, __mul__ + +Example: + :: + + from lark import Tree, Transformer + + class EvalExpressions(Transformer): + def expr(self, args): + return eval(args[0]) + + t = Tree('a', [Tree('expr', ['1+2'])]) + print(EvalExpressions().transform( t )) + + # Prints: Tree(a, [3]) + +Example: + :: + + class T(Transformer): + INT = int + NUMBER = float + def NAME(self, name): + return lookup_dict.get(name, name) + + T(visit_tokens=True).transform(tree) + +.. autoclass:: lark.visitors.Transformer_NonRecursive + +.. autoclass:: lark.visitors.Transformer_InPlace + +.. autoclass:: lark.visitors.Transformer_InPlaceRecursive + +v_args +------ + +.. autofunction:: lark.visitors.v_args + +merge_transformers +------------------ + +.. autofunction:: lark.visitors.merge_transformers + +Discard +------- + +.. autoclass:: lark.visitors.Discard + + +VisitError +---------- + +.. autoclass:: lark.exceptions.VisitError \ No newline at end of file diff --git a/vendor/lark/examples/README.rst b/vendor/lark/examples/README.rst new file mode 100644 index 00000000..26523620 --- /dev/null +++ b/vendor/lark/examples/README.rst @@ -0,0 +1,21 @@ +Examples for Lark +================= + +**How to run the examples**: + +After cloning the repo, open the terminal into the root directory of the +project, and run the following: + +.. code:: bash + + [lark]$ python -m examples. + +For example, the following will parse all the Python files in the +standard library of your local installation: + +.. 
code:: bash + + [lark]$ python -m examples.advanced.python_parser + +Beginner Examples +~~~~~~~~~~~~~~~~~ diff --git a/vendor/poetry-core/poetry/core/json/schemas/__init__.py b/vendor/lark/examples/__init__.py similarity index 100% rename from vendor/poetry-core/poetry/core/json/schemas/__init__.py rename to vendor/lark/examples/__init__.py diff --git a/vendor/lark/examples/advanced/README.rst b/vendor/lark/examples/advanced/README.rst new file mode 100644 index 00000000..96054863 --- /dev/null +++ b/vendor/lark/examples/advanced/README.rst @@ -0,0 +1,2 @@ +Advanced Examples +~~~~~~~~~~~~~~~~~ diff --git a/vendor/lark/examples/advanced/_json_parser.py b/vendor/lark/examples/advanced/_json_parser.py new file mode 100644 index 00000000..27253e25 --- /dev/null +++ b/vendor/lark/examples/advanced/_json_parser.py @@ -0,0 +1,64 @@ +""" +Simple JSON Parser +================== + +The code is short and clear, and outperforms every other parser (that's written in Python). +For an explanation, check out the JSON parser tutorial at /docs/json_tutorial.md + +(this is here for use by the other examples) +""" +import sys + +from lark import Lark, Transformer, v_args + +json_grammar = r""" + ?start: value + + ?value: object + | array + | string + | SIGNED_NUMBER -> number + | "true" -> true + | "false" -> false + | "null" -> null + + array : "[" [value ("," value)*] "]" + object : "{" [pair ("," pair)*] "}" + pair : string ":" value + + string : ESCAPED_STRING + + %import common.ESCAPED_STRING + %import common.SIGNED_NUMBER + %import common.WS + + %ignore WS +""" + + +class TreeToJson(Transformer): + @v_args(inline=True) + def string(self, s): + return s[1:-1].replace('\\"', '"') + + array = list + pair = tuple + object = dict + number = v_args(inline=True)(float) + + null = lambda self, _: None + true = lambda self, _: True + false = lambda self, _: False + + +### Create the JSON parser with Lark, using the LALR algorithm +json_parser = Lark(json_grammar, parser='lalr', + # Using the basic lexer isn't required, and isn't usually recommended. + # But, it's good enough for JSON, and it's slightly faster. + lexer='basic', + # Disabling propagate_positions and placeholders slightly improves speed + propagate_positions=False, + maybe_placeholders=False, + # Using an internal transformer is faster and more memory efficient + transformer=TreeToJson()) + diff --git a/vendor/lark/examples/advanced/conf_earley.py b/vendor/lark/examples/advanced/conf_earley.py new file mode 100644 index 00000000..9b511fa6 --- /dev/null +++ b/vendor/lark/examples/advanced/conf_earley.py @@ -0,0 +1,45 @@ +""" +Earley’s dynamic lexer +====================== + +Demonstrates the power of Earley’s dynamic lexer on a toy configuration language + +Using a lexer for configuration files is tricky, because values don't +have to be surrounded by delimiters. Using a basic lexer for this just won't work. + +In this example we use a dynamic lexer and let the Earley parser resolve the ambiguity. + +Another approach is to use the contextual lexer with LALR. It is less powerful than Earley, +but it can handle some ambiguity when lexing and it's much faster. +See examples/conf_lalr.py for an example of that approach. + +""" +from lark import Lark + +parser = Lark(r""" + start: _NL? section+ + section: "[" NAME "]" _NL item+ + item: NAME "=" VALUE? 
_NL + + NAME: /\w/+ + VALUE: /./+ + + %import common.NEWLINE -> _NL + %import common.WS_INLINE + %ignore WS_INLINE + """, parser="earley") + +def test(): + sample_conf = """ +[bla] + +a=Hello +this="that",4 +empty= +""" + + r = parser.parse(sample_conf) + print (r.pretty()) + +if __name__ == '__main__': + test() diff --git a/vendor/lark/examples/advanced/conf_lalr.py b/vendor/lark/examples/advanced/conf_lalr.py new file mode 100644 index 00000000..450c6440 --- /dev/null +++ b/vendor/lark/examples/advanced/conf_lalr.py @@ -0,0 +1,43 @@ +""" +LALR’s contextual lexer +======================= + +This example demonstrates the power of LALR's contextual lexer, +by parsing a toy configuration language. + +The terminals `NAME` and `VALUE` overlap. They can match the same input. +A basic lexer would arbitrarily choose one over the other, based on priority, +which would lead to a (confusing) parse error. +However, due to the unambiguous structure of the grammar, Lark's LALR(1) algorithm knows +which one of them to expect at each point during the parse. +The lexer then only matches the tokens that the parser expects. +The result is a correct parse, something that is impossible with a regular lexer. + +Another approach is to use the Earley algorithm. +It will handle more cases than the contextual lexer, but at the cost of performance. +See examples/conf_earley.py for an example of that approach. +""" +from lark import Lark + +parser = Lark(r""" + start: _NL? section+ + section: "[" NAME "]" _NL item+ + item: NAME "=" VALUE? _NL + + NAME: /\w/+ + VALUE: /./+ + + %import common.NEWLINE -> _NL + %import common.WS_INLINE + %ignore WS_INLINE + """, parser="lalr") + + +sample_conf = """ +[bla] +a=Hello +this="that",4 +empty= +""" + +print(parser.parse(sample_conf).pretty()) diff --git a/vendor/lark/examples/advanced/create_ast.py b/vendor/lark/examples/advanced/create_ast.py new file mode 100644 index 00000000..95ce520f --- /dev/null +++ b/vendor/lark/examples/advanced/create_ast.py @@ -0,0 +1,121 @@ +""" +Creating an AST from the parse tree +=================================== + + This example demonstrates how to transform a parse-tree into an AST using `lark.ast_utils`. + + create_transformer() collects every subclass of `Ast` subclass from the module, + and creates a Lark transformer that builds the AST with no extra code. + + This example only works with Python 3. +""" + +import sys +from typing import List +from dataclasses import dataclass + +from lark import Lark, ast_utils, Transformer, v_args +from lark.tree import Meta + +this_module = sys.modules[__name__] + + +# +# Define AST +# +class _Ast(ast_utils.Ast): + # This will be skipped by create_transformer(), because it starts with an underscore + pass + +class _Statement(_Ast): + # This will be skipped by create_transformer(), because it starts with an underscore + pass + +@dataclass +class Value(_Ast, ast_utils.WithMeta): + "Uses WithMeta to include line-number metadata in the meta attribute" + meta: Meta + value: object + +@dataclass +class Name(_Ast): + name: str + +@dataclass +class CodeBlock(_Ast, ast_utils.AsList): + # Corresponds to code_block in the grammar + statements: List[_Statement] + +@dataclass +class If(_Statement): + cond: Value + then: CodeBlock + +@dataclass +class SetVar(_Statement): + # Corresponds to set_var in the grammar + name: str + value: Value + +@dataclass +class Print(_Statement): + value: Value + + +class ToAst(Transformer): + # Define extra transformation functions, for rules that don't correspond to an AST class. 
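+    # Methods named after terminals (STRING, DEC_NUMBER) receive the matched
+    # Token, and can convert it into a plain Python value.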
+ + def STRING(self, s): + # Remove quotation marks + return s[1:-1] + + def DEC_NUMBER(self, n): + return int(n) + + @v_args(inline=True) + def start(self, x): + return x + +# +# Define Parser +# + +parser = Lark(""" + start: code_block + + code_block: statement+ + + ?statement: if | set_var | print + + if: "if" value "{" code_block "}" + set_var: NAME "=" value ";" + print: "print" value ";" + + value: name | STRING | DEC_NUMBER + name: NAME + + %import python (NAME, STRING, DEC_NUMBER) + %import common.WS + %ignore WS + """, + parser="lalr", +) + +transformer = ast_utils.create_transformer(this_module, ToAst()) + +def parse(text): + tree = parser.parse(text) + return transformer.transform(tree) + +# +# Test +# + +if __name__ == '__main__': + print(parse(""" + a = 1; + if a { + print "a is 1"; + a = 2; + } + """)) diff --git a/vendor/lark/examples/advanced/custom_lexer.py b/vendor/lark/examples/advanced/custom_lexer.py new file mode 100644 index 00000000..05a5eb5e --- /dev/null +++ b/vendor/lark/examples/advanced/custom_lexer.py @@ -0,0 +1,57 @@ +""" +Custom lexer +============ + +Demonstrates using a custom lexer to parse a non-textual stream of data + +You can use a custom lexer to tokenize text when the lexers offered by Lark +are too slow, or not flexible enough. + +You can also use it (as shown in this example) to tokenize streams of objects. +""" +from lark import Lark, Transformer, v_args +from lark.lexer import Lexer, Token + +class TypeLexer(Lexer): + def __init__(self, lexer_conf): + pass + + def lex(self, data): + for obj in data: + if isinstance(obj, int): + yield Token('INT', obj) + elif isinstance(obj, (type(''), type(u''))): + yield Token('STR', obj) + else: + raise TypeError(obj) + +parser = Lark(""" + start: data_item+ + data_item: STR INT* + + %declare STR INT + """, parser='lalr', lexer=TypeLexer) + + +class ParseToDict(Transformer): + @v_args(inline=True) + def data_item(self, name, *numbers): + return name.value, [n.value for n in numbers] + + start = dict + + +def test(): + data = ['alice', 1, 27, 3, 'bob', 4, 'carrie', 'dan', 8, 6] + + print(data) + + tree = parser.parse(data) + res = ParseToDict().transform(tree) + + print('-->') + print(res) # prints {'alice': [1, 27, 3], 'bob': [4], 'carrie': [], 'dan': [8, 6]} + + +if __name__ == '__main__': + test() diff --git a/vendor/lark/examples/advanced/dynamic_complete.py b/vendor/lark/examples/advanced/dynamic_complete.py new file mode 100644 index 00000000..386936d4 --- /dev/null +++ b/vendor/lark/examples/advanced/dynamic_complete.py @@ -0,0 +1,143 @@ +""" +Using lexer dynamic_complete +============================ + +Demonstrates how to use ``lexer='dynamic_complete'`` and ``ambiguity='explicit'`` + +Sometimes you have data that is highly ambiguous or 'broken' in some sense. +When using ``parser='earley'`` and ``lexer='dynamic_complete'``, Lark will be able +parse just about anything as long as there is a valid way to generate it from +the Grammar, including looking 'into' the Regexes. 
+ +This examples shows how to parse a json input where the quotes have been +replaced by underscores: ``{_foo_:{}, _bar_: [], _baz_: __}`` +Notice that underscores might still appear inside strings, so a potentially +valid reading of the above is: +``{"foo_:{}, _bar": [], "baz": ""}`` +""" +from pprint import pprint + +from lark import Lark, Tree, Transformer, v_args +from lark.visitors import Transformer_InPlace + +GRAMMAR = r""" +%import common.SIGNED_NUMBER +%import common.WS_INLINE +%import common.NEWLINE +%ignore WS_INLINE + +?start: value + +?value: object + | array + | string + | SIGNED_NUMBER -> number + | "true" -> true + | "false" -> false + | "null" -> null + +array : "[" (value ("," value)*)? "]" +object : "{" (pair ("," pair)*)? "}" +pair : string ":" value + +string: STRING +STRING : ESCAPED_STRING + +ESCAPED_STRING: QUOTE_CHAR _STRING_ESC_INNER QUOTE_CHAR +QUOTE_CHAR: "_" + +_STRING_INNER: /.*/ +_STRING_ESC_INNER: _STRING_INNER /(? var + +TEMPLATE_NAME: "$" NAME + +?template_start: (stmt | testlist_star_expr _NEWLINE) + +%ignore /[\t \f]+/ // WS +%ignore /\\[\t \f]*\r?\n/ // LINE_CONT +%ignore COMMENT +""" + +parser = Lark(TEMPLATED_PYTHON, parser='lalr', start=['single_input', 'file_input', 'eval_input', 'template_start'], postlex=PythonIndenter(), maybe_placeholders=False) + + +def parse_template(s): + return parser.parse(s + '\n', start='template_start') + +def parse_code(s): + return parser.parse(s + '\n', start='file_input') + + +# +# 2. Define translations using templates (each template code is parsed to a template tree) +# + +pytemplate = TemplateConf(parse=parse_template) + +translations_3to2 = { + 'yield from $a': + 'for _tmp in $a: yield _tmp', + + 'raise $e from $x': + 'raise $e', + + '$a / $b': + 'float($a) / $b', +} +translations_3to2 = {pytemplate(k): pytemplate(v) for k, v in translations_3to2.items()} + +# +# 3. Translate and reconstruct Python 3 code into valid Python 2 code +# + +python_reconstruct = PythonReconstructor(parser) + +def translate_py3to2(code): + tree = parse_code(code) + tree = TemplateTranslator(translations_3to2).translate(tree) + return python_reconstruct.reconstruct(tree) + + +# +# Test Code +# + +_TEST_CODE = ''' +if a / 2 > 1: + yield from [1,2,3] +else: + raise ValueError(a) from e + +''' + +def test(): + print(_TEST_CODE) + print(' -----> ') + print(translate_py3to2(_TEST_CODE)) + +if __name__ == '__main__': + test() \ No newline at end of file diff --git a/vendor/lark/examples/advanced/python2.lark b/vendor/lark/examples/advanced/python2.lark new file mode 100644 index 00000000..6fbae459 --- /dev/null +++ b/vendor/lark/examples/advanced/python2.lark @@ -0,0 +1,168 @@ +// Python 2 grammar for Lark + +// NOTE: Work in progress!!! (XXX TODO) +// This grammar should parse all python 2.x code successfully, +// but the resulting parse-tree is still not well-organized. + +// Adapted from: https://docs.python.org/2/reference/grammar.html +// Adapted by: Erez Shinan + +// Start symbols for the grammar: +// single_input is a single interactive statement; +// file_input is a module or sequence of commands read from an input file; +// eval_input is the input for the eval() and input() functions. +// NB: compound_stmt in single_input is followed by extra _NEWLINE! +single_input: _NEWLINE | simple_stmt | compound_stmt _NEWLINE +?file_input: (_NEWLINE | stmt)* +eval_input: testlist _NEWLINE? 
+ +decorator: "@" dotted_name [ "(" [arglist] ")" ] _NEWLINE +decorators: decorator+ +decorated: decorators (classdef | funcdef) +funcdef: "def" NAME "(" parameters ")" ":" suite +parameters: [paramlist] +paramlist: param ("," param)* ["," [star_params ["," kw_params] | kw_params]] + | star_params ["," kw_params] + | kw_params +star_params: "*" NAME +kw_params: "**" NAME +param: fpdef ["=" test] +fpdef: NAME | "(" fplist ")" +fplist: fpdef ("," fpdef)* [","] + +?stmt: simple_stmt | compound_stmt +?simple_stmt: small_stmt (";" small_stmt)* [";"] _NEWLINE +?small_stmt: (expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt + | import_stmt | global_stmt | exec_stmt | assert_stmt) +expr_stmt: testlist augassign (yield_expr|testlist) -> augassign2 + | testlist ("=" (yield_expr|testlist))+ -> assign + | testlist + +augassign: ("+=" | "-=" | "*=" | "/=" | "%=" | "&=" | "|=" | "^=" | "<<=" | ">>=" | "**=" | "//=") +// For normal assignments, additional restrictions enforced by the interpreter +print_stmt: "print" ( [ test ("," test)* [","] ] | ">>" test [ ("," test)+ [","] ] ) +del_stmt: "del" exprlist +pass_stmt: "pass" +?flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt +break_stmt: "break" +continue_stmt: "continue" +return_stmt: "return" [testlist] +yield_stmt: yield_expr +raise_stmt: "raise" [test ["," test ["," test]]] +import_stmt: import_name | import_from +import_name: "import" dotted_as_names +import_from: "from" ("."* dotted_name | "."+) "import" ("*" | "(" import_as_names ")" | import_as_names) +?import_as_name: NAME ["as" NAME] +?dotted_as_name: dotted_name ["as" NAME] +import_as_names: import_as_name ("," import_as_name)* [","] +dotted_as_names: dotted_as_name ("," dotted_as_name)* +dotted_name: NAME ("." NAME)* +global_stmt: "global" NAME ("," NAME)* +exec_stmt: "exec" expr ["in" test ["," test]] +assert_stmt: "assert" test ["," test] + +?compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated +if_stmt: "if" test ":" suite ("elif" test ":" suite)* ["else" ":" suite] +while_stmt: "while" test ":" suite ["else" ":" suite] +for_stmt: "for" exprlist "in" testlist ":" suite ["else" ":" suite] +try_stmt: ("try" ":" suite ((except_clause ":" suite)+ ["else" ":" suite] ["finally" ":" suite] | "finally" ":" suite)) +with_stmt: "with" with_item ("," with_item)* ":" suite +with_item: test ["as" expr] +// NB compile.c makes sure that the default except clause is last +except_clause: "except" [test [("as" | ",") test]] +suite: simple_stmt | _NEWLINE _INDENT _NEWLINE? stmt+ _DEDENT _NEWLINE? 
+
+// Backward compatibility cruft to support:
+// [ x for x in lambda: True, lambda: False if x() ]
+// even while also allowing:
+// lambda x: 5 if x else 2
+// (But not a mix of the two)
+testlist_safe: old_test [("," old_test)+ [","]]
+old_test: or_test | old_lambdef
+old_lambdef: "lambda" [paramlist] ":" old_test
+
+?test: or_test ["if" or_test "else" test] | lambdef
+?or_test: and_test ("or" and_test)*
+?and_test: not_test ("and" not_test)*
+?not_test: "not" not_test | comparison
+?comparison: expr (comp_op expr)*
+comp_op: "<"|">"|"=="|">="|"<="|"<>"|"!="|"in"|"not" "in"|"is"|"is" "not"
+?expr: xor_expr ("|" xor_expr)*
+?xor_expr: and_expr ("^" and_expr)*
+?and_expr: shift_expr ("&" shift_expr)*
+?shift_expr: arith_expr (("<<"|">>") arith_expr)*
+?arith_expr: term (("+"|"-") term)*
+?term: factor (("*"|"/"|"%"|"//") factor)*
+?factor: ("+"|"-"|"~") factor | power
+?power: molecule ["**" factor]
+// _trailer: "(" [arglist] ")" | "[" subscriptlist "]" | "." NAME
+?molecule: molecule "(" [arglist] ")" -> func_call
+         | molecule "[" [subscriptlist] "]" -> getitem
+         | molecule "." NAME -> getattr
+         | atom
+?atom: "(" [yield_expr|testlist_comp] ")" -> tuple
+     | "[" [listmaker] "]"
+     | "{" [dictorsetmaker] "}"
+     | "`" testlist1 "`"
+     | "(" test ")"
+     | NAME | number | string+
+listmaker: test ( list_for | ("," test)* [","] )
+?testlist_comp: test ( comp_for | ("," test)+ [","] | ",")
+lambdef: "lambda" [paramlist] ":" test
+?subscriptlist: subscript ("," subscript)* [","]
+subscript: "." "." "." | test | [test] ":" [test] [sliceop]
+sliceop: ":" [test]
+?exprlist: expr ("," expr)* [","]
+?testlist: test ("," test)* [","]
+dictorsetmaker: ( (test ":" test (comp_for | ("," test ":" test)* [","])) | (test (comp_for | ("," test)* [","])) )
+
+classdef: "class" NAME ["(" [testlist] ")"] ":" suite
+
+arglist: (argument ",")* (argument [","]
+                         | star_args ["," kw_args]
+                         | kw_args)
+
+star_args: "*" test
+kw_args: "**" test
+
+
+// The reason that keywords are test nodes instead of NAME is that using NAME
+// results in an ambiguity. ast.c makes sure it's a NAME.
+argument: test [comp_for] | test "=" test
+
+list_iter: list_for | list_if
+list_for: "for" exprlist "in" testlist_safe [list_iter]
+list_if: "if" old_test [list_iter]
+
+comp_iter: comp_for | comp_if
+comp_for: "for" exprlist "in" or_test [comp_iter]
+comp_if: "if" old_test [comp_iter]
+
+testlist1: test ("," test)*
+
+yield_expr: "yield" [testlist]
+
+number: DEC_NUMBER | HEX_NUMBER | OCT_NUMBER | FLOAT | IMAG_NUMBER
+string: STRING | LONG_STRING
+// Tokens
+
+COMMENT: /#[^\n]*/
+_NEWLINE: ( /\r?\n[\t ]*/ | COMMENT )+
+
+STRING : /[ubf]?r?("(?!"").*?(?<!\\)(\\\\)*?"|'(?!'').*?(?<!\\)(\\\\)*?')/i
+LONG_STRING: /[ubf]?r?(""".*?(?<!\\)(\\\\)*?"""|'''.*?(?<!\\)(\\\\)*?''')/is
+DEC_NUMBER: /0|[1-9]\d*l?/i
+HEX_NUMBER: /0x[\da-f]*l?/i
+OCT_NUMBER: /0o?[0-7]*l?/i
+%import common.FLOAT -> FLOAT
+%import common.INT -> _INT
+%import common.CNAME -> NAME
+IMAG_NUMBER: (_INT | FLOAT) ("j"|"J")
+
+
+%ignore /[\t \f]+/  // WS
+%ignore /\\[\t \f]*\r?\n/   // LINE_CONT
+%ignore COMMENT
+%declare _INDENT _DEDENT
+
diff --git a/vendor/lark/examples/advanced/python_parser.py b/vendor/lark/examples/advanced/python_parser.py
new file mode 100644
index 00000000..78f85204
--- /dev/null
+++ b/vendor/lark/examples/advanced/python_parser.py
@@ -0,0 +1,80 @@
+"""
+Grammar-complete Python Parser
+==============================
+
+A fully-working Python 2 & 3 parser (but not production ready yet!)
+ +This example demonstrates usage of the included Python grammars +""" +import sys +import os, os.path +from io import open +import glob, time + +from lark import Lark +from lark.indenter import PythonIndenter + + +kwargs = dict(postlex=PythonIndenter(), start='file_input') + +# Official Python grammar by Lark +python_parser3 = Lark.open_from_package('lark', 'python.lark', ['grammars'], parser='lalr', **kwargs) + +# Local Python2 grammar +python_parser2 = Lark.open('python2.lark', rel_to=__file__, parser='lalr', **kwargs) +python_parser2_earley = Lark.open('python2.lark', rel_to=__file__, parser='earley', lexer='basic', **kwargs) + +try: + xrange +except NameError: + chosen_parser = python_parser3 +else: + chosen_parser = python_parser2 + + +def _read(fn, *args): + kwargs = {'encoding': 'iso-8859-1'} + with open(fn, *args, **kwargs) as f: + return f.read() + +def _get_lib_path(): + if os.name == 'nt': + if 'PyPy' in sys.version: + return os.path.join(sys.prefix, 'lib-python', sys.winver) + else: + return os.path.join(sys.prefix, 'Lib') + else: + return [x for x in sys.path if x.endswith('%s.%s' % sys.version_info[:2])][0] + +def test_python_lib(): + path = _get_lib_path() + + start = time.time() + files = glob.glob(path+'/*.py') + total_kb = 0 + for f in files: + r = _read(os.path.join(path, f)) + kb = len(r) / 1024 + print( '%s -\t%.1f kb' % (f, kb)) + chosen_parser.parse(r + '\n') + total_kb += kb + + end = time.time() + print( "test_python_lib (%d files, %.1f kb), time: %.2f secs"%(len(files), total_kb, end-start) ) + +def test_earley_equals_lalr(): + path = _get_lib_path() + + files = glob.glob(path+'/*.py') + for f in files: + print( f ) + tree1 = python_parser2.parse(_read(os.path.join(path, f)) + '\n') + tree2 = python_parser2_earley.parse(_read(os.path.join(path, f)) + '\n') + assert tree1 == tree2 + + +if __name__ == '__main__': + test_python_lib() + # test_earley_equals_lalr() + # python_parser3.parse(_read(sys.argv[1]) + '\n') + diff --git a/vendor/lark/examples/advanced/qscintilla_json.py b/vendor/lark/examples/advanced/qscintilla_json.py new file mode 100644 index 00000000..62539540 --- /dev/null +++ b/vendor/lark/examples/advanced/qscintilla_json.py @@ -0,0 +1,205 @@ +""" +Syntax Highlighting +=================== + +This example shows how to write a syntax-highlighted editor with Qt and Lark + +Requirements: + + PyQt5==5.10.1 + QScintilla==2.10.4 +""" + +import sys +import textwrap + +from PyQt5.Qt import * # noqa + +from PyQt5.Qsci import QsciScintilla +from PyQt5.Qsci import QsciLexerCustom + +from lark import Lark + + +class LexerJson(QsciLexerCustom): + + def __init__(self, parent=None): + super().__init__(parent) + self.create_parser() + self.create_styles() + + def create_styles(self): + deeppink = QColor(249, 38, 114) + khaki = QColor(230, 219, 116) + mediumpurple = QColor(174, 129, 255) + mediumturquoise = QColor(81, 217, 205) + yellowgreen = QColor(166, 226, 46) + lightcyan = QColor(213, 248, 232) + darkslategrey = QColor(39, 40, 34) + + styles = { + 0: mediumturquoise, + 1: mediumpurple, + 2: yellowgreen, + 3: deeppink, + 4: khaki, + 5: lightcyan + } + + for style, color in styles.items(): + self.setColor(color, style) + self.setPaper(darkslategrey, style) + self.setFont(self.parent().font(), style) + + self.token_styles = { + "COLON": 5, + "COMMA": 5, + "LBRACE": 5, + "LSQB": 5, + "RBRACE": 5, + "RSQB": 5, + "FALSE": 0, + "NULL": 0, + "TRUE": 0, + "STRING": 4, + "NUMBER": 1, + } + + def create_parser(self): + grammar = ''' + anons: ":" "{" "}" "," "[" "]" + TRUE: 
"true" + FALSE: "false" + NULL: "NULL" + %import common.ESCAPED_STRING -> STRING + %import common.SIGNED_NUMBER -> NUMBER + %import common.WS + %ignore WS + ''' + + self.lark = Lark(grammar, parser=None, lexer='basic') + # All tokens: print([t.name for t in self.lark.parser.lexer.tokens]) + + def defaultPaper(self, style): + return QColor(39, 40, 34) + + def language(self): + return "Json" + + def description(self, style): + return {v: k for k, v in self.token_styles.items()}.get(style, "") + + def styleText(self, start, end): + self.startStyling(start) + text = self.parent().text()[start:end] + last_pos = 0 + + try: + for token in self.lark.lex(text): + ws_len = token.start_pos - last_pos + if ws_len: + self.setStyling(ws_len, 0) # whitespace + + token_len = len(bytearray(token, "utf-8")) + self.setStyling( + token_len, self.token_styles.get(token.type, 0)) + + last_pos = token.start_pos + token_len + except Exception as e: + print(e) + + +class EditorAll(QsciScintilla): + + def __init__(self, parent=None): + super().__init__(parent) + + # Set font defaults + font = QFont() + font.setFamily('Consolas') + font.setFixedPitch(True) + font.setPointSize(8) + font.setBold(True) + self.setFont(font) + + # Set margin defaults + fontmetrics = QFontMetrics(font) + self.setMarginsFont(font) + self.setMarginWidth(0, fontmetrics.width("000") + 6) + self.setMarginLineNumbers(0, True) + self.setMarginsForegroundColor(QColor(128, 128, 128)) + self.setMarginsBackgroundColor(QColor(39, 40, 34)) + self.setMarginType(1, self.SymbolMargin) + self.setMarginWidth(1, 12) + + # Set indentation defaults + self.setIndentationsUseTabs(False) + self.setIndentationWidth(4) + self.setBackspaceUnindents(True) + self.setIndentationGuides(True) + + # self.setFolding(QsciScintilla.CircledFoldStyle) + + # Set caret defaults + self.setCaretForegroundColor(QColor(247, 247, 241)) + self.setCaretWidth(2) + + # Set selection color defaults + self.setSelectionBackgroundColor(QColor(61, 61, 52)) + self.resetSelectionForegroundColor() + + # Set multiselection defaults + self.SendScintilla(QsciScintilla.SCI_SETMULTIPLESELECTION, True) + self.SendScintilla(QsciScintilla.SCI_SETMULTIPASTE, 1) + self.SendScintilla( + QsciScintilla.SCI_SETADDITIONALSELECTIONTYPING, True) + + lexer = LexerJson(self) + self.setLexer(lexer) + + +EXAMPLE_TEXT = textwrap.dedent("""\ + { + "_id": "5b05ffcbcf8e597939b3f5ca", + "about": "Excepteur consequat commodo esse voluptate aute aliquip ad sint deserunt commodo eiusmod irure. Sint aliquip sit magna duis eu est culpa aliqua excepteur ut tempor nulla. Aliqua ex pariatur id labore sit. Quis sit ex aliqua veniam exercitation laboris anim adipisicing. 
Lorem nisi reprehenderit ullamco labore qui sit ut aliqua tempor consequat pariatur proident.", + "address": "665 Malbone Street, Thornport, Louisiana, 243", + "age": 23, + "balance": "$3,216.91", + "company": "BULLJUICE", + "email": "elisekelley@bulljuice.com", + "eyeColor": "brown", + "gender": "female", + "guid": "d3a6d865-0f64-4042-8a78-4f53de9b0707", + "index": 0, + "isActive": false, + "isActive2": true, + "latitude": -18.660714, + "longitude": -85.378048, + "name": "Elise Kelley", + "phone": "+1 (808) 543-3966", + "picture": "http://placehold.it/32x32", + "registered": "2017-09-30T03:47:40 -02:00", + "tags": [ + "et", + "nostrud", + "in", + "fugiat", + "incididunt", + "labore", + "nostrud" + ] + }\ + """) + +def main(): + app = QApplication(sys.argv) + ex = EditorAll() + ex.setWindowTitle(__file__) + ex.setText(EXAMPLE_TEXT) + ex.resize(800, 600) + ex.show() + sys.exit(app.exec_()) + + +if __name__ == "__main__": + main() diff --git a/vendor/lark/examples/advanced/reconstruct_json.py b/vendor/lark/examples/advanced/reconstruct_json.py new file mode 100644 index 00000000..201bc32d --- /dev/null +++ b/vendor/lark/examples/advanced/reconstruct_json.py @@ -0,0 +1,50 @@ +""" +Reconstruct a JSON +================== + +Demonstrates the experimental text-reconstruction feature + +The Reconstructor takes a parse tree (already filtered from punctuation, of course), +and reconstructs it into correct text, that can be parsed correctly. +It can be useful for creating "hooks" to alter data before handing it to other parsers. You can also use it to generate samples from scratch. +""" + +import json + +from lark import Lark +from lark.reconstruct import Reconstructor + +from _json_parser import json_grammar + +test_json = ''' + { + "empty_object" : {}, + "empty_array" : [], + "booleans" : { "YES" : true, "NO" : false }, + "numbers" : [ 0, 1, -2, 3.3, 4.4e5, 6.6e-7 ], + "strings" : [ "This", [ "And" , "That", "And a \\"b" ] ], + "nothing" : null + } +''' + +def test_earley(): + + json_parser = Lark(json_grammar, maybe_placeholders=False) + tree = json_parser.parse(test_json) + + new_json = Reconstructor(json_parser).reconstruct(tree) + print (new_json) + print (json.loads(new_json) == json.loads(test_json)) + + +def test_lalr(): + + json_parser = Lark(json_grammar, parser='lalr', maybe_placeholders=False) + tree = json_parser.parse(test_json) + + new_json = Reconstructor(json_parser).reconstruct(tree) + print (new_json) + print (json.loads(new_json) == json.loads(test_json)) + +test_earley() +test_lalr() diff --git a/vendor/lark/examples/advanced/reconstruct_python.py b/vendor/lark/examples/advanced/reconstruct_python.py new file mode 100644 index 00000000..a318485f --- /dev/null +++ b/vendor/lark/examples/advanced/reconstruct_python.py @@ -0,0 +1,86 @@ +""" +Reconstruct Python +================== + +Demonstrates how Lark's experimental text-reconstruction feature can recreate +functional Python code from its parse-tree, using just the correct grammar and +a small formatter. 
+ +""" + +from lark import Token, Lark +from lark.reconstruct import Reconstructor +from lark.indenter import PythonIndenter + +# Official Python grammar by Lark +python_parser3 = Lark.open_from_package('lark', 'python.lark', ['grammars'], + parser='lalr', postlex=PythonIndenter(), start='file_input', + maybe_placeholders=False # Necessary for reconstructor + ) + +SPACE_AFTER = set(',+-*/~@<>="|:') +SPACE_BEFORE = (SPACE_AFTER - set(',:')) | set('\'') + + +def special(sym): + return Token('SPECIAL', sym.name) + +def postproc(items): + stack = ['\n'] + actions = [] + last_was_whitespace = True + for item in items: + if isinstance(item, Token) and item.type == 'SPECIAL': + actions.append(item.value) + else: + if actions: + assert actions[0] == '_NEWLINE' and '_NEWLINE' not in actions[1:], actions + + for a in actions[1:]: + if a == '_INDENT': + stack.append(stack[-1] + ' ' * 4) + else: + assert a == '_DEDENT' + stack.pop() + actions.clear() + yield stack[-1] + last_was_whitespace = True + if not last_was_whitespace: + if item[0] in SPACE_BEFORE: + yield ' ' + yield item + last_was_whitespace = item[-1].isspace() + if not last_was_whitespace: + if item[-1] in SPACE_AFTER: + yield ' ' + last_was_whitespace = True + yield "\n" + + +class PythonReconstructor: + def __init__(self, parser): + self._recons = Reconstructor(parser, {'_NEWLINE': special, '_DEDENT': special, '_INDENT': special}) + + def reconstruct(self, tree): + return self._recons.reconstruct(tree, postproc) + + +def test(): + python_reconstructor = PythonReconstructor(python_parser3) + + self_contents = open(__file__).read() + + tree = python_parser3.parse(self_contents+'\n') + output = python_reconstructor.reconstruct(tree) + + tree_new = python_parser3.parse(output) + print(tree.pretty()) + print(tree_new.pretty()) + # assert tree.pretty() == tree_new.pretty() + assert tree == tree_new + + print(output) + + +if __name__ == '__main__': + test() \ No newline at end of file diff --git a/vendor/lark/examples/advanced/template_lark.lark b/vendor/lark/examples/advanced/template_lark.lark new file mode 100644 index 00000000..296407fe --- /dev/null +++ b/vendor/lark/examples/advanced/template_lark.lark @@ -0,0 +1,56 @@ +start: (_item | _NL)* + +_item: rule + | token + | statement + +_rule_or_token: RULE + | TOKEN +rule: RULE rule_params priority? ":" expansions{_rule_or_token} _NL +token: TOKEN priority? ":" expansions{TOKEN} _NL + +rule_params: ["{" RULE ("," RULE)* "}"] + +priority: "." NUMBER + +statement: "%ignore" expansions{TOKEN} _NL -> ignore + | "%import" import_path{_rule_or_token} ["->" _rule_or_token] _NL -> import + | "%import" import_path{_rule_or_token} name_list{_rule_or_token} _NL -> multi_import + | "%declare" TOKEN+ -> declare + +!import_path{name}: "."? name ("." name)* +name_list{name}: "(" name ("," name)* ")" + +?expansions{name}: alias{name} (_VBAR alias{name})* + +?alias{name}: expansion{name} ["->" RULE] + +?expansion{name}: expr{name}* + +?expr{name}: atom{name} [OP | "~" NUMBER [".." NUMBER]] + +?atom{name}: "(" expansions{name} ")" + | "[" expansions{name} "]" -> maybe + | value{name} + +?value{name}: STRING ".." STRING -> literal_range + | name + | (REGEXP | STRING) -> literal + | name "{" value{name} ("," value{name})* "}" -> template_usage + +_VBAR: _NL? "|" +OP: /[+*]|[?](?![a-z])/ +RULE: /!?[_?]?[a-z][_a-z0-9]*/ +TOKEN: /_?[A-Z][_A-Z0-9]*/ +STRING: _STRING "i"? 
+REGEXP: /\/(?!\/)(\\\/|\\\\|[^\/\n])*?\/[imslux]*/ +_NL: /(\r?\n)+\s*/ + +%import common.ESCAPED_STRING -> _STRING +%import common.INT -> NUMBER +%import common.WS_INLINE + +COMMENT: /\s*/ "//" /[^\n]/* + +%ignore WS_INLINE +%ignore COMMENT diff --git a/vendor/lark/examples/advanced/templates.py b/vendor/lark/examples/advanced/templates.py new file mode 100644 index 00000000..ac59b7a9 --- /dev/null +++ b/vendor/lark/examples/advanced/templates.py @@ -0,0 +1,29 @@ +""" +Templates +========= + +This example shows how to use Lark's templates to achieve cleaner grammars + +""" +from lark import Lark + +grammar = r""" +start: list | dict + +list: "[" _seperated{atom, ","} "]" +dict: "{" _seperated{key_value, ","} "}" +key_value: atom ":" atom + +_seperated{x, sep}: x (sep x)* // Define a sequence of 'x sep x sep x ...' + +atom: NUMBER | ESCAPED_STRING + +%import common (NUMBER, ESCAPED_STRING, WS) +%ignore WS +""" + + +parser = Lark(grammar) + +print(parser.parse('[1, "a", 2]')) +print(parser.parse('{"a": 2, "b": 6}')) \ No newline at end of file diff --git a/vendor/lark/examples/advanced/tree_forest_transformer.py b/vendor/lark/examples/advanced/tree_forest_transformer.py new file mode 100644 index 00000000..7582b577 --- /dev/null +++ b/vendor/lark/examples/advanced/tree_forest_transformer.py @@ -0,0 +1,58 @@ +""" +Transform a Forest +================== + +This example demonstrates how to subclass ``TreeForestTransformer`` to +directly transform a SPPF. +""" + +from lark import Lark +from lark.parsers.earley_forest import TreeForestTransformer, handles_ambiguity, Discard + +class CustomTransformer(TreeForestTransformer): + + @handles_ambiguity + def sentence(self, trees): + return next(tree for tree in trees if tree.data == 'simple') + + def simple(self, children): + children.append('.') + return self.tree_class('simple', children) + + def adj(self, children): + return Discard + + def __default_token__(self, token): + return token.capitalize() + +grammar = """ + sentence: noun verb noun -> simple + | noun verb "like" noun -> comparative + + noun: adj? NOUN + verb: VERB + adj: ADJ + + NOUN: "flies" | "bananas" | "fruit" + VERB: "like" | "flies" + ADJ: "fruit" + + %import common.WS + %ignore WS +""" + +parser = Lark(grammar, start='sentence', ambiguity='forest') +sentence = 'fruit flies like bananas' +forest = parser.parse(sentence) + +tree = CustomTransformer(resolve_ambiguity=False).transform(forest) +print(tree.pretty()) + +# Output: +# +# simple +# noun Flies +# verb Like +# noun Bananas +# . +# diff --git a/vendor/lark/examples/calc.py b/vendor/lark/examples/calc.py new file mode 100644 index 00000000..9e9aa78f --- /dev/null +++ b/vendor/lark/examples/calc.py @@ -0,0 +1,82 @@ +""" +Basic calculator +================ + +A simple example of a REPL calculator + +This example shows how to write a basic calculator with variables. 
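+
+A sample session (results print as floats, since the transformer below maps
+number to float):
+
+    > a = 1+2
+    3.0
+    > 1+a*-3
+    -8.0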
+""" +from lark import Lark, Transformer, v_args + + +try: + input = raw_input # For Python2 compatibility +except NameError: + pass + + +calc_grammar = """ + ?start: sum + | NAME "=" sum -> assign_var + + ?sum: product + | sum "+" product -> add + | sum "-" product -> sub + + ?product: atom + | product "*" atom -> mul + | product "/" atom -> div + + ?atom: NUMBER -> number + | "-" atom -> neg + | NAME -> var + | "(" sum ")" + + %import common.CNAME -> NAME + %import common.NUMBER + %import common.WS_INLINE + + %ignore WS_INLINE +""" + + +@v_args(inline=True) # Affects the signatures of the methods +class CalculateTree(Transformer): + from operator import add, sub, mul, truediv as div, neg + number = float + + def __init__(self): + self.vars = {} + + def assign_var(self, name, value): + self.vars[name] = value + return value + + def var(self, name): + try: + return self.vars[name] + except KeyError: + raise Exception("Variable not found: %s" % name) + + +calc_parser = Lark(calc_grammar, parser='lalr', transformer=CalculateTree()) +calc = calc_parser.parse + + +def main(): + while True: + try: + s = input('> ') + except EOFError: + break + print(calc(s)) + + +def test(): + print(calc("a = 1+2")) + print(calc("1+a*-3")) + + +if __name__ == '__main__': + # test() + main() diff --git a/vendor/lark/examples/composition/README.md b/vendor/lark/examples/composition/README.md new file mode 100644 index 00000000..259a66a5 --- /dev/null +++ b/vendor/lark/examples/composition/README.md @@ -0,0 +1,10 @@ +Grammar Composition +=================== + +This example shows how to do grammar composition in Lark, by creating a new +file format that allows both CSV and JSON to co-exist. + +We show how, by using namespaces, Lark grammars and their transformers can be fully reused - +they don't need to care if their grammar is used directly, or being imported, or who is doing the importing. + +See [``main.py``](main.py) for more details. \ No newline at end of file diff --git a/vendor/lark/examples/composition/combined_csv_and_json.txt b/vendor/lark/examples/composition/combined_csv_and_json.txt new file mode 100644 index 00000000..5b8df820 --- /dev/null +++ b/vendor/lark/examples/composition/combined_csv_and_json.txt @@ -0,0 +1,6 @@ +{"header": ["this", "is", "json", 1111]} +# file lines author +data.json 12 Robin +data.csv 30 erezsh +compiler.py 123123 Megalng +{"footer": "done"} diff --git a/vendor/lark/examples/composition/csv.lark b/vendor/lark/examples/composition/csv.lark new file mode 100644 index 00000000..cc2b675b --- /dev/null +++ b/vendor/lark/examples/composition/csv.lark @@ -0,0 +1,14 @@ +start: header _NL row+ +header: "#" " "? 
(WORD _SEPARATOR?)+ +row: (_anything _SEPARATOR?)+ _NL +_anything: INT | WORD | NON_SEPARATOR_STRING | FLOAT | SIGNED_FLOAT +NON_SEPARATOR_STRING: /[a-zA-z.;\\\/]+/ +_SEPARATOR: /[ ]+/ + | "\t" + | "," + +%import common.NEWLINE -> _NL +%import common.WORD +%import common.INT +%import common.FLOAT +%import common.SIGNED_FLOAT diff --git a/vendor/lark/examples/composition/eval_csv.py b/vendor/lark/examples/composition/eval_csv.py new file mode 100644 index 00000000..8b83f081 --- /dev/null +++ b/vendor/lark/examples/composition/eval_csv.py @@ -0,0 +1,26 @@ +"Transformer for evaluating csv.lark" + +from lark import Transformer + +class CsvTreeToPandasDict(Transformer): + INT = int + FLOAT = float + SIGNED_FLOAT = float + WORD = str + NON_SEPARATOR_STRING = str + + def row(self, children): + return children + + def start(self, children): + data = {} + + header = children[0].children + for heading in header: + data[heading] = [] + + for row in children[1:]: + for i, element in enumerate(row): + data[header[i]].append(element) + + return data diff --git a/vendor/lark/examples/composition/eval_json.py b/vendor/lark/examples/composition/eval_json.py new file mode 100644 index 00000000..c665a197 --- /dev/null +++ b/vendor/lark/examples/composition/eval_json.py @@ -0,0 +1,17 @@ +"Transformer for evaluating json.lark" + +from lark import Transformer, v_args + +class JsonTreeToJson(Transformer): + @v_args(inline=True) + def string(self, s): + return s[1:-1].replace('\\"', '"') + + array = list + pair = tuple + object = dict + number = v_args(inline=True)(float) + + null = lambda self, _: None + true = lambda self, _: True + false = lambda self, _: False diff --git a/vendor/lark/examples/composition/json.lark b/vendor/lark/examples/composition/json.lark new file mode 100644 index 00000000..bb77c353 --- /dev/null +++ b/vendor/lark/examples/composition/json.lark @@ -0,0 +1,19 @@ +?start: value + +?value: object + | array + | string + | SIGNED_NUMBER -> number + | "true" -> true + | "false" -> false + | "null" -> null + +array : "[" _WS? [value ("," _WS? value)*] "]" +object : "{" _WS? [pair ("," _WS? pair)*] "}" +pair : string ":" _WS value + +string : ESCAPED_STRING + +%import common.ESCAPED_STRING +%import common.SIGNED_NUMBER +%import common.WS -> _WS diff --git a/vendor/lark/examples/composition/main.py b/vendor/lark/examples/composition/main.py new file mode 100644 index 00000000..c6f150ff --- /dev/null +++ b/vendor/lark/examples/composition/main.py @@ -0,0 +1,51 @@ +""" +Grammar Composition +=================== + +This example shows how to do grammar composition in Lark, by creating a new +file format that allows both CSV and JSON to co-exist. + +1) We define ``storage.lark``, which imports both ``csv.lark`` and ``json.lark``, + and allows them to be used one after the other. + + In the generated tree, each imported rule/terminal is automatically prefixed (with ``json__`` or ``csv__), + which creates an implicit namespace and allows them to coexist without collisions. + +2) We merge their respective transformers (unaware of each other) into a new base transformer. + The resulting transformer can evaluate both JSON and CSV in the parse tree. + + The methods of each transformer are renamed into their appropriate namespace, using the given prefix. + This approach allows full re-use: the transformers don't need to care if their grammar is used directly, + or being imported, or who is doing the importing. 
+ +""" +from pathlib import Path +from lark import Lark +from json import dumps +from lark.visitors import Transformer, merge_transformers + +from eval_csv import CsvTreeToPandasDict +from eval_json import JsonTreeToJson + +__dir__ = Path(__file__).parent + +class Storage(Transformer): + def start(self, children): + return children + +storage_transformer = merge_transformers(Storage(), csv=CsvTreeToPandasDict(), json=JsonTreeToJson()) + +parser = Lark.open("storage.lark", rel_to=__file__) + +def main(): + json_tree = parser.parse(dumps({"test": "a", "dict": { "list": [1, 1.2] }})) + res = storage_transformer.transform(json_tree) + print("Just JSON: ", res) + + csv_json_tree = parser.parse(open(__dir__ / 'combined_csv_and_json.txt').read()) + res = storage_transformer.transform(csv_json_tree) + print("JSON + CSV: ", dumps(res, indent=2)) + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/vendor/lark/examples/composition/storage.lark b/vendor/lark/examples/composition/storage.lark new file mode 100644 index 00000000..09bb0ae7 --- /dev/null +++ b/vendor/lark/examples/composition/storage.lark @@ -0,0 +1,9 @@ +start: (csv__start | json__start _NL?)+ + +// Renaming of the import variables is required, as they receive the namespace of this file. +// See: https://github.com/lark-parser/lark/pull/973#issuecomment-907287565 +%import .csv.start -> csv__start +%import .json.start -> json__start + +%import .csv._NL -> _NL + diff --git a/vendor/lark/examples/fruitflies.png b/vendor/lark/examples/fruitflies.png new file mode 100644 index 00000000..aad9ecea Binary files /dev/null and b/vendor/lark/examples/fruitflies.png differ diff --git a/vendor/lark/examples/fruitflies.py b/vendor/lark/examples/fruitflies.py new file mode 100644 index 00000000..aca0b1b0 --- /dev/null +++ b/vendor/lark/examples/fruitflies.py @@ -0,0 +1,58 @@ +""" +Handling Ambiguity +================== + +A demonstration of ambiguity + +This example shows how to use get explicit ambiguity from Lark's Earley parser. + +""" +import sys +from lark import Lark, tree + +grammar = """ + sentence: noun verb noun -> simple + | noun verb "like" noun -> comparative + + noun: adj? NOUN + verb: VERB + adj: ADJ + + NOUN: "flies" | "bananas" | "fruit" + VERB: "like" | "flies" + ADJ: "fruit" + + %import common.WS + %ignore WS +""" + +parser = Lark(grammar, start='sentence', ambiguity='explicit') + +sentence = 'fruit flies like bananas' + +def make_png(filename): + tree.pydot__tree_to_png( parser.parse(sentence), filename) + +def make_dot(filename): + tree.pydot__tree_to_dot( parser.parse(sentence), filename) + +if __name__ == '__main__': + print(parser.parse(sentence).pretty()) + # make_png(sys.argv[1]) + # make_dot(sys.argv[1]) + +# Output: +# +# _ambig +# comparative +# noun fruit +# verb flies +# noun bananas +# simple +# noun +# fruit +# flies +# verb like +# noun bananas +# +# (or view a nicer version at "./fruitflies.png") diff --git a/vendor/lark/examples/grammars/README.md b/vendor/lark/examples/grammars/README.md new file mode 100644 index 00000000..cdd3b75b --- /dev/null +++ b/vendor/lark/examples/grammars/README.md @@ -0,0 +1,5 @@ +# Example Grammars + +This directory is a collection of lark grammars, taken from real world projects. 
+ +- [Verilog](verilog.lark) - Taken from https://github.com/circuitgraph/circuitgraph/blob/master/circuitgraph/parsing/verilog.lark \ No newline at end of file diff --git a/vendor/lark/examples/grammars/verilog.lark b/vendor/lark/examples/grammars/verilog.lark new file mode 100644 index 00000000..0120fe71 --- /dev/null +++ b/vendor/lark/examples/grammars/verilog.lark @@ -0,0 +1,135 @@ +// Taken from https://github.com/circuitgraph/circuitgraph/blob/master/circuitgraph/parsing/verilog.lark +// Following https://www.verilog.com/VerilogBNF.html + +// 1. Source Text +start: description* + +?description: module + +module: "module" name_of_module list_of_ports? ";" module_item* "endmodule" + +?name_of_module: IDENTIFIER + +list_of_ports: "(" port ("," port)* ")" + +?port: IDENTIFIER + +?module_item: input_declaration + | output_declaration + | net_declaration + | module_instantiation + | continuous_assign + + +// 2. Declarations +input_declaration: "input" list_of_variables ";" + +output_declaration: "output" list_of_variables ";" + +net_declaration: "wire" list_of_variables ";" + +continuous_assign: "assign" list_of_assignments ";" + +list_of_variables: IDENTIFIER ("," IDENTIFIER)* + +list_of_assignments: assignment ("," assignment)* + + +// 3. Primitive Instances +// These are merged with module instantiations + +// 4. Module Instantiations +module_instantiation: name_of_module module_instance ("," module_instance)* ";" + +module_instance: name_of_instance "(" list_of_module_connections ")" + +?name_of_instance: IDENTIFIER + +list_of_module_connections: module_port_connection ("," module_port_connection)* + | named_port_connection ("," named_port_connection)* + +module_port_connection: expression + +named_port_connection: "." IDENTIFIER "(" expression ")" + + +// 5. Behavioral Statements +assignment: lvalue "=" expression + + +// 6. Specify Section + + +// 7. Expressions +?lvalue: identifier + +expression: condition + +?constant_value: constant_zero + | constant_one + | constant_x + +constant_zero: "1'b0" + | "1'h0" + +constant_one: "1'b1" + | "1'h1" + +constant_x: "1'bx" + | "1'hx" + +?condition : or + | ternary + +?ternary: or "?" or ":" or + +?or : xor + | or_gate + +?or_gate: or "|" xor + +?xor : and + | xor_gate + | xnor_gate + +?xor_gate: xor "^" and + +?xnor_gate: xor "~^" and + | xor "^~" and + +?and : unary + | and_gate + +?and_gate: and "&" unary + +?unary : primary + | not_gate + +not_gate: ( "!" | "~" ) primary + +?primary : IDENTIFIER + | constant_value + | "(" or ")" + + +// 8. General +?identifier: IDENTIFIER + +IDENTIFIER: CNAME + | ESCAPED_IDENTIFIER + + +// Lark +ESCAPED_IDENTIFIER: /\\([^\s]+)/ +COMMENT: "//" /[^\n]*/ NEWLINE +NEWLINE: "\n" +MULTILINE_COMMENT: /\/\*(\*(?!\/)|[^*])*\*\// + +%import common.CNAME +%import common.ESCAPED_STRING +%import common.WS + +%ignore WS +%ignore COMMENT +%ignore MULTILINE_COMMENT +%ignore NEWLINE diff --git a/vendor/lark/examples/indented_tree.py b/vendor/lark/examples/indented_tree.py new file mode 100644 index 00000000..6cdaf374 --- /dev/null +++ b/vendor/lark/examples/indented_tree.py @@ -0,0 +1,55 @@ +""" +Parsing Indentation +=================== + +A demonstration of parsing indentation (“whitespace significant†language) +and the usage of the Indenter class. + +Since indentation is context-sensitive, a postlex stage is introduced to +manufacture INDENT/DEDENT tokens. + +It is crucial for the indenter that the NL_type matches +the spaces (and tabs) after the newline. 
+""" +from lark import Lark +from lark.indenter import Indenter + +tree_grammar = r""" + ?start: _NL* tree + + tree: NAME _NL [_INDENT tree+ _DEDENT] + + %import common.CNAME -> NAME + %import common.WS_INLINE + %declare _INDENT _DEDENT + %ignore WS_INLINE + + _NL: /(\r?\n[\t ]*)+/ +""" + +class TreeIndenter(Indenter): + NL_type = '_NL' + OPEN_PAREN_types = [] + CLOSE_PAREN_types = [] + INDENT_type = '_INDENT' + DEDENT_type = '_DEDENT' + tab_len = 8 + +parser = Lark(tree_grammar, parser='lalr', postlex=TreeIndenter()) + +test_tree = """ +a + b + c + d + e + f + g +""" + +def test(): + print(parser.parse(test_tree).pretty()) + +if __name__ == '__main__': + test() + diff --git a/vendor/lark/examples/json_parser.py b/vendor/lark/examples/json_parser.py new file mode 100644 index 00000000..2f02eddd --- /dev/null +++ b/vendor/lark/examples/json_parser.py @@ -0,0 +1,91 @@ +""" +Simple JSON Parser +================== + +The code is short and clear, and outperforms every other parser (that's written in Python). +For an explanation, check out the JSON parser tutorial at /docs/json_tutorial.md +""" +import sys + +from lark import Lark, Transformer, v_args + +json_grammar = r""" + ?start: value + + ?value: object + | array + | string + | SIGNED_NUMBER -> number + | "true" -> true + | "false" -> false + | "null" -> null + + array : "[" [value ("," value)*] "]" + object : "{" [pair ("," pair)*] "}" + pair : string ":" value + + string : ESCAPED_STRING + + %import common.ESCAPED_STRING + %import common.SIGNED_NUMBER + %import common.WS + + %ignore WS +""" + + +class TreeToJson(Transformer): + @v_args(inline=True) + def string(self, s): + return s[1:-1].replace('\\"', '"') + + array = list + pair = tuple + object = dict + number = v_args(inline=True)(float) + + null = lambda self, _: None + true = lambda self, _: True + false = lambda self, _: False + + +### Create the JSON parser with Lark, using the Earley algorithm +# json_parser = Lark(json_grammar, parser='earley', lexer='basic') +# def parse(x): +# return TreeToJson().transform(json_parser.parse(x)) + +### Create the JSON parser with Lark, using the LALR algorithm +json_parser = Lark(json_grammar, parser='lalr', + # Using the basic lexer isn't required, and isn't usually recommended. + # But, it's good enough for JSON, and it's slightly faster. 
+ lexer='basic', + # Disabling propagate_positions and placeholders slightly improves speed + propagate_positions=False, + maybe_placeholders=False, + # Using an internal transformer is faster and more memory efficient + transformer=TreeToJson()) +parse = json_parser.parse + + +def test(): + test_json = ''' + { + "empty_object" : {}, + "empty_array" : [], + "booleans" : { "YES" : true, "NO" : false }, + "numbers" : [ 0, 1, -2, 3.3, 4.4e5, 6.6e-7 ], + "strings" : [ "This", [ "And" , "That", "And a \\"b" ] ], + "nothing" : null + } + ''' + + j = parse(test_json) + print(j) + import json + assert j == json.loads(test_json) + + +if __name__ == '__main__': + # test() + with open(sys.argv[1]) as f: + print(parse(f.read())) diff --git a/vendor/lark/examples/lark_grammar.py b/vendor/lark/examples/lark_grammar.py new file mode 100644 index 00000000..e23c5e32 --- /dev/null +++ b/vendor/lark/examples/lark_grammar.py @@ -0,0 +1,36 @@ +""" +Lark Grammar +============ + +A reference implementation of the Lark grammar (using LALR(1)) +""" +import lark +from pathlib import Path + +examples_path = Path(__file__).parent +lark_path = Path(lark.__file__).parent + +parser = lark.Lark.open(lark_path / 'grammars/lark.lark', rel_to=__file__, parser="lalr") + + +grammar_files = [ + examples_path / 'advanced/python2.lark', + examples_path / 'relative-imports/multiples.lark', + examples_path / 'relative-imports/multiple2.lark', + examples_path / 'relative-imports/multiple3.lark', + examples_path / 'tests/no_newline_at_end.lark', + examples_path / 'tests/negative_priority.lark', + examples_path / 'standalone/json.lark', + lark_path / 'grammars/common.lark', + lark_path / 'grammars/lark.lark', + lark_path / 'grammars/unicode.lark', + lark_path / 'grammars/python.lark', +] + +def test(): + for grammar_file in grammar_files: + tree = parser.parse(open(grammar_file).read()) + print("All grammars parsed successfully") + +if __name__ == '__main__': + test() diff --git a/vendor/lark/examples/relative-imports/multiple2.lark b/vendor/lark/examples/relative-imports/multiple2.lark new file mode 100644 index 00000000..a65077cd --- /dev/null +++ b/vendor/lark/examples/relative-imports/multiple2.lark @@ -0,0 +1 @@ +start: ("0" | "1")* "0" diff --git a/vendor/lark/examples/relative-imports/multiple3.lark b/vendor/lark/examples/relative-imports/multiple3.lark new file mode 100644 index 00000000..6a67bce7 --- /dev/null +++ b/vendor/lark/examples/relative-imports/multiple3.lark @@ -0,0 +1,5 @@ +start: mod0mod0+ + +mod0mod0: "0" | "1" mod1mod0 +mod1mod0: "1" | "0" mod2mod1 mod1mod0 +mod2mod1: "0" | "1" mod2mod1 diff --git a/vendor/lark/examples/relative-imports/multiples.lark b/vendor/lark/examples/relative-imports/multiples.lark new file mode 100644 index 00000000..35b5d17e --- /dev/null +++ b/vendor/lark/examples/relative-imports/multiples.lark @@ -0,0 +1,5 @@ +start: "2:" multiple2 + | "3:" multiple3 + +%import .multiple2.start -> multiple2 +%import .multiple3.start -> multiple3 diff --git a/vendor/lark/examples/relative-imports/multiples.py b/vendor/lark/examples/relative-imports/multiples.py new file mode 100644 index 00000000..b57d4464 --- /dev/null +++ b/vendor/lark/examples/relative-imports/multiples.py @@ -0,0 +1,28 @@ +# +# This example demonstrates relative imports with rule rewrite +# see multiples.lark +# + +# +# if b is a number written in binary, and m is either 2 or 3, +# the grammar aims to recognise m:b iif b is a multiple of m +# +# for example, 3:1001 is recognised +# because 9 (0b1001) is a multiple of 3 +# + +from 
lark import Lark, UnexpectedInput + +parser = Lark.open('multiples.lark', parser='lalr') + +def is_in_grammar(data): + try: + parser.parse(data) + except UnexpectedInput: + return False + return True + +for n_dec in range(100): + n_bin = bin(n_dec)[2:] + assert is_in_grammar('2:{}'.format(n_bin)) == (n_dec % 2 == 0) + assert is_in_grammar('3:{}'.format(n_bin)) == (n_dec % 3 == 0) diff --git a/vendor/lark/examples/standalone/README.md b/vendor/lark/examples/standalone/README.md new file mode 100644 index 00000000..6ec80355 --- /dev/null +++ b/vendor/lark/examples/standalone/README.md @@ -0,0 +1,20 @@ +# Standalone example + +To initialize, cd to this folder, and run: + + +```bash + ./create_standalone.sh +``` + +Or: +```bash +python -m lark.tools.standalone json.lark > json_parser.py +```` + +Then run using: + +```bash +python json_parser_main.py +``` + diff --git a/vendor/lark/examples/standalone/create_standalone.sh b/vendor/lark/examples/standalone/create_standalone.sh new file mode 100755 index 00000000..d8da6b0d --- /dev/null +++ b/vendor/lark/examples/standalone/create_standalone.sh @@ -0,0 +1,2 @@ +#!/bin/sh +PYTHONPATH=../.. python -m lark.tools.standalone json.lark > json_parser.py diff --git a/vendor/lark/examples/standalone/json.lark b/vendor/lark/examples/standalone/json.lark new file mode 100644 index 00000000..243a2308 --- /dev/null +++ b/vendor/lark/examples/standalone/json.lark @@ -0,0 +1,21 @@ +?start: value + +?value: object + | array + | string + | SIGNED_NUMBER -> number + | "true" -> true + | "false" -> false + | "null" -> null + +array : "[" [value ("," value)*] "]" +object : "{" [pair ("," pair)*] "}" +pair : string ":" value + +string : ESCAPED_STRING + +%import common.ESCAPED_STRING +%import common.SIGNED_NUMBER +%import common.WS + +%ignore WS diff --git a/vendor/lark/examples/standalone/json_parser_main.py b/vendor/lark/examples/standalone/json_parser_main.py new file mode 100644 index 00000000..3d9b5a6d --- /dev/null +++ b/vendor/lark/examples/standalone/json_parser_main.py @@ -0,0 +1,37 @@ +""" +Standalone Parser +=================================== + + This example demonstrates how to generate and use the standalone parser, + using the JSON example. + + See README.md for more details. 
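+
+    Usage sketch (the input file name is illustrative):
+
+        python json_parser_main.py some_file.json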
+""" + +import sys + +from json_parser import Lark_StandAlone, Transformer, v_args + +inline_args = v_args(inline=True) + +class TreeToJson(Transformer): + @inline_args + def string(self, s): + return s[1:-1].replace('\\"', '"') + + array = list + pair = tuple + object = dict + number = inline_args(float) + + null = lambda self, _: None + true = lambda self, _: True + false = lambda self, _: False + + +parser = Lark_StandAlone(transformer=TreeToJson()) + +if __name__ == '__main__': + with open(sys.argv[1]) as f: + print(parser.parse(f.read())) + diff --git a/vendor/lark/examples/tests/negative_priority.lark b/vendor/lark/examples/tests/negative_priority.lark new file mode 100644 index 00000000..e71f5e3e --- /dev/null +++ b/vendor/lark/examples/tests/negative_priority.lark @@ -0,0 +1,2 @@ +start: r +r.-1: "a" diff --git a/vendor/lark/examples/tests/no_newline_at_end.lark b/vendor/lark/examples/tests/no_newline_at_end.lark new file mode 100644 index 00000000..8ab64d7a --- /dev/null +++ b/vendor/lark/examples/tests/no_newline_at_end.lark @@ -0,0 +1 @@ +start: "a" \ No newline at end of file diff --git a/vendor/lark/examples/turtle_dsl.py b/vendor/lark/examples/turtle_dsl.py new file mode 100644 index 00000000..81a9cde4 --- /dev/null +++ b/vendor/lark/examples/turtle_dsl.py @@ -0,0 +1,90 @@ +""" +Turtle DSL +========== + +Implements a LOGO-like toy language for Python’s turtle, with interpreter. +""" + +try: + input = raw_input # For Python2 compatibility +except NameError: + pass + +import turtle + +from lark import Lark + +turtle_grammar = """ + start: instruction+ + + instruction: MOVEMENT NUMBER -> movement + | "c" COLOR [COLOR] -> change_color + | "fill" code_block -> fill + | "repeat" NUMBER code_block -> repeat + + code_block: "{" instruction+ "}" + + MOVEMENT: "f"|"b"|"l"|"r" + COLOR: LETTER+ + + %import common.LETTER + %import common.INT -> NUMBER + %import common.WS + %ignore WS +""" + +parser = Lark(turtle_grammar) + +def run_instruction(t): + if t.data == 'change_color': + turtle.color(*t.children) # We just pass the color names as-is + + elif t.data == 'movement': + name, number = t.children + { 'f': turtle.fd, + 'b': turtle.bk, + 'l': turtle.lt, + 'r': turtle.rt, }[name](int(number)) + + elif t.data == 'repeat': + count, block = t.children + for i in range(int(count)): + run_instruction(block) + + elif t.data == 'fill': + turtle.begin_fill() + run_instruction(t.children[0]) + turtle.end_fill() + + elif t.data == 'code_block': + for cmd in t.children: + run_instruction(cmd) + else: + raise SyntaxError('Unknown instruction: %s' % t.data) + + +def run_turtle(program): + parse_tree = parser.parse(program) + for inst in parse_tree.children: + run_instruction(inst) + +def main(): + while True: + code = input('> ') + try: + run_turtle(code) + except Exception as e: + print(e) + +def test(): + text = """ + c red yellow + fill { repeat 36 { + f200 l170 + }} + """ + run_turtle(text) + +if __name__ == '__main__': + # test() + main() diff --git a/vendor/lark/lark/__init__.py b/vendor/lark/lark/__init__.py new file mode 100644 index 00000000..99af3b37 --- /dev/null +++ b/vendor/lark/lark/__init__.py @@ -0,0 +1,9 @@ +from .utils import logger +from .tree import Tree, ParseTree +from .visitors import Transformer, Visitor, v_args, Discard, Transformer_NonRecursive +from .exceptions import (ParseError, LexError, GrammarError, UnexpectedToken, + UnexpectedInput, UnexpectedCharacters, UnexpectedEOF, LarkError) +from .lexer import Token +from .lark import Lark + +__version__: str = "1.1.2" diff 
--git a/vendor/poetry-core/poetry/core/_vendor/lark/__pyinstaller/__init__.py b/vendor/lark/lark/__pyinstaller/__init__.py similarity index 100% rename from vendor/poetry-core/poetry/core/_vendor/lark/__pyinstaller/__init__.py rename to vendor/lark/lark/__pyinstaller/__init__.py diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/__pyinstaller/hook-lark.py b/vendor/lark/lark/__pyinstaller/hook-lark.py similarity index 100% rename from vendor/poetry-core/poetry/core/_vendor/lark/__pyinstaller/hook-lark.py rename to vendor/lark/lark/__pyinstaller/hook-lark.py diff --git a/vendor/lark/lark/ast_utils.py b/vendor/lark/lark/ast_utils.py new file mode 100644 index 00000000..faa17d0f --- /dev/null +++ b/vendor/lark/lark/ast_utils.py @@ -0,0 +1,59 @@ +""" + Module of utilities for transforming a lark.Tree into a custom Abstract Syntax Tree +""" + +import inspect, re +import types +from typing import Optional, Callable + +from lark import Transformer, v_args + +class Ast: + """Abstract class + + Subclasses will be collected by `create_transformer()` + """ + pass + +class AsList: + """Abstract class + + Subclasses will be instantiated with the parse results as a single list, instead of as arguments. + """ + +class WithMeta: + """Abstract class + + Subclasses will be instantiated with the Meta instance of the tree. (see ``v_args`` for more detail) + """ + pass + +def camel_to_snake(name): + return re.sub(r'(?<!^)(?=[A-Z])', '_', name).lower() + +def create_transformer(ast_module: types.ModuleType, transformer: Optional[Transformer]=None, decorator_factory: Callable=v_args) -> Transformer: + """Collects `Ast` subclasses from the given module, and creates a Lark transformer that builds the AST. + + For each class, we create a corresponding rule in the transformer, with a matching name. + CamelCase names will be converted into snake_case. Example: "CodeBlock" -> "code_block". + + Classes starting with an underscore (`_`) will be skipped. + + Parameters: + ast_module: A Python module containing all the subclasses of ``ast_utils.Ast`` + transformer (Optional[Transformer]): An initial transformer. Its attributes may be overwritten. + decorator_factory (Callable): An optional callable accepting two booleans, inline, and meta, + and returning a decorator for the methods of ``transformer``. (default: ``v_args``).
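A short hypothetical sketch of how `create_transformer()` is meant to be used; the `Pair` class, the grammar, and the input below are invented for illustration (the official lark examples use dataclasses in the same way):

```python
# Hypothetical usage sketch for ast_utils: Ast subclasses in this module
# become transformer callbacks, matched to rules by snake_cased class name.
import sys
from dataclasses import dataclass

from lark import Lark, ast_utils

this_module = sys.modules[__name__]

@dataclass
class Pair(ast_utils.Ast):
    # Collected by create_transformer() and matched to the rule "pair";
    # called inline (not AsList), so it receives the two NAME tokens as args.
    key: str
    value: str

parser = Lark('''
    start: pair+
    pair: NAME "=" NAME
    %import common.CNAME -> NAME
    %ignore " "
''', parser='lalr')

to_ast = ast_utils.create_transformer(this_module)
print(to_ast.transform(parser.parse("a = b  c = d")).children)
# [Pair(key=Token('NAME', 'a'), value=Token('NAME', 'b')), Pair(...)]
```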
+ """ + t = transformer or Transformer() + + for name, obj in inspect.getmembers(ast_module): + if not name.startswith('_') and inspect.isclass(obj): + if issubclass(obj, Ast): + wrapper = decorator_factory(inline=not issubclass(obj, AsList), meta=issubclass(obj, WithMeta)) + obj = wrapper(obj).__get__(t) + setattr(t, camel_to_snake(name), obj) + + return t diff --git a/vendor/lark/lark/common.py b/vendor/lark/lark/common.py new file mode 100644 index 00000000..d716add7 --- /dev/null +++ b/vendor/lark/lark/common.py @@ -0,0 +1,82 @@ +from copy import deepcopy +import sys +from types import ModuleType +from typing import Callable, Collection, Dict, Optional, TYPE_CHECKING + +if TYPE_CHECKING: + from .lark import PostLex + from .lexer import Lexer + from typing import Union, Type + if sys.version_info >= (3, 8): + from typing import Literal + else: + from typing_extensions import Literal + if sys.version_info >= (3, 10): + from typing import TypeAlias + else: + from typing_extensions import TypeAlias + +from .utils import Serialize +from .lexer import TerminalDef, Token + +###{standalone + +_ParserArgType: 'TypeAlias' = 'Literal["earley", "lalr", "cyk", "auto"]' +_LexerArgType: 'TypeAlias' = 'Union[Literal["auto", "basic", "contextual", "dynamic", "dynamic_complete"], Type[Lexer]]' +_Callback = Callable[[Token], Token] + +class LexerConf(Serialize): + __serialize_fields__ = 'terminals', 'ignore', 'g_regex_flags', 'use_bytes', 'lexer_type' + __serialize_namespace__ = TerminalDef, + + terminals: Collection[TerminalDef] + re_module: ModuleType + ignore: Collection[str] + postlex: 'Optional[PostLex]' + callbacks: Dict[str, _Callback] + g_regex_flags: int + skip_validation: bool + use_bytes: bool + lexer_type: Optional[_LexerArgType] + + def __init__(self, terminals: Collection[TerminalDef], re_module: ModuleType, ignore: Collection[str]=(), postlex: 'Optional[PostLex]'=None, callbacks: Optional[Dict[str, _Callback]]=None, g_regex_flags: int=0, skip_validation: bool=False, use_bytes: bool=False): + self.terminals = terminals + self.terminals_by_name = {t.name: t for t in self.terminals} + assert len(self.terminals) == len(self.terminals_by_name) + self.ignore = ignore + self.postlex = postlex + self.callbacks = callbacks or {} + self.g_regex_flags = g_regex_flags + self.re_module = re_module + self.skip_validation = skip_validation + self.use_bytes = use_bytes + self.lexer_type = None + + def _deserialize(self): + self.terminals_by_name = {t.name: t for t in self.terminals} + + def __deepcopy__(self, memo=None): + return type(self)( + deepcopy(self.terminals, memo), + self.re_module, + deepcopy(self.ignore, memo), + deepcopy(self.postlex, memo), + deepcopy(self.callbacks, memo), + deepcopy(self.g_regex_flags, memo), + deepcopy(self.skip_validation, memo), + deepcopy(self.use_bytes, memo), + ) + + +class ParserConf(Serialize): + __serialize_fields__ = 'rules', 'start', 'parser_type' + + def __init__(self, rules, callbacks, start): + assert isinstance(start, list) + self.rules = rules + self.callbacks = callbacks + self.start = start + + self.parser_type = None + +###} diff --git a/vendor/lark/lark/exceptions.py b/vendor/lark/lark/exceptions.py new file mode 100644 index 00000000..da982e33 --- /dev/null +++ b/vendor/lark/lark/exceptions.py @@ -0,0 +1,292 @@ +from .utils import logger, NO_VALUE +from typing import Mapping, Iterable, Callable, Union, TypeVar, Tuple, Any, List, Set, Optional, Collection, TYPE_CHECKING + +if TYPE_CHECKING: + from .lexer import Token + from 
.parsers.lalr_interactive_parser import InteractiveParser + from .tree import Tree + +###{standalone + +class LarkError(Exception): + pass + + +class ConfigurationError(LarkError, ValueError): + pass + + +def assert_config(value, options: Collection, msg='Got %r, expected one of %s'): + if value not in options: + raise ConfigurationError(msg % (value, options)) + + +class GrammarError(LarkError): + pass + + +class ParseError(LarkError): + pass + + +class LexError(LarkError): + pass + +T = TypeVar('T') + +class UnexpectedInput(LarkError): + """UnexpectedInput Error. + + Used as a base class for the following exceptions: + + - ``UnexpectedCharacters``: The lexer encountered an unexpected string + - ``UnexpectedToken``: The parser received an unexpected token + - ``UnexpectedEOF``: The parser expected a token, but the input ended + + After catching one of these exceptions, you may call the following helper methods to create a nicer error message. + """ + line: int + column: int + pos_in_stream = None + state: Any + _terminals_by_name = None + + def get_context(self, text: str, span: int=40) -> str: + """Returns a pretty string pinpointing the error in the text, + with span amount of context characters around it. + + Note: + The parser doesn't hold a copy of the text it has to parse, + so you have to provide it again + """ + assert self.pos_in_stream is not None, self + pos = self.pos_in_stream + start = max(pos - span, 0) + end = pos + span + if not isinstance(text, bytes): + before = text[start:pos].rsplit('\n', 1)[-1] + after = text[pos:end].split('\n', 1)[0] + return before + after + '\n' + ' ' * len(before.expandtabs()) + '^\n' + else: + before = text[start:pos].rsplit(b'\n', 1)[-1] + after = text[pos:end].split(b'\n', 1)[0] + return (before + after + b'\n' + b' ' * len(before.expandtabs()) + b'^\n').decode("ascii", "backslashreplace") + + def match_examples(self, parse_fn: 'Callable[[str], Tree]', + examples: Union[Mapping[T, Iterable[str]], Iterable[Tuple[T, Iterable[str]]]], + token_type_match_fallback: bool=False, + use_accepts: bool=True + ) -> Optional[T]: + """Allows you to detect what's wrong in the input text by matching + against example errors. + + Given a parser instance and a dictionary mapping some label with + some malformed syntax examples, it'll return the label for the + example that best matches the current error. The function will + iterate the dictionary until it finds a matching error, and + return the corresponding value. + + For an example usage, see `examples/error_reporting_lalr.py` + + Parameters: + parse_fn: parse function (usually ``lark_instance.parse``) + examples: dictionary of ``{'example_string': value}``. + use_accepts: Recommended to keep this as ``use_accepts=True``.
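A hedged sketch of the usual `match_examples` pattern, in the spirit of `examples/error_reporting_lalr.py`; the toy grammar and the `MissingClosingBrace` label are invented:

```python
# Hypothetical sketch: labelling parse errors via match_examples.
from lark import Lark, UnexpectedInput

parser = Lark('start: "{" "}"', parser='lalr')

class MissingClosingBrace(SyntaxError):
    pass

def parse(text):
    try:
        return parser.parse(text)
    except UnexpectedInput as u:
        # Each label maps to known-bad inputs that reach the same parser state
        exc_class = u.match_examples(parser.parse, {
            MissingClosingBrace: ['{'],
        })
        if exc_class is None:
            raise
        raise exc_class(u.get_context(text)) from u

parse('{')   # raises MissingClosingBrace, with a caret pinpointing the spot
```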
+ """ + assert self.state is not None, "Not supported for this exception" + + if isinstance(examples, Mapping): + examples = examples.items() + + candidate = (None, False) + for i, (label, example) in enumerate(examples): + assert not isinstance(example, str), "Expecting a list" + + for j, malformed in enumerate(example): + try: + parse_fn(malformed) + except UnexpectedInput as ut: + if ut.state == self.state: + if ( + use_accepts + and isinstance(self, UnexpectedToken) + and isinstance(ut, UnexpectedToken) + and ut.accepts != self.accepts + ): + logger.debug("Different accepts with same state[%d]: %s != %s at example [%s][%s]" % + (self.state, self.accepts, ut.accepts, i, j)) + continue + if ( + isinstance(self, (UnexpectedToken, UnexpectedEOF)) + and isinstance(ut, (UnexpectedToken, UnexpectedEOF)) + ): + if ut.token == self.token: # Try exact match first + logger.debug("Exact Match at example [%s][%s]" % (i, j)) + return label + + if token_type_match_fallback: + # Fallback to token types match + if (ut.token.type == self.token.type) and not candidate[-1]: + logger.debug("Token Type Fallback at example [%s][%s]" % (i, j)) + candidate = label, True + + if candidate[0] is None: + logger.debug("Same State match at example [%s][%s]" % (i, j)) + candidate = label, False + + return candidate[0] + + def _format_expected(self, expected): + if self._terminals_by_name: + d = self._terminals_by_name + expected = [d[t_name].user_repr() if t_name in d else t_name for t_name in expected] + return "Expected one of: \n\t* %s\n" % '\n\t* '.join(expected) + + +class UnexpectedEOF(ParseError, UnexpectedInput): + """An exception that is raised by the parser, when the input ends while it still expects a token. + """ + expected: 'List[Token]' + + def __init__(self, expected, state=None, terminals_by_name=None): + super(UnexpectedEOF, self).__init__() + + self.expected = expected + self.state = state + from .lexer import Token + self.token = Token("", "") # , line=-1, column=-1, pos_in_stream=-1) + self.pos_in_stream = -1 + self.line = -1 + self.column = -1 + self._terminals_by_name = terminals_by_name + + + def __str__(self): + message = "Unexpected end-of-input. " + message += self._format_expected(self.expected) + return message + + +class UnexpectedCharacters(LexError, UnexpectedInput): + """An exception that is raised by the lexer, when it cannot match the next + string of characters to any of its terminals. 
+ """ + + allowed: Set[str] + considered_tokens: Set[Any] + + def __init__(self, seq, lex_pos, line, column, allowed=None, considered_tokens=None, state=None, token_history=None, + terminals_by_name=None, considered_rules=None): + super(UnexpectedCharacters, self).__init__() + + # TODO considered_tokens and allowed can be figured out using state + self.line = line + self.column = column + self.pos_in_stream = lex_pos + self.state = state + self._terminals_by_name = terminals_by_name + + self.allowed = allowed + self.considered_tokens = considered_tokens + self.considered_rules = considered_rules + self.token_history = token_history + + if isinstance(seq, bytes): + self.char = seq[lex_pos:lex_pos + 1].decode("ascii", "backslashreplace") + else: + self.char = seq[lex_pos] + self._context = self.get_context(seq) + + + def __str__(self): + message = "No terminal matches '%s' in the current parser context, at line %d col %d" % (self.char, self.line, self.column) + message += '\n\n' + self._context + if self.allowed: + message += self._format_expected(self.allowed) + if self.token_history: + message += '\nPrevious tokens: %s\n' % ', '.join(repr(t) for t in self.token_history) + return message + + +class UnexpectedToken(ParseError, UnexpectedInput): + """An exception that is raised by the parser, when the token it received + doesn't match any valid step forward. + + Parameters: + token: The mismatched token + expected: The set of expected tokens + considered_rules: Which rules were considered, to deduce the expected tokens + state: A value representing the parser state. Do not rely on its value or type. + interactive_parser: An instance of ``InteractiveParser``, that is initialized to the point of failture, + and can be used for debugging and error handling. + + Note: These parameters are available as attributes of the instance. + """ + + expected: Set[str] + considered_rules: Set[str] + interactive_parser: 'InteractiveParser' + + def __init__(self, token, expected, considered_rules=None, state=None, interactive_parser=None, terminals_by_name=None, token_history=None): + super(UnexpectedToken, self).__init__() + + # TODO considered_rules and expected can be figured out using state + self.line = getattr(token, 'line', '?') + self.column = getattr(token, 'column', '?') + self.pos_in_stream = getattr(token, 'start_pos', None) + self.state = state + + self.token = token + self.expected = expected # XXX deprecate? 
`accepts` is better + self._accepts = NO_VALUE + self.considered_rules = considered_rules + self.interactive_parser = interactive_parser + self._terminals_by_name = terminals_by_name + self.token_history = token_history + + + @property + def accepts(self) -> Set[str]: + if self._accepts is NO_VALUE: + self._accepts = self.interactive_parser and self.interactive_parser.accepts() + return self._accepts + + def __str__(self): + message = ("Unexpected token %r at line %s, column %s.\n%s" + % (self.token, self.line, self.column, self._format_expected(self.accepts or self.expected))) + if self.token_history: + message += "Previous tokens: %r\n" % self.token_history + + return message + + + +class VisitError(LarkError): + """VisitError is raised when visitors are interrupted by an exception + + It provides the following attributes for inspection: + + Parameters: + rule: the name of the visit rule that failed + obj: the tree-node or token that was being processed + orig_exc: the exception that caused it to fail + + Note: These parameters are available as attributes + """ + + obj: 'Union[Tree, Token]' + orig_exc: Exception + + def __init__(self, rule, obj, orig_exc): + message = 'Error trying to process rule "%s":\n\n%s' % (rule, orig_exc) + super(VisitError, self).__init__(message) + + self.rule = rule + self.obj = obj + self.orig_exc = orig_exc + + +class MissingVariableError(LarkError): + pass + +###} diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/grammar.py b/vendor/lark/lark/grammar.py similarity index 78% rename from vendor/poetry-core/poetry/core/_vendor/lark/grammar.py rename to vendor/lark/lark/grammar.py index bb843513..4f4fa90b 100644 --- a/vendor/poetry-core/poetry/core/_vendor/lark/grammar.py +++ b/vendor/lark/lark/grammar.py @@ -1,13 +1,18 @@ +from typing import Optional, Tuple, ClassVar + from .utils import Serialize ###{standalone +TOKEN_DEFAULT_PRIORITY = 0 + class Symbol(Serialize): __slots__ = ('name',) - is_term = NotImplemented + name: str + is_term: ClassVar[bool] = NotImplemented - def __init__(self, name): + def __init__(self, name: str) -> None: self.name = name def __eq__(self, other): @@ -25,11 +30,14 @@ def __repr__(self): fullrepr = property(__repr__) + def renamed(self, f): + return type(self)(f(self.name)) + class Terminal(Symbol): __serialize_fields__ = 'name', 'filter_out' - is_term = True + is_term: ClassVar[bool] = True def __init__(self, name, filter_out=False): self.name = name @@ -39,19 +47,26 @@ def __init__(self, name, filter_out=False): def fullrepr(self): return '%s(%r, %r)' % (type(self).__name__, self.name, self.filter_out) + def renamed(self, f): + return type(self)(f(self.name), self.filter_out) class NonTerminal(Symbol): __serialize_fields__ = 'name', - is_term = False - + is_term: ClassVar[bool] = False class RuleOptions(Serialize): __serialize_fields__ = 'keep_all_tokens', 'expand1', 'priority', 'template_source', 'empty_indices' - def __init__(self, keep_all_tokens=False, expand1=False, priority=None, template_source=None, empty_indices=()): + keep_all_tokens: bool + expand1: bool + priority: Optional[int] + template_source: Optional[str] + empty_indices: Tuple[bool, ...]
+ + def __init__(self, keep_all_tokens: bool=False, expand1: bool=False, priority: Optional[int]=None, template_source: Optional[str]=None, empty_indices: Tuple[bool, ...]=()) -> None: self.keep_all_tokens = keep_all_tokens self.expand1 = expand1 self.priority = priority @@ -104,5 +119,4 @@ def __eq__(self, other): return self.origin == other.origin and self.expansion == other.expansion - ###} diff --git a/vendor/poetry-core/poetry/core/masonry/utils/__init__.py b/vendor/lark/lark/grammars/__init__.py similarity index 100% rename from vendor/poetry-core/poetry/core/masonry/utils/__init__.py rename to vendor/lark/lark/grammars/__init__.py diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/grammars/common.lark b/vendor/lark/lark/grammars/common.lark similarity index 76% rename from vendor/poetry-core/poetry/core/_vendor/lark/grammars/common.lark rename to vendor/lark/lark/grammars/common.lark index a675ca41..d2e86d17 100644 --- a/vendor/poetry-core/poetry/core/_vendor/lark/grammars/common.lark +++ b/vendor/lark/lark/grammars/common.lark @@ -1,3 +1,6 @@ +// Basic terminals for common use + + // // Numbers // @@ -21,7 +24,7 @@ SIGNED_NUMBER: ["+"|"-"] NUMBER // Strings // _STRING_INNER: /.*?/ -_STRING_ESC_INNER: _STRING_INNER /(? ignore + | "%import" import_path ["->" name] -> import + | "%import" import_path name_list -> multi_import + | "%override" rule -> override_rule + | "%declare" name+ -> declare + +!import_path: "."? name ("." name)* +name_list: "(" name ("," name)* ")" + +?expansions: alias (_VBAR alias)* + +?alias: expansion ["->" RULE] + +?expansion: expr* + +?expr: atom [OP | "~" NUMBER [".." NUMBER]] + +?atom: "(" expansions ")" + | "[" expansions "]" -> maybe + | value + +?value: STRING ".." STRING -> literal_range + | name + | (REGEXP | STRING) -> literal + | name "{" value ("," value)* "}" -> template_usage + +name: RULE + | TOKEN + +_VBAR: _NL? "|" +OP: /[+*]|[?](?![a-z])/ +RULE: /!?[_?]?[a-z][_a-z0-9]*/ +TOKEN: /_?[A-Z][_A-Z0-9]*/ +STRING: _STRING "i"? +REGEXP: /\/(?!\/)(\\\/|\\\\|[^\/])*?\/[imslux]*/ +_NL: /(\r?\n)+\s*/ + +%import common.ESCAPED_STRING -> _STRING +%import common.SIGNED_INT -> NUMBER +%import common.WS_INLINE + +COMMENT: /\s*/ "//" /[^\n]/* + +%ignore WS_INLINE +%ignore COMMENT diff --git a/vendor/lark/lark/grammars/python.lark b/vendor/lark/lark/grammars/python.lark new file mode 100644 index 00000000..4ac80b0b --- /dev/null +++ b/vendor/lark/lark/grammars/python.lark @@ -0,0 +1,253 @@ +// Python 3 grammar for Lark + +// This grammar should parse all python 3.x code successfully. + +// Adapted from: https://docs.python.org/3/reference/grammar.html + +// Start symbols for the grammar: +// single_input is a single interactive statement; +// file_input is a module or sequence of commands read from an input file; +// eval_input is the input for the eval() functions. +// NB: compound_stmt in single_input is followed by extra NEWLINE! 
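This bundled grammar is normally loaded with `Lark.open_from_package` together with the `PythonIndenter` postlex (both appear later in this diff); a sketch along the lines of lark's advanced `python_parser` example:

```python
# Sketch: parsing Python source with the bundled python.lark grammar.
from lark import Lark
from lark.indenter import PythonIndenter

python_parser = Lark.open_from_package(
    'lark', 'python.lark', ['grammars'],
    parser='lalr', postlex=PythonIndenter(), start='file_input')

tree = python_parser.parse("def double(x):\n    return x * 2\n")
print(tree.pretty())
```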
+// + +single_input: _NEWLINE | simple_stmt | compound_stmt _NEWLINE +file_input: (_NEWLINE | stmt)* +eval_input: testlist _NEWLINE* + +decorator: "@" dotted_name [ "(" [arguments] ")" ] _NEWLINE +decorators: decorator+ +decorated: decorators (classdef | funcdef | async_funcdef) + +async_funcdef: "async" funcdef +funcdef: "def" NAME "(" [parameters] ")" ["->" test] ":" suite + +parameters: paramvalue ("," paramvalue)* ["," SLASH ("," paramvalue)*] ["," [starparams | kwparams]] + | starparams + | kwparams + +SLASH: "/" // Otherwise it would completely disappear and be indistinguishable in the result +starparams: (starparam | starguard) poststarparams +starparam: "*" typedparam +starguard: "*" +poststarparams: ("," paramvalue)* ["," kwparams] +kwparams: "**" typedparam ","? + +?paramvalue: typedparam ("=" test)? +?typedparam: NAME (":" test)? + + +lambdef: "lambda" [lambda_params] ":" test +lambdef_nocond: "lambda" [lambda_params] ":" test_nocond +lambda_params: lambda_paramvalue ("," lambda_paramvalue)* ["," [lambda_starparams | lambda_kwparams]] + | lambda_starparams + | lambda_kwparams +?lambda_paramvalue: NAME ("=" test)? +lambda_starparams: "*" [NAME] ("," lambda_paramvalue)* ["," [lambda_kwparams]] +lambda_kwparams: "**" NAME ","? + + +?stmt: simple_stmt | compound_stmt +?simple_stmt: small_stmt (";" small_stmt)* [";"] _NEWLINE +?small_stmt: (expr_stmt | assign_stmt | del_stmt | pass_stmt | flow_stmt | import_stmt | global_stmt | nonlocal_stmt | assert_stmt) +expr_stmt: testlist_star_expr +assign_stmt: annassign | augassign | assign + +annassign: testlist_star_expr ":" test ["=" test] +assign: testlist_star_expr ("=" (yield_expr|testlist_star_expr))+ +augassign: testlist_star_expr augassign_op (yield_expr|testlist) +!augassign_op: "+=" | "-=" | "*=" | "@=" | "/=" | "%=" | "&=" | "|=" | "^=" | "<<=" | ">>=" | "**=" | "//=" +?testlist_star_expr: test_or_star_expr + | test_or_star_expr ("," test_or_star_expr)+ ","? -> tuple + | test_or_star_expr "," -> tuple + +// For normal and annotated assignments, additional restrictions enforced by the interpreter +del_stmt: "del" exprlist +pass_stmt: "pass" +?flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt +break_stmt: "break" +continue_stmt: "continue" +return_stmt: "return" [testlist] +yield_stmt: yield_expr +raise_stmt: "raise" [test ["from" test]] +import_stmt: import_name | import_from +import_name: "import" dotted_as_names +// note below: the ("." | "...") is necessary because "..." is tokenized as ELLIPSIS +import_from: "from" (dots? dotted_name | dots) "import" ("*" | "(" import_as_names ")" | import_as_names) +!dots: "."+ +import_as_name: NAME ["as" NAME] +dotted_as_name: dotted_name ["as" NAME] +import_as_names: import_as_name ("," import_as_name)* [","] +dotted_as_names: dotted_as_name ("," dotted_as_name)* +dotted_name: NAME ("."
NAME)* +global_stmt: "global" NAME ("," NAME)* +nonlocal_stmt: "nonlocal" NAME ("," NAME)* +assert_stmt: "assert" test ["," test] + +?compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt +async_stmt: "async" (funcdef | with_stmt | for_stmt) +if_stmt: "if" test ":" suite elifs ["else" ":" suite] +elifs: elif_* +elif_: "elif" test ":" suite +while_stmt: "while" test ":" suite ["else" ":" suite] +for_stmt: "for" exprlist "in" testlist ":" suite ["else" ":" suite] +try_stmt: "try" ":" suite except_clauses ["else" ":" suite] [finally] + | "try" ":" suite finally -> try_finally +finally: "finally" ":" suite +except_clauses: except_clause+ +except_clause: "except" [test ["as" NAME]] ":" suite + +with_stmt: "with" with_items ":" suite +with_items: with_item ("," with_item)* +with_item: test ["as" expr] +// NB compile.c makes sure that the default except clause is last +suite: simple_stmt | _NEWLINE _INDENT stmt+ _DEDENT + +?test: or_test ("if" or_test "else" test)? + | lambdef + | assign_expr + +assign_expr: NAME ":=" test + +?test_nocond: or_test | lambdef_nocond + +?or_test: and_test ("or" and_test)* +?and_test: not_test_ ("and" not_test_)* +?not_test_: "not" not_test_ -> not_test + | comparison +?comparison: expr (comp_op expr)* +star_expr: "*" expr + +?expr: or_expr +?or_expr: xor_expr ("|" xor_expr)* +?xor_expr: and_expr ("^" and_expr)* +?and_expr: shift_expr ("&" shift_expr)* +?shift_expr: arith_expr (_shift_op arith_expr)* +?arith_expr: term (_add_op term)* +?term: factor (_mul_op factor)* +?factor: _unary_op factor | power + +!_unary_op: "+"|"-"|"~" +!_add_op: "+"|"-" +!_shift_op: "<<"|">>" +!_mul_op: "*"|"@"|"/"|"%"|"//" +// <> isn't actually a valid comparison operator in Python. It's here for the +// sake of a __future__ import described in PEP 401 (which really works :-) +!comp_op: "<"|">"|"=="|">="|"<="|"<>"|"!="|"in"|"not" "in"|"is"|"is" "not" + +?power: await_expr ("**" factor)? +?await_expr: AWAIT? atom_expr +AWAIT: "await" + +?atom_expr: atom_expr "(" [arguments] ")" -> funccall + | atom_expr "[" subscriptlist "]" -> getitem + | atom_expr "." NAME -> getattr + | atom + +?atom: "(" yield_expr ")" + | "(" _tuple_inner? ")" -> tuple + | "(" comprehension{test_or_star_expr} ")" -> tuple_comprehension + | "[" _testlist_comp? "]" -> list + | "[" comprehension{test_or_star_expr} "]" -> list_comprehension + | "{" _dict_exprlist? "}" -> dict + | "{" comprehension{key_value} "}" -> dict_comprehension + | "{" _set_exprlist "}" -> set + | "{" comprehension{test} "}" -> set_comprehension + | NAME -> var + | number + | string_concat + | "(" test ")" + | "..." 
-> ellipsis + | "None" -> const_none + | "True" -> const_true + | "False" -> const_false + + +?string_concat: string+ + +_testlist_comp: test | _tuple_inner +_tuple_inner: test_or_star_expr (("," test_or_star_expr)+ [","] | ",") + + +?test_or_star_expr: test + | star_expr + +?subscriptlist: subscript + | subscript (("," subscript)+ [","] | ",") -> subscript_tuple +?subscript: test | ([test] ":" [test] [sliceop]) -> slice +sliceop: ":" [test] +?exprlist: (expr|star_expr) + | (expr|star_expr) (("," (expr|star_expr))+ [","]|",") +?testlist: test | testlist_tuple +testlist_tuple: test (("," test)+ [","] | ",") +_dict_exprlist: (key_value | "**" expr) ("," (key_value | "**" expr))* [","] + +key_value: test ":" test + +_set_exprlist: test_or_star_expr ("," test_or_star_expr)* [","] + +classdef: "class" NAME ["(" [arguments] ")"] ":" suite + + + +arguments: argvalue ("," argvalue)* ("," [ starargs | kwargs])? + | starargs + | kwargs + | comprehension{test} + +starargs: stararg ("," stararg)* ("," argvalue)* ["," kwargs] +stararg: "*" test +kwargs: "**" test ("," argvalue)* + +?argvalue: test ("=" test)? + + +comprehension{comp_result}: comp_result comp_fors [comp_if] +comp_fors: comp_for+ +comp_for: [ASYNC] "for" exprlist "in" or_test +ASYNC: "async" +?comp_if: "if" test_nocond + +// not used in grammar, but may appear in "node" passed from Parser to Compiler +encoding_decl: NAME + +yield_expr: "yield" [testlist] + | "yield" "from" test -> yield_from + +number: DEC_NUMBER | HEX_NUMBER | BIN_NUMBER | OCT_NUMBER | FLOAT_NUMBER | IMAG_NUMBER +string: STRING | LONG_STRING + +// Other terminals + +_NEWLINE: ( /\r?\n[\t ]*/ | COMMENT )+ + +%ignore /[\t \f]+/ // WS +%ignore /\\[\t \f]*\r?\n/ // LINE_CONT +%ignore COMMENT +%declare _INDENT _DEDENT + + +// Python terminals + +NAME: /[^\W\d]\w*/ +COMMENT: /#[^\n]*/ + +STRING: /([ubf]?r?|r[ubf])("(?!"").*?(? None: + self.paren_level = 0 + self.indent_level = [0] + assert self.tab_len > 0 + + def handle_NL(self, token: Token) -> Iterator[Token]: + if self.paren_level > 0: + return + + yield token + + indent_str = token.rsplit('\n', 1)[1] # Tabs and spaces + indent = indent_str.count(' ') + indent_str.count('\t') * self.tab_len + + if indent > self.indent_level[-1]: + self.indent_level.append(indent) + yield Token.new_borrow_pos(self.INDENT_type, indent_str, token) + else: + while indent < self.indent_level[-1]: + self.indent_level.pop() + yield Token.new_borrow_pos(self.DEDENT_type, indent_str, token) + + if indent != self.indent_level[-1]: + raise DedentError('Unexpected dedent to column %s. Expected dedent to %s' % (indent, self.indent_level[-1])) + + def _process(self, stream): + for token in stream: + if token.type == self.NL_type: + yield from self.handle_NL(token) + else: + yield token + + if token.type in self.OPEN_PAREN_types: + self.paren_level += 1 + elif token.type in self.CLOSE_PAREN_types: + self.paren_level -= 1 + assert self.paren_level >= 0 + + while len(self.indent_level) > 1: + self.indent_level.pop() + yield Token(self.DEDENT_type, '') + + assert self.indent_level == [0], self.indent_level + + def process(self, stream): + self.paren_level = 0 + self.indent_level = [0] + return self._process(stream) + + # XXX Hack for ContextualLexer. Maybe there's a more elegant solution? 
+ @property + def always_accept(self): + return (self.NL_type,) + + @property + @abstractmethod + def NL_type(self) -> str: + raise NotImplementedError() + + @property + @abstractmethod + def OPEN_PAREN_types(self) -> List[str]: + raise NotImplementedError() + + @property + @abstractmethod + def CLOSE_PAREN_types(self) -> List[str]: + raise NotImplementedError() + + @property + @abstractmethod + def INDENT_type(self) -> str: + raise NotImplementedError() + + @property + @abstractmethod + def DEDENT_type(self) -> str: + raise NotImplementedError() + + @property + @abstractmethod + def tab_len(self) -> int: + raise NotImplementedError() + + +class PythonIndenter(Indenter): + NL_type = '_NEWLINE' + OPEN_PAREN_types = ['LPAR', 'LSQB', 'LBRACE'] + CLOSE_PAREN_types = ['RPAR', 'RSQB', 'RBRACE'] + INDENT_type = '_INDENT' + DEDENT_type = '_DEDENT' + tab_len = 8 + +###} diff --git a/vendor/lark/lark/lark.py b/vendor/lark/lark/lark.py new file mode 100644 index 00000000..afebdd14 --- /dev/null +++ b/vendor/lark/lark/lark.py @@ -0,0 +1,628 @@ +from abc import ABC, abstractmethod +import sys, os, pickle, hashlib +import tempfile +from typing import ( + TypeVar, Type, List, Dict, Iterator, Callable, Union, Optional, Sequence, + Tuple, Iterable, IO, Any, TYPE_CHECKING, Collection +) +if TYPE_CHECKING: + from .parsers.lalr_interactive_parser import InteractiveParser + from .tree import ParseTree + from .visitors import Transformer + if sys.version_info >= (3, 8): + from typing import Literal + else: + from typing_extensions import Literal + +from .exceptions import ConfigurationError, assert_config, UnexpectedInput +from .utils import Serialize, SerializeMemoizer, FS, isascii, logger +from .load_grammar import load_grammar, FromPackageLoader, Grammar, verify_used_files, PackageResource +from .tree import Tree +from .common import LexerConf, ParserConf, _ParserArgType, _LexerArgType + +from .lexer import Lexer, BasicLexer, TerminalDef, LexerThread, Token +from .parse_tree_builder import ParseTreeBuilder +from .parser_frontends import _validate_frontend_args, _get_lexer_callbacks, _deserialize_parsing_frontend, _construct_parsing_frontend +from .grammar import Rule + +import re +try: + import regex # type: ignore +except ImportError: + regex = None + + +###{standalone + + +class PostLex(ABC): + @abstractmethod + def process(self, stream: Iterator[Token]) -> Iterator[Token]: + return stream + + always_accept: Iterable[str] = () + +class LarkOptions(Serialize): + """Specifies the options for Lark + + """ + + start: List[str] + debug: bool + transformer: 'Optional[Transformer]' + propagate_positions: Union[bool, str] + maybe_placeholders: bool + cache: Union[bool, str] + regex: bool + g_regex_flags: int + keep_all_tokens: bool + tree_class: Any + parser: _ParserArgType + lexer: _LexerArgType + ambiguity: 'Literal["auto", "resolve", "explicit", "forest"]' + postlex: Optional[PostLex] + priority: 'Optional[Literal["auto", "normal", "invert"]]' + lexer_callbacks: Dict[str, Callable[[Token], Token]] + use_bytes: bool + edit_terminals: Optional[Callable[[TerminalDef], TerminalDef]] + import_paths: 'List[Union[str, Callable[[Union[None, str, PackageResource], str], Tuple[str, str]]]]' + source_path: Optional[str] + + OPTIONS_DOC = """ + **=== General Options ===** + + start + The start symbol. Either a string, or a list of strings for multiple possible starts (Default: "start") + debug + Display debug information and extra warnings. 
Use only when debugging (Default: ``False``) + When used with Earley, it generates a forest graph as "sppf.png", if 'dot' is installed. + transformer + Applies the transformer to every parse tree (equivalent to applying it after the parse, but faster) + propagate_positions + Propagates (line, column, end_line, end_column) attributes into all tree branches. + Accepts ``False``, ``True``, or a callable, which will filter which nodes to ignore when propagating. + maybe_placeholders + When ``True``, the ``[]`` operator returns ``None`` when not matched. + When ``False``, ``[]`` behaves like the ``?`` operator, and returns no value at all. + (default= ``True``) + cache + Cache the results of the Lark grammar analysis, for x2 to x3 faster loading. LALR only for now. + + - When ``False``, does nothing (default) + - When ``True``, caches to a temporary file in the local directory + - When given a string, caches to the path pointed by the string + regex + When True, uses the ``regex`` module instead of the stdlib ``re``. + g_regex_flags + Flags that are applied to all terminals (both regex and strings) + keep_all_tokens + Prevent the tree builder from automagically removing "punctuation" tokens (Default: ``False``) + tree_class + Lark will produce trees comprised of instances of this class instead of the default ``lark.Tree``. + + **=== Algorithm Options ===** + + parser + Decides which parser engine to use. Accepts "earley" or "lalr". (Default: "earley"). + (there is also a "cyk" option for legacy) + lexer + Decides whether or not to use a lexer stage + + - "auto" (default): Choose for me based on the parser + - "basic": Use a basic lexer + - "contextual": Stronger lexer (only works with parser="lalr") + - "dynamic": Flexible and powerful (only with parser="earley") + - "dynamic_complete": Same as dynamic, but tries *every* variation of tokenizing possible. + ambiguity + Decides how to handle ambiguity in the parse. Only relevant if parser="earley" + + - "resolve": The parser will automatically choose the simplest derivation + (it chooses consistently: greedy for tokens, non-greedy for rules) + - "explicit": The parser will return all derivations wrapped in "_ambig" tree nodes (i.e. a forest). + - "forest": The parser will return the root of the shared packed parse forest. + + **=== Misc. / Domain Specific Options ===** + + postlex + Lexer post-processing (Default: ``None``) Only works with the basic and contextual lexers. + priority + How priorities should be evaluated - "auto", ``None``, "normal", "invert" (Default: "auto") + lexer_callbacks + Dictionary of callbacks for the lexer. May alter tokens during lexing. Use with caution. + use_bytes + Accept an input of type ``bytes`` instead of ``str``. + edit_terminals + A callback for editing the terminals before parse. + import_paths + A List of either paths or loader functions to specify from where grammars are imported + source_path + Override the source of from where the grammar was loaded. Useful for relative imports and unconventional grammar loading + **=== End of Options ===** + """ + if __doc__: + __doc__ += OPTIONS_DOC + + + # Adding a new option needs to be done in multiple places: + # - In the dictionary below. This is the primary truth of which options `Lark.__init__` accepts + # - In the docstring above. 
It is used both for the docstring of `LarkOptions` and `Lark`, and in readthedocs + # - As an attribute of `LarkOptions` above + # - Potentially in `_LOAD_ALLOWED_OPTIONS` below this class, when the option doesn't change how the grammar is loaded + # - Potentially in `lark.tools.__init__`, if it makes sense, and it can easily be passed as a cmd argument + _defaults: Dict[str, Any] = { + 'debug': False, + 'keep_all_tokens': False, + 'tree_class': None, + 'cache': False, + 'postlex': None, + 'parser': 'earley', + 'lexer': 'auto', + 'transformer': None, + 'start': 'start', + 'priority': 'auto', + 'ambiguity': 'auto', + 'regex': False, + 'propagate_positions': False, + 'lexer_callbacks': {}, + 'maybe_placeholders': True, + 'edit_terminals': None, + 'g_regex_flags': 0, + 'use_bytes': False, + 'import_paths': [], + 'source_path': None, + '_plugins': {}, + } + + def __init__(self, options_dict): + o = dict(options_dict) + + options = {} + for name, default in self._defaults.items(): + if name in o: + value = o.pop(name) + if isinstance(default, bool) and name not in ('cache', 'use_bytes', 'propagate_positions'): + value = bool(value) + else: + value = default + + options[name] = value + + if isinstance(options['start'], str): + options['start'] = [options['start']] + + self.__dict__['options'] = options + + + assert_config(self.parser, ('earley', 'lalr', 'cyk', None)) + + if self.parser == 'earley' and self.transformer: + raise ConfigurationError('Cannot specify an embedded transformer when using the Earley algorithm. ' + 'Please use your transformer on the resulting parse tree, or use a different algorithm (i.e. LALR)') + + if o: + raise ConfigurationError("Unknown options: %s" % o.keys()) + + def __getattr__(self, name): + try: + return self.__dict__['options'][name] + except KeyError as e: + raise AttributeError(e) + + def __setattr__(self, name, value): + assert_config(name, self.options.keys(), "%r isn't a valid option. Expected one of: %s") + self.options[name] = value + + def serialize(self, memo): + return self.options + + @classmethod + def deserialize(cls, data, memo): + return cls(data) + + +# Options that can be passed to the Lark parser, even when it was loaded from cache/standalone. +# These options are only used outside of `load_grammar`. +_LOAD_ALLOWED_OPTIONS = {'postlex', 'transformer', 'lexer_callbacks', 'use_bytes', 'debug', 'g_regex_flags', 'regex', 'propagate_positions', 'tree_class', '_plugins'} + +_VALID_PRIORITY_OPTIONS = ('auto', 'normal', 'invert', None) +_VALID_AMBIGUITY_OPTIONS = ('auto', 'resolve', 'explicit', 'forest') + + +_T = TypeVar('_T', bound="Lark") + +class Lark(Serialize): + """Main interface for the library. + + It's mostly a thin wrapper for the many different parsers, and for the tree constructor. + + Parameters: + grammar: a string or file-object containing the grammar spec (using Lark's ebnf syntax) + options: a dictionary controlling various aspects of Lark. + + Example: + >>> Lark(r'''start: "foo" ''') + Lark(...) 
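A small sketch of the ``cache`` option documented above (the cache path is arbitrary; LALR only, as the docs note):

```python
# Sketch: caching the LALR grammar analysis between runs.
from lark import Lark

grammar = '''
    start: NUMBER ("+" NUMBER)*
    %import common.NUMBER
    %ignore " "
'''

# The first run analyses the grammar and writes the cache file;
# later runs with the same grammar and options load it instead.
parser = Lark(grammar, parser='lalr', cache='calc.larkcache')
print(parser.parse("1 + 2 + 3"))
```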
+ """ + + source_path: str + source_grammar: str + grammar: 'Grammar' + options: LarkOptions + lexer: Lexer + terminals: List[TerminalDef] + + def __init__(self, grammar: 'Union[Grammar, str, IO[str]]', **options) -> None: + self.options = LarkOptions(options) + + # Set regex or re module + use_regex = self.options.regex + if use_regex: + if regex: + re_module = regex + else: + raise ImportError('`regex` module must be installed if calling `Lark(regex=True)`.') + else: + re_module = re + + # Some, but not all file-like objects have a 'name' attribute + if self.options.source_path is None: + try: + self.source_path = grammar.name + except AttributeError: + self.source_path = '' + else: + self.source_path = self.options.source_path + + # Drain file-like objects to get their contents + try: + read = grammar.read + except AttributeError: + pass + else: + grammar = read() + + cache_fn = None + cache_md5 = None + if isinstance(grammar, str): + self.source_grammar = grammar + if self.options.use_bytes: + if not isascii(grammar): + raise ConfigurationError("Grammar must be ascii only, when use_bytes=True") + + if self.options.cache: + if self.options.parser != 'lalr': + raise ConfigurationError("cache only works with parser='lalr' for now") + + unhashable = ('transformer', 'postlex', 'lexer_callbacks', 'edit_terminals', '_plugins') + options_str = ''.join(k+str(v) for k, v in options.items() if k not in unhashable) + from . import __version__ + s = grammar + options_str + __version__ + str(sys.version_info[:2]) + cache_md5 = hashlib.md5(s.encode('utf8')).hexdigest() + + if isinstance(self.options.cache, str): + cache_fn = self.options.cache + else: + if self.options.cache is not True: + raise ConfigurationError("cache argument must be bool or str") + + cache_fn = tempfile.gettempdir() + '/.lark_cache_%s_%s_%s.tmp' % (cache_md5, *sys.version_info[:2]) + + if FS.exists(cache_fn): + logger.debug('Loading grammar from cache: %s', cache_fn) + # Remove options that aren't relevant for loading from cache + for name in (set(options) - _LOAD_ALLOWED_OPTIONS): + del options[name] + with FS.open(cache_fn, 'rb') as f: + old_options = self.options + try: + file_md5 = f.readline().rstrip(b'\n') + cached_used_files = pickle.load(f) + if file_md5 == cache_md5.encode('utf8') and verify_used_files(cached_used_files): + cached_parser_data = pickle.load(f) + self._load(cached_parser_data, **options) + return + except Exception: # We should probably narrow done which errors we catch here. + logger.exception("Failed to load Lark from cache: %r. We will try to carry on." % cache_fn) + + # In theory, the Lark instance might have been messed up by the call to `_load`. + # In practice the only relevant thing that might have been overriden should be `options` + self.options = old_options + + + # Parse the grammar file and compose the grammars + self.grammar, used_files = load_grammar(grammar, self.source_path, self.options.import_paths, self.options.keep_all_tokens) + else: + assert isinstance(grammar, Grammar) + self.grammar = grammar + + + if self.options.lexer == 'auto': + if self.options.parser == 'lalr': + self.options.lexer = 'contextual' + elif self.options.parser == 'earley': + if self.options.postlex is not None: + logger.info("postlex can't be used with the dynamic lexer, so we use 'basic' instead. 
" + "Consider using lalr with contextual instead of earley") + self.options.lexer = 'basic' + else: + self.options.lexer = 'dynamic' + elif self.options.parser == 'cyk': + self.options.lexer = 'basic' + else: + assert False, self.options.parser + lexer = self.options.lexer + if isinstance(lexer, type): + assert issubclass(lexer, Lexer) # XXX Is this really important? Maybe just ensure interface compliance + else: + assert_config(lexer, ('basic', 'contextual', 'dynamic', 'dynamic_complete')) + if self.options.postlex is not None and 'dynamic' in lexer: + raise ConfigurationError("Can't use postlex with a dynamic lexer. Use basic or contextual instead") + + if self.options.ambiguity == 'auto': + if self.options.parser == 'earley': + self.options.ambiguity = 'resolve' + else: + assert_config(self.options.parser, ('earley', 'cyk'), "%r doesn't support disambiguation. Use one of these parsers instead: %s") + + if self.options.priority == 'auto': + self.options.priority = 'normal' + + if self.options.priority not in _VALID_PRIORITY_OPTIONS: + raise ConfigurationError("invalid priority option: %r. Must be one of %r" % (self.options.priority, _VALID_PRIORITY_OPTIONS)) + if self.options.ambiguity not in _VALID_AMBIGUITY_OPTIONS: + raise ConfigurationError("invalid ambiguity option: %r. Must be one of %r" % (self.options.ambiguity, _VALID_AMBIGUITY_OPTIONS)) + + if self.options.parser is None: + terminals_to_keep = '*' + elif self.options.postlex is not None: + terminals_to_keep = set(self.options.postlex.always_accept) + else: + terminals_to_keep = set() + + # Compile the EBNF grammar into BNF + self.terminals, self.rules, self.ignore_tokens = self.grammar.compile(self.options.start, terminals_to_keep) + + if self.options.edit_terminals: + for t in self.terminals: + self.options.edit_terminals(t) + + self._terminals_dict = {t.name: t for t in self.terminals} + + # If the user asked to invert the priorities, negate them all here. + if self.options.priority == 'invert': + for rule in self.rules: + if rule.options.priority is not None: + rule.options.priority = -rule.options.priority + for term in self.terminals: + term.priority = -term.priority + # Else, if the user asked to disable priorities, strip them from the + # rules and terminals. This allows the Earley parsers to skip an extra forest walk + # for improved performance, if you don't need them (or didn't specify any). + elif self.options.priority is None: + for rule in self.rules: + if rule.options.priority is not None: + rule.options.priority = None + for term in self.terminals: + term.priority = 0 + + # TODO Deprecate lexer_callbacks? 
+ self.lexer_conf = LexerConf( + self.terminals, re_module, self.ignore_tokens, self.options.postlex, + self.options.lexer_callbacks, self.options.g_regex_flags, use_bytes=self.options.use_bytes + ) + + if self.options.parser: + self.parser = self._build_parser() + elif lexer: + self.lexer = self._build_lexer() + + if cache_fn: + logger.debug('Saving grammar to cache: %s', cache_fn) + with FS.open(cache_fn, 'wb') as f: + assert cache_md5 is not None + f.write(cache_md5.encode('utf8') + b'\n') + pickle.dump(used_files, f) + self.save(f, _LOAD_ALLOWED_OPTIONS) + + if __doc__: + __doc__ += "\n\n" + LarkOptions.OPTIONS_DOC + + __serialize_fields__ = 'parser', 'rules', 'options' + + def _build_lexer(self, dont_ignore=False): + lexer_conf = self.lexer_conf + if dont_ignore: + from copy import copy + lexer_conf = copy(lexer_conf) + lexer_conf.ignore = () + return BasicLexer(lexer_conf) + + def _prepare_callbacks(self): + self._callbacks = {} + # we don't need these callbacks if we aren't building a tree + if self.options.ambiguity != 'forest': + self._parse_tree_builder = ParseTreeBuilder( + self.rules, + self.options.tree_class or Tree, + self.options.propagate_positions, + self.options.parser != 'lalr' and self.options.ambiguity == 'explicit', + self.options.maybe_placeholders + ) + self._callbacks = self._parse_tree_builder.create_callback(self.options.transformer) + self._callbacks.update(_get_lexer_callbacks(self.options.transformer, self.terminals)) + + def _build_parser(self): + self._prepare_callbacks() + _validate_frontend_args(self.options.parser, self.options.lexer) + parser_conf = ParserConf(self.rules, self._callbacks, self.options.start) + return _construct_parsing_frontend( + self.options.parser, + self.options.lexer, + self.lexer_conf, + parser_conf, + options=self.options + ) + + def save(self, f, exclude_options: Collection[str] = ()): + """Saves the instance into the given file object + + Useful for caching and multiprocessing. + """ + data, m = self.memo_serialize([TerminalDef, Rule]) + if exclude_options: + data["options"] = {n: v for n, v in data["options"].items() if n not in exclude_options} + pickle.dump({'data': data, 'memo': m}, f, protocol=pickle.HIGHEST_PROTOCOL) + + @classmethod + def load(cls, f): + """Loads an instance from the given file object + + Useful for caching and multiprocessing. 
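A sketch of the save/load round trip described in these docstrings, using an in-memory buffer in place of a cache file:

```python
# Sketch: pickling a parser with save() and restoring it with load().
from io import BytesIO
from lark import Lark

parser = Lark('start: "hello" "world"', parser='lalr')

buf = BytesIO()
parser.save(buf)          # serialize the analysed parser

buf.seek(0)
parser2 = Lark.load(buf)  # reconstruct it without re-analysing the grammar
print(parser2.parse("helloworld"))
```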
+ """ + inst = cls.__new__(cls) + return inst._load(f) + + def _deserialize_lexer_conf(self, data, memo, options): + lexer_conf = LexerConf.deserialize(data['lexer_conf'], memo) + lexer_conf.callbacks = options.lexer_callbacks or {} + lexer_conf.re_module = regex if options.regex else re + lexer_conf.use_bytes = options.use_bytes + lexer_conf.g_regex_flags = options.g_regex_flags + lexer_conf.skip_validation = True + lexer_conf.postlex = options.postlex + return lexer_conf + + def _load(self, f, **kwargs): + if isinstance(f, dict): + d = f + else: + d = pickle.load(f) + memo_json = d['memo'] + data = d['data'] + + assert memo_json + memo = SerializeMemoizer.deserialize(memo_json, {'Rule': Rule, 'TerminalDef': TerminalDef}, {}) + options = dict(data['options']) + if (set(kwargs) - _LOAD_ALLOWED_OPTIONS) & set(LarkOptions._defaults): + raise ConfigurationError("Some options are not allowed when loading a Parser: {}" + .format(set(kwargs) - _LOAD_ALLOWED_OPTIONS)) + options.update(kwargs) + self.options = LarkOptions.deserialize(options, memo) + self.rules = [Rule.deserialize(r, memo) for r in data['rules']] + self.source_path = '' + _validate_frontend_args(self.options.parser, self.options.lexer) + self.lexer_conf = self._deserialize_lexer_conf(data['parser'], memo, self.options) + self.terminals = self.lexer_conf.terminals + self._prepare_callbacks() + self._terminals_dict = {t.name: t for t in self.terminals} + self.parser = _deserialize_parsing_frontend( + data['parser'], + memo, + self.lexer_conf, + self._callbacks, + self.options, # Not all, but multiple attributes are used + ) + return self + + @classmethod + def _load_from_dict(cls, data, memo, **kwargs): + inst = cls.__new__(cls) + return inst._load({'data': data, 'memo': memo}, **kwargs) + + @classmethod + def open(cls: Type[_T], grammar_filename: str, rel_to: Optional[str]=None, **options) -> _T: + """Create an instance of Lark with the grammar given by its filename + + If ``rel_to`` is provided, the function will find the grammar filename in relation to it. + + Example: + + >>> Lark.open("grammar_file.lark", rel_to=__file__, parser="lalr") + Lark(...) + + """ + if rel_to: + basepath = os.path.dirname(rel_to) + grammar_filename = os.path.join(basepath, grammar_filename) + with open(grammar_filename, encoding='utf8') as f: + return cls(f, **options) + + @classmethod + def open_from_package(cls: Type[_T], package: str, grammar_path: str, search_paths: 'Sequence[str]'=[""], **options) -> _T: + """Create an instance of Lark with the grammar loaded from within the package `package`. + This allows grammar loading from zipapps. + + Imports in the grammar will use the `package` and `search_paths` provided, through `FromPackageLoader` + + Example: + + Lark.open_from_package(__name__, "example.lark", ("grammars",), parser=...) + """ + package_loader = FromPackageLoader(package, search_paths) + full_path, text = package_loader(None, grammar_path) + options.setdefault('source_path', full_path) + options.setdefault('import_paths', []) + options['import_paths'].append(package_loader) + return cls(text, **options) + + def __repr__(self): + return 'Lark(open(%r), parser=%r, lexer=%r, ...)' % (self.source_path, self.options.parser, self.options.lexer) + + + def lex(self, text: str, dont_ignore: bool=False) -> Iterator[Token]: + """Only lex (and postlex) the text, without parsing it. Only relevant when lexer='basic' + + When dont_ignore=True, the lexer will return all tokens, even those marked for %ignore. 
+ + :raises UnexpectedCharacters: In case the lexer cannot find a suitable match. + """ + if not hasattr(self, 'lexer') or dont_ignore: + lexer = self._build_lexer(dont_ignore) + else: + lexer = self.lexer + lexer_thread = LexerThread.from_text(lexer, text) + stream = lexer_thread.lex(None) + if self.options.postlex: + return self.options.postlex.process(stream) + return stream + + def get_terminal(self, name: str) -> TerminalDef: + """Get information about a terminal""" + return self._terminals_dict[name] + + def parse_interactive(self, text: Optional[str]=None, start: Optional[str]=None) -> 'InteractiveParser': + """Start an interactive parsing session. + + Parameters: + text (str, optional): Text to be parsed. Required for ``resume_parse()``. + start (str, optional): Start symbol + + Returns: + A new InteractiveParser instance. + + See Also: ``Lark.parse()`` + """ + return self.parser.parse_interactive(text, start=start) + + def parse(self, text: str, start: Optional[str]=None, on_error: 'Optional[Callable[[UnexpectedInput], bool]]'=None) -> 'ParseTree': + """Parse the given text, according to the options provided. + + Parameters: + text (str): Text to be parsed. + start (str, optional): Required if Lark was given multiple possible start symbols (using the start option). + on_error (function, optional): if provided, will be called on UnexpectedToken error. Return true to resume parsing. + LALR only. See examples/advanced/error_handling.py for an example of how to use on_error. + + Returns: + If a transformer is supplied to ``__init__``, returns whatever is the + result of the transformation. Otherwise, returns a Tree instance. + + :raises UnexpectedInput: On a parse error, one of these sub-exceptions will be raised: + ``UnexpectedCharacters``, ``UnexpectedToken``, or ``UnexpectedEOF``. + For convenience, these sub-exceptions also inherit from ``ParseError`` and ``LexError``. + + """ + return self.parser.parse(text, start=start, on_error=on_error) + + +###} diff --git a/vendor/lark/lark/lexer.py b/vendor/lark/lark/lexer.py new file mode 100644 index 00000000..ec71a12d --- /dev/null +++ b/vendor/lark/lark/lexer.py @@ -0,0 +1,541 @@ +# Lexer Implementation + +from abc import abstractmethod, ABC +import re +from contextlib import suppress +from typing import ( + TypeVar, Type, List, Dict, Iterator, Collection, Callable, Optional, FrozenSet, Any, + Pattern as REPattern, ClassVar, TYPE_CHECKING +) +from types import ModuleType +if TYPE_CHECKING: + from .common import LexerConf + +from .utils import classify, get_regexp_width, Serialize +from .exceptions import UnexpectedCharacters, LexError, UnexpectedToken +from .grammar import TOKEN_DEFAULT_PRIORITY + +###{standalone +from copy import copy + + +class Pattern(Serialize, ABC): + + value: str + flags: Collection[str] + raw: Optional[str] + type: ClassVar[str] + + def __init__(self, value: str, flags: Collection[str]=(), raw: Optional[str]=None) -> None: + self.value = value + self.flags = frozenset(flags) + self.raw = raw + + def __repr__(self): + return repr(self.to_regexp()) + + # Pattern Hashing assumes all subclasses have a different priority!
+ def __hash__(self): + return hash((type(self), self.value, self.flags)) + + def __eq__(self, other): + return type(self) == type(other) and self.value == other.value and self.flags == other.flags + + @abstractmethod + def to_regexp(self) -> str: + raise NotImplementedError() + + @property + @abstractmethod + def min_width(self) -> int: + raise NotImplementedError() + + @property + @abstractmethod + def max_width(self) -> int: + raise NotImplementedError() + + def _get_flags(self, value): + for f in self.flags: + value = ('(?%s:%s)' % (f, value)) + return value + + +class PatternStr(Pattern): + __serialize_fields__ = 'value', 'flags' + + type: ClassVar[str] = "str" + + def to_regexp(self) -> str: + return self._get_flags(re.escape(self.value)) + + @property + def min_width(self) -> int: + return len(self.value) + + @property + def max_width(self) -> int: + return len(self.value) + + +class PatternRE(Pattern): + __serialize_fields__ = 'value', 'flags', '_width' + + type: ClassVar[str] = "re" + + def to_regexp(self) -> str: + return self._get_flags(self.value) + + _width = None + def _get_width(self): + if self._width is None: + self._width = get_regexp_width(self.to_regexp()) + return self._width + + @property + def min_width(self) -> int: + return self._get_width()[0] + + @property + def max_width(self) -> int: + return self._get_width()[1] + + +class TerminalDef(Serialize): + __serialize_fields__ = 'name', 'pattern', 'priority' + __serialize_namespace__ = PatternStr, PatternRE + + name: str + pattern: Pattern + priority: int + + def __init__(self, name: str, pattern: Pattern, priority: int=TOKEN_DEFAULT_PRIORITY) -> None: + assert isinstance(pattern, Pattern), pattern + self.name = name + self.pattern = pattern + self.priority = priority + + def __repr__(self): + return '%s(%r, %r)' % (type(self).__name__, self.name, self.pattern) + + def user_repr(self) -> str: + if self.name.startswith('__'): # We represent a generated terminal + return self.pattern.raw or self.name + else: + return self.name + +_T = TypeVar('_T', bound="Token") + +class Token(str): + """A string with meta-information, that is produced by the lexer. + + When parsing text, the resulting chunks of the input that haven't been discarded, + will end up in the tree as Token instances. The Token class inherits from Python's ``str``, + so normal string comparisons and operations will work as expected. + + Attributes: + type: Name of the token (as specified in grammar) + value: Value of the token (redundant, as ``token.value == token`` will always be true) + start_pos: The index of the token in the text + line: The line of the token in the text (starting with 1) + column: The column of the token in the text (starting with 1) + end_line: The line where the token ends + end_column: The next column after the end of the token. For example, + if the token is a single character with a column value of 4, + end_column will be 5. 
+ end_pos: the index where the token ends (basically ``start_pos + len(token)``) + """ + __slots__ = ('type', 'start_pos', 'value', 'line', 'column', 'end_line', 'end_column', 'end_pos') + + type: str + start_pos: int + value: Any + line: int + column: int + end_line: int + end_column: int + end_pos: int + + def __new__(cls, type_, value, start_pos=None, line=None, column=None, end_line=None, end_column=None, end_pos=None): + inst = super(Token, cls).__new__(cls, value) + inst.type = type_ + inst.start_pos = start_pos + inst.value = value + inst.line = line + inst.column = column + inst.end_line = end_line + inst.end_column = end_column + inst.end_pos = end_pos + return inst + + def update(self, type_: Optional[str]=None, value: Optional[Any]=None) -> 'Token': + return Token.new_borrow_pos( + type_ if type_ is not None else self.type, + value if value is not None else self.value, + self + ) + + @classmethod + def new_borrow_pos(cls: Type[_T], type_: str, value: Any, borrow_t: 'Token') -> _T: + return cls(type_, value, borrow_t.start_pos, borrow_t.line, borrow_t.column, borrow_t.end_line, borrow_t.end_column, borrow_t.end_pos) + + def __reduce__(self): + return (self.__class__, (self.type, self.value, self.start_pos, self.line, self.column)) + + def __repr__(self): + return 'Token(%r, %r)' % (self.type, self.value) + + def __deepcopy__(self, memo): + return Token(self.type, self.value, self.start_pos, self.line, self.column) + + def __eq__(self, other): + if isinstance(other, Token) and self.type != other.type: + return False + + return str.__eq__(self, other) + + __hash__ = str.__hash__ + + +class LineCounter: + __slots__ = 'char_pos', 'line', 'column', 'line_start_pos', 'newline_char' + + def __init__(self, newline_char): + self.newline_char = newline_char + self.char_pos = 0 + self.line = 1 + self.column = 1 + self.line_start_pos = 0 + + def __eq__(self, other): + if not isinstance(other, LineCounter): + return NotImplemented + + return self.char_pos == other.char_pos and self.newline_char == other.newline_char + + def feed(self, token: Token, test_newline=True): + """Consume a token and calculate the new line & column. + + As an optional optimization, set test_newline=False if token doesn't contain a newline. 
+ """ + if test_newline: + newlines = token.count(self.newline_char) + if newlines: + self.line += newlines + self.line_start_pos = self.char_pos + token.rindex(self.newline_char) + 1 + + self.char_pos += len(token) + self.column = self.char_pos - self.line_start_pos + 1 + + +class UnlessCallback: + def __init__(self, scanner): + self.scanner = scanner + + def __call__(self, t): + res = self.scanner.match(t.value, 0) + if res: + _value, t.type = res + return t + + +class CallChain: + def __init__(self, callback1, callback2, cond): + self.callback1 = callback1 + self.callback2 = callback2 + self.cond = cond + + def __call__(self, t): + t2 = self.callback1(t) + return self.callback2(t) if self.cond(t2) else t2 + + +def _get_match(re_, regexp, s, flags): + m = re_.match(regexp, s, flags) + if m: + return m.group(0) + +def _create_unless(terminals, g_regex_flags, re_, use_bytes): + tokens_by_type = classify(terminals, lambda t: type(t.pattern)) + assert len(tokens_by_type) <= 2, tokens_by_type.keys() + embedded_strs = set() + callback = {} + for retok in tokens_by_type.get(PatternRE, []): + unless = [] + for strtok in tokens_by_type.get(PatternStr, []): + if strtok.priority != retok.priority: + continue + s = strtok.pattern.value + if s == _get_match(re_, retok.pattern.to_regexp(), s, g_regex_flags): + unless.append(strtok) + if strtok.pattern.flags <= retok.pattern.flags: + embedded_strs.add(strtok) + if unless: + callback[retok.name] = UnlessCallback(Scanner(unless, g_regex_flags, re_, match_whole=True, use_bytes=use_bytes)) + + new_terminals = [t for t in terminals if t not in embedded_strs] + return new_terminals, callback + + +class Scanner: + def __init__(self, terminals, g_regex_flags, re_, use_bytes, match_whole=False): + self.terminals = terminals + self.g_regex_flags = g_regex_flags + self.re_ = re_ + self.use_bytes = use_bytes + self.match_whole = match_whole + + self.allowed_types = {t.name for t in self.terminals} + + self._mres = self._build_mres(terminals, len(terminals)) + + def _build_mres(self, terminals, max_size): + # Python sets an unreasonable group limit (currently 100) in its re module + # Worse, the only way to know we reached it is by catching an AssertionError! + # This function recursively tries less and less groups until it's successful. + postfix = '$' if self.match_whole else '' + mres = [] + while terminals: + pattern = u'|'.join(u'(?P<%s>%s)' % (t.name, t.pattern.to_regexp() + postfix) for t in terminals[:max_size]) + if self.use_bytes: + pattern = pattern.encode('latin-1') + try: + mre = self.re_.compile(pattern, self.g_regex_flags) + except AssertionError: # Yes, this is what Python provides us.. :/ + return self._build_mres(terminals, max_size//2) + + mres.append((mre, {i: n for n, i in mre.groupindex.items()})) + terminals = terminals[max_size:] + return mres + + def match(self, text, pos): + for mre, type_from_index in self._mres: + m = mre.match(text, pos) + if m: + return m.group(0), type_from_index[m.lastindex] + + +def _regexp_has_newline(r: str): + r"""Expressions that may indicate newlines in a regexp: + - newlines (\n) + - escaped newline (\\n) + - anything but ([^...]) + - any-char (.) when the flag (?s) exists + - spaces (\s) + """ + return '\n' in r or '\\n' in r or '\\s' in r or '[^' in r or ('(?s' in r and '.' 
in r)
+
+
+class LexerState:
+    """Represents the current state of the lexer as it scans the text
+    (Lexer objects are only instantiated per grammar, not per text)
+    """
+
+    __slots__ = 'text', 'line_ctr', 'last_token'
+
+    def __init__(self, text, line_ctr=None, last_token=None):
+        self.text = text
+        self.line_ctr = line_ctr or LineCounter(b'\n' if isinstance(text, bytes) else '\n')
+        self.last_token = last_token
+
+    def __eq__(self, other):
+        if not isinstance(other, LexerState):
+            return NotImplemented
+
+        return self.text is other.text and self.line_ctr == other.line_ctr and self.last_token == other.last_token
+
+    def __copy__(self):
+        return type(self)(self.text, copy(self.line_ctr), self.last_token)
+
+
+class LexerThread:
+    """A thread that ties a lexer instance and a lexer state, to be used by the parser
+    """
+
+    def __init__(self, lexer: 'Lexer', lexer_state: LexerState):
+        self.lexer = lexer
+        self.state = lexer_state
+
+    @classmethod
+    def from_text(cls, lexer: 'Lexer', text: str):
+        return cls(lexer, LexerState(text))
+
+    def lex(self, parser_state):
+        return self.lexer.lex(self.state, parser_state)
+
+    def __copy__(self):
+        return type(self)(self.lexer, copy(self.state))
+
+    _Token = Token
+
+
+_Callback = Callable[[Token], Token]
+
+class Lexer(ABC):
+    """Lexer interface
+
+    Method Signatures:
+        lex(self, lexer_state, parser_state) -> Iterator[Token]
+    """
+    @abstractmethod
+    def lex(self, lexer_state: LexerState, parser_state: Any) -> Iterator[Token]:
+        return NotImplemented
+
+    def make_lexer_state(self, text):
+        "Deprecated"
+        return LexerState(text)
+
+
+class BasicLexer(Lexer):
+
+    terminals: Collection[TerminalDef]
+    ignore_types: FrozenSet[str]
+    newline_types: FrozenSet[str]
+    user_callbacks: Dict[str, _Callback]
+    callback: Dict[str, _Callback]
+    re: ModuleType
+
+    def __init__(self, conf: 'LexerConf') -> None:
+        terminals = list(conf.terminals)
+        assert all(isinstance(t, TerminalDef) for t in terminals), terminals
+
+        self.re = conf.re_module
+
+        if not conf.skip_validation:
+            # Sanitization
+            for t in terminals:
+                try:
+                    self.re.compile(t.pattern.to_regexp(), conf.g_regex_flags)
+                except self.re.error:
+                    raise LexError("Cannot compile token %s: %s" % (t.name, t.pattern))
+
+                if t.pattern.min_width == 0:
+                    raise LexError("Lexer does not allow zero-width terminals.
(%s: %s)" % (t.name, t.pattern)) + + if not (set(conf.ignore) <= {t.name for t in terminals}): + raise LexError("Ignore terminals are not defined: %s" % (set(conf.ignore) - {t.name for t in terminals})) + + # Init + self.newline_types = frozenset(t.name for t in terminals if _regexp_has_newline(t.pattern.to_regexp())) + self.ignore_types = frozenset(conf.ignore) + + terminals.sort(key=lambda x: (-x.priority, -x.pattern.max_width, -len(x.pattern.value), x.name)) + self.terminals = terminals + self.user_callbacks = conf.callbacks + self.g_regex_flags = conf.g_regex_flags + self.use_bytes = conf.use_bytes + self.terminals_by_name = conf.terminals_by_name + + self._scanner = None + + def _build_scanner(self): + terminals, self.callback = _create_unless(self.terminals, self.g_regex_flags, self.re, self.use_bytes) + assert all(self.callback.values()) + + for type_, f in self.user_callbacks.items(): + if type_ in self.callback: + # Already a callback there, probably UnlessCallback + self.callback[type_] = CallChain(self.callback[type_], f, lambda t: t.type == type_) + else: + self.callback[type_] = f + + self._scanner = Scanner(terminals, self.g_regex_flags, self.re, self.use_bytes) + + @property + def scanner(self): + if self._scanner is None: + self._build_scanner() + return self._scanner + + def match(self, text, pos): + return self.scanner.match(text, pos) + + def lex(self, state: LexerState, parser_state: Any) -> Iterator[Token]: + with suppress(EOFError): + while True: + yield self.next_token(state, parser_state) + + def next_token(self, lex_state: LexerState, parser_state: Any=None) -> Token: + line_ctr = lex_state.line_ctr + while line_ctr.char_pos < len(lex_state.text): + res = self.match(lex_state.text, line_ctr.char_pos) + if not res: + allowed = self.scanner.allowed_types - self.ignore_types + if not allowed: + allowed = {""} + raise UnexpectedCharacters(lex_state.text, line_ctr.char_pos, line_ctr.line, line_ctr.column, + allowed=allowed, token_history=lex_state.last_token and [lex_state.last_token], + state=parser_state, terminals_by_name=self.terminals_by_name) + + value, type_ = res + + if type_ not in self.ignore_types: + t = Token(type_, value, line_ctr.char_pos, line_ctr.line, line_ctr.column) + line_ctr.feed(value, type_ in self.newline_types) + t.end_line = line_ctr.line + t.end_column = line_ctr.column + t.end_pos = line_ctr.char_pos + if t.type in self.callback: + t = self.callback[t.type](t) + if not isinstance(t, Token): + raise LexError("Callbacks must return a token (returned %r)" % t) + lex_state.last_token = t + return t + else: + if type_ in self.callback: + t2 = Token(type_, value, line_ctr.char_pos, line_ctr.line, line_ctr.column) + self.callback[type_](t2) + line_ctr.feed(value, type_ in self.newline_types) + + # EOF + raise EOFError(self) + + +class ContextualLexer(Lexer): + + lexers: Dict[str, BasicLexer] + root_lexer: BasicLexer + + def __init__(self, conf: 'LexerConf', states: Dict[str, Collection[str]], always_accept: Collection[str]=()) -> None: + terminals = list(conf.terminals) + terminals_by_name = conf.terminals_by_name + + trad_conf = copy(conf) + trad_conf.terminals = terminals + + lexer_by_tokens: Dict[FrozenSet[str], BasicLexer] = {} + self.lexers = {} + for state, accepts in states.items(): + key = frozenset(accepts) + try: + lexer = lexer_by_tokens[key] + except KeyError: + accepts = set(accepts) | set(conf.ignore) | set(always_accept) + lexer_conf = copy(trad_conf) + lexer_conf.terminals = [terminals_by_name[n] for n in accepts if n in 
terminals_by_name] + lexer = BasicLexer(lexer_conf) + lexer_by_tokens[key] = lexer + + self.lexers[state] = lexer + + assert trad_conf.terminals is terminals + self.root_lexer = BasicLexer(trad_conf) + + def lex(self, lexer_state: LexerState, parser_state: Any) -> Iterator[Token]: + try: + while True: + lexer = self.lexers[parser_state.position] + yield lexer.next_token(lexer_state, parser_state) + except EOFError: + pass + except UnexpectedCharacters as e: + # In the contextual lexer, UnexpectedCharacters can mean that the terminal is defined, but not in the current context. + # This tests the input against the global context, to provide a nicer error. + try: + last_token = lexer_state.last_token # Save last_token. Calling root_lexer.next_token will change this to the wrong token + token = self.root_lexer.next_token(lexer_state, parser_state) + raise UnexpectedToken(token, e.allowed, state=parser_state, token_history=[last_token], terminals_by_name=self.root_lexer.terminals_by_name) + except UnexpectedCharacters: + raise e # Raise the original UnexpectedCharacters. The root lexer raises it with the wrong expected set. + +###} diff --git a/vendor/lark/lark/load_grammar.py b/vendor/lark/lark/load_grammar.py new file mode 100644 index 00000000..581efce0 --- /dev/null +++ b/vendor/lark/lark/load_grammar.py @@ -0,0 +1,1411 @@ +"""Parses and creates Grammar objects""" +import hashlib +import os.path +import sys +from collections import namedtuple +from copy import copy, deepcopy +import pkgutil +from ast import literal_eval +from contextlib import suppress +from typing import List, Tuple, Union, Callable, Dict, Optional, Sequence +from importlib import resources + +from .utils import bfs, logger, classify_bool, is_id_continue, is_id_start, bfs_all_unique, small_factors +from .lexer import Token, TerminalDef, PatternStr, PatternRE + +from .parse_tree_builder import ParseTreeBuilder +from .parser_frontends import ParsingFrontend +from .common import LexerConf, ParserConf +from .grammar import RuleOptions, Rule, Terminal, NonTerminal, Symbol, TOKEN_DEFAULT_PRIORITY +from .utils import classify, dedup_list +from .exceptions import GrammarError, UnexpectedCharacters, UnexpectedToken, ParseError, UnexpectedInput + +from .tree import Tree, SlottedTree as ST +from .visitors import Transformer, Visitor, v_args, Transformer_InPlace, Transformer_NonRecursive +inline_args = v_args(inline=True) + +IMPORT_PATHS = ['grammars'] + +EXT = '.lark' + +_RE_FLAGS = 'imslux' + +_EMPTY = Symbol('__empty__') + +_TERMINAL_NAMES = { + '.' : 'DOT', + ',' : 'COMMA', + ':' : 'COLON', + ';' : 'SEMICOLON', + '+' : 'PLUS', + '-' : 'MINUS', + '*' : 'STAR', + '/' : 'SLASH', + '\\' : 'BACKSLASH', + '|' : 'VBAR', + '?' : 'QMARK', + '!' 
: 'BANG', + '@' : 'AT', + '#' : 'HASH', + '$' : 'DOLLAR', + '%' : 'PERCENT', + '^' : 'CIRCUMFLEX', + '&' : 'AMPERSAND', + '_' : 'UNDERSCORE', + '<' : 'LESSTHAN', + '>' : 'MORETHAN', + '=' : 'EQUAL', + '"' : 'DBLQUOTE', + '\'' : 'QUOTE', + '`' : 'BACKQUOTE', + '~' : 'TILDE', + '(' : 'LPAR', + ')' : 'RPAR', + '{' : 'LBRACE', + '}' : 'RBRACE', + '[' : 'LSQB', + ']' : 'RSQB', + '\n' : 'NEWLINE', + '\r\n' : 'CRLF', + '\t' : 'TAB', + ' ' : 'SPACE', +} + +# Grammar Parser +TERMINALS = { + '_LPAR': r'\(', + '_RPAR': r'\)', + '_LBRA': r'\[', + '_RBRA': r'\]', + '_LBRACE': r'\{', + '_RBRACE': r'\}', + 'OP': '[+*]|[?](?![a-z])', + '_COLON': ':', + '_COMMA': ',', + '_OR': r'\|', + '_DOT': r'\.(?!\.)', + '_DOTDOT': r'\.\.', + 'TILDE': '~', + 'RULE_MODIFIERS': '(!|![?]?|[?]!?)(?=[_a-z])', + 'RULE': '_?[a-z][_a-z0-9]*', + 'TERMINAL': '_?[A-Z][_A-Z0-9]*', + 'STRING': r'"(\\"|\\\\|[^"\n])*?"i?', + 'REGEXP': r'/(?!/)(\\/|\\\\|[^/])*?/[%s]*' % _RE_FLAGS, + '_NL': r'(\r?\n)+\s*', + '_NL_OR': r'(\r?\n)+\s*\|', + 'WS': r'[ \t]+', + 'COMMENT': r'\s*//[^\n]*', + 'BACKSLASH': r'\\[ ]*\n', + '_TO': '->', + '_IGNORE': r'%ignore', + '_OVERRIDE': r'%override', + '_DECLARE': r'%declare', + '_EXTEND': r'%extend', + '_IMPORT': r'%import', + 'NUMBER': r'[+-]?\d+', +} + +RULES = { + 'start': ['_list'], + '_list': ['_item', '_list _item'], + '_item': ['rule', 'term', 'ignore', 'import', 'declare', 'override', 'extend', '_NL'], + + 'rule': ['rule_modifiers RULE template_params priority _COLON expansions _NL'], + 'rule_modifiers': ['RULE_MODIFIERS', + ''], + 'priority': ['_DOT NUMBER', + ''], + 'template_params': ['_LBRACE _template_params _RBRACE', + ''], + '_template_params': ['RULE', + '_template_params _COMMA RULE'], + 'expansions': ['_expansions'], + '_expansions': ['alias', + '_expansions _OR alias', + '_expansions _NL_OR alias'], + + '?alias': ['expansion _TO nonterminal', 'expansion'], + 'expansion': ['_expansion'], + + '_expansion': ['', '_expansion expr'], + + '?expr': ['atom', + 'atom OP', + 'atom TILDE NUMBER', + 'atom TILDE NUMBER _DOTDOT NUMBER', + ], + + '?atom': ['_LPAR expansions _RPAR', + 'maybe', + 'value'], + + 'value': ['terminal', + 'nonterminal', + 'literal', + 'range', + 'template_usage'], + + 'terminal': ['TERMINAL'], + 'nonterminal': ['RULE'], + + '?name': ['RULE', 'TERMINAL'], + '?symbol': ['terminal', 'nonterminal'], + + 'maybe': ['_LBRA expansions _RBRA'], + 'range': ['STRING _DOTDOT STRING'], + + 'template_usage': ['nonterminal _LBRACE _template_args _RBRACE'], + '_template_args': ['value', + '_template_args _COMMA value'], + + 'term': ['TERMINAL _COLON expansions _NL', + 'TERMINAL _DOT NUMBER _COLON expansions _NL'], + 'override': ['_OVERRIDE rule', + '_OVERRIDE term'], + 'extend': ['_EXTEND rule', + '_EXTEND term'], + 'ignore': ['_IGNORE expansions _NL'], + 'declare': ['_DECLARE _declare_args _NL'], + 'import': ['_IMPORT _import_path _NL', + '_IMPORT _import_path _LPAR name_list _RPAR _NL', + '_IMPORT _import_path _TO name _NL'], + + '_import_path': ['import_lib', 'import_rel'], + 'import_lib': ['_import_args'], + 'import_rel': ['_DOT _import_args'], + '_import_args': ['name', '_import_args _DOT name'], + + 'name_list': ['_name_list'], + '_name_list': ['name', '_name_list _COMMA name'], + + '_declare_args': ['symbol', '_declare_args symbol'], + 'literal': ['REGEXP', 'STRING'], +} + + +# Value 5 keeps the number of states in the lalr parser somewhat minimal +# It isn't optimal, but close to it. 
See PR #949
+SMALL_FACTOR_THRESHOLD = 5
+# The threshold above which a repeat (via ~) is split up into different rules
+# 50 is chosen since it keeps the number of states low and therefore lalr analysis time low,
+# while not being too aggressive and unnecessarily creating rules that might create shift/reduce conflicts.
+# (See PR #949)
+REPEAT_BREAK_THRESHOLD = 50
+
+
+class FindRuleSize(Transformer):
+    def __init__(self, keep_all_tokens):
+        self.keep_all_tokens = keep_all_tokens
+
+    def _will_not_get_removed(self, sym):
+        if isinstance(sym, NonTerminal):
+            return not sym.name.startswith('_')
+        if isinstance(sym, Terminal):
+            return self.keep_all_tokens or not sym.filter_out
+        if sym is _EMPTY:
+            return False
+        assert False, sym
+
+    def _args_as_int(self, args):
+        for a in args:
+            if isinstance(a, int):
+                yield a
+            elif isinstance(a, Symbol):
+                yield 1 if self._will_not_get_removed(a) else 0
+            else:
+                assert False
+
+    def expansion(self, args):
+        return sum(self._args_as_int(args))
+
+    def expansions(self, args):
+        return max(self._args_as_int(args))
+
+
+@inline_args
+class EBNF_to_BNF(Transformer_InPlace):
+    def __init__(self):
+        self.new_rules = []
+        self.rules_cache = {}
+        self.prefix = 'anon'
+        self.i = 0
+        self.rule_options = None
+
+    def _name_rule(self, inner):
+        new_name = '__%s_%s_%d' % (self.prefix, inner, self.i)
+        self.i += 1
+        return new_name
+
+    def _add_rule(self, key, name, expansions):
+        t = NonTerminal(name)
+        self.new_rules.append((name, expansions, self.rule_options))
+        self.rules_cache[key] = t
+        return t
+
+    def _add_recurse_rule(self, type_, expr):
+        try:
+            return self.rules_cache[expr]
+        except KeyError:
+            new_name = self._name_rule(type_)
+            t = NonTerminal(new_name)
+            tree = ST('expansions', [
+                ST('expansion', [expr]),
+                ST('expansion', [t, expr])
+            ])
+            return self._add_rule(expr, new_name, tree)
+
+    def _add_repeat_rule(self, a, b, target, atom):
+        """Generate a rule that repeats target ``a`` times, and repeats atom ``b`` times.
+
+        When called recursively (into target), it repeats atom for x(n) times, where:
+            x(0) = 1
+            x(n) = a(n) * x(n-1) + b
+
+        Example rule when a=3, b=4:
+
+            new_rule: target target target atom atom atom atom
+
+        """
+        key = (a, b, target, atom)
+        try:
+            return self.rules_cache[key]
+        except KeyError:
+            new_name = self._name_rule('repeat_a%d_b%d' % (a, b))
+            tree = ST('expansions', [ST('expansion', [target] * a + [atom] * b)])
+            return self._add_rule(key, new_name, tree)
+
+    def _add_repeat_opt_rule(self, a, b, target, target_opt, atom):
+        """Creates a rule that matches atom 0 to (a*n+b)-1 times.
+
+        When target matches atom n times, and target_opt matches atom 0 to n-1 times,
+
+        First we generate target * i followed by target_opt, for i from 0 to a-1
+        These match 0 to n*a - 1 times atom
+
+        Then we generate target * a followed by atom * i, for i from 0 to b-1
+        These match n*a to n*a + b-1 times atom
+
+        The created rule will not have any shift/reduce conflicts so that it can be used with LALR
+
+        Example rule when a=3, b=4:
+
+            new_rule: target_opt
+                    | target target_opt
+                    | target target target_opt
+
+                    | target target target
+                    | target target target atom
+                    | target target target atom atom
+                    | target target target atom atom atom
+
+        """
+        key = (a, b, target, atom, "opt")
+        try:
+            return self.rules_cache[key]
+        except KeyError:
+            new_name = self._name_rule('repeat_a%d_b%d_opt' % (a, b))
+            tree = ST('expansions', [
+                ST('expansion', [target]*i + [target_opt]) for i in range(a)
+            ] + [
+                ST('expansion', [target]*a + [atom]*i) for i in range(b)
+            ])
+            return self._add_rule(key, new_name, tree)
+
+    def _generate_repeats(self, rule, mn, mx):
+        """Generates a rule tree that repeats ``rule`` between ``mn`` and ``mx`` times.
+        """
+        # For a small number of repeats, we can take the naive approach
+        if mx < REPEAT_BREAK_THRESHOLD:
+            return ST('expansions', [ST('expansion', [rule] * n) for n in range(mn, mx + 1)])
+
+        # For large repeat values, we break the repetition into sub-rules.
+        # We treat ``rule~mn..mx`` as ``rule~mn rule~0..(diff=mx-mn)``.
+        # We then use small_factors to split mn and diff into values [(a, b), ...]
+        # These values are used with the help of _add_repeat_rule and _add_repeat_opt_rule
+        # to generate a complete rule/expression that matches the corresponding number of repeats
+        mn_target = rule
+        for a, b in small_factors(mn, SMALL_FACTOR_THRESHOLD):
+            mn_target = self._add_repeat_rule(a, b, mn_target, rule)
+        if mx == mn:
+            return mn_target
+
+        diff = mx - mn + 1  # We add one because _add_repeat_opt_rule generates rules that match one less
+        diff_factors = small_factors(diff, SMALL_FACTOR_THRESHOLD)
+        diff_target = rule  # Match rule 1 time
+        diff_opt_target = ST('expansion', [])  # Match rule 0 times (i.e. up to 1 - 1 times)
+        for a, b in diff_factors[:-1]:
+            diff_opt_target = self._add_repeat_opt_rule(a, b, diff_target, diff_opt_target, rule)
+            diff_target = self._add_repeat_rule(a, b, diff_target, rule)
+
+        a, b = diff_factors[-1]
+        diff_opt_target = self._add_repeat_opt_rule(a, b, diff_target, diff_opt_target, rule)
+
+        return ST('expansions', [ST('expansion', [mn_target] + [diff_opt_target])])
+
+    def expr(self, rule, op, *args):
+        if op.value == '?':
+            empty = ST('expansion', [])
+            return ST('expansions', [rule, empty])
+        elif op.value == '+':
+            # a : b c+ d
+            #   -->
+            # a : b _c d
+            # _c : _c c | c;
+            return self._add_recurse_rule('plus', rule)
+        elif op.value == '*':
+            # a : b c* d
+            #   -->
+            # a : b _c?
d + # _c : _c c | c; + new_name = self._add_recurse_rule('star', rule) + return ST('expansions', [new_name, ST('expansion', [])]) + elif op.value == '~': + if len(args) == 1: + mn = mx = int(args[0]) + else: + mn, mx = map(int, args) + if mx < mn or mn < 0: + raise GrammarError("Bad Range for %s (%d..%d isn't allowed)" % (rule, mn, mx)) + + return self._generate_repeats(rule, mn, mx) + + assert False, op + + def maybe(self, rule): + keep_all_tokens = self.rule_options and self.rule_options.keep_all_tokens + rule_size = FindRuleSize(keep_all_tokens).transform(rule) + empty = ST('expansion', [_EMPTY] * rule_size) + return ST('expansions', [rule, empty]) + + +class SimplifyRule_Visitor(Visitor): + + @staticmethod + def _flatten(tree): + while tree.expand_kids_by_data(tree.data): + pass + + def expansion(self, tree): + # rules_list unpacking + # a : b (c|d) e + # --> + # a : b c e | b d e + # + # In AST terms: + # expansion(b, expansions(c, d), e) + # --> + # expansions( expansion(b, c, e), expansion(b, d, e) ) + + self._flatten(tree) + + for i, child in enumerate(tree.children): + if isinstance(child, Tree) and child.data == 'expansions': + tree.data = 'expansions' + tree.children = [self.visit(ST('expansion', [option if i == j else other + for j, other in enumerate(tree.children)])) + for option in dedup_list(child.children)] + self._flatten(tree) + break + + def alias(self, tree): + rule, alias_name = tree.children + if rule.data == 'expansions': + aliases = [] + for child in tree.children[0].children: + aliases.append(ST('alias', [child, alias_name])) + tree.data = 'expansions' + tree.children = aliases + + def expansions(self, tree): + self._flatten(tree) + # Ensure all children are unique + if len(set(tree.children)) != len(tree.children): + tree.children = dedup_list(tree.children) # dedup is expensive, so try to minimize its use + + +class RuleTreeToText(Transformer): + def expansions(self, x): + return x + + def expansion(self, symbols): + return symbols, None + + def alias(self, x): + (expansion, _alias), alias = x + assert _alias is None, (alias, expansion, '-', _alias) # Double alias not allowed + return expansion, alias.name + + +class PrepareAnonTerminals(Transformer_InPlace): + """Create a unique list of anonymous terminals. 
Attempt to give meaningful names to them when we add them"""
+
+    def __init__(self, terminals):
+        self.terminals = terminals
+        self.term_set = {td.name for td in self.terminals}
+        self.term_reverse = {td.pattern: td for td in terminals}
+        self.i = 0
+        self.rule_options = None
+
+    @inline_args
+    def pattern(self, p):
+        value = p.value
+        if p in self.term_reverse and p.flags != self.term_reverse[p].pattern.flags:
+            raise GrammarError(u'Conflicting flags for the same terminal: %s' % p)
+
+        term_name = None
+
+        if isinstance(p, PatternStr):
+            try:
+                # If already defined, use the user-defined terminal name
+                term_name = self.term_reverse[p].name
+            except KeyError:
+                # Try to assign an indicative anon-terminal name
+                try:
+                    term_name = _TERMINAL_NAMES[value]
+                except KeyError:
+                    if value and is_id_continue(value) and is_id_start(value[0]) and value.upper() not in self.term_set:
+                        term_name = value.upper()
+
+                if term_name in self.term_set:
+                    term_name = None
+
+        elif isinstance(p, PatternRE):
+            if p in self.term_reverse:  # Kind of a weird placement
+                term_name = self.term_reverse[p].name
+        else:
+            assert False, p
+
+        if term_name is None:
+            term_name = '__ANON_%d' % self.i
+            self.i += 1
+
+        if term_name not in self.term_set:
+            assert p not in self.term_reverse
+            self.term_set.add(term_name)
+            termdef = TerminalDef(term_name, p)
+            self.term_reverse[p] = termdef
+            self.terminals.append(termdef)
+
+        filter_out = False if self.rule_options and self.rule_options.keep_all_tokens else isinstance(p, PatternStr)
+
+        return Terminal(term_name, filter_out=filter_out)
+
+
+class _ReplaceSymbols(Transformer_InPlace):
+    """Helper for ApplyTemplates"""
+
+    def __init__(self):
+        self.names = {}
+
+    def value(self, c):
+        if len(c) == 1 and isinstance(c[0], Symbol) and c[0].name in self.names:
+            return self.names[c[0].name]
+        return self.__default__('value', c, None)
+
+    def template_usage(self, c):
+        name = c[0].name
+        if name in self.names:
+            return self.__default__('template_usage', [self.names[name]] + c[1:], None)
+        return self.__default__('template_usage', c, None)
+
+
+class ApplyTemplates(Transformer_InPlace):
+    """Apply the templates, creating new rules that represent the used templates"""
+
+    def __init__(self, rule_defs):
+        self.rule_defs = rule_defs
+        self.replacer = _ReplaceSymbols()
+        self.created_templates = set()
+
+    def template_usage(self, c):
+        name = c[0].name
+        args = c[1:]
+        result_name = "%s{%s}" % (name, ",".join(a.name for a in args))
+        if result_name not in self.created_templates:
+            self.created_templates.add(result_name)
+            (_n, params, tree, options) ,= (t for t in self.rule_defs if t[0] == name)
+            assert len(params) == len(args), args
+            result_tree = deepcopy(tree)
+            self.replacer.names = dict(zip(params, args))
+            self.replacer.transform(result_tree)
+            self.rule_defs.append((result_name, [], result_tree, deepcopy(options)))
+        return NonTerminal(result_name)
+
+
+def _rfind(s, choices):
+    return max(s.rfind(c) for c in choices)
+
+
+def eval_escaping(s):
+    w = ''
+    i = iter(s)
+    for n in i:
+        w += n
+        if n == '\\':
+            try:
+                n2 = next(i)
+            except StopIteration:
+                raise GrammarError("Literal ended unexpectedly (bad escaping): `%r`" % s)
+            if n2 == '\\':
+                w += '\\\\'
+            elif n2 not in 'Uuxnftr':
+                w += '\\'
+            w += n2
+    w = w.replace('\\"', '"').replace("'", "\\'")
+
+    to_eval = "u'''%s'''" % w
+    try:
+        s = literal_eval(to_eval)
+    except SyntaxError as e:
+        raise GrammarError(s, e)
+
+    return s
+
+
+def _literal_to_pattern(literal):
+    assert isinstance(literal, Token)
+    v =
literal.value + flag_start = _rfind(v, '/"')+1 + assert flag_start > 0 + flags = v[flag_start:] + assert all(f in _RE_FLAGS for f in flags), flags + + if literal.type == 'STRING' and '\n' in v: + raise GrammarError('You cannot put newlines in string literals') + + if literal.type == 'REGEXP' and '\n' in v and 'x' not in flags: + raise GrammarError('You can only use newlines in regular expressions ' + 'with the `x` (verbose) flag') + + v = v[:flag_start] + assert v[0] == v[-1] and v[0] in '"/' + x = v[1:-1] + + s = eval_escaping(x) + + if s == "": + raise GrammarError("Empty terminals are not allowed (%s)" % literal) + + if literal.type == 'STRING': + s = s.replace('\\\\', '\\') + return PatternStr(s, flags, raw=literal.value) + elif literal.type == 'REGEXP': + return PatternRE(s, flags, raw=literal.value) + else: + assert False, 'Invariant failed: literal.type not in ["STRING", "REGEXP"]' + + +@inline_args +class PrepareLiterals(Transformer_InPlace): + def literal(self, literal): + return ST('pattern', [_literal_to_pattern(literal)]) + + def range(self, start, end): + assert start.type == end.type == 'STRING' + start = start.value[1:-1] + end = end.value[1:-1] + assert len(eval_escaping(start)) == len(eval_escaping(end)) == 1 + regexp = '[%s-%s]' % (start, end) + return ST('pattern', [PatternRE(regexp)]) + + +def _make_joined_pattern(regexp, flags_set): + return PatternRE(regexp, ()) + +class TerminalTreeToPattern(Transformer_NonRecursive): + def pattern(self, ps): + p ,= ps + return p + + def expansion(self, items): + assert items + if len(items) == 1: + return items[0] + + pattern = ''.join(i.to_regexp() for i in items) + return _make_joined_pattern(pattern, {i.flags for i in items}) + + def expansions(self, exps): + if len(exps) == 1: + return exps[0] + + # Do a bit of sorting to make sure that the longest option is returned + # (Python's re module otherwise prefers just 'l' when given (l|ll) and both could match) + exps.sort(key=lambda x: (-x.max_width, -x.min_width, -len(x.value))) + + pattern = '(?:%s)' % ('|'.join(i.to_regexp() for i in exps)) + return _make_joined_pattern(pattern, {i.flags for i in exps}) + + def expr(self, args): + inner, op = args[:2] + if op == '~': + if len(args) == 3: + op = "{%d}" % int(args[2]) + else: + mn, mx = map(int, args[2:]) + if mx < mn: + raise GrammarError("Bad Range for %s (%d..%d isn't allowed)" % (inner, mn, mx)) + op = "{%d,%d}" % (mn, mx) + else: + assert len(args) == 2 + return PatternRE('(?:%s)%s' % (inner.to_regexp(), op), inner.flags) + + def maybe(self, expr): + return self.expr(expr + ['?']) + + def alias(self, t): + raise GrammarError("Aliasing not allowed in terminals (You used -> in the wrong place)") + + def value(self, v): + return v[0] + + +class ValidateSymbols(Transformer_InPlace): + def value(self, v): + v ,= v + assert isinstance(v, (Tree, Symbol)) + return v + + +def nr_deepcopy_tree(t): + """Deepcopy tree `t` without recursion""" + return Transformer_NonRecursive(False).transform(t) + + +class Grammar: + + term_defs: List[Tuple[str, Tuple[Tree, int]]] + rule_defs: List[Tuple[str, Tuple[str, ...], Tree, RuleOptions]] + ignore: List[str] + + def __init__(self, rule_defs: List[Tuple[str, Tuple[str, ...], Tree, RuleOptions]], term_defs: List[Tuple[str, Tuple[Tree, int]]], ignore: List[str]) -> None: + self.term_defs = term_defs + self.rule_defs = rule_defs + self.ignore = ignore + + def compile(self, start, terminals_to_keep): + # We change the trees in-place (to support huge grammars) + # So deepcopy allows calling compile more 
than once. + term_defs = [(n, (nr_deepcopy_tree(t), p)) for n, (t, p) in self.term_defs] + rule_defs = [(n, p, nr_deepcopy_tree(t), o) for n, p, t, o in self.rule_defs] + + # =================== + # Compile Terminals + # =================== + + # Convert terminal-trees to strings/regexps + + for name, (term_tree, priority) in term_defs: + if term_tree is None: # Terminal added through %declare + continue + expansions = list(term_tree.find_data('expansion')) + if len(expansions) == 1 and not expansions[0].children: + raise GrammarError("Terminals cannot be empty (%s)" % name) + + transformer = PrepareLiterals() * TerminalTreeToPattern() + terminals = [TerminalDef(name, transformer.transform(term_tree), priority) + for name, (term_tree, priority) in term_defs if term_tree] + + # ================= + # Compile Rules + # ================= + + # 1. Pre-process terminals + anon_tokens_transf = PrepareAnonTerminals(terminals) + transformer = PrepareLiterals() * ValidateSymbols() * anon_tokens_transf # Adds to terminals + + # 2. Inline Templates + + transformer *= ApplyTemplates(rule_defs) + + # 3. Convert EBNF to BNF (and apply step 1 & 2) + ebnf_to_bnf = EBNF_to_BNF() + rules = [] + i = 0 + while i < len(rule_defs): # We have to do it like this because rule_defs might grow due to templates + name, params, rule_tree, options = rule_defs[i] + i += 1 + if len(params) != 0: # Dont transform templates + continue + rule_options = RuleOptions(keep_all_tokens=True) if options and options.keep_all_tokens else None + ebnf_to_bnf.rule_options = rule_options + ebnf_to_bnf.prefix = name + anon_tokens_transf.rule_options = rule_options + tree = transformer.transform(rule_tree) + res = ebnf_to_bnf.transform(tree) + rules.append((name, res, options)) + rules += ebnf_to_bnf.new_rules + + assert len(rules) == len({name for name, _t, _o in rules}), "Whoops, name collision" + + # 4. 
Compile tree to Rule objects
+        rule_tree_to_text = RuleTreeToText()
+
+        simplify_rule = SimplifyRule_Visitor()
+        compiled_rules = []
+        for rule_content in rules:
+            name, tree, options = rule_content
+            simplify_rule.visit(tree)
+            expansions = rule_tree_to_text.transform(tree)
+
+            for i, (expansion, alias) in enumerate(expansions):
+                if alias and name.startswith('_'):
+                    raise GrammarError("Rule %s is marked for expansion (it starts with an underscore) and isn't allowed to have aliases (alias=%s)"% (name, alias))
+
+                empty_indices = [x==_EMPTY for x in expansion]
+                if any(empty_indices):
+                    exp_options = copy(options) or RuleOptions()
+                    exp_options.empty_indices = empty_indices
+                    expansion = [x for x in expansion if x!=_EMPTY]
+                else:
+                    exp_options = options
+
+                for sym in expansion:
+                    assert isinstance(sym, Symbol)
+                    if sym.is_term and exp_options and exp_options.keep_all_tokens:
+                        sym.filter_out = False
+                rule = Rule(NonTerminal(name), expansion, i, alias, exp_options)
+                compiled_rules.append(rule)
+
+        # Remove duplicates of empty rules, throw error for non-empty duplicates
+        if len(set(compiled_rules)) != len(compiled_rules):
+            duplicates = classify(compiled_rules, lambda x: x)
+            for dups in duplicates.values():
+                if len(dups) > 1:
+                    if dups[0].expansion:
+                        raise GrammarError("Rules defined twice: %s\n\n(Might happen due to colliding expansion of optionals: [] or ?)"
+                                           % ''.join('\n  * %s' % i for i in dups))
+
+                    # Empty rule; assert all other attributes are equal
+                    assert len({(r.alias, r.order, r.options) for r in dups}) == len(dups)
+
+        # Remove duplicates
+        compiled_rules = list(set(compiled_rules))
+
+        # Filter out unused rules
+        while True:
+            c = len(compiled_rules)
+            used_rules = {s for r in compiled_rules
+                            for s in r.expansion
+                            if isinstance(s, NonTerminal)
+                            and s != r.origin}
+            used_rules |= {NonTerminal(s) for s in start}
+            compiled_rules, unused = classify_bool(compiled_rules, lambda r: r.origin in used_rules)
+            for r in unused:
+                logger.debug("Unused rule: %s", r)
+            if len(compiled_rules) == c:
+                break
+
+        # Filter out unused terminals
+        if terminals_to_keep != '*':
+            used_terms = {t.name for r in compiled_rules
+                                 for t in r.expansion
+                                 if isinstance(t, Terminal)}
+            terminals, unused = classify_bool(terminals, lambda t: t.name in used_terms or t.name in self.ignore or t.name in terminals_to_keep)
+            if unused:
+                logger.debug("Unused terminals: %s", [t.name for t in unused])
+
+        return terminals, compiled_rules, self.ignore
+
+
+PackageResource = namedtuple('PackageResource', 'pkg_name path')
+
+
+class FromPackageLoader:
+    """
+    Provides a simple way of creating custom import loaders that load from packages via ``pkgutil.get_data`` instead of using `open`.
+    This allows them to be compatible even from within zip files.
+
+    Relative imports are handled, so you can just freely use them.
+
+    pkg_name: The name of the package. You can probably provide `__name__` most of the time
+    search_paths: All the paths that will be searched on absolute imports.
+    """
+ """ + + pkg_name: str + search_paths: Sequence[str] + + def __init__(self, pkg_name: str, search_paths: Sequence[str]=("", )) -> None: + self.pkg_name = pkg_name + self.search_paths = search_paths + + def __repr__(self): + return "%s(%r, %r)" % (type(self).__name__, self.pkg_name, self.search_paths) + + def __call__(self, base_path: Union[None, str, PackageResource], grammar_path: str) -> Tuple[PackageResource, str]: + if base_path is None: + to_try = self.search_paths + else: + # Check whether or not the importing grammar was loaded by this module. + if not isinstance(base_path, PackageResource) or base_path.pkg_name != self.pkg_name: + # Technically false, but FileNotFound doesn't exist in python2.7, and this message should never reach the end user anyway + raise IOError() + to_try = [base_path.path] + + err = None + for path in to_try: + full_path = os.path.join(path, grammar_path) + try: + pkg = ".".join([self.pkg_name] + full_path.split(os.path.sep)[:-1]) + item = full_path.split(os.path.sep)[-1] + text: Optional[bytes] = resources.read_binary(pkg, item) + except IOError as e: + err = e + continue + else: + return PackageResource(self.pkg_name, full_path), (text.decode() if text else '') + + raise IOError('Cannot find grammar in given paths') from err + + +stdlib_loader = FromPackageLoader('lark', IMPORT_PATHS) + + + +def resolve_term_references(term_dict): + # TODO Solve with transitive closure (maybe) + + while True: + changed = False + for name, token_tree in term_dict.items(): + if token_tree is None: # Terminal added through %declare + continue + for exp in token_tree.find_data('value'): + item ,= exp.children + if isinstance(item, NonTerminal): + raise GrammarError("Rules aren't allowed inside terminals (%s in %s)" % (item, name)) + elif isinstance(item, Terminal): + try: + term_value = term_dict[item.name] + except KeyError: + raise GrammarError("Terminal used but not defined: %s" % item.name) + assert term_value is not None + exp.children[0] = term_value + changed = True + else: + assert isinstance(item, Tree) + if not changed: + break + + for name, term in term_dict.items(): + if term: # Not just declared + for child in term.children: + ids = [id(x) for x in child.iter_subtrees()] + if id(term) in ids: + raise GrammarError("Recursion in terminal '%s' (recursion is only allowed in rules, not terminals)" % name) + + + +def symbol_from_strcase(s): + assert isinstance(s, str) + return Terminal(s, filter_out=s.startswith('_')) if s.isupper() else NonTerminal(s) + +@inline_args +class PrepareGrammar(Transformer_InPlace): + def terminal(self, name): + return Terminal(str(name), filter_out=name.startswith('_')) + + def nonterminal(self, name): + return NonTerminal(name.value) + + +def _find_used_symbols(tree): + assert tree.data == 'expansions' + return {t.name for x in tree.find_data('expansion') + for t in x.scan_values(lambda t: isinstance(t, Symbol))} + + +def _get_parser(): + try: + return _get_parser.cache + except AttributeError: + terminals = [TerminalDef(name, PatternRE(value)) for name, value in TERMINALS.items()] + + rules = [(name.lstrip('?'), x, RuleOptions(expand1=name.startswith('?'))) + for name, x in RULES.items()] + rules = [Rule(NonTerminal(r), [symbol_from_strcase(s) for s in x.split()], i, None, o) + for r, xs, o in rules for i, x in enumerate(xs)] + + callback = ParseTreeBuilder(rules, ST).create_callback() + import re + lexer_conf = LexerConf(terminals, re, ['WS', 'COMMENT', 'BACKSLASH']) + parser_conf = ParserConf(rules, callback, ['start']) + 
lexer_conf.lexer_type = 'basic' + parser_conf.parser_type = 'lalr' + _get_parser.cache = ParsingFrontend(lexer_conf, parser_conf, None) + return _get_parser.cache + +GRAMMAR_ERRORS = [ + ('Incorrect type of value', ['a: 1\n']), + ('Unclosed parenthesis', ['a: (\n']), + ('Unmatched closing parenthesis', ['a: )\n', 'a: [)\n', 'a: (]\n']), + ('Expecting rule or terminal definition (missing colon)', ['a\n', 'A\n', 'a->\n', 'A->\n', 'a A\n']), + ('Illegal name for rules or terminals', ['Aa:\n']), + ('Alias expects lowercase name', ['a: -> "a"\n']), + ('Unexpected colon', ['a::\n', 'a: b:\n', 'a: B:\n', 'a: "a":\n']), + ('Misplaced operator', ['a: b??', 'a: b(?)', 'a:+\n', 'a:?\n', 'a:*\n', 'a:|*\n']), + ('Expecting option ("|") or a new rule or terminal definition', ['a:a\n()\n']), + ('Terminal names cannot contain dots', ['A.B\n']), + ('Expecting rule or terminal definition', ['"a"\n']), + ('%import expects a name', ['%import "a"\n']), + ('%ignore expects a value', ['%ignore %import\n']), + ] + +def _translate_parser_exception(parse, e): + error = e.match_examples(parse, GRAMMAR_ERRORS, use_accepts=True) + if error: + return error + elif 'STRING' in e.expected: + return "Expecting a value" + +def _parse_grammar(text, name, start='start'): + try: + tree = _get_parser().parse(text + '\n', start) + except UnexpectedCharacters as e: + context = e.get_context(text) + raise GrammarError("Unexpected input at line %d column %d in %s: \n\n%s" % + (e.line, e.column, name, context)) + except UnexpectedToken as e: + context = e.get_context(text) + error = _translate_parser_exception(_get_parser().parse, e) + if error: + raise GrammarError("%s, at line %s column %s\n\n%s" % (error, e.line, e.column, context)) + raise + + return PrepareGrammar().transform(tree) + + +def _error_repr(error): + if isinstance(error, UnexpectedToken): + error2 = _translate_parser_exception(_get_parser().parse, error) + if error2: + return error2 + expected = ', '.join(error.accepts or error.expected) + return "Unexpected token %r. 
Expected one of: {%s}" % (str(error.token), expected) + else: + return str(error) + +def _search_interactive_parser(interactive_parser, predicate): + def expand(node): + path, p = node + for choice in p.choices(): + t = Token(choice, '') + try: + new_p = p.feed_token(t) + except ParseError: # Illegal + pass + else: + yield path + (choice,), new_p + + for path, p in bfs_all_unique([((), interactive_parser)], expand): + if predicate(p): + return path, p + +def find_grammar_errors(text: str, start: str='start') -> List[Tuple[UnexpectedInput, str]]: + errors = [] + def on_error(e): + errors.append((e, _error_repr(e))) + + # recover to a new line + token_path, _ = _search_interactive_parser(e.interactive_parser.as_immutable(), lambda p: '_NL' in p.choices()) + for token_type in token_path: + e.interactive_parser.feed_token(Token(token_type, '')) + e.interactive_parser.feed_token(Token('_NL', '\n')) + return True + + _tree = _get_parser().parse(text + '\n', start, on_error=on_error) + + errors_by_line = classify(errors, lambda e: e[0].line) + errors = [el[0] for el in errors_by_line.values()] # already sorted + + for e in errors: + e[0].interactive_parser = None + return errors + + +def _get_mangle(prefix, aliases, base_mangle=None): + def mangle(s): + if s in aliases: + s = aliases[s] + else: + if s[0] == '_': + s = '_%s__%s' % (prefix, s[1:]) + else: + s = '%s__%s' % (prefix, s) + if base_mangle is not None: + s = base_mangle(s) + return s + return mangle + +def _mangle_definition_tree(exp, mangle): + if mangle is None: + return exp + exp = deepcopy(exp) # TODO: is this needed? + for t in exp.iter_subtrees(): + for i, c in enumerate(t.children): + if isinstance(c, Symbol): + t.children[i] = c.renamed(mangle) + + return exp + +def _make_rule_tuple(modifiers_tree, name, params, priority_tree, expansions): + if modifiers_tree.children: + m ,= modifiers_tree.children + expand1 = '?' in m + keep_all_tokens = '!' 
in m
+    else:
+        keep_all_tokens = False
+        expand1 = False
+
+    if priority_tree.children:
+        p ,= priority_tree.children
+        priority = int(p)
+    else:
+        priority = None
+
+    if params is not None:
+        params = [t.value for t in params.children]  # For the grammar parser
+
+    return name, params, expansions, RuleOptions(keep_all_tokens, expand1, priority=priority,
+                                                 template_source=(name if params else None))
+
+
+class Definition:
+    def __init__(self, is_term, tree, params=(), options=None):
+        self.is_term = is_term
+        self.tree = tree
+        self.params = tuple(params)
+        self.options = options
+
+class GrammarBuilder:
+
+    global_keep_all_tokens: bool
+    import_paths: List[Union[str, Callable]]
+    used_files: Dict[str, str]
+
+    _definitions: Dict[str, Definition]
+    _ignore_names: List[str]
+
+    def __init__(self, global_keep_all_tokens: bool=False, import_paths: Optional[List[Union[str, Callable]]]=None, used_files: Optional[Dict[str, str]]=None) -> None:
+        self.global_keep_all_tokens = global_keep_all_tokens
+        self.import_paths = import_paths or []
+        self.used_files = used_files or {}
+
+        self._definitions: Dict[str, Definition] = {}
+        self._ignore_names: List[str] = []
+
+    def _grammar_error(self, is_term, msg, *names):
+        args = {}
+        for i, name in enumerate(names, start=1):
+            postfix = '' if i == 1 else str(i)
+            args['name' + postfix] = name
+            args['type' + postfix] = lowercase_type = ("rule", "terminal")[is_term]
+            args['Type' + postfix] = lowercase_type.title()
+        raise GrammarError(msg.format(**args))
+
+    def _check_options(self, is_term, options):
+        if is_term:
+            if options is None:
+                options = 1
+            elif not isinstance(options, int):
+                raise GrammarError("Terminals require a single int as 'options' (e.g. priority), got %s" % (type(options),))
+        else:
+            if options is None:
+                options = RuleOptions()
+            elif not isinstance(options, RuleOptions):
+                raise GrammarError("Rules require a RuleOptions instance as 'options'")
+            if self.global_keep_all_tokens:
+                options.keep_all_tokens = True
+        return options
+
+
+    def _define(self, name, is_term, exp, params=(), options=None, *, override=False):
+        if name in self._definitions:
+            if not override:
+                self._grammar_error(is_term, "{Type} '{name}' defined more than once", name)
+        elif override:
+            self._grammar_error(is_term, "Cannot override a nonexistent {type} {name}", name)
+
+        if name.startswith('__'):
+            self._grammar_error(is_term, 'Names starting with double-underscore are reserved (Error at {name})', name)
+
+        self._definitions[name] = Definition(is_term, exp, params, self._check_options(is_term, options))
+
+    def _extend(self, name, is_term, exp, params=(), options=None):
+        if name not in self._definitions:
+            self._grammar_error(is_term, "Can't extend {type} {name} as it wasn't defined before", name)
+
+        d = self._definitions[name]
+
+        if is_term != d.is_term:
+            self._grammar_error(is_term, "Cannot extend {type} {name} - one is a terminal, while the other is not.", name)
+        if tuple(params) != d.params:
+            self._grammar_error(is_term, "Cannot extend {type} with different parameters: {name}", name)
+
+        if d.tree is None:
+            self._grammar_error(is_term, "Can't extend {type} {name} - it is abstract.", name)
+
+        # TODO: think about what to do with 'options'
+        base = d.tree
+
+        assert isinstance(base, Tree) and base.data == 'expansions'
+        base.children.insert(0, exp)
+
+    def _ignore(self, exp_or_name):
+        if isinstance(exp_or_name, str):
+            self._ignore_names.append(exp_or_name)
+        else:
+            assert isinstance(exp_or_name, Tree)
+            t = exp_or_name
+            if t.data == 'expansions'
and len(t.children) == 1: + t2 ,= t.children + if t2.data=='expansion' and len(t2.children) == 1: + item ,= t2.children + if item.data == 'value': + item ,= item.children + if isinstance(item, Terminal): + # Keep terminal name, no need to create a new definition + self._ignore_names.append(item.name) + return + + name = '__IGNORE_%d'% len(self._ignore_names) + self._ignore_names.append(name) + self._definitions[name] = Definition(True, t, options=TOKEN_DEFAULT_PRIORITY) + + def _unpack_import(self, stmt, grammar_name): + if len(stmt.children) > 1: + path_node, arg1 = stmt.children + else: + path_node, = stmt.children + arg1 = None + + if isinstance(arg1, Tree): # Multi import + dotted_path = tuple(path_node.children) + names = arg1.children + aliases = dict(zip(names, names)) # Can't have aliased multi import, so all aliases will be the same as names + else: # Single import + dotted_path = tuple(path_node.children[:-1]) + if not dotted_path: + name ,= path_node.children + raise GrammarError("Nothing was imported from grammar `%s`" % name) + name = path_node.children[-1] # Get name from dotted path + aliases = {name.value: (arg1 or name).value} # Aliases if exist + + if path_node.data == 'import_lib': # Import from library + base_path = None + else: # Relative import + if grammar_name == '': # Import relative to script file path if grammar is coded in script + try: + base_file = os.path.abspath(sys.modules['__main__'].__file__) + except AttributeError: + base_file = None + else: + base_file = grammar_name # Import relative to grammar file path if external grammar file + if base_file: + if isinstance(base_file, PackageResource): + base_path = PackageResource(base_file.pkg_name, os.path.split(base_file.path)[0]) + else: + base_path = os.path.split(base_file)[0] + else: + base_path = os.path.abspath(os.path.curdir) + + return dotted_path, base_path, aliases + + def _unpack_definition(self, tree, mangle): + + if tree.data == 'rule': + name, params, exp, opts = _make_rule_tuple(*tree.children) + is_term = False + else: + name = tree.children[0].value + params = () # TODO terminal templates + opts = int(tree.children[1]) if len(tree.children) == 3 else TOKEN_DEFAULT_PRIORITY # priority + exp = tree.children[-1] + is_term = True + + if mangle is not None: + params = tuple(mangle(p) for p in params) + name = mangle(name) + + exp = _mangle_definition_tree(exp, mangle) + return name, is_term, exp, params, opts + + + def load_grammar(self, grammar_text: str, grammar_name: str="", mangle: Optional[Callable[[str], str]]=None) -> None: + tree = _parse_grammar(grammar_text, grammar_name) + + imports: Dict[Tuple[str, ...], Tuple[Optional[str], Dict[str, str]]] = {} + + for stmt in tree.children: + if stmt.data == 'import': + dotted_path, base_path, aliases = self._unpack_import(stmt, grammar_name) + try: + import_base_path, import_aliases = imports[dotted_path] + assert base_path == import_base_path, 'Inconsistent base_path for %s.' 
% '.'.join(dotted_path) + import_aliases.update(aliases) + except KeyError: + imports[dotted_path] = base_path, aliases + + for dotted_path, (base_path, aliases) in imports.items(): + self.do_import(dotted_path, base_path, aliases, mangle) + + for stmt in tree.children: + if stmt.data in ('term', 'rule'): + self._define(*self._unpack_definition(stmt, mangle)) + elif stmt.data == 'override': + r ,= stmt.children + self._define(*self._unpack_definition(r, mangle), override=True) + elif stmt.data == 'extend': + r ,= stmt.children + self._extend(*self._unpack_definition(r, mangle)) + elif stmt.data == 'ignore': + # if mangle is not None, we shouldn't apply ignore, since we aren't in a toplevel grammar + if mangle is None: + self._ignore(*stmt.children) + elif stmt.data == 'declare': + for symbol in stmt.children: + assert isinstance(symbol, Symbol), symbol + is_term = isinstance(symbol, Terminal) + if mangle is None: + name = symbol.name + else: + name = mangle(symbol.name) + self._define(name, is_term, None) + elif stmt.data == 'import': + pass + else: + assert False, stmt + + + term_defs = { name: d.tree + for name, d in self._definitions.items() + if d.is_term + } + resolve_term_references(term_defs) + + + def _remove_unused(self, used): + def rule_dependencies(symbol): + try: + d = self._definitions[symbol] + except KeyError: + return [] + if d.is_term: + return [] + return _find_used_symbols(d.tree) - set(d.params) + + _used = set(bfs(used, rule_dependencies)) + self._definitions = {k: v for k, v in self._definitions.items() if k in _used} + + + def do_import(self, dotted_path: Tuple[str, ...], base_path: Optional[str], aliases: Dict[str, str], base_mangle: Optional[Callable[[str], str]]=None) -> None: + assert dotted_path + mangle = _get_mangle('__'.join(dotted_path), aliases, base_mangle) + grammar_path = os.path.join(*dotted_path) + EXT + to_try = self.import_paths + ([base_path] if base_path is not None else []) + [stdlib_loader] + for source in to_try: + try: + if callable(source): + joined_path, text = source(base_path, grammar_path) + else: + joined_path = os.path.join(source, grammar_path) + with open(joined_path, encoding='utf8') as f: + text = f.read() + except IOError: + continue + else: + h = hashlib.md5(text.encode('utf8')).hexdigest() + if self.used_files.get(joined_path, h) != h: + raise RuntimeError("Grammar file was changed during importing") + self.used_files[joined_path] = h + + gb = GrammarBuilder(self.global_keep_all_tokens, self.import_paths, self.used_files) + gb.load_grammar(text, joined_path, mangle) + gb._remove_unused(map(mangle, aliases)) + for name in gb._definitions: + if name in self._definitions: + raise GrammarError("Cannot import '%s' from '%s': Symbol already defined." % (name, grammar_path)) + + self._definitions.update(**gb._definitions) + break + else: + # Search failed. Make Python throw a nice error. 
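+            # open() on the missing file raises FileNotFoundError, which carries
+            # the attempted path in its message.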
+ open(grammar_path, encoding='utf8') + assert False, "Couldn't import grammar %s, but a corresponding file was found at a place where lark doesn't search for it" % (dotted_path,) + + + def validate(self) -> None: + for name, d in self._definitions.items(): + params = d.params + exp = d.tree + + for i, p in enumerate(params): + if p in self._definitions: + raise GrammarError("Template Parameter conflicts with rule %s (in template %s)" % (p, name)) + if p in params[:i]: + raise GrammarError("Duplicate Template Parameter %s (in template %s)" % (p, name)) + + if exp is None: # Remaining checks don't apply to abstract rules/terminals (created with %declare) + continue + + for temp in exp.find_data('template_usage'): + sym = temp.children[0].name + args = temp.children[1:] + if sym not in params: + if sym not in self._definitions: + self._grammar_error(d.is_term, "Template '%s' used but not defined (in {type} {name})" % sym, name) + if len(args) != len(self._definitions[sym].params): + expected, actual = len(self._definitions[sym].params), len(args) + self._grammar_error(d.is_term, "Wrong number of template arguments used for {name} " + "(expected %s, got %s) (in {type2} {name2})" % (expected, actual), sym, name) + + for sym in _find_used_symbols(exp): + if sym not in self._definitions and sym not in params: + self._grammar_error(d.is_term, "{Type} '{name}' used but not defined (in {type2} {name2})", sym, name) + + if not set(self._definitions).issuperset(self._ignore_names): + raise GrammarError("Terminals %s were marked to ignore but were not defined!" % (set(self._ignore_names) - set(self._definitions))) + + def build(self) -> Grammar: + self.validate() + rule_defs = [] + term_defs = [] + for name, d in self._definitions.items(): + (params, exp, options) = d.params, d.tree, d.options + if d.is_term: + assert len(params) == 0 + term_defs.append((name, (exp, options))) + else: + rule_defs.append((name, params, exp, options)) + # resolve_term_references(term_defs) + return Grammar(rule_defs, term_defs, self._ignore_names) + + +def verify_used_files(file_hashes): + for path, old in file_hashes.items(): + text = None + if isinstance(path, str) and os.path.exists(path): + with open(path, encoding='utf8') as f: + text = f.read() + elif isinstance(path, PackageResource): + with suppress(IOError): + text = pkgutil.get_data(*path).decode('utf-8') + if text is None: # We don't know how to load the path. ignore it. 
+ continue + + current = hashlib.md5(text.encode()).hexdigest() + if old != current: + logger.info("File %r changed, rebuilding Parser" % path) + return False + return True + +def list_grammar_imports(grammar, import_paths=[]): + "Returns a list of paths to the lark grammars imported by the given grammar (recursively)" + builder = GrammarBuilder(False, import_paths) + builder.load_grammar(grammar, '') + return list(builder.used_files.keys()) + +def load_grammar(grammar, source, import_paths, global_keep_all_tokens): + builder = GrammarBuilder(global_keep_all_tokens, import_paths) + builder.load_grammar(grammar, source) + return builder.build(), builder.used_files diff --git a/vendor/lark/lark/parse_tree_builder.py b/vendor/lark/lark/parse_tree_builder.py new file mode 100644 index 00000000..888cc736 --- /dev/null +++ b/vendor/lark/lark/parse_tree_builder.py @@ -0,0 +1,385 @@ +from typing import List + +from .exceptions import GrammarError, ConfigurationError +from .lexer import Token +from .tree import Tree +from .visitors import Transformer_InPlace +from .visitors import _vargs_meta, _vargs_meta_inline + +###{standalone +from functools import partial, wraps +from itertools import repeat, product + + +class ExpandSingleChild: + def __init__(self, node_builder): + self.node_builder = node_builder + + def __call__(self, children): + if len(children) == 1: + return children[0] + else: + return self.node_builder(children) + + + +class PropagatePositions: + def __init__(self, node_builder, node_filter=None): + self.node_builder = node_builder + self.node_filter = node_filter + + def __call__(self, children): + res = self.node_builder(children) + + if isinstance(res, Tree): + # Calculate positions while the tree is streaming, according to the rule: + # - nodes start at the start of their first child's container, + # and end at the end of their last child's container. + # Containers are nodes that take up space in text, but have been inlined in the tree. + + res_meta = res.meta + + first_meta = self._pp_get_meta(children) + if first_meta is not None: + if not hasattr(res_meta, 'line'): + # meta was already set, probably because the rule has been inlined (e.g. 
`?rule`) + res_meta.line = getattr(first_meta, 'container_line', first_meta.line) + res_meta.column = getattr(first_meta, 'container_column', first_meta.column) + res_meta.start_pos = getattr(first_meta, 'container_start_pos', first_meta.start_pos) + res_meta.empty = False + + res_meta.container_line = getattr(first_meta, 'container_line', first_meta.line) + res_meta.container_column = getattr(first_meta, 'container_column', first_meta.column) + + last_meta = self._pp_get_meta(reversed(children)) + if last_meta is not None: + if not hasattr(res_meta, 'end_line'): + res_meta.end_line = getattr(last_meta, 'container_end_line', last_meta.end_line) + res_meta.end_column = getattr(last_meta, 'container_end_column', last_meta.end_column) + res_meta.end_pos = getattr(last_meta, 'container_end_pos', last_meta.end_pos) + res_meta.empty = False + + res_meta.container_end_line = getattr(last_meta, 'container_end_line', last_meta.end_line) + res_meta.container_end_column = getattr(last_meta, 'container_end_column', last_meta.end_column) + + return res + + def _pp_get_meta(self, children): + for c in children: + if self.node_filter is not None and not self.node_filter(c): + continue + if isinstance(c, Tree): + if not c.meta.empty: + return c.meta + elif isinstance(c, Token): + return c + +def make_propagate_positions(option): + if callable(option): + return partial(PropagatePositions, node_filter=option) + elif option is True: + return PropagatePositions + elif option is False: + return None + + raise ConfigurationError('Invalid option for propagate_positions: %r' % option) + + +class ChildFilter: + def __init__(self, to_include, append_none, node_builder): + self.node_builder = node_builder + self.to_include = to_include + self.append_none = append_none + + def __call__(self, children): + filtered = [] + + for i, to_expand, add_none in self.to_include: + if add_none: + filtered += [None] * add_none + if to_expand: + filtered += children[i].children + else: + filtered.append(children[i]) + + if self.append_none: + filtered += [None] * self.append_none + + return self.node_builder(filtered) + + +class ChildFilterLALR(ChildFilter): + """Optimized childfilter for LALR (assumes no duplication in parse tree, so it's safe to change it)""" + + def __call__(self, children): + filtered = [] + for i, to_expand, add_none in self.to_include: + if add_none: + filtered += [None] * add_none + if to_expand: + if filtered: + filtered += children[i].children + else: # Optimize for left-recursion + filtered = children[i].children + else: + filtered.append(children[i]) + + if self.append_none: + filtered += [None] * self.append_none + + return self.node_builder(filtered) + + +class ChildFilterLALR_NoPlaceholders(ChildFilter): + "Optimized childfilter for LALR (assumes no duplication in parse tree, so it's safe to change it)" + def __init__(self, to_include, node_builder): + self.node_builder = node_builder + self.to_include = to_include + + def __call__(self, children): + filtered = [] + for i, to_expand in self.to_include: + if to_expand: + if filtered: + filtered += children[i].children + else: # Optimize for left-recursion + filtered = children[i].children + else: + filtered.append(children[i]) + return self.node_builder(filtered) + + +def _should_expand(sym): + return not sym.is_term and sym.name.startswith('_') + + +def maybe_create_child_filter(expansion, keep_all_tokens, ambiguous, _empty_indices: List[bool]): + # Prepare empty_indices as: How many Nones to insert at each index? 
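+ # Worked example (hypothetical values): _empty_indices=[False, True, True, False] with + # len(expansion)==2 encodes to s='0110' and empty_indices=[0, 2, 0] -- no Nones before + # child 0, two Nones before child 1, and none appended at the end.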
+ if _empty_indices: + assert _empty_indices.count(False) == len(expansion) + s = ''.join(str(int(b)) for b in _empty_indices) + empty_indices = [len(ones) for ones in s.split('0')] + assert len(empty_indices) == len(expansion)+1, (empty_indices, len(expansion)) + else: + empty_indices = [0] * (len(expansion)+1) + + to_include = [] + nones_to_add = 0 + for i, sym in enumerate(expansion): + nones_to_add += empty_indices[i] + if keep_all_tokens or not (sym.is_term and sym.filter_out): + to_include.append((i, _should_expand(sym), nones_to_add)) + nones_to_add = 0 + + nones_to_add += empty_indices[len(expansion)] + + if _empty_indices or len(to_include) < len(expansion) or any(to_expand for i, to_expand,_ in to_include): + if _empty_indices or ambiguous: + return partial(ChildFilter if ambiguous else ChildFilterLALR, to_include, nones_to_add) + else: + # LALR without placeholders + return partial(ChildFilterLALR_NoPlaceholders, [(i, x) for i,x,_ in to_include]) + + +class AmbiguousExpander: + """Deal with the case where we're expanding children ('_rule') into a parent but the children + are ambiguous. i.e. (parent->_ambig->_expand_this_rule). In this case, make the parent itself + ambiguous with as many copies as there are ambiguous children, and then copy the ambiguous children + into the right parents in the right places, essentially shifting the ambiguity up the tree.""" + def __init__(self, to_expand, tree_class, node_builder): + self.node_builder = node_builder + self.tree_class = tree_class + self.to_expand = to_expand + + def __call__(self, children): + def _is_ambig_tree(t): + return hasattr(t, 'data') and t.data == '_ambig' + + # -- When we're repeatedly expanding ambiguities we can end up with nested ambiguities. + # All children of an _ambig node should be a derivation of that ambig node, hence + # it is safe to assume that if we see an _ambig node nested within an ambig node + # it is safe to simply expand it into the parent _ambig node as an alternative derivation. + ambiguous = [] + for i, child in enumerate(children): + if _is_ambig_tree(child): + if i in self.to_expand: + ambiguous.append(i) + + child.expand_kids_by_data('_ambig') + + if not ambiguous: + return self.node_builder(children) + + expand = [iter(child.children) if i in ambiguous else repeat(child) for i, child in enumerate(children)] + return self.tree_class('_ambig', [self.node_builder(list(f[0])) for f in product(zip(*expand))]) + + +def maybe_create_ambiguous_expander(tree_class, expansion, keep_all_tokens): + to_expand = [i for i, sym in enumerate(expansion) + if keep_all_tokens or ((not (sym.is_term and sym.filter_out)) and _should_expand(sym))] + if to_expand: + return partial(AmbiguousExpander, to_expand, tree_class) + + +class AmbiguousIntermediateExpander: + """ + Propagate ambiguous intermediate nodes and their derivations up to the + current rule. + + In general, converts + + rule + _iambig + _inter + someChildren1 + ... + _inter + someChildren2 + ... + someChildren3 + ... + + to + + _ambig + rule + someChildren1 + ... + someChildren3 + ... + rule + someChildren2 + ... + someChildren3 + ... + rule + childrenFromNestedIambigs + ... + someChildren3 + ... + ... + + propagating up any nested '_iambig' nodes along the way.
+ """ + + def __init__(self, tree_class, node_builder): + self.node_builder = node_builder + self.tree_class = tree_class + + def __call__(self, children): + def _is_iambig_tree(child): + return hasattr(child, 'data') and child.data == '_iambig' + + def _collapse_iambig(children): + """ + Recursively flatten the derivations of the parent of an '_iambig' + node. Returns a list of '_inter' nodes guaranteed not + to contain any nested '_iambig' nodes, or None if children does + not contain an '_iambig' node. + """ + + # Due to the structure of the SPPF, + # an '_iambig' node can only appear as the first child + if children and _is_iambig_tree(children[0]): + iambig_node = children[0] + result = [] + for grandchild in iambig_node.children: + collapsed = _collapse_iambig(grandchild.children) + if collapsed: + for child in collapsed: + child.children += children[1:] + result += collapsed + else: + new_tree = self.tree_class('_inter', grandchild.children + children[1:]) + result.append(new_tree) + return result + + collapsed = _collapse_iambig(children) + if collapsed: + processed_nodes = [self.node_builder(c.children) for c in collapsed] + return self.tree_class('_ambig', processed_nodes) + + return self.node_builder(children) + + + +def inplace_transformer(func): + @wraps(func) + def f(children): + # function name in a Transformer is a rule name. + tree = Tree(func.__name__, children) + return func(tree) + return f + + +def apply_visit_wrapper(func, name, wrapper): + if wrapper is _vargs_meta or wrapper is _vargs_meta_inline: + raise NotImplementedError("Meta args not supported for internal transformer") + + @wraps(func) + def f(children): + return wrapper(func, name, children, None) + return f + + +class ParseTreeBuilder: + def __init__(self, rules, tree_class, propagate_positions=False, ambiguous=False, maybe_placeholders=False): + self.tree_class = tree_class + self.propagate_positions = propagate_positions + self.ambiguous = ambiguous + self.maybe_placeholders = maybe_placeholders + + self.rule_builders = list(self._init_builders(rules)) + + def _init_builders(self, rules): + propagate_positions = make_propagate_positions(self.propagate_positions) + + for rule in rules: + options = rule.options + keep_all_tokens = options.keep_all_tokens + expand_single_child = options.expand1 + + wrapper_chain = list(filter(None, [ + (expand_single_child and not rule.alias) and ExpandSingleChild, + maybe_create_child_filter(rule.expansion, keep_all_tokens, self.ambiguous, options.empty_indices if self.maybe_placeholders else None), + propagate_positions, + self.ambiguous and maybe_create_ambiguous_expander(self.tree_class, rule.expansion, keep_all_tokens), + self.ambiguous and partial(AmbiguousIntermediateExpander, self.tree_class) + ])) + + yield rule, wrapper_chain + + def create_callback(self, transformer=None): + callbacks = {} + + default_handler = getattr(transformer, '__default__', None) + if default_handler: + def default_callback(data, children): + return default_handler(data, children, None) + else: + default_callback = self.tree_class + + for rule, wrapper_chain in self.rule_builders: + + user_callback_name = rule.alias or rule.options.template_source or rule.origin.name + try: + f = getattr(transformer, user_callback_name) + wrapper = getattr(f, 'visit_wrapper', None) + if wrapper is not None: + f = apply_visit_wrapper(f, user_callback_name, wrapper) + elif isinstance(transformer, Transformer_InPlace): + f = inplace_transformer(f) + except AttributeError: + f = partial(default_callback, 
user_callback_name) + + for w in wrapper_chain: + f = w(f) + + if rule in callbacks: + raise GrammarError("Rule '%s' already exists" % (rule,)) + + callbacks[rule] = f + + return callbacks + +###} diff --git a/vendor/lark/lark/parser_frontends.py b/vendor/lark/lark/parser_frontends.py new file mode 100644 index 00000000..e162edfa --- /dev/null +++ b/vendor/lark/lark/parser_frontends.py @@ -0,0 +1,245 @@ +from typing import Any, Callable, Dict, Tuple + +from .exceptions import ConfigurationError, GrammarError, assert_config +from .utils import get_regexp_width, Serialize +from .parsers.grammar_analysis import GrammarAnalyzer +from .lexer import LexerThread, BasicLexer, ContextualLexer, Lexer +from .parsers import earley, xearley, cyk +from .parsers.lalr_parser import LALR_Parser +from .tree import Tree +from .common import LexerConf, ParserConf, _ParserArgType, _LexerArgType + +###{standalone + +def _wrap_lexer(lexer_class): + future_interface = getattr(lexer_class, '__future_interface__', False) + if future_interface: + return lexer_class + else: + class CustomLexerWrapper(Lexer): + def __init__(self, lexer_conf): + self.lexer = lexer_class(lexer_conf) + def lex(self, lexer_state, parser_state): + return self.lexer.lex(lexer_state.text) + return CustomLexerWrapper + + +def _deserialize_parsing_frontend(data, memo, lexer_conf, callbacks, options): + parser_conf = ParserConf.deserialize(data['parser_conf'], memo) + cls = (options and options._plugins.get('LALR_Parser')) or LALR_Parser + parser = cls.deserialize(data['parser'], memo, callbacks, options.debug) + parser_conf.callbacks = callbacks + return ParsingFrontend(lexer_conf, parser_conf, options, parser=parser) + + +_parser_creators: 'Dict[str, Callable[[LexerConf, Any, Any], Any]]' = {} + + +class ParsingFrontend(Serialize): + __serialize_fields__ = 'lexer_conf', 'parser_conf', 'parser' + + def __init__(self, lexer_conf, parser_conf, options, parser=None): + self.parser_conf = parser_conf + self.lexer_conf = lexer_conf + self.options = options + + # Set-up parser + if parser: # From cache + self.parser = parser + else: + create_parser = _parser_creators.get(parser_conf.parser_type) + assert create_parser is not None, "{} is not supported in standalone mode".format( + parser_conf.parser_type + ) + self.parser = create_parser(lexer_conf, parser_conf, options) + + # Set-up lexer + lexer_type = lexer_conf.lexer_type + self.skip_lexer = False + if lexer_type in ('dynamic', 'dynamic_complete'): + assert lexer_conf.postlex is None + self.skip_lexer = True + return + + try: + create_lexer = { + 'basic': create_basic_lexer, + 'contextual': create_contextual_lexer, + }[lexer_type] + except KeyError: + assert issubclass(lexer_type, Lexer), lexer_type + self.lexer = _wrap_lexer(lexer_type)(lexer_conf) + else: + self.lexer = create_lexer(lexer_conf, self.parser, lexer_conf.postlex, options) + + if lexer_conf.postlex: + self.lexer = PostLexConnector(self.lexer, lexer_conf.postlex) + + def _verify_start(self, start=None): + if start is None: + start_decls = self.parser_conf.start + if len(start_decls) > 1: + raise ConfigurationError("Lark initialized with more than 1 possible start rule. Must specify which start rule to parse", start_decls) + start ,= start_decls + elif start not in self.parser_conf.start: + raise ConfigurationError("Unknown start rule %s. 
Must be one of %r" % (start, self.parser_conf.start)) + return start + + def _make_lexer_thread(self, text): + cls = (self.options and self.options._plugins.get('LexerThread')) or LexerThread + return text if self.skip_lexer else cls.from_text(self.lexer, text) + + def parse(self, text, start=None, on_error=None): + chosen_start = self._verify_start(start) + kw = {} if on_error is None else {'on_error': on_error} + stream = self._make_lexer_thread(text) + return self.parser.parse(stream, chosen_start, **kw) + + def parse_interactive(self, text=None, start=None): + chosen_start = self._verify_start(start) + if self.parser_conf.parser_type != 'lalr': + raise ConfigurationError("parse_interactive() currently only works with parser='lalr' ") + stream = self._make_lexer_thread(text) + return self.parser.parse_interactive(stream, chosen_start) + + +def _validate_frontend_args(parser, lexer) -> None: + assert_config(parser, ('lalr', 'earley', 'cyk')) + if not isinstance(lexer, type): # not custom lexer? + expected = { + 'lalr': ('basic', 'contextual'), + 'earley': ('basic', 'dynamic', 'dynamic_complete'), + 'cyk': ('basic', ), + }[parser] + assert_config(lexer, expected, 'Parser %r does not support lexer %%r, expected one of %%s' % parser) + + +def _get_lexer_callbacks(transformer, terminals): + result = {} + for terminal in terminals: + callback = getattr(transformer, terminal.name, None) + if callback is not None: + result[terminal.name] = callback + return result + +class PostLexConnector: + def __init__(self, lexer, postlexer): + self.lexer = lexer + self.postlexer = postlexer + + def lex(self, lexer_state, parser_state): + i = self.lexer.lex(lexer_state, parser_state) + return self.postlexer.process(i) + + + +def create_basic_lexer(lexer_conf, parser, postlex, options): + cls = (options and options._plugins.get('BasicLexer')) or BasicLexer + return cls(lexer_conf) + +def create_contextual_lexer(lexer_conf, parser, postlex, options): + cls = (options and options._plugins.get('ContextualLexer')) or ContextualLexer + states = {idx:list(t.keys()) for idx, t in parser._parse_table.states.items()} + always_accept = postlex.always_accept if postlex else () + return cls(lexer_conf, states, always_accept=always_accept) + +def create_lalr_parser(lexer_conf, parser_conf, options=None): + debug = options.debug if options else False + cls = (options and options._plugins.get('LALR_Parser')) or LALR_Parser + return cls(parser_conf, debug=debug) + +_parser_creators['lalr'] = create_lalr_parser + +###} + +class EarleyRegexpMatcher: + def __init__(self, lexer_conf): + self.regexps = {} + for t in lexer_conf.terminals: + regexp = t.pattern.to_regexp() + try: + width = get_regexp_width(regexp)[0] + except ValueError: + raise GrammarError("Bad regexp in token %s: %s" % (t.name, regexp)) + else: + if width == 0: + raise GrammarError("Dynamic Earley doesn't allow zero-width regexps", t) + if lexer_conf.use_bytes: + regexp = regexp.encode('utf-8') + + self.regexps[t.name] = lexer_conf.re_module.compile(regexp, lexer_conf.g_regex_flags) + + def match(self, term, text, index=0): + return self.regexps[term.name].match(text, index) + + +def create_earley_parser__dynamic(lexer_conf, parser_conf, options=None, **kw): + if lexer_conf.callbacks: + raise GrammarError("Earley's dynamic lexer doesn't support lexer_callbacks.") + + earley_matcher = EarleyRegexpMatcher(lexer_conf) + return xearley.Parser(lexer_conf, parser_conf, earley_matcher.match, **kw) + +def _match_earley_basic(term, token): + return term.name == 
token.type + +def create_earley_parser__basic(lexer_conf, parser_conf, options, **kw): + return earley.Parser(lexer_conf, parser_conf, _match_earley_basic, **kw) + +def create_earley_parser(lexer_conf, parser_conf, options): + resolve_ambiguity = options.ambiguity == 'resolve' + debug = options.debug if options else False + tree_class = options.tree_class or Tree if options.ambiguity != 'forest' else None + + extra = {} + if lexer_conf.lexer_type == 'dynamic': + f = create_earley_parser__dynamic + elif lexer_conf.lexer_type == 'dynamic_complete': + extra['complete_lex'] = True + f = create_earley_parser__dynamic + else: + f = create_earley_parser__basic + + return f(lexer_conf, parser_conf, options, resolve_ambiguity=resolve_ambiguity, debug=debug, tree_class=tree_class, **extra) + + + +class CYK_FrontEnd: + def __init__(self, lexer_conf, parser_conf, options=None): + self._analysis = GrammarAnalyzer(parser_conf) + self.parser = cyk.Parser(parser_conf.rules) + + self.callbacks = parser_conf.callbacks + + def parse(self, lexer_thread, start): + tokens = list(lexer_thread.lex(None)) + tree = self.parser.parse(tokens, start) + return self._transform(tree) + + def _transform(self, tree): + subtrees = list(tree.iter_subtrees()) + for subtree in subtrees: + subtree.children = [self._apply_callback(c) if isinstance(c, Tree) else c for c in subtree.children] + + return self._apply_callback(tree) + + def _apply_callback(self, tree): + return self.callbacks[tree.rule](tree.children) + + +_parser_creators['earley'] = create_earley_parser +_parser_creators['cyk'] = CYK_FrontEnd + + +def _construct_parsing_frontend( + parser_type: _ParserArgType, + lexer_type: _LexerArgType, + lexer_conf, + parser_conf, + options +): + assert isinstance(lexer_conf, LexerConf) + assert isinstance(parser_conf, ParserConf) + parser_conf.parser_type = parser_type + lexer_conf.lexer_type = lexer_type + return ParsingFrontend(lexer_conf, parser_conf, options) diff --git a/vendor/poetry-core/poetry/core/packages/utils/__init__.py b/vendor/lark/lark/parsers/__init__.py similarity index 100% rename from vendor/poetry-core/poetry/core/packages/utils/__init__.py rename to vendor/lark/lark/parsers/__init__.py diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/parsers/cyk.py b/vendor/lark/lark/parsers/cyk.py similarity index 99% rename from vendor/poetry-core/poetry/core/_vendor/lark/parsers/cyk.py rename to vendor/lark/lark/parsers/cyk.py index ff0924f2..82818ccf 100644 --- a/vendor/poetry-core/poetry/core/_vendor/lark/parsers/cyk.py +++ b/vendor/lark/lark/parsers/cyk.py @@ -23,7 +23,7 @@ def match(t, s): return t.name == s.type -class Rule(object): +class Rule: """Context-free grammar rule.""" def __init__(self, lhs, rhs, weight, alias): @@ -51,7 +51,7 @@ def __ne__(self, other): return not (self == other) -class Grammar(object): +class Grammar: """Context-free grammar.""" def __init__(self, rules): @@ -68,7 +68,7 @@ def __repr__(self): # Parse tree data structures -class RuleNode(object): +class RuleNode: """A node in the parse tree, which also contains the full rhs rule.""" def __init__(self, rule, children, weight=0): @@ -81,7 +81,7 @@ def __repr__(self): -class Parser(object): +class Parser: """Parser wrapper.""" def __init__(self, rules): @@ -186,7 +186,7 @@ def _parse(s, g): # * Empty rules (epsilon rules) -class CnfWrapper(object): +class CnfWrapper: """CNF wrapper for grammar. Validates that the input grammar is CNF and provides helper data structures.
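The frontends assembled above are normally reached through lark's public entry point: `_construct_parsing_frontend` stamps the chosen parser/lexer types onto the configs, and `ParsingFrontend` then dispatches through `_parser_creators`. A minimal sketch of the combinations that `_validate_frontend_args` permits, assuming the vendored package behaves like upstream lark 1.1.2 and is importable as `lark` (the grammar and names below are illustrative only):

```python
from lark import Lark  # the vendored package added by this diff

grammar = r"""
start: NAME "=" NUMBER
NAME: /[a-z]+/
NUMBER: /\d+/
%ignore " "
"""

# parser='lalr' pairs with the 'basic' or 'contextual' lexers;
# parser='earley' additionally allows 'dynamic' and 'dynamic_complete'.
lalr = Lark(grammar, parser="lalr", lexer="contextual")
earley = Lark(grammar, parser="earley", lexer="dynamic")

print(lalr.parse("x = 1").pretty())
print(earley.parse("y = 2").pretty())
```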
diff --git a/vendor/lark/lark/parsers/earley.py b/vendor/lark/lark/parsers/earley.py new file mode 100644 index 00000000..044f7b05 --- /dev/null +++ b/vendor/lark/lark/parsers/earley.py @@ -0,0 +1,295 @@ +"""This module implements an Earley parser. + +The core Earley algorithm used here is based on Elizabeth Scott's implementation, here: + https://www.sciencedirect.com/science/article/pii/S1571066108001497 + +That is probably the best reference for understanding the algorithm here. + +The Earley parser outputs an SPPF-tree as per that document. The SPPF tree format +is explained here: https://lark-parser.readthedocs.io/en/latest/_static/sppf/sppf.html +""" + +from collections import deque + +from ..lexer import Token +from ..tree import Tree +from ..exceptions import UnexpectedEOF, UnexpectedToken +from ..utils import logger +from .grammar_analysis import GrammarAnalyzer +from ..grammar import NonTerminal +from .earley_common import Item +from .earley_forest import ForestSumVisitor, SymbolNode, TokenNode, ForestToParseTree + +class Parser: + def __init__(self, lexer_conf, parser_conf, term_matcher, resolve_ambiguity=True, debug=False, tree_class=Tree): + analysis = GrammarAnalyzer(parser_conf) + self.lexer_conf = lexer_conf + self.parser_conf = parser_conf + self.resolve_ambiguity = resolve_ambiguity + self.debug = debug + self.tree_class = tree_class + + self.FIRST = analysis.FIRST + self.NULLABLE = analysis.NULLABLE + self.callbacks = parser_conf.callbacks + self.predictions = {} + + ## These could be moved to the grammar analyzer. Pre-computing these is *much* faster than + # the slow 'isupper' in is_terminal. + self.TERMINALS = { sym for r in parser_conf.rules for sym in r.expansion if sym.is_term } + self.NON_TERMINALS = { sym for r in parser_conf.rules for sym in r.expansion if not sym.is_term } + + self.forest_sum_visitor = None + for rule in parser_conf.rules: + if rule.origin not in self.predictions: + self.predictions[rule.origin] = [x.rule for x in analysis.expand_rule(rule.origin)] + + ## Detect if any rules/terminals have priorities set. If the user specified priority = None, then + # the priorities will be stripped from all rules/terminals before they reach us, allowing us to + # skip the extra tree walk. We'll also skip this if the user just didn't specify priorities + # on any rules/terminals. + if self.forest_sum_visitor is None and rule.options.priority is not None: + self.forest_sum_visitor = ForestSumVisitor + + # Check terminals for priorities + # Ignore terminal priorities if the basic lexer is used + if self.lexer_conf.lexer_type != 'basic' and self.forest_sum_visitor is None: + for term in self.lexer_conf.terminals: + if term.priority: + self.forest_sum_visitor = ForestSumVisitor + break + + self.term_matcher = term_matcher + + + def predict_and_complete(self, i, to_scan, columns, transitives): + """The core Earley Predictor and Completer. + + At each stage of the input, we handle any completed items (things + that matched on the last cycle) and use those to predict what should + come next in the input stream. The completions and any predicted + non-terminals are recursively processed until we reach a set of terminals, + which can be added to the scan list for the next scanner cycle.""" + # Held Completions (H in E. Scott's paper).
+ node_cache = {} + held_completions = {} + + column = columns[i] + # R (items) = Ei (column.items) + items = deque(column) + while items: + item = items.pop() # remove an element, A say, from R + + ### The Earley completer + if item.is_complete: ### (item.s == string) + if item.node is None: + label = (item.s, item.start, i) + item.node = node_cache[label] if label in node_cache else node_cache.setdefault(label, SymbolNode(*label)) + item.node.add_family(item.s, item.rule, item.start, None, None) + + # create_leo_transitives(item.rule.origin, item.start) + + ###R Joop Leo right recursion Completer + if item.rule.origin in transitives[item.start]: + transitive = transitives[item.start][item.s] + if transitive.previous in transitives[transitive.column]: + root_transitive = transitives[transitive.column][transitive.previous] + else: + root_transitive = transitive + + new_item = Item(transitive.rule, transitive.ptr, transitive.start) + label = (root_transitive.s, root_transitive.start, i) + new_item.node = node_cache[label] if label in node_cache else node_cache.setdefault(label, SymbolNode(*label)) + new_item.node.add_path(root_transitive, item.node) + if new_item.expect in self.TERMINALS: + # Add (B :: aC.B, h, y) to Q + to_scan.add(new_item) + elif new_item not in column: + # Add (B :: aC.B, h, y) to Ei and R + column.add(new_item) + items.append(new_item) + ###R Regular Earley completer + else: + # Empty has 0 length. If we complete an empty symbol in a particular + # parse step, we need to be able to use that same empty symbol to complete + # any predictions that result, that themselves require empty. Avoids + # infinite recursion on empty symbols. + # held_completions is 'H' in E.Scott's paper. + is_empty_item = item.start == i + if is_empty_item: + held_completions[item.rule.origin] = item.node + + originators = [originator for originator in columns[item.start] if originator.expect is not None and originator.expect == item.s] + for originator in originators: + new_item = originator.advance() + label = (new_item.s, originator.start, i) + new_item.node = node_cache[label] if label in node_cache else node_cache.setdefault(label, SymbolNode(*label)) + new_item.node.add_family(new_item.s, new_item.rule, i, originator.node, item.node) + if new_item.expect in self.TERMINALS: + # Add (B :: aC.B, h, y) to Q + to_scan.add(new_item) + elif new_item not in column: + # Add (B :: aC.B, h, y) to Ei and R + column.add(new_item) + items.append(new_item) + + ### The Earley predictor + elif item.expect in self.NON_TERMINALS: ### (item.s == lr0) + new_items = [] + for rule in self.predictions[item.expect]: + new_item = Item(rule, 0, i) + new_items.append(new_item) + + # Process any held completions (H). 
+ if item.expect in held_completions: + new_item = item.advance() + label = (new_item.s, item.start, i) + new_item.node = node_cache[label] if label in node_cache else node_cache.setdefault(label, SymbolNode(*label)) + new_item.node.add_family(new_item.s, new_item.rule, new_item.start, item.node, held_completions[item.expect]) + new_items.append(new_item) + + for new_item in new_items: + if new_item.expect in self.TERMINALS: + to_scan.add(new_item) + elif new_item not in column: + column.add(new_item) + items.append(new_item) + + def _parse(self, lexer, columns, to_scan, start_symbol=None): + def is_quasi_complete(item): + if item.is_complete: + return True + + quasi = item.advance() + while not quasi.is_complete: + if quasi.expect not in self.NULLABLE: + return False + if quasi.rule.origin == start_symbol and quasi.expect == start_symbol: + return False + quasi = quasi.advance() + return True + + # def create_leo_transitives(origin, start): + # ... # removed at commit 4c1cfb2faf24e8f8bff7112627a00b94d261b420 + + def scan(i, token, to_scan): + """The core Earley Scanner. + + This is a custom implementation of the scanner that uses the + Lark lexer to match tokens. The scan list is built by the + Earley predictor, based on the previously completed tokens. + This ensures that at each phase of the parse we have a custom + lexer context, allowing for more complex ambiguities.""" + next_to_scan = set() + next_set = set() + columns.append(next_set) + transitives.append({}) + node_cache = {} + + for item in set(to_scan): + if match(item.expect, token): + new_item = item.advance() + label = (new_item.s, new_item.start, i) + # 'terminals' may not contain token.type when using %declare + # Additionally, token is not always a Token + # For example, it can be a Tree when using TreeMatcher + term = terminals.get(token.type) if isinstance(token, Token) else None + # Set the priority of the token node to 0 so that the + # terminal priorities do not affect the Tree chosen by + # ForestSumVisitor after the basic lexer has already + # "used up" the terminal priorities + token_node = TokenNode(token, term, priority=0) + new_item.node = node_cache[label] if label in node_cache else node_cache.setdefault(label, SymbolNode(*label)) + new_item.node.add_family(new_item.s, item.rule, new_item.start, item.node, token_node) + + if new_item.expect in self.TERMINALS: + # add (B ::= Aai+1.B, h, y) to Q' + next_to_scan.add(new_item) + else: + # add (B ::= Aa+1.B, h, y) to Ei+1 + next_set.add(new_item) + + if not next_set and not next_to_scan: + expect = {i.expect.name for i in to_scan} + raise UnexpectedToken(token, expect, considered_rules=set(to_scan), state=frozenset(i.s for i in to_scan)) + + return next_to_scan + + + # Define parser functions + match = self.term_matcher + + terminals = self.lexer_conf.terminals_by_name + + # Cache for nodes & tokens created in a particular parse step. + transitives = [{}] + + ## The main Earley loop. + # Run the Prediction/Completion cycle for any Items in the current Earley set. + # Completions will be added to the SPPF tree, and predictions will be recursively + # processed down to terminals/empty nodes to be added to the scanner for the next + # step. 
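+ # Concretely: for a rule `start: "a" "b"` and input 'ab', columns grows to [E0, E1, E2]; + # each iteration runs the predictor/completer on E_i, then scan() consumes one token and + # moves the matching items from the scan buffer Q into E_{i+1}.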
+ expects = {i.expect for i in to_scan} + i = 0 + for token in lexer.lex(expects): + self.predict_and_complete(i, to_scan, columns, transitives) + + to_scan = scan(i, token, to_scan) + i += 1 + + expects.clear() + expects |= {i.expect for i in to_scan} + + self.predict_and_complete(i, to_scan, columns, transitives) + + ## Column is now the final column in the parse. + assert i == len(columns)-1 + return to_scan + + def parse(self, lexer, start): + assert start, start + start_symbol = NonTerminal(start) + + columns = [set()] + to_scan = set() # The scan buffer. 'Q' in E.Scott's paper. + + ## Predict for the start_symbol. + # Add predicted items to the first Earley set (for the predictor) if they + # result in a non-terminal, or the scanner if they result in a terminal. + for rule in self.predictions[start_symbol]: + item = Item(rule, 0, 0) + if item.expect in self.TERMINALS: + to_scan.add(item) + else: + columns[0].add(item) + + to_scan = self._parse(lexer, columns, to_scan, start_symbol) + + # If the parse was successful, the start + # symbol should have been completed in the last step of the Earley cycle, and will be in + # this column. Find the item for the start_symbol, which is the root of the SPPF tree. + solutions = [n.node for n in columns[-1] if n.is_complete and n.node is not None and n.s == start_symbol and n.start == 0] + if not solutions: + expected_terminals = [t.expect.name for t in to_scan] + raise UnexpectedEOF(expected_terminals, state=frozenset(i.s for i in to_scan)) + + if self.debug: + from .earley_forest import ForestToPyDotVisitor + try: + debug_walker = ForestToPyDotVisitor() + except ImportError: + logger.warning("Cannot find dependency 'pydot', will not generate sppf debug image") + else: + debug_walker.visit(solutions[0], "sppf.png") + + + if len(solutions) > 1: + assert False, 'Earley should not generate multiple start symbol items!' + + if self.tree_class is not None: + # Perform our SPPF -> AST conversion + transformer = ForestToParseTree(self.tree_class, self.callbacks, self.forest_sum_visitor and self.forest_sum_visitor(), self.resolve_ambiguity) + return transformer.transform(solutions[0]) + + # return the root of the SPPF + return solutions[0] diff --git a/vendor/lark/lark/parsers/earley_common.py b/vendor/lark/lark/parsers/earley_common.py new file mode 100644 index 00000000..844ff108 --- /dev/null +++ b/vendor/lark/lark/parsers/earley_common.py @@ -0,0 +1,42 @@ +"""This module implements useful building blocks for the Earley parser +""" + + +class Item: + "An Earley Item, the atom of the algorithm." 
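+ # For a rule `a: b c`, the possible Items are (a -> . b c), (a -> b . c) and (a -> b c .): + # `ptr` marks the position of the dot, and `start` records the Earley column (j) where + # the rule's match began.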
+ + __slots__ = ('s', 'rule', 'ptr', 'start', 'is_complete', 'expect', 'previous', 'node', '_hash') + def __init__(self, rule, ptr, start): + self.is_complete = len(rule.expansion) == ptr + self.rule = rule # rule + self.ptr = ptr # ptr + self.start = start # j + self.node = None # w + if self.is_complete: + self.s = rule.origin + self.expect = None + self.previous = rule.expansion[ptr - 1] if ptr > 0 and len(rule.expansion) else None + else: + self.s = (rule, ptr) + self.expect = rule.expansion[ptr] + self.previous = rule.expansion[ptr - 1] if ptr > 0 and len(rule.expansion) else None + self._hash = hash((self.s, self.start)) + + def advance(self): + return Item(self.rule, self.ptr + 1, self.start) + + def __eq__(self, other): + return self is other or (self.s == other.s and self.start == other.start) + + def __hash__(self): + return self._hash + + def __repr__(self): + before = ( expansion.name for expansion in self.rule.expansion[:self.ptr] ) + after = ( expansion.name for expansion in self.rule.expansion[self.ptr:] ) + symbol = "{} ::= {}* {}".format(self.rule.origin.name, ' '.join(before), ' '.join(after)) + return '%s (%d)' % (symbol, self.start) + + +# class TransitiveItem(Item): +# ... # removed at commit 4c1cfb2faf24e8f8bff7112627a00b94d261b420 \ No newline at end of file diff --git a/vendor/lark/lark/parsers/earley_forest.py b/vendor/lark/lark/parsers/earley_forest.py new file mode 100644 index 00000000..2d602a39 --- /dev/null +++ b/vendor/lark/lark/parsers/earley_forest.py @@ -0,0 +1,804 @@ +"""This module implements an SPPF (Shared Packed Parse Forest) + +This is used as the primary output mechanism for the Earley parser +in order to store complex ambiguities. + +Full reference and more details are here: +http://www.bramvandersanden.com/post/2014/06/shared-packed-parse-forest/ +""" + +from random import randint +from collections import deque +from operator import attrgetter +from importlib import import_module +from functools import partial + +from ..parse_tree_builder import AmbiguousIntermediateExpander +from ..visitors import Discard +from ..lexer import Token +from ..utils import logger +from ..tree import Tree + +class ForestNode: + pass + +class SymbolNode(ForestNode): + """ + A Symbol Node represents a symbol (or Intermediate LR0). + + Symbol nodes are keyed by the symbol (s). For intermediate nodes + s will be an LR0, stored as a tuple of (rule, ptr). For completed symbol + nodes, s will be a string representing the non-terminal origin (i.e. + the left hand side of the rule). + + The children of a Symbol or Intermediate Node will always be Packed Nodes, + with each Packed Node child representing a single derivation of a production. + + Hence a Symbol Node with a single child is unambiguous. + + Parameters: + s: A Symbol, or a tuple of (rule, ptr) for an intermediate node. + start: The index of the start of the substring matched by this symbol (inclusive). + end: The index of the end of the substring matched by this symbol (exclusive). + + Properties: + is_intermediate: True if this node is an intermediate node. + priority: The priority of the node's symbol. + """ + __slots__ = ('s', 'start', 'end', '_children', 'paths', 'paths_loaded', 'priority', 'is_intermediate', '_hash') + def __init__(self, s, start, end): + self.s = s + self.start = start + self.end = end + self._children = set() + self.paths = set() + self.paths_loaded = False + + ### We use inf here as it can be safely negated without resorting to conditionals, + # unlike None or float('NaN'), and sorts appropriately.
+ self.priority = float('-inf') + self.is_intermediate = isinstance(s, tuple) + self._hash = hash((self.s, self.start, self.end)) + + def add_family(self, lr0, rule, start, left, right): + self._children.add(PackedNode(self, lr0, rule, start, left, right)) + + def add_path(self, transitive, node): + self.paths.add((transitive, node)) + + def load_paths(self): + for transitive, node in self.paths: + if transitive.next_titem is not None: + vn = SymbolNode(transitive.next_titem.s, transitive.next_titem.start, self.end) + vn.add_path(transitive.next_titem, node) + self.add_family(transitive.reduction.rule.origin, transitive.reduction.rule, transitive.reduction.start, transitive.reduction.node, vn) + else: + self.add_family(transitive.reduction.rule.origin, transitive.reduction.rule, transitive.reduction.start, transitive.reduction.node, node) + self.paths_loaded = True + + @property + def is_ambiguous(self): + """Returns True if this node is ambiguous.""" + return len(self.children) > 1 + + @property + def children(self): + """Returns a list of this node's children sorted from greatest to + least priority.""" + if not self.paths_loaded: self.load_paths() + return sorted(self._children, key=attrgetter('sort_key')) + + def __iter__(self): + return iter(self._children) + + def __eq__(self, other): + if not isinstance(other, SymbolNode): + return False + return self is other or (type(self.s) == type(other.s) and self.s == other.s and self.start == other.start and self.end is other.end) + + def __hash__(self): + return self._hash + + def __repr__(self): + if self.is_intermediate: + rule = self.s[0] + ptr = self.s[1] + before = ( expansion.name for expansion in rule.expansion[:ptr] ) + after = ( expansion.name for expansion in rule.expansion[ptr:] ) + symbol = "{} ::= {}* {}".format(rule.origin.name, ' '.join(before), ' '.join(after)) + else: + symbol = self.s.name + return "({}, {}, {}, {})".format(symbol, self.start, self.end, self.priority) + +class PackedNode(ForestNode): + """ + A Packed Node represents a single derivation in a symbol node. + + Parameters: + rule: The rule associated with this node. + parent: The parent of this node. + left: The left child of this node. ``None`` if one does not exist. + right: The right child of this node. ``None`` if one does not exist. + priority: The priority of this node. + """ + __slots__ = ('parent', 's', 'rule', 'start', 'left', 'right', 'priority', '_hash') + def __init__(self, parent, s, rule, start, left, right): + self.parent = parent + self.s = s + self.start = start + self.rule = rule + self.left = left + self.right = right + self.priority = float('-inf') + self._hash = hash((self.left, self.right)) + + @property + def is_empty(self): + return self.left is None and self.right is None + + @property + def sort_key(self): + """ + Used to sort PackedNode children of SymbolNodes. + A SymbolNode has multiple PackedNodes if it matched + ambiguously. Hence, we use the sort order to identify + the order in which ambiguous children should be considered. 
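+ + The key places empty nodes last, then sorts by descending priority, + and finally by the rule's original order in the grammar.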
+ """ + return self.is_empty, -self.priority, self.rule.order + + @property + def children(self): + """Returns a list of this node's children.""" + return [x for x in [self.left, self.right] if x is not None] + + def __iter__(self): + yield self.left + yield self.right + + def __eq__(self, other): + if not isinstance(other, PackedNode): + return False + return self is other or (self.left == other.left and self.right == other.right) + + def __hash__(self): + return self._hash + + def __repr__(self): + if isinstance(self.s, tuple): + rule = self.s[0] + ptr = self.s[1] + before = ( expansion.name for expansion in rule.expansion[:ptr] ) + after = ( expansion.name for expansion in rule.expansion[ptr:] ) + symbol = "{} ::= {}* {}".format(rule.origin.name, ' '.join(before), ' '.join(after)) + else: + symbol = self.s.name + return "({}, {}, {}, {})".format(symbol, self.start, self.priority, self.rule.order) + +class TokenNode(ForestNode): + """ + A Token Node represents a matched terminal and is always a leaf node. + + Parameters: + token: The Token associated with this node. + term: The TerminalDef matched by the token. + priority: The priority of this node. + """ + __slots__ = ('token', 'term', 'priority', '_hash') + def __init__(self, token, term, priority=None): + self.token = token + self.term = term + if priority is not None: + self.priority = priority + else: + self.priority = term.priority if term is not None else 0 + self._hash = hash(token) + + def __eq__(self, other): + if not isinstance(other, TokenNode): + return False + return self is other or (self.token == other.token) + + def __hash__(self): + return self._hash + + def __repr__(self): + return repr(self.token) + +class ForestVisitor: + """ + An abstract base class for building forest visitors. + + This class performs a controllable depth-first walk of an SPPF. + The visitor will not enter cycles and will backtrack if one is encountered. + Subclasses are notified of cycles through the ``on_cycle`` method. + + Behavior for visit events is defined by overriding the + ``visit*node*`` functions. + + The walk is controlled by the return values of the ``visit*node_in`` + methods. Returning a node(s) will schedule them to be visited. The visitor + will begin to backtrack if no nodes are returned. + + Parameters: + single_visit: If ``True``, non-Token nodes will only be visited once. + """ + + def __init__(self, single_visit=False): + self.single_visit = single_visit + + def visit_token_node(self, node): + """Called when a ``Token`` is visited. ``Token`` nodes are always leaves.""" + pass + + def visit_symbol_node_in(self, node): + """Called when a symbol node is visited. Nodes that are returned + will be scheduled to be visited. If ``visit_intermediate_node_in`` + is not implemented, this function will be called for intermediate + nodes as well.""" + pass + + def visit_symbol_node_out(self, node): + """Called after all nodes returned from a corresponding ``visit_symbol_node_in`` + call have been visited. If ``visit_intermediate_node_out`` + is not implemented, this function will be called for intermediate + nodes as well.""" + pass + + def visit_packed_node_in(self, node): + """Called when a packed node is visited. Nodes that are returned + will be scheduled to be visited. """ + pass + + def visit_packed_node_out(self, node): + """Called after all nodes returned from a corresponding ``visit_packed_node_in`` + call have been visited.""" + pass + + def on_cycle(self, node, path): + """Called when a cycle is encountered. 
+ + Parameters: + node: The node that causes a cycle. + path: The list of nodes being visited: nodes that have been + entered but not exited. The first element is the root in a forest + visit, and the last element is the node visited most recently. + ``path`` should be treated as read-only. + """ + pass + + def get_cycle_in_path(self, node, path): + """A utility function for use in ``on_cycle`` to obtain a slice of + ``path`` that only contains the nodes that make up the cycle.""" + index = len(path) - 1 + while id(path[index]) != id(node): + index -= 1 + return path[index:] + + def visit(self, root): + # Visiting is a list of IDs of all symbol/intermediate nodes currently in + # the stack. It serves two purposes: to detect when we 'recurse' in and out + # of a symbol/intermediate so that we can process both up and down. Also, + # since the SPPF can have cycles it allows us to detect if we're trying + # to recurse into a node that's already on the stack (infinite recursion). + visiting = set() + + # set of all nodes that have been visited + visited = set() + + # a list of nodes that are currently being visited + # used for the `on_cycle` callback + path = [] + + # We do not use recursion here to walk the Forest due to the limited + # stack size in python. Therefore input_stack is essentially our stack. + input_stack = deque([root]) + + # It is much faster to cache these as locals since they are called + # many times in large parses. + vpno = getattr(self, 'visit_packed_node_out') + vpni = getattr(self, 'visit_packed_node_in') + vsno = getattr(self, 'visit_symbol_node_out') + vsni = getattr(self, 'visit_symbol_node_in') + vino = getattr(self, 'visit_intermediate_node_out', vsno) + vini = getattr(self, 'visit_intermediate_node_in', vsni) + vtn = getattr(self, 'visit_token_node') + oc = getattr(self, 'on_cycle') + + while input_stack: + current = next(reversed(input_stack)) + try: + next_node = next(current) + except StopIteration: + input_stack.pop() + continue + except TypeError: + ### If the current object is not an iterator, pass through to Token/SymbolNode + pass + else: + if next_node is None: + continue + + if id(next_node) in visiting: + oc(next_node, path) + continue + + input_stack.append(next_node) + continue + + if isinstance(current, TokenNode): + vtn(current.token) + input_stack.pop() + continue + + current_id = id(current) + if current_id in visiting: + if isinstance(current, PackedNode): + vpno(current) + elif current.is_intermediate: + vino(current) + else: + vsno(current) + input_stack.pop() + path.pop() + visiting.remove(current_id) + visited.add(current_id) + elif self.single_visit and current_id in visited: + input_stack.pop() + else: + visiting.add(current_id) + path.append(current) + if isinstance(current, PackedNode): + next_node = vpni(current) + elif current.is_intermediate: + next_node = vini(current) + else: + next_node = vsni(current) + if next_node is None: + continue + + if not isinstance(next_node, ForestNode): + next_node = iter(next_node) + elif id(next_node) in visiting: + oc(next_node, path) + continue + + input_stack.append(next_node) + +class ForestTransformer(ForestVisitor): + """The base class for a bottom-up forest transformation. Most users will + want to use ``TreeForestTransformer`` instead as it has a friendlier + interface and covers most use cases. + + Transformations are applied via inheritance and overriding of the + ``transform*node`` methods. + + ``transform_token_node`` receives a ``Token`` as an argument. 
+ All other methods receive the node that is being transformed and + a list of the results of the transformations of that node's children. + The return value of these methods are the resulting transformations. + + If ``Discard`` is raised in a node's transformation, no data from that node + will be passed to its parent's transformation. + """ + + def __init__(self): + super(ForestTransformer, self).__init__() + # results of transformations + self.data = dict() + # used to track parent nodes + self.node_stack = deque() + + def transform(self, root): + """Perform a transformation on an SPPF.""" + self.node_stack.append('result') + self.data['result'] = [] + self.visit(root) + assert len(self.data['result']) <= 1 + if self.data['result']: + return self.data['result'][0] + + def transform_symbol_node(self, node, data): + """Transform a symbol node.""" + return node + + def transform_intermediate_node(self, node, data): + """Transform an intermediate node.""" + return node + + def transform_packed_node(self, node, data): + """Transform a packed node.""" + return node + + def transform_token_node(self, node): + """Transform a ``Token``.""" + return node + + def visit_symbol_node_in(self, node): + self.node_stack.append(id(node)) + self.data[id(node)] = [] + return node.children + + def visit_packed_node_in(self, node): + self.node_stack.append(id(node)) + self.data[id(node)] = [] + return node.children + + def visit_token_node(self, node): + transformed = self.transform_token_node(node) + if transformed is not Discard: + self.data[self.node_stack[-1]].append(transformed) + + def _visit_node_out_helper(self, node, method): + self.node_stack.pop() + transformed = method(node, self.data[id(node)]) + if transformed is not Discard: + self.data[self.node_stack[-1]].append(transformed) + del self.data[id(node)] + + def visit_symbol_node_out(self, node): + self._visit_node_out_helper(node, self.transform_symbol_node) + + def visit_intermediate_node_out(self, node): + self._visit_node_out_helper(node, self.transform_intermediate_node) + + def visit_packed_node_out(self, node): + self._visit_node_out_helper(node, self.transform_packed_node) + + +class ForestSumVisitor(ForestVisitor): + """ + A visitor for prioritizing ambiguous parts of the Forest. + + This visitor is used when support for explicit priorities on + rules is requested (whether normal, or invert). It walks the + forest (or subsets thereof) and cascades properties upwards + from the leaves. + + It would be ideal to do this during parsing, however this would + require processing each Earley item multiple times. That's + a big performance drawback; so running a forest walk is the + lesser of two evils: there can be significantly more Earley + items created during parsing than there are SPPF nodes in the + final tree. 
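+ + For example, a rule declared as `a.2: ...` seeds each of its packed nodes + with priority 2; child priorities are summed upwards, each symbol node takes + the maximum of its children, and higher-priority derivations then sort first + in ``SymbolNode.children``.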
+ """ + def __init__(self): + super(ForestSumVisitor, self).__init__(single_visit=True) + + def visit_packed_node_in(self, node): + yield node.left + yield node.right + + def visit_symbol_node_in(self, node): + return iter(node.children) + + def visit_packed_node_out(self, node): + priority = node.rule.options.priority if not node.parent.is_intermediate and node.rule.options.priority else 0 + priority += getattr(node.right, 'priority', 0) + priority += getattr(node.left, 'priority', 0) + node.priority = priority + + def visit_symbol_node_out(self, node): + node.priority = max(child.priority for child in node.children) + +class PackedData(): + """Used in transformationss of packed nodes to distinguish the data + that comes from the left child and the right child. + """ + + class _NoData(): + pass + + NO_DATA = _NoData() + + def __init__(self, node, data): + self.left = self.NO_DATA + self.right = self.NO_DATA + if data: + if node.left is not None: + self.left = data[0] + if len(data) > 1: + self.right = data[1] + else: + self.right = data[0] + +class ForestToParseTree(ForestTransformer): + """Used by the earley parser when ambiguity equals 'resolve' or + 'explicit'. Transforms an SPPF into an (ambiguous) parse tree. + + Parameters: + tree_class: The tree class to use for construction + callbacks: A dictionary of rules to functions that output a tree + prioritizer: A ``ForestVisitor`` that manipulates the priorities of ForestNodes + resolve_ambiguity: If True, ambiguities will be resolved based on + priorities. Otherwise, `_ambig` nodes will be in the resulting tree. + use_cache: If True, the results of packed node transformations will be cached. + """ + + def __init__(self, tree_class=Tree, callbacks=dict(), prioritizer=ForestSumVisitor(), resolve_ambiguity=True, use_cache=True): + super(ForestToParseTree, self).__init__() + self.tree_class = tree_class + self.callbacks = callbacks + self.prioritizer = prioritizer + self.resolve_ambiguity = resolve_ambiguity + self._use_cache = use_cache + self._cache = {} + self._on_cycle_retreat = False + self._cycle_node = None + self._successful_visits = set() + + def visit(self, root): + if self.prioritizer: + self.prioritizer.visit(root) + super(ForestToParseTree, self).visit(root) + self._cache = {} + + def on_cycle(self, node, path): + logger.debug("Cycle encountered in the SPPF at node: %s. " + "As infinite ambiguities cannot be represented in a tree, " + "this family of derivations will be discarded.", node) + self._cycle_node = node + self._on_cycle_retreat = True + + def _check_cycle(self, node): + if self._on_cycle_retreat: + if id(node) == id(self._cycle_node) or id(node) in self._successful_visits: + self._cycle_node = None + self._on_cycle_retreat = False + else: + return Discard + + def _collapse_ambig(self, children): + new_children = [] + for child in children: + if hasattr(child, 'data') and child.data == '_ambig': + new_children += child.children + else: + new_children.append(child) + return new_children + + def _call_rule_func(self, node, data): + # called when transforming children of symbol nodes + # data is a list of trees or tokens that correspond to the + # symbol's rule expansion + return self.callbacks[node.rule](data) + + def _call_ambig_func(self, node, data): + # called when transforming a symbol node + # data is a list of trees where each tree's data is + # equal to the name of the symbol or one of its aliases. 
+ if len(data) > 1: + return self.tree_class('_ambig', data) + elif data: + return data[0] + return Discard + + def transform_symbol_node(self, node, data): + if id(node) not in self._successful_visits: + return Discard + r = self._check_cycle(node) + if r is Discard: + return r + self._successful_visits.remove(id(node)) + data = self._collapse_ambig(data) + return self._call_ambig_func(node, data) + + def transform_intermediate_node(self, node, data): + if id(node) not in self._successful_visits: + return Discard + r = self._check_cycle(node) + if r is Discard: + return r + self._successful_visits.remove(id(node)) + if len(data) > 1: + children = [self.tree_class('_inter', c) for c in data] + return self.tree_class('_iambig', children) + return data[0] + + def transform_packed_node(self, node, data): + r = self._check_cycle(node) + if r is Discard: + return r + if self.resolve_ambiguity and id(node.parent) in self._successful_visits: + return Discard + if self._use_cache and id(node) in self._cache: + return self._cache[id(node)] + children = [] + assert len(data) <= 2 + data = PackedData(node, data) + if data.left is not PackedData.NO_DATA: + if node.left.is_intermediate and isinstance(data.left, list): + children += data.left + else: + children.append(data.left) + if data.right is not PackedData.NO_DATA: + children.append(data.right) + if node.parent.is_intermediate: + return self._cache.setdefault(id(node), children) + return self._cache.setdefault(id(node), self._call_rule_func(node, children)) + + def visit_symbol_node_in(self, node): + super(ForestToParseTree, self).visit_symbol_node_in(node) + if self._on_cycle_retreat: + return + return node.children + + def visit_packed_node_in(self, node): + self._on_cycle_retreat = False + to_visit = super(ForestToParseTree, self).visit_packed_node_in(node) + if not self.resolve_ambiguity or id(node.parent) not in self._successful_visits: + if not self._use_cache or id(node) not in self._cache: + return to_visit + + def visit_packed_node_out(self, node): + super(ForestToParseTree, self).visit_packed_node_out(node) + if not self._on_cycle_retreat: + self._successful_visits.add(id(node.parent)) + +def handles_ambiguity(func): + """Decorator for methods of subclasses of ``TreeForestTransformer``. + Denotes that the method should receive a list of transformed derivations.""" + func.handles_ambiguity = True + return func + +class TreeForestTransformer(ForestToParseTree): + """A ``ForestTransformer`` with a tree ``Transformer``-like interface. + By default, it will construct a tree. + + Methods provided via inheritance are called based on the rule/symbol + names of nodes in the forest. + + Methods that act on rules will receive a list of the results of the + transformations of the rule's children. By default, trees and tokens. + + Methods that act on tokens will receive a token. + + Alternatively, methods that act on rules may be annotated with + ``handles_ambiguity``. In this case, the function will receive a list + of all the transformations of all the derivations of the rule. + By default, a list of trees where each tree.data is equal to the + rule name or one of its aliases. + + Non-tree transformations are made possible by override of + ``__default__``, ``__default_token__``, and ``__default_ambig__``. + + Note: + Tree shaping features such as inlined rules and token filtering are + not built into the transformation. Positions are also not propagated. 
+ + Parameters: + tree_class: The tree class to use for construction + prioritizer: A ``ForestVisitor`` that manipulates the priorities of nodes in the SPPF. + resolve_ambiguity: If True, ambiguities will be resolved based on priorities. + use_cache (bool): If True, caches the results of some transformations, + potentially improving performance when ``resolve_ambiguity==False``. + Only use if you know what you are doing: i.e. All transformation + functions are pure and referentially transparent. + """ + + def __init__(self, tree_class=Tree, prioritizer=ForestSumVisitor(), resolve_ambiguity=True, use_cache=False): + super(TreeForestTransformer, self).__init__(tree_class, dict(), prioritizer, resolve_ambiguity, use_cache) + + def __default__(self, name, data): + """Default operation on tree (for override). + + Returns a tree with name with data as children. + """ + return self.tree_class(name, data) + + def __default_ambig__(self, name, data): + """Default operation on ambiguous rule (for override). + + Wraps data in an '_ambig_' node if it contains more than + one element. + """ + if len(data) > 1: + return self.tree_class('_ambig', data) + elif data: + return data[0] + return Discard + + def __default_token__(self, node): + """Default operation on ``Token`` (for override). + + Returns ``node``. + """ + return node + + def transform_token_node(self, node): + return getattr(self, node.type, self.__default_token__)(node) + + def _call_rule_func(self, node, data): + name = node.rule.alias or node.rule.options.template_source or node.rule.origin.name + user_func = getattr(self, name, self.__default__) + if user_func == self.__default__ or hasattr(user_func, 'handles_ambiguity'): + user_func = partial(self.__default__, name) + if not self.resolve_ambiguity: + wrapper = partial(AmbiguousIntermediateExpander, self.tree_class) + user_func = wrapper(user_func) + return user_func(data) + + def _call_ambig_func(self, node, data): + name = node.s.name + user_func = getattr(self, name, self.__default_ambig__) + if user_func == self.__default_ambig__ or not hasattr(user_func, 'handles_ambiguity'): + user_func = partial(self.__default_ambig__, name) + return user_func(data) + +class ForestToPyDotVisitor(ForestVisitor): + """ + A Forest visitor which writes the SPPF to a PNG. + + The SPPF can get really large, really quickly because + of the amount of meta-data it stores, so this is probably + only useful for trivial trees and learning how the SPPF + is structured. 
+ """ + def __init__(self, rankdir="TB"): + super(ForestToPyDotVisitor, self).__init__(single_visit=True) + self.pydot = import_module('pydot') + self.graph = self.pydot.Dot(graph_type='digraph', rankdir=rankdir) + + def visit(self, root, filename): + super(ForestToPyDotVisitor, self).visit(root) + try: + self.graph.write_png(filename) + except FileNotFoundError as e: + logger.error("Could not write png: ", e) + + def visit_token_node(self, node): + graph_node_id = str(id(node)) + graph_node_label = "\"{}\"".format(node.value.replace('"', '\\"')) + graph_node_color = 0x808080 + graph_node_style = "\"filled,rounded\"" + graph_node_shape = "diamond" + graph_node = self.pydot.Node(graph_node_id, style=graph_node_style, fillcolor="#{:06x}".format(graph_node_color), shape=graph_node_shape, label=graph_node_label) + self.graph.add_node(graph_node) + + def visit_packed_node_in(self, node): + graph_node_id = str(id(node)) + graph_node_label = repr(node) + graph_node_color = 0x808080 + graph_node_style = "filled" + graph_node_shape = "diamond" + graph_node = self.pydot.Node(graph_node_id, style=graph_node_style, fillcolor="#{:06x}".format(graph_node_color), shape=graph_node_shape, label=graph_node_label) + self.graph.add_node(graph_node) + yield node.left + yield node.right + + def visit_packed_node_out(self, node): + graph_node_id = str(id(node)) + graph_node = self.graph.get_node(graph_node_id)[0] + for child in [node.left, node.right]: + if child is not None: + child_graph_node_id = str(id(child)) + child_graph_node = self.graph.get_node(child_graph_node_id)[0] + self.graph.add_edge(self.pydot.Edge(graph_node, child_graph_node)) + else: + #### Try and be above the Python object ID range; probably impl. specific, but maybe this is okay. + child_graph_node_id = str(randint(100000000000000000000000000000,123456789012345678901234567890)) + child_graph_node_style = "invis" + child_graph_node = self.pydot.Node(child_graph_node_id, style=child_graph_node_style, label="None") + child_edge_style = "invis" + self.graph.add_node(child_graph_node) + self.graph.add_edge(self.pydot.Edge(graph_node, child_graph_node, style=child_edge_style)) + + def visit_symbol_node_in(self, node): + graph_node_id = str(id(node)) + graph_node_label = repr(node) + graph_node_color = 0x808080 + graph_node_style = "\"filled\"" + if node.is_intermediate: + graph_node_shape = "ellipse" + else: + graph_node_shape = "rectangle" + graph_node = self.pydot.Node(graph_node_id, style=graph_node_style, fillcolor="#{:06x}".format(graph_node_color), shape=graph_node_shape, label=graph_node_label) + self.graph.add_node(graph_node) + return iter(node.children) + + def visit_symbol_node_out(self, node): + graph_node_id = str(id(node)) + graph_node = self.graph.get_node(graph_node_id)[0] + for child in node.children: + child_graph_node_id = str(id(child)) + child_graph_node = self.graph.get_node(child_graph_node_id)[0] + self.graph.add_edge(self.pydot.Edge(graph_node, child_graph_node)) diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/parsers/grammar_analysis.py b/vendor/lark/lark/parsers/grammar_analysis.py similarity index 98% rename from vendor/poetry-core/poetry/core/_vendor/lark/parsers/grammar_analysis.py rename to vendor/lark/lark/parsers/grammar_analysis.py index 94c32ccc..b526e470 100644 --- a/vendor/poetry-core/poetry/core/_vendor/lark/parsers/grammar_analysis.py +++ b/vendor/lark/lark/parsers/grammar_analysis.py @@ -5,7 +5,7 @@ from ..grammar import Rule, Terminal, NonTerminal -class RulePtr(object): +class RulePtr: __slots__ 
= ('rule', 'index') def __init__(self, rule, index): @@ -38,7 +38,7 @@ def __hash__(self): # state generation ensures no duplicate LR0ItemSets -class LR0ItemSet(object): +class LR0ItemSet: __slots__ = ('kernel', 'closure', 'transitions', 'lookaheads') def __init__(self, kernel, closure): @@ -121,7 +121,7 @@ def calculate_sets(rules): return FIRST, FOLLOW, NULLABLE -class GrammarAnalyzer(object): +class GrammarAnalyzer: def __init__(self, parser_conf, debug=False): self.debug = debug @@ -138,7 +138,7 @@ def __init__(self, parser_conf, debug=False): for r in rules: for sym in r.expansion: if not (sym.is_term or sym in self.rules_by_origin): - raise GrammarError("Using an undefined rule: %s" % sym) # TODO test validation + raise GrammarError("Using an undefined rule: %s" % sym) self.start_states = {start: self.expand_rule(root_rule.origin) for start, root_rule in root_rules.items()} diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/parsers/lalr_analysis.py b/vendor/lark/lark/parsers/lalr_analysis.py similarity index 88% rename from vendor/poetry-core/poetry/core/_vendor/lark/parsers/lalr_analysis.py rename to vendor/lark/lark/parsers/lalr_analysis.py index 8890c3cd..f6a993b9 100644 --- a/vendor/poetry-core/poetry/core/_vendor/lark/parsers/lalr_analysis.py +++ b/vendor/lark/lark/parsers/lalr_analysis.py @@ -6,10 +6,9 @@ # Author: Erez Shinan (2017) # Email : erezshin@gmail.com -import logging -from collections import defaultdict, deque +from collections import defaultdict -from ..utils import classify, classify_bool, bfs, fzset, Serialize, Enumerator +from ..utils import classify, classify_bool, bfs, fzset, Enumerator, logger from ..exceptions import GrammarError from .grammar_analysis import GrammarAnalyzer, Terminal, LR0ItemSet @@ -247,21 +246,38 @@ def compute_lookaheads(self): def compute_lalr1_states(self): m = {} + reduce_reduce = [] for state in self.lr0_states: actions = {} for la, next_state in state.transitions.items(): actions[la] = (Shift, next_state.closure) for la, rules in state.lookaheads.items(): if len(rules) > 1: - raise GrammarError('Reduce/Reduce collision in %s between the following rules: %s' % (la, ''.join([ '\n\t\t- ' + str(r) for r in rules ]))) + # Try to resolve conflict based on priority + p = [(r.options.priority or 0, r) for r in rules] + p.sort(key=lambda r: r[0], reverse=True) + best, second_best = p[:2] + if best[0] > second_best[0]: + rules = [best[1]] + else: + reduce_reduce.append((state, la, rules)) if la in actions: if self.debug: - logging.warning('Shift/Reduce conflict for terminal %s: (resolving as shift)', la.name) - logging.warning(' * %s', list(rules)[0]) + logger.warning('Shift/Reduce conflict for terminal %s: (resolving as shift)', la.name) + logger.warning(' * %s', list(rules)[0]) else: actions[la] = (Reduce, list(rules)[0]) m[state] = { k.name: v for k, v in actions.items() } + if reduce_reduce: + msgs = [] + for state, la, rules in reduce_reduce: + msg = 'Reduce/Reduce collision in %s between the following rules: %s' % (la, ''.join([ '\n\t- ' + str(r) for r in rules ])) + if self.debug: + msg += '\n collision occurred in state: {%s\n }' % ''.join(['\n\t' + str(x) for x in state.closure]) + msgs.append(msg) + raise GrammarError('\n\n'.join(msgs)) + states = { k.closure: v for k, v in m.items() } # compute end states @@ -285,4 +301,4 @@ def compute_lalr(self): self.compute_reads_relations() self.compute_includes_lookback() self.compute_lookaheads() - self.compute_lalr1_states() \ No newline at end of file + self.compute_lalr1_states() diff 
--git a/vendor/lark/lark/parsers/lalr_interactive_parser.py b/vendor/lark/lark/parsers/lalr_interactive_parser.py new file mode 100644 index 00000000..c9658daf --- /dev/null +++ b/vendor/lark/lark/parsers/lalr_interactive_parser.py @@ -0,0 +1,149 @@ +# This module provides a LALR interactive parser, which is used for debugging and error handling + +from typing import Iterator, List +from copy import copy +import warnings + +from lark.exceptions import UnexpectedToken +from lark.lexer import Token, LexerThread + + +class InteractiveParser: + """InteractiveParser gives you advanced control over parsing and error handling when parsing with LALR. + + For a simpler interface, see the ``on_error`` argument to ``Lark.parse()``. + """ + def __init__(self, parser, parser_state, lexer_thread: LexerThread): + self.parser = parser + self.parser_state = parser_state + self.lexer_thread = lexer_thread + self.result = None + + @property + def lexer_state(self) -> LexerThread: + warnings.warn("lexer_state will be removed in subsequent releases. Use lexer_thread instead.", DeprecationWarning) + return self.lexer_thread + + def feed_token(self, token: Token): + """Feed the parser with a token, and advance it to the next state, as if it received it from the lexer. + + Note that ``token`` has to be an instance of ``Token``. + """ + return self.parser_state.feed_token(token, token.type == '$END') + + def iter_parse(self) -> Iterator[Token]: + """Step through the different stages of the parse, by reading tokens from the lexer + and feeding them to the parser, one per iteration. + + Returns an iterator of the tokens it encounters. + + When the parse is over, the resulting tree can be found in ``InteractiveParser.result``. + """ + for token in self.lexer_thread.lex(self.parser_state): + yield token + self.result = self.feed_token(token) + + def exhaust_lexer(self) -> List[Token]: + """Try to feed the rest of the lexer state into the interactive parser. + + Note that this modifies the instance in place and does not feed an '$END' Token + """ + return list(self.iter_parse()) + + + def feed_eof(self, last_token=None): + """Feed a '$END' Token. Borrows from 'last_token' if given.""" + eof = Token.new_borrow_pos('$END', '', last_token) if last_token is not None else self.lexer_thread._Token('$END', '', 0, 1, 1) + return self.feed_token(eof) + + + def __copy__(self): + """Create a new interactive parser with a separate state. + + Calls to feed_token() won't affect the old instance, and vice-versa. + """ + return type(self)( + self.parser, + copy(self.parser_state), + copy(self.lexer_thread), + ) + + def copy(self): + return copy(self) + + def __eq__(self, other): + if not isinstance(other, InteractiveParser): + return False + + return self.parser_state == other.parser_state and self.lexer_thread == other.lexer_thread + + def as_immutable(self): + """Convert to an ``ImmutableInteractiveParser``.""" + p = copy(self) + return ImmutableInteractiveParser(p.parser, p.parser_state, p.lexer_thread) + + def pretty(self): + """Print the output of ``choices()`` in a way that's easier to read.""" + out = ["Parser choices:"] + for k, v in self.choices().items(): + out.append('\t- %s -> %r' % (k, v)) + out.append('stack size: %s' % len(self.parser_state.state_stack)) + return '\n'.join(out) + + def choices(self): + """Returns a dictionary of token types, matched to their action in the parser. + + Only returns token types that are accepted by the current state. + + Updated by ``feed_token()``. 
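+
+        Example (sketch; assumes ``parser`` is a ``Lark`` instance built with
+        ``parser='lalr'`` over a small arithmetic grammar):
+            ::
+                ip = parser.parse_interactive('1 + ')
+                ip.exhaust_lexer()
+                # Maps each acceptable token type (and rule name) to its parser action
+                print(ip.choices())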
+ """ + return self.parser_state.parse_conf.parse_table.states[self.parser_state.position] + + def accepts(self): + """Returns the set of possible tokens that will advance the parser into a new valid state.""" + accepts = set() + for t in self.choices(): + if t.isupper(): # is terminal? + new_cursor = copy(self) + try: + new_cursor.feed_token(self.lexer_thread._Token(t, '')) + except UnexpectedToken: + pass + else: + accepts.add(t) + return accepts + + def resume_parse(self): + """Resume automated parsing from the current state.""" + return self.parser.parse_from_state(self.parser_state) + + + +class ImmutableInteractiveParser(InteractiveParser): + """Same as ``InteractiveParser``, but operations create a new instance instead + of changing it in-place. + """ + + result = None + + def __hash__(self): + return hash((self.parser_state, self.lexer_thread)) + + def feed_token(self, token): + c = copy(self) + c.result = InteractiveParser.feed_token(c, token) + return c + + def exhaust_lexer(self): + """Try to feed the rest of the lexer state into the parser. + + Note that this returns a new ImmutableInteractiveParser and does not feed an '$END' Token""" + cursor = self.as_mutable() + cursor.exhaust_lexer() + return cursor.as_immutable() + + def as_mutable(self): + """Convert to an ``InteractiveParser``.""" + p = copy(self) + return InteractiveParser(p.parser, p.parser_state, p.lexer_thread) + diff --git a/vendor/lark/lark/parsers/lalr_parser.py b/vendor/lark/lark/parsers/lalr_parser.py new file mode 100644 index 00000000..2837b296 --- /dev/null +++ b/vendor/lark/lark/parsers/lalr_parser.py @@ -0,0 +1,200 @@ +"""This module implements a LALR(1) Parser +""" +# Author: Erez Shinan (2017) +# Email : erezshin@gmail.com +from copy import deepcopy, copy +from ..exceptions import UnexpectedInput, UnexpectedToken +from ..lexer import Token +from ..utils import Serialize + +from .lalr_analysis import LALR_Analyzer, Shift, Reduce, IntParseTable +from .lalr_interactive_parser import InteractiveParser +from lark.exceptions import UnexpectedCharacters, UnexpectedInput, UnexpectedToken + +###{standalone + +class LALR_Parser(Serialize): + def __init__(self, parser_conf, debug=False): + analysis = LALR_Analyzer(parser_conf, debug=debug) + analysis.compute_lalr() + callbacks = parser_conf.callbacks + + self._parse_table = analysis.parse_table + self.parser_conf = parser_conf + self.parser = _Parser(analysis.parse_table, callbacks, debug) + + @classmethod + def deserialize(cls, data, memo, callbacks, debug=False): + inst = cls.__new__(cls) + inst._parse_table = IntParseTable.deserialize(data, memo) + inst.parser = _Parser(inst._parse_table, callbacks, debug) + return inst + + def serialize(self, memo): + return self._parse_table.serialize(memo) + + def parse_interactive(self, lexer, start): + return self.parser.parse(lexer, start, start_interactive=True) + + def parse(self, lexer, start, on_error=None): + try: + return self.parser.parse(lexer, start) + except UnexpectedInput as e: + if on_error is None: + raise + + while True: + if isinstance(e, UnexpectedCharacters): + s = e.interactive_parser.lexer_thread.state + p = s.line_ctr.char_pos + + if not on_error(e): + raise e + + if isinstance(e, UnexpectedCharacters): + # If user didn't change the character position, then we should + if p == s.line_ctr.char_pos: + s.line_ctr.feed(s.text[p:p+1]) + + try: + return e.interactive_parser.resume_parse() + except UnexpectedToken as e2: + if (isinstance(e, UnexpectedToken) + and e.token.type == e2.token.type == '$END' + and 
e.interactive_parser == e2.interactive_parser): + # Prevent infinite loop + raise e2 + e = e2 + except UnexpectedCharacters as e2: + e = e2 + + +class ParseConf: + __slots__ = 'parse_table', 'callbacks', 'start', 'start_state', 'end_state', 'states' + + def __init__(self, parse_table, callbacks, start): + self.parse_table = parse_table + + self.start_state = self.parse_table.start_states[start] + self.end_state = self.parse_table.end_states[start] + self.states = self.parse_table.states + + self.callbacks = callbacks + self.start = start + + +class ParserState: + __slots__ = 'parse_conf', 'lexer', 'state_stack', 'value_stack' + + def __init__(self, parse_conf, lexer, state_stack=None, value_stack=None): + self.parse_conf = parse_conf + self.lexer = lexer + self.state_stack = state_stack or [self.parse_conf.start_state] + self.value_stack = value_stack or [] + + @property + def position(self): + return self.state_stack[-1] + + # Necessary for match_examples() to work + def __eq__(self, other): + if not isinstance(other, ParserState): + return NotImplemented + return len(self.state_stack) == len(other.state_stack) and self.position == other.position + + def __copy__(self): + return type(self)( + self.parse_conf, + self.lexer, # XXX copy + copy(self.state_stack), + deepcopy(self.value_stack), + ) + + def copy(self): + return copy(self) + + def feed_token(self, token, is_end=False): + state_stack = self.state_stack + value_stack = self.value_stack + states = self.parse_conf.states + end_state = self.parse_conf.end_state + callbacks = self.parse_conf.callbacks + + while True: + state = state_stack[-1] + try: + action, arg = states[state][token.type] + except KeyError: + expected = {s for s in states[state].keys() if s.isupper()} + raise UnexpectedToken(token, expected, state=self, interactive_parser=None) + + assert arg != end_state + + if action is Shift: + # shift once and return + assert not is_end + state_stack.append(arg) + value_stack.append(token if token.type not in callbacks else callbacks[token.type](token)) + return + else: + # reduce+shift as many times as necessary + rule = arg + size = len(rule.expansion) + if size: + s = value_stack[-size:] + del state_stack[-size:] + del value_stack[-size:] + else: + s = [] + + value = callbacks[rule](s) + + _action, new_state = states[state_stack[-1]][rule.origin.name] + assert _action is Shift + state_stack.append(new_state) + value_stack.append(value) + + if is_end and state_stack[-1] == end_state: + return value_stack[-1] + +class _Parser: + def __init__(self, parse_table, callbacks, debug=False): + self.parse_table = parse_table + self.callbacks = callbacks + self.debug = debug + + def parse(self, lexer, start, value_stack=None, state_stack=None, start_interactive=False): + parse_conf = ParseConf(self.parse_table, self.callbacks, start) + parser_state = ParserState(parse_conf, lexer, state_stack, value_stack) + if start_interactive: + return InteractiveParser(self, parser_state, parser_state.lexer) + return self.parse_from_state(parser_state) + + + def parse_from_state(self, state): + # Main LALR-parser loop + try: + token = None + for token in state.lexer.lex(state): + state.feed_token(token) + + end_token = Token.new_borrow_pos('$END', '', token) if token else Token('$END', '', 0, 1, 1) + return state.feed_token(end_token, True) + except UnexpectedInput as e: + try: + e.interactive_parser = InteractiveParser(self, state, state.lexer) + except NameError: + pass + raise e + except Exception as e: + if self.debug: + print("") + print("STATE 
STACK DUMP") + print("----------------") + for i, s in enumerate(state.state_stack): + print('%d)' % i , s) + print("") + + raise +###} + diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/parsers/xearley.py b/vendor/lark/lark/parsers/xearley.py similarity index 82% rename from vendor/poetry-core/poetry/core/_vendor/lark/parsers/xearley.py rename to vendor/lark/lark/parsers/xearley.py index 855625a9..343e5c0b 100644 --- a/vendor/poetry-core/poetry/core/_vendor/lark/parsers/xearley.py +++ b/vendor/lark/lark/parsers/xearley.py @@ -16,17 +16,18 @@ from collections import defaultdict +from ..tree import Tree from ..exceptions import UnexpectedCharacters from ..lexer import Token from ..grammar import Terminal from .earley import Parser as BaseParser -from .earley_forest import SymbolNode +from .earley_forest import SymbolNode, TokenNode class Parser(BaseParser): - def __init__(self, parser_conf, term_matcher, resolve_ambiguity=True, ignore = (), complete_lex = False, debug=False): - BaseParser.__init__(self, parser_conf, term_matcher, resolve_ambiguity, debug) - self.ignore = [Terminal(t) for t in ignore] + def __init__(self, lexer_conf, parser_conf, term_matcher, resolve_ambiguity=True, complete_lex = False, debug=False, tree_class=Tree): + BaseParser.__init__(self, lexer_conf, parser_conf, term_matcher, resolve_ambiguity, debug, tree_class) + self.ignore = [Terminal(t) for t in lexer_conf.ignore] self.complete_lex = complete_lex def _parse(self, stream, columns, to_scan, start_symbol=None): @@ -62,9 +63,10 @@ def scan(i, to_scan): t = Token(item.expect.name, m.group(0), i, text_line, text_column) delayed_matches[i+m.end()].append( (item, i, t) ) - # Remove any items that successfully matched in this pass from the to_scan buffer. - # This ensures we don't carry over tokens that already matched, if we're ignoring below. - to_scan.remove(item) + # XXX The following 3 lines were commented out for causing a bug. See issue #768 + # # Remove any items that successfully matched in this pass from the to_scan buffer. + # # This ensures we don't carry over tokens that already matched, if we're ignoring below. + # to_scan.remove(item) # 3) Process any ignores. This is typically used for e.g. whitespace. # We carry over any unmatched items from the to_scan buffer to be matched again after @@ -97,8 +99,9 @@ def scan(i, to_scan): new_item = item.advance() label = (new_item.s, new_item.start, i) + token_node = TokenNode(token, terminals[token.type]) new_item.node = node_cache[label] if label in node_cache else node_cache.setdefault(label, SymbolNode(*label)) - new_item.node.add_family(new_item.s, item.rule, new_item.start, item.node, token) + new_item.node.add_family(new_item.s, item.rule, new_item.start, item.node, token_node) else: new_item = item @@ -112,13 +115,18 @@ def scan(i, to_scan): del delayed_matches[i+1] # No longer needed, so unburden memory if not next_set and not delayed_matches and not next_to_scan: - raise UnexpectedCharacters(stream, i, text_line, text_column, {item.expect.name for item in to_scan}, set(to_scan)) + considered_rules = list(sorted(to_scan, key=lambda key: key.rule.origin.name)) + raise UnexpectedCharacters(stream, i, text_line, text_column, {item.expect.name for item in to_scan}, + set(to_scan), state=frozenset(i.s for i in to_scan), + considered_rules=considered_rules + ) return next_to_scan delayed_matches = defaultdict(list) match = self.term_matcher + terminals = self.lexer_conf.terminals_by_name # Cache for nodes & tokens created in a particular parse step. 
transitives = [{}] @@ -148,4 +156,4 @@ def scan(i, to_scan): ## Column is now the final column in the parse. assert i == len(columns)-1 - return to_scan \ No newline at end of file + return to_scan diff --git a/vendor/poetry-core/poetry/core/_vendor/packaging/py.typed b/vendor/lark/lark/py.typed similarity index 100% rename from vendor/poetry-core/poetry/core/_vendor/packaging/py.typed rename to vendor/lark/lark/py.typed diff --git a/vendor/lark/lark/reconstruct.py b/vendor/lark/lark/reconstruct.py new file mode 100644 index 00000000..c5746684 --- /dev/null +++ b/vendor/lark/lark/reconstruct.py @@ -0,0 +1,107 @@ +"""Reconstruct text from a tree, based on Lark grammar""" + +from typing import List, Dict, Union, Callable, Iterable, Optional +import unicodedata + +from .lark import Lark +from .tree import Tree, ParseTree +from .visitors import Transformer_InPlace +from .lexer import Token, PatternStr, TerminalDef +from .grammar import Terminal, NonTerminal, Symbol + +from .tree_matcher import TreeMatcher, is_discarded_terminal +from .utils import is_id_continue + +def is_iter_empty(i): + try: + _ = next(i) + return False + except StopIteration: + return True + + +class WriteTokensTransformer(Transformer_InPlace): + "Inserts discarded tokens into their correct place, according to the rules of grammar" + + tokens: Dict[str, TerminalDef] + term_subs: Dict[str, Callable[[Symbol], str]] + + def __init__(self, tokens: Dict[str, TerminalDef], term_subs: Dict[str, Callable[[Symbol], str]]) -> None: + self.tokens = tokens + self.term_subs = term_subs + + def __default__(self, data, children, meta): + if not getattr(meta, 'match_tree', False): + return Tree(data, children) + + iter_args = iter(children) + to_write = [] + for sym in meta.orig_expansion: + if is_discarded_terminal(sym): + try: + v = self.term_subs[sym.name](sym) + except KeyError: + t = self.tokens[sym.name] + if not isinstance(t.pattern, PatternStr): + raise NotImplementedError("Reconstructing regexps not supported yet: %s" % t) + + v = t.pattern.value + to_write.append(v) + else: + x = next(iter_args) + if isinstance(x, list): + to_write += x + else: + if isinstance(x, Token): + assert Terminal(x.type) == sym, x + else: + assert NonTerminal(x.data) == sym, (sym, x) + to_write.append(x) + + assert is_iter_empty(iter_args) + return to_write + + +class Reconstructor(TreeMatcher): + """ + A Reconstructor that will, given a full parse Tree, generate source code. + + Note: + The reconstructor cannot generate values from regexps. If you need to produce discarded + regexes, such as newlines, use `term_subs` and provide default values for them. 
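+
+    Example (sketch; assumes a grammar whose ``_NL`` terminal is a regexp and
+    therefore needs a default value, and a ``tree`` produced by this parser;
+    ``maybe_placeholders=False`` is required by the underlying ``TreeMatcher``):
+        ::
+            parser = Lark(grammar, maybe_placeholders=False)
+            recons = Reconstructor(parser, term_subs={'_NL': lambda sym: '\n'})
+            print(recons.reconstruct(tree))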
+
+    Parameters:
+        parser: a Lark instance
+        term_subs: a dictionary of [Terminal name as str] to [output text as str]
+    """
+
+    write_tokens: WriteTokensTransformer
+
+    def __init__(self, parser: Lark, term_subs: Optional[Dict[str, Callable[[Symbol], str]]]=None) -> None:
+        TreeMatcher.__init__(self, parser)
+
+        self.write_tokens = WriteTokensTransformer({t.name:t for t in self.tokens}, term_subs or {})
+
+    def _reconstruct(self, tree):
+        unreduced_tree = self.match_tree(tree, tree.data)
+
+        res = self.write_tokens.transform(unreduced_tree)
+        for item in res:
+            if isinstance(item, Tree):
+                # TODO use orig_expansion.rulename to support templates
+                yield from self._reconstruct(item)
+            else:
+                yield item
+
+    def reconstruct(self, tree: ParseTree, postproc: Optional[Callable[[Iterable[str]], Iterable[str]]]=None, insert_spaces: bool=True) -> str:
+        x = self._reconstruct(tree)
+        if postproc:
+            x = postproc(x)
+        y = []
+        prev_item = ''
+        for item in x:
+            if insert_spaces and prev_item and item and is_id_continue(prev_item[-1]) and is_id_continue(item[0]):
+                y.append(' ')
+            y.append(item)
+            prev_item = item
+        return ''.join(y)
diff --git a/vendor/lark/lark/tools/__init__.py b/vendor/lark/lark/tools/__init__.py
new file mode 100644
index 00000000..6b0bd6ab
--- /dev/null
+++ b/vendor/lark/lark/tools/__init__.py
@@ -0,0 +1,64 @@
+import sys
+from argparse import ArgumentParser, FileType
+from textwrap import indent
+from logging import DEBUG, INFO, WARN, ERROR
+from typing import Optional
+import warnings
+
+from lark import Lark, logger
+
+lalr_argparser = ArgumentParser(add_help=False, epilog='Look at the Lark documentation for more info on the options')
+
+flags = [
+    ('d', 'debug'),
+    'keep_all_tokens',
+    'regex',
+    'propagate_positions',
+    'maybe_placeholders',
+    'use_bytes'
+]
+
+options = ['start', 'lexer']
+
+lalr_argparser.add_argument('-v', '--verbose', action='count', default=0, help="Increase Logger output level, up to three times")
+lalr_argparser.add_argument('-s', '--start', action='append', default=[])
+lalr_argparser.add_argument('-l', '--lexer', default='contextual', choices=('basic', 'contextual'))
+encoding: Optional[str] = 'utf-8' if sys.version_info > (3, 4) else None
+lalr_argparser.add_argument('-o', '--out', type=FileType('w', encoding=encoding), default=sys.stdout, help='the output file (default=stdout)')
+lalr_argparser.add_argument('grammar_file', type=FileType('r', encoding=encoding), help='A valid .lark file')
+
+for flag in flags:
+    if isinstance(flag, tuple):
+        options.append(flag[1])
+        lalr_argparser.add_argument('-' + flag[0], '--' + flag[1], action='store_true')
+    elif isinstance(flag, str):
+        options.append(flag)
+        lalr_argparser.add_argument('--' + flag, action='store_true')
+    else:
+        raise NotImplementedError("flags must only contain strings or tuples of strings")
+
+
+def build_lalr(namespace):
+    logger.setLevel((ERROR, WARN, INFO, DEBUG)[min(namespace.verbose, 3)])
+    if len(namespace.start) == 0:
+        namespace.start.append('start')
+    kwargs = {n: getattr(namespace, n) for n in options}
+    return Lark(namespace.grammar_file, parser='lalr', **kwargs), namespace.out
+
+
+def showwarning_as_comment(message, category, filename, lineno, file=None, line=None):
+    # Based on warnings._showwarnmsg_impl
+    text = warnings.formatwarning(message, category, filename, lineno, line)
+    text = indent(text, '# ')
+    if file is None:
+        file = sys.stderr
+        if file is None:
+            return
+    try:
+        file.write(text)
+    except OSError:
+        pass
+
+
+def make_warnings_comments():
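+    # Replace the default warning display hook so that any warnings emitted
+    # while generating code appear as '# '-prefixed comments (see above),
+    # keeping the generated output valid Python.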
warnings.showwarning = showwarning_as_comment diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/tools/nearley.py b/vendor/lark/lark/tools/nearley.py similarity index 77% rename from vendor/poetry-core/poetry/core/_vendor/lark/tools/nearley.py rename to vendor/lark/lark/tools/nearley.py index 0b04fb55..1fc27d56 100644 --- a/vendor/poetry-core/poetry/core/_vendor/lark/tools/nearley.py +++ b/vendor/lark/lark/tools/nearley.py @@ -1,11 +1,12 @@ -"Converts between Lark and Nearley grammars. Work in progress!" +"Converts Nearley grammars to Lark" import os.path import sys import codecs +import argparse -from lark import Lark, InlineTransformer +from lark import Lark, Transformer, v_args nearley_grammar = r""" start: (ruledef|directive)+ @@ -34,20 +35,23 @@ COMMENT: /#[^\n]*/ REGEXP: /\[.*?\]/ - %import common.ESCAPED_STRING -> STRING + STRING: _STRING "i"? + + %import common.ESCAPED_STRING -> _STRING %import common.WS %ignore WS %ignore COMMENT """ -nearley_grammar_parser = Lark(nearley_grammar, parser='earley', lexer='standard') +nearley_grammar_parser = Lark(nearley_grammar, parser='earley', lexer='basic') def _get_rulename(name): - name = {'_': '_ws_maybe', '__':'_ws'}.get(name, name) + name = {'_': '_ws_maybe', '__': '_ws'}.get(name, name) return 'n_' + name.replace('$', '__DOLLAR__').lower() -class NearleyToLark(InlineTransformer): +@v_args(inline=True) +class NearleyToLark(Transformer): def __init__(self): self._count = 0 self.extra_rules = {} @@ -130,14 +134,14 @@ def _nearley_to_lark(g, builtin_path, n2l, js_code, folder_path, includes): elif statement.data == 'macro': pass # TODO Add support for macros! elif statement.data == 'ruledef': - rule_defs.append( n2l.transform(statement) ) + rule_defs.append(n2l.transform(statement)) else: raise Exception("Unknown statement: %s" % statement) return rule_defs -def create_code_for_nearley_grammar(g, start, builtin_path, folder_path): +def create_code_for_nearley_grammar(g, start, builtin_path, folder_path, es6=False): import js2py emit_code = [] @@ -160,7 +164,10 @@ def emit(x=None): for alias, code in n2l.alias_js_code.items(): js_code.append('%s = (%s);' % (alias, code)) - emit(js2py.translate_js('\n'.join(js_code))) + if es6: + emit(js2py.translate_js6('\n'.join(js_code))) + else: + emit(js2py.translate_js('\n'.join(js_code))) emit('class TransformNearley(Transformer):') for alias in n2l.alias_js_code: emit(" %s = var.get('%s').to_python()" % (alias, alias)) @@ -173,18 +180,23 @@ def emit(x=None): return ''.join(emit_code) -def main(fn, start, nearley_lib): +def main(fn, start, nearley_lib, es6=False): with codecs.open(fn, encoding='utf8') as f: grammar = f.read() - return create_code_for_nearley_grammar(grammar, start, os.path.join(nearley_lib, 'builtin'), os.path.abspath(os.path.dirname(fn))) + return create_code_for_nearley_grammar(grammar, start, os.path.join(nearley_lib, 'builtin'), os.path.abspath(os.path.dirname(fn)), es6=es6) +def get_arg_parser(): + parser = argparse.ArgumentParser(description='Reads a Nearley grammar (with js functions), and outputs an equivalent lark parser.') + parser.add_argument('nearley_grammar', help='Path to the file containing the nearley grammar') + parser.add_argument('start_rule', help='Rule within the nearley grammar to make the base rule') + parser.add_argument('nearley_lib', help='Path to root directory of nearley codebase (used for including builtins)') + parser.add_argument('--es6', help='Enable experimental ES6 support', action='store_true') + return parser if __name__ == '__main__': - if 
len(sys.argv) < 4: - print("Reads Nearley grammar (with js functions) outputs an equivalent lark parser.") - print("Usage: %s " % sys.argv[0]) + parser = get_arg_parser() + if len(sys.argv) == 1: + parser.print_help(sys.stderr) sys.exit(1) - - fn, start, nearley_lib = sys.argv[1:] - - print(main(fn, start, nearley_lib)) + args = parser.parse_args() + print(main(fn=args.nearley_grammar, start=args.start_rule, nearley_lib=args.nearley_lib, es6=args.es6)) diff --git a/vendor/lark/lark/tools/serialize.py b/vendor/lark/lark/tools/serialize.py new file mode 100644 index 00000000..61540242 --- /dev/null +++ b/vendor/lark/lark/tools/serialize.py @@ -0,0 +1,34 @@ +import codecs +import sys +import json + +from lark import Lark +from lark.grammar import RuleOptions, Rule +from lark.lexer import TerminalDef +from lark.tools import lalr_argparser, build_lalr + +import argparse + +argparser = argparse.ArgumentParser(prog='python -m lark.tools.serialize', parents=[lalr_argparser], + description="Lark Serialization Tool - Stores Lark's internal state & LALR analysis as a JSON file", + epilog='Look at the Lark documentation for more info on the options') + + +def serialize(lark_inst, outfile): + data, memo = lark_inst.memo_serialize([TerminalDef, Rule]) + outfile.write('{\n') + outfile.write(' "data": %s,\n' % json.dumps(data)) + outfile.write(' "memo": %s\n' % json.dumps(memo)) + outfile.write('}\n') + + +def main(): + if len(sys.argv)==1: + argparser.print_help(sys.stderr) + sys.exit(1) + ns = argparser.parse_args() + serialize(*build_lalr(ns)) + + +if __name__ == '__main__': + main() diff --git a/vendor/lark/lark/tools/standalone.py b/vendor/lark/lark/tools/standalone.py new file mode 100644 index 00000000..9989f872 --- /dev/null +++ b/vendor/lark/lark/tools/standalone.py @@ -0,0 +1,190 @@ +###{standalone +# +# +# Lark Stand-alone Generator Tool +# ---------------------------------- +# Generates a stand-alone LALR(1) parser +# +# Git: https://github.com/erezsh/lark +# Author: Erez Shinan (erezshin@gmail.com) +# +# +# >>> LICENSE +# +# This tool and its generated code use a separate license from Lark, +# and are subject to the terms of the Mozilla Public License, v. 2.0. +# If a copy of the MPL was not distributed with this +# file, You can obtain one at https://mozilla.org/MPL/2.0/. +# +# If you wish to purchase a commercial license for this tool and its +# generated code, you may contact me via email or otherwise. +# +# If MPL2 is incompatible with your free or open-source project, +# contact me and we'll work it out. 
+# +# + +from abc import ABC, abstractmethod +from collections.abc import Sequence +from types import ModuleType +from typing import ( + TypeVar, Generic, Type, Tuple, List, Dict, Iterator, Collection, Callable, Optional, FrozenSet, Any, + Union, Iterable, IO, TYPE_CHECKING, + Pattern as REPattern, ClassVar, Set, Mapping +) +###} + +import sys +import token, tokenize +import os +from os import path +from collections import defaultdict +from functools import partial +from argparse import ArgumentParser + +import lark +from lark.tools import lalr_argparser, build_lalr, make_warnings_comments + + +from lark.grammar import Rule +from lark.lexer import TerminalDef + +_dir = path.dirname(__file__) +_larkdir = path.join(_dir, path.pardir) + + +EXTRACT_STANDALONE_FILES = [ + 'tools/standalone.py', + 'exceptions.py', + 'utils.py', + 'tree.py', + 'visitors.py', + 'grammar.py', + 'lexer.py', + 'common.py', + 'parse_tree_builder.py', + 'parsers/lalr_parser.py', + 'parsers/lalr_analysis.py', + 'parser_frontends.py', + 'lark.py', + 'indenter.py', +] + +def extract_sections(lines): + section = None + text = [] + sections = defaultdict(list) + for line in lines: + if line.startswith('###'): + if line[3] == '{': + section = line[4:].strip() + elif line[3] == '}': + sections[section] += text + section = None + text = [] + else: + raise ValueError(line) + elif section: + text.append(line) + + return {name: ''.join(text) for name, text in sections.items()} + + +def strip_docstrings(line_gen): + """ Strip comments and docstrings from a file. + Based on code from: https://stackoverflow.com/questions/1769332/script-to-remove-python-comments-docstrings + """ + res = [] + + prev_toktype = token.INDENT + last_lineno = -1 + last_col = 0 + + tokgen = tokenize.generate_tokens(line_gen) + for toktype, ttext, (slineno, scol), (elineno, ecol), ltext in tokgen: + if slineno > last_lineno: + last_col = 0 + if scol > last_col: + res.append(" " * (scol - last_col)) + if toktype == token.STRING and prev_toktype == token.INDENT: + # Docstring + res.append("#--") + elif toktype == tokenize.COMMENT: + # Comment + res.append("##\n") + else: + res.append(ttext) + prev_toktype = toktype + last_col = ecol + last_lineno = elineno + + return ''.join(res) + + +def gen_standalone(lark_inst, output=None, out=sys.stdout, compress=False): + if output is None: + output = partial(print, file=out) + + import pickle, zlib, base64 + def compressed_output(obj): + s = pickle.dumps(obj, pickle.HIGHEST_PROTOCOL) + c = zlib.compress(s) + output(repr(base64.b64encode(c))) + + def output_decompress(name): + output('%(name)s = pickle.loads(zlib.decompress(base64.b64decode(%(name)s)))' % locals()) + + output('# The file was automatically generated by Lark v%s' % lark.__version__) + output('__version__ = "%s"' % lark.__version__) + output() + + for i, pyfile in enumerate(EXTRACT_STANDALONE_FILES): + with open(os.path.join(_larkdir, pyfile)) as f: + code = extract_sections(f)['standalone'] + if i: # if not this file + code = strip_docstrings(partial(next, iter(code.splitlines(True)))) + output(code) + + data, m = lark_inst.memo_serialize([TerminalDef, Rule]) + output('import pickle, zlib, base64') + if compress: + output('DATA = (') + compressed_output(data) + output(')') + output_decompress('DATA') + output('MEMO = (') + compressed_output(m) + output(')') + output_decompress('MEMO') + else: + output('DATA = (') + output(data) + output(')') + output('MEMO = (') + output(m) + output(')') + + + output('Shift = 0') + output('Reduce = 1') + output("def 
Lark_StandAlone(**kwargs):") + output(" return Lark._load_from_dict(DATA, MEMO, **kwargs)") + + + + +def main(): + make_warnings_comments() + parser = ArgumentParser(prog="prog='python -m lark.tools.standalone'", description="Lark Stand-alone Generator Tool", + parents=[lalr_argparser], epilog='Look at the Lark documentation for more info on the options') + parser.add_argument('-c', '--compress', action='store_true', default=0, help="Enable compression") + if len(sys.argv) == 1: + parser.print_help(sys.stderr) + sys.exit(1) + ns = parser.parse_args() + + lark_inst, out = build_lalr(ns) + gen_standalone(lark_inst, out=out, compress=ns.compress) + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/vendor/lark/lark/tree.py b/vendor/lark/lark/tree.py new file mode 100644 index 00000000..51c962d1 --- /dev/null +++ b/vendor/lark/lark/tree.py @@ -0,0 +1,262 @@ +import sys +from copy import deepcopy + +from typing import List, Callable, Iterator, Union, Optional, Generic, TypeVar, Any, TYPE_CHECKING + +if TYPE_CHECKING: + from .lexer import TerminalDef, Token + if sys.version_info >= (3, 8): + from typing import Literal + else: + from typing_extensions import Literal + +###{standalone +from collections import OrderedDict + +class Meta: + + empty: bool + line: int + column: int + start_pos: int + end_line: int + end_column: int + end_pos: int + orig_expansion: 'List[TerminalDef]' + match_tree: bool + + def __init__(self): + self.empty = True + + +_Leaf_T = TypeVar("_Leaf_T") +Branch = Union[_Leaf_T, 'Tree[_Leaf_T]'] + + +class Tree(Generic[_Leaf_T]): + """The main tree class. + + Creates a new tree, and stores "data" and "children" in attributes of the same name. + Trees can be hashed and compared. + + Parameters: + data: The name of the rule or alias + children: List of matched sub-rules and terminals + meta: Line & Column numbers (if ``propagate_positions`` is enabled). + meta attributes: line, column, start_pos, end_line, end_column, end_pos + """ + + data: str + children: 'List[Branch[_Leaf_T]]' + + def __init__(self, data: str, children: 'List[Branch[_Leaf_T]]', meta: Optional[Meta]=None) -> None: + self.data = data + self.children = children + self._meta = meta + + @property + def meta(self) -> Meta: + if self._meta is None: + self._meta = Meta() + return self._meta + + def __repr__(self): + return 'Tree(%r, %r)' % (self.data, self.children) + + def _pretty_label(self): + return self.data + + def _pretty(self, level, indent_str): + if len(self.children) == 1 and not isinstance(self.children[0], Tree): + return [indent_str*level, self._pretty_label(), '\t', '%s' % (self.children[0],), '\n'] + + l = [indent_str*level, self._pretty_label(), '\n'] + for n in self.children: + if isinstance(n, Tree): + l += n._pretty(level+1, indent_str) + else: + l += [indent_str*(level+1), '%s' % (n,), '\n'] + + return l + + def pretty(self, indent_str: str=' ') -> str: + """Returns an indented string representation of the tree. + + Great for debugging. + """ + return ''.join(self._pretty(0, indent_str)) + + def __rich__(self, parent:'rich.tree.Tree'=None) -> 'rich.tree.Tree': + """Returns a tree widget for the 'rich' library. 
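+
+        If a ``parent`` widget is given, the subtree is attached to it instead
+        of becoming a new root (this is how ``_rich`` recurses internally).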
+ + Example: + :: + from rich import print + from lark import Tree + + tree = Tree('root', ['node1', 'node2']) + print(tree) + """ + return self._rich(parent) + + def _rich(self, parent): + if parent: + tree = parent.add(f'[bold]{self.data}[/bold]') + else: + import rich.tree + tree = rich.tree.Tree(self.data) + + for c in self.children: + if isinstance(c, Tree): + c._rich(tree) + else: + tree.add(f'[green]{c}[/green]') + + return tree + + def __eq__(self, other): + try: + return self.data == other.data and self.children == other.children + except AttributeError: + return False + + def __ne__(self, other): + return not (self == other) + + def __hash__(self) -> int: + return hash((self.data, tuple(self.children))) + + def iter_subtrees(self) -> 'Iterator[Tree[_Leaf_T]]': + """Depth-first iteration. + + Iterates over all the subtrees, never returning to the same node twice (Lark's parse-tree is actually a DAG). + """ + queue = [self] + subtrees = OrderedDict() + for subtree in queue: + subtrees[id(subtree)] = subtree + # Reason for type ignore https://github.com/python/mypy/issues/10999 + queue += [c for c in reversed(subtree.children) # type: ignore[misc] + if isinstance(c, Tree) and id(c) not in subtrees] + + del queue + return reversed(list(subtrees.values())) + + def find_pred(self, pred: 'Callable[[Tree[_Leaf_T]], bool]') -> 'Iterator[Tree[_Leaf_T]]': + """Returns all nodes of the tree that evaluate pred(node) as true.""" + return filter(pred, self.iter_subtrees()) + + def find_data(self, data: str) -> 'Iterator[Tree[_Leaf_T]]': + """Returns all nodes of the tree whose data equals the given data.""" + return self.find_pred(lambda t: t.data == data) + +###} + + def expand_kids_by_data(self, *data_values): + """Expand (inline) children with any of the given data values. Returns True if anything changed""" + changed = False + for i in range(len(self.children)-1, -1, -1): + child = self.children[i] + if isinstance(child, Tree) and child.data in data_values: + self.children[i:i+1] = child.children + changed = True + return changed + + + def scan_values(self, pred: 'Callable[[Branch[_Leaf_T]], bool]') -> Iterator[_Leaf_T]: + """Return all values in the tree that evaluate pred(value) as true. + + This can be used to find all the tokens in the tree. + + Example: + >>> all_tokens = tree.scan_values(lambda v: isinstance(v, Token)) + """ + for c in self.children: + if isinstance(c, Tree): + for t in c.scan_values(pred): + yield t + else: + if pred(c): + yield c + + def iter_subtrees_topdown(self): + """Breadth-first iteration. + + Iterates over all the subtrees, return nodes in order like pretty() does. 
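+
+        Example (sketch):
+            ::
+                for subtree in tree.iter_subtrees_topdown():
+                    print(subtree.data)   # each parent is yielded before its children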
+ """ + stack = [self] + while stack: + node = stack.pop() + if not isinstance(node, Tree): + continue + yield node + for child in reversed(node.children): + stack.append(child) + + def __deepcopy__(self, memo): + return type(self)(self.data, deepcopy(self.children, memo), meta=self._meta) + + def copy(self) -> 'Tree[_Leaf_T]': + return type(self)(self.data, self.children) + + def set(self, data: str, children: 'List[Branch[_Leaf_T]]') -> None: + self.data = data + self.children = children + + +ParseTree = Tree['Token'] + + +class SlottedTree(Tree): + __slots__ = 'data', 'children', 'rule', '_meta' + + +def pydot__tree_to_png(tree: Tree, filename: str, rankdir: 'Literal["TB", "LR", "BT", "RL"]'="LR", **kwargs) -> None: + graph = pydot__tree_to_graph(tree, rankdir, **kwargs) + graph.write_png(filename) + + +def pydot__tree_to_dot(tree: Tree, filename, rankdir="LR", **kwargs): + graph = pydot__tree_to_graph(tree, rankdir, **kwargs) + graph.write(filename) + + +def pydot__tree_to_graph(tree: Tree, rankdir="LR", **kwargs): + """Creates a colorful image that represents the tree (data+children, without meta) + + Possible values for `rankdir` are "TB", "LR", "BT", "RL", corresponding to + directed graphs drawn from top to bottom, from left to right, from bottom to + top, and from right to left, respectively. + + `kwargs` can be any graph attribute (e. g. `dpi=200`). For a list of + possible attributes, see https://www.graphviz.org/doc/info/attrs.html. + """ + + import pydot # type: ignore[import] + graph = pydot.Dot(graph_type='digraph', rankdir=rankdir, **kwargs) + + i = [0] + + def new_leaf(leaf): + node = pydot.Node(i[0], label=repr(leaf)) + i[0] += 1 + graph.add_node(node) + return node + + def _to_pydot(subtree): + color = hash(subtree.data) & 0xffffff + color |= 0x808080 + + subnodes = [_to_pydot(child) if isinstance(child, Tree) else new_leaf(child) + for child in subtree.children] + node = pydot.Node(i[0], style="filled", fillcolor="#%x" % color, label=subtree.data) + i[0] += 1 + graph.add_node(node) + + for subnode in subnodes: + graph.add_edge(pydot.Edge(node, subnode)) + + return node + + _to_pydot(tree) + return graph diff --git a/vendor/lark/lark/tree_matcher.py b/vendor/lark/lark/tree_matcher.py new file mode 100644 index 00000000..fe6bd5f1 --- /dev/null +++ b/vendor/lark/lark/tree_matcher.py @@ -0,0 +1,186 @@ +"""Tree matcher based on Lark grammar""" + +import re +from collections import defaultdict + +from . 
import Tree, Token
+from .common import ParserConf
+from .parsers import earley
+from .grammar import Rule, Terminal, NonTerminal
+
+
+def is_discarded_terminal(t):
+    return t.is_term and t.filter_out
+
+
+class _MakeTreeMatch:
+    def __init__(self, name, expansion):
+        self.name = name
+        self.expansion = expansion
+
+    def __call__(self, args):
+        t = Tree(self.name, args)
+        t.meta.match_tree = True
+        t.meta.orig_expansion = self.expansion
+        return t
+
+
+def _best_from_group(seq, group_key, cmp_key):
+    d = {}
+    for item in seq:
+        key = group_key(item)
+        if key in d:
+            v1 = cmp_key(item)
+            v2 = cmp_key(d[key])
+            if v2 > v1:
+                d[key] = item
+        else:
+            d[key] = item
+    return list(d.values())
+
+
+def _best_rules_from_group(rules):
+    rules = _best_from_group(rules, lambda r: r, lambda r: -len(r.expansion))
+    rules.sort(key=lambda r: len(r.expansion))
+    return rules
+
+
+def _match(term, token):
+    if isinstance(token, Tree):
+        name, _args = parse_rulename(term.name)
+        return token.data == name
+    elif isinstance(token, Token):
+        return term == Terminal(token.type)
+    assert False, (term, token)
+
+
+def make_recons_rule(origin, expansion, old_expansion):
+    return Rule(origin, expansion, alias=_MakeTreeMatch(origin.name, old_expansion))
+
+
+def make_recons_rule_to_term(origin, term):
+    return make_recons_rule(origin, [Terminal(term.name)], [term])
+
+
+def parse_rulename(s):
+    "Parse rule names that may contain a template syntax (like rule{a, b, ...})"
+    name, args_str = re.match(r'(\w+)(?:{(.+)})?', s).groups()
+    args = args_str and [a.strip() for a in args_str.split(',')]
+    return name, args
+
+
+
+class ChildrenLexer:
+    def __init__(self, children):
+        self.children = children
+
+    def lex(self, parser_state):
+        return self.children
+
+class TreeMatcher:
+    """Match the elements of a tree node, based on an ontology
+    provided by a Lark grammar.
+
+    Supports templates and inlined rules (`rule{a, b,..}` and `_rule`)
+
+    Initialize with an instance of Lark.
+    """
+
+    def __init__(self, parser):
+        # XXX TODO calling compile twice returns different results!
+        assert parser.options.maybe_placeholders == False
+        # XXX TODO: we just ignore the potential existence of a postlexer
+        self.tokens, rules, _extra = parser.grammar.compile(parser.options.start, set())
+
+        self.rules_for_root = defaultdict(list)
+
+        self.rules = list(self._build_recons_rules(rules))
+        self.rules.reverse()
+
+        # Choose the best rule from each group of {rule => [rule.alias]}, since we only really need one derivation.
+        self.rules = _best_rules_from_group(self.rules)
+
+        self.parser = parser
+        self._parser_cache = {}
+
+    def _build_recons_rules(self, rules):
+        "Convert tree-parsing/construction rules to tree-matching rules"
+        expand1s = {r.origin for r in rules if r.options.expand1}
+
+        aliases = defaultdict(list)
+        for r in rules:
+            if r.alias:
+                aliases[r.origin].append(r.alias)
+
+        rule_names = {r.origin for r in rules}
+        nonterminals = {sym for sym in rule_names
+                        if sym.name.startswith('_') or sym in expand1s or sym in aliases}
+
+        seen = set()
+        for r in rules:
+            recons_exp = [sym if sym in nonterminals else Terminal(sym.name)
+                          for sym in r.expansion if not is_discarded_terminal(sym)]
+
+            # Skip self-recursive constructs
+            if recons_exp == [r.origin] and r.alias is None:
+                continue
+
+            sym = NonTerminal(r.alias) if r.alias else r.origin
+            rule = make_recons_rule(sym, recons_exp, r.expansion)
+
+            if sym in expand1s and len(recons_exp) != 1:
+                self.rules_for_root[sym.name].append(rule)
+
+                if sym.name not in seen:
+                    yield make_recons_rule_to_term(sym, sym)
+                    seen.add(sym.name)
+            else:
+                if sym.name.startswith('_') or sym in expand1s:
+                    yield rule
+                else:
+                    self.rules_for_root[sym.name].append(rule)
+
+        for origin, rule_aliases in aliases.items():
+            for alias in rule_aliases:
+                yield make_recons_rule_to_term(origin, NonTerminal(alias))
+            yield make_recons_rule_to_term(origin, origin)
+
+    def match_tree(self, tree, rulename):
+        """Match the elements of `tree` to the symbols of rule `rulename`.
+
+        Parameters:
+            tree (Tree): the tree node to match
+            rulename (str): The expected full rule name (including template args)
+
+        Returns:
+            Tree: an unreduced tree that matches `rulename`
+
+        Raises:
+            UnexpectedToken: If no match was found.
+
+        Note:
+            It's the caller's responsibility to match the tree recursively.
+        """
+        if rulename:
+            # validate
+            name, _args = parse_rulename(rulename)
+            assert tree.data == name
+        else:
+            rulename = tree.data
+
+        # TODO: ambiguity?
+        try:
+            parser = self._parser_cache[rulename]
+        except KeyError:
+            rules = self.rules + _best_rules_from_group(self.rules_for_root[rulename])
+
+            # TODO pass callbacks through dict, instead of alias?
+            callbacks = {rule: rule.alias for rule in rules}
+            conf = ParserConf(rules, callbacks, [rulename])
+            parser = earley.Parser(self.parser.lexer_conf, conf, _match, resolve_ambiguity=True)
+            self._parser_cache[rulename] = parser
+
+        # find a full derivation
+        unreduced_tree = parser.parse(ChildrenLexer(tree.children), rulename)
+        assert unreduced_tree.data == rulename
+        return unreduced_tree
diff --git a/vendor/lark/lark/tree_templates.py b/vendor/lark/lark/tree_templates.py
new file mode 100644
index 00000000..25b4d249
--- /dev/null
+++ b/vendor/lark/lark/tree_templates.py
@@ -0,0 +1,170 @@
+"""This module defines utilities for matching and translating tree templates.
+
+A tree template is a tree that contains nodes that are template variables.
+
+"""
+
+from typing import Union, Optional, Mapping, Dict, Tuple, Iterator
+
+from lark import Tree, Transformer
+from lark.exceptions import MissingVariableError
+
+TreeOrCode = Union[Tree[str], str]
+_TEMPLATE_MARKER = '$'
+
+
+class TemplateConf:
+    """Template Configuration
+
+    Allows customization for different uses of Template
+    """
+
+    def __init__(self, parse=None):
+        self._parse = parse
+
+    def test_var(self, var: Union[Tree[str], str]) -> Optional[str]:
+        """Given a tree node, if it is a template variable return its name. Otherwise, return None.
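+
+        Example: with the default ``$`` marker, ``test_var("$x")`` returns
+        ``"x"``, while ``test_var("x")`` returns ``None``.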
+
+        This method may be overridden for customization
+
+        Parameters:
+            var: Tree | str - The tree node to test
+
+        """
+        if isinstance(var, str):
+            return _get_template_name(var)
+
+        if (
+            isinstance(var, Tree)
+            and var.data == "var"
+            and len(var.children) > 0
+            and isinstance(var.children[0], str)
+        ):
+            return _get_template_name(var.children[0])
+
+        return None
+
+    def _get_tree(self, template: TreeOrCode) -> Tree[str]:
+        if isinstance(template, str):
+            assert self._parse
+            template = self._parse(template)
+
+        assert isinstance(template, Tree)
+        return template
+
+    def __call__(self, template: Tree[str]) -> 'Template':
+        return Template(template, conf=self)
+
+    def _match_tree_template(self, template: TreeOrCode, tree: TreeOrCode) -> Optional[Dict[str, TreeOrCode]]:
+        template_var = self.test_var(template)
+        if template_var:
+            return {template_var: tree}
+
+        if isinstance(template, str):
+            if template == tree:
+                return {}
+            return None
+
+        assert isinstance(template, Tree) and isinstance(tree, Tree), f"template={template} tree={tree}"
+
+        if template.data == tree.data and len(template.children) == len(tree.children):
+            res = {}
+            for t1, t2 in zip(template.children, tree.children):
+                matches = self._match_tree_template(t1, t2)
+                if matches is None:
+                    return None
+
+                res.update(matches)
+
+            return res
+
+        return None
+
+
+class _ReplaceVars(Transformer[str, Tree[str]]):
+    def __init__(self, conf: TemplateConf, vars: Mapping[str, Tree[str]]) -> None:
+        super().__init__()
+        self._conf = conf
+        self._vars = vars
+
+    def __default__(self, data, children, meta) -> Tree[str]:
+        tree = super().__default__(data, children, meta)
+
+        var = self._conf.test_var(tree)
+        if var:
+            try:
+                return self._vars[var]
+            except KeyError:
+                raise MissingVariableError(f"No mapping for template variable ({var})")
+        return tree
+
+
+class Template:
+    """Represents a tree template, tied to a specific configuration
+
+    A tree template is a tree that contains nodes that are template variables.
+    Those variables will match any tree.
+    (future versions may support annotations on the variables, to allow more complex templates)
+    """
+
+    def __init__(self, tree: Tree[str], conf: TemplateConf = TemplateConf()):
+        self.conf = conf
+        self.tree = conf._get_tree(tree)
+
+    def match(self, tree: TreeOrCode) -> Optional[Dict[str, TreeOrCode]]:
+        """Match a tree template to a tree.
+
+        A tree template without variables will only match ``tree`` if it is equal to the template.
+
+        Parameters:
+            tree (Tree): The tree to match to the template
+
+        Returns:
+            Optional[Dict[str, Tree]]: If match is found, returns a dictionary mapping
+                template variable names to their matching tree nodes.
+                If no match was found, returns None.
+        """
+        tree = self.conf._get_tree(tree)
+        return self.conf._match_tree_template(self.tree, tree)
+
+    def search(self, tree: TreeOrCode) -> Iterator[Tuple[Tree[str], Dict[str, TreeOrCode]]]:
+        """Search for all occurrences of the tree template inside ``tree``.
+        """
+        tree = self.conf._get_tree(tree)
+        for subtree in tree.iter_subtrees():
+            res = self.match(subtree)
+            if res:
+                yield subtree, res
+
+    def apply_vars(self, vars: Mapping[str, Tree[str]]) -> Tree[str]:
+        """Apply vars to the template tree
+        """
+        return _ReplaceVars(self.conf, vars).transform(self.tree)
+
+
+def translate(t1: Template, t2: Template, tree: TreeOrCode):
+    """Search tree and translate each occurrence of t1 into t2.
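+
+    Example (sketch; assumes a ``parse`` callable whose grammar lets template
+    variables such as ``$x`` appear where ordinary nodes would):
+        ::
+            conf = TemplateConf(parse=parse)
+            t1 = conf(parse("$x + 0"))
+            t2 = conf(parse("$x"))
+            new_tree = translate(t1, t2, tree)   # rewrites every '<expr> + 0' into '<expr>'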
+ """ + tree = t1.conf._get_tree(tree) # ensure it's a tree, parse if necessary and possible + for subtree, vars in t1.search(tree): + res = t2.apply_vars(vars) + subtree.set(res.data, res.children) + return tree + + +class TemplateTranslator: + """Utility class for translating a collection of patterns + """ + + def __init__(self, translations: Mapping[Template, Template]): + assert all(isinstance(k, Template) and isinstance(v, Template) for k, v in translations.items()) + self.translations = translations + + def translate(self, tree: Tree[str]): + for k, v in self.translations.items(): + tree = translate(k, v, tree) + return tree + + +def _get_template_name(value: str) -> Optional[str]: + return value.lstrip(_TEMPLATE_MARKER) if value.startswith(_TEMPLATE_MARKER) else None diff --git a/vendor/lark/lark/utils.py b/vendor/lark/lark/utils.py new file mode 100644 index 00000000..d7bab6fa --- /dev/null +++ b/vendor/lark/lark/utils.py @@ -0,0 +1,322 @@ +import unicodedata +import os +from functools import reduce +from collections import deque + +###{standalone +import sys, re +import logging +logger: logging.Logger = logging.getLogger("lark") +logger.addHandler(logging.StreamHandler()) +# Set to highest level, since we have some warnings amongst the code +# By default, we should not output any log messages +logger.setLevel(logging.CRITICAL) + + +NO_VALUE = object() + + +def classify(seq, key=None, value=None): + d = {} + for item in seq: + k = key(item) if (key is not None) else item + v = value(item) if (value is not None) else item + if k in d: + d[k].append(v) + else: + d[k] = [v] + return d + + +def _deserialize(data, namespace, memo): + if isinstance(data, dict): + if '__type__' in data: # Object + class_ = namespace[data['__type__']] + return class_.deserialize(data, memo) + elif '@' in data: + return memo[data['@']] + return {key:_deserialize(value, namespace, memo) for key, value in data.items()} + elif isinstance(data, list): + return [_deserialize(value, namespace, memo) for value in data] + return data + + +class Serialize: + """Safe-ish serialization interface that doesn't rely on Pickle + + Attributes: + __serialize_fields__ (List[str]): Fields (aka attributes) to serialize. + __serialize_namespace__ (list): List of classes that deserialization is allowed to instantiate. + Should include all field types that aren't builtin types. 
+ """ + + def memo_serialize(self, types_to_memoize): + memo = SerializeMemoizer(types_to_memoize) + return self.serialize(memo), memo.serialize() + + def serialize(self, memo=None): + if memo and memo.in_types(self): + return {'@': memo.memoized.get(self)} + + fields = getattr(self, '__serialize_fields__') + res = {f: _serialize(getattr(self, f), memo) for f in fields} + res['__type__'] = type(self).__name__ + if hasattr(self, '_serialize'): + self._serialize(res, memo) + return res + + @classmethod + def deserialize(cls, data, memo): + namespace = getattr(cls, '__serialize_namespace__', []) + namespace = {c.__name__:c for c in namespace} + + fields = getattr(cls, '__serialize_fields__') + + if '@' in data: + return memo[data['@']] + + inst = cls.__new__(cls) + for f in fields: + try: + setattr(inst, f, _deserialize(data[f], namespace, memo)) + except KeyError as e: + raise KeyError("Cannot find key for class", cls, e) + + if hasattr(inst, '_deserialize'): + inst._deserialize() + + return inst + + +class SerializeMemoizer(Serialize): + "A version of serialize that memoizes objects to reduce space" + + __serialize_fields__ = 'memoized', + + def __init__(self, types_to_memoize): + self.types_to_memoize = tuple(types_to_memoize) + self.memoized = Enumerator() + + def in_types(self, value): + return isinstance(value, self.types_to_memoize) + + def serialize(self): + return _serialize(self.memoized.reversed(), None) + + @classmethod + def deserialize(cls, data, namespace, memo): + return _deserialize(data, namespace, memo) + + +try: + import regex # type: ignore +except ImportError: + regex = None + +import sre_parse +import sre_constants +categ_pattern = re.compile(r'\\p{[A-Za-z_]+}') + +def get_regexp_width(expr): + if regex: + # Since `sre_parse` cannot deal with Unicode categories of the form `\p{Mn}`, we replace these with + # a simple letter, which makes no difference as we are only trying to get the possible lengths of the regex + # match here below. + regexp_final = re.sub(categ_pattern, 'A', expr) + else: + if re.search(categ_pattern, expr): + raise ImportError('`regex` module must be installed in order to use Unicode categories.', expr) + regexp_final = expr + try: + return [int(x) for x in sre_parse.parse(regexp_final).getwidth()] + except sre_constants.error: + if not regex: + raise ValueError(expr) + else: + # sre_parse does not support the new features in regex. To not completely fail in that case, + # we manually test for the most important info (whether the empty string is matched) + c = regex.compile(regexp_final) + if c.match('') is None: + # MAXREPEAT is a none pickable subclass of int, therefore needs to be converted to enable caching + return 1, int(sre_constants.MAXREPEAT) + else: + return 0, int(sre_constants.MAXREPEAT) + +###} + + +_ID_START = 'Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Mn', 'Mc', 'Pc' +_ID_CONTINUE = _ID_START + ('Nd', 'Nl',) + +def _test_unicode_category(s, categories): + if len(s) != 1: + return all(_test_unicode_category(char, categories) for char in s) + return s == '_' or unicodedata.category(s) in categories + +def is_id_continue(s): + """ + Checks if all characters in `s` are alphanumeric characters (Unicode standard, so diacritics, indian vowels, non-latin + numbers, etc. all pass). Synonymous with a Python `ID_CONTINUE` identifier. See PEP 3131 for details. 
+ """ + return _test_unicode_category(s, _ID_CONTINUE) + +def is_id_start(s): + """ + Checks if all characters in `s` are alphabetic characters (Unicode standard, so diacritics, indian vowels, non-latin + numbers, etc. all pass). Synonymous with a Python `ID_START` identifier. See PEP 3131 for details. + """ + return _test_unicode_category(s, _ID_START) + + +def dedup_list(l): + """Given a list (l) will removing duplicates from the list, + preserving the original order of the list. Assumes that + the list entries are hashable.""" + dedup = set() + return [x for x in l if not (x in dedup or dedup.add(x))] + + +class Enumerator(Serialize): + def __init__(self): + self.enums = {} + + def get(self, item): + if item not in self.enums: + self.enums[item] = len(self.enums) + return self.enums[item] + + def __len__(self): + return len(self.enums) + + def reversed(self): + r = {v: k for k, v in self.enums.items()} + assert len(r) == len(self.enums) + return r + + + +def combine_alternatives(lists): + """ + Accepts a list of alternatives, and enumerates all their possible concatinations. + + Examples: + >>> combine_alternatives([range(2), [4,5]]) + [[0, 4], [0, 5], [1, 4], [1, 5]] + + >>> combine_alternatives(["abc", "xy", '$']) + [['a', 'x', '$'], ['a', 'y', '$'], ['b', 'x', '$'], ['b', 'y', '$'], ['c', 'x', '$'], ['c', 'y', '$']] + + >>> combine_alternatives([]) + [[]] + """ + if not lists: + return [[]] + assert all(l for l in lists), lists + init = [[x] for x in lists[0]] + return reduce(lambda a,b: [i+[j] for i in a for j in b], lists[1:], init) + + +try: + import atomicwrites +except ImportError: + atomicwrites = None # type: ignore[assigment] + +class FS: + exists = staticmethod(os.path.exists) + + @staticmethod + def open(name, mode="r", **kwargs): + if atomicwrites and "w" in mode: + return atomicwrites.atomic_write(name, mode=mode, overwrite=True, **kwargs) + else: + return open(name, mode, **kwargs) + + + +def isascii(s): + """ str.isascii only exists in python3.7+ """ + try: + return s.isascii() + except AttributeError: + try: + s.encode('ascii') + return True + except (UnicodeDecodeError, UnicodeEncodeError): + return False + + +class fzset(frozenset): + def __repr__(self): + return '{%s}' % ', '.join(map(repr, self)) + + +def classify_bool(seq, pred): + true_elems = [] + false_elems = [] + + for elem in seq: + if pred(elem): + true_elems.append(elem) + else: + false_elems.append(elem) + + return true_elems, false_elems + + +def bfs(initial, expand): + open_q = deque(list(initial)) + visited = set(open_q) + while open_q: + node = open_q.popleft() + yield node + for next_node in expand(node): + if next_node not in visited: + visited.add(next_node) + open_q.append(next_node) + +def bfs_all_unique(initial, expand): + "bfs, but doesn't keep track of visited (aka seen), because there can be no repetitions" + open_q = deque(list(initial)) + while open_q: + node = open_q.popleft() + yield node + open_q += expand(node) + + +def _serialize(value, memo): + if isinstance(value, Serialize): + return value.serialize(memo) + elif isinstance(value, list): + return [_serialize(elem, memo) for elem in value] + elif isinstance(value, frozenset): + return list(value) # TODO reversible? + elif isinstance(value, dict): + return {key:_serialize(elem, memo) for key, elem in value.items()} + # assert value is None or isinstance(value, (int, float, str, tuple)), value + return value + + + + +def small_factors(n, max_factor): + """ + Splits n up into smaller factors and summands <= max_factor. 
+ Returns a list of [(a, b), ...] + so that the following code returns n: + + n = 1 + for a, b in values: + n = n * a + b + + Currently, we also keep a + b <= max_factor, but that might change + """ + assert n >= 0 + assert max_factor > 2 + if n <= max_factor: + return [(n, 0)] + + for a in range(max_factor, 1, -1): + r, b = divmod(n, a) + if a + b <= max_factor: + return small_factors(r, max_factor) + [(a, b)] + assert False, "Failed to factorize %s" % n diff --git a/vendor/lark/lark/visitors.py b/vendor/lark/lark/visitors.py new file mode 100644 index 00000000..9feced1b --- /dev/null +++ b/vendor/lark/lark/visitors.py @@ -0,0 +1,577 @@ +from typing import TypeVar, Tuple, List, Callable, Generic, Type, Union, Optional, Any, cast +from abc import ABC + +from .utils import combine_alternatives +from .tree import Tree, Branch +from .exceptions import VisitError, GrammarError +from .lexer import Token + +###{standalone +from functools import wraps, update_wrapper +from inspect import getmembers, getmro + +_Return_T = TypeVar('_Return_T') +_Return_V = TypeVar('_Return_V') +_Leaf_T = TypeVar('_Leaf_T') +_Leaf_U = TypeVar('_Leaf_U') +_R = TypeVar('_R') +_FUNC = Callable[..., _Return_T] +_DECORATED = Union[_FUNC, type] + +class _DiscardType: + """When the Discard value is returned from a transformer callback, + that node is discarded and won't appear in the parent. + + Example: + :: + + class T(Transformer): + def ignore_tree(self, children): + return Discard + + def IGNORE_TOKEN(self, token): + return Discard + """ + + def __repr__(self): + return "lark.visitors.Discard" + +Discard = _DiscardType() + +# Transformers + +class _Decoratable: + "Provides support for decorating methods with @v_args" + + @classmethod + def _apply_v_args(cls, visit_wrapper): + mro = getmro(cls) + assert mro[0] is cls + libmembers = {name for _cls in mro[1:] for name, _ in getmembers(_cls)} + for name, value in getmembers(cls): + + # Make sure the function isn't inherited (unless it's overwritten) + if name.startswith('_') or (name in libmembers and name not in cls.__dict__): + continue + if not callable(value): + continue + + # Skip if v_args already applied (at the function level) + if isinstance(cls.__dict__[name], _VArgsWrapper): + continue + + setattr(cls, name, _VArgsWrapper(cls.__dict__[name], visit_wrapper)) + return cls + + def __class_getitem__(cls, _): + return cls + + +class Transformer(_Decoratable, ABC, Generic[_Leaf_T, _Return_T]): + """Transformers visit each node of the tree, and run the appropriate method on it according to the node's data. + + Methods are provided by the user via inheritance, and called according to ``tree.data``. + The returned value from each method replaces the node in the tree structure. + + Transformers work bottom-up (or depth-first), starting with the leaves and ending at the root of the tree. + Transformers can be used to implement map & reduce patterns. Because nodes are reduced from leaf to root, + at any point the callbacks may assume the children have already been transformed (if applicable). + + ``Transformer`` can do anything ``Visitor`` can do, but because it reconstructs the tree, + it is slightly less efficient. + + To discard a node, return Discard (``lark.visitors.Discard``). + + All these classes implement the transformer interface: + + - ``Transformer`` - Recursively transforms the tree. This is the one you probably want. + - ``Transformer_InPlace`` - Non-recursive. 
Changes the tree in-place instead of returning new instances + - ``Transformer_InPlaceRecursive`` - Recursive. Changes the tree in-place instead of returning new instances + + Parameters: + visit_tokens (bool, optional): Should the transformer visit tokens in addition to rules. + Setting this to ``False`` is slightly faster. Defaults to ``True``. + (For processing ignored tokens, use the ``lexer_callbacks`` options) + + NOTE: A transformer without methods essentially performs a non-memoized partial deepcopy. + """ + __visit_tokens__ = True # For backwards compatibility + + def __init__(self, visit_tokens: bool=True) -> None: + self.__visit_tokens__ = visit_tokens + + def _call_userfunc(self, tree, new_children=None): + # Assumes tree is already transformed + children = new_children if new_children is not None else tree.children + try: + f = getattr(self, tree.data) + except AttributeError: + return self.__default__(tree.data, children, tree.meta) + else: + try: + wrapper = getattr(f, 'visit_wrapper', None) + if wrapper is not None: + return f.visit_wrapper(f, tree.data, children, tree.meta) + else: + return f(children) + except GrammarError: + raise + except Exception as e: + raise VisitError(tree.data, tree, e) + + def _call_userfunc_token(self, token): + try: + f = getattr(self, token.type) + except AttributeError: + return self.__default_token__(token) + else: + try: + return f(token) + except GrammarError: + raise + except Exception as e: + raise VisitError(token.type, token, e) + + def _transform_children(self, children): + for c in children: + if isinstance(c, Tree): + res = self._transform_tree(c) + elif self.__visit_tokens__ and isinstance(c, Token): + res = self._call_userfunc_token(c) + else: + res = c + + if res is not Discard: + yield res + + def _transform_tree(self, tree): + children = list(self._transform_children(tree.children)) + return self._call_userfunc(tree, children) + + def transform(self, tree: Tree[_Leaf_T]) -> _Return_T: + "Transform the given tree, and return the final result" + return self._transform_tree(tree) + + def __mul__( + self: 'Transformer[_Leaf_T, Tree[_Leaf_U]]', + other: 'Union[Transformer[_Leaf_U, _Return_V], TransformerChain[_Leaf_U, _Return_V,]]' + ) -> 'TransformerChain[_Leaf_T, _Return_V]': + """Chain two transformers together, returning a new transformer. + """ + return TransformerChain(self, other) + + def __default__(self, data, children, meta): + """Default function that is called if there is no attribute matching ``data`` + + Can be overridden. Defaults to creating a new copy of the tree node (i.e. ``return Tree(data, children, meta)``) + """ + return Tree(data, children, meta) + + def __default_token__(self, token): + """Default function that is called if there is no attribute matching ``token.type`` + + Can be overridden. Defaults to returning the token as-is. + """ + return token + + +def merge_transformers(base_transformer=None, **transformers_to_merge): + """Merge a collection of transformers into the base_transformer, each into its own 'namespace'. + + When called, it will collect the methods from each transformer, and assign them to base_transformer, + with their name prefixed with the given keyword, as ``prefix__methodname``. + + This function is especially useful for processing grammars that import other grammars, + thereby creating some of their rules in a 'namespace'. (i.e with a consistent name prefix). + In this case, the key for the transformer should match the name of the imported grammar. 
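The callback contract added above means that a grammar rule named ``add`` is handled by a transformer method of the same name, with children that have already been transformed. A minimal usage sketch of that contract; the grammar and the class/method names here are invented for illustration, not part of the vendored files:

from lark import Lark, Transformer

parser = Lark("""
    start: sum
    sum: NUMBER "+" NUMBER
    NUMBER: /[0-9]+/
    %ignore " "
""", parser='lalr')

class CalcTransformer(Transformer):
    def NUMBER(self, token):      # token callback; visit_tokens=True is the default
        return int(token)

    def sum(self, children):      # children arrive already transformed (here: ints)
        return children[0] + children[1]

    def start(self, children):
        return children[0]

assert CalcTransformer().transform(parser.parse("1 + 2")) == 3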
+ + Parameters: + base_transformer (Transformer, optional): The transformer that all other transformers will be added to. + **transformers_to_merge: Keyword arguments, in the form of ``name_prefix = transformer``. + + Raises: + AttributeError: In case of a name collision in the merged methods + + Example: + :: + + class TBase(Transformer): + def start(self, children): + return children[0] + 'bar' + + class TImportedGrammar(Transformer): + def foo(self, children): + return "foo" + + composed_transformer = merge_transformers(TBase(), imported=TImportedGrammar()) + + t = Tree('start', [ Tree('imported__foo', []) ]) + + assert composed_transformer.transform(t) == 'foobar' + + """ + if base_transformer is None: + base_transformer = Transformer() + for prefix, transformer in transformers_to_merge.items(): + for method_name in dir(transformer): + method = getattr(transformer, method_name) + if not callable(method): + continue + if method_name.startswith("_") or method_name == "transform": + continue + prefixed_method = prefix + "__" + method_name + if hasattr(base_transformer, prefixed_method): + raise AttributeError("Cannot merge: method '%s' appears more than once" % prefixed_method) + + setattr(base_transformer, prefixed_method, method) + + return base_transformer + + +class InlineTransformer(Transformer): # XXX Deprecated + def _call_userfunc(self, tree, new_children=None): + # Assumes tree is already transformed + children = new_children if new_children is not None else tree.children + try: + f = getattr(self, tree.data) + except AttributeError: + return self.__default__(tree.data, children, tree.meta) + else: + return f(*children) + + +class TransformerChain(Generic[_Leaf_T, _Return_T]): + + transformers: 'Tuple[Union[Transformer, TransformerChain], ...]' + + def __init__(self, *transformers: 'Union[Transformer, TransformerChain]') -> None: + self.transformers = transformers + + def transform(self, tree: Tree[_Leaf_T]) -> _Return_T: + for t in self.transformers: + tree = t.transform(tree) + return cast(_Return_T, tree) + + def __mul__( + self: 'TransformerChain[_Leaf_T, Tree[_Leaf_U]]', + other: 'Union[Transformer[_Leaf_U, _Return_V], TransformerChain[_Leaf_U, _Return_V]]' + ) -> 'TransformerChain[_Leaf_T, _Return_V]': + return TransformerChain(*self.transformers + (other,)) + + +class Transformer_InPlace(Transformer): + """Same as Transformer, but non-recursive, and changes the tree in-place instead of returning new instances + + Useful for huge trees. Conservative in memory. + """ + def _transform_tree(self, tree): # Cancel recursion + return self._call_userfunc(tree) + + def transform(self, tree: Tree[_Leaf_T]) -> _Return_T: + for subtree in tree.iter_subtrees(): + subtree.children = list(self._transform_children(subtree.children)) + + return self._transform_tree(tree) + + +class Transformer_NonRecursive(Transformer): + """Same as Transformer but non-recursive. + + Like Transformer, it doesn't change the original tree. + + Useful for huge trees. 
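As the ``__mul__`` overload earlier in this hunk suggests, two single-purpose transformers can be composed into a ``TransformerChain`` that runs left to right. A small sketch, assuming an invented grammar and invented class names:

from lark import Lark, Transformer

parser = Lark("""
    start: WORD ("," WORD)*
    %import common.WORD
    %ignore " "
""", parser='lalr')

class Upper(Transformer):
    def WORD(self, token):
        return token.update(value=token.upper())

class Join(Transformer):
    def start(self, children):
        return ",".join(children)

chained = Upper() * Join()    # TransformerChain: Upper runs first, then Join
assert chained.transform(parser.parse("a, b")) == "A,B"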
+ """ + + def transform(self, tree: Tree[_Leaf_T]) -> _Return_T: + # Tree to postfix + rev_postfix = [] + q: List[Branch[_Leaf_T]] = [tree] + while q: + t = q.pop() + rev_postfix.append(t) + if isinstance(t, Tree): + q += t.children + + # Postfix to tree + stack: List = [] + for x in reversed(rev_postfix): + if isinstance(x, Tree): + size = len(x.children) + if size: + args = stack[-size:] + del stack[-size:] + else: + args = [] + + res = self._call_userfunc(x, args) + if res is not Discard: + stack.append(res) + + elif self.__visit_tokens__ and isinstance(x, Token): + res = self._call_userfunc_token(x) + if res is not Discard: + stack.append(res) + else: + stack.append(x) + + result, = stack # We should have only one tree remaining + # There are no guarantees on the type of the value produced by calling a user func for a + # child will produce. This means type system can't statically know that the final result is + # _Return_T. As a result a cast is required. + return cast(_Return_T, result) + + +class Transformer_InPlaceRecursive(Transformer): + "Same as Transformer, recursive, but changes the tree in-place instead of returning new instances" + def _transform_tree(self, tree): + tree.children = list(self._transform_children(tree.children)) + return self._call_userfunc(tree) + + +# Visitors + +class VisitorBase: + def _call_userfunc(self, tree): + return getattr(self, tree.data, self.__default__)(tree) + + def __default__(self, tree): + """Default function that is called if there is no attribute matching ``tree.data`` + + Can be overridden. Defaults to doing nothing. + """ + return tree + + def __class_getitem__(cls, _): + return cls + + +class Visitor(VisitorBase, ABC, Generic[_Leaf_T]): + """Tree visitor, non-recursive (can handle huge trees). + + Visiting a node calls its methods (provided by the user via inheritance) according to ``tree.data`` + """ + + def visit(self, tree: Tree[_Leaf_T]) -> Tree[_Leaf_T]: + "Visits the tree, starting with the leaves and finally the root (bottom-up)" + for subtree in tree.iter_subtrees(): + self._call_userfunc(subtree) + return tree + + def visit_topdown(self, tree: Tree[_Leaf_T]) -> Tree[_Leaf_T]: + "Visit the tree, starting at the root, and ending at the leaves (top-down)" + for subtree in tree.iter_subtrees_topdown(): + self._call_userfunc(subtree) + return tree + + +class Visitor_Recursive(VisitorBase, Generic[_Leaf_T]): + """Bottom-up visitor, recursive. + + Visiting a node calls its methods (provided by the user via inheritance) according to ``tree.data`` + + Slightly faster than the non-recursive version. + """ + + def visit(self, tree: Tree[_Leaf_T]) -> Tree[_Leaf_T]: + "Visits the tree, starting with the leaves and finally the root (bottom-up)" + for child in tree.children: + if isinstance(child, Tree): + self.visit(child) + + self._call_userfunc(tree) + return tree + + def visit_topdown(self,tree: Tree[_Leaf_T]) -> Tree[_Leaf_T]: + "Visit the tree, starting at the root, and ending at the leaves (top-down)" + self._call_userfunc(tree) + + for child in tree.children: + if isinstance(child, Tree): + self.visit_topdown(child) + + return tree + + +class Interpreter(_Decoratable, ABC, Generic[_Leaf_T, _Return_T]): + """Interpreter walks the tree starting at the root. + + Visits the tree, starting with the root and finally the leaves (top-down) + + For each tree node, it calls its methods (provided by user via inheritance) according to ``tree.data``. 
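Unlike transformers, the visitor classes above leave the tree itself alone, so they are typically used to collect state or mutate nodes in place. A small collection sketch, with a grammar invented for illustration:

from lark import Lark, Tree, Visitor

parser = Lark("""
    start: pair+
    pair: WORD "=" WORD
    %import common.WORD
    %ignore " "
""", parser='lalr')

class PairCollector(Visitor):
    def __init__(self):
        self.pairs = []

    def pair(self, tree: Tree):    # called once per `pair` node, bottom-up
        key, value = tree.children
        self.pairs.append((str(key), str(value)))

collector = PairCollector()
collector.visit(parser.parse("a=b c=d"))
assert sorted(collector.pairs) == [("a", "b"), ("c", "d")]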
+ + Unlike ``Transformer`` and ``Visitor``, the Interpreter doesn't automatically visit its sub-branches. + The user has to explicitly call ``visit``, ``visit_children``, or use the ``@visit_children_decor``. + This allows the user to implement branching and loops. + """ + + def visit(self, tree: Tree[_Leaf_T]) -> _Return_T: + # There are no guarantees on the type of the value produced by calling a user func for a + # child will produce. So only annotate the public method and use an internal method when + # visiting child trees. + return self._visit_tree(tree) + + def _visit_tree(self, tree: Tree[_Leaf_T]): + f = getattr(self, tree.data) + wrapper = getattr(f, 'visit_wrapper', None) + if wrapper is not None: + return f.visit_wrapper(f, tree.data, tree.children, tree.meta) + else: + return f(tree) + + def visit_children(self, tree: Tree[_Leaf_T]) -> List: + return [self._visit_tree(child) if isinstance(child, Tree) else child + for child in tree.children] + + def __getattr__(self, name): + return self.__default__ + + def __default__(self, tree): + return self.visit_children(tree) + + +_InterMethod = Callable[[Type[Interpreter], _Return_T], _R] + +def visit_children_decor(func: _InterMethod) -> _InterMethod: + "See Interpreter" + @wraps(func) + def inner(cls, tree): + values = cls.visit_children(tree) + return func(cls, values) + return inner + +# Decorators + +def _apply_v_args(obj, visit_wrapper): + try: + _apply = obj._apply_v_args + except AttributeError: + return _VArgsWrapper(obj, visit_wrapper) + else: + return _apply(visit_wrapper) + + +class _VArgsWrapper: + """ + A wrapper around a Callable. It delegates `__call__` to the Callable. + If the Callable has a `__get__`, that is also delegate and the resulting function is wrapped. + Otherwise, we use the original function mirroring the behaviour without a __get__. + We also have the visit_wrapper attribute to be used by Transformers. + """ + base_func: Callable + + def __init__(self, func: Callable, visit_wrapper: Callable[[Callable, str, list, Any], Any]): + if isinstance(func, _VArgsWrapper): + func = func.base_func + # https://github.com/python/mypy/issues/708 + self.base_func = func # type: ignore[assignment] + self.visit_wrapper = visit_wrapper + update_wrapper(self, func) + + def __call__(self, *args, **kwargs): + return self.base_func(*args, **kwargs) + + def __get__(self, instance, owner=None): + try: + g = self.base_func.__get__ + except AttributeError: + return self + else: + return _VArgsWrapper(g(instance, owner), self.visit_wrapper) + + def __set_name__(self, owner, name): + try: + f = self.base_func.__set_name__ + except AttributeError: + return + else: + f(owner, name) + + +def _vargs_inline(f, _data, children, _meta): + return f(*children) +def _vargs_meta_inline(f, _data, children, meta): + return f(meta, *children) +def _vargs_meta(f, _data, children, meta): + return f(meta, children) +def _vargs_tree(f, data, children, meta): + return f(Tree(data, children, meta)) + + +def v_args(inline: bool = False, meta: bool = False, tree: bool = False, wrapper: Optional[Callable] = None) -> Callable[[_DECORATED], _DECORATED]: + """A convenience decorator factory for modifying the behavior of user-supplied visitor methods. + + By default, callback methods of transformers/visitors accept one argument - a list of the node's children. + + ``v_args`` can modify this behavior. When used on a transformer/visitor class definition, + it applies to all the callback methods inside it. 
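Because ``Interpreter`` (above) never descends on its own, a callback can decide whether, and how many times, to evaluate a subtree, which is what makes control flow such as loops expressible. A sketch under invented names and an invented grammar:

from lark import Lark
from lark.visitors import Interpreter

parser = Lark("""
    start: "repeat" NUMBER block
    block: "{" WORD "}"
    %import common.NUMBER
    %import common.WORD
    %ignore " "
""", parser='lalr')

class Repeater(Interpreter):
    def start(self, tree):
        count, block = tree.children
        # Children are NOT visited automatically; visit the block explicitly, N times.
        return [self.visit(block) for _ in range(int(count))]

    def block(self, tree):
        return str(tree.children[0])

assert Repeater().visit(parser.parse("repeat 3 { x }")) == ["x", "x", "x"]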
+ + ``v_args`` can be applied to a single method, or to an entire class. When applied to both, + the options given to the method take precedence. + + Parameters: + inline (bool, optional): Children are provided as ``*args`` instead of a list argument (not recommended for very long lists). + meta (bool, optional): Provides two arguments: ``children`` and ``meta`` (instead of just the first) + tree (bool, optional): Provides the entire tree as the argument, instead of the children. + wrapper (function, optional): Provide a function to decorate all methods. + + Example: + :: + + @v_args(inline=True) + class SolveArith(Transformer): + def add(self, left, right): + return left + right + + + class ReverseNotation(Transformer_InPlace): + @v_args(tree=True) + def tree_node(self, tree): + tree.children = tree.children[::-1] + """ + if tree and (meta or inline): + raise ValueError("Visitor functions cannot combine 'tree' with 'meta' or 'inline'.") + + func = None + if meta: + if inline: + func = _vargs_meta_inline + else: + func = _vargs_meta + elif inline: + func = _vargs_inline + elif tree: + func = _vargs_tree + + if wrapper is not None: + if func is not None: + raise ValueError("Cannot use 'wrapper' along with 'tree', 'meta' or 'inline'.") + func = wrapper + + def _visitor_args_dec(obj): + return _apply_v_args(obj, func) + return _visitor_args_dec + + +###} + + +# --- Visitor Utilities --- + +class CollapseAmbiguities(Transformer): + """ + Transforms a tree that contains any number of _ambig nodes into a list of trees, + each one containing an unambiguous tree. + + The length of the resulting list is the product of the length of all _ambig nodes. + + Warning: This may quickly explode for highly ambiguous trees. + + """ + def _ambig(self, options): + return sum(options, []) + + def __default__(self, data, children_lists, meta): + return [Tree(data, children, meta) for children in combine_alternatives(children_lists)] + + def __default_token__(self, t): + return [t] diff --git a/vendor/lark/pytest.ini b/vendor/lark/pytest.ini new file mode 100644 index 00000000..39503b6d --- /dev/null +++ b/vendor/lark/pytest.ini @@ -0,0 +1,6 @@ +[pytest] +minversion = 6.0 +addopts = -ra -q +testpaths = + tests +python_files = __main__.py diff --git a/vendor/lark/readthedocs.yml b/vendor/lark/readthedocs.yml new file mode 100644 index 00000000..4636dc73 --- /dev/null +++ b/vendor/lark/readthedocs.yml @@ -0,0 +1,12 @@ +version: 2 + +formats: all + +python: + version: 3.7 + install: + - requirements: docs/requirements.txt + +# Build documentation in the docs/ directory with Sphinx +sphinx: + configuration: docs/conf.py diff --git a/vendor/lark/setup.cfg b/vendor/lark/setup.cfg new file mode 100644 index 00000000..6d71f28b --- /dev/null +++ b/vendor/lark/setup.cfg @@ -0,0 +1,8 @@ +[global] +zip_safe= + +[bdist_wheel] +universal = 1 + +[metadata] +license_file = LICENSE diff --git a/vendor/lark/setup.py b/vendor/lark/setup.py new file mode 100644 index 00000000..a6b5bef0 --- /dev/null +++ b/vendor/lark/setup.py @@ -0,0 +1,72 @@ +import re +from setuptools import setup + +__version__ ,= re.findall('__version__: str = "(.*)"', open('lark/__init__.py').read()) + +setup( + name = "lark", + version = __version__, + packages = ['lark', 'lark.parsers', 'lark.tools', 'lark.grammars', 'lark.__pyinstaller'], + + requires = [], + install_requires = [], + + extras_require = { + "regex": ["regex"], + "nearley": ["js2py"], + "atomic_cache": ["atomicwrites"], + }, + + package_data = {'': ['*.md', '*.lark'], 'lark': ['py.typed']}, + 
+ test_suite = 'tests.__main__', + + # metadata for upload to PyPI + author = "Erez Shinan", + author_email = "erezshin@gmail.com", + description = "a modern parsing library", + license = "MIT", + keywords = "Earley LALR parser parsing ast", + url = "https://github.com/lark-parser/lark", + download_url = "https://github.com/lark-parser/lark/tarball/master", + long_description=''' +Lark is a modern general-purpose parsing library for Python. + +With Lark, you can parse any context-free grammar, efficiently, with very little code. + +Main Features: + - Builds a parse-tree (AST) automagically, based on the structure of the grammar + - Earley parser + - Can parse all context-free grammars + - Full support for ambiguous grammars + - LALR(1) parser + - Fast and light, competitive with PLY + - Can generate a stand-alone parser + - CYK parser, for highly ambiguous grammars + - EBNF grammar + - Unicode fully supported + - Automatic line & column tracking + - Standard library of terminals (strings, numbers, names, etc.) + - Import grammars from Nearley.js + - Extensive test suite + - And much more! + +Since version 1.0, only Python versions 3.6 and up are supported. +''', + + classifiers=[ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Programming Language :: Python :: 3", + "Topic :: Software Development :: Libraries :: Python Modules", + "Topic :: Text Processing :: General", + "Topic :: Text Processing :: Linguistic", + "License :: OSI Approved :: MIT License", + ], + entry_points = { + 'pyinstaller40': [ + 'hook-dirs = lark.__pyinstaller:get_hook_dirs' + ] + }, +) + diff --git a/vendor/lark/test-requirements.txt b/vendor/lark/test-requirements.txt new file mode 100644 index 00000000..d304ee8f --- /dev/null +++ b/vendor/lark/test-requirements.txt @@ -0,0 +1,2 @@ +Js2Py==0.68 +regex \ No newline at end of file diff --git a/vendor/poetry-core/poetry/core/spdx/data/__init__.py b/vendor/lark/tests/__init__.py similarity index 100% rename from vendor/poetry-core/poetry/core/spdx/data/__init__.py rename to vendor/lark/tests/__init__.py diff --git a/vendor/lark/tests/__main__.py b/vendor/lark/tests/__main__.py new file mode 100644 index 00000000..bbf99865 --- /dev/null +++ b/vendor/lark/tests/__main__.py @@ -0,0 +1,32 @@ +from __future__ import absolute_import, print_function + +import unittest +import logging +from lark import logger + +from .test_trees import TestTrees +from .test_tools import TestStandalone +from .test_cache import TestCache +from .test_grammar import TestGrammar +from .test_reconstructor import TestReconstructor +from .test_tree_forest_transformer import TestTreeForestTransformer +from .test_lexer import TestLexer +from .test_python_grammar import TestPythonParser +from .test_tree_templates import * # We define __all__ to list which TestSuites to run + +try: + from .test_nearley.test_nearley import TestNearley +except ImportError: + logger.warning("Warning: Skipping tests for Nearley grammar imports (js2py required)") + +# from .test_selectors import TestSelectors +# from .test_grammars import TestPythonG, TestConfigG + +from .test_logger import Testlogger + +from .test_parser import * # We define __all__ to list which TestSuites to run + +logger.setLevel(logging.INFO) + +if __name__ == '__main__': + unittest.main() diff --git a/vendor/lark/tests/grammars/ab.lark b/vendor/lark/tests/grammars/ab.lark new file mode 100644 index 00000000..33a985ad --- /dev/null +++ b/vendor/lark/tests/grammars/ab.lark @@ -0,0 +1,10 @@ +startab: expr + +expr: A 
B + | A expr B + +A: "a" +B: "b" + +%import common.WS +%ignore WS diff --git a/vendor/lark/tests/grammars/leading_underscore_grammar.lark b/vendor/lark/tests/grammars/leading_underscore_grammar.lark new file mode 100644 index 00000000..b09a2f4a --- /dev/null +++ b/vendor/lark/tests/grammars/leading_underscore_grammar.lark @@ -0,0 +1,6 @@ +A: "A" + +_SEP: "x" +_a: A + +c: _a _SEP \ No newline at end of file diff --git a/vendor/lark/tests/grammars/templates.lark b/vendor/lark/tests/grammars/templates.lark new file mode 100644 index 00000000..1631188e --- /dev/null +++ b/vendor/lark/tests/grammars/templates.lark @@ -0,0 +1 @@ +sep{item, delim}: item (delim item)* \ No newline at end of file diff --git a/vendor/lark/tests/grammars/test.lark b/vendor/lark/tests/grammars/test.lark new file mode 100644 index 00000000..3c3cbcfd --- /dev/null +++ b/vendor/lark/tests/grammars/test.lark @@ -0,0 +1,3 @@ +%import common.NUMBER +%import common.WORD +%import common.WS diff --git a/vendor/lark/tests/grammars/test_relative_import_of_nested_grammar.lark b/vendor/lark/tests/grammars/test_relative_import_of_nested_grammar.lark new file mode 100644 index 00000000..1af59538 --- /dev/null +++ b/vendor/lark/tests/grammars/test_relative_import_of_nested_grammar.lark @@ -0,0 +1,4 @@ + +start: rule_to_import + +%import .test_relative_import_of_nested_grammar__grammar_to_import.rule_to_import \ No newline at end of file diff --git a/vendor/lark/tests/grammars/test_relative_import_of_nested_grammar__grammar_to_import.lark b/vendor/lark/tests/grammars/test_relative_import_of_nested_grammar__grammar_to_import.lark new file mode 100644 index 00000000..6a40e2bb --- /dev/null +++ b/vendor/lark/tests/grammars/test_relative_import_of_nested_grammar__grammar_to_import.lark @@ -0,0 +1,4 @@ + +rule_to_import: NESTED_TERMINAL + +%import .test_relative_import_of_nested_grammar__nested_grammar.NESTED_TERMINAL diff --git a/vendor/lark/tests/grammars/test_relative_import_of_nested_grammar__nested_grammar.lark b/vendor/lark/tests/grammars/test_relative_import_of_nested_grammar__nested_grammar.lark new file mode 100644 index 00000000..2d4a2a8a --- /dev/null +++ b/vendor/lark/tests/grammars/test_relative_import_of_nested_grammar__nested_grammar.lark @@ -0,0 +1 @@ +NESTED_TERMINAL: "N" diff --git a/vendor/lark/tests/grammars/test_unicode.lark b/vendor/lark/tests/grammars/test_unicode.lark new file mode 100644 index 00000000..9731d0a6 --- /dev/null +++ b/vendor/lark/tests/grammars/test_unicode.lark @@ -0,0 +1 @@ +UNICODE : /[a-zØ-öø-ÿ]/ \ No newline at end of file diff --git a/vendor/lark/tests/grammars/three_rules_using_same_token.lark b/vendor/lark/tests/grammars/three_rules_using_same_token.lark new file mode 100644 index 00000000..8b41ad23 --- /dev/null +++ b/vendor/lark/tests/grammars/three_rules_using_same_token.lark @@ -0,0 +1,7 @@ +%import common.INT + +a: A +b: A +c: A + +A: "A" \ No newline at end of file diff --git a/vendor/lark/tests/test_cache.py b/vendor/lark/tests/test_cache.py new file mode 100644 index 00000000..479f1a1c --- /dev/null +++ b/vendor/lark/tests/test_cache.py @@ -0,0 +1,171 @@ +from __future__ import absolute_import + +import logging +from unittest import TestCase, main, skipIf + +from lark import Lark, Tree, Transformer +from lark.lexer import Lexer, Token +import lark.lark as lark_module + +try: + from StringIO import StringIO +except ImportError: + from io import BytesIO as StringIO + +try: + import regex +except ImportError: + regex = None + +class MockFile(StringIO): + def close(self): + pass + def 
__enter__(self): + return self + def __exit__(self, *args): + pass + +class MockFS: + def __init__(self): + self.files = {} + + def open(self, name, mode=None): + if name not in self.files: + f = self.files[name] = MockFile() + else: + f = self.files[name] + f.seek(0) + return f + + def exists(self, name): + return name in self.files + + +class CustomLexer(Lexer): + def __init__(self, lexer_conf): + pass + + def lex(self, data): + for obj in data: + yield Token('A', obj) + + +class InlineTestT(Transformer): + def add(self, children): + return sum(children if isinstance(children, list) else children.children) + + def NUM(self, token): + return int(token) + + def __reduce__(self): + raise TypeError("This Transformer should not be pickled.") + + +def append_zero(t): + return t.update(value=t.value + '0') + + +class TestCache(TestCase): + g = '''start: "a"''' + + + def setUp(self): + self.fs = lark_module.FS + self.mock_fs = MockFS() + lark_module.FS = self.mock_fs + + def tearDown(self): + self.mock_fs.files = {} + lark_module.FS = self.fs + + def test_simple(self): + fn = "bla" + + Lark(self.g, parser='lalr', cache=fn) + assert fn in self.mock_fs.files + parser = Lark(self.g, parser='lalr', cache=fn) + assert parser.parse('a') == Tree('start', []) + + def test_automatic_naming(self): + assert len(self.mock_fs.files) == 0 + Lark(self.g, parser='lalr', cache=True) + assert len(self.mock_fs.files) == 1 + parser = Lark(self.g, parser='lalr', cache=True) + assert parser.parse('a') == Tree('start', []) + + parser = Lark(self.g + ' "b"', parser='lalr', cache=True) + assert len(self.mock_fs.files) == 2 + assert parser.parse('ab') == Tree('start', []) + + parser = Lark(self.g, parser='lalr', cache=True) + assert parser.parse('a') == Tree('start', []) + + def test_custom_lexer(self): + + parser = Lark(self.g, parser='lalr', lexer=CustomLexer, cache=True) + parser = Lark(self.g, parser='lalr', lexer=CustomLexer, cache=True) + assert len(self.mock_fs.files) == 1 + assert parser.parse('a') == Tree('start', []) + + def test_options(self): + # Test options persistence + Lark(self.g, parser="lalr", debug=True, cache=True) + parser = Lark(self.g, parser="lalr", debug=True, cache=True) + assert parser.options.options['debug'] + + def test_inline(self): + # Test inline transformer (tree-less) & lexer_callbacks + # Note: the Transformer should not be saved to the file, + # and is made unpickable to check for that + g = r""" + start: add+ + add: NUM "+" NUM + NUM: /\d+/ + %ignore " " + """ + text = "1+2 3+4" + expected = Tree('start', [30, 70]) + + parser = Lark(g, parser='lalr', transformer=InlineTestT(), cache=True, lexer_callbacks={'NUM': append_zero}) + res0 = parser.parse(text) + parser = Lark(g, parser='lalr', transformer=InlineTestT(), cache=True, lexer_callbacks={'NUM': append_zero}) + assert len(self.mock_fs.files) == 1 + res1 = parser.parse(text) + res2 = InlineTestT().transform(Lark(g, parser="lalr", cache=True, lexer_callbacks={'NUM': append_zero}).parse(text)) + assert res0 == res1 == res2 == expected + + def test_imports(self): + g = """ + %import .grammars.ab (startab, expr) + """ + parser = Lark(g, parser='lalr', start='startab', cache=True, source_path=__file__) + assert len(self.mock_fs.files) == 1 + parser = Lark(g, parser='lalr', start='startab', cache=True, source_path=__file__) + assert len(self.mock_fs.files) == 1 + res = parser.parse("ab") + self.assertEqual(res, Tree('startab', [Tree('expr', ['a', 'b'])])) + + @skipIf(regex is None, "'regex' lib not installed") + def 
test_recursive_pattern(self): + g = """ + start: recursive+ + recursive: /\w{3}\d{3}(?R)?/ + """ + + assert len(self.mock_fs.files) == 0 + Lark(g, parser="lalr", regex=True, cache=True) + assert len(self.mock_fs.files) == 1 + + with self.assertLogs("lark", level="ERROR") as cm: + Lark(g, parser='lalr', regex=True, cache=True) + assert len(self.mock_fs.files) == 1 + # need to add an error log, because 'self.assertNoLogs' was added in Python 3.10 + logging.getLogger('lark').error("dummy message") + # should only have the dummy log + self.assertCountEqual(cm.output, ["ERROR:lark:dummy message"]) + + + + +if __name__ == '__main__': + main() diff --git a/vendor/lark/tests/test_grammar.py b/vendor/lark/tests/test_grammar.py new file mode 100644 index 00000000..78161899 --- /dev/null +++ b/vendor/lark/tests/test_grammar.py @@ -0,0 +1,299 @@ +from __future__ import absolute_import + +import os +from unittest import TestCase, main + +from lark import Lark, Token, Tree, ParseError, UnexpectedInput +from lark.load_grammar import GrammarError, GRAMMAR_ERRORS, find_grammar_errors, list_grammar_imports +from lark.load_grammar import FromPackageLoader + + +class TestGrammar(TestCase): + def setUp(self): + pass + + def test_errors(self): + for msg, examples in GRAMMAR_ERRORS: + for example in examples: + try: + p = Lark(example) + except GrammarError as e: + assert msg in str(e) + else: + assert False, "example did not raise an error" + + def test_empty_literal(self): + # Issues #888 + self.assertRaises(GrammarError, Lark, "start: \"\"") + + def test_ignore_name(self): + spaces = [] + p = Lark(""" + start: "a" "b" + WS: " " + %ignore WS + """, parser='lalr', lexer_callbacks={'WS': spaces.append}) + assert p.parse("a b") == p.parse("a b") + assert len(spaces) == 5 + + + def test_override_rule(self): + # Overrides the 'sep' template in existing grammar to add an optional terminating delimiter + # Thus extending it beyond its original capacity + p = Lark(""" + %import .test_templates_import (start, sep) + + %override sep{item, delim}: item (delim item)* delim? + %ignore " " + """, source_path=__file__) + + a = p.parse('[1, 2, 3]') + b = p.parse('[1, 2, 3, ]') + assert a == b + + self.assertRaises(GrammarError, Lark, """ + %import .test_templates_import (start, sep) + + %override sep{item}: item (delim item)* delim? + """, source_path=__file__) + + self.assertRaises(GrammarError, Lark, """ + %override sep{item}: item (delim item)* delim? 
+ """, source_path=__file__) + + def test_override_terminal(self): + p = Lark(""" + + %import .grammars.ab (startab, A, B) + + %override A: "c" + %override B: "d" + """, start='startab', source_path=__file__) + + a = p.parse('cd') + self.assertEqual(a.children[0].children, [Token('A', 'c'), Token('B', 'd')]) + + def test_extend_rule(self): + p = Lark(""" + %import .grammars.ab (startab, A, B, expr) + + %extend expr: B A + """, start='startab', source_path=__file__) + a = p.parse('abab') + self.assertEqual(a.children[0].children, ['a', Tree('expr', ['b', 'a']), 'b']) + + self.assertRaises(GrammarError, Lark, """ + %extend expr: B A + """) + + def test_extend_term(self): + p = Lark(""" + %import .grammars.ab (startab, A, B, expr) + + %extend A: "c" + """, start='startab', source_path=__file__) + a = p.parse('acbb') + self.assertEqual(a.children[0].children, ['a', Tree('expr', ['c', 'b']), 'b']) + + def test_extend_twice(self): + p = Lark(""" + start: x+ + + x: "a" + %extend x: "b" + %extend x: "c" + """) + + assert p.parse("abccbba") == p.parse("cbabbbb") + + def test_undefined_ignore(self): + g = """!start: "A" + + %ignore B + """ + self.assertRaises( GrammarError, Lark, g) + + g = """!start: "A" + + %ignore start + """ + self.assertRaises( GrammarError, Lark, g) + + def test_alias_in_terminal(self): + g = """start: TERM + TERM: "a" -> alias + """ + self.assertRaises( GrammarError, Lark, g) + + def test_undefined_rule(self): + self.assertRaises(GrammarError, Lark, """start: a""") + + def test_undefined_term(self): + self.assertRaises(GrammarError, Lark, """start: A""") + + def test_token_multiline_only_works_with_x_flag(self): + g = r"""start: ABC + ABC: / a b c + d + e f + /i + """ + self.assertRaises( GrammarError, Lark, g) + + def test_import_custom_sources(self): + custom_loader = FromPackageLoader(__name__, ('grammars', )) + + grammar = """ + start: startab + + %import ab.startab + """ + + p = Lark(grammar, import_paths=[custom_loader]) + self.assertEqual(p.parse('ab'), + Tree('start', [Tree('startab', [Tree('ab__expr', [Token('ab__A', 'a'), Token('ab__B', 'b')])])])) + + def test_import_custom_sources2(self): + custom_loader = FromPackageLoader(__name__, ('grammars', )) + + grammar = """ + start: rule_to_import + + %import test_relative_import_of_nested_grammar__grammar_to_import.rule_to_import + """ + p = Lark(grammar, import_paths=[custom_loader]) + x = p.parse('N') + self.assertEqual(next(x.find_data('rule_to_import')).children, ['N']) + + def test_import_custom_sources3(self): + custom_loader2 = FromPackageLoader(__name__) + grammar = """ + %import .test_relative_import (start, WS) + %ignore WS + """ + p = Lark(grammar, import_paths=[custom_loader2], source_path=__file__) # import relative to current file + x = p.parse('12 capybaras') + self.assertEqual(x.children, ['12', 'capybaras']) + + def test_find_grammar_errors(self): + text = """ + a: rule + b rule + c: rule + B.: "hello" f + D: "okay" + """ + + assert [e.line for e, _s in find_grammar_errors(text)] == [3, 5] + + text = """ + a: rule + b rule + | ok + c: rule + B.: "hello" f + D: "okay" + """ + + assert [e.line for e, _s in find_grammar_errors(text)] == [3, 4, 6] + + text = """ + a: rule @#$#@$@&& + b: rule + | ok + c: rule + B: "hello" f @ + D: "okay" + """ + + x = find_grammar_errors(text) + assert [e.line for e, _s in find_grammar_errors(text)] == [2, 6] + + def test_ranged_repeat_terms(self): + g = u"""!start: AAA + AAA: "A"~3 + """ + l = Lark(g, parser='lalr') + self.assertEqual(l.parse(u'AAA'), Tree('start', 
["AAA"])) + self.assertRaises((ParseError, UnexpectedInput), l.parse, u'AA') + self.assertRaises((ParseError, UnexpectedInput), l.parse, u'AAAA') + + g = u"""!start: AABB CC + AABB: "A"~0..2 "B"~2 + CC: "C"~1..2 + """ + l = Lark(g, parser='lalr') + self.assertEqual(l.parse(u'AABBCC'), Tree('start', ['AABB', 'CC'])) + self.assertEqual(l.parse(u'BBC'), Tree('start', ['BB', 'C'])) + self.assertEqual(l.parse(u'ABBCC'), Tree('start', ['ABB', 'CC'])) + self.assertRaises((ParseError, UnexpectedInput), l.parse, u'AAAB') + self.assertRaises((ParseError, UnexpectedInput), l.parse, u'AAABBB') + self.assertRaises((ParseError, UnexpectedInput), l.parse, u'ABB') + self.assertRaises((ParseError, UnexpectedInput), l.parse, u'AAAABB') + + def test_ranged_repeat_large(self): + g = u"""!start: "A"~60 + """ + l = Lark(g, parser='lalr') + self.assertGreater(len(l.rules), 1, "Expected that more than one rule will be generated") + self.assertEqual(l.parse(u'A' * 60), Tree('start', ["A"] * 60)) + self.assertRaises(ParseError, l.parse, u'A' * 59) + self.assertRaises((ParseError, UnexpectedInput), l.parse, u'A' * 61) + + g = u"""!start: "A"~15..100 + """ + l = Lark(g, parser='lalr') + for i in range(0, 110): + if 15 <= i <= 100: + self.assertEqual(l.parse(u'A' * i), Tree('start', ['A']*i)) + else: + self.assertRaises(UnexpectedInput, l.parse, u'A' * i) + + # 8191 is a Mersenne prime + g = u"""start: "A"~8191 + """ + l = Lark(g, parser='lalr') + self.assertEqual(l.parse(u'A' * 8191), Tree('start', [])) + self.assertRaises(UnexpectedInput, l.parse, u'A' * 8190) + self.assertRaises(UnexpectedInput, l.parse, u'A' * 8192) + + def test_large_terminal(self): + g = "start: NUMBERS\n" + g += "NUMBERS: " + '|'.join('"%s"' % i for i in range(0, 1000)) + + l = Lark(g, parser='lalr') + for i in (0, 9, 99, 999): + self.assertEqual(l.parse(str(i)), Tree('start', [str(i)])) + for i in (-1, 1000): + self.assertRaises(UnexpectedInput, l.parse, str(i)) + + def test_list_grammar_imports(self): + grammar = """ + %import .test_templates_import (start, sep) + + %override sep{item, delim}: item (delim item)* delim? 
+ %ignore " " + """ + + imports = list_grammar_imports(grammar, [os.path.dirname(__file__)]) + self.assertEqual({os.path.split(i)[-1] for i in imports}, {'test_templates_import.lark', 'templates.lark'}) + + imports = list_grammar_imports('%import common.WS', []) + assert len(imports) == 1 and imports[0].pkg_name == 'lark' + + def test_line_breaks(self): + p = Lark(r"""start: "a" \ + "b" + """) + p.parse('ab') + + + + + + +if __name__ == '__main__': + main() + + + diff --git a/vendor/lark/tests/test_lexer.py b/vendor/lark/tests/test_lexer.py new file mode 100644 index 00000000..411ef942 --- /dev/null +++ b/vendor/lark/tests/test_lexer.py @@ -0,0 +1,23 @@ +from unittest import TestCase, main + +from lark import Lark, Tree + +class TestLexer(TestCase): + def setUp(self): + pass + + def test_basic(self): + p = Lark(""" + start: "a" "b" "c" "d" + %ignore " " + """) + + res = list(p.lex("abc cba dd")) + assert res == list('abccbadd') + + res = list(p.lex("abc cba dd", dont_ignore=True)) + assert res == list('abc cba dd') + + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/vendor/lark/tests/test_logger.py b/vendor/lark/tests/test_logger.py new file mode 100644 index 00000000..93dc8ed4 --- /dev/null +++ b/vendor/lark/tests/test_logger.py @@ -0,0 +1,65 @@ +import logging +from contextlib import contextmanager +from lark import Lark, logger +from unittest import TestCase, main + +try: + from StringIO import StringIO +except ImportError: + from io import StringIO + +@contextmanager +def capture_log(): + stream = StringIO() + orig_handler = logger.handlers[0] + del logger.handlers[:] + logger.addHandler(logging.StreamHandler(stream)) + yield stream + del logger.handlers[:] + logger.addHandler(orig_handler) + +class Testlogger(TestCase): + + def test_debug(self): + logger.setLevel(logging.DEBUG) + collision_grammar = ''' + start: as as + as: a* + a: "a" + ''' + with capture_log() as log: + Lark(collision_grammar, parser='lalr', debug=True) + + log = log.getvalue() + # since there are conflicts about A + # symbol A should appear in the log message for hint + self.assertIn("A", log) + + def test_non_debug(self): + logger.setLevel(logging.DEBUG) + collision_grammar = ''' + start: as as + as: a* + a: "a" + ''' + with capture_log() as log: + Lark(collision_grammar, parser='lalr', debug=False) + log = log.getvalue() + # no log messge + self.assertEqual(len(log), 0) + + def test_loglevel_higher(self): + logger.setLevel(logging.ERROR) + collision_grammar = ''' + start: as as + as: a* + a: "a" + ''' + with capture_log() as log: + Lark(collision_grammar, parser='lalr', debug=True) + log = log.getvalue() + # no log messge + self.assertEqual(len(log), 0) + +if __name__ == '__main__': + main() diff --git a/vendor/poetry-core/poetry/core/utils/__init__.py b/vendor/lark/tests/test_nearley/__init__.py similarity index 100% rename from vendor/poetry-core/poetry/core/utils/__init__.py rename to vendor/lark/tests/test_nearley/__init__.py diff --git a/vendor/lark/tests/test_nearley/grammars/include_unicode.ne b/vendor/lark/tests/test_nearley/grammars/include_unicode.ne new file mode 100644 index 00000000..b04c2a91 --- /dev/null +++ b/vendor/lark/tests/test_nearley/grammars/include_unicode.ne @@ -0,0 +1,3 @@ +@include "unicode.ne" + +main -> x diff --git a/vendor/lark/tests/test_nearley/grammars/unicode.ne b/vendor/lark/tests/test_nearley/grammars/unicode.ne new file mode 100644 index 00000000..c930830d --- /dev/null +++ b/vendor/lark/tests/test_nearley/grammars/unicode.ne @@ -0,0 +1 @@ +x -> 
"±a" diff --git a/vendor/lark/tests/test_nearley/test_nearley.py b/vendor/lark/tests/test_nearley/test_nearley.py new file mode 100644 index 00000000..c1783fe5 --- /dev/null +++ b/vendor/lark/tests/test_nearley/test_nearley.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import + +import unittest +import logging +import os +import codecs + +from lark import logger +from lark.tools.nearley import create_code_for_nearley_grammar, main as nearley_tool_main + +logger.setLevel(logging.INFO) + +TEST_PATH = os.path.abspath(os.path.dirname(__file__)) +NEARLEY_PATH = os.path.join(TEST_PATH, 'nearley') +BUILTIN_PATH = os.path.join(NEARLEY_PATH, 'builtin') + +if not os.path.exists(BUILTIN_PATH): + logger.warning("Nearley not included. Skipping Nearley tests! (use git submodule to add)") + raise ImportError("Skipping Nearley tests!") + +import js2py # Ensures that js2py exists, to avoid failing tests + + +class TestNearley(unittest.TestCase): + def test_css(self): + fn = os.path.join(NEARLEY_PATH, 'examples/csscolor.ne') + with open(fn) as f: + grammar = f.read() + + code = create_code_for_nearley_grammar(grammar, 'csscolor', BUILTIN_PATH, os.path.dirname(fn)) + d = {} + exec (code, d) + parse = d['parse'] + + c = parse('#a199ff') + assert c['r'] == 161 + assert c['g'] == 153 + assert c['b'] == 255 + + c = parse('rgb(255, 70%, 3)') + assert c['r'] == 255 + assert c['g'] == 178 + assert c['b'] == 3 + + def test_include(self): + fn = os.path.join(NEARLEY_PATH, 'test/grammars/folder-test.ne') + with open(fn) as f: + grammar = f.read() + + code = create_code_for_nearley_grammar(grammar, 'main', BUILTIN_PATH, os.path.dirname(fn)) + d = {} + exec (code, d) + parse = d['parse'] + + parse('a') + parse('b') + + def test_multi_include(self): + fn = os.path.join(NEARLEY_PATH, 'test/grammars/multi-include-test.ne') + with open(fn) as f: + grammar = f.read() + + code = create_code_for_nearley_grammar(grammar, 'main', BUILTIN_PATH, os.path.dirname(fn)) + d = {} + exec (code, d) + parse = d['parse'] + + parse('a') + parse('b') + parse('c') + + def test_utf8(self): + grammar = u'main -> "±a"' + code = create_code_for_nearley_grammar(grammar, 'main', BUILTIN_PATH, './') + d = {} + exec (code, d) + parse = d['parse'] + + parse(u'±a') + + def test_backslash(self): + grammar = r'main -> "\""' + code = create_code_for_nearley_grammar(grammar, 'main', BUILTIN_PATH, './') + d = {} + exec (code, d) + parse = d['parse'] + parse(u'"') + + def test_null(self): + grammar = r'main -> "a" | null' + code = create_code_for_nearley_grammar(grammar, 'main', BUILTIN_PATH, './') + d = {} + exec (code, d) + parse = d['parse'] + parse('a') + parse('') + + def test_utf8_2(self): + fn = os.path.join(TEST_PATH, 'grammars/unicode.ne') + nearley_tool_main(fn, 'x', NEARLEY_PATH) + + def test_include_utf8(self): + fn = os.path.join(TEST_PATH, 'grammars/include_unicode.ne') + nearley_tool_main(fn, 'main', NEARLEY_PATH) + + +if __name__ == '__main__': + unittest.main() diff --git a/vendor/lark/tests/test_parser.py b/vendor/lark/tests/test_parser.py new file mode 100644 index 00000000..94427ca6 --- /dev/null +++ b/vendor/lark/tests/test_parser.py @@ -0,0 +1,2636 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import + +import re +import unittest +import os +import sys +from copy import copy, deepcopy + +from lark.utils import isascii + +from lark import Token, Transformer_NonRecursive, LexError + +try: + from cStringIO import StringIO as cStringIO +except ImportError: + # Available only in Python 2.x, 3.x 
only has io.StringIO from below + cStringIO = None +from io import ( + StringIO as uStringIO, + BytesIO, + open, + ) + + +try: + import regex +except ImportError: + regex = None + +import lark +from lark import logger +from lark.lark import Lark +from lark.exceptions import GrammarError, ParseError, UnexpectedToken, UnexpectedInput, UnexpectedCharacters +from lark.tree import Tree +from lark.visitors import Transformer, Transformer_InPlace, v_args, Transformer_InPlaceRecursive +from lark.lexer import Lexer, BasicLexer +from lark.indenter import Indenter + +__all__ = ['TestParsers'] + +__path__ = os.path.dirname(__file__) +def _read(n, *args): + with open(os.path.join(__path__, n), *args) as f: + return f.read() + +class TestParsers(unittest.TestCase): + def test_big_list(self): + Lark(r""" + start: {} + """.format( + "|".join(['"%s"'%i for i in range(250)]) + )) + + def test_same_ast(self): + "Tests that Earley and LALR parsers produce equal trees" + g = Lark(r"""start: "(" name_list ("," "*" NAME)? ")" + name_list: NAME | name_list "," NAME + NAME: /\w+/ """, parser='lalr') + l = g.parse('(a,b,c,*x)') + + g = Lark(r"""start: "(" name_list ("," "*" NAME)? ")" + name_list: NAME | name_list "," NAME + NAME: /\w/+ """) + l2 = g.parse('(a,b,c,*x)') + assert l == l2, '%s != %s' % (l.pretty(), l2.pretty()) + + def test_infinite_recurse(self): + g = """start: a + a: a | "a" + """ + + self.assertRaises(GrammarError, Lark, g, parser='lalr') + + # TODO: should it? shouldn't it? + # l = Lark(g, parser='earley', lexer='dynamic') + # self.assertRaises(ParseError, l.parse, 'a') + + def test_propagate_positions(self): + g = Lark("""start: a + a: "a" + """, propagate_positions=True) + + r = g.parse('a') + self.assertEqual( r.children[0].meta.line, 1 ) + + g = Lark("""start: x + x: a + a: "a" + """, propagate_positions=True) + + r = g.parse('a') + self.assertEqual( r.children[0].meta.line, 1 ) + + def test_propagate_positions2(self): + g = Lark("""start: a + a: b + ?b: "(" t ")" + !t: "t" + """, propagate_positions=True) + + start = g.parse("(t)") + a ,= start.children + t ,= a.children + assert t.children[0] == "t" + + assert t.meta.column == 2 + assert t.meta.end_column == 3 + + assert start.meta.column == a.meta.column == 1 + assert start.meta.end_column == a.meta.end_column == 4 + + + + def test_expand1(self): + + g = Lark("""start: a + ?a: b + b: "x" + """) + + r = g.parse('x') + self.assertEqual( r.children[0].data, "b" ) + + g = Lark("""start: a + ?a: b -> c + b: "x" + """) + + r = g.parse('x') + self.assertEqual( r.children[0].data, "c" ) + + g = Lark("""start: a + ?a: B -> c + B: "x" + """) + self.assertEqual( r.children[0].data, "c" ) + + + g = Lark("""start: a + ?a: b b -> c + b: "x" + """) + r = g.parse('xx') + self.assertEqual( r.children[0].data, "c" ) + + def test_comment_in_rule_definition(self): + g = Lark("""start: a + a: "a" + // A comment + // Another comment + | "b" + // Still more + + c: "unrelated" + """) + r = g.parse('b') + self.assertEqual( r.children[0].data, "a" ) + + def test_visit_tokens(self): + class T(Transformer): + def a(self, children): + return children[0] + "!" 
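A compact sketch of the positional metadata that the propagate_positions tests above assert; the grammar is invented for illustration:

from lark import Lark

parser = Lark("""
    start: word+
    word: WORD
    %import common.WORD
    %ignore " "
""", propagate_positions=True)

tree = parser.parse("hello world")
# Each rule node's `meta` carries coordinates propagated up from its tokens.
assert tree.children[0].meta.line == 1
assert tree.children[0].meta.column == 1
assert tree.children[1].meta.column == 7   # "world" starts at column 7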
+ def A(self, tok): + return tok.update(value=tok.upper()) + + # Test regular + g = """start: a + a : A + A: "x" + """ + p = Lark(g, parser='lalr') + r = T(False).transform(p.parse("x")) + self.assertEqual( r.children, ["x!"] ) + r = T().transform(p.parse("x")) + self.assertEqual( r.children, ["X!"] ) + + # Test internal transformer + p = Lark(g, parser='lalr', transformer=T()) + r = p.parse("x") + self.assertEqual( r.children, ["X!"] ) + + def test_visit_tokens2(self): + g = """ + start: add+ + add: NUM "+" NUM + NUM: /\\d+/ + %ignore " " + """ + text = "1+2 3+4" + expected = Tree('start', [3, 7]) + for base in (Transformer, Transformer_InPlace, Transformer_NonRecursive, Transformer_InPlaceRecursive): + class T(base): + def add(self, children): + return sum(children if isinstance(children, list) else children.children) + + def NUM(self, token): + return int(token) + + + parser = Lark(g, parser='lalr', transformer=T()) + result = parser.parse(text) + self.assertEqual(result, expected) + + def test_vargs_meta(self): + + @v_args(meta=True) + class T1(Transformer): + def a(self, meta, children): + assert not children + return meta.line + + def start(self, meta, children): + return children + + @v_args(meta=True, inline=True) + class T2(Transformer): + def a(self, meta): + return meta.line + + def start(self, meta, *res): + return list(res) + + for T in (T1, T2): + for internal in [False, True]: + try: + g = Lark(r"""start: a+ + a : "x" _NL? + _NL: /\n/+ + """, parser='lalr', transformer=T() if internal else None, propagate_positions=True) + except NotImplementedError: + assert internal + continue + + res = g.parse("xx\nx\nxxx\n\n\nxx") + assert not internal + res = T().transform(res) + + self.assertEqual(res, [1, 1, 2, 3, 3, 3, 6, 6]) + + def test_vargs_tree(self): + tree = Lark(''' + start: a a a + !a: "A" + ''').parse('AAA') + tree_copy = deepcopy(tree) + + @v_args(tree=True) + class T(Transformer): + def a(self, tree): + return 1 + def start(self, tree): + return tree.children + + res = T().transform(tree) + self.assertEqual(res, [1, 1, 1]) + self.assertEqual(tree, tree_copy) + + + + def test_embedded_transformer(self): + class T(Transformer): + def a(self, children): + return "" + def b(self, children): + return "" + def c(self, children): + return "" + + # Test regular + g = Lark("""start: a + a : "x" + """, parser='lalr') + r = T().transform(g.parse("x")) + self.assertEqual( r.children, [""] ) + + + g = Lark("""start: a + a : "x" + """, parser='lalr', transformer=T()) + r = g.parse("x") + self.assertEqual( r.children, [""] ) + + + # Test Expand1 + g = Lark("""start: a + ?a : b + b : "x" + """, parser='lalr') + r = T().transform(g.parse("x")) + self.assertEqual( r.children, [""] ) + + + g = Lark("""start: a + ?a : b + b : "x" + """, parser='lalr', transformer=T()) + r = g.parse("x") + self.assertEqual( r.children, [""] ) + + # Test Expand1 -> Alias + g = Lark("""start: a + ?a : b b -> c + b : "x" + """, parser='lalr') + r = T().transform(g.parse("xx")) + self.assertEqual( r.children, [""] ) + + + g = Lark("""start: a + ?a : b b -> c + b : "x" + """, parser='lalr', transformer=T()) + r = g.parse("xx") + self.assertEqual( r.children, [""] ) + + def test_embedded_transformer_inplace(self): + @v_args(tree=True) + class T1(Transformer_InPlace): + def a(self, tree): + assert isinstance(tree, Tree), tree + tree.children.append("tested") + return tree + + def b(self, tree): + return Tree(tree.data, tree.children + ['tested2']) + + @v_args(tree=True) + class T2(Transformer): + def a(self, tree): 
+ assert isinstance(tree, Tree), tree + tree.children.append("tested") + return tree + + def b(self, tree): + return Tree(tree.data, tree.children + ['tested2']) + + class T3(Transformer): + @v_args(tree=True) + def a(self, tree): + assert isinstance(tree, Tree) + tree.children.append("tested") + return tree + + @v_args(tree=True) + def b(self, tree): + return Tree(tree.data, tree.children + ['tested2']) + + for t in [T1(), T2(), T3()]: + for internal in [False, True]: + g = Lark("""start: a b + a : "x" + b : "y" + """, parser='lalr', transformer=t if internal else None) + r = g.parse("xy") + if not internal: + r = t.transform(r) + + a, b = r.children + self.assertEqual(a.children, ["tested"]) + self.assertEqual(b.children, ["tested2"]) + + def test_alias(self): + Lark("""start: ["a"] "b" ["c"] "e" ["f"] ["g"] ["h"] "x" -> d """) + + def test_backwards_custom_lexer(self): + class OldCustomLexer(Lexer): + def __init__(self, lexer_conf): + pass + + def lex(self, text): + yield Token('A', 'A') + + p = Lark(""" + start: A + %declare A + """, parser='lalr', lexer=OldCustomLexer) + + r = p.parse('') + self.assertEqual(r, Tree('start', [Token('A', 'A')])) + + + def test_lexer_token_limit(self): + "Python's re module only supports up to 100 named groups per regular expression. Test that we handle this limitation" + tokens = {'A%d'%i:'"%d"'%i for i in range(300)} + g = """start: %s + %s""" % (' '.join(tokens), '\n'.join("%s: %s"%x for x in tokens.items())) + + p = Lark(g, parser='lalr') + + + +def _make_full_earley_test(LEXER): + def _Lark(grammar, **kwargs): + return Lark(grammar, lexer=LEXER, parser='earley', propagate_positions=True, **kwargs) + class _TestFullEarley(unittest.TestCase): + def test_anon(self): + # Fails an Earley implementation without special handling for empty rules, + # or re-processing of already completed rules. + g = Lark(r"""start: B + B: ("ab"|/[^b]/)+ + """, lexer=LEXER) + + self.assertEqual( g.parse('abc').children[0], 'abc') + + def test_earley(self): + g = Lark("""start: A "b" c + A: "a"+ + c: "abc" + """, parser="earley", lexer=LEXER) + x = g.parse('aaaababc') + + def test_earley2(self): + grammar = """ + start: statement+ + + statement: "r" + | "c" /[a-z]/+ + + %ignore " " + """ + + program = """c b r""" + + l = Lark(grammar, parser='earley', lexer=LEXER) + l.parse(program) + + + @unittest.skipIf(LEXER=='dynamic', "Only relevant for the dynamic_complete parser") + def test_earley3(self): + """Tests prioritization and disambiguation for pseudo-terminals (there should be only one result) + + By default, `+` should imitate regexp greedy-matching + """ + grammar = """ + start: A A + A: "a"+ + """ + + l = Lark(grammar, parser='earley', lexer=LEXER) + res = l.parse("aaa") + self.assertEqual(set(res.children), {'aa', 'a'}) + # XXX TODO fix Earley to maintain correct order + # i.e. it should imitate greedy search for terminals, but lazy search for rules + # self.assertEqual(res.children, ['aa', 'a']) + + def test_earley4(self): + grammar = """ + start: A A? + A: "a"+ + """ + + l = Lark(grammar, parser='earley', lexer=LEXER) + res = l.parse("aaa") + assert set(res.children) == {'aa', 'a'} or res.children == ['aaa'] + # XXX TODO fix Earley to maintain correct order + # i.e. it should imitate greedy search for terminals, but lazy search for rules + # self.assertEqual(res.children, ['aaa']) + + def test_earley_repeating_empty(self): + # This was a sneaky bug!
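+ # The two adjacent nullable rules must each produce their own 'empty' subtree, as asserted below.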
+ + grammar = """ + !start: "a" empty empty "b" + empty: empty2 + empty2: + """ + + parser = Lark(grammar, parser='earley', lexer=LEXER) + res = parser.parse('ab') + + empty_tree = Tree('empty', [Tree('empty2', [])]) + self.assertSequenceEqual(res.children, ['a', empty_tree, empty_tree, 'b']) + + @unittest.skipIf(LEXER=='basic', "Requires dynamic lexer") + def test_earley_explicit_ambiguity(self): + # This was a sneaky bug! + + grammar = """ + start: a b | ab + a: "a" + b: "b" + ab: "ab" + """ + + parser = Lark(grammar, parser='earley', lexer=LEXER, ambiguity='explicit') + ambig_tree = parser.parse('ab') + self.assertEqual( ambig_tree.data, '_ambig') + self.assertEqual( len(ambig_tree.children), 2) + + @unittest.skipIf(LEXER=='basic', "Requires dynamic lexer") + def test_ambiguity1(self): + grammar = """ + start: cd+ "e" + + !cd: "c" + | "d" + | "cd" + + """ + l = Lark(grammar, parser='earley', ambiguity='explicit', lexer=LEXER) + ambig_tree = l.parse('cde') + + assert ambig_tree.data == '_ambig', ambig_tree + assert len(ambig_tree.children) == 2 + + @unittest.skipIf(LEXER=='basic', "Requires dynamic lexer") + def test_ambiguity2(self): + grammar = """ + ANY: /[a-zA-Z0-9 ]+/ + a.2: "A" b+ + b.2: "B" + c: ANY + + start: (a|c)* + """ + l = Lark(grammar, parser='earley', lexer=LEXER) + res = l.parse('ABX') + expected = Tree('start', [ + Tree('a', [ + Tree('b', []) + ]), + Tree('c', [ + 'X' + ]) + ]) + self.assertEqual(res, expected) + + def test_ambiguous_intermediate_node(self): + grammar = """ + start: ab bc d? + !ab: "A" "B"? + !bc: "B"? "C" + !d: "D" + """ + + l = Lark(grammar, parser='earley', ambiguity='explicit', lexer=LEXER) + ambig_tree = l.parse("ABCD") + expected = { + Tree('start', [Tree('ab', ['A']), Tree('bc', ['B', 'C']), Tree('d', ['D'])]), + Tree('start', [Tree('ab', ['A', 'B']), Tree('bc', ['C']), Tree('d', ['D'])]) + } + self.assertEqual(ambig_tree.data, '_ambig') + self.assertEqual(set(ambig_tree.children), expected) + + def test_ambiguous_symbol_and_intermediate_nodes(self): + grammar = """ + start: ab bc cd + !ab: "A" "B"? + !bc: "B"? "C"? + !cd: "C"? "D" + """ + + l = Lark(grammar, parser='earley', ambiguity='explicit', lexer=LEXER) + ambig_tree = l.parse("ABCD") + expected = { + Tree('start', [ + Tree('ab', ['A', 'B']), + Tree('bc', ['C']), + Tree('cd', ['D']) + ]), + Tree('start', [ + Tree('ab', ['A', 'B']), + Tree('bc', []), + Tree('cd', ['C', 'D']) + ]), + Tree('start', [ + Tree('ab', ['A']), + Tree('bc', ['B', 'C']), + Tree('cd', ['D']) + ]), + Tree('start', [ + Tree('ab', ['A']), + Tree('bc', ['B']), + Tree('cd', ['C', 'D']) + ]), + } + self.assertEqual(ambig_tree.data, '_ambig') + self.assertEqual(set(ambig_tree.children), expected) + + def test_nested_ambiguous_intermediate_nodes(self): + grammar = """ + start: ab bc cd e? + !ab: "A" "B"? + !bc: "B"? "C"? + !cd: "C"? 
"D" + !e: "E" + """ + + l = Lark(grammar, parser='earley', ambiguity='explicit', lexer=LEXER) + ambig_tree = l.parse("ABCDE") + expected = { + Tree('start', [ + Tree('ab', ['A', 'B']), + Tree('bc', ['C']), + Tree('cd', ['D']), + Tree('e', ['E']) + ]), + Tree('start', [ + Tree('ab', ['A']), + Tree('bc', ['B', 'C']), + Tree('cd', ['D']), + Tree('e', ['E']) + ]), + Tree('start', [ + Tree('ab', ['A']), + Tree('bc', ['B']), + Tree('cd', ['C', 'D']), + Tree('e', ['E']) + ]), + Tree('start', [ + Tree('ab', ['A', 'B']), + Tree('bc', []), + Tree('cd', ['C', 'D']), + Tree('e', ['E']) + ]), + } + self.assertEqual(ambig_tree.data, '_ambig') + self.assertEqual(set(ambig_tree.children), expected) + + def test_nested_ambiguous_intermediate_nodes2(self): + grammar = """ + start: ab bc cd de f + !ab: "A" "B"? + !bc: "B"? "C"? + !cd: "C"? "D"? + !de: "D"? "E" + !f: "F" + """ + + l = Lark(grammar, parser='earley', ambiguity='explicit', lexer=LEXER) + ambig_tree = l.parse("ABCDEF") + expected = { + Tree('start', [ + Tree('ab', ['A', 'B']), + Tree('bc', ['C']), + Tree('cd', ['D']), + Tree('de', ['E']), + Tree('f', ['F']), + ]), + Tree('start', [ + Tree('ab', ['A']), + Tree('bc', ['B', 'C']), + Tree('cd', ['D']), + Tree('de', ['E']), + Tree('f', ['F']), + ]), + Tree('start', [ + Tree('ab', ['A']), + Tree('bc', ['B']), + Tree('cd', ['C', 'D']), + Tree('de', ['E']), + Tree('f', ['F']), + ]), + Tree('start', [ + Tree('ab', ['A']), + Tree('bc', ['B']), + Tree('cd', ['C']), + Tree('de', ['D', 'E']), + Tree('f', ['F']), + ]), + Tree('start', [ + Tree('ab', ['A', "B"]), + Tree('bc', []), + Tree('cd', ['C']), + Tree('de', ['D', 'E']), + Tree('f', ['F']), + ]), + Tree('start', [ + Tree('ab', ['A']), + Tree('bc', ['B', 'C']), + Tree('cd', []), + Tree('de', ['D', 'E']), + Tree('f', ['F']), + ]), + Tree('start', [ + Tree('ab', ['A', 'B']), + Tree('bc', []), + Tree('cd', ['C', 'D']), + Tree('de', ['E']), + Tree('f', ['F']), + ]), + Tree('start', [ + Tree('ab', ['A', 'B']), + Tree('bc', ['C']), + Tree('cd', []), + Tree('de', ['D', 'E']), + Tree('f', ['F']), + ]), + } + self.assertEqual(ambig_tree.data, '_ambig') + self.assertEqual(set(ambig_tree.children), expected) + + def test_ambiguous_intermediate_node_unnamed_token(self): + grammar = """ + start: ab bc "D" + !ab: "A" "B"? + !bc: "B"? "C" + """ + + l = Lark(grammar, parser='earley', ambiguity='explicit', lexer=LEXER) + ambig_tree = l.parse("ABCD") + expected = { + Tree('start', [Tree('ab', ['A']), Tree('bc', ['B', 'C'])]), + Tree('start', [Tree('ab', ['A', 'B']), Tree('bc', ['C'])]) + } + self.assertEqual(ambig_tree.data, '_ambig') + self.assertEqual(set(ambig_tree.children), expected) + + def test_ambiguous_intermediate_node_inlined_rule(self): + grammar = """ + start: ab _bc d? + !ab: "A" "B"? + _bc: "B"? "C" + !d: "D" + """ + + l = Lark(grammar, parser='earley', ambiguity='explicit', lexer=LEXER) + ambig_tree = l.parse("ABCD") + expected = { + Tree('start', [Tree('ab', ['A']), Tree('d', ['D'])]), + Tree('start', [Tree('ab', ['A', 'B']), Tree('d', ['D'])]) + } + self.assertEqual(ambig_tree.data, '_ambig') + self.assertEqual(set(ambig_tree.children), expected) + + def test_ambiguous_intermediate_node_conditionally_inlined_rule(self): + grammar = """ + start: ab bc d? + !ab: "A" "B"? + !?bc: "B"? 
"C" + !d: "D" + """ + + l = Lark(grammar, parser='earley', ambiguity='explicit', lexer=LEXER) + ambig_tree = l.parse("ABCD") + expected = { + Tree('start', [Tree('ab', ['A']), Tree('bc', ['B', 'C']), Tree('d', ['D'])]), + Tree('start', [Tree('ab', ['A', 'B']), 'C', Tree('d', ['D'])]) + } + self.assertEqual(ambig_tree.data, '_ambig') + self.assertEqual(set(ambig_tree.children), expected) + + def test_fruitflies_ambig(self): + grammar = """ + start: noun verb noun -> simple + | noun verb "like" noun -> comparative + + noun: adj? NOUN + verb: VERB + adj: ADJ + + NOUN: "flies" | "bananas" | "fruit" + VERB: "like" | "flies" + ADJ: "fruit" + + %import common.WS + %ignore WS + """ + parser = Lark(grammar, ambiguity='explicit', lexer=LEXER) + tree = parser.parse('fruit flies like bananas') + + expected = Tree('_ambig', [ + Tree('comparative', [ + Tree('noun', ['fruit']), + Tree('verb', ['flies']), + Tree('noun', ['bananas']) + ]), + Tree('simple', [ + Tree('noun', [Tree('adj', ['fruit']), 'flies']), + Tree('verb', ['like']), + Tree('noun', ['bananas']) + ]) + ]) + + # self.assertEqual(tree, expected) + self.assertEqual(tree.data, expected.data) + self.assertEqual(set(tree.children), set(expected.children)) + + + @unittest.skipIf(LEXER!='dynamic_complete', "Only relevant for the dynamic_complete parser") + def test_explicit_ambiguity2(self): + grammar = r""" + start: NAME+ + NAME: /\w+/ + %ignore " " + """ + text = """cat""" + + parser = _Lark(grammar, start='start', ambiguity='explicit') + tree = parser.parse(text) + self.assertEqual(tree.data, '_ambig') + + combinations = {tuple(str(s) for s in t.children) for t in tree.children} + self.assertEqual(combinations, { + ('cat',), + ('ca', 't'), + ('c', 'at'), + ('c', 'a' ,'t') + }) + + def test_term_ambig_resolve(self): + grammar = r""" + !start: NAME+ + NAME: /\w+/ + %ignore " " + """ + text = """foo bar""" + + parser = Lark(grammar) + tree = parser.parse(text) + self.assertEqual(tree.children, ['foo', 'bar']) + + def test_cycle(self): + grammar = """ + start: start? + """ + + l = Lark(grammar, ambiguity='resolve', lexer=LEXER) + tree = l.parse('') + self.assertEqual(tree, Tree('start', [])) + + l = Lark(grammar, ambiguity='explicit', lexer=LEXER) + tree = l.parse('') + self.assertEqual(tree, Tree('start', [])) + + def test_cycle2(self): + grammar = """ + start: _operation + _operation: value + value: "b" + | "a" value + | _operation + """ + + l = Lark(grammar, ambiguity="explicit", lexer=LEXER) + tree = l.parse("ab") + self.assertEqual(tree, Tree('start', [Tree('value', [Tree('value', [])])])) + + def test_cycles(self): + grammar = """ + a: b + b: c* + c: a + """ + + l = Lark(grammar, start='a', ambiguity='resolve', lexer=LEXER) + tree = l.parse('') + self.assertEqual(tree, Tree('a', [Tree('b', [])])) + + l = Lark(grammar, start='a', ambiguity='explicit', lexer=LEXER) + tree = l.parse('') + self.assertEqual(tree, Tree('a', [Tree('b', [])])) + + def test_many_cycles(self): + grammar = """ + start: a? | start start + !a: "a" + """ + + l = Lark(grammar, ambiguity='resolve', lexer=LEXER) + tree = l.parse('a') + self.assertEqual(tree, Tree('start', [Tree('a', ['a'])])) + + l = Lark(grammar, ambiguity='explicit', lexer=LEXER) + tree = l.parse('a') + self.assertEqual(tree, Tree('start', [Tree('a', ['a'])])) + + def test_cycles_with_child_filter(self): + grammar = """ + a: _x + _x: _x? b + b: + """ + + grammar2 = """ + a: x + x: x? 
b + b: + """ + + l = Lark(grammar, start='a', ambiguity='resolve', lexer=LEXER) + tree = l.parse('') + self.assertEqual(tree, Tree('a', [Tree('b', [])])) + + l = Lark(grammar, start='a', ambiguity='explicit', lexer=LEXER) + tree = l.parse('') + self.assertEqual(tree, Tree('a', [Tree('b', [])])) + + l = Lark(grammar2, start='a', ambiguity='resolve', lexer=LEXER) + tree = l.parse('') + self.assertEqual(tree, Tree('a', [Tree('x', [Tree('b', [])])])) + + l = Lark(grammar2, start='a', ambiguity='explicit', lexer=LEXER) + tree = l.parse('') + self.assertEqual(tree, Tree('a', [Tree('x', [Tree('b', [])])])) + + + + + + # @unittest.skipIf(LEXER=='dynamic', "Not implemented in Dynamic Earley yet") # TODO + # def test_not_all_derivations(self): + # grammar = """ + # start: cd+ "e" + + # !cd: "c" + # | "d" + # | "cd" + + # """ + # l = Lark(grammar, parser='earley', ambiguity='explicit', lexer=LEXER, earley__all_derivations=False) + # x = l.parse('cde') + # assert x.data != '_ambig', x + # assert len(x.children) == 1 + + _NAME = "TestFullEarley" + LEXER.capitalize() + _TestFullEarley.__name__ = _NAME + globals()[_NAME] = _TestFullEarley + __all__.append(_NAME) + +class CustomLexerNew(Lexer): + """ + The purpose of this custom lexer is to test the integration, + so it uses the basic lexer (BasicLexer) as its implementation, without custom lexing behaviour. + """ + def __init__(self, lexer_conf): + self.lexer = BasicLexer(copy(lexer_conf)) + def lex(self, lexer_state, parser_state): + return self.lexer.lex(lexer_state, parser_state) + + __future_interface__ = True + +class CustomLexerOld(Lexer): + """ + The purpose of this custom lexer is to test the integration, + so it uses the basic lexer (BasicLexer) as its implementation, without custom lexing behaviour. + """ + def __init__(self, lexer_conf): + self.lexer = BasicLexer(copy(lexer_conf)) + def lex(self, text): + ls = self.lexer.make_lexer_state(text) + return self.lexer.lex(ls, None) + + __future_interface__ = False + +def _tree_structure_check(a, b): + """ + Checks that both Tree objects have the same structure, without checking their values. + """ + assert a.data == b.data and len(a.children) == len(b.children) + for ca, cb in zip(a.children, b.children): + assert type(ca) == type(cb) + if isinstance(ca, Tree): + _tree_structure_check(ca, cb) + elif isinstance(ca, Token): + assert ca.type == cb.type + else: + assert ca == cb + +class DualBytesLark: + """ + A helper class that wraps both a normal parser and a parser for bytes. + It automatically forwards `.parse` calls to both lexers, returning the value from the text lexer. + It always checks that both produce the same output/error. + + NOTE: Not currently used, but left here for future debugging.
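+ + Example: DualBytesLark(grammar).parse(text) parses with both parsers and asserts that the resulting trees have the same structure.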
+ """ + + def __init__(self, g, *args, **kwargs): + self.text_lexer = Lark(g, *args, use_bytes=False, **kwargs) + g = self.text_lexer.grammar_source.lower() + if '\\u' in g or not isascii(g): + # Bytes re can't deal with uniode escapes + self.bytes_lark = None + else: + # Everything here should work, so use `use_bytes='force'` + self.bytes_lark = Lark(self.text_lexer.grammar_source, *args, use_bytes='force', **kwargs) + + def parse(self, text, start=None): + # TODO: Easy workaround, more complex checks would be beneficial + if not isascii(text) or self.bytes_lark is None: + return self.text_lexer.parse(text, start) + try: + rv = self.text_lexer.parse(text, start) + except Exception as e: + try: + self.bytes_lark.parse(text.encode(), start) + except Exception as be: + assert type(e) == type(be), "Parser with and without `use_bytes` raise different exceptions" + raise e + assert False, "Parser without `use_bytes` raises exception, with doesn't" + try: + bv = self.bytes_lark.parse(text.encode(), start) + except Exception as be: + assert False, "Parser without `use_bytes` doesn't raise an exception, with does" + _tree_structure_check(rv, bv) + return rv + + @classmethod + def open(cls, grammar_filename, rel_to=None, **options): + if rel_to: + basepath = os.path.dirname(rel_to) + grammar_filename = os.path.join(basepath, grammar_filename) + with open(grammar_filename, encoding='utf8') as f: + return cls(f, **options) + + def save(self,f): + self.text_lexer.save(f) + if self.bytes_lark is not None: + self.bytes_lark.save(f) + + def load(self,f): + self.text_lexer = self.text_lexer.load(f) + if self.bytes_lark is not None: + self.bytes_lark.load(f) + +def _make_parser_test(LEXER, PARSER): + lexer_class_or_name = { + 'custom_new': CustomLexerNew, + 'custom_old': CustomLexerOld, + }.get(LEXER, LEXER) + + def _Lark(grammar, **kwargs): + return Lark(grammar, lexer=lexer_class_or_name, parser=PARSER, propagate_positions=True, **kwargs) + def _Lark_open(gfilename, **kwargs): + return Lark.open(gfilename, lexer=lexer_class_or_name, parser=PARSER, propagate_positions=True, **kwargs) + + if (LEXER, PARSER) == ('basic', 'earley'): + # Check that the `lark.lark` grammar represents can parse every example used in these tests. + # basic-Earley was an arbitrary choice, to make sure it only ran once. + lalr_parser = Lark.open(os.path.join(os.path.dirname(lark.__file__), 'grammars/lark.lark'), parser='lalr') + def wrap_with_test_grammar(f): + def _f(x, **kwargs): + inst = f(x, **kwargs) + lalr_parser.parse(inst.source_grammar) # Test after instance creation. When the grammar should fail, don't test it. 
+ return inst + return _f + + _Lark = wrap_with_test_grammar(_Lark) + _Lark_open = wrap_with_test_grammar(_Lark_open) + + + class _TestParser(unittest.TestCase): + def test_basic1(self): + g = _Lark("""start: a+ b a* "b" a* + b: "b" + a: "a" + """) + + r = g.parse('aaabaab') + self.assertEqual( ''.join(x.data for x in r.children), 'aaabaa' ) + r = g.parse('aaabaaba') + self.assertEqual( ''.join(x.data for x in r.children), 'aaabaaa' ) + + self.assertRaises(ParseError, g.parse, 'aaabaa') + + def test_basic2(self): + # Multiple parsers and colliding tokens + g = _Lark("""start: B A + B: "12" + A: "1" """) + g2 = _Lark("""start: B A + B: "12" + A: "2" """) + x = g.parse('121') + assert x.data == 'start' and x.children == ['12', '1'], x + x = g2.parse('122') + assert x.data == 'start' and x.children == ['12', '2'], x + + + @unittest.skipIf(cStringIO is None, "cStringIO not available") + def test_stringio_bytes(self): + """Verify that a Lark can be created from file-like objects other than Python's standard 'file' object""" + _Lark(cStringIO(b'start: a+ b a* "b" a*\n b: "b"\n a: "a" ')) + + def test_stringio_unicode(self): + """Verify that a Lark can be created from file-like objects other than Python's standard 'file' object""" + _Lark(uStringIO(u'start: a+ b a* "b" a*\n b: "b"\n a: "a" ')) + + def test_unicode(self): + g = _Lark(u"""start: UNIA UNIB UNIA + UNIA: /\xa3/ + UNIB: /\u0101/ + """) + g.parse(u'\xa3\u0101\u00a3') + + def test_unicode2(self): + g = _Lark(r"""start: UNIA UNIB UNIA UNIC + UNIA: /\xa3/ + UNIB: "a\u0101b\ " + UNIC: /a?\u0101c\n/ + """) + g.parse(u'\xa3a\u0101b\\ \u00a3\u0101c\n') + + def test_unicode3(self): + g = _Lark(r"""start: UNIA UNIB UNIA UNIC + UNIA: /\xa3/ + UNIB: "\u0101" + UNIC: /\u0203/ /\n/ + """) + g.parse(u'\xa3\u0101\u00a3\u0203\n') + + def test_unicode4(self): + g = _Lark(r"""start: UNIA UNIB UNIA UNIC + UNIA: /\xa3/ + UNIB: "\U0010FFFF" + UNIC: /\U00100000/ /\n/ + """) + g.parse(u'\xa3\U0010FFFF\u00a3\U00100000\n') + + def test_hex_escape(self): + g = _Lark(r"""start: A B C + A: "\x01" + B: /\x02/ + C: "\xABCD" + """) + g.parse('\x01\x02\xABCD') + + def test_unicode_literal_range_escape(self): + g = _Lark(r"""start: A+ + A: "\u0061".."\u0063" + """) + g.parse('abc') + + @unittest.skipIf(sys.version_info < (3, 3), "re package did not support 32bit unicode escape sequence before Python 3.3") + def test_unicode_literal_range_escape2(self): + g = _Lark(r"""start: A+ + A: "\U0000FFFF".."\U00010002" + """) + g.parse('\U0000FFFF\U00010000\U00010001\U00010002') + + def test_hex_literal_range_escape(self): + g = _Lark(r"""start: A+ + A: "\x01".."\x03" + """) + g.parse('\x01\x02\x03') + + @unittest.skipIf(sys.version_info[0]==2 or sys.version_info[:2]==(3, 4), + "bytes parser isn't perfect in Python2, exceptions don't work correctly") + def test_bytes_utf8(self): + g = r""" + start: BOM? char+ + BOM: "\xef\xbb\xbf" + char: CHAR1 | CHAR2 | CHAR3 | CHAR4 + CONTINUATION_BYTE: "\x80" .. "\xbf" + CHAR1: "\x00" .. "\x7f" + CHAR2: "\xc0" .. "\xdf" CONTINUATION_BYTE + CHAR3: "\xe0" .. "\xef" CONTINUATION_BYTE CONTINUATION_BYTE + CHAR4: "\xf0" .. "\xf7" CONTINUATION_BYTE CONTINUATION_BYTE CONTINUATION_BYTE + """ + g = _Lark(g, use_bytes=True) + s = u"🔣 地? gurīn".encode('utf-8') + self.assertEqual(len(g.parse(s).children), 10) + + for enc, j in [("sjis", u"地球の絵はグリーンでグッド? 
Chikyuu no e wa guriin de guddo"), + ("sjis", u"売春婦"), + ("euc-jp", u"乂鵬鵠")]: + s = j.encode(enc) + self.assertRaises(UnexpectedCharacters, g.parse, s) + + @unittest.skipIf(PARSER == 'cyk', "Takes forever") + def test_stack_for_ebnf(self): + """Verify that stack depth isn't an issue for EBNF grammars""" + g = _Lark(r"""start: a+ + a : "a" """) + + g.parse("a" * (sys.getrecursionlimit()*2 )) + + def test_expand1_lists_with_one_item(self): + g = _Lark(r"""start: list + ?list: item+ + item : A + A: "a" + """) + r = g.parse("a") + + # because 'list' is an expand-if-contains-one rule and we only provided one element it should have expanded to 'item' + self.assertSequenceEqual([subtree.data for subtree in r.children], ('item',)) + + # regardless of the amount of items: there should be only *one* child in 'start' because 'list' isn't an expand-all rule + self.assertEqual(len(r.children), 1) + + def test_expand1_lists_with_one_item_2(self): + g = _Lark(r"""start: list + ?list: item+ "!" + item : A + A: "a" + """) + r = g.parse("a!") + + # because 'list' is an expand-if-contains-one rule and we only provided one element it should have expanded to 'item' + self.assertSequenceEqual([subtree.data for subtree in r.children], ('item',)) + + # regardless of the amount of items: there should be only *one* child in 'start' because 'list' isn't an expand-all rule + self.assertEqual(len(r.children), 1) + + def test_dont_expand1_lists_with_multiple_items(self): + g = _Lark(r"""start: list + ?list: item+ + item : A + A: "a" + """) + r = g.parse("aa") + + # because 'list' is an expand-if-contains-one rule and we've provided more than one element it should *not* have expanded + self.assertSequenceEqual([subtree.data for subtree in r.children], ('list',)) + + # regardless of the amount of items: there should be only *one* child in 'start' because 'list' isn't an expand-all rule + self.assertEqual(len(r.children), 1) + + # Sanity check: verify that 'list' contains the two 'item's we've given it + [list] = r.children + self.assertSequenceEqual([item.data for item in list.children], ('item', 'item')) + + def test_dont_expand1_lists_with_multiple_items_2(self): + g = _Lark(r"""start: list + ?list: item+ "!" + item : A + A: "a" + """) + r = g.parse("aa!") + + # because 'list' is an expand-if-contains-one rule and we've provided more than one element it should *not* have expanded + self.assertSequenceEqual([subtree.data for subtree in r.children], ('list',)) + + # regardless of the amount of items: there should be only *one* child in 'start' because 'list' isn't an expand-all rule + self.assertEqual(len(r.children), 1) + + # Sanity check: verify that 'list' contains the two 'item's we've given it + [list] = r.children + self.assertSequenceEqual([item.data for item in list.children], ('item', 'item')) + + + + @unittest.skipIf(PARSER == 'cyk', "No empty rules") + def test_empty_expand1_list(self): + g = _Lark(r"""start: list + ?list: item* + item : A + A: "a" + """) + r = g.parse("") + + # because 'list' is an expand-if-contains-one rule and we've provided less than one element (i.e. 
none) it should *not* have expanded + self.assertSequenceEqual([subtree.data for subtree in r.children], ('list',)) + + # regardless of the amount of items: there should be only *one* child in 'start' because 'list' isn't an expand-all rule + self.assertEqual(len(r.children), 1) + + # Sanity check: verify that 'list' contains no 'item's as we've given it none + [list] = r.children + self.assertSequenceEqual([item.data for item in list.children], ()) + + @unittest.skipIf(PARSER == 'cyk', "No empty rules") + def test_empty_expand1_list_2(self): + g = _Lark(r"""start: list + ?list: item* "!"? + item : A + A: "a" + """) + r = g.parse("") + + # because 'list' is an expand-if-contains-one rule and we've provided less than one element (i.e. none) it should *not* have expanded + self.assertSequenceEqual([subtree.data for subtree in r.children], ('list',)) + + # regardless of the amount of items: there should be only *one* child in 'start' because 'list' isn't an expand-all rule + self.assertEqual(len(r.children), 1) + + # Sanity check: verify that 'list' contains no 'item's as we've given it none + [list] = r.children + self.assertSequenceEqual([item.data for item in list.children], ()) + + + @unittest.skipIf(PARSER == 'cyk', "No empty rules") + def test_empty_flatten_list(self): + g = _Lark(r"""start: list + list: | item "," list + item : A + A: "a" + """) + r = g.parse("") + + # Because 'list' is a flatten rule its top-level element should *never* be expanded + self.assertSequenceEqual([subtree.data for subtree in r.children], ('list',)) + + # Sanity check: verify that 'list' contains no 'item's as we've given it none + [list] = r.children + self.assertSequenceEqual([item.data for item in list.children], ()) + + @unittest.skipIf(True, "Flattening list isn't implemented (and may never be)") + def test_single_item_flatten_list(self): + g = _Lark(r"""start: list + list: | item "," list + item : A + A: "a" + """) + r = g.parse("a,") + + # Because 'list' is a flatten rule its top-level element should *never* be expanded + self.assertSequenceEqual([subtree.data for subtree in r.children], ('list',)) + + # Sanity check: verify that 'list' contains exactly the one 'item' we've given it + [list] = r.children + self.assertSequenceEqual([item.data for item in list.children], ('item',)) + + @unittest.skipIf(True, "Flattening list isn't implemented (and may never be)") + def test_multiple_item_flatten_list(self): + g = _Lark(r"""start: list + list: | item "," list + item : A + A: "a" + """) + r = g.parse("a,a,") + + # Because 'list' is a flatten rule its top-level element should *never* be expanded + self.assertSequenceEqual([subtree.data for subtree in r.children], ('list',)) + + # Sanity check: verify that 'list' contains exactly the two 'item's we've given it + [list] = r.children + self.assertSequenceEqual([item.data for item in list.children], ('item', 'item')) + + @unittest.skipIf(True, "Flattening list isn't implemented (and may never be)") + def test_recurse_flatten(self): + """Verify that stack depth doesn't get exceeded on recursive rules marked for flattening.""" + g = _Lark(r"""start: a | start a + a : A + A : "a" """) + + # Keep the input well below the recursion limit, since repr() on the half-built tree data structures uses recursion.
+ g.parse("a" * (sys.getrecursionlimit() // 4)) + + def test_token_collision(self): + g = _Lark(r"""start: "Hello" NAME + NAME: /\w/+ + %ignore " " + """) + x = g.parse('Hello World') + self.assertSequenceEqual(x.children, ['World']) + x = g.parse('Hello HelloWorld') + self.assertSequenceEqual(x.children, ['HelloWorld']) + + def test_token_collision_WS(self): + g = _Lark(r"""start: "Hello" NAME + NAME: /\w/+ + %import common.WS + %ignore WS + """) + x = g.parse('Hello World') + self.assertSequenceEqual(x.children, ['World']) + x = g.parse('Hello HelloWorld') + self.assertSequenceEqual(x.children, ['HelloWorld']) + + def test_token_collision2(self): + g = _Lark(""" + !start: "starts" + + %import common.LCASE_LETTER + """) + + x = g.parse("starts") + self.assertSequenceEqual(x.children, ['starts']) + + def test_templates(self): + g = _Lark(r""" + start: "[" sep{NUMBER, ","} "]" + sep{item, delim}: item (delim item)* + NUMBER: /\d+/ + %ignore " " + """) + x = g.parse("[1, 2, 3, 4]") + self.assertSequenceEqual(x.children, [Tree('sep', ['1', '2', '3', '4'])]) + x = g.parse("[1]") + self.assertSequenceEqual(x.children, [Tree('sep', ['1'])]) + + def test_templates_recursion(self): + g = _Lark(r""" + start: "[" _sep{NUMBER, ","} "]" + _sep{item, delim}: item | _sep{item, delim} delim item + NUMBER: /\d+/ + %ignore " " + """) + x = g.parse("[1, 2, 3, 4]") + self.assertSequenceEqual(x.children, ['1', '2', '3', '4']) + x = g.parse("[1]") + self.assertSequenceEqual(x.children, ['1']) + + def test_templates_import(self): + g = _Lark_open("test_templates_import.lark", rel_to=__file__) + x = g.parse("[1, 2, 3, 4]") + self.assertSequenceEqual(x.children, [Tree('sep', ['1', '2', '3', '4'])]) + x = g.parse("[1]") + self.assertSequenceEqual(x.children, [Tree('sep', ['1'])]) + + def test_templates_alias(self): + g = _Lark(r""" + start: expr{"C"} + expr{t}: "A" t + | "B" t -> b + """) + x = g.parse("AC") + self.assertSequenceEqual(x.children, [Tree('expr', [])]) + x = g.parse("BC") + self.assertSequenceEqual(x.children, [Tree('b', [])]) + + def test_templates_modifiers(self): + g = _Lark(r""" + start: expr{"B"} + !expr{t}: "A" t + """) + x = g.parse("AB") + self.assertSequenceEqual(x.children, [Tree('expr', ["A", "B"])]) + g = _Lark(r""" + start: _expr{"B"} + !_expr{t}: "A" t + """) + x = g.parse("AB") + self.assertSequenceEqual(x.children, ["A", "B"]) + g = _Lark(r""" + start: expr{b} + b: "B" + ?expr{t}: "A" t + """) + x = g.parse("AB") + self.assertSequenceEqual(x.children, [Tree('b',[])]) + + def test_templates_templates(self): + g = _Lark('''start: a{b} + a{t}: t{"a"} + b{x}: x''') + x = g.parse('a') + self.assertSequenceEqual(x.children, [Tree('a', [Tree('b',[])])]) + + def test_g_regex_flags(self): + g = _Lark(""" + start: "a" /b+/ C + C: "C" | D + D: "D" E + E: "e" + """, g_regex_flags=re.I) + x1 = g.parse("ABBc") + x2 = g.parse("abdE") + + # def test_string_priority(self): + # g = _Lark("""start: (A | /a?bb/)+ + # A: "a" """) + # x = g.parse('abb') + # self.assertEqual(len(x.children), 2) + + # # This parse raises an exception because the lexer will always try to consume + # # "a" first and will never match the regular expression + # # This behavior is subject to change!! + # # Thie won't happen with ambiguity handling. 
+ # g = _Lark("""start: (A | /a?ab/)+ + # A: "a" """) + # self.assertRaises(LexError, g.parse, 'aab') + + def test_rule_collision(self): + g = _Lark("""start: "a"+ "b" + | "a"+ """) + x = g.parse('aaaa') + x = g.parse('aaaab') + + def test_rule_collision2(self): + g = _Lark("""start: "a"* "b" + | "a"+ """) + x = g.parse('aaaa') + x = g.parse('aaaab') + x = g.parse('b') + + def test_token_not_anon(self): + """Tests that "a" is matched as an anonymous token, and not A. + """ + + g = _Lark("""start: "a" + A: "a" """) + x = g.parse('a') + self.assertEqual(len(x.children), 0, '"a" should be considered anonymous') + + g = _Lark("""start: "a" A + A: "a" """) + x = g.parse('aa') + self.assertEqual(len(x.children), 1, 'only "a" should be considered anonymous') + self.assertEqual(x.children[0].type, "A") + + g = _Lark("""start: /a/ + A: /a/ """) + x = g.parse('a') + self.assertEqual(len(x.children), 1) + self.assertEqual(x.children[0].type, "A", "A isn't associated with /a/") + + @unittest.skipIf(PARSER == 'cyk', "No empty rules") + def test_maybe(self): + g = _Lark("""start: ["a"] """) + x = g.parse('a') + x = g.parse('') + + def test_start(self): + g = _Lark("""a: "a" a? """, start='a') + x = g.parse('a') + x = g.parse('aa') + x = g.parse('aaa') + + def test_alias(self): + g = _Lark("""start: "a" -> b """) + x = g.parse('a') + self.assertEqual(x.data, "b") + + def test_token_ebnf(self): + g = _Lark("""start: A + A: "a"* ("b"? "c".."e")+ + """) + x = g.parse('abcde') + x = g.parse('dd') + + def test_backslash(self): + g = _Lark(r"""start: "\\" "a" + """) + x = g.parse(r'\a') + + g = _Lark(r"""start: /\\/ /a/ + """) + x = g.parse(r'\a') + + + def test_backslash2(self): + g = _Lark(r"""start: "\"" "-" + """) + x = g.parse('"-') + + g = _Lark(r"""start: /\// /-/ + """) + x = g.parse('/-') + + + + def test_special_chars(self): + g = _Lark(r"""start: "\n" + """) + x = g.parse('\n') + + g = _Lark(r"""start: /\n/ + """) + x = g.parse('\n') + + + # def test_token_recurse(self): + # g = _Lark("""start: A + # A: B + # B: A + # """) + + @unittest.skipIf(PARSER == 'cyk', "No empty rules") + def test_empty(self): + # Fails an Earley implementation without special handling for empty rules, + # or re-processing of already completed rules. + g = _Lark(r"""start: _empty a "B" + a: _empty "A" + _empty: + """) + x = g.parse('AB') + + def test_regex_quote(self): + g = r""" + start: SINGLE_QUOTED_STRING | DOUBLE_QUOTED_STRING + SINGLE_QUOTED_STRING : /'[^']*'/ + DOUBLE_QUOTED_STRING : /"[^"]*"/ + """ + + g = _Lark(g) + self.assertEqual( g.parse('"hello"').children, ['"hello"']) + self.assertEqual( g.parse("'hello'").children, ["'hello'"]) + + def test_join_regex_flags(self): + g = r""" + start: A + A: B C + B: /./s + C: /./ + """ + g = _Lark(g) + self.assertEqual(g.parse(" ").children,[" "]) + self.assertEqual(g.parse("\n ").children,["\n "]) + self.assertRaises(UnexpectedCharacters, g.parse, "\n\n") + + g = r""" + start: A + A: B | C + B: "b"i + C: "c" + """ + g = _Lark(g) + self.assertEqual(g.parse("b").children,["b"]) + self.assertEqual(g.parse("B").children,["B"]) + self.assertEqual(g.parse("c").children,["c"]) + self.assertRaises(UnexpectedCharacters, g.parse, "C") + + + def test_float_without_lexer(self): + expected_error = UnexpectedCharacters if 'dynamic' in LEXER else UnexpectedToken + if PARSER == 'cyk': + expected_error = ParseError + + g = _Lark("""start: ["+"|"-"] float + float: digit* "." digit+ exp? 
+ | digit+ exp + exp: ("e"|"E") ["+"|"-"] digit+ + digit: "0"|"1"|"2"|"3"|"4"|"5"|"6"|"7"|"8"|"9" + """) + g.parse("1.2") + g.parse("-.2e9") + g.parse("+2e-9") + self.assertRaises( expected_error, g.parse, "+2e-9e") + + def test_keep_all_tokens(self): + l = _Lark("""start: "a"+ """, keep_all_tokens=True) + tree = l.parse('aaa') + self.assertEqual(tree.children, ['a', 'a', 'a']) + + + def test_token_flags(self): + l = _Lark("""!start: "a"i+ + """ + ) + tree = l.parse('aA') + self.assertEqual(tree.children, ['a', 'A']) + + l = _Lark("""!start: /a/i+ + """ + ) + tree = l.parse('aA') + self.assertEqual(tree.children, ['a', 'A']) + + # g = """!start: "a"i "a" + # """ + # self.assertRaises(GrammarError, _Lark, g) + + # g = """!start: /a/i /a/ + # """ + # self.assertRaises(GrammarError, _Lark, g) + + g = """start: NAME "," "a" + NAME: /[a-z_]/i /[a-z0-9_]/i* + """ + l = _Lark(g) + tree = l.parse('ab,a') + self.assertEqual(tree.children, ['ab']) + tree = l.parse('AB,a') + self.assertEqual(tree.children, ['AB']) + + def test_token_flags3(self): + l = _Lark("""!start: ABC+ + ABC: "abc"i + """ + ) + tree = l.parse('aBcAbC') + self.assertEqual(tree.children, ['aBc', 'AbC']) + + def test_token_flags2(self): + g = """!start: ("a"i | /a/ /b/?)+ + """ + l = _Lark(g) + tree = l.parse('aA') + self.assertEqual(tree.children, ['a', 'A']) + + def test_token_flags_verbose(self): + g = _Lark(r"""start: NL | ABC + ABC: / [a-z] /x + NL: /\n/ + """) + x = g.parse('a') + self.assertEqual(x.children, ['a']) + + def test_token_flags_verbose_multiline(self): + g = _Lark(r"""start: ABC + ABC: / a b c + d + e f + /x + """) + x = g.parse('abcdef') + self.assertEqual(x.children, ['abcdef']) + + @unittest.skipIf(PARSER == 'cyk', "No empty rules") + def test_twice_empty(self): + g = """!start: ("A"?)? + """ + l = _Lark(g) + tree = l.parse('A') + self.assertEqual(tree.children, ['A']) + + tree = l.parse('') + self.assertEqual(tree.children, []) + + + def test_line_and_column(self): + g = r"""!start: "A" bc "D" + !bc: "B\nC" + """ + l = _Lark(g) + a, bc, d = l.parse("AB\nCD").children + self.assertEqual(a.line, 1) + self.assertEqual(a.column, 1) + + bc, = bc.children + self.assertEqual(bc.line, 1) + self.assertEqual(bc.column, 2) + + self.assertEqual(d.line, 2) + self.assertEqual(d.column, 2) + + # if LEXER != 'dynamic': + self.assertEqual(a.end_line, 1) + self.assertEqual(a.end_column, 2) + self.assertEqual(bc.end_line, 2) + self.assertEqual(bc.end_column, 2) + self.assertEqual(d.end_line, 2) + self.assertEqual(d.end_column, 3) + + + + def test_reduce_cycle(self): + """Tests an edge condition in the LALR parser, in which a transition state looks exactly like the end state. + It seems that the correct solution is to explicitly distinguish finalization in the reduce() function. 
+ """ + + l = _Lark(""" + term: A + | term term + + A: "a" + + """, start='term') + + tree = l.parse("aa") + self.assertEqual(len(tree.children), 2) + + + @unittest.skipIf(LEXER != 'basic', "basic lexer prioritization differs from dynamic lexer prioritization") + def test_lexer_prioritization(self): + "Tests effect of priority on result" + + grammar = """ + start: A B | AB + A.2: "a" + B: "b" + AB: "ab" + """ + l = _Lark(grammar) + res = l.parse("ab") + + self.assertEqual(res.children, ['a', 'b']) + self.assertNotEqual(res.children, ['ab']) + + grammar = """ + start: A B | AB + A: "a" + B: "b" + AB.3: "ab" + """ + l = _Lark(grammar) + res = l.parse("ab") + + self.assertNotEqual(res.children, ['a', 'b']) + self.assertEqual(res.children, ['ab']) + + + grammar = """ + start: A B | AB + A: "a" + B.-20: "b" + AB.-10: "ab" + """ + l = _Lark(grammar) + res = l.parse("ab") + self.assertEqual(res.children, ['a', 'b']) + + + grammar = """ + start: A B | AB + A.-99999999999999999999999: "a" + B: "b" + AB: "ab" + """ + l = _Lark(grammar) + res = l.parse("ab") + + self.assertEqual(res.children, ['ab']) + + + @unittest.skipIf('dynamic' not in LEXER, "dynamic lexer prioritization differs from basic lexer prioritization") + def test_dynamic_lexer_prioritization(self): + "Tests effect of priority on result" + + grammar = """ + start: A B | AB + A.2: "a" + B: "b" + AB: "ab" + """ + l = _Lark(grammar) + res = l.parse("ab") + + self.assertEqual(res.children, ['a', 'b']) + self.assertNotEqual(res.children, ['ab']) + + grammar = """ + start: A B | AB + A: "a" + B: "b" + AB.3: "ab" + """ + l = _Lark(grammar) + res = l.parse("ab") + + self.assertNotEqual(res.children, ['a', 'b']) + self.assertEqual(res.children, ['ab']) + + + # this case differs from prioritization with a basic lexer + grammar = """ + start: A B | AB + A: "a" + B.-20: "b" + AB.-10: "ab" + """ + l = _Lark(grammar) + res = l.parse("ab") + self.assertEqual(res.children, ['ab']) + + + grammar = """ + start: A B | AB + A.-99999999999999999999999: "a" + B: "b" + AB: "ab" + """ + l = _Lark(grammar) + res = l.parse("ab") + + self.assertEqual(res.children, ['ab']) + + + + + def test_import(self): + grammar = """ + start: NUMBER WORD + + %import common.NUMBER + %import common.WORD + %import common.WS + %ignore WS + + """ + l = _Lark(grammar) + x = l.parse('12 elephants') + self.assertEqual(x.children, ['12', 'elephants']) + + + def test_import_rename(self): + grammar = """ + start: N W + + %import common.NUMBER -> N + %import common.WORD -> W + %import common.WS + %ignore WS + + """ + l = _Lark(grammar) + x = l.parse('12 elephants') + self.assertEqual(x.children, ['12', 'elephants']) + + + def test_relative_import(self): + l = _Lark_open('test_relative_import.lark', rel_to=__file__) + x = l.parse('12 lions') + self.assertEqual(x.children, ['12', 'lions']) + + + def test_relative_import_unicode(self): + l = _Lark_open('test_relative_import_unicode.lark', rel_to=__file__) + x = l.parse(u'Ø') + self.assertEqual(x.children, [u'Ø']) + + + def test_relative_import_rename(self): + l = _Lark_open('test_relative_import_rename.lark', rel_to=__file__) + x = l.parse('12 lions') + self.assertEqual(x.children, ['12', 'lions']) + + + def test_relative_rule_import(self): + l = _Lark_open('test_relative_rule_import.lark', rel_to=__file__) + x = l.parse('xaabby') + self.assertEqual(x.children, [ + 'x', + Tree('expr', ['a', Tree('expr', ['a', 'b']), 'b']), + 'y']) + + + def test_relative_rule_import_drop_ignore(self): + # %ignore rules are dropped on import + l = 
_Lark_open('test_relative_rule_import_drop_ignore.lark', + rel_to=__file__) + self.assertRaises((ParseError, UnexpectedInput), + l.parse, 'xa abby') + + + def test_relative_rule_import_subrule(self): + l = _Lark_open('test_relative_rule_import_subrule.lark', + rel_to=__file__) + x = l.parse('xaabby') + self.assertEqual(x.children, [ + 'x', + Tree('startab', [ + Tree('grammars__ab__expr', [ + 'a', Tree('grammars__ab__expr', ['a', 'b']), 'b', + ]), + ]), + 'y']) + + + def test_relative_rule_import_subrule_no_conflict(self): + l = _Lark_open( + 'test_relative_rule_import_subrule_no_conflict.lark', + rel_to=__file__) + x = l.parse('xaby') + self.assertEqual(x.children, [Tree('expr', [ + 'x', + Tree('startab', [ + Tree('grammars__ab__expr', ['a', 'b']), + ]), + 'y'])]) + self.assertRaises((ParseError, UnexpectedInput), + l.parse, 'xaxabyby') + + + def test_relative_rule_import_rename(self): + l = _Lark_open('test_relative_rule_import_rename.lark', + rel_to=__file__) + x = l.parse('xaabby') + self.assertEqual(x.children, [ + 'x', + Tree('ab', ['a', Tree('ab', ['a', 'b']), 'b']), + 'y']) + + + def test_multi_import(self): + grammar = """ + start: NUMBER WORD + + %import common (NUMBER, WORD, WS) + %ignore WS + + """ + l = _Lark(grammar) + x = l.parse('12 toucans') + self.assertEqual(x.children, ['12', 'toucans']) + + + def test_relative_multi_import(self): + l = _Lark_open("test_relative_multi_import.lark", rel_to=__file__) + x = l.parse('12 capybaras') + self.assertEqual(x.children, ['12', 'capybaras']) + + def test_relative_import_preserves_leading_underscore(self): + l = _Lark_open("test_relative_import_preserves_leading_underscore.lark", rel_to=__file__) + x = l.parse('Ax') + self.assertEqual(next(x.find_data('c')).children, ['A']) + + def test_relative_import_of_nested_grammar(self): + l = _Lark_open("grammars/test_relative_import_of_nested_grammar.lark", rel_to=__file__) + x = l.parse('N') + self.assertEqual(next(x.find_data('rule_to_import')).children, ['N']) + + def test_relative_import_rules_dependencies_imported_only_once(self): + l = _Lark_open("test_relative_import_rules_dependencies_imported_only_once.lark", rel_to=__file__) + x = l.parse('AAA') + self.assertEqual(next(x.find_data('a')).children, ['A']) + self.assertEqual(next(x.find_data('b')).children, ['A']) + self.assertEqual(next(x.find_data('d')).children, ['A']) + + def test_import_errors(self): + grammar = """ + start: NUMBER WORD + + %import .grammars.bad_test.NUMBER + """ + self.assertRaises(IOError, _Lark, grammar) + + grammar = """ + start: NUMBER WORD + + %import bad_test.NUMBER + """ + self.assertRaises(IOError, _Lark, grammar) + + @unittest.skipIf('dynamic' in LEXER, "%declare/postlex doesn't work with dynamic") + def test_postlex_declare(self): # Note: this test does a lot. maybe split it up? 
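+ # The postlexer below rewrites 'A' tokens into the %declare'd terminal 'B' before they reach the parser.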
+ class TestPostLexer: + def process(self, stream): + for t in stream: + if t.type == 'A': + t.type = 'B' + yield t + else: + yield t + + always_accept = ('A',) + + parser = _Lark(""" + start: B + A: "A" + %declare B + """, postlex=TestPostLexer()) + + test_file = "A" + tree = parser.parse(test_file) + self.assertEqual(tree.children, [Token('B', 'A')]) + + @unittest.skipIf('dynamic' in LEXER, "%declare/postlex doesn't work with dynamic") + def test_postlex_indenter(self): + class CustomIndenter(Indenter): + NL_type = 'NEWLINE' + OPEN_PAREN_types = [] + CLOSE_PAREN_types = [] + INDENT_type = 'INDENT' + DEDENT_type = 'DEDENT' + tab_len = 8 + + grammar = r""" + start: "a" NEWLINE INDENT "b" NEWLINE DEDENT + + NEWLINE: ( /\r?\n */ )+ + + %ignore " "+ + %declare INDENT DEDENT + """ + + parser = _Lark(grammar, postlex=CustomIndenter()) + parser.parse("a\n b\n") + + + @unittest.skipIf(PARSER == 'cyk', "Doesn't work for CYK") + def test_prioritization(self): + "Tests effect of priority on result" + + grammar = """ + start: a | b + a.1: "a" + b.2: "a" + """ + + l = _Lark(grammar) + res = l.parse("a") + self.assertEqual(res.children[0].data, 'b') + + grammar = """ + start: a | b + a.2: "a" + b.1: "a" + """ + + l = _Lark(grammar) + res = l.parse("a") + self.assertEqual(res.children[0].data, 'a') + + grammar = """ + start: a | b + a.2: "A"+ + b.1: "A"+ "B"? + """ + + l = _Lark(grammar) + res = l.parse("AAAA") + self.assertEqual(res.children[0].data, 'a') + + l = _Lark(grammar) + res = l.parse("AAAB") + self.assertEqual(res.children[0].data, 'b') + + l = _Lark(grammar, priority="invert") + res = l.parse("AAAA") + self.assertEqual(res.children[0].data, 'b') + + + + @unittest.skipIf(PARSER != 'earley' or 'dynamic' not in LEXER, "Currently only Earley supports priority sum in rules") + def test_prioritization_sum(self): + "Tests effect of priority on result" + + grammar = """ + start: ab_ b_ a_ | indirection + indirection: a_ bb_ a_ + a_: "a" + b_: "b" + ab_: "ab" + bb_.1: "bb" + """ + + l = _Lark(grammar, priority="invert") + res = l.parse('abba') + self.assertEqual(''.join(child.data for child in res.children), 'ab_b_a_') + + grammar = """ + start: ab_ b_ a_ | indirection + indirection: a_ bb_ a_ + a_: "a" + b_: "b" + ab_.1: "ab" + bb_: "bb" + """ + + l = _Lark(grammar, priority="invert") + res = l.parse('abba') + self.assertEqual(''.join(child.data for child in res.children), 'indirection') + + grammar = """ + start: ab_ b_ a_ | indirection + indirection: a_ bb_ a_ + a_.2: "a" + b_.1: "b" + ab_.3: "ab" + bb_.3: "bb" + """ + + l = _Lark(grammar, priority="invert") + res = l.parse('abba') + self.assertEqual(''.join(child.data for child in res.children), 'ab_b_a_') + + grammar = """ + start: ab_ b_ a_ | indirection + indirection: a_ bb_ a_ + a_.1: "a" + b_.1: "b" + ab_.4: "ab" + bb_.3: "bb" + """ + + l = _Lark(grammar, priority="invert") + res = l.parse('abba') + self.assertEqual(''.join(child.data for child in res.children), 'indirection') + + + def test_utf8(self): + g = u"""start: a + a: "±a" + """ + l = _Lark(g) + self.assertEqual(l.parse(u'±a'), Tree('start', [Tree('a', [])])) + + g = u"""start: A + A: "±a" + """ + l = _Lark(g) + self.assertEqual(l.parse(u'±a'), Tree('start', [u'\xb1a'])) + + + + @unittest.skipIf(PARSER == 'cyk', "No empty rules") + def test_ignore(self): + grammar = r""" + COMMENT: /(!|(\/\/))[^\n]*/ + %ignore COMMENT + %import common.WS -> _WS + %import common.INT + start: "INT"i _WS+ INT _WS* + """ + + parser = _Lark(grammar) + + tree = parser.parse("int 1 ! 
This is a comment\n") + self.assertEqual(tree.children, ['1']) + + tree = parser.parse("int 1 ! This is a comment") # A trailing ignore token can be tricky! + self.assertEqual(tree.children, ['1']) + + parser = _Lark(r""" + start : "a"* + %ignore "b" + """) + tree = parser.parse("bb") + self.assertEqual(tree.children, []) + + + def test_regex_escaping(self): + g = _Lark("start: /[ab]/") + g.parse('a') + g.parse('b') + + self.assertRaises( UnexpectedInput, g.parse, 'c') + + _Lark(r'start: /\w/').parse('a') + + g = _Lark(r'start: /\\w/') + self.assertRaises( UnexpectedInput, g.parse, 'a') + g.parse(r'\w') + + _Lark(r'start: /\[/').parse('[') + + _Lark(r'start: /\//').parse('/') + + _Lark(r'start: /\\/').parse('\\') + + _Lark(r'start: /\[ab]/').parse('[ab]') + + _Lark(r'start: /\\[ab]/').parse('\\a') + + _Lark(r'start: /\t/').parse('\t') + + _Lark(r'start: /\\t/').parse('\\t') + + _Lark(r'start: /\\\t/').parse('\\\t') + + _Lark(r'start: "\t"').parse('\t') + + _Lark(r'start: "\\t"').parse('\\t') + + _Lark(r'start: "\\\t"').parse('\\\t') + + + def test_ranged_repeat_rules(self): + g = u"""!start: "A"~3 + """ + l = _Lark(g) + self.assertEqual(l.parse(u'AAA'), Tree('start', ["A", "A", "A"])) + self.assertRaises(ParseError, l.parse, u'AA') + self.assertRaises((ParseError, UnexpectedInput), l.parse, u'AAAA') + + + g = u"""!start: "A"~0..2 + """ + if PARSER != 'cyk': # XXX CYK currently doesn't support empty grammars + l = _Lark(g) + self.assertEqual(l.parse(u''), Tree('start', [])) + self.assertEqual(l.parse(u'A'), Tree('start', ['A'])) + self.assertEqual(l.parse(u'AA'), Tree('start', ['A', 'A'])) + self.assertRaises((UnexpectedToken, UnexpectedInput), l.parse, u'AAA') + + g = u"""!start: "A"~3..2 + """ + self.assertRaises(GrammarError, _Lark, g) + + g = u"""!start: "A"~2..3 "B"~2 + """ + l = _Lark(g) + self.assertEqual(l.parse(u'AABB'), Tree('start', ['A', 'A', 'B', 'B'])) + self.assertEqual(l.parse(u'AAABB'), Tree('start', ['A', 'A', 'A', 'B', 'B'])) + self.assertRaises(ParseError, l.parse, u'AAAB') + self.assertRaises((ParseError, UnexpectedInput), l.parse, u'AAABBB') + self.assertRaises((ParseError, UnexpectedInput), l.parse, u'ABB') + self.assertRaises((ParseError, UnexpectedInput), l.parse, u'AAAABB') + + + + + @unittest.skipIf(PARSER=='earley', "Priority not handled correctly right now") # TODO XXX + def test_priority_vs_embedded(self): + g = """ + A.2: "a" + WORD: ("a".."z")+ + + start: (A | WORD)+ + """ + l = _Lark(g) + t = l.parse('abc') + self.assertEqual(t.children, ['a', 'bc']) + self.assertEqual(t.children[0].type, 'A') + + def test_line_counting(self): + p = _Lark("start: /[^x]+/") + + text = 'hello\nworld' + t = p.parse(text) + tok = t.children[0] + self.assertEqual(tok, text) + self.assertEqual(tok.line, 1) + self.assertEqual(tok.column, 1) + # if _LEXER != 'dynamic': + self.assertEqual(tok.end_line, 2) + self.assertEqual(tok.end_column, 6) + + @unittest.skipIf(PARSER=='cyk', "Empty rules") + def test_empty_end(self): + p = _Lark(""" + start: b c d + b: "B" + c: | "C" + d: | "D" + """) + res = p.parse('B') + self.assertEqual(len(res.children), 3) + + @unittest.skipIf(PARSER=='cyk', "Empty rules") + def test_maybe_placeholders(self): + # Anonymous tokens shouldn't count + p = _Lark("""start: ["a"] ["b"] ["c"] """, maybe_placeholders=True) + self.assertEqual(p.parse("").children, []) + + # Unless keep_all_tokens=True + p = _Lark("""start: ["a"] ["b"] ["c"] """, maybe_placeholders=True, keep_all_tokens=True) + self.assertEqual(p.parse("").children, [None, None, None]) + + # All 
invisible constructs shouldn't count + p = _Lark("""start: [A] ["b"] [_c] ["e" "f" _c] + A: "a" + _c: "c" """, maybe_placeholders=True) + self.assertEqual(p.parse("").children, [None]) + self.assertEqual(p.parse("c").children, [None]) + self.assertEqual(p.parse("aefc").children, ['a']) + + # ? shouldn't apply + p = _Lark("""!start: ["a"] "b"? ["c"] """, maybe_placeholders=True) + self.assertEqual(p.parse("").children, [None, None]) + self.assertEqual(p.parse("b").children, [None, 'b', None]) + + p = _Lark("""!start: ["a"] ["b"] ["c"] """, maybe_placeholders=True) + self.assertEqual(p.parse("").children, [None, None, None]) + self.assertEqual(p.parse("a").children, ['a', None, None]) + self.assertEqual(p.parse("b").children, [None, 'b', None]) + self.assertEqual(p.parse("c").children, [None, None, 'c']) + self.assertEqual(p.parse("ab").children, ['a', 'b', None]) + self.assertEqual(p.parse("ac").children, ['a', None, 'c']) + self.assertEqual(p.parse("bc").children, [None, 'b', 'c']) + self.assertEqual(p.parse("abc").children, ['a', 'b', 'c']) + + p = _Lark("""!start: (["a"] "b" ["c"])+ """, maybe_placeholders=True) + self.assertEqual(p.parse("b").children, [None, 'b', None]) + self.assertEqual(p.parse("bb").children, [None, 'b', None, None, 'b', None]) + self.assertEqual(p.parse("abbc").children, ['a', 'b', None, None, 'b', 'c']) + self.assertEqual(p.parse("babbcabcb").children, + [None, 'b', None, + 'a', 'b', None, + None, 'b', 'c', + 'a', 'b', 'c', + None, 'b', None]) + + p = _Lark("""!start: ["a"] ["c"] "b"+ ["a"] ["d"] """, maybe_placeholders=True) + self.assertEqual(p.parse("bb").children, [None, None, 'b', 'b', None, None]) + self.assertEqual(p.parse("bd").children, [None, None, 'b', None, 'd']) + self.assertEqual(p.parse("abba").children, ['a', None, 'b', 'b', 'a', None]) + self.assertEqual(p.parse("cbbbb").children, [None, 'c', 'b', 'b', 'b', 'b', None, None]) + + p = _Lark("""!start: ["a" "b" "c"] """, maybe_placeholders=True) + self.assertEqual(p.parse("").children, [None, None, None]) + self.assertEqual(p.parse("abc").children, ['a', 'b', 'c']) + + p = _Lark("""!start: ["a" ["b" "c"]] """, maybe_placeholders=True) + self.assertEqual(p.parse("").children, [None, None, None]) + self.assertEqual(p.parse("a").children, ['a', None, None]) + self.assertEqual(p.parse("abc").children, ['a', 'b', 'c']) + + p = _Lark(r"""!start: "a" ["b" | "c"] """, maybe_placeholders=True) + self.assertEqual(p.parse("a").children, ['a', None]) + self.assertEqual(p.parse("ab").children, ['a', 'b']) + + p = _Lark(r"""!start: "a" ["b" | "c" "d"] """, maybe_placeholders=True) + self.assertEqual(p.parse("a").children, ['a', None, None]) + # self.assertEqual(p.parse("ab").children, ['a', 'b', None]) # Not implemented; current behavior is incorrect + self.assertEqual(p.parse("acd").children, ['a', 'c', 'd']) + + + def test_escaped_string(self): + "Tests common.ESCAPED_STRING" + grammar = r""" + start: ESCAPED_STRING+ + + %import common (WS_INLINE, ESCAPED_STRING) + %ignore WS_INLINE + """ + + parser = _Lark(grammar) + parser.parse(r'"\\" "b" "c"') + + parser.parse(r'"That" "And a \"b"') + + + def test_meddling_unused(self): + "Unless 'unused' is removed, LALR analysis will fail on reduce-reduce collision" + + grammar = """ + start: EKS* x + x: EKS + unused: x* + EKS: "x" + """ + parser = _Lark(grammar) + + + @unittest.skipIf(PARSER!='lalr' or 'custom' in LEXER, "Serialize currently only works for LALR parsers without custom lexers (though it should be easy to extend)") + def test_serialize(self): + grammar = """ 
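+ // _ANY matches any single character; it is followed by rule 'b' and the literal "C".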
+ start: _ANY b "C" + _ANY: /./ + b: "B" + """ + parser = _Lark(grammar) + s = BytesIO() + parser.save(s) + s.seek(0) + parser2 = Lark.load(s) + self.assertEqual(parser2.parse('ABC'), Tree('start', [Tree('b', [])]) ) + + def test_multi_start(self): + parser = _Lark(''' + a: "x" "a"? + b: "x" "b"? + ''', start=['a', 'b']) + + self.assertEqual(parser.parse('xa', 'a'), Tree('a', [])) + self.assertEqual(parser.parse('xb', 'b'), Tree('b', [])) + + def test_lexer_detect_newline_tokens(self): + # Detect newlines in regular tokens + g = _Lark(r"""start: "go" tail* + !tail : SA "@" | SB "@" | SC "@" | SD "@" + SA : "a" /\n/ + SB : /b./s + SC : "c" /[^a-z]/ + SD : "d" /\s/ + """) + a,b,c,d = [x.children[1] for x in g.parse('goa\n@b\n@c\n@d\n@').children] + self.assertEqual(a.line, 2) + self.assertEqual(b.line, 3) + self.assertEqual(c.line, 4) + self.assertEqual(d.line, 5) + + # Detect newlines in ignored tokens + for re in ['/\\n/', '/[^a-z]/', '/\\s/']: + g = _Lark('''!start: "a" "a" + %ignore {}'''.format(re)) + a, b = g.parse('a\na').children + self.assertEqual(a.line, 1) + self.assertEqual(b.line, 2) + + @unittest.skipIf(PARSER=='cyk' or LEXER=='custom_old', "match_examples() not supported for CYK/old custom lexer") + def test_match_examples(self): + p = _Lark(r""" + start: "a" "b" "c" + """) + + def match_error(s): + try: + _ = p.parse(s) + except UnexpectedInput as u: + return u.match_examples(p.parse, { + 0: ['abe'], + 1: ['ab'], + 2: ['cbc', 'dbc'], + }) + assert False + + assert match_error("abe") == 0 + assert match_error("ab") == 1 + assert match_error("bbc") == 2 + assert match_error("cbc") == 2 + self.assertEqual( match_error("dbc"), 2 ) + self.assertEqual( match_error("ebc"), 2 ) + + + @unittest.skipIf(not regex, "regex not installed") + def test_unicode_class(self): + "Tests that character classes from the `regex` module work correctly." + g = _Lark(r"""?start: NAME + NAME: ID_START ID_CONTINUE* + ID_START: /[\p{Lu}\p{Ll}\p{Lt}\p{Lm}\p{Lo}\p{Nl}_]+/ + ID_CONTINUE: ID_START | /[\p{Mn}\p{Mc}\p{Nd}\p{Pc}]+/""", regex=True) + + self.assertEqual(g.parse('வணகà¯à®•à®®à¯'), 'வணகà¯à®•à®®à¯') + + @unittest.skipIf(not regex, "regex not installed") + def test_unicode_word(self): + "Tests that a persistent bug in the `re` module works when `regex` is enabled." + g = _Lark(r"""?start: NAME + NAME: /[\w]+/ + """, regex=True) + self.assertEqual(g.parse('வணகà¯à®•à®®à¯'), 'வணகà¯à®•à®®à¯') + + @unittest.skipIf(not regex, "regex not installed") + def test_regex_width_fallback(self): + g = r""" + start: NAME NAME? + NAME: /(?(?=\d)\d+|\w+)/ + """ + self.assertRaises((GrammarError, LexError, re.error), _Lark, g) + p = _Lark(g, regex=True) + self.assertEqual(p.parse("123abc"), Tree('start', ['123', 'abc'])) + + g = r""" + start: NAME NAME? 
+ NAME: /(?(?=\d)\d+|\w*)/ + """ + self.assertRaises((GrammarError, LexError, re.error), _Lark, g, regex=True) + + @unittest.skipIf(PARSER!='lalr', "interactive_parser is only implemented for LALR at the moment") + def test_parser_interactive_parser(self): + + g = _Lark(r''' + start: A+ B* + A: "a" + B: "b" + ''') + + ip = g.parse_interactive() + + self.assertRaises(UnexpectedToken, ip.feed_eof) + self.assertRaises(TypeError, ip.exhaust_lexer) + ip.feed_token(Token('A', 'a')) + res = ip.feed_eof() + self.assertEqual(res, Tree('start', ['a'])) + + ip = g.parse_interactive("ab") + + ip.exhaust_lexer() + + ip_copy = ip.copy() + self.assertEqual(ip_copy.parser_state, ip.parser_state) + self.assertEqual(ip_copy.lexer_thread.state, ip.lexer_thread.state) + self.assertIsNot(ip_copy.parser_state, ip.parser_state) + self.assertIsNot(ip_copy.lexer_thread.state, ip.lexer_thread.state) + self.assertIsNot(ip_copy.lexer_thread.state.line_ctr, ip.lexer_thread.state.line_ctr) + + res = ip.feed_eof(ip.lexer_thread.state.last_token) + self.assertEqual(res, Tree('start', ['a', 'b'])) + self.assertRaises(UnexpectedToken ,ip.feed_eof) + + self.assertRaises(UnexpectedToken, ip_copy.feed_token, Token('A', 'a')) + ip_copy.feed_token(Token('B', 'b')) + res = ip_copy.feed_eof() + self.assertEqual(res, Tree('start', ['a', 'b', 'b'])) + + @unittest.skipIf(PARSER!='lalr', "interactive_parser error handling only works with LALR for now") + def test_error_with_interactive_parser(self): + def ignore_errors(e): + if isinstance(e, UnexpectedCharacters): + # Skip bad character + return True + + # Must be UnexpectedToken + if e.token.type == 'COMMA': + # Skip comma + return True + elif e.token.type == 'SIGNED_NUMBER': + # Try to feed a comma and retry the number + e.interactive_parser.feed_token(Token('COMMA', ',')) + e.interactive_parser.feed_token(e.token) + + return True + + # Unhandled error. Will stop parse and raise exception + return False + + g = _Lark(r''' + start: "[" num ("," num)* "]" + ?num: SIGNED_NUMBER + %import common.SIGNED_NUMBER + %ignore " " + ''') + s = "[0 1, 2,, 3,,, 4, 5 6 ]" + tree = g.parse(s, on_error=ignore_errors) + res = [int(x) for x in tree.children] + assert res == list(range(7)) + + s = "[0 1, 2,@, 3,,, 4, 5 6 ]$" + tree = g.parse(s, on_error=ignore_errors) + + @unittest.skipIf(PARSER!='lalr', "interactive_parser error handling only works with LALR for now") + def test_iter_parse(self): + ab_grammar = '!start: "a"* "b"*' + parser = Lark(ab_grammar, parser="lalr") + ip = parser.parse_interactive("aaabb") + i = ip.iter_parse() + assert next(i) == 'a' + assert next(i) == 'a' + assert next(i) == 'a' + assert next(i) == 'b' + + @unittest.skipIf(PARSER!='lalr', "Tree-less mode is only supported in lalr") + def test_default_in_treeless_mode(self): + grammar = r""" + start: expr + + expr: A B + | A expr B + + A: "a" + B: "b" + + %import common.WS + %ignore WS + """ + s = 'a a a b b b' + + class AbTransformer(Transformer): + def __default__(self, data, children, meta): + return '@', data, children + + parser = _Lark(grammar) + a = AbTransformer().transform(parser.parse(s)) + parser = _Lark(grammar, transformer=AbTransformer()) + b = parser.parse(s) + assert a == b + + + _NAME = "Test" + PARSER.capitalize() + LEXER.capitalize() + _TestParser.__name__ = _NAME + _TestParser.__qualname__ = "tests.test_parser." 
+ _NAME + globals()[_NAME] = _TestParser + __all__.append(_NAME) + +_TO_TEST = [ + ('basic', 'earley'), + ('basic', 'cyk'), + ('basic', 'lalr'), + + ('dynamic', 'earley'), + ('dynamic_complete', 'earley'), + + ('contextual', 'lalr'), + + ('custom_new', 'lalr'), + ('custom_new', 'cyk'), + ('custom_old', 'earley'), +] + +for _LEXER, _PARSER in _TO_TEST: + _make_parser_test(_LEXER, _PARSER) + +for _LEXER in ('dynamic', 'dynamic_complete'): + _make_full_earley_test(_LEXER) + +if __name__ == '__main__': + unittest.main() diff --git a/vendor/lark/tests/test_python_grammar.py b/vendor/lark/tests/test_python_grammar.py new file mode 100644 index 00000000..90307985 --- /dev/null +++ b/vendor/lark/tests/test_python_grammar.py @@ -0,0 +1,222 @@ +from unittest import TestCase, main + +from lark import Lark +from lark.indenter import PythonIndenter +from lark.exceptions import UnexpectedCharacters, UnexpectedToken + + +valid_DEC_NUMBER = [ + "0", + "000", + "0_0_0", + "4_2", + "1_0000_0000", + "123456789012345678901234567890", +] + +valid_HEX_NUMBER = [ + "0x_f", + "0xffff_ffff", + "0xffffffffffffffff", + "0Xffffffffffffffff", +] + +valid_OCT_NUMBER = [ + "0o5_7_7", + "0o_5", + "0o77777777777777777", + "0O77777777777777777", +] + +valid_BIN_NUMBER = [ + "0b1001_0100", + "0b_0", + "0b100000000000000000000000000000000000000000000000000000000000000000000", + "0B111111111111111111111111111111111111111111111111111111111111111111111", +] + +valid_FLOAT_NUMBER = [ + "1_00_00.5", + "1_00_00.5e5", + "1_00_00e5_1", + "1e1_0", + ".1_4", + ".1_4e1", + "1_2.5", + "3.14", + "314.", + "0.314", + "000.314", + ".314", + "3e14", + "3E14", + "3e-14", + "3e+14", + "3.e14", + ".3e14", + "3.1e4", +] + +valid_IMAG_NUMBER = [ + "0j", + "123456789012345678901234567890j", + "1_00_00j", + "1_00_00.5j", + "1_00_00e5_1j", + ".1_4j", + "3_3j", + ".5_6j", + "3.14j", + "314.j", + "0.314j", + "000.314j", + ".314j", + "3e14j", + "3E14j", + "3e-14j", + "3e+14j", + "3.e14j", + ".3e14j", + "3.1e4j", +] + +valid_number = (valid_DEC_NUMBER + valid_HEX_NUMBER + valid_OCT_NUMBER + + valid_BIN_NUMBER + valid_FLOAT_NUMBER + valid_IMAG_NUMBER) + + +invalid_number = [ + "0_", + "42_", + "1.4j_", + "0x_", + "0b1_", + "0xf_", + "0o5_", + "1_Else", + "0_b0", + "0_xf", + "0_o5", + "0_7", + "09_99", + "4_______2", + "0.1__4", + "0.1__4j", + "0b1001__0100", + "0xffff__ffff", + "0x___", + "0o5__77", + "1e1__0", + "1e1__0j", + "1_.4", + "1_.4j", + "1._4", + "1._4j", + "._5", + "._5j", + "1.0e+_1", + "1.0e+_1j", + "1.4_j", + "1.4e5_j", + "1_e1", + "1.4_e1", + "1.4_e1j", + "1e_1", + "1.4e_1", + "1.4e_1j", + "1+1.5_j_", + "1+1.5_j", + + "_0", + "_42", + "_1.4j", + "_0x", + "_0b1", + "_0xf", + "_0o5", + "_1_Else", + "_0_b0", + "_0_xf", + "_0_o5", + "_0_7", + "_09_99", + "_4_______2", + "_0.1__4", + "_0.1__4j", + "_0b1001__0100", + "_0xffff__ffff", + "_0x__", + "_0o5__77", + "_1e1__0", + "_1e1__0j", + "_1_.4", + "_1_.4j", + "_1._4", + "_1._4j", + "_._5", + "_._5j", + "_1.0e+_1", + "_1.0e+_1j", + "_1.4_j", + "_1.4e5_j", + "_1_e1", + "_1.4_e1", + "_1.4_e1j", + "_1e_1", + "_1.4e_1", + "_1.4e_1j", + "_1+1.5_j", + "_1+1.5_j", +] + + +class TestPythonParser(TestCase): + @classmethod + def setUpClass(cls): + cls.python_parser = Lark.open_from_package( + "lark", "python.lark", ("grammars",), parser='lalr', + postlex=PythonIndenter(), start=["number"]) + + def _test_parsed_is_this_terminal(self, text, terminal, start): + tree = self.python_parser.parse(text, start=start) + self.assertEqual(len(tree.children), 1) + token = tree.children[0] + 
self.assertEqual(token.type, terminal) + self.assertEqual(token.value, text) + + def test_DEC_NUMBER(self): + for case in valid_DEC_NUMBER: + self._test_parsed_is_this_terminal(case, "DEC_NUMBER", "number") + + def test_HEX_NUMBER(self): + for case in valid_HEX_NUMBER: + self._test_parsed_is_this_terminal(case, "HEX_NUMBER", "number") + + def test_OCT_NUMBER(self): + for case in valid_OCT_NUMBER: + self._test_parsed_is_this_terminal(case, "OCT_NUMBER", "number") + + def test_BIN_NUMBER(self): + for case in valid_BIN_NUMBER: + self._test_parsed_is_this_terminal(case, "BIN_NUMBER", "number") + + def test_FLOAT_NUMBER(self): + for case in valid_FLOAT_NUMBER: + self._test_parsed_is_this_terminal(case, "FLOAT_NUMBER", "number") + + def test_IMAG_NUMBER(self): + for case in valid_IMAG_NUMBER: + self._test_parsed_is_this_terminal(case, "IMAG_NUMBER", "number") + + def test_valid_number(self): + # XXX: all valid test cases should run with the above tests for numbers + for case in valid_number: + self.python_parser.parse(case, start="number") # no error + + def test_invalid_number(self): + for case in invalid_number: + with self.assertRaises((UnexpectedCharacters, UnexpectedToken)): + self.python_parser.parse(case, start="number") + + +if __name__ == '__main__': + main() diff --git a/vendor/lark/tests/test_reconstructor.py b/vendor/lark/tests/test_reconstructor.py new file mode 100644 index 00000000..fe1545ea --- /dev/null +++ b/vendor/lark/tests/test_reconstructor.py @@ -0,0 +1,196 @@ +# coding=utf-8 + +import json +import sys +import unittest +from itertools import product +from unittest import TestCase + +from lark import Lark +from lark.reconstruct import Reconstructor + +common = """ +%import common (WS_INLINE, NUMBER, WORD) +%ignore WS_INLINE +""" + + +def _remove_ws(s): + return s.replace(' ', '').replace('\n', '') + + +class TestReconstructor(TestCase): + + def assert_reconstruct(self, grammar, code, **options): + parser = Lark(grammar, parser='lalr', maybe_placeholders=False, **options) + tree = parser.parse(code) + new = Reconstructor(parser).reconstruct(tree) + self.assertEqual(_remove_ws(code), _remove_ws(new)) + + def test_starred_rule(self): + g = """ + start: item* + item: NL + | rule + rule: WORD ":" NUMBER + NL: /(\\r?\\n)+\\s*/ + """ + common + + code = """ + Elephants: 12 + """ + + self.assert_reconstruct(g, code) + + def test_starred_group(self): + g = """ + start: (rule | NL)* + rule: WORD ":" NUMBER + NL: /(\\r?\\n)+\\s*/ + """ + common + + code = """ + Elephants: 12 + """ + + self.assert_reconstruct(g, code) + + def test_alias(self): + g = """ + start: line* + line: NL + | rule + | "hello" -> hi + rule: WORD ":" NUMBER + NL: /(\\r?\\n)+\\s*/ + """ + common + + code = """ + Elephants: 12 + hello + """ + + self.assert_reconstruct(g, code) + + def test_keep_tokens(self): + g = """ + start: (NL | stmt)* + stmt: var op var + !op: ("+" | "-" | "*" | "/") + var: WORD + NL: /(\\r?\\n)+\\s*/ + """ + common + + code = """ + a+b + """ + + self.assert_reconstruct(g, code) + + def test_expand_rule(self): + g = """ + ?start: (NL | mult_stmt)* + ?mult_stmt: sum_stmt ["*" sum_stmt] + ?sum_stmt: var ["+" var] + var: WORD + NL: /(\\r?\\n)+\\s*/ + """ + common + + code = ['a', 'a*b', 'a+b', 'a*b+c', 'a+b*c', 'a+b*c+d'] + + for c in code: + self.assert_reconstruct(g, c) + + def test_json_example(self): + test_json = ''' + { + "empty_object" : {}, + "empty_array" : [], + "booleans" : { "YES" : true, "NO" : false }, + "numbers" : [ 0, 1, -2, 3.3, 4.4e5, 6.6e-7 ], + "strings" : [ "This", [ "And" , 
"That", "And a \\"b" ] ], + "nothing" : null + } + ''' + + json_grammar = r""" + ?start: value + + ?value: object + | array + | string + | SIGNED_NUMBER -> number + | "true" -> true + | "false" -> false + | "null" -> null + + array : "[" [value ("," value)*] "]" + object : "{" [pair ("," pair)*] "}" + pair : string ":" value + + string : ESCAPED_STRING + + %import common.ESCAPED_STRING + %import common.SIGNED_NUMBER + %import common.WS + + %ignore WS + """ + + json_parser = Lark(json_grammar, parser='lalr', maybe_placeholders=False) + tree = json_parser.parse(test_json) + + new_json = Reconstructor(json_parser).reconstruct(tree) + self.assertEqual(json.loads(new_json), json.loads(test_json)) + + def test_keep_all_tokens(self): + g = """ + start: "a"? _B? c? _d? + _B: "b" + c: "c" + _d: "d" + """ + examples = list(map(''.join, product(('', 'a'), ('', 'b'), ('', 'c'), ('', 'd'), ))) + for code in examples: + self.assert_reconstruct(g, code, keep_all_tokens=True) + + @unittest.skipIf(sys.version_info < (3, 0), "Python 2 does not play well with Unicode.") + def test_switch_grammar_unicode_terminal(self): + """ + This test checks that a parse tree built with a grammar containing only ascii characters can be reconstructed + with a grammar that has unicode rules (or vice versa). The original bug assigned ANON terminals to unicode + keywords, which offsets the ANON terminal count in the unicode grammar and causes subsequent identical ANON + tokens (e.g., `+=`) to mis-match between the two grammars. + """ + + g1 = """ + start: (NL | stmt)* + stmt: "keyword" var op var + !op: ("+=" | "-=" | "*=" | "/=") + var: WORD + NL: /(\\r?\\n)+\\s*/ + """ + common + + g2 = """ + start: (NL | stmt)* + stmt: "கà¯à®±à®¿à®ªà¯à®ªà¯" var op var + !op: ("+=" | "-=" | "*=" | "/=") + var: WORD + NL: /(\\r?\\n)+\\s*/ + """ + common + + code = """ + keyword x += y + """ + + l1 = Lark(g1, parser='lalr', maybe_placeholders=False) + l2 = Lark(g2, parser='lalr', maybe_placeholders=False) + r = Reconstructor(l2) + + tree = l1.parse(code) + code2 = r.reconstruct(tree) + assert l2.parse(code2) == tree + + +if __name__ == '__main__': + unittest.main() diff --git a/vendor/lark/tests/test_relative_import.lark b/vendor/lark/tests/test_relative_import.lark new file mode 100644 index 00000000..c614a31d --- /dev/null +++ b/vendor/lark/tests/test_relative_import.lark @@ -0,0 +1,7 @@ +start: NUMBER WORD + +%import .grammars.test.NUMBER +%import common.WORD +%import common.WS +%ignore WS + diff --git a/vendor/lark/tests/test_relative_import_preserves_leading_underscore.lark b/vendor/lark/tests/test_relative_import_preserves_leading_underscore.lark new file mode 100644 index 00000000..92c08c6f --- /dev/null +++ b/vendor/lark/tests/test_relative_import_preserves_leading_underscore.lark @@ -0,0 +1,3 @@ +start: c + +%import .grammars.leading_underscore_grammar.c \ No newline at end of file diff --git a/vendor/lark/tests/test_relative_import_rename.lark b/vendor/lark/tests/test_relative_import_rename.lark new file mode 100644 index 00000000..c4117715 --- /dev/null +++ b/vendor/lark/tests/test_relative_import_rename.lark @@ -0,0 +1,7 @@ +start: N WORD + +%import .grammars.test.NUMBER -> N +%import common.WORD +%import common.WS +%ignore WS + diff --git a/vendor/lark/tests/test_relative_import_rules_dependencies_imported_only_once.lark b/vendor/lark/tests/test_relative_import_rules_dependencies_imported_only_once.lark new file mode 100644 index 00000000..bc21c7f6 --- /dev/null +++ 
b/vendor/lark/tests/test_relative_import_rules_dependencies_imported_only_once.lark @@ -0,0 +1,5 @@ +%import .grammars.three_rules_using_same_token.a +%import .grammars.three_rules_using_same_token.b +%import .grammars.three_rules_using_same_token.c -> d + +start: a b d diff --git a/vendor/lark/tests/test_relative_import_unicode.lark b/vendor/lark/tests/test_relative_import_unicode.lark new file mode 100644 index 00000000..8010537d --- /dev/null +++ b/vendor/lark/tests/test_relative_import_unicode.lark @@ -0,0 +1,3 @@ +start: UNICODE + +%import .grammars.test_unicode.UNICODE \ No newline at end of file diff --git a/vendor/lark/tests/test_relative_multi_import.lark b/vendor/lark/tests/test_relative_multi_import.lark new file mode 100644 index 00000000..75c131de --- /dev/null +++ b/vendor/lark/tests/test_relative_multi_import.lark @@ -0,0 +1,4 @@ +start: NUMBER WORD + +%import .grammars.test (NUMBER, WORD, WS) +%ignore WS diff --git a/vendor/lark/tests/test_relative_rule_import.lark b/vendor/lark/tests/test_relative_rule_import.lark new file mode 100644 index 00000000..e3a33a5b --- /dev/null +++ b/vendor/lark/tests/test_relative_rule_import.lark @@ -0,0 +1,7 @@ +start: X expr Y + +X: "x" +Y: "y" + +%import .grammars.ab.expr + diff --git a/vendor/lark/tests/test_relative_rule_import_drop_ignore.lark b/vendor/lark/tests/test_relative_rule_import_drop_ignore.lark new file mode 100644 index 00000000..e3a33a5b --- /dev/null +++ b/vendor/lark/tests/test_relative_rule_import_drop_ignore.lark @@ -0,0 +1,7 @@ +start: X expr Y + +X: "x" +Y: "y" + +%import .grammars.ab.expr + diff --git a/vendor/lark/tests/test_relative_rule_import_rename.lark b/vendor/lark/tests/test_relative_rule_import_rename.lark new file mode 100644 index 00000000..342b329c --- /dev/null +++ b/vendor/lark/tests/test_relative_rule_import_rename.lark @@ -0,0 +1,7 @@ +start: X ab Y + +X: "x" +Y: "y" + +%import .grammars.ab.expr -> ab + diff --git a/vendor/lark/tests/test_relative_rule_import_subrule.lark b/vendor/lark/tests/test_relative_rule_import_subrule.lark new file mode 100644 index 00000000..94d7f809 --- /dev/null +++ b/vendor/lark/tests/test_relative_rule_import_subrule.lark @@ -0,0 +1,7 @@ +start: X startab Y + +X: "x" +Y: "y" + +%import .grammars.ab.startab + diff --git a/vendor/lark/tests/test_relative_rule_import_subrule_no_conflict.lark b/vendor/lark/tests/test_relative_rule_import_subrule_no_conflict.lark new file mode 100644 index 00000000..839aac1f --- /dev/null +++ b/vendor/lark/tests/test_relative_rule_import_subrule_no_conflict.lark @@ -0,0 +1,9 @@ +start: expr + +expr: X startab Y + +X: "x" +Y: "y" + +%import .grammars.ab.startab + diff --git a/vendor/lark/tests/test_templates_import.lark b/vendor/lark/tests/test_templates_import.lark new file mode 100644 index 00000000..a1272b8e --- /dev/null +++ b/vendor/lark/tests/test_templates_import.lark @@ -0,0 +1,4 @@ +start: "[" sep{NUMBER, ","} "]" +NUMBER: /\d+/ +%ignore " " +%import .grammars.templates.sep \ No newline at end of file diff --git a/vendor/lark/tests/test_tools.py b/vendor/lark/tests/test_tools.py new file mode 100644 index 00000000..deef9299 --- /dev/null +++ b/vendor/lark/tests/test_tools.py @@ -0,0 +1,139 @@ +from __future__ import absolute_import, print_function + +from unittest import TestCase, main + +from lark import Lark +from lark.tree import Tree +from lark.tools import standalone + +from io import StringIO + + +class TestStandalone(TestCase): + def setUp(self): + pass + + def _create_standalone(self, grammar, compress=False): + code_buf = 
StringIO() + standalone.gen_standalone(Lark(grammar, parser='lalr'), out=code_buf, compress=compress) + code = code_buf.getvalue() + + context = {'__doc__': None, '__name__': 'test_standalone'} + exec(code, context) + return context + + def test_simple(self): + grammar = """ + start: NUMBER WORD + + %import common.NUMBER + %import common.WORD + %import common.WS + %ignore WS + + """ + + context = self._create_standalone(grammar) + + _Lark = context['Lark_StandAlone'] + l = _Lark() + x = l.parse('12 elephants') + self.assertEqual(x.children, ['12', 'elephants']) + x = l.parse('16 candles') + self.assertEqual(x.children, ['16', 'candles']) + + self.assertRaises(context['UnexpectedToken'], l.parse, 'twelve monkeys') + self.assertRaises(context['UnexpectedToken'], l.parse, 'twelve') + self.assertRaises(context['UnexpectedCharacters'], l.parse, '$ talks') + + context = self._create_standalone(grammar, compress=True) + _Lark = context['Lark_StandAlone'] + l = _Lark() + x = l.parse('12 elephants') + + def test_contextual(self): + grammar = """ + start: a b + a: "A" "B" + b: "AB" + """ + + context = self._create_standalone(grammar) + + _Lark = context['Lark_StandAlone'] + l = _Lark() + x = l.parse('ABAB') + + _v_args = context['v_args'] + @_v_args(inline=True) + class T(context['Transformer']): + def a(self): + return 'a' + def b(self): + return 'b' + + start = _v_args(inline=False)(list) + + x = T().transform(x) + self.assertEqual(x, ['a', 'b']) + + l2 = _Lark(transformer=T()) + x = l2.parse('ABAB') + self.assertEqual(x, ['a', 'b']) + + def test_postlex(self): + from lark.indenter import Indenter + class MyIndenter(Indenter): + NL_type = '_NEWLINE' + OPEN_PAREN_types = ['LPAR', 'LSQB', 'LBRACE'] + CLOSE_PAREN_types = ['RPAR', 'RSQB', 'RBRACE'] + INDENT_type = '_INDENT' + DEDENT_type = '_DEDENT' + tab_len = 8 + + grammar = r""" + start: "(" ")" _NEWLINE + _NEWLINE: /\n/ + """ + + context = self._create_standalone(grammar) + _Lark = context['Lark_StandAlone'] + + l = _Lark(postlex=MyIndenter()) + x = l.parse('()\n') + self.assertEqual(x, Tree('start', [])) + l = _Lark(postlex=MyIndenter()) + x = l.parse('(\n)\n') + self.assertEqual(x, Tree('start', [])) + + def test_transformer(self): + grammar = r""" + start: some_rule "(" SOME_TERMINAL ")" + some_rule: SOME_TERMINAL + SOME_TERMINAL: /[A-Za-z_][A-Za-z0-9_]*/ + """ + context = self._create_standalone(grammar) + _Lark = context["Lark_StandAlone"] + + _Token = context["Token"] + _Tree = context["Tree"] + + class MyTransformer(context["Transformer"]): + def SOME_TERMINAL(self, token): + return _Token("SOME_TERMINAL", "token is transformed") + + def some_rule(self, children): + return _Tree("rule_is_transformed", []) + + parser = _Lark(transformer=MyTransformer()) + self.assertEqual( + parser.parse("FOO(BAR)"), + _Tree("start", [ + _Tree("rule_is_transformed", []), + _Token("SOME_TERMINAL", "token is transformed") + ]) + ) + + +if __name__ == '__main__': + main() diff --git a/vendor/lark/tests/test_tree_forest_transformer.py b/vendor/lark/tests/test_tree_forest_transformer.py new file mode 100644 index 00000000..e9600735 --- /dev/null +++ b/vendor/lark/tests/test_tree_forest_transformer.py @@ -0,0 +1,228 @@ +from __future__ import absolute_import + +import unittest + +from lark import Lark +from lark.lexer import Token +from lark.tree import Tree +from lark.visitors import Visitor, Transformer, Discard +from lark.parsers.earley_forest import TreeForestTransformer, handles_ambiguity + +class TestTreeForestTransformer(unittest.TestCase): + + grammar = 
""" + start: ab bc cd + !ab: "A" "B"? + !bc: "B"? "C"? + !cd: "C"? "D" + """ + + parser = Lark(grammar, parser='earley', ambiguity='forest') + forest = parser.parse("ABCD") + + def test_identity_resolve_ambiguity(self): + l = Lark(self.grammar, parser='earley', ambiguity='resolve') + tree1 = l.parse("ABCD") + tree2 = TreeForestTransformer(resolve_ambiguity=True).transform(self.forest) + self.assertEqual(tree1, tree2) + + def test_identity_explicit_ambiguity(self): + l = Lark(self.grammar, parser='earley', ambiguity='explicit') + tree1 = l.parse("ABCD") + tree2 = TreeForestTransformer(resolve_ambiguity=False).transform(self.forest) + self.assertEqual(tree1, tree2) + + def test_tree_class(self): + + class CustomTree(Tree): + pass + + class TreeChecker(Visitor): + def __default__(self, tree): + assert isinstance(tree, CustomTree) + + tree = TreeForestTransformer(resolve_ambiguity=False, tree_class=CustomTree).transform(self.forest) + TreeChecker().visit(tree) + + def test_token_calls(self): + + visited = [False] * 4 + + class CustomTransformer(TreeForestTransformer): + def A(self, node): + assert node.type == 'A' + visited[0] = True + def B(self, node): + assert node.type == 'B' + visited[1] = True + def C(self, node): + assert node.type == 'C' + visited[2] = True + def D(self, node): + assert node.type == 'D' + visited[3] = True + + tree = CustomTransformer(resolve_ambiguity=False).transform(self.forest) + assert visited == [True] * 4 + + def test_default_token(self): + + token_count = [0] + + class CustomTransformer(TreeForestTransformer): + def __default_token__(self, node): + token_count[0] += 1 + assert isinstance(node, Token) + + tree = CustomTransformer(resolve_ambiguity=True).transform(self.forest) + self.assertEqual(token_count[0], 4) + + def test_rule_calls(self): + + visited_start = [False] + visited_ab = [False] + visited_bc = [False] + visited_cd = [False] + + class CustomTransformer(TreeForestTransformer): + def start(self, data): + visited_start[0] = True + def ab(self, data): + visited_ab[0] = True + def bc(self, data): + visited_bc[0] = True + def cd(self, data): + visited_cd[0] = True + + tree = CustomTransformer(resolve_ambiguity=False).transform(self.forest) + self.assertTrue(visited_start[0]) + self.assertTrue(visited_ab[0]) + self.assertTrue(visited_bc[0]) + self.assertTrue(visited_cd[0]) + + def test_default_rule(self): + + rule_count = [0] + + class CustomTransformer(TreeForestTransformer): + def __default__(self, name, data): + rule_count[0] += 1 + + tree = CustomTransformer(resolve_ambiguity=True).transform(self.forest) + self.assertEqual(rule_count[0], 4) + + def test_default_ambig(self): + + ambig_count = [0] + + class CustomTransformer(TreeForestTransformer): + def __default_ambig__(self, name, data): + if len(data) > 1: + ambig_count[0] += 1 + + tree = CustomTransformer(resolve_ambiguity=False).transform(self.forest) + self.assertEqual(ambig_count[0], 1) + + def test_handles_ambiguity(self): + + class CustomTransformer(TreeForestTransformer): + @handles_ambiguity + def start(self, data): + assert isinstance(data, list) + assert len(data) == 4 + for tree in data: + assert tree.data == 'start' + return 'handled' + + @handles_ambiguity + def ab(self, data): + assert isinstance(data, list) + assert len(data) == 1 + assert data[0].data == 'ab' + + tree = CustomTransformer(resolve_ambiguity=False).transform(self.forest) + self.assertEqual(tree, 'handled') + + def test_discard(self): + + class CustomTransformer(TreeForestTransformer): + def bc(self, data): + return 
Discard + + def D(self, node): + return Discard + + class TreeChecker(Transformer): + def bc(self, children): + assert False + + def D(self, token): + assert False + + tree = CustomTransformer(resolve_ambiguity=False).transform(self.forest) + TreeChecker(visit_tokens=True).transform(tree) + + def test_aliases(self): + + visited_ambiguous = [False] + visited_full = [False] + + class CustomTransformer(TreeForestTransformer): + @handles_ambiguity + def start(self, data): + for tree in data: + assert tree.data == 'ambiguous' or tree.data == 'full' + + def ambiguous(self, data): + visited_ambiguous[0] = True + assert len(data) == 3 + assert data[0].data == 'ab' + assert data[1].data == 'bc' + assert data[2].data == 'cd' + return self.tree_class('ambiguous', data) + + def full(self, data): + visited_full[0] = True + assert len(data) == 1 + assert data[0].data == 'abcd' + return self.tree_class('full', data) + + grammar = """ + start: ab bc cd -> ambiguous + | abcd -> full + !ab: "A" "B"? + !bc: "B"? "C"? + !cd: "C"? "D" + !abcd: "ABCD" + """ + + l = Lark(grammar, parser='earley', ambiguity='forest') + forest = l.parse('ABCD') + tree = CustomTransformer(resolve_ambiguity=False).transform(forest) + self.assertTrue(visited_ambiguous[0]) + self.assertTrue(visited_full[0]) + + def test_transformation(self): + + class CustomTransformer(TreeForestTransformer): + def __default__(self, name, data): + result = [] + for item in data: + if isinstance(item, list): + result += item + else: + result.append(item) + return result + + def __default_token__(self, node): + return node.lower() + + def __default_ambig__(self, name, data): + return data[0] + + result = CustomTransformer(resolve_ambiguity=False).transform(self.forest) + expected = ['a', 'b', 'c', 'd'] + self.assertEqual(result, expected) + +if __name__ == '__main__': + unittest.main() diff --git a/vendor/lark/tests/test_tree_templates.py b/vendor/lark/tests/test_tree_templates.py new file mode 100644 index 00000000..fa9578e3 --- /dev/null +++ b/vendor/lark/tests/test_tree_templates.py @@ -0,0 +1,225 @@ +from __future__ import absolute_import + +import unittest +from copy import deepcopy + +from lark import Lark, Tree, Token +from lark.exceptions import MissingVariableError +from lark.tree_templates import TemplateConf, Template, TemplateTranslator + +SOME_NON_TEMPLATED_STRING = "foo bar" +SOME_TEMPLATE_NAME = "thing" +SOME_TEMPLATE_STRING = f"${SOME_TEMPLATE_NAME}" +SOME_NON_STRING = 12345 +SOME_TEMPLATING_GRAMMAR = r""" +start: DASHES? foo DASHES? 
bar +DASHES: "--" +foo: "foo" + | TEMPLATE_NAME -> var +bar: "bar" + | TEMPLATE_NAME -> var +TEMPLATE_NAME: "$" NAME +NAME: /[^\W\d]\w*/ +%ignore /[\t \f]+/ // WS +""" +SOME_FOO_TEMPLATE = f"{SOME_TEMPLATE_STRING} bar" +SOME_BAR_TEMPLATE = f"foo {SOME_TEMPLATE_STRING}" +SOME_NON_TEMPLATE_TREE = Tree("foo", children=["hi"]) + +__all__ = [ + "TestTreeTemplatesConf", + "TestTreeTemplatesTemplateTranslator", + "TestTreeTemplatesTemplate", + "TestTreeTemplatesTemplateDefaultConf", +] + + +class TestTreeTemplatesConf(unittest.TestCase): + parser = Lark(SOME_TEMPLATING_GRAMMAR) + + def test_conf_test_var__not_var(self): + conf = TemplateConf(self.parser.parse) + + non_templates = { + "non-templated string": SOME_NON_TEMPLATED_STRING, + "non-var tree": Tree("stmt", children=[]), + "var tree, non-templated string": Tree( + "var", children=[SOME_NON_TEMPLATED_STRING] + ), + "var tree, templated string not first child": Tree( + "var", children=[SOME_NON_TEMPLATED_STRING, SOME_TEMPLATE_STRING] + ), + "var tree, first child not string": Tree("var", children=[SOME_NON_STRING]), + "var tree, no children": Tree("var", children=[]), + } + for description, test_case in non_templates.items(): + with self.subTest(msg=description): + self.assertIsNone(conf.test_var(test_case)) + + def test_conf_test_var__is_var(self): + conf = TemplateConf(self.parser.parse) + + non_templates = { + "templated string": SOME_TEMPLATE_STRING, + "var tree, non-templated string": Tree( + "var", children=[SOME_TEMPLATE_STRING] + ), + } + for description, test_case in non_templates.items(): + with self.subTest(msg=description): + self.assertEqual(SOME_TEMPLATE_NAME, conf.test_var(test_case)) + + def test_conf_call__same_tree(self): + conf = TemplateConf(self.parser.parse) + explicitly_parsed = self.parser.parse(SOME_FOO_TEMPLATE) + + non_templates = { + "to be parsed": SOME_FOO_TEMPLATE, + "already parsed": explicitly_parsed, + } + for description, test_case in non_templates.items(): + with self.subTest(msg=description): + template = conf(test_case) + self.assertEqual(explicitly_parsed, template.tree) + + def test_template_match__default_conf_match_same_tree__empty_dictionary(self): + template = Template(SOME_NON_TEMPLATE_TREE) + + self.assertEqual({}, template.match(SOME_NON_TEMPLATE_TREE)) + + +class TestTreeTemplatesTemplate(unittest.TestCase): + parser = Lark(SOME_TEMPLATING_GRAMMAR) + conf = TemplateConf(parser.parse) + + def test_template_match__same_tree_no_template__empty_dictionary(self): + template = Template(SOME_NON_TEMPLATE_TREE, conf=self.conf) + + self.assertEqual({}, template.match(SOME_NON_TEMPLATE_TREE)) + + def test_template_match__different_tree_no_template__none(self): + template = Template(SOME_NON_TEMPLATE_TREE, conf=self.conf) + + self.assertIsNone(template.match(Tree("foo", children=["bye"]))) + + def test_template_match__no_template__empty_dictionary(self): + tree = self.parser.parse(SOME_NON_TEMPLATED_STRING) + template = Template(tree, conf=self.conf) + + non_templates = { + "un-parsed string": SOME_NON_TEMPLATED_STRING, + "parsed tree": tree, + } + for description, test_case in non_templates.items(): + with self.subTest(msg=description): + self.assertEqual({}, template.match(test_case)) + + def test_template_match__with_template__empty_dictionary(self): + tree = self.parser.parse(SOME_FOO_TEMPLATE) + template = Template(tree, conf=self.conf) + + non_templates = {"un-parsed string": SOME_FOO_TEMPLATE, "parsed tree": tree} + expected_result = {SOME_TEMPLATE_NAME: tree.children[0]} + + for description, 
test_case in non_templates.items(): + with self.subTest(msg=description): + self.assertEqual(expected_result, template.match(test_case)) + + def test_template_match__different_tree__none(self): + tree = self.parser.parse(SOME_FOO_TEMPLATE) + template = Template(tree, conf=self.conf) + + non_templates = { + "un-parsed string": SOME_BAR_TEMPLATE, + "parsed tree": self.parser.parse(SOME_BAR_TEMPLATE), + } + for description, test_case in non_templates.items(): + with self.subTest(msg=description): + self.assertIsNone(template.match(test_case)) + + def test_template_search__same_tree_no_template__empty_generator(self): + template = Template(SOME_NON_TEMPLATE_TREE, conf=self.conf) + + self.assertEqual([], list(template.search(SOME_NON_TEMPLATE_TREE))) + + def test_template_search__same_tree_as_child__empty_generator(self): + template = Template(SOME_NON_TEMPLATE_TREE, conf=self.conf) + + self.assertEqual( + [], list(template.search(Tree("root", children=[SOME_NON_TEMPLATE_TREE]))) + ) + + def test_template_search__with_template__matched_result_with_parent_tree(self): + tree = self.parser.parse(SOME_FOO_TEMPLATE) + template = Template(tree, conf=self.conf) + + non_templates = {"un-parsed string": SOME_FOO_TEMPLATE, "parsed tree": tree} + expected_result = [(tree, {SOME_TEMPLATE_NAME: tree.children[0]})] + + for description, test_case in non_templates.items(): + with self.subTest(msg=description): + self.assertEqual(expected_result, list(template.search(test_case))) + + def test_template_apply_vars__empty__exception(self): + tree = self.parser.parse(SOME_FOO_TEMPLATE) + template = Template(tree, conf=self.conf) + + with self.assertRaises(MissingVariableError): + template.apply_vars({}) + + def test_template_apply_vars__no_matching_vars__exception(self): + tree = self.parser.parse(SOME_FOO_TEMPLATE) + template = Template(tree, conf=self.conf) + + with self.assertRaises(MissingVariableError): + template.apply_vars({"not used": SOME_NON_TEMPLATE_TREE}) + + def test_template_apply_vars__matching_vars__template_replaced(self): + tree = self.parser.parse(SOME_FOO_TEMPLATE) + template = Template(tree, conf=self.conf) + + expected_result = deepcopy(tree) + expected_result.children[0] = SOME_NON_TEMPLATE_TREE + self.assertEqual( + expected_result, + template.apply_vars({SOME_TEMPLATE_NAME: SOME_NON_TEMPLATE_TREE}), + ) + + +class TestTreeTemplatesTemplateTranslator(unittest.TestCase): + parser = Lark(SOME_TEMPLATING_GRAMMAR) + conf = TemplateConf(parser.parse) + + def test_translate__empty_translations__same_tree(self): + # no translations to match, so doesn't replace anything & can't error + translator = TemplateTranslator({}) + tree = self.parser.parse(SOME_FOO_TEMPLATE) + + expected_result = deepcopy(tree) + self.assertEqual(expected_result, translator.translate(tree)) + + def test_translate__one_translations__same_tree(self): + translations = { + self.conf(f"${SOME_TEMPLATE_NAME} bar"): self.conf( + f"--${SOME_TEMPLATE_NAME}-- bar" + ) + } + translator = TemplateTranslator(translations) + tree = self.parser.parse(SOME_NON_TEMPLATED_STRING) + + expected_result = deepcopy(tree) + expected_result.children.insert(0, Token("DASHES", "--")) + expected_result.children.insert(2, Token("DASHES", "--")) + self.assertEqual(expected_result, translator.translate(tree)) + + +class TestTreeTemplatesTemplateDefaultConf(unittest.TestCase): + def test_template_match__match_same_tree__empty_dictionary(self): + tree = Tree("foo", children=["hi"]) + template = Template(tree) + + self.assertEqual({}, template.match(tree)) + 
+ +if __name__ == "__main__": + unittest.main() diff --git a/vendor/lark/tests/test_trees.py b/vendor/lark/tests/test_trees.py new file mode 100644 index 00000000..dd95f6b8 --- /dev/null +++ b/vendor/lark/tests/test_trees.py @@ -0,0 +1,451 @@ +from __future__ import absolute_import + +import unittest +from functools import partial, reduce, partialmethod +from operator import add, mul +from unittest import TestCase +import copy +import pickle +import functools + +from lark.tree import Tree +from lark.lexer import Token +from lark.visitors import Visitor, Visitor_Recursive, Transformer, Interpreter, visit_children_decor, v_args, Discard, Transformer_InPlace, \ + Transformer_InPlaceRecursive, Transformer_NonRecursive, merge_transformers + + +class TestTrees(TestCase): + def setUp(self): + self.tree1 = Tree('a', [Tree(x, y) for x, y in zip('bcd', 'xyz')]) + + def test_eq(self): + assert self.tree1 == self.tree1 + assert self.tree1 != 0 + + def test_copy(self): + assert self.tree1 == copy.copy(self.tree1) + + def test_deepcopy(self): + assert self.tree1 == copy.deepcopy(self.tree1) + + def test_pickle(self): + s = copy.deepcopy(self.tree1) + data = pickle.dumps(s, protocol=pickle.HIGHEST_PROTOCOL) + assert pickle.loads(data) == s + + def test_repr_runnable(self): + assert self.tree1 == eval(repr(self.tree1)) + + def test_iter_subtrees(self): + expected = [Tree('b', 'x'), Tree('c', 'y'), Tree('d', 'z'), + Tree('a', [Tree('b', 'x'), Tree('c', 'y'), Tree('d', 'z')])] + nodes = list(self.tree1.iter_subtrees()) + self.assertEqual(nodes, expected) + + def test_iter_subtrees_topdown(self): + expected = [Tree('a', [Tree('b', 'x'), Tree('c', 'y'), Tree('d', 'z')]), + Tree('b', 'x'), Tree('c', 'y'), Tree('d', 'z')] + nodes = list(self.tree1.iter_subtrees_topdown()) + self.assertEqual(nodes, expected) + + def test_visitor(self): + class Visitor1(Visitor): + def __init__(self): + self.nodes=[] + + def __default__(self,tree): + self.nodes.append(tree) + class Visitor1_Recursive(Visitor_Recursive): + def __init__(self): + self.nodes=[] + + def __default__(self,tree): + self.nodes.append(tree) + + visitor1=Visitor1() + visitor1_recursive=Visitor1_Recursive() + + expected_top_down = [Tree('a', [Tree('b', 'x'), Tree('c', 'y'), Tree('d', 'z')]), + Tree('b', 'x'), Tree('c', 'y'), Tree('d', 'z')] + expected_bottom_up= [Tree('b', 'x'), Tree('c', 'y'), Tree('d', 'z'), + Tree('a', [Tree('b', 'x'), Tree('c', 'y'), Tree('d', 'z')])] + + visitor1.visit(self.tree1) + self.assertEqual(visitor1.nodes,expected_bottom_up) + + visitor1_recursive.visit(self.tree1) + self.assertEqual(visitor1_recursive.nodes,expected_bottom_up) + + visitor1.nodes=[] + visitor1_recursive.nodes=[] + + visitor1.visit_topdown(self.tree1) + self.assertEqual(visitor1.nodes,expected_top_down) + + visitor1_recursive.visit_topdown(self.tree1) + self.assertEqual(visitor1_recursive.nodes,expected_top_down) + + def test_interp(self): + t = Tree('a', [Tree('b', []), Tree('c', []), 'd']) + + class Interp1(Interpreter): + def a(self, tree): + return self.visit_children(tree) + ['e'] + + def b(self, tree): + return 'B' + + def c(self, tree): + return 'C' + + self.assertEqual(Interp1().visit(t), list('BCde')) + + class Interp2(Interpreter): + @visit_children_decor + def a(self, values): + return values + ['e'] + + def b(self, tree): + return 'B' + + def c(self, tree): + return 'C' + + self.assertEqual(Interp2().visit(t), list('BCde')) + + class Interp3(Interpreter): + def b(self, tree): + return 'B' + + def c(self, tree): + return 'C' + +
self.assertEqual(Interp3().visit(t), list('BCd')) + + def test_transformer(self): + t = Tree('add', [Tree('sub', [Tree('i', ['3']), Tree('f', ['1.1'])]), Tree('i', ['1'])]) + + class T(Transformer): + i = v_args(inline=True)(int) + f = v_args(inline=True)(float) + + sub = lambda self, values: values[0] - values[1] + + def add(self, values): + return sum(values) + + res = T().transform(t) + self.assertEqual(res, 2.9) + + @v_args(inline=True) + class T(Transformer): + i = int + f = float + sub = lambda self, a, b: a-b + + def add(self, a, b): + return a + b + + + res = T().transform(t) + self.assertEqual(res, 2.9) + + + @v_args(inline=True) + class T(Transformer): + i = int + f = float + from operator import sub, add + + res = T().transform(t) + self.assertEqual(res, 2.9) + + def test_vargs(self): + @v_args() + class MyTransformer(Transformer): + @staticmethod + def integer(args): + return 1 # some code here + + @classmethod + def integer2(cls, args): + return 2 # some code here + + hello = staticmethod(lambda args: 'hello') + + x = MyTransformer().transform( Tree('integer', [2])) + self.assertEqual(x, 1) + x = MyTransformer().transform( Tree('integer2', [2])) + self.assertEqual(x, 2) + x = MyTransformer().transform( Tree('hello', [2])) + self.assertEqual(x, 'hello') + + def test_smart_decorator(self): + class OtherClass: + @staticmethod + def ab_staticmethod(a, b): + return (a, b) + + @classmethod + def ab_classmethod(cls, a, b): + assert cls is OtherClass, cls + return (a, b) + + def ab_method(self, a, b): + assert isinstance(self, OtherClass), self + return (a, b) + + @v_args(meta=True) + class OtherTransformer(Transformer): + @staticmethod + def ab_staticmethod(meta, children): + return tuple(children) + + @classmethod + def ab_classmethod(cls, meta, children): + assert cls is OtherTransformer, cls + return tuple(children) + + def ab_method(self, meta, children): + assert isinstance(self, OtherTransformer), self + return tuple(children) + + class CustomCallable: + def __call__(self, *args, **kwargs): + assert isinstance(self, CustomCallable) + return args + + oc_instance = OtherClass() + ot_instance = OtherTransformer() + + def ab_for_partialmethod(self, a, b): + assert isinstance(self, TestCls) + return a, b + + @v_args(inline=True) + class TestCls(Transformer): + @staticmethod + def ab_staticmethod(a, b): + return (a, b) + + @classmethod + def ab_classmethod(cls, a, b): + assert cls is TestCls + return (a, b) + + def ab_method(self, a, b): + assert isinstance(self, TestCls) + return (a, b) + + oc_class_ab_staticmethod = oc_instance.ab_staticmethod + oc_class_ab_classmethod = oc_instance.ab_classmethod + + oc_ab_staticmethod = oc_instance.ab_staticmethod + oc_ab_classmethod = oc_instance.ab_classmethod + oc_ab_method = oc_instance.ab_method + + ot_class_ab_staticmethod = ot_instance.ab_staticmethod + ot_class_ab_classmethod = ot_instance.ab_classmethod + + ot_ab_staticmethod = ot_instance.ab_staticmethod + ot_ab_classmethod = ot_instance.ab_classmethod + ot_ab_method = ot_instance.ab_method + + ab_partialmethod = partialmethod(ab_for_partialmethod, 1) + set_union = set(["a"]).union + static_add = staticmethod(add) + partial_reduce_mul = partial(reduce, mul) + + custom_callable = CustomCallable() + + test_instance = TestCls() + expected = { + "ab_classmethod": ([1, 2], (1, 2)), + "ab_staticmethod": ([1, 2], (1, 2)), + "ab_method": ([1, 2], (1, 2)), + "oc_ab_classmethod": ([1, 2], (1, 2)), + "oc_class_ab_classmethod": ([1, 2], (1, 2)), + + # AFAIK, these two cases are impossible to deal 
with. `oc_instance.ab_staticmethod` returns an actual + # function object that is impossible to distinguish from a normally defined method. + # (i.e. `staticmethod(f).__get__(?, ?) is f` is True) + # "oc_ab_staticmethod": ([1, 2], (1, 2)), + # "oc_class_ab_staticmethod": ([1, 2], (1, 2)), + + "oc_ab_method": ([1, 2], (1, 2)), + "ot_ab_classmethod": ([1, 2], (1, 2)), + "ot_class_ab_classmethod": ([1, 2], (1, 2)), + + # Same as above + # "ot_ab_staticmethod": ([1, 2], (1, 2)), + # "ot_class_ab_staticmethod": ([1, 2], (1, 2)), + + "ot_ab_method": ([1, 2], (1, 2)), + "ab_partialmethod": ([2], (1, 2)), + "custom_callable": ([1, 2], (1, 2)), + "set_union": ([["b"], ["c"]], {"a", "b", "c"}), + "static_add": ([1, 2], 3), + "partial_reduce_mul": ([[1, 2]], 2), + } + non_static = {"ab_method", "ab_partialmethod"} + for method_name, (children, expected_result) in expected.items(): + not_inline = "ot" in method_name + result = test_instance.transform(Tree(method_name, children)) + self.assertEqual(result, expected_result) + + if not_inline: + result = getattr(test_instance, method_name)(None, children) + else: + result = getattr(test_instance, method_name)(*children) + self.assertEqual(result, expected_result) + + if method_name not in non_static: + if not_inline: + result = getattr(TestCls, method_name)(None, children) + else: + result = getattr(TestCls, method_name)(*children) + self.assertEqual(result, expected_result) + + def test_vargs_set_name(self): + # Test with cached_property if available. That actually uses __set_name__ + prop = getattr(functools, "cached_property", property) + + class T(Transformer): + @v_args(inline=True) + @prop # Not sure why you would ever want to use a property here, but we support it + def test(self): + return lambda a, b: (self, a, b) + + t = T() + self.assertEqual(t.transform(Tree("test", [1, 2])), (t, 1, 2)) + + def test_inline_static(self): + @v_args(inline=True) + class T(Transformer): + @staticmethod + def test(a, b): + return a + b + x = T().transform(Tree('test', ['a', 'b'])) + self.assertEqual(x, 'ab') + + def test_vargs_override(self): + t = Tree('add', [Tree('sub', [Tree('i', ['3']), Tree('f', ['1.1'])]), Tree('i', ['1'])]) + + @v_args(inline=True) + class T(Transformer): + i = int + f = float + sub = lambda self, a, b: a-b + + not_a_method = {'other': 'stuff'} + + @v_args(inline=False) + def add(self, values): + return sum(values) + + res = T().transform(t) + self.assertEqual(res, 2.9) + + def test_partial(self): + + tree = Tree("start", [Tree("a", ["test1"]), Tree("b", ["test2"])]) + + def test(prefix, s, postfix): + return prefix + s.upper() + postfix + + @v_args(inline=True) + class T(Transformer): + a = functools.partial(test, "@", postfix="!") + b = functools.partial(lambda s: s + "!") + + res = T().transform(tree) + assert res.children == ["@TEST1!", "test2!"] + + def test_discard(self): + class MyTransformer(Transformer): + def a(self, args): + return 1 # some code here + + def b(cls, args): + return Discard + + t = Tree('root', [ + Tree('b', []), + Tree('a', []), + Tree('b', []), + Tree('c', []), + Tree('b', []), + ]) + t2 = Tree('root', [1, Tree('c', [])]) + + x = MyTransformer().transform( t ) + self.assertEqual(x, t2) + + def test_transformer_variants(self): + tree = Tree('start', [ + Tree('add', [Token('N', '1'), Token('N', '2'), Token('IGNORE_TOKEN', '4')]), + Tree('add', [Token('N', '3'), Token('N', '4')]), + Tree('ignore_tree', [Token('DO', 'NOT PANIC')]), + ]) + for base in (Transformer, Transformer_InPlace, Transformer_NonRecursive, 
Transformer_InPlaceRecursive): + class T(base): + def add(self, children): + return sum(children) + + def N(self, token): + return int(token) + + def ignore_tree(self, children): + return Discard + + def IGNORE_TOKEN(self, token): + return Discard + + copied = copy.deepcopy(tree) + result = T().transform(copied) + self.assertEqual(result, Tree('start', [3, 7])) + + def test_merge_transformers(self): + tree = Tree('start', [ + Tree('main', [ + Token("A", '1'), Token("B", '2') + ]), + Tree("module__main", [ + Token("A", "2"), Token("B", "3") + ]) + ]) + + class T1(Transformer): + A = int + B = int + main = sum + start = list + def module__main(self, children): + return sum(children) + + class T2(Transformer): + A = int + B = int + main = sum + start = list + + class T3(Transformer): + def main(self, children): + return sum(children) + + class T4(Transformer): + main = sum + + + t1_res = T1().transform(tree) + composed_res = merge_transformers(T2(), module=T3()).transform(tree) + self.assertEqual(t1_res, composed_res) + + composed_res2 = merge_transformers(T2(), module=T4()).transform(tree) + self.assertEqual(t1_res, composed_res2) + + with self.assertRaises(AttributeError): + merge_transformers(T1(), module=T3()) + +if __name__ == '__main__': + unittest.main() diff --git a/vendor/lark/tox.ini b/vendor/lark/tox.ini new file mode 100644 index 00000000..04840ebc --- /dev/null +++ b/vendor/lark/tox.ini @@ -0,0 +1,16 @@ +[tox] +envlist = py36, py37, py38, py39, pypy, pypy3 +skip_missing_interpreters=true + +[testenv] +whitelist_externals = git +deps = + -rtest-requirements.txt + +# to always force recreation and avoid unexpected side effects +recreate=True + +commands= + git submodule sync -q + git submodule update --init + python -m tests {posargs} diff --git a/vendor/poetry-core/.flake8 b/vendor/poetry-core/.flake8 index f3bacf68..23a4abf3 100644 --- a/vendor/poetry-core/.flake8 +++ b/vendor/poetry-core/.flake8 @@ -1,22 +1,28 @@ [flake8] +min_python_version = 3.7.0 max-line-length = 88 -ignore = E501, E203, W503 -per-file-ignores = __init__.py:F401 -exclude = - .git - __pycache__ - setup.py - build - dist - releases - .venv - .tox - .mypy_cache - .pytest_cache - .vscode - .github - poetry_core/_vendor/ - poetry_core/utils/_compat.py - poetry_core/utils/_typing.py - tests/fixtures/ - tests/masonry/fixtures/ +ban-relative-imports = True +# flake8-use-fstring: https://github.com/MichaelKim0407/flake8-use-fstring#--percent-greedy-and---format-greedy +format-greedy = 1 +inline-quotes = double +enable-extensions = TC, TC1 +type-checking-exempt-modules = typing, typing-extensions +eradicate-whitelist-extend = ^-.*; +extend-ignore = + # E203: Whitespace before ':' (pycqa/pycodestyle#373) + E203, + # E501: Line too long + E501, + # SIM106: Handle error-cases first + SIM106, + # ANN101: Missing type annotation for self in method + ANN101, + # ANN102: Missing type annotation for cls in classmethod + ANN102, +per-file-ignores = + tests/test_*:ANN201 + tests/**/test_*:ANN201 +extend-exclude = + src/poetry/core/_vendor/* + tests/fixtures/* + tests/**/fixtures/* diff --git a/vendor/poetry-core/.github/workflows/code-quality.yaml b/vendor/poetry-core/.github/workflows/code-quality.yaml deleted file mode 100644 index 899cbe46..00000000 --- a/vendor/poetry-core/.github/workflows/code-quality.yaml +++ /dev/null @@ -1,15 +0,0 @@ -name: Code Quality - -on: - pull_request: {} - push: - branches: [master] - -jobs: - pre-commit: - name: Linting - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v1 - - uses: 
actions/setup-python@v1 - - uses: pre-commit/action@v2.0.0 diff --git a/vendor/poetry-core/.github/workflows/downstream.yml b/vendor/poetry-core/.github/workflows/downstream.yml new file mode 100644 index 00000000..43a5d11c --- /dev/null +++ b/vendor/poetry-core/.github/workflows/downstream.yml @@ -0,0 +1,73 @@ +name: Poetry Downstream Tests + +on: + pull_request: {} + push: + branches: [main] + +jobs: + Tests: + name: ${{ matrix.os }} / ${{ matrix.python-version }} + runs-on: "${{ matrix.os }}-latest" + continue-on-error: ${{ matrix.experimental }} + strategy: + matrix: + os: [Ubuntu] + python-version: [3.9] + experimental: [false] + defaults: + run: + shell: bash + steps: + - uses: actions/checkout@v3 + with: + path: poetry-core + + - uses: actions/checkout@v3 + with: + repository: python-poetry/poetry + path: poetry + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python-version }} + + - name: Get full python version + id: full-python-version + run: echo ::set-output name=version::$(python -c "import sys; print('-'.join(str(v) for v in sys.version_info))") + + - name: Install poetry + run: pip install poetry + + - name: Configure poetry + run: poetry config virtualenvs.in-project true + + - name: Set up cache + uses: actions/cache@v3 + id: cache + with: + path: ./poetry/.venv + key: venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('**/poetry.lock') }} + + - name: Ensure cache is healthy + if: steps.cache.outputs.cache-hit == 'true' + working-directory: ./poetry + run: timeout 10s poetry run pip --version >/dev/null 2>&1 || rm -rf .venv + + - name: Update poetry-core version + working-directory: ./poetry + run: | + # workaround 1.1.13 bug not respecting direct origin dep changes + poetry run pip uninstall -y poetry-core + poetry add --lock ../poetry-core + git diff + + - name: Install poetry (downstream) + working-directory: ./poetry + run: poetry install + + # TODO: mark run as success even when this fails and add comment to PR instead + - name: Run poetry test suite + working-directory: ./poetry + run: poetry run pytest diff --git a/vendor/poetry-core/.github/workflows/integration.yml b/vendor/poetry-core/.github/workflows/integration.yml index 6a1c942d..a83ec945 100644 --- a/vendor/poetry-core/.github/workflows/integration.yml +++ b/vendor/poetry-core/.github/workflows/integration.yml @@ -1,25 +1,32 @@ name: Integration -on: [push, pull_request] +on: + pull_request: {} + push: + branches: [main] jobs: Tests: - runs-on: ubuntu-latest + name: ${{ matrix.os }} / ${{ matrix.python-version }} + runs-on: "${{ matrix.os }}-latest" strategy: matrix: - python-version: [3.8] + os: [Ubuntu, MacOS, Windows] + python-version: ["3.7", "3.8", "3.9", "3.10"] + fail-fast: false + defaults: + run: + shell: bash steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v3 with: python-version: ${{ matrix.python-version }} - name: Install tox - shell: bash run: pip install --upgrade tox - name: Execute integration tests - shell: bash run: tox -e integration diff --git a/vendor/poetry-core/.github/workflows/release.yml b/vendor/poetry-core/.github/workflows/release.yml index 8b948c7f..578b6f11 100644 --- a/vendor/poetry-core/.github/workflows/release.yml +++ b/vendor/poetry-core/.github/workflows/release.yml @@ -6,125 +6,47 @@ on: - '*.*.*' jobs: - - Linux: + Release: runs-on: 
ubuntu-latest

     steps:
-      - uses: actions/checkout@v2
-      - name: Get tag
-        id: tag
-        run: |
-          echo ::set-output name=tag::${GITHUB_REF#refs/tags/}
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v1
-        with:
-          python-version: 3.8
-      - name: Install and set up Poetry
-        run: |
-          curl -fsS -o get-poetry.py https://raw.githubusercontent.com/sdispater/poetry/master/get-poetry.py
-          python get-poetry.py -y
-      - name: Build distributions
-        run: |
-          source $HOME/.poetry/env
-          poetry build -vvv
-      - name: Upload distribution artifacts
-        uses: actions/upload-artifact@v1
-        with:
-          name: pendulum-dist
-          path: dist
+      - name: Checkout code
+        uses: actions/checkout@v3

-  MacOS:
-    runs-on: macos-latest
+      - name: Get tag
+        id: tag
+        run: echo ::set-output name=tag::${GITHUB_REF#refs/tags/}

-    steps:
-      - uses: actions/checkout@v2
-      - name: Get tag
-        id: tag
-        run: |
-          echo ::set-output name=tag::${GITHUB_REF#refs/tags/}
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v1
-        with:
-          python-version: 3.8
-      - name: Install and set up Poetry
-        run: |
-          curl -fsS -o get-poetry.py https://raw.githubusercontent.com/sdispater/poetry/master/get-poetry.py
-          python get-poetry.py -y
-      - name: Build distributions
-        run: |
-          source $HOME/.poetry/env
-          poetry build -vvv
-      - name: Upload distribution artifacts
-        uses: actions/upload-artifact@v1
-        with:
-          name: pendulum-dist
-          path: dist
+      - name: Set up Python 3.9
+        uses: actions/setup-python@v3
+        with:
+          python-version: "3.9"

-  Windows:
-    runs-on: windows-latest
+      - name: Install Poetry
+        run: |
+          curl -sSL https://install.python-poetry.org | python - -y

-    steps:
-      - uses: actions/checkout@v2
-      - name: Get tag
-        id: tag
-        shell: bash
-        run: |
-          echo ::set-output name=tag::${GITHUB_REF#refs/tags/}
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v1
-        with:
-          python-version: 3.8
-      - name: Install and setup Poetry
-        run: |
-          Invoke-WebRequest https://raw.githubusercontent.com/sdispater/poetry/master/get-poetry.py -O get-poetry.py
-          python get-poetry.py -y
-      - name: Build distributions
-        run: |
-          $env:Path += ";$env:Userprofile\.poetry\bin"
-          poetry build -vvv
-      - name: Upload distribution artifact
-        uses: actions/upload-artifact@v1
-        with:
-          name: pendulum-dist
-          path: dist
+      - name: Update PATH
+        run: echo "$HOME/.local/bin" >> $GITHUB_PATH

-  Release:
-    needs: [Linux, MacOS, Windows]
-    runs-on: ubuntu-latest
+      - name: Build project for distribution
+        run: poetry build

-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v2
-      - name: Get tag
-        id: tag
+      - name: Check Version
+        id: check-version
         run: |
-          echo ::set-output name=tag::${GITHUB_REF#refs/tags/}
-      - name: Download distribution artifact
-        uses: actions/download-artifact@master
+          [[ "$(poetry version --short)" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]] \
+            || echo ::set-output name=prerelease::true
+
+      - name: Create Release
+        uses: ncipollo/release-action@v1
         with:
-          name: pendulum-dist
-          path: dist
-      - name: Install and set up Poetry
-        run: |
-          curl -fsS -o get-poetry.py https://raw.githubusercontent.com/sdispater/poetry/master/get-poetry.py
-          python get-poetry.py -y
-      - name: Check distributions
-        run: |
-          ls -la dist
+          artifacts: "dist/*"
+          token: ${{ secrets.GITHUB_TOKEN }}
+          draft: false
+          prerelease: steps.check-version.outputs.prerelease == 'true'
+
       - name: Publish to PyPI
         env:
           POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_TOKEN }}
-        run: |
-          source $HOME/.poetry/env
-          poetry publish
-      - name: Create Release
-        id: create_release
-        uses: actions/create-release@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
-        with:
-          tag_name: ${{ steps.tag.outputs.tag }}
-          release_name: ${{ steps.tag.outputs.tag }}
-          draft: false
-          prerelease: false
+        run: poetry publish
diff --git a/vendor/poetry-core/.github/workflows/tests.yml b/vendor/poetry-core/.github/workflows/tests.yml
index e1ca9947..91e56e64 100644
--- a/vendor/poetry-core/.github/workflows/tests.yml
+++ b/vendor/poetry-core/.github/workflows/tests.yml
@@ -3,58 +3,52 @@ name: Tests
 on:
   pull_request: {}
   push:
-    branches: [master]
+    branches: [main]

 jobs:
-  Tests:
+  tests:
     name: ${{ matrix.os }} / ${{ matrix.python-version }}
-    runs-on: ${{ matrix.os }}-latest
+    runs-on: "${{ matrix.os }}-latest"
     strategy:
       matrix:
         os: [Ubuntu, MacOS, Windows]
-        python-version: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9, pypy2, pypy3]
-        exclude:
+        python-version: ["3.7", "3.8", "3.9", "3.10"]
+        include:
           - os: Ubuntu
-            python-version: pypy2
-          - os: MacOS
-            python-version: pypy2
-          - os: MacOS
-            python-version: pypy3
-          - os: Windows
-            python-version: pypy2
-          - os: Windows
-            python-version: pypy3
+            python-version: pypy-3.8
+      fail-fast: false
+    defaults:
+      run:
+        shell: bash

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3

       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v3
         with:
           python-version: ${{ matrix.python-version }}

-      - name: Get full python version
+      - name: Get full Python version
         id: full-python-version
-        shell: bash
         run: echo ::set-output name=version::$(python -c "import sys; print('-'.join(str(v) for v in sys.version_info))")

-      - name: Install poetry
-        shell: bash
+      - name: Bootstrap poetry
         run: |
-          curl -fsS -o get-poetry.py https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py
-          python get-poetry.py -y
-          echo "$HOME/.poetry/bin" >> $GITHUB_PATH
+          curl -sSL https://install.python-poetry.org | python - -y
+
+      - name: Update PATH
+        if: ${{ matrix.os != 'Windows' }}
+        run: echo "$HOME/.local/bin" >> $GITHUB_PATH
+
+      - name: Update Path for Windows
+        if: ${{ matrix.os == 'Windows' }}
+        run: echo "$APPDATA\Python\Scripts" >> $GITHUB_PATH

       - name: Configure poetry
-        shell: bash
         run: poetry config virtualenvs.in-project true

-      - name: Configure poetry installer
-        if: matrix.python-version == '2.7'
-        shell: bash
-        run: poetry config experimental.new-installer false
-
       - name: Set up cache
-        uses: actions/cache@v1
+        uses: actions/cache@v3
         id: cache
         with:
           path: .venv
@@ -62,13 +56,13 @@
       - name: Ensure cache is healthy
         if: steps.cache.outputs.cache-hit == 'true'
-        shell: bash
-        run: timeout 10s poetry run pip --version >/dev/null 2>&1 || rm -rf .venv
+        run: timeout 10s poetry run pip --version || rm -rf .venv

       - name: Install dependencies
-        shell: bash
         run: poetry install

       - name: Run pytest
-        shell: bash
-        run: poetry run pytest -q tests
+        run: poetry run python -m pytest -p no:sugar -q tests/
+
+      - name: Run mypy
+        run: poetry run mypy
diff --git a/vendor/poetry-core/.pre-commit-config.yaml b/vendor/poetry-core/.pre-commit-config.yaml
index 9779be83..1a6cd25c 100644
--- a/vendor/poetry-core/.pre-commit-config.yaml
+++ b/vendor/poetry-core/.pre-commit-config.yaml
@@ -1,46 +1,87 @@
+exclude: |
+  (?x)(
+      ^tests/.*/fixtures/.*
+      | ^src/poetry/core/_vendor
+  )
+
 repos:
-  - repo: https://github.com/psf/black
-    rev: 19.10b0
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.3.0
     hooks:
-      - id: black
-        exclude: ^poetry/core/_vendor
+      - id: trailing-whitespace
+        exclude: "vendors/patches/jsonschema.patch"
+      - id: end-of-file-fixer
+      - id: debug-statements
+      - id: check-merge-conflict
+      - id: check-case-conflict
+      - id: check-json
+      - id: check-toml
+      - id: check-yaml
+      - id: pretty-format-json
+        args:
+          - --autofix
+          - --no-ensure-ascii
+          - --no-sort-keys
+      - id: check-ast
+      - id: debug-statements
+      - id: check-docstring-first

-  - repo: https://gitlab.com/pycqa/flake8
-    rev: 3.8.3
+  - repo: https://github.com/pre-commit/pygrep-hooks
+    rev: v1.9.0
     hooks:
-      - id: flake8
-        exclude: |
-          (?x)(
-            ^poetry/core/utils/_typing.py$
-            | ^poetry/core/utils/_compat.py$
-            | ^poetry/core/_vendor
-          )
+      - id: python-check-mock-methods
+      - id: python-use-type-annotations
+      - id: python-check-blanket-noqa
+
+  - repo: https://github.com/asottile/yesqa
+    rev: v1.4.0
+    hooks:
+      - id: yesqa
+        additional_dependencies: &flake8_deps
+          - flake8-annotations==2.9.0
+          - flake8-broken-line==0.5.0
+          - flake8-bugbear==22.7.1
+          - flake8-comprehensions==3.10.0
+          - flake8-eradicate==1.3.0
+          - flake8-quotes==3.3.1
+          - flake8-simplify==0.19.3
+          - flake8-tidy-imports==4.8.0
+          - flake8-type-checking==2.1.2
+          - flake8-typing-imports==1.12.0
+          - flake8-use-fstring==1.4
+          - pep8-naming==0.13.1
+
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v2.37.3
+    hooks:
+      - id: pyupgrade
+        args:
+          - --py37-plus

-  - repo: https://github.com/pre-commit/mirrors-isort
-    rev: v5.4.2
+  - repo: https://github.com/hadialqattan/pycln
+    rev: v2.1.1
+    hooks:
+      - id: pycln
+        args: [--all]
+
+  - repo: https://github.com/pycqa/isort
+    rev: 5.10.1
     hooks:
       - id: isort
-        additional_dependencies: [toml]
+        args: [--add-import, from __future__ import annotations]
         exclude: |
           (?x)(
             ^.*/?setup\.py$
-            | ^poetry/core/_vendor
+            | tests/.*\.pyi$
           )

-  - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v3.2.0
+  - repo: https://github.com/psf/black
+    rev: 22.6.0
     hooks:
-      - id: trailing-whitespace
-        exclude: |
-          (?x)(
-            ^tests/.*/fixtures/.*
-            | ^poetry/core/_vendor
-          )
-      - id: end-of-file-fixer
-        exclude: |
-          (?x)(
-            ^tests/.*/fixtures/.*
-            | ^poetry/core/_vendor
-          )
-      - id: debug-statements
-        exclude: ^poetry/core/_vendor
+      - id: black
+
+  - repo: https://github.com/pycqa/flake8
+    rev: 5.0.4
+    hooks:
+      - id: flake8
+        additional_dependencies: *flake8_deps
diff --git a/vendor/poetry-core/CHANGELOG.md b/vendor/poetry-core/CHANGELOG.md
index c1dbea84..a9d283a8 100644
--- a/vendor/poetry-core/CHANGELOG.md
+++ b/vendor/poetry-core/CHANGELOG.md
@@ -1,50 +1,230 @@
 # Change Log

-## [1.0.8] - 2022-02-27
+## [1.1.0] - 2022-08-31
+
+- No functional changes.
+
+## [1.1.0rc3] - 2022-08-26
+
+### Fixed
+
+- Fixed an issue where a malformed URL was passed to pip when installing from a git subdirectory ([#451](https://github.com/python-poetry/poetry-core/pull/451)).
+
+## [1.1.0rc2] - 2022-08-26
+
+### Changed
+- Enabled setting `version` of `ProjectPackage` to support dynamically setting the project's package version (e.g. from a plugin) ([#447](https://github.com/python-poetry/poetry-core/pull/447)).
+
+### Fixed
+
+- Fixed an issue where the `authors` property was not detected ([#437](https://github.com/python-poetry/poetry-core/pull/437)).
+- Fixed an issue where submodules of git dependencies were not checked out ([#439](https://github.com/python-poetry/poetry-core/pull/439)).
+- Fixed an issue with Python constraints from markers ([#448](https://github.com/python-poetry/poetry-core/pull/448)).
+- Fixed an issue where the latest version of a git dependency was selected instead of the locked one ([#449](https://github.com/python-poetry/poetry-core/pull/449)).
+
+
+## [1.1.0rc1] - 2022-08-17
+
+### Changed
+
+- Replaced Poetry's helper method `canonicalize_name()` with `packaging.utils.canonicalize_name()` ([#418](https://github.com/python-poetry/poetry-core/pull/418)).
+- Removed unused code ([#419](https://github.com/python-poetry/poetry-core/pull/419)).
+
+### Fixed
+
+- Fixed an issue with markers that resulted in incorrectly resolved extra dependencies ([#415](https://github.com/python-poetry/poetry-core/pull/415)).
+- Fixed an issue where equal markers did not have the same hash ([#417](https://github.com/python-poetry/poetry-core/pull/417)).
+- Fixed `allows_any()` for local versions ([#433](https://github.com/python-poetry/poetry-core/pull/433)).
+- Fixed special cases of `next_major()`, `next_minor()`, etc. and deprecated ambiguous usage ([#434](https://github.com/python-poetry/poetry-core/pull/434)).
+- Fixed an issue with Python constraints from markers ([#436](https://github.com/python-poetry/poetry-core/pull/436)).
+
+
+## [1.1.0b3] - 2022-07-09
+
+### Added
+
+- Added support for valid PEP 517 projects with a build system other than poetry-core as directory dependencies ([#368](https://github.com/python-poetry/poetry-core/pull/368), [#377](https://github.com/python-poetry/poetry-core/pull/377)).
+- Added support for yanked files and releases according to PEP 592 ([#400](https://github.com/python-poetry/poetry-core/pull/400)).
+
+### Changed
+
+- Relaxed schema validation to allow additional properties ([#369](https://github.com/python-poetry/poetry-core/pull/369)).
+- Harmonized the string representation of dependencies ([#393](https://github.com/python-poetry/poetry-core/pull/393)).
+- Changed wheel name normalization to follow the most recent packaging specification ([#394](https://github.com/python-poetry/poetry-core/pull/394)).
+- Changed the equality check of direct origin dependencies so that constraints are no longer considered ([#405](https://github.com/python-poetry/poetry-core/pull/405)).
+- Deprecated `Dependency.set_constraint()` and replaced it with a `constraint` property for consistency ([#370](https://github.com/python-poetry/poetry-core/pull/370)).
+- Removed `Package.requires_extras` ([#374](https://github.com/python-poetry/poetry-core/pull/374)).
+- Improved marker handling ([#380](https://github.com/python-poetry/poetry-core/pull/380),
+[#383](https://github.com/python-poetry/poetry-core/pull/383),
+[#384](https://github.com/python-poetry/poetry-core/pull/384),
+[#390](https://github.com/python-poetry/poetry-core/pull/390),
+[#395](https://github.com/python-poetry/poetry-core/pull/395)).
+
+### Fixed
+
+- Fixed the hash method for `PackageSpecification`, `Package`, `Dependency` and their subclasses ([#370](https://github.com/python-poetry/poetry-core/pull/370)).
+- Fixed merging of the markers `python_version` and `python_full_version` ([#382](https://github.com/python-poetry/poetry-core/pull/382), [#388](https://github.com/python-poetry/poetry-core/pull/388)).
+- Fixed Python version normalization ([#385](https://github.com/python-poetry/poetry-core/pull/385), [#407](https://github.com/python-poetry/poetry-core/pull/407)).
+- Fixed an issue where version identifiers with a local version segment allowed non-local versions ([#396](https://github.com/python-poetry/poetry-core/pull/396)).
+- Fixed an issue where version identifiers without a post-release segment allowed post-releases ([#396](https://github.com/python-poetry/poetry-core/pull/396)).
+- Fixed script definitions that didn't work when extras were not explicitly defined ([#404](https://github.com/python-poetry/poetry-core/pull/404)).
+
+
+## [1.1.0b2] - 2022-05-24
+
+### Fixed
+
+- Fixed a regression where `poetry-core` no longer handled improper Python version constraints from package metadata ([#371](https://github.com/python-poetry/poetry-core/pull/371))
+- Fixed missing version bump in `poetry.core.__version__` ([#367](https://github.com/python-poetry/poetry-core/pull/367))
+
+### Improvements
+
+- Wheels generated by `poetry-core` now correctly identify the `Generator` metadata as `poetry-core` instead of `poetry` ([#367](https://github.com/python-poetry/poetry-core/pull/367))
+
+
+## [1.1.0b1] - 2022-05-23
+
+### Fixed
+
+- Fixed an issue where canonicalizing package names could lead to infinite loops ([#328](https://github.com/python-poetry/poetry-core/pull/328)).
+- Fixed an issue where versions weren't correctly normalized to PEP-440 ([#344](https://github.com/python-poetry/poetry-core/pull/344)).
+- Fixed an issue with the union of multi markers if one marker is a subset of the other marker ([#352](https://github.com/python-poetry/poetry-core/pull/352)).
+- Fixed an issue with markers which are not in disjunctive normal form (DNF) ([#347](https://github.com/python-poetry/poetry-core/pull/347)).
+- Fixed an issue where stub-only partial namespace packages were not recognized as packages ([#221](https://github.com/python-poetry/poetry-core/pull/221)).
+- Fixed an issue where PEP-508 URL requirements with extras were not parsed correctly ([#345](https://github.com/python-poetry/poetry-core/pull/345)).
+- Fixed an issue where PEP-508 strings with wildcard exclusion constraints were incorrectly exported ([#343](https://github.com/python-poetry/poetry-core/pull/343)).
+- Allow hidden directories on Windows bare repos ([#341](https://github.com/python-poetry/poetry-core/pull/341)).
+- Fixed an issue where dependencies with an epoch were parsed as empty ([#316](https://github.com/python-poetry/poetry-core/pull/316)).
+- Fixed an issue where a package consisting of multiple packages wasn't built correctly ([#292](https://github.com/python-poetry/poetry-core/pull/292)).

 ### Added

-- Add hooks according to PEP-660 for editable installs ([#257](https://github.com/python-poetry/poetry-core/pull/257)).
+- Added support for handling git URLs with subdirectories ([#288](https://github.com/python-poetry/poetry-core/pull/288)).
+- Added support for metadata files as described in PEP-658 for PEP-503 "simple" API repositories ([#333](https://github.com/python-poetry/poetry-core/pull/333)).
+
+### Changed
+
+- Renamed the dependency group of runtime dependencies from `default` to `main` ([#326](https://github.com/python-poetry/poetry-core/pull/326)).
+
+### Improvements
+
+- `poetry-core` is now completely type checked.
+- Improved the SemVer constraint parsing ([#327](https://github.com/python-poetry/poetry-core/pull/327)).
+- Improved the speed when cloning git repositories ([#290](https://github.com/python-poetry/poetry-core/pull/290)).

-## [1.0.7] - 2021-10-04
+## [1.1.0a7] - 2022-03-05

 ### Fixed

-- Fixed an issue where the wrong `git` executable could be used on Windows. ([#213](https://github.com/python-poetry/poetry-core/pull/213))
-- Fixed an issue where the Python 3.10 classifier was not automatically added. ([#215](https://github.com/python-poetry/poetry-core/pull/215))
+- Fixed an issue when evaluating `in/not in` markers ([#188](https://github.com/python-poetry/poetry-core/pull/188)).
+- Fixed an issue when parsing caret constraints with a leading zero ([#201](https://github.com/python-poetry/poetry-core/pull/201)).
+- Respect the format for explicitly included files when finding excluded files ([#228](https://github.com/python-poetry/poetry-core/pull/228)).
+- Fixed an issue where only the last location was used when multiple packages should be included ([#108](https://github.com/python-poetry/poetry-core/pull/108)).
+- Ensure that the package `description` contains no newline ([#219](https://github.com/python-poetry/poetry-core/pull/219)).
+- Fixed an issue where all default dependencies were removed instead of just the selected one ([#220](https://github.com/python-poetry/poetry-core/pull/220)).
+- Ensure that authors and maintainers are normalized ([#276](https://github.com/python-poetry/poetry-core/pull/276)).
+
+### Added
+
+- Add support for most of the guaranteed hashes ([#207](https://github.com/python-poetry/poetry-core/pull/207)).
+- Add support to declare multiple README files ([#248](https://github.com/python-poetry/poetry-core/pull/248)).
+- Add support for git subdirectories ([#192](https://github.com/python-poetry/poetry-core/pull/192)).
+- Add hooks according to PEP-660 for editable installs ([#182](https://github.com/python-poetry/poetry-core/pull/182)).
+- Add support for version epochs ([#264](https://github.com/python-poetry/poetry-core/pull/264)).
+
+### Changed
+
+- Drop Python 3.6 support ([#263](https://github.com/python-poetry/poetry-core/pull/263)).
+- Loosen the strictness when parsing version constraints to support invalid uses of wildcards, e.g. `>=3.*` ([#186](https://github.com/python-poetry/poetry-core/pull/186)).
+- No longer assume a default git branch name ([#192](https://github.com/python-poetry/poetry-core/pull/192)).
+- Sort package names in extras to make them reproducible ([#280](https://github.com/python-poetry/poetry-core/pull/280)).
+
+### Improvements
+- Improve marker handling ([#208](https://github.com/python-poetry/poetry-core/pull/208),
+[#282](https://github.com/python-poetry/poetry-core/pull/282),
+[#283](https://github.com/python-poetry/poetry-core/pull/283),
+[#284](https://github.com/python-poetry/poetry-core/pull/284),
+[#286](https://github.com/python-poetry/poetry-core/pull/286),
+[#291](https://github.com/python-poetry/poetry-core/pull/291),
+[#293](https://github.com/python-poetry/poetry-core/pull/293),
+[#294](https://github.com/python-poetry/poetry-core/pull/294),
+[#297](https://github.com/python-poetry/poetry-core/pull/297)).

-## [1.0.6] - 2021-09-21
+
+## [1.1.0a6] - 2021-07-30

 ### Added

-- Added support for more hash types when generating hashes. ([#207](https://github.com/python-poetry/poetry-core/pull/207))
+- Added support for dependency groups. ([#183](https://github.com/python-poetry/poetry-core/pull/183))
+
+## [1.1.0a5] - 2021-05-21

-## [1.0.5] - 2021-09-18
+### Added
+
+- Added support for script files in addition to standard entry points. ([#40](https://github.com/python-poetry/poetry-core/pull/40))

 ### Fixed

-- Fixed the copy of `Package` instances which led to file hashes not being available. ([#193](https://github.com/python-poetry/poetry-core/pull/193))
-- Fixed an issue where unsafe parameters could be passed to `git` commands. ([#203](https://github.com/python-poetry/poetry-core/pull/203))
-- Fixed an issue where the wrong `git` executable could be used on Windows. ([#205](https://github.com/python-poetry/poetry-core/pull/205))
+- Fixed an error in the way Python markers with a precision >= 3 were handled. ([#178](https://github.com/python-poetry/poetry-core/pull/178))

-## [1.0.4] - 2021-08-19
+## [1.1.0a4] - 2021-04-30
+
+### Changed
+
+- Files in source distributions now have a deterministic timestamp to improve reproducibility. ([#142](https://github.com/python-poetry/poetry-core/pull/142))
+
+### Fixed
+
+- Fixed an error where leading zeros in the local build part of version specifications were discarded. ([#167](https://github.com/python-poetry/poetry-core/pull/167))
+- Fixed the PEP 508 representation of file dependencies. ([#153](https://github.com/python-poetry/poetry-core/pull/153))
+- Fixed the copy of `Package` instances which led to file hashes not being available. ([#159](https://github.com/python-poetry/poetry-core/pull/159))
+- Fixed an error in the parsing of caret requirements with a pre-release lower bound. ([#171](https://github.com/python-poetry/poetry-core/pull/171))
+- Fixed an error where some pre-release versions were not flagged as pre-releases. ([#170](https://github.com/python-poetry/poetry-core/pull/170))
+
+
+## [1.1.0a3] - 2021-04-09

 ### Fixed

-- Fixed an error in the way python markers with a precision >= 3 were handled. ([#180](https://github.com/python-poetry/poetry-core/pull/180))
-- Fixed an error in the evaluation of `in/not in` markers ([#189](https://github.com/python-poetry/poetry-core/pull/189))
+- Fixed dependency markers not being properly copied when changing the constraint ([#162](https://github.com/python-poetry/poetry-core/pull/162)).

-## [1.0.3] - 2021-04-09
+## [1.1.0a2] - 2021-04-08

 ### Fixed

-- Fixed an error when handling single-digit Python markers ([#156](https://github.com/python-poetry/poetry-core/pull/156)).
-- Fixed dependency markers not being properly copied when changing the constraint ([#163](https://github.com/python-poetry/poetry-core/pull/163)).
+- Fixed performance regressions when parsing version constraints ([#152](https://github.com/python-poetry/poetry-core/pull/152)).
+- Fixed how local build versions are handled and compared ([#157](https://github.com/python-poetry/poetry-core/pull/157), [#158](https://github.com/python-poetry/poetry-core/pull/158)).
+- Fixed errors when parsing some environment markers ([#155](https://github.com/python-poetry/poetry-core/pull/155)).
+
+
+## [1.1.0a1] - 2021-03-30
+
+This version is the first to drop support for Python 2.7 and 3.5.
+
+If you are still using these versions, you should update the `requires` property of the `build-system` section
+to restrict the version of `poetry-core`:
+
+```toml
+[build-system]
+requires = ["poetry-core<1.1.0"]
+build-backend = "poetry.core.masonry.api"
+```
+
+### Changed
+
+- Dropped support for Python 2.7 and 3.5 ([#131](https://github.com/python-poetry/poetry-core/pull/131)).
+- Reorganized imports internally to improve performance ([#131](https://github.com/python-poetry/poetry-core/pull/131)).
+- Directory dependencies are now in non-develop mode by default ([#98](https://github.com/python-poetry/poetry-core/pull/98)).
+- Improved support for PEP 440 specific versions that do not abide by semantic versioning ([#140](https://github.com/python-poetry/poetry-core/pull/140)).
+
+### Fixed
+
+- Fixed the PEP 508 representation of path dependencies ([#141](https://github.com/python-poetry/poetry-core/pull/141)).

 ## [1.0.2] - 2021-02-05

@@ -163,7 +343,6 @@
 No changes.

 ## [1.0.0a6] - 2020-04-24

-
 ### Added

 - Added support for markers inverse ([#21](https://github.com/python-poetry/core/pull/21)).

@@ -186,13 +365,21 @@
 No changes.

 - Fixed support for stub-only packages ([#28](https://github.com/python-poetry/core/pull/28)).

-[Unreleased]: https://github.com/python-poetry/poetry-core/compare/1.0.8...1.0
-[1.0.8]: https://github.com/python-poetry/poetry-core/releases/tag/1.0.8
-[1.0.7]: https://github.com/python-poetry/poetry-core/releases/tag/1.0.7
-[1.0.6]: https://github.com/python-poetry/poetry-core/releases/tag/1.0.6
-[1.0.5]: https://github.com/python-poetry/poetry-core/releases/tag/1.0.5
-[1.0.4]: https://github.com/python-poetry/poetry-core/releases/tag/1.0.4
-[1.0.3]: https://github.com/python-poetry/poetry-core/releases/tag/1.0.3
+[Unreleased]: https://github.com/python-poetry/poetry-core/compare/1.1.0...main
+[1.1.0]: https://github.com/python-poetry/poetry-core/releases/tag/1.1.0
+[1.1.0rc3]: https://github.com/python-poetry/poetry-core/releases/tag/1.1.0rc3
+[1.1.0rc2]: https://github.com/python-poetry/poetry-core/releases/tag/1.1.0rc2
+[1.1.0rc1]: https://github.com/python-poetry/poetry-core/releases/tag/1.1.0rc1
+[1.1.0b3]: https://github.com/python-poetry/poetry-core/releases/tag/1.1.0b3
+[1.1.0b2]: https://github.com/python-poetry/poetry-core/releases/tag/1.1.0b2
+[1.1.0b1]: https://github.com/python-poetry/poetry-core/releases/tag/1.1.0b1
+[1.1.0a7]: https://github.com/python-poetry/poetry-core/releases/tag/1.1.0a7
+[1.1.0a6]: https://github.com/python-poetry/poetry-core/releases/tag/1.1.0a6
+[1.1.0a5]: https://github.com/python-poetry/poetry-core/releases/tag/1.1.0a5
+[1.1.0a4]: https://github.com/python-poetry/poetry-core/releases/tag/1.1.0a4
+[1.1.0a3]: https://github.com/python-poetry/poetry-core/releases/tag/1.1.0a3
+[1.1.0a2]: https://github.com/python-poetry/poetry-core/releases/tag/1.1.0a2
+[1.1.0a1]: https://github.com/python-poetry/poetry-core/releases/tag/1.1.0a1
 [1.0.2]: https://github.com/python-poetry/poetry-core/releases/tag/1.0.2
 [1.0.1]: https://github.com/python-poetry/poetry-core/releases/tag/1.0.1
 [1.0.0]: https://github.com/python-poetry/poetry-core/releases/tag/1.0.0
diff --git a/vendor/poetry-core/Makefile b/vendor/poetry-core/Makefile
index 36af0b13..6c0866ab 100644
--- a/vendor/poetry-core/Makefile
+++ b/vendor/poetry-core/Makefile
@@ -1,26 +1,30 @@
+SHELL := $(shell which bash) -e
 MAKEFILE_PATH := $(abspath $(lastword $(MAKEFILE_LIST)))
 ROOT_DIR := $(patsubst %/,%,$(dir $(MAKEFILE_PATH)))
 VENDOR_SRC := $(ROOT_DIR)/vendors
-VENDOR_DIR := $(ROOT_DIR)/poetry/core/_vendor
+VENDOR_DIR := $(ROOT_DIR)/src/poetry/core/_vendor
+VENDOR_TXT := $(VENDOR_DIR)/vendor.txt
 POETRY_BIN ?= $(shell which poetry)
-
 .PHONY: vendor/lock
-vendor/lock:
-	# regenerate lock file
-	@pushd $(VENDOR_SRC) && $(POETRY_BIN) lock
+vendor/lock: $(VENDOR_LOCK)
+	# regenerate lock file
+	@pushd $(VENDOR_SRC) && $(POETRY_BIN) lock --no-update

+.PHONY: vendor/sync
+vendor/sync:
 	# regenerate vendor.txt file (exported from lockfile)
-	@pushd $(VENDOR_SRC) && $(POETRY_BIN) export --without-hashes \
+	@pushd $(VENDOR_SRC) && $(POETRY_BIN) export --without-hashes 2> /dev/null \
 		| egrep -v "(importlib|zipp)" \
-		| sort > $(VENDOR_DIR)/vendor.txt
-
+		| sort > $(VENDOR_TXT)

-.PHONY: vendor/sync
-vendor/sync: | vendor/lock
 	# vendor packages
 	@vendoring sync

 	# strip out *.pyi stubs
 	@find "$(VENDOR_DIR)" -type f -name "*.pyi" -exec rm {} \;
+
+.PHONY: vendor/update
+vendor/update: | vendor/lock vendor/sync
+	@:
diff --git a/vendor/poetry-core/poetry.lock b/vendor/poetry-core/poetry.lock
index d79d5ee7..fab06e51 100644
--- a/vendor/poetry-core/poetry.lock
+++ b/vendor/poetry-core/poetry.lock
@@ -1,22 +1,3 @@
-[[package]]
-name = "appdirs"
-version = "1.4.4"
-description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "aspy.yaml"
-version = "1.3.0"
-description = "A few extensions to pyyaml."
-category = "dev"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-
-[package.dependencies]
-pyyaml = "*"
-
 [[package]]
 name = "atomicwrites"
 version = "1.4.0"
@@ -27,52 +8,42 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"

 [[package]]
 name = "attrs"
-version = "20.3.0"
+version = "21.4.0"
 description = "Classes Without Boilerplate"
 category = "dev"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-
-[package.extras]
-dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "furo", "sphinx", "pre-commit"]
-docs = ["furo", "sphinx", "zope.interface"]
-tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"]
-tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"]
-
-[[package]]
-name = "backports.functools-lru-cache"
-version = "1.6.1"
-description = "Backport of functools.lru_cache"
-category = "dev"
-optional = false
-python-versions = ">=2.6"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"

 [package.extras]
-docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"]
-testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-black-multipy", "pytest-cov"]
+dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
+docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
+tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
+tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"]

 [[package]]
-name = "backports.tempfile"
-version = "1.0"
-description = "Backport of new features in Python's tempfile module"
+name = "build"
+version = "0.7.0"
+description = "A simple, correct PEP517 package builder"
 category = "dev"
 optional = false
-python-versions = "*"
+python-versions = ">=3.6"

 [package.dependencies]
-"backports.weakref" = "*"
+colorama = {version = "*", markers = "os_name == \"nt\""}
+importlib-metadata = {version = ">=0.22", markers = "python_version < \"3.8\""}
+packaging = ">=19.0"
+pep517 = ">=0.9.1"
+tomli = ">=1.0.0"

-[[package]]
-name = "backports.weakref"
-version = "1.0.post1"
-description = "Backport of new features in Python's weakref module"
-category = "dev"
-optional = false
-python-versions = "*"
+[package.extras]
+docs = ["furo (>=2020.11.19b18)", "sphinx (>=3.0,<4.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)"]
+test = ["filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2)",
"pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "toml (>=0.10.0)", "wheel (>=0.36.0)"] +typing = ["importlib-metadata (>=4.6.4)", "mypy (==0.910)", "typing-extensions (>=3.7.4.3)"] +virtualenv = ["virtualenv (>=20.0.35)"] [[package]] name = "certifi" -version = "2020.12.5" +version = "2021.10.8" description = "Python package for providing Mozilla's CA Bundle." category = "dev" optional = false @@ -80,30 +51,34 @@ python-versions = "*" [[package]] name = "cfgv" -version = "2.0.1" +version = "3.3.1" description = "Validate configuration and produce human readable error messages." category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[package.dependencies] -six = "*" +python-versions = ">=3.6.1" [[package]] -name = "chardet" -version = "4.0.0" -description = "Universal encoding detector for Python 2 and 3" +name = "charset-normalizer" +version = "2.0.12" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.5.0" + +[package.extras] +unicode_backport = ["unicodedata2"] [[package]] name = "click" -version = "7.1.2" +version = "8.1.3" description = "Composable command line interface toolkit" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.7" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [[package]] name = "colorama" @@ -114,195 +89,158 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] -name = "configparser" -version = "4.0.2" -description = "Updated configparser from Python 3.7 for Python 2.6+." 
-category = "main" +name = "commonmark" +version = "0.9.1" +description = "Python parser for the CommonMark Markdown spec" +category = "dev" optional = false -python-versions = ">=2.6" +python-versions = "*" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2)", "pytest-flake8", "pytest-black-multipy"] - -[[package]] -name = "contextlib2" -version = "0.6.0.post1" -description = "Backports and enhancements for the contextlib module" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] [[package]] name = "coverage" -version = "5.4" +version = "6.4" description = "Code coverage measurement for Python" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +python-versions = ">=3.7" + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_version < \"3.11\" and extra == \"toml\""} [package.extras] -toml = ["toml"] +toml = ["tomli"] [[package]] name = "distlib" -version = "0.3.1" +version = "0.3.4" description = "Distribution utilities" category = "dev" optional = false python-versions = "*" -[[package]] -name = "enum34" -version = "1.1.10" -description = "Python 3.4 Enum backported to 3.3, 3.2, 3.1, 2.7, 2.6, 2.5, and 2.4" -category = "main" -optional = false -python-versions = "*" - [[package]] name = "filelock" -version = "3.0.12" +version = "3.7.0" description = "A platform independent file lock." category = "dev" optional = false -python-versions = "*" - -[[package]] -name = "funcsigs" -version = "1.0.2" -description = "Python function signatures from PEP362 for Python 2.6, 2.7 and 3.2+" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "functools32" -version = "3.2.3-2" -description = "Backport of the functools module from Python 3.2.3 for use on 2.7 and PyPy." 
-category = "main" -optional = false -python-versions = "*" +python-versions = ">=3.7" -[[package]] -name = "futures" -version = "3.3.0" -description = "Backport of the concurrent.futures package from Python 3" -category = "dev" -optional = false -python-versions = ">=2.6, <3" +[package.extras] +docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"] +testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"] [[package]] name = "identify" -version = "1.5.13" +version = "2.5.0" description = "File identification library for Python" category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +python-versions = ">=3.7" [package.extras] -license = ["editdistance"] +license = ["ukkonen"] [[package]] name = "idna" -version = "2.10" +version = "3.3" description = "Internationalized Domain Names in Applications (IDNA)" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.5" [[package]] name = "importlib-metadata" -version = "1.7.0" +version = "4.11.3" description = "Read metadata from Python packages" category = "main" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.7" [package.dependencies] -configparser = {version = ">=3.5", markers = "python_version < \"3\""} -contextlib2 = {version = "*", markers = "python_version < \"3\""} -pathlib2 = {version = "*", markers = "python_version < \"3\""} +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] -docs = ["sphinx", "rst.linker"] -testing = ["packaging", "pep517", "importlib-resources (>=1.3)"] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +perf = ["ipython"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] [[package]] name = "importlib-resources" -version = "3.2.1" +version = "5.7.1" description = "Read resources from Python packages" category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.7" [package.dependencies] -contextlib2 = {version = "*", markers = "python_version < \"3\""} -pathlib2 = {version = "*", markers = "python_version < \"3\""} -singledispatch = {version = "*", markers = "python_version < \"3.4\""} -typing = {version = "*", markers = "python_version < \"3.5\""} -zipp = {version = ">=0.4", markers = "python_version < \"3.8\""} +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["sphinx", "rst.linker", "jaraco.packaging"] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] + +[[package]] +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = "*" [[package]] name = "jsonschema" -version = "3.2.0" +version = "4.5.1" description = "An implementation of JSON Schema validation for Python" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" [package.dependencies] attrs = ">=17.4.0" 
-pyrsistent = ">=0.14.0" -six = ">=1.11.0" +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} [package.extras] -format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] -format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator (>0.1.0)", "rfc3339-validator"] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format_nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] [[package]] -name = "mock" -version = "3.0.5" -description = "Rolling backport of unittest.mock for all Pythons" +name = "mypy" +version = "0.960" +description = "Optional static typing for Python" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" [package.dependencies] -funcsigs = {version = ">=1", markers = "python_version < \"3.3\""} -six = "*" +mypy-extensions = ">=0.4.3" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""} +typing-extensions = ">=3.10" [package.extras] -build = ["twine", "wheel", "blurb"] -docs = ["sphinx"] -test = ["pytest", "pytest-cov"] +dmypy = ["psutil (>=4.0)"] +python2 = ["typed-ast (>=1.4.0,<2)"] +reports = ["lxml"] [[package]] -name = "more-itertools" -version = "5.0.0" -description = "More routines for operating on iterables, beyond itertools" +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." 
category = "dev" optional = false python-versions = "*" -[package.dependencies] -six = ">=1.0.0,<2.0.0" - -[[package]] -name = "more-itertools" -version = "8.6.0" -description = "More routines for operating on iterables, beyond itertools" -category = "dev" -optional = false -python-versions = ">=3.5" - [[package]] name = "nodeenv" -version = "1.5.0" +version = "1.6.0" description = "Node.js virtual environment builder" category = "dev" optional = false @@ -310,30 +248,18 @@ python-versions = "*" [[package]] name = "packaging" -version = "20.9" +version = "21.3" description = "Core utilities for Python packages" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" [package.dependencies] -pyparsing = ">=2.0.2" - -[[package]] -name = "pathlib2" -version = "2.3.5" -description = "Object-oriented filesystem paths" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -scandir = {version = "*", markers = "python_version < \"3.5\""} -six = "*" +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "pep517" -version = "0.8.2" +version = "0.12.0" description = "Wrappers to build Python packages using PEP 517 hooks" category = "dev" optional = false @@ -341,179 +267,186 @@ python-versions = "*" [package.dependencies] importlib_metadata = {version = "*", markers = "python_version < \"3.8\""} -toml = "*" +tomli = {version = ">=1.1.0", markers = "python_version >= \"3.6\""} zipp = {version = "*", markers = "python_version < \"3.8\""} +[[package]] +name = "platformdirs" +version = "2.5.2" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] +test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] + [[package]] name = "pluggy" -version = "0.13.1" +version = "1.0.0" description = "plugin and hook calling mechanisms for python" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" [package.dependencies] importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} [package.extras] dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "1.21.0" +version = "2.19.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.7" [package.dependencies] -"aspy.yaml" = "*" cfgv = ">=2.0.0" -futures = {version = "*", markers = "python_version < \"3.2\""} identify = ">=1.0.0" importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} -importlib-resources = {version = "*", markers = "python_version < \"3.7\""} nodeenv = ">=0.11.1" -pyyaml = "*" -six = "*" +pyyaml = ">=5.1" toml = "*" -virtualenv = ">=15.2" +virtualenv = ">=20.0.8" [[package]] name = "py" -version = "1.10.0" +version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pygments" +version = "2.12.0" +description = "Pygments is a syntax highlighting package written in Python." +category = "dev" +optional = false +python-versions = ">=3.6" [[package]] name = "pyparsing" -version = "2.4.7" -description = "Python parsing module" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" category = "dev" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=3.6.8" + +[package.extras] +diagrams = ["railroad-diagrams", "jinja2"] [[package]] name = "pyrsistent" -version = "0.16.1" +version = "0.18.1" description = "Persistent/Functional/Immutable data structures" category = "dev" optional = false -python-versions = ">=2.7" - -[package.dependencies] -six = "*" +python-versions = ">=3.7" [[package]] name = "pytest" -version = "4.6.11" +version = "7.1.2" description = "pytest: simple powerful testing with Python" category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +python-versions = ">=3.7" [package.dependencies] -atomicwrites = ">=1.0" -attrs = ">=17.4.0" -colorama = {version = "*", markers = "sys_platform == \"win32\" and python_version != \"3.4\""} -funcsigs = {version = ">=1.0", markers = "python_version < \"3.0\""} +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} -more-itertools = [ - {version = ">=4.0.0,<6.0.0", markers = "python_version <= \"2.7\""}, - {version = ">=4.0.0", markers = "python_version > \"2.7\""}, -] +iniconfig = "*" packaging = "*" -pathlib2 = {version = ">=2.2.0", markers = "python_version < \"3.6\""} -pluggy = ">=0.12,<1.0" -py = ">=1.5.0" -six = ">=1.10.0" -wcwidth = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +tomli = ">=1.0.0" [package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "nose", "requests", "mock"] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] [[package]] name = "pytest-cov" -version = "2.11.1" +version = "3.0.0" description = "Pytest plugin for measuring coverage." 
category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.6" [package.dependencies] -coverage = ">=5.2.1" +coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] -testing = ["fields", "hunter", "process-tests (==2.0.2)", "six", "pytest-xdist", "virtualenv"] +testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] [[package]] name = "pytest-mock" -version = "2.0.0" -description = "Thin-wrapper around the mock package for easier use with py.test" +version = "3.7.0" +description = "Thin-wrapper around the mock package for easier use with pytest" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.7" [package.dependencies] -mock = {version = "*", markers = "python_version < \"3.0\""} -pytest = ">=2.7" +pytest = ">=5.0" [package.extras] -dev = ["pre-commit", "tox"] +dev = ["pre-commit", "tox", "pytest-asyncio"] [[package]] name = "pyyaml" -version = "5.3.1" +version = "6.0" description = "YAML parser and emitter for Python" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.6" [[package]] name = "requests" -version = "2.25.1" +version = "2.27.1" description = "Python HTTP for Humans." category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.dependencies] certifi = ">=2017.4.17" -chardet = ">=3.0.2,<5" -idna = ">=2.5,<3" +charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} +idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} urllib3 = ">=1.21.1,<1.27" [package.extras] -security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] [[package]] -name = "scandir" -version = "1.10.0" -description = "scandir, a better directory iterator and faster os.walk()" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "singledispatch" -version = "3.4.0.3" -description = "This library brings functools.singledispatch from Python 3.4 to Python 2.6-3.3." 
+name = "rich" +version = "12.4.1" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6.3,<4.0.0" [package.dependencies] -six = "*" +commonmark = ">=0.9.0,<0.10.0" +pygments = ">=2.6.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] [[package]] name = "six" -version = "1.15.0" +version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" @@ -525,9 +458,17 @@ category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" + [[package]] name = "tox" -version = "3.21.4" +version = "3.25.0" description = "tox is a generic virtualenv management and test command line tool" category = "dev" optional = false @@ -546,32 +487,56 @@ virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2, [package.extras] docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"] -testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "psutil (>=5.6.1)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)", "pytest-xdist (>=1.22.2)", "pathlib2 (>=2.3.3)"] +testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)", "psutil (>=5.6.1)", "pathlib2 (>=2.3.3)"] [[package]] -name = "typing" -version = "3.7.4.3" -description = "Type Hints for Python" +name = "typed-ast" +version = "1.5.4" +description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "types-jsonschema" +version = "4.4.4" +description = "Typing stubs for jsonschema" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "types-setuptools" +version = "57.4.14" +description = "Typing stubs for setuptools" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "typing-extensions" +version = "4.2.0" +description = "Backported and Experimental Type Hints for Python 3.7+" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.7" [[package]] name = "urllib3" -version = "1.26.3" +version = "1.26.9" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" [package.extras] -brotli = ["brotlipy (>=0.6.0)"] +brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "vendoring" -version = "0.3.3" +version = "1.2.0" description = "A command line tool, to simplify vendoring pure Python dependencies." 
category = "dev" optional = false @@ -582,344 +547,371 @@ click = "*" jsonschema = "*" packaging = "*" requests = "*" +rich = "*" toml = "*" [package.extras] doc = ["sphinx"] -test = ["pytest", "pytest-xdist", "pytest-cov", "pytest-mock"] +test = ["pytest", "pytest-cov", "pytest-mock"] [[package]] name = "virtualenv" -version = "20.4.2" +version = "20.14.1" description = "Virtual Python Environment builder" category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [package.dependencies] -appdirs = ">=1.4.3,<2" distlib = ">=0.3.1,<1" -filelock = ">=3.0.0,<4" +filelock = ">=3.2,<4" importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} -importlib-resources = {version = ">=1.0", markers = "python_version < \"3.7\""} -pathlib2 = {version = ">=2.3.3,<3", markers = "python_version < \"3.4\" and sys_platform != \"win32\""} +platformdirs = ">=2,<3" six = ">=1.9.0,<2" [package.extras] -docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"] -testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)", "xonsh (>=0.9.16)"] - -[[package]] -name = "wcwidth" -version = "0.2.5" -description = "Measures the displayed width of unicode strings in a terminal" -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -"backports.functools-lru-cache" = {version = ">=1.2.1", markers = "python_version < \"3.2\""} +docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] +testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"] [[package]] name = "zipp" -version = "1.2.0" +version = "3.8.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false -python-versions = ">=2.7" - -[package.dependencies] -contextlib2 = {version = "*", markers = "python_version < \"3.4\""} +python-versions = ">=3.7" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] -testing = ["pathlib2", "unittest2", "jaraco.itertools", "func-timeout"] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "1.1" -python-versions = "~2.7 || ^3.5" -content-hash = "73b0c1d12930e6381fb1a47e8c903603ac18a2981ee899f9f675c4ebcbbbe3ff" +python-versions = "^3.7" +content-hash = "942983e12963ee3294081a5f38b6a66034dc7cd350b48a65f21e706a77f160d7" [metadata.files] -appdirs = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, -] -"aspy.yaml" = [ - {file = "aspy.yaml-1.3.0-py2.py3-none-any.whl", hash = "sha256:463372c043f70160a9ec950c3f1e4c3a82db5fca01d334b6bc89c7164d744bdc"}, - {file = 
"aspy.yaml-1.3.0.tar.gz", hash = "sha256:e7c742382eff2caed61f87a39d13f99109088e5e93f04d76eb8d4b28aa143f45"}, -] atomicwrites = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, ] attrs = [ - {file = "attrs-20.3.0-py2.py3-none-any.whl", hash = "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6"}, - {file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"}, + {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, + {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, ] -"backports.functools-lru-cache" = [ - {file = "backports.functools_lru_cache-1.6.1-py2.py3-none-any.whl", hash = "sha256:0bada4c2f8a43d533e4ecb7a12214d9420e66eb206d54bf2d682581ca4b80848"}, - {file = "backports.functools_lru_cache-1.6.1.tar.gz", hash = "sha256:8fde5f188da2d593bd5bc0be98d9abc46c95bb8a9dde93429570192ee6cc2d4a"}, -] -"backports.tempfile" = [ - {file = "backports.tempfile-1.0-py2.py3-none-any.whl", hash = "sha256:05aa50940946f05759696156a8c39be118169a0e0f94a49d0bb106503891ff54"}, - {file = "backports.tempfile-1.0.tar.gz", hash = "sha256:1c648c452e8770d759bdc5a5e2431209be70d25484e1be24876cf2168722c762"}, -] -"backports.weakref" = [ - {file = "backports.weakref-1.0.post1-py2.py3-none-any.whl", hash = "sha256:81bc9b51c0abc58edc76aefbbc68c62a787918ffe943a37947e162c3f8e19e82"}, - {file = "backports.weakref-1.0.post1.tar.gz", hash = "sha256:bc4170a29915f8b22c9e7c4939701859650f2eb84184aee80da329ac0b9825c2"}, +build = [ + {file = "build-0.7.0-py3-none-any.whl", hash = "sha256:21b7ebbd1b22499c4dac536abc7606696ea4d909fd755e00f09f3c0f2c05e3c8"}, + {file = "build-0.7.0.tar.gz", hash = "sha256:1aaadcd69338252ade4f7ec1265e1a19184bf916d84c9b7df095f423948cb89f"}, ] certifi = [ - {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, - {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, + {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, + {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, ] cfgv = [ - {file = "cfgv-2.0.1-py2.py3-none-any.whl", hash = "sha256:fbd93c9ab0a523bf7daec408f3be2ed99a980e20b2d19b50fc184ca6b820d289"}, - {file = "cfgv-2.0.1.tar.gz", hash = "sha256:edb387943b665bf9c434f717bf630fa78aecd53d5900d2e05da6ad6048553144"}, + {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, + {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, ] -chardet = [ - {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, - {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, +charset-normalizer = [ + {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, + {file = 
"charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, ] click = [ - {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, - {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, ] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] -configparser = [ - {file = "configparser-4.0.2-py2.py3-none-any.whl", hash = "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c"}, - {file = "configparser-4.0.2.tar.gz", hash = "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df"}, -] -contextlib2 = [ - {file = "contextlib2-0.6.0.post1-py2.py3-none-any.whl", hash = "sha256:3355078a159fbb44ee60ea80abd0d87b80b78c248643b49aa6d94673b413609b"}, - {file = "contextlib2-0.6.0.post1.tar.gz", hash = "sha256:01f490098c18b19d2bd5bb5dc445b2054d2fa97f09a4280ba2c5f3c394c8162e"}, +commonmark = [ + {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, + {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, ] coverage = [ - {file = "coverage-5.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:6d9c88b787638a451f41f97446a1c9fd416e669b4d9717ae4615bd29de1ac135"}, - {file = "coverage-5.4-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:66a5aae8233d766a877c5ef293ec5ab9520929c2578fd2069308a98b7374ea8c"}, - {file = "coverage-5.4-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9754a5c265f991317de2bac0c70a746efc2b695cf4d49f5d2cddeac36544fb44"}, - {file = "coverage-5.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:fbb17c0d0822684b7d6c09915677a32319f16ff1115df5ec05bdcaaee40b35f3"}, - {file = "coverage-5.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:b7f7421841f8db443855d2854e25914a79a1ff48ae92f70d0a5c2f8907ab98c9"}, - {file = "coverage-5.4-cp27-cp27m-win32.whl", hash = "sha256:4a780807e80479f281d47ee4af2eb2df3e4ccf4723484f77da0bb49d027e40a1"}, - {file = "coverage-5.4-cp27-cp27m-win_amd64.whl", hash = "sha256:87c4b38288f71acd2106f5d94f575bc2136ea2887fdb5dfe18003c881fa6b370"}, - {file = "coverage-5.4-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:c6809ebcbf6c1049002b9ac09c127ae43929042ec1f1dbd8bb1615f7cd9f70a0"}, - {file = "coverage-5.4-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ba7ca81b6d60a9f7a0b4b4e175dcc38e8fef4992673d9d6e6879fd6de00dd9b8"}, - {file = "coverage-5.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:89fc12c6371bf963809abc46cced4a01ca4f99cba17be5e7d416ed7ef1245d19"}, - {file = "coverage-5.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a8eb7785bd23565b542b01fb39115a975fefb4a82f23d407503eee2c0106247"}, - {file = "coverage-5.4-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:7e40d3f8eb472c1509b12ac2a7e24158ec352fc8567b77ab02c0db053927e339"}, - {file = "coverage-5.4-cp35-cp35m-manylinux1_i686.whl", hash = 
"sha256:1ccae21a076d3d5f471700f6d30eb486da1626c380b23c70ae32ab823e453337"}, - {file = "coverage-5.4-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:755c56beeacac6a24c8e1074f89f34f4373abce8b662470d3aa719ae304931f3"}, - {file = "coverage-5.4-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:322549b880b2d746a7672bf6ff9ed3f895e9c9f108b714e7360292aa5c5d7cf4"}, - {file = "coverage-5.4-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:60a3307a84ec60578accd35d7f0c71a3a971430ed7eca6567399d2b50ef37b8c"}, - {file = "coverage-5.4-cp35-cp35m-win32.whl", hash = "sha256:1375bb8b88cb050a2d4e0da901001347a44302aeadb8ceb4b6e5aa373b8ea68f"}, - {file = "coverage-5.4-cp35-cp35m-win_amd64.whl", hash = "sha256:16baa799ec09cc0dcb43a10680573269d407c159325972dd7114ee7649e56c66"}, - {file = "coverage-5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2f2cf7a42d4b7654c9a67b9d091ec24374f7c58794858bff632a2039cb15984d"}, - {file = "coverage-5.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:b62046592b44263fa7570f1117d372ae3f310222af1fc1407416f037fb3af21b"}, - {file = "coverage-5.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:812eaf4939ef2284d29653bcfee9665f11f013724f07258928f849a2306ea9f9"}, - {file = "coverage-5.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:859f0add98707b182b4867359e12bde806b82483fb12a9ae868a77880fc3b7af"}, - {file = "coverage-5.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:04b14e45d6a8e159c9767ae57ecb34563ad93440fc1b26516a89ceb5b33c1ad5"}, - {file = "coverage-5.4-cp36-cp36m-win32.whl", hash = "sha256:ebfa374067af240d079ef97b8064478f3bf71038b78b017eb6ec93ede1b6bcec"}, - {file = "coverage-5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:84df004223fd0550d0ea7a37882e5c889f3c6d45535c639ce9802293b39cd5c9"}, - {file = "coverage-5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1b811662ecf72eb2d08872731636aee6559cae21862c36f74703be727b45df90"}, - {file = "coverage-5.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6b588b5cf51dc0fd1c9e19f622457cc74b7d26fe295432e434525f1c0fae02bc"}, - {file = "coverage-5.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:3fe50f1cac369b02d34ad904dfe0771acc483f82a1b54c5e93632916ba847b37"}, - {file = "coverage-5.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:32ab83016c24c5cf3db2943286b85b0a172dae08c58d0f53875235219b676409"}, - {file = "coverage-5.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:68fb816a5dd901c6aff352ce49e2a0ffadacdf9b6fae282a69e7a16a02dad5fb"}, - {file = "coverage-5.4-cp37-cp37m-win32.whl", hash = "sha256:a636160680c6e526b84f85d304e2f0bb4e94f8284dd765a1911de9a40450b10a"}, - {file = "coverage-5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:bb32ca14b4d04e172c541c69eec5f385f9a075b38fb22d765d8b0ce3af3a0c22"}, - {file = "coverage-5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4d7165a4e8f41eca6b990c12ee7f44fef3932fac48ca32cecb3a1b2223c21f"}, - {file = "coverage-5.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:a565f48c4aae72d1d3d3f8e8fb7218f5609c964e9c6f68604608e5958b9c60c3"}, - {file = "coverage-5.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:fff1f3a586246110f34dc762098b5afd2de88de507559e63553d7da643053786"}, - {file = "coverage-5.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:a839e25f07e428a87d17d857d9935dd743130e77ff46524abb992b962eb2076c"}, - {file = "coverage-5.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:6625e52b6f346a283c3d563d1fd8bae8956daafc64bb5bbd2b8f8a07608e3994"}, - {file = "coverage-5.4-cp38-cp38-win32.whl", hash = 
"sha256:5bee3970617b3d74759b2d2df2f6a327d372f9732f9ccbf03fa591b5f7581e39"}, - {file = "coverage-5.4-cp38-cp38-win_amd64.whl", hash = "sha256:03ed2a641e412e42cc35c244508cf186015c217f0e4d496bf6d7078ebe837ae7"}, - {file = "coverage-5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:14a9f1887591684fb59fdba8feef7123a0da2424b0652e1b58dd5b9a7bb1188c"}, - {file = "coverage-5.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9564ac7eb1652c3701ac691ca72934dd3009997c81266807aef924012df2f4b3"}, - {file = "coverage-5.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:0f48fc7dc82ee14aeaedb986e175a429d24129b7eada1b7e94a864e4f0644dde"}, - {file = "coverage-5.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:107d327071061fd4f4a2587d14c389a27e4e5c93c7cba5f1f59987181903902f"}, - {file = "coverage-5.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:0cdde51bfcf6b6bd862ee9be324521ec619b20590787d1655d005c3fb175005f"}, - {file = "coverage-5.4-cp39-cp39-win32.whl", hash = "sha256:c67734cff78383a1f23ceba3b3239c7deefc62ac2b05fa6a47bcd565771e5880"}, - {file = "coverage-5.4-cp39-cp39-win_amd64.whl", hash = "sha256:c669b440ce46ae3abe9b2d44a913b5fd86bb19eb14a8701e88e3918902ecd345"}, - {file = "coverage-5.4-pp36-none-any.whl", hash = "sha256:c0ff1c1b4d13e2240821ef23c1efb1f009207cb3f56e16986f713c2b0e7cd37f"}, - {file = "coverage-5.4-pp37-none-any.whl", hash = "sha256:cd601187476c6bed26a0398353212684c427e10a903aeafa6da40c63309d438b"}, - {file = "coverage-5.4.tar.gz", hash = "sha256:6d2e262e5e8da6fa56e774fb8e2643417351427604c2b177f8e8c5f75fc928ca"}, + {file = "coverage-6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:50ed480b798febce113709846b11f5d5ed1e529c88d8ae92f707806c50297abf"}, + {file = "coverage-6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:26f8f92699756cb7af2b30720de0c5bb8d028e923a95b6d0c891088025a1ac8f"}, + {file = "coverage-6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60c2147921da7f4d2d04f570e1838db32b95c5509d248f3fe6417e91437eaf41"}, + {file = "coverage-6.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:750e13834b597eeb8ae6e72aa58d1d831b96beec5ad1d04479ae3772373a8088"}, + {file = "coverage-6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af5b9ee0fc146e907aa0f5fb858c3b3da9199d78b7bb2c9973d95550bd40f701"}, + {file = "coverage-6.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a022394996419142b33a0cf7274cb444c01d2bb123727c4bb0b9acabcb515dea"}, + {file = "coverage-6.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5a78cf2c43b13aa6b56003707c5203f28585944c277c1f3f109c7b041b16bd39"}, + {file = "coverage-6.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9229d074e097f21dfe0643d9d0140ee7433814b3f0fc3706b4abffd1e3038632"}, + {file = "coverage-6.4-cp310-cp310-win32.whl", hash = "sha256:fb45fe08e1abc64eb836d187b20a59172053999823f7f6ef4f18a819c44ba16f"}, + {file = "coverage-6.4-cp310-cp310-win_amd64.whl", hash = "sha256:3cfd07c5889ddb96a401449109a8b97a165be9d67077df6802f59708bfb07720"}, + {file = "coverage-6.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:03014a74023abaf5a591eeeaf1ac66a73d54eba178ff4cb1fa0c0a44aae70383"}, + {file = "coverage-6.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c82f2cd69c71698152e943f4a5a6b83a3ab1db73b88f6e769fabc86074c3b08"}, + {file = "coverage-6.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7b546cf2b1974ddc2cb222a109b37c6ed1778b9be7e6b0c0bc0cf0438d9e45a6"}, + {file = "coverage-6.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc173f1ce9ffb16b299f51c9ce53f66a62f4d975abe5640e976904066f3c835d"}, + {file = "coverage-6.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c53ad261dfc8695062fc8811ac7c162bd6096a05a19f26097f411bdf5747aee7"}, + {file = "coverage-6.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:eef5292b60b6de753d6e7f2d128d5841c7915fb1e3321c3a1fe6acfe76c38052"}, + {file = "coverage-6.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:543e172ce4c0de533fa892034cce260467b213c0ea8e39da2f65f9a477425211"}, + {file = "coverage-6.4-cp37-cp37m-win32.whl", hash = "sha256:00c8544510f3c98476bbd58201ac2b150ffbcce46a8c3e4fb89ebf01998f806a"}, + {file = "coverage-6.4-cp37-cp37m-win_amd64.whl", hash = "sha256:b84ab65444dcc68d761e95d4d70f3cfd347ceca5a029f2ffec37d4f124f61311"}, + {file = "coverage-6.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d548edacbf16a8276af13063a2b0669d58bbcfca7c55a255f84aac2870786a61"}, + {file = "coverage-6.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:033ebec282793bd9eb988d0271c211e58442c31077976c19c442e24d827d356f"}, + {file = "coverage-6.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:742fb8b43835078dd7496c3c25a1ec8d15351df49fb0037bffb4754291ef30ce"}, + {file = "coverage-6.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d55fae115ef9f67934e9f1103c9ba826b4c690e4c5bcf94482b8b2398311bf9c"}, + {file = "coverage-6.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd698341626f3c77784858427bad0cdd54a713115b423d22ac83a28303d1d95"}, + {file = "coverage-6.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:62d382f7d77eeeaff14b30516b17bcbe80f645f5cf02bb755baac376591c653c"}, + {file = "coverage-6.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:016d7f5cf1c8c84f533a3c1f8f36126fbe00b2ec0ccca47cc5731c3723d327c6"}, + {file = "coverage-6.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:69432946f154c6add0e9ede03cc43b96e2ef2733110a77444823c053b1ff5166"}, + {file = "coverage-6.4-cp38-cp38-win32.whl", hash = "sha256:83bd142cdec5e4a5c4ca1d4ff6fa807d28460f9db919f9f6a31babaaa8b88426"}, + {file = "coverage-6.4-cp38-cp38-win_amd64.whl", hash = "sha256:4002f9e8c1f286e986fe96ec58742b93484195defc01d5cc7809b8f7acb5ece3"}, + {file = "coverage-6.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e4f52c272fdc82e7c65ff3f17a7179bc5f710ebc8ce8a5cadac81215e8326740"}, + {file = "coverage-6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b5578efe4038be02d76c344007b13119b2b20acd009a88dde8adec2de4f630b5"}, + {file = "coverage-6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8099ea680201c2221f8468c372198ceba9338a5fec0e940111962b03b3f716a"}, + {file = "coverage-6.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a00441f5ea4504f5abbc047589d09e0dc33eb447dc45a1a527c8b74bfdd32c65"}, + {file = "coverage-6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e76bd16f0e31bc2b07e0fb1379551fcd40daf8cdf7e24f31a29e442878a827c"}, + {file = "coverage-6.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8d2e80dd3438e93b19e1223a9850fa65425e77f2607a364b6fd134fcd52dc9df"}, + {file = 
"coverage-6.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:341e9c2008c481c5c72d0e0dbf64980a4b2238631a7f9780b0fe2e95755fb018"}, + {file = "coverage-6.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:21e6686a95025927775ac501e74f5940cdf6fe052292f3a3f7349b0abae6d00f"}, + {file = "coverage-6.4-cp39-cp39-win32.whl", hash = "sha256:968ed5407f9460bd5a591cefd1388cc00a8f5099de9e76234655ae48cfdbe2c3"}, + {file = "coverage-6.4-cp39-cp39-win_amd64.whl", hash = "sha256:e35217031e4b534b09f9b9a5841b9344a30a6357627761d4218818b865d45055"}, + {file = "coverage-6.4-pp36.pp37.pp38-none-any.whl", hash = "sha256:e637ae0b7b481905358624ef2e81d7fb0b1af55f5ff99f9ba05442a444b11e45"}, + {file = "coverage-6.4.tar.gz", hash = "sha256:727dafd7f67a6e1cad808dc884bd9c5a2f6ef1f8f6d2f22b37b96cb0080d4f49"}, ] distlib = [ - {file = "distlib-0.3.1-py2.py3-none-any.whl", hash = "sha256:8c09de2c67b3e7deef7184574fc060ab8a793e7adbb183d942c389c8b13c52fb"}, - {file = "distlib-0.3.1.zip", hash = "sha256:edf6116872c863e1aa9d5bb7cb5e05a022c519a4594dc703843343a9ddd9bff1"}, -] -enum34 = [ - {file = "enum34-1.1.10-py2-none-any.whl", hash = "sha256:a98a201d6de3f2ab3db284e70a33b0f896fbf35f8086594e8c9e74b909058d53"}, - {file = "enum34-1.1.10-py3-none-any.whl", hash = "sha256:c3858660960c984d6ab0ebad691265180da2b43f07e061c0f8dca9ef3cffd328"}, - {file = "enum34-1.1.10.tar.gz", hash = "sha256:cce6a7477ed816bd2542d03d53db9f0db935dd013b70f336a95c73979289f248"}, + {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"}, + {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"}, ] filelock = [ - {file = "filelock-3.0.12-py3-none-any.whl", hash = "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"}, - {file = "filelock-3.0.12.tar.gz", hash = "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59"}, -] -funcsigs = [ - {file = "funcsigs-1.0.2-py2.py3-none-any.whl", hash = "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca"}, - {file = "funcsigs-1.0.2.tar.gz", hash = "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"}, -] -functools32 = [ - {file = "functools32-3.2.3-2.tar.gz", hash = "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d"}, - {file = "functools32-3.2.3-2.zip", hash = "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0"}, -] -futures = [ - {file = "futures-3.3.0-py2-none-any.whl", hash = "sha256:49b3f5b064b6e3afc3316421a3f25f66c137ae88f068abbf72830170033c5e16"}, - {file = "futures-3.3.0.tar.gz", hash = "sha256:7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794"}, + {file = "filelock-3.7.0-py3-none-any.whl", hash = "sha256:c7b5fdb219b398a5b28c8e4c1893ef5f98ece6a38c6ab2c22e26ec161556fed6"}, + {file = "filelock-3.7.0.tar.gz", hash = "sha256:b795f1b42a61bbf8ec7113c341dad679d772567b936fbd1bf43c9a238e673e20"}, ] identify = [ - {file = "identify-1.5.13-py2.py3-none-any.whl", hash = "sha256:9dfb63a2e871b807e3ba62f029813552a24b5289504f5b071dea9b041aee9fe4"}, - {file = "identify-1.5.13.tar.gz", hash = "sha256:70b638cf4743f33042bebb3b51e25261a0a10e80f978739f17e7fd4837664a66"}, + {file = "identify-2.5.0-py2.py3-none-any.whl", hash = "sha256:3acfe15a96e4272b4ec5662ee3e231ceba976ef63fd9980ed2ce9cc415df393f"}, + {file = "identify-2.5.0.tar.gz", hash = "sha256:c83af514ea50bf2be2c4a3f2fb349442b59dc87284558ae9ff54191bff3541d2"}, ] idna = [ - {file = "idna-2.10-py2.py3-none-any.whl", 
hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, - {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, ] importlib-metadata = [ - {file = "importlib_metadata-1.7.0-py2.py3-none-any.whl", hash = "sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070"}, - {file = "importlib_metadata-1.7.0.tar.gz", hash = "sha256:90bb658cdbbf6d1735b6341ce708fc7024a3e14e99ffdc5783edea9f9b077f83"}, + {file = "importlib_metadata-4.11.3-py3-none-any.whl", hash = "sha256:1208431ca90a8cca1a6b8af391bb53c1a2db74e5d1cef6ddced95d4b2062edc6"}, + {file = "importlib_metadata-4.11.3.tar.gz", hash = "sha256:ea4c597ebf37142f827b8f39299579e31685c31d3a438b59f469406afd0f2539"}, ] importlib-resources = [ - {file = "importlib_resources-3.2.1-py2.py3-none-any.whl", hash = "sha256:e2860cf0c4bc999947228d18be154fa3779c5dde0b882bd2d7b3f4d25e698bd6"}, - {file = "importlib_resources-3.2.1.tar.gz", hash = "sha256:a9fe213ab6452708ec1b3f4ec6f2881b8ab3645cb4e5efb7fea2bbf05a91db3b"}, -] -jsonschema = [ - {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, - {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, + {file = "importlib_resources-5.7.1-py3-none-any.whl", hash = "sha256:e447dc01619b1e951286f3929be820029d48c75eb25d265c28b92a16548212b8"}, + {file = "importlib_resources-5.7.1.tar.gz", hash = "sha256:b6062987dfc51f0fcb809187cffbd60f35df7acb4589091f154214af6d0d49d3"}, ] -mock = [ - {file = "mock-3.0.5-py2.py3-none-any.whl", hash = "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"}, - {file = "mock-3.0.5.tar.gz", hash = "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3"}, +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] -more-itertools = [ - {file = "more-itertools-5.0.0.tar.gz", hash = "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4"}, - {file = "more_itertools-5.0.0-py2-none-any.whl", hash = "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc"}, - {file = "more_itertools-5.0.0-py3-none-any.whl", hash = "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"}, - {file = "more-itertools-8.6.0.tar.gz", hash = "sha256:b3a9005928e5bed54076e6e549c792b306fddfe72b2d1d22dd63d42d5d3899cf"}, - {file = "more_itertools-8.6.0-py3-none-any.whl", hash = "sha256:8e1a2a43b2f2727425f2b5839587ae37093f19153dc26c0927d1048ff6557330"}, +jsonschema = [ + {file = "jsonschema-4.5.1-py3-none-any.whl", hash = "sha256:71b5e39324422543546572954ce71c67728922c104902cb7ce252e522235b33f"}, + {file = "jsonschema-4.5.1.tar.gz", hash = "sha256:7c6d882619340c3347a1bf7315e147e6d3dae439033ae6383d6acb908c101dfc"}, +] +mypy = [ + {file = "mypy-0.960-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3a3e525cd76c2c4f90f1449fd034ba21fcca68050ff7c8397bb7dd25dd8b8248"}, + {file = "mypy-0.960-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:7a76dc4f91e92db119b1be293892df8379b08fd31795bb44e0ff84256d34c251"}, + {file = "mypy-0.960-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ffdad80a92c100d1b0fe3d3cf1a4724136029a29afe8566404c0146747114382"}, + {file = "mypy-0.960-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7d390248ec07fa344b9f365e6ed9d205bd0205e485c555bed37c4235c868e9d5"}, + {file = "mypy-0.960-cp310-cp310-win_amd64.whl", hash = "sha256:925aa84369a07846b7f3b8556ccade1f371aa554f2bd4fb31cb97a24b73b036e"}, + {file = "mypy-0.960-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:239d6b2242d6c7f5822163ee082ef7a28ee02e7ac86c35593ef923796826a385"}, + {file = "mypy-0.960-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f1ba54d440d4feee49d8768ea952137316d454b15301c44403db3f2cb51af024"}, + {file = "mypy-0.960-cp36-cp36m-win_amd64.whl", hash = "sha256:cb7752b24528c118a7403ee955b6a578bfcf5879d5ee91790667c8ea511d2085"}, + {file = "mypy-0.960-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:826a2917c275e2ee05b7c7b736c1e6549a35b7ea5a198ca457f8c2ebea2cbecf"}, + {file = "mypy-0.960-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3eabcbd2525f295da322dff8175258f3fc4c3eb53f6d1929644ef4d99b92e72d"}, + {file = "mypy-0.960-cp37-cp37m-win_amd64.whl", hash = "sha256:f47322796c412271f5aea48381a528a613f33e0a115452d03ae35d673e6064f8"}, + {file = "mypy-0.960-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2c7f8bb9619290836a4e167e2ef1f2cf14d70e0bc36c04441e41487456561409"}, + {file = "mypy-0.960-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fbfb873cf2b8d8c3c513367febde932e061a5f73f762896826ba06391d932b2a"}, + {file = "mypy-0.960-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc537885891382e08129d9862553b3d00d4be3eb15b8cae9e2466452f52b0117"}, + {file = "mypy-0.960-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:481f98c6b24383188c928f33dd2f0776690807e12e9989dd0419edd5c74aa53b"}, + {file = "mypy-0.960-cp38-cp38-win_amd64.whl", hash = "sha256:29dc94d9215c3eb80ac3c2ad29d0c22628accfb060348fd23d73abe3ace6c10d"}, + {file = "mypy-0.960-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:33d53a232bb79057f33332dbbb6393e68acbcb776d2f571ba4b1d50a2c8ba873"}, + {file = "mypy-0.960-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8d645e9e7f7a5da3ec3bbcc314ebb9bb22c7ce39e70367830eb3c08d0140b9ce"}, + {file = "mypy-0.960-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:85cf2b14d32b61db24ade8ac9ae7691bdfc572a403e3cb8537da936e74713275"}, + {file = "mypy-0.960-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a85a20b43fa69efc0b955eba1db435e2ffecb1ca695fe359768e0503b91ea89f"}, + {file = "mypy-0.960-cp39-cp39-win_amd64.whl", hash = "sha256:0ebfb3f414204b98c06791af37a3a96772203da60636e2897408517fcfeee7a8"}, + {file = "mypy-0.960-py3-none-any.whl", hash = "sha256:bfd4f6536bd384c27c392a8b8f790fd0ed5c0cf2f63fc2fed7bce56751d53026"}, + {file = "mypy-0.960.tar.gz", hash = "sha256:d4fccf04c1acf750babd74252e0f2db6bd2ac3aa8fe960797d9f3ef41cf2bfd4"}, +] +mypy-extensions = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] 
nodeenv = [ - {file = "nodeenv-1.5.0-py2.py3-none-any.whl", hash = "sha256:5304d424c529c997bc888453aeaa6362d242b6b4631e90f3d4bf1b290f1c84a9"}, - {file = "nodeenv-1.5.0.tar.gz", hash = "sha256:ab45090ae383b716c4ef89e690c41ff8c2b257b85b309f01f3654df3d084bd7c"}, + {file = "nodeenv-1.6.0-py2.py3-none-any.whl", hash = "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"}, + {file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"}, ] packaging = [ - {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"}, - {file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"}, -] -pathlib2 = [ - {file = "pathlib2-2.3.5-py2.py3-none-any.whl", hash = "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db"}, - {file = "pathlib2-2.3.5.tar.gz", hash = "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868"}, + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] pep517 = [ - {file = "pep517-0.8.2-py2.py3-none-any.whl", hash = "sha256:576c480be81f3e1a70a16182c762311eb80d1f8a7b0d11971e5234967d7a342c"}, - {file = "pep517-0.8.2.tar.gz", hash = "sha256:8e6199cf1288d48a0c44057f112acf18aa5ebabbf73faa242f598fbe145ba29e"}, + {file = "pep517-0.12.0-py2.py3-none-any.whl", hash = "sha256:dd884c326898e2c6e11f9e0b64940606a93eb10ea022a2e067959f3a110cf161"}, + {file = "pep517-0.12.0.tar.gz", hash = "sha256:931378d93d11b298cf511dd634cf5ea4cb249a28ef84160b3247ee9afb4e8ab0"}, +] +platformdirs = [ + {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, + {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, ] pluggy = [ - {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, - {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] pre-commit = [ - {file = "pre_commit-1.21.0-py2.py3-none-any.whl", hash = "sha256:f92a359477f3252452ae2e8d3029de77aec59415c16ae4189bcfba40b757e029"}, - {file = "pre_commit-1.21.0.tar.gz", hash = "sha256:8f48d8637bdae6fa70cc97db9c1dd5aa7c5c8bf71968932a380628c25978b850"}, + {file = "pre_commit-2.19.0-py2.py3-none-any.whl", hash = "sha256:10c62741aa5704faea2ad69cb550ca78082efe5697d6f04e5710c3c229afdd10"}, + {file = "pre_commit-2.19.0.tar.gz", hash = "sha256:4233a1e38621c87d9dda9808c6606d7e7ba0e087cd56d3fe03202a01d2919615"}, ] py = [ - {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, - {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = 
"sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] +pygments = [ + {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"}, + {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, ] pyparsing = [ - {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, - {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] pyrsistent = [ - {file = "pyrsistent-0.16.1.tar.gz", hash = "sha256:aa2ae1c2e496f4d6777f869ea5de7166a8ccb9c2e06ebcf6c7ff1b670c98c5ef"}, + {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, + {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, + {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"}, + {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"}, + {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"}, + {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"}, + {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"}, + {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"}, + {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"}, + {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"}, + {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"}, + {file = 
"pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"}, + {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"}, + {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"}, + {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"}, + {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"}, ] pytest = [ - {file = "pytest-4.6.11-py2.py3-none-any.whl", hash = "sha256:a00a7d79cbbdfa9d21e7d0298392a8dd4123316bfac545075e6f8f24c94d8c97"}, - {file = "pytest-4.6.11.tar.gz", hash = "sha256:50fa82392f2120cc3ec2ca0a75ee615be4c479e66669789771f1758332be4353"}, + {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"}, + {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"}, ] pytest-cov = [ - {file = "pytest-cov-2.11.1.tar.gz", hash = "sha256:359952d9d39b9f822d9d29324483e7ba04a3a17dd7d05aa6beb7ea01e359e5f7"}, - {file = "pytest_cov-2.11.1-py2.py3-none-any.whl", hash = "sha256:bdb9fdb0b85a7cc825269a4c56b48ccaa5c7e365054b6038772c32ddcdc969da"}, + {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, + {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, ] pytest-mock = [ - {file = "pytest-mock-2.0.0.tar.gz", hash = "sha256:b35eb281e93aafed138db25c8772b95d3756108b601947f89af503f8c629413f"}, - {file = "pytest_mock-2.0.0-py2.py3-none-any.whl", hash = "sha256:cb67402d87d5f53c579263d37971a164743dc33c159dfb4fb4a86f37c5552307"}, + {file = "pytest-mock-3.7.0.tar.gz", hash = "sha256:5112bd92cc9f186ee96e1a92efc84969ea494939c3aead39c50f421c4cc69534"}, + {file = "pytest_mock-3.7.0-py3-none-any.whl", hash = "sha256:6cff27cec936bf81dc5ee87f07132b807bcda51106b5ec4b90a04331cba76231"}, ] pyyaml = [ - {file = "PyYAML-5.3.1-cp27-cp27m-win32.whl", hash = "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f"}, - {file = "PyYAML-5.3.1-cp27-cp27m-win_amd64.whl", hash = "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76"}, - {file = "PyYAML-5.3.1-cp35-cp35m-win32.whl", hash = "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2"}, - {file = "PyYAML-5.3.1-cp35-cp35m-win_amd64.whl", hash = "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c"}, - {file = "PyYAML-5.3.1-cp36-cp36m-win32.whl", hash = "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2"}, - {file = "PyYAML-5.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648"}, - {file = "PyYAML-5.3.1-cp37-cp37m-win32.whl", hash = "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a"}, - {file = "PyYAML-5.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf"}, - {file = "PyYAML-5.3.1-cp38-cp38-win32.whl", hash = "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97"}, - {file = 
"PyYAML-5.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee"}, - {file = "PyYAML-5.3.1-cp39-cp39-win32.whl", hash = "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a"}, - {file = "PyYAML-5.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e"}, - {file = "PyYAML-5.3.1.tar.gz", hash = "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] requests = [ - {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, - {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, -] -scandir = [ - {file = "scandir-1.10.0-cp27-cp27m-win32.whl", hash = "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188"}, - {file = "scandir-1.10.0-cp27-cp27m-win_amd64.whl", hash = "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"}, - {file = "scandir-1.10.0-cp34-cp34m-win32.whl", hash = "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f"}, - {file = "scandir-1.10.0-cp34-cp34m-win_amd64.whl", hash = "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e"}, - {file = "scandir-1.10.0-cp35-cp35m-win32.whl", hash = "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f"}, - {file = "scandir-1.10.0-cp35-cp35m-win_amd64.whl", hash = "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32"}, - {file = "scandir-1.10.0-cp36-cp36m-win32.whl", hash = "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022"}, - {file = "scandir-1.10.0-cp36-cp36m-win_amd64.whl", hash = "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4"}, - {file = "scandir-1.10.0-cp37-cp37m-win32.whl", hash = "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173"}, - {file = 
"scandir-1.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d"}, - {file = "scandir-1.10.0.tar.gz", hash = "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae"}, -] -singledispatch = [ - {file = "singledispatch-3.4.0.3-py2.py3-none-any.whl", hash = "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"}, - {file = "singledispatch-3.4.0.3.tar.gz", hash = "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c"}, + {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, + {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, +] +rich = [ + {file = "rich-12.4.1-py3-none-any.whl", hash = "sha256:d13c6c90c42e24eb7ce660db397e8c398edd58acb7f92a2a88a95572b838aaa4"}, + {file = "rich-12.4.1.tar.gz", hash = "sha256:d239001c0fb7de985e21ec9a4bb542b5150350330bbc1849f835b9cbc8923b91"}, ] six = [ - {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, - {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] -tox = [ - {file = "tox-3.21.4-py2.py3-none-any.whl", hash = "sha256:65d0e90ceb816638a50d64f4b47b11da767b284c0addda2294cb3cd69bd72425"}, - {file = "tox-3.21.4.tar.gz", hash = "sha256:cf7fef81a3a2434df4d7af2a6d1bf606d2970220addfbe7dea2615bd4bb2c252"}, +tomli = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -typing = [ - {file = "typing-3.7.4.3-py2-none-any.whl", hash = "sha256:283d868f5071ab9ad873e5e52268d611e851c870a2ba354193026f2dfb29d8b5"}, - {file = "typing-3.7.4.3.tar.gz", hash = "sha256:1187fb9c82fd670d10aa07bbb6cfcfe4bdda42d6fab8d5134f04e8c4d0b71cc9"}, +tox = [ + {file = "tox-3.25.0-py2.py3-none-any.whl", hash = "sha256:0805727eb4d6b049de304977dfc9ce315a1938e6619c3ab9f38682bb04662a5a"}, + {file = "tox-3.25.0.tar.gz", hash = "sha256:37888f3092aa4e9f835fc8cc6dadbaaa0782651c41ef359e3a5743fcb0308160"}, +] +typed-ast = [ + {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, + {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"}, + {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"}, + {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"}, + {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = 
"sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"}, + {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"}, + {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"}, + {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"}, + {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"}, + {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"}, + {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"}, + {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"}, + {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"}, + {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"}, + {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"}, + {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"}, + {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"}, + {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"}, + {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"}, + {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"}, + {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"}, + {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"}, + {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, + {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, +] +types-jsonschema = [ + {file = "types-jsonschema-4.4.4.tar.gz", hash = "sha256:d03f0c1a97ff06dda9535dfa51916a98f38bf40d6828ef4d93bc40708effe507"}, + {file = "types_jsonschema-4.4.4-py3-none-any.whl", hash = "sha256:294d2de9ea3564fbec6c56153e84d1f3f7d9b2ada36e183d88a63c126da7bc3d"}, +] +types-setuptools = [ + {file = "types-setuptools-57.4.14.tar.gz", hash = 
"sha256:df02fe1dd244f58cf4e67cfc3d0a97930a2d61a72dd89f21d81c71017cd83f9a"}, + {file = "types_setuptools-57.4.14-py3-none-any.whl", hash = "sha256:828f7e7e51e157876f47c80518b23ba0c3c36aa8081efd39d5d39f393938aec9"}, +] +typing-extensions = [ + {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"}, + {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"}, ] urllib3 = [ - {file = "urllib3-1.26.3-py2.py3-none-any.whl", hash = "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80"}, - {file = "urllib3-1.26.3.tar.gz", hash = "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73"}, + {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, + {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, ] vendoring = [ - {file = "vendoring-0.3.3-py2.py3-none-any.whl", hash = "sha256:2b91c302116320f903fdb5e60c2b0805d807e2b87425ab0c86624028aa5ffa57"}, - {file = "vendoring-0.3.3.tar.gz", hash = "sha256:2bbfc0c8da2863f4638c854b91e80c5d0ca57db62fb979d4b0f52088eeab1162"}, + {file = "vendoring-1.2.0-py2.py3-none-any.whl", hash = "sha256:35b5fca683264e69e851a7580bb6a6f9848af024ffc8382ed5491bcfa55750c6"}, + {file = "vendoring-1.2.0.tar.gz", hash = "sha256:6340a84bf542222c96f22ebc3cb87e4d86932dc04bc8d446e38285594702c00e"}, ] virtualenv = [ - {file = "virtualenv-20.4.2-py2.py3-none-any.whl", hash = "sha256:2be72df684b74df0ea47679a7df93fd0e04e72520022c57b479d8f881485dbe3"}, - {file = "virtualenv-20.4.2.tar.gz", hash = "sha256:147b43894e51dd6bba882cf9c282447f780e2251cd35172403745fc381a0a80d"}, -] -wcwidth = [ - {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, - {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, + {file = "virtualenv-20.14.1-py2.py3-none-any.whl", hash = "sha256:e617f16e25b42eb4f6e74096b9c9e37713cf10bf30168fb4a739f3fa8f898a3a"}, + {file = "virtualenv-20.14.1.tar.gz", hash = "sha256:ef589a79795589aada0c1c5b319486797c03b67ac3984c48c669c0e4f50df3a5"}, ] zipp = [ - {file = "zipp-1.2.0-py2.py3-none-any.whl", hash = "sha256:e0d9e63797e483a30d27e09fffd308c59a700d365ec34e93cc100844168bf921"}, - {file = "zipp-1.2.0.tar.gz", hash = "sha256:c70410551488251b0fee67b460fb9a536af8d6f9f008ad10ac51f615b6a521b1"}, + {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"}, + {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"}, ] diff --git a/vendor/poetry-core/poetry/__init__.py b/vendor/poetry-core/poetry/__init__.py deleted file mode 100644 index 26cfe405..00000000 --- a/vendor/poetry-core/poetry/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from pkgutil import extend_path - - -__path__ = extend_path(__path__, __name__) diff --git a/vendor/poetry-core/poetry/core/__init__.py b/vendor/poetry-core/poetry/core/__init__.py deleted file mode 100644 index 99b56004..00000000 --- a/vendor/poetry-core/poetry/core/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -import sys - - -try: - from pathlib import Path -except ImportError: - # noinspection PyUnresolvedReferences - from pathlib2 import Path - -__version__ = "1.0.8" - -if not getattr(sys, "oxidized", 
False): - __vendor_site__ = (Path(__file__).parent / "_vendor").as_posix() - - if __vendor_site__ not in sys.path: - sys.path.insert(0, __vendor_site__) diff --git a/vendor/poetry-core/poetry/core/_vendor/_pyrsistent_version.py b/vendor/poetry-core/poetry/core/_vendor/_pyrsistent_version.py deleted file mode 100644 index 9513287c..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/_pyrsistent_version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = '0.16.1' diff --git a/vendor/poetry-core/poetry/core/_vendor/attr/__init__.py b/vendor/poetry-core/poetry/core/_vendor/attr/__init__.py deleted file mode 100644 index bf329cad..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/attr/__init__.py +++ /dev/null @@ -1,76 +0,0 @@ -from __future__ import absolute_import, division, print_function - -import sys - -from functools import partial - -from . import converters, exceptions, filters, setters, validators -from ._config import get_run_validators, set_run_validators -from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types -from ._make import ( - NOTHING, - Attribute, - Factory, - attrib, - attrs, - fields, - fields_dict, - make_class, - validate, -) -from ._version_info import VersionInfo - - -__version__ = "20.3.0" -__version_info__ = VersionInfo._from_version_string(__version__) - -__title__ = "attrs" -__description__ = "Classes Without Boilerplate" -__url__ = "https://www.attrs.org/" -__uri__ = __url__ -__doc__ = __description__ + " <" + __uri__ + ">" - -__author__ = "Hynek Schlawack" -__email__ = "hs@ox.cx" - -__license__ = "MIT" -__copyright__ = "Copyright (c) 2015 Hynek Schlawack" - - -s = attributes = attrs -ib = attr = attrib -dataclass = partial(attrs, auto_attribs=True) # happy Easter ;) - -__all__ = [ - "Attribute", - "Factory", - "NOTHING", - "asdict", - "assoc", - "astuple", - "attr", - "attrib", - "attributes", - "attrs", - "converters", - "evolve", - "exceptions", - "fields", - "fields_dict", - "filters", - "get_run_validators", - "has", - "ib", - "make_class", - "resolve_types", - "s", - "set_run_validators", - "setters", - "validate", - "validators", -] - -if sys.version_info[:2] >= (3, 6): - from ._next_gen import define, field, frozen, mutable - - __all__.extend((define, field, frozen, mutable)) diff --git a/vendor/poetry-core/poetry/core/_vendor/attr/_compat.py b/vendor/poetry-core/poetry/core/_vendor/attr/_compat.py deleted file mode 100644 index b0ead6e1..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/attr/_compat.py +++ /dev/null @@ -1,231 +0,0 @@ -from __future__ import absolute_import, division, print_function - -import platform -import sys -import types -import warnings - - -PY2 = sys.version_info[0] == 2 -PYPY = platform.python_implementation() == "PyPy" - - -if PYPY or sys.version_info[:2] >= (3, 6): - ordered_dict = dict -else: - from collections import OrderedDict - - ordered_dict = OrderedDict - - -if PY2: - from collections import Mapping, Sequence - - from UserDict import IterableUserDict - - # We 'bundle' isclass instead of using inspect as importing inspect is - # fairly expensive (order of 10-15 ms for a modern machine in 2016) - def isclass(klass): - return isinstance(klass, (type, types.ClassType)) - - # TYPE is used in exceptions, repr(int) is different on Python 2 and 3. - TYPE = "type" - - def iteritems(d): - return d.iteritems() - - # Python 2 is bereft of a read-only dict proxy, so we make one! - class ReadOnlyDict(IterableUserDict): - """ - Best-effort read-only dict wrapper. 
- """ - - def __setitem__(self, key, val): - # We gently pretend we're a Python 3 mappingproxy. - raise TypeError( - "'mappingproxy' object does not support item assignment" - ) - - def update(self, _): - # We gently pretend we're a Python 3 mappingproxy. - raise AttributeError( - "'mappingproxy' object has no attribute 'update'" - ) - - def __delitem__(self, _): - # We gently pretend we're a Python 3 mappingproxy. - raise TypeError( - "'mappingproxy' object does not support item deletion" - ) - - def clear(self): - # We gently pretend we're a Python 3 mappingproxy. - raise AttributeError( - "'mappingproxy' object has no attribute 'clear'" - ) - - def pop(self, key, default=None): - # We gently pretend we're a Python 3 mappingproxy. - raise AttributeError( - "'mappingproxy' object has no attribute 'pop'" - ) - - def popitem(self): - # We gently pretend we're a Python 3 mappingproxy. - raise AttributeError( - "'mappingproxy' object has no attribute 'popitem'" - ) - - def setdefault(self, key, default=None): - # We gently pretend we're a Python 3 mappingproxy. - raise AttributeError( - "'mappingproxy' object has no attribute 'setdefault'" - ) - - def __repr__(self): - # Override to be identical to the Python 3 version. - return "mappingproxy(" + repr(self.data) + ")" - - def metadata_proxy(d): - res = ReadOnlyDict() - res.data.update(d) # We blocked update, so we have to do it like this. - return res - - def just_warn(*args, **kw): # pragma: no cover - """ - We only warn on Python 3 because we are not aware of any concrete - consequences of not setting the cell on Python 2. - """ - - -else: # Python 3 and later. - from collections.abc import Mapping, Sequence # noqa - - def just_warn(*args, **kw): - """ - We only warn on Python 3 because we are not aware of any concrete - consequences of not setting the cell on Python 2. - """ - warnings.warn( - "Running interpreter doesn't sufficiently support code object " - "introspection. Some features like bare super() or accessing " - "__class__ will not work with slotted classes.", - RuntimeWarning, - stacklevel=2, - ) - - def isclass(klass): - return isinstance(klass, type) - - TYPE = "class" - - def iteritems(d): - return d.items() - - def metadata_proxy(d): - return types.MappingProxyType(dict(d)) - - -def make_set_closure_cell(): - """Return a function of two arguments (cell, value) which sets - the value stored in the closure cell `cell` to `value`. - """ - # pypy makes this easy. (It also supports the logic below, but - # why not do the easy/fast thing?) - if PYPY: - - def set_closure_cell(cell, value): - cell.__setstate__((value,)) - - return set_closure_cell - - # Otherwise gotta do it the hard way. - - # Create a function that will set its first cellvar to `value`. - def set_first_cellvar_to(value): - x = value - return - - # This function will be eliminated as dead code, but - # not before its reference to `x` forces `x` to be - # represented as a closure cell rather than a local. - def force_x_to_be_a_cell(): # pragma: no cover - return x - - try: - # Extract the code object and make sure our assumptions about - # the closure behavior are correct. - if PY2: - co = set_first_cellvar_to.func_code - else: - co = set_first_cellvar_to.__code__ - if co.co_cellvars != ("x",) or co.co_freevars != (): - raise AssertionError # pragma: no cover - - # Convert this code object to a code object that sets the - # function's first _freevar_ (not cellvar) to the argument. 
- if sys.version_info >= (3, 8): - # CPython 3.8+ has an incompatible CodeType signature - # (added a posonlyargcount argument) but also added - # CodeType.replace() to do this without counting parameters. - set_first_freevar_code = co.replace( - co_cellvars=co.co_freevars, co_freevars=co.co_cellvars - ) - else: - args = [co.co_argcount] - if not PY2: - args.append(co.co_kwonlyargcount) - args.extend( - [ - co.co_nlocals, - co.co_stacksize, - co.co_flags, - co.co_code, - co.co_consts, - co.co_names, - co.co_varnames, - co.co_filename, - co.co_name, - co.co_firstlineno, - co.co_lnotab, - # These two arguments are reversed: - co.co_cellvars, - co.co_freevars, - ] - ) - set_first_freevar_code = types.CodeType(*args) - - def set_closure_cell(cell, value): - # Create a function using the set_first_freevar_code, - # whose first closure cell is `cell`. Calling it will - # change the value of that cell. - setter = types.FunctionType( - set_first_freevar_code, {}, "setter", (), (cell,) - ) - # And call it to set the cell. - setter(value) - - # Make sure it works on this interpreter: - def make_func_with_cell(): - x = None - - def func(): - return x # pragma: no cover - - return func - - if PY2: - cell = make_func_with_cell().func_closure[0] - else: - cell = make_func_with_cell().__closure__[0] - set_closure_cell(cell, 100) - if cell.cell_contents != 100: - raise AssertionError # pragma: no cover - - except Exception: - return just_warn - else: - return set_closure_cell - - -set_closure_cell = make_set_closure_cell() diff --git a/vendor/poetry-core/poetry/core/_vendor/attr/_config.py b/vendor/poetry-core/poetry/core/_vendor/attr/_config.py deleted file mode 100644 index 8ec92096..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/attr/_config.py +++ /dev/null @@ -1,23 +0,0 @@ -from __future__ import absolute_import, division, print_function - - -__all__ = ["set_run_validators", "get_run_validators"] - -_run_validators = True - - -def set_run_validators(run): - """ - Set whether or not validators are run. By default, they are run. - """ - if not isinstance(run, bool): - raise TypeError("'run' must be bool.") - global _run_validators - _run_validators = run - - -def get_run_validators(): - """ - Return whether or not validators are run. - """ - return _run_validators diff --git a/vendor/poetry-core/poetry/core/_vendor/attr/_funcs.py b/vendor/poetry-core/poetry/core/_vendor/attr/_funcs.py deleted file mode 100644 index e6c930cb..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/attr/_funcs.py +++ /dev/null @@ -1,390 +0,0 @@ -from __future__ import absolute_import, division, print_function - -import copy - -from ._compat import iteritems -from ._make import NOTHING, _obj_setattr, fields -from .exceptions import AttrsAttributeNotFoundError - - -def asdict( - inst, - recurse=True, - filter=None, - dict_factory=dict, - retain_collection_types=False, - value_serializer=None, -): - """ - Return the ``attrs`` attribute values of *inst* as a dict. - - Optionally recurse into other ``attrs``-decorated classes. - - :param inst: Instance of an ``attrs``-decorated class. - :param bool recurse: Recurse into classes that are also - ``attrs``-decorated. - :param callable filter: A callable whose return code determines whether an - attribute or element is included (``True``) or dropped (``False``). Is - called with the `attr.Attribute` as the first argument and the - value as the second argument. - :param callable dict_factory: A callable to produce dictionaries from. 
For - example, to produce ordered dictionaries instead of normal Python - dictionaries, pass in ``collections.OrderedDict``. - :param bool retain_collection_types: Do not convert to ``list`` when - encountering an attribute whose type is ``tuple`` or ``set``. Only - meaningful if ``recurse`` is ``True``. - :param Optional[callable] value_serializer: A hook that is called for every - attribute or dict key/value. It receives the current instance, field - and value and must return the (updated) value. The hook is run *after* - the optional *filter* has been applied. - - :rtype: return type of *dict_factory* - - :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` - class. - - .. versionadded:: 16.0.0 *dict_factory* - .. versionadded:: 16.1.0 *retain_collection_types* - .. versionadded:: 20.3.0 *value_serializer* - """ - attrs = fields(inst.__class__) - rv = dict_factory() - for a in attrs: - v = getattr(inst, a.name) - if filter is not None and not filter(a, v): - continue - - if value_serializer is not None: - v = value_serializer(inst, a, v) - - if recurse is True: - if has(v.__class__): - rv[a.name] = asdict( - v, - True, - filter, - dict_factory, - retain_collection_types, - value_serializer, - ) - elif isinstance(v, (tuple, list, set, frozenset)): - cf = v.__class__ if retain_collection_types is True else list - rv[a.name] = cf( - [ - _asdict_anything( - i, - filter, - dict_factory, - retain_collection_types, - value_serializer, - ) - for i in v - ] - ) - elif isinstance(v, dict): - df = dict_factory - rv[a.name] = df( - ( - _asdict_anything( - kk, - filter, - df, - retain_collection_types, - value_serializer, - ), - _asdict_anything( - vv, - filter, - df, - retain_collection_types, - value_serializer, - ), - ) - for kk, vv in iteritems(v) - ) - else: - rv[a.name] = v - else: - rv[a.name] = v - return rv - - -def _asdict_anything( - val, - filter, - dict_factory, - retain_collection_types, - value_serializer, -): - """ - ``asdict`` only works on attrs instances, this works on anything. - """ - if getattr(val.__class__, "__attrs_attrs__", None) is not None: - # Attrs class. - rv = asdict( - val, - True, - filter, - dict_factory, - retain_collection_types, - value_serializer, - ) - elif isinstance(val, (tuple, list, set, frozenset)): - cf = val.__class__ if retain_collection_types is True else list - rv = cf( - [ - _asdict_anything( - i, - filter, - dict_factory, - retain_collection_types, - value_serializer, - ) - for i in val - ] - ) - elif isinstance(val, dict): - df = dict_factory - rv = df( - ( - _asdict_anything( - kk, filter, df, retain_collection_types, value_serializer - ), - _asdict_anything( - vv, filter, df, retain_collection_types, value_serializer - ), - ) - for kk, vv in iteritems(val) - ) - else: - rv = val - if value_serializer is not None: - rv = value_serializer(None, None, rv) - - return rv - - -def astuple( - inst, - recurse=True, - filter=None, - tuple_factory=tuple, - retain_collection_types=False, -): - """ - Return the ``attrs`` attribute values of *inst* as a tuple. - - Optionally recurse into other ``attrs``-decorated classes. - - :param inst: Instance of an ``attrs``-decorated class. - :param bool recurse: Recurse into classes that are also - ``attrs``-decorated. - :param callable filter: A callable whose return code determines whether an - attribute or element is included (``True``) or dropped (``False``). Is - called with the `attr.Attribute` as the first argument and the - value as the second argument. 
- :param callable tuple_factory: A callable to produce tuples from. For - example, to produce lists instead of tuples. - :param bool retain_collection_types: Do not convert to ``list`` - or ``dict`` when encountering an attribute which type is - ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is - ``True``. - - :rtype: return type of *tuple_factory* - - :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` - class. - - .. versionadded:: 16.2.0 - """ - attrs = fields(inst.__class__) - rv = [] - retain = retain_collection_types # Very long. :/ - for a in attrs: - v = getattr(inst, a.name) - if filter is not None and not filter(a, v): - continue - if recurse is True: - if has(v.__class__): - rv.append( - astuple( - v, - recurse=True, - filter=filter, - tuple_factory=tuple_factory, - retain_collection_types=retain, - ) - ) - elif isinstance(v, (tuple, list, set, frozenset)): - cf = v.__class__ if retain is True else list - rv.append( - cf( - [ - astuple( - j, - recurse=True, - filter=filter, - tuple_factory=tuple_factory, - retain_collection_types=retain, - ) - if has(j.__class__) - else j - for j in v - ] - ) - ) - elif isinstance(v, dict): - df = v.__class__ if retain is True else dict - rv.append( - df( - ( - astuple( - kk, - tuple_factory=tuple_factory, - retain_collection_types=retain, - ) - if has(kk.__class__) - else kk, - astuple( - vv, - tuple_factory=tuple_factory, - retain_collection_types=retain, - ) - if has(vv.__class__) - else vv, - ) - for kk, vv in iteritems(v) - ) - ) - else: - rv.append(v) - else: - rv.append(v) - - return rv if tuple_factory is list else tuple_factory(rv) - - -def has(cls): - """ - Check whether *cls* is a class with ``attrs`` attributes. - - :param type cls: Class to introspect. - :raise TypeError: If *cls* is not a class. - - :rtype: bool - """ - return getattr(cls, "__attrs_attrs__", None) is not None - - -def assoc(inst, **changes): - """ - Copy *inst* and apply *changes*. - - :param inst: Instance of a class with ``attrs`` attributes. - :param changes: Keyword changes in the new copy. - - :return: A copy of inst with *changes* incorporated. - - :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't - be found on *cls*. - :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` - class. - - .. deprecated:: 17.1.0 - Use `evolve` instead. - """ - import warnings - - warnings.warn( - "assoc is deprecated and will be removed after 2018/01.", - DeprecationWarning, - stacklevel=2, - ) - new = copy.copy(inst) - attrs = fields(inst.__class__) - for k, v in iteritems(changes): - a = getattr(attrs, k, NOTHING) - if a is NOTHING: - raise AttrsAttributeNotFoundError( - "{k} is not an attrs attribute on {cl}.".format( - k=k, cl=new.__class__ - ) - ) - _obj_setattr(new, k, v) - return new - - -def evolve(inst, **changes): - """ - Create a new instance, based on *inst* with *changes* applied. - - :param inst: Instance of a class with ``attrs`` attributes. - :param changes: Keyword changes in the new copy. - - :return: A copy of inst with *changes* incorporated. - - :raise TypeError: If *attr_name* couldn't be found in the class - ``__init__``. - :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` - class. - - .. versionadded:: 17.1.0 - """ - cls = inst.__class__ - attrs = fields(cls) - for a in attrs: - if not a.init: - continue - attr_name = a.name # To deal with private attributes. 
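The `# To deal with private attributes` comment above is the heart of `evolve`: generated `__init__` parameters have leading underscores stripped, so `evolve` re-maps each `_name` field back to its `name` argument. Illustrative sketch:

    import attr

    @attr.s
    class Client:
        _token = attr.ib()

    c = Client(token="a")            # __init__ exposes _token as "token"
    c2 = attr.evolve(c, token="b")   # evolve applies the same re-mapping
    assert (c._token, c2._token) == ("a", "b")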
- init_name = attr_name if attr_name[0] != "_" else attr_name[1:] - if init_name not in changes: - changes[init_name] = getattr(inst, attr_name) - - return cls(**changes) - - -def resolve_types(cls, globalns=None, localns=None): - """ - Resolve any strings and forward annotations in type annotations. - - This is only required if you need concrete types in `Attribute`'s *type* - field. In other words, you don't need to resolve your types if you only - use them for static type checking. - - With no arguments, names will be looked up in the module in which the class - was created. If this is not what you want, e.g. if the name only exists - inside a method, you may pass *globalns* or *localns* to specify other - dictionaries in which to look up these names. See the docs of - `typing.get_type_hints` for more details. - - :param type cls: Class to resolve. - :param Optional[dict] globalns: Dictionary containing global variables. - :param Optional[dict] localns: Dictionary containing local variables. - - :raise TypeError: If *cls* is not a class. - :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` - class. - :raise NameError: If types cannot be resolved because of missing variables. - - :returns: *cls* so you can use this function also as a class decorator. - Please note that you have to apply it **after** `attr.s`. That means - the decorator has to come in the line **before** `attr.s`. - - .. versionadded:: 20.1.0 - """ - try: - # Since calling get_type_hints is expensive we cache whether we've - # done it already. - cls.__attrs_types_resolved__ - except AttributeError: - import typing - - hints = typing.get_type_hints(cls, globalns=globalns, localns=localns) - for field in fields(cls): - if field.name in hints: - # Since fields have been frozen we must work around it. - _obj_setattr(field, "type", hints[field.name]) - cls.__attrs_types_resolved__ = True - - # Return the class so you can use it as a decorator too. - return cls diff --git a/vendor/poetry-core/poetry/core/_vendor/attr/_make.py b/vendor/poetry-core/poetry/core/_vendor/attr/_make.py deleted file mode 100644 index 49484f93..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/attr/_make.py +++ /dev/null @@ -1,2765 +0,0 @@ -from __future__ import absolute_import, division, print_function - -import copy -import linecache -import sys -import threading -import uuid -import warnings - -from operator import itemgetter - -from . import _config, setters -from ._compat import ( - PY2, - PYPY, - isclass, - iteritems, - metadata_proxy, - ordered_dict, - set_closure_cell, -) -from .exceptions import ( - DefaultAlreadySetError, - FrozenInstanceError, - NotAnAttrsClassError, - PythonTooOldError, - UnannotatedAttributeError, -) - - -# This is used at least twice, so cache it here. -_obj_setattr = object.__setattr__ -_init_converter_pat = "__attr_converter_%s" -_init_factory_pat = "__attr_factory_{}" -_tuple_property_pat = ( - " {attr_name} = _attrs_property(_attrs_itemgetter({index}))" -) -_classvar_prefixes = ("typing.ClassVar", "t.ClassVar", "ClassVar") -# we don't use a double-underscore prefix because that triggers -# name mangling when trying to create a slot for the field -# (when slots=True) -_hash_cache_field = "_attrs_cached_hash" - -_empty_metadata_singleton = metadata_proxy({}) - -# Unique object for unequivocal getattr() defaults. -_sentinel = object() - - -class _Nothing(object): - """ - Sentinel class to indicate the lack of a value when ``None`` is ambiguous. - - ``_Nothing`` is a singleton. 
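`resolve_types`, removed just above, turns string and forward-reference annotations stored on `Attribute.type` into concrete types via `typing.get_type_hints`. A sketch, assuming it runs at module top level so the class name is resolvable:

    import typing

    import attr

    @attr.s(auto_attribs=True)
    class Node:
        value: int
        next: "typing.Optional[Node]" = None   # forward reference, stored raw

    assert attr.fields(Node).next.type == "typing.Optional[Node]"
    attr.resolve_types(Node)                   # looks names up in the module
    assert attr.fields(Node).next.type == typing.Optional[Node]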
There is only ever one of it. - """ - - _singleton = None - - def __new__(cls): - if _Nothing._singleton is None: - _Nothing._singleton = super(_Nothing, cls).__new__(cls) - return _Nothing._singleton - - def __repr__(self): - return "NOTHING" - - -NOTHING = _Nothing() -""" -Sentinel to indicate the lack of a value when ``None`` is ambiguous. -""" - - -class _CacheHashWrapper(int): - """ - An integer subclass that pickles / copies as None - - This is used for non-slots classes with ``cache_hash=True``, to avoid - serializing a potentially (even likely) invalid hash value. Since ``None`` - is the default value for uncalculated hashes, whenever this is copied, - the copy's value for the hash should automatically reset. - - See GH #613 for more details. - """ - - if PY2: - # For some reason `type(None)` isn't callable in Python 2, but we don't - # actually need a constructor for None objects, we just need any - # available function that returns None. - def __reduce__(self, _none_constructor=getattr, _args=(0, "", None)): - return _none_constructor, _args - - else: - - def __reduce__(self, _none_constructor=type(None), _args=()): - return _none_constructor, _args - - -def attrib( - default=NOTHING, - validator=None, - repr=True, - cmp=None, - hash=None, - init=True, - metadata=None, - type=None, - converter=None, - factory=None, - kw_only=False, - eq=None, - order=None, - on_setattr=None, -): - """ - Create a new attribute on a class. - - .. warning:: - - Does *not* do anything unless the class is also decorated with - `attr.s`! - - :param default: A value that is used if an ``attrs``-generated ``__init__`` - is used and no value is passed while instantiating or the attribute is - excluded using ``init=False``. - - If the value is an instance of `Factory`, its callable will be - used to construct a new value (useful for mutable data types like lists - or dicts). - - If a default is not set (or set manually to `attr.NOTHING`), a value - *must* be supplied when instantiating; otherwise a `TypeError` - will be raised. - - The default can also be set using decorator notation as shown below. - - :type default: Any value - - :param callable factory: Syntactic sugar for - ``default=attr.Factory(factory)``. - - :param validator: `callable` that is called by ``attrs``-generated - ``__init__`` methods after the instance has been initialized. They - receive the initialized instance, the `Attribute`, and the - passed value. - - The return value is *not* inspected so the validator has to throw an - exception itself. - - If a `list` is passed, its items are treated as validators and must - all pass. - - Validators can be globally disabled and re-enabled using - `get_run_validators`. - - The validator can also be set using decorator notation as shown below. - - :type validator: `callable` or a `list` of `callable`\\ s. - - :param repr: Include this attribute in the generated ``__repr__`` - method. If ``True``, include the attribute; if ``False``, omit it. By - default, the built-in ``repr()`` function is used. To override how the - attribute value is formatted, pass a ``callable`` that takes a single - value and returns a string. Note that the resulting string is used - as-is, i.e. it will be used directly *instead* of calling ``repr()`` - (the default). - :type repr: a `bool` or a `callable` to use a custom function. - :param bool eq: If ``True`` (default), include this attribute in the - generated ``__eq__`` and ``__ne__`` methods that check two instances - for equality. 
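`_Nothing.__new__` above guarantees a single `NOTHING` instance, so identity checks are reliable, and an attribute whose default stays `NOTHING` is mandatory. Minimal demonstration:

    import attr

    assert attr.NOTHING is type(attr.NOTHING)()   # singleton by construction

    @attr.s
    class C:
        x = attr.ib()      # no default, i.e. the default is NOTHING

    try:
        C()                # mandatory: instantiating without x fails
    except TypeError:
        pass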
- :param bool order: If ``True`` (default), include this attributes in the - generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. - :param bool cmp: Setting to ``True`` is equivalent to setting ``eq=True, - order=True``. Deprecated in favor of *eq* and *order*. - :param Optional[bool] hash: Include this attribute in the generated - ``__hash__`` method. If ``None`` (default), mirror *eq*'s value. This - is the correct behavior according the Python spec. Setting this value - to anything else than ``None`` is *discouraged*. - :param bool init: Include this attribute in the generated ``__init__`` - method. It is possible to set this to ``False`` and set a default - value. In that case this attributed is unconditionally initialized - with the specified default value or factory. - :param callable converter: `callable` that is called by - ``attrs``-generated ``__init__`` methods to convert attribute's value - to the desired format. It is given the passed-in value, and the - returned value will be used as the new value of the attribute. The - value is converted before being passed to the validator, if any. - :param metadata: An arbitrary mapping, to be used by third-party - components. See `extending_metadata`. - :param type: The type of the attribute. In Python 3.6 or greater, the - preferred method to specify the type is using a variable annotation - (see `PEP 526 `_). - This argument is provided for backward compatibility. - Regardless of the approach used, the type will be stored on - ``Attribute.type``. - - Please note that ``attrs`` doesn't do anything with this metadata by - itself. You can use it as part of your own code or for - `static type checking `. - :param kw_only: Make this attribute keyword-only (Python 3+) - in the generated ``__init__`` (if ``init`` is ``False``, this - parameter is ignored). - :param on_setattr: Allows to overwrite the *on_setattr* setting from - `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used. - Set to `attr.setters.NO_OP` to run **no** `setattr` hooks for this - attribute -- regardless of the setting in `attr.s`. - :type on_setattr: `callable`, or a list of callables, or `None`, or - `attr.setters.NO_OP` - - .. versionadded:: 15.2.0 *convert* - .. versionadded:: 16.3.0 *metadata* - .. versionchanged:: 17.1.0 *validator* can be a ``list`` now. - .. versionchanged:: 17.1.0 - *hash* is ``None`` and therefore mirrors *eq* by default. - .. versionadded:: 17.3.0 *type* - .. deprecated:: 17.4.0 *convert* - .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated - *convert* to achieve consistency with other noun-based arguments. - .. versionadded:: 18.1.0 - ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``. - .. versionadded:: 18.2.0 *kw_only* - .. versionchanged:: 19.2.0 *convert* keyword argument removed - .. versionchanged:: 19.2.0 *repr* also accepts a custom callable. - .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. - .. versionadded:: 19.2.0 *eq* and *order* - .. versionadded:: 20.1.0 *on_setattr* - .. versionchanged:: 20.3.0 *kw_only* backported to Python 2 - """ - eq, order = _determine_eq_order(cmp, eq, order, True) - - if hash is not None and hash is not True and hash is not False: - raise TypeError( - "Invalid value for hash. Must be True, False, or None." - ) - - if factory is not None: - if default is not NOTHING: - raise ValueError( - "The `default` and `factory` arguments are mutually " - "exclusive." 
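The `attrib` body above enforces that *default* and *factory* are mutually exclusive, since `factory=f` is only sugar for `default=Factory(f)`. Sketch:

    import attr

    try:
        attr.ib(default=1, factory=list)   # rejected: pick one
    except ValueError:
        pass

    @attr.s
    class C:
        items = attr.ib(factory=list)      # a fresh list per instance

    assert C().items == [] and C().items is not C().items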
- ) - if not callable(factory): - raise ValueError("The `factory` argument must be a callable.") - default = Factory(factory) - - if metadata is None: - metadata = {} - - # Apply syntactic sugar by auto-wrapping. - if isinstance(on_setattr, (list, tuple)): - on_setattr = setters.pipe(*on_setattr) - - if validator and isinstance(validator, (list, tuple)): - validator = and_(*validator) - - if converter and isinstance(converter, (list, tuple)): - converter = pipe(*converter) - - return _CountingAttr( - default=default, - validator=validator, - repr=repr, - cmp=None, - hash=hash, - init=init, - converter=converter, - metadata=metadata, - type=type, - kw_only=kw_only, - eq=eq, - order=order, - on_setattr=on_setattr, - ) - - -def _make_attr_tuple_class(cls_name, attr_names): - """ - Create a tuple subclass to hold `Attribute`s for an `attrs` class. - - The subclass is a bare tuple with properties for names. - - class MyClassAttributes(tuple): - __slots__ = () - x = property(itemgetter(0)) - """ - attr_class_name = "{}Attributes".format(cls_name) - attr_class_template = [ - "class {}(tuple):".format(attr_class_name), - " __slots__ = ()", - ] - if attr_names: - for i, attr_name in enumerate(attr_names): - attr_class_template.append( - _tuple_property_pat.format(index=i, attr_name=attr_name) - ) - else: - attr_class_template.append(" pass") - globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property} - eval(compile("\n".join(attr_class_template), "", "exec"), globs) - - return globs[attr_class_name] - - -# Tuple class for extracted attributes from a class definition. -# `base_attrs` is a subset of `attrs`. -_Attributes = _make_attr_tuple_class( - "_Attributes", - [ - # all attributes to build dunder methods for - "attrs", - # attributes that have been inherited - "base_attrs", - # map inherited attributes to their originating classes - "base_attrs_map", - ], -) - - -def _is_class_var(annot): - """ - Check whether *annot* is a typing.ClassVar. - - The string comparison hack is used to avoid evaluating all string - annotations which would put attrs-based classes at a performance - disadvantage compared to plain old classes. - """ - return str(annot).startswith(_classvar_prefixes) - - -def _has_own_attribute(cls, attrib_name): - """ - Check whether *cls* defines *attrib_name* (and doesn't just inherit it). - - Requires Python 3. - """ - attr = getattr(cls, attrib_name, _sentinel) - if attr is _sentinel: - return False - - for base_cls in cls.__mro__[1:]: - a = getattr(base_cls, attrib_name, None) - if attr is a: - return False - - return True - - -def _get_annotations(cls): - """ - Get annotations for *cls*. - """ - if _has_own_attribute(cls, "__annotations__"): - return cls.__annotations__ - - return {} - - -def _counter_getter(e): - """ - Key function for sorting to avoid re-creating a lambda for every class. - """ - return e[1].counter - - -def _collect_base_attrs(cls, taken_attr_names): - """ - Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. - """ - base_attrs = [] - base_attr_map = {} # A dictionary of base attrs to their classes. - - # Traverse the MRO and collect attributes. - for base_cls in reversed(cls.__mro__[1:-1]): - for a in getattr(base_cls, "__attrs_attrs__", []): - if a.inherited or a.name in taken_attr_names: - continue - - a = a.evolve(inherited=True) - base_attrs.append(a) - base_attr_map[a.name] = base_cls - - # For each name, only keep the freshest definition i.e. the furthest at the - # back. 
base_attr_map is fine because it gets overwritten with every new - # instance. - filtered = [] - seen = set() - for a in reversed(base_attrs): - if a.name in seen: - continue - filtered.insert(0, a) - seen.add(a.name) - - return filtered, base_attr_map - - -def _collect_base_attrs_broken(cls, taken_attr_names): - """ - Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. - - N.B. *taken_attr_names* will be mutated. - - Adhere to the old incorrect behavior. - - Notably it collects from the front and considers inherited attributes which - leads to the buggy behavior reported in #428. - """ - base_attrs = [] - base_attr_map = {} # A dictionary of base attrs to their classes. - - # Traverse the MRO and collect attributes. - for base_cls in cls.__mro__[1:-1]: - for a in getattr(base_cls, "__attrs_attrs__", []): - if a.name in taken_attr_names: - continue - - a = a.evolve(inherited=True) - taken_attr_names.add(a.name) - base_attrs.append(a) - base_attr_map[a.name] = base_cls - - return base_attrs, base_attr_map - - -def _transform_attrs( - cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer -): - """ - Transform all `_CountingAttr`s on a class into `Attribute`s. - - If *these* is passed, use that and don't look for them on the class. - - *collect_by_mro* is True, collect them in the correct MRO order, otherwise - use the old -- incorrect -- order. See #428. - - Return an `_Attributes`. - """ - cd = cls.__dict__ - anns = _get_annotations(cls) - - if these is not None: - ca_list = [(name, ca) for name, ca in iteritems(these)] - - if not isinstance(these, ordered_dict): - ca_list.sort(key=_counter_getter) - elif auto_attribs is True: - ca_names = { - name - for name, attr in cd.items() - if isinstance(attr, _CountingAttr) - } - ca_list = [] - annot_names = set() - for attr_name, type in anns.items(): - if _is_class_var(type): - continue - annot_names.add(attr_name) - a = cd.get(attr_name, NOTHING) - - if not isinstance(a, _CountingAttr): - if a is NOTHING: - a = attrib() - else: - a = attrib(default=a) - ca_list.append((attr_name, a)) - - unannotated = ca_names - annot_names - if len(unannotated) > 0: - raise UnannotatedAttributeError( - "The following `attr.ib`s lack a type annotation: " - + ", ".join( - sorted(unannotated, key=lambda n: cd.get(n).counter) - ) - + "." - ) - else: - ca_list = sorted( - ( - (name, attr) - for name, attr in cd.items() - if isinstance(attr, _CountingAttr) - ), - key=lambda e: e[1].counter, - ) - - own_attrs = [ - Attribute.from_counting_attr( - name=attr_name, ca=ca, type=anns.get(attr_name) - ) - for attr_name, ca in ca_list - ] - - if collect_by_mro: - base_attrs, base_attr_map = _collect_base_attrs( - cls, {a.name for a in own_attrs} - ) - else: - base_attrs, base_attr_map = _collect_base_attrs_broken( - cls, {a.name for a in own_attrs} - ) - - attr_names = [a.name for a in base_attrs + own_attrs] - - AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) - - if kw_only: - own_attrs = [a.evolve(kw_only=True) for a in own_attrs] - base_attrs = [a.evolve(kw_only=True) for a in base_attrs] - - attrs = AttrsClass(base_attrs + own_attrs) - - # Mandatory vs non-mandatory attr order only matters when they are part of - # the __init__ signature and when they aren't kw_only (which are moved to - # the end and can be mandatory or non-mandatory in any order, as they will - # be specified as keyword args anyway). 
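Under `auto_attribs=True`, the `_transform_attrs` logic above collects annotated fields and raises `UnannotatedAttributeError` for any bare `attr.ib()` that lacks an annotation; plain assigned values become defaults. Illustrative:

    import attr

    try:
        @attr.s(auto_attribs=True)
        class Bad:
            x: int = attr.ib()
            y = attr.ib()          # unannotated: rejected at class creation
    except attr.exceptions.UnannotatedAttributeError:
        pass

    @attr.s(auto_attribs=True)
    class Good:
        x: int
        y: str = "default"         # plain assignment becomes the default

    assert Good(1).y == "default"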
Check the order of those attrs: - had_default = False - for a in (a for a in attrs if a.init is not False and a.kw_only is False): - if had_default is True and a.default is NOTHING: - raise ValueError( - "No mandatory attributes allowed after an attribute with a " - "default value or factory. Attribute in question: %r" % (a,) - ) - - if had_default is False and a.default is not NOTHING: - had_default = True - - if field_transformer is not None: - attrs = field_transformer(cls, attrs) - return _Attributes((attrs, base_attrs, base_attr_map)) - - -if PYPY: - - def _frozen_setattrs(self, name, value): - """ - Attached to frozen classes as __setattr__. - """ - if isinstance(self, BaseException) and name in ( - "__cause__", - "__context__", - ): - BaseException.__setattr__(self, name, value) - return - - raise FrozenInstanceError() - - -else: - - def _frozen_setattrs(self, name, value): - """ - Attached to frozen classes as __setattr__. - """ - raise FrozenInstanceError() - - -def _frozen_delattrs(self, name): - """ - Attached to frozen classes as __delattr__. - """ - raise FrozenInstanceError() - - -class _ClassBuilder(object): - """ - Iteratively build *one* class. - """ - - __slots__ = ( - "_attr_names", - "_attrs", - "_base_attr_map", - "_base_names", - "_cache_hash", - "_cls", - "_cls_dict", - "_delete_attribs", - "_frozen", - "_has_post_init", - "_is_exc", - "_on_setattr", - "_slots", - "_weakref_slot", - "_has_own_setattr", - "_has_custom_setattr", - ) - - def __init__( - self, - cls, - these, - slots, - frozen, - weakref_slot, - getstate_setstate, - auto_attribs, - kw_only, - cache_hash, - is_exc, - collect_by_mro, - on_setattr, - has_custom_setattr, - field_transformer, - ): - attrs, base_attrs, base_map = _transform_attrs( - cls, - these, - auto_attribs, - kw_only, - collect_by_mro, - field_transformer, - ) - - self._cls = cls - self._cls_dict = dict(cls.__dict__) if slots else {} - self._attrs = attrs - self._base_names = set(a.name for a in base_attrs) - self._base_attr_map = base_map - self._attr_names = tuple(a.name for a in attrs) - self._slots = slots - self._frozen = frozen - self._weakref_slot = weakref_slot - self._cache_hash = cache_hash - self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) - self._delete_attribs = not bool(these) - self._is_exc = is_exc - self._on_setattr = on_setattr - - self._has_custom_setattr = has_custom_setattr - self._has_own_setattr = False - - self._cls_dict["__attrs_attrs__"] = self._attrs - - if frozen: - self._cls_dict["__setattr__"] = _frozen_setattrs - self._cls_dict["__delattr__"] = _frozen_delattrs - - self._has_own_setattr = True - - if getstate_setstate: - ( - self._cls_dict["__getstate__"], - self._cls_dict["__setstate__"], - ) = self._make_getstate_setstate() - - def __repr__(self): - return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__) - - def build_class(self): - """ - Finalize class based on the accumulated configuration. - - Builder cannot be used after calling this method. - """ - if self._slots is True: - return self._create_slots_class() - else: - return self._patch_original_class() - - def _patch_original_class(self): - """ - Apply accumulated methods and return the class. - """ - cls = self._cls - base_names = self._base_names - - # Clean class of attribute definitions (`attr.ib()`s). 
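The ordering check at the top of this hunk rejects a mandatory attribute after one with a default, except for `kw_only` attributes, which are moved to the end of the signature anyway. Sketch:

    import attr

    try:
        @attr.s
        class Bad:
            a = attr.ib(default=1)
            b = attr.ib()              # mandatory after a default: ValueError
    except ValueError:
        pass

    @attr.s
    class Ok:
        a = attr.ib(default=1)
        b = attr.ib(kw_only=True)      # exempt: passed by keyword regardless

    assert Ok(b=2).b == 2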
- if self._delete_attribs: - for name in self._attr_names: - if ( - name not in base_names - and getattr(cls, name, _sentinel) is not _sentinel - ): - try: - delattr(cls, name) - except AttributeError: - # This can happen if a base class defines a class - # variable and we want to set an attribute with the - # same name by using only a type annotation. - pass - - # Attach our dunder methods. - for name, value in self._cls_dict.items(): - setattr(cls, name, value) - - # If we've inherited an attrs __setattr__ and don't write our own, - # reset it to object's. - if not self._has_own_setattr and getattr( - cls, "__attrs_own_setattr__", False - ): - cls.__attrs_own_setattr__ = False - - if not self._has_custom_setattr: - cls.__setattr__ = object.__setattr__ - - return cls - - def _create_slots_class(self): - """ - Build and return a new class with a `__slots__` attribute. - """ - base_names = self._base_names - cd = { - k: v - for k, v in iteritems(self._cls_dict) - if k not in tuple(self._attr_names) + ("__dict__", "__weakref__") - } - - # If our class doesn't have its own implementation of __setattr__ - # (either from the user or by us), check the bases, if one of them has - # an attrs-made __setattr__, that needs to be reset. We don't walk the - # MRO because we only care about our immediate base classes. - # XXX: This can be confused by subclassing a slotted attrs class with - # XXX: a non-attrs class and subclass the resulting class with an attrs - # XXX: class. See `test_slotted_confused` for details. For now that's - # XXX: OK with us. - if not self._has_own_setattr: - cd["__attrs_own_setattr__"] = False - - if not self._has_custom_setattr: - for base_cls in self._cls.__bases__: - if base_cls.__dict__.get("__attrs_own_setattr__", False): - cd["__setattr__"] = object.__setattr__ - break - - # Traverse the MRO to check for an existing __weakref__. - weakref_inherited = False - for base_cls in self._cls.__mro__[1:-1]: - if base_cls.__dict__.get("__weakref__", None) is not None: - weakref_inherited = True - break - - names = self._attr_names - if ( - self._weakref_slot - and "__weakref__" not in getattr(self._cls, "__slots__", ()) - and "__weakref__" not in names - and not weakref_inherited - ): - names += ("__weakref__",) - - # We only add the names of attributes that aren't inherited. - # Setting __slots__ to inherited attributes wastes memory. - slot_names = [name for name in names if name not in base_names] - if self._cache_hash: - slot_names.append(_hash_cache_field) - cd["__slots__"] = tuple(slot_names) - - qualname = getattr(self._cls, "__qualname__", None) - if qualname is not None: - cd["__qualname__"] = qualname - - # Create new class based on old class and our methods. - cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) - - # The following is a fix for - # https://github.com/python-attrs/attrs/issues/102. On Python 3, - # if a method mentions `__class__` or uses the no-arg super(), the - # compiler will bake a reference to the class in the method itself - # as `method.__closure__`. Since we replace the class with a - # clone, we rewrite these references so it keeps working. - for item in cls.__dict__.values(): - if isinstance(item, (classmethod, staticmethod)): - # Class- and staticmethods hide their functions inside. - # These might need to be rewritten as well. - closure_cells = getattr(item.__func__, "__closure__", None) - else: - closure_cells = getattr(item, "__closure__", None) - - if not closure_cells: # Catch None or the empty list. 
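`_create_slots_class` above appends `__weakref__` to `__slots__` unless it is inherited or `weakref_slot=False` was requested (it defaults to `True`). Minimal check:

    import weakref

    import attr

    @attr.s(slots=True)                    # weakref_slot defaults to True
    class C:
        x = attr.ib()

    weakref.ref(C(1))                      # fine: a __weakref__ slot exists

    @attr.s(slots=True, weakref_slot=False)
    class D:
        x = attr.ib()

    try:
        weakref.ref(D(1))
    except TypeError:
        pass                               # no __weakref__ slot was created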
- continue - for cell in closure_cells: - try: - match = cell.cell_contents is self._cls - except ValueError: # ValueError: Cell is empty - pass - else: - if match: - set_closure_cell(cell, cls) - - return cls - - def add_repr(self, ns): - self._cls_dict["__repr__"] = self._add_method_dunders( - _make_repr(self._attrs, ns=ns) - ) - return self - - def add_str(self): - repr = self._cls_dict.get("__repr__") - if repr is None: - raise ValueError( - "__str__ can only be generated if a __repr__ exists." - ) - - def __str__(self): - return self.__repr__() - - self._cls_dict["__str__"] = self._add_method_dunders(__str__) - return self - - def _make_getstate_setstate(self): - """ - Create custom __setstate__ and __getstate__ methods. - """ - # __weakref__ is not writable. - state_attr_names = tuple( - an for an in self._attr_names if an != "__weakref__" - ) - - def slots_getstate(self): - """ - Automatically created by attrs. - """ - return tuple(getattr(self, name) for name in state_attr_names) - - hash_caching_enabled = self._cache_hash - - def slots_setstate(self, state): - """ - Automatically created by attrs. - """ - __bound_setattr = _obj_setattr.__get__(self, Attribute) - for name, value in zip(state_attr_names, state): - __bound_setattr(name, value) - - # The hash code cache is not included when the object is - # serialized, but it still needs to be initialized to None to - # indicate that the first call to __hash__ should be a cache - # miss. - if hash_caching_enabled: - __bound_setattr(_hash_cache_field, None) - - return slots_getstate, slots_setstate - - def make_unhashable(self): - self._cls_dict["__hash__"] = None - return self - - def add_hash(self): - self._cls_dict["__hash__"] = self._add_method_dunders( - _make_hash( - self._cls, - self._attrs, - frozen=self._frozen, - cache_hash=self._cache_hash, - ) - ) - - return self - - def add_init(self): - self._cls_dict["__init__"] = self._add_method_dunders( - _make_init( - self._cls, - self._attrs, - self._has_post_init, - self._frozen, - self._slots, - self._cache_hash, - self._base_attr_map, - self._is_exc, - self._on_setattr is not None - and self._on_setattr is not setters.NO_OP, - ) - ) - - return self - - def add_eq(self): - cd = self._cls_dict - - cd["__eq__"] = self._add_method_dunders( - _make_eq(self._cls, self._attrs) - ) - cd["__ne__"] = self._add_method_dunders(_make_ne()) - - return self - - def add_order(self): - cd = self._cls_dict - - cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = ( - self._add_method_dunders(meth) - for meth in _make_order(self._cls, self._attrs) - ) - - return self - - def add_setattr(self): - if self._frozen: - return self - - sa_attrs = {} - for a in self._attrs: - on_setattr = a.on_setattr or self._on_setattr - if on_setattr and on_setattr is not setters.NO_OP: - sa_attrs[a.name] = a, on_setattr - - if not sa_attrs: - return self - - if self._has_custom_setattr: - # We need to write a __setattr__ but there already is one! - raise ValueError( - "Can't combine custom __setattr__ with on_setattr hooks." 
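The cell-rewriting loop that closes above (the fix for python-attrs/attrs#102 cited in the removed comment) is what keeps zero-arg `super()` working after attrs replaces a slotted class with its clone: every baked-in `__class__` cell is pointed at the new class. Sketch:

    import attr

    @attr.s(slots=True)
    class C:
        x = attr.ib()

        def show(self):
            # Zero-arg super() bakes a __class__ cell into this method;
            # _create_slots_class rewrites that cell to the rebuilt class.
            return super().__repr__()

    c = C(1)
    assert "C object" in c.show()
    assert type(c) is C                # C now names the slotted clone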
- ) - - # docstring comes from _add_method_dunders - def __setattr__(self, name, val): - try: - a, hook = sa_attrs[name] - except KeyError: - nval = val - else: - nval = hook(self, a, val) - - _obj_setattr(self, name, nval) - - self._cls_dict["__attrs_own_setattr__"] = True - self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__) - self._has_own_setattr = True - - return self - - def _add_method_dunders(self, method): - """ - Add __module__ and __qualname__ to a *method* if possible. - """ - try: - method.__module__ = self._cls.__module__ - except AttributeError: - pass - - try: - method.__qualname__ = ".".join( - (self._cls.__qualname__, method.__name__) - ) - except AttributeError: - pass - - try: - method.__doc__ = "Method generated by attrs for class %s." % ( - self._cls.__qualname__, - ) - except AttributeError: - pass - - return method - - -_CMP_DEPRECATION = ( - "The usage of `cmp` is deprecated and will be removed on or after " - "2021-06-01. Please use `eq` and `order` instead." -) - - -def _determine_eq_order(cmp, eq, order, default_eq): - """ - Validate the combination of *cmp*, *eq*, and *order*. Derive the effective - values of eq and order. If *eq* is None, set it to *default_eq*. - """ - if cmp is not None and any((eq is not None, order is not None)): - raise ValueError("Don't mix `cmp` with `eq' and `order`.") - - # cmp takes precedence due to bw-compatibility. - if cmp is not None: - warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=3) - - return cmp, cmp - - # If left None, equality is set to the specified default and ordering - # mirrors equality. - if eq is None: - eq = default_eq - - if order is None: - order = eq - - if eq is False and order is True: - raise ValueError("`order` can only be True if `eq` is True too.") - - return eq, order - - -def _determine_whether_to_implement( - cls, flag, auto_detect, dunders, default=True -): - """ - Check whether we should implement a set of methods for *cls*. - - *flag* is the argument passed into @attr.s like 'init', *auto_detect* the - same as passed into @attr.s and *dunders* is a tuple of attribute names - whose presence signal that the user has implemented it themselves. - - Return *default* if no reason for either for or against is found. - - auto_detect must be False on Python 2. - """ - if flag is True or flag is False: - return flag - - if flag is None and auto_detect is False: - return default - - # Logically, flag is None and auto_detect is True here. - for dunder in dunders: - if _has_own_attribute(cls, dunder): - return False - - return default - - -def attrs( - maybe_cls=None, - these=None, - repr_ns=None, - repr=None, - cmp=None, - hash=None, - init=None, - slots=False, - frozen=False, - weakref_slot=True, - str=False, - auto_attribs=False, - kw_only=False, - cache_hash=False, - auto_exc=False, - eq=None, - order=None, - auto_detect=False, - collect_by_mro=False, - getstate_setstate=None, - on_setattr=None, - field_transformer=None, -): - r""" - A class decorator that adds `dunder - `_\ -methods according to the - specified attributes using `attr.ib` or the *these* argument. - - :param these: A dictionary of name to `attr.ib` mappings. This is - useful to avoid the definition of your attributes within the class body - because you can't (e.g. if you want to add ``__repr__`` methods to - Django models) or don't want to. - - If *these* is not ``None``, ``attrs`` will *not* search the class body - for attributes and will *not* remove any attributes from it. 
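`add_setattr`, finishing above, installs a `__setattr__` that dispatches per-attribute hooks and deliberately refuses to coexist with a hand-written one. Typical hook usage, assuming upstream semantics:

    import attr

    @attr.s
    class C:
        x = attr.ib(
            validator=attr.validators.instance_of(int),
            on_setattr=attr.setters.validate,   # re-run validator on assignment
        )

    c = C(1)
    c.x = 2                    # passes the validator
    try:
        c.x = "nope"
    except TypeError:
        pass                   # instance_of rejected the assignment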
- - If *these* is an ordered dict (`dict` on Python 3.6+, - `collections.OrderedDict` otherwise), the order is deduced from - the order of the attributes inside *these*. Otherwise the order - of the definition of the attributes is used. - - :type these: `dict` of `str` to `attr.ib` - - :param str repr_ns: When using nested classes, there's no way in Python 2 - to automatically detect that. Therefore it's possible to set the - namespace explicitly for a more meaningful ``repr`` output. - :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*, - *order*, and *hash* arguments explicitly, assume they are set to - ``True`` **unless any** of the involved methods for one of the - arguments is implemented in the *current* class (i.e. it is *not* - inherited from some base class). - - So for example by implementing ``__eq__`` on a class yourself, - ``attrs`` will deduce ``eq=False`` and won't create *neither* - ``__eq__`` *nor* ``__ne__`` (but Python classes come with a sensible - ``__ne__`` by default, so it *should* be enough to only implement - ``__eq__`` in most cases). - - .. warning:: - - If you prevent ``attrs`` from creating the ordering methods for you - (``order=False``, e.g. by implementing ``__le__``), it becomes - *your* responsibility to make sure its ordering is sound. The best - way is to use the `functools.total_ordering` decorator. - - - Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*, - *cmp*, or *hash* overrides whatever *auto_detect* would determine. - - *auto_detect* requires Python 3. Setting it ``True`` on Python 2 raises - a `PythonTooOldError`. - - :param bool repr: Create a ``__repr__`` method with a human readable - representation of ``attrs`` attributes.. - :param bool str: Create a ``__str__`` method that is identical to - ``__repr__``. This is usually not necessary except for - `Exception`\ s. - :param Optional[bool] eq: If ``True`` or ``None`` (default), add ``__eq__`` - and ``__ne__`` methods that check two instances for equality. - - They compare the instances as if they were tuples of their ``attrs`` - attributes if and only if the types of both classes are *identical*! - :param Optional[bool] order: If ``True``, add ``__lt__``, ``__le__``, - ``__gt__``, and ``__ge__`` methods that behave like *eq* above and - allow instances to be ordered. If ``None`` (default) mirror value of - *eq*. - :param Optional[bool] cmp: Setting to ``True`` is equivalent to setting - ``eq=True, order=True``. Deprecated in favor of *eq* and *order*, has - precedence over them for backward-compatibility though. Must not be - mixed with *eq* or *order*. - :param Optional[bool] hash: If ``None`` (default), the ``__hash__`` method - is generated according how *eq* and *frozen* are set. - - 1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you. - 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to - None, marking it unhashable (which it is). - 3. If *eq* is False, ``__hash__`` will be left untouched meaning the - ``__hash__`` method of the base class will be used (if base class is - ``object``, this means it will fall back to id-based hashing.). - - Although not recommended, you can decide for yourself and force - ``attrs`` to create one (e.g. if the class is immutable even though you - didn't freeze it programmatically) by passing ``True`` or not. Both of - these cases are rather special and should be used carefully. 
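The three *hash* cases documented above are easy to observe directly. Sketch:

    import attr

    @attr.s                        # eq=True, frozen=False: marked unhashable
    class Mutable:
        x = attr.ib()

    @attr.s(frozen=True)           # eq=True, frozen=True: __hash__ generated
    class Frozen:
        x = attr.ib()

    @attr.s(eq=False)              # eq=False: fall back to id-based hashing
    class Plain:
        x = attr.ib()

    assert Mutable.__hash__ is None
    assert hash(Frozen(1)) == hash(Frozen(1))
    assert Plain.__hash__ is object.__hash__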
- - See our documentation on `hashing`, Python's documentation on - `object.__hash__`, and the `GitHub issue that led to the default \ - behavior `_ for more - details. - :param bool init: Create a ``__init__`` method that initializes the - ``attrs`` attributes. Leading underscores are stripped for the - argument name. If a ``__attrs_post_init__`` method exists on the - class, it will be called after the class is fully initialized. - :param bool slots: Create a `slotted class ` that's more - memory-efficient. Slotted classes are generally superior to the default - dict classes, but have some gotchas you should know about, so we - encourage you to read the `glossary entry `. - :param bool frozen: Make instances immutable after initialization. If - someone attempts to modify a frozen instance, - `attr.exceptions.FrozenInstanceError` is raised. - - .. note:: - - 1. This is achieved by installing a custom ``__setattr__`` method - on your class, so you can't implement your own. - - 2. True immutability is impossible in Python. - - 3. This *does* have a minor a runtime performance `impact - ` when initializing new instances. In other words: - ``__init__`` is slightly slower with ``frozen=True``. - - 4. If a class is frozen, you cannot modify ``self`` in - ``__attrs_post_init__`` or a self-written ``__init__``. You can - circumvent that limitation by using - ``object.__setattr__(self, "attribute_name", value)``. - - 5. Subclasses of a frozen class are frozen too. - - :param bool weakref_slot: Make instances weak-referenceable. This has no - effect unless ``slots`` is also enabled. - :param bool auto_attribs: If ``True``, collect `PEP 526`_-annotated - attributes (Python 3.6 and later only) from the class body. - - In this case, you **must** annotate every field. If ``attrs`` - encounters a field that is set to an `attr.ib` but lacks a type - annotation, an `attr.exceptions.UnannotatedAttributeError` is - raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't - want to set a type. - - If you assign a value to those attributes (e.g. ``x: int = 42``), that - value becomes the default value like if it were passed using - ``attr.ib(default=42)``. Passing an instance of `Factory` also - works as expected. - - Attributes annotated as `typing.ClassVar`, and attributes that are - neither annotated nor set to an `attr.ib` are **ignored**. - - .. _`PEP 526`: https://www.python.org/dev/peps/pep-0526/ - :param bool kw_only: Make all attributes keyword-only (Python 3+) - in the generated ``__init__`` (if ``init`` is ``False``, this - parameter is ignored). - :param bool cache_hash: Ensure that the object's hash code is computed - only once and stored on the object. If this is set to ``True``, - hashing must be either explicitly or implicitly enabled for this - class. If the hash code is cached, avoid any reassignments of - fields involved in hash code computation or mutations of the objects - those fields point to after object creation. If such changes occur, - the behavior of the object's hash code is undefined. - :param bool auto_exc: If the class subclasses `BaseException` - (which implicitly includes any subclass of any exception), the - following happens to behave like a well-behaved Python exceptions - class: - - - the values for *eq*, *order*, and *hash* are ignored and the - instances compare and hash by the instance's ids (N.B. ``attrs`` will - *not* remove existing implementations of ``__hash__`` or the equality - methods. 
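The frozen-class notes above include the documented escape hatch: `object.__setattr__` bypasses the installed `_frozen_setattrs`. Sketch:

    import attr

    @attr.s(frozen=True)
    class C:
        x = attr.ib()

    c = C(1)
    try:
        c.x = 2
    except attr.exceptions.FrozenInstanceError:
        pass

    object.__setattr__(c, "x", 2)   # the hatch for __attrs_post_init__ etc.
    assert c.x == 2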
It just won't add own ones.), - - all attributes that are either passed into ``__init__`` or have a - default value are additionally available as a tuple in the ``args`` - attribute, - - the value of *str* is ignored leaving ``__str__`` to base classes. - :param bool collect_by_mro: Setting this to `True` fixes the way ``attrs`` - collects attributes from base classes. The default behavior is - incorrect in certain cases of multiple inheritance. It should be on by - default but is kept off for backward-compatability. - - See issue `#428 `_ for - more details. - - :param Optional[bool] getstate_setstate: - .. note:: - This is usually only interesting for slotted classes and you should - probably just set *auto_detect* to `True`. - - If `True`, ``__getstate__`` and - ``__setstate__`` are generated and attached to the class. This is - necessary for slotted classes to be pickleable. If left `None`, it's - `True` by default for slotted classes and ``False`` for dict classes. - - If *auto_detect* is `True`, and *getstate_setstate* is left `None`, - and **either** ``__getstate__`` or ``__setstate__`` is detected directly - on the class (i.e. not inherited), it is set to `False` (this is usually - what you want). - - :param on_setattr: A callable that is run whenever the user attempts to set - an attribute (either by assignment like ``i.x = 42`` or by using - `setattr` like ``setattr(i, "x", 42)``). It receives the same arguments - as validators: the instance, the attribute that is being modified, and - the new value. - - If no exception is raised, the attribute is set to the return value of - the callable. - - If a list of callables is passed, they're automatically wrapped in an - `attr.setters.pipe`. - - :param Optional[callable] field_transformer: - A function that is called with the original class object and all - fields right before ``attrs`` finalizes the class. You can use - this, e.g., to automatically add converters or validators to - fields based on their types. See `transform-fields` for more details. - - .. versionadded:: 16.0.0 *slots* - .. versionadded:: 16.1.0 *frozen* - .. versionadded:: 16.3.0 *str* - .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``. - .. versionchanged:: 17.1.0 - *hash* supports ``None`` as value which is also the default now. - .. versionadded:: 17.3.0 *auto_attribs* - .. versionchanged:: 18.1.0 - If *these* is passed, no attributes are deleted from the class body. - .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained. - .. versionadded:: 18.2.0 *weakref_slot* - .. deprecated:: 18.2.0 - ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a - `DeprecationWarning` if the classes compared are subclasses of - each other. ``__eq`` and ``__ne__`` never tried to compared subclasses - to each other. - .. versionchanged:: 19.2.0 - ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider - subclasses comparable anymore. - .. versionadded:: 18.2.0 *kw_only* - .. versionadded:: 18.2.0 *cache_hash* - .. versionadded:: 19.1.0 *auto_exc* - .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. - .. versionadded:: 19.2.0 *eq* and *order* - .. versionadded:: 20.1.0 *auto_detect* - .. versionadded:: 20.1.0 *collect_by_mro* - .. versionadded:: 20.1.0 *getstate_setstate* - .. versionadded:: 20.1.0 *on_setattr* - .. versionadded:: 20.3.0 *field_transformer* - """ - if auto_detect and PY2: - raise PythonTooOldError( - "auto_detect only works on Python 3 and later." 
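With `auto_exc=True` as described above, attrs leaves hashing and comparison to `BaseException` and mirrors the init'ed attributes into `.args`. Illustrative:

    import attr

    @attr.s(auto_exc=True)
    class MyError(Exception):
        code = attr.ib()
        msg = attr.ib(default="boom")

    e = MyError(42)
    assert e.args == (42, "boom")   # init'ed/defaulted attributes land in args
    try:
        raise MyError(7)
    except MyError as caught:
        assert caught.code == 7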
- ) - - eq_, order_ = _determine_eq_order(cmp, eq, order, None) - hash_ = hash # work around the lack of nonlocal - - if isinstance(on_setattr, (list, tuple)): - on_setattr = setters.pipe(*on_setattr) - - def wrap(cls): - - if getattr(cls, "__class__", None) is None: - raise TypeError("attrs only works with new-style classes.") - - is_frozen = frozen or _has_frozen_base_class(cls) - is_exc = auto_exc is True and issubclass(cls, BaseException) - has_own_setattr = auto_detect and _has_own_attribute( - cls, "__setattr__" - ) - - if has_own_setattr and is_frozen: - raise ValueError("Can't freeze a class with a custom __setattr__.") - - builder = _ClassBuilder( - cls, - these, - slots, - is_frozen, - weakref_slot, - _determine_whether_to_implement( - cls, - getstate_setstate, - auto_detect, - ("__getstate__", "__setstate__"), - default=slots, - ), - auto_attribs, - kw_only, - cache_hash, - is_exc, - collect_by_mro, - on_setattr, - has_own_setattr, - field_transformer, - ) - if _determine_whether_to_implement( - cls, repr, auto_detect, ("__repr__",) - ): - builder.add_repr(repr_ns) - if str is True: - builder.add_str() - - eq = _determine_whether_to_implement( - cls, eq_, auto_detect, ("__eq__", "__ne__") - ) - if not is_exc and eq is True: - builder.add_eq() - if not is_exc and _determine_whether_to_implement( - cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__") - ): - builder.add_order() - - builder.add_setattr() - - if ( - hash_ is None - and auto_detect is True - and _has_own_attribute(cls, "__hash__") - ): - hash = False - else: - hash = hash_ - if hash is not True and hash is not False and hash is not None: - # Can't use `hash in` because 1 == True for example. - raise TypeError( - "Invalid value for hash. Must be True, False, or None." - ) - elif hash is False or (hash is None and eq is False) or is_exc: - # Don't do anything. Should fall back to __object__'s __hash__ - # which is by id. - if cache_hash: - raise TypeError( - "Invalid value for cache_hash. To use hash caching," - " hashing must be either explicitly or implicitly " - "enabled." - ) - elif hash is True or ( - hash is None and eq is True and is_frozen is True - ): - # Build a __hash__ if told so, or if it's safe. - builder.add_hash() - else: - # Raise TypeError on attempts to hash. - if cache_hash: - raise TypeError( - "Invalid value for cache_hash. To use hash caching," - " hashing must be either explicitly or implicitly " - "enabled." - ) - builder.make_unhashable() - - if _determine_whether_to_implement( - cls, init, auto_detect, ("__init__",) - ): - builder.add_init() - else: - if cache_hash: - raise TypeError( - "Invalid value for cache_hash. To use hash caching," - " init must be True." - ) - - return builder.build_class() - - # maybe_cls's type depends on the usage of the decorator. It's a class - # if it's used as `@attrs` but ``None`` if used as `@attrs()`. - if maybe_cls is None: - return wrap - else: - return wrap(maybe_cls) - - -_attrs = attrs -""" -Internal alias so we can use it in functions that take an argument called -*attrs*. -""" - - -if PY2: - - def _has_frozen_base_class(cls): - """ - Check whether *cls* has a frozen ancestor by looking at its - __setattr__. - """ - return ( - getattr(cls.__setattr__, "__module__", None) - == _frozen_setattrs.__module__ - and cls.__setattr__.__name__ == _frozen_setattrs.__name__ - ) - - -else: - - def _has_frozen_base_class(cls): - """ - Check whether *cls* has a frozen ancestor by looking at its - __setattr__. 
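In the `wrap` body above, `_determine_whether_to_implement` consults `auto_detect` per dunder group, so a hand-written method on the decorated class suppresses only that group. Sketch:

    import attr

    @attr.s(auto_detect=True)
    class C:
        x = attr.ib()

        def __repr__(self):        # own dunder detected on this class...
            return "custom"

    assert repr(C(1)) == "custom"  # ...so attrs leaves __repr__ alone
    assert C(1) == C(1)            # __eq__/__ne__ are still generated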
- """ - return cls.__setattr__ == _frozen_setattrs - - -def _attrs_to_tuple(obj, attrs): - """ - Create a tuple of all values of *obj*'s *attrs*. - """ - return tuple(getattr(obj, a.name) for a in attrs) - - -def _generate_unique_filename(cls, func_name): - """ - Create a "filename" suitable for a function being generated. - """ - unique_id = uuid.uuid4() - extra = "" - count = 1 - - while True: - unique_filename = "".format( - func_name, - cls.__module__, - getattr(cls, "__qualname__", cls.__name__), - extra, - ) - # To handle concurrency we essentially "reserve" our spot in - # the linecache with a dummy line. The caller can then - # set this value correctly. - cache_line = (1, None, (str(unique_id),), unique_filename) - if ( - linecache.cache.setdefault(unique_filename, cache_line) - == cache_line - ): - return unique_filename - - # Looks like this spot is taken. Try again. - count += 1 - extra = "-{0}".format(count) - - -def _make_hash(cls, attrs, frozen, cache_hash): - attrs = tuple( - a for a in attrs if a.hash is True or (a.hash is None and a.eq is True) - ) - - tab = " " - - unique_filename = _generate_unique_filename(cls, "hash") - type_hash = hash(unique_filename) - - hash_def = "def __hash__(self" - hash_func = "hash((" - closing_braces = "))" - if not cache_hash: - hash_def += "):" - else: - if not PY2: - hash_def += ", *" - - hash_def += ( - ", _cache_wrapper=" - + "__import__('attr._make')._make._CacheHashWrapper):" - ) - hash_func = "_cache_wrapper(" + hash_func - closing_braces += ")" - - method_lines = [hash_def] - - def append_hash_computation_lines(prefix, indent): - """ - Generate the code for actually computing the hash code. - Below this will either be returned directly or used to compute - a value which is then cached, depending on the value of cache_hash - """ - - method_lines.extend( - [ - indent + prefix + hash_func, - indent + " %d," % (type_hash,), - ] - ) - - for a in attrs: - method_lines.append(indent + " self.%s," % a.name) - - method_lines.append(indent + " " + closing_braces) - - if cache_hash: - method_lines.append(tab + "if self.%s is None:" % _hash_cache_field) - if frozen: - append_hash_computation_lines( - "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2 - ) - method_lines.append(tab * 2 + ")") # close __setattr__ - else: - append_hash_computation_lines( - "self.%s = " % _hash_cache_field, tab * 2 - ) - method_lines.append(tab + "return self.%s" % _hash_cache_field) - else: - append_hash_computation_lines("return ", tab) - - script = "\n".join(method_lines) - globs = {} - locs = {} - bytecode = compile(script, unique_filename, "exec") - eval(bytecode, globs, locs) - - # In order of debuggers like PDB being able to step through the code, - # we add a fake linecache entry. - linecache.cache[unique_filename] = ( - len(script), - None, - script.splitlines(True), - unique_filename, - ) - - return locs["__hash__"] - - -def _add_hash(cls, attrs): - """ - Add a hash method to *cls*. - """ - cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False) - return cls - - -def _make_ne(): - """ - Create __ne__ method. - """ - - def __ne__(self, other): - """ - Check equality and either forward a NotImplemented or - return the result negated. - """ - result = self.__eq__(other) - if result is NotImplemented: - return NotImplemented - - return not result - - return __ne__ - - -def _make_eq(cls, attrs): - """ - Create __eq__ method for *cls* with *attrs*. 
- """ - attrs = [a for a in attrs if a.eq] - - unique_filename = _generate_unique_filename(cls, "eq") - lines = [ - "def __eq__(self, other):", - " if other.__class__ is not self.__class__:", - " return NotImplemented", - ] - # We can't just do a big self.x = other.x and... clause due to - # irregularities like nan == nan is false but (nan,) == (nan,) is true. - if attrs: - lines.append(" return (") - others = [" ) == ("] - for a in attrs: - lines.append(" self.%s," % (a.name,)) - others.append(" other.%s," % (a.name,)) - - lines += others + [" )"] - else: - lines.append(" return True") - - script = "\n".join(lines) - globs = {} - locs = {} - bytecode = compile(script, unique_filename, "exec") - eval(bytecode, globs, locs) - - # In order of debuggers like PDB being able to step through the code, - # we add a fake linecache entry. - linecache.cache[unique_filename] = ( - len(script), - None, - script.splitlines(True), - unique_filename, - ) - return locs["__eq__"] - - -def _make_order(cls, attrs): - """ - Create ordering methods for *cls* with *attrs*. - """ - attrs = [a for a in attrs if a.order] - - def attrs_to_tuple(obj): - """ - Save us some typing. - """ - return _attrs_to_tuple(obj, attrs) - - def __lt__(self, other): - """ - Automatically created by attrs. - """ - if other.__class__ is self.__class__: - return attrs_to_tuple(self) < attrs_to_tuple(other) - - return NotImplemented - - def __le__(self, other): - """ - Automatically created by attrs. - """ - if other.__class__ is self.__class__: - return attrs_to_tuple(self) <= attrs_to_tuple(other) - - return NotImplemented - - def __gt__(self, other): - """ - Automatically created by attrs. - """ - if other.__class__ is self.__class__: - return attrs_to_tuple(self) > attrs_to_tuple(other) - - return NotImplemented - - def __ge__(self, other): - """ - Automatically created by attrs. - """ - if other.__class__ is self.__class__: - return attrs_to_tuple(self) >= attrs_to_tuple(other) - - return NotImplemented - - return __lt__, __le__, __gt__, __ge__ - - -def _add_eq(cls, attrs=None): - """ - Add equality methods to *cls* with *attrs*. - """ - if attrs is None: - attrs = cls.__attrs_attrs__ - - cls.__eq__ = _make_eq(cls, attrs) - cls.__ne__ = _make_ne() - - return cls - - -_already_repring = threading.local() - - -def _make_repr(attrs, ns): - """ - Make a repr method that includes relevant *attrs*, adding *ns* to the full - name. - """ - - # Figure out which attributes to include, and which function to use to - # format them. The a.repr value can be either bool or a custom callable. - attr_names_with_reprs = tuple( - (a.name, repr if a.repr is True else a.repr) - for a in attrs - if a.repr is not False - ) - - def __repr__(self): - """ - Automatically created by attrs. - """ - try: - working_set = _already_repring.working_set - except AttributeError: - working_set = set() - _already_repring.working_set = working_set - - if id(self) in working_set: - return "..." - real_cls = self.__class__ - if ns is None: - qualname = getattr(real_cls, "__qualname__", None) - if qualname is not None: - class_name = qualname.rsplit(">.", 1)[-1] - else: - class_name = real_cls.__name__ - else: - class_name = ns + "." + real_cls.__name__ - - # Since 'self' remains on the stack (i.e.: strongly referenced) for the - # duration of this call, it's safe to depend on id(...) stability, and - # not need to track the instance and therefore worry about properties - # like weakref- or hash-ability. 
- working_set.add(id(self)) - try: - result = [class_name, "("] - first = True - for name, attr_repr in attr_names_with_reprs: - if first: - first = False - else: - result.append(", ") - result.extend( - (name, "=", attr_repr(getattr(self, name, NOTHING))) - ) - return "".join(result) + ")" - finally: - working_set.remove(id(self)) - - return __repr__ - - -def _add_repr(cls, ns=None, attrs=None): - """ - Add a repr method to *cls*. - """ - if attrs is None: - attrs = cls.__attrs_attrs__ - - cls.__repr__ = _make_repr(attrs, ns) - return cls - - -def fields(cls): - """ - Return the tuple of ``attrs`` attributes for a class. - - The tuple also allows accessing the fields by their names (see below for - examples). - - :param type cls: Class to introspect. - - :raise TypeError: If *cls* is not a class. - :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` - class. - - :rtype: tuple (with name accessors) of `attr.Attribute` - - .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields - by name. - """ - if not isclass(cls): - raise TypeError("Passed object must be a class.") - attrs = getattr(cls, "__attrs_attrs__", None) - if attrs is None: - raise NotAnAttrsClassError( - "{cls!r} is not an attrs-decorated class.".format(cls=cls) - ) - return attrs - - -def fields_dict(cls): - """ - Return an ordered dictionary of ``attrs`` attributes for a class, whose - keys are the attribute names. - - :param type cls: Class to introspect. - - :raise TypeError: If *cls* is not a class. - :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` - class. - - :rtype: an ordered dict where keys are attribute names and values are - `attr.Attribute`\\ s. This will be a `dict` if it's - naturally ordered like on Python 3.6+ or an - :class:`~collections.OrderedDict` otherwise. - - .. versionadded:: 18.1.0 - """ - if not isclass(cls): - raise TypeError("Passed object must be a class.") - attrs = getattr(cls, "__attrs_attrs__", None) - if attrs is None: - raise NotAnAttrsClassError( - "{cls!r} is not an attrs-decorated class.".format(cls=cls) - ) - return ordered_dict(((a.name, a) for a in attrs)) - - -def validate(inst): - """ - Validate all attributes on *inst* that have a validator. - - Leaves all exceptions through. - - :param inst: Instance of a class with ``attrs`` attributes. - """ - if _config._run_validators is False: - return - - for a in fields(inst.__class__): - v = a.validator - if v is not None: - v(inst, a, getattr(inst, a.name)) - - -def _is_slot_cls(cls): - return "__slots__" in cls.__dict__ - - -def _is_slot_attr(a_name, base_attr_map): - """ - Check if the attribute name comes from a slot class. 
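`fields`, `fields_dict`, and `validate` above are the introspection surface, and `validate` honors the global switch from the `_config.py` module deleted earlier in this diff. Sketch:

    import attr

    @attr.s
    class C:
        x = attr.ib(validator=attr.validators.instance_of(int))
        y = attr.ib(default=0)

    assert attr.fields(C).x.name == "x"            # tuple with name accessors
    assert list(attr.fields_dict(C)) == ["x", "y"]

    attr.set_run_validators(False)                 # the _config.py toggle
    c = C("not an int")                            # validators skipped
    attr.set_run_validators(True)
    try:
        attr.validate(c)                           # re-run them manually
    except TypeError:
        pass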
- """ - return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name]) - - -def _make_init( - cls, - attrs, - post_init, - frozen, - slots, - cache_hash, - base_attr_map, - is_exc, - has_global_on_setattr, -): - if frozen and has_global_on_setattr: - raise ValueError("Frozen classes can't use on_setattr.") - - needs_cached_setattr = cache_hash or frozen - filtered_attrs = [] - attr_dict = {} - for a in attrs: - if not a.init and a.default is NOTHING: - continue - - filtered_attrs.append(a) - attr_dict[a.name] = a - - if a.on_setattr is not None: - if frozen is True: - raise ValueError("Frozen classes can't use on_setattr.") - - needs_cached_setattr = True - elif ( - has_global_on_setattr and a.on_setattr is not setters.NO_OP - ) or _is_slot_attr(a.name, base_attr_map): - needs_cached_setattr = True - - unique_filename = _generate_unique_filename(cls, "init") - - script, globs, annotations = _attrs_to_init_script( - filtered_attrs, - frozen, - slots, - post_init, - cache_hash, - base_attr_map, - is_exc, - needs_cached_setattr, - has_global_on_setattr, - ) - locs = {} - bytecode = compile(script, unique_filename, "exec") - globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) - - if needs_cached_setattr: - # Save the lookup overhead in __init__ if we need to circumvent - # setattr hooks. - globs["_cached_setattr"] = _obj_setattr - - eval(bytecode, globs, locs) - - # In order of debuggers like PDB being able to step through the code, - # we add a fake linecache entry. - linecache.cache[unique_filename] = ( - len(script), - None, - script.splitlines(True), - unique_filename, - ) - - __init__ = locs["__init__"] - __init__.__annotations__ = annotations - - return __init__ - - -def _setattr(attr_name, value_var, has_on_setattr): - """ - Use the cached object.setattr to set *attr_name* to *value_var*. - """ - return "_setattr('%s', %s)" % (attr_name, value_var) - - -def _setattr_with_converter(attr_name, value_var, has_on_setattr): - """ - Use the cached object.setattr to set *attr_name* to *value_var*, but run - its converter first. - """ - return "_setattr('%s', %s(%s))" % ( - attr_name, - _init_converter_pat % (attr_name,), - value_var, - ) - - -def _assign(attr_name, value, has_on_setattr): - """ - Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise - relegate to _setattr. - """ - if has_on_setattr: - return _setattr(attr_name, value, True) - - return "self.%s = %s" % (attr_name, value) - - -def _assign_with_converter(attr_name, value_var, has_on_setattr): - """ - Unless *attr_name* has an on_setattr hook, use normal assignment after - conversion. Otherwise relegate to _setattr_with_converter. - """ - if has_on_setattr: - return _setattr_with_converter(attr_name, value_var, True) - - return "self.%s = %s(%s)" % ( - attr_name, - _init_converter_pat % (attr_name,), - value_var, - ) - - -if PY2: - - def _unpack_kw_only_py2(attr_name, default=None): - """ - Unpack *attr_name* from _kw_only dict. - """ - if default is not None: - arg_default = ", %s" % default - else: - arg_default = "" - return "%s = _kw_only.pop('%s'%s)" % ( - attr_name, - attr_name, - arg_default, - ) - - def _unpack_kw_only_lines_py2(kw_only_args): - """ - Unpack all *kw_only_args* from _kw_only dict and handle errors. - - Given a list of strings "{attr_name}" and "{attr_name}={default}" - generates list of lines of code that pop attrs from _kw_only dict and - raise TypeError similar to builtin if required attr is missing or - extra key is passed. 
- - >>> print("\n".join(_unpack_kw_only_lines_py2(["a", "b=42"]))) - try: - a = _kw_only.pop('a') - b = _kw_only.pop('b', 42) - except KeyError as _key_error: - raise TypeError( - ... - if _kw_only: - raise TypeError( - ... - """ - lines = ["try:"] - lines.extend( - " " + _unpack_kw_only_py2(*arg.split("=")) - for arg in kw_only_args - ) - lines += """\ -except KeyError as _key_error: - raise TypeError( - '__init__() missing required keyword-only argument: %s' % _key_error - ) -if _kw_only: - raise TypeError( - '__init__() got an unexpected keyword argument %r' - % next(iter(_kw_only)) - ) -""".split( - "\n" - ) - return lines - - -def _attrs_to_init_script( - attrs, - frozen, - slots, - post_init, - cache_hash, - base_attr_map, - is_exc, - needs_cached_setattr, - has_global_on_setattr, -): - """ - Return a script of an initializer for *attrs* and a dict of globals. - - The globals are expected by the generated script. - - If *frozen* is True, we cannot set the attributes directly so we use - a cached ``object.__setattr__``. - """ - lines = [] - if needs_cached_setattr: - lines.append( - # Circumvent the __setattr__ descriptor to save one lookup per - # assignment. - # Note _setattr will be used again below if cache_hash is True - "_setattr = _cached_setattr.__get__(self, self.__class__)" - ) - - if frozen is True: - if slots is True: - fmt_setter = _setattr - fmt_setter_with_converter = _setattr_with_converter - else: - # Dict frozen classes assign directly to __dict__. - # But only if the attribute doesn't come from an ancestor slot - # class. - # Note _inst_dict will be used again below if cache_hash is True - lines.append("_inst_dict = self.__dict__") - - def fmt_setter(attr_name, value_var, has_on_setattr): - if _is_slot_attr(attr_name, base_attr_map): - return _setattr(attr_name, value_var, has_on_setattr) - - return "_inst_dict['%s'] = %s" % (attr_name, value_var) - - def fmt_setter_with_converter( - attr_name, value_var, has_on_setattr - ): - if has_on_setattr or _is_slot_attr(attr_name, base_attr_map): - return _setattr_with_converter( - attr_name, value_var, has_on_setattr - ) - - return "_inst_dict['%s'] = %s(%s)" % ( - attr_name, - _init_converter_pat % (attr_name,), - value_var, - ) - - else: - # Not frozen. - fmt_setter = _assign - fmt_setter_with_converter = _assign_with_converter - - args = [] - kw_only_args = [] - attrs_to_validate = [] - - # This is a dictionary of names to validator and converter callables. - # Injecting this into __init__ globals lets us avoid lookups. 
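Worth noting while reading `_attrs_to_init_script` below: the initializer it assembles is ordinary Python source, and thanks to the linecache entry registered in `_make_init`, it can be inspected on any attrs class. A hedged illustration — the exact generated text varies by attrs version and options:

import inspect
import attr

@attr.s
class Point(object):
    x = attr.ib()
    y = attr.ib(default=0)

# Works because the generated __init__ carries a synthetic filename
# that the machinery above registered in linecache.
print(inspect.getsource(Point.__init__))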
- names_for_globals = {} - annotations = {"return": None} - - for a in attrs: - if a.validator: - attrs_to_validate.append(a) - - attr_name = a.name - has_on_setattr = a.on_setattr is not None or ( - a.on_setattr is not setters.NO_OP and has_global_on_setattr - ) - arg_name = a.name.lstrip("_") - - has_factory = isinstance(a.default, Factory) - if has_factory and a.default.takes_self: - maybe_self = "self" - else: - maybe_self = "" - - if a.init is False: - if has_factory: - init_factory_name = _init_factory_pat.format(a.name) - if a.converter is not None: - lines.append( - fmt_setter_with_converter( - attr_name, - init_factory_name + "(%s)" % (maybe_self,), - has_on_setattr, - ) - ) - conv_name = _init_converter_pat % (a.name,) - names_for_globals[conv_name] = a.converter - else: - lines.append( - fmt_setter( - attr_name, - init_factory_name + "(%s)" % (maybe_self,), - has_on_setattr, - ) - ) - names_for_globals[init_factory_name] = a.default.factory - else: - if a.converter is not None: - lines.append( - fmt_setter_with_converter( - attr_name, - "attr_dict['%s'].default" % (attr_name,), - has_on_setattr, - ) - ) - conv_name = _init_converter_pat % (a.name,) - names_for_globals[conv_name] = a.converter - else: - lines.append( - fmt_setter( - attr_name, - "attr_dict['%s'].default" % (attr_name,), - has_on_setattr, - ) - ) - elif a.default is not NOTHING and not has_factory: - arg = "%s=attr_dict['%s'].default" % (arg_name, attr_name) - if a.kw_only: - kw_only_args.append(arg) - else: - args.append(arg) - - if a.converter is not None: - lines.append( - fmt_setter_with_converter( - attr_name, arg_name, has_on_setattr - ) - ) - names_for_globals[ - _init_converter_pat % (a.name,) - ] = a.converter - else: - lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) - - elif has_factory: - arg = "%s=NOTHING" % (arg_name,) - if a.kw_only: - kw_only_args.append(arg) - else: - args.append(arg) - lines.append("if %s is not NOTHING:" % (arg_name,)) - - init_factory_name = _init_factory_pat.format(a.name) - if a.converter is not None: - lines.append( - " " - + fmt_setter_with_converter( - attr_name, arg_name, has_on_setattr - ) - ) - lines.append("else:") - lines.append( - " " - + fmt_setter_with_converter( - attr_name, - init_factory_name + "(" + maybe_self + ")", - has_on_setattr, - ) - ) - names_for_globals[ - _init_converter_pat % (a.name,) - ] = a.converter - else: - lines.append( - " " + fmt_setter(attr_name, arg_name, has_on_setattr) - ) - lines.append("else:") - lines.append( - " " - + fmt_setter( - attr_name, - init_factory_name + "(" + maybe_self + ")", - has_on_setattr, - ) - ) - names_for_globals[init_factory_name] = a.default.factory - else: - if a.kw_only: - kw_only_args.append(arg_name) - else: - args.append(arg_name) - - if a.converter is not None: - lines.append( - fmt_setter_with_converter( - attr_name, arg_name, has_on_setattr - ) - ) - names_for_globals[ - _init_converter_pat % (a.name,) - ] = a.converter - else: - lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) - - if a.init is True and a.converter is None and a.type is not None: - annotations[arg_name] = a.type - - if attrs_to_validate: # we can skip this if there are no validators. 
- names_for_globals["_config"] = _config - lines.append("if _config._run_validators is True:") - for a in attrs_to_validate: - val_name = "__attr_validator_" + a.name - attr_name = "__attr_" + a.name - lines.append( - " %s(self, %s, self.%s)" % (val_name, attr_name, a.name) - ) - names_for_globals[val_name] = a.validator - names_for_globals[attr_name] = a - - if post_init: - lines.append("self.__attrs_post_init__()") - - # because this is set only after __attrs_post_init is called, a crash - # will result if post-init tries to access the hash code. This seemed - # preferable to setting this beforehand, in which case alteration to - # field values during post-init combined with post-init accessing the - # hash code would result in silent bugs. - if cache_hash: - if frozen: - if slots: - # if frozen and slots, then _setattr defined above - init_hash_cache = "_setattr('%s', %s)" - else: - # if frozen and not slots, then _inst_dict defined above - init_hash_cache = "_inst_dict['%s'] = %s" - else: - init_hash_cache = "self.%s = %s" - lines.append(init_hash_cache % (_hash_cache_field, "None")) - - # For exceptions we rely on BaseException.__init__ for proper - # initialization. - if is_exc: - vals = ",".join("self." + a.name for a in attrs if a.init) - - lines.append("BaseException.__init__(self, %s)" % (vals,)) - - args = ", ".join(args) - if kw_only_args: - if PY2: - lines = _unpack_kw_only_lines_py2(kw_only_args) + lines - - args += "%s**_kw_only" % (", " if args else "",) # leading comma - else: - args += "%s*, %s" % ( - ", " if args else "", # leading comma - ", ".join(kw_only_args), # kw_only args - ) - return ( - """\ -def __init__(self, {args}): - {lines} -""".format( - args=args, lines="\n ".join(lines) if lines else "pass" - ), - names_for_globals, - annotations, - ) - - -class Attribute(object): - """ - *Read-only* representation of an attribute. - - Instances of this class are frequently used for introspection purposes - like: - - - `fields` returns a tuple of them. - - Validators get them passed as the first argument. - - The *field transformer* hook receives a list of them. - - :attribute name: The name of the attribute. - :attribute inherited: Whether or not that attribute has been inherited from - a base class. - - Plus *all* arguments of `attr.ib` (except for ``factory`` - which is only syntactic sugar for ``default=Factory(...)``. - - .. versionadded:: 20.1.0 *inherited* - .. versionadded:: 20.1.0 *on_setattr* - .. versionchanged:: 20.2.0 *inherited* is not taken into account for - equality checks and hashing anymore. - - For the full version history of the fields, see `attr.ib`. - """ - - __slots__ = ( - "name", - "default", - "validator", - "repr", - "eq", - "order", - "hash", - "init", - "metadata", - "type", - "converter", - "kw_only", - "inherited", - "on_setattr", - ) - - def __init__( - self, - name, - default, - validator, - repr, - cmp, # XXX: unused, remove along with other cmp code. - hash, - init, - inherited, - metadata=None, - type=None, - converter=None, - kw_only=False, - eq=None, - order=None, - on_setattr=None, - ): - eq, order = _determine_eq_order(cmp, eq, order, True) - - # Cache this descriptor here to speed things up later. - bound_setattr = _obj_setattr.__get__(self, Attribute) - - # Despite the big red warning, people *do* instantiate `Attribute` - # themselves. 
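The `bound_setattr = _obj_setattr.__get__(self, Attribute)` line just above is the trick attrs uses for frozen objects throughout: bind `object.__setattr__` to the instance once, then call it to bypass the class's own raising `__setattr__` (and its lookup cost). A self-contained sketch with invented names:

class Frozen(object):
    def __init__(self, x):
        # Bind object.__setattr__ once and reuse it; this sidesteps the
        # raising __setattr__ below, just like Attribute.__init__ does.
        bound_setattr = object.__setattr__.__get__(self, Frozen)
        bound_setattr("x", x)

    def __setattr__(self, name, value):
        raise AttributeError("can't set attribute")

f = Frozen(42)
print(f.x)  # 42; f.x = 1 would raise AttributeError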
- bound_setattr("name", name) - bound_setattr("default", default) - bound_setattr("validator", validator) - bound_setattr("repr", repr) - bound_setattr("eq", eq) - bound_setattr("order", order) - bound_setattr("hash", hash) - bound_setattr("init", init) - bound_setattr("converter", converter) - bound_setattr( - "metadata", - ( - metadata_proxy(metadata) - if metadata - else _empty_metadata_singleton - ), - ) - bound_setattr("type", type) - bound_setattr("kw_only", kw_only) - bound_setattr("inherited", inherited) - bound_setattr("on_setattr", on_setattr) - - def __setattr__(self, name, value): - raise FrozenInstanceError() - - @classmethod - def from_counting_attr(cls, name, ca, type=None): - # type holds the annotated value. deal with conflicts: - if type is None: - type = ca.type - elif ca.type is not None: - raise ValueError( - "Type annotation and type argument cannot both be present" - ) - inst_dict = { - k: getattr(ca, k) - for k in Attribute.__slots__ - if k - not in ( - "name", - "validator", - "default", - "type", - "inherited", - ) # exclude methods and deprecated alias - } - return cls( - name=name, - validator=ca._validator, - default=ca._default, - type=type, - cmp=None, - inherited=False, - **inst_dict - ) - - @property - def cmp(self): - """ - Simulate the presence of a cmp attribute and warn. - """ - warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=2) - - return self.eq and self.order - - # Don't use attr.evolve since fields(Attribute) doesn't work - def evolve(self, **changes): - """ - Copy *self* and apply *changes*. - - This works similarly to `attr.evolve` but that function does not work - with ``Attribute``. - - It is mainly meant to be used for `transform-fields`. - - .. versionadded:: 20.3.0 - """ - new = copy.copy(self) - - new._setattrs(changes.items()) - - return new - - # Don't use _add_pickle since fields(Attribute) doesn't work - def __getstate__(self): - """ - Play nice with pickle. - """ - return tuple( - getattr(self, name) if name != "metadata" else dict(self.metadata) - for name in self.__slots__ - ) - - def __setstate__(self, state): - """ - Play nice with pickle. - """ - self._setattrs(zip(self.__slots__, state)) - - def _setattrs(self, name_values_pairs): - bound_setattr = _obj_setattr.__get__(self, Attribute) - for name, value in name_values_pairs: - if name != "metadata": - bound_setattr(name, value) - else: - bound_setattr( - name, - metadata_proxy(value) - if value - else _empty_metadata_singleton, - ) - - -_a = [ - Attribute( - name=name, - default=NOTHING, - validator=None, - repr=True, - cmp=None, - eq=True, - order=False, - hash=(name != "metadata"), - init=True, - inherited=False, - ) - for name in Attribute.__slots__ -] - -Attribute = _add_hash( - _add_eq( - _add_repr(Attribute, attrs=_a), - attrs=[a for a in _a if a.name != "inherited"], - ), - attrs=[a for a in _a if a.hash and a.name != "inherited"], -) - - -class _CountingAttr(object): - """ - Intermediate representation of attributes that uses a counter to preserve - the order in which the attributes have been defined. - - *Internal* data structure of the attrs library. Running into is most - likely the result of a bug like a forgotten `@attr.s` decorator. 
- """ - - __slots__ = ( - "counter", - "_default", - "repr", - "eq", - "order", - "hash", - "init", - "metadata", - "_validator", - "converter", - "type", - "kw_only", - "on_setattr", - ) - __attrs_attrs__ = tuple( - Attribute( - name=name, - default=NOTHING, - validator=None, - repr=True, - cmp=None, - hash=True, - init=True, - kw_only=False, - eq=True, - order=False, - inherited=False, - on_setattr=None, - ) - for name in ( - "counter", - "_default", - "repr", - "eq", - "order", - "hash", - "init", - "on_setattr", - ) - ) + ( - Attribute( - name="metadata", - default=None, - validator=None, - repr=True, - cmp=None, - hash=False, - init=True, - kw_only=False, - eq=True, - order=False, - inherited=False, - on_setattr=None, - ), - ) - cls_counter = 0 - - def __init__( - self, - default, - validator, - repr, - cmp, # XXX: unused, remove along with cmp - hash, - init, - converter, - metadata, - type, - kw_only, - eq, - order, - on_setattr, - ): - _CountingAttr.cls_counter += 1 - self.counter = _CountingAttr.cls_counter - self._default = default - self._validator = validator - self.converter = converter - self.repr = repr - self.eq = eq - self.order = order - self.hash = hash - self.init = init - self.metadata = metadata - self.type = type - self.kw_only = kw_only - self.on_setattr = on_setattr - - def validator(self, meth): - """ - Decorator that adds *meth* to the list of validators. - - Returns *meth* unchanged. - - .. versionadded:: 17.1.0 - """ - if self._validator is None: - self._validator = meth - else: - self._validator = and_(self._validator, meth) - return meth - - def default(self, meth): - """ - Decorator that allows to set the default for an attribute. - - Returns *meth* unchanged. - - :raises DefaultAlreadySetError: If default has been set before. - - .. versionadded:: 17.1.0 - """ - if self._default is not NOTHING: - raise DefaultAlreadySetError() - - self._default = Factory(meth, takes_self=True) - - return meth - - -_CountingAttr = _add_eq(_add_repr(_CountingAttr)) - - -@attrs(slots=True, init=False, hash=True) -class Factory(object): - """ - Stores a factory callable. - - If passed as the default value to `attr.ib`, the factory is used to - generate a new value. - - :param callable factory: A callable that takes either none or exactly one - mandatory positional argument depending on *takes_self*. - :param bool takes_self: Pass the partially initialized instance that is - being initialized as a positional argument. - - .. versionadded:: 17.1.0 *takes_self* - """ - - factory = attrib() - takes_self = attrib() - - def __init__(self, factory, takes_self=False): - """ - `Factory` is part of the default machinery so if we want a default - value here, we have to implement it ourselves. - """ - self.factory = factory - self.takes_self = takes_self - - -def make_class(name, attrs, bases=(object,), **attributes_arguments): - """ - A quick way to create a new class called *name* with *attrs*. - - :param str name: The name for the new class. - - :param attrs: A list of names or a dictionary of mappings of names to - attributes. - - If *attrs* is a list or an ordered dict (`dict` on Python 3.6+, - `collections.OrderedDict` otherwise), the order is deduced from - the order of the names or attributes inside *attrs*. Otherwise the - order of the definition of the attributes is used. - :type attrs: `list` or `dict` - - :param tuple bases: Classes that the new class will subclass. - - :param attributes_arguments: Passed unmodified to `attr.s`. - - :return: A new class with *attrs*. 
- :rtype: type - - .. versionadded:: 17.1.0 *bases* - .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained. - """ - if isinstance(attrs, dict): - cls_dict = attrs - elif isinstance(attrs, (list, tuple)): - cls_dict = dict((a, attrib()) for a in attrs) - else: - raise TypeError("attrs argument must be a dict or a list.") - - post_init = cls_dict.pop("__attrs_post_init__", None) - type_ = type( - name, - bases, - {} if post_init is None else {"__attrs_post_init__": post_init}, - ) - # For pickling to work, the __module__ variable needs to be set to the - # frame where the class is created. Bypass this step in environments where - # sys._getframe is not defined (Jython for example) or sys._getframe is not - # defined for arguments greater than 0 (IronPython). - try: - type_.__module__ = sys._getframe(1).f_globals.get( - "__name__", "__main__" - ) - except (AttributeError, ValueError): - pass - - # We do it here for proper warnings with meaningful stacklevel. - cmp = attributes_arguments.pop("cmp", None) - ( - attributes_arguments["eq"], - attributes_arguments["order"], - ) = _determine_eq_order( - cmp, - attributes_arguments.get("eq"), - attributes_arguments.get("order"), - True, - ) - - return _attrs(these=cls_dict, **attributes_arguments)(type_) - - -# These are required by within this module so we define them here and merely -# import into .validators / .converters. - - -@attrs(slots=True, hash=True) -class _AndValidator(object): - """ - Compose many validators to a single one. - """ - - _validators = attrib() - - def __call__(self, inst, attr, value): - for v in self._validators: - v(inst, attr, value) - - -def and_(*validators): - """ - A validator that composes multiple validators into one. - - When called on a value, it runs all wrapped validators. - - :param callables validators: Arbitrary number of validators. - - .. versionadded:: 17.1.0 - """ - vals = [] - for validator in validators: - vals.extend( - validator._validators - if isinstance(validator, _AndValidator) - else [validator] - ) - - return _AndValidator(tuple(vals)) - - -def pipe(*converters): - """ - A converter that composes multiple converters into one. - - When called on a value, it runs all wrapped converters, returning the - *last* value. - - :param callables converters: Arbitrary number of converters. - - .. versionadded:: 20.1.0 - """ - - def pipe_converter(val): - for converter in converters: - val = converter(val) - - return val - - return pipe_converter diff --git a/vendor/poetry-core/poetry/core/_vendor/attr/_next_gen.py b/vendor/poetry-core/poetry/core/_vendor/attr/_next_gen.py deleted file mode 100644 index 2b5565c5..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/attr/_next_gen.py +++ /dev/null @@ -1,160 +0,0 @@ -""" -This is a Python 3.6 and later-only, keyword-only, and **provisional** API that -calls `attr.s` with different default values. - -Provisional APIs that shall become "import attrs" one glorious day. -""" - -from functools import partial - -from attr.exceptions import UnannotatedAttributeError - -from . 
import setters -from ._make import NOTHING, _frozen_setattrs, attrib, attrs - - -def define( - maybe_cls=None, - *, - these=None, - repr=None, - hash=None, - init=None, - slots=True, - frozen=False, - weakref_slot=True, - str=False, - auto_attribs=None, - kw_only=False, - cache_hash=False, - auto_exc=True, - eq=None, - order=False, - auto_detect=True, - getstate_setstate=None, - on_setattr=None, - field_transformer=None, -): - r""" - The only behavioral differences are the handling of the *auto_attribs* - option: - - :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves - exactly like `attr.s`. If left `None`, `attr.s` will try to guess: - - 1. If all attributes are annotated and no `attr.ib` is found, it assumes - *auto_attribs=True*. - 2. Otherwise it assumes *auto_attribs=False* and tries to collect - `attr.ib`\ s. - - and that mutable classes (``frozen=False``) validate on ``__setattr__``. - - .. versionadded:: 20.1.0 - """ - - def do_it(cls, auto_attribs): - return attrs( - maybe_cls=cls, - these=these, - repr=repr, - hash=hash, - init=init, - slots=slots, - frozen=frozen, - weakref_slot=weakref_slot, - str=str, - auto_attribs=auto_attribs, - kw_only=kw_only, - cache_hash=cache_hash, - auto_exc=auto_exc, - eq=eq, - order=order, - auto_detect=auto_detect, - collect_by_mro=True, - getstate_setstate=getstate_setstate, - on_setattr=on_setattr, - field_transformer=field_transformer, - ) - - def wrap(cls): - """ - Making this a wrapper ensures this code runs during class creation. - - We also ensure that frozen-ness of classes is inherited. - """ - nonlocal frozen, on_setattr - - had_on_setattr = on_setattr not in (None, setters.NO_OP) - - # By default, mutable classes validate on setattr. - if frozen is False and on_setattr is None: - on_setattr = setters.validate - - # However, if we subclass a frozen class, we inherit the immutability - # and disable on_setattr. - for base_cls in cls.__bases__: - if base_cls.__setattr__ is _frozen_setattrs: - if had_on_setattr: - raise ValueError( - "Frozen classes can't use on_setattr " - "(frozen-ness was inherited)." - ) - - on_setattr = setters.NO_OP - break - - if auto_attribs is not None: - return do_it(cls, auto_attribs) - - try: - return do_it(cls, True) - except UnannotatedAttributeError: - return do_it(cls, False) - - # maybe_cls's type depends on the usage of the decorator. It's a class - # if it's used as `@attrs` but ``None`` if used as `@attrs()`. - if maybe_cls is None: - return wrap - else: - return wrap(maybe_cls) - - -mutable = define -frozen = partial(define, frozen=True, on_setattr=None) - - -def field( - *, - default=NOTHING, - validator=None, - repr=True, - hash=None, - init=True, - metadata=None, - converter=None, - factory=None, - kw_only=False, - eq=None, - order=None, - on_setattr=None, -): - """ - Identical to `attr.ib`, except keyword-only and with some arguments - removed. - - .. 
versionadded:: 20.1.0 - """ - return attrib( - default=default, - validator=validator, - repr=repr, - hash=hash, - init=init, - metadata=metadata, - converter=converter, - factory=factory, - kw_only=kw_only, - eq=eq, - order=order, - on_setattr=on_setattr, - ) diff --git a/vendor/poetry-core/poetry/core/_vendor/attr/_version_info.py b/vendor/poetry-core/poetry/core/_vendor/attr/_version_info.py deleted file mode 100644 index 014e78a1..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/attr/_version_info.py +++ /dev/null @@ -1,85 +0,0 @@ -from __future__ import absolute_import, division, print_function - -from functools import total_ordering - -from ._funcs import astuple -from ._make import attrib, attrs - - -@total_ordering -@attrs(eq=False, order=False, slots=True, frozen=True) -class VersionInfo(object): - """ - A version object that can be compared to tuple of length 1--4: - - >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2) - True - >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1) - True - >>> vi = attr.VersionInfo(19, 2, 0, "final") - >>> vi < (19, 1, 1) - False - >>> vi < (19,) - False - >>> vi == (19, 2,) - True - >>> vi == (19, 2, 1) - False - - .. versionadded:: 19.2 - """ - - year = attrib(type=int) - minor = attrib(type=int) - micro = attrib(type=int) - releaselevel = attrib(type=str) - - @classmethod - def _from_version_string(cls, s): - """ - Parse *s* and return a _VersionInfo. - """ - v = s.split(".") - if len(v) == 3: - v.append("final") - - return cls( - year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3] - ) - - def _ensure_tuple(self, other): - """ - Ensure *other* is a tuple of a valid length. - - Returns a possibly transformed *other* and ourselves as a tuple of - the same length as *other*. - """ - - if self.__class__ is other.__class__: - other = astuple(other) - - if not isinstance(other, tuple): - raise NotImplementedError - - if not (1 <= len(other) <= 4): - raise NotImplementedError - - return astuple(self)[: len(other)], other - - def __eq__(self, other): - try: - us, them = self._ensure_tuple(other) - except NotImplementedError: - return NotImplemented - - return us == them - - def __lt__(self, other): - try: - us, them = self._ensure_tuple(other) - except NotImplementedError: - return NotImplemented - - # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't - # have to do anything special with releaselevel for now. - return us < them diff --git a/vendor/poetry-core/poetry/core/_vendor/attr/converters.py b/vendor/poetry-core/poetry/core/_vendor/attr/converters.py deleted file mode 100644 index 715ce178..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/attr/converters.py +++ /dev/null @@ -1,85 +0,0 @@ -""" -Commonly useful converters. -""" - -from __future__ import absolute_import, division, print_function - -from ._make import NOTHING, Factory, pipe - - -__all__ = [ - "pipe", - "optional", - "default_if_none", -] - - -def optional(converter): - """ - A converter that allows an attribute to be optional. An optional attribute - is one which can be set to ``None``. - - :param callable converter: the converter that is used for non-``None`` - values. - - .. versionadded:: 17.1.0 - """ - - def optional_converter(val): - if val is None: - return None - return converter(val) - - return optional_converter - - -def default_if_none(default=NOTHING, factory=None): - """ - A converter that allows to replace ``None`` values by *default* or the - result of *factory*. - - :param default: Value to be used if ``None`` is passed. 
Passing an instance
-        of `attr.Factory` is supported, however the ``takes_self`` option
-        is *not*.
-    :param callable factory: A callable that takes no parameters whose result
-        is used if ``None`` is passed.
-
-    :raises TypeError: If **neither** *default* **nor** *factory* is passed.
-    :raises TypeError: If **both** *default* and *factory* are passed.
-    :raises ValueError: If an instance of `attr.Factory` is passed with
-        ``takes_self=True``.
-
-    .. versionadded:: 18.2.0
-    """
-    if default is NOTHING and factory is None:
-        raise TypeError("Must pass either `default` or `factory`.")
-
-    if default is not NOTHING and factory is not None:
-        raise TypeError(
-            "Must pass either `default` or `factory` but not both."
-        )
-
-    if factory is not None:
-        default = Factory(factory)
-
-    if isinstance(default, Factory):
-        if default.takes_self:
-            raise ValueError(
-                "`takes_self` is not supported by default_if_none."
-            )
-
-        def default_if_none_converter(val):
-            if val is not None:
-                return val
-
-            return default.factory()
-
-    else:
-
-        def default_if_none_converter(val):
-            if val is not None:
-                return val
-
-            return default
-
-    return default_if_none_converter
diff --git a/vendor/poetry-core/poetry/core/_vendor/attr/exceptions.py b/vendor/poetry-core/poetry/core/_vendor/attr/exceptions.py
deleted file mode 100644
index fcd89106..00000000
--- a/vendor/poetry-core/poetry/core/_vendor/attr/exceptions.py
+++ /dev/null
@@ -1,92 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-
-class FrozenError(AttributeError):
-    """
-    A frozen/immutable instance or attribute has been attempted to be
-    modified.
-
-    It mirrors the behavior of ``namedtuples`` by using the same error message
-    and subclassing `AttributeError`.
-
-    .. versionadded:: 20.1.0
-    """
-
-    msg = "can't set attribute"
-    args = [msg]
-
-
-class FrozenInstanceError(FrozenError):
-    """
-    A frozen instance has been attempted to be modified.
-
-    .. versionadded:: 16.1.0
-    """
-
-
-class FrozenAttributeError(FrozenError):
-    """
-    A frozen attribute has been attempted to be modified.
-
-    .. versionadded:: 20.1.0
-    """
-
-
-class AttrsAttributeNotFoundError(ValueError):
-    """
-    An ``attrs`` function couldn't find an attribute that the user asked for.
-
-    .. versionadded:: 16.2.0
-    """
-
-
-class NotAnAttrsClassError(ValueError):
-    """
-    A non-``attrs`` class has been passed into an ``attrs`` function.
-
-    .. versionadded:: 16.2.0
-    """
-
-
-class DefaultAlreadySetError(RuntimeError):
-    """
-    A default has been set using ``attr.ib()`` and is attempted to be reset
-    using the decorator.
-
-    .. versionadded:: 17.1.0
-    """
-
-
-class UnannotatedAttributeError(RuntimeError):
-    """
-    A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type
-    annotation.
-
-    .. versionadded:: 17.3.0
-    """
-
-
-class PythonTooOldError(RuntimeError):
-    """
-    It was attempted to use an ``attrs`` feature that requires a newer Python
-    version.
-
-    .. versionadded:: 18.2.0
-    """
-
-
-class NotCallableError(TypeError):
-    """
-    An ``attr.ib()`` requiring a callable has been set with a value
-    that is not callable.
-
-    ..
versionadded:: 19.2.0 - """ - - def __init__(self, msg, value): - super(TypeError, self).__init__(msg, value) - self.msg = msg - self.value = value - - def __str__(self): - return str(self.msg) diff --git a/vendor/poetry-core/poetry/core/_vendor/attr/filters.py b/vendor/poetry-core/poetry/core/_vendor/attr/filters.py deleted file mode 100644 index dc47e8fa..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/attr/filters.py +++ /dev/null @@ -1,52 +0,0 @@ -""" -Commonly useful filters for `attr.asdict`. -""" - -from __future__ import absolute_import, division, print_function - -from ._compat import isclass -from ._make import Attribute - - -def _split_what(what): - """ - Returns a tuple of `frozenset`s of classes and attributes. - """ - return ( - frozenset(cls for cls in what if isclass(cls)), - frozenset(cls for cls in what if isinstance(cls, Attribute)), - ) - - -def include(*what): - """ - Whitelist *what*. - - :param what: What to whitelist. - :type what: `list` of `type` or `attr.Attribute`\\ s - - :rtype: `callable` - """ - cls, attrs = _split_what(what) - - def include_(attribute, value): - return value.__class__ in cls or attribute in attrs - - return include_ - - -def exclude(*what): - """ - Blacklist *what*. - - :param what: What to blacklist. - :type what: `list` of classes or `attr.Attribute`\\ s. - - :rtype: `callable` - """ - cls, attrs = _split_what(what) - - def exclude_(attribute, value): - return value.__class__ not in cls and attribute not in attrs - - return exclude_ diff --git a/vendor/poetry-core/poetry/core/_vendor/attr/setters.py b/vendor/poetry-core/poetry/core/_vendor/attr/setters.py deleted file mode 100644 index 240014b3..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/attr/setters.py +++ /dev/null @@ -1,77 +0,0 @@ -""" -Commonly used hooks for on_setattr. -""" - -from __future__ import absolute_import, division, print_function - -from . import _config -from .exceptions import FrozenAttributeError - - -def pipe(*setters): - """ - Run all *setters* and return the return value of the last one. - - .. versionadded:: 20.1.0 - """ - - def wrapped_pipe(instance, attrib, new_value): - rv = new_value - - for setter in setters: - rv = setter(instance, attrib, rv) - - return rv - - return wrapped_pipe - - -def frozen(_, __, ___): - """ - Prevent an attribute to be modified. - - .. versionadded:: 20.1.0 - """ - raise FrozenAttributeError() - - -def validate(instance, attrib, new_value): - """ - Run *attrib*'s validator on *new_value* if it has one. - - .. versionadded:: 20.1.0 - """ - if _config._run_validators is False: - return new_value - - v = attrib.validator - if not v: - return new_value - - v(instance, attrib, new_value) - - return new_value - - -def convert(instance, attrib, new_value): - """ - Run *attrib*'s converter -- if it has one -- on *new_value* and return the - result. - - .. versionadded:: 20.1.0 - """ - c = attrib.converter - if c: - return c(new_value) - - return new_value - - -NO_OP = object() -""" -Sentinel for disabling class-wide *on_setattr* hooks for certain attributes. - -Does not work in `pipe` or within lists. - -.. versionadded:: 20.1.0 -""" diff --git a/vendor/poetry-core/poetry/core/_vendor/attr/validators.py b/vendor/poetry-core/poetry/core/_vendor/attr/validators.py deleted file mode 100644 index b9a73054..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/attr/validators.py +++ /dev/null @@ -1,379 +0,0 @@ -""" -Commonly useful validators. 
-"""
-
-from __future__ import absolute_import, division, print_function
-
-import re
-
-from ._make import _AndValidator, and_, attrib, attrs
-from .exceptions import NotCallableError
-
-
-__all__ = [
-    "and_",
-    "deep_iterable",
-    "deep_mapping",
-    "in_",
-    "instance_of",
-    "is_callable",
-    "matches_re",
-    "optional",
-    "provides",
-]
-
-
-@attrs(repr=False, slots=True, hash=True)
-class _InstanceOfValidator(object):
-    type = attrib()
-
-    def __call__(self, inst, attr, value):
-        """
-        We use a callable class to be able to change the ``__repr__``.
-        """
-        if not isinstance(value, self.type):
-            raise TypeError(
-                "'{name}' must be {type!r} (got {value!r} that is a "
-                "{actual!r}).".format(
-                    name=attr.name,
-                    type=self.type,
-                    actual=value.__class__,
-                    value=value,
-                ),
-                attr,
-                self.type,
-                value,
-            )
-
-    def __repr__(self):
-        return "<instance_of validator for type {type!r}>".format(
-            type=self.type
-        )
-
-
-def instance_of(type):
-    """
-    A validator that raises a `TypeError` if the initializer is called
-    with a wrong type for this particular attribute (checks are performed using
-    `isinstance` therefore it's also valid to pass a tuple of types).
-
-    :param type: The type to check for.
-    :type type: type or tuple of types
-
-    :raises TypeError: With a human readable error message, the attribute
-        (of type `attr.Attribute`), the expected type, and the value it
-        got.
-    """
-    return _InstanceOfValidator(type)
-
-
-@attrs(repr=False, frozen=True, slots=True)
-class _MatchesReValidator(object):
-    regex = attrib()
-    flags = attrib()
-    match_func = attrib()
-
-    def __call__(self, inst, attr, value):
-        """
-        We use a callable class to be able to change the ``__repr__``.
-        """
-        if not self.match_func(value):
-            raise ValueError(
-                "'{name}' must match regex {regex!r}"
-                " ({value!r} doesn't)".format(
-                    name=attr.name, regex=self.regex.pattern, value=value
-                ),
-                attr,
-                self.regex,
-                value,
-            )
-
-    def __repr__(self):
-        return "<matches_re validator for pattern {regex!r}>".format(
-            regex=self.regex
-        )
-
-
-def matches_re(regex, flags=0, func=None):
-    r"""
-    A validator that raises `ValueError` if the initializer is called
-    with a string that doesn't match *regex*.
-
-    :param str regex: a regex string to match against
-    :param int flags: flags that will be passed to the underlying re function
-        (default 0)
-    :param callable func: which underlying `re` function to call (options
-        are `re.fullmatch`, `re.search`, `re.match`, default
-        is ``None`` which means either `re.fullmatch` or an emulation of
-        it on Python 2). For performance reasons, they won't be used directly
-        but on a pre-`re.compile`\ ed pattern.
-
-    .. versionadded:: 19.2.0
-    """
-    fullmatch = getattr(re, "fullmatch", None)
-    valid_funcs = (fullmatch, None, re.search, re.match)
-    if func not in valid_funcs:
-        raise ValueError(
-            "'func' must be one of %s."
-            % (
-                ", ".join(
-                    sorted(
-                        e and e.__name__ or "None" for e in set(valid_funcs)
-                    )
-                ),
-            )
-        )
-
-    pattern = re.compile(regex, flags)
-    if func is re.match:
-        match_func = pattern.match
-    elif func is re.search:
-        match_func = pattern.search
-    else:
-        if fullmatch:
-            match_func = pattern.fullmatch
-        else:
-            pattern = re.compile(r"(?:{})\Z".format(regex), flags)
-            match_func = pattern.match
-
-    return _MatchesReValidator(pattern, flags, match_func)
-
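As orientation for the validators being deleted in this hunk: they are consumed through `attr.ib`. A hedged usage sketch — the class, fields, and regex are invented for illustration, assuming a standard attrs install:

import attr
from attr import validators

@attr.s
class User(object):
    name = attr.ib(validator=validators.instance_of(str))
    email = attr.ib(validator=validators.matches_re(r"[^@]+@[^@]+"))

User(name="ada", email="ada@example.org")  # passes
# User(name="ada", email="nope") raises ValueError from _MatchesReValidator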
-
-@attrs(repr=False, slots=True, hash=True)
-class _ProvidesValidator(object):
-    interface = attrib()
-
-    def __call__(self, inst, attr, value):
-        """
-        We use a callable class to be able to change the ``__repr__``.
-        """
-        if not self.interface.providedBy(value):
-            raise TypeError(
-                "'{name}' must provide {interface!r} which {value!r} "
-                "doesn't.".format(
-                    name=attr.name, interface=self.interface, value=value
-                ),
-                attr,
-                self.interface,
-                value,
-            )
-
-    def __repr__(self):
-        return "<provides validator for interface {interface!r}>".format(
-            interface=self.interface
-        )
-
-
-def provides(interface):
-    """
-    A validator that raises a `TypeError` if the initializer is called
-    with an object that does not provide the requested *interface* (checks are
-    performed using ``interface.providedBy(value)``; see `zope.interface
-    <https://zopeinterface.readthedocs.io/en/latest/>`_).
-
-    :param interface: The interface to check for.
-    :type interface: ``zope.interface.Interface``
-
-    :raises TypeError: With a human readable error message, the attribute
-        (of type `attr.Attribute`), the expected interface, and the
-        value it got.
-    """
-    return _ProvidesValidator(interface)
-
-
-@attrs(repr=False, slots=True, hash=True)
-class _OptionalValidator(object):
-    validator = attrib()
-
-    def __call__(self, inst, attr, value):
-        if value is None:
-            return
-
-        self.validator(inst, attr, value)
-
-    def __repr__(self):
-        return "<optional validator for {what} or None>".format(
-            what=repr(self.validator)
-        )
-
-
-def optional(validator):
-    """
-    A validator that makes an attribute optional. An optional attribute is one
-    which can be set to ``None`` in addition to satisfying the requirements of
-    the sub-validator.
-
-    :param validator: A validator (or a list of validators) that is used for
-        non-``None`` values.
-    :type validator: callable or `list` of callables.
-
-    .. versionadded:: 15.1.0
-    .. versionchanged:: 17.1.0 *validator* can be a list of validators.
-    """
-    if isinstance(validator, list):
-        return _OptionalValidator(_AndValidator(validator))
-    return _OptionalValidator(validator)
-
-
-@attrs(repr=False, slots=True, hash=True)
-class _InValidator(object):
-    options = attrib()
-
-    def __call__(self, inst, attr, value):
-        try:
-            in_options = value in self.options
-        except TypeError:  # e.g. `1 in "abc"`
-            in_options = False
-
-        if not in_options:
-            raise ValueError(
-                "'{name}' must be in {options!r} (got {value!r})".format(
-                    name=attr.name, options=self.options, value=value
-                )
-            )
-
-    def __repr__(self):
-        return "<in_ validator with options {options!r}>".format(
-            options=self.options
-        )
-
-
-def in_(options):
-    """
-    A validator that raises a `ValueError` if the initializer is called
-    with a value that does not belong in the options provided. The check is
-    performed using ``value in options``.
-
-    :param options: Allowed options.
-    :type options: list, tuple, `enum.Enum`, ...
-
-    :raises ValueError: With a human readable error message, the attribute (of
-        type `attr.Attribute`), the expected options, and the value it
-        got.
-
-    .. versionadded:: 17.1.0
-    """
-    return _InValidator(options)
-
-
-@attrs(repr=False, slots=False, hash=True)
-class _IsCallableValidator(object):
-    def __call__(self, inst, attr, value):
-        """
-        We use a callable class to be able to change the ``__repr__``.
-        """
-        if not callable(value):
-            message = (
-                "'{name}' must be callable "
-                "(got {value!r} that is a {actual!r})."
-            )
-            raise NotCallableError(
-                msg=message.format(
                    name=attr.name, value=value, actual=value.__class__
-                ),
-                value=value,
-            )
-
-    def __repr__(self):
-        return "<is_callable validator>"
-
-
-def is_callable():
-    """
-    A validator that raises an `attr.exceptions.NotCallableError` if the
-    initializer is called with a value for this particular attribute
-    that is not callable.
-
-    .. versionadded:: 19.1.0
-
-    :raises `attr.exceptions.NotCallableError`: With a human readable error
-        message containing the attribute (`attr.Attribute`) name,
-        and the value it got.
-    """
-    return _IsCallableValidator()
-
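Likewise, a brief hedged sketch of how `optional`, `in_`, and `is_callable` above are typically combined (names invented):

import attr
from attr import validators

@attr.s
class Job(object):
    state = attr.ib(validator=validators.in_(["queued", "running", "done"]))
    callback = attr.ib(
        default=None,
        validator=validators.optional(validators.is_callable()),
    )

Job(state="queued", callback=print)  # passes
# Job(state="paused") raises ValueError; callback=42 raises NotCallableError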
-
-@attrs(repr=False, slots=True, hash=True)
-class _DeepIterable(object):
-    member_validator = attrib(validator=is_callable())
-    iterable_validator = attrib(
-        default=None, validator=optional(is_callable())
-    )
-
-    def __call__(self, inst, attr, value):
-        """
-        We use a callable class to be able to change the ``__repr__``.
-        """
-        if self.iterable_validator is not None:
-            self.iterable_validator(inst, attr, value)
-
-        for member in value:
-            self.member_validator(inst, attr, member)
-
-    def __repr__(self):
-        iterable_identifier = (
-            ""
-            if self.iterable_validator is None
-            else " {iterable!r}".format(iterable=self.iterable_validator)
-        )
-        return (
-            "<deep_iterable validator for{iterable_identifier}"
-            " iterables of {member!r}>"
-        ).format(
-            iterable_identifier=iterable_identifier,
-            member=self.member_validator,
-        )
-
-
-def deep_iterable(member_validator, iterable_validator=None):
-    """
-    A validator that performs deep validation of an iterable.
-
-    :param member_validator: Validator to apply to iterable members
-    :param iterable_validator: Validator to apply to iterable itself
-        (optional)
-
-    .. versionadded:: 19.1.0
-
-    :raises TypeError: if any sub-validators fail
-    """
-    return _DeepIterable(member_validator, iterable_validator)
-
-
-@attrs(repr=False, slots=True, hash=True)
-class _DeepMapping(object):
-    key_validator = attrib(validator=is_callable())
-    value_validator = attrib(validator=is_callable())
-    mapping_validator = attrib(default=None, validator=optional(is_callable()))
-
-    def __call__(self, inst, attr, value):
-        """
-        We use a callable class to be able to change the ``__repr__``.
-        """
-        if self.mapping_validator is not None:
-            self.mapping_validator(inst, attr, value)
-
-        for key in value:
-            self.key_validator(inst, attr, key)
-            self.value_validator(inst, attr, value[key])
-
-    def __repr__(self):
-        return (
-            "<deep_mapping validator for objects mapping {key!r} to {value!r}>"
-        ).format(key=self.key_validator, value=self.value_validator)
-
-
-def deep_mapping(key_validator, value_validator, mapping_validator=None):
-    """
-    A validator that performs deep validation of a dictionary.
-
-    :param key_validator: Validator to apply to dictionary keys
-    :param value_validator: Validator to apply to dictionary values
-    :param mapping_validator: Validator to apply to top-level mapping
-        attribute (optional)
-
-    .. versionadded:: 19.1.0
-
-    :raises TypeError: if any sub-validators fail
-    """
-    return _DeepMapping(key_validator, value_validator, mapping_validator)
diff --git a/vendor/poetry-core/poetry/core/_vendor/attrs.LICENSE b/vendor/poetry-core/poetry/core/_vendor/attrs.LICENSE
deleted file mode 100644
index 7ae3df93..00000000
--- a/vendor/poetry-core/poetry/core/_vendor/attrs.LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2015 Hynek Schlawack
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/vendor/poetry-core/poetry/core/_vendor/jsonschema/__init__.py b/vendor/poetry-core/poetry/core/_vendor/jsonschema/__init__.py
deleted file mode 100644
index 1791fe7f..00000000
--- a/vendor/poetry-core/poetry/core/_vendor/jsonschema/__init__.py
+++ /dev/null
@@ -1,31 +0,0 @@
-"""
-An implementation of JSON Schema for Python
-
-The main functionality is provided by the validator classes for each of the
-supported JSON Schema versions.
-
-Most commonly, `validate` is the quickest way to simply validate a given
-instance under a schema, and will create a validator for you.
-"""
-
-from jsonschema.exceptions import (
-    ErrorTree, FormatError, RefResolutionError, SchemaError, ValidationError
-)
-from jsonschema._format import (
-    FormatChecker,
-    draft3_format_checker,
-    draft4_format_checker,
-    draft6_format_checker,
-    draft7_format_checker,
-)
-from jsonschema._types import TypeChecker
-from jsonschema.validators import (
-    Draft3Validator,
-    Draft4Validator,
-    Draft6Validator,
-    Draft7Validator,
-    RefResolver,
-    validate,
-)
-
-__version__ = "3.2.0"
diff --git a/vendor/poetry-core/poetry/core/_vendor/jsonschema/__main__.py b/vendor/poetry-core/poetry/core/_vendor/jsonschema/__main__.py
deleted file mode 100644
index 82c29fd3..00000000
--- a/vendor/poetry-core/poetry/core/_vendor/jsonschema/__main__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from jsonschema.cli import main
-main()
diff --git a/vendor/poetry-core/poetry/core/_vendor/jsonschema/_format.py b/vendor/poetry-core/poetry/core/_vendor/jsonschema/_format.py
deleted file mode 100644
index 281a7cfc..00000000
--- a/vendor/poetry-core/poetry/core/_vendor/jsonschema/_format.py
+++ /dev/null
@@ -1,425 +0,0 @@
-import datetime
-import re
-import socket
-import struct
-
-from jsonschema.compat import str_types
-from jsonschema.exceptions import FormatError
-
-
-class FormatChecker(object):
-    """
-    A ``format`` property checker.
-
-    JSON Schema does not mandate that the ``format`` property actually do any
-    validation. If validation is desired however, instances of this class can
-    be hooked into validators to enable format validation.
-
-    `FormatChecker` objects always return ``True`` when asked about
-    formats that they do not know how to validate.
-
-    To check a custom format using a function that takes an instance and
-    returns a ``bool``, use the `FormatChecker.checks` or
-    `FormatChecker.cls_checks` decorators.
-
-    Arguments:
-
-        formats (~collections.Iterable):
-
-            The known formats to validate. This argument can be used to
-            limit which formats will be used during validation.
-    """
-
-    checkers = {}
-
-    def __init__(self, formats=None):
-        if formats is None:
-            self.checkers = self.checkers.copy()
-        else:
-            self.checkers = dict((k, self.checkers[k]) for k in formats)
-
-    def __repr__(self):
-        return "<FormatChecker checkers={}>".format(sorted(self.checkers))
-
-    def checks(self, format, raises=()):
-        """
-        Register a decorated function as validating a new format.
-
-        Arguments:
-
-            format (str):
-
-                The format that the decorated function will check.
- - raises (Exception): - - The exception(s) raised by the decorated function when an - invalid instance is found. - - The exception object will be accessible as the - `jsonschema.exceptions.ValidationError.cause` attribute of the - resulting validation error. - """ - - def _checks(func): - self.checkers[format] = (func, raises) - return func - return _checks - - cls_checks = classmethod(checks) - - def check(self, instance, format): - """ - Check whether the instance conforms to the given format. - - Arguments: - - instance (*any primitive type*, i.e. str, number, bool): - - The instance to check - - format (str): - - The format that instance should conform to - - - Raises: - - FormatError: if the instance does not conform to ``format`` - """ - - if format not in self.checkers: - return - - func, raises = self.checkers[format] - result, cause = None, None - try: - result = func(instance) - except raises as e: - cause = e - if not result: - raise FormatError( - "%r is not a %r" % (instance, format), cause=cause, - ) - - def conforms(self, instance, format): - """ - Check whether the instance conforms to the given format. - - Arguments: - - instance (*any primitive type*, i.e. str, number, bool): - - The instance to check - - format (str): - - The format that instance should conform to - - Returns: - - bool: whether it conformed - """ - - try: - self.check(instance, format) - except FormatError: - return False - else: - return True - - -draft3_format_checker = FormatChecker() -draft4_format_checker = FormatChecker() -draft6_format_checker = FormatChecker() -draft7_format_checker = FormatChecker() - - -_draft_checkers = dict( - draft3=draft3_format_checker, - draft4=draft4_format_checker, - draft6=draft6_format_checker, - draft7=draft7_format_checker, -) - - -def _checks_drafts( - name=None, - draft3=None, - draft4=None, - draft6=None, - draft7=None, - raises=(), -): - draft3 = draft3 or name - draft4 = draft4 or name - draft6 = draft6 or name - draft7 = draft7 or name - - def wrap(func): - if draft3: - func = _draft_checkers["draft3"].checks(draft3, raises)(func) - if draft4: - func = _draft_checkers["draft4"].checks(draft4, raises)(func) - if draft6: - func = _draft_checkers["draft6"].checks(draft6, raises)(func) - if draft7: - func = _draft_checkers["draft7"].checks(draft7, raises)(func) - - # Oy. This is bad global state, but relied upon for now, until - # deprecation. 
See https://github.com/Julian/jsonschema/issues/519 - # and test_format_checkers_come_with_defaults - FormatChecker.cls_checks(draft7 or draft6 or draft4 or draft3, raises)( - func, - ) - return func - return wrap - - -@_checks_drafts(name="idn-email") -@_checks_drafts(name="email") -def is_email(instance): - if not isinstance(instance, str_types): - return True - return "@" in instance - - -_ipv4_re = re.compile(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$") - - -@_checks_drafts( - draft3="ip-address", draft4="ipv4", draft6="ipv4", draft7="ipv4", -) -def is_ipv4(instance): - if not isinstance(instance, str_types): - return True - if not _ipv4_re.match(instance): - return False - return all(0 <= int(component) <= 255 for component in instance.split(".")) - - -if hasattr(socket, "inet_pton"): - # FIXME: Really this only should raise struct.error, but see the sadness - # that is https://twistedmatrix.com/trac/ticket/9409 - @_checks_drafts( - name="ipv6", raises=(socket.error, struct.error, ValueError), - ) - def is_ipv6(instance): - if not isinstance(instance, str_types): - return True - return socket.inet_pton(socket.AF_INET6, instance) - - -_host_name_re = re.compile(r"^[A-Za-z0-9][A-Za-z0-9\.\-]{1,255}$") - - -@_checks_drafts( - draft3="host-name", - draft4="hostname", - draft6="hostname", - draft7="hostname", -) -def is_host_name(instance): - if not isinstance(instance, str_types): - return True - if not _host_name_re.match(instance): - return False - components = instance.split(".") - for component in components: - if len(component) > 63: - return False - return True - - -try: - # The built-in `idna` codec only implements RFC 3890, so we go elsewhere. - import idna -except ImportError: - pass -else: - @_checks_drafts(draft7="idn-hostname", raises=idna.IDNAError) - def is_idn_host_name(instance): - if not isinstance(instance, str_types): - return True - idna.encode(instance) - return True - - -try: - import rfc3987 -except ImportError: - try: - from rfc3986_validator import validate_rfc3986 - except ImportError: - pass - else: - @_checks_drafts(name="uri") - def is_uri(instance): - if not isinstance(instance, str_types): - return True - return validate_rfc3986(instance, rule="URI") - - @_checks_drafts( - draft6="uri-reference", - draft7="uri-reference", - raises=ValueError, - ) - def is_uri_reference(instance): - if not isinstance(instance, str_types): - return True - return validate_rfc3986(instance, rule="URI_reference") - -else: - @_checks_drafts(draft7="iri", raises=ValueError) - def is_iri(instance): - if not isinstance(instance, str_types): - return True - return rfc3987.parse(instance, rule="IRI") - - @_checks_drafts(draft7="iri-reference", raises=ValueError) - def is_iri_reference(instance): - if not isinstance(instance, str_types): - return True - return rfc3987.parse(instance, rule="IRI_reference") - - @_checks_drafts(name="uri", raises=ValueError) - def is_uri(instance): - if not isinstance(instance, str_types): - return True - return rfc3987.parse(instance, rule="URI") - - @_checks_drafts( - draft6="uri-reference", - draft7="uri-reference", - raises=ValueError, - ) - def is_uri_reference(instance): - if not isinstance(instance, str_types): - return True - return rfc3987.parse(instance, rule="URI_reference") - - -try: - from strict_rfc3339 import validate_rfc3339 -except ImportError: - try: - from rfc3339_validator import validate_rfc3339 - except ImportError: - validate_rfc3339 = None - -if validate_rfc3339: - @_checks_drafts(name="date-time") - def is_datetime(instance): - if not 
isinstance(instance, str_types): - return True - return validate_rfc3339(instance) - - @_checks_drafts(draft7="time") - def is_time(instance): - if not isinstance(instance, str_types): - return True - return is_datetime("1970-01-01T" + instance) - - -@_checks_drafts(name="regex", raises=re.error) -def is_regex(instance): - if not isinstance(instance, str_types): - return True - return re.compile(instance) - - -@_checks_drafts(draft3="date", draft7="date", raises=ValueError) -def is_date(instance): - if not isinstance(instance, str_types): - return True - return datetime.datetime.strptime(instance, "%Y-%m-%d") - - -@_checks_drafts(draft3="time", raises=ValueError) -def is_draft3_time(instance): - if not isinstance(instance, str_types): - return True - return datetime.datetime.strptime(instance, "%H:%M:%S") - - -try: - import webcolors -except ImportError: - pass -else: - def is_css_color_code(instance): - return webcolors.normalize_hex(instance) - - @_checks_drafts(draft3="color", raises=(ValueError, TypeError)) - def is_css21_color(instance): - if ( - not isinstance(instance, str_types) or - instance.lower() in webcolors.css21_names_to_hex - ): - return True - return is_css_color_code(instance) - - def is_css3_color(instance): - if instance.lower() in webcolors.css3_names_to_hex: - return True - return is_css_color_code(instance) - - -try: - import jsonpointer -except ImportError: - pass -else: - @_checks_drafts( - draft6="json-pointer", - draft7="json-pointer", - raises=jsonpointer.JsonPointerException, - ) - def is_json_pointer(instance): - if not isinstance(instance, str_types): - return True - return jsonpointer.JsonPointer(instance) - - # TODO: I don't want to maintain this, so it - # needs to go either into jsonpointer (pending - # https://github.com/stefankoegl/python-json-pointer/issues/34) or - # into a new external library. 
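Stepping back from the individual format functions: everything in this deleted `_format.py` funnels through `FormatChecker`. A hedged sketch of how a custom format is normally registered against the public jsonschema 3.x API (the "even" format name is invented):

from jsonschema import Draft7Validator, FormatChecker

checker = FormatChecker()

@checker.checks("even")
def is_even(value):
    # Format checks only run when a format_checker is supplied.
    return isinstance(value, int) and value % 2 == 0

validator = Draft7Validator(
    {"type": "integer", "format": "even"}, format_checker=checker
)
print(len(list(validator.iter_errors(3))))  # 1 -- fails the custom format
print(len(list(validator.iter_errors(4))))  # 0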
- @_checks_drafts( - draft7="relative-json-pointer", - raises=jsonpointer.JsonPointerException, - ) - def is_relative_json_pointer(instance): - # Definition taken from: - # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3 - if not isinstance(instance, str_types): - return True - non_negative_integer, rest = [], "" - for i, character in enumerate(instance): - if character.isdigit(): - non_negative_integer.append(character) - continue - - if not non_negative_integer: - return False - - rest = instance[i:] - break - return (rest == "#") or jsonpointer.JsonPointer(rest) - - -try: - import uritemplate.exceptions -except ImportError: - pass -else: - @_checks_drafts( - draft6="uri-template", - draft7="uri-template", - raises=uritemplate.exceptions.InvalidTemplate, - ) - def is_uri_template( - instance, - template_validator=uritemplate.Validator().force_balanced_braces(), - ): - template = uritemplate.URITemplate(instance) - return template_validator.validate(template) diff --git a/vendor/poetry-core/poetry/core/_vendor/jsonschema/_legacy_validators.py b/vendor/poetry-core/poetry/core/_vendor/jsonschema/_legacy_validators.py deleted file mode 100644 index 264ff7d7..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/jsonschema/_legacy_validators.py +++ /dev/null @@ -1,141 +0,0 @@ -from jsonschema import _utils -from jsonschema.compat import iteritems -from jsonschema.exceptions import ValidationError - - -def dependencies_draft3(validator, dependencies, instance, schema): - if not validator.is_type(instance, "object"): - return - - for property, dependency in iteritems(dependencies): - if property not in instance: - continue - - if validator.is_type(dependency, "object"): - for error in validator.descend( - instance, dependency, schema_path=property, - ): - yield error - elif validator.is_type(dependency, "string"): - if dependency not in instance: - yield ValidationError( - "%r is a dependency of %r" % (dependency, property) - ) - else: - for each in dependency: - if each not in instance: - message = "%r is a dependency of %r" - yield ValidationError(message % (each, property)) - - -def disallow_draft3(validator, disallow, instance, schema): - for disallowed in _utils.ensure_list(disallow): - if validator.is_valid(instance, {"type": [disallowed]}): - yield ValidationError( - "%r is disallowed for %r" % (disallowed, instance) - ) - - -def extends_draft3(validator, extends, instance, schema): - if validator.is_type(extends, "object"): - for error in validator.descend(instance, extends): - yield error - return - for index, subschema in enumerate(extends): - for error in validator.descend(instance, subschema, schema_path=index): - yield error - - -def items_draft3_draft4(validator, items, instance, schema): - if not validator.is_type(instance, "array"): - return - - if validator.is_type(items, "object"): - for index, item in enumerate(instance): - for error in validator.descend(item, items, path=index): - yield error - else: - for (index, item), subschema in zip(enumerate(instance), items): - for error in validator.descend( - item, subschema, path=index, schema_path=index, - ): - yield error - - -def minimum_draft3_draft4(validator, minimum, instance, schema): - if not validator.is_type(instance, "number"): - return - - if schema.get("exclusiveMinimum", False): - failed = instance <= minimum - cmp = "less than or equal to" - else: - failed = instance < minimum - cmp = "less than" - - if failed: - yield ValidationError( - "%r is %s the minimum of %r" % (instance, cmp, 
minimum) - ) - - -def maximum_draft3_draft4(validator, maximum, instance, schema): - if not validator.is_type(instance, "number"): - return - - if schema.get("exclusiveMaximum", False): - failed = instance >= maximum - cmp = "greater than or equal to" - else: - failed = instance > maximum - cmp = "greater than" - - if failed: - yield ValidationError( - "%r is %s the maximum of %r" % (instance, cmp, maximum) - ) - - -def properties_draft3(validator, properties, instance, schema): - if not validator.is_type(instance, "object"): - return - - for property, subschema in iteritems(properties): - if property in instance: - for error in validator.descend( - instance[property], - subschema, - path=property, - schema_path=property, - ): - yield error - elif subschema.get("required", False): - error = ValidationError("%r is a required property" % property) - error._set( - validator="required", - validator_value=subschema["required"], - instance=instance, - schema=schema, - ) - error.path.appendleft(property) - error.schema_path.extend([property, "required"]) - yield error - - -def type_draft3(validator, types, instance, schema): - types = _utils.ensure_list(types) - - all_errors = [] - for index, type in enumerate(types): - if validator.is_type(type, "object"): - errors = list(validator.descend(instance, type, schema_path=index)) - if not errors: - return - all_errors.extend(errors) - else: - if validator.is_type(instance, type): - return - else: - yield ValidationError( - _utils.types_msg(instance, types), context=all_errors, - ) diff --git a/vendor/poetry-core/poetry/core/_vendor/jsonschema/_reflect.py b/vendor/poetry-core/poetry/core/_vendor/jsonschema/_reflect.py deleted file mode 100644 index d09e38fb..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/jsonschema/_reflect.py +++ /dev/null @@ -1,155 +0,0 @@ -# -*- test-case-name: twisted.test.test_reflect -*- -# Copyright (c) Twisted Matrix Laboratories. -# See LICENSE for details. - -""" -Standardized versions of various cool and/or strange things that you can do -with Python's reflection capabilities. -""" - -import sys - -from jsonschema.compat import PY3 - - -class _NoModuleFound(Exception): - """ - No module was found because none exists. - """ - - - -class InvalidName(ValueError): - """ - The given name is not a dot-separated list of Python objects. - """ - - - -class ModuleNotFound(InvalidName): - """ - The module associated with the given name doesn't exist and it can't be - imported. - """ - - - -class ObjectNotFound(InvalidName): - """ - The object associated with the given name doesn't exist and it can't be - imported. - """ - - - -if PY3: - def reraise(exception, traceback): - raise exception.with_traceback(traceback) -else: - exec("""def reraise(exception, traceback): - raise exception.__class__, exception, traceback""") - -reraise.__doc__ = """ -Re-raise an exception, with an optional traceback, in a way that is compatible -with both Python 2 and Python 3. - -Note that on Python 3, re-raised exceptions will be mutated, with their -C{__traceback__} attribute being set. - -@param exception: The exception instance. -@param traceback: The traceback to use, or C{None} indicating a new traceback. -""" - - -def _importAndCheckStack(importName): - """ - Import the given name as a module, then walk the stack to determine whether - the failure was the module not existing, or some code in the module (for - example a dependent import) failing. This can be helpful to determine - whether any actual application code was run. 
For example, to distinguish - administrative error (entering the wrong module name), from programmer - error (writing buggy code in a module that fails to import). - - @param importName: The name of the module to import. - @type importName: C{str} - @raise Exception: if something bad happens. This can be any type of - exception, since nobody knows what loading some arbitrary code might - do. - @raise _NoModuleFound: if no module was found. - """ - try: - return __import__(importName) - except ImportError: - excType, excValue, excTraceback = sys.exc_info() - while excTraceback: - execName = excTraceback.tb_frame.f_globals["__name__"] - # in Python 2 execName is None when an ImportError is encountered, - # whereas in Python 3 execName is equal to the importName. - if execName is None or execName == importName: - reraise(excValue, excTraceback) - excTraceback = excTraceback.tb_next - raise _NoModuleFound() - - - -def namedAny(name): - """ - Retrieve a Python object by its fully qualified name from the global Python - module namespace. The first part of the name, that describes a module, - will be discovered and imported. Each subsequent part of the name is - treated as the name of an attribute of the object specified by all of the - name which came before it. For example, the fully-qualified name of this - object is 'twisted.python.reflect.namedAny'. - - @type name: L{str} - @param name: The name of the object to return. - - @raise InvalidName: If the name is an empty string, starts or ends with - a '.', or is otherwise syntactically incorrect. - - @raise ModuleNotFound: If the name is syntactically correct but the - module it specifies cannot be imported because it does not appear to - exist. - - @raise ObjectNotFound: If the name is syntactically correct, includes at - least one '.', but the module it specifies cannot be imported because - it does not appear to exist. - - @raise AttributeError: If an attribute of an object along the way cannot be - accessed, or a module along the way is not found. - - @return: the Python object identified by 'name'. - """ - if not name: - raise InvalidName('Empty module name') - - names = name.split('.') - - # if the name starts or ends with a '.' or contains '..', the __import__ - # will raise an 'Empty module name' error. This will provide a better error - # message.
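# Illustrative note (not part of the vendored source): "foo..bar".split(".")
# yields ["foo", "", "bar"] and ".foo".split(".") yields ["", "foo"], so the
# single '' membership test below catches leading, trailing, and doubled dots.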
- if '' in names: - raise InvalidName( - "name must be a string giving a '.'-separated list of Python " - "identifiers, not %r" % (name,)) - - topLevelPackage = None - moduleNames = names[:] - while not topLevelPackage: - if moduleNames: - trialname = '.'.join(moduleNames) - try: - topLevelPackage = _importAndCheckStack(trialname) - except _NoModuleFound: - moduleNames.pop() - else: - if len(names) == 1: - raise ModuleNotFound("No module named %r" % (name,)) - else: - raise ObjectNotFound('%r does not name an object' % (name,)) - - obj = topLevelPackage - for n in names[1:]: - obj = getattr(obj, n) - - return obj diff --git a/vendor/poetry-core/poetry/core/_vendor/jsonschema/_types.py b/vendor/poetry-core/poetry/core/_vendor/jsonschema/_types.py deleted file mode 100644 index a71a4e34..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/jsonschema/_types.py +++ /dev/null @@ -1,188 +0,0 @@ -import numbers - -from pyrsistent import pmap -import attr - -from jsonschema.compat import int_types, str_types -from jsonschema.exceptions import UndefinedTypeCheck - - -def is_array(checker, instance): - return isinstance(instance, list) - - -def is_bool(checker, instance): - return isinstance(instance, bool) - - -def is_integer(checker, instance): - # bool inherits from int, so ensure bools aren't reported as ints - if isinstance(instance, bool): - return False - return isinstance(instance, int_types) - - -def is_null(checker, instance): - return instance is None - - -def is_number(checker, instance): - # bool inherits from int, so ensure bools aren't reported as ints - if isinstance(instance, bool): - return False - return isinstance(instance, numbers.Number) - - -def is_object(checker, instance): - return isinstance(instance, dict) - - -def is_string(checker, instance): - return isinstance(instance, str_types) - - -def is_any(checker, instance): - return True - - -@attr.s(frozen=True) -class TypeChecker(object): - """ - A ``type`` property checker. - - A `TypeChecker` performs type checking for an `IValidator`. Type - checks to perform are updated using `TypeChecker.redefine` or - `TypeChecker.redefine_many` and removed via `TypeChecker.remove`. - Each of these return a new `TypeChecker` object. - - Arguments: - - type_checkers (dict): - - The initial mapping of types to their checking functions. - """ - _type_checkers = attr.ib(default=pmap(), converter=pmap) - - def is_type(self, instance, type): - """ - Check if the instance is of the appropriate type. - - Arguments: - - instance (object): - - The instance to check - - type (str): - - The name of the type that is expected. - - Returns: - - bool: Whether it conformed. - - - Raises: - - `jsonschema.exceptions.UndefinedTypeCheck`: - if type is unknown to this object. - """ - try: - fn = self._type_checkers[type] - except KeyError: - raise UndefinedTypeCheck(type) - - return fn(self, instance) - - def redefine(self, type, fn): - """ - Produce a new checker with the given type redefined. - - Arguments: - - type (str): - - The name of the type to check. - - fn (collections.Callable): - - A function taking exactly two parameters - the type - checker calling the function and the instance to check. - The function should return true if instance is of this - type and false otherwise. - - Returns: - - A new `TypeChecker` instance. - """ - return self.redefine_many({type: fn}) - - def redefine_many(self, definitions=()): - """ - Produce a new checker with the given types redefined. 
- - Arguments: - - definitions (dict): - - A dictionary mapping types to their checking functions. - - Returns: - - A new `TypeChecker` instance. - """ - return attr.evolve( - self, type_checkers=self._type_checkers.update(definitions), - ) - - def remove(self, *types): - """ - Produce a new checker with the given types forgotten. - - Arguments: - - types (~collections.Iterable): - - the names of the types to remove. - - Returns: - - A new `TypeChecker` instance - - Raises: - - `jsonschema.exceptions.UndefinedTypeCheck`: - - if any given type is unknown to this object - """ - - checkers = self._type_checkers - for each in types: - try: - checkers = checkers.remove(each) - except KeyError: - raise UndefinedTypeCheck(each) - return attr.evolve(self, type_checkers=checkers) - - -draft3_type_checker = TypeChecker( - { - u"any": is_any, - u"array": is_array, - u"boolean": is_bool, - u"integer": is_integer, - u"object": is_object, - u"null": is_null, - u"number": is_number, - u"string": is_string, - }, -) -draft4_type_checker = draft3_type_checker.remove(u"any") -draft6_type_checker = draft4_type_checker.redefine( - u"integer", - lambda checker, instance: ( - is_integer(checker, instance) or - isinstance(instance, float) and instance.is_integer() - ), -) -draft7_type_checker = draft6_type_checker diff --git a/vendor/poetry-core/poetry/core/_vendor/jsonschema/_utils.py b/vendor/poetry-core/poetry/core/_vendor/jsonschema/_utils.py deleted file mode 100644 index 117eec24..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/jsonschema/_utils.py +++ /dev/null @@ -1,215 +0,0 @@ -import itertools -import json -import os -import re - -from jsonschema.compat import MutableMapping, str_types, urlsplit - - -class URIDict(MutableMapping): - """ - Dictionary which uses normalized URIs as keys. - """ - - def normalize(self, uri): - return urlsplit(uri).geturl() - - def __init__(self, *args, **kwargs): - self.store = dict() - self.store.update(*args, **kwargs) - - def __getitem__(self, uri): - return self.store[self.normalize(uri)] - - def __setitem__(self, uri, value): - self.store[self.normalize(uri)] = value - - def __delitem__(self, uri): - del self.store[self.normalize(uri)] - - def __iter__(self): - return iter(self.store) - - def __len__(self): - return len(self.store) - - def __repr__(self): - return repr(self.store) - - -class Unset(object): - """ - An as-of-yet unset attribute or unprovided default parameter. - """ - - def __repr__(self): - return "" - - -def load_schema(name): - """ - Load a schema from ./schemas/``name``.json and return it. - """ - with open( - os.path.join(os.path.dirname(__file__), "schemas", "{0}.json".format(name)) - ) as f: - data = f.read() - - return json.loads(data) - - -def indent(string, times=1): - """ - A dumb version of `textwrap.indent` from Python 3.3. - """ - - return "\n".join(" " * (4 * times) + line for line in string.splitlines()) - - -def format_as_index(indices): - """ - Construct a single string containing indexing operations for the indices. - - For example, [1, 2, "foo"] -> [1][2]["foo"] - - Arguments: - - indices (sequence): - - The indices to format. - """ - - if not indices: - return "" - return "[%s]" % "][".join(repr(index) for index in indices) - - -def find_additional_properties(instance, schema): - """ - Return the set of additional properties for the given ``instance``. - - Weeds out properties that should have been validated by ``properties`` and - / or ``patternProperties``. - - Assumes ``instance`` is dict-like already. 
- """ - - properties = schema.get("properties", {}) - patterns = "|".join(schema.get("patternProperties", {})) - for property in instance: - if property not in properties: - if patterns and re.search(patterns, property): - continue - yield property - - -def extras_msg(extras): - """ - Create an error message for extra items or properties. - """ - - if len(extras) == 1: - verb = "was" - else: - verb = "were" - return ", ".join(repr(extra) for extra in extras), verb - - -def types_msg(instance, types): - """ - Create an error message for a failure to match the given types. - - If the ``instance`` is an object and contains a ``name`` property, it will - be considered to be a description of that object and used as its type. - - Otherwise the message is simply the reprs of the given ``types``. - """ - - reprs = [] - for type in types: - try: - reprs.append(repr(type["name"])) - except Exception: - reprs.append(repr(type)) - return "%r is not of type %s" % (instance, ", ".join(reprs)) - - -def flatten(suitable_for_isinstance): - """ - isinstance() can accept a bunch of really annoying different types: - * a single type - * a tuple of types - * an arbitrary nested tree of tuples - - Return a flattened tuple of the given argument. - """ - - types = set() - - if not isinstance(suitable_for_isinstance, tuple): - suitable_for_isinstance = (suitable_for_isinstance,) - for thing in suitable_for_isinstance: - if isinstance(thing, tuple): - types.update(flatten(thing)) - else: - types.add(thing) - return tuple(types) - - -def ensure_list(thing): - """ - Wrap ``thing`` in a list if it's a single str. - - Otherwise, return it unchanged. - """ - - if isinstance(thing, str_types): - return [thing] - return thing - - -def equal(one, two): - """ - Check if two things are equal, but evade booleans and ints being equal. - """ - return unbool(one) == unbool(two) - - -def unbool(element, true=object(), false=object()): - """ - A hack to make True and 1 and False and 0 unique for ``uniq``. - """ - - if element is True: - return true - elif element is False: - return false - return element - - -def uniq(container): - """ - Check if all of a container's elements are unique. - - Successively tries first to rely that the elements are hashable, then - falls back on them being sortable, and finally falls back on brute - force. 
- """ - - try: - return len(set(unbool(i) for i in container)) == len(container) - except TypeError: - try: - sort = sorted(unbool(i) for i in container) - sliced = itertools.islice(sort, 1, None) - for i, j in zip(sort, sliced): - if i == j: - return False - except (NotImplementedError, TypeError): - seen = [] - for e in container: - e = unbool(e) - if e in seen: - return False - seen.append(e) - return True diff --git a/vendor/poetry-core/poetry/core/_vendor/jsonschema/_validators.py b/vendor/poetry-core/poetry/core/_vendor/jsonschema/_validators.py deleted file mode 100644 index 179fec09..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/jsonschema/_validators.py +++ /dev/null @@ -1,373 +0,0 @@ -import re - -from jsonschema._utils import ( - ensure_list, - equal, - extras_msg, - find_additional_properties, - types_msg, - unbool, - uniq, -) -from jsonschema.exceptions import FormatError, ValidationError -from jsonschema.compat import iteritems - - -def patternProperties(validator, patternProperties, instance, schema): - if not validator.is_type(instance, "object"): - return - - for pattern, subschema in iteritems(patternProperties): - for k, v in iteritems(instance): - if re.search(pattern, k): - for error in validator.descend( - v, subschema, path=k, schema_path=pattern, - ): - yield error - - -def propertyNames(validator, propertyNames, instance, schema): - if not validator.is_type(instance, "object"): - return - - for property in instance: - for error in validator.descend( - instance=property, - schema=propertyNames, - ): - yield error - - -def additionalProperties(validator, aP, instance, schema): - if not validator.is_type(instance, "object"): - return - - extras = set(find_additional_properties(instance, schema)) - - if validator.is_type(aP, "object"): - for extra in extras: - for error in validator.descend(instance[extra], aP, path=extra): - yield error - elif not aP and extras: - if "patternProperties" in schema: - patterns = sorted(schema["patternProperties"]) - if len(extras) == 1: - verb = "does" - else: - verb = "do" - error = "%s %s not match any of the regexes: %s" % ( - ", ".join(map(repr, sorted(extras))), - verb, - ", ".join(map(repr, patterns)), - ) - yield ValidationError(error) - else: - error = "Additional properties are not allowed (%s %s unexpected)" - yield ValidationError(error % extras_msg(extras)) - - -def items(validator, items, instance, schema): - if not validator.is_type(instance, "array"): - return - - if validator.is_type(items, "array"): - for (index, item), subschema in zip(enumerate(instance), items): - for error in validator.descend( - item, subschema, path=index, schema_path=index, - ): - yield error - else: - for index, item in enumerate(instance): - for error in validator.descend(item, items, path=index): - yield error - - -def additionalItems(validator, aI, instance, schema): - if ( - not validator.is_type(instance, "array") or - validator.is_type(schema.get("items", {}), "object") - ): - return - - len_items = len(schema.get("items", [])) - if validator.is_type(aI, "object"): - for index, item in enumerate(instance[len_items:], start=len_items): - for error in validator.descend(item, aI, path=index): - yield error - elif not aI and len(instance) > len(schema.get("items", [])): - error = "Additional items are not allowed (%s %s unexpected)" - yield ValidationError( - error % - extras_msg(instance[len(schema.get("items", [])):]) - ) - - -def const(validator, const, instance, schema): - if not equal(instance, const): - yield ValidationError("%r 
was expected" % (const,)) - - -def contains(validator, contains, instance, schema): - if not validator.is_type(instance, "array"): - return - - if not any(validator.is_valid(element, contains) for element in instance): - yield ValidationError( - "None of %r are valid under the given schema" % (instance,) - ) - - -def exclusiveMinimum(validator, minimum, instance, schema): - if not validator.is_type(instance, "number"): - return - - if instance <= minimum: - yield ValidationError( - "%r is less than or equal to the minimum of %r" % ( - instance, minimum, - ), - ) - - -def exclusiveMaximum(validator, maximum, instance, schema): - if not validator.is_type(instance, "number"): - return - - if instance >= maximum: - yield ValidationError( - "%r is greater than or equal to the maximum of %r" % ( - instance, maximum, - ), - ) - - -def minimum(validator, minimum, instance, schema): - if not validator.is_type(instance, "number"): - return - - if instance < minimum: - yield ValidationError( - "%r is less than the minimum of %r" % (instance, minimum) - ) - - -def maximum(validator, maximum, instance, schema): - if not validator.is_type(instance, "number"): - return - - if instance > maximum: - yield ValidationError( - "%r is greater than the maximum of %r" % (instance, maximum) - ) - - -def multipleOf(validator, dB, instance, schema): - if not validator.is_type(instance, "number"): - return - - if isinstance(dB, float): - quotient = instance / dB - failed = int(quotient) != quotient - else: - failed = instance % dB - - if failed: - yield ValidationError("%r is not a multiple of %r" % (instance, dB)) - - -def minItems(validator, mI, instance, schema): - if validator.is_type(instance, "array") and len(instance) < mI: - yield ValidationError("%r is too short" % (instance,)) - - -def maxItems(validator, mI, instance, schema): - if validator.is_type(instance, "array") and len(instance) > mI: - yield ValidationError("%r is too long" % (instance,)) - - -def uniqueItems(validator, uI, instance, schema): - if ( - uI and - validator.is_type(instance, "array") and - not uniq(instance) - ): - yield ValidationError("%r has non-unique elements" % (instance,)) - - -def pattern(validator, patrn, instance, schema): - if ( - validator.is_type(instance, "string") and - not re.search(patrn, instance) - ): - yield ValidationError("%r does not match %r" % (instance, patrn)) - - -def format(validator, format, instance, schema): - if validator.format_checker is not None: - try: - validator.format_checker.check(instance, format) - except FormatError as error: - yield ValidationError(error.message, cause=error.cause) - - -def minLength(validator, mL, instance, schema): - if validator.is_type(instance, "string") and len(instance) < mL: - yield ValidationError("%r is too short" % (instance,)) - - -def maxLength(validator, mL, instance, schema): - if validator.is_type(instance, "string") and len(instance) > mL: - yield ValidationError("%r is too long" % (instance,)) - - -def dependencies(validator, dependencies, instance, schema): - if not validator.is_type(instance, "object"): - return - - for property, dependency in iteritems(dependencies): - if property not in instance: - continue - - if validator.is_type(dependency, "array"): - for each in dependency: - if each not in instance: - message = "%r is a dependency of %r" - yield ValidationError(message % (each, property)) - else: - for error in validator.descend( - instance, dependency, schema_path=property, - ): - yield error - - -def enum(validator, enums, instance, schema): - 
if instance == 0 or instance == 1: - unbooled = unbool(instance) - if all(unbooled != unbool(each) for each in enums): - yield ValidationError("%r is not one of %r" % (instance, enums)) - elif instance not in enums: - yield ValidationError("%r is not one of %r" % (instance, enums)) - - -def ref(validator, ref, instance, schema): - resolve = getattr(validator.resolver, "resolve", None) - if resolve is None: - with validator.resolver.resolving(ref) as resolved: - for error in validator.descend(instance, resolved): - yield error - else: - scope, resolved = validator.resolver.resolve(ref) - validator.resolver.push_scope(scope) - - try: - for error in validator.descend(instance, resolved): - yield error - finally: - validator.resolver.pop_scope() - - -def type(validator, types, instance, schema): - types = ensure_list(types) - - if not any(validator.is_type(instance, type) for type in types): - yield ValidationError(types_msg(instance, types)) - - -def properties(validator, properties, instance, schema): - if not validator.is_type(instance, "object"): - return - - for property, subschema in iteritems(properties): - if property in instance: - for error in validator.descend( - instance[property], - subschema, - path=property, - schema_path=property, - ): - yield error - - -def required(validator, required, instance, schema): - if not validator.is_type(instance, "object"): - return - for property in required: - if property not in instance: - yield ValidationError("%r is a required property" % property) - - -def minProperties(validator, mP, instance, schema): - if validator.is_type(instance, "object") and len(instance) < mP: - yield ValidationError( - "%r does not have enough properties" % (instance,) - ) - - -def maxProperties(validator, mP, instance, schema): - if not validator.is_type(instance, "object"): - return - if validator.is_type(instance, "object") and len(instance) > mP: - yield ValidationError("%r has too many properties" % (instance,)) - - -def allOf(validator, allOf, instance, schema): - for index, subschema in enumerate(allOf): - for error in validator.descend(instance, subschema, schema_path=index): - yield error - - -def anyOf(validator, anyOf, instance, schema): - all_errors = [] - for index, subschema in enumerate(anyOf): - errs = list(validator.descend(instance, subschema, schema_path=index)) - if not errs: - break - all_errors.extend(errs) - else: - yield ValidationError( - "%r is not valid under any of the given schemas" % (instance,), - context=all_errors, - ) - - -def oneOf(validator, oneOf, instance, schema): - subschemas = enumerate(oneOf) - all_errors = [] - for index, subschema in subschemas: - errs = list(validator.descend(instance, subschema, schema_path=index)) - if not errs: - first_valid = subschema - break - all_errors.extend(errs) - else: - yield ValidationError( - "%r is not valid under any of the given schemas" % (instance,), - context=all_errors, - ) - - more_valid = [s for i, s in subschemas if validator.is_valid(instance, s)] - if more_valid: - more_valid.append(first_valid) - reprs = ", ".join(repr(schema) for schema in more_valid) - yield ValidationError( - "%r is valid under each of %s" % (instance, reprs) - ) - - -def not_(validator, not_schema, instance, schema): - if validator.is_valid(instance, not_schema): - yield ValidationError( - "%r is not allowed for %r" % (not_schema, instance) - ) - - -def if_(validator, if_schema, instance, schema): - if validator.is_valid(instance, if_schema): - if u"then" in schema: - then = schema[u"then"] - for error in 
validator.descend(instance, then, schema_path="then"): - yield error - elif u"else" in schema: - else_ = schema[u"else"] - for error in validator.descend(instance, else_, schema_path="else"): - yield error diff --git a/vendor/poetry-core/poetry/core/_vendor/jsonschema/benchmarks/issue232.py b/vendor/poetry-core/poetry/core/_vendor/jsonschema/benchmarks/issue232.py deleted file mode 100644 index 65e3aedf..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/jsonschema/benchmarks/issue232.py +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env python -""" -A performance benchmark using the example from issue #232. - -See https://github.com/Julian/jsonschema/pull/232. -""" -from twisted.python.filepath import FilePath -from pyperf import Runner -from pyrsistent import m - -from jsonschema.tests._suite import Version -import jsonschema - - -issue232 = Version( - path=FilePath(__file__).sibling("issue232"), - remotes=m(), - name="issue232", -) - - -if __name__ == "__main__": - issue232.benchmark( - runner=Runner(), - Validator=jsonschema.Draft4Validator, - ) diff --git a/vendor/poetry-core/poetry/core/_vendor/jsonschema/cli.py b/vendor/poetry-core/poetry/core/_vendor/jsonschema/cli.py deleted file mode 100644 index ab3335b2..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/jsonschema/cli.py +++ /dev/null @@ -1,90 +0,0 @@ -""" -The ``jsonschema`` command line. -""" -from __future__ import absolute_import -import argparse -import json -import sys - -from jsonschema import __version__ -from jsonschema._reflect import namedAny -from jsonschema.validators import validator_for - - -def _namedAnyWithDefault(name): - if "." not in name: - name = "jsonschema." + name - return namedAny(name) - - -def _json_file(path): - with open(path) as file: - return json.load(file) - - -parser = argparse.ArgumentParser( - description="JSON Schema Validation CLI", -) -parser.add_argument( - "-i", "--instance", - action="append", - dest="instances", - type=_json_file, - help=( - "a path to a JSON instance (i.e. filename.json) " - "to validate (may be specified multiple times)" - ), -) -parser.add_argument( - "-F", "--error-format", - default="{error.instance}: {error.message}\n", - help=( - "the format to use for each error output message, specified in " - "a form suitable for passing to str.format, which will be called " - "with 'error' for each error" - ), -) -parser.add_argument( - "-V", "--validator", - type=_namedAnyWithDefault, - help=( - "the fully qualified object name of a validator to use, or, for " - "validators that are registered with jsonschema, simply the name " - "of the class." - ), -) -parser.add_argument( - "--version", - action="version", - version=__version__, -) -parser.add_argument( - "schema", - help="the JSON Schema to validate with (i.e. 
schema.json)", - type=_json_file, -) - - -def parse_args(args): - arguments = vars(parser.parse_args(args=args or ["--help"])) - if arguments["validator"] is None: - arguments["validator"] = validator_for(arguments["schema"]) - return arguments - - -def main(args=sys.argv[1:]): - sys.exit(run(arguments=parse_args(args=args))) - - -def run(arguments, stdout=sys.stdout, stderr=sys.stderr): - error_format = arguments["error_format"] - validator = arguments["validator"](schema=arguments["schema"]) - - validator.check_schema(arguments["schema"]) - - errored = False - for instance in arguments["instances"] or (): - for error in validator.iter_errors(instance): - stderr.write(error_format.format(error=error)) - errored = True - return errored diff --git a/vendor/poetry-core/poetry/core/_vendor/jsonschema/compat.py b/vendor/poetry-core/poetry/core/_vendor/jsonschema/compat.py deleted file mode 100644 index 47e09804..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/jsonschema/compat.py +++ /dev/null @@ -1,55 +0,0 @@ -""" -Python 2/3 compatibility helpers. - -Note: This module is *not* public API. -""" -import contextlib -import operator -import sys - - -try: - from collections.abc import MutableMapping, Sequence # noqa -except ImportError: - from collections import MutableMapping, Sequence # noqa - -PY3 = sys.version_info[0] >= 3 - -if PY3: - zip = zip - from functools import lru_cache - from io import StringIO as NativeIO - from urllib.parse import ( - unquote, urljoin, urlunsplit, SplitResult, urlsplit - ) - from urllib.request import pathname2url, urlopen - str_types = str, - int_types = int, - iteritems = operator.methodcaller("items") -else: - from itertools import izip as zip # noqa - from io import BytesIO as NativeIO - from urlparse import urljoin, urlunsplit, SplitResult, urlsplit - from urllib import pathname2url, unquote # noqa - import urllib2 # noqa - def urlopen(*args, **kwargs): - return contextlib.closing(urllib2.urlopen(*args, **kwargs)) - - str_types = basestring - int_types = int, long - iteritems = operator.methodcaller("iteritems") - - from functools32 import lru_cache - - -def urldefrag(url): - if "#" in url: - s, n, p, q, frag = urlsplit(url) - defrag = urlunsplit((s, n, p, q, "")) - else: - defrag = url - frag = "" - return defrag, frag - - -# flake8: noqa diff --git a/vendor/poetry-core/poetry/core/_vendor/jsonschema/exceptions.py b/vendor/poetry-core/poetry/core/_vendor/jsonschema/exceptions.py deleted file mode 100644 index 691dcffe..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/jsonschema/exceptions.py +++ /dev/null @@ -1,374 +0,0 @@ -""" -Validation errors, and some surrounding helpers. 
-""" -from collections import defaultdict, deque -import itertools -import pprint -import textwrap - -import attr - -from jsonschema import _utils -from jsonschema.compat import PY3, iteritems - - -WEAK_MATCHES = frozenset(["anyOf", "oneOf"]) -STRONG_MATCHES = frozenset() - -_unset = _utils.Unset() - - -class _Error(Exception): - def __init__( - self, - message, - validator=_unset, - path=(), - cause=None, - context=(), - validator_value=_unset, - instance=_unset, - schema=_unset, - schema_path=(), - parent=None, - ): - super(_Error, self).__init__( - message, - validator, - path, - cause, - context, - validator_value, - instance, - schema, - schema_path, - parent, - ) - self.message = message - self.path = self.relative_path = deque(path) - self.schema_path = self.relative_schema_path = deque(schema_path) - self.context = list(context) - self.cause = self.__cause__ = cause - self.validator = validator - self.validator_value = validator_value - self.instance = instance - self.schema = schema - self.parent = parent - - for error in context: - error.parent = self - - def __repr__(self): - return "<%s: %r>" % (self.__class__.__name__, self.message) - - def __unicode__(self): - essential_for_verbose = ( - self.validator, self.validator_value, self.instance, self.schema, - ) - if any(m is _unset for m in essential_for_verbose): - return self.message - - pschema = pprint.pformat(self.schema, width=72) - pinstance = pprint.pformat(self.instance, width=72) - return self.message + textwrap.dedent(""" - - Failed validating %r in %s%s: - %s - - On %s%s: - %s - """.rstrip() - ) % ( - self.validator, - self._word_for_schema_in_error_message, - _utils.format_as_index(list(self.relative_schema_path)[:-1]), - _utils.indent(pschema), - self._word_for_instance_in_error_message, - _utils.format_as_index(self.relative_path), - _utils.indent(pinstance), - ) - - if PY3: - __str__ = __unicode__ - else: - def __str__(self): - return unicode(self).encode("utf-8") - - @classmethod - def create_from(cls, other): - return cls(**other._contents()) - - @property - def absolute_path(self): - parent = self.parent - if parent is None: - return self.relative_path - - path = deque(self.relative_path) - path.extendleft(reversed(parent.absolute_path)) - return path - - @property - def absolute_schema_path(self): - parent = self.parent - if parent is None: - return self.relative_schema_path - - path = deque(self.relative_schema_path) - path.extendleft(reversed(parent.absolute_schema_path)) - return path - - def _set(self, **kwargs): - for k, v in iteritems(kwargs): - if getattr(self, k) is _unset: - setattr(self, k, v) - - def _contents(self): - attrs = ( - "message", "cause", "context", "validator", "validator_value", - "path", "schema_path", "instance", "schema", "parent", - ) - return dict((attr, getattr(self, attr)) for attr in attrs) - - -class ValidationError(_Error): - """ - An instance was invalid under a provided schema. - """ - - _word_for_schema_in_error_message = "schema" - _word_for_instance_in_error_message = "instance" - - -class SchemaError(_Error): - """ - A schema was invalid under its corresponding metaschema. - """ - - _word_for_schema_in_error_message = "metaschema" - _word_for_instance_in_error_message = "schema" - - -@attr.s(hash=True) -class RefResolutionError(Exception): - """ - A ref could not be resolved. - """ - - _cause = attr.ib() - - def __str__(self): - return str(self._cause) - - -class UndefinedTypeCheck(Exception): - """ - A type checker was asked to check a type it did not have registered. 
- """ - - def __init__(self, type): - self.type = type - - def __unicode__(self): - return "Type %r is unknown to this type checker" % self.type - - if PY3: - __str__ = __unicode__ - else: - def __str__(self): - return unicode(self).encode("utf-8") - - -class UnknownType(Exception): - """ - A validator was asked to validate an instance against an unknown type. - """ - - def __init__(self, type, instance, schema): - self.type = type - self.instance = instance - self.schema = schema - - def __unicode__(self): - pschema = pprint.pformat(self.schema, width=72) - pinstance = pprint.pformat(self.instance, width=72) - return textwrap.dedent(""" - Unknown type %r for validator with schema: - %s - - While checking instance: - %s - """.rstrip() - ) % (self.type, _utils.indent(pschema), _utils.indent(pinstance)) - - if PY3: - __str__ = __unicode__ - else: - def __str__(self): - return unicode(self).encode("utf-8") - - -class FormatError(Exception): - """ - Validating a format failed. - """ - - def __init__(self, message, cause=None): - super(FormatError, self).__init__(message, cause) - self.message = message - self.cause = self.__cause__ = cause - - def __unicode__(self): - return self.message - - if PY3: - __str__ = __unicode__ - else: - def __str__(self): - return self.message.encode("utf-8") - - -class ErrorTree(object): - """ - ErrorTrees make it easier to check which validations failed. - """ - - _instance = _unset - - def __init__(self, errors=()): - self.errors = {} - self._contents = defaultdict(self.__class__) - - for error in errors: - container = self - for element in error.path: - container = container[element] - container.errors[error.validator] = error - - container._instance = error.instance - - def __contains__(self, index): - """ - Check whether ``instance[index]`` has any errors. - """ - - return index in self._contents - - def __getitem__(self, index): - """ - Retrieve the child tree one level down at the given ``index``. - - If the index is not in the instance that this tree corresponds to and - is not known by this tree, whatever error would be raised by - ``instance.__getitem__`` will be propagated (usually this is some - subclass of `exceptions.LookupError`. - """ - - if self._instance is not _unset and index not in self: - self._instance[index] - return self._contents[index] - - def __setitem__(self, index, value): - """ - Add an error to the tree at the given ``index``. - """ - self._contents[index] = value - - def __iter__(self): - """ - Iterate (non-recursively) over the indices in the instance with errors. - """ - - return iter(self._contents) - - def __len__(self): - """ - Return the `total_errors`. - """ - return self.total_errors - - def __repr__(self): - return "<%s (%s total errors)>" % (self.__class__.__name__, len(self)) - - @property - def total_errors(self): - """ - The total number of errors in the entire tree, including children. - """ - - child_errors = sum(len(tree) for _, tree in iteritems(self._contents)) - return len(self.errors) + child_errors - - -def by_relevance(weak=WEAK_MATCHES, strong=STRONG_MATCHES): - """ - Create a key function that can be used to sort errors by relevance. - - Arguments: - weak (set): - a collection of validator names to consider to be "weak". - If there are two errors at the same level of the instance - and one is in the set of weak validator names, the other - error will take priority. 
By default, :validator:`anyOf` and - :validator:`oneOf` are considered weak validators and will - be superseded by other same-level validation errors. - - strong (set): - a collection of validator names to consider to be "strong" - """ - def relevance(error): - validator = error.validator - return -len(error.path), validator not in weak, validator in strong - return relevance - - -relevance = by_relevance() - - -def best_match(errors, key=relevance): - """ - Try to find an error that appears to be the best match among given errors. - - In general, errors that are higher up in the instance (i.e. for which - `ValidationError.path` is shorter) are considered better matches, - since they indicate "more" is wrong with the instance. - - If the resulting match is either :validator:`oneOf` or :validator:`anyOf`, - the *opposite* assumption is made -- i.e. the deepest error is picked, - since these validators only need to match once, and any other errors may - not be relevant. - - Arguments: - errors (collections.Iterable): - - the errors to select from. Do not provide a mixture of - errors from different validation attempts (i.e. from - different instances or schemas), since it won't produce - sensical output. - - key (collections.Callable): - - the key to use when sorting errors. See `relevance` and - transitively `by_relevance` for more details (the default is - to sort with the defaults of that function). Changing the - default is only useful if you want to change the function - that rates errors but still want the error context descent - done by this function. - - Returns: - the best matching error, or ``None`` if the iterable was empty - - .. note:: - - This function is a heuristic. Its return value may change for a given - set of inputs from version to version if better heuristics are added. 
- """ - errors = iter(errors) - best = next(errors, None) - if best is None: - return - best = max(itertools.chain([best], errors), key=key) - - while best.context: - best = min(best.context, key=key) - return best diff --git a/vendor/poetry-core/poetry/core/_vendor/jsonschema/schemas/draft3.json b/vendor/poetry-core/poetry/core/_vendor/jsonschema/schemas/draft3.json deleted file mode 100644 index f8a09c56..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/jsonschema/schemas/draft3.json +++ /dev/null @@ -1,199 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-03/schema#", - "dependencies": { - "exclusiveMaximum": "maximum", - "exclusiveMinimum": "minimum" - }, - "id": "http://json-schema.org/draft-03/schema#", - "properties": { - "$ref": { - "format": "uri", - "type": "string" - }, - "$schema": { - "format": "uri", - "type": "string" - }, - "additionalItems": { - "default": {}, - "type": [ - { - "$ref": "#" - }, - "boolean" - ] - }, - "additionalProperties": { - "default": {}, - "type": [ - { - "$ref": "#" - }, - "boolean" - ] - }, - "default": { - "type": "any" - }, - "dependencies": { - "additionalProperties": { - "items": { - "type": "string" - }, - "type": [ - "string", - "array", - { - "$ref": "#" - } - ] - }, - "default": {}, - "type": [ - "string", - "array", - "object" - ] - }, - "description": { - "type": "string" - }, - "disallow": { - "items": { - "type": [ - "string", - { - "$ref": "#" - } - ] - }, - "type": [ - "string", - "array" - ], - "uniqueItems": true - }, - "divisibleBy": { - "default": 1, - "exclusiveMinimum": true, - "minimum": 0, - "type": "number" - }, - "enum": { - "type": "array" - }, - "exclusiveMaximum": { - "default": false, - "type": "boolean" - }, - "exclusiveMinimum": { - "default": false, - "type": "boolean" - }, - "extends": { - "default": {}, - "items": { - "$ref": "#" - }, - "type": [ - { - "$ref": "#" - }, - "array" - ] - }, - "format": { - "type": "string" - }, - "id": { - "format": "uri", - "type": "string" - }, - "items": { - "default": {}, - "items": { - "$ref": "#" - }, - "type": [ - { - "$ref": "#" - }, - "array" - ] - }, - "maxDecimal": { - "minimum": 0, - "type": "number" - }, - "maxItems": { - "minimum": 0, - "type": "integer" - }, - "maxLength": { - "type": "integer" - }, - "maximum": { - "type": "number" - }, - "minItems": { - "default": 0, - "minimum": 0, - "type": "integer" - }, - "minLength": { - "default": 0, - "minimum": 0, - "type": "integer" - }, - "minimum": { - "type": "number" - }, - "pattern": { - "format": "regex", - "type": "string" - }, - "patternProperties": { - "additionalProperties": { - "$ref": "#" - }, - "default": {}, - "type": "object" - }, - "properties": { - "additionalProperties": { - "$ref": "#", - "type": "object" - }, - "default": {}, - "type": "object" - }, - "required": { - "default": false, - "type": "boolean" - }, - "title": { - "type": "string" - }, - "type": { - "default": "any", - "items": { - "type": [ - "string", - { - "$ref": "#" - } - ] - }, - "type": [ - "string", - "array" - ], - "uniqueItems": true - }, - "uniqueItems": { - "default": false, - "type": "boolean" - } - }, - "type": "object" -} diff --git a/vendor/poetry-core/poetry/core/_vendor/jsonschema/schemas/draft4.json b/vendor/poetry-core/poetry/core/_vendor/jsonschema/schemas/draft4.json deleted file mode 100644 index 9b666cff..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/jsonschema/schemas/draft4.json +++ /dev/null @@ -1,222 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - "default": {}, - "definitions": { - 
"positiveInteger": { - "minimum": 0, - "type": "integer" - }, - "positiveIntegerDefault0": { - "allOf": [ - { - "$ref": "#/definitions/positiveInteger" - }, - { - "default": 0 - } - ] - }, - "schemaArray": { - "items": { - "$ref": "#" - }, - "minItems": 1, - "type": "array" - }, - "simpleTypes": { - "enum": [ - "array", - "boolean", - "integer", - "null", - "number", - "object", - "string" - ] - }, - "stringArray": { - "items": { - "type": "string" - }, - "minItems": 1, - "type": "array", - "uniqueItems": true - } - }, - "dependencies": { - "exclusiveMaximum": [ - "maximum" - ], - "exclusiveMinimum": [ - "minimum" - ] - }, - "description": "Core schema meta-schema", - "id": "http://json-schema.org/draft-04/schema#", - "properties": { - "$schema": { - "format": "uri", - "type": "string" - }, - "additionalItems": { - "anyOf": [ - { - "type": "boolean" - }, - { - "$ref": "#" - } - ], - "default": {} - }, - "additionalProperties": { - "anyOf": [ - { - "type": "boolean" - }, - { - "$ref": "#" - } - ], - "default": {} - }, - "allOf": { - "$ref": "#/definitions/schemaArray" - }, - "anyOf": { - "$ref": "#/definitions/schemaArray" - }, - "default": {}, - "definitions": { - "additionalProperties": { - "$ref": "#" - }, - "default": {}, - "type": "object" - }, - "dependencies": { - "additionalProperties": { - "anyOf": [ - { - "$ref": "#" - }, - { - "$ref": "#/definitions/stringArray" - } - ] - }, - "type": "object" - }, - "description": { - "type": "string" - }, - "enum": { - "type": "array" - }, - "exclusiveMaximum": { - "default": false, - "type": "boolean" - }, - "exclusiveMinimum": { - "default": false, - "type": "boolean" - }, - "format": { - "type": "string" - }, - "id": { - "format": "uri", - "type": "string" - }, - "items": { - "anyOf": [ - { - "$ref": "#" - }, - { - "$ref": "#/definitions/schemaArray" - } - ], - "default": {} - }, - "maxItems": { - "$ref": "#/definitions/positiveInteger" - }, - "maxLength": { - "$ref": "#/definitions/positiveInteger" - }, - "maxProperties": { - "$ref": "#/definitions/positiveInteger" - }, - "maximum": { - "type": "number" - }, - "minItems": { - "$ref": "#/definitions/positiveIntegerDefault0" - }, - "minLength": { - "$ref": "#/definitions/positiveIntegerDefault0" - }, - "minProperties": { - "$ref": "#/definitions/positiveIntegerDefault0" - }, - "minimum": { - "type": "number" - }, - "multipleOf": { - "exclusiveMinimum": true, - "minimum": 0, - "type": "number" - }, - "not": { - "$ref": "#" - }, - "oneOf": { - "$ref": "#/definitions/schemaArray" - }, - "pattern": { - "format": "regex", - "type": "string" - }, - "patternProperties": { - "additionalProperties": { - "$ref": "#" - }, - "default": {}, - "type": "object" - }, - "properties": { - "additionalProperties": { - "$ref": "#" - }, - "default": {}, - "type": "object" - }, - "required": { - "$ref": "#/definitions/stringArray" - }, - "title": { - "type": "string" - }, - "type": { - "anyOf": [ - { - "$ref": "#/definitions/simpleTypes" - }, - { - "items": { - "$ref": "#/definitions/simpleTypes" - }, - "minItems": 1, - "type": "array", - "uniqueItems": true - } - ] - }, - "uniqueItems": { - "default": false, - "type": "boolean" - } - }, - "type": "object" -} diff --git a/vendor/poetry-core/poetry/core/_vendor/jsonschema/validators.py b/vendor/poetry-core/poetry/core/_vendor/jsonschema/validators.py deleted file mode 100644 index 1dc420c7..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/jsonschema/validators.py +++ /dev/null @@ -1,970 +0,0 @@ -""" -Creation and extension of validators, with implementations 
for existing drafts. -""" -from __future__ import division - -from warnings import warn -import contextlib -import json -import numbers - -from six import add_metaclass - -from jsonschema import ( - _legacy_validators, - _types, - _utils, - _validators, - exceptions, -) -from jsonschema.compat import ( - Sequence, - int_types, - iteritems, - lru_cache, - str_types, - unquote, - urldefrag, - urljoin, - urlopen, - urlsplit, -) - -# Sigh. https://gitlab.com/pycqa/flake8/issues/280 -# https://github.com/pyga/ebb-lint/issues/7 -# Imported for backwards compatibility. -from jsonschema.exceptions import ErrorTree -ErrorTree - - -class _DontDoThat(Exception): - """ - Raised when a Validators with non-default type checker is misused. - - Asking one for DEFAULT_TYPES doesn't make sense, since type checkers - exist for the unrepresentable cases where DEFAULT_TYPES can't - represent the type relationship. - """ - - def __str__(self): - return "DEFAULT_TYPES cannot be used on Validators using TypeCheckers" - - -validators = {} -meta_schemas = _utils.URIDict() - - -def _generate_legacy_type_checks(types=()): - """ - Generate newer-style type checks out of JSON-type-name-to-type mappings. - - Arguments: - - types (dict): - - A mapping of type names to their Python types - - Returns: - - A dictionary of definitions to pass to `TypeChecker` - """ - types = dict(types) - - def gen_type_check(pytypes): - pytypes = _utils.flatten(pytypes) - - def type_check(checker, instance): - if isinstance(instance, bool): - if bool not in pytypes: - return False - return isinstance(instance, pytypes) - - return type_check - - definitions = {} - for typename, pytypes in iteritems(types): - definitions[typename] = gen_type_check(pytypes) - - return definitions - - -_DEPRECATED_DEFAULT_TYPES = { - u"array": list, - u"boolean": bool, - u"integer": int_types, - u"null": type(None), - u"number": numbers.Number, - u"object": dict, - u"string": str_types, -} -_TYPE_CHECKER_FOR_DEPRECATED_DEFAULT_TYPES = _types.TypeChecker( - type_checkers=_generate_legacy_type_checks(_DEPRECATED_DEFAULT_TYPES), -) - - -def validates(version): - """ - Register the decorated validator for a ``version`` of the specification. - - Registered validators and their meta schemas will be considered when - parsing ``$schema`` properties' URIs. - - Arguments: - - version (str): - - An identifier to use as the version's name - - Returns: - - collections.Callable: - - a class decorator to decorate the validator with the version - """ - - def _validates(cls): - validators[version] = cls - meta_schema_id = cls.ID_OF(cls.META_SCHEMA) - if meta_schema_id: - meta_schemas[meta_schema_id] = cls - return cls - return _validates - - -def _DEFAULT_TYPES(self): - if self._CREATED_WITH_DEFAULT_TYPES is None: - raise _DontDoThat() - - warn( - ( - "The DEFAULT_TYPES attribute is deprecated. " - "See the type checker attached to this validator instead." - ), - DeprecationWarning, - stacklevel=2, - ) - return self._DEFAULT_TYPES - - -class _DefaultTypesDeprecatingMetaClass(type): - DEFAULT_TYPES = property(_DEFAULT_TYPES) - - -def _id_of(schema): - if schema is True or schema is False: - return u"" - return schema.get(u"$id", u"") - - -def create( - meta_schema, - validators=(), - version=None, - default_types=None, - type_checker=None, - id_of=_id_of, -): - """ - Create a new validator class. 
- - Arguments: - - meta_schema (collections.Mapping): - - the meta schema for the new validator class - - validators (collections.Mapping): - - a mapping from names to callables, where each callable will - validate the schema property with the given name. - - Each callable should take 4 arguments: - - 1. a validator instance, - 2. the value of the property being validated within the - instance - 3. the instance - 4. the schema - - version (str): - - an identifier for the version that this validator class will - validate. If provided, the returned validator class will - have its ``__name__`` set to include the version, and also - will have `jsonschema.validators.validates` automatically - called for the given version. - - type_checker (jsonschema.TypeChecker): - - a type checker, used when applying the :validator:`type` validator. - - If unprovided, a `jsonschema.TypeChecker` will be created - with a set of default types typical of JSON Schema drafts. - - default_types (collections.Mapping): - - .. deprecated:: 3.0.0 - - Please use the type_checker argument instead. - - If set, it provides mappings of JSON types to Python types - that will be converted to functions and redefined in this - object's `jsonschema.TypeChecker`. - - id_of (collections.Callable): - - A function that given a schema, returns its ID. - - Returns: - - a new `jsonschema.IValidator` class - """ - - if default_types is not None: - if type_checker is not None: - raise TypeError( - "Do not specify default_types when providing a type checker.", - ) - _created_with_default_types = True - warn( - ( - "The default_types argument is deprecated. " - "Use the type_checker argument instead." - ), - DeprecationWarning, - stacklevel=2, - ) - type_checker = _types.TypeChecker( - type_checkers=_generate_legacy_type_checks(default_types), - ) - else: - default_types = _DEPRECATED_DEFAULT_TYPES - if type_checker is None: - _created_with_default_types = False - type_checker = _TYPE_CHECKER_FOR_DEPRECATED_DEFAULT_TYPES - elif type_checker is _TYPE_CHECKER_FOR_DEPRECATED_DEFAULT_TYPES: - _created_with_default_types = False - else: - _created_with_default_types = None - - @add_metaclass(_DefaultTypesDeprecatingMetaClass) - class Validator(object): - - VALIDATORS = dict(validators) - META_SCHEMA = dict(meta_schema) - TYPE_CHECKER = type_checker - ID_OF = staticmethod(id_of) - - DEFAULT_TYPES = property(_DEFAULT_TYPES) - _DEFAULT_TYPES = dict(default_types) - _CREATED_WITH_DEFAULT_TYPES = _created_with_default_types - - def __init__( - self, - schema, - types=(), - resolver=None, - format_checker=None, - ): - if types: - warn( - ( - "The types argument is deprecated. Provide " - "a type_checker to jsonschema.validators.extend " - "instead." 
- ), - DeprecationWarning, - stacklevel=2, - ) - - self.TYPE_CHECKER = self.TYPE_CHECKER.redefine_many( - _generate_legacy_type_checks(types), - ) - - if resolver is None: - resolver = RefResolver.from_schema(schema, id_of=id_of) - - self.resolver = resolver - self.format_checker = format_checker - self.schema = schema - - @classmethod - def check_schema(cls, schema): - for error in cls(cls.META_SCHEMA).iter_errors(schema): - raise exceptions.SchemaError.create_from(error) - - def iter_errors(self, instance, _schema=None): - if _schema is None: - _schema = self.schema - - if _schema is True: - return - elif _schema is False: - yield exceptions.ValidationError( - "False schema does not allow %r" % (instance,), - validator=None, - validator_value=None, - instance=instance, - schema=_schema, - ) - return - - scope = id_of(_schema) - if scope: - self.resolver.push_scope(scope) - try: - ref = _schema.get(u"$ref") - if ref is not None: - validators = [(u"$ref", ref)] - else: - validators = iteritems(_schema) - - for k, v in validators: - validator = self.VALIDATORS.get(k) - if validator is None: - continue - - errors = validator(self, v, instance, _schema) or () - for error in errors: - # set details if not already set by the called function - error._set( - validator=k, - validator_value=v, - instance=instance, - schema=_schema, - ) - if k != u"$ref": - error.schema_path.appendleft(k) - yield error - finally: - if scope: - self.resolver.pop_scope() - - def descend(self, instance, schema, path=None, schema_path=None): - for error in self.iter_errors(instance, schema): - if path is not None: - error.path.appendleft(path) - if schema_path is not None: - error.schema_path.appendleft(schema_path) - yield error - - def validate(self, *args, **kwargs): - for error in self.iter_errors(*args, **kwargs): - raise error - - def is_type(self, instance, type): - try: - return self.TYPE_CHECKER.is_type(instance, type) - except exceptions.UndefinedTypeCheck: - raise exceptions.UnknownType(type, instance, self.schema) - - def is_valid(self, instance, _schema=None): - error = next(self.iter_errors(instance, _schema), None) - return error is None - - if version is not None: - Validator = validates(version)(Validator) - Validator.__name__ = version.title().replace(" ", "") + "Validator" - - return Validator - - -def extend(validator, validators=(), version=None, type_checker=None): - """ - Create a new validator class by extending an existing one. - - Arguments: - - validator (jsonschema.IValidator): - - an existing validator class - - validators (collections.Mapping): - - a mapping of new validator callables to extend with, whose - structure is as in `create`. - - .. note:: - - Any validator callables with the same name as an - existing one will (silently) replace the old validator - callable entirely, effectively overriding any validation - done in the "parent" validator class. - - If you wish to instead extend the behavior of a parent's - validator callable, delegate and call it directly in - the new validator function by retrieving it using - ``OldValidator.VALIDATORS["validator_name"]``. - - version (str): - - a version for the new validator class - - type_checker (jsonschema.TypeChecker): - - a type checker, used when applying the :validator:`type` validator. - - If unprovided, the type checker of the extended - `jsonschema.IValidator` will be carried along. - - Returns: - - a new `jsonschema.IValidator` class extending the one provided - - ..
note:: Meta Schemas - - The new validator class will have its parent's meta schema. - - If you wish to change or extend the meta schema in the new - validator class, modify ``META_SCHEMA`` directly on the returned - class. Note that no implicit copying is done, so a copy should - likely be made before modifying it, in order to not affect the - old validator. - """ - - all_validators = dict(validator.VALIDATORS) - all_validators.update(validators) - - if type_checker is None: - type_checker = validator.TYPE_CHECKER - elif validator._CREATED_WITH_DEFAULT_TYPES: - raise TypeError( - "Cannot extend a validator created with default_types " - "with a type_checker. Update the validator to use a " - "type_checker when created." - ) - return create( - meta_schema=validator.META_SCHEMA, - validators=all_validators, - version=version, - type_checker=type_checker, - id_of=validator.ID_OF, - ) - - -Draft3Validator = create( - meta_schema=_utils.load_schema("draft3"), - validators={ - u"$ref": _validators.ref, - u"additionalItems": _validators.additionalItems, - u"additionalProperties": _validators.additionalProperties, - u"dependencies": _legacy_validators.dependencies_draft3, - u"disallow": _legacy_validators.disallow_draft3, - u"divisibleBy": _validators.multipleOf, - u"enum": _validators.enum, - u"extends": _legacy_validators.extends_draft3, - u"format": _validators.format, - u"items": _legacy_validators.items_draft3_draft4, - u"maxItems": _validators.maxItems, - u"maxLength": _validators.maxLength, - u"maximum": _legacy_validators.maximum_draft3_draft4, - u"minItems": _validators.minItems, - u"minLength": _validators.minLength, - u"minimum": _legacy_validators.minimum_draft3_draft4, - u"pattern": _validators.pattern, - u"patternProperties": _validators.patternProperties, - u"properties": _legacy_validators.properties_draft3, - u"type": _legacy_validators.type_draft3, - u"uniqueItems": _validators.uniqueItems, - }, - type_checker=_types.draft3_type_checker, - version="draft3", - id_of=lambda schema: schema.get(u"id", ""), -) - -Draft4Validator = create( - meta_schema=_utils.load_schema("draft4"), - validators={ - u"$ref": _validators.ref, - u"additionalItems": _validators.additionalItems, - u"additionalProperties": _validators.additionalProperties, - u"allOf": _validators.allOf, - u"anyOf": _validators.anyOf, - u"dependencies": _validators.dependencies, - u"enum": _validators.enum, - u"format": _validators.format, - u"items": _legacy_validators.items_draft3_draft4, - u"maxItems": _validators.maxItems, - u"maxLength": _validators.maxLength, - u"maxProperties": _validators.maxProperties, - u"maximum": _legacy_validators.maximum_draft3_draft4, - u"minItems": _validators.minItems, - u"minLength": _validators.minLength, - u"minProperties": _validators.minProperties, - u"minimum": _legacy_validators.minimum_draft3_draft4, - u"multipleOf": _validators.multipleOf, - u"not": _validators.not_, - u"oneOf": _validators.oneOf, - u"pattern": _validators.pattern, - u"patternProperties": _validators.patternProperties, - u"properties": _validators.properties, - u"required": _validators.required, - u"type": _validators.type, - u"uniqueItems": _validators.uniqueItems, - }, - type_checker=_types.draft4_type_checker, - version="draft4", - id_of=lambda schema: schema.get(u"id", ""), -) - -Draft6Validator = create( - meta_schema=_utils.load_schema("draft6"), - validators={ - u"$ref": _validators.ref, - u"additionalItems": _validators.additionalItems, - u"additionalProperties": _validators.additionalProperties, - u"allOf": 
_validators.allOf, - u"anyOf": _validators.anyOf, - u"const": _validators.const, - u"contains": _validators.contains, - u"dependencies": _validators.dependencies, - u"enum": _validators.enum, - u"exclusiveMaximum": _validators.exclusiveMaximum, - u"exclusiveMinimum": _validators.exclusiveMinimum, - u"format": _validators.format, - u"items": _validators.items, - u"maxItems": _validators.maxItems, - u"maxLength": _validators.maxLength, - u"maxProperties": _validators.maxProperties, - u"maximum": _validators.maximum, - u"minItems": _validators.minItems, - u"minLength": _validators.minLength, - u"minProperties": _validators.minProperties, - u"minimum": _validators.minimum, - u"multipleOf": _validators.multipleOf, - u"not": _validators.not_, - u"oneOf": _validators.oneOf, - u"pattern": _validators.pattern, - u"patternProperties": _validators.patternProperties, - u"properties": _validators.properties, - u"propertyNames": _validators.propertyNames, - u"required": _validators.required, - u"type": _validators.type, - u"uniqueItems": _validators.uniqueItems, - }, - type_checker=_types.draft6_type_checker, - version="draft6", -) - -Draft7Validator = create( - meta_schema=_utils.load_schema("draft7"), - validators={ - u"$ref": _validators.ref, - u"additionalItems": _validators.additionalItems, - u"additionalProperties": _validators.additionalProperties, - u"allOf": _validators.allOf, - u"anyOf": _validators.anyOf, - u"const": _validators.const, - u"contains": _validators.contains, - u"dependencies": _validators.dependencies, - u"enum": _validators.enum, - u"exclusiveMaximum": _validators.exclusiveMaximum, - u"exclusiveMinimum": _validators.exclusiveMinimum, - u"format": _validators.format, - u"if": _validators.if_, - u"items": _validators.items, - u"maxItems": _validators.maxItems, - u"maxLength": _validators.maxLength, - u"maxProperties": _validators.maxProperties, - u"maximum": _validators.maximum, - u"minItems": _validators.minItems, - u"minLength": _validators.minLength, - u"minProperties": _validators.minProperties, - u"minimum": _validators.minimum, - u"multipleOf": _validators.multipleOf, - u"oneOf": _validators.oneOf, - u"not": _validators.not_, - u"pattern": _validators.pattern, - u"patternProperties": _validators.patternProperties, - u"properties": _validators.properties, - u"propertyNames": _validators.propertyNames, - u"required": _validators.required, - u"type": _validators.type, - u"uniqueItems": _validators.uniqueItems, - }, - type_checker=_types.draft7_type_checker, - version="draft7", -) - -_LATEST_VERSION = Draft7Validator - - -class RefResolver(object): - """ - Resolve JSON References. - - Arguments: - - base_uri (str): - - The URI of the referring document - - referrer: - - The actual referring document - - store (dict): - - A mapping from URIs to documents to cache - - cache_remote (bool): - - Whether remote refs should be cached after first resolution - - handlers (dict): - - A mapping from URI schemes to functions that should be used - to retrieve them - - urljoin_cache (:func:`functools.lru_cache`): - - A cache that will be used for caching the results of joining - the resolution scope to subscopes. - - remote_cache (:func:`functools.lru_cache`): - - A cache that will be used for caching the results of - resolved remote URLs. 
- - Attributes: - - cache_remote (bool): - - Whether remote refs should be cached after first resolution - """ - - def __init__( - self, - base_uri, - referrer, - store=(), - cache_remote=True, - handlers=(), - urljoin_cache=None, - remote_cache=None, - ): - if urljoin_cache is None: - urljoin_cache = lru_cache(1024)(urljoin) - if remote_cache is None: - remote_cache = lru_cache(1024)(self.resolve_from_url) - - self.referrer = referrer - self.cache_remote = cache_remote - self.handlers = dict(handlers) - - self._scopes_stack = [base_uri] - self.store = _utils.URIDict( - (id, validator.META_SCHEMA) - for id, validator in iteritems(meta_schemas) - ) - self.store.update(store) - self.store[base_uri] = referrer - - self._urljoin_cache = urljoin_cache - self._remote_cache = remote_cache - - @classmethod - def from_schema(cls, schema, id_of=_id_of, *args, **kwargs): - """ - Construct a resolver from a JSON schema object. - - Arguments: - - schema: - - the referring schema - - Returns: - - `RefResolver` - """ - - return cls(base_uri=id_of(schema), referrer=schema, *args, **kwargs) - - def push_scope(self, scope): - """ - Enter a given sub-scope. - - Treats further dereferences as being performed underneath the - given scope. - """ - self._scopes_stack.append( - self._urljoin_cache(self.resolution_scope, scope), - ) - - def pop_scope(self): - """ - Exit the most recent entered scope. - - Treats further dereferences as being performed underneath the - original scope. - - Don't call this method more times than `push_scope` has been - called. - """ - try: - self._scopes_stack.pop() - except IndexError: - raise exceptions.RefResolutionError( - "Failed to pop the scope from an empty stack. " - "`pop_scope()` should only be called once for every " - "`push_scope()`" - ) - - @property - def resolution_scope(self): - """ - Retrieve the current resolution scope. - """ - return self._scopes_stack[-1] - - @property - def base_uri(self): - """ - Retrieve the current base URI, not including any fragment. - """ - uri, _ = urldefrag(self.resolution_scope) - return uri - - @contextlib.contextmanager - def in_scope(self, scope): - """ - Temporarily enter the given scope for the duration of the context. - """ - self.push_scope(scope) - try: - yield - finally: - self.pop_scope() - - @contextlib.contextmanager - def resolving(self, ref): - """ - Resolve the given ``ref`` and enter its resolution scope. - - Exits the scope on exit of this context manager. - - Arguments: - - ref (str): - - The reference to resolve - """ - - url, resolved = self.resolve(ref) - self.push_scope(url) - try: - yield resolved - finally: - self.pop_scope() - - def resolve(self, ref): - """ - Resolve the given reference. - """ - url = self._urljoin_cache(self.resolution_scope, ref) - return url, self._remote_cache(url) - - def resolve_from_url(self, url): - """ - Resolve the given remote URL. - """ - url, fragment = urldefrag(url) - try: - document = self.store[url] - except KeyError: - try: - document = self.resolve_remote(url) - except Exception as exc: - raise exceptions.RefResolutionError(exc) - - return self.resolve_fragment(document, fragment) - - def resolve_fragment(self, document, fragment): - """ - Resolve a ``fragment`` within the referenced ``document``. 
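A minimal sketch of what fragment resolution does (illustrative only; the document and pointer below are invented, not part of the vendored file):

    resolver = RefResolver.from_schema({})
    # "~1" decodes to "/", "~0" to "~", and numeric parts index into arrays
    resolver.resolve_fragment({u"a": [{u"b": 1}]}, u"/a/0/b")  # -> 1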
- - Arguments: - - document: - - The referent document - - fragment (str): - - a URI fragment to resolve within it - """ - - fragment = fragment.lstrip(u"/") - parts = unquote(fragment).split(u"/") if fragment else [] - - for part in parts: - part = part.replace(u"~1", u"/").replace(u"~0", u"~") - - if isinstance(document, Sequence): - # Array indexes should be turned into integers - try: - part = int(part) - except ValueError: - pass - try: - document = document[part] - except (TypeError, LookupError): - raise exceptions.RefResolutionError( - "Unresolvable JSON pointer: %r" % fragment - ) - - return document - - def resolve_remote(self, uri): - """ - Resolve a remote ``uri``. - - If called directly, does not check the store first, but after - retrieving the document at the specified URI it will be saved in - the store if :attr:`cache_remote` is True. - - .. note:: - - If the requests_ library is present, ``jsonschema`` will use it to - request the remote ``uri``, so that the correct encoding is - detected and used. - - If it isn't, or if the scheme of the ``uri`` is not ``http`` or - ``https``, UTF-8 is assumed. - - Arguments: - - uri (str): - - The URI to resolve - - Returns: - - The retrieved document - - .. _requests: https://pypi.org/project/requests/ - """ - try: - import requests - except ImportError: - requests = None - - scheme = urlsplit(uri).scheme - - if scheme in self.handlers: - result = self.handlers[scheme](uri) - elif scheme in [u"http", u"https"] and requests: - # Requests has support for detecting the correct encoding of - # json over http - result = requests.get(uri).json() - else: - # Otherwise, pass off to urllib and assume utf-8 - with urlopen(uri) as url: - result = json.loads(url.read().decode("utf-8")) - - if self.cache_remote: - self.store[uri] = result - return result - - -def validate(instance, schema, cls=None, *args, **kwargs): - """ - Validate an instance under the given schema. - - >>> validate([2, 3, 4], {"maxItems": 2}) - Traceback (most recent call last): - ... - ValidationError: [2, 3, 4] is too long - - :func:`validate` will first verify that the provided schema is - itself valid, since not doing so can lead to less obvious error - messages and fail in less obvious or consistent ways. - - If you know you have a valid schema already, especially if you - intend to validate multiple instances with the same schema, you - likely would prefer using the `IValidator.validate` method directly - on a specific validator (e.g. ``Draft7Validator.validate``). - - - Arguments: - - instance: - - The instance to validate - - schema: - - The schema to validate with - - cls (IValidator): - - The class that will be used to validate the instance. - - If the ``cls`` argument is not provided, two things will happen - in accordance with the specification. First, if the schema has a - :validator:`$schema` property containing a known meta-schema [#]_ - then the proper validator will be used. The specification recommends - that all schemas contain :validator:`$schema` properties for this - reason. If no :validator:`$schema` property is found, the default - validator class is the latest released draft. - - Any other provided positional and keyword arguments will be passed - on when instantiating the ``cls``. - - Raises: - - `jsonschema.exceptions.ValidationError` if the instance - is invalid - - `jsonschema.exceptions.SchemaError` if the schema itself - is invalid - - .. rubric:: Footnotes - .. 
[#] known by a validator registered with - `jsonschema.validators.validates` - """ - if cls is None: - cls = validator_for(schema) - - cls.check_schema(schema) - validator = cls(schema, *args, **kwargs) - error = exceptions.best_match(validator.iter_errors(instance)) - if error is not None: - raise error - - -def validator_for(schema, default=_LATEST_VERSION): - """ - Retrieve the validator class appropriate for validating the given schema. - - Uses the :validator:`$schema` property that should be present in the - given schema to look up the appropriate validator class. - - Arguments: - - schema (collections.Mapping or bool): - - the schema to look at - - default: - - the default to return if the appropriate validator class - cannot be determined. - - If unprovided, the default is to return the latest supported - draft. - """ - if schema is True or schema is False or u"$schema" not in schema: - return default - if schema[u"$schema"] not in meta_schemas: - warn( - ( - "The metaschema specified by $schema was not found. " - "Using the latest draft to validate, but this will raise " - "an error in the future." - ), - DeprecationWarning, - stacklevel=2, - ) - return meta_schemas.get(schema[u"$schema"], _LATEST_VERSION) diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/__init__.py b/vendor/poetry-core/poetry/core/_vendor/lark/__init__.py deleted file mode 100644 index 8ddab96a..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/lark/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from .tree import Tree -from .visitors import Transformer, Visitor, v_args, Discard -from .visitors import InlineTransformer, inline_args # XXX Deprecated -from .exceptions import (ParseError, LexError, GrammarError, UnexpectedToken, - UnexpectedInput, UnexpectedCharacters, LarkError) -from .lexer import Token -from .lark import Lark - -__version__ = "0.9.0" diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/common.py b/vendor/poetry-core/poetry/core/_vendor/lark/common.py deleted file mode 100644 index c44f9cef..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/lark/common.py +++ /dev/null @@ -1,29 +0,0 @@ -from .utils import Serialize -from .lexer import TerminalDef - -###{standalone - -class LexerConf(Serialize): - __serialize_fields__ = 'tokens', 'ignore', 'g_regex_flags' - __serialize_namespace__ = TerminalDef, - - def __init__(self, tokens, ignore=(), postlex=None, callbacks=None, g_regex_flags=0): - self.tokens = tokens - self.ignore = ignore - self.postlex = postlex - self.callbacks = callbacks or {} - self.g_regex_flags = g_regex_flags - - def _deserialize(self): - self.callbacks = {} # TODO - -###} - -class ParserConf: - def __init__(self, rules, callbacks, start): - assert isinstance(start, list) - self.rules = rules - self.callbacks = callbacks - self.start = start - - diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/exceptions.py b/vendor/poetry-core/poetry/core/_vendor/lark/exceptions.py deleted file mode 100644 index 1c5e533e..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/lark/exceptions.py +++ /dev/null @@ -1,119 +0,0 @@ -from .utils import STRING_TYPE - -###{standalone -class LarkError(Exception): - pass - -class GrammarError(LarkError): - pass - -class ParseError(LarkError): - pass - -class LexError(LarkError): - pass - -class UnexpectedEOF(ParseError): - def __init__(self, expected): - self.expected = expected - - message = ("Unexpected end-of-input. 
Expected one of: \n\t* %s\n" % '\n\t* '.join(x.name for x in self.expected)) - super(UnexpectedEOF, self).__init__(message) - - -class UnexpectedInput(LarkError): - pos_in_stream = None - - def get_context(self, text, span=40): - pos = self.pos_in_stream - start = max(pos - span, 0) - end = pos + span - before = text[start:pos].rsplit('\n', 1)[-1] - after = text[pos:end].split('\n', 1)[0] - return before + after + '\n' + ' ' * len(before) + '^\n' - - def match_examples(self, parse_fn, examples, token_type_match_fallback=False): - """ Given a parser instance and a dictionary mapping labels to - collections of malformed syntax examples, it'll return the label for the - example that best matches the current error. - """ - assert self.state is not None, "Not supported for this exception" - - candidate = (None, False) - for label, example in examples.items(): - assert not isinstance(example, STRING_TYPE) - - for malformed in example: - try: - parse_fn(malformed) - except UnexpectedInput as ut: - if ut.state == self.state: - try: - if ut.token == self.token: # Try exact match first - return label - - if token_type_match_fallback: - # Fallback to token types match - if (ut.token.type == self.token.type) and not candidate[-1]: - candidate = label, True - - except AttributeError: - pass - if not candidate[0]: - candidate = label, False - - return candidate[0] - - -class UnexpectedCharacters(LexError, UnexpectedInput): - def __init__(self, seq, lex_pos, line, column, allowed=None, considered_tokens=None, state=None, token_history=None): - message = "No terminal defined for '%s' at line %d col %d" % (seq[lex_pos], line, column) - - self.line = line - self.column = column - self.allowed = allowed - self.considered_tokens = considered_tokens - self.pos_in_stream = lex_pos - self.state = state - - message += '\n\n' + self.get_context(seq) - if allowed: - message += '\nExpecting: %s\n' % allowed - if token_history: - message += '\nPrevious tokens: %s\n' % ', '.join(repr(t) for t in token_history) - - super(UnexpectedCharacters, self).__init__(message) - - - -class UnexpectedToken(ParseError, UnexpectedInput): - def __init__(self, token, expected, considered_rules=None, state=None, puppet=None): - self.token = token - self.expected = expected # XXX str shouldn't be necessary - self.line = getattr(token, 'line', '?') - self.column = getattr(token, 'column', '?') - self.considered_rules = considered_rules - self.state = state - self.pos_in_stream = getattr(token, 'pos_in_stream', None) - self.puppet = puppet - - message = ("Unexpected token %r at line %s, column %s.\n" - "Expected one of: \n\t* %s\n" - % (token, self.line, self.column, '\n\t* '.join(self.expected))) - - super(UnexpectedToken, self).__init__(message) - -class VisitError(LarkError): - """VisitError is raised when visitors are interrupted by an exception - - It provides the following attributes for inspection: - - obj: the tree node or token it was processing when the exception was raised - - orig_exc: the exception that caused it to fail - """ - def __init__(self, rule, obj, orig_exc): - self.obj = obj - self.orig_exc = orig_exc - - message = 'Error trying to process rule "%s":\n\n%s' % (rule, orig_exc) - super(VisitError, self).__init__(message) -###} diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/indenter.py b/vendor/poetry-core/poetry/core/_vendor/lark/indenter.py deleted file mode 100644 index 69323905..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/lark/indenter.py +++ /dev/null @@ -1,61 +0,0 @@ -"Provides Indentation services
for languages with indentation similar to Python" - -from .lexer import Token - -###{standalone -class Indenter: - def __init__(self): - self.paren_level = None - self.indent_level = None - assert self.tab_len > 0 - - def handle_NL(self, token): - if self.paren_level > 0: - return - - yield token - - indent_str = token.rsplit('\n', 1)[1] # Tabs and spaces - indent = indent_str.count(' ') + indent_str.count('\t') * self.tab_len - - if indent > self.indent_level[-1]: - self.indent_level.append(indent) - yield Token.new_borrow_pos(self.INDENT_type, indent_str, token) - else: - while indent < self.indent_level[-1]: - self.indent_level.pop() - yield Token.new_borrow_pos(self.DEDENT_type, indent_str, token) - - assert indent == self.indent_level[-1], '%s != %s' % (indent, self.indent_level[-1]) - - def _process(self, stream): - for token in stream: - if token.type == self.NL_type: - for t in self.handle_NL(token): - yield t - else: - yield token - - if token.type in self.OPEN_PAREN_types: - self.paren_level += 1 - elif token.type in self.CLOSE_PAREN_types: - self.paren_level -= 1 - assert self.paren_level >= 0 - - while len(self.indent_level) > 1: - self.indent_level.pop() - yield Token(self.DEDENT_type, '') - - assert self.indent_level == [0], self.indent_level - - def process(self, stream): - self.paren_level = 0 - self.indent_level = [0] - return self._process(stream) - - # XXX Hack for ContextualLexer. Maybe there's a more elegant solution? - @property - def always_accept(self): - return (self.NL_type,) - -###} diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/lark.py b/vendor/poetry-core/poetry/core/_vendor/lark/lark.py deleted file mode 100644 index 2b783cb2..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/lark/lark.py +++ /dev/null @@ -1,405 +0,0 @@ -from __future__ import absolute_import - -import sys, os, pickle, hashlib, logging -from io import open - - -from .utils import STRING_TYPE, Serialize, SerializeMemoizer, FS -from .load_grammar import load_grammar -from .tree import Tree -from .common import LexerConf, ParserConf - -from .lexer import Lexer, TraditionalLexer, TerminalDef, UnexpectedToken -from .parse_tree_builder import ParseTreeBuilder -from .parser_frontends import get_frontend -from .grammar import Rule - -import re -try: - import regex -except ImportError: - regex = None - -###{standalone - -class LarkOptions(Serialize): - """Specifies the options for Lark - - """ - OPTIONS_DOC = """ -# General - - start - The start symbol. Either a string, or a list of strings for - multiple possible starts (Default: "start") - debug - Display debug information, such as warnings (default: False) - transformer - Applies the transformer to every parse tree (equivalent to - applying it after the parse, but faster) - propagate_positions - Propagates (line, column, end_line, end_column) - attributes into all tree branches. - maybe_placeholders - When True, the `[]` operator returns `None` when not matched. - When `False`, `[]` behaves like the `?` operator, - and returns no value at all. - (default=`False`. Recommended to set to `True`) - regex - When True, uses the `regex` module instead of the stdlib `re`. - cache - Cache the results of the Lark grammar analysis, for x2 to x3 faster loading. - LALR only for now.
- When `False`, does nothing (default) - When `True`, caches to a temporary file in the local directory - When given a string, caches to the path pointed by the string - - g_regex_flags - Flags that are applied to all terminals - (both regex and strings) - keep_all_tokens - Prevent the tree builder from automagically - removing "punctuation" tokens (default: False) - -# Algorithm - - parser - Decides which parser engine to use - Accepts "earley" or "lalr". (Default: "earley") - (there is also a "cyk" option for legacy) - - lexer - Decides whether or not to use a lexer stage - "auto" (default): Choose for me based on the parser - "standard": Use a standard lexer - "contextual": Stronger lexer (only works with parser="lalr") - "dynamic": Flexible and powerful (only with parser="earley") - "dynamic_complete": Same as dynamic, but tries *every* variation - of tokenizing possible. - - ambiguity - Decides how to handle ambiguity in the parse. - Only relevant if parser="earley" - "resolve": The parser will automatically choose the simplest - derivation (it chooses consistently: greedy for - tokens, non-greedy for rules) - "explicit": The parser will return all derivations wrapped - in "_ambig" tree nodes (i.e. a forest). - -# Domain Specific - - postlex - Lexer post-processing (Default: None) Only works with the - standard and contextual lexers. - priority - How priorities should be evaluated - auto, none, normal, - invert (Default: auto) - lexer_callbacks - Dictionary of callbacks for the lexer. May alter - tokens during lexing. Use with caution. - edit_terminals - A callback - """ - if __doc__: - __doc__ += OPTIONS_DOC - - _defaults = { - 'debug': False, - 'keep_all_tokens': False, - 'tree_class': None, - 'cache': False, - 'postlex': None, - 'parser': 'earley', - 'lexer': 'auto', - 'transformer': None, - 'start': 'start', - 'priority': 'auto', - 'ambiguity': 'auto', - 'regex': False, - 'propagate_positions': False, - 'lexer_callbacks': {}, - 'maybe_placeholders': False, - 'edit_terminals': None, - 'g_regex_flags': 0, - } - - def __init__(self, options_dict): - o = dict(options_dict) - - options = {} - for name, default in self._defaults.items(): - if name in o: - value = o.pop(name) - if isinstance(default, bool) and name != 'cache': - value = bool(value) - else: - value = default - - options[name] = value - - if isinstance(options['start'], STRING_TYPE): - options['start'] = [options['start']] - - self.__dict__['options'] = options - - assert self.parser in ('earley', 'lalr', 'cyk', None) - - if self.parser == 'earley' and self.transformer: - raise ValueError('Cannot specify an embedded transformer when using the Earley algorithm. ' - 'Please use your transformer on the resulting parse tree, or use a different algorithm (i.e. LALR)') - - if o: - raise ValueError("Unknown options: %s" % o.keys()) - - def __getattr__(self, name): - try: - return self.options[name] - except KeyError as e: - raise AttributeError(e) - - def __setattr__(self, name, value): - assert name in self.options - self.options[name] = value - - def serialize(self, memo): - return self.options - - @classmethod - def deserialize(cls, data, memo): - return cls(data) - - -class Lark(Serialize): - def __init__(self, grammar, **options): - """ - grammar : a string or file-object containing the grammar spec (using Lark's ebnf syntax) - options : a dictionary controlling various aspects of Lark.
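A minimal usage sketch (the grammar and option values below are illustrative, not taken from this file):

    from lark import Lark

    parser = Lark('start: "a" "b"', parser='lalr')  # cache=True would additionally persist the grammar analysis
    tree = parser.parse("ab")  # returns a Tree unless an embedded transformer was given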
- """ - - self.options = LarkOptions(options) - - # Set regex or re module - use_regex = self.options.regex - if use_regex: - if regex: - self.re = regex - else: - raise ImportError('`regex` module must be installed if calling `Lark(regex=True)`.') - else: - self.re = re - - # Some, but not all file-like objects have a 'name' attribute - try: - self.source = grammar.name - except AttributeError: - self.source = '' - - # Drain file-like objects to get their contents - try: - read = grammar.read - except AttributeError: - pass - else: - grammar = read() - - assert isinstance(grammar, STRING_TYPE) - - cache_fn = None - if self.options.cache: - if self.options.parser != 'lalr': - raise NotImplementedError("cache only works with parser='lalr' for now") - if isinstance(self.options.cache, STRING_TYPE): - cache_fn = self.options.cache - else: - if self.options.cache is not True: - raise ValueError("cache must be bool or str") - unhashable = ('transformer', 'postlex', 'lexer_callbacks', 'edit_terminals') - from . import __version__ - options_str = ''.join(k+str(v) for k, v in options.items() if k not in unhashable) - s = grammar + options_str + __version__ - md5 = hashlib.md5(s.encode()).hexdigest() - cache_fn = '.lark_cache_%s.tmp' % md5 - - if FS.exists(cache_fn): - logging.debug('Loading grammar from cache: %s', cache_fn) - with FS.open(cache_fn, 'rb') as f: - self._load(f, self.options.transformer, self.options.postlex) - return - - if self.options.lexer == 'auto': - if self.options.parser == 'lalr': - self.options.lexer = 'contextual' - elif self.options.parser == 'earley': - self.options.lexer = 'dynamic' - elif self.options.parser == 'cyk': - self.options.lexer = 'standard' - else: - assert False, self.options.parser - lexer = self.options.lexer - assert lexer in ('standard', 'contextual', 'dynamic', 'dynamic_complete') or issubclass(lexer, Lexer) - - if self.options.ambiguity == 'auto': - if self.options.parser == 'earley': - self.options.ambiguity = 'resolve' - else: - disambig_parsers = ['earley', 'cyk'] - assert self.options.parser in disambig_parsers, ( - 'Only %s supports disambiguation right now') % ', '.join(disambig_parsers) - - if self.options.priority == 'auto': - if self.options.parser in ('earley', 'cyk', ): - self.options.priority = 'normal' - elif self.options.parser in ('lalr', ): - self.options.priority = None - elif self.options.priority in ('invert', 'normal'): - assert self.options.parser in ('earley', 'cyk'), "priorities are not supported for LALR at this time" - - assert self.options.priority in ('auto', None, 'normal', 'invert'), 'invalid priority option specified: {}. options are auto, none, normal, invert.'.format(self.options.priority) - assert self.options.ambiguity not in ('resolve__antiscore_sum', ), 'resolve__antiscore_sum has been replaced with the option priority="invert"' - assert self.options.ambiguity in ('resolve', 'explicit', 'auto', ) - - # Parse the grammar file and compose the grammars (TODO) - self.grammar = load_grammar(grammar, self.source, self.re) - - # Compile the EBNF grammar into BNF - self.terminals, self.rules, self.ignore_tokens = self.grammar.compile(self.options.start) - - if self.options.edit_terminals: - for t in self.terminals: - self.options.edit_terminals(t) - - self._terminals_dict = {t.name:t for t in self.terminals} - - # If the user asked to invert the priorities, negate them all here. - # This replaces the old 'resolve__antiscore_sum' option. 
- if self.options.priority == 'invert': - for rule in self.rules: - if rule.options.priority is not None: - rule.options.priority = -rule.options.priority - # Else, if the user asked to disable priorities, strip them from the - # rules. This allows the Earley parsers to skip an extra forest walk - # for improved performance, if you don't need them (or didn't specify any). - elif self.options.priority == None: - for rule in self.rules: - if rule.options.priority is not None: - rule.options.priority = None - - # TODO Deprecate lexer_callbacks? - lexer_callbacks = dict(self.options.lexer_callbacks) - if self.options.transformer: - t = self.options.transformer - for term in self.terminals: - if hasattr(t, term.name): - lexer_callbacks[term.name] = getattr(t, term.name) - - self.lexer_conf = LexerConf(self.terminals, self.ignore_tokens, self.options.postlex, lexer_callbacks, self.options.g_regex_flags) - - if self.options.parser: - self.parser = self._build_parser() - elif lexer: - self.lexer = self._build_lexer() - - if cache_fn: - logging.debug('Saving grammar to cache: %s', cache_fn) - with FS.open(cache_fn, 'wb') as f: - self.save(f) - - if __init__.__doc__: - __init__.__doc__ += "\nOptions:\n" + LarkOptions.OPTIONS_DOC - - __serialize_fields__ = 'parser', 'rules', 'options' - - def _build_lexer(self): - return TraditionalLexer(self.lexer_conf.tokens, ignore=self.lexer_conf.ignore, user_callbacks=self.lexer_conf.callbacks, g_regex_flags=self.lexer_conf.g_regex_flags) - - def _prepare_callbacks(self): - self.parser_class = get_frontend(self.options.parser, self.options.lexer) - self._parse_tree_builder = ParseTreeBuilder(self.rules, self.options.tree_class or Tree, self.options.propagate_positions, self.options.keep_all_tokens, self.options.parser!='lalr' and self.options.ambiguity=='explicit', self.options.maybe_placeholders) - self._callbacks = self._parse_tree_builder.create_callback(self.options.transformer) - - def _build_parser(self): - self._prepare_callbacks() - parser_conf = ParserConf(self.rules, self._callbacks, self.options.start) - return self.parser_class(self.lexer_conf, parser_conf, self.re, options=self.options) - - def save(self, f): - data, m = self.memo_serialize([TerminalDef, Rule]) - pickle.dump({'data': data, 'memo': m}, f) - - @classmethod - def load(cls, f): - inst = cls.__new__(cls) - return inst._load(f) - - def _load(self, f, transformer=None, postlex=None): - if isinstance(f, dict): - d = f - else: - d = pickle.load(f) - memo = d['memo'] - data = d['data'] - - assert memo - memo = SerializeMemoizer.deserialize(memo, {'Rule': Rule, 'TerminalDef': TerminalDef}, {}) - options = dict(data['options']) - if transformer is not None: - options['transformer'] = transformer - if postlex is not None: - options['postlex'] = postlex - self.options = LarkOptions.deserialize(options, memo) - self.re = regex if self.options.regex else re - self.rules = [Rule.deserialize(r, memo) for r in data['rules']] - self.source = '' - self._prepare_callbacks() - self.parser = self.parser_class.deserialize(data['parser'], memo, self._callbacks, self.options.postlex, self.re) - return self - - @classmethod - def _load_from_dict(cls, data, memo, transformer=None, postlex=None): - inst = cls.__new__(cls) - return inst._load({'data': data, 'memo': memo}, transformer, postlex) - - @classmethod - def open(cls, grammar_filename, rel_to=None, **options): - """Create an instance of Lark with the grammar given by its filename - - If rel_to is provided, the function will find the grammar filename in 
relation to it. - - Example: - - >>> Lark.open("grammar_file.lark", rel_to=__file__, parser="lalr") - Lark(...) - - """ - if rel_to: - basepath = os.path.dirname(rel_to) - grammar_filename = os.path.join(basepath, grammar_filename) - with open(grammar_filename, encoding='utf8') as f: - return cls(f, **options) - - def __repr__(self): - return 'Lark(open(%r), parser=%r, lexer=%r, ...)' % (self.source, self.options.parser, self.options.lexer) - - - def lex(self, text): - "Only lex (and postlex) the text, without parsing it. Only relevant when lexer='standard'" - if not hasattr(self, 'lexer'): - self.lexer = self._build_lexer() - stream = self.lexer.lex(text) - if self.options.postlex: - return self.options.postlex.process(stream) - return stream - - def get_terminal(self, name): - "Get information about a terminal" - return self._terminals_dict[name] - - def parse(self, text, start=None, on_error=None): - """Parse the given text, according to the options provided. - - Parameters: - start: str - required if Lark was given multiple possible start symbols (using the start option). - on_error: function - if provided, will be called on UnexpectedToken error. Return true to resume parsing. LALR only. - - Returns a tree, unless specified otherwise. - """ - try: - return self.parser.parse(text, start=start) - except UnexpectedToken as e: - if on_error is None: - raise - - while True: - if not on_error(e): - raise e - try: - return e.puppet.resume_parse() - except UnexpectedToken as e2: - e = e2 - - -###} diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/lexer.py b/vendor/poetry-core/poetry/core/_vendor/lark/lexer.py deleted file mode 100644 index bff5de9e..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/lark/lexer.py +++ /dev/null @@ -1,395 +0,0 @@ -## Lexer Implementation - -import re - -from .utils import Str, classify, get_regexp_width, Py36, Serialize -from .exceptions import UnexpectedCharacters, LexError, UnexpectedToken - -###{standalone - -class Pattern(Serialize): - - def __init__(self, value, flags=()): - self.value = value - self.flags = frozenset(flags) - - def __repr__(self): - return repr(self.to_regexp()) - - # Pattern Hashing assumes all subclasses have a different priority! 
- def __hash__(self): - return hash((type(self), self.value, self.flags)) - def __eq__(self, other): - return type(self) == type(other) and self.value == other.value and self.flags == other.flags - - def to_regexp(self): - raise NotImplementedError() - - if Py36: - # Python 3.6 changed syntax for flags in regular expression - def _get_flags(self, value): - for f in self.flags: - value = ('(?%s:%s)' % (f, value)) - return value - - else: - def _get_flags(self, value): - for f in self.flags: - value = ('(?%s)' % f) + value - return value - - -class PatternStr(Pattern): - __serialize_fields__ = 'value', 'flags' - - type = "str" - - def to_regexp(self): - return self._get_flags(re.escape(self.value)) - - @property - def min_width(self): - return len(self.value) - max_width = min_width - -class PatternRE(Pattern): - __serialize_fields__ = 'value', 'flags', '_width' - - type = "re" - - def to_regexp(self): - return self._get_flags(self.value) - - _width = None - def _get_width(self): - if self._width is None: - self._width = get_regexp_width(self.to_regexp()) - return self._width - - @property - def min_width(self): - return self._get_width()[0] - @property - def max_width(self): - return self._get_width()[1] - - -class TerminalDef(Serialize): - __serialize_fields__ = 'name', 'pattern', 'priority' - __serialize_namespace__ = PatternStr, PatternRE - - def __init__(self, name, pattern, priority=1): - assert isinstance(pattern, Pattern), pattern - self.name = name - self.pattern = pattern - self.priority = priority - - def __repr__(self): - return '%s(%r, %r)' % (type(self).__name__, self.name, self.pattern) - - - -class Token(Str): - __slots__ = ('type', 'pos_in_stream', 'value', 'line', 'column', 'end_line', 'end_column', 'end_pos') - - def __new__(cls, type_, value, pos_in_stream=None, line=None, column=None, end_line=None, end_column=None, end_pos=None): - try: - self = super(Token, cls).__new__(cls, value) - except UnicodeDecodeError: - value = value.decode('latin1') - self = super(Token, cls).__new__(cls, value) - - self.type = type_ - self.pos_in_stream = pos_in_stream - self.value = value - self.line = line - self.column = column - self.end_line = end_line - self.end_column = end_column - self.end_pos = end_pos - return self - - def update(self, type_=None, value=None): - return Token.new_borrow_pos( - type_ if type_ is not None else self.type, - value if value is not None else self.value, - self - ) - - @classmethod - def new_borrow_pos(cls, type_, value, borrow_t): - return cls(type_, value, borrow_t.pos_in_stream, borrow_t.line, borrow_t.column, borrow_t.end_line, borrow_t.end_column, borrow_t.end_pos) - - def __reduce__(self): - return (self.__class__, (self.type, self.value, self.pos_in_stream, self.line, self.column, )) - - def __repr__(self): - return 'Token(%s, %r)' % (self.type, self.value) - - def __deepcopy__(self, memo): - return Token(self.type, self.value, self.pos_in_stream, self.line, self.column) - - def __eq__(self, other): - if isinstance(other, Token) and self.type != other.type: - return False - - return Str.__eq__(self, other) - - __hash__ = Str.__hash__ - - -class LineCounter: - def __init__(self): - self.newline_char = '\n' - self.char_pos = 0 - self.line = 1 - self.column = 1 - self.line_start_pos = 0 - - def feed(self, token, test_newline=True): - """Consume a token and calculate the new line & column. - - As an optional optimization, set test_newline=False if token doesn't contain a newline.
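For instance (an illustrative sketch of the bookkeeping this class performs; values are invented):

    lc = LineCounter()
    lc.feed("ab\ncd")  # one newline seen: line advances, column restarts after it
    assert (lc.line, lc.column) == (2, 3)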
- """ - if test_newline: - newlines = token.count(self.newline_char) - if newlines: - self.line += newlines - self.line_start_pos = self.char_pos + token.rindex(self.newline_char) + 1 - - self.char_pos += len(token) - self.column = self.char_pos - self.line_start_pos + 1 - -class _Lex: - "Built to serve both Lexer and ContextualLexer" - def __init__(self, lexer, state=None): - self.lexer = lexer - self.state = state - - def lex(self, stream, newline_types, ignore_types): - newline_types = frozenset(newline_types) - ignore_types = frozenset(ignore_types) - line_ctr = LineCounter() - last_token = None - - while line_ctr.char_pos < len(stream): - lexer = self.lexer - res = lexer.match(stream, line_ctr.char_pos) - if not res: - allowed = {v for m, tfi in lexer.mres for v in tfi.values()} - ignore_types - if not allowed: - allowed = {""} - raise UnexpectedCharacters(stream, line_ctr.char_pos, line_ctr.line, line_ctr.column, allowed=allowed, state=self.state, token_history=last_token and [last_token]) - - value, type_ = res - - if type_ not in ignore_types: - t = Token(type_, value, line_ctr.char_pos, line_ctr.line, line_ctr.column) - line_ctr.feed(value, type_ in newline_types) - t.end_line = line_ctr.line - t.end_column = line_ctr.column - t.end_pos = line_ctr.char_pos - if t.type in lexer.callback: - t = lexer.callback[t.type](t) - if not isinstance(t, Token): - raise ValueError("Callbacks must return a token (returned %r)" % t) - yield t - last_token = t - else: - if type_ in lexer.callback: - t2 = Token(type_, value, line_ctr.char_pos, line_ctr.line, line_ctr.column) - lexer.callback[type_](t2) - line_ctr.feed(value, type_ in newline_types) - - - - -class UnlessCallback: - def __init__(self, mres): - self.mres = mres - - def __call__(self, t): - for mre, type_from_index in self.mres: - m = mre.match(t.value) - if m: - t.type = type_from_index[m.lastindex] - break - return t - -class CallChain: - def __init__(self, callback1, callback2, cond): - self.callback1 = callback1 - self.callback2 = callback2 - self.cond = cond - - def __call__(self, t): - t2 = self.callback1(t) - return self.callback2(t) if self.cond(t2) else t2 - - - - - -def _create_unless(terminals, g_regex_flags, re_): - tokens_by_type = classify(terminals, lambda t: type(t.pattern)) - assert len(tokens_by_type) <= 2, tokens_by_type.keys() - embedded_strs = set() - callback = {} - for retok in tokens_by_type.get(PatternRE, []): - unless = [] # {} - for strtok in tokens_by_type.get(PatternStr, []): - if strtok.priority > retok.priority: - continue - s = strtok.pattern.value - m = re_.match(retok.pattern.to_regexp(), s, g_regex_flags) - if m and m.group(0) == s: - unless.append(strtok) - if strtok.pattern.flags <= retok.pattern.flags: - embedded_strs.add(strtok) - if unless: - callback[retok.name] = UnlessCallback(build_mres(unless, g_regex_flags, re_, match_whole=True)) - - terminals = [t for t in terminals if t not in embedded_strs] - return terminals, callback - - -def _build_mres(terminals, max_size, g_regex_flags, match_whole, re_): - # Python sets an unreasonable group limit (currently 100) in its re module - # Worse, the only way to know we reached it is by catching an AssertionError! - # This function recursively tries less and less groups until it's successful. 
- postfix = '$' if match_whole else '' - mres = [] - while terminals: - try: - mre = re_.compile(u'|'.join(u'(?P<%s>%s)'%(t.name, t.pattern.to_regexp()+postfix) for t in terminals[:max_size]), g_regex_flags) - except AssertionError: # Yes, this is what Python provides us.. :/ - return _build_mres(terminals, max_size//2, g_regex_flags, match_whole, re_) - - # terms_from_name = {t.name: t for t in terminals[:max_size]} - mres.append((mre, {i:n for n,i in mre.groupindex.items()} )) - terminals = terminals[max_size:] - return mres - -def build_mres(terminals, g_regex_flags, re_, match_whole=False): - return _build_mres(terminals, len(terminals), g_regex_flags, match_whole, re_) - -def _regexp_has_newline(r): - r"""Expressions that may indicate newlines in a regexp: - - newlines (\n) - - escaped newline (\\n) - - anything but ([^...]) - - any-char (.) when the flag (?s) exists - - spaces (\s) - """ - return '\n' in r or '\\n' in r or '\\s' in r or '[^' in r or ('(?s' in r and '.' in r) - -class Lexer(object): - """Lexer interface - - Method Signatures: - lex(self, stream) -> Iterator[Token] - """ - lex = NotImplemented - - -class TraditionalLexer(Lexer): - - def __init__(self, terminals, re_, ignore=(), user_callbacks={}, g_regex_flags=0): - assert all(isinstance(t, TerminalDef) for t in terminals), terminals - - terminals = list(terminals) - - self.re = re_ - # Sanitization - for t in terminals: - try: - self.re.compile(t.pattern.to_regexp(), g_regex_flags) - except self.re.error: - raise LexError("Cannot compile token %s: %s" % (t.name, t.pattern)) - - if t.pattern.min_width == 0: - raise LexError("Lexer does not allow zero-width terminals. (%s: %s)" % (t.name, t.pattern)) - - assert set(ignore) <= {t.name for t in terminals} - - # Init - self.newline_types = [t.name for t in terminals if _regexp_has_newline(t.pattern.to_regexp())] - self.ignore_types = list(ignore) - - terminals.sort(key=lambda x:(-x.priority, -x.pattern.max_width, -len(x.pattern.value), x.name)) - self.terminals = terminals - self.user_callbacks = user_callbacks - self.build(g_regex_flags) - - def build(self, g_regex_flags=0): - terminals, self.callback = _create_unless(self.terminals, g_regex_flags, re_=self.re) - assert all(self.callback.values()) - - for type_, f in self.user_callbacks.items(): - if type_ in self.callback: - # Already a callback there, probably UnlessCallback - self.callback[type_] = CallChain(self.callback[type_], f, lambda t: t.type == type_) - else: - self.callback[type_] = f - - self.mres = build_mres(terminals, g_regex_flags, self.re) - - def match(self, stream, pos): - for mre, type_from_index in self.mres: - m = mre.match(stream, pos) - if m: - return m.group(0), type_from_index[m.lastindex] - - def lex(self, stream): - return _Lex(self).lex(stream, self.newline_types, self.ignore_types) - - - - -class ContextualLexer(Lexer): - - def __init__(self, terminals, states, re_, ignore=(), always_accept=(), user_callbacks={}, g_regex_flags=0): - self.re = re_ - tokens_by_name = {} - for t in terminals: - assert t.name not in tokens_by_name, t - tokens_by_name[t.name] = t - - lexer_by_tokens = {} - self.lexers = {} - for state, accepts in states.items(): - key = frozenset(accepts) - try: - lexer = lexer_by_tokens[key] - except KeyError: - accepts = set(accepts) | set(ignore) | set(always_accept) - state_tokens = [tokens_by_name[n] for n in accepts if n and n in tokens_by_name] - lexer = TraditionalLexer(state_tokens, re_=self.re, ignore=ignore, user_callbacks=user_callbacks, g_regex_flags=g_regex_flags) - 
lexer_by_tokens[key] = lexer - - self.lexers[state] = lexer - - self.root_lexer = TraditionalLexer(terminals, re_=self.re, ignore=ignore, user_callbacks=user_callbacks, g_regex_flags=g_regex_flags) - - def lex(self, stream, get_parser_state): - parser_state = get_parser_state() - l = _Lex(self.lexers[parser_state], parser_state) - try: - for x in l.lex(stream, self.root_lexer.newline_types, self.root_lexer.ignore_types): - yield x - parser_state = get_parser_state() - l.lexer = self.lexers[parser_state] - l.state = parser_state # For debug only, no need to worry about multithreading - except UnexpectedCharacters as e: - # In the contextual lexer, UnexpectedCharacters can mean that the terminal is defined, - # but not in the current context. - # This tests the input against the global context, to provide a nicer error. - root_match = self.root_lexer.match(stream, e.pos_in_stream) - if not root_match: - raise - - value, type_ = root_match - t = Token(type_, value, e.pos_in_stream, e.line, e.column) - raise UnexpectedToken(t, e.allowed, state=e.state) - -###} diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/load_grammar.py b/vendor/poetry-core/poetry/core/_vendor/lark/load_grammar.py deleted file mode 100644 index 407d8d16..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/lark/load_grammar.py +++ /dev/null @@ -1,947 +0,0 @@ -"Parses and creates Grammar objects" - -import os.path -import sys -from copy import copy, deepcopy -from io import open - -from .utils import bfs, eval_escaping -from .lexer import Token, TerminalDef, PatternStr, PatternRE - -from .parse_tree_builder import ParseTreeBuilder -from .parser_frontends import LALR_TraditionalLexer -from .common import LexerConf, ParserConf -from .grammar import RuleOptions, Rule, Terminal, NonTerminal, Symbol -from .utils import classify, suppress, dedup_list, Str -from .exceptions import GrammarError, UnexpectedCharacters, UnexpectedToken - -from .tree import Tree, SlottedTree as ST -from .visitors import Transformer, Visitor, v_args, Transformer_InPlace, Transformer_NonRecursive -inline_args = v_args(inline=True) - -__path__ = os.path.dirname(__file__) -IMPORT_PATHS = [os.path.join(__path__, 'grammars')] - -EXT = '.lark' - -_RE_FLAGS = 'imslux' - -_EMPTY = Symbol('__empty__') - -_TERMINAL_NAMES = { - '.' : 'DOT', - ',' : 'COMMA', - ':' : 'COLON', - ';' : 'SEMICOLON', - '+' : 'PLUS', - '-' : 'MINUS', - '*' : 'STAR', - '/' : 'SLASH', - '\\' : 'BACKSLASH', - '|' : 'VBAR', - '?' : 'QMARK', - '!' 
: 'BANG', - '@' : 'AT', - '#' : 'HASH', - '$' : 'DOLLAR', - '%' : 'PERCENT', - '^' : 'CIRCUMFLEX', - '&' : 'AMPERSAND', - '_' : 'UNDERSCORE', - '<' : 'LESSTHAN', - '>' : 'MORETHAN', - '=' : 'EQUAL', - '"' : 'DBLQUOTE', - '\'' : 'QUOTE', - '`' : 'BACKQUOTE', - '~' : 'TILDE', - '(' : 'LPAR', - ')' : 'RPAR', - '{' : 'LBRACE', - '}' : 'RBRACE', - '[' : 'LSQB', - ']' : 'RSQB', - '\n' : 'NEWLINE', - '\r\n' : 'CRLF', - '\t' : 'TAB', - ' ' : 'SPACE', -} - -# Grammar Parser -TERMINALS = { - '_LPAR': r'\(', - '_RPAR': r'\)', - '_LBRA': r'\[', - '_RBRA': r'\]', - '_LBRACE': r'\{', - '_RBRACE': r'\}', - 'OP': '[+*]|[?](?![a-z])', - '_COLON': ':', - '_COMMA': ',', - '_OR': r'\|', - '_DOT': r'\.(?!\.)', - '_DOTDOT': r'\.\.', - 'TILDE': '~', - 'RULE': '!?[_?]?[a-z][_a-z0-9]*', - 'TERMINAL': '_?[A-Z][_A-Z0-9]*', - 'STRING': r'"(\\"|\\\\|[^"\n])*?"i?', - 'REGEXP': r'/(?!/)(\\/|\\\\|[^/\n])*?/[%s]*' % _RE_FLAGS, - '_NL': r'(\r?\n)+\s*', - 'WS': r'[ \t]+', - 'COMMENT': r'\s*//[^\n]*', - '_TO': '->', - '_IGNORE': r'%ignore', - '_DECLARE': r'%declare', - '_IMPORT': r'%import', - 'NUMBER': r'[+-]?\d+', -} - -RULES = { - 'start': ['_list'], - '_list': ['_item', '_list _item'], - '_item': ['rule', 'term', 'statement', '_NL'], - - 'rule': ['RULE template_params _COLON expansions _NL', - 'RULE template_params _DOT NUMBER _COLON expansions _NL'], - 'template_params': ['_LBRACE _template_params _RBRACE', - ''], - '_template_params': ['RULE', - '_template_params _COMMA RULE'], - 'expansions': ['alias', - 'expansions _OR alias', - 'expansions _NL _OR alias'], - - '?alias': ['expansion _TO RULE', 'expansion'], - 'expansion': ['_expansion'], - - '_expansion': ['', '_expansion expr'], - - '?expr': ['atom', - 'atom OP', - 'atom TILDE NUMBER', - 'atom TILDE NUMBER _DOTDOT NUMBER', - ], - - '?atom': ['_LPAR expansions _RPAR', - 'maybe', - 'value'], - - 'value': ['terminal', - 'nonterminal', - 'literal', - 'range', - 'template_usage'], - - 'terminal': ['TERMINAL'], - 'nonterminal': ['RULE'], - - '?name': ['RULE', 'TERMINAL'], - - 'maybe': ['_LBRA expansions _RBRA'], - 'range': ['STRING _DOTDOT STRING'], - - 'template_usage': ['RULE _LBRACE _template_args _RBRACE'], - '_template_args': ['value', - '_template_args _COMMA value'], - - 'term': ['TERMINAL _COLON expansions _NL', - 'TERMINAL _DOT NUMBER _COLON expansions _NL'], - 'statement': ['ignore', 'import', 'declare'], - 'ignore': ['_IGNORE expansions _NL'], - 'declare': ['_DECLARE _declare_args _NL'], - 'import': ['_IMPORT _import_path _NL', - '_IMPORT _import_path _LPAR name_list _RPAR _NL', - '_IMPORT _import_path _TO name _NL'], - - '_import_path': ['import_lib', 'import_rel'], - 'import_lib': ['_import_args'], - 'import_rel': ['_DOT _import_args'], - '_import_args': ['name', '_import_args _DOT name'], - - 'name_list': ['_name_list'], - '_name_list': ['name', '_name_list _COMMA name'], - - '_declare_args': ['name', '_declare_args name'], - 'literal': ['REGEXP', 'STRING'], -} - -@inline_args -class EBNF_to_BNF(Transformer_InPlace): - def __init__(self): - self.new_rules = [] - self.rules_by_expr = {} - self.prefix = 'anon' - self.i = 0 - self.rule_options = None - - def _add_recurse_rule(self, type_, expr): - if expr in self.rules_by_expr: - return self.rules_by_expr[expr] - - new_name = '__%s_%s_%d' % (self.prefix, type_, self.i) - self.i += 1 - t = NonTerminal(new_name) - tree = ST('expansions', [ST('expansion', [expr]), ST('expansion', [t, expr])]) - self.new_rules.append((new_name, tree, self.rule_options)) - self.rules_by_expr[expr] = t - return t - - def expr(self, 
rule, op, *args): - if op.value == '?': - empty = ST('expansion', []) - return ST('expansions', [rule, empty]) - elif op.value == '+': - # a : b c+ d - # --> - # a : b _c d - # _c : _c c | c; - return self._add_recurse_rule('plus', rule) - elif op.value == '*': - # a : b c* d - # --> - # a : b _c? d - # _c : _c c | c; - new_name = self._add_recurse_rule('star', rule) - return ST('expansions', [new_name, ST('expansion', [])]) - elif op.value == '~': - if len(args) == 1: - mn = mx = int(args[0]) - else: - mn, mx = map(int, args) - if mx < mn or mn < 0: - raise GrammarError("Bad Range for %s (%d..%d isn't allowed)" % (rule, mn, mx)) - return ST('expansions', [ST('expansion', [rule] * n) for n in range(mn, mx+1)]) - assert False, op - - def maybe(self, rule): - keep_all_tokens = self.rule_options and self.rule_options.keep_all_tokens - - def will_not_get_removed(sym): - if isinstance(sym, NonTerminal): - return not sym.name.startswith('_') - if isinstance(sym, Terminal): - return keep_all_tokens or not sym.filter_out - assert False - - if any(rule.scan_values(will_not_get_removed)): - empty = _EMPTY - else: - empty = ST('expansion', []) - - return ST('expansions', [rule, empty]) - - -class SimplifyRule_Visitor(Visitor): - - @staticmethod - def _flatten(tree): - while True: - to_expand = [i for i, child in enumerate(tree.children) - if isinstance(child, Tree) and child.data == tree.data] - if not to_expand: - break - tree.expand_kids_by_index(*to_expand) - - def expansion(self, tree): - # rules_list unpacking - # a : b (c|d) e - # --> - # a : b c e | b d e - # - # In AST terms: - # expansion(b, expansions(c, d), e) - # --> - # expansions( expansion(b, c, e), expansion(b, d, e) ) - - self._flatten(tree) - - for i, child in enumerate(tree.children): - if isinstance(child, Tree) and child.data == 'expansions': - tree.data = 'expansions' - tree.children = [self.visit(ST('expansion', [option if i==j else other - for j, other in enumerate(tree.children)])) - for option in dedup_list(child.children)] - self._flatten(tree) - break - - def alias(self, tree): - rule, alias_name = tree.children - if rule.data == 'expansions': - aliases = [] - for child in tree.children[0].children: - aliases.append(ST('alias', [child, alias_name])) - tree.data = 'expansions' - tree.children = aliases - - def expansions(self, tree): - self._flatten(tree) - # Ensure all children are unique - if len(set(tree.children)) != len(tree.children): - tree.children = dedup_list(tree.children) # dedup is expensive, so try to minimize its use - - -class RuleTreeToText(Transformer): - def expansions(self, x): - return x - def expansion(self, symbols): - return symbols, None - def alias(self, x): - (expansion, _alias), alias = x - assert _alias is None, (alias, expansion, '-', _alias) # Double alias not allowed - return expansion, alias.value - - -@inline_args -class CanonizeTree(Transformer_InPlace): - def tokenmods(self, *args): - if len(args) == 1: - return list(args) - tokenmods, value = args - return tokenmods + [value] - -class PrepareAnonTerminals(Transformer_InPlace): - "Create a unique list of anonymous terminals. 
Attempt to give meaningful names to them when we add them" - - def __init__(self, terminals): - self.terminals = terminals - self.term_set = {td.name for td in self.terminals} - self.term_reverse = {td.pattern: td for td in terminals} - self.i = 0 - - - @inline_args - def pattern(self, p): - value = p.value - if p in self.term_reverse and p.flags != self.term_reverse[p].pattern.flags: - raise GrammarError(u'Conflicting flags for the same terminal: %s' % p) - - term_name = None - - if isinstance(p, PatternStr): - try: - # If already defined, use the user-defined terminal name - term_name = self.term_reverse[p].name - except KeyError: - # Try to assign an indicative anon-terminal name - try: - term_name = _TERMINAL_NAMES[value] - except KeyError: - if value.isalnum() and value[0].isalpha() and value.upper() not in self.term_set: - with suppress(UnicodeEncodeError): - value.upper().encode('ascii') # Make sure we don't have unicode in our terminal names - term_name = value.upper() - - if term_name in self.term_set: - term_name = None - - elif isinstance(p, PatternRE): - if p in self.term_reverse: # Kind of a weird placement.name - term_name = self.term_reverse[p].name - else: - assert False, p - - if term_name is None: - term_name = '__ANON_%d' % self.i - self.i += 1 - - if term_name not in self.term_set: - assert p not in self.term_reverse - self.term_set.add(term_name) - termdef = TerminalDef(term_name, p) - self.term_reverse[p] = termdef - self.terminals.append(termdef) - - return Terminal(term_name, filter_out=isinstance(p, PatternStr)) - -class _ReplaceSymbols(Transformer_InPlace): - " Helper for ApplyTemplates " - - def __init__(self): - self.names = {} - - def value(self, c): - if len(c) == 1 and isinstance(c[0], Token) and c[0].value in self.names: - return self.names[c[0].value] - return self.__default__('value', c, None) - - def template_usage(self, c): - if c[0] in self.names: - return self.__default__('template_usage', [self.names[c[0]].name] + c[1:], None) - return self.__default__('template_usage', c, None) - -class ApplyTemplates(Transformer_InPlace): - " Apply the templates, creating new rules that represent the used templates " - - def __init__(self, rule_defs): - self.rule_defs = rule_defs - self.replacer = _ReplaceSymbols() - self.created_templates = set() - - def template_usage(self, c): - name = c[0] - args = c[1:] - result_name = "%s{%s}" % (name, ",".join(a.name for a in args)) - if result_name not in self.created_templates: - self.created_templates.add(result_name) - (_n, params, tree, options) ,= (t for t in self.rule_defs if t[0] == name) - assert len(params) == len(args), args - result_tree = deepcopy(tree) - self.replacer.names = dict(zip(params, args)) - self.replacer.transform(result_tree) - self.rule_defs.append((result_name, [], result_tree, deepcopy(options))) - return NonTerminal(result_name) - - -def _rfind(s, choices): - return max(s.rfind(c) for c in choices) - - - - -def _literal_to_pattern(literal): - v = literal.value - flag_start = _rfind(v, '/"')+1 - assert flag_start > 0 - flags = v[flag_start:] - assert all(f in _RE_FLAGS for f in flags), flags - - v = v[:flag_start] - assert v[0] == v[-1] and v[0] in '"/' - x = v[1:-1] - - s = eval_escaping(x) - - if literal.type == 'STRING': - s = s.replace('\\\\', '\\') - - return { 'STRING': PatternStr, - 'REGEXP': PatternRE }[literal.type](s, flags) - - -@inline_args -class PrepareLiterals(Transformer_InPlace): - def literal(self, literal): - return ST('pattern', [_literal_to_pattern(literal)]) - - def 
range(self, start, end): - assert start.type == end.type == 'STRING' - start = start.value[1:-1] - end = end.value[1:-1] - assert len(eval_escaping(start)) == len(eval_escaping(end)) == 1, (start, end, len(eval_escaping(start)), len(eval_escaping(end))) - regexp = '[%s-%s]' % (start, end) - return ST('pattern', [PatternRE(regexp)]) - - -class TerminalTreeToPattern(Transformer): - def pattern(self, ps): - p ,= ps - return p - - def expansion(self, items): - assert items - if len(items) == 1: - return items[0] - if len({i.flags for i in items}) > 1: - raise GrammarError("Lark doesn't support joining terminals with conflicting flags!") - return PatternRE(''.join(i.to_regexp() for i in items), items[0].flags if items else ()) - - def expansions(self, exps): - if len(exps) == 1: - return exps[0] - if len({i.flags for i in exps}) > 1: - raise GrammarError("Lark doesn't support joining terminals with conflicting flags!") - return PatternRE('(?:%s)' % ('|'.join(i.to_regexp() for i in exps)), exps[0].flags) - - def expr(self, args): - inner, op = args[:2] - if op == '~': - if len(args) == 3: - op = "{%d}" % int(args[2]) - else: - mn, mx = map(int, args[2:]) - if mx < mn: - raise GrammarError("Bad Range for %s (%d..%d isn't allowed)" % (inner, mn, mx)) - op = "{%d,%d}" % (mn, mx) - else: - assert len(args) == 2 - return PatternRE('(?:%s)%s' % (inner.to_regexp(), op), inner.flags) - - def maybe(self, expr): - return self.expr(expr + ['?']) - - def alias(self, t): - raise GrammarError("Aliasing not allowed in terminals (You used -> in the wrong place)") - - def value(self, v): - return v[0] - -class PrepareSymbols(Transformer_InPlace): - def value(self, v): - v ,= v - if isinstance(v, Tree): - return v - elif v.type == 'RULE': - return NonTerminal(Str(v.value)) - elif v.type == 'TERMINAL': - return Terminal(Str(v.value), filter_out=v.startswith('_')) - assert False - -def _choice_of_rules(rules): - return ST('expansions', [ST('expansion', [Token('RULE', name)]) for name in rules]) - -def nr_deepcopy_tree(t): - "Deepcopy tree `t` without recursion" - return Transformer_NonRecursive(False).transform(t) - -class Grammar: - def __init__(self, rule_defs, term_defs, ignore): - self.term_defs = term_defs - self.rule_defs = rule_defs - self.ignore = ignore - - def compile(self, start): - # We change the trees in-place (to support huge grammars) - # So deepcopy allows calling compile more than once. - term_defs = deepcopy(list(self.term_defs)) - rule_defs = [(n,p,nr_deepcopy_tree(t),o) for n,p,t,o in self.rule_defs] - - # =================== - # Compile Terminals - # =================== - - # Convert terminal-trees to strings/regexps - - for name, (term_tree, priority) in term_defs: - if term_tree is None: # Terminal added through %declare - continue - expansions = list(term_tree.find_data('expansion')) - if len(expansions) == 1 and not expansions[0].children: - raise GrammarError("Terminals cannot be empty (%s)" % name) - - transformer = PrepareLiterals() * TerminalTreeToPattern() - terminals = [TerminalDef(name, transformer.transform( term_tree ), priority) - for name, (term_tree, priority) in term_defs if term_tree] - - # ================= - # Compile Rules - # ================= - - # 1. Pre-process terminals - transformer = PrepareLiterals() * PrepareSymbols() * PrepareAnonTerminals(terminals) # Adds to terminals - - # 2. Inline Templates - - transformer *= ApplyTemplates(rule_defs) - - # 3. 
Convert EBNF to BNF (and apply step 1 & 2) - ebnf_to_bnf = EBNF_to_BNF() - rules = [] - i = 0 - while i < len(rule_defs): # We have to do it like this because rule_defs might grow due to templates - name, params, rule_tree, options = rule_defs[i] - i += 1 - if len(params) != 0: # Don't transform templates - continue - ebnf_to_bnf.rule_options = RuleOptions(keep_all_tokens=True) if options.keep_all_tokens else None - ebnf_to_bnf.prefix = name - tree = transformer.transform(rule_tree) - res = ebnf_to_bnf.transform(tree) - rules.append((name, res, options)) - rules += ebnf_to_bnf.new_rules - - assert len(rules) == len({name for name, _t, _o in rules}), "Whoops, name collision" - - # 4. Compile tree to Rule objects - rule_tree_to_text = RuleTreeToText() - - simplify_rule = SimplifyRule_Visitor() - compiled_rules = [] - for rule_content in rules: - name, tree, options = rule_content - simplify_rule.visit(tree) - expansions = rule_tree_to_text.transform(tree) - - for i, (expansion, alias) in enumerate(expansions): - if alias and name.startswith('_'): - raise GrammarError("Rule %s is marked for expansion (it starts with an underscore) and isn't allowed to have aliases (alias=%s)" % (name, alias)) - - empty_indices = [x==_EMPTY for x in expansion] - if any(empty_indices): - exp_options = copy(options) or RuleOptions() - exp_options.empty_indices = empty_indices - expansion = [x for x in expansion if x!=_EMPTY] - else: - exp_options = options - - assert all(isinstance(x, Symbol) for x in expansion), expansion - rule = Rule(NonTerminal(name), expansion, i, alias, exp_options) - compiled_rules.append(rule) - - # Remove duplicates of empty rules, throw error for non-empty duplicates - if len(set(compiled_rules)) != len(compiled_rules): - duplicates = classify(compiled_rules, lambda x: x) - for dups in duplicates.values(): - if len(dups) > 1: - if dups[0].expansion: - raise GrammarError("Rules defined twice: %s\n\n(Might happen due to colliding expansion of optionals: [] or ?)" - % ''.join('\n * %s' % i for i in dups)) - - # Empty rule; assert all other attributes are equal - assert len({(r.alias, r.order, r.options) for r in dups}) == len(dups) - - # Remove duplicates - compiled_rules = list(set(compiled_rules)) - - - # Filter out unused rules - while True: - c = len(compiled_rules) - used_rules = {s for r in compiled_rules - for s in r.expansion - if isinstance(s, NonTerminal) - and s != r.origin} - used_rules |= {NonTerminal(s) for s in start} - compiled_rules = [r for r in compiled_rules if r.origin in used_rules] - if len(compiled_rules) == c: - break - - # Filter out unused terminals - used_terms = {t.name for r in compiled_rules - for t in r.expansion - if isinstance(t, Terminal)} - terminals = [t for t in terminals if t.name in used_terms or t.name in self.ignore] - - return terminals, compiled_rules, self.ignore - - - -_imported_grammars = {} -def import_grammar(grammar_path, re_, base_paths=[]): - if grammar_path not in _imported_grammars: - import_paths = base_paths + IMPORT_PATHS - for import_path in import_paths: - with suppress(IOError): - joined_path = os.path.join(import_path, grammar_path) - with open(joined_path, encoding='utf8') as f: - text = f.read() - grammar = load_grammar(text, joined_path, re_) - _imported_grammars[grammar_path] = grammar - break - else: - open(grammar_path, encoding='utf8') - assert False - - return _imported_grammars[grammar_path] - -def import_from_grammar_into_namespace(grammar, namespace, aliases): - """Returns all rules and terminals of grammar, prepended 
- with a 'namespace' prefix, except for those which are aliased. - """ - - imported_terms = dict(grammar.term_defs) - imported_rules = {n:(n,p,deepcopy(t),o) for n,p,t,o in grammar.rule_defs} - - term_defs = [] - rule_defs = [] - - def rule_dependencies(symbol): - if symbol.type != 'RULE': - return [] - try: - _, params, tree,_ = imported_rules[symbol] - except KeyError: - raise GrammarError("Missing symbol '%s' in grammar %s" % (symbol, namespace)) - return _find_used_symbols(tree) - set(params) - - - - def get_namespace_name(name, params): - if params is not None: - try: - return params[name] - except KeyError: - pass - try: - return aliases[name].value - except KeyError: - if name[0] == '_': - return '_%s__%s' % (namespace, name[1:]) - return '%s__%s' % (namespace, name) - - to_import = list(bfs(aliases, rule_dependencies)) - for symbol in to_import: - if symbol.type == 'TERMINAL': - term_defs.append([get_namespace_name(symbol, None), imported_terms[symbol]]) - else: - assert symbol.type == 'RULE' - _, params, tree, options = imported_rules[symbol] - params_map = {p: ('%s__%s' if p[0]!='_' else '_%s__%s' ) % (namespace, p) for p in params} - for t in tree.iter_subtrees(): - for i, c in enumerate(t.children): - if isinstance(c, Token) and c.type in ('RULE', 'TERMINAL'): - t.children[i] = Token(c.type, get_namespace_name(c, params_map)) - params = [params_map[p] for p in params] # We can not rely on ordered dictionaries - rule_defs.append((get_namespace_name(symbol, params_map), params, tree, options)) - - - return term_defs, rule_defs - - - -def resolve_term_references(term_defs): - # TODO Solve with transitive closure (maybe) - - term_dict = {k:t for k, (t,_p) in term_defs} - assert len(term_dict) == len(term_defs), "Same name defined twice?" - - while True: - changed = False - for name, (token_tree, _p) in term_defs: - if token_tree is None: # Terminal added through %declare - continue - for exp in token_tree.find_data('value'): - item ,= exp.children - if isinstance(item, Token): - if item.type == 'RULE': - raise GrammarError("Rules aren't allowed inside terminals (%s in %s)" % (item, name)) - if item.type == 'TERMINAL': - term_value = term_dict[item] - assert term_value is not None - exp.children[0] = term_value - changed = True - if not changed: - break - - for name, term in term_dict.items(): - if term: # Not just declared - for child in term.children: - ids = [id(x) for x in child.iter_subtrees()] - if id(term) in ids: - raise GrammarError("Recursion in terminal '%s' (recursion is only allowed in rules, not terminals)" % name) - - -def options_from_rule(name, params, *x): - if len(x) > 1: - priority, expansions = x - priority = int(priority) - else: - expansions ,= x - priority = None - params = [t.value for t in params.children] if params is not None else [] # For the grammar parser - - keep_all_tokens = name.startswith('!') - name = name.lstrip('!') - expand1 = name.startswith('?') - name = name.lstrip('?') - - return name, params, expansions, RuleOptions(keep_all_tokens, expand1, priority=priority, - template_source=(name if params else None)) - - -def symbols_from_strcase(expansion): - return [Terminal(x, filter_out=x.startswith('_')) if x.isupper() else NonTerminal(x) for x in expansion] - -@inline_args -class PrepareGrammar(Transformer_InPlace): - def terminal(self, name): - return name - def nonterminal(self, name): - return name - - -def _find_used_symbols(tree): - assert tree.data == 'expansions' - return {t for x in tree.find_data('expansion') - for t in 
x.scan_values(lambda t: t.type in ('RULE', 'TERMINAL'))} - -class GrammarLoader: - def __init__(self, re_): - self.re = re_ - terminals = [TerminalDef(name, PatternRE(value)) for name, value in TERMINALS.items()] - - rules = [options_from_rule(name, None, x) for name, x in RULES.items()] - rules = [Rule(NonTerminal(r), symbols_from_strcase(x.split()), i, None, o) for r, _p, xs, o in rules for i, x in enumerate(xs)] - callback = ParseTreeBuilder(rules, ST).create_callback() - lexer_conf = LexerConf(terminals, ['WS', 'COMMENT']) - - parser_conf = ParserConf(rules, callback, ['start']) - self.parser = LALR_TraditionalLexer(lexer_conf, parser_conf, re_) - - self.canonize_tree = CanonizeTree() - - def load_grammar(self, grammar_text, grammar_name=''): - "Parse grammar_text, verify, and create Grammar object. Display nice messages on error." - - try: - tree = self.canonize_tree.transform( self.parser.parse(grammar_text+'\n') ) - except UnexpectedCharacters as e: - context = e.get_context(grammar_text) - raise GrammarError("Unexpected input at line %d column %d in %s: \n\n%s" % - (e.line, e.column, grammar_name, context)) - except UnexpectedToken as e: - context = e.get_context(grammar_text) - error = e.match_examples(self.parser.parse, { - 'Unclosed parenthesis': ['a: (\n'], - 'Unmatched closing parenthesis': ['a: )\n', 'a: [)\n', 'a: (]\n'], - 'Expecting rule or terminal definition (missing colon)': ['a\n', 'a->\n', 'A->\n', 'a A\n'], - 'Alias expects lowercase name': ['a: -> "a"\n'], - 'Unexpected colon': ['a::\n', 'a: b:\n', 'a: B:\n', 'a: "a":\n'], - 'Misplaced operator': ['a: b??', 'a: b(?)', 'a:+\n', 'a:?\n', 'a:*\n', 'a:|*\n'], - 'Expecting option ("|") or a new rule or terminal definition': ['a:a\n()\n'], - '%import expects a name': ['%import "a"\n'], - '%ignore expects a value': ['%ignore %import\n'], - }) - if error: - raise GrammarError("%s at line %s column %s\n\n%s" % (error, e.line, e.column, context)) - elif 'STRING' in e.expected: - raise GrammarError("Expecting a value at line %s column %s\n\n%s" % (e.line, e.column, context)) - raise - - tree = PrepareGrammar().transform(tree) - - # Extract grammar items - defs = classify(tree.children, lambda c: c.data, lambda c: c.children) - term_defs = defs.pop('term', []) - rule_defs = defs.pop('rule', []) - statements = defs.pop('statement', []) - assert not defs - - term_defs = [td if len(td)==3 else (td[0], 1, td[1]) for td in term_defs] - term_defs = [(name.value, (t, int(p))) for name, p, t in term_defs] - rule_defs = [options_from_rule(*x) for x in rule_defs] - - # Execute statements - ignore, imports = [], {} - for (stmt,) in statements: - if stmt.data == 'ignore': - t ,= stmt.children - ignore.append(t) - elif stmt.data == 'import': - if len(stmt.children) > 1: - path_node, arg1 = stmt.children - else: - path_node, = stmt.children - arg1 = None - - if isinstance(arg1, Tree): # Multi import - dotted_path = tuple(path_node.children) - names = arg1.children - aliases = dict(zip(names, names)) # Can't have aliased multi import, so all aliases will be the same as names - else: # Single import - dotted_path = tuple(path_node.children[:-1]) - name = path_node.children[-1] # Get name from dotted path - aliases = {name: arg1 or name} # Aliases if exist - - if path_node.data == 'import_lib': # Import from library - base_paths = [] - else: # Relative import - if grammar_name == '': # Import relative to script file path if grammar is coded in script - try: - base_file = os.path.abspath(sys.modules['__main__'].__file__) - except AttributeError: 
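# __main__ has no __file__ attribute in interactive or embedded interpreters, so the lookup
# above raises AttributeError; base_file is then left unset and the relative import falls
# back to the current working directory below.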
- base_file = None - else: - base_file = grammar_name # Import relative to grammar file path if external grammar file - if base_file: - base_paths = [os.path.split(base_file)[0]] - else: - base_paths = [os.path.abspath(os.path.curdir)] - - try: - import_base_paths, import_aliases = imports[dotted_path] - assert base_paths == import_base_paths, 'Inconsistent base_paths for %s.' % '.'.join(dotted_path) - import_aliases.update(aliases) - except KeyError: - imports[dotted_path] = base_paths, aliases - - elif stmt.data == 'declare': - for t in stmt.children: - term_defs.append([t.value, (None, None)]) - else: - assert False, stmt - - # import grammars - for dotted_path, (base_paths, aliases) in imports.items(): - grammar_path = os.path.join(*dotted_path) + EXT - g = import_grammar(grammar_path, self.re, base_paths=base_paths) - new_td, new_rd = import_from_grammar_into_namespace(g, '__'.join(dotted_path), aliases) - - term_defs += new_td - rule_defs += new_rd - - # Verify correctness 1 - for name, _ in term_defs: - if name.startswith('__'): - raise GrammarError('Names starting with double-underscore are reserved (Error at %s)' % name) - - # Handle ignore tokens - # XXX A slightly hacky solution. Recognition of %ignore TERMINAL as separate comes from the lexer's - # inability to handle duplicate terminals (two names, one value) - ignore_names = [] - for t in ignore: - if t.data=='expansions' and len(t.children) == 1: - t2 ,= t.children - if t2.data=='expansion' and len(t2.children) == 1: - item ,= t2.children - if item.data == 'value': - item ,= item.children - if isinstance(item, Token) and item.type == 'TERMINAL': - ignore_names.append(item.value) - continue - - name = '__IGNORE_%d'% len(ignore_names) - ignore_names.append(name) - term_defs.append((name, (t, 1))) - - # Verify correctness 2 - terminal_names = set() - for name, _ in term_defs: - if name in terminal_names: - raise GrammarError("Terminal '%s' defined more than once" % name) - terminal_names.add(name) - - if set(ignore_names) > terminal_names: - raise GrammarError("Terminals %s were marked to ignore but were not defined!" 
% (set(ignore_names) - terminal_names)) - - resolve_term_references(term_defs) - - rules = rule_defs - - rule_names = {} - for name, params, _x, _o in rules: - if name.startswith('__'): - raise GrammarError('Names starting with double-underscore are reserved (Error at %s)' % name) - if name in rule_names: - raise GrammarError("Rule '%s' defined more than once" % name) - rule_names[name] = len(params) - - for name, params , expansions, _o in rules: - for i, p in enumerate(params): - if p in rule_names: - raise GrammarError("Template Parameter conflicts with rule %s (in template %s)" % (p, name)) - if p in params[:i]: - raise GrammarError("Duplicate Template Parameter %s (in template %s)" % (p, name)) - for temp in expansions.find_data('template_usage'): - sym = temp.children[0] - args = temp.children[1:] - if sym not in params: - if sym not in rule_names: - raise GrammarError("Template '%s' used but not defined (in rule %s)" % (sym, name)) - if len(args) != rule_names[sym]: - raise GrammarError("Wrong number of template arguments used for %s " - "(expected %s, got %s) (in rule %s)"%(sym, rule_names[sym], len(args), name)) - for sym in _find_used_symbols(expansions): - if sym.type == 'TERMINAL': - if sym not in terminal_names: - raise GrammarError("Token '%s' used but not defined (in rule %s)" % (sym, name)) - else: - if sym not in rule_names and sym not in params: - raise GrammarError("Rule '%s' used but not defined (in rule %s)" % (sym, name)) - - - return Grammar(rules, term_defs, ignore_names) - - - -def load_grammar(grammar, source, re_): - return GrammarLoader(re_).load_grammar(grammar, source) diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/parse_tree_builder.py b/vendor/poetry-core/poetry/core/_vendor/lark/parse_tree_builder.py deleted file mode 100644 index 5a7c5d70..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/lark/parse_tree_builder.py +++ /dev/null @@ -1,277 +0,0 @@ -from .exceptions import GrammarError -from .lexer import Token -from .tree import Tree -from .visitors import InlineTransformer # XXX Deprecated -from .visitors import Transformer_InPlace -from .visitors import _vargs_meta, _vargs_meta_inline - -###{standalone -from functools import partial, wraps -from itertools import repeat, product - - -class ExpandSingleChild: - def __init__(self, node_builder): - self.node_builder = node_builder - - def __call__(self, children): - if len(children) == 1: - return children[0] - else: - return self.node_builder(children) - -class PropagatePositions: - def __init__(self, node_builder): - self.node_builder = node_builder - - def __call__(self, children): - res = self.node_builder(children) - - # local reference to Tree.meta reduces number of presence checks - if isinstance(res, Tree): - res_meta = res.meta - for c in children: - if isinstance(c, Tree): - child_meta = c.meta - if not child_meta.empty: - res_meta.line = child_meta.line - res_meta.column = child_meta.column - res_meta.start_pos = child_meta.start_pos - res_meta.empty = False - break - elif isinstance(c, Token): - res_meta.line = c.line - res_meta.column = c.column - res_meta.start_pos = c.pos_in_stream - res_meta.empty = False - break - - for c in reversed(children): - if isinstance(c, Tree): - child_meta = c.meta - if not child_meta.empty: - res_meta.end_line = child_meta.end_line - res_meta.end_column = child_meta.end_column - res_meta.end_pos = child_meta.end_pos - res_meta.empty = False - break - elif isinstance(c, Token): - res_meta.end_line = c.end_line - res_meta.end_column = c.end_column - 
res_meta.end_pos = c.end_pos - res_meta.empty = False - break - - return res - - -class ChildFilter: - def __init__(self, to_include, append_none, node_builder): - self.node_builder = node_builder - self.to_include = to_include - self.append_none = append_none - - def __call__(self, children): - filtered = [] - - for i, to_expand, add_none in self.to_include: - if add_none: - filtered += [None] * add_none - if to_expand: - filtered += children[i].children - else: - filtered.append(children[i]) - - if self.append_none: - filtered += [None] * self.append_none - - return self.node_builder(filtered) - -class ChildFilterLALR(ChildFilter): - "Optimized childfilter for LALR (assumes no duplication in parse tree, so it's safe to change it)" - - def __call__(self, children): - filtered = [] - for i, to_expand, add_none in self.to_include: - if add_none: - filtered += [None] * add_none - if to_expand: - if filtered: - filtered += children[i].children - else: # Optimize for left-recursion - filtered = children[i].children - else: - filtered.append(children[i]) - - if self.append_none: - filtered += [None] * self.append_none - - return self.node_builder(filtered) - -class ChildFilterLALR_NoPlaceholders(ChildFilter): - "Optimized childfilter for LALR (assumes no duplication in parse tree, so it's safe to change it)" - def __init__(self, to_include, node_builder): - self.node_builder = node_builder - self.to_include = to_include - - def __call__(self, children): - filtered = [] - for i, to_expand in self.to_include: - if to_expand: - if filtered: - filtered += children[i].children - else: # Optimize for left-recursion - filtered = children[i].children - else: - filtered.append(children[i]) - return self.node_builder(filtered) - -def _should_expand(sym): - return not sym.is_term and sym.name.startswith('_') - -def maybe_create_child_filter(expansion, keep_all_tokens, ambiguous, _empty_indices): - # Prepare empty_indices as: How many Nones to insert at each index? - if _empty_indices: - assert _empty_indices.count(False) == len(expansion) - s = ''.join(str(int(b)) for b in _empty_indices) - empty_indices = [len(ones) for ones in s.split('0')] - assert len(empty_indices) == len(expansion)+1, (empty_indices, len(expansion)) - else: - empty_indices = [0] * (len(expansion)+1) - - to_include = [] - nones_to_add = 0 - for i, sym in enumerate(expansion): - nones_to_add += empty_indices[i] - if keep_all_tokens or not (sym.is_term and sym.filter_out): - to_include.append((i, _should_expand(sym), nones_to_add)) - nones_to_add = 0 - - nones_to_add += empty_indices[len(expansion)] - - if _empty_indices or len(to_include) < len(expansion) or any(to_expand for i, to_expand,_ in to_include): - if _empty_indices or ambiguous: - return partial(ChildFilter if ambiguous else ChildFilterLALR, to_include, nones_to_add) - else: - # LALR without placeholders - return partial(ChildFilterLALR_NoPlaceholders, [(i, x) for i,x,_ in to_include]) - -class AmbiguousExpander: - """Deal with the case where we're expanding children ('_rule') into a parent but the children - are ambiguous. i.e. (parent->_ambig->_expand_this_rule). 
In this case, make the parent itself - ambiguous with as many copies as there are ambiguous children, and then copy the ambiguous children - into the right parents in the right places, essentially shifting the ambiguity up the tree.""" - def __init__(self, to_expand, tree_class, node_builder): - self.node_builder = node_builder - self.tree_class = tree_class - self.to_expand = to_expand - - def __call__(self, children): - def _is_ambig_tree(child): - return hasattr(child, 'data') and child.data == '_ambig' - - #### When we're repeatedly expanding ambiguities we can end up with nested ambiguities. - # All children of an _ambig node should be a derivation of that ambig node, hence - # it is safe to assume that if we see an _ambig node nested within an ambig node - # it is safe to simply expand it into the parent _ambig node as an alternative derivation. - ambiguous = [] - for i, child in enumerate(children): - if _is_ambig_tree(child): - if i in self.to_expand: - ambiguous.append(i) - - to_expand = [j for j, grandchild in enumerate(child.children) if _is_ambig_tree(grandchild)] - child.expand_kids_by_index(*to_expand) - - if not ambiguous: - return self.node_builder(children) - - expand = [ iter(child.children) if i in ambiguous else repeat(child) for i, child in enumerate(children) ] - return self.tree_class('_ambig', [self.node_builder(list(f[0])) for f in product(zip(*expand))]) - -def maybe_create_ambiguous_expander(tree_class, expansion, keep_all_tokens): - to_expand = [i for i, sym in enumerate(expansion) - if keep_all_tokens or ((not (sym.is_term and sym.filter_out)) and _should_expand(sym))] - if to_expand: - return partial(AmbiguousExpander, to_expand, tree_class) - -def ptb_inline_args(func): - @wraps(func) - def f(children): - return func(*children) - return f - -def inplace_transformer(func): - @wraps(func) - def f(children): - # function name in a Transformer is a rule name. 
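# Rebuild a Tree named after the rule from the bare child list, so that a Transformer_InPlace
# method (which expects a tree rather than a child list) can be reused as a parse-tree-builder
# callback.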
- tree = Tree(func.__name__, children) - return func(tree) - return f - -def apply_visit_wrapper(func, name, wrapper): - if wrapper is _vargs_meta or wrapper is _vargs_meta_inline: - raise NotImplementedError("Meta args not supported for internal transformer") - @wraps(func) - def f(children): - return wrapper(func, name, children, None) - return f - - -class ParseTreeBuilder: - def __init__(self, rules, tree_class, propagate_positions=False, keep_all_tokens=False, ambiguous=False, maybe_placeholders=False): - self.tree_class = tree_class - self.propagate_positions = propagate_positions - self.always_keep_all_tokens = keep_all_tokens - self.ambiguous = ambiguous - self.maybe_placeholders = maybe_placeholders - - self.rule_builders = list(self._init_builders(rules)) - - def _init_builders(self, rules): - for rule in rules: - options = rule.options - keep_all_tokens = self.always_keep_all_tokens or options.keep_all_tokens - expand_single_child = options.expand1 - - wrapper_chain = list(filter(None, [ - (expand_single_child and not rule.alias) and ExpandSingleChild, - maybe_create_child_filter(rule.expansion, keep_all_tokens, self.ambiguous, options.empty_indices if self.maybe_placeholders else None), - self.propagate_positions and PropagatePositions, - self.ambiguous and maybe_create_ambiguous_expander(self.tree_class, rule.expansion, keep_all_tokens), - ])) - - yield rule, wrapper_chain - - - def create_callback(self, transformer=None): - callbacks = {} - - for rule, wrapper_chain in self.rule_builders: - - user_callback_name = rule.alias or rule.options.template_source or rule.origin.name - try: - f = getattr(transformer, user_callback_name) - # XXX InlineTransformer is deprecated! - wrapper = getattr(f, 'visit_wrapper', None) - if wrapper is not None: - f = apply_visit_wrapper(f, user_callback_name, wrapper) - else: - if isinstance(transformer, InlineTransformer): - f = ptb_inline_args(f) - elif isinstance(transformer, Transformer_InPlace): - f = inplace_transformer(f) - except AttributeError: - f = partial(self.tree_class, user_callback_name) - - for w in wrapper_chain: - f = w(f) - - if rule in callbacks: - raise GrammarError("Rule '%s' already exists" % (rule,)) - - callbacks[rule] = f - - return callbacks - -###} diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/parser_frontends.py b/vendor/poetry-core/poetry/core/_vendor/lark/parser_frontends.py deleted file mode 100644 index c453ab67..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/lark/parser_frontends.py +++ /dev/null @@ -1,233 +0,0 @@ -from functools import partial - -from .utils import get_regexp_width, Serialize -from .parsers.grammar_analysis import GrammarAnalyzer -from .lexer import TraditionalLexer, ContextualLexer, Lexer, Token -from .parsers import earley, xearley, cyk -from .parsers.lalr_parser import LALR_Parser -from .grammar import Rule -from .tree import Tree -from .common import LexerConf - -###{standalone - -def get_frontend(parser, lexer): - if parser=='lalr': - if lexer is None: - raise ValueError('The LALR parser requires use of a lexer') - elif lexer == 'standard': - return LALR_TraditionalLexer - elif lexer == 'contextual': - return LALR_ContextualLexer - elif issubclass(lexer, Lexer): - return partial(LALR_CustomLexer, lexer) - else: - raise ValueError('Unknown lexer: %s' % lexer) - elif parser=='earley': - if lexer=='standard': - return Earley - elif lexer=='dynamic': - return XEarley - elif lexer=='dynamic_complete': - return XEarley_CompleteLex - elif lexer=='contextual': - raise 
ValueError('The Earley parser does not support the contextual lexer') - else: - raise ValueError('Unknown lexer: %s' % lexer) - elif parser == 'cyk': - if lexer == 'standard': - return CYK - else: - raise ValueError('CYK parser requires using the standard lexer.') - else: - raise ValueError('Unknown parser: %s' % parser) - - -class _ParserFrontend(Serialize): - def _parse(self, input, start, *args): - if start is None: - start = self.start - if len(start) > 1: - raise ValueError("Lark initialized with more than 1 possible start rule. Must specify which start rule to parse", start) - start ,= start - return self.parser.parse(input, start, *args) - - -class WithLexer(_ParserFrontend): - lexer = None - parser = None - lexer_conf = None - start = None - - __serialize_fields__ = 'parser', 'lexer_conf', 'start' - __serialize_namespace__ = LexerConf, - - def __init__(self, lexer_conf, parser_conf, re_, options=None): - self.lexer_conf = lexer_conf - self.start = parser_conf.start - self.postlex = lexer_conf.postlex - self.re = re_ - - @classmethod - def deserialize(cls, data, memo, callbacks, postlex, re_): - inst = super(WithLexer, cls).deserialize(data, memo) - inst.re = re_ - inst.postlex = postlex - inst.parser = LALR_Parser.deserialize(inst.parser, memo, callbacks) - inst.init_lexer() - return inst - - def _serialize(self, data, memo): - data['parser'] = data['parser'].serialize(memo) - - def lex(self, *args): - stream = self.lexer.lex(*args) - return self.postlex.process(stream) if self.postlex else stream - - def parse(self, text, start=None): - token_stream = self.lex(text) - return self._parse(token_stream, start) - - def init_traditional_lexer(self): - self.lexer = TraditionalLexer(self.lexer_conf.tokens, re_=self.re, ignore=self.lexer_conf.ignore, user_callbacks=self.lexer_conf.callbacks, g_regex_flags=self.lexer_conf.g_regex_flags) - -class LALR_WithLexer(WithLexer): - def __init__(self, lexer_conf, parser_conf, re_, options=None): - debug = options.debug if options else False - self.re = re_ - self.parser = LALR_Parser(parser_conf, debug=debug) - WithLexer.__init__(self, lexer_conf, parser_conf, re_, options) - - self.init_lexer() - - def init_lexer(self): - raise NotImplementedError() - -class LALR_TraditionalLexer(LALR_WithLexer): - def init_lexer(self): - self.init_traditional_lexer() - -class LALR_ContextualLexer(LALR_WithLexer): - def init_lexer(self): - states = {idx:list(t.keys()) for idx, t in self.parser._parse_table.states.items()} - always_accept = self.postlex.always_accept if self.postlex else () - self.lexer = ContextualLexer(self.lexer_conf.tokens, states, - re_=self.re, - ignore=self.lexer_conf.ignore, - always_accept=always_accept, - user_callbacks=self.lexer_conf.callbacks, - g_regex_flags=self.lexer_conf.g_regex_flags) - - - def parse(self, text, start=None): - parser_state = [None] - def set_parser_state(s): - parser_state[0] = s - - token_stream = self.lex(text, lambda: parser_state[0]) - return self._parse(token_stream, start, set_parser_state) -###} - -class LALR_CustomLexer(LALR_WithLexer): - def __init__(self, lexer_cls, lexer_conf, parser_conf, re_, options=None): - self.lexer = lexer_cls(lexer_conf, re_=re_) - debug = options.debug if options else False - self.parser = LALR_Parser(parser_conf, debug=debug) - WithLexer.__init__(self, lexer_conf, parser_conf, re_, options) - - -def tokenize_text(text): - line = 1 - col_start_pos = 0 - for i, ch in enumerate(text): - if '\n' in ch: - line += ch.count('\n') - col_start_pos = i + ch.rindex('\n') - yield 
Token('CHAR', ch, line=line, column=i - col_start_pos) - -class Earley(WithLexer): - def __init__(self, lexer_conf, parser_conf, re_, options=None): - WithLexer.__init__(self, lexer_conf, parser_conf, re_, options) - self.init_traditional_lexer() - - resolve_ambiguity = options.ambiguity == 'resolve' - debug = options.debug if options else False - self.parser = earley.Parser(parser_conf, self.match, resolve_ambiguity=resolve_ambiguity, debug=debug) - - def match(self, term, token): - return term.name == token.type - - -class XEarley(_ParserFrontend): - def __init__(self, lexer_conf, parser_conf, re_, options=None, **kw): - self.re = re_ - - self.token_by_name = {t.name:t for t in lexer_conf.tokens} - self.start = parser_conf.start - - self._prepare_match(lexer_conf) - resolve_ambiguity = options.ambiguity == 'resolve' - debug = options.debug if options else False - self.parser = xearley.Parser(parser_conf, - self.match, - ignore=lexer_conf.ignore, - resolve_ambiguity=resolve_ambiguity, - debug=debug, - **kw - ) - - def match(self, term, text, index=0): - return self.regexps[term.name].match(text, index) - - def _prepare_match(self, lexer_conf): - self.regexps = {} - for t in lexer_conf.tokens: - if t.priority != 1: - raise ValueError("Dynamic Earley doesn't support weights on terminals", t, t.priority) - regexp = t.pattern.to_regexp() - try: - width = get_regexp_width(regexp)[0] - except ValueError: - raise ValueError("Bad regexp in token %s: %s" % (t.name, regexp)) - else: - if width == 0: - raise ValueError("Dynamic Earley doesn't allow zero-width regexps", t) - - self.regexps[t.name] = self.re.compile(regexp, lexer_conf.g_regex_flags) - - def parse(self, text, start): - return self._parse(text, start) - -class XEarley_CompleteLex(XEarley): - def __init__(self, *args, **kw): - XEarley.__init__(self, *args, complete_lex=True, **kw) - - - -class CYK(WithLexer): - - def __init__(self, lexer_conf, parser_conf, re_, options=None): - WithLexer.__init__(self, lexer_conf, parser_conf, re_, options) - self.init_traditional_lexer() - - self._analysis = GrammarAnalyzer(parser_conf) - self.parser = cyk.Parser(parser_conf.rules) - - self.callbacks = parser_conf.callbacks - - def parse(self, text, start): - tokens = list(self.lex(text)) - parse = self._parse(tokens, start) - parse = self._transform(parse) - return parse - - def _transform(self, tree): - subtrees = list(tree.iter_subtrees()) - for subtree in subtrees: - subtree.children = [self._apply_callback(c) if isinstance(c, Tree) else c for c in subtree.children] - - return self._apply_callback(tree) - - def _apply_callback(self, tree): - return self.callbacks[tree.rule](tree.children) - diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/parsers/earley.py b/vendor/poetry-core/poetry/core/_vendor/lark/parsers/earley.py deleted file mode 100644 index 59e9a06a..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/lark/parsers/earley.py +++ /dev/null @@ -1,328 +0,0 @@ -"""This module implements a scannerless Earley parser. - -The core Earley algorithm used here is based on Elizabeth Scott's implementation, here: - https://www.sciencedirect.com/science/article/pii/S1571066108001497 - -That is probably the best reference for understanding the algorithm here. - -The Earley parser outputs an SPPF-tree as per that document. 
The SPPF tree format -is better documented here: - http://www.bramvandersanden.com/post/2014/06/shared-packed-parse-forest/ -""" - -import logging -from collections import deque - -from ..visitors import Transformer_InPlace, v_args -from ..exceptions import UnexpectedEOF, UnexpectedToken -from .grammar_analysis import GrammarAnalyzer -from ..grammar import NonTerminal -from .earley_common import Item, TransitiveItem -from .earley_forest import ForestToTreeVisitor, ForestSumVisitor, SymbolNode, ForestToAmbiguousTreeVisitor - -class Parser: - def __init__(self, parser_conf, term_matcher, resolve_ambiguity=True, debug=False): - analysis = GrammarAnalyzer(parser_conf) - self.parser_conf = parser_conf - self.resolve_ambiguity = resolve_ambiguity - self.debug = debug - - self.FIRST = analysis.FIRST - self.NULLABLE = analysis.NULLABLE - self.callbacks = parser_conf.callbacks - self.predictions = {} - - ## These could be moved to the grammar analyzer. Pre-computing these is *much* faster than - # the slow 'isupper' in is_terminal. - self.TERMINALS = { sym for r in parser_conf.rules for sym in r.expansion if sym.is_term } - self.NON_TERMINALS = { sym for r in parser_conf.rules for sym in r.expansion if not sym.is_term } - - self.forest_sum_visitor = None - for rule in parser_conf.rules: - if rule.origin not in self.predictions: - self.predictions[rule.origin] = [x.rule for x in analysis.expand_rule(rule.origin)] - - ## Detect if any rules have priorities set. If the user specified priority = "none" then - # the priorities will be stripped from all rules before they reach us, allowing us to - # skip the extra tree walk. We'll also skip this if the user just didn't specify priorities - # on any rules. - if self.forest_sum_visitor is None and rule.options.priority is not None: - self.forest_sum_visitor = ForestSumVisitor - - self.term_matcher = term_matcher - - - def predict_and_complete(self, i, to_scan, columns, transitives): - """The core Earley Predictor and Completer. - - At each stage of the input, we handling any completed items (things - that matched on the last cycle) and use those to predict what should - come next in the input stream. The completions and any predicted - non-terminals are recursively processed until we reach a set of, - which can be added to the scan list for the next scanner cycle.""" - # Held Completions (H in E.Scotts paper). 
- node_cache = {} - held_completions = {} - - column = columns[i] - # R (items) = Ei (column.items) - items = deque(column) - while items: - item = items.pop() # remove an element, A say, from R - - ### The Earley completer - if item.is_complete: ### (item.s == string) - if item.node is None: - label = (item.s, item.start, i) - item.node = node_cache[label] if label in node_cache else node_cache.setdefault(label, SymbolNode(*label)) - item.node.add_family(item.s, item.rule, item.start, None, None) - - # create_leo_transitives(item.rule.origin, item.start) - - ###R Joop Leo right recursion Completer - if item.rule.origin in transitives[item.start]: - transitive = transitives[item.start][item.s] - if transitive.previous in transitives[transitive.column]: - root_transitive = transitives[transitive.column][transitive.previous] - else: - root_transitive = transitive - - new_item = Item(transitive.rule, transitive.ptr, transitive.start) - label = (root_transitive.s, root_transitive.start, i) - new_item.node = node_cache[label] if label in node_cache else node_cache.setdefault(label, SymbolNode(*label)) - new_item.node.add_path(root_transitive, item.node) - if new_item.expect in self.TERMINALS: - # Add (B :: aC.B, h, y) to Q - to_scan.add(new_item) - elif new_item not in column: - # Add (B :: aC.B, h, y) to Ei and R - column.add(new_item) - items.append(new_item) - ###R Regular Earley completer - else: - # Empty has 0 length. If we complete an empty symbol in a particular - # parse step, we need to be able to use that same empty symbol to complete - # any predictions that result, that themselves require empty. Avoids - # infinite recursion on empty symbols. - # held_completions is 'H' in E.Scott's paper. - is_empty_item = item.start == i - if is_empty_item: - held_completions[item.rule.origin] = item.node - - originators = [originator for originator in columns[item.start] if originator.expect is not None and originator.expect == item.s] - for originator in originators: - new_item = originator.advance() - label = (new_item.s, originator.start, i) - new_item.node = node_cache[label] if label in node_cache else node_cache.setdefault(label, SymbolNode(*label)) - new_item.node.add_family(new_item.s, new_item.rule, i, originator.node, item.node) - if new_item.expect in self.TERMINALS: - # Add (B :: aC.B, h, y) to Q - to_scan.add(new_item) - elif new_item not in column: - # Add (B :: aC.B, h, y) to Ei and R - column.add(new_item) - items.append(new_item) - - ### The Earley predictor - elif item.expect in self.NON_TERMINALS: ### (item.s == lr0) - new_items = [] - for rule in self.predictions[item.expect]: - new_item = Item(rule, 0, i) - new_items.append(new_item) - - # Process any held completions (H). 
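# If the predicted nonterminal has already been completed as empty at this position,
# advance past it right away using the cached empty derivation rather than waiting for
# another completer pass over the column.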
- if item.expect in held_completions: - new_item = item.advance() - label = (new_item.s, item.start, i) - new_item.node = node_cache[label] if label in node_cache else node_cache.setdefault(label, SymbolNode(*label)) - new_item.node.add_family(new_item.s, new_item.rule, new_item.start, item.node, held_completions[item.expect]) - new_items.append(new_item) - - for new_item in new_items: - if new_item.expect in self.TERMINALS: - to_scan.add(new_item) - elif new_item not in column: - column.add(new_item) - items.append(new_item) - - def _parse(self, stream, columns, to_scan, start_symbol=None): - def is_quasi_complete(item): - if item.is_complete: - return True - - quasi = item.advance() - while not quasi.is_complete: - if quasi.expect not in self.NULLABLE: - return False - if quasi.rule.origin == start_symbol and quasi.expect == start_symbol: - return False - quasi = quasi.advance() - return True - - def create_leo_transitives(origin, start): - visited = set() - to_create = [] - trule = None - previous = None - - ### Recursively walk backwards through the Earley sets until we find the - # first transitive candidate. If this is done continuously, we shouldn't - # have to walk more than 1 hop. - while True: - if origin in transitives[start]: - previous = trule = transitives[start][origin] - break - - is_empty_rule = not self.FIRST[origin] - if is_empty_rule: - break - - candidates = [ candidate for candidate in columns[start] if candidate.expect is not None and origin == candidate.expect ] - if len(candidates) != 1: - break - originator = next(iter(candidates)) - - if originator is None or originator in visited: - break - - visited.add(originator) - if not is_quasi_complete(originator): - break - - trule = originator.advance() - if originator.start != start: - visited.clear() - - to_create.append((origin, start, originator)) - origin = originator.rule.origin - start = originator.start - - # If a suitable Transitive candidate is not found, bail. - if trule is None: - return - - #### Now walk forwards and create Transitive Items in each set we walked through; and link - # each transitive item to the next set forwards. - while to_create: - origin, start, originator = to_create.pop() - titem = None - if previous is not None: - titem = previous.next_titem = TransitiveItem(origin, trule, originator, previous.column) - else: - titem = TransitiveItem(origin, trule, originator, start) - previous = transitives[start][origin] = titem - - - - def scan(i, token, to_scan): - """The core Earley Scanner. - - This is a custom implementation of the scanner that uses the - Lark lexer to match tokens. The scan list is built by the - Earley predictor, based on the previously completed tokens. 
- This ensures that at each phase of the parse we have a custom - lexer context, allowing for more complex ambiguities.""" - next_to_scan = set() - next_set = set() - columns.append(next_set) - transitives.append({}) - node_cache = {} - - for item in set(to_scan): - if match(item.expect, token): - new_item = item.advance() - label = (new_item.s, new_item.start, i) - new_item.node = node_cache[label] if label in node_cache else node_cache.setdefault(label, SymbolNode(*label)) - new_item.node.add_family(new_item.s, item.rule, new_item.start, item.node, token) - - if new_item.expect in self.TERMINALS: - # add (B ::= Aai+1.B, h, y) to Q' - next_to_scan.add(new_item) - else: - # add (B ::= Aa+1.B, h, y) to Ei+1 - next_set.add(new_item) - - if not next_set and not next_to_scan: - expect = {i.expect.name for i in to_scan} - raise UnexpectedToken(token, expect, considered_rules = set(to_scan)) - - return next_to_scan - - - # Define parser functions - match = self.term_matcher - - # Cache for nodes & tokens created in a particular parse step. - transitives = [{}] - - ## The main Earley loop. - # Run the Prediction/Completion cycle for any Items in the current Earley set. - # Completions will be added to the SPPF tree, and predictions will be recursively - # processed down to terminals/empty nodes to be added to the scanner for the next - # step. - i = 0 - for token in stream: - self.predict_and_complete(i, to_scan, columns, transitives) - - to_scan = scan(i, token, to_scan) - i += 1 - - self.predict_and_complete(i, to_scan, columns, transitives) - - ## Column is now the final column in the parse. - assert i == len(columns)-1 - return to_scan - - def parse(self, stream, start): - assert start, start - start_symbol = NonTerminal(start) - - columns = [set()] - to_scan = set() # The scan buffer. 'Q' in E.Scott's paper. - - ## Predict for the start_symbol. - # Add predicted items to the first Earley set (for the predictor) if they - # result in a non-terminal, or the scanner if they result in a terminal. - for rule in self.predictions[start_symbol]: - item = Item(rule, 0, 0) - if item.expect in self.TERMINALS: - to_scan.add(item) - else: - columns[0].add(item) - - to_scan = self._parse(stream, columns, to_scan, start_symbol) - - # If the parse was successful, the start - # symbol should have been completed in the last step of the Earley cycle, and will be in - # this column. Find the item for the start_symbol, which is the root of the SPPF tree. - solutions = [n.node for n in columns[-1] if n.is_complete and n.node is not None and n.s == start_symbol and n.start == 0] - if self.debug: - from .earley_forest import ForestToPyDotVisitor - try: - debug_walker = ForestToPyDotVisitor() - except ImportError: - logging.warning("Cannot find dependency 'pydot', will not generate sppf debug image") - else: - debug_walker.visit(solutions[0], "sppf.png") - - - if not solutions: - expected_tokens = [t.expect for t in to_scan] - raise UnexpectedEOF(expected_tokens) - elif len(solutions) > 1: - assert False, 'Earley should not generate multiple start symbol items!' - - # Perform our SPPF -> AST conversion using the right ForestVisitor. 
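# When resolve_ambiguity is set, a single derivation is kept (ranked by the priority sums
# computed by ForestSumVisitor whenever any rule declares a priority); otherwise the
# ambiguous visitor emits explicit '_ambig' nodes for the caller to disambiguate.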
- forest_tree_visitor_cls = ForestToTreeVisitor if self.resolve_ambiguity else ForestToAmbiguousTreeVisitor - forest_tree_visitor = forest_tree_visitor_cls(self.callbacks, self.forest_sum_visitor and self.forest_sum_visitor()) - - return forest_tree_visitor.visit(solutions[0]) - - -class ApplyCallbacks(Transformer_InPlace): - def __init__(self, postprocess): - self.postprocess = postprocess - - @v_args(meta=True) - def drv(self, children, meta): - return self.postprocess[meta.rule](children) diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/parsers/earley_common.py b/vendor/poetry-core/poetry/core/_vendor/lark/parsers/earley_common.py deleted file mode 100644 index 6bd614ba..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/lark/parsers/earley_common.py +++ /dev/null @@ -1,75 +0,0 @@ -"This module implements an Earley Parser" - -# The parser uses a parse-forest to keep track of derivations and ambiguations. -# When the parse ends successfully, a disambiguation stage resolves all ambiguity -# (right now ambiguity resolution is not developed beyond the needs of lark) -# Afterwards the parse tree is reduced (transformed) according to user callbacks. -# I use the no-recursion version of Transformer, because the tree might be -# deeper than Python's recursion limit (a bit absurd, but that's life) -# -# The algorithm keeps track of each state set, using a corresponding Column instance. -# Column keeps track of new items using NewsList instances. -# -# Author: Erez Shinan (2017) -# Email : erezshin@gmail.com - -from ..grammar import NonTerminal, Terminal - -class Item(object): - "An Earley Item, the atom of the algorithm." - - __slots__ = ('s', 'rule', 'ptr', 'start', 'is_complete', 'expect', 'previous', 'node', '_hash') - def __init__(self, rule, ptr, start): - self.is_complete = len(rule.expansion) == ptr - self.rule = rule # rule - self.ptr = ptr # ptr - self.start = start # j - self.node = None # w - if self.is_complete: - self.s = rule.origin - self.expect = None - self.previous = rule.expansion[ptr - 1] if ptr > 0 and len(rule.expansion) else None - else: - self.s = (rule, ptr) - self.expect = rule.expansion[ptr] - self.previous = rule.expansion[ptr - 1] if ptr > 0 and len(rule.expansion) else None - self._hash = hash((self.s, self.start)) - - def advance(self): - return Item(self.rule, self.ptr + 1, self.start) - - def __eq__(self, other): - return self is other or (self.s == other.s and self.start == other.start) - - def __hash__(self): - return self._hash - - def __repr__(self): - before = ( expansion.name for expansion in self.rule.expansion[:self.ptr] ) - after = ( expansion.name for expansion in self.rule.expansion[self.ptr:] ) - symbol = "{} ::= {}* {}".format(self.rule.origin.name, ' '.join(before), ' '.join(after)) - return '%s (%d)' % (symbol, self.start) - - -class TransitiveItem(Item): - __slots__ = ('recognized', 'reduction', 'column', 'next_titem') - def __init__(self, recognized, trule, originator, start): - super(TransitiveItem, self).__init__(trule.rule, trule.ptr, trule.start) - self.recognized = recognized - self.reduction = originator - self.column = start - self.next_titem = None - self._hash = hash((self.s, self.start, self.recognized)) - - def __eq__(self, other): - if not isinstance(other, TransitiveItem): - return False - return self is other or (type(self.s) == type(other.s) and self.s == other.s and self.start == other.start and self.recognized == other.recognized) - - def __hash__(self): - return self._hash - - def __repr__(self): - before = ( 
expansion.name for expansion in self.rule.expansion[:self.ptr] ) - after = ( expansion.name for expansion in self.rule.expansion[self.ptr:] ) - return '{} : {} -> {}* {} ({}, {})'.format(self.recognized.name, self.rule.origin.name, ' '.join(before), ' '.join(after), self.column, self.start) diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/parsers/earley_forest.py b/vendor/poetry-core/poetry/core/_vendor/lark/parsers/earley_forest.py deleted file mode 100644 index c8b4f253..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/lark/parsers/earley_forest.py +++ /dev/null @@ -1,430 +0,0 @@ -"""This module implements an SPPF implementation - -This is used as the primary output mechanism for the Earley parser -in order to store complex ambiguities. - -Full reference and more details are here: -http://www.bramvandersanden.com/post/2014/06/shared-packed-parse-forest/ -""" - -from random import randint -from math import isinf -from collections import deque -from operator import attrgetter -from importlib import import_module - -from ..tree import Tree -from ..exceptions import ParseError - -class ForestNode(object): - pass - -class SymbolNode(ForestNode): - """ - A Symbol Node represents a symbol (or Intermediate LR0). - - Symbol nodes are keyed by the symbol (s). For intermediate nodes - s will be an LR0, stored as a tuple of (rule, ptr). For completed symbol - nodes, s will be a string representing the non-terminal origin (i.e. - the left hand side of the rule). - - The children of a Symbol or Intermediate Node will always be Packed Nodes; - with each Packed Node child representing a single derivation of a production. - - Hence a Symbol Node with a single child is unambiguous. - """ - __slots__ = ('s', 'start', 'end', '_children', 'paths', 'paths_loaded', 'priority', 'is_intermediate', '_hash') - def __init__(self, s, start, end): - self.s = s - self.start = start - self.end = end - self._children = set() - self.paths = set() - self.paths_loaded = False - - ### We use inf here as it can be safely negated without resorting to conditionals, - # unlike None or float('NaN'), and sorts appropriately. 
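# -inf also doubles as a 'not yet computed' sentinel: ForestToTreeVisitor only runs the
# priority-summing walk on an ambiguous node whose priority is still infinite.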
- self.priority = float('-inf') - self.is_intermediate = isinstance(s, tuple) - self._hash = hash((self.s, self.start, self.end)) - - def add_family(self, lr0, rule, start, left, right): - self._children.add(PackedNode(self, lr0, rule, start, left, right)) - - def add_path(self, transitive, node): - self.paths.add((transitive, node)) - - def load_paths(self): - for transitive, node in self.paths: - if transitive.next_titem is not None: - vn = SymbolNode(transitive.next_titem.s, transitive.next_titem.start, self.end) - vn.add_path(transitive.next_titem, node) - self.add_family(transitive.reduction.rule.origin, transitive.reduction.rule, transitive.reduction.start, transitive.reduction.node, vn) - else: - self.add_family(transitive.reduction.rule.origin, transitive.reduction.rule, transitive.reduction.start, transitive.reduction.node, node) - self.paths_loaded = True - - @property - def is_ambiguous(self): - return len(self.children) > 1 - - @property - def children(self): - if not self.paths_loaded: self.load_paths() - return sorted(self._children, key=attrgetter('sort_key')) - - def __iter__(self): - return iter(self._children) - - def __eq__(self, other): - if not isinstance(other, SymbolNode): - return False - return self is other or (type(self.s) == type(other.s) and self.s == other.s and self.start == other.start and self.end is other.end) - - def __hash__(self): - return self._hash - - def __repr__(self): - if self.is_intermediate: - rule = self.s[0] - ptr = self.s[1] - before = ( expansion.name for expansion in rule.expansion[:ptr] ) - after = ( expansion.name for expansion in rule.expansion[ptr:] ) - symbol = "{} ::= {}* {}".format(rule.origin.name, ' '.join(before), ' '.join(after)) - else: - symbol = self.s.name - return "({}, {}, {}, {})".format(symbol, self.start, self.end, self.priority) - -class PackedNode(ForestNode): - """ - A Packed Node represents a single derivation in a symbol node. - """ - __slots__ = ('parent', 's', 'rule', 'start', 'left', 'right', 'priority', '_hash') - def __init__(self, parent, s, rule, start, left, right): - self.parent = parent - self.s = s - self.start = start - self.rule = rule - self.left = left - self.right = right - self.priority = float('-inf') - self._hash = hash((self.left, self.right)) - - @property - def is_empty(self): - return self.left is None and self.right is None - - @property - def sort_key(self): - """ - Used to sort PackedNode children of SymbolNodes. - A SymbolNode has multiple PackedNodes if it matched - ambiguously. Hence, we use the sort order to identify - the order in which ambiguous children should be considered. - """ - return self.is_empty, -self.priority, self.rule.order - - def __iter__(self): - return iter([self.left, self.right]) - - def __eq__(self, other): - if not isinstance(other, PackedNode): - return False - return self is other or (self.left == other.left and self.right == other.right) - - def __hash__(self): - return self._hash - - def __repr__(self): - if isinstance(self.s, tuple): - rule = self.s[0] - ptr = self.s[1] - before = ( expansion.name for expansion in rule.expansion[:ptr] ) - after = ( expansion.name for expansion in rule.expansion[ptr:] ) - symbol = "{} ::= {}* {}".format(rule.origin.name, ' '.join(before), ' '.join(after)) - else: - symbol = self.s.name - return "({}, {}, {}, {})".format(symbol, self.start, self.priority, self.rule.order) - -class ForestVisitor(object): - """ - An abstract base class for building forest visitors. - - Use this as a base when you need to walk the forest. 
- """ - __slots__ = ['result'] - - def visit_token_node(self, node): pass - def visit_symbol_node_in(self, node): pass - def visit_symbol_node_out(self, node): pass - def visit_packed_node_in(self, node): pass - def visit_packed_node_out(self, node): pass - - def visit(self, root): - self.result = None - # Visiting is a list of IDs of all symbol/intermediate nodes currently in - # the stack. It serves two purposes: to detect when we 'recurse' in and out - # of a symbol/intermediate so that we can process both up and down. Also, - # since the SPPF can have cycles it allows us to detect if we're trying - # to recurse into a node that's already on the stack (infinite recursion). - visiting = set() - - # We do not use recursion here to walk the Forest due to the limited - # stack size in python. Therefore input_stack is essentially our stack. - input_stack = deque([root]) - - # It is much faster to cache these as locals since they are called - # many times in large parses. - vpno = getattr(self, 'visit_packed_node_out') - vpni = getattr(self, 'visit_packed_node_in') - vsno = getattr(self, 'visit_symbol_node_out') - vsni = getattr(self, 'visit_symbol_node_in') - vtn = getattr(self, 'visit_token_node') - while input_stack: - current = next(reversed(input_stack)) - try: - next_node = next(current) - except StopIteration: - input_stack.pop() - continue - except TypeError: - ### If the current object is not an iterator, pass through to Token/SymbolNode - pass - else: - if next_node is None: - continue - - if id(next_node) in visiting: - raise ParseError("Infinite recursion in grammar, in rule '%s'!" % next_node.s.name) - - input_stack.append(next_node) - continue - - if not isinstance(current, ForestNode): - vtn(current) - input_stack.pop() - continue - - current_id = id(current) - if current_id in visiting: - if isinstance(current, PackedNode): vpno(current) - else: vsno(current) - input_stack.pop() - visiting.remove(current_id) - continue - else: - visiting.add(current_id) - if isinstance(current, PackedNode): next_node = vpni(current) - else: next_node = vsni(current) - if next_node is None: - continue - - if id(next_node) in visiting: - raise ParseError("Infinite recursion in grammar!") - - input_stack.append(next_node) - continue - - return self.result - -class ForestSumVisitor(ForestVisitor): - """ - A visitor for prioritizing ambiguous parts of the Forest. - - This visitor is used when support for explicit priorities on - rules is requested (whether normal, or invert). It walks the - forest (or subsets thereof) and cascades properties upwards - from the leaves. - - It would be ideal to do this during parsing, however this would - require processing each Earley item multiple times. That's - a big performance drawback; so running a forest walk is the - lesser of two evils: there can be significantly more Earley - items created during parsing than there are SPPF nodes in the - final tree. 
- """ - def visit_packed_node_in(self, node): - return iter([node.left, node.right]) - - def visit_symbol_node_in(self, node): - return iter(node.children) - - def visit_packed_node_out(self, node): - priority = node.rule.options.priority if not node.parent.is_intermediate and node.rule.options.priority else 0 - priority += getattr(node.right, 'priority', 0) - priority += getattr(node.left, 'priority', 0) - node.priority = priority - - def visit_symbol_node_out(self, node): - node.priority = max(child.priority for child in node.children) - -class ForestToTreeVisitor(ForestVisitor): - """ - A Forest visitor which converts an SPPF forest to an unambiguous AST. - - The implementation in this visitor walks only the first ambiguous child - of each symbol node. When it finds an ambiguous symbol node it first - calls the forest_sum_visitor implementation to sort the children - into preference order using the algorithms defined there; so the first - child should always be the highest preference. The forest_sum_visitor - implementation should be another ForestVisitor which sorts the children - according to some priority mechanism. - """ - __slots__ = ['forest_sum_visitor', 'callbacks', 'output_stack'] - def __init__(self, callbacks, forest_sum_visitor = None): - assert callbacks - self.forest_sum_visitor = forest_sum_visitor - self.callbacks = callbacks - - def visit(self, root): - self.output_stack = deque() - return super(ForestToTreeVisitor, self).visit(root) - - def visit_token_node(self, node): - self.output_stack[-1].append(node) - - def visit_symbol_node_in(self, node): - if self.forest_sum_visitor and node.is_ambiguous and isinf(node.priority): - self.forest_sum_visitor.visit(node) - return next(iter(node.children)) - - def visit_packed_node_in(self, node): - if not node.parent.is_intermediate: - self.output_stack.append([]) - return iter([node.left, node.right]) - - def visit_packed_node_out(self, node): - if not node.parent.is_intermediate: - result = self.callbacks[node.rule](self.output_stack.pop()) - if self.output_stack: - self.output_stack[-1].append(result) - else: - self.result = result - -class ForestToAmbiguousTreeVisitor(ForestToTreeVisitor): - """ - A Forest visitor which converts an SPPF forest to an ambiguous AST. - - Because of the fundamental disparity between what can be stored in - an SPPF and what can be stored in a Tree; this implementation is not - complete. It correctly deals with ambiguities that occur on symbol nodes only, - and cannot deal with ambiguities that occur on intermediate nodes. - - Usually, most parsers can be rewritten to avoid intermediate node - ambiguities. Also, this implementation could be fixed, however - the code to handle intermediate node ambiguities is messy and - would not be performant. It is much better not to use this and - instead to correctly disambiguate the forest and only store unambiguous - parses in Trees. It is here just to provide some parity with the - old ambiguity='explicit'. - - This is mainly used by the test framework, to make it simpler to write - tests ensuring the SPPF contains the right results. 
- """ - def __init__(self, callbacks, forest_sum_visitor = ForestSumVisitor): - super(ForestToAmbiguousTreeVisitor, self).__init__(callbacks, forest_sum_visitor) - - def visit_token_node(self, node): - self.output_stack[-1].children.append(node) - - def visit_symbol_node_in(self, node): - if self.forest_sum_visitor and node.is_ambiguous and isinf(node.priority): - self.forest_sum_visitor.visit(node) - if not node.is_intermediate and node.is_ambiguous: - self.output_stack.append(Tree('_ambig', [])) - return iter(node.children) - - def visit_symbol_node_out(self, node): - if not node.is_intermediate and node.is_ambiguous: - result = self.output_stack.pop() - if self.output_stack: - self.output_stack[-1].children.append(result) - else: - self.result = result - - def visit_packed_node_in(self, node): - if not node.parent.is_intermediate: - self.output_stack.append(Tree('drv', [])) - return iter([node.left, node.right]) - - def visit_packed_node_out(self, node): - if not node.parent.is_intermediate: - result = self.callbacks[node.rule](self.output_stack.pop().children) - if self.output_stack: - self.output_stack[-1].children.append(result) - else: - self.result = result - -class ForestToPyDotVisitor(ForestVisitor): - """ - A Forest visitor which writes the SPPF to a PNG. - - The SPPF can get really large, really quickly because - of the amount of meta-data it stores, so this is probably - only useful for trivial trees and learning how the SPPF - is structured. - """ - def __init__(self, rankdir="TB"): - self.pydot = import_module('pydot') - self.graph = self.pydot.Dot(graph_type='digraph', rankdir=rankdir) - - def visit(self, root, filename): - super(ForestToPyDotVisitor, self).visit(root) - self.graph.write_png(filename) - - def visit_token_node(self, node): - graph_node_id = str(id(node)) - graph_node_label = "\"{}\"".format(node.value.replace('"', '\\"')) - graph_node_color = 0x808080 - graph_node_style = "\"filled,rounded\"" - graph_node_shape = "diamond" - graph_node = self.pydot.Node(graph_node_id, style=graph_node_style, fillcolor="#{:06x}".format(graph_node_color), shape=graph_node_shape, label=graph_node_label) - self.graph.add_node(graph_node) - - def visit_packed_node_in(self, node): - graph_node_id = str(id(node)) - graph_node_label = repr(node) - graph_node_color = 0x808080 - graph_node_style = "filled" - graph_node_shape = "diamond" - graph_node = self.pydot.Node(graph_node_id, style=graph_node_style, fillcolor="#{:06x}".format(graph_node_color), shape=graph_node_shape, label=graph_node_label) - self.graph.add_node(graph_node) - return iter([node.left, node.right]) - - def visit_packed_node_out(self, node): - graph_node_id = str(id(node)) - graph_node = self.graph.get_node(graph_node_id)[0] - for child in [node.left, node.right]: - if child is not None: - child_graph_node_id = str(id(child)) - child_graph_node = self.graph.get_node(child_graph_node_id)[0] - self.graph.add_edge(self.pydot.Edge(graph_node, child_graph_node)) - else: - #### Try and be above the Python object ID range; probably impl. specific, but maybe this is okay. 
- child_graph_node_id = str(randint(100000000000000000000000000000,123456789012345678901234567890)) - child_graph_node_style = "invis" - child_graph_node = self.pydot.Node(child_graph_node_id, style=child_graph_node_style, label="None") - child_edge_style = "invis" - self.graph.add_node(child_graph_node) - self.graph.add_edge(self.pydot.Edge(graph_node, child_graph_node, style=child_edge_style)) - - def visit_symbol_node_in(self, node): - graph_node_id = str(id(node)) - graph_node_label = repr(node) - graph_node_color = 0x808080 - graph_node_style = "\"filled\"" - if node.is_intermediate: - graph_node_shape = "ellipse" - else: - graph_node_shape = "rectangle" - graph_node = self.pydot.Node(graph_node_id, style=graph_node_style, fillcolor="#{:06x}".format(graph_node_color), shape=graph_node_shape, label=graph_node_label) - self.graph.add_node(graph_node) - return iter(node.children) - - def visit_symbol_node_out(self, node): - graph_node_id = str(id(node)) - graph_node = self.graph.get_node(graph_node_id)[0] - for child in node.children: - child_graph_node_id = str(id(child)) - child_graph_node = self.graph.get_node(child_graph_node_id)[0] - self.graph.add_edge(self.pydot.Edge(graph_node, child_graph_node)) diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/parsers/lalr_parser.py b/vendor/poetry-core/poetry/core/_vendor/lark/parsers/lalr_parser.py deleted file mode 100644 index f26cbc5b..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/lark/parsers/lalr_parser.py +++ /dev/null @@ -1,119 +0,0 @@ -"""This module implements a LALR(1) Parser -""" -# Author: Erez Shinan (2017) -# Email : erezshin@gmail.com -from ..exceptions import UnexpectedToken -from ..lexer import Token -from ..utils import Enumerator, Serialize - -from .lalr_analysis import LALR_Analyzer, Shift, Reduce, IntParseTable -from .lalr_puppet import ParserPuppet - -###{standalone - -class LALR_Parser(object): - def __init__(self, parser_conf, debug=False): - assert all(r.options.priority is None for r in parser_conf.rules), "LALR doesn't yet support prioritization" - analysis = LALR_Analyzer(parser_conf, debug=debug) - analysis.compute_lalr() - callbacks = parser_conf.callbacks - - self._parse_table = analysis.parse_table - self.parser_conf = parser_conf - self.parser = _Parser(analysis.parse_table, callbacks, debug) - - @classmethod - def deserialize(cls, data, memo, callbacks): - inst = cls.__new__(cls) - inst._parse_table = IntParseTable.deserialize(data, memo) - inst.parser = _Parser(inst._parse_table, callbacks) - return inst - - def serialize(self, memo): - return self._parse_table.serialize(memo) - - def parse(self, *args): - return self.parser.parse(*args) - - -class _Parser: - def __init__(self, parse_table, callbacks, debug=False): - self.parse_table = parse_table - self.callbacks = callbacks - self.debug = debug - - def parse(self, seq, start, set_state=None, value_stack=None, state_stack=None): - token = None - stream = iter(seq) - states = self.parse_table.states - start_state = self.parse_table.start_states[start] - end_state = self.parse_table.end_states[start] - - state_stack = state_stack or [start_state] - value_stack = value_stack or [] - - if set_state: set_state(start_state) - - def get_action(token): - state = state_stack[-1] - try: - return states[state][token.type] - except KeyError: - expected = [s for s in states[state].keys() if s.isupper()] - try: - puppet = ParserPuppet(self, state_stack, value_stack, start, stream, set_state) - except NameError: - puppet = None - raise 
UnexpectedToken(token, expected, state=state, puppet=puppet) - - def reduce(rule): - size = len(rule.expansion) - if size: - s = value_stack[-size:] - del state_stack[-size:] - del value_stack[-size:] - else: - s = [] - - value = self.callbacks[rule](s) - - _action, new_state = states[state_stack[-1]][rule.origin.name] - assert _action is Shift - state_stack.append(new_state) - value_stack.append(value) - - # Main LALR-parser loop - try: - for token in stream: - while True: - action, arg = get_action(token) - assert arg != end_state - - if action is Shift: - state_stack.append(arg) - value_stack.append(token) - if set_state: set_state(arg) - break # next token - else: - reduce(arg) - except Exception as e: - if self.debug: - print("") - print("STATE STACK DUMP") - print("----------------") - for i, s in enumerate(state_stack): - print('%d)' % i , s) - print("") - - raise - - token = Token.new_borrow_pos('$END', '', token) if token else Token('$END', '', 0, 1, 1) - while True: - _action, arg = get_action(token) - assert(_action is Reduce) - reduce(arg) - if state_stack[-1] == end_state: - return value_stack[-1] - -###} - diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/parsers/lalr_puppet.py b/vendor/poetry-core/poetry/core/_vendor/lark/parsers/lalr_puppet.py deleted file mode 100644 index 968783cc..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/lark/parsers/lalr_puppet.py +++ /dev/null @@ -1,79 +0,0 @@ -# This module provides a LALR puppet, which is used for debugging and error handling - -from copy import deepcopy - -from .lalr_analysis import Shift, Reduce - -class ParserPuppet: - def __init__(self, parser, state_stack, value_stack, start, stream, set_state): - self.parser = parser - self._state_stack = state_stack - self._value_stack = value_stack - self._start = start - self._stream = stream - self._set_state = set_state - - self.result = None - - def feed_token(self, token): - """Advance the parser state, as if it just received `token` from the lexer - - """ - end_state = self.parser.parse_table.end_states[self._start] - state_stack = self._state_stack - value_stack = self._value_stack - - state = state_stack[-1] - action, arg = self.parser.parse_table.states[state][token.type] - assert arg != end_state - - while action is Reduce: - rule = arg - size = len(rule.expansion) - if size: - s = value_stack[-size:] - del state_stack[-size:] - del value_stack[-size:] - else: - s = [] - - value = self.parser.callbacks[rule](s) - - _action, new_state = self.parser.parse_table.states[state_stack[-1]][rule.origin.name] - assert _action is Shift - state_stack.append(new_state) - value_stack.append(value) - - if state_stack[-1] == end_state: - self.result = value_stack[-1] - return self.result - - state = state_stack[-1] - action, arg = self.parser.parse_table.states[state][token.type] - assert arg != end_state - - assert action is Shift - state_stack.append(arg) - value_stack.append(token) - - def copy(self): - return type(self)( - self.parser, - list(self._state_stack), - deepcopy(self._value_stack), - self._start, - self._stream, - self._set_state, - ) - - def pretty(self): - print("Puppet choices:") - for k, v in self.choices().items(): - print('\t-', k, '->', v) - print('stack size:', len(self._state_stack)) - - def choices(self): - return self.parser.parse_table.states[self._state_stack[-1]] - - def resume_parse(self): - return self.parser.parse(self._stream, self._start, self._set_state, self._value_stack, self._state_stack) diff --git
a/vendor/poetry-core/poetry/core/_vendor/lark/reconstruct.py b/vendor/poetry-core/poetry/core/_vendor/lark/reconstruct.py deleted file mode 100644 index 1e3adc77..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/lark/reconstruct.py +++ /dev/null @@ -1,164 +0,0 @@ -from collections import defaultdict - -from .tree import Tree -from .visitors import Transformer_InPlace -from .common import ParserConf -from .lexer import Token, PatternStr -from .parsers import earley -from .grammar import Rule, Terminal, NonTerminal - - - -def is_discarded_terminal(t): - return t.is_term and t.filter_out - -def is_iter_empty(i): - try: - _ = next(i) - return False - except StopIteration: - return True - - -class WriteTokensTransformer(Transformer_InPlace): - "Inserts discarded tokens into their correct place, according to the rules of grammar" - - def __init__(self, tokens, term_subs): - self.tokens = tokens - self.term_subs = term_subs - - def __default__(self, data, children, meta): - if not getattr(meta, 'match_tree', False): - return Tree(data, children) - - iter_args = iter(children) - to_write = [] - for sym in meta.orig_expansion: - if is_discarded_terminal(sym): - try: - v = self.term_subs[sym.name](sym) - except KeyError: - t = self.tokens[sym.name] - if not isinstance(t.pattern, PatternStr): - raise NotImplementedError("Reconstructing regexps not supported yet: %s" % t) - - v = t.pattern.value - to_write.append(v) - else: - x = next(iter_args) - if isinstance(x, list): - to_write += x - else: - if isinstance(x, Token): - assert Terminal(x.type) == sym, x - else: - assert NonTerminal(x.data) == sym, (sym, x) - to_write.append(x) - - assert is_iter_empty(iter_args) - return to_write - - -class MatchTree(Tree): - pass - -class MakeMatchTree: - def __init__(self, name, expansion): - self.name = name - self.expansion = expansion - - def __call__(self, args): - t = MatchTree(self.name, args) - t.meta.match_tree = True - t.meta.orig_expansion = self.expansion - return t - -def best_from_group(seq, group_key, cmp_key): - d = {} - for item in seq: - key = group_key(item) - if key in d: - v1 = cmp_key(item) - v2 = cmp_key(d[key]) - if v2 > v1: - d[key] = item - else: - d[key] = item - return list(d.values()) - -class Reconstructor: - def __init__(self, parser, term_subs={}): - # XXX TODO calling compile twice returns different results! - assert parser.options.maybe_placeholders == False - tokens, rules, _grammar_extra = parser.grammar.compile(parser.options.start) - - self.write_tokens = WriteTokensTransformer({t.name:t for t in tokens}, term_subs) - self.rules = list(self._build_recons_rules(rules)) - self.rules.reverse() - - # Choose the best rule from each group of {rule => [rule.alias]}, since we only really need one derivation. - self.rules = best_from_group(self.rules, lambda r: r, lambda r: -len(r.expansion)) - - self.rules.sort(key=lambda r: len(r.expansion)) - callbacks = {rule: rule.alias for rule in self.rules} # TODO pass callbacks through dict, instead of alias? 
- self.parser = earley.Parser(ParserConf(self.rules, callbacks, parser.options.start), - self._match, resolve_ambiguity=True) - - def _build_recons_rules(self, rules): - expand1s = {r.origin for r in rules if r.options.expand1} - - aliases = defaultdict(list) - for r in rules: - if r.alias: - aliases[r.origin].append( r.alias ) - - rule_names = {r.origin for r in rules} - nonterminals = {sym for sym in rule_names - if sym.name.startswith('_') or sym in expand1s or sym in aliases } - - for r in rules: - recons_exp = [sym if sym in nonterminals else Terminal(sym.name) - for sym in r.expansion if not is_discarded_terminal(sym)] - - # Skip self-recursive constructs - if recons_exp == [r.origin]: - continue - - sym = NonTerminal(r.alias) if r.alias else r.origin - - yield Rule(sym, recons_exp, alias=MakeMatchTree(sym.name, r.expansion)) - - for origin, rule_aliases in aliases.items(): - for alias in rule_aliases: - yield Rule(origin, [Terminal(alias)], alias=MakeMatchTree(origin.name, [NonTerminal(alias)])) - yield Rule(origin, [Terminal(origin.name)], alias=MakeMatchTree(origin.name, [origin])) - - def _match(self, term, token): - if isinstance(token, Tree): - return Terminal(token.data) == term - elif isinstance(token, Token): - return term == Terminal(token.type) - assert False - - def _reconstruct(self, tree): - # TODO: ambiguity? - unreduced_tree = self.parser.parse(tree.children, tree.data) # find a full derivation - assert unreduced_tree.data == tree.data - res = self.write_tokens.transform(unreduced_tree) - for item in res: - if isinstance(item, Tree): - for x in self._reconstruct(item): - yield x - else: - yield item - - def reconstruct(self, tree): - x = self._reconstruct(tree) - y = [] - prev_item = '' - for item in x: - if prev_item and item and prev_item[-1].isalnum() and item[0].isalnum(): - y.append(' ') - y.append(item) - prev_item = item - return ''.join(y) diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/reconstruct2.py b/vendor/poetry-core/poetry/core/_vendor/lark/reconstruct2.py deleted file mode 100644 index c7300a06..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/lark/reconstruct2.py +++ /dev/null @@ -1,155 +0,0 @@ -from collections import defaultdict - -from .tree import Tree -from .visitors import Transformer_InPlace -from .common import ParserConf -from .lexer import Token, PatternStr -from .parsers import earley -from .grammar import Rule, Terminal, NonTerminal - - - -def is_discarded_terminal(t): - return t.is_term and t.filter_out - -def is_iter_empty(i): - try: - _ = next(i) - return False - except StopIteration: - return True - -class WriteTokensTransformer(Transformer_InPlace): - def __init__(self, tokens): - self.tokens = tokens - - def __default__(self, data, children, meta): - # if not isinstance(t, MatchTree): - # return t - if not getattr(meta, 'match_tree', False): - return Tree(data, children) - - iter_args = iter(children) - print('@@@', children, meta.orig_expansion) - to_write = [] - for sym in meta.orig_expansion: - if is_discarded_terminal(sym): - t = self.tokens[sym.name] - value = t.pattern.value - if not isinstance(t.pattern, PatternStr): - if t.name == "_NEWLINE": - value = "\n" - else: - raise NotImplementedError("Reconstructing regexps not supported yet: %s" % t) - to_write.append(value) - else: - x = next(iter_args) - if isinstance(x, list): - to_write += x - else: - if isinstance(x, Token): - assert Terminal(x.type) == sym, x - else: - assert NonTerminal(x.data) == sym, (sym, x) - to_write.append(x) - - assert 
is_iter_empty(iter_args) - return to_write - - -class MatchTree(Tree): - pass - -class MakeMatchTree: - def __init__(self, name, expansion): - self.name = name - self.expansion = expansion - - def __call__(self, args): - t = MatchTree(self.name, args) - t.meta.match_tree = True - t.meta.orig_expansion = self.expansion - return t - -from lark.load_grammar import SimplifyRule_Visitor, RuleTreeToText -class Reconstructor: - def __init__(self, parser): - # XXX TODO calling compile twice returns different results! - assert parser.options.maybe_placeholders == False - tokens, rules, _grammar_extra = parser.grammar.compile(parser.options.start) - - self.write_tokens = WriteTokensTransformer({t.name:t for t in tokens}) - self.rules = list(set(list(self._build_recons_rules(rules)))) - callbacks = {rule: rule.alias for rule in self.rules} # TODO pass callbacks through dict, instead of alias? - for r in self.rules: - print("*", r) - self.parser = earley.Parser(ParserConf(self.rules, callbacks, parser.options.start), - self._match, resolve_ambiguity=True) - - def _build_recons_rules(self, rules): - expand1s = {r.origin for r in rules if r.options.expand1} - - aliases = defaultdict(list) - for r in rules: - if r.alias: - aliases[r.origin].append( r.alias ) - - rule_names = {r.origin for r in rules} - nonterminals = {sym for sym in rule_names - if sym.name.startswith('_') or sym in expand1s or sym in aliases } - - for r in rules: - _recons_exp = [] - for sym in r.expansion: - if not is_discarded_terminal(sym): - if sym in nonterminals: - if sym in expand1s: - v = Tree('expansions', [sym, Terminal(sym.name.upper())]) - else: - v = sym - else: - v = Terminal(sym.name.upper()) - _recons_exp.append(v) - - simplify_rule = SimplifyRule_Visitor() - rule_tree_to_text = RuleTreeToText() - tree = Tree('expansions', [Tree('expansion', _recons_exp)]) - simplify_rule.visit(tree) - expansions = rule_tree_to_text.transform(tree) - - for recons_exp, alias in expansions: - - # Skip self-recursive constructs - if recons_exp == [r.origin]: - continue - - sym = NonTerminal(r.alias) if r.alias else r.origin - - yield Rule(sym, recons_exp, alias=MakeMatchTree(sym.name, r.expansion)) - - for origin, rule_aliases in aliases.items(): - for alias in rule_aliases: - yield Rule(origin, [Terminal(alias.upper())], alias=MakeMatchTree(origin.name, [NonTerminal(alias)])) - yield Rule(origin, [Terminal(origin.name.upper())], alias=MakeMatchTree(origin.name, [origin])) - - def _match(self, term, token): - if isinstance(token, Tree): - return Terminal(token.data.upper()) == term - elif isinstance(token, Token): - return term == Terminal(token.type.upper()) - assert False - - def _reconstruct(self, tree): - # TODO: ambiguity? 
- unreduced_tree = self.parser.parse(tree.children, tree.data) # find a full derivation - assert unreduced_tree.data == tree.data - res = self.write_tokens.transform(unreduced_tree) - for item in res: - if isinstance(item, Tree): - for x in self._reconstruct(item): - yield x - else: - yield item - - def reconstruct(self, tree): - return ''.join(self._reconstruct(tree)) diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/tools/serialize.py b/vendor/poetry-core/poetry/core/_vendor/lark/tools/serialize.py deleted file mode 100644 index fb69d35a..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/lark/tools/serialize.py +++ /dev/null @@ -1,39 +0,0 @@ -import codecs -import sys -import json - -from lark import Lark -from lark.grammar import RuleOptions, Rule -from lark.lexer import TerminalDef - -import argparse - -argparser = argparse.ArgumentParser(prog='python -m lark.tools.serialize') #description='''Lark Serialization Tool -- Stores Lark's internal state & LALR analysis as a convenient JSON file''') - -argparser.add_argument('grammar_file', type=argparse.FileType('r'), help='A valid .lark file') -argparser.add_argument('-o', '--out', type=argparse.FileType('w'), default=sys.stdout, help='json file path to create (default=stdout)') -argparser.add_argument('-s', '--start', default='start', help='start symbol (default="start")', nargs='+') -argparser.add_argument('-l', '--lexer', default='standard', choices=['standard', 'contextual'], help='lexer type (default="standard")') - - -def serialize(infile, outfile, lexer, start): - lark_inst = Lark(infile, parser="lalr", lexer=lexer, start=start) # TODO contextual - - data, memo = lark_inst.memo_serialize([TerminalDef, Rule]) - outfile.write('{\n') - outfile.write(' "data": %s,\n' % json.dumps(data)) - outfile.write(' "memo": %s\n' % json.dumps(memo)) - outfile.write('}\n') - - -def main(): - if len(sys.argv) == 1 or '-h' in sys.argv or '--help' in sys.argv: - print("Lark Serialization Tool - Stores Lark's internal state & LALR analysis as a JSON file") - print("") - argparser.print_help() - else: - args = argparser.parse_args() - serialize(args.grammar_file, args.out, args.lexer, args.start) - -if __name__ == '__main__': - main() \ No newline at end of file diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/tools/standalone.py b/vendor/poetry-core/poetry/core/_vendor/lark/tools/standalone.py deleted file mode 100644 index 72042cda..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/lark/tools/standalone.py +++ /dev/null @@ -1,127 +0,0 @@ -###{standalone -# -# -# Lark Stand-alone Generator Tool -# ---------------------------------- -# Generates a stand-alone LALR(1) parser with a standard lexer -# -# Git: https://github.com/erezsh/lark -# Author: Erez Shinan (erezshin@gmail.com) -# -# -# >>> LICENSE -# -# This tool and its generated code use a separate license from Lark, -# and are subject to the terms of the Mozilla Public License, v. 2.0. -# If a copy of the MPL was not distributed with this -# file, You can obtain one at https://mozilla.org/MPL/2.0/. -# -# If you wish to purchase a commercial license for this tool and its -# generated code, you may contact me via email or otherwise. -# -# If MPL2 is incompatible with your free or open-source project, -# contact me and we'll work it out. 
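The header above introduces the stand-alone generator: it concatenates the `###{standalone ... ###}` sections of the files listed in EXTRACT_STANDALONE_FILES below and appends the serialized parse tables. A hedged usage sketch (grammar and module names are hypothetical):

    # Shell usage, mirroring the sys.argv handling in main() below:
    #   python -m lark.tools.standalone my_grammar.lark start > my_parser.py

    # The generated module is then importable without lark installed:
    from my_parser import Lark_StandAlone

    parser = Lark_StandAlone()   # signature per the generated code below
    tree = parser.parse("input text accepted by my_grammar")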
-# -# - -import os -from io import open -###} - -import codecs -import sys -import os -from pprint import pprint -from os import path -from collections import defaultdict - -import lark -from lark import Lark -from lark.parsers.lalr_analysis import Reduce - - -from lark.grammar import RuleOptions, Rule -from lark.lexer import TerminalDef - -_dir = path.dirname(__file__) -_larkdir = path.join(_dir, path.pardir) - - -EXTRACT_STANDALONE_FILES = [ - 'tools/standalone.py', - 'exceptions.py', - 'utils.py', - 'tree.py', - 'visitors.py', - 'indenter.py', - 'grammar.py', - 'lexer.py', - 'common.py', - 'parse_tree_builder.py', - 'parsers/lalr_parser.py', - 'parsers/lalr_analysis.py', - 'parser_frontends.py', - 'lark.py', -] - -def extract_sections(lines): - section = None - text = [] - sections = defaultdict(list) - for l in lines: - if l.startswith('###'): - if l[3] == '{': - section = l[4:].strip() - elif l[3] == '}': - sections[section] += text - section = None - text = [] - else: - raise ValueError(l) - elif section: - text.append(l) - - return {name:''.join(text) for name, text in sections.items()} - - -def main(fobj, start): - lark_inst = Lark(fobj, parser="lalr", lexer="contextual", start=start) - - print('# The file was automatically generated by Lark v%s' % lark.__version__) - - for pyfile in EXTRACT_STANDALONE_FILES: - with open(os.path.join(_larkdir, pyfile)) as f: - print (extract_sections(f)['standalone']) - - data, m = lark_inst.memo_serialize([TerminalDef, Rule]) - print( 'DATA = (' ) - # pprint(data, width=160) - print(data) - print(')') - print( 'MEMO = (') - print(m) - print(')') - - - print('Shift = 0') - print('Reduce = 1') - print("def Lark_StandAlone(transformer=None, postlex=None):") - print(" return Lark._load_from_dict(DATA, MEMO, transformer=transformer, postlex=postlex)") - - - -if __name__ == '__main__': - if len(sys.argv) < 2: - print("Lark Stand-alone Generator Tool") - print("Usage: python -m lark.tools.standalone <grammar-file> [<start>]") - sys.exit(1) - - if len(sys.argv) == 3: - fn, start = sys.argv[1:] - elif len(sys.argv) == 2: - fn, start = sys.argv[1], 'start' - else: - assert False, sys.argv - - with codecs.open(fn, encoding='utf8') as f: - main(f, start) diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/tree.py b/vendor/poetry-core/poetry/core/_vendor/lark/tree.py deleted file mode 100644 index f9767e43..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/lark/tree.py +++ /dev/null @@ -1,175 +0,0 @@ -try: - from future_builtins import filter -except ImportError: - pass - -from copy import deepcopy -from collections import OrderedDict - - -###{standalone -class Meta: - def __init__(self): - self.empty = True - -class Tree(object): - def __init__(self, data, children, meta=None): - self.data = data - self.children = children - self._meta = meta - - @property - def meta(self): - if self._meta is None: - self._meta = Meta() - return self._meta - - def __repr__(self): - return 'Tree(%s, %s)' % (self.data, self.children) - - def _pretty_label(self): - return self.data - - def _pretty(self, level, indent_str): - if len(self.children) == 1 and not isinstance(self.children[0], Tree): - return [ indent_str*level, self._pretty_label(), '\t', '%s' % (self.children[0],), '\n'] - - l = [ indent_str*level, self._pretty_label(), '\n' ] - for n in self.children: - if isinstance(n, Tree): - l += n._pretty(level+1, indent_str) - else: - l += [ indent_str*(level+1), '%s' % (n,), '\n' ] - - return l - - def pretty(self, indent_str=' '): - return ''.join(self._pretty(0, indent_str)) - - def
__eq__(self, other): - try: - return self.data == other.data and self.children == other.children - except AttributeError: - return False - - def __ne__(self, other): - return not (self == other) - - def __hash__(self): - return hash((self.data, tuple(self.children))) - - def iter_subtrees(self): - queue = [self] - subtrees = OrderedDict() - for subtree in queue: - subtrees[id(subtree)] = subtree - queue += [c for c in reversed(subtree.children) - if isinstance(c, Tree) and id(c) not in subtrees] - - del queue - return reversed(list(subtrees.values())) - - def find_pred(self, pred): - "Find all nodes where pred(tree) == True" - return filter(pred, self.iter_subtrees()) - - def find_data(self, data): - "Find all nodes where tree.data == data" - return self.find_pred(lambda t: t.data == data) - -###} - - def expand_kids_by_index(self, *indices): - "Expand (inline) children at the given indices" - for i in sorted(indices, reverse=True): # reverse so that changing tail won't affect indices - kid = self.children[i] - self.children[i:i+1] = kid.children - - def scan_values(self, pred): - for c in self.children: - if isinstance(c, Tree): - for t in c.scan_values(pred): - yield t - else: - if pred(c): - yield c - - def iter_subtrees_topdown(self): - stack = [self] - while stack: - node = stack.pop() - if not isinstance(node, Tree): - continue - yield node - for n in reversed(node.children): - stack.append(n) - - def __deepcopy__(self, memo): - return type(self)(self.data, deepcopy(self.children, memo), meta=self._meta) - - def copy(self): - return type(self)(self.data, self.children) - - def set(self, data, children): - self.data = data - self.children = children - - # XXX Deprecated! Here for backwards compatibility <0.6.0 - @property - def line(self): - return self.meta.line - @property - def column(self): - return self.meta.column - @property - def end_line(self): - return self.meta.end_line - @property - def end_column(self): - return self.meta.end_column - - -class SlottedTree(Tree): - __slots__ = 'data', 'children', 'rule', '_meta' - - -def pydot__tree_to_png(tree, filename, rankdir="LR", **kwargs): - """Creates a colorful image that represents the tree (data+children, without meta) - - Possible values for `rankdir` are "TB", "LR", "BT", "RL", corresponding to - directed graphs drawn from top to bottom, from left to right, from bottom to - top, and from right to left, respectively. - - `kwargs` can be any graph attribute (e. g. `dpi=200`). For a list of - possible attributes, see https://www.graphviz.org/doc/info/attrs.html. 
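A short usage sketch for the helper documented above, assuming the optional pydot dependency is installed (grammar and file name are illustrative):

    from lark import Lark
    from lark.tree import pydot__tree_to_png

    parser = Lark('start: WORD WORD\n%import common.WORD\n%ignore " "')
    tree = parser.parse("hello world")
    # rankdir and any extra keyword arguments are forwarded to pydot.Dot
    # as graph attributes, per the docstring above.
    pydot__tree_to_png(tree, "tree.png", rankdir="TB", dpi=200)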
- """ - - import pydot - graph = pydot.Dot(graph_type='digraph', rankdir=rankdir, **kwargs) - - i = [0] - - def new_leaf(leaf): - node = pydot.Node(i[0], label=repr(leaf)) - i[0] += 1 - graph.add_node(node) - return node - - def _to_pydot(subtree): - color = hash(subtree.data) & 0xffffff - color |= 0x808080 - - subnodes = [_to_pydot(child) if isinstance(child, Tree) else new_leaf(child) - for child in subtree.children] - node = pydot.Node(i[0], style="filled", fillcolor="#%x"%color, label=subtree.data) - i[0] += 1 - graph.add_node(node) - - for subnode in subnodes: - graph.add_edge(pydot.Edge(node, subnode)) - - return node - - _to_pydot(tree) - graph.write_png(filename) - diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/utils.py b/vendor/poetry-core/poetry/core/_vendor/lark/utils.py deleted file mode 100644 index 36f50d1e..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/lark/utils.py +++ /dev/null @@ -1,308 +0,0 @@ -import sys -import os -from functools import reduce -from ast import literal_eval -from collections import deque - -class fzset(frozenset): - def __repr__(self): - return '{%s}' % ', '.join(map(repr, self)) - - -def classify_bool(seq, pred): - true_elems = [] - false_elems = [] - - for elem in seq: - if pred(elem): - true_elems.append(elem) - else: - false_elems.append(elem) - - return true_elems, false_elems - - - -def bfs(initial, expand): - open_q = deque(list(initial)) - visited = set(open_q) - while open_q: - node = open_q.popleft() - yield node - for next_node in expand(node): - if next_node not in visited: - visited.add(next_node) - open_q.append(next_node) - - - - -def _serialize(value, memo): - if isinstance(value, Serialize): - return value.serialize(memo) - elif isinstance(value, list): - return [_serialize(elem, memo) for elem in value] - elif isinstance(value, frozenset): - return list(value) # TODO reversible? 
- elif isinstance(value, dict): - return {key:_serialize(elem, memo) for key, elem in value.items()} - return value - -###{standalone -def classify(seq, key=None, value=None): - d = {} - for item in seq: - k = key(item) if (key is not None) else item - v = value(item) if (value is not None) else item - if k in d: - d[k].append(v) - else: - d[k] = [v] - return d - - -def _deserialize(data, namespace, memo): - if isinstance(data, dict): - if '__type__' in data: # Object - class_ = namespace[data['__type__']] - return class_.deserialize(data, memo) - elif '@' in data: - return memo[data['@']] - return {key:_deserialize(value, namespace, memo) for key, value in data.items()} - elif isinstance(data, list): - return [_deserialize(value, namespace, memo) for value in data] - return data - - -class Serialize(object): - def memo_serialize(self, types_to_memoize): - memo = SerializeMemoizer(types_to_memoize) - return self.serialize(memo), memo.serialize() - - def serialize(self, memo=None): - if memo and memo.in_types(self): - return {'@': memo.memoized.get(self)} - - fields = getattr(self, '__serialize_fields__') - res = {f: _serialize(getattr(self, f), memo) for f in fields} - res['__type__'] = type(self).__name__ - postprocess = getattr(self, '_serialize', None) - if postprocess: - postprocess(res, memo) - return res - - @classmethod - def deserialize(cls, data, memo): - namespace = getattr(cls, '__serialize_namespace__', {}) - namespace = {c.__name__:c for c in namespace} - - fields = getattr(cls, '__serialize_fields__') - - if '@' in data: - return memo[data['@']] - - inst = cls.__new__(cls) - for f in fields: - try: - setattr(inst, f, _deserialize(data[f], namespace, memo)) - except KeyError as e: - raise KeyError("Cannot find key for class", cls, e) - postprocess = getattr(inst, '_deserialize', None) - if postprocess: - postprocess() - return inst - - -class SerializeMemoizer(Serialize): - __serialize_fields__ = 'memoized', - - def __init__(self, types_to_memoize): - self.types_to_memoize = tuple(types_to_memoize) - self.memoized = Enumerator() - - def in_types(self, value): - return isinstance(value, self.types_to_memoize) - - def serialize(self): - return _serialize(self.memoized.reversed(), None) - - @classmethod - def deserialize(cls, data, namespace, memo): - return _deserialize(data, namespace, memo) - - - -try: - STRING_TYPE = basestring -except NameError: # Python 3 - STRING_TYPE = str - - -import types -from functools import wraps, partial -from contextlib import contextmanager - -Str = type(u'') -try: - classtype = types.ClassType # Python2 -except AttributeError: - classtype = type # Python3 - -def smart_decorator(f, create_decorator): - if isinstance(f, types.FunctionType): - return wraps(f)(create_decorator(f, True)) - - elif isinstance(f, (classtype, type, types.BuiltinFunctionType)): - return wraps(f)(create_decorator(f, False)) - - elif isinstance(f, types.MethodType): - return wraps(f)(create_decorator(f.__func__, True)) - - elif isinstance(f, partial): - # wraps does not work for partials in 2.7: https://bugs.python.org/issue3445 - return wraps(f.func)(create_decorator(lambda *args, **kw: f(*args[1:], **kw), True)) - - else: - return create_decorator(f.__func__.__call__, True) - -try: - import regex -except ImportError: - regex = None - -import sys, re -Py36 = (sys.version_info[:2] >= (3, 6)) - -import sre_parse -import sre_constants -categ_pattern = re.compile(r'\\p{[A-Za-z_]+}') -def get_regexp_width(expr): - if regex: - # Since `sre_parse` cannot deal with Unicode 
categories of the form `\p{Mn}`, we replace these with - # a simple letter, which makes no difference as we are only trying to get the possible lengths of the regex - # match here below. - regexp_final = re.sub(categ_pattern, 'A', expr) - else: - if re.search(categ_pattern, expr): - raise ImportError('`regex` module must be installed in order to use Unicode categories.', expr) - regexp_final = expr - try: - return [int(x) for x in sre_parse.parse(regexp_final).getwidth()] - except sre_constants.error: - raise ValueError(expr) - -###} - - -def dedup_list(l): - """Given a list (l), removes duplicates from it, - preserving the original order of the list. Assumes that - the list entries are hashable.""" - dedup = set() - return [ x for x in l if not (x in dedup or dedup.add(x))] - - - - -try: - from contextlib import suppress # Python 3 -except ImportError: - @contextmanager - def suppress(*excs): - '''Catch and dismiss the provided exception - - >>> x = 'hello' - >>> with suppress(IndexError): - ... x = x[10] - >>> x - 'hello' - ''' - try: - yield - except excs: - pass - - - - -try: - compare = cmp -except NameError: - def compare(a, b): - if a == b: - return 0 - elif a > b: - return 1 - return -1 - - - -class Enumerator(Serialize): - def __init__(self): - self.enums = {} - - def get(self, item): - if item not in self.enums: - self.enums[item] = len(self.enums) - return self.enums[item] - - def __len__(self): - return len(self.enums) - - def reversed(self): - r = {v: k for k, v in self.enums.items()} - assert len(r) == len(self.enums) - return r - - -def eval_escaping(s): - w = '' - i = iter(s) - for n in i: - w += n - if n == '\\': - try: - n2 = next(i) - except StopIteration: - raise ValueError("Literal ended unexpectedly (bad escaping): `%r`" % s) - if n2 == '\\': - w += '\\\\' - elif n2 not in 'uxnftr': - w += '\\' - w += n2 - w = w.replace('\\"', '"').replace("'", "\\'") - - to_eval = "u'''%s'''" % w - try: - s = literal_eval(to_eval) - except SyntaxError as e: - raise ValueError(s, e) - - return s - - -def combine_alternatives(lists): - """ - Accepts a list of alternatives, and enumerates all their possible concatenations.
- - Examples: - >>> combine_alternatives([range(2), [4,5]]) - [[0, 4], [0, 5], [1, 4], [1, 5]] - - >>> combine_alternatives(["abc", "xy", '$']) - [['a', 'x', '$'], ['a', 'y', '$'], ['b', 'x', '$'], ['b', 'y', '$'], ['c', 'x', '$'], ['c', 'y', '$']] - - >>> combine_alternatives([]) - [[]] - """ - if not lists: - return [[]] - assert all(l for l in lists), lists - init = [[x] for x in lists[0]] - return reduce(lambda a,b: [i+[j] for i in a for j in b], lists[1:], init) - - - -class FS: - open = open - exists = os.path.exists \ No newline at end of file diff --git a/vendor/poetry-core/poetry/core/_vendor/lark/visitors.py b/vendor/poetry-core/poetry/core/_vendor/lark/visitors.py deleted file mode 100644 index c9f0e2dd..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/lark/visitors.py +++ /dev/null @@ -1,399 +0,0 @@ -from functools import wraps - -from .utils import smart_decorator, combine_alternatives -from .tree import Tree -from .exceptions import VisitError, GrammarError -from .lexer import Token - -###{standalone -from inspect import getmembers, getmro - -class Discard(Exception): - pass - -# Transformers - -class _Decoratable: - @classmethod - def _apply_decorator(cls, decorator, **kwargs): - mro = getmro(cls) - assert mro[0] is cls - libmembers = {name for _cls in mro[1:] for name, _ in getmembers(_cls)} - for name, value in getmembers(cls): - - # Make sure the function isn't inherited (unless it's overwritten) - if name.startswith('_') or (name in libmembers and name not in cls.__dict__): - continue - if not callable(value): - continue - - # Skip if v_args already applied (at the function level) - if hasattr(cls.__dict__[name], 'vargs_applied') or hasattr(value, 'vargs_applied'): - continue - - static = isinstance(cls.__dict__[name], (staticmethod, classmethod)) - setattr(cls, name, decorator(value, static=static, **kwargs)) - return cls - - def __class_getitem__(cls, _): - return cls - - -class Transformer(_Decoratable): - """Visits the tree recursively, starting with the leaves and finally the root (bottom-up) - - Calls its methods (provided by user via inheritance) according to tree.data - The returned value replaces the old one in the structure. - - Can be used to implement map or reduce. 
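A hedged sketch of the bottom-up contract just described (the grammar and class are illustrative, not part of the vendored code): each method is named after a `tree.data` value, receives the already-transformed children, and its return value replaces the node.

    from lark import Lark, Transformer

    class CalcTransformer(Transformer):
        def number(self, children):
            (token,) = children          # `number` has a single Token child
            return int(token)

        def add(self, children):
            left, right = children       # children are already transformed
            return left + right

    parser = Lark('''
        ?start: sum
        ?sum: sum "+" number -> add
            | number
        number: /[0-9]+/
    ''')
    assert CalcTransformer().transform(parser.parse("1+2+3")) == 6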
- """ - __visit_tokens__ = True # For backwards compatibility - - def __init__(self, visit_tokens=True): - self.__visit_tokens__ = visit_tokens - - def _call_userfunc(self, tree, new_children=None): - # Assumes tree is already transformed - children = new_children if new_children is not None else tree.children - try: - f = getattr(self, tree.data) - except AttributeError: - return self.__default__(tree.data, children, tree.meta) - else: - try: - wrapper = getattr(f, 'visit_wrapper', None) - if wrapper is not None: - return f.visit_wrapper(f, tree.data, children, tree.meta) - else: - return f(children) - except (GrammarError, Discard): - raise - except Exception as e: - raise VisitError(tree.data, tree, e) - - def _call_userfunc_token(self, token): - try: - f = getattr(self, token.type) - except AttributeError: - return self.__default_token__(token) - else: - try: - return f(token) - except (GrammarError, Discard): - raise - except Exception as e: - raise VisitError(token.type, token, e) - - - def _transform_children(self, children): - for c in children: - try: - if isinstance(c, Tree): - yield self._transform_tree(c) - elif self.__visit_tokens__ and isinstance(c, Token): - yield self._call_userfunc_token(c) - else: - yield c - except Discard: - pass - - def _transform_tree(self, tree): - children = list(self._transform_children(tree.children)) - return self._call_userfunc(tree, children) - - def transform(self, tree): - return self._transform_tree(tree) - - def __mul__(self, other): - return TransformerChain(self, other) - - def __default__(self, data, children, meta): - "Default operation on tree (for override)" - return Tree(data, children, meta) - - def __default_token__(self, token): - "Default operation on token (for override)" - return token - - - -class InlineTransformer(Transformer): # XXX Deprecated - def _call_userfunc(self, tree, new_children=None): - # Assumes tree is already transformed - children = new_children if new_children is not None else tree.children - try: - f = getattr(self, tree.data) - except AttributeError: - return self.__default__(tree.data, children, tree.meta) - else: - return f(*children) - - -class TransformerChain(object): - def __init__(self, *transformers): - self.transformers = transformers - - def transform(self, tree): - for t in self.transformers: - tree = t.transform(tree) - return tree - - def __mul__(self, other): - return TransformerChain(*self.transformers + (other,)) - - -class Transformer_InPlace(Transformer): - "Non-recursive. Changes the tree in-place instead of returning new instances" - def _transform_tree(self, tree): # Cancel recursion - return self._call_userfunc(tree) - - def transform(self, tree): - for subtree in tree.iter_subtrees(): - subtree.children = list(self._transform_children(subtree.children)) - - return self._transform_tree(tree) - - -class Transformer_NonRecursive(Transformer): - "Non-recursive. Doesn't change the original tree." - - def transform(self, tree): - # Tree to postfix - rev_postfix = [] - q = [tree] - while q: - t = q.pop() - rev_postfix.append( t ) - if isinstance(t, Tree): - q += t.children - - # Postfix to tree - stack = [] - for x in reversed(rev_postfix): - if isinstance(x, Tree): - size = len(x.children) - if size: - args = stack[-size:] - del stack[-size:] - else: - args = [] - stack.append(self._call_userfunc(x, args)) - else: - stack.append(x) - - t ,= stack # We should have only one tree remaining - return t - - - -class Transformer_InPlaceRecursive(Transformer): - "Recursive. 
Changes the tree in-place instead of returning new instances" - def _transform_tree(self, tree): - tree.children = list(self._transform_children(tree.children)) - return self._call_userfunc(tree) - - - -# Visitors - -class VisitorBase: - def _call_userfunc(self, tree): - return getattr(self, tree.data, self.__default__)(tree) - - def __default__(self, tree): - "Default operation on tree (for override)" - return tree - - def __class_getitem__(cls, _): - return cls - - -class Visitor(VisitorBase): - """Bottom-up visitor, non-recursive - - Visits the tree, starting with the leaves and finally the root (bottom-up) - Calls its methods (provided by user via inheritance) according to tree.data - """ - - def visit(self, tree): - for subtree in tree.iter_subtrees(): - self._call_userfunc(subtree) - return tree - - def visit_topdown(self,tree): - for subtree in tree.iter_subtrees_topdown(): - self._call_userfunc(subtree) - return tree - -class Visitor_Recursive(VisitorBase): - """Bottom-up visitor, recursive - - Visits the tree, starting with the leaves and finally the root (bottom-up) - Calls its methods (provided by user via inheritance) according to tree.data - """ - - def visit(self, tree): - for child in tree.children: - if isinstance(child, Tree): - self.visit(child) - - self._call_userfunc(tree) - return tree - - def visit_topdown(self,tree): - self._call_userfunc(tree) - - for child in tree.children: - if isinstance(child, Tree): - self.visit_topdown(child) - - return tree - - - -def visit_children_decor(func): - "See Interpreter" - @wraps(func) - def inner(cls, tree): - values = cls.visit_children(tree) - return func(cls, values) - return inner - - -class Interpreter(_Decoratable): - """Top-down visitor, recursive - - Visits the tree, starting with the root and finally the leaves (top-down) - Calls its methods (provided by user via inheritance) according to tree.data - - Unlike Transformer and Visitor, the Interpreter doesn't automatically visit its sub-branches. 
- The user has to explicitly call visit_children, or use the @visit_children_decor - """ - - def visit(self, tree): - f = getattr(self, tree.data) - wrapper = getattr(f, 'visit_wrapper', None) - if wrapper is not None: - return f.visit_wrapper(f, tree.data, tree.children, tree.meta) - else: - return f(tree) - - def visit_children(self, tree): - return [self.visit(child) if isinstance(child, Tree) else child - for child in tree.children] - - def __getattr__(self, name): - return self.__default__ - - def __default__(self, tree): - return self.visit_children(tree) - - - - -# Decorators - -def _apply_decorator(obj, decorator, **kwargs): - try: - _apply = obj._apply_decorator - except AttributeError: - return decorator(obj, **kwargs) - else: - return _apply(decorator, **kwargs) - - - -def _inline_args__func(func): - @wraps(func) - def create_decorator(_f, with_self): - if with_self: - def f(self, children): - return _f(self, *children) - else: - def f(self, children): - return _f(*children) - return f - - return smart_decorator(func, create_decorator) - - -def inline_args(obj): # XXX Deprecated - return _apply_decorator(obj, _inline_args__func) - - - -def _visitor_args_func_dec(func, visit_wrapper=None, static=False): - def create_decorator(_f, with_self): - if with_self: - def f(self, *args, **kwargs): - return _f(self, *args, **kwargs) - else: - def f(self, *args, **kwargs): - return _f(*args, **kwargs) - return f - - if static: - f = wraps(func)(create_decorator(func, False)) - else: - f = smart_decorator(func, create_decorator) - f.vargs_applied = True - f.visit_wrapper = visit_wrapper - return f - - -def _vargs_inline(f, data, children, meta): - return f(*children) -def _vargs_meta_inline(f, data, children, meta): - return f(meta, *children) -def _vargs_meta(f, data, children, meta): - return f(children, meta) # TODO swap these for consistency? Backwards incompatible! -def _vargs_tree(f, data, children, meta): - return f(Tree(data, children, meta)) - -def v_args(inline=False, meta=False, tree=False, wrapper=None): - "A convenience decorator factory, for modifying the behavior of user-supplied visitor methods" - if tree and (meta or inline): - raise ValueError("Visitor functions cannot combine 'tree' with 'meta' or 'inline'.") - - func = None - if meta: - if inline: - func = _vargs_meta_inline - else: - func = _vargs_meta - elif inline: - func = _vargs_inline - elif tree: - func = _vargs_tree - - if wrapper is not None: - if func is not None: - raise ValueError("Cannot use 'wrapper' along with 'tree', 'meta' or 'inline'.") - func = wrapper - - def _visitor_args_dec(obj): - return _apply_decorator(obj, _visitor_args_func_dec, visit_wrapper=func) - return _visitor_args_dec - - -###} - - -#--- Visitor Utilities --- - -class CollapseAmbiguities(Transformer): - """ - Transforms a tree that contains any number of _ambig nodes into a list of trees, - each one containing an unambiguous tree. - - The length of the resulting list is the product of the length of all _ambig nodes. - - Warning: This may quickly explode for highly ambiguous trees. 
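A usage sketch pairing this class with the explicit-ambiguity example given earlier for the forest visitors (only names defined in this deleted module are assumed):

    from lark import Lark
    from lark.visitors import CollapseAmbiguities

    parser = Lark('start: x\nx: x x | "a"', parser='earley', ambiguity='explicit')
    ambiguous = parser.parse("aaa")
    # One fully disambiguated tree per combination of _ambig choices; the
    # list length is the product of the branch counts, hence the warning.
    for option in CollapseAmbiguities().transform(ambiguous):
        print(option.pretty())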
- - """ - def _ambig(self, options): - return sum(options, []) - def __default__(self, data, children_lists, meta): - return [Tree(data, children, meta) for children in combine_alternatives(children_lists)] - def __default_token__(self, t): - return [t] diff --git a/vendor/poetry-core/poetry/core/_vendor/packaging/LICENSE b/vendor/poetry-core/poetry/core/_vendor/packaging/LICENSE deleted file mode 100644 index 6f62d44e..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/packaging/LICENSE +++ /dev/null @@ -1,3 +0,0 @@ -This software is made available under the terms of *either* of the licenses -found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made -under the terms of *both* these licenses. diff --git a/vendor/poetry-core/poetry/core/_vendor/packaging/LICENSE.APACHE b/vendor/poetry-core/poetry/core/_vendor/packaging/LICENSE.APACHE deleted file mode 100644 index f433b1a5..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/packaging/LICENSE.APACHE +++ /dev/null @@ -1,177 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS diff --git a/vendor/poetry-core/poetry/core/_vendor/packaging/LICENSE.BSD b/vendor/poetry-core/poetry/core/_vendor/packaging/LICENSE.BSD deleted file mode 100644 index 42ce7b75..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/packaging/LICENSE.BSD +++ /dev/null @@ -1,23 +0,0 @@ -Copyright (c) Donald Stufft and individual contributors. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - 1. Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/poetry-core/poetry/core/_vendor/packaging/__about__.py b/vendor/poetry-core/poetry/core/_vendor/packaging/__about__.py deleted file mode 100644 index 4c43a968..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/packaging/__about__.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -__all__ = [ - "__title__", - "__summary__", - "__uri__", - "__version__", - "__author__", - "__email__", - "__license__", - "__copyright__", -] - -__title__ = "packaging" -__summary__ = "Core utilities for Python packages" -__uri__ = "https://github.com/pypa/packaging" - -__version__ = "20.9" - -__author__ = "Donald Stufft and individual contributors" -__email__ = "donald@stufft.io" - -__license__ = "BSD-2-Clause or Apache-2.0" -__copyright__ = "2014-2019 %s" % __author__ diff --git a/vendor/poetry-core/poetry/core/_vendor/packaging/__init__.py b/vendor/poetry-core/poetry/core/_vendor/packaging/__init__.py deleted file mode 100644 index a0cf67df..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/packaging/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
-from __future__ import absolute_import, division, print_function - -from .__about__ import ( - __author__, - __copyright__, - __email__, - __license__, - __summary__, - __title__, - __uri__, - __version__, -) - -__all__ = [ - "__title__", - "__summary__", - "__uri__", - "__version__", - "__author__", - "__email__", - "__license__", - "__copyright__", -] diff --git a/vendor/poetry-core/poetry/core/_vendor/packaging/_compat.py b/vendor/poetry-core/poetry/core/_vendor/packaging/_compat.py deleted file mode 100644 index e54bd4ed..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/packaging/_compat.py +++ /dev/null @@ -1,38 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import sys - -from ._typing import TYPE_CHECKING - -if TYPE_CHECKING: # pragma: no cover - from typing import Any, Dict, Tuple, Type - - -PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 - -# flake8: noqa - -if PY3: - string_types = (str,) -else: - string_types = (basestring,) - - -def with_metaclass(meta, *bases): - # type: (Type[Any], Tuple[Type[Any], ...]) -> Any - """ - Create a base class with a metaclass. - """ - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. - class metaclass(meta): # type: ignore - def __new__(cls, name, this_bases, d): - # type: (Type[Any], str, Tuple[Any], Dict[Any, Any]) -> Any - return meta(name, bases, d) - - return type.__new__(metaclass, "temporary_class", (), {}) diff --git a/vendor/poetry-core/poetry/core/_vendor/packaging/_structures.py b/vendor/poetry-core/poetry/core/_vendor/packaging/_structures.py deleted file mode 100644 index 800d5c55..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/packaging/_structures.py +++ /dev/null @@ -1,86 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
-from __future__ import absolute_import, division, print_function - - -class InfinityType(object): - def __repr__(self): - # type: () -> str - return "Infinity" - - def __hash__(self): - # type: () -> int - return hash(repr(self)) - - def __lt__(self, other): - # type: (object) -> bool - return False - - def __le__(self, other): - # type: (object) -> bool - return False - - def __eq__(self, other): - # type: (object) -> bool - return isinstance(other, self.__class__) - - def __ne__(self, other): - # type: (object) -> bool - return not isinstance(other, self.__class__) - - def __gt__(self, other): - # type: (object) -> bool - return True - - def __ge__(self, other): - # type: (object) -> bool - return True - - def __neg__(self): - # type: (object) -> NegativeInfinityType - return NegativeInfinity - - -Infinity = InfinityType() - - -class NegativeInfinityType(object): - def __repr__(self): - # type: () -> str - return "-Infinity" - - def __hash__(self): - # type: () -> int - return hash(repr(self)) - - def __lt__(self, other): - # type: (object) -> bool - return True - - def __le__(self, other): - # type: (object) -> bool - return True - - def __eq__(self, other): - # type: (object) -> bool - return isinstance(other, self.__class__) - - def __ne__(self, other): - # type: (object) -> bool - return not isinstance(other, self.__class__) - - def __gt__(self, other): - # type: (object) -> bool - return False - - def __ge__(self, other): - # type: (object) -> bool - return False - - def __neg__(self): - # type: (object) -> InfinityType - return Infinity - - -NegativeInfinity = NegativeInfinityType() diff --git a/vendor/poetry-core/poetry/core/_vendor/packaging/_typing.py b/vendor/poetry-core/poetry/core/_vendor/packaging/_typing.py deleted file mode 100644 index 77a8b918..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/packaging/_typing.py +++ /dev/null @@ -1,48 +0,0 @@ -"""For neatly implementing static typing in packaging. - -`mypy` - the static type analysis tool we use - uses the `typing` module, which -provides core functionality fundamental to mypy's functioning. - -Generally, `typing` would be imported at runtime and used in that fashion - -it acts as a no-op at runtime and does not have any run-time overhead by -design. - -As it turns out, `typing` is not vendorable - it uses separate sources for -Python 2/Python 3. Thus, this codebase can not expect it to be present. -To work around this, mypy allows the typing import to be behind a False-y -optional to prevent it from running at runtime and type-comments can be used -to remove the need for the types to be accessible directly during runtime. - -This module provides the False-y guard in a nicely named fashion so that a -curious maintainer can reach here to read this. - -In packaging, all static-typing related imports should be guarded as follows: - - from packaging._typing import TYPE_CHECKING - - if TYPE_CHECKING: - from typing import ... - -Ref: https://github.com/python/mypy/issues/3216 -""" - -__all__ = ["TYPE_CHECKING", "cast"] - -# The TYPE_CHECKING constant defined by the typing module is False at runtime -# but True while type checking. -if False: # pragma: no cover - from typing import TYPE_CHECKING -else: - TYPE_CHECKING = False - -# typing's cast syntax requires calling typing.cast at runtime, but we don't -# want to import typing at runtime. Here, we inform the type checkers that -# we're importing `typing.cast` as `cast` and re-implement typing.cast's -# runtime behavior in a block that is ignored by type checkers. 
-if TYPE_CHECKING: # pragma: no cover - # not executed at runtime - from typing import cast -else: - # executed at runtime - def cast(type_, value): # noqa - return value diff --git a/vendor/poetry-core/poetry/core/_vendor/packaging/markers.py b/vendor/poetry-core/poetry/core/_vendor/packaging/markers.py deleted file mode 100644 index e0330ab6..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/packaging/markers.py +++ /dev/null @@ -1,336 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import operator -import os -import platform -import sys - -from pyparsing import ( # noqa: N817 - Forward, - Group, - Literal as L, - ParseException, - ParseResults, - QuotedString, - ZeroOrMore, - stringEnd, - stringStart, -) - -from ._compat import string_types -from ._typing import TYPE_CHECKING -from .specifiers import InvalidSpecifier, Specifier - -if TYPE_CHECKING: # pragma: no cover - from typing import Any, Callable, Dict, List, Optional, Tuple, Union - - Operator = Callable[[str, str], bool] - - -__all__ = [ - "InvalidMarker", - "UndefinedComparison", - "UndefinedEnvironmentName", - "Marker", - "default_environment", -] - - -class InvalidMarker(ValueError): - """ - An invalid marker was found, users should refer to PEP 508. - """ - - -class UndefinedComparison(ValueError): - """ - An invalid operation was attempted on a value that doesn't support it. - """ - - -class UndefinedEnvironmentName(ValueError): - """ - A name was attempted to be used that does not exist inside of the - environment. - """ - - -class Node(object): - def __init__(self, value): - # type: (Any) -> None - self.value = value - - def __str__(self): - # type: () -> str - return str(self.value) - - def __repr__(self): - # type: () -> str - return "<{0}({1!r})>".format(self.__class__.__name__, str(self)) - - def serialize(self): - # type: () -> str - raise NotImplementedError - - -class Variable(Node): - def serialize(self): - # type: () -> str - return str(self) - - -class Value(Node): - def serialize(self): - # type: () -> str - return '"{0}"'.format(self) - - -class Op(Node): - def serialize(self): - # type: () -> str - return str(self) - - -VARIABLE = ( - L("implementation_version") - | L("platform_python_implementation") - | L("implementation_name") - | L("python_full_version") - | L("platform_release") - | L("platform_version") - | L("platform_machine") - | L("platform_system") - | L("python_version") - | L("sys_platform") - | L("os_name") - | L("os.name") # PEP-345 - | L("sys.platform") # PEP-345 - | L("platform.version") # PEP-345 - | L("platform.machine") # PEP-345 - | L("platform.python_implementation") # PEP-345 - | L("python_implementation") # undocumented setuptools legacy - | L("extra") # PEP-508 -) -ALIASES = { - "os.name": "os_name", - "sys.platform": "sys_platform", - "platform.version": "platform_version", - "platform.machine": "platform_machine", - "platform.python_implementation": "platform_python_implementation", - "python_implementation": "platform_python_implementation", -} -VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) - -VERSION_CMP = ( - L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<") -) - -MARKER_OP = VERSION_CMP | L("not in") | L("in") -MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) - -MARKER_VALUE = QuotedString("'") | 
QuotedString('"') -MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) - -BOOLOP = L("and") | L("or") - -MARKER_VAR = VARIABLE | MARKER_VALUE - -MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) -MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) - -LPAREN = L("(").suppress() -RPAREN = L(")").suppress() - -MARKER_EXPR = Forward() -MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) -MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) - -MARKER = stringStart + MARKER_EXPR + stringEnd - - -def _coerce_parse_result(results): - # type: (Union[ParseResults, List[Any]]) -> List[Any] - if isinstance(results, ParseResults): - return [_coerce_parse_result(i) for i in results] - else: - return results - - -def _format_marker(marker, first=True): - # type: (Union[List[str], Tuple[Node, ...], str], Optional[bool]) -> str - - assert isinstance(marker, (list, tuple, string_types)) - - # Sometimes we have a structure like [[...]] which is a single item list - # where the single item is itself it's own list. In that case we want skip - # the rest of this function so that we don't get extraneous () on the - # outside. - if ( - isinstance(marker, list) - and len(marker) == 1 - and isinstance(marker[0], (list, tuple)) - ): - return _format_marker(marker[0]) - - if isinstance(marker, list): - inner = (_format_marker(m, first=False) for m in marker) - if first: - return " ".join(inner) - else: - return "(" + " ".join(inner) + ")" - elif isinstance(marker, tuple): - return " ".join([m.serialize() for m in marker]) - else: - return marker - - -_operators = { - "in": lambda lhs, rhs: lhs in rhs, - "not in": lambda lhs, rhs: lhs not in rhs, - "<": operator.lt, - "<=": operator.le, - "==": operator.eq, - "!=": operator.ne, - ">=": operator.ge, - ">": operator.gt, -} # type: Dict[str, Operator] - - -def _eval_op(lhs, op, rhs): - # type: (str, Op, str) -> bool - try: - spec = Specifier("".join([op.serialize(), rhs])) - except InvalidSpecifier: - pass - else: - return spec.contains(lhs) - - oper = _operators.get(op.serialize()) # type: Optional[Operator] - if oper is None: - raise UndefinedComparison( - "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs) - ) - - return oper(lhs, rhs) - - -class Undefined(object): - pass - - -_undefined = Undefined() - - -def _get_env(environment, name): - # type: (Dict[str, str], str) -> str - value = environment.get(name, _undefined) # type: Union[str, Undefined] - - if isinstance(value, Undefined): - raise UndefinedEnvironmentName( - "{0!r} does not exist in evaluation environment.".format(name) - ) - - return value - - -def _evaluate_markers(markers, environment): - # type: (List[Any], Dict[str, str]) -> bool - groups = [[]] # type: List[List[bool]] - - for marker in markers: - assert isinstance(marker, (list, tuple, string_types)) - - if isinstance(marker, list): - groups[-1].append(_evaluate_markers(marker, environment)) - elif isinstance(marker, tuple): - lhs, op, rhs = marker - - if isinstance(lhs, Variable): - lhs_value = _get_env(environment, lhs.value) - rhs_value = rhs.value - else: - lhs_value = lhs.value - rhs_value = _get_env(environment, rhs.value) - - groups[-1].append(_eval_op(lhs_value, op, rhs_value)) - else: - assert marker in ["and", "or"] - if marker == "or": - groups.append([]) - - return any(all(item) for item in groups) - - -def format_full_version(info): - # type: (sys._version_info) -> str - version = "{0.major}.{0.minor}.{0.micro}".format(info) - kind = info.releaselevel - if kind != "final": - version += 
kind[0] + str(info.serial)
-    return version
-
-
-def default_environment():
-    # type: () -> Dict[str, str]
-    if hasattr(sys, "implementation"):
-        # Ignoring the `sys.implementation` reference for type checking due to
-        # mypy not liking that the attribute doesn't exist in Python 2.7 when
-        # run with the `--py27` flag.
-        iver = format_full_version(sys.implementation.version)  # type: ignore
-        implementation_name = sys.implementation.name  # type: ignore
-    else:
-        iver = "0"
-        implementation_name = ""
-
-    return {
-        "implementation_name": implementation_name,
-        "implementation_version": iver,
-        "os_name": os.name,
-        "platform_machine": platform.machine(),
-        "platform_release": platform.release(),
-        "platform_system": platform.system(),
-        "platform_version": platform.version(),
-        "python_full_version": platform.python_version(),
-        "platform_python_implementation": platform.python_implementation(),
-        "python_version": ".".join(platform.python_version_tuple()[:2]),
-        "sys_platform": sys.platform,
-    }
-
-
-class Marker(object):
-    def __init__(self, marker):
-        # type: (str) -> None
-        try:
-            self._markers = _coerce_parse_result(MARKER.parseString(marker))
-        except ParseException as e:
-            err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
-                marker, marker[e.loc : e.loc + 8]
-            )
-            raise InvalidMarker(err_str)
-
-    def __str__(self):
-        # type: () -> str
-        return _format_marker(self._markers)
-
-    def __repr__(self):
-        # type: () -> str
-        return "<Marker({0!r})>".format(str(self))
-
-    def evaluate(self, environment=None):
-        # type: (Optional[Dict[str, str]]) -> bool
-        """Evaluate a marker.
-
-        Return the boolean from evaluating the given marker against the
-        environment. environment is an optional argument to override all or
-        part of the determined environment.
-
-        The environment is determined from the current Python process.
-        """
-        current_environment = default_environment()
-        if environment is not None:
-            current_environment.update(environment)
-
-        return _evaluate_markers(self._markers, current_environment)
diff --git a/vendor/poetry-core/poetry/core/_vendor/packaging/requirements.py b/vendor/poetry-core/poetry/core/_vendor/packaging/requirements.py
deleted file mode 100644
index aa69d50d..00000000
--- a/vendor/poetry-core/poetry/core/_vendor/packaging/requirements.py
+++ /dev/null
@@ -1,160 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-import re
-import string
-import sys
-
-from pyparsing import (  # noqa: N817
-    Combine,
-    Literal as L,
-    Optional,
-    ParseException,
-    Regex,
-    Word,
-    ZeroOrMore,
-    originalTextFor,
-    stringEnd,
-    stringStart,
-)
-
-from ._typing import TYPE_CHECKING
-from .markers import MARKER_EXPR, Marker
-from .specifiers import LegacySpecifier, Specifier, SpecifierSet
-
-if sys.version_info[0] >= 3:
-    from urllib import parse as urlparse  # pragma: no cover
-else:  # pragma: no cover
-    import urlparse
-
-
-if TYPE_CHECKING:  # pragma: no cover
-    from typing import List, Optional as TOptional, Set
-
-
-class InvalidRequirement(ValueError):
-    """
-    An invalid requirement was found, users should refer to PEP 508.
- """ - - -ALPHANUM = Word(string.ascii_letters + string.digits) - -LBRACKET = L("[").suppress() -RBRACKET = L("]").suppress() -LPAREN = L("(").suppress() -RPAREN = L(")").suppress() -COMMA = L(",").suppress() -SEMICOLON = L(";").suppress() -AT = L("@").suppress() - -PUNCTUATION = Word("-_.") -IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) -IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) - -NAME = IDENTIFIER("name") -EXTRA = IDENTIFIER - -URI = Regex(r"[^ ]+")("url") -URL = AT + URI - -EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) -EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") - -VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) -VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) - -VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY -VERSION_MANY = Combine( - VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False -)("_raw_spec") -_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)) -_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "") - -VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") -VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) - -MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") -MARKER_EXPR.setParseAction( - lambda s, l, t: Marker(s[t._original_start : t._original_end]) -) -MARKER_SEPARATOR = SEMICOLON -MARKER = MARKER_SEPARATOR + MARKER_EXPR - -VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) -URL_AND_MARKER = URL + Optional(MARKER) - -NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) - -REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd -# pyparsing isn't thread safe during initialization, so we do it eagerly, see -# issue #104 -REQUIREMENT.parseString("x[]") - - -class Requirement(object): - """Parse a requirement. - - Parse a given requirement string into its parts, such as name, specifier, - URL, and extras. Raises InvalidRequirement on a badly-formed requirement - string. - """ - - # TODO: Can we test whether something is contained within a requirement? - # If so how do we do that? Do we need to test against the _name_ of - # the thing as well as the version? What about the markers? - # TODO: Can we normalize the name and extra name? 
-
-    def __init__(self, requirement_string):
-        # type: (str) -> None
-        try:
-            req = REQUIREMENT.parseString(requirement_string)
-        except ParseException as e:
-            raise InvalidRequirement(
-                'Parse error at "{0!r}": {1}'.format(
-                    requirement_string[e.loc : e.loc + 8], e.msg
-                )
-            )
-
-        self.name = req.name  # type: str
-        if req.url:
-            parsed_url = urlparse.urlparse(req.url)
-            if parsed_url.scheme == "file":
-                if urlparse.urlunparse(parsed_url) != req.url:
-                    raise InvalidRequirement("Invalid URL given")
-            elif not (parsed_url.scheme and parsed_url.netloc) or (
-                not parsed_url.scheme and not parsed_url.netloc
-            ):
-                raise InvalidRequirement("Invalid URL: {0}".format(req.url))
-            self.url = req.url  # type: TOptional[str]
-        else:
-            self.url = None
-        self.extras = set(req.extras.asList() if req.extras else [])  # type: Set[str]
-        self.specifier = SpecifierSet(req.specifier)  # type: SpecifierSet
-        self.marker = req.marker if req.marker else None  # type: TOptional[Marker]
-
-    def __str__(self):
-        # type: () -> str
-        parts = [self.name]  # type: List[str]
-
-        if self.extras:
-            parts.append("[{0}]".format(",".join(sorted(self.extras))))
-
-        if self.specifier:
-            parts.append(str(self.specifier))
-
-        if self.url:
-            parts.append("@ {0}".format(self.url))
-            if self.marker:
-                parts.append(" ")
-
-        if self.marker:
-            parts.append("; {0}".format(self.marker))
-
-        return "".join(parts)
-
-    def __repr__(self):
-        # type: () -> str
-        return "<Requirement({0!r})>".format(str(self))
diff --git a/vendor/poetry-core/poetry/core/_vendor/packaging/specifiers.py b/vendor/poetry-core/poetry/core/_vendor/packaging/specifiers.py
deleted file mode 100644
index a6a83c1f..00000000
--- a/vendor/poetry-core/poetry/core/_vendor/packaging/specifiers.py
+++ /dev/null
@@ -1,864 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-import abc
-import functools
-import itertools
-import re
-import warnings
-
-from ._compat import string_types, with_metaclass
-from ._typing import TYPE_CHECKING
-from .utils import canonicalize_version
-from .version import LegacyVersion, Version, parse
-
-if TYPE_CHECKING:  # pragma: no cover
-    from typing import Callable, Dict, Iterable, Iterator, List, Optional, Tuple, Union
-
-    ParsedVersion = Union[Version, LegacyVersion]
-    UnparsedVersion = Union[Version, LegacyVersion, str]
-    CallableOperator = Callable[[ParsedVersion, str], bool]
-
-
-class InvalidSpecifier(ValueError):
-    """
-    An invalid specifier was found, users should refer to PEP 440.
-    """
-
-
-class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):  # type: ignore
-    @abc.abstractmethod
-    def __str__(self):
-        # type: () -> str
-        """
-        Returns the str representation of this Specifier like object. This
-        should be representative of the Specifier itself.
-        """
-
-    @abc.abstractmethod
-    def __hash__(self):
-        # type: () -> int
-        """
-        Returns a hash value for this Specifier like object.
-        """
-
-    @abc.abstractmethod
-    def __eq__(self, other):
-        # type: (object) -> bool
-        """
-        Returns a boolean representing whether or not the two Specifier like
-        objects are equal.
-        """
-
-    @abc.abstractmethod
-    def __ne__(self, other):
-        # type: (object) -> bool
-        """
-        Returns a boolean representing whether or not the two Specifier like
-        objects are not equal.
- """ - - @abc.abstractproperty - def prereleases(self): - # type: () -> Optional[bool] - """ - Returns whether or not pre-releases as a whole are allowed by this - specifier. - """ - - @prereleases.setter - def prereleases(self, value): - # type: (bool) -> None - """ - Sets whether or not pre-releases as a whole are allowed by this - specifier. - """ - - @abc.abstractmethod - def contains(self, item, prereleases=None): - # type: (str, Optional[bool]) -> bool - """ - Determines if the given item is contained within this specifier. - """ - - @abc.abstractmethod - def filter(self, iterable, prereleases=None): - # type: (Iterable[UnparsedVersion], Optional[bool]) -> Iterable[UnparsedVersion] - """ - Takes an iterable of items and filters them so that only items which - are contained within this specifier are allowed in it. - """ - - -class _IndividualSpecifier(BaseSpecifier): - - _operators = {} # type: Dict[str, str] - - def __init__(self, spec="", prereleases=None): - # type: (str, Optional[bool]) -> None - match = self._regex.search(spec) - if not match: - raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) - - self._spec = ( - match.group("operator").strip(), - match.group("version").strip(), - ) # type: Tuple[str, str] - - # Store whether or not this Specifier should accept prereleases - self._prereleases = prereleases - - def __repr__(self): - # type: () -> str - pre = ( - ", prereleases={0!r}".format(self.prereleases) - if self._prereleases is not None - else "" - ) - - return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre) - - def __str__(self): - # type: () -> str - return "{0}{1}".format(*self._spec) - - @property - def _canonical_spec(self): - # type: () -> Tuple[str, Union[Version, str]] - return self._spec[0], canonicalize_version(self._spec[1]) - - def __hash__(self): - # type: () -> int - return hash(self._canonical_spec) - - def __eq__(self, other): - # type: (object) -> bool - if isinstance(other, string_types): - try: - other = self.__class__(str(other)) - except InvalidSpecifier: - return NotImplemented - elif not isinstance(other, self.__class__): - return NotImplemented - - return self._canonical_spec == other._canonical_spec - - def __ne__(self, other): - # type: (object) -> bool - if isinstance(other, string_types): - try: - other = self.__class__(str(other)) - except InvalidSpecifier: - return NotImplemented - elif not isinstance(other, self.__class__): - return NotImplemented - - return self._spec != other._spec - - def _get_operator(self, op): - # type: (str) -> CallableOperator - operator_callable = getattr( - self, "_compare_{0}".format(self._operators[op]) - ) # type: CallableOperator - return operator_callable - - def _coerce_version(self, version): - # type: (UnparsedVersion) -> ParsedVersion - if not isinstance(version, (LegacyVersion, Version)): - version = parse(version) - return version - - @property - def operator(self): - # type: () -> str - return self._spec[0] - - @property - def version(self): - # type: () -> str - return self._spec[1] - - @property - def prereleases(self): - # type: () -> Optional[bool] - return self._prereleases - - @prereleases.setter - def prereleases(self, value): - # type: (bool) -> None - self._prereleases = value - - def __contains__(self, item): - # type: (str) -> bool - return self.contains(item) - - def contains(self, item, prereleases=None): - # type: (UnparsedVersion, Optional[bool]) -> bool - - # Determine if prereleases are to be allowed or not. 
-        if prereleases is None:
-            prereleases = self.prereleases
-
-        # Normalize item to a Version or LegacyVersion, this allows us to have
-        # a shortcut for ``"2.0" in Specifier(">=2")
-        normalized_item = self._coerce_version(item)
-
-        # Determine if we should be supporting prereleases in this specifier
-        # or not, if we do not support prereleases than we can short circuit
-        # logic if this version is a prereleases.
-        if normalized_item.is_prerelease and not prereleases:
-            return False
-
-        # Actually do the comparison to determine if this item is contained
-        # within this Specifier or not.
-        operator_callable = self._get_operator(self.operator)  # type: CallableOperator
-        return operator_callable(normalized_item, self.version)
-
-    def filter(self, iterable, prereleases=None):
-        # type: (Iterable[UnparsedVersion], Optional[bool]) -> Iterable[UnparsedVersion]
-
-        yielded = False
-        found_prereleases = []
-
-        kw = {"prereleases": prereleases if prereleases is not None else True}
-
-        # Attempt to iterate over all the values in the iterable and if any of
-        # them match, yield them.
-        for version in iterable:
-            parsed_version = self._coerce_version(version)
-
-            if self.contains(parsed_version, **kw):
-                # If our version is a prerelease, and we were not set to allow
-                # prereleases, then we'll store it for later incase nothing
-                # else matches this specifier.
-                if parsed_version.is_prerelease and not (
-                    prereleases or self.prereleases
-                ):
-                    found_prereleases.append(version)
-                # Either this is not a prerelease, or we should have been
-                # accepting prereleases from the beginning.
-                else:
-                    yielded = True
-                    yield version
-
-        # Now that we've iterated over everything, determine if we've yielded
-        # any values, and if we have not and we have any prereleases stored up
-        # then we will go ahead and yield the prereleases.
-        if not yielded and found_prereleases:
-            for version in found_prereleases:
-                yield version
-
-
-class LegacySpecifier(_IndividualSpecifier):
-
-    _regex_str = r"""
-        (?P<operator>(==|!=|<=|>=|<|>))
-        \s*
-        (?P<version>
-            [^,;\s)]* # Since this is a "legacy" specifier, and the version
-                      # string can be just about anything, we match everything
-                      # except for whitespace, a semi-colon for marker support,
-                      # a closing paren since versions can be enclosed in
-                      # them, and a comma since it's a version separator.
-        )
-        """
-
-    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
-
-    _operators = {
-        "==": "equal",
-        "!=": "not_equal",
-        "<=": "less_than_equal",
-        ">=": "greater_than_equal",
-        "<": "less_than",
-        ">": "greater_than",
-    }
-
-    def __init__(self, spec="", prereleases=None):
-        # type: (str, Optional[bool]) -> None
-        super(LegacySpecifier, self).__init__(spec, prereleases)
-
-        warnings.warn(
-            "Creating a LegacyVersion has been deprecated and will be "
-            "removed in the next major release",
-            DeprecationWarning,
-        )
-
-    def _coerce_version(self, version):
-        # type: (Union[ParsedVersion, str]) -> LegacyVersion
-        if not isinstance(version, LegacyVersion):
-            version = LegacyVersion(str(version))
-        return version
-
-    def _compare_equal(self, prospective, spec):
-        # type: (LegacyVersion, str) -> bool
-        return prospective == self._coerce_version(spec)
-
-    def _compare_not_equal(self, prospective, spec):
-        # type: (LegacyVersion, str) -> bool
-        return prospective != self._coerce_version(spec)
-
-    def _compare_less_than_equal(self, prospective, spec):
-        # type: (LegacyVersion, str) -> bool
-        return prospective <= self._coerce_version(spec)
-
-    def _compare_greater_than_equal(self, prospective, spec):
-        # type: (LegacyVersion, str) -> bool
-        return prospective >= self._coerce_version(spec)
-
-    def _compare_less_than(self, prospective, spec):
-        # type: (LegacyVersion, str) -> bool
-        return prospective < self._coerce_version(spec)
-
-    def _compare_greater_than(self, prospective, spec):
-        # type: (LegacyVersion, str) -> bool
-        return prospective > self._coerce_version(spec)
-
-
-def _require_version_compare(
-    fn,  # type: (Callable[[Specifier, ParsedVersion, str], bool])
-):
-    # type: (...) -> Callable[[Specifier, ParsedVersion, str], bool]
-    @functools.wraps(fn)
-    def wrapped(self, prospective, spec):
-        # type: (Specifier, ParsedVersion, str) -> bool
-        if not isinstance(prospective, Version):
-            return False
-        return fn(self, prospective, spec)
-
-    return wrapped
-
-
-class Specifier(_IndividualSpecifier):
-
-    _regex_str = r"""
-        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
-        (?P<version>
-            (?:
-                # The identity operators allow for an escape hatch that will
-                # do an exact string match of the version you wish to install.
-                # This will not be parsed by PEP 440 and we cannot determine
-                # any semantic meaning from it. This operator is discouraged
-                # but included entirely as an escape hatch.
-                (?<====)  # Only match for the identity operator
-                \s*
-                [^\s]*    # We just match everything, except for whitespace
-                          # since we are only testing for strict identity.
-            )
-            |
-            (?:
-                # The (non)equality operators allow for wild card and local
-                # versions to be specified so we have to define these two
-                # operators separately to enable that.
-                (?<===|!=)            # Only match for equals and not equals
-
-                \s*
-                v?
-                (?:[0-9]+!)?          # epoch
-                [0-9]+(?:\.[0-9]+)*   # release
-                (?:                   # pre release
-                    [-_\.]?
-                    (a|b|c|rc|alpha|beta|pre|preview)
-                    [-_\.]?
-                    [0-9]*
-                )?
-                (?:                                   # post release
-                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
-                )?
-
-                # You cannot use a wild card and a dev or local version
-                # together so group them with a | and make them optional.
-                (?:
-                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
-                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
-                    |
-                    \.\*  # Wild card syntax of .*
-                )?
-            )
-            |
-            (?:
-                # The compatible operator requires at least two digits in the
-                # release segment.
-                (?<=~=)               # Only match for the compatible operator
-
-                \s*
-                v?
-                (?:[0-9]+!)?          # epoch
-                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
-                (?:                   # pre release
-                    [-_\.]?
-                    (a|b|c|rc|alpha|beta|pre|preview)
-                    [-_\.]?
-                    [0-9]*
-                )?
-                (?:                                   # post release
-                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
-                )?
-                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
-            )
-            |
-            (?:
-                # All other operators only allow a sub set of what the
-                # (non)equality operators do. Specifically they do not allow
-                # local versions to be specified nor do they allow the prefix
-                # matching wild cards.
-                (?<!==|!=|~=)         # We have special cases for these
-                                      # operators so we want to make sure they
-                                      # don't match here.
-
-                \s*
-                v?
-                (?:[0-9]+!)?          # epoch
-                [0-9]+(?:\.[0-9]+)*   # release
-                (?:                   # pre release
-                    [-_\.]?
-                    (a|b|c|rc|alpha|beta|pre|preview)
-                    [-_\.]?
-                    [0-9]*
-                )?
-                (?:                                   # post release
-                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
-                )?
-                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
-            )
-        )
-        """
-
-    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
-
-    _operators = {
-        "~=": "compatible",
-        "==": "equal",
-        "!=": "not_equal",
-        "<=": "less_than_equal",
-        ">=": "greater_than_equal",
-        "<": "less_than",
-        ">": "greater_than",
-        "===": "arbitrary",
-    }
-
-    @_require_version_compare
-    def _compare_compatible(self, prospective, spec):
-        # type: (ParsedVersion, str) -> bool
-
-        # Compatible releases have an equivalent combination of >= and ==. That
-        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
-        # implement this in terms of the other specifiers instead of
-        # implementing it ourselves. The only thing we need to do is construct
-        # the other specifiers.
-
-        # We want everything but the last item in the version, but we want to
-        # ignore post and dev releases and we want to treat the pre-release as
-        # it's own separate segment.
-        prefix = ".".join(
-            list(
-                itertools.takewhile(
-                    lambda x: (not x.startswith("post") and not x.startswith("dev")),
-                    _version_split(spec),
-                )
-            )[:-1]
-        )
-
-        # Add the prefix notation to the end of our string
-        prefix += ".*"
-
-        return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
-            prospective, prefix
-        )
-
-    @_require_version_compare
-    def _compare_equal(self, prospective, spec):
-        # type: (ParsedVersion, str) -> bool
-
-        # We need special logic to handle prefix matching
-        if spec.endswith(".*"):
-            # In the case of prefix matching we want to ignore local segment.
-            prospective = Version(prospective.public)
-            # Split the spec out by dots, and pretend that there is an implicit
-            # dot in between a release segment and a pre-release segment.
-            split_spec = _version_split(spec[:-2])  # Remove the trailing .*
-
-            # Split the prospective version out by dots, and pretend that there
-            # is an implicit dot in between a release segment and a pre-release
-            # segment.
-            split_prospective = _version_split(str(prospective))
-
-            # Shorten the prospective version to be the same length as the spec
-            # so that we can determine if the specifier is a prefix of the
-            # prospective version or not.
-            shortened_prospective = split_prospective[: len(split_spec)]
-
-            # Pad out our two sides with zeros so that they both equal the same
-            # length.
-            padded_spec, padded_prospective = _pad_version(
-                split_spec, shortened_prospective
-            )
-
-            return padded_prospective == padded_spec
-        else:
-            # Convert our spec string into a Version
-            spec_version = Version(spec)
-
-            # If the specifier does not have a local segment, then we want to
-            # act as if the prospective version also does not have a local
-            # segment.
-            if not spec_version.local:
-                prospective = Version(prospective.public)
-
-            return prospective == spec_version
-
-    @_require_version_compare
-    def _compare_not_equal(self, prospective, spec):
-        # type: (ParsedVersion, str) -> bool
-        return not self._compare_equal(prospective, spec)
-
-    @_require_version_compare
-    def _compare_less_than_equal(self, prospective, spec):
-        # type: (ParsedVersion, str) -> bool
-
-        # NB: Local version identifiers are NOT permitted in the version
-        # specifier, so local version labels can be universally removed from
-        # the prospective version.
- return Version(prospective.public) <= Version(spec) - - @_require_version_compare - def _compare_greater_than_equal(self, prospective, spec): - # type: (ParsedVersion, str) -> bool - - # NB: Local version identifiers are NOT permitted in the version - # specifier, so local version labels can be universally removed from - # the prospective version. - return Version(prospective.public) >= Version(spec) - - @_require_version_compare - def _compare_less_than(self, prospective, spec_str): - # type: (ParsedVersion, str) -> bool - - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec_str) - - # Check to see if the prospective version is less than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. - if not prospective < spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a pre-release version, that we do not accept pre-release - # versions for the version mentioned in the specifier (e.g. <3.1 should - # not match 3.1.dev0, but should match 3.0.dev0). - if not spec.is_prerelease and prospective.is_prerelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # less than the spec version *and* it's not a pre-release of the same - # version in the spec. - return True - - @_require_version_compare - def _compare_greater_than(self, prospective, spec_str): - # type: (ParsedVersion, str) -> bool - - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec_str) - - # Check to see if the prospective version is greater than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. - if not prospective > spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a post-release version, that we do not accept - # post-release versions for the version mentioned in the specifier - # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). - if not spec.is_postrelease and prospective.is_postrelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # Ensure that we do not allow a local version of the version mentioned - # in the specifier, which is technically greater than, to match. - if prospective.local is not None: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # greater than the spec version *and* it's not a pre-release of the - # same version in the spec. - return True - - def _compare_arbitrary(self, prospective, spec): - # type: (Version, str) -> bool - return str(prospective).lower() == str(spec).lower() - - @property - def prereleases(self): - # type: () -> bool - - # If there is an explicit prereleases set for this, then we'll just - # blindly use that. - if self._prereleases is not None: - return self._prereleases - - # Look at all of our specifiers and determine if they are inclusive - # operators, and if they are if they are including an explicit - # prerelease. - operator, version = self._spec - if operator in ["==", ">=", "<=", "~=", "==="]: - # The == specifier can include a trailing .*, if it does we - # want to remove before parsing. 
- if operator == "==" and version.endswith(".*"): - version = version[:-2] - - # Parse the version, and if it is a pre-release than this - # specifier allows pre-releases. - if parse(version).is_prerelease: - return True - - return False - - @prereleases.setter - def prereleases(self, value): - # type: (bool) -> None - self._prereleases = value - - -_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") - - -def _version_split(version): - # type: (str) -> List[str] - result = [] # type: List[str] - for item in version.split("."): - match = _prefix_regex.search(item) - if match: - result.extend(match.groups()) - else: - result.append(item) - return result - - -def _pad_version(left, right): - # type: (List[str], List[str]) -> Tuple[List[str], List[str]] - left_split, right_split = [], [] - - # Get the release segment of our versions - left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) - right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) - - # Get the rest of our versions - left_split.append(left[len(left_split[0]) :]) - right_split.append(right[len(right_split[0]) :]) - - # Insert our padding - left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0]))) - right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0]))) - - return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split))) - - -class SpecifierSet(BaseSpecifier): - def __init__(self, specifiers="", prereleases=None): - # type: (str, Optional[bool]) -> None - - # Split on , to break each individual specifier into it's own item, and - # strip each item to remove leading/trailing whitespace. - split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] - - # Parsed each individual specifier, attempting first to make it a - # Specifier and falling back to a LegacySpecifier. - parsed = set() - for specifier in split_specifiers: - try: - parsed.add(Specifier(specifier)) - except InvalidSpecifier: - parsed.add(LegacySpecifier(specifier)) - - # Turn our parsed specifiers into a frozen set and save them for later. - self._specs = frozenset(parsed) - - # Store our prereleases value so we can use it later to determine if - # we accept prereleases or not. - self._prereleases = prereleases - - def __repr__(self): - # type: () -> str - pre = ( - ", prereleases={0!r}".format(self.prereleases) - if self._prereleases is not None - else "" - ) - - return "".format(str(self), pre) - - def __str__(self): - # type: () -> str - return ",".join(sorted(str(s) for s in self._specs)) - - def __hash__(self): - # type: () -> int - return hash(self._specs) - - def __and__(self, other): - # type: (Union[SpecifierSet, str]) -> SpecifierSet - if isinstance(other, string_types): - other = SpecifierSet(other) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - specifier = SpecifierSet() - specifier._specs = frozenset(self._specs | other._specs) - - if self._prereleases is None and other._prereleases is not None: - specifier._prereleases = other._prereleases - elif self._prereleases is not None and other._prereleases is None: - specifier._prereleases = self._prereleases - elif self._prereleases == other._prereleases: - specifier._prereleases = self._prereleases - else: - raise ValueError( - "Cannot combine SpecifierSets with True and False prerelease " - "overrides." 
-                )
-
-        return specifier
-
-    def __eq__(self, other):
-        # type: (object) -> bool
-        if isinstance(other, (string_types, _IndividualSpecifier)):
-            other = SpecifierSet(str(other))
-        elif not isinstance(other, SpecifierSet):
-            return NotImplemented
-
-        return self._specs == other._specs
-
-    def __ne__(self, other):
-        # type: (object) -> bool
-        if isinstance(other, (string_types, _IndividualSpecifier)):
-            other = SpecifierSet(str(other))
-        elif not isinstance(other, SpecifierSet):
-            return NotImplemented
-
-        return self._specs != other._specs
-
-    def __len__(self):
-        # type: () -> int
-        return len(self._specs)
-
-    def __iter__(self):
-        # type: () -> Iterator[_IndividualSpecifier]
-        return iter(self._specs)
-
-    @property
-    def prereleases(self):
-        # type: () -> Optional[bool]
-
-        # If we have been given an explicit prerelease modifier, then we'll
-        # pass that through here.
-        if self._prereleases is not None:
-            return self._prereleases
-
-        # If we don't have any specifiers, and we don't have a forced value,
-        # then we'll just return None since we don't know if this should have
-        # pre-releases or not.
-        if not self._specs:
-            return None
-
-        # Otherwise we'll see if any of the given specifiers accept
-        # prereleases, if any of them do we'll return True, otherwise False.
-        return any(s.prereleases for s in self._specs)
-
-    @prereleases.setter
-    def prereleases(self, value):
-        # type: (bool) -> None
-        self._prereleases = value
-
-    def __contains__(self, item):
-        # type: (Union[ParsedVersion, str]) -> bool
-        return self.contains(item)
-
-    def contains(self, item, prereleases=None):
-        # type: (Union[ParsedVersion, str], Optional[bool]) -> bool
-
-        # Ensure that our item is a Version or LegacyVersion instance.
-        if not isinstance(item, (LegacyVersion, Version)):
-            item = parse(item)
-
-        # Determine if we're forcing a prerelease or not, if we're not forcing
-        # one for this particular filter call, then we'll use whatever the
-        # SpecifierSet thinks for whether or not we should support prereleases.
-        if prereleases is None:
-            prereleases = self.prereleases
-
-        # We can determine if we're going to allow pre-releases by looking to
-        # see if any of the underlying items supports them. If none of them do
-        # and this item is a pre-release then we do not allow it and we can
-        # short circuit that here.
-        # Note: This means that 1.0.dev1 would not be contained in something
-        #       like >=1.0.devabc however it would be in >=1.0.devabc,>0.0.dev0
-        if not prereleases and item.is_prerelease:
-            return False
-
-        # We simply dispatch to the underlying specs here to make sure that the
-        # given version is contained within all of them.
-        # Note: This use of all() here means that an empty set of specifiers
-        #       will always return True, this is an explicit design decision.
-        return all(s.contains(item, prereleases=prereleases) for s in self._specs)
-
-    def filter(
-        self,
-        iterable,  # type: Iterable[Union[ParsedVersion, str]]
-        prereleases=None,  # type: Optional[bool]
-    ):
-        # type: (...) -> Iterable[Union[ParsedVersion, str]]
-
-        # Determine if we're forcing a prerelease or not, if we're not forcing
-        # one for this particular filter call, then we'll use whatever the
-        # SpecifierSet thinks for whether or not we should support prereleases.
-        if prereleases is None:
-            prereleases = self.prereleases
-
-        # If we have any specifiers, then we want to wrap our iterable in the
-        # filter method for each one, this will act as a logical AND amongst
-        # each specifier.
-        if self._specs:
-            for spec in self._specs:
-                iterable = spec.filter(iterable, prereleases=bool(prereleases))
-            return iterable
-        # If we do not have any specifiers, then we need to have a rough filter
-        # which will filter out any pre-releases, unless there are no final
-        # releases, and which will filter out LegacyVersion in general.
-        else:
-            filtered = []  # type: List[Union[ParsedVersion, str]]
-            found_prereleases = []  # type: List[Union[ParsedVersion, str]]
-
-            for item in iterable:
-                # Ensure that we have some kind of Version class for this item.
-                if not isinstance(item, (LegacyVersion, Version)):
-                    parsed_version = parse(item)
-                else:
-                    parsed_version = item
-
-                # Filter out any item which is parsed as a LegacyVersion
-                if isinstance(parsed_version, LegacyVersion):
-                    continue
-
-                # Store any item which is a pre-release for later unless we've
-                # already found a final version or we are accepting prereleases
-                if parsed_version.is_prerelease and not prereleases:
-                    if not filtered:
-                        found_prereleases.append(item)
-                else:
-                    filtered.append(item)
-
-            # If we've found no items except for pre-releases, then we'll go
-            # ahead and use the pre-releases
-            if not filtered and found_prereleases and prereleases is None:
-                return found_prereleases
-
-            return filtered
diff --git a/vendor/poetry-core/poetry/core/_vendor/packaging/tags.py b/vendor/poetry-core/poetry/core/_vendor/packaging/tags.py
deleted file mode 100644
index d637f1b6..00000000
--- a/vendor/poetry-core/poetry/core/_vendor/packaging/tags.py
+++ /dev/null
@@ -1,866 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-
-from __future__ import absolute_import
-
-import distutils.util
-
-try:
-    from importlib.machinery import EXTENSION_SUFFIXES
-except ImportError:  # pragma: no cover
-    import imp
-
-    EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()]
-    del imp
-import collections
-import logging
-import os
-import platform
-import re
-import struct
-import sys
-import sysconfig
-import warnings
-
-from ._typing import TYPE_CHECKING, cast
-
-if TYPE_CHECKING:  # pragma: no cover
-    from typing import (
-        IO,
-        Dict,
-        FrozenSet,
-        Iterable,
-        Iterator,
-        List,
-        Optional,
-        Sequence,
-        Tuple,
-        Union,
-    )
-
-    PythonVersion = Sequence[int]
-    MacVersion = Tuple[int, int]
-    GlibcVersion = Tuple[int, int]
-
-
-logger = logging.getLogger(__name__)
-
-INTERPRETER_SHORT_NAMES = {
-    "python": "py",  # Generic.
-    "cpython": "cp",
-    "pypy": "pp",
-    "ironpython": "ip",
-    "jython": "jy",
-}  # type: Dict[str, str]
-
-
-_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32
-
-
-_LEGACY_MANYLINUX_MAP = {
-    # CentOS 7 w/ glibc 2.17 (PEP 599)
-    (2, 17): "manylinux2014",
-    # CentOS 6 w/ glibc 2.12 (PEP 571)
-    (2, 12): "manylinux2010",
-    # CentOS 5 w/ glibc 2.5 (PEP 513)
-    (2, 5): "manylinux1",
-}
-
-# If glibc ever changes its major version, we need to know what the last
-# minor version was, so we can build the complete list of all versions.
-# For now, guess what the highest minor version might be, assume it will
-# be 50 for testing. Once this actually happens, update the dictionary
-# with the actual value.
-_LAST_GLIBC_MINOR = collections.defaultdict(lambda: 50)  # type: Dict[int, int]
-glibcVersion = collections.namedtuple("Version", ["major", "minor"])
-
-
-class Tag(object):
-    """
-    A representation of the tag triple for a wheel.
-
-    Instances are considered immutable and thus are hashable. Equality checking
-    is also supported.
-    """
-
-    __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]
-
-    def __init__(self, interpreter, abi, platform):
-        # type: (str, str, str) -> None
-        self._interpreter = interpreter.lower()
-        self._abi = abi.lower()
-        self._platform = platform.lower()
-        # The __hash__ of every single element in a Set[Tag] will be evaluated each time
-        # that a set calls its `.isdisjoint()` method, which may be called hundreds of
-        # times when scanning a page of links for packages with tags matching that
-        # Set[Tag]. Pre-computing the value here produces significant speedups for
-        # downstream consumers.
-        self._hash = hash((self._interpreter, self._abi, self._platform))
-
-    @property
-    def interpreter(self):
-        # type: () -> str
-        return self._interpreter
-
-    @property
-    def abi(self):
-        # type: () -> str
-        return self._abi
-
-    @property
-    def platform(self):
-        # type: () -> str
-        return self._platform
-
-    def __eq__(self, other):
-        # type: (object) -> bool
-        if not isinstance(other, Tag):
-            return NotImplemented
-
-        return (
-            (self.platform == other.platform)
-            and (self.abi == other.abi)
-            and (self.interpreter == other.interpreter)
-        )
-
-    def __hash__(self):
-        # type: () -> int
-        return self._hash
-
-    def __str__(self):
-        # type: () -> str
-        return "{}-{}-{}".format(self._interpreter, self._abi, self._platform)
-
-    def __repr__(self):
-        # type: () -> str
-        return "<{self} @ {self_id}>".format(self=self, self_id=id(self))
-
-
-def parse_tag(tag):
-    # type: (str) -> FrozenSet[Tag]
-    """
-    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.
-
-    Returning a set is required due to the possibility that the tag is a
-    compressed tag set.
-    """
-    tags = set()
-    interpreters, abis, platforms = tag.split("-")
-    for interpreter in interpreters.split("."):
-        for abi in abis.split("."):
-            for platform_ in platforms.split("."):
-                tags.add(Tag(interpreter, abi, platform_))
-    return frozenset(tags)
-
-
-def _warn_keyword_parameter(func_name, kwargs):
-    # type: (str, Dict[str, bool]) -> bool
-    """
-    Backwards-compatibility with Python 2.7 to allow treating 'warn' as keyword-only.
-    """
-    if not kwargs:
-        return False
-    elif len(kwargs) > 1 or "warn" not in kwargs:
-        kwargs.pop("warn", None)
-        arg = next(iter(kwargs.keys()))
-        raise TypeError(
-            "{}() got an unexpected keyword argument {!r}".format(func_name, arg)
-        )
-    return kwargs["warn"]
-
-
-def _get_config_var(name, warn=False):
-    # type: (str, bool) -> Union[int, str, None]
-    value = sysconfig.get_config_var(name)
-    if value is None and warn:
-        logger.debug(
-            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
-        )
-    return value
-
-
-def _normalize_string(string):
-    # type: (str) -> str
-    return string.replace(".", "_").replace("-", "_")
-
-
-def _abi3_applies(python_version):
-    # type: (PythonVersion) -> bool
-    """
-    Determine if the Python version supports abi3.
-
-    PEP 384 was first implemented in Python 3.2.
-    """
-    return len(python_version) > 1 and tuple(python_version) >= (3, 2)
-
-
-def _cpython_abis(py_version, warn=False):
-    # type: (PythonVersion, bool) -> List[str]
-    py_version = tuple(py_version)  # To allow for version comparison.
-    abis = []
-    version = _version_nodot(py_version[:2])
-    debug = pymalloc = ucs4 = ""
-    with_debug = _get_config_var("Py_DEBUG", warn)
-    has_refcount = hasattr(sys, "gettotalrefcount")
-    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
-    # extension modules is the best option.
-    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
-    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
-    if with_debug or (with_debug is None and (has_refcount or has_ext)):
-        debug = "d"
-    if py_version < (3, 8):
-        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
-        if with_pymalloc or with_pymalloc is None:
-            pymalloc = "m"
-        if py_version < (3, 3):
-            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
-            if unicode_size == 4 or (
-                unicode_size is None and sys.maxunicode == 0x10FFFF
-            ):
-                ucs4 = "u"
-    elif debug:
-        # Debug builds can also load "normal" extension modules.
-        # We can also assume no UCS-4 or pymalloc requirement.
-        abis.append("cp{version}".format(version=version))
-    abis.insert(
-        0,
-        "cp{version}{debug}{pymalloc}{ucs4}".format(
-            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
-        ),
-    )
-    return abis
-
-
-def cpython_tags(
-    python_version=None,  # type: Optional[PythonVersion]
-    abis=None,  # type: Optional[Iterable[str]]
-    platforms=None,  # type: Optional[Iterable[str]]
-    **kwargs  # type: bool
-):
-    # type: (...) -> Iterator[Tag]
-    """
-    Yields the tags for a CPython interpreter.
-
-    The tags consist of:
-    - cp<python_version>-<abi>-<platform>
-    - cp<python_version>-abi3-<platform>
-    - cp<python_version>-none-<platform>
-    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.
-
-    If python_version only specifies a major version then user-provided ABIs and
-    the 'none' ABI tag will be used.
-
-    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
-    their normal position and not at the beginning.
-    """
-    warn = _warn_keyword_parameter("cpython_tags", kwargs)
-    if not python_version:
-        python_version = sys.version_info[:2]
-
-    interpreter = "cp{}".format(_version_nodot(python_version[:2]))
-
-    if abis is None:
-        if len(python_version) > 1:
-            abis = _cpython_abis(python_version, warn)
-        else:
-            abis = []
-    abis = list(abis)
-    # 'abi3' and 'none' are explicitly handled later.
-    for explicit_abi in ("abi3", "none"):
-        try:
-            abis.remove(explicit_abi)
-        except ValueError:
-            pass
-
-    platforms = list(platforms or _platform_tags())
-    for abi in abis:
-        for platform_ in platforms:
-            yield Tag(interpreter, abi, platform_)
-    if _abi3_applies(python_version):
-        for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms):
-            yield tag
-    for tag in (Tag(interpreter, "none", platform_) for platform_ in platforms):
-        yield tag
-
-    if _abi3_applies(python_version):
-        for minor_version in range(python_version[1] - 1, 1, -1):
-            for platform_ in platforms:
-                interpreter = "cp{version}".format(
-                    version=_version_nodot((python_version[0], minor_version))
-                )
-                yield Tag(interpreter, "abi3", platform_)
-
-
-def _generic_abi():
-    # type: () -> Iterator[str]
-    abi = sysconfig.get_config_var("SOABI")
-    if abi:
-        yield _normalize_string(abi)
-
-
-def generic_tags(
-    interpreter=None,  # type: Optional[str]
-    abis=None,  # type: Optional[Iterable[str]]
-    platforms=None,  # type: Optional[Iterable[str]]
-    **kwargs  # type: bool
-):
-    # type: (...) -> Iterator[Tag]
-    """
-    Yields the tags for a generic interpreter.
-
-    The tags consist of:
-    - <interpreter>-<abi>-<platform>
-
-    The "none" ABI will be added if it was not explicitly provided.
-    """
-    warn = _warn_keyword_parameter("generic_tags", kwargs)
-    if not interpreter:
-        interp_name = interpreter_name()
-        interp_version = interpreter_version(warn=warn)
-        interpreter = "".join([interp_name, interp_version])
-    if abis is None:
-        abis = _generic_abi()
-    platforms = list(platforms or _platform_tags())
-    abis = list(abis)
-    if "none" not in abis:
-        abis.append("none")
-    for abi in abis:
-        for platform_ in platforms:
-            yield Tag(interpreter, abi, platform_)
-
-
-def _py_interpreter_range(py_version):
-    # type: (PythonVersion) -> Iterator[str]
-    """
-    Yields Python versions in descending order.
-
-    After the latest version, the major-only version will be yielded, and then
-    all previous versions of that major version.
-    """
-    if len(py_version) > 1:
-        yield "py{version}".format(version=_version_nodot(py_version[:2]))
-    yield "py{major}".format(major=py_version[0])
-    if len(py_version) > 1:
-        for minor in range(py_version[1] - 1, -1, -1):
-            yield "py{version}".format(version=_version_nodot((py_version[0], minor)))
-
-
-def compatible_tags(
-    python_version=None,  # type: Optional[PythonVersion]
-    interpreter=None,  # type: Optional[str]
-    platforms=None,  # type: Optional[Iterable[str]]
-):
-    # type: (...) -> Iterator[Tag]
-    """
-    Yields the sequence of tags that are compatible with a specific version of Python.
-
-    The tags consist of:
-    - py*-none-<platform>
-    - <interpreter>-none-any  # ... if `interpreter` is provided.
-    - py*-none-any
-    """
-    if not python_version:
-        python_version = sys.version_info[:2]
-    platforms = list(platforms or _platform_tags())
-    for version in _py_interpreter_range(python_version):
-        for platform_ in platforms:
-            yield Tag(version, "none", platform_)
-    if interpreter:
-        yield Tag(interpreter, "none", "any")
-    for version in _py_interpreter_range(python_version):
-        yield Tag(version, "none", "any")
-
-
-def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER):
-    # type: (str, bool) -> str
-    if not is_32bit:
-        return arch
-
-    if arch.startswith("ppc"):
-        return "ppc"
-
-    return "i386"
-
-
-def _mac_binary_formats(version, cpu_arch):
-    # type: (MacVersion, str) -> List[str]
-    formats = [cpu_arch]
-    if cpu_arch == "x86_64":
-        if version < (10, 4):
-            return []
-        formats.extend(["intel", "fat64", "fat32"])
-
-    elif cpu_arch == "i386":
-        if version < (10, 4):
-            return []
-        formats.extend(["intel", "fat32", "fat"])
-
-    elif cpu_arch == "ppc64":
-        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
-        if version > (10, 5) or version < (10, 4):
-            return []
-        formats.append("fat64")
-
-    elif cpu_arch == "ppc":
-        if version > (10, 6):
-            return []
-        formats.extend(["fat32", "fat"])
-
-    if cpu_arch in {"arm64", "x86_64"}:
-        formats.append("universal2")
-
-    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
-        formats.append("universal")
-
-    return formats
-
-
-def mac_platforms(version=None, arch=None):
-    # type: (Optional[MacVersion], Optional[str]) -> Iterator[str]
-    """
-    Yields the platform tags for a macOS system.
-
-    The `version` parameter is a two-item tuple specifying the macOS version to
-    generate platform tags for. The `arch` parameter is the CPU architecture to
-    generate platform tags for. Both parameters default to the appropriate value
-    for the current system.
- """ - version_str, _, cpu_arch = platform.mac_ver() # type: ignore - if version is None: - version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) - else: - version = version - if arch is None: - arch = _mac_arch(cpu_arch) - else: - arch = arch - - if (10, 0) <= version and version < (11, 0): - # Prior to Mac OS 11, each yearly release of Mac OS bumped the - # "minor" version number. The major version was always 10. - for minor_version in range(version[1], -1, -1): - compat_version = 10, minor_version - binary_formats = _mac_binary_formats(compat_version, arch) - for binary_format in binary_formats: - yield "macosx_{major}_{minor}_{binary_format}".format( - major=10, minor=minor_version, binary_format=binary_format - ) - - if version >= (11, 0): - # Starting with Mac OS 11, each yearly release bumps the major version - # number. The minor versions are now the midyear updates. - for major_version in range(version[0], 10, -1): - compat_version = major_version, 0 - binary_formats = _mac_binary_formats(compat_version, arch) - for binary_format in binary_formats: - yield "macosx_{major}_{minor}_{binary_format}".format( - major=major_version, minor=0, binary_format=binary_format - ) - - if version >= (11, 0): - # Mac OS 11 on x86_64 is compatible with binaries from previous releases. - # Arm64 support was introduced in 11.0, so no Arm binaries from previous - # releases exist. - # - # However, the "universal2" binary format can have a - # macOS version earlier than 11.0 when the x86_64 part of the binary supports - # that version of macOS. - if arch == "x86_64": - for minor_version in range(16, 3, -1): - compat_version = 10, minor_version - binary_formats = _mac_binary_formats(compat_version, arch) - for binary_format in binary_formats: - yield "macosx_{major}_{minor}_{binary_format}".format( - major=compat_version[0], - minor=compat_version[1], - binary_format=binary_format, - ) - else: - for minor_version in range(16, 3, -1): - compat_version = 10, minor_version - binary_format = "universal2" - yield "macosx_{major}_{minor}_{binary_format}".format( - major=compat_version[0], - minor=compat_version[1], - binary_format=binary_format, - ) - - -# From PEP 513, PEP 600 -def _is_manylinux_compatible(name, arch, glibc_version): - # type: (str, str, GlibcVersion) -> bool - sys_glibc = _get_glibc_version() - if sys_glibc < glibc_version: - return False - # Check for presence of _manylinux module. - try: - import _manylinux # noqa - except ImportError: - pass - else: - if hasattr(_manylinux, "manylinux_compatible"): - result = _manylinux.manylinux_compatible( - glibc_version[0], glibc_version[1], arch - ) - if result is not None: - return bool(result) - else: - if glibc_version == (2, 5): - if hasattr(_manylinux, "manylinux1_compatible"): - return bool(_manylinux.manylinux1_compatible) - if glibc_version == (2, 12): - if hasattr(_manylinux, "manylinux2010_compatible"): - return bool(_manylinux.manylinux2010_compatible) - if glibc_version == (2, 17): - if hasattr(_manylinux, "manylinux2014_compatible"): - return bool(_manylinux.manylinux2014_compatible) - return True - - -def _glibc_version_string(): - # type: () -> Optional[str] - # Returns glibc version string, or None if not using glibc. - return _glibc_version_string_confstr() or _glibc_version_string_ctypes() - - -def _glibc_version_string_confstr(): - # type: () -> Optional[str] - """ - Primary implementation of glibc_version_string using os.confstr. - """ - # os.confstr is quite a bit faster than ctypes.DLL. 
It's also less likely - # to be broken or missing. This strategy is used in the standard library - # platform module. - # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183 - try: - # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17". - version_string = os.confstr( # type: ignore[attr-defined] # noqa: F821 - "CS_GNU_LIBC_VERSION" - ) - assert version_string is not None - _, version = version_string.split() # type: Tuple[str, str] - except (AssertionError, AttributeError, OSError, ValueError): - # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... - return None - return version - - -def _glibc_version_string_ctypes(): - # type: () -> Optional[str] - """ - Fallback implementation of glibc_version_string using ctypes. - """ - try: - import ctypes - except ImportError: - return None - - # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen - # manpage says, "If filename is NULL, then the returned handle is for the - # main program". This way we can let the linker do the work to figure out - # which libc our process is actually using. - # - # We must also handle the special case where the executable is not a - # dynamically linked executable. This can occur when using musl libc, - # for example. In this situation, dlopen() will error, leading to an - # OSError. Interestingly, at least in the case of musl, there is no - # errno set on the OSError. The single string argument used to construct - # OSError comes from libc itself and is therefore not portable to - # hard code here. In any case, failure to call dlopen() means we - # can proceed, so we bail on our attempt. - try: - # Note: typeshed is wrong here so we are ignoring this line. - process_namespace = ctypes.CDLL(None) # type: ignore - except OSError: - return None - - try: - gnu_get_libc_version = process_namespace.gnu_get_libc_version - except AttributeError: - # Symbol doesn't exist -> therefore, we are not linked to - # glibc. - return None - - # Call gnu_get_libc_version, which returns a string like "2.5" - gnu_get_libc_version.restype = ctypes.c_char_p - version_str = gnu_get_libc_version() # type: str - # py2 / py3 compatibility: - if not isinstance(version_str, str): - version_str = version_str.decode("ascii") - - return version_str - - -def _parse_glibc_version(version_str): - # type: (str) -> Tuple[int, int] - # Parse glibc version. - # - # We use a regexp instead of str.split because we want to discard any - # random junk that might come after the minor version -- this might happen - # in patched/forked versions of glibc (e.g. Linaro's version of glibc - # uses version strings like "2.20-2014.11"). See gh-3588. 
-    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
-    if not m:
-        warnings.warn(
-            "Expected glibc version with 2 components major.minor,"
-            " got: %s" % version_str,
-            RuntimeWarning,
-        )
-        return -1, -1
-    return (int(m.group("major")), int(m.group("minor")))
-
-
-_glibc_version = []  # type: List[Tuple[int, int]]
-
-
-def _get_glibc_version():
-    # type: () -> Tuple[int, int]
-    if _glibc_version:
-        return _glibc_version[0]
-    version_str = _glibc_version_string()
-    if version_str is None:
-        _glibc_version.append((-1, -1))
-    else:
-        _glibc_version.append(_parse_glibc_version(version_str))
-    return _glibc_version[0]
-
-
-# Python does not provide platform information at sufficient granularity to
-# identify the architecture of the running executable in some cases, so we
-# determine it dynamically by reading the information from the running
-# process. This only applies on Linux, which uses the ELF format.
-class _ELFFileHeader(object):
-    # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
-    class _InvalidELFFileHeader(ValueError):
-        """
-        An invalid ELF file header was found.
-        """
-
-    ELF_MAGIC_NUMBER = 0x7F454C46
-    ELFCLASS32 = 1
-    ELFCLASS64 = 2
-    ELFDATA2LSB = 1
-    ELFDATA2MSB = 2
-    EM_386 = 3
-    EM_S390 = 22
-    EM_ARM = 40
-    EM_X86_64 = 62
-    EF_ARM_ABIMASK = 0xFF000000
-    EF_ARM_ABI_VER5 = 0x05000000
-    EF_ARM_ABI_FLOAT_HARD = 0x00000400
-
-    def __init__(self, file):
-        # type: (IO[bytes]) -> None
-        def unpack(fmt):
-            # type: (str) -> int
-            try:
-                (result,) = struct.unpack(
-                    fmt, file.read(struct.calcsize(fmt))
-                )  # type: (int, )
-            except struct.error:
-                raise _ELFFileHeader._InvalidELFFileHeader()
-            return result
-
-        self.e_ident_magic = unpack(">I")
-        if self.e_ident_magic != self.ELF_MAGIC_NUMBER:
-            raise _ELFFileHeader._InvalidELFFileHeader()
-        self.e_ident_class = unpack("B")
-        if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}:
-            raise _ELFFileHeader._InvalidELFFileHeader()
-        self.e_ident_data = unpack("B")
-        if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}:
-            raise _ELFFileHeader._InvalidELFFileHeader()
-        self.e_ident_version = unpack("B")
-        self.e_ident_osabi = unpack("B")
-        self.e_ident_abiversion = unpack("B")
-        self.e_ident_pad = file.read(7)
-        format_h = "<H" if self.e_ident_data == self.ELFDATA2LSB else ">H"
-        format_i = "<I" if self.e_ident_data == self.ELFDATA2LSB else ">I"
-        format_q = "<Q" if self.e_ident_data == self.ELFDATA2LSB else ">Q"
-        format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q
-        self.e_type = unpack(format_h)
-        self.e_machine = unpack(format_h)
-        self.e_version = unpack(format_i)
-        self.e_entry = unpack(format_p)
-        self.e_phoff = unpack(format_p)
-        self.e_shoff = unpack(format_p)
-        self.e_flags = unpack(format_i)
-        self.e_ehsize = unpack(format_h)
-        self.e_phentsize = unpack(format_h)
-        self.e_phnum = unpack(format_h)
-        self.e_shentsize = unpack(format_h)
-        self.e_shnum = unpack(format_h)
-        self.e_shstrndx = unpack(format_h)
-
-
-def _get_elf_header():
-    # type: () -> Optional[_ELFFileHeader]
-    try:
-        with open(sys.executable, "rb") as f:
-            elf_header = _ELFFileHeader(f)
-    except (IOError, OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader):
-        return None
-    return elf_header
-
-
-def _is_linux_armhf():
-    # type: () -> bool
-    # hard-float ABI can be detected from the ELF header of the running
-    # process
-    # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
-    elf_header = _get_elf_header()
-    if elf_header is None:
-        return False
-    result = elf_header.e_ident_class == elf_header.ELFCLASS32
-    result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
-    result &= elf_header.e_machine == elf_header.EM_ARM
-    result &= (
-        elf_header.e_flags & elf_header.EF_ARM_ABIMASK
-    ) == elf_header.EF_ARM_ABI_VER5
-    result &= (
-        elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD
-    ) == elf_header.EF_ARM_ABI_FLOAT_HARD
-    return result
-
-
-def _is_linux_i686():
-    # type: () -> bool
-    elf_header = _get_elf_header()
-    if elf_header is None:
-        return False
-    result = elf_header.e_ident_class == elf_header.ELFCLASS32
-    result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
-    result &= elf_header.e_machine == elf_header.EM_386
-    return result
-
-
-def _have_compatible_manylinux_abi(arch):
-    # type: (str) -> bool
-    if arch == "armv7l":
-        return _is_linux_armhf()
-    if arch == "i686":
-        return _is_linux_i686()
-    return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
-
-
-def _manylinux_tags(linux, arch):
-    # type: (str, str) -> Iterator[str]
-    # Oldest glibc to be supported regardless of architecture is (2, 17).
-    too_old_glibc2 = glibcVersion(2, 16)
-    if arch in {"x86_64", "i686"}:
-        # On x86/i686 also oldest glibc to be supported is (2, 5).
-        too_old_glibc2 = glibcVersion(2, 4)
-    current_glibc = glibcVersion(*_get_glibc_version())
-    glibc_max_list = [current_glibc]
-    # We can assume compatibility across glibc major versions.
-    # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
-    #
-    # Build a list of maximum glibc versions so that we can
-    # output the canonical list of all glibc from current_glibc
-    # down to too_old_glibc2, including all intermediary versions.
-    for glibc_major in range(current_glibc.major - 1, 1, -1):
-        glibc_max_list.append(glibcVersion(glibc_major, _LAST_GLIBC_MINOR[glibc_major]))
-    for glibc_max in glibc_max_list:
-        if glibc_max.major == too_old_glibc2.major:
-            min_minor = too_old_glibc2.minor
-        else:
-            # For other glibc major versions oldest supported is (x, 0).
-            min_minor = -1
-        for glibc_minor in range(glibc_max.minor, min_minor, -1):
-            glibc_version = (glibc_max.major, glibc_minor)
-            tag = "manylinux_{}_{}".format(*glibc_version)
-            if _is_manylinux_compatible(tag, arch, glibc_version):
-                yield linux.replace("linux", tag)
-            # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
-            if glibc_version in _LEGACY_MANYLINUX_MAP:
-                legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
-                if _is_manylinux_compatible(legacy_tag, arch, glibc_version):
-                    yield linux.replace("linux", legacy_tag)
-
-
-def _linux_platforms(is_32bit=_32_BIT_INTERPRETER):
-    # type: (bool) -> Iterator[str]
-    linux = _normalize_string(distutils.util.get_platform())
-    if is_32bit:
-        if linux == "linux_x86_64":
-            linux = "linux_i686"
-        elif linux == "linux_aarch64":
-            linux = "linux_armv7l"
-    _, arch = linux.split("_", 1)
-    if _have_compatible_manylinux_abi(arch):
-        for tag in _manylinux_tags(linux, arch):
-            yield tag
-    yield linux
-
-
-def _generic_platforms():
-    # type: () -> Iterator[str]
-    yield _normalize_string(distutils.util.get_platform())
-
-
-def _platform_tags():
-    # type: () -> Iterator[str]
-    """
-    Provides the platform tags for this installation.
-    """
-    if platform.system() == "Darwin":
-        return mac_platforms()
-    elif platform.system() == "Linux":
-        return _linux_platforms()
-    else:
-        return _generic_platforms()
-
-
-def interpreter_name():
-    # type: () -> str
-    """
-    Returns the name of the running interpreter.
-    """
-    try:
-        name = sys.implementation.name  # type: ignore
-    except AttributeError:  # pragma: no cover
-        # Python 2.7 compatibility.
-        name = platform.python_implementation().lower()
-    return INTERPRETER_SHORT_NAMES.get(name) or name
-
-
-def interpreter_version(**kwargs):
-    # type: (bool) -> str
-    """
-    Returns the version of the running interpreter.
-    """
-    warn = _warn_keyword_parameter("interpreter_version", kwargs)
-    version = _get_config_var("py_version_nodot", warn=warn)
-    if version:
-        version = str(version)
-    else:
-        version = _version_nodot(sys.version_info[:2])
-    return version
-
-
-def _version_nodot(version):
-    # type: (PythonVersion) -> str
-    return "".join(map(str, version))
-
-
-def sys_tags(**kwargs):
-    # type: (bool) -> Iterator[Tag]
-    """
-    Returns the sequence of tag triples for the running interpreter.
-
-    The order of the sequence corresponds to priority order for the
-    interpreter, from most to least important.
-    """
-    warn = _warn_keyword_parameter("sys_tags", kwargs)
-
-    interp_name = interpreter_name()
-    if interp_name == "cp":
-        for tag in cpython_tags(warn=warn):
-            yield tag
-    else:
-        for tag in generic_tags():
-            yield tag
-
-    for tag in compatible_tags():
-        yield tag
diff --git a/vendor/poetry-core/poetry/core/_vendor/packaging/utils.py b/vendor/poetry-core/poetry/core/_vendor/packaging/utils.py
deleted file mode 100644
index 6e8c2a3e..00000000
--- a/vendor/poetry-core/poetry/core/_vendor/packaging/utils.py
+++ /dev/null
@@ -1,138 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-import re
-
-from ._typing import TYPE_CHECKING, cast
-from .tags import Tag, parse_tag
-from .version import InvalidVersion, Version
-
-if TYPE_CHECKING:  # pragma: no cover
-    from typing import FrozenSet, NewType, Tuple, Union
-
-    BuildTag = Union[Tuple[()], Tuple[int, str]]
-    NormalizedName = NewType("NormalizedName", str)
-else:
-    BuildTag = tuple
-    NormalizedName = str
-
-
-class InvalidWheelFilename(ValueError):
-    """
-    An invalid wheel filename was found, users should refer to PEP 427.
-    """
-
-
-class InvalidSdistFilename(ValueError):
-    """
-    An invalid sdist filename was found, users should refer to the packaging user guide.
-    """
-
-
-_canonicalize_regex = re.compile(r"[-_.]+")
-# PEP 427: The build number must start with a digit.
-_build_tag_regex = re.compile(r"(\d+)(.*)")
-
-
-def canonicalize_name(name):
-    # type: (str) -> NormalizedName
-    # This is taken from PEP 503.
-    value = _canonicalize_regex.sub("-", name).lower()
-    return cast(NormalizedName, value)
-
-
-def canonicalize_version(version):
-    # type: (Union[Version, str]) -> Union[Version, str]
-    """
-    This is very similar to Version.__str__, but has one subtle difference
-    with the way it handles the release segment.
-    """
-    if not isinstance(version, Version):
-        try:
-            version = Version(version)
-        except InvalidVersion:
-            # Legacy versions cannot be normalized
-            return version
-
-    parts = []
-
-    # Epoch
-    if version.epoch != 0:
-        parts.append("{0}!".format(version.epoch))
-
-    # Release segment
-    # NB: This strips trailing '.0's to normalize
-    parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in version.release)))
-
-    # Pre-release
-    if version.pre is not None:
-        parts.append("".join(str(x) for x in version.pre))
-
-    # Post-release
-    if version.post is not None:
-        parts.append(".post{0}".format(version.post))
-
-    # Development release
-    if version.dev is not None:
-        parts.append(".dev{0}".format(version.dev))
-
-    # Local version segment
-    if version.local is not None:
-        parts.append("+{0}".format(version.local))
-
-    return "".join(parts)
-
-
-def parse_wheel_filename(filename):
-    # type: (str) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]
-    if not filename.endswith(".whl"):
-        raise InvalidWheelFilename(
-            "Invalid wheel filename (extension must be '.whl'): {0}".format(filename)
-        )
-
-    filename = filename[:-4]
-    dashes = filename.count("-")
-    if dashes not in (4, 5):
-        raise InvalidWheelFilename(
-            "Invalid wheel filename (wrong number of parts): {0}".format(filename)
-        )
-
-    parts = filename.split("-", dashes - 2)
-    name_part = parts[0]
-    # See PEP 427 for the rules on escaping the project name
-    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
-        raise InvalidWheelFilename("Invalid project name: {0}".format(filename))
-    name = canonicalize_name(name_part)
-    version = Version(parts[1])
-    if dashes == 5:
-        build_part = parts[2]
-        build_match = _build_tag_regex.match(build_part)
-        if build_match is None:
-            raise InvalidWheelFilename(
-                "Invalid build number: {0} in '{1}'".format(build_part, filename)
-            )
-        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
-    else:
-        build = ()
-    tags = parse_tag(parts[-1])
-    return (name, version, build, tags)
-
-
-def parse_sdist_filename(filename):
-    # type: (str) -> Tuple[NormalizedName, Version]
-    if not filename.endswith(".tar.gz"):
-        raise InvalidSdistFilename(
-            "Invalid sdist filename (extension must be '.tar.gz'): {0}".format(filename)
-        )
-
-    # We are requiring a PEP 440 version, which cannot contain dashes,
-    # so we split on the last dash.
-    name_part, sep, version_part = filename[:-7].rpartition("-")
-    if not sep:
-        raise InvalidSdistFilename("Invalid sdist filename: {0}".format(filename))
-
-    name = canonicalize_name(name_part)
-    version = Version(version_part)
-    return (name, version)
diff --git a/vendor/poetry-core/poetry/core/_vendor/packaging/version.py b/vendor/poetry-core/poetry/core/_vendor/packaging/version.py
deleted file mode 100644
index 517d91f2..00000000
--- a/vendor/poetry-core/poetry/core/_vendor/packaging/version.py
+++ /dev/null
@@ -1,556 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-import collections
-import itertools
-import re
-import warnings
-
-from ._structures import Infinity, NegativeInfinity
-from ._typing import TYPE_CHECKING
-
-if TYPE_CHECKING:  # pragma: no cover
-    from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union
-
-    from ._structures import InfinityType, NegativeInfinityType
-
-    InfiniteTypes = Union[InfinityType, NegativeInfinityType]
-    PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
-    SubLocalType = Union[InfiniteTypes, int, str]
-    LocalType = Union[
-        NegativeInfinityType,
-        Tuple[
-            Union[
-                SubLocalType,
-                Tuple[SubLocalType, str],
-                Tuple[NegativeInfinityType, SubLocalType],
-            ],
-            ...,
-        ],
-    ]
-    CmpKey = Tuple[
-        int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
-    ]
-    LegacyCmpKey = Tuple[int, Tuple[str, ...]]
-    VersionComparisonMethod = Callable[
-        [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool
-    ]
-
-__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]
-
-
-_Version = collections.namedtuple(
-    "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
-)
-
-
-def parse(version):
-    # type: (str) -> Union[LegacyVersion, Version]
-    """
-    Parse the given version string and return either a :class:`Version` object
-    or a :class:`LegacyVersion` object depending on if the given version is
-    a valid PEP 440 version or a legacy version.
-    """
-    try:
-        return Version(version)
-    except InvalidVersion:
-        return LegacyVersion(version)
-
-
-class InvalidVersion(ValueError):
-    """
-    An invalid version was found, users should refer to PEP 440.
-    """
-
-
-class _BaseVersion(object):
-    _key = None  # type: Union[CmpKey, LegacyCmpKey]
-
-    def __hash__(self):
-        # type: () -> int
-        return hash(self._key)
-
-    # Please keep the duplicated `isinstance` check
-    # in the six comparisons hereunder
-    # unless you find a way to avoid adding overhead function calls.
-    def __lt__(self, other):
-        # type: (_BaseVersion) -> bool
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key < other._key
-
-    def __le__(self, other):
-        # type: (_BaseVersion) -> bool
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key <= other._key
-
-    def __eq__(self, other):
-        # type: (object) -> bool
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key == other._key
-
-    def __ge__(self, other):
-        # type: (_BaseVersion) -> bool
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key >= other._key
-
-    def __gt__(self, other):
-        # type: (_BaseVersion) -> bool
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key > other._key
-
-    def __ne__(self, other):
-        # type: (object) -> bool
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key != other._key
-
-
-class LegacyVersion(_BaseVersion):
-    def __init__(self, version):
-        # type: (str) -> None
-        self._version = str(version)
-        self._key = _legacy_cmpkey(self._version)
-
-        warnings.warn(
-            "Creating a LegacyVersion has been deprecated and will be "
-            "removed in the next major release",
-            DeprecationWarning,
-        )
-
-    def __str__(self):
-        # type: () -> str
-        return self._version
-
-    def __repr__(self):
-        # type: () -> str
-        return "<LegacyVersion({0})>".format(repr(str(self)))
-
-    @property
-    def public(self):
-        # type: () -> str
-        return self._version
-
-    @property
-    def base_version(self):
-        # type: () -> str
-        return self._version
-
-    @property
-    def epoch(self):
-        # type: () -> int
-        return -1
-
-    @property
-    def release(self):
-        # type: () -> None
-        return None
-
-    @property
-    def pre(self):
-        # type: () -> None
-        return None
-
-    @property
-    def post(self):
-        # type: () -> None
-        return None
-
-    @property
-    def dev(self):
-        # type: () -> None
-        return None
-
-    @property
-    def local(self):
-        # type: () -> None
-        return None
-
-    @property
-    def is_prerelease(self):
-        # type: () -> bool
-        return False
-
-    @property
-    def is_postrelease(self):
-        # type: () -> bool
-        return False
-
-    @property
-    def is_devrelease(self):
-        # type: () -> bool
-        return False
-
-
-_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
-
-_legacy_version_replacement_map = {
-    "pre": "c",
-    "preview": "c",
-    "-": "final-",
-    "rc": "c",
-    "dev": "@",
-}
-
-
-def _parse_version_parts(s):
-    # type: (str) -> Iterator[str]
-    for part in _legacy_version_component_re.split(s):
-        part = _legacy_version_replacement_map.get(part, part)
-
-        if not part or part == ".":
-            continue
-
-        if part[:1] in "0123456789":
-            # pad for numeric comparison
-            yield part.zfill(8)
-        else:
-            yield "*" + part
-
-    # ensure that alpha/beta/candidate are before final
-    yield "*final"
-
-
-def _legacy_cmpkey(version):
-    # type: (str) -> LegacyCmpKey
-
-    # We hardcode an epoch of -1 here. A PEP 440 version can only have an epoch
-    # greater than or equal to 0. This will effectively put the LegacyVersion,
-    # which uses the de facto standard originally implemented by setuptools,
-    # as before all PEP 440 versions.
-    epoch = -1
-
-    # This scheme is taken from pkg_resources.parse_version of setuptools prior to
-    # its adoption of the packaging library.
-    parts = []  # type: List[str]
-    for part in _parse_version_parts(version.lower()):
-        if part.startswith("*"):
-            # remove "-" before a prerelease tag
-            if part < "*final":
-                while parts and parts[-1] == "*final-":
-                    parts.pop()
-
-            # remove trailing zeros from each series of numeric parts
-            while parts and parts[-1] == "00000000":
-                parts.pop()
-
-        parts.append(part)
-
-    return epoch, tuple(parts)
-
-
-# Deliberately not anchored to the start and end of the string, to make it
-# easier for 3rd party code to reuse
-VERSION_PATTERN = r"""
-    v?
-    (?:
-        (?:(?P<epoch>[0-9]+)!)?                           # epoch
-        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
-        (?P<pre>                                          # pre-release
-            [-_\.]?
-            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
-            [-_\.]?
-            (?P<pre_n>[0-9]+)?
-        )?
-        (?P<post>                                         # post release
-            (?:-(?P<post_n1>[0-9]+))
-            |
-            (?:
-                [-_\.]?
-                (?P<post_l>post|rev|r)
-                [-_\.]?
-                (?P<post_n2>[0-9]+)?
-            )
-        )?
-        (?P<dev>                                          # dev release
-            [-_\.]?
-            (?P<dev_l>dev)
-            [-_\.]?
-            (?P<dev_n>[0-9]+)?
-        )?
-    )
-    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
-"""
-
-
-class Version(_BaseVersion):
-
-    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
-
-    def __init__(self, version):
-        # type: (str) -> None
-
-        # Validate the version and parse it into pieces
-        match = self._regex.search(version)
-        if not match:
-            raise InvalidVersion("Invalid version: '{0}'".format(version))
-
-        # Store the parsed out pieces of the version
-        self._version = _Version(
-            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
-            release=tuple(int(i) for i in match.group("release").split(".")),
-            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
-            post=_parse_letter_version(
-                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
-            ),
-            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
-            local=_parse_local_version(match.group("local")),
-        )
-
-        # Generate a key which will be used for sorting
-        self._key = _cmpkey(
-            self._version.epoch,
-            self._version.release,
-            self._version.pre,
-            self._version.post,
-            self._version.dev,
-            self._version.local,
-        )
-
-    def __repr__(self):
-        # type: () -> str
-        return "<Version({0})>".format(repr(str(self)))
-
-    def __str__(self):
-        # type: () -> str
-        parts = []
-
-        # Epoch
-        if self.epoch != 0:
-            parts.append("{0}!".format(self.epoch))
-
-        # Release segment
-        parts.append(".".join(str(x) for x in self.release))
-
-        # Pre-release
-        if self.pre is not None:
-            parts.append("".join(str(x) for x in self.pre))
-
-        # Post-release
-        if self.post is not None:
-            parts.append(".post{0}".format(self.post))
-
-        # Development release
-        if self.dev is not None:
-            parts.append(".dev{0}".format(self.dev))
-
-        # Local version segment
-        if self.local is not None:
-            parts.append("+{0}".format(self.local))
-
-        return "".join(parts)
-
-    @property
-    def epoch(self):
-        # type: () -> int
-        _epoch = self._version.epoch  # type: int
-        return _epoch
-
-    @property
-    def release(self):
-        # type: () -> Tuple[int, ...]
-        _release = self._version.release  # type: Tuple[int, ...]
-        return _release
-
-    @property
-    def pre(self):
-        # type: () -> Optional[Tuple[str, int]]
-        _pre = self._version.pre  # type: Optional[Tuple[str, int]]
-        return _pre
-
-    @property
-    def post(self):
-        # type: () -> Optional[int]
-        return self._version.post[1] if self._version.post else None
-
-    @property
-    def dev(self):
-        # type: () -> Optional[int]
-        return self._version.dev[1] if self._version.dev else None
-
-    @property
-    def local(self):
-        # type: () -> Optional[str]
-        if self._version.local:
-            return ".".join(str(x) for x in self._version.local)
-        else:
-            return None
-
-    @property
-    def public(self):
-        # type: () -> str
-        return str(self).split("+", 1)[0]
-
-    @property
-    def base_version(self):
-        # type: () -> str
-        parts = []
-
-        # Epoch
-        if self.epoch != 0:
-            parts.append("{0}!".format(self.epoch))
-
-        # Release segment
-        parts.append(".".join(str(x) for x in self.release))
-
-        return "".join(parts)
-
-    @property
-    def is_prerelease(self):
-        # type: () -> bool
-        return self.dev is not None or self.pre is not None
-
-    @property
-    def is_postrelease(self):
-        # type: () -> bool
-        return self.post is not None
-
-    @property
-    def is_devrelease(self):
-        # type: () -> bool
-        return self.dev is not None
-
-    @property
-    def major(self):
-        # type: () -> int
-        return self.release[0] if len(self.release) >= 1 else 0
-
-    @property
-    def minor(self):
-        # type: () -> int
-        return self.release[1] if len(self.release) >= 2 else 0
-
-    @property
-    def micro(self):
-        # type: () -> int
-        return self.release[2] if len(self.release) >= 3 else 0
-
-
-def _parse_letter_version(
-    letter,  # type: str
-    number,  # type: Union[str, bytes, SupportsInt]
-):
-    # type: (...) -> Optional[Tuple[str, int]]
-
-    if letter:
-        # We consider there to be an implicit 0 in a pre-release if there is
-        # not a numeral associated with it.
-        if number is None:
-            number = 0
-
-        # We normalize any letters to their lower case form
-        letter = letter.lower()
-
-        # We consider some words to be alternate spellings of other words and
-        # in those cases we want to normalize the spellings to our preferred
-        # spelling.
-        if letter == "alpha":
-            letter = "a"
-        elif letter == "beta":
-            letter = "b"
-        elif letter in ["c", "pre", "preview"]:
-            letter = "rc"
-        elif letter in ["rev", "r"]:
-            letter = "post"
-
-        return letter, int(number)
-    if not letter and number:
-        # We assume if we are given a number, but we are not given a letter
-        # then this is using the implicit post release syntax (e.g. 1.0-1)
-        letter = "post"
-
-        return letter, int(number)
-
-    return None
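The normalization above means alternate spellings of the same segment compare equal; a short sketch, assuming the public packaging distribution (illustrative only):

    from packaging.version import Version

    assert Version("1.0alpha1") == Version("1.0a1")    # "alpha" -> "a"
    assert Version("1.0pre1") == Version("1.0rc1")     # "pre"/"preview"/"c" -> "rc"
    assert Version("1.0-r4") == Version("1.0.post4")   # "rev"/"r" -> "post"
    assert Version("1.0-1") == Version("1.0.post1")    # implicit post release syntax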
-
-
-_local_version_separators = re.compile(r"[\._-]")
-
-
-def _parse_local_version(local):
-    # type: (str) -> Optional[LocalType]
-    """
-    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
-    """
-    if local is not None:
-        return tuple(
-            part.lower() if not part.isdigit() else int(part)
-            for part in _local_version_separators.split(local)
-        )
-    return None
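A quick sketch of the docstring's example via the public API (illustrative only): separators are interchangeable, digit parts become integers, and the segment is lowercased, so equivalent spellings normalize to the same local version:

    from packaging.version import Version

    v = Version("1.0+Abc.1-Twelve")
    print(v.local)                            # abc.1.twelve
    print(v == Version("1.0+abc_1_twelve"))   # True: ".", "_" and "-" are equivalent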
-
-
-def _cmpkey(
-    epoch,  # type: int
-    release,  # type: Tuple[int, ...]
-    pre,  # type: Optional[Tuple[str, int]]
-    post,  # type: Optional[Tuple[str, int]]
-    dev,  # type: Optional[Tuple[str, int]]
-    local,  # type: Optional[Tuple[SubLocalType]]
-):
-    # type: (...) -> CmpKey
-
-    # When we compare a release version, we want to compare it with all of the
-    # trailing zeros removed. So we'll reverse the list, drop all the now
-    # leading zeros until we come to something non-zero, then re-reverse the
-    # rest back into the correct order, make it a tuple, and use that for our
-    # sorting key.
-    _release = tuple(
-        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
-    )
-
-    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
-    # We'll do this by abusing the pre segment, but we _only_ want to do this
-    # if there is not a pre or a post segment. If we have one of those then
-    # the normal sorting rules will handle this case correctly.
-    if pre is None and post is None and dev is not None:
-        _pre = NegativeInfinity  # type: PrePostDevType
-    # Versions without a pre-release (except as noted above) should sort after
-    # those with one.
-    elif pre is None:
-        _pre = Infinity
-    else:
-        _pre = pre
-
-    # Versions without a post segment should sort before those with one.
-    if post is None:
-        _post = NegativeInfinity  # type: PrePostDevType
-
-    else:
-        _post = post
-
-    # Versions without a development segment should sort after those with one.
-    if dev is None:
-        _dev = Infinity  # type: PrePostDevType
-
-    else:
-        _dev = dev
-
-    if local is None:
-        # Versions without a local segment should sort before those with one.
-        _local = NegativeInfinity  # type: LocalType
-    else:
-        # Versions with a local segment need that segment parsed to implement
-        # the sorting rules in PEP440.
-        # - Alpha numeric segments sort before numeric segments
-        # - Alpha numeric segments sort lexicographically
-        # - Numeric segments sort numerically
-        # - Shorter versions sort before longer versions when the prefixes
-        #   match exactly
-        _local = tuple(
-            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
-        )
-
-    return epoch, _release, _pre, _post, _dev, _local
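The sentinel juggling above produces the PEP 440 ordering dev < pre < final < final+local < post, with the NegativeInfinity trick putting 1.0.dev0 ahead of 1.0a0. A hedged sketch against the public packaging API (illustrative, not part of the patch):

    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    versions = ["1.0.post1", "1.0", "1.0.dev0", "1.0a1", "1.0+local"]
    print(sorted(versions, key=Version))
    # -> ['1.0.dev0', '1.0a1', '1.0', '1.0+local', '1.0.post1']

    # SpecifierSet.filter() (removed above as well) drops pre-releases by default:
    print(list(SpecifierSet(">=1.0").filter(["0.9", "1.0", "1.5a1"])))
    # -> ['1.0']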
diff --git a/vendor/poetry-core/poetry/core/_vendor/pyparsing.LICENSE b/vendor/poetry-core/poetry/core/_vendor/pyparsing.LICENSE
deleted file mode 100644
index 1bf98523..00000000
--- a/vendor/poetry-core/poetry/core/_vendor/pyparsing.LICENSE
+++ /dev/null
@@ -1,18 +0,0 @@
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/poetry-core/poetry/core/_vendor/pyparsing.py b/vendor/poetry-core/poetry/core/_vendor/pyparsing.py
deleted file mode 100644
index 581d5bbb..00000000
--- a/vendor/poetry-core/poetry/core/_vendor/pyparsing.py
+++ /dev/null
@@ -1,7107 +0,0 @@
-# -*- coding: utf-8 -*-
-# module pyparsing.py
-#
-# Copyright (c) 2003-2019  Paul T. McGuire
-#
-# Permission is hereby granted, free of charge, to any person obtaining
-# a copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the Software, and to
-# permit persons to whom the Software is furnished to do so, subject to
-# the following conditions:
-#
-# The above copyright notice and this permission notice shall be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-#
-
-__doc__ = \
-"""
-pyparsing module - Classes and methods to define and execute parsing grammars
-=============================================================================
-
-The pyparsing module is an alternative approach to creating and
-executing simple grammars, vs. the traditional lex/yacc approach, or the
-use of regular expressions.  With pyparsing, you don't need to learn
-a new syntax for defining grammars or matching expressions - the parsing
-module provides a library of classes that you use to construct the
-grammar directly in Python.
-
-Here is a program to parse "Hello, World!" (or any greeting of the form
-``", !"``), built up using :class:`Word`,
-:class:`Literal`, and :class:`And` elements
-(the :class:`'+'` operators create :class:`And` expressions,
-and the strings are auto-converted to :class:`Literal` expressions)::
-
-    from pyparsing import Word, alphas
-
-    # define grammar of a greeting
-    greet = Word(alphas) + "," + Word(alphas) + "!"
-
-    hello = "Hello, World!"
-    print (hello, "->", greet.parseString(hello))
-
-The program outputs the following::
-
-    Hello, World! -> ['Hello', ',', 'World', '!']
-
-The Python representation of the grammar is quite readable, owing to the
-self-explanatory class names, and the use of '+', '|' and '^' operators.
-
-The :class:`ParseResults` object returned from
-:class:`ParserElement.parseString` can be
-accessed as a nested list, a dictionary, or an object with named
-attributes.
-
-The pyparsing module handles some of the problems that are typically
-vexing when writing text parsers:
-
-  - extra or missing whitespace (the above program will also handle
-    "Hello,World!", "Hello  ,  World  !", etc.)
-  - quoted strings
-  - embedded comments
-
-
-Getting Started -
------------------
-Visit the classes :class:`ParserElement` and :class:`ParseResults` to
-see the base classes that most other pyparsing
-classes inherit from. Use the docstrings for examples of how to:
-
- - construct literal match expressions from :class:`Literal` and
-   :class:`CaselessLiteral` classes
- - construct character word-group expressions using the :class:`Word`
-   class
- - see how to create repetitive expressions using :class:`ZeroOrMore`
-   and :class:`OneOrMore` classes
- - use :class:`'+'`, :class:`'|'`, :class:`'^'`,
-   and :class:`'&'` operators to combine simple expressions into
-   more complex ones
- - associate names with your parsed results using
-   :class:`ParserElement.setResultsName`
- - access the parsed data, which is returned as a :class:`ParseResults`
-   object
- - find some helpful expression short-cuts like :class:`delimitedList`
-   and :class:`oneOf`
- - find more useful common expressions in the :class:`pyparsing_common`
-   namespace class
-"""
-
-__version__ = "2.4.7"
-__versionTime__ = "30 Mar 2020 00:43 UTC"
-__author__ = "Paul McGuire <ptmcguire@gmail.com>"
-
-import string
-from weakref import ref as wkref
-import copy
-import sys
-import warnings
-import re
-import sre_constants
-import collections
-import pprint
-import traceback
-import types
-from datetime import datetime
-from operator import itemgetter
-import itertools
-from functools import wraps
-from contextlib import contextmanager
-
-try:
-    # Python 3
-    from itertools import filterfalse
-except ImportError:
-    from itertools import ifilterfalse as filterfalse
-
-try:
-    from _thread import RLock
-except ImportError:
-    from threading import RLock
-
-try:
-    # Python 3
-    from collections.abc import Iterable
-    from collections.abc import MutableMapping, Mapping
-except ImportError:
-    # Python 2.7
-    from collections import Iterable
-    from collections import MutableMapping, Mapping
-
-try:
-    from collections import OrderedDict as _OrderedDict
-except ImportError:
-    try:
-        from ordereddict import OrderedDict as _OrderedDict
-    except ImportError:
-        _OrderedDict = None
-
-try:
-    from types import SimpleNamespace
-except ImportError:
-    class SimpleNamespace: pass
-
-# version compatibility configuration
-__compat__ = SimpleNamespace()
-__compat__.__doc__ = """
-    A cross-version compatibility configuration for pyparsing features that will be
-    released in a future version. By setting values in this configuration to True,
-    those features can be enabled in prior versions for compatibility development
-    and testing.
-
-     - collect_all_And_tokens - flag to enable fix for Issue #63 that fixes erroneous grouping
-       of results names when an And expression is nested within an Or or MatchFirst; set to
-       True to enable bugfix released in pyparsing 2.3.0, or False to preserve
-       pre-2.3.0 handling of named results
-"""
-__compat__.collect_all_And_tokens = True
-
-__diag__ = SimpleNamespace()
-__diag__.__doc__ = """
-Diagnostic configuration (all default to False)
-     - warn_multiple_tokens_in_named_alternation - flag to enable warnings when a results
-       name is defined on a MatchFirst or Or expression with one or more And subexpressions
-       (only warns if __compat__.collect_all_And_tokens is False)
-     - warn_ungrouped_named_tokens_in_collection - flag to enable warnings when a results
-       name is defined on a containing expression with ungrouped subexpressions that also
-       have results names
-     - warn_name_set_on_empty_Forward - flag to enable warnings when a Forward is defined
-       with a results name, but has no contents defined
-     - warn_on_multiple_string_args_to_oneof - flag to enable warnings when oneOf is
-       incorrectly called with multiple str arguments
-     - enable_debug_on_named_expressions - flag to auto-enable debug on all subsequent
-       calls to ParserElement.setName()
-"""
-__diag__.warn_multiple_tokens_in_named_alternation = False
-__diag__.warn_ungrouped_named_tokens_in_collection = False
-__diag__.warn_name_set_on_empty_Forward = False
-__diag__.warn_on_multiple_string_args_to_oneof = False
-__diag__.enable_debug_on_named_expressions = False
-__diag__._all_names = [nm for nm in vars(__diag__) if nm.startswith("enable_") or nm.startswith("warn_")]
-
-def _enable_all_warnings():
-    __diag__.warn_multiple_tokens_in_named_alternation = True
-    __diag__.warn_ungrouped_named_tokens_in_collection = True
-    __diag__.warn_name_set_on_empty_Forward = True
-    __diag__.warn_on_multiple_string_args_to_oneof = True
-__diag__.enable_all_warnings = _enable_all_warnings
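The diagnostics documented above are plain attributes on this module-level namespace, so callers opt in per flag or all at once:

    import pyparsing as pp

    pp.__diag__.warn_name_set_on_empty_Forward = True  # one flag
    pp.__diag__.enable_all_warnings()                  # or every flag listed above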
-
-
-__all__ = ['__version__', '__versionTime__', '__author__', '__compat__', '__diag__',
-           'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty',
-           'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal',
-           'PrecededBy', 'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or',
-           'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException',
-           'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException',
-           'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter',
-           'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', 'Char',
-           'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col',
-           'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString',
-           'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums',
-           'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno',
-           'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral',
-           'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables',
-           'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity',
-           'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd',
-           'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute',
-           'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation', 'locatedExpr', 'withClass',
-           'CloseMatch', 'tokenMap', 'pyparsing_common', 'pyparsing_unicode', 'unicode_set',
-           'conditionAsParseAction', 're',
-           ]
-
-system_version = tuple(sys.version_info)[:3]
-PY_3 = system_version[0] == 3
-if PY_3:
-    _MAX_INT = sys.maxsize
-    basestring = str
-    unichr = chr
-    unicode = str
-    _ustr = str
-
-    # build list of single arg builtins, that can be used as parse actions
-    singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max]
-
-else:
-    _MAX_INT = sys.maxint
-    range = xrange
-
-    def _ustr(obj):
-        """Drop-in replacement for str(obj) that tries to be Unicode
-        friendly. It first tries str(obj). If that fails with
-        a UnicodeEncodeError, then it tries unicode(obj). It then
-        < returns the unicode object | encodes it with the default
-        encoding | ... >.
-        """
-        if isinstance(obj, unicode):
-            return obj
-
-        try:
-            # If this works, then _ustr(obj) has the same behaviour as str(obj), so
-            # it won't break any existing code.
-            return str(obj)
-
-        except UnicodeEncodeError:
-            # Else encode it
-            ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace')
-            xmlcharref = Regex(r'&#\d+;')
-            xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:])
-            return xmlcharref.transformString(ret)
-
-    # build list of single arg builtins, tolerant of Python version, that can be used as parse actions
-    singleArgBuiltins = []
-    import __builtin__
-
-    for fname in "sum len sorted reversed list tuple set any all min max".split():
-        try:
-            singleArgBuiltins.append(getattr(__builtin__, fname))
-        except AttributeError:
-            continue
-
-_generatorType = type((y for y in range(1)))
-
-def _xml_escape(data):
-    """Escape &, <, >, ", ', etc. in a string of data."""
-
-    # ampersand must be replaced first
-    from_symbols = '&><"\''
-    to_symbols = ('&' + s + ';' for s in "amp gt lt quot apos".split())
-    for from_, to_ in zip(from_symbols, to_symbols):
-        data = data.replace(from_, to_)
-    return data
-
-alphas = string.ascii_uppercase + string.ascii_lowercase
-nums = "0123456789"
-hexnums = nums + "ABCDEFabcdef"
-alphanums = alphas + nums
-_bslash = chr(92)
-printables = "".join(c for c in string.printable if c not in string.whitespace)
-
-
-def conditionAsParseAction(fn, message=None, fatal=False):
-    msg = message if message is not None else "failed user-defined condition"
-    exc_type = ParseFatalException if fatal else ParseException
-    fn = _trim_arity(fn)
-
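-    # wrap the user's predicate so that a falsy return raises the configured
-    # exception type (ParseFatalException stops all parsing when fatal=True)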
-    @wraps(fn)
-    def pa(s, l, t):
-        if not bool(fn(s, l, t)):
-            raise exc_type(s, l, msg)
-
-    return pa
-
-class ParseBaseException(Exception):
-    """base exception class for all parsing runtime exceptions"""
-    # Performance tuning: we construct a *lot* of these, so keep this
-    # constructor as small and fast as possible
-    def __init__(self, pstr, loc=0, msg=None, elem=None):
-        self.loc = loc
-        if msg is None:
-            self.msg = pstr
-            self.pstr = ""
-        else:
-            self.msg = msg
-            self.pstr = pstr
-        self.parserElement = elem
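-        # store the constructor arguments in the standard Exception.args tuple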
-        self.args = (pstr, loc, msg)
-
-    @classmethod
-    def _from_exception(cls, pe):
-        """
-        internal factory method to simplify creating one type of ParseException
-        from another - avoids having __init__ signature conflicts among subclasses
-        """
-        return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement)
-
-    def __getattr__(self, aname):
-        """supported attributes by name are:
-           - lineno - returns the line number of the exception text
-           - col - returns the column number of the exception text
-           - line - returns the line containing the exception text
-        """
-        if aname == "lineno":
-            return lineno(self.loc, self.pstr)
-        elif aname in ("col", "column"):
-            return col(self.loc, self.pstr)
-        elif aname == "line":
-            return line(self.loc, self.pstr)
-        else:
-            raise AttributeError(aname)
-
-    def __str__(self):
-        if self.pstr:
-            if self.loc >= len(self.pstr):
-                foundstr = ', found end of text'
-            else:
-                foundstr = (', found %r' % self.pstr[self.loc:self.loc + 1]).replace(r'\\', '\\')
-        else:
-            foundstr = ''
-        return ("%s%s  (at char %d), (line:%d, col:%d)" %
-                   (self.msg, foundstr, self.loc, self.lineno, self.column))
-    def __repr__(self):
-        return _ustr(self)
-    def markInputline(self, markerString=">!<"):
-        """Extracts the exception line from the input string, and marks
-           the location of the exception with a special symbol.
-        """
-        line_str = self.line
-        line_column = self.column - 1
-        if markerString:
-            line_str = "".join((line_str[:line_column],
-                                markerString, line_str[line_column:]))
-        return line_str.strip()
-    def __dir__(self):
-        return "lineno col line".split() + dir(type(self))
-
-class ParseException(ParseBaseException):
-    """
-    Exception thrown when a parse expression doesn't match the input string;
-    supported attributes by name are:
-    - lineno - returns the line number of the exception text
-    - col - returns the column number of the exception text
-    - line - returns the line containing the exception text
-
-    Example::
-
-        try:
-            Word(nums).setName("integer").parseString("ABC")
-        except ParseException as pe:
-            print(pe)
-            print("column: {}".format(pe.col))
-
-    prints::
-
-        Expected integer (at char 0), (line:1, col:1)
-        column: 1
-
-    """
-
-    @staticmethod
-    def explain(exc, depth=16):
-        """
-        Method to take an exception and translate the Python internal traceback into a list
-        of the pyparsing expressions that caused the exception to be raised.
-
-        Parameters:
-
-         - exc - exception raised during parsing (need not be a ParseException, in support
-           of Python exceptions that might be raised in a parse action)
-         - depth (default=16) - number of levels back in the stack trace to list expression
-           and function names; if None, the full stack trace names will be listed; if 0, only
-           the failing input line, marker, and exception string will be shown
-
-        Returns a multi-line string listing the ParserElements and/or function names in the
-        exception's stack trace.
-
-        Note: the diagnostic output will include string representations of the expressions
-        that failed to parse. These representations will be more helpful if you use `setName` to
-        give identifiable names to your expressions. Otherwise they will use the default string
-        forms, which may be cryptic to read.
-
-        explain() is only supported under Python 3.
-        """
-        import inspect
-
-        if depth is None:
-            depth = sys.getrecursionlimit()
-        ret = []
-        if isinstance(exc, ParseBaseException):
-            ret.append(exc.line)
-            ret.append(' ' * (exc.col - 1) + '^')
-        ret.append("{0}: {1}".format(type(exc).__name__, exc))
-
-        if depth > 0:
-            callers = inspect.getinnerframes(exc.__traceback__, context=depth)
-            seen = set()
-            for i, ff in enumerate(callers[-depth:]):
-                frm = ff[0]
-
-                f_self = frm.f_locals.get('self', None)
-                if isinstance(f_self, ParserElement):
-                    if frm.f_code.co_name not in ('parseImpl', '_parseNoCache'):
-                        continue
-                    if f_self in seen:
-                        continue
-                    seen.add(f_self)
-
-                    self_type = type(f_self)
-                    ret.append("{0}.{1} - {2}".format(self_type.__module__,
-                                                      self_type.__name__,
-                                                      f_self))
-                elif f_self is not None:
-                    self_type = type(f_self)
-                    ret.append("{0}.{1}".format(self_type.__module__,
-                                                self_type.__name__))
-                else:
-                    code = frm.f_code
-                    if code.co_name in ('wrapper', '<module>'):
-                        continue
-
-                    ret.append("{0}".format(code.co_name))
-
-                depth -= 1
-                if not depth:
-                    break
-
-        return '\n'.join(ret)
-
-
-class ParseFatalException(ParseBaseException):
-    """user-throwable exception thrown when inconsistent parse content
-       is found; stops all parsing immediately"""
-    pass
-
-class ParseSyntaxException(ParseFatalException):
-    """just like :class:`ParseFatalException`, but thrown internally
-    when an :class:`ErrorStop` ('-' operator) indicates
-    that parsing is to stop immediately because an unbacktrackable
-    syntax error has been found.
-    """
-    pass
-
-#~ class ReparseException(ParseBaseException):
-    #~ """Experimental class - parse actions can raise this exception to cause
-       #~ pyparsing to reparse the input string:
-        #~ - with a modified input string, and/or
-        #~ - with a modified start location
-       #~ Set the values of the ReparseException in the constructor, and raise the
-       #~ exception in a parse action to cause pyparsing to use the new string/location.
-       #~ Setting the values as None causes no change to be made.
-       #~ """
-    #~ def __init_( self, newstring, restartLoc ):
-        #~ self.newParseText = newstring
-        #~ self.reparseLoc = restartLoc
-
-class RecursiveGrammarException(Exception):
-    """exception thrown by :class:`ParserElement.validate` if the
-    grammar could be improperly recursive
-    """
-    def __init__(self, parseElementList):
-        self.parseElementTrace = parseElementList
-
-    def __str__(self):
-        return "RecursiveGrammarException: %s" % self.parseElementTrace
-
-class _ParseResultsWithOffset(object):
-    def __init__(self, p1, p2):
-        self.tup = (p1, p2)
-    def __getitem__(self, i):
-        return self.tup[i]
-    def __repr__(self):
-        return repr(self.tup[0])
-    def setOffset(self, i):
-        self.tup = (self.tup[0], i)
-
-class ParseResults(object):
-    """Structured parse results, to provide multiple means of access to
-    the parsed data:
-
-       - as a list (``len(results)``)
-       - by list index (``results[0], results[1]``, etc.)
-       - by attribute (``results.<resultsName>`` - see :class:`ParserElement.setResultsName`)
-
-    Example::
-
-        integer = Word(nums)
-        date_str = (integer.setResultsName("year") + '/'
-                        + integer.setResultsName("month") + '/'
-                        + integer.setResultsName("day"))
-        # equivalent form:
-        # date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
-
-        # parseString returns a ParseResults object
-        result = date_str.parseString("1999/12/31")
-
-        def test(s, fn=repr):
-            print("%s -> %s" % (s, fn(eval(s))))
-        test("list(result)")
-        test("result[0]")
-        test("result['month']")
-        test("result.day")
-        test("'month' in result")
-        test("'minutes' in result")
-        test("result.dump()", str)
-
-    prints::
-
-        list(result) -> ['1999', '/', '12', '/', '31']
-        result[0] -> '1999'
-        result['month'] -> '12'
-        result.day -> '31'
-        'month' in result -> True
-        'minutes' in result -> False
-        result.dump() -> ['1999', '/', '12', '/', '31']
-        - day: 31
-        - month: 12
-        - year: 1999
-    """
-    def __new__(cls, toklist=None, name=None, asList=True, modal=True):
-        if isinstance(toklist, cls):
-            return toklist
-        retobj = object.__new__(cls)
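-        # flag tells __init__ to initialize this new instance exactly once;
-        # an existing ParseResults passed to the constructor is returned as-is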
-        retobj.__doinit = True
-        return retobj
-
-    # Performance tuning: we construct a *lot* of these, so keep this
-    # constructor as small and fast as possible
-    def __init__(self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance):
-        if self.__doinit:
-            self.__doinit = False
-            self.__name = None
-            self.__parent = None
-            self.__accumNames = {}
-            self.__asList = asList
-            self.__modal = modal
-            if toklist is None:
-                toklist = []
-            if isinstance(toklist, list):
-                self.__toklist = toklist[:]
-            elif isinstance(toklist, _generatorType):
-                self.__toklist = list(toklist)
-            else:
-                self.__toklist = [toklist]
-            self.__tokdict = dict()
-
-        if name is not None and name:
-            if not modal:
-                self.__accumNames[name] = 0
-            if isinstance(name, int):
-                name = _ustr(name)  # will always return a str, but use _ustr for consistency
-            self.__name = name
-            if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None, '', [])):
-                if isinstance(toklist, basestring):
-                    toklist = [toklist]
-                if asList:
-                    if isinstance(toklist, ParseResults):
-                        self[name] = _ParseResultsWithOffset(ParseResults(toklist.__toklist), 0)
-                    else:
-                        self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]), 0)
-                    self[name].__name = name
-                else:
-                    try:
-                        self[name] = toklist[0]
-                    except (KeyError, TypeError, IndexError):
-                        self[name] = toklist
-
-    def __getitem__(self, i):
-        if isinstance(i, (int, slice)):
-            return self.__toklist[i]
-        else:
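-            # names registered with listAllMatches=True live in __accumNames and
-            # return every match; modal names return only the most recent value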
-            if i not in self.__accumNames:
-                return self.__tokdict[i][-1][0]
-            else:
-                return ParseResults([v[0] for v in self.__tokdict[i]])
-
-    def __setitem__(self, k, v, isinstance=isinstance):
-        if isinstance(v, _ParseResultsWithOffset):
-            self.__tokdict[k] = self.__tokdict.get(k, list()) + [v]
-            sub = v[0]
-        elif isinstance(k, (int, slice)):
-            self.__toklist[k] = v
-            sub = v
-        else:
-            self.__tokdict[k] = self.__tokdict.get(k, list()) + [_ParseResultsWithOffset(v, 0)]
-            sub = v
-        if isinstance(sub, ParseResults):
-            sub.__parent = wkref(self)
-
-    def __delitem__(self, i):
-        if isinstance(i, (int, slice)):
-            mylen = len(self.__toklist)
-            del self.__toklist[i]
-
-            # convert int to slice
-            if isinstance(i, int):
-                if i < 0:
-                    i += mylen
-                i = slice(i, i + 1)
-            # get removed indices
-            removed = list(range(*i.indices(mylen)))
-            removed.reverse()
-            # fixup indices in token dictionary
-            for name, occurrences in self.__tokdict.items():
-                for j in removed:
-                    for k, (value, position) in enumerate(occurrences):
-                        occurrences[k] = _ParseResultsWithOffset(value, position - (position > j))
-        else:
-            del self.__tokdict[i]
-
-    def __contains__(self, k):
-        return k in self.__tokdict
-
-    def __len__(self):
-        return len(self.__toklist)
-
-    def __bool__(self):
-        return (not not self.__toklist)
-    __nonzero__ = __bool__
-
-    def __iter__(self):
-        return iter(self.__toklist)
-
-    def __reversed__(self):
-        return iter(self.__toklist[::-1])
-
-    def _iterkeys(self):
-        if hasattr(self.__tokdict, "iterkeys"):
-            return self.__tokdict.iterkeys()
-        else:
-            return iter(self.__tokdict)
-
-    def _itervalues(self):
-        return (self[k] for k in self._iterkeys())
-
-    def _iteritems(self):
-        return ((k, self[k]) for k in self._iterkeys())
-
-    if PY_3:
-        keys = _iterkeys
-        """Returns an iterator of all named result keys."""
-
-        values = _itervalues
-        """Returns an iterator of all named result values."""
-
-        items = _iteritems
-        """Returns an iterator of all named result key-value tuples."""
-
-    else:
-        iterkeys = _iterkeys
-        """Returns an iterator of all named result keys (Python 2.x only)."""
-
-        itervalues = _itervalues
-        """Returns an iterator of all named result values (Python 2.x only)."""
-
-        iteritems = _iteritems
-        """Returns an iterator of all named result key-value tuples (Python 2.x only)."""
-
-        def keys(self):
-            """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x)."""
-            return list(self.iterkeys())
-
-        def values(self):
-            """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x)."""
-            return list(self.itervalues())
-
-        def items(self):
-            """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x)."""
-            return list(self.iteritems())
-
-    def haskeys(self):
-        """Since keys() returns an iterator, this method is helpful in bypassing
-           code that looks for the existence of any defined results names."""
-        return bool(self.__tokdict)
-
-    def pop(self, *args, **kwargs):
-        """
-        Removes and returns item at specified index (default= ``last``).
-        Supports both ``list`` and ``dict`` semantics for ``pop()``. If
-        passed no argument or an integer argument, it will use ``list``
-        semantics and pop tokens from the list of parsed tokens. If passed
-        a non-integer argument (most likely a string), it will use ``dict``
-        semantics and pop the corresponding value from any defined results
-        names. A second default return value argument is supported, just as in
-        ``dict.pop()``.
-
-        Example::
-
-            def remove_first(tokens):
-                tokens.pop(0)
-            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
-            print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321']
-
-            label = Word(alphas)
-            patt = label("LABEL") + OneOrMore(Word(nums))
-            print(patt.parseString("AAB 123 321").dump())
-
-            # Use pop() in a parse action to remove named result (note that corresponding value is not
-            # removed from list form of results)
-            def remove_LABEL(tokens):
-                tokens.pop("LABEL")
-                return tokens
-            patt.addParseAction(remove_LABEL)
-            print(patt.parseString("AAB 123 321").dump())
-
-        prints::
-
-            ['AAB', '123', '321']
-            - LABEL: AAB
-
-            ['AAB', '123', '321']
-        """
-        if not args:
-            args = [-1]
-        for k, v in kwargs.items():
-            if k == 'default':
-                args = (args[0], v)
-            else:
-                raise TypeError("pop() got an unexpected keyword argument '%s'" % k)
-        if (isinstance(args[0], int)
-                or len(args) == 1
-                or args[0] in self):
-            index = args[0]
-            ret = self[index]
-            del self[index]
-            return ret
-        else:
-            defaultvalue = args[1]
-            return defaultvalue
-
-    def get(self, key, defaultValue=None):
-        """
-        Returns named result matching the given key, or if there is no
-        such name, then returns the given ``defaultValue`` or ``None`` if no
-        ``defaultValue`` is specified.
-
-        Similar to ``dict.get()``.
-
-        Example::
-
-            integer = Word(nums)
-            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
-
-            result = date_str.parseString("1999/12/31")
-            print(result.get("year")) # -> '1999'
-            print(result.get("hour", "not specified")) # -> 'not specified'
-            print(result.get("hour")) # -> None
-        """
-        if key in self:
-            return self[key]
-        else:
-            return defaultValue
-
-    def insert(self, index, insStr):
-        """
-        Inserts new element at location index in the list of parsed tokens.
-
-        Similar to ``list.insert()``.
-
-        Example::
-
-            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
-
-            # use a parse action to insert the parse location in the front of the parsed results
-            def insert_locn(locn, tokens):
-                tokens.insert(0, locn)
-            print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321']
-        """
-        self.__toklist.insert(index, insStr)
-        # fixup indices in token dictionary
-        for name, occurrences in self.__tokdict.items():
-            for k, (value, position) in enumerate(occurrences):
-                occurrences[k] = _ParseResultsWithOffset(value, position + (position > index))
-
-    def append(self, item):
-        """
-        Add single element to end of ParseResults list of elements.
-
-        Example::
-
-            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
-
-            # use a parse action to compute the sum of the parsed integers, and add it to the end
-            def append_sum(tokens):
-                tokens.append(sum(map(int, tokens)))
-            print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444]
-        """
-        self.__toklist.append(item)
-
-    def extend(self, itemseq):
-        """
-        Add sequence of elements to end of ParseResults list of elements.
-
-        Example::
-
-            patt = OneOrMore(Word(alphas))
-
-            # use a parse action to append the reverse of the matched strings, to make a palindrome
-            def make_palindrome(tokens):
-                tokens.extend(reversed([t[::-1] for t in tokens]))
-                return ''.join(tokens)
-            print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'
-        """
-        if isinstance(itemseq, ParseResults):
-            self.__iadd__(itemseq)
-        else:
-            self.__toklist.extend(itemseq)
-
-    def clear(self):
-        """
-        Clear all elements and results names.
-        """
-        del self.__toklist[:]
-        self.__tokdict.clear()
-
-    def __getattr__(self, name):
-        try:
-            return self[name]
-        except KeyError:
-            return ""
-
-    def __add__(self, other):
-        ret = self.copy()
-        ret += other
-        return ret
-
-    def __iadd__(self, other):
-        if other.__tokdict:
-            offset = len(self.__toklist)
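-            # re-base the incoming name offsets past our existing tokens;
-            # negative offsets (names for the whole list) anchor at the join point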
-            addoffset = lambda a: offset if a < 0 else a + offset
-            otheritems = other.__tokdict.items()
-            otherdictitems = [(k, _ParseResultsWithOffset(v[0], addoffset(v[1])))
-                              for k, vlist in otheritems for v in vlist]
-            for k, v in otherdictitems:
-                self[k] = v
-                if isinstance(v[0], ParseResults):
-                    v[0].__parent = wkref(self)
-
-        self.__toklist += other.__toklist
-        self.__accumNames.update(other.__accumNames)
-        return self
-
-    def __radd__(self, other):
-        if isinstance(other, int) and other == 0:
-            # useful for merging many ParseResults using sum() builtin
-            return self.copy()
-        else:
-            # this may raise a TypeError - so be it
-            return other + self
-
-    def __repr__(self):
-        return "(%s, %s)" % (repr(self.__toklist), repr(self.__tokdict))
-
-    def __str__(self):
-        return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']'
-
-    def _asStringList(self, sep=''):
-        out = []
-        for item in self.__toklist:
-            if out and sep:
-                out.append(sep)
-            if isinstance(item, ParseResults):
-                out += item._asStringList()
-            else:
-                out.append(_ustr(item))
-        return out
-
-    def asList(self):
-        """
-        Returns the parse results as a nested list of matching tokens, all converted to strings.
-
-        Example::
-
-            patt = OneOrMore(Word(alphas))
-            result = patt.parseString("sldkj lsdkj sldkj")
-            # even though the result prints in string-like form, it is actually a pyparsing ParseResults
-            print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj']
-
-            # Use asList() to create an actual list
-            result_list = result.asList()
-            print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj']
-        """
-        return [res.asList() if isinstance(res, ParseResults) else res for res in self.__toklist]
-
-    def asDict(self):
-        """
-        Returns the named parse results as a nested dictionary.
-
-        Example::
-
-            integer = Word(nums)
-            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
-
-            result = date_str.parseString('12/31/1999')
-            print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]})
-
-            result_dict = result.asDict()
-            print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'}
-
-            # even though a ParseResults supports dict-like access, sometime you just need to have a dict
-            import json
-            print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable
-            print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"}
-        """
-        if PY_3:
-            item_fn = self.items
-        else:
-            item_fn = self.iteritems
-
-        def toItem(obj):
-            if isinstance(obj, ParseResults):
-                if obj.haskeys():
-                    return obj.asDict()
-                else:
-                    return [toItem(v) for v in obj]
-            else:
-                return obj
-
-        return dict((k, toItem(v)) for k, v in item_fn())
-
-    def copy(self):
-        """
-        Returns a new copy of a :class:`ParseResults` object.
-        """
-        ret = ParseResults(self.__toklist)
-        ret.__tokdict = dict(self.__tokdict.items())
-        ret.__parent = self.__parent
-        ret.__accumNames.update(self.__accumNames)
-        ret.__name = self.__name
-        return ret
-
-    def asXML(self, doctag=None, namedItemsOnly=False, indent="", formatted=True):
-        """
-        (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names.
-        """
-        nl = "\n"
-        out = []
-        namedItems = dict((v[1], k) for (k, vlist) in self.__tokdict.items()
-                          for v in vlist)
-        nextLevelIndent = indent + "  "
-
-        # collapse out indents if formatting is not desired
-        if not formatted:
-            indent = ""
-            nextLevelIndent = ""
-            nl = ""
-
-        selfTag = None
-        if doctag is not None:
-            selfTag = doctag
-        else:
-            if self.__name:
-                selfTag = self.__name
-
-        if not selfTag:
-            if namedItemsOnly:
-                return ""
-            else:
-                selfTag = "ITEM"
-
-        out += [nl, indent, "<", selfTag, ">"]
-
-        for i, res in enumerate(self.__toklist):
-            if isinstance(res, ParseResults):
-                if i in namedItems:
-                    out += [res.asXML(namedItems[i],
-                                      namedItemsOnly and doctag is None,
-                                      nextLevelIndent,
-                                      formatted)]
-                else:
-                    out += [res.asXML(None,
-                                      namedItemsOnly and doctag is None,
-                                      nextLevelIndent,
-                                      formatted)]
-            else:
-                # individual token, see if there is a name for it
-                resTag = None
-                if i in namedItems:
-                    resTag = namedItems[i]
-                if not resTag:
-                    if namedItemsOnly:
-                        continue
-                    else:
-                        resTag = "ITEM"
-                xmlBodyText = _xml_escape(_ustr(res))
-                out += [nl, nextLevelIndent, "<", resTag, ">",
-                        xmlBodyText,
-                                                "</", resTag, ">"]
-
-        out += [nl, indent, "</", selfTag, ">"]
-        return "".join(out)
-
-    def __lookup(self, sub):
-        for k, vlist in self.__tokdict.items():
-            for v, loc in vlist:
-                if sub is v:
-                    return k
-        return None
-
-    def getName(self):
-        r"""
-        Returns the results name for this token expression. Useful when several
-        different expressions might match at a particular location.
-
-        Example::
-
-            integer = Word(nums)
-            ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d")
-            house_number_expr = Suppress('#') + Word(nums, alphanums)
-            user_data = (Group(house_number_expr)("house_number")
-                        | Group(ssn_expr)("ssn")
-                        | Group(integer)("age"))
-            user_info = OneOrMore(user_data)
-
-            result = user_info.parseString("22 111-22-3333 #221B")
-            for item in result:
-                print(item.getName(), ':', item[0])
-
-        prints::
-
-            age : 22
-            ssn : 111-22-3333
-            house_number : 221B
-        """
-        if self.__name:
-            return self.__name
-        elif self.__parent:
-            par = self.__parent()
-            if par:
-                return par.__lookup(self)
-            else:
-                return None
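-        # lone token carrying a single name at offset 0 or -1: the name belongs
-        # to the token itself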
-        elif (len(self) == 1
-              and len(self.__tokdict) == 1
-              and next(iter(self.__tokdict.values()))[0][1] in (0, -1)):
-            return next(iter(self.__tokdict.keys()))
-        else:
-            return None
-
-    def dump(self, indent='', full=True, include_list=True, _depth=0):
-        """
-        Diagnostic method for listing out the contents of
-        a :class:`ParseResults`. Accepts an optional ``indent`` argument so
-        that this string can be embedded in a nested display of other data.
-
-        Example::
-
-            integer = Word(nums)
-            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
-
-            result = date_str.parseString('12/31/1999')
-            print(result.dump())
-
-        prints::
-
-            ['12', '/', '31', '/', '1999']
-            - day: 1999
-            - month: 31
-            - year: 12
-        """
-        out = []
-        NL = '\n'
-        if include_list:
-            out.append(indent + _ustr(self.asList()))
-        else:
-            out.append('')
-
-        if full:
-            if self.haskeys():
-                items = sorted((str(k), v) for k, v in self.items())
-                for k, v in items:
-                    if out:
-                        out.append(NL)
-                    out.append("%s%s- %s: " % (indent, ('  ' * _depth), k))
-                    if isinstance(v, ParseResults):
-                        if v:
-                            out.append(v.dump(indent=indent, full=full, include_list=include_list, _depth=_depth + 1))
-                        else:
-                            out.append(_ustr(v))
-                    else:
-                        out.append(repr(v))
-            elif any(isinstance(vv, ParseResults) for vv in self):
-                v = self
-                for i, vv in enumerate(v):
-                    if isinstance(vv, ParseResults):
-                        out.append("\n%s%s[%d]:\n%s%s%s" % (indent,
-                                                            ('  ' * (_depth)),
-                                                            i,
-                                                            indent,
-                                                            ('  ' * (_depth + 1)),
-                                                            vv.dump(indent=indent,
-                                                                    full=full,
-                                                                    include_list=include_list,
-                                                                    _depth=_depth + 1)))
-                    else:
-                        out.append("\n%s%s[%d]:\n%s%s%s" % (indent,
-                                                            ('  ' * (_depth)),
-                                                            i,
-                                                            indent,
-                                                            ('  ' * (_depth + 1)),
-                                                            _ustr(vv)))
-
-        return "".join(out)
-
-    def pprint(self, *args, **kwargs):
-        """
-        Pretty-printer for parsed results as a list, using the
-        `pprint <https://docs.python.org/3/library/pprint.html>`_ module.
-        Accepts additional positional or keyword args as defined for
-        `pprint.pprint <https://docs.python.org/3/library/pprint.html#pprint.pprint>`_ .
-
-        Example::
-
-            ident = Word(alphas, alphanums)
-            num = Word(nums)
-            func = Forward()
-            term = ident | num | Group('(' + func + ')')
-            func <<= ident + Group(Optional(delimitedList(term)))
-            result = func.parseString("fna a,b,(fnb c,d,200),100")
-            result.pprint(width=40)
-
-        prints::
-
-            ['fna',
-             ['a',
-              'b',
-              ['(', 'fnb', ['c', 'd', '200'], ')'],
-              '100']]
-        """
-        pprint.pprint(self.asList(), *args, **kwargs)
-
-    # add support for pickle protocol
-    def __getstate__(self):
-        return (self.__toklist,
-                (self.__tokdict.copy(),
-                 self.__parent is not None and self.__parent() or None,
-                 self.__accumNames,
-                 self.__name))
-
-    def __setstate__(self, state):
-        self.__toklist = state[0]
-        self.__tokdict, par, inAccumNames, self.__name = state[1]
-        self.__accumNames = {}
-        self.__accumNames.update(inAccumNames)
-        if par is not None:
-            self.__parent = wkref(par)
-        else:
-            self.__parent = None
-
-    def __getnewargs__(self):
-        return self.__toklist, self.__name, self.__asList, self.__modal
-
-    def __dir__(self):
-        return dir(type(self)) + list(self.keys())
-
-    @classmethod
-    def from_dict(cls, other, name=None):
-        """
-        Helper classmethod to construct a ParseResults from a dict, preserving the
-        name-value relations as results names. If an optional 'name' argument is
-        given, a nested ParseResults will be returned
-        """
-        def is_iterable(obj):
-            try:
-                iter(obj)
-            except Exception:
-                return False
-            else:
-                if PY_3:
-                    return not isinstance(obj, (str, bytes))
-                else:
-                    return not isinstance(obj, basestring)
-
-        ret = cls([])
-        for k, v in other.items():
-            if isinstance(v, Mapping):
-                ret += cls.from_dict(v, name=k)
-            else:
-                ret += cls([v], name=k, asList=is_iterable(v))
-        if name is not None:
-            ret = cls([ret], name=name)
-        return ret
-
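-# Illustrative sketch (not from the original module): from_dict() maps dict
-# keys to results names, recursing into nested Mappings, e.g.
-#   rec = ParseResults.from_dict({"name": "Bob", "address": {"city": "Springfield"}})
-#   rec["name"]             # -> 'Bob'
-#   rec["address"]["city"]  # -> 'Springfield'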
-MutableMapping.register(ParseResults)
-
-def col (loc, strg):
-    """Returns current column within a string, counting newlines as line separators.
-   The first column is number 1.
-
-   Note: the default parsing behavior is to expand tabs in the input string
-   before starting the parsing process.  See
-   :class:`ParserElement.parseString` for more
-   information on parsing strings containing ``<TAB>`` s, and suggested
-   methods to maintain a consistent view of the parsed string, the parse
-   location, and line and column positions within the parsed string.
-   """
-    s = strg
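-    # the character right after a newline is column 1; otherwise count back
-    # to the most recent newline before loc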
-    return 1 if 0 < loc < len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc)
-
-def lineno(loc, strg):
-    """Returns current line number within a string, counting newlines as line separators.
-    The first line is number 1.
-
-    Note - the default parsing behavior is to expand tabs in the input string
-    before starting the parsing process.  See :class:`ParserElement.parseString`
-    for more information on parsing strings containing ``<TAB>`` s, and
-    suggested methods to maintain a consistent view of the parsed string, the
-    parse location, and line and column positions within the parsed string.
-    """
-    return strg.count("\n", 0, loc) + 1
-
-def line(loc, strg):
-    """Returns the line of text containing loc within a string, counting newlines as line separators.
-       """
-    lastCR = strg.rfind("\n", 0, loc)
-    nextCR = strg.find("\n", loc)
-    if nextCR >= 0:
-        return strg[lastCR + 1:nextCR]
-    else:
-        return strg[lastCR + 1:]
-
-def _defaultStartDebugAction(instring, loc, expr):
-    print(("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % (lineno(loc, instring), col(loc, instring))))
-
-def _defaultSuccessDebugAction(instring, startloc, endloc, expr, toks):
-    print("Matched " + _ustr(expr) + " -> " + str(toks.asList()))
-
-def _defaultExceptionDebugAction(instring, loc, expr, exc):
-    print("Exception raised:" + _ustr(exc))
-
-def nullDebugAction(*args):
-    """'Do-nothing' debug action, to suppress debugging output during parsing."""
-    pass
-
-# Only works on Python 3.x - nonlocal is toxic to Python 2 installs
-#~ 'decorator to trim function calls to match the arity of the target'
-#~ def _trim_arity(func, maxargs=3):
-    #~ if func in singleArgBuiltins:
-        #~ return lambda s,l,t: func(t)
-    #~ limit = 0
-    #~ foundArity = False
-    #~ def wrapper(*args):
-        #~ nonlocal limit,foundArity
-        #~ while 1:
-            #~ try:
-                #~ ret = func(*args[limit:])
-                #~ foundArity = True
-                #~ return ret
-            #~ except TypeError:
-                #~ if limit == maxargs or foundArity:
-                    #~ raise
-                #~ limit += 1
-                #~ continue
-    #~ return wrapper
-
-# this version is Python 2.x-3.x cross-compatible
-'decorator to trim function calls to match the arity of the target'
-def _trim_arity(func, maxargs=2):
-    if func in singleArgBuiltins:
-        return lambda s, l, t: func(t)
-    limit = [0]
-    foundArity = [False]
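-    # single-element lists emulate 'nonlocal' so wrapper() can rebind these
-    # under both Python 2 and Python 3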
-
-    # traceback return data structure changed in Py3.5 - normalize back to plain tuples
-    if system_version[:2] >= (3, 5):
-        def extract_stack(limit=0):
-            # special handling for Python 3.5.0 - extra deep call stack by 1
-            offset = -3 if system_version == (3, 5, 0) else -2
-            frame_summary = traceback.extract_stack(limit=-offset + limit - 1)[offset]
-            return [frame_summary[:2]]
-        def extract_tb(tb, limit=0):
-            frames = traceback.extract_tb(tb, limit=limit)
-            frame_summary = frames[-1]
-            return [frame_summary[:2]]
-    else:
-        extract_stack = traceback.extract_stack
-        extract_tb = traceback.extract_tb
-
-    # synthesize what would be returned by traceback.extract_stack at the call to
-    # user's parse action 'func', so that we don't incur call penalty at parse time
-
-    LINE_DIFF = 6
-    # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND
-    # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!!
-    this_line = extract_stack(limit=2)[-1]
-    pa_call_line_synth = (this_line[0], this_line[1] + LINE_DIFF)
-
-    def wrapper(*args):
-        while 1:
-            try:
-                ret = func(*args[limit[0]:])
-                foundArity[0] = True
-                return ret
-            except TypeError:
-                # re-raise TypeErrors if they did not come from our arity testing
-                if foundArity[0]:
-                    raise
-                else:
-                    try:
-                        tb = sys.exc_info()[-1]
-                        if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth:
-                            raise
-                    finally:
-                        try:
-                            del tb
-                        except NameError:
-                            pass
-
-                if limit[0] <= maxargs:
-                    limit[0] += 1
-                    continue
-                raise
-
-    # copy func name to wrapper for sensible debug output
-    func_name = ""
-    try:
-        func_name = getattr(func, '__name__',
-                            getattr(func, '__class__').__name__)
-    except Exception:
-        func_name = str(func)
-    wrapper.__name__ = func_name
-
-    return wrapper
-
-
-class ParserElement(object):
-    """Abstract base level parser element class."""
-    DEFAULT_WHITE_CHARS = " \n\t\r"
-    verbose_stacktrace = False
-
-    @staticmethod
-    def setDefaultWhitespaceChars(chars):
-        r"""
-        Overrides the default whitespace chars
-
-        Example::
-
-            # default whitespace chars are space, <TAB> and newline
-            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def', 'ghi', 'jkl']
-
-            # change to just treat newline as significant
-            ParserElement.setDefaultWhitespaceChars(" \t")
-            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def']
-        """
-        ParserElement.DEFAULT_WHITE_CHARS = chars
-
-    @staticmethod
-    def inlineLiteralsUsing(cls):
-        """
-        Set class to be used for inclusion of string literals into a parser.
-
-        Example::
-
-            # default literal class used is Literal
-            integer = Word(nums)
-            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
-
-            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']
-
-
-            # change to Suppress
-            ParserElement.inlineLiteralsUsing(Suppress)
-            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
-
-            date_str.parseString("1999/12/31")  # -> ['1999', '12', '31']
-        """
-        ParserElement._literalStringClass = cls
-
-    @classmethod
-    def _trim_traceback(cls, tb):
-        while tb.tb_next:
-            tb = tb.tb_next
-        return tb
-
-    def __init__(self, savelist=False):
-        self.parseAction = list()
-        self.failAction = None
-        # ~ self.name = ""  # don't define self.name, let subclasses try/except upcall
-        self.strRepr = None
-        self.resultsName = None
-        self.saveAsList = savelist
-        self.skipWhitespace = True
-        self.whiteChars = set(ParserElement.DEFAULT_WHITE_CHARS)
-        self.copyDefaultWhiteChars = True
-        self.mayReturnEmpty = False # used when checking for left-recursion
-        self.keepTabs = False
-        self.ignoreExprs = list()
-        self.debug = False
-        self.streamlined = False
-        self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index
-        self.errmsg = ""
-        self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all)
-        self.debugActions = (None, None, None)  # custom debug actions
-        self.re = None
-        self.callPreparse = True # used to avoid redundant calls to preParse
-        self.callDuringTry = False
-
-    def copy(self):
-        """
-        Make a copy of this :class:`ParserElement`.  Useful for defining
-        different parse actions for the same parsing pattern, using copies of
-        the original parse element.
-
-        Example::
-
-            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
-            integerK = integer.copy().addParseAction(lambda toks: toks[0] * 1024) + Suppress("K")
-            integerM = integer.copy().addParseAction(lambda toks: toks[0] * 1024 * 1024) + Suppress("M")
-
-            print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M"))
-
-        prints::
-
-            [5120, 100, 655360, 268435456]
-
-        Equivalent form of ``expr.copy()`` is just ``expr()``::
-
-            integerM = integer().addParseAction(lambda toks: toks[0] * 1024 * 1024) + Suppress("M")
-        """
-        cpy = copy.copy(self)
-        cpy.parseAction = self.parseAction[:]
-        cpy.ignoreExprs = self.ignoreExprs[:]
-        if self.copyDefaultWhiteChars:
-            cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
-        return cpy
-
-    def setName(self, name):
-        """
-        Define name for this expression, makes debugging and exception messages clearer.
-
-        Example::
-
-            Word(nums).parseString("ABC")  # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1)
-            Word(nums).setName("integer").parseString("ABC")  # -> Exception: Expected integer (at char 0), (line:1, col:1)
-        """
-        self.name = name
-        self.errmsg = "Expected " + self.name
-        if __diag__.enable_debug_on_named_expressions:
-            self.setDebug()
-        return self
-
-    def setResultsName(self, name, listAllMatches=False):
-        """
-        Define name for referencing matching tokens as a nested attribute
-        of the returned parse results.
-        NOTE: this returns a *copy* of the original :class:`ParserElement` object;
-        this is so that the client can define a basic element, such as an
-        integer, and reference it in multiple places with different names.
-
-        You can also set results names using the abbreviated syntax,
-        ``expr("name")`` in place of ``expr.setResultsName("name")``
-        - see :class:`__call__`.
-
-        Example::
-
-            date_str = (integer.setResultsName("year") + '/'
-                        + integer.setResultsName("month") + '/'
-                        + integer.setResultsName("day"))
-
-            # equivalent form:
-            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
-        """
-        return self._setResultsName(name, listAllMatches)
-
-    def _setResultsName(self, name, listAllMatches=False):
-        newself = self.copy()
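-        # a trailing '*' on the name is shorthand for listAllMatches=True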
-        if name.endswith("*"):
-            name = name[:-1]
-            listAllMatches = True
-        newself.resultsName = name
-        newself.modalResults = not listAllMatches
-        return newself
-
-    def setBreak(self, breakFlag=True):
-        """Method to invoke the Python pdb debugger when this element is
-           about to be parsed. Set ``breakFlag`` to True to enable, False to
-           disable.
-        """
-        if breakFlag:
-            _parseMethod = self._parse
-            def breaker(instring, loc, doActions=True, callPreParse=True):
-                import pdb
-                # this call to pdb.set_trace() is intentional, not a checkin error
-                pdb.set_trace()
-                return _parseMethod(instring, loc, doActions, callPreParse)
-            breaker._originalParseMethod = _parseMethod
-            self._parse = breaker
-        else:
-            if hasattr(self._parse, "_originalParseMethod"):
-                self._parse = self._parse._originalParseMethod
-        return self
-
-    def setParseAction(self, *fns, **kwargs):
-        """
-        Define one or more actions to perform when successfully matching parse element definition.
-        Parse action fn is a callable method with 0-3 arguments, called as ``fn(s, loc, toks)`` ,
-        ``fn(loc, toks)`` , ``fn(toks)`` , or just ``fn()`` , where:
-
-        - s   = the original string being parsed (see note below)
-        - loc = the location of the matching substring
-        - toks = a list of the matched tokens, packaged as a :class:`ParseResults` object
-
-        If the functions in fns modify the tokens, they can return them as the return
-        value from fn, and the modified list of tokens will replace the original.
-        Otherwise, fn does not need to return any value.
-
-        If None is passed as the parse action, all previously added parse actions for this
-        expression are cleared.
-
-        Optional keyword arguments:
-        - callDuringTry = (default= ``False``) indicate if parse action should be run during lookaheads and alternate testing
-
-        Note: the default parsing behavior is to expand tabs in the input string
-        before starting the parsing process.  See :class:`parseString <ParserElement.parseString>` for more
-        information on parsing strings containing ``<TAB>`` s, and suggested
-        methods to maintain a consistent view of the parsed string, the parse
-        location, and line and column positions within the parsed string.
-
-        Example::
-
-            integer = Word(nums)
-            date_str = integer + '/' + integer + '/' + integer
-
-            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']
-
-            # use parse action to convert to ints at parse time
-            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
-            date_str = integer + '/' + integer + '/' + integer
-
-            # note that integer fields are now ints, not strings
-            date_str.parseString("1999/12/31")  # -> [1999, '/', 12, '/', 31]
-        """
-        if list(fns) == [None,]:
-            self.parseAction = []
-        else:
-            if not all(callable(fn) for fn in fns):
-                raise TypeError("parse actions must be callable")
-            self.parseAction = list(map(_trim_arity, list(fns)))
-            self.callDuringTry = kwargs.get("callDuringTry", False)
-        return self
-
-    def addParseAction(self, *fns, **kwargs):
-        """
-        Add one or more parse actions to expression's list of parse actions. See :class:`setParseAction`.
-
-        See examples in :class:`copy`.
-        """
-        self.parseAction += list(map(_trim_arity, list(fns)))
-        self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
-        return self
-
-    def addCondition(self, *fns, **kwargs):
-        """Add a boolean predicate function to expression's list of parse actions. See
-        :class:`setParseAction` for function call signatures. Unlike ``setParseAction``,
-        functions passed to ``addCondition`` need to return boolean success/fail of the condition.
-
-        Optional keyword arguments:
-        - message = define a custom message to be used in the raised exception
-        - fatal   = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException
-
-        Example::
-
-            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
-            year_int = integer.copy()
-            year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later")
-            date_str = year_int + '/' + integer + '/' + integer
-
-            result = date_str.parseString("1999/12/31")  # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1)
-        """
-        for fn in fns:
-            self.parseAction.append(conditionAsParseAction(fn, message=kwargs.get('message'),
-                                                           fatal=kwargs.get('fatal', False)))
-
-        self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
-        return self
-
-    def setFailAction(self, fn):
-        """Define action to perform if parsing fails at this expression.
-           Fail action fn is a callable function that takes the arguments
-           ``fn(s, loc, expr, err)`` where:
-           - s = string being parsed
-           - loc = location where expression match was attempted and failed
-           - expr = the parse expression that failed
-           - err = the exception thrown
-           The function returns no value.  It may throw :class:`ParseFatalException`
-           if it is desired to stop parsing immediately."""
-        self.failAction = fn
-        return self
-
-    def _skipIgnorables(self, instring, loc):
-        exprsFound = True
-        while exprsFound:
-            exprsFound = False
-            for e in self.ignoreExprs:
-                try:
-                    while 1:
-                        loc, dummy = e._parse(instring, loc)
-                        exprsFound = True
-                except ParseException:
-                    pass
-        return loc
-
-    def preParse(self, instring, loc):
-        if self.ignoreExprs:
-            loc = self._skipIgnorables(instring, loc)
-
-        if self.skipWhitespace:
-            wt = self.whiteChars
-            instrlen = len(instring)
-            while loc < instrlen and instring[loc] in wt:
-                loc += 1
-
-        return loc
-
-    def parseImpl(self, instring, loc, doActions=True):
-        return loc, []
-
-    def postParse(self, instring, loc, tokenlist):
-        return tokenlist
-
-    # ~ @profile
-    def _parseNoCache(self, instring, loc, doActions=True, callPreParse=True):
-        TRY, MATCH, FAIL = 0, 1, 2
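-        # indices into the debugActions tuple: (match-start, match-success, match-failure)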
-        debugging = (self.debug)  # and doActions)
-
-        if debugging or self.failAction:
-            # ~ print ("Match", self, "at loc", loc, "(%d, %d)" % (lineno(loc, instring), col(loc, instring)))
-            if self.debugActions[TRY]:
-                self.debugActions[TRY](instring, loc, self)
-            try:
-                if callPreParse and self.callPreparse:
-                    preloc = self.preParse(instring, loc)
-                else:
-                    preloc = loc
-                tokensStart = preloc
-                if self.mayIndexError or preloc >= len(instring):
-                    try:
-                        loc, tokens = self.parseImpl(instring, preloc, doActions)
-                    except IndexError:
-                        raise ParseException(instring, len(instring), self.errmsg, self)
-                else:
-                    loc, tokens = self.parseImpl(instring, preloc, doActions)
-            except Exception as err:
-                # ~ print ("Exception raised:", err)
-                if self.debugActions[FAIL]:
-                    self.debugActions[FAIL](instring, tokensStart, self, err)
-                if self.failAction:
-                    self.failAction(instring, tokensStart, self, err)
-                raise
-        else:
-            if callPreParse and self.callPreparse:
-                preloc = self.preParse(instring, loc)
-            else:
-                preloc = loc
-            tokensStart = preloc
-            if self.mayIndexError or preloc >= len(instring):
-                try:
-                    loc, tokens = self.parseImpl(instring, preloc, doActions)
-                except IndexError:
-                    raise ParseException(instring, len(instring), self.errmsg, self)
-            else:
-                loc, tokens = self.parseImpl(instring, preloc, doActions)
-
-        tokens = self.postParse(instring, loc, tokens)
-
-        retTokens = ParseResults(tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults)
-        if self.parseAction and (doActions or self.callDuringTry):
-            if debugging:
-                try:
-                    for fn in self.parseAction:
-                        try:
-                            tokens = fn(instring, tokensStart, retTokens)
-                        except IndexError as parse_action_exc:
-                            exc = ParseException("exception raised in parse action")
-                            exc.__cause__ = parse_action_exc
-                            raise exc
-
-                        if tokens is not None and tokens is not retTokens:
-                            retTokens = ParseResults(tokens,
-                                                      self.resultsName,
-                                                      asList=self.saveAsList and isinstance(tokens, (ParseResults, list)),
-                                                      modal=self.modalResults)
-                except Exception as err:
-                    # ~ print "Exception raised in user parse action:", err
-                    if self.debugActions[FAIL]:
-                        self.debugActions[FAIL](instring, tokensStart, self, err)
-                    raise
-            else:
-                for fn in self.parseAction:
-                    try:
-                        tokens = fn(instring, tokensStart, retTokens)
-                    except IndexError as parse_action_exc:
-                        exc = ParseException("exception raised in parse action")
-                        exc.__cause__ = parse_action_exc
-                        raise exc
-
-                    if tokens is not None and tokens is not retTokens:
-                        retTokens = ParseResults(tokens,
-                                                  self.resultsName,
-                                                  asList=self.saveAsList and isinstance(tokens, (ParseResults, list)),
-                                                  modal=self.modalResults)
-        if debugging:
-            # ~ print ("Matched", self, "->", retTokens.asList())
-            if self.debugActions[MATCH]:
-                self.debugActions[MATCH](instring, tokensStart, loc, self, retTokens)
-
-        return loc, retTokens
-
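-    # lookahead helpers: run this expression at `loc` without executing any
-    # parse actions, returning either the matched end location (tryParse) or
-    # a simple True/False (canParseNext)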
-    def tryParse(self, instring, loc):
-        try:
-            return self._parse(instring, loc, doActions=False)[0]
-        except ParseFatalException:
-            raise ParseException(instring, loc, self.errmsg, self)
-
-    def canParseNext(self, instring, loc):
-        try:
-            self.tryParse(instring, loc)
-        except (ParseException, IndexError):
-            return False
-        else:
-            return True
-
-    class _UnboundedCache(object):
-        def __init__(self):
-            cache = {}
-            self.not_in_cache = not_in_cache = object()
-
-            def get(self, key):
-                return cache.get(key, not_in_cache)
-
-            def set(self, key, value):
-                cache[key] = value
-
-            def clear(self):
-                cache.clear()
-
-            def cache_len(self):
-                return len(cache)
-
-            self.get = types.MethodType(get, self)
-            self.set = types.MethodType(set, self)
-            self.clear = types.MethodType(clear, self)
-            self.__len__ = types.MethodType(cache_len, self)
-
-    if _OrderedDict is not None:
-        class _FifoCache(object):
-            def __init__(self, size):
-                self.not_in_cache = not_in_cache = object()
-
-                cache = _OrderedDict()
-
-                def get(self, key):
-                    return cache.get(key, not_in_cache)
-
-                def set(self, key, value):
-                    cache[key] = value
-                    while len(cache) > size:
-                        try:
-                            cache.popitem(False)
-                        except KeyError:
-                            pass
-
-                def clear(self):
-                    cache.clear()
-
-                def cache_len(self):
-                    return len(cache)
-
-                self.get = types.MethodType(get, self)
-                self.set = types.MethodType(set, self)
-                self.clear = types.MethodType(clear, self)
-                self.__len__ = types.MethodType(cache_len, self)
-
-    else:
-        class _FifoCache(object):
-            def __init__(self, size):
-                self.not_in_cache = not_in_cache = object()
-
-                cache = {}
-                key_fifo = collections.deque()  # unbounded; eviction is handled in set()
-
-                def get(self, key):
-                    return cache.get(key, not_in_cache)
-
-                def set(self, key, value):
-                    cache[key] = value
-                    key_fifo.append(key)
-                    # evict oldest entries so the cache never exceeds `size`
-                    while len(key_fifo) > size:
-                        cache.pop(key_fifo.popleft(), None)
-
-                def clear(self):
-                    cache.clear()
-                    key_fifo.clear()
-
-                def cache_len(self):
-                    return len(cache)
-
-                self.get = types.MethodType(get, self)
-                self.set = types.MethodType(set, self)
-                self.clear = types.MethodType(clear, self)
-                self.__len__ = types.MethodType(cache_len, self)
-
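-    # editor's sketch (illustrative, not part of the original source): the
-    # OrderedDict-backed _FifoCache above evicts its oldest entry once `size`
-    # is exceeded, e.g.:
-    #   c = ParserElement._FifoCache(2)
-    #   c.set('a', 1); c.set('b', 2); c.set('c', 3)   # 'a' is evicted
-    #   c.get('a') is c.not_in_cache                  # -> True
-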
-    # argument cache for optimizing repeated calls when backtracking through recursive expressions
-    packrat_cache = {}  # this is set later by enablePackrat(); this is here so that resetCache() doesn't fail
-    packrat_cache_lock = RLock()
-    packrat_cache_stats = [0, 0]
-
-    # this method gets repeatedly called during backtracking with the same arguments -
-    # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression
-    def _parseCache(self, instring, loc, doActions=True, callPreParse=True):
-        HIT, MISS = 0, 1
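-        # the cache key captures everything that can affect the parse outcome:
-        # the expression itself, the input string, the location, and both flags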
-        lookup = (self, instring, loc, callPreParse, doActions)
-        with ParserElement.packrat_cache_lock:
-            cache = ParserElement.packrat_cache
-            value = cache.get(lookup)
-            if value is cache.not_in_cache:
-                ParserElement.packrat_cache_stats[MISS] += 1
-                try:
-                    value = self._parseNoCache(instring, loc, doActions, callPreParse)
-                except ParseBaseException as pe:
-                    # cache a copy of the exception, without the traceback
-                    cache.set(lookup, pe.__class__(*pe.args))
-                    raise
-                else:
-                    cache.set(lookup, (value[0], value[1].copy()))
-                    return value
-            else:
-                ParserElement.packrat_cache_stats[HIT] += 1
-                if isinstance(value, Exception):
-                    raise value
-                return value[0], value[1].copy()
-
-    _parse = _parseNoCache
-
-    @staticmethod
-    def resetCache():
-        ParserElement.packrat_cache.clear()
-        ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats)
-
-    _packratEnabled = False
-    @staticmethod
-    def enablePackrat(cache_size_limit=128):
-        """Enables "packrat" parsing, which adds memoizing to the parsing logic.
-           Repeated parse attempts at the same string location (which happens
-           often in many complex grammars) can immediately return a cached value,
-           instead of re-executing parsing/validating code.  Memoizing is done for
-           both valid results and parsing exceptions.
-
-           Parameters:
-
-           - cache_size_limit - (default= ``128``) - if an integer value is provided,
-             it will limit the size of the packrat cache; if None is passed, then
-             the cache size will be unbounded; if 0 is passed, the cache will
-             be effectively disabled.
-
-           This speedup may break existing programs that use parse actions that
-           have side-effects.  For this reason, packrat parsing is disabled when
-           you first import pyparsing.  To activate the packrat feature, your
-           program must call the class method :class:`ParserElement.enablePackrat`.
-           For best results, call ``enablePackrat()`` immediately after
-           importing pyparsing.
-
-           Example::
-
-               import pyparsing
-               pyparsing.ParserElement.enablePackrat()
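-
-               # editor's sketch: the documented cache_size_limit parameter -
-               # an int bounds the cache, None makes it unbounded, 0 disables it
-               pyparsing.ParserElement.enablePackrat(cache_size_limit=256)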
-        """
-        if not ParserElement._packratEnabled:
-            ParserElement._packratEnabled = True
-            if cache_size_limit is None:
-                ParserElement.packrat_cache = ParserElement._UnboundedCache()
-            else:
-                ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit)
-            ParserElement._parse = ParserElement._parseCache
-
-    def parseString(self, instring, parseAll=False):
-        """
-        Execute the parse expression with the given string.
-        This is the main interface to the client code, once the complete
-        expression has been built.
-
-        Returns the parsed data as a :class:`ParseResults` object, which may be
-        accessed as a list, or as a dict or object with attributes if the given parser
-        includes results names.
-
-        If you want the grammar to require that the entire input string be
-        successfully parsed, then set ``parseAll`` to True (equivalent to ending
-        the grammar with ``StringEnd()``).
-
-        Note: ``parseString`` implicitly calls ``expandtabs()`` on the input string,
-        in order to report proper column numbers in parse actions.
-        If the input string contains tabs and
-        the grammar uses parse actions that use the ``loc`` argument to index into the
-        string being parsed, you can ensure you have a consistent view of the input
-        string by:
-
-        - calling ``parseWithTabs`` on your grammar before calling ``parseString``
-          (see :class:`parseWithTabs`)
-        - defining your parse action using the full ``(s, loc, toks)`` signature, and
-          referencing the input string using the parse action's ``s`` argument
-        - explicitly expanding the tabs in your input string before calling
-          ``parseString``
-
-        Example::
-
-            Word('a').parseString('aaaaabaaa')  # -> ['aaaaa']
-            Word('a').parseString('aaaaabaaa', parseAll=True)  # -> Exception: Expected end of text
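-
-            # editor's sketch: preserve tabs when parse actions index into the
-            # original string (see the tab-handling notes above)
-            Word('a').parseWithTabs().parseString('aaa\taaa')  # -> ['aaa']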
-        """
-        ParserElement.resetCache()
-        if not self.streamlined:
-            self.streamline()
-            # ~ self.saveAsList = True
-        for e in self.ignoreExprs:
-            e.streamline()
-        if not self.keepTabs:
-            instring = instring.expandtabs()
-        try:
-            loc, tokens = self._parse(instring, 0)
-            if parseAll:
-                loc = self.preParse(instring, loc)
-                se = Empty() + StringEnd()
-                se._parse(instring, loc)
-        except ParseBaseException as exc:
-            if ParserElement.verbose_stacktrace:
-                raise
-            else:
-                # catch and re-raise exception from here, clearing out pyparsing internal stack trace
-                if getattr(exc, '__traceback__', None) is not None:
-                    exc.__traceback__ = self._trim_traceback(exc.__traceback__)
-                raise exc
-        else:
-            return tokens
-
-    def scanString(self, instring, maxMatches=_MAX_INT, overlap=False):
-        """
-        Scan the input string for expression matches.  Each match will return the
-        matching tokens, start location, and end location.  May be called with optional
-        ``maxMatches`` argument, to clip scanning after 'n' matches are found.  If
-        ``overlap`` is specified, then overlapping matches will be reported.
-
-        Note that the start and end locations are reported relative to the string
-        being parsed.  See :class:`parseString` for more information on parsing
-        strings with embedded tabs.
-
-        Example::
-
-            source = "sldjf123lsdjjkf345sldkjf879lkjsfd987"
-            print(source)
-            for tokens, start, end in Word(alphas).scanString(source):
-                print(' '*start + '^'*(end-start))
-                print(' '*start + tokens[0])
-
-        prints::
-
-            sldjf123lsdjjkf345sldkjf879lkjsfd987
-            ^^^^^
-            sldjf
-                    ^^^^^^^
-                    lsdjjkf
-                              ^^^^^^
-                              sldkjf
-                                       ^^^^^^
-                                       lkjsfd
-        """
-        if not self.streamlined:
-            self.streamline()
-        for e in self.ignoreExprs:
-            e.streamline()
-
-        if not self.keepTabs:
-            instring = _ustr(instring).expandtabs()
-        instrlen = len(instring)
-        loc = 0
-        preparseFn = self.preParse
-        parseFn = self._parse
-        ParserElement.resetCache()
-        matches = 0
-        try:
-            while loc <= instrlen and matches < maxMatches:
-                try:
-                    preloc = preparseFn(instring, loc)
-                    nextLoc, tokens = parseFn(instring, preloc, callPreParse=False)
-                except ParseException:
-                    loc = preloc + 1
-                else:
-                    if nextLoc > loc:
-                        matches += 1
-                        yield tokens, preloc, nextLoc
-                        if overlap:
-                            nextloc = preparseFn(instring, loc)
-                            if nextloc > loc:
-                                loc = nextLoc
-                            else:
-                                loc += 1
-                        else:
-                            loc = nextLoc
-                    else:
-                        loc = preloc + 1
-        except ParseBaseException as exc:
-            if ParserElement.verbose_stacktrace:
-                raise
-            else:
-                # catch and re-raise exception from here, clearing out pyparsing internal stack trace
-                if getattr(exc, '__traceback__', None) is not None:
-                    exc.__traceback__ = self._trim_traceback(exc.__traceback__)
-                raise exc
-
-    def transformString(self, instring):
-        """
-        Extension to :class:`scanString`, to modify matching text with modified tokens that may
-        be returned from a parse action.  To use ``transformString``, define a grammar and
-        attach a parse action to it that modifies the returned token list.
-        Invoking ``transformString()`` on a target string will then scan for matches,
-        and replace the matched text patterns according to the logic in the parse
-        action.  ``transformString()`` returns the resulting transformed string.
-
-        Example::
-
-            wd = Word(alphas)
-            wd.setParseAction(lambda toks: toks[0].title())
-
-            print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york."))
-
-        prints::
-
-            Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York.
-        """
-        out = []
-        lastE = 0
-        # force preservation of s, to minimize unwanted transformation of string, and to
-        # keep string locs straight between transformString and scanString
-        self.keepTabs = True
-        try:
-            for t, s, e in self.scanString(instring):
-                out.append(instring[lastE:s])
-                if t:
-                    if isinstance(t, ParseResults):
-                        out += t.asList()
-                    elif isinstance(t, list):
-                        out += t
-                    else:
-                        out.append(t)
-                lastE = e
-            out.append(instring[lastE:])
-            out = [o for o in out if o]
-            return "".join(map(_ustr, _flatten(out)))
-        except ParseBaseException as exc:
-            if ParserElement.verbose_stacktrace:
-                raise
-            else:
-                # catch and re-raise exception from here, clearing out pyparsing internal stack trace
-                if getattr(exc, '__traceback__', None) is not None:
-                    exc.__traceback__ = self._trim_traceback(exc.__traceback__)
-                raise exc
-
-    def searchString(self, instring, maxMatches=_MAX_INT):
-        """
-        Another extension to :class:`scanString`, simplifying the access to the tokens found
-        to match the given parse expression.  May be called with optional
-        ``maxMatches`` argument, to clip searching after 'n' matches are found.
-
-        Example::
-
-            # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters
-            cap_word = Word(alphas.upper(), alphas.lower())
-
-            print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))
-
-            # the sum() builtin can be used to merge results into a single ParseResults object
-            print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")))
-
-        prints::
-
-            [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']]
-            ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity']
-        """
-        try:
-            return ParseResults([t for t, s, e in self.scanString(instring, maxMatches)])
-        except ParseBaseException as exc:
-            if ParserElement.verbose_stacktrace:
-                raise
-            else:
-                # catch and re-raise exception from here, clearing out pyparsing internal stack trace
-                if getattr(exc, '__traceback__', None) is not None:
-                    exc.__traceback__ = self._trim_traceback(exc.__traceback__)
-                raise exc
-
-    def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False):
-        """
-        Generator method to split a string using the given expression as a separator.
-        May be called with optional ``maxsplit`` argument, to limit the number of splits;
-        and the optional ``includeSeparators`` argument (default= ``False``), if the separating
-        matching text should be included in the split results.
-
-        Example::
-
-            punc = oneOf(list(".,;:/-!?"))
-            print(list(punc.split("This, this?, this sentence, is badly punctuated!")))
-
-        prints::
-
-            ['This', ' this', '', ' this sentence', ' is badly punctuated', '']
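-
-        Editor's sketch - the documented ``includeSeparators`` flag keeps the
-        matched separator text in the results::
-
-            print(list(punc.split("This, this?", includeSeparators=True)))
-            # -> ['This', ',', ' this', '?', '']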
-        """
-        splits = 0
-        last = 0
-        for t, s, e in self.scanString(instring, maxMatches=maxsplit):
-            yield instring[last:s]
-            if includeSeparators:
-                yield t[0]
-            last = e
-        yield instring[last:]
-
-    def __add__(self, other):
-        """
-        Implementation of + operator - returns :class:`And`. Adding strings to a ParserElement
-        converts them to :class:`Literal`s by default.
-
-        Example::
-
-            greet = Word(alphas) + "," + Word(alphas) + "!"
-            hello = "Hello, World!"
-            print (hello, "->", greet.parseString(hello))
-
-        prints::
-
-            Hello, World! -> ['Hello', ',', 'World', '!']
-
-        ``...`` may be used as a parse expression as a short form of :class:`SkipTo`.
-
-            Literal('start') + ... + Literal('end')
-
-        is equivalent to:
-
-            Literal('start') + SkipTo('end')("_skipped*") + Literal('end')
-
-        Note that the skipped text is returned with '_skipped' as a results name,
-        and to support having multiple skips in the same parser, the value returned is
-        a list of all skipped text.
-        """
-        if other is Ellipsis:
-            return _PendingSkip(self)
-
-        if isinstance(other, basestring):
-            other = self._literalStringClass(other)
-        if not isinstance(other, ParserElement):
-            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
-                          SyntaxWarning, stacklevel=2)
-            return None
-        return And([self, other])
-
-    def __radd__(self, other):
-        """
-        Implementation of + operator when left operand is not a :class:`ParserElement`
-        """
-        if other is Ellipsis:
-            return SkipTo(self)("_skipped*") + self
-
-        if isinstance(other, basestring):
-            other = self._literalStringClass(other)
-        if not isinstance(other, ParserElement):
-            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
-                          SyntaxWarning, stacklevel=2)
-            return None
-        return other + self
-
-    def __sub__(self, other):
-        """
-        Implementation of - operator, returns :class:`And` with error stop
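-
-        Example (editor's sketch)::
-
-            # once 'port' has matched, the integer is required; a mismatch
-            # raises a fatal (non-backtracking) error instead of a normal one
-            port_def = Literal('port') - Word(nums)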
-        """
-        if isinstance(other, basestring):
-            other = self._literalStringClass(other)
-        if not isinstance(other, ParserElement):
-            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
-                          SyntaxWarning, stacklevel=2)
-            return None
-        return self + And._ErrorStop() + other
-
-    def __rsub__(self, other):
-        """
-        Implementation of - operator when left operand is not a :class:`ParserElement`
-        """
-        if isinstance(other, basestring):
-            other = self._literalStringClass(other)
-        if not isinstance(other, ParserElement):
-            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
-                          SyntaxWarning, stacklevel=2)
-            return None
-        return other - self
-
-    def __mul__(self, other):
-        """
-        Implementation of * operator, allows use of ``expr * 3`` in place of
-        ``expr + expr + expr``.  Expressions may also be multiplied by a 2-integer
-        tuple, similar to ``{min, max}`` multipliers in regular expressions.  Tuples
-        may also include ``None`` as in:
-         - ``expr*(n, None)`` or ``expr*(n, )`` is equivalent
-              to ``expr*n + ZeroOrMore(expr)``
-              (read as "at least n instances of ``expr``")
-         - ``expr*(None, n)`` is equivalent to ``expr*(0, n)``
-              (read as "0 to n instances of ``expr``")
-         - ``expr*(None, None)`` is equivalent to ``ZeroOrMore(expr)``
-         - ``expr*(1, None)`` is equivalent to ``OneOrMore(expr)``
-
-        Note that ``expr*(None, n)`` does not raise an exception if
-        more than n exprs exist in the input stream; that is,
-        ``expr*(None, n)`` does not enforce a maximum number of expr
-        occurrences.  If this behavior is desired, then write
-        ``expr*(None, n) + ~expr``
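-
-        Example (editor's sketch)::
-
-            Word(nums) * 3          # exactly three number-words
-            Word(nums) * (2, 4)     # two to four number-words
-            Word(nums) * (1, None)  # same as OneOrMore(Word(nums))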
-        """
-        if other is Ellipsis:
-            other = (0, None)
-        elif isinstance(other, tuple) and other[:1] == (Ellipsis,):
-            other = ((0, ) + other[1:] + (None,))[:2]
-
-        if isinstance(other, int):
-            minElements, optElements = other, 0
-        elif isinstance(other, tuple):
-            other = tuple(o if o is not Ellipsis else None for o in other)
-            other = (other + (None, None))[:2]
-            if other[0] is None:
-                other = (0, other[1])
-            if isinstance(other[0], int) and other[1] is None:
-                if other[0] == 0:
-                    return ZeroOrMore(self)
-                if other[0] == 1:
-                    return OneOrMore(self)
-                else:
-                    return self * other[0] + ZeroOrMore(self)
-            elif isinstance(other[0], int) and isinstance(other[1], int):
-                minElements, optElements = other
-                optElements -= minElements
-            else:
-                raise TypeError("cannot multiply 'ParserElement' and ('%s', '%s') objects" % (type(other[0]), type(other[1])))
-        else:
-            raise TypeError("cannot multiply 'ParserElement' and '%s' objects" % type(other))
-
-        if minElements < 0:
-            raise ValueError("cannot multiply ParserElement by negative value")
-        if optElements < 0:
-            raise ValueError("second tuple value must be greater or equal to first tuple value")
-        if minElements == optElements == 0:
-            raise ValueError("cannot multiply ParserElement by 0 or (0, 0)")
-
-        if optElements:
-            def makeOptionalList(n):
-                if n > 1:
-                    return Optional(self + makeOptionalList(n - 1))
-                else:
-                    return Optional(self)
-            if minElements:
-                if minElements == 1:
-                    ret = self + makeOptionalList(optElements)
-                else:
-                    ret = And([self] * minElements) + makeOptionalList(optElements)
-            else:
-                ret = makeOptionalList(optElements)
-        else:
-            if minElements == 1:
-                ret = self
-            else:
-                ret = And([self] * minElements)
-        return ret
-
-    def __rmul__(self, other):
-        return self.__mul__(other)
-
-    def __or__(self, other):
-        """
-        Implementation of | operator - returns :class:`MatchFirst`
-        """
-        if other is Ellipsis:
-            return _PendingSkip(self, must_skip=True)
-
-        if isinstance(other, basestring):
-            other = self._literalStringClass(other)
-        if not isinstance(other, ParserElement):
-            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
-                          SyntaxWarning, stacklevel=2)
-            return None
-        return MatchFirst([self, other])
-
-    def __ror__(self, other):
-        """
-        Implementation of | operator when left operand is not a :class:`ParserElement`
-        """
-        if isinstance(other, basestring):
-            other = self._literalStringClass(other)
-        if not isinstance(other, ParserElement):
-            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
-                          SyntaxWarning, stacklevel=2)
-            return None
-        return other | self
-
-    def __xor__(self, other):
-        """
-        Implementation of ^ operator - returns :class:`Or`
-        """
-        if isinstance(other, basestring):
-            other = self._literalStringClass(other)
-        if not isinstance(other, ParserElement):
-            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
-                          SyntaxWarning, stacklevel=2)
-            return None
-        return Or([self, other])
-
-    def __rxor__(self, other):
-        """
-        Implementation of ^ operator when left operand is not a :class:`ParserElement`
-        """
-        if isinstance(other, basestring):
-            other = self._literalStringClass(other)
-        if not isinstance(other, ParserElement):
-            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
-                          SyntaxWarning, stacklevel=2)
-            return None
-        return other ^ self
-
-    def __and__(self, other):
-        """
-        Implementation of & operator - returns :class:`Each`
-        """
-        if isinstance(other, basestring):
-            other = self._literalStringClass(other)
-        if not isinstance(other, ParserElement):
-            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
-                          SyntaxWarning, stacklevel=2)
-            return None
-        return Each([self, other])
-
-    def __rand__(self, other):
-        """
-        Implementation of & operator when left operand is not a :class:`ParserElement`
-        """
-        if isinstance(other, basestring):
-            other = self._literalStringClass(other)
-        if not isinstance(other, ParserElement):
-            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
-                          SyntaxWarning, stacklevel=2)
-            return None
-        return other & self
-
-    def __invert__(self):
-        """
-        Implementation of ~ operator - returns :class:`NotAny`
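-
-        Example (editor's sketch)::
-
-            # match a word only when it is not the keyword 'end'
-            ident = ~Keyword('end') + Word(alphas)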
-        """
-        return NotAny(self)
-
-    def __iter__(self):
-        # must implement __iter__ to override legacy use of sequential access to __getitem__ to
-        # iterate over a sequence
-        raise TypeError('%r object is not iterable' % self.__class__.__name__)
-
-    def __getitem__(self, key):
-        """
-        use ``[]`` indexing notation as a short form for expression repetition:
-         - ``expr[n]`` is equivalent to ``expr*n``
-         - ``expr[m, n]`` is equivalent to ``expr*(m, n)``
-         - ``expr[n, ...]`` or ``expr[n,]`` is equivalent
-              to ``expr*n + ZeroOrMore(expr)``
-              (read as "at least n instances of ``expr``")
-         - ``expr[..., n]`` is equivalent to ``expr*(0, n)``
-              (read as "0 to n instances of ``expr``")
-         - ``expr[...]`` and ``expr[0, ...]`` are equivalent to ``ZeroOrMore(expr)``
-         - ``expr[1, ...]`` is equivalent to ``OneOrMore(expr)``
-
-        ``None`` may be used in place of ``...``.
-
-        Note that ``expr[..., n]`` and ``expr[m, n]`` do not raise an exception
-        if more than ``n`` ``expr``s exist in the input stream.  If this behavior is
-        desired, then write ``expr[..., n] + ~expr``.
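-
-        Example (editor's sketch)::
-
-            Word(nums)[3]       # same as Word(nums) * 3
-            Word(nums)[2, 4]    # same as Word(nums) * (2, 4)
-            Word(nums)[1, ...]  # same as OneOrMore(Word(nums))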
-       """
-
-        # convert single arg keys to tuples
-        try:
-            if isinstance(key, str):
-                key = (key,)
-            iter(key)
-        except TypeError:
-            key = (key, key)
-
-        if len(key) > 2:
-            warnings.warn("only 1 or 2 index arguments supported ({0}{1})".format(key[:5],
-                                                                                '... [{0}]'.format(len(key))
-                                                                                if len(key) > 5 else ''))
-
-        # clip to 2 elements
-        ret = self * tuple(key[:2])
-        return ret
-
-    def __call__(self, name=None):
-        """
-        Shortcut for :class:`setResultsName`, with ``listAllMatches=False``.
-
-        If ``name`` is given with a trailing ``'*'`` character, then ``listAllMatches`` will be
-        passed as ``True``.
-
-        If ``name`` is omitted, same as calling :class:`copy`.
-
-        Example::
-
-            # these are equivalent
-            userdata = Word(alphas).setResultsName("name") + Word(nums + "-").setResultsName("socsecno")
-            userdata = Word(alphas)("name") + Word(nums + "-")("socsecno")
-        """
-        if name is not None:
-            return self._setResultsName(name)
-        else:
-            return self.copy()
-
-    def suppress(self):
-        """
-        Suppresses the output of this :class:`ParserElement`; useful to keep punctuation from
-        cluttering up returned output.
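-
-        Example (editor's sketch)::
-
-            wd = Word(alphas)
-            # the commas are matched but dropped from the parsed results
-            wd_list = wd + ZeroOrMore(Suppress(',') + wd)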
-        """
-        return Suppress(self)
-
-    def leaveWhitespace(self):
-        """
-        Disables the skipping of whitespace before matching the characters in the
-        :class:`ParserElement`'s defined pattern.  This is normally only used internally by
-        the pyparsing module, but may be needed in some whitespace-sensitive grammars.
-        """
-        self.skipWhitespace = False
-        return self
-
-    def setWhitespaceChars(self, chars):
-        """
-        Overrides the default whitespace chars
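-
-        Example (editor's sketch)::
-
-            # skip only spaces and tabs, making newlines significant
-            expr = Word(alphas).setWhitespaceChars(' \t')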
-        """
-        self.skipWhitespace = True
-        self.whiteChars = chars
-        self.copyDefaultWhiteChars = False
-        return self
-
-    def parseWithTabs(self):
-        """
-        Overrides default behavior to expand tab characters to spaces before
-        parsing the input string. Must be called before ``parseString`` when the
-        input grammar contains elements that match tab characters.
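-
-        Example (editor's sketch)::
-
-            # input tabs are preserved, so parse-action `loc` values index
-            # into the original, unexpanded string
-            expr = OneOrMore(Word(printables)).parseWithTabs()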
-        """
-        self.keepTabs = True
-        return self
-
-    def ignore(self, other):
-        """
-        Define expression to be ignored (e.g., comments) while doing pattern
-        matching; may be called repeatedly, to define multiple comment or other
-        ignorable patterns.
-
-        Example::
-
-            patt = OneOrMore(Word(alphas))
-            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj']
-
-            patt.ignore(cStyleComment)
-            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd']
-        """
-        if isinstance(other, basestring):
-            other = Suppress(other)
-
-        if isinstance(other, Suppress):
-            if other not in self.ignoreExprs:
-                self.ignoreExprs.append(other)
-        else:
-            self.ignoreExprs.append(Suppress(other.copy()))
-        return self
-
-    def setDebugActions(self, startAction, successAction, exceptionAction):
-        """
-        Enable display of debugging messages while doing pattern matching.
-        """
-        self.debugActions = (startAction or _defaultStartDebugAction,
-                             successAction or _defaultSuccessDebugAction,
-                             exceptionAction or _defaultExceptionDebugAction)
-        self.debug = True
-        return self
-
-    def setDebug(self, flag=True):
-        """
-        Enable display of debugging messages while doing pattern matching.
-        Set ``flag`` to True to enable, False to disable.
-
-        Example::
-
-            wd = Word(alphas).setName("alphaword")
-            integer = Word(nums).setName("numword")
-            term = wd | integer
-
-            # turn on debugging for wd
-            wd.setDebug()
-
-            OneOrMore(term).parseString("abc 123 xyz 890")
-
-        prints::
-
-            Match alphaword at loc 0(1,1)
-            Matched alphaword -> ['abc']
-            Match alphaword at loc 3(1,4)
-            Exception raised:Expected alphaword (at char 4), (line:1, col:5)
-            Match alphaword at loc 7(1,8)
-            Matched alphaword -> ['xyz']
-            Match alphaword at loc 11(1,12)
-            Exception raised:Expected alphaword (at char 12), (line:1, col:13)
-            Match alphaword at loc 15(1,16)
-            Exception raised:Expected alphaword (at char 15), (line:1, col:16)
-
-        The output shown is that produced by the default debug actions - custom debug actions can be
-        specified using :class:`setDebugActions`. Prior to attempting
-        to match the ``wd`` expression, the debugging message ``"Match <exprname> at loc <n>(<line>,<col>)"``
-        is shown. Then if the parse succeeds, a ``"Matched"`` message is shown; if it fails, an ``"Exception raised"``
-        message is shown. Also note the use of :class:`setName` to assign a human-readable name to the expression,
-        which makes debugging and exception messages easier to understand - for instance, the default
-        name created for the :class:`Word` expression without calling ``setName`` is ``"W:(ABCD...)"``.
-        """
-        if flag:
-            self.setDebugActions(_defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction)
-        else:
-            self.debug = False
-        return self
-
-    def __str__(self):
-        return self.name
-
-    def __repr__(self):
-        return _ustr(self)
-
-    def streamline(self):
-        self.streamlined = True
-        self.strRepr = None
-        return self
-
-    def checkRecursion(self, parseElementList):
-        pass
-
-    def validate(self, validateTrace=None):
-        """
-        Check defined expressions for valid structure, check for infinite recursive definitions.
-        """
-        self.checkRecursion([])
-
-    def parseFile(self, file_or_filename, parseAll=False):
-        """
-        Execute the parse expression on the given file or filename.
-        If a filename is specified (instead of a file object),
-        the entire file is opened, read, and closed before parsing.
-        """
-        try:
-            file_contents = file_or_filename.read()
-        except AttributeError:
-            with open(file_or_filename, "r") as f:
-                file_contents = f.read()
-        try:
-            return self.parseString(file_contents, parseAll)
-        except ParseBaseException as exc:
-            if ParserElement.verbose_stacktrace:
-                raise
-            else:
-                # catch and re-raise exception from here, clearing out pyparsing internal stack trace
-                if getattr(exc, '__traceback__', None) is not None:
-                    exc.__traceback__ = self._trim_traceback(exc.__traceback__)
-                raise exc
-
-    def __eq__(self, other):
-        if self is other:
-            return True
-        elif isinstance(other, basestring):
-            return self.matches(other)
-        elif isinstance(other, ParserElement):
-            return vars(self) == vars(other)
-        return False
-
-    def __ne__(self, other):
-        return not (self == other)
-
-    def __hash__(self):
-        return id(self)
-
-    def __req__(self, other):
-        return self == other
-
-    def __rne__(self, other):
-        return not (self == other)
-
-    def matches(self, testString, parseAll=True):
-        """
-        Method for quick testing of a parser against a test string. Good for simple
-        inline microtests of sub expressions while building up larger parser.
-
-        Parameters:
-         - testString - to test against this expression for a match
-         - parseAll - (default= ``True``) - flag to pass to :class:`parseString` when running tests
-
-        Example::
-
-            expr = Word(nums)
-            assert expr.matches("100")
-        """
-        try:
-            self.parseString(_ustr(testString), parseAll=parseAll)
-            return True
-        except ParseBaseException:
-            return False
-
-    def runTests(self, tests, parseAll=True, comment='#',
-                 fullDump=True, printResults=True, failureTests=False, postParse=None,
-                 file=None):
-        """
-        Execute the parse expression on a series of test strings, showing each
-        test, the parsed results or where the parse failed. Quick and easy way to
-        run a parse expression against a list of sample strings.
-
-        Parameters:
-         - tests - a list of separate test strings, or a multiline string of test strings
-         - parseAll - (default= ``True``) - flag to pass to :class:`parseString` when running tests
-         - comment - (default= ``'#'``) - expression for indicating embedded comments in the test
-              string; pass None to disable comment filtering
-         - fullDump - (default= ``True``) - dump results as list followed by results names in nested outline;
-              if False, only dump nested list
-         - printResults - (default= ``True``) prints test output to stdout
-         - failureTests - (default= ``False``) indicates if these tests are expected to fail parsing
-         - postParse - (default= ``None``) optional callback for successful parse results; called as
-              `fn(test_string, parse_results)` and returns a string to be added to the test output
-         - file - (default=``None``) optional file-like object to which test output will be written;
-              if None, will default to ``sys.stdout``
-
-        Returns: a (success, results) tuple, where success indicates that all tests succeeded
-        (or failed if ``failureTests`` is True), and the results contain a list of lines of each
-        test's output
-
-        Example::
-
-            number_expr = pyparsing_common.number.copy()
-
-            result = number_expr.runTests('''
-                # unsigned integer
-                100
-                # negative integer
-                -100
-                # float with scientific notation
-                6.02e23
-                # integer with scientific notation
-                1e-12
-                ''')
-            print("Success" if result[0] else "Failed!")
-
-            result = number_expr.runTests('''
-                # stray character
-                100Z
-                # missing leading digit before '.'
-                -.100
-                # too many '.'
-                3.14.159
-                ''', failureTests=True)
-            print("Success" if result[0] else "Failed!")
-
-        prints::
-
-            # unsigned integer
-            100
-            [100]
-
-            # negative integer
-            -100
-            [-100]
-
-            # float with scientific notation
-            6.02e23
-            [6.02e+23]
-
-            # integer with scientific notation
-            1e-12
-            [1e-12]
-
-            Success
-
-            # stray character
-            100Z
-               ^
-            FAIL: Expected end of text (at char 3), (line:1, col:4)
-
-            # missing leading digit before '.'
-            -.100
-            ^
-            FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1)
-
-            # too many '.'
-            3.14.159
-                ^
-            FAIL: Expected end of text (at char 4), (line:1, col:5)
-
-            Success
-
-        Each test string must be on a single line. If you want to test a string that spans multiple
-        lines, create a test like this::
-
-            expr.runTests(r"this is a test\\n of strings that spans \\n 3 lines")
-
-        (Note that this is a raw string literal; you must include the leading 'r'.)
-        """
-        if isinstance(tests, basestring):
-            tests = list(map(str.strip, tests.rstrip().splitlines()))
-        if isinstance(comment, basestring):
-            comment = Literal(comment)
-        if file is None:
-            file = sys.stdout
-        print_ = file.write
-
-        allResults = []
-        comments = []
-        success = True
-        NL = Literal(r'\n').addParseAction(replaceWith('\n')).ignore(quotedString)
-        BOM = u'\ufeff'
-        for t in tests:
-            if comment is not None and comment.matches(t, False) or comments and not t:
-                comments.append(t)
-                continue
-            if not t:
-                continue
-            out = ['\n' + '\n'.join(comments) if comments else '', t]
-            comments = []
-            try:
-                # convert newline marks to actual newlines, and strip leading BOM if present
-                t = NL.transformString(t.lstrip(BOM))
-                result = self.parseString(t, parseAll=parseAll)
-            except ParseBaseException as pe:
-                fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else ""
-                if '\n' in t:
-                    out.append(line(pe.loc, t))
-                    out.append(' ' * (col(pe.loc, t) - 1) + '^' + fatal)
-                else:
-                    out.append(' ' * pe.loc + '^' + fatal)
-                out.append("FAIL: " + str(pe))
-                success = success and failureTests
-                result = pe
-            except Exception as exc:
-                out.append("FAIL-EXCEPTION: " + str(exc))
-                success = success and failureTests
-                result = exc
-            else:
-                success = success and not failureTests
-                if postParse is not None:
-                    try:
-                        pp_value = postParse(t, result)
-                        if pp_value is not None:
-                            if isinstance(pp_value, ParseResults):
-                                out.append(pp_value.dump())
-                            else:
-                                out.append(str(pp_value))
-                        else:
-                            out.append(result.dump())
-                    except Exception as e:
-                        out.append(result.dump(full=fullDump))
-                        out.append("{0} failed: {1}: {2}".format(postParse.__name__, type(e).__name__, e))
-                else:
-                    out.append(result.dump(full=fullDump))
-
-            if printResults:
-                if fullDump:
-                    out.append('')
-                print_('\n'.join(out))
-
-            allResults.append((t, result))
-
-        return success, allResults
-
-
-class _PendingSkip(ParserElement):
-    # internal placeholder class to hold a place where '...' is added to a parser element;
-    # once another ParserElement is added, this placeholder will be replaced with a SkipTo
-    def __init__(self, expr, must_skip=False):
-        super(_PendingSkip, self).__init__()
-        self.strRepr = str(expr + Empty()).replace('Empty', '...')
-        self.name = self.strRepr
-        self.anchor = expr
-        self.must_skip = must_skip
-
-    def __add__(self, other):
-        skipper = SkipTo(other).setName("...")("_skipped*")
-        if self.must_skip:
-            def must_skip(t):
-                if not t._skipped or t._skipped.asList() == ['']:
-                    del t[0]
-                    t.pop("_skipped", None)
-            def show_skip(t):
-                if t._skipped.asList()[-1:] == ['']:
-                    skipped = t.pop('_skipped')
-                    t['_skipped'] = 'missing <' + repr(self.anchor) + '>'
-            return (self.anchor + skipper().addParseAction(must_skip)
-                    | skipper().addParseAction(show_skip)) + other
-
-        return self.anchor + skipper + other
-
-    def __repr__(self):
-        return self.strRepr
-
-    def parseImpl(self, *args):
-        raise Exception("use of `...` expression without following SkipTo target expression")
-
-
-class Token(ParserElement):
-    """Abstract :class:`ParserElement` subclass, for defining atomic
-    matching patterns.
-    """
-    def __init__(self):
-        super(Token, self).__init__(savelist=False)
-
-
-class Empty(Token):
-    """An empty token, will always match.
-    """
-    def __init__(self):
-        super(Empty, self).__init__()
-        self.name = "Empty"
-        self.mayReturnEmpty = True
-        self.mayIndexError = False
-
-
-class NoMatch(Token):
-    """A token that will never match.
-    """
-    def __init__(self):
-        super(NoMatch, self).__init__()
-        self.name = "NoMatch"
-        self.mayReturnEmpty = True
-        self.mayIndexError = False
-        self.errmsg = "Unmatchable token"
-
-    def parseImpl(self, instring, loc, doActions=True):
-        raise ParseException(instring, loc, self.errmsg, self)
-
-
-class Literal(Token):
-    """Token to exactly match a specified string.
-
-    Example::
-
-        Literal('blah').parseString('blah')  # -> ['blah']
-        Literal('blah').parseString('blahfooblah')  # -> ['blah']
-        Literal('blah').parseString('bla')  # -> Exception: Expected "blah"
-
-    For case-insensitive matching, use :class:`CaselessLiteral`.
-
-    For keyword matching (force word break before and after the matched string),
-    use :class:`Keyword` or :class:`CaselessKeyword`.
-    """
-    def __init__(self, matchString):
-        super(Literal, self).__init__()
-        self.match = matchString
-        self.matchLen = len(matchString)
-        try:
-            self.firstMatchChar = matchString[0]
-        except IndexError:
-            warnings.warn("null string passed to Literal; use Empty() instead",
-                            SyntaxWarning, stacklevel=2)
-            self.__class__ = Empty
-        self.name = '"%s"' % _ustr(self.match)
-        self.errmsg = "Expected " + self.name
-        self.mayReturnEmpty = False
-        self.mayIndexError = False
-
-        # Performance tuning: modify __class__ to select
-        # a parseImpl optimized for single-character check
-        if self.matchLen == 1 and type(self) is Literal:
-            self.__class__ = _SingleCharLiteral
-
-    def parseImpl(self, instring, loc, doActions=True):
-        if instring[loc] == self.firstMatchChar and instring.startswith(self.match, loc):
-            return loc + self.matchLen, self.match
-        raise ParseException(instring, loc, self.errmsg, self)
-
-class _SingleCharLiteral(Literal):
-    def parseImpl(self, instring, loc, doActions=True):
-        if instring[loc] == self.firstMatchChar:
-            return loc + 1, self.match
-        raise ParseException(instring, loc, self.errmsg, self)
-
-_L = Literal
-ParserElement._literalStringClass = Literal
-
-class Keyword(Token):
-    """Token to exactly match a specified string as a keyword, that is,
-    it must be immediately followed by a non-keyword character.  Compare
-    with :class:`Literal`:
-
-     - ``Literal("if")`` will match the leading ``'if'`` in
-       ``'ifAndOnlyIf'``.
-     - ``Keyword("if")`` will not; it will only match the leading
-       ``'if'`` in ``'if x=1'``, or ``'if(y==2)'``
-
-    Accepts two optional constructor arguments in addition to the
-    keyword string:
-
-     - ``identChars`` is a string of characters that would be valid
-       identifier characters, defaulting to all alphanumerics + "_" and
-       "$"
-     - ``caseless`` allows case-insensitive matching, default is ``False``.
-
-    Example::
-
-        Keyword("start").parseString("start")  # -> ['start']
-        Keyword("start").parseString("starting")  # -> Exception
-
-    For case-insensitive matching, use :class:`CaselessKeyword`.
-    """
-    DEFAULT_KEYWORD_CHARS = alphanums + "_$"
-
-    def __init__(self, matchString, identChars=None, caseless=False):
-        super(Keyword, self).__init__()
-        if identChars is None:
-            identChars = Keyword.DEFAULT_KEYWORD_CHARS
-        self.match = matchString
-        self.matchLen = len(matchString)
-        try:
-            self.firstMatchChar = matchString[0]
-        except IndexError:
-            warnings.warn("null string passed to Keyword; use Empty() instead",
-                          SyntaxWarning, stacklevel=2)
-        self.name = '"%s"' % self.match
-        self.errmsg = "Expected " + self.name
-        self.mayReturnEmpty = False
-        self.mayIndexError = False
-        self.caseless = caseless
-        if caseless:
-            self.caselessmatch = matchString.upper()
-            identChars = identChars.upper()
-        self.identChars = set(identChars)
-
-    def parseImpl(self, instring, loc, doActions=True):
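-        # a keyword matches only when the match text is present and is not
-        # immediately preceded or followed by another identifier character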
-        if self.caseless:
-            if ((instring[loc:loc + self.matchLen].upper() == self.caselessmatch)
-                    and (loc >= len(instring) - self.matchLen
-                         or instring[loc + self.matchLen].upper() not in self.identChars)
-                    and (loc == 0
-                         or instring[loc - 1].upper() not in self.identChars)):
-                return loc + self.matchLen, self.match
-
-        else:
-            if instring[loc] == self.firstMatchChar:
-                if ((self.matchLen == 1 or instring.startswith(self.match, loc))
-                        and (loc >= len(instring) - self.matchLen
-                             or instring[loc + self.matchLen] not in self.identChars)
-                        and (loc == 0 or instring[loc - 1] not in self.identChars)):
-                    return loc + self.matchLen, self.match
-
-        raise ParseException(instring, loc, self.errmsg, self)
-
-    def copy(self):
-        c = super(Keyword, self).copy()
-        c.identChars = Keyword.DEFAULT_KEYWORD_CHARS
-        return c
-
-    @staticmethod
-    def setDefaultKeywordChars(chars):
-        """Overrides the default Keyword chars
-        """
-        Keyword.DEFAULT_KEYWORD_CHARS = chars
-
-class CaselessLiteral(Literal):
-    """Token to match a specified string, ignoring case of letters.
-    Note: the matched results will always be in the case of the given
-    match string, NOT the case of the input text.
-
-    Example::
-
-        OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD']
-
-    (Contrast with example for :class:`CaselessKeyword`.)
-    """
-    def __init__(self, matchString):
-        super(CaselessLiteral, self).__init__(matchString.upper())
-        # Preserve the defining literal.
-        self.returnString = matchString
-        self.name = "'%s'" % self.returnString
-        self.errmsg = "Expected " + self.name
-
-    def parseImpl(self, instring, loc, doActions=True):
-        if instring[loc:loc + self.matchLen].upper() == self.match:
-            return loc + self.matchLen, self.returnString
-        raise ParseException(instring, loc, self.errmsg, self)
-
-class CaselessKeyword(Keyword):
-    """
-    Caseless version of :class:`Keyword`.
-
-    Example::
-
-        OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD']
-
-    (Contrast with example for :class:`CaselessLiteral`.)
-    """
-    def __init__(self, matchString, identChars=None):
-        super(CaselessKeyword, self).__init__(matchString, identChars, caseless=True)
-
-class CloseMatch(Token):
-    """A variation on :class:`Literal` which matches "close" matches,
-    that is, strings with at most 'n' mismatching characters.
-    :class:`CloseMatch` takes parameters:
-
-     - ``match_string`` - string to be matched
-     - ``maxMismatches`` - (``default=1``) maximum number of
-       mismatches allowed to count as a match
-
-    The results from a successful parse will contain the matched text
-    from the input string and the following named results:
-
-     - ``mismatches`` - a list of the positions within the
-       match_string where mismatches were found
-     - ``original`` - the original match_string used to compare
-       against the input string
-
-    If ``mismatches`` is an empty list, then the match was an exact
-    match.
-
-    Example::
-
-        patt = CloseMatch("ATCATCGAATGGA")
-        patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']})
-        patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1)
-
-        # exact match
-        patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']})
-
-        # close match allowing up to 2 mismatches
-        patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
-        patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']})
-    """
-    def __init__(self, match_string, maxMismatches=1):
-        super(CloseMatch, self).__init__()
-        self.name = match_string
-        self.match_string = match_string
-        self.maxMismatches = maxMismatches
-        self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches)
-        self.mayIndexError = False
-        self.mayReturnEmpty = False
-
-    def parseImpl(self, instring, loc, doActions=True):
-        start = loc
-        instrlen = len(instring)
-        maxloc = start + len(self.match_string)
-
-        if maxloc <= instrlen:
-            match_string = self.match_string
-            match_stringloc = 0
-            mismatches = []
-            maxMismatches = self.maxMismatches
-
-            for match_stringloc, s_m in enumerate(zip(instring[loc:maxloc], match_string)):
-                src, mat = s_m
-                if src != mat:
-                    mismatches.append(match_stringloc)
-                    if len(mismatches) > maxMismatches:
-                        break
-            else:
-                loc = match_stringloc + 1
-                results = ParseResults([instring[start:loc]])
-                results['original'] = match_string
-                results['mismatches'] = mismatches
-                return loc, results
-
-        raise ParseException(instring, loc, self.errmsg, self)
-
-
-class Word(Token):
-    """Token for matching words composed of allowed character sets.
-    Defined with string containing all allowed initial characters, an
-    optional string containing allowed body characters (if omitted,
-    defaults to the initial character set), and an optional minimum,
-    maximum, and/or exact length.  The default value for ``min`` is
-    1 (a minimum value < 1 is not valid); the default values for
-    ``max`` and ``exact`` are 0, meaning no maximum or exact
-    length restriction. An optional ``excludeChars`` parameter can
-    list characters that might be found in the input ``bodyChars``
-    string; useful to define a word of all printables except for one or
-    two characters, for instance.
-
-    :class:`srange` is useful for defining custom character set strings
-    for defining ``Word`` expressions, using range notation from
-    regular expression character sets.
-
-    A common mistake is to use :class:`Word` to match a specific literal
-    string, as in ``Word("Address")``. Remember that :class:`Word`
-    uses the string argument to define *sets* of matchable characters.
-    This expression would match "Add", "AAA", "dAred", or any other word
-    made up of the characters 'A', 'd', 'r', 'e', and 's'. To match an
-    exact literal string, use :class:`Literal` or :class:`Keyword`.
-
-    pyparsing includes helper strings for building Words:
-
-     - :class:`alphas`
-     - :class:`nums`
-     - :class:`alphanums`
-     - :class:`hexnums`
-     - :class:`alphas8bit` (alphabetic characters in ASCII range 128-255
-       - accented, tilded, umlauted, etc.)
-     - :class:`punc8bit` (non-alphabetic characters in ASCII range
-       128-255 - currency, symbols, superscripts, diacriticals, etc.)
-     - :class:`printables` (any non-whitespace character)
-
-    Example::
-
-        # a word composed of digits
-        integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9"))
-
-        # a word with a leading capital, and zero or more lowercase
-        capital_word = Word(alphas.upper(), alphas.lower())
-
-        # hostnames are alphanumeric, with leading alpha, and '-'
-        hostname = Word(alphas, alphanums + '-')
-
-        # roman numeral (not a strict parser, accepts invalid mix of characters)
-        roman = Word("IVXLCDM")
-
-        # any string of non-whitespace characters, except for ','
-        csv_value = Word(printables, excludeChars=",")
-    """
-    def __init__(self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None):
-        super(Word, self).__init__()
-        if excludeChars:
-            excludeChars = set(excludeChars)
-            initChars = ''.join(c for c in initChars if c not in excludeChars)
-            if bodyChars:
-                bodyChars = ''.join(c for c in bodyChars if c not in excludeChars)
-        self.initCharsOrig = initChars
-        self.initChars = set(initChars)
-        if bodyChars:
-            self.bodyCharsOrig = bodyChars
-            self.bodyChars = set(bodyChars)
-        else:
-            self.bodyCharsOrig = initChars
-            self.bodyChars = set(initChars)
-
-        self.maxSpecified = max > 0
-
-        if min < 1:
-            raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted")
-
-        self.minLen = min
-
-        if max > 0:
-            self.maxLen = max
-        else:
-            self.maxLen = _MAX_INT
-
-        if exact > 0:
-            self.maxLen = exact
-            self.minLen = exact
-
-        self.name = _ustr(self)
-        self.errmsg = "Expected " + self.name
-        self.mayIndexError = False
-        self.asKeyword = asKeyword
-
-        if ' ' not in self.initCharsOrig + self.bodyCharsOrig and (min == 1 and max == 0 and exact == 0):
-            if self.bodyCharsOrig == self.initCharsOrig:
-                self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig)
-            elif len(self.initCharsOrig) == 1:
-                self.reString = "%s[%s]*" % (re.escape(self.initCharsOrig),
-                                             _escapeRegexRangeChars(self.bodyCharsOrig),)
-            else:
-                self.reString = "[%s][%s]*" % (_escapeRegexRangeChars(self.initCharsOrig),
-                                               _escapeRegexRangeChars(self.bodyCharsOrig),)
-            if self.asKeyword:
-                self.reString = r"\b" + self.reString + r"\b"
-
-            try:
-                self.re = re.compile(self.reString)
-            except Exception:
-                self.re = None
-            else:
-                self.re_match = self.re.match
-                self.__class__ = _WordRegex
-
-    def parseImpl(self, instring, loc, doActions=True):
-        if instring[loc] not in self.initChars:
-            raise ParseException(instring, loc, self.errmsg, self)
-
-        start = loc
-        loc += 1
-        instrlen = len(instring)
-        bodychars = self.bodyChars
-        maxloc = start + self.maxLen
-        maxloc = min(maxloc, instrlen)
-        while loc < maxloc and instring[loc] in bodychars:
-            loc += 1
-
-        throwException = False
-        if loc - start < self.minLen:
-            throwException = True
-        elif self.maxSpecified and loc < instrlen and instring[loc] in bodychars:
-            throwException = True
-        elif self.asKeyword:
-            if (start > 0 and instring[start - 1] in bodychars
-                    or loc < instrlen and instring[loc] in bodychars):
-                throwException = True
-
-        if throwException:
-            raise ParseException(instring, loc, self.errmsg, self)
-
-        return loc, instring[start:loc]
-
-    def __str__(self):
-        try:
-            return super(Word, self).__str__()
-        except Exception:
-            pass
-
-        if self.strRepr is None:
-
-            def charsAsStr(s):
-                if len(s) > 4:
-                    return s[:4] + "..."
-                else:
-                    return s
-
-            if self.initCharsOrig != self.bodyCharsOrig:
-                self.strRepr = "W:(%s, %s)" % (charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig))
-            else:
-                self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig)
-
-        return self.strRepr
-
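As the docstring above cautions, Word describes character *sets*, not literal text; a minimal sketch of the difference (illustrative only, not part of this patch)::

    from pyparsing import Word, Literal

    word_of_chars = Word("Address")             # any run of the letters A, d, r, e, s
    print(word_of_chars.parseString("dAred"))   # -> ['dAred']

    exact = Literal("Address")                  # use Literal (or Keyword) for exact text
    print(exact.parseString("Address"))         # -> ['Address']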
-class _WordRegex(Word):
-    def parseImpl(self, instring, loc, doActions=True):
-        result = self.re_match(instring, loc)
-        if not result:
-            raise ParseException(instring, loc, self.errmsg, self)
-
-        loc = result.end()
-        return loc, result.group()
-
-
-class Char(_WordRegex):
-    """A short-cut class for defining ``Word(characters, exact=1)``,
-    when defining a match of any single character in a string of
-    characters.
-    """
-    def __init__(self, charset, asKeyword=False, excludeChars=None):
-        super(Char, self).__init__(charset, exact=1, asKeyword=asKeyword, excludeChars=excludeChars)
-        self.reString = "[%s]" % _escapeRegexRangeChars(''.join(self.initChars))
-        if asKeyword:
-            self.reString = r"\b%s\b" % self.reString
-        self.re = re.compile(self.reString)
-        self.re_match = self.re.match
-
-
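A minimal sketch of the Char shortcut defined above (illustrative only, not part of this patch)::

    from pyparsing import Char, nums

    digit = Char(nums)             # same as Word(nums, exact=1)
    print(digit.parseString("7"))  # -> ['7']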
-class Regex(Token):
-    r"""Token for matching strings that match a given regular
-    expression. Defined with string specifying the regular expression in
-    a form recognized by the stdlib Python  `re module <https://docs.python.org/3/library/re.html>`_.
-    If the given regex contains named groups (defined using ``(?P<name>...)``),
-    these will be preserved as named parse results.
-
-    If instead of the Python stdlib re module you wish to use a different RE module
-    (such as the `regex` module), you can do so by building your
-    Regex object with a compiled RE that was compiled using regex:
-
-    Example::
-
-        realnum = Regex(r"[+-]?\d+\.\d*")
-        date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)')
-        # ref: https://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression
-        roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})")
-
-        # use regex module instead of stdlib re module to construct a Regex using
-        # a compiled regular expression
-        import regex
-        parser = pp.Regex(regex.compile(r'[0-9]'))
-
-    """
-    def __init__(self, pattern, flags=0, asGroupList=False, asMatch=False):
-        """The parameters ``pattern`` and ``flags`` are passed
-        to the ``re.compile()`` function as-is. See the Python
-        `re module <https://docs.python.org/3/library/re.html>`_ module for an
-        explanation of the acceptable patterns and flags.
-        """
-        super(Regex, self).__init__()
-
-        if isinstance(pattern, basestring):
-            if not pattern:
-                warnings.warn("null string passed to Regex; use Empty() instead",
-                              SyntaxWarning, stacklevel=2)
-
-            self.pattern = pattern
-            self.flags = flags
-
-            try:
-                self.re = re.compile(self.pattern, self.flags)
-                self.reString = self.pattern
-            except sre_constants.error:
-                warnings.warn("invalid pattern (%s) passed to Regex" % pattern,
-                              SyntaxWarning, stacklevel=2)
-                raise
-
-        elif hasattr(pattern, 'pattern') and hasattr(pattern, 'match'):
-            self.re = pattern
-            self.pattern = self.reString = pattern.pattern
-            self.flags = flags
-
-        else:
-            raise TypeError("Regex may only be constructed with a string or a compiled RE object")
-
-        self.re_match = self.re.match
-
-        self.name = _ustr(self)
-        self.errmsg = "Expected " + self.name
-        self.mayIndexError = False
-        self.mayReturnEmpty = self.re_match("") is not None
-        self.asGroupList = asGroupList
-        self.asMatch = asMatch
-        if self.asGroupList:
-            self.parseImpl = self.parseImplAsGroupList
-        if self.asMatch:
-            self.parseImpl = self.parseImplAsMatch
-
-    def parseImpl(self, instring, loc, doActions=True):
-        result = self.re_match(instring, loc)
-        if not result:
-            raise ParseException(instring, loc, self.errmsg, self)
-
-        loc = result.end()
-        ret = ParseResults(result.group())
-        d = result.groupdict()
-        if d:
-            for k, v in d.items():
-                ret[k] = v
-        return loc, ret
-
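The groupdict() copy in parseImpl above is what exposes named regex groups as named parse results; a minimal sketch (illustrative only, not part of this patch)::

    from pyparsing import Regex

    date = Regex(r"(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)")
    result = date.parseString("2022-09-01")
    print(result["year"], result["month"], result["day"])  # -> 2022 09 01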
-    def parseImplAsGroupList(self, instring, loc, doActions=True):
-        result = self.re_match(instring, loc)
-        if not result:
-            raise ParseException(instring, loc, self.errmsg, self)
-
-        loc = result.end()
-        ret = result.groups()
-        return loc, ret
-
-    def parseImplAsMatch(self, instring, loc, doActions=True):
-        result = self.re_match(instring, loc)
-        if not result:
-            raise ParseException(instring, loc, self.errmsg, self)
-
-        loc = result.end()
-        ret = result
-        return loc, ret
-
-    def __str__(self):
-        try:
-            return super(Regex, self).__str__()
-        except Exception:
-            pass
-
-        if self.strRepr is None:
-            self.strRepr = "Re:(%s)" % repr(self.pattern)
-
-        return self.strRepr
-
-    def sub(self, repl):
-        r"""
-        Return Regex with an attached parse action to transform the parsed
-        result as if called using `re.sub(expr, repl, string) <https://docs.python.org/3/library/re.html#re.sub>`_.
-
-        Example::
-
-            make_html = Regex(r"(\w+):(.*?):").sub(r"<\1>\2</\1>")
-            print(make_html.transformString("h1:main title:"))
-            # prints "

main title

" - """ - if self.asGroupList: - warnings.warn("cannot use sub() with Regex(asGroupList=True)", - SyntaxWarning, stacklevel=2) - raise SyntaxError() - - if self.asMatch and callable(repl): - warnings.warn("cannot use sub() with a callable with Regex(asMatch=True)", - SyntaxWarning, stacklevel=2) - raise SyntaxError() - - if self.asMatch: - def pa(tokens): - return tokens[0].expand(repl) - else: - def pa(tokens): - return self.re.sub(repl, tokens[0]) - return self.addParseAction(pa) - -class QuotedString(Token): - r""" - Token for matching strings that are delimited by quoting characters. - - Defined with the following parameters: - - - quoteChar - string of one or more characters defining the - quote delimiting string - - escChar - character to escape quotes, typically backslash - (default= ``None``) - - escQuote - special quote sequence to escape an embedded quote - string (such as SQL's ``""`` to escape an embedded ``"``) - (default= ``None``) - - multiline - boolean indicating whether quotes can span - multiple lines (default= ``False``) - - unquoteResults - boolean indicating whether the matched text - should be unquoted (default= ``True``) - - endQuoteChar - string of one or more characters defining the - end of the quote delimited string (default= ``None`` => same as - quoteChar) - - convertWhitespaceEscapes - convert escaped whitespace - (``'\t'``, ``'\n'``, etc.) to actual whitespace - (default= ``True``) - - Example:: - - qs = QuotedString('"') - print(qs.searchString('lsjdf "This is the quote" sldjf')) - complex_qs = QuotedString('{{', endQuoteChar='}}') - print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf')) - sql_qs = QuotedString('"', escQuote='""') - print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf')) - - prints:: - - [['This is the quote']] - [['This is the "quote"']] - [['This is the quote with "embedded" quotes']] - """ - def __init__(self, quoteChar, escChar=None, escQuote=None, multiline=False, - unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True): - super(QuotedString, self).__init__() - - # remove white space from quote chars - wont work anyway - quoteChar = quoteChar.strip() - if not quoteChar: - warnings.warn("quoteChar cannot be the empty string", SyntaxWarning, stacklevel=2) - raise SyntaxError() - - if endQuoteChar is None: - endQuoteChar = quoteChar - else: - endQuoteChar = endQuoteChar.strip() - if not endQuoteChar: - warnings.warn("endQuoteChar cannot be the empty string", SyntaxWarning, stacklevel=2) - raise SyntaxError() - - self.quoteChar = quoteChar - self.quoteCharLen = len(quoteChar) - self.firstQuoteChar = quoteChar[0] - self.endQuoteChar = endQuoteChar - self.endQuoteCharLen = len(endQuoteChar) - self.escChar = escChar - self.escQuote = escQuote - self.unquoteResults = unquoteResults - self.convertWhitespaceEscapes = convertWhitespaceEscapes - - if multiline: - self.flags = re.MULTILINE | re.DOTALL - self.pattern = r'%s(?:[^%s%s]' % (re.escape(self.quoteChar), - _escapeRegexRangeChars(self.endQuoteChar[0]), - (escChar is not None and _escapeRegexRangeChars(escChar) or '')) - else: - self.flags = 0 - self.pattern = r'%s(?:[^%s\n\r%s]' % (re.escape(self.quoteChar), - _escapeRegexRangeChars(self.endQuoteChar[0]), - (escChar is not None and _escapeRegexRangeChars(escChar) or '')) - if len(self.endQuoteChar) > 1: - self.pattern += ( - '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]), - _escapeRegexRangeChars(self.endQuoteChar[i])) - for i in range(len(self.endQuoteChar) 
- 1, 0, -1)) + ')') - - if escQuote: - self.pattern += (r'|(?:%s)' % re.escape(escQuote)) - if escChar: - self.pattern += (r'|(?:%s.)' % re.escape(escChar)) - self.escCharReplacePattern = re.escape(self.escChar) + "(.)" - self.pattern += (r')*%s' % re.escape(self.endQuoteChar)) - - try: - self.re = re.compile(self.pattern, self.flags) - self.reString = self.pattern - self.re_match = self.re.match - except sre_constants.error: - warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern, - SyntaxWarning, stacklevel=2) - raise - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayIndexError = False - self.mayReturnEmpty = True - - def parseImpl(self, instring, loc, doActions=True): - result = instring[loc] == self.firstQuoteChar and self.re_match(instring, loc) or None - if not result: - raise ParseException(instring, loc, self.errmsg, self) - - loc = result.end() - ret = result.group() - - if self.unquoteResults: - - # strip off quotes - ret = ret[self.quoteCharLen: -self.endQuoteCharLen] - - if isinstance(ret, basestring): - # replace escaped whitespace - if '\\' in ret and self.convertWhitespaceEscapes: - ws_map = { - r'\t': '\t', - r'\n': '\n', - r'\f': '\f', - r'\r': '\r', - } - for wslit, wschar in ws_map.items(): - ret = ret.replace(wslit, wschar) - - # replace escaped characters - if self.escChar: - ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret) - - # replace escaped quotes - if self.escQuote: - ret = ret.replace(self.escQuote, self.endQuoteChar) - - return loc, ret - - def __str__(self): - try: - return super(QuotedString, self).__str__() - except Exception: - pass - - if self.strRepr is None: - self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar) - - return self.strRepr - - -class CharsNotIn(Token): - """Token for matching words composed of characters *not* in a given - set (will include whitespace in matched characters if not listed in - the provided exclusion set - see example). Defined with string - containing all disallowed characters, and an optional minimum, - maximum, and/or exact length. The default value for ``min`` is - 1 (a minimum value < 1 is not valid); the default values for - ``max`` and ``exact`` are 0, meaning no maximum or exact - length restriction. 
- - Example:: - - # define a comma-separated-value as anything that is not a ',' - csv_value = CharsNotIn(',') - print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213")) - - prints:: - - ['dkls', 'lsdkjf', 's12 34', '@!#', '213'] - """ - def __init__(self, notChars, min=1, max=0, exact=0): - super(CharsNotIn, self).__init__() - self.skipWhitespace = False - self.notChars = notChars - - if min < 1: - raise ValueError("cannot specify a minimum length < 1; use " - "Optional(CharsNotIn()) if zero-length char group is permitted") - - self.minLen = min - - if max > 0: - self.maxLen = max - else: - self.maxLen = _MAX_INT - - if exact > 0: - self.maxLen = exact - self.minLen = exact - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayReturnEmpty = (self.minLen == 0) - self.mayIndexError = False - - def parseImpl(self, instring, loc, doActions=True): - if instring[loc] in self.notChars: - raise ParseException(instring, loc, self.errmsg, self) - - start = loc - loc += 1 - notchars = self.notChars - maxlen = min(start + self.maxLen, len(instring)) - while loc < maxlen and instring[loc] not in notchars: - loc += 1 - - if loc - start < self.minLen: - raise ParseException(instring, loc, self.errmsg, self) - - return loc, instring[start:loc] - - def __str__(self): - try: - return super(CharsNotIn, self).__str__() - except Exception: - pass - - if self.strRepr is None: - if len(self.notChars) > 4: - self.strRepr = "!W:(%s...)" % self.notChars[:4] - else: - self.strRepr = "!W:(%s)" % self.notChars - - return self.strRepr - -class White(Token): - """Special matching class for matching whitespace. Normally, - whitespace is ignored by pyparsing grammars. This class is included - when some whitespace structures are significant. Define with - a string containing the whitespace characters to be matched; default - is ``" \\t\\r\\n"``. Also takes optional ``min``, - ``max``, and ``exact`` arguments, as defined for the - :class:`Word` class. 
- """ - whiteStrs = { - ' ' : '', - '\t': '', - '\n': '', - '\r': '', - '\f': '', - u'\u00A0': '', - u'\u1680': '', - u'\u180E': '', - u'\u2000': '', - u'\u2001': '', - u'\u2002': '', - u'\u2003': '', - u'\u2004': '', - u'\u2005': '', - u'\u2006': '', - u'\u2007': '', - u'\u2008': '', - u'\u2009': '', - u'\u200A': '', - u'\u200B': '', - u'\u202F': '', - u'\u205F': '', - u'\u3000': '', - } - def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): - super(White, self).__init__() - self.matchWhite = ws - self.setWhitespaceChars("".join(c for c in self.whiteChars if c not in self.matchWhite)) - # ~ self.leaveWhitespace() - self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite)) - self.mayReturnEmpty = True - self.errmsg = "Expected " + self.name - - self.minLen = min - - if max > 0: - self.maxLen = max - else: - self.maxLen = _MAX_INT - - if exact > 0: - self.maxLen = exact - self.minLen = exact - - def parseImpl(self, instring, loc, doActions=True): - if instring[loc] not in self.matchWhite: - raise ParseException(instring, loc, self.errmsg, self) - start = loc - loc += 1 - maxloc = start + self.maxLen - maxloc = min(maxloc, len(instring)) - while loc < maxloc and instring[loc] in self.matchWhite: - loc += 1 - - if loc - start < self.minLen: - raise ParseException(instring, loc, self.errmsg, self) - - return loc, instring[start:loc] - - -class _PositionToken(Token): - def __init__(self): - super(_PositionToken, self).__init__() - self.name = self.__class__.__name__ - self.mayReturnEmpty = True - self.mayIndexError = False - -class GoToColumn(_PositionToken): - """Token to advance to a specific column of input text; useful for - tabular report scraping. - """ - def __init__(self, colno): - super(GoToColumn, self).__init__() - self.col = colno - - def preParse(self, instring, loc): - if col(loc, instring) != self.col: - instrlen = len(instring) - if self.ignoreExprs: - loc = self._skipIgnorables(instring, loc) - while loc < instrlen and instring[loc].isspace() and col(loc, instring) != self.col: - loc += 1 - return loc - - def parseImpl(self, instring, loc, doActions=True): - thiscol = col(loc, instring) - if thiscol > self.col: - raise ParseException(instring, loc, "Text not in expected column", self) - newloc = loc + self.col - thiscol - ret = instring[loc: newloc] - return newloc, ret - - -class LineStart(_PositionToken): - r"""Matches if current position is at the beginning of a line within - the parse string - - Example:: - - test = '''\ - AAA this line - AAA and this line - AAA but not this one - B AAA and definitely not this one - ''' - - for t in (LineStart() + 'AAA' + restOfLine).searchString(test): - print(t) - - prints:: - - ['AAA', ' this line'] - ['AAA', ' and this line'] - - """ - def __init__(self): - super(LineStart, self).__init__() - self.errmsg = "Expected start of line" - - def parseImpl(self, instring, loc, doActions=True): - if col(loc, instring) == 1: - return loc, [] - raise ParseException(instring, loc, self.errmsg, self) - -class LineEnd(_PositionToken): - """Matches if current position is at the end of a line within the - parse string - """ - def __init__(self): - super(LineEnd, self).__init__() - self.setWhitespaceChars(ParserElement.DEFAULT_WHITE_CHARS.replace("\n", "")) - self.errmsg = "Expected end of line" - - def parseImpl(self, instring, loc, doActions=True): - if loc < len(instring): - if instring[loc] == "\n": - return loc + 1, "\n" - else: - raise ParseException(instring, loc, self.errmsg, self) - elif loc == len(instring): - return loc + 1, [] - 
else: - raise ParseException(instring, loc, self.errmsg, self) - -class StringStart(_PositionToken): - """Matches if current position is at the beginning of the parse - string - """ - def __init__(self): - super(StringStart, self).__init__() - self.errmsg = "Expected start of text" - - def parseImpl(self, instring, loc, doActions=True): - if loc != 0: - # see if entire string up to here is just whitespace and ignoreables - if loc != self.preParse(instring, 0): - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] - -class StringEnd(_PositionToken): - """Matches if current position is at the end of the parse string - """ - def __init__(self): - super(StringEnd, self).__init__() - self.errmsg = "Expected end of text" - - def parseImpl(self, instring, loc, doActions=True): - if loc < len(instring): - raise ParseException(instring, loc, self.errmsg, self) - elif loc == len(instring): - return loc + 1, [] - elif loc > len(instring): - return loc, [] - else: - raise ParseException(instring, loc, self.errmsg, self) - -class WordStart(_PositionToken): - """Matches if the current position is at the beginning of a Word, - and is not preceded by any character in a given set of - ``wordChars`` (default= ``printables``). To emulate the - ``\b`` behavior of regular expressions, use - ``WordStart(alphanums)``. ``WordStart`` will also match at - the beginning of the string being parsed, or at the beginning of - a line. - """ - def __init__(self, wordChars=printables): - super(WordStart, self).__init__() - self.wordChars = set(wordChars) - self.errmsg = "Not at the start of a word" - - def parseImpl(self, instring, loc, doActions=True): - if loc != 0: - if (instring[loc - 1] in self.wordChars - or instring[loc] not in self.wordChars): - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] - -class WordEnd(_PositionToken): - """Matches if the current position is at the end of a Word, and is - not followed by any character in a given set of ``wordChars`` - (default= ``printables``). To emulate the ``\b`` behavior of - regular expressions, use ``WordEnd(alphanums)``. ``WordEnd`` - will also match at the end of the string being parsed, or at the end - of a line. - """ - def __init__(self, wordChars=printables): - super(WordEnd, self).__init__() - self.wordChars = set(wordChars) - self.skipWhitespace = False - self.errmsg = "Not at the end of a word" - - def parseImpl(self, instring, loc, doActions=True): - instrlen = len(instring) - if instrlen > 0 and loc < instrlen: - if (instring[loc] in self.wordChars or - instring[loc - 1] not in self.wordChars): - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] - - -class ParseExpression(ParserElement): - """Abstract subclass of ParserElement, for combining and - post-processing parsed tokens. 
- """ - def __init__(self, exprs, savelist=False): - super(ParseExpression, self).__init__(savelist) - if isinstance(exprs, _generatorType): - exprs = list(exprs) - - if isinstance(exprs, basestring): - self.exprs = [self._literalStringClass(exprs)] - elif isinstance(exprs, ParserElement): - self.exprs = [exprs] - elif isinstance(exprs, Iterable): - exprs = list(exprs) - # if sequence of strings provided, wrap with Literal - if any(isinstance(expr, basestring) for expr in exprs): - exprs = (self._literalStringClass(e) if isinstance(e, basestring) else e for e in exprs) - self.exprs = list(exprs) - else: - try: - self.exprs = list(exprs) - except TypeError: - self.exprs = [exprs] - self.callPreparse = False - - def append(self, other): - self.exprs.append(other) - self.strRepr = None - return self - - def leaveWhitespace(self): - """Extends ``leaveWhitespace`` defined in base class, and also invokes ``leaveWhitespace`` on - all contained expressions.""" - self.skipWhitespace = False - self.exprs = [e.copy() for e in self.exprs] - for e in self.exprs: - e.leaveWhitespace() - return self - - def ignore(self, other): - if isinstance(other, Suppress): - if other not in self.ignoreExprs: - super(ParseExpression, self).ignore(other) - for e in self.exprs: - e.ignore(self.ignoreExprs[-1]) - else: - super(ParseExpression, self).ignore(other) - for e in self.exprs: - e.ignore(self.ignoreExprs[-1]) - return self - - def __str__(self): - try: - return super(ParseExpression, self).__str__() - except Exception: - pass - - if self.strRepr is None: - self.strRepr = "%s:(%s)" % (self.__class__.__name__, _ustr(self.exprs)) - return self.strRepr - - def streamline(self): - super(ParseExpression, self).streamline() - - for e in self.exprs: - e.streamline() - - # collapse nested And's of the form And(And(And(a, b), c), d) to And(a, b, c, d) - # but only if there are no parse actions or resultsNames on the nested And's - # (likewise for Or's and MatchFirst's) - if len(self.exprs) == 2: - other = self.exprs[0] - if (isinstance(other, self.__class__) - and not other.parseAction - and other.resultsName is None - and not other.debug): - self.exprs = other.exprs[:] + [self.exprs[1]] - self.strRepr = None - self.mayReturnEmpty |= other.mayReturnEmpty - self.mayIndexError |= other.mayIndexError - - other = self.exprs[-1] - if (isinstance(other, self.__class__) - and not other.parseAction - and other.resultsName is None - and not other.debug): - self.exprs = self.exprs[:-1] + other.exprs[:] - self.strRepr = None - self.mayReturnEmpty |= other.mayReturnEmpty - self.mayIndexError |= other.mayIndexError - - self.errmsg = "Expected " + _ustr(self) - - return self - - def validate(self, validateTrace=None): - tmp = (validateTrace if validateTrace is not None else [])[:] + [self] - for e in self.exprs: - e.validate(tmp) - self.checkRecursion([]) - - def copy(self): - ret = super(ParseExpression, self).copy() - ret.exprs = [e.copy() for e in self.exprs] - return ret - - def _setResultsName(self, name, listAllMatches=False): - if __diag__.warn_ungrouped_named_tokens_in_collection: - for e in self.exprs: - if isinstance(e, ParserElement) and e.resultsName: - warnings.warn("{0}: setting results name {1!r} on {2} expression " - "collides with {3!r} on contained expression".format("warn_ungrouped_named_tokens_in_collection", - name, - type(self).__name__, - e.resultsName), - stacklevel=3) - - return super(ParseExpression, self)._setResultsName(name, listAllMatches) - - -class And(ParseExpression): - """ - Requires all given 
:class:`ParseExpression` s to be found in the given order. - Expressions may be separated by whitespace. - May be constructed using the ``'+'`` operator. - May also be constructed using the ``'-'`` operator, which will - suppress backtracking. - - Example:: - - integer = Word(nums) - name_expr = OneOrMore(Word(alphas)) - - expr = And([integer("id"), name_expr("name"), integer("age")]) - # more easily written as: - expr = integer("id") + name_expr("name") + integer("age") - """ - - class _ErrorStop(Empty): - def __init__(self, *args, **kwargs): - super(And._ErrorStop, self).__init__(*args, **kwargs) - self.name = '-' - self.leaveWhitespace() - - def __init__(self, exprs, savelist=True): - exprs = list(exprs) - if exprs and Ellipsis in exprs: - tmp = [] - for i, expr in enumerate(exprs): - if expr is Ellipsis: - if i < len(exprs) - 1: - skipto_arg = (Empty() + exprs[i + 1]).exprs[-1] - tmp.append(SkipTo(skipto_arg)("_skipped*")) - else: - raise Exception("cannot construct And with sequence ending in ...") - else: - tmp.append(expr) - exprs[:] = tmp - super(And, self).__init__(exprs, savelist) - self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) - self.setWhitespaceChars(self.exprs[0].whiteChars) - self.skipWhitespace = self.exprs[0].skipWhitespace - self.callPreparse = True - - def streamline(self): - # collapse any _PendingSkip's - if self.exprs: - if any(isinstance(e, ParseExpression) and e.exprs and isinstance(e.exprs[-1], _PendingSkip) - for e in self.exprs[:-1]): - for i, e in enumerate(self.exprs[:-1]): - if e is None: - continue - if (isinstance(e, ParseExpression) - and e.exprs and isinstance(e.exprs[-1], _PendingSkip)): - e.exprs[-1] = e.exprs[-1] + self.exprs[i + 1] - self.exprs[i + 1] = None - self.exprs = [e for e in self.exprs if e is not None] - - super(And, self).streamline() - self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) - return self - - def parseImpl(self, instring, loc, doActions=True): - # pass False as last arg to _parse for first element, since we already - # pre-parsed the string as part of our And pre-parsing - loc, resultlist = self.exprs[0]._parse(instring, loc, doActions, callPreParse=False) - errorStop = False - for e in self.exprs[1:]: - if isinstance(e, And._ErrorStop): - errorStop = True - continue - if errorStop: - try: - loc, exprtokens = e._parse(instring, loc, doActions) - except ParseSyntaxException: - raise - except ParseBaseException as pe: - pe.__traceback__ = None - raise ParseSyntaxException._from_exception(pe) - except IndexError: - raise ParseSyntaxException(instring, len(instring), self.errmsg, self) - else: - loc, exprtokens = e._parse(instring, loc, doActions) - if exprtokens or exprtokens.haskeys(): - resultlist += exprtokens - return loc, resultlist - - def __iadd__(self, other): - if isinstance(other, basestring): - other = self._literalStringClass(other) - return self.append(other) # And([self, other]) - - def checkRecursion(self, parseElementList): - subRecCheckList = parseElementList[:] + [self] - for e in self.exprs: - e.checkRecursion(subRecCheckList) - if not e.mayReturnEmpty: - break - - def __str__(self): - if hasattr(self, "name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " ".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - -class Or(ParseExpression): - """Requires that at least one :class:`ParseExpression` is found. If - two expressions match, the expression that matches the longest - string will be used. May be constructed using the ``'^'`` - operator. 
- - Example:: - - # construct Or using '^' operator - - number = Word(nums) ^ Combine(Word(nums) + '.' + Word(nums)) - print(number.searchString("123 3.1416 789")) - - prints:: - - [['123'], ['3.1416'], ['789']] - """ - def __init__(self, exprs, savelist=False): - super(Or, self).__init__(exprs, savelist) - if self.exprs: - self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) - else: - self.mayReturnEmpty = True - - def streamline(self): - super(Or, self).streamline() - if __compat__.collect_all_And_tokens: - self.saveAsList = any(e.saveAsList for e in self.exprs) - return self - - def parseImpl(self, instring, loc, doActions=True): - maxExcLoc = -1 - maxException = None - matches = [] - for e in self.exprs: - try: - loc2 = e.tryParse(instring, loc) - except ParseException as err: - err.__traceback__ = None - if err.loc > maxExcLoc: - maxException = err - maxExcLoc = err.loc - except IndexError: - if len(instring) > maxExcLoc: - maxException = ParseException(instring, len(instring), e.errmsg, self) - maxExcLoc = len(instring) - else: - # save match among all matches, to retry longest to shortest - matches.append((loc2, e)) - - if matches: - # re-evaluate all matches in descending order of length of match, in case attached actions - # might change whether or how much they match of the input. - matches.sort(key=itemgetter(0), reverse=True) - - if not doActions: - # no further conditions or parse actions to change the selection of - # alternative, so the first match will be the best match - best_expr = matches[0][1] - return best_expr._parse(instring, loc, doActions) - - longest = -1, None - for loc1, expr1 in matches: - if loc1 <= longest[0]: - # already have a longer match than this one will deliver, we are done - return longest - - try: - loc2, toks = expr1._parse(instring, loc, doActions) - except ParseException as err: - err.__traceback__ = None - if err.loc > maxExcLoc: - maxException = err - maxExcLoc = err.loc - else: - if loc2 >= loc1: - return loc2, toks - # didn't match as much as before - elif loc2 > longest[0]: - longest = loc2, toks - - if longest != (-1, None): - return longest - - if maxException is not None: - maxException.msg = self.errmsg - raise maxException - else: - raise ParseException(instring, loc, "no defined alternatives to match", self) - - - def __ixor__(self, other): - if isinstance(other, basestring): - other = self._literalStringClass(other) - return self.append(other) # Or([self, other]) - - def __str__(self): - if hasattr(self, "name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - def checkRecursion(self, parseElementList): - subRecCheckList = parseElementList[:] + [self] - for e in self.exprs: - e.checkRecursion(subRecCheckList) - - def _setResultsName(self, name, listAllMatches=False): - if (not __compat__.collect_all_And_tokens - and __diag__.warn_multiple_tokens_in_named_alternation): - if any(isinstance(e, And) for e in self.exprs): - warnings.warn("{0}: setting results name {1!r} on {2} expression " - "may only return a single token for an And alternative, " - "in future will return the full list of tokens".format( - "warn_multiple_tokens_in_named_alternation", name, type(self).__name__), - stacklevel=3) - - return super(Or, self)._setResultsName(name, listAllMatches) - - -class MatchFirst(ParseExpression): - """Requires that at least one :class:`ParseExpression` is found. 
If - two expressions match, the first one listed is the one that will - match. May be constructed using the ``'|'`` operator. - - Example:: - - # construct MatchFirst using '|' operator - - # watch the order of expressions to match - number = Word(nums) | Combine(Word(nums) + '.' + Word(nums)) - print(number.searchString("123 3.1416 789")) # Fail! -> [['123'], ['3'], ['1416'], ['789']] - - # put more selective expression first - number = Combine(Word(nums) + '.' + Word(nums)) | Word(nums) - print(number.searchString("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']] - """ - def __init__(self, exprs, savelist=False): - super(MatchFirst, self).__init__(exprs, savelist) - if self.exprs: - self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) - else: - self.mayReturnEmpty = True - - def streamline(self): - super(MatchFirst, self).streamline() - if __compat__.collect_all_And_tokens: - self.saveAsList = any(e.saveAsList for e in self.exprs) - return self - - def parseImpl(self, instring, loc, doActions=True): - maxExcLoc = -1 - maxException = None - for e in self.exprs: - try: - ret = e._parse(instring, loc, doActions) - return ret - except ParseException as err: - if err.loc > maxExcLoc: - maxException = err - maxExcLoc = err.loc - except IndexError: - if len(instring) > maxExcLoc: - maxException = ParseException(instring, len(instring), e.errmsg, self) - maxExcLoc = len(instring) - - # only got here if no expression matched, raise exception for match that made it the furthest - else: - if maxException is not None: - maxException.msg = self.errmsg - raise maxException - else: - raise ParseException(instring, loc, "no defined alternatives to match", self) - - def __ior__(self, other): - if isinstance(other, basestring): - other = self._literalStringClass(other) - return self.append(other) # MatchFirst([self, other]) - - def __str__(self): - if hasattr(self, "name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - def checkRecursion(self, parseElementList): - subRecCheckList = parseElementList[:] + [self] - for e in self.exprs: - e.checkRecursion(subRecCheckList) - - def _setResultsName(self, name, listAllMatches=False): - if (not __compat__.collect_all_And_tokens - and __diag__.warn_multiple_tokens_in_named_alternation): - if any(isinstance(e, And) for e in self.exprs): - warnings.warn("{0}: setting results name {1!r} on {2} expression " - "may only return a single token for an And alternative, " - "in future will return the full list of tokens".format( - "warn_multiple_tokens_in_named_alternation", name, type(self).__name__), - stacklevel=3) - - return super(MatchFirst, self)._setResultsName(name, listAllMatches) - - -class Each(ParseExpression): - """Requires all given :class:`ParseExpression` s to be found, but in - any order. Expressions may be separated by whitespace. - - May be constructed using the ``'&'`` operator. 
- - Example:: - - color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN") - shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON") - integer = Word(nums) - shape_attr = "shape:" + shape_type("shape") - posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn") - color_attr = "color:" + color("color") - size_attr = "size:" + integer("size") - - # use Each (using operator '&') to accept attributes in any order - # (shape and posn are required, color and size are optional) - shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr) - - shape_spec.runTests(''' - shape: SQUARE color: BLACK posn: 100, 120 - shape: CIRCLE size: 50 color: BLUE posn: 50,80 - color:GREEN size:20 shape:TRIANGLE posn:20,40 - ''' - ) - - prints:: - - shape: SQUARE color: BLACK posn: 100, 120 - ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']] - - color: BLACK - - posn: ['100', ',', '120'] - - x: 100 - - y: 120 - - shape: SQUARE - - - shape: CIRCLE size: 50 color: BLUE posn: 50,80 - ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']] - - color: BLUE - - posn: ['50', ',', '80'] - - x: 50 - - y: 80 - - shape: CIRCLE - - size: 50 - - - color: GREEN size: 20 shape: TRIANGLE posn: 20,40 - ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']] - - color: GREEN - - posn: ['20', ',', '40'] - - x: 20 - - y: 40 - - shape: TRIANGLE - - size: 20 - """ - def __init__(self, exprs, savelist=True): - super(Each, self).__init__(exprs, savelist) - self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) - self.skipWhitespace = True - self.initExprGroups = True - self.saveAsList = True - - def streamline(self): - super(Each, self).streamline() - self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) - return self - - def parseImpl(self, instring, loc, doActions=True): - if self.initExprGroups: - self.opt1map = dict((id(e.expr), e) for e in self.exprs if isinstance(e, Optional)) - opt1 = [e.expr for e in self.exprs if isinstance(e, Optional)] - opt2 = [e for e in self.exprs if e.mayReturnEmpty and not isinstance(e, (Optional, Regex))] - self.optionals = opt1 + opt2 - self.multioptionals = [e.expr for e in self.exprs if isinstance(e, ZeroOrMore)] - self.multirequired = [e.expr for e in self.exprs if isinstance(e, OneOrMore)] - self.required = [e for e in self.exprs if not isinstance(e, (Optional, ZeroOrMore, OneOrMore))] - self.required += self.multirequired - self.initExprGroups = False - tmpLoc = loc - tmpReqd = self.required[:] - tmpOpt = self.optionals[:] - matchOrder = [] - - keepMatching = True - while keepMatching: - tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired - failed = [] - for e in tmpExprs: - try: - tmpLoc = e.tryParse(instring, tmpLoc) - except ParseException: - failed.append(e) - else: - matchOrder.append(self.opt1map.get(id(e), e)) - if e in tmpReqd: - tmpReqd.remove(e) - elif e in tmpOpt: - tmpOpt.remove(e) - if len(failed) == len(tmpExprs): - keepMatching = False - - if tmpReqd: - missing = ", ".join(_ustr(e) for e in tmpReqd) - raise ParseException(instring, loc, "Missing one or more required elements (%s)" % missing) - - # add any unmatched Optionals, in case they have default values defined - matchOrder += [e for e in self.exprs if isinstance(e, Optional) and e.expr in tmpOpt] - - resultlist = [] - for e in matchOrder: - loc, results = e._parse(instring, loc, doActions) - resultlist.append(results) - - finalResults = sum(resultlist, 
ParseResults([])) - return loc, finalResults - - def __str__(self): - if hasattr(self, "name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " & ".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - def checkRecursion(self, parseElementList): - subRecCheckList = parseElementList[:] + [self] - for e in self.exprs: - e.checkRecursion(subRecCheckList) - - -class ParseElementEnhance(ParserElement): - """Abstract subclass of :class:`ParserElement`, for combining and - post-processing parsed tokens. - """ - def __init__(self, expr, savelist=False): - super(ParseElementEnhance, self).__init__(savelist) - if isinstance(expr, basestring): - if issubclass(self._literalStringClass, Token): - expr = self._literalStringClass(expr) - else: - expr = self._literalStringClass(Literal(expr)) - self.expr = expr - self.strRepr = None - if expr is not None: - self.mayIndexError = expr.mayIndexError - self.mayReturnEmpty = expr.mayReturnEmpty - self.setWhitespaceChars(expr.whiteChars) - self.skipWhitespace = expr.skipWhitespace - self.saveAsList = expr.saveAsList - self.callPreparse = expr.callPreparse - self.ignoreExprs.extend(expr.ignoreExprs) - - def parseImpl(self, instring, loc, doActions=True): - if self.expr is not None: - return self.expr._parse(instring, loc, doActions, callPreParse=False) - else: - raise ParseException("", loc, self.errmsg, self) - - def leaveWhitespace(self): - self.skipWhitespace = False - self.expr = self.expr.copy() - if self.expr is not None: - self.expr.leaveWhitespace() - return self - - def ignore(self, other): - if isinstance(other, Suppress): - if other not in self.ignoreExprs: - super(ParseElementEnhance, self).ignore(other) - if self.expr is not None: - self.expr.ignore(self.ignoreExprs[-1]) - else: - super(ParseElementEnhance, self).ignore(other) - if self.expr is not None: - self.expr.ignore(self.ignoreExprs[-1]) - return self - - def streamline(self): - super(ParseElementEnhance, self).streamline() - if self.expr is not None: - self.expr.streamline() - return self - - def checkRecursion(self, parseElementList): - if self in parseElementList: - raise RecursiveGrammarException(parseElementList + [self]) - subRecCheckList = parseElementList[:] + [self] - if self.expr is not None: - self.expr.checkRecursion(subRecCheckList) - - def validate(self, validateTrace=None): - if validateTrace is None: - validateTrace = [] - tmp = validateTrace[:] + [self] - if self.expr is not None: - self.expr.validate(tmp) - self.checkRecursion([]) - - def __str__(self): - try: - return super(ParseElementEnhance, self).__str__() - except Exception: - pass - - if self.strRepr is None and self.expr is not None: - self.strRepr = "%s:(%s)" % (self.__class__.__name__, _ustr(self.expr)) - return self.strRepr - - -class FollowedBy(ParseElementEnhance): - """Lookahead matching of the given parse expression. - ``FollowedBy`` does *not* advance the parsing position within - the input string, it only verifies that the specified parse - expression matches at the current position. ``FollowedBy`` - always returns a null token list. If any results names are defined - in the lookahead expression, those *will* be returned for access by - name. 
- - Example:: - - # use FollowedBy to match a label only if it is followed by a ':' - data_word = Word(alphas) - label = data_word + FollowedBy(':') - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - - OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint() - - prints:: - - [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']] - """ - def __init__(self, expr): - super(FollowedBy, self).__init__(expr) - self.mayReturnEmpty = True - - def parseImpl(self, instring, loc, doActions=True): - # by using self._expr.parse and deleting the contents of the returned ParseResults list - # we keep any named results that were defined in the FollowedBy expression - _, ret = self.expr._parse(instring, loc, doActions=doActions) - del ret[:] - - return loc, ret - - -class PrecededBy(ParseElementEnhance): - """Lookbehind matching of the given parse expression. - ``PrecededBy`` does not advance the parsing position within the - input string, it only verifies that the specified parse expression - matches prior to the current position. ``PrecededBy`` always - returns a null token list, but if a results name is defined on the - given expression, it is returned. - - Parameters: - - - expr - expression that must match prior to the current parse - location - - retreat - (default= ``None``) - (int) maximum number of characters - to lookbehind prior to the current parse location - - If the lookbehind expression is a string, Literal, Keyword, or - a Word or CharsNotIn with a specified exact or maximum length, then - the retreat parameter is not required. Otherwise, retreat must be - specified to give a maximum number of characters to look back from - the current parse position for a lookbehind match. 
- - Example:: - - # VB-style variable names with type prefixes - int_var = PrecededBy("#") + pyparsing_common.identifier - str_var = PrecededBy("$") + pyparsing_common.identifier - - """ - def __init__(self, expr, retreat=None): - super(PrecededBy, self).__init__(expr) - self.expr = self.expr().leaveWhitespace() - self.mayReturnEmpty = True - self.mayIndexError = False - self.exact = False - if isinstance(expr, str): - retreat = len(expr) - self.exact = True - elif isinstance(expr, (Literal, Keyword)): - retreat = expr.matchLen - self.exact = True - elif isinstance(expr, (Word, CharsNotIn)) and expr.maxLen != _MAX_INT: - retreat = expr.maxLen - self.exact = True - elif isinstance(expr, _PositionToken): - retreat = 0 - self.exact = True - self.retreat = retreat - self.errmsg = "not preceded by " + str(expr) - self.skipWhitespace = False - self.parseAction.append(lambda s, l, t: t.__delitem__(slice(None, None))) - - def parseImpl(self, instring, loc=0, doActions=True): - if self.exact: - if loc < self.retreat: - raise ParseException(instring, loc, self.errmsg) - start = loc - self.retreat - _, ret = self.expr._parse(instring, start) - else: - # retreat specified a maximum lookbehind window, iterate - test_expr = self.expr + StringEnd() - instring_slice = instring[max(0, loc - self.retreat):loc] - last_expr = ParseException(instring, loc, self.errmsg) - for offset in range(1, min(loc, self.retreat + 1)+1): - try: - # print('trying', offset, instring_slice, repr(instring_slice[loc - offset:])) - _, ret = test_expr._parse(instring_slice, len(instring_slice) - offset) - except ParseBaseException as pbe: - last_expr = pbe - else: - break - else: - raise last_expr - return loc, ret - - -class NotAny(ParseElementEnhance): - """Lookahead to disallow matching with the given parse expression. - ``NotAny`` does *not* advance the parsing position within the - input string, it only verifies that the specified parse expression - does *not* match at the current position. Also, ``NotAny`` does - *not* skip over leading whitespace. ``NotAny`` always returns - a null token list. May be constructed using the '~' operator. - - Example:: - - AND, OR, NOT = map(CaselessKeyword, "AND OR NOT".split()) - - # take care not to mistake keywords for identifiers - ident = ~(AND | OR | NOT) + Word(alphas) - boolean_term = Optional(NOT) + ident - - # very crude boolean expression - to support parenthesis groups and - # operation hierarchy, use infixNotation - boolean_expr = boolean_term + ZeroOrMore((AND | OR) + boolean_term) - - # integers that are followed by "." 
are actually floats - integer = Word(nums) + ~Char(".") - """ - def __init__(self, expr): - super(NotAny, self).__init__(expr) - # ~ self.leaveWhitespace() - self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs - self.mayReturnEmpty = True - self.errmsg = "Found unwanted token, " + _ustr(self.expr) - - def parseImpl(self, instring, loc, doActions=True): - if self.expr.canParseNext(instring, loc): - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] - - def __str__(self): - if hasattr(self, "name"): - return self.name - - if self.strRepr is None: - self.strRepr = "~{" + _ustr(self.expr) + "}" - - return self.strRepr - -class _MultipleMatch(ParseElementEnhance): - def __init__(self, expr, stopOn=None): - super(_MultipleMatch, self).__init__(expr) - self.saveAsList = True - ender = stopOn - if isinstance(ender, basestring): - ender = self._literalStringClass(ender) - self.stopOn(ender) - - def stopOn(self, ender): - if isinstance(ender, basestring): - ender = self._literalStringClass(ender) - self.not_ender = ~ender if ender is not None else None - return self - - def parseImpl(self, instring, loc, doActions=True): - self_expr_parse = self.expr._parse - self_skip_ignorables = self._skipIgnorables - check_ender = self.not_ender is not None - if check_ender: - try_not_ender = self.not_ender.tryParse - - # must be at least one (but first see if we are the stopOn sentinel; - # if so, fail) - if check_ender: - try_not_ender(instring, loc) - loc, tokens = self_expr_parse(instring, loc, doActions, callPreParse=False) - try: - hasIgnoreExprs = (not not self.ignoreExprs) - while 1: - if check_ender: - try_not_ender(instring, loc) - if hasIgnoreExprs: - preloc = self_skip_ignorables(instring, loc) - else: - preloc = loc - loc, tmptokens = self_expr_parse(instring, preloc, doActions) - if tmptokens or tmptokens.haskeys(): - tokens += tmptokens - except (ParseException, IndexError): - pass - - return loc, tokens - - def _setResultsName(self, name, listAllMatches=False): - if __diag__.warn_ungrouped_named_tokens_in_collection: - for e in [self.expr] + getattr(self.expr, 'exprs', []): - if isinstance(e, ParserElement) and e.resultsName: - warnings.warn("{0}: setting results name {1!r} on {2} expression " - "collides with {3!r} on contained expression".format("warn_ungrouped_named_tokens_in_collection", - name, - type(self).__name__, - e.resultsName), - stacklevel=3) - - return super(_MultipleMatch, self)._setResultsName(name, listAllMatches) - - -class OneOrMore(_MultipleMatch): - """Repetition of one or more of the given expression. - - Parameters: - - expr - expression that must match one or more times - - stopOn - (default= ``None``) - expression for a terminating sentinel - (only required if the sentinel would ordinarily match the repetition - expression) - - Example:: - - data_word = Word(alphas) - label = data_word + FollowedBy(':') - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) - - text = "shape: SQUARE posn: upper left color: BLACK" - OneOrMore(attr_expr).parseString(text).pprint() # Fail! 
read 'color' as data instead of next label -> [['shape', 'SQUARE color']] - - # use stopOn attribute for OneOrMore to avoid reading label string as part of the data - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']] - - # could also be written as - (attr_expr * (1,)).parseString(text).pprint() - """ - - def __str__(self): - if hasattr(self, "name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + _ustr(self.expr) + "}..." - - return self.strRepr - -class ZeroOrMore(_MultipleMatch): - """Optional repetition of zero or more of the given expression. - - Parameters: - - expr - expression that must match zero or more times - - stopOn - (default= ``None``) - expression for a terminating sentinel - (only required if the sentinel would ordinarily match the repetition - expression) - - Example: similar to :class:`OneOrMore` - """ - def __init__(self, expr, stopOn=None): - super(ZeroOrMore, self).__init__(expr, stopOn=stopOn) - self.mayReturnEmpty = True - - def parseImpl(self, instring, loc, doActions=True): - try: - return super(ZeroOrMore, self).parseImpl(instring, loc, doActions) - except (ParseException, IndexError): - return loc, [] - - def __str__(self): - if hasattr(self, "name"): - return self.name - - if self.strRepr is None: - self.strRepr = "[" + _ustr(self.expr) + "]..." - - return self.strRepr - - -class _NullToken(object): - def __bool__(self): - return False - __nonzero__ = __bool__ - def __str__(self): - return "" - -class Optional(ParseElementEnhance): - """Optional matching of the given expression. - - Parameters: - - expr - expression that must match zero or more times - - default (optional) - value to be returned if the optional expression is not found. - - Example:: - - # US postal code can be a 5-digit zip, plus optional 4-digit qualifier - zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4))) - zip.runTests(''' - # traditional ZIP code - 12345 - - # ZIP+4 form - 12101-0001 - - # invalid ZIP - 98765- - ''') - - prints:: - - # traditional ZIP code - 12345 - ['12345'] - - # ZIP+4 form - 12101-0001 - ['12101-0001'] - - # invalid ZIP - 98765- - ^ - FAIL: Expected end of text (at char 5), (line:1, col:6) - """ - __optionalNotMatched = _NullToken() - - def __init__(self, expr, default=__optionalNotMatched): - super(Optional, self).__init__(expr, savelist=False) - self.saveAsList = self.expr.saveAsList - self.defaultValue = default - self.mayReturnEmpty = True - - def parseImpl(self, instring, loc, doActions=True): - try: - loc, tokens = self.expr._parse(instring, loc, doActions, callPreParse=False) - except (ParseException, IndexError): - if self.defaultValue is not self.__optionalNotMatched: - if self.expr.resultsName: - tokens = ParseResults([self.defaultValue]) - tokens[self.expr.resultsName] = self.defaultValue - else: - tokens = [self.defaultValue] - else: - tokens = [] - return loc, tokens - - def __str__(self): - if hasattr(self, "name"): - return self.name - - if self.strRepr is None: - self.strRepr = "[" + _ustr(self.expr) + "]" - - return self.strRepr - -class SkipTo(ParseElementEnhance): - """Token for skipping over all undefined text until the matched - expression is found. 
- - Parameters: - - expr - target expression marking the end of the data to be skipped - - include - (default= ``False``) if True, the target expression is also parsed - (the skipped text and target expression are returned as a 2-element list). - - ignore - (default= ``None``) used to define grammars (typically quoted strings and - comments) that might contain false matches to the target expression - - failOn - (default= ``None``) define expressions that are not allowed to be - included in the skipped test; if found before the target expression is found, - the SkipTo is not a match - - Example:: - - report = ''' - Outstanding Issues Report - 1 Jan 2000 - - # | Severity | Description | Days Open - -----+----------+-------------------------------------------+----------- - 101 | Critical | Intermittent system crash | 6 - 94 | Cosmetic | Spelling error on Login ('log|n') | 14 - 79 | Minor | System slow when running too many reports | 47 - ''' - integer = Word(nums) - SEP = Suppress('|') - # use SkipTo to simply match everything up until the next SEP - # - ignore quoted strings, so that a '|' character inside a quoted string does not match - # - parse action will call token.strip() for each matched token, i.e., the description body - string_data = SkipTo(SEP, ignore=quotedString) - string_data.setParseAction(tokenMap(str.strip)) - ticket_expr = (integer("issue_num") + SEP - + string_data("sev") + SEP - + string_data("desc") + SEP - + integer("days_open")) - - for tkt in ticket_expr.searchString(report): - print tkt.dump() - - prints:: - - ['101', 'Critical', 'Intermittent system crash', '6'] - - days_open: 6 - - desc: Intermittent system crash - - issue_num: 101 - - sev: Critical - ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14'] - - days_open: 14 - - desc: Spelling error on Login ('log|n') - - issue_num: 94 - - sev: Cosmetic - ['79', 'Minor', 'System slow when running too many reports', '47'] - - days_open: 47 - - desc: System slow when running too many reports - - issue_num: 79 - - sev: Minor - """ - def __init__(self, other, include=False, ignore=None, failOn=None): - super(SkipTo, self).__init__(other) - self.ignoreExpr = ignore - self.mayReturnEmpty = True - self.mayIndexError = False - self.includeMatch = include - self.saveAsList = False - if isinstance(failOn, basestring): - self.failOn = self._literalStringClass(failOn) - else: - self.failOn = failOn - self.errmsg = "No match found for " + _ustr(self.expr) - - def parseImpl(self, instring, loc, doActions=True): - startloc = loc - instrlen = len(instring) - expr = self.expr - expr_parse = self.expr._parse - self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None - self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None - - tmploc = loc - while tmploc <= instrlen: - if self_failOn_canParseNext is not None: - # break if failOn expression matches - if self_failOn_canParseNext(instring, tmploc): - break - - if self_ignoreExpr_tryParse is not None: - # advance past ignore expressions - while 1: - try: - tmploc = self_ignoreExpr_tryParse(instring, tmploc) - except ParseBaseException: - break - - try: - expr_parse(instring, tmploc, doActions=False, callPreParse=False) - except (ParseException, IndexError): - # no match, advance loc in string - tmploc += 1 - else: - # matched skipto expr, done - break - - else: - # ran off the end of the input string without matching skipto expr, fail - raise ParseException(instring, loc, self.errmsg, self) - - # build up return 
values - loc = tmploc - skiptext = instring[startloc:loc] - skipresult = ParseResults(skiptext) - - if self.includeMatch: - loc, mat = expr_parse(instring, loc, doActions, callPreParse=False) - skipresult += mat - - return loc, skipresult - -class Forward(ParseElementEnhance): - """Forward declaration of an expression to be defined later - - used for recursive grammars, such as algebraic infix notation. - When the expression is known, it is assigned to the ``Forward`` - variable using the '<<' operator. - - Note: take care when assigning to ``Forward`` not to overlook - precedence of operators. - - Specifically, '|' has a lower precedence than '<<', so that:: - - fwdExpr << a | b | c - - will actually be evaluated as:: - - (fwdExpr << a) | b | c - - thereby leaving b and c out as parseable alternatives. It is recommended that you - explicitly group the values inserted into the ``Forward``:: - - fwdExpr << (a | b | c) - - Converting to use the '<<=' operator instead will avoid this problem. - - See :class:`ParseResults.pprint` for an example of a recursive - parser created using ``Forward``. - """ - def __init__(self, other=None): - super(Forward, self).__init__(other, savelist=False) - - def __lshift__(self, other): - if isinstance(other, basestring): - other = self._literalStringClass(other) - self.expr = other - self.strRepr = None - self.mayIndexError = self.expr.mayIndexError - self.mayReturnEmpty = self.expr.mayReturnEmpty - self.setWhitespaceChars(self.expr.whiteChars) - self.skipWhitespace = self.expr.skipWhitespace - self.saveAsList = self.expr.saveAsList - self.ignoreExprs.extend(self.expr.ignoreExprs) - return self - - def __ilshift__(self, other): - return self << other - - def leaveWhitespace(self): - self.skipWhitespace = False - return self - - def streamline(self): - if not self.streamlined: - self.streamlined = True - if self.expr is not None: - self.expr.streamline() - return self - - def validate(self, validateTrace=None): - if validateTrace is None: - validateTrace = [] - - if self not in validateTrace: - tmp = validateTrace[:] + [self] - if self.expr is not None: - self.expr.validate(tmp) - self.checkRecursion([]) - - def __str__(self): - if hasattr(self, "name"): - return self.name - if self.strRepr is not None: - return self.strRepr - - # Avoid infinite recursion by setting a temporary strRepr - self.strRepr = ": ..." - - # Use the string representation of main expression. - retString = '...' - try: - if self.expr is not None: - retString = _ustr(self.expr)[:1000] - else: - retString = "None" - finally: - self.strRepr = self.__class__.__name__ + ": " + retString - return self.strRepr - - def copy(self): - if self.expr is not None: - return super(Forward, self).copy() - else: - ret = Forward() - ret <<= self - return ret - - def _setResultsName(self, name, listAllMatches=False): - if __diag__.warn_name_set_on_empty_Forward: - if self.expr is None: - warnings.warn("{0}: setting results name {0!r} on {1} expression " - "that has no contained expression".format("warn_name_set_on_empty_Forward", - name, - type(self).__name__), - stacklevel=3) - - return super(Forward, self)._setResultsName(name, listAllMatches) - -class TokenConverter(ParseElementEnhance): - """ - Abstract subclass of :class:`ParseExpression`, for converting parsed results. 
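The ``<<``/``<<=`` behavior described above is easy to get wrong; as a minimal illustrative sketch (not from the vendored source), a recursive nested-list grammar built with ``Forward``::

    from pyparsing import Forward, Group, Suppress, Word, ZeroOrMore, nums

    # recursive grammar for nested integer lists such as "(1 (2 3) 4)"
    LPAR, RPAR = map(Suppress, "()")
    nested = Forward()
    # '<<=' sidesteps the '<<' vs. '|' precedence pitfall noted above
    nested <<= Group(LPAR + ZeroOrMore(Word(nums) | nested) + RPAR)
    print(nested.parseString("(1 (2 3) 4)"))  # -> [['1', ['2', '3'], '4']]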
- """ - def __init__(self, expr, savelist=False): - super(TokenConverter, self).__init__(expr) # , savelist) - self.saveAsList = False - -class Combine(TokenConverter): - """Converter to concatenate all matching tokens to a single string. - By default, the matching patterns must also be contiguous in the - input string; this can be disabled by specifying - ``'adjacent=False'`` in the constructor. - - Example:: - - real = Word(nums) + '.' + Word(nums) - print(real.parseString('3.1416')) # -> ['3', '.', '1416'] - # will also erroneously match the following - print(real.parseString('3. 1416')) # -> ['3', '.', '1416'] - - real = Combine(Word(nums) + '.' + Word(nums)) - print(real.parseString('3.1416')) # -> ['3.1416'] - # no match when there are internal spaces - print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...) - """ - def __init__(self, expr, joinString="", adjacent=True): - super(Combine, self).__init__(expr) - # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself - if adjacent: - self.leaveWhitespace() - self.adjacent = adjacent - self.skipWhitespace = True - self.joinString = joinString - self.callPreparse = True - - def ignore(self, other): - if self.adjacent: - ParserElement.ignore(self, other) - else: - super(Combine, self).ignore(other) - return self - - def postParse(self, instring, loc, tokenlist): - retToks = tokenlist.copy() - del retToks[:] - retToks += ParseResults(["".join(tokenlist._asStringList(self.joinString))], modal=self.modalResults) - - if self.resultsName and retToks.haskeys(): - return [retToks] - else: - return retToks - -class Group(TokenConverter): - """Converter to return the matched tokens as a list - useful for - returning tokens of :class:`ZeroOrMore` and :class:`OneOrMore` expressions. - - Example:: - - ident = Word(alphas) - num = Word(nums) - term = ident | num - func = ident + Optional(delimitedList(term)) - print(func.parseString("fn a, b, 100")) # -> ['fn', 'a', 'b', '100'] - - func = ident + Group(Optional(delimitedList(term))) - print(func.parseString("fn a, b, 100")) # -> ['fn', ['a', 'b', '100']] - """ - def __init__(self, expr): - super(Group, self).__init__(expr) - self.saveAsList = True - - def postParse(self, instring, loc, tokenlist): - return [tokenlist] - -class Dict(TokenConverter): - """Converter to return a repetitive expression as a list, but also - as a dictionary. Each element can also be referenced using the first - token in the expression as its key. Useful for tabular report - scraping when the first column can be used as a item key. 
- - Example:: - - data_word = Word(alphas) - label = data_word + FollowedBy(':') - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) - - text = "shape: SQUARE posn: upper left color: light blue texture: burlap" - attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - - # print attributes as plain groups - print(OneOrMore(attr_expr).parseString(text).dump()) - - # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names - result = Dict(OneOrMore(Group(attr_expr))).parseString(text) - print(result.dump()) - - # access named fields as dict entries, or output as dict - print(result['shape']) - print(result.asDict()) - - prints:: - - ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap'] - [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] - - color: light blue - - posn: upper left - - shape: SQUARE - - texture: burlap - SQUARE - {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'} - - See more examples at :class:`ParseResults` of accessing fields by results name. - """ - def __init__(self, expr): - super(Dict, self).__init__(expr) - self.saveAsList = True - - def postParse(self, instring, loc, tokenlist): - for i, tok in enumerate(tokenlist): - if len(tok) == 0: - continue - ikey = tok[0] - if isinstance(ikey, int): - ikey = _ustr(tok[0]).strip() - if len(tok) == 1: - tokenlist[ikey] = _ParseResultsWithOffset("", i) - elif len(tok) == 2 and not isinstance(tok[1], ParseResults): - tokenlist[ikey] = _ParseResultsWithOffset(tok[1], i) - else: - dictvalue = tok.copy() # ParseResults(i) - del dictvalue[0] - if len(dictvalue) != 1 or (isinstance(dictvalue, ParseResults) and dictvalue.haskeys()): - tokenlist[ikey] = _ParseResultsWithOffset(dictvalue, i) - else: - tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0], i) - - if self.resultsName: - return [tokenlist] - else: - return tokenlist - - -class Suppress(TokenConverter): - """Converter for ignoring the results of a parsed expression. - - Example:: - - source = "a, b, c,d" - wd = Word(alphas) - wd_list1 = wd + ZeroOrMore(',' + wd) - print(wd_list1.parseString(source)) - - # often, delimiters that are useful during parsing are just in the - # way afterward - use Suppress to keep them out of the parsed output - wd_list2 = wd + ZeroOrMore(Suppress(',') + wd) - print(wd_list2.parseString(source)) - - prints:: - - ['a', ',', 'b', ',', 'c', ',', 'd'] - ['a', 'b', 'c', 'd'] - - (See also :class:`delimitedList`.) - """ - def postParse(self, instring, loc, tokenlist): - return [] - - def suppress(self): - return self - - -class OnlyOnce(object): - """Wrapper for parse actions, to ensure they are only called once. - """ - def __init__(self, methodCall): - self.callable = _trim_arity(methodCall) - self.called = False - def __call__(self, s, l, t): - if not self.called: - results = self.callable(s, l, t) - self.called = True - return results - raise ParseException(s, l, "") - def reset(self): - self.called = False - -def traceParseAction(f): - """Decorator for debugging parse actions. - - When the parse action is called, this decorator will print - ``">> entering method-name(line:, , )"``. - When the parse action completes, the decorator will print - ``"<<"`` followed by the returned value, or any exception that the parse action raised. 
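``OnlyOnce`` above ships without a usage example; a minimal sketch (hypothetical parse action, not from the vendored source)::

    from pyparsing import OnlyOnce, ParseException, Word, alphas

    def announce(tokens):
        print("first match:", tokens)

    once = OnlyOnce(announce)
    wd = Word(alphas).setParseAction(once)
    wd.parseString("hello")        # parse action fires once
    try:
        wd.parseString("again")    # a second firing raises ParseException
    except ParseException:
        once.reset()               # re-arm the wrapped action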
- - Example:: - - wd = Word(alphas) - - @traceParseAction - def remove_duplicate_chars(tokens): - return ''.join(sorted(set(''.join(tokens)))) - - wds = OneOrMore(wd).setParseAction(remove_duplicate_chars) - print(wds.parseString("slkdjs sld sldd sdlf sdljf")) - - prints:: - - >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {})) - <<leaving remove_duplicate_chars (ret: 'dfjkls') - ['dfjkls'] - """ - f = _trim_arity(f) - def z(*paArgs): - thisFunc = f.__name__ - s, l, t = paArgs[-3:] - if len(paArgs) > 3: - thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc - sys.stderr.write(">>entering %s(line: '%s', %d, %r)\n" % (thisFunc, line(l, s), l, t)) - try: - ret = f(*paArgs) - except Exception as exc: - sys.stderr.write("<<leaving %s (exception: %s)\n" % (thisFunc, exc)) - raise - sys.stderr.write("<<leaving %s (ret: %r)\n" % (thisFunc, ret)) - return ret - try: - z.__name__ = f.__name__ - except AttributeError: - z.__name__ = "traceParseAction" - return z - -def delimitedList(expr, delim=",", combine=False): - """Helper to define a delimited list of expressions - the delimiter - defaults to ','. By default, the list elements and delimiters can - have intervening whitespace, and comments, but this can be - overridden by passing 'combine=True' in the constructor. If - ``combine`` is set to ``True``, the matching tokens are - returned as a single token string, with the delimiters included; - otherwise, the matching tokens are returned as a list of tokens, - with the delimiters suppressed. - - Example:: - - delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc'] - delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] - """ - dlName = _ustr(expr) + " [" + _ustr(delim) + " " + _ustr(expr) + "]..." - if combine: - return Combine(expr + ZeroOrMore(delim + expr)).setName(dlName) - else: - return (expr + ZeroOrMore(Suppress(delim) + expr)).setName(dlName) - -def countedArray(expr, intExpr=None): - """Helper to define a counted list of expressions. - - This helper defines a pattern of the form:: - - integer expr expr expr... - - where the leading integer tells how many expr expressions follow. - The matched tokens returns the array of expr tokens as a list - the - leading count token is suppressed. - - If ``intExpr`` is specified, it should be a pyparsing expression - that produces an integer value. - - Example:: - - countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd'] - - # in this parser, the leading integer value is given in binary, - # '10' indicating that 2 values are in the array - binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2)) - countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd'] - """ - arrayExpr = Forward() - def countFieldParseAction(s, l, t): - n = t[0] - arrayExpr << (n and Group(And([expr] * n)) or Group(empty)) - return [] - if intExpr is None: - intExpr = Word(nums).setParseAction(lambda t: int(t[0])) - else: - intExpr = intExpr.copy() - intExpr.setName("arrayLen") - intExpr.addParseAction(countFieldParseAction, callDuringTry=True) - return (intExpr + arrayExpr).setName('(len) ' + _ustr(expr) + '...') - -def _flatten(L): - ret = [] - for i in L: - if isinstance(i, list): - ret.extend(_flatten(i)) - else: - ret.append(i) - return ret - -def matchPreviousLiteral(expr): - """Helper to define an expression that is indirectly defined from - the tokens matched in a previous expression, that is, it looks for - a 'repeat' of a previous expression. For example:: - - first = Word(nums) - second = matchPreviousLiteral(first) - matchExpr = first + ":" + second - - will match ``"1:1"``, but not ``"1:2"``. Because this - matches a previous literal, will also match the leading - ``"1:1"`` in ``"1:10"``. If this is not desired, use - :class:`matchPreviousExpr`. Do *not* use with packrat parsing - enabled. - """ - rep = Forward() - def copyTokenToRepeater(s, l, t): - if t: - if len(t) == 1: - rep << t[0] - else: - # flatten t tokens - tflat = _flatten(t.asList()) - rep << And(Literal(tt) for tt in tflat) - else: - rep << Empty() - expr.addParseAction(copyTokenToRepeater, callDuringTry=True) - rep.setName('(prev) ' + _ustr(expr)) - return rep - -def matchPreviousExpr(expr): - """Helper to define an expression that is indirectly defined from - the tokens matched in a previous expression, that is, it looks for - a 'repeat' of a previous expression.
For example:: - - first = Word(nums) - second = matchPreviousExpr(first) - matchExpr = first + ":" + second - - will match ``"1:1"``, but not ``"1:2"``. Because this - matches by expressions, will *not* match the leading ``"1:1"`` - in ``"1:10"``; the expressions are evaluated first, and then - compared, so ``"1"`` is compared with ``"10"``. Do *not* use - with packrat parsing enabled. - """ - rep = Forward() - e2 = expr.copy() - rep <<= e2 - def copyTokenToRepeater(s, l, t): - matchTokens = _flatten(t.asList()) - def mustMatchTheseTokens(s, l, t): - theseTokens = _flatten(t.asList()) - if theseTokens != matchTokens: - raise ParseException('', 0, '') - rep.setParseAction(mustMatchTheseTokens, callDuringTry=True) - expr.addParseAction(copyTokenToRepeater, callDuringTry=True) - rep.setName('(prev) ' + _ustr(expr)) - return rep - -def _escapeRegexRangeChars(s): - # ~ escape these chars: ^-[] - for c in r"\^-[]": - s = s.replace(c, _bslash + c) - s = s.replace("\n", r"\n") - s = s.replace("\t", r"\t") - return _ustr(s) - -def oneOf(strs, caseless=False, useRegex=True, asKeyword=False): - """Helper to quickly define a set of alternative Literals, and makes - sure to do longest-first testing when there is a conflict, - regardless of the input order, but returns - a :class:`MatchFirst` for best performance. - - Parameters: - - - strs - a string of space-delimited literals, or a collection of - string literals - - caseless - (default= ``False``) - treat all literals as - caseless - - useRegex - (default= ``True``) - as an optimization, will - generate a Regex object; otherwise, will generate - a :class:`MatchFirst` object (if ``caseless=True`` or ``asKeyword=True``, or if - creating a :class:`Regex` raises an exception) - - asKeyword - (default=``False``) - enforce Keyword-style matching on the - generated expressions - - Example:: - - comp_oper = oneOf("< = > <= >= !=") - var = Word(alphas) - number = Word(nums) - term = var | number - comparison_expr = term + comp_oper + term - print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12")) - - prints:: - - [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] - """ - if isinstance(caseless, basestring): - warnings.warn("More than one string argument passed to oneOf, pass " - "choices as a list or space-delimited string", stacklevel=2) - - if caseless: - isequal = (lambda a, b: a.upper() == b.upper()) - masks = (lambda a, b: b.upper().startswith(a.upper())) - parseElementClass = CaselessKeyword if asKeyword else CaselessLiteral - else: - isequal = (lambda a, b: a == b) - masks = (lambda a, b: b.startswith(a)) - parseElementClass = Keyword if asKeyword else Literal - - symbols = [] - if isinstance(strs, basestring): - symbols = strs.split() - elif isinstance(strs, Iterable): - symbols = list(strs) - else: - warnings.warn("Invalid argument to oneOf, expected string or iterable", - SyntaxWarning, stacklevel=2) - if not symbols: - return NoMatch() - - if not asKeyword: - # if not producing keywords, need to reorder to take care to avoid masking - # longer choices with shorter ones - i = 0 - while i < len(symbols) - 1: - cur = symbols[i] - for j, other in enumerate(symbols[i + 1:]): - if isequal(other, cur): - del symbols[i + j + 1] - break - elif masks(cur, other): - del symbols[i + j + 1] - symbols.insert(i, other) - break - else: - i += 1 - - if not (caseless or asKeyword) and useRegex: - # ~ print (strs, "->", "|".join([_escapeRegexChars(sym) for sym in symbols])) - try: - if len(symbols) == len("".join(symbols)): - return 
Regex("[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols)).setName(' | '.join(symbols)) - else: - return Regex("|".join(re.escape(sym) for sym in symbols)).setName(' | '.join(symbols)) - except Exception: - warnings.warn("Exception creating Regex for oneOf, building MatchFirst", - SyntaxWarning, stacklevel=2) - - # last resort, just use MatchFirst - return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols)) - -def dictOf(key, value): - """Helper to easily and clearly define a dictionary by specifying - the respective patterns for the key and value. Takes care of - defining the :class:`Dict`, :class:`ZeroOrMore`, and - :class:`Group` tokens in the proper order. The key pattern - can include delimiting markers or punctuation, as long as they are - suppressed, thereby leaving the significant key text. The value - pattern can include named results, so that the :class:`Dict` results - can include named token fields. - - Example:: - - text = "shape: SQUARE posn: upper left color: light blue texture: burlap" - attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - print(OneOrMore(attr_expr).parseString(text).dump()) - - attr_label = label - attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join) - - # similar to Dict, but simpler call format - result = dictOf(attr_label, attr_value).parseString(text) - print(result.dump()) - print(result['shape']) - print(result.shape) # object attribute access works too - print(result.asDict()) - - prints:: - - [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] - - color: light blue - - posn: upper left - - shape: SQUARE - - texture: burlap - SQUARE - SQUARE - {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'} - """ - return Dict(OneOrMore(Group(key + value))) - -def originalTextFor(expr, asString=True): - """Helper to return the original, untokenized text for a given - expression. Useful to restore the parsed fields of an HTML start - tag into the raw tag text itself, or to revert separate tokens with - intervening whitespace back to the original matching input text. By - default, returns astring containing the original parsed text. - - If the optional ``asString`` argument is passed as - ``False``, then the return value is - a :class:`ParseResults` containing any results names that - were originally matched, and a single token containing the original - matched text from the input string. So if the expression passed to - :class:`originalTextFor` contains expressions with defined - results names, you must set ``asString`` to ``False`` if you - want to preserve those results name values. 
- - Example:: - - src = "this is test <b> bold <i>text</i> </b> normal text " - for tag in ("b", "i"): - opener, closer = makeHTMLTags(tag) - patt = originalTextFor(opener + SkipTo(closer) + closer) - print(patt.searchString(src)[0]) - - prints:: - - ['<b> bold <i>text</i> </b>'] - ['<i>text</i>'] - """ - locMarker = Empty().setParseAction(lambda s, loc, t: loc) - endlocMarker = locMarker.copy() - endlocMarker.callPreparse = False - matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end") - if asString: - extractText = lambda s, l, t: s[t._original_start: t._original_end] - else: - def extractText(s, l, t): - t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]] - matchExpr.setParseAction(extractText) - matchExpr.ignoreExprs = expr.ignoreExprs - return matchExpr - -def ungroup(expr): - """Helper to undo pyparsing's default grouping of And expressions, - even if all but one are non-empty. - """ - return TokenConverter(expr).addParseAction(lambda t: t[0]) - -def locatedExpr(expr): - """Helper to decorate a returned token with its starting and ending - locations in the input string. - - This helper adds the following results names: - - - locn_start = location where matched expression begins - - locn_end = location where matched expression ends - - value = the actual parsed results - - Be careful if the input text contains ``<TAB>`` characters, you - may want to call :class:`ParserElement.parseWithTabs` - - Example:: - - wd = Word(alphas) - for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"): - print(match) - - prints:: - - [[0, 'ljsdf', 5]] - [[8, 'lksdjjf', 15]] - [[18, 'lkkjj', 23]] - """ - locator = Empty().setParseAction(lambda s, l, t: l) - return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end")) - - -# convenience constants for positional expressions -empty = Empty().setName("empty") -lineStart = LineStart().setName("lineStart") -lineEnd = LineEnd().setName("lineEnd") -stringStart = StringStart().setName("stringStart") -stringEnd = StringEnd().setName("stringEnd") - -_escapedPunc = Word(_bslash, r"\[]-*.$+^?()~ ", exact=2).setParseAction(lambda s, l, t: t[0][1]) -_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s, l, t: unichr(int(t[0].lstrip(r'\0x'), 16))) -_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s, l, t: unichr(int(t[0][1:], 8))) -_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1) -_charRange = Group(_singleChar + Suppress("-") + _singleChar) -_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group(OneOrMore(_charRange | _singleChar)).setResultsName("body") + "]" - -def srange(s): - r"""Helper to easily define string ranges for use in Word - construction. Borrows syntax from regexp '[]' string range - definitions:: - - srange("[0-9]") -> "0123456789" - srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" - srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" - - The input string must be enclosed in []'s, and the returned string - is the expanded character set joined into a single string.
The - values enclosed in the []'s may be: - - - a single character - - an escaped character with a leading backslash (such as ``\-`` - or ``\]``) - - an escaped hex character with a leading ``'\x'`` - (``\x21``, which is a ``'!'`` character) (``\0x##`` - is also supported for backwards compatibility) - - an escaped octal character with a leading ``'\0'`` - (``\041``, which is a ``'!'`` character) - - a range of any of the above, separated by a dash (``'a-z'``, - etc.) - - any combination of the above (``'aeiouy'``, - ``'a-zA-Z0-9_$'``, etc.) - """ - _expanded = lambda p: p if not isinstance(p, ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]), ord(p[1]) + 1)) - try: - return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body) - except Exception: - return "" - -def matchOnlyAtCol(n): - """Helper method for defining parse actions that require matching at - a specific column in the input text. - """ - def verifyCol(strg, locn, toks): - if col(locn, strg) != n: - raise ParseException(strg, locn, "matched token not at column %d" % n) - return verifyCol - -def replaceWith(replStr): - """Helper method for common parse actions that simply return - a literal value. Especially useful when used with - :class:`transformString` (). - - Example:: - - num = Word(nums).setParseAction(lambda toks: int(toks[0])) - na = oneOf("N/A NA").setParseAction(replaceWith(math.nan)) - term = na | num - - OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234] - """ - return lambda s, l, t: [replStr] - -def removeQuotes(s, l, t): - """Helper parse action for removing quotation marks from parsed - quoted strings. - - Example:: - - # by default, quotation marks are included in parsed results - quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"] - - # use removeQuotes to strip quotation marks from parsed results - quotedString.setParseAction(removeQuotes) - quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"] - """ - return t[0][1:-1] - -def tokenMap(func, *args): - """Helper to define a parse action by mapping a function to all - elements of a ParseResults list. If any additional args are passed, - they are forwarded to the given function as additional arguments - after the token, as in - ``hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))``, - which will convert the parsed data to an integer using base 16. 
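``matchOnlyAtCol`` above is documented without an example; a minimal sketch (hypothetical input string, not from the vendored source)::

    from pyparsing import Word, nums, matchOnlyAtCol

    # accept an integer only when it begins in column 5 (columns are 1-based)
    num = Word(nums).setParseAction(matchOnlyAtCol(5))
    print(num.searchString("abc 12 345"))  # -> [['12']] ('345' begins at column 8)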
- - Example (compare the last to example in :class:`ParserElement.transformString`:: - - hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16)) - hex_ints.runTests(''' - 00 11 22 aa FF 0a 0d 1a - ''') - - upperword = Word(alphas).setParseAction(tokenMap(str.upper)) - OneOrMore(upperword).runTests(''' - my kingdom for a horse - ''') - - wd = Word(alphas).setParseAction(tokenMap(str.title)) - OneOrMore(wd).setParseAction(' '.join).runTests(''' - now is the winter of our discontent made glorious summer by this sun of york - ''') - - prints:: - - 00 11 22 aa FF 0a 0d 1a - [0, 17, 34, 170, 255, 10, 13, 26] - - my kingdom for a horse - ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE'] - - now is the winter of our discontent made glorious summer by this sun of york - ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York'] - """ - def pa(s, l, t): - return [func(tokn, *args) for tokn in t] - - try: - func_name = getattr(func, '__name__', - getattr(func, '__class__').__name__) - except Exception: - func_name = str(func) - pa.__name__ = func_name - - return pa - -upcaseTokens = tokenMap(lambda t: _ustr(t).upper()) -"""(Deprecated) Helper parse action to convert tokens to upper case. -Deprecated in favor of :class:`pyparsing_common.upcaseTokens`""" - -downcaseTokens = tokenMap(lambda t: _ustr(t).lower()) -"""(Deprecated) Helper parse action to convert tokens to lower case. -Deprecated in favor of :class:`pyparsing_common.downcaseTokens`""" - -def _makeTags(tagStr, xml, - suppress_LT=Suppress("<"), - suppress_GT=Suppress(">")): - """Internal helper to construct opening and closing tag expressions, given a tag name""" - if isinstance(tagStr, basestring): - resname = tagStr - tagStr = Keyword(tagStr, caseless=not xml) - else: - resname = tagStr.name - - tagAttrName = Word(alphas, alphanums + "_-:") - if xml: - tagAttrValue = dblQuotedString.copy().setParseAction(removeQuotes) - openTag = (suppress_LT - + tagStr("tag") - + Dict(ZeroOrMore(Group(tagAttrName + Suppress("=") + tagAttrValue))) - + Optional("/", default=[False])("empty").setParseAction(lambda s, l, t: t[0] == '/') - + suppress_GT) - else: - tagAttrValue = quotedString.copy().setParseAction(removeQuotes) | Word(printables, excludeChars=">") - openTag = (suppress_LT - + tagStr("tag") - + Dict(ZeroOrMore(Group(tagAttrName.setParseAction(downcaseTokens) - + Optional(Suppress("=") + tagAttrValue)))) - + Optional("/", default=[False])("empty").setParseAction(lambda s, l, t: t[0] == '/') - + suppress_GT) - closeTag = Combine(_L("", adjacent=False) - - openTag.setName("<%s>" % resname) - # add start results name in parse action now that ungrouped names are not reported at two levels - openTag.addParseAction(lambda t: t.__setitem__("start" + "".join(resname.replace(":", " ").title().split()), t.copy())) - closeTag = closeTag("end" + "".join(resname.replace(":", " ").title().split())).setName("" % resname) - openTag.tag = resname - closeTag.tag = resname - openTag.tag_body = SkipTo(closeTag()) - return openTag, closeTag - -def makeHTMLTags(tagStr): - """Helper to construct opening and closing tag expressions for HTML, - given a tag name. Matches tags in either upper or lower case, - attributes with namespaces and with quoted or unquoted values. - - Example:: - - text = 'More info at the
<a href="https://github.com/pyparsing/pyparsing/wiki">pyparsing</a> wiki page' - # makeHTMLTags returns pyparsing expressions for the opening and - # closing tags as a 2-tuple - a, a_end = makeHTMLTags("A") - link_expr = a + SkipTo(a_end)("link_text") + a_end - - for link in link_expr.searchString(text): - # attributes in the <a> tag (like "href" shown here) are - # also accessible as named results - print(link.link_text, '->', link.href) - - prints:: - - pyparsing -> https://github.com/pyparsing/pyparsing/wiki - """ - return _makeTags(tagStr, False) - -def makeXMLTags(tagStr): - """Helper to construct opening and closing tag expressions for XML, - given a tag name. Matches tags only in the given upper/lower case. - - Example: similar to :class:`makeHTMLTags` - """ - return _makeTags(tagStr, True) - -def withAttribute(*args, **attrDict): - """Helper to create a validating parse action to be used with start - tags created with :class:`makeXMLTags` or - :class:`makeHTMLTags`. Use ``withAttribute`` to qualify - a starting tag with a required attribute value, to avoid false - matches on common tags such as ``<TD>`` or ``<DIV>
``. - - Call ``withAttribute`` with a series of attribute names and - values. Specify the list of filter attributes names and values as: - - - keyword arguments, as in ``(align="right")``, or - - as an explicit dict with ``**`` operator, when an attribute - name is also a Python reserved word, as in ``**{"class":"Customer", "align":"right"}`` - - a list of name-value tuples, as in ``(("ns1:class", "Customer"), ("ns2:align", "right"))`` - - For attribute names with a namespace prefix, you must use the second - form. Attribute names are matched insensitive to upper/lower case. - - If just testing for ``class`` (with or without a namespace), use - :class:`withClass`. - - To verify that the attribute exists, but without specifying a value, - pass ``withAttribute.ANY_VALUE`` as the value. - - Example:: - - html = ''' -
<div> - Some text - <div type="grid">1 4 0 1 0</div> - <div type="graph">1,3 2,3 1,1</div> - <div>this has no type</div> - </div>
- - ''' - div,div_end = makeHTMLTags("div") - - # only match div tag having a type attribute with value "grid" - div_grid = div().setParseAction(withAttribute(type="grid")) - grid_expr = div_grid + SkipTo(div | div_end)("body") - for grid_header in grid_expr.searchString(html): - print(grid_header.body) - - # construct a match with any div tag having a type attribute, regardless of the value - div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE)) - div_expr = div_any_type + SkipTo(div | div_end)("body") - for div_header in div_expr.searchString(html): - print(div_header.body) - - prints:: - - 1 4 0 1 0 - - 1 4 0 1 0 - 1,3 2,3 1,1 - """ - if args: - attrs = args[:] - else: - attrs = attrDict.items() - attrs = [(k, v) for k, v in attrs] - def pa(s, l, tokens): - for attrName, attrValue in attrs: - if attrName not in tokens: - raise ParseException(s, l, "no matching attribute " + attrName) - if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue: - raise ParseException(s, l, "attribute '%s' has value '%s', must be '%s'" % - (attrName, tokens[attrName], attrValue)) - return pa -withAttribute.ANY_VALUE = object() - -def withClass(classname, namespace=''): - """Simplified version of :class:`withAttribute` when - matching on a div class - made difficult because ``class`` is - a reserved word in Python. - - Example:: - - html = ''' -
<div> - Some text - <div class="grid">1 4 0 1 0</div> - <div class="graph">1,3 2,3 1,1</div> - <div>this &lt;div&gt; has no class</div> - </div>
- - ''' - div,div_end = makeHTMLTags("div") - div_grid = div().setParseAction(withClass("grid")) - - grid_expr = div_grid + SkipTo(div | div_end)("body") - for grid_header in grid_expr.searchString(html): - print(grid_header.body) - - div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) - div_expr = div_any_type + SkipTo(div | div_end)("body") - for div_header in div_expr.searchString(html): - print(div_header.body) - - prints:: - - 1 4 0 1 0 - - 1 4 0 1 0 - 1,3 2,3 1,1 - """ - classattr = "%s:class" % namespace if namespace else "class" - return withAttribute(**{classattr: classname}) - -opAssoc = SimpleNamespace() -opAssoc.LEFT = object() -opAssoc.RIGHT = object() - -def infixNotation(baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')')): - """Helper method for constructing grammars of expressions made up of - operators working in a precedence hierarchy. Operators may be unary - or binary, left- or right-associative. Parse actions can also be - attached to operator expressions. The generated parser will also - recognize the use of parentheses to override operator precedences - (see example below). - - Note: if you define a deep operator list, you may see performance - issues when using infixNotation. See - :class:`ParserElement.enablePackrat` for a mechanism to potentially - improve your parser performance. - - Parameters: - - baseExpr - expression representing the most basic element for the - nested - - opList - list of tuples, one for each operator precedence level - in the expression grammar; each tuple is of the form ``(opExpr, - numTerms, rightLeftAssoc, parseAction)``, where: - - - opExpr is the pyparsing expression for the operator; may also - be a string, which will be converted to a Literal; if numTerms - is 3, opExpr is a tuple of two expressions, for the two - operators separating the 3 terms - - numTerms is the number of terms for this operator (must be 1, - 2, or 3) - - rightLeftAssoc is the indicator whether the operator is right - or left associative, using the pyparsing-defined constants - ``opAssoc.RIGHT`` and ``opAssoc.LEFT``. 
- - parseAction is the parse action to be associated with - expressions matching this operator expression (the parse action - tuple member may be omitted); if the parse action is passed - a tuple or list of functions, this is equivalent to calling - ``setParseAction(*fn)`` - (:class:`ParserElement.setParseAction`) - - lpar - expression for matching left-parentheses - (default= ``Suppress('(')``) - - rpar - expression for matching right-parentheses - (default= ``Suppress(')')``) - - Example:: - - # simple example of four-function arithmetic with ints and - # variable names - integer = pyparsing_common.signed_integer - varname = pyparsing_common.identifier - - arith_expr = infixNotation(integer | varname, - [ - ('-', 1, opAssoc.RIGHT), - (oneOf('* /'), 2, opAssoc.LEFT), - (oneOf('+ -'), 2, opAssoc.LEFT), - ]) - - arith_expr.runTests(''' - 5+3*6 - (5+3)*6 - -2--11 - ''', fullDump=False) - - prints:: - - 5+3*6 - [[5, '+', [3, '*', 6]]] - - (5+3)*6 - [[[5, '+', 3], '*', 6]] - - -2--11 - [[['-', 2], '-', ['-', 11]]] - """ - # captive version of FollowedBy that does not do parse actions or capture results names - class _FB(FollowedBy): - def parseImpl(self, instring, loc, doActions=True): - self.expr.tryParse(instring, loc) - return loc, [] - - ret = Forward() - lastExpr = baseExpr | (lpar + ret + rpar) - for i, operDef in enumerate(opList): - opExpr, arity, rightLeftAssoc, pa = (operDef + (None, ))[:4] - termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr - if arity == 3: - if opExpr is None or len(opExpr) != 2: - raise ValueError( - "if numterms=3, opExpr must be a tuple or list of two expressions") - opExpr1, opExpr2 = opExpr - thisExpr = Forward().setName(termName) - if rightLeftAssoc == opAssoc.LEFT: - if arity == 1: - matchExpr = _FB(lastExpr + opExpr) + Group(lastExpr + OneOrMore(opExpr)) - elif arity == 2: - if opExpr is not None: - matchExpr = _FB(lastExpr + opExpr + lastExpr) + Group(lastExpr + OneOrMore(opExpr + lastExpr)) - else: - matchExpr = _FB(lastExpr + lastExpr) + Group(lastExpr + OneOrMore(lastExpr)) - elif arity == 3: - matchExpr = (_FB(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) - + Group(lastExpr + OneOrMore(opExpr1 + lastExpr + opExpr2 + lastExpr))) - else: - raise ValueError("operator must be unary (1), binary (2), or ternary (3)") - elif rightLeftAssoc == opAssoc.RIGHT: - if arity == 1: - # try to avoid LR with this extra test - if not isinstance(opExpr, Optional): - opExpr = Optional(opExpr) - matchExpr = _FB(opExpr.expr + thisExpr) + Group(opExpr + thisExpr) - elif arity == 2: - if opExpr is not None: - matchExpr = _FB(lastExpr + opExpr + thisExpr) + Group(lastExpr + OneOrMore(opExpr + thisExpr)) - else: - matchExpr = _FB(lastExpr + thisExpr) + Group(lastExpr + OneOrMore(thisExpr)) - elif arity == 3: - matchExpr = (_FB(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) - + Group(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr)) - else: - raise ValueError("operator must be unary (1), binary (2), or ternary (3)") - else: - raise ValueError("operator must indicate right or left associativity") - if pa: - if isinstance(pa, (tuple, list)): - matchExpr.setParseAction(*pa) - else: - matchExpr.setParseAction(pa) - thisExpr <<= (matchExpr.setName(termName) | lastExpr) - lastExpr = thisExpr - ret <<= lastExpr - return ret - -operatorPrecedence = infixNotation -"""(Deprecated) Former name of :class:`infixNotation`, will be -dropped in a future release.""" - -dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*') + 
'"').setName("string enclosed in double quotes") -sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'").setName("string enclosed in single quotes") -quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*') + '"' - | Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'").setName("quotedString using single or double quotes") -unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal") - -def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): - """Helper method for defining nested lists enclosed in opening and - closing delimiters ("(" and ")" are the default). - - Parameters: - - opener - opening character for a nested list - (default= ``"("``); can also be a pyparsing expression - - closer - closing character for a nested list - (default= ``")"``); can also be a pyparsing expression - - content - expression for items within the nested lists - (default= ``None``) - - ignoreExpr - expression for ignoring opening and closing - delimiters (default= :class:`quotedString`) - - If an expression is not provided for the content argument, the - nested expression will capture all whitespace-delimited content - between delimiters as a list of separate values. - - Use the ``ignoreExpr`` argument to define expressions that may - contain opening or closing characters that should not be treated as - opening or closing characters for nesting, such as quotedString or - a comment expression. Specify multiple expressions using an - :class:`Or` or :class:`MatchFirst`. The default is - :class:`quotedString`, but if no expressions are to be ignored, then - pass ``None`` for this argument. - - Example:: - - data_type = oneOf("void int short long char float double") - decl_data_type = Combine(data_type + Optional(Word('*'))) - ident = Word(alphas+'_', alphanums+'_') - number = pyparsing_common.number - arg = Group(decl_data_type + ident) - LPAR, RPAR = map(Suppress, "()") - - code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) - - c_function = (decl_data_type("type") - + ident("name") - + LPAR + Optional(delimitedList(arg), [])("args") + RPAR - + code_body("body")) - c_function.ignore(cStyleComment) - - source_code = ''' - int is_odd(int x) { - return (x%2); - } - - int dec_to_hex(char hchar) { - if (hchar >= '0' && hchar <= '9') { - return (ord(hchar)-ord('0')); - } else { - return (10+ord(hchar)-ord('A')); - } - } - ''' - for func in c_function.searchString(source_code): - print("%(name)s (%(type)s) args: %(args)s" % func) - - - prints:: - - is_odd (int) args: [['int', 'x']] - dec_to_hex (int) args: [['char', 'hchar']] - """ - if opener == closer: - raise ValueError("opening and closing strings cannot be the same") - if content is None: - if isinstance(opener, basestring) and isinstance(closer, basestring): - if len(opener) == 1 and len(closer) == 1: - if ignoreExpr is not None: - content = (Combine(OneOrMore(~ignoreExpr - + CharsNotIn(opener - + closer - + ParserElement.DEFAULT_WHITE_CHARS, exact=1) - ) - ).setParseAction(lambda t: t[0].strip())) - else: - content = (empty.copy() + CharsNotIn(opener - + closer - + ParserElement.DEFAULT_WHITE_CHARS - ).setParseAction(lambda t: t[0].strip())) - else: - if ignoreExpr is not None: - content = (Combine(OneOrMore(~ignoreExpr - + ~Literal(opener) - + ~Literal(closer) - + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS, exact=1)) - ).setParseAction(lambda t: t[0].strip())) - else: - content = 
(Combine(OneOrMore(~Literal(opener) - + ~Literal(closer) - + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS, exact=1)) - ).setParseAction(lambda t: t[0].strip())) - else: - raise ValueError("opening and closing arguments must be strings if no content expression is given") - ret = Forward() - if ignoreExpr is not None: - ret <<= Group(Suppress(opener) + ZeroOrMore(ignoreExpr | ret | content) + Suppress(closer)) - else: - ret <<= Group(Suppress(opener) + ZeroOrMore(ret | content) + Suppress(closer)) - ret.setName('nested %s%s expression' % (opener, closer)) - return ret - -def indentedBlock(blockStatementExpr, indentStack, indent=True): - """Helper method for defining space-delimited indentation blocks, - such as those used to define block statements in Python source code. - - Parameters: - - - blockStatementExpr - expression defining syntax of statement that - is repeated within the indented block - - indentStack - list created by caller to manage indentation stack - (multiple statementWithIndentedBlock expressions within a single - grammar should share a common indentStack) - - indent - boolean indicating whether block must be indented beyond - the current level; set to False for block of left-most - statements (default= ``True``) - - A valid block must contain at least one ``blockStatement``. - - Example:: - - data = ''' - def A(z): - A1 - B = 100 - G = A2 - A2 - A3 - B - def BB(a,b,c): - BB1 - def BBA(): - bba1 - bba2 - bba3 - C - D - def spam(x,y): - def eggs(z): - pass - ''' - - - indentStack = [1] - stmt = Forward() - - identifier = Word(alphas, alphanums) - funcDecl = ("def" + identifier + Group("(" + Optional(delimitedList(identifier)) + ")") + ":") - func_body = indentedBlock(stmt, indentStack) - funcDef = Group(funcDecl + func_body) - - rvalue = Forward() - funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") - rvalue << (funcCall | identifier | Word(nums)) - assignment = Group(identifier + "=" + rvalue) - stmt << (funcDef | assignment | identifier) - - module_body = OneOrMore(stmt) - - parseTree = module_body.parseString(data) - parseTree.pprint() - - prints:: - - [['def', - 'A', - ['(', 'z', ')'], - ':', - [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], - 'B', - ['def', - 'BB', - ['(', 'a', 'b', 'c', ')'], - ':', - [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], - 'C', - 'D', - ['def', - 'spam', - ['(', 'x', 'y', ')'], - ':', - [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] - """ - backup_stack = indentStack[:] - - def reset_stack(): - indentStack[:] = backup_stack - - def checkPeerIndent(s, l, t): - if l >= len(s): return - curCol = col(l, s) - if curCol != indentStack[-1]: - if curCol > indentStack[-1]: - raise ParseException(s, l, "illegal nesting") - raise ParseException(s, l, "not a peer entry") - - def checkSubIndent(s, l, t): - curCol = col(l, s) - if curCol > indentStack[-1]: - indentStack.append(curCol) - else: - raise ParseException(s, l, "not a subentry") - - def checkUnindent(s, l, t): - if l >= len(s): return - curCol = col(l, s) - if not(indentStack and curCol in indentStack): - raise ParseException(s, l, "not an unindent") - if curCol < indentStack[-1]: - indentStack.pop() - - NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress(), stopOn=StringEnd()) - INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') - PEER = Empty().setParseAction(checkPeerIndent).setName('') - UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') - if indent: - smExpr 
= Group(Optional(NL) - + INDENT - + OneOrMore(PEER + Group(blockStatementExpr) + Optional(NL), stopOn=StringEnd()) - + UNDENT) - else: - smExpr = Group(Optional(NL) - + OneOrMore(PEER + Group(blockStatementExpr) + Optional(NL), stopOn=StringEnd()) - + UNDENT) - smExpr.setFailAction(lambda a, b, c, d: reset_stack()) - blockStatementExpr.ignore(_bslash + LineEnd()) - return smExpr.setName('indented block') - -alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") -punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") - -anyOpenTag, anyCloseTag = makeHTMLTags(Word(alphas, alphanums + "_:").setName('any tag')) -_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(), '><& "\'')) -commonHTMLEntity = Regex('&(?P<entity>' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity") -def replaceHTMLEntity(t): - """Helper parser action to replace common HTML entities with their special characters""" - return _htmlEntityMap.get(t.entity) - -# it's easy to get these comment structures wrong - they're very common, so may as well make them available -cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment") -"Comment of the form ``/* ... */``" - -htmlComment = Regex(r"<!--[\s\S]*?-->").setName("HTML comment") -"Comment of the form ``<!-- ... -->``" - -restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line") -dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment") -"Comment of the form ``// ... (to end of line)``" - -cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/' | dblSlashComment).setName("C++ style comment") -"Comment of either form :class:`cStyleComment` or :class:`dblSlashComment`" - -javaStyleComment = cppStyleComment -"Same as :class:`cppStyleComment`" - -pythonStyleComment = Regex(r"#.*").setName("Python style comment") -"Comment of the form ``# ... (to end of line)``" - -_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') - + Optional(Word(" \t") - + ~Literal(",") + ~LineEnd()))).streamline().setName("commaItem") -commaSeparatedList = delimitedList(Optional(quotedString.copy() | _commasepitem, default="")).setName("commaSeparatedList") -"""(Deprecated) Predefined expression of 1 or more printable words or -quoted strings, separated by commas. - -This expression is deprecated in favor of :class:`pyparsing_common.comma_separated_list`.
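A quick sketch of pulling comments out of C-like source with the predefined expressions above (illustrative snippet, not from the vendored source)::

    from pyparsing import cppStyleComment

    src = '''
    int x = 1;  // trailing note
    /* block
       comment */
    '''
    for c in cppStyleComment.searchString(src):
        print(c[0])
    # prints the '//' comment first, then the '/* ... */' comment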
-""" - -# some other useful expressions - using lower-case class name since we are really using this as a namespace -class pyparsing_common: - """Here are some common low-level expressions that may be useful in - jump-starting parser development: - - - numeric forms (:class:`integers`, :class:`reals`, - :class:`scientific notation`) - - common :class:`programming identifiers` - - network addresses (:class:`MAC`, - :class:`IPv4`, :class:`IPv6`) - - ISO8601 :class:`dates` and - :class:`datetime` - - :class:`UUID` - - :class:`comma-separated list` - - Parse actions: - - - :class:`convertToInteger` - - :class:`convertToFloat` - - :class:`convertToDate` - - :class:`convertToDatetime` - - :class:`stripHTMLTags` - - :class:`upcaseTokens` - - :class:`downcaseTokens` - - Example:: - - pyparsing_common.number.runTests(''' - # any int or real number, returned as the appropriate type - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - ''') - - pyparsing_common.fnumber.runTests(''' - # any int or real number, returned as float - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - ''') - - pyparsing_common.hex_integer.runTests(''' - # hex numbers - 100 - FF - ''') - - pyparsing_common.fraction.runTests(''' - # fractions - 1/2 - -3/4 - ''') - - pyparsing_common.mixed_integer.runTests(''' - # mixed fractions - 1 - 1/2 - -3/4 - 1-3/4 - ''') - - import uuid - pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) - pyparsing_common.uuid.runTests(''' - # uuid - 12345678-1234-5678-1234-567812345678 - ''') - - prints:: - - # any int or real number, returned as the appropriate type - 100 - [100] - - -100 - [-100] - - +100 - [100] - - 3.14159 - [3.14159] - - 6.02e23 - [6.02e+23] - - 1e-12 - [1e-12] - - # any int or real number, returned as float - 100 - [100.0] - - -100 - [-100.0] - - +100 - [100.0] - - 3.14159 - [3.14159] - - 6.02e23 - [6.02e+23] - - 1e-12 - [1e-12] - - # hex numbers - 100 - [256] - - FF - [255] - - # fractions - 1/2 - [0.5] - - -3/4 - [-0.75] - - # mixed fractions - 1 - [1] - - 1/2 - [0.5] - - -3/4 - [-0.75] - - 1-3/4 - [1.75] - - # uuid - 12345678-1234-5678-1234-567812345678 - [UUID('12345678-1234-5678-1234-567812345678')] - """ - - convertToInteger = tokenMap(int) - """ - Parse action for converting parsed integers to Python int - """ - - convertToFloat = tokenMap(float) - """ - Parse action for converting parsed numbers to Python float - """ - - integer = Word(nums).setName("integer").setParseAction(convertToInteger) - """expression that parses an unsigned integer, returns an int""" - - hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int, 16)) - """expression that parses a hexadecimal integer, returns an int""" - - signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger) - """expression that parses an integer with optional leading sign, returns an int""" - - fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction") - """fractional expression of an integer divided by an integer, returns a float""" - fraction.addParseAction(lambda t: t[0]/t[-1]) - - mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction") - """mixed integer of the form 'integer - fraction', with optional leading integer, returns float""" - mixed_integer.addParseAction(sum) - - real = Regex(r'[+-]?(?:\d+\.\d*|\.\d+)').setName("real number").setParseAction(convertToFloat) - """expression that parses a floating 
point number and returns a float""" - - sci_real = Regex(r'[+-]?(?:\d+(?:[eE][+-]?\d+)|(?:\d+\.\d*|\.\d+)(?:[eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) - """expression that parses a floating point number with optional - scientific notation and returns a float""" - - # streamlining this expression makes the docs nicer-looking - number = (sci_real | real | signed_integer).streamline() - """any numeric expression, returns the corresponding Python type""" - - fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) - """any int or real number, returned as float""" - - identifier = Word(alphas + '_', alphanums + '_').setName("identifier") - """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" - - ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") - "IPv4 address (``0.0.0.0 - 255.255.255.255``)" - - _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") - _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part) * 7).setName("full IPv6 address") - _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part) * (0, 6)) - + "::" - + Optional(_ipv6_part + (':' + _ipv6_part) * (0, 6)) - ).setName("short IPv6 address") - _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8) - _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") - ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") - "IPv6 address (long, short, or mixed form)" - - mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address") - "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' 
delimiters)" - - @staticmethod - def convertToDate(fmt="%Y-%m-%d"): - """ - Helper to create a parse action for converting parsed date string to Python datetime.date - - Params - - - fmt - format to be passed to datetime.strptime (default= ``"%Y-%m-%d"``) - - Example:: - - date_expr = pyparsing_common.iso8601_date.copy() - date_expr.setParseAction(pyparsing_common.convertToDate()) - print(date_expr.parseString("1999-12-31")) - - prints:: - - [datetime.date(1999, 12, 31)] - """ - def cvt_fn(s, l, t): - try: - return datetime.strptime(t[0], fmt).date() - except ValueError as ve: - raise ParseException(s, l, str(ve)) - return cvt_fn - - @staticmethod - def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"): - """Helper to create a parse action for converting parsed - datetime string to Python datetime.datetime - - Params - - - fmt - format to be passed to datetime.strptime (default= ``"%Y-%m-%dT%H:%M:%S.%f"``) - - Example:: - - dt_expr = pyparsing_common.iso8601_datetime.copy() - dt_expr.setParseAction(pyparsing_common.convertToDatetime()) - print(dt_expr.parseString("1999-12-31T23:59:59.999")) - - prints:: - - [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] - """ - def cvt_fn(s, l, t): - try: - return datetime.strptime(t[0], fmt) - except ValueError as ve: - raise ParseException(s, l, str(ve)) - return cvt_fn - - iso8601_date = Regex(r'(?P\d{4})(?:-(?P\d\d)(?:-(?P\d\d))?)?').setName("ISO8601 date") - "ISO8601 date (``yyyy-mm-dd``)" - - iso8601_datetime = Regex(r'(?P\d{4})-(?P\d\d)-(?P\d\d)[T ](?P\d\d):(?P\d\d)(:(?P\d\d(\.\d*)?)?)?(?PZ|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime") - "ISO8601 datetime (``yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)``) - trailing seconds, milliseconds, and timezone optional; accepts separating ``'T'`` or ``' '``" - - uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID") - "UUID (``xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx``)" - - _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress() - @staticmethod - def stripHTMLTags(s, l, tokens): - """Parse action to remove HTML tags from web page HTML source - - Example:: - - # strip HTML links from normal text - text = 'More info at the
pyparsing wiki page' - td, td_end = makeHTMLTags("TD") - table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end - print(table_text.parseString(text).body) - - Prints:: - - More info at the pyparsing wiki page - """ - return pyparsing_common._html_stripper.transformString(tokens[0]) - - _commasepitem = Combine(OneOrMore(~Literal(",") - + ~LineEnd() - + Word(printables, excludeChars=',') - + Optional(White(" \t")))).streamline().setName("commaItem") - comma_separated_list = delimitedList(Optional(quotedString.copy() - | _commasepitem, default='') - ).setName("comma separated list") - """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" - - upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper())) - """Parse action to convert tokens to upper case.""" - - downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower())) - """Parse action to convert tokens to lower case.""" - - -class _lazyclassproperty(object): - def __init__(self, fn): - self.fn = fn - self.__doc__ = fn.__doc__ - self.__name__ = fn.__name__ - - def __get__(self, obj, cls): - if cls is None: - cls = type(obj) - if not hasattr(cls, '_intern') or any(cls._intern is getattr(superclass, '_intern', []) - for superclass in cls.__mro__[1:]): - cls._intern = {} - attrname = self.fn.__name__ - if attrname not in cls._intern: - cls._intern[attrname] = self.fn(cls) - return cls._intern[attrname] - - -class unicode_set(object): - """ - A set of Unicode characters, for language-specific strings for - ``alphas``, ``nums``, ``alphanums``, and ``printables``. - A unicode_set is defined by a list of ranges in the Unicode character - set, in a class attribute ``_ranges``, such as:: - - _ranges = [(0x0020, 0x007e), (0x00a0, 0x00ff),] - - A unicode set can also be defined using multiple inheritance of other unicode sets:: - - class CJK(Chinese, Japanese, Korean): - pass - """ - _ranges = [] - - @classmethod - def _get_chars_for_ranges(cls): - ret = [] - for cc in cls.__mro__: - if cc is unicode_set: - break - for rr in cc._ranges: - ret.extend(range(rr[0], rr[-1] + 1)) - return [unichr(c) for c in sorted(set(ret))] - - @_lazyclassproperty - def printables(cls): - "all non-whitespace characters in this range" - return u''.join(filterfalse(unicode.isspace, cls._get_chars_for_ranges())) - - @_lazyclassproperty - def alphas(cls): - "all alphabetic characters in this range" - return u''.join(filter(unicode.isalpha, cls._get_chars_for_ranges())) - - @_lazyclassproperty - def nums(cls): - "all numeric digit characters in this range" - return u''.join(filter(unicode.isdigit, cls._get_chars_for_ranges())) - - @_lazyclassproperty - def alphanums(cls): - "all alphanumeric characters in this range" - return cls.alphas + cls.nums - - -class pyparsing_unicode(unicode_set): - """ - A namespace class for defining common language unicode_sets. 
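A usage sketch for the language sets defined here (assumes a UTF-8 source file; not from the vendored source)::

    from pyparsing import Word
    from pyparsing import pyparsing_unicode as ppu

    greek_word = Word(ppu.Greek.alphas)
    print(greek_word.parseString(u"αβγ δεζ"))  # -> ['αβγ']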
- """ - _ranges = [(32, sys.maxunicode)] - - class Latin1(unicode_set): - "Unicode set for Latin-1 Unicode Character Range" - _ranges = [(0x0020, 0x007e), (0x00a0, 0x00ff),] - - class LatinA(unicode_set): - "Unicode set for Latin-A Unicode Character Range" - _ranges = [(0x0100, 0x017f),] - - class LatinB(unicode_set): - "Unicode set for Latin-B Unicode Character Range" - _ranges = [(0x0180, 0x024f),] - - class Greek(unicode_set): - "Unicode set for Greek Unicode Character Ranges" - _ranges = [ - (0x0370, 0x03ff), (0x1f00, 0x1f15), (0x1f18, 0x1f1d), (0x1f20, 0x1f45), (0x1f48, 0x1f4d), - (0x1f50, 0x1f57), (0x1f59,), (0x1f5b,), (0x1f5d,), (0x1f5f, 0x1f7d), (0x1f80, 0x1fb4), (0x1fb6, 0x1fc4), - (0x1fc6, 0x1fd3), (0x1fd6, 0x1fdb), (0x1fdd, 0x1fef), (0x1ff2, 0x1ff4), (0x1ff6, 0x1ffe), - ] - - class Cyrillic(unicode_set): - "Unicode set for Cyrillic Unicode Character Range" - _ranges = [(0x0400, 0x04ff)] - - class Chinese(unicode_set): - "Unicode set for Chinese Unicode Character Range" - _ranges = [(0x4e00, 0x9fff), (0x3000, 0x303f),] - - class Japanese(unicode_set): - "Unicode set for Japanese Unicode Character Range, combining Kanji, Hiragana, and Katakana ranges" - _ranges = [] - - class Kanji(unicode_set): - "Unicode set for Kanji Unicode Character Range" - _ranges = [(0x4E00, 0x9Fbf), (0x3000, 0x303f),] - - class Hiragana(unicode_set): - "Unicode set for Hiragana Unicode Character Range" - _ranges = [(0x3040, 0x309f),] - - class Katakana(unicode_set): - "Unicode set for Katakana Unicode Character Range" - _ranges = [(0x30a0, 0x30ff),] - - class Korean(unicode_set): - "Unicode set for Korean Unicode Character Range" - _ranges = [(0xac00, 0xd7af), (0x1100, 0x11ff), (0x3130, 0x318f), (0xa960, 0xa97f), (0xd7b0, 0xd7ff), (0x3000, 0x303f),] - - class CJK(Chinese, Japanese, Korean): - "Unicode set for combined Chinese, Japanese, and Korean (CJK) Unicode Character Range" - pass - - class Thai(unicode_set): - "Unicode set for Thai Unicode Character Range" - _ranges = [(0x0e01, 0x0e3a), (0x0e3f, 0x0e5b),] - - class Arabic(unicode_set): - "Unicode set for Arabic Unicode Character Range" - _ranges = [(0x0600, 0x061b), (0x061e, 0x06ff), (0x0700, 0x077f),] - - class Hebrew(unicode_set): - "Unicode set for Hebrew Unicode Character Range" - _ranges = [(0x0590, 0x05ff),] - - class Devanagari(unicode_set): - "Unicode set for Devanagari Unicode Character Range" - _ranges = [(0x0900, 0x097f), (0xa8e0, 0xa8ff)] - -pyparsing_unicode.Japanese._ranges = (pyparsing_unicode.Japanese.Kanji._ranges - + pyparsing_unicode.Japanese.Hiragana._ranges - + pyparsing_unicode.Japanese.Katakana._ranges) - -# define ranges in language character sets -if PY_3: - setattr(pyparsing_unicode, u"العربية", pyparsing_unicode.Arabic) - setattr(pyparsing_unicode, u"中文", pyparsing_unicode.Chinese) - setattr(pyparsing_unicode, u"кириллица", pyparsing_unicode.Cyrillic) - setattr(pyparsing_unicode, u"Ελληνικά", pyparsing_unicode.Greek) - setattr(pyparsing_unicode, u"עִברִית", pyparsing_unicode.Hebrew) - setattr(pyparsing_unicode, u"日本語", pyparsing_unicode.Japanese) - setattr(pyparsing_unicode.Japanese, u"漢字", pyparsing_unicode.Japanese.Kanji) - setattr(pyparsing_unicode.Japanese, u"カタカナ", pyparsing_unicode.Japanese.Katakana) - setattr(pyparsing_unicode.Japanese, u"ã²ã‚‰ãŒãª", pyparsing_unicode.Japanese.Hiragana) - setattr(pyparsing_unicode, u"한국어", pyparsing_unicode.Korean) - setattr(pyparsing_unicode, u"ไทย", pyparsing_unicode.Thai) - setattr(pyparsing_unicode, u"देवनागरी", pyparsing_unicode.Devanagari) - - -class pyparsing_test: - """ - 
namespace class for classes useful in writing unit tests - """ - - class reset_pyparsing_context: - """ - Context manager to be used when writing unit tests that modify pyparsing config values: - - packrat parsing - - default whitespace characters - - default keyword characters - - literal string auto-conversion class - - __diag__ settings - - Example: - with reset_pyparsing_context(): - # test that literals used to construct a grammar are automatically suppressed - ParserElement.inlineLiteralsUsing(Suppress) - - term = Word(alphas) | Word(nums) - group = Group('(' + term[...] + ')') - - # assert that the '()' characters are not included in the parsed tokens - self.assertParseAndCheckList(group, "(abc 123 def)", ['abc', '123', 'def']) - - # after exiting context manager, literals are converted to Literal expressions again - """ - - def __init__(self): - self._save_context = {} - - def save(self): - self._save_context["default_whitespace"] = ParserElement.DEFAULT_WHITE_CHARS - self._save_context["default_keyword_chars"] = Keyword.DEFAULT_KEYWORD_CHARS - self._save_context[ - "literal_string_class" - ] = ParserElement._literalStringClass - self._save_context["packrat_enabled"] = ParserElement._packratEnabled - self._save_context["packrat_parse"] = ParserElement._parse - self._save_context["__diag__"] = { - name: getattr(__diag__, name) for name in __diag__._all_names - } - self._save_context["__compat__"] = { - "collect_all_And_tokens": __compat__.collect_all_And_tokens - } - return self - - def restore(self): - # reset pyparsing global state - if ( - ParserElement.DEFAULT_WHITE_CHARS - != self._save_context["default_whitespace"] - ): - ParserElement.setDefaultWhitespaceChars( - self._save_context["default_whitespace"] - ) - Keyword.DEFAULT_KEYWORD_CHARS = self._save_context["default_keyword_chars"] - ParserElement.inlineLiteralsUsing( - self._save_context["literal_string_class"] - ) - for name, value in self._save_context["__diag__"].items(): - setattr(__diag__, name, value) - ParserElement._packratEnabled = self._save_context["packrat_enabled"] - ParserElement._parse = self._save_context["packrat_parse"] - __compat__.collect_all_And_tokens = self._save_context["__compat__"]["collect_all_And_tokens"] - - def __enter__(self): - return self.save() - - def __exit__(self, *args): - return self.restore() - - class TestParseResultsAsserts: - """ - A mixin class to add parse results assertion methods to normal unittest.TestCase classes. - """ - def assertParseResultsEquals( - self, result, expected_list=None, expected_dict=None, msg=None - ): - """ - Unit test assertion to compare a ParseResults object with an optional expected_list, - and compare any defined results names with an optional expected_dict. - """ - if expected_list is not None: - self.assertEqual(expected_list, result.asList(), msg=msg) - if expected_dict is not None: - self.assertEqual(expected_dict, result.asDict(), msg=msg) - - def assertParseAndCheckList( - self, expr, test_string, expected_list, msg=None, verbose=True - ): - """ - Convenience wrapper assert to test a parser element and input string, and assert that - the resulting ParseResults.asList() is equal to the expected_list.
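The assertion mixin deleted here is meant to be mixed into a normal TestCase; a minimal sketch of the intended usage (assuming the vendored pyparsing 2.4.x, where pyparsing_test is defined inside pyparsing.py as above):

```python
import unittest
from pyparsing import Word, alphas, pyparsing_test as ppt

class GrammarTests(ppt.TestParseResultsAsserts, unittest.TestCase):
    def test_word(self):
        expr = Word(alphas)("name")
        # compare against ParseResults.asList()
        self.assertParseAndCheckList(expr, "hello", ["hello"], verbose=False)
        # compare against ParseResults.asDict(), keyed by the "name" results name
        self.assertParseAndCheckDict(expr, "hello", {"name": "hello"}, verbose=False)

if __name__ == "__main__":
    unittest.main()
```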
- """ - result = expr.parseString(test_string, parseAll=True) - if verbose: - print(result.dump()) - self.assertParseResultsEquals(result, expected_list=expected_list, msg=msg) - - def assertParseAndCheckDict( - self, expr, test_string, expected_dict, msg=None, verbose=True - ): - """ - Convenience wrapper assert to test a parser element and input string, and assert that - the resulting ParseResults.asDict() is equal to the expected_dict. - """ - result = expr.parseString(test_string, parseAll=True) - if verbose: - print(result.dump()) - self.assertParseResultsEquals(result, expected_dict=expected_dict, msg=msg) - - def assertRunTestResults( - self, run_tests_report, expected_parse_results=None, msg=None - ): - """ - Unit test assertion to evaluate output of ParserElement.runTests(). If a list of - list-dict tuples is given as the expected_parse_results argument, then these are zipped - with the report tuples returned by runTests and evaluated using assertParseResultsEquals. - Finally, asserts that the overall runTests() success value is True. - - :param run_tests_report: tuple(bool, [tuple(str, ParseResults or Exception)]) returned from runTests - :param expected_parse_results (optional): [tuple(str, list, dict, Exception)] - """ - run_test_success, run_test_results = run_tests_report - - if expected_parse_results is not None: - merged = [ - (rpt[0], rpt[1], expected) - for rpt, expected in zip(run_test_results, expected_parse_results) - ] - for test_string, result, expected in merged: - # expected should be a tuple containing a list and/or a dict or an exception, - # and optional failure message string - # an empty tuple will skip any result validation - fail_msg = next( - (exp for exp in expected if isinstance(exp, str)), None - ) - expected_exception = next( - ( - exp - for exp in expected - if isinstance(exp, type) and issubclass(exp, Exception) - ), - None, - ) - if expected_exception is not None: - with self.assertRaises( - expected_exception=expected_exception, msg=fail_msg or msg - ): - if isinstance(result, Exception): - raise result - else: - expected_list = next( - (exp for exp in expected if isinstance(exp, list)), None - ) - expected_dict = next( - (exp for exp in expected if isinstance(exp, dict)), None - ) - if (expected_list, expected_dict) != (None, None): - self.assertParseResultsEquals( - result, - expected_list=expected_list, - expected_dict=expected_dict, - msg=fail_msg or msg, - ) - else: - # warning here maybe? 
- print("no validation for {!r}".format(test_string)) - - # do this last, in case some specific test results can be reported instead - self.assertTrue( - run_test_success, msg=msg if msg is not None else "failed runTests" - ) - - @contextmanager - def assertRaisesParseException(self, exc_type=ParseException, msg=None): - with self.assertRaises(exc_type, msg=msg): - yield - - -if __name__ == "__main__": - - selectToken = CaselessLiteral("select") - fromToken = CaselessLiteral("from") - - ident = Word(alphas, alphanums + "_$") - - columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) - columnNameList = Group(delimitedList(columnName)).setName("columns") - columnSpec = ('*' | columnNameList) - - tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) - tableNameList = Group(delimitedList(tableName)).setName("tables") - - simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables") - - # demo runTests method, including embedded comments in test string - simpleSQL.runTests(""" - # '*' as column list and dotted table name - select * from SYS.XYZZY - - # caseless match on "SELECT", and casts back to "select" - SELECT * from XYZZY, ABC - - # list of column names, and mixed case SELECT keyword - Select AA,BB,CC from Sys.dual - - # multiple tables - Select A, B, C from Sys.dual, Table2 - - # invalid SELECT keyword - should fail - Xelect A, B, C from Sys.dual - - # incomplete command - should fail - Select - - # invalid column name - should fail - Select ^^^ frox Sys.dual - - """) - - pyparsing_common.number.runTests(""" - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - """) - - # any int or real number, returned as float - pyparsing_common.fnumber.runTests(""" - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - """) - - pyparsing_common.hex_integer.runTests(""" - 100 - FF - """) - - import uuid - pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) - pyparsing_common.uuid.runTests(""" - 12345678-1234-5678-1234-567812345678 - """) diff --git a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/LICENSE.mit b/vendor/poetry-core/poetry/core/_vendor/pyrsistent/LICENSE.mit deleted file mode 100644 index 6609e4c0..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/LICENSE.mit +++ /dev/null @@ -1,22 +0,0 @@ -Copyright (c) 2019 Tobias Gustafsson - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file diff --git a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/__init__.py b/vendor/poetry-core/poetry/core/_vendor/pyrsistent/__init__.py deleted file mode 100644 index be299658..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/__init__.py +++ /dev/null @@ -1,47 +0,0 @@ -# -*- coding: utf-8 -*- - -from pyrsistent._pmap import pmap, m, PMap - -from pyrsistent._pvector import pvector, v, PVector - -from pyrsistent._pset import pset, s, PSet - -from pyrsistent._pbag import pbag, b, PBag - -from pyrsistent._plist import plist, l, PList - -from pyrsistent._pdeque import pdeque, dq, PDeque - -from pyrsistent._checked_types import ( - CheckedPMap, CheckedPVector, CheckedPSet, InvariantException, CheckedKeyTypeError, - CheckedValueTypeError, CheckedType, optional) - -from pyrsistent._field_common import ( - field, PTypeError, pset_field, pmap_field, pvector_field) - -from pyrsistent._precord import PRecord - -from pyrsistent._pclass import PClass, PClassMeta - -from pyrsistent._immutable import immutable - -from pyrsistent._helpers import freeze, thaw, mutant - -from pyrsistent._transformations import inc, discard, rex, ny - -from pyrsistent._toolz import get_in - - -__all__ = ('pmap', 'm', 'PMap', - 'pvector', 'v', 'PVector', - 'pset', 's', 'PSet', - 'pbag', 'b', 'PBag', - 'plist', 'l', 'PList', - 'pdeque', 'dq', 'PDeque', - 'CheckedPMap', 'CheckedPVector', 'CheckedPSet', 'InvariantException', 'CheckedKeyTypeError', 'CheckedValueTypeError', 'CheckedType', 'optional', - 'PRecord', 'field', 'pset_field', 'pmap_field', 'pvector_field', - 'PClass', 'PClassMeta', - 'immutable', - 'freeze', 'thaw', 'mutant', - 'get_in', - 'inc', 'discard', 'rex', 'ny') diff --git a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_checked_types.py b/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_checked_types.py deleted file mode 100644 index 293d989f..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_checked_types.py +++ /dev/null @@ -1,542 +0,0 @@ -from ._compat import Iterable -import six - -from pyrsistent._compat import Enum, string_types -from pyrsistent._pmap import PMap, pmap -from pyrsistent._pset import PSet, pset -from pyrsistent._pvector import PythonPVector, python_pvector - - -class CheckedType(object): - """ - Marker class to enable creation and serialization of checked object graphs. - """ - __slots__ = () - - @classmethod - def create(cls, source_data, _factory_fields=None): - raise NotImplementedError() - - def serialize(self, format=None): - raise NotImplementedError() - - -def _restore_pickle(cls, data): - return cls.create(data, _factory_fields=set()) - - -class InvariantException(Exception): - """ - Exception raised from a :py:class:`CheckedType` when invariant tests fail or when a mandatory - field is missing. 
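For reference, the two fields described just below surface like this in practice (a sketch against the public pyrsistent API; PRecord and field are re-exported alongside InvariantException in the __init__.py removed above):

```python
from pyrsistent import PRecord, field, InvariantException

class Point(PRecord):
    x = field(type=int, mandatory=True)

try:
    Point()  # mandatory field not supplied
except InvariantException as e:
    print(e.missing_fields)    # e.g. ('Point.x',)
    print(e.invariant_errors)  # () -- no invariant failed here
```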
- - Contains two fields of interest: - invariant_errors, a tuple of error data for the failing invariants - missing_fields, a tuple of strings specifying the missing names - """ - - def __init__(self, error_codes=(), missing_fields=(), *args, **kwargs): - self.invariant_errors = tuple(e() if callable(e) else e for e in error_codes) - self.missing_fields = missing_fields - super(InvariantException, self).__init__(*args, **kwargs) - - def __str__(self): - return super(InvariantException, self).__str__() + \ - ", invariant_errors=[{invariant_errors}], missing_fields=[{missing_fields}]".format( - invariant_errors=', '.join(str(e) for e in self.invariant_errors), - missing_fields=', '.join(self.missing_fields)) - - -_preserved_iterable_types = ( - Enum, -) -"""Some types are themselves iterable, but we want to use the type itself and -not its members for the type specification. This defines a set of such types -that we explicitly preserve. - -Note that strings are not such types because the string inputs we pass in are -values, not types. -""" - - -def maybe_parse_user_type(t): - """Try to coerce a user-supplied type directive into a list of types. - - This function should be used in all places where a user specifies a type, - for consistency. - - The policy for what defines valid user input should be clear from the implementation. - """ - is_type = isinstance(t, type) - is_preserved = isinstance(t, type) and issubclass(t, _preserved_iterable_types) - is_string = isinstance(t, string_types) - is_iterable = isinstance(t, Iterable) - - if is_preserved: - return [t] - elif is_string: - return [t] - elif is_type and not is_iterable: - return [t] - elif is_iterable: - # Recur to validate contained types as well. - ts = t - return tuple(e for t in ts for e in maybe_parse_user_type(t)) - else: - # If this raises because `t` cannot be formatted, so be it. - raise TypeError( - 'Type specifications must be types or strings. Input: {}'.format(t) - ) - - -def maybe_parse_many_user_types(ts): - # Just a different name to communicate that you're parsing multiple user - # inputs. `maybe_parse_user_type` handles the iterable case anyway. - return maybe_parse_user_type(ts) - - -def _store_types(dct, bases, destination_name, source_name): - maybe_types = maybe_parse_many_user_types([ - d[source_name] - for d in ([dct] + [b.__dict__ for b in bases]) if source_name in d - ]) - - dct[destination_name] = maybe_types - - -def _merge_invariant_results(result): - verdict = True - data = [] - for verd, dat in result: - if not verd: - verdict = False - data.append(dat) - - return verdict, tuple(data) - - -def wrap_invariant(invariant): - # Invariant functions may return the outcome of several tests - # In those cases the results have to be merged before being passed - # back to the client. - def f(*args, **kwargs): - result = invariant(*args, **kwargs) - if isinstance(result[0], bool): - return result - - return _merge_invariant_results(result) - - return f - - -def _all_dicts(bases, seen=None): - """ - Yield each class in ``bases`` and each of their base classes. 
- """ - if seen is None: - seen = set() - for cls in bases: - if cls in seen: - continue - seen.add(cls) - yield cls.__dict__ - for b in _all_dicts(cls.__bases__, seen): - yield b - - -def store_invariants(dct, bases, destination_name, source_name): - # Invariants are inherited - invariants = [] - for ns in [dct] + list(_all_dicts(bases)): - try: - invariant = ns[source_name] - except KeyError: - continue - invariants.append(invariant) - - if not all(callable(invariant) for invariant in invariants): - raise TypeError('Invariants must be callable') - dct[destination_name] = tuple(wrap_invariant(inv) for inv in invariants) - - -class _CheckedTypeMeta(type): - def __new__(mcs, name, bases, dct): - _store_types(dct, bases, '_checked_types', '__type__') - store_invariants(dct, bases, '_checked_invariants', '__invariant__') - - def default_serializer(self, _, value): - if isinstance(value, CheckedType): - return value.serialize() - return value - - dct.setdefault('__serializer__', default_serializer) - - dct['__slots__'] = () - - return super(_CheckedTypeMeta, mcs).__new__(mcs, name, bases, dct) - - -class CheckedTypeError(TypeError): - def __init__(self, source_class, expected_types, actual_type, actual_value, *args, **kwargs): - super(CheckedTypeError, self).__init__(*args, **kwargs) - self.source_class = source_class - self.expected_types = expected_types - self.actual_type = actual_type - self.actual_value = actual_value - - -class CheckedKeyTypeError(CheckedTypeError): - """ - Raised when trying to set a value using a key with a type that doesn't match the declared type. - - Attributes: - source_class -- The class of the collection - expected_types -- Allowed types - actual_type -- The non matching type - actual_value -- Value of the variable with the non matching type - """ - pass - - -class CheckedValueTypeError(CheckedTypeError): - """ - Raised when trying to set a value using a key with a type that doesn't match the declared type. 
- - Attributes: - source_class -- The class of the collection - expected_types -- Allowed types - actual_type -- The non matching type - actual_value -- Value of the variable with the non matching type - """ - pass - - -def _get_class(type_name): - module_name, class_name = type_name.rsplit('.', 1) - module = __import__(module_name, fromlist=[class_name]) - return getattr(module, class_name) - - -def get_type(typ): - if isinstance(typ, type): - return typ - - return _get_class(typ) - - -def get_types(typs): - return [get_type(typ) for typ in typs] - - -def _check_types(it, expected_types, source_class, exception_type=CheckedValueTypeError): - if expected_types: - for e in it: - if not any(isinstance(e, get_type(t)) for t in expected_types): - actual_type = type(e) - msg = "Type {source_class} can only be used with {expected_types}, not {actual_type}".format( - source_class=source_class.__name__, - expected_types=tuple(get_type(et).__name__ for et in expected_types), - actual_type=actual_type.__name__) - raise exception_type(source_class, expected_types, actual_type, e, msg) - - -def _invariant_errors(elem, invariants): - return [data for valid, data in (invariant(elem) for invariant in invariants) if not valid] - - -def _invariant_errors_iterable(it, invariants): - return sum([_invariant_errors(elem, invariants) for elem in it], []) - - -def optional(*typs): - """ Convenience function to specify that a value may be of any of the types in type 'typs' or None """ - return tuple(typs) + (type(None),) - - -def _checked_type_create(cls, source_data, _factory_fields=None, ignore_extra=False): - if isinstance(source_data, cls): - return source_data - - # Recursively apply create methods of checked types if the types of the supplied data - # does not match any of the valid types. - types = get_types(cls._checked_types) - checked_type = next((t for t in types if issubclass(t, CheckedType)), None) - if checked_type: - return cls([checked_type.create(data, ignore_extra=ignore_extra) - if not any(isinstance(data, t) for t in types) else data - for data in source_data]) - - return cls(source_data) - -@six.add_metaclass(_CheckedTypeMeta) -class CheckedPVector(PythonPVector, CheckedType): - """ - A CheckedPVector is a PVector which allows specifying type and invariant checks. - - >>> class Positives(CheckedPVector): - ... __type__ = (int, float) - ... __invariant__ = lambda n: (n >= 0, 'Negative') - ... 
- >>> Positives([1, 2, 3]) - Positives([1, 2, 3]) - """ - - __slots__ = () - - def __new__(cls, initial=()): - if type(initial) == PythonPVector: - return super(CheckedPVector, cls).__new__(cls, initial._count, initial._shift, initial._root, initial._tail) - - return CheckedPVector.Evolver(cls, python_pvector()).extend(initial).persistent() - - def set(self, key, value): - return self.evolver().set(key, value).persistent() - - def append(self, val): - return self.evolver().append(val).persistent() - - def extend(self, it): - return self.evolver().extend(it).persistent() - - create = classmethod(_checked_type_create) - - def serialize(self, format=None): - serializer = self.__serializer__ - return list(serializer(format, v) for v in self) - - def __reduce__(self): - # Pickling support - return _restore_pickle, (self.__class__, list(self),) - - class Evolver(PythonPVector.Evolver): - __slots__ = ('_destination_class', '_invariant_errors') - - def __init__(self, destination_class, vector): - super(CheckedPVector.Evolver, self).__init__(vector) - self._destination_class = destination_class - self._invariant_errors = [] - - def _check(self, it): - _check_types(it, self._destination_class._checked_types, self._destination_class) - error_data = _invariant_errors_iterable(it, self._destination_class._checked_invariants) - self._invariant_errors.extend(error_data) - - def __setitem__(self, key, value): - self._check([value]) - return super(CheckedPVector.Evolver, self).__setitem__(key, value) - - def append(self, elem): - self._check([elem]) - return super(CheckedPVector.Evolver, self).append(elem) - - def extend(self, it): - it = list(it) - self._check(it) - return super(CheckedPVector.Evolver, self).extend(it) - - def persistent(self): - if self._invariant_errors: - raise InvariantException(error_codes=self._invariant_errors) - - result = self._orig_pvector - if self.is_dirty() or (self._destination_class != type(self._orig_pvector)): - pv = super(CheckedPVector.Evolver, self).persistent().extend(self._extra_tail) - result = self._destination_class(pv) - self._reset(result) - - return result - - def __repr__(self): - return self.__class__.__name__ + "({0})".format(self.tolist()) - - __str__ = __repr__ - - def evolver(self): - return CheckedPVector.Evolver(self.__class__, self) - - -@six.add_metaclass(_CheckedTypeMeta) -class CheckedPSet(PSet, CheckedType): - """ - A CheckedPSet is a PSet which allows specifying type and invariant checks. - - >>> class Positives(CheckedPSet): - ... __type__ = (int, float) - ... __invariant__ = lambda n: (n >= 0, 'Negative') - ... 
- >>> Positives([1, 2, 3]) - Positives([1, 2, 3]) - """ - - __slots__ = () - - def __new__(cls, initial=()): - if type(initial) is PMap: - return super(CheckedPSet, cls).__new__(cls, initial) - - evolver = CheckedPSet.Evolver(cls, pset()) - for e in initial: - evolver.add(e) - - return evolver.persistent() - - def __repr__(self): - return self.__class__.__name__ + super(CheckedPSet, self).__repr__()[4:] - - def __str__(self): - return self.__repr__() - - def serialize(self, format=None): - serializer = self.__serializer__ - return set(serializer(format, v) for v in self) - - create = classmethod(_checked_type_create) - - def __reduce__(self): - # Pickling support - return _restore_pickle, (self.__class__, list(self),) - - def evolver(self): - return CheckedPSet.Evolver(self.__class__, self) - - class Evolver(PSet._Evolver): - __slots__ = ('_destination_class', '_invariant_errors') - - def __init__(self, destination_class, original_set): - super(CheckedPSet.Evolver, self).__init__(original_set) - self._destination_class = destination_class - self._invariant_errors = [] - - def _check(self, it): - _check_types(it, self._destination_class._checked_types, self._destination_class) - error_data = _invariant_errors_iterable(it, self._destination_class._checked_invariants) - self._invariant_errors.extend(error_data) - - def add(self, element): - self._check([element]) - self._pmap_evolver[element] = True - return self - - def persistent(self): - if self._invariant_errors: - raise InvariantException(error_codes=self._invariant_errors) - - if self.is_dirty() or self._destination_class != type(self._original_pset): - return self._destination_class(self._pmap_evolver.persistent()) - - return self._original_pset - - -class _CheckedMapTypeMeta(type): - def __new__(mcs, name, bases, dct): - _store_types(dct, bases, '_checked_key_types', '__key_type__') - _store_types(dct, bases, '_checked_value_types', '__value_type__') - store_invariants(dct, bases, '_checked_invariants', '__invariant__') - - def default_serializer(self, _, key, value): - sk = key - if isinstance(key, CheckedType): - sk = key.serialize() - - sv = value - if isinstance(value, CheckedType): - sv = value.serialize() - - return sk, sv - - dct.setdefault('__serializer__', default_serializer) - - dct['__slots__'] = () - - return super(_CheckedMapTypeMeta, mcs).__new__(mcs, name, bases, dct) - -# Marker object -_UNDEFINED_CHECKED_PMAP_SIZE = object() - - -@six.add_metaclass(_CheckedMapTypeMeta) -class CheckedPMap(PMap, CheckedType): - """ - A CheckedPMap is a PMap which allows specifying type and invariant checks. - - >>> class IntToFloatMap(CheckedPMap): - ... __key_type__ = int - ... __value_type__ = float - ... __invariant__ = lambda k, v: (int(v) == k, 'Invalid mapping') - ... 
- >>> IntToFloatMap({1: 1.5, 2: 2.25}) - IntToFloatMap({1: 1.5, 2: 2.25}) - """ - - __slots__ = () - - def __new__(cls, initial={}, size=_UNDEFINED_CHECKED_PMAP_SIZE): - if size is not _UNDEFINED_CHECKED_PMAP_SIZE: - return super(CheckedPMap, cls).__new__(cls, size, initial) - - evolver = CheckedPMap.Evolver(cls, pmap()) - for k, v in initial.items(): - evolver.set(k, v) - - return evolver.persistent() - - def evolver(self): - return CheckedPMap.Evolver(self.__class__, self) - - def __repr__(self): - return self.__class__.__name__ + "({0})".format(str(dict(self))) - - __str__ = __repr__ - - def serialize(self, format=None): - serializer = self.__serializer__ - return dict(serializer(format, k, v) for k, v in self.items()) - - @classmethod - def create(cls, source_data, _factory_fields=None): - if isinstance(source_data, cls): - return source_data - - # Recursively apply create methods of checked types if the types of the supplied data - # does not match any of the valid types. - key_types = get_types(cls._checked_key_types) - checked_key_type = next((t for t in key_types if issubclass(t, CheckedType)), None) - value_types = get_types(cls._checked_value_types) - checked_value_type = next((t for t in value_types if issubclass(t, CheckedType)), None) - - if checked_key_type or checked_value_type: - return cls(dict((checked_key_type.create(key) if checked_key_type and not any(isinstance(key, t) for t in key_types) else key, - checked_value_type.create(value) if checked_value_type and not any(isinstance(value, t) for t in value_types) else value) - for key, value in source_data.items())) - - return cls(source_data) - - def __reduce__(self): - # Pickling support - return _restore_pickle, (self.__class__, dict(self),) - - class Evolver(PMap._Evolver): - __slots__ = ('_destination_class', '_invariant_errors') - - def __init__(self, destination_class, original_map): - super(CheckedPMap.Evolver, self).__init__(original_map) - self._destination_class = destination_class - self._invariant_errors = [] - - def set(self, key, value): - _check_types([key], self._destination_class._checked_key_types, self._destination_class, CheckedKeyTypeError) - _check_types([value], self._destination_class._checked_value_types, self._destination_class) - self._invariant_errors.extend(data for valid, data in (invariant(key, value) - for invariant in self._destination_class._checked_invariants) - if not valid) - - return super(CheckedPMap.Evolver, self).set(key, value) - - def persistent(self): - if self._invariant_errors: - raise InvariantException(error_codes=self._invariant_errors) - - if self.is_dirty() or type(self._original_pmap) != self._destination_class: - return self._destination_class(self._buckets_evolver.persistent(), self._size) - - return self._original_pmap diff --git a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_compat.py b/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_compat.py deleted file mode 100644 index e728586a..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_compat.py +++ /dev/null @@ -1,31 +0,0 @@ -from six import string_types - - -# enum compat -try: - from enum import Enum -except: - class Enum(object): pass - # no objects will be instances of this class - -# collections compat -try: - from collections.abc import ( - Container, - Hashable, - Iterable, - Mapping, - Sequence, - Set, - Sized, - ) -except ImportError: - from collections import ( - Container, - Hashable, - Iterable, - Mapping, - Sequence, - Set, - Sized, - ) diff --git 
a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_field_common.py b/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_field_common.py deleted file mode 100644 index ca1cccd4..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_field_common.py +++ /dev/null @@ -1,330 +0,0 @@ -import six -import sys - -from pyrsistent._checked_types import ( - CheckedPMap, - CheckedPSet, - CheckedPVector, - CheckedType, - InvariantException, - _restore_pickle, - get_type, - maybe_parse_user_type, - maybe_parse_many_user_types, -) -from pyrsistent._checked_types import optional as optional_type -from pyrsistent._checked_types import wrap_invariant -import inspect - -PY2 = sys.version_info[0] < 3 - - -def set_fields(dct, bases, name): - dct[name] = dict(sum([list(b.__dict__.get(name, {}).items()) for b in bases], [])) - - for k, v in list(dct.items()): - if isinstance(v, _PField): - dct[name][k] = v - del dct[k] - - -def check_global_invariants(subject, invariants): - error_codes = tuple(error_code for is_ok, error_code in - (invariant(subject) for invariant in invariants) if not is_ok) - if error_codes: - raise InvariantException(error_codes, (), 'Global invariant failed') - - -def serialize(serializer, format, value): - if isinstance(value, CheckedType) and serializer is PFIELD_NO_SERIALIZER: - return value.serialize(format) - - return serializer(format, value) - - -def check_type(destination_cls, field, name, value): - if field.type and not any(isinstance(value, get_type(t)) for t in field.type): - actual_type = type(value) - message = "Invalid type for field {0}.{1}, was {2}".format(destination_cls.__name__, name, actual_type.__name__) - raise PTypeError(destination_cls, name, field.type, actual_type, message) - - -def is_type_cls(type_cls, field_type): - if type(field_type) is set: - return True - types = tuple(field_type) - if len(types) == 0: - return False - return issubclass(get_type(types[0]), type_cls) - - -def is_field_ignore_extra_complaint(type_cls, field, ignore_extra): - # ignore_extra param has default False value, for speed purpose no need to propagate False - if not ignore_extra: - return False - - if not is_type_cls(type_cls, field.type): - return False - - if PY2: - return 'ignore_extra' in inspect.getargspec(field.factory).args - else: - return 'ignore_extra' in inspect.signature(field.factory).parameters - - - -class _PField(object): - __slots__ = ('type', 'invariant', 'initial', 'mandatory', '_factory', 'serializer') - - def __init__(self, type, invariant, initial, mandatory, factory, serializer): - self.type = type - self.invariant = invariant - self.initial = initial - self.mandatory = mandatory - self._factory = factory - self.serializer = serializer - - @property - def factory(self): - # If no factory is specified and the type is another CheckedType use the factory method of that CheckedType - if self._factory is PFIELD_NO_FACTORY and len(self.type) == 1: - typ = get_type(tuple(self.type)[0]) - if issubclass(typ, CheckedType): - return typ.create - - return self._factory - -PFIELD_NO_TYPE = () -PFIELD_NO_INVARIANT = lambda _: (True, None) -PFIELD_NO_FACTORY = lambda x: x -PFIELD_NO_INITIAL = object() -PFIELD_NO_SERIALIZER = lambda _, value: value - - -def field(type=PFIELD_NO_TYPE, invariant=PFIELD_NO_INVARIANT, initial=PFIELD_NO_INITIAL, - mandatory=False, factory=PFIELD_NO_FACTORY, serializer=PFIELD_NO_SERIALIZER): - """ - Field specification factory for :py:class:`PRecord`. 
- - :param type: a type or iterable with types that are allowed for this field - :param invariant: a function specifying an invariant that must hold for the field - :param initial: value of field if not specified when instantiating the record - :param mandatory: boolean specifying if the field is mandatory or not - :param factory: function called when field is set. - :param serializer: function that returns a serialized version of the field - """ - - # NB: We have to check this predicate separately from the predicates in - # `maybe_parse_user_type` et al. because this one is related to supporting - # the argspec for `field`, while those are related to supporting the valid - # ways to specify types. - - # Multiple types must be passed in one of the following containers. Note - # that a type that is a subclass of one of these containers, like a - # `collections.namedtuple`, will work as expected, since we check - # `isinstance` and not `issubclass`. - if isinstance(type, (list, set, tuple)): - types = set(maybe_parse_many_user_types(type)) - else: - types = set(maybe_parse_user_type(type)) - - invariant_function = wrap_invariant(invariant) if invariant != PFIELD_NO_INVARIANT and callable(invariant) else invariant - field = _PField(type=types, invariant=invariant_function, initial=initial, - mandatory=mandatory, factory=factory, serializer=serializer) - - _check_field_parameters(field) - - return field - - -def _check_field_parameters(field): - for t in field.type: - if not isinstance(t, type) and not isinstance(t, six.string_types): - raise TypeError('Type parameter expected, not {0}'.format(type(t))) - - if field.initial is not PFIELD_NO_INITIAL and \ - not callable(field.initial) and \ - field.type and not any(isinstance(field.initial, t) for t in field.type): - raise TypeError('Initial has invalid type {0}'.format(type(field.initial))) - - if not callable(field.invariant): - raise TypeError('Invariant must be callable') - - if not callable(field.factory): - raise TypeError('Factory must be callable') - - if not callable(field.serializer): - raise TypeError('Serializer must be callable') - - -class PTypeError(TypeError): - """ - Raised when trying to assign a value with a type that doesn't match the declared type. 
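For reference, field() is typically consumed through PRecord (defined in _precord.py, deleted elsewhere in this diff); a minimal sketch of the parameters documented above:

```python
from pyrsistent import PRecord, field

class Point(PRecord):
    x = field(type=int, mandatory=True)
    y = field(type=int, initial=0)

p = Point(x=1)
p2 = p.set(y=5)           # persistent update -- returns a new record
print(p.y, p2.y)          # 0 5
print(p == Point(x=1))    # True -- records compare by value
```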
- - Attributes: - source_class -- The class of the record - field -- Field name - expected_types -- Types allowed for the field - actual_type -- The non matching type - """ - def __init__(self, source_class, field, expected_types, actual_type, *args, **kwargs): - super(PTypeError, self).__init__(*args, **kwargs) - self.source_class = source_class - self.field = field - self.expected_types = expected_types - self.actual_type = actual_type - - -SEQ_FIELD_TYPE_SUFFIXES = { - CheckedPVector: "PVector", - CheckedPSet: "PSet", -} - -# Global dictionary to hold auto-generated field types: used for unpickling -_seq_field_types = {} - -def _restore_seq_field_pickle(checked_class, item_type, data): - """Unpickling function for auto-generated PVec/PSet field types.""" - type_ = _seq_field_types[checked_class, item_type] - return _restore_pickle(type_, data) - -def _types_to_names(types): - """Convert a tuple of types to a human-readable string.""" - return "".join(get_type(typ).__name__.capitalize() for typ in types) - -def _make_seq_field_type(checked_class, item_type): - """Create a subclass of the given checked class with the given item type.""" - type_ = _seq_field_types.get((checked_class, item_type)) - if type_ is not None: - return type_ - - class TheType(checked_class): - __type__ = item_type - - def __reduce__(self): - return (_restore_seq_field_pickle, - (checked_class, item_type, list(self))) - - suffix = SEQ_FIELD_TYPE_SUFFIXES[checked_class] - TheType.__name__ = _types_to_names(TheType._checked_types) + suffix - _seq_field_types[checked_class, item_type] = TheType - return TheType - -def _sequence_field(checked_class, item_type, optional, initial): - """ - Create checked field for either ``PSet`` or ``PVector``. - - :param checked_class: ``CheckedPSet`` or ``CheckedPVector``. - :param item_type: The required type for the items in the set. - :param optional: If true, ``None`` can be used as a value for - this field. - :param initial: Initial value to pass to factory. - - :return: A ``field`` containing a checked class. - """ - TheType = _make_seq_field_type(checked_class, item_type) - - if optional: - def factory(argument, _factory_fields=None, ignore_extra=False): - if argument is None: - return None - else: - return TheType.create(argument, _factory_fields=_factory_fields, ignore_extra=ignore_extra) - else: - factory = TheType.create - - return field(type=optional_type(TheType) if optional else TheType, - factory=factory, mandatory=True, - initial=factory(initial)) - - -def pset_field(item_type, optional=False, initial=()): - """ - Create checked ``PSet`` field. - - :param item_type: The required type for the items in the set. - :param optional: If true, ``None`` can be used as a value for - this field. - :param initial: Initial value to pass to factory if no value is given - for the field. - - :return: A ``field`` containing a ``CheckedPSet`` of the given type. - """ - return _sequence_field(CheckedPSet, item_type, optional, - initial) - - -def pvector_field(item_type, optional=False, initial=()): - """ - Create checked ``PVector`` field. - - :param item_type: The required type for the items in the vector. - :param optional: If true, ``None`` can be used as a value for - this field. - :param initial: Initial value to pass to factory if no value is given - for the field. - - :return: A ``field`` containing a ``CheckedPVector`` of the given type. 
- """ - return _sequence_field(CheckedPVector, item_type, optional, - initial) - - -_valid = lambda item: (True, "") - - -# Global dictionary to hold auto-generated field types: used for unpickling -_pmap_field_types = {} - -def _restore_pmap_field_pickle(key_type, value_type, data): - """Unpickling function for auto-generated PMap field types.""" - type_ = _pmap_field_types[key_type, value_type] - return _restore_pickle(type_, data) - -def _make_pmap_field_type(key_type, value_type): - """Create a subclass of CheckedPMap with the given key and value types.""" - type_ = _pmap_field_types.get((key_type, value_type)) - if type_ is not None: - return type_ - - class TheMap(CheckedPMap): - __key_type__ = key_type - __value_type__ = value_type - - def __reduce__(self): - return (_restore_pmap_field_pickle, - (self.__key_type__, self.__value_type__, dict(self))) - - TheMap.__name__ = "{0}To{1}PMap".format( - _types_to_names(TheMap._checked_key_types), - _types_to_names(TheMap._checked_value_types)) - _pmap_field_types[key_type, value_type] = TheMap - return TheMap - - -def pmap_field(key_type, value_type, optional=False, invariant=PFIELD_NO_INVARIANT): - """ - Create a checked ``PMap`` field. - - :param key: The required type for the keys of the map. - :param value: The required type for the values of the map. - :param optional: If true, ``None`` can be used as a value for - this field. - :param invariant: Pass-through to ``field``. - - :return: A ``field`` containing a ``CheckedPMap``. - """ - TheMap = _make_pmap_field_type(key_type, value_type) - - if optional: - def factory(argument): - if argument is None: - return None - else: - return TheMap.create(argument) - else: - factory = TheMap.create - - return field(mandatory=True, initial=TheMap(), - type=optional_type(TheMap) if optional else TheMap, - factory=factory, invariant=invariant) diff --git a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_helpers.py b/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_helpers.py deleted file mode 100644 index c9c58fea..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_helpers.py +++ /dev/null @@ -1,82 +0,0 @@ -from functools import wraps -import six -from pyrsistent._pmap import PMap, pmap -from pyrsistent._pset import PSet, pset -from pyrsistent._pvector import PVector, pvector - - -def freeze(o): - """ - Recursively convert simple Python containers into pyrsistent versions - of those containers. - - - list is converted to pvector, recursively - - dict is converted to pmap, recursively on values (but not keys) - - set is converted to pset, but not recursively - - tuple is converted to tuple, recursively. - - Sets and dict keys are not recursively frozen because they do not contain - mutable data by convention. The main exception to this rule is that - dict keys and set elements are often instances of mutable objects that - support hash-by-id, which this function can't convert anyway. - - >>> freeze(set([1, 2])) - pset([1, 2]) - >>> freeze([1, {'a': 3}]) - pvector([1, pmap({'a': 3})]) - >>> freeze((1, [])) - (1, pvector([])) - """ - typ = type(o) - if typ is dict: - return pmap(dict((k, freeze(v)) for k, v in six.iteritems(o))) - if typ is list: - return pvector(map(freeze, o)) - if typ is tuple: - return tuple(map(freeze, o)) - if typ is set: - return pset(o) - return o - - -def thaw(o): - """ - Recursively convert pyrsistent containers into simple Python containers. 
- - - pvector is converted to list, recursively - - pmap is converted to dict, recursively on values (but not keys) - - pset is converted to set, but not recursively - - tuple is converted to tuple, recursively. - - >>> from pyrsistent import s, m, v - >>> thaw(s(1, 2)) - {1, 2} - >>> thaw(v(1, m(a=3))) - [1, {'a': 3}] - >>> thaw((1, v())) - (1, []) - """ - if isinstance(o, PVector): - return list(map(thaw, o)) - if isinstance(o, PMap): - return dict((k, thaw(v)) for k, v in o.iteritems()) - if isinstance(o, PSet): - return set(o) - if type(o) is tuple: - return tuple(map(thaw, o)) - return o - - -def mutant(fn): - """ - Convenience decorator to isolate mutation to within the decorated function (with respect - to the input arguments). - - All arguments to the decorated function will be frozen so that they are guaranteed not to change. - The return value is also frozen. - """ - @wraps(fn) - def inner_f(*args, **kwargs): - return freeze(fn(*[freeze(e) for e in args], **dict(freeze(item) for item in kwargs.items()))) - - return inner_f diff --git a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_immutable.py b/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_immutable.py deleted file mode 100644 index a89bd755..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_immutable.py +++ /dev/null @@ -1,105 +0,0 @@ -import sys - -import six - - -def immutable(members='', name='Immutable', verbose=False): - """ - Produces a class that either can be used standalone or as a base class for persistent classes. - - This is a thin wrapper around a named tuple. - - Constructing a type and using it to instantiate objects: - - >>> Point = immutable('x, y', name='Point') - >>> p = Point(1, 2) - >>> p2 = p.set(x=3) - >>> p - Point(x=1, y=2) - >>> p2 - Point(x=3, y=2) - - Inheriting from a constructed type. In this case no type name needs to be supplied: - - >>> class PositivePoint(immutable('x, y')): - ... __slots__ = tuple() - ... def __new__(cls, x, y): - ... if x > 0 and y > 0: - ... return super(PositivePoint, cls).__new__(cls, x, y) - ... raise Exception('Coordinates must be positive!') - ... - >>> p = PositivePoint(1, 2) - >>> p.set(x=3) - PositivePoint(x=3, y=2) - >>> p.set(y=-3) - Traceback (most recent call last): - Exception: Coordinates must be positive! - - The persistent class also supports the notion of frozen members. The value of a frozen member - cannot be updated. For example it could be used to implement an ID that should remain the same - over time. A frozen member is denoted by a trailing underscore. 
- - >>> Point = immutable('x, y, id_', name='Point') - >>> p = Point(1, 2, id_=17) - >>> p.set(x=3) - Point(x=3, y=2, id_=17) - >>> p.set(id_=18) - Traceback (most recent call last): - AttributeError: Cannot set frozen members id_ - """ - - if isinstance(members, six.string_types): - members = members.replace(',', ' ').split() - - def frozen_member_test(): - frozen_members = ["'%s'" % f for f in members if f.endswith('_')] - if frozen_members: - return """ - frozen_fields = fields_to_modify & set([{frozen_members}]) - if frozen_fields: - raise AttributeError('Cannot set frozen members %s' % ', '.join(frozen_fields)) - """.format(frozen_members=', '.join(frozen_members)) - - return '' - - verbose_string = "" - if sys.version_info < (3, 7): - # Verbose is no longer supported in Python 3.7 - verbose_string = ", verbose={verbose}".format(verbose=verbose) - - quoted_members = ', '.join("'%s'" % m for m in members) - template = """ -class {class_name}(namedtuple('ImmutableBase', [{quoted_members}]{verbose_string})): - __slots__ = tuple() - - def __repr__(self): - return super({class_name}, self).__repr__().replace('ImmutableBase', self.__class__.__name__) - - def set(self, **kwargs): - if not kwargs: - return self - - fields_to_modify = set(kwargs.keys()) - if not fields_to_modify <= {member_set}: - raise AttributeError("'%s' is not a member" % ', '.join(fields_to_modify - {member_set})) - - {frozen_member_test} - - return self.__class__.__new__(self.__class__, *map(kwargs.pop, [{quoted_members}], self)) -""".format(quoted_members=quoted_members, - member_set="set([%s])" % quoted_members if quoted_members else 'set()', - frozen_member_test=frozen_member_test(), - verbose_string=verbose_string, - class_name=name) - - if verbose: - print(template) - - from collections import namedtuple - namespace = dict(namedtuple=namedtuple, __name__='pyrsistent_immutable') - try: - six.exec_(template, namespace) - except SyntaxError as e: - raise SyntaxError(e.message + ':\n' + template) - - return namespace[name] \ No newline at end of file diff --git a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_pbag.py b/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_pbag.py deleted file mode 100644 index 9905e9a6..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_pbag.py +++ /dev/null @@ -1,267 +0,0 @@ -from ._compat import Container, Iterable, Sized, Hashable -from functools import reduce -from pyrsistent._pmap import pmap - - -def _add_to_counters(counters, element): - return counters.set(element, counters.get(element, 0) + 1) - - -class PBag(object): - """ - A persistent bag/multiset type. - - Requires elements to be hashable, and allows duplicates, but has no - ordering. Bags are hashable. - - Do not instantiate directly, instead use the factory functions :py:func:`b` - or :py:func:`pbag` to create an instance. - - Some examples: - - >>> s = pbag([1, 2, 3, 1]) - >>> s2 = s.add(4) - >>> s3 = s2.remove(1) - >>> s - pbag([1, 1, 2, 3]) - >>> s2 - pbag([1, 1, 2, 3, 4]) - >>> s3 - pbag([1, 2, 3, 4]) - """ - - __slots__ = ('_counts', '__weakref__') - - def __init__(self, counts): - self._counts = counts - - def add(self, element): - """ - Add an element to the bag. - - >>> s = pbag([1]) - >>> s2 = s.add(1) - >>> s3 = s.add(2) - >>> s2 - pbag([1, 1]) - >>> s3 - pbag([1, 2]) - """ - return PBag(_add_to_counters(self._counts, element)) - - def update(self, iterable): - """ - Update bag with all elements in iterable. 
- - >>> s = pbag([1]) - >>> s.update([1, 2]) - pbag([1, 1, 2]) - """ - if iterable: - return PBag(reduce(_add_to_counters, iterable, self._counts)) - - return self - - def remove(self, element): - """ - Remove an element from the bag. - - >>> s = pbag([1, 1, 2]) - >>> s2 = s.remove(1) - >>> s3 = s.remove(2) - >>> s2 - pbag([1, 2]) - >>> s3 - pbag([1, 1]) - """ - if element not in self._counts: - raise KeyError(element) - elif self._counts[element] == 1: - newc = self._counts.remove(element) - else: - newc = self._counts.set(element, self._counts[element] - 1) - return PBag(newc) - - def count(self, element): - """ - Return the number of times an element appears. - - - >>> pbag([]).count('non-existent') - 0 - >>> pbag([1, 1, 2]).count(1) - 2 - """ - return self._counts.get(element, 0) - - def __len__(self): - """ - Return the length including duplicates. - - >>> len(pbag([1, 1, 2])) - 3 - """ - return sum(self._counts.itervalues()) - - def __iter__(self): - """ - Return an iterator of all elements, including duplicates. - - >>> list(pbag([1, 1, 2])) - [1, 1, 2] - >>> list(pbag([1, 2])) - [1, 2] - """ - for elt, count in self._counts.iteritems(): - for i in range(count): - yield elt - - def __contains__(self, elt): - """ - Check if an element is in the bag. - - >>> 1 in pbag([1, 1, 2]) - True - >>> 0 in pbag([1, 2]) - False - """ - return elt in self._counts - - def __repr__(self): - return "pbag({0})".format(list(self)) - - def __eq__(self, other): - """ - Check if two bags are equivalent, honoring the number of duplicates, - and ignoring insertion order. - - >>> pbag([1, 1, 2]) == pbag([1, 2]) - False - >>> pbag([2, 1, 0]) == pbag([0, 1, 2]) - True - """ - if type(other) is not PBag: - raise TypeError("Can only compare PBag with PBags") - return self._counts == other._counts - - def __lt__(self, other): - raise TypeError('PBags are not orderable') - - __le__ = __lt__ - __gt__ = __lt__ - __ge__ = __lt__ - - # Multiset-style operations similar to collections.Counter - - def __add__(self, other): - """ - Combine elements from two PBags. - - >>> pbag([1, 2, 2]) + pbag([2, 3, 3]) - pbag([1, 2, 2, 2, 3, 3]) - """ - if not isinstance(other, PBag): - return NotImplemented - result = self._counts.evolver() - for elem, other_count in other._counts.iteritems(): - result[elem] = self.count(elem) + other_count - return PBag(result.persistent()) - - def __sub__(self, other): - """ - Remove elements from one PBag that are present in another. - - >>> pbag([1, 2, 2, 2, 3]) - pbag([2, 3, 3, 4]) - pbag([1, 2, 2]) - """ - if not isinstance(other, PBag): - return NotImplemented - result = self._counts.evolver() - for elem, other_count in other._counts.iteritems(): - newcount = self.count(elem) - other_count - if newcount > 0: - result[elem] = newcount - elif elem in self: - result.remove(elem) - return PBag(result.persistent()) - - def __or__(self, other): - """ - Union: Keep elements that are present in either of two PBags. - - >>> pbag([1, 2, 2, 2]) | pbag([2, 3, 3]) - pbag([1, 2, 2, 2, 3, 3]) - """ - if not isinstance(other, PBag): - return NotImplemented - result = self._counts.evolver() - for elem, other_count in other._counts.iteritems(): - count = self.count(elem) - newcount = max(count, other_count) - result[elem] = newcount - return PBag(result.persistent()) - - def __and__(self, other): - """ - Intersection: Only keep elements that are present in both PBags. 
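The operators in this block give PBag collections.Counter-style arithmetic; a compact sketch (repr ordering of a pbag is not guaranteed, so the comments show one possible output):

```python
from pyrsistent import pbag

a = pbag([1, 2, 2])
b = pbag([2, 3])
print(a + b)  # pbag([1, 2, 2, 2, 3]) -- counts add
print(a - b)  # pbag([1, 2])          -- counts subtract, dropped at zero
print(a | b)  # pbag([1, 2, 2, 3])    -- max of counts
print(a & b)  # pbag([2])             -- min of counts
```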
- - >>> pbag([1, 2, 2, 2]) & pbag([2, 3, 3]) - pbag([2]) - """ - if not isinstance(other, PBag): - return NotImplemented - result = pmap().evolver() - for elem, count in self._counts.iteritems(): - newcount = min(count, other.count(elem)) - if newcount > 0: - result[elem] = newcount - return PBag(result.persistent()) - - def __hash__(self): - """ - Hash based on value of elements. - - >>> m = pmap({pbag([1, 2]): "it's here!"}) - >>> m[pbag([2, 1])] - "it's here!" - >>> pbag([1, 1, 2]) in m - False - """ - return hash(self._counts) - - -Container.register(PBag) -Iterable.register(PBag) -Sized.register(PBag) -Hashable.register(PBag) - - -def b(*elements): - """ - Construct a persistent bag. - - Takes an arbitrary number of arguments to insert into the new persistent - bag. - - >>> b(1, 2, 3, 2) - pbag([1, 2, 2, 3]) - """ - return pbag(elements) - - -def pbag(elements): - """ - Convert an iterable to a persistent bag. - - Takes an iterable with elements to insert. - - >>> pbag([1, 2, 3, 2]) - pbag([1, 2, 2, 3]) - """ - if not elements: - return _EMPTY_PBAG - return PBag(reduce(_add_to_counters, elements, pmap())) - - -_EMPTY_PBAG = PBag(pmap()) - diff --git a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_pclass.py b/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_pclass.py deleted file mode 100644 index a437f716..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_pclass.py +++ /dev/null @@ -1,264 +0,0 @@ -import six -from pyrsistent._checked_types import (InvariantException, CheckedType, _restore_pickle, store_invariants) -from pyrsistent._field_common import ( - set_fields, check_type, is_field_ignore_extra_complaint, PFIELD_NO_INITIAL, serialize, check_global_invariants -) -from pyrsistent._transformations import transform - - -def _is_pclass(bases): - return len(bases) == 1 and bases[0] == CheckedType - - -class PClassMeta(type): - def __new__(mcs, name, bases, dct): - set_fields(dct, bases, name='_pclass_fields') - store_invariants(dct, bases, '_pclass_invariants', '__invariant__') - dct['__slots__'] = ('_pclass_frozen',) + tuple(key for key in dct['_pclass_fields']) - - # There must only be one __weakref__ entry in the inheritance hierarchy, - # lets put it on the top level class. - if _is_pclass(bases): - dct['__slots__'] += ('__weakref__',) - - return super(PClassMeta, mcs).__new__(mcs, name, bases, dct) - -_MISSING_VALUE = object() - - -def _check_and_set_attr(cls, field, name, value, result, invariant_errors): - check_type(cls, field, name, value) - is_ok, error_code = field.invariant(value) - if not is_ok: - invariant_errors.append(error_code) - else: - setattr(result, name, value) - - -@six.add_metaclass(PClassMeta) -class PClass(CheckedType): - """ - A PClass is a python class with a fixed set of specified fields. PClasses are declared as python classes inheriting - from PClass. It is defined the same way that PRecords are and behaves like a PRecord in all aspects except that it - is not a PMap and hence not a collection but rather a plain Python object. - - - More documentation and examples of PClass usage is available at https://github.com/tobgu/pyrsistent - """ - def __new__(cls, **kwargs): # Support *args? 
- result = super(PClass, cls).__new__(cls) - factory_fields = kwargs.pop('_factory_fields', None) - ignore_extra = kwargs.pop('ignore_extra', None) - missing_fields = [] - invariant_errors = [] - for name, field in cls._pclass_fields.items(): - if name in kwargs: - if factory_fields is None or name in factory_fields: - if is_field_ignore_extra_complaint(PClass, field, ignore_extra): - value = field.factory(kwargs[name], ignore_extra=ignore_extra) - else: - value = field.factory(kwargs[name]) - else: - value = kwargs[name] - _check_and_set_attr(cls, field, name, value, result, invariant_errors) - del kwargs[name] - elif field.initial is not PFIELD_NO_INITIAL: - initial = field.initial() if callable(field.initial) else field.initial - _check_and_set_attr( - cls, field, name, initial, result, invariant_errors) - elif field.mandatory: - missing_fields.append('{0}.{1}'.format(cls.__name__, name)) - - if invariant_errors or missing_fields: - raise InvariantException(tuple(invariant_errors), tuple(missing_fields), 'Field invariant failed') - - if kwargs: - raise AttributeError("'{0}' are not among the specified fields for {1}".format( - ', '.join(kwargs), cls.__name__)) - - check_global_invariants(result, cls._pclass_invariants) - - result._pclass_frozen = True - return result - - def set(self, *args, **kwargs): - """ - Set a field in the instance. Returns a new instance with the updated value. The original instance remains - unmodified. Accepts key-value pairs or single string representing the field name and a value. - - >>> from pyrsistent import PClass, field - >>> class AClass(PClass): - ... x = field() - ... - >>> a = AClass(x=1) - >>> a2 = a.set(x=2) - >>> a3 = a.set('x', 3) - >>> a - AClass(x=1) - >>> a2 - AClass(x=2) - >>> a3 - AClass(x=3) - """ - if args: - kwargs[args[0]] = args[1] - - factory_fields = set(kwargs) - - for key in self._pclass_fields: - if key not in kwargs: - value = getattr(self, key, _MISSING_VALUE) - if value is not _MISSING_VALUE: - kwargs[key] = value - - return self.__class__(_factory_fields=factory_fields, **kwargs) - - @classmethod - def create(cls, kwargs, _factory_fields=None, ignore_extra=False): - """ - Factory method. Will create a new PClass of the current type and assign the values - specified in kwargs. - - :param ignore_extra: A boolean which when set to True will ignore any keys which appear in kwargs that are not - in the set of fields on the PClass. - """ - if isinstance(kwargs, cls): - return kwargs - - if ignore_extra: - kwargs = {k: kwargs[k] for k in cls._pclass_fields if k in kwargs} - - return cls(_factory_fields=_factory_fields, ignore_extra=ignore_extra, **kwargs) - - def serialize(self, format=None): - """ - Serialize the current PClass using custom serializer functions for fields where - such have been supplied. - """ - result = {} - for name in self._pclass_fields: - value = getattr(self, name, _MISSING_VALUE) - if value is not _MISSING_VALUE: - result[name] = serialize(self._pclass_fields[name].serializer, format, value) - - return result - - def transform(self, *transformations): - """ - Apply transformations to the currency PClass. For more details on transformations see - the documentation for PMap. Transformations on PClasses do not support key matching - since the PClass is not a collection. Apart from that the transformations available - for other persistent types work as expected. 
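A short sketch of how the PClass API above is typically used, assuming pyrsistent is importable; `Point` is a hypothetical class defined only for illustration:

from pyrsistent import PClass, field

class Point(PClass):
    x = field(type=int, mandatory=True)
    y = field(type=int, initial=0)

p1 = Point(x=1)          # y falls back to its declared initial value
p2 = p1.set(y=5)         # returns a new, updated instance; p1 is untouched
assert (p1.y, p2.y) == (0, 5)
assert p2.serialize() == {'x': 1, 'y': 5}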
- """ - return transform(self, transformations) - - def __eq__(self, other): - if isinstance(other, self.__class__): - for name in self._pclass_fields: - if getattr(self, name, _MISSING_VALUE) != getattr(other, name, _MISSING_VALUE): - return False - - return True - - return NotImplemented - - def __ne__(self, other): - return not self == other - - def __hash__(self): - # May want to optimize this by caching the hash somehow - return hash(tuple((key, getattr(self, key, _MISSING_VALUE)) for key in self._pclass_fields)) - - def __setattr__(self, key, value): - if getattr(self, '_pclass_frozen', False): - raise AttributeError("Can't set attribute, key={0}, value={1}".format(key, value)) - - super(PClass, self).__setattr__(key, value) - - def __delattr__(self, key): - raise AttributeError("Can't delete attribute, key={0}, use remove()".format(key)) - - def _to_dict(self): - result = {} - for key in self._pclass_fields: - value = getattr(self, key, _MISSING_VALUE) - if value is not _MISSING_VALUE: - result[key] = value - - return result - - def __repr__(self): - return "{0}({1})".format(self.__class__.__name__, - ', '.join('{0}={1}'.format(k, repr(v)) for k, v in self._to_dict().items())) - - def __reduce__(self): - # Pickling support - data = dict((key, getattr(self, key)) for key in self._pclass_fields if hasattr(self, key)) - return _restore_pickle, (self.__class__, data,) - - def evolver(self): - """ - Returns an evolver for this object. - """ - return _PClassEvolver(self, self._to_dict()) - - def remove(self, name): - """ - Remove attribute given by name from the current instance. Raises AttributeError if the - attribute doesn't exist. - """ - evolver = self.evolver() - del evolver[name] - return evolver.persistent() - - -class _PClassEvolver(object): - __slots__ = ('_pclass_evolver_original', '_pclass_evolver_data', '_pclass_evolver_data_is_dirty', '_factory_fields') - - def __init__(self, original, initial_dict): - self._pclass_evolver_original = original - self._pclass_evolver_data = initial_dict - self._pclass_evolver_data_is_dirty = False - self._factory_fields = set() - - def __getitem__(self, item): - return self._pclass_evolver_data[item] - - def set(self, key, value): - if self._pclass_evolver_data.get(key, _MISSING_VALUE) is not value: - self._pclass_evolver_data[key] = value - self._factory_fields.add(key) - self._pclass_evolver_data_is_dirty = True - - return self - - def __setitem__(self, key, value): - self.set(key, value) - - def remove(self, item): - if item in self._pclass_evolver_data: - del self._pclass_evolver_data[item] - self._factory_fields.discard(item) - self._pclass_evolver_data_is_dirty = True - return self - - raise AttributeError(item) - - def __delitem__(self, item): - self.remove(item) - - def persistent(self): - if self._pclass_evolver_data_is_dirty: - return self._pclass_evolver_original.__class__(_factory_fields=self._factory_fields, - **self._pclass_evolver_data) - - return self._pclass_evolver_original - - def __setattr__(self, key, value): - if key not in self.__slots__: - self.set(key, value) - else: - super(_PClassEvolver, self).__setattr__(key, value) - - def __getattr__(self, item): - return self[item] diff --git a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_pdeque.py b/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_pdeque.py deleted file mode 100644 index 5147b3fa..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_pdeque.py +++ /dev/null @@ -1,376 +0,0 @@ -from ._compat import Sequence, Hashable -from itertools import 
islice, chain -from numbers import Integral -from pyrsistent._plist import plist - - -class PDeque(object): - """ - Persistent double ended queue (deque). Allows quick appends and pops in both ends. Implemented - using two persistent lists. - - A maximum length can be specified to create a bounded queue. - - Fully supports the Sequence and Hashable protocols including indexing and slicing but - if you need fast random access go for the PVector instead. - - Do not instantiate directly, instead use the factory functions :py:func:`dq` or :py:func:`pdeque` to - create an instance. - - Some examples: - - >>> x = pdeque([1, 2, 3]) - >>> x.left - 1 - >>> x.right - 3 - >>> x[0] == x.left - True - >>> x[-1] == x.right - True - >>> x.pop() - pdeque([1, 2]) - >>> x.pop() == x[:-1] - True - >>> x.popleft() - pdeque([2, 3]) - >>> x.append(4) - pdeque([1, 2, 3, 4]) - >>> x.appendleft(4) - pdeque([4, 1, 2, 3]) - - >>> y = pdeque([1, 2, 3], maxlen=3) - >>> y.append(4) - pdeque([2, 3, 4], maxlen=3) - >>> y.appendleft(4) - pdeque([4, 1, 2], maxlen=3) - """ - __slots__ = ('_left_list', '_right_list', '_length', '_maxlen', '__weakref__') - - def __new__(cls, left_list, right_list, length, maxlen=None): - instance = super(PDeque, cls).__new__(cls) - instance._left_list = left_list - instance._right_list = right_list - instance._length = length - - if maxlen is not None: - if not isinstance(maxlen, Integral): - raise TypeError('An integer is required as maxlen') - - if maxlen < 0: - raise ValueError("maxlen must be non-negative") - - instance._maxlen = maxlen - return instance - - @property - def right(self): - """ - Rightmost element in dqueue. - """ - return PDeque._tip_from_lists(self._right_list, self._left_list) - - @property - def left(self): - """ - Leftmost element in dqueue. - """ - return PDeque._tip_from_lists(self._left_list, self._right_list) - - @staticmethod - def _tip_from_lists(primary_list, secondary_list): - if primary_list: - return primary_list.first - - if secondary_list: - return secondary_list[-1] - - raise IndexError('No elements in empty deque') - - def __iter__(self): - return chain(self._left_list, self._right_list.reverse()) - - def __repr__(self): - return "pdeque({0}{1})".format(list(self), - ', maxlen={0}'.format(self._maxlen) if self._maxlen is not None else '') - __str__ = __repr__ - - @property - def maxlen(self): - """ - Maximum length of the queue. - """ - return self._maxlen - - def pop(self, count=1): - """ - Return new deque with rightmost element removed. Popping the empty queue - will return the empty queue. A optional count can be given to indicate the - number of elements to pop. Popping with a negative index is the same as - popleft. Executes in amortized O(k) where k is the number of elements to pop. - - >>> pdeque([1, 2]).pop() - pdeque([1]) - >>> pdeque([1, 2]).pop(2) - pdeque([]) - >>> pdeque([1, 2]).pop(-1) - pdeque([2]) - """ - if count < 0: - return self.popleft(-count) - - new_right_list, new_left_list = PDeque._pop_lists(self._right_list, self._left_list, count) - return PDeque(new_left_list, new_right_list, max(self._length - count, 0), self._maxlen) - - def popleft(self, count=1): - """ - Return new deque with leftmost element removed. Otherwise functionally - equivalent to pop(). 
- - >>> pdeque([1, 2]).popleft() - pdeque([2]) - """ - if count < 0: - return self.pop(-count) - - new_left_list, new_right_list = PDeque._pop_lists(self._left_list, self._right_list, count) - return PDeque(new_left_list, new_right_list, max(self._length - count, 0), self._maxlen) - - @staticmethod - def _pop_lists(primary_list, secondary_list, count): - new_primary_list = primary_list - new_secondary_list = secondary_list - - while count > 0 and (new_primary_list or new_secondary_list): - count -= 1 - if new_primary_list.rest: - new_primary_list = new_primary_list.rest - elif new_primary_list: - new_primary_list = new_secondary_list.reverse() - new_secondary_list = plist() - else: - new_primary_list = new_secondary_list.reverse().rest - new_secondary_list = plist() - - return new_primary_list, new_secondary_list - - def _is_empty(self): - return not self._left_list and not self._right_list - - def __lt__(self, other): - if not isinstance(other, PDeque): - return NotImplemented - - return tuple(self) < tuple(other) - - def __eq__(self, other): - if not isinstance(other, PDeque): - return NotImplemented - - if tuple(self) == tuple(other): - # Sanity check of the length value since it is redundant (there for performance) - assert len(self) == len(other) - return True - - return False - - def __hash__(self): - return hash(tuple(self)) - - def __len__(self): - return self._length - - def append(self, elem): - """ - Return new deque with elem as the rightmost element. - - >>> pdeque([1, 2]).append(3) - pdeque([1, 2, 3]) - """ - new_left_list, new_right_list, new_length = self._append(self._left_list, self._right_list, elem) - return PDeque(new_left_list, new_right_list, new_length, self._maxlen) - - def appendleft(self, elem): - """ - Return new deque with elem as the leftmost element. - - >>> pdeque([1, 2]).appendleft(3) - pdeque([3, 1, 2]) - """ - new_right_list, new_left_list, new_length = self._append(self._right_list, self._left_list, elem) - return PDeque(new_left_list, new_right_list, new_length, self._maxlen) - - def _append(self, primary_list, secondary_list, elem): - if self._maxlen is not None and self._length == self._maxlen: - if self._maxlen == 0: - return primary_list, secondary_list, 0 - new_primary_list, new_secondary_list = PDeque._pop_lists(primary_list, secondary_list, 1) - return new_primary_list, new_secondary_list.cons(elem), self._length - - return primary_list, secondary_list.cons(elem), self._length + 1 - - @staticmethod - def _extend_list(the_list, iterable): - count = 0 - for elem in iterable: - the_list = the_list.cons(elem) - count += 1 - - return the_list, count - - def _extend(self, primary_list, secondary_list, iterable): - new_primary_list, extend_count = PDeque._extend_list(primary_list, iterable) - new_secondary_list = secondary_list - current_len = self._length + extend_count - if self._maxlen is not None and current_len > self._maxlen: - pop_len = current_len - self._maxlen - new_secondary_list, new_primary_list = PDeque._pop_lists(new_secondary_list, new_primary_list, pop_len) - extend_count -= pop_len - - return new_primary_list, new_secondary_list, extend_count - - def extend(self, iterable): - """ - Return new deque with all elements of iterable appended to the right. 
- - >>> pdeque([1, 2]).extend([3, 4]) - pdeque([1, 2, 3, 4]) - """ - new_right_list, new_left_list, extend_count = self._extend(self._right_list, self._left_list, iterable) - return PDeque(new_left_list, new_right_list, self._length + extend_count, self._maxlen) - - def extendleft(self, iterable): - """ - Return new deque with all elements of iterable appended to the left. - - NB! The elements will be inserted in reverse order compared to the order in the iterable. - - >>> pdeque([1, 2]).extendleft([3, 4]) - pdeque([4, 3, 1, 2]) - """ - new_left_list, new_right_list, extend_count = self._extend(self._left_list, self._right_list, iterable) - return PDeque(new_left_list, new_right_list, self._length + extend_count, self._maxlen) - - def count(self, elem): - """ - Return the number of elements equal to elem present in the queue - - >>> pdeque([1, 2, 1]).count(1) - 2 - """ - return self._left_list.count(elem) + self._right_list.count(elem) - - def remove(self, elem): - """ - Return new deque with first element from left equal to elem removed. If no such element is found - a ValueError is raised. - - >>> pdeque([2, 1, 2]).remove(2) - pdeque([1, 2]) - """ - try: - return PDeque(self._left_list.remove(elem), self._right_list, self._length - 1) - except ValueError: - # Value not found in left list, try the right list - try: - # This is severely inefficient with a double reverse, should perhaps implement a remove_last()? - return PDeque(self._left_list, - self._right_list.reverse().remove(elem).reverse(), self._length - 1) - except ValueError: - raise ValueError('{0} not found in PDeque'.format(elem)) - - def reverse(self): - """ - Return reversed deque. - - >>> pdeque([1, 2, 3]).reverse() - pdeque([3, 2, 1]) - - Also supports the standard python reverse function. - - >>> reversed(pdeque([1, 2, 3])) - pdeque([3, 2, 1]) - """ - return PDeque(self._right_list, self._left_list, self._length) - __reversed__ = reverse - - def rotate(self, steps): - """ - Return deque with elements rotated steps steps. - - >>> x = pdeque([1, 2, 3]) - >>> x.rotate(1) - pdeque([3, 1, 2]) - >>> x.rotate(-2) - pdeque([3, 1, 2]) - """ - popped_deque = self.pop(steps) - if steps >= 0: - return popped_deque.extendleft(islice(self.reverse(), steps)) - - return popped_deque.extend(islice(self, -steps)) - - def __reduce__(self): - # Pickling support - return pdeque, (list(self), self._maxlen) - - def __getitem__(self, index): - if isinstance(index, slice): - if index.step is not None and index.step != 1: - # Too difficult, no structural sharing possible - return pdeque(tuple(self)[index], maxlen=self._maxlen) - - result = self - if index.start is not None: - result = result.popleft(index.start % self._length) - if index.stop is not None: - result = result.pop(self._length - (index.stop % self._length)) - - return result - - if not isinstance(index, Integral): - raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__) - - if index >= 0: - return self.popleft(index).left - - shifted = len(self) + index - if shifted < 0: - raise IndexError( - "pdeque index {0} out of range {1}".format(index, len(self)), - ) - return self.popleft(shifted).left - - index = Sequence.index - -Sequence.register(PDeque) -Hashable.register(PDeque) - - -def pdeque(iterable=(), maxlen=None): - """ - Return deque containing the elements of iterable. If maxlen is specified then - len(iterable) - maxlen elements are discarded from the left to if len(iterable) > maxlen. 
- - >>> pdeque([1, 2, 3]) - pdeque([1, 2, 3]) - >>> pdeque([1, 2, 3, 4], maxlen=2) - pdeque([3, 4], maxlen=2) - """ - t = tuple(iterable) - if maxlen is not None: - t = t[-maxlen:] - length = len(t) - pivot = int(length / 2) - left = plist(t[:pivot]) - right = plist(t[pivot:], reverse=True) - return PDeque(left, right, length, maxlen) - -def dq(*elements): - """ - Return deque containing all arguments. - - >>> dq(1, 2, 3) - pdeque([1, 2, 3]) - """ - return pdeque(elements) diff --git a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_plist.py b/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_plist.py deleted file mode 100644 index 8b4267f5..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_plist.py +++ /dev/null @@ -1,313 +0,0 @@ -from ._compat import Sequence, Hashable -from numbers import Integral -from functools import reduce - - -class _PListBuilder(object): - """ - Helper class to allow construction of a list without - having to reverse it in the end. - """ - __slots__ = ('_head', '_tail') - - def __init__(self): - self._head = _EMPTY_PLIST - self._tail = _EMPTY_PLIST - - def _append(self, elem, constructor): - if not self._tail: - self._head = constructor(elem) - self._tail = self._head - else: - self._tail.rest = constructor(elem) - self._tail = self._tail.rest - - return self._head - - def append_elem(self, elem): - return self._append(elem, lambda e: PList(e, _EMPTY_PLIST)) - - def append_plist(self, pl): - return self._append(pl, lambda l: l) - - def build(self): - return self._head - - -class _PListBase(object): - __slots__ = ('__weakref__',) - - # Selected implementations can be taken straight from the Sequence - # class, other are less suitable. Especially those that work with - # index lookups. - count = Sequence.count - index = Sequence.index - - def __reduce__(self): - # Pickling support - return plist, (list(self),) - - def __len__(self): - """ - Return the length of the list, computed by traversing it. - - This is obviously O(n) but with the current implementation - where a list is also a node the overhead of storing the length - in every node would be quite significant. - """ - return sum(1 for _ in self) - - def __repr__(self): - return "plist({0})".format(list(self)) - __str__ = __repr__ - - def cons(self, elem): - """ - Return a new list with elem inserted as new head. - - >>> plist([1, 2]).cons(3) - plist([3, 1, 2]) - """ - return PList(elem, self) - - def mcons(self, iterable): - """ - Return a new list with all elements of iterable repeatedly cons:ed to the current list. - NB! The elements will be inserted in the reverse order of the iterable. - Runs in O(len(iterable)). - - >>> plist([1, 2]).mcons([3, 4]) - plist([4, 3, 1, 2]) - """ - head = self - for elem in iterable: - head = head.cons(elem) - - return head - - def reverse(self): - """ - Return a reversed version of list. Runs in O(n) where n is the length of the list. - - >>> plist([1, 2, 3]).reverse() - plist([3, 2, 1]) - - Also supports the standard reversed function. - - >>> reversed(plist([1, 2, 3])) - plist([3, 2, 1]) - """ - result = plist() - head = self - while head: - result = result.cons(head.first) - head = head.rest - - return result - __reversed__ = reverse - - def split(self, index): - """ - Spilt the list at position specified by index. Returns a tuple containing the - list up until index and the list after the index. Runs in O(index). 
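As a usage sketch of the bounded-deque behaviour documented above (assuming pyrsistent is installed; `window` is an illustrative name):

from pyrsistent import pdeque, dq

window = pdeque([], maxlen=3)
for sample in (1, 2, 3, 4):
    window = window.append(sample)   # bounded: the oldest element drops off the left
assert list(window) == [2, 3, 4]
assert window.rotate(1) == dq(4, 2, 3)   # equality compares elements only, not maxlen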
- - >>> plist([1, 2, 3, 4]).split(2) - (plist([1, 2]), plist([3, 4])) - """ - lb = _PListBuilder() - right_list = self - i = 0 - while right_list and i < index: - lb.append_elem(right_list.first) - right_list = right_list.rest - i += 1 - - if not right_list: - # Just a small optimization in the cases where no split occurred - return self, _EMPTY_PLIST - - return lb.build(), right_list - - def __iter__(self): - li = self - while li: - yield li.first - li = li.rest - - def __lt__(self, other): - if not isinstance(other, _PListBase): - return NotImplemented - - return tuple(self) < tuple(other) - - def __eq__(self, other): - """ - Traverses the lists, checking equality of elements. - - This is an O(n) operation, but preserves the standard semantics of list equality. - """ - if not isinstance(other, _PListBase): - return NotImplemented - - self_head = self - other_head = other - while self_head and other_head: - if not self_head.first == other_head.first: - return False - self_head = self_head.rest - other_head = other_head.rest - - return not self_head and not other_head - - def __getitem__(self, index): - # Don't use this this data structure if you plan to do a lot of indexing, it is - # very inefficient! Use a PVector instead! - - if isinstance(index, slice): - if index.start is not None and index.stop is None and (index.step is None or index.step == 1): - return self._drop(index.start) - - # Take the easy way out for all other slicing cases, not much structural reuse possible anyway - return plist(tuple(self)[index]) - - if not isinstance(index, Integral): - raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__) - - if index < 0: - # NB: O(n)! - index += len(self) - - try: - return self._drop(index).first - except AttributeError: - raise IndexError("PList index out of range") - - def _drop(self, count): - if count < 0: - raise IndexError("PList index out of range") - - head = self - while count > 0: - head = head.rest - count -= 1 - - return head - - def __hash__(self): - return hash(tuple(self)) - - def remove(self, elem): - """ - Return new list with first element equal to elem removed. O(k) where k is the position - of the element that is removed. - - Raises ValueError if no matching element is found. - - >>> plist([1, 2, 1]).remove(1) - plist([2, 1]) - """ - - builder = _PListBuilder() - head = self - while head: - if head.first == elem: - return builder.append_plist(head.rest) - - builder.append_elem(head.first) - head = head.rest - - raise ValueError('{0} not found in PList'.format(elem)) - - -class PList(_PListBase): - """ - Classical Lisp style singly linked list. Adding elements to the head using cons is O(1). - Element access is O(k) where k is the position of the element in the list. Taking the - length of the list is O(n). - - Fully supports the Sequence and Hashable protocols including indexing and slicing but - if you need fast random access go for the PVector instead. - - Do not instantiate directly, instead use the factory functions :py:func:`l` or :py:func:`plist` to - create an instance. 
- - Some examples: - - >>> x = plist([1, 2]) - >>> y = x.cons(3) - >>> x - plist([1, 2]) - >>> y - plist([3, 1, 2]) - >>> y.first - 3 - >>> y.rest == x - True - >>> y[:2] - plist([3, 1]) - """ - __slots__ = ('first', 'rest') - - def __new__(cls, first, rest): - instance = super(PList, cls).__new__(cls) - instance.first = first - instance.rest = rest - return instance - - def __bool__(self): - return True - __nonzero__ = __bool__ - - -Sequence.register(PList) -Hashable.register(PList) - - -class _EmptyPList(_PListBase): - __slots__ = () - - def __bool__(self): - return False - __nonzero__ = __bool__ - - @property - def first(self): - raise AttributeError("Empty PList has no first") - - @property - def rest(self): - return self - - -Sequence.register(_EmptyPList) -Hashable.register(_EmptyPList) - -_EMPTY_PLIST = _EmptyPList() - - -def plist(iterable=(), reverse=False): - """ - Creates a new persistent list containing all elements of iterable. - Optional parameter reverse specifies if the elements should be inserted in - reverse order or not. - - >>> plist([1, 2, 3]) - plist([1, 2, 3]) - >>> plist([1, 2, 3], reverse=True) - plist([3, 2, 1]) - """ - if not reverse: - iterable = list(iterable) - iterable.reverse() - - return reduce(lambda pl, elem: pl.cons(elem), iterable, _EMPTY_PLIST) - - -def l(*elements): - """ - Creates a new persistent list containing all arguments. - - >>> l(1, 2, 3) - plist([1, 2, 3]) - """ - return plist(elements) diff --git a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_pmap.py b/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_pmap.py deleted file mode 100644 index e8a0ec53..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_pmap.py +++ /dev/null @@ -1,460 +0,0 @@ -from ._compat import Mapping, Hashable -from itertools import chain -import six -from pyrsistent._pvector import pvector -from pyrsistent._transformations import transform - - -class PMap(object): - """ - Persistent map/dict. Tries to follow the same naming conventions as the built in dict where feasible. - - Do not instantiate directly, instead use the factory functions :py:func:`m` or :py:func:`pmap` to - create an instance. - - Was originally written as a very close copy of the Clojure equivalent but was later rewritten to closer - re-assemble the python dict. This means that a sparse vector (a PVector) of buckets is used. The keys are - hashed and the elements inserted at position hash % len(bucket_vector). Whenever the map size exceeds 2/3 of - the containing vectors size the map is reallocated to a vector of double the size. This is done to avoid - excessive hash collisions. - - This structure corresponds most closely to the built in dict type and is intended as a replacement. Where the - semantics are the same (more or less) the same function names have been used but for some cases it is not possible, - for example assignments and deletion of values. - - PMap implements the Mapping protocol and is Hashable. It also supports dot-notation for - element access. - - Random access and insert is log32(n) where n is the size of the map. 
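The cons-list semantics removed above can be summarised in a few lines; a sketch assuming pyrsistent is available:

from pyrsistent import plist, l

xs = plist([2, 3])
ys = xs.cons(1)              # O(1) prepend; xs itself is untouched
assert ys == l(1, 2, 3)
assert ys.rest is xs         # the tail is shared structurally, not copied
left, right = ys.split(1)
assert (left, right) == (l(1), l(2, 3))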
- - The following are examples of some common operations on persistent maps - - >>> m1 = m(a=1, b=3) - >>> m2 = m1.set('c', 3) - >>> m3 = m2.remove('a') - >>> m1 - pmap({'b': 3, 'a': 1}) - >>> m2 - pmap({'c': 3, 'b': 3, 'a': 1}) - >>> m3 - pmap({'c': 3, 'b': 3}) - >>> m3['c'] - 3 - >>> m3.c - 3 - """ - __slots__ = ('_size', '_buckets', '__weakref__', '_cached_hash') - - def __new__(cls, size, buckets): - self = super(PMap, cls).__new__(cls) - self._size = size - self._buckets = buckets - return self - - @staticmethod - def _get_bucket(buckets, key): - index = hash(key) % len(buckets) - bucket = buckets[index] - return index, bucket - - @staticmethod - def _getitem(buckets, key): - _, bucket = PMap._get_bucket(buckets, key) - if bucket: - for k, v in bucket: - if k == key: - return v - - raise KeyError(key) - - def __getitem__(self, key): - return PMap._getitem(self._buckets, key) - - @staticmethod - def _contains(buckets, key): - _, bucket = PMap._get_bucket(buckets, key) - if bucket: - for k, _ in bucket: - if k == key: - return True - - return False - - return False - - def __contains__(self, key): - return self._contains(self._buckets, key) - - get = Mapping.get - - def __iter__(self): - return self.iterkeys() - - def __getattr__(self, key): - try: - return self[key] - except KeyError: - raise AttributeError( - "{0} has no attribute '{1}'".format(type(self).__name__, key) - ) - - def iterkeys(self): - for k, _ in self.iteritems(): - yield k - - # These are more efficient implementations compared to the original - # methods that are based on the keys iterator and then calls the - # accessor functions to access the value for the corresponding key - def itervalues(self): - for _, v in self.iteritems(): - yield v - - def iteritems(self): - for bucket in self._buckets: - if bucket: - for k, v in bucket: - yield k, v - - def values(self): - return pvector(self.itervalues()) - - def keys(self): - return pvector(self.iterkeys()) - - def items(self): - return pvector(self.iteritems()) - - def __len__(self): - return self._size - - def __repr__(self): - return 'pmap({0})'.format(str(dict(self))) - - def __eq__(self, other): - if self is other: - return True - if not isinstance(other, Mapping): - return NotImplemented - if len(self) != len(other): - return False - if isinstance(other, PMap): - if (hasattr(self, '_cached_hash') and hasattr(other, '_cached_hash') - and self._cached_hash != other._cached_hash): - return False - if self._buckets == other._buckets: - return True - return dict(self.iteritems()) == dict(other.iteritems()) - elif isinstance(other, dict): - return dict(self.iteritems()) == other - return dict(self.iteritems()) == dict(six.iteritems(other)) - - __ne__ = Mapping.__ne__ - - def __lt__(self, other): - raise TypeError('PMaps are not orderable') - - __le__ = __lt__ - __gt__ = __lt__ - __ge__ = __lt__ - - def __str__(self): - return self.__repr__() - - def __hash__(self): - if not hasattr(self, '_cached_hash'): - self._cached_hash = hash(frozenset(self.iteritems())) - return self._cached_hash - - def set(self, key, val): - """ - Return a new PMap with key and val inserted. - - >>> m1 = m(a=1, b=2) - >>> m2 = m1.set('a', 3) - >>> m3 = m1.set('c' ,4) - >>> m1 - pmap({'b': 2, 'a': 1}) - >>> m2 - pmap({'b': 2, 'a': 3}) - >>> m3 - pmap({'c': 4, 'b': 2, 'a': 1}) - """ - return self.evolver().set(key, val).persistent() - - def remove(self, key): - """ - Return a new PMap without the element specified by key. Raises KeyError if the element - is not present. 
- - >>> m1 = m(a=1, b=2) - >>> m1.remove('a') - pmap({'b': 2}) - """ - return self.evolver().remove(key).persistent() - - def discard(self, key): - """ - Return a new PMap without the element specified by key. Returns reference to itself - if element is not present. - - >>> m1 = m(a=1, b=2) - >>> m1.discard('a') - pmap({'b': 2}) - >>> m1 is m1.discard('c') - True - """ - try: - return self.remove(key) - except KeyError: - return self - - def update(self, *maps): - """ - Return a new PMap with the items in Mappings inserted. If the same key is present in multiple - maps the rightmost (last) value is inserted. - - >>> m1 = m(a=1, b=2) - >>> m1.update(m(a=2, c=3), {'a': 17, 'd': 35}) - pmap({'c': 3, 'b': 2, 'a': 17, 'd': 35}) - """ - return self.update_with(lambda l, r: r, *maps) - - def update_with(self, update_fn, *maps): - """ - Return a new PMap with the items in Mappings maps inserted. If the same key is present in multiple - maps the values will be merged using merge_fn going from left to right. - - >>> from operator import add - >>> m1 = m(a=1, b=2) - >>> m1.update_with(add, m(a=2)) - pmap({'b': 2, 'a': 3}) - - The reverse behaviour of the regular merge. Keep the leftmost element instead of the rightmost. - - >>> m1 = m(a=1) - >>> m1.update_with(lambda l, r: l, m(a=2), {'a':3}) - pmap({'a': 1}) - """ - evolver = self.evolver() - for map in maps: - for key, value in map.items(): - evolver.set(key, update_fn(evolver[key], value) if key in evolver else value) - - return evolver.persistent() - - def __add__(self, other): - return self.update(other) - - def __reduce__(self): - # Pickling support - return pmap, (dict(self),) - - def transform(self, *transformations): - """ - Transform arbitrarily complex combinations of PVectors and PMaps. A transformation - consists of two parts. One match expression that specifies which elements to transform - and one transformation function that performs the actual transformation. - - >>> from pyrsistent import freeze, ny - >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'}, - ... {'author': 'Steve', 'content': 'A slightly longer article'}], - ... 'weather': {'temperature': '11C', 'wind': '5m/s'}}) - >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c) - >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c) - >>> very_short_news.articles[0].content - 'A short article' - >>> very_short_news.articles[1].content - 'A slightly long...' 
- - When nothing has been transformed the original data structure is kept - - >>> short_news is news_paper - True - >>> very_short_news is news_paper - False - >>> very_short_news.articles[0] is news_paper.articles[0] - True - """ - return transform(self, transformations) - - def copy(self): - return self - - class _Evolver(object): - __slots__ = ('_buckets_evolver', '_size', '_original_pmap') - - def __init__(self, original_pmap): - self._original_pmap = original_pmap - self._buckets_evolver = original_pmap._buckets.evolver() - self._size = original_pmap._size - - def __getitem__(self, key): - return PMap._getitem(self._buckets_evolver, key) - - def __setitem__(self, key, val): - self.set(key, val) - - def set(self, key, val): - if len(self._buckets_evolver) < 0.67 * self._size: - self._reallocate(2 * len(self._buckets_evolver)) - - kv = (key, val) - index, bucket = PMap._get_bucket(self._buckets_evolver, key) - if bucket: - for k, v in bucket: - if k == key: - if v is not val: - new_bucket = [(k2, v2) if k2 != k else (k2, val) for k2, v2 in bucket] - self._buckets_evolver[index] = new_bucket - - return self - - new_bucket = [kv] - new_bucket.extend(bucket) - self._buckets_evolver[index] = new_bucket - self._size += 1 - else: - self._buckets_evolver[index] = [kv] - self._size += 1 - - return self - - def _reallocate(self, new_size): - new_list = new_size * [None] - buckets = self._buckets_evolver.persistent() - for k, v in chain.from_iterable(x for x in buckets if x): - index = hash(k) % new_size - if new_list[index]: - new_list[index].append((k, v)) - else: - new_list[index] = [(k, v)] - - # A reallocation should always result in a dirty buckets evolver to avoid - # possible loss of elements when doing the reallocation. - self._buckets_evolver = pvector().evolver() - self._buckets_evolver.extend(new_list) - - def is_dirty(self): - return self._buckets_evolver.is_dirty() - - def persistent(self): - if self.is_dirty(): - self._original_pmap = PMap(self._size, self._buckets_evolver.persistent()) - - return self._original_pmap - - def __len__(self): - return self._size - - def __contains__(self, key): - return PMap._contains(self._buckets_evolver, key) - - def __delitem__(self, key): - self.remove(key) - - def remove(self, key): - index, bucket = PMap._get_bucket(self._buckets_evolver, key) - - if bucket: - new_bucket = [(k, v) for (k, v) in bucket if k != key] - if len(bucket) > len(new_bucket): - self._buckets_evolver[index] = new_bucket if new_bucket else None - self._size -= 1 - return self - - raise KeyError('{0}'.format(key)) - - def evolver(self): - """ - Create a new evolver for this pmap. For a discussion on evolvers in general see the - documentation for the pvector evolver. - - Create the evolver and perform various mutating updates to it: - - >>> m1 = m(a=1, b=2) - >>> e = m1.evolver() - >>> e['c'] = 3 - >>> len(e) - 3 - >>> del e['a'] - - The underlying pmap remains the same: - - >>> m1 - pmap({'b': 2, 'a': 1}) - - The changes are kept in the evolver. An updated pmap can be created using the - persistent() function on the evolver. - - >>> m2 = e.persistent() - >>> m2 - pmap({'c': 3, 'b': 2}) - - The new pmap will share data with the original pmap in the same way that would have - been done if only using operations on the pmap. 
- """ - return self._Evolver(self) - -Mapping.register(PMap) -Hashable.register(PMap) - - -def _turbo_mapping(initial, pre_size): - if pre_size: - size = pre_size - else: - try: - size = 2 * len(initial) or 8 - except Exception: - # Guess we can't figure out the length. Give up on length hinting, - # we can always reallocate later. - size = 8 - - buckets = size * [None] - - if not isinstance(initial, Mapping): - # Make a dictionary of the initial data if it isn't already, - # that will save us some job further down since we can assume no - # key collisions - initial = dict(initial) - - for k, v in six.iteritems(initial): - h = hash(k) - index = h % size - bucket = buckets[index] - - if bucket: - bucket.append((k, v)) - else: - buckets[index] = [(k, v)] - - return PMap(len(initial), pvector().extend(buckets)) - - -_EMPTY_PMAP = _turbo_mapping({}, 0) - - -def pmap(initial={}, pre_size=0): - """ - Create new persistent map, inserts all elements in initial into the newly created map. - The optional argument pre_size may be used to specify an initial size of the underlying bucket vector. This - may have a positive performance impact in the cases where you know beforehand that a large number of elements - will be inserted into the map eventually since it will reduce the number of reallocations required. - - >>> pmap({'a': 13, 'b': 14}) - pmap({'b': 14, 'a': 13}) - """ - if not initial: - return _EMPTY_PMAP - - return _turbo_mapping(initial, pre_size) - - -def m(**kwargs): - """ - Creates a new persitent map. Inserts all key value arguments into the newly created map. - - >>> m(a=13, b=14) - pmap({'b': 14, 'a': 13}) - """ - return pmap(kwargs) diff --git a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_precord.py b/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_precord.py deleted file mode 100644 index ec8d32c3..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_precord.py +++ /dev/null @@ -1,169 +0,0 @@ -import six -from pyrsistent._checked_types import CheckedType, _restore_pickle, InvariantException, store_invariants -from pyrsistent._field_common import ( - set_fields, check_type, is_field_ignore_extra_complaint, PFIELD_NO_INITIAL, serialize, check_global_invariants -) -from pyrsistent._pmap import PMap, pmap - - -class _PRecordMeta(type): - def __new__(mcs, name, bases, dct): - set_fields(dct, bases, name='_precord_fields') - store_invariants(dct, bases, '_precord_invariants', '__invariant__') - - dct['_precord_mandatory_fields'] = \ - set(name for name, field in dct['_precord_fields'].items() if field.mandatory) - - dct['_precord_initial_values'] = \ - dict((k, field.initial) for k, field in dct['_precord_fields'].items() if field.initial is not PFIELD_NO_INITIAL) - - - dct['__slots__'] = () - - return super(_PRecordMeta, mcs).__new__(mcs, name, bases, dct) - - -@six.add_metaclass(_PRecordMeta) -class PRecord(PMap, CheckedType): - """ - A PRecord is a PMap with a fixed set of specified fields. Records are declared as python classes inheriting - from PRecord. Because it is a PMap it has full support for all Mapping methods such as iteration and element - access using subscript notation. - - More documentation and examples of PRecord usage is available at https://github.com/tobgu/pyrsistent - """ - def __new__(cls, **kwargs): - # Hack total! If these two special attributes exist that means we can create - # ourselves. Otherwise we need to go through the Evolver to create the structures - # for us. 
- if '_precord_size' in kwargs and '_precord_buckets' in kwargs: - return super(PRecord, cls).__new__(cls, kwargs['_precord_size'], kwargs['_precord_buckets']) - - factory_fields = kwargs.pop('_factory_fields', None) - ignore_extra = kwargs.pop('_ignore_extra', False) - - initial_values = kwargs - if cls._precord_initial_values: - initial_values = dict((k, v() if callable(v) else v) - for k, v in cls._precord_initial_values.items()) - initial_values.update(kwargs) - - e = _PRecordEvolver(cls, pmap(), _factory_fields=factory_fields, _ignore_extra=ignore_extra) - for k, v in initial_values.items(): - e[k] = v - - return e.persistent() - - def set(self, *args, **kwargs): - """ - Set a field in the record. This set function differs slightly from that in the PMap - class. First of all it accepts key-value pairs. Second it accepts multiple key-value - pairs to perform one, atomic, update of multiple fields. - """ - - # The PRecord set() can accept kwargs since all fields that have been declared are - # valid python identifiers. Also allow multiple fields to be set in one operation. - if args: - return super(PRecord, self).set(args[0], args[1]) - - return self.update(kwargs) - - def evolver(self): - """ - Returns an evolver of this object. - """ - return _PRecordEvolver(self.__class__, self) - - def __repr__(self): - return "{0}({1})".format(self.__class__.__name__, - ', '.join('{0}={1}'.format(k, repr(v)) for k, v in self.items())) - - @classmethod - def create(cls, kwargs, _factory_fields=None, ignore_extra=False): - """ - Factory method. Will create a new PRecord of the current type and assign the values - specified in kwargs. - - :param ignore_extra: A boolean which when set to True will ignore any keys which appear in kwargs that are not - in the set of fields on the PRecord. - """ - if isinstance(kwargs, cls): - return kwargs - - if ignore_extra: - kwargs = {k: kwargs[k] for k in cls._precord_fields if k in kwargs} - - return cls(_factory_fields=_factory_fields, _ignore_extra=ignore_extra, **kwargs) - - def __reduce__(self): - # Pickling support - return _restore_pickle, (self.__class__, dict(self),) - - def serialize(self, format=None): - """ - Serialize the current PRecord using custom serializer functions for fields where - such have been supplied. 
- """ - return dict((k, serialize(self._precord_fields[k].serializer, format, v)) for k, v in self.items()) - - -class _PRecordEvolver(PMap._Evolver): - __slots__ = ('_destination_cls', '_invariant_error_codes', '_missing_fields', '_factory_fields', '_ignore_extra') - - def __init__(self, cls, original_pmap, _factory_fields=None, _ignore_extra=False): - super(_PRecordEvolver, self).__init__(original_pmap) - self._destination_cls = cls - self._invariant_error_codes = [] - self._missing_fields = [] - self._factory_fields = _factory_fields - self._ignore_extra = _ignore_extra - - def __setitem__(self, key, original_value): - self.set(key, original_value) - - def set(self, key, original_value): - field = self._destination_cls._precord_fields.get(key) - if field: - if self._factory_fields is None or field in self._factory_fields: - try: - if is_field_ignore_extra_complaint(PRecord, field, self._ignore_extra): - value = field.factory(original_value, ignore_extra=self._ignore_extra) - else: - value = field.factory(original_value) - except InvariantException as e: - self._invariant_error_codes += e.invariant_errors - self._missing_fields += e.missing_fields - return self - else: - value = original_value - - check_type(self._destination_cls, field, key, value) - - is_ok, error_code = field.invariant(value) - if not is_ok: - self._invariant_error_codes.append(error_code) - - return super(_PRecordEvolver, self).set(key, value) - else: - raise AttributeError("'{0}' is not among the specified fields for {1}".format(key, self._destination_cls.__name__)) - - def persistent(self): - cls = self._destination_cls - is_dirty = self.is_dirty() - pm = super(_PRecordEvolver, self).persistent() - if is_dirty or not isinstance(pm, cls): - result = cls(_precord_buckets=pm._buckets, _precord_size=pm._size) - else: - result = pm - - if cls._precord_mandatory_fields: - self._missing_fields += tuple('{0}.{1}'.format(cls.__name__, f) for f - in (cls._precord_mandatory_fields - set(result.keys()))) - - if self._invariant_error_codes or self._missing_fields: - raise InvariantException(tuple(self._invariant_error_codes), tuple(self._missing_fields), - 'Field invariant failed') - - check_global_invariants(result, cls._precord_invariants) - - return result diff --git a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_pset.py b/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_pset.py deleted file mode 100644 index a972ec53..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_pset.py +++ /dev/null @@ -1,229 +0,0 @@ -from ._compat import Set, Hashable -import sys -from pyrsistent._pmap import pmap - -PY2 = sys.version_info[0] < 3 - - -class PSet(object): - """ - Persistent set implementation. Built on top of the persistent map. The set supports all operations - in the Set protocol and is Hashable. - - Do not instantiate directly, instead use the factory functions :py:func:`s` or :py:func:`pset` - to create an instance. - - Random access and insert is log32(n) where n is the size of the set. 
- - Some examples: - - >>> s = pset([1, 2, 3, 1]) - >>> s2 = s.add(4) - >>> s3 = s2.remove(2) - >>> s - pset([1, 2, 3]) - >>> s2 - pset([1, 2, 3, 4]) - >>> s3 - pset([1, 3, 4]) - """ - __slots__ = ('_map', '__weakref__') - - def __new__(cls, m): - self = super(PSet, cls).__new__(cls) - self._map = m - return self - - def __contains__(self, element): - return element in self._map - - def __iter__(self): - return iter(self._map) - - def __len__(self): - return len(self._map) - - def __repr__(self): - if PY2 or not self: - return 'p' + str(set(self)) - - return 'pset([{0}])'.format(str(set(self))[1:-1]) - - def __str__(self): - return self.__repr__() - - def __hash__(self): - return hash(self._map) - - def __reduce__(self): - # Pickling support - return pset, (list(self),) - - @classmethod - def _from_iterable(cls, it, pre_size=8): - return PSet(pmap(dict((k, True) for k in it), pre_size=pre_size)) - - def add(self, element): - """ - Return a new PSet with element added - - >>> s1 = s(1, 2) - >>> s1.add(3) - pset([1, 2, 3]) - """ - return self.evolver().add(element).persistent() - - def update(self, iterable): - """ - Return a new PSet with elements in iterable added - - >>> s1 = s(1, 2) - >>> s1.update([3, 4, 4]) - pset([1, 2, 3, 4]) - """ - e = self.evolver() - for element in iterable: - e.add(element) - - return e.persistent() - - def remove(self, element): - """ - Return a new PSet with element removed. Raises KeyError if element is not present. - - >>> s1 = s(1, 2) - >>> s1.remove(2) - pset([1]) - """ - if element in self._map: - return self.evolver().remove(element).persistent() - - raise KeyError("Element '%s' not present in PSet" % element) - - def discard(self, element): - """ - Return a new PSet with element removed. Returns itself if element is not present. - """ - if element in self._map: - return self.evolver().remove(element).persistent() - - return self - - class _Evolver(object): - __slots__ = ('_original_pset', '_pmap_evolver') - - def __init__(self, original_pset): - self._original_pset = original_pset - self._pmap_evolver = original_pset._map.evolver() - - def add(self, element): - self._pmap_evolver[element] = True - return self - - def remove(self, element): - del self._pmap_evolver[element] - return self - - def is_dirty(self): - return self._pmap_evolver.is_dirty() - - def persistent(self): - if not self.is_dirty(): - return self._original_pset - - return PSet(self._pmap_evolver.persistent()) - - def __len__(self): - return len(self._pmap_evolver) - - def copy(self): - return self - - def evolver(self): - """ - Create a new evolver for this pset. For a discussion on evolvers in general see the - documentation for the pvector evolver. - - Create the evolver and perform various mutating updates to it: - - >>> s1 = s(1, 2, 3) - >>> e = s1.evolver() - >>> _ = e.add(4) - >>> len(e) - 4 - >>> _ = e.remove(1) - - The underlying pset remains the same: - - >>> s1 - pset([1, 2, 3]) - - The changes are kept in the evolver. An updated pmap can be created using the - persistent() function on the evolver. - - >>> s2 = e.persistent() - >>> s2 - pset([2, 3, 4]) - - The new pset will share data with the original pset in the same way that would have - been done if only using operations on the pset. - """ - return PSet._Evolver(self) - - # All the operations and comparisons you would expect on a set. - # - # This is not very beautiful. If we avoid inheriting from PSet we can use the - # __slots__ concepts (which requires a new style class) and hopefully save some memory. 
- __le__ = Set.__le__ - __lt__ = Set.__lt__ - __gt__ = Set.__gt__ - __ge__ = Set.__ge__ - __eq__ = Set.__eq__ - __ne__ = Set.__ne__ - - __and__ = Set.__and__ - __or__ = Set.__or__ - __sub__ = Set.__sub__ - __xor__ = Set.__xor__ - - issubset = __le__ - issuperset = __ge__ - union = __or__ - intersection = __and__ - difference = __sub__ - symmetric_difference = __xor__ - - isdisjoint = Set.isdisjoint - -Set.register(PSet) -Hashable.register(PSet) - -_EMPTY_PSET = PSet(pmap()) - - -def pset(iterable=(), pre_size=8): - """ - Creates a persistent set from iterable. Optionally takes a sizing parameter equivalent to that - used for :py:func:`pmap`. - - >>> s1 = pset([1, 2, 3, 2]) - >>> s1 - pset([1, 2, 3]) - """ - if not iterable: - return _EMPTY_PSET - - return PSet._from_iterable(iterable, pre_size=pre_size) - - -def s(*elements): - """ - Create a persistent set. - - Takes an arbitrary number of arguments to insert into the new set. - - >>> s1 = s(1, 2, 3, 2) - >>> s1 - pset([1, 2, 3]) - """ - return pset(elements) diff --git a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_pvector.py b/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_pvector.py deleted file mode 100644 index 82232782..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_pvector.py +++ /dev/null @@ -1,713 +0,0 @@ -from abc import abstractmethod, ABCMeta -from ._compat import Sequence, Hashable -from numbers import Integral -import operator -import six -from pyrsistent._transformations import transform - - -def _bitcount(val): - return bin(val).count("1") - -BRANCH_FACTOR = 32 -BIT_MASK = BRANCH_FACTOR - 1 -SHIFT = _bitcount(BIT_MASK) - - -def compare_pvector(v, other, operator): - return operator(v.tolist(), other.tolist() if isinstance(other, PVector) else other) - - -def _index_or_slice(index, stop): - if stop is None: - return index - - return slice(index, stop) - - -class PythonPVector(object): - """ - Support structure for PVector that implements structural sharing for vectors using a trie. - """ - __slots__ = ('_count', '_shift', '_root', '_tail', '_tail_offset', '__weakref__') - - def __new__(cls, count, shift, root, tail): - self = super(PythonPVector, cls).__new__(cls) - self._count = count - self._shift = shift - self._root = root - self._tail = tail - - # Derived attribute stored for performance - self._tail_offset = self._count - len(self._tail) - return self - - def __len__(self): - return self._count - - def __getitem__(self, index): - if isinstance(index, slice): - # There are more conditions than the below where it would be OK to - # return ourselves, implement those... - if index.start is None and index.stop is None and index.step is None: - return self - - # This is a bit nasty realizing the whole structure as a list before - # slicing it but it is the fastest way I've found to date, and it's easy :-) - return _EMPTY_PVECTOR.extend(self.tolist()[index]) - - if index < 0: - index += self._count - - return PythonPVector._node_for(self, index)[index & BIT_MASK] - - def __add__(self, other): - return self.extend(other) - - def __repr__(self): - return 'pvector({0})'.format(str(self.tolist())) - - def __str__(self): - return self.__repr__() - - def __iter__(self): - # This is kind of lazy and will produce some memory overhead but it is the fasted method - # by far of those tried since it uses the speed of the built in python list directly. 
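Since PSet simply delegates to the abstract Set protocol as listed above, the inherited operators behave as expected; a sketch assuming pyrsistent is installed:

from pyrsistent import pset, s

evens = pset(range(0, 10, 2))
primes = s(2, 3, 5, 7)
assert evens & primes == s(2)                     # operators come from the Set ABC
assert (primes - evens).issubset(s(3, 5, 7, 11))
assert 4 in evens.discard(99)                     # discarding a missing element is a no-op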
- return iter(self.tolist()) - - def __ne__(self, other): - return not self.__eq__(other) - - def __eq__(self, other): - return self is other or (hasattr(other, '__len__') and self._count == len(other)) and compare_pvector(self, other, operator.eq) - - def __gt__(self, other): - return compare_pvector(self, other, operator.gt) - - def __lt__(self, other): - return compare_pvector(self, other, operator.lt) - - def __ge__(self, other): - return compare_pvector(self, other, operator.ge) - - def __le__(self, other): - return compare_pvector(self, other, operator.le) - - def __mul__(self, times): - if times <= 0 or self is _EMPTY_PVECTOR: - return _EMPTY_PVECTOR - - if times == 1: - return self - - return _EMPTY_PVECTOR.extend(times * self.tolist()) - - __rmul__ = __mul__ - - def _fill_list(self, node, shift, the_list): - if shift: - shift -= SHIFT - for n in node: - self._fill_list(n, shift, the_list) - else: - the_list.extend(node) - - def tolist(self): - """ - The fastest way to convert the vector into a python list. - """ - the_list = [] - self._fill_list(self._root, self._shift, the_list) - the_list.extend(self._tail) - return the_list - - def _totuple(self): - """ - Returns the content as a python tuple. - """ - return tuple(self.tolist()) - - def __hash__(self): - # Taking the easy way out again... - return hash(self._totuple()) - - def transform(self, *transformations): - return transform(self, transformations) - - def __reduce__(self): - # Pickling support - return pvector, (self.tolist(),) - - def mset(self, *args): - if len(args) % 2: - raise TypeError("mset expected an even number of arguments") - - evolver = self.evolver() - for i in range(0, len(args), 2): - evolver[args[i]] = args[i+1] - - return evolver.persistent() - - class Evolver(object): - __slots__ = ('_count', '_shift', '_root', '_tail', '_tail_offset', '_dirty_nodes', - '_extra_tail', '_cached_leafs', '_orig_pvector') - - def __init__(self, v): - self._reset(v) - - def __getitem__(self, index): - if not isinstance(index, Integral): - raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__) - - if index < 0: - index += self._count + len(self._extra_tail) - - if self._count <= index < self._count + len(self._extra_tail): - return self._extra_tail[index - self._count] - - return PythonPVector._node_for(self, index)[index & BIT_MASK] - - def _reset(self, v): - self._count = v._count - self._shift = v._shift - self._root = v._root - self._tail = v._tail - self._tail_offset = v._tail_offset - self._dirty_nodes = {} - self._cached_leafs = {} - self._extra_tail = [] - self._orig_pvector = v - - def append(self, element): - self._extra_tail.append(element) - return self - - def extend(self, iterable): - self._extra_tail.extend(iterable) - return self - - def set(self, index, val): - self[index] = val - return self - - def __setitem__(self, index, val): - if not isinstance(index, Integral): - raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__) - - if index < 0: - index += self._count + len(self._extra_tail) - - if 0 <= index < self._count: - node = self._cached_leafs.get(index >> SHIFT) - if node: - node[index & BIT_MASK] = val - elif index >= self._tail_offset: - if id(self._tail) not in self._dirty_nodes: - self._tail = list(self._tail) - self._dirty_nodes[id(self._tail)] = True - self._cached_leafs[index >> SHIFT] = self._tail - self._tail[index & BIT_MASK] = val - else: - self._root = self._do_set(self._shift, self._root, index, val) - elif self._count <= index 
< self._count + len(self._extra_tail): - self._extra_tail[index - self._count] = val - elif index == self._count + len(self._extra_tail): - self._extra_tail.append(val) - else: - raise IndexError("Index out of range: %s" % (index,)) - - def _do_set(self, level, node, i, val): - if id(node) in self._dirty_nodes: - ret = node - else: - ret = list(node) - self._dirty_nodes[id(ret)] = True - - if level == 0: - ret[i & BIT_MASK] = val - self._cached_leafs[i >> SHIFT] = ret - else: - sub_index = (i >> level) & BIT_MASK # >>> - ret[sub_index] = self._do_set(level - SHIFT, node[sub_index], i, val) - - return ret - - def delete(self, index): - del self[index] - return self - - def __delitem__(self, key): - if self._orig_pvector: - # All structural sharing bets are off, base evolver on _extra_tail only - l = PythonPVector(self._count, self._shift, self._root, self._tail).tolist() - l.extend(self._extra_tail) - self._reset(_EMPTY_PVECTOR) - self._extra_tail = l - - del self._extra_tail[key] - - def persistent(self): - result = self._orig_pvector - if self.is_dirty(): - result = PythonPVector(self._count, self._shift, self._root, self._tail).extend(self._extra_tail) - self._reset(result) - - return result - - def __len__(self): - return self._count + len(self._extra_tail) - - def is_dirty(self): - return bool(self._dirty_nodes or self._extra_tail) - - def evolver(self): - return PythonPVector.Evolver(self) - - def set(self, i, val): - # This method could be implemented by a call to mset() but doing so would cause - # a ~5 X performance penalty on PyPy (considered the primary platform for this implementation - # of PVector) so we're keeping this implementation for now. - - if not isinstance(i, Integral): - raise TypeError("'%s' object cannot be interpreted as an index" % type(i).__name__) - - if i < 0: - i += self._count - - if 0 <= i < self._count: - if i >= self._tail_offset: - new_tail = list(self._tail) - new_tail[i & BIT_MASK] = val - return PythonPVector(self._count, self._shift, self._root, new_tail) - - return PythonPVector(self._count, self._shift, self._do_set(self._shift, self._root, i, val), self._tail) - - if i == self._count: - return self.append(val) - - raise IndexError("Index out of range: %s" % (i,)) - - def _do_set(self, level, node, i, val): - ret = list(node) - if level == 0: - ret[i & BIT_MASK] = val - else: - sub_index = (i >> level) & BIT_MASK # >>> - ret[sub_index] = self._do_set(level - SHIFT, node[sub_index], i, val) - - return ret - - @staticmethod - def _node_for(pvector_like, i): - if 0 <= i < pvector_like._count: - if i >= pvector_like._tail_offset: - return pvector_like._tail - - node = pvector_like._root - for level in range(pvector_like._shift, 0, -SHIFT): - node = node[(i >> level) & BIT_MASK] # >>> - - return node - - raise IndexError("Index out of range: %s" % (i,)) - - def _create_new_root(self): - new_shift = self._shift - - # Overflow root? 
- if (self._count >> SHIFT) > (1 << self._shift): # >>> - new_root = [self._root, self._new_path(self._shift, self._tail)] - new_shift += SHIFT - else: - new_root = self._push_tail(self._shift, self._root, self._tail) - - return new_root, new_shift - - def append(self, val): - if len(self._tail) < BRANCH_FACTOR: - new_tail = list(self._tail) - new_tail.append(val) - return PythonPVector(self._count + 1, self._shift, self._root, new_tail) - - # Full tail, push into tree - new_root, new_shift = self._create_new_root() - return PythonPVector(self._count + 1, new_shift, new_root, [val]) - - def _new_path(self, level, node): - if level == 0: - return node - - return [self._new_path(level - SHIFT, node)] - - def _mutating_insert_tail(self): - self._root, self._shift = self._create_new_root() - self._tail = [] - - def _mutating_fill_tail(self, offset, sequence): - max_delta_len = BRANCH_FACTOR - len(self._tail) - delta = sequence[offset:offset + max_delta_len] - self._tail.extend(delta) - delta_len = len(delta) - self._count += delta_len - return offset + delta_len - - def _mutating_extend(self, sequence): - offset = 0 - sequence_len = len(sequence) - while offset < sequence_len: - offset = self._mutating_fill_tail(offset, sequence) - if len(self._tail) == BRANCH_FACTOR: - self._mutating_insert_tail() - - self._tail_offset = self._count - len(self._tail) - - def extend(self, obj): - # Mutates the new vector directly for efficiency but that's only an - # implementation detail; once it is returned it should be considered immutable - l = obj.tolist() if isinstance(obj, PythonPVector) else list(obj) - if l: - new_vector = self.append(l[0]) - new_vector._mutating_extend(l[1:]) - return new_vector - - return self - - def _push_tail(self, level, parent, tail_node): - """ - if parent is leaf, insert node, - else does it map to an existing child? -> - node_to_insert = push node one more level - else alloc new path - - return node_to_insert placed in copy of parent - """ - ret = list(parent) - - if level == SHIFT: - ret.append(tail_node) - return ret - - sub_index = ((self._count - 1) >> level) & BIT_MASK # >>> - if len(parent) > sub_index: - ret[sub_index] = self._push_tail(level - SHIFT, parent[sub_index], tail_node) - return ret - - ret.append(self._new_path(level - SHIFT, tail_node)) - return ret - - def index(self, value, *args, **kwargs): - return self.tolist().index(value, *args, **kwargs) - - def count(self, value): - return self.tolist().count(value) - - def delete(self, index, stop=None): - l = self.tolist() - del l[_index_or_slice(index, stop)] - return _EMPTY_PVECTOR.extend(l) - - def remove(self, value): - l = self.tolist() - l.remove(value) - return _EMPTY_PVECTOR.extend(l) - -@six.add_metaclass(ABCMeta) -class PVector(object): - """ - Persistent vector implementation. Meant as a replacement for the cases where you would normally - use a Python list. - - Do not instantiate directly; instead use the factory functions :py:func:`v` and :py:func:`pvector` to - create an instance. - - Heavily influenced by the persistent vector available in Clojure. Initially this was more or - less just a port of the Java code for the Clojure vector. It has since been modified and to - some extent optimized for usage in Python. - - The vector is organized as a trie; any mutating method will return a new vector that contains the changes. No - updates are done to the original vector. Structural sharing between vectors is applied where possible to save - space and to avoid making complete copies. 
- - This structure corresponds most closely to the built-in list type and is intended as a replacement. Where the - semantics are the same (more or less) the same function names have been used, but for some cases it is not possible, - for example assignments. - - The PVector implements the Sequence protocol and is Hashable. - - Inserts are amortized O(1). Random access is log32(n) where n is the size of the vector. - - The following are examples of some common operations on persistent vectors: - - >>> p = v(1, 2, 3) - >>> p2 = p.append(4) - >>> p3 = p2.extend([5, 6, 7]) - >>> p - pvector([1, 2, 3]) - >>> p2 - pvector([1, 2, 3, 4]) - >>> p3 - pvector([1, 2, 3, 4, 5, 6, 7]) - >>> p3[5] - 6 - >>> p.set(1, 99) - pvector([1, 99, 3]) - >>> - """ - - @abstractmethod - def __len__(self): - """ - >>> len(v(1, 2, 3)) - 3 - """ - - @abstractmethod - def __getitem__(self, index): - """ - Get value at index. Full slicing support. - - >>> v1 = v(5, 6, 7, 8) - >>> v1[2] - 7 - >>> v1[1:3] - pvector([6, 7]) - """ - - @abstractmethod - def __add__(self, other): - """ - >>> v1 = v(1, 2) - >>> v2 = v(3, 4) - >>> v1 + v2 - pvector([1, 2, 3, 4]) - """ - - @abstractmethod - def __mul__(self, times): - """ - >>> v1 = v(1, 2) - >>> 3 * v1 - pvector([1, 2, 1, 2, 1, 2]) - """ - - @abstractmethod - def __hash__(self): - """ - >>> v1 = v(1, 2, 3) - >>> v2 = v(1, 2, 3) - >>> hash(v1) == hash(v2) - True - """ - - @abstractmethod - def evolver(self): - """ - Create a new evolver for this pvector. The evolver acts as a mutable view of the vector - with "transaction-like" semantics. No part of the underlying vector is updated; it is still - fully immutable. Furthermore, multiple evolvers created from the same pvector do not - interfere with each other. - - You may want to use an evolver instead of working directly with the pvector in the - following cases: - - * Multiple updates are done to the same vector and the intermediate results are of no - interest. In this case using an evolver may be more efficient and easier to work with. - * You need to pass a vector into a legacy function, or a function that you have no control - over, which performs in-place mutations of lists. In this case pass an evolver instance - instead and then create a new pvector from the evolver once the function returns. - - The following example illustrates a typical workflow when working with evolvers. It also - displays most of the API (which is kept small by design; you should not be tempted to - use evolvers in excess ;-)). - - Create the evolver and perform various mutating updates to it: - - >>> v1 = v(1, 2, 3, 4, 5) - >>> e = v1.evolver() - >>> e[1] = 22 - >>> _ = e.append(6) - >>> _ = e.extend([7, 8, 9]) - >>> e[8] += 1 - >>> len(e) - 9 - - The underlying pvector remains the same: - - >>> v1 - pvector([1, 2, 3, 4, 5]) - - The changes are kept in the evolver. An updated pvector can be created using the - persistent() function on the evolver. - - >>> v2 = e.persistent() - >>> v2 - pvector([1, 22, 3, 4, 5, 6, 7, 8, 10]) - - The new pvector will share data with the original pvector in the same way that would have - been done if only using operations on the pvector. - """ - - @abstractmethod - def mset(self, *args): - """ - Return a new vector with elements in specified positions replaced by values (multi set). - - Elements on even positions in the argument list are interpreted as indexes while - elements on odd positions are considered values. 
- - >>> v1 = v(1, 2, 3) - >>> v1.mset(0, 11, 2, 33) - pvector([11, 2, 33]) - """ - - @abstractmethod - def set(self, i, val): - """ - Return a new vector with element at position i replaced with val. The original vector remains unchanged. - - Setting a value one step beyond the end of the vector is equal to appending. Setting beyond that will - result in an IndexError. - - >>> v1 = v(1, 2, 3) - >>> v1.set(1, 4) - pvector([1, 4, 3]) - >>> v1.set(3, 4) - pvector([1, 2, 3, 4]) - >>> v1.set(-1, 4) - pvector([1, 2, 4]) - """ - - @abstractmethod - def append(self, val): - """ - Return a new vector with val appended. - - >>> v1 = v(1, 2) - >>> v1.append(3) - pvector([1, 2, 3]) - """ - - @abstractmethod - def extend(self, obj): - """ - Return a new vector with all values in obj appended to it. Obj may be another - PVector or any other Iterable. - - >>> v1 = v(1, 2, 3) - >>> v1.extend([4, 5]) - pvector([1, 2, 3, 4, 5]) - """ - - @abstractmethod - def index(self, value, *args, **kwargs): - """ - Return first index of value. Additional indexes may be supplied to limit the search to a - sub range of the vector. - - >>> v1 = v(1, 2, 3, 4, 3) - >>> v1.index(3) - 2 - >>> v1.index(3, 3, 5) - 4 - """ - - @abstractmethod - def count(self, value): - """ - Return the number of times that value appears in the vector. - - >>> v1 = v(1, 4, 3, 4) - >>> v1.count(4) - 2 - """ - - @abstractmethod - def transform(self, *transformations): - """ - Transform arbitrarily complex combinations of PVectors and PMaps. A transformation - consists of two parts. One match expression that specifies which elements to transform - and one transformation function that performs the actual transformation. - - >>> from pyrsistent import freeze, ny - >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'}, - ... {'author': 'Steve', 'content': 'A slightly longer article'}], - ... 'weather': {'temperature': '11C', 'wind': '5m/s'}}) - >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c) - >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c) - >>> very_short_news.articles[0].content - 'A short article' - >>> very_short_news.articles[1].content - 'A slightly long...' - - When nothing has been transformed the original data structure is kept - - >>> short_news is news_paper - True - >>> very_short_news is news_paper - False - >>> very_short_news.articles[0] is news_paper.articles[0] - True - """ - - @abstractmethod - def delete(self, index, stop=None): - """ - Delete a portion of the vector by index or range. - - >>> v1 = v(1, 2, 3, 4, 5) - >>> v1.delete(1) - pvector([1, 3, 4, 5]) - >>> v1.delete(1, 3) - pvector([1, 4, 5]) - """ - - @abstractmethod - def remove(self, value): - """ - Remove the first occurrence of a value from the vector. - - >>> v1 = v(1, 2, 3, 2, 1) - >>> v2 = v1.remove(1) - >>> v2 - pvector([2, 3, 2, 1]) - >>> v2.remove(1) - pvector([2, 3, 2]) - """ - - -_EMPTY_PVECTOR = PythonPVector(0, SHIFT, [], []) -PVector.register(PythonPVector) -Sequence.register(PVector) -Hashable.register(PVector) - -def python_pvector(iterable=()): - """ - Create a new persistent vector containing the elements in iterable. 
- - >>> v1 = pvector([1, 2, 3]) - >>> v1 - pvector([1, 2, 3]) - """ - return _EMPTY_PVECTOR.extend(iterable) - -try: - # Use the C extension as underlying trie implementation if it is available - import os - if os.environ.get('PYRSISTENT_NO_C_EXTENSION'): - pvector = python_pvector - else: - from pvectorc import pvector - PVector.register(type(pvector())) -except ImportError: - pvector = python_pvector - - -def v(*elements): - """ - Create a new persistent vector containing all parameters to this function. - - >>> v1 = v(1, 2, 3) - >>> v1 - pvector([1, 2, 3]) - """ - return pvector(elements) diff --git a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_toolz.py b/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_toolz.py deleted file mode 100644 index 6643ee86..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_toolz.py +++ /dev/null @@ -1,83 +0,0 @@ -""" -Functionality copied from the toolz package to avoid having -to add toolz as a dependency. - -See https://github.com/pytoolz/toolz/. - -toolz is released under the BSD licence. Below is the licence text -from toolz as it appeared when copying the code. - --------------------------------------------------------------- - -Copyright (c) 2013 Matthew Rocklin - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - a. Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - b. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - c. Neither the name of toolz nor the names of its contributors - may be used to endorse or promote products derived from this software - without specific prior written permission. - - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH -DAMAGE. -""" -import operator -from six.moves import reduce - - -def get_in(keys, coll, default=None, no_default=False): - """ - NB: This is a straight copy of the get_in implementation found in - the toolz library (https://github.com/pytoolz/toolz/). It works - with persistent data structures as well as the corresponding - data structures from the stdlib. - - Returns coll[i0][i1]...[iX] where [i0, i1, ..., iX]==keys. - - If coll[i0][i1]...[iX] cannot be found, returns ``default``, unless - ``no_default`` is specified, in which case it raises KeyError or IndexError. - - ``get_in`` is a generalization of ``operator.getitem`` for nested data - structures such as dictionaries and lists. - >>> from pyrsistent import freeze - >>> transaction = freeze({'name': 'Alice', - ... 'purchase': {'items': ['Apple', 'Orange'], - ... 'costs': [0.50, 1.25]}, - ... 
'credit card': '5555-1234-1234-1234'}) - >>> get_in(['purchase', 'items', 0], transaction) - 'Apple' - >>> get_in(['name'], transaction) - 'Alice' - >>> get_in(['purchase', 'total'], transaction) - >>> get_in(['purchase', 'items', 'apple'], transaction) - >>> get_in(['purchase', 'items', 10], transaction) - >>> get_in(['purchase', 'total'], transaction, 0) - 0 - >>> get_in(['y'], {}, no_default=True) - Traceback (most recent call last): - ... - KeyError: 'y' - """ - try: - return reduce(operator.getitem, keys, coll) - except (KeyError, IndexError, TypeError): - if no_default: - raise - return default \ No newline at end of file diff --git a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_transformations.py b/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_transformations.py deleted file mode 100644 index 61209896..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/_transformations.py +++ /dev/null @@ -1,143 +0,0 @@ -import re -import six -try: - from inspect import Parameter, signature -except ImportError: - signature = None - try: - from inspect import getfullargspec as getargspec - except ImportError: - from inspect import getargspec - - -_EMPTY_SENTINEL = object() - - -def inc(x): - """ Add one to the current value """ - return x + 1 - - -def dec(x): - """ Subtract one from the current value """ - return x - 1 - - -def discard(evolver, key): - """ Discard the element and return a structure without the discarded element """ - try: - del evolver[key] - except KeyError: - pass - - -# Matchers -def rex(expr): - """ Regular expression matcher to use together with transform functions """ - r = re.compile(expr) - return lambda key: isinstance(key, six.string_types) and r.match(key) - - -def ny(_): - """ Matcher that matches any value """ - return True - - -# Support functions -def _chunks(l, n): - for i in range(0, len(l), n): - yield l[i:i + n] - - -def transform(structure, transformations): - r = structure - for path, command in _chunks(transformations, 2): - r = _do_to_path(r, path, command) - return r - - -def _do_to_path(structure, path, command): - if not path: - return command(structure) if callable(command) else command - - kvs = _get_keys_and_values(structure, path[0]) - return _update_structure(structure, kvs, path[1:], command) - - -def _items(structure): - try: - return structure.items() - except AttributeError: - # Support a wider range of structures by adding a transform_items() or similar? - return list(enumerate(structure)) - - -def _get(structure, key, default): - try: - if hasattr(structure, '__getitem__'): - return structure[key] - - return getattr(structure, key) - - except (IndexError, KeyError): - return default - - -def _get_keys_and_values(structure, key_spec): - if callable(key_spec): - # Support predicates as callable objects in the path - arity = _get_arity(key_spec) - if arity == 1: - # Unary predicates are called with the "key" of the path - # - eg a key in a mapping, an index in a sequence. - return [(k, v) for k, v in _items(structure) if key_spec(k)] - elif arity == 2: - # Binary predicates are called with the key and the corresponding - # value. - return [(k, v) for k, v in _items(structure) if key_spec(k, v)] - else: - # Other arities are an error. - raise ValueError( - "callable in transform path must take 1 or 2 arguments" - ) - - # Non-callables are used as-is as a key. 
- return [(key_spec, _get(structure, key_spec, _EMPTY_SENTINEL))] - - -if signature is None: - def _get_arity(f): - argspec = getargspec(f) - return len(argspec.args) - len(argspec.defaults or ()) -else: - def _get_arity(f): - return sum( - 1 - for p - in signature(f).parameters.values() - if p.default is Parameter.empty - and p.kind in (Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD) - ) - - -def _update_structure(structure, kvs, path, command): - from pyrsistent._pmap import pmap - e = structure.evolver() - if not path and command is discard: - # Do this in reverse to avoid index problems with vectors. See #92. - for k, v in reversed(kvs): - discard(e, k) - else: - for k, v in kvs: - is_empty = False - if v is _EMPTY_SENTINEL: - # Allow expansion of structure but make sure to cover the case - # when an empty pmap is added as leaf node. See #154. - is_empty = True - v = pmap() - - result = _do_to_path(v, path, command) - if result is not v or is_empty: - e[k] = result - - return e.persistent() diff --git a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/typing.py b/vendor/poetry-core/poetry/core/_vendor/pyrsistent/typing.py deleted file mode 100644 index 6a86c831..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/typing.py +++ /dev/null @@ -1,80 +0,0 @@ -"""Helpers for use with type annotation. - -Use the empty classes in this module when annotating the types of Pyrsistent -objects, instead of using the actual collection class. - -For example, - - from pyrsistent import pvector - from pyrsistent.typing import PVector - - myvector: PVector[str] = pvector(['a', 'b', 'c']) - -""" -from __future__ import absolute_import - -try: - from typing import Container - from typing import Hashable - from typing import Generic - from typing import Iterable - from typing import Mapping - from typing import Sequence - from typing import Sized - from typing import TypeVar - - __all__ = [ - 'CheckedPMap', - 'CheckedPSet', - 'CheckedPVector', - 'PBag', - 'PDeque', - 'PList', - 'PMap', - 'PSet', - 'PVector', - ] - - T = TypeVar('T') - KT = TypeVar('KT') - VT = TypeVar('VT') - - class CheckedPMap(Mapping[KT, VT], Hashable): - pass - - # PSet.add and PSet.discard have different type signatures than that of Set. - class CheckedPSet(Generic[T], Hashable): - pass - - class CheckedPVector(Sequence[T], Hashable): - pass - - class PBag(Container[T], Iterable[T], Sized, Hashable): - pass - - class PDeque(Sequence[T], Hashable): - pass - - class PList(Sequence[T], Hashable): - pass - - class PMap(Mapping[KT, VT], Hashable): - pass - - # PSet.add and PSet.discard have different type signatures than that of Set. 
- class PSet(Generic[T], Hashable): - pass - - class PVector(Sequence[T], Hashable): - pass - - class PVectorEvolver(Generic[T]): - pass - - class PMapEvolver(Generic[KT, VT]): - pass - - class PSetEvolver(Generic[T]): - pass -except ImportError: - pass diff --git a/vendor/poetry-core/poetry/core/_vendor/six.LICENSE b/vendor/poetry-core/poetry/core/_vendor/six.LICENSE deleted file mode 100644 index de663311..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/six.LICENSE +++ /dev/null @@ -1,18 +0,0 @@ -Copyright (c) 2010-2020 Benjamin Peterson - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/poetry-core/poetry/core/_vendor/six.py b/vendor/poetry-core/poetry/core/_vendor/six.py deleted file mode 100644 index 83f69783..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/six.py +++ /dev/null @@ -1,982 +0,0 @@ -# Copyright (c) 2010-2020 Benjamin Peterson -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -"""Utilities for writing code that runs on Python 2 and 3""" - -from __future__ import absolute_import - -import functools -import itertools -import operator -import sys -import types - -__author__ = "Benjamin Peterson " -__version__ = "1.15.0" - - -# Useful for very coarse version differentiation. 
-PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 -PY34 = sys.version_info[0:2] >= (3, 4) - -if PY3: - string_types = str, - integer_types = int, - class_types = type, - text_type = str - binary_type = bytes - - MAXSIZE = sys.maxsize -else: - string_types = basestring, - integer_types = (int, long) - class_types = (type, types.ClassType) - text_type = unicode - binary_type = str - - if sys.platform.startswith("java"): - # Jython always uses 32 bits. - MAXSIZE = int((1 << 31) - 1) - else: - # It's possible to have sizeof(long) != sizeof(Py_ssize_t). - class X(object): - - def __len__(self): - return 1 << 31 - try: - len(X()) - except OverflowError: - # 32-bit - MAXSIZE = int((1 << 31) - 1) - else: - # 64-bit - MAXSIZE = int((1 << 63) - 1) - del X - - -def _add_doc(func, doc): - """Add documentation to a function.""" - func.__doc__ = doc - - -def _import_module(name): - """Import module, returning the module after the last dot.""" - __import__(name) - return sys.modules[name] - - -class _LazyDescr(object): - - def __init__(self, name): - self.name = name - - def __get__(self, obj, tp): - result = self._resolve() - setattr(obj, self.name, result) # Invokes __set__. - try: - # This is a bit ugly, but it avoids running this again by - # removing this descriptor. - delattr(obj.__class__, self.name) - except AttributeError: - pass - return result - - -class MovedModule(_LazyDescr): - - def __init__(self, name, old, new=None): - super(MovedModule, self).__init__(name) - if PY3: - if new is None: - new = name - self.mod = new - else: - self.mod = old - - def _resolve(self): - return _import_module(self.mod) - - def __getattr__(self, attr): - _module = self._resolve() - value = getattr(_module, attr) - setattr(self, attr, value) - return value - - -class _LazyModule(types.ModuleType): - - def __init__(self, name): - super(_LazyModule, self).__init__(name) - self.__doc__ = self.__class__.__doc__ - - def __dir__(self): - attrs = ["__doc__", "__name__"] - attrs += [attr.name for attr in self._moved_attributes] - return attrs - - # Subclasses should override this - _moved_attributes = [] - - -class MovedAttribute(_LazyDescr): - - def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): - super(MovedAttribute, self).__init__(name) - if PY3: - if new_mod is None: - new_mod = name - self.mod = new_mod - if new_attr is None: - if old_attr is None: - new_attr = name - else: - new_attr = old_attr - self.attr = new_attr - else: - self.mod = old_mod - if old_attr is None: - old_attr = name - self.attr = old_attr - - def _resolve(self): - module = _import_module(self.mod) - return getattr(module, self.attr) - - -class _SixMetaPathImporter(object): - - """ - A meta path importer to import six.moves and its submodules. - - This class implements a PEP302 finder and loader. It should be compatible - with Python 2.5 and all existing versions of Python3 - """ - - def __init__(self, six_module_name): - self.name = six_module_name - self.known_modules = {} - - def _add_module(self, mod, *fullnames): - for fullname in fullnames: - self.known_modules[self.name + "." + fullname] = mod - - def _get_module(self, fullname): - return self.known_modules[self.name + "." 
+ fullname] - - def find_module(self, fullname, path=None): - if fullname in self.known_modules: - return self - return None - - def __get_module(self, fullname): - try: - return self.known_modules[fullname] - except KeyError: - raise ImportError("This loader does not know module " + fullname) - - def load_module(self, fullname): - try: - # in case of a reload - return sys.modules[fullname] - except KeyError: - pass - mod = self.__get_module(fullname) - if isinstance(mod, MovedModule): - mod = mod._resolve() - else: - mod.__loader__ = self - sys.modules[fullname] = mod - return mod - - def is_package(self, fullname): - """ - Return true, if the named module is a package. - - We need this method to get correct spec objects with - Python 3.4 (see PEP451) - """ - return hasattr(self.__get_module(fullname), "__path__") - - def get_code(self, fullname): - """Return None - - Required, if is_package is implemented""" - self.__get_module(fullname) # eventually raises ImportError - return None - get_source = get_code # same as get_code - -_importer = _SixMetaPathImporter(__name__) - - -class _MovedItems(_LazyModule): - - """Lazy loading of moved objects""" - __path__ = [] # mark as package - - -_moved_attributes = [ - MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), - MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), - MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), - MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), - MovedAttribute("intern", "__builtin__", "sys"), - MovedAttribute("map", "itertools", "builtins", "imap", "map"), - MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), - MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), - MovedAttribute("getoutput", "commands", "subprocess"), - MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), - MovedAttribute("reduce", "__builtin__", "functools"), - MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), - MovedAttribute("StringIO", "StringIO", "io"), - MovedAttribute("UserDict", "UserDict", "collections"), - MovedAttribute("UserList", "UserList", "collections"), - MovedAttribute("UserString", "UserString", "collections"), - MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), - MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), - MovedModule("builtins", "__builtin__"), - MovedModule("configparser", "ConfigParser"), - MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"), - MovedModule("copyreg", "copy_reg"), - MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), - MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), - MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"), - MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), - MovedModule("http_cookies", "Cookie", "http.cookies"), - MovedModule("html_entities", "htmlentitydefs", "html.entities"), - MovedModule("html_parser", "HTMLParser", "html.parser"), - MovedModule("http_client", "httplib", "http.client"), - MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), - MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), - MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), - 
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), - MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), - MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), - MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), - MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), - MovedModule("cPickle", "cPickle", "pickle"), - MovedModule("queue", "Queue"), - MovedModule("reprlib", "repr"), - MovedModule("socketserver", "SocketServer"), - MovedModule("_thread", "thread", "_thread"), - MovedModule("tkinter", "Tkinter"), - MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), - MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), - MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), - MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), - MovedModule("tkinter_tix", "Tix", "tkinter.tix"), - MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), - MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), - MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), - MovedModule("tkinter_colorchooser", "tkColorChooser", - "tkinter.colorchooser"), - MovedModule("tkinter_commondialog", "tkCommonDialog", - "tkinter.commondialog"), - MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), - MovedModule("tkinter_font", "tkFont", "tkinter.font"), - MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), - MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", - "tkinter.simpledialog"), - MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), - MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), - MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), - MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), - MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), - MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), -] -# Add windows specific modules. -if sys.platform == "win32": - _moved_attributes += [ - MovedModule("winreg", "_winreg"), - ] - -for attr in _moved_attributes: - setattr(_MovedItems, attr.name, attr) - if isinstance(attr, MovedModule): - _importer._add_module(attr, "moves." 
+ attr.name) -del attr - -_MovedItems._moved_attributes = _moved_attributes - -moves = _MovedItems(__name__ + ".moves") -_importer._add_module(moves, "moves") - - -class Module_six_moves_urllib_parse(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_parse""" - - -_urllib_parse_moved_attributes = [ - MovedAttribute("ParseResult", "urlparse", "urllib.parse"), - MovedAttribute("SplitResult", "urlparse", "urllib.parse"), - MovedAttribute("parse_qs", "urlparse", "urllib.parse"), - MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), - MovedAttribute("urldefrag", "urlparse", "urllib.parse"), - MovedAttribute("urljoin", "urlparse", "urllib.parse"), - MovedAttribute("urlparse", "urlparse", "urllib.parse"), - MovedAttribute("urlsplit", "urlparse", "urllib.parse"), - MovedAttribute("urlunparse", "urlparse", "urllib.parse"), - MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), - MovedAttribute("quote", "urllib", "urllib.parse"), - MovedAttribute("quote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote", "urllib", "urllib.parse"), - MovedAttribute("unquote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), - MovedAttribute("urlencode", "urllib", "urllib.parse"), - MovedAttribute("splitquery", "urllib", "urllib.parse"), - MovedAttribute("splittag", "urllib", "urllib.parse"), - MovedAttribute("splituser", "urllib", "urllib.parse"), - MovedAttribute("splitvalue", "urllib", "urllib.parse"), - MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), - MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), - MovedAttribute("uses_params", "urlparse", "urllib.parse"), - MovedAttribute("uses_query", "urlparse", "urllib.parse"), - MovedAttribute("uses_relative", "urlparse", "urllib.parse"), -] -for attr in _urllib_parse_moved_attributes: - setattr(Module_six_moves_urllib_parse, attr.name, attr) -del attr - -Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes - -_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), - "moves.urllib_parse", "moves.urllib.parse") - - -class Module_six_moves_urllib_error(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_error""" - - -_urllib_error_moved_attributes = [ - MovedAttribute("URLError", "urllib2", "urllib.error"), - MovedAttribute("HTTPError", "urllib2", "urllib.error"), - MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), -] -for attr in _urllib_error_moved_attributes: - setattr(Module_six_moves_urllib_error, attr.name, attr) -del attr - -Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes - -_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), - "moves.urllib_error", "moves.urllib.error") - - -class Module_six_moves_urllib_request(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_request""" - - -_urllib_request_moved_attributes = [ - MovedAttribute("urlopen", "urllib2", "urllib.request"), - MovedAttribute("install_opener", "urllib2", "urllib.request"), - MovedAttribute("build_opener", "urllib2", "urllib.request"), - MovedAttribute("pathname2url", "urllib", "urllib.request"), - MovedAttribute("url2pathname", "urllib", "urllib.request"), - MovedAttribute("getproxies", "urllib", "urllib.request"), - MovedAttribute("Request", "urllib2", "urllib.request"), - MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), - MovedAttribute("HTTPDefaultErrorHandler", 
"urllib2", "urllib.request"), - MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), - MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), - MovedAttribute("BaseHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), - MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), - MovedAttribute("FileHandler", "urllib2", "urllib.request"), - MovedAttribute("FTPHandler", "urllib2", "urllib.request"), - MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), - MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), - MovedAttribute("urlretrieve", "urllib", "urllib.request"), - MovedAttribute("urlcleanup", "urllib", "urllib.request"), - MovedAttribute("URLopener", "urllib", "urllib.request"), - MovedAttribute("FancyURLopener", "urllib", "urllib.request"), - MovedAttribute("proxy_bypass", "urllib", "urllib.request"), - MovedAttribute("parse_http_list", "urllib2", "urllib.request"), - MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), -] -for attr in _urllib_request_moved_attributes: - setattr(Module_six_moves_urllib_request, attr.name, attr) -del attr - -Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes - -_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), - "moves.urllib_request", "moves.urllib.request") - - -class Module_six_moves_urllib_response(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_response""" - - -_urllib_response_moved_attributes = [ - MovedAttribute("addbase", "urllib", "urllib.response"), - MovedAttribute("addclosehook", "urllib", "urllib.response"), - MovedAttribute("addinfo", "urllib", "urllib.response"), - MovedAttribute("addinfourl", "urllib", "urllib.response"), -] -for attr in _urllib_response_moved_attributes: - setattr(Module_six_moves_urllib_response, attr.name, attr) -del attr - -Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes - -_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), - "moves.urllib_response", "moves.urllib.response") - - -class Module_six_moves_urllib_robotparser(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_robotparser""" - - -_urllib_robotparser_moved_attributes = [ - MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), -] -for attr in _urllib_robotparser_moved_attributes: - setattr(Module_six_moves_urllib_robotparser, attr.name, attr) -del attr - -Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes - -_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), - "moves.urllib_robotparser", "moves.urllib.robotparser") - - -class 
Module_six_moves_urllib(types.ModuleType): - - """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" - __path__ = [] # mark as package - parse = _importer._get_module("moves.urllib_parse") - error = _importer._get_module("moves.urllib_error") - request = _importer._get_module("moves.urllib_request") - response = _importer._get_module("moves.urllib_response") - robotparser = _importer._get_module("moves.urllib_robotparser") - - def __dir__(self): - return ['parse', 'error', 'request', 'response', 'robotparser'] - -_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), - "moves.urllib") - - -def add_move(move): - """Add an item to six.moves.""" - setattr(_MovedItems, move.name, move) - - -def remove_move(name): - """Remove item from six.moves.""" - try: - delattr(_MovedItems, name) - except AttributeError: - try: - del moves.__dict__[name] - except KeyError: - raise AttributeError("no such move, %r" % (name,)) - - -if PY3: - _meth_func = "__func__" - _meth_self = "__self__" - - _func_closure = "__closure__" - _func_code = "__code__" - _func_defaults = "__defaults__" - _func_globals = "__globals__" -else: - _meth_func = "im_func" - _meth_self = "im_self" - - _func_closure = "func_closure" - _func_code = "func_code" - _func_defaults = "func_defaults" - _func_globals = "func_globals" - - -try: - advance_iterator = next -except NameError: - def advance_iterator(it): - return it.next() -next = advance_iterator - - -try: - callable = callable -except NameError: - def callable(obj): - return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) - - -if PY3: - def get_unbound_function(unbound): - return unbound - - create_bound_method = types.MethodType - - def create_unbound_method(func, cls): - return func - - Iterator = object -else: - def get_unbound_function(unbound): - return unbound.im_func - - def create_bound_method(func, obj): - return types.MethodType(func, obj, obj.__class__) - - def create_unbound_method(func, cls): - return types.MethodType(func, None, cls) - - class Iterator(object): - - def next(self): - return type(self).__next__(self) - - callable = callable -_add_doc(get_unbound_function, - """Get the function out of a possibly unbound function""") - - -get_method_function = operator.attrgetter(_meth_func) -get_method_self = operator.attrgetter(_meth_self) -get_function_closure = operator.attrgetter(_func_closure) -get_function_code = operator.attrgetter(_func_code) -get_function_defaults = operator.attrgetter(_func_defaults) -get_function_globals = operator.attrgetter(_func_globals) - - -if PY3: - def iterkeys(d, **kw): - return iter(d.keys(**kw)) - - def itervalues(d, **kw): - return iter(d.values(**kw)) - - def iteritems(d, **kw): - return iter(d.items(**kw)) - - def iterlists(d, **kw): - return iter(d.lists(**kw)) - - viewkeys = operator.methodcaller("keys") - - viewvalues = operator.methodcaller("values") - - viewitems = operator.methodcaller("items") -else: - def iterkeys(d, **kw): - return d.iterkeys(**kw) - - def itervalues(d, **kw): - return d.itervalues(**kw) - - def iteritems(d, **kw): - return d.iteritems(**kw) - - def iterlists(d, **kw): - return d.iterlists(**kw) - - viewkeys = operator.methodcaller("viewkeys") - - viewvalues = operator.methodcaller("viewvalues") - - viewitems = operator.methodcaller("viewitems") - -_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") -_add_doc(itervalues, "Return an iterator over the values of a dictionary.") -_add_doc(iteritems, - "Return an iterator over 
the (key, value) pairs of a dictionary.") -_add_doc(iterlists, - "Return an iterator over the (key, [values]) pairs of a dictionary.") - - -if PY3: - def b(s): - return s.encode("latin-1") - - def u(s): - return s - unichr = chr - import struct - int2byte = struct.Struct(">B").pack - del struct - byte2int = operator.itemgetter(0) - indexbytes = operator.getitem - iterbytes = iter - import io - StringIO = io.StringIO - BytesIO = io.BytesIO - del io - _assertCountEqual = "assertCountEqual" - if sys.version_info[1] <= 1: - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" - _assertNotRegex = "assertNotRegexpMatches" - else: - _assertRaisesRegex = "assertRaisesRegex" - _assertRegex = "assertRegex" - _assertNotRegex = "assertNotRegex" -else: - def b(s): - return s - # Workaround for standalone backslash - - def u(s): - return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") - unichr = unichr - int2byte = chr - - def byte2int(bs): - return ord(bs[0]) - - def indexbytes(buf, i): - return ord(buf[i]) - iterbytes = functools.partial(itertools.imap, ord) - import StringIO - StringIO = BytesIO = StringIO.StringIO - _assertCountEqual = "assertItemsEqual" - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" - _assertNotRegex = "assertNotRegexpMatches" -_add_doc(b, """Byte literal""") -_add_doc(u, """Text literal""") - - -def assertCountEqual(self, *args, **kwargs): - return getattr(self, _assertCountEqual)(*args, **kwargs) - - -def assertRaisesRegex(self, *args, **kwargs): - return getattr(self, _assertRaisesRegex)(*args, **kwargs) - - -def assertRegex(self, *args, **kwargs): - return getattr(self, _assertRegex)(*args, **kwargs) - - -def assertNotRegex(self, *args, **kwargs): - return getattr(self, _assertNotRegex)(*args, **kwargs) - - -if PY3: - exec_ = getattr(moves.builtins, "exec") - - def reraise(tp, value, tb=None): - try: - if value is None: - value = tp() - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - finally: - value = None - tb = None - -else: - def exec_(_code_, _globs_=None, _locs_=None): - """Execute code in a namespace.""" - if _globs_ is None: - frame = sys._getframe(1) - _globs_ = frame.f_globals - if _locs_ is None: - _locs_ = frame.f_locals - del frame - elif _locs_ is None: - _locs_ = _globs_ - exec("""exec _code_ in _globs_, _locs_""") - - exec_("""def reraise(tp, value, tb=None): - try: - raise tp, value, tb - finally: - tb = None -""") - - -if sys.version_info[:2] > (3,): - exec_("""def raise_from(value, from_value): - try: - raise value from from_value - finally: - value = None -""") -else: - def raise_from(value, from_value): - raise value - - -print_ = getattr(moves.builtins, "print", None) -if print_ is None: - def print_(*args, **kwargs): - """The new-style print function for Python 2.4 and 2.5.""" - fp = kwargs.pop("file", sys.stdout) - if fp is None: - return - - def write(data): - if not isinstance(data, basestring): - data = str(data) - # If the file has an encoding, encode unicode with it. 
- if (isinstance(fp, file) and - isinstance(data, unicode) and - fp.encoding is not None): - errors = getattr(fp, "errors", None) - if errors is None: - errors = "strict" - data = data.encode(fp.encoding, errors) - fp.write(data) - want_unicode = False - sep = kwargs.pop("sep", None) - if sep is not None: - if isinstance(sep, unicode): - want_unicode = True - elif not isinstance(sep, str): - raise TypeError("sep must be None or a string") - end = kwargs.pop("end", None) - if end is not None: - if isinstance(end, unicode): - want_unicode = True - elif not isinstance(end, str): - raise TypeError("end must be None or a string") - if kwargs: - raise TypeError("invalid keyword arguments to print()") - if not want_unicode: - for arg in args: - if isinstance(arg, unicode): - want_unicode = True - break - if want_unicode: - newline = unicode("\n") - space = unicode(" ") - else: - newline = "\n" - space = " " - if sep is None: - sep = space - if end is None: - end = newline - for i, arg in enumerate(args): - if i: - write(sep) - write(arg) - write(end) -if sys.version_info[:2] < (3, 3): - _print = print_ - - def print_(*args, **kwargs): - fp = kwargs.get("file", sys.stdout) - flush = kwargs.pop("flush", False) - _print(*args, **kwargs) - if flush and fp is not None: - fp.flush() - -_add_doc(reraise, """Reraise an exception.""") - -if sys.version_info[0:2] < (3, 4): - # This does exactly the same what the :func:`py3:functools.update_wrapper` - # function does on Python versions after 3.2. It sets the ``__wrapped__`` - # attribute on ``wrapper`` object and it doesn't raise an error if any of - # the attributes mentioned in ``assigned`` and ``updated`` are missing on - # ``wrapped`` object. - def _update_wrapper(wrapper, wrapped, - assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): - for attr in assigned: - try: - value = getattr(wrapped, attr) - except AttributeError: - continue - else: - setattr(wrapper, attr, value) - for attr in updated: - getattr(wrapper, attr).update(getattr(wrapped, attr, {})) - wrapper.__wrapped__ = wrapped - return wrapper - _update_wrapper.__doc__ = functools.update_wrapper.__doc__ - - def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): - return functools.partial(_update_wrapper, wrapped=wrapped, - assigned=assigned, updated=updated) - wraps.__doc__ = functools.wraps.__doc__ - -else: - wraps = functools.wraps - - -def with_metaclass(meta, *bases): - """Create a base class with a metaclass.""" - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. - class metaclass(type): - - def __new__(cls, name, this_bases, d): - if sys.version_info[:2] >= (3, 7): - # This version introduced PEP 560 that requires a bit - # of extra care (we mimic what is done by __build_class__). 
- resolved_bases = types.resolve_bases(bases) - if resolved_bases is not bases: - d['__orig_bases__'] = bases - else: - resolved_bases = bases - return meta(name, resolved_bases, d) - - @classmethod - def __prepare__(cls, name, this_bases): - return meta.__prepare__(name, bases) - return type.__new__(metaclass, 'temporary_class', (), {}) - - -def add_metaclass(metaclass): - """Class decorator for creating a class with a metaclass.""" - def wrapper(cls): - orig_vars = cls.__dict__.copy() - slots = orig_vars.get('__slots__') - if slots is not None: - if isinstance(slots, str): - slots = [slots] - for slots_var in slots: - orig_vars.pop(slots_var) - orig_vars.pop('__dict__', None) - orig_vars.pop('__weakref__', None) - if hasattr(cls, '__qualname__'): - orig_vars['__qualname__'] = cls.__qualname__ - return metaclass(cls.__name__, cls.__bases__, orig_vars) - return wrapper - - -def ensure_binary(s, encoding='utf-8', errors='strict'): - """Coerce **s** to six.binary_type. - - For Python 2: - - `unicode` -> encoded to `str` - - `str` -> `str` - - For Python 3: - - `str` -> encoded to `bytes` - - `bytes` -> `bytes` - """ - if isinstance(s, binary_type): - return s - if isinstance(s, text_type): - return s.encode(encoding, errors) - raise TypeError("not expecting type '%s'" % type(s)) - - -def ensure_str(s, encoding='utf-8', errors='strict'): - """Coerce *s* to `str`. - - For Python 2: - - `unicode` -> encoded to `str` - - `str` -> `str` - - For Python 3: - - `str` -> `str` - - `bytes` -> decoded to `str` - """ - # Optimization: Fast return for the common case. - if type(s) is str: - return s - if PY2 and isinstance(s, text_type): - return s.encode(encoding, errors) - elif PY3 and isinstance(s, binary_type): - return s.decode(encoding, errors) - elif not isinstance(s, (text_type, binary_type)): - raise TypeError("not expecting type '%s'" % type(s)) - return s - - -def ensure_text(s, encoding='utf-8', errors='strict'): - """Coerce *s* to six.text_type. - - For Python 2: - - `unicode` -> `unicode` - - `str` -> `unicode` - - For Python 3: - - `str` -> `str` - - `bytes` -> decoded to `str` - """ - if isinstance(s, binary_type): - return s.decode(encoding, errors) - elif isinstance(s, text_type): - return s - else: - raise TypeError("not expecting type '%s'" % type(s)) - - -def python_2_unicode_compatible(klass): - """ - A class decorator that defines __unicode__ and __str__ methods under Python 2. - Under Python 3 it does nothing. - - To support Python 2 and 3 with a single code base, define a __str__ method - returning text and apply this decorator to the class. - """ - if PY2: - if '__str__' not in klass.__dict__: - raise ValueError("@python_2_unicode_compatible cannot be applied " - "to %s because it doesn't define __str__()." % - klass.__name__) - klass.__unicode__ = klass.__str__ - klass.__str__ = lambda self: self.__unicode__().encode('utf-8') - return klass - - -# Complete the moves implementation. -# This code is at the end of this module to speed up module loading. -# Turn this module into a package. -__path__ = [] # required for PEP 302 and PEP 451 -__package__ = __name__ # see PEP 366 @ReservedAssignment -if globals().get("__spec__") is not None: - __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable -# Remove other six meta path importers, since they cause problems. This can -# happen if six is removed from sys.modules and then reloaded. (Setuptools does -# this for some reason.) 
-if sys.meta_path: - for i, importer in enumerate(sys.meta_path): - # Here's some real nastiness: Another "instance" of the six module might - # be floating around. Therefore, we can't use isinstance() to check for - # the six meta path importer, since the other six instance will have - # inserted an importer with different class. - if (type(importer).__name__ == "_SixMetaPathImporter" and - importer.name == __name__): - del sys.meta_path[i] - break - del i, importer -# Finally, add the importer to the meta path import hook. -sys.meta_path.append(_importer) diff --git a/vendor/poetry-core/poetry/core/_vendor/tomlkit/LICENSE b/vendor/poetry-core/poetry/core/_vendor/tomlkit/LICENSE deleted file mode 100644 index 44cf2b30..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/tomlkit/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (c) 2018 Sébastien Eustace - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
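The six module deleted above exists solely to let one codebase run on both Python 2 and 3; on a Python 3-only interpreter each of its shims collapses to a builtin construct, which is presumably why the vendored copy is deleted outright here rather than re-vendored. Below is a minimal sketch, assuming Python 3 only, of the two six idioms the deleted pyrsistent sources above actually use (six.add_metaclass in _pvector.py and six.string_types in _transformations.py); Shape, area, and is_text are placeholder names for illustration, not part of the vendored code:

    from abc import ABCMeta, abstractmethod

    # What @six.add_metaclass(ABCMeta) expressed becomes the plain
    # metaclass keyword argument on Python 3:
    class Shape(metaclass=ABCMeta):
        @abstractmethod
        def area(self):
            ...

    # isinstance(x, six.string_types) reduces to a plain str check,
    # since Python 3 has no separate unicode type:
    def is_text(x):
        return isinstance(x, str)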
diff --git a/vendor/poetry-core/poetry/core/_vendor/tomlkit/__init__.py b/vendor/poetry-core/poetry/core/_vendor/tomlkit/__init__.py deleted file mode 100644 index e0a7a542..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/tomlkit/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -from .api import aot -from .api import array -from .api import boolean -from .api import comment -from .api import date -from .api import datetime -from .api import document -from .api import dumps -from .api import float_ -from .api import inline_table -from .api import integer -from .api import item -from .api import key -from .api import key_value -from .api import loads -from .api import nl -from .api import parse -from .api import string -from .api import table -from .api import time -from .api import value -from .api import ws - - -__version__ = "0.7.0" diff --git a/vendor/poetry-core/poetry/core/_vendor/tomlkit/_compat.py b/vendor/poetry-core/poetry/core/_vendor/tomlkit/_compat.py deleted file mode 100644 index 8d3b0ae3..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/tomlkit/_compat.py +++ /dev/null @@ -1,174 +0,0 @@ -import re -import sys - - -try: - from datetime import timezone -except ImportError: - from datetime import datetime - from datetime import timedelta - from datetime import tzinfo - - class timezone(tzinfo): - __slots__ = "_offset", "_name" - - # Sentinel value to disallow None - _Omitted = object() - - def __new__(cls, offset, name=_Omitted): - if not isinstance(offset, timedelta): - raise TypeError("offset must be a timedelta") - if name is cls._Omitted: - if not offset: - return cls.utc - name = None - elif not isinstance(name, str): - raise TypeError("name must be a string") - if not cls._minoffset <= offset <= cls._maxoffset: - raise ValueError( - "offset must be a timedelta " - "strictly between -timedelta(hours=24) and " - "timedelta(hours=24)." - ) - return cls._create(offset, name) - - @classmethod - def _create(cls, offset, name=None): - self = tzinfo.__new__(cls) - self._offset = offset - self._name = name - return self - - def __getinitargs__(self): - """pickle support""" - if self._name is None: - return (self._offset,) - return (self._offset, self._name) - - def __eq__(self, other): - if type(other) != timezone: - return False - return self._offset == other._offset - - def __hash__(self): - return hash(self._offset) - - def __repr__(self): - """Convert to formal string, for repr(). 
- - >>> tz = timezone.utc - >>> repr(tz) - 'datetime.timezone.utc' - >>> tz = timezone(timedelta(hours=-5), 'EST') - >>> repr(tz) - "datetime.timezone(datetime.timedelta(-1, 68400), 'EST')" - """ - if self is self.utc: - return "datetime.timezone.utc" - if self._name is None: - return "%s.%s(%r)" % ( - self.__class__.__module__, - self.__class__.__name__, - self._offset, - ) - return "%s.%s(%r, %r)" % ( - self.__class__.__module__, - self.__class__.__name__, - self._offset, - self._name, - ) - - def __str__(self): - return self.tzname(None) - - def utcoffset(self, dt): - if isinstance(dt, datetime) or dt is None: - return self._offset - raise TypeError( - "utcoffset() argument must be a datetime instance" " or None" - ) - - def tzname(self, dt): - if isinstance(dt, datetime) or dt is None: - if self._name is None: - return self._name_from_offset(self._offset) - return self._name - raise TypeError("tzname() argument must be a datetime instance" " or None") - - def dst(self, dt): - if isinstance(dt, datetime) or dt is None: - return None - raise TypeError("dst() argument must be a datetime instance" " or None") - - def fromutc(self, dt): - if isinstance(dt, datetime): - if dt.tzinfo is not self: - raise ValueError("fromutc: dt.tzinfo " "is not self") - return dt + self._offset - raise TypeError("fromutc() argument must be a datetime instance" " or None") - - _maxoffset = timedelta(hours=23, minutes=59) - _minoffset = -_maxoffset - - @staticmethod - def _name_from_offset(delta): - if not delta: - return "UTC" - if delta < timedelta(0): - sign = "-" - delta = -delta - else: - sign = "+" - hours, rest = divmod(delta, timedelta(hours=1)) - minutes, rest = divmod(rest, timedelta(minutes=1)) - seconds = rest.seconds - microseconds = rest.microseconds - if microseconds: - return ("UTC{}{:02d}:{:02d}:{:02d}.{:06d}").format( - sign, hours, minutes, seconds, microseconds - ) - if seconds: - return "UTC{}{:02d}:{:02d}:{:02d}".format(sign, hours, minutes, seconds) - return "UTC{}{:02d}:{:02d}".format(sign, hours, minutes) - - timezone.utc = timezone._create(timedelta(0)) - timezone.min = timezone._create(timezone._minoffset) - timezone.max = timezone._create(timezone._maxoffset) - - -PY2 = sys.version_info[0] == 2 -PY36 = sys.version_info >= (3, 6) -PY38 = sys.version_info >= (3, 8) - -if PY2: - unicode = unicode - chr = unichr - long = long -else: - unicode = str - chr = chr - long = int - - -if PY36: - OrderedDict = dict -else: - from collections import OrderedDict - - -def decode(string, encodings=None): - if not PY2 and not isinstance(string, bytes): - return string - - if PY2 and isinstance(string, unicode): - return string - - encodings = encodings or ["utf-8", "latin1", "ascii"] - - for encoding in encodings: - try: - return string.decode(encoding) - except (UnicodeEncodeError, UnicodeDecodeError): - pass - - return string.decode(encodings[0], errors="ignore") diff --git a/vendor/poetry-core/poetry/core/_vendor/tomlkit/_utils.py b/vendor/poetry-core/poetry/core/_vendor/tomlkit/_utils.py deleted file mode 100644 index 2ae3e424..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/tomlkit/_utils.py +++ /dev/null @@ -1,144 +0,0 @@ -import re - -from datetime import date -from datetime import datetime -from datetime import time -from datetime import timedelta -from typing import Union - -from ._compat import decode -from ._compat import timezone - - -try: - from collections.abc import Mapping -except ImportError: - from collections import Mapping - - -RFC_3339_LOOSE = re.compile( - "^" - 
r"(([0-9]+)-(\d{2})-(\d{2}))?" # Date - "(" - "([T ])?" # Separator - r"(\d{2}):(\d{2}):(\d{2})(\.([0-9]+))?" # Time - r"((Z)|([\+|\-]([01][0-9]|2[0-3]):([0-5][0-9])))?" # Timezone - ")?" - "$" -) - -RFC_3339_DATETIME = re.compile( - "^" - "([0-9]+)-(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01])" # Date - "[T ]" # Separator - r"([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9]|60)(\.([0-9]+))?" # Time - r"((Z)|([\+|\-]([01][0-9]|2[0-3]):([0-5][0-9])))?" # Timezone - "$" -) - -RFC_3339_DATE = re.compile("^([0-9]+)-(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01])$") - -RFC_3339_TIME = re.compile( - r"^([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9]|60)(\.([0-9]+))?$" -) - -_utc = timezone(timedelta(), "UTC") - - -def parse_rfc3339(string): # type: (str) -> Union[datetime, date, time] - m = RFC_3339_DATETIME.match(string) - if m: - year = int(m.group(1)) - month = int(m.group(2)) - day = int(m.group(3)) - hour = int(m.group(4)) - minute = int(m.group(5)) - second = int(m.group(6)) - microsecond = 0 - - if m.group(7): - microsecond = int(("{:<06s}".format(m.group(8)))[:6]) - - if m.group(9): - # Timezone - tz = m.group(9) - if tz == "Z": - tzinfo = _utc - else: - sign = m.group(11)[0] - hour_offset, minute_offset = int(m.group(12)), int(m.group(13)) - offset = timedelta(seconds=hour_offset * 3600 + minute_offset * 60) - if sign == "-": - offset = -offset - - tzinfo = timezone( - offset, "{}{}:{}".format(sign, m.group(12), m.group(13)) - ) - - return datetime( - year, month, day, hour, minute, second, microsecond, tzinfo=tzinfo - ) - else: - return datetime(year, month, day, hour, minute, second, microsecond) - - m = RFC_3339_DATE.match(string) - if m: - year = int(m.group(1)) - month = int(m.group(2)) - day = int(m.group(3)) - - return date(year, month, day) - - m = RFC_3339_TIME.match(string) - if m: - hour = int(m.group(1)) - minute = int(m.group(2)) - second = int(m.group(3)) - microsecond = 0 - - if m.group(4): - microsecond = int(("{:<06s}".format(m.group(5)))[:6]) - - return time(hour, minute, second, microsecond) - - raise ValueError("Invalid RFC 339 string") - - -_escaped = {"b": "\b", "t": "\t", "n": "\n", "f": "\f", "r": "\r", '"': '"', "\\": "\\"} -_escapes = {v: k for k, v in _escaped.items()} - - -def escape_string(s): - s = decode(s) - - res = [] - start = 0 - - def flush(): - if start != i: - res.append(s[start:i]) - - return i + 1 - - i = 0 - while i < len(s): - c = s[i] - if c in '"\\\n\r\t\b\f': - start = flush() - res.append("\\" + _escapes[c]) - elif ord(c) < 0x20: - start = flush() - res.append("\\u%04x" % ord(c)) - i += 1 - - flush() - - return "".join(res) - - -def merge_dicts(d1, d2): - for k, v in d2.items(): - if k in d1 and isinstance(d1[k], dict) and isinstance(d2[k], Mapping): - merge_dicts(d1[k], d2[k]) - else: - d1[k] = d2[k] diff --git a/vendor/poetry-core/poetry/core/_vendor/tomlkit/api.py b/vendor/poetry-core/poetry/core/_vendor/tomlkit/api.py deleted file mode 100644 index 3de41219..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/tomlkit/api.py +++ /dev/null @@ -1,142 +0,0 @@ -import datetime as _datetime - -from typing import Tuple - -from ._utils import parse_rfc3339 -from .container import Container -from .items import AoT -from .items import Array -from .items import Bool -from .items import Comment -from .items import Date -from .items import DateTime -from .items import Float -from .items import InlineTable -from .items import Integer -from .items import Item as _Item -from .items import Key -from .items import String -from .items import Table -from .items import Time 
-from .items import Trivia -from .items import Whitespace -from .items import item -from .parser import Parser -from .toml_document import TOMLDocument as _TOMLDocument - - -def loads(string): # type: (str) -> _TOMLDocument - """ - Parses a string into a TOMLDocument. - - Alias for parse(). - """ - return parse(string) - - -def dumps(data, sort_keys=False): # type: (_TOMLDocument, bool) -> str - """ - Dumps a TOMLDocument into a string. - """ - if not isinstance(data, _TOMLDocument) and isinstance(data, dict): - data = item(data, _sort_keys=sort_keys) - - return data.as_string() - - -def parse(string): # type: (str) -> _TOMLDocument - """ - Parses a string into a TOMLDocument. - """ - return Parser(string).parse() - - -def document(): # type: () -> _TOMLDocument - """ - Returns a new TOMLDocument instance. - """ - return _TOMLDocument() - - -# Items -def integer(raw): # type: (str) -> Integer - return item(int(raw)) - - -def float_(raw): # type: (str) -> Float - return item(float(raw)) - - -def boolean(raw): # type: (str) -> Bool - return item(raw == "true") - - -def string(raw): # type: (str) -> String - return item(raw) - - -def date(raw): # type: (str) -> Date - value = parse_rfc3339(raw) - if not isinstance(value, _datetime.date): - raise ValueError("date() only accepts date strings.") - - return item(value) - - -def time(raw): # type: (str) -> Time - value = parse_rfc3339(raw) - if not isinstance(value, _datetime.time): - raise ValueError("time() only accepts time strings.") - - return item(value) - - -def datetime(raw): # type: (str) -> DateTime - value = parse_rfc3339(raw) - if not isinstance(value, _datetime.datetime): - raise ValueError("datetime() only accepts datetime strings.") - - return item(value) - - -def array(raw=None): # type: (str) -> Array - if raw is None: - raw = "[]" - - return value(raw) - - -def table(): # type: () -> Table - return Table(Container(), Trivia(), False) - - -def inline_table(): # type: () -> InlineTable - return InlineTable(Container(), Trivia(), new=True) - - -def aot(): # type: () -> AoT - return AoT([]) - - -def key(k): # type: (str) -> Key - return Key(k) - - -def value(raw): # type: (str) -> _Item - return Parser(raw)._parse_value() - - -def key_value(src): # type: (str) -> Tuple[Key, _Item] - return Parser(src)._parse_key_value() - - -def ws(src): # type: (str) -> Whitespace - return Whitespace(src, fixed=True) - - -def nl(): # type: () -> Whitespace - return ws("\n") - - -def comment(string): # type: (str) -> Comment - return Comment(Trivia(comment_ws=" ", comment="# " + string)) diff --git a/vendor/poetry-core/poetry/core/_vendor/tomlkit/container.py b/vendor/poetry-core/poetry/core/_vendor/tomlkit/container.py deleted file mode 100644 index 6386e738..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/tomlkit/container.py +++ /dev/null @@ -1,800 +0,0 @@ -from __future__ import unicode_literals - -import copy - -from typing import Any -from typing import Dict -from typing import Generator -from typing import List -from typing import Optional -from typing import Tuple -from typing import Union - -from ._compat import decode -from ._utils import merge_dicts -from .exceptions import KeyAlreadyPresent -from .exceptions import NonExistentKey -from .exceptions import ParseError -from .exceptions import TOMLKitError -from .items import AoT -from .items import Comment -from .items import Item -from .items import Key -from .items import Null -from .items import Table -from .items import Whitespace -from .items import item as _item - - -_NOT_SET = 
object() - - -class Container(dict): - """ - A container for items within a TOMLDocument. - """ - - def __init__(self, parsed=False): # type: (bool) -> None - self._map = {} # type: Dict[Key, int] - self._body = [] # type: List[Tuple[Optional[Key], Item]] - self._parsed = parsed - self._table_keys = [] - - @property - def body(self): # type: () -> List[Tuple[Optional[Key], Item]] - return self._body - - @property - def value(self): # type: () -> Dict[Any, Any] - d = {} - for k, v in self._body: - if k is None: - continue - - k = k.key - v = v.value - - if isinstance(v, Container): - v = v.value - - if k in d: - merge_dicts(d[k], v) - else: - d[k] = v - - return d - - def parsing(self, parsing): # type: (bool) -> None - self._parsed = parsing - - for k, v in self._body: - if isinstance(v, Table): - v.value.parsing(parsing) - elif isinstance(v, AoT): - for t in v.body: - t.value.parsing(parsing) - - def add( - self, key, item=None - ): # type: (Union[Key, Item, str], Optional[Item]) -> Container - """ - Adds an item to the current Container. - """ - if item is None: - if not isinstance(key, (Comment, Whitespace)): - raise ValueError( - "Non comment/whitespace items must have an associated key" - ) - - key, item = None, key - - return self.append(key, item) - - def append(self, key, item): # type: (Union[Key, str, None], Item) -> Container - if not isinstance(key, Key) and key is not None: - key = Key(key) - - if not isinstance(item, Item): - item = _item(item) - - if isinstance(item, (AoT, Table)) and item.name is None: - item.name = key.key - - if ( - isinstance(item, Table) - and self._body - and not self._parsed - and not item.trivia.indent - ): - item.trivia.indent = "\n" - - if isinstance(item, AoT) and self._body and not self._parsed: - if item and "\n" not in item[0].trivia.indent: - item[0].trivia.indent = "\n" + item[0].trivia.indent - else: - self.append(None, Whitespace("\n")) - - if key is not None and key in self: - current_idx = self._map[key] - if isinstance(current_idx, tuple): - current_body_element = self._body[current_idx[-1]] - else: - current_body_element = self._body[current_idx] - - current = current_body_element[1] - - if isinstance(item, Table): - if not isinstance(current, (Table, AoT)): - raise KeyAlreadyPresent(key) - - if item.is_aot_element(): - # New AoT element found later on - # Adding it to the current AoT - if not isinstance(current, AoT): - current = AoT([current, item], parsed=self._parsed) - - self._replace(key, key, current) - else: - current.append(item) - - return self - elif current.is_aot(): - if not item.is_aot_element(): - # Tried to define a table after an AoT with the same name. 
- raise KeyAlreadyPresent(key) - - current.append(item) - - return self - elif current.is_super_table(): - if item.is_super_table(): - # We need to merge both super tables - if ( - self._table_keys[-1] != current_body_element[0] - or key.is_dotted() - or current_body_element[0].is_dotted() - ): - if not isinstance(current_idx, tuple): - current_idx = (current_idx,) - - self._map[key] = current_idx + (len(self._body),) - self._body.append((key, item)) - self._table_keys.append(key) - - # Building a temporary proxy to check for errors - OutOfOrderTableProxy(self, self._map[key]) - - return self - - for k, v in item.value.body: - current.append(k, v) - - return self - elif current_body_element[0].is_dotted(): - raise TOMLKitError("Redefinition of an existing table") - elif not item.is_super_table(): - raise KeyAlreadyPresent(key) - elif isinstance(item, AoT): - if not isinstance(current, AoT): - # Tried to define an AoT after a table with the same name. - raise KeyAlreadyPresent(key) - - for table in item.body: - current.append(table) - - return self - else: - raise KeyAlreadyPresent(key) - - is_table = isinstance(item, (Table, AoT)) - if key is not None and self._body and not self._parsed: - # If there is already at least one table in the current container - # and the given item is not a table, we need to find the last - # item that is not a table and insert after it - # If no such item exists, insert at the top of the table - key_after = None - idx = 0 - for k, v in self._body: - if isinstance(v, Null): - # This happens only after deletion - continue - - if isinstance(v, Whitespace) and not v.is_fixed(): - continue - - if not is_table and isinstance(v, (Table, AoT)): - break - - key_after = k or idx - idx += 1 - - if key_after is not None: - if isinstance(key_after, int): - if key_after + 1 < len(self._body) - 1: - return self._insert_at(key_after + 1, key, item) - else: - previous_item = self._body[-1][1] - if ( - not isinstance(previous_item, Whitespace) - and not is_table - and "\n" not in previous_item.trivia.trail - ): - previous_item.trivia.trail += "\n" - else: - return self._insert_after(key_after, key, item) - else: - return self._insert_at(0, key, item) - - if key in self._map: - current_idx = self._map[key] - if isinstance(current_idx, tuple): - current_idx = current_idx[-1] - - current = self._body[current_idx][1] - if key is not None and not isinstance(current, Table): - raise KeyAlreadyPresent(key) - - # Adding sub tables to a currently existing table - if not isinstance(current_idx, tuple): - current_idx = (current_idx,) - - self._map[key] = current_idx + (len(self._body),) - else: - self._map[key] = len(self._body) - - self._body.append((key, item)) - if item.is_table(): - self._table_keys.append(key) - - if key is not None: - super(Container, self).__setitem__(key.key, item.value) - - return self - - def remove(self, key): # type: (Union[Key, str]) -> Container - if not isinstance(key, Key): - key = Key(key) - - idx = self._map.pop(key, None) - if idx is None: - raise NonExistentKey(key) - - if isinstance(idx, tuple): - for i in idx: - self._body[i] = (None, Null()) - else: - self._body[idx] = (None, Null()) - - super(Container, self).__delitem__(key.key) - - return self - - def _insert_after( - self, key, other_key, item - ): # type: (Union[str, Key], Union[str, Key], Union[Item, Any]) -> Container - if key is None: - raise ValueError("Key cannot be null in insert_after()") - - if key not in self: - raise NonExistentKey(key) - - if not isinstance(key, Key): - key = 
Key(key) - - if not isinstance(other_key, Key): - other_key = Key(other_key) - - item = _item(item) - - idx = self._map[key] - # Insert after the max index if there are many. - if isinstance(idx, tuple): - idx = max(idx) - current_item = self._body[idx][1] - if "\n" not in current_item.trivia.trail: - current_item.trivia.trail += "\n" - - # Increment indices after the current index - for k, v in self._map.items(): - if isinstance(v, tuple): - new_indices = [] - for v_ in v: - if v_ > idx: - v_ = v_ + 1 - - new_indices.append(v_) - - self._map[k] = tuple(new_indices) - elif v > idx: - self._map[k] = v + 1 - - self._map[other_key] = idx + 1 - self._body.insert(idx + 1, (other_key, item)) - - if key is not None: - super(Container, self).__setitem__(other_key.key, item.value) - - return self - - def _insert_at( - self, idx, key, item - ): # type: (int, Union[str, Key], Union[Item, Any]) -> Container - if idx > len(self._body) - 1: - raise ValueError("Unable to insert at position {}".format(idx)) - - if not isinstance(key, Key): - key = Key(key) - - item = _item(item) - - if idx > 0: - previous_item = self._body[idx - 1][1] - if ( - not isinstance(previous_item, Whitespace) - and not isinstance(item, (AoT, Table)) - and "\n" not in previous_item.trivia.trail - ): - previous_item.trivia.trail += "\n" - - # Increment indices after the current index - for k, v in self._map.items(): - if isinstance(v, tuple): - new_indices = [] - for v_ in v: - if v_ >= idx: - v_ = v_ + 1 - - new_indices.append(v_) - - self._map[k] = tuple(new_indices) - elif v >= idx: - self._map[k] = v + 1 - - self._map[key] = idx - self._body.insert(idx, (key, item)) - - if key is not None: - super(Container, self).__setitem__(key.key, item.value) - - return self - - def item(self, key): # type: (Union[Key, str]) -> Item - if not isinstance(key, Key): - key = Key(key) - - idx = self._map.get(key, None) - if idx is None: - raise NonExistentKey(key) - - if isinstance(idx, tuple): - # The item we are getting is an out of order table - # so we need a proxy to retrieve the proper objects - # from the parent container - return OutOfOrderTableProxy(self, idx) - - return self._body[idx][1] - - def last_item(self): # type: () -> Optional[Item] - if self._body: - return self._body[-1][1] - - def as_string(self): # type: () -> str - s = "" - for k, v in self._body: - if k is not None: - if isinstance(v, Table): - s += self._render_table(k, v) - elif isinstance(v, AoT): - s += self._render_aot(k, v) - else: - s += self._render_simple_item(k, v) - else: - s += self._render_simple_item(k, v) - - return s - - def _render_table( - self, key, table, prefix=None - ): # (Key, Table, Optional[str]) -> str - cur = "" - - if table.display_name is not None: - _key = table.display_name - else: - _key = key.as_string() - - if prefix is not None: - _key = prefix + "." 
+ _key - - if not table.is_super_table() or ( - any( - not isinstance(v, (Table, AoT, Whitespace)) for _, v in table.value.body - ) - and not key.is_dotted() - ): - open_, close = "[", "]" - if table.is_aot_element(): - open_, close = "[[", "]]" - - cur += "{}{}{}{}{}{}{}{}".format( - table.trivia.indent, - open_, - decode(_key), - close, - table.trivia.comment_ws, - decode(table.trivia.comment), - table.trivia.trail, - "\n" if "\n" not in table.trivia.trail and len(table.value) > 0 else "", - ) - - for k, v in table.value.body: - if isinstance(v, Table): - if v.is_super_table(): - if k.is_dotted() and not key.is_dotted(): - # Dotted key inside table - cur += self._render_table(k, v) - else: - cur += self._render_table(k, v, prefix=_key) - else: - cur += self._render_table(k, v, prefix=_key) - elif isinstance(v, AoT): - cur += self._render_aot(k, v, prefix=_key) - else: - cur += self._render_simple_item( - k, v, prefix=_key if key.is_dotted() else None - ) - - return cur - - def _render_aot(self, key, aot, prefix=None): - _key = key.as_string() - if prefix is not None: - _key = prefix + "." + _key - - cur = "" - _key = decode(_key) - for table in aot.body: - cur += self._render_aot_table(table, prefix=_key) - - return cur - - def _render_aot_table(self, table, prefix=None): # (Table, Optional[str]) -> str - cur = "" - - _key = prefix or "" - - if not table.is_super_table(): - open_, close = "[[", "]]" - - cur += "{}{}{}{}{}{}{}".format( - table.trivia.indent, - open_, - decode(_key), - close, - table.trivia.comment_ws, - decode(table.trivia.comment), - table.trivia.trail, - ) - - for k, v in table.value.body: - if isinstance(v, Table): - if v.is_super_table(): - if k.is_dotted(): - # Dotted key inside table - cur += self._render_table(k, v) - else: - cur += self._render_table(k, v, prefix=_key) - else: - cur += self._render_table(k, v, prefix=_key) - elif isinstance(v, AoT): - cur += self._render_aot(k, v, prefix=_key) - else: - cur += self._render_simple_item(k, v) - - return cur - - def _render_simple_item(self, key, item, prefix=None): - if key is None: - return item.as_string() - - _key = key.as_string() - if prefix is not None: - _key = prefix + "." 
+ _key - - return "{}{}{}{}{}{}{}".format( - item.trivia.indent, - decode(_key), - key.sep, - decode(item.as_string()), - item.trivia.comment_ws, - decode(item.trivia.comment), - item.trivia.trail, - ) - - # Dictionary methods - - def keys(self): # type: () -> Generator[str] - return super(Container, self).keys() - - def values(self): # type: () -> Generator[Item] - for k in self.keys(): - yield self[k] - - def items(self): # type: () -> Generator[Item] - for k, v in self.value.items(): - if k is None: - continue - - yield k, v - - def update(self, other): # type: (Dict) -> None - for k, v in other.items(): - self[k] = v - - def get(self, key, default=None): # type: (Any, Optional[Any]) -> Any - if not isinstance(key, Key): - key = Key(key) - - if key not in self: - return default - - return self[key] - - def pop(self, key, default=_NOT_SET): - try: - value = self[key] - except KeyError: - if default is _NOT_SET: - raise - - return default - - del self[key] - - return value - - def setdefault( - self, key, default=None - ): # type: (Union[Key, str], Any) -> Union[Item, Container] - if key not in self: - self[key] = default - - return self[key] - - def __contains__(self, key): # type: (Union[Key, str]) -> bool - if not isinstance(key, Key): - key = Key(key) - - return key in self._map - - def __getitem__(self, key): # type: (Union[Key, str]) -> Union[Item, Container] - if not isinstance(key, Key): - key = Key(key) - - idx = self._map.get(key, None) - if idx is None: - raise NonExistentKey(key) - - if isinstance(idx, tuple): - # The item we are getting is an out of order table - # so we need a proxy to retrieve the proper objects - # from the parent container - return OutOfOrderTableProxy(self, idx) - - item = self._body[idx][1] - if item.is_boolean(): - return item.value - - return item - - def __setitem__(self, key, value): # type: (Union[Key, str], Any) -> None - if key is not None and key in self: - self._replace(key, key, value) - else: - self.append(key, value) - - def __delitem__(self, key): # type: (Union[Key, str]) -> None - self.remove(key) - - def _replace( - self, key, new_key, value - ): # type: (Union[Key, str], Union[Key, str], Item) -> None - if not isinstance(key, Key): - key = Key(key) - - if not isinstance(new_key, Key): - new_key = Key(new_key) - - idx = self._map.get(key, None) - if idx is None: - raise NonExistentKey(key) - - self._replace_at(idx, new_key, value) - - def _replace_at( - self, idx, new_key, value - ): # type: (Union[int, Tuple[int]], Union[Key, str], Item) -> None - if not isinstance(new_key, Key): - new_key = Key(new_key) - - if isinstance(idx, tuple): - for i in idx[1:]: - self._body[i] = (None, Null()) - - idx = idx[0] - - k, v = self._body[idx] - - self._map[new_key] = self._map.pop(k) - if new_key != k: - super(Container, self).__delitem__(k) - - if isinstance(self._map[new_key], tuple): - self._map[new_key] = self._map[new_key][0] - - value = _item(value) - - # Copying trivia - if not isinstance(value, (Whitespace, AoT)): - value.trivia.indent = v.trivia.indent - value.trivia.comment_ws = v.trivia.comment_ws - value.trivia.comment = v.trivia.comment - value.trivia.trail = v.trivia.trail - - if isinstance(value, Table): - # Insert a cosmetic new line for tables - value.append(None, Whitespace("\n")) - - self._body[idx] = (new_key, value) - - super(Container, self).__setitem__(new_key.key, value.value) - - def __str__(self): # type: () -> str - return str(self.value) - - def __repr__(self): # type: () -> str - return super(Container, self).__repr__() 
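The `Container` being deleted above is the data structure that lets tomlkit round-trip TOML without destroying formatting: items live in `_body` alongside their trivia (indent, comments, trailing newlines), while `_map` tracks key positions, including tuple indices for out-of-order tables served through `OutOfOrderTableProxy`. A minimal sketch of that behaviour, exercised through the public `tomlkit` package rather than this vendored import path (the `tool.poetry` snippet is an invented example):

```python
import tomlkit  # upstream package; poetry-core reaches the same code via its vendored copy

source = """\
[tool.poetry]  # project metadata
name = "demo"
version = "0.1.0"
"""

doc = tomlkit.parse(source)  # a TOMLDocument, backed by Container
doc["tool"]["poetry"]["version"] = "0.2.0"  # edit a value in place

out = tomlkit.dumps(doc)
assert "# project metadata" in out  # trivia (comments, whitespace) survive the edit
print(out)
```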
- - def __eq__(self, other): # type: (Dict) -> bool - if not isinstance(other, dict): - return NotImplemented - - return self.value == other - - def _getstate(self, protocol): - return (self._parsed,) - - def __reduce__(self): - return self.__reduce_ex__(2) - - def __reduce_ex__(self, protocol): - return ( - self.__class__, - self._getstate(protocol), - (self._map, self._body, self._parsed), - ) - - def __setstate__(self, state): - self._map = state[0] - self._body = state[1] - self._parsed = state[2] - - def copy(self): # type: () -> Container - return copy.copy(self) - - def __copy__(self): # type: () -> Container - c = self.__class__(self._parsed) - for k, v in super(Container, self).copy().items(): - super(Container, c).__setitem__(k, v) - - c._body += self.body - c._map.update(self._map) - - return c - - -class OutOfOrderTableProxy(dict): - def __init__(self, container, indices): # type: (Container, Tuple) -> None - self._container = container - self._internal_container = Container(self._container.parsing) - self._tables = [] - self._tables_map = {} - self._map = {} - - for i in indices: - key, item = self._container._body[i] - - if isinstance(item, Table): - self._tables.append(item) - table_idx = len(self._tables) - 1 - for k, v in item.value.body: - self._internal_container.append(k, v) - self._tables_map[k] = table_idx - if k is not None: - super(OutOfOrderTableProxy, self).__setitem__(k.key, v) - else: - self._internal_container.append(key, item) - self._map[key] = i - if key is not None: - super(OutOfOrderTableProxy, self).__setitem__(key.key, item) - - @property - def value(self): - return self._internal_container.value - - def __getitem__(self, key): # type: (Union[Key, str]) -> Any - if key not in self._internal_container: - raise NonExistentKey(key) - - return self._internal_container[key] - - def __setitem__(self, key, item): # type: (Union[Key, str], Any) -> None - if key in self._map: - idx = self._map[key] - self._container._replace_at(idx, key, item) - elif key in self._tables_map: - table = self._tables[self._tables_map[key]] - table[key] = item - elif self._tables: - table = self._tables[0] - table[key] = item - else: - self._container[key] = item - - if key is not None: - super(OutOfOrderTableProxy, self).__setitem__(key, item) - - def __delitem__(self, key): # type: (Union[Key, str]) -> None - if key in self._map: - idx = self._map[key] - del self._container[key] - del self._map[key] - elif key in self._tables_map: - table = self._tables[self._tables_map[key]] - del table[key] - del self._tables_map[key] - else: - raise NonExistentKey(key) - - del self._internal_container[key] - - def keys(self): - return self._internal_container.keys() - - def values(self): - return self._internal_container.values() - - def items(self): # type: () -> Generator[Item] - return self._internal_container.items() - - def update(self, other): # type: (Dict) -> None - self._internal_container.update(other) - - def get(self, key, default=None): # type: (Any, Optional[Any]) -> Any - return self._internal_container.get(key, default=default) - - def pop(self, key, default=_NOT_SET): - return self._internal_container.pop(key, default=default) - - def setdefault( - self, key, default=None - ): # type: (Union[Key, str], Any) -> Union[Item, Container] - return self._internal_container.setdefault(key, default=default) - - def __contains__(self, key): - return key in self._internal_container - - def __str__(self): - return str(self._internal_container) - - def __repr__(self): - return 
repr(self._internal_container) - - def __eq__(self, other): # type: (Dict) -> bool - if not isinstance(other, dict): - return NotImplemented - - return self._internal_container == other - - def __getattr__(self, attribute): - return getattr(self._internal_container, attribute) diff --git a/vendor/poetry-core/poetry/core/_vendor/tomlkit/exceptions.py b/vendor/poetry-core/poetry/core/_vendor/tomlkit/exceptions.py deleted file mode 100644 index 44836363..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/tomlkit/exceptions.py +++ /dev/null @@ -1,221 +0,0 @@ -from typing import Optional - - -class TOMLKitError(Exception): - - pass - - -class ParseError(ValueError, TOMLKitError): - """ - This error occurs when the parser encounters a syntax error - in the TOML being parsed. The error references the line and - location within the line where the error was encountered. - """ - - def __init__( - self, line, col, message=None - ): # type: (int, int, Optional[str]) -> None - self._line = line - self._col = col - - if message is None: - message = "TOML parse error" - - super(ParseError, self).__init__( - "{} at line {} col {}".format(message, self._line, self._col) - ) - - @property - def line(self): - return self._line - - @property - def col(self): - return self._col - - -class MixedArrayTypesError(ParseError): - """ - An array was found that had two or more element types. - """ - - def __init__(self, line, col): # type: (int, int) -> None - message = "Mixed types found in array" - - super(MixedArrayTypesError, self).__init__(line, col, message=message) - - -class InvalidNumberError(ParseError): - """ - A numeric field was improperly specified. - """ - - def __init__(self, line, col): # type: (int, int) -> None - message = "Invalid number" - - super(InvalidNumberError, self).__init__(line, col, message=message) - - -class InvalidDateTimeError(ParseError): - """ - A datetime field was improperly specified. - """ - - def __init__(self, line, col): # type: (int, int) -> None - message = "Invalid datetime" - - super(InvalidDateTimeError, self).__init__(line, col, message=message) - - -class InvalidDateError(ParseError): - """ - A date field was improperly specified. - """ - - def __init__(self, line, col): # type: (int, int) -> None - message = "Invalid date" - - super(InvalidDateError, self).__init__(line, col, message=message) - - -class InvalidTimeError(ParseError): - """ - A date field was improperly specified. - """ - - def __init__(self, line, col): # type: (int, int) -> None - message = "Invalid time" - - super(InvalidTimeError, self).__init__(line, col, message=message) - - -class InvalidNumberOrDateError(ParseError): - """ - A numeric or date field was improperly specified. - """ - - def __init__(self, line, col): # type: (int, int) -> None - message = "Invalid number or date format" - - super(InvalidNumberOrDateError, self).__init__(line, col, message=message) - - -class InvalidUnicodeValueError(ParseError): - """ - A unicode code was improperly specified. - """ - - def __init__(self, line, col): # type: (int, int) -> None - message = "Invalid unicode value" - - super(InvalidUnicodeValueError, self).__init__(line, col, message=message) - - -class UnexpectedCharError(ParseError): - """ - An unexpected character was found during parsing. 
- """ - - def __init__(self, line, col, char): # type: (int, int, str) -> None - message = "Unexpected character: {}".format(repr(char)) - - super(UnexpectedCharError, self).__init__(line, col, message=message) - - -class EmptyKeyError(ParseError): - """ - An empty key was found during parsing. - """ - - def __init__(self, line, col): # type: (int, int) -> None - message = "Empty key" - - super(EmptyKeyError, self).__init__(line, col, message=message) - - -class EmptyTableNameError(ParseError): - """ - An empty table name was found during parsing. - """ - - def __init__(self, line, col): # type: (int, int) -> None - message = "Empty table name" - - super(EmptyTableNameError, self).__init__(line, col, message=message) - - -class InvalidCharInStringError(ParseError): - """ - The string being parsed contains an invalid character. - """ - - def __init__(self, line, col, char): # type: (int, int, str) -> None - message = "Invalid character {} in string".format(repr(char)) - - super(InvalidCharInStringError, self).__init__(line, col, message=message) - - -class UnexpectedEofError(ParseError): - """ - The TOML being parsed ended before the end of a statement. - """ - - def __init__(self, line, col): # type: (int, int) -> None - message = "Unexpected end of file" - - super(UnexpectedEofError, self).__init__(line, col, message=message) - - -class InternalParserError(ParseError): - """ - An error that indicates a bug in the parser. - """ - - def __init__( - self, line, col, message=None - ): # type: (int, int, Optional[str]) -> None - msg = "Internal parser error" - if message: - msg += " ({})".format(message) - - super(InternalParserError, self).__init__(line, col, message=msg) - - -class NonExistentKey(KeyError, TOMLKitError): - """ - A non-existent key was used. - """ - - def __init__(self, key): - message = 'Key "{}" does not exist.'.format(key) - - super(NonExistentKey, self).__init__(message) - - -class KeyAlreadyPresent(TOMLKitError): - """ - An already present key was used. 
- """ - - def __init__(self, key): - message = 'Key "{}" already exists.'.format(key) - - super(KeyAlreadyPresent, self).__init__(message) - - -class InvalidControlChar(ParseError): - def __init__(self, line, col, char, type): # type: (int, int, int, str) -> None - display_code = "\\u00" - - if char < 16: - display_code += "0" - - display_code += str(char) - - message = ( - "Control characters (codes less than 0x1f and 0x7f) are not allowed in {}, " - "use {} instead".format(type, display_code) - ) - - super(InvalidControlChar, self).__init__(line, col, message=message) diff --git a/vendor/poetry-core/poetry/core/_vendor/tomlkit/items.py b/vendor/poetry-core/poetry/core/_vendor/tomlkit/items.py deleted file mode 100644 index 184ffe7d..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/tomlkit/items.py +++ /dev/null @@ -1,1339 +0,0 @@ -from __future__ import unicode_literals - -import re -import string - -from datetime import date -from datetime import datetime -from datetime import time -from enum import Enum -from typing import Any -from typing import Dict -from typing import Generator -from typing import List -from typing import Optional -from typing import Union - -from ._compat import PY2 -from ._compat import PY38 -from ._compat import decode -from ._compat import long -from ._compat import unicode -from ._utils import escape_string - - -if PY2: - from functools32 import lru_cache -else: - from functools import lru_cache - - -def item(value, _parent=None, _sort_keys=False): - from .container import Container - - if isinstance(value, Item): - return value - - if isinstance(value, bool): - return Bool(value, Trivia()) - elif isinstance(value, int): - return Integer(value, Trivia(), str(value)) - elif isinstance(value, float): - return Float(value, Trivia(), str(value)) - elif isinstance(value, dict): - val = Table(Container(), Trivia(), False) - for k, v in sorted( - value.items(), - key=lambda i: (isinstance(i[1], dict), i[0] if _sort_keys else 1), - ): - val[k] = item(v, _parent=val, _sort_keys=_sort_keys) - - return val - elif isinstance(value, list): - if value and isinstance(value[0], dict): - a = AoT([]) - else: - a = Array([], Trivia()) - - for v in value: - if isinstance(v, dict): - table = Table(Container(), Trivia(), True) - - for k, _v in sorted( - v.items(), - key=lambda i: (isinstance(i[1], dict), i[0] if _sort_keys else 1), - ): - i = item(_v, _sort_keys=_sort_keys) - if isinstance(table, InlineTable): - i.trivia.trail = "" - - table[k] = item(i, _sort_keys=_sort_keys) - - v = table - - a.append(v) - - return a - elif isinstance(value, (str, unicode)): - escaped = escape_string(value) - - return String(StringType.SLB, decode(value), escaped, Trivia()) - elif isinstance(value, datetime): - return DateTime( - value.year, - value.month, - value.day, - value.hour, - value.minute, - value.second, - value.microsecond, - value.tzinfo, - Trivia(), - value.isoformat().replace("+00:00", "Z"), - ) - elif isinstance(value, date): - return Date(value.year, value.month, value.day, Trivia(), value.isoformat()) - elif isinstance(value, time): - return Time( - value.hour, - value.minute, - value.second, - value.microsecond, - value.tzinfo, - Trivia(), - value.isoformat(), - ) - - raise ValueError("Invalid type {}".format(type(value))) - - -class StringType(Enum): - # Single Line Basic - SLB = '"' - # Multi Line Basic - MLB = '"""' - # Single Line Literal - SLL = "'" - # Multi Line Literal - MLL = "'''" - - @property - @lru_cache(maxsize=None) - def unit(self): # type: () -> str - return 
self.value[0] - - @lru_cache(maxsize=None) - def is_basic(self): # type: () -> bool - return self in {StringType.SLB, StringType.MLB} - - @lru_cache(maxsize=None) - def is_literal(self): # type: () -> bool - return self in {StringType.SLL, StringType.MLL} - - @lru_cache(maxsize=None) - def is_singleline(self): # type: () -> bool - return self in {StringType.SLB, StringType.SLL} - - @lru_cache(maxsize=None) - def is_multiline(self): # type: () -> bool - return self in {StringType.MLB, StringType.MLL} - - @lru_cache(maxsize=None) - def toggle(self): # type: () -> StringType - return { - StringType.SLB: StringType.MLB, - StringType.MLB: StringType.SLB, - StringType.SLL: StringType.MLL, - StringType.MLL: StringType.SLL, - }[self] - - -class BoolType(Enum): - TRUE = "true" - FALSE = "false" - - @lru_cache(maxsize=None) - def __bool__(self): - return {BoolType.TRUE: True, BoolType.FALSE: False}[self] - - if PY2: - __nonzero__ = __bool__ # for PY2 - - def __iter__(self): - return iter(self.value) - - def __len__(self): - return len(self.value) - - -class Trivia: - """ - Trivia information (aka metadata). - """ - - def __init__( - self, indent=None, comment_ws=None, comment=None, trail=None - ): # type: (str, str, str, str) -> None - # Whitespace before a value. - self.indent = indent or "" - # Whitespace after a value, but before a comment. - self.comment_ws = comment_ws or "" - # Comment, starting with # character, or empty string if no comment. - self.comment = comment or "" - # Trailing newline. - if trail is None: - trail = "\n" - - self.trail = trail - - -class KeyType(Enum): - """ - The type of a Key. - - Keys can be bare (unquoted), or quoted using basic ("), or literal (') - quotes following the same escaping rules as single-line StringType. - """ - - Bare = "" - Basic = '"' - Literal = "'" - - -class Key: - """ - A key value. - """ - - def __init__( - self, k, t=None, sep=None, dotted=False, original=None - ): # type: (str, Optional[KeyType], Optional[str], bool, Optional[str]) -> None - if t is None: - if any( - [c not in string.ascii_letters + string.digits + "-" + "_" for c in k] - ): - t = KeyType.Basic - else: - t = KeyType.Bare - - self.t = t - if sep is None: - sep = " = " - - self.sep = sep - self.key = k - if original is None: - original = k - - self._original = original - - self._dotted = dotted - - @property - def delimiter(self): # type: () -> str - return self.t.value - - def is_dotted(self): # type: () -> bool - return self._dotted - - def is_bare(self): # type: () -> bool - return self.t == KeyType.Bare - - def as_string(self): # type: () -> str - return "{}{}{}".format(self.delimiter, self._original, self.delimiter) - - def __hash__(self): # type: () -> int - return hash(self.key) - - def __eq__(self, other): # type: (Key) -> bool - if isinstance(other, Key): - return self.key == other.key - - return self.key == other - - def __str__(self): # type: () -> str - return self.as_string() - - def __repr__(self): # type: () -> str - return "<Key {}>".format(self.as_string()) - - -class Item(object): - """ - An item within a TOML document.
- """ - - def __init__(self, trivia): # type: (Trivia) -> None - self._trivia = trivia - - @property - def trivia(self): # type: () -> Trivia - return self._trivia - - @property - def discriminant(self): # type: () -> int - raise NotImplementedError() - - def as_string(self): # type: () -> str - raise NotImplementedError() - - # Helpers - - def comment(self, comment): # type: (str) -> Item - if not comment.strip().startswith("#"): - comment = "# " + comment - - self._trivia.comment_ws = " " - self._trivia.comment = comment - - return self - - def indent(self, indent): # type: (int) -> Item - if self._trivia.indent.startswith("\n"): - self._trivia.indent = "\n" + " " * indent - else: - self._trivia.indent = " " * indent - - return self - - def is_boolean(self): # type: () -> bool - return isinstance(self, Bool) - - def is_table(self): # type: () -> bool - return isinstance(self, Table) - - def is_inline_table(self): # type: () -> bool - return isinstance(self, InlineTable) - - def is_aot(self): # type: () -> bool - return isinstance(self, AoT) - - def _getstate(self, protocol=3): - return (self._trivia,) - - def __reduce__(self): - return self.__reduce_ex__(2) - - def __reduce_ex__(self, protocol): - return self.__class__, self._getstate(protocol) - - -class Whitespace(Item): - """ - A whitespace literal. - """ - - def __init__(self, s, fixed=False): # type: (str, bool) -> None - self._s = s - self._fixed = fixed - - @property - def s(self): # type: () -> str - return self._s - - @property - def value(self): # type: () -> str - return self._s - - @property - def trivia(self): # type: () -> Trivia - raise RuntimeError("Called trivia on a Whitespace variant.") - - @property - def discriminant(self): # type: () -> int - return 0 - - def is_fixed(self): # type: () -> bool - return self._fixed - - def as_string(self): # type: () -> str - return self._s - - def __repr__(self): # type: () -> str - return "<{} {}>".format(self.__class__.__name__, repr(self._s)) - - def _getstate(self, protocol=3): - return self._s, self._fixed - - -class Comment(Item): - """ - A comment literal. - """ - - @property - def discriminant(self): # type: () -> int - return 1 - - def as_string(self): # type: () -> str - return "{}{}{}".format( - self._trivia.indent, decode(self._trivia.comment), self._trivia.trail - ) - - def __str__(self): # type: () -> str - return "{}{}".format(self._trivia.indent, decode(self._trivia.comment)) - - -class Integer(long, Item): - """ - An integer literal. 
- """ - - def __new__(cls, value, trivia, raw): # type: (int, Trivia, str) -> Integer - return super(Integer, cls).__new__(cls, value) - - def __init__(self, _, trivia, raw): # type: (int, Trivia, str) -> None - super(Integer, self).__init__(trivia) - - self._raw = raw - self._sign = False - - if re.match(r"^[+\-]\d+$", raw): - self._sign = True - - @property - def discriminant(self): # type: () -> int - return 2 - - @property - def value(self): # type: () -> int - return self - - def as_string(self): # type: () -> str - return self._raw - - def __add__(self, other): - result = super(Integer, self).__add__(other) - - return self._new(result) - - def __radd__(self, other): - result = super(Integer, self).__radd__(other) - - if isinstance(other, Integer): - return self._new(result) - - return result - - def __sub__(self, other): - result = super(Integer, self).__sub__(other) - - return self._new(result) - - def __rsub__(self, other): - result = super(Integer, self).__rsub__(other) - - if isinstance(other, Integer): - return self._new(result) - - return result - - def _new(self, result): - raw = str(result) - - if self._sign: - sign = "+" if result >= 0 else "-" - raw = sign + raw - - return Integer(result, self._trivia, raw) - - def _getstate(self, protocol=3): - return int(self), self._trivia, self._raw - - -class Float(float, Item): - """ - A float literal. - """ - - def __new__(cls, value, trivia, raw): # type: (float, Trivia, str) -> Integer - return super(Float, cls).__new__(cls, value) - - def __init__(self, _, trivia, raw): # type: (float, Trivia, str) -> None - super(Float, self).__init__(trivia) - - self._raw = raw - self._sign = False - - if re.match(r"^[+\-].+$", raw): - self._sign = True - - @property - def discriminant(self): # type: () -> int - return 3 - - @property - def value(self): # type: () -> float - return self - - def as_string(self): # type: () -> str - return self._raw - - def __add__(self, other): - result = super(Float, self).__add__(other) - - return self._new(result) - - def __radd__(self, other): - result = super(Float, self).__radd__(other) - - if isinstance(other, Float): - return self._new(result) - - return result - - def __sub__(self, other): - result = super(Float, self).__sub__(other) - - return self._new(result) - - def __rsub__(self, other): - result = super(Float, self).__rsub__(other) - - if isinstance(other, Float): - return self._new(result) - - return result - - def _new(self, result): - raw = str(result) - - if self._sign: - sign = "+" if result >= 0 else "-" - raw = sign + raw - - return Float(result, self._trivia, raw) - - def _getstate(self, protocol=3): - return float(self), self._trivia, self._raw - - -class Bool(Item): - """ - A boolean literal. - """ - - def __init__(self, t, trivia): # type: (int, Trivia) -> None - super(Bool, self).__init__(trivia) - - self._value = bool(t) - - @property - def discriminant(self): # type: () -> int - return 4 - - @property - def value(self): # type: () -> bool - return self._value - - def as_string(self): # type: () -> str - return str(self._value).lower() - - def _getstate(self, protocol=3): - return self._value, self._trivia - - def __bool__(self): - return self._value - - __nonzero__ = __bool__ - - def __eq__(self, other): - if not isinstance(other, bool): - return NotImplemented - - return other == self._value - - def __hash__(self): - return hash(self._value) - - def __repr__(self): - return repr(self._value) - - -class DateTime(Item, datetime): - """ - A datetime literal. 
- """ - - def __new__( - cls, - year, - month, - day, - hour, - minute, - second, - microsecond, - tzinfo, - trivia, - raw, - **kwargs - ): # type: (int, int, int, int, int, int, int, Optional[datetime.tzinfo], Trivia, str, Any) -> datetime - return datetime.__new__( - cls, - year, - month, - day, - hour, - minute, - second, - microsecond, - tzinfo=tzinfo, - **kwargs - ) - - def __init__( - self, year, month, day, hour, minute, second, microsecond, tzinfo, trivia, raw - ): # type: (int, int, int, int, int, int, int, Optional[datetime.tzinfo], Trivia, str) -> None - super(DateTime, self).__init__(trivia) - - self._raw = raw - - @property - def discriminant(self): # type: () -> int - return 5 - - @property - def value(self): # type: () -> datetime - return self - - def as_string(self): # type: () -> str - return self._raw - - def __add__(self, other): - if PY38: - result = datetime( - self.year, - self.month, - self.day, - self.hour, - self.minute, - self.second, - self.microsecond, - self.tzinfo, - ).__add__(other) - else: - result = super(DateTime, self).__add__(other) - - return self._new(result) - - def __sub__(self, other): - if PY38: - result = datetime( - self.year, - self.month, - self.day, - self.hour, - self.minute, - self.second, - self.microsecond, - self.tzinfo, - ).__sub__(other) - else: - result = super(DateTime, self).__sub__(other) - - if isinstance(result, datetime): - result = self._new(result) - - return result - - def _new(self, result): - raw = result.isoformat() - - return DateTime( - result.year, - result.month, - result.day, - result.hour, - result.minute, - result.second, - result.microsecond, - result.tzinfo, - self._trivia, - raw, - ) - - def _getstate(self, protocol=3): - return ( - self.year, - self.month, - self.day, - self.hour, - self.minute, - self.second, - self.microsecond, - self.tzinfo, - self._trivia, - self._raw, - ) - - -class Date(Item, date): - """ - A date literal. - """ - - def __new__(cls, year, month, day, *_): # type: (int, int, int, Any) -> date - return date.__new__(cls, year, month, day) - - def __init__( - self, year, month, day, trivia, raw - ): # type: (int, int, int, Trivia, str) -> None - super(Date, self).__init__(trivia) - - self._raw = raw - - @property - def discriminant(self): # type: () -> int - return 6 - - @property - def value(self): # type: () -> date - return self - - def as_string(self): # type: () -> str - return self._raw - - def __add__(self, other): - if PY38: - result = date(self.year, self.month, self.day).__add__(other) - else: - result = super(Date, self).__add__(other) - - return self._new(result) - - def __sub__(self, other): - if PY38: - result = date(self.year, self.month, self.day).__sub__(other) - else: - result = super(Date, self).__sub__(other) - - if isinstance(result, date): - result = self._new(result) - - return result - - def _new(self, result): - raw = result.isoformat() - - return Date(result.year, result.month, result.day, self._trivia, raw) - - def _getstate(self, protocol=3): - return (self.year, self.month, self.day, self._trivia, self._raw) - - -class Time(Item, time): - """ - A time literal. 
- """ - - def __new__( - cls, hour, minute, second, microsecond, tzinfo, *_ - ): # type: (int, int, int, int, Optional[datetime.tzinfo], Any) -> time - return time.__new__(cls, hour, minute, second, microsecond, tzinfo) - - def __init__( - self, hour, minute, second, microsecond, tzinfo, trivia, raw - ): # type: (int, int, int, int, Optional[datetime.tzinfo], Trivia, str) -> None - super(Time, self).__init__(trivia) - - self._raw = raw - - @property - def discriminant(self): # type: () -> int - return 7 - - @property - def value(self): # type: () -> time - return self - - def as_string(self): # type: () -> str - return self._raw - - def _getstate(self, protocol=3): - return ( - self.hour, - self.minute, - self.second, - self.microsecond, - self.tzinfo, - self._trivia, - self._raw, - ) - - -class Array(Item, list): - """ - An array literal - """ - - def __init__( - self, value, trivia, multiline=False - ): # type: (list, Trivia, bool) -> None - super(Array, self).__init__(trivia) - - list.__init__( - self, [v.value for v in value if not isinstance(v, (Whitespace, Comment))] - ) - - self._value = value - self._multiline = multiline - - @property - def discriminant(self): # type: () -> int - return 8 - - @property - def value(self): # type: () -> list - return self - - def multiline(self, multiline): # type: (bool) -> self - self._multiline = multiline - - return self - - def as_string(self): # type: () -> str - if not self._multiline: - return "[{}]".format("".join(v.as_string() for v in self._value)) - - s = "[\n" + self.trivia.indent + " " * 4 - s += (",\n" + self.trivia.indent + " " * 4).join( - v.as_string() for v in self._value if not isinstance(v, Whitespace) - ) - s += ",\n" - s += "]" - - return s - - def append(self, _item): # type: (Any) -> None - if self._value: - self._value.append(Whitespace(", ")) - - it = item(_item) - super(Array, self).append(it.value) - - self._value.append(it) - - if not PY2: - - def clear(self): - super(Array, self).clear() - - self._value.clear() - - def __iadd__(self, other): # type: (list) -> Array - if not isinstance(other, list): - return NotImplemented - - for v in other: - self.append(v) - - return self - - def __delitem__(self, key): - super(Array, self).__delitem__(key) - - j = 0 if key >= 0 else -1 - for i, v in enumerate(self._value if key >= 0 else reversed(self._value)): - if key < 0: - i = -i - 1 - - if isinstance(v, (Comment, Whitespace)): - continue - - if j == key: - del self._value[i] - - if i < 0 and abs(i) > len(self._value): - i += 1 - - if i < len(self._value) - 1 and isinstance(self._value[i], Whitespace): - del self._value[i] - - break - - j += 1 if key >= 0 else -1 - - def __str__(self): - return str( - [v.value for v in self._value if not isinstance(v, (Whitespace, Comment))] - ) - - def __repr__(self): - return str(self) - - def _getstate(self, protocol=3): - return self._value, self._trivia - - -class Table(Item, dict): - """ - A table literal. 
- """ - - def __init__( - self, - value, - trivia, - is_aot_element, - is_super_table=False, - name=None, - display_name=None, - ): # type: (tomlkit.container.Container, Trivia, bool, bool, Optional[str], Optional[str]) -> None - super(Table, self).__init__(trivia) - - self.name = name - self.display_name = display_name - self._value = value - self._is_aot_element = is_aot_element - self._is_super_table = is_super_table - - for k, v in self._value.body: - if k is not None: - super(Table, self).__setitem__(k.key, v) - - @property - def value(self): # type: () -> tomlkit.container.Container - return self._value - - @property - def discriminant(self): # type: () -> int - return 9 - - def add(self, key, item=None): # type: (Union[Key, Item, str], Any) -> Item - if item is None: - if not isinstance(key, (Comment, Whitespace)): - raise ValueError( - "Non comment/whitespace items must have an associated key" - ) - - key, item = None, key - - return self.append(key, item) - - def append(self, key, _item): # type: (Union[Key, str], Any) -> Table - """ - Appends a (key, item) to the table. - """ - if not isinstance(_item, Item): - _item = item(_item) - - self._value.append(key, _item) - - if isinstance(key, Key): - key = key.key - - if key is not None: - super(Table, self).__setitem__(key, _item) - - m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent) - if not m: - return self - - indent = m.group(1) - - if not isinstance(_item, Whitespace): - m = re.match("(?s)^([^ ]*)(.*)$", _item.trivia.indent) - if not m: - _item.trivia.indent = indent - else: - _item.trivia.indent = m.group(1) + indent + m.group(2) - - return self - - def raw_append(self, key, _item): # type: (Union[Key, str], Any) -> Table - if not isinstance(_item, Item): - _item = item(_item) - - self._value.append(key, _item) - - if isinstance(key, Key): - key = key.key - - if key is not None: - super(Table, self).__setitem__(key, _item) - - return self - - def remove(self, key): # type: (Union[Key, str]) -> Table - self._value.remove(key) - - if isinstance(key, Key): - key = key.key - - if key is not None: - super(Table, self).__delitem__(key) - - return self - - def is_aot_element(self): # type: () -> bool - return self._is_aot_element - - def is_super_table(self): # type: () -> bool - return self._is_super_table - - def as_string(self): # type: () -> str - return self._value.as_string() - - # Helpers - - def indent(self, indent): # type: (int) -> Table - super(Table, self).indent(indent) - - m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent) - if not m: - indent = "" - else: - indent = m.group(1) - - for k, item in self._value.body: - if not isinstance(item, Whitespace): - item.trivia.indent = indent + item.trivia.indent - - return self - - def keys(self): # type: () -> Generator[str] - for k in self._value.keys(): - yield k - - def values(self): # type: () -> Generator[Item] - for v in self._value.values(): - yield v - - def items(self): # type: () -> Generator[Item] - for k, v in self._value.items(): - yield k, v - - def update(self, other): # type: (Dict) -> None - for k, v in other.items(): - self[k] = v - - def get(self, key, default=None): # type: (Any, Optional[Any]) -> Any - return self._value.get(key, default) - - def __contains__(self, key): # type: (Union[Key, str]) -> bool - return key in self._value - - def __getitem__(self, key): # type: (Union[Key, str]) -> Item - return self._value[key] - - def __setitem__(self, key, value): # type: (Union[Key, str], Any) -> None - if not isinstance(value, Item): - value = 
item(value) - - self._value[key] = value - - if key is not None: - super(Table, self).__setitem__(key, value) - - m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent) - if not m: - return - - indent = m.group(1) - - if not isinstance(value, Whitespace): - m = re.match("(?s)^([^ ]*)(.*)$", value.trivia.indent) - if not m: - value.trivia.indent = indent - else: - value.trivia.indent = m.group(1) + indent + m.group(2) - - def __delitem__(self, key): # type: (Union[Key, str]) -> None - self.remove(key) - - def __repr__(self): - return super(Table, self).__repr__() - - def __str__(self): - return str(self.value) - - def _getstate(self, protocol=3): - return ( - self._value, - self._trivia, - self._is_aot_element, - self._is_super_table, - self.name, - self.display_name, - ) - - -class InlineTable(Item, dict): - """ - An inline table literal. - """ - - def __init__( - self, value, trivia, new=False - ): # type: (tomlkit.container.Container, Trivia, bool) -> None - super(InlineTable, self).__init__(trivia) - - self._value = value - self._new = new - - for k, v in self._value.body: - if k is not None: - super(InlineTable, self).__setitem__(k.key, v) - - @property - def discriminant(self): # type: () -> int - return 10 - - @property - def value(self): # type: () -> Dict - return self._value - - def append(self, key, _item): # type: (Union[Key, str], Any) -> InlineTable - """ - Appends a (key, item) to the table. - """ - if not isinstance(_item, Item): - _item = item(_item) - - if not isinstance(_item, (Whitespace, Comment)): - if not _item.trivia.indent and len(self._value) > 0 and not self._new: - _item.trivia.indent = " " - if _item.trivia.comment: - _item.trivia.comment = "" - - self._value.append(key, _item) - - if isinstance(key, Key): - key = key.key - - if key is not None: - super(InlineTable, self).__setitem__(key, _item) - - return self - - def remove(self, key): # type: (Union[Key, str]) -> InlineTable - self._value.remove(key) - - if isinstance(key, Key): - key = key.key - - if key is not None: - super(InlineTable, self).__delitem__(key) - - return self - - def as_string(self): # type: () -> str - buf = "{" - for i, (k, v) in enumerate(self._value.body): - if k is None: - if i == len(self._value.body) - 1: - if self._new: - buf = buf.rstrip(", ") - else: - buf = buf.rstrip(",") - - buf += v.as_string() - - continue - - buf += "{}{}{}{}{}{}".format( - v.trivia.indent, - k.as_string() + ("." 
if k.is_dotted() else ""), - k.sep, - v.as_string(), - v.trivia.comment, - v.trivia.trail.replace("\n", ""), - ) - - if i != len(self._value.body) - 1: - buf += "," - if self._new: - buf += " " - - buf += "}" - - return buf - - def keys(self): # type: () -> Generator[str] - for k in self._value.keys(): - yield k - - def values(self): # type: () -> Generator[Item] - for v in self._value.values(): - yield v - - def items(self): # type: () -> Generator[Item] - for k, v in self._value.items(): - yield k, v - - def update(self, other): # type: (Dict) -> None - for k, v in other.items(): - self[k] = v - - def get(self, key, default=None): # type: (Any, Optional[Any]) -> Any - return self._value.get(key, default) - - def __contains__(self, key): # type: (Union[Key, str]) -> bool - return key in self._value - - def __getitem__(self, key): # type: (Union[Key, str]) -> Item - return self._value[key] - - def __setitem__(self, key, value): # type: (Union[Key, str], Any) -> None - if not isinstance(value, Item): - value = item(value) - - self._value[key] = value - - if key is not None: - super(InlineTable, self).__setitem__(key, value) - if value.trivia.comment: - value.trivia.comment = "" - - m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent) - if not m: - return - - indent = m.group(1) - - if not isinstance(value, Whitespace): - m = re.match("(?s)^([^ ]*)(.*)$", value.trivia.indent) - if not m: - value.trivia.indent = indent - else: - value.trivia.indent = m.group(1) + indent + m.group(2) - - def __delitem__(self, key): # type: (Union[Key, str]) -> None - self.remove(key) - - def __repr__(self): - return super(InlineTable, self).__repr__() - - def _getstate(self, protocol=3): - return (self._value, self._trivia) - - -class String(unicode, Item): - """ - A string literal. 
- """ - - def __new__(cls, t, value, original, trivia): - return super(String, cls).__new__(cls, value) - - def __init__( - self, t, _, original, trivia - ): # type: (StringType, str, original, Trivia) -> None - super(String, self).__init__(trivia) - - self._t = t - self._original = original - - @property - def discriminant(self): # type: () -> int - return 11 - - @property - def value(self): # type: () -> str - return self - - def as_string(self): # type: () -> str - return "{}{}{}".format(self._t.value, decode(self._original), self._t.value) - - def __add__(self, other): - result = super(String, self).__add__(other) - - return self._new(result) - - def __sub__(self, other): - result = super(String, self).__sub__(other) - - return self._new(result) - - def _new(self, result): - return String(self._t, result, result, self._trivia) - - def _getstate(self, protocol=3): - return self._t, unicode(self), self._original, self._trivia - - -class AoT(Item, list): - """ - An array of table literal - """ - - def __init__( - self, body, name=None, parsed=False - ): # type: (List[Table], Optional[str], bool) -> None - self.name = name - self._body = [] - self._parsed = parsed - - super(AoT, self).__init__(Trivia(trail="")) - - for table in body: - self.append(table) - - @property - def body(self): # type: () -> List[Table] - return self._body - - @property - def discriminant(self): # type: () -> int - return 12 - - @property - def value(self): # type: () -> List[Dict[Any, Any]] - return [v.value for v in self._body] - - def append(self, table): # type: (Table) -> Table - m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent) - if m: - indent = m.group(1) - - m = re.match("(?s)^([^ ]*)(.*)$", table.trivia.indent) - if not m: - table.trivia.indent = indent - else: - table.trivia.indent = m.group(1) + indent + m.group(2) - - if not self._parsed and "\n" not in table.trivia.indent and self._body: - table.trivia.indent = "\n" + table.trivia.indent - - self._body.append(table) - - super(AoT, self).append(table) - - return table - - def as_string(self): # type: () -> str - b = "" - for table in self._body: - b += table.as_string() - - return b - - def __repr__(self): # type: () -> str - return "".format(self.value) - - def _getstate(self, protocol=3): - return self._body, self.name, self._parsed - - -class Null(Item): - """ - A null item. 
- """ - - def __init__(self): # type: () -> None - pass - - @property - def discriminant(self): # type: () -> int - return -1 - - @property - def value(self): # type: () -> None - return None - - def as_string(self): # type: () -> str - return "" - - def _getstate(self, protocol=3): - return tuple() diff --git a/vendor/poetry-core/poetry/core/_vendor/tomlkit/parser.py b/vendor/poetry-core/poetry/core/_vendor/tomlkit/parser.py deleted file mode 100644 index 49929954..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/tomlkit/parser.py +++ /dev/null @@ -1,1299 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import unicode_literals - -import re -import string - -from typing import Any -from typing import Generator -from typing import List -from typing import Optional -from typing import Tuple -from typing import Union - -from ._compat import chr -from ._compat import decode -from ._utils import RFC_3339_LOOSE -from ._utils import _escaped -from ._utils import parse_rfc3339 -from .container import Container -from .exceptions import EmptyKeyError -from .exceptions import EmptyTableNameError -from .exceptions import InternalParserError -from .exceptions import InvalidCharInStringError -from .exceptions import InvalidControlChar -from .exceptions import InvalidDateError -from .exceptions import InvalidDateTimeError -from .exceptions import InvalidNumberError -from .exceptions import InvalidTimeError -from .exceptions import InvalidUnicodeValueError -from .exceptions import ParseError -from .exceptions import UnexpectedCharError -from .exceptions import UnexpectedEofError -from .items import AoT -from .items import Array -from .items import Bool -from .items import BoolType -from .items import Comment -from .items import Date -from .items import DateTime -from .items import Float -from .items import InlineTable -from .items import Integer -from .items import Item -from .items import Key -from .items import KeyType -from .items import Null -from .items import String -from .items import StringType -from .items import Table -from .items import Time -from .items import Trivia -from .items import Whitespace -from .source import Source -from .toml_char import TOMLChar -from .toml_document import TOMLDocument - - -CTRL_I = 0x09 # Tab -CTRL_J = 0x0A # Line feed -CTRL_M = 0x0D # Carriage return -CTRL_CHAR_LIMIT = 0x1F -CHR_DEL = 0x7F - - -class Parser: - """ - Parser for TOML documents. - """ - - def __init__(self, string): # type: (str) -> None - # Input to parse - self._src = Source(decode(string)) - - self._aot_stack = [] - - @property - def _state(self): - return self._src.state - - @property - def _idx(self): - return self._src.idx - - @property - def _current(self): - return self._src.current - - @property - def _marker(self): - return self._src.marker - - def extract(self): # type: () -> str - """ - Extracts the value between marker and index - """ - return self._src.extract() - - def inc(self, exception=None): # type: (Optional[ParseError.__class__]) -> bool - """ - Increments the parser if the end of the input has not been reached. - Returns whether or not it was able to advance. - """ - return self._src.inc(exception=exception) - - def inc_n(self, n, exception=None): # type: (int, Optional[ParseError]) -> bool - """ - Increments the parser by n characters - if the end of the input has not been reached. - """ - return self._src.inc_n(n=n, exception=exception) - - def consume(self, chars, min=0, max=-1): - """ - Consume chars until min/max is satisfied is valid. 
- """ - return self._src.consume(chars=chars, min=min, max=max) - - def end(self): # type: () -> bool - """ - Returns True if the parser has reached the end of the input. - """ - return self._src.end() - - def mark(self): # type: () -> None - """ - Sets the marker to the index's current position - """ - self._src.mark() - - def parse_error(self, exception=ParseError, *args): - """ - Creates a generic "parse error" at the current position. - """ - return self._src.parse_error(exception, *args) - - def parse(self): # type: () -> TOMLDocument - body = TOMLDocument(True) - - # Take all keyvals outside of tables/AoT's. - while not self.end(): - # Break out if a table is found - if self._current == "[": - break - - # Otherwise, take and append one KV - item = self._parse_item() - if not item: - break - - key, value = item - if key is not None and key.is_dotted(): - # We actually have a table - self._handle_dotted_key(body, key, value) - elif not self._merge_ws(value, body): - body.append(key, value) - - self.mark() - - while not self.end(): - key, value = self._parse_table() - if isinstance(value, Table) and value.is_aot_element(): - # This is just the first table in an AoT. Parse the rest of the array - # along with it. - value = self._parse_aot(value, key.key) - - body.append(key, value) - - body.parsing(False) - - return body - - def _merge_ws(self, item, container): # type: (Item, Container) -> bool - """ - Merges the given Item with the last one currently in the given Container if - both are whitespace items. - - Returns True if the items were merged. - """ - last = container.last_item() - if not last: - return False - - if not isinstance(item, Whitespace) or not isinstance(last, Whitespace): - return False - - start = self._idx - (len(last.s) + len(item.s)) - container.body[-1] = ( - container.body[-1][0], - Whitespace(self._src[start : self._idx]), - ) - - return True - - def _is_child(self, parent, child): # type: (str, str) -> bool - """ - Returns whether a key is strictly a child of another key. - AoT siblings are not considered children of one another. - """ - parent_parts = tuple(self._split_table_name(parent)) - child_parts = tuple(self._split_table_name(child)) - - if parent_parts == child_parts: - return False - - return parent_parts == child_parts[: len(parent_parts)] - - def _split_table_name(self, name): # type: (str) -> Generator[Key] - in_name = False - current = "" - t = KeyType.Bare - parts = 0 - for c in name: - c = TOMLChar(c) - - if c == ".": - if in_name: - current += c - continue - - if not current: - raise self.parse_error() - - yield Key(current.strip(), t=t, sep="", original=current) - - parts += 1 - - current = "" - t = KeyType.Bare - continue - elif c in {"'", '"'}: - if in_name: - if ( - t == KeyType.Literal - and c == '"' - or t == KeyType.Basic - and c == "'" - ): - current += c - continue - - if c != t.value: - raise self.parse_error() - - in_name = False - else: - if ( - current.strip() - and TOMLChar(current[-1]).is_spaces() - and not parts - ): - raise self.parse_error() - - in_name = True - t = KeyType.Literal if c == "'" else KeyType.Basic - - continue - elif in_name or c.is_bare_key_char(): - current += c - elif c.is_spaces(): - # A space is only valid at this point - # if it's in between parts. 
- # We store it for now and will check - # later if it's valid - current += c - continue - else: - raise self.parse_error() - - if current.strip(): - yield Key(current.strip(), t=t, sep="", original=current) - - def _parse_item(self): # type: () -> Optional[Tuple[Optional[Key], Item]] - """ - Attempts to parse the next item and returns it, along with its key - if the item is value-like. - """ - self.mark() - with self._state as state: - while True: - c = self._current - if c == "\n": - # Found a newline; Return all whitespace found up to this point. - self.inc() - - return None, Whitespace(self.extract()) - elif c in " \t\r": - # Skip whitespace. - if not self.inc(): - return None, Whitespace(self.extract()) - elif c == "#": - # Found a comment, parse it - indent = self.extract() - cws, comment, trail = self._parse_comment_trail() - - return None, Comment(Trivia(indent, cws, comment, trail)) - elif c == "[": - # Found a table, delegate to the calling function. - return - else: - # Begining of a KV pair. - # Return to beginning of whitespace so it gets included - # as indentation for the KV about to be parsed. - state.restore = True - break - - return self._parse_key_value(True) - - def _parse_comment_trail(self): # type: () -> Tuple[str, str, str] - """ - Returns (comment_ws, comment, trail) - If there is no comment, comment_ws and comment will - simply be empty. - """ - if self.end(): - return "", "", "" - - comment = "" - comment_ws = "" - self.mark() - - while True: - c = self._current - - if c == "\n": - break - elif c == "#": - comment_ws = self.extract() - - self.mark() - self.inc() # Skip # - - # The comment itself - while not self.end() and not self._current.is_nl(): - code = ord(self._current) - if code == CHR_DEL or code <= CTRL_CHAR_LIMIT and code != CTRL_I: - raise self.parse_error(InvalidControlChar, code, "comments") - - if not self.inc(): - break - - comment = self.extract() - self.mark() - - break - elif c in " \t\r": - self.inc() - else: - raise self.parse_error(UnexpectedCharError, c) - - if self.end(): - break - - while self._current.is_spaces() and self.inc(): - pass - - if self._current == "\r": - self.inc() - - if self._current == "\n": - self.inc() - - trail = "" - if self._idx != self._marker or self._current.is_ws(): - trail = self.extract() - - return comment_ws, comment, trail - - def _parse_key_value(self, parse_comment=False): # type: (bool) -> (Key, Item) - # Leading indent - self.mark() - - while self._current.is_spaces() and self.inc(): - pass - - indent = self.extract() - - # Key - key = self._parse_key() - - self.mark() - - found_equals = self._current == "=" - while self._current.is_kv_sep() and self.inc(): - if self._current == "=": - if found_equals: - raise self.parse_error(UnexpectedCharError, "=") - else: - found_equals = True - pass - - if not key.sep: - key.sep = self.extract() - else: - key.sep += self.extract() - - # Value - val = self._parse_value() - # Comment - if parse_comment: - cws, comment, trail = self._parse_comment_trail() - meta = val.trivia - if not meta.comment_ws: - meta.comment_ws = cws - - meta.comment = comment - meta.trail = trail - else: - val.trivia.trail = "" - - val.trivia.indent = indent - - return key, val - - def _parse_key(self): # type: () -> Key - """ - Parses a Key at the current position; - WS before the key must be exhausted first at the callsite. 
- """ - if self._current in "\"'": - return self._parse_quoted_key() - else: - return self._parse_bare_key() - - def _parse_quoted_key(self): # type: () -> Key - """ - Parses a key enclosed in either single or double quotes. - """ - quote_style = self._current - key_type = None - dotted = False - for t in KeyType: - if t.value == quote_style: - key_type = t - break - - if key_type is None: - raise RuntimeError("Should not have entered _parse_quoted_key()") - - self.inc() - self.mark() - - while self._current != quote_style and self.inc(): - pass - - key = self.extract() - - if self._current == ".": - self.inc() - dotted = True - key += "." + self._parse_key().as_string() - key_type = KeyType.Bare - else: - self.inc() - - return Key(key, key_type, "", dotted) - - def _parse_bare_key(self): # type: () -> Key - """ - Parses a bare key. - """ - key_type = None - dotted = False - - self.mark() - while ( - self._current.is_bare_key_char() or self._current.is_spaces() - ) and self.inc(): - pass - - original = self.extract() - key = original.strip() - if not key: - # Empty key - raise self.parse_error(ParseError, "Empty key found") - - if " " in key: - # Bare key with spaces in it - raise self.parse_error(ParseError, 'Invalid key "{}"'.format(key)) - - if self._current == ".": - self.inc() - dotted = True - original += "." + self._parse_key().as_string() - key = original.strip() - key_type = KeyType.Bare - - return Key(key, key_type, "", dotted, original=original) - - def _handle_dotted_key( - self, container, key, value - ): # type: (Union[Container, Table], Key, Any) -> None - names = tuple(self._split_table_name(key.as_string())) - name = names[0] - name._dotted = True - if name in container: - if not isinstance(value, Table): - table = Table(Container(True), Trivia(), False, is_super_table=True) - _table = table - for i, _name in enumerate(names[1:]): - if i == len(names) - 2: - _name.sep = key.sep - - _table.append(_name, value) - else: - _name._dotted = True - _table.append( - _name, - Table( - Container(True), - Trivia(), - False, - is_super_table=i < len(names) - 2, - ), - ) - - _table = _table[_name] - - value = table - - container.append(name, value) - - return - else: - table = Table(Container(True), Trivia(), False, is_super_table=True) - if isinstance(container, Table): - container.raw_append(name, table) - else: - container.append(name, table) - - for i, _name in enumerate(names[1:]): - if i == len(names) - 2: - _name.sep = key.sep - - table.append(_name, value) - else: - _name._dotted = True - if _name in table.value: - table = table.value[_name] - else: - table.append( - _name, - Table( - Container(True), - Trivia(), - False, - is_super_table=i < len(names) - 2, - ), - ) - - table = table[_name] - - def _parse_value(self): # type: () -> Item - """ - Attempts to parse a value at the current position. 
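# The three key forms handled by _parse_key() above (bare, quoted, dotted),
# exercised through the public API; values are illustrative.
import tomlkit

doc = tomlkit.parse('bare-key = 1\n"quoted key" = 2\nsite."google.com" = 3\n')
assert doc["bare-key"] == 1
assert doc["quoted key"] == 2
assert doc["site"]["google.com"] == 3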
- """ - self.mark() - c = self._current - trivia = Trivia() - - if c == StringType.SLB.value: - return self._parse_basic_string() - elif c == StringType.SLL.value: - return self._parse_literal_string() - elif c == BoolType.TRUE.value[0]: - return self._parse_true() - elif c == BoolType.FALSE.value[0]: - return self._parse_false() - elif c == "[": - return self._parse_array() - elif c == "{": - return self._parse_inline_table() - elif c in "+-" or self._peek(4) in { - "+inf", - "-inf", - "inf", - "+nan", - "-nan", - "nan", - }: - # Number - while self._current not in " \t\n\r#,]}" and self.inc(): - pass - - raw = self.extract() - - item = self._parse_number(raw, trivia) - if item is not None: - return item - - raise self.parse_error(InvalidNumberError) - elif c in string.digits: - # Integer, Float, Date, Time or DateTime - while self._current not in " \t\n\r#,]}" and self.inc(): - pass - - raw = self.extract() - - m = RFC_3339_LOOSE.match(raw) - if m: - if m.group(1) and m.group(5): - # datetime - try: - dt = parse_rfc3339(raw) - return DateTime( - dt.year, - dt.month, - dt.day, - dt.hour, - dt.minute, - dt.second, - dt.microsecond, - dt.tzinfo, - trivia, - raw, - ) - except ValueError: - raise self.parse_error(InvalidDateTimeError) - - if m.group(1): - try: - dt = parse_rfc3339(raw) - date = Date(dt.year, dt.month, dt.day, trivia, raw) - self.mark() - while self._current not in "\t\n\r#,]}" and self.inc(): - pass - - time_raw = self.extract() - if not time_raw.strip(): - trivia.comment_ws = time_raw - return date - - dt = parse_rfc3339(raw + time_raw) - return DateTime( - dt.year, - dt.month, - dt.day, - dt.hour, - dt.minute, - dt.second, - dt.microsecond, - dt.tzinfo, - trivia, - raw + time_raw, - ) - except ValueError: - raise self.parse_error(InvalidDateError) - - if m.group(5): - try: - t = parse_rfc3339(raw) - return Time( - t.hour, - t.minute, - t.second, - t.microsecond, - t.tzinfo, - trivia, - raw, - ) - except ValueError: - raise self.parse_error(InvalidTimeError) - - item = self._parse_number(raw, trivia) - if item is not None: - return item - - raise self.parse_error(InvalidNumberError) - else: - raise self.parse_error(UnexpectedCharError, c) - - def _parse_true(self): - return self._parse_bool(BoolType.TRUE) - - def _parse_false(self): - return self._parse_bool(BoolType.FALSE) - - def _parse_bool(self, style): # type: (BoolType) -> Bool - with self._state: - style = BoolType(style) - - # only keep parsing for bool if the characters match the style - # try consuming rest of chars in style - for c in style: - self.consume(c, min=1, max=1) - - return Bool(style, Trivia()) - - def _parse_array(self): # type: () -> Array - # Consume opening bracket, EOF here is an issue (middle of array) - self.inc(exception=UnexpectedEofError) - - elems = [] # type: List[Item] - prev_value = None - while True: - # consume whitespace - mark = self._idx - self.consume(TOMLChar.SPACES) - newline = self.consume(TOMLChar.NL) - indent = self._src[mark : self._idx] - if newline: - elems.append(Whitespace(indent)) - continue - - # consume comment - if self._current == "#": - cws, comment, trail = self._parse_comment_trail() - elems.append(Comment(Trivia(indent, cws, comment, trail))) - continue - - # consume indent - if indent: - elems.append(Whitespace(indent)) - continue - - # consume value - if not prev_value: - try: - elems.append(self._parse_value()) - prev_value = True - continue - except UnexpectedCharError: - pass - - # consume comma - if prev_value and self._current == ",": - 
self.inc(exception=UnexpectedEofError) - elems.append(Whitespace(",")) - prev_value = False - continue - - # consume closing bracket - if self._current == "]": - # consume closing bracket, EOF here doesn't matter - self.inc() - break - - raise self.parse_error(UnexpectedCharError, self._current) - - try: - res = Array(elems, Trivia()) - except ValueError: - pass - else: - return res - - def _parse_inline_table(self): # type: () -> InlineTable - # consume opening bracket, EOF here is an issue (middle of array) - self.inc(exception=UnexpectedEofError) - - elems = Container(True) - trailing_comma = None - while True: - # consume leading whitespace - mark = self._idx - self.consume(TOMLChar.SPACES) - raw = self._src[mark : self._idx] - if raw: - elems.add(Whitespace(raw)) - - if not trailing_comma: - # None: empty inline table - # False: previous key-value pair was not followed by a comma - if self._current == "}": - # consume closing bracket, EOF here doesn't matter - self.inc() - break - - if ( - trailing_comma is False - or trailing_comma is None - and self._current == "," - ): - # Either the previous key-value pair was not followed by a comma - # or the table has an unexpected leading comma. - raise self.parse_error(UnexpectedCharError, self._current) - else: - # True: previous key-value pair was followed by a comma - if self._current == "}" or self._current == ",": - raise self.parse_error(UnexpectedCharError, self._current) - - key, val = self._parse_key_value(False) - if key.is_dotted(): - self._handle_dotted_key(elems, key, val) - else: - elems.add(key, val) - - # consume trailing whitespace - mark = self._idx - self.consume(TOMLChar.SPACES) - raw = self._src[mark : self._idx] - if raw: - elems.add(Whitespace(raw)) - - # consume trailing comma - trailing_comma = self._current == "," - if trailing_comma: - # consume closing bracket, EOF here is an issue (middle of inline table) - self.inc(exception=UnexpectedEofError) - - return InlineTable(elems, Trivia()) - - def _parse_number(self, raw, trivia): # type: (str, Trivia) -> Optional[Item] - # Leading zeros are not allowed - sign = "" - if raw.startswith(("+", "-")): - sign = raw[0] - raw = raw[1:] - - if ( - len(raw) > 1 - and raw.startswith("0") - and not raw.startswith(("0.", "0o", "0x", "0b", "0e")) - ): - return - - if raw.startswith(("0o", "0x", "0b")) and sign: - return - - digits = "[0-9]" - base = 10 - if raw.startswith("0b"): - digits = "[01]" - base = 2 - elif raw.startswith("0o"): - digits = "[0-7]" - base = 8 - elif raw.startswith("0x"): - digits = "[0-9a-f]" - base = 16 - - # Underscores should be surrounded by digits - clean = re.sub("(?i)(?<={})_(?={})".format(digits, digits), "", raw) - - if "_" in clean: - return - - if clean.endswith("."): - return - - try: - return Integer(int(sign + clean, base), trivia, sign + raw) - except ValueError: - try: - return Float(float(sign + clean), trivia, sign + raw) - except ValueError: - return - - def _parse_literal_string(self): # type: () -> String - with self._state: - return self._parse_string(StringType.SLL) - - def _parse_basic_string(self): # type: () -> String - with self._state: - return self._parse_string(StringType.SLB) - - def _parse_escaped_char(self, multiline): - if multiline and self._current.is_ws(): - # When the last non-whitespace character on a line is - # a \, it will be trimmed along with all whitespace - # (including newlines) up to the next non-whitespace - # character or closing delimiter. 
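# The underscore and leading-zero rules enforced by _parse_number() above,
# in spec terms: underscores only between digits, no leading zeros outside
# the 0x/0o/0b/0. prefixes.
import tomlkit
from tomlkit.exceptions import ParseError

assert tomlkit.parse("a = 1_000\nb = 0xdead_beef\n")["a"] == 1000
try:
    tomlkit.parse("bad = 01")
except ParseError as exc:
    print("rejected:", exc)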
- # """\ - # hello \ - # world""" - tmp = "" - while self._current.is_ws(): - tmp += self._current - # consume the whitespace, EOF here is an issue - # (middle of string) - self.inc(exception=UnexpectedEofError) - continue - - # the escape followed by whitespace must have a newline - # before any other chars - if "\n" not in tmp: - raise self.parse_error(InvalidCharInStringError, self._current) - - return "" - - if self._current in _escaped: - c = _escaped[self._current] - - # consume this char, EOF here is an issue (middle of string) - self.inc(exception=UnexpectedEofError) - - return c - - if self._current in {"u", "U"}: - # this needs to be a unicode - u, ue = self._peek_unicode(self._current == "U") - if u is not None: - # consume the U char and the unicode value - self.inc_n(len(ue) + 1) - - return u - - raise self.parse_error(InvalidUnicodeValueError) - - raise self.parse_error(InvalidCharInStringError, self._current) - - def _parse_string(self, delim): # type: (StringType) -> String - # only keep parsing for string if the current character matches the delim - if self._current != delim.unit: - raise self.parse_error( - InternalParserError, - "Invalid character for string type {}".format(delim), - ) - - # consume the opening/first delim, EOF here is an issue - # (middle of string or middle of delim) - self.inc(exception=UnexpectedEofError) - - if self._current == delim.unit: - # consume the closing/second delim, we do not care if EOF occurs as - # that would simply imply an empty single line string - if not self.inc() or self._current != delim.unit: - # Empty string - return String(delim, "", "", Trivia()) - - # consume the third delim, EOF here is an issue (middle of string) - self.inc(exception=UnexpectedEofError) - - delim = delim.toggle() # convert delim to multi delim - - self.mark() # to extract the original string with whitespace and all - value = "" - - # A newline immediately following the opening delimiter will be trimmed. - if delim.is_multiline() and self._current == "\n": - # consume the newline, EOF here is an issue (middle of string) - self.inc(exception=UnexpectedEofError) - - escaped = False # whether the previous key was ESCAPE - while True: - code = ord(self._current) - if ( - delim.is_singleline() - and not escaped - and (code == CHR_DEL or code <= CTRL_CHAR_LIMIT and code != CTRL_I) - ): - raise self.parse_error(InvalidControlChar, code, "strings") - elif ( - delim.is_multiline() - and not escaped - and ( - code == CHR_DEL - or code <= CTRL_CHAR_LIMIT - and code not in [CTRL_I, CTRL_J, CTRL_M] - ) - ): - raise self.parse_error(InvalidControlChar, code, "strings") - elif not escaped and self._current == delim.unit: - # try to process current as a closing delim - original = self.extract() - - close = "" - if delim.is_multiline(): - # Consume the delimiters to see if we are at the end of the string - close = "" - while self._current == delim.unit: - close += self._current - self.inc() - - if len(close) < 3: - # Not a triple quote, leave in result as-is. 
- # Adding back the characters we already consumed - value += close - continue - - if len(close) == 3: - # We are at the end of the string - return String(delim, value, original, Trivia()) - - if len(close) >= 6: - raise self.parse_error(InvalidCharInStringError, self._current) - - value += close[:-3] - original += close[:-3] - - return String(delim, value, original, Trivia()) - else: - # consume the closing delim, we do not care if EOF occurs as - # that would simply imply the end of self._src - self.inc() - - return String(delim, value, original, Trivia()) - elif delim.is_basic() and escaped: - # attempt to parse the current char as an escaped value, an exception - # is raised if this fails - value += self._parse_escaped_char(delim.is_multiline()) - - # no longer escaped - escaped = False - elif delim.is_basic() and self._current == "\\": - # the next char is being escaped - escaped = True - - # consume this char, EOF here is an issue (middle of string) - self.inc(exception=UnexpectedEofError) - else: - # this is either a literal string where we keep everything as is, - # or this is not a special escaped char in a basic string - value += self._current - - # consume this char, EOF here is an issue (middle of string) - self.inc(exception=UnexpectedEofError) - - def _parse_table( - self, parent_name=None, parent=None - ): # type: (Optional[str], Optional[Table]) -> Tuple[Key, Union[Table, AoT]] - """ - Parses a table element. - """ - if self._current != "[": - raise self.parse_error( - InternalParserError, "_parse_table() called on non-bracket character." - ) - - indent = self.extract() - self.inc() # Skip opening bracket - - if self.end(): - raise self.parse_error(UnexpectedEofError) - - is_aot = False - if self._current == "[": - if not self.inc(): - raise self.parse_error(UnexpectedEofError) - - is_aot = True - - # Consume any whitespace - self.mark() - while self._current.is_spaces() and self.inc(): - pass - - ws_prefix = self.extract() - - # Key - if self._current in [StringType.SLL.value, StringType.SLB.value]: - delimiter = ( - StringType.SLL - if self._current == StringType.SLL.value - else StringType.SLB - ) - name = self._parse_string(delimiter) - name = "{delimiter}{name}{delimiter}".format( - delimiter=delimiter.value, name=name - ) - - self.mark() - while self._current != "]" and self.inc(): - if self.end(): - raise self.parse_error(UnexpectedEofError) - - pass - - ws_suffix = self.extract() - name += ws_suffix - else: - self.mark() - while self._current != "]" and self.inc(): - if self.end(): - raise self.parse_error(UnexpectedEofError) - - pass - - name = self.extract() - - name = ws_prefix + name - - if not name.strip(): - raise self.parse_error(EmptyTableNameError) - - key = Key(name, sep="") - name_parts = tuple(self._split_table_name(name)) - if any(" " in part.key.strip() and part.is_bare() for part in name_parts): - raise self.parse_error(ParseError, 'Invalid table name "{}"'.format(name)) - - missing_table = False - if parent_name: - parent_name_parts = tuple(self._split_table_name(parent_name)) - else: - parent_name_parts = tuple() - - if len(name_parts) > len(parent_name_parts) + 1: - missing_table = True - - name_parts = name_parts[len(parent_name_parts) :] - - values = Container(True) - - self.inc() # Skip closing bracket - if is_aot: - # TODO: Verify close bracket - self.inc() - - cws, comment, trail = self._parse_comment_trail() - - result = Null() - table = Table( - values, - Trivia(indent, cws, comment, trail), - is_aot, - name=name, - display_name=name, - ) - 
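# The "line-ending backslash" handling implemented by _parse_escaped_char()
# and _parse_string() above, per the TOML spec: a trailing backslash eats the
# newline and any following indent.
import tomlkit

doc = tomlkit.parse('s = """\nhello \\\n    world"""\n')
assert doc["s"] == "hello world"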
- if len(name_parts) > 1: - if missing_table: - # Missing super table - # i.e. a table initialized like this: [foo.bar] - # without initializing [foo] - # - # So we have to create the parent tables - table = Table( - Container(True), - Trivia(indent, cws, comment, trail), - is_aot and name_parts[0].key in self._aot_stack, - is_super_table=True, - name=name_parts[0].key, - ) - - result = table - key = name_parts[0] - - for i, _name in enumerate(name_parts[1:]): - if _name in table: - child = table[_name] - else: - child = Table( - Container(True), - Trivia(indent, cws, comment, trail), - is_aot and i == len(name_parts[1:]) - 1, - is_super_table=i < len(name_parts[1:]) - 1, - name=_name.key, - display_name=name if i == len(name_parts[1:]) - 1 else None, - ) - - if is_aot and i == len(name_parts[1:]) - 1: - table.append(_name, AoT([child], name=table.name, parsed=True)) - else: - table.append(_name, child) - - table = child - values = table.value - else: - if name_parts: - key = name_parts[0] - - while not self.end(): - item = self._parse_item() - if item: - _key, item = item - if not self._merge_ws(item, values): - if _key is not None and _key.is_dotted(): - self._handle_dotted_key(table, _key, item) - else: - table.raw_append(_key, item) - else: - if self._current == "[": - is_aot_next, name_next = self._peek_table() - - if self._is_child(name, name_next): - key_next, table_next = self._parse_table(name, table) - - table.raw_append(key_next, table_next) - - # Picking up any sibling - while not self.end(): - _, name_next = self._peek_table() - - if not self._is_child(name, name_next): - break - - key_next, table_next = self._parse_table(name, table) - - table.raw_append(key_next, table_next) - - break - else: - raise self.parse_error( - InternalParserError, - "_parse_item() returned None on a non-bracket character.", - ) - - if isinstance(result, Null): - result = table - - if is_aot and (not self._aot_stack or name != self._aot_stack[-1]): - result = self._parse_aot(result, name) - - return key, result - - def _peek_table(self): # type: () -> Tuple[bool, str] - """ - Peeks ahead non-intrusively by cloning then restoring the - initial state of the parser. - - Returns the name of the table about to be parsed, - as well as whether it is part of an AoT. - """ - # we always want to restore after exiting this scope - with self._state(save_marker=True, restore=True): - if self._current != "[": - raise self.parse_error( - InternalParserError, - "_peek_table() entered on non-bracket character", - ) - - # AoT - self.inc() - is_aot = False - if self._current == "[": - self.inc() - is_aot = True - - self.mark() - - while self._current != "]" and self.inc(): - table_name = self.extract() - - return is_aot, table_name - - def _parse_aot(self, first, name_first): # type: (Table, str) -> AoT - """ - Parses all siblings of the provided table first and bundles them into - an AoT. - """ - payload = [first] - self._aot_stack.append(name_first) - while not self.end(): - is_aot_next, name_next = self._peek_table() - if is_aot_next and name_next == name_first: - _, table = self._parse_table(name_first) - payload.append(table) - else: - break - - self._aot_stack.pop() - - return AoT(payload, parsed=True) - - def _peek(self, n): # type: (int) -> str - """ - Peeks ahead n characters. - - n is the max number of characters that will be peeked. 
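# What _parse_table()/_parse_aot() above produce for the user: sibling
# [[tables]] bundled into a single array of tables.
import tomlkit

doc = tomlkit.parse('[[fruit]]\nname = "apple"\n\n[[fruit]]\nname = "banana"\n')
assert [f["name"] for f in doc["fruit"]] == ["apple", "banana"]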
- """ - # we always want to restore after exiting this scope - with self._state(restore=True): - buf = "" - for _ in range(n): - if self._current not in " \t\n\r#,]}": - buf += self._current - self.inc() - continue - - break - return buf - - def _peek_unicode( - self, is_long - ): # type: (bool) -> Tuple[Optional[str], Optional[str]] - """ - Peeks ahead non-intrusively by cloning then restoring the - initial state of the parser. - - Returns the unicode value is it's a valid one else None. - """ - # we always want to restore after exiting this scope - with self._state(save_marker=True, restore=True): - if self._current not in {"u", "U"}: - raise self.parse_error( - InternalParserError, "_peek_unicode() entered on non-unicode value" - ) - - self.inc() # Dropping prefix - self.mark() - - if is_long: - chars = 8 - else: - chars = 4 - - if not self.inc_n(chars): - value, extracted = None, None - else: - extracted = self.extract() - - if extracted[0].lower() == "d" and extracted[1].strip("01234567"): - return None, None - - try: - value = chr(int(extracted, 16)) - except (ValueError, OverflowError): - value = None - - return value, extracted diff --git a/vendor/poetry-core/poetry/core/_vendor/tomlkit/source.py b/vendor/poetry-core/poetry/core/_vendor/tomlkit/source.py deleted file mode 100644 index 6a6a2391..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/tomlkit/source.py +++ /dev/null @@ -1,193 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import unicode_literals - -import itertools - -from copy import copy -from typing import Any -from typing import Optional -from typing import Tuple -from typing import Type - -from ._compat import PY2 -from ._compat import unicode -from .exceptions import ParseError -from .exceptions import UnexpectedCharError -from .exceptions import UnexpectedEofError -from .toml_char import TOMLChar - - -class _State: - def __init__( - self, source, save_marker=False, restore=False - ): # type: (_Source, Optional[bool], Optional[bool]) -> None - self._source = source - self._save_marker = save_marker - self.restore = restore - - def __enter__(self): # type: () -> None - # Entering this context manager - save the state - if PY2: - # Python 2.7 does not allow to directly copy - # an iterator, so we have to make tees of the original - # chars iterator. - self._source._chars, self._chars = itertools.tee(self._source._chars) - else: - self._chars = copy(self._source._chars) - self._idx = self._source._idx - self._current = self._source._current - self._marker = self._source._marker - - return self - - def __exit__(self, exception_type, exception_val, trace): - # Exiting this context manager - restore the prior state - if self.restore or exception_type: - self._source._chars = self._chars - self._source._idx = self._idx - self._source._current = self._current - if self._save_marker: - self._source._marker = self._marker - - -class _StateHandler: - """ - State preserver for the Parser. 
- """ - - def __init__(self, source): # type: (Source) -> None - self._source = source - self._states = [] - - def __call__(self, *args, **kwargs): - return _State(self._source, *args, **kwargs) - - def __enter__(self): # type: () -> None - state = self() - self._states.append(state) - return state.__enter__() - - def __exit__(self, exception_type, exception_val, trace): - state = self._states.pop() - return state.__exit__(exception_type, exception_val, trace) - - -class Source(unicode): - EOF = TOMLChar("\0") - - def __init__(self, _): # type: (unicode) -> None - super(Source, self).__init__() - - # Collection of TOMLChars - self._chars = iter([(i, TOMLChar(c)) for i, c in enumerate(self)]) - - self._idx = 0 - self._marker = 0 - self._current = TOMLChar("") - - self._state = _StateHandler(self) - - self.inc() - - def reset(self): - # initialize both idx and current - self.inc() - - # reset marker - self.mark() - - @property - def state(self): # type: () -> _StateHandler - return self._state - - @property - def idx(self): # type: () -> int - return self._idx - - @property - def current(self): # type: () -> TOMLChar - return self._current - - @property - def marker(self): # type: () -> int - return self._marker - - def extract(self): # type: () -> unicode - """ - Extracts the value between marker and index - """ - return self[self._marker : self._idx] - - def inc(self, exception=None): # type: (Optional[Type[ParseError]]) -> bool - """ - Increments the parser if the end of the input has not been reached. - Returns whether or not it was able to advance. - """ - try: - self._idx, self._current = next(self._chars) - - return True - except StopIteration: - self._idx = len(self) - self._current = self.EOF - if exception: - raise self.parse_error(exception) - - return False - - def inc_n(self, n, exception=None): # type: (int, Exception) -> bool - """ - Increments the parser by n characters - if the end of the input has not been reached. - """ - for _ in range(n): - if not self.inc(exception=exception): - return False - - return True - - def consume(self, chars, min=0, max=-1): - """ - Consume chars until min/max is satisfied is valid. - """ - while self.current in chars and max != 0: - min -= 1 - max -= 1 - if not self.inc(): - break - - # failed to consume minimum number of characters - if min > 0: - self.parse_error(UnexpectedCharError) - - def end(self): # type: () -> bool - """ - Returns True if the parser has reached the end of the input. - """ - return self._current is self.EOF - - def mark(self): # type: () -> None - """ - Sets the marker to the index's current position - """ - self._marker = self._idx - - def parse_error( - self, exception=ParseError, *args - ): # type: (Type[ParseError], Any) -> ParseError - """ - Creates a generic "parse error" at the current position. 
- """ - line, col = self._to_linecol() - - return exception(line, col, *args) - - def _to_linecol(self): # type: () -> Tuple[int, int] - cur = 0 - for i, line in enumerate(self.splitlines()): - if cur + len(line) + 1 > self.idx: - return (i + 1, self.idx - cur) - - cur += len(line) + 1 - - return len(self.splitlines()), 0 diff --git a/vendor/poetry-core/poetry/core/_vendor/tomlkit/toml_char.py b/vendor/poetry-core/poetry/core/_vendor/tomlkit/toml_char.py deleted file mode 100644 index 079b16cc..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/tomlkit/toml_char.py +++ /dev/null @@ -1,67 +0,0 @@ -import string - -from ._compat import PY2 -from ._compat import unicode - - -if PY2: - from functools32 import lru_cache -else: - from functools import lru_cache - - -class TOMLChar(unicode): - def __init__(self, c): - super(TOMLChar, self).__init__() - - if len(self) > 1: - raise ValueError("A TOML character must be of length 1") - - BARE = string.ascii_letters + string.digits + "-_" - KV = "= \t" - NUMBER = string.digits + "+-_.e" - SPACES = " \t" - NL = "\n\r" - WS = SPACES + NL - - @lru_cache(maxsize=None) - def is_bare_key_char(self): # type: () -> bool - """ - Whether the character is a valid bare key name or not. - """ - return self in self.BARE - - @lru_cache(maxsize=None) - def is_kv_sep(self): # type: () -> bool - """ - Whether the character is a valid key/value separator ot not. - """ - return self in self.KV - - @lru_cache(maxsize=None) - def is_int_float_char(self): # type: () -> bool - """ - Whether the character if a valid integer or float value character or not. - """ - return self in self.NUMBER - - @lru_cache(maxsize=None) - def is_ws(self): # type: () -> bool - """ - Whether the character is a whitespace character or not. - """ - return self in self.WS - - @lru_cache(maxsize=None) - def is_nl(self): # type: () -> bool - """ - Whether the character is a new line character or not. - """ - return self in self.NL - - @lru_cache(maxsize=None) - def is_spaces(self): # type: () -> bool - """ - Whether the character is a space or not - """ - return self in self.SPACES diff --git a/vendor/poetry-core/poetry/core/_vendor/tomlkit/toml_document.py b/vendor/poetry-core/poetry/core/_vendor/tomlkit/toml_document.py deleted file mode 100644 index b485e302..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/tomlkit/toml_document.py +++ /dev/null @@ -1,7 +0,0 @@ -from .container import Container - - -class TOMLDocument(Container): - """ - A TOML document. - """ diff --git a/vendor/poetry-core/poetry/core/_vendor/tomlkit/toml_file.py b/vendor/poetry-core/poetry/core/_vendor/tomlkit/toml_file.py deleted file mode 100644 index 3b416664..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/tomlkit/toml_file.py +++ /dev/null @@ -1,24 +0,0 @@ -import io - -from typing import Any -from typing import Dict - -from .api import loads -from .toml_document import TOMLDocument - - -class TOMLFile(object): - """ - Represents a TOML file. 
- """ - - def __init__(self, path): # type: (str) -> None - self._path = path - - def read(self): # type: () -> TOMLDocument - with io.open(self._path, encoding="utf-8") as f: - return loads(f.read()) - - def write(self, data): # type: (TOMLDocument) -> None - with io.open(self._path, "w", encoding="utf-8") as f: - f.write(data.as_string()) diff --git a/vendor/poetry-core/poetry/core/_vendor/vendor.txt b/vendor/poetry-core/poetry/core/_vendor/vendor.txt deleted file mode 100644 index 13de1ee1..00000000 --- a/vendor/poetry-core/poetry/core/_vendor/vendor.txt +++ /dev/null @@ -1,9 +0,0 @@ -attrs==20.3.0; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" -jsonschema==3.2.0 -lark-parser==0.9.0 -packaging==20.9; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.4.0") -pyparsing==2.4.7; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" -pyrsistent==0.16.1; python_version >= "2.7" -six==1.15.0; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.3.0" and python_version >= "2.7" -tomlkit==0.7.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") -typing-extensions==3.7.4.3; python_version >= "3.6" and python_version < "3.8" diff --git a/vendor/poetry-core/poetry/core/exceptions/__init__.py b/vendor/poetry-core/poetry/core/exceptions/__init__.py deleted file mode 100644 index 5a258870..00000000 --- a/vendor/poetry-core/poetry/core/exceptions/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from poetry.core.exceptions.base import PoetryCoreException - - -__all__ = [clazz.__name__ for clazz in {PoetryCoreException}] diff --git a/vendor/poetry-core/poetry/core/exceptions/base.py b/vendor/poetry-core/poetry/core/exceptions/base.py deleted file mode 100644 index 41b1c3e8..00000000 --- a/vendor/poetry-core/poetry/core/exceptions/base.py +++ /dev/null @@ -1,2 +0,0 @@ -class PoetryCoreException(Exception): - pass diff --git a/vendor/poetry-core/poetry/core/factory.py b/vendor/poetry-core/poetry/core/factory.py deleted file mode 100644 index be157f00..00000000 --- a/vendor/poetry-core/poetry/core/factory.py +++ /dev/null @@ -1,373 +0,0 @@ -from __future__ import absolute_import -from __future__ import unicode_literals - -import logging - -from typing import Any -from typing import Dict -from typing import List -from typing import Optional -from typing import Union -from warnings import warn - -from .json import validate_object -from .packages.dependency import Dependency -from .packages.project_package import ProjectPackage -from .poetry import Poetry -from .pyproject import PyProjectTOML -from .spdx import license_by_id -from .utils._compat import Path - - -logger = logging.getLogger(__name__) - - -class Factory(object): - """ - Factory class to create various elements needed by Poetry. 
- """ - - def create_poetry( - self, cwd=None, with_dev=True - ): # type: (Optional[Path], bool) -> Poetry - poetry_file = self.locate(cwd) - local_config = PyProjectTOML(path=poetry_file).poetry_config - - # Checking validity - check_result = self.validate(local_config) - if check_result["errors"]: - message = "" - for error in check_result["errors"]: - message += " - {}\n".format(error) - - raise RuntimeError("The Poetry configuration is invalid:\n" + message) - - # Load package - name = local_config["name"] - version = local_config["version"] - package = ProjectPackage(name, version, version) - package.root_dir = poetry_file.parent - - for author in local_config["authors"]: - package.authors.append(author) - - for maintainer in local_config.get("maintainers", []): - package.maintainers.append(maintainer) - - package.description = local_config.get("description", "") - package.homepage = local_config.get("homepage") - package.repository_url = local_config.get("repository") - package.documentation_url = local_config.get("documentation") - try: - license_ = license_by_id(local_config.get("license", "")) - except ValueError: - license_ = None - - package.license = license_ - package.keywords = local_config.get("keywords", []) - package.classifiers = local_config.get("classifiers", []) - - if "readme" in local_config: - package.readme = Path(poetry_file.parent) / local_config["readme"] - - if "platform" in local_config: - package.platform = local_config["platform"] - - if "dependencies" in local_config: - for name, constraint in local_config["dependencies"].items(): - if name.lower() == "python": - package.python_versions = constraint - continue - - if isinstance(constraint, list): - for _constraint in constraint: - package.add_dependency( - self.create_dependency( - name, _constraint, root_dir=package.root_dir - ) - ) - - continue - - package.add_dependency( - self.create_dependency(name, constraint, root_dir=package.root_dir) - ) - - if with_dev and "dev-dependencies" in local_config: - for name, constraint in local_config["dev-dependencies"].items(): - if isinstance(constraint, list): - for _constraint in constraint: - package.add_dependency( - self.create_dependency( - name, - _constraint, - category="dev", - root_dir=package.root_dir, - ) - ) - - continue - - package.add_dependency( - self.create_dependency( - name, constraint, category="dev", root_dir=package.root_dir - ) - ) - - extras = local_config.get("extras", {}) - for extra_name, requirements in extras.items(): - package.extras[extra_name] = [] - - # Checking for dependency - for req in requirements: - req = Dependency(req, "*") - - for dep in package.requires: - if dep.name == req.name: - dep.in_extras.append(extra_name) - package.extras[extra_name].append(dep) - - break - - if "build" in local_config: - build = local_config["build"] - if not isinstance(build, dict): - build = {"script": build} - package.build_config = build or {} - - if "include" in local_config: - package.include = [] - - for include in local_config["include"]: - if not isinstance(include, dict): - include = {"path": include} - - formats = include.get("format", []) - if formats and not isinstance(formats, list): - formats = [formats] - include["format"] = formats - - package.include.append(include) - - if "exclude" in local_config: - package.exclude = local_config["exclude"] - - if "packages" in local_config: - package.packages = local_config["packages"] - - # Custom urls - if "urls" in local_config: - package.custom_urls = local_config["urls"] - - return 
Poetry(poetry_file, local_config, package) - - @classmethod - def create_dependency( - cls, - name, # type: str - constraint, # type: Union[str, Dict[str, Any]] - category="main", # type: str - root_dir=None, # type: Optional[Path] - ): # type: (...) -> Dependency - from .packages.constraints import parse_constraint as parse_generic_constraint - from .packages.directory_dependency import DirectoryDependency - from .packages.file_dependency import FileDependency - from .packages.url_dependency import URLDependency - from .packages.utils.utils import create_nested_marker - from .packages.vcs_dependency import VCSDependency - from .version.markers import AnyMarker - from .version.markers import parse_marker - - if constraint is None: - constraint = "*" - - if isinstance(constraint, dict): - optional = constraint.get("optional", False) - python_versions = constraint.get("python") - platform = constraint.get("platform") - markers = constraint.get("markers") - if "allows-prereleases" in constraint: - message = ( - 'The "{}" dependency specifies ' - 'the "allows-prereleases" property, which is deprecated. ' - 'Use "allow-prereleases" instead.'.format(name) - ) - warn(message, DeprecationWarning) - logger.warning(message) - - allows_prereleases = constraint.get( - "allow-prereleases", constraint.get("allows-prereleases", False) - ) - - if "git" in constraint: - # VCS dependency - dependency = VCSDependency( - name, - "git", - constraint["git"], - branch=constraint.get("branch", None), - tag=constraint.get("tag", None), - rev=constraint.get("rev", None), - category=category, - optional=optional, - develop=constraint.get("develop", False), - extras=constraint.get("extras", []), - ) - elif "file" in constraint: - file_path = Path(constraint["file"]) - - dependency = FileDependency( - name, - file_path, - category=category, - base=root_dir, - extras=constraint.get("extras", []), - ) - elif "path" in constraint: - path = Path(constraint["path"]) - - if root_dir: - is_file = root_dir.joinpath(path).is_file() - else: - is_file = path.is_file() - - if is_file: - dependency = FileDependency( - name, - path, - category=category, - optional=optional, - base=root_dir, - extras=constraint.get("extras", []), - ) - else: - dependency = DirectoryDependency( - name, - path, - category=category, - optional=optional, - base=root_dir, - develop=constraint.get("develop", False), - extras=constraint.get("extras", []), - ) - elif "url" in constraint: - dependency = URLDependency( - name, - constraint["url"], - category=category, - optional=optional, - extras=constraint.get("extras", []), - ) - else: - version = constraint["version"] - - dependency = Dependency( - name, - version, - optional=optional, - category=category, - allows_prereleases=allows_prereleases, - extras=constraint.get("extras", []), - ) - - if not markers: - marker = AnyMarker() - if python_versions: - dependency.python_versions = python_versions - marker = marker.intersect( - parse_marker( - create_nested_marker( - "python_version", dependency.python_constraint - ) - ) - ) - - if platform: - marker = marker.intersect( - parse_marker( - create_nested_marker( - "sys_platform", parse_generic_constraint(platform) - ) - ) - ) - else: - marker = parse_marker(markers) - - if not marker.is_any(): - dependency.marker = marker - - dependency.source_name = constraint.get("source") - else: - dependency = Dependency(name, constraint, category=category) - - return dependency - - @classmethod - def validate( - cls, config, strict=False - ): # type: (dict, bool) -> 
Dict[str, List[str]] - """ - Checks the validity of a configuration - """ - result = {"errors": [], "warnings": []} - # Schema validation errors - validation_errors = validate_object(config, "poetry-schema") - - result["errors"] += validation_errors - - if strict: - # If strict, check the file more thoroughly - if "dependencies" in config: - python_versions = config["dependencies"]["python"] - if python_versions == "*": - result["warnings"].append( - "A wildcard Python dependency is ambiguous. " - "Consider specifying a more explicit one." - ) - - for name, constraint in config["dependencies"].items(): - if not isinstance(constraint, dict): - continue - - if "allows-prereleases" in constraint: - result["warnings"].append( - 'The "{}" dependency specifies ' - 'the "allows-prereleases" property, which is deprecated. ' - 'Use "allow-prereleases" instead.'.format(name) - ) - - # Checking for scripts with extras - if "scripts" in config: - scripts = config["scripts"] - for name, script in scripts.items(): - if not isinstance(script, dict): - continue - - extras = script["extras"] - for extra in extras: - if extra not in config["extras"]: - result["errors"].append( - 'Script "{}" requires extra "{}" which is not defined.'.format( - name, extra - ) - ) - - return result - - @classmethod - def locate(cls, cwd): # type: (Path) -> Path - candidates = [Path(cwd)] - candidates.extend(Path(cwd).parents) - - for path in candidates: - poetry_file = path / "pyproject.toml" - - if poetry_file.exists(): - return poetry_file - - else: - raise RuntimeError( - "Poetry could not find a pyproject.toml file in {} or its parents".format( - cwd - ) - ) diff --git a/vendor/poetry-core/poetry/core/json/__init__.py b/vendor/poetry-core/poetry/core/json/__init__.py deleted file mode 100644 index 9f94540f..00000000 --- a/vendor/poetry-core/poetry/core/json/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -import json - -from importlib import resources -from typing import List - -from jsonschema import Draft7Validator - - -class ValidationError(ValueError): - - pass - - -def validate_object(obj, schema_name): # type: (dict, str) -> List[str] - try: - schema = json.loads( - resources.read_text( - f"{__name__}.schemas", "{}.json".format(schema_name) - ) - ) - except Exception: - raise ValueError("Schema {} does not exist.".format(schema_name)) - - validator = Draft7Validator(schema) - validation_errors = sorted(validator.iter_errors(obj), key=lambda e: e.path) - - errors = [] - - for error in validation_errors: - message = error.message - if error.path: - message = "[{}] {}".format( - ".".join(str(x) for x in error.absolute_path), message - ) - - errors.append(message) - - return errors diff --git a/vendor/poetry-core/poetry/core/json/schemas/poetry-schema.json b/vendor/poetry-core/poetry/core/json/schemas/poetry-schema.json deleted file mode 100644 index 81664910..00000000 --- a/vendor/poetry-core/poetry/core/json/schemas/poetry-schema.json +++ /dev/null @@ -1,591 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - "name": "Package", - "type": "object", - "additionalProperties": false, - "required": [ - "name", - "version", - "description" - ], - "properties": { - "name": { - "type": "string", - "description": "Package name." - }, - "version": { - "type": "string", - "description": "Package version." - }, - "description": { - "type": "string", - "description": "Short package description." 
- }, - "keywords": { - "type": "array", - "items": { - "type": "string", - "description": "A tag/keyword that this package relates to." - } - }, - "homepage": { - "type": "string", - "description": "Homepage URL for the project.", - "format": "uri" - }, - "repository": { - "type": "string", - "description": "Repository URL for the project.", - "format": "uri" - }, - "documentation": { - "type": "string", - "description": "Documentation URL for the project.", - "format": "uri" - }, - "license": { - "type": "string", - "description": "License name." - }, - "authors": { - "$ref": "#/definitions/authors" - }, - "maintainers": { - "$ref": "#/definitions/maintainers" - }, - "readme": { - "type": "string", - "description": "The path to the README file" - }, - "classifiers": { - "type": "array", - "description": "A list of trove classifers." - }, - "packages": { - "type": "array", - "description": "A list of packages to include in the final distribution.", - "items": { - "type": "object", - "description": "Information about where the package resides.", - "additionalProperties": false, - "required": [ - "include" - ], - "properties": { - "include": { - "$ref": "#/definitions/include-path" - }, - "from": { - "type": "string", - "description": "Where the source directory of the package resides." - }, - "format": { - "$ref": "#/definitions/package-formats" - } - } - } - }, - "include": { - "type": "array", - "description": "A list of files and folders to include.", - "items": { - "anyOf": [ - { - "$ref": "#/definitions/include-path" - }, - { - "type": "object", - "additionalProperties": false, - "required": [ - "path" - ], - "properties": { - "path": { - "$ref": "#/definitions/include-path" - }, - "format": { - "$ref": "#/definitions/package-formats" - } - } - } - ] - } - }, - "exclude": { - "type": "array", - "description": "A list of files and folders to exclude." - }, - "dependencies": { - "type": "object", - "description": "This is a hash of package name (keys) and version constraints (values) that are required to run this package.", - "required": [ - "python" - ], - "properties": { - "python": { - "type": "string", - "description": "The Python versions the package is compatible with." 
- } - }, - "$ref": "#/definitions/dependencies", - "additionalProperties": false - }, - "dev-dependencies": { - "type": "object", - "description": "This is a hash of package name (keys) and version constraints (values) that this package requires for developing it (testing tools and such).", - "$ref": "#/definitions/dependencies", - "additionalProperties": false - }, - "extras": { - "type": "object", - "patternProperties": { - "^[a-zA-Z-_.0-9]+$": { - "type": "array", - "items": { - "type": "string" - } - } - } - }, - "build": { - "$ref": "#/definitions/build-section" - }, - "source": { - "type": "array", - "description": "A set of additional repositories where packages can be found.", - "additionalProperties": { - "$ref": "#/definitions/repository" - }, - "items": { - "$ref": "#/definitions/repository" - } - }, - "scripts": { - "type": "object", - "description": "A hash of scripts to be installed.", - "items": { - "type": "string" - } - }, - "plugins": { - "type": "object", - "description": "A hash of hashes representing plugins", - "patternProperties": { - "^[a-zA-Z-_.0-9]+$": { - "type": "object", - "patternProperties": { - "^[a-zA-Z-_.0-9]+$": { - "type": "string" - } - } - } - } - }, - "urls": { - "type": "object", - "patternProperties": { - "^.+$": { - "type": "string", - "description": "The full url of the custom url." - } - } - } - }, - "definitions": { - "authors": { - "type": "array", - "description": "List of authors that contributed to the package. This is typically the main maintainers, not the full list.", - "items": { - "type": "string" - } - }, - "maintainers": { - "type": "array", - "description": "List of maintainers, other than the original author(s), that upkeep the package.", - "items": { - "type": "string" - } - }, - "include-path": { - "type": "string", - "description": "Path to file or directory to include." - }, - "package-format": { - "type": "string", - "enum": ["sdist", "wheel"], - "description": "A Python packaging format." - }, - "package-formats": { - "oneOf": [ - {"$ref": "#/definitions/package-format"}, - {"type": "array", "items": {"$ref": "#/definitions/package-format"}} - ], - "description": "The format(s) for which the package must be included." - }, - "dependencies": { - "type": "object", - "patternProperties": { - "^[a-zA-Z-_.0-9]+$": { - "oneOf": [ - { - "$ref": "#/definitions/dependency" - }, - { - "$ref": "#/definitions/long-dependency" - }, - { - "$ref": "#/definitions/git-dependency" - }, - { - "$ref": "#/definitions/file-dependency" - }, - { - "$ref": "#/definitions/path-dependency" - }, - { - "$ref": "#/definitions/url-dependency" - }, - { - "$ref": "#/definitions/multiple-constraints-dependency" - } - ] - } - } - }, - "dependency": { - "type": "string", - "description": "The constraint of the dependency." - }, - "long-dependency": { - "type": "object", - "required": [ - "version" - ], - "additionalProperties": false, - "properties": { - "version": { - "type": "string", - "description": "The constraint of the dependency." - }, - "python": { - "type": "string", - "description": "The python versions for which the dependency should be installed." - }, - "platform": { - "type": "string", - "description": "The platform(s) for which the dependency should be installed." - }, - "markers": { - "type": "string", - "description": "The PEP 508 compliant environment markers for which the dependency should be installed." - }, - "allow-prereleases": { - "type": "boolean", - "description": "Whether the dependency allows prereleases or not." 
- }, - "allows-prereleases": { - "type": "boolean", - "description": "Whether the dependency allows prereleases or not." - }, - "optional": { - "type": "boolean", - "description": "Whether the dependency is optional or not." - }, - "extras": { - "type": "array", - "description": "The required extras for this dependency.", - "items": { - "type": "string" - } - }, - "source": { - "type": "string", - "description": "The exclusive source used to search for this dependency." - } - } - }, - "git-dependency": { - "type": "object", - "required": [ - "git" - ], - "additionalProperties": false, - "properties": { - "git": { - "type": "string", - "description": "The url of the git repository.", - "format": "uri" - }, - "branch": { - "type": "string", - "description": "The branch to checkout." - }, - "tag": { - "type": "string", - "description": "The tag to checkout." - }, - "rev": { - "type": "string", - "description": "The revision to checkout." - }, - "python": { - "type": "string", - "description": "The python versions for which the dependency should be installed." - }, - "platform": { - "type": "string", - "description": "The platform(s) for which the dependency should be installed." - }, - "markers": { - "type": "string", - "description": "The PEP 508 compliant environment markers for which the dependency should be installed." - }, - "allow-prereleases": { - "type": "boolean", - "description": "Whether the dependency allows prereleases or not." - }, - "allows-prereleases": { - "type": "boolean", - "description": "Whether the dependency allows prereleases or not." - }, - "optional": { - "type": "boolean", - "description": "Whether the dependency is optional or not." - }, - "extras": { - "type": "array", - "description": "The required extras for this dependency.", - "items": { - "type": "string" - } - }, - "develop": { - "type": "boolean", - "description": "Whether to install the dependency in development mode." - } - } - }, - "file-dependency": { - "type": "object", - "required": [ - "file" - ], - "additionalProperties": false, - "properties": { - "file": { - "type": "string", - "description": "The path to the file." - }, - "python": { - "type": "string", - "description": "The python versions for which the dependency should be installed." - }, - "platform": { - "type": "string", - "description": "The platform(s) for which the dependency should be installed." - }, - "markers": { - "type": "string", - "description": "The PEP 508 compliant environment markers for which the dependency should be installed." - }, - "optional": { - "type": "boolean", - "description": "Whether the dependency is optional or not." - }, - "extras": { - "type": "array", - "description": "The required extras for this dependency.", - "items": { - "type": "string" - } - } - } - }, - "path-dependency": { - "type": "object", - "required": [ - "path" - ], - "additionalProperties": false, - "properties": { - "path": { - "type": "string", - "description": "The path to the dependency." - }, - "python": { - "type": "string", - "description": "The python versions for which the dependency should be installed." - }, - "platform": { - "type": "string", - "description": "The platform(s) for which the dependency should be installed." - }, - "markers": { - "type": "string", - "description": "The PEP 508 compliant environment markers for which the dependency should be installed." - }, - "optional": { - "type": "boolean", - "description": "Whether the dependency is optional or not." 
- }, - "extras": { - "type": "array", - "description": "The required extras for this dependency.", - "items": { - "type": "string" - } - }, - "develop": { - "type": "boolean", - "description": "Whether to install the dependency in development mode." - } - } - }, - "url-dependency": { - "type": "object", - "required": [ - "url" - ], - "additionalProperties": false, - "properties": { - "url": { - "type": "string", - "description": "The url to the file." - }, - "python": { - "type": "string", - "description": "The python versions for which the dependency should be installed." - }, - "platform": { - "type": "string", - "description": "The platform(s) for which the dependency should be installed." - }, - "markers": { - "type": "string", - "description": "The PEP 508 compliant environment markers for which the dependency should be installed." - }, - "optional": { - "type": "boolean", - "description": "Whether the dependency is optional or not." - }, - "extras": { - "type": "array", - "description": "The required extras for this dependency.", - "items": { - "type": "string" - } - } - } - }, - "multiple-constraints-dependency": { - "type": "array", - "minItems": 1, - "items": { - "oneOf": [ - { - "$ref": "#/definitions/dependency" - }, - { - "$ref": "#/definitions/long-dependency" - }, - { - "$ref": "#/definitions/git-dependency" - }, - { - "$ref": "#/definitions/file-dependency" - }, - { - "$ref": "#/definitions/path-dependency" - }, - { - "$ref": "#/definitions/url-dependency" - } - ] - } - }, - "scripts": { - "type": "object", - "patternProperties": { - "^[a-zA-Z-_.0-9]+$": { - "oneOf": [ - { - "$ref": "#/definitions/script" - }, - { - "$ref": "#/definitions/extra-script" - } - ] - } - } - }, - "script": { - "type": "string", - "description": "A simple script pointing to a callable object." - }, - "extra-script": { - "type": "object", - "description": "A script that should be installed only if extras are activated.", - "additionalProperties": false, - "properties": { - "callable": { - "$ref": "#/definitions/script" - }, - "extras": { - "type": "array", - "description": "The required extras for this script.", - "items": { - "type": "string" - } - } - } - }, - "repository": { - "type": "object", - "additionalProperties": false, - "properties": { - "name": { - "type": "string", - "description": "The name of the repository" - }, - "url": { - "type": "string", - "description": "The url of the repository", - "format": "uri" - }, - "default": { - "type": "boolean", - "description": "Make this repository the default (disable PyPI)" - }, - "secondary": { - "type": "boolean", - "description": "Declare this repository as secondary, i.e. it will only be looked up last for packages." - } - } - }, - "build-script": { - "type": "string", - "description": "The python script file used to build extensions." 
- }, - "build-config": { - "type": "object", - "description": "Build specific configurations.", - "additionalProperties": false, - "properties": { - "generate-setup-file": { - "type": "boolean", - "description": "Generate and include a setup.py file in sdist.", - "default": true - }, - "script": { - "$ref": "#/definitions/build-script" - } - } - }, - "build-section": { - "oneOf": [ - {"$ref": "#/definitions/build-script"}, - {"$ref": "#/definitions/build-config"} - ] - } - } -} diff --git a/vendor/poetry-core/poetry/core/masonry/__init__.py b/vendor/poetry-core/poetry/core/masonry/__init__.py deleted file mode 100644 index ddd3a14f..00000000 --- a/vendor/poetry-core/poetry/core/masonry/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -""" -This module handles the packaging and publishing -of python projects. - -A lot of the code used here has been taken from -`flit `__ and adapted -to work with the poetry codebase, so kudos to them for showing the way. -""" - -from .builder import Builder diff --git a/vendor/poetry-core/poetry/core/masonry/api.py b/vendor/poetry-core/poetry/core/masonry/api.py deleted file mode 100644 index 1899092a..00000000 --- a/vendor/poetry-core/poetry/core/masonry/api.py +++ /dev/null @@ -1,91 +0,0 @@ -""" -PEP-517 compliant buildsystem API -""" -import logging - -from typing import Any -from typing import Dict -from typing import List -from typing import Optional - -from poetry.core.factory import Factory -from poetry.core.utils._compat import Path -from poetry.core.utils._compat import unicode - -from .builders.sdist import SdistBuilder -from .builders.wheel import WheelBuilder - - -log = logging.getLogger(__name__) - - -def get_requires_for_build_wheel( - config_settings=None, -): # type: (Optional[Dict[str, Any]]) -> List[str] - """ - Returns an additional list of requirements for building, as PEP508 strings, - above and beyond those specified in the pyproject.toml file. - - This implementation is optional. At the moment it only returns an empty list, which would be the same as if - not define. So this is just for completeness for future implementation. - """ - - return [] - - -# For now, we require all dependencies to build either a wheel or an sdist. 
-get_requires_for_build_sdist = get_requires_for_build_wheel - - -def prepare_metadata_for_build_wheel( - metadata_directory, config_settings=None -): # type: (str, Optional[Dict[str, Any]]) -> str - poetry = Factory().create_poetry(Path(".").resolve(), with_dev=False) - builder = WheelBuilder(poetry) - - dist_info = Path(metadata_directory, builder.dist_info) - dist_info.mkdir(parents=True, exist_ok=True) - - if "scripts" in poetry.local_config or "plugins" in poetry.local_config: - with (dist_info / "entry_points.txt").open("w", encoding="utf-8") as f: - builder._write_entry_points(f) - - with (dist_info / "WHEEL").open("w", encoding="utf-8") as f: - builder._write_wheel_file(f) - - with (dist_info / "METADATA").open("w", encoding="utf-8") as f: - builder._write_metadata_file(f) - - return dist_info.name - - -def build_wheel( - wheel_directory, config_settings=None, metadata_directory=None -): # type: (str, Optional[Dict[str, Any]], Optional[str]) -> str - """Builds a wheel, places it in wheel_directory""" - poetry = Factory().create_poetry(Path(".").resolve(), with_dev=False) - - return unicode(WheelBuilder.make_in(poetry, Path(wheel_directory))) - - -def build_sdist( - sdist_directory, config_settings=None -): # type: (str, Optional[Dict[str, Any]]) -> str - """Builds an sdist, places it in sdist_directory""" - poetry = Factory().create_poetry(Path(".").resolve(), with_dev=False) - - path = SdistBuilder(poetry).build(Path(sdist_directory)) - - return unicode(path.name) - - -def build_editable( - wheel_directory, config_settings=None, metadata_directory=None, -): # type: (str, Optional[Dict[str, Any]], Optional[str]) -> str - poetry = Factory().create_poetry(Path(".").resolve(), with_dev=False) - - return unicode(WheelBuilder.make_in(poetry, Path(wheel_directory), editable=True)) - - -get_requires_for_build_editable = get_requires_for_build_wheel -prepare_metadata_for_build_editable = prepare_metadata_for_build_wheel diff --git a/vendor/poetry-core/poetry/core/masonry/builder.py b/vendor/poetry-core/poetry/core/masonry/builder.py deleted file mode 100644 index 21e3bb15..00000000 --- a/vendor/poetry-core/poetry/core/masonry/builder.py +++ /dev/null @@ -1,35 +0,0 @@ -from typing import TYPE_CHECKING -from typing import Optional -from typing import Union - -from poetry.core.utils._compat import Path - -from .builders.sdist import SdistBuilder -from .builders.wheel import WheelBuilder - - -if TYPE_CHECKING: - from poetry.core.poetry import Poetry # noqa - - -class Builder: - _FORMATS = { - "sdist": SdistBuilder, - "wheel": WheelBuilder, - } - - def __init__(self, poetry): # type: ("Poetry") -> None - self._poetry = poetry - - def build( - self, fmt, executable=None - ): # type: (str, Optional[Union[str, Path]]) -> None - if fmt in self._FORMATS: - builders = [self._FORMATS[fmt]] - elif fmt == "all": - builders = self._FORMATS.values() - else: - raise ValueError("Invalid format: {}".format(fmt)) - - for builder in builders: - builder(self._poetry, executable=executable).build() diff --git a/vendor/poetry-core/poetry/core/masonry/builders/__init__.py b/vendor/poetry-core/poetry/core/masonry/builders/__init__.py deleted file mode 100644 index 20d725b7..00000000 --- a/vendor/poetry-core/poetry/core/masonry/builders/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .sdist import SdistBuilder -from .wheel import WheelBuilder diff --git a/vendor/poetry-core/poetry/core/masonry/builders/builder.py b/vendor/poetry-core/poetry/core/masonry/builders/builder.py deleted file mode 100644 index 
f0f7e8a5..00000000 --- a/vendor/poetry-core/poetry/core/masonry/builders/builder.py +++ /dev/null @@ -1,374 +0,0 @@ -# -*- coding: utf-8 -*- -import logging -import re -import shutil -import sys -import tempfile - -from collections import defaultdict -from contextlib import contextmanager -from typing import TYPE_CHECKING -from typing import Any -from typing import Dict -from typing import List -from typing import Optional -from typing import Set -from typing import Union - -from poetry.core.utils._compat import Path -from poetry.core.utils._compat import to_str -from poetry.core.vcs import get_vcs - -from ..metadata import Metadata -from ..utils.module import Module -from ..utils.package_include import PackageInclude - - -if TYPE_CHECKING: - from poetry.core.poetry import Poetry # noqa - - -AUTHOR_REGEX = re.compile(r"(?u)^(?P<name>[- .,\w\d'’\"()]+) <(?P<email>.+?)>$") - -METADATA_BASE = """\ -Metadata-Version: 2.1 -Name: {name} -Version: {version} -Summary: {summary} -""" - -logger = logging.getLogger(__name__) - - -class Builder(object): - format = None # type: Optional[str] - - def __init__( - self, poetry, ignore_packages_formats=False, executable=None - ): # type: ("Poetry", bool, Optional[Union[Path, str]]) -> None - self._poetry = poetry - self._package = poetry.package - self._path = poetry.file.parent - self._excluded_files = None # type: Optional[Set[str]] - self._executable = Path(executable or sys.executable) - - packages = [] - for p in self._package.packages: - formats = p.get("format", []) - if not isinstance(formats, list): - formats = [formats] - - if ( - formats - and self.format - and self.format not in formats - and not ignore_packages_formats - ): - continue - - packages.append(p) - - includes = [] - for include in self._package.include: - formats = include.get("format", []) - - if ( - formats - and self.format - and self.format not in formats - and not ignore_packages_formats - ): - continue - - includes.append(include) - - self._module = Module( - self._package.name, - self._path.as_posix(), - packages=packages, - includes=includes, - ) - - self._meta = Metadata.from_package(self._package) - - @property - def executable(self): # type: () -> Path - return self._executable - - def build(self): # type: () -> None - raise NotImplementedError() - - def find_excluded_files(self): # type: () -> Set[str] - if self._excluded_files is None: - # Checking VCS - vcs = get_vcs(self._path) - if not vcs: - vcs_ignored_files = set() - else: - vcs_ignored_files = set(vcs.get_ignored_files()) - - explicitely_excluded = set() - for excluded_glob in self._package.exclude: - for excluded in self._path.glob(str(excluded_glob)): - explicitely_excluded.add( - Path(excluded).relative_to(self._path).as_posix() - ) - - explicitely_included = set() - for inc in self._package.include: - included_glob = inc["path"] - for included in self._path.glob(str(included_glob)): - explicitely_included.add( - Path(included).relative_to(self._path).as_posix() - ) - - ignored = (vcs_ignored_files | explicitely_excluded) - explicitely_included - result = set() - for file in ignored: - result.add(file) - - # The list of excluded files might be big and we will do a lot - # of containment checks (x in excluded). - # Returning a set makes those tests much faster.
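The set algebra in `find_excluded_files` is the entire exclusion policy: VCS-ignored files plus explicit `exclude` globs, minus anything explicitly re-included. The same rule in isolation (file names hypothetical):

    vcs_ignored = {"build/artifact.bin", "demo/generated.py"}
    explicitly_excluded = {"demo/internal.py"}
    explicitly_included = {"demo/generated.py"}  # an include wins over an ignore

    excluded = (vcs_ignored | explicitly_excluded) - explicitly_included
    print(sorted(excluded))  # ['build/artifact.bin', 'demo/internal.py']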
- self._excluded_files = result - - return self._excluded_files - - def is_excluded(self, filepath): # type: (Union[str, Path]) -> bool - exclude_path = Path(filepath) - - while True: - if exclude_path.as_posix() in self.find_excluded_files(): - return True - - if len(exclude_path.parts) > 1: - exclude_path = exclude_path.parent - else: - break - - return False - - def find_files_to_add( - self, exclude_build=True - ): # type: (bool) -> Set[BuildIncludeFile] - """ - Finds all files to add to the tarball - """ - to_add = set() - - for include in self._module.includes: - include.refresh() - formats = include.formats or ["sdist"] - - for file in include.elements: - if "__pycache__" in str(file): - continue - - if file.is_dir(): - if self.format in formats: - for current_file in file.glob("**/*"): - include_file = BuildIncludeFile( - path=current_file, - project_root=self._path, - source_root=self._path, - ) - - if not current_file.is_dir() and not self.is_excluded( - include_file.relative_to_source_root() - ): - to_add.add(include_file) - continue - - if ( - isinstance(include, PackageInclude) - and include.source - and self.format == "wheel" - ): - source_root = include.base - else: - source_root = self._path - - include_file = BuildIncludeFile( - path=file, project_root=self._path, source_root=source_root - ) - - if self.is_excluded( - include_file.relative_to_project_root() - ) and isinstance(include, PackageInclude): - continue - - if file.suffix == ".pyc": - continue - - if file in to_add: - # Skip duplicates - continue - - logger.debug("Adding: {}".format(str(file))) - to_add.add(include_file) - - # add build script if it is specified and explicitly required - if self._package.build_script and not exclude_build: - to_add.add( - BuildIncludeFile( - path=self._package.build_script, - project_root=self._path, - source_root=self._path, - ) - ) - - return to_add - - def get_metadata_content(self): # type: () -> str - content = METADATA_BASE.format( - name=self._meta.name, - version=self._meta.version, - summary=to_str(self._meta.summary), - ) - - # Optional fields - if self._meta.home_page: - content += "Home-page: {}\n".format(self._meta.home_page) - - if self._meta.license: - content += "License: {}\n".format(self._meta.license) - - if self._meta.keywords: - content += "Keywords: {}\n".format(self._meta.keywords) - - if self._meta.author: - content += "Author: {}\n".format(to_str(self._meta.author)) - - if self._meta.author_email: - content += "Author-email: {}\n".format(to_str(self._meta.author_email)) - - if self._meta.maintainer: - content += "Maintainer: {}\n".format(to_str(self._meta.maintainer)) - - if self._meta.maintainer_email: - content += "Maintainer-email: {}\n".format( - to_str(self._meta.maintainer_email) - ) - - if self._meta.requires_python: - content += "Requires-Python: {}\n".format(self._meta.requires_python) - - for classifier in self._meta.classifiers: - content += "Classifier: {}\n".format(classifier) - - for extra in sorted(self._meta.provides_extra): - content += "Provides-Extra: {}\n".format(extra) - - for dep in sorted(self._meta.requires_dist): - content += "Requires-Dist: {}\n".format(dep) - - for url in sorted(self._meta.project_urls, key=lambda u: u[0]): - content += "Project-URL: {}\n".format(to_str(url)) - - if self._meta.description_content_type: - content += "Description-Content-Type: {}\n".format( - self._meta.description_content_type - ) - - if self._meta.description is not None: - content += "\n" + to_str(self._meta.description) + "\n" - - return 
content - - def convert_entry_points(self): # type: () -> Dict[str, List[str]] - result = defaultdict(list) - - # Scripts -> Entry points - for name, ep in self._poetry.local_config.get("scripts", {}).items(): - extras = "" - if isinstance(ep, dict): - extras = "[{}]".format(", ".join(ep["extras"])) - ep = ep["callable"] - - result["console_scripts"].append("{} = {}{}".format(name, ep, extras)) - - # Plugins -> entry points - plugins = self._poetry.local_config.get("plugins", {}) - for groupname, group in plugins.items(): - for name, ep in sorted(group.items()): - result[groupname].append("{} = {}".format(name, ep)) - - for groupname in result: - result[groupname] = sorted(result[groupname]) - - return dict(result) - - @classmethod - def convert_author(cls, author): # type: (str) -> Dict[str, str] - m = AUTHOR_REGEX.match(author) - - name = m.group("name") - email = m.group("email") - - return {"name": name, "email": email} - - @classmethod - @contextmanager - def temporary_directory(cls, *args, **kwargs): # type: (*Any, **Any) -> None - try: - from tempfile import TemporaryDirectory - - with TemporaryDirectory(*args, **kwargs) as name: - yield name - except ImportError: - name = tempfile.mkdtemp(*args, **kwargs) - - yield name - - shutil.rmtree(name) - - -class BuildIncludeFile: - def __init__( - self, - path, # type: Union[Path, str] - project_root, # type: Union[Path, str] - source_root=None, # type: Optional[Union[Path, str]] - ): - """ - :param project_root: the full path of the project's root - :param path: a full path to the file to be included - :param source_root: the root path to resolve to - """ - self.path = Path(path) - self.project_root = Path(project_root).resolve() - self.source_root = None if not source_root else Path(source_root).resolve() - if not self.path.is_absolute() and self.source_root: - self.path = self.source_root / self.path - else: - self.path = self.path - - try: - self.path = self.path.resolve() - except FileNotFoundError: - # this is an issue in python 3.5, since resolve uses strict=True by - # default; this workaround needs to be maintained until python 2.7 and - # python 3.5 are dropped and we can use resolve(strict=False).
- pass - - def __eq__(self, other): # type: (Union[BuildIncludeFile, Path]) -> bool - if hasattr(other, "path"): - return self.path == other.path - return self.path == other - - def __ne__(self, other): # type: (Union[BuildIncludeFile, Path]) -> bool - return not self.__eq__(other) - - def __hash__(self): # type: () -> int - return hash(self.path) - - def __repr__(self): # type: () -> str - return str(self.path) - - def relative_to_project_root(self): # type: () -> Path - return self.path.relative_to(self.project_root) - - def relative_to_source_root(self): # type: () -> Path - if self.source_root is not None: - return self.path.relative_to(self.source_root) - return self.path diff --git a/vendor/poetry-core/poetry/core/masonry/builders/sdist.py b/vendor/poetry-core/poetry/core/masonry/builders/sdist.py deleted file mode 100644 index 5e1f8b9c..00000000 --- a/vendor/poetry-core/poetry/core/masonry/builders/sdist.py +++ /dev/null @@ -1,419 +0,0 @@ -# -*- coding: utf-8 -*- -import logging -import os -import re -import tarfile -import time - -from collections import defaultdict -from contextlib import contextmanager -from copy import copy -from gzip import GzipFile -from io import BytesIO -from posixpath import join as pjoin -from pprint import pformat -from tarfile import TarInfo -from typing import TYPE_CHECKING -from typing import Dict -from typing import Iterator -from typing import List -from typing import Optional -from typing import Set -from typing import Tuple - -from poetry.core.utils._compat import Path -from poetry.core.utils._compat import decode -from poetry.core.utils._compat import encode -from poetry.core.utils._compat import to_str - -from ..utils.helpers import normalize_file_permissions -from ..utils.package_include import PackageInclude -from .builder import Builder -from .builder import BuildIncludeFile - - -if TYPE_CHECKING: - from poetry.core.packages import Dependency # noqa - from poetry.core.packages import ProjectPackage # noqa - -SETUP = """\ -# -*- coding: utf-8 -*- -from setuptools import setup - -{before} -setup_kwargs = {{ - 'name': {name!r}, - 'version': {version!r}, - 'description': {description!r}, - 'long_description': {long_description!r}, - 'author': {author!r}, - 'author_email': {author_email!r}, - 'maintainer': {maintainer!r}, - 'maintainer_email': {maintainer_email!r}, - 'url': {url!r}, - {extra} -}} -{after} - -setup(**setup_kwargs) -""" - -logger = logging.getLogger(__name__) - - -class SdistBuilder(Builder): - - format = "sdist" - - def build(self, target_dir=None): # type: (Optional[Path]) -> Path - logger.info("Building sdist") - if target_dir is None: - target_dir = self._path / "dist" - - if not target_dir.exists(): - target_dir.mkdir(parents=True) - - target = target_dir / "{}-{}.tar.gz".format( - self._package.pretty_name, self._meta.version - ) - gz = GzipFile(target.as_posix(), mode="wb", mtime=0) - tar = tarfile.TarFile( - target.as_posix(), mode="w", fileobj=gz, format=tarfile.PAX_FORMAT - ) - - try: - tar_dir = "{}-{}".format(self._package.pretty_name, self._meta.version) - - files_to_add = self.find_files_to_add(exclude_build=False) - - for file in sorted(files_to_add, key=lambda x: x.relative_to_source_root()): - tar_info = tar.gettarinfo( - str(file.path), - arcname=pjoin(tar_dir, str(file.relative_to_source_root())), - ) - tar_info = self.clean_tarinfo(tar_info) - - if tar_info.isreg(): - with file.path.open("rb") as f: - tar.addfile(tar_info, f) - else: - tar.addfile(tar_info) # Symlinks & ? 
- - if self._poetry.package.build_should_generate_setup(): - setup = self.build_setup() - tar_info = tarfile.TarInfo(pjoin(tar_dir, "setup.py")) - tar_info.size = len(setup) - tar_info.mtime = time.time() - tar.addfile(tar_info, BytesIO(setup)) - - pkg_info = self.build_pkg_info() - - tar_info = tarfile.TarInfo(pjoin(tar_dir, "PKG-INFO")) - tar_info.size = len(pkg_info) - tar_info.mtime = time.time() - tar.addfile(tar_info, BytesIO(pkg_info)) - finally: - tar.close() - gz.close() - - logger.info("Built {}".format(target.name)) - return target - - def build_setup(self): # type: () -> bytes - before, extra, after = [], [], [] - package_dir = {} - - # If we have a build script, use it - if self._package.build_script: - after += [ - "from {} import *".format(self._package.build_script.split(".")[0]), - "build(setup_kwargs)", - ] - - modules = [] - packages = [] - package_data = {} - for include in self._module.includes: - if include.formats and "sdist" not in include.formats: - continue - - if isinstance(include, PackageInclude): - if include.is_package(): - pkg_dir, _packages, _package_data = self.find_packages(include) - - if pkg_dir is not None: - package_dir[""] = os.path.relpath(pkg_dir, str(self._path)) - - packages += [p for p in _packages if p not in packages] - package_data.update(_package_data) - else: - module = include.elements[0].relative_to(include.base).stem - - if include.source is not None: - package_dir[""] = str(include.base.relative_to(self._path)) - - if module not in modules: - modules.append(module) - else: - pass - - if package_dir: - before.append("package_dir = \\\n{}\n".format(pformat(package_dir))) - extra.append("'package_dir': package_dir,") - - if packages: - before.append("packages = \\\n{}\n".format(pformat(sorted(packages)))) - extra.append("'packages': packages,") - - if package_data: - before.append("package_data = \\\n{}\n".format(pformat(package_data))) - extra.append("'package_data': package_data,") - - if modules: - before.append("modules = \\\n{}".format(pformat(modules))) - extra.append("'py_modules': modules,".format()) - - dependencies, extras = self.convert_dependencies( - self._package, self._package.requires - ) - if dependencies: - before.append( - "install_requires = \\\n{}\n".format(pformat(sorted(dependencies))) - ) - extra.append("'install_requires': install_requires,") - - if extras: - before.append("extras_require = \\\n{}\n".format(pformat(extras))) - extra.append("'extras_require': extras_require,") - - entry_points = self.convert_entry_points() - if entry_points: - before.append("entry_points = \\\n{}\n".format(pformat(entry_points))) - extra.append("'entry_points': entry_points,") - - if self._package.python_versions != "*": - python_requires = self._meta.requires_python - - extra.append("'python_requires': {!r},".format(python_requires)) - - return encode( - SETUP.format( - before="\n".join(before), - name=to_str(self._meta.name), - version=to_str(self._meta.version), - description=to_str(self._meta.summary), - long_description=to_str(self._meta.description), - author=to_str(self._meta.author), - author_email=to_str(self._meta.author_email), - maintainer=to_str(self._meta.maintainer), - maintainer_email=to_str(self._meta.maintainer_email), - url=to_str(self._meta.home_page), - extra="\n ".join(extra), - after="\n".join(after), - ) - ) - - @contextmanager - def setup_py(self): # type: () -> Iterator[Path] - setup = self._path / "setup.py" - has_setup = setup.exists() - - if has_setup: - logger.warning("A setup.py file already exists. 
Using it.") - else: - with setup.open("w", encoding="utf-8") as f: - f.write(decode(self.build_setup())) - - yield setup - - if not has_setup: - setup.unlink() - - def build_pkg_info(self): # type: () -> bytes - return encode(self.get_metadata_content()) - - def find_packages( - self, include - ): # type: (PackageInclude) -> Tuple[str, List[str], dict] - """ - Discover subpackages and data. - - It also retrieves necessary files. - """ - pkgdir = None - if include.source is not None: - pkgdir = str(include.base) - - base = str(include.elements[0].parent) - - pkg_name = include.package - pkg_data = defaultdict(list) - # Undocumented distutils feature: - # the empty string matches all package names - pkg_data[""].append("*") - packages = [pkg_name] - subpkg_paths = set() - - def find_nearest_pkg(rel_path): # type: (str) -> Tuple[str, str] - parts = rel_path.split(os.sep) - for i in reversed(range(1, len(parts))): - ancestor = "/".join(parts[:i]) - if ancestor in subpkg_paths: - pkg = ".".join([pkg_name] + parts[:i]) - return pkg, "/".join(parts[i:]) - - # Relative to the top-level package - return pkg_name, Path(rel_path).as_posix() - - for path, dirnames, filenames in os.walk(str(base), topdown=True): - if os.path.basename(path) == "__pycache__": - continue - - from_top_level = os.path.relpath(path, base) - if from_top_level == ".": - continue - - is_subpkg = any( - [filename.endswith(".py") for filename in filenames] - ) and not all( - [ - self.is_excluded(Path(path, filename).relative_to(self._path)) - for filename in filenames - if filename.endswith(".py") - ] - ) - if is_subpkg: - subpkg_paths.add(from_top_level) - parts = from_top_level.split(os.sep) - packages.append(".".join([pkg_name] + parts)) - else: - pkg, from_nearest_pkg = find_nearest_pkg(from_top_level) - - data_elements = [ - f.relative_to(self._path) - for f in Path(path).glob("*") - if not f.is_dir() - ] - - data = [e for e in data_elements if not self.is_excluded(e)] - if not data: - continue - - if len(data) == len(data_elements): - pkg_data[pkg].append(pjoin(from_nearest_pkg, "*")) - else: - for d in data: - if d.is_dir(): - continue - - pkg_data[pkg] += [pjoin(from_nearest_pkg, d.name) for d in data] - - # Sort values in pkg_data - pkg_data = {k: sorted(v) for (k, v) in pkg_data.items() if v} - - return pkgdir, sorted(packages), pkg_data - - def find_files_to_add( - self, exclude_build=False - ): # type: (bool) -> Set[BuildIncludeFile] - to_add = super(SdistBuilder, self).find_files_to_add(exclude_build) - - # add any additional files, starting with all LICENSE files - additional_files = { - license_file for license_file in self._path.glob("LICENSE*") - } - - # Include project files - additional_files.add("pyproject.toml") - - # add readme if it is specified - if "readme" in self._poetry.local_config: - additional_files.add(self._poetry.local_config["readme"]) - - for file in additional_files: - file = BuildIncludeFile( - path=file, project_root=self._path, source_root=self._path - ) - if file.path.exists(): - logger.debug("Adding: {}".format(file.relative_to_source_root())) - to_add.add(file) - - return to_add - - @classmethod - def convert_dependencies( - cls, package, dependencies - ): # type: ("ProjectPackage", List["Dependency"]) -> Tuple[List[str], Dict[str, List[str]]] - main = [] - extras = defaultdict(list) - req_regex = re.compile(r"^(.+) \((.+)\)$") - - for dependency in dependencies: - if dependency.is_optional(): - for extra_name, reqs in package.extras.items(): - for req in reqs: - if req.name == 
dependency.name: - requirement = to_str( - dependency.to_pep_508(with_extras=False) - ) - if ";" in requirement: - requirement, conditions = requirement.split(";") - - requirement = requirement.strip() - if req_regex.match(requirement): - requirement = req_regex.sub( - "\\1\\2", requirement.strip() - ) - - extras[extra_name + ":" + conditions.strip()].append( - requirement - ) - - continue - - requirement = requirement.strip() - if req_regex.match(requirement): - requirement = req_regex.sub( - "\\1\\2", requirement.strip() - ) - extras[extra_name].append(requirement) - continue - - requirement = to_str(dependency.to_pep_508()) - if ";" in requirement: - requirement, conditions = requirement.split(";") - - requirement = requirement.strip() - if req_regex.match(requirement): - requirement = req_regex.sub("\\1\\2", requirement.strip()) - - extras[":" + conditions.strip()].append(requirement) - - continue - - requirement = requirement.strip() - if req_regex.match(requirement): - requirement = req_regex.sub("\\1\\2", requirement.strip()) - - main.append(requirement) - - return main, dict(extras) - - @classmethod - def clean_tarinfo(cls, tar_info): # type: (TarInfo) -> TarInfo - """ - Clean metadata from a TarInfo object to make it more reproducible. - - - Set uid & gid to 0 - - Set uname and gname to "" - - Normalise permissions to 644 or 755 - - Set mtime if not None - """ - ti = copy(tar_info) - ti.uid = 0 - ti.gid = 0 - ti.uname = "" - ti.gname = "" - ti.mode = normalize_file_permissions(ti.mode) - - return ti diff --git a/vendor/poetry-core/poetry/core/masonry/builders/wheel.py b/vendor/poetry-core/poetry/core/masonry/builders/wheel.py deleted file mode 100644 index 383ed733..00000000 --- a/vendor/poetry-core/poetry/core/masonry/builders/wheel.py +++ /dev/null @@ -1,379 +0,0 @@ -from __future__ import unicode_literals - -import contextlib -import csv -import hashlib -import logging -import os -import shutil -import stat -import subprocess -import tempfile -import zipfile - -from base64 import urlsafe_b64encode -from io import BytesIO -from io import StringIO -from typing import TYPE_CHECKING -from typing import Iterator -from typing import Optional -from typing import TextIO -from typing import Union - -from packaging.tags import sys_tags - -from poetry.core import __version__ -from poetry.core.semver import parse_constraint -from poetry.core.utils._compat import PY2 -from poetry.core.utils._compat import Path -from poetry.core.utils._compat import decode - -from ..utils.helpers import escape_name -from ..utils.helpers import escape_version -from ..utils.helpers import normalize_file_permissions -from ..utils.package_include import PackageInclude -from .builder import Builder -from .sdist import SdistBuilder - - -if TYPE_CHECKING: - from poetry.core.poetry import Poetry # noqa - -wheel_file_template = """\ -Wheel-Version: 1.0 -Generator: poetry {version} -Root-Is-Purelib: {pure_lib} -Tag: {tag} -""" - -logger = logging.getLogger(__name__) - - -class WheelBuilder(Builder): - format = "wheel" - - def __init__( - self, poetry, target_dir=None, original=None, executable=None, editable=False, - ): # type: ("Poetry", Optional[Path], Optional[Path], Optional[str], bool) -> None - super(WheelBuilder, self).__init__(poetry, executable=executable) - - self._records = [] - self._original_path = self._path - self._target_dir = target_dir or (self._poetry.file.parent / "dist") - if original: - self._original_path = original.file.parent - self._editable = editable - - @classmethod - def make_in( - cls, 
poetry, directory=None, original=None, executable=None, editable=False, - ): # type: ("Poetry", Optional[Path], Optional[Path], Optional[str], bool) -> str - wb = WheelBuilder( - poetry, - target_dir=directory, - original=original, - executable=executable, - editable=editable, - ) - wb.build() - - return wb.wheel_filename - - @classmethod - def make(cls, poetry, executable=None): # type: ("Poetry", Optional[str]) -> None - """Build a wheel in the dist/ directory, and optionally upload it.""" - cls.make_in(poetry, executable=executable) - - def build(self): # type: () -> None - logger.info("Building wheel") - - dist_dir = self._target_dir - if not dist_dir.exists(): - dist_dir.mkdir() - - (fd, temp_path) = tempfile.mkstemp(suffix=".whl") - - st_mode = os.stat(temp_path).st_mode - new_mode = normalize_file_permissions(st_mode) - os.chmod(temp_path, new_mode) - - with os.fdopen(fd, "w+b") as fd_file: - with zipfile.ZipFile( - fd_file, mode="w", compression=zipfile.ZIP_DEFLATED - ) as zip_file: - if not self._editable: - if not self._poetry.package.build_should_generate_setup(): - self._build(zip_file) - self._copy_module(zip_file) - else: - self._copy_module(zip_file) - self._build(zip_file) - else: - self._build(zip_file) - self._add_pth(zip_file) - - self._write_metadata(zip_file) - self._write_record(zip_file) - - wheel_path = dist_dir / self.wheel_filename - if wheel_path.exists(): - wheel_path.unlink() - shutil.move(temp_path, str(wheel_path)) - - logger.info("Built {}".format(self.wheel_filename)) - - def _add_pth(self, wheel): # type: (zipfile.ZipFile) -> None - paths = set() - for include in self._module.includes: - if isinstance(include, PackageInclude) and ( - include.is_module() or include.is_package() - ): - paths.add(include.base.resolve().as_posix()) - - content = "" - for path in paths: - content += path + os.linesep - - pth_file = Path(self._module.name).with_suffix(".pth") - - with self._write_to_zip(wheel, str(pth_file)) as f: - f.write(content) - - def _build(self, wheel): # type: (zipfile.ZipFile) -> None - if self._package.build_script: - if not self._poetry.package.build_should_generate_setup(): - # Since we have a build script but no setup.py generation is required, - # we assume that the build script will build and copy the files - # directly. - # That way they will be picked up when adding files to the wheel. 
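For editable wheels, `_add_pth` above relies on a standard CPython mechanism: at startup, `site` reads every `*.pth` file in site-packages and appends each line naming an existing directory to `sys.path`. A sketch of the file it generates (paths hypothetical):

    from pathlib import Path

    src_dir = Path("/path/to/project/src")  # the include base _add_pth resolves
    pth = Path("demo.pth")                  # would land in the wheel, then site-packages
    pth.write_text(src_dir.as_posix() + "\n", encoding="utf-8")
    # Once installed, "import demo" resolves against src_dir on every startup.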
- current_path = os.getcwd() - try: - os.chdir(str(self._path)) - self._run_build_script(self._package.build_script) - finally: - os.chdir(current_path) - else: - with SdistBuilder(poetry=self._poetry).setup_py() as setup: - # We need to place ourselves in the temporary - # directory in order to build the package - current_path = os.getcwd() - try: - os.chdir(str(self._path)) - self._run_build_command(setup) - finally: - os.chdir(current_path) - - build_dir = self._path / "build" - lib = list(build_dir.glob("lib.*")) - if not lib: - # The result of building the extensions - # does not exist, this may due to conditional - # builds, so we assume that it's okay - return - - lib = lib[0] - - for pkg in lib.glob("**/*"): - if pkg.is_dir() or self.is_excluded(pkg): - continue - - rel_path = str(pkg.relative_to(lib)) - - if rel_path in wheel.namelist(): - continue - - logger.debug("Adding: {}".format(rel_path)) - - self._add_file(wheel, pkg, rel_path) - - def _run_build_command(self, setup): # type: (Path) -> None - subprocess.check_call( - [ - self.executable.as_posix(), - str(setup), - "build", - "-b", - str(self._path / "build"), - ] - ) - - def _run_build_script(self, build_script): # type: (str) -> None - logger.debug("Executing build script: {}".format(build_script)) - subprocess.check_call([self.executable.as_posix(), build_script]) - - def _copy_module(self, wheel): # type: (zipfile.ZipFile) -> None - to_add = self.find_files_to_add() - - # Walk the files and compress them, - # sorting everything so the order is stable. - for file in sorted(list(to_add), key=lambda x: x.path): - self._add_file(wheel, file.path, file.relative_to_source_root()) - - def _write_metadata(self, wheel): # type: (zipfile.ZipFile) -> None - if ( - "scripts" in self._poetry.local_config - or "plugins" in self._poetry.local_config - ): - with self._write_to_zip(wheel, self.dist_info + "/entry_points.txt") as f: - self._write_entry_points(f) - - license_files_to_add = [] - for base in ("COPYING", "LICENSE"): - license_files_to_add.append(self._path / base) - license_files_to_add.extend(self._path.glob(base + ".*")) - - license_files_to_add.extend(self._path.joinpath("LICENSES").glob("**/*")) - - for path in set(license_files_to_add): - if path.is_file(): - relative_path = "%s/%s" % (self.dist_info, path.relative_to(self._path)) - self._add_file(wheel, path, relative_path) - else: - logger.debug("Skipping: {}".format(path.as_posix())) - - with self._write_to_zip(wheel, self.dist_info + "/WHEEL") as f: - self._write_wheel_file(f) - - with self._write_to_zip(wheel, self.dist_info + "/METADATA") as f: - self._write_metadata_file(f) - - def _write_record(self, wheel): # type: (zipfile.ZipFile) -> None - # Write a record of the files in the wheel - with self._write_to_zip(wheel, self.dist_info + "/RECORD") as f: - record = StringIO() if not PY2 else BytesIO() - - csv_writer = csv.writer( - record, - delimiter=csv.excel.delimiter, - quotechar=csv.excel.quotechar, - lineterminator="\n", - ) - for path, hash, size in self._records: - csv_writer.writerow((path, "sha256={}".format(hash), size)) - - # RECORD itself is recorded with no hash or size - csv_writer.writerow((self.dist_info + "/RECORD", "", "")) - - f.write(decode(record.getvalue())) - - @property - def dist_info(self): # type: () -> str - return self.dist_info_name(self._package.name, self._meta.version) - - @property - def wheel_filename(self): # type: () -> str - return "{}-{}-{}.whl".format( - escape_name(self._package.pretty_name), - 
escape_version(self._meta.version), - self.tag, - ) - - def supports_python2(self): # type: () -> bool - return self._package.python_constraint.allows_any( - parse_constraint(">=2.0.0 <3.0.0") - ) - - def dist_info_name(self, distribution, version): # type: (str, str) -> str - escaped_name = escape_name(distribution) - escaped_version = escape_version(version) - - return "{}-{}.dist-info".format(escaped_name, escaped_version) - - @property - def tag(self): # type: () -> str - if self._package.build_script: - tag = next(sys_tags()) - tag = (tag.interpreter, tag.abi, tag.platform) - else: - platform = "any" - if self.supports_python2(): - impl = "py2.py3" - else: - impl = "py3" - - tag = (impl, "none", platform) - - return "-".join(tag) - - def _add_file( - self, wheel, full_path, rel_path - ): # type: (zipfile.ZipFile, Union[Path, str], Union[Path, str]) -> None - full_path, rel_path = str(full_path), str(rel_path) - if os.sep != "/": - # We always want to have /-separated paths in the zip file and in - # RECORD - rel_path = rel_path.replace(os.sep, "/") - - zinfo = zipfile.ZipInfo(rel_path) - - # Normalize permission bits to either 755 (executable) or 644 - st_mode = os.stat(full_path).st_mode - new_mode = normalize_file_permissions(st_mode) - zinfo.external_attr = (new_mode & 0xFFFF) << 16 # Unix attributes - - if stat.S_ISDIR(st_mode): - zinfo.external_attr |= 0x10 # MS-DOS directory flag - - hashsum = hashlib.sha256() - with open(full_path, "rb") as src: - while True: - buf = src.read(1024 * 8) - if not buf: - break - hashsum.update(buf) - - src.seek(0) - wheel.writestr(zinfo, src.read(), compress_type=zipfile.ZIP_DEFLATED) - - size = os.stat(full_path).st_size - hash_digest = urlsafe_b64encode(hashsum.digest()).decode("ascii").rstrip("=") - - self._records.append((rel_path, hash_digest, size)) - - @contextlib.contextmanager - def _write_to_zip( - self, wheel, rel_path - ): # type: (zipfile.ZipFile, str) -> Iterator[StringIO] - sio = StringIO() - yield sio - - # The default is a fixed timestamp rather than the current time, so - # that building a wheel twice on the same computer can automatically - # give you the exact same result. - date_time = (2016, 1, 1, 0, 0, 0) - zi = zipfile.ZipInfo(rel_path, date_time) - zi.external_attr = (0o644 & 0xFFFF) << 16 # Unix attributes - b = sio.getvalue().encode("utf-8") - hashsum = hashlib.sha256(b) - hash_digest = urlsafe_b64encode(hashsum.digest()).decode("ascii").rstrip("=") - - wheel.writestr(zi, b, compress_type=zipfile.ZIP_DEFLATED) - self._records.append((rel_path, hash_digest, len(b))) - - def _write_entry_points(self, fp): # type: (TextIO) -> None - """ - Write entry_points.txt. 
- """ - entry_points = self.convert_entry_points() - - for group_name in sorted(entry_points): - fp.write("[{}]\n".format(group_name)) - for ep in sorted(entry_points[group_name]): - fp.write(ep.replace(" ", "") + "\n") - - fp.write("\n") - - def _write_wheel_file(self, fp): # type: (TextIO) -> None - fp.write( - wheel_file_template.format( - version=__version__, - pure_lib="true" if self._package.build_script is None else "false", - tag=self.tag, - ) - ) - - def _write_metadata_file(self, fp): # type: (TextIO) -> None - """ - Write out metadata in the 2.x format (email like) - """ - fp.write(decode(self.get_metadata_content())) diff --git a/vendor/poetry-core/poetry/core/masonry/metadata.py b/vendor/poetry-core/poetry/core/masonry/metadata.py deleted file mode 100644 index 48bf6033..00000000 --- a/vendor/poetry-core/poetry/core/masonry/metadata.py +++ /dev/null @@ -1,96 +0,0 @@ -from typing import TYPE_CHECKING - -from poetry.core.utils.helpers import canonicalize_name -from poetry.core.utils.helpers import normalize_version -from poetry.core.version.helpers import format_python_constraint - - -if TYPE_CHECKING: - from poetry.core.packages import Package # noqa - - -class Metadata: - - metadata_version = "2.1" - # version 1.0 - name = None - version = None - platforms = () - supported_platforms = () - summary = None - description = None - keywords = None - home_page = None - download_url = None - author = None - author_email = None - license = None - # version 1.1 - classifiers = () - requires = () - provides = () - obsoletes = () - # version 1.2 - maintainer = None - maintainer_email = None - requires_python = None - requires_external = () - requires_dist = [] - provides_dist = () - obsoletes_dist = () - project_urls = () - - # Version 2.1 - description_content_type = None - provides_extra = [] - - @classmethod - def from_package(cls, package): # type: ("Package") -> Metadata - meta = cls() - - meta.name = canonicalize_name(package.name) - meta.version = normalize_version(package.version.text) - meta.summary = package.description - if package.readme: - with package.readme.open(encoding="utf-8") as f: - meta.description = f.read() - - meta.keywords = ",".join(package.keywords) - meta.home_page = package.homepage or package.repository_url - meta.author = package.author_name - meta.author_email = package.author_email - - if package.license: - meta.license = package.license.id - - meta.classifiers = package.all_classifiers - - # Version 1.2 - meta.maintainer = package.maintainer_name - meta.maintainer_email = package.maintainer_email - - # Requires python - if package.python_versions != "*": - meta.requires_python = format_python_constraint(package.python_constraint) - - meta.requires_dist = [d.to_pep_508() for d in package.requires] - - # Version 2.1 - if package.readme: - if package.readme.suffix == ".rst": - meta.description_content_type = "text/x-rst" - elif package.readme.suffix in [".md", ".markdown"]: - meta.description_content_type = "text/markdown" - else: - meta.description_content_type = "text/plain" - - meta.provides_extra = [e for e in package.extras] - - if package.urls: - for name, url in package.urls.items(): - if name == "Homepage" and meta.home_page == url: - continue - - meta.project_urls += ("{}, {}".format(name, url),) - - return meta diff --git a/vendor/poetry-core/poetry/core/masonry/utils/helpers.py b/vendor/poetry-core/poetry/core/masonry/utils/helpers.py deleted file mode 100644 index 3a515f42..00000000 --- a/vendor/poetry-core/poetry/core/masonry/utils/helpers.py 
+++ /dev/null @@ -1,31 +0,0 @@ -import re - - -def normalize_file_permissions(st_mode): # type: (int) -> int - """ - Normalizes the permission bits in the st_mode field from stat to 644/755 - - Popular VCSs only track whether a file is executable or not. The exact - permissions can vary on systems with different umasks. Normalising - to 644 (non executable) or 755 (executable) makes builds more reproducible. - """ - # Set 644 permissions, leaving higher bits of st_mode unchanged - new_mode = (st_mode | 0o644) & ~0o133 - if st_mode & 0o100: - new_mode |= 0o111 # Executable: 644 -> 755 - - return new_mode - - -def escape_version(version): # type: (str) -> str - """ - Escaped version in wheel filename. Doesn't exactly follow - the escaping specification in :pep:`427#escaping-and-unicode` - because this conflicts with :pep:`440#local-version-identifiers`. - """ - return re.sub(r"[^\w\d.+]+", "_", version, flags=re.UNICODE) - - -def escape_name(name): # type: (str) -> str - """Escaped wheel name as specified in :pep:`427#escaping-and-unicode`.""" - return re.sub(r"[^\w\d.]+", "_", name, flags=re.UNICODE) diff --git a/vendor/poetry-core/poetry/core/masonry/utils/include.py b/vendor/poetry-core/poetry/core/masonry/utils/include.py deleted file mode 100644 index f8f2b8f1..00000000 --- a/vendor/poetry-core/poetry/core/masonry/utils/include.py +++ /dev/null @@ -1,50 +0,0 @@ -from typing import List -from typing import Optional - -from poetry.core.utils._compat import Path - - -class Include(object): - """ - Represents an "include" entry. - - It can be a glob string, a single file or a directory. - - This class will then detect the type of this include: - - - a package - - a module - - a file - - a directory - """ - - def __init__( - self, base, include, formats=None - ): # type: (Path, str, Optional[List[str]]) -> None - self._base = base - self._include = str(include) - self._formats = formats - - self._elements = sorted( - list(self._base.glob(str(self._include))) - ) # type: List[Path] - - @property - def base(self): # type: () -> Path - return self._base - - @property - def elements(self): # type: () -> List[Path] - return self._elements - - @property - def formats(self): # type: () -> Optional[List[str]] - return self._formats - - def is_empty(self): # type: () -> bool - return len(self._elements) == 0 - - def refresh(self): # type: () -> Include - self._elements = sorted(list(self._base.glob(self._include))) - - return self diff --git a/vendor/poetry-core/poetry/core/masonry/utils/module.py b/vendor/poetry-core/poetry/core/masonry/utils/module.py deleted file mode 100644 index 2e2a7539..00000000 --- a/vendor/poetry-core/poetry/core/masonry/utils/module.py +++ /dev/null @@ -1,109 +0,0 @@ -from typing import Any -from typing import Dict -from typing import List -from typing import Optional - -from poetry.core.utils._compat import Path -from poetry.core.utils.helpers import module_name - -from .include import Include -from .package_include import PackageInclude - - -class ModuleOrPackageNotFound(ValueError): - - pass - - -class Module: - def __init__( - self, name, directory=".", packages=None, includes=None - ): # type: (str, str, Optional[List[Dict[str, Any]]], Optional[List[Dict[str, Any]]]) -> None - self._name = module_name(name) - self._in_src = False - self._is_package = False - self._path = Path(directory) - self._includes = [] - packages = packages or [] - includes = includes or [] - - if not packages: - # It must exist either as a .py file or a directory, but not both - pkg_dir = 
Path(directory, self._name) - py_file = Path(directory, self._name + ".py") - if pkg_dir.is_dir() and py_file.is_file(): - raise ValueError("Both {} and {} exist".format(pkg_dir, py_file)) - elif pkg_dir.is_dir(): - packages = [{"include": str(pkg_dir.relative_to(self._path))}] - elif py_file.is_file(): - packages = [{"include": str(py_file.relative_to(self._path))}] - else: - # Searching for a src module - src = Path(directory, "src") - src_pkg_dir = src / self._name - src_py_file = src / (self._name + ".py") - - if src_pkg_dir.is_dir() and src_py_file.is_file(): - raise ValueError("Both {} and {} exist".format(pkg_dir, py_file)) - elif src_pkg_dir.is_dir(): - packages = [ - { - "include": str(src_pkg_dir.relative_to(src)), - "from": str(src.relative_to(self._path)), - } - ] - elif src_py_file.is_file(): - packages = [ - { - "include": str(src_py_file.relative_to(src)), - "from": str(src.relative_to(self._path)), - } - ] - else: - raise ModuleOrPackageNotFound( - "No file/folder found for package {}".format(name) - ) - - for package in packages: - formats = package.get("format") - if formats and not isinstance(formats, list): - formats = [formats] - - self._includes.append( - PackageInclude( - self._path, - package["include"], - formats=formats, - source=package.get("from"), - ) - ) - - for include in includes: - self._includes.append( - Include(self._path, include["path"], formats=include["format"]) - ) - - @property - def name(self): # type: () -> str - return self._name - - @property - def path(self): # type: () -> Path - return self._path - - @property - def file(self): # type: () -> Path - if self._is_package: - return self._path / "__init__.py" - else: - return self._path - - @property - def includes(self): # type: () -> List - return self._includes - - def is_package(self): # type: () -> bool - return self._is_package - - def is_in_src(self): # type: () -> bool - return self._in_src diff --git a/vendor/poetry-core/poetry/core/masonry/utils/package_include.py b/vendor/poetry-core/poetry/core/masonry/utils/package_include.py deleted file mode 100644 index d409f5d3..00000000 --- a/vendor/poetry-core/poetry/core/masonry/utils/package_include.py +++ /dev/null @@ -1,87 +0,0 @@ -from typing import List -from typing import Optional - -from poetry.core.utils._compat import Path - -from .include import Include - - -class PackageInclude(Include): - def __init__( - self, base, include, formats=None, source=None - ): # type: (Path, str, Optional[List[str]], Optional[str]) -> None - self._package = None - self._is_package = False - self._is_module = False - self._source = source - - if source is not None: - base = base / source - - super(PackageInclude, self).__init__(base, include, formats=formats) - self.check_elements() - - @property - def package(self): # type: () -> str - return self._package - - @property - def source(self): # type: () -> Optional[str] - return self._source - - def is_package(self): # type: () -> bool - return self._is_package - - def is_module(self): # type: () -> bool - return self._is_module - - def refresh(self): # type: () -> PackageInclude - super(PackageInclude, self).refresh() - - return self.check_elements() - - def is_stub_only(self): # type: () -> bool - # returns `True` if this a PEP 561 stub-only package, - # see [PEP 561](https://www.python.org/dev/peps/pep-0561/#stub-only-packages) - return self.package.endswith("-stubs") and all( - el.suffix == ".pyi" - or (el.parent.name == self.package and el.name == "py.typed") - for el in self.elements - if el.is_file() 
- ) - - def has_modules(self): # type: () -> bool - # Packages no longer need an __init__.py in python3, but there must - # at least be one .py file for it to be considered a package - return any(element.suffix == ".py" for element in self.elements) - - def check_elements(self): # type: () -> PackageInclude - if not self._elements: - raise ValueError( - "{} does not contain any element".format(self._base / self._include) - ) - - root = self._elements[0] - if len(self._elements) > 1: - # Probably glob - self._is_package = True - self._package = root.parent.name - - if not self.is_stub_only() and not self.has_modules(): - raise ValueError("{} is not a package.".format(root.name)) - - else: - if root.is_dir(): - # If it's a directory, we include everything inside it - self._package = root.name - self._elements = sorted(list(root.glob("**/*"))) # type: List[Path] - - if not self.is_stub_only() and not self.has_modules(): - raise ValueError("{} is not a package.".format(root.name)) - - self._is_package = True - else: - self._package = root.stem - self._is_module = True - - return self diff --git a/vendor/poetry-core/poetry/core/packages/__init__.py b/vendor/poetry-core/poetry/core/packages/__init__.py deleted file mode 100644 index bf3c4c91..00000000 --- a/vendor/poetry-core/poetry/core/packages/__init__.py +++ /dev/null @@ -1,207 +0,0 @@ -import os -import re - -from typing import List -from typing import Optional -from typing import Union - -from poetry.core.semver import parse_constraint -from poetry.core.utils._compat import Path -from poetry.core.utils.patterns import wheel_file_re -from poetry.core.version.requirements import Requirement - -from .dependency import Dependency -from .directory_dependency import DirectoryDependency -from .file_dependency import FileDependency -from .package import Package -from .project_package import ProjectPackage -from .url_dependency import URLDependency -from .utils.link import Link -from .utils.utils import convert_markers -from .utils.utils import group_markers -from .utils.utils import is_archive_file -from .utils.utils import is_installable_dir -from .utils.utils import is_url -from .utils.utils import path_to_url -from .utils.utils import strip_extras -from .utils.utils import url_to_path -from .vcs_dependency import VCSDependency - - -def _make_file_or_dir_dep( - name, # type: str - path, # type: Path - base=None, # type: Optional[Path] - extras=None, # type: Optional[List[str]] -): # type: (...) -> Optional[Union[FileDependency, DirectoryDependency]] - """ - Helper function to create a file or directory dependency with the given arguments. If - path is not a file or directory that exists, `None` is returned. - """ - _path = path - if not path.is_absolute() and base: - # a base path was specified, so we should respect that - _path = Path(base) / path - - if _path.is_file(): - return FileDependency(name, path, base=base, extras=extras) - elif _path.is_dir(): - return DirectoryDependency(name, path, base=base, extras=extras) - - return None - - -def dependency_from_pep_508( - name, relative_to=None -): # type: (str, Optional[Path]) -> Dependency - """ - Resolve a PEP-508 requirement string to a `Dependency` instance. If a `relative_to` - path is specified, this is used as the base directory if the identified dependency is - of file or directory type.
- """ - from poetry.core.vcs.git import ParsedUrl - - # Removing comments - parts = name.split("#", 1) - name = parts[0].strip() - if len(parts) > 1: - rest = parts[1] - if " ;" in rest: - name += " ;" + rest.split(" ;", 1)[1] - - req = Requirement(name) - - if req.marker: - markers = convert_markers(req.marker) - else: - markers = {} - - name = req.name - path = os.path.normpath(os.path.abspath(name)) - link = None - - if is_url(name): - link = Link(name) - elif req.url: - link = Link(req.url) - else: - p, extras = strip_extras(path) - if os.path.isdir(p) and (os.path.sep in name or name.startswith(".")): - - if not is_installable_dir(p): - raise ValueError( - "Directory {!r} is not installable. File 'setup.py' " - "not found.".format(name) - ) - link = Link(path_to_url(p)) - elif is_archive_file(p): - link = Link(path_to_url(p)) - - # it's a local file, dir, or url - if link: - is_file_uri = link.scheme == "file" - is_relative_uri = is_file_uri and re.search(r"\.\./", link.url) - - # Handle relative file URLs - if is_file_uri and is_relative_uri: - path = Path(link.path) - if relative_to: - path = relative_to / path - link = Link(path_to_url(path)) - - # wheel file - version = None - if link.is_wheel: - m = wheel_file_re.match(link.filename) - if not m: - raise ValueError("Invalid wheel name: {}".format(link.filename)) - name = m.group("name") - version = m.group("ver") - - name = req.name or link.egg_fragment - dep = None - - if link.scheme.startswith("git+"): - url = ParsedUrl.parse(link.url) - dep = VCSDependency(name, "git", url.url, rev=url.rev, extras=req.extras) - elif link.scheme == "git": - dep = VCSDependency( - name, "git", link.url_without_fragment, extras=req.extras - ) - elif link.scheme in ["http", "https"]: - dep = URLDependency(name, link.url) - elif is_file_uri: - # handle RFC 8089 references - path = url_to_path(req.url) - dep = _make_file_or_dir_dep( - name=name, path=path, base=relative_to, extras=req.extras - ) - else: - try: - # this is a local path not using the file URI scheme - dep = _make_file_or_dir_dep( - name=name, path=Path(req.url), base=relative_to, extras=req.extras, - ) - except ValueError: - pass - - if dep is None: - dep = Dependency(name, version or "*", extras=req.extras) - - if version: - dep._constraint = parse_constraint(version) - else: - if req.pretty_constraint: - constraint = req.constraint - else: - constraint = "*" - - dep = Dependency(name, constraint, extras=req.extras) - - if "extra" in markers: - # If we have extras, the dependency is optional - dep.deactivate() - - for or_ in markers["extra"]: - for _, extra in or_: - dep.in_extras.append(extra) - - if "python_version" in markers: - ors = [] - for or_ in markers["python_version"]: - ands = [] - for op, version in or_: - # Expand python version - if op == "==" and "*" not in version: - version = "~" + version - op = "" - elif op == "!=": - version += ".*" - elif op in ("in", "not in"): - versions = [] - for v in re.split("[ ,]+", version): - split = v.split(".") - if len(split) in [1, 2]: - split.append("*") - op_ = "" if op == "in" else "!=" - else: - op_ = "==" if op == "in" else "!=" - - versions.append(op_ + ".".join(split)) - - glue = " || " if op == "in" else ", " - if versions: - ands.append(glue.join(versions)) - - continue - - ands.append("{}{}".format(op, version)) - - ors.append(" ".join(ands)) - - dep.python_versions = " || ".join(ors) - - if req.marker: - dep.marker = req.marker - - return dep diff --git a/vendor/poetry-core/poetry/core/packages/constraints/__init__.py 
b/vendor/poetry-core/poetry/core/packages/constraints/__init__.py deleted file mode 100644 index 33acb85a..00000000 --- a/vendor/poetry-core/poetry/core/packages/constraints/__init__.py +++ /dev/null @@ -1,66 +0,0 @@ -import re - -from typing import Union - -from .any_constraint import AnyConstraint -from .base_constraint import BaseConstraint -from .constraint import Constraint -from .empty_constraint import EmptyConstraint -from .multi_constraint import MultiConstraint -from .union_constraint import UnionConstraint - - -BASIC_CONSTRAINT = re.compile(r"^(!?==?)?\s*([^\s]+?)\s*$") -ConstraintTypes = Union[ - AnyConstraint, Constraint, UnionConstraint, EmptyConstraint, MultiConstraint -] - - -def parse_constraint( - constraints, -): # type: (str) -> Union[AnyConstraint, UnionConstraint, Constraint] - if constraints == "*": - return AnyConstraint() - - or_constraints = re.split(r"\s*\|\|?\s*", constraints.strip()) - or_groups = [] - for constraints in or_constraints: - and_constraints = re.split( - r"(?<!^)(?<![=>< ,]) *(?<!-)[, ](?!-) *(?!,|$)", constraints - ) - constraint_objects = [] - - if len(and_constraints) > 1: - for constraint in and_constraints: - constraint_objects.append(parse_single_constraint(constraint)) - else: - constraint_objects.append(parse_single_constraint(and_constraints[0])) - - if len(constraint_objects) == 1: - constraint = constraint_objects[0] - else: - constraint = constraint_objects[0] - for next_constraint in constraint_objects[1:]: - constraint = constraint.intersect(next_constraint) - - or_groups.append(constraint) - - if len(or_groups) == 1: - return or_groups[0] - else: - return UnionConstraint(*or_groups) - - -def parse_single_constraint(constraint): # type: (str) -> Constraint - # Basic comparator - m = BASIC_CONSTRAINT.match(constraint) - if m: - op = m.group(1) - if op is None: - op = "==" - - version = m.group(2).strip() - - return Constraint(version, op) - - raise ValueError("Could not parse version constraint: {}".format(constraint)) diff --git a/vendor/poetry-core/poetry/core/packages/constraints/any_constraint.py b/vendor/poetry-core/poetry/core/packages/constraints/any_constraint.py deleted file mode 100644 index 88945a11..00000000 --- a/vendor/poetry-core/poetry/core/packages/constraints/any_constraint.py +++ /dev/null @@ -1,43 +0,0 @@ -from typing import TYPE_CHECKING - -from .base_constraint import BaseConstraint -from .empty_constraint import EmptyConstraint - - -if TYPE_CHECKING: - from . 
import ConstraintTypes # noqa - - -class AnyConstraint(BaseConstraint): - def allows(self, other): # type: ("ConstraintTypes") -> bool - return True - - def allows_all(self, other): # type: ("ConstraintTypes") -> bool - return True - - def allows_any(self, other): # type: ("ConstraintTypes") -> bool - return True - - def difference(self, other): # type: ("ConstraintTypes") -> "ConstraintTypes" - if other.is_any(): - return EmptyConstraint() - - return other - - def intersect(self, other): # type: ("ConstraintTypes") -> "ConstraintTypes" - return other - - def union(self, other): # type: ("ConstraintTypes") -> AnyConstraint - return AnyConstraint() - - def is_any(self): # type: () -> bool - return True - - def is_empty(self): # type: () -> bool - return False - - def __str__(self): # type: () -> str - return "*" - - def __eq__(self, other): # type: ("ConstraintTypes") -> bool - return other.is_any() diff --git a/vendor/poetry-core/poetry/core/packages/constraints/base_constraint.py b/vendor/poetry-core/poetry/core/packages/constraints/base_constraint.py deleted file mode 100644 index 0db9aff4..00000000 --- a/vendor/poetry-core/poetry/core/packages/constraints/base_constraint.py +++ /dev/null @@ -1,37 +0,0 @@ -from typing import TYPE_CHECKING - - -if TYPE_CHECKING: - from . import ConstraintTypes # noqa - - -class BaseConstraint(object): - def allows(self, other): # type: ("ConstraintTypes") -> bool - raise NotImplementedError - - def allows_all(self, other): # type: ("ConstraintTypes") -> bool - raise NotImplementedError() - - def allows_any(self, other): # type: ("ConstraintTypes") -> bool - raise NotImplementedError() - - def difference(self, other): # type: ("ConstraintTypes") -> "ConstraintTypes" - raise NotImplementedError() - - def intersect(self, other): # type: ("ConstraintTypes") -> "ConstraintTypes" - raise NotImplementedError() - - def union(self, other): # type: ("ConstraintTypes") -> "ConstraintTypes" - raise NotImplementedError() - - def is_any(self): # type: () -> bool - return False - - def is_empty(self): # type: () -> bool - return False - - def __repr__(self): # type: () -> str - return "<{} {}>".format(self.__class__.__name__, str(self)) - - def __eq__(self, other): # type: ("ConstraintTypes") -> bool - raise NotImplementedError() diff --git a/vendor/poetry-core/poetry/core/packages/constraints/constraint.py b/vendor/poetry-core/poetry/core/packages/constraints/constraint.py deleted file mode 100644 index 1ebe915f..00000000 --- a/vendor/poetry-core/poetry/core/packages/constraints/constraint.py +++ /dev/null @@ -1,131 +0,0 @@ -import operator - -from typing import TYPE_CHECKING -from typing import Any -from typing import Union - -from .base_constraint import BaseConstraint -from .empty_constraint import EmptyConstraint - - -if TYPE_CHECKING: - from . 
import ConstraintTypes # noqa - - -class Constraint(BaseConstraint): - - OP_EQ = operator.eq - OP_NE = operator.ne - - _trans_op_str = {"=": OP_EQ, "==": OP_EQ, "!=": OP_NE} - - _trans_op_int = {OP_EQ: "==", OP_NE: "!="} - - def __init__(self, version, operator="=="): # type: (str, str) -> None - if operator == "=": - operator = "==" - - self._version = version - self._operator = operator - self._op = self._trans_op_str[operator] - - @property - def version(self): # type: () -> str - return self._version - - @property - def operator(self): # type: () -> str - return self._operator - - def allows(self, other): # type: ("ConstraintTypes") -> bool - is_equal_op = self._operator == "==" - is_non_equal_op = self._operator == "!=" - is_other_equal_op = other.operator == "==" - is_other_non_equal_op = other.operator == "!=" - - if is_equal_op and is_other_equal_op: - return self._version == other.version - - if ( - is_equal_op - and is_other_non_equal_op - or is_non_equal_op - and is_other_equal_op - or is_non_equal_op - and is_other_non_equal_op - ): - return self._version != other.version - - return False - - def allows_all(self, other): # type: ("ConstraintTypes") -> bool - if not isinstance(other, Constraint): - return other.is_empty() - - return other == self - - def allows_any(self, other): # type: ("ConstraintTypes") -> bool - if isinstance(other, Constraint): - is_non_equal_op = self._operator == "!=" - is_other_non_equal_op = other.operator == "!=" - - if is_non_equal_op and is_other_non_equal_op: - return self._version != other.version - - return other.allows(self) - - def difference( - self, other - ): # type: ("ConstraintTypes") -> Union[Constraint, "EmptyConstraint"] - if other.allows(self): - return EmptyConstraint() - - return self - - def intersect(self, other): # type: ("ConstraintTypes") -> "ConstraintTypes" - from .multi_constraint import MultiConstraint - - if isinstance(other, Constraint): - if other == self: - return self - - if self.operator == "!=" and other.operator == "==" and self.allows(other): - return other - - if other.operator == "!=" and self.operator == "==" and other.allows(self): - return self - - if other.operator == "!=" and self.operator == "!=": - return MultiConstraint(self, other) - - return EmptyConstraint() - - return other.intersect(self) - - def union(self, other): # type: ("ConstraintTypes") -> "ConstraintTypes" - if isinstance(other, Constraint): - from .union_constraint import UnionConstraint - - return UnionConstraint(self, other) - - return other.union(self) - - def is_any(self): # type: () -> bool - return False - - def is_empty(self): # type: () -> bool - return False - - def __eq__(self, other): # type: (Any) -> bool - if not isinstance(other, Constraint): - return NotImplemented - - return (self.version, self.operator) == (other.version, other.operator) - - def __hash__(self): # type: () -> int - return hash((self._operator, self._version)) - - def __str__(self): # type: () -> str - return "{}{}".format( - self._operator if self._operator != "==" else "", self._version - ) diff --git a/vendor/poetry-core/poetry/core/packages/constraints/empty_constraint.py b/vendor/poetry-core/poetry/core/packages/constraints/empty_constraint.py deleted file mode 100644 index 4db043de..00000000 --- a/vendor/poetry-core/poetry/core/packages/constraints/empty_constraint.py +++ /dev/null @@ -1,39 +0,0 @@ -from typing import TYPE_CHECKING - -from .base_constraint import BaseConstraint - - -if TYPE_CHECKING: - from . 
import ConstraintTypes # noqa - - -class EmptyConstraint(BaseConstraint): - - pretty_string = None - - def matches(self, _): # type: ("ConstraintTypes") -> bool - return True - - def is_empty(self): # type: () -> bool - return True - - def allows(self, other): # type: ("ConstraintTypes") -> bool - return False - - def allows_all(self, other): # type: ("ConstraintTypes") -> bool - return True - - def allows_any(self, other): # type: ("ConstraintTypes") -> bool - return True - - def intersect(self, other): # type: ("ConstraintTypes") -> "ConstraintTypes" - return other - - def difference(self, other): # type: ("ConstraintTypes") -> None - return - - def __eq__(self, other): # type: ("ConstraintTypes") -> bool - return other.is_empty() - - def __str__(self): # type: () -> str - return "" diff --git a/vendor/poetry-core/poetry/core/packages/constraints/multi_constraint.py b/vendor/poetry-core/poetry/core/packages/constraints/multi_constraint.py deleted file mode 100644 index 33fc9e4a..00000000 --- a/vendor/poetry-core/poetry/core/packages/constraints/multi_constraint.py +++ /dev/null @@ -1,100 +0,0 @@ -from typing import TYPE_CHECKING -from typing import Any -from typing import Tuple - -from .base_constraint import BaseConstraint -from .constraint import Constraint - - -if TYPE_CHECKING: - from . import ConstraintTypes # noqa - - -class MultiConstraint(BaseConstraint): - def __init__(self, *constraints): # type: (*Constraint) -> None - if any(c.operator == "==" for c in constraints): - raise ValueError( - "A multi-constraint can only be comprised of negative constraints" - ) - - self._constraints = constraints - - @property - def constraints(self): # type: () -> Tuple[Constraint] - return self._constraints - - def allows(self, other): # type: ("ConstraintTypes") -> bool - for constraint in self._constraints: - if not constraint.allows(other): - return False - - return True - - def allows_all(self, other): # type: ("ConstraintTypes") -> bool - if other.is_any(): - return False - - if other.is_empty(): - return True - - if isinstance(other, Constraint): - return self.allows(other) - - our_constraints = iter(self._constraints) - their_constraints = iter(other.constraints) - our_constraint = next(our_constraints, None) - their_constraint = next(their_constraints, None) - - while our_constraint and their_constraint: - if our_constraint.allows_all(their_constraint): - their_constraint = next(their_constraints, None) - else: - our_constraint = next(our_constraints, None) - - return their_constraint is None - - def allows_any(self, other): # type: ("ConstraintTypes") -> bool - if other.is_any(): - return True - - if other.is_empty(): - return True - - if isinstance(other, Constraint): - return self.allows(other) - - if isinstance(other, MultiConstraint): - for c1 in self.constraints: - for c2 in other.constraints: - if c1.allows(c2): - return True - - return False - - def intersect(self, other): # type: (Constraint) -> MultiConstraint - if isinstance(other, Constraint): - constraints = self._constraints - if other not in constraints: - constraints += (other,) - else: - constraints = (other,) - - if len(constraints) == 1: - return constraints[0] - - return MultiConstraint(*constraints) - - def __eq__(self, other): # type: (Any) -> bool - if not isinstance(other, MultiConstraint): - return False - - return sorted( - self._constraints, key=lambda c: (c.operator, c.version) - ) == sorted(other.constraints, key=lambda c: (c.operator, c.version)) - - def __str__(self): # type: () -> str - constraints = [] - 
for constraint in self._constraints: - constraints.append(str(constraint)) - - return "{}".format(", ").join(constraints) diff --git a/vendor/poetry-core/poetry/core/packages/constraints/union_constraint.py b/vendor/poetry-core/poetry/core/packages/constraints/union_constraint.py deleted file mode 100644 index ec0330c2..00000000 --- a/vendor/poetry-core/poetry/core/packages/constraints/union_constraint.py +++ /dev/null @@ -1,124 +0,0 @@ -from typing import TYPE_CHECKING -from typing import Tuple -from typing import Union - -from .base_constraint import BaseConstraint -from .constraint import Constraint -from .empty_constraint import EmptyConstraint -from .multi_constraint import MultiConstraint - - -if TYPE_CHECKING: - from . import ConstraintTypes # noqa - - -class UnionConstraint(BaseConstraint): - def __init__(self, *constraints): # type: (*Constraint) -> None - self._constraints = constraints - - @property - def constraints(self): # type: () -> Tuple[Constraint] - return self._constraints - - def allows( - self, other - ): # type: (Union[Constraint, MultiConstraint, UnionConstraint]) -> bool - for constraint in self._constraints: - if constraint.allows(other): - return True - - return False - - def allows_any(self, other): # type: ("ConstraintTypes") -> bool - if other.is_empty(): - return False - - if other.is_any(): - return True - - if isinstance(other, Constraint): - constraints = [other] - else: - constraints = other.constraints - - for our_constraint in self._constraints: - for their_constraint in constraints: - if our_constraint.allows_any(their_constraint): - return True - - return False - - def allows_all(self, other): # type: ("ConstraintTypes") -> bool - if other.is_any(): - return False - - if other.is_empty(): - return True - - if isinstance(other, Constraint): - constraints = [other] - else: - constraints = other.constraints - - our_constraints = iter(self._constraints) - their_constraints = iter(constraints) - our_constraint = next(our_constraints, None) - their_constraint = next(their_constraints, None) - - while our_constraint and their_constraint: - if our_constraint.allows_all(their_constraint): - their_constraint = next(their_constraints, None) - else: - our_constraint = next(our_constraints, None) - - return their_constraint is None - - def intersect(self, other): # type: ("ConstraintTypes") -> "ConstraintTypes" - if other.is_any(): - return self - - if other.is_empty(): - return other - - if isinstance(other, Constraint): - if self.allows(other): - return other - - return EmptyConstraint() - - new_constraints = [] - for our_constraint in self._constraints: - for their_constraint in other.constraints: - intersection = our_constraint.intersect(their_constraint) - - if not intersection.is_empty() and intersection not in new_constraints: - new_constraints.append(intersection) - - if not new_constraints: - return EmptyConstraint() - - return UnionConstraint(*new_constraints) - - def union(self, other): # type: (Constraint) -> UnionConstraint - if isinstance(other, Constraint): - constraints = self._constraints - if other not in self._constraints: - constraints += (other,) - - return UnionConstraint(*constraints) - - def __eq__(self, other): # type: ("ConstraintTypes") -> bool - - if not isinstance(other, UnionConstraint): - return False - - return sorted( - self._constraints, key=lambda c: (c.operator, c.version) - ) == sorted(other.constraints, key=lambda c: (c.operator, c.version)) - - def __str__(self): # type: () -> str - constraints = [] - for constraint in 
self._constraints: - constraints.append(str(constraint)) - - return "{}".format(" || ").join(constraints) diff --git a/vendor/poetry-core/poetry/core/packages/dependency.py b/vendor/poetry-core/poetry/core/packages/dependency.py deleted file mode 100644 index f17485fd..00000000 --- a/vendor/poetry-core/poetry/core/packages/dependency.py +++ /dev/null @@ -1,420 +0,0 @@ -from typing import TYPE_CHECKING -from typing import Any -from typing import FrozenSet -from typing import List -from typing import Optional -from typing import Union - -import poetry.core.packages - -from poetry.core.semver import Version -from poetry.core.semver import VersionConstraint -from poetry.core.semver import VersionRange -from poetry.core.semver import VersionUnion -from poetry.core.semver import parse_constraint -from poetry.core.version.markers import AnyMarker -from poetry.core.version.markers import parse_marker - -from .constraints import parse_constraint as parse_generic_constraint -from .constraints.constraint import Constraint -from .constraints.multi_constraint import MultiConstraint -from .constraints.union_constraint import UnionConstraint -from .specification import PackageSpecification -from .utils.utils import convert_markers - - -if TYPE_CHECKING: - from poetry.core.version.markers import BaseMarker # noqa - from poetry.core.version.markers import VersionTypes # noqa - - from .constraints import BaseConstraint # noqa - - -class Dependency(PackageSpecification): - def __init__( - self, - name, # type: str - constraint, # type: Union[str, VersionConstraint] - optional=False, # type: bool - category="main", # type: str - allows_prereleases=False, # type: bool - extras=None, # type: Union[List[str], FrozenSet[str]] - source_type=None, # type: Optional[str] - source_url=None, # type: Optional[str] - source_reference=None, # type: Optional[str] - source_resolved_reference=None, # type: Optional[str] - ): - super(Dependency, self).__init__( - name, - source_type=source_type, - source_url=source_url, - source_reference=source_reference, - source_resolved_reference=source_resolved_reference, - features=extras, - ) - - self._constraint = None - self.set_constraint(constraint=constraint) - - self._pretty_constraint = str(constraint) - self._optional = optional - self._category = category - - if isinstance(self._constraint, VersionRange) and self._constraint.min: - allows_prereleases = ( - allows_prereleases or self._constraint.min.is_prerelease() - ) - - self._allows_prereleases = allows_prereleases - - self._python_versions = "*" - self._python_constraint = parse_constraint("*") - self._transitive_python_versions = None - self._transitive_python_constraint = None - self._transitive_marker = None - self._extras = frozenset(extras or []) - - self._in_extras = [] - - self._activated = not self._optional - - self.is_root = False - self.marker = AnyMarker() - self.source_name = None - - @property - def name(self): # type: () -> str - return self._name - - @property - def constraint(self): # type: () -> "VersionTypes" - return self._constraint - - def set_constraint(self, constraint): # type: (Union[str, "VersionTypes"]) -> None - try: - if not isinstance(constraint, VersionConstraint): - self._constraint = parse_constraint(constraint) - else: - self._constraint = constraint - except ValueError: - self._constraint = parse_constraint("*") - - @property - def pretty_constraint(self): # type: () -> str - return self._pretty_constraint - - @property - def pretty_name(self): # type: () -> str - return self._pretty_name 
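For orientation while reading this removal: a minimal usage sketch of the Dependency class deleted above, assuming the vendored import path poetry.core.packages.dependency. The package name, extra, and printed output are illustrative assumptions, not part of the diff.

# Sketch only: build a dependency with a caret constraint and an extra,
# narrow it to Python >= 3.6, and render it as a PEP 508 string.
from poetry.core.packages.dependency import Dependency

dep = Dependency("requests", "^2.25", extras=["socks"])
dep.python_versions = ">=3.6"  # also folds a python_version marker into dep.marker
print(dep.to_pep_508())
# e.g. requests[socks] (>=2.25,<3.0); python_version >= "3.6"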
- - @property - def category(self): # type: () -> str - return self._category - - @property - def python_versions(self): # type: () -> str - return self._python_versions - - @python_versions.setter - def python_versions(self, value): # type: (str) -> None - self._python_versions = value - self._python_constraint = parse_constraint(value) - if not self._python_constraint.is_any(): - self.marker = self.marker.intersect( - parse_marker( - self._create_nested_marker( - "python_version", self._python_constraint - ) - ) - ) - - @property - def transitive_python_versions(self): # type: () -> str - if self._transitive_python_versions is None: - return self._python_versions - - return self._transitive_python_versions - - @transitive_python_versions.setter - def transitive_python_versions(self, value): # type: (str) -> None - self._transitive_python_versions = value - self._transitive_python_constraint = parse_constraint(value) - - @property - def transitive_marker(self): # type: () -> "BaseMarker" - if self._transitive_marker is None: - return self.marker - - return self._transitive_marker - - @transitive_marker.setter - def transitive_marker(self, value): # type: ("BaseMarker") -> None - self._transitive_marker = value - - @property - def python_constraint(self): # type: () -> "VersionTypes" - return self._python_constraint - - @property - def transitive_python_constraint(self): # type: () -> "VersionTypes" - if self._transitive_python_constraint is None: - return self._python_constraint - - return self._transitive_python_constraint - - @property - def extras(self): # type: () -> FrozenSet[str] - return self._extras - - @property - def in_extras(self): # type: () -> list - return self._in_extras - - @property - def base_pep_508_name(self): # type: () -> str - requirement = self.pretty_name - - if self.extras: - requirement += "[{}]".format(",".join(self.extras)) - - if isinstance(self.constraint, VersionUnion): - if self.constraint.excludes_single_version(): - requirement += " ({})".format(str(self.constraint)) - else: - constraints = self.pretty_constraint.split(",") - constraints = [parse_constraint(c) for c in constraints] - constraints = [str(c) for c in constraints] - requirement += " ({})".format(",".join(constraints)) - elif isinstance(self.constraint, Version): - requirement += " (=={})".format(self.constraint.text) - elif not self.constraint.is_any(): - requirement += " ({})".format(str(self.constraint).replace(" ", "")) - - return requirement - - def allows_prereleases(self): # type: () -> bool - return self._allows_prereleases - - def is_optional(self): # type: () -> bool - return self._optional - - def is_activated(self): # type: () -> bool - return self._activated - - def is_vcs(self): # type: () -> bool - return False - - def is_file(self): # type: () -> bool - return False - - def is_directory(self): # type: () -> bool - return False - - def is_url(self): # type: () -> bool - return False - - def accepts(self, package): # type: (poetry.core.packages.Package) -> bool - """ - Determines if the given package matches this dependency. 
- """ - return ( - self._name == package.name - and self._constraint.allows(package.version) - and (not package.is_prerelease() or self.allows_prereleases()) - ) - - def to_pep_508(self, with_extras=True): # type: (bool) -> str - requirement = self.base_pep_508_name - - markers = [] - has_extras = False - if not self.marker.is_any(): - marker = self.marker - if not with_extras: - marker = marker.without_extras() - - # we re-check for any marker here since the without extra marker might - # return an any marker again - if not marker.is_empty() and not marker.is_any(): - markers.append(str(marker)) - - has_extras = "extra" in convert_markers(marker) - else: - # Python marker - if self.python_versions != "*": - python_constraint = self.python_constraint - - markers.append( - self._create_nested_marker("python_version", python_constraint) - ) - - in_extras = " || ".join(self._in_extras) - if in_extras and with_extras and not has_extras: - markers.append( - self._create_nested_marker("extra", parse_generic_constraint(in_extras)) - ) - - if markers: - if self.is_vcs() or self.is_url(): - requirement += " " - - if len(markers) > 1: - markers = ["({})".format(m) for m in markers] - requirement += "; {}".format(" and ".join(markers)) - else: - requirement += "; {}".format(markers[0]) - - return requirement - - def _create_nested_marker( - self, name, constraint - ): # type: (str, Union["BaseConstraint", Version, VersionConstraint]) -> str - if isinstance(constraint, (MultiConstraint, UnionConstraint)): - parts = [] - for c in constraint.constraints: - multi = False - if isinstance(c, (MultiConstraint, UnionConstraint)): - multi = True - - parts.append((multi, self._create_nested_marker(name, c))) - - glue = " and " - if isinstance(constraint, UnionConstraint): - parts = [ - "({})".format(part[1]) if part[0] else part[1] for part in parts - ] - glue = " or " - else: - parts = [part[1] for part in parts] - - marker = glue.join(parts) - elif isinstance(constraint, Constraint): - marker = '{} {} "{}"'.format(name, constraint.operator, constraint.version) - elif isinstance(constraint, VersionUnion): - parts = [] - for c in constraint.ranges: - parts.append(self._create_nested_marker(name, c)) - - glue = " or " - parts = ["({})".format(part) for part in parts] - - marker = glue.join(parts) - elif isinstance(constraint, Version): - if constraint.precision >= 3 and name == "python_version": - name = "python_full_version" - - marker = '{} == "{}"'.format(name, constraint.text) - else: - if constraint.min is not None: - min_name = name - if constraint.min.precision >= 3 and name == "python_version": - min_name = "python_full_version" - - if constraint.max is None: - name = min_name - - op = ">=" - if not constraint.include_min: - op = ">" - - version = constraint.min.text - if constraint.max is not None: - max_name = name - if constraint.max.precision >= 3 and name == "python_version": - max_name = "python_full_version" - - text = '{} {} "{}"'.format(min_name, op, version) - - op = "<=" - if not constraint.include_max: - op = "<" - - version = constraint.max - - text += ' and {} {} "{}"'.format(max_name, op, version) - - return text - elif constraint.max is not None: - if constraint.max.precision >= 3 and name == "python_version": - name = "python_full_version" - - op = "<=" - if not constraint.include_max: - op = "<" - - version = constraint.max - else: - return "" - - marker = '{} {} "{}"'.format(name, op, version) - - return marker - - def activate(self): # type: () -> None - """ - Set the dependency as 
mandatory. - """ - self._activated = True - - def deactivate(self): # type: () -> None - """ - Set the dependency as optional. - """ - if not self._optional: - self._optional = True - - self._activated = False - - def with_constraint( - self, constraint - ): # type: (Union[str, VersionConstraint]) -> Dependency - new = Dependency( - self.pretty_name, - constraint, - optional=self.is_optional(), - category=self.category, - allows_prereleases=self.allows_prereleases(), - extras=self._extras, - source_type=self._source_type, - source_url=self._source_url, - source_reference=self._source_reference, - ) - - new.is_root = self.is_root - new.python_versions = self.python_versions - new.transitive_python_versions = self.transitive_python_versions - new.marker = self.marker - new.transitive_marker = self.transitive_marker - - for in_extra in self.in_extras: - new.in_extras.append(in_extra) - - return new - - def __eq__(self, other): # type: (Any) -> bool - if not isinstance(other, Dependency): - return NotImplemented - - return ( - self.is_same_package_as(other) - and self._constraint == other.constraint - and self._extras == other.extras - ) - - def __ne__(self, other): # type: (Any) -> bool - return not self == other - - def __hash__(self): # type: () -> int - return ( - super(Dependency, self).__hash__() - ^ hash(self._constraint) - ^ hash(self._extras) - ) - - def __str__(self): # type: () -> str - if self.is_root: - return self._pretty_name - return self.base_pep_508_name - - def __repr__(self): # type: () -> str - return "<{} {}>".format(self.__class__.__name__, str(self)) diff --git a/vendor/poetry-core/poetry/core/packages/directory_dependency.py b/vendor/poetry-core/poetry/core/packages/directory_dependency.py deleted file mode 100644 index ac119390..00000000 --- a/vendor/poetry-core/poetry/core/packages/directory_dependency.py +++ /dev/null @@ -1,138 +0,0 @@ -from typing import TYPE_CHECKING -from typing import FrozenSet -from typing import List -from typing import Union - -from poetry.core.pyproject import PyProjectTOML -from poetry.core.utils._compat import Path - - -if TYPE_CHECKING: - from .constraints import BaseConstraint # noqa - -from .dependency import Dependency - - -class DirectoryDependency(Dependency): - def __init__( - self, - name, # type: str - path, # type: Path - category="main", # type: str - optional=False, # type: bool - base=None, # type: Path - develop=False, # type: bool - extras=None, # type: Union[List[str], FrozenSet[str]] - ): - self._path = path - self._base = base or Path.cwd() - self._full_path = path - - if not self._path.is_absolute(): - try: - self._full_path = self._base.joinpath(self._path).resolve() - except FileNotFoundError: - raise ValueError("Directory {} does not exist".format(self._path)) - - self._develop = develop - self._supports_poetry = False - - if not self._full_path.exists(): - raise ValueError("Directory {} does not exist".format(self._path)) - - if self._full_path.is_file(): - raise ValueError("{} is a file, expected a directory".format(self._path)) - - # Checking content to determine actions - setup = self._full_path / "setup.py" - self._supports_poetry = PyProjectTOML( - self._full_path / "pyproject.toml" - ).is_poetry_project() - - if not setup.exists() and not self._supports_poetry: - raise ValueError( - "Directory {} does not seem to be a Python package".format( - self._full_path - ) - ) - - super(DirectoryDependency, self).__init__( - name, - "*", - category=category, - optional=optional, - allows_prereleases=True, - 
source_type="directory", - source_url=self._full_path.as_posix(), - extras=extras, - ) - - @property - def path(self): # type: () -> Path - return self._path - - @property - def full_path(self): # type: () -> Path - return self._full_path - - @property - def base(self): # type: () -> Path - return self._base - - @property - def develop(self): # type: () -> bool - return self._develop - - def supports_poetry(self): # type: () -> bool - return self._supports_poetry - - def is_directory(self): # type: () -> bool - return True - - def with_constraint( - self, constraint - ): # type: ("BaseConstraint") -> DirectoryDependency - new = DirectoryDependency( - self.pretty_name, - path=self.path, - base=self.base, - optional=self.is_optional(), - category=self.category, - develop=self._develop, - extras=self._extras, - ) - - new._constraint = constraint - new._pretty_constraint = str(constraint) - - new.is_root = self.is_root - new.python_versions = self.python_versions - new.marker = self.marker - new.transitive_marker = self.transitive_marker - - for in_extra in self.in_extras: - new.in_extras.append(in_extra) - - return new - - @property - def base_pep_508_name(self): # type: () -> str - requirement = self.pretty_name - - if self.extras: - requirement += "[{}]".format(",".join(self.extras)) - - requirement += " @ {}".format(self._path.as_posix()) - - return requirement - - def __str__(self): # type: () -> str - if self.is_root: - return self._pretty_name - - return "{} ({} {})".format( - self._pretty_name, self._pretty_constraint, self._path.as_posix() - ) - - def __hash__(self): # type: () -> int - return hash((self._name, self._full_path.as_posix())) diff --git a/vendor/poetry-core/poetry/core/packages/file_dependency.py b/vendor/poetry-core/poetry/core/packages/file_dependency.py deleted file mode 100644 index be79fd90..00000000 --- a/vendor/poetry-core/poetry/core/packages/file_dependency.py +++ /dev/null @@ -1,123 +0,0 @@ -import hashlib -import io - -from typing import TYPE_CHECKING -from typing import FrozenSet -from typing import List -from typing import Union - -from poetry.core.packages.utils.utils import path_to_url -from poetry.core.utils._compat import Path - -from .dependency import Dependency - - -if TYPE_CHECKING: - from .constraints import BaseConstraint - - -class FileDependency(Dependency): - def __init__( - self, - name, # type: str - path, # type: Path - category="main", # type: str - optional=False, # type: bool - base=None, # type: Path - extras=None, # type: Union[List[str], FrozenSet[str]] - ): - self._path = path - self._base = base or Path.cwd() - self._full_path = path - - if not self._path.is_absolute(): - try: - self._full_path = self._base.joinpath(self._path).resolve() - except FileNotFoundError: - raise ValueError("Directory {} does not exist".format(self._path)) - - if not self._full_path.exists(): - raise ValueError("File {} does not exist".format(self._path)) - - if self._full_path.is_dir(): - raise ValueError("{} is a directory, expected a file".format(self._path)) - - super(FileDependency, self).__init__( - name, - "*", - category=category, - optional=optional, - allows_prereleases=True, - source_type="file", - source_url=self._full_path.as_posix(), - extras=extras, - ) - - @property - def base(self): # type: () -> Path - return self._base - - @property - def path(self): # type: () -> Path - return self._path - - @property - def full_path(self): # type: () -> Path - return self._full_path - - def is_file(self): # type: () -> bool - return True - - def 
hash(self, hash_name="sha256"): # type: (str) -> str - h = hashlib.new(hash_name) - with self._full_path.open("rb") as fp: - for content in iter(lambda: fp.read(io.DEFAULT_BUFFER_SIZE), b""): - h.update(content) - - return h.hexdigest() - - def with_constraint(self, constraint): # type: ("BaseConstraint") -> FileDependency - new = FileDependency( - self.pretty_name, - path=self.path, - base=self.base, - optional=self.is_optional(), - category=self.category, - extras=self._extras, - ) - - new._constraint = constraint - new._pretty_constraint = str(constraint) - - new.is_root = self.is_root - new.python_versions = self.python_versions - new.marker = self.marker - new.transitive_marker = self.transitive_marker - - for in_extra in self.in_extras: - new.in_extras.append(in_extra) - - return new - - @property - def base_pep_508_name(self): # type: () -> str - requirement = self.pretty_name - - if self.extras: - requirement += "[{}]".format(",".join(self.extras)) - - path = path_to_url(self.path) if self.path.is_absolute() else self.path - requirement += " @ {}".format(path) - - return requirement - - def __str__(self): # type: () -> str - if self.is_root: - return self._pretty_name - - return "{} ({} {})".format( - self._pretty_name, self._pretty_constraint, self._path - ) - - def __hash__(self): # type: () -> int - return hash((self._name, self._full_path)) diff --git a/vendor/poetry-core/poetry/core/packages/package.py b/vendor/poetry-core/poetry/core/packages/package.py deleted file mode 100644 index 02c28938..00000000 --- a/vendor/poetry-core/poetry/core/packages/package.py +++ /dev/null @@ -1,445 +0,0 @@ -# -*- coding: utf-8 -*- -import copy -import re - -from contextlib import contextmanager -from typing import TYPE_CHECKING -from typing import Dict -from typing import List -from typing import Optional -from typing import Union - -from poetry.core.semver import Version -from poetry.core.semver import parse_constraint -from poetry.core.spdx import License -from poetry.core.spdx import license_by_id -from poetry.core.version.markers import AnyMarker -from poetry.core.version.markers import parse_marker - -# Do not move to the TYPE_CHECKING only section, because Dependency gets imported -# by poetry/packages/locker.py from here -from .dependency import Dependency -from .specification import PackageSpecification -from .utils.utils import create_nested_marker - - -if TYPE_CHECKING: - from poetry.core.semver import VersionTypes # noqa - from poetry.core.version.markers import BaseMarker # noqa - - from .directory_dependency import DirectoryDependency - from .file_dependency import FileDependency - from .url_dependency import URLDependency - from .vcs_dependency import VCSDependency - -AUTHOR_REGEX = re.compile(r"(?u)^(?P<name>[- .,\w\d'’\"()&]+)(?: <(?P<email>.+?)>)?$") - - -class Package(PackageSpecification): - - AVAILABLE_PYTHONS = { - "2", - "2.7", - "3", - "3.4", - "3.5", - "3.6", - "3.7", - "3.8", - "3.9", - "3.10", - } - - def __init__( - self, - name, # type: str - version, # type: Union[str, Version] - pretty_version=None, # type: Optional[str] - source_type=None, # type: Optional[str] - source_url=None, # type: Optional[str] - source_reference=None, # type: Optional[str] - source_resolved_reference=None, # type: Optional[str] - features=None, # type: Optional[List[str]] - ): - """ - Creates a new in memory package. 
- """ - super(Package, self).__init__( - name, - source_type=source_type, - source_url=source_url, - source_reference=source_reference, - source_resolved_reference=source_resolved_reference, - features=features, - ) - - if not isinstance(version, Version): - self._version = Version.parse(version) - self._pretty_version = pretty_version or version - else: - self._version = version - self._pretty_version = pretty_version or self._version.text - - self.description = "" - - self._authors = [] - self._maintainers = [] - - self.homepage = None - self.repository_url = None - self.documentation_url = None - self.keywords = [] - self._license = None - self.readme = None - - self.requires = [] - self.dev_requires = [] - self.extras = {} - self.requires_extras = [] - - self.category = "main" - self.files = [] - self.optional = False - - self.classifiers = [] - - self._python_versions = "*" - self._python_constraint = parse_constraint("*") - self._python_marker = AnyMarker() - - self.platform = None - self.marker = AnyMarker() - - self.root_dir = None - - self.develop = True - - @property - def name(self): # type: () -> str - return self._name - - @property - def pretty_name(self): # type: () -> str - return self._pretty_name - - @property - def version(self): # type: () -> "Version" - return self._version - - @property - def pretty_version(self): # type: () -> str - return self._pretty_version - - @property - def unique_name(self): # type: () -> str - if self.is_root(): - return self._name - - return self.complete_name + "-" + self._version.text - - @property - def pretty_string(self): # type: () -> str - return self.pretty_name + " " + self.pretty_version - - @property - def full_pretty_version(self): # type: () -> str - if self.source_type in ["file", "directory", "url"]: - return "{} {}".format(self._pretty_version, self.source_url) - - if self.source_type not in ["hg", "git"]: - return self._pretty_version - - if self.source_resolved_reference: - if len(self.source_resolved_reference) == 40: - return "{} {}".format( - self._pretty_version, self.source_resolved_reference[0:7] - ) - - # if source reference is a sha1 hash -- truncate - if len(self.source_reference) == 40: - return "{} {}".format(self._pretty_version, self.source_reference[0:7]) - - return "{} {}".format( - self._pretty_version, - self._source_resolved_reference or self._source_reference, - ) - - @property - def authors(self): # type: () -> list - return self._authors - - @property - def author_name(self): # type: () -> str - return self._get_author()["name"] - - @property - def author_email(self): # type: () -> str - return self._get_author()["email"] - - @property - def maintainers(self): # type: () -> list - return self._maintainers - - @property - def maintainer_name(self): # type: () -> str - return self._get_maintainer()["name"] - - @property - def maintainer_email(self): # type: () -> str - return self._get_maintainer()["email"] - - @property - def all_requires( - self, - ): # type: () -> List[Union["DirectoryDependency", "FileDependency", "URLDependency", "VCSDependency", Dependency]] - return self.requires + self.dev_requires - - def _get_author(self): # type: () -> dict - if not self._authors: - return {"name": None, "email": None} - - m = AUTHOR_REGEX.match(self._authors[0]) - - if m is None: - raise ValueError( - "Invalid author string. 
Must be in the format: " - "John Smith <john@smith.com>" - ) - - name = m.group("name") - email = m.group("email") - - return {"name": name, "email": email} - - def _get_maintainer(self): # type: () -> dict - if not self._maintainers: - return {"name": None, "email": None} - - m = AUTHOR_REGEX.match(self._maintainers[0]) - - if m is None: - raise ValueError( - "Invalid maintainer string. Must be in the format: " - "John Smith <john@smith.com>" - ) - - name = m.group("name") - email = m.group("email") - - return {"name": name, "email": email} - - @property - def python_versions(self): # type: () -> str - return self._python_versions - - @python_versions.setter - def python_versions(self, value): # type: (str) -> None - self._python_versions = value - self._python_constraint = parse_constraint(value) - self._python_marker = parse_marker( - create_nested_marker("python_version", self._python_constraint) - ) - - @property - def python_constraint(self): # type: () -> "VersionTypes" - return self._python_constraint - - @property - def python_marker(self): # type: () -> "BaseMarker" - return self._python_marker - - @property - def license(self): # type: () -> License - return self._license - - @license.setter - def license(self, value): # type: (Optional[str, License]) -> None - if value is None: - self._license = value - elif isinstance(value, License): - self._license = value - else: - self._license = license_by_id(value) - - @property - def all_classifiers(self): # type: () -> List[str] - classifiers = copy.copy(self.classifiers) - - # Automatically set python classifiers - if self.python_versions == "*": - python_constraint = parse_constraint("~2.7 || ^3.4") - else: - python_constraint = self.python_constraint - - for version in sorted(self.AVAILABLE_PYTHONS): - if len(version) == 1: - constraint = parse_constraint(version + ".*") - else: - constraint = Version.parse(version) - - if python_constraint.allows_any(constraint): - classifiers.append( - "Programming Language :: Python :: {}".format(version) - ) - - # Automatically set license classifiers - if self.license: - classifiers.append(self.license.classifier) - - classifiers = set(classifiers) - - return sorted(classifiers) - - @property - def urls(self): # type: () -> Dict[str, str] - urls = {} - - if self.homepage: - urls["Homepage"] = self.homepage - - if self.repository_url: - urls["Repository"] = self.repository_url - - if self.documentation_url: - urls["Documentation"] = self.documentation_url - - return urls - - def is_prerelease(self): # type: () -> bool - return self._version.is_prerelease() - - def is_root(self): # type: () -> bool - return False - - def add_dependency( - self, dependency, - ): # type: (Dependency) -> Dependency - if dependency.category == "dev": - self.dev_requires.append(dependency) - else: - self.requires.append(dependency) - - return dependency - - def to_dependency( - self, - ): # type: () -> Union[Dependency, "DirectoryDependency", "FileDependency", "URLDependency", "VCSDependency"] - from poetry.core.utils._compat import Path - - from .dependency import Dependency - from .directory_dependency import DirectoryDependency - from .file_dependency import FileDependency - from .url_dependency import URLDependency - from .vcs_dependency import VCSDependency - - if self.source_type == "directory": - dep = DirectoryDependency( - self._name, - Path(self._source_url), - category=self.category, - optional=self.optional, - base=self.root_dir, - develop=self.develop, - extras=self.features, - ) - elif self.source_type == "file": - dep = 
FileDependency( - self._name, - Path(self._source_url), - category=self.category, - optional=self.optional, - base=self.root_dir, - extras=self.features, - ) - elif self.source_type == "url": - dep = URLDependency( - self._name, - self._source_url, - category=self.category, - optional=self.optional, - extras=self.features, - ) - elif self.source_type == "git": - dep = VCSDependency( - self._name, - self.source_type, - self.source_url, - rev=self.source_reference, - resolved_rev=self.source_resolved_reference, - category=self.category, - optional=self.optional, - develop=self.develop, - extras=self.features, - ) - else: - dep = Dependency(self._name, self._version, extras=self.features) - - if not self.marker.is_any(): - dep.marker = self.marker - - if not self.python_constraint.is_any(): - dep.python_versions = self.python_versions - - if self._source_type not in ["directory", "file", "url", "git"]: - return dep - - return dep.with_constraint(self._version) - - @contextmanager - def with_python_versions(self, python_versions): # type: (str) -> None - original_python_versions = self.python_versions - - self.python_versions = python_versions - - yield - - self.python_versions = original_python_versions - - def with_features(self, features): # type: (List[str]) -> "Package" - package = self.clone() - - package._features = frozenset(features) - - return package - - def without_features(self): # type: () -> "Package" - return self.with_features([]) - - def clone(self): # type: () -> "Package" - clone = self.__class__(self.pretty_name, self.version) - clone.__dict__ = copy.deepcopy(self.__dict__) - return clone - - def __hash__(self): # type: () -> int - return super(Package, self).__hash__() ^ hash(self._version) - - def __eq__(self, other): # type: (Package) -> bool - if not isinstance(other, Package): - return NotImplemented - - return self.is_same_package_as(other) and self._version == other.version - - def __str__(self): # type: () -> str - return "{} ({})".format(self.complete_name, self.full_pretty_version) - - def __repr__(self): # type: () -> str - args = [repr(self._name), repr(self._version.text)] - - if self._features: - args.append("features={}".format(repr(self._features))) - - if self._source_type: - args.append("source_type={}".format(repr(self._source_type))) - args.append("source_url={}".format(repr(self._source_url))) - - if self._source_reference: - args.append("source_reference={}".format(repr(self._source_reference))) - - if self._source_resolved_reference: - args.append( - "source_resolved_reference={}".format( - repr(self._source_resolved_reference) - ) - ) - - return "Package({})".format(", ".join(args)) diff --git a/vendor/poetry-core/poetry/core/packages/project_package.py b/vendor/poetry-core/poetry/core/packages/project_package.py deleted file mode 100644 index aabde641..00000000 --- a/vendor/poetry-core/poetry/core/packages/project_package.py +++ /dev/null @@ -1,81 +0,0 @@ -from typing import TYPE_CHECKING -from typing import Any -from typing import Dict -from typing import Optional -from typing import Union - -from poetry.core.semver import VersionRange -from poetry.core.semver import parse_constraint -from poetry.core.version.markers import parse_marker - - -if TYPE_CHECKING: - from . 
import ( - DirectoryDependency, - FileDependency, - URLDependency, - VCSDependency, - Dependency, - ) - -from .package import Package -from .utils.utils import create_nested_marker - - -class ProjectPackage(Package): - def __init__( - self, name, version, pretty_version=None - ): # type: (str, Union[str, VersionRange], Optional[str]) -> None - super(ProjectPackage, self).__init__(name, version, pretty_version) - - self.build_config = dict() - self.packages = [] - self.include = [] - self.exclude = [] - self.custom_urls = {} - - if self._python_versions == "*": - self._python_constraint = parse_constraint("~2.7 || >=3.4") - - @property - def build_script(self): # type: () -> Optional[str] - return self.build_config.get("script") - - def is_root(self): # type: () -> bool - return True - - def to_dependency( - self, - ): # type: () -> Union["DirectoryDependency", "FileDependency", "URLDependency", "VCSDependency", "Dependency"] - dependency = super(ProjectPackage, self).to_dependency() - - dependency.is_root = True - - return dependency - - @property - def python_versions(self): # type: () -> Union[str, VersionRange] - return self._python_versions - - @python_versions.setter - def python_versions(self, value): # type: (Union[str, VersionRange]) -> None - self._python_versions = value - - if value == "*" or value == VersionRange(): - value = "~2.7 || >=3.4" - - self._python_constraint = parse_constraint(value) - self._python_marker = parse_marker( - create_nested_marker("python_version", self._python_constraint) - ) - - @property - def urls(self): # type: () -> Dict[str, Any] - urls = super(ProjectPackage, self).urls - - urls.update(self.custom_urls) - - return urls - - def build_should_generate_setup(self): # type: () -> bool - return self.build_config.get("generate-setup-file", True) diff --git a/vendor/poetry-core/poetry/core/packages/specification.py b/vendor/poetry-core/poetry/core/packages/specification.py deleted file mode 100644 index 70b88f19..00000000 --- a/vendor/poetry-core/poetry/core/packages/specification.py +++ /dev/null @@ -1,118 +0,0 @@ -from typing import FrozenSet -from typing import List -from typing import Optional - -from poetry.core.utils.helpers import canonicalize_name - - -class PackageSpecification(object): - def __init__( - self, - name, # type: str - source_type=None, # type: Optional[str] - source_url=None, # type: Optional[str] - source_reference=None, # type: Optional[str] - source_resolved_reference=None, # type: Optional[str] - features=None, # type: Optional[List[str]] - ): - self._pretty_name = name - self._name = canonicalize_name(name) - self._source_type = source_type - self._source_url = source_url - self._source_reference = source_reference - self._source_resolved_reference = source_resolved_reference - - if not features: - features = [] - - self._features = frozenset(features) - - @property - def name(self): # type: () -> str - return self._name - - @property - def pretty_name(self): # type: () -> str - return self._pretty_name - - @property - def complete_name(self): # type: () -> str - name = self._name - - if self._features: - name = "{}[{}]".format(name, ",".join(sorted(self._features))) - - return name - - @property - def source_type(self): # type: () -> Optional[str] - return self._source_type - - @property - def source_url(self): # type: () -> Optional[str] - return self._source_url - - @property - def source_reference(self): # type: () -> Optional[str] - return self._source_reference - - @property - def source_resolved_reference(self): # type: 
() -> Optional[str] - return self._source_resolved_reference - - @property - def features(self): # type: () -> FrozenSet[str] - return self._features - - def is_same_package_as(self, other): # type: ("PackageSpecification") -> bool - if other.complete_name != self.complete_name: - return False - - if self._source_type: - if self._source_type != other.source_type: - return False - - if self._source_url or other.source_url: - if self._source_url != other.source_url: - return False - - if self._source_reference or other.source_reference: - # special handling for packages with references - if not self._source_reference or not other.source_reference: - # case: one reference is defined and is non-empty, but other is not - return False - - if not ( - self._source_reference == other.source_reference - or self._source_reference.startswith(other.source_reference) - or other.source_reference.startswith(self._source_reference) - ): - # case: both references defined, but one is not equal to or a short - # representation of the other - return False - - if ( - self._source_resolved_reference - and other.source_resolved_reference - and self._source_resolved_reference - != other.source_resolved_reference - ): - return False - - return True - - def __hash__(self): # type: () -> int - if not self._source_type: - return hash(self._name) - - return ( - hash(self._name) - ^ hash(self._source_type) - ^ hash(self._source_url) - ^ hash(self._source_reference) - ^ hash(self._source_resolved_reference) - ^ hash(self._features) - ) - - def __str__(self): # type: () -> str - raise NotImplementedError() diff --git a/vendor/poetry-core/poetry/core/packages/url_dependency.py b/vendor/poetry-core/poetry/core/packages/url_dependency.py deleted file mode 100644 index 2d4ce5dd..00000000 --- a/vendor/poetry-core/poetry/core/packages/url_dependency.py +++ /dev/null @@ -1,85 +0,0 @@ -from typing import TYPE_CHECKING -from typing import FrozenSet -from typing import List -from typing import Union - -from poetry.core.utils._compat import urlparse - -from .dependency import Dependency - - -if TYPE_CHECKING: - from .constraints import BaseConstraint - - -class URLDependency(Dependency): - def __init__( - self, - name, # type: str - url, # type: str - category="main", # type: str - optional=False, # type: bool - extras=None, # type: Union[List[str], FrozenSet[str]] - ): - self._url = url - - parsed = urlparse.urlparse(url) - if not parsed.scheme or not parsed.netloc: - raise ValueError("{} does not seem like a valid url".format(url)) - - super(URLDependency, self).__init__( - name, - "*", - category=category, - optional=optional, - allows_prereleases=True, - source_type="url", - source_url=self._url, - extras=extras, - ) - - @property - def url(self): # type: () -> str - return self._url - - @property - def base_pep_508_name(self): # type: () -> str - requirement = self.pretty_name - - if self.extras: - requirement += "[{}]".format(",".join(self.extras)) - - requirement += " @ {}".format(self._url) - - return requirement - - def is_url(self): # type: () -> bool - return True - - def with_constraint(self, constraint): # type: ("BaseConstraint") -> URLDependency - new = URLDependency( - self.pretty_name, - url=self._url, - optional=self.is_optional(), - category=self.category, - extras=self._extras, - ) - - new._constraint = constraint - new._pretty_constraint = str(constraint) - - new.is_root = self.is_root - new.python_versions = self.python_versions - new.marker = self.marker - new.transitive_marker = self.transitive_marker - - for 
in_extra in self.in_extras: - new.in_extras.append(in_extra) - - return new - - def __str__(self): # type: () -> str - return "{} ({} url)".format(self._pretty_name, self._pretty_constraint) - - def __hash__(self): # type: () -> int - return hash((self._name, self._url)) diff --git a/vendor/poetry-core/poetry/core/packages/utils/link.py b/vendor/poetry-core/poetry/core/packages/utils/link.py deleted file mode 100644 index 76f6c1c7..00000000 --- a/vendor/poetry-core/poetry/core/packages/utils/link.py +++ /dev/null @@ -1,189 +0,0 @@ -import posixpath -import re - -from typing import TYPE_CHECKING -from typing import Any -from typing import Optional -from typing import Tuple - - -if TYPE_CHECKING: - from pip._internal.index.collector import HTMLPage # noqa - -from .utils import path_to_url -from .utils import splitext - - -try: - import urllib.parse as urlparse -except ImportError: - import urlparse - - -class Link: - def __init__( - self, url, comes_from=None, requires_python=None - ): # type: (str, Optional["HTMLPage"], Optional[str]) -> None - """ - Object representing a parsed link from https://pypi.python.org/simple/* - - url: - url of the resource pointed to (href of the link) - comes_from: - instance of HTMLPage where the link was found, or string. - requires_python: - String containing the `Requires-Python` metadata field, specified - in PEP 345. This may be specified by a data-requires-python - attribute in the HTML link tag, as described in PEP 503. - """ - - # url can be a UNC windows share - if url.startswith("\\\\"): - url = path_to_url(url) - - self.url = url - self.comes_from = comes_from - self.requires_python = requires_python if requires_python else None - - def __str__(self): # type: () -> str - if self.requires_python: - rp = " (requires-python:%s)" % self.requires_python - else: - rp = "" - if self.comes_from: - return "%s (from %s)%s" % (self.url, self.comes_from, rp) - else: - return str(self.url) - - def __repr__(self): # type: () -> str - return "<Link %s>" % self - - def __eq__(self, other): # type: (Any) -> bool - if not isinstance(other, Link): - return NotImplemented - return self.url == other.url - - def __ne__(self, other): # type: (Any) -> bool - if not isinstance(other, Link): - return NotImplemented - return self.url != other.url - - def __lt__(self, other): # type: (Any) -> bool - if not isinstance(other, Link): - return NotImplemented - return self.url < other.url - - def __le__(self, other): # type: (Any) -> bool - if not isinstance(other, Link): - return NotImplemented - return self.url <= other.url - - def __gt__(self, other): # type: (Any) -> bool - if not isinstance(other, Link): - return NotImplemented - return self.url > other.url - - def __ge__(self, other): # type: (Any) -> bool - if not isinstance(other, Link): - return NotImplemented - return self.url >= other.url - - def __hash__(self): # type: () -> int - return hash(self.url) - - @property - def filename(self): # type: () -> str - _, netloc, path, _, _ = urlparse.urlsplit(self.url) - name = posixpath.basename(path.rstrip("/")) or netloc - name = urlparse.unquote(name) - assert name, "URL %r produced no filename" % self.url - return name - - @property - def scheme(self): # type: () -> str - return urlparse.urlsplit(self.url)[0] - - @property - def netloc(self): # type: () -> str - return urlparse.urlsplit(self.url)[1] - - @property - def path(self): # type: () -> str - return urlparse.unquote(urlparse.urlsplit(self.url)[2]) - - def splitext(self): # type: () -> Tuple[str, str] - return
splitext(posixpath.basename(self.path.rstrip("/"))) - - @property - def ext(self): # type: () -> str - return self.splitext()[1] - - @property - def url_without_fragment(self): # type: () -> str - scheme, netloc, path, query, fragment = urlparse.urlsplit(self.url) - return urlparse.urlunsplit((scheme, netloc, path, query, None)) - - _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)") - - @property - def egg_fragment(self): # type: () -> Optional[str] - match = self._egg_fragment_re.search(self.url) - if not match: - return None - return match.group(1) - - _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)") - - @property - def subdirectory_fragment(self): # type: () -> Optional[str] - match = self._subdirectory_fragment_re.search(self.url) - if not match: - return None - return match.group(1) - - _hash_re = re.compile(r"(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)") - - @property - def hash(self): # type: () -> Optional[str] - match = self._hash_re.search(self.url) - if match: - return match.group(2) - return None - - @property - def hash_name(self): # type: () -> Optional[str] - match = self._hash_re.search(self.url) - if match: - return match.group(1) - return None - - @property - def show_url(self): # type: () -> str - return posixpath.basename(self.url.split("#", 1)[0].split("?", 1)[0]) - - @property - def is_wheel(self): # type: () -> bool - return self.ext == ".whl" - - @property - def is_wininst(self): # type: () -> bool - return self.ext == ".exe" - - @property - def is_egg(self): # type: () -> bool - return self.ext == ".egg" - - @property - def is_sdist(self): # type: () -> bool - return self.ext in {".tar.bz2", ".tar.gz", ".zip"} - - @property - def is_artifact(self): # type: () -> bool - """ - Determines if this points to an actual artifact (e.g. a tarball) or if - it points to an "abstract" thing like a path or a VCS location. 
- """ - if self.scheme in ["ssh", "git", "hg", "bzr", "sftp", "svn"]: - return False - - return True diff --git a/vendor/poetry-core/poetry/core/packages/utils/utils.py b/vendor/poetry-core/poetry/core/packages/utils/utils.py deleted file mode 100644 index 6b380660..00000000 --- a/vendor/poetry-core/poetry/core/packages/utils/utils.py +++ /dev/null @@ -1,352 +0,0 @@ -import os -import posixpath -import re -import sys - -from typing import TYPE_CHECKING -from typing import Dict -from typing import List -from typing import Tuple -from typing import Union - -from six.moves.urllib.parse import unquote # noqa -from six.moves.urllib.parse import urlsplit # noqa -from six.moves.urllib.request import url2pathname # noqa - -from poetry.core.packages.constraints.constraint import Constraint -from poetry.core.packages.constraints.multi_constraint import MultiConstraint -from poetry.core.packages.constraints.union_constraint import UnionConstraint -from poetry.core.semver import EmptyConstraint -from poetry.core.semver import Version -from poetry.core.semver import VersionConstraint -from poetry.core.semver import VersionRange -from poetry.core.semver import VersionUnion -from poetry.core.semver import parse_constraint -from poetry.core.utils._compat import Path -from poetry.core.version.markers import BaseMarker -from poetry.core.version.markers import MarkerUnion -from poetry.core.version.markers import MultiMarker -from poetry.core.version.markers import SingleMarker - - -if TYPE_CHECKING: - from poetry.core.packages.constraints import BaseConstraint # noqa - from poetry.core.semver import VersionTypes # noqa - -BZ2_EXTENSIONS = (".tar.bz2", ".tbz") -XZ_EXTENSIONS = (".tar.xz", ".txz", ".tlz", ".tar.lz", ".tar.lzma") -ZIP_EXTENSIONS = (".zip", ".whl") -TAR_EXTENSIONS = (".tar.gz", ".tgz", ".tar") -ARCHIVE_EXTENSIONS = ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS -SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS - -try: - import bz2 # noqa - - SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS -except ImportError: - pass - -try: - # Only for Python 3.3+ - import lzma # noqa - - SUPPORTED_EXTENSIONS += XZ_EXTENSIONS -except ImportError: - pass - - -def path_to_url(path): # type: (Union[str, Path]) -> str - """ - Convert a path to a file: URL. The path will be made absolute unless otherwise - specified and have quoted path parts. - """ - return Path(path).absolute().as_uri() - - -def url_to_path(url): # type: (str) -> Path - """ - Convert an RFC8089 file URI to path. - - The logic used here is borrowed from pip - https://github.com/pypa/pip/blob/4d1932fcdd1974c820ea60b3286984ebb0c3beaa/src/pip/_internal/utils/urls.py#L31 - """ - if not url.startswith("file:"): - raise ValueError("{} is not a valid file URI".format(url)) - - _, netloc, path, _, _ = urlsplit(url) - - if not netloc or netloc == "localhost": - # According to RFC 8089, same as empty authority. - netloc = "" - elif netloc not in {".", ".."} and sys.platform == "win32": - # If we have a UNC path, prepend UNC share notation. 
- netloc = "\\\\" + netloc - else: - raise ValueError( - "non-local file URIs are not supported on this platform: {}".format(url) - ) - - return Path(url2pathname(netloc + unquote(path))) - - -def is_url(name): # type: (str) -> bool - if ":" not in name: - return False - scheme = name.split(":", 1)[0].lower() - - return scheme in [ - "http", - "https", - "file", - "ftp", - "ssh", - "git", - "hg", - "bzr", - "sftp", - "svn", - "ssh", - ] - - -def strip_extras(path): # type: (str) -> Tuple[str, str] - m = re.match(r"^(.+)(\[[^\]]+\])$", path) - extras = None - if m: - path_no_extras = m.group(1) - extras = m.group(2) - else: - path_no_extras = path - - return path_no_extras, extras - - -def is_installable_dir(path): # type: (str) -> bool - """Return True if `path` is a directory containing a setup.py file.""" - if not os.path.isdir(path): - return False - setup_py = os.path.join(path, "setup.py") - if os.path.isfile(setup_py): - return True - return False - - -def is_archive_file(name): # type: (str) -> bool - """Return True if `name` is a considered as an archive file.""" - ext = splitext(name)[1].lower() - if ext in ARCHIVE_EXTENSIONS: - return True - return False - - -def splitext(path): # type: (str) -> Tuple[str, str] - """Like os.path.splitext, but take off .tar too""" - base, ext = posixpath.splitext(path) - if base.lower().endswith(".tar"): - ext = base[-4:] + ext - base = base[:-4] - return base, ext - - -def group_markers( - markers, or_=False -): # type: (List[BaseMarker], bool) -> List[Union[Tuple[str, str, str], List[Tuple[str, str, str]]]] - groups = [[]] - - for marker in markers: - if or_: - groups.append([]) - - if isinstance(marker, (MultiMarker, MarkerUnion)): - groups[-1].append( - group_markers(marker.markers, isinstance(marker, MarkerUnion)) - ) - elif isinstance(marker, SingleMarker): - lhs, op, rhs = marker.name, marker.operator, marker.value - - groups[-1].append((lhs, op, rhs)) - - return groups - - -def convert_markers(marker): # type: (BaseMarker) -> Dict[str, List[Tuple[str, str]]] - groups = group_markers([marker]) - - requirements = {} - - def _group( - _groups, or_=False - ): # type: (List[Union[Tuple[str, str, str], List[Tuple[str, str, str]]]], bool) -> None - ors = {} - for group in _groups: - if isinstance(group, list): - _group(group, or_=True) - else: - variable, op, value = group - group_name = str(variable) - - # python_full_version is equivalent to python_version - # for Poetry so we merge them - if group_name == "python_full_version": - group_name = "python_version" - - if group_name not in requirements: - requirements[group_name] = [] - - if group_name not in ors: - ors[group_name] = or_ - - if ors[group_name] or not requirements[group_name]: - requirements[group_name].append([]) - - requirements[group_name][-1].append((str(op), str(value))) - - ors[group_name] = False - - _group(groups, or_=True) - - return requirements - - -def create_nested_marker( - name, constraint -): # type: (str, Union["BaseConstraint", VersionUnion, Version, VersionConstraint]) -> str - if constraint.is_any(): - return "" - - if isinstance(constraint, (MultiConstraint, UnionConstraint)): - parts = [] - for c in constraint.constraints: - multi = False - if isinstance(c, (MultiConstraint, UnionConstraint)): - multi = True - - parts.append((multi, create_nested_marker(name, c))) - - glue = " and " - if isinstance(constraint, UnionConstraint): - parts = ["({})".format(part[1]) if part[0] else part[1] for part in parts] - glue = " or " - else: - parts = [part[1] for part in 
parts] - - marker = glue.join(parts) - elif isinstance(constraint, Constraint): - marker = '{} {} "{}"'.format(name, constraint.operator, constraint.version) - elif isinstance(constraint, VersionUnion): - parts = [] - for c in constraint.ranges: - parts.append(create_nested_marker(name, c)) - - glue = " or " - parts = ["({})".format(part) for part in parts] - - marker = glue.join(parts) - elif isinstance(constraint, Version): - if name == "python_version" and constraint.precision >= 3: - name = "python_full_version" - - marker = '{} == "{}"'.format(name, constraint.text) - else: - if constraint.min is not None: - op = ">=" - if not constraint.include_min: - op = ">" - - version = constraint.min - if constraint.max is not None: - min_name = max_name = name - if min_name == "python_version" and constraint.min.precision >= 3: - min_name = "python_full_version" - - if max_name == "python_version" and constraint.max.precision >= 3: - max_name = "python_full_version" - - text = '{} {} "{}"'.format(min_name, op, version) - - op = "<=" - if not constraint.include_max: - op = "<" - - version = constraint.max - - text += ' and {} {} "{}"'.format(max_name, op, version) - - return text - elif constraint.max is not None: - op = "<=" - if not constraint.include_max: - op = "<" - - version = constraint.max - else: - return "" - - if name == "python_version" and version.precision >= 3: - name = "python_full_version" - - marker = '{} {} "{}"'.format(name, op, version) - - return marker - - -def get_python_constraint_from_marker(marker,): # type: (BaseMarker) -> "VersionTypes" - python_marker = marker.only("python_version", "python_full_version") - if python_marker.is_any(): - return VersionRange() - - if python_marker.is_empty(): - return EmptyConstraint() - - markers = convert_markers(marker) - - ors = [] - for or_ in markers["python_version"]: - ands = [] - for op, version in or_: - # Expand python version - if op == "==": - version = "~" + version - op = "" - elif op == "!=": - version += ".*" - elif op in ("<=", ">"): - parsed_version = Version.parse(version) - if parsed_version.precision == 1: - if op == "<=": - op = "<" - version = parsed_version.next_major.text - elif op == ">": - op = ">=" - version = parsed_version.next_major.text - elif parsed_version.precision == 2: - if op == "<=": - op = "<" - version = parsed_version.next_minor.text - elif op == ">": - op = ">=" - version = parsed_version.next_minor.text - elif op in ("in", "not in"): - versions = [] - for v in re.split("[ ,]+", version): - split = v.split(".") - if len(split) in [1, 2]: - split.append("*") - op_ = "" if op == "in" else "!=" - else: - op_ = "==" if op == "in" else "!=" - - versions.append(op_ + ".".join(split)) - - glue = " || " if op == "in" else ", " - if versions: - ands.append(glue.join(versions)) - - continue - - ands.append("{}{}".format(op, version)) - - ors.append(" ".join(ands)) - - return parse_constraint(" || ".join(ors)) diff --git a/vendor/poetry-core/poetry/core/packages/vcs_dependency.py b/vendor/poetry-core/poetry/core/packages/vcs_dependency.py deleted file mode 100644 index 2800644a..00000000 --- a/vendor/poetry-core/poetry/core/packages/vcs_dependency.py +++ /dev/null @@ -1,165 +0,0 @@ -from typing import TYPE_CHECKING -from typing import FrozenSet -from typing import List -from typing import Optional -from typing import Union - -from poetry.core.vcs import git - -from .dependency import Dependency - - -if TYPE_CHECKING: - from .constraints import BaseConstraint - - -class VCSDependency(Dependency): - """ - 
Represents a VCS dependency - """ - - def __init__( - self, - name, # type: str - vcs, # type: str - source, # type: str - branch=None, # type: Optional[str] - tag=None, # type: Optional[str] - rev=None, # type: Optional[str] - resolved_rev=None, # type: Optional[str] - category="main", # type: str - optional=False, # type: bool - develop=False, # type: bool - extras=None, # type: Union[List[str], FrozenSet[str]] - ): - self._vcs = vcs - self._source = source - - if not any([branch, tag, rev]): - # If nothing has been specified, we assume master - branch = "master" - - self._branch = branch - self._tag = tag - self._rev = rev - self._develop = develop - - super(VCSDependency, self).__init__( - name, - "*", - category=category, - optional=optional, - allows_prereleases=True, - source_type=self._vcs.lower(), - source_url=self._source, - source_reference=branch or tag or rev, - source_resolved_reference=resolved_rev, - extras=extras, - ) - - @property - def vcs(self): # type: () -> str - return self._vcs - - @property - def source(self): # type: () -> str - return self._source - - @property - def branch(self): # type: () -> Optional[str] - return self._branch - - @property - def tag(self): # type: () -> Optional[str] - return self._tag - - @property - def rev(self): # type: () -> Optional[str] - return self._rev - - @property - def develop(self): # type: () -> bool - return self._develop - - @property - def reference(self): # type: () -> str - return self._branch or self._tag or self._rev - - @property - def pretty_constraint(self): # type: () -> str - if self._branch: - what = "branch" - version = self._branch - elif self._tag: - what = "tag" - version = self._tag - else: - what = "rev" - version = self._rev - - return "{} {}".format(what, version) - - @property - def base_pep_508_name(self): # type: () -> str - requirement = self.pretty_name - parsed_url = git.ParsedUrl.parse(self._source) - - if self.extras: - requirement += "[{}]".format(",".join(self.extras)) - - if parsed_url.protocol is not None: - requirement += " @ {}+{}@{}".format(self._vcs, self._source, self.reference) - else: - requirement += " @ {}+ssh://{}@{}".format( - self._vcs, parsed_url.format(), self.reference - ) - - return requirement - - def is_vcs(self): # type: () -> bool - return True - - def accepts_prereleases(self): # type: () -> bool - return True - - def with_constraint(self, constraint): # type: ("BaseConstraint") -> VCSDependency - new = VCSDependency( - self.pretty_name, - self._vcs, - self._source, - branch=self._branch, - tag=self._tag, - rev=self._rev, - resolved_rev=self._source_resolved_reference, - optional=self.is_optional(), - category=self.category, - develop=self._develop, - extras=self._extras, - ) - - new._constraint = constraint - new._pretty_constraint = str(constraint) - - new.is_root = self.is_root - new.python_versions = self.python_versions - new.marker = self.marker - new.transitive_marker = self.transitive_marker - - for in_extra in self.in_extras: - new.in_extras.append(in_extra) - - return new - - def __str__(self): # type: () -> str - reference = self._vcs - if self._branch: - reference += " branch {}".format(self._branch) - elif self._tag: - reference += " tag {}".format(self._tag) - elif self._rev: - reference += " rev {}".format(self._rev) - - return "{} ({} {})".format(self._pretty_name, self._constraint, reference) - - def __hash__(self): # type: () -> int - return hash((self._name, self._vcs, self._branch, self._tag, self._rev)) diff --git 
a/vendor/poetry-core/poetry/core/poetry.py b/vendor/poetry-core/poetry/core/poetry.py deleted file mode 100644 index af04a0da..00000000 --- a/vendor/poetry-core/poetry/core/poetry.py +++ /dev/null @@ -1,41 +0,0 @@ -from __future__ import absolute_import -from __future__ import unicode_literals - -from typing import TYPE_CHECKING -from typing import Any - -from poetry.core.pyproject import PyProjectTOML -from poetry.core.utils._compat import Path # noqa - - -if TYPE_CHECKING: - from poetry.core.packages import ProjectPackage # noqa - from poetry.core.pyproject.toml import PyProjectTOMLFile # noqa - - -class Poetry(object): - def __init__( - self, file, local_config, package, - ): # type: (Path, dict, "ProjectPackage") -> None - self._pyproject = PyProjectTOML(file) - self._package = package - self._local_config = local_config - - @property - def pyproject(self): # type: () -> PyProjectTOML - return self._pyproject - - @property - def file(self): # type: () -> "PyProjectTOMLFile" - return self._pyproject.file - - @property - def package(self): # type: () -> "ProjectPackage" - return self._package - - @property - def local_config(self): # type: () -> dict - return self._local_config - - def get_project_config(self, config, default=None): # type: (str, Any) -> Any - return self._local_config.get("config", {}).get(config, default) diff --git a/vendor/poetry-core/poetry/core/pyproject/__init__.py b/vendor/poetry-core/poetry/core/pyproject/__init__.py deleted file mode 100644 index 9f760fbe..00000000 --- a/vendor/poetry-core/poetry/core/pyproject/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from poetry.core.pyproject.exceptions import PyProjectException -from poetry.core.pyproject.tables import BuildSystem -from poetry.core.pyproject.toml import PyProjectTOML - - -__all__ = [clazz.__name__ for clazz in {BuildSystem, PyProjectException, PyProjectTOML}] diff --git a/vendor/poetry-core/poetry/core/pyproject/exceptions.py b/vendor/poetry-core/poetry/core/pyproject/exceptions.py deleted file mode 100644 index d82bb492..00000000 --- a/vendor/poetry-core/poetry/core/pyproject/exceptions.py +++ /dev/null @@ -1,5 +0,0 @@ -from poetry.core.exceptions import PoetryCoreException - - -class PyProjectException(PoetryCoreException): - pass diff --git a/vendor/poetry-core/poetry/core/pyproject/tables.py b/vendor/poetry-core/poetry/core/pyproject/tables.py deleted file mode 100644 index 1f368622..00000000 --- a/vendor/poetry-core/poetry/core/pyproject/tables.py +++ /dev/null @@ -1,62 +0,0 @@ -from typing import TYPE_CHECKING -from typing import List -from typing import Optional - -from poetry.core.utils._compat import Path -from poetry.core.utils.helpers import canonicalize_name - - -if TYPE_CHECKING: - from poetry.core.packages import Dependency # noqa - - -# TODO: Convert to dataclass once python 2.7, 3.5 is dropped -class BuildSystem: - def __init__( - self, build_backend=None, requires=None - ): # type: (Optional[str], Optional[List[str]]) -> None - self.build_backend = ( - build_backend - if build_backend is not None - else "setuptools.build_meta:__legacy__" - ) - self.requires = requires if requires is not None else ["setuptools", "wheel"] - self._dependencies = None - - @property - def dependencies(self): # type: () -> List["Dependency"] - if self._dependencies is None: - # avoid circular dependency when loading DirectoryDependency - from poetry.core.packages import DirectoryDependency - from poetry.core.packages import FileDependency - from poetry.core.packages import dependency_from_pep_508 - - 
self._dependencies = [] - for requirement in self.requires: - dependency = None - try: - dependency = dependency_from_pep_508(requirement) - except ValueError: - # PEP 517 requires can be path if not PEP 508 - path = Path(requirement) - try: - if path.is_file(): - dependency = FileDependency( - name=canonicalize_name(path.name), path=path - ) - elif path.is_dir(): - dependency = DirectoryDependency( - name=canonicalize_name(path.name), path=path - ) - except OSError: - # compatibility Python < 3.8 - # https://docs.python.org/3/library/pathlib.html#methods - pass - - if dependency is None: - # skip since we could not determine requirement - continue - - self._dependencies.append(dependency) - - return self._dependencies diff --git a/vendor/poetry-core/poetry/core/pyproject/toml.py b/vendor/poetry-core/poetry/core/pyproject/toml.py deleted file mode 100644 index a223dc1f..00000000 --- a/vendor/poetry-core/poetry/core/pyproject/toml.py +++ /dev/null @@ -1,90 +0,0 @@ -from typing import Any -from typing import Optional -from typing import Union - -from tomlkit.container import Container -from tomlkit.toml_document import TOMLDocument - -from poetry.core.pyproject.exceptions import PyProjectException -from poetry.core.pyproject.tables import BuildSystem -from poetry.core.toml import TOMLFile -from poetry.core.utils._compat import Path - - -class PyProjectTOML: - def __init__(self, path): # type: (Union[str, Path]) -> None - self._file = TOMLFile(path=path) - self._data = None # type: Optional[TOMLDocument] - self._build_system = None # type: Optional[BuildSystem] - self._poetry_config = None # type: Optional[TOMLDocument] - - @property - def file(self): # type: () -> TOMLFile - return self._file - - @property - def data(self): # type: () -> TOMLDocument - if self._data is None: - if not self._file.exists(): - self._data = TOMLDocument() - else: - self._data = self._file.read() - return self._data - - @property - def build_system(self): # type: () -> BuildSystem - if self._build_system is None: - build_backend = None - requires = None - - if not self._file.exists(): - build_backend = "poetry.core.masonry.api" - requires = ["poetry-core"] - - container = self.data.get("build-system", {}) - self._build_system = BuildSystem( - build_backend=container.get("build-backend", build_backend), - requires=container.get("requires", requires), - ) - return self._build_system - - @property - def poetry_config(self): # type: () -> Optional[TOMLDocument] - if self._poetry_config is None: - self._poetry_config = self.data.get("tool", {}).get("poetry") - if self._poetry_config is None: - raise PyProjectException( - "[tool.poetry] section not found in {}".format(self._file) - ) - return self._poetry_config - - def is_poetry_project(self): # type: () -> bool - if self.file.exists(): - try: - _ = self.poetry_config - return True - except PyProjectException: - pass - return False - - def __getattr__(self, item): # type: (str) -> Any - return getattr(self.data, item) - - def save(self): # type: () -> None - data = self.data - - if self._poetry_config is not None: - data["tool"]["poetry"] = self._poetry_config - - if self._build_system is not None: - if "build-system" not in data: - data["build-system"] = Container() - data["build-system"]["requires"] = self._build_system.requires - data["build-system"]["build-backend"] = self._build_system.build_backend - - self.file.write(data=data) - - def reload(self): # type: () -> None - self._data = None - self._build_system = None - self._poetry_config = None diff --git 
a/vendor/poetry-core/poetry/core/semver/__init__.py b/vendor/poetry-core/poetry/core/semver/__init__.py deleted file mode 100644 index 2cff22d6..00000000 --- a/vendor/poetry-core/poetry/core/semver/__init__.py +++ /dev/null @@ -1,151 +0,0 @@ -import re - -from typing import Union - -from .empty_constraint import EmptyConstraint -from .exceptions import ParseConstraintError -from .patterns import BASIC_CONSTRAINT -from .patterns import CARET_CONSTRAINT -from .patterns import TILDE_CONSTRAINT -from .patterns import TILDE_PEP440_CONSTRAINT -from .patterns import X_CONSTRAINT -from .version import Version -from .version_constraint import VersionConstraint -from .version_range import VersionRange -from .version_union import VersionUnion - - -VersionTypes = Union[Version, VersionRange, VersionUnion, EmptyConstraint] - - -def parse_constraint(constraints): # type: (str) -> VersionTypes - if constraints == "*": - return VersionRange() - - or_constraints = re.split(r"\s*\|\|?\s*", constraints.strip()) - or_groups = [] - for constraints in or_constraints: - and_constraints = re.split( - "(?<!^)(?<![=>< ,]) *(?<!-)[, ](?!-) *(?!,|$)", constraints - ) - constraint_objects = [] - - if len(and_constraints) > 1: - for constraint in and_constraints: - constraint_objects.append(parse_single_constraint(constraint)) - else: - constraint_objects.append(parse_single_constraint(and_constraints[0])) - - if len(constraint_objects) == 1: - constraint = constraint_objects[0] - else: - constraint = constraint_objects[0] - for next_constraint in constraint_objects[1:]: - constraint = constraint.intersect(next_constraint) - - or_groups.append(constraint) - - if len(or_groups) == 1: - return or_groups[0] - else: - return VersionUnion.of(*or_groups) - - -def parse_single_constraint(constraint): # type: (str) -> VersionTypes - m = re.match(r"(?i)^v?[xX*](\.[xX*])*$", constraint) - if m: - return VersionRange() - - # Tilde range - m = TILDE_CONSTRAINT.match(constraint) - if m: - version = Version.parse(m.group(1)) - - high = version.stable.next_minor - if len(m.group(1).split(".")) == 1: - high = version.stable.next_major - - return VersionRange(version, high, include_min=True) - - # PEP 440 Tilde range (~=) - m = TILDE_PEP440_CONSTRAINT.match(constraint) - if m: - precision = 1 - if m.group(3): - precision += 1 - - if m.group(4): - precision += 1 - - version = Version.parse(m.group(1)) - - if precision == 2: - high = version.stable.next_major - else: - high = version.stable.next_minor - - return VersionRange(version, high, include_min=True) - - # Caret range - m = CARET_CONSTRAINT.match(constraint) - if m: - version = Version.parse(m.group(1)) - - return VersionRange(version, version.next_breaking, include_min=True) - - # X Range - m = X_CONSTRAINT.match(constraint) - if m: - op = m.group(1) - major = int(m.group(2)) - minor = m.group(3) - - if minor is not None: - version = Version(major, int(minor), 0) - - result = VersionRange(version, version.next_minor, include_min=True) - else: - if major == 0: - result = VersionRange(max=Version(1, 0, 0)) - else: - version = Version(major, 0, 0) - - result = VersionRange(version, version.next_major, include_min=True) - - if op == "!=": - result = VersionRange().difference(result) - - return result - - # Basic comparator - m = BASIC_CONSTRAINT.match(constraint) - if m: - op = m.group(1) - version = m.group(2) - - if version == "dev": - version = "0.0-dev" - - try: - version = Version.parse(version) - except ValueError: - raise ValueError( - "Could not parse version constraint: {}".format(constraint) - ) - - if op == "<": - return VersionRange(max=version) - elif op == "<=": - return
VersionRange(max=version, include_max=True) - elif op == ">": - return VersionRange(min=version) - elif op == ">=": - return VersionRange(min=version, include_min=True) - elif op == "!=": - return VersionUnion(VersionRange(max=version), VersionRange(min=version)) - else: - return version - - raise ParseConstraintError( - "Could not parse version constraint: {}".format(constraint) - ) diff --git a/vendor/poetry-core/poetry/core/semver/empty_constraint.py b/vendor/poetry-core/poetry/core/semver/empty_constraint.py deleted file mode 100644 index c463fa58..00000000 --- a/vendor/poetry-core/poetry/core/semver/empty_constraint.py +++ /dev/null @@ -1,37 +0,0 @@ -from typing import TYPE_CHECKING - -from .version_constraint import VersionConstraint - - -if TYPE_CHECKING: - from . import VersionTypes # noqa - from .version import Version # noqa - - -class EmptyConstraint(VersionConstraint): - def is_empty(self): # type: () -> bool - return True - - def is_any(self): # type: () -> bool - return False - - def allows(self, version): # type: ("Version") -> bool - return False - - def allows_all(self, other): # type: ("VersionTypes") -> bool - return other.is_empty() - - def allows_any(self, other): # type: ("VersionTypes") -> bool - return False - - def intersect(self, other): # type: ("VersionTypes") -> EmptyConstraint - return self - - def union(self, other): # type: ("VersionTypes") -> "VersionTypes" - return other - - def difference(self, other): # type: ("VersionTypes") -> EmptyConstraint - return self - - def __str__(self): # type: () -> str - return "<empty>" diff --git a/vendor/poetry-core/poetry/core/semver/exceptions.py b/vendor/poetry-core/poetry/core/semver/exceptions.py deleted file mode 100644 index b2432399..00000000 --- a/vendor/poetry-core/poetry/core/semver/exceptions.py +++ /dev/null @@ -1,6 +0,0 @@ -class ParseVersionError(ValueError): - pass - - -class ParseConstraintError(ValueError): - pass diff --git a/vendor/poetry-core/poetry/core/semver/patterns.py b/vendor/poetry-core/poetry/core/semver/patterns.py deleted file mode 100644 index 6cda2a30..00000000 --- a/vendor/poetry-core/poetry/core/semver/patterns.py +++ /dev/null @@ -1,22 +0,0 @@ -import re - - -MODIFIERS = ( - "[._-]?" - r"((?!post)(?:beta|b|c|pre|RC|alpha|a|patch|pl|p|dev)(?:(?:[.-]?\d+)*)?)?" - r"([+-]?([0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*))?"
-) - -_COMPLETE_VERSION = r"v?(\d+)(?:\.(\d+))?(?:\.(\d+))?(?:\.(\d+))?{}(?:\+[^\s]+)?".format( - MODIFIERS -) - -COMPLETE_VERSION = re.compile("(?i)" + _COMPLETE_VERSION) - -CARET_CONSTRAINT = re.compile(r"(?i)^\^({})$".format(_COMPLETE_VERSION)) -TILDE_CONSTRAINT = re.compile(r"(?i)^~(?!=)\s*({})$".format(_COMPLETE_VERSION)) -TILDE_PEP440_CONSTRAINT = re.compile(r"(?i)^~=\s*({})$".format(_COMPLETE_VERSION)) -X_CONSTRAINT = re.compile(r"^(!=|==)?\s*v?(\d+)(?:\.(\d+))?(?:\.(\d+))?(?:\.[xX*])+$") -BASIC_CONSTRAINT = re.compile( - r"(?i)^(<>|!=|>=?|<=?|==?)?\s*({}|dev)".format(_COMPLETE_VERSION) -) diff --git a/vendor/poetry-core/poetry/core/semver/version.py b/vendor/poetry-core/poetry/core/semver/version.py deleted file mode 100644 index acd5f3e8..00000000 --- a/vendor/poetry-core/poetry/core/semver/version.py +++ /dev/null @@ -1,476 +0,0 @@ -import re - -from typing import TYPE_CHECKING -from typing import List -from typing import Optional -from typing import Union - -from .empty_constraint import EmptyConstraint -from .exceptions import ParseVersionError -from .patterns import COMPLETE_VERSION -from .version_constraint import VersionConstraint -from .version_range import VersionRange -from .version_union import VersionUnion - - -if TYPE_CHECKING: - from . import VersionTypes # noqa - - -class Version(VersionRange): - """ - A parsed semantic version number. - """ - - def __init__( - self, - major, # type: int - minor=None, # type: Optional[int] - patch=None, # type: Optional[int] - rest=None, # type: Optional[int] - pre=None, # type: Optional[str] - build=None, # type: Optional[str] - text=None, # type: Optional[str] - precision=None, # type: Optional[int] - ): # type: (...) -> None - self._major = int(major) - self._precision = None - if precision is None: - self._precision = 1 - - if minor is None: - minor = 0 - else: - if self._precision is not None: - self._precision += 1 - - self._minor = int(minor) - - if patch is None: - patch = 0 - else: - if self._precision is not None: - self._precision += 1 - - if rest is None: - rest = 0 - else: - if self._precision is not None: - self._precision += 1 - - if precision is not None: - self._precision = precision - - self._patch = int(patch) - self._rest = int(rest) - - if text is None: - parts = [str(major)] - if self._precision >= 2 or minor != 0: - parts.append(str(minor)) - - if self._precision >= 3 or patch != 0: - parts.append(str(patch)) - - if self._precision >= 4 or rest != 0: - parts.append(str(rest)) - - text = ".".join(parts) - if pre: - text += "-{}".format(pre) - - if build: - text += "+{}".format(build) - - self._text = text - - pre = self._normalize_prerelease(pre) - - self._prerelease = [] - if pre is not None: - self._prerelease = self._split_parts(pre) - - build = self._normalize_build(build) - - self._build = [] - if build is not None: - if build.startswith(("-", "+")): - build = build[1:] - - self._build = self._split_parts(build) - - @property - def major(self): # type: () -> int - return self._major - - @property - def minor(self): # type: () -> int - return self._minor - - @property - def patch(self): # type: () -> int - return self._patch - - @property - def rest(self): # type: () -> int - return self._rest - - @property - def prerelease(self): # type: () -> List[str] - return self._prerelease - - @property - def build(self): # type: () -> List[str] - return self._build - - @property - def text(self): # type: () -> str - return self._text - - @property - def precision(self): # type: () -> int - return self._precision - - 
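Note: `precision` records how many version components were actually written; the constraint parser in the deleted `semver/__init__.py` above uses it to decide how far `~` and `^` ranges extend. A minimal sketch of that behaviour against this vendored pre-1.1 API (import path as in the deleted sources):

    from poetry.core.semver import Version, parse_constraint

    v = Version.parse("1.2")      # only major.minor written, so precision == 2
    assert v.precision == 2
    assert str(v.next_minor) == "1.3"

    c = parse_constraint("~1.2")  # tilde on a two-part version: >=1.2,<1.3
    assert c.allows(Version.parse("1.2.9"))
    assert not c.allows(Version.parse("1.3.0"))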
@property - def stable(self): # type: () -> Version - if not self.is_prerelease(): - return self - - return self.next_patch - - @property - def next_major(self): # type: () -> Version - if self.is_prerelease() and self.minor == 0 and self.patch == 0: - return Version(self.major, self.minor, self.patch) - - return self._increment_major() - - @property - def next_minor(self): # type: () -> Version - if self.is_prerelease() and self.patch == 0: - return Version(self.major, self.minor, self.patch) - - return self._increment_minor() - - @property - def next_patch(self): # type: () -> Version - if self.is_prerelease(): - return Version(self.major, self.minor, self.patch) - - return self._increment_patch() - - @property - def next_breaking(self): # type: () -> Version - if self.major == 0: - if self.minor != 0: - return self._increment_minor() - - if self._precision == 1: - return self._increment_major() - elif self._precision == 2: - return self._increment_minor() - - return self._increment_patch() - - return self._increment_major() - - @property - def first_prerelease(self): # type: () -> Version - return Version.parse( - "{}.{}.{}-alpha.0".format(self.major, self.minor, self.patch) - ) - - @property - def min(self): # type: () -> Version - return self - - @property - def max(self): # type: () -> Version - return self - - @property - def full_max(self): # type: () -> Version - return self - - @property - def include_min(self): # type: () -> bool - return True - - @property - def include_max(self): # type: () -> bool - return True - - @classmethod - def parse(cls, text): # type: (str) -> Version - try: - match = COMPLETE_VERSION.match(text) - except TypeError: - match = None - - if match is None: - raise ParseVersionError('Unable to parse "{}".'.format(text)) - - text = text.rstrip(".") - - major = int(match.group(1)) - minor = int(match.group(2)) if match.group(2) else None - patch = int(match.group(3)) if match.group(3) else None - rest = int(match.group(4)) if match.group(4) else None - - pre = match.group(5) - build = match.group(6) - - if build: - build = build.lstrip("+") - - return Version(major, minor, patch, rest, pre, build, text) - - def is_any(self): # type: () -> bool - return False - - def is_empty(self): # type: () -> bool - return False - - def is_prerelease(self): # type: () -> bool - return len(self._prerelease) > 0 - - def allows(self, version): # type: (Version) -> bool - return self == version - - def allows_all(self, other): # type: ("VersionTypes") -> bool - return other.is_empty() or other == self - - def allows_any(self, other): # type: ("VersionTypes") -> bool - return other.allows(self) - - def intersect( - self, other - ): # type: ("VersionTypes") -> Union[Version, EmptyConstraint] - if other.allows(self): - return self - - return EmptyConstraint() - - def union(self, other): # type: ("VersionTypes") -> "VersionTypes" - from .version_range import VersionRange - - if other.allows(self): - return other - - if isinstance(other, VersionRange): - if other.min == self: - return VersionRange( - other.min, - other.max, - include_min=True, - include_max=other.include_max, - ) - - if other.max == self: - return VersionRange( - other.min, - other.max, - include_min=other.include_min, - include_max=True, - ) - - return VersionUnion.of(self, other) - - def difference( - self, other - ): # type: ("VersionTypes") -> Union[Version, EmptyConstraint] - if other.allows(self): - return EmptyConstraint() - - return self - - def equals_without_prerelease(self, other): # type: (Version) -> 
bool - return ( - self.major == other.major - and self.minor == other.minor - and self.patch == other.patch - ) - - def _increment_major(self): # type: () -> Version - return Version(self.major + 1, 0, 0, precision=self._precision) - - def _increment_minor(self): # type: () -> Version - return Version(self.major, self.minor + 1, 0, precision=self._precision) - - def _increment_patch(self): # type: () -> Version - return Version( - self.major, self.minor, self.patch + 1, precision=self._precision - ) - - def _normalize_prerelease(self, pre): # type: (str) -> Optional[str] - if not pre: - return - - m = re.match(r"(?i)^(a|alpha|b|beta|c|pre|rc|dev)[-.]?(\d+)?$", pre) - if not m: - return - - modifier = m.group(1) - number = m.group(2) - - if number is None: - number = 0 - - if modifier == "a": - modifier = "alpha" - elif modifier == "b": - modifier = "beta" - elif modifier in {"c", "pre"}: - modifier = "rc" - elif modifier == "dev": - modifier = "alpha" - - return "{}.{}".format(modifier, number) - - def _normalize_build(self, build): # type: (str) -> Optional[str] - if not build: - return - - if build.startswith("post"): - build = build.lstrip("post") - - if not build: - return - - return build - - def _split_parts(self, text): # type: (str) -> List[Union[str, int]] - parts = text.split(".") - - for i, part in enumerate(parts): - try: - parts[i] = int(part) - except (TypeError, ValueError): - continue - - return parts - - def __lt__(self, other): # type: (Version) -> int - return self._cmp(other) < 0 - - def __le__(self, other): # type: (Version) -> int - return self._cmp(other) <= 0 - - def __gt__(self, other): # type: (Version) -> int - return self._cmp(other) > 0 - - def __ge__(self, other): # type: (Version) -> int - return self._cmp(other) >= 0 - - def _cmp(self, other): # type: (Version) -> int - if not isinstance(other, VersionConstraint): - return NotImplemented - - if not isinstance(other, Version): - return -other._cmp(self) - - if self.major != other.major: - return self._cmp_parts(self.major, other.major) - - if self.minor != other.minor: - return self._cmp_parts(self.minor, other.minor) - - if self.patch != other.patch: - return self._cmp_parts(self.patch, other.patch) - - if self.rest != other.rest: - return self._cmp_parts(self.rest, other.rest) - - # Pre-releases always come before no pre-release string. - if not self.is_prerelease() and other.is_prerelease(): - return 1 - - if not other.is_prerelease() and self.is_prerelease(): - return -1 - - comparison = self._cmp_lists(self.prerelease, other.prerelease) - if comparison != 0: - return comparison - - # Builds always come after no build string. - if not self.build and other.build: - return -1 - - if not other.build and self.build: - return 1 - - return self._cmp_lists(self.build, other.build) - - def _cmp_parts(self, a, b): # type: (Optional[int], Optional[int]) -> int - if a < b: - return -1 - elif a > b: - return 1 - - return 0 - - def _cmp_lists(self, a, b): # type: (List, List) -> int - for i in range(max(len(a), len(b))): - a_part = None - if i < len(a): - a_part = a[i] - - b_part = None - if i < len(b): - b_part = b[i] - - if a_part == b_part: - continue - - # Missing parts come before present ones.
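# (e.g. 1.0.0-alpha < 1.0.0-alpha.1: the shorter pre-release list compares lower, per SemVer precedence rules)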
- if a_part is None: - return -1 - - if b_part is None: - return 1 - - if isinstance(a_part, int): - if isinstance(b_part, int): - return self._cmp_parts(a_part, b_part) - - return -1 - else: - if isinstance(b_part, int): - return 1 - - return self._cmp_parts(a_part, b_part) - - return 0 - - def __eq__(self, other): # type: (Version) -> bool - if not isinstance(other, Version): - return NotImplemented - - return ( - self._major == other.major - and self._minor == other.minor - and self._patch == other.patch - and self._rest == other.rest - and self._prerelease == other.prerelease - and self._build == other.build - ) - - def __ne__(self, other): # type: ("VersionTypes") -> bool - return not self == other - - def __str__(self): # type: () -> str - return self._text - - def __repr__(self): # type: () -> str - return "<Version {}>".format(str(self)) - - def __hash__(self): # type: () -> int - return hash( - ( - self.major, - self.minor, - self.patch, - ".".join(str(p) for p in self.prerelease), - ".".join(str(p) for p in self.build), - ) - ) diff --git a/vendor/poetry-core/poetry/core/semver/version_constraint.py b/vendor/poetry-core/poetry/core/semver/version_constraint.py deleted file mode 100644 index 343efc8f..00000000 --- a/vendor/poetry-core/poetry/core/semver/version_constraint.py +++ /dev/null @@ -1,31 +0,0 @@ -from typing import TYPE_CHECKING - - -if TYPE_CHECKING: - from poetry.core.semver import Version # noqa - - -class VersionConstraint: - def is_empty(self): # type: () -> bool - raise NotImplementedError() - - def is_any(self): # type: () -> bool - raise NotImplementedError() - - def allows(self, version): # type: ("Version") -> bool - raise NotImplementedError() - - def allows_all(self, other): # type: (VersionConstraint) -> bool - raise NotImplementedError() - - def allows_any(self, other): # type: (VersionConstraint) -> bool - raise NotImplementedError() - - def intersect(self, other): # type: (VersionConstraint) -> VersionConstraint - raise NotImplementedError() - - def union(self, other): # type: (VersionConstraint) -> VersionConstraint - raise NotImplementedError() - - def difference(self, other): # type: (VersionConstraint) -> VersionConstraint - raise NotImplementedError() diff --git a/vendor/poetry-core/poetry/core/semver/version_range.py b/vendor/poetry-core/poetry/core/semver/version_range.py deleted file mode 100644 index 26f42f5d..00000000 --- a/vendor/poetry-core/poetry/core/semver/version_range.py +++ /dev/null @@ -1,465 +0,0 @@ -from typing import TYPE_CHECKING -from typing import Any -from typing import List -from typing import Optional - -from .empty_constraint import EmptyConstraint -from .version_constraint import VersionConstraint -from .version_union import VersionUnion - - -if TYPE_CHECKING: - from poetry.core.semver.version import Version - - from .
import VersionTypes # noqa - - -class VersionRange(VersionConstraint): - def __init__( - self, - min=None, # type: Optional["Version"] - max=None, # type: Optional["Version"] - include_min=False, # type: bool - include_max=False, # type: bool - always_include_max_prerelease=False, # type: bool - ): - full_max = max - if ( - not always_include_max_prerelease - and not include_max - and full_max is not None - and not full_max.is_prerelease() - and not full_max.build - and ( - min is None - or not min.is_prerelease() - or not min.equals_without_prerelease(full_max) - ) - ): - full_max = full_max.first_prerelease - - self._min = min - self._max = max - self._full_max = full_max - self._include_min = include_min - self._include_max = include_max - - @property - def min(self): # type: () -> "Version" - return self._min - - @property - def max(self): # type: () -> "Version" - return self._max - - @property - def full_max(self): # type: () -> "Version" - return self._full_max - - @property - def include_min(self): # type: () -> bool - return self._include_min - - @property - def include_max(self): # type: () -> bool - return self._include_max - - def is_empty(self): # type: () -> bool - return False - - def is_any(self): # type: () -> bool - return self._min is None and self._max is None - - def allows(self, other): # type: ("Version") -> bool - if self._min is not None: - if other < self._min: - return False - - if not self._include_min and other == self._min: - return False - - if self.full_max is not None: - if other > self.full_max: - return False - - if not self._include_max and other == self.full_max: - return False - - return True - - def allows_all(self, other): # type: ("VersionTypes") -> bool - from .version import Version - - if other.is_empty(): - return True - - if isinstance(other, Version): - return self.allows(other) - - if isinstance(other, VersionUnion): - return all([self.allows_all(constraint) for constraint in other.ranges]) - - if isinstance(other, VersionRange): - return not other.allows_lower(self) and not other.allows_higher(self) - - raise ValueError("Unknown VersionConstraint type {}.".format(other)) - - def allows_any(self, other): # type: ("VersionTypes") -> bool - from .version import Version - - if other.is_empty(): - return False - - if isinstance(other, Version): - return self.allows(other) - - if isinstance(other, VersionUnion): - return any([self.allows_any(constraint) for constraint in other.ranges]) - - if isinstance(other, VersionRange): - return not other.is_strictly_lower(self) and not other.is_strictly_higher( - self - ) - - raise ValueError("Unknown VersionConstraint type {}.".format(other)) - - def intersect(self, other): # type: ("VersionTypes") -> "VersionTypes" - from .version import Version - - if other.is_empty(): - return other - - if isinstance(other, VersionUnion): - return other.intersect(self) - - # A range and a Version just yields the version if it's in the range. 
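# (e.g. (>=1.0.0,<2.0.0).intersect(Version "1.5.0") returns that Version; "2.5.0" yields EmptyConstraint())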
- if isinstance(other, Version): - if self.allows(other): - return other - - return EmptyConstraint() - - if not isinstance(other, VersionRange): - raise ValueError("Unknown VersionConstraint type {}.".format(other)) - - if self.allows_lower(other): - if self.is_strictly_lower(other): - return EmptyConstraint() - - intersect_min = other.min - intersect_include_min = other.include_min - else: - if other.is_strictly_lower(self): - return EmptyConstraint() - - intersect_min = self._min - intersect_include_min = self._include_min - - if self.allows_higher(other): - intersect_max = other.max - intersect_include_max = other.include_max - else: - intersect_max = self._max - intersect_include_max = self._include_max - - if intersect_min is None and intersect_max is None: - return VersionRange() - - # If the range is just a single version. - if intersect_min == intersect_max: - # Because we already verified that the lower range isn't strictly - # lower, there must be some overlap. - assert intersect_include_min and intersect_include_max - - return intersect_min - - # If we got here, there is an actual range. - return VersionRange( - intersect_min, intersect_max, intersect_include_min, intersect_include_max - ) - - def union(self, other): # type: ("VersionTypes") -> "VersionTypes" - from .version import Version - - if isinstance(other, Version): - if self.allows(other): - return self - - if other == self.min: - return VersionRange( - self.min, self.max, include_min=True, include_max=self.include_max - ) - - if other == self.max: - return VersionRange( - self.min, self.max, include_min=self.include_min, include_max=True - ) - - return VersionUnion.of(self, other) - - if isinstance(other, VersionRange): - # If the two ranges don't overlap, we won't be able to create a single - # VersionRange for both of them. 
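# (e.g. >=1.0.0,<1.5.0 union >=1.5.0,<2.0.0 collapses to >=1.0.0,<2.0.0 because the edges touch, while >=1.0.0,<1.2.0 union >=1.5.0,<2.0.0 stays a VersionUnion)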
- edges_touch = ( - self.max == other.min and (self.include_max or other.include_min) - ) or (self.min == other.max and (self.include_min or other.include_max)) - - if not edges_touch and not self.allows_any(other): - return VersionUnion.of(self, other) - - if self.allows_lower(other): - union_min = self.min - union_include_min = self.include_min - else: - union_min = other.min - union_include_min = other.include_min - - if self.allows_higher(other): - union_max = self.max - union_include_max = self.include_max - else: - union_max = other.max - union_include_max = other.include_max - - return VersionRange( - union_min, - union_max, - include_min=union_include_min, - include_max=union_include_max, - ) - - return VersionUnion.of(self, other) - - def difference(self, other): # type: ("VersionTypes") -> "VersionTypes" - from .version import Version - - if other.is_empty(): - return self - - if isinstance(other, Version): - if not self.allows(other): - return self - - if other == self.min: - if not self.include_min: - return self - - return VersionRange(self.min, self.max, False, self.include_max) - - if other == self.max: - if not self.include_max: - return self - - return VersionRange(self.min, self.max, self.include_min, False) - - return VersionUnion.of( - VersionRange(self.min, other, self.include_min, False), - VersionRange(other, self.max, False, self.include_max), - ) - elif isinstance(other, VersionRange): - if not self.allows_any(other): - return self - - if not self.allows_lower(other): - before = None - elif self.min == other.min: - before = self.min - else: - before = VersionRange( - self.min, other.min, self.include_min, not other.include_min - ) - - if not self.allows_higher(other): - after = None - elif self.max == other.max: - after = self.max - else: - after = VersionRange( - other.max, self.max, not other.include_max, self.include_max - ) - - if before is None and after is None: - return EmptyConstraint() - - if before is None: - return after - - if after is None: - return before - - return VersionUnion.of(before, after) - elif isinstance(other, VersionUnion): - ranges = [] # type: List[VersionRange] - current = self - - for range in other.ranges: - # Skip any ranges that are strictly lower than [current]. - if range.is_strictly_lower(current): - continue - - # If we reach a range strictly higher than [current], no more ranges - # will be relevant so we can bail early. - if range.is_strictly_higher(current): - break - - difference = current.difference(range) - if difference.is_empty(): - return EmptyConstraint() - elif isinstance(difference, VersionUnion): - # If [range] split [current] in half, we only need to continue - # checking future ranges against the latter half. 
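# (e.g. subtracting >=1.2.0,<1.4.0 from >=1.0.0,<2.0.0 keeps >=1.0.0,<1.2.0 and carries >=1.4.0,<2.0.0 forward as [current])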
- ranges.append(difference.ranges[0]) - current = difference.ranges[-1] - else: - current = difference - - if not ranges: - return current - - return VersionUnion.of(*(ranges + [current])) - - raise ValueError("Unknown VersionConstraint type {}.".format(other)) - - def allows_lower(self, other): # type: (VersionRange) -> bool - if self.min is None: - return other.min is not None - - if other.min is None: - return False - - if self.min < other.min: - return True - - if self.min > other.min: - return False - - return self.include_min and not other.include_min - - def allows_higher(self, other): # type: (VersionRange) -> bool - if self.full_max is None: - return other.max is not None - - if other.full_max is None: - return False - - if self.full_max < other.full_max: - return False - - if self.full_max > other.full_max: - return True - - return self.include_max and not other.include_max - - def is_strictly_lower(self, other): # type: (VersionRange) -> bool - if self.full_max is None or other.min is None: - return False - - if self.full_max < other.min: - return True - - if self.full_max > other.min: - return False - - return not self.include_max or not other.include_min - - def is_strictly_higher(self, other): # type: (VersionRange) -> bool - return other.is_strictly_lower(self) - - def is_adjacent_to(self, other): # type: (VersionRange) -> bool - if self.max != other.min: - return False - - return ( - self.include_max - and not other.include_min - or not self.include_max - and other.include_min - ) - - def __eq__(self, other): # type: (Any) -> int - if not isinstance(other, VersionRange): - return False - - return ( - self._min == other.min - and self._max == other.max - and self._include_min == other.include_min - and self._include_max == other.include_max - ) - - def __lt__(self, other): # type: (VersionRange) -> int - return self._cmp(other) < 0 - - def __le__(self, other): # type: (VersionRange) -> int - return self._cmp(other) <= 0 - - def __gt__(self, other): # type: (VersionRange) -> int - return self._cmp(other) > 0 - - def __ge__(self, other): # type: (VersionRange) -> int - return self._cmp(other) >= 0 - - def _cmp(self, other): # type: (VersionRange) -> int - if self.min is None: - if other.min is None: - return self._compare_max(other) - - return -1 - elif other.min is None: - return 1 - - result = self.min._cmp(other.min) - if result != 0: - return result - - if self.include_min != other.include_min: - return -1 if self.include_min else 1 - - return self._compare_max(other) - - def _compare_max(self, other): # type: (VersionRange) -> int - if self.max is None: - if other.max is None: - return 0 - - return 1 - elif other.max is None: - return -1 - - result = self.max._cmp(other.max) - if result != 0: - return result - - if self.include_max != other.include_max: - return 1 if self.include_max else -1 - - return 0 - - def __str__(self): # type: () -> str - text = "" - - if self.min is not None: - text += ">=" if self.include_min else ">" - text += self.min.text - - if self.max is not None: - if self.min is not None: - text += "," - - text += "{}{}".format("<=" if self.include_max else "<", self.max.text) - - if self.min is None and self.max is None: - return "*" - - return text - - def __repr__(self): # type: () -> str - return "<VersionRange ({})>".format(str(self)) - - def __hash__(self): # type: () -> int - return ( - hash(self.min) - ^ hash(self.max) - ^ hash(self.include_min) - ^ hash(self.include_max) - ) diff --git a/vendor/poetry-core/poetry/core/semver/version_union.py
b/vendor/poetry-core/poetry/core/semver/version_union.py deleted file mode 100644 index 50a597db..00000000 --- a/vendor/poetry-core/poetry/core/semver/version_union.py +++ /dev/null @@ -1,268 +0,0 @@ -from typing import TYPE_CHECKING -from typing import Any -from typing import List - -from .empty_constraint import EmptyConstraint -from .version_constraint import VersionConstraint - - -if TYPE_CHECKING: - from . import VersionTypes # noqa - from .version import Version - from .version_range import VersionRange - - -class VersionUnion(VersionConstraint): - """ - A version constraint representing a union of multiple disjoint version - ranges. - - An instance of this will only be created if the version can't be represented - as a non-compound value. - """ - - def __init__(self, *ranges): # type: (*"VersionRange") -> None - self._ranges = list(ranges) - - @property - def ranges(self): # type: () -> List["VersionRange"] - return self._ranges - - @classmethod - def of(cls, *ranges): # type: (*"VersionTypes") -> "VersionTypes" - from .version_range import VersionRange - - flattened = [] - for constraint in ranges: - if constraint.is_empty(): - continue - - if isinstance(constraint, VersionUnion): - flattened += constraint.ranges - continue - - flattened.append(constraint) - - if not flattened: - return EmptyConstraint() - - if any([constraint.is_any() for constraint in flattened]): - return VersionRange() - - # Only allow Versions and VersionRanges here so we can more easily reason - # about everything in flattened. _EmptyVersions and VersionUnions are - # filtered out above. - for constraint in flattened: - if isinstance(constraint, VersionRange): - continue - - raise ValueError("Unknown VersionConstraint type {}.".format(constraint)) - - flattened.sort() - - merged = [] - for constraint in flattened: - # Merge this constraint with the previous one, but only if they touch. 
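# (e.g. of(>=1.0.0,<1.5.0, >=1.4.0,<2.0.0) merges to >=1.0.0,<2.0.0; disjoint inputs are kept as separate ranges)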
- if not merged or ( - not merged[-1].allows_any(constraint) - and not merged[-1].is_adjacent_to(constraint) - ): - merged.append(constraint) - else: - merged[-1] = merged[-1].union(constraint) - - if len(merged) == 1: - return merged[0] - - return VersionUnion(*merged) - - def is_empty(self): # type: () -> bool - return False - - def is_any(self): # type: () -> bool - return False - - def allows(self, version): # type: ("Version") -> bool - return any([constraint.allows(version) for constraint in self._ranges]) - - def allows_all(self, other): # type: ("VersionTypes") -> bool - our_ranges = iter(self._ranges) - their_ranges = iter(self._ranges_for(other)) - - our_current_range = next(our_ranges, None) - their_current_range = next(their_ranges, None) - - while our_current_range and their_current_range: - if our_current_range.allows_all(their_current_range): - their_current_range = next(their_ranges, None) - else: - our_current_range = next(our_ranges, None) - - return their_current_range is None - - def allows_any(self, other): # type: ("VersionTypes") -> bool - our_ranges = iter(self._ranges) - their_ranges = iter(self._ranges_for(other)) - - our_current_range = next(our_ranges, None) - their_current_range = next(their_ranges, None) - - while our_current_range and their_current_range: - if our_current_range.allows_any(their_current_range): - return True - - if their_current_range.allows_higher(our_current_range): - our_current_range = next(our_ranges, None) - else: - their_current_range = next(their_ranges, None) - - return False - - def intersect(self, other): # type: ("VersionTypes") -> "VersionTypes" - our_ranges = iter(self._ranges) - their_ranges = iter(self._ranges_for(other)) - new_ranges = [] - - our_current_range = next(our_ranges, None) - their_current_range = next(their_ranges, None) - - while our_current_range and their_current_range: - intersection = our_current_range.intersect(their_current_range) - - if not intersection.is_empty(): - new_ranges.append(intersection) - - if their_current_range.allows_higher(our_current_range): - our_current_range = next(our_ranges, None) - else: - their_current_range = next(their_ranges, None) - - return VersionUnion.of(*new_ranges) - - def union(self, other): # type: ("VersionTypes") -> "VersionTypes" - return VersionUnion.of(self, other) - - def difference(self, other): # type: ("VersionTypes") -> "VersionTypes" - our_ranges = iter(self._ranges) - their_ranges = iter(self._ranges_for(other)) - new_ranges = [] - - state = { - "current": next(our_ranges, None), - "their_range": next(their_ranges, None), - } - - def their_next_range(): # type: () -> bool - state["their_range"] = next(their_ranges, None) - if state["their_range"]: - return True - - new_ranges.append(state["current"]) - our_current = next(our_ranges, None) - while our_current: - new_ranges.append(our_current) - our_current = next(our_ranges, None) - - return False - - def our_next_range(include_current=True): # type: (bool) -> bool - if include_current: - new_ranges.append(state["current"]) - - our_current = next(our_ranges, None) - if not our_current: - return False - - state["current"] = our_current - - return True - - while True: - if state["their_range"] is None: - break - - if state["their_range"].is_strictly_lower(state["current"]): - if not their_next_range(): - break - - continue - - if state["their_range"].is_strictly_higher(state["current"]): - if not our_next_range(): - break - - continue - - difference = state["current"].difference(state["their_range"]) - if 
isinstance(difference, VersionUnion): - assert len(difference.ranges) == 2 - new_ranges.append(difference.ranges[0]) - state["current"] = difference.ranges[-1] - - if not their_next_range(): - break - elif difference.is_empty(): - if not our_next_range(False): - break - else: - state["current"] = difference - - if state["current"].allows_higher(state["their_range"]): - if not their_next_range(): - break - else: - if not our_next_range(): - break - - if not new_ranges: - return EmptyConstraint() - - if len(new_ranges) == 1: - return new_ranges[0] - - return VersionUnion.of(*new_ranges) - - def _ranges_for(self, constraint): # type: ("VersionTypes") -> List["VersionRange"] - from .version_range import VersionRange - - if constraint.is_empty(): - return [] - - if isinstance(constraint, VersionUnion): - return constraint.ranges - - if isinstance(constraint, VersionRange): - return [constraint] - - raise ValueError("Unknown VersionConstraint type {}".format(constraint)) - - def excludes_single_version(self): # type: () -> bool - from .version import Version - from .version_range import VersionRange - - return isinstance(VersionRange().difference(self), Version) - - def __eq__(self, other): # type: (Any) -> bool - if not isinstance(other, VersionUnion): - return False - - return self._ranges == other.ranges - - def __hash__(self): # type: () -> int - h = hash(self._ranges[0]) - - for range in self._ranges[1:]: - h ^= hash(range) - - return h - - def __str__(self): # type: () -> str - from .version_range import VersionRange - - if self.excludes_single_version(): - return "!={}".format(VersionRange().difference(self)) - - return " || ".join([str(r) for r in self._ranges]) - - def __repr__(self): # type: () -> str - return "<VersionUnion {}>".format(str(self)) diff --git a/vendor/poetry-core/poetry/core/spdx/__init__.py b/vendor/poetry-core/poetry/core/spdx/__init__.py deleted file mode 100644 index 490c7a95..00000000 --- a/vendor/poetry-core/poetry/core/spdx/__init__.py +++ /dev/null @@ -1,52 +0,0 @@ -import json - -from importlib import resources -from typing import Dict -from typing import Optional - -from .license import License -from .updater import Updater - -_licenses = None # type: Optional[Dict[str, License]] - - -def license_by_id(identifier): # type: (str) -> License - if _licenses is None: - load_licenses() - - id = identifier.lower() - - if id not in _licenses: - if not identifier: - raise ValueError("A license identifier is required") - return License(identifier, identifier, False, False) - - return _licenses[id] - - -def load_licenses(): # type: () -> None - global _licenses - - _licenses = {} - - data = json.loads(resources.read_text(f"{__name__}.data", "licenses.json")) - - for name, license_info in data.items(): - license = License(name, license_info[0], license_info[1], license_info[2]) - _licenses[name.lower()] = license - - full_name = license_info[0].lower() - if full_name in _licenses: - existing_license = _licenses[full_name] - if not existing_license.is_deprecated: - continue - - _licenses[full_name] = license - - # Add a Proprietary license for non-standard licenses - _licenses["proprietary"] = License("Proprietary", "Proprietary", False, False) - - -if __name__ == "__main__": - updater = Updater() - updater.dump() diff --git a/vendor/poetry-core/poetry/core/spdx/updater.py b/vendor/poetry-core/poetry/core/spdx/updater.py deleted file mode 100644 index 30c3a519..00000000 --- a/vendor/poetry-core/poetry/core/spdx/updater.py +++ /dev/null @@ -1,46 +0,0 @@ -import json -import os - -from io
import open -from typing import Any -from typing import Dict -from typing import Optional - - -try: - from urllib.request import urlopen -except ImportError: - from urllib2 import urlopen - - -class Updater: - - BASE_URL = "https://raw.githubusercontent.com/spdx/license-list-data/master/json/" - - def __init__(self, base_url=BASE_URL): # type: (str) -> None - self._base_url = base_url - - def dump(self, file=None): # type: (Optional[str]) -> None - if file is None: - file = os.path.join(os.path.dirname(__file__), "data", "licenses.json") - - licenses_url = self._base_url + "licenses.json" - - with open(file, "w", encoding="utf-8") as f: - f.write( - json.dumps(self.get_licenses(licenses_url), indent=2, sort_keys=True) - ) - - def get_licenses(self, url): # type: (str) -> Dict[str, Any] - licenses = {} - with urlopen(url) as r: - data = json.loads(r.read().decode()) - - for info in data["licenses"]: - licenses[info["licenseId"]] = [ - info["name"], - info["isOsiApproved"], - info["isDeprecatedLicenseId"], - ] - - return licenses diff --git a/vendor/poetry-core/poetry/core/toml/__init__.py b/vendor/poetry-core/poetry/core/toml/__init__.py deleted file mode 100644 index affd9054..00000000 --- a/vendor/poetry-core/poetry/core/toml/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from poetry.core.toml.exceptions import TOMLError -from poetry.core.toml.file import TOMLFile - - -__all__ = [clazz.__name__ for clazz in {TOMLError, TOMLFile}] diff --git a/vendor/poetry-core/poetry/core/toml/exceptions.py b/vendor/poetry-core/poetry/core/toml/exceptions.py deleted file mode 100644 index 03959e29..00000000 --- a/vendor/poetry-core/poetry/core/toml/exceptions.py +++ /dev/null @@ -1,7 +0,0 @@ -from tomlkit.exceptions import TOMLKitError - -from poetry.core.exceptions import PoetryCoreException - - -class TOMLError(TOMLKitError, PoetryCoreException): - pass diff --git a/vendor/poetry-core/poetry/core/toml/file.py b/vendor/poetry-core/poetry/core/toml/file.py deleted file mode 100644 index 574bdcb8..00000000 --- a/vendor/poetry-core/poetry/core/toml/file.py +++ /dev/null @@ -1,40 +0,0 @@ -from typing import TYPE_CHECKING -from typing import Any -from typing import Union - -from tomlkit.exceptions import TOMLKitError -from tomlkit.toml_file import TOMLFile as BaseTOMLFile - -from poetry.core.toml import TOMLError -from poetry.core.utils._compat import Path - - -if TYPE_CHECKING: - from tomlkit.toml_document import TOMLDocument # noqa - - -class TOMLFile(BaseTOMLFile): - def __init__(self, path): # type: (Union[str, Path]) -> None - if isinstance(path, str): - path = Path(path) - super(TOMLFile, self).__init__(path.as_posix()) - self.__path = path - - @property - def path(self): # type: () -> Path - return self.__path - - def exists(self): # type: () -> bool - return self.__path.exists() - - def read(self): # type: () -> "TOMLDocument" - try: - return super(TOMLFile, self).read() - except (ValueError, TOMLKitError) as e: - raise TOMLError("Invalid TOML file {}: {}".format(self.path.as_posix(), e)) - - def __getattr__(self, item): # type: (str) -> Any - return getattr(self.__path, item) - - def __str__(self): # type: () -> str - return self.__path.as_posix() diff --git a/vendor/poetry-core/poetry/core/utils/_compat.py b/vendor/poetry-core/poetry/core/utils/_compat.py deleted file mode 100644 index 7c5daa9f..00000000 --- a/vendor/poetry-core/poetry/core/utils/_compat.py +++ /dev/null @@ -1,125 +0,0 @@ -import sys - -from typing import AnyStr -from typing import List -from typing import Optional -from typing import 
Union - -import six.moves.urllib.parse as urllib_parse - - -urlparse = urllib_parse - - -try: # Python 2 - long = long - unicode = unicode - basestring = basestring -except NameError: # Python 3 - long = int - unicode = str - basestring = str - - -PY2 = sys.version_info[0] == 2 -PY34 = sys.version_info >= (3, 4) -PY35 = sys.version_info >= (3, 5) -PY36 = sys.version_info >= (3, 6) -PY37 = sys.version_info >= (3, 7) - -WINDOWS = sys.platform == "win32" - -if PY2: - import pipes - - shell_quote = pipes.quote -else: - import shlex - - shell_quote = shlex.quote - -if PY35: - from pathlib import Path # noqa -else: - from pathlib2 import Path # noqa - -if not PY36: - from collections import OrderedDict # noqa -else: - OrderedDict = dict - - -try: - FileNotFoundError -except NameError: - FileNotFoundError = IOError # noqa - - -def decode( - string, encodings=None -): # type: (Union[AnyStr, unicode], Optional[str]) -> Union[str, bytes] - if not PY2 and not isinstance(string, bytes): - return string - - if PY2 and isinstance(string, unicode): - return string - - encodings = encodings or ["utf-8", "latin1", "ascii"] - - for encoding in encodings: - try: - return string.decode(encoding) - except (UnicodeEncodeError, UnicodeDecodeError): - pass - - return string.decode(encodings[0], errors="ignore") - - -def encode( - string, encodings=None -): # type: (AnyStr, Optional[str]) -> Union[str, bytes] - if not PY2 and isinstance(string, bytes): - return string - - if PY2 and isinstance(string, str): - return string - - encodings = encodings or ["utf-8", "latin1", "ascii"] - - for encoding in encodings: - try: - return string.encode(encoding) - except (UnicodeEncodeError, UnicodeDecodeError): - pass - - return string.encode(encodings[0], errors="ignore") - - -def to_str(string): # type: (AnyStr) -> str - if isinstance(string, str) or not isinstance(string, (unicode, bytes)): - return string - - if PY2: - method = "encode" - else: - method = "decode" - - encodings = ["utf-8", "latin1", "ascii"] - - for encoding in encodings: - try: - return getattr(string, method)(encoding) - except (UnicodeEncodeError, UnicodeDecodeError): - pass - - return getattr(string, method)(encodings[0], errors="ignore") - - -def list_to_shell_command(cmd): # type: (List[str]) -> str - executable = cmd[0] - - if " " in executable: - executable = '"{}"'.format(executable) - cmd[0] = executable - - return " ".join(cmd) diff --git a/vendor/poetry-core/poetry/core/utils/helpers.py b/vendor/poetry-core/poetry/core/utils/helpers.py deleted file mode 100644 index 6047e830..00000000 --- a/vendor/poetry-core/poetry/core/utils/helpers.py +++ /dev/null @@ -1,106 +0,0 @@ -import os -import re -import shutil -import stat -import tempfile - -from contextlib import contextmanager -from typing import Any -from typing import Iterator -from typing import List -from typing import Union - -from poetry.core.utils._compat import Path -from poetry.core.version import Version - - -try: - from collections.abc import Mapping -except ImportError: - from collections import Mapping - - -_canonicalize_regex = re.compile(r"[-_]+") - - -def canonicalize_name(name): # type: (str) -> str - return _canonicalize_regex.sub("-", name).lower() - - -def module_name(name): # type: (str) -> str - return canonicalize_name(name).replace(".", "_").replace("-", "_") - - -def normalize_version(version): # type: (str) -> str - return str(Version(version)) - - -@contextmanager -def temporary_directory(*args, **kwargs): # type: (*Any, **Any) -> Iterator[str] - name = 
tempfile.mkdtemp(*args, **kwargs) - yield name - safe_rmtree(name) - - -def parse_requires(requires): # type: (str) -> List[str] - lines = requires.split("\n") - - requires_dist = [] - in_section = False - current_marker = None - for line in lines: - line = line.strip() - if not line: - if in_section: - in_section = False - - continue - - if line.startswith("["): - # extras or conditional dependencies - marker = line.lstrip("[").rstrip("]") - if ":" not in marker: - extra, marker = marker, None - else: - extra, marker = marker.split(":") - - if extra: - if marker: - marker = '{} and extra == "{}"'.format(marker, extra) - else: - marker = 'extra == "{}"'.format(extra) - - if marker: - current_marker = marker - - continue - - if current_marker: - line = "{} ; {}".format(line, current_marker) - - requires_dist.append(line) - - return requires_dist - - -def _on_rm_error(func, path, exc_info): # type: (Any, Union[str, Path], Any) -> None - if not os.path.exists(path): - return - - os.chmod(path, stat.S_IWRITE) - func(path) - - -def safe_rmtree(path): # type: (Union[str, Path]) -> None - if Path(path).is_symlink(): - return os.unlink(str(path)) - - shutil.rmtree(path, onerror=_on_rm_error) - - -def merge_dicts(d1, d2): # type: (dict, dict) -> None - for k, v in d2.items(): - if k in d1 and isinstance(d1[k], dict) and isinstance(d2[k], Mapping): - merge_dicts(d1[k], d2[k]) - else: - d1[k] = d2[k] diff --git a/vendor/poetry-core/poetry/core/utils/patterns.py b/vendor/poetry-core/poetry/core/utils/patterns.py deleted file mode 100644 index 1d6413c2..00000000 --- a/vendor/poetry-core/poetry/core/utils/patterns.py +++ /dev/null @@ -1,9 +0,0 @@ -import re - - -wheel_file_re = re.compile( - r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?) - ((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?) - \.whl|\.dist-info)$""", - re.VERBOSE, -) diff --git a/vendor/poetry-core/poetry/core/utils/toml_file.py b/vendor/poetry-core/poetry/core/utils/toml_file.py deleted file mode 100644 index 7abf9a8a..00000000 --- a/vendor/poetry-core/poetry/core/utils/toml_file.py +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -from typing import Any - -from poetry.core.toml import TOMLFile - - -class TomlFile(TOMLFile): - @classmethod - def __new__(cls, *args, **kwargs): # type: (*Any, **Any) -> TOMLFile - import warnings - - warnings.warn( - "Use of {}.{} has been deprecated, use {}.{} instead.".format( - cls.__module__, cls.__name__, TOMLFile.__module__, TOMLFile.__name__, - ), - category=DeprecationWarning, - stacklevel=2, - ) - return super(TomlFile, cls).__new__(cls) diff --git a/vendor/poetry-core/poetry/core/vcs/__init__.py b/vendor/poetry-core/poetry/core/vcs/__init__.py deleted file mode 100644 index a97f1a7f..00000000 --- a/vendor/poetry-core/poetry/core/vcs/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -import os -import subprocess - -from poetry.core.utils._compat import Path -from poetry.core.utils._compat import decode - -from .git import Git - - -def get_vcs(directory): # type: (Path) -> Git - working_dir = Path.cwd() - os.chdir(str(directory.resolve())) - - try: - from .git import executable - - git_dir = decode( - subprocess.check_output( - [executable(), "rev-parse", "--show-toplevel"], stderr=subprocess.STDOUT - ) - ).strip() - - vcs = Git(Path(git_dir)) - - except (subprocess.CalledProcessError, OSError, RuntimeError): - vcs = None - finally: - os.chdir(str(working_dir)) - - return vcs diff --git a/vendor/poetry-core/poetry/core/vcs/git.py b/vendor/poetry-core/poetry/core/vcs/git.py deleted file mode 100644 index 04e3cf2c..00000000 ---
a/vendor/poetry-core/poetry/core/vcs/git.py +++ /dev/null @@ -1,366 +0,0 @@ -# -*- coding: utf-8 -*- -import re -import subprocess - -from collections import namedtuple -from typing import Any -from typing import Optional - -from poetry.core.utils._compat import PY36 -from poetry.core.utils._compat import WINDOWS -from poetry.core.utils._compat import Path -from poetry.core.utils._compat import decode - - -pattern_formats = { - "protocol": r"\w+", - "user": r"[a-zA-Z0-9_.-]+", - "resource": r"[a-zA-Z0-9_.-]+", - "port": r"\d+", - "path": r"[\w~.\-/\\]+", - "name": r"[\w~.\-]+", - "rev": r"[^@#]+", -} - -PATTERNS = [ - re.compile( - r"^(git\+)?" - r"(?P<protocol>https?|git|ssh|rsync|file)://" - r"(?:(?P<user>{user})@)?" - r"(?P<resource>{resource})?" - r"(:(?P<port>{port}))?" - r"(?P<pathname>[:/\\]({path}[/\\])?" - r"((?P<name>{name}?)(\.git|[/\\])?)?)" - r"([@#](?P<rev>{rev}))?" - r"$".format( - user=pattern_formats["user"], - resource=pattern_formats["resource"], - port=pattern_formats["port"], - path=pattern_formats["path"], - name=pattern_formats["name"], - rev=pattern_formats["rev"], - ) - ), - re.compile( - r"(git\+)?" - r"((?P<protocol>{protocol})://)" - r"(?:(?P<user>{user})@)?" - r"(?P<resource>{resource}:?)" - r"(:(?P<port>{port}))?" - r"(?P<pathname>({path})" - r"(?P<name>{name})(\.git|/)?)" - r"([@#](?P<rev>{rev}))?" - r"$".format( - protocol=pattern_formats["protocol"], - user=pattern_formats["user"], - resource=pattern_formats["resource"], - port=pattern_formats["port"], - path=pattern_formats["path"], - name=pattern_formats["name"], - rev=pattern_formats["rev"], - ) - ), - re.compile( - r"^(?:(?P<user>{user})@)?" - r"(?P<resource>{resource})" - r"(:(?P<port>{port}))?" - r"(?P<pathname>([:/]{path}/)" - r"(?P<name>{name})(\.git|/)?)" - r"([@#](?P<rev>{rev}))?" - r"$".format( - user=pattern_formats["user"], - resource=pattern_formats["resource"], - port=pattern_formats["port"], - path=pattern_formats["path"], - name=pattern_formats["name"], - rev=pattern_formats["rev"], - ) - ), - re.compile( - r"((?P<user>{user})@)?" - r"(?P<resource>{resource})" - r"[:/]{{1,2}}" - r"(?P<pathname>({path})" - r"(?P<name>{name})(\.git|/)?)" - r"([@#](?P<rev>{rev}))?"
- r"$".format( - user=pattern_formats["user"], - resource=pattern_formats["resource"], - path=pattern_formats["path"], - name=pattern_formats["name"], - rev=pattern_formats["rev"], - ) - ), -] - - -class GitError(RuntimeError): - - pass - - -class ParsedUrl: - def __init__( - self, - protocol, # type: Optional[str] - resource, # type: Optional[str] - pathname, # type: Optional[str] - user, # type: Optional[str] - port, # type: Optional[str] - name, # type: Optional[str] - rev, # type: Optional[str] - ): - self.protocol = protocol - self.resource = resource - self.pathname = pathname - self.user = user - self.port = port - self.name = name - self.rev = rev - - @classmethod - def parse(cls, url): # type: (str) -> ParsedUrl - for pattern in PATTERNS: - m = pattern.match(url) - if m: - groups = m.groupdict() - return ParsedUrl( - groups.get("protocol"), - groups.get("resource"), - groups.get("pathname"), - groups.get("user"), - groups.get("port"), - groups.get("name"), - groups.get("rev"), - ) - - raise ValueError('Invalid git url "{}"'.format(url)) - - @property - def url(self): # type: () -> str - return "{}{}{}{}{}".format( - "{}://".format(self.protocol) if self.protocol else "", - "{}@".format(self.user) if self.user else "", - self.resource, - ":{}".format(self.port) if self.port else "", - "/" + self.pathname.lstrip(":/"), - ) - - def format(self): # type: () -> str - return self.url - - def __str__(self): # type: () -> str - return self.format() - - -GitUrl = namedtuple("GitUrl", ["url", "revision"]) - - -_executable = None - - -def executable(): - global _executable - - if _executable is not None: - return _executable - - if WINDOWS and PY36: - # Finding git via where.exe - where = "%WINDIR%\\System32\\where.exe" - paths = decode( - subprocess.check_output([where, "git"], shell=True, encoding="oem") - ).split("\n") - for path in paths: - if not path: - continue - - path = Path(path.strip()) - try: - path.relative_to(Path.cwd()) - except ValueError: - _executable = str(path) - - break - else: - _executable = "git" - - if _executable is None: - raise RuntimeError("Unable to find a valid git executable") - - return _executable - - -def _reset_executable(): - global _executable - - _executable = None - - -class GitConfig: - def __init__(self, requires_git_presence=False): # type: (bool) -> None - self._config = {} - - try: - config_list = decode( - subprocess.check_output( - [executable(), "config", "-l"], stderr=subprocess.STDOUT - ) - ) - - m = re.findall("(?ms)^([^=]+)=(.*?)$", config_list) - if m: - for group in m: - self._config[group[0]] = group[1] - except (subprocess.CalledProcessError, OSError): - if requires_git_presence: - raise - - def get(self, key, default=None): # type: (Any, Optional[Any]) -> Any - return self._config.get(key, default) - - def __getitem__(self, item): # type: (Any) -> Any - return self._config[item] - - -class Git: - def __init__(self, work_dir=None): # type: (Optional[Path]) -> None - self._config = GitConfig(requires_git_presence=True) - self._work_dir = work_dir - - @classmethod - def normalize_url(cls, url): # type: (str) -> GitUrl - parsed = ParsedUrl.parse(url) - - formatted = re.sub(r"^git\+", "", url) - if parsed.rev: - formatted = re.sub(r"[#@]{}$".format(parsed.rev), "", formatted) - - altered = parsed.format() != formatted - - if altered: - if re.match(r"^git\+https?", url) and re.match( - r"^/?:[^0-9]", parsed.pathname - ): - normalized = re.sub(r"git\+(.*:[^:]+):(.*)", "\\1/\\2", url) - elif re.match(r"^git\+file", url): - normalized = 
re.sub(r"git\+", "", url) - else: - normalized = re.sub(r"^(?:git\+)?ssh://", "", url) - else: - normalized = parsed.format() - - return GitUrl(re.sub(r"#[^#]*$", "", normalized), parsed.rev) - - @property - def config(self): # type: () -> GitConfig - return self._config - - def clone(self, repository, dest): # type: (str, Path) -> str - self._check_parameter(repository) - - return self.run("clone", "--recurse-submodules", "--", repository, str(dest)) - - def checkout(self, rev, folder=None): # type: (str, Optional[Path]) -> str - args = [] - if folder is None and self._work_dir: - folder = self._work_dir - - if folder: - args += [ - "--git-dir", - (folder / ".git").as_posix(), - "--work-tree", - folder.as_posix(), - ] - - self._check_parameter(rev) - - args += ["checkout", rev] - - return self.run(*args) - - def rev_parse(self, rev, folder=None): # type: (str, Optional[Path]) -> str - args = [] - if folder is None and self._work_dir: - folder = self._work_dir - - if folder: - args += [ - "--git-dir", - (folder / ".git").as_posix(), - "--work-tree", - folder.as_posix(), - ] - - self._check_parameter(rev) - - # We need "^0" (an alternative to "^{commit}") to ensure that the - # commit SHA of the commit the tag points to is returned, even in - # the case of annotated tags. - # - # We deliberately avoid the "^{commit}" syntax itself as on some - # platforms (cygwin/msys to be specific), the braces are interpreted - # as special characters and would require escaping, while on others - # they should not be escaped. - args += ["rev-parse", rev + "^0"] - - return self.run(*args) - - def get_ignored_files(self, folder=None): # type: (Optional[Path]) -> list - args = [] - if folder is None and self._work_dir: - folder = self._work_dir - - if folder: - args += [ - "--git-dir", - (folder / ".git").as_posix(), - "--work-tree", - folder.as_posix(), - ] - - args += ["ls-files", "--others", "-i", "--exclude-standard"] - output = self.run(*args) - - return output.strip().split("\n") - - def remote_urls(self, folder=None): # type: (Optional[Path]) -> dict - output = self.run( - "config", "--get-regexp", r"remote\..*\.url", folder=folder - ).strip() - - urls = {} - for url in output.splitlines(): - name, url = url.split(" ", 1) - urls[name.strip()] = url.strip() - - return urls - - def remote_url(self, folder=None): # type: (Optional[Path]) -> str - urls = self.remote_urls(folder=folder) - - return urls.get("remote.origin.url", urls[list(urls.keys())[0]]) - - def run(self, *args, **kwargs): # type: (*Any, **Any) -> str - folder = kwargs.pop("folder", None) - if folder: - args = ( - "--git-dir", - (folder / ".git").as_posix(), - "--work-tree", - folder.as_posix(), - ) + args - - return decode( - subprocess.check_output( - [executable()] + list(args), stderr=subprocess.STDOUT - ) - ).strip() - - def _check_parameter(self, parameter): # type: (str) -> None - """ - Checks a git parameter to avoid unwanted code execution. 
- """ - if parameter.strip().startswith("-"): - raise GitError("Invalid Git parameter: {}".format(parameter)) diff --git a/vendor/poetry-core/poetry/core/version/__init__.py b/vendor/poetry-core/poetry/core/version/__init__.py deleted file mode 100644 index 62d0349f..00000000 --- a/vendor/poetry-core/poetry/core/version/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -import operator - -from typing import Union - -from .exceptions import InvalidVersion -from .legacy_version import LegacyVersion -from .version import Version - - -OP_EQ = operator.eq -OP_LT = operator.lt -OP_LE = operator.le -OP_GT = operator.gt -OP_GE = operator.ge -OP_NE = operator.ne - -_trans_op = { - "=": OP_EQ, - "==": OP_EQ, - "<": OP_LT, - "<=": OP_LE, - ">": OP_GT, - ">=": OP_GE, - "!=": OP_NE, -} - - -def parse( - version, # type: str - strict=False, # type: bool -): # type:(...) -> Union[Version, LegacyVersion] - """ - Parse the given version string and return either a :class:`Version` object - or a LegacyVersion object depending on if the given version is - a valid PEP 440 version or a legacy version. - - If strict=True only PEP 440 versions will be accepted. - """ - try: - return Version(version) - except InvalidVersion: - if strict: - raise - - return LegacyVersion(version) diff --git a/vendor/poetry-core/poetry/core/version/base.py b/vendor/poetry-core/poetry/core/version/base.py deleted file mode 100644 index 826f8622..00000000 --- a/vendor/poetry-core/poetry/core/version/base.py +++ /dev/null @@ -1,34 +0,0 @@ -from typing import Callable - - -class BaseVersion: - def __init__(self, version): # type: (str) -> None - self._version = str(version) - self._key = None - - def __hash__(self): # type: () -> int - return hash(self._key) - - def __lt__(self, other): # type: (BaseVersion) -> bool - return self._compare(other, lambda s, o: s < o) - - def __le__(self, other): # type: (BaseVersion) -> bool - return self._compare(other, lambda s, o: s <= o) - - def __eq__(self, other): # type: (BaseVersion) -> bool - return self._compare(other, lambda s, o: s == o) - - def __ge__(self, other): # type: (BaseVersion) -> bool - return self._compare(other, lambda s, o: s >= o) - - def __gt__(self, other): # type: (BaseVersion) -> bool - return self._compare(other, lambda s, o: s > o) - - def __ne__(self, other): # type: (BaseVersion) -> bool - return self._compare(other, lambda s, o: s != o) - - def _compare(self, other, method): # type: (BaseVersion, Callable) -> bool - if not isinstance(other, BaseVersion): - return NotImplemented - - return method(self._key, other._key) diff --git a/vendor/poetry-core/poetry/core/version/exceptions.py b/vendor/poetry-core/poetry/core/version/exceptions.py deleted file mode 100644 index 741b13ca..00000000 --- a/vendor/poetry-core/poetry/core/version/exceptions.py +++ /dev/null @@ -1,3 +0,0 @@ -class InvalidVersion(ValueError): - - pass diff --git a/vendor/poetry-core/poetry/core/version/grammars/__init__.py b/vendor/poetry-core/poetry/core/version/grammars/__init__.py deleted file mode 100644 index e88bac85..00000000 --- a/vendor/poetry-core/poetry/core/version/grammars/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -import sys - -from pathlib import Path - - -if getattr(sys, "oxidized", False): - parents = 4 if sys.platform.startswith("win") else 5 - __path_assets__ = ( - Path(__path__[0]).parents[parents] / "assets" / "core" / "version" / "grammars" - ) -else: - __path_assets__ = Path(__path__[0]) diff --git a/vendor/poetry-core/poetry/core/version/helpers.py 
b/vendor/poetry-core/poetry/core/version/helpers.py deleted file mode 100644 index bd46e8d2..00000000 --- a/vendor/poetry-core/poetry/core/version/helpers.py +++ /dev/null @@ -1,66 +0,0 @@ -from typing import TYPE_CHECKING -from typing import Union - -from poetry.core.semver import Version -from poetry.core.semver import VersionUnion -from poetry.core.semver import parse_constraint - - -if TYPE_CHECKING: - from poetry.core.semver import VersionConstraint # noqa - -PYTHON_VERSION = [ - "2.7.*", - "3.0.*", - "3.1.*", - "3.2.*", - "3.3.*", - "3.4.*", - "3.5.*", - "3.6.*", - "3.7.*", - "3.8.*", - "3.9.*", -] - - -def format_python_constraint( - constraint, -): # type: (Union[Version, VersionUnion, "VersionConstraint"]) -> str - """ - This helper will help in transforming - disjunctive constraint into proper constraint. - """ - if isinstance(constraint, Version): - if constraint.precision >= 3: - return "=={}".format(str(constraint)) - - # Transform 3.6 or 3 - if constraint.precision == 2: - # 3.6 - constraint = parse_constraint( - "~{}.{}".format(constraint.major, constraint.minor) - ) - else: - constraint = parse_constraint("^{}.0".format(constraint.major)) - - if not isinstance(constraint, VersionUnion): - return str(constraint) - - formatted = [] - accepted = [] - - for version in PYTHON_VERSION: - version_constraint = parse_constraint(version) - matches = constraint.allows_any(version_constraint) - if not matches: - formatted.append("!=" + version) - else: - accepted.append(version) - - # Checking lower bound - low = accepted[0] - - formatted.insert(0, ">=" + ".".join(low.split(".")[:2])) - - return ", ".join(formatted) diff --git a/vendor/poetry-core/poetry/core/version/legacy_version.py b/vendor/poetry-core/poetry/core/version/legacy_version.py deleted file mode 100644 index adaa53d7..00000000 --- a/vendor/poetry-core/poetry/core/version/legacy_version.py +++ /dev/null @@ -1,92 +0,0 @@ -import re - -from typing import Tuple - -from .base import BaseVersion - - -class LegacyVersion(BaseVersion): - def __init__(self, version): # type: (str) -> None - self._version = str(version) - self._key = _legacy_cmpkey(self._version) - - def __str__(self): # type: () -> str - return self._version - - def __repr__(self): # type: () -> str - return "<LegacyVersion({0})>".format(repr(str(self))) - - @property - def public(self): # type: () -> str - return self._version - - @property - def base_version(self): # type: () -> str - return self._version - - @property - def local(self): # type: () -> None - return None - - @property - def is_prerelease(self): # type: () -> bool - return False - - @property - def is_postrelease(self): # type: () -> bool - return False - - -_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE) - -_legacy_version_replacement_map = { - "pre": "c", - "preview": "c", - "-": "final-", - "rc": "c", - "dev": "@", -} - - -def _parse_version_parts(s): # type: (str) -> str - for part in _legacy_version_component_re.split(s): - part = _legacy_version_replacement_map.get(part, part) - - if not part or part == ".": - continue - - if part[:1] in "0123456789": - # pad for numeric comparison - yield part.zfill(8) - else: - yield "*" + part - - # ensure that alpha/beta/candidate are before final - yield "*final" - - -def _legacy_cmpkey(version): # type: (str) -> Tuple[int, Tuple[str]] - # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch - # greater than or equal to 0.
This will effectively put the LegacyVersion, - # which uses the defacto standard originally implemented by setuptools, - # as before all PEP 440 versions. - epoch = -1 - - # This scheme is taken from pkg_resources.parse_version setuptools prior to - # it's adoption of the packaging library. - parts = [] - for part in _parse_version_parts(version.lower()): - if part.startswith("*"): - # remove "-" before a prerelease tag - if part < "*final": - while parts and parts[-1] == "*final-": - parts.pop() - - # remove trailing zeros from each series of numeric parts - while parts and parts[-1] == "00000000": - parts.pop() - - parts.append(part) - parts = tuple(parts) - - return epoch, parts diff --git a/vendor/poetry-core/poetry/core/version/markers.py b/vendor/poetry-core/poetry/core/version/markers.py deleted file mode 100644 index 5eb5d956..00000000 --- a/vendor/poetry-core/poetry/core/version/markers.py +++ /dev/null @@ -1,753 +0,0 @@ -import re - -from typing import TYPE_CHECKING -from typing import Any -from typing import Dict -from typing import Iterator -from typing import List -from typing import Union - -from lark import Lark -from lark import Token -from lark import Tree - -from .grammars import __path_assets__ - - -if TYPE_CHECKING: - from poetry.core.semver import VersionTypes # noqa - -MarkerTypes = Union[ - "AnyMarker", "EmptyMarker", "SingleMarker", "MultiMarker", "MarkerUnion" -] - - -class InvalidMarker(ValueError): - """ - An invalid marker was found, users should refer to PEP 508. - """ - - -class UndefinedComparison(ValueError): - """ - An invalid operation was attempted on a value that doesn't support it. - """ - - -class UndefinedEnvironmentName(ValueError): - """ - A name was attempted to be used that does not exist inside of the - environment. 
- """ - - -ALIASES = { - "os.name": "os_name", - "sys.platform": "sys_platform", - "platform.version": "platform_version", - "platform.machine": "platform_machine", - "platform.python_implementation": "platform_python_implementation", - "python_implementation": "platform_python_implementation", -} -_parser = Lark.open( - __path_assets__ / "markers.lark", parser="lalr" -) - - -class BaseMarker(object): - def intersect(self, other): # type: (BaseMarker) -> BaseMarker - raise NotImplementedError() - - def union(self, other): # type: (BaseMarker) -> BaseMarker - raise NotImplementedError() - - def is_any(self): # type: () -> bool - return False - - def is_empty(self): # type: () -> bool - return False - - def validate(self, environment): # type: (Dict[str, Any]) -> bool - raise NotImplementedError() - - def without_extras(self): # type: () -> BaseMarker - raise NotImplementedError() - - def exclude(self, marker_name): # type: (str) -> BaseMarker - raise NotImplementedError() - - def only(self, *marker_names): # type: (str) -> BaseMarker - raise NotImplementedError() - - def invert(self): # type: () -> BaseMarker - raise NotImplementedError() - - def __repr__(self): # type: () -> str - return "<{} {}>".format(self.__class__.__name__, str(self)) - - -class AnyMarker(BaseMarker): - def intersect(self, other): # type: (MarkerTypes) -> MarkerTypes - return other - - def union(self, other): # type: (MarkerTypes) -> MarkerTypes - return self - - def is_any(self): # type: () -> bool - return True - - def is_empty(self): # type: () -> bool - return False - - def validate(self, environment): # type: (Dict[str, Any]) -> bool - return True - - def without_extras(self): # type: () -> MarkerTypes - return self - - def exclude(self, marker_name): # type: (str) -> MarkerTypes - return self - - def only(self, *marker_names): # type: (*str) -> MarkerTypes - return self - - def invert(self): # type: () -> EmptyMarker - return EmptyMarker() - - def __str__(self): # type: () -> str - return "" - - def __repr__(self): # type: () -> str - return "" - - def __hash__(self): # type: () -> int - return hash(("", "")) - - def __eq__(self, other): # type: (MarkerTypes) -> bool - if not isinstance(other, BaseMarker): - return NotImplemented - - return isinstance(other, AnyMarker) - - -class EmptyMarker(BaseMarker): - def intersect(self, other): # type: (MarkerTypes) -> MarkerTypes - return self - - def union(self, other): # type: (MarkerTypes) -> MarkerTypes - return other - - def is_any(self): # type: () -> bool - return False - - def is_empty(self): # type: () -> bool - return True - - def validate(self, environment): # type: (Dict[str, Any]) -> bool - return False - - def without_extras(self): # type: () -> BaseMarker - return self - - def exclude(self, marker_name): # type: (str) -> EmptyMarker - return self - - def only(self, *marker_names): # type: (*str) -> EmptyMarker - return self - - def invert(self): # type: () -> AnyMarker - return AnyMarker() - - def __str__(self): # type: () -> str - return "" - - def __repr__(self): # type: () -> str - return "" - - def __hash__(self): # type: () -> int - return hash(("", "")) - - def __eq__(self, other): # type: (MarkerTypes) -> bool - if not isinstance(other, BaseMarker): - return NotImplemented - - return isinstance(other, EmptyMarker) - - -class SingleMarker(BaseMarker): - - _CONSTRAINT_RE = re.compile(r"(?i)^(~=|!=|>=?|<=?|==?=?|in|not in)?\s*(.+)$") - _VERSION_LIKE_MARKER_NAME = { - "python_version", - "python_full_version", - "platform_release", - } - - def __init__( 
- self, name, constraint - ): # type: (str, Union[str, "VersionTypes"]) -> None - from poetry.core.packages.constraints import ( - parse_constraint as parse_generic_constraint, - ) - from poetry.core.semver import parse_constraint - - self._name = ALIASES.get(name, name) - self._constraint_string = str(constraint) - - # Extract operator and value - m = self._CONSTRAINT_RE.match(self._constraint_string) - self._operator = m.group(1) - if self._operator is None: - self._operator = "==" - - self._value = m.group(2) - self._parser = parse_generic_constraint - - if name in self._VERSION_LIKE_MARKER_NAME: - self._parser = parse_constraint - - if self._operator in {"in", "not in"}: - versions = [] - for v in re.split("[ ,]+", self._value): - split = v.split(".") - if len(split) in [1, 2]: - split.append("*") - op = "" if self._operator == "in" else "!=" - else: - op = "==" if self._operator == "in" else "!=" - - versions.append(op + ".".join(split)) - - glue = ", " - if self._operator == "in": - glue = " || " - - self._constraint = self._parser(glue.join(versions)) - else: - self._constraint = self._parser(self._constraint_string) - else: - # if we have a in/not in operator we split the constraint - # into a union/multi-constraint of single constraint - constraint_string = self._constraint_string - if self._operator in {"in", "not in"}: - op, glue = ("==", " || ") if self._operator == "in" else ("!=", ", ") - values = re.split("[ ,]+", self._value) - constraint_string = glue.join( - ("{} {}".format(op, value) for value in values) - ) - - self._constraint = self._parser(constraint_string) - - @property - def name(self): # type: () -> str - return self._name - - @property - def constraint_string(self): # type: () -> str - if self._operator in {"in", "not in"}: - return "{} {}".format(self._operator, self._value) - - return self._constraint_string - - @property - def constraint(self): # type: () -> "VersionTypes" - return self._constraint - - @property - def operator(self): # type: () -> str - return self._operator - - @property - def value(self): # type: () -> str - return self._value - - def intersect(self, other): # type: (MarkerTypes) -> MarkerTypes - if isinstance(other, SingleMarker): - if other.name != self.name: - return MultiMarker(self, other) - - if self == other: - return self - - if self._operator in {"in", "not in"} or other.operator in {"in", "not in"}: - return MultiMarker.of(self, other) - - new_constraint = self._constraint.intersect(other.constraint) - if new_constraint.is_empty(): - return EmptyMarker() - - if new_constraint == self._constraint or new_constraint == other.constraint: - return SingleMarker(self._name, new_constraint) - - return MultiMarker.of(self, other) - - return other.intersect(self) - - def union(self, other): # type: (MarkerTypes) -> MarkerTypes - if isinstance(other, SingleMarker): - if self == other: - return self - - return MarkerUnion.of(self, other) - - return other.union(self) - - def validate(self, environment): # type: (Dict[str, Any]) -> bool - if environment is None: - return True - - if self._name not in environment: - return True - - return self._constraint.allows(self._parser(environment[self._name])) - - def without_extras(self): # type: () -> MarkerTypes - return self.exclude("extra") - - def exclude(self, marker_name): # type: (str) -> MarkerTypes - if self.name == marker_name: - return AnyMarker() - - return self - - def only(self, *marker_names): # type: (*str) -> Union[SingleMarker, EmptyMarker] - if self.name not in marker_names: - return 
EmptyMarker() - - return self - - def invert(self): # type: () -> MarkerTypes - if self._operator in ("===", "=="): - operator = "!=" - elif self._operator == "!=": - operator = "==" - elif self._operator == ">": - operator = "<=" - elif self._operator == ">=": - operator = "<" - elif self._operator == "<": - operator = ">=" - elif self._operator == "<=": - operator = ">" - elif self._operator == "in": - operator = "not in" - elif self._operator == "not in": - operator = "in" - elif self._operator == "~=": - # This one is more tricky to handle - # since it's technically a multi marker - # so the inverse will be a union of inverse - from poetry.core.semver import VersionRange - - if not isinstance(self._constraint, VersionRange): - # The constraint must be a version range, otherwise - # it's an internal error - raise RuntimeError( - "The '~=' operator should only represent version ranges" - ) - - min_ = self._constraint.min - min_operator = ">=" if self._constraint.include_min else "<" - max_ = self._constraint.max - max_operator = "<=" if self._constraint.include_max else "<" - - return MultiMarker.of( - SingleMarker(self._name, "{} {}".format(min_operator, min_)), - SingleMarker(self._name, "{} {}".format(max_operator, max_)), - ).invert() - else: - # We should never go there - raise RuntimeError("Invalid marker operator '{}'".format(self._operator)) - - return parse_marker("{} {} '{}'".format(self._name, operator, self._value)) - - def __eq__(self, other): # type: (MarkerTypes) -> bool - if not isinstance(other, SingleMarker): - return False - - return self._name == other.name and self._constraint == other.constraint - - def __hash__(self): # type: () -> int - return hash((self._name, self._constraint_string)) - - def __str__(self): # type: () -> str - return '{} {} "{}"'.format(self._name, self._operator, self._value) - - -def _flatten_markers( - markers, flatten_class -): # type: (Iterator[Union[MarkerUnion, MultiMarker]], Any) -> List[MarkerTypes] - flattened = [] - - for marker in markers: - if isinstance(marker, flatten_class): - flattened += _flatten_markers(marker.markers, flatten_class) - else: - flattened.append(marker) - - return flattened - - -class MultiMarker(BaseMarker): - def __init__(self, *markers): # type: (*MarkerTypes) -> None - self._markers = [] - - markers = _flatten_markers(markers, MultiMarker) - - for m in markers: - self._markers.append(m) - - @classmethod - def of(cls, *markers): # type: (*MarkerTypes) -> MarkerTypes - new_markers = [] - markers = _flatten_markers(markers, MultiMarker) - - for marker in markers: - if marker in new_markers: - continue - - if marker.is_any(): - continue - - if isinstance(marker, SingleMarker): - intersected = False - for i, mark in enumerate(new_markers): - if ( - not isinstance(mark, SingleMarker) - or isinstance(mark, SingleMarker) - and mark.name != marker.name - ): - continue - - intersection = mark.constraint.intersect(marker.constraint) - if intersection == mark.constraint: - intersected = True - elif intersection == marker.constraint: - new_markers[i] = marker - intersected = True - elif intersection.is_empty(): - return EmptyMarker() - - if intersected: - continue - - new_markers.append(marker) - - if any(m.is_empty() for m in new_markers) or not new_markers: - return EmptyMarker() - - if len(new_markers) == 1 and new_markers[0].is_any(): - return AnyMarker() - - return MultiMarker(*new_markers) - - @property - def markers(self): # type: () -> List[MarkerTypes] - return self._markers - - def intersect(self, other): # 
type: (MarkerTypes) -> MarkerTypes - if other.is_any(): - return self - - if other.is_empty(): - return other - - new_markers = self._markers + [other] - - return MultiMarker.of(*new_markers) - - def union(self, other): # type: (MarkerTypes) -> MarkerTypes - if isinstance(other, (SingleMarker, MultiMarker)): - return MarkerUnion.of(self, other) - - return other.union(self) - - def validate(self, environment): # type: (Dict[str, Any]) -> bool - for m in self._markers: - if not m.validate(environment): - return False - - return True - - def without_extras(self): # type: () -> MarkerTypes - return self.exclude("extra") - - def exclude(self, marker_name): # type: (str) -> MarkerTypes - new_markers = [] - - for m in self._markers: - if isinstance(m, SingleMarker) and m.name == marker_name: - # The marker is not relevant since it must be excluded - continue - - marker = m.exclude(marker_name) - - if not marker.is_empty(): - new_markers.append(marker) - - return self.of(*new_markers) - - def only(self, *marker_names): # type: (*str) -> MarkerTypes - new_markers = [] - - for m in self._markers: - if isinstance(m, SingleMarker) and m.name not in marker_names: - # The marker is not relevant since it's not one we want - continue - - marker = m.only(*marker_names) - - if not marker.is_empty(): - new_markers.append(marker) - - return self.of(*new_markers) - - def invert(self): # type: () -> MarkerTypes - markers = [marker.invert() for marker in self._markers] - - return MarkerUnion.of(*markers) - - def __eq__(self, other): # type: (MarkerTypes) -> bool - if not isinstance(other, MultiMarker): - return False - - return set(self._markers) == set(other.markers) - - def __hash__(self): # type: () -> int - h = hash("multi") - for m in self._markers: - h |= hash(m) - - return h - - def __str__(self): # type: () -> str - elements = [] - for m in self._markers: - if isinstance(m, SingleMarker): - elements.append(str(m)) - elif isinstance(m, MultiMarker): - elements.append(str(m)) - else: - elements.append("({})".format(str(m))) - - return " and ".join(elements) - - -class MarkerUnion(BaseMarker): - def __init__(self, *markers): # type: (*MarkerTypes) -> None - self._markers = list(markers) - - @property - def markers(self): # type: () -> List[MarkerTypes] - return self._markers - - @classmethod - def of(cls, *markers): # type: (*BaseMarker) -> MarkerTypes - flattened_markers = _flatten_markers(markers, MarkerUnion) - - markers = [] - for marker in flattened_markers: - if marker in markers: - continue - - if isinstance(marker, SingleMarker) and marker.name == "python_version": - intersected = False - for i, mark in enumerate(markers): - if ( - not isinstance(mark, SingleMarker) - or isinstance(mark, SingleMarker) - and mark.name != marker.name - ): - continue - - intersection = mark.constraint.union(marker.constraint) - if intersection == mark.constraint: - intersected = True - break - elif intersection == marker.constraint: - markers[i] = marker - intersected = True - break - - if intersected: - continue - - markers.append(marker) - - if any(m.is_any() for m in markers): - return AnyMarker() - - if not markers: - return AnyMarker() - - if len(markers) == 1: - return markers[0] - - return MarkerUnion(*markers) - - def append(self, marker): # type: (MarkerTypes) -> None - if marker in self._markers: - return - - self._markers.append(marker) - - def intersect(self, other): # type: (MarkerTypes) -> MarkerTypes - if other.is_any(): - return self - - if other.is_empty(): - return other - - new_markers = [] - if 
isinstance(other, (SingleMarker, MultiMarker)): - for marker in self._markers: - intersection = marker.intersect(other) - - if not intersection.is_empty(): - new_markers.append(intersection) - elif isinstance(other, MarkerUnion): - for our_marker in self._markers: - for their_marker in other.markers: - intersection = our_marker.intersect(their_marker) - - if not intersection.is_empty(): - new_markers.append(intersection) - - return MarkerUnion.of(*new_markers) - - def union(self, other): # type: (MarkerTypes) -> MarkerTypes - if other.is_any(): - return other - - if other.is_empty(): - return self - - new_markers = self._markers + [other] - - return MarkerUnion.of(*new_markers) - - def validate(self, environment): # type: (Dict[str, Any]) -> bool - for m in self._markers: - if m.validate(environment): - return True - - return False - - def without_extras(self): # type: () -> MarkerTypes - return self.exclude("extra") - - def exclude(self, marker_name): # type: (str) -> MarkerTypes - new_markers = [] - - for m in self._markers: - if isinstance(m, SingleMarker) and m.name == marker_name: - # The marker is not relevant since it must be excluded - continue - - marker = m.exclude(marker_name) - - if not marker.is_empty(): - new_markers.append(marker) - - return self.of(*new_markers) - - def only(self, *marker_names): # type: (*str) -> MarkerTypes - new_markers = [] - - for m in self._markers: - if isinstance(m, SingleMarker) and m.name not in marker_names: - # The marker is not relevant since it's not one we want - continue - - marker = m.only(*marker_names) - - if not marker.is_empty(): - new_markers.append(marker) - - return self.of(*new_markers) - - def invert(self): # type: () -> MarkerTypes - markers = [marker.invert() for marker in self._markers] - - return MultiMarker.of(*markers) - - def __eq__(self, other): # type: (MarkerTypes) -> bool - if not isinstance(other, MarkerUnion): - return False - - return set(self._markers) == set(other.markers) - - def __hash__(self): # type: () -> int - h = hash("union") - for m in self._markers: - h |= hash(m) - - return h - - def __str__(self): # type: () -> str - return " or ".join( - str(m) for m in self._markers if not m.is_any() and not m.is_empty() - ) - - def is_any(self): # type: () -> bool - return any(m.is_any() for m in self._markers) - - def is_empty(self): # type: () -> bool - return all(m.is_empty() for m in self._markers) - - -def parse_marker(marker): # type: (str) -> MarkerTypes - if marker == "": - return EmptyMarker() - - if not marker or marker == "*": - return AnyMarker() - - parsed = _parser.parse(marker) - - markers = _compact_markers(parsed.children) - - return markers - - -def _compact_markers(tree_elements, tree_prefix=""): # type: (Tree, str) -> MarkerTypes - groups = [MultiMarker()] - for token in tree_elements: - if isinstance(token, Token): - if token.type == "{}BOOL_OP".format(tree_prefix) and token.value == "or": - groups.append(MultiMarker()) - - continue - - if token.data == "marker": - groups[-1] = MultiMarker.of( - groups[-1], _compact_markers(token.children, tree_prefix=tree_prefix) - ) - elif token.data == "{}item".format(tree_prefix): - name, op, value = token.children - if value.type == "{}MARKER_NAME".format(tree_prefix): - name, value, = value, name - - value = value[1:-1] - groups[-1] = MultiMarker.of( - groups[-1], SingleMarker(name, "{}{}".format(op, value)) - ) - elif token.data == "{}BOOL_OP".format(tree_prefix): - if token.children[0] == "or": - groups.append(MultiMarker()) - - for i, group in 
enumerate(reversed(groups)): - if group.is_empty(): - del groups[len(groups) - 1 - i] - continue - - if isinstance(group, MultiMarker) and len(group.markers) == 1: - groups[len(groups) - 1 - i] = group.markers[0] - - if not groups: - return EmptyMarker() - - if len(groups) == 1: - return groups[0] - - return MarkerUnion.of(*groups) diff --git a/vendor/poetry-core/poetry/core/version/requirements.py b/vendor/poetry-core/poetry/core/version/requirements.py deleted file mode 100644 index d0029fc4..00000000 --- a/vendor/poetry-core/poetry/core/version/requirements.py +++ /dev/null @@ -1,121 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from lark import Lark -from lark import UnexpectedCharacters -from lark import UnexpectedToken - -from poetry.core.semver import parse_constraint -from poetry.core.semver.exceptions import ParseConstraintError - -from .grammars import __path_assets__ -from .markers import _compact_markers - - -try: - import urllib.parse as urlparse -except ImportError: - import urlparse - - -class InvalidRequirement(ValueError): - """ - An invalid requirement was found, users should refer to PEP 508. - """ - - -_parser = Lark.open( - __path_assets__ / "pep508.lark", parser="lalr" -) - - -class Requirement(object): - """ - Parse a requirement. - - Parse a given requirement string into its parts, such as name, specifier, - URL, and extras. Raises InvalidRequirement on a badly-formed requirement - string. - """ - - def __init__(self, requirement_string): # type: (str) -> None - try: - parsed = _parser.parse(requirement_string) - except (UnexpectedCharacters, UnexpectedToken) as e: - raise InvalidRequirement( - "The requirement is invalid: Unexpected character at column {}\n\n{}".format( - e.column, e.get_context(requirement_string) - ) - ) - - self.name = next(parsed.scan_values(lambda t: t.type == "NAME")).value - url = next(parsed.scan_values(lambda t: t.type == "URI"), None) - - if url: - url = url.value - parsed_url = urlparse.urlparse(url) - if parsed_url.scheme == "file": - if urlparse.urlunparse(parsed_url) != url: - raise InvalidRequirement( - 'The requirement is invalid: invalid URL "{0}"'.format(url) - ) - elif ( - not (parsed_url.scheme and parsed_url.netloc) - or (not parsed_url.scheme and not parsed_url.netloc) - ) and not parsed_url.path: - raise InvalidRequirement( - 'The requirement is invalid: invalid URL "{0}"'.format(url) - ) - self.url = url - else: - self.url = None - - self.extras = [e.value for e in parsed.scan_values(lambda t: t.type == "EXTRA")] - constraint = next(parsed.find_data("version_specification"), None) - if not constraint: - constraint = "*" - else: - constraint = ",".join(constraint.children) - - try: - self.constraint = parse_constraint(constraint) - except ParseConstraintError: - raise InvalidRequirement( - 'The requirement is invalid: invalid version constraint "{}"'.format( - constraint - ) - ) - - self.pretty_constraint = constraint - - marker = next(parsed.find_data("marker_spec"), None) - if marker: - marker = _compact_markers( - marker.children[0].children, tree_prefix="markers__" - ) - - self.marker = marker - - def __str__(self): # type: () -> str - parts = [self.name] - - if self.extras: - parts.append("[{0}]".format(",".join(sorted(self.extras)))) - - if self.pretty_constraint: 
- parts.append(self.pretty_constraint) - - if self.url: - parts.append("@ {0}".format(self.url)) - - if self.marker: - parts.append("; {0}".format(self.marker)) - - return "".join(parts) - - def __repr__(self): # type: () -> str - return "<Requirement('{0}')>".format(str(self)) diff --git a/vendor/poetry-core/poetry/core/version/utils.py b/vendor/poetry-core/poetry/core/version/utils.py deleted file mode 100644 index a81a9e7f..00000000 --- a/vendor/poetry-core/poetry/core/version/utils.py +++ /dev/null @@ -1,65 +0,0 @@ -from typing import Any - - -class Infinity(object): - def __repr__(self): # type: () -> str - return "Infinity" - - def __hash__(self): # type: () -> int - return hash(repr(self)) - - def __lt__(self, other): # type: (Any) -> bool - return False - - def __le__(self, other): # type: (Any) -> bool - return False - - def __eq__(self, other): # type: (Any) -> bool - return isinstance(other, self.__class__) - - def __ne__(self, other): # type: (Any) -> bool - return not isinstance(other, self.__class__) - - def __gt__(self, other): # type: (Any) -> bool - return True - - def __ge__(self, other): # type: (Any) -> bool - return True - - def __neg__(self): # type: () -> NegativeInfinity - return NegativeInfinity - - -Infinity = Infinity() # type: ignore - - -class NegativeInfinity(object): - def __repr__(self): # type: () -> str - return "-Infinity" - - def __hash__(self): # type: () -> int - return hash(repr(self)) - - def __lt__(self, other): # type: (Any) -> bool - return True - - def __le__(self, other): # type: (Any) -> bool - return True - - def __eq__(self, other): # type: (Any) -> bool - return isinstance(other, self.__class__) - - def __ne__(self, other): # type: (Any) -> bool - return not isinstance(other, self.__class__) - - def __gt__(self, other): # type: (Any) -> bool - return False - - def __ge__(self, other): # type: (Any) -> bool - return False - - def __neg__(self): # type: () -> Infinity - return Infinity - - -NegativeInfinity = NegativeInfinity() # type: ignore diff --git a/vendor/poetry-core/poetry/core/version/version.py b/vendor/poetry-core/poetry/core/version/version.py deleted file mode 100644 index 0726d943..00000000 --- a/vendor/poetry-core/poetry/core/version/version.py +++ /dev/null @@ -1,243 +0,0 @@ -import re - -from collections import namedtuple -from itertools import dropwhile -from typing import Any -from typing import Optional -from typing import Tuple -from typing import Union - -from .base import BaseVersion -from .exceptions import InvalidVersion -from .utils import Infinity -from .utils import NegativeInfinity - - -_Version = namedtuple("_Version", ["epoch", "release", "dev", "pre", "post", "local"]) - - -VERSION_PATTERN = re.compile( - r""" - ^ - v? - (?: - (?:(?P<epoch>[0-9]+)!)? # epoch - (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment - (?P<pre>
                                          # pre-release
-            [-_.]?
-            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
-            [-_.]?
-            (?P<pre_n>[0-9]+)?
-        )?
-        (?P<post>                                         # post release
-            (?:-(?P<post_n1>[0-9]+))
-            |
-            (?:
-                [-_.]?
-                (?P<post_l>post|rev|r)
-                [-_.]?
-                (?P<post_n2>[0-9]+)?
-            )
-        )?
-        (?P<dev>                                          # dev release
-            [-_.]?
-            (?P<dev_l>dev)
-            [-_.]?
-            (?P<dev_n>[0-9]+)?
-        )?
-    )
-    (?:\+(?P<local>[a-z0-9]+(?:[-_.][a-z0-9]+)*))?       # local version
-    $
-""",
-    re.IGNORECASE | re.VERBOSE,
-)
-
-
-class Version(BaseVersion):
-    def __init__(self, version):  # type: (str) -> None
-        # Validate the version and parse it into pieces
-        match = VERSION_PATTERN.match(version)
-        if not match:
-            raise InvalidVersion("Invalid version: '{0}'".format(version))
-
-        # Store the parsed out pieces of the version
-        self._version = _Version(
-            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
-            release=tuple(int(i) for i in match.group("release").split(".")),
-            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
-            post=_parse_letter_version(
-                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
-            ),
-            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
-            local=_parse_local_version(match.group("local")),
-        )
-
-        # Generate a key which will be used for sorting
-        self._key = _cmpkey(
-            self._version.epoch,
-            self._version.release,
-            self._version.pre,
-            self._version.post,
-            self._version.dev,
-            self._version.local,
-        )
-
-    def __repr__(self):  # type: () -> str
-        return "".format(repr(str(self)))
-
-    def __str__(self):  # type: () -> str
-        parts = []
-
-        # Epoch
-        if self._version.epoch != 0:
-            parts.append("{0}!".format(self._version.epoch))
-
-        # Release segment
-        parts.append(".".join(str(x) for x in self._version.release))
-
-        # Pre-release
-        if self._version.pre is not None:
-            parts.append("".join(str(x) for x in self._version.pre))
-
-        # Post-release
-        if self._version.post is not None:
-            parts.append(".post{0}".format(self._version.post[1]))
-
-        # Development release
-        if self._version.dev is not None:
-            parts.append(".dev{0}".format(self._version.dev[1]))
-
-        # Local version segment
-        if self._version.local is not None:
-            parts.append("+{0}".format(".".join(str(x) for x in self._version.local)))
-
-        return "".join(parts)
-
-    @property
-    def public(self):  # type: () -> str
-        return str(self).split("+", 1)[0]
-
-    @property
-    def base_version(self):  # type: () -> str
-        parts = []
-
-        # Epoch
-        if self._version.epoch != 0:
-            parts.append("{0}!".format(self._version.epoch))
-
-        # Release segment
-        parts.append(".".join(str(x) for x in self._version.release))
-
-        return "".join(parts)
-
-    @property
-    def local(self):  # type: () -> str
-        version_string = str(self)
-        if "+" in version_string:
-            return version_string.split("+", 1)[1]
-
-    @property
-    def is_prerelease(self):  # type: () -> bool
-        return bool(self._version.dev or self._version.pre)
-
-    @property
-    def is_postrelease(self):  # type: () -> bool
-        return bool(self._version.post)
-
-
-def _parse_letter_version(
-    letter, number
-):  # type: (str, Optional[str]) -> Tuple[str, int]
-    if letter:
-        # We consider there to be an implicit 0 in a pre-release if there is
-        # not a numeral associated with it.
-        if number is None:
-            number = 0
-
-        # We normalize any letters to their lower case form
-        letter = letter.lower()
-
-        # We consider some words to be alternate spellings of other words and
-        # in those cases we want to normalize the spellings to our preferred
-        # spelling.
-        if letter == "alpha":
-            letter = "a"
-        elif letter == "beta":
-            letter = "b"
-        elif letter in ["c", "pre", "preview"]:
-            letter = "rc"
-        elif letter in ["rev", "r"]:
-            letter = "post"
-
-        return letter, int(number)
-    if not letter and number:
-        # We assume if we are given a number, but we are not given a letter
-        # then this is using the implicit post release syntax (e.g. 1.0-1)
-        letter = "post"
-
-        return letter, int(number)
-
-
-_local_version_seperators = re.compile(r"[._-]")
-
-
-def _parse_local_version(local):  # type: (Optional[str]) -> Tuple[Union[str, int], ...]
-    """
-    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
-    """
-    if local is not None:
-        return tuple(
-            part.lower() if not part.isdigit() else int(part)
-            for part in _local_version_seperators.split(local)
-        )
-
-
-def _cmpkey(
-    epoch,  # type: int
-    release,  # type: Optional[Tuple[int, ...]]
-    pre,  # type: Optional[Tuple[str, int]]
-    post,  # type: Optional[Tuple[str, int]]
-    dev,  # type: Optional[Tuple[str, int]]
-    local,  # type: Optional[Tuple[Union[str, int], ...]]
-):  # type: (...) -> Tuple[int, Tuple[int, ...], Union[Union[Infinity, NegativeInfinity, Tuple[str, int]], Any], Union[NegativeInfinity, Tuple[str, int]], Union[Union[Infinity, Tuple[str, int]], Any], Union[NegativeInfinity, Tuple[Union[Tuple[int, str], Tuple[NegativeInfinity, Union[str, int]]], ...]]]
-    # When we compare a release version, we want to compare it with all of the
-    # trailing zeros removed. So we'll use a reverse the list, drop all the now
-    # leading zeros until we come to something non zero, then take the rest
-    # re-reverse it back into the correct order and make it a tuple and use
-    # that for our sorting key.
-    release = tuple(reversed(list(dropwhile(lambda x: x == 0, reversed(release)))))
-
-    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
-    # We'll do this by abusing the pre segment, but we _only_ want to do this
-    # if there is not a pre or a post segment. If we have one of those then
-    # the normal sorting rules will handle this case correctly.
-    if pre is None and post is None and dev is not None:
-        pre = -Infinity
-
-    # Versions without a pre-release (except as noted above) should sort after
-    # those with one.
-    elif pre is None:
-        pre = Infinity
-
-    # Versions without a post segment should sort before those with one.
-    if post is None:
-        post = -Infinity
-
-    # Versions without a development segment should sort after those with one.
-    if dev is None:
-        dev = Infinity
-
-    if local is None:
-        # Versions without a local segment should sort before those with one.
-        local = -Infinity
-    else:
-        # Versions with a local segment need that segment parsed to implement
-        # the sorting rules in PEP440.
-        # - Alpha numeric segments sort before numeric segments
-        # - Alpha numeric segments sort lexicographically
-        # - Numeric segments sort numerically
-        # - Shorter versions sort before longer versions when the prefixes
-        #   match exactly
-        local = tuple((i, "") if isinstance(i, int) else (-Infinity, i) for i in local)
-
-    return epoch, release, pre, post, dev, local
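_cmpkey above builds the PEP 440 sort key: trailing zeros are trimmed from the release tuple so that 1.0 and 1.0.0 compare equal, while the Infinity/NegativeInfinity sentinels from the deleted utils.py stand in for absent pre/post/dev segments. A standalone sketch of the trimming step (illustrative only; _trim_release is not a name from the vendored sources):

    from itertools import dropwhile

    def _trim_release(release):
        # Drop trailing zeros: (1, 0, 0) -> (1,), so "1.0.0" sorts like "1.0".
        return tuple(reversed(list(dropwhile(lambda x: x == 0, reversed(release)))))

    assert _trim_release((1, 0, 0)) == _trim_release((1, 0)) == (1,)
    assert _trim_release((1, 0, 1)) == (1, 0, 1)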
diff --git a/vendor/poetry-core/pyproject.toml b/vendor/poetry-core/pyproject.toml
index 61104ed1..697a6845 100644
--- a/vendor/poetry-core/pyproject.toml
+++ b/vendor/poetry-core/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "poetry-core"
-version = "1.0.8"
+version = "1.1.0"
 description = "Poetry PEP 517 Build Backend"
 authors = ["Sébastien Eustace "]
 
@@ -19,86 +19,89 @@ classifiers = [
 ]
 
 packages = [
-    {include = "poetry"},
+    { include = "poetry", from = "src" },
+]
+include = [
+    { path = "tests", format = "sdist" },
 ]
 exclude = [
     "**/*.pyc",
     "**/*.pyi",
 ]
 
+[tool.poetry.build]
+generate-setup-file = false
+
 [tool.poetry.urls]
 "Bug Tracker" = "https://github.com/python-poetry/poetry/issues"
 
 [tool.poetry.dependencies]
-python = "~2.7 || ^3.5"
-
-# required for compatibility
-importlib-metadata = {version = "^1.7.0", python = "~2.7 || >=3.5, <3.8"}
-pathlib2 = {version = "^2.3.5", python = "~2.7"}
-typing = {version = "^3.7.4.1", python = "~2.7"}
-
-# required by tomlkit
-enum34 = {version = "^1.1.10", python = "~2.7"}
-
-# required by tomlkit, jsonschema
-functools32 = {version = "^3.2.3-2", python = "~2.7"}
+python = "^3.7"
 
 [tool.poetry.dev-dependencies]
-pre-commit = "^1.10"
-pyrsistent = "^0.16.0"
-pytest = "^4.6"
-pytest-cov = "^2.8"
-pytest-mock = "^2.0"
+pre-commit = "^2.15.0"
+pyrsistent = "^0.18.0"
+pytest = "^7.1.2"
+pytest-cov = "^3.0.0"
+pytest-mock = "^3.5"
 tox = "^3.0"
-vendoring = {version = "^0.3", python = "~3.8"}
-pep517 = "^0.8.2"
-"backports.tempfile" = {version = "^1.0", python = "~2.7"}
+vendoring = {version = "^1.0", python = "^3.8"}
+build = "^0.7.0"
+mypy = ">=0.960"
+types-jsonschema = ">=4.4.4"
+types-setuptools = ">=57.4.14"
 
 [tool.black]
 line-length = 88
+preview = true
 include = '\.pyi?$'
-exclude = '''
-/(
-    \.eggs
-  | \.git
-  | \.hg
-  | \.mypy_cache
-  | \.tox
-  | \.venv
-  | _build
-  | buck-out
-  | build
-  | dist
-  | poetry/core/_vendor/*
-)/
-'''
+extend-exclude = "src/poetry/core/_vendor/*"
 
 [tool.isort]
-line_length = 88
+profile = "black"
 force_single_line = true
 atomic = true
 include_trailing_comma = true
 lines_after_imports = 2
 lines_between_types = 1
-multi_line_output = 3
 use_parentheses = true
-not_skip = "__init__.py"
 skip_glob = ["*/setup.py", "*/poetry/core/_vendor/*"]
 filter_files = true
 
 known_first_party = "poetry.core"
 known_third_party = ["poetry.core._vendor"]
 
+[tool.mypy]
+strict = true
+explicit_package_bases = true
+namespace_packages = true
+show_error_codes = true
+enable_error_code = [
+    "ignore-without-code",
+    "redundant-expr",
+    "truthy-bool",
+]
+mypy_path = "src"
+files = "src, tests"
+exclude = "(?x)(^tests/.*/fixtures | ^src/poetry/core/_vendor)"
+
+[[tool.mypy.overrides]]
+module = [
+  'lark.*',
+  'tomlkit.*',
+  'virtualenv.*',
+]
+ignore_missing_imports = true
+
 [tool.vendoring]
-destination = "poetry/core/_vendor/"
-requirements = "poetry/core/_vendor/vendor.txt"
+destination = "src/poetry/core/_vendor/"
+requirements = "src/poetry/core/_vendor/vendor.txt"
 namespace = ""
 
-protected-files = ["__init__.py", "README.md", "vendor.txt"]
+protected-files = ["vendor.txt"]
 patches-dir = "vendors/patches"
 
 [tool.vendoring.transformations]
-substitute = []
 drop = [
     "bin/",
     "*.so",
@@ -106,17 +109,9 @@ drop = [
     "*/tests/"
 ]
 
-[tool.vendoring.typing-stubs]
-six = ["six.__init__", "six.moves.__init__", "six.moves.configparser"]
-appdirs = []
-
-[tool.vendoring.license.directories]
-
-
 [tool.vendoring.license.fallback-urls]
 pyrsistent = "https://raw.githubusercontent.com/tobgu/pyrsistent/master/LICENSE.mit"
 
 [build-system]
-requires = []
+requires = ["poetry-core>=1.0.0"]
 build-backend = "poetry.core.masonry.api"
-backend-path = ["."]
diff --git a/vendor/poetry-core/src/poetry/core/__init__.py b/vendor/poetry-core/src/poetry/core/__init__.py
new file mode 100644
index 00000000..9d63535f
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/__init__.py
@@ -0,0 +1,6 @@
+from __future__ import annotations
+
+
+# this cannot presently be replaced with importlib.metadata.version as when building
+# itself, poetry-core is not available as an installed distribution.
+__version__ = "1.1.0"
diff --git a/vendor/poetry-core/src/poetry/core/exceptions/__init__.py b/vendor/poetry-core/src/poetry/core/exceptions/__init__.py
new file mode 100644
index 00000000..d96ee129
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/exceptions/__init__.py
@@ -0,0 +1,6 @@
+from __future__ import annotations
+
+from poetry.core.exceptions.base import PoetryCoreException
+
+
+__all__ = ["PoetryCoreException"]
diff --git a/vendor/poetry-core/src/poetry/core/exceptions/base.py b/vendor/poetry-core/src/poetry/core/exceptions/base.py
new file mode 100644
index 00000000..43727628
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/exceptions/base.py
@@ -0,0 +1,5 @@
+from __future__ import annotations
+
+
+class PoetryCoreException(Exception):
+    pass
diff --git a/vendor/poetry-core/src/poetry/core/factory.py b/vendor/poetry-core/src/poetry/core/factory.py
new file mode 100644
index 00000000..90b8974d
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/factory.py
@@ -0,0 +1,450 @@
+from __future__ import annotations
+
+import logging
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
+from typing import Dict
+from typing import List
+from typing import Mapping
+from typing import Union
+from typing import cast
+from warnings import warn
+
+from poetry.core.utils.helpers import combine_unicode
+from poetry.core.utils.helpers import readme_content_type
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.dependency import Dependency
+    from poetry.core.packages.dependency_group import DependencyGroup
+    from poetry.core.packages.project_package import ProjectPackage
+    from poetry.core.poetry import Poetry
+    from poetry.core.spdx.license import License
+
+    DependencyConstraint = Union[str, Dict[str, Any]]
+    DependencyConfig = Mapping[
+        str, Union[List[DependencyConstraint], DependencyConstraint]
+    ]
+
+
+logger = logging.getLogger(__name__)
+
+
+class Factory:
+    """
+    Factory class to create various elements needed by Poetry.
+    """
+
+    def create_poetry(
+        self, cwd: Path | None = None, with_groups: bool = True
+    ) -> Poetry:
+        from poetry.core.poetry import Poetry
+        from poetry.core.pyproject.toml import PyProjectTOML
+
+        poetry_file = self.locate(cwd)
+        local_config = PyProjectTOML(path=poetry_file).poetry_config
+
+        # Checking validity
+        check_result = self.validate(local_config)
+        if check_result["errors"]:
+            message = ""
+            for error in check_result["errors"]:
+                message += f"  - {error}\n"
+
+            raise RuntimeError("The Poetry configuration is invalid:\n" + message)
+
+        # Load package
+        name = cast(str, local_config["name"])
+        version = cast(str, local_config["version"])
+        package = self.get_package(name, version)
+        package = self.configure_package(
+            package, local_config, poetry_file.parent, with_groups=with_groups
+        )
+
+        return Poetry(poetry_file, local_config, package)
+
+    @classmethod
+    def get_package(cls, name: str, version: str) -> ProjectPackage:
+        from poetry.core.packages.project_package import ProjectPackage
+
+        return ProjectPackage(name, version, version)
+
+    @classmethod
+    def _add_package_group_dependencies(
+        cls,
+        package: ProjectPackage,
+        group: str | DependencyGroup,
+        dependencies: DependencyConfig,
+    ) -> None:
+        from poetry.core.packages.dependency_group import MAIN_GROUP
+
+        if isinstance(group, str):
+            if package.has_dependency_group(group):
+                group = package.dependency_group(group)
+            else:
+                from poetry.core.packages.dependency_group import DependencyGroup
+
+                group = DependencyGroup(group)
+
+        for name, constraints in dependencies.items():
+            _constraints = (
+                constraints if isinstance(constraints, list) else [constraints]
+            )
+            for _constraint in _constraints:
+                if name.lower() == "python":
+                    if group.name == MAIN_GROUP and isinstance(_constraint, str):
+                        package.python_versions = _constraint
+                    continue
+
+                group.add_dependency(
+                    cls.create_dependency(
+                        name,
+                        _constraint,
+                        groups=[group.name],
+                        root_dir=package.root_dir,
+                    )
+                )
+
+        package.add_dependency_group(group)
+
+    @classmethod
+    def configure_package(
+        cls,
+        package: ProjectPackage,
+        config: dict[str, Any],
+        root: Path,
+        with_groups: bool = True,
+    ) -> ProjectPackage:
+        from poetry.core.packages.dependency import Dependency
+        from poetry.core.packages.dependency_group import MAIN_GROUP
+        from poetry.core.packages.dependency_group import DependencyGroup
+        from poetry.core.spdx.helpers import license_by_id
+
+        package.root_dir = root
+
+        for author in config["authors"]:
+            package.authors.append(combine_unicode(author))
+
+        for maintainer in config.get("maintainers", []):
+            package.maintainers.append(combine_unicode(maintainer))
+
+        package.description = config.get("description", "")
+        package.homepage = config.get("homepage")
+        package.repository_url = config.get("repository")
+        package.documentation_url = config.get("documentation")
+        try:
+            license_: License | None = license_by_id(config.get("license", ""))
+        except ValueError:
+            license_ = None
+
+        package.license = license_
+        package.keywords = config.get("keywords", [])
+        package.classifiers = config.get("classifiers", [])
+
+        if "readme" in config:
+            if isinstance(config["readme"], str):
+                package.readmes = (root / config["readme"],)
+            else:
+                package.readmes = tuple(root / readme for readme in config["readme"])
+
+        if "platform" in config:
+            package.platform = config["platform"]
+
+        if "dependencies" in config:
+            cls._add_package_group_dependencies(
+                package=package, group=MAIN_GROUP, dependencies=config["dependencies"]
+            )
+
+        if with_groups and "group" in config:
+            for group_name, group_config in config["group"].items():
+                group = DependencyGroup(
+                    group_name, optional=group_config.get("optional", False)
+                )
+                cls._add_package_group_dependencies(
+                    package=package,
+                    group=group,
+                    dependencies=group_config["dependencies"],
+                )
+
+        if with_groups and "dev-dependencies" in config:
+            cls._add_package_group_dependencies(
+                package=package, group="dev", dependencies=config["dev-dependencies"]
+            )
+
+        extras = config.get("extras", {})
+        for extra_name, requirements in extras.items():
+            package.extras[extra_name] = []
+
+            # Checking for dependency
+            for req in requirements:
+                req = Dependency(req, "*")
+
+                for dep in package.requires:
+                    if dep.name == req.name:
+                        dep.in_extras.append(extra_name)
+                        package.extras[extra_name].append(dep)
+
+                        break
+
+        if "build" in config:
+            build = config["build"]
+            if not isinstance(build, dict):
+                build = {"script": build}
+            package.build_config = build or {}
+
+        if "include" in config:
+            package.include = []
+
+            for include in config["include"]:
+                if not isinstance(include, dict):
+                    include = {"path": include}
+
+                formats = include.get("format", [])
+                if formats and not isinstance(formats, list):
+                    formats = [formats]
+                include["format"] = formats
+
+                package.include.append(include)
+
+        if "exclude" in config:
+            package.exclude = config["exclude"]
+
+        if "packages" in config:
+            package.packages = config["packages"]
+
+        # Custom urls
+        if "urls" in config:
+            package.custom_urls = config["urls"]
+
+        return package
+
+    @classmethod
+    def create_dependency(
+        cls,
+        name: str,
+        constraint: DependencyConstraint,
+        groups: list[str] | None = None,
+        root_dir: Path | None = None,
+    ) -> Dependency:
+        from poetry.core.packages.constraints import (
+            parse_constraint as parse_generic_constraint,
+        )
+        from poetry.core.packages.dependency import Dependency
+        from poetry.core.packages.dependency_group import MAIN_GROUP
+        from poetry.core.packages.directory_dependency import DirectoryDependency
+        from poetry.core.packages.file_dependency import FileDependency
+        from poetry.core.packages.url_dependency import URLDependency
+        from poetry.core.packages.utils.utils import create_nested_marker
+        from poetry.core.packages.vcs_dependency import VCSDependency
+        from poetry.core.semver.helpers import parse_constraint
+        from poetry.core.version.markers import AnyMarker
+        from poetry.core.version.markers import parse_marker
+
+        if groups is None:
+            groups = [MAIN_GROUP]
+
+        if constraint is None:
+            constraint = "*"
+
+        if isinstance(constraint, dict):
+            optional = constraint.get("optional", False)
+            python_versions = constraint.get("python")
+            platform = constraint.get("platform")
+            markers = constraint.get("markers")
+            if "allows-prereleases" in constraint:
+                message = (
+                    f'The "{name}" dependency specifies '
+                    'the "allows-prereleases" property, which is deprecated. '
+                    'Use "allow-prereleases" instead.'
+                )
+                warn(message, DeprecationWarning)
+                logger.warning(message)
+
+            allows_prereleases = constraint.get(
+                "allow-prereleases", constraint.get("allows-prereleases", False)
+            )
+
+            dependency: Dependency
+            if "git" in constraint:
+                # VCS dependency
+                dependency = VCSDependency(
+                    name,
+                    "git",
+                    constraint["git"],
+                    branch=constraint.get("branch", None),
+                    tag=constraint.get("tag", None),
+                    rev=constraint.get("rev", None),
+                    directory=constraint.get("subdirectory", None),
+                    groups=groups,
+                    optional=optional,
+                    develop=constraint.get("develop", False),
+                    extras=constraint.get("extras", []),
+                )
+            elif "file" in constraint:
+                file_path = Path(constraint["file"])
+
+                dependency = FileDependency(
+                    name,
+                    file_path,
+                    groups=groups,
+                    base=root_dir,
+                    extras=constraint.get("extras", []),
+                )
+            elif "path" in constraint:
+                path = Path(constraint["path"])
+
+                if root_dir:
+                    is_file = root_dir.joinpath(path).is_file()
+                else:
+                    is_file = path.is_file()
+
+                if is_file:
+                    dependency = FileDependency(
+                        name,
+                        path,
+                        groups=groups,
+                        optional=optional,
+                        base=root_dir,
+                        extras=constraint.get("extras", []),
+                    )
+                else:
+                    dependency = DirectoryDependency(
+                        name,
+                        path,
+                        groups=groups,
+                        optional=optional,
+                        base=root_dir,
+                        develop=constraint.get("develop", False),
+                        extras=constraint.get("extras", []),
+                    )
+            elif "url" in constraint:
+                dependency = URLDependency(
+                    name,
+                    constraint["url"],
+                    groups=groups,
+                    optional=optional,
+                    extras=constraint.get("extras", []),
+                )
+            else:
+                version = constraint["version"]
+
+                dependency = Dependency(
+                    name,
+                    version,
+                    optional=optional,
+                    groups=groups,
+                    allows_prereleases=allows_prereleases,
+                    extras=constraint.get("extras", []),
+                )
+
+            marker = parse_marker(markers) if markers else AnyMarker()
+
+            if python_versions:
+                marker = marker.intersect(
+                    parse_marker(
+                        create_nested_marker(
+                            "python_version", parse_constraint(python_versions)
+                        )
+                    )
+                )
+
+            if platform:
+                marker = marker.intersect(
+                    parse_marker(
+                        create_nested_marker(
+                            "sys_platform", parse_generic_constraint(platform)
+                        )
+                    )
+                )
+
+            if not marker.is_any():
+                dependency.marker = marker
+
+            dependency.source_name = constraint.get("source")
+        else:
+            dependency = Dependency(name, constraint, groups=groups)
+
+        return dependency
+
+    @classmethod
+    def validate(
+        cls, config: dict[str, Any], strict: bool = False
+    ) -> dict[str, list[str]]:
+        """
+        Checks the validity of a configuration
+        """
+        from poetry.core.json import validate_object
+
+        result: dict[str, list[str]] = {"errors": [], "warnings": []}
+        # Schema validation errors
+        validation_errors = validate_object(config, "poetry-schema")
+
+        result["errors"] += validation_errors
+
+        if strict:
+            # If strict, check the file more thoroughly
+            if "dependencies" in config:
+                python_versions = config["dependencies"]["python"]
+                if python_versions == "*":
+                    result["warnings"].append(
+                        "A wildcard Python dependency is ambiguous. "
+                        "Consider specifying a more explicit one."
+                    )
+
+                for name, constraint in config["dependencies"].items():
+                    if not isinstance(constraint, dict):
+                        continue
+
+                    if "allows-prereleases" in constraint:
+                        result["warnings"].append(
+                            f'The "{name}" dependency specifies '
+                            'the "allows-prereleases" property, which is deprecated. '
+                            'Use "allow-prereleases" instead.'
+                        )
+
+            # Checking for scripts with extras
+            if "scripts" in config:
+                scripts = config["scripts"]
+                config_extras = config.get("extras", {})
+
+                for name, script in scripts.items():
+                    if not isinstance(script, dict):
+                        continue
+
+                    extras = script.get("extras", [])
+                    for extra in extras:
+                        if extra not in config_extras:
+                            result["errors"].append(
+                                f'Script "{name}" requires extra "{extra}" which is not'
+                                " defined."
+                            )
+
+            # Checking types of all readme files (must match)
+            if "readme" in config and not isinstance(config["readme"], str):
+                readme_types = {readme_content_type(r) for r in config["readme"]}
+                if len(readme_types) > 1:
+                    result["errors"].append(
+                        "Declared README files must be of same type: found"
+                        f" {', '.join(sorted(readme_types))}"
+                    )
+
+        return result
+
+    @classmethod
+    def locate(cls, cwd: Path | None = None) -> Path:
+        cwd = Path(cwd or Path.cwd())
+        candidates = [cwd]
+        candidates.extend(cwd.parents)
+
+        for path in candidates:
+            poetry_file = path / "pyproject.toml"
+
+            if poetry_file.exists():
+                return poetry_file
+
+        else:
+            raise RuntimeError(
+                f"Poetry could not find a pyproject.toml file in {cwd} or its parents"
+            )
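Factory.create_poetry is the entry point the PEP 517 hooks further down rely on: it locates pyproject.toml, validates it against the bundled schema, and builds the package model. A minimal usage sketch (assuming a valid pyproject.toml in the working directory):

    from pathlib import Path

    from poetry.core.factory import Factory

    poetry = Factory().create_poetry(Path(".").resolve())
    print(poetry.package.name, poetry.package.version)
    for dep in poetry.package.requires:  # main-group dependencies
        print(dep)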
diff --git a/vendor/poetry-core/src/poetry/core/json/__init__.py b/vendor/poetry-core/src/poetry/core/json/__init__.py
new file mode 100644
index 00000000..84ec3c8f
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/json/__init__.py
@@ -0,0 +1,37 @@
+from __future__ import annotations
+
+import json
+import os
+
+from importlib import resources
+from typing import Any
+
+
+class ValidationError(ValueError):
+    pass
+
+
+def validate_object(obj: dict[str, Any], schema_name: str) -> list[str]:
+    try:
+        schema = json.loads(
+            resources.read_text(f"{__name__}.schemas", f"{schema_name}.json")
+        )
+    except Exception:
+        raise ValueError(f"Schema {schema_name} does not exist.")
+
+    from jsonschema import Draft7Validator
+
+    validator = Draft7Validator(schema)
+    validation_errors = sorted(validator.iter_errors(obj), key=lambda e: e.path)  # type: ignore[no-any-return]
+
+    errors = []
+
+    for error in validation_errors:
+        message = error.message
+        if error.path:
+            path = ".".join(str(x) for x in error.absolute_path)
+            message = f"[{path}] {message}"
+
+        errors.append(message)
+
+    return errors
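validate_object collects the jsonschema errors into plain strings, each prefixed with the offending config path, rather than raising on the first failure; this is what lets Factory.validate report every problem at once. A hypothetical call (the config below is made up and deliberately incomplete):

    from poetry.core.json import validate_object

    config = {"name": "demo", "version": "0.1.0"}  # missing "description" and "authors"
    for message in validate_object(config, "poetry-schema"):
        print(message)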
diff --git a/vendor/poetry/poetry/config/__init__.py b/vendor/poetry-core/src/poetry/core/json/schemas/__init__.py
similarity index 100%
rename from vendor/poetry/poetry/config/__init__.py
rename to vendor/poetry-core/src/poetry/core/json/schemas/__init__.py
diff --git a/vendor/poetry-core/src/poetry/core/json/schemas/poetry-schema.json b/vendor/poetry-core/src/poetry/core/json/schemas/poetry-schema.json
new file mode 100644
index 00000000..6c519910
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/json/schemas/poetry-schema.json
@@ -0,0 +1,651 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "name": "Package",
+  "type": "object",
+  "additionalProperties": true,
+  "required": [
+    "name",
+    "version",
+    "description",
+    "authors"
+  ],
+  "properties": {
+    "name": {
+      "type": "string",
+      "description": "Package name."
+    },
+    "version": {
+      "type": "string",
+      "description": "Package version."
+    },
+    "description": {
+      "type": "string",
+      "description": "Short package description.",
+      "pattern": "^[^\n]*$"
+    },
+    "keywords": {
+      "type": "array",
+      "items": {
+        "type": "string",
+        "description": "A tag/keyword that this package relates to."
+      }
+    },
+    "homepage": {
+      "type": "string",
+      "description": "Homepage URL for the project.",
+      "format": "uri"
+    },
+    "repository": {
+      "type": "string",
+      "description": "Repository URL for the project.",
+      "format": "uri"
+    },
+    "documentation": {
+      "type": "string",
+      "description": "Documentation URL for the project.",
+      "format": "uri"
+    },
+    "license": {
+      "type": "string",
+      "description": "License name."
+    },
+    "authors": {
+      "$ref": "#/definitions/authors"
+    },
+    "maintainers": {
+      "$ref": "#/definitions/maintainers"
+    },
+    "readme": {
+      "anyOf": [
+        {
+          "type": "string",
+          "description": "The path to the README file."
+        },
+        {
+          "type": "array",
+          "description": "A list of paths to the readme files.",
+          "items": {
+            "type": "string"
+          }
+        }
+      ]
+    },
+    "classifiers": {
+      "type": "array",
+      "description": "A list of trove classifers."
+    },
+    "packages": {
+      "type": "array",
+      "description": "A list of packages to include in the final distribution.",
+      "items": {
+        "type": "object",
+        "description": "Information about where the package resides.",
+        "additionalProperties": false,
+        "required": [
+          "include"
+        ],
+        "properties": {
+          "include": {
+            "$ref": "#/definitions/include-path"
+          },
+          "from": {
+            "type": "string",
+            "description": "Where the source directory of the package resides."
+          },
+          "format": {
+            "$ref": "#/definitions/package-formats"
+          }
+        }
+      }
+    },
+    "include": {
+      "type": "array",
+      "description": "A list of files and folders to include.",
+      "items": {
+        "anyOf": [
+          {
+            "$ref": "#/definitions/include-path"
+          },
+          {
+            "type": "object",
+            "additionalProperties": false,
+            "required": [
+              "path"
+            ],
+            "properties": {
+              "path": {
+                "$ref": "#/definitions/include-path"
+              },
+              "format": {
+                "$ref": "#/definitions/package-formats"
+              }
+            }
+          }
+        ]
+      }
+    },
+    "exclude": {
+      "type": "array",
+      "description": "A list of files and folders to exclude."
+    },
+    "dependencies": {
+      "type": "object",
+      "description": "This is a hash of package name (keys) and version constraints (values) that are required to run this package.",
+      "required": [
+        "python"
+      ],
+      "properties": {
+        "python": {
+          "type": "string",
+          "description": "The Python versions the package is compatible with."
+        }
+      },
+      "$ref": "#/definitions/dependencies",
+      "additionalProperties": false
+    },
+    "dev-dependencies": {
+      "type": "object",
+      "description": "This is a hash of package name (keys) and version constraints (values) that this package requires for developing it (testing tools and such).",
+      "$ref": "#/definitions/dependencies",
+      "additionalProperties": false
+    },
+    "extras": {
+      "type": "object",
+      "patternProperties": {
+        "^[a-zA-Z-_.0-9]+$": {
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        }
+      }
+    },
+    "group": {
+      "type": "object",
+      "description": "This represents groups of dependencies",
+      "patternProperties": {
+        "^[a-zA-Z-_.0-9]+$": {
+          "type": "object",
+          "description": "This represents a single dependency group",
+          "required": [
+            "dependencies"
+          ],
+          "properties": {
+            "optional": {
+              "type": "boolean",
+              "description": "Whether the dependency group is optional or not"
+            },
+            "dependencies": {
+              "type": "object",
+              "description": "The dependencies of this dependency group",
+              "$ref": "#/definitions/dependencies",
+              "additionalProperties": false
+            }
+          },
+          "additionalProperties": false
+        }
+      }
+    },
+    "build": {
+      "$ref": "#/definitions/build-section"
+    },
+    "scripts": {
+      "type": "object",
+      "description": "A hash of scripts to be installed.",
+      "patternProperties": {
+        "^[a-zA-Z-_.0-9]+$": {
+          "oneOf": [
+            {
+              "$ref": "#/definitions/script-legacy"
+            },
+            {
+              "$ref": "#/definitions/script-table"
+            }
+          ]
+        }
+      }
+    },
+    "plugins": {
+      "type": "object",
+      "description": "A hash of hashes representing plugins",
+      "patternProperties": {
+        "^[a-zA-Z-_.0-9]+$": {
+          "type": "object",
+          "patternProperties": {
+            "^[a-zA-Z-_.0-9]+$": {
+              "type": "string"
+            }
+          }
+        }
+      }
+    },
+    "urls": {
+      "type": "object",
+      "patternProperties": {
+        "^.+$": {
+          "type": "string",
+          "description": "The full url of the custom url."
+        }
+      }
+    }
+  },
+  "definitions": {
+    "authors": {
+      "type": "array",
+      "description": "List of authors that contributed to the package. This is typically the main maintainers, not the full list.",
+      "items": {
+        "type": "string"
+      }
+    },
+    "maintainers": {
+      "type": "array",
+      "description": "List of maintainers, other than the original author(s), that upkeep the package.",
+      "items": {
+        "type": "string"
+      }
+    },
+    "include-path": {
+      "type": "string",
+      "description": "Path to file or directory to include."
+    },
+    "package-format": {
+      "type": "string",
+      "enum": [
+        "sdist",
+        "wheel"
+      ],
+      "description": "A Python packaging format."
+    },
+    "package-formats": {
+      "oneOf": [
+        {
+          "$ref": "#/definitions/package-format"
+        },
+        {
+          "type": "array",
+          "items": {
+            "$ref": "#/definitions/package-format"
+          }
+        }
+      ],
+      "description": "The format(s) for which the package must be included."
+    },
+    "dependencies": {
+      "type": "object",
+      "patternProperties": {
+        "^[a-zA-Z-_.0-9]+$": {
+          "oneOf": [
+            {
+              "$ref": "#/definitions/dependency"
+            },
+            {
+              "$ref": "#/definitions/long-dependency"
+            },
+            {
+              "$ref": "#/definitions/git-dependency"
+            },
+            {
+              "$ref": "#/definitions/file-dependency"
+            },
+            {
+              "$ref": "#/definitions/path-dependency"
+            },
+            {
+              "$ref": "#/definitions/url-dependency"
+            },
+            {
+              "$ref": "#/definitions/multiple-constraints-dependency"
+            }
+          ]
+        }
+      }
+    },
+    "dependency": {
+      "type": "string",
+      "description": "The constraint of the dependency."
+    },
+    "long-dependency": {
+      "type": "object",
+      "required": [
+        "version"
+      ],
+      "additionalProperties": false,
+      "properties": {
+        "version": {
+          "type": "string",
+          "description": "The constraint of the dependency."
+        },
+        "python": {
+          "type": "string",
+          "description": "The python versions for which the dependency should be installed."
+        },
+        "platform": {
+          "type": "string",
+          "description": "The platform(s) for which the dependency should be installed."
+        },
+        "markers": {
+          "type": "string",
+          "description": "The PEP 508 compliant environment markers for which the dependency should be installed."
+        },
+        "allow-prereleases": {
+          "type": "boolean",
+          "description": "Whether the dependency allows prereleases or not."
+        },
+        "allows-prereleases": {
+          "type": "boolean",
+          "description": "Whether the dependency allows prereleases or not."
+        },
+        "optional": {
+          "type": "boolean",
+          "description": "Whether the dependency is optional or not."
+        },
+        "extras": {
+          "type": "array",
+          "description": "The required extras for this dependency.",
+          "items": {
+            "type": "string"
+          }
+        },
+        "source": {
+          "type": "string",
+          "description": "The exclusive source used to search for this dependency."
+        }
+      }
+    },
+    "git-dependency": {
+      "type": "object",
+      "required": [
+        "git"
+      ],
+      "additionalProperties": false,
+      "properties": {
+        "git": {
+          "type": "string",
+          "description": "The url of the git repository.",
+          "format": "uri"
+        },
+        "branch": {
+          "type": "string",
+          "description": "The branch to checkout."
+        },
+        "tag": {
+          "type": "string",
+          "description": "The tag to checkout."
+        },
+        "rev": {
+          "type": "string",
+          "description": "The revision to checkout."
+        },
+        "subdirectory": {
+          "type": "string",
+          "description": "The relative path to the directory where the package is located."
+        },
+        "python": {
+          "type": "string",
+          "description": "The python versions for which the dependency should be installed."
+        },
+        "platform": {
+          "type": "string",
+          "description": "The platform(s) for which the dependency should be installed."
+        },
+        "markers": {
+          "type": "string",
+          "description": "The PEP 508 compliant environment markers for which the dependency should be installed."
+        },
+        "allow-prereleases": {
+          "type": "boolean",
+          "description": "Whether the dependency allows prereleases or not."
+        },
+        "allows-prereleases": {
+          "type": "boolean",
+          "description": "Whether the dependency allows prereleases or not."
+        },
+        "optional": {
+          "type": "boolean",
+          "description": "Whether the dependency is optional or not."
+        },
+        "extras": {
+          "type": "array",
+          "description": "The required extras for this dependency.",
+          "items": {
+            "type": "string"
+          }
+        },
+        "develop": {
+          "type": "boolean",
+          "description": "Whether to install the dependency in development mode."
+        }
+      }
+    },
+    "file-dependency": {
+      "type": "object",
+      "required": [
+        "file"
+      ],
+      "additionalProperties": false,
+      "properties": {
+        "file": {
+          "type": "string",
+          "description": "The path to the file."
+        },
+        "python": {
+          "type": "string",
+          "description": "The python versions for which the dependency should be installed."
+        },
+        "platform": {
+          "type": "string",
+          "description": "The platform(s) for which the dependency should be installed."
+        },
+        "markers": {
+          "type": "string",
+          "description": "The PEP 508 compliant environment markers for which the dependency should be installed."
+        },
+        "optional": {
+          "type": "boolean",
+          "description": "Whether the dependency is optional or not."
+        },
+        "extras": {
+          "type": "array",
+          "description": "The required extras for this dependency.",
+          "items": {
+            "type": "string"
+          }
+        }
+      }
+    },
+    "path-dependency": {
+      "type": "object",
+      "required": [
+        "path"
+      ],
+      "additionalProperties": false,
+      "properties": {
+        "path": {
+          "type": "string",
+          "description": "The path to the dependency."
+        },
+        "python": {
+          "type": "string",
+          "description": "The python versions for which the dependency should be installed."
+        },
+        "platform": {
+          "type": "string",
+          "description": "The platform(s) for which the dependency should be installed."
+        },
+        "markers": {
+          "type": "string",
+          "description": "The PEP 508 compliant environment markers for which the dependency should be installed."
+        },
+        "optional": {
+          "type": "boolean",
+          "description": "Whether the dependency is optional or not."
+        },
+        "extras": {
+          "type": "array",
+          "description": "The required extras for this dependency.",
+          "items": {
+            "type": "string"
+          }
+        },
+        "develop": {
+          "type": "boolean",
+          "description": "Whether to install the dependency in development mode."
+        }
+      }
+    },
+    "url-dependency": {
+      "type": "object",
+      "required": [
+        "url"
+      ],
+      "additionalProperties": false,
+      "properties": {
+        "url": {
+          "type": "string",
+          "description": "The url to the file."
+        },
+        "python": {
+          "type": "string",
+          "description": "The python versions for which the dependency should be installed."
+        },
+        "platform": {
+          "type": "string",
+          "description": "The platform(s) for which the dependency should be installed."
+        },
+        "markers": {
+          "type": "string",
+          "description": "The PEP 508 compliant environment markers for which the dependency should be installed."
+        },
+        "optional": {
+          "type": "boolean",
+          "description": "Whether the dependency is optional or not."
+        },
+        "extras": {
+          "type": "array",
+          "description": "The required extras for this dependency.",
+          "items": {
+            "type": "string"
+          }
+        }
+      }
+    },
+    "multiple-constraints-dependency": {
+      "type": "array",
+      "minItems": 1,
+      "items": {
+        "oneOf": [
+          {
+            "$ref": "#/definitions/dependency"
+          },
+          {
+            "$ref": "#/definitions/long-dependency"
+          },
+          {
+            "$ref": "#/definitions/git-dependency"
+          },
+          {
+            "$ref": "#/definitions/file-dependency"
+          },
+          {
+            "$ref": "#/definitions/path-dependency"
+          },
+          {
+            "$ref": "#/definitions/url-dependency"
+          }
+        ]
+      }
+    },
+    "script-table": {
+      "type": "object",
+      "oneOf": [
+        {
+          "$ref": "#/definitions/extra-script-legacy"
+        },
+        {
+          "$ref": "#/definitions/extra-scripts"
+        }
+      ]
+    },
+    "script-legacy": {
+      "type": "string",
+      "description": "A simple script pointing to a callable object."
+    },
+    "extra-scripts": {
+      "type": "object",
+      "description": "Either a console entry point or a script file that'll be included in the distribution package.",
+      "additionalProperties": false,
+      "properties": {
+        "reference": {
+          "type": "string",
+          "description": "If type is file this is the relative path of the script file, if console it is the module name."
+        },
+        "type": {
+          "description": "Value can be either file or console.",
+          "type": "string",
+          "enum": [
+            "file",
+            "console"
+          ]
+        },
+        "extras": {
+          "type": "array",
+          "description": "The required extras for this script. Only applicable if type is console.",
+          "items": {
+            "type": "string"
+          }
+        }
+      },
+      "required": [
+        "reference",
+        "type"
+      ]
+    },
+    "extra-script-legacy": {
+      "type": "object",
+      "description": "A script that should be installed only if extras are activated.",
+      "additionalProperties": false,
+      "properties": {
+        "callable": {
+          "$ref": "#/definitions/script-legacy",
+          "description": "The entry point of the script. Deprecated in favour of reference."
+        },
+        "extras": {
+          "type": "array",
+          "description": "The required extras for this script.",
+          "items": {
+            "type": "string"
+          }
+        }
+      }
+    },
+    "build-script": {
+      "type": "string",
+      "description": "The python script file used to build extensions."
+    },
+    "build-config": {
+      "type": "object",
+      "description": "Build specific configurations.",
+      "additionalProperties": false,
+      "properties": {
+        "generate-setup-file": {
+          "type": "boolean",
+          "description": "Generate and include a setup.py file in sdist.",
+          "default": true
+        },
+        "script": {
+          "$ref": "#/definitions/build-script"
+        }
+      }
+    },
+    "build-section": {
+      "oneOf": [
+        {
+          "$ref": "#/definitions/build-script"
+        },
+        {
+          "$ref": "#/definitions/build-config"
+        }
+      ]
+    }
+  }
+}
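The dependencies definition accepts a bare constraint string or any of the richer tables above; a hypothetical config exercising both forms (all names and URLs are made up), checked with the validate_object helper from earlier:

    from poetry.core.json import validate_object

    config = {
        "name": "demo",
        "version": "0.1.0",
        "description": "demo",
        "authors": ["Jane Doe <jane@example.com>"],
        "dependencies": {
            "python": "^3.7",
            "requests": "^2.28",
            "mylib": {"git": "https://example.com/mylib.git", "branch": "main"},
        },
    }
    assert validate_object(config, "poetry-schema") == []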
diff --git a/vendor/poetry-core/src/poetry/core/masonry/__init__.py b/vendor/poetry-core/src/poetry/core/masonry/__init__.py
new file mode 100644
index 00000000..943204ad
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/masonry/__init__.py
@@ -0,0 +1,8 @@
+"""
+This module handles the packaging and publishing
+of python projects.
+
+A lot of the code used here has been taken from
+`flit <https://github.com/takluyver/flit>`__ and adapted
+to work with the poetry codebase, so kudos to them for showing the way.
+"""
diff --git a/vendor/poetry-core/src/poetry/core/masonry/api.py b/vendor/poetry-core/src/poetry/core/masonry/api.py
new file mode 100644
index 00000000..2ed4ed5f
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/masonry/api.py
@@ -0,0 +1,92 @@
+"""
+PEP-517 compliant buildsystem API
+"""
+from __future__ import annotations
+
+import logging
+
+from pathlib import Path
+from typing import Any
+
+from poetry.core.factory import Factory
+from poetry.core.masonry.builders.sdist import SdistBuilder
+from poetry.core.masonry.builders.wheel import WheelBuilder
+
+
+log = logging.getLogger(__name__)
+
+
+def get_requires_for_build_wheel(
+    config_settings: dict[str, Any] | None = None,
+) -> list[str]:
+    """
+    Returns an additional list of requirements for building, as PEP508 strings,
+    above and beyond those specified in the pyproject.toml file.
+
+    This implementation is optional. At the moment it only returns an empty list, which would be the same as if
+    not defined. So this is just here for completeness, for future implementation.
+    """
+
+    return []
+
+
+# For now, we require all dependencies to build either a wheel or an sdist.
+get_requires_for_build_sdist = get_requires_for_build_wheel
+
+
+def prepare_metadata_for_build_wheel(
+    metadata_directory: str, config_settings: dict[str, Any] | None = None
+) -> str:
+    poetry = Factory().create_poetry(Path(".").resolve(), with_groups=False)
+    builder = WheelBuilder(poetry)
+
+    dist_info = Path(metadata_directory, builder.dist_info)
+    dist_info.mkdir(parents=True, exist_ok=True)
+
+    if "scripts" in poetry.local_config or "plugins" in poetry.local_config:
+        with (dist_info / "entry_points.txt").open("w", encoding="utf-8") as f:
+            builder._write_entry_points(f)
+
+    with (dist_info / "WHEEL").open("w", encoding="utf-8") as f:
+        builder._write_wheel_file(f)
+
+    with (dist_info / "METADATA").open("w", encoding="utf-8") as f:
+        builder._write_metadata_file(f)
+
+    return dist_info.name
+
+
+def build_wheel(
+    wheel_directory: str,
+    config_settings: dict[str, Any] | None = None,
+    metadata_directory: str | None = None,
+) -> str:
+    """Builds a wheel, places it in wheel_directory"""
+    poetry = Factory().create_poetry(Path(".").resolve(), with_groups=False)
+
+    return WheelBuilder.make_in(poetry, Path(wheel_directory))
+
+
+def build_sdist(
+    sdist_directory: str, config_settings: dict[str, Any] | None = None
+) -> str:
+    """Builds an sdist, places it in sdist_directory"""
+    poetry = Factory().create_poetry(Path(".").resolve(), with_groups=False)
+
+    path = SdistBuilder(poetry).build(Path(sdist_directory))
+
+    return path.name
+
+
+def build_editable(
+    wheel_directory: str,
+    config_settings: dict[str, Any] | None = None,
+    metadata_directory: str | None = None,
+) -> str:
+    poetry = Factory().create_poetry(Path(".").resolve(), with_groups=False)
+
+    return WheelBuilder.make_in(poetry, Path(wheel_directory), editable=True)
+
+
+get_requires_for_build_editable = get_requires_for_build_wheel
+prepare_metadata_for_build_editable = prepare_metadata_for_build_wheel
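A PEP 517 frontend such as pip or build normally drives these hooks, but they can also be exercised directly; a sketch, assuming it runs from a project root with a valid pyproject.toml:

    import tempfile

    from poetry.core.masonry import api

    with tempfile.TemporaryDirectory() as dist:
        print(api.build_sdist(dist))  # file name of the produced sdist
        print(api.build_wheel(dist))  # file name of the produced wheel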
diff --git a/vendor/poetry-core/src/poetry/core/masonry/builder.py b/vendor/poetry-core/src/poetry/core/masonry/builder.py
new file mode 100644
index 00000000..30428858
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/masonry/builder.py
@@ -0,0 +1,33 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from poetry.core.poetry import Poetry
+
+
+class Builder:
+    def __init__(self, poetry: Poetry) -> None:
+        from poetry.core.masonry.builders.sdist import SdistBuilder
+        from poetry.core.masonry.builders.wheel import WheelBuilder
+
+        self._poetry = poetry
+
+        self._formats = {
+            "sdist": SdistBuilder,
+            "wheel": WheelBuilder,
+        }
+
+    def build(self, fmt: str, executable: str | Path | None = None) -> None:
+        if fmt in self._formats:
+            builders = [self._formats[fmt]]
+        elif fmt == "all":
+            builders = list(self._formats.values())
+        else:
+            raise ValueError(f"Invalid format: {fmt}")
+
+        for builder in builders:
+            builder(self._poetry, executable=executable).build()
diff --git a/vendor/poetry/poetry/console/__init__.py b/vendor/poetry-core/src/poetry/core/masonry/builders/__init__.py
similarity index 100%
rename from vendor/poetry/poetry/console/__init__.py
rename to vendor/poetry-core/src/poetry/core/masonry/builders/__init__.py
diff --git a/vendor/poetry-core/src/poetry/core/masonry/builders/builder.py b/vendor/poetry-core/src/poetry/core/masonry/builders/builder.py
new file mode 100644
index 00000000..0a546ae5
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/masonry/builders/builder.py
@@ -0,0 +1,402 @@
+from __future__ import annotations
+
+import logging
+import re
+import sys
+import warnings
+
+from collections import defaultdict
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from poetry.core.poetry import Poetry
+
+
+AUTHOR_REGEX = re.compile(r"(?u)^(?P<name>[- .,\w\d'’\"()]+) <(?P<email>.+?)>$")
+
+METADATA_BASE = """\
+Metadata-Version: 2.1
+Name: {name}
+Version: {version}
+Summary: {summary}
+"""
+
+logger = logging.getLogger(__name__)
+
+
+class Builder:
+    format: str | None = None
+
+    def __init__(
+        self,
+        poetry: Poetry,
+        ignore_packages_formats: bool = False,
+        executable: Path | None = None,
+    ) -> None:
+        from poetry.core.masonry.metadata import Metadata
+        from poetry.core.masonry.utils.module import Module
+
+        self._poetry = poetry
+        self._package = poetry.package
+        self._path: Path = poetry.file.parent
+        self._excluded_files: set[str] | None = None
+        self._executable = Path(executable or sys.executable)
+
+        packages = []
+        for p in self._package.packages:
+            formats = p.get("format") or None
+
+            # Default to including the package in both sdist & wheel
+            # if the `format` key is not provided in the inline include table.
+            if formats is None:
+                formats = ["sdist", "wheel"]
+
+            if not isinstance(formats, list):
+                formats = [formats]
+
+            if (
+                formats
+                and self.format
+                and self.format not in formats
+                and not ignore_packages_formats
+            ):
+                continue
+
+            packages.append(p)
+
+        includes = []
+        for include in self._package.include:
+            formats = include.get("format", [])
+
+            if (
+                formats
+                and self.format
+                and self.format not in formats
+                and not ignore_packages_formats
+            ):
+                continue
+
+            includes.append(include)
+
+        self._module = Module(
+            self._package.name,
+            self._path.as_posix(),
+            packages=packages,
+            includes=includes,
+        )
+
+        self._meta = Metadata.from_package(self._package)
+
+    @property
+    def executable(self) -> Path:
+        return self._executable
+
+    @property
+    def default_target_dir(self) -> Path:
+        return self._path / "dist"
+
+    def build(self, target_dir: Path | None) -> Path:
+        raise NotImplementedError()
+
+    def find_excluded_files(self, fmt: str | None = None) -> set[str]:
+        if self._excluded_files is None:
+            from poetry.core.vcs import get_vcs
+
+            # Checking VCS
+            vcs = get_vcs(self._path)
+            if not vcs:
+                vcs_ignored_files = set()
+            else:
+                vcs_ignored_files = set(vcs.get_ignored_files())
+
+            explicitely_excluded = set()
+            for excluded_glob in self._package.exclude:
+                for excluded in self._path.glob(str(excluded_glob)):
+                    explicitely_excluded.add(
+                        Path(excluded).relative_to(self._path).as_posix()
+                    )
+
+            explicitely_included = set()
+            for inc in self._package.include:
+                if fmt and inc["format"] and fmt not in inc["format"]:
+                    continue
+
+                included_glob = inc["path"]
+                for included in self._path.glob(str(included_glob)):
+                    explicitely_included.add(
+                        Path(included).relative_to(self._path).as_posix()
+                    )
+
+            ignored = (vcs_ignored_files | explicitely_excluded) - explicitely_included
+            result = set()
+            for file in ignored:
+                result.add(file)
+
+            # The list of excluded files might be big and we will do a lot
+            # of containment checks (x in excluded).
+            # Returning a set makes those checks much faster.
+            self._excluded_files = result
+
+        return self._excluded_files
+
+    def is_excluded(self, filepath: str | Path) -> bool:
+        exclude_path = Path(filepath)
+
+        while True:
+            if exclude_path.as_posix() in self.find_excluded_files(fmt=self.format):
+                return True
+
+            if len(exclude_path.parts) > 1:
+                exclude_path = exclude_path.parent
+            else:
+                break
+
+        return False
+
+    def find_files_to_add(self, exclude_build: bool = True) -> set[BuildIncludeFile]:
+        """
+        Finds all files to add to the tarball
+        """
+        from poetry.core.masonry.utils.package_include import PackageInclude
+
+        to_add = set()
+
+        for include in self._module.includes:
+            include.refresh()
+            formats = include.formats or ["sdist"]
+
+            for file in include.elements:
+                if "__pycache__" in str(file):
+                    continue
+
+                if (
+                    isinstance(include, PackageInclude)
+                    and include.source
+                    and self.format == "wheel"
+                ):
+                    source_root = include.base
+                else:
+                    source_root = self._path
+
+                if file.is_dir():
+                    if self.format in formats:
+                        for current_file in file.glob("**/*"):
+                            include_file = BuildIncludeFile(
+                                path=current_file,
+                                project_root=self._path,
+                                source_root=source_root,
+                            )
+
+                            if not current_file.is_dir() and not self.is_excluded(
+                                include_file.relative_to_source_root()
+                            ):
+                                to_add.add(include_file)
+                    continue
+
+                include_file = BuildIncludeFile(
+                    path=file, project_root=self._path, source_root=source_root
+                )
+
+                if self.is_excluded(
+                    include_file.relative_to_project_root()
+                ) and isinstance(include, PackageInclude):
+                    continue
+
+                if file.suffix == ".pyc":
+                    continue
+
+                logger.debug(f"Adding: {str(file)}")
+                to_add.add(include_file)
+
+        # add build script if it is specified and explicitly required
+        if self._package.build_script and not exclude_build:
+            to_add.add(
+                BuildIncludeFile(
+                    path=self._package.build_script,
+                    project_root=self._path,
+                    source_root=self._path,
+                )
+            )
+
+        return to_add
+
+    def get_metadata_content(self) -> str:
+        content = METADATA_BASE.format(
+            name=self._meta.name,
+            version=self._meta.version,
+            summary=str(self._meta.summary),
+        )
+
+        # Optional fields
+        if self._meta.home_page:
+            content += f"Home-page: {self._meta.home_page}\n"
+
+        if self._meta.license:
+            content += f"License: {self._meta.license}\n"
+
+        if self._meta.keywords:
+            content += f"Keywords: {self._meta.keywords}\n"
+
+        if self._meta.author:
+            content += f"Author: {str(self._meta.author)}\n"
+
+        if self._meta.author_email:
+            content += f"Author-email: {str(self._meta.author_email)}\n"
+
+        if self._meta.maintainer:
+            content += f"Maintainer: {str(self._meta.maintainer)}\n"
+
+        if self._meta.maintainer_email:
+            content += f"Maintainer-email: {str(self._meta.maintainer_email)}\n"
+
+        if self._meta.requires_python:
+            content += f"Requires-Python: {self._meta.requires_python}\n"
+
+        for classifier in self._meta.classifiers:
+            content += f"Classifier: {classifier}\n"
+
+        for extra in sorted(self._meta.provides_extra):
+            content += f"Provides-Extra: {extra}\n"
+
+        for dep in sorted(self._meta.requires_dist):
+            content += f"Requires-Dist: {dep}\n"
+
+        for url in sorted(self._meta.project_urls, key=lambda u: u[0]):
+            content += f"Project-URL: {str(url)}\n"
+
+        if self._meta.description_content_type:
+            content += (
+                f"Description-Content-Type: {self._meta.description_content_type}\n"
+            )
+
+        if self._meta.description is not None:
+            content += "\n" + str(self._meta.description) + "\n"
+
+        return content
+
+    def convert_entry_points(self) -> dict[str, list[str]]:
+        result = defaultdict(list)
+
+        # Scripts -> Entry points
+        for name, specification in self._poetry.local_config.get("scripts", {}).items():
+            if isinstance(specification, str):
+                # TODO: deprecate this in favour of reference
+                specification = {"reference": specification, "type": "console"}
+
+            if "callable" in specification:
+                warnings.warn(
+                    f"Use of callable in script specification ({name}) is deprecated."
+                    " Use reference instead.",
+                    DeprecationWarning,
+                )
+                specification = {
+                    "reference": specification["callable"],
+                    "type": "console",
+                }
+
+            if specification.get("type") != "console":
+                continue
+
+            extras = specification.get("extras", [])
+            extras = f"[{', '.join(extras)}]" if extras else ""
+            reference = specification.get("reference")
+
+            if reference:
+                result["console_scripts"].append(f"{name} = {reference}{extras}")
+
+        # Plugins -> entry points
+        plugins = self._poetry.local_config.get("plugins", {})
+        for groupname, group in plugins.items():
+            for name, specification in sorted(group.items()):
+                result[groupname].append(f"{name} = {specification}")
+
+        for groupname in result:
+            result[groupname] = sorted(result[groupname])
+
+        return dict(result)
+
+    def convert_script_files(self) -> list[Path]:
+        script_files: list[Path] = []
+
+        for name, specification in self._poetry.local_config.get("scripts", {}).items():
+            if isinstance(specification, dict) and specification.get("type") == "file":
+                source = specification["reference"]
+
+                if Path(source).is_absolute():
+                    raise RuntimeError(
+                        f"{source} in {name} is an absolute path. Expected relative"
+                        " path."
+                    )
+
+                abs_path = Path.joinpath(self._path, source)
+
+                if not abs_path.exists():
+                    raise RuntimeError(
+                        f"{abs_path} in script specification ({name}) is not found."
+                    )
+
+                if not abs_path.is_file():
+                    raise RuntimeError(
+                        f"{abs_path} in script specification ({name}) is not a file."
+                    )
+
+                script_files.append(abs_path)
+
+        return script_files
+
+    @classmethod
+    def convert_author(cls, author: str) -> dict[str, str]:
+        m = AUTHOR_REGEX.match(author)
+        if m is None:
+            raise RuntimeError(f"{author} does not match regex")
+
+        name = m.group("name")
+        email = m.group("email")
+
+        return {"name": name, "email": email}
+
+
+class BuildIncludeFile:
+    def __init__(
+        self,
+        path: Path | str,
+        project_root: Path | str,
+        source_root: Path | str | None = None,
+    ) -> None:
+        """
+        :param project_root: the full path of the project's root
+        :param path: a full path to the file to be included
+        :param source_root: the root path to resolve to
+        """
+        self.path = Path(path)
+        self.project_root = Path(project_root).resolve()
+        self.source_root = None if not source_root else Path(source_root).resolve()
+        if not self.path.is_absolute() and self.source_root:
+            self.path = self.source_root / self.path
+        else:
+            self.path = self.path
+
+        self.path = self.path.resolve()
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, BuildIncludeFile):
+            return False
+
+        return self.path == other.path
+
+    def __hash__(self) -> int:
+        return hash(self.path)
+
+    def __repr__(self) -> str:
+        return str(self.path)
+
+    def relative_to_project_root(self) -> Path:
+        return self.path.relative_to(self.project_root)
+
+    def relative_to_source_root(self) -> Path:
+        if self.source_root is not None:
+            return self.path.relative_to(self.source_root)
+
+        return self.path
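
`convert_author` relies on the `name`/`email` named groups of `AUTHOR_REGEX` (restored above). A quick stdlib-only illustration of the accepted `Name <email>` shape:

```python
import re

# Same pattern as AUTHOR_REGEX in the hunk above.
AUTHOR_REGEX = re.compile(r"(?u)^(?P<name>[- .,\w\d'’\"()]+) <(?P<email>.+?)>$")

m = AUTHOR_REGEX.match("Sébastien Eustace <sebastien@eustace.io>")
assert m is not None
print({"name": m.group("name"), "email": m.group("email")})
```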
diff --git a/vendor/poetry-core/src/poetry/core/masonry/builders/sdist.py b/vendor/poetry-core/src/poetry/core/masonry/builders/sdist.py
new file mode 100644
index 00000000..7aa1e331
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/masonry/builders/sdist.py
@@ -0,0 +1,421 @@
+from __future__ import annotations
+
+import logging
+import os
+import re
+import tarfile
+
+from collections import defaultdict
+from contextlib import contextmanager
+from copy import copy
+from gzip import GzipFile
+from io import BytesIO
+from pathlib import Path
+from posixpath import join as pjoin
+from pprint import pformat
+from typing import TYPE_CHECKING
+from typing import Iterator
+
+from poetry.core.masonry.builders.builder import Builder
+from poetry.core.masonry.builders.builder import BuildIncludeFile
+
+
+if TYPE_CHECKING:
+    from tarfile import TarInfo
+
+    from poetry.core.masonry.utils.package_include import PackageInclude
+    from poetry.core.packages.dependency import Dependency
+    from poetry.core.packages.project_package import ProjectPackage
+
+SETUP = """\
+# -*- coding: utf-8 -*-
+from setuptools import setup
+
+{before}
+setup_kwargs = {{
+    'name': {name!r},
+    'version': {version!r},
+    'description': {description!r},
+    'long_description': {long_description!r},
+    'author': {author!r},
+    'author_email': {author_email!r},
+    'maintainer': {maintainer!r},
+    'maintainer_email': {maintainer_email!r},
+    'url': {url!r},
+    {extra}
+}}
+{after}
+
+setup(**setup_kwargs)
+"""
+
+logger = logging.getLogger(__name__)
+
+
+class SdistBuilder(Builder):
+    format = "sdist"
+
+    def build(
+        self,
+        target_dir: Path | None = None,
+    ) -> Path:
+        logger.info("Building sdist")
+        target_dir = target_dir or self.default_target_dir
+
+        if not target_dir.exists():
+            target_dir.mkdir(parents=True)
+
+        target = target_dir / f"{self._package.pretty_name}-{self._meta.version}.tar.gz"
+        gz = GzipFile(target.as_posix(), mode="wb", mtime=0)
+        tar = tarfile.TarFile(
+            target.as_posix(), mode="w", fileobj=gz, format=tarfile.PAX_FORMAT
+        )
+
+        try:
+            tar_dir = f"{self._package.pretty_name}-{self._meta.version}"
+
+            files_to_add = self.find_files_to_add(exclude_build=False)
+
+            for file in sorted(files_to_add, key=lambda x: x.relative_to_source_root()):
+                tar_info = tar.gettarinfo(
+                    str(file.path),
+                    arcname=pjoin(tar_dir, str(file.relative_to_source_root())),
+                )
+                tar_info = self.clean_tarinfo(tar_info)
+
+                if tar_info.isreg():
+                    with file.path.open("rb") as f:
+                        tar.addfile(tar_info, f)
+                else:
+                    tar.addfile(tar_info)  # Symlinks and other non-regular members
+
+            if self._poetry.package.build_should_generate_setup():
+                setup = self.build_setup()
+                tar_info = tarfile.TarInfo(pjoin(tar_dir, "setup.py"))
+                tar_info.size = len(setup)
+                tar_info.mtime = 0
+                tar_info = self.clean_tarinfo(tar_info)
+                tar.addfile(tar_info, BytesIO(setup))
+
+            pkg_info = self.build_pkg_info()
+
+            tar_info = tarfile.TarInfo(pjoin(tar_dir, "PKG-INFO"))
+            tar_info.size = len(pkg_info)
+            tar_info.mtime = 0
+            tar_info = self.clean_tarinfo(tar_info)
+            tar.addfile(tar_info, BytesIO(pkg_info))
+        finally:
+            tar.close()
+            gz.close()
+
+        logger.info(f"Built {target.name}")
+        return target
+
+    def build_setup(self) -> bytes:
+        from poetry.core.masonry.utils.package_include import PackageInclude
+
+        before, extra, after = [], [], []
+        package_dir: dict[str, str] = {}
+
+        # If we have a build script, use it
+        if self._package.build_script:
+            import_name = ".".join(
+                Path(self._package.build_script).with_suffix("").parts
+            )
+            after += [f"from {import_name} import *", "build(setup_kwargs)"]
+
+        modules = []
+        packages = []
+        package_data = {}
+        for include in self._module.includes:
+            if include.formats and "sdist" not in include.formats:
+                continue
+
+            if isinstance(include, PackageInclude):
+                if include.is_package():
+                    pkg_dir, _packages, _package_data = self.find_packages(include)
+
+                    if pkg_dir is not None:
+                        pkg_root = os.path.relpath(pkg_dir, str(self._path))
+                        if "" in package_dir:
+                            package_dir.update(
+                                (p, os.path.join(pkg_root, p.replace(".", "/")))
+                                for p in _packages
+                            )
+                        else:
+                            package_dir[""] = pkg_root
+
+                    packages += [p for p in _packages if p not in packages]
+                    package_data.update(_package_data)
+                else:
+                    module = include.elements[0].relative_to(include.base).stem
+
+                    if include.source is not None:
+                        package_dir[""] = str(include.base.relative_to(self._path))
+
+                    if module not in modules:
+                        modules.append(module)
+            else:
+                pass
+
+        if package_dir:
+            before.append(f"package_dir = \\\n{pformat(package_dir)}\n")
+            extra.append("'package_dir': package_dir,")
+
+        if packages:
+            before.append(f"packages = \\\n{pformat(sorted(packages))}\n")
+            extra.append("'packages': packages,")
+
+        if package_data:
+            before.append(f"package_data = \\\n{pformat(package_data)}\n")
+            extra.append("'package_data': package_data,")
+
+        if modules:
+            before.append(f"modules = \\\n{pformat(modules)}")
+            extra.append("'py_modules': modules,")
+
+        dependencies, extras = self.convert_dependencies(
+            self._package, self._package.requires
+        )
+        if dependencies:
+            before.append(f"install_requires = \\\n{pformat(sorted(dependencies))}\n")
+            extra.append("'install_requires': install_requires,")
+
+        if extras:
+            before.append(f"extras_require = \\\n{pformat(extras)}\n")
+            extra.append("'extras_require': extras_require,")
+
+        entry_points = self.convert_entry_points()
+        if entry_points:
+            before.append(f"entry_points = \\\n{pformat(entry_points)}\n")
+            extra.append("'entry_points': entry_points,")
+
+        script_files = self.convert_script_files()
+        if script_files:
+            rel_paths = [str(p.relative_to(self._path)) for p in script_files]
+            before.append(f"scripts = \\\n{pformat(rel_paths)}\n")
+            extra.append("'scripts': scripts,")
+
+        if self._package.python_versions != "*":
+            python_requires = self._meta.requires_python
+
+            extra.append(f"'python_requires': {python_requires!r},")
+
+        return SETUP.format(
+            before="\n".join(before),
+            name=str(self._meta.name),
+            version=str(self._meta.version),
+            description=str(self._meta.summary),
+            long_description=str(self._meta.description),
+            author=str(self._meta.author),
+            author_email=str(self._meta.author_email),
+            maintainer=str(self._meta.maintainer),
+            maintainer_email=str(self._meta.maintainer_email),
+            url=str(self._meta.home_page),
+            extra="\n    ".join(extra),
+            after="\n".join(after),
+        ).encode()
+
+    @contextmanager
+    def setup_py(self) -> Iterator[Path]:
+        setup = self._path / "setup.py"
+        has_setup = setup.exists()
+
+        if has_setup:
+            logger.warning("A setup.py file already exists. Using it.")
+        else:
+            with setup.open("w", encoding="utf-8") as f:
+                f.write(self.build_setup().decode())
+
+        yield setup
+
+        if not has_setup:
+            setup.unlink()
+
+    def build_pkg_info(self) -> bytes:
+        return self.get_metadata_content().encode()
+
+    def find_packages(
+        self, include: PackageInclude
+    ) -> tuple[str | None, list[str], dict[str, list[str]]]:
+        """
+        Discover subpackages and data.
+
+        It also retrieves necessary files.
+        """
+        pkgdir = None
+        if include.source is not None:
+            pkgdir = str(include.base)
+
+        base = str(include.elements[0].parent)
+
+        pkg_name = include.package
+        pkg_data: dict[str, list[str]] = defaultdict(list)
+        # Undocumented distutils feature:
+        # the empty string matches all package names
+        pkg_data[""].append("*")
+        packages = [pkg_name]
+        subpkg_paths = set()
+
+        def find_nearest_pkg(rel_path: str) -> tuple[str, str]:
+            parts = rel_path.split(os.sep)
+            for i in reversed(range(1, len(parts))):
+                ancestor = "/".join(parts[:i])
+                if ancestor in subpkg_paths:
+                    pkg = ".".join([pkg_name] + parts[:i])
+                    return pkg, "/".join(parts[i:])
+
+            # Relative to the top-level package
+            return pkg_name, Path(rel_path).as_posix()
+
+        for path, _dirnames, filenames in os.walk(str(base), topdown=True):
+            if os.path.basename(path) == "__pycache__":
+                continue
+
+            from_top_level = os.path.relpath(path, base)
+            if from_top_level == ".":
+                continue
+
+            is_subpkg = any(
+                [filename.endswith(".py") for filename in filenames]
+            ) and not all(
+                [
+                    self.is_excluded(Path(path, filename).relative_to(self._path))
+                    for filename in filenames
+                    if filename.endswith(".py")
+                ]
+            )
+            if is_subpkg:
+                subpkg_paths.add(from_top_level)
+                parts = from_top_level.split(os.sep)
+                packages.append(".".join([pkg_name] + parts))
+            else:
+                pkg, from_nearest_pkg = find_nearest_pkg(from_top_level)
+
+                data_elements = [
+                    f.relative_to(self._path)
+                    for f in Path(path).glob("*")
+                    if not f.is_dir()
+                ]
+
+                data = [e for e in data_elements if not self.is_excluded(e)]
+                if not data:
+                    continue
+
+                if len(data) == len(data_elements):
+                    pkg_data[pkg].append(pjoin(from_nearest_pkg, "*"))
+                else:
+                    for d in data:
+                        if d.is_dir():
+                            continue
+
+                        pkg_data[pkg] += [pjoin(from_nearest_pkg, d.name)]
+
+        # Sort values in pkg_data
+        pkg_data = {k: sorted(v) for (k, v) in pkg_data.items() if v}
+
+        return pkgdir, sorted(packages), pkg_data
+
+    def find_files_to_add(self, exclude_build: bool = False) -> set[BuildIncludeFile]:
+        to_add = super().find_files_to_add(exclude_build)
+
+        # add any additional files, starting with all LICENSE files
+        additional_files = set(self._path.glob("LICENSE*"))
+
+        # add script files
+        additional_files.update(self.convert_script_files())
+
+        # Include project files
+        additional_files.add(Path("pyproject.toml"))
+
+        # add readme if it is specified
+        if "readme" in self._poetry.local_config:
+            additional_files.add(self._poetry.local_config["readme"])
+
+        for additional_file in additional_files:
+            file = BuildIncludeFile(
+                path=additional_file, project_root=self._path, source_root=self._path
+            )
+            if file.path.exists():
+                logger.debug(f"Adding: {file.relative_to_source_root()}")
+                to_add.add(file)
+
+        return to_add
+
+    @classmethod
+    def convert_dependencies(
+        cls, package: ProjectPackage, dependencies: list[Dependency]
+    ) -> tuple[list[str], dict[str, list[str]]]:
+        main = []
+        extras = defaultdict(list)
+        req_regex = re.compile(r"^(.+) \((.+)\)$")
+
+        for dependency in dependencies:
+            if dependency.is_optional():
+                for extra_name, reqs in package.extras.items():
+                    for req in reqs:
+                        if req.name == dependency.name:
+                            requirement = dependency.to_pep_508(with_extras=False)
+                            if ";" in requirement:
+                                requirement, conditions = requirement.split(";")
+
+                                requirement = requirement.strip()
+                                if req_regex.match(requirement):
+                                    requirement = req_regex.sub(
+                                        "\\1\\2", requirement.strip()
+                                    )
+
+                                extras[extra_name + ":" + conditions.strip()].append(
+                                    requirement
+                                )
+
+                                continue
+
+                            requirement = requirement.strip()
+                            if req_regex.match(requirement):
+                                requirement = req_regex.sub(
+                                    "\\1\\2", requirement.strip()
+                                )
+                            extras[extra_name].append(requirement)
+                continue
+
+            requirement = dependency.to_pep_508()
+            if ";" in requirement:
+                requirement, conditions = requirement.split(";")
+
+                requirement = requirement.strip()
+                if req_regex.match(requirement):
+                    requirement = req_regex.sub("\\1\\2", requirement.strip())
+
+                extras[":" + conditions.strip()].append(requirement)
+
+                continue
+
+            requirement = requirement.strip()
+            if req_regex.match(requirement):
+                requirement = req_regex.sub("\\1\\2", requirement.strip())
+
+            main.append(requirement)
+
+        return main, dict(extras)
+
+    @classmethod
+    def clean_tarinfo(cls, tar_info: TarInfo) -> TarInfo:
+        """
+        Clean metadata from a TarInfo object to make it more reproducible.
+
+            - Set uid & gid to 0
+            - Set uname and gname to ""
+            - Normalise permissions to 644 or 755
+        """
+        from poetry.core.masonry.utils.helpers import normalize_file_permissions
+
+        ti = copy(tar_info)
+        ti.uid = 0
+        ti.gid = 0
+        ti.uname = ""
+        ti.gname = ""
+        ti.mode = normalize_file_permissions(ti.mode)
+
+        return ti
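
`clean_tarinfo`, together with the `mtime=0` passed to `GzipFile`, is what makes two sdist builds of the same tree byte-identical. A stdlib-only sketch of the same normalisation applied to a fresh `TarInfo`:

```python
# Stdlib-only mirror of clean_tarinfo, for illustration.
import tarfile
from copy import copy

def normalize_file_permissions(st_mode: int) -> int:
    # Same rule as masonry/utils/helpers.py: force 644, then add the
    # execute bits (-> 755) if the owner-execute bit was set.
    new_mode = (st_mode | 0o644) & ~0o133
    if st_mode & 0o100:
        new_mode |= 0o111
    return new_mode

ti = tarfile.TarInfo("pkg/module.py")
ti.uid = ti.gid = 1000
ti.uname = ti.gname = "alice"
ti.mode = 0o775

clean = copy(ti)
clean.uid = clean.gid = 0
clean.uname = clean.gname = ""
clean.mode = normalize_file_permissions(clean.mode)
print(oct(clean.mode))  # 0o755
```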
diff --git a/vendor/poetry-core/src/poetry/core/masonry/builders/wheel.py b/vendor/poetry-core/src/poetry/core/masonry/builders/wheel.py
new file mode 100644
index 00000000..01533e7b
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/masonry/builders/wheel.py
@@ -0,0 +1,399 @@
+from __future__ import annotations
+
+import contextlib
+import csv
+import hashlib
+import logging
+import os
+import shutil
+import stat
+import subprocess
+import tempfile
+import zipfile
+
+from base64 import urlsafe_b64encode
+from io import StringIO
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Iterator
+from typing import TextIO
+
+from packaging.tags import sys_tags
+
+from poetry.core import __version__
+from poetry.core.masonry.builders.builder import Builder
+from poetry.core.masonry.builders.sdist import SdistBuilder
+from poetry.core.masonry.utils.helpers import escape_name
+from poetry.core.masonry.utils.helpers import escape_version
+from poetry.core.masonry.utils.helpers import normalize_file_permissions
+from poetry.core.masonry.utils.package_include import PackageInclude
+from poetry.core.semver.helpers import parse_constraint
+
+
+if TYPE_CHECKING:
+    from poetry.core.poetry import Poetry
+
+wheel_file_template = """\
+Wheel-Version: 1.0
+Generator: poetry-core {version}
+Root-Is-Purelib: {pure_lib}
+Tag: {tag}
+"""
+
+logger = logging.getLogger(__name__)
+
+
+class WheelBuilder(Builder):
+    format = "wheel"
+
+    def __init__(
+        self,
+        poetry: Poetry,
+        original: Path | None = None,
+        executable: Path | None = None,
+        editable: bool = False,
+    ) -> None:
+        super().__init__(poetry, executable=executable)
+
+        self._records: list[tuple[str, str, int]] = []
+        self._original_path = self._path
+        if original:
+            self._original_path = original.parent
+        self._editable = editable
+
+    @classmethod
+    def make_in(
+        cls,
+        poetry: Poetry,
+        directory: Path | None = None,
+        original: Path | None = None,
+        executable: Path | None = None,
+        editable: bool = False,
+    ) -> str:
+        wb = WheelBuilder(
+            poetry,
+            original=original,
+            executable=executable,
+            editable=editable,
+        )
+        wb.build(target_dir=directory)
+
+        return wb.wheel_filename
+
+    @classmethod
+    def make(cls, poetry: Poetry, executable: Path | None = None) -> None:
+        """Build a wheel in the dist/ directory, and optionally upload it."""
+        cls.make_in(poetry, executable=executable)
+
+    def build(
+        self,
+        target_dir: Path | None = None,
+    ) -> Path:
+        logger.info("Building wheel")
+
+        target_dir = target_dir or self.default_target_dir
+        if not target_dir.exists():
+            target_dir.mkdir()
+
+        (fd, temp_path) = tempfile.mkstemp(suffix=".whl")
+
+        st_mode = os.stat(temp_path).st_mode
+        new_mode = normalize_file_permissions(st_mode)
+        os.chmod(temp_path, new_mode)
+
+        with os.fdopen(fd, "w+b") as fd_file, zipfile.ZipFile(
+            fd_file, mode="w", compression=zipfile.ZIP_DEFLATED
+        ) as zip_file:
+            if not self._editable:
+                if not self._poetry.package.build_should_generate_setup():
+                    self._build(zip_file)
+                    self._copy_module(zip_file)
+                else:
+                    self._copy_module(zip_file)
+                    self._build(zip_file)
+            else:
+                self._build(zip_file)
+                self._add_pth(zip_file)
+
+            self._copy_file_scripts(zip_file)
+            self._write_metadata(zip_file)
+            self._write_record(zip_file)
+
+        wheel_path = target_dir / self.wheel_filename
+        if wheel_path.exists():
+            wheel_path.unlink()
+        shutil.move(temp_path, str(wheel_path))
+
+        logger.info(f"Built {self.wheel_filename}")
+        return wheel_path
+
+    def _add_pth(self, wheel: zipfile.ZipFile) -> None:
+        paths = set()
+        for include in self._module.includes:
+            if isinstance(include, PackageInclude) and (
+                include.is_module() or include.is_package()
+            ):
+                paths.add(include.base.resolve().as_posix())
+
+        content = ""
+        for path in paths:
+            content += path + os.linesep
+
+        pth_file = Path(self._module.name).with_suffix(".pth")
+
+        with self._write_to_zip(wheel, str(pth_file)) as f:
+            f.write(content)
+
+    def _build(self, wheel: zipfile.ZipFile) -> None:
+        if self._package.build_script:
+            if not self._poetry.package.build_should_generate_setup():
+                # Since we have a build script but no setup.py generation is required,
+                # we assume that the build script will build and copy the files
+                # directly.
+                # That way they will be picked up when adding files to the wheel.
+                current_path = os.getcwd()
+                try:
+                    os.chdir(str(self._path))
+                    self._run_build_script(self._package.build_script)
+                finally:
+                    os.chdir(current_path)
+            else:
+                with SdistBuilder(poetry=self._poetry).setup_py() as setup:
+                    # We need to place ourselves in the temporary
+                    # directory in order to build the package
+                    current_path = os.getcwd()
+                    try:
+                        os.chdir(str(self._path))
+                        self._run_build_command(setup)
+                    finally:
+                        os.chdir(current_path)
+
+                    build_dir = self._path / "build"
+                    libs: list[Path] = list(build_dir.glob("lib.*"))
+                    if not libs:
+                        # The result of building the extensions
+                        # does not exist; this may be due to conditional
+                        # builds, so we assume that it's okay.
+                        return
+
+                    lib = libs[0]
+
+                    for pkg in lib.glob("**/*"):
+                        if pkg.is_dir() or self.is_excluded(pkg):
+                            continue
+
+                        rel_path = str(pkg.relative_to(lib))
+
+                        if rel_path in wheel.namelist():
+                            continue
+
+                        logger.debug(f"Adding: {rel_path}")
+
+                        self._add_file(wheel, pkg, rel_path)
+
+    def _copy_file_scripts(self, wheel: zipfile.ZipFile) -> None:
+        file_scripts = self.convert_script_files()
+
+        for abs_path in file_scripts:
+            self._add_file(
+                wheel,
+                abs_path,
+                Path.joinpath(Path(self.wheel_data_folder), "scripts", abs_path.name),
+            )
+
+    def _run_build_command(self, setup: Path) -> None:
+        subprocess.check_call(
+            [
+                self.executable.as_posix(),
+                str(setup),
+                "build",
+                "-b",
+                str(self._path / "build"),
+            ]
+        )
+
+    def _run_build_script(self, build_script: str) -> None:
+        logger.debug(f"Executing build script: {build_script}")
+        subprocess.check_call([self.executable.as_posix(), build_script])
+
+    def _copy_module(self, wheel: zipfile.ZipFile) -> None:
+        to_add = self.find_files_to_add()
+
+        # Walk the files and compress them,
+        # sorting everything so the order is stable.
+        for file in sorted(to_add, key=lambda x: x.path):
+            self._add_file(wheel, file.path, file.relative_to_source_root())
+
+    def _write_metadata(self, wheel: zipfile.ZipFile) -> None:
+        if (
+            "scripts" in self._poetry.local_config
+            or "plugins" in self._poetry.local_config
+        ):
+            with self._write_to_zip(wheel, self.dist_info + "/entry_points.txt") as f:
+                self._write_entry_points(f)
+
+        license_files_to_add = []
+        for base in ("COPYING", "LICENSE"):
+            license_files_to_add.append(self._path / base)
+            license_files_to_add.extend(self._path.glob(base + ".*"))
+
+        license_files_to_add.extend(self._path.joinpath("LICENSES").glob("**/*"))
+
+        for path in set(license_files_to_add):
+            if path.is_file():
+                relative_path = f"{self.dist_info}/{path.relative_to(self._path)}"
+                self._add_file(wheel, path, relative_path)
+            else:
+                logger.debug(f"Skipping: {path.as_posix()}")
+
+        with self._write_to_zip(wheel, self.dist_info + "/WHEEL") as f:
+            self._write_wheel_file(f)
+
+        with self._write_to_zip(wheel, self.dist_info + "/METADATA") as f:
+            self._write_metadata_file(f)
+
+    def _write_record(self, wheel: zipfile.ZipFile) -> None:
+        # Write a record of the files in the wheel
+        with self._write_to_zip(wheel, self.dist_info + "/RECORD") as f:
+            record = StringIO()
+
+            csv_writer = csv.writer(
+                record,
+                delimiter=csv.excel.delimiter,
+                quotechar=csv.excel.quotechar,
+                lineterminator="\n",
+            )
+            for path, hash, size in self._records:
+                csv_writer.writerow((path, f"sha256={hash}", size))
+
+            # RECORD itself is recorded with no hash or size
+            csv_writer.writerow((self.dist_info + "/RECORD", "", ""))
+
+            f.write(record.getvalue())
+
+    @property
+    def dist_info(self) -> str:
+        return self.dist_info_name(self._package.name, self._meta.version)
+
+    @property
+    def wheel_data_folder(self) -> str:
+        return f"{self._package.name}-{self._meta.version}.data"
+
+    @property
+    def wheel_filename(self) -> str:
+        name = escape_name(self._package.pretty_name)
+        version = escape_version(self._meta.version)
+        return f"{name}-{version}-{self.tag}.whl"
+
+    def supports_python2(self) -> bool:
+        return self._package.python_constraint.allows_any(
+            parse_constraint(">=2.0.0 <3.0.0")
+        )
+
+    def dist_info_name(self, distribution: str, version: str) -> str:
+        escaped_name = escape_name(distribution)
+        escaped_version = escape_version(version)
+
+        return f"{escaped_name}-{escaped_version}.dist-info"
+
+    @property
+    def tag(self) -> str:
+        if self._package.build_script:
+            sys_tag = next(sys_tags())
+            tag = (sys_tag.interpreter, sys_tag.abi, sys_tag.platform)
+        else:
+            platform = "any"
+            if self.supports_python2():
+                impl = "py2.py3"
+            else:
+                impl = "py3"
+
+            tag = (impl, "none", platform)
+
+        return "-".join(tag)
+
+    def _add_file(
+        self,
+        wheel: zipfile.ZipFile,
+        full_path: Path | str,
+        rel_path: Path | str,
+    ) -> None:
+        full_path, rel_path = str(full_path), str(rel_path)
+        if os.sep != "/":
+            # We always want to have /-separated paths in the zip file and in
+            # RECORD
+            rel_path = rel_path.replace(os.sep, "/")
+
+        zinfo = zipfile.ZipInfo(rel_path)
+
+        # Normalize permission bits to either 755 (executable) or 644
+        st_mode = os.stat(full_path).st_mode
+        new_mode = normalize_file_permissions(st_mode)
+        zinfo.external_attr = (new_mode & 0xFFFF) << 16  # Unix attributes
+
+        if stat.S_ISDIR(st_mode):
+            zinfo.external_attr |= 0x10  # MS-DOS directory flag
+
+        hashsum = hashlib.sha256()
+        with open(full_path, "rb") as src:
+            while True:
+                buf = src.read(1024 * 8)
+                if not buf:
+                    break
+                hashsum.update(buf)
+
+            src.seek(0)
+            wheel.writestr(zinfo, src.read(), compress_type=zipfile.ZIP_DEFLATED)
+
+        size = os.stat(full_path).st_size
+        hash_digest = urlsafe_b64encode(hashsum.digest()).decode("ascii").rstrip("=")
+
+        self._records.append((rel_path, hash_digest, size))
+
+    @contextlib.contextmanager
+    def _write_to_zip(
+        self, wheel: zipfile.ZipFile, rel_path: str
+    ) -> Iterator[StringIO]:
+        sio = StringIO()
+        yield sio
+
+        # The default is a fixed timestamp rather than the current time, so
+        # that building a wheel twice on the same computer can automatically
+        # give you the exact same result.
+        date_time = (2016, 1, 1, 0, 0, 0)
+        zi = zipfile.ZipInfo(rel_path, date_time)
+        zi.external_attr = (0o644 & 0xFFFF) << 16  # Unix attributes
+        b = sio.getvalue().encode("utf-8")
+        hashsum = hashlib.sha256(b)
+        hash_digest = urlsafe_b64encode(hashsum.digest()).decode("ascii").rstrip("=")
+
+        wheel.writestr(zi, b, compress_type=zipfile.ZIP_DEFLATED)
+        self._records.append((rel_path, hash_digest, len(b)))
+
+    def _write_entry_points(self, fp: TextIO) -> None:
+        """
+        Write entry_points.txt.
+        """
+        entry_points = self.convert_entry_points()
+
+        for group_name in sorted(entry_points):
+            fp.write(f"[{group_name}]\n")
+            for ep in sorted(entry_points[group_name]):
+                fp.write(ep.replace(" ", "") + "\n")
+
+            fp.write("\n")
+
+    def _write_wheel_file(self, fp: TextIO) -> None:
+        fp.write(
+            wheel_file_template.format(
+                version=__version__,
+                pure_lib="true" if self._package.build_script is None else "false",
+                tag=self.tag,
+            )
+        )
+
+    def _write_metadata_file(self, fp: TextIO) -> None:
+        """
+        Write out metadata in the 2.x format (email like)
+        """
+        fp.write(self.get_metadata_content())
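
Every member written to the wheel lands in `self._records` as a `(path, sha256, size)` triple and is flushed into `RECORD` at the end. The hash encoding is the one mandated by the binary distribution format spec: urlsafe base64 of the SHA-256 digest with the `=` padding stripped. A standalone sketch:

```python
# How a single RECORD row is derived, mirroring _add_file/_write_record.
import hashlib
from base64 import urlsafe_b64encode

payload = b"print('hello')\n"
digest = urlsafe_b64encode(hashlib.sha256(payload).digest()).decode("ascii").rstrip("=")
print(f"demo/hello.py,sha256={digest},{len(payload)}")
```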
diff --git a/vendor/poetry-core/src/poetry/core/masonry/metadata.py b/vendor/poetry-core/src/poetry/core/masonry/metadata.py
new file mode 100644
index 00000000..33c76d2e
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/masonry/metadata.py
@@ -0,0 +1,100 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.core.utils.helpers import readme_content_type
+
+
+if TYPE_CHECKING:
+    from packaging.utils import NormalizedName
+
+    from poetry.core.packages.package import Package
+
+
+class Metadata:
+    metadata_version = "2.1"
+    # version 1.0
+    name: NormalizedName | None = None
+    version: str
+    platforms: tuple[str, ...] = ()
+    supported_platforms: tuple[str, ...] = ()
+    summary: str | None = None
+    description: str | None = None
+    keywords: str | None = None
+    home_page: str | None = None
+    download_url: str | None = None
+    author: str | None = None
+    author_email: str | None = None
+    license: str | None = None
+    # version 1.1
+    classifiers: tuple[str, ...] = ()
+    requires: tuple[str, ...] = ()
+    provides: tuple[str, ...] = ()
+    obsoletes: tuple[str, ...] = ()
+    # version 1.2
+    maintainer: str | None = None
+    maintainer_email: str | None = None
+    requires_python: str | None = None
+    requires_external: tuple[str, ...] = ()
+    requires_dist: list[str] = []
+    provides_dist: tuple[str, ...] = ()
+    obsoletes_dist: tuple[str, ...] = ()
+    project_urls: tuple[str, ...] = ()
+
+    # Version 2.1
+    description_content_type: str | None = None
+    provides_extra: list[str] = []
+
+    @classmethod
+    def from_package(cls, package: Package) -> Metadata:
+        from packaging.utils import canonicalize_name
+
+        from poetry.core.utils.helpers import normalize_version
+        from poetry.core.version.helpers import format_python_constraint
+
+        meta = cls()
+
+        meta.name = canonicalize_name(package.name)
+        meta.version = normalize_version(package.version.text)
+        meta.summary = package.description
+        if package.readmes:
+            descriptions = []
+            for readme in package.readmes:
+                with readme.open(encoding="utf-8") as f:
+                    descriptions.append(f.read())
+            meta.description = "\n".join(descriptions)
+
+        meta.keywords = ",".join(package.keywords)
+        meta.home_page = package.homepage or package.repository_url
+        meta.author = package.author_name
+        meta.author_email = package.author_email
+
+        if package.license:
+            meta.license = package.license.id
+
+        meta.classifiers = tuple(package.all_classifiers)
+
+        # Version 1.2
+        meta.maintainer = package.maintainer_name
+        meta.maintainer_email = package.maintainer_email
+
+        # Requires python
+        if package.python_versions != "*":
+            meta.requires_python = format_python_constraint(package.python_constraint)
+
+        meta.requires_dist = [d.to_pep_508() for d in package.requires]
+
+        # Version 2.1
+        if package.readmes:
+            meta.description_content_type = readme_content_type(package.readmes[0])
+
+        meta.provides_extra = list(package.extras)
+
+        if package.urls:
+            for name, url in package.urls.items():
+                if name == "Homepage" and meta.home_page == url:
+                    continue
+
+                meta.project_urls += (f"{name}, {url}",)
+
+        return meta
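
`Metadata.from_package` flattens the parsed package into the flat core-metadata 2.1 fields that `Builder.get_metadata_content` then serialises as an RFC 822-style document. Since that format is email-shaped, the stdlib parser reads it back directly (the field values below are illustrative only):

```python
# Illustrative METADATA payload parsed with the stdlib email parser.
from email.parser import Parser

raw = """\
Metadata-Version: 2.1
Name: demo-pkg
Version: 1.2.0
Summary: A demo package
Requires-Python: >=3.7,<4.0
Requires-Dist: requests (>=2.28,<3.0)
"""
msg = Parser().parsestr(raw)
print(msg["Name"], msg["Version"], msg.get_all("Requires-Dist"))
```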
diff --git a/vendor/poetry/poetry/console/args/__init__.py b/vendor/poetry-core/src/poetry/core/masonry/utils/__init__.py
similarity index 100%
rename from vendor/poetry/poetry/console/args/__init__.py
rename to vendor/poetry-core/src/poetry/core/masonry/utils/__init__.py
diff --git a/vendor/poetry-core/src/poetry/core/masonry/utils/helpers.py b/vendor/poetry-core/src/poetry/core/masonry/utils/helpers.py
new file mode 100644
index 00000000..b79089f3
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/masonry/utils/helpers.py
@@ -0,0 +1,36 @@
+from __future__ import annotations
+
+import re
+
+
+def normalize_file_permissions(st_mode: int) -> int:
+    """
+    Normalizes the permission bits in the st_mode field from stat to 644/755
+
+    Popular VCSs only track whether a file is executable or not. The exact
+    permissions can vary on systems with different umasks. Normalising
+    to 644 (non executable) or 755 (executable) makes builds more reproducible.
+    """
+    # Set 644 permissions, leaving higher bits of st_mode unchanged
+    new_mode = (st_mode | 0o644) & ~0o133
+    if st_mode & 0o100:
+        new_mode |= 0o111  # Executable: 644 -> 755
+
+    return new_mode
+
+
+def escape_version(version: str) -> str:
+    """
+    Escaped version in wheel filename. Doesn't exactly follow
+    the escaping specification in :pep:`427#escaping-and-unicode`
+    because this conflicts with :pep:`440#local-version-identifiers`.
+    """
+    return re.sub(r"[^\w\d.+]+", "_", version, flags=re.UNICODE)
+
+
+def escape_name(name: str) -> str:
+    """
+    Escaped wheel name as specified in https://packaging.python.org/en/latest/specifications/binary-distribution-format/#escaping-and-unicode.
+    This function should only be used for the generation of artifact names, and not to normalize or filter existing artifact names.
+    """
+    return re.sub(r"[-_.]+", "_", name, flags=re.UNICODE).lower()
diff --git a/vendor/poetry-core/src/poetry/core/masonry/utils/include.py b/vendor/poetry-core/src/poetry/core/masonry/utils/include.py
new file mode 100644
index 00000000..f183aa6c
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/masonry/utils/include.py
@@ -0,0 +1,51 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+
+class Include:
+    """
+    Represents an "include" entry.
+
+    It can be a glob string, a single file or a directory.
+
+    This class will then detect the type of this include:
+
+        - a package
+        - a module
+        - a file
+        - a directory
+    """
+
+    def __init__(
+        self, base: Path, include: str, formats: list[str] | None = None
+    ) -> None:
+        self._base = base
+        self._include = str(include)
+        self._formats = formats
+
+        self._elements: list[Path] = sorted(self._base.glob(str(self._include)))
+
+    @property
+    def base(self) -> Path:
+        return self._base
+
+    @property
+    def elements(self) -> list[Path]:
+        return self._elements
+
+    @property
+    def formats(self) -> list[str] | None:
+        return self._formats
+
+    def is_empty(self) -> bool:
+        return len(self._elements) == 0
+
+    def refresh(self) -> Include:
+        self._elements = sorted(self._base.glob(self._include))
+
+        return self
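
`Include` resolves its glob eagerly at construction; `refresh()` re-globs so builders see files created after the object was made (e.g. by a build script). The underlying behaviour is plain `Path.glob`, sketched here against a throwaway tree:

```python
# The same eager-glob behaviour as Include(base, "docs/**/*.md"),
# shown with plain pathlib on a temporary directory.
import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    base = Path(tmp)
    (base / "docs").mkdir()
    (base / "docs" / "guide.md").write_text("# guide\n")

    elements = sorted(base.glob("docs/**/*.md"))
    print([p.relative_to(base).as_posix() for p in elements])  # ['docs/guide.md']
```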
diff --git a/vendor/poetry-core/src/poetry/core/masonry/utils/module.py b/vendor/poetry-core/src/poetry/core/masonry/utils/module.py
new file mode 100644
index 00000000..c97aefda
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/masonry/utils/module.py
@@ -0,0 +1,115 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
+
+
+if TYPE_CHECKING:
+    from poetry.core.masonry.utils.include import Include
+
+
+class ModuleOrPackageNotFound(ValueError):
+    pass
+
+
+class Module:
+    def __init__(
+        self,
+        name: str,
+        directory: str = ".",
+        packages: list[dict[str, Any]] | None = None,
+        includes: list[dict[str, Any]] | None = None,
+    ) -> None:
+        from poetry.core.masonry.utils.include import Include
+        from poetry.core.masonry.utils.package_include import PackageInclude
+        from poetry.core.utils.helpers import module_name
+
+        self._name = module_name(name)
+        self._in_src = False
+        self._is_package = False
+        self._path = Path(directory)
+        self._includes: list[Include] = []
+        packages = packages or []
+        includes = includes or []
+
+        if not packages:
+            # It must exist either as a .py file or a directory, but not both
+            pkg_dir = Path(directory, self._name)
+            py_file = Path(directory, self._name + ".py")
+            if pkg_dir.is_dir() and py_file.is_file():
+                raise ValueError(f"Both {pkg_dir} and {py_file} exist")
+            elif pkg_dir.is_dir():
+                packages = [{"include": str(pkg_dir.relative_to(self._path))}]
+            elif py_file.is_file():
+                packages = [{"include": str(py_file.relative_to(self._path))}]
+            else:
+                # Searching for a src module
+                src = Path(directory, "src")
+                src_pkg_dir = src / self._name
+                src_py_file = src / (self._name + ".py")
+
+                if src_pkg_dir.is_dir() and src_py_file.is_file():
+                    raise ValueError(f"Both {pkg_dir} and {py_file} exist")
+                elif src_pkg_dir.is_dir():
+                    packages = [
+                        {
+                            "include": str(src_pkg_dir.relative_to(src)),
+                            "from": str(src.relative_to(self._path)),
+                        }
+                    ]
+                elif src_py_file.is_file():
+                    packages = [
+                        {
+                            "include": str(src_py_file.relative_to(src)),
+                            "from": str(src.relative_to(self._path)),
+                        }
+                    ]
+                else:
+                    raise ModuleOrPackageNotFound(
+                        f"No file/folder found for package {name}"
+                    )
+
+        for package in packages:
+            formats = package.get("format")
+            if formats and not isinstance(formats, list):
+                formats = [formats]
+
+            self._includes.append(
+                PackageInclude(
+                    self._path,
+                    package["include"],
+                    formats=formats,
+                    source=package.get("from"),
+                )
+            )
+
+        for include in includes:
+            self._includes.append(
+                Include(self._path, include["path"], formats=include["format"])
+            )
+
+    @property
+    def name(self) -> str:
+        return self._name
+
+    @property
+    def path(self) -> Path:
+        return self._path
+
+    @property
+    def file(self) -> Path:
+        if self._is_package:
+            return self._path / "__init__.py"
+        else:
+            return self._path
+
+    @property
+    def includes(self) -> list[Include]:
+        return self._includes
+
+    def is_package(self) -> bool:
+        return self._is_package
+
+    def is_in_src(self) -> bool:
+        return self._in_src
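
When no explicit `packages` table is configured, `Module` probes four layouts in order: a `<name>/` directory, a `<name>.py` module, then the same two under `src/`. A condensed stdlib sketch of that fallback order (the both-exist error cases are omitted for brevity):

```python
# Condensed mirror of Module's package discovery order.
from pathlib import Path

def guess_packages(directory: Path, name: str) -> list[dict]:
    if (directory / name).is_dir():
        return [{"include": name}]
    if (directory / f"{name}.py").is_file():
        return [{"include": f"{name}.py"}]
    src = directory / "src"
    if (src / name).is_dir():
        return [{"include": name, "from": "src"}]
    if (src / f"{name}.py").is_file():
        return [{"include": f"{name}.py", "from": "src"}]
    raise ValueError(f"No file/folder found for package {name}")
```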
diff --git a/vendor/poetry-core/src/poetry/core/masonry/utils/package_include.py b/vendor/poetry-core/src/poetry/core/masonry/utils/package_include.py
new file mode 100644
index 00000000..643d02f8
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/masonry/utils/package_include.py
@@ -0,0 +1,93 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.core.masonry.utils.include import Include
+
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+
+class PackageInclude(Include):
+    def __init__(
+        self,
+        base: Path,
+        include: str,
+        formats: list[str] | None = None,
+        source: str | None = None,
+    ) -> None:
+        self._package: str
+        self._is_package = False
+        self._is_module = False
+        self._source = source
+
+        if source is not None:
+            base = base / source
+
+        super().__init__(base, include, formats=formats)
+        self.check_elements()
+
+    @property
+    def package(self) -> str:
+        return self._package
+
+    @property
+    def source(self) -> str | None:
+        return self._source
+
+    def is_package(self) -> bool:
+        return self._is_package
+
+    def is_module(self) -> bool:
+        return self._is_module
+
+    def refresh(self) -> PackageInclude:
+        super().refresh()
+
+        return self.check_elements()
+
+    def is_stub_only(self) -> bool:
+        # returns `True` if this is a PEP 561 stub-only package,
+        # see [PEP 561](https://www.python.org/dev/peps/pep-0561/#stub-only-packages)
+        return (self.package or "").endswith("-stubs") and all(
+            el.suffix == ".pyi" or el.name == "py.typed"
+            for el in self.elements
+            if el.is_file()
+        )
+
+    def has_modules(self) -> bool:
+        # Packages no longer need an __init__.py in python3, but there must
+        # at least be one .py file for it to be considered a package
+        return any(element.suffix == ".py" for element in self.elements)
+
+    def check_elements(self) -> PackageInclude:
+        if not self._elements:
+            raise ValueError(
+                f"{self._base / self._include} does not contain any element"
+            )
+
+        root = self._elements[0]
+        if len(self._elements) > 1:
+            # Probably glob
+            self._is_package = True
+            self._package = root.parent.name
+
+            if not self.is_stub_only() and not self.has_modules():
+                raise ValueError(f"{root.name} is not a package.")
+
+        else:
+            if root.is_dir():
+                # If it's a directory, we include everything inside it
+                self._package = root.name
+                self._elements: list[Path] = sorted(root.glob("**/*"))
+
+                if not self.is_stub_only() and not self.has_modules():
+                    raise ValueError(f"{root.name} is not a package.")
+
+                self._is_package = True
+            else:
+                self._package = root.stem
+                self._is_module = True
+
+        return self
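
`is_stub_only` implements the PEP 561 carve-out: a `*-stubs` package may consist solely of `.pyi` files plus the `py.typed` marker and still count as a package. The predicate, restated over plain filenames:

```python
# PEP 561 stub-only check over plain filenames (PackageInclude applies
# the same test to its on-disk elements).
def is_stub_only(package: str, filenames: list[str]) -> bool:
    return package.endswith("-stubs") and all(
        name.endswith(".pyi") or name == "py.typed" for name in filenames
    )

print(is_stub_only("requests-stubs", ["api.pyi", "py.typed"]))  # True
print(is_stub_only("requests-stubs", ["api.py"]))               # False
```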
diff --git a/vendor/poetry/poetry/console/commands/cache/__init__.py b/vendor/poetry-core/src/poetry/core/packages/__init__.py
similarity index 100%
rename from vendor/poetry/poetry/console/commands/cache/__init__.py
rename to vendor/poetry-core/src/poetry/core/packages/__init__.py
diff --git a/vendor/poetry-core/src/poetry/core/packages/constraints/__init__.py b/vendor/poetry-core/src/poetry/core/packages/constraints/__init__.py
new file mode 100644
index 00000000..d61a2cb7
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/packages/constraints/__init__.py
@@ -0,0 +1,73 @@
+from __future__ import annotations
+
+import re
+
+from poetry.core.packages.constraints.any_constraint import AnyConstraint
+from poetry.core.packages.constraints.base_constraint import BaseConstraint
+from poetry.core.packages.constraints.constraint import Constraint
+from poetry.core.packages.constraints.empty_constraint import EmptyConstraint
+from poetry.core.packages.constraints.multi_constraint import MultiConstraint
+from poetry.core.packages.constraints.union_constraint import UnionConstraint
+
+
+BASIC_CONSTRAINT = re.compile(r"^(!?==?)?\s*([^\s]+?)\s*$")
+
+
+def parse_constraint(constraints: str) -> BaseConstraint:
+    if constraints == "*":
+        return AnyConstraint()
+
+    or_constraints = re.split(r"\s*\|\|?\s*", constraints.strip())
+    or_groups = []
+    for constraints in or_constraints:
+        and_constraints = re.split(
+            r"(?< ,]) *(? 1:
+            for constraint in and_constraints:
+                constraint_objects.append(parse_single_constraint(constraint))
+        else:
+            constraint_objects.append(parse_single_constraint(and_constraints[0]))
+
+        if len(constraint_objects) == 1:
+            constraint = constraint_objects[0]
+        else:
+            constraint = constraint_objects[0]
+            for next_constraint in constraint_objects[1:]:
+                constraint = constraint.intersect(next_constraint)
+
+        or_groups.append(constraint)
+
+    if len(or_groups) == 1:
+        return or_groups[0]
+    else:
+        return UnionConstraint(*or_groups)
+
+
+def parse_single_constraint(constraint: str) -> Constraint:
+    # Basic comparator
+    m = BASIC_CONSTRAINT.match(constraint)
+    if m:
+        op = m.group(1)
+        if op is None:
+            op = "=="
+
+        version = m.group(2).strip()
+
+        return Constraint(version, op)
+
+    raise ValueError(f"Could not parse version constraint: {constraint}")
+
+
+__all__ = [
+    "AnyConstraint",
+    "BaseConstraint",
+    "Constraint",
+    "EmptyConstraint",
+    "MultiConstraint",
+    "UnionConstraint",
+    "parse_constraint",
+    "parse_single_constraint",
+]
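
These generic constraints model marker values such as `sys_platform`, not
versions; only the `==` and `!=` comparators exist. A short sketch of the two
parsing entry points (the platform names are arbitrary examples):

    from poetry.core.packages.constraints import parse_constraint

    parse_constraint("*")                  # AnyConstraint()
    parse_constraint("!=win32")            # Constraint("win32", "!=")
    parse_constraint("win32 || darwin")    # UnionConstraint of two "==" constraints
    parse_constraint("!=win32, !=darwin")  # comma means "and" -> MultiConstraint
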
diff --git a/vendor/poetry-core/src/poetry/core/packages/constraints/any_constraint.py b/vendor/poetry-core/src/poetry/core/packages/constraints/any_constraint.py
new file mode 100644
index 00000000..348e4d1d
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/packages/constraints/any_constraint.py
@@ -0,0 +1,39 @@
+from __future__ import annotations
+
+from poetry.core.packages.constraints.base_constraint import BaseConstraint
+from poetry.core.packages.constraints.empty_constraint import EmptyConstraint
+
+
+class AnyConstraint(BaseConstraint):
+    def allows(self, other: BaseConstraint) -> bool:
+        return True
+
+    def allows_all(self, other: BaseConstraint) -> bool:
+        return True
+
+    def allows_any(self, other: BaseConstraint) -> bool:
+        return True
+
+    def difference(self, other: BaseConstraint) -> BaseConstraint:
+        if other.is_any():
+            return EmptyConstraint()
+
+        raise ValueError("Unimplemented constraint difference")
+
+    def intersect(self, other: BaseConstraint) -> BaseConstraint:
+        return other
+
+    def union(self, other: BaseConstraint) -> AnyConstraint:
+        return AnyConstraint()
+
+    def is_any(self) -> bool:
+        return True
+
+    def is_empty(self) -> bool:
+        return False
+
+    def __str__(self) -> str:
+        return "*"
+
+    def __eq__(self, other: object) -> bool:
+        return isinstance(other, BaseConstraint) and other.is_any()
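
As a quick sanity check of the algebra above, `*` acts as the identity for
intersection and the absorbing element for union:

    from poetry.core.packages.constraints import AnyConstraint, parse_constraint

    c = parse_constraint("!=win32")
    assert AnyConstraint().intersect(c) is c  # "anything and c" is just c
    assert AnyConstraint().union(c).is_any()  # "anything or c" is anything
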
diff --git a/vendor/poetry-core/src/poetry/core/packages/constraints/base_constraint.py b/vendor/poetry-core/src/poetry/core/packages/constraints/base_constraint.py
new file mode 100644
index 00000000..7f580c5e
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/packages/constraints/base_constraint.py
@@ -0,0 +1,33 @@
+from __future__ import annotations
+
+
+class BaseConstraint:
+    def allows(self, other: BaseConstraint) -> bool:
+        raise NotImplementedError()
+
+    def allows_all(self, other: BaseConstraint) -> bool:
+        raise NotImplementedError()
+
+    def allows_any(self, other: BaseConstraint) -> bool:
+        raise NotImplementedError()
+
+    def difference(self, other: BaseConstraint) -> BaseConstraint:
+        raise NotImplementedError()
+
+    def intersect(self, other: BaseConstraint) -> BaseConstraint:
+        raise NotImplementedError()
+
+    def union(self, other: BaseConstraint) -> BaseConstraint:
+        raise NotImplementedError()
+
+    def is_any(self) -> bool:
+        return False
+
+    def is_empty(self) -> bool:
+        return False
+
+    def __repr__(self) -> str:
+        return f"<{self.__class__.__name__} {str(self)}>"
+
+    def __eq__(self, other: object) -> bool:
+        raise NotImplementedError()
diff --git a/vendor/poetry-core/src/poetry/core/packages/constraints/constraint.py b/vendor/poetry-core/src/poetry/core/packages/constraints/constraint.py
new file mode 100644
index 00000000..013449fd
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/packages/constraints/constraint.py
@@ -0,0 +1,139 @@
+from __future__ import annotations
+
+import operator
+
+from poetry.core.packages.constraints import AnyConstraint
+from poetry.core.packages.constraints.base_constraint import BaseConstraint
+from poetry.core.packages.constraints.empty_constraint import EmptyConstraint
+
+
+class Constraint(BaseConstraint):
+    OP_EQ = operator.eq
+    OP_NE = operator.ne
+
+    _trans_op_str = {"=": OP_EQ, "==": OP_EQ, "!=": OP_NE}
+
+    _trans_op_int = {OP_EQ: "==", OP_NE: "!="}
+
+    def __init__(self, version: str, operator: str = "==") -> None:
+        if operator == "=":
+            operator = "=="
+
+        self._version = version
+        self._operator = operator
+        self._op = self._trans_op_str[operator]
+
+    @property
+    def version(self) -> str:
+        return self._version
+
+    @property
+    def operator(self) -> str:
+        return self._operator
+
+    def allows(self, other: BaseConstraint) -> bool:
+        if not isinstance(other, Constraint):
+            raise ValueError("Unimplemented comparison of constraints")
+
+        is_equal_op = self._operator == "=="
+        is_non_equal_op = self._operator == "!="
+        is_other_equal_op = other.operator == "=="
+        is_other_non_equal_op = other.operator == "!="
+
+        if is_equal_op and is_other_equal_op:
+            return self._version == other.version
+
+        if (
+            is_equal_op
+            and is_other_non_equal_op
+            or is_non_equal_op
+            and is_other_equal_op
+            or is_non_equal_op
+            and is_other_non_equal_op
+        ):
+            return self._version != other.version
+
+        return False
+
+    def allows_all(self, other: BaseConstraint) -> bool:
+        if not isinstance(other, Constraint):
+            return other.is_empty()
+
+        return other == self
+
+    def allows_any(self, other: BaseConstraint) -> bool:
+        if isinstance(other, Constraint):
+            is_non_equal_op = self._operator == "!="
+            is_other_non_equal_op = other.operator == "!="
+
+            if is_non_equal_op and is_other_non_equal_op:
+                return self._version != other.version
+
+        return other.allows(self)
+
+    def difference(self, other: BaseConstraint) -> Constraint | EmptyConstraint:
+        if other.allows(self):
+            return EmptyConstraint()
+
+        return self
+
+    def intersect(self, other: BaseConstraint) -> BaseConstraint:
+        from poetry.core.packages.constraints.multi_constraint import MultiConstraint
+
+        if isinstance(other, Constraint):
+            if other == self:
+                return self
+
+            if self.operator == "!=" and other.operator == "==" and self.allows(other):
+                return other
+
+            if other.operator == "!=" and self.operator == "==" and other.allows(self):
+                return self
+
+            if other.operator == "!=" and self.operator == "!=":
+                return MultiConstraint(self, other)
+
+            return EmptyConstraint()
+
+        return other.intersect(self)
+
+    def union(self, other: BaseConstraint) -> BaseConstraint:
+        if isinstance(other, Constraint):
+            from poetry.core.packages.constraints.union_constraint import (
+                UnionConstraint,
+            )
+
+            if other == self:
+                return self
+
+            if self.operator == "!=" and other.operator == "==" and self.allows(other):
+                return self
+
+            if other.operator == "!=" and self.operator == "==" and other.allows(self):
+                return other
+
+            if other.operator == "==" and self.operator == "==":
+                return UnionConstraint(self, other)
+
+            return AnyConstraint()
+
+        return other.union(self)
+
+    def is_any(self) -> bool:
+        return False
+
+    def is_empty(self) -> bool:
+        return False
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Constraint):
+            return NotImplemented
+
+        return (self.version, self.operator) == (other.version, other.operator)
+
+    def __hash__(self) -> int:
+        return hash((self._operator, self._version))
+
+    def __str__(self) -> str:
+        op = self._operator if self._operator != "==" else ""
+        return f"{op}{self._version}"
diff --git a/vendor/poetry-core/src/poetry/core/packages/constraints/empty_constraint.py b/vendor/poetry-core/src/poetry/core/packages/constraints/empty_constraint.py
new file mode 100644
index 00000000..2e703d62
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/packages/constraints/empty_constraint.py
@@ -0,0 +1,37 @@
+from __future__ import annotations
+
+from poetry.core.packages.constraints.base_constraint import BaseConstraint
+
+
+class EmptyConstraint(BaseConstraint):
+    pretty_string = None
+
+    def matches(self, _: BaseConstraint) -> bool:
+        return True
+
+    def is_empty(self) -> bool:
+        return True
+
+    def allows(self, other: BaseConstraint) -> bool:
+        return False
+
+    def allows_all(self, other: BaseConstraint) -> bool:
+        return other.is_empty()
+
+    def allows_any(self, other: BaseConstraint) -> bool:
+        return False
+
+    def intersect(self, other: BaseConstraint) -> BaseConstraint:
+        return self
+
+    def difference(self, other: BaseConstraint) -> BaseConstraint:
+        return self
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, BaseConstraint):
+            return False
+
+        return other.is_empty()
+
+    def __str__(self) -> str:
+        return ""
diff --git a/vendor/poetry-core/src/poetry/core/packages/constraints/multi_constraint.py b/vendor/poetry-core/src/poetry/core/packages/constraints/multi_constraint.py
new file mode 100644
index 00000000..8ab75fec
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/packages/constraints/multi_constraint.py
@@ -0,0 +1,91 @@
+from __future__ import annotations
+
+from poetry.core.packages.constraints.base_constraint import BaseConstraint
+from poetry.core.packages.constraints.constraint import Constraint
+
+
+class MultiConstraint(BaseConstraint):
+    def __init__(self, *constraints: Constraint) -> None:
+        if any(c.operator == "==" for c in constraints):
+            raise ValueError(
+                "A multi-constraint can only be comprised of negative constraints"
+            )
+
+        self._constraints = constraints
+
+    @property
+    def constraints(self) -> tuple[Constraint, ...]:
+        return self._constraints
+
+    def allows(self, other: BaseConstraint) -> bool:
+        return all(constraint.allows(other) for constraint in self._constraints)
+
+    def allows_all(self, other: BaseConstraint) -> bool:
+        if other.is_any():
+            return False
+
+        if other.is_empty():
+            return True
+
+        if not isinstance(other, MultiConstraint):
+            return self.allows(other)
+
+        our_constraints = iter(self._constraints)
+        their_constraints = iter(other.constraints)
+        our_constraint = next(our_constraints, None)
+        their_constraint = next(their_constraints, None)
+
+        while our_constraint and their_constraint:
+            if our_constraint.allows_all(their_constraint):
+                their_constraint = next(their_constraints, None)
+            else:
+                our_constraint = next(our_constraints, None)
+
+        return their_constraint is None
+
+    def allows_any(self, other: BaseConstraint) -> bool:
+        if other.is_any():
+            return True
+
+        if other.is_empty():
+            return True
+
+        if isinstance(other, Constraint):
+            return self.allows(other)
+
+        if isinstance(other, MultiConstraint):
+            return any(
+                c1.allows(c2) for c1 in self.constraints for c2 in other.constraints
+            )
+
+        return False
+
+    def intersect(self, other: BaseConstraint) -> BaseConstraint:
+        if not isinstance(other, Constraint):
+            raise ValueError("Unimplemented constraint intersection")
+
+        constraints = self._constraints
+        if other not in constraints:
+            constraints += (other,)
+        else:
+            constraints = (other,)
+
+        if len(constraints) == 1:
+            return constraints[0]
+
+        return MultiConstraint(*constraints)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, MultiConstraint):
+            return False
+
+        return sorted(
+            self._constraints, key=lambda c: (c.operator, c.version)
+        ) == sorted(other.constraints, key=lambda c: (c.operator, c.version))
+
+    def __str__(self) -> str:
+        constraints = []
+        for constraint in self._constraints:
+            constraints.append(str(constraint))
+
+        return ", ".join(constraints)
diff --git a/vendor/poetry-core/src/poetry/core/packages/constraints/union_constraint.py b/vendor/poetry-core/src/poetry/core/packages/constraints/union_constraint.py
new file mode 100644
index 00000000..65651509
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/packages/constraints/union_constraint.py
@@ -0,0 +1,141 @@
+from __future__ import annotations
+
+from typing import cast
+
+from poetry.core.packages.constraints.base_constraint import BaseConstraint
+from poetry.core.packages.constraints.constraint import Constraint
+from poetry.core.packages.constraints.empty_constraint import EmptyConstraint
+from poetry.core.packages.constraints.multi_constraint import MultiConstraint
+
+
+class UnionConstraint(BaseConstraint):
+    def __init__(self, *constraints: BaseConstraint) -> None:
+        self._constraints = constraints
+
+    @property
+    def constraints(self) -> tuple[BaseConstraint, ...]:
+        return self._constraints
+
+    def allows(
+        self,
+        other: BaseConstraint,
+    ) -> bool:
+        return any(constraint.allows(other) for constraint in self._constraints)
+
+    def allows_any(self, other: BaseConstraint) -> bool:
+        if other.is_empty():
+            return False
+
+        if other.is_any():
+            return True
+
+        if isinstance(other, (UnionConstraint, MultiConstraint)):
+            constraints = other.constraints
+        else:
+            constraints = (other,)
+
+        return any(
+            our_constraint.allows_any(their_constraint)
+            for our_constraint in self._constraints
+            for their_constraint in constraints
+        )
+
+    def allows_all(self, other: BaseConstraint) -> bool:
+        if other.is_any():
+            return False
+
+        if other.is_empty():
+            return True
+
+        if isinstance(other, (UnionConstraint, MultiConstraint)):
+            constraints = other.constraints
+        else:
+            constraints = (other,)
+
+        our_constraints = iter(self._constraints)
+        their_constraints = iter(constraints)
+        our_constraint = next(our_constraints, None)
+        their_constraint = next(their_constraints, None)
+
+        while our_constraint and their_constraint:
+            if our_constraint.allows_all(their_constraint):
+                their_constraint = next(their_constraints, None)
+            else:
+                our_constraint = next(our_constraints, None)
+
+        return their_constraint is None
+
+    def intersect(self, other: BaseConstraint) -> BaseConstraint:
+        if other.is_any():
+            return self
+
+        if other.is_empty():
+            return other
+
+        if isinstance(other, Constraint):
+            if self.allows(other):
+                return other
+
+            return EmptyConstraint()
+
+        # Two remaining cases: an intersection with another union, or an intersection
+        # with a multi.
+        #
+        # In the first case:
+        # (A or B) and (C or D) => (A and C) or (A and D) or (B and C) or (B and D)
+        #
+        # In the second case:
+        # (A or B) and (C and D) => (A and C and D) or (B and C and D)
+        new_constraints = []
+        if isinstance(other, UnionConstraint):
+            for our_constraint in self._constraints:
+                for their_constraint in other.constraints:
+                    intersection = our_constraint.intersect(their_constraint)
+
+                    if (
+                        not intersection.is_empty()
+                        and intersection not in new_constraints
+                    ):
+                        new_constraints.append(intersection)
+
+        else:
+            other = cast(MultiConstraint, other)
+
+            for our_constraint in self._constraints:
+                intersection = our_constraint
+                for their_constraint in other.constraints:
+                    intersection = intersection.intersect(their_constraint)
+
+                if not intersection.is_empty() and intersection not in new_constraints:
+                    new_constraints.append(intersection)
+
+        if not new_constraints:
+            return EmptyConstraint()
+
+        if len(new_constraints) == 1:
+            return new_constraints[0]
+
+        return UnionConstraint(*new_constraints)
+
+    def union(self, other: BaseConstraint) -> UnionConstraint:
+        if not isinstance(other, Constraint):
+            raise ValueError("Unimplemented constraint union")
+
+        constraints = self._constraints
+        if other not in self._constraints:
+            constraints += (other,)
+
+        return UnionConstraint(*constraints)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, UnionConstraint):
+            return False
+
+        return set(self._constraints) == set(other._constraints)
+
+    def __str__(self) -> str:
+        constraints = []
+        for constraint in self._constraints:
+            constraints.append(str(constraint))
+
+        return " || ".join(constraints)
diff --git a/vendor/poetry-core/src/poetry/core/packages/dependency.py b/vendor/poetry-core/src/poetry/core/packages/dependency.py
new file mode 100644
index 00000000..4310d695
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/packages/dependency.py
@@ -0,0 +1,617 @@
+from __future__ import annotations
+
+import os
+import re
+import warnings
+
+from contextlib import suppress
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Iterable
+from typing import TypeVar
+
+from poetry.core.packages.constraints import (
+    parse_constraint as parse_generic_constraint,
+)
+from poetry.core.packages.dependency_group import MAIN_GROUP
+from poetry.core.packages.specification import PackageSpecification
+from poetry.core.packages.utils.utils import contains_group_without_marker
+from poetry.core.packages.utils.utils import normalize_python_version_markers
+from poetry.core.semver.helpers import parse_constraint
+from poetry.core.semver.version_range_constraint import VersionRangeConstraint
+from poetry.core.version.markers import parse_marker
+
+
+if TYPE_CHECKING:
+    from packaging.utils import NormalizedName
+
+    from poetry.core.packages.constraints import BaseConstraint
+    from poetry.core.packages.directory_dependency import DirectoryDependency
+    from poetry.core.packages.file_dependency import FileDependency
+    from poetry.core.semver.version_constraint import VersionConstraint
+    from poetry.core.version.markers import BaseMarker
+
+    T = TypeVar("T", bound="Dependency")
+
+
+class Dependency(PackageSpecification):
+    def __init__(
+        self,
+        name: str,
+        constraint: str | VersionConstraint,
+        optional: bool = False,
+        groups: Iterable[str] | None = None,
+        allows_prereleases: bool = False,
+        extras: Iterable[str] | None = None,
+        source_type: str | None = None,
+        source_url: str | None = None,
+        source_reference: str | None = None,
+        source_resolved_reference: str | None = None,
+        source_subdirectory: str | None = None,
+    ) -> None:
+        from poetry.core.version.markers import AnyMarker
+
+        super().__init__(
+            name,
+            source_type=source_type,
+            source_url=source_url,
+            source_reference=source_reference,
+            source_resolved_reference=source_resolved_reference,
+            source_subdirectory=source_subdirectory,
+            features=extras,
+        )
+
+        self._constraint: VersionConstraint
+        self._pretty_constraint: str
+        self.constraint = constraint  # type: ignore[assignment]
+
+        self._optional = optional
+
+        if not groups:
+            groups = [MAIN_GROUP]
+
+        self._groups = frozenset(groups)
+
+        if (
+            isinstance(self._constraint, VersionRangeConstraint)
+            and self._constraint.min
+        ):
+            allows_prereleases = (
+                allows_prereleases or self._constraint.min.is_unstable()
+            )
+
+        self._allows_prereleases = allows_prereleases
+
+        self._python_versions = "*"
+        self._python_constraint = parse_constraint("*")
+        self._transitive_python_versions: str | None = None
+        self._transitive_python_constraint: VersionConstraint | None = None
+        self._transitive_marker: BaseMarker | None = None
+
+        self._in_extras: list[str] = []
+
+        self._activated = not self._optional
+
+        self.is_root = False
+        self._marker: BaseMarker = AnyMarker()
+        self.source_name: str | None = None
+
+    @property
+    def name(self) -> NormalizedName:
+        return self._name
+
+    @property
+    def constraint(self) -> VersionConstraint:
+        return self._constraint
+
+    @constraint.setter
+    def constraint(self, constraint: str | VersionConstraint) -> None:
+        from poetry.core.semver.version_constraint import VersionConstraint
+
+        try:
+            if not isinstance(constraint, VersionConstraint):
+                self._constraint = parse_constraint(constraint)
+            else:
+                self._constraint = constraint
+        except ValueError:
+            self._constraint = parse_constraint("*")
+        self._pretty_constraint = str(constraint)
+
+    def set_constraint(self, constraint: str | VersionConstraint) -> None:
+        warnings.warn(
+            "Calling method 'set_constraint' is deprecated and will be removed. "
+            "It has been replaced by the property 'constraint' for consistency.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        self.constraint = constraint  # type: ignore[assignment]
+
+    @property
+    def pretty_constraint(self) -> str:
+        return self._pretty_constraint
+
+    @property
+    def pretty_name(self) -> str:
+        return self._pretty_name
+
+    @property
+    def groups(self) -> frozenset[str]:
+        return self._groups
+
+    @property
+    def python_versions(self) -> str:
+        return self._python_versions
+
+    @python_versions.setter
+    def python_versions(self, value: str) -> None:
+        self._python_versions = value
+        self._python_constraint = parse_constraint(value)
+        if not self._python_constraint.is_any():
+            self._marker = self._marker.intersect(
+                parse_marker(
+                    self._create_nested_marker(
+                        "python_version", self._python_constraint
+                    )
+                )
+            )
+
+    @property
+    def transitive_python_versions(self) -> str:
+        if self._transitive_python_versions is None:
+            return self._python_versions
+
+        return self._transitive_python_versions
+
+    @transitive_python_versions.setter
+    def transitive_python_versions(self, value: str) -> None:
+        self._transitive_python_versions = value
+        self._transitive_python_constraint = parse_constraint(value)
+
+    @property
+    def marker(self) -> BaseMarker:
+        return self._marker
+
+    @marker.setter
+    def marker(self, marker: str | BaseMarker) -> None:
+        from poetry.core.packages.utils.utils import convert_markers
+        from poetry.core.semver.helpers import parse_constraint
+        from poetry.core.version.markers import BaseMarker
+        from poetry.core.version.markers import parse_marker
+
+        if not isinstance(marker, BaseMarker):
+            marker = parse_marker(marker)
+
+        self._marker = marker
+
+        markers = convert_markers(marker)
+
+        if "extra" in markers:
+            # If we have extras, the dependency is optional
+            self.deactivate()
+
+            for or_ in markers["extra"]:
+                for _, extra in or_:
+                    self.in_extras.append(extra)
+
+        # Recalculate python versions.
+        self._python_versions = "*"
+        if not contains_group_without_marker(markers, "python_version"):
+            python_version_markers = markers["python_version"]
+            self._python_versions = normalize_python_version_markers(
+                python_version_markers
+            )
+
+        self._python_constraint = parse_constraint(self._python_versions)
+
+    @property
+    def transitive_marker(self) -> BaseMarker:
+        if self._transitive_marker is None:
+            return self.marker
+
+        return self._transitive_marker
+
+    @transitive_marker.setter
+    def transitive_marker(self, value: BaseMarker) -> None:
+        self._transitive_marker = value
+
+    @property
+    def python_constraint(self) -> VersionConstraint:
+        return self._python_constraint
+
+    @property
+    def transitive_python_constraint(self) -> VersionConstraint:
+        if self._transitive_python_constraint is None:
+            return self._python_constraint
+
+        return self._transitive_python_constraint
+
+    @property
+    def extras(self) -> frozenset[str]:
+        # extras activated in a dependency are the same as features
+        return self._features
+
+    @property
+    def in_extras(self) -> list[str]:
+        return self._in_extras
+
+    @property
+    def base_pep_508_name(self) -> str:
+        from poetry.core.semver.version import Version
+        from poetry.core.semver.version_union import VersionUnion
+
+        requirement = self.pretty_name
+
+        if self.extras:
+            extras = ",".join(sorted(self.extras))
+            requirement += f"[{extras}]"
+
+        constraint = self.constraint
+        if isinstance(constraint, VersionUnion):
+            if (
+                constraint.excludes_single_version()
+                or constraint.excludes_single_wildcard_range()
+            ):
+                # This branch short-circuits for special cases and avoids
+                # having to split and parse the constraint again. It has no
+                # functional difference from the logic in the else branch.
+                requirement += f" ({str(constraint)})"
+            else:
+                constraints = ",".join(
+                    str(parse_constraint(c)) for c in self.pretty_constraint.split(",")
+                )
+                requirement += f" ({constraints})"
+        elif isinstance(constraint, Version):
+            requirement += f" (=={constraint.text})"
+        elif not constraint.is_any():
+            requirement += f" ({str(constraint).replace(' ', '')})"
+
+        return requirement
+
+    def allows_prereleases(self) -> bool:
+        return self._allows_prereleases
+
+    def is_optional(self) -> bool:
+        return self._optional
+
+    def is_activated(self) -> bool:
+        return self._activated
+
+    def is_vcs(self) -> bool:
+        return False
+
+    def is_file(self) -> bool:
+        return False
+
+    def is_directory(self) -> bool:
+        return False
+
+    def is_url(self) -> bool:
+        return False
+
+    def to_pep_508(self, with_extras: bool = True) -> str:
+        from poetry.core.packages.utils.utils import convert_markers
+
+        requirement = self.base_pep_508_name
+
+        markers = []
+        has_extras = False
+        if not self.marker.is_any():
+            marker = self.marker
+            if not with_extras:
+                marker = marker.without_extras()
+
+            # we re-check for an any marker here since the without-extras
+            # marker might be an any marker again
+            if not marker.is_empty() and not marker.is_any():
+                markers.append(str(marker))
+
+            has_extras = "extra" in convert_markers(marker)
+        else:
+            # Python marker
+            if self.python_versions != "*":
+                python_constraint = self.python_constraint
+
+                markers.append(
+                    self._create_nested_marker("python_version", python_constraint)
+                )
+
+        in_extras = " || ".join(self._in_extras)
+        if in_extras and with_extras and not has_extras:
+            markers.append(
+                self._create_nested_marker("extra", parse_generic_constraint(in_extras))
+            )
+
+        if markers:
+            if self.is_vcs() or self.is_url() or self.is_file():
+                requirement += " "
+
+            if len(markers) > 1:
+                marker_str = " and ".join(f"({m})" for m in markers)
+                requirement += f"; {marker_str}"
+            else:
+                requirement += f"; {markers[0]}"
+
+        return requirement
+
+    def _create_nested_marker(
+        self, name: str, constraint: BaseConstraint | VersionConstraint
+    ) -> str:
+        from poetry.core.packages.constraints.constraint import Constraint
+        from poetry.core.packages.constraints.multi_constraint import MultiConstraint
+        from poetry.core.packages.constraints.union_constraint import UnionConstraint
+        from poetry.core.semver.version import Version
+        from poetry.core.semver.version_union import VersionUnion
+
+        if isinstance(constraint, (MultiConstraint, UnionConstraint)):
+            multi_parts = []
+            for c in constraint.constraints:
+                multi = isinstance(c, (MultiConstraint, UnionConstraint))
+                multi_parts.append((multi, self._create_nested_marker(name, c)))
+
+            glue = " and "
+            if isinstance(constraint, UnionConstraint):
+                parts = [f"({part[1]})" if part[0] else part[1] for part in multi_parts]
+                glue = " or "
+            else:
+                parts = [part[1] for part in multi_parts]
+
+            marker = glue.join(parts)
+        elif isinstance(constraint, Constraint):
+            marker = f'{name} {constraint.operator} "{constraint.version}"'
+        elif isinstance(constraint, VersionUnion):
+            parts = [self._create_nested_marker(name, c) for c in constraint.ranges]
+            glue = " or "
+            parts = [f"({part})" for part in parts]
+
+            marker = glue.join(parts)
+        elif isinstance(constraint, Version):
+            if constraint.precision >= 3 and name == "python_version":
+                name = "python_full_version"
+
+            marker = f'{name} == "{constraint.text}"'
+        else:
+            assert isinstance(constraint, VersionRangeConstraint)
+            if constraint.min is not None:
+                min_name = name
+                if constraint.min.precision >= 3 and name == "python_version":
+                    min_name = "python_full_version"
+
+                    if constraint.max is None:
+                        name = min_name
+
+                op = ">="
+                if not constraint.include_min:
+                    op = ">"
+
+                version = constraint.min.text
+                if constraint.max is not None:
+                    max_name = name
+                    if constraint.max.precision >= 3 and name == "python_version":
+                        max_name = "python_full_version"
+
+                    text = f'{min_name} {op} "{version}"'
+
+                    op = "<="
+                    if not constraint.include_max:
+                        op = "<"
+
+                    version = constraint.max.text
+
+                    text += f' and {max_name} {op} "{version}"'
+
+                    return text
+            elif constraint.max is not None:
+                if constraint.max.precision >= 3 and name == "python_version":
+                    name = "python_full_version"
+
+                op = "<="
+                if not constraint.include_max:
+                    op = "<"
+
+                version = constraint.max.text
+            else:
+                return ""
+
+            marker = f'{name} {op} "{version}"'
+
+        return marker
+
+    def activate(self) -> None:
+        """
+        Set the dependency as mandatory.
+        """
+        self._activated = True
+
+    def deactivate(self) -> None:
+        """
+        Set the dependency as optional.
+        """
+        if not self._optional:
+            self._optional = True
+
+        self._activated = False
+
+    def with_constraint(self: T, constraint: str | VersionConstraint) -> T:
+        dependency = self.clone()
+        dependency.constraint = constraint  # type: ignore[assignment]
+        return dependency
+
+    @classmethod
+    def create_from_pep_508(
+        cls, name: str, relative_to: Path | None = None
+    ) -> Dependency:
+        """
+        Resolve a PEP-508 requirement string to a `Dependency` instance. If a `relative_to`
+        path is specified, this is used as the base directory if the identified dependency is
+        of file or directory type.
+        """
+        from poetry.core.packages.url_dependency import URLDependency
+        from poetry.core.packages.utils.link import Link
+        from poetry.core.packages.utils.utils import is_archive_file
+        from poetry.core.packages.utils.utils import is_python_project
+        from poetry.core.packages.utils.utils import is_url
+        from poetry.core.packages.utils.utils import path_to_url
+        from poetry.core.packages.utils.utils import strip_extras
+        from poetry.core.packages.utils.utils import url_to_path
+        from poetry.core.packages.vcs_dependency import VCSDependency
+        from poetry.core.utils.patterns import wheel_file_re
+        from poetry.core.vcs.git import ParsedUrl
+        from poetry.core.version.requirements import Requirement
+
+        # Removing comments
+        parts = name.split(" #", 1)
+        name = parts[0].strip()
+        if len(parts) > 1:
+            rest = parts[1]
+            if " ;" in rest:
+                name += " ;" + rest.split(" ;", 1)[1]
+
+        req = Requirement(name)
+
+        name = req.name
+        link = None
+
+        if is_url(name):
+            link = Link(name)
+        elif req.url:
+            link = Link(req.url)
+        else:
+            path_str = os.path.normpath(os.path.abspath(name))
+            p, extras = strip_extras(path_str)
+            if os.path.isdir(p) and (os.path.sep in name or name.startswith(".")):
+                if not is_python_project(Path(name)):
+                    raise ValueError(
+                        f"Directory {name!r} is not installable. File 'setup.[py|cfg]' "
+                        "not found."
+                    )
+                link = Link(path_to_url(p))
+            elif is_archive_file(p):
+                link = Link(path_to_url(p))
+
+        # it's a local file, dir, or url
+        if link:
+            is_file_uri = link.scheme == "file"
+            is_relative_uri = is_file_uri and re.search(r"\.\./", link.url)
+
+            # Handle relative file URLs
+            if is_file_uri and is_relative_uri:
+                path = Path(link.path)
+                if relative_to:
+                    path = relative_to / path
+                link = Link(path_to_url(path))
+
+            # wheel file
+            version = None
+            if link.is_wheel:
+                m = wheel_file_re.match(link.filename)
+                if not m:
+                    raise ValueError(f"Invalid wheel name: {link.filename}")
+                name = m.group("name")
+                version = m.group("ver")
+
+            dep: Dependency | None = None
+
+            if link.scheme.startswith("git+"):
+                url = ParsedUrl.parse(link.url)
+                dep = VCSDependency(
+                    name,
+                    "git",
+                    url.url,
+                    rev=url.rev,
+                    directory=url.subdirectory,
+                    extras=req.extras,
+                )
+            elif link.scheme == "git":
+                dep = VCSDependency(
+                    name, "git", link.url_without_fragment, extras=req.extras
+                )
+            elif link.scheme in ["http", "https"]:
+                dep = URLDependency(name, link.url, extras=req.extras)
+            elif is_file_uri:
+                # handle RFC 8089 references
+                path = url_to_path(req.url)
+                dep = _make_file_or_dir_dep(
+                    name=name, path=path, base=relative_to, extras=req.extras
+                )
+            else:
+                with suppress(ValueError):
+                    # this is a local path not using the file URI scheme
+                    dep = _make_file_or_dir_dep(
+                        name=name,
+                        path=Path(req.url),
+                        base=relative_to,
+                        extras=req.extras,
+                    )
+
+            if dep is None:
+                dep = Dependency(name, version or "*", extras=req.extras)
+
+            if version:
+                dep._constraint = parse_constraint(version)
+        else:
+            constraint: VersionConstraint | str
+            if req.pretty_constraint:
+                constraint = req.constraint
+            else:
+                constraint = "*"
+            dep = Dependency(name, constraint, extras=req.extras)
+
+        if req.marker:
+            dep.marker = req.marker
+
+        return dep
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Dependency):
+            return NotImplemented
+
+        # "constraint" is implicitly given for direct origin dependencies and might not
+        # be set yet ("*"). Thus, it shouldn't be used to determine if two direct origin
+        # dependencies are equal.
+        # Calling is_direct_origin() for one dependency is sufficient because
+        # super().__eq__() returns False for different origins.
+        return super().__eq__(other) and (
+            self._constraint == other.constraint or self.is_direct_origin()
+        )
+
+    def __hash__(self) -> int:
+        # don't include _constraint in hash because it is mutable!
+        return super().__hash__()
+
+    def __str__(self) -> str:
+        if self.is_root:
+            return self._pretty_name
+        if self.is_direct_origin():
+            # adding version since this information is especially useful in debug output
+            parts = [p.strip() for p in self.base_pep_508_name.split("@", 1)]
+            return f"{parts[0]} ({self._pretty_constraint}) @ {parts[1]}"
+        return self.base_pep_508_name
+
+    def __repr__(self) -> str:
+        return f"<{self.__class__.__name__} {str(self)}>"
+
+
+def _make_file_or_dir_dep(
+    name: str,
+    path: Path,
+    base: Path | None = None,
+    extras: list[str] | None = None,
+) -> FileDependency | DirectoryDependency | None:
+    """
+    Helper function to create a file or directory dependency with the given arguments.
+    If path is not a file or directory that exists, `None` is returned.
+    """
+    from poetry.core.packages.directory_dependency import DirectoryDependency
+    from poetry.core.packages.file_dependency import FileDependency
+
+    _path = path
+    if not path.is_absolute() and base:
+        # a base path was specified, so we should respect that
+        _path = Path(base) / path
+
+    if _path.is_file():
+        return FileDependency(name, path, base=base, extras=extras)
+    elif _path.is_dir():
+        return DirectoryDependency(name, path, base=base, extras=extras)
+
+    return None
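
A short usage sketch of the PEP 508 round trip (the requirement string is an
arbitrary example; the exact formatting of the `to_pep_508()` output follows
the rules in `base_pep_508_name` above):

    from poetry.core.packages.dependency import Dependency

    dep = Dependency.create_from_pep_508(
        'requests[security]>=2.25.0 ; python_version >= "3.7"'
    )
    assert dep.name == "requests"
    assert dep.extras == frozenset({"security"})
    assert str(dep.constraint) == ">=2.25.0"
    assert 'python_version >= "3.7"' in dep.to_pep_508()
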
diff --git a/vendor/poetry-core/src/poetry/core/packages/dependency_group.py b/vendor/poetry-core/src/poetry/core/packages/dependency_group.py
new file mode 100644
index 00000000..9afa692e
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/packages/dependency_group.py
@@ -0,0 +1,57 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.dependency import Dependency
+
+
+MAIN_GROUP = "main"
+
+
+class DependencyGroup:
+    def __init__(self, name: str, optional: bool = False) -> None:
+        self._name: str = name
+        self._optional: bool = optional
+        self._dependencies: list[Dependency] = []
+
+    @property
+    def name(self) -> str:
+        return self._name
+
+    @property
+    def dependencies(self) -> list[Dependency]:
+        return self._dependencies
+
+    def is_optional(self) -> bool:
+        return self._optional
+
+    def add_dependency(self, dependency: Dependency) -> None:
+        self._dependencies.append(dependency)
+
+    def remove_dependency(self, name: str) -> None:
+        from packaging.utils import canonicalize_name
+
+        name = canonicalize_name(name)
+
+        dependencies = []
+        for dependency in self.dependencies:
+            if dependency.name == name:
+                continue
+
+            dependencies.append(dependency)
+
+        self._dependencies = dependencies
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, DependencyGroup):
+            return NotImplemented
+
+        return self._name == other.name and set(self._dependencies) == set(
+            other.dependencies
+        )
+
+    def __repr__(self) -> str:
+        cls = self.__class__.__name__
+        return f"{cls}({self._name}, optional={self._optional})"
diff --git a/vendor/poetry-core/src/poetry/core/packages/directory_dependency.py b/vendor/poetry-core/src/poetry/core/packages/directory_dependency.py
new file mode 100644
index 00000000..24ee0593
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/packages/directory_dependency.py
@@ -0,0 +1,95 @@
+from __future__ import annotations
+
+import functools
+
+from pathlib import Path
+from typing import Iterable
+
+from poetry.core.packages.dependency import Dependency
+from poetry.core.packages.utils.utils import is_python_project
+from poetry.core.packages.utils.utils import path_to_url
+from poetry.core.pyproject.toml import PyProjectTOML
+
+
+class DirectoryDependency(Dependency):
+    def __init__(
+        self,
+        name: str,
+        path: Path,
+        groups: Iterable[str] | None = None,
+        optional: bool = False,
+        base: Path | None = None,
+        develop: bool = False,
+        extras: Iterable[str] | None = None,
+    ) -> None:
+        self._path = path
+        self._base = base or Path.cwd()
+        self._full_path = path
+
+        if not self._path.is_absolute():
+            try:
+                self._full_path = self._base.joinpath(self._path).resolve()
+            except FileNotFoundError:
+                raise ValueError(f"Directory {self._path} does not exist")
+
+        self._develop = develop
+
+        if not self._full_path.exists():
+            raise ValueError(f"Directory {self._path} does not exist")
+
+        if self._full_path.is_file():
+            raise ValueError(f"{self._path} is a file, expected a directory")
+
+        if not is_python_project(self._full_path):
+            raise ValueError(
+                f"Directory {self._full_path} does not seem to be a Python package"
+            )
+
+        super().__init__(
+            name,
+            "*",
+            groups=groups,
+            optional=optional,
+            allows_prereleases=True,
+            source_type="directory",
+            source_url=self._full_path.as_posix(),
+            extras=extras,
+        )
+
+        # cache this function to avoid multiple IO reads and parsing
+        self.supports_poetry = functools.lru_cache(maxsize=1)(self._supports_poetry)
+
+    @property
+    def path(self) -> Path:
+        return self._path
+
+    @property
+    def full_path(self) -> Path:
+        return self._full_path
+
+    @property
+    def base(self) -> Path:
+        return self._base
+
+    @property
+    def develop(self) -> bool:
+        return self._develop
+
+    def _supports_poetry(self) -> bool:
+        return PyProjectTOML(self._full_path / "pyproject.toml").is_poetry_project()
+
+    def is_directory(self) -> bool:
+        return True
+
+    @property
+    def base_pep_508_name(self) -> str:
+        requirement = self.pretty_name
+
+        if self.extras:
+            extras = ",".join(sorted(self.extras))
+            requirement += f"[{extras}]"
+
+        path = path_to_url(self.path) if self.path.is_absolute() else self.path
+        requirement += f" @ {path}"
+
+        return requirement
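
A usage sketch, assuming `./libs/my-lib` exists and contains a `pyproject.toml`
or `setup.py` (the constructor validates all of this eagerly); note that a
relative path stays relative in the PEP 508 form:

    from pathlib import Path

    from poetry.core.packages.directory_dependency import DirectoryDependency

    dep = DirectoryDependency("my-lib", Path("libs/my-lib"), develop=True)
    assert dep.is_directory()
    print(dep.base_pep_508_name)  # my-lib @ libs/my-lib (on POSIX)
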
diff --git a/vendor/poetry-core/src/poetry/core/packages/file_dependency.py b/vendor/poetry-core/src/poetry/core/packages/file_dependency.py
new file mode 100644
index 00000000..5e654243
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/packages/file_dependency.py
@@ -0,0 +1,84 @@
+from __future__ import annotations
+
+import hashlib
+import io
+
+from pathlib import Path
+from typing import Iterable
+
+from poetry.core.packages.dependency import Dependency
+from poetry.core.packages.utils.utils import path_to_url
+
+
+class FileDependency(Dependency):
+    def __init__(
+        self,
+        name: str,
+        path: Path,
+        groups: Iterable[str] | None = None,
+        optional: bool = False,
+        base: Path | None = None,
+        extras: Iterable[str] | None = None,
+    ) -> None:
+        self._path = path
+        self._base = base or Path.cwd()
+        self._full_path = path
+
+        if not self._path.is_absolute():
+            try:
+                self._full_path = self._base.joinpath(self._path).resolve()
+            except FileNotFoundError:
+                raise ValueError(f"Directory {self._path} does not exist")
+
+        if not self._full_path.exists():
+            raise ValueError(f"File {self._path} does not exist")
+
+        if self._full_path.is_dir():
+            raise ValueError(f"{self._path} is a directory, expected a file")
+
+        super().__init__(
+            name,
+            "*",
+            groups=groups,
+            optional=optional,
+            allows_prereleases=True,
+            source_type="file",
+            source_url=self._full_path.as_posix(),
+            extras=extras,
+        )
+
+    @property
+    def base(self) -> Path:
+        return self._base
+
+    @property
+    def path(self) -> Path:
+        return self._path
+
+    @property
+    def full_path(self) -> Path:
+        return self._full_path
+
+    def is_file(self) -> bool:
+        return True
+
+    def hash(self, hash_name: str = "sha256") -> str:
+        h = hashlib.new(hash_name)
+        with self._full_path.open("rb") as fp:
+            for content in iter(lambda: fp.read(io.DEFAULT_BUFFER_SIZE), b""):
+                h.update(content)
+
+        return h.hexdigest()
+
+    @property
+    def base_pep_508_name(self) -> str:
+        requirement = self.pretty_name
+
+        if self.extras:
+            extras = ",".join(sorted(self.extras))
+            requirement += f"[{extras}]"
+
+        path = path_to_url(self.path) if self.path.is_absolute() else self.path
+        requirement += f" @ {path}"
+
+        return requirement
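
A brief sketch, assuming the archive path exists on disk; `hash()` streams the
file in `io.DEFAULT_BUFFER_SIZE` chunks, so large archives are never read into
memory at once:

    from pathlib import Path

    from poetry.core.packages.file_dependency import FileDependency

    dep = FileDependency("my-dist", Path("dist/my_dist-1.0-py3-none-any.whl"))
    print(dep.hash("sha256"))  # hex digest of the file's contents
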
diff --git a/vendor/poetry-core/src/poetry/core/packages/package.py b/vendor/poetry-core/src/poetry/core/packages/package.py
new file mode 100644
index 00000000..aa04880d
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/packages/package.py
@@ -0,0 +1,600 @@
+from __future__ import annotations
+
+import copy
+import re
+
+from contextlib import contextmanager
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Collection
+from typing import Iterable
+from typing import Iterator
+from typing import TypeVar
+from typing import cast
+
+from poetry.core.packages.dependency_group import MAIN_GROUP
+from poetry.core.packages.specification import PackageSpecification
+from poetry.core.packages.utils.utils import create_nested_marker
+from poetry.core.packages.utils.utils import get_python_constraint_from_marker
+from poetry.core.semver.helpers import parse_constraint
+from poetry.core.version.markers import parse_marker
+
+
+if TYPE_CHECKING:
+    from packaging.utils import NormalizedName
+
+    from poetry.core.packages.dependency import Dependency
+    from poetry.core.packages.dependency_group import DependencyGroup
+    from poetry.core.semver.version import Version
+    from poetry.core.semver.version_constraint import VersionConstraint
+    from poetry.core.spdx.license import License
+    from poetry.core.version.markers import BaseMarker
+
+    T = TypeVar("T", bound="Package")
+
+AUTHOR_REGEX = re.compile(r"(?u)^(?P<name>[- .,\w\d'’\"()&]+)(?: <(?P<email>.+?)>)?$")
+
+
+class Package(PackageSpecification):
+    AVAILABLE_PYTHONS = {
+        "2",
+        "2.7",
+        "3",
+        "3.4",
+        "3.5",
+        "3.6",
+        "3.7",
+        "3.8",
+        "3.9",
+        "3.10",
+    }
+
+    def __init__(
+        self,
+        name: str,
+        version: str | Version,
+        pretty_version: str | None = None,
+        source_type: str | None = None,
+        source_url: str | None = None,
+        source_reference: str | None = None,
+        source_resolved_reference: str | None = None,
+        source_subdirectory: str | None = None,
+        features: Iterable[str] | None = None,
+        develop: bool = False,
+        yanked: str | bool = False,
+    ) -> None:
+        """
+        Creates a new in memory package.
+        """
+        from poetry.core.version.markers import AnyMarker
+
+        super().__init__(
+            name,
+            source_type=source_type,
+            source_url=source_url,
+            source_reference=source_reference,
+            source_resolved_reference=source_resolved_reference,
+            source_subdirectory=source_subdirectory,
+            features=features,
+        )
+
+        self._set_version(version, pretty_version)
+
+        self.description = ""
+
+        self._authors: list[str] = []
+        self._maintainers: list[str] = []
+
+        self.homepage: str | None = None
+        self.repository_url: str | None = None
+        self.documentation_url: str | None = None
+        self.keywords: list[str] = []
+        self._license: License | None = None
+        self.readmes: tuple[Path, ...] = ()
+
+        self.extras: dict[str, list[Dependency]] = {}
+
+        self._dependency_groups: dict[str, DependencyGroup] = {}
+
+        # For compatibility with previous versions, we keep the category
+        self.category = "main"
+        self.files: list[dict[str, str]] = []
+        self.optional = False
+
+        self.classifiers: list[str] = []
+
+        self._python_versions = "*"
+        self._python_constraint = parse_constraint("*")
+        self._python_marker: BaseMarker = AnyMarker()
+
+        self.platform = None
+        self.marker: BaseMarker = AnyMarker()
+
+        self.root_dir: Path | None = None
+
+        self.develop = develop
+
+        self._yanked = yanked
+
+    @property
+    def name(self) -> NormalizedName:
+        return self._name
+
+    @property
+    def pretty_name(self) -> str:
+        return self._pretty_name
+
+    @property
+    def version(self) -> Version:
+        return self._version
+
+    @property
+    def pretty_version(self) -> str:
+        return self._pretty_version
+
+    @property
+    def unique_name(self) -> str:
+        if self.is_root():
+            return self._name
+
+        return self.complete_name + "-" + self._version.text
+
+    @property
+    def pretty_string(self) -> str:
+        return self.pretty_name + " " + self.pretty_version
+
+    @property
+    def full_pretty_version(self) -> str:
+        if self.source_type in ["file", "directory", "url"]:
+            return f"{self._pretty_version} {self.source_url}"
+
+        if self.source_type not in ["hg", "git"]:
+            return self._pretty_version
+
+        ref: str | None
+        if self.source_resolved_reference and len(self.source_resolved_reference) == 40:
+            ref = self.source_resolved_reference[0:7]
+            return f"{self._pretty_version} {ref}"
+
+        # if source reference is a sha1 hash -- truncate
+        if self.source_reference and len(self.source_reference) == 40:
+            return f"{self._pretty_version} {self.source_reference[0:7]}"
+
+        ref = self._source_resolved_reference or self._source_reference
+        return f"{self._pretty_version} {ref}"
+
+    @property
+    def authors(self) -> list[str]:
+        return self._authors
+
+    @property
+    def author_name(self) -> str | None:
+        return self._get_author()["name"]
+
+    @property
+    def author_email(self) -> str | None:
+        return self._get_author()["email"]
+
+    @property
+    def maintainers(self) -> list[str]:
+        return self._maintainers
+
+    @property
+    def maintainer_name(self) -> str | None:
+        return self._get_maintainer()["name"]
+
+    @property
+    def maintainer_email(self) -> str | None:
+        return self._get_maintainer()["email"]
+
+    @property
+    def requires(self) -> list[Dependency]:
+        """
+        Returns the main dependencies
+        """
+        if not self._dependency_groups or MAIN_GROUP not in self._dependency_groups:
+            return []
+
+        return self._dependency_groups[MAIN_GROUP].dependencies
+
+    @property
+    def all_requires(
+        self,
+    ) -> list[Dependency]:
+        """
+        Returns the main dependencies and group dependencies.
+        """
+        return [
+            dependency
+            for group in self._dependency_groups.values()
+            for dependency in group.dependencies
+        ]
+
+    def _set_version(
+        self, version: str | Version, pretty_version: str | None = None
+    ) -> None:
+        from poetry.core.semver.version import Version
+
+        if not isinstance(version, Version):
+            self._version = Version.parse(version)
+            self._pretty_version = pretty_version or version
+        else:
+            self._version = version
+            self._pretty_version = pretty_version or self._version.text
+
+    def _get_author(self) -> dict[str, str | None]:
+        if not self._authors:
+            return {"name": None, "email": None}
+
+        m = AUTHOR_REGEX.match(self._authors[0])
+
+        if m is None:
+            raise ValueError(
+                "Invalid author string. Must be in the format: "
+                "John Smith "
+            )
+
+        name = m.group("name")
+        email = m.group("email")
+
+        return {"name": name, "email": email}
+
+    def _get_maintainer(self) -> dict[str, str | None]:
+        if not self._maintainers:
+            return {"name": None, "email": None}
+
+        m = AUTHOR_REGEX.match(self._maintainers[0])
+
+        if m is None:
+            raise ValueError(
+                "Invalid maintainer string. Must be in the format: "
+                "John Smith "
+            )
+
+        name = m.group("name")
+        email = m.group("email")
+
+        return {"name": name, "email": email}
+
+    @property
+    def python_versions(self) -> str:
+        return self._python_versions
+
+    @python_versions.setter
+    def python_versions(self, value: str) -> None:
+        self._python_versions = value
+        constraint = parse_constraint(value)
+        self._python_marker = parse_marker(
+            create_nested_marker("python_version", constraint)
+        )
+        self._python_constraint = get_python_constraint_from_marker(self._python_marker)
+
+    @property
+    def python_constraint(self) -> VersionConstraint:
+        return self._python_constraint
+
+    @property
+    def python_marker(self) -> BaseMarker:
+        return self._python_marker
+
+    @property
+    def license(self) -> License | None:
+        return self._license
+
+    @license.setter
+    def license(self, value: str | License | None) -> None:
+        from poetry.core.spdx.helpers import license_by_id
+        from poetry.core.spdx.license import License
+
+        if value is None or isinstance(value, License):
+            self._license = value
+        else:
+            self._license = license_by_id(value)
+
+    @property
+    def all_classifiers(self) -> list[str]:
+        from poetry.core.semver.version import Version
+
+        classifiers = copy.copy(self.classifiers)
+
+        # Automatically set python classifiers
+        if self.python_versions == "*":
+            python_constraint = parse_constraint("~2.7 || ^3.4")
+        else:
+            python_constraint = self.python_constraint
+
+        python_classifier_prefix = "Programming Language :: Python"
+        python_classifiers = []
+
+        # we sort python versions by sorting an int tuple of (major, minor) version
+        # to ensure we sort 3.10 after 3.9
+        for version in sorted(
+            self.AVAILABLE_PYTHONS, key=lambda x: tuple(map(int, x.split(".")))
+        ):
+            if len(version) == 1:
+                constraint = parse_constraint(version + ".*")
+            else:
+                constraint = Version.parse(version)
+
+            if python_constraint.allows_any(constraint):
+                classifier = f"{python_classifier_prefix} :: {version}"
+                if classifier not in python_classifiers:
+                    python_classifiers.append(classifier)
+
+        # Automatically set license classifiers
+        if self.license:
+            classifiers.append(self.license.classifier)
+
+        # Sort classifiers and insert python classifiers at the right location. We do
+        # it like this so that 3.10 is sorted after 3.9.
+        sorted_classifiers = []
+        python_classifiers_inserted = False
+        for classifier in sorted(set(classifiers)):
+            if (
+                not python_classifiers_inserted
+                and classifier > python_classifier_prefix
+            ):
+                sorted_classifiers.extend(python_classifiers)
+                python_classifiers_inserted = True
+            sorted_classifiers.append(classifier)
+
+        if not python_classifiers_inserted:
+            sorted_classifiers.extend(python_classifiers)
+
+        return sorted_classifiers
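+
+    # [Editor's note] Illustrative aside, not part of upstream poetry-core: the
+    # (major, minor) int-tuple key used above is what keeps "3.10" after "3.9":
+    #
+    #   >>> sorted(["3.10", "3.9"], key=lambda v: tuple(map(int, v.split("."))))
+    #   ['3.9', '3.10']
+    #
+    # whereas a plain lexicographic sort would yield ['3.10', '3.9'].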
+
+    @property
+    def urls(self) -> dict[str, str]:
+        urls = {}
+
+        if self.homepage:
+            urls["Homepage"] = self.homepage
+
+        if self.repository_url:
+            urls["Repository"] = self.repository_url
+
+        if self.documentation_url:
+            urls["Documentation"] = self.documentation_url
+
+        return urls
+
+    @property
+    def readme(self) -> Path | None:
+        import warnings
+
+        warnings.warn(
+            "`readme` is deprecated: you are getting only the first readme file. Please"
+            " use the plural form `readmes`.",
+            DeprecationWarning,
+        )
+        return next(iter(self.readmes), None)
+
+    @readme.setter
+    def readme(self, path: Path) -> None:
+        import warnings
+
+        warnings.warn(
+            "`readme` is deprecated. Please assign a tuple to the plural form"
+            " `readmes`.",
+            DeprecationWarning,
+        )
+        self.readmes = (path,)
+
+    @property
+    def yanked(self) -> bool:
+        return isinstance(self._yanked, str) or bool(self._yanked)
+
+    @property
+    def yanked_reason(self) -> str:
+        if isinstance(self._yanked, str):
+            return self._yanked
+        return ""
+
+    def is_prerelease(self) -> bool:
+        return self._version.is_unstable()
+
+    def is_root(self) -> bool:
+        return False
+
+    def dependency_group_names(self, include_optional: bool = False) -> set[str]:
+        return {
+            name
+            for name, group in self._dependency_groups.items()
+            if not group.is_optional() or include_optional
+        }
+
+    def add_dependency_group(self, group: DependencyGroup) -> None:
+        self._dependency_groups[group.name] = group
+
+    def has_dependency_group(self, name: str) -> bool:
+        return name in self._dependency_groups
+
+    def dependency_group(self, name: str) -> DependencyGroup:
+        if not self.has_dependency_group(name):
+            raise ValueError(f'The dependency group "{name}" does not exist.')
+
+        return self._dependency_groups[name]
+
+    def add_dependency(
+        self,
+        dependency: Dependency,
+    ) -> Dependency:
+        from poetry.core.packages.dependency_group import DependencyGroup
+
+        for group_name in dependency.groups:
+            if group_name not in self._dependency_groups:
+                # Dynamically add the dependency group
+                self.add_dependency_group(DependencyGroup(group_name))
+
+            self._dependency_groups[group_name].add_dependency(dependency)
+
+        return dependency
+
+    def without_dependency_groups(self: T, groups: Collection[str]) -> T:
+        """
+        Returns a clone of the package with the given dependency groups excluded.
+        """
+        package = self.clone()
+
+        for group_name in groups:
+            if group_name in package._dependency_groups:
+                del package._dependency_groups[group_name]
+
+        return package
+
+    def without_optional_dependency_groups(self: T) -> T:
+        """
+        Returns a clone of the package without optional dependency groups.
+        """
+        package = self.clone()
+
+        for group_name, group in self._dependency_groups.items():
+            if group.is_optional():
+                del package._dependency_groups[group_name]
+
+        return package
+
+    def with_dependency_groups(
+        self: T, groups: Collection[str], only: bool = False
+    ) -> T:
+        """
+        Returns a clone of the package with the given dependency groups opted in.
+
+        Note that it will return all dependencies across all non-optional
+        groups, plus those from the given optional groups.
+
+        If `only` is set to True, then only the given groups will be selected.
+        """
+        package = self.clone()
+
+        for group_name, group in self._dependency_groups.items():
+            if (only or group.is_optional()) and group_name not in groups:
+                del package._dependency_groups[group_name]
+
+        return package
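+
+    # [Editor's note] Hedged sketch, not upstream code: assuming the
+    # DependencyGroup signature used elsewhere in this tree, opting in to an
+    # optional group works roughly like this (names are hypothetical):
+    #
+    #   >>> pkg = Package("demo", "1.0.0")
+    #   >>> pkg.add_dependency_group(DependencyGroup("dev", optional=True))
+    #   >>> pkg.without_optional_dependency_groups().has_dependency_group("dev")
+    #   False
+    #   >>> pkg.with_dependency_groups(["dev"]).has_dependency_group("dev")
+    #   True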
+
+    def to_dependency(self) -> Dependency:
+        from pathlib import Path
+
+        from poetry.core.packages.dependency import Dependency
+        from poetry.core.packages.directory_dependency import DirectoryDependency
+        from poetry.core.packages.file_dependency import FileDependency
+        from poetry.core.packages.url_dependency import URLDependency
+        from poetry.core.packages.vcs_dependency import VCSDependency
+
+        dep: Dependency
+        if self.source_type == "directory":
+            dep = DirectoryDependency(
+                self._name,
+                Path(cast(str, self._source_url)),
+                groups=list(self._dependency_groups.keys()),
+                optional=self.optional,
+                base=self.root_dir,
+                develop=self.develop,
+                extras=self.features,
+            )
+        elif self.source_type == "file":
+            dep = FileDependency(
+                self._name,
+                Path(cast(str, self._source_url)),
+                groups=list(self._dependency_groups.keys()),
+                optional=self.optional,
+                base=self.root_dir,
+                extras=self.features,
+            )
+        elif self.source_type == "url":
+            dep = URLDependency(
+                self._name,
+                cast(str, self._source_url),
+                groups=list(self._dependency_groups.keys()),
+                optional=self.optional,
+                extras=self.features,
+            )
+        elif self.source_type == "git":
+            dep = VCSDependency(
+                self._name,
+                self.source_type,
+                cast(str, self.source_url),
+                rev=self.source_reference,
+                resolved_rev=self.source_resolved_reference,
+                directory=self.source_subdirectory,
+                groups=list(self._dependency_groups.keys()),
+                optional=self.optional,
+                develop=self.develop,
+                extras=self.features,
+            )
+        else:
+            dep = Dependency(self._name, self._version, extras=self.features)
+
+        if not self.marker.is_any():
+            dep.marker = self.marker
+
+        if not self.python_constraint.is_any():
+            dep.python_versions = self.python_versions
+
+        if not self.is_direct_origin():
+            return dep
+
+        return dep.with_constraint(self._version)
+
+    @contextmanager
+    def with_python_versions(self, python_versions: str) -> Iterator[None]:
+        original_python_versions = self.python_versions
+
+        self.python_versions = python_versions
+
+        yield
+
+        self.python_versions = original_python_versions
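+
+    # [Editor's note] Illustrative usage, not upstream code: the context
+    # manager temporarily overrides the Python constraint and restores it
+    # on exit (Package defaults python_versions to "*").
+    #
+    #   >>> pkg = Package("demo", "1.0.0")
+    #   >>> with pkg.with_python_versions(">=3.8"):
+    #   ...     pkg.python_versions
+    #   '>=3.8'
+    #   >>> pkg.python_versions
+    #   '*'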
+
+    def satisfies(
+        self, dependency: Dependency, ignore_source_type: bool = False
+    ) -> bool:
+        """
+        Helper method to check if this package satisfies a given dependency.
+
+        This is determined by assessing if this instance provides the package and
+        features specified by the given dependency. Further, version and source
+        types are checked.
+        """
+        if not self.provides(dependency) or not dependency.constraint.allows(
+            self.version
+        ):
+            return False
+
+        return ignore_source_type or self.is_same_source_as(dependency)
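+
+    # [Editor's note] Hedged example, not upstream code: with no explicit
+    # source on either side, satisfaction reduces to a name/feature match plus
+    # a version check (Dependency from this vendored tree is assumed).
+    #
+    #   >>> Package("requests", "2.28.1").satisfies(Dependency("requests", ">=2.0"))
+    #   True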
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Package):
+            return NotImplemented
+
+        return super().__eq__(other) and self._version == other.version
+
+    def __hash__(self) -> int:
+        return super().__hash__() ^ hash(self._version)
+
+    def __str__(self) -> str:
+        return f"{self.complete_name} ({self.full_pretty_version})"
+
+    def __repr__(self) -> str:
+        args = [repr(self._name), repr(self._version.text)]
+
+        if self._features:
+            args.append(f"features={repr(self._features)}")
+
+        if self._source_type:
+            args.append(f"source_type={repr(self._source_type)}")
+            args.append(f"source_url={repr(self._source_url)}")
+
+            if self._source_reference:
+                args.append(f"source_reference={repr(self._source_reference)}")
+
+            if self._source_resolved_reference:
+                args.append(
+                    f"source_resolved_reference={repr(self._source_resolved_reference)}"
+                )
+            if self._source_subdirectory:
+                args.append(f"source_subdirectory={repr(self._source_subdirectory)}")
+
+        args_str = ", ".join(args)
+        return f"Package({args_str})"
diff --git a/vendor/poetry-core/src/poetry/core/packages/project_package.py b/vendor/poetry-core/src/poetry/core/packages/project_package.py
new file mode 100644
index 00000000..1d6f5505
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/packages/project_package.py
@@ -0,0 +1,90 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from typing import Any
+
+from poetry.core.semver.helpers import parse_constraint
+from poetry.core.version.markers import parse_marker
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.dependency import Dependency
+    from poetry.core.semver.version import Version
+
+from poetry.core.packages.package import Package
+from poetry.core.packages.utils.utils import create_nested_marker
+
+
+class ProjectPackage(Package):
+    def __init__(
+        self,
+        name: str,
+        version: str | Version,
+        pretty_version: str | None = None,
+    ) -> None:
+        super().__init__(name, version, pretty_version)
+
+        self.build_config: dict[str, Any] = {}
+        self.packages: list[dict[str, Any]] = []
+        self.include: list[dict[str, Any]] = []
+        self.exclude: list[dict[str, Any]] = []
+        self.custom_urls: dict[str, str] = {}
+
+        if self._python_versions == "*":
+            self._python_constraint = parse_constraint("~2.7 || >=3.4")
+
+    @property
+    def build_script(self) -> str | None:
+        return self.build_config.get("script")
+
+    def is_root(self) -> bool:
+        return True
+
+    def to_dependency(self) -> Dependency:
+        dependency = super().to_dependency()
+
+        dependency.is_root = True
+
+        return dependency
+
+    @property
+    def python_versions(self) -> str:
+        return self._python_versions
+
+    @python_versions.setter
+    def python_versions(self, value: str) -> None:
+        self._python_versions = value
+
+        if value == "*":
+            value = "~2.7 || >=3.4"
+
+        self._python_constraint = parse_constraint(value)
+        self._python_marker = parse_marker(
+            create_nested_marker("python_version", self._python_constraint)
+        )
+
+    @property
+    def version(self) -> Version:
+        # override version to make it settable
+        return super().version
+
+    @version.setter
+    def version(self, value: str | Version) -> None:
+        self._set_version(value)
+
+    @property
+    def urls(self) -> dict[str, str]:
+        urls = super().urls
+
+        urls.update(self.custom_urls)
+
+        return urls
+
+    def __hash__(self) -> int:
+        # The parent Package class's __hash__ incorporates the version because
+        # a Package's version is immutable. But a ProjectPackage's version is
+        # mutable. So call Package's parent hash function.
+        return super(Package, self).__hash__()
+
+    def build_should_generate_setup(self) -> bool:
+        return self.build_config.get("generate-setup-file", True)
diff --git a/vendor/poetry-core/src/poetry/core/packages/specification.py b/vendor/poetry-core/src/poetry/core/packages/specification.py
new file mode 100644
index 00000000..8ff0dbb3
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/packages/specification.py
@@ -0,0 +1,198 @@
+from __future__ import annotations
+
+import copy
+
+from typing import TYPE_CHECKING
+from typing import Iterable
+from typing import TypeVar
+
+
+if TYPE_CHECKING:
+    from packaging.utils import NormalizedName
+
+    T = TypeVar("T", bound="PackageSpecification")
+
+
+class PackageSpecification:
+    def __init__(
+        self,
+        name: str,
+        source_type: str | None = None,
+        source_url: str | None = None,
+        source_reference: str | None = None,
+        source_resolved_reference: str | None = None,
+        source_subdirectory: str | None = None,
+        features: Iterable[str] | None = None,
+    ) -> None:
+        from packaging.utils import canonicalize_name
+
+        self._pretty_name = name
+        self._name = canonicalize_name(name)
+        self._source_type = source_type
+        self._source_url = source_url
+        self._source_reference = source_reference
+        self._source_resolved_reference = source_resolved_reference
+        self._source_subdirectory = source_subdirectory
+
+        if not features:
+            features = []
+
+        self._features = frozenset(features)
+
+    @property
+    def name(self) -> NormalizedName:
+        return self._name
+
+    @property
+    def pretty_name(self) -> str:
+        return self._pretty_name
+
+    @property
+    def complete_name(self) -> str:
+        name: str = self._name
+
+        if self._features:
+            features = ",".join(sorted(self._features))
+            name = f"{name}[{features}]"
+
+        return name
+
+    @property
+    def source_type(self) -> str | None:
+        return self._source_type
+
+    @property
+    def source_url(self) -> str | None:
+        return self._source_url
+
+    @property
+    def source_reference(self) -> str | None:
+        return self._source_reference
+
+    @property
+    def source_resolved_reference(self) -> str | None:
+        return self._source_resolved_reference
+
+    @property
+    def source_subdirectory(self) -> str | None:
+        return self._source_subdirectory
+
+    @property
+    def features(self) -> frozenset[str]:
+        return self._features
+
+    def is_direct_origin(self) -> bool:
+        return self._source_type in [
+            "directory",
+            "file",
+            "url",
+            "git",
+        ]
+
+    def provides(self, other: PackageSpecification) -> bool:
+        """
+        Helper method to determine if this package provides the given specification.
+
+        This determination is made to be true, if the names are the same and this
+        package provides all features required by the other specification.
+
+        Source type checks are explicitly ignored here as this is not of interest.
+        """
+        return self.name == other.name and self.features.issuperset(other.features)
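+
+    # [Editor's note] Illustrative only: names are canonicalized, and the
+    # provider must carry at least the features the other spec asks for.
+    #
+    #   >>> PackageSpecification("Django", features=["bcrypt"]).provides(
+    #   ...     PackageSpecification("django"))
+    #   True
+    #   >>> PackageSpecification("django").provides(
+    #   ...     PackageSpecification("django", features=["bcrypt"]))
+    #   False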
+
+    def is_same_source_as(self, other: PackageSpecification) -> bool:
+        if self._source_type != other.source_type:
+            return False
+
+        if not self._source_type:
+            # both packages are of source type None
+            # no need to check further
+            return True
+
+        if (
+            self._source_url or other.source_url
+        ) and self._source_url != other.source_url:
+            return False
+
+        if (
+            self._source_subdirectory or other.source_subdirectory
+        ) and self._source_subdirectory != other.source_subdirectory:
+            return False
+
+        # We check the resolved reference first:
+        # if they match we assume equality regardless
+        # of their source reference.
+        # This is important when comparing a resolved branch VCS
+        # dependency to a direct commit reference VCS dependency
+        if (
+            self._source_resolved_reference
+            and other.source_resolved_reference
+            and self._source_resolved_reference == other.source_resolved_reference
+        ):
+            return True
+
+        if self._source_reference or other.source_reference:
+            # special handling for packages with references
+            if not self._source_reference or not other.source_reference:
+                # case: one reference is defined and is non-empty, but other is not
+                return False
+
+            if not (
+                self._source_reference == other.source_reference
+                or self._source_reference.startswith(other.source_reference)
+                or other.source_reference.startswith(self._source_reference)
+            ):
+                # case: both references defined, but one is not equal to or a short
+                # representation of the other
+                return False
+
+            if (
+                self._source_resolved_reference
+                and other.source_resolved_reference
+                and self._source_resolved_reference != other.source_resolved_reference
+            ):
+                return False
+
+        return True
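+
+    # [Editor's note] Hedged sketch, not upstream code: thanks to the
+    # startswith() checks above, a short git ref matches its longer form when
+    # the source type and URL agree (values below are hypothetical).
+    #
+    #   >>> a = PackageSpecification("demo", source_type="git",
+    #   ...     source_url="https://example.com/demo.git",
+    #   ...     source_reference="abc1234")
+    #   >>> b = PackageSpecification("demo", source_type="git",
+    #   ...     source_url="https://example.com/demo.git",
+    #   ...     source_reference="abc1234def5678")
+    #   >>> a.is_same_source_as(b)
+    #   True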
+
+    def is_same_package_as(self, other: PackageSpecification) -> bool:
+        if other.complete_name != self.complete_name:
+            return False
+
+        return self.is_same_source_as(other)
+
+    def clone(self: T) -> T:
+        return copy.deepcopy(self)
+
+    def with_features(self: T, features: Iterable[str]) -> T:
+        package = self.clone()
+
+        package._features = frozenset(features)
+
+        return package
+
+    def without_features(self: T) -> T:
+        return self.with_features([])
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, PackageSpecification):
+            return NotImplemented
+        return self.is_same_package_as(other)
+
+    def __hash__(self) -> int:
+        result = hash(self.complete_name)  # complete_name includes features
+
+        if self._source_type:
+            # Don't include _source_reference and _source_resolved_reference in hash
+            # because two specs can be equal even if these attributes are not equal.
+            # (They must still meet certain conditions. See is_same_source_as().)
+            result ^= (
+                hash(self._source_type)
+                ^ hash(self._source_url)
+                ^ hash(self._source_subdirectory)
+            )
+
+        return result
+
+    def __str__(self) -> str:
+        raise NotImplementedError()
diff --git a/vendor/poetry-core/src/poetry/core/packages/url_dependency.py b/vendor/poetry-core/src/poetry/core/packages/url_dependency.py
new file mode 100644
index 00000000..fd020d59
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/packages/url_dependency.py
@@ -0,0 +1,52 @@
+from __future__ import annotations
+
+from typing import Iterable
+from urllib.parse import urlparse
+
+from poetry.core.packages.dependency import Dependency
+
+
+class URLDependency(Dependency):
+    def __init__(
+        self,
+        name: str,
+        url: str,
+        groups: Iterable[str] | None = None,
+        optional: bool = False,
+        extras: Iterable[str] | None = None,
+    ) -> None:
+        self._url = url
+
+        parsed = urlparse(url)
+        if not parsed.scheme or not parsed.netloc:
+            raise ValueError(f"{url} does not seem like a valid url")
+
+        super().__init__(
+            name,
+            "*",
+            groups=groups,
+            optional=optional,
+            allows_prereleases=True,
+            source_type="url",
+            source_url=self._url,
+            extras=extras,
+        )
+
+    @property
+    def url(self) -> str:
+        return self._url
+
+    @property
+    def base_pep_508_name(self) -> str:
+        requirement = self.pretty_name
+
+        if self.extras:
+            extras = ",".join(sorted(self.extras))
+            requirement += f"[{extras}]"
+
+        requirement += f" @ {self._url}"
+
+        return requirement
+
+    def is_url(self) -> bool:
+        return True
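+
+    # [Editor's note] Illustrative only (hypothetical URL): the PEP 508 form
+    # produced above is "name[extras] @ url".
+    #
+    #   >>> URLDependency("demo", "https://example.com/demo-1.0-py3-none-any.whl",
+    #   ...               extras=["foo"]).base_pep_508_name
+    #   'demo[foo] @ https://example.com/demo-1.0-py3-none-any.whl'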
diff --git a/vendor/poetry/poetry/console/commands/debug/__init__.py b/vendor/poetry-core/src/poetry/core/packages/utils/__init__.py
similarity index 100%
rename from vendor/poetry/poetry/console/commands/debug/__init__.py
rename to vendor/poetry-core/src/poetry/core/packages/utils/__init__.py
diff --git a/vendor/poetry-core/src/poetry/core/packages/utils/link.py b/vendor/poetry-core/src/poetry/core/packages/utils/link.py
new file mode 100644
index 00000000..c6d8277d
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/packages/utils/link.py
@@ -0,0 +1,232 @@
+from __future__ import annotations
+
+import posixpath
+import re
+import urllib.parse as urlparse
+
+from poetry.core.packages.utils.utils import path_to_url
+from poetry.core.packages.utils.utils import splitext
+
+
+class Link:
+    def __init__(
+        self,
+        url: str,
+        requires_python: str | None = None,
+        metadata: str | bool | None = None,
+        yanked: str | bool = False,
+    ) -> None:
+        """
+        Object representing a parsed link from https://pypi.python.org/simple/*
+
+        url:
+            url of the resource pointed to (href of the link)
+        requires_python:
+            String containing the `Requires-Python` metadata field, specified
+            in PEP 345. This may be specified by a data-requires-python
+            attribute in the HTML link tag, as described in PEP 503.
+        metadata:
+            String of the syntax `<name>=<value>` representing the hash
+            of the Core Metadata file. This may be specified by a
+            data-dist-info-metadata attribute in the HTML link tag, as described
+            in PEP 658.
+        yanked:
+            False, if the data-yanked attribute is not present.
+            A string, if the data-yanked attribute has a string value.
+            True, if the data-yanked attribute is present but has no value.
+            According to PEP 592.
+        """
+
+        # url can be a UNC windows share
+        if url.startswith("\\\\"):
+            url = path_to_url(url)
+
+        self.url = url
+        self.requires_python = requires_python if requires_python else None
+
+        if isinstance(metadata, str):
+            metadata = {"true": True, "": False, "false": False}.get(
+                metadata.strip().lower(), metadata
+            )
+
+        self._metadata = metadata
+        self._yanked = yanked
+
+    def __str__(self) -> str:
+        if self.requires_python:
+            rp = f" (requires-python:{self.requires_python})"
+        else:
+            rp = ""
+
+        return f"{self.url}{rp}"
+
+    def __repr__(self) -> str:
+        return f""
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Link):
+            return NotImplemented
+        return self.url == other.url
+
+    def __ne__(self, other: object) -> bool:
+        if not isinstance(other, Link):
+            return NotImplemented
+        return self.url != other.url
+
+    def __lt__(self, other: object) -> bool:
+        if not isinstance(other, Link):
+            return NotImplemented
+        return self.url < other.url
+
+    def __le__(self, other: object) -> bool:
+        if not isinstance(other, Link):
+            return NotImplemented
+        return self.url <= other.url
+
+    def __gt__(self, other: object) -> bool:
+        if not isinstance(other, Link):
+            return NotImplemented
+        return self.url > other.url
+
+    def __ge__(self, other: object) -> bool:
+        if not isinstance(other, Link):
+            return NotImplemented
+        return self.url >= other.url
+
+    def __hash__(self) -> int:
+        return hash(self.url)
+
+    @property
+    def filename(self) -> str:
+        _, netloc, path, _, _ = urlparse.urlsplit(self.url)
+        name = posixpath.basename(path.rstrip("/")) or netloc
+        name = urlparse.unquote(name)
+
+        return name
+
+    @property
+    def scheme(self) -> str:
+        return urlparse.urlsplit(self.url)[0]
+
+    @property
+    def netloc(self) -> str:
+        return urlparse.urlsplit(self.url)[1]
+
+    @property
+    def path(self) -> str:
+        return urlparse.unquote(urlparse.urlsplit(self.url)[2])
+
+    def splitext(self) -> tuple[str, str]:
+        return splitext(posixpath.basename(self.path.rstrip("/")))
+
+    @property
+    def ext(self) -> str:
+        return self.splitext()[1]
+
+    @property
+    def url_without_fragment(self) -> str:
+        scheme, netloc, path, query, fragment = urlparse.urlsplit(self.url)
+        return urlparse.urlunsplit((scheme, netloc, path, query, None))
+
+    _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)")
+
+    @property
+    def egg_fragment(self) -> str | None:
+        match = self._egg_fragment_re.search(self.url)
+        if not match:
+            return None
+        return match.group(1)
+
+    _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)")
+
+    @property
+    def subdirectory_fragment(self) -> str | None:
+        match = self._subdirectory_fragment_re.search(self.url)
+        if not match:
+            return None
+        return match.group(1)
+
+    _hash_re = re.compile(r"(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)")
+
+    @property
+    def has_metadata(self) -> bool:
+        if self._metadata is None:
+            return False
+        return bool(self._metadata) and (self.is_wheel or self.is_sdist)
+
+    @property
+    def metadata_url(self) -> str | None:
+        if self.has_metadata:
+            return f"{self.url_without_fragment.split('?', 1)[0]}.metadata"
+        return None
+
+    @property
+    def metadata_hash(self) -> str | None:
+        if self.has_metadata and isinstance(self._metadata, str):
+            match = self._hash_re.search(self._metadata)
+            if match:
+                return match.group(2)
+        return None
+
+    @property
+    def metadata_hash_name(self) -> str | None:
+        if self.has_metadata and isinstance(self._metadata, str):
+            match = self._hash_re.search(self._metadata)
+            if match:
+                return match.group(1)
+        return None
+
+    @property
+    def hash(self) -> str | None:
+        match = self._hash_re.search(self.url)
+        if match:
+            return match.group(2)
+        return None
+
+    @property
+    def hash_name(self) -> str | None:
+        match = self._hash_re.search(self.url)
+        if match:
+            return match.group(1)
+        return None
+
+    @property
+    def show_url(self) -> str:
+        return posixpath.basename(self.url.split("#", 1)[0].split("?", 1)[0])
+
+    @property
+    def is_wheel(self) -> bool:
+        return self.ext == ".whl"
+
+    @property
+    def is_wininst(self) -> bool:
+        return self.ext == ".exe"
+
+    @property
+    def is_egg(self) -> bool:
+        return self.ext == ".egg"
+
+    @property
+    def is_sdist(self) -> bool:
+        return self.ext in {".tar.bz2", ".tar.gz", ".zip"}
+
+    @property
+    def is_artifact(self) -> bool:
+        """
+        Determines if this points to an actual artifact (e.g. a tarball) or if
+        it points to an "abstract" thing like a path or a VCS location.
+        """
+        if self.scheme in ["ssh", "git", "hg", "bzr", "sftp", "svn"]:
+            return False
+
+        return True
+
+    @property
+    def yanked(self) -> bool:
+        return isinstance(self._yanked, str) or bool(self._yanked)
+
+    @property
+    def yanked_reason(self) -> str:
+        if isinstance(self._yanked, str):
+            return self._yanked
+        return ""
diff --git a/vendor/poetry-core/src/poetry/core/packages/utils/utils.py b/vendor/poetry-core/src/poetry/core/packages/utils/utils.py
new file mode 100644
index 00000000..dc6e8e12
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/packages/utils/utils.py
@@ -0,0 +1,379 @@
+from __future__ import annotations
+
+import functools
+import posixpath
+import re
+import sys
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Dict
+from typing import List
+from typing import Tuple
+from urllib.parse import unquote
+from urllib.parse import urlsplit
+from urllib.request import url2pathname
+
+from poetry.core.pyproject.toml import PyProjectTOML
+from poetry.core.semver.helpers import parse_constraint
+from poetry.core.semver.version import Version
+from poetry.core.semver.version_range import VersionRange
+from poetry.core.version.markers import dnf
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.constraints import BaseConstraint
+    from poetry.core.semver.version_constraint import VersionConstraint
+    from poetry.core.semver.version_union import VersionUnion
+    from poetry.core.version.markers import BaseMarker
+
+    # Even though we've `from __future__ import annotations`, mypy doesn't seem to like
+    # this as `dict[str, ...]`
+    ConvertedMarkers = Dict[str, List[List[Tuple[str, str]]]]
+
+
+BZ2_EXTENSIONS = (".tar.bz2", ".tbz")
+XZ_EXTENSIONS = (".tar.xz", ".txz", ".tlz", ".tar.lz", ".tar.lzma")
+ZIP_EXTENSIONS = (".zip", ".whl")
+TAR_EXTENSIONS = (".tar.gz", ".tgz", ".tar")
+ARCHIVE_EXTENSIONS = ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS
+SUPPORTED_EXTENSIONS: tuple[str, ...] = ZIP_EXTENSIONS + TAR_EXTENSIONS
+
+try:
+    import bz2  # noqa: F401
+
+    SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
+except ImportError:
+    pass
+
+try:
+    # Only for Python 3.3+
+    import lzma  # noqa: F401
+
+    SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
+except ImportError:
+    pass
+
+
+def path_to_url(path: str | Path) -> str:
+    """
+    Convert a path to a file:// URL. The path is made absolute and its
+    parts are percent-quoted.
+    """
+    return Path(path).absolute().as_uri()
+
+
+def url_to_path(url: str) -> Path:
+    """
+    Convert an RFC8089 file URI to path.
+
+    The logic used here is borrowed from pip
+    https://github.com/pypa/pip/blob/4d1932fcdd1974c820ea60b3286984ebb0c3beaa/src/pip/_internal/utils/urls.py#L31
+    """
+    if not url.startswith("file:"):
+        raise ValueError(f"{url} is not a valid file URI")
+
+    _, netloc, path, _, _ = urlsplit(url)
+
+    if not netloc or netloc == "localhost":
+        # According to RFC 8089, same as empty authority.
+        netloc = ""
+    elif netloc not in {".", ".."} and sys.platform == "win32":
+        # If we have a UNC path, prepend UNC share notation.
+        netloc = "\\\\" + netloc
+    else:
+        raise ValueError(
+            f"non-local file URIs are not supported on this platform: {url}"
+        )
+
+    return Path(url2pathname(netloc + unquote(path)))
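+
+# [Editor's note] Illustrative only: on a POSIX system this should round-trip
+# a local file URI back to a path.
+#
+#   >>> url_to_path("file:///opt/project")
+#   PosixPath('/opt/project')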
+
+
+def is_url(name: str) -> bool:
+    if ":" not in name:
+        return False
+    scheme = name.split(":", 1)[0].lower()
+
+    return scheme in [
+        "http",
+        "https",
+        "file",
+        "ftp",
+        "ssh",
+        "git",
+        "hg",
+        "bzr",
+        "sftp",
+        "svn",
+        "ssh",
+    ]
+
+
+def strip_extras(path: str) -> tuple[str, str | None]:
+    m = re.match(r"^(.+)(\[[^\]]+\])$", path)
+    extras = None
+    if m:
+        path_no_extras = m.group(1)
+        extras = m.group(2)
+    else:
+        path_no_extras = path
+
+    return path_no_extras, extras
+
+
+@functools.lru_cache(maxsize=None)
+def is_python_project(path: Path) -> bool:
+    """Return true if the directory is a Python project"""
+    if not path.is_dir():
+        return False
+
+    setup_py = path / "setup.py"
+    setup_cfg = path / "setup.cfg"
+    setuptools_project = setup_py.exists() or setup_cfg.exists()
+
+    pyproject = PyProjectTOML(path / "pyproject.toml")
+
+    supports_pep517 = setuptools_project or pyproject.is_build_system_defined()
+    supports_poetry = pyproject.is_poetry_project()
+
+    return supports_pep517 or supports_poetry
+
+
+def is_archive_file(name: str) -> bool:
+    """Return True if `name` is a considered as an archive file."""
+    ext = splitext(name)[1].lower()
+    if ext in ARCHIVE_EXTENSIONS:
+        return True
+    return False
+
+
+def splitext(path: str) -> tuple[str, str]:
+    """Like os.path.splitext, but take off .tar too"""
+    base, ext = posixpath.splitext(path)
+    if base.lower().endswith(".tar"):
+        ext = base[-4:] + ext
+        base = base[:-4]
+    return base, ext
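+
+# [Editor's note] Illustrative only: the ".tar" special case keeps compound
+# suffixes together.
+#
+#   >>> splitext("demo-1.0.tar.gz")
+#   ('demo-1.0', '.tar.gz')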
+
+
+def convert_markers(marker: BaseMarker) -> ConvertedMarkers:
+    from poetry.core.version.markers import MarkerUnion
+    from poetry.core.version.markers import MultiMarker
+    from poetry.core.version.markers import SingleMarker
+
+    requirements: ConvertedMarkers = {}
+    marker = dnf(marker)
+    conjunctions = marker.markers if isinstance(marker, MarkerUnion) else [marker]
+    group_count = len(conjunctions)
+
+    def add_constraint(
+        marker_name: str, constraint: tuple[str, str], group_index: int
+    ) -> None:
+        # python_full_version is equivalent to python_version
+        # for Poetry so we merge them
+        if marker_name == "python_full_version":
+            marker_name = "python_version"
+        if marker_name not in requirements:
+            requirements[marker_name] = [[] for _ in range(group_count)]
+        requirements[marker_name][group_index].append(constraint)
+
+    for i, sub_marker in enumerate(conjunctions):
+        if isinstance(sub_marker, MultiMarker):
+            for m in sub_marker.markers:
+                if isinstance(m, SingleMarker):
+                    add_constraint(m.name, (m.operator, m.value), i)
+        elif isinstance(sub_marker, SingleMarker):
+            add_constraint(sub_marker.name, (sub_marker.operator, sub_marker.value), i)
+
+    for group_name in requirements:
+        # remove duplicates
+        seen = []
+        for r in requirements[group_name]:
+            if r not in seen:
+                seen.append(r)
+        requirements[group_name] = seen
+
+    return requirements
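+
+# [Editor's note] Hedged sketch, not upstream code: each top-level OR group of
+# the DNF gets its own inner list, so a marker such as
+# 'python_version >= "3.8" or sys_platform == "win32"' converts to roughly
+#
+#   {"python_version": [[(">=", "3.8")], []],
+#    "sys_platform": [[], [("==", "win32")]]}
+#
+# where an empty inner list marks an OR group that does not constrain that name.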
+
+
+def contains_group_without_marker(markers: ConvertedMarkers, marker_name: str) -> bool:
+    return marker_name not in markers or [] in markers[marker_name]
+
+
+def create_nested_marker(
+    name: str,
+    constraint: BaseConstraint | VersionUnion | Version | VersionConstraint,
+) -> str:
+    from poetry.core.packages.constraints.constraint import Constraint
+    from poetry.core.packages.constraints.multi_constraint import MultiConstraint
+    from poetry.core.packages.constraints.union_constraint import UnionConstraint
+    from poetry.core.semver.version_union import VersionUnion
+
+    if constraint.is_any():
+        return ""
+
+    if isinstance(constraint, (MultiConstraint, UnionConstraint)):
+        multi_parts = []
+        for c in constraint.constraints:
+            multi = isinstance(c, (MultiConstraint, UnionConstraint))
+            multi_parts.append((multi, create_nested_marker(name, c)))
+
+        glue = " and "
+        if isinstance(constraint, UnionConstraint):
+            parts = [f"({part[1]})" if part[0] else part[1] for part in multi_parts]
+            glue = " or "
+        else:
+            parts = [part[1] for part in multi_parts]
+
+        marker = glue.join(parts)
+    elif isinstance(constraint, Constraint):
+        marker = f'{name} {constraint.operator} "{constraint.version}"'
+    elif isinstance(constraint, VersionUnion):
+        parts = [create_nested_marker(name, c) for c in constraint.ranges]
+        glue = " or "
+        parts = [f"({part})" for part in parts]
+        marker = glue.join(parts)
+    elif isinstance(constraint, Version):
+        if name == "python_version" and constraint.precision >= 3:
+            name = "python_full_version"
+
+        marker = f'{name} == "{constraint.text}"'
+    else:
+        assert isinstance(constraint, VersionRange)
+        if constraint.min is not None:
+            op = ">="
+            if not constraint.include_min:
+                op = ">"
+
+            version = constraint.min
+            if constraint.max is not None:
+                min_name = max_name = name
+                if min_name == "python_version" and constraint.min.precision >= 3:
+                    min_name = "python_full_version"
+
+                if max_name == "python_version" and constraint.max.precision >= 3:
+                    max_name = "python_full_version"
+
+                text = f'{min_name} {op} "{version}"'
+
+                op = "<="
+                if not constraint.include_max:
+                    op = "<"
+
+                version = constraint.max
+
+                text += f' and {max_name} {op} "{version}"'
+
+                return text
+        elif constraint.max is not None:
+            op = "<="
+            if not constraint.include_max:
+                op = "<"
+
+            version = constraint.max
+        else:
+            return ""
+
+        if name == "python_version" and version.precision >= 3:
+            name = "python_full_version"
+
+        marker = f'{name} {op} "{version}"'
+
+    return marker
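+
+# [Editor's note] Illustrative only:
+#
+#   >>> create_nested_marker("python_version", parse_constraint(">=3.7,<4.0"))
+#   'python_version >= "3.7" and python_version < "4.0"'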
+
+
+def get_python_constraint_from_marker(
+    marker: BaseMarker,
+) -> VersionConstraint:
+    from poetry.core.semver.empty_constraint import EmptyConstraint
+    from poetry.core.semver.version_range import VersionRange
+
+    python_marker = marker.only("python_version", "python_full_version")
+    if python_marker.is_any():
+        return VersionRange()
+
+    if python_marker.is_empty():
+        return EmptyConstraint()
+
+    markers = convert_markers(marker)
+    if contains_group_without_marker(markers, "python_version"):
+        # groups are in disjunctive normal form (DNF),
+        # an empty group means that python_version does not appear in this group,
+        # which means that python_version is arbitrary for this group
+        return VersionRange()
+
+    python_version_markers = markers["python_version"]
+    normalized = normalize_python_version_markers(python_version_markers)
+    constraint = parse_constraint(normalized)
+    return constraint
+
+
+def normalize_python_version_markers(  # NOSONAR
+    disjunction: list[list[tuple[str, str]]],
+) -> str:
+    ors = []
+    for or_ in disjunction:
+        ands = []
+        for op, version in or_:
+            # Expand python version
+            if op == "==" and "*" not in version and version.count(".") < 2:
+                version = "~" + version
+                op = ""
+
+            elif op == "!=" and "*" not in version and version.count(".") < 2:
+                version += ".*"
+
+            elif op in ("<=", ">"):
+                # Make adjustments on encountering versions with less than full
+                # precision.
+                #
+                # Per PEP-508:
+                # python_version <-> '.'.join(platform.python_version_tuple()[:2])
+                #
+                # So for two digits of precision we make the following adjustments:
+                # - `python_version > "x.y"` requires version >= x.(y+1).anything
+                # - `python_version <= "x.y"` requires version < x.(y+1).anything
+                #
+                # Treatment when we see a single digit of precision is less clear: is
+                # that even a legitimate marker?
+                #
+                # Experiment suggests that pip behaviour is essentially to make a
+                # lexicographical comparison, for example `python_version > "3"` is
+                # satisfied by version 3.anything, whereas `python_version <= "3"` is
+                # satisfied only by version 2.anything.
+                #
+                # We achieve the above by fiddling with the operator and version in the
+                # marker.
+                parsed_version = Version.parse(version)
+                if parsed_version.precision < 3:
+                    if op == "<=":
+                        op = "<"
+                    elif op == ">":
+                        op = ">="
+
+                if parsed_version.precision == 2:
+                    version = parsed_version.next_minor().text
+
+            elif op in ("in", "not in"):
+                versions = []
+                for v in re.split("[ ,]+", version):
+                    split = v.split(".")
+                    if len(split) in [1, 2]:
+                        split.append("*")
+                        op_ = "" if op == "in" else "!="
+                    else:
+                        op_ = "==" if op == "in" else "!="
+
+                    versions.append(op_ + ".".join(split))
+
+                if versions:
+                    glue = " || " if op == "in" else ", "
+                    ands.append(glue.join(versions))
+
+                continue
+
+            ands.append(f"{op}{version}")
+
+        ors.append(" ".join(ands))
+
+    return " || ".join(ors)
diff --git a/vendor/poetry-core/src/poetry/core/packages/vcs_dependency.py b/vendor/poetry-core/src/poetry/core/packages/vcs_dependency.py
new file mode 100644
index 00000000..f79e8b7d
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/packages/vcs_dependency.py
@@ -0,0 +1,125 @@
+from __future__ import annotations
+
+from typing import Iterable
+
+from poetry.core.packages.dependency import Dependency
+
+
+class VCSDependency(Dependency):
+    """
+    Represents a VCS dependency
+    """
+
+    def __init__(
+        self,
+        name: str,
+        vcs: str,
+        source: str,
+        branch: str | None = None,
+        tag: str | None = None,
+        rev: str | None = None,
+        resolved_rev: str | None = None,
+        directory: str | None = None,
+        groups: Iterable[str] | None = None,
+        optional: bool = False,
+        develop: bool = False,
+        extras: Iterable[str] | None = None,
+    ) -> None:
+        self._vcs = vcs
+        self._source = source
+
+        self._branch = branch
+        self._tag = tag
+        self._rev = rev
+        self._directory = directory
+        self._develop = develop
+
+        super().__init__(
+            name,
+            "*",
+            groups=groups,
+            optional=optional,
+            allows_prereleases=True,
+            source_type=self._vcs.lower(),
+            source_url=self._source,
+            source_reference=branch or tag or rev or "HEAD",
+            source_resolved_reference=resolved_rev,
+            source_subdirectory=directory,
+            extras=extras,
+        )
+
+    @property
+    def vcs(self) -> str:
+        return self._vcs
+
+    @property
+    def source(self) -> str:
+        return self._source
+
+    @property
+    def branch(self) -> str | None:
+        return self._branch
+
+    @property
+    def tag(self) -> str | None:
+        return self._tag
+
+    @property
+    def rev(self) -> str | None:
+        return self._rev
+
+    @property
+    def directory(self) -> str | None:
+        return self._directory
+
+    @property
+    def develop(self) -> bool:
+        return self._develop
+
+    @property
+    def reference(self) -> str:
+        reference = self._branch or self._tag or self._rev or ""
+        return reference
+
+    @property
+    def pretty_constraint(self) -> str:
+        if self._branch:
+            what = "branch"
+            version = self._branch
+        elif self._tag:
+            what = "tag"
+            version = self._tag
+        elif self._rev:
+            what = "rev"
+            version = self._rev
+        else:
+            return ""
+
+        return f"{what} {version}"
+
+    @property
+    def base_pep_508_name(self) -> str:
+        from poetry.core.vcs import git
+
+        requirement = self.pretty_name
+        parsed_url = git.ParsedUrl.parse(self._source)
+
+        if self.extras:
+            extras = ",".join(sorted(self.extras))
+            requirement += f"[{extras}]"
+
+        if parsed_url.protocol is not None:
+            requirement += f" @ {self._vcs}+{self._source}"
+        else:
+            requirement += f" @ {self._vcs}+ssh://{parsed_url.format()}"
+
+        if self.reference:
+            requirement += f"@{self.reference}"
+
+        if self._directory:
+            requirement += f"#subdirectory={self._directory}"
+
+        return requirement
+
+    def is_vcs(self) -> bool:
+        return True
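+
+    # [Editor's note] Illustrative only:
+    #
+    #   >>> VCSDependency("poetry", "git",
+    #   ...     "https://github.com/python-poetry/poetry.git",
+    #   ...     branch="main").base_pep_508_name
+    #   'poetry @ git+https://github.com/python-poetry/poetry.git@main'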
diff --git a/vendor/poetry-core/src/poetry/core/poetry.py b/vendor/poetry-core/src/poetry/core/poetry.py
new file mode 100644
index 00000000..fbcd464f
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/poetry.py
@@ -0,0 +1,45 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from typing import Any
+
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from poetry.core.packages.project_package import ProjectPackage
+    from poetry.core.pyproject.toml import PyProjectTOML
+    from poetry.core.toml import TOMLFile
+
+
+class Poetry:
+    def __init__(
+        self,
+        file: Path,
+        local_config: dict[str, Any],
+        package: ProjectPackage,
+    ) -> None:
+        from poetry.core.pyproject.toml import PyProjectTOML
+
+        self._pyproject = PyProjectTOML(file)
+        self._package = package
+        self._local_config = local_config
+
+    @property
+    def pyproject(self) -> PyProjectTOML:
+        return self._pyproject
+
+    @property
+    def file(self) -> TOMLFile:
+        return self._pyproject.file
+
+    @property
+    def package(self) -> ProjectPackage:
+        return self._package
+
+    @property
+    def local_config(self) -> dict[str, Any]:
+        return self._local_config
+
+    def get_project_config(self, config: str, default: Any = None) -> Any:
+        return self._local_config.get("config", {}).get(config, default)
diff --git a/vendor/poetry-core/poetry/core/_vendor/pyrsistent/py.typed b/vendor/poetry-core/src/poetry/core/py.typed
similarity index 100%
rename from vendor/poetry-core/poetry/core/_vendor/pyrsistent/py.typed
rename to vendor/poetry-core/src/poetry/core/py.typed
diff --git a/vendor/poetry/poetry/console/commands/env/__init__.py b/vendor/poetry-core/src/poetry/core/pyproject/__init__.py
similarity index 100%
rename from vendor/poetry/poetry/console/commands/env/__init__.py
rename to vendor/poetry-core/src/poetry/core/pyproject/__init__.py
diff --git a/vendor/poetry-core/src/poetry/core/pyproject/exceptions.py b/vendor/poetry-core/src/poetry/core/pyproject/exceptions.py
new file mode 100644
index 00000000..ca01aaf8
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/pyproject/exceptions.py
@@ -0,0 +1,7 @@
+from __future__ import annotations
+
+from poetry.core.exceptions import PoetryCoreException
+
+
+class PyProjectException(PoetryCoreException):
+    pass
diff --git a/vendor/poetry-core/src/poetry/core/pyproject/tables.py b/vendor/poetry-core/src/poetry/core/pyproject/tables.py
new file mode 100644
index 00000000..99a4c83d
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/pyproject/tables.py
@@ -0,0 +1,55 @@
+from __future__ import annotations
+
+from contextlib import suppress
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.dependency import Dependency
+
+
+# TODO: Convert to dataclass once Python 2.7 and 3.5 are dropped
+class BuildSystem:
+    def __init__(
+        self, build_backend: str | None = None, requires: list[str] | None = None
+    ) -> None:
+        self.build_backend = (
+            build_backend
+            if build_backend is not None
+            else "setuptools.build_meta:__legacy__"
+        )
+        self.requires = requires if requires is not None else ["setuptools", "wheel"]
+        self._dependencies: list[Dependency] | None = None
+
+    @property
+    def dependencies(self) -> list[Dependency]:
+        if self._dependencies is None:
+            # avoid circular dependency when loading DirectoryDependency
+            from poetry.core.packages.dependency import Dependency
+            from poetry.core.packages.directory_dependency import DirectoryDependency
+            from poetry.core.packages.file_dependency import FileDependency
+
+            self._dependencies = []
+            for requirement in self.requires:
+                dependency = None
+                try:
+                    dependency = Dependency.create_from_pep_508(requirement)
+                except ValueError:
+                    # a PEP 517 "requires" entry may be a path rather than a PEP 508 string
+                    path = Path(requirement)
+                    # compatibility Python < 3.8
+                    # https://docs.python.org/3/library/pathlib.html#methods
+                    with suppress(OSError):
+                        if path.is_file():
+                            dependency = FileDependency(name=path.name, path=path)
+                        elif path.is_dir():
+                            dependency = DirectoryDependency(name=path.name, path=path)
+
+                if dependency is None:
+                    # skip since we could not determine requirement
+                    continue
+
+                self._dependencies.append(dependency)
+
+        return self._dependencies
diff --git a/vendor/poetry-core/src/poetry/core/pyproject/toml.py b/vendor/poetry-core/src/poetry/core/pyproject/toml.py
new file mode 100644
index 00000000..7496254a
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/pyproject/toml.py
@@ -0,0 +1,111 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from typing import Any
+from typing import cast
+
+from tomlkit.container import Container
+
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from tomlkit.toml_document import TOMLDocument
+
+    from poetry.core.pyproject.tables import BuildSystem
+    from poetry.core.toml import TOMLFile
+
+
+class PyProjectTOML:
+    def __init__(self, path: str | Path) -> None:
+        from poetry.core.toml import TOMLFile
+
+        self._file = TOMLFile(path=path)
+        self._data: TOMLDocument | None = None
+        self._build_system: BuildSystem | None = None
+
+    @property
+    def file(self) -> TOMLFile:
+        return self._file
+
+    @property
+    def data(self) -> TOMLDocument:
+        from tomlkit.toml_document import TOMLDocument
+
+        if self._data is None:
+            if not self._file.exists():
+                self._data = TOMLDocument()
+            else:
+                self._data = self._file.read()
+
+        return self._data
+
+    def is_build_system_defined(self) -> bool:
+        return self._file.exists() and "build-system" in self.data
+
+    @property
+    def build_system(self) -> BuildSystem:
+        from poetry.core.pyproject.tables import BuildSystem
+
+        if self._build_system is None:
+            build_backend = None
+            requires = None
+
+            if not self._file.exists():
+                build_backend = "poetry.core.masonry.api"
+                requires = ["poetry-core"]
+
+            container = self.data.get("build-system", {})
+            self._build_system = BuildSystem(
+                build_backend=container.get("build-backend", build_backend),
+                requires=container.get("requires", requires),
+            )
+
+        return self._build_system
+
+    @property
+    def poetry_config(self) -> Container:
+        from tomlkit.exceptions import NonExistentKey
+
+        try:
+            return cast(Container, self.data["tool"]["poetry"])
+        except NonExistentKey as e:
+            from poetry.core.pyproject.exceptions import PyProjectException
+
+            raise PyProjectException(
+                f"[tool.poetry] section not found in {self._file}"
+            ) from e
+
+    def is_poetry_project(self) -> bool:
+        from poetry.core.pyproject.exceptions import PyProjectException
+
+        if self.file.exists():
+            try:
+                _ = self.poetry_config
+                return True
+            except PyProjectException:
+                pass
+
+        return False
+
+    def __getattr__(self, item: str) -> Any:
+        return getattr(self.data, item)
+
+    def save(self) -> None:
+        from tomlkit.container import Container
+
+        data = self.data
+
+        if self._build_system is not None:
+            if "build-system" not in data:
+                data["build-system"] = Container()
+
+            build_system = cast(Container, data["build-system"])
+            build_system["requires"] = self._build_system.requires
+            build_system["build-backend"] = self._build_system.build_backend
+
+        self.file.write(data=data)
+
+    def reload(self) -> None:
+        self._data = None
+        self._build_system = None
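+
+    # [Editor's note] Hedged sketch, not upstream code: when no pyproject.toml
+    # exists on disk, build_system falls back to the poetry-core backend.
+    #
+    #   >>> py = PyProjectTOML("missing/pyproject.toml")   # hypothetical path
+    #   >>> py.build_system.build_backend
+    #   'poetry.core.masonry.api'
+    #   >>> py.build_system.requires
+    #   ['poetry-core']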
diff --git a/vendor/poetry/poetry/console/commands/self/__init__.py b/vendor/poetry-core/src/poetry/core/semver/__init__.py
similarity index 100%
rename from vendor/poetry/poetry/console/commands/self/__init__.py
rename to vendor/poetry-core/src/poetry/core/semver/__init__.py
diff --git a/vendor/poetry-core/src/poetry/core/semver/empty_constraint.py b/vendor/poetry-core/src/poetry/core/semver/empty_constraint.py
new file mode 100644
index 00000000..5cd7ea73
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/semver/empty_constraint.py
@@ -0,0 +1,51 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.core.semver.version_constraint import VersionConstraint
+
+
+if TYPE_CHECKING:
+    from poetry.core.semver.version import Version
+    from poetry.core.semver.version_range_constraint import VersionRangeConstraint
+
+
+class EmptyConstraint(VersionConstraint):
+    def is_empty(self) -> bool:
+        return True
+
+    def is_any(self) -> bool:
+        return False
+
+    def is_simple(self) -> bool:
+        return True
+
+    def allows(self, version: Version) -> bool:
+        return False
+
+    def allows_all(self, other: VersionConstraint) -> bool:
+        return other.is_empty()
+
+    def allows_any(self, other: VersionConstraint) -> bool:
+        return False
+
+    def intersect(self, other: VersionConstraint) -> EmptyConstraint:
+        return self
+
+    def union(self, other: VersionConstraint) -> VersionConstraint:
+        return other
+
+    def difference(self, other: VersionConstraint) -> EmptyConstraint:
+        return self
+
+    def flatten(self) -> list[VersionRangeConstraint]:
+        return []
+
+    def __str__(self) -> str:
+        return ""
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, VersionConstraint):
+            return False
+
+        return other.is_empty()
diff --git a/vendor/poetry-core/src/poetry/core/semver/exceptions.py b/vendor/poetry-core/src/poetry/core/semver/exceptions.py
new file mode 100644
index 00000000..d06e56f7
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/semver/exceptions.py
@@ -0,0 +1,5 @@
+from __future__ import annotations
+
+
+class ParseConstraintError(ValueError):
+    pass
diff --git a/vendor/poetry-core/src/poetry/core/semver/helpers.py b/vendor/poetry-core/src/poetry/core/semver/helpers.py
new file mode 100644
index 00000000..0ce090d8
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/semver/helpers.py
@@ -0,0 +1,147 @@
+from __future__ import annotations
+
+import re
+
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from poetry.core.semver.version_constraint import VersionConstraint
+
+
+def parse_constraint(constraints: str) -> VersionConstraint:
+    if constraints == "*":
+        from poetry.core.semver.version_range import VersionRange
+
+        return VersionRange()
+
+    or_constraints = re.split(r"\s*\|\|?\s*", constraints.strip())
+    or_groups = []
+    for constraints in or_constraints:
+        # allow trailing commas for robustness (even though it may not be
+        # standard-compliant it seems to occur in some packages)
+        constraints = constraints.rstrip(",").rstrip()
+        and_constraints = re.split(
+            "(?< ,]) *(? 1:
+            for constraint in and_constraints:
+                constraint_objects.append(parse_single_constraint(constraint))
+        else:
+            constraint_objects.append(parse_single_constraint(and_constraints[0]))
+
+        if len(constraint_objects) == 1:
+            constraint = constraint_objects[0]
+        else:
+            constraint = constraint_objects[0]
+            for next_constraint in constraint_objects[1:]:
+                constraint = constraint.intersect(next_constraint)
+
+        or_groups.append(constraint)
+
+    if len(or_groups) == 1:
+        return or_groups[0]
+    else:
+        from poetry.core.semver.version_union import VersionUnion
+
+        return VersionUnion.of(*or_groups)
+
+
+def parse_single_constraint(constraint: str) -> VersionConstraint:
+    from poetry.core.semver.patterns import BASIC_CONSTRAINT
+    from poetry.core.semver.patterns import CARET_CONSTRAINT
+    from poetry.core.semver.patterns import TILDE_CONSTRAINT
+    from poetry.core.semver.patterns import TILDE_PEP440_CONSTRAINT
+    from poetry.core.semver.patterns import X_CONSTRAINT
+    from poetry.core.semver.version import Version
+    from poetry.core.semver.version_range import VersionRange
+    from poetry.core.semver.version_union import VersionUnion
+
+    m = re.match(r"(?i)^v?[xX*](\.[xX*])*$", constraint)
+    if m:
+        return VersionRange()
+
+    # Tilde range
+    m = TILDE_CONSTRAINT.match(constraint)
+    if m:
+        version = Version.parse(m.group("version"))
+        high = version.stable.next_minor()
+        if version.release.precision == 1:
+            high = version.stable.next_major()
+
+        return VersionRange(version, high, include_min=True)
+
+    # PEP 440 Tilde range (~=)
+    m = TILDE_PEP440_CONSTRAINT.match(constraint)
+    if m:
+        version = Version.parse(m.group("version"))
+        if version.release.precision == 2:
+            high = version.stable.next_major()
+        else:
+            high = version.stable.next_minor()
+
+        return VersionRange(version, high, include_min=True)
+
+    # Caret range
+    m = CARET_CONSTRAINT.match(constraint)
+    if m:
+        version = Version.parse(m.group("version"))
+
+        return VersionRange(version, version.next_breaking(), include_min=True)
+
+    # X Range
+    m = X_CONSTRAINT.match(constraint)
+    if m:
+        op = m.group("op")
+        major = int(m.group(2))
+        minor = m.group(3)
+
+        if minor is not None:
+            version = Version.from_parts(major, int(minor), 0)
+            result: VersionConstraint = VersionRange(
+                version, version.next_minor(), include_min=True
+            )
+        else:
+            if major == 0:
+                result = VersionRange(max=Version.from_parts(1, 0, 0))
+            else:
+                version = Version.from_parts(major, 0, 0)
+
+                result = VersionRange(version, version.next_major(), include_min=True)
+
+        if op == "!=":
+            result = VersionRange().difference(result)
+
+        return result
+
+    # Basic comparator
+    m = BASIC_CONSTRAINT.match(constraint)
+    if m:
+        op = m.group("op")
+        version_string = m.group("version")
+
+        if version_string == "dev":
+            version_string = "0.0-dev"
+
+        try:
+            version = Version.parse(version_string)
+        except ValueError:
+            raise ValueError(f"Could not parse version constraint: {constraint}")
+
+        if op == "<":
+            return VersionRange(max=version)
+        if op == "<=":
+            return VersionRange(max=version, include_max=True)
+        if op == ">":
+            return VersionRange(min=version)
+        if op == ">=":
+            return VersionRange(min=version, include_min=True)
+        if op == "!=":
+            return VersionUnion(VersionRange(max=version), VersionRange(min=version))
+        return version
+
+    from poetry.core.semver.exceptions import ParseConstraintError
+
+    raise ParseConstraintError(f"Could not parse version constraint: {constraint}")
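
> Editor's note: `parse_constraint` splits on `||` for unions and on commas/spaces for intersections, then delegates each piece to `parse_single_constraint`. A quick illustrative check of the rules above, assuming the vendored `poetry.core` package is importable; the comments show the expected rendered ranges.

```python
from poetry.core.semver.helpers import parse_constraint

print(parse_constraint("^1.2.3"))   # >=1.2.3,<2.0.0 (caret: up to the next breaking version)
print(parse_constraint("~1.2"))     # >=1.2,<1.3     (tilde with a minor part: next minor)
print(parse_constraint("~1"))       # >=1,<2         (tilde, major only: next major)
print(parse_constraint("1.2.*"))    # >=1.2.0,<1.3.0 (x-range)
print(parse_constraint(">=1.0,<2.0 || >=3.0"))  # union of two disjoint ranges
```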
diff --git a/vendor/poetry-core/src/poetry/core/semver/patterns.py b/vendor/poetry-core/src/poetry/core/semver/patterns.py
new file mode 100644
index 00000000..0dd213cf
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/semver/patterns.py
@@ -0,0 +1,28 @@
+from __future__ import annotations
+
+import re
+
+from packaging.version import VERSION_PATTERN
+
+
+COMPLETE_VERSION = re.compile(VERSION_PATTERN, re.VERBOSE | re.IGNORECASE)
+
+CARET_CONSTRAINT = re.compile(
+    rf"^\^(?P<version>{VERSION_PATTERN})$", re.VERBOSE | re.IGNORECASE
+)
+TILDE_CONSTRAINT = re.compile(
+    rf"^~(?!=)\s*(?P<version>{VERSION_PATTERN})$", re.VERBOSE | re.IGNORECASE
+)
+TILDE_PEP440_CONSTRAINT = re.compile(
+    rf"^~=\s*(?P<version>{VERSION_PATTERN})$", re.VERBOSE | re.IGNORECASE
+)
+X_CONSTRAINT = re.compile(
+    r"^(?P<op>!=|==)?\s*v?(\d+)(?:\.(\d+))?(?:\.(\d+))?(?:\.[xX*])+$"
+)
+
+# note that we also allow technically incorrect version patterns with an asterisk (e.g. 3.5.*)
+# as this is supported by pip and appears in metadata within python packages
+BASIC_CONSTRAINT = re.compile(
+    rf"^(?P<op><>|!=|>=?|<=?|==?)?\s*(?P<version>{VERSION_PATTERN}|dev)(\.\*)?$",
+    re.VERBOSE | re.IGNORECASE,
+)
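
> Editor's note: the helpers above rely on the `version` and `op` named groups. A short illustration, assuming the vendored package is importable:

```python
from poetry.core.semver.patterns import BASIC_CONSTRAINT, CARET_CONSTRAINT

m = CARET_CONSTRAINT.match("^1.2.3")
assert m is not None and m.group("version") == "1.2.3"

m = BASIC_CONSTRAINT.match(">=2.0")
assert m is not None
assert (m.group("op"), m.group("version")) == (">=", "2.0")
```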
diff --git a/vendor/poetry-core/src/poetry/core/semver/util.py b/vendor/poetry-core/src/poetry/core/semver/util.py
new file mode 100644
index 00000000..b70e85df
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/semver/util.py
@@ -0,0 +1,58 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.core.semver.version_range import VersionRange
+
+
+if TYPE_CHECKING:
+    from poetry.core.semver.version_constraint import VersionConstraint
+
+
+def constraint_regions(constraints: list[VersionConstraint]) -> list[VersionRange]:
+    """
+    Transform a list of VersionConstraints into a list of VersionRanges that mark out
+    the distinct regions of version-space.
+
+    eg input >=3.6 and >=2.7,<3.0.0 || >=3.4.0
+    output <2.7, >=2.7,<3.0.0, >=3.0.0,<3.4.0, >=3.4.0,<3.6, >=3.6.
+    """
+    flattened = []
+    for constraint in constraints:
+        flattened += constraint.flatten()
+
+    mins = {
+        (constraint.min, not constraint.include_min)
+        for constraint in flattened
+        if constraint.min is not None
+    }
+    maxs = {
+        (constraint.max, constraint.include_max)
+        for constraint in flattened
+        if constraint.max is not None
+    }
+
+    edges = sorted(mins | maxs)
+    if not edges:
+        return [VersionRange(None, None)]
+
+    start = edges[0]
+    regions = [
+        VersionRange(None, start[0], include_max=start[1]),
+    ]
+
+    for low, high in zip(edges, edges[1:]):
+        version_range = VersionRange(
+            low[0],
+            high[0],
+            include_min=not low[1],
+            include_max=high[1],
+        )
+        regions.append(version_range)
+
+    end = edges[-1]
+    regions.append(
+        VersionRange(end[0], None, include_min=not end[1]),
+    )
+
+    return regions
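
> Editor's note: following the docstring's own example, a small demonstration of `constraint_regions`; the expected output shown in the comment is illustrative.

```python
from poetry.core.semver.helpers import parse_constraint
from poetry.core.semver.util import constraint_regions

regions = constraint_regions(
    [parse_constraint(">=3.6"), parse_constraint(">=2.7,<3.0.0 || >=3.4.0")]
)
for region in regions:
    print(region)
# <2.7  >=2.7,<3.0.0  >=3.0.0,<3.4.0  >=3.4.0,<3.6  >=3.6
```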
diff --git a/vendor/poetry-core/src/poetry/core/semver/version.py b/vendor/poetry-core/src/poetry/core/semver/version.py
new file mode 100644
index 00000000..407ed8c9
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/semver/version.py
@@ -0,0 +1,184 @@
+from __future__ import annotations
+
+import dataclasses
+
+from typing import TYPE_CHECKING
+
+from poetry.core.semver.empty_constraint import EmptyConstraint
+from poetry.core.semver.version_range_constraint import VersionRangeConstraint
+from poetry.core.semver.version_union import VersionUnion
+from poetry.core.version.pep440 import Release
+from poetry.core.version.pep440.version import PEP440Version
+
+
+if TYPE_CHECKING:
+    from poetry.core.semver.version_constraint import VersionConstraint
+    from poetry.core.version.pep440 import LocalSegmentType
+    from poetry.core.version.pep440 import ReleaseTag
+
+
+@dataclasses.dataclass(frozen=True)
+class Version(PEP440Version, VersionRangeConstraint):
+    """
+    A parsed semantic version number.
+    """
+
+    @property
+    def precision(self) -> int:
+        return self.release.precision
+
+    @property
+    def stable(self) -> Version:
+        if self.is_stable():
+            return self
+
+        return self.next_patch()
+
+    def next_breaking(self) -> Version:
+        if self.major == 0:
+            if self.minor is not None and self.minor != 0:
+                return self.next_minor()
+
+            if self.precision == 1:
+                return self.next_major()
+            elif self.precision == 2:
+                return self.next_minor()
+
+            return self.next_patch()
+
+        return self.stable.next_major()
+
+    @property
+    def min(self) -> Version:
+        return self
+
+    @property
+    def max(self) -> Version:
+        return self
+
+    @property
+    def full_max(self) -> Version:
+        return self
+
+    @property
+    def include_min(self) -> bool:
+        return True
+
+    @property
+    def include_max(self) -> bool:
+        return True
+
+    def is_any(self) -> bool:
+        return False
+
+    def is_empty(self) -> bool:
+        return False
+
+    def is_simple(self) -> bool:
+        return True
+
+    def allows(self, version: Version | None) -> bool:
+        if version is None:
+            return False
+
+        _this, _other = self, version
+
+        # allow weak equality to allow `3.0.0+local.1` for `3.0.0`
+        if not _this.is_local() and _other.is_local():
+            _other = _other.without_local()
+
+        return _this == _other
+
+    def allows_all(self, other: VersionConstraint) -> bool:
+        return other.is_empty() or (
+            self.allows(other) if isinstance(other, self.__class__) else other == self
+        )
+
+    def allows_any(self, other: VersionConstraint) -> bool:
+        if isinstance(other, Version):
+            return self.allows(other)
+
+        return other.allows(self)
+
+    def intersect(self, other: VersionConstraint) -> Version | EmptyConstraint:
+        if other.allows(self):
+            return self
+
+        if isinstance(other, Version) and self.allows(other):
+            return other
+
+        return EmptyConstraint()
+
+    def union(self, other: VersionConstraint) -> VersionConstraint:
+        from poetry.core.semver.version_range import VersionRange
+
+        if other.allows(self):
+            return other
+
+        if isinstance(other, VersionRangeConstraint):
+            if self.allows(other.min):
+                return VersionRange(
+                    other.min,
+                    other.max,
+                    include_min=True,
+                    include_max=other.include_max,
+                )
+
+            if self.allows(other.max):
+                return VersionRange(
+                    other.min,
+                    other.max,
+                    include_min=other.include_min,
+                    include_max=True,
+                )
+
+        return VersionUnion.of(self, other)
+
+    def difference(self, other: VersionConstraint) -> Version | EmptyConstraint:
+        if other.allows(self):
+            return EmptyConstraint()
+
+        return self
+
+    def flatten(self) -> list[VersionRangeConstraint]:
+        return [self]
+
+    def __str__(self) -> str:
+        return self.text
+
+    def __repr__(self) -> str:
+        return f""
+
+    def __eq__(self, other: object) -> bool:
+        from poetry.core.semver.version_range import VersionRange
+
+        if isinstance(other, VersionRange):
+            return (
+                self == other.min
+                and self == other.max
+                and (other.include_min or other.include_max)
+            )
+        return super().__eq__(other)
+
+    @classmethod
+    def from_parts(
+        cls,
+        major: int,
+        minor: int | None = None,
+        patch: int | None = None,
+        extra: int | tuple[int, ...] | None = None,
+        pre: ReleaseTag | None = None,
+        post: ReleaseTag | None = None,
+        dev: ReleaseTag | None = None,
+        local: LocalSegmentType = None,
+        *,
+        epoch: int = 0,
+    ) -> Version:
+        return cls(
+            release=Release(major=major, minor=minor, patch=patch, extra=extra),
+            pre=pre,
+            post=post,
+            dev=dev,
+            local=local,
+            epoch=epoch,
+        )
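
> Editor's note: a few spot checks of the `Version` semantics defined above (outputs illustrative, vendored package assumed importable):

```python
from poetry.core.semver.version import Version

print(Version.parse("0.2.3").next_breaking())  # 0.3.0: in the 0.x series a minor bump is breaking
print(Version.parse("1.2.3").next_breaking())  # 2.0.0: otherwise the next major

v = Version.parse("3.0.0")
print(v.allows(Version.parse("3.0.0+local.1")))        # True: weak equality ignores local tags
print(v.intersect(Version.parse("3.0.1")).is_empty())  # True: distinct versions do not overlap
```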
diff --git a/vendor/poetry-core/src/poetry/core/semver/version_constraint.py b/vendor/poetry-core/src/poetry/core/semver/version_constraint.py
new file mode 100644
index 00000000..8a35a33d
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/semver/version_constraint.py
@@ -0,0 +1,51 @@
+from __future__ import annotations
+
+from abc import abstractmethod
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from poetry.core.semver.version import Version
+    from poetry.core.semver.version_range_constraint import VersionRangeConstraint
+
+
+class VersionConstraint:
+    @abstractmethod
+    def is_empty(self) -> bool:
+        raise NotImplementedError()
+
+    @abstractmethod
+    def is_any(self) -> bool:
+        raise NotImplementedError()
+
+    @abstractmethod
+    def is_simple(self) -> bool:
+        raise NotImplementedError()
+
+    @abstractmethod
+    def allows(self, version: Version) -> bool:
+        raise NotImplementedError()
+
+    @abstractmethod
+    def allows_all(self, other: VersionConstraint) -> bool:
+        raise NotImplementedError()
+
+    @abstractmethod
+    def allows_any(self, other: VersionConstraint) -> bool:
+        raise NotImplementedError()
+
+    @abstractmethod
+    def intersect(self, other: VersionConstraint) -> VersionConstraint:
+        raise NotImplementedError()
+
+    @abstractmethod
+    def union(self, other: VersionConstraint) -> VersionConstraint:
+        raise NotImplementedError()
+
+    @abstractmethod
+    def difference(self, other: VersionConstraint) -> VersionConstraint:
+        raise NotImplementedError()
+
+    @abstractmethod
+    def flatten(self) -> list[VersionRangeConstraint]:
+        raise NotImplementedError()
diff --git a/vendor/poetry-core/src/poetry/core/semver/version_range.py b/vendor/poetry-core/src/poetry/core/semver/version_range.py
new file mode 100644
index 00000000..3a6aff19
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/semver/version_range.py
@@ -0,0 +1,427 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.core.semver.empty_constraint import EmptyConstraint
+from poetry.core.semver.version_range_constraint import VersionRangeConstraint
+from poetry.core.semver.version_union import VersionUnion
+
+
+if TYPE_CHECKING:
+    from poetry.core.semver.version import Version
+    from poetry.core.semver.version_constraint import VersionConstraint
+
+
+class VersionRange(VersionRangeConstraint):
+    def __init__(
+        self,
+        min: Version | None = None,
+        max: Version | None = None,
+        include_min: bool = False,
+        include_max: bool = False,
+        always_include_max_prerelease: bool = False,
+    ) -> None:
+        full_max = max
+        if (
+            not always_include_max_prerelease
+            and not include_max
+            and full_max is not None
+            and full_max.is_stable()
+            and not full_max.is_postrelease()
+            and (min is None or min.is_stable() or min.release != full_max.release)
+        ):
+            full_max = full_max.first_prerelease()
+
+        self._min = min
+        self._max = max
+        self._full_max = full_max
+        self._include_min = include_min
+        self._include_max = include_max
+
+    @property
+    def min(self) -> Version | None:
+        return self._min
+
+    @property
+    def max(self) -> Version | None:
+        return self._max
+
+    @property
+    def full_max(self) -> Version | None:
+        return self._full_max
+
+    @property
+    def include_min(self) -> bool:
+        return self._include_min
+
+    @property
+    def include_max(self) -> bool:
+        return self._include_max
+
+    def is_empty(self) -> bool:
+        return False
+
+    def is_any(self) -> bool:
+        return self._min is None and self._max is None
+
+    def is_simple(self) -> bool:
+        return self._min is None or self._max is None
+
+    def allows(self, other: Version) -> bool:
+        if self._min is not None:
+            if other < self._min:
+                return False
+
+            if not self._include_min and other == self._min:
+                return False
+
+        if self.full_max is not None:
+            _this, _other = self.full_max, other
+
+            if not _this.is_local() and _other.is_local():
+                # allow weak equality to allow `3.0.0+local.1` for `<=3.0.0`
+                _other = _other.without_local()
+
+            if not _this.is_postrelease() and _other.is_postrelease():
+                # allow weak equality to allow `3.0.0-1` for `<=3.0.0`
+                _other = _other.without_postrelease()
+
+            if _other > _this:
+                return False
+
+            if not self._include_max and _other == _this:
+                return False
+
+        return True
+
+    def allows_all(self, other: VersionConstraint) -> bool:
+        from poetry.core.semver.version import Version
+
+        if other.is_empty():
+            return True
+
+        if isinstance(other, Version):
+            return self.allows(other)
+
+        if isinstance(other, VersionUnion):
+            return all([self.allows_all(constraint) for constraint in other.ranges])
+
+        if isinstance(other, VersionRangeConstraint):
+            return not other.allows_lower(self) and not other.allows_higher(self)
+
+        raise ValueError(f"Unknown VersionConstraint type {other}.")
+
+    def allows_any(self, other: VersionConstraint) -> bool:
+        from poetry.core.semver.version import Version
+
+        if other.is_empty():
+            return False
+
+        if isinstance(other, Version):
+            return self.allows(other)
+
+        if isinstance(other, VersionUnion):
+            return any([self.allows_any(constraint) for constraint in other.ranges])
+
+        if isinstance(other, VersionRangeConstraint):
+            return not other.is_strictly_lower(self) and not other.is_strictly_higher(
+                self
+            )
+
+        raise ValueError(f"Unknown VersionConstraint type {other}.")
+
+    def intersect(self, other: VersionConstraint) -> VersionConstraint:
+        from poetry.core.semver.version import Version
+
+        if other.is_empty():
+            return other
+
+        if isinstance(other, VersionUnion):
+            return other.intersect(self)
+
+        # A range and a Version just yields the version if it's in the range.
+        if isinstance(other, Version):
+            if self.allows(other):
+                return other
+
+            return EmptyConstraint()
+
+        if not isinstance(other, VersionRangeConstraint):
+            raise ValueError(f"Unknown VersionConstraint type {other}.")
+
+        if self.allows_lower(other):
+            if self.is_strictly_lower(other):
+                return EmptyConstraint()
+
+            intersect_min = other.min
+            intersect_include_min = other.include_min
+        else:
+            if other.is_strictly_lower(self):
+                return EmptyConstraint()
+
+            intersect_min = self._min
+            intersect_include_min = self._include_min
+
+        if self.allows_higher(other):
+            intersect_max = other.max
+            intersect_include_max = other.include_max
+        else:
+            intersect_max = self._max
+            intersect_include_max = self._include_max
+
+        if intersect_min is None and intersect_max is None:
+            return VersionRange()
+
+        # If the range is just a single version.
+        if intersect_min == intersect_max:
+            # Because we already verified that the lower range isn't strictly
+            # lower, there must be some overlap.
+            assert intersect_include_min and intersect_include_max
+            assert intersect_min is not None
+
+            return intersect_min
+
+        # If we got here, there is an actual range.
+        return VersionRange(
+            intersect_min, intersect_max, intersect_include_min, intersect_include_max
+        )
+
+    def union(self, other: VersionConstraint) -> VersionConstraint:
+        from poetry.core.semver.version import Version
+
+        if isinstance(other, Version):
+            if self.allows(other):
+                return self
+
+            if other == self.min:
+                return VersionRange(
+                    self.min, self.max, include_min=True, include_max=self.include_max
+                )
+
+            if other == self.max:
+                return VersionRange(
+                    self.min, self.max, include_min=self.include_min, include_max=True
+                )
+
+            return VersionUnion.of(self, other)
+
+        if isinstance(other, VersionRangeConstraint):
+            # If the two ranges don't overlap, we won't be able to create a single
+            # VersionRange for both of them.
+            edges_touch = (
+                self.max == other.min and (self.include_max or other.include_min)
+            ) or (self.min == other.max and (self.include_min or other.include_max))
+
+            if not edges_touch and not self.allows_any(other):
+                return VersionUnion.of(self, other)
+
+            if self.allows_lower(other):
+                union_min = self.min
+                union_include_min = self.include_min
+            else:
+                union_min = other.min
+                union_include_min = other.include_min
+
+            if self.allows_higher(other):
+                union_max = self.max
+                union_include_max = self.include_max
+            else:
+                union_max = other.max
+                union_include_max = other.include_max
+
+            return VersionRange(
+                union_min,
+                union_max,
+                include_min=union_include_min,
+                include_max=union_include_max,
+            )
+
+        return VersionUnion.of(self, other)
+
+    def difference(self, other: VersionConstraint) -> VersionConstraint:
+        from poetry.core.semver.version import Version
+
+        if other.is_empty():
+            return self
+
+        if isinstance(other, Version):
+            if not self.allows(other):
+                return self
+
+            if other == self.min:
+                if not self.include_min:
+                    return self
+
+                return VersionRange(self.min, self.max, False, self.include_max)
+
+            if other == self.max:
+                if not self.include_max:
+                    return self
+
+                return VersionRange(self.min, self.max, self.include_min, False)
+
+            return VersionUnion.of(
+                VersionRange(self.min, other, self.include_min, False),
+                VersionRange(other, self.max, False, self.include_max),
+            )
+        elif isinstance(other, VersionRangeConstraint):
+            if not self.allows_any(other):
+                return self
+
+            before: VersionConstraint | None
+            if not self.allows_lower(other):
+                before = None
+            elif self.min == other.min:
+                before = self.min
+            else:
+                before = VersionRange(
+                    self.min, other.min, self.include_min, not other.include_min
+                )
+
+            after: VersionConstraint | None
+            if not self.allows_higher(other):
+                after = None
+            elif self.max == other.max:
+                after = self.max
+            else:
+                after = VersionRange(
+                    other.max, self.max, not other.include_max, self.include_max
+                )
+
+            if before is None and after is None:
+                return EmptyConstraint()
+
+            if before is None:
+                assert after is not None
+                return after
+
+            if after is None:
+                return before
+
+            return VersionUnion.of(before, after)
+        elif isinstance(other, VersionUnion):
+            ranges: list[VersionRangeConstraint] = []
+            current: VersionRangeConstraint = self
+
+            for range in other.ranges:
+                # Skip any ranges that are strictly lower than [current].
+                if range.is_strictly_lower(current):
+                    continue
+
+                # If we reach a range strictly higher than [current], no more ranges
+                # will be relevant so we can bail early.
+                if range.is_strictly_higher(current):
+                    break
+
+                difference = current.difference(range)
+                if difference.is_empty():
+                    return EmptyConstraint()
+                elif isinstance(difference, VersionUnion):
+                    # If [range] split [current] in half, we only need to continue
+                    # checking future ranges against the latter half.
+                    ranges.append(difference.ranges[0])
+                    current = difference.ranges[-1]
+                else:
+                    assert isinstance(difference, VersionRangeConstraint)
+                    current = difference
+
+            if not ranges:
+                return current
+
+            return VersionUnion.of(*(ranges + [current]))
+
+        raise ValueError(f"Unknown VersionConstraint type {other}.")
+
+    def flatten(self) -> list[VersionRangeConstraint]:
+        return [self]
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, VersionRangeConstraint):
+            return False
+
+        return (
+            self._min == other.min
+            and self._max == other.max
+            and self._include_min == other.include_min
+            and self._include_max == other.include_max
+        )
+
+    def __lt__(self, other: VersionRangeConstraint) -> bool:
+        return self._cmp(other) < 0
+
+    def __le__(self, other: VersionRangeConstraint) -> bool:
+        return self._cmp(other) <= 0
+
+    def __gt__(self, other: VersionRangeConstraint) -> bool:
+        return self._cmp(other) > 0
+
+    def __ge__(self, other: VersionRangeConstraint) -> bool:
+        return self._cmp(other) >= 0
+
+    def _cmp(self, other: VersionRangeConstraint) -> int:
+        if self.min is None:
+            if other.min is None:
+                return self._compare_max(other)
+
+            return -1
+        elif other.min is None:
+            return 1
+
+        if self.min > other.min:
+            return 1
+        elif self.min < other.min:
+            return -1
+
+        if self.include_min != other.include_min:
+            return -1 if self.include_min else 1
+
+        return self._compare_max(other)
+
+    def _compare_max(self, other: VersionRangeConstraint) -> int:
+        if self.max is None:
+            if other.max is None:
+                return 0
+
+            return 1
+        elif other.max is None:
+            return -1
+
+        if self.max > other.max:
+            return 1
+        elif self.max < other.max:
+            return -1
+
+        if self.include_max != other.include_max:
+            return 1 if self.include_max else -1
+
+        return 0
+
+    def __str__(self) -> str:
+        text = ""
+
+        if self.min is not None:
+            text += ">=" if self.include_min else ">"
+            text += self.min.text
+
+        if self.max is not None:
+            if self.min is not None:
+                text += ","
+
+            op = "<=" if self.include_max else "<"
+            text += f"{op}{self.max.text}"
+
+        if self.min is None and self.max is None:
+            return "*"
+
+        return text
+
+    def __repr__(self) -> str:
+        return f""
+
+    def __hash__(self) -> int:
+        return (
+            hash(self.min)
+            ^ hash(self.max)
+            ^ hash(self.include_min)
+            ^ hash(self.include_max)
+        )
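
> Editor's note: `intersect`, `union`, and `difference` above combine bounds pairwise. An illustrative round-trip (expected output in comments):

```python
from poetry.core.semver.helpers import parse_constraint

a = parse_constraint(">=1.0,<3.0")
b = parse_constraint(">=2.0,<4.0")
print(a.intersect(b))   # >=2.0,<3.0
print(a.union(b))       # >=1.0,<4.0
print(a.difference(b))  # >=1.0,<2.0
```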
diff --git a/vendor/poetry-core/src/poetry/core/semver/version_range_constraint.py b/vendor/poetry-core/src/poetry/core/semver/version_range_constraint.py
new file mode 100644
index 00000000..03a431c6
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/semver/version_range_constraint.py
@@ -0,0 +1,93 @@
+from __future__ import annotations
+
+from abc import abstractmethod
+from typing import TYPE_CHECKING
+
+from poetry.core.semver.version_constraint import VersionConstraint
+
+
+if TYPE_CHECKING:
+    from poetry.core.semver.version import Version
+
+
+class VersionRangeConstraint(VersionConstraint):
+    @property
+    @abstractmethod
+    def min(self) -> Version | None:
+        raise NotImplementedError()
+
+    @property
+    @abstractmethod
+    def max(self) -> Version | None:
+        raise NotImplementedError()
+
+    @property
+    @abstractmethod
+    def full_max(self) -> Version | None:
+        raise NotImplementedError()
+
+    @property
+    @abstractmethod
+    def include_min(self) -> bool:
+        raise NotImplementedError()
+
+    @property
+    @abstractmethod
+    def include_max(self) -> bool:
+        raise NotImplementedError()
+
+    def allows_lower(self, other: VersionRangeConstraint) -> bool:
+        if self.min is None:
+            return other.min is not None
+
+        if other.min is None:
+            return False
+
+        if self.min < other.min:
+            return True
+
+        if self.min > other.min:
+            return False
+
+        return self.include_min and not other.include_min
+
+    def allows_higher(self, other: VersionRangeConstraint) -> bool:
+        if self.full_max is None:
+            return other.max is not None
+
+        if other.full_max is None:
+            return False
+
+        if self.full_max < other.full_max:
+            return False
+
+        if self.full_max > other.full_max:
+            return True
+
+        return self.include_max and not other.include_max
+
+    def is_strictly_lower(self, other: VersionRangeConstraint) -> bool:
+        if self.full_max is None or other.min is None:
+            return False
+
+        if self.full_max < other.min:
+            return True
+
+        if self.full_max > other.min:
+            return False
+
+        return not self.include_max or not other.include_min
+
+    def is_strictly_higher(self, other: VersionRangeConstraint) -> bool:
+        return other.is_strictly_lower(self)
+
+    def is_adjacent_to(self, other: VersionRangeConstraint) -> bool:
+        if self.max != other.min:
+            return False
+
+        return (
+            self.include_max
+            and not other.include_min
+            or not self.include_max
+            and other.include_min
+        )
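
> Editor's note: `allows_lower` and `is_adjacent_to` above drive range merging in `VersionUnion.of`. A small sketch, assuming the vendored package is importable:

```python
from poetry.core.semver.version import Version
from poetry.core.semver.version_range import VersionRange

low = VersionRange(max=Version.parse("2.0.0"))                     # <2.0.0
high = VersionRange(min=Version.parse("2.0.0"), include_min=True)  # >=2.0.0

print(low.allows_lower(high))    # True: low has no lower bound at all
print(low.is_adjacent_to(high))  # True: they meet at 2.0.0 without overlapping
```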
diff --git a/vendor/poetry-core/src/poetry/core/semver/version_union.py b/vendor/poetry-core/src/poetry/core/semver/version_union.py
new file mode 100644
index 00000000..26edb742
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/semver/version_union.py
@@ -0,0 +1,423 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.core.semver.empty_constraint import EmptyConstraint
+from poetry.core.semver.version_constraint import VersionConstraint
+from poetry.core.semver.version_range_constraint import VersionRangeConstraint
+
+
+if TYPE_CHECKING:
+    from poetry.core.semver.version import Version
+
+
+class VersionUnion(VersionConstraint):
+    """
+    A version constraint representing a union of multiple disjoint version
+    ranges.
+
+    An instance of this will only be created if the version can't be represented
+    as a non-compound value.
+    """
+
+    def __init__(self, *ranges: VersionRangeConstraint) -> None:
+        self._ranges = list(ranges)
+
+    @property
+    def ranges(self) -> list[VersionRangeConstraint]:
+        return self._ranges
+
+    @classmethod
+    def of(cls, *ranges: VersionConstraint) -> VersionConstraint:
+        from poetry.core.semver.version_range import VersionRange
+
+        flattened: list[VersionRangeConstraint] = []
+        for constraint in ranges:
+            if constraint.is_empty():
+                continue
+
+            if isinstance(constraint, VersionUnion):
+                flattened += constraint.ranges
+                continue
+
+            assert isinstance(constraint, VersionRangeConstraint)
+            flattened.append(constraint)
+
+        if not flattened:
+            return EmptyConstraint()
+
+        if any([constraint.is_any() for constraint in flattened]):
+            return VersionRange()
+
+        # Only allow Versions and VersionRanges here so we can more easily reason
+        # about everything in flattened. _EmptyVersions and VersionUnions are
+        # filtered out above.
+        for constraint in flattened:
+            if not isinstance(constraint, VersionRangeConstraint):
+                raise ValueError(f"Unknown VersionConstraint type {constraint}.")
+
+        flattened.sort()
+
+        merged: list[VersionRangeConstraint] = []
+        for constraint in flattened:
+            # Merge this constraint with the previous one, but only if they touch.
+            if not merged or (
+                not merged[-1].allows_any(constraint)
+                and not merged[-1].is_adjacent_to(constraint)
+            ):
+                merged.append(constraint)
+            else:
+                new_constraint = merged[-1].union(constraint)
+                assert isinstance(new_constraint, VersionRangeConstraint)
+                merged[-1] = new_constraint
+
+        if len(merged) == 1:
+            return merged[0]
+
+        return VersionUnion(*merged)
+
+    def is_empty(self) -> bool:
+        return False
+
+    def is_any(self) -> bool:
+        return False
+
+    def is_simple(self) -> bool:
+        return self.excludes_single_version()
+
+    def allows(self, version: Version) -> bool:
+        return any([constraint.allows(version) for constraint in self._ranges])
+
+    def allows_all(self, other: VersionConstraint) -> bool:
+        our_ranges = iter(self._ranges)
+        their_ranges = iter(other.flatten())
+
+        our_current_range = next(our_ranges, None)
+        their_current_range = next(their_ranges, None)
+
+        while our_current_range and their_current_range:
+            if our_current_range.allows_all(their_current_range):
+                their_current_range = next(their_ranges, None)
+            else:
+                our_current_range = next(our_ranges, None)
+
+        return their_current_range is None
+
+    def allows_any(self, other: VersionConstraint) -> bool:
+        our_ranges = iter(self._ranges)
+        their_ranges = iter(other.flatten())
+
+        our_current_range = next(our_ranges, None)
+        their_current_range = next(their_ranges, None)
+
+        while our_current_range and their_current_range:
+            if our_current_range.allows_any(their_current_range):
+                return True
+
+            if their_current_range.allows_higher(our_current_range):
+                our_current_range = next(our_ranges, None)
+            else:
+                their_current_range = next(their_ranges, None)
+
+        return False
+
+    def intersect(self, other: VersionConstraint) -> VersionConstraint:
+        our_ranges = iter(self._ranges)
+        their_ranges = iter(other.flatten())
+        new_ranges = []
+
+        our_current_range = next(our_ranges, None)
+        their_current_range = next(their_ranges, None)
+
+        while our_current_range and their_current_range:
+            intersection = our_current_range.intersect(their_current_range)
+
+            if not intersection.is_empty():
+                new_ranges.append(intersection)
+
+            if their_current_range.allows_higher(our_current_range):
+                our_current_range = next(our_ranges, None)
+            else:
+                their_current_range = next(their_ranges, None)
+
+        return VersionUnion.of(*new_ranges)
+
+    def union(self, other: VersionConstraint) -> VersionConstraint:
+        return VersionUnion.of(self, other)
+
+    def difference(self, other: VersionConstraint) -> VersionConstraint:
+        our_ranges = iter(self._ranges)
+        their_ranges = iter(other.flatten())
+        new_ranges: list[VersionConstraint] = []
+
+        state = {
+            "current": next(our_ranges, None),
+            "their_range": next(their_ranges, None),
+        }
+
+        def their_next_range() -> bool:
+            state["their_range"] = next(their_ranges, None)
+            if state["their_range"]:
+                return True
+
+            assert state["current"] is not None
+            new_ranges.append(state["current"])
+            our_current = next(our_ranges, None)
+            while our_current:
+                new_ranges.append(our_current)
+                our_current = next(our_ranges, None)
+
+            return False
+
+        def our_next_range(include_current: bool = True) -> bool:
+            if include_current:
+                assert state["current"] is not None
+                new_ranges.append(state["current"])
+
+            our_current = next(our_ranges, None)
+            if not our_current:
+                return False
+
+            state["current"] = our_current
+
+            return True
+
+        while True:
+            if state["their_range"] is None:
+                break
+
+            assert state["current"] is not None
+            if state["their_range"].is_strictly_lower(state["current"]):
+                if not their_next_range():
+                    break
+
+                continue
+
+            if state["their_range"].is_strictly_higher(state["current"]):
+                if not our_next_range():
+                    break
+
+                continue
+
+            difference = state["current"].difference(state["their_range"])
+            if isinstance(difference, VersionUnion):
+                assert len(difference.ranges) == 2
+                new_ranges.append(difference.ranges[0])
+                state["current"] = difference.ranges[-1]
+
+                if not their_next_range():
+                    break
+            elif difference.is_empty():
+                if not our_next_range(False):
+                    break
+            else:
+                assert isinstance(difference, VersionRangeConstraint)
+                state["current"] = difference
+
+                if state["current"].allows_higher(state["their_range"]):
+                    if not their_next_range():
+                        break
+                else:
+                    if not our_next_range():
+                        break
+
+        if not new_ranges:
+            return EmptyConstraint()
+
+        if len(new_ranges) == 1:
+            return new_ranges[0]
+
+        return VersionUnion.of(*new_ranges)
+
+    def flatten(self) -> list[VersionRangeConstraint]:
+        return self.ranges
+
+    def _exclude_single_wildcard_range_string(self) -> str:
+        """
+        Helper method to convert this instance into a wild card range
+        string.
+        """
+        if not self.excludes_single_wildcard_range():
+            raise ValueError("Not a valid wildcard range")
+
+        # we assume here that since it is a single exclusion range
+        # that it is one of "< 2.0.0 || >= 2.1.0" or ">= 2.1.0 || < 2.0.0"
+        # and the one with the max is the first part
+        idx_order = (0, 1) if self._ranges[0].max else (1, 0)
+        one = self._ranges[idx_order[0]].max
+        assert one is not None
+        two = self._ranges[idx_order[1]].min
+        assert two is not None
+
+        # versions can have both semver and non semver parts
+        parts_one = [
+            one.major,
+            one.minor or 0,
+            one.patch or 0,
+            *list(one.non_semver_parts or []),
+        ]
+        parts_two = [
+            two.major,
+            two.minor or 0,
+            two.patch or 0,
+            *list(two.non_semver_parts or []),
+        ]
+
+        # we assume here that a wildcard range implies that the part following the
+        # first part that is different in the second range is the wildcard, this means
+        # that multiple wildcards are not supported right now.
+        parts = []
+
+        for idx, part in enumerate(parts_one):
+            parts.append(str(part))
+            if parts_two[idx] != part:
+                # since this part is different the next one is the wildcard
+                # for example, "< 2.0.0 || >= 2.1.0" gets us a wildcard range
+                # 2.0.*
+                parts.append("*")
+                break
+        else:
+            # we should not ever get here, however it is likely that poorly
+            # constructed metadata exists
+            raise ValueError("Not a valid wildcard range")
+
+        return f"!={'.'.join(parts)}"
+
+    @staticmethod
+    def _excludes_single_wildcard_range_check_is_valid_range(
+        one: VersionRangeConstraint, two: VersionRangeConstraint
+    ) -> bool:
+        """
+        Helper method to determine if two versions define a single wildcard range.
+
+        In cases where !=2.0.* was parsed by us, the union is of the range
+        <2.0.0 || >=2.1.0. In user defined ranges, precision might be different.
+        For example, a union <2.0 || >= 2.1.0 is still !=2.0.*. In order to
+        handle these cases we make sure that if precisions do not match, extra
+        checks are performed to validate that the constraint is a valid single
+        wildcard range.
+        """
+
+        assert one.max is not None
+        assert two.min is not None
+
+        max_precision = max(one.max.precision, two.min.precision)
+
+        if max_precision <= 3:
+            # In cases where both versions have a precision less than 3,
+            # we can make use of the next major/minor/patch versions.
+            return two.min in {
+                one.max.next_major(),
+                one.max.next_minor(),
+                one.max.next_patch(),
+            }
+        else:
+            # When there are non-semver parts in one of the versions, we need to
+            # ensure we use zero padded version and in addition to next major/minor/
+            # patch versions, also check each next release for the extra parts.
+            from_parts = one.max.__class__.from_parts
+
+            _extras: list[list[int]] = []
+            _versions: list[Version] = []
+
+            for _version in [one.max, two.min]:
+                _extra = list(_version.non_semver_parts or [])
+
+                while len(_extra) < (max_precision - 3):
+                    # pad zeros for extra parts to ensure precisions are equal
+                    _extra.append(0)
+
+                # create a new release with unspecified parts padded with zeros
+                _padded_version: Version = from_parts(
+                    major=_version.major,
+                    minor=_version.minor or 0,
+                    patch=_version.patch or 0,
+                    extra=tuple(_extra),
+                )
+
+                _extras.append(_extra)
+                _versions.append(_padded_version)
+
+            _extra_one = _extras[0]
+            _padded_version_one = _versions[0]
+            _padded_version_two = _versions[1]
+
+            _check_versions = {
+                _padded_version_one.next_major(),
+                _padded_version_one.next_minor(),
+                _padded_version_one.next_patch(),
+            }
+
+            # for each non-semver (extra) part, bump a version
+            for idx in range(len(_extra_one)):
+                _extra = [
+                    *_extra_one[: idx - 1],
+                    (_extra_one[idx] + 1),
+                    *_extra_one[idx + 1 :],
+                ]
+                _check_versions.add(
+                    from_parts(
+                        _padded_version_one.major,
+                        _padded_version_one.minor,
+                        _padded_version_one.patch,
+                        tuple(_extra),
+                    )
+                )
+
+            return _padded_version_two in _check_versions
+
+    def excludes_single_wildcard_range(self) -> bool:
+        from poetry.core.semver.version_range import VersionRange
+
+        if len(self._ranges) != 2:
+            return False
+
+        idx_order = (0, 1) if self._ranges[0].max else (1, 0)
+        one = self._ranges[idx_order[0]]
+        two = self._ranges[idx_order[1]]
+
+        is_range_exclusion = (
+            one.max and not one.include_max and two.min and two.include_min
+        )
+
+        if not is_range_exclusion:
+            return False
+
+        if not self._excludes_single_wildcard_range_check_is_valid_range(one, two):
+            return False
+
+        return isinstance(VersionRange().difference(self), VersionRange)
+
+    def excludes_single_version(self) -> bool:
+        from poetry.core.semver.version import Version
+        from poetry.core.semver.version_range import VersionRange
+
+        return isinstance(VersionRange().difference(self), Version)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, VersionUnion):
+            return False
+
+        return self._ranges == other.ranges
+
+    def __hash__(self) -> int:
+        h = hash(self._ranges[0])
+
+        for range in self._ranges[1:]:
+            h ^= hash(range)
+
+        return h
+
+    def __str__(self) -> str:
+        from poetry.core.semver.version_range import VersionRange
+
+        if self.excludes_single_version():
+            return f"!={VersionRange().difference(self)}"
+
+        try:
+            return self._exclude_single_wildcard_range_string()
+        except ValueError:
+            return " || ".join([str(r) for r in self._ranges])
+
+    def __repr__(self) -> str:
+        return f""
diff --git a/vendor/poetry/poetry/console/logging/__init__.py b/vendor/poetry-core/src/poetry/core/spdx/__init__.py
similarity index 100%
rename from vendor/poetry/poetry/console/logging/__init__.py
rename to vendor/poetry-core/src/poetry/core/spdx/__init__.py
diff --git a/vendor/poetry/poetry/inspection/__init__.py b/vendor/poetry-core/src/poetry/core/spdx/data/__init__.py
similarity index 100%
rename from vendor/poetry/poetry/inspection/__init__.py
rename to vendor/poetry-core/src/poetry/core/spdx/data/__init__.py
diff --git a/vendor/poetry-core/poetry/core/spdx/data/licenses.json b/vendor/poetry-core/src/poetry/core/spdx/data/licenses.json
similarity index 99%
rename from vendor/poetry-core/poetry/core/spdx/data/licenses.json
rename to vendor/poetry-core/src/poetry/core/spdx/data/licenses.json
index b598305b..6a241f66 100644
--- a/vendor/poetry-core/poetry/core/spdx/data/licenses.json
+++ b/vendor/poetry-core/src/poetry/core/spdx/data/licenses.json
@@ -1040,17 +1040,17 @@
     false
   ],
   "LiLiQ-P-1.1": [
-    "Licence Libre du Qu\u00e9bec \u2013 Permissive version 1.1",
+    "Licence Libre du Québec – Permissive version 1.1",
     true,
     false
   ],
   "LiLiQ-R-1.1": [
-    "Licence Libre du Qu\u00e9bec \u2013 R\u00e9ciprocit\u00e9 version 1.1",
+    "Licence Libre du Québec – Réciprocité version 1.1",
     true,
     false
   ],
   "LiLiQ-Rplus-1.1": [
-    "Licence Libre du Qu\u00e9bec \u2013 R\u00e9ciprocit\u00e9 forte version 1.1",
+    "Licence Libre du Québec – Réciprocité forte version 1.1",
     true,
     false
   ],
diff --git a/vendor/poetry-core/src/poetry/core/spdx/helpers.py b/vendor/poetry-core/src/poetry/core/spdx/helpers.py
new file mode 100644
index 00000000..1dde7455
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/spdx/helpers.py
@@ -0,0 +1,50 @@
+from __future__ import annotations
+
+import functools
+import json
+import os
+
+from importlib import resources
+
+from poetry.core.spdx.license import License
+
+
+def license_by_id(identifier: str) -> License:
+    if not identifier:
+        raise ValueError("A license identifier is required")
+
+    licenses = _load_licenses()
+    return licenses.get(
+        identifier.lower(), License(identifier, identifier, False, False)
+    )
+
+
+@functools.lru_cache()
+def _load_licenses() -> dict[str, License]:
+    # import the *package* __name__ ("poetry.core.spdx") so importlib.resources
+    # can locate the bundled data package even when running frozen
+    from . import __name__
+
+    licenses = {}
+    data = json.loads(resources.read_text(f"{__name__}.data", "licenses.json"))
+
+    for name, license_info in data.items():
+        license = License(name, license_info[0], license_info[1], license_info[2])
+        licenses[name.lower()] = license
+
+        full_name = license_info[0].lower()
+        if full_name in licenses:
+            existing_license = licenses[full_name]
+            if not existing_license.is_deprecated:
+                continue
+
+        licenses[full_name] = license
+
+    # Add a Proprietary license for non-standard licenses
+    licenses["proprietary"] = License("Proprietary", "Proprietary", False, False)
+
+    return licenses
+
+
+if __name__ == "__main__":
+    from poetry.core.spdx.updater import Updater
+
+    updater = Updater()
+    updater.dump()
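
> Editor's note: `license_by_id` is case-insensitive and falls back to a stub `License` for unknown identifiers. An illustrative usage sketch; the classifier string in the comment is the expected form for MIT:

```python
from poetry.core.spdx.helpers import license_by_id

mit = license_by_id("mit")                   # lookup is case-insensitive
print(mit.id, mit.is_osi_approved)           # MIT True
print(mit.classifier)                        # e.g. License :: OSI Approved :: MIT License

custom = license_by_id("My-Custom-License")  # unknown ids yield a non-OSI stub
print(custom.is_osi_approved)                # False
```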
diff --git a/vendor/poetry-core/poetry/core/spdx/license.py b/vendor/poetry-core/src/poetry/core/spdx/license.py
similarity index 96%
rename from vendor/poetry-core/poetry/core/spdx/license.py
rename to vendor/poetry-core/src/poetry/core/spdx/license.py
index f5a9fb6d..901a1cbc 100644
--- a/vendor/poetry-core/poetry/core/spdx/license.py
+++ b/vendor/poetry-core/src/poetry/core/spdx/license.py
@@ -1,8 +1,13 @@
+from __future__ import annotations
+
 from collections import namedtuple
-from typing import Optional
 
 
 class License(namedtuple("License", "id name is_osi_approved is_deprecated")):
+    id: str
+    name: str
+    is_osi_approved: bool
+    is_deprecated: bool
 
     CLASSIFIER_SUPPORTED = {
         # Not OSI Approved
@@ -131,7 +136,7 @@ class License(namedtuple("License", "id name is_osi_approved is_deprecated")):
     }
 
     @property
-    def classifier(self):  # type: () -> str
+    def classifier(self) -> str:
         parts = ["License"]
 
         if self.is_osi_approved:
@@ -144,7 +149,7 @@ def classifier(self):  # type: () -> str
         return " :: ".join(parts)
 
     @property
-    def classifier_name(self):  # type: () -> Optional[str]
+    def classifier_name(self) -> str | None:
         if self.id not in self.CLASSIFIER_SUPPORTED:
             if self.is_osi_approved:
                 return None
diff --git a/vendor/poetry-core/src/poetry/core/spdx/updater.py b/vendor/poetry-core/src/poetry/core/spdx/updater.py
new file mode 100644
index 00000000..9f6ff37d
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/spdx/updater.py
@@ -0,0 +1,39 @@
+from __future__ import annotations
+
+import json
+import os
+
+from typing import Any
+from urllib.request import urlopen
+
+
+class Updater:
+    BASE_URL = "https://raw.githubusercontent.com/spdx/license-list-data/master/json/"
+
+    def __init__(self, base_url: str = BASE_URL) -> None:
+        self._base_url = base_url
+
+    def dump(self, file: str | None = None) -> None:
+        if file is None:
+            file = os.path.join(os.path.dirname(__file__), "data", "licenses.json")
+
+        licenses_url = self._base_url + "licenses.json"
+
+        with open(file, "w", encoding="utf-8") as f:
+            f.write(
+                json.dumps(self.get_licenses(licenses_url), indent=2, sort_keys=True)
+            )
+
+    def get_licenses(self, url: str) -> dict[str, Any]:
+        licenses = {}
+        with urlopen(url) as r:
+            data = json.loads(r.read().decode())
+
+        for info in data["licenses"]:
+            licenses[info["licenseId"]] = [
+                info["name"],
+                info["isOsiApproved"],
+                info["isDeprecatedLicenseId"],
+            ]
+
+        return licenses
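
> Editor's note: regenerating the vendored license data is then a one-liner (network access required; writes `data/licenses.json` next to the module by default):

```python
from poetry.core.spdx.updater import Updater

Updater().dump()  # fetches licenses.json from the SPDX license-list-data repo
```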
diff --git a/vendor/poetry-core/src/poetry/core/toml/__init__.py b/vendor/poetry-core/src/poetry/core/toml/__init__.py
new file mode 100644
index 00000000..3ce16890
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/toml/__init__.py
@@ -0,0 +1,7 @@
+from __future__ import annotations
+
+from poetry.core.toml.exceptions import TOMLError
+from poetry.core.toml.file import TOMLFile
+
+
+__all__ = ["TOMLError", "TOMLFile"]
diff --git a/vendor/poetry-core/src/poetry/core/toml/exceptions.py b/vendor/poetry-core/src/poetry/core/toml/exceptions.py
new file mode 100644
index 00000000..efa189cf
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/toml/exceptions.py
@@ -0,0 +1,9 @@
+from __future__ import annotations
+
+from tomlkit.exceptions import TOMLKitError
+
+from poetry.core.exceptions import PoetryCoreException
+
+
+class TOMLError(TOMLKitError, PoetryCoreException):  # type: ignore[misc]
+    pass
diff --git a/vendor/poetry-core/src/poetry/core/toml/file.py b/vendor/poetry-core/src/poetry/core/toml/file.py
new file mode 100644
index 00000000..7bc1cd74
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/toml/file.py
@@ -0,0 +1,42 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
+
+from tomlkit.toml_file import TOMLFile as BaseTOMLFile
+
+
+if TYPE_CHECKING:
+    from tomlkit.toml_document import TOMLDocument
+
+
+class TOMLFile(BaseTOMLFile):  # type: ignore[misc]
+    def __init__(self, path: str | Path) -> None:
+        if isinstance(path, str):
+            path = Path(path)
+        super().__init__(path.as_posix())
+        self.__path = path
+
+    @property
+    def path(self) -> Path:
+        return self.__path
+
+    def exists(self) -> bool:
+        return self.__path.exists()
+
+    def read(self) -> TOMLDocument:
+        from tomlkit.exceptions import TOMLKitError
+
+        from poetry.core.toml import TOMLError
+
+        try:
+            return super().read()
+        except (ValueError, TOMLKitError) as e:
+            raise TOMLError(f"Invalid TOML file {self.path.as_posix()}: {e}")
+
+    def __getattr__(self, item: str) -> Any:
+        return getattr(self.__path, item)
+
+    def __str__(self) -> str:
+        return self.__path.as_posix()
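
> Editor's note: `TOMLFile` wraps tomlkit's file class and normalizes parse failures into the package's own `TOMLError`. An illustrative usage sketch:

```python
from poetry.core.toml import TOMLError, TOMLFile

f = TOMLFile("pyproject.toml")
if f.exists():
    try:
        doc = f.read()  # raises TOMLError on invalid TOML
    except TOMLError as e:
        print(f"bad file: {e}")
    else:
        print(doc.get("build-system", {}).get("build-backend"))
```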
diff --git a/vendor/poetry/poetry/io/__init__.py b/vendor/poetry-core/src/poetry/core/utils/__init__.py
similarity index 100%
rename from vendor/poetry/poetry/io/__init__.py
rename to vendor/poetry-core/src/poetry/core/utils/__init__.py
diff --git a/vendor/poetry-core/src/poetry/core/utils/_compat.py b/vendor/poetry-core/src/poetry/core/utils/_compat.py
new file mode 100644
index 00000000..7b3f59e7
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/utils/_compat.py
@@ -0,0 +1,6 @@
+from __future__ import annotations
+
+import sys
+
+
+WINDOWS = sys.platform == "win32"
diff --git a/vendor/poetry-core/src/poetry/core/utils/helpers.py b/vendor/poetry-core/src/poetry/core/utils/helpers.py
new file mode 100644
index 00000000..3e03d0c2
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/utils/helpers.py
@@ -0,0 +1,101 @@
+from __future__ import annotations
+
+import os
+import shutil
+import stat
+import tempfile
+import unicodedata
+
+from contextlib import contextmanager
+from pathlib import Path
+from typing import Any
+from typing import Iterator
+
+from packaging.utils import canonicalize_name
+
+from poetry.core.version.pep440 import PEP440Version
+
+
+def combine_unicode(string: str) -> str:
+    return unicodedata.normalize("NFC", string)
+
+
+def module_name(name: str) -> str:
+    return canonicalize_name(name).replace("-", "_")
+
+
+def normalize_version(version: str) -> str:
+    return PEP440Version.parse(version).to_string()
+
+
+@contextmanager
+def temporary_directory(*args: Any, **kwargs: Any) -> Iterator[str]:
+    name = tempfile.mkdtemp(*args, **kwargs)
+    yield name
+    safe_rmtree(name)
+
+
+def parse_requires(requires: str) -> list[str]:
+    lines = requires.split("\n")
+
+    requires_dist = []
+    in_section = False
+    current_marker = None
+    for line in lines:
+        line = line.strip()
+        if not line:
+            if in_section:
+                in_section = False
+
+            continue
+
+        if line.startswith("["):
+            # extras or conditional dependencies
+            marker = line.lstrip("[").rstrip("]")
+            if ":" not in marker:
+                extra, marker = marker, ""
+            else:
+                extra, marker = marker.split(":")
+
+            if extra:
+                if marker:
+                    marker = f'{marker} and extra == "{extra}"'
+                else:
+                    marker = f'extra == "{extra}"'
+
+            if marker:
+                current_marker = marker
+
+            continue
+
+        if current_marker:
+            line = f"{line} ; {current_marker}"
+
+        requires_dist.append(line)
+
+    return requires_dist
+
+
+def _on_rm_error(func: Any, path: str | Path, exc_info: Any) -> None:
+    if not os.path.exists(path):
+        return
+
+    os.chmod(path, stat.S_IWRITE)
+    func(path)
+
+
+def safe_rmtree(path: str | Path) -> None:
+    if Path(path).is_symlink():
+        return os.unlink(str(path))
+
+    shutil.rmtree(path, onerror=_on_rm_error)
+
+
+def readme_content_type(path: str | Path) -> str:
+    suffix = Path(path).suffix
+    if suffix == ".rst":
+        return "text/x-rst"
+    elif suffix in [".md", ".markdown"]:
+        return "text/markdown"
+    else:
+        return "text/plain"
diff --git a/vendor/poetry-core/src/poetry/core/utils/patterns.py b/vendor/poetry-core/src/poetry/core/utils/patterns.py
new file mode 100644
index 00000000..c2d9d9bf
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/utils/patterns.py
@@ -0,0 +1,11 @@
+from __future__ import annotations
+
+import re
+
+
+wheel_file_re = re.compile(
+    r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?)
+        ((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
+        \.whl|\.dist-info)$""",
+    re.VERBOSE,
+)
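
The group names follow pip's canonical wheel-filename pattern, so a tagged filename decomposes as expected; a quick sanity check:

    from poetry.core.utils.patterns import wheel_file_re

    m = wheel_file_re.match("poetry_core-1.1.0-py3-none-any.whl")
    assert m is not None
    print(m.group("name"), m.group("ver"), m.group("plat"))
    # poetry_core 1.1.0 any
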
diff --git a/vendor/poetry-core/src/poetry/core/utils/toml_file.py b/vendor/poetry-core/src/poetry/core/utils/toml_file.py
new file mode 100644
index 00000000..69df7589
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/utils/toml_file.py
@@ -0,0 +1,20 @@
+from __future__ import annotations
+
+from typing import Any
+
+from poetry.core.toml import TOMLFile
+
+
+class TomlFile(TOMLFile):
+    @classmethod
+    def __new__(cls: type[TOMLFile], *args: Any, **kwargs: Any) -> TOMLFile:
+        import warnings
+
+        this_import = f"{cls.__module__}.{cls.__name__}"
+        new_import = f"{TOMLFile.__module__}.{TOMLFile.__name__}"
+        warnings.warn(
+            f"Use of {this_import} has been deprecated, use {new_import} instead.",
+            category=DeprecationWarning,
+            stacklevel=2,
+        )
+        return super().__new__(cls)  # type: ignore[no-any-return,misc]
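
The shim keeps the old import location working while nudging callers toward poetry.core.toml.TOMLFile. The warning can be observed directly:

    import warnings

    from poetry.core.utils.toml_file import TomlFile

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        TomlFile("pyproject.toml")

    assert any(issubclass(w.category, DeprecationWarning) for w in caught)
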
diff --git a/vendor/poetry-core/src/poetry/core/vcs/__init__.py b/vendor/poetry-core/src/poetry/core/vcs/__init__.py
new file mode 100644
index 00000000..f4096ec0
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/vcs/__init__.py
@@ -0,0 +1,35 @@
+from __future__ import annotations
+
+import os
+import subprocess
+
+from pathlib import Path
+
+from poetry.core.vcs.git import Git
+
+
+def get_vcs(directory: Path) -> Git | None:
+    working_dir = Path.cwd()
+    os.chdir(str(directory.resolve()))
+
+    vcs: Git | None
+
+    try:
+        from poetry.core.vcs.git import executable
+
+        git_dir = (
+            subprocess.check_output(
+                [executable(), "rev-parse", "--show-toplevel"], stderr=subprocess.STDOUT
+            )
+            .decode()
+            .strip()
+        )
+
+        vcs = Git(Path(git_dir))
+
+    except (subprocess.CalledProcessError, OSError, RuntimeError):
+        vcs = None
+    finally:
+        os.chdir(str(working_dir))
+
+    return vcs
diff --git a/vendor/poetry-core/src/poetry/core/vcs/git.py b/vendor/poetry-core/src/poetry/core/vcs/git.py
new file mode 100644
index 00000000..aeccdf37
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/vcs/git.py
@@ -0,0 +1,384 @@
+from __future__ import annotations
+
+import re
+import subprocess
+
+from collections import namedtuple
+from pathlib import Path
+from typing import Any
+
+from poetry.core.utils._compat import WINDOWS
+
+
+PROTOCOL = r"\w+"
+USER = r"[a-zA-Z0-9_.-]+"
+RESOURCE = r"[a-zA-Z0-9_.-]+"
+PORT = r"\d+"
+PATH = r"[\w~.\-/\\\$]+"
+NAME = r"[\w~.\-]+"
+REV = r"[^@#]+?"
+SUBDIR = r"[\w\-/\\]+"
+
+PATTERNS = [
+    re.compile(
+        r"^(git\+)?"
+        r"(?P<protocol>https?|git|ssh|rsync|file)://"
+        rf"(?:(?P<user>{USER})@)?"
+        rf"(?P<resource>{RESOURCE})?"
+        rf"(:(?P<port>{PORT}))?"
+        rf"(?P<pathname>[:/\\]({PATH}[/\\])?"
+        rf"((?P<name>{NAME}?)(\.git|[/\\])?)?)"
+        r"(?:"
+        r"#egg=?.+"
+        r"|"
+        rf"#(?:egg=.+?&subdirectory=|subdirectory=)(?P<subdirectory>{SUBDIR})"
+        r"|"
+        rf"[@#](?P<rev>{REV})(?:[&#](?:egg=.+?|(?:egg=.+?&subdirectory=|subdirectory=)(?P<rev_subdirectory>{SUBDIR})))?"
+        r")?"
+        r"$"
+    ),
+    re.compile(
+        r"(git\+)?"
+        rf"((?P<protocol>{PROTOCOL})://)"
+        rf"(?:(?P<user>{USER})@)?"
+        rf"(?P<resource>{RESOURCE}:?)"
+        rf"(:(?P<port>{PORT}))?"
+        rf"(?P<pathname>({PATH})"
+        rf"(?P<name>{NAME})(\.git|/)?)"
+        r"(?:"
+        r"#egg=?.+"
+        r"|"
+        rf"#(?:egg=.+?&subdirectory=|subdirectory=)(?P<subdirectory>{SUBDIR})"
+        r"|"
+        rf"[@#](?P<rev>{REV})(?:[&#](?:egg=.+?|(?:egg=.+?&subdirectory=|subdirectory=)(?P<rev_subdirectory>{SUBDIR})))?"
+        r")?"
+        r"$"
+    ),
+    re.compile(
+        rf"^(?:(?P<user>{USER})@)?"
+        rf"(?P<resource>{RESOURCE})"
+        rf"(:(?P<port>{PORT}))?"
+        rf"(?P<pathname>([:/]{PATH}/)"
+        rf"(?P<name>{NAME})(\.git|/)?)"
+        r"(?:"
+        r"#egg=.+?"
+        r"|"
+        rf"#(?:egg=.+?&subdirectory=|subdirectory=)(?P<subdirectory>{SUBDIR})"
+        r"|"
+        rf"[@#](?P<rev>{REV})(?:[&#](?:egg=.+?&subdirectory=|subdirectory=)(?P<rev_subdirectory>{SUBDIR}))?"
+        r")?"
+        r"$"
+    ),
+    re.compile(
+        rf"((?P<user>{USER})@)?"
+        rf"(?P<resource>{RESOURCE})"
+        r"[:/]{{1,2}}"
+        rf"(?P<pathname>({PATH})"
+        rf"(?P<name>{NAME})(\.git|/)?)"
+        r"(?:"
+        r"#egg=?.+"
+        r"|"
+        rf"#(?:egg=.+?&subdirectory=|subdirectory=)(?P<subdirectory>{SUBDIR})"
+        r"|"
+        rf"[@#](?P<rev>{REV})(?:[&#](?:egg=.+?|(?:egg=.+?&subdirectory=|subdirectory=)(?P<rev_subdirectory>{SUBDIR})))?"
+        r")?"
+        r"$"
+    ),
+]
+
+
+class GitError(RuntimeError):
+    pass
+
+
+class ParsedUrl:
+    def __init__(
+        self,
+        protocol: str | None,
+        resource: str | None,
+        pathname: str | None,
+        user: str | None,
+        port: str | None,
+        name: str | None,
+        rev: str | None,
+        subdirectory: str | None = None,
+    ) -> None:
+        self.protocol = protocol
+        self.resource = resource
+        self.pathname = pathname
+        self.user = user
+        self.port = port
+        self.name = name
+        self.rev = rev
+        self.subdirectory = subdirectory
+
+    @classmethod
+    def parse(cls, url: str) -> ParsedUrl:
+        for pattern in PATTERNS:
+            m = pattern.match(url)
+            if m:
+                groups = m.groupdict()
+                return ParsedUrl(
+                    groups.get("protocol"),
+                    groups.get("resource"),
+                    groups.get("pathname"),
+                    groups.get("user"),
+                    groups.get("port"),
+                    groups.get("name"),
+                    groups.get("rev"),
+                    groups.get("rev_subdirectory") or groups.get("subdirectory"),
+                )
+
+        raise ValueError(f'Invalid git url "{url}"')
+
+    @property
+    def url(self) -> str:
+        protocol = f"{self.protocol}://" if self.protocol else ""
+        user = f"{self.user}@" if self.user else ""
+        port = f":{self.port}" if self.port else ""
+        path = "/" + (self.pathname or "").lstrip(":/")
+        return f"{protocol}{user}{self.resource}{port}{path}"
+
+    def format(self) -> str:
+        return self.url
+
+    def __str__(self) -> str:
+        return self.format()
+
+
+GitUrl = namedtuple("GitUrl", ["url", "revision", "subdirectory"])
+
+
+_executable: str | None = None
+
+
+def executable() -> str:
+    global _executable
+
+    if _executable is not None:
+        return _executable
+
+    if WINDOWS:
+        # Finding git via where.exe
+        where = "%WINDIR%\\System32\\where.exe"
+        paths = subprocess.check_output(
+            [where, "git"], shell=True, encoding="oem"
+        ).split("\n")
+        for path in paths:
+            if not path:
+                continue
+
+            _path = Path(path.strip())
+            try:
+                _path.relative_to(Path.cwd())
+            except ValueError:
+                _executable = str(_path)
+
+                break
+    else:
+        _executable = "git"
+
+    if _executable is None:
+        raise RuntimeError("Unable to find a valid git executable")
+
+    return _executable
+
+
+def _reset_executable() -> None:
+    global _executable
+
+    _executable = None
+
+
+class GitConfig:
+    def __init__(self, requires_git_presence: bool = False) -> None:
+        self._config = {}
+
+        try:
+            config_list = subprocess.check_output(
+                [executable(), "config", "-l"], stderr=subprocess.STDOUT
+            ).decode()
+
+            m = re.findall("(?ms)^([^=]+)=(.*?)$", config_list)
+            if m:
+                for group in m:
+                    self._config[group[0]] = group[1]
+        except (subprocess.CalledProcessError, OSError):
+            if requires_git_presence:
+                raise
+
+    def get(self, key: Any, default: Any | None = None) -> Any:
+        return self._config.get(key, default)
+
+    def __getitem__(self, item: Any) -> Any:
+        return self._config[item]
+
+
+class Git:
+    def __init__(self, work_dir: Path | None = None) -> None:
+        self._config = GitConfig(requires_git_presence=True)
+        self._work_dir = work_dir
+
+    @classmethod
+    def normalize_url(cls, url: str) -> GitUrl:
+        parsed = ParsedUrl.parse(url)
+
+        formatted = re.sub(r"^git\+", "", url)
+        if parsed.rev:
+            formatted = re.sub(rf"[#@]{parsed.rev}(?=[#&]?)(?!\=)", "", formatted)
+
+        if parsed.subdirectory:
+            formatted = re.sub(
+                rf"[#&]subdirectory={parsed.subdirectory}$", "", formatted
+            )
+
+        altered = parsed.format() != formatted
+
+        if altered:
+            if re.match(r"^git\+https?", url) and re.match(
+                r"^/?:[^0-9]", parsed.pathname or ""
+            ):
+                normalized = re.sub(r"git\+(.*:[^:]+):(.*)", "\\1/\\2", url)
+            elif re.match(r"^git\+file", url):
+                normalized = re.sub(r"git\+", "", url)
+            else:
+                normalized = re.sub(r"^(?:git\+)?ssh://", "", url)
+        else:
+            normalized = parsed.format()
+
+        return GitUrl(
+            re.sub(r"#[^#]*$", "", normalized), parsed.rev, parsed.subdirectory
+        )
+
+    @property
+    def config(self) -> GitConfig:
+        return self._config
+
+    @property
+    def version(self) -> tuple[int, int, int]:
+        output = self.run("version")
+        version = re.search(r"(\d+)\.(\d+)\.(\d+)", output)
+        if not version:
+            return (0, 0, 0)
+        return int(version.group(1)), int(version.group(2)), int(version.group(3))
+
+    def clone(self, repository: str, dest: Path) -> str:
+        self._check_parameter(repository)
+        cmd = [
+            "clone",
+            "--filter=blob:none",
+            "--recurse-submodules",
+            "--",
+            repository,
+            str(dest),
+        ]
+        # Blobless clones introduced in Git 2.17
+        if self.version < (2, 17):
+            cmd.remove("--filter=blob:none")
+        return self.run(*cmd)
+
+    def checkout(self, rev: str, folder: Path | None = None) -> str:
+        args = []
+        if folder is None and self._work_dir:
+            folder = self._work_dir
+
+        if folder:
+            args += [
+                "--git-dir",
+                (folder / ".git").as_posix(),
+                "--work-tree",
+                folder.as_posix(),
+            ]
+
+        self._check_parameter(rev)
+
+        args += ["checkout", "--recurse-submodules", rev]
+
+        return self.run(*args)
+
+    def rev_parse(self, rev: str, folder: Path | None = None) -> str:
+        args = []
+        if folder is None and self._work_dir:
+            folder = self._work_dir
+
+        self._check_parameter(rev)
+
+        # We need "^0" (an alternative to "^{commit}") to ensure that the
+        # commit SHA of the commit the tag points to is returned, even in
+        # the case of annotated tags.
+        #
+        # We deliberately avoid the "^{commit}" syntax itself as on some
+        # platforms (cygwin/msys to be specific), the braces are interpreted
+        # as special characters and would require escaping, while on others
+        # they should not be escaped.
+        args += ["rev-parse", rev + "^0"]
+
+        return self.run(*args, folder=folder)
+
+    def get_current_branch(self, folder: Path | None = None) -> str:
+        if folder is None and self._work_dir:
+            folder = self._work_dir
+
+        output = self.run("symbolic-ref", "--short", "HEAD", folder=folder)
+
+        return output.strip()
+
+    def get_ignored_files(self, folder: Path | None = None) -> list[str]:
+        args = []
+        if folder is None and self._work_dir:
+            folder = self._work_dir
+
+        if folder:
+            args += [
+                "--git-dir",
+                (folder / ".git").as_posix(),
+                "--work-tree",
+                folder.as_posix(),
+            ]
+
+        args += ["ls-files", "--others", "-i", "--exclude-standard"]
+        output = self.run(*args)
+
+        return output.strip().split("\n")
+
+    def remote_urls(self, folder: Path | None = None) -> dict[str, str]:
+        output = self.run(
+            "config", "--get-regexp", r"remote\..*\.url", folder=folder
+        ).strip()
+
+        urls = {}
+        for url in output.splitlines():
+            name, url = url.split(" ", 1)
+            urls[name.strip()] = url.strip()
+
+        return urls
+
+    def remote_url(self, folder: Path | None = None) -> str:
+        urls = self.remote_urls(folder=folder)
+
+        return urls.get("remote.origin.url", urls[list(urls.keys())[0]])
+
+    def run(self, *args: Any, **kwargs: Any) -> str:
+        folder = kwargs.pop("folder", None)
+        if folder:
+            args = (
+                "--git-dir",
+                (folder / ".git").as_posix(),
+                "--work-tree",
+                folder.as_posix(),
+            ) + args
+
+        return (
+            subprocess.check_output(
+                [executable()] + list(args), stderr=subprocess.STDOUT
+            )
+            .decode()
+            .strip()
+        )
+
+    def _check_parameter(self, parameter: str) -> None:
+        """
+        Checks a git parameter to avoid unwanted code execution.
+        """
+        if parameter.strip().startswith("-"):
+            raise GitError(f"Invalid Git parameter: {parameter}")
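
ParsedUrl.parse() and the classmethod Git.normalize_url() are pure string manipulation, so they can be exercised without a git checkout (instantiating Git itself requires a working git binary). A short sketch against the patterns above:

    from poetry.core.vcs.git import Git, ParsedUrl

    parsed = ParsedUrl.parse("git+ssh://git@github.com/org/repo.git@v1.2.0")
    print(parsed.resource, parsed.name, parsed.rev)  # github.com repo v1.2.0

    url = Git.normalize_url("git+https://github.com/org/repo.git#v1.2.0")
    print(url.url, url.revision)
    # https://github.com/org/repo.git v1.2.0
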
diff --git a/vendor/poetry/poetry/masonry/__init__.py b/vendor/poetry-core/src/poetry/core/version/__init__.py
similarity index 100%
rename from vendor/poetry/poetry/masonry/__init__.py
rename to vendor/poetry-core/src/poetry/core/version/__init__.py
diff --git a/vendor/poetry-core/src/poetry/core/version/exceptions.py b/vendor/poetry-core/src/poetry/core/version/exceptions.py
new file mode 100644
index 00000000..752fada6
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/version/exceptions.py
@@ -0,0 +1,5 @@
+from __future__ import annotations
+
+
+class InvalidVersion(ValueError):
+    pass
diff --git a/vendor/poetry-core/src/poetry/core/version/grammars/__init__.py b/vendor/poetry-core/src/poetry/core/version/grammars/__init__.py
new file mode 100644
index 00000000..971104e2
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/version/grammars/__init__.py
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+import sys
+
+from pathlib import Path
+
+
+if getattr(sys, "oxidized", False):
+    GRAMMAR_DIR = (
+        Path(__path__[0]).parents[4] / "assets" / "core" / "version" / "grammars"
+    )
+else:
+    GRAMMAR_DIR = Path(__path__[0])
+
+GRAMMAR_PEP_508_CONSTRAINTS = GRAMMAR_DIR / "pep508.lark"
+
+GRAMMAR_PEP_508_MARKERS = GRAMMAR_DIR / "markers.lark"
diff --git a/vendor/poetry-core/poetry/core/version/grammars/markers.lark b/vendor/poetry-core/src/poetry/core/version/grammars/markers.lark
similarity index 97%
rename from vendor/poetry-core/poetry/core/version/grammars/markers.lark
rename to vendor/poetry-core/src/poetry/core/version/grammars/markers.lark
index 189ab02a..e0079c2a 100644
--- a/vendor/poetry-core/poetry/core/version/grammars/markers.lark
+++ b/vendor/poetry-core/src/poetry/core/version/grammars/markers.lark
@@ -15,7 +15,6 @@ MARKER_NAME: "implementation_version"
     | "platform_system"
     | "python_version"
     | "sys_platform"
-    | "sys_platform"
     | "os_name"
     | "os.name"
     | "sys.platform"
diff --git a/vendor/poetry-core/poetry/core/version/grammars/pep508.lark b/vendor/poetry-core/src/poetry/core/version/grammars/pep508.lark
similarity index 100%
rename from vendor/poetry-core/poetry/core/version/grammars/pep508.lark
rename to vendor/poetry-core/src/poetry/core/version/grammars/pep508.lark
diff --git a/vendor/poetry-core/src/poetry/core/version/helpers.py b/vendor/poetry-core/src/poetry/core/version/helpers.py
new file mode 100644
index 00000000..435d32af
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/version/helpers.py
@@ -0,0 +1,64 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.core.semver.helpers import parse_constraint
+from poetry.core.semver.version import Version
+from poetry.core.semver.version_union import VersionUnion
+
+
+if TYPE_CHECKING:
+    from poetry.core.semver.version_constraint import VersionConstraint
+
+PYTHON_VERSION = [
+    "2.7.*",
+    "3.0.*",
+    "3.1.*",
+    "3.2.*",
+    "3.3.*",
+    "3.4.*",
+    "3.5.*",
+    "3.6.*",
+    "3.7.*",
+    "3.8.*",
+    "3.9.*",
+    "3.10.*",
+]
+
+
+def format_python_constraint(constraint: VersionConstraint) -> str:
+    """
+    Transform a disjunctive Python version constraint
+    into a proper constraint string.
+    """
+    if isinstance(constraint, Version):
+        if constraint.precision >= 3:
+            return f"=={str(constraint)}"
+
+        # Transform 3.6 or 3
+        if constraint.precision == 2:
+            # 3.6
+            constraint = parse_constraint(f"~{constraint.major}.{constraint.minor}")
+        else:
+            constraint = parse_constraint(f"^{constraint.major}.0")
+
+    if not isinstance(constraint, VersionUnion):
+        return str(constraint)
+
+    formatted = []
+    accepted = []
+
+    for version in PYTHON_VERSION:
+        version_constraint = parse_constraint(version)
+        matches = constraint.allows_any(version_constraint)
+        if not matches:
+            formatted.append("!=" + version)
+        else:
+            accepted.append(version)
+
+    # Checking lower bound
+    low = accepted[0]
+
+    formatted.insert(0, ">=" + ".".join(low.split(".")[:2]))
+
+    return ", ".join(formatted)
diff --git a/vendor/poetry-core/src/poetry/core/version/markers.py b/vendor/poetry-core/src/poetry/core/version/markers.py
new file mode 100644
index 00000000..5f64b146
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/version/markers.py
@@ -0,0 +1,926 @@
+from __future__ import annotations
+
+import itertools
+import re
+
+from typing import TYPE_CHECKING
+from typing import Any
+from typing import Callable
+from typing import Iterable
+
+from poetry.core.semver.version_constraint import VersionConstraint
+from poetry.core.version.grammars import GRAMMAR_PEP_508_MARKERS
+from poetry.core.version.parser import Parser
+
+
+if TYPE_CHECKING:
+    from lark import Tree
+
+    from poetry.core.packages.constraints import BaseConstraint
+
+
+class InvalidMarker(ValueError):
+    """
+    An invalid marker was found; users should refer to PEP 508.
+    """
+
+
+class UndefinedComparison(ValueError):
+    """
+    An invalid operation was attempted on a value that doesn't support it.
+    """
+
+
+class UndefinedEnvironmentName(ValueError):
+    """
+    A name was referenced that does not exist in the
+    environment.
+    """
+
+
+ALIASES = {
+    "os.name": "os_name",
+    "sys.platform": "sys_platform",
+    "platform.version": "platform_version",
+    "platform.machine": "platform_machine",
+    "platform.python_implementation": "platform_python_implementation",
+    "python_implementation": "platform_python_implementation",
+}
+
+PYTHON_VERSION_MARKERS = {"python_version", "python_full_version"}
+
+# Parser: PEP 508 Environment Markers
+_parser = Parser(GRAMMAR_PEP_508_MARKERS, "lalr")
+
+
+class BaseMarker:
+    def intersect(self, other: BaseMarker) -> BaseMarker:
+        raise NotImplementedError()
+
+    def union(self, other: BaseMarker) -> BaseMarker:
+        raise NotImplementedError()
+
+    def is_any(self) -> bool:
+        return False
+
+    def is_empty(self) -> bool:
+        return False
+
+    def validate(self, environment: dict[str, Any] | None) -> bool:
+        raise NotImplementedError()
+
+    def without_extras(self) -> BaseMarker:
+        raise NotImplementedError()
+
+    def exclude(self, marker_name: str) -> BaseMarker:
+        raise NotImplementedError()
+
+    def only(self, *marker_names: str) -> BaseMarker:
+        raise NotImplementedError()
+
+    def invert(self) -> BaseMarker:
+        raise NotImplementedError()
+
+    def __repr__(self) -> str:
+        return f"<{self.__class__.__name__} {str(self)}>"
+
+
+class AnyMarker(BaseMarker):
+    def intersect(self, other: BaseMarker) -> BaseMarker:
+        return other
+
+    def union(self, other: BaseMarker) -> BaseMarker:
+        return self
+
+    def is_any(self) -> bool:
+        return True
+
+    def is_empty(self) -> bool:
+        return False
+
+    def validate(self, environment: dict[str, Any] | None) -> bool:
+        return True
+
+    def without_extras(self) -> BaseMarker:
+        return self
+
+    def exclude(self, marker_name: str) -> BaseMarker:
+        return self
+
+    def only(self, *marker_names: str) -> BaseMarker:
+        return self
+
+    def invert(self) -> EmptyMarker:
+        return EmptyMarker()
+
+    def __str__(self) -> str:
+        return ""
+
+    def __repr__(self) -> str:
+        return ""
+
+    def __hash__(self) -> int:
+        return hash(("", ""))
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, BaseMarker):
+            return NotImplemented
+
+        return isinstance(other, AnyMarker)
+
+
+class EmptyMarker(BaseMarker):
+    def intersect(self, other: BaseMarker) -> BaseMarker:
+        return self
+
+    def union(self, other: BaseMarker) -> BaseMarker:
+        return other
+
+    def is_any(self) -> bool:
+        return False
+
+    def is_empty(self) -> bool:
+        return True
+
+    def validate(self, environment: dict[str, Any] | None) -> bool:
+        return False
+
+    def without_extras(self) -> BaseMarker:
+        return self
+
+    def exclude(self, marker_name: str) -> EmptyMarker:
+        return self
+
+    def only(self, *marker_names: str) -> EmptyMarker:
+        return self
+
+    def invert(self) -> AnyMarker:
+        return AnyMarker()
+
+    def __str__(self) -> str:
+        return ""
+
+    def __repr__(self) -> str:
+        return ""
+
+    def __hash__(self) -> int:
+        return hash(("", ""))
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, BaseMarker):
+            return NotImplemented
+
+        return isinstance(other, EmptyMarker)
+
+
+class SingleMarker(BaseMarker):
+    _CONSTRAINT_RE = re.compile(r"(?i)^(~=|!=|>=?|<=?|==?=?|in|not in)?\s*(.+)$")
+    _VERSION_LIKE_MARKER_NAME = {
+        "python_version",
+        "python_full_version",
+        "platform_release",
+    }
+
+    def __init__(
+        self, name: str, constraint: str | BaseConstraint | VersionConstraint
+    ) -> None:
+        from poetry.core.packages.constraints import (
+            parse_constraint as parse_generic_constraint,
+        )
+        from poetry.core.semver.helpers import parse_constraint
+
+        self._constraint: BaseConstraint | VersionConstraint
+        self._parser: Callable[[str], BaseConstraint | VersionConstraint]
+        self._name = ALIASES.get(name, name)
+        constraint_string = str(constraint)
+
+        # Extract operator and value
+        m = self._CONSTRAINT_RE.match(constraint_string)
+        if m is None:
+            raise ValueError(f"Invalid marker '{constraint_string}'")
+
+        self._operator = m.group(1)
+        if self._operator is None:
+            self._operator = "=="
+
+        self._value = m.group(2)
+        self._parser = parse_generic_constraint
+
+        if name in self._VERSION_LIKE_MARKER_NAME:
+            self._parser = parse_constraint
+
+            if self._operator in {"in", "not in"}:
+                versions = []
+                for v in re.split("[ ,]+", self._value):
+                    split = v.split(".")
+                    if len(split) in [1, 2]:
+                        split.append("*")
+                        op = "" if self._operator == "in" else "!="
+                    else:
+                        op = "==" if self._operator == "in" else "!="
+
+                    versions.append(op + ".".join(split))
+
+                glue = ", "
+                if self._operator == "in":
+                    glue = " || "
+
+                self._constraint = self._parser(glue.join(versions))
+            else:
+                self._constraint = self._parser(constraint_string)
+        else:
+            # if we have an in/not in operator we split the constraint
+            # into a union/multi-constraint of single constraints
+            if self._operator in {"in", "not in"}:
+                op, glue = ("==", " || ") if self._operator == "in" else ("!=", ", ")
+                values = re.split("[ ,]+", self._value)
+                constraint_string = glue.join(f"{op} {value}" for value in values)
+
+            self._constraint = self._parser(constraint_string)
+
+    @property
+    def name(self) -> str:
+        return self._name
+
+    @property
+    def constraint(self) -> BaseConstraint | VersionConstraint:
+        return self._constraint
+
+    @property
+    def operator(self) -> str:
+        return self._operator
+
+    @property
+    def value(self) -> str:
+        return self._value
+
+    def intersect(self, other: BaseMarker) -> BaseMarker:
+        if isinstance(other, SingleMarker):
+            return MultiMarker.of(self, other)
+
+        return other.intersect(self)
+
+    def union(self, other: BaseMarker) -> BaseMarker:
+        if isinstance(other, SingleMarker):
+            if self == other:
+                return self
+
+            if self == other.invert():
+                return AnyMarker()
+
+            return MarkerUnion.of(self, other)
+
+        return other.union(self)
+
+    def validate(self, environment: dict[str, Any] | None) -> bool:
+        if environment is None:
+            return True
+
+        if self._name not in environment:
+            return True
+
+        # The type of constraint returned by the parser matches our constraint: either
+        # both are BaseConstraint or both are VersionConstraint.  But it's hard for mypy
+        # to know that.
+        constraint = self._parser(environment[self._name])
+        return self._constraint.allows(constraint)  # type: ignore[arg-type]
+
+    def without_extras(self) -> BaseMarker:
+        return self.exclude("extra")
+
+    def exclude(self, marker_name: str) -> BaseMarker:
+        if self.name == marker_name:
+            return AnyMarker()
+
+        return self
+
+    def only(self, *marker_names: str) -> SingleMarker | AnyMarker:
+        if self.name not in marker_names:
+            return AnyMarker()
+
+        return self
+
+    def invert(self) -> BaseMarker:
+        if self._operator in ("===", "=="):
+            operator = "!="
+        elif self._operator == "!=":
+            operator = "=="
+        elif self._operator == ">":
+            operator = "<="
+        elif self._operator == ">=":
+            operator = "<"
+        elif self._operator == "<":
+            operator = ">="
+        elif self._operator == "<=":
+            operator = ">"
+        elif self._operator == "in":
+            operator = "not in"
+        elif self._operator == "not in":
+            operator = "in"
+        elif self._operator == "~=":
+            # This one is trickier to handle
+            # since it's technically a multi marker,
+            # so the inverse will be a union of inverses
+            from poetry.core.semver.version_range_constraint import (
+                VersionRangeConstraint,
+            )
+
+            if not isinstance(self._constraint, VersionRangeConstraint):
+                # The constraint must be a version range, otherwise
+                # it's an internal error
+                raise RuntimeError(
+                    "The '~=' operator should only represent version ranges"
+                )
+
+            min_ = self._constraint.min
+            min_operator = ">=" if self._constraint.include_min else ">"
+            max_ = self._constraint.max
+            max_operator = "<=" if self._constraint.include_max else "<"
+
+            return MultiMarker.of(
+                SingleMarker(self._name, f"{min_operator} {min_}"),
+                SingleMarker(self._name, f"{max_operator} {max_}"),
+            ).invert()
+        else:
+            # We should never go there
+            raise RuntimeError(f"Invalid marker operator '{self._operator}'")
+
+        return parse_marker(f"{self._name} {operator} '{self._value}'")
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, SingleMarker):
+            return False
+
+        return self._name == other.name and self._constraint == other.constraint
+
+    def __hash__(self) -> int:
+        return hash((self._name, self._constraint))
+
+    def __str__(self) -> str:
+        return f'{self._name} {self._operator} "{self._value}"'
+
+
+def _flatten_markers(
+    markers: Iterable[BaseMarker],
+    flatten_class: type[MarkerUnion | MultiMarker],
+) -> list[BaseMarker]:
+    flattened = []
+
+    for marker in markers:
+        if isinstance(marker, flatten_class):
+            flattened += _flatten_markers(
+                marker.markers,  # type: ignore[attr-defined]
+                flatten_class,
+            )
+        else:
+            flattened.append(marker)
+
+    return flattened
+
+
+class MultiMarker(BaseMarker):
+    def __init__(self, *markers: BaseMarker) -> None:
+        self._markers = []
+
+        flattened_markers = _flatten_markers(markers, MultiMarker)
+
+        for m in flattened_markers:
+            self._markers.append(m)
+
+    @classmethod
+    def of(cls, *markers: BaseMarker) -> BaseMarker:
+        new_markers = _flatten_markers(markers, MultiMarker)
+        old_markers: list[BaseMarker] = []
+
+        while old_markers != new_markers:
+            old_markers = new_markers
+            new_markers = []
+            for marker in old_markers:
+                if marker in new_markers:
+                    continue
+
+                if marker.is_any():
+                    continue
+
+                if isinstance(marker, SingleMarker):
+                    intersected = False
+                    for i, mark in enumerate(new_markers):
+                        if isinstance(mark, SingleMarker) and (
+                            mark.name == marker.name
+                            or {mark.name, marker.name} == PYTHON_VERSION_MARKERS
+                        ):
+                            new_marker = _merge_single_markers(mark, marker, cls)
+                            if new_marker is not None:
+                                new_markers[i] = new_marker
+                                intersected = True
+
+                        elif isinstance(mark, MarkerUnion):
+                            intersection = mark.intersect(marker)
+                            if isinstance(intersection, SingleMarker):
+                                new_markers[i] = intersection
+                            elif intersection.is_empty():
+                                return EmptyMarker()
+                    if intersected:
+                        continue
+
+                elif isinstance(marker, MarkerUnion):
+                    for mark in new_markers:
+                        if isinstance(mark, SingleMarker):
+                            intersection = marker.intersect(mark)
+                            if isinstance(intersection, SingleMarker):
+                                marker = intersection
+                                break
+                            elif intersection.is_empty():
+                                return EmptyMarker()
+
+                new_markers.append(marker)
+
+        if any(m.is_empty() for m in new_markers) or not new_markers:
+            return EmptyMarker()
+
+        if len(new_markers) == 1:
+            return new_markers[0]
+
+        return MultiMarker(*new_markers)
+
+    @property
+    def markers(self) -> list[BaseMarker]:
+        return self._markers
+
+    def intersect(self, other: BaseMarker) -> BaseMarker:
+        if other.is_any():
+            return self
+
+        if other.is_empty():
+            return other
+
+        if isinstance(other, MarkerUnion):
+            return other.intersect(self)
+
+        new_markers = self._markers + [other]
+
+        return MultiMarker.of(*new_markers)
+
+    def union(self, other: BaseMarker) -> BaseMarker:
+        if isinstance(other, (SingleMarker, MultiMarker)):
+            return MarkerUnion.of(self, other)
+
+        return other.union(self)
+
+    def union_simplify(self, other: BaseMarker) -> BaseMarker | None:
+        """
+        In contrast to the standard union method, which prefers to return
+        a MarkerUnion of MultiMarkers, this version prefers to return
+        a MultiMarker of MarkerUnions.
+
+        The rationale behind this approach is to find additional simplifications.
+        In order to avoid endless recursions, this method returns None
+        if it cannot find a simplification.
+        """
+        if isinstance(other, SingleMarker):
+            new_markers = []
+            for marker in self._markers:
+                union = marker.union(other)
+                if not union.is_any():
+                    new_markers.append(union)
+
+            if len(new_markers) == 1:
+                return new_markers[0]
+            if other in new_markers and all(
+                other == m or isinstance(m, MarkerUnion) and other in m.markers
+                for m in new_markers
+            ):
+                return other
+
+            if not any(isinstance(m, MarkerUnion) for m in new_markers):
+                return self.of(*new_markers)
+
+        elif isinstance(other, MultiMarker):
+            common_markers = [
+                marker for marker in self.markers if marker in other.markers
+            ]
+
+            unique_markers = [
+                marker for marker in self.markers if marker not in common_markers
+            ]
+            if not unique_markers:
+                return self
+
+            other_unique_markers = [
+                marker for marker in other.markers if marker not in common_markers
+            ]
+            if not other_unique_markers:
+                return other
+
+            if common_markers:
+                unique_union = self.of(*unique_markers).union(
+                    self.of(*other_unique_markers)
+                )
+                if not isinstance(unique_union, MarkerUnion):
+                    return self.of(*common_markers).intersect(unique_union)
+
+            else:
+                # Usually this operation just complicates things, but in the special
+                # case where it doesn't, it allows adjacent ranges to collapse, e.g.
+                #
+                # 'python_version >= "3.6" and python_version < "3.6.2"' union
+                # 'python_version >= "3.6.2" and python_version < "3.7"' ->
+                #
+                # 'python_version >= "3.6" and python_version < "3.7"'.
+                unions = [
+                    m1.union(m2) for m2 in other_unique_markers for m1 in unique_markers
+                ]
+                conjunction = self.of(*unions)
+                if not isinstance(conjunction, MultiMarker) or not any(
+                    isinstance(m, MarkerUnion) for m in conjunction.markers
+                ):
+                    return conjunction
+
+        return None
+
+    def validate(self, environment: dict[str, Any] | None) -> bool:
+        return all(m.validate(environment) for m in self._markers)
+
+    def without_extras(self) -> BaseMarker:
+        return self.exclude("extra")
+
+    def exclude(self, marker_name: str) -> BaseMarker:
+        new_markers = []
+
+        for m in self._markers:
+            if isinstance(m, SingleMarker) and m.name == marker_name:
+                # The marker is not relevant since it must be excluded
+                continue
+
+            marker = m.exclude(marker_name)
+
+            if not marker.is_empty():
+                new_markers.append(marker)
+
+        return self.of(*new_markers)
+
+    def only(self, *marker_names: str) -> BaseMarker:
+        new_markers = []
+
+        for m in self._markers:
+            if isinstance(m, SingleMarker) and m.name not in marker_names:
+                # The marker is not relevant since it's not one we want
+                continue
+
+            marker = m.only(*marker_names)
+
+            if not marker.is_empty():
+                new_markers.append(marker)
+
+        return self.of(*new_markers)
+
+    def invert(self) -> BaseMarker:
+        markers = [marker.invert() for marker in self._markers]
+
+        return MarkerUnion.of(*markers)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, MultiMarker):
+            return False
+
+        return set(self._markers) == set(other.markers)
+
+    def __hash__(self) -> int:
+        h = hash("multi")
+        for m in self._markers:
+            h |= hash(m)
+
+        return h
+
+    def __str__(self) -> str:
+        elements = []
+        for m in self._markers:
+            if isinstance(m, (SingleMarker, MultiMarker)):
+                elements.append(str(m))
+            else:
+                elements.append(f"({str(m)})")
+
+        return " and ".join(elements)
+
+
+class MarkerUnion(BaseMarker):
+    def __init__(self, *markers: BaseMarker) -> None:
+        self._markers = list(markers)
+
+    @property
+    def markers(self) -> list[BaseMarker]:
+        return self._markers
+
+    @classmethod
+    def of(cls, *markers: BaseMarker) -> BaseMarker:
+        new_markers = _flatten_markers(markers, MarkerUnion)
+        old_markers: list[BaseMarker] = []
+
+        while old_markers != new_markers:
+            old_markers = new_markers
+            new_markers = []
+            for marker in old_markers:
+                if marker in new_markers or marker.is_empty():
+                    continue
+
+                included = False
+
+                if isinstance(marker, SingleMarker):
+                    for i, mark in enumerate(new_markers):
+                        if isinstance(mark, SingleMarker) and (
+                            mark.name == marker.name
+                            or {mark.name, marker.name} == PYTHON_VERSION_MARKERS
+                        ):
+                            new_marker = _merge_single_markers(mark, marker, cls)
+                            if new_marker is not None:
+                                new_markers[i] = new_marker
+                                included = True
+                                break
+
+                        elif isinstance(mark, MultiMarker):
+                            union = mark.union_simplify(marker)
+                            if union is not None:
+                                new_markers[i] = union
+                                included = True
+                                break
+
+                elif isinstance(marker, MultiMarker):
+                    included = False
+                    for i, mark in enumerate(new_markers):
+                        union = marker.union_simplify(mark)
+                        if union is not None:
+                            new_markers[i] = union
+                            included = True
+                            break
+
+                if included:
+                    # flatten again because union_simplify may return a union
+                    new_markers = _flatten_markers(new_markers, MarkerUnion)
+                else:
+                    new_markers.append(marker)
+
+        if any(m.is_any() for m in new_markers):
+            return AnyMarker()
+
+        if not new_markers:
+            return EmptyMarker()
+
+        if len(new_markers) == 1:
+            return new_markers[0]
+
+        return MarkerUnion(*new_markers)
+
+    def append(self, marker: BaseMarker) -> None:
+        if marker in self._markers:
+            return
+
+        self._markers.append(marker)
+
+    def intersect(self, other: BaseMarker) -> BaseMarker:
+        if other.is_any():
+            return self
+
+        if other.is_empty():
+            return other
+
+        new_markers = []
+        if isinstance(other, (SingleMarker, MultiMarker)):
+            for marker in self._markers:
+                intersection = marker.intersect(other)
+
+                if not intersection.is_empty():
+                    new_markers.append(intersection)
+        elif isinstance(other, MarkerUnion):
+            for our_marker in self._markers:
+                for their_marker in other.markers:
+                    intersection = our_marker.intersect(their_marker)
+
+                    if not intersection.is_empty():
+                        new_markers.append(intersection)
+
+        return MarkerUnion.of(*new_markers)
+
+    def union(self, other: BaseMarker) -> BaseMarker:
+        if other.is_any():
+            return other
+
+        if other.is_empty():
+            return self
+
+        new_markers = self._markers + [other]
+
+        return MarkerUnion.of(*new_markers)
+
+    def validate(self, environment: dict[str, Any] | None) -> bool:
+        return any(m.validate(environment) for m in self._markers)
+
+    def without_extras(self) -> BaseMarker:
+        return self.exclude("extra")
+
+    def exclude(self, marker_name: str) -> BaseMarker:
+        new_markers = []
+
+        for m in self._markers:
+            if isinstance(m, SingleMarker) and m.name == marker_name:
+                # The marker is not relevant since it must be excluded
+                continue
+
+            marker = m.exclude(marker_name)
+            new_markers.append(marker)
+
+        if not new_markers:
+            # All markers were the excluded marker.
+            return AnyMarker()
+
+        return self.of(*new_markers)
+
+    def only(self, *marker_names: str) -> BaseMarker:
+        new_markers = []
+
+        for m in self._markers:
+            if isinstance(m, SingleMarker) and m.name not in marker_names:
+                # The marker is not relevant since it's not one we want
+                continue
+
+            marker = m.only(*marker_names)
+
+            if not marker.is_empty():
+                new_markers.append(marker)
+
+        return self.of(*new_markers)
+
+    def invert(self) -> BaseMarker:
+        markers = [marker.invert() for marker in self._markers]
+
+        return MultiMarker.of(*markers)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, MarkerUnion):
+            return False
+
+        return set(self._markers) == set(other.markers)
+
+    def __hash__(self) -> int:
+        h = hash("union")
+        for m in self._markers:
+            h |= hash(m)
+
+        return h
+
+    def __str__(self) -> str:
+        return " or ".join(
+            str(m) for m in self._markers if not m.is_any() and not m.is_empty()
+        )
+
+    def is_any(self) -> bool:
+        return any(m.is_any() for m in self._markers)
+
+    def is_empty(self) -> bool:
+        return all(m.is_empty() for m in self._markers)
+
+
+def parse_marker(marker: str) -> BaseMarker:
+    if marker == "":
+        return EmptyMarker()
+
+    if not marker or marker == "*":
+        return AnyMarker()
+
+    parsed = _parser.parse(marker)
+
+    markers = _compact_markers(parsed.children)
+
+    return markers
+
+
+def _compact_markers(tree_elements: Tree, tree_prefix: str = "") -> BaseMarker:
+    from lark import Token
+
+    groups: list[BaseMarker] = [MultiMarker()]
+    for token in tree_elements:
+        if isinstance(token, Token):
+            if token.type == f"{tree_prefix}BOOL_OP" and token.value == "or":
+                groups.append(MultiMarker())
+
+            continue
+
+        if token.data == "marker":
+            groups[-1] = MultiMarker.of(
+                groups[-1], _compact_markers(token.children, tree_prefix=tree_prefix)
+            )
+        elif token.data == f"{tree_prefix}item":
+            name, op, value = token.children
+            if value.type == f"{tree_prefix}MARKER_NAME":
+                name, value, = (
+                    value,
+                    name,
+                )
+
+            value = value[1:-1]
+            groups[-1] = MultiMarker.of(
+                groups[-1], SingleMarker(str(name), f"{op}{value}")
+            )
+        elif token.data == f"{tree_prefix}BOOL_OP" and token.children[0] == "or":
+            groups.append(MultiMarker())
+
+    for i, group in enumerate(reversed(groups)):
+        if group.is_empty():
+            del groups[len(groups) - 1 - i]
+            continue
+
+        if isinstance(group, MultiMarker) and len(group.markers) == 1:
+            groups[len(groups) - 1 - i] = group.markers[0]
+
+    if not groups:
+        return EmptyMarker()
+
+    if len(groups) == 1:
+        return groups[0]
+
+    return MarkerUnion.of(*groups)
+
+
+def dnf(marker: BaseMarker) -> BaseMarker:
+    """Transforms the marker into DNF (disjunctive normal form)."""
+    if isinstance(marker, MultiMarker):
+        dnf_markers = [dnf(m) for m in marker.markers]
+        sub_marker_lists = [
+            m.markers if isinstance(m, MarkerUnion) else [m] for m in dnf_markers
+        ]
+        return MarkerUnion.of(
+            *[MultiMarker.of(*c) for c in itertools.product(*sub_marker_lists)]
+        )
+    if isinstance(marker, MarkerUnion):
+        return MarkerUnion.of(*[dnf(m) for m in marker.markers])
+    return marker
+
+
+def _merge_single_markers(
+    marker1: SingleMarker,
+    marker2: SingleMarker,
+    merge_class: type[MultiMarker | MarkerUnion],
+) -> BaseMarker | None:
+    if {marker1.name, marker2.name} == PYTHON_VERSION_MARKERS:
+        return _merge_python_version_single_markers(marker1, marker2, merge_class)
+
+    if merge_class == MultiMarker:
+        merge_method = marker1.constraint.intersect
+    else:
+        merge_method = marker1.constraint.union
+    # Markers with the same name have the same constraint type,
+    # but mypy can't see that.
+    result_constraint = merge_method(marker2.constraint)  # type: ignore[arg-type]
+
+    result_marker: BaseMarker | None = None
+    if result_constraint.is_empty():
+        result_marker = EmptyMarker()
+    elif result_constraint.is_any():
+        result_marker = AnyMarker()
+    elif result_constraint == marker1.constraint:
+        result_marker = marker1
+    elif result_constraint == marker2.constraint:
+        result_marker = marker2
+    elif (
+        isinstance(result_constraint, VersionConstraint)
+        and result_constraint.is_simple()
+    ):
+        result_marker = SingleMarker(marker1.name, result_constraint)
+    return result_marker
+
+
+def _merge_python_version_single_markers(
+    marker1: SingleMarker,
+    marker2: SingleMarker,
+    merge_class: type[MultiMarker | MarkerUnion],
+) -> BaseMarker | None:
+    from poetry.core.packages.utils.utils import get_python_constraint_from_marker
+
+    if marker1.name == "python_version":
+        version_marker = marker1
+        full_version_marker = marker2
+    else:
+        version_marker = marker2
+        full_version_marker = marker1
+
+    normalized_constraint = get_python_constraint_from_marker(version_marker)
+    normalized_marker = SingleMarker("python_full_version", normalized_constraint)
+    merged_marker = _merge_single_markers(
+        normalized_marker, full_version_marker, merge_class
+    )
+    if merged_marker == normalized_marker:
+        # prefer original marker to avoid unnecessary changes
+        return version_marker
+    if merged_marker and isinstance(merged_marker, SingleMarker):
+        # We have to fix markers like 'python_full_version == "3.6"'
+        # to receive 'python_full_version == "3.6.0"'.
+        # It seems a bit hacky to convert to string and back to marker,
+        # but it's probably much simpler than to consider the different constraint
+        # classes (mostly VersionRangeConstraint, but VersionUnion for "!=") and
+        # since this conversion is only required for python_full_version markers
+        # it may be sufficient to handle it here.
+        marker_string = str(merged_marker)
+        precision = marker_string.count(".") + 1
+        if precision < 3:
+            marker_string = marker_string[:-1] + ".0" * (3 - precision) + '"'
+            merged_marker = parse_marker(marker_string)
+    return merged_marker
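
Taken together, the classes above implement a small boolean algebra over PEP 508 markers; a few identities that follow directly from the definitions:

    from poetry.core.version.markers import parse_marker

    m1 = parse_marker('python_version >= "3.6"')
    m2 = parse_marker('python_version < "3.6"')

    print(m1.intersect(m2).is_empty())  # True: the ranges are disjoint
    print(m1.union(m2).is_any())        # True: together they cover everything
    print(m1.invert())                  # python_version < "3.6"
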
diff --git a/vendor/poetry-core/src/poetry/core/version/parser.py b/vendor/poetry-core/src/poetry/core/version/parser.py
new file mode 100644
index 00000000..085cfa38
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/version/parser.py
@@ -0,0 +1,31 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from typing import Any
+
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from lark import Lark
+    from lark import Tree
+
+
+class Parser:
+    def __init__(
+        self, grammar: Path, parser: str = "lalr", debug: bool = False
+    ) -> None:
+        self._grammar = grammar
+        self._parser = parser
+        self._debug = debug
+        self._lark: Lark | None = None
+
+    def parse(self, text: str, **kwargs: Any) -> Tree:
+        from lark import Lark
+
+        if self._lark is None:
+            self._lark = Lark.open(
+                grammar_filename=self._grammar, parser=self._parser, debug=self._debug
+            )
+
+        return self._lark.parse(text=text, **kwargs)
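
Parser defers both the lark import and the grammar compilation to the first parse() call, which keeps module import cheap; a sketch of the lazy behaviour:

    from poetry.core.version.grammars import GRAMMAR_PEP_508_MARKERS
    from poetry.core.version.parser import Parser

    parser = Parser(GRAMMAR_PEP_508_MARKERS, "lalr")  # lark not imported yet
    tree = parser.parse('os_name == "posix"')         # grammar compiled here
    print(tree.pretty())
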
diff --git a/vendor/poetry-core/src/poetry/core/version/pep440/__init__.py b/vendor/poetry-core/src/poetry/core/version/pep440/__init__.py
new file mode 100644
index 00000000..7c7b3f8c
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/version/pep440/__init__.py
@@ -0,0 +1,9 @@
+from __future__ import annotations
+
+from poetry.core.version.pep440.segments import LocalSegmentType
+from poetry.core.version.pep440.segments import Release
+from poetry.core.version.pep440.segments import ReleaseTag
+from poetry.core.version.pep440.version import PEP440Version
+
+
+__all__ = ["LocalSegmentType", "Release", "ReleaseTag", "PEP440Version"]
diff --git a/vendor/poetry-core/src/poetry/core/version/pep440/parser.py b/vendor/poetry-core/src/poetry/core/version/pep440/parser.py
new file mode 100644
index 00000000..11fae074
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/version/pep440/parser.py
@@ -0,0 +1,83 @@
+from __future__ import annotations
+
+import re
+
+from typing import TYPE_CHECKING
+from typing import Match
+from typing import TypeVar
+
+from packaging.version import VERSION_PATTERN
+
+from poetry.core.version.exceptions import InvalidVersion
+from poetry.core.version.pep440 import Release
+from poetry.core.version.pep440 import ReleaseTag
+
+
+if TYPE_CHECKING:
+    from poetry.core.version.pep440 import LocalSegmentType
+    from poetry.core.version.pep440.version import PEP440Version
+
+T = TypeVar("T", bound="PEP440Version")
+
+
+class PEP440Parser:
+    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+    _local_version_separators = re.compile(r"[._-]")
+
+    @classmethod
+    def _get_release(cls, match: Match[str] | None) -> Release:
+        if not match or match.group("release") is None:
+            return Release(0)
+        return Release.from_parts(*(int(i) for i in match.group("release").split(".")))
+
+    @classmethod
+    def _get_prerelease(cls, match: Match[str] | None) -> ReleaseTag | None:
+        if not match or match.group("pre") is None:
+            return None
+        return ReleaseTag(match.group("pre_l"), int(match.group("pre_n") or 0))
+
+    @classmethod
+    def _get_postrelease(cls, match: Match[str] | None) -> ReleaseTag | None:
+        if not match or match.group("post") is None:
+            return None
+
+        return ReleaseTag(
+            match.group("post_l") or "post",
+            int(match.group("post_n1") or match.group("post_n2") or 0),
+        )
+
+    @classmethod
+    def _get_devrelease(cls, match: Match[str] | None) -> ReleaseTag | None:
+        if not match or match.group("dev") is None:
+            return None
+        return ReleaseTag(match.group("dev_l"), int(match.group("dev_n") or 0))
+
+    @classmethod
+    def _get_local(cls, match: Match[str] | None) -> LocalSegmentType | None:
+        if not match or match.group("local") is None:
+            return None
+
+        return tuple(
+            part.lower()
+            for part in cls._local_version_separators.split(match.group("local"))
+        )
+
+    @classmethod
+    def parse(cls, value: str, version_class: type[T]) -> T:
+        match = cls._regex.search(value) if value else None
+        if not match:
+            raise InvalidVersion(f"Invalid PEP 440 version: '{value}'")
+
+        return version_class(
+            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+            release=cls._get_release(match),
+            pre=cls._get_prerelease(match),
+            post=cls._get_postrelease(match),
+            dev=cls._get_devrelease(match),
+            local=cls._get_local(match),
+            text=value,
+        )
+
+
+def parse_pep440(value: str, version_class: type[T]) -> T:
+    return PEP440Parser.parse(value, version_class)
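
parse_pep440() splits a version string into the segment types defined in this package; for instance, with the vendored PEP440Version class:

    from poetry.core.version.pep440 import PEP440Version
    from poetry.core.version.pep440.parser import parse_pep440

    v = parse_pep440("1.2.0rc1.post3.dev4+local.7", PEP440Version)
    print(v.release.to_string())  # 1.2.0
    print(v.pre, v.post, v.dev)   # ReleaseTag instances for rc1 / post3 / dev4
    print(v.local)                # local segment split on ._- separators
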
diff --git a/vendor/poetry-core/src/poetry/core/version/pep440/segments.py b/vendor/poetry-core/src/poetry/core/version/pep440/segments.py
new file mode 100644
index 00000000..f4292d08
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/version/pep440/segments.py
@@ -0,0 +1,150 @@
+from __future__ import annotations
+
+import dataclasses
+
+from typing import Optional
+from typing import Tuple
+from typing import Union
+
+
+# Release phase IDs according to PEP440
+RELEASE_PHASE_ID_ALPHA = "a"
+RELEASE_PHASE_ID_BETA = "b"
+RELEASE_PHASE_ID_RC = "rc"
+RELEASE_PHASE_ID_POST = "post"
+RELEASE_PHASE_ID_DEV = "dev"
+
+RELEASE_PHASE_SPELLINGS = {
+    RELEASE_PHASE_ID_ALPHA: {RELEASE_PHASE_ID_ALPHA, "alpha"},
+    RELEASE_PHASE_ID_BETA: {RELEASE_PHASE_ID_BETA, "beta"},
+    RELEASE_PHASE_ID_RC: {RELEASE_PHASE_ID_RC, "c", "pre", "preview"},
+    RELEASE_PHASE_ID_POST: {RELEASE_PHASE_ID_POST, "r", "rev", "-"},
+    RELEASE_PHASE_ID_DEV: {RELEASE_PHASE_ID_DEV},
+}
+RELEASE_PHASE_NORMALIZATIONS = {
+    s: id_ for id_, spellings in RELEASE_PHASE_SPELLINGS.items() for s in spellings
+}
+
+
+@dataclasses.dataclass(frozen=True, eq=True, order=True)
+class Release:
+    major: int = dataclasses.field(default=0, compare=False)
+    minor: int | None = dataclasses.field(default=None, compare=False)
+    patch: int | None = dataclasses.field(default=None, compare=False)
+    # some projects use non-semver versioning schemes, eg: 1.2.3.4
+    extra: int | tuple[int, ...] | None = dataclasses.field(default=None, compare=False)
+    precision: int = dataclasses.field(init=False, compare=False)
+    text: str = dataclasses.field(init=False, compare=False)
+    _compare_key: tuple[int, ...] = dataclasses.field(init=False, compare=True)
+
+    def __post_init__(self) -> None:
+        if self.extra is None:
+            object.__setattr__(self, "extra", ())
+        elif not isinstance(self.extra, tuple):
+            object.__setattr__(self, "extra", (self.extra,))
+        assert isinstance(self.extra, tuple)
+
+        parts = [
+            str(part)
+            for part in [self.major, self.minor, self.patch, *self.extra]
+            if part is not None
+        ]
+        object.__setattr__(self, "text", ".".join(parts))
+        object.__setattr__(self, "precision", len(parts))
+        object.__setattr__(
+            self,
+            "_compare_key",
+            (self.major, self.minor or 0, self.patch or 0, *self.extra),
+        )
+
+    @classmethod
+    def from_parts(cls, *parts: int) -> Release:
+        if not parts:
+            return cls()
+
+        return cls(
+            major=parts[0],
+            minor=parts[1] if len(parts) > 1 else None,
+            patch=parts[2] if len(parts) > 2 else None,
+            extra=parts[3:] if len(parts) > 3 else (),
+        )
+
+    def to_string(self) -> str:
+        return self.text
+
+    def next_major(self) -> Release:
+        assert isinstance(self.extra, tuple)
+        return dataclasses.replace(
+            self,
+            major=self.major + 1,
+            minor=0 if self.minor is not None else None,
+            patch=0 if self.patch is not None else None,
+            extra=tuple(0 for _ in self.extra),
+        )
+
+    def next_minor(self) -> Release:
+        assert isinstance(self.extra, tuple)
+        return dataclasses.replace(
+            self,
+            major=self.major,
+            minor=self.minor + 1 if self.minor is not None else 1,
+            patch=0 if self.patch is not None else None,
+            extra=tuple(0 for _ in self.extra),
+        )
+
+    def next_patch(self) -> Release:
+        assert isinstance(self.extra, tuple)
+        return dataclasses.replace(
+            self,
+            major=self.major,
+            minor=self.minor if self.minor is not None else 0,
+            patch=self.patch + 1 if self.patch is not None else 1,
+            extra=tuple(0 for _ in self.extra),
+        )
+
+
+@dataclasses.dataclass(frozen=True, eq=True, order=True)
+class ReleaseTag:
+    phase: str
+    number: int = dataclasses.field(default=0)
+
+    def __post_init__(self) -> None:
+        object.__setattr__(
+            self, "phase", RELEASE_PHASE_NORMALIZATIONS.get(self.phase, self.phase)
+        )
+
+    def to_string(self, short: bool = False) -> str:
+        if short:
+            import warnings
+
+            warnings.warn(
+                "Parameter 'short' has no effect and will be removed. "
+                "(Release tags are always normalized according to PEP 440 now.)",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+
+        return f"{self.phase}{self.number}"
+
+    def next(self) -> ReleaseTag:
+        return dataclasses.replace(self, phase=self.phase, number=self.number + 1)
+
+    def next_phase(self) -> ReleaseTag | None:
+        if self.phase in [
+            RELEASE_PHASE_ID_POST,
+            RELEASE_PHASE_ID_RC,
+            RELEASE_PHASE_ID_DEV,
+        ]:
+            return None
+
+        if self.phase == RELEASE_PHASE_ID_ALPHA:
+            _phase = RELEASE_PHASE_ID_BETA
+        elif self.phase == RELEASE_PHASE_ID_BETA:
+            _phase = RELEASE_PHASE_ID_RC
+        else:
+            return None
+
+        return self.__class__(phase=_phase, number=0)
+
+
+LocalSegmentType = Optional[Union[str, int, Tuple[Union[str, int], ...]]]
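
How the segments behave, as a small sketch (values follow from from_parts, the next_* helpers, and RELEASE_PHASE_NORMALIZATIONS above):

    from poetry.core.version.pep440.segments import Release, ReleaseTag

    r = Release.from_parts(1, 2, 3)
    assert r.to_string() == "1.2.3"
    assert r.next_minor().to_string() == "1.3.0"  # patch resets to zero
    assert r.next_major().to_string() == "2.0.0"

    # Alternate spellings collapse to the canonical phase IDs
    assert ReleaseTag("alpha", 1).to_string() == "a1"
    assert ReleaseTag("preview", 4).to_string() == "rc4"
    assert ReleaseTag("a", 1).next_phase() == ReleaseTag("b", 0)
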
diff --git a/vendor/poetry-core/src/poetry/core/version/pep440/version.py b/vendor/poetry-core/src/poetry/core/version/pep440/version.py
new file mode 100644
index 00000000..eeae009a
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/version/pep440/version.py
@@ -0,0 +1,317 @@
+from __future__ import annotations
+
+import dataclasses
+import functools
+import warnings
+
+from typing import TYPE_CHECKING
+from typing import Any
+from typing import TypeVar
+
+from poetry.core.version.pep440.segments import RELEASE_PHASE_ID_ALPHA
+from poetry.core.version.pep440.segments import RELEASE_PHASE_ID_DEV
+from poetry.core.version.pep440.segments import RELEASE_PHASE_ID_POST
+from poetry.core.version.pep440.segments import Release
+from poetry.core.version.pep440.segments import ReleaseTag
+
+
+if TYPE_CHECKING:
+    from poetry.core.version.pep440.segments import LocalSegmentType
+
+
+@functools.total_ordering
+class AlwaysSmaller:
+    def __lt__(self, other: object) -> bool:
+        return True
+
+
+@functools.total_ordering
+class AlwaysGreater:
+    def __gt__(self, other: object) -> bool:
+        return True
+
+
+class Infinity(AlwaysGreater, int):
+    pass
+
+
+class NegativeInfinity(AlwaysSmaller, int):
+    pass
+
+
+T = TypeVar("T", bound="PEP440Version")
+
+# we use the phase "z" to ensure we always sort this after other phases
+_INF_TAG = ReleaseTag("z", Infinity())
+# we use the phase "" to ensure we always sort this before other phases
+_NEG_INF_TAG = ReleaseTag("", NegativeInfinity())
+
+
+@dataclasses.dataclass(frozen=True, eq=True, order=True)
+class PEP440Version:
+    epoch: int = dataclasses.field(default=0, compare=False)
+    release: Release = dataclasses.field(default_factory=Release, compare=False)
+    pre: ReleaseTag | None = dataclasses.field(default=None, compare=False)
+    post: ReleaseTag | None = dataclasses.field(default=None, compare=False)
+    dev: ReleaseTag | None = dataclasses.field(default=None, compare=False)
+    local: LocalSegmentType = dataclasses.field(default=None, compare=False)
+    text: str = dataclasses.field(default="", compare=False)
+    _compare_key: tuple[
+        int, Release, ReleaseTag, ReleaseTag, ReleaseTag, tuple[tuple[int, int | str], ...]
+    ] = dataclasses.field(init=False, compare=True)
+
+    def __post_init__(self) -> None:
+        if self.local is not None and not isinstance(self.local, tuple):
+            object.__setattr__(self, "local", (self.local,))
+
+        if isinstance(self.release, tuple):
+            object.__setattr__(self, "release", Release(*self.release))
+
+        # we do this here to handle both None and tomlkit string values
+        object.__setattr__(
+            self, "text", self.to_string() if not self.text else str(self.text)
+        )
+
+        object.__setattr__(self, "_compare_key", self._make_compare_key())
+
+    def _make_compare_key(
+        self,
+    ) -> tuple[
+        int,
+        Release,
+        ReleaseTag,
+        ReleaseTag,
+        ReleaseTag,
+        tuple[tuple[int, int | str], ...],
+    ]:
+        """
+        This code is based on the implementation of packaging.version._cmpkey(..)
+        """
+        # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+        # We'll do this by abusing the pre segment, but we _only_ want to do this
+        # if there is not a pre or a post segment. If we have one of those then
+        # the normal sorting rules will handle this case correctly.
+        if self.pre is None and self.post is None and self.dev is not None:
+            _pre = _NEG_INF_TAG
+        # Versions without a pre-release (except as noted above) should sort after
+        # those with one.
+        elif self.pre is None:
+            _pre = _INF_TAG
+        else:
+            _pre = self.pre
+
+        # Versions without a post segment should sort before those with one.
+        _post = _NEG_INF_TAG if self.post is None else self.post
+
+        # Versions without a development segment should sort after those with one.
+        _dev = _INF_TAG if self.dev is None else self.dev
+
+        _local: tuple[tuple[int, int | str], ...]
+        if self.local is None:
+            # Versions without a local segment should sort before those with one.
+            _local = ((NegativeInfinity(), ""),)
+        else:
+            # Versions with a local segment need that segment parsed to implement
+            # the sorting rules in PEP440.
+            # - Alpha numeric segments sort before numeric segments
+            # - Alpha numeric segments sort lexicographically
+            # - Numeric segments sort numerically
+            # - Shorter versions sort before longer versions when the prefixes
+            #   match exactly
+            assert isinstance(self.local, tuple)
+            _local = tuple(
+                # We typecast strings that are integers so that they can be compared
+                (int(i), "") if str(i).isnumeric() else (NegativeInfinity(), i)
+                for i in self.local
+            )
+        return self.epoch, self.release, _pre, _post, _dev, _local
+
+    @property
+    def major(self) -> int:
+        return self.release.major
+
+    @property
+    def minor(self) -> int | None:
+        return self.release.minor
+
+    @property
+    def patch(self) -> int | None:
+        return self.release.patch
+
+    @property
+    def non_semver_parts(self) -> tuple[int, ...]:
+        assert isinstance(self.release.extra, tuple)
+        return self.release.extra
+
+    def to_string(self, short: bool = False) -> str:
+        if short:
+            import warnings
+
+            warnings.warn(
+                "Parameter 'short' has no effect and will be removed. "
+                "(Versions are always normalized according to PEP 440 now.)",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+
+        version_string = self.release.to_string()
+
+        if self.epoch:
+            # if epoch is non-zero we should include it
+            version_string = f"{self.epoch}!{version_string}"
+
+        if self.pre:
+            version_string += self.pre.to_string()
+
+        if self.post:
+            version_string = f"{version_string}.{self.post.to_string()}"
+
+        if self.dev:
+            version_string = f"{version_string}.{self.dev.to_string()}"
+
+        if self.local:
+            assert isinstance(self.local, tuple)
+            version_string += "+" + ".".join(map(str, self.local))
+
+        return version_string.lower()
+
+    @classmethod
+    def parse(cls: type[T], value: str) -> T:
+        from poetry.core.version.pep440.parser import parse_pep440
+
+        return parse_pep440(value, cls)
+
+    def is_prerelease(self) -> bool:
+        return self.pre is not None
+
+    def is_postrelease(self) -> bool:
+        return self.post is not None
+
+    def is_devrelease(self) -> bool:
+        return self.dev is not None
+
+    def is_local(self) -> bool:
+        return self.local is not None
+
+    def is_no_suffix_release(self) -> bool:
+        return not (self.pre or self.post or self.dev)
+
+    def is_unstable(self) -> bool:
+        return self.is_prerelease() or self.is_devrelease()
+
+    def is_stable(self) -> bool:
+        return not self.is_unstable()
+
+    def _is_increment_required(self) -> bool:
+        return self.is_stable() or (not self.is_prerelease() and self.is_postrelease())
+
+    def next_major(self: T) -> T:
+        release = self.release
+        if self._is_increment_required() or Release(release.major, 0, 0) < release:
+            release = release.next_major()
+        return self.__class__(epoch=self.epoch, release=release)
+
+    def next_minor(self: T) -> T:
+        release = self.release
+        if (
+            self._is_increment_required()
+            or Release(release.major, release.minor, 0) < release
+        ):
+            release = release.next_minor()
+        return self.__class__(epoch=self.epoch, release=release)
+
+    def next_patch(self: T) -> T:
+        release = self.release
+        if (
+            self._is_increment_required()
+            or Release(release.major, release.minor, release.patch) < release
+        ):
+            release = release.next_patch()
+        return self.__class__(epoch=self.epoch, release=release)
+
+    def next_prerelease(self: T, next_phase: bool = False) -> PEP440Version:
+        if self.is_stable():
+            warnings.warn(
+                "Calling next_prerelease() on a stable release is deprecated for its"
+                " ambiguity. Use next_major(), next_minor(), etc. together with"
+                " first_prerelease()",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+        if self.is_prerelease():
+            assert self.pre is not None
+            if not self.is_devrelease() or self.is_postrelease():
+                pre = self.pre.next_phase() if next_phase else self.pre.next()
+            else:
+                pre = self.pre
+        else:
+            pre = ReleaseTag(RELEASE_PHASE_ID_ALPHA)
+        return self.__class__(epoch=self.epoch, release=self.release, pre=pre)
+
+    def next_postrelease(self: T) -> T:
+        if self.is_postrelease():
+            assert self.post is not None
+            post = self.post.next() if self.dev is None else self.post
+        else:
+            post = ReleaseTag(RELEASE_PHASE_ID_POST)
+        return self.__class__(
+            epoch=self.epoch,
+            release=self.release,
+            pre=self.pre,
+            post=post,
+        )
+
+    def next_devrelease(self: T) -> T:
+        if self.is_devrelease():
+            assert self.dev is not None
+            dev = self.dev.next()
+        else:
+            warnings.warn(
+                "Calling next_devrelease() on a non dev release is deprecated for its"
+                " ambiguity. Use next_major(), next_minor(), etc. together with"
+                " first_devrelease()",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+            dev = ReleaseTag(RELEASE_PHASE_ID_DEV)
+        return self.__class__(
+            epoch=self.epoch,
+            release=self.release,
+            pre=self.pre,
+            post=self.post,
+            dev=dev,
+        )
+
+    def first_prerelease(self: T) -> T:
+        return self.__class__(
+            epoch=self.epoch,
+            release=self.release,
+            pre=ReleaseTag(RELEASE_PHASE_ID_ALPHA),
+        )
+
+    def first_devrelease(self: T) -> T:
+        return self.__class__(
+            epoch=self.epoch,
+            release=self.release,
+            pre=self.pre,
+            post=self.post,
+            dev=ReleaseTag(RELEASE_PHASE_ID_DEV),
+        )
+
+    def replace(self: T, **kwargs: Any) -> T:
+        return self.__class__(
+            **{
+                **{
+                    k: getattr(self, k)
+                    for k in self.__dataclass_fields__.keys()
+                    if k not in ("_compare_key", "text")
+                },  # setup defaults with current values, excluding compare keys and text
+                **kwargs,  # keys to replace
+            }
+        )
+
+    def without_local(self: T) -> T:
+        return self.replace(local=None)
+
+    def without_postrelease(self: T) -> T:
+        return self.replace(post=None)
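
The _compare_key built above gives PEP440Version a total order that matches PEP 440; a sketch of the resulting sort plus the increment helpers (assuming the vendored modules are importable):

    from poetry.core.version.pep440.version import PEP440Version

    ordered = sorted(
        PEP440Version.parse(s)
        for s in ["1.0.post1", "1.0", "1.0a1", "1.0.dev0", "1.0+local"]
    )
    assert [v.to_string() for v in ordered] == [
        "1.0.dev0", "1.0a1", "1.0", "1.0+local", "1.0.post1"
    ]

    v = PEP440Version.parse("1.2.3")
    assert v.next_patch().to_string() == "1.2.4"
    assert v.first_prerelease().to_string() == "1.2.3a0"
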
diff --git a/vendor/poetry-core/src/poetry/core/version/requirements.py b/vendor/poetry-core/src/poetry/core/version/requirements.py
new file mode 100644
index 00000000..b20de2ff
--- /dev/null
+++ b/vendor/poetry-core/src/poetry/core/version/requirements.py
@@ -0,0 +1,108 @@
+from __future__ import annotations
+
+import urllib.parse as urlparse
+
+from poetry.core.semver.exceptions import ParseConstraintError
+from poetry.core.semver.helpers import parse_constraint
+from poetry.core.version.grammars import GRAMMAR_PEP_508_CONSTRAINTS
+from poetry.core.version.markers import _compact_markers
+from poetry.core.version.parser import Parser
+
+
+class InvalidRequirement(ValueError):
+    """
+    An invalid requirement was found, users should refer to PEP 508.
+    """
+
+
+# Parser: PEP 508 Constraints
+_parser = Parser(GRAMMAR_PEP_508_CONSTRAINTS, "lalr")
+
+
+class Requirement:
+    """
+    Parse a requirement.
+
+    Parse a given requirement string into its parts, such as name, specifier,
+    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
+    string.
+    """
+
+    def __init__(self, requirement_string: str) -> None:
+        from lark import UnexpectedCharacters
+        from lark import UnexpectedToken
+
+        try:
+            parsed = _parser.parse(requirement_string)
+        except (UnexpectedCharacters, UnexpectedToken) as e:
+            raise InvalidRequirement(
+                "The requirement is invalid: Unexpected character at column"
+                f" {e.column}\n\n{e.get_context(requirement_string)}"
+            )
+
+        self.name: str = next(parsed.scan_values(lambda t: t.type == "NAME")).value
+        url = next(parsed.scan_values(lambda t: t.type == "URI"), None)
+
+        if url:
+            url = url.value
+            parsed_url = urlparse.urlparse(url)
+            if parsed_url.scheme == "file":
+                if urlparse.urlunparse(parsed_url) != url:
+                    raise InvalidRequirement(
+                        f'The requirement is invalid: invalid URL "{url}"'
+                    )
+            elif (
+                not (parsed_url.scheme and parsed_url.netloc)
+                or (not parsed_url.scheme and not parsed_url.netloc)
+            ) and not parsed_url.path:
+                raise InvalidRequirement(
+                    f'The requirement is invalid: invalid URL "{url}"'
+                )
+            self.url = url
+        else:
+            self.url = None
+
+        self.extras = [e.value for e in parsed.scan_values(lambda t: t.type == "EXTRA")]
+        constraint = next(parsed.find_data("version_specification"), None)
+        if not constraint:
+            constraint = "*"
+        else:
+            constraint = ",".join(constraint.children)
+
+        try:
+            self.constraint = parse_constraint(constraint)
+        except ParseConstraintError:
+            raise InvalidRequirement(
+                f'The requirement is invalid: invalid version constraint "{constraint}"'
+            )
+
+        self.pretty_constraint = constraint
+
+        marker = next(parsed.find_data("marker_spec"), None)
+        if marker:
+            marker = _compact_markers(
+                marker.children[0].children, tree_prefix="markers__"
+            )
+
+        self.marker = marker
+
+    def __str__(self) -> str:
+        parts = [self.name]
+
+        if self.extras:
+            extras = ",".join(sorted(self.extras))
+            parts.append(f"[{extras}]")
+
+        if self.pretty_constraint:
+            parts.append(self.pretty_constraint)
+
+        if self.url:
+            parts.append(f"@ {self.url}")
+
+        if self.marker:
+            parts.append(f"; {self.marker}")
+
+        return "".join(parts)
+
+    def __repr__(self) -> str:
+        return f""
diff --git a/vendor/poetry-core/stanza b/vendor/poetry-core/stanza
deleted file mode 100755
index a0469c38..00000000
--- a/vendor/poetry-core/stanza
+++ /dev/null
@@ -1,188 +0,0 @@
-#!/usr/bin/env python
-import os
-import subprocess
-import tarfile
-import zipfile
-
-from pathlib import Path
-from typing import Dict
-from typing import List
-
-from cleo import Application
-from cleo import Command
-from cleo import argument
-from vendoring.configuration import Configuration
-from vendoring.configuration import load_configuration
-from vendoring.tasks.cleanup import cleanup_existing_vendored
-from vendoring.tasks.license import find_and_extract_license
-from vendoring.tasks.license import license_fallback
-from vendoring.tasks.vendor import apply_patches
-from vendoring.tasks.vendor import detect_vendored_libs
-from vendoring.tasks.vendor import download_libraries
-from vendoring.tasks.vendor import remove_unnecessary_items
-from vendoring.utils import remove_all
-from vendoring.utils import run
-
-
-def extract_license(
-    destination: Path,
-    sdist: Path,
-    license_directories: Dict[str, str],
-    license_fallback_urls: Dict[str, str],
-) -> None:
-    def extract_from_source_tarfile(sdist: Path) -> bool:
-        ext = sdist.suffixes[-1][1:]
-        with tarfile.open(sdist, mode="r:{}".format(ext)) as tar:
-            return find_and_extract_license(
-                destination, tar, tar.getmembers(), license_directories,
-            )
-
-    def extract_from_source_zipfile(sdist: Path) -> bool:
-        with zipfile.ZipFile(sdist) as zip:
-            return find_and_extract_license(
-                destination, zip, zip.infolist(), license_directories,
-            )
-
-    if sdist.suffixes[-2] == ".tar":
-        found = extract_from_source_tarfile(sdist)
-    elif sdist.suffixes[-1] == ".zip":
-        found = extract_from_source_zipfile(sdist)
-    elif sdist.suffixes[-1] == ".whl":
-        found = extract_from_source_zipfile(sdist)
-    else:
-        raise NotImplementedError("new sdist type!")
-
-    if found:
-        return
-
-    license_fallback(
-        destination, sdist.name, license_directories, license_fallback_urls
-    )
-
-
-def fetch_licenses(config: Configuration) -> None:
-    destination = config.destination
-    license_directories = config.license_directories
-    license_fallback_urls = config.license_fallback_urls
-    requirements = config.requirements
-
-    tmp_dir = destination / "__tmp__"
-    download_sources(tmp_dir, requirements)
-
-    for sdist in tmp_dir.iterdir():
-        extract_license(destination, sdist, license_directories, license_fallback_urls)
-
-    remove_all([tmp_dir])
-
-
-def vendor_libraries(config: Configuration) -> List[str]:
-    destination = config.destination
-
-    # Download the relevant libraries.
-    download_libraries(config.requirements, destination)
-
-    # Cleanup unnecessary directories/files created.
-    remove_unnecessary_items(destination, config.drop_paths)
-
-    # Detect what got downloaded.
-    vendored_libs = detect_vendored_libs(destination, config.protected_files)
-
-    # Apply user provided patches.
-    apply_patches(config.patches_dir, working_directory=config.base_directory)
-
-    return vendored_libs
-
-
-def download_sources(location: Path, requirements: Path) -> None:
-    cmd = [
-        "pip",
-        "download",
-        "-r",
-        str(requirements),
-        "--no-deps",
-        "--dest",
-        str(location),
-    ]
-
-    run(cmd, working_directory=None)
-
-
-class VendorUpdateCommand(Command):
-
-    name = "update"
-
-    description = "Update one or more vendor packages"
-
-    arguments = [
-        argument("packages", "The packages to vendor.", optional=True, multiple=True)
-    ]
-
-    def handle(self):
-        packages = self.argument("packages")
-        current_dir = os.getcwd()
-        base = os.path.dirname(__file__)
-        try:
-            os.chdir(base.join(["vendors"]))
-            if not packages:
-                subprocess.run(["poetry", "lock"])
-            else:
-                subprocess.run(["poetry", "update", "--lock"])
-
-            subprocess.run(["poetry", "show", "--all", "--tree"])
-            subprocess.run(
-                [
-                    "poetry",
-                    "export",
-                    "-f",
-                    "requirements.txt",
-                    "-o",
-                    "../poetry/core/_vendor/vendor.txt",
-                    "--without-hashes",
-                ]
-            )
-        finally:
-            os.chdir(current_dir)
-
-        lines = []
-        with open("poetry/core/_vendor/vendor.txt") as f:
-            for line in f.readlines():
-                if ";" in line:
-                    line, _ = line.split(";", maxsplit=1)
-
-                if line.startswith("wheels/"):
-                    line = "vendors/" + line
-
-                if line.startswith(
-                    ("enum34", "functools32", "pathlib2", "typing", "scandir", "typing")
-                ):
-                    continue
-
-                lines.append(line.strip())
-
-        with open("poetry/core/_vendor/vendor.txt", "w") as f:
-            f.write("\n".join(lines))
-
-        config = load_configuration(Path(base))
-        cleanup_existing_vendored(config)
-        vendor_libraries(config)
-        fetch_licenses(config)
-
-
-class VendorCommand(Command):
-
-    name = "vendor"
-
-    description = "Vendor related commands."
-
-    commands = [VendorUpdateCommand()]
-
-    def handle(self):
-        return self.call("help", self.name)
-
-
-app = Application("stanza")
-app.add(VendorCommand())
-
-
-if __name__ == "__main__":
-    app.run()
diff --git a/vendor/poetry-core/tests/conftest.py b/vendor/poetry-core/tests/conftest.py
index 44a4ff89..30e87fcd 100644
--- a/vendor/poetry-core/tests/conftest.py
+++ b/vendor/poetry-core/tests/conftest.py
@@ -1,16 +1,26 @@
+from __future__ import annotations
+
 import sys
+import tempfile
 
+from pathlib import Path
+from typing import TYPE_CHECKING
 from typing import Callable
+from typing import Iterator
 
 import pytest
 import virtualenv
 
 from poetry.core.factory import Factory
-from poetry.core.utils._compat import Path
-from tests.testutils import tempfile
+from poetry.core.utils._compat import WINDOWS
+
+
+if TYPE_CHECKING:
+    from _pytest.config import Config
+    from _pytest.config.argparsing import Parser
 
 
-def pytest_addoption(parser):
+def pytest_addoption(parser: Parser) -> None:
     parser.addoption(
         "--integration",
         action="store_true",
@@ -20,15 +30,15 @@ def pytest_addoption(parser):
     )
 
 
-def pytest_configure(config):
+def pytest_configure(config: Config) -> None:
     config.addinivalue_line("markers", "integration: mark integration tests")
 
     if not config.option.integration:
-        setattr(config.option, "markexpr", "not integration")
+        config.option.markexpr = "not integration"
 
 
-def get_project_from_dir(base_directory):  # type: (Path) -> Callable[[str], Path]
-    def get(name):  # type: (str) -> Path
+def get_project_from_dir(base_directory: Path) -> Callable[[str], Path]:
+    def get(name: str) -> Path:
         path = base_directory / name
         if not path.exists():
             raise FileNotFoundError(str(path))
@@ -38,45 +48,45 @@ def get(name):  # type: (str) -> Path
 
 
 @pytest.fixture(scope="session")
-def project_source_root():  # type: () -> Path
+def project_source_root() -> Path:
     return Path(__file__).parent.parent
 
 
 @pytest.fixture(scope="session")
-def project_source_test_root():  # type: () -> Path
+def project_source_test_root() -> Path:
     return Path(__file__).parent
 
 
 @pytest.fixture(scope="session")
-def common_fixtures_directory(project_source_test_root):  # type: (Path) -> Path
+def common_fixtures_directory(project_source_test_root: Path) -> Path:
     return project_source_test_root / "fixtures"
 
 
 @pytest.fixture(scope="session")
-def common_project(common_fixtures_directory):  # type: (Path) -> Callable[[str], Path]
+def common_project(common_fixtures_directory: Path) -> Callable[[str], Path]:
     return get_project_from_dir(common_fixtures_directory)
 
 
 @pytest.fixture(scope="session")
-def masonry_fixtures_directory(project_source_test_root):  # type: (Path) -> Path
+def masonry_fixtures_directory(project_source_test_root: Path) -> Path:
     return project_source_test_root / "masonry" / "builders" / "fixtures"
 
 
 @pytest.fixture(scope="session")
 def masonry_project(
-    masonry_fixtures_directory,
-):  # type: (Path) -> Callable[[str], Path]
+    masonry_fixtures_directory: Path,
+) -> Callable[[str], Path]:
     return get_project_from_dir(masonry_fixtures_directory)
 
 
 @pytest.fixture
-def temporary_directory():  # type: () -> Path
+def temporary_directory() -> Iterator[Path]:
     with tempfile.TemporaryDirectory(prefix="poetry-core") as tmp:
         yield Path(tmp)
 
 
 @pytest.fixture
-def venv(temporary_directory):  # type: (Path) -> Path
+def venv(temporary_directory: Path) -> Path:
     venv_dir = temporary_directory / ".venv"
     virtualenv.cli_run(
         [
@@ -91,10 +101,10 @@ def venv(temporary_directory):  # type: (Path) -> Path
 
 
 @pytest.fixture
-def python(venv):  # type: (Path) -> str
-    return (venv / "bin" / "python").as_posix()
+def python(venv: Path) -> str:
+    return venv.joinpath("Scripts/Python.exe" if WINDOWS else "bin/python").as_posix()
 
 
 @pytest.fixture()
-def f():  # type: () -> Factory
+def f() -> Factory:
     return Factory()
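
A hypothetical test (not part of the patch) showing how these fixtures compose; python resolves to the interpreter inside the venv that the venv fixture builds:

    from pathlib import Path

    def test_venv_python(python: str) -> None:
        p = Path(python)
        assert p.exists()
        assert p.name.lower().startswith("python")  # "python" or "Python.exe"
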
diff --git a/vendor/poetry-core/tests/fixtures/complete.toml b/vendor/poetry-core/tests/fixtures/complete.toml
index a894a89c..8d45ac0c 100644
--- a/vendor/poetry-core/tests/fixtures/complete.toml
+++ b/vendor/poetry-core/tests/fixtures/complete.toml
@@ -38,6 +38,8 @@ pytest-cov = "^2.4"
 
 [tool.poetry.scripts]
 my-script = 'my_package:main'
+sample_pyscript = { reference = "script-files/sample_script.py", type = "file" }
+sample_shscript = { reference = "script-files/sample_script.sh", type = "file" }
 
 
 [[tool.poetry.source]]
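
The two new entries exercise the file-type script spec; a sketch of reading such a table outside poetry (hypothetical consumer using tomllib from Python 3.11+, or the tomli backport on 3.10):

    import tomllib

    with open("pyproject.toml", "rb") as f:
        scripts = tomllib.load(f)["tool"]["poetry"]["scripts"]

    for name, spec in scripts.items():
        if isinstance(spec, dict) and spec.get("type") == "file":
            print(name, "-> ship file", spec["reference"])
        else:
            print(name, "-> console entry point", spec)
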
diff --git a/vendor/poetry-core/tests/fixtures/project_failing_strict_validation/pyproject.toml b/vendor/poetry-core/tests/fixtures/project_failing_strict_validation/pyproject.toml
new file mode 100644
index 00000000..6d282ba9
--- /dev/null
+++ b/vendor/poetry-core/tests/fixtures/project_failing_strict_validation/pyproject.toml
@@ -0,0 +1,12 @@
+[tool.poetry]
+readme = ["README.rst", "README_WITH_ANOTHER_EXTENSION.md"]
+
+[tool.poetry.dependencies]
+python = "*"
+pathlib2 = { version = "^2.2", python = "3.7", allows-prereleases = true }
+
+[tool.poetry.scripts]
+a_script_with_unknown_extra = { reference = "a_script_with_unknown_extra.py", type = "file", extras = ["foo"] }
+a_script_without_extras = { reference = "a_script_without_extras.py", type = "file" }
+a_script_with_empty_extras = { reference = "a_script_with_empty_extras.py", type = "file", extras = [] }
+another_script = "another_script:main"
diff --git a/vendor/poetry-core/tests/fixtures/project_with_groups_and_explicit_main/README.rst b/vendor/poetry-core/tests/fixtures/project_with_groups_and_explicit_main/README.rst
new file mode 100644
index 00000000..f7fe1547
--- /dev/null
+++ b/vendor/poetry-core/tests/fixtures/project_with_groups_and_explicit_main/README.rst
@@ -0,0 +1,2 @@
+My Package
+==========
diff --git a/vendor/poetry-core/tests/fixtures/project_with_groups_and_explicit_main/pyproject.toml b/vendor/poetry-core/tests/fixtures/project_with_groups_and_explicit_main/pyproject.toml
new file mode 100644
index 00000000..18c6a595
--- /dev/null
+++ b/vendor/poetry-core/tests/fixtures/project_with_groups_and_explicit_main/pyproject.toml
@@ -0,0 +1,17 @@
+[tool.poetry]
+name = "simple-project"
+version = "0.1.0"
+description = ""
+authors = ["Your Name "]
+
+[tool.poetry.dependencies]
+python = "^3.7"
+
+[tool.poetry.group.main.dependencies]
+aiohttp = "^2.17.0"
+
+[tools.poetry]
+
+[build-system]
+requires = ["poetry-core>=1.0.0"]
+build-backend = "poetry.core.masonry.api"
diff --git a/vendor/poetry/poetry/mixology/solutions/__init__.py b/vendor/poetry-core/tests/fixtures/project_with_groups_and_explicit_main/simple_project/__init__.py
similarity index 100%
rename from vendor/poetry/poetry/mixology/solutions/__init__.py
rename to vendor/poetry-core/tests/fixtures/project_with_groups_and_explicit_main/simple_project/__init__.py
diff --git a/vendor/poetry-core/tests/fixtures/project_with_groups_and_legacy_dev/README.rst b/vendor/poetry-core/tests/fixtures/project_with_groups_and_legacy_dev/README.rst
new file mode 100644
index 00000000..f7fe1547
--- /dev/null
+++ b/vendor/poetry-core/tests/fixtures/project_with_groups_and_legacy_dev/README.rst
@@ -0,0 +1,2 @@
+My Package
+==========
diff --git a/vendor/poetry-core/tests/fixtures/project_with_groups_and_legacy_dev/pyproject.toml b/vendor/poetry-core/tests/fixtures/project_with_groups_and_legacy_dev/pyproject.toml
new file mode 100644
index 00000000..6e239c72
--- /dev/null
+++ b/vendor/poetry-core/tests/fixtures/project_with_groups_and_legacy_dev/pyproject.toml
@@ -0,0 +1,20 @@
+[tool.poetry]
+name = "simple-project"
+version = "0.1.0"
+description = ""
+authors = ["Your Name "]
+
+[tool.poetry.dependencies]
+python = "^3.7"
+
+[tool.poetry.group.dev.dependencies]
+pre-commit = "^2.17.0"
+
+[tool.poetry.dev-dependencies]
+pytest = "^5.2"
+
+[tools.poetry]
+
+[build-system]
+requires = ["poetry-core>=1.0.0"]
+build-backend = "poetry.core.masonry.api"
diff --git a/vendor/poetry/poetry/utils/__init__.py b/vendor/poetry-core/tests/fixtures/project_with_groups_and_legacy_dev/simple_project/__init__.py
similarity index 100%
rename from vendor/poetry/poetry/utils/__init__.py
rename to vendor/poetry-core/tests/fixtures/project_with_groups_and_legacy_dev/simple_project/__init__.py
diff --git a/vendor/poetry-core/tests/fixtures/project_with_pep517_non_poetry/pyproject.toml b/vendor/poetry-core/tests/fixtures/project_with_pep517_non_poetry/pyproject.toml
new file mode 100644
index 00000000..8b36a60d
--- /dev/null
+++ b/vendor/poetry-core/tests/fixtures/project_with_pep517_non_poetry/pyproject.toml
@@ -0,0 +1,13 @@
+[build-system]
+requires = ["flit_core >=3.7.1,<4"]
+build-backend = "flit_core.buildapi"
+
+[project]
+name = "flit"
+authors = []
+dependencies = [
+    "flit_core >=3.7.1",
+]
+requires-python = ">=3.6"
+readme = "README.rst"
+dynamic = ['version', 'description']
diff --git a/vendor/poetry-core/tests/fixtures/project_with_setup/setup.py b/vendor/poetry-core/tests/fixtures/project_with_setup/setup.py
index 71b3074d..ce86fe3d 100644
--- a/vendor/poetry-core/tests/fixtures/project_with_setup/setup.py
+++ b/vendor/poetry-core/tests/fixtures/project_with_setup/setup.py
@@ -1,9 +1,7 @@
-# -*- coding: utf-8 -*-
-
 from setuptools import setup
 
 
-kwargs = dict(
+setup(
     name="my-package",
     license="MIT",
     version="0.1.2",
@@ -14,6 +12,3 @@
     packages=["my_package"],
     install_requires=["pendulum>=1.4.4", "cachy[msgpack]>=0.2.0"],
 )
-
-
-setup(**kwargs)
diff --git a/vendor/poetry-core/tests/fixtures/project_with_setup_cfg_only/setup.cfg b/vendor/poetry-core/tests/fixtures/project_with_setup_cfg_only/setup.cfg
new file mode 100644
index 00000000..b0f43520
--- /dev/null
+++ b/vendor/poetry-core/tests/fixtures/project_with_setup_cfg_only/setup.cfg
@@ -0,0 +1,18 @@
+[metadata]
+name = my_package
+version = attr: my_package.VERSION
+description = My package description
+long_description = file: README.rst, CHANGELOG.rst, LICENSE.rst
+keywords = one, two
+license = BSD 3-Clause License
+classifiers =
+    Framework :: Django
+    Programming Language :: Python :: 3
+
+[options]
+zip_safe = False
+include_package_data = True
+packages = find:
+install_requires =
+    requests
+    importlib-metadata; python_version<"3.8"
diff --git a/vendor/poetry-core/tests/fixtures/sample_project/pyproject.toml b/vendor/poetry-core/tests/fixtures/sample_project/pyproject.toml
index f2cc0bce..a99e6c19 100644
--- a/vendor/poetry-core/tests/fixtures/sample_project/pyproject.toml
+++ b/vendor/poetry-core/tests/fixtures/sample_project/pyproject.toml
@@ -3,7 +3,7 @@ name = "my-package"
 version = "1.2.3"
 description = "Some description."
 authors = [
-    "Sébastien Eustace "
+    "SeÌbastien Eustace "
 ]
 license = "MIT"
 
@@ -50,7 +50,7 @@ dataclasses = {version = "^0.7", python = ">=3.6.1,<3.7"}
 [tool.poetry.extras]
 db = [ "orator" ]
 
-[tool.poetry.dev-dependencies]
+[tool.poetry.group.dev.dependencies]
 pytest = "~3.4"
 
 
diff --git a/vendor/poetry-core/tests/fixtures/script-files/sample_script.py b/vendor/poetry-core/tests/fixtures/script-files/sample_script.py
new file mode 100644
index 00000000..1318d50b
--- /dev/null
+++ b/vendor/poetry-core/tests/fixtures/script-files/sample_script.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+
+from __future__ import annotations
+
+
+hello = "Hello World!"
diff --git a/vendor/poetry-core/tests/fixtures/script-files/sample_script.sh b/vendor/poetry-core/tests/fixtures/script-files/sample_script.sh
new file mode 100644
index 00000000..d6954d95
--- /dev/null
+++ b/vendor/poetry-core/tests/fixtures/script-files/sample_script.sh
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+
+echo "Hello World!"
diff --git a/vendor/poetry-core/tests/fixtures/with_readme_files/README-1.rst b/vendor/poetry-core/tests/fixtures/with_readme_files/README-1.rst
new file mode 100644
index 00000000..265d70d6
--- /dev/null
+++ b/vendor/poetry-core/tests/fixtures/with_readme_files/README-1.rst
@@ -0,0 +1,2 @@
+Single Python
+=============
diff --git a/vendor/poetry-core/tests/fixtures/with_readme_files/README-2.rst b/vendor/poetry-core/tests/fixtures/with_readme_files/README-2.rst
new file mode 100644
index 00000000..a5693d97
--- /dev/null
+++ b/vendor/poetry-core/tests/fixtures/with_readme_files/README-2.rst
@@ -0,0 +1,2 @@
+Changelog
+=========
diff --git a/vendor/poetry-core/tests/fixtures/with_readme_files/pyproject.toml b/vendor/poetry-core/tests/fixtures/with_readme_files/pyproject.toml
new file mode 100644
index 00000000..850e5117
--- /dev/null
+++ b/vendor/poetry-core/tests/fixtures/with_readme_files/pyproject.toml
@@ -0,0 +1,19 @@
+[tool.poetry]
+name = "single-python"
+version = "0.1"
+description = "Some description."
+authors = [
+    "Wagner Macedo "
+]
+license = "MIT"
+
+readme = [
+    "README-1.rst",
+    "README-2.rst"
+]
+
+homepage = "https://python-poetry.org/"
+
+
+[tool.poetry.dependencies]
+python = "2.7.15"
diff --git a/vendor/poetry-core/tests/fixtures/with_readme_files/single_python.py b/vendor/poetry-core/tests/fixtures/with_readme_files/single_python.py
new file mode 100644
index 00000000..ceb22ae1
--- /dev/null
+++ b/vendor/poetry-core/tests/fixtures/with_readme_files/single_python.py
@@ -0,0 +1,6 @@
+"""Example module"""
+
+from __future__ import annotations
+
+
+__version__ = "0.1"
diff --git a/vendor/poetry-core/tests/integration/test_pep517.py b/vendor/poetry-core/tests/integration/test_pep517.py
index 321f8d71..07e8d3bb 100644
--- a/vendor/poetry-core/tests/integration/test_pep517.py
+++ b/vendor/poetry-core/tests/integration/test_pep517.py
@@ -1,13 +1,23 @@
+from __future__ import annotations
+
+import sys
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+
 import pytest
 
-from pep517.build import build
-from pep517.check import check
+# noinspection PyProtectedMember
+from build.__main__ import build_package
+from build.util import project_wheel_metadata
 
-from poetry.core.utils._compat import Path
 from tests.testutils import subprocess_run
 from tests.testutils import temporary_project_directory
 
 
+if TYPE_CHECKING:
+    from _pytest.fixtures import FixtureRequest
+
 pytestmark = pytest.mark.integration
 
 
@@ -19,32 +29,42 @@
         ("masonry_project", "disable_setup_py"),
     ],
 )
-def test_pep517_check_poetry_managed(request, getter, project):
+def test_pep517_check_poetry_managed(
+    request: FixtureRequest, getter: str, project: str
+) -> None:
     with temporary_project_directory(request.getfixturevalue(getter)(project)) as path:
-        assert check(path)
+        assert project_wheel_metadata(path)
 
 
-def test_pep517_check(project_source_root):
-    assert check(str(project_source_root))
+def test_pep517_check(project_source_root: Path) -> None:
+    assert project_wheel_metadata(str(project_source_root))
 
 
-def test_pep517_build_sdist(temporary_directory, project_source_root):
-    build(
-        source_dir=str(project_source_root), dist="sdist", dest=str(temporary_directory)
+def test_pep517_build_sdist(
+    temporary_directory: Path, project_source_root: Path
+) -> None:
+    build_package(
+        srcdir=str(project_source_root),
+        outdir=str(temporary_directory),
+        distributions=["sdist"],
     )
     distributions = list(temporary_directory.glob("poetry-core-*.tar.gz"))
     assert len(distributions) == 1
 
 
-def test_pep517_build_wheel(temporary_directory, project_source_root):
-    build(
-        source_dir=str(project_source_root), dist="wheel", dest=str(temporary_directory)
+def test_pep517_build_wheel(
+    temporary_directory: Path, project_source_root: Path
+) -> None:
+    build_package(
+        srcdir=str(project_source_root),
+        outdir=str(temporary_directory),
+        distributions=["wheel"],
     )
     distributions = list(temporary_directory.glob("poetry_core-*-none-any.whl"))
     assert len(distributions) == 1
 
 
-def test_pip_wheel_build(temporary_directory, project_source_root):
+def test_pip_wheel_build(temporary_directory: Path, project_source_root: Path) -> None:
     tmp = str(temporary_directory)
     pip = subprocess_run(
         "pip", "wheel", "--use-pep517", "-w", tmp, str(project_source_root)
@@ -57,7 +77,13 @@ def test_pip_wheel_build(temporary_directory, project_source_root):
     assert len(wheels) == 1
 
 
-def test_pip_install_no_binary(python, project_source_root):
+@pytest.mark.xfail(
+    sys.version_info < (3, 8),
+    # see https://github.com/python/importlib_metadata/issues/392
+    reason="importlib-metadata can't be installed with --no-binary anymore",
+    strict=True,
+)
+def test_pip_install_no_binary(python: str, project_source_root: Path) -> None:
     subprocess_run(
         python,
         "-m",
diff --git a/vendor/poetry-core/tests/json/test_poetry_schema.py b/vendor/poetry-core/tests/json/test_poetry_schema.py
index 905427eb..06f5e343 100644
--- a/vendor/poetry-core/tests/json/test_poetry_schema.py
+++ b/vendor/poetry-core/tests/json/test_poetry_schema.py
@@ -1,25 +1,31 @@
+from __future__ import annotations
+
+from typing import Any
+
 import pytest
 
 from poetry.core.json import validate_object
 
 
 @pytest.fixture
-def base_object():
+def base_object() -> dict[str, Any]:
     return {
         "name": "myapp",
         "version": "1.0.0",
         "description": "Some description.",
+        "authors": ["Your Name "],
         "dependencies": {"python": "^3.6"},
         "dev-dependencies": {},
     }
 
 
 @pytest.fixture
-def multi_url_object():
+def multi_url_object() -> dict[str, Any]:
     return {
         "name": "myapp",
         "version": "1.0.0",
         "description": "Some description.",
+        "authors": ["Your Name "],
         "dependencies": {
             "python": [
                 {
@@ -33,12 +39,21 @@ def multi_url_object():
     }
 
 
-def test_path_dependencies(base_object):
+def test_path_dependencies(base_object: dict[str, Any]) -> None:
     base_object["dependencies"].update({"foo": {"path": "../foo"}})
     base_object["dev-dependencies"].update({"foo": {"path": "../foo"}})
 
     assert len(validate_object(base_object, "poetry-schema")) == 0
 
 
-def test_multi_url_dependencies(multi_url_object):
+def test_multi_url_dependencies(multi_url_object: dict[str, Any]) -> None:
     assert len(validate_object(multi_url_object, "poetry-schema")) == 0
+
+
+def test_multiline_description(base_object: dict[str, Any]) -> None:
+    bad_description = "Some multi-\nline string"
+    base_object["description"] = bad_description
+
+    errors = validate_object(base_object, "poetry-schema")
+    assert len(errors) == 1
+    assert errors[0] == f"[description] {bad_description!r} does not match '^[^\\n]*$'"
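
validate_object returns a list of human-readable error strings, so an empty list means the document passed; a minimal sketch:

    from poetry.core.json import validate_object

    doc = {
        "name": "myapp",
        "version": "1.0.0",
        "description": "Some description.",
        "authors": ["Your Name <you@example.com>"],
    }
    assert validate_object(doc, "poetry-schema") == []

    doc["description"] = "multi-\nline"
    assert len(validate_object(doc, "poetry-schema")) == 1
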
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/build_script_in_subdir/pyproject.toml b/vendor/poetry-core/tests/masonry/builders/fixtures/build_script_in_subdir/pyproject.toml
new file mode 100644
index 00000000..86dcc30c
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/build_script_in_subdir/pyproject.toml
@@ -0,0 +1,14 @@
+[tool.poetry]
+name = "build_script_in_subdir"
+version = "0.1"
+description = "Some description."
+authors = [
+    "Brandon Chinn "
+]
+license = "MIT"
+homepage = "https://python-poetry.org/"
+packages = [
+    { include = "*", from = "src" },
+]
+
+build = "scripts/build.py"
diff --git a/vendor/poetry/.coveragerc b/vendor/poetry-core/tests/masonry/builders/fixtures/build_script_in_subdir/scripts/build.py
similarity index 100%
rename from vendor/poetry/.coveragerc
rename to vendor/poetry-core/tests/masonry/builders/fixtures/build_script_in_subdir/scripts/build.py
diff --git a/vendor/poetry/poetry/version/__init__.py b/vendor/poetry-core/tests/masonry/builders/fixtures/build_script_in_subdir/src/foo.py
similarity index 100%
rename from vendor/poetry/poetry/version/__init__.py
rename to vendor/poetry-core/tests/masonry/builders/fixtures/build_script_in_subdir/src/foo.py
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/case_sensitive_exclusions/pyproject.toml b/vendor/poetry-core/tests/masonry/builders/fixtures/case_sensitive_exclusions/pyproject.toml
index 57d25a43..3bf793ed 100644
--- a/vendor/poetry-core/tests/masonry/builders/fixtures/case_sensitive_exclusions/pyproject.toml
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/case_sensitive_exclusions/pyproject.toml
@@ -46,4 +46,4 @@ time = ["pendulum"]
 [tool.poetry.scripts]
 my-script = "my_package:main"
 my-2nd-script = "my_package:main2"
-extra-script = {callable = "my_package.extra:main", extras = ["time"]}
+extra-script = {reference = "my_package.extra:main", extras = ["time"], type = "console"}
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/complete/bin/script.sh b/vendor/poetry-core/tests/masonry/builders/fixtures/complete/bin/script.sh
new file mode 100644
index 00000000..2a9686ac
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/complete/bin/script.sh
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+
+echo "Hello World!"
\ No newline at end of file
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/complete/my_package/data1/test.json b/vendor/poetry-core/tests/masonry/builders/fixtures/complete/my_package/data1/test.json
index e69de29b..0967ef42 100644
--- a/vendor/poetry-core/tests/masonry/builders/fixtures/complete/my_package/data1/test.json
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/complete/my_package/data1/test.json
@@ -0,0 +1 @@
+{}
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/complete/my_package/sub_pkg2/data2/data.json b/vendor/poetry-core/tests/masonry/builders/fixtures/complete/my_package/sub_pkg2/data2/data.json
index e69de29b..0967ef42 100644
--- a/vendor/poetry-core/tests/masonry/builders/fixtures/complete/my_package/sub_pkg2/data2/data.json
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/complete/my_package/sub_pkg2/data2/data.json
@@ -0,0 +1 @@
+{}
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/complete/pyproject.toml b/vendor/poetry-core/tests/masonry/builders/fixtures/complete/pyproject.toml
index d6455b7d..8b7d2c6b 100644
--- a/vendor/poetry-core/tests/masonry/builders/fixtures/complete/pyproject.toml
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/complete/pyproject.toml
@@ -36,7 +36,7 @@ cachy = { version = "^0.2.0", extras = ["msgpack"] }
 
 [tool.poetry.dependencies.pendulum]
 version = "^1.4"
-markers= 'python_version ~= "2.7" and sys_platform == "win32" or python_version in "3.4 3.5"'
+markers = 'python_version ~= "2.7" and sys_platform == "win32" or python_version in "3.4 3.5"'
 optional = true
 
 [tool.poetry.dev-dependencies]
@@ -48,7 +48,9 @@ time = ["pendulum"]
 [tool.poetry.scripts]
 my-script = "my_package:main"
 my-2nd-script = "my_package:main2"
-extra-script = {callable = "my_package.extra:main", extras = ["time"]}
+file-script = { reference = "bin/script.sh", type = "file" }
+extra-script = { reference = "my_package.extra:main", extras = ["time"], type = "console" }
+
 
 [tool.poetry.urls]
 "Issue Tracker" = "https://github.com/python-poetry/poetry/issues"
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/exclude-whl-include-sdist/exclude_whl_include_sdist/__init__.py b/vendor/poetry-core/tests/masonry/builders/fixtures/exclude-whl-include-sdist/exclude_whl_include_sdist/__init__.py
new file mode 100644
index 00000000..3dc1f76b
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/exclude-whl-include-sdist/exclude_whl_include_sdist/__init__.py
@@ -0,0 +1 @@
+__version__ = "0.1.0"
diff --git a/vendor/requests/tests/testserver/__init__.py b/vendor/poetry-core/tests/masonry/builders/fixtures/exclude-whl-include-sdist/exclude_whl_include_sdist/compiled/source.c
similarity index 100%
rename from vendor/requests/tests/testserver/__init__.py
rename to vendor/poetry-core/tests/masonry/builders/fixtures/exclude-whl-include-sdist/exclude_whl_include_sdist/compiled/source.c
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/exclude-whl-include-sdist/exclude_whl_include_sdist/compiled/source.h b/vendor/poetry-core/tests/masonry/builders/fixtures/exclude-whl-include-sdist/exclude_whl_include_sdist/compiled/source.h
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/exclude-whl-include-sdist/exclude_whl_include_sdist/cython_code.pyx b/vendor/poetry-core/tests/masonry/builders/fixtures/exclude-whl-include-sdist/exclude_whl_include_sdist/cython_code.pyx
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/exclude-whl-include-sdist/pyproject.toml b/vendor/poetry-core/tests/masonry/builders/fixtures/exclude-whl-include-sdist/pyproject.toml
new file mode 100644
index 00000000..a684e61a
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/exclude-whl-include-sdist/pyproject.toml
@@ -0,0 +1,17 @@
+[tool.poetry]
+name = "exclude-whl-include-sdist"
+description = ""
+authors = []
+version = "0.1.0"
+exclude = ["exclude_whl_include_sdist/compiled", "exclude_whl_include_sdist/*.pyx"]
+include = [
+    { path = "exclude_whl_include_sdist/compiled/**/*", format = "sdist" },
+    { path = "exclude_whl_include_sdist/*.pyx", format = "sdist" }
+]
+
+[tool.poetry.dependencies]
+python = "^3.9"
+
+[build-system]
+requires = ["poetry-core>=1.0.0"]
+build-backend = "poetry.core.masonry.api"
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/excluded_subpackage/example/test/excluded.py b/vendor/poetry-core/tests/masonry/builders/fixtures/excluded_subpackage/example/test/excluded.py
index bf6e1f89..273b5df6 100644
--- a/vendor/poetry-core/tests/masonry/builders/fixtures/excluded_subpackage/example/test/excluded.py
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/excluded_subpackage/example/test/excluded.py
@@ -1,5 +1,5 @@
-from .. import __version__
+from tests.masonry.builders.fixtures.excluded_subpackage.example import __version__
 
 
-def test_version():
+def test_version() -> None:
     assert __version__ == "0.1.0"
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/extended/setup.py b/vendor/poetry-core/tests/masonry/builders/fixtures/extended/setup.py
new file mode 100644
index 00000000..1c07efe3
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/extended/setup.py
@@ -0,0 +1,24 @@
+from setuptools import setup
+
+packages = ["extended"]
+
+package_data = {"": ["*"]}
+
+setup_kwargs = {
+    "name": "extended",
+    "version": "0.1",
+    "description": "Some description.",
+    "long_description": "Module 1\n========\n",
+    "author": "Sébastien Eustace",
+    "author_email": "sebastien@eustace.io",
+    "maintainer": "None",
+    "maintainer_email": "None",
+    "url": "https://python-poetry.org/",
+    "packages": packages,
+    "package_data": package_data,
+}
+from build import *
+
+build(setup_kwargs)
+
+setup(**setup_kwargs)
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/extended_with_no_setup/build.py b/vendor/poetry-core/tests/masonry/builders/fixtures/extended_with_no_setup/build.py
index 4f1fee59..c8b6a72e 100644
--- a/vendor/poetry-core/tests/masonry/builders/fixtures/extended_with_no_setup/build.py
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/extended_with_no_setup/build.py
@@ -9,7 +9,7 @@
 extensions = [Extension("extended.extended", ["extended/extended.c"])]
 
 
-def build():
+def build() -> None:
     distribution = Distribution({"name": "extended", "ext_modules": extensions})
     distribution.package_dir = "extended"
 
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/invalid_case_sensitive_exclusions/pyproject.toml b/vendor/poetry-core/tests/masonry/builders/fixtures/invalid_case_sensitive_exclusions/pyproject.toml
index 6ef10e59..44e226cc 100644
--- a/vendor/poetry-core/tests/masonry/builders/fixtures/invalid_case_sensitive_exclusions/pyproject.toml
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/invalid_case_sensitive_exclusions/pyproject.toml
@@ -41,4 +41,4 @@ time = ["pendulum"]
 [tool.poetry.scripts]
 my-script = "my_package:main"
 my-2nd-script = "my_package:main2"
-extra-script = {callable = "my_package.extra:main", extras = ["time"]}
+extra-script = {reference = "my_package.extra:main", extras = ["time"], type = "console"}
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/licenses_and_copying/pyproject.toml b/vendor/poetry-core/tests/masonry/builders/fixtures/licenses_and_copying/pyproject.toml
index b56bbe63..70880bcd 100644
--- a/vendor/poetry-core/tests/masonry/builders/fixtures/licenses_and_copying/pyproject.toml
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/licenses_and_copying/pyproject.toml
@@ -43,7 +43,7 @@ time = ["pendulum"]
 [tool.poetry.scripts]
 my-script = "my_package:main"
 my-2nd-script = "my_package:main2"
-extra-script = {callable = "my_package.extra:main", extras = ["time"]}
+extra-script = {reference = "my_package.extra:main", extras = ["time"], type = "console"}
 
 [tool.poetry.urls]
 "Issue Tracker" = "https://github.com/python-poetry/poetry/issues"
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/pep_561_stub_only_partial_namespace/pkg-stubs/module.pyi b/vendor/poetry-core/tests/masonry/builders/fixtures/pep_561_stub_only_partial_namespace/pkg-stubs/module.pyi
new file mode 100644
index 00000000..d79e6e39
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/pep_561_stub_only_partial_namespace/pkg-stubs/module.pyi
@@ -0,0 +1,4 @@
+"""Example module"""
+from typing import Tuple
+
+version_info = Tuple[int, int, int]
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/pep_561_stub_only_partial_namespace/pkg-stubs/subpkg/__init__.pyi b/vendor/poetry-core/tests/masonry/builders/fixtures/pep_561_stub_only_partial_namespace/pkg-stubs/subpkg/__init__.pyi
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/pep_561_stub_only_partial_namespace/pkg-stubs/subpkg/py.typed b/vendor/poetry-core/tests/masonry/builders/fixtures/pep_561_stub_only_partial_namespace/pkg-stubs/subpkg/py.typed
new file mode 100644
index 00000000..b648ac92
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/pep_561_stub_only_partial_namespace/pkg-stubs/subpkg/py.typed
@@ -0,0 +1 @@
+partial
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/pep_561_stub_only_partial_namespace/pyproject.toml b/vendor/poetry-core/tests/masonry/builders/fixtures/pep_561_stub_only_partial_namespace/pyproject.toml
new file mode 100644
index 00000000..265effd0
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/pep_561_stub_only_partial_namespace/pyproject.toml
@@ -0,0 +1,14 @@
+[tool.poetry]
+name = "pep-561-stubs"
+version = "0.1"
+description = "PEP 561 stub namespace package example with the py.typed marker file"
+authors = [
+    "Henrik Bruåsdal "
+]
+license = "MIT"
+packages = [
+    {include = "pkg-stubs"}
+]
+
+[tool.poetry.dependencies]
+python = "^3.6"
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/script_callable_legacy_string/README.rst b/vendor/poetry-core/tests/masonry/builders/fixtures/script_callable_legacy_string/README.rst
new file mode 100644
index 00000000..f7fe1547
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/script_callable_legacy_string/README.rst
@@ -0,0 +1,2 @@
+My Package
+==========
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/script_callable_legacy_string/my_package/__init__.py b/vendor/poetry-core/tests/masonry/builders/fixtures/script_callable_legacy_string/my_package/__init__.py
new file mode 100644
index 00000000..10aa336c
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/script_callable_legacy_string/my_package/__init__.py
@@ -0,0 +1 @@
+__version__ = "1.2.3"
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/script_callable_legacy_string/pyproject.toml b/vendor/poetry-core/tests/masonry/builders/fixtures/script_callable_legacy_string/pyproject.toml
new file mode 100644
index 00000000..2c949a9d
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/script_callable_legacy_string/pyproject.toml
@@ -0,0 +1,19 @@
+[tool.poetry]
+name = "my-package"
+version = "1.2.3"
+description = "Some description."
+authors = [
+    "Poetry Maintainers "
+]
+license = "MIT"
+readme = "README.rst"
+
+[tool.poetry.dependencies]
+python = "^3.6"
+
+[tool.poetry.dev-dependencies]
+
+[tool.poetry.extras]
+
+[tool.poetry.scripts]
+script-legacy = "my_package:main"
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/script_callable_legacy_table/README.rst b/vendor/poetry-core/tests/masonry/builders/fixtures/script_callable_legacy_table/README.rst
new file mode 100644
index 00000000..f7fe1547
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/script_callable_legacy_table/README.rst
@@ -0,0 +1,2 @@
+My Package
+==========
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/script_callable_legacy_table/my_package/__init__.py b/vendor/poetry-core/tests/masonry/builders/fixtures/script_callable_legacy_table/my_package/__init__.py
new file mode 100644
index 00000000..10aa336c
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/script_callable_legacy_table/my_package/__init__.py
@@ -0,0 +1 @@
+__version__ = "1.2.3"
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/script_callable_legacy_table/pyproject.toml b/vendor/poetry-core/tests/masonry/builders/fixtures/script_callable_legacy_table/pyproject.toml
new file mode 100644
index 00000000..cab2488f
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/script_callable_legacy_table/pyproject.toml
@@ -0,0 +1,21 @@
+[tool.poetry]
+name = "my-package"
+version = "1.2.3"
+description = "Some description."
+authors = [
+    "Poetry Maintainers "
+]
+license = "MIT"
+readme = "README.rst"
+
+[tool.poetry.dependencies]
+python = "^3.6"
+
+[tool.poetry.dev-dependencies]
+
+[tool.poetry.extras]
+time = []
+
+[tool.poetry.scripts]
+script-legacy = { callable = "my_package.extra_legacy:main" }
+extra-script-legacy = { callable = "my_package.extra_legacy:main", extras = ["time"] }
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_console/README.rst b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_console/README.rst
new file mode 100644
index 00000000..f7fe1547
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_console/README.rst
@@ -0,0 +1,2 @@
+My Package
+==========
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_console/my_package/__init__.py b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_console/my_package/__init__.py
new file mode 100644
index 00000000..10aa336c
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_console/my_package/__init__.py
@@ -0,0 +1 @@
+__version__ = "1.2.3"
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_console/pyproject.toml b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_console/pyproject.toml
new file mode 100644
index 00000000..a55800b6
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_console/pyproject.toml
@@ -0,0 +1,21 @@
+[tool.poetry]
+name = "my-package"
+version = "1.2.3"
+description = "Some description."
+authors = [
+    "Poetry Maintainers "
+]
+license = "MIT"
+readme = "README.rst"
+
+[tool.poetry.dependencies]
+python = "^3.6"
+
+[tool.poetry.dev-dependencies]
+
+[tool.poetry.extras]
+time = []
+
+[tool.poetry.scripts]
+script = { reference = "my_package.extra:main", type = "console" }
+extra-script = { reference = "my_package.extra:main", extras = ["time"], type = "console" }
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file/README.rst b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file/README.rst
new file mode 100644
index 00000000..f7fe1547
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file/README.rst
@@ -0,0 +1,2 @@
+My Package
+==========
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file/bin/script.sh b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file/bin/script.sh
new file mode 100644
index 00000000..2a9686ac
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file/bin/script.sh
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+
+echo "Hello World!"
\ No newline at end of file
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file/my_package/__init__.py b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file/my_package/__init__.py
new file mode 100644
index 00000000..10aa336c
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file/my_package/__init__.py
@@ -0,0 +1 @@
+__version__ = "1.2.3"
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file/pyproject.toml b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file/pyproject.toml
new file mode 100644
index 00000000..973a94c1
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file/pyproject.toml
@@ -0,0 +1,19 @@
+[tool.poetry]
+name = "my-package"
+version = "1.2.3"
+description = "Some description."
+authors = [
+    "Poetry Maintainers "
+]
+license = "MIT"
+readme = "README.rst"
+
+[tool.poetry.dependencies]
+python = "^3.6"
+
+[tool.poetry.dev-dependencies]
+
+[tool.poetry.extras]
+
+[tool.poetry.scripts]
+sh-script = { reference = "bin/script.sh", type = "file" }
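
This fixture introduces `type = "file"` scripts. A small sketch, mirroring the `convert_script_files` test added below (the fixture path is assumed to resolve from the repository root):

    from pathlib import Path

    from poetry.core.factory import Factory
    from poetry.core.masonry.builders.builder import Builder

    root = Path("vendor/poetry-core/tests/masonry/builders/fixtures"
                "/script_reference_file")
    # File-type scripts are collected (not turned into entry points) so the
    # wheel builder can copy them into the wheel's .data/scripts directory.
    files = Builder(Factory().create_poetry(root)).convert_script_files()
    assert [p.relative_to(root) for p in files] == [Path("bin") / "script.sh"]
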
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file_invalid_definition/README.rst b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file_invalid_definition/README.rst
new file mode 100644
index 00000000..f7fe1547
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file_invalid_definition/README.rst
@@ -0,0 +1,2 @@
+My Package
+==========
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file_invalid_definition/bin/script.sh b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file_invalid_definition/bin/script.sh
new file mode 100644
index 00000000..2a9686ac
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file_invalid_definition/bin/script.sh
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+
+echo "Hello World!"
\ No newline at end of file
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file_invalid_definition/my_package/__init__.py b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file_invalid_definition/my_package/__init__.py
new file mode 100644
index 00000000..10aa336c
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file_invalid_definition/my_package/__init__.py
@@ -0,0 +1 @@
+__version__ = "1.2.3"
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file_invalid_definition/pyproject.toml b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file_invalid_definition/pyproject.toml
new file mode 100644
index 00000000..7c6aa562
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file_invalid_definition/pyproject.toml
@@ -0,0 +1,19 @@
+[tool.poetry]
+name = "my-package"
+version = "1.2.3"
+description = "Some description."
+authors = [
+    "Poetry Maintainers "
+]
+license = "MIT"
+readme = "README.rst"
+
+[tool.poetry.dependencies]
+python = "^3.6"
+
+[tool.poetry.dev-dependencies]
+
+[tool.poetry.extras]
+
+[tool.poetry.scripts]
+invalid_definition = { reference = "bin/script.sh", type = "ffiillee" }
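
The deliberately bogus `type = "ffiillee"` above must fail schema validation. A sketch of the expected failure mode, following `test_invalid_script_files_definition` added below (fixture path assumed):

    from pathlib import Path

    import pytest

    from poetry.core.factory import Factory
    from poetry.core.masonry.builders.builder import Builder

    root = Path("vendor/poetry-core/tests/masonry/builders/fixtures"
                "/script_reference_file_invalid_definition")
    with pytest.raises(RuntimeError) as err:
        Builder(Factory().create_poetry(root))
    assert "configuration is invalid" in err.value.args[0]
    assert "[scripts.invalid_definition]" in err.value.args[0]
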
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file_missing/README.rst b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file_missing/README.rst
new file mode 100644
index 00000000..f7fe1547
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file_missing/README.rst
@@ -0,0 +1,2 @@
+My Package
+==========
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file_missing/my_package/__init__.py b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file_missing/my_package/__init__.py
new file mode 100644
index 00000000..10aa336c
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file_missing/my_package/__init__.py
@@ -0,0 +1 @@
+__version__ = "1.2.3"
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file_missing/pyproject.toml b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file_missing/pyproject.toml
new file mode 100644
index 00000000..973a94c1
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/script_reference_file_missing/pyproject.toml
@@ -0,0 +1,19 @@
+[tool.poetry]
+name = "my-package"
+version = "1.2.3"
+description = "Some description."
+authors = [
+    "Poetry Maintainers "
+]
+license = "MIT"
+readme = "README.rst"
+
+[tool.poetry.dependencies]
+python = "^3.6"
+
+[tool.poetry.dev-dependencies]
+
+[tool.poetry.extras]
+
+[tool.poetry.scripts]
+sh-script = { reference = "bin/script.sh", type = "file" }
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/split_source/lib_a/module_a/__init__.py b/vendor/poetry-core/tests/masonry/builders/fixtures/split_source/lib_a/module_a/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/split_source/lib_b/module_b/__init__.py b/vendor/poetry-core/tests/masonry/builders/fixtures/split_source/lib_b/module_b/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/split_source/pyproject.toml b/vendor/poetry-core/tests/masonry/builders/fixtures/split_source/pyproject.toml
new file mode 100644
index 00000000..f11bba04
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/split_source/pyproject.toml
@@ -0,0 +1,15 @@
+[tool.poetry]
+name = "split-source"
+version = "0.1"
+description = "Combine packages from different locations."
+authors = [
+    "Jan Harkes "
+]
+license = "MIT"
+packages = [
+    { include = "module_a", from = "lib_a" },
+    { include = "module_b", from = "lib_b" },
+]
+
+[tool.poetry.dependencies]
+python = "^3.6"
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/src_extended/setup.py b/vendor/poetry-core/tests/masonry/builders/fixtures/src_extended/setup.py
new file mode 100644
index 00000000..cd4c43cf
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/src_extended/setup.py
@@ -0,0 +1,27 @@
+from setuptools import setup
+
+package_dir = {"": "src"}
+
+packages = ["extended"]
+
+package_data = {"": ["*"]}
+
+setup_kwargs = {
+    "name": "extended",
+    "version": "0.1",
+    "description": "Some description.",
+    "long_description": "Module 1\n========\n",
+    "author": "Sébastien Eustace",
+    "author_email": "sebastien@eustace.io",
+    "maintainer": "None",
+    "maintainer_email": "None",
+    "url": "https://python-poetry.org/",
+    "package_dir": package_dir,
+    "packages": packages,
+    "package_data": package_data,
+}
+from build import *
+
+build(setup_kwargs)
+
+setup(**setup_kwargs)
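
The generated setup.py above ends with `from build import *` followed by `build(setup_kwargs)`: poetry's legacy build-script hook mutates the kwargs dict in place before `setup()` runs. A hypothetical build.py matching this fixture (the .c source path is assumed by analogy with the non-src `extended` fixture shown earlier):

    from setuptools import Extension

    # assumed source layout; the actual fixture's build.py may differ
    extensions = [Extension("extended.extended", ["src/extended/extended.c"])]

    def build(setup_kwargs):
        # invoked by the generated setup.py with the dict defined above
        setup_kwargs.update({"ext_modules": extensions})
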
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/with_include_inline_table/both.txt b/vendor/poetry-core/tests/masonry/builders/fixtures/with_include_inline_table/both.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/with_include_inline_table/pyproject.toml b/vendor/poetry-core/tests/masonry/builders/fixtures/with_include_inline_table/pyproject.toml
index 95b0949c..5309992f 100644
--- a/vendor/poetry-core/tests/masonry/builders/fixtures/with_include_inline_table/pyproject.toml
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/with_include_inline_table/pyproject.toml
@@ -24,6 +24,7 @@ packages = [
 
 include = [
     { path = "tests", format = "sdist" },
+    { path = "both.txt" },
     { path = "wheel_only.txt", format = "wheel" }
 ]
 
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/with_wildcard_dependency_constraint/my_package/__init__.py b/vendor/poetry-core/tests/masonry/builders/fixtures/with_wildcard_dependency_constraint/my_package/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry-core/tests/masonry/builders/fixtures/with_wildcard_dependency_constraint/pyproject.toml b/vendor/poetry-core/tests/masonry/builders/fixtures/with_wildcard_dependency_constraint/pyproject.toml
new file mode 100644
index 00000000..66268407
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/builders/fixtures/with_wildcard_dependency_constraint/pyproject.toml
@@ -0,0 +1,11 @@
+[tool.poetry]
+name = "my-package"
+version = "1.2.3"
+description = "Some description."
+authors = [
+    "People Everywhere "
+]
+
+[tool.poetry.dependencies]
+python = "^3.10"
+google-api-python-client = ">=1.8,!=2.0.*"
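
For the constraint above: `!=2.0.*` is a PEP 440 wildcard exclusion, so the whole 2.0 series is skipped while `>=1.8` still admits later releases. An illustration using the third-party `packaging` library (not poetry-core):

    from packaging.specifiers import SpecifierSet

    spec = SpecifierSet(">=1.8,!=2.0.*")
    assert spec.contains("1.8")
    assert not spec.contains("2.0.2")  # excluded by the wildcard
    assert spec.contains("2.1.0")
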
diff --git a/vendor/poetry-core/tests/masonry/builders/test_builder.py b/vendor/poetry-core/tests/masonry/builders/test_builder.py
index 57d41be3..6e3db706 100644
--- a/vendor/poetry-core/tests/masonry/builders/test_builder.py
+++ b/vendor/poetry-core/tests/masonry/builders/test_builder.py
@@ -1,17 +1,22 @@
-# -*- coding: utf-8 -*-
+from __future__ import annotations
+
 import sys
 
 from email.parser import Parser
+from pathlib import Path
+from typing import TYPE_CHECKING
 
 import pytest
 
 from poetry.core.factory import Factory
 from poetry.core.masonry.builders.builder import Builder
-from poetry.core.utils._compat import PY37
-from poetry.core.utils._compat import Path
 
 
-def test_builder_find_excluded_files(mocker):
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
+
+
+def test_builder_find_excluded_files(mocker: MockerFixture) -> None:
     p = mocker.patch("poetry.core.vcs.git.Git.get_ignored_files")
     p.return_value = []
 
@@ -23,10 +28,10 @@ def test_builder_find_excluded_files(mocker):
 
 
 @pytest.mark.xfail(
-    sys.platform == "win32" and not PY37,
+    sys.platform == "win32",
     reason="Windows is case insensitive for the most part",
 )
-def test_builder_find_case_sensitive_excluded_files(mocker):
+def test_builder_find_case_sensitive_excluded_files(mocker: MockerFixture) -> None:
     p = mocker.patch("poetry.core.vcs.git.Git.get_ignored_files")
     p.return_value = []
 
@@ -48,10 +53,12 @@ def test_builder_find_case_sensitive_excluded_files(mocker):
 
 
 @pytest.mark.xfail(
-    sys.platform == "win32" and not PY37,
+    sys.platform == "win32",
     reason="Windows is case insensitive for the most part",
 )
-def test_builder_find_invalid_case_sensitive_excluded_files(mocker):
+def test_builder_find_invalid_case_sensitive_excluded_files(
+    mocker: MockerFixture,
+) -> None:
     p = mocker.patch("poetry.core.vcs.git.Git.get_ignored_files")
     p.return_value = []
 
@@ -64,7 +71,7 @@ def test_builder_find_invalid_case_sensitive_excluded_files(mocker):
     assert {"my_package/Bar/foo/bar/Foo.py"} == builder.find_excluded_files()
 
 
-def test_get_metadata_content():
+def test_get_metadata_content() -> None:
     builder = Builder(
         Factory().create_poetry(Path(__file__).parent / "fixtures" / "complete")
     )
@@ -89,11 +96,11 @@ def test_get_metadata_content():
     assert classifiers == [
         "License :: OSI Approved :: MIT License",
         "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.10",
         "Programming Language :: Python :: 3.6",
         "Programming Language :: Python :: 3.7",
         "Programming Language :: Python :: 3.8",
         "Programming Language :: Python :: 3.9",
+        "Programming Language :: Python :: 3.10",
         "Topic :: Software Development :: Build Tools",
         "Topic :: Software Development :: Libraries :: Python Modules",
     ]
@@ -105,7 +112,8 @@ def test_get_metadata_content():
     assert requires == [
         "cachy[msgpack] (>=0.2.0,<0.3.0)",
         "cleo (>=0.6,<0.7)",
-        'pendulum (>=1.4,<2.0); (python_version ~= "2.7" and sys_platform == "win32" or python_version in "3.4 3.5") and (extra == "time")',
+        'pendulum (>=1.4,<2.0); (python_version ~= "2.7" and sys_platform == "win32" or'
+        ' python_version in "3.4 3.5") and (extra == "time")',
     ]
 
     urls = parsed.get_all("Project-URL")
@@ -116,7 +124,7 @@ def test_get_metadata_content():
     ]
 
 
-def test_metadata_homepage_default():
+def test_metadata_homepage_default() -> None:
     builder = Builder(
         Factory().create_poetry(Path(__file__).parent / "fixtures" / "simple_version")
     )
@@ -126,7 +134,7 @@ def test_metadata_homepage_default():
     assert metadata["Home-page"] is None
 
 
-def test_metadata_with_vcs_dependencies():
+def test_metadata_with_vcs_dependencies() -> None:
     builder = Builder(
         Factory().create_poetry(
             Path(__file__).parent / "fixtures" / "with_vcs_dependency"
@@ -137,10 +145,10 @@ def test_metadata_with_vcs_dependencies():
 
     requires_dist = metadata["Requires-Dist"]
 
-    assert "cleo @ git+https://github.com/sdispater/cleo.git@master" == requires_dist
+    assert requires_dist == "cleo @ git+https://github.com/sdispater/cleo.git@master"
 
 
-def test_metadata_with_url_dependencies():
+def test_metadata_with_url_dependencies() -> None:
     builder = Builder(
         Factory().create_poetry(
             Path(__file__).parent / "fixtures" / "with_url_dependency"
@@ -152,6 +160,140 @@ def test_metadata_with_url_dependencies():
     requires_dist = metadata["Requires-Dist"]
 
     assert (
-        "demo @ https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl"
-        == requires_dist
+        requires_dist
+        == "demo @"
+        " https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl"
+    )
+
+
+def test_missing_script_files_throws_error() -> None:
+    builder = Builder(
+        Factory().create_poetry(
+            Path(__file__).parent / "fixtures" / "script_reference_file_missing"
+        )
+    )
+
+    with pytest.raises(RuntimeError) as err:
+        builder.convert_script_files()
+
+    assert "is not found." in err.value.args[0]
+
+
+def test_invalid_script_files_definition() -> None:
+    with pytest.raises(RuntimeError) as err:
+        Builder(
+            Factory().create_poetry(
+                Path(__file__).parent
+                / "fixtures"
+                / "script_reference_file_invalid_definition"
+            )
+        )
+
+    assert "configuration is invalid" in err.value.args[0]
+    assert "[scripts.invalid_definition]" in err.value.args[0]
+
+
+@pytest.mark.parametrize(
+    "fixture",
+    [
+        "script_callable_legacy_table",
+    ],
+)
+def test_entrypoint_scripts_legacy_warns(fixture: str) -> None:
+    with pytest.warns(DeprecationWarning):
+        Builder(
+            Factory().create_poetry(Path(__file__).parent / "fixtures" / fixture)
+        ).convert_entry_points()
+
+
+@pytest.mark.parametrize(
+    "fixture, result",
+    [
+        (
+            "script_callable_legacy_table",
+            {
+                "console_scripts": [
+                    "extra-script-legacy = my_package.extra_legacy:main",
+                    "script-legacy = my_package.extra_legacy:main",
+                ]
+            },
+        ),
+        (
+            "script_callable_legacy_string",
+            {"console_scripts": ["script-legacy = my_package:main"]},
+        ),
+        (
+            "script_reference_console",
+            {
+                "console_scripts": [
+                    "extra-script = my_package.extra:main[time]",
+                    "script = my_package.extra:main",
+                ]
+            },
+        ),
+        (
+            "script_reference_file",
+            {},
+        ),
+    ],
+)
+@pytest.mark.filterwarnings("ignore::DeprecationWarning")
+def test_builder_convert_entry_points(
+    fixture: str, result: dict[str, list[str]]
+) -> None:
+    entry_points = Builder(
+        Factory().create_poetry(Path(__file__).parent / "fixtures" / fixture)
+    ).convert_entry_points()
+    assert entry_points == result
+
+
+@pytest.mark.parametrize(
+    "fixture, result",
+    [
+        (
+            "script_callable_legacy_table",
+            [],
+        ),
+        (
+            "script_callable_legacy_string",
+            [],
+        ),
+        (
+            "script_reference_console",
+            [],
+        ),
+        (
+            "script_reference_file",
+            [Path("bin") / "script.sh"],
+        ),
+    ],
+)
+def test_builder_convert_script_files(fixture: str, result: list[Path]) -> None:
+    project_root = Path(__file__).parent / "fixtures" / fixture
+    script_files = Builder(Factory().create_poetry(project_root)).convert_script_files()
+    assert [p.relative_to(project_root) for p in script_files] == result
+
+
+def test_metadata_with_readme_files() -> None:
+    test_path = Path(__file__).parent.parent.parent / "fixtures" / "with_readme_files"
+    builder = Builder(Factory().create_poetry(test_path))
+
+    metadata = Parser().parsestr(builder.get_metadata_content())
+
+    readme1 = test_path / "README-1.rst"
+    readme2 = test_path / "README-2.rst"
+    description = "\n".join([readme1.read_text(), readme2.read_text(), ""])
+
+    assert metadata.get_payload() == description
+
+
+def test_metadata_with_wildcard_dependency_constraint() -> None:
+    test_path = (
+        Path(__file__).parent / "fixtures" / "with_wildcard_dependency_constraint"
     )
+    builder = Builder(Factory().create_poetry(test_path))
+
+    metadata = Parser().parsestr(builder.get_metadata_content())
+
+    requires = metadata.get_all("Requires-Dist")
+    assert requires == ["google-api-python-client (>=1.8,!=2.0.*)"]
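
The entry-point strings asserted in the tests above follow the standard `module:attr [extras]` form (poetry-core emits them without the space). A sketch decoding one with the stdlib parser:

    from importlib.metadata import EntryPoint

    ep = EntryPoint("extra-script", "my_package.extra:main[time]", "console_scripts")
    assert ep.module == "my_package.extra"
    assert ep.attr == "main"
    assert ep.extras == ["time"]
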
diff --git a/vendor/poetry-core/tests/masonry/builders/test_complete.py b/vendor/poetry-core/tests/masonry/builders/test_complete.py
index e8b634ec..0517acd0 100644
--- a/vendor/poetry-core/tests/masonry/builders/test_complete.py
+++ b/vendor/poetry-core/tests/masonry/builders/test_complete.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
+from __future__ import annotations
 
 import ast
 import os
@@ -11,20 +10,26 @@
 import tempfile
 import zipfile
 
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
+from typing import Iterator
+
 import pytest
 
 from poetry.core import __version__
 from poetry.core.factory import Factory
-from poetry.core.masonry import Builder
-from poetry.core.utils._compat import Path
-from poetry.core.utils._compat import decode
+from poetry.core.masonry.builder import Builder
+
 
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
 
 fixtures_dir = Path(__file__).parent / "fixtures"
 
 
 @pytest.fixture(autouse=True)
-def setup():
+def setup() -> Iterator[None]:
     clear_samples_dist()
 
     yield
@@ -32,7 +37,7 @@ def setup():
     clear_samples_dist()
 
 
-def clear_samples_dist():
+def clear_samples_dist() -> None:
     for dist in fixtures_dir.glob("**/dist"):
         if dist.is_dir():
             shutil.rmtree(str(dist))
@@ -44,7 +49,7 @@ def clear_samples_dist():
     or platform.python_implementation().lower() == "pypy",
     reason="Disable test on Windows for Python <=3.6 and for PyPy",
 )
-def test_wheel_c_extension():
+def test_wheel_c_extension() -> None:
     module_path = fixtures_dir / "extended"
     builder = Builder(Factory().create_poetry(module_path))
     builder.build(fmt="all")
@@ -71,24 +76,22 @@ def test_wheel_c_extension():
     assert has_compiled_extension
 
     try:
-        wheel_data = decode(zip.read("extended-0.1.dist-info/WHEEL"))
+        wheel_data = zip.read("extended-0.1.dist-info/WHEEL").decode()
 
         assert (
             re.match(
-                """(?m)^\
+                f"""(?m)^\
 Wheel-Version: 1.0
-Generator: poetry {}
+Generator: poetry-core {__version__}
 Root-Is-Purelib: false
 Tag: cp[23]_?\\d+-cp[23]_?\\d+m?u?-.+
-$""".format(
-                    __version__
-                ),
+$""",
                 wheel_data,
             )
             is not None
         )
 
-        records = decode(zip.read("extended-0.1.dist-info/RECORD"))
+        records = zip.read("extended-0.1.dist-info/RECORD").decode()
 
         assert re.search(r"\s+extended/extended.*\.(so|pyd)", records) is not None
     finally:
@@ -101,7 +104,7 @@ def test_wheel_c_extension():
     or platform.python_implementation().lower() == "pypy",
     reason="Disable test on Windows for Python <=3.6 and for PyPy",
 )
-def test_wheel_c_extension_with_no_setup():
+def test_wheel_c_extension_with_no_setup() -> None:
     module_path = fixtures_dir / "extended_with_no_setup"
     builder = Builder(Factory().create_poetry(module_path))
     builder.build(fmt="all")
@@ -128,24 +131,22 @@ def test_wheel_c_extension_with_no_setup():
     assert has_compiled_extension
 
     try:
-        wheel_data = decode(zip.read("extended-0.1.dist-info/WHEEL"))
+        wheel_data = zip.read("extended-0.1.dist-info/WHEEL").decode()
 
         assert (
             re.match(
-                """(?m)^\
+                f"""(?m)^\
 Wheel-Version: 1.0
-Generator: poetry {}
+Generator: poetry-core {__version__}
 Root-Is-Purelib: false
 Tag: cp[23]_?\\d+-cp[23]_?\\d+m?u?-.+
-$""".format(
-                    __version__
-                ),
+$""",
                 wheel_data,
             )
             is not None
         )
 
-        records = decode(zip.read("extended-0.1.dist-info/RECORD"))
+        records = zip.read("extended-0.1.dist-info/RECORD").decode()
 
         assert re.search(r"\s+extended/extended.*\.(so|pyd)", records) is not None
     finally:
@@ -158,7 +159,7 @@ def test_wheel_c_extension_with_no_setup():
     or platform.python_implementation().lower() == "pypy",
     reason="Disable test on Windows for Python <=3.6 and for PyPy",
 )
-def test_wheel_c_extension_src_layout():
+def test_wheel_c_extension_src_layout() -> None:
     module_path = fixtures_dir / "src_extended"
     builder = Builder(Factory().create_poetry(module_path))
     builder.build(fmt="all")
@@ -185,31 +186,29 @@ def test_wheel_c_extension_src_layout():
     assert has_compiled_extension
 
     try:
-        wheel_data = decode(zip.read("extended-0.1.dist-info/WHEEL"))
+        wheel_data = zip.read("extended-0.1.dist-info/WHEEL").decode()
 
         assert (
             re.match(
-                """(?m)^\
+                f"""(?m)^\
 Wheel-Version: 1.0
-Generator: poetry {}
+Generator: poetry-core {__version__}
 Root-Is-Purelib: false
 Tag: cp[23]_?\\d+-cp[23]_?\\d+m?u?-.+
-$""".format(
-                    __version__
-                ),
+$""",
                 wheel_data,
             )
             is not None
         )
 
-        records = decode(zip.read("extended-0.1.dist-info/RECORD"))
+        records = zip.read("extended-0.1.dist-info/RECORD").decode()
 
         assert re.search(r"\s+extended/extended.*\.(so|pyd)", records) is not None
     finally:
         zip.close()
 
 
-def test_complete():
+def test_complete() -> None:
     module_path = fixtures_dir / "complete"
     builder = Builder(Factory().create_poetry(module_path))
     builder.build(fmt="all")
@@ -224,11 +223,16 @@ def test_complete():
 
     try:
         assert "my_package/sub_pgk1/extra_file.xml" not in zip.namelist()
+        assert "my-package-1.2.3.data/scripts/script.sh" in zip.namelist()
+        assert (
+            "Hello World"
+            in zip.read("my-package-1.2.3.data/scripts/script.sh").decode()
+        )
 
         entry_points = zip.read("my_package-1.2.3.dist-info/entry_points.txt")
 
         assert (
-            decode(entry_points.decode())
+            entry_points.decode()
             == """\
 [console_scripts]
 extra-script=my_package.extra:main[time]
@@ -237,20 +241,18 @@ def test_complete():
 
 """
         )
-        wheel_data = decode(zip.read("my_package-1.2.3.dist-info/WHEEL"))
+        wheel_data = zip.read("my_package-1.2.3.dist-info/WHEEL").decode()
 
         assert (
             wheel_data
-            == """\
+            == f"""\
 Wheel-Version: 1.0
-Generator: poetry {}
+Generator: poetry-core {__version__}
 Root-Is-Purelib: true
 Tag: py3-none-any
-""".format(
-                __version__
-            )
+"""
         )
-        wheel_data = decode(zip.read("my_package-1.2.3.dist-info/METADATA"))
+        wheel_data = zip.read("my_package-1.2.3.dist-info/METADATA").decode()
 
         assert (
             wheel_data
@@ -269,11 +271,11 @@ def test_complete():
 Requires-Python: >=3.6,<4.0
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.6
 Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
 Classifier: Topic :: Software Development :: Build Tools
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Provides-Extra: time
@@ -290,11 +292,32 @@ def test_complete():
 
 """
         )
+        actual_records = zip.read("my_package-1.2.3.dist-info/RECORD").decode()
+
+        # For some reason, the ordering of the files and the SHA hashes
+        # vary across operating systems and Python versions.
+        # So instead of a 1:1 assertion, let's do a slightly clunkier one:
+
+        expected_records = [
+            "my_package/__init__.py",
+            "my_package/data1/test.json",
+            "my_package/sub_pkg1/__init__.py",
+            "my_package/sub_pkg2/__init__.py",
+            "my_package/sub_pkg2/data2/data.json",
+            "my_package-1.2.3.dist-info/entry_points.txt",
+            "my_package-1.2.3.dist-info/LICENSE",
+            "my_package-1.2.3.dist-info/WHEEL",
+            "my_package-1.2.3.dist-info/METADATA",
+        ]
+
+        for expected_record in expected_records:
+            assert expected_record in actual_records
+
     finally:
         zip.close()
 
 
-def test_complete_no_vcs():
+def test_complete_no_vcs() -> None:
     # Copy the complete fixtures dir to a temporary directory
     module_path = fixtures_dir / "complete"
     temporary_dir = Path(tempfile.mkdtemp()) / "complete"
@@ -318,6 +341,7 @@ def test_complete_no_vcs():
         "my_package/sub_pkg1/__init__.py",
         "my_package/sub_pkg2/__init__.py",
         "my_package/sub_pkg2/data2/data.json",
+        "my-package-1.2.3.data/scripts/script.sh",
         "my_package/sub_pkg3/foo.py",
         "my_package-1.2.3.dist-info/entry_points.txt",
         "my_package-1.2.3.dist-info/LICENSE",
@@ -332,7 +356,7 @@ def test_complete_no_vcs():
         entry_points = zip.read("my_package-1.2.3.dist-info/entry_points.txt")
 
         assert (
-            decode(entry_points.decode())
+            entry_points.decode()
             == """\
 [console_scripts]
 extra-script=my_package.extra:main[time]
@@ -341,20 +365,18 @@ def test_complete_no_vcs():
 
 """
         )
-        wheel_data = decode(zip.read("my_package-1.2.3.dist-info/WHEEL"))
+        wheel_data = zip.read("my_package-1.2.3.dist-info/WHEEL").decode()
 
         assert (
             wheel_data
-            == """\
+            == f"""\
 Wheel-Version: 1.0
-Generator: poetry {}
+Generator: poetry-core {__version__}
 Root-Is-Purelib: true
 Tag: py3-none-any
-""".format(
-                __version__
-            )
+"""
         )
-        wheel_data = decode(zip.read("my_package-1.2.3.dist-info/METADATA"))
+        wheel_data = zip.read("my_package-1.2.3.dist-info/METADATA").decode()
 
         assert (
             wheel_data
@@ -373,11 +395,11 @@ def test_complete_no_vcs():
 Requires-Python: >=3.6,<4.0
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.6
 Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
 Classifier: Topic :: Software Development :: Build Tools
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Provides-Extra: time
@@ -398,7 +420,7 @@ def test_complete_no_vcs():
         zip.close()
 
 
-def test_module_src():
+def test_module_src() -> None:
     module_path = fixtures_dir / "source_file"
     builder = Builder(Factory().create_poetry(module_path))
     builder.build(fmt="all")
@@ -422,7 +444,7 @@ def test_module_src():
         zip.close()
 
 
-def test_package_src():
+def test_package_src() -> None:
     module_path = fixtures_dir / "source_package"
     builder = Builder(Factory().create_poetry(module_path))
     builder.build(fmt="all")
@@ -447,7 +469,33 @@ def test_package_src():
         zip.close()
 
 
-def test_package_with_include(mocker):
+def test_split_source() -> None:
+    module_path = fixtures_dir / "split_source"
+    builder = Builder(Factory().create_poetry(module_path))
+    builder.build(fmt="all")
+
+    sdist = module_path / "dist" / "split-source-0.1.tar.gz"
+
+    assert sdist.exists()
+
+    with tarfile.open(str(sdist), "r") as tar:
+        assert "split-source-0.1/lib_a/module_a/__init__.py" in tar.getnames()
+        assert "split-source-0.1/lib_b/module_b/__init__.py" in tar.getnames()
+
+    whl = module_path / "dist" / "split_source-0.1-py3-none-any.whl"
+
+    assert whl.exists()
+
+    zip = zipfile.ZipFile(str(whl))
+
+    try:
+        assert "module_a/__init__.py" in zip.namelist()
+        assert "module_b/__init__.py" in zip.namelist()
+    finally:
+        zip.close()
+
+
+def test_package_with_include(mocker: MockerFixture) -> None:
     module_path = fixtures_dir / "with-include"
 
     # Patch git module to return specific excluded files
@@ -495,11 +543,13 @@ def test_package_with_include(mocker):
         assert "with-include-1.2.3/for_wheel_only/__init__.py" not in names
         assert "with-include-1.2.3/src/src_package/__init__.py" in names
 
-        setup = tar.extractfile("with-include-1.2.3/setup.py").read()
+        file = tar.extractfile("with-include-1.2.3/setup.py")
+        assert file
+        setup = file.read()
         setup_ast = ast.parse(setup)
 
         setup_ast.body = [n for n in setup_ast.body if isinstance(n, ast.Assign)]
-        ns = {}
+        ns: dict[str, Any] = {}
         exec(compile(setup_ast, filename="setup.py", mode="exec"), ns)
         assert ns["package_dir"] == {"": "src"}
         assert ns["packages"] == [
@@ -530,3 +580,48 @@ def test_package_with_include(mocker):
         assert "package_with_include/__init__.py" in names
         assert "tests/__init__.py" not in names
         assert "src_package/__init__.py" in names
+
+
+def test_respect_format_for_explicit_included_files() -> None:
+    module_path = fixtures_dir / "exclude-whl-include-sdist"
+    builder = Builder(Factory().create_poetry(module_path))
+    builder.build(fmt="all")
+
+    sdist = module_path / "dist" / "exclude-whl-include-sdist-0.1.0.tar.gz"
+
+    assert sdist.exists()
+
+    with tarfile.open(str(sdist), "r") as tar:
+        names = tar.getnames()
+        assert (
+            "exclude-whl-include-sdist-0.1.0/exclude_whl_include_sdist/__init__.py"
+            in names
+        )
+        assert (
+            "exclude-whl-include-sdist-0.1.0/exclude_whl_include_sdist/compiled/source.c"
+            in names
+        )
+        assert (
+            "exclude-whl-include-sdist-0.1.0/exclude_whl_include_sdist/compiled/source.h"
+            in names
+        )
+        assert (
+            "exclude-whl-include-sdist-0.1.0/exclude_whl_include_sdist/cython_code.pyx"
+            in names
+        )
+        assert "exclude-whl-include-sdist-0.1.0/pyproject.toml" in names
+        assert "exclude-whl-include-sdist-0.1.0/setup.py" in names
+        assert "exclude-whl-include-sdist-0.1.0/PKG-INFO" in names
+
+    whl = module_path / "dist" / "exclude_whl_include_sdist-0.1.0-py3-none-any.whl"
+
+    assert whl.exists()
+
+    with zipfile.ZipFile(str(whl)) as z:
+        names = z.namelist()
+        assert "exclude_whl_include_sdist/__init__.py" in names
+        assert "exclude_whl_include_sdist/compiled/source.c" not in names
+        assert "exclude_whl_include_sdist/compiled/source.h" not in names
+        assert "exclude_whl_include_sdist/cython_code.pyx" not in names
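
On the RECORD assertion style above: RECORD is a CSV of (path, hash, size) rows, so an order-independent membership check only needs the first column. A tiny helper sketch with toy data:

    import csv

    def record_paths(record_text):
        """Return the set of file paths named in a RECORD file's text."""
        return {row[0] for row in csv.reader(record_text.splitlines()) if row}

    sample = "my_package/__init__.py,sha256=deadbeef,42\n"
    assert "my_package/__init__.py" in record_paths(sample)
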
diff --git a/vendor/poetry-core/tests/masonry/builders/test_sdist.py b/vendor/poetry-core/tests/masonry/builders/test_sdist.py
index 5c39eb57..66785c91 100644
--- a/vendor/poetry-core/tests/masonry/builders/test_sdist.py
+++ b/vendor/poetry-core/tests/masonry/builders/test_sdist.py
@@ -1,29 +1,35 @@
-# -*- coding: utf-8 -*-
+from __future__ import annotations
+
 import ast
 import gzip
+import hashlib
 import shutil
 import tarfile
 
 from email.parser import Parser
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
+from typing import Iterator
 
 import pytest
 
 from poetry.core.factory import Factory
 from poetry.core.masonry.builders.sdist import SdistBuilder
 from poetry.core.masonry.utils.package_include import PackageInclude
-from poetry.core.packages import Package
 from poetry.core.packages.dependency import Dependency
+from poetry.core.packages.project_package import ProjectPackage
 from poetry.core.packages.vcs_dependency import VCSDependency
-from poetry.core.utils._compat import Path
-from poetry.core.utils._compat import encode
-from poetry.core.utils._compat import to_str
 
 
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
+
 fixtures_dir = Path(__file__).parent / "fixtures"
 
 
 @pytest.fixture(autouse=True)
-def setup():
+def setup() -> Iterator[None]:
     clear_samples_dist()
 
     yield
@@ -31,18 +37,18 @@ def setup():
     clear_samples_dist()
 
 
-def clear_samples_dist():
+def clear_samples_dist() -> None:
     for dist in fixtures_dir.glob("**/dist"):
         if dist.is_dir():
             shutil.rmtree(str(dist))
 
 
-def project(name):
+def project(name: str) -> Path:
     return Path(__file__).parent / "fixtures" / name
 
 
-def test_convert_dependencies():
-    package = Package("foo", "1.2.3")
+def test_convert_dependencies() -> None:
+    package = ProjectPackage("foo", "1.2.3")
     result = SdistBuilder.convert_dependencies(
         package,
         [
@@ -58,15 +64,15 @@ def test_convert_dependencies():
         "A>=1.0,<2.0",
         "B>=1.0,<1.1",
         "C==1.2.3",
-        "D @ git+https://github.com/sdispater/d.git@master",
+        "D @ git+https://github.com/sdispater/d.git",
         "E>=1.0,<2.0",
         "F>=1.0,<2.0,!=1.3",
     ]
-    extras = {}
+    extras: dict[str, Any] = {}
 
     assert result == (main, extras)
 
-    package = Package("foo", "1.2.3")
+    package = ProjectPackage("foo", "1.2.3")
     package.extras = {"bar": [Dependency("A", "*")]}
 
     result = SdistBuilder.convert_dependencies(
@@ -108,7 +114,7 @@ def test_convert_dependencies():
     assert result == (main, extras)
 
 
-def test_make_setup():
+def test_make_setup() -> None:
     poetry = Factory().create_poetry(project("complete"))
 
     builder = SdistBuilder(poetry)
@@ -116,7 +122,7 @@ def test_make_setup():
     setup_ast = ast.parse(setup)
 
     setup_ast.body = [n for n in setup_ast.body if isinstance(n, ast.Assign)]
-    ns = {}
+    ns: dict[str, Any] = {}
     exec(compile(setup_ast, filename="setup.py", mode="exec"), ns)
     assert ns["packages"] == [
         "my_package",
@@ -132,6 +138,7 @@ def test_make_setup():
             "my-script = my_package:main",
         ]
     }
+    assert ns["scripts"] == [str(Path("bin") / "script.sh")]
     assert ns["extras_require"] == {
         'time:python_version ~= "2.7" and sys_platform == "win32" or python_version in "3.4 3.5"': [
             "pendulum>=1.4,<2.0"
@@ -139,7 +146,7 @@ def test_make_setup():
     }
 
 
-def test_make_pkg_info(mocker):
+def test_make_pkg_info(mocker: MockerFixture) -> None:
     get_metadata_content = mocker.patch(
         "poetry.core.masonry.builders.builder.Builder.get_metadata_content"
     )
@@ -151,18 +158,18 @@ def test_make_pkg_info(mocker):
     assert get_metadata_content.called
 
 
-def test_make_pkg_info_any_python():
+def test_make_pkg_info_any_python() -> None:
     poetry = Factory().create_poetry(project("module1"))
 
     builder = SdistBuilder(poetry)
     pkg_info = builder.build_pkg_info()
     p = Parser()
-    parsed = p.parsestr(to_str(pkg_info))
+    parsed = p.parsestr(pkg_info.decode())
 
     assert "Requires-Python" not in parsed
 
 
-def test_find_files_to_add():
+def test_find_files_to_add() -> None:
     poetry = Factory().create_poetry(project("complete"))
 
     builder = SdistBuilder(poetry)
@@ -172,6 +179,7 @@ def test_find_files_to_add():
         [
             Path("LICENSE"),
             Path("README.rst"),
+            Path("bin/script.sh"),
             Path("my_package/__init__.py"),
             Path("my_package/data1/test.json"),
             Path("my_package/sub_pkg1/__init__.py"),
@@ -183,7 +191,7 @@ def test_find_files_to_add():
     )
 
 
-def test_make_pkg_info_multi_constraints_dependency():
+def test_make_pkg_info_multi_constraints_dependency() -> None:
     poetry = Factory().create_poetry(
         Path(__file__).parent.parent.parent
         / "fixtures"
@@ -193,7 +201,7 @@ def test_make_pkg_info_multi_constraints_dependency():
     builder = SdistBuilder(poetry)
     pkg_info = builder.build_pkg_info()
     p = Parser()
-    parsed = p.parsestr(to_str(pkg_info))
+    parsed = p.parsestr(pkg_info.decode())
 
     requires = parsed.get_all("Requires-Dist")
     assert requires == [
@@ -202,7 +210,7 @@ def test_make_pkg_info_multi_constraints_dependency():
     ]
 
 
-def test_find_packages():
+def test_find_packages() -> None:
     poetry = Factory().create_poetry(project("complete"))
 
     builder = SdistBuilder(poetry)
@@ -239,7 +247,7 @@ def test_find_packages():
     assert pkg_data == {"": ["*"]}
 
 
-def test_package():
+def test_package() -> None:
     poetry = Factory().create_poetry(project("complete"))
 
     builder = SdistBuilder(poetry)
@@ -253,7 +261,25 @@ def test_package():
         assert "my-package-1.2.3/LICENSE" in tar.getnames()
 
 
-def test_setup_py_context():
+def test_sdist_reproducibility() -> None:
+    poetry = Factory().create_poetry(project("complete"))
+
+    hashes = set()
+
+    for _ in range(2):
+        builder = SdistBuilder(poetry)
+        builder.build()
+
+        sdist = fixtures_dir / "complete" / "dist" / "my-package-1.2.3.tar.gz"
+
+        assert sdist.exists()
+
+        hashes.add(hashlib.sha256(sdist.read_bytes()).hexdigest())
+
+    assert len(hashes) == 1
+
+
+def test_setup_py_context() -> None:
     poetry = Factory().create_poetry(project("complete"))
 
     builder = SdistBuilder(poetry)
@@ -269,8 +295,8 @@ def test_setup_py_context():
 
             with open(str(setup), "rb") as f:
                 # we convert to string and replace line endings here for compatibility
-                data = to_str(encode(f.read())).replace("\r\n", "\n")
-                assert data == to_str(builder.build_setup())
+                data = f.read().decode().replace("\r\n", "\n")
+                assert data == builder.build_setup().decode()
 
         assert not project_setup_py.exists()
     finally:
@@ -278,7 +304,7 @@ def test_setup_py_context():
             project_setup_py.unlink()
 
 
-def test_module():
+def test_module() -> None:
     poetry = Factory().create_poetry(project("module1"))
 
     builder = SdistBuilder(poetry)
@@ -292,7 +318,7 @@ def test_module():
         assert "module1-0.1/module1.py" in tar.getnames()
 
 
-def test_prelease():
+def test_prelease() -> None:
     poetry = Factory().create_poetry(project("prerelease"))
 
     builder = SdistBuilder(poetry)
@@ -303,8 +329,8 @@ def test_prelease():
     assert sdist.exists()
 
 
-@pytest.mark.parametrize("directory", [("extended"), ("extended_legacy_config")])
-def test_with_c_extensions(directory):
+@pytest.mark.parametrize("directory", ["extended", "extended_legacy_config"])
+def test_with_c_extensions(directory: str) -> None:
     poetry = Factory().create_poetry(project("extended"))
 
     builder = SdistBuilder(poetry)
@@ -319,7 +345,7 @@ def test_with_c_extensions(directory):
         assert "extended-0.1/extended/extended.c" in tar.getnames()
 
 
-def test_with_c_extensions_src_layout():
+def test_with_c_extensions_src_layout() -> None:
     poetry = Factory().create_poetry(project("src_extended"))
 
     builder = SdistBuilder(poetry)
@@ -334,7 +360,16 @@ def test_with_c_extensions_src_layout():
         assert "extended-0.1/src/extended/extended.c" in tar.getnames()
 
 
-def test_with_src_module_file():
+def test_with_build_script_in_subdir() -> None:
+    poetry = Factory().create_poetry(project("build_script_in_subdir"))
+
+    builder = SdistBuilder(poetry)
+    setup = builder.build_setup()
+    # should not error
+    ast.parse(setup)
+
+
+def test_with_src_module_file() -> None:
     poetry = Factory().create_poetry(project("source_file"))
 
     builder = SdistBuilder(poetry)
@@ -344,7 +379,7 @@ def test_with_src_module_file():
     setup_ast = ast.parse(setup)
 
     setup_ast.body = [n for n in setup_ast.body if isinstance(n, ast.Assign)]
-    ns = {}
+    ns: dict[str, Any] = {}
     exec(compile(setup_ast, filename="setup.py", mode="exec"), ns)
     assert ns["package_dir"] == {"": "src"}
     assert ns["modules"] == ["module_src"]
@@ -359,7 +394,7 @@ def test_with_src_module_file():
         assert "module-src-0.1/src/module_src.py" in tar.getnames()
 
 
-def test_with_src_module_dir():
+def test_with_src_module_dir() -> None:
     poetry = Factory().create_poetry(project("source_package"))
 
     builder = SdistBuilder(poetry)
@@ -369,7 +404,7 @@ def test_with_src_module_dir():
     setup_ast = ast.parse(setup)
 
     setup_ast.body = [n for n in setup_ast.body if isinstance(n, ast.Assign)]
-    ns = {}
+    ns: dict[str, Any] = {}
     exec(compile(setup_ast, filename="setup.py", mode="exec"), ns)
     assert ns["package_dir"] == {"": "src"}
     assert ns["packages"] == ["package_src"]
@@ -385,7 +420,7 @@ def test_with_src_module_dir():
         assert "package-src-0.1/src/package_src/module.py" in tar.getnames()
 
 
-def test_default_with_excluded_data(mocker):
+def test_default_with_excluded_data(mocker: MockerFixture) -> None:
     # Patch git module to return specific excluded files
     p = mocker.patch("poetry.core.vcs.git.Git.get_ignored_files")
     p.return_value = [
@@ -412,7 +447,7 @@ def test_default_with_excluded_data(mocker):
     setup_ast = ast.parse(setup)
 
     setup_ast.body = [n for n in setup_ast.body if isinstance(n, ast.Assign)]
-    ns = {}
+    ns: dict[str, Any] = {}
     exec(compile(setup_ast, filename="setup.py", mode="exec"), ns)
     assert "package_dir" not in ns
     assert ns["packages"] == ["my_package"]
@@ -441,10 +476,17 @@ def test_default_with_excluded_data(mocker):
         assert "my-package-1.2.3/PKG-INFO" in names
         # all last modified times should be set to a valid timestamp
         for tarinfo in tar.getmembers():
-            assert 0 < tarinfo.mtime
+            if tarinfo.name in [
+                "my-package-1.2.3/setup.py",
+                "my-package-1.2.3/PKG-INFO",
+            ]:
+                # generated files have timestamp set to 0
+                assert tarinfo.mtime == 0
+                continue
+            assert tarinfo.mtime > 0
 
 
-def test_src_excluded_nested_data():
+def test_src_excluded_nested_data() -> None:
     module_path = fixtures_dir / "exclude_nested_data_toml"
     poetry = Factory().create_poetry(module_path)
 
@@ -477,29 +519,29 @@ def test_src_excluded_nested_data():
         assert "my-package-1.2.3/my_package/public/item2/itemdata2.txt" not in names
 
 
-def test_proper_python_requires_if_two_digits_precision_version_specified():
+def test_proper_python_requires_if_two_digits_precision_version_specified() -> None:
     poetry = Factory().create_poetry(project("simple_version"))
 
     builder = SdistBuilder(poetry)
     pkg_info = builder.build_pkg_info()
     p = Parser()
-    parsed = p.parsestr(to_str(pkg_info))
+    parsed = p.parsestr(pkg_info.decode())
 
     assert parsed["Requires-Python"] == ">=3.6,<3.7"
 
 
-def test_proper_python_requires_if_three_digits_precision_version_specified():
+def test_proper_python_requires_if_three_digits_precision_version_specified() -> None:
     poetry = Factory().create_poetry(project("single_python"))
 
     builder = SdistBuilder(poetry)
     pkg_info = builder.build_pkg_info()
     p = Parser()
-    parsed = p.parsestr(to_str(pkg_info))
+    parsed = p.parsestr(pkg_info.decode())
 
     assert parsed["Requires-Python"] == "==2.7.15"
 
 
-def test_includes():
+def test_includes() -> None:
     poetry = Factory().create_poetry(project("with-include"))
 
     builder = SdistBuilder(poetry)
@@ -515,7 +557,7 @@ def test_includes():
         assert "with-include-1.2.3/notes.txt" in tar.getnames()
 
 
-def test_includes_with_inline_table():
+def test_includes_with_inline_table() -> None:
     poetry = Factory().create_poetry(project("with_include_inline_table"))
 
     builder = SdistBuilder(poetry)
@@ -532,12 +574,13 @@ def test_includes_with_inline_table():
     assert sdist.exists()
 
     with tarfile.open(str(sdist), "r") as tar:
+        assert "with-include-1.2.3/both.txt" in tar.getnames()
         assert "with-include-1.2.3/wheel_only.txt" not in tar.getnames()
         assert "with-include-1.2.3/tests/__init__.py" in tar.getnames()
         assert "with-include-1.2.3/tests/test_foo/test.py" in tar.getnames()
 
 
-def test_excluded_subpackage():
+def test_excluded_subpackage() -> None:
     poetry = Factory().create_poetry(project("excluded_subpackage"))
 
     builder = SdistBuilder(poetry)
@@ -546,13 +589,13 @@ def test_excluded_subpackage():
     setup_ast = ast.parse(setup)
 
     setup_ast.body = [n for n in setup_ast.body if isinstance(n, ast.Assign)]
-    ns = {}
+    ns: dict[str, Any] = {}
     exec(compile(setup_ast, filename="setup.py", mode="exec"), ns)
 
     assert ns["packages"] == ["example"]
 
 
-def test_sdist_package_pep_561_stub_only():
+def test_sdist_package_pep_561_stub_only() -> None:
     root = fixtures_dir / "pep_561_stub_only"
     poetry = Factory().create_poetry(root)
 
@@ -570,7 +613,7 @@ def test_sdist_package_pep_561_stub_only():
         assert "pep-561-stubs-0.1/pkg-stubs/subpkg/__init__.pyi" in names
 
 
-def test_sdist_disable_setup_py():
+def test_sdist_disable_setup_py() -> None:
     module_path = fixtures_dir / "disable_setup_py"
     poetry = Factory().create_poetry(module_path)
 
@@ -590,7 +633,7 @@ def test_sdist_disable_setup_py():
         }
 
 
-def test_sdist_mtime_zero():
+def test_sdist_mtime_zero() -> None:
     poetry = Factory().create_poetry(project("module1"))
 
     builder = SdistBuilder(poetry)
@@ -603,3 +646,19 @@ def test_sdist_mtime_zero():
     with gzip.open(str(sdist), "rb") as gz:
         gz.read(100)
         assert gz.mtime == 0
+
+
+def test_split_source() -> None:
+    root = fixtures_dir / "split_source"
+    poetry = Factory().create_poetry(root)
+
+    builder = SdistBuilder(poetry)
+
+    # Check setup.py
+    setup = builder.build_setup()
+    setup_ast = ast.parse(setup)
+
+    setup_ast.body = [n for n in setup_ast.body if isinstance(n, ast.Assign)]
+    ns: dict[str, Any] = {}
+    exec(compile(setup_ast, filename="setup.py", mode="exec"), ns)
+    assert "" in ns["package_dir"] and "module_b" in ns["package_dir"]
diff --git a/vendor/poetry-core/tests/masonry/builders/test_wheel.py b/vendor/poetry-core/tests/masonry/builders/test_wheel.py
index e300bb34..bde27857 100644
--- a/vendor/poetry-core/tests/masonry/builders/test_wheel.py
+++ b/vendor/poetry-core/tests/masonry/builders/test_wheel.py
@@ -1,21 +1,31 @@
-# -*- coding: utf-8 -*-
+from __future__ import annotations
+
 import os
 import shutil
 import zipfile
 
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
+from typing import Iterator
+from typing import TextIO
+
 import pytest
 
 from poetry.core.factory import Factory
 from poetry.core.masonry.builders.wheel import WheelBuilder
-from poetry.core.utils._compat import Path
 from tests.masonry.builders.test_sdist import project
 
 
+if TYPE_CHECKING:
+    from _pytest.monkeypatch import MonkeyPatch
+    from pytest_mock import MockerFixture
+
 fixtures_dir = Path(__file__).parent / "fixtures"
 
 
 @pytest.fixture(autouse=True)
-def setup():
+def setup() -> Iterator[None]:
     clear_samples_dist()
 
     yield
@@ -23,13 +33,13 @@ def setup():
     clear_samples_dist()
 
 
-def clear_samples_dist():
+def clear_samples_dist() -> None:
     for dist in fixtures_dir.glob("**/dist"):
         if dist.is_dir():
             shutil.rmtree(str(dist))
 
 
-def test_wheel_module():
+def test_wheel_module() -> None:
     module_path = fixtures_dir / "module1"
     WheelBuilder.make(Factory().create_poetry(module_path))
 
@@ -41,7 +51,7 @@ def test_wheel_module():
         assert "module1.py" in z.namelist()
 
 
-def test_wheel_package():
+def test_wheel_package() -> None:
     module_path = fixtures_dir / "complete"
     WheelBuilder.make(Factory().create_poetry(module_path))
 
@@ -53,7 +63,7 @@ def test_wheel_package():
         assert "my_package/sub_pkg1/__init__.py" in z.namelist()
 
 
-def test_wheel_prerelease():
+def test_wheel_prerelease() -> None:
     module_path = fixtures_dir / "prerelease"
     WheelBuilder.make(Factory().create_poetry(module_path))
 
@@ -62,7 +72,7 @@ def test_wheel_prerelease():
     assert whl.exists()
 
 
-def test_wheel_excluded_data():
+def test_wheel_excluded_data() -> None:
     module_path = fixtures_dir / "default_with_excluded_data_toml"
     WheelBuilder.make(Factory().create_poetry(module_path))
 
@@ -77,7 +87,7 @@ def test_wheel_excluded_data():
         assert "my_package/data/data1.txt" not in z.namelist()
 
 
-def test_wheel_excluded_nested_data():
+def test_wheel_excluded_nested_data() -> None:
     module_path = fixtures_dir / "exclude_nested_data_toml"
     poetry = Factory().create_poetry(module_path)
     WheelBuilder.make(poetry)
@@ -98,7 +108,7 @@ def test_wheel_excluded_nested_data():
         assert "my_package/public/item2/itemdata2.txt" not in z.namelist()
 
 
-def test_include_excluded_code():
+def test_include_excluded_code() -> None:
     module_path = fixtures_dir / "include_excluded_code"
     poetry = Factory().create_poetry(module_path)
     wb = WheelBuilder(poetry)
@@ -112,7 +122,7 @@ def test_include_excluded_code():
         assert "lib/my_package/generated.py" not in z.namelist()
 
 
-def test_wheel_localversionlabel():
+def test_wheel_localversionlabel() -> None:
     module_path = fixtures_dir / "localversionlabel"
     project = Factory().create_poetry(module_path)
     WheelBuilder.make(project)
@@ -125,7 +135,7 @@ def test_wheel_localversionlabel():
         assert local_version_string + ".dist-info/METADATA" in z.namelist()
 
 
-def test_wheel_package_src():
+def test_wheel_package_src() -> None:
     module_path = fixtures_dir / "source_package"
     WheelBuilder.make(Factory().create_poetry(module_path))
 
@@ -138,7 +148,7 @@ def test_wheel_package_src():
         assert "package_src/module.py" in z.namelist()
 
 
-def test_wheel_module_src():
+def test_wheel_module_src() -> None:
     module_path = fixtures_dir / "source_file"
     WheelBuilder.make(Factory().create_poetry(module_path))
 
@@ -150,7 +160,7 @@ def test_wheel_module_src():
         assert "module_src.py" in z.namelist()
 
 
-def test_dist_info_file_permissions():
+def test_dist_info_file_permissions() -> None:
     module_path = fixtures_dir / "complete"
     WheelBuilder.make(Factory().create_poetry(module_path))
 
@@ -173,7 +183,7 @@ def test_dist_info_file_permissions():
         )
 
 
-def test_wheel_includes_inline_table():
+def test_wheel_includes_inline_table() -> None:
     module_path = fixtures_dir / "with_include_inline_table"
     WheelBuilder.make(Factory().create_poetry(module_path))
 
@@ -182,6 +192,7 @@ def test_wheel_includes_inline_table():
     assert whl.exists()
 
     with zipfile.ZipFile(str(whl)) as z:
+        assert "both.txt" in z.namelist()
         assert "wheel_only.txt" in z.namelist()
         assert "notes.txt" not in z.namelist()
 
@@ -190,7 +201,7 @@ def test_wheel_includes_inline_table():
     "package",
     ["pep_561_stub_only", "pep_561_stub_only_partial", "pep_561_stub_only_src"],
 )
-def test_wheel_package_pep_561_stub_only(package):
+def test_wheel_package_pep_561_stub_only(package: str) -> None:
     root = fixtures_dir / package
     WheelBuilder.make(Factory().create_poetry(root))
 
@@ -204,7 +215,21 @@ def test_wheel_package_pep_561_stub_only(package):
         assert "pkg-stubs/subpkg/__init__.pyi" in z.namelist()
 
 
-def test_wheel_package_pep_561_stub_only_includes_typed_marker():
+def test_wheel_package_pep_561_stub_only_partial_namespace() -> None:
+    root = fixtures_dir / "pep_561_stub_only_partial_namespace"
+    WheelBuilder.make(Factory().create_poetry(root))
+
+    whl = root / "dist" / "pep_561_stubs-0.1-py3-none-any.whl"
+
+    assert whl.exists()
+
+    with zipfile.ZipFile(str(whl)) as z:
+        assert "pkg-stubs/module.pyi" in z.namelist()
+        assert "pkg-stubs/subpkg/__init__.pyi" in z.namelist()
+        assert "pkg-stubs/subpkg/py.typed" in z.namelist()
+
+
+def test_wheel_package_pep_561_stub_only_includes_typed_marker() -> None:
     root = fixtures_dir / "pep_561_stub_only_partial"
     WheelBuilder.make(Factory().create_poetry(root))
 
@@ -216,7 +241,7 @@ def test_wheel_package_pep_561_stub_only_includes_typed_marker():
         assert "pkg-stubs/py.typed" in z.namelist()
 
 
-def test_wheel_includes_licenses_in_correct_paths():
+def test_wheel_includes_licenses_in_correct_paths() -> None:
     root = fixtures_dir / "licenses_and_copying"
     WheelBuilder.make(Factory().create_poetry(root))
 
@@ -233,7 +258,7 @@ def test_wheel_includes_licenses_in_correct_paths():
         assert "my_package-1.2.3.dist-info/LICENSES/MIT.txt" in z.namelist()
 
 
-def test_wheel_with_file_with_comma():
+def test_wheel_with_file_with_comma() -> None:
     root = fixtures_dir / "comma_file"
     WheelBuilder.make(Factory().create_poetry(root))
 
@@ -246,7 +271,7 @@ def test_wheel_with_file_with_comma():
         assert '\n"comma_file/a,b.py"' in records.decode()
 
 
-def test_default_src_with_excluded_data(mocker):
+def test_default_src_with_excluded_data(mocker: MockerFixture) -> None:
     # Patch git module to return specific excluded files
     p = mocker.patch("poetry.core.vcs.git.Git.get_ignored_files")
     p.return_value = [
@@ -287,15 +312,15 @@ def test_default_src_with_excluded_data(mocker):
         assert "my_package/data/sub_data/data3.txt" in names
 
 
-def test_wheel_file_is_closed(monkeypatch):
+def test_wheel_file_is_closed(monkeypatch: MonkeyPatch) -> None:
     """Confirm that wheel zip files are explicitly closed."""
 
-    # Using a list is a hack for Python 2.7 compatibility.
+    # A one-element list lets capturing_fdopen store the handle without nonlocal.
-    fd_file = [None]
+    fd_file: list[TextIO | None] = [None]
 
     real_fdopen = os.fdopen
 
-    def capturing_fdopen(*args, **kwargs):
+    def capturing_fdopen(*args: Any, **kwargs: Any) -> TextIO | None:
         fd_file[0] = real_fdopen(*args, **kwargs)
         return fd_file[0]
 
diff --git a/vendor/poetry-core/tests/masonry/test_api.py b/vendor/poetry-core/tests/masonry/test_api.py
index 15f5d69e..3bec5d45 100644
--- a/vendor/poetry-core/tests/masonry/test_api.py
+++ b/vendor/poetry-core/tests/masonry/test_api.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
+from __future__ import annotations
 
 import os
 import platform
@@ -7,20 +6,20 @@
 import zipfile
 
 from contextlib import contextmanager
+from pathlib import Path
+from typing import Iterator
 
 import pytest
 
 from poetry.core import __version__
 from poetry.core.masonry import api
-from poetry.core.utils._compat import Path
-from poetry.core.utils._compat import decode
 from poetry.core.utils.helpers import temporary_directory
 from tests.testutils import validate_sdist_contents
 from tests.testutils import validate_wheel_contents
 
 
 @contextmanager
-def cwd(directory):
+def cwd(directory: str | Path) -> Iterator[None]:
     prev = os.getcwd()
     os.chdir(str(directory))
     try:
@@ -32,19 +31,19 @@ def cwd(directory):
 fixtures = os.path.join(os.path.dirname(__file__), "builders", "fixtures")
 
 
-def test_get_requires_for_build_wheel():
-    expected = []
+def test_get_requires_for_build_wheel() -> None:
+    expected: list[str] = []
     with cwd(os.path.join(fixtures, "complete")):
         assert api.get_requires_for_build_wheel() == expected
 
 
-def test_get_requires_for_build_sdist():
-    expected = []
+def test_get_requires_for_build_sdist() -> None:
+    expected: list[str] = []
     with cwd(os.path.join(fixtures, "complete")):
         assert api.get_requires_for_build_sdist() == expected
 
 
-def test_build_wheel():
+def test_build_wheel() -> None:
     with temporary_directory() as tmp_dir, cwd(os.path.join(fixtures, "complete")):
         filename = api.build_wheel(tmp_dir)
         validate_wheel_contents(
@@ -55,7 +54,7 @@ def test_build_wheel():
         )
 
 
-def test_build_wheel_with_include():
+def test_build_wheel_with_include() -> None:
     with temporary_directory() as tmp_dir, cwd(os.path.join(fixtures, "with-include")):
         filename = api.build_wheel(tmp_dir)
         validate_wheel_contents(
@@ -66,14 +65,14 @@ def test_build_wheel_with_include():
         )
 
 
-def test_build_wheel_with_bad_path_dev_dep_succeeds():
+def test_build_wheel_with_bad_path_dev_dep_succeeds() -> None:
     with temporary_directory() as tmp_dir, cwd(
         os.path.join(fixtures, "with_bad_path_dev_dep")
     ):
         api.build_wheel(tmp_dir)
 
 
-def test_build_wheel_with_bad_path_dep_fails():
+def test_build_wheel_with_bad_path_dep_fails() -> None:
     with pytest.raises(ValueError) as err, temporary_directory() as tmp_dir, cwd(
         os.path.join(fixtures, "with_bad_path_dep")
     ):
@@ -87,7 +86,7 @@ def test_build_wheel_with_bad_path_dep_fails():
     or platform.python_implementation().lower() == "pypy",
     reason="Disable test on Windows for Python <=3.6 and for PyPy",
 )
-def test_build_wheel_extended():
+def test_build_wheel_extended() -> None:
     with temporary_directory() as tmp_dir, cwd(os.path.join(fixtures, "extended")):
         filename = api.build_wheel(tmp_dir)
         whl = Path(tmp_dir) / filename
@@ -95,7 +94,7 @@ def test_build_wheel_extended():
         validate_wheel_contents(name="extended", version="0.1", path=whl.as_posix())
 
 
-def test_build_sdist():
+def test_build_sdist() -> None:
     with temporary_directory() as tmp_dir, cwd(os.path.join(fixtures, "complete")):
         filename = api.build_sdist(tmp_dir)
         validate_sdist_contents(
@@ -106,7 +105,7 @@ def test_build_sdist():
         )
 
 
-def test_build_sdist_with_include():
+def test_build_sdist_with_include() -> None:
     with temporary_directory() as tmp_dir, cwd(os.path.join(fixtures, "with-include")):
         filename = api.build_sdist(tmp_dir)
         validate_sdist_contents(
@@ -117,14 +116,14 @@ def test_build_sdist_with_include():
         )
 
 
-def test_build_sdist_with_bad_path_dev_dep_succeeds():
+def test_build_sdist_with_bad_path_dev_dep_succeeds() -> None:
     with temporary_directory() as tmp_dir, cwd(
         os.path.join(fixtures, "with_bad_path_dev_dep")
     ):
         api.build_sdist(tmp_dir)
 
 
-def test_build_sdist_with_bad_path_dep_fails():
+def test_build_sdist_with_bad_path_dep_fails() -> None:
     with pytest.raises(ValueError) as err, temporary_directory() as tmp_dir, cwd(
         os.path.join(fixtures, "with_bad_path_dep")
     ):
@@ -132,7 +131,7 @@ def test_build_sdist_with_bad_path_dep_fails():
     assert "does not exist" in str(err.value)
 
 
-def test_prepare_metadata_for_build_wheel():
+def test_prepare_metadata_for_build_wheel() -> None:
     entry_points = """\
 [console_scripts]
 extra-script=my_package.extra:main[time]
@@ -140,14 +139,12 @@ def test_prepare_metadata_for_build_wheel():
 my-script=my_package:main
 
 """
-    wheel_data = """\
+    wheel_data = f"""\
 Wheel-Version: 1.0
-Generator: poetry {}
+Generator: poetry-core {__version__}
 Root-Is-Purelib: true
 Tag: py3-none-any
-""".format(
-        __version__
-    )
+"""
     metadata = """\
 Metadata-Version: 2.1
 Name: my-package
@@ -163,11 +160,11 @@ def test_prepare_metadata_for_build_wheel():
 Requires-Python: >=3.6,<4.0
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.6
 Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
 Classifier: Topic :: Software Development :: Build Tools
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Provides-Extra: time
@@ -186,7 +183,7 @@ def test_prepare_metadata_for_build_wheel():
     with temporary_directory() as tmp_dir, cwd(os.path.join(fixtures, "complete")):
         dirname = api.prepare_metadata_for_build_wheel(tmp_dir)
 
-        assert "my_package-1.2.3.dist-info" == dirname
+        assert dirname == "my_package-1.2.3.dist-info"
 
         dist_info = Path(tmp_dir, dirname)
 
@@ -195,23 +192,23 @@ def test_prepare_metadata_for_build_wheel():
         assert (dist_info / "METADATA").exists()
 
         with (dist_info / "entry_points.txt").open(encoding="utf-8") as f:
-            assert entry_points == decode(f.read())
+            assert entry_points == f.read()
 
         with (dist_info / "WHEEL").open(encoding="utf-8") as f:
-            assert wheel_data == decode(f.read())
+            assert wheel_data == f.read()
 
         with (dist_info / "METADATA").open(encoding="utf-8") as f:
-            assert metadata == decode(f.read())
+            assert metadata == f.read()
 
 
-def test_prepare_metadata_for_build_wheel_with_bad_path_dev_dep_succeeds():
+def test_prepare_metadata_for_build_wheel_with_bad_path_dev_dep_succeeds() -> None:
     with temporary_directory() as tmp_dir, cwd(
         os.path.join(fixtures, "with_bad_path_dev_dep")
     ):
         api.prepare_metadata_for_build_wheel(tmp_dir)
 
 
-def test_prepare_metadata_for_build_wheel_with_bad_path_dep_succeeds():
+def test_prepare_metadata_for_build_wheel_with_bad_path_dep_succeeds() -> None:
     with pytest.raises(ValueError) as err, temporary_directory() as tmp_dir, cwd(
         os.path.join(fixtures, "with_bad_path_dep")
     ):
@@ -219,7 +216,7 @@ def test_prepare_metadata_for_build_wheel_with_bad_path_dep_succeeds():
     assert "does not exist" in str(err.value)
 
 
-def test_build_editable_wheel():
+def test_build_editable_wheel() -> None:
     pkg_dir = Path(fixtures) / "complete"
 
     with temporary_directory() as tmp_dir, cwd(pkg_dir):
@@ -227,12 +224,13 @@ def test_build_editable_wheel():
         wheel_pth = Path(tmp_dir) / filename
 
         validate_wheel_contents(
-            name="my_package", version="1.2.3", path=str(wheel_pth),
+            name="my_package",
+            version="1.2.3",
+            path=str(wheel_pth),
         )
 
-        with zipfile.ZipFile(str(wheel_pth)) as z:
+        with zipfile.ZipFile(wheel_pth) as z:
             namelist = z.namelist()
 
             assert "my_package.pth" in namelist
             assert pkg_dir.as_posix() == z.read("my_package.pth").decode().strip()
-            assert not any(file for file in namelist if file.startswith("my_package/"))
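
Background for the my_package.pth assertion just above: an editable wheel copies no package sources at all (hence the removed namelist check). It ships a single .pth file, and at start-up the site module adds each existing directory listed in that file to sys.path, so imports resolve straight from the project checkout. A minimal illustration with hypothetical paths:

    import sys
    from pathlib import Path

    site_packages = Path("/tmp/venv/lib/python3.10/site-packages")  # hypothetical
    project_dir = "/home/user/src/my_package"                       # hypothetical

    # The whole editable install is this one file.
    site_packages.mkdir(parents=True, exist_ok=True)
    (site_packages / "my_package.pth").write_text(project_dir + "\n")

    # On the next interpreter start, site.py effectively does:
    sys.path.append(project_dir)
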
diff --git a/vendor/poetry-core/tests/masonry/utils/fixtures/pep_561_stub_only/bad/module.pyi b/vendor/poetry-core/tests/masonry/utils/fixtures/pep_561_stub_only/bad/module.pyi
index f85a07d4..d79e6e39 100644
--- a/vendor/poetry-core/tests/masonry/utils/fixtures/pep_561_stub_only/bad/module.pyi
+++ b/vendor/poetry-core/tests/masonry/utils/fixtures/pep_561_stub_only/bad/module.pyi
@@ -1,5 +1,4 @@
 """Example module"""
 from typing import Tuple
 
-
 version_info = Tuple[int, int, int]
diff --git a/vendor/poetry-core/tests/masonry/utils/fixtures/pep_561_stub_only/good-stubs/module.pyi b/vendor/poetry-core/tests/masonry/utils/fixtures/pep_561_stub_only/good-stubs/module.pyi
index f85a07d4..d79e6e39 100644
--- a/vendor/poetry-core/tests/masonry/utils/fixtures/pep_561_stub_only/good-stubs/module.pyi
+++ b/vendor/poetry-core/tests/masonry/utils/fixtures/pep_561_stub_only/good-stubs/module.pyi
@@ -1,5 +1,4 @@
 """Example module"""
 from typing import Tuple
 
-
 version_info = Tuple[int, int, int]
diff --git a/vendor/poetry-core/tests/masonry/utils/fixtures/pep_561_stub_only_partial_namespace/good-stubs/module.pyi b/vendor/poetry-core/tests/masonry/utils/fixtures/pep_561_stub_only_partial_namespace/good-stubs/module.pyi
new file mode 100644
index 00000000..d79e6e39
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/utils/fixtures/pep_561_stub_only_partial_namespace/good-stubs/module.pyi
@@ -0,0 +1,4 @@
+"""Example module"""
+from typing import Tuple
+
+version_info = Tuple[int, int, int]
diff --git a/vendor/poetry-core/tests/masonry/utils/fixtures/pep_561_stub_only_partial_namespace/good-stubs/subpkg/__init__.pyi b/vendor/poetry-core/tests/masonry/utils/fixtures/pep_561_stub_only_partial_namespace/good-stubs/subpkg/__init__.pyi
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry-core/tests/masonry/utils/fixtures/pep_561_stub_only_partial_namespace/good-stubs/subpkg/py.typed b/vendor/poetry-core/tests/masonry/utils/fixtures/pep_561_stub_only_partial_namespace/good-stubs/subpkg/py.typed
new file mode 100644
index 00000000..b648ac92
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/utils/fixtures/pep_561_stub_only_partial_namespace/good-stubs/subpkg/py.typed
@@ -0,0 +1 @@
+partial
diff --git a/vendor/poetry-core/tests/masonry/utils/test_helpers.py b/vendor/poetry-core/tests/masonry/utils/test_helpers.py
new file mode 100644
index 00000000..8be13c40
--- /dev/null
+++ b/vendor/poetry-core/tests/masonry/utils/test_helpers.py
@@ -0,0 +1,36 @@
+from __future__ import annotations
+
+import pytest
+
+from poetry.core.masonry.utils.helpers import escape_name
+from poetry.core.masonry.utils.helpers import escape_version
+
+
+@pytest.mark.parametrize(
+    "version,expected",
+    [
+        ("1.2.3", "1.2.3"),
+        ("1.2.3_1", "1.2.3_1"),
+        ("1.2.3-1", "1.2.3_1"),
+        ("1.2.3-1", "1.2.3_1"),
+        ("2022.2", "2022.2"),
+        ("12.20.12-----451---14-1-4-41", "12.20.12_451_14_1_4_41"),
+        ("1.0b2.dev1", "1.0b2.dev1"),
+        ("1.0+abc.7", "1.0+abc.7"),
+    ],
+)
+def test_escape_version(version: str, expected: str) -> None:
+    assert escape_version(version) == expected
+
+
+@pytest.mark.parametrize(
+    "name,expected",
+    [
+        ("foo", "foo"),
+        ("foo-bar", "foo_bar"),
+        ("FOO-bAr", "foo_bar"),
+        ("foo.bar", "foo_bar"),
+        ("foo123-ba---.r", "foo123_ba_r"),
+    ],
+)
+def test_escape_name(name: str, expected: str) -> None:
+    assert escape_name(name) == expected
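
For readers of the new table above: the expected values follow the wheel filename escaping rules, where runs of '-', '_' and '.' in a distribution name collapse to a single underscore and the name is lowercased, while versions only normalize runs of dashes. A rough re-implementation that reproduces the table (a sketch, not poetry-core's actual code):

    import re

    def escape_name_sketch(name: str) -> str:
        # Collapse separator runs and lowercase, per the wheel naming rules.
        return re.sub(r"[-_.]+", "_", name).lower()

    def escape_version_sketch(version: str) -> str:
        # Only dash runs are rewritten; "+local" and ".dev0" parts survive.
        return re.sub(r"-+", "_", version)

    assert escape_name_sketch("foo123-ba---.r") == "foo123_ba_r"
    assert escape_version_sketch("12.20.12-----451---14-1-4-41") == "12.20.12_451_14_1_4_41"
    assert escape_version_sketch("1.0+abc.7") == "1.0+abc.7"
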
diff --git a/vendor/poetry-core/tests/masonry/utils/test_package_include.py b/vendor/poetry-core/tests/masonry/utils/test_package_include.py
index 0db1ff6d..913a4057 100644
--- a/vendor/poetry-core/tests/masonry/utils/test_package_include.py
+++ b/vendor/poetry-core/tests/masonry/utils/test_package_include.py
@@ -1,14 +1,17 @@
+from __future__ import annotations
+
+from pathlib import Path
+
 import pytest
 
 from poetry.core.masonry.utils.package_include import PackageInclude
-from poetry.core.utils._compat import Path
 
 
 fixtures_dir = Path(__file__).parent / "fixtures"
 with_includes = fixtures_dir / "with_includes"
 
 
-def test_package_include_with_multiple_dirs():
+def test_package_include_with_multiple_dirs() -> None:
     pkg_include = PackageInclude(base=fixtures_dir, include="with_includes")
     assert pkg_include.elements == [
         with_includes / "__init__.py",
@@ -23,12 +26,12 @@ def test_package_include_with_multiple_dirs():
     ]
 
 
-def test_package_include_with_simple_dir():
+def test_package_include_with_simple_dir() -> None:
     pkg_include = PackageInclude(base=with_includes, include="bar")
     assert pkg_include.elements == [with_includes / "bar/baz.py"]
 
 
-def test_package_include_with_nested_dir():
+def test_package_include_with_nested_dir() -> None:
     pkg_include = PackageInclude(base=with_includes, include="extra_package/**/*.py")
     assert pkg_include.elements == [
         with_includes / "extra_package/some_dir/foo.py",
@@ -36,14 +39,14 @@ def test_package_include_with_nested_dir():
     ]
 
 
-def test_package_include_with_no_python_files_in_dir():
+def test_package_include_with_no_python_files_in_dir() -> None:
     with pytest.raises(ValueError) as e:
         PackageInclude(base=with_includes, include="not_a_python_pkg")
 
     assert str(e.value) == "not_a_python_pkg is not a package."
 
 
-def test_package_include_with_non_existent_directory():
+def test_package_include_with_non_existent_directory() -> None:
     with pytest.raises(ValueError) as e:
         PackageInclude(base=with_includes, include="not_a_dir")
 
@@ -52,7 +55,7 @@ def test_package_include_with_non_existent_directory():
     assert str(e.value) == err_str
 
 
-def test_pep_561_stub_only_package_good_name_suffix():
+def test_pep_561_stub_only_package_good_name_suffix() -> None:
     pkg_include = PackageInclude(
         base=fixtures_dir / "pep_561_stub_only", include="good-stubs"
     )
@@ -62,7 +65,20 @@ def test_pep_561_stub_only_package_good_name_suffix():
     ]
 
 
-def test_pep_561_stub_only_package_bad_name_suffix():
+def test_pep_561_stub_only_partial_namespace_package_good_name_suffix() -> None:
+    pkg_include = PackageInclude(
+        base=fixtures_dir / "pep_561_stub_only_partial_namespace", include="good-stubs"
+    )
+    assert pkg_include.elements == [
+        fixtures_dir / "pep_561_stub_only_partial_namespace/good-stubs/module.pyi",
+        fixtures_dir / "pep_561_stub_only_partial_namespace/good-stubs/subpkg/",
+        fixtures_dir
+        / "pep_561_stub_only_partial_namespace/good-stubs/subpkg/__init__.pyi",
+        fixtures_dir / "pep_561_stub_only_partial_namespace/good-stubs/subpkg/py.typed",
+    ]
+
+
+def test_pep_561_stub_only_package_bad_name_suffix() -> None:
     with pytest.raises(ValueError) as e:
         PackageInclude(base=fixtures_dir / "pep_561_stub_only", include="bad")
 
diff --git a/vendor/poetry-core/tests/packages/constraints/test_constraint.py b/vendor/poetry-core/tests/packages/constraints/test_constraint.py
index 335ae465..b93497d1 100644
--- a/vendor/poetry-core/tests/packages/constraints/test_constraint.py
+++ b/vendor/poetry-core/tests/packages/constraints/test_constraint.py
@@ -1,10 +1,21 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+
+from poetry.core.packages.constraints import AnyConstraint
 from poetry.core.packages.constraints.constraint import Constraint
 from poetry.core.packages.constraints.empty_constraint import EmptyConstraint
 from poetry.core.packages.constraints.multi_constraint import MultiConstraint
 from poetry.core.packages.constraints.union_constraint import UnionConstraint
 
 
-def test_allows():
+if TYPE_CHECKING:
+    from poetry.core.packages.constraints import BaseConstraint
+
+
+def test_allows() -> None:
     c = Constraint("win32")
 
     assert c.allows(Constraint("win32"))
@@ -16,7 +27,7 @@ def test_allows():
     assert c.allows(Constraint("linux"))
 
 
-def test_allows_any():
+def test_allows_any() -> None:
     c = Constraint("win32")
 
     assert c.allows_any(Constraint("win32"))
@@ -32,7 +43,7 @@ def test_allows_any():
     assert c.allows_any(Constraint("linux", "!="))
 
 
-def test_allows_all():
+def test_allows_all() -> None:
     c = Constraint("win32")
 
     assert c.allows_all(Constraint("win32"))
@@ -41,49 +52,117 @@ def test_allows_all():
     assert not c.allows_all(UnionConstraint(Constraint("win32"), Constraint("linux")))
 
 
-def test_intersect():
-    c = Constraint("win32")
-
-    intersection = c.intersect(Constraint("linux"))
-    assert intersection == EmptyConstraint()
-
-    intersection = c.intersect(
-        UnionConstraint(Constraint("win32"), Constraint("linux"))
-    )
-    assert intersection == Constraint("win32")
-
-    intersection = c.intersect(
-        UnionConstraint(Constraint("linux"), Constraint("linux2"))
-    )
-    assert intersection == EmptyConstraint()
-
-    intersection = c.intersect(Constraint("linux", "!="))
-    assert intersection == c
-
-    c = Constraint("win32", "!=")
-
-    intersection = c.intersect(Constraint("linux", "!="))
-    assert intersection == MultiConstraint(
-        Constraint("win32", "!="), Constraint("linux", "!=")
-    )
-
-
-def test_union():
-    c = Constraint("win32")
-
-    union = c.union(Constraint("linux"))
-    assert union == UnionConstraint(Constraint("win32"), Constraint("linux"))
-
-    union = c.union(UnionConstraint(Constraint("win32"), Constraint("linux")))
-    assert union == UnionConstraint(Constraint("win32"), Constraint("linux"))
-
-    union = c.union(UnionConstraint(Constraint("linux"), Constraint("linux2")))
-    assert union == UnionConstraint(
-        Constraint("win32"), Constraint("linux"), Constraint("linux2")
-    )
-
-
-def test_difference():
+@pytest.mark.parametrize(
+    ("constraint1", "constraint2", "expected"),
+    [
+        (
+            Constraint("win32"),
+            Constraint("win32"),
+            Constraint("win32"),
+        ),
+        (
+            Constraint("win32"),
+            Constraint("linux"),
+            EmptyConstraint(),
+        ),
+        (
+            Constraint("win32"),
+            UnionConstraint(Constraint("win32"), Constraint("linux")),
+            Constraint("win32"),
+        ),
+        (
+            Constraint("win32"),
+            UnionConstraint(Constraint("linux"), Constraint("linux2")),
+            EmptyConstraint(),
+        ),
+        (
+            Constraint("win32"),
+            Constraint("linux", "!="),
+            Constraint("win32"),
+        ),
+        (
+            Constraint("win32", "!="),
+            Constraint("linux"),
+            Constraint("linux"),
+        ),
+        (
+            Constraint("win32", "!="),
+            Constraint("linux", "!="),
+            MultiConstraint(Constraint("win32", "!="), Constraint("linux", "!=")),
+        ),
+        (
+            UnionConstraint(Constraint("win32"), Constraint("linux")),
+            UnionConstraint(Constraint("win32"), Constraint("darwin")),
+            Constraint("win32"),
+        ),
+        (
+            UnionConstraint(Constraint("win32"), Constraint("linux")),
+            MultiConstraint(Constraint("win32", "!="), Constraint("darwin", "!=")),
+            Constraint("linux"),
+        ),
+    ],
+)
+def test_intersect(
+    constraint1: BaseConstraint,
+    constraint2: BaseConstraint,
+    expected: BaseConstraint,
+) -> None:
+    intersection = constraint1.intersect(constraint2)
+    assert intersection == expected
+
+
+@pytest.mark.parametrize(
+    ("constraint1", "constraint2", "expected"),
+    [
+        (
+            Constraint("win32"),
+            Constraint("win32"),
+            Constraint("win32"),
+        ),
+        (
+            Constraint("win32"),
+            Constraint("linux"),
+            UnionConstraint(Constraint("win32"), Constraint("linux")),
+        ),
+        (
+            Constraint("win32"),
+            UnionConstraint(Constraint("win32"), Constraint("linux")),
+            UnionConstraint(Constraint("win32"), Constraint("linux")),
+        ),
+        (
+            Constraint("win32"),
+            UnionConstraint(Constraint("linux"), Constraint("linux2")),
+            UnionConstraint(
+                Constraint("win32"), Constraint("linux"), Constraint("linux2")
+            ),
+        ),
+        (
+            Constraint("win32"),
+            Constraint("linux", "!="),
+            Constraint("linux", "!="),
+        ),
+        (
+            Constraint("win32", "!="),
+            Constraint("linux"),
+            Constraint("win32", "!="),
+        ),
+        (
+            Constraint("win32", "!="),
+            Constraint("linux", "!="),
+            AnyConstraint(),
+        ),
+    ],
+)
+def test_union(
+    constraint1: BaseConstraint,
+    constraint2: BaseConstraint,
+    expected: BaseConstraint,
+) -> None:
+    union = constraint1.union(constraint2)
+    assert union == expected
+
+
+def test_difference() -> None:
     c = Constraint("win32")
 
     assert c.difference(Constraint("win32")).is_empty()
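
The test_intersect and test_union rewrites above follow the standard table-driven pytest idiom: each (constraint1, constraint2, expected) triple becomes its own test case with its own ID, so one failing combination no longer masks the rest of the function. The shape of the pattern, reduced to integers to stay self-contained:

    import pytest

    @pytest.mark.parametrize(
        ("a", "b", "expected"),
        [
            (1, 2, 3),
            (2, 3, 5),
            (0, 0, 0),
        ],
    )
    def test_add(a: int, b: int, expected: int) -> None:
        # Runs as three independent tests, e.g. test_add[1-2-3].
        assert a + b == expected
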
diff --git a/vendor/poetry-core/tests/packages/constraints/test_main.py b/vendor/poetry-core/tests/packages/constraints/test_main.py
index 0769a47a..e95c04ca 100644
--- a/vendor/poetry-core/tests/packages/constraints/test_main.py
+++ b/vendor/poetry-core/tests/packages/constraints/test_main.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import pytest
 
 from poetry.core.packages.constraints import parse_constraint
@@ -18,7 +20,7 @@
         ("!= win32", Constraint("win32", "!=")),
     ],
 )
-def test_parse_constraint(input, constraint):
+def test_parse_constraint(input: str, constraint: AnyConstraint | Constraint) -> None:
     assert parse_constraint(input) == constraint
 
 
@@ -39,7 +41,7 @@ def test_parse_constraint(input, constraint):
         ),
     ],
 )
-def test_parse_constraint_multi(input, constraint):
+def test_parse_constraint_multi(input: str, constraint: MultiConstraint) -> None:
     assert parse_constraint(input) == constraint
 
 
@@ -53,5 +55,5 @@ def test_parse_constraint_multi(input, constraint):
         ),
     ],
 )
-def test_parse_constraint_union(input, constraint):
+def test_parse_constraint_union(input: str, constraint: UnionConstraint) -> None:
     assert parse_constraint(input) == constraint
diff --git a/vendor/poetry-core/tests/packages/constraints/test_multi_constraint.py b/vendor/poetry-core/tests/packages/constraints/test_multi_constraint.py
index 56d96dc9..5d8cb36b 100644
--- a/vendor/poetry-core/tests/packages/constraints/test_multi_constraint.py
+++ b/vendor/poetry-core/tests/packages/constraints/test_multi_constraint.py
@@ -1,8 +1,10 @@
+from __future__ import annotations
+
 from poetry.core.packages.constraints.constraint import Constraint
 from poetry.core.packages.constraints.multi_constraint import MultiConstraint
 
 
-def test_allows():
+def test_allows() -> None:
     c = MultiConstraint(Constraint("win32", "!="), Constraint("linux", "!="))
 
     assert not c.allows(Constraint("win32"))
@@ -10,7 +12,7 @@ def test_allows():
     assert c.allows(Constraint("darwin"))
 
 
-def test_allows_any():
+def test_allows_any() -> None:
     c = MultiConstraint(Constraint("win32", "!="), Constraint("linux", "!="))
 
     assert c.allows_any(Constraint("darwin"))
@@ -22,7 +24,7 @@ def test_allows_any():
     )
 
 
-def test_allows_all():
+def test_allows_all() -> None:
     c = MultiConstraint(Constraint("win32", "!="), Constraint("linux", "!="))
 
     assert c.allows_all(Constraint("darwin"))
@@ -34,7 +36,7 @@ def test_allows_all():
     )
 
 
-def test_intersect():
+def test_intersect() -> None:
     c = MultiConstraint(Constraint("win32", "!="), Constraint("linux", "!="))
 
     intersection = c.intersect(Constraint("win32", "!="))
diff --git a/vendor/poetry-core/tests/packages/constraints/test_union_constraint.py b/vendor/poetry-core/tests/packages/constraints/test_union_constraint.py
index b64d5912..6545dae5 100644
--- a/vendor/poetry-core/tests/packages/constraints/test_union_constraint.py
+++ b/vendor/poetry-core/tests/packages/constraints/test_union_constraint.py
@@ -1,8 +1,10 @@
+from __future__ import annotations
+
 from poetry.core.packages.constraints.constraint import Constraint
 from poetry.core.packages.constraints.union_constraint import UnionConstraint
 
 
-def test_allows():
+def test_allows() -> None:
     c = UnionConstraint(Constraint("win32"), Constraint("linux"))
 
     assert c.allows(Constraint("win32"))
@@ -10,7 +12,7 @@ def test_allows():
     assert not c.allows(Constraint("darwin"))
 
 
-def test_allows_any():
+def test_allows_any() -> None:
     c = UnionConstraint(Constraint("win32"), Constraint("linux"))
 
     assert c.allows_any(c)
@@ -20,7 +22,7 @@ def test_allows_any():
     assert not c.allows_any(Constraint("darwin"))
 
 
-def test_allows_all():
+def test_allows_all() -> None:
     c = UnionConstraint(Constraint("win32"), Constraint("linux"))
 
     assert c.allows_all(c)
diff --git a/vendor/poetry-core/tests/packages/test_dependency.py b/vendor/poetry-core/tests/packages/test_dependency.py
index 03a2cf6e..a9bbb496 100644
--- a/vendor/poetry-core/tests/packages/test_dependency.py
+++ b/vendor/poetry-core/tests/packages/test_dependency.py
@@ -1,65 +1,29 @@
+from __future__ import annotations
+
 import pytest
 
-from poetry.core.packages import Dependency
-from poetry.core.packages import Package
-from poetry.core.packages import dependency_from_pep_508
+from poetry.core.packages.dependency import Dependency
 from poetry.core.version.markers import parse_marker
 
 
-def test_accepts():
-    dependency = Dependency("A", "^1.0")
-    package = Package("A", "1.4")
-
-    assert dependency.accepts(package)
-
-
-def test_accepts_prerelease():
-    dependency = Dependency("A", "^1.0", allows_prereleases=True)
-    package = Package("A", "1.4-beta.1")
-
-    assert dependency.accepts(package)
-
-
-def test_accepts_python_versions():
-    dependency = Dependency("A", "^1.0")
-    dependency.python_versions = "^3.6"
-    package = Package("A", "1.4")
-    package.python_versions = "~3.6"
-
-    assert dependency.accepts(package)
-
-
-def test_accepts_fails_with_different_names():
-    dependency = Dependency("A", "^1.0")
-    package = Package("B", "1.4")
-
-    assert not dependency.accepts(package)
-
-
-def test_accepts_fails_with_version_mismatch():
-    dependency = Dependency("A", "~1.0")
-    package = Package("B", "1.4")
-
-    assert not dependency.accepts(package)
-
-
-def test_accepts_fails_with_prerelease_mismatch():
-    dependency = Dependency("A", "^1.0")
-    package = Package("B", "1.4-beta.1")
-
-    assert not dependency.accepts(package)
-
-
-def test_accepts_fails_with_python_versions_mismatch():
-    dependency = Dependency("A", "^1.0")
-    dependency.python_versions = "^3.6"
-    package = Package("B", "1.4")
-    package.python_versions = "~3.5"
-
-    assert not dependency.accepts(package)
+@pytest.mark.parametrize(
+    "constraint,result",
+    [
+        ("^1.0", False),
+        ("^1.0.dev0", True),
+        ("^1.0.0", False),
+        ("^1.0.0.dev0", True),
+        ("^1.0.0.alpha0", True),
+        ("^1.0.0.alpha0+local", True),
+        ("^1.0.0.rc0+local", True),
+        ("^1.0.0-1", False),
+    ],
+)
+def test_allows_prerelease(constraint: str, result: bool) -> None:
+    assert Dependency("A", constraint).allows_prereleases() == result
 
 
-def test_to_pep_508():
+def test_to_pep_508() -> None:
     dependency = Dependency("Django", "^1.23")
 
     result = dependency.to_pep_508()
@@ -70,20 +34,21 @@ def test_to_pep_508():
 
     result = dependency.to_pep_508()
     assert (
-        result == "Django (>=1.23,<2.0); "
+        result
+        == "Django (>=1.23,<2.0); "
         'python_version >= "2.7" and python_version < "2.8" '
         'or python_version >= "3.6" and python_version < "4.0"'
     )
 
 
-def test_to_pep_508_wilcard():
+def test_to_pep_508_wildcard() -> None:
     dependency = Dependency("Django", "*")
 
     result = dependency.to_pep_508()
     assert result == "Django"
 
 
-def test_to_pep_508_in_extras():
+def test_to_pep_508_in_extras() -> None:
     dependency = Dependency("Django", "^1.23")
     dependency.in_extras.append("foo")
 
@@ -101,8 +66,9 @@ def test_to_pep_508_in_extras():
     dependency.python_versions = "~2.7 || ^3.6"
 
     result = dependency.to_pep_508()
-    assert result == (
-        "Django (>=1.23,<2.0); "
+    assert (
+        result
+        == "Django (>=1.23,<2.0); "
         "("
         'python_version >= "2.7" and python_version < "2.8" '
         'or python_version >= "3.6" and python_version < "4.0"'
@@ -111,27 +77,38 @@ def test_to_pep_508_in_extras():
     )
 
     result = dependency.to_pep_508(with_extras=False)
-    assert result == (
-        "Django (>=1.23,<2.0); "
+    assert (
+        result
+        == "Django (>=1.23,<2.0); "
         'python_version >= "2.7" and python_version < "2.8" '
         'or python_version >= "3.6" and python_version < "4.0"'
     )
 
 
-def test_to_pep_508_in_extras_parsed():
-    dependency = dependency_from_pep_508('foo[bar] (>=1.23,<2.0) ; extra == "baz"')
+def test_to_pep_508_in_extras_parsed() -> None:
+    dependency = Dependency.create_from_pep_508(
+        'foo[baz,bar] (>=1.23,<2.0) ; extra == "baz"'
+    )
 
     result = dependency.to_pep_508()
-    assert result == 'foo[bar] (>=1.23,<2.0); extra == "baz"'
+    assert result == 'foo[bar,baz] (>=1.23,<2.0); extra == "baz"'
 
     result = dependency.to_pep_508(with_extras=False)
-    assert result == "foo[bar] (>=1.23,<2.0)"
+    assert result == "foo[bar,baz] (>=1.23,<2.0)"
 
 
-def test_to_pep_508_with_single_version_excluded():
-    dependency = Dependency("foo", "!=1.2.3")
+@pytest.mark.parametrize(
+    ("exclusion", "expected"),
+    [
+        ("!=1.2.3", "!=1.2.3"),
+        ("!=1.2.*", "!=1.2.*"),
+        ("<2.0 || >=2.1", "!=2.0.*"),
+    ],
+)
+def test_to_pep_508_with_excluded_versions(exclusion: str, expected: str) -> None:
+    dependency = Dependency("foo", exclusion)
 
-    assert "foo (!=1.2.3)" == dependency.to_pep_508()
+    assert dependency.to_pep_508() == f"foo ({expected})"
 
 
 @pytest.mark.parametrize(
@@ -144,67 +121,69 @@ def test_to_pep_508_with_single_version_excluded():
         ("== 3.5.4", 'python_full_version == "3.5.4"'),
     ],
 )
-def test_to_pep_508_with_patch_python_version(python_versions, marker):
+def test_to_pep_508_with_patch_python_version(
+    python_versions: str, marker: str
+) -> None:
     dependency = Dependency("Django", "^1.23")
     dependency.python_versions = python_versions
 
-    expected = "Django (>=1.23,<2.0); {}".format(marker)
+    expected = f"Django (>=1.23,<2.0); {marker}"
 
-    assert expected == dependency.to_pep_508()
-    assert marker == str(dependency.marker)
+    assert dependency.to_pep_508() == expected
+    assert str(dependency.marker) == marker
 
 
-def test_to_pep_508_tilde():
+def test_to_pep_508_tilde() -> None:
     dependency = Dependency("foo", "~1.2.3")
 
-    assert "foo (>=1.2.3,<1.3.0)" == dependency.to_pep_508()
+    assert dependency.to_pep_508() == "foo (>=1.2.3,<1.3.0)"
 
     dependency = Dependency("foo", "~1.2")
 
-    assert "foo (>=1.2,<1.3)" == dependency.to_pep_508()
+    assert dependency.to_pep_508() == "foo (>=1.2,<1.3)"
 
     dependency = Dependency("foo", "~0.2.3")
 
-    assert "foo (>=0.2.3,<0.3.0)" == dependency.to_pep_508()
+    assert dependency.to_pep_508() == "foo (>=0.2.3,<0.3.0)"
 
     dependency = Dependency("foo", "~0.2")
 
-    assert "foo (>=0.2,<0.3)" == dependency.to_pep_508()
+    assert dependency.to_pep_508() == "foo (>=0.2,<0.3)"
 
 
-def test_to_pep_508_caret():
+def test_to_pep_508_caret() -> None:
     dependency = Dependency("foo", "^1.2.3")
 
-    assert "foo (>=1.2.3,<2.0.0)" == dependency.to_pep_508()
+    assert dependency.to_pep_508() == "foo (>=1.2.3,<2.0.0)"
 
     dependency = Dependency("foo", "^1.2")
 
-    assert "foo (>=1.2,<2.0)" == dependency.to_pep_508()
+    assert dependency.to_pep_508() == "foo (>=1.2,<2.0)"
 
     dependency = Dependency("foo", "^0.2.3")
 
-    assert "foo (>=0.2.3,<0.3.0)" == dependency.to_pep_508()
+    assert dependency.to_pep_508() == "foo (>=0.2.3,<0.3.0)"
 
     dependency = Dependency("foo", "^0.2")
 
-    assert "foo (>=0.2,<0.3)" == dependency.to_pep_508()
+    assert dependency.to_pep_508() == "foo (>=0.2,<0.3)"
 
 
-def test_to_pep_508_combination():
+def test_to_pep_508_combination() -> None:
     dependency = Dependency("foo", "^1.2,!=1.3.5")
 
-    assert "foo (>=1.2,<2.0,!=1.3.5)" == dependency.to_pep_508()
+    assert dependency.to_pep_508() == "foo (>=1.2,<2.0,!=1.3.5)"
 
     dependency = Dependency("foo", "~1.2,!=1.2.5")
 
-    assert "foo (>=1.2,<1.3,!=1.2.5)" == dependency.to_pep_508()
+    assert dependency.to_pep_508() == "foo (>=1.2,<1.3,!=1.2.5)"
 
 
-def test_complete_name():
-    assert "foo" == Dependency("foo", ">=1.2.3").complete_name
+def test_complete_name() -> None:
+    assert Dependency("foo", ">=1.2.3").complete_name == "foo"
     assert (
-        "foo[bar,baz]"
-        == Dependency("foo", ">=1.2.3", extras=["baz", "bar"]).complete_name
+        Dependency("foo", ">=1.2.3", extras=["baz", "bar"]).complete_name
+        == "foo[bar,baz]"
     )
 
 
@@ -220,19 +199,51 @@ def test_complete_name():
             ["x"],
             "A[x] (>=1.6.5,!=1.8.0,<3.1.0)",
         ),
+        # test single version range exclusions
+        ("A", ">=1.8,!=2.0.*", None, "A (>=1.8,!=2.0.*)"),
+        ("A", "!=0.0.*", None, "A (!=0.0.*)"),
+        ("A", "!=0.1.*", None, "A (!=0.1.*)"),
+        ("A", "!=0.*", None, "A (>=1.0.0)"),
+        ("A", ">=1.8,!=2.*", None, "A (>=1.8,!=2.*)"),
+        ("A", ">=1.8,!=2.*.*", None, "A (>=1.8,!=2.*)"),
+        ("A", ">=1.8,<2.0 || >=2.1.0", None, "A (>=1.8,!=2.0.*)"),
+        ("A", ">=1.8,<2.0.0 || >=3.0.0", None, "A (>=1.8,!=2.*)"),
+        ("A", ">=1.8,<2.0 || >=3", None, "A (>=1.8,!=2.*)"),
+        ("A", ">=1.8,<2 || >=2.1.0", None, "A (>=1.8,!=2.0.*)"),
+        ("A", ">=1.8,<2 || >=2.1", None, "A (>=1.8,!=2.0.*)"),
+        ("A", ">=1.8,!=2.0.*,!=3.0.*", None, "A (>=1.8,!=2.0.*,!=3.0.*)"),
+        ("A", ">=1.8.0.0,<2.0.0.0 || >=2.0.1.0", None, "A (>=1.8.0.0,!=2.0.0.*)"),
+        ("A", ">=1.8.0.0,<2 || >=2.0.1.0", None, "A (>=1.8.0.0,!=2.0.0.*)"),
+        # we verify that the range exclusion logic is not too eager
+        ("A", ">=1.8,<2.0 || >=2.2.0", None, "A (>=1.8,<2.0 || >=2.2.0)"),
+        ("A", ">=1.8,<2.0 || >=2.1.5", None, "A (>=1.8,<2.0 || >=2.1.5)"),
+        ("A", ">=1.8.0.0,<2 || >=2.0.1.5", None, "A (>=1.8.0.0,<2 || >=2.0.1.5)"),
+        # non-semver version test is ignored due to existing bug in wildcard
+        # constraint parsing that ignores non-semver versions
+        # TODO: re-enable for verification once fixed
+        # ("A", ">=1.8.0.0,!=2.0.0.*", None, "A (>=1.8.0.0,!=2.0.0.*)"),  # noqa: E800
     ],
 )
-def test_dependency_string_representation(name, constraint, extras, expected):
+def test_dependency_string_representation(
+    name: str, constraint: str, extras: list[str] | None, expected: str
+) -> None:
     dependency = Dependency(name=name, constraint=constraint, extras=extras)
     assert str(dependency) == expected
 
 
-def test_with_constraint():
+def test_set_constraint_sets_pretty_constraint() -> None:
+    dependency = Dependency("A", "^1.0")
+    assert dependency.pretty_constraint == "^1.0"
+    dependency.constraint = "^2.0"  # type: ignore[assignment]
+    assert dependency.pretty_constraint == "^2.0"
+
+
+def test_with_constraint() -> None:
     dependency = Dependency(
         "foo",
         "^1.2.3",
         optional=True,
-        category="dev",
+        groups=["dev"],
         allows_prereleases=True,
         extras=["bar", "baz"],
     )
@@ -250,10 +261,95 @@ def test_with_constraint():
     assert new.name == dependency.name
     assert str(new.constraint) == ">=1.2.6,<2.0.0"
     assert new.is_optional()
-    assert new.category == "dev"
+    assert new.groups == frozenset(["dev"])
     assert new.allows_prereleases()
     assert set(new.extras) == {"bar", "baz"}
     assert new.marker == dependency.marker
     assert new.transitive_marker == dependency.transitive_marker
     assert new.python_constraint == dependency.python_constraint
     assert new.transitive_python_constraint == dependency.transitive_python_constraint
+
+
+@pytest.mark.parametrize(
+    "marker, expected",
+    [
+        ('python_version >= "3.6" and python_version < "4.0"', ">=3.6,<4.0"),
+        ('sys_platform == "linux"', "*"),
+        ('python_version >= "3.9" or sys_platform == "linux"', "*"),
+        ('python_version >= "3.9" and sys_platform == "linux"', ">=3.9"),
+    ],
+)
+def test_marker_properly_sets_python_constraint(marker: str, expected: str) -> None:
+    dependency = Dependency("foo", "^1.2.3")
+    dependency.marker = marker  # type: ignore[assignment]
+    assert str(dependency.python_constraint) == expected
+
+
+def test_dependency_markers_are_the_same_as_markers() -> None:
+    dependency = Dependency.create_from_pep_508('foo ; extra=="bar"')
+    marker = parse_marker('extra=="bar"')
+
+    assert dependency.marker == marker
+
+
+def test_marker_properly_unsets_python_constraint() -> None:
+    dependency = Dependency("foo", "^1.2.3")
+
+    dependency.marker = 'python_version >= "3.6"'  # type: ignore[assignment]
+    assert str(dependency.python_constraint) == ">=3.6"
+
+    dependency.marker = "*"  # type: ignore[assignment]
+    assert str(dependency.python_constraint) == "*"
+
+
+def test_create_from_pep_508_url_with_activated_extras() -> None:
+    dependency = Dependency.create_from_pep_508("name [fred,bar] @ http://foo.com")
+    assert dependency.extras == {"fred", "bar"}
+
+
+@pytest.mark.parametrize(
+    "dependency1, dependency2, expected",
+    [
+        (Dependency("a", "1.0"), Dependency("a", "1.0"), True),
+        (Dependency("a", "1.0"), Dependency("a", "1.0.1"), False),
+        (Dependency("a", "1.0"), Dependency("a1", "1.0"), False),
+        (Dependency("a", "1.0"), Dependency("a", "1.0", source_type="file"), False),
+        # constraint is implicitly given for direct origin dependencies,
+        # but might not be set
+        (
+            Dependency("a", "1.0", source_type="file"),
+            Dependency("a", "*", source_type="file"),
+            True,
+        ),
+        # constraint is not implicit for non direct origin dependencies
+        (Dependency("a", "1.0"), Dependency("a", "*"), False),
+        (
+            Dependency("a", "1.0", source_type="legacy"),
+            Dependency("a", "*", source_type="legacy"),
+            False,
+        ),
+    ],
+)
+def test_eq(dependency1: Dependency, dependency2: Dependency, expected: bool) -> None:
+    assert (dependency1 == dependency2) is expected
+    assert (dependency2 == dependency1) is expected
+
+
+@pytest.mark.parametrize(
+    "attr_name, value",
+    [
+        ("constraint", "2.0"),
+        ("python_versions", "<3.8"),
+        ("transitive_python_versions", "<3.8"),
+        ("marker", "sys_platform == 'linux'"),
+        ("transitive_marker", "sys_platform == 'linux'"),
+    ],
+)
+def test_mutable_attributes_not_in_hash(attr_name: str, value: str) -> None:
+    dependency = Dependency("foo", "^1.2.3")
+    ref_hash = hash(dependency)
+
+    ref_value = getattr(dependency, attr_name)
+    setattr(dependency, attr_name, value)
+    assert value != ref_value
+    assert hash(dependency) == ref_hash
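
test_mutable_attributes_not_in_hash pins down an invariant worth spelling out: dependencies get stored in sets and dict keys, so __hash__ must be computed from immutable identity only, never from fields like constraint or marker that resolution mutates in place. A toy class illustrating the contract (not poetry-core's implementation):

    class Dep:
        def __init__(self, name: str, constraint: str) -> None:
            self.name = name              # immutable identity: safe to hash
            self.constraint = constraint  # mutated later: must stay unhashed

        def __hash__(self) -> int:
            return hash(self.name)  # coarser than __eq__, which is allowed

        def __eq__(self, other: object) -> bool:
            return (
                isinstance(other, Dep)
                and self.name == other.name
                and self.constraint == other.constraint
            )

    dep = Dep("foo", "^1.2.3")
    ref_hash = hash(dep)
    dep.constraint = "^2.0"
    assert hash(dep) == ref_hash  # mutation leaves the hash stable
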
diff --git a/vendor/poetry-core/tests/packages/test_dependency_group.py b/vendor/poetry-core/tests/packages/test_dependency_group.py
new file mode 100644
index 00000000..9f65acfa
--- /dev/null
+++ b/vendor/poetry-core/tests/packages/test_dependency_group.py
@@ -0,0 +1,26 @@
+from __future__ import annotations
+
+from poetry.core.packages.dependency import Dependency
+from poetry.core.packages.dependency_group import DependencyGroup
+
+
+def test_dependency_group_remove_dependency() -> None:
+    group = DependencyGroup(name="linter")
+    group.add_dependency(Dependency(name="black", constraint="*"))
+    group.add_dependency(Dependency(name="isort", constraint="*"))
+    group.add_dependency(Dependency(name="flake8", constraint="*"))
+
+    assert {dependency.name for dependency in group.dependencies} == {
+        "black",
+        "isort",
+        "flake8",
+    }
+
+    group.remove_dependency("isort")
+    assert {dependency.name for dependency in group.dependencies} == {"black", "flake8"}
+
+    group.remove_dependency("black")
+    assert {dependency.name for dependency in group.dependencies} == {"flake8"}
+
+    group.remove_dependency("flake8")
+    assert {dependency.name for dependency in group.dependencies} == set()
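
DependencyGroup is the replacement for the old category string (compare the groups=["dev"] change in test_with_constraint further up), and this new test only exercises add/remove by name. A minimal sketch of the shape the test assumes; the real class carries more state, such as optional groups:

    from dataclasses import dataclass, field

    @dataclass
    class Dep:
        name: str
        constraint: str

    @dataclass
    class Group:
        name: str
        dependencies: list[Dep] = field(default_factory=list)

        def add_dependency(self, dep: Dep) -> None:
            self.dependencies.append(dep)

        def remove_dependency(self, name: str) -> None:
            self.dependencies = [d for d in self.dependencies if d.name != name]

    group = Group("linter")
    group.add_dependency(Dep("black", "*"))
    group.add_dependency(Dep("isort", "*"))
    group.remove_dependency("isort")
    assert [d.name for d in group.dependencies] == ["black"]
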
diff --git a/vendor/poetry-core/tests/packages/test_directory_dependency.py b/vendor/poetry-core/tests/packages/test_directory_dependency.py
index 522935da..c2146021 100644
--- a/vendor/poetry-core/tests/packages/test_directory_dependency.py
+++ b/vendor/poetry-core/tests/packages/test_directory_dependency.py
@@ -1,57 +1,128 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import cast
+
 import pytest
 
-from poetry.core.packages import dependency_from_pep_508
+from poetry.core.packages.dependency import Dependency
 from poetry.core.packages.directory_dependency import DirectoryDependency
-from poetry.core.utils._compat import Path
 
 
 DIST_PATH = Path(__file__).parent.parent / "fixtures" / "git" / "github.com" / "demo"
+SAMPLE_PROJECT = Path(__file__).parent.parent / "fixtures" / "sample_project"
 
 
-def test_directory_dependency_must_exist():
+def test_directory_dependency_must_exist() -> None:
     with pytest.raises(ValueError):
         DirectoryDependency("demo", DIST_PATH / "invalid")
 
 
-def _test_directory_dependency_pep_508(name, path, pep_508_input, pep_508_output=None):
-    dep = dependency_from_pep_508(pep_508_input, relative_to=Path(__file__).parent)
+def _test_directory_dependency_pep_508(
+    name: str, path: Path, pep_508_input: str, pep_508_output: str | None = None
+) -> None:
+    dep = Dependency.create_from_pep_508(
+        pep_508_input, relative_to=Path(__file__).parent
+    )
 
     assert dep.is_directory()
+    dep = cast(DirectoryDependency, dep)
     assert dep.name == name
     assert dep.path == path
     assert dep.to_pep_508() == pep_508_output or pep_508_input
 
 
-def test_directory_dependency_pep_508_local_absolute():
+def test_directory_dependency_pep_508_local_absolute() -> None:
     path = (
         Path(__file__).parent.parent
         / "fixtures"
         / "project_with_multi_constraints_dependency"
     )
-    requirement = "{} @ file://{}".format("demo", path.as_posix())
+    requirement = f"demo @ file://{path.as_posix()}"
     _test_directory_dependency_pep_508("demo", path, requirement)
 
-    requirement = "{} @ {}".format("demo", path)
+    requirement = f"demo @ {path}"
     _test_directory_dependency_pep_508("demo", path, requirement)
 
 
-def test_directory_dependency_pep_508_localhost():
+def test_directory_dependency_pep_508_localhost() -> None:
     path = (
         Path(__file__).parent.parent
         / "fixtures"
         / "project_with_multi_constraints_dependency"
     )
-    requirement = "{} @ file://localhost{}".format("demo", path.as_posix())
-    requirement_expected = "{} @ file://{}".format("demo", path.as_posix())
+    requirement = f"demo @ file://localhost{path.as_posix()}"
+    requirement_expected = f"demo @ file://{path.as_posix()}"
     _test_directory_dependency_pep_508("demo", path, requirement, requirement_expected)
 
 
-def test_directory_dependency_pep_508_local_relative():
+def test_directory_dependency_pep_508_local_relative() -> None:
     path = Path("..") / "fixtures" / "project_with_multi_constraints_dependency"
 
     with pytest.raises(ValueError):
-        requirement = "{} @ file://{}".format("demo", path.as_posix())
+        requirement = f"demo @ file://{path.as_posix()}"
         _test_directory_dependency_pep_508("demo", path, requirement)
 
-    requirement = "{} @ {}".format("demo", path)
+    requirement = f"demo @ {path}"
     _test_directory_dependency_pep_508("demo", path, requirement)
+
+
+def test_directory_dependency_pep_508_extras() -> None:
+    path = (
+        Path(__file__).parent.parent
+        / "fixtures"
+        / "project_with_multi_constraints_dependency"
+    )
+    requirement = f"demo[foo,bar] @ file://{path.as_posix()}"
+    requirement_expected = f"demo[bar,foo] @ file://{path.as_posix()}"
+    _test_directory_dependency_pep_508("demo", path, requirement, requirement_expected)
+
+
+@pytest.mark.parametrize(
+    "name,path,extras,constraint,expected",
+    [
+        (
+            "my-package",
+            SAMPLE_PROJECT,
+            None,
+            None,
+            f"my-package (*) @ {SAMPLE_PROJECT.as_uri()}",
+        ),
+        (
+            "my-package",
+            SAMPLE_PROJECT,
+            ["db"],
+            "1.2",
+            f"my-package[db] (1.2) @ {SAMPLE_PROJECT.as_uri()}",
+        ),
+    ],
+)
+def test_directory_dependency_string_representation(
+    name: str,
+    path: Path,
+    extras: list[str] | None,
+    constraint: str | None,
+    expected: str,
+) -> None:
+    dependency = DirectoryDependency(name=name, path=path, extras=extras)
+    if constraint:
+        dependency.constraint = constraint  # type: ignore[assignment]
+    assert str(dependency) == expected
+
+
+@pytest.mark.parametrize(
+    ("fixture", "name"),
+    [
+        ("project_with_pep517_non_poetry", "PEP 517"),
+        ("project_with_setup_cfg_only", "setup.cfg"),
+    ],
+)
+def test_directory_dependency_non_poetry_pep517(fixture: str, name: str) -> None:
+    path = Path(__file__).parent.parent / "fixtures" / fixture
+
+    try:
+        DirectoryDependency("package", path)
+    except ValueError as e:
+        if "does not seem to be a Python package" not in str(e):
+            raise
+        pytest.fail(f"A {name} project was not recognized as a valid directory dependency")
diff --git a/vendor/poetry-core/tests/packages/test_file_dependency.py b/vendor/poetry-core/tests/packages/test_file_dependency.py
index 6f155135..c0db558d 100644
--- a/vendor/poetry-core/tests/packages/test_file_dependency.py
+++ b/vendor/poetry-core/tests/packages/test_file_dependency.py
@@ -1,36 +1,46 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import cast
+
 import pytest
 
-from poetry.core.packages import FileDependency
-from poetry.core.packages import dependency_from_pep_508
-from poetry.core.utils._compat import PY36
-from poetry.core.utils._compat import Path
+from poetry.core.packages.dependency import Dependency
+from poetry.core.packages.file_dependency import FileDependency
+from poetry.core.version.markers import SingleMarker
+
+
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
 
+    from poetry.core.version.markers import BaseMarker
 
 DIST_PATH = Path(__file__).parent.parent / "fixtures" / "distributions"
 TEST_FILE = "demo-0.1.0.tar.gz"
 
 
-def test_file_dependency_wrong_path():
+def test_file_dependency_wrong_path() -> None:
     with pytest.raises(ValueError):
-        FileDependency("demo", DIST_PATH / TEST_FILE.replace("1", "2"))
+        FileDependency("demo", DIST_PATH / "demo-0.2.0.tar.gz")
 
 
-def test_file_dependency_dir():
+def test_file_dependency_dir() -> None:
     with pytest.raises(ValueError):
         FileDependency("demo", DIST_PATH)
 
 
-def test_default_hash():
+def test_default_hash() -> None:
     path = DIST_PATH / TEST_FILE
     dep = FileDependency("demo", path)
-    SHA_256 = "72e8531e49038c5f9c4a837b088bfcb8011f4a9f76335c8f0654df6ac539b3d6"
-    assert dep.hash() == SHA_256
+    sha_256 = "72e8531e49038c5f9c4a837b088bfcb8011f4a9f76335c8f0654df6ac539b3d6"
+    assert dep.hash() == sha_256
 
 
 try:
-    from hashlib import algorithms_guaranteed as ALGORITHMS_GUARANTEED
+    from hashlib import algorithms_guaranteed
 except ImportError:
-    ALGORITHMS_GUARANTEED = "md5,sha1,sha224,sha256,sha384,sha512".split(",")
+    algorithms_guaranteed = {"md5", "sha1", "sha224", "sha256", "sha384", "sha512"}
 
 
 @pytest.mark.parametrize(
@@ -74,55 +84,139 @@ def test_default_hash():
                 "ba3d2a964b0680b6dc9565a03952e29c294c785d5a2307d3e2d785d73b75ed7e",
             ),
         ]
-        if hash_name in ALGORITHMS_GUARANTEED
+        if hash_name in algorithms_guaranteed
     ],
 )
-def test_guaranteed_hash(hash_name, expected):
+def test_guaranteed_hash(hash_name: str, expected: str) -> None:
     path = DIST_PATH / TEST_FILE
     dep = FileDependency("demo", path)
     assert dep.hash(hash_name) == expected
 
 
 def _test_file_dependency_pep_508(
-    mocker, name, path, pep_508_input, pep_508_output=None
-):
+    mocker: MockerFixture,
+    name: str,
+    path: Path,
+    pep_508_input: str,
+    pep_508_output: str | None = None,
+    marker: BaseMarker | None = None,
+) -> None:
     mocker.patch.object(Path, "exists").return_value = True
     mocker.patch.object(Path, "is_file").return_value = True
-    if not PY36:
-        mocker.patch.object(Path, "resolve").return_value = path
 
-    dep = dependency_from_pep_508(pep_508_input, relative_to=Path(__file__).parent)
+    dep = Dependency.create_from_pep_508(
+        pep_508_input, relative_to=Path(__file__).parent
+    )
+    if marker:
+        dep.marker = marker
 
     assert dep.is_file()
+    dep = cast(FileDependency, dep)
     assert dep.name == name
     assert dep.path == path
     assert dep.to_pep_508() == pep_508_output or pep_508_input
 
 
-def test_file_dependency_pep_508_local_file_absolute(mocker):
+def test_file_dependency_pep_508_local_file_absolute(mocker: MockerFixture) -> None:
     path = DIST_PATH / "demo-0.2.0.tar.gz"
-    requirement = "{} @ file://{}".format("demo", path.as_posix())
+    requirement = f"demo @ file://{path.as_posix()}"
     _test_file_dependency_pep_508(mocker, "demo", path, requirement)
 
-    requirement = "{} @ {}".format("demo", path)
+    requirement = f"demo @ {path}"
     _test_file_dependency_pep_508(mocker, "demo", path, requirement)
 
 
-def test_file_dependency_pep_508_local_file_localhost(mocker):
+def test_file_dependency_pep_508_local_file_localhost(mocker: MockerFixture) -> None:
     path = DIST_PATH / "demo-0.2.0.tar.gz"
-    requirement = "{} @ file://localhost{}".format("demo", path.as_posix())
-    requirement_expected = "{} @ file://{}".format("demo", path.as_posix())
+    requirement = f"demo @ file://localhost{path.as_posix()}"
+    requirement_expected = f"demo @ file://{path.as_posix()}"
     _test_file_dependency_pep_508(
         mocker, "demo", path, requirement, requirement_expected
     )
 
 
-def test_file_dependency_pep_508_local_file_relative_path(mocker):
+def test_file_dependency_pep_508_local_file_relative_path(
+    mocker: MockerFixture,
+) -> None:
     path = Path("..") / "fixtures" / "distributions" / "demo-0.2.0.tar.gz"
 
     with pytest.raises(ValueError):
-        requirement = "{} @ file://{}".format("demo", path.as_posix())
+        requirement = f"demo @ file://{path.as_posix()}"
         _test_file_dependency_pep_508(mocker, "demo", path, requirement)
 
-    requirement = "{} @ {}".format("demo", path)
+    requirement = f"demo @ {path}"
     _test_file_dependency_pep_508(mocker, "demo", path, requirement)
+
+
+def test_absolute_file_dependency_to_pep_508_with_marker(mocker: MockerFixture) -> None:
+    wheel = "demo-0.1.0-py2.py3-none-any.whl"
+
+    abs_path = DIST_PATH / wheel
+    requirement = f'demo @ file://{abs_path.as_posix()} ; sys_platform == "linux"'
+    _test_file_dependency_pep_508(
+        mocker,
+        "demo",
+        abs_path,
+        requirement,
+        marker=SingleMarker("sys.platform", "linux"),
+    )
+
+
+def test_relative_file_dependency_to_pep_508_with_marker(mocker: MockerFixture) -> None:
+    wheel = "demo-0.1.0-py2.py3-none-any.whl"
+
+    rel_path = Path("..") / "fixtures" / "distributions" / wheel
+    requirement = f'demo @ {rel_path.as_posix()} ; sys_platform == "linux"'
+    _test_file_dependency_pep_508(
+        mocker,
+        "demo",
+        rel_path,
+        requirement,
+        marker=SingleMarker("sys.platform", "linux"),
+    )
+
+
+def test_file_dependency_pep_508_extras(mocker: MockerFixture) -> None:
+    wheel = "demo-0.1.0-py2.py3-none-any.whl"
+
+    rel_path = Path("..") / "fixtures" / "distributions" / wheel
+    requirement = f'demo[foo,bar] @ {rel_path.as_posix()} ; sys_platform == "linux"'
+    _test_file_dependency_pep_508(
+        mocker,
+        "demo",
+        rel_path,
+        requirement,
+        f'demo[bar,foo] @ {rel_path.as_posix()} ; sys_platform == "linux"',
+    )
+
+
+@pytest.mark.parametrize(
+    "name,path,extras,constraint,expected",
+    [
+        (
+            "demo",
+            DIST_PATH / TEST_FILE,
+            None,
+            None,
+            f"demo (*) @ {(DIST_PATH / TEST_FILE).as_uri()}",
+        ),
+        (
+            "demo",
+            DIST_PATH / TEST_FILE,
+            ["foo"],
+            "1.2",
+            f"demo[foo] (1.2) @ {(DIST_PATH / TEST_FILE).as_uri()}",
+        ),
+    ],
+)
+def test_file_dependency_string_representation(
+    name: str,
+    path: Path,
+    extras: list[str] | None,
+    constraint: str | None,
+    expected: str,
+) -> None:
+    dependency = FileDependency(name=name, path=path, extras=extras)
+    if constraint:
+        dependency.constraint = constraint  # type: ignore[assignment]
+    assert str(dependency) == expected
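
Note: the hunk above also parenthesizes the fallback comparison in
_test_file_dependency_pep_508. Without the parentheses, "a == b or c"
parses as "(a == b) or c", so a non-empty fallback string makes the
assertion pass unconditionally. A minimal standalone sketch of the
pitfall (the variable names are illustrative, not from the patch):

    output = "demo @ ./demo.tar.gz"
    expected = None  # no explicit expectation; fall back to the input
    fallback = "demo @ ./demo.tar.gz"

    assert (output == expected) or fallback  # vacuous: fallback is truthy
    assert output == (expected or fallback)  # the intended comparison
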
diff --git a/vendor/poetry-core/tests/packages/test_main.py b/vendor/poetry-core/tests/packages/test_main.py
index 28340f98..f284ccc9 100644
--- a/vendor/poetry-core/tests/packages/test_main.py
+++ b/vendor/poetry-core/tests/packages/test_main.py
@@ -1,42 +1,48 @@
-from poetry.core.packages import dependency_from_pep_508
+from __future__ import annotations
+
+from typing import cast
+
+from poetry.core.packages.dependency import Dependency
+from poetry.core.packages.url_dependency import URLDependency
+from poetry.core.packages.vcs_dependency import VCSDependency
 from poetry.core.semver.version import Version
 
 
-def test_dependency_from_pep_508():
+def test_dependency_from_pep_508() -> None:
     name = "requests"
-    dep = dependency_from_pep_508(name)
+    dep = Dependency.create_from_pep_508(name)
 
     assert dep.name == name
     assert str(dep.constraint) == "*"
 
 
-def test_dependency_from_pep_508_with_version():
+def test_dependency_from_pep_508_with_version() -> None:
     name = "requests==2.18.0"
-    dep = dependency_from_pep_508(name)
+    dep = Dependency.create_from_pep_508(name)
 
     assert dep.name == "requests"
     assert str(dep.constraint) == "2.18.0"
 
 
-def test_dependency_from_pep_508_with_parens():
+def test_dependency_from_pep_508_with_parens() -> None:
     name = "requests (==2.18.0)"
-    dep = dependency_from_pep_508(name)
+    dep = Dependency.create_from_pep_508(name)
 
     assert dep.name == "requests"
     assert str(dep.constraint) == "2.18.0"
 
 
-def test_dependency_from_pep_508_with_constraint():
+def test_dependency_from_pep_508_with_constraint() -> None:
     name = "requests>=2.12.0,!=2.17.*,<3.0"
-    dep = dependency_from_pep_508(name)
+    dep = Dependency.create_from_pep_508(name)
 
     assert dep.name == "requests"
     assert str(dep.constraint) == ">=2.12.0,<2.17.0 || >=2.18.0,<3.0"
 
 
-def test_dependency_from_pep_508_with_extras():
+def test_dependency_from_pep_508_with_extras() -> None:
     name = 'requests==2.18.0; extra == "foo" or extra == "bar"'
-    dep = dependency_from_pep_508(name)
+    dep = Dependency.create_from_pep_508(name)
 
     assert dep.name == "requests"
     assert str(dep.constraint) == "2.18.0"
@@ -44,9 +50,9 @@ def test_dependency_from_pep_508_with_extras():
     assert str(dep.marker) == 'extra == "foo" or extra == "bar"'
 
 
-def test_dependency_from_pep_508_with_python_version():
+def test_dependency_from_pep_508_with_python_version() -> None:
     name = 'requests (==2.18.0); python_version == "2.7" or python_version == "2.6"'
-    dep = dependency_from_pep_508(name)
+    dep = Dependency.create_from_pep_508(name)
 
     assert dep.name == "requests"
     assert str(dep.constraint) == "2.18.0"
@@ -55,9 +61,9 @@ def test_dependency_from_pep_508_with_python_version():
     assert str(dep.marker) == 'python_version == "2.7" or python_version == "2.6"'
 
 
-def test_dependency_from_pep_508_with_single_python_version():
+def test_dependency_from_pep_508_with_single_python_version() -> None:
     name = 'requests (==2.18.0); python_version == "2.7"'
-    dep = dependency_from_pep_508(name)
+    dep = Dependency.create_from_pep_508(name)
 
     assert dep.name == "requests"
     assert str(dep.constraint) == "2.18.0"
@@ -66,9 +72,9 @@ def test_dependency_from_pep_508_with_single_python_version():
     assert str(dep.marker) == 'python_version == "2.7"'
 
 
-def test_dependency_from_pep_508_with_platform():
+def test_dependency_from_pep_508_with_platform() -> None:
     name = 'requests (==2.18.0); sys_platform == "win32" or sys_platform == "darwin"'
-    dep = dependency_from_pep_508(name)
+    dep = Dependency.create_from_pep_508(name)
 
     assert dep.name == "requests"
     assert str(dep.constraint) == "2.18.0"
@@ -77,29 +83,30 @@ def test_dependency_from_pep_508_with_platform():
     assert str(dep.marker) == 'sys_platform == "win32" or sys_platform == "darwin"'
 
 
-def test_dependency_from_pep_508_complex():
+def test_dependency_from_pep_508_complex() -> None:
     name = (
         "requests (==2.18.0); "
         'python_version >= "2.7" and python_version != "3.2" '
         'and (sys_platform == "win32" or sys_platform == "darwin") '
         'and extra == "foo"'
     )
-    dep = dependency_from_pep_508(name)
+    dep = Dependency.create_from_pep_508(name)
 
     assert dep.name == "requests"
     assert str(dep.constraint) == "2.18.0"
     assert dep.in_extras == ["foo"]
     assert dep.python_versions == ">=2.7 !=3.2.*"
-    assert str(dep.marker) == (
-        'python_version >= "2.7" and python_version != "3.2" '
+    assert (
+        str(dep.marker)
+        == 'python_version >= "2.7" and python_version != "3.2" '
         'and (sys_platform == "win32" or sys_platform == "darwin") '
         'and extra == "foo"'
     )
 
 
-def test_dependency_python_version_in():
+def test_dependency_python_version_in() -> None:
     name = "requests (==2.18.0); python_version in '3.3 3.4 3.5'"
-    dep = dependency_from_pep_508(name)
+    dep = Dependency.create_from_pep_508(name)
 
     assert dep.name == "requests"
     assert str(dep.constraint) == "2.18.0"
@@ -107,9 +114,9 @@ def test_dependency_python_version_in():
     assert str(dep.marker) == 'python_version in "3.3 3.4 3.5"'
 
 
-def test_dependency_python_version_in_comma():
+def test_dependency_python_version_in_comma() -> None:
     name = "requests (==2.18.0); python_version in '3.3, 3.4, 3.5'"
-    dep = dependency_from_pep_508(name)
+    dep = Dependency.create_from_pep_508(name)
 
     assert dep.name == "requests"
     assert str(dep.constraint) == "2.18.0"
@@ -117,18 +124,18 @@ def test_dependency_python_version_in_comma():
     assert str(dep.marker) == 'python_version in "3.3, 3.4, 3.5"'
 
 
-def test_dependency_platform_in():
+def test_dependency_platform_in() -> None:
     name = "requests (==2.18.0); sys_platform in 'win32 darwin'"
-    dep = dependency_from_pep_508(name)
+    dep = Dependency.create_from_pep_508(name)
 
     assert dep.name == "requests"
     assert str(dep.constraint) == "2.18.0"
     assert str(dep.marker) == 'sys_platform in "win32 darwin"'
 
 
-def test_dependency_with_extra():
+def test_dependency_with_extra() -> None:
     name = "requests[security] (==2.18.0)"
-    dep = dependency_from_pep_508(name)
+    dep = Dependency.create_from_pep_508(name)
 
     assert dep.name == "requests"
     assert str(dep.constraint) == "2.18.0"
@@ -137,31 +144,31 @@ def test_dependency_with_extra():
     assert "security" in dep.extras
 
 
-def test_dependency_from_pep_508_with_python_version_union_of_multi():
+def test_dependency_from_pep_508_with_python_version_union_of_multi() -> None:
     name = (
         "requests (==2.18.0); "
         '(python_version >= "2.7" and python_version < "2.8") '
         'or (python_version >= "3.4" and python_version < "3.5")'
     )
-    dep = dependency_from_pep_508(name)
+    dep = Dependency.create_from_pep_508(name)
 
     assert dep.name == "requests"
     assert str(dep.constraint) == "2.18.0"
     assert dep.extras == frozenset()
     assert dep.python_versions == ">=2.7 <2.8 || >=3.4 <3.5"
-    assert str(dep.marker) == (
-        'python_version >= "2.7" and python_version < "2.8" '
+    assert (
+        str(dep.marker)
+        == 'python_version >= "2.7" and python_version < "2.8" '
         'or python_version >= "3.4" and python_version < "3.5"'
     )
 
 
-def test_dependency_from_pep_508_with_not_in_op_marker():
+def test_dependency_from_pep_508_with_not_in_op_marker() -> None:
     name = (
-        "jinja2 (>=2.7,<2.8)"
-        '; python_version not in "3.0,3.1,3.2" and extra == "export"'
+        'jinja2 (>=2.7,<2.8); python_version not in "3.0,3.1,3.2" and extra == "export"'
     )
 
-    dep = dependency_from_pep_508(name)
+    dep = Dependency.create_from_pep_508(name)
 
     assert dep.name == "jinja2"
     assert str(dep.constraint) == ">=2.7,<2.8"
@@ -172,103 +179,134 @@ def test_dependency_from_pep_508_with_not_in_op_marker():
     )
 
 
-def test_dependency_from_pep_508_with_git_url():
+def test_dependency_from_pep_508_with_git_url() -> None:
     name = "django-utils @ git+ssh://git@corp-gitlab.com/corp-utils.git@1.2"
 
-    dep = dependency_from_pep_508(name)
+    dep = Dependency.create_from_pep_508(name)
+
+    assert dep.name == "django-utils"
+    assert dep.is_vcs()
+    dep = cast(VCSDependency, dep)
+    assert dep.vcs == "git"
+    assert dep.source == "ssh://git@corp-gitlab.com/corp-utils.git"
+    assert dep.reference == "1.2"
+
+
+def test_dependency_from_pep_508_with_git_url_and_subdirectory() -> None:
+    name = (
+        "django-utils @"
+        " git+ssh://git@corp-gitlab.com/corp-utils.git@1.2#subdirectory=package-dir"
+    )
+
+    dep = Dependency.create_from_pep_508(name)
 
-    assert "django-utils" == dep.name
+    assert dep.name == "django-utils"
     assert dep.is_vcs()
-    assert "git" == dep.vcs
-    assert "ssh://git@corp-gitlab.com/corp-utils.git" == dep.source
-    assert "1.2" == dep.reference
+    dep = cast(VCSDependency, dep)
+    assert dep.vcs == "git"
+    assert dep.source == "ssh://git@corp-gitlab.com/corp-utils.git"
+    assert dep.reference == "1.2"
+    assert dep.directory == "package-dir"
 
 
-def test_dependency_from_pep_508_with_git_url_and_comment_and_extra():
+def test_dependency_from_pep_508_with_git_url_and_comment_and_extra() -> None:
     name = (
         "poetry @ git+https://github.com/python-poetry/poetry.git@b;ar;#egg=poetry"
         ' ; extra == "foo;"'
     )
 
-    dep = dependency_from_pep_508(name)
+    dep = Dependency.create_from_pep_508(name)
 
-    assert "poetry" == dep.name
+    assert dep.name == "poetry"
     assert dep.is_vcs()
-    assert "git" == dep.vcs
-    assert "https://github.com/python-poetry/poetry.git" == dep.source
-    assert "b;ar;" == dep.reference
+    dep = cast(VCSDependency, dep)
+    assert dep.vcs == "git"
+    assert dep.source == "https://github.com/python-poetry/poetry.git"
+    assert dep.reference == "b;ar;"
     assert dep.in_extras == ["foo;"]
 
 
-def test_dependency_from_pep_508_with_url():
+def test_dependency_from_pep_508_with_url() -> None:
     name = "django-utils @ https://example.com/django-utils-1.0.0.tar.gz"
 
-    dep = dependency_from_pep_508(name)
+    dep = Dependency.create_from_pep_508(name)
 
-    assert "django-utils" == dep.name
+    assert dep.name == "django-utils"
     assert dep.is_url()
-    assert "https://example.com/django-utils-1.0.0.tar.gz" == dep.url
+    dep = cast(URLDependency, dep)
+    assert dep.url == "https://example.com/django-utils-1.0.0.tar.gz"
 
 
-def test_dependency_from_pep_508_with_wheel_url():
+def test_dependency_from_pep_508_with_wheel_url() -> None:
     name = (
         "example_wheel @ https://example.com/example_wheel-14.0.2-py2.py3-none-any.whl"
     )
 
-    dep = dependency_from_pep_508(name)
+    dep = Dependency.create_from_pep_508(name)
 
-    assert "example-wheel" == dep.name
+    assert dep.name == "example-wheel"
     assert str(dep.constraint) == "14.0.2"
 
 
-def test_dependency_from_pep_508_with_python_full_version():
+def test_dependency_from_pep_508_with_python_full_version() -> None:
     name = (
         "requests (==2.18.0); "
         '(python_version >= "2.7" and python_version < "2.8") '
         'or (python_full_version >= "3.4" and python_full_version < "3.5.4")'
     )
-    dep = dependency_from_pep_508(name)
+    dep = Dependency.create_from_pep_508(name)
 
     assert dep.name == "requests"
     assert str(dep.constraint) == "2.18.0"
     assert dep.extras == frozenset()
     assert dep.python_versions == ">=2.7 <2.8 || >=3.4 <3.5.4"
-    assert str(dep.marker) == (
-        'python_version >= "2.7" and python_version < "2.8" '
+    assert (
+        str(dep.marker)
+        == 'python_version >= "2.7" and python_version < "2.8" '
         'or python_full_version >= "3.4" and python_full_version < "3.5.4"'
     )
 
 
-def test_dependency_from_pep_508_with_python_full_version_pep440_compatible_release_astrix():
+def test_dependency_from_pep_508_with_python_full_version_pep440_compatible_release_asterisk() -> (
+    None
+):
     name = 'pathlib2 ; python_version == "3.4.*" or python_version < "3"'
-    dep = dependency_from_pep_508(name)
+    dep = Dependency.create_from_pep_508(name)
 
     assert dep.name == "pathlib2"
     assert str(dep.constraint) == "*"
     assert dep.python_versions == "==3.4.* || <3"
 
 
-def test_dependency_from_pep_508_with_python_full_version_pep440_compatible_release_tilde():
+def test_dependency_from_pep_508_with_python_full_version_pep440_compatible_release_tilde() -> (
+    None
+):
     name = 'pathlib2 ; python_version ~= "3.4" or python_version < "3"'
-    dep = dependency_from_pep_508(name)
+    dep = Dependency.create_from_pep_508(name)
 
     assert dep.name == "pathlib2"
     assert str(dep.constraint) == "*"
     assert dep.python_versions == "~=3.4 || <3"
 
 
-def test_dependency_from_pep_508_should_not_produce_empty_constraints_for_correct_markers():
-    name = 'pytest-mypy; python_implementation != "PyPy" and python_version <= "3.10" and python_version > "3"'
-    dep = dependency_from_pep_508(name)
+def test_dependency_from_pep_508_should_not_produce_empty_constraints_for_correct_markers() -> (
+    None
+):
+    name = (
+        'pytest-mypy; python_implementation != "PyPy" and python_version <= "3.10" and'
+        ' python_version > "3"'
+    )
+    dep = Dependency.create_from_pep_508(name)
 
     assert dep.name == "pytest-mypy"
     assert str(dep.constraint) == "*"
-    assert dep.python_versions == "<=3.10 >3"
+    assert dep.python_versions == "<3.11 >=3"
     assert dep.python_constraint.allows(Version.parse("3.6"))
-    assert dep.python_constraint.allows(Version.parse("3.10"))
-    assert not dep.python_constraint.allows(Version.parse("3"))
+    assert dep.python_constraint.allows(Version.parse("3.10.4"))
+    assert dep.python_constraint.allows(Version.parse("3"))
     assert dep.python_constraint.allows(Version.parse("3.0.1"))
     assert (
         str(dep.marker)
-        == 'platform_python_implementation != "PyPy" and python_version <= "3.10" and python_version > "3"'
+        == 'platform_python_implementation != "PyPy" and python_version <= "3.10" and'
+        ' python_version > "3"'
     )
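
Note: the dominant change in this file is the migration from the removed
module-level helper dependency_from_pep_508 to the classmethod
Dependency.create_from_pep_508 (plus return-type annotations). A minimal
usage sketch, assuming only the vendored poetry-core API exercised by
these tests:

    from poetry.core.packages.dependency import Dependency

    dep = Dependency.create_from_pep_508(
        'requests (==2.18.0); python_version == "2.7"'
    )
    assert dep.name == "requests"
    assert str(dep.constraint) == "2.18.0"
    assert str(dep.marker) == 'python_version == "2.7"'
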
diff --git a/vendor/poetry-core/tests/packages/test_package.py b/vendor/poetry-core/tests/packages/test_package.py
index d717aa96..ffaec831 100644
--- a/vendor/poetry-core/tests/packages/test_package.py
+++ b/vendor/poetry-core/tests/packages/test_package.py
@@ -1,13 +1,40 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
+from __future__ import annotations
+
+import random
+
+from pathlib import Path
+from typing import cast
 
 import pytest
 
-from poetry.core.packages import Package
-from poetry.core.utils._compat import Path
+from poetry.core.factory import Factory
+from poetry.core.packages.dependency import Dependency
+from poetry.core.packages.dependency_group import DependencyGroup
+from poetry.core.packages.directory_dependency import DirectoryDependency
+from poetry.core.packages.file_dependency import FileDependency
+from poetry.core.packages.package import Package
+from poetry.core.packages.project_package import ProjectPackage
+from poetry.core.packages.url_dependency import URLDependency
+from poetry.core.packages.vcs_dependency import VCSDependency
+from poetry.core.semver.version import Version
+
+
+@pytest.fixture()
+def package_with_groups() -> Package:
+    package = Package("foo", "1.2.3")
+
+    optional_group = DependencyGroup("optional", optional=True)
+    optional_group.add_dependency(Factory.create_dependency("bam", "^3.0.0"))
 
+    package.add_dependency(Factory.create_dependency("bar", "^1.0.0"))
+    package.add_dependency(Factory.create_dependency("baz", "^1.1.0"))
+    package.add_dependency(Factory.create_dependency("bim", "^2.0.0", groups=["dev"]))
+    package.add_dependency_group(optional_group)
 
-def test_package_authors():
+    return package
+
+
+def test_package_authors() -> None:
     package = Package("foo", "0.1.0")
 
     package.authors.append("Sébastien Eustace <sebastien@eustace.io>")
@@ -19,7 +46,7 @@ def test_package_authors():
     assert package.author_email is None
 
 
-def test_package_authors_invalid():
+def test_package_authors_invalid() -> None:
     package = Package("foo", "0.1.0")
 
     package.authors.insert(0, "<John Doe")
 
 
-@pytest.mark.parametrize("category", ["main", "dev"])
-def test_package_add_dependency_vcs_category(category, f):
+@pytest.mark.parametrize("groups", [["main"], ["dev"]])
+def test_package_add_dependency_vcs_groups(groups: list[str], f: Factory) -> None:
     package = Package("foo", "0.1.0")
 
     dependency = package.add_dependency(
         f.create_dependency(
             "poetry",
             {"git": "https://github.com/python-poetry/poetry.git"},
-            category=category,
+            groups=groups,
         )
     )
-    assert dependency.category == category
+    assert dependency.groups == frozenset(groups)
 
 
-def test_package_add_dependency_vcs_category_default_main(f):
+def test_package_add_dependency_vcs_groups_default_main(f: Factory) -> None:
     package = Package("foo", "0.1.0")
 
     dependency = package.add_dependency(
@@ -54,12 +81,14 @@ def test_package_add_dependency_vcs_category_default_main(f):
             "poetry", {"git": "https://github.com/python-poetry/poetry.git"}
         )
     )
-    assert dependency.category == "main"
+    assert dependency.groups == frozenset(["main"])
 
 
-@pytest.mark.parametrize("category", ["main", "dev"])
+@pytest.mark.parametrize("groups", [["main"], ["dev"]])
 @pytest.mark.parametrize("optional", [True, False])
-def test_package_url_category_optional(category, optional, f):
+def test_package_url_groups_optional(
+    groups: list[str], optional: bool, f: Factory
+) -> None:
     package = Package("foo", "0.1.0")
 
     dependency = package.add_dependency(
@@ -69,20 +98,20 @@ def test_package_url_category_optional(category, optional, f):
                 "url": "https://github.com/python-poetry/poetry/releases/download/1.0.5/poetry-1.0.5-linux.tar.gz",
                 "optional": optional,
             },
-            category=category,
+            groups=groups,
         )
     )
-    assert dependency.category == category
+    assert dependency.groups == frozenset(groups)
     assert dependency.is_optional() == optional
 
 
-def test_package_equality_simple():
+def test_package_equality_simple() -> None:
     assert Package("foo", "0.1.0") == Package("foo", "0.1.0")
     assert Package("foo", "0.1.0") != Package("foo", "0.1.1")
     assert Package("bar", "0.1.0") != Package("foo", "0.1.0")
 
 
-def test_package_equality_source_type():
+def test_package_equality_source_type() -> None:
     a1 = Package("a", "0.1.0", source_type="file")
     a2 = Package(a1.name, a1.version, source_type="directory")
     a3 = Package(a1.name, a1.version, source_type=a1.source_type)
@@ -96,7 +125,7 @@ def test_package_equality_source_type():
     assert a2 != a4
 
 
-def test_package_equality_source_url():
+def test_package_equality_source_url() -> None:
     a1 = Package("a", "0.1.0", source_type="file", source_url="/some/path")
     a2 = Package(
         a1.name, a1.version, source_type=a1.source_type, source_url="/some/other/path"
@@ -114,7 +143,7 @@ def test_package_equality_source_url():
     assert a2 != a4
 
 
-def test_package_equality_source_reference():
+def test_package_equality_source_reference() -> None:
     a1 = Package(
         "a",
         "0.1.0",
@@ -146,7 +175,9 @@ def test_package_equality_source_reference():
     assert a2 != a4
 
 
-def test_package_resolved_reference_is_relevant_for_equality_only_if_present_for_both_packages():
+def test_package_resolved_reference_is_relevant_for_equality_only_if_present_for_both_packages() -> (
+    None
+):
     a1 = Package(
         "a",
         "0.1.0",
@@ -187,41 +218,72 @@ def test_package_resolved_reference_is_relevant_for_equality_only_if_present_for
     assert a2 == a4
 
 
-def test_complete_name():
-    assert "foo" == Package("foo", "1.2.3").complete_name
+def test_package_equality_source_subdirectory() -> None:
+    a1 = Package(
+        "a",
+        "0.1.0",
+        source_type="git",
+        source_url="https://foo.bar",
+        source_subdirectory="baz",
+    )
+    a2 = Package(
+        a1.name,
+        a1.version,
+        source_type="git",
+        source_url="https://foo.bar",
+        source_subdirectory="qux",
+    )
+    a3 = Package(
+        a1.name,
+        a1.version,
+        source_type="git",
+        source_url="https://foo.bar",
+        source_subdirectory="baz",
+    )
+    a4 = Package(a1.name, a1.version, source_type="git")
+
+    assert a1 == a3
+    assert a1 != a2
+    assert a2 != a3
+    assert a1 != a4
+    assert a2 != a4
+
+
+def test_complete_name() -> None:
+    assert Package("foo", "1.2.3").complete_name == "foo"
     assert (
-        "foo[bar,baz]" == Package("foo", "1.2.3", features=["baz", "bar"]).complete_name
+        Package("foo", "1.2.3", features=["baz", "bar"]).complete_name == "foo[bar,baz]"
     )
 
 
-def test_to_dependency():
+def test_to_dependency() -> None:
     package = Package("foo", "1.2.3")
     dep = package.to_dependency()
 
-    assert "foo" == dep.name
-    assert package.version == dep.constraint
+    assert dep.name == "foo"
+    assert dep.constraint == package.version
 
 
-def test_to_dependency_with_python_constraint():
+def test_to_dependency_with_python_constraint() -> None:
     package = Package("foo", "1.2.3")
     package.python_versions = ">=3.6"
     dep = package.to_dependency()
 
-    assert "foo" == dep.name
-    assert package.version == dep.constraint
-    assert ">=3.6" == dep.python_versions
+    assert dep.name == "foo"
+    assert dep.constraint == package.version
+    assert dep.python_versions == ">=3.6"
 
 
-def test_to_dependency_with_features():
+def test_to_dependency_with_features() -> None:
     package = Package("foo", "1.2.3", features=["baz", "bar"])
     dep = package.to_dependency()
 
-    assert "foo" == dep.name
-    assert package.version == dep.constraint
-    assert frozenset({"bar", "baz"}) == dep.features
+    assert dep.name == "foo"
+    assert dep.constraint == package.version
+    assert dep.features == frozenset({"bar", "baz"})
 
 
-def test_to_dependency_for_directory():
+def test_to_dependency_for_directory() -> None:
     path = Path(__file__).parent.parent.joinpath("fixtures/simple_project")
     package = Package(
         "foo",
@@ -232,16 +294,17 @@ def test_to_dependency_for_directory():
     )
     dep = package.to_dependency()
 
-    assert "foo" == dep.name
-    assert package.version == dep.constraint
-    assert frozenset({"bar", "baz"}) == dep.features
+    assert dep.name == "foo"
+    assert dep.constraint == package.version
+    assert dep.features == frozenset({"bar", "baz"})
     assert dep.is_directory()
-    assert path == dep.path
-    assert "directory" == dep.source_type
-    assert path.as_posix() == dep.source_url
+    dep = cast(DirectoryDependency, dep)
+    assert dep.path == path
+    assert dep.source_type == "directory"
+    assert dep.source_url == path.as_posix()
 
 
-def test_to_dependency_for_file():
+def test_to_dependency_for_file() -> None:
     path = Path(__file__).parent.parent.joinpath(
         "fixtures/distributions/demo-0.1.0.tar.gz"
     )
@@ -254,16 +317,17 @@ def test_to_dependency_for_file():
     )
     dep = package.to_dependency()
 
-    assert "foo" == dep.name
-    assert package.version == dep.constraint
-    assert frozenset({"bar", "baz"}) == dep.features
+    assert dep.name == "foo"
+    assert dep.constraint == package.version
+    assert dep.features == frozenset({"bar", "baz"})
     assert dep.is_file()
-    assert path == dep.path
-    assert "file" == dep.source_type
-    assert path.as_posix() == dep.source_url
+    dep = cast(FileDependency, dep)
+    assert dep.path == path
+    assert dep.source_type == "file"
+    assert dep.source_url == path.as_posix()
 
 
-def test_to_dependency_for_url():
+def test_to_dependency_for_url() -> None:
     package = Package(
         "foo",
         "1.2.3",
@@ -273,16 +337,44 @@ def test_to_dependency_for_url():
     )
     dep = package.to_dependency()
 
-    assert "foo" == dep.name
-    assert package.version == dep.constraint
-    assert frozenset({"bar", "baz"}) == dep.features
+    assert dep.name == "foo"
+    assert dep.constraint == package.version
+    assert dep.features == frozenset({"bar", "baz"})
     assert dep.is_url()
-    assert "https://example.com/path.tar.gz" == dep.url
-    assert "url" == dep.source_type
-    assert "https://example.com/path.tar.gz" == dep.source_url
+    dep = cast(URLDependency, dep)
+    assert dep.url == "https://example.com/path.tar.gz"
+    assert dep.source_type == "url"
+    assert dep.source_url == "https://example.com/path.tar.gz"
 
 
-def test_package_clone(f):
+def test_to_dependency_for_vcs() -> None:
+    package = Package(
+        "foo",
+        "1.2.3",
+        source_type="git",
+        source_url="https://github.com/foo/foo.git",
+        source_reference="master",
+        source_resolved_reference="123456",
+        source_subdirectory="baz",
+        features=["baz", "bar"],
+    )
+    dep = package.to_dependency()
+
+    assert dep.name == "foo"
+    assert dep.constraint == package.version
+    assert dep.features == frozenset({"bar", "baz"})
+    assert dep.is_vcs()
+    dep = cast(VCSDependency, dep)
+    assert dep.source_type == "git"
+    assert dep.source == "https://github.com/foo/foo.git"
+    assert dep.reference == "master"
+    assert dep.source_reference == "master"
+    assert dep.source_resolved_reference == "123456"
+    assert dep.directory == "baz"
+    assert dep.source_subdirectory == "baz"
+
+
+def test_package_clone(f: Factory) -> None:
     # TODO(nic): this test is not future-proof, in that any attributes added
     #  to the Package object and not filled out in this test setup might
     #  cause comparisons to match that otherwise should not.  A factory method
@@ -297,8 +389,11 @@ def test_package_clone(f):
         source_reference="fe4d2adabf3feb5d32b70ab5c105285fa713b10c",
         source_resolved_reference="fe4d2adabf3feb5d32b70ab5c105285fa713b10c",
         features=["abc", "def"],
+        develop=random.choice((True, False)),
     )
-    p.files = (["file1", "file2", "file3"],)
+    p.add_dependency(Factory.create_dependency("foo", "^1.2.3"))
+    p.add_dependency(Factory.create_dependency("foo", "^1.2.3", groups=["dev"]))
+    p.files = (["file1", "file2", "file3"],)  # type: ignore[assignment]
     p.homepage = "https://some.other.url"
     p.repository_url = "http://bug.farm"
     p.documentation_url = "http://lorem.ipsum/dolor/sit.amet"
@@ -306,3 +401,176 @@ def test_package_clone(f):
 
     assert p == p2
     assert p.__dict__ == p2.__dict__
+    assert len(p2.requires) == 1
+    assert len(p2.all_requires) == 2
+
+
+def test_dependency_groups(package_with_groups: Package) -> None:
+    assert len(package_with_groups.requires) == 2
+    assert len(package_with_groups.all_requires) == 4
+
+
+def test_without_dependency_groups(package_with_groups: Package) -> None:
+    package = package_with_groups.without_dependency_groups(["dev"])
+
+    assert len(package.requires) == 2
+    assert len(package.all_requires) == 3
+
+    package = package_with_groups.without_dependency_groups(["dev", "optional"])
+
+    assert len(package.requires) == 2
+    assert len(package.all_requires) == 2
+
+
+def test_with_dependency_groups(package_with_groups: Package) -> None:
+    package = package_with_groups.with_dependency_groups([])
+
+    assert len(package.requires) == 2
+    assert len(package.all_requires) == 3
+
+    package = package_with_groups.with_dependency_groups(["optional"])
+
+    assert len(package.requires) == 2
+    assert len(package.all_requires) == 4
+
+
+def test_without_optional_dependency_groups(package_with_groups: Package) -> None:
+    package = package_with_groups.without_optional_dependency_groups()
+
+    assert len(package.requires) == 2
+    assert len(package.all_requires) == 3
+
+
+def test_only_with_dependency_groups(package_with_groups: Package) -> None:
+    package = package_with_groups.with_dependency_groups(["dev"], only=True)
+
+    assert len(package.requires) == 0
+    assert len(package.all_requires) == 1
+
+    package = package_with_groups.with_dependency_groups(["dev", "optional"], only=True)
+
+    assert len(package.requires) == 0
+    assert len(package.all_requires) == 2
+
+    package = package_with_groups.with_dependency_groups(["main"], only=True)
+
+    assert len(package.requires) == 2
+    assert len(package.all_requires) == 2
+
+
+def test_get_readme_property_with_multiple_readme_files() -> None:
+    package = Package("foo", "0.1.0")
+
+    package.readmes = (Path("README.md"), Path("HISTORY.md"))
+    with pytest.deprecated_call():
+        assert package.readme == Path("README.md")
+
+
+def test_set_readme_property() -> None:
+    package = Package("foo", "0.1.0")
+
+    with pytest.deprecated_call():
+        package.readme = Path("README.md")
+
+    assert package.readmes == (Path("README.md"),)
+    with pytest.deprecated_call():
+        assert package.readme == Path("README.md")
+
+
+@pytest.mark.parametrize(
+    ("package", "dependency", "ignore_source_type", "result"),
+    [
+        (Package("foo", "0.1.0"), Dependency("foo", ">=0.1.0"), False, True),
+        (Package("foo", "0.1.0"), Dependency("foo", "<0.1.0"), False, False),
+        (
+            Package("foo", "0.1.0"),
+            Dependency("foo", ">=0.1.0", source_type="git"),
+            False,
+            False,
+        ),
+        (
+            Package("foo", "0.1.0"),
+            Dependency("foo", ">=0.1.0", source_type="git"),
+            True,
+            True,
+        ),
+        (
+            Package("foo", "0.1.0"),
+            Dependency("foo", "<0.1.0", source_type="git"),
+            True,
+            False,
+        ),
+    ],
+)
+def test_package_satisfies(
+    package: Package, dependency: Dependency, ignore_source_type: bool, result: bool
+) -> None:
+    assert package.satisfies(dependency, ignore_source_type) == result
+
+
+def test_package_pep592_default_not_yanked() -> None:
+    package = Package("foo", "1.0")
+
+    assert not package.yanked
+    assert package.yanked_reason == ""
+
+
+@pytest.mark.parametrize(
+    ("yanked", "expected_yanked", "expected_yanked_reason"),
+    [
+        (True, True, ""),
+        (False, False, ""),
+        ("the reason", True, "the reason"),
+        ("", True, ""),
+    ],
+)
+def test_package_pep592_yanked(
+    yanked: str | bool, expected_yanked: bool, expected_yanked_reason: str
+) -> None:
+    package = Package("foo", "1.0", yanked=yanked)
+
+    assert package.yanked == expected_yanked
+    assert package.yanked_reason == expected_yanked_reason
+
+
+def test_python_versions_are_normalized() -> None:
+    package = Package("foo", "1.2.3")
+    package.python_versions = ">3.6,<=3.10"
+
+    assert (
+        str(package.python_marker)
+        == 'python_version > "3.6" and python_version <= "3.10"'
+    )
+    assert str(package.python_constraint) == ">=3.7,<3.11"
+
+
+def test_cannot_update_package_version() -> None:
+    package = Package("foo", "1.2.3")
+    with pytest.raises(AttributeError):
+        package.version = "1.2.4"  # type: ignore[misc,assignment]
+
+
+def test_project_package_version_update_string() -> None:
+    package = ProjectPackage("foo", "1.2.3")
+    package.version = "1.2.4"  # type: ignore[assignment]
+    assert package.version.text == "1.2.4"
+
+
+def test_project_package_version_update_version() -> None:
+    package = ProjectPackage("foo", "1.2.3")
+    package.version = Version.parse("1.2.4")
+    assert package.version.text == "1.2.4"
+
+
+def test_project_package_hash_not_changed_when_version_is_changed() -> None:
+    package = ProjectPackage("foo", "1.2.3")
+    package_hash = hash(package)
+    package_clone = package.clone()
+    assert package == package_clone
+    assert hash(package) == hash(package_clone)
+
+    package.version = Version.parse("1.2.4")
+
+    assert hash(package) == package_hash, "Hash must not change!"
+    assert hash(package_clone) == package_hash
+    assert package != package_clone
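
Note: the new dependency-group tests rely on the split between
Package.requires (the implicit "main" group only) and Package.all_requires
(every group, including optional ones). A compact sketch of that split,
mirroring the package_with_groups fixture but with one dependency per
group (hypothetical package names, same API):

    from poetry.core.factory import Factory
    from poetry.core.packages.dependency_group import DependencyGroup
    from poetry.core.packages.package import Package

    package = Package("foo", "1.2.3")
    optional = DependencyGroup("optional", optional=True)
    optional.add_dependency(Factory.create_dependency("bam", "^3.0.0"))
    package.add_dependency(Factory.create_dependency("bar", "^1.0.0"))
    package.add_dependency(Factory.create_dependency("bim", "^2.0.0", groups=["dev"]))
    package.add_dependency_group(optional)

    assert len(package.requires) == 1      # main only: bar
    assert len(package.all_requires) == 3  # bar + bim + bam
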
diff --git a/vendor/poetry-core/tests/packages/test_specification.py b/vendor/poetry-core/tests/packages/test_specification.py
new file mode 100644
index 00000000..79ec3052
--- /dev/null
+++ b/vendor/poetry-core/tests/packages/test_specification.py
@@ -0,0 +1,117 @@
+from __future__ import annotations
+
+import pytest
+
+from poetry.core.packages.specification import PackageSpecification
+
+
+@pytest.mark.parametrize(
+    "spec1, spec2, expected",
+    [
+        (PackageSpecification("a"), PackageSpecification("a"), True),
+        (PackageSpecification("a", "type1"), PackageSpecification("a", "type1"), True),
+        (PackageSpecification("a", "type1"), PackageSpecification("a", "type2"), False),
+        (PackageSpecification("a"), PackageSpecification("a", "type1"), False),
+        (PackageSpecification("a", "type1"), PackageSpecification("a"), False),
+    ],
+)
+def test_is_same_package_source_type(
+    spec1: PackageSpecification,
+    spec2: PackageSpecification,
+    expected: bool,
+) -> None:
+    assert spec1.is_same_package_as(spec2) == expected
+
+
+@pytest.mark.parametrize(
+    ("source_type", "result"),
+    [
+        ("directory", True),
+        ("file", True),
+        ("url", True),
+        ("git", True),
+        ("legacy", False),
+        (None, False),
+    ],
+)
+def test_is_direct_origin(source_type: str | None, result: bool) -> None:
+    assert PackageSpecification("package", source_type).is_direct_origin() == result
+
+
+@pytest.mark.parametrize(
+    "spec1, spec2, expected",
+    [
+        (PackageSpecification("a"), PackageSpecification("a"), True),
+        (PackageSpecification("a"), PackageSpecification("b"), False),
+        (PackageSpecification("a", features=["x"]), PackageSpecification("a"), True),
+        (
+            PackageSpecification("a", features=["x"]),
+            PackageSpecification("a", features=["x"]),
+            True,
+        ),
+        (
+            PackageSpecification("a", features=["x"]),
+            PackageSpecification("b", features=["x"]),
+            False,
+        ),
+        (
+            PackageSpecification("a", features=["x"]),
+            PackageSpecification("a", features=["y"]),
+            False,
+        ),
+        (
+            PackageSpecification("a", features=["x"]),
+            PackageSpecification("a", features=["x", "y"]),
+            False,
+        ),
+        (
+            PackageSpecification("a", features=["x", "y"]),
+            PackageSpecification("a", features=["x"]),
+            True,
+        ),
+    ],
+)
+def test_specification_provides(
+    spec1: PackageSpecification,
+    spec2: PackageSpecification,
+    expected: bool,
+) -> None:
+    assert spec1.provides(spec2) == expected
+
+
+@pytest.mark.parametrize(
+    "spec1, spec2",
+    [
+        (
+            # nothing except name and features matters if there is no source type
+            PackageSpecification("a", None, "url1", "ref1", "resref1", "sub1"),
+            PackageSpecification("a", None, "url2", "ref2", "resref2", "sub2"),
+        ),
+        (
+            # ref does not matter if resolved ref is equal
+            PackageSpecification("a", "type", "url", "ref1", "resref1"),
+            PackageSpecification("a", "type", "url", "ref2", "resref1"),
+        ),
+        (
+            # resolved ref does not matter if no ref
+            PackageSpecification("a", "type", "url", None, "resref1"),
+            PackageSpecification("a", "type", "url", None, "resref2"),
+        ),
+        (
+            # resolved ref unset when ref starts with other
+            PackageSpecification("a", "type", "url", "ref/a", "resref1"),
+            PackageSpecification("a", "type", "url", "ref", None),
+        ),
+        (
+            # resolved ref unset when ref starts with other
+            PackageSpecification("a", "type", "url", "ref/a", None),
+            PackageSpecification("a", "type", "url", "ref", "resref2"),
+        ),
+    ],
+)
+def test_equal_specifications_have_same_hash(
+    spec1: PackageSpecification, spec2: PackageSpecification
+) -> None:
+    assert spec1 == spec2
+    assert spec2 == spec1
+    assert hash(spec1) == hash(spec2)
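
Note: the provides cases above encode a subset rule: a specification with
more features satisfies a request for fewer, never the reverse. Restated
as a small sketch using the same constructor the tests use:

    from poetry.core.packages.specification import PackageSpecification

    base = PackageSpecification("a")
    with_x = PackageSpecification("a", features=["x"])
    with_xy = PackageSpecification("a", features=["x", "y"])

    assert with_x.provides(base)      # extra features still satisfy a bare request
    assert with_xy.provides(with_x)   # feature superset provides the subset
    assert not with_x.provides(with_xy)
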
diff --git a/vendor/poetry-core/tests/packages/test_url_dependency.py b/vendor/poetry-core/tests/packages/test_url_dependency.py
index 93423d94..08697f85 100644
--- a/vendor/poetry-core/tests/packages/test_url_dependency.py
+++ b/vendor/poetry-core/tests/packages/test_url_dependency.py
@@ -1,23 +1,80 @@
-from poetry.core.packages import URLDependency
+from __future__ import annotations
+
+import pytest
+
+from poetry.core.packages.url_dependency import URLDependency
 from poetry.core.version.markers import SingleMarker
 
 
-def test_to_pep_508():
+def test_to_pep_508() -> None:
     dependency = URLDependency(
         "pytorch",
         "https://download.pytorch.org/whl/cpu/torch-1.5.1%2Bcpu-cp38-cp38-linux_x86_64.whl",
     )
 
-    expected = "pytorch @ https://download.pytorch.org/whl/cpu/torch-1.5.1%2Bcpu-cp38-cp38-linux_x86_64.whl"
+    expected = (
+        "pytorch @"
+        " https://download.pytorch.org/whl/cpu/torch-1.5.1%2Bcpu-cp38-cp38-linux_x86_64.whl"
+    )
+    assert dependency.to_pep_508() == expected
+
+
+def test_to_pep_508_with_extras() -> None:
+    dependency = URLDependency(
+        "pytorch",
+        "https://download.pytorch.org/whl/cpu/torch-1.5.1%2Bcpu-cp38-cp38-linux_x86_64.whl",
+        extras=["foo", "bar"],
+    )
+
+    expected = (
+        "pytorch[bar,foo] @"
+        " https://download.pytorch.org/whl/cpu/torch-1.5.1%2Bcpu-cp38-cp38-linux_x86_64.whl"
+    )
-    assert expected == dependency.to_pep_508()
+    assert dependency.to_pep_508() == expected
 
 
-def test_to_pep_508_with_marker():
+def test_to_pep_508_with_marker() -> None:
     dependency = URLDependency(
         "pytorch",
         "https://download.pytorch.org/whl/cpu/torch-1.5.1%2Bcpu-cp38-cp38-linux_x86_64.whl",
     )
     dependency.marker = SingleMarker("sys.platform", "linux")
 
-    expected = 'pytorch @ https://download.pytorch.org/whl/cpu/torch-1.5.1%2Bcpu-cp38-cp38-linux_x86_64.whl ; sys_platform == "linux"'
-    assert expected == dependency.to_pep_508()
+    expected = (
+        "pytorch @"
+        " https://download.pytorch.org/whl/cpu/torch-1.5.1%2Bcpu-cp38-cp38-linux_x86_64.whl"
+        ' ; sys_platform == "linux"'
+    )
+    assert dependency.to_pep_508() == expected
+
+
+@pytest.mark.parametrize(
+    "name,url,extras,constraint,expected",
+    [
+        (
+            "example",
+            "https://example.org/example.whl",
+            None,
+            None,
+            "example (*) @ https://example.org/example.whl",
+        ),
+        (
+            "example",
+            "https://example.org/example.whl",
+            ["foo"],
+            "1.2",
+            "example[foo] (1.2) @ https://example.org/example.whl",
+        ),
+    ],
+)
+def test_url_dependency_string_representation(
+    name: str,
+    url: str,
+    extras: list[str] | None,
+    constraint: str | None,
+    expected: str,
+) -> None:
+    dependency = URLDependency(name=name, url=url, extras=extras)
+    if constraint:
+        dependency.constraint = constraint  # type: ignore[assignment]
+    assert str(dependency) == expected
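
Note: the new string-representation test pins the format
"name[extras] (constraint) @ url", with "(*)" for an unconstrained
dependency. The first parametrized case as a standalone sketch:

    from poetry.core.packages.url_dependency import URLDependency

    dep = URLDependency("example", "https://example.org/example.whl")
    assert str(dep) == "example (*) @ https://example.org/example.whl"
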
diff --git a/vendor/poetry-core/tests/packages/test_vcs_dependency.py b/vendor/poetry-core/tests/packages/test_vcs_dependency.py
index 188350c6..97aa666c 100644
--- a/vendor/poetry-core/tests/packages/test_vcs_dependency.py
+++ b/vendor/poetry-core/tests/packages/test_vcs_dependency.py
@@ -1,76 +1,147 @@
+from __future__ import annotations
+
+from typing import Any
+
 import pytest
 
 from poetry.core.packages.vcs_dependency import VCSDependency
 
 
-def test_to_pep_508():
+@pytest.mark.parametrize(
+    "kwargs, expected",
+    [
+        ({}, "poetry @ git+https://github.com/python-poetry/poetry.git"),
+        (
+            {"extras": ["foo"]},
+            "poetry[foo] @ git+https://github.com/python-poetry/poetry.git",
+        ),
+        (
+            {"extras": ["foo", "bar"]},
+            "poetry[bar,foo] @ git+https://github.com/python-poetry/poetry.git",
+        ),
+        (
+            {"branch": "main"},
+            "poetry @ git+https://github.com/python-poetry/poetry.git@main",
+        ),
+        (
+            {"tag": "1.0"},
+            "poetry @ git+https://github.com/python-poetry/poetry.git@1.0",
+        ),
+        (
+            {"rev": "12345"},
+            "poetry @ git+https://github.com/python-poetry/poetry.git@12345",
+        ),
+        (
+            {"directory": "sub"},
+            "poetry @ git+https://github.com/python-poetry/poetry.git#subdirectory=sub",
+        ),
+        (
+            {"branch": "main", "directory": "sub"},
+            "poetry @ git+https://github.com/python-poetry/poetry.git"
+            "@main#subdirectory=sub",
+        ),
+    ],
+)
+def test_to_pep_508(kwargs: dict[str, Any], expected: str) -> None:
     dependency = VCSDependency(
-        "poetry", "git", "https://github.com/python-poetry/poetry.git"
+        "poetry", "git", "https://github.com/python-poetry/poetry.git", **kwargs
     )
 
-    expected = "poetry @ git+https://github.com/python-poetry/poetry.git@master"
-
-    assert expected == dependency.to_pep_508()
+    assert dependency.to_pep_508() == expected
 
 
-def test_to_pep_508_ssh():
+def test_to_pep_508_ssh() -> None:
     dependency = VCSDependency("poetry", "git", "git@github.com:sdispater/poetry.git")
 
-    expected = "poetry @ git+ssh://git@github.com/sdispater/poetry.git@master"
+    expected = "poetry @ git+ssh://git@github.com/sdispater/poetry.git"
 
-    assert expected == dependency.to_pep_508()
+    assert dependency.to_pep_508() == expected
 
 
-def test_to_pep_508_with_extras():
-    dependency = VCSDependency(
-        "poetry", "git", "https://github.com/python-poetry/poetry.git", extras=["foo"]
-    )
-
-    expected = "poetry[foo] @ git+https://github.com/python-poetry/poetry.git@master"
-
-    assert expected == dependency.to_pep_508()
-
-
-def test_to_pep_508_in_extras():
+def test_to_pep_508_in_extras() -> None:
     dependency = VCSDependency(
         "poetry", "git", "https://github.com/python-poetry/poetry.git"
     )
     dependency.in_extras.append("foo")
 
-    expected = 'poetry @ git+https://github.com/python-poetry/poetry.git@master ; extra == "foo"'
-    assert expected == dependency.to_pep_508()
+    expected = (
+        'poetry @ git+https://github.com/python-poetry/poetry.git ; extra == "foo"'
+    )
+    assert dependency.to_pep_508() == expected
 
     dependency = VCSDependency(
         "poetry", "git", "https://github.com/python-poetry/poetry.git", extras=["bar"]
     )
     dependency.in_extras.append("foo")
 
-    expected = 'poetry[bar] @ git+https://github.com/python-poetry/poetry.git@master ; extra == "foo"'
+    expected = (
+        'poetry[bar] @ git+https://github.com/python-poetry/poetry.git ; extra == "foo"'
+    )
 
-    assert expected == dependency.to_pep_508()
+    assert dependency.to_pep_508() == expected
 
     dependency = VCSDependency(
         "poetry", "git", "https://github.com/python-poetry/poetry.git", "b;ar;"
     )
     dependency.in_extras.append("foo;")
 
-    expected = 'poetry @ git+https://github.com/python-poetry/poetry.git@b;ar; ; extra == "foo;"'
+    expected = (
+        "poetry @ git+https://github.com/python-poetry/poetry.git@b;ar; ; extra =="
+        ' "foo;"'
+    )
 
-    assert expected == dependency.to_pep_508()
+    assert dependency.to_pep_508() == expected
+
+
+@pytest.mark.parametrize(
+    "name,source,branch,extras,constraint,expected",
+    [
+        (
+            "example",
+            "https://example.org/example.git",
+            "main",
+            None,
+            None,
+            "example (*) @ git+https://example.org/example.git@main",
+        ),
+        (
+            "example",
+            "https://example.org/example.git",
+            "main",
+            ["foo"],
+            "1.2",
+            "example[foo] (1.2) @ git+https://example.org/example.git@main",
+        ),
+    ],
+)
+def test_vcs_dependency_string_representation(
+    name: str,
+    source: str,
+    branch: str,
+    extras: list[str] | None,
+    constraint: str | None,
+    expected: str,
+) -> None:
+    dependency = VCSDependency(
+        name=name, vcs="git", source=source, branch=branch, extras=extras
+    )
+    if constraint:
+        dependency.constraint = constraint  # type: ignore[assignment]
+    assert str(dependency) == expected
 
 
-@pytest.mark.parametrize("category", ["main", "dev"])
-def test_category(category):
+@pytest.mark.parametrize("groups", [["main"], ["dev"]])
+def test_category(groups: list[str]) -> None:
     dependency = VCSDependency(
         "poetry",
         "git",
         "https://github.com/python-poetry/poetry.git",
-        category=category,
+        groups=groups,
     )
-    assert category == dependency.category
+    assert dependency.groups == frozenset(groups)
 
 
-def test_vcs_dependency_can_have_resolved_reference_specified():
+def test_vcs_dependency_can_have_resolved_reference_specified() -> None:
     dependency = VCSDependency(
         "poetry",
         "git",
@@ -82,3 +153,22 @@ def test_vcs_dependency_can_have_resolved_reference_specified():
     assert dependency.branch == "develop"
     assert dependency.source_reference == "develop"
     assert dependency.source_resolved_reference == "123456"
+
+
+def test_vcs_dependencies_are_equal_if_resolved_references_match() -> None:
+    dependency1 = VCSDependency(
+        "poetry",
+        "git",
+        "https://github.com/python-poetry/poetry.git",
+        branch="develop",
+        resolved_rev="123456",
+    )
+    dependency2 = VCSDependency(
+        "poetry",
+        "git",
+        "https://github.com/python-poetry/poetry.git",
+        rev="123",
+        resolved_rev="123456",
+    )
+
+    assert dependency1 == dependency2
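
Note: with the branch/tag/rev/directory cases above, the PEP 508 form is
assembled as "name[extras] @ git+URL@REF#subdirectory=DIR", each part
appearing only when set (and, per the new default, no ref at all when
none is given). One case from the table, spelled out:

    from poetry.core.packages.vcs_dependency import VCSDependency

    dep = VCSDependency(
        "poetry",
        "git",
        "https://github.com/python-poetry/poetry.git",
        branch="main",
        directory="sub",
    )
    assert dep.to_pep_508() == (
        "poetry @ git+https://github.com/python-poetry/poetry.git"
        "@main#subdirectory=sub"
    )
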
diff --git a/vendor/poetry-core/tests/packages/utils/test_utils.py b/vendor/poetry-core/tests/packages/utils/test_utils.py
index 7250e5f5..f64550e0 100644
--- a/vendor/poetry-core/tests/packages/utils/test_utils.py
+++ b/vendor/poetry-core/tests/packages/utils/test_utils.py
@@ -1,43 +1,163 @@
+from __future__ import annotations
+
+from pathlib import Path
+
 import pytest
 
 from poetry.core.packages.utils.utils import convert_markers
 from poetry.core.packages.utils.utils import get_python_constraint_from_marker
-from poetry.core.semver import parse_constraint
+from poetry.core.packages.utils.utils import is_python_project
+from poetry.core.semver.helpers import parse_constraint
 from poetry.core.version.markers import parse_marker
 
 
-def test_convert_markers():
-    marker = parse_marker(
-        'sys_platform == "win32" and python_version < "3.6" '
-        'or sys_platform == "win32" and python_version < "3.6" and python_version >= "3.3" '
-        'or sys_platform == "win32" and python_version < "3.3"'
-    )
-
-    converted = convert_markers(marker)
-
-    assert converted["python_version"] == [
-        [("<", "3.6")],
-        [("<", "3.6"), (">=", "3.3")],
-        [("<", "3.3")],
-    ]
-
-    marker = parse_marker('python_version == "2.7" or python_version == "2.6"')
-    converted = convert_markers(marker)
-
-    assert converted["python_version"] == [[("==", "2.7")], [("==", "2.6")]]
+@pytest.mark.parametrize(
+    "marker, expected",
+    [
+        (
+            'sys_platform == "win32" and python_version < "3.6" or sys_platform =='
+            ' "linux" and python_version < "3.6" and python_version >= "3.3" or'
+            ' sys_platform == "darwin" and python_version < "3.3"',
+            {
+                "python_version": [
+                    [("<", "3.6")],
+                    [("<", "3.6"), (">=", "3.3")],
+                    [("<", "3.3")],
+                ],
+                "sys_platform": [
+                    [("==", "win32")],
+                    [("==", "linux")],
+                    [("==", "darwin")],
+                ],
+            },
+        ),
+        (
+            'sys_platform == "win32" and python_version < "3.6" or sys_platform =='
+            ' "win32" and python_version < "3.6" and python_version >= "3.3" or'
+            ' sys_platform == "win32" and python_version < "3.3"',
+            {"python_version": [[("<", "3.6")]], "sys_platform": [[("==", "win32")]]},
+        ),
+        (
+            'python_version == "2.7" or python_version == "2.6"',
+            {"python_version": [[("==", "2.7")], [("==", "2.6")]]},
+        ),
+        (
+            '(python_version < "2.7" or python_full_version >= "3.0.0") and'
+            ' python_full_version < "3.6.0"',
+            {"python_version": [[("<", "2.7")], [(">=", "3.0.0"), ("<", "3.6.0")]]},
+        ),
+        (
+            '(python_version < "2.7" or python_full_version >= "3.0.0") and'
+            ' extra == "foo"',
+            {
+                "extra": [[("==", "foo")]],
+                "python_version": [[("<", "2.7")], [(">=", "3.0.0")]],
+            },
+        ),
+        (
+            'python_version >= "3.9" or sys_platform == "linux"',
+            {
+                "python_version": [[(">=", "3.9")], []],
+                "sys_platform": [[], [("==", "linux")]],
+            },
+        ),
+        (
+            'python_version >= "3.9" and sys_platform == "linux"',
+            {
+                "python_version": [[(">=", "3.9")]],
+                "sys_platform": [[("==", "linux")]],
+            },
+        ),
+    ],
+)
+def test_convert_markers(
+    marker: str, expected: dict[str, list[list[tuple[str, str]]]]
+) -> None:
+    parsed_marker = parse_marker(marker)
+    converted = convert_markers(parsed_marker)
+    assert converted == expected
 
 
 @pytest.mark.parametrize(
     ["marker", "constraint"],
     [
+        # ==
+        ('python_version == "3.6"', "~3.6"),
+        ('python_version == "3.6.*"', "==3.6.*"),
+        ('python_version == "3.6.* "', "==3.6.*"),
+        # !=
+        ('python_version != "3.6"', "!=3.6.*"),
+        ('python_version != "3.6.*"', "!=3.6.*"),
+        ('python_version != "3.6.* "', "!=3.6.*"),
+        # <, <=, >, >= precision 1
+        ('python_version < "3"', "<3"),
+        ('python_version <= "3"', "<3"),
+        ('python_version > "3"', ">=3"),
+        ('python_version >= "3"', ">=3"),
+        # <, <=, >, >= precision 2
+        ('python_version < "3.6"', "<3.6"),
+        ('python_version <= "3.6"', "<3.7"),
+        ('python_version > "3.6"', ">=3.7"),
+        ('python_version >= "3.6"', ">=3.6"),
+        # in, not in
+        ('python_version in "2.7, 3.6"', ">=2.7.0,<2.8.0 || >=3.6.0,<3.7.0"),
+        ('python_version in "2.7, 3.6.2"', ">=2.7.0,<2.8.0 || 3.6.2"),
+        ('python_version not in "2.7, 3.6"', "<2.7.0 || >=2.8.0,<3.6.0 || >=3.7.0"),
+        ('python_version not in "2.7, 3.6.2"', "<2.7.0 || >=2.8.0,<3.6.2 || >3.6.2"),
+        # and
         ('python_version >= "3.6" and python_full_version < "4.0"', ">=3.6, <4.0"),
         (
             'python_full_version >= "3.6.1" and python_full_version < "4.0.0"',
             ">=3.6.1, <4.0.0",
         ),
+        # or
+        ('python_version < "3.6" or python_version >= "3.9"', "<3.6 || >=3.9"),
+        # and or
+        (
+            'python_version >= "3.7" and python_version < "3.8" or python_version >='
+            ' "3.9" and python_version < "3.10"',
+            ">=3.7,<3.8 || >=3.9,<3.10",
+        ),
+        (
+            '(python_version < "2.7" or python_full_version >= "3.0.0") and'
+            ' python_full_version < "3.6.0"',
+            "<2.7 || >=3.0,<3.6",
+        ),
+        # no python_version
+        ('sys_platform == "linux"', "*"),
+        # no relevant python_version
+        ('python_version >= "3.9" or sys_platform == "linux"', "*"),
+        # relevant python_version
+        ('python_version >= "3.9" and sys_platform == "linux"', ">=3.9"),
+        # exclude specific version
+        (
+            'python_version >= "3.5" and python_full_version != "3.7.6"',
+            ">=3.5,<3.7.6 || >3.7.6",
+        ),
+        # full exact version
+        (
+            'python_full_version == "3.6.1"',
+            "3.6.1",
+        ),
+    ],
+)
+def test_get_python_constraint_from_marker(marker: str, constraint: str) -> None:
+    marker_parsed = parse_marker(marker)
+    constraint_parsed = parse_constraint(constraint)
+    assert get_python_constraint_from_marker(marker_parsed) == constraint_parsed
+
+
+@pytest.mark.parametrize(
+    ("fixture", "result"),
+    [
+        ("simple_project", True),
+        ("project_with_setup_cfg_only", True),
+        ("project_with_setup", True),
+        ("project_with_pep517_non_poetry", True),
+        ("project_without_pep517", False),
+        ("does_not_exist", False),
     ],
 )
-def test_get_python_constraint_from_marker(marker, constraint):
-    marker = parse_marker(marker)
-    constraint = parse_constraint(constraint)
-    assert constraint == get_python_constraint_from_marker(marker)
+def test_package_utils_is_python_project(fixture: str, result: bool) -> None:
+    path = Path(__file__).parent.parent.parent / "fixtures" / fixture
+    assert is_python_project(path) == result
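
Note: the constraint table above documents how python_version markers are
widened to two-segment precision: a bound such as <= "3.6" covers every
3.6.x patch release, so it converts to <3.7 rather than <=3.6. One row of
the table as a runnable sketch, using the same imports as the test module:

    from poetry.core.packages.utils.utils import get_python_constraint_from_marker
    from poetry.core.semver.helpers import parse_constraint
    from poetry.core.version.markers import parse_marker

    marker = parse_marker('python_version <= "3.6"')
    assert get_python_constraint_from_marker(marker) == parse_constraint("<3.7")
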
diff --git a/vendor/poetry-core/tests/packages/utils/test_utils_link.py b/vendor/poetry-core/tests/packages/utils/test_utils_link.py
index fb39f93f..b4958754 100644
--- a/vendor/poetry-core/tests/packages/utils/test_utils_link.py
+++ b/vendor/poetry-core/tests/packages/utils/test_utils_link.py
@@ -1,21 +1,145 @@
+from __future__ import annotations
+
 import uuid
 
 from hashlib import sha256
 
-from poetry.core.packages import Link
+import pytest
+
+from poetry.core.packages.utils.link import Link
+
+
+def make_checksum() -> str:
+    return sha256(str(uuid.uuid4()).encode()).hexdigest()
+
+
+@pytest.fixture()
+def file_checksum() -> str:
+    return make_checksum()
+
+
+@pytest.fixture()
+def metadata_checksum() -> str:
+    return make_checksum()
 
 
-def make_url(ext):
-    checksum = sha256(str(uuid.uuid4()).encode())
+def make_url(
+    ext: str, file_checksum: str | None = None, metadata_checksum: str | None = None
+) -> Link:
+    file_checksum = file_checksum or make_checksum()
     return Link(
         "https://files.pythonhosted.org/packages/16/52/dead/"
-        "demo-1.0.0.{}#sha256={}".format(ext, checksum)
+        f"demo-1.0.0.{ext}#sha256={file_checksum}",
+        metadata=f"sha256={metadata_checksum}" if metadata_checksum else None,
     )
 
 
-def test_package_link_is_checks():
-    assert make_url("egg").is_egg
-    assert make_url("tar.gz").is_sdist
-    assert make_url("zip").is_sdist
-    assert make_url("exe").is_wininst
-    assert make_url("cp36-cp36m-manylinux1_x86_64.whl").is_wheel
+def test_package_link_hash(file_checksum: str) -> None:
+    link = make_url(ext="whl", file_checksum=file_checksum)
+    assert link.hash_name == "sha256"
+    assert link.hash == file_checksum
+    assert link.show_url == "demo-1.0.0.whl"
+
+    # this is a legacy PEP 503 link; no PEP 658 metadata hash is present
+    assert not link.has_metadata
+    assert not link.metadata_url
+    assert not link.metadata_hash
+    assert not link.metadata_hash_name
+
+
+@pytest.mark.parametrize(
+    ("ext", "check"),
+    [
+        ("whl", "wheel"),
+        ("egg", "egg"),
+        ("tar.gz", "sdist"),
+        ("zip", "sdist"),
+        ("cp36-cp36m-manylinux1_x86_64.whl", "wheel"),
+    ],
+)
+def test_package_link_is_checks(ext: str, check: str) -> None:
+    link = make_url(ext=ext)
+    assert getattr(link, f"is_{check}")
+
+
+@pytest.mark.parametrize(
+    ("ext", "has_metadata"),
+    [("whl", True), ("egg", False), ("tar.gz", True), ("zip", True)],
+)
+def test_package_link_pep658(
+    ext: str, has_metadata: bool, metadata_checksum: str
+) -> None:
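+    # of the formats exercised here, only eggs expose no PEP 658 metadata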
+    link = make_url(ext=ext, metadata_checksum=metadata_checksum)
+
+    if has_metadata:
+        assert link.has_metadata
+        assert link.metadata_url == f"{link.url_without_fragment}.metadata"
+        assert link.metadata_hash == metadata_checksum
+        assert link.metadata_hash_name == "sha256"
+    else:
+        assert not link.has_metadata
+        assert not link.metadata_url
+        assert not link.metadata_hash
+        assert not link.metadata_hash_name
+
+
+def test_package_link_pep658_no_default_metadata() -> None:
+    link = make_url(ext="whl")
+
+    assert not link.has_metadata
+    assert not link.metadata_url
+    assert not link.metadata_hash
+    assert not link.metadata_hash_name
+
+
+@pytest.mark.parametrize(
+    ("metadata", "has_metadata"),
+    [
+        ("true", True),
+        ("false", False),
+        ("", False),
+    ],
+)
+def test_package_link_pep658_non_hash_metadata_value(
+    file_checksum: str, metadata: str | bool, has_metadata: bool
+) -> None:
+    link = Link(
+        "https://files.pythonhosted.org/packages/16/52/dead/"
+        f"demo-1.0.0.whl#sha256={file_checksum}",
+        metadata=metadata,
+    )
+
+    if has_metadata:
+        assert link.has_metadata
+        assert link.metadata_url == f"{link.url_without_fragment}.metadata"
+    else:
+        assert not link.has_metadata
+        assert not link.metadata_url
+
+    assert not link.metadata_hash
+    assert not link.metadata_hash_name
+
+
+def test_package_link_pep592_default_not_yanked() -> None:
+    link = make_url(ext="whl")
+
+    assert not link.yanked
+    assert link.yanked_reason == ""
+
+
+@pytest.mark.parametrize(
+    ("yanked", "expected_yanked", "expected_yanked_reason"),
+    [
+        (True, True, ""),
+        (False, False, ""),
+        ("the reason", True, "the reason"),
+        ("", True, ""),
+    ],
+)
+def test_package_link_pep592_yanked(
+    yanked: str | bool, expected_yanked: bool, expected_yanked_reason: str
+) -> None:
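+    # per PEP 592, "yanked" may be a bool or a reason string; any string,
+    # even an empty one, marks the link as yanked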
+    link = Link("https://example.org", yanked=yanked)
+
+    assert link.yanked == expected_yanked
+    assert link.yanked_reason == expected_yanked_reason
diff --git a/vendor/poetry-core/tests/packages/utils/test_utils_urls.py b/vendor/poetry-core/tests/packages/utils/test_utils_urls.py
index e7fa0cc8..6480c78a 100644
--- a/vendor/poetry-core/tests/packages/utils/test_utils_urls.py
+++ b/vendor/poetry-core/tests/packages/utils/test_utils_urls.py
@@ -1,26 +1,27 @@
 # These test scenarios are ported over from pypa/pip
 # https://raw.githubusercontent.com/pypa/pip/b447f438df08303f4f07f2598f190e73876443ba/tests/unit/test_urls.py
 
+from __future__ import annotations
+
 import sys
 
-import pytest
+from pathlib import Path
 
-from six.moves.urllib.request import pathname2url  # noqa
+import pytest
 
-from poetry.core.packages import path_to_url
-from poetry.core.packages import url_to_path
-from poetry.core.utils._compat import Path
+from poetry.core.packages.utils.utils import path_to_url
+from poetry.core.packages.utils.utils import url_to_path
 
 
 @pytest.mark.skipif("sys.platform == 'win32'")
-def test_path_to_url_unix():
+def test_path_to_url_unix() -> None:
     assert path_to_url("/tmp/file") == "file:///tmp/file"
     path = Path(".") / "file"
     assert path_to_url("file") == "file://" + path.absolute().as_posix()
 
 
 @pytest.mark.skipif("sys.platform != 'win32'")
-def test_path_to_url_win():
+def test_path_to_url_win() -> None:
     assert path_to_url("c:/tmp/file") == "file:///c:/tmp/file"
     assert path_to_url("c:\\tmp\\file") == "file:///c:/tmp/file"
     assert path_to_url(r"\\unc\as\path") == "file://unc/as/path"
@@ -41,7 +42,7 @@ def test_path_to_url_win():
         ("file:///c:/tmp/file", r"C:\tmp\file", "/c:/tmp/file"),
     ],
 )
-def test_url_to_path(url, win_expected, non_win_expected):
+def test_url_to_path(url: str, win_expected: str, non_win_expected: str | None) -> None:
     if sys.platform == "win32":
         expected_path = win_expected
     else:
@@ -55,7 +56,7 @@ def test_url_to_path(url, win_expected, non_win_expected):
 
 
 @pytest.mark.skipif("sys.platform != 'win32'")
-def test_url_to_path_path_to_url_symmetry_win():
+def test_url_to_path_path_to_url_symmetry_win() -> None:
     path = r"C:\tmp\file"
     assert url_to_path(path_to_url(path)) == Path(path)
 
diff --git a/vendor/poetry-core/tests/pyproject/conftest.py b/vendor/poetry-core/tests/pyproject/conftest.py
index 4d42a193..82ff2198 100644
--- a/vendor/poetry-core/tests/pyproject/conftest.py
+++ b/vendor/poetry-core/tests/pyproject/conftest.py
@@ -1,11 +1,16 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
 import pytest
 
-from poetry.core.utils._compat import Path
-from poetry.core.utils._compat import decode
+
+if TYPE_CHECKING:
+    from pathlib import Path
 
 
 @pytest.fixture
-def pyproject_toml(tmp_path):  # type: (Path) -> Path
+def pyproject_toml(tmp_path: Path) -> Path:
     path = tmp_path / "pyproject.toml"
     with path.open(mode="w"):
         pass
@@ -13,19 +18,19 @@ def pyproject_toml(tmp_path):  # type: (Path) -> Path
 
 
 @pytest.fixture
-def build_system_section(pyproject_toml):  # type: (Path) -> str
+def build_system_section(pyproject_toml: Path) -> str:
     content = """
 [build-system]
 requires = ["poetry-core"]
 build-backend = "poetry.core.masonry.api"
 """
     with pyproject_toml.open(mode="a") as f:
-        f.write(decode(content))
+        f.write(content)
     return content
 
 
 @pytest.fixture
-def poetry_section(pyproject_toml):  # type: (Path) -> str
+def poetry_section(pyproject_toml: Path) -> str:
     content = """
 [tool.poetry]
 name = "poetry"
@@ -34,5 +39,5 @@ def poetry_section(pyproject_toml):  # type: (Path) -> str
 python = "^3.5"
 """
     with pyproject_toml.open(mode="a") as f:
-        f.write(decode(content))
+        f.write(content)
     return content
diff --git a/vendor/poetry-core/tests/pyproject/test_pyproject_toml.py b/vendor/poetry-core/tests/pyproject/test_pyproject_toml.py
index 4fc3eab7..434151e1 100644
--- a/vendor/poetry-core/tests/pyproject/test_pyproject_toml.py
+++ b/vendor/poetry-core/tests/pyproject/test_pyproject_toml.py
@@ -1,21 +1,27 @@
+from __future__ import annotations
+
 import uuid
 
+from pathlib import Path
+from typing import Any
+
 import pytest
 
 from tomlkit.toml_document import TOMLDocument
 from tomlkit.toml_file import TOMLFile
 
-from poetry.core.pyproject import PyProjectException
-from poetry.core.pyproject import PyProjectTOML
-from poetry.core.utils._compat import Path  # noqa
+from poetry.core.pyproject.exceptions import PyProjectException
+from poetry.core.pyproject.toml import PyProjectTOML
 
 
-def test_pyproject_toml_simple(pyproject_toml, build_system_section, poetry_section):
+def test_pyproject_toml_simple(
+    pyproject_toml: Path, build_system_section: str, poetry_section: str
+) -> None:
     data = TOMLFile(pyproject_toml.as_posix()).read()
     assert PyProjectTOML(pyproject_toml).data == data
 
 
-def test_pyproject_toml_no_poetry_config(pyproject_toml):
+def test_pyproject_toml_no_poetry_config(pyproject_toml: Path) -> None:
     pyproject = PyProjectTOML(pyproject_toml)
 
     assert not pyproject.is_poetry_project()
@@ -23,20 +29,23 @@ def test_pyproject_toml_no_poetry_config(pyproject_toml):
     with pytest.raises(PyProjectException) as excval:
         _ = pyproject.poetry_config
 
-    assert "[tool.poetry] section not found in {}".format(
-        pyproject_toml.as_posix()
-    ) in str(excval.value)
+    assert f"[tool.poetry] section not found in {pyproject_toml.as_posix()}" in str(
+        excval.value
+    )
 
 
-def test_pyproject_toml_poetry_config(pyproject_toml, poetry_section):
+def test_pyproject_toml_poetry_config(
+    pyproject_toml: Path, poetry_section: str
+) -> None:
     pyproject = PyProjectTOML(pyproject_toml)
-    config = TOMLFile(pyproject_toml.as_posix()).read()["tool"]["poetry"]
+    doc: dict[str, Any] = TOMLFile(pyproject_toml.as_posix()).read()
+    config = doc["tool"]["poetry"]
 
     assert pyproject.is_poetry_project()
     assert pyproject.poetry_config == config
 
 
-def test_pyproject_toml_no_build_system_defaults():
+def test_pyproject_toml_no_build_system_defaults() -> None:
     pyproject_toml = (
         Path(__file__).parent.parent
         / "fixtures"
@@ -52,13 +61,13 @@ def test_pyproject_toml_no_build_system_defaults():
     assert build_system.dependencies[1].to_pep_508() == "Cython (>=0.29.6,<0.30.0)"
 
 
-def test_pyproject_toml_build_requires_as_dependencies(pyproject_toml):
+def test_pyproject_toml_build_requires_as_dependencies(pyproject_toml: Path) -> None:
     build_system = PyProjectTOML(pyproject_toml).build_system
     assert build_system.requires == ["setuptools", "wheel"]
     assert build_system.build_backend == "setuptools.build_meta:__legacy__"
 
 
-def test_pyproject_toml_non_existent(pyproject_toml):
+def test_pyproject_toml_non_existent(pyproject_toml: Path) -> None:
     pyproject_toml.unlink()
     pyproject = PyProjectTOML(pyproject_toml)
     build_system = pyproject.build_system
@@ -68,7 +77,7 @@ def test_pyproject_toml_non_existent(pyproject_toml):
     assert build_system.build_backend == "poetry.core.masonry.api"
 
 
-def test_pyproject_toml_reload(pyproject_toml, poetry_section):
+def test_pyproject_toml_reload(pyproject_toml: Path, poetry_section: str) -> None:
     pyproject = PyProjectTOML(pyproject_toml)
     name_original = pyproject.poetry_config["name"]
     name_new = str(uuid.uuid4())
@@ -80,7 +89,9 @@ def test_pyproject_toml_reload(pyproject_toml, poetry_section):
     assert pyproject.poetry_config["name"] == name_original
 
 
-def test_pyproject_toml_save(pyproject_toml, poetry_section, build_system_section):
+def test_pyproject_toml_save(
+    pyproject_toml: Path, poetry_section: str, build_system_section: str
+) -> None:
     pyproject = PyProjectTOML(pyproject_toml)
 
     name = str(uuid.uuid4())
diff --git a/vendor/poetry-core/tests/pyproject/test_pyproject_toml_file.py b/vendor/poetry-core/tests/pyproject/test_pyproject_toml_file.py
index 071dab28..95fd20f9 100644
--- a/vendor/poetry-core/tests/pyproject/test_pyproject_toml_file.py
+++ b/vendor/poetry-core/tests/pyproject/test_pyproject_toml_file.py
@@ -1,13 +1,20 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
 import pytest
 
 from poetry.core.exceptions import PoetryCoreException
 from poetry.core.toml import TOMLFile
-from poetry.core.utils._compat import decode
+
+
+if TYPE_CHECKING:
+    from pathlib import Path
 
 
 def test_old_pyproject_toml_file_deprecation(
-    pyproject_toml, build_system_section, poetry_section
-):
+    pyproject_toml: Path, build_system_section: str, poetry_section: str
+) -> None:
     from poetry.core.utils.toml_file import TomlFile
 
     with pytest.warns(DeprecationWarning):
@@ -17,16 +24,16 @@ def test_old_pyproject_toml_file_deprecation(
     assert data == TOMLFile(pyproject_toml).read()
 
 
-def test_pyproject_toml_file_invalid(pyproject_toml):
+def test_pyproject_toml_file_invalid(pyproject_toml: Path) -> None:
     with pyproject_toml.open(mode="a") as f:
-        f.write(decode("<<<<<<<<<<<"))
+        f.write("<<<<<<<<<<<")
 
     with pytest.raises(PoetryCoreException) as excval:
         _ = TOMLFile(pyproject_toml).read()
 
-    assert "Invalid TOML file {}".format(pyproject_toml.as_posix()) in str(excval.value)
+    assert f"Invalid TOML file {pyproject_toml.as_posix()}" in str(excval.value)
 
 
-def test_pyproject_toml_file_getattr(tmp_path, pyproject_toml):
+def test_pyproject_toml_file_getattr(tmp_path: Path, pyproject_toml: Path) -> None:
     file = TOMLFile(pyproject_toml)
     assert file.parent == tmp_path
diff --git a/vendor/poetry-core/tests/semver/test_helpers.py b/vendor/poetry-core/tests/semver/test_helpers.py
new file mode 100644
index 00000000..d53e9e57
--- /dev/null
+++ b/vendor/poetry-core/tests/semver/test_helpers.py
@@ -0,0 +1,421 @@
+from __future__ import annotations
+
+from typing import cast
+
+import pytest
+
+from poetry.core.semver.helpers import parse_constraint
+from poetry.core.semver.version import Version
+from poetry.core.semver.version_range import VersionRange
+from poetry.core.semver.version_union import VersionUnion
+from poetry.core.version.pep440 import ReleaseTag
+
+
+@pytest.mark.parametrize(
+    "input,constraint",
+    [
+        ("*", VersionRange()),
+        ("*.*", VersionRange()),
+        ("v*.*", VersionRange()),
+        ("*.x.*", VersionRange()),
+        ("x.X.x.*", VersionRange()),
+        (">1.0.0", VersionRange(min=Version.from_parts(1, 0, 0))),
+        ("<1.2.3", VersionRange(max=Version.from_parts(1, 2, 3))),
+        ("<=1.2.3", VersionRange(max=Version.from_parts(1, 2, 3), include_max=True)),
+        (">=1.2.3", VersionRange(min=Version.from_parts(1, 2, 3), include_min=True)),
+        ("=1.2.3", Version.from_parts(1, 2, 3)),
+        ("1.2.3", Version.from_parts(1, 2, 3)),
+        ("1!2.3.4", Version.from_parts(2, 3, 4, epoch=1)),
+        ("=1.0", Version.from_parts(1, 0, 0)),
+        ("1.2.3b5", Version.from_parts(1, 2, 3, pre=ReleaseTag("beta", 5))),
+        (">= 1.2.3", VersionRange(min=Version.from_parts(1, 2, 3), include_min=True)),
+        (
+            ">dev",
+            VersionRange(min=Version.from_parts(0, 0, dev=ReleaseTag("dev"))),
+        ),  # Issue 206
+    ],
+)
+def test_parse_constraint(input: str, constraint: Version | VersionRange) -> None:
+    assert parse_constraint(input) == constraint
+
+
+@pytest.mark.parametrize(
+    "input,constraint",
+    [
+        (
+            "v2.*",
+            VersionRange(
+                Version.from_parts(2, 0, 0), Version.from_parts(3, 0, 0), True
+            ),
+        ),
+        (
+            "2.*.*",
+            VersionRange(
+                Version.from_parts(2, 0, 0), Version.from_parts(3, 0, 0), True
+            ),
+        ),
+        (
+            "20.*",
+            VersionRange(
+                Version.from_parts(20, 0, 0), Version.from_parts(21, 0, 0), True
+            ),
+        ),
+        (
+            "20.*.*",
+            VersionRange(
+                Version.from_parts(20, 0, 0), Version.from_parts(21, 0, 0), True
+            ),
+        ),
+        (
+            "2.0.*",
+            VersionRange(
+                Version.from_parts(2, 0, 0), Version.from_parts(2, 1, 0), True
+            ),
+        ),
+        (
+            "2.x",
+            VersionRange(
+                Version.from_parts(2, 0, 0), Version.from_parts(3, 0, 0), True
+            ),
+        ),
+        (
+            "2.x.x",
+            VersionRange(
+                Version.from_parts(2, 0, 0), Version.from_parts(3, 0, 0), True
+            ),
+        ),
+        (
+            "2.2.X",
+            VersionRange(
+                Version.from_parts(2, 2, 0), Version.from_parts(2, 3, 0), True
+            ),
+        ),
+        ("0.*", VersionRange(max=Version.from_parts(1, 0, 0))),
+        ("0.*.*", VersionRange(max=Version.from_parts(1, 0, 0))),
+        ("0.x", VersionRange(max=Version.from_parts(1, 0, 0))),
+    ],
+)
+def test_parse_constraint_wildcard(input: str, constraint: VersionRange) -> None:
+    assert parse_constraint(input) == constraint
+
+
+@pytest.mark.parametrize(
+    "input,constraint",
+    [
+        (
+            "~v1",
+            VersionRange(
+                Version.from_parts(1, 0, 0), Version.from_parts(2, 0, 0), True
+            ),
+        ),
+        (
+            "~1.0",
+            VersionRange(
+                Version.from_parts(1, 0, 0), Version.from_parts(1, 1, 0), True
+            ),
+        ),
+        (
+            "~1.0.0",
+            VersionRange(
+                Version.from_parts(1, 0, 0), Version.from_parts(1, 1, 0), True
+            ),
+        ),
+        (
+            "~1.2",
+            VersionRange(
+                Version.from_parts(1, 2, 0), Version.from_parts(1, 3, 0), True
+            ),
+        ),
+        (
+            "~1.2.3",
+            VersionRange(
+                Version.from_parts(1, 2, 3), Version.from_parts(1, 3, 0), True
+            ),
+        ),
+        (
+            "~1.2-beta",
+            VersionRange(
+                Version.from_parts(1, 2, 0, pre=ReleaseTag("beta")),
+                Version.from_parts(1, 3, 0),
+                True,
+            ),
+        ),
+        (
+            "~1.2-b2",
+            VersionRange(
+                Version.from_parts(1, 2, 0, pre=ReleaseTag("beta", 2)),
+                Version.from_parts(1, 3, 0),
+                True,
+            ),
+        ),
+        (
+            "~0.3",
+            VersionRange(
+                Version.from_parts(0, 3, 0), Version.from_parts(0, 4, 0), True
+            ),
+        ),
+        (
+            "~3.5",
+            VersionRange(
+                Version.from_parts(3, 5, 0), Version.from_parts(3, 6, 0), True
+            ),
+        ),
+        (
+            "~=3.5",
+            VersionRange(
+                Version.from_parts(3, 5, 0), Version.from_parts(4, 0, 0), True
+            ),
+        ),  # PEP 440
+        (
+            "~=3.5.3",
+            VersionRange(
+                Version.from_parts(3, 5, 3), Version.from_parts(3, 6, 0), True
+            ),
+        ),  # PEP 440
+        (
+            "~=3.5.3rc1",
+            VersionRange(
+                Version.from_parts(3, 5, 3, pre=ReleaseTag("rc", 1)),
+                Version.from_parts(3, 6, 0),
+                True,
+            ),
+        ),  # PEP 440
+    ],
+)
+def test_parse_constraint_tilde(input: str, constraint: VersionRange) -> None:
+    assert parse_constraint(input) == constraint
+
+
+@pytest.mark.parametrize(
+    "input,constraint",
+    [
+        (
+            "^v1",
+            VersionRange(
+                Version.from_parts(1, 0, 0), Version.from_parts(2, 0, 0), True
+            ),
+        ),
+        ("^0", VersionRange(Version.from_parts(0), Version.from_parts(1), True)),
+        (
+            "^0.0",
+            VersionRange(
+                Version.from_parts(0, 0, 0), Version.from_parts(0, 1, 0), True
+            ),
+        ),
+        (
+            "^1.2",
+            VersionRange(
+                Version.from_parts(1, 2, 0), Version.from_parts(2, 0, 0), True
+            ),
+        ),
+        (
+            "^1.2.3-beta.2",
+            VersionRange(
+                Version.from_parts(1, 2, 3, pre=ReleaseTag("beta", 2)),
+                Version.from_parts(2, 0, 0),
+                True,
+            ),
+        ),
+        (
+            "^1.2.3",
+            VersionRange(
+                Version.from_parts(1, 2, 3), Version.from_parts(2, 0, 0), True
+            ),
+        ),
+        (
+            "^0.2.3",
+            VersionRange(
+                Version.from_parts(0, 2, 3), Version.from_parts(0, 3, 0), True
+            ),
+        ),
+        (
+            "^0.2",
+            VersionRange(
+                Version.from_parts(0, 2, 0), Version.from_parts(0, 3, 0), True
+            ),
+        ),
+        (
+            "^0.2.0",
+            VersionRange(
+                Version.from_parts(0, 2, 0), Version.from_parts(0, 3, 0), True
+            ),
+        ),
+        (
+            "^0.0.3",
+            VersionRange(
+                Version.from_parts(0, 0, 3), Version.from_parts(0, 0, 4), True
+            ),
+        ),
+    ],
+)
+def test_parse_constraint_caret(input: str, constraint: VersionRange) -> None:
+    assert parse_constraint(input) == constraint
+
+
+@pytest.mark.parametrize(
+    "input",
+    [
+        ">2.0,<=3.0",
+        ">2.0 <=3.0",
+        ">2.0  <=3.0",
+        ">2.0, <=3.0",
+        ">2.0 ,<=3.0",
+        ">2.0 , <=3.0",
+        ">2.0   , <=3.0",
+        "> 2.0   <=  3.0",
+        "> 2.0  ,  <=  3.0",
+        "  > 2.0  ,  <=  3.0 ",
+    ],
+)
+def test_parse_constraint_multi(input: str) -> None:
+    assert parse_constraint(input) == VersionRange(
+        Version.from_parts(2, 0, 0),
+        Version.from_parts(3, 0, 0),
+        include_min=False,
+        include_max=True,
+    )
+
+
+@pytest.mark.parametrize(
+    "input, output",
+    [
+        (
+            ">1!2,<=2!3",
+            VersionRange(
+                Version.from_parts(2, 0, 0, epoch=1),
+                Version.from_parts(3, 0, 0, epoch=2),
+                include_min=False,
+                include_max=True,
+            ),
+        ),
+        (
+            ">=1!2,<2!3",
+            VersionRange(
+                Version.from_parts(2, 0, 0, epoch=1),
+                Version.from_parts(3, 0, 0, epoch=2),
+                include_min=True,
+                include_max=False,
+            ),
+        ),
+    ],
+)
+def test_parse_constraint_multi_with_epochs(input: str, output: VersionRange) -> None:
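+    # the "N!" prefix is a PEP 440 epoch; it sorts before the release segment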
+    assert parse_constraint(input) == output
+
+
+@pytest.mark.parametrize(
+    "input",
+    [">=2.7,!=3.0.*,!=3.1.*", ">=2.7, !=3.0.*, !=3.1.*", ">= 2.7, != 3.0.*, != 3.1.*"],
+)
+def test_parse_constraint_multi_wildcard(input: str) -> None:
+    assert parse_constraint(input) == VersionUnion(
+        VersionRange(
+            Version.from_parts(2, 7, 0), Version.from_parts(3, 0, 0), True, False
+        ),
+        VersionRange(Version.from_parts(3, 2, 0), None, True, False),
+    )
+
+
+@pytest.mark.parametrize(
+    "input,constraint",
+    [
+        (
+            "!=v2.*",
+            VersionRange(max=Version.parse("2.0")).union(
+                VersionRange(Version.parse("3.0"), include_min=True)
+            ),
+        ),
+        (
+            "!=2.*.*",
+            VersionRange(max=Version.parse("2.0")).union(
+                VersionRange(Version.parse("3.0"), include_min=True)
+            ),
+        ),
+        (
+            "!=2.0.*",
+            VersionRange(max=Version.parse("2.0")).union(
+                VersionRange(Version.parse("2.1"), include_min=True)
+            ),
+        ),
+        ("!=0.*", VersionRange(Version.parse("1.0"), include_min=True)),
+        ("!=0.*.*", VersionRange(Version.parse("1.0"), include_min=True)),
+    ],
+)
+def test_parse_constraints_negative_wildcard(
+    input: str, constraint: VersionRange
+) -> None:
+    assert parse_constraint(input) == constraint
+
+
+@pytest.mark.parametrize(
+    "input,constraint",
+    [
+        (">3.7,", VersionRange(min=Version.parse("3.7"))),
+        (">3.7 , ", VersionRange(min=Version.parse("3.7"))),
+        (
+            ">3.7,<3.8,",
+            VersionRange(min=Version.parse("3.7"), max=Version.parse("3.8")),
+        ),
+        (
+            ">3.7,||<3.6,",
+            VersionRange(min=Version.parse("3.7")).union(
+                VersionRange(max=Version.parse("3.6"))
+            ),
+        ),
+        (
+            ">3.7 , || <3.6 , ",
+            VersionRange(min=Version.parse("3.7")).union(
+                VersionRange(max=Version.parse("3.6"))
+            ),
+        ),
+        (
+            ">3.7, <3.8, || <3.6, >3.5",
+            VersionRange(min=Version.parse("3.7"), max=Version.parse("3.8")).union(
+                VersionRange(min=Version.parse("3.5"), max=Version.parse("3.6"))
+            ),
+        ),
+    ],
+)
+def test_parse_constraints_with_trailing_comma(
+    input: str, constraint: VersionRange
+) -> None:
+    assert parse_constraint(input) == constraint
+
+
+@pytest.mark.parametrize(
+    "input, expected",
+    [
+        ("1", "1"),
+        ("1.2", "1.2"),
+        ("1.2.3", "1.2.3"),
+        ("!=1", "!=1"),
+        ("!=1.2", "!=1.2"),
+        ("!=1.2.3", "!=1.2.3"),
+        ("^1", ">=1,<2"),
+        ("^1.0", ">=1.0,<2.0"),
+        ("^1.0.0", ">=1.0.0,<2.0.0"),
+        ("~1", ">=1,<2"),
+        ("~1.0", ">=1.0,<1.1"),
+        ("~1.0.0", ">=1.0.0,<1.1.0"),
+    ],
+)
+def test_constraints_keep_version_precision(input: str, expected: str) -> None:
+    assert str(parse_constraint(input)) == expected
+
+
+@pytest.mark.parametrize(
+    "unsorted, sorted_",
+    [
+        (["1.0.3", "1.0.2", "1.0.1"], ["1.0.1", "1.0.2", "1.0.3"]),
+        (["1.0.0.2", "1.0.0.0rc2"], ["1.0.0.0rc2", "1.0.0.2"]),
+        (["1.0.0.0", "1.0.0.0rc2"], ["1.0.0.0rc2", "1.0.0.0"]),
+        (["1.0.0.0.0", "1.0.0.0rc2"], ["1.0.0.0rc2", "1.0.0.0.0"]),
+        (["1.0.0rc2", "1.0.0rc1"], ["1.0.0rc1", "1.0.0rc2"]),
+        (["1.0.0rc2", "1.0.0b1"], ["1.0.0b1", "1.0.0rc2"]),
+    ],
+)
+def test_versions_are_sortable(unsorted: list[str], sorted_: list[str]) -> None:
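+    # exact versions parse to Version rather than VersionRange, hence the casts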
+    unsorted_parsed = [cast(Version, parse_constraint(u)) for u in unsorted]
+    sorted_parsed = [cast(Version, parse_constraint(s)) for s in sorted_]
+
+    assert sorted(unsorted_parsed) == sorted_parsed
diff --git a/vendor/poetry-core/tests/semver/test_main.py b/vendor/poetry-core/tests/semver/test_main.py
deleted file mode 100644
index 85a46b3f..00000000
--- a/vendor/poetry-core/tests/semver/test_main.py
+++ /dev/null
@@ -1,200 +0,0 @@
-import pytest
-
-from poetry.core.semver import Version
-from poetry.core.semver import VersionRange
-from poetry.core.semver import VersionUnion
-from poetry.core.semver import parse_constraint
-
-
-@pytest.mark.parametrize(
-    "input,constraint",
-    [
-        ("*", VersionRange()),
-        ("*.*", VersionRange()),
-        ("v*.*", VersionRange()),
-        ("*.x.*", VersionRange()),
-        ("x.X.x.*", VersionRange()),
-        # ('!=1.0.0', Constraint('!=', '1.0.0.0')),
-        (">1.0.0", VersionRange(min=Version(1, 0, 0))),
-        ("<1.2.3", VersionRange(max=Version(1, 2, 3))),
-        ("<=1.2.3", VersionRange(max=Version(1, 2, 3), include_max=True)),
-        (">=1.2.3", VersionRange(min=Version(1, 2, 3), include_min=True)),
-        ("=1.2.3", Version(1, 2, 3)),
-        ("1.2.3", Version(1, 2, 3)),
-        ("=1.0", Version(1, 0, 0)),
-        ("1.2.3b5", Version(1, 2, 3, pre="b5")),
-        (">= 1.2.3", VersionRange(min=Version(1, 2, 3), include_min=True)),
-        (">dev", VersionRange(min=Version(0, 0, pre="dev"))),  # Issue 206
-    ],
-)
-def test_parse_constraint(input, constraint):
-    assert parse_constraint(input) == constraint
-
-
-@pytest.mark.parametrize(
-    "input,constraint",
-    [
-        ("v2.*", VersionRange(Version(2, 0, 0), Version(3, 0, 0), True)),
-        ("2.*.*", VersionRange(Version(2, 0, 0), Version(3, 0, 0), True)),
-        ("20.*", VersionRange(Version(20, 0, 0), Version(21, 0, 0), True)),
-        ("20.*.*", VersionRange(Version(20, 0, 0), Version(21, 0, 0), True)),
-        ("2.0.*", VersionRange(Version(2, 0, 0), Version(2, 1, 0), True)),
-        ("2.x", VersionRange(Version(2, 0, 0), Version(3, 0, 0), True)),
-        ("2.x.x", VersionRange(Version(2, 0, 0), Version(3, 0, 0), True)),
-        ("2.2.X", VersionRange(Version(2, 2, 0), Version(2, 3, 0), True)),
-        ("0.*", VersionRange(max=Version(1, 0, 0))),
-        ("0.*.*", VersionRange(max=Version(1, 0, 0))),
-        ("0.x", VersionRange(max=Version(1, 0, 0))),
-    ],
-)
-def test_parse_constraint_wildcard(input, constraint):
-    assert parse_constraint(input) == constraint
-
-
-@pytest.mark.parametrize(
-    "input,constraint",
-    [
-        ("~v1", VersionRange(Version(1, 0, 0), Version(2, 0, 0), True)),
-        ("~1.0", VersionRange(Version(1, 0, 0), Version(1, 1, 0), True)),
-        ("~1.0.0", VersionRange(Version(1, 0, 0), Version(1, 1, 0), True)),
-        ("~1.2", VersionRange(Version(1, 2, 0), Version(1, 3, 0), True)),
-        ("~1.2.3", VersionRange(Version(1, 2, 3), Version(1, 3, 0), True)),
-        (
-            "~1.2-beta",
-            VersionRange(Version(1, 2, 0, pre="beta"), Version(1, 3, 0), True),
-        ),
-        ("~1.2-b2", VersionRange(Version(1, 2, 0, pre="b2"), Version(1, 3, 0), True)),
-        ("~0.3", VersionRange(Version(0, 3, 0), Version(0, 4, 0), True)),
-        ("~3.5", VersionRange(Version(3, 5, 0), Version(3, 6, 0), True)),
-        ("~=3.5", VersionRange(Version(3, 5, 0), Version(4, 0, 0), True)),  # PEP 440
-        ("~=3.5.3", VersionRange(Version(3, 5, 3), Version(3, 6, 0), True)),  # PEP 440
-        (
-            "~=3.5.3rc1",
-            VersionRange(Version(3, 5, 3, pre="rc1"), Version(3, 6, 0), True),
-        ),  # PEP 440
-    ],
-)
-def test_parse_constraint_tilde(input, constraint):
-    assert parse_constraint(input) == constraint
-
-
-@pytest.mark.parametrize(
-    "input,constraint",
-    [
-        ("^v1", VersionRange(Version(1, 0, 0), Version(2, 0, 0), True)),
-        ("^0", VersionRange(Version(0, 0, 0), Version(1, 0, 0), True)),
-        ("^0.0", VersionRange(Version(0, 0, 0), Version(0, 1, 0), True)),
-        ("^1.2", VersionRange(Version(1, 2, 0), Version(2, 0, 0), True)),
-        (
-            "^1.2.3-beta.2",
-            VersionRange(Version(1, 2, 3, pre="beta.2"), Version(2, 0, 0), True),
-        ),
-        ("^1.2.3", VersionRange(Version(1, 2, 3), Version(2, 0, 0), True)),
-        ("^0.2.3", VersionRange(Version(0, 2, 3), Version(0, 3, 0), True)),
-        ("^0.2", VersionRange(Version(0, 2, 0), Version(0, 3, 0), True)),
-        ("^0.2.0", VersionRange(Version(0, 2, 0), Version(0, 3, 0), True)),
-        ("^0.0.3", VersionRange(Version(0, 0, 3), Version(0, 0, 4), True)),
-    ],
-)
-def test_parse_constraint_caret(input, constraint):
-    assert parse_constraint(input) == constraint
-
-
-@pytest.mark.parametrize(
-    "input",
-    [
-        ">2.0,<=3.0",
-        ">2.0 <=3.0",
-        ">2.0  <=3.0",
-        ">2.0, <=3.0",
-        ">2.0 ,<=3.0",
-        ">2.0 , <=3.0",
-        ">2.0   , <=3.0",
-        "> 2.0   <=  3.0",
-        "> 2.0  ,  <=  3.0",
-        "  > 2.0  ,  <=  3.0 ",
-    ],
-)
-def test_parse_constraint_multi(input):
-    assert parse_constraint(input) == VersionRange(
-        Version(2, 0, 0), Version(3, 0, 0), include_min=False, include_max=True
-    )
-
-
-@pytest.mark.parametrize(
-    "input",
-    [">=2.7,!=3.0.*,!=3.1.*", ">=2.7, !=3.0.*, !=3.1.*", ">= 2.7, != 3.0.*, != 3.1.*"],
-)
-def test_parse_constraint_multi_wilcard(input):
-    assert parse_constraint(input) == VersionUnion(
-        VersionRange(Version(2, 7, 0), Version(3, 0, 0), True, False),
-        VersionRange(Version(3, 2, 0), None, True, False),
-    )
-
-
-@pytest.mark.parametrize(
-    "input,constraint",
-    [
-        (
-            "!=v2.*",
-            VersionRange(max=Version.parse("2.0")).union(
-                VersionRange(Version.parse("3.0"), include_min=True)
-            ),
-        ),
-        (
-            "!=2.*.*",
-            VersionRange(max=Version.parse("2.0")).union(
-                VersionRange(Version.parse("3.0"), include_min=True)
-            ),
-        ),
-        (
-            "!=2.0.*",
-            VersionRange(max=Version.parse("2.0")).union(
-                VersionRange(Version.parse("2.1"), include_min=True)
-            ),
-        ),
-        ("!=0.*", VersionRange(Version.parse("1.0"), include_min=True)),
-        ("!=0.*.*", VersionRange(Version.parse("1.0"), include_min=True)),
-    ],
-)
-def test_parse_constraints_negative_wildcard(input, constraint):
-    assert parse_constraint(input) == constraint
-
-
-@pytest.mark.parametrize(
-    "input, expected",
-    [
-        ("1", "1"),
-        ("1.2", "1.2"),
-        ("1.2.3", "1.2.3"),
-        ("!=1", "!=1"),
-        ("!=1.2", "!=1.2"),
-        ("!=1.2.3", "!=1.2.3"),
-        ("^1", ">=1,<2"),
-        ("^1.0", ">=1.0,<2.0"),
-        ("^1.0.0", ">=1.0.0,<2.0.0"),
-        ("~1", ">=1,<2"),
-        ("~1.0", ">=1.0,<1.1"),
-        ("~1.0.0", ">=1.0.0,<1.1.0"),
-    ],
-)
-def test_constraints_keep_version_precision(input, expected):
-    assert str(parse_constraint(input)) == expected
-
-
-@pytest.mark.parametrize(
-    "unsorted, sorted_",
-    [
-        (["1.0.3", "1.0.2", "1.0.1"], ["1.0.1", "1.0.2", "1.0.3"]),
-        (["1.0.0.2", "1.0.0.0rc2"], ["1.0.0.0rc2", "1.0.0.2"]),
-        (["1.0.0.0", "1.0.0.0rc2"], ["1.0.0.0rc2", "1.0.0.0"]),
-        (["1.0.0.0.0", "1.0.0.0rc2"], ["1.0.0.0rc2", "1.0.0.0.0"]),
-        (["1.0.0rc2", "1.0.0rc1"], ["1.0.0rc1", "1.0.0rc2"]),
-        (["1.0.0rc2", "1.0.0b1"], ["1.0.0b1", "1.0.0rc2"]),
-    ],
-)
-def test_versions_are_sortable(unsorted, sorted_):
-    unsorted = [parse_constraint(u) for u in unsorted]
-    sorted_ = [parse_constraint(s) for s in sorted_]
-
-    assert sorted(unsorted) == sorted_
diff --git a/vendor/poetry-core/tests/semver/test_parse_constraint.py b/vendor/poetry-core/tests/semver/test_parse_constraint.py
index f70c5741..ae6f2d3a 100644
--- a/vendor/poetry-core/tests/semver/test_parse_constraint.py
+++ b/vendor/poetry-core/tests/semver/test_parse_constraint.py
@@ -1,90 +1,256 @@
+from __future__ import annotations
+
 import pytest
 
-from poetry.core.semver import Version
-from poetry.core.semver import VersionRange
-from poetry.core.semver import VersionUnion
-from poetry.core.semver import parse_constraint
+from poetry.core.semver.helpers import parse_constraint
+from poetry.core.semver.version import Version
+from poetry.core.semver.version_range import VersionRange
+from poetry.core.semver.version_union import VersionUnion
+from poetry.core.version.pep440 import ReleaseTag
 
 
 @pytest.mark.parametrize(
     "constraint,version",
     [
-        ("~=3.8", VersionRange(min=Version(3, 8), max=Version(4, 0), include_min=True)),
+        (
+            "~=3.8",
+            VersionRange(
+                min=Version.from_parts(3, 8),
+                max=Version.from_parts(4, 0),
+                include_min=True,
+            ),
+        ),
         (
             "== 3.8.*",
-            VersionRange(min=Version(3, 8), max=Version(3, 9, 0), include_min=True),
+            VersionRange(
+                min=Version.from_parts(3, 8),
+                max=Version.from_parts(3, 9, 0),
+                include_min=True,
+            ),
+        ),
+        (
+            "== 3.8.x",
+            VersionRange(
+                min=Version.from_parts(3, 8),
+                max=Version.from_parts(3, 9, 0),
+                include_min=True,
+            ),
         ),
         (
             "~= 3.8",
-            VersionRange(min=Version(3, 8), max=Version(4, 0), include_min=True),
+            VersionRange(
+                min=Version.from_parts(3, 8),
+                max=Version.from_parts(4, 0),
+                include_min=True,
+            ),
+        ),
+        (
+            "~3.8",
+            VersionRange(
+                min=Version.from_parts(3, 8),
+                max=Version.from_parts(3, 9),
+                include_min=True,
+            ),
+        ),
+        (
+            "~ 3.8",
+            VersionRange(
+                min=Version.from_parts(3, 8),
+                max=Version.from_parts(3, 9),
+                include_min=True,
+            ),
         ),
-        ("~3.8", VersionRange(min=Version(3, 8), max=Version(3, 9), include_min=True)),
-        ("~ 3.8", VersionRange(min=Version(3, 8), max=Version(3, 9), include_min=True)),
-        (">3.8", VersionRange(min=Version(3, 8))),
-        (">=3.8", VersionRange(min=Version(3, 8), include_min=True)),
-        (">= 3.8", VersionRange(min=Version(3, 8), include_min=True)),
+        (">3.8", VersionRange(min=Version.from_parts(3, 8))),
+        (">=3.8", VersionRange(min=Version.from_parts(3, 8), include_min=True)),
+        (">= 3.8", VersionRange(min=Version.from_parts(3, 8), include_min=True)),
         (
             ">3.8,<=6.5",
-            VersionRange(min=Version(3, 8), max=Version(6, 5), include_max=True),
+            VersionRange(
+                min=Version.from_parts(3, 8),
+                max=Version.from_parts(6, 5),
+                include_max=True,
+            ),
         ),
         (
             ">3.8,<= 6.5",
-            VersionRange(min=Version(3, 8), max=Version(6, 5), include_max=True),
+            VersionRange(
+                min=Version.from_parts(3, 8),
+                max=Version.from_parts(6, 5),
+                include_max=True,
+            ),
         ),
         (
             "> 3.8,<= 6.5",
-            VersionRange(min=Version(3, 8), max=Version(6, 5), include_max=True),
+            VersionRange(
+                min=Version.from_parts(3, 8),
+                max=Version.from_parts(6, 5),
+                include_max=True,
+            ),
         ),
         (
             "> 3.8,<=6.5",
-            VersionRange(min=Version(3, 8), max=Version(6, 5), include_max=True),
+            VersionRange(
+                min=Version.from_parts(3, 8),
+                max=Version.from_parts(6, 5),
+                include_max=True,
+            ),
         ),
         (
             ">3.8 ,<=6.5",
-            VersionRange(min=Version(3, 8), max=Version(6, 5), include_max=True),
+            VersionRange(
+                min=Version.from_parts(3, 8),
+                max=Version.from_parts(6, 5),
+                include_max=True,
+            ),
         ),
         (
             ">3.8, <=6.5",
-            VersionRange(min=Version(3, 8), max=Version(6, 5), include_max=True),
+            VersionRange(
+                min=Version.from_parts(3, 8),
+                max=Version.from_parts(6, 5),
+                include_max=True,
+            ),
         ),
         (
             ">3.8 , <=6.5",
-            VersionRange(min=Version(3, 8), max=Version(6, 5), include_max=True),
+            VersionRange(
+                min=Version.from_parts(3, 8),
+                max=Version.from_parts(6, 5),
+                include_max=True,
+            ),
         ),
         (
             "==3.8",
             VersionRange(
-                min=Version(3, 8), max=Version(3, 8), include_min=True, include_max=True
+                min=Version.from_parts(3, 8),
+                max=Version.from_parts(3, 8),
+                include_min=True,
+                include_max=True,
             ),
         ),
         (
             "== 3.8",
             VersionRange(
-                min=Version(3, 8), max=Version(3, 8), include_min=True, include_max=True
+                min=Version.from_parts(3, 8),
+                max=Version.from_parts(3, 8),
+                include_min=True,
+                include_max=True,
             ),
         ),
         (
             "~2.7 || ~3.8",
             VersionUnion(
-                VersionRange(min=Version(2, 7), max=Version(2, 8), include_min=True),
-                VersionRange(min=Version(3, 8), max=Version(3, 9), include_min=True),
+                VersionRange(
+                    min=Version.from_parts(2, 7),
+                    max=Version.from_parts(2, 8),
+                    include_min=True,
+                ),
+                VersionRange(
+                    min=Version.from_parts(3, 8),
+                    max=Version.from_parts(3, 9),
+                    include_min=True,
+                ),
             ),
         ),
         (
             "~2.7||~3.8",
             VersionUnion(
-                VersionRange(min=Version(2, 7), max=Version(2, 8), include_min=True),
-                VersionRange(min=Version(3, 8), max=Version(3, 9), include_min=True),
+                VersionRange(
+                    min=Version.from_parts(2, 7),
+                    max=Version.from_parts(2, 8),
+                    include_min=True,
+                ),
+                VersionRange(
+                    min=Version.from_parts(3, 8),
+                    max=Version.from_parts(3, 9),
+                    include_min=True,
+                ),
             ),
         ),
         (
             "~ 2.7||~ 3.8",
             VersionUnion(
-                VersionRange(min=Version(2, 7), max=Version(2, 8), include_min=True),
-                VersionRange(min=Version(3, 8), max=Version(3, 9), include_min=True),
+                VersionRange(
+                    min=Version.from_parts(2, 7),
+                    max=Version.from_parts(2, 8),
+                    include_min=True,
+                ),
+                VersionRange(
+                    min=Version.from_parts(3, 8),
+                    max=Version.from_parts(3, 9),
+                    include_min=True,
+                ),
+            ),
+        ),
+        (
+            "^1.0.0a1",
+            VersionRange(
+                min=Version.from_parts(1, 0, 0, pre=ReleaseTag("a", 1)),
+                max=Version.from_parts(2, 0, 0),
+                include_min=True,
+            ),
+        ),
+        (
+            "^1.0.0a1.dev0",
+            VersionRange(
+                min=Version.from_parts(
+                    1, 0, 0, pre=ReleaseTag("a", 1), dev=ReleaseTag("dev", 0)
+                ),
+                max=Version.from_parts(2, 0, 0),
+                include_min=True,
+            ),
+        ),
+        (
+            "1.0.0a1.dev0",
+            VersionRange(
+                min=Version.from_parts(
+                    1, 0, 0, pre=ReleaseTag("a", 1), dev=ReleaseTag("dev", 0)
+                ),
+                max=Version.from_parts(
+                    1, 0, 0, pre=ReleaseTag("a", 1), dev=ReleaseTag("dev", 0)
+                ),
+                include_min=True,
+            ),
+        ),
+        (
+            "~1.0.0a1",
+            VersionRange(
+                min=Version.from_parts(1, 0, 0, pre=ReleaseTag("a", 1)),
+                max=Version.from_parts(1, 1, 0),
+                include_min=True,
+            ),
+        ),
+        (
+            "~1.0.0a1.dev0",
+            VersionRange(
+                min=Version.from_parts(
+                    1, 0, 0, pre=ReleaseTag("a", 1), dev=ReleaseTag("dev", 0)
+                ),
+                max=Version.from_parts(1, 1, 0),
+                include_min=True,
+            ),
+        ),
+        (
+            "^0",
+            VersionRange(
+                min=Version.from_parts(0),
+                max=Version.from_parts(1),
+                include_min=True,
+            ),
+        ),
+        (
+            "^0.0",
+            VersionRange(
+                min=Version.from_parts(0, 0),
+                max=Version.from_parts(0, 1),
+                include_min=True,
             ),
         ),
     ],
 )
-def test_parse_constraint(constraint, version):
-    assert parse_constraint(constraint) == version
+@pytest.mark.parametrize(("with_whitespace_padding",), [(True,), (False,)])
+def test_parse_constraint(
+    constraint: str, version: VersionRange | VersionUnion, with_whitespace_padding: bool
+) -> None:
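+    # every constraint is parsed both bare and padded with surrounding spaces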
+    padding = " " * (4 if with_whitespace_padding else 0)
+    assert parse_constraint(f"{padding}{constraint}{padding}") == version
diff --git a/vendor/poetry-core/tests/semver/test_utils.py b/vendor/poetry-core/tests/semver/test_utils.py
new file mode 100644
index 00000000..413cac69
--- /dev/null
+++ b/vendor/poetry-core/tests/semver/test_utils.py
@@ -0,0 +1,83 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+
+from poetry.core.semver.empty_constraint import EmptyConstraint
+from poetry.core.semver.util import constraint_regions
+from poetry.core.semver.version import Version
+from poetry.core.semver.version_range import VersionRange
+
+
+if TYPE_CHECKING:
+    from poetry.core.semver.version_constraint import VersionConstraint
+
+
+PY27 = Version.parse("2.7")
+PY30 = Version.parse("3")
+PY36 = Version.parse("3.6.0")
+PY37 = Version.parse("3.7")
+PY38 = Version.parse("3.8.0")
+PY40 = Version.parse("4.0.0")
+
+
+@pytest.mark.parametrize(
+    "versions, expected",
+    [
+        ([VersionRange(None, None)], [VersionRange(None, None)]),
+        ([EmptyConstraint()], [VersionRange(None, None)]),
+        (
+            [VersionRange(PY27, None, include_min=True)],
+            [
+                VersionRange(None, PY27, include_max=False),
+                VersionRange(PY27, None, include_min=True),
+            ],
+        ),
+        (
+            [VersionRange(None, PY40, include_max=False)],
+            [
+                VersionRange(None, PY40, include_max=False),
+                VersionRange(PY40, None, include_min=True),
+            ],
+        ),
+        (
+            [VersionRange(PY27, PY27, include_min=True, include_max=True)],
+            [
+                VersionRange(None, PY27, include_max=False),
+                VersionRange(PY27, PY27, include_min=True, include_max=True),
+                VersionRange(PY27, None, include_min=False),
+            ],
+        ),
+        (
+            [VersionRange(PY27, PY30, include_min=True, include_max=False)],
+            [
+                VersionRange(None, PY27, include_max=False),
+                VersionRange(PY27, PY30, include_min=True, include_max=False),
+                VersionRange(PY30, None, include_min=True),
+            ],
+        ),
+        (
+            [
+                VersionRange(PY27, PY30, include_min=True, include_max=False).union(
+                    VersionRange(PY37, PY40, include_min=False, include_max=True)
+                ),
+                VersionRange(PY36, PY38, include_min=True, include_max=False),
+            ],
+            [
+                VersionRange(None, PY27, include_max=False),
+                VersionRange(PY27, PY30, include_min=True, include_max=False),
+                VersionRange(PY30, PY36, include_min=True, include_max=False),
+                VersionRange(PY36, PY37, include_min=True, include_max=True),
+                VersionRange(PY37, PY38, include_min=False, include_max=False),
+                VersionRange(PY38, PY40, include_min=True, include_max=True),
+                VersionRange(PY40, None, include_min=False),
+            ],
+        ),
+    ],
+)
+def test_constraint_regions(
+    versions: list[VersionConstraint], expected: list[VersionRange]
+) -> None:
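+    # the expected regions partition the whole version line: disjoint,
+    # contiguous, and split exactly at the boundaries of the input constraints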
+    regions = constraint_regions(versions)
+    assert regions == expected
diff --git a/vendor/poetry-core/tests/semver/test_version.py b/vendor/poetry-core/tests/semver/test_version.py
index 9f26d4eb..99231c95 100644
--- a/vendor/poetry-core/tests/semver/test_version.py
+++ b/vendor/poetry-core/tests/semver/test_version.py
@@ -1,64 +1,99 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
 import pytest
 
-from poetry.core.semver import EmptyConstraint
-from poetry.core.semver import Version
-from poetry.core.semver import VersionRange
-from poetry.core.semver.exceptions import ParseVersionError
+from poetry.core.semver.empty_constraint import EmptyConstraint
+from poetry.core.semver.version import Version
+from poetry.core.semver.version_range import VersionRange
+from poetry.core.version.exceptions import InvalidVersion
+from poetry.core.version.pep440 import ReleaseTag
+
+
+if TYPE_CHECKING:
+    from poetry.core.semver.version_constraint import VersionConstraint
 
 
 @pytest.mark.parametrize(
-    "input,version",
+    "text,version",
     [
-        ("1.0.0", Version(1, 0, 0)),
-        ("1", Version(1, 0, 0)),
-        ("1.0", Version(1, 0, 0)),
-        ("1b1", Version(1, 0, 0, pre="beta1")),
-        ("1.0b1", Version(1, 0, 0, pre="beta1")),
-        ("1.0.0b1", Version(1, 0, 0, pre="beta1")),
-        ("1.0.0-b1", Version(1, 0, 0, pre="beta1")),
-        ("1.0.0-beta.1", Version(1, 0, 0, pre="beta1")),
-        ("1.0.0+1", Version(1, 0, 0, build="1")),
-        ("1.0.0-1", Version(1, 0, 0, build="1")),
-        ("1.0.0.0", Version(1, 0, 0)),
-        ("1.0.0-post", Version(1, 0, 0)),
-        ("1.0.0-post1", Version(1, 0, 0, build="1")),
-        ("0.6c", Version(0, 6, 0, pre="rc0")),
-        ("0.6pre", Version(0, 6, 0, pre="rc0")),
+        ("1.0.0", Version.from_parts(1, 0, 0)),
+        ("1", Version.from_parts(1, 0, 0)),
+        ("1.0", Version.from_parts(1, 0, 0)),
+        ("1b1", Version.from_parts(1, 0, 0, pre=ReleaseTag("beta", 1))),
+        ("1.0b1", Version.from_parts(1, 0, 0, pre=ReleaseTag("beta", 1))),
+        ("1.0.0b1", Version.from_parts(1, 0, 0, pre=ReleaseTag("beta", 1))),
+        ("1.0.0-b1", Version.from_parts(1, 0, 0, pre=ReleaseTag("beta", 1))),
+        ("1.0.0-beta.1", Version.from_parts(1, 0, 0, pre=ReleaseTag("beta", 1))),
+        ("1.0.0+1", Version.from_parts(1, 0, 0, local=1)),
+        ("1.0.0-1", Version.from_parts(1, 0, 0, post=ReleaseTag("post", 1))),
+        ("1.0.0.0", Version.from_parts(1, 0, 0, extra=0)),
+        ("1.0.0-post", Version.from_parts(1, 0, 0, post=ReleaseTag("post"))),
+        ("1.0.0-post1", Version.from_parts(1, 0, 0, post=ReleaseTag("post", 1))),
+        ("0.6c", Version.from_parts(0, 6, 0, pre=ReleaseTag("rc", 0))),
+        ("0.6pre", Version.from_parts(0, 6, 0, pre=ReleaseTag("preview", 0))),
+        ("1!2.3.4", Version.from_parts(2, 3, 4, epoch=1)),
     ],
 )
-def test_parse_valid(input, version):
-    parsed = Version.parse(input)
+def test_parse_valid(text: str, version: Version) -> None:
+    parsed = Version.parse(text)
 
     assert parsed == version
-    assert parsed.text == input
-
-
-@pytest.mark.parametrize("input", [(None, "example")])
-def test_parse_invalid(input):
-    with pytest.raises(ParseVersionError):
-        Version.parse(input)
-
-
-def test_comparison():
-    versions = [
-        "1.0.0-alpha",
-        "1.0.0-alpha.1",
-        "1.0.0-beta.2",
-        "1.0.0-beta.11",
-        "1.0.0-rc.1",
-        "1.0.0-rc.1+build.1",
-        "1.0.0",
-        "1.0.0+0.3.7",
-        "1.3.7+build",
-        "1.3.7+build.2.b8f12d7",
-        "1.3.7+build.11.e0f985a",
-        "2.0.0",
-        "2.1.0",
-        "2.2.0",
-        "2.11.0",
-        "2.11.1",
-    ]
+    assert parsed.text == text
+
 
+@pytest.mark.parametrize("value", [None, "example"])
+def test_parse_invalid(value: str | None) -> None:
+    with pytest.raises(InvalidVersion):
+        Version.parse(value)  # type: ignore[arg-type]
+
+
+@pytest.mark.parametrize(
+    "versions",
+    [
+        [
+            "1.0.0-alpha",
+            "1.0.0-alpha.1",
+            "1.0.0-beta.2",
+            "1.0.0-beta.11",
+            "1.0.0-rc.1",
+            "1.0.0-rc.1+build.1",
+            "1.0.0",
+            "1.0.0+0.3.7",
+            "1.3.7+build",
+            "1.3.7+build.2.b8f12d7",
+            "1.3.7+build.11.e0f985a",
+            "2.0.0",
+            "2.1.0",
+            "2.2.0",
+            "2.11.0",
+            "2.11.1",
+        ],
+        # PEP 440 example comparisons
+        [
+            "1.0.dev456",
+            "1.0a1",
+            "1.0a2.dev456",
+            "1.0a12.dev456",
+            "1.0a12",
+            "1.0b1.dev456",
+            "1.0b2",
+            "1.0b2.post345.dev456",
+            "1.0b2.post345",
+            "1.0rc1.dev456",
+            "1.0rc1",
+            "1.0",
+            "1.0+abc.5",
+            "1.0+abc.7",
+            "1.0+5",
+            "1.0.post456.dev34",
+            "1.0.post456",
+            "1.1.dev1",
+        ],
+    ],
+)
+def test_comparison(versions: list[str]) -> None:
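+    # compare every ordered pair; the list order is the expected sort order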
     for i in range(len(versions)):
         for j in range(len(versions)):
             a = Version.parse(versions[i])
@@ -72,7 +107,7 @@ def test_comparison():
             assert (a != b) == (i != j)
 
 
-def test_equality():
+def test_equality() -> None:
     assert Version.parse("1.2.3") == Version.parse("01.2.3")
     assert Version.parse("1.2.3") == Version.parse("1.02.3")
     assert Version.parse("1.2.3") == Version.parse("1.2.03")
@@ -80,17 +115,44 @@ def test_equality():
     assert Version.parse("1.2.3+1") == Version.parse("1.2.3+01")
 
 
-def test_allows():
+def test_allows() -> None:
     v = Version.parse("1.2.3")
     assert v.allows(v)
     assert not v.allows(Version.parse("2.2.3"))
     assert not v.allows(Version.parse("1.3.3"))
     assert not v.allows(Version.parse("1.2.4"))
     assert not v.allows(Version.parse("1.2.3-dev"))
-    assert not v.allows(Version.parse("1.2.3+build"))
+    assert not v.allows(Version.parse("1.2.3-1"))
+    assert not v.allows(Version.parse("1.2.3-1+build"))
+    assert v.allows(Version.parse("1.2.3+build"))
+
 
+def test_allows_with_local() -> None:
+    v = Version.parse("1.2.3+build.1")
+    assert v.allows(v)
+    assert not v.allows(Version.parse("1.2.3"))
+    assert not v.allows(Version.parse("1.3.3"))
+    assert not v.allows(Version.parse("1.2.3-dev"))
+    assert not v.allows(Version.parse("1.2.3+build.2"))
+    # a local version with a greater number of segments always compares as
+    # greater than one with fewer segments
+    assert not v.allows(Version.parse("1.2.3+build.1.0"))
+    assert not v.allows(Version.parse("1.2.3-1"))
+    assert not v.allows(Version.parse("1.2.3-1+build.1"))
 
-def test_allows_all():
+
+def test_allows_with_post() -> None:
+    v = Version.parse("1.2.3-1")
+    assert v.allows(v)
+    assert not v.allows(Version.parse("1.2.3"))
+    assert not v.allows(Version.parse("1.2.3-2"))
+    assert not v.allows(Version.parse("2.2.3"))
+    assert not v.allows(Version.parse("1.2.3-dev"))
+    assert not v.allows(Version.parse("1.2.3+build.2"))
+    assert v.allows(Version.parse("1.2.3-1+build.1"))
+
+
+def test_allows_all() -> None:
     v = Version.parse("1.2.3")
 
     assert v.allows_all(v)
@@ -102,32 +164,95 @@ def test_allows_all():
     assert v.allows_all(EmptyConstraint())
 
 
-def test_allows_any():
-    v = Version.parse("1.2.3")
-
-    assert v.allows_any(v)
-    assert not v.allows_any(Version.parse("0.0.3"))
-    assert v.allows_any(VersionRange(Version.parse("1.1.4"), Version.parse("1.2.4")))
-    assert v.allows_any(VersionRange())
-    assert not v.allows_any(EmptyConstraint())
-
+@pytest.mark.parametrize(
+    ("version1", "version2", "expected"),
+    [
+        (
+            Version.parse("1.2.3"),
+            Version.parse("1.2.3"),
+            True,
+        ),
+        (
+            Version.parse("1.2.3"),
+            Version.parse("1.2.3+cpu"),
+            True,
+        ),
+        (
+            Version.parse("1.2.3+cpu"),
+            Version.parse("1.2.3"),
+            False,
+        ),
+        (
+            Version.parse("1.2.3"),
+            Version.parse("0.0.3"),
+            False,
+        ),
+        (
+            Version.parse("1.2.3"),
+            VersionRange(Version.parse("1.1.4"), Version.parse("1.2.4")),
+            True,
+        ),
+        (
+            Version.parse("1.2.3"),
+            VersionRange(),
+            True,
+        ),
+        (
+            Version.parse("1.2.3"),
+            EmptyConstraint(),
+            False,
+        ),
+    ],
+)
+def test_allows_any(
+    version1: VersionConstraint,
+    version2: VersionConstraint,
+    expected: bool,
+) -> None:
+    actual = version1.allows_any(version2)
+    assert actual == expected
 
-def test_intersect():
-    v = Version.parse("1.2.3")
 
-    assert v.intersect(v) == v
-    assert v.intersect(Version.parse("1.1.4")).is_empty()
-    assert (
-        v.intersect(VersionRange(Version.parse("1.1.4"), Version.parse("1.2.4"))) == v
-    )
-    assert (
-        Version.parse("1.1.4")
-        .intersect(VersionRange(v, Version.parse("1.2.4")))
-        .is_empty()
-    )
+@pytest.mark.parametrize(
+    ("version1", "version2", "expected"),
+    [
+        (
+            Version.parse("1.2.3"),
+            Version.parse("1.1.4"),
+            EmptyConstraint(),
+        ),
+        (
+            Version.parse("1.2.3"),
+            VersionRange(Version.parse("1.1.4"), Version.parse("1.2.4")),
+            Version.parse("1.2.3"),
+        ),
+        (
+            Version.parse("1.1.4"),
+            VersionRange(Version.parse("1.2.3"), Version.parse("1.2.4")),
+            EmptyConstraint(),
+        ),
+        (
+            Version.parse("1.2.3"),
+            Version.parse("1.2.3.post0"),
+            EmptyConstraint(),
+        ),
+        (
+            Version.parse("1.2.3"),
+            Version.parse("1.2.3+local"),
+            Version.parse("1.2.3+local"),
+        ),
+    ],
+)
+def test_intersect(
+    version1: VersionConstraint,
+    version2: VersionConstraint,
+    expected: VersionConstraint,
+) -> None:
+    assert version1.intersect(version2) == expected
+    assert version2.intersect(version1) == expected
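+    # The final case above suggests that intersecting a bare release with one
+    # of its local variants keeps the more specific local version; asserting
+    # both argument orders also checks that intersect() is commutative here.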
 
 
-def test_union():
+def test_union() -> None:
     v = Version.parse("1.2.3")
 
     assert v.union(v) == v
@@ -152,7 +277,7 @@ def test_union():
     assert result.allows(Version.parse("0.1.0"))
 
 
-def test_difference():
+def test_difference() -> None:
     v = Version.parse("1.2.3")
 
     assert v.difference(v).is_empty()
diff --git a/vendor/poetry-core/tests/semver/test_version_range.py b/vendor/poetry-core/tests/semver/test_version_range.py
index 3667a465..9cd41453 100644
--- a/vendor/poetry-core/tests/semver/test_version_range.py
+++ b/vendor/poetry-core/tests/semver/test_version_range.py
@@ -1,81 +1,217 @@
+from __future__ import annotations
+
 import pytest
 
-from poetry.core.semver import EmptyConstraint
-from poetry.core.semver import Version
-from poetry.core.semver import VersionRange
+from poetry.core.semver.empty_constraint import EmptyConstraint
+from poetry.core.semver.version import Version
+from poetry.core.semver.version_range import VersionRange
 
 
 @pytest.fixture()
-def v003():
+def v003() -> Version:
     return Version.parse("0.0.3")
 
 
 @pytest.fixture()
-def v010():
+def v010() -> Version:
     return Version.parse("0.1.0")
 
 
 @pytest.fixture()
-def v080():
+def v080() -> Version:
     return Version.parse("0.8.0")
 
 
 @pytest.fixture()
-def v072():
+def v072() -> Version:
     return Version.parse("0.7.2")
 
 
 @pytest.fixture()
-def v114():
+def v114() -> Version:
     return Version.parse("1.1.4")
 
 
 @pytest.fixture()
-def v123():
+def v123() -> Version:
     return Version.parse("1.2.3")
 
 
 @pytest.fixture()
-def v124():
+def v124() -> Version:
     return Version.parse("1.2.4")
 
 
 @pytest.fixture()
-def v130():
+def v130() -> Version:
     return Version.parse("1.3.0")
 
 
 @pytest.fixture()
-def v140():
+def v140() -> Version:
     return Version.parse("1.4.0")
 
 
 @pytest.fixture()
-def v200():
+def v200() -> Version:
     return Version.parse("2.0.0")
 
 
 @pytest.fixture()
-def v234():
+def v234() -> Version:
     return Version.parse("2.3.4")
 
 
 @pytest.fixture()
-def v250():
+def v250() -> Version:
     return Version.parse("2.5.0")
 
 
 @pytest.fixture()
-def v300():
+def v300() -> Version:
     return Version.parse("3.0.0")
 
 
 @pytest.fixture()
-def v300b1():
+def v300b1() -> Version:
     return Version.parse("3.0.0b1")
 
 
-def test_allows_all(v003, v010, v080, v114, v123, v124, v140, v200, v234, v250, v300):
+@pytest.mark.parametrize(
+    "base,other",
+    [
+        pytest.param(Version.parse("3.0.0"), Version.parse("3.0.0-1"), id="post"),
+        pytest.param(
+            Version.parse("3.0.0"), Version.parse("3.0.0+local.1"), id="local"
+        ),
+    ],
+)
+def test_allows_post_releases_with_max(base: Version, other: Version) -> None:
+    range = VersionRange(max=base, include_max=True)
+    assert range.allows(other)
+
+
+@pytest.mark.parametrize(
+    "base,other",
+    [
+        pytest.param(Version.parse("3.0.0"), Version.parse("3.0.0-1"), id="post"),
+        pytest.param(
+            Version.parse("3.0.0"), Version.parse("3.0.0+local.1"), id="local"
+        ),
+    ],
+)
+def test_allows_post_releases_with_min(base: Version, other: Version) -> None:
+    range = VersionRange(min=base, include_min=True)
+    assert range.allows(other)
+
+
+def test_allows_post_releases_with_post_and_local_min() -> None:
+    one = Version.parse("3.0.0+local.1")
+    two = Version.parse("3.0.0-1")
+    three = Version.parse("3.0.0-1+local.1")
+    four = Version.parse("3.0.0+local.2")
+
+    assert VersionRange(min=one, include_min=True).allows(two)
+    assert VersionRange(min=one, include_min=True).allows(three)
+    assert VersionRange(min=one, include_min=True).allows(four)
+
+    assert not VersionRange(min=two, include_min=True).allows(one)
+    assert VersionRange(min=two, include_min=True).allows(three)
+    assert not VersionRange(min=two, include_min=True).allows(four)
+
+    assert not VersionRange(min=three, include_min=True).allows(one)
+    assert not VersionRange(min=three, include_min=True).allows(two)
+    assert not VersionRange(min=three, include_min=True).allows(four)
+
+    assert not VersionRange(min=four, include_min=True).allows(one)
+    assert VersionRange(min=four, include_min=True).allows(two)
+    assert VersionRange(min=four, include_min=True).allows(three)
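+    # Implied total order (an editorial reading of the asserts above):
+    #   3.0.0+local.1 < 3.0.0+local.2 < 3.0.0-1 < 3.0.0-1+local.1
+    # i.e. local variants of the base release sort below its post releases.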
+
+
+def test_allows_post_releases_with_post_and_local_max() -> None:
+    one = Version.parse("3.0.0+local.1")
+    two = Version.parse("3.0.0-1")
+    three = Version.parse("3.0.0-1+local.1")
+    four = Version.parse("3.0.0+local.2")
+
+    assert VersionRange(max=one, include_max=True).allows(two)
+    assert VersionRange(max=one, include_max=True).allows(three)
+    assert not VersionRange(max=one, include_max=True).allows(four)
+
+    assert VersionRange(max=two, include_max=True).allows(one)
+    assert VersionRange(max=two, include_max=True).allows(three)
+    assert VersionRange(max=two, include_max=True).allows(four)
+
+    assert VersionRange(max=three, include_max=True).allows(one)
+    assert VersionRange(max=three, include_max=True).allows(two)
+    assert VersionRange(max=three, include_max=True).allows(four)
+
+    assert VersionRange(max=four, include_max=True).allows(one)
+    assert VersionRange(max=four, include_max=True).allows(two)
+    assert VersionRange(max=four, include_max=True).allows(three)
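+    # Editorial note: some of these "allows" hold even though the candidate
+    # compares greater than the max (e.g. max=3.0.0+local.1 allows 3.0.0-1).
+    # An inclusive max is assumed to deliberately admit post releases of the
+    # same base release, while a merely greater local variant stays excluded
+    # (compare test_allows_post_releases_with_max above).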
+
+
+@pytest.mark.parametrize(
+    "base,one,two",
+    [
+        pytest.param(
+            Version.parse("3.0.0"),
+            Version.parse("3.0.0-1"),
+            Version.parse("3.0.0-2"),
+            id="post",
+        ),
+        pytest.param(
+            Version.parse("3.0.0"),
+            Version.parse("3.0.0+local.1"),
+            Version.parse("3.0.0+local.2"),
+            id="local",
+        ),
+    ],
+)
+def test_allows_post_releases_explicit_with_max(
+    base: Version, one: Version, two: Version
+) -> None:
+    range = VersionRange(max=one, include_max=True)
+    assert range.allows(base)
+    assert not range.allows(two)
+
+    range = VersionRange(max=two, include_max=True)
+    assert range.allows(base)
+    assert range.allows(one)
+
+
+@pytest.mark.parametrize(
+    "base,one,two",
+    [
+        pytest.param(
+            Version.parse("3.0.0"),
+            Version.parse("3.0.0-1"),
+            Version.parse("3.0.0-2"),
+            id="post",
+        ),
+        pytest.param(
+            Version.parse("3.0.0"),
+            Version.parse("3.0.0+local.1"),
+            Version.parse("3.0.0+local.2"),
+            id="local",
+        ),
+    ],
+)
+def test_allows_post_releases_explicit_with_min(
+    base: Version, one: Version, two: Version
+) -> None:
+    range = VersionRange(min=one, include_min=True)
+    assert not range.allows(base)
+    assert range.allows(two)
+
+    range = VersionRange(min=two, include_min=True)
+    assert not range.allows(base)
+    assert not range.allows(one)
+
+
+def test_allows_all(
+    v123: Version, v124: Version, v140: Version, v250: Version, v300: Version
+) -> None:
     assert VersionRange(v123, v250).allows_all(EmptyConstraint())
 
     range = VersionRange(v123, v250, include_max=True)
@@ -84,7 +220,10 @@ def test_allows_all(v003, v010, v080, v114, v123, v124, v140, v200, v234, v250,
     assert range.allows_all(v250)
     assert not range.allows_all(v300)
 
-    # with no min
+
+def test_allows_all_with_no_min(
+    v080: Version, v140: Version, v250: Version, v300: Version
+) -> None:
     range = VersionRange(max=v250)
     assert range.allows_all(VersionRange(v080, v140))
     assert not range.allows_all(VersionRange(v080, v300))
@@ -93,7 +232,10 @@ def test_allows_all(v003, v010, v080, v114, v123, v124, v140, v200, v234, v250,
     assert range.allows_all(range)
     assert not range.allows_all(VersionRange())
 
-    # with no max
+
+def test_allows_all_with_no_max(
+    v003: Version, v010: Version, v080: Version, v140: Version
+) -> None:
     range = VersionRange(min=v010)
     assert range.allows_all(VersionRange(v080, v140))
     assert not range.allows_all(VersionRange(v003, v140))
@@ -102,6 +244,10 @@ def test_allows_all(v003, v010, v080, v114, v123, v124, v140, v200, v234, v250,
     assert range.allows_all(range)
     assert not range.allows_all(VersionRange())
 
+
+def test_allows_all_bordering_range_not_more_inclusive(
+    v010: Version, v250: Version
+) -> None:
     # Allows bordering range that is not more inclusive
     exclusive = VersionRange(v010, v250)
     inclusive = VersionRange(v010, v250, True, True)
@@ -110,6 +256,16 @@ def test_allows_all(v003, v010, v080, v114, v123, v124, v140, v200, v234, v250,
     assert not exclusive.allows_all(inclusive)
     assert exclusive.allows_all(exclusive)
 
+
+def test_allows_all_contained_unions(
+    v010: Version,
+    v114: Version,
+    v123: Version,
+    v124: Version,
+    v140: Version,
+    v200: Version,
+    v234: Version,
+) -> None:
     # Allows unions that are completely contained
     range = VersionRange(v114, v200)
     assert range.allows_all(VersionRange(v123, v124).union(v140))
@@ -118,8 +274,19 @@ def test_allows_all(v003, v010, v080, v114, v123, v124, v140, v200, v234, v250,
 
 
 def test_allows_any(
-    v003, v010, v072, v080, v114, v123, v124, v140, v200, v234, v250, v300
-):
+    v003: Version,
+    v010: Version,
+    v072: Version,
+    v080: Version,
+    v114: Version,
+    v123: Version,
+    v124: Version,
+    v140: Version,
+    v200: Version,
+    v234: Version,
+    v250: Version,
+    v300: Version,
+) -> None:
     # disallows an empty constraint
     assert not VersionRange(v123, v250).allows_any(EmptyConstraint())
 
@@ -182,7 +349,14 @@ def test_allows_any(
     )
 
 
-def test_intersect(v114, v123, v124, v200, v250, v300):
+def test_intersect(
+    v114: Version,
+    v123: Version,
+    v124: Version,
+    v200: Version,
+    v250: Version,
+    v300: Version,
+) -> None:
     # two overlapping ranges
     assert VersionRange(v123, v250).intersect(VersionRange(v200, v300)) == VersionRange(
         v200, v250
@@ -215,8 +389,20 @@ def test_intersect(v114, v123, v124, v200, v250, v300):
 
 
 def test_union(
-    v003, v010, v072, v080, v114, v123, v124, v130, v140, v200, v234, v250, v300
-):
+    v003: Version,
+    v010: Version,
+    v072: Version,
+    v080: Version,
+    v114: Version,
+    v123: Version,
+    v124: Version,
+    v130: Version,
+    v140: Version,
+    v200: Version,
+    v234: Version,
+    v250: Version,
+    v300: Version,
+) -> None:
     # with a version returns the range if it contains the version
     range = VersionRange(v114, v124)
     assert range.union(v123) == range
@@ -260,7 +446,7 @@ def test_union(
     assert result == VersionRange(v003, v200)
 
 
-def test_include_max_prerelease(v200, v300, v300b1):
+def test_include_max_prerelease(v200: Version, v300: Version, v300b1: Version) -> None:
     result = VersionRange(v200, v300)
 
     assert not result.allows(v300b1)
diff --git a/vendor/poetry-core/tests/spdx/test_helpers.py b/vendor/poetry-core/tests/spdx/test_helpers.py
new file mode 100644
index 00000000..add838d9
--- /dev/null
+++ b/vendor/poetry-core/tests/spdx/test_helpers.py
@@ -0,0 +1,54 @@
+from __future__ import annotations
+
+import pytest
+
+from poetry.core.spdx.helpers import license_by_id
+
+
+def test_license_by_id() -> None:
+    license = license_by_id("MIT")
+
+    assert license.id == "MIT"
+    assert license.name == "MIT License"
+    assert license.is_osi_approved
+    assert not license.is_deprecated
+
+    license = license_by_id("LGPL-3.0-or-later")
+
+    assert license.id == "LGPL-3.0-or-later"
+    assert license.name == "GNU Lesser General Public License v3.0 or later"
+    assert license.is_osi_approved
+    assert not license.is_deprecated
+
+
+def test_license_by_id_is_case_insensitive() -> None:
+    license = license_by_id("mit")
+
+    assert license.id == "MIT"
+
+    license = license_by_id("miT")
+
+    assert license.id == "MIT"
+
+
+def test_license_by_id_with_full_name() -> None:
+    license = license_by_id("GNU Lesser General Public License v3.0 or later")
+
+    assert license.id == "LGPL-3.0-or-later"
+    assert license.name == "GNU Lesser General Public License v3.0 or later"
+    assert license.is_osi_approved
+    assert not license.is_deprecated
+
+
+def test_license_by_id_invalid() -> None:
+    with pytest.raises(ValueError):
+        license_by_id("")
+
+
+def test_license_by_id_custom() -> None:
+    license = license_by_id("Custom")
+
+    assert license.id == "Custom"
+    assert license.name == "Custom"
+    assert not license.is_osi_approved
+    assert not license.is_deprecated
diff --git a/vendor/poetry-core/tests/spdx/test_license.py b/vendor/poetry-core/tests/spdx/test_license.py
index 431dbacc..6cada073 100644
--- a/vendor/poetry-core/tests/spdx/test_license.py
+++ b/vendor/poetry-core/tests/spdx/test_license.py
@@ -1,7 +1,9 @@
-from poetry.core.spdx import license_by_id
+from __future__ import annotations
 
+from poetry.core.spdx.helpers import license_by_id
 
-def test_classifier_name():
+
+def test_classifier_name() -> None:
     license = license_by_id("lgpl-3.0-or-later")
 
     assert (
@@ -10,47 +12,48 @@ def test_classifier_name():
     )
 
 
-def test_classifier_name_no_classifer_osi_approved():
+def test_classifier_name_no_classifer_osi_approved() -> None:
     license = license_by_id("LiLiQ-R-1.1")
 
     assert license.classifier_name is None
 
 
-def test_classifier_name_no_classifer():
+def test_classifier_name_no_classifer() -> None:
     license = license_by_id("Leptonica")
 
     assert license.classifier_name == "Other/Proprietary License"
 
 
-def test_classifier():
+def test_classifier() -> None:
     license = license_by_id("lgpl-3.0-or-later")
 
-    assert license.classifier == (
-        "License :: "
+    assert (
+        license.classifier
+        == "License :: "
         "OSI Approved :: "
         "GNU Lesser General Public License v3 or later (LGPLv3+)"
     )
 
 
-def test_classifier_no_classifer_osi_approved():
+def test_classifier_no_classifer_osi_approved() -> None:
     license = license_by_id("LiLiQ-R-1.1")
 
     assert license.classifier == "License :: OSI Approved"
 
 
-def test_classifier_no_classifer():
+def test_classifier_no_classifer() -> None:
     license = license_by_id("Leptonica")
 
     assert license.classifier == "License :: Other/Proprietary License"
 
 
-def test_proprietary_license():
+def test_proprietary_license() -> None:
     license = license_by_id("Proprietary")
 
-    assert "License :: Other/Proprietary License" == license.classifier
+    assert license.classifier == "License :: Other/Proprietary License"
 
 
-def test_custom_license():
+def test_custom_license() -> None:
     license = license_by_id("Amazon Software License")
 
-    assert "License :: Other/Proprietary License" == license.classifier
+    assert license.classifier == "License :: Other/Proprietary License"
diff --git a/vendor/poetry-core/tests/spdx/test_main.py b/vendor/poetry-core/tests/spdx/test_main.py
deleted file mode 100644
index 62448e35..00000000
--- a/vendor/poetry-core/tests/spdx/test_main.py
+++ /dev/null
@@ -1,52 +0,0 @@
-import pytest
-
-from poetry.core.spdx import license_by_id
-
-
-def test_license_by_id():
-    license = license_by_id("MIT")
-
-    assert license.id == "MIT"
-    assert license.name == "MIT License"
-    assert license.is_osi_approved
-    assert not license.is_deprecated
-
-    license = license_by_id("LGPL-3.0-or-later")
-
-    assert license.id == "LGPL-3.0-or-later"
-    assert license.name == "GNU Lesser General Public License v3.0 or later"
-    assert license.is_osi_approved
-    assert not license.is_deprecated
-
-
-def test_license_by_id_is_case_insensitive():
-    license = license_by_id("mit")
-
-    assert license.id == "MIT"
-
-    license = license_by_id("miT")
-
-    assert license.id == "MIT"
-
-
-def test_license_by_id_with_full_name():
-    license = license_by_id("GNU Lesser General Public License v3.0 or later")
-
-    assert license.id == "LGPL-3.0-or-later"
-    assert license.name == "GNU Lesser General Public License v3.0 or later"
-    assert license.is_osi_approved
-    assert not license.is_deprecated
-
-
-def test_license_by_id_invalid():
-    with pytest.raises(ValueError):
-        license_by_id("")
-
-
-def test_license_by_id_custom():
-    license = license_by_id("Custom")
-
-    assert license.id == "Custom"
-    assert license.name == "Custom"
-    assert not license.is_osi_approved
-    assert not license.is_deprecated
diff --git a/vendor/poetry-core/tests/test_core_version.py b/vendor/poetry-core/tests/test_core_version.py
new file mode 100644
index 00000000..601ef49f
--- /dev/null
+++ b/vendor/poetry-core/tests/test_core_version.py
@@ -0,0 +1,11 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+from poetry.core import __version__
+from poetry.core.pyproject.toml import PyProjectTOML
+
+
+def test_version_is_synced() -> None:
+    pyproject = PyProjectTOML(Path(__file__).parent.parent.joinpath("pyproject.toml"))
+    assert __version__ == pyproject.poetry_config.get("version")
diff --git a/vendor/poetry-core/tests/test_factory.py b/vendor/poetry-core/tests/test_factory.py
index 605e34e5..cc985a66 100644
--- a/vendor/poetry-core/tests/test_factory.py
+++ b/vendor/poetry-core/tests/test_factory.py
@@ -1,19 +1,26 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import
-from __future__ import unicode_literals
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
+from typing import cast
 
 import pytest
 
 from poetry.core.factory import Factory
+from poetry.core.packages.vcs_dependency import VCSDependency
+from poetry.core.semver.helpers import parse_constraint
 from poetry.core.toml import TOMLFile
-from poetry.core.utils._compat import PY2
-from poetry.core.utils._compat import Path
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.dependency import Dependency
 
 
 fixtures_dir = Path(__file__).parent / "fixtures"
 
 
-def test_create_poetry():
+def test_create_poetry() -> None:
     poetry = Factory().create_poetry(fixtures_dir / "sample_project")
 
     package = poetry.package
@@ -22,9 +29,10 @@ def test_create_poetry():
     assert package.version.text == "1.2.3"
     assert package.description == "Some description."
     assert package.authors == ["Sébastien Eustace <sebastien@eustace.io>"]
+    assert package.license
     assert package.license.id == "MIT"
     assert (
-        package.readme.relative_to(fixtures_dir).as_posix()
+        package.readmes[0].relative_to(fixtures_dir).as_posix()
         == "sample_project/README.rst"
     )
     assert package.homepage == "https://python-poetry.org"
@@ -34,7 +42,7 @@ def test_create_poetry():
     assert package.python_versions == "~2.7 || ^3.6"
     assert str(package.python_constraint) == ">=2.7,<2.8 || >=3.6,<4.0"
 
-    dependencies = {}
+    dependencies: dict[str, Dependency] = {}
     for dep in package.requires:
         dependencies[dep.name] = dep
 
@@ -45,6 +53,7 @@ def test_create_poetry():
     pendulum = dependencies["pendulum"]
     assert pendulum.pretty_constraint == "branch 2.0"
     assert pendulum.is_vcs()
+    pendulum = cast(VCSDependency, pendulum)
     assert pendulum.vcs == "git"
     assert pendulum.branch == "2.0"
     assert pendulum.source == "https://github.com/sdispater/pendulum.git"
@@ -54,6 +63,7 @@ def test_create_poetry():
     tomlkit = dependencies["tomlkit"]
     assert tomlkit.pretty_constraint == "rev 3bff550"
     assert tomlkit.is_vcs()
+    tomlkit = cast(VCSDependency, tomlkit)
     assert tomlkit.vcs == "git"
     assert tomlkit.rev == "3bff550"
     assert tomlkit.source == "https://github.com/sdispater/tomlkit.git"
@@ -69,7 +79,7 @@ def test_create_poetry():
 
     pathlib2 = dependencies["pathlib2"]
     assert pathlib2.pretty_constraint == "^2.2"
-    assert pathlib2.python_versions == "~2.7"
+    assert pathlib2.python_versions == ">=2.7 <2.8"
     assert not pathlib2.is_optional()
 
     demo = dependencies["demo"]
@@ -97,13 +107,14 @@ def test_create_poetry():
     assert functools32.pretty_constraint == "^3.2.3"
     assert (
         str(functools32.marker)
-        == 'python_version ~= "2.7" and sys_platform == "win32" or python_version in "3.4 3.5"'
+        == 'python_version ~= "2.7" and sys_platform == "win32" or python_version in'
+        ' "3.4 3.5"'
     )
 
     dataclasses = dependencies["dataclasses"]
     assert dataclasses.name == "dataclasses"
     assert dataclasses.pretty_constraint == "^0.7"
-    assert dataclasses.python_versions == ">=3.6.1,<3.7"
+    assert dataclasses.python_versions == ">=3.6.1 <3.7"
     assert (
         str(dataclasses.marker)
         == 'python_full_version >= "3.6.1" and python_version < "3.7"'
@@ -123,17 +134,17 @@ def test_create_poetry():
         "Programming Language :: Python :: 2",
         "Programming Language :: Python :: 2.7",
         "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.10",
         "Programming Language :: Python :: 3.6",
         "Programming Language :: Python :: 3.7",
         "Programming Language :: Python :: 3.8",
         "Programming Language :: Python :: 3.9",
+        "Programming Language :: Python :: 3.10",
         "Topic :: Software Development :: Build Tools",
         "Topic :: Software Development :: Libraries :: Python Modules",
     ]
 
 
-def test_create_poetry_with_packages_and_includes():
+def test_create_poetry_with_packages_and_includes() -> None:
     poetry = Factory().create_poetry(
         fixtures_dir.parent / "masonry" / "builders" / "fixtures" / "with-include"
     )
@@ -156,7 +167,7 @@ def test_create_poetry_with_packages_and_includes():
     ]
 
 
-def test_create_poetry_with_multi_constraints_dependency():
+def test_create_poetry_with_multi_constraints_dependency() -> None:
     poetry = Factory().create_poetry(
         fixtures_dir / "project_with_multi_constraints_dependency"
     )
@@ -166,64 +177,200 @@ def test_create_poetry_with_multi_constraints_dependency():
     assert len(package.requires) == 2
 
 
-def test_validate():
+def test_validate() -> None:
     complete = TOMLFile(fixtures_dir / "complete.toml")
-    content = complete.read()["tool"]["poetry"]
+    doc: dict[str, Any] = complete.read()
+    content = doc["tool"]["poetry"]
 
     assert Factory.validate(content) == {"errors": [], "warnings": []}
 
 
-def test_validate_fails():
+def test_validate_fails() -> None:
     complete = TOMLFile(fixtures_dir / "complete.toml")
-    content = complete.read()["tool"]["poetry"]
-    content["this key is not in the schema"] = ""
+    doc: dict[str, Any] = complete.read()
+    content = doc["tool"]["poetry"]
+    content["authors"] = "this is not a valid array"
 
-    if PY2:
-        expected = (
-            "Additional properties are not allowed "
-            "(u'this key is not in the schema' was unexpected)"
-        )
-    else:
-        expected = (
-            "Additional properties are not allowed "
-            "('this key is not in the schema' was unexpected)"
-        )
+    expected = "[authors] 'this is not a valid array' is not of type 'array'"
 
     assert Factory.validate(content) == {"errors": [expected], "warnings": []}
 
 
-def test_create_poetry_fails_on_invalid_configuration():
+def test_validate_without_strict_fails_only_non_strict() -> None:
+    project_failing_strict_validation = TOMLFile(
+        fixtures_dir / "project_failing_strict_validation" / "pyproject.toml"
+    )
+    doc: dict[str, Any] = project_failing_strict_validation.read()
+    content = doc["tool"]["poetry"]
+
+    assert Factory.validate(content) == {
+        "errors": [
+            "'name' is a required property",
+            "'version' is a required property",
+            "'description' is a required property",
+            "'authors' is a required property",
+        ],
+        "warnings": [],
+    }
+
+
+def test_validate_strict_fails_strict_and_non_strict() -> None:
+    project_failing_strict_validation = TOMLFile(
+        fixtures_dir / "project_failing_strict_validation" / "pyproject.toml"
+    )
+    doc: dict[str, Any] = project_failing_strict_validation.read()
+    content = doc["tool"]["poetry"]
+
+    assert Factory.validate(content, strict=True) == {
+        "errors": [
+            "'name' is a required property",
+            "'version' is a required property",
+            "'description' is a required property",
+            "'authors' is a required property",
+            'Script "a_script_with_unknown_extra" requires extra "foo" which is not'
+            " defined.",
+            "Declared README files must be of same type: found text/markdown,"
+            " text/x-rst",
+        ],
+        "warnings": [
+            "A wildcard Python dependency is ambiguous. Consider specifying a more"
+            " explicit one.",
+            'The "pathlib2" dependency specifies the "allows-prereleases" property,'
+            ' which is deprecated. Use "allow-prereleases" instead.',
+        ],
+    }
+
+
+def test_strict_validation_success_on_multiple_readme_files() -> None:
+    with_readme_files = TOMLFile(fixtures_dir / "with_readme_files" / "pyproject.toml")
+    doc: dict[str, Any] = with_readme_files.read()
+    content = doc["tool"]["poetry"]
+
+    assert Factory.validate(content, strict=True) == {"errors": [], "warnings": []}
+
+
+def test_strict_validation_fails_on_readme_files_with_unmatching_types() -> None:
+    with_readme_files = TOMLFile(fixtures_dir / "with_readme_files" / "pyproject.toml")
+    doc: dict[str, Any] = with_readme_files.read()
+    content = doc["tool"]["poetry"]
+    content["readme"][0] = "README.md"
+
+    assert Factory.validate(content, strict=True) == {
+        "errors": [
+            "Declared README files must be of same type: found text/markdown,"
+            " text/x-rst"
+        ],
+        "warnings": [],
+    }
+
+
+def test_create_poetry_fails_on_invalid_configuration() -> None:
     with pytest.raises(RuntimeError) as e:
         Factory().create_poetry(
             Path(__file__).parent / "fixtures" / "invalid_pyproject" / "pyproject.toml"
         )
 
-    if PY2:
-        expected = """\
-The Poetry configuration is invalid:
-  - u'description' is a required property
-"""
-    else:
-        expected = """\
+    expected = """\
 The Poetry configuration is invalid:
   - 'description' is a required property
 """
-    assert expected == str(e.value)
+    assert str(e.value) == expected
 
 
-def test_create_poetry_omits_dev_dependencies_iff_with_dev_is_false():
-    poetry = Factory().create_poetry(fixtures_dir / "sample_project", with_dev=False)
-    assert not any(r for r in poetry.package.dev_requires if "pytest" in str(r))
+def test_create_poetry_omits_dev_dependencies_iff_with_dev_is_false() -> None:
+    poetry = Factory().create_poetry(fixtures_dir / "sample_project", with_groups=False)
+    assert not any("dev" in r.groups for r in poetry.package.all_requires)
 
     poetry = Factory().create_poetry(fixtures_dir / "sample_project")
-    assert any(r for r in poetry.package.dev_requires if "pytest" in str(r))
+    assert any("dev" in r.groups for r in poetry.package.all_requires)
 
 
-def test_create_poetry_fails_with_invalid_dev_dependencies_iff_with_dev_is_true():
+def test_create_poetry_fails_with_invalid_dev_dependencies_iff_with_dev_is_true() -> (
+    None
+):
     with pytest.raises(ValueError) as err:
         Factory().create_poetry(fixtures_dir / "project_with_invalid_dev_deps")
     assert "does not exist" in str(err.value)
 
     Factory().create_poetry(
-        fixtures_dir / "project_with_invalid_dev_deps", with_dev=False
+        fixtures_dir / "project_with_invalid_dev_deps", with_groups=False
+    )
+
+
+def test_create_poetry_with_groups_and_legacy_dev() -> None:
+    poetry = Factory().create_poetry(
+        fixtures_dir / "project_with_groups_and_legacy_dev"
+    )
+
+    package = poetry.package
+    dependencies = package.all_requires
+
+    assert len(dependencies) == 2
+    assert {dependency.name for dependency in dependencies} == {"pytest", "pre-commit"}
+
+
+def test_create_poetry_with_groups_and_explicit_main() -> None:
+    poetry = Factory().create_poetry(
+        fixtures_dir / "project_with_groups_and_explicit_main"
     )
+
+    package = poetry.package
+    dependencies = package.requires
+
+    assert len(dependencies) == 1
+    assert {dependency.name for dependency in dependencies} == {
+        "aiohttp",
+    }
+
+
+@pytest.mark.parametrize(
+    "constraint, exp_python, exp_marker",
+    [
+        ({"python": "3.7"}, "~3.7", 'python_version == "3.7"'),
+        ({"platform": "linux"}, "*", 'sys_platform == "linux"'),
+        ({"markers": 'python_version == "3.7"'}, "~3.7", 'python_version == "3.7"'),
+        (
+            {"markers": 'platform_machine == "x86_64"'},
+            "*",
+            'platform_machine == "x86_64"',
+        ),
+        (
+            {"python": "3.7", "markers": 'platform_machine == "x86_64"'},
+            "~3.7",
+            'platform_machine == "x86_64" and python_version == "3.7"',
+        ),
+        (
+            {"platform": "linux", "markers": 'platform_machine == "x86_64"'},
+            "*",
+            'platform_machine == "x86_64" and sys_platform == "linux"',
+        ),
+        (
+            {
+                "python": "3.7",
+                "platform": "linux",
+                "markers": 'platform_machine == "x86_64"',
+            },
+            "~3.7",
+            'platform_machine == "x86_64" and python_version == "3.7" and sys_platform'
+            ' == "linux"',
+        ),
+        (
+            {"python": ">=3.7", "markers": 'python_version < "4.0"'},
+            "<4.0 >=3.7",
+            'python_version < "4.0" and python_version >= "3.7"',
+        ),
+        (
+            {"platform": "linux", "markers": 'sys_platform == "win32"'},
+            "*",
+            "",
+        ),
+    ],
+)
+def test_create_dependency_marker_variants(
+    constraint: dict[str, Any], exp_python: str, exp_marker: str
+) -> None:
+    constraint["version"] = "1.0.0"
+    dep = Factory.create_dependency("foo", constraint)
+    assert dep.python_versions == exp_python
+    assert dep.python_constraint == parse_constraint(exp_python)
+    assert str(dep.marker) == exp_marker
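+    # Editorial reading of the table above: "python" and "platform" keys are
+    # translated into python_version/sys_platform markers and AND-ed with any
+    # explicit "markers" entry; the final case suggests that contradictory
+    # constraints collapse to an empty marker (whose str() is "").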
diff --git a/vendor/poetry-core/tests/testutils.py b/vendor/poetry-core/tests/testutils.py
index 8e4fc266..f29f6004 100644
--- a/vendor/poetry-core/tests/testutils.py
+++ b/vendor/poetry-core/tests/testutils.py
@@ -1,23 +1,17 @@
+from __future__ import annotations
+
 import shutil
 import subprocess
 import tarfile
+import tempfile
 import zipfile
 
 from contextlib import contextmanager
+from pathlib import Path
 from typing import Any
-from typing import ContextManager
-from typing import Dict
-from typing import List
-from typing import Optional
+from typing import Generator
 
 from poetry.core.toml import TOMLFile
-from poetry.core.utils._compat import Path
-
-
-try:
-    from backports import tempfile
-except ImportError:
-    import tempfile
 
 
 __toml_build_backend_patch__ = {
@@ -30,8 +24,8 @@
 
 @contextmanager
 def temporary_project_directory(
-    path, toml_patch=None
-):  # type: (Path, Optional[Dict[str, Any]]) -> ContextManager[str]
+    path: Path, toml_patch: dict[str, Any] | None = None
+) -> Generator[str, None, None]:
     """
     Context manager that takes a project source directory, copies content to a temporary
     directory, patches the `pyproject.toml` using the provided patch, or using the default
@@ -56,34 +50,32 @@ def temporary_project_directory(
         yield str(dst)
 
 
-def subprocess_run(*args, **kwargs):  # type: (str, Any) -> subprocess.CompletedProcess
+def subprocess_run(*args: str, **kwargs: Any) -> subprocess.CompletedProcess[str]:
     """
     Helper method to run a subprocess. Asserts for success.
     """
-    result = subprocess.run(
-        args, text=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs
-    )
+    result = subprocess.run(args, text=True, capture_output=True, **kwargs)
     assert result.returncode == 0
     return result
 
 
 def validate_wheel_contents(
-    name, version, path, files=None
-):  # type: (str, str, str, Optional[List[str]]) -> None
-    dist_info = "{}-{}.dist-info".format(name, version)
+    name: str, version: str, path: str, files: list[str] | None = None
+) -> None:
+    dist_info = f"{name}-{version}.dist-info"
     files = files or []
 
     with zipfile.ZipFile(path) as z:
         namelist = z.namelist()
         # we use concatenation here for PY2 compat
         for filename in ["WHEEL", "METADATA", "RECORD"] + files:
-            assert "{}/{}".format(dist_info, filename) in namelist
+            assert f"{dist_info}/{filename}" in namelist
 
 
 def validate_sdist_contents(
-    name, version, path, files
-):  # type: (str, str, str, List[str]) -> None
+    name: str, version: str, path: str, files: list[str]
+) -> None:
     with tarfile.open(path) as tar:
         namelist = tar.getnames()
         for filename in files:
-            assert "{}-{}/{}".format(name, version, filename) in namelist
+            assert f"{name}-{version}/{filename}" in namelist
diff --git a/vendor/poetry-core/tests/utils/test_helpers.py b/vendor/poetry-core/tests/utils/test_helpers.py
index b07e6730..74c89a5b 100644
--- a/vendor/poetry-core/tests/utils/test_helpers.py
+++ b/vendor/poetry-core/tests/utils/test_helpers.py
@@ -1,15 +1,90 @@
+from __future__ import annotations
+
 import os
 
+from pathlib import Path
 from stat import S_IREAD
 
 import pytest
 
-from poetry.core.utils.helpers import canonicalize_name
+from poetry.core.utils.helpers import combine_unicode
+from poetry.core.utils.helpers import normalize_version
 from poetry.core.utils.helpers import parse_requires
+from poetry.core.utils.helpers import readme_content_type
 from poetry.core.utils.helpers import temporary_directory
 
 
-def test_parse_requires():
+@pytest.mark.parametrize(
+    "version,normalized_version",
+    [
+        (  # already normalized version
+            "1!2.3.4.5.6a7.post8.dev9+local1.123.abc",
+            "1!2.3.4.5.6a7.post8.dev9+local1.123.abc",
+        ),
+        # PEP 440 Normalization
+        # Case sensitivity
+        ("1.1RC1", "1.1rc1"),
+        # Integer Normalization
+        ("00", "0"),
+        ("09000", "9000"),
+        ("1.0+foo0100", "1.0+foo0100"),
+        # Pre-release separators
+        ("1.1.a1", "1.1a1"),
+        ("1.1-a1", "1.1a1"),
+        ("1.1_a1", "1.1a1"),
+        ("1.1a.1", "1.1a1"),
+        ("1.1a-1", "1.1a1"),
+        ("1.1a_1", "1.1a1"),
+        # Pre-release spelling
+        ("1.1alpha1", "1.1a1"),
+        ("1.1beta2", "1.1b2"),
+        ("1.1c3", "1.1rc3"),
+        ("1.1pre4", "1.1rc4"),
+        ("1.1preview5", "1.1rc5"),
+        # Implicit pre-release number
+        ("1.2a", "1.2a0"),
+        # Post release separators
+        ("1.2.post2", "1.2.post2"),
+        ("1.2-post2", "1.2.post2"),
+        ("1.2_post2", "1.2.post2"),
+        ("1.2post.2", "1.2.post2"),
+        ("1.2post-2", "1.2.post2"),
+        ("1.2post_2", "1.2.post2"),
+        # Post release spelling
+        ("1.0-r4", "1.0.post4"),
+        ("1.0-rev4", "1.0.post4"),
+        # Implicit post release number
+        ("1.2.post", "1.2.post0"),
+        # Implicit post releases
+        ("1.0-1", "1.0.post1"),
+        # Development release separators
+        ("1.2.dev2", "1.2.dev2"),
+        ("1.2-dev2", "1.2.dev2"),
+        ("1.2_dev2", "1.2.dev2"),
+        ("1.2dev.2", "1.2.dev2"),
+        ("1.2dev-2", "1.2.dev2"),
+        ("1.2dev_2", "1.2.dev2"),
+        # Implicit development release number
+        ("1.2.dev", "1.2.dev0"),
+        # Local version segments
+        ("1.0+ubuntu-1", "1.0+ubuntu.1"),
+        ("1.0+ubuntu_1", "1.0+ubuntu.1"),
+        # Preceding v character
+        ("v1.0", "1.0"),
+        # Leading and Trailing Whitespace
+        (" 1.0 ", "1.0"),
+        ("\t1.0\t", "1.0"),
+        ("\n1.0\n", "1.0"),
+        ("\r\n1.0\r\n", "1.0"),
+        ("\f1.0\f", "1.0"),
+        ("\v1.0\v", "1.0"),
+    ],
+)
+def test_normalize_version(version: str, normalized_version: str) -> None:
+    assert normalize_version(version) == normalized_version
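+    # Editorial sketch (an assumption about normalize_version): canonical
+    # forms should be fixed points, so re-normalizing the expected value is
+    # presumed to be a no-op.
+    assert normalize_version(normalized_version) == normalized_version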
+
+
+def test_parse_requires() -> None:
     requires = """\
 jsonschema>=2.6.0.0,<3.0.0.0
 lockfile>=0.12.0.0,<0.13.0.0
@@ -53,21 +128,31 @@ def test_parse_requires():
         "msgpack-python>=0.5.0.0,<0.6.0.0",
         "pyparsing>=2.2.0.0,<3.0.0.0",
         "requests-toolbelt>=0.8.0.0,<0.9.0.0",
-        'typing>=3.6.0.0,<4.0.0.0 ; (python_version >= "2.7.0.0" and python_version < "2.8.0.0") or (python_version >= "3.4.0.0" and python_version < "3.5.0.0")',
-        'virtualenv>=15.2.0.0,<16.0.0.0 ; python_version >= "2.7.0.0" and python_version < "2.8.0.0"',
-        'pathlib2>=2.3.0.0,<3.0.0.0 ; python_version >= "2.7.0.0" and python_version < "2.8.0.0"',
-        'zipfile36>=0.1.0.0,<0.2.0.0 ; python_version >= "3.4.0.0" and python_version < "3.6.0.0"',
-        'isort@ git+git://github.com/timothycrosley/isort.git@e63ae06ec7d70b06df9e528357650281a3d3ec22#egg=isort ; extra == "dev"',
+        'typing>=3.6.0.0,<4.0.0.0 ; (python_version >= "2.7.0.0" and python_version <'
+        ' "2.8.0.0") or (python_version >= "3.4.0.0" and python_version < "3.5.0.0")',
+        'virtualenv>=15.2.0.0,<16.0.0.0 ; python_version >= "2.7.0.0" and'
+        ' python_version < "2.8.0.0"',
+        'pathlib2>=2.3.0.0,<3.0.0.0 ; python_version >= "2.7.0.0" and python_version <'
+        ' "2.8.0.0"',
+        'zipfile36>=0.1.0.0,<0.2.0.0 ; python_version >= "3.4.0.0" and python_version <'
+        ' "3.6.0.0"',
+        "isort@"
+        " git+git://github.com/timothycrosley/isort.git@e63ae06ec7d70b06df9e528357650281a3d3ec22#egg=isort"
+        ' ; extra == "dev"',
     ]
     assert result == expected
 
 
-@pytest.mark.parametrize("raw", ["a-b-c", "a_b-c", "a_b_c", "a-b_c"])
-def test_utils_helpers_canonical_names(raw):
-    assert canonicalize_name(raw) == "a-b-c"
+def test_utils_helpers_combine_unicode() -> None:
+    combined_expected = "é"
+    decomposed = "e\u0301"  # "e" followed by U+0301 COMBINING ACUTE ACCENT
+    assert combined_expected != decomposed
 
+    combined = combine_unicode(decomposed)
+    assert combined == combined_expected
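+    # A minimal sketch of the assumed mechanism (NFC composition via the
+    # stdlib); the helper's actual implementation may differ in detail:
+    import unicodedata
+
+    assert unicodedata.normalize("NFC", decomposed) == combined_expected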
 
-def test_utils_helpers_temporary_directory_readonly_file():
+
+def test_utils_helpers_temporary_directory_readonly_file() -> None:
     with temporary_directory() as temp_dir:
         readonly_filename = os.path.join(temp_dir, "file.txt")
         with open(readonly_filename, "w+") as readonly_file:
@@ -76,3 +161,20 @@ def test_utils_helpers_temporary_directory_readonly_file():
 
     assert not os.path.exists(temp_dir)
     assert not os.path.exists(readonly_filename)
+
+
+@pytest.mark.parametrize(
+    "readme, content_type",
+    [
+        ("README.rst", "text/x-rst"),
+        ("README.md", "text/markdown"),
+        ("README", "text/plain"),
+        (Path("README.rst"), "text/x-rst"),
+        (Path("README.md"), "text/markdown"),
+        (Path("README"), "text/plain"),
+    ],
+)
+def test_utils_helpers_readme_content_type(
+    readme: str | Path, content_type: str
+) -> None:
+    assert readme_content_type(readme) == content_type
diff --git a/vendor/poetry-core/tests/vcs/test_vcs.py b/vendor/poetry-core/tests/vcs/test_vcs.py
index 9632d49e..43f87b81 100644
--- a/vendor/poetry-core/tests/vcs/test_vcs.py
+++ b/vendor/poetry-core/tests/vcs/test_vcs.py
@@ -1,10 +1,14 @@
+from __future__ import annotations
+
 import subprocess
 
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
+
 import pytest
 
-from poetry.core.utils._compat import PY36
 from poetry.core.utils._compat import WINDOWS
-from poetry.core.utils._compat import Path
 from poetry.core.vcs.git import Git
 from poetry.core.vcs.git import GitError
 from poetry.core.vcs.git import GitUrl
@@ -12,79 +16,141 @@
 from poetry.core.vcs.git import _reset_executable
 
 
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
+
+
 @pytest.mark.parametrize(
     "url, normalized",
     [
         (
             "git+ssh://user@hostname:project.git#commit",
-            GitUrl("user@hostname:project.git", "commit"),
+            GitUrl("user@hostname:project.git", "commit", None),
         ),
         (
             "git+http://user@hostname/project/blah.git@commit",
-            GitUrl("http://user@hostname/project/blah.git", "commit"),
+            GitUrl("http://user@hostname/project/blah.git", "commit", None),
         ),
         (
             "git+https://user@hostname/project/blah.git",
-            GitUrl("https://user@hostname/project/blah.git", None),
+            GitUrl("https://user@hostname/project/blah.git", None, None),
         ),
         (
             "git+https://user@hostname/project~_-.foo/blah~_-.bar.git",
-            GitUrl("https://user@hostname/project~_-.foo/blah~_-.bar.git", None),
+            GitUrl("https://user@hostname/project~_-.foo/blah~_-.bar.git", None, None),
         ),
         (
             "git+https://user@hostname:project/blah.git",
-            GitUrl("https://user@hostname/project/blah.git", None),
+            GitUrl("https://user@hostname/project/blah.git", None, None),
         ),
         (
             "git+ssh://git@github.com:sdispater/poetry.git#v1.0.27",
-            GitUrl("git@github.com:sdispater/poetry.git", "v1.0.27"),
+            GitUrl("git@github.com:sdispater/poetry.git", "v1.0.27", None),
         ),
         (
             "git+ssh://git@github.com:/sdispater/poetry.git",
-            GitUrl("git@github.com:/sdispater/poetry.git", None),
+            GitUrl("git@github.com:/sdispater/poetry.git", None, None),
+        ),
+        (
+            "git+ssh://git@github.com:org/repo",
+            GitUrl("git@github.com:org/repo", None, None),
         ),
-        ("git+ssh://git@github.com:org/repo", GitUrl("git@github.com:org/repo", None),),
         (
             "git+ssh://git@github.com/org/repo",
-            GitUrl("ssh://git@github.com/org/repo", None),
+            GitUrl("ssh://git@github.com/org/repo", None, None),
         ),
-        ("git+ssh://foo:22/some/path", GitUrl("ssh://foo:22/some/path", None)),
-        ("git@github.com:org/repo", GitUrl("git@github.com:org/repo", None)),
+        ("git+ssh://foo:22/some/path", GitUrl("ssh://foo:22/some/path", None, None)),
+        ("git@github.com:org/repo", GitUrl("git@github.com:org/repo", None, None)),
         (
             "git+https://github.com/sdispater/pendulum",
-            GitUrl("https://github.com/sdispater/pendulum", None),
+            GitUrl("https://github.com/sdispater/pendulum", None, None),
         ),
         (
             "git+https://github.com/sdispater/pendulum#7a018f2d075b03a73409e8356f9b29c9ad4ea2c5",
             GitUrl(
                 "https://github.com/sdispater/pendulum",
                 "7a018f2d075b03a73409e8356f9b29c9ad4ea2c5",
+                None,
             ),
         ),
         (
             "git+ssh://git@git.example.com:b/b.git#v1.0.0",
-            GitUrl("git@git.example.com:b/b.git", "v1.0.0"),
+            GitUrl("git@git.example.com:b/b.git", "v1.0.0", None),
         ),
         (
             "git+ssh://git@github.com:sdispater/pendulum.git#foo/bar",
-            GitUrl("git@github.com:sdispater/pendulum.git", "foo/bar"),
+            GitUrl("git@github.com:sdispater/pendulum.git", "foo/bar", None),
         ),
-        ("git+file:///foo/bar.git", GitUrl("file:///foo/bar.git", None)),
+        ("git+file:///foo/bar.git", GitUrl("file:///foo/bar.git", None, None)),
         (
             "git+file://C:\\Users\\hello\\testing.git#zkat/windows-files",
-            GitUrl("file://C:\\Users\\hello\\testing.git", "zkat/windows-files"),
+            GitUrl("file://C:\\Users\\hello\\testing.git", "zkat/windows-files", None),
+        ),
+        # hidden directories on Windows use $ in their path
+        # python-poetry/poetry#5493
+        (
+            "git+file://C:\\Users\\hello$\\testing.git#zkat/windows-files",
+            GitUrl("file://C:\\Users\\hello$\\testing.git", "zkat/windows-files", None),
         ),
         (
             "git+https://git.example.com/sdispater/project/my_repo.git",
-            GitUrl("https://git.example.com/sdispater/project/my_repo.git", None),
+            GitUrl("https://git.example.com/sdispater/project/my_repo.git", None, None),
         ),
         (
             "git+ssh://git@git.example.com:sdispater/project/my_repo.git",
-            GitUrl("git@git.example.com:sdispater/project/my_repo.git", None),
+            GitUrl("git@git.example.com:sdispater/project/my_repo.git", None, None),
+        ),
+        (
+            "git+https://github.com/demo/pyproject-demo-subdirectory.git#subdirectory=project",
+            GitUrl(
+                "https://github.com/demo/pyproject-demo-subdirectory.git",
+                None,
+                "project",
+            ),
+        ),
+        (
+            "git+https://github.com/demo/pyproject-demo-subdirectory.git@commit#subdirectory=project",
+            GitUrl(
+                "https://github.com/demo/pyproject-demo-subdirectory.git",
+                "commit",
+                "project",
+            ),
+        ),
+        (
+            "git+https://github.com/demo/pyproject-demo-subdirectory.git#commit&subdirectory=project",
+            GitUrl(
+                "https://github.com/demo/pyproject-demo-subdirectory.git",
+                "commit",
+                "project",
+            ),
+        ),
+        (
+            "git+https://github.com/demo/pyproject-demo-subdirectory.git#commit#subdirectory=project",
+            GitUrl(
+                "https://github.com/demo/pyproject-demo-subdirectory.git",
+                "commit",
+                "project",
+            ),
+        ),
+        (
+            "git+https://github.com/demo/pyproject-demo-subdirectory.git@commit&subdirectory=project",
+            GitUrl(
+                "https://github.com/demo/pyproject-demo-subdirectory.git",
+                "commit",
+                "project",
+            ),
+        ),
+        (
+            "git+https://github.com/demo/pyproject-demo-subdirectory.git@subdirectory#subdirectory=subdirectory",
+            GitUrl(
+                "https://github.com/demo/pyproject-demo-subdirectory.git",
+                "subdirectory",
+                "subdirectory",
+            ),
         ),
     ],
 )
-def test_normalize_url(url, normalized):
+def test_normalize_url(url: str, normalized: GitUrl) -> None:
     assert normalized == Git.normalize_url(url)
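+# Editorial summary of the subdirectory cases above: normalize_url is assumed
+# to accept the subdirectory as a plain URL fragment (#subdirectory=project),
+# after a revision (@rev#subdirectory=... or @rev&subdirectory=...), or
+# chained to a fragment revision (#rev&subdirectory=... / #rev#subdirectory=...).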
 
 
@@ -139,6 +205,18 @@ def test_normalize_url(url, normalized):
                 "v1.0.27",
             ),
         ),
+        (
+            "git+ssh://git@github.com:sdispater/poetry.git#egg=name",
+            ParsedUrl(
+                "ssh",
+                "github.com",
+                ":sdispater/poetry.git",
+                "git",
+                None,
+                "poetry",
+                None,
+            ),
+        ),
         (
             "git+ssh://git@github.com:/sdispater/poetry.git",
             ParsedUrl(
@@ -247,9 +325,74 @@ def test_normalize_url(url, normalized):
                 None,
             ),
         ),
+        (
+            "git+ssh://git@git.example.com:sdispater/project/my_repo.git#subdirectory=project-dir",
+            ParsedUrl(
+                "ssh",
+                "git.example.com",
+                ":sdispater/project/my_repo.git",
+                "git",
+                None,
+                "my_repo",
+                None,
+                "project-dir",
+            ),
+        ),
+        (
+            "git+ssh://git@git.example.com:sdispater/project/my_repo.git#commit&subdirectory=project-dir",
+            ParsedUrl(
+                "ssh",
+                "git.example.com",
+                ":sdispater/project/my_repo.git",
+                "git",
+                None,
+                "my_repo",
+                "commit",
+                "project-dir",
+            ),
+        ),
+        (
+            "git+ssh://git@git.example.com:sdispater/project/my_repo.git@commit#subdirectory=project-dir",
+            ParsedUrl(
+                "ssh",
+                "git.example.com",
+                ":sdispater/project/my_repo.git",
+                "git",
+                None,
+                "my_repo",
+                "commit",
+                "project-dir",
+            ),
+        ),
+        (
+            "git+ssh://git@git.example.com:sdispater/project/my_repo.git@commit&subdirectory=project_dir",
+            ParsedUrl(
+                "ssh",
+                "git.example.com",
+                ":sdispater/project/my_repo.git",
+                "git",
+                None,
+                "my_repo",
+                "commit",
+                "project_dir",
+            ),
+        ),
+        (
+            "git+ssh://git@git.example.com:sdispater/project/my_repo.git@commit#egg=package&subdirectory=project_dir",
+            ParsedUrl(
+                "ssh",
+                "git.example.com",
+                ":sdispater/project/my_repo.git",
+                "git",
+                None,
+                "my_repo",
+                "commit",
+                "project_dir",
+            ),
+        ),
     ],
 )
-def test_parse_url(url, parsed):
+def test_parse_url(url: str, parsed: ParsedUrl) -> None:
     result = ParsedUrl.parse(url)
     assert parsed.name == result.name
     assert parsed.pathname == result.pathname
@@ -261,39 +404,45 @@ def test_parse_url(url, parsed):
     assert parsed.user == result.user
 
 
-def test_parse_url_should_fail():
+def test_parse_url_should_fail() -> None:
     url = "https://" + "@" * 64 + "!"
 
     with pytest.raises(ValueError):
         ParsedUrl.parse(url)
 
 
-def test_git_clone_raises_error_on_invalid_repository():
+def test_git_clone_raises_error_on_invalid_repository() -> None:
     with pytest.raises(GitError):
         Git().clone("-u./payload", Path("foo"))
 
 
-def test_git_checkout_raises_error_on_invalid_repository():
+def test_git_checkout_raises_error_on_invalid_repository() -> None:
     with pytest.raises(GitError):
         Git().checkout("-u./payload")
 
 
-def test_git_rev_parse_raises_error_on_invalid_repository():
+def test_git_rev_parse_raises_error_on_invalid_repository() -> None:
     with pytest.raises(GitError):
         Git().rev_parse("-u./payload")
 
 
 @pytest.mark.skipif(
-    not WINDOWS or not PY36,
-    reason="Retrieving the complete path to git is only necessary on Windows, for security reasons",
+    not WINDOWS,
+    reason=(
+        "Retrieving the complete path to git is only necessary on Windows, for security"
+        " reasons"
+    ),
 )
-def test_ensure_absolute_path_to_git(mocker):
+def test_ensure_absolute_path_to_git(mocker: MockerFixture) -> None:
     _reset_executable()
 
-    def checkout_output(cmd, *args, **kwargs):
+    def checkout_output(cmd: list[str], *args: Any, **kwargs: Any) -> str | bytes:
         if Path(cmd[0]).name == "where.exe":
             return "\n".join(
-                [str(Path.cwd().joinpath("git.exe")), "C:\\Git\\cmd\\git.exe"]
+                [
+                    str(Path.cwd().joinpath("git.exe")),
+                    "C:\\Git\\cmd\\git.exe",
+                ]
             )
 
         return b""
@@ -309,10 +458,13 @@ def checkout_output(cmd, *args, **kwargs):
 
 
 @pytest.mark.skipif(
-    not WINDOWS or not PY36,
-    reason="Retrieving the complete path to git is only necessary on Windows, for security reasons",
+    not WINDOWS,
+    reason=(
+        "Retrieving the complete path to git is only necessary on Windows, for security"
+        " reasons"
+    ),
 )
-def test_ensure_existing_git_executable_is_found(mocker):
+def test_ensure_existing_git_executable_is_found(mocker: MockerFixture) -> None:
     mock = mocker.patch.object(subprocess, "check_output", return_value=b"")
 
     Git().run("config")
diff --git a/vendor/poetry-core/tests/version/pep440/__init__.py b/vendor/poetry-core/tests/version/pep440/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry-core/tests/version/pep440/test_segments.py b/vendor/poetry-core/tests/version/pep440/test_segments.py
new file mode 100644
index 00000000..4536b3e0
--- /dev/null
+++ b/vendor/poetry-core/tests/version/pep440/test_segments.py
@@ -0,0 +1,70 @@
+from __future__ import annotations
+
+import pytest
+
+from poetry.core.version.pep440 import Release
+from poetry.core.version.pep440 import ReleaseTag
+from poetry.core.version.pep440.segments import RELEASE_PHASE_NORMALIZATIONS
+
+
+@pytest.mark.parametrize(
+    "parts,result",
+    [
+        ((1,), Release(1)),
+        ((1, 2), Release(1, 2)),
+        ((1, 2, 3), Release(1, 2, 3)),
+        ((1, 2, 3, 4), Release(1, 2, 3, 4)),
+        ((1, 2, 3, 4, 5, 6), Release(1, 2, 3, (4, 5, 6))),
+    ],
+)
+def test_pep440_release_segment_from_parts(
+    parts: tuple[int, ...], result: Release
+) -> None:
+    assert Release.from_parts(*parts) == result
+
+
+@pytest.mark.parametrize(
+    "parts,result",
+    [
+        (("a",), ReleaseTag("alpha", 0)),
+        (("a", 1), ReleaseTag("alpha", 1)),
+        (("b",), ReleaseTag("beta", 0)),
+        (("b", 1), ReleaseTag("beta", 1)),
+        (("pre",), ReleaseTag("preview", 0)),
+        (("pre", 1), ReleaseTag("preview", 1)),
+        (("c",), ReleaseTag("rc", 0)),
+        (("c", 1), ReleaseTag("rc", 1)),
+        (("r",), ReleaseTag("rev", 0)),
+        (("r", 1), ReleaseTag("rev", 1)),
+    ],
+)
+def test_pep440_release_tag_normalisation(
+    parts: tuple[str] | tuple[str, int], result: ReleaseTag
+) -> None:
+    tag = ReleaseTag(*parts)
+    assert tag == result
+    assert tag.to_string() == result.to_string()
+
+
+@pytest.mark.parametrize(
+    "parts,result",
+    [
+        (("a",), ReleaseTag("beta")),
+        (("b",), ReleaseTag("rc")),
+        (("post",), None),
+        (("rc",), None),
+        (("rev",), None),
+        (("dev",), None),
+    ],
+)
+def test_pep440_release_tag_next_phase(
+    parts: tuple[str], result: ReleaseTag | None
+) -> None:
+    assert ReleaseTag(*parts).next_phase() == result
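+# Editorial note: the table above implies a pre-release phase progression of
+# alpha -> beta -> rc, with no phase after rc; post, rev and dev tags are
+# assumed to have no "next phase" at all, hence the None results.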
+
+
+@pytest.mark.parametrize("phase", list({*RELEASE_PHASE_NORMALIZATIONS.keys()}))
+def test_pep440_release_tag_next(phase: str) -> None:
+    tag = ReleaseTag(phase=phase).next()
+    assert tag.phase == RELEASE_PHASE_NORMALIZATIONS[phase]
+    assert tag.number == 1
diff --git a/vendor/poetry-core/tests/version/pep440/test_version.py b/vendor/poetry-core/tests/version/pep440/test_version.py
new file mode 100644
index 00000000..c4266fd8
--- /dev/null
+++ b/vendor/poetry-core/tests/version/pep440/test_version.py
@@ -0,0 +1,261 @@
+from __future__ import annotations
+
+import pytest
+
+from poetry.core.version.exceptions import InvalidVersion
+from poetry.core.version.pep440 import PEP440Version
+from poetry.core.version.pep440 import Release
+from poetry.core.version.pep440 import ReleaseTag
+
+
+@pytest.mark.parametrize(
+    "text,result",
+    [
+        ("1", PEP440Version(release=Release.from_parts(1))),
+        ("1.2.3", PEP440Version(release=Release.from_parts(1, 2, 3))),
+        (
+            "1.2.3-1",
+            PEP440Version(
+                release=Release.from_parts(1, 2, 3), post=ReleaseTag("post", 1)
+            ),
+        ),
+        (
+            "1.2.3.dev1",
+            PEP440Version(
+                release=Release.from_parts(1, 2, 3), dev=ReleaseTag("dev", 1)
+            ),
+        ),
+        (
+            "1.2.3-1.dev1",
+            PEP440Version(
+                release=Release.from_parts(1, 2, 3),
+                post=ReleaseTag("post", 1),
+                dev=ReleaseTag("dev", 1),
+            ),
+        ),
+        (
+            "1.2.3+local",
+            PEP440Version(release=Release.from_parts(1, 2, 3), local="local"),
+        ),
+        (
+            "1.2.3+local.1",
+            PEP440Version(release=Release.from_parts(1, 2, 3), local=("local", 1)),
+        ),
+        (
+            "1.2.3+local1",
+            PEP440Version(release=Release.from_parts(1, 2, 3), local="local1"),
+        ),
+        ("1.2.3+1", PEP440Version(release=Release.from_parts(1, 2, 3), local=1)),
+        (
+            "1.2.3a1",
+            PEP440Version(
+                release=Release.from_parts(1, 2, 3), pre=ReleaseTag("alpha", 1)
+            ),
+        ),
+        (
+            "1.2.3.a1",
+            PEP440Version(
+                release=Release.from_parts(1, 2, 3), pre=ReleaseTag("alpha", 1)
+            ),
+        ),
+        (
+            "1.2.3alpha1",
+            PEP440Version(
+                release=Release.from_parts(1, 2, 3), pre=ReleaseTag("alpha", 1)
+            ),
+        ),
+        (
+            "1.2.3b1",
+            PEP440Version(
+                release=Release.from_parts(1, 2, 3), pre=ReleaseTag("beta", 1)
+            ),
+        ),
+        (
+            "1.2.3.b1",
+            PEP440Version(
+                release=Release.from_parts(1, 2, 3), pre=ReleaseTag("beta", 1)
+            ),
+        ),
+        (
+            "1.2.3beta1",
+            PEP440Version(
+                release=Release.from_parts(1, 2, 3), pre=ReleaseTag("beta", 1)
+            ),
+        ),
+        (
+            "1.2.3rc1",
+            PEP440Version(release=Release.from_parts(1, 2, 3), pre=ReleaseTag("rc", 1)),
+        ),
+        (
+            "1.2.3.rc1",
+            PEP440Version(release=Release.from_parts(1, 2, 3), pre=ReleaseTag("rc", 1)),
+        ),
+        (
+            "2.2.0dev0+build.05669607",
+            PEP440Version(
+                release=Release.from_parts(2, 2, 0),
+                dev=ReleaseTag("dev", 0),
+                local=("build", "05669607"),
+            ),
+        ),
+    ],
+)
+def test_pep440_parse_text(text: str, result: PEP440Version) -> None:
+    assert PEP440Version.parse(text) == result
+
+
+@pytest.mark.parametrize(
+    "text", ["1.2.3.dev1-1", "example-1", "1.2.3-random1", "1.2.3-1-1"]
+)
+def test_pep440_parse_text_invalid_versions(text: str) -> None:
+    with pytest.raises(InvalidVersion):
+        PEP440Version.parse(text)
+
+
+@pytest.mark.parametrize(
+    "version, expected",
+    [
+        ("1", "2"),
+        ("2!1", "2!2"),
+        ("1+local", "2"),
+        ("1.2", "2.0"),
+        ("1.2.3", "2.0.0"),
+        ("1.2.3.4", "2.0.0.0"),
+        ("1.dev0", "1"),
+        ("1.2.dev0", "2.0"),
+        ("1.post1", "2"),
+        ("1.2.post1", "2.0"),
+        ("1.post1.dev0", "2"),
+        ("1.2.post1.dev0", "2.0"),
+        ("1.a1", "1"),
+        ("1.2a1", "2.0"),
+        ("1.a1.post2", "1"),
+        ("1.2a1.post2", "2.0"),
+        ("1.a1.post2.dev0", "1"),
+        ("1.2a1.post2.dev0", "2.0"),
+    ],
+)
+def test_next_major(version: str, expected: str) -> None:
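+    # pre/post/dev suffixes are dropped when bumping; a prerelease such as
+    # "1.a1" already precedes "1", so its next major is the stable "1" itself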
+    v = PEP440Version.parse(version)
+    assert v.next_major().text == expected
+
+
+@pytest.mark.parametrize(
+    "version, expected",
+    [
+        ("1", "1.1"),
+        ("1.2", "1.3"),
+        ("2!1.2", "2!1.3"),
+        ("1.2+local", "1.3"),
+        ("1.2.3", "1.3.0"),
+        ("1.2.3.4", "1.3.0.0"),
+        ("1.dev0", "1"),
+        ("1.2dev0", "1.2"),
+        ("1.2.3dev0", "1.3.0"),
+        ("1.post1", "1.1"),
+        ("1.2.post1", "1.3"),
+        ("1.2.3.post1", "1.3.0"),
+        ("1.post1.dev0", "1.1"),
+        ("1.2.post1.dev0", "1.3"),
+        ("1.a1", "1"),
+        ("1.2a1", "1.2"),
+        ("1.2.3a1", "1.3.0"),
+        ("1.a1.post2", "1"),
+        ("1.2a1.post2", "1.2"),
+        ("1.2.3a1.post2", "1.3.0"),
+        ("1.a1.post2.dev0", "1"),
+        ("1.2a1.post2.dev0", "1.2"),
+        ("1.2.3a1.post2.dev0", "1.3.0"),
+    ],
+)
+def test_next_minor(version: str, expected: str) -> None:
+    v = PEP440Version.parse(version)
+    assert v.next_minor().text == expected
+
+
+@pytest.mark.parametrize(
+    "version, expected",
+    [
+        ("1", "1.0.1"),
+        ("1.2", "1.2.1"),
+        ("1.2.3", "1.2.4"),
+        ("2!1.2.3", "2!1.2.4"),
+        ("1.2.3+local", "1.2.4"),
+        ("1.2.3.4", "1.2.4.0"),
+        ("1.dev0", "1"),
+        ("1.2dev0", "1.2"),
+        ("1.2.3dev0", "1.2.3"),
+        ("1.2.3.4dev0", "1.2.4.0"),
+        ("1.post1", "1.0.1"),
+        ("1.2.post1", "1.2.1"),
+        ("1.2.3.post1", "1.2.4"),
+        ("1.post1.dev0", "1.0.1"),
+        ("1.2.post1.dev0", "1.2.1"),
+        ("1.2.3.post1.dev0", "1.2.4"),
+        ("1.a1", "1"),
+        ("1.2a1", "1.2"),
+        ("1.2.3a1", "1.2.3"),
+        ("1.2.3.4a1", "1.2.4.0"),
+        ("1.a1.post2", "1"),
+        ("1.2a1.post2", "1.2"),
+        ("1.2.3a1.post2", "1.2.3"),
+        ("1.2.3.4a1.post2", "1.2.4.0"),
+        ("1.a1.post2.dev0", "1"),
+        ("1.2a1.post2.dev0", "1.2"),
+        ("1.2.3a1.post2.dev0", "1.2.3"),
+        ("1.2.3.4a1.post2.dev0", "1.2.4.0"),
+    ],
+)
+def test_next_patch(version: str, expected: str) -> None:
+    v = PEP440Version.parse(version)
+    assert v.next_patch().text == expected
+
+
+@pytest.mark.parametrize(
+    "version, expected",
+    [
+        ("1.2a1", "1.2a2"),
+        ("2!1.2a1", "2!1.2a2"),
+        ("1.2dev0", "1.2a0"),
+        ("1.2a1.dev0", "1.2a1"),
+        ("1.2a1.post1.dev0", "1.2a2"),
+    ],
+)
+def test_next_prerelease(version: str, expected: str) -> None:
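+    # a dev release of a prerelease simply finalises it ("1.2a1.dev0" -> "1.2a1")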
+    v = PEP440Version.parse(version)
+    assert v.next_prerelease().text == expected
+
+
+@pytest.mark.parametrize(
+    "version, expected",
+    [
+        ("1", "1.post0"),
+        ("1.post1", "1.post2"),
+        ("9!1.2.3.4", "9!1.2.3.4.post0"),
+        ("9!1.2.3.4.post2", "9!1.2.3.4.post3"),
+        ("1.dev0", "1.post0"),
+        ("1.post1.dev0", "1.post1"),
+        ("1a1", "1a1.post0"),
+        ("1a1.dev0", "1a1.post0"),
+        ("1a1.post2", "1a1.post3"),
+        ("1a1.post2.dev0", "1a1.post2"),
+    ],
+)
+def test_next_postrelease(version: str, expected: str) -> None:
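+    # a dev release without a post segment jumps straight to ".post0"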
+    v = PEP440Version.parse(version)
+    assert v.next_postrelease().text == expected
+
+
+def test_next_devrelease() -> None:
+    v = PEP440Version.parse("9!1.2.3a1.post2.dev3")
+    assert v.next_devrelease().text == "9!1.2.3a1.post2.dev4"
+
+
+def test_next_firstprerelease() -> None:
+    v = PEP440Version.parse("9!1.2.3a1.post2.dev3")
+    assert v.first_prerelease().text == "9!1.2.3a0"
+
+
+def test_next_firstdevrelease() -> None:
+    v = PEP440Version.parse("9!1.2.3a1.post2.dev3")
+    assert v.first_devrelease().text == "9!1.2.3a1.post2.dev0"
diff --git a/vendor/poetry-core/tests/version/test_markers.py b/vendor/poetry-core/tests/version/test_markers.py
index 389d25c1..43e945ba 100644
--- a/vendor/poetry-core/tests/version/test_markers.py
+++ b/vendor/poetry-core/tests/version/test_markers.py
@@ -1,71 +1,84 @@
+from __future__ import annotations
+
 import os
 
+from typing import TYPE_CHECKING
+
 import pytest
 
+from poetry.core.version.markers import AnyMarker
+from poetry.core.version.markers import EmptyMarker
 from poetry.core.version.markers import MarkerUnion
 from poetry.core.version.markers import MultiMarker
 from poetry.core.version.markers import SingleMarker
+from poetry.core.version.markers import dnf
 from poetry.core.version.markers import parse_marker
 
 
-def test_single_marker():
+if TYPE_CHECKING:
+    from poetry.core.version.markers import BaseMarker
+
+
+def test_single_marker() -> None:
     m = parse_marker('sys_platform == "darwin"')
 
     assert isinstance(m, SingleMarker)
     assert m.name == "sys_platform"
-    assert m.constraint_string == "==darwin"
+    assert str(m.constraint) == "darwin"
 
     m = parse_marker('python_version in "2.7, 3.0, 3.1"')
 
     assert isinstance(m, SingleMarker)
     assert m.name == "python_version"
-    assert m.constraint_string == "in 2.7, 3.0, 3.1"
     assert str(m.constraint) == ">=2.7.0,<2.8.0 || >=3.0.0,<3.2.0"
 
     m = parse_marker('"2.7" in python_version')
 
     assert isinstance(m, SingleMarker)
     assert m.name == "python_version"
-    assert m.constraint_string == "in 2.7"
     assert str(m.constraint) == ">=2.7.0,<2.8.0"
 
     m = parse_marker('python_version not in "2.7, 3.0, 3.1"')
 
     assert isinstance(m, SingleMarker)
     assert m.name == "python_version"
-    assert m.constraint_string == "not in 2.7, 3.0, 3.1"
     assert str(m.constraint) == "<2.7.0 || >=2.8.0,<3.0.0 || >=3.2.0"
 
     m = parse_marker(
-        "platform_machine in 'x86_64 X86_64 aarch64 AARCH64 ppc64le PPC64LE amd64 AMD64 win32 WIN32'"
+        "platform_machine in 'x86_64 X86_64 aarch64 AARCH64 ppc64le PPC64LE amd64 AMD64"
+        " win32 WIN32'"
     )
 
     assert isinstance(m, SingleMarker)
     assert m.name == "platform_machine"
     assert (
-        m.constraint_string
-        == "in x86_64 X86_64 aarch64 AARCH64 ppc64le PPC64LE amd64 AMD64 win32 WIN32"
-    )
-    assert str(m.constraint) == (
-        "x86_64 || X86_64 || aarch64 || AARCH64 || ppc64le || PPC64LE || amd64 || AMD64 || win32 || WIN32"
+        str(m.constraint)
+        == "x86_64 || X86_64 || aarch64 || AARCH64 || ppc64le || PPC64LE || amd64 ||"
+        " AMD64 || win32 || WIN32"
     )
 
     m = parse_marker(
-        "platform_machine not in 'x86_64 X86_64 aarch64 AARCH64 ppc64le PPC64LE amd64 AMD64 win32 WIN32'"
+        "platform_machine not in 'x86_64 X86_64 aarch64 AARCH64 ppc64le PPC64LE amd64"
+        " AMD64 win32 WIN32'"
     )
 
     assert isinstance(m, SingleMarker)
     assert m.name == "platform_machine"
     assert (
-        m.constraint_string
-        == "not in x86_64 X86_64 aarch64 AARCH64 ppc64le PPC64LE amd64 AMD64 win32 WIN32"
-    )
-    assert str(m.constraint) == (
-        "!=x86_64, !=X86_64, !=aarch64, !=AARCH64, !=ppc64le, !=PPC64LE, !=amd64, !=AMD64, !=win32, !=WIN32"
+        str(m.constraint)
+        == "!=x86_64, !=X86_64, !=aarch64, !=AARCH64, !=ppc64le, !=PPC64LE, !=amd64,"
+        " !=AMD64, !=win32, !=WIN32"
     )
 
 
-def test_single_marker_intersect():
+def test_single_marker_normalisation() -> None:
+    m1 = SingleMarker("python_version", ">=3.6")
+    m2 = SingleMarker("python_version", ">= 3.6")
+    assert m1 == m2
+    assert hash(m1) == hash(m2)
+
+
+def test_single_marker_intersect() -> None:
     m = parse_marker('sys_platform == "darwin"')
 
     intersection = m.intersect(parse_marker('implementation_name == "cpython"'))
@@ -80,14 +93,14 @@ def test_single_marker_intersect():
     assert str(intersection) == 'python_version >= "3.4" and python_version < "3.6"'
 
 
-def test_single_marker_intersect_compacts_constraints():
+def test_single_marker_intersect_compacts_constraints() -> None:
     m = parse_marker('python_version < "3.6"')
 
     intersection = m.intersect(parse_marker('python_version < "3.4"'))
     assert str(intersection) == 'python_version < "3.4"'
 
 
-def test_single_marker_intersect_with_multi():
+def test_single_marker_intersect_with_multi() -> None:
     m = parse_marker('sys_platform == "darwin"')
 
     intersection = m.intersect(
@@ -95,11 +108,12 @@ def test_single_marker_intersect_with_multi():
     )
     assert (
         str(intersection)
-        == 'implementation_name == "cpython" and python_version >= "3.6" and sys_platform == "darwin"'
+        == 'implementation_name == "cpython" and python_version >= "3.6" and'
+        ' sys_platform == "darwin"'
     )
 
 
-def test_single_marker_intersect_with_multi_with_duplicate():
+def test_single_marker_intersect_with_multi_with_duplicate() -> None:
     m = parse_marker('python_version < "4.0"')
 
     intersection = m.intersect(
@@ -108,7 +122,7 @@ def test_single_marker_intersect_with_multi_with_duplicate():
     assert str(intersection) == 'sys_platform == "darwin" and python_version < "4.0"'
 
 
-def test_single_marker_intersect_with_multi_compacts_constraint():
+def test_single_marker_intersect_with_multi_compacts_constraint() -> None:
     m = parse_marker('python_version < "3.6"')
 
     intersection = m.intersect(
@@ -120,7 +134,25 @@ def test_single_marker_intersect_with_multi_compacts_constraint():
     )
 
 
-def test_single_marker_not_in_python_intersection():
+def test_single_marker_intersect_with_union_leads_to_single_marker() -> None:
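+    # (>= 3.6) and (< 3.6 or >= 3.7) leaves only the >= 3.7 branch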
+    m = parse_marker('python_version >= "3.6"')
+
+    intersection = m.intersect(
+        parse_marker('python_version < "3.6" or python_version >= "3.7"')
+    )
+    assert str(intersection) == 'python_version >= "3.7"'
+
+
+def test_single_marker_intersect_with_union_leads_to_empty() -> None:
+    m = parse_marker('python_version == "3.7"')
+
+    intersection = m.intersect(
+        parse_marker('python_version < "3.7" or python_version >= "3.8"')
+    )
+    assert intersection.is_empty()
+
+
+def test_single_marker_not_in_python_intersection() -> None:
     m = parse_marker('python_version not in "2.7, 3.0, 3.1"')
 
     intersection = m.intersect(
@@ -129,29 +161,60 @@ def test_single_marker_not_in_python_intersection():
     assert str(intersection) == 'python_version not in "2.7, 3.0, 3.1, 3.2"'
 
 
-def test_single_marker_union():
+def test_single_marker_union() -> None:
     m = parse_marker('sys_platform == "darwin"')
 
-    intersection = m.union(parse_marker('implementation_name == "cpython"'))
-    assert (
-        str(intersection)
-        == 'sys_platform == "darwin" or implementation_name == "cpython"'
-    )
+    union = m.union(parse_marker('implementation_name == "cpython"'))
+    assert str(union) == 'sys_platform == "darwin" or implementation_name == "cpython"'
 
+
+def test_single_marker_union_is_any() -> None:
     m = parse_marker('python_version >= "3.4"')
 
-    intersection = m.union(parse_marker('python_version < "3.6"'))
-    assert str(intersection) == 'python_version >= "3.4" or python_version < "3.6"'
+    union = m.union(parse_marker('python_version < "3.6"'))
+    assert union.is_any()
 
 
-def test_single_marker_union_compacts_constraints():
-    m = parse_marker('python_version < "3.6"')
+@pytest.mark.parametrize(
+    ("marker1", "marker2", "expected"),
+    [
+        (
+            'python_version < "3.6"',
+            'python_version < "3.4"',
+            'python_version < "3.6"',
+        ),
+        (
+            'sys_platform == "linux"',
+            'sys_platform != "win32"',
+            'sys_platform != "win32"',
+        ),
+        (
+            'python_version == "3.6"',
+            'python_version > "3.6"',
+            'python_version >= "3.6"',
+        ),
+        (
+            'python_version == "3.6"',
+            'python_version < "3.6"',
+            'python_version <= "3.6"',
+        ),
+        (
+            'python_version < "3.6"',
+            'python_version > "3.6"',
+            'python_version != "3.6"',
+        ),
+    ],
+)
+def test_single_marker_union_is_single_marker(
+    marker1: str, marker2: str, expected: str
+) -> None:
+    m = parse_marker(marker1)
 
-    union = m.union(parse_marker('python_version < "3.4"'))
-    assert str(union) == 'python_version < "3.6"'
+    union = m.union(parse_marker(marker2))
+    assert str(union) == expected
 
 
-def test_single_marker_union_with_multi():
+def test_single_marker_union_with_multi() -> None:
     m = parse_marker('sys_platform == "darwin"')
 
     union = m.union(
@@ -159,11 +222,12 @@ def test_single_marker_union_with_multi():
     )
     assert (
         str(union)
-        == 'implementation_name == "cpython" and python_version >= "3.6" or sys_platform == "darwin"'
+        == 'implementation_name == "cpython" and python_version >= "3.6" or'
+        ' sys_platform == "darwin"'
     )
 
 
-def test_single_marker_union_with_multi_duplicate():
+def test_single_marker_union_with_multi_duplicate() -> None:
     m = parse_marker('sys_platform == "darwin" and python_version >= "3.6"')
 
     union = m.union(
@@ -172,7 +236,61 @@ def test_single_marker_union_with_multi_duplicate():
     assert str(union) == 'sys_platform == "darwin" and python_version >= "3.6"'
 
 
-def test_single_marker_union_with_union():
+@pytest.mark.parametrize(
+    ("single_marker", "multi_marker", "expected"),
+    [
+        (
+            'python_version >= "3.6"',
+            'python_version >= "3.7" and sys_platform == "win32"',
+            'python_version >= "3.6"',
+        ),
+        (
+            'sys_platform == "linux"',
+            'sys_platform != "linux" and sys_platform != "win32"',
+            'sys_platform != "win32"',
+        ),
+    ],
+)
+def test_single_marker_union_with_multi_is_single_marker(
+    single_marker: str, multi_marker: str, expected: str
+) -> None:
+    m = parse_marker(single_marker)
+    union = m.union(parse_marker(multi_marker))
+    assert str(union) == expected
+
+
+def test_single_marker_union_with_multi_cannot_be_simplified() -> None:
+    m = parse_marker('python_version >= "3.7"')
+    union = m.union(parse_marker('python_version >= "3.6" and sys_platform == "win32"'))
+    assert (
+        str(union)
+        == 'python_version >= "3.6" and sys_platform == "win32" or python_version >='
+        ' "3.7"'
+    )
+
+
+def test_single_marker_union_with_multi_is_union_of_single_markers() -> None:
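+    # (py < 3.6 and win32) or py >= 3.6: the py < 3.6 factor is absorbed by the
+    # complementary py >= 3.6 alternative, leaving win32 or py >= 3.6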
+    m = parse_marker('python_version >= "3.6"')
+    union = m.union(parse_marker('python_version < "3.6" and sys_platform == "win32"'))
+    assert str(union) == 'sys_platform == "win32" or python_version >= "3.6"'
+
+
+def test_single_marker_union_with_multi_union_is_union_of_single_markers() -> None:
+    m = parse_marker('python_version >= "3.6"')
+    union = m.union(
+        parse_marker(
+            'python_version < "3.6" and sys_platform == "win32" or python_version <'
+            ' "3.6" and sys_platform == "linux"'
+        )
+    )
+    assert (
+        str(union)
+        == 'sys_platform == "win32" or sys_platform == "linux" or python_version >='
+        ' "3.6"'
+    )
+
+
+def test_single_marker_union_with_union() -> None:
     m = parse_marker('sys_platform == "darwin"')
 
     union = m.union(
@@ -180,18 +298,19 @@ def test_single_marker_union_with_union():
     )
     assert (
         str(union)
-        == 'implementation_name == "cpython" or python_version >= "3.6" or sys_platform == "darwin"'
+        == 'implementation_name == "cpython" or python_version >= "3.6" or sys_platform'
+        ' == "darwin"'
     )
 
 
-def test_single_marker_not_in_python_union():
+def test_single_marker_not_in_python_union() -> None:
     m = parse_marker('python_version not in "2.7, 3.0, 3.1"')
 
     union = m.union(parse_marker('python_version not in "2.7, 3.0, 3.1, 3.2"'))
     assert str(union) == 'python_version not in "2.7, 3.0, 3.1"'
 
 
-def test_single_marker_union_with_union_duplicate():
+def test_single_marker_union_with_union_duplicate() -> None:
     m = parse_marker('sys_platform == "darwin"')
 
     union = m.union(parse_marker('sys_platform == "darwin" or python_version >= "3.6"'))
@@ -208,7 +327,13 @@ def test_single_marker_union_with_union_duplicate():
     assert str(union) == 'sys_platform == "darwin" or python_version <= "3.6"'
 
 
-def test_multi_marker():
+def test_single_marker_union_with_inverse() -> None:
+    m = parse_marker('sys_platform == "darwin"')
+    union = m.union(parse_marker('sys_platform != "darwin"'))
+    assert union.is_any()
+
+
+def test_multi_marker() -> None:
     m = parse_marker('sys_platform == "darwin" and implementation_name == "cpython"')
 
     assert isinstance(m, MultiMarker)
@@ -218,7 +343,7 @@ def test_multi_marker():
     ]
 
 
-def test_multi_marker_is_empty_is_contradictory():
+def test_multi_marker_is_empty_is_contradictory() -> None:
     m = parse_marker(
         'sys_platform == "linux" and python_version >= "3.5" and python_version < "2.8"'
     )
@@ -230,56 +355,242 @@ def test_multi_marker_is_empty_is_contradictory():
     assert m.is_empty()
 
 
-def test_multi_marker_intersect_multi():
+def test_multi_complex_multi_marker_is_empty() -> None:
+    m1 = parse_marker(
+        'python_full_version >= "3.0.0" and python_full_version < "3.4.0"'
+    )
+    m2 = parse_marker(
+        'python_version >= "3.6" and python_full_version < "3.0.0" and python_version <'
+        ' "3.7"'
+    )
+    m3 = parse_marker(
+        'python_version >= "3.6" and python_version < "3.7" and python_full_version >='
+        ' "3.5.0"'
+    )
+
+    m = m1.intersect(m2.union(m3))
+
+    assert m.is_empty()
+
+
+def test_multi_marker_intersect_multi() -> None:
     m = parse_marker('sys_platform == "darwin" and implementation_name == "cpython"')
 
     intersection = m.intersect(
         parse_marker('python_version >= "3.6" and os_name == "Windows"')
     )
-    assert str(intersection) == (
-        'sys_platform == "darwin" and implementation_name == "cpython" '
+    assert (
+        str(intersection)
+        == 'sys_platform == "darwin" and implementation_name == "cpython" '
         'and python_version >= "3.6" and os_name == "Windows"'
     )
 
 
-def test_multi_marker_intersect_multi_with_overlapping_constraints():
+def test_multi_marker_intersect_multi_with_overlapping_constraints() -> None:
     m = parse_marker('sys_platform == "darwin" and python_version < "3.6"')
 
     intersection = m.intersect(
         parse_marker(
-            'python_version <= "3.4" and os_name == "Windows" and sys_platform == "darwin"'
+            'python_version <= "3.4" and os_name == "Windows" and sys_platform =='
+            ' "darwin"'
         )
     )
-    assert str(intersection) == (
-        'sys_platform == "darwin" and python_version <= "3.4" and os_name == "Windows"'
+    assert (
+        str(intersection)
+        == 'sys_platform == "darwin" and python_version <= "3.4" and os_name =='
+        ' "Windows"'
     )
 
 
-def test_multi_marker_union_multi():
-    m = parse_marker('sys_platform == "darwin" and implementation_name == "cpython"')
+def test_multi_marker_intersect_with_union_drops_union() -> None:
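+    # [3, 4) lies inside (< 2 or >= 3), so intersecting with the union is a no-op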
+    m = parse_marker('python_version >= "3" and python_version < "4"')
+    m2 = parse_marker('python_version < "2" or python_version >= "3"')
+    assert str(m.intersect(m2)) == str(m)
+    assert str(m2.intersect(m)) == str(m)
 
-    intersection = m.union(
-        parse_marker('python_version >= "3.6" and os_name == "Windows"')
+
+def test_multi_marker_intersect_with_multi_union_leads_to_empty_in_one_step() -> None:
+    # empty marker in one step
+    # py == 2 and (py < 2 or py >= 3) -> empty
+    m = parse_marker('sys_platform == "darwin" and python_version == "2"')
+    m2 = parse_marker(
+        'sys_platform == "darwin" and (python_version < "2" or python_version >= "3")'
     )
-    assert str(intersection) == (
-        'sys_platform == "darwin" and implementation_name == "cpython" '
-        'or python_version >= "3.6" and os_name == "Windows"'
+    assert m.intersect(m2).is_empty()
+    assert m2.intersect(m).is_empty()
+
+
+def test_multi_marker_intersect_with_multi_union_leads_to_empty_in_two_steps() -> None:
+    # empty marker in two steps
+    # py >= 2 and (py < 2 or py >= 3) -> py >= 3
+    # py < 3 and py >= 3 -> empty
+    m = parse_marker('python_version >= "2" and python_version < "3"')
+    m2 = parse_marker(
+        'sys_platform == "darwin" and (python_version < "2" or python_version >= "3")'
     )
+    assert m.intersect(m2).is_empty()
+    assert m2.intersect(m).is_empty()
 
 
-def test_multi_marker_union_with_union():
+def test_multi_marker_union_multi() -> None:
     m = parse_marker('sys_platform == "darwin" and implementation_name == "cpython"')
 
-    intersection = m.union(
-        parse_marker('python_version >= "3.6" or os_name == "Windows"')
+    union = m.union(parse_marker('python_version >= "3.6" and os_name == "Windows"'))
+    assert (
+        str(union)
+        == 'sys_platform == "darwin" and implementation_name == "cpython" '
+        'or python_version >= "3.6" and os_name == "Windows"'
     )
-    assert str(intersection) == (
-        'python_version >= "3.6" or os_name == "Windows"'
+
+
+def test_multi_marker_union_multi_is_single_marker() -> None:
+    m = parse_marker('python_version >= "3" and sys_platform == "win32"')
+    m2 = parse_marker('sys_platform != "win32" and python_version >= "3"')
+    assert str(m.union(m2)) == 'python_version >= "3"'
+    assert str(m2.union(m)) == 'python_version >= "3"'
+
+
+@pytest.mark.parametrize(
+    "marker1, marker2, expected",
+    [
+        (
+            'python_version >= "3" and sys_platform == "win32"',
+            'python_version >= "3" and sys_platform != "win32" and sys_platform !='
+            ' "linux"',
+            'python_version >= "3" and sys_platform != "linux"',
+        ),
+        (
+            'python_version >= "3.8" and python_version < "4.0" and sys_platform =='
+            ' "win32"',
+            'python_version >= "3.8" and python_version < "4.0"',
+            'python_version >= "3.8" and python_version < "4.0"',
+        ),
+    ],
+)
+def test_multi_marker_union_multi_is_multi(
+    marker1: str, marker2: str, expected: str
+) -> None:
+    m1 = parse_marker(marker1)
+    m2 = parse_marker(marker2)
+    assert str(m1.union(m2)) == expected
+    assert str(m2.union(m1)) == expected
+
+
+@pytest.mark.parametrize(
+    "marker1, marker2, expected",
+    [
+        # Ranges with same start
+        (
+            'python_version >= "3.6" and python_full_version < "3.6.2"',
+            'python_version >= "3.6" and python_version < "3.7"',
+            'python_version >= "3.6" and python_version < "3.7"',
+        ),
+        (
+            'python_version > "3.6" and python_full_version < "3.6.2"',
+            'python_version > "3.6" and python_version < "3.7"',
+            'python_version > "3.6" and python_version < "3.7"',
+        ),
+        # Ranges meet exactly
+        (
+            'python_version >= "3.6" and python_full_version < "3.6.2"',
+            'python_full_version >= "3.6.2" and python_version < "3.7"',
+            'python_version >= "3.6" and python_version < "3.7"',
+        ),
+        (
+            'python_version >= "3.6" and python_full_version <= "3.6.2"',
+            'python_full_version > "3.6.2" and python_version < "3.7"',
+            'python_version >= "3.6" and python_version < "3.7"',
+        ),
+        # Ranges overlap
+        (
+            'python_version >= "3.6" and python_full_version <= "3.6.8"',
+            'python_full_version >= "3.6.2" and python_version < "3.7"',
+            'python_version >= "3.6" and python_version < "3.7"',
+        ),
+        # Ranges with same end.  Ideally the union would give the lower version first.
+        (
+            'python_version >= "3.6" and python_version < "3.7"',
+            'python_full_version >= "3.6.2" and python_version < "3.7"',
+            'python_version < "3.7" and python_version >= "3.6"',
+        ),
+        (
+            'python_version >= "3.6" and python_version <= "3.7"',
+            'python_full_version >= "3.6.2" and python_version <= "3.7"',
+            'python_version <= "3.7" and python_version >= "3.6"',
+        ),
+        # A range covers an exact marker.
+        (
+            'python_version >= "3.6" and python_version <= "3.7"',
+            'python_version == "3.6"',
+            'python_version >= "3.6" and python_version <= "3.7"',
+        ),
+        (
+            'python_version >= "3.6" and python_version <= "3.7"',
+            'python_version == "3.6" and implementation_name == "cpython"',
+            'python_version >= "3.6" and python_version <= "3.7"',
+        ),
+        (
+            'python_version >= "3.6" and python_version <= "3.7"',
+            'python_full_version == "3.6.2"',
+            'python_version >= "3.6" and python_version <= "3.7"',
+        ),
+        (
+            'python_version >= "3.6" and python_version <= "3.7"',
+            'python_full_version == "3.6.2" and implementation_name == "cpython"',
+            'python_version >= "3.6" and python_version <= "3.7"',
+        ),
+        (
+            'python_version >= "3.6" and python_version <= "3.7"',
+            'python_version == "3.7"',
+            'python_version >= "3.6" and python_version <= "3.7"',
+        ),
+        (
+            'python_version >= "3.6" and python_version <= "3.7"',
+            'python_version == "3.7" and implementation_name == "cpython"',
+            'python_version >= "3.6" and python_version <= "3.7"',
+        ),
+    ],
+)
+def test_version_ranges_collapse_on_union(
+    marker1: str, marker2: str, expected: str
+) -> None:
+    m1 = parse_marker(marker1)
+    m2 = parse_marker(marker2)
+    assert str(m1.union(m2)) == expected
+    assert str(m2.union(m1)) == expected
+
+
+def test_multi_marker_union_with_union() -> None:
+    m = parse_marker('sys_platform == "darwin" and implementation_name == "cpython"')
+
+    union = m.union(parse_marker('python_version >= "3.6" or os_name == "Windows"'))
+    assert (
+        str(union)
+        == 'python_version >= "3.6" or os_name == "Windows"'
         ' or sys_platform == "darwin" and implementation_name == "cpython"'
     )
 
 
-def test_marker_union():
+def test_multi_marker_union_with_multi_union_is_single_marker() -> None:
+    m = parse_marker('sys_platform == "darwin" and python_version == "3"')
+    m2 = parse_marker(
+        'sys_platform == "darwin" and python_version < "3" or sys_platform == "darwin"'
+        ' and python_version > "3"'
+    )
+    assert str(m.union(m2)) == 'sys_platform == "darwin"'
+    assert str(m2.union(m)) == 'sys_platform == "darwin"'
+
+
+def test_multi_marker_union_with_union_multi_is_single_marker() -> None:
+    m = parse_marker('sys_platform == "darwin" and python_version == "3"')
+    m2 = parse_marker(
+        'sys_platform == "darwin" and (python_version < "3" or python_version > "3")'
+    )
+    assert str(m.union(m2)) == 'sys_platform == "darwin"'
+    assert str(m2.union(m)) == 'sys_platform == "darwin"'
+
+
+def test_marker_union() -> None:
     m = parse_marker('sys_platform == "darwin" or implementation_name == "cpython"')
 
     assert isinstance(m, MarkerUnion)
@@ -289,56 +600,57 @@ def test_marker_union():
     ]
 
 
-def test_marker_union_deduplicate():
+def test_marker_union_deduplicate() -> None:
     m = parse_marker(
-        'sys_platform == "darwin" or implementation_name == "cpython" or sys_platform == "darwin"'
+        'sys_platform == "darwin" or implementation_name == "cpython" or sys_platform'
+        ' == "darwin"'
     )
 
     assert str(m) == 'sys_platform == "darwin" or implementation_name == "cpython"'
 
 
-def test_marker_union_intersect_single_marker():
+def test_marker_union_intersect_single_marker() -> None:
     m = parse_marker('sys_platform == "darwin" or python_version < "3.4"')
 
     intersection = m.intersect(parse_marker('implementation_name == "cpython"'))
-    assert str(intersection) == (
-        'sys_platform == "darwin" and implementation_name == "cpython" '
+    assert (
+        str(intersection)
+        == 'sys_platform == "darwin" and implementation_name == "cpython" '
         'or python_version < "3.4" and implementation_name == "cpython"'
     )
 
 
-def test_marker_union_intersect_single_with_overlapping_constraints():
+def test_marker_union_intersect_single_with_overlapping_constraints() -> None:
     m = parse_marker('sys_platform == "darwin" or python_version < "3.4"')
 
     intersection = m.intersect(parse_marker('python_version <= "3.6"'))
     assert (
         str(intersection)
-        == 'sys_platform == "darwin" and python_version <= "3.6" or python_version < "3.4"'
+        == 'sys_platform == "darwin" and python_version <= "3.6" or python_version <'
+        ' "3.4"'
     )
 
     m = parse_marker('sys_platform == "darwin" or python_version < "3.4"')
     intersection = m.intersect(parse_marker('sys_platform == "darwin"'))
-    assert (
-        str(intersection)
-        == 'sys_platform == "darwin" or python_version < "3.4" and sys_platform == "darwin"'
-    )
+    assert str(intersection) == 'sys_platform == "darwin"'
 
 
-def test_marker_union_intersect_marker_union():
+def test_marker_union_intersect_marker_union() -> None:
     m = parse_marker('sys_platform == "darwin" or python_version < "3.4"')
 
     intersection = m.intersect(
         parse_marker('implementation_name == "cpython" or os_name == "Windows"')
     )
-    assert str(intersection) == (
-        'sys_platform == "darwin" and implementation_name == "cpython" '
+    assert (
+        str(intersection)
+        == 'sys_platform == "darwin" and implementation_name == "cpython" '
         'or sys_platform == "darwin" and os_name == "Windows" or '
         'python_version < "3.4" and implementation_name == "cpython" or '
         'python_version < "3.4" and os_name == "Windows"'
     )
 
 
-def test_marker_union_intersect_marker_union_drops_unnecessary_markers():
+def test_marker_union_intersect_marker_union_drops_unnecessary_markers() -> None:
     m = parse_marker(
         'python_version >= "2.7" and python_version < "2.8" '
         'or python_version >= "3.4" and python_version < "4.0"'
@@ -353,72 +665,81 @@ def test_marker_union_intersect_marker_union_drops_unnecessary_markers():
         'python_version >= "2.7" and python_version < "2.8" '
         'or python_version >= "3.4" and python_version < "4.0"'
     )
-    assert expected == str(intersection)
+    assert str(intersection) == expected
 
 
-def test_marker_union_intersect_multi_marker():
-    m = parse_marker('sys_platform == "darwin" or python_version < "3.4"')
+def test_marker_union_intersect_multi_marker() -> None:
+    m1 = parse_marker('sys_platform == "darwin" or python_version < "3.4"')
+    m2 = parse_marker('implementation_name == "cpython" and os_name == "Windows"')
 
-    intersection = m.intersect(
-        parse_marker('implementation_name == "cpython" and os_name == "Windows"')
-    )
-    assert str(intersection) == (
-        'implementation_name == "cpython" and os_name == "Windows" and sys_platform == "darwin" '
-        'or implementation_name == "cpython" and os_name == "Windows" and python_version < "3.4"'
+    expected = (
+        'implementation_name == "cpython" and os_name == "Windows" and sys_platform'
+        ' == "darwin" or implementation_name == "cpython" and os_name == "Windows"'
+        ' and python_version < "3.4"'
     )
 
+    intersection = m1.intersect(m2)
+    assert str(intersection) == expected
+
+    intersection = m2.intersect(m1)
+    assert str(intersection) == expected
 
-def test_marker_union_union_with_union():
+
+def test_marker_union_union_with_union() -> None:
     m = parse_marker('sys_platform == "darwin" or python_version < "3.4"')
 
     union = m.union(
         parse_marker('implementation_name == "cpython" or os_name == "Windows"')
     )
-    assert str(union) == (
-        'sys_platform == "darwin" or python_version < "3.4" '
+    assert (
+        str(union)
+        == 'sys_platform == "darwin" or python_version < "3.4" '
         'or implementation_name == "cpython" or os_name == "Windows"'
     )
 
 
-def test_marker_union_union_duplicates():
+def test_marker_union_union_duplicates() -> None:
     m = parse_marker('sys_platform == "darwin" or python_version < "3.4"')
 
     union = m.union(parse_marker('sys_platform == "darwin" or os_name == "Windows"'))
-    assert str(union) == (
-        'sys_platform == "darwin" or python_version < "3.4" or os_name == "Windows"'
+    assert (
+        str(union)
+        == 'sys_platform == "darwin" or python_version < "3.4" or os_name == "Windows"'
     )
 
     m = parse_marker('sys_platform == "darwin" or python_version < "3.4"')
 
     union = m.union(
         parse_marker(
-            'sys_platform == "darwin" or os_name == "Windows" or python_version <= "3.6"'
+            'sys_platform == "darwin" or os_name == "Windows" or python_version <='
+            ' "3.6"'
         )
     )
-    assert str(union) == (
-        'sys_platform == "darwin" or python_version <= "3.6" or os_name == "Windows"'
+    assert (
+        str(union)
+        == 'sys_platform == "darwin" or python_version <= "3.6" or os_name == "Windows"'
     )
 
 
-def test_marker_union_all_any():
+def test_marker_union_all_any() -> None:
     union = MarkerUnion(parse_marker(""), parse_marker(""))
 
     assert union.is_any()
 
 
-def test_marker_union_not_all_any():
+def test_marker_union_not_all_any() -> None:
     union = MarkerUnion(parse_marker(""), parse_marker(""), parse_marker("<empty>"))
 
     assert union.is_any()
 
 
-def test_marker_union_all_empty():
+def test_marker_union_all_empty() -> None:
     union = MarkerUnion(parse_marker("<empty>"), parse_marker("<empty>"))
 
     assert union.is_empty()
 
 
-def test_marker_union_not_all_empty():
+def test_marker_union_not_all_empty() -> None:
     union = MarkerUnion(
         parse_marker(""), parse_marker(""), parse_marker("")
     )
@@ -426,44 +747,46 @@ def test_marker_union_not_all_empty():
     assert not union.is_empty()
 
 
-def test_marker_str_conversion_skips_empty_and_any():
+def test_marker_str_conversion_skips_empty_and_any() -> None:
     union = MarkerUnion(
         parse_marker(""),
         parse_marker(
-            'sys_platform == "darwin" or python_version <= "3.6" or os_name == "Windows"'
+            'sys_platform == "darwin" or python_version <= "3.6" or os_name =='
+            ' "Windows"'
         ),
         parse_marker(""),
     )
 
-    assert str(union) == (
-        'sys_platform == "darwin" or python_version <= "3.6" or os_name == "Windows"'
+    assert (
+        str(union)
+        == 'sys_platform == "darwin" or python_version <= "3.6" or os_name == "Windows"'
     )
 
 
-def test_intersect_compacts_constraints():
+def test_intersect_compacts_constraints() -> None:
     m = parse_marker('python_version < "4.0"')
 
     intersection = m.intersect(parse_marker('python_version < "5.0"'))
     assert str(intersection) == 'python_version < "4.0"'
 
 
-def test_multi_marker_removes_duplicates():
+def test_multi_marker_removes_duplicates() -> None:
     m = parse_marker('sys_platform == "win32" and sys_platform == "win32"')
 
-    assert 'sys_platform == "win32"' == str(m)
+    assert str(m) == 'sys_platform == "win32"'
 
     m = parse_marker(
         'sys_platform == "darwin" and implementation_name == "cpython" '
         'and sys_platform == "darwin" and implementation_name == "cpython"'
     )
 
-    assert 'sys_platform == "darwin" and implementation_name == "cpython"' == str(m)
+    assert str(m) == 'sys_platform == "darwin" and implementation_name == "cpython"'
 
 
 @pytest.mark.parametrize(
     ("marker_string", "environment", "expected"),
     [
-        ("os_name == '{0}'".format(os.name), None, True),
+        (f"os_name == '{os.name}'", None, True),
         ("os_name == 'foo'", {"os_name": "foo"}, True),
         ("os_name == 'foo'", {"os_name": "bar"}, False),
         ("'2.7' in python_version", {"python_version": "2.7.5"}, True),
@@ -474,23 +797,23 @@ def test_multi_marker_removes_duplicates():
             True,
         ),
         (
-            "python_version ~= '2.7.0' and (os_name == 'foo' or " "os_name == 'bar')",
+            "python_version ~= '2.7.0' and (os_name == 'foo' or os_name == 'bar')",
             {"os_name": "foo", "python_version": "2.7.4"},
             True,
         ),
         (
-            "python_version ~= '2.7.0' and (os_name == 'foo' or " "os_name == 'bar')",
+            "python_version ~= '2.7.0' and (os_name == 'foo' or os_name == 'bar')",
             {"os_name": "bar", "python_version": "2.7.4"},
             True,
         ),
         (
-            "python_version ~= '2.7.0' and (os_name == 'foo' or " "os_name == 'bar')",
+            "python_version ~= '2.7.0' and (os_name == 'foo' or os_name == 'bar')",
             {"os_name": "other", "python_version": "2.7.4"},
             False,
         ),
         ("extra == 'security'", {"extra": "quux"}, False),
         ("extra == 'security'", {"extra": "security"}, True),
-        ("os.name == '{0}'".format(os.name), None, True),
+        (f"os.name == '{os.name}'", None, True),
         ("sys.platform == 'win32'", {"sys_platform": "linux2"}, False),
         ("platform.version in 'Ubuntu'", {"platform_version": "#39"}, False),
         ("platform.machine=='x86_64'", {"platform_machine": "x86_64"}, True),
@@ -500,33 +823,39 @@ def test_multi_marker_removes_duplicates():
             False,
         ),
         (
-            "python_version == '2.5' and platform.python_implementation" "!= 'Jython'",
+            "python_version == '2.5' and platform.python_implementation!= 'Jython'",
             {"python_version": "2.7"},
             False,
         ),
         (
-            "platform_machine in 'x86_64 X86_64 aarch64 AARCH64 ppc64le PPC64LE amd64 AMD64 win32 WIN32'",
+            "platform_machine in 'x86_64 X86_64 aarch64 AARCH64 ppc64le PPC64LE amd64"
+            " AMD64 win32 WIN32'",
             {"platform_machine": "foo"},
             False,
         ),
         (
-            "platform_machine in 'x86_64 X86_64 aarch64 AARCH64 ppc64le PPC64LE amd64 AMD64 win32 WIN32'",
+            "platform_machine in 'x86_64 X86_64 aarch64 AARCH64 ppc64le PPC64LE amd64"
+            " AMD64 win32 WIN32'",
             {"platform_machine": "x86_64"},
             True,
         ),
         (
-            "platform_machine not in 'x86_64 X86_64 aarch64 AARCH64 ppc64le PPC64LE amd64 AMD64 win32 WIN32'",
+            "platform_machine not in 'x86_64 X86_64 aarch64 AARCH64 ppc64le PPC64LE"
+            " amd64 AMD64 win32 WIN32'",
             {"platform_machine": "foo"},
             True,
         ),
         (
-            "platform_machine not in 'x86_64 X86_64 aarch64 AARCH64 ppc64le PPC64LE amd64 AMD64 win32 WIN32'",
+            "platform_machine not in 'x86_64 X86_64 aarch64 AARCH64 ppc64le PPC64LE"
+            " amd64 AMD64 win32 WIN32'",
             {"platform_machine": "x86_64"},
             False,
         ),
     ],
 )
-def test_validate(marker_string, environment, expected):
+def test_validate(
+    marker_string: str, environment: dict[str, str] | None, expected: bool
+) -> None:
     m = parse_marker(marker_string)
 
     assert m.validate(environment) is expected
@@ -541,7 +870,7 @@ def test_validate(marker_string, environment, expected):
         )
     ],
 )
-def test_parse_version_like_markers(marker, env):
+def test_parse_version_like_markers(marker: str, env: dict[str, str]) -> None:
     m = parse_marker(marker)
 
     assert m.validate(env)
@@ -557,23 +886,28 @@ def test_parse_version_like_markers(marker, env):
             'python_version >= "3.6"',
         ),
         (
-            'python_version >= "3.6" and (extra == "foo" or extra == "bar") or implementation_name == "pypy"',
+            'python_version >= "3.6" and (extra == "foo" or extra == "bar") or'
+            ' implementation_name == "pypy"',
             'python_version >= "3.6" or implementation_name == "pypy"',
         ),
         (
-            'python_version >= "3.6" and extra == "foo" or implementation_name == "pypy" and extra == "bar"',
+            'python_version >= "3.6" and extra == "foo" or implementation_name =='
+            ' "pypy" and extra == "bar"',
             'python_version >= "3.6" or implementation_name == "pypy"',
         ),
         (
-            'python_version >= "3.6" or extra == "foo" and implementation_name == "pypy" or extra == "bar"',
+            'python_version >= "3.6" or extra == "foo" and implementation_name =='
+            ' "pypy" or extra == "bar"',
             'python_version >= "3.6" or implementation_name == "pypy"',
         ),
+        ('extra == "foo"', ""),
+        ('extra == "foo" or extra == "bar"', ""),
     ],
 )
-def test_without_extras(marker, expected):
+def test_without_extras(marker: str, expected: str) -> None:
     m = parse_marker(marker)
 
-    assert expected == str(m.without_extras())
+    assert str(m.without_extras()) == expected
 
 
 @pytest.mark.parametrize(
@@ -589,38 +923,47 @@ def test_without_extras(marker, expected):
         (
             'python_version >= "3.6" and (extra == "foo" or extra == "bar")',
             "python_version",
-            '(extra == "foo" or extra == "bar")',
+            'extra == "foo" or extra == "bar"',
         ),
         (
-            'python_version >= "3.6" and (extra == "foo" or extra == "bar") or implementation_name == "pypy"',
+            'python_version >= "3.6" and (extra == "foo" or extra == "bar") or'
+            ' implementation_name == "pypy"',
             "python_version",
-            '(extra == "foo" or extra == "bar") or implementation_name == "pypy"',
+            'extra == "foo" or extra == "bar" or implementation_name == "pypy"',
         ),
         (
-            'python_version >= "3.6" and extra == "foo" or implementation_name == "pypy" and extra == "bar"',
+            'python_version >= "3.6" and extra == "foo" or implementation_name =='
+            ' "pypy" and extra == "bar"',
             "implementation_name",
             'python_version >= "3.6" and extra == "foo" or extra == "bar"',
         ),
         (
-            'python_version >= "3.6" or extra == "foo" and implementation_name == "pypy" or extra == "bar"',
+            'python_version >= "3.6" or extra == "foo" and implementation_name =='
+            ' "pypy" or extra == "bar"',
             "implementation_name",
             'python_version >= "3.6" or extra == "foo" or extra == "bar"',
         ),
+        (
+            'extra == "foo" and python_version >= "3.6" or python_version >= "3.6"',
+            "extra",
+            'python_version >= "3.6"',
+        ),
     ],
 )
-def test_exclude(marker, excluded, expected):
+def test_exclude(marker: str, excluded: str, expected: str) -> None:
     m = parse_marker(marker)
 
     if expected == "*":
         assert m.exclude(excluded).is_any()
     else:
-        assert expected == str(m.exclude(excluded))
+        assert str(m.exclude(excluded)) == expected
 
 
 @pytest.mark.parametrize(
     "marker, only, expected",
     [
         ('python_version >= "3.6"', ["python_version"], 'python_version >= "3.6"'),
+        ('python_version >= "3.6"', ["sys_platform"], ""),
         (
             'python_version >= "3.6" and extra == "foo"',
             ["python_version"],
@@ -629,42 +972,54 @@ def test_exclude(marker, excluded, expected):
         (
             'python_version >= "3.6" and (extra == "foo" or extra == "bar")',
             ["extra"],
-            '(extra == "foo" or extra == "bar")',
+            'extra == "foo" or extra == "bar"',
         ),
         (
-            'python_version >= "3.6" and (extra == "foo" or extra == "bar") or implementation_name == "pypy"',
+            'python_version >= "3.6" and (extra == "foo" or extra == "bar") or'
+            ' implementation_name == "pypy"',
             ["implementation_name"],
             'implementation_name == "pypy"',
         ),
         (
-            'python_version >= "3.6" and extra == "foo" or implementation_name == "pypy" and extra == "bar"',
+            'python_version >= "3.6" and extra == "foo" or implementation_name =='
+            ' "pypy" and extra == "bar"',
             ["implementation_name"],
             'implementation_name == "pypy"',
         ),
         (
-            'python_version >= "3.6" or extra == "foo" and implementation_name == "pypy" or extra == "bar"',
+            'python_version >= "3.6" or extra == "foo" and implementation_name =='
+            ' "pypy" or extra == "bar"',
             ["implementation_name"],
             'implementation_name == "pypy"',
         ),
         (
-            'python_version >= "3.6" or extra == "foo" and implementation_name == "pypy" or extra == "bar"',
+            'python_version >= "3.6" or extra == "foo" and implementation_name =='
+            ' "pypy" or extra == "bar"',
             ["implementation_name", "python_version"],
             'python_version >= "3.6" or implementation_name == "pypy"',
         ),
     ],
 )
-def test_only(marker, only, expected):
+def test_only(marker: str, only: list[str], expected: str) -> None:
     m = parse_marker(marker)
 
-    assert expected == str(m.only(*only))
+    assert str(m.only(*only)) == expected
 
 
-def test_union_of_a_single_marker_is_the_single_marker():
+def test_union_of_a_single_marker_is_the_single_marker() -> None:
     union = MarkerUnion.of(SingleMarker("python_version", ">= 2.7"))
 
     assert SingleMarker("python_version", ">= 2.7") == union
 
 
+def test_union_of_multi_with_a_containing_single() -> None:
+    single = parse_marker('python_version >= "2.7"')
+    multi = parse_marker('python_version >= "2.7" and extra == "foo"')
+    union = multi.union(single)
+
+    assert union == single
+
+
 @pytest.mark.parametrize(
     "marker, inverse",
     [
@@ -690,12 +1045,12 @@ def test_union_of_a_single_marker_is_the_single_marker():
             'python_version < "3.6" or python_version >= "4.0"',
         ),
         (
-            'python_version ~= "3.6.3"',
-            'python_version < "3.6.3" or python_version >= "3.7.0"',
+            'python_full_version ~= "3.6.3"',
+            'python_full_version < "3.6.3" or python_full_version >= "3.7.0"',
         ),
     ],
 )
-def test_invert(marker, inverse):
+def test_invert(marker: str, inverse: str) -> None:
     m = parse_marker(marker)
 
     assert parse_marker(inverse) == m.invert()
@@ -705,12 +1060,375 @@ def test_invert(marker, inverse):
     "marker, expected",
     [
         (
-            'python_version >= "3.6" or python_version < "3.7" or python_version < "3.6"',
+            'python_version >= "3.6" or python_version < "3.7" or python_version <'
+            ' "3.6"',
             'python_version >= "3.6" or python_version < "3.7"',
         ),
     ],
 )
-def test_union_should_drop_markers_if_their_complement_is_present(marker, expected):
+def test_union_should_drop_markers_if_their_complement_is_present(
+    marker: str, expected: str
+) -> None:
     m = parse_marker(marker)
 
     assert parse_marker(expected) == m
+
+
+@pytest.mark.parametrize(
+    "scheme, marker, expected",
+    [
+        ("empty", EmptyMarker(), EmptyMarker()),
+        ("any", AnyMarker(), AnyMarker()),
+        (
+            "A_",
+            SingleMarker("python_version", ">=3.7"),
+            SingleMarker("python_version", ">=3.7"),
+        ),
+        (
+            "AB_",
+            MultiMarker(
+                SingleMarker("python_version", ">=3.7"),
+                SingleMarker("python_version", "<3.9"),
+            ),
+            MultiMarker(
+                SingleMarker("python_version", ">=3.7"),
+                SingleMarker("python_version", "<3.9"),
+            ),
+        ),
+        (
+            "A+B_",
+            MarkerUnion(
+                SingleMarker("python_version", "<3.7"),
+                SingleMarker("python_version", ">=3.9"),
+            ),
+            MarkerUnion(
+                SingleMarker("python_version", "<3.7"),
+                SingleMarker("python_version", ">=3.9"),
+            ),
+        ),
+        (
+            "AB+AC_",
+            MarkerUnion(
+                MultiMarker(
+                    SingleMarker("python_version", ">=3.7"),
+                    SingleMarker("python_version", "<3.9"),
+                ),
+                MultiMarker(
+                    SingleMarker("python_version", ">=3.7"),
+                    SingleMarker("sys_platform", "linux"),
+                ),
+            ),
+            MarkerUnion(
+                MultiMarker(
+                    SingleMarker("python_version", ">=3.7"),
+                    SingleMarker("python_version", "<3.9"),
+                ),
+                MultiMarker(
+                    SingleMarker("python_version", ">=3.7"),
+                    SingleMarker("sys_platform", "linux"),
+                ),
+            ),
+        ),
+        (
+            "A(B+C)_AB+AC",
+            MultiMarker(
+                SingleMarker("python_version", ">=3.7"),
+                MarkerUnion(
+                    SingleMarker("python_version", "<3.9"),
+                    SingleMarker("sys_platform", "linux"),
+                ),
+            ),
+            MarkerUnion(
+                MultiMarker(
+                    SingleMarker("python_version", ">=3.7"),
+                    SingleMarker("python_version", "<3.9"),
+                ),
+                MultiMarker(
+                    SingleMarker("python_version", ">=3.7"),
+                    SingleMarker("sys_platform", "linux"),
+                ),
+            ),
+        ),
+        (
+            "(A+B)(C+D)_AC+AD+BC+BD",
+            MultiMarker(
+                MarkerUnion(
+                    SingleMarker("python_version", ">=3.7"),
+                    SingleMarker("sys_platform", "win32"),
+                ),
+                MarkerUnion(
+                    SingleMarker("python_version", "<3.9"),
+                    SingleMarker("sys_platform", "linux"),
+                ),
+            ),
+            MarkerUnion(
+                MultiMarker(
+                    SingleMarker("python_version", ">=3.7"),
+                    SingleMarker("python_version", "<3.9"),
+                ),
+                MultiMarker(
+                    SingleMarker("python_version", ">=3.7"),
+                    SingleMarker("sys_platform", "linux"),
+                ),
+                MultiMarker(
+                    SingleMarker("sys_platform", "win32"),
+                    SingleMarker("python_version", "<3.9"),
+                ),
+            ),
+        ),
+        (
+            "A(B+C)+(D+E)(F+G)_AB+AC+DF+DG+EF+DG",
+            MarkerUnion(
+                MultiMarker(
+                    SingleMarker("sys_platform", "win32"),
+                    MarkerUnion(
+                        SingleMarker("python_version", "<3.7"),
+                        SingleMarker("python_version", ">=3.9"),
+                    ),
+                ),
+                MultiMarker(
+                    MarkerUnion(
+                        SingleMarker("python_version", "<3.8"),
+                        SingleMarker("python_version", ">=3.9"),
+                    ),
+                    MarkerUnion(
+                        SingleMarker("sys_platform", "linux"),
+                        SingleMarker("python_version", ">=3.9"),
+                    ),
+                ),
+            ),
+            MarkerUnion(
+                MultiMarker(
+                    SingleMarker("sys_platform", "win32"),
+                    SingleMarker("python_version", "<3.7"),
+                ),
+                SingleMarker("python_version", ">=3.9"),
+                MultiMarker(
+                    SingleMarker("python_version", "<3.8"),
+                    SingleMarker("sys_platform", "linux"),
+                ),
+            ),
+        ),
+        (
+            "(A+B(C+D))(E+F)_AE+AF+BCE+BCF+BDE+BDF",
+            MultiMarker(
+                MarkerUnion(
+                    SingleMarker("python_version", ">=3.9"),
+                    MultiMarker(
+                        SingleMarker("implementation_name", "cpython"),
+                        MarkerUnion(
+                            SingleMarker("python_version", "<3.7"),
+                            SingleMarker("python_version", ">=3.8"),
+                        ),
+                    ),
+                ),
+                MarkerUnion(
+                    SingleMarker("sys_platform", "win32"),
+                    SingleMarker("sys_platform", "linux"),
+                ),
+            ),
+            MarkerUnion(
+                MultiMarker(
+                    SingleMarker("python_version", ">=3.9"),
+                    SingleMarker("sys_platform", "win32"),
+                ),
+                MultiMarker(
+                    SingleMarker("python_version", ">=3.9"),
+                    SingleMarker("sys_platform", "linux"),
+                ),
+                MultiMarker(
+                    SingleMarker("implementation_name", "cpython"),
+                    SingleMarker("python_version", "<3.7"),
+                    SingleMarker("sys_platform", "win32"),
+                ),
+                MultiMarker(
+                    SingleMarker("implementation_name", "cpython"),
+                    SingleMarker("python_version", "<3.7"),
+                    SingleMarker("sys_platform", "linux"),
+                ),
+                MultiMarker(
+                    SingleMarker("implementation_name", "cpython"),
+                    SingleMarker("python_version", ">=3.8"),
+                    SingleMarker("sys_platform", "win32"),
+                ),
+                MultiMarker(
+                    SingleMarker("implementation_name", "cpython"),
+                    SingleMarker("python_version", ">=3.8"),
+                    SingleMarker("sys_platform", "linux"),
+                ),
+            ),
+        ),
+    ],
+)
+def test_dnf(scheme: str, marker: BaseMarker, expected: BaseMarker) -> None:
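+    # "scheme" is a readable test id: letters stand for single markers,
+    # juxtaposition means "and", "+" means "or", and the part after "_"
+    # is the expected disjunctive normal form.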
+    assert dnf(marker) == expected
+
+
+def test_single_markers_are_found_in_complex_intersection() -> None:
+    m1 = parse_marker('implementation_name != "pypy" and python_version <= "3.6"')
+    m2 = parse_marker(
+        'python_version >= "3.6" and python_version < "4.0" and implementation_name =='
+        ' "cpython"'
+    )
+    intersection = m1.intersect(m2)
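+    # "!= pypy" is subsumed by "== cpython", and the upper/lower
+    # python_version bounds collapse to an exact "== 3.6".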
+    assert (
+        str(intersection)
+        == 'implementation_name == "cpython" and python_version == "3.6"'
+    )
+
+
+@pytest.mark.parametrize(
+    "python_version, python_full_version, "
+    "expected_intersection_version, expected_union_version",
+    [
+        # python_version > 3.6 (equal to python_full_version >= 3.7.0)
+        ('> "3.6"', '> "3.5.2"', '> "3.6"', '> "3.5.2"'),
+        ('> "3.6"', '>= "3.5.2"', '> "3.6"', '>= "3.5.2"'),
+        ('> "3.6"', '> "3.6.2"', '> "3.6"', '> "3.6.2"'),
+        ('> "3.6"', '>= "3.6.2"', '> "3.6"', '>= "3.6.2"'),
+        ('> "3.6"', '> "3.7.0"', '> "3.7.0"', '> "3.6"'),
+        ('> "3.6"', '>= "3.7.0"', '> "3.6"', '> "3.6"'),
+        ('> "3.6"', '> "3.7.1"', '> "3.7.1"', '> "3.6"'),
+        ('> "3.6"', '>= "3.7.1"', '>= "3.7.1"', '> "3.6"'),
+        ('> "3.6"', '== "3.6.2"', "<empty>", None),
+        ('> "3.6"', '== "3.7.0"', '== "3.7.0"', '> "3.6"'),
+        ('> "3.6"', '== "3.7.1"', '== "3.7.1"', '> "3.6"'),
+        ('> "3.6"', '!= "3.6.2"', '> "3.6"', '!= "3.6.2"'),
+        ('> "3.6"', '!= "3.7.0"', '> "3.7.0"', ""),
+        ('> "3.6"', '!= "3.7.1"', None, ""),
+        ('> "3.6"', '< "3.7.0"', "<empty>", ""),
+        ('> "3.6"', '<= "3.7.0"', '== "3.7.0"', ""),
+        ('> "3.6"', '< "3.7.1"', None, ""),
+        ('> "3.6"', '<= "3.7.1"', None, ""),
+        # python_version >= 3.6 (equal to python_full_version >= 3.6.0)
+        ('>= "3.6"', '> "3.5.2"', '>= "3.6"', '> "3.5.2"'),
+        ('>= "3.6"', '>= "3.5.2"', '>= "3.6"', '>= "3.5.2"'),
+        ('>= "3.6"', '> "3.6.0"', '> "3.6.0"', '>= "3.6"'),
+        ('>= "3.6"', '>= "3.6.0"', '>= "3.6"', '>= "3.6"'),
+        ('>= "3.6"', '> "3.6.1"', '> "3.6.1"', '>= "3.6"'),
+        ('>= "3.6"', '>= "3.6.1"', '>= "3.6.1"', '>= "3.6"'),
+        ('>= "3.6"', '== "3.5.2"', "<empty>", None),
+        ('>= "3.6"', '== "3.6.0"', '== "3.6.0"', '>= "3.6"'),
+        ('>= "3.6"', '!= "3.5.2"', '>= "3.6"', '!= "3.5.2"'),
+        ('>= "3.6"', '!= "3.6.0"', '> "3.6.0"', ""),
+        ('>= "3.6"', '!= "3.6.1"', None, ""),
+        ('>= "3.6"', '!= "3.7.1"', None, ""),
+        ('>= "3.6"', '< "3.6.0"', "<empty>", ""),
+        ('>= "3.6"', '<= "3.6.0"', '== "3.6.0"', ""),
+        ('>= "3.6"', '< "3.6.1"', None, ""),  # '== "3.6.0"'
+        ('>= "3.6"', '<= "3.6.1"', None, ""),
+        # python_version < 3.6 (equal to python_full_version < 3.6.0)
+        ('< "3.6"', '< "3.5.2"', '< "3.5.2"', '< "3.6"'),
+        ('< "3.6"', '<= "3.5.2"', '<= "3.5.2"', '< "3.6"'),
+        ('< "3.6"', '< "3.6.0"', '< "3.6"', '< "3.6"'),
+        ('< "3.6"', '<= "3.6.0"', '< "3.6"', '<= "3.6.0"'),
+        ('< "3.6"', '< "3.6.1"', '< "3.6"', '< "3.6.1"'),
+        ('< "3.6"', '<= "3.6.1"', '< "3.6"', '<= "3.6.1"'),
+        ('< "3.6"', '== "3.5.2"', '== "3.5.2"', '< "3.6"'),
+        ('< "3.6"', '== "3.6.0"', "<empty>", '<= "3.6.0"'),
+        ('< "3.6"', '!= "3.5.2"', None, ""),
+        ('< "3.6"', '!= "3.6.0"', '< "3.6"', '!= "3.6.0"'),
+        ('< "3.6"', '> "3.6.0"', "<empty>", '!= "3.6.0"'),
+        ('< "3.6"', '>= "3.6.0"', "<empty>", ""),
+        ('< "3.6"', '> "3.5.2"', None, ""),
+        ('< "3.6"', '>= "3.5.2"', None, ""),
+        # python_version <= 3.6 (equal to python_full_version < 3.7.0)
+        ('<= "3.6"', '< "3.6.1"', '< "3.6.1"', '<= "3.6"'),
+        ('<= "3.6"', '<= "3.6.1"', '<= "3.6.1"', '<= "3.6"'),
+        ('<= "3.6"', '< "3.7.0"', '<= "3.6"', '<= "3.6"'),
+        ('<= "3.6"', '<= "3.7.0"', '<= "3.6"', '<= "3.7.0"'),
+        ('<= "3.6"', '== "3.6.1"', '== "3.6.1"', '<= "3.6"'),
+        ('<= "3.6"', '== "3.7.0"', "<empty>", '<= "3.7.0"'),
+        ('<= "3.6"', '!= "3.6.1"', None, ""),
+        ('<= "3.6"', '!= "3.7.0"', '<= "3.6"', '!= "3.7.0"'),
+        ('<= "3.6"', '> "3.7.0"', "<empty>", '!= "3.7.0"'),
+        ('<= "3.6"', '>= "3.7.0"', "<empty>", ""),
+        ('<= "3.6"', '> "3.6.2"', None, ""),
+        ('<= "3.6"', '>= "3.6.2"', None, ""),
+        # python_version == 3.6  # noqa: E800
+        # (equal to python_full_version >= 3.6.0 and python_full_version < 3.7.0)
+        ('== "3.6"', '< "3.5.2"', "<empty>", None),
+        ('== "3.6"', '<= "3.5.2"', "<empty>", None),
+        ('== "3.6"', '> "3.5.2"', '== "3.6"', '> "3.5.2"'),
+        ('== "3.6"', '>= "3.5.2"', '== "3.6"', '>= "3.5.2"'),
+        ('== "3.6"', '!= "3.5.2"', '== "3.6"', '!= "3.5.2"'),
+        ('== "3.6"', '< "3.6.0"', "<empty>", '< "3.7.0"'),
+        ('== "3.6"', '<= "3.6.0"', '== "3.6.0"', '< "3.7.0"'),
+        ('== "3.6"', '> "3.6.0"', None, '>= "3.6.0"'),
+        ('== "3.6"', '>= "3.6.0"', '== "3.6"', '>= "3.6.0"'),
+        ('== "3.6"', '!= "3.6.0"', None, ""),
+        ('== "3.6"', '< "3.6.1"', None, '< "3.7.0"'),
+        ('== "3.6"', '<= "3.6.1"', None, '< "3.7.0"'),
+        ('== "3.6"', '> "3.6.1"', None, '>= "3.6.0"'),
+        ('== "3.6"', '>= "3.6.1"', None, '>= "3.6.0"'),
+        ('== "3.6"', '!= "3.6.1"', None, ""),
+        ('== "3.6"', '< "3.7.0"', '== "3.6"', '< "3.7.0"'),
+        ('== "3.6"', '<= "3.7.0"', '== "3.6"', '<= "3.7.0"'),
+        ('== "3.6"', '> "3.7.0"', "<empty>", None),
+        ('== "3.6"', '>= "3.7.0"', "<empty>", '>= "3.6.0"'),
+        ('== "3.6"', '!= "3.7.0"', '== "3.6"', '!= "3.7.0"'),
+        ('== "3.6"', '<= "3.7.1"', '== "3.6"', '<= "3.7.1"'),
+        ('== "3.6"', '< "3.7.1"', '== "3.6"', '< "3.7.1"'),
+        ('== "3.6"', '> "3.7.1"', "<empty>", None),
+        ('== "3.6"', '>= "3.7.1"', "<empty>", None),
+        ('== "3.6"', '!= "3.7.1"', '== "3.6"', '!= "3.7.1"'),
+        # python_version != 3.6  # noqa: E800
+        # (equal to python_full_version < 3.6.0 or python_full_version >= 3.7.0)
+        ('!= "3.6"', '< "3.5.2"', '< "3.5.2"', '!= "3.6"'),
+        ('!= "3.6"', '<= "3.5.2"', '<= "3.5.2"', '!= "3.6"'),
+        ('!= "3.6"', '> "3.5.2"', None, ""),
+        ('!= "3.6"', '>= "3.5.2"', None, ""),
+        ('!= "3.6"', '!= "3.5.2"', None, ""),
+        ('!= "3.6"', '< "3.6.0"', '< "3.6.0"', '!= "3.6"'),
+        ('!= "3.6"', '<= "3.6.0"', '< "3.6.0"', None),
+        ('!= "3.6"', '> "3.6.0"', '>= "3.7.0"', '!= "3.6.0"'),
+        ('!= "3.6"', '>= "3.6.0"', '>= "3.7.0"', ""),
+        ('!= "3.6"', '!= "3.6.0"', '!= "3.6"', '!= "3.6.0"'),
+        ('!= "3.6"', '< "3.6.1"', '< "3.6.0"', None),
+        ('!= "3.6"', '<= "3.6.1"', '< "3.6.0"', None),
+        ('!= "3.6"', '> "3.6.1"', '>= "3.7.0"', None),
+        ('!= "3.6"', '>= "3.6.1"', '>= "3.7.0"', None),
+        ('!= "3.6"', '!= "3.6.1"', '!= "3.6"', '!= "3.6.1"'),
+        ('!= "3.6"', '< "3.7.0"', '< "3.6.0"', ""),
+        ('!= "3.6"', '<= "3.7.0"', None, ""),
+        ('!= "3.6"', '> "3.7.0"', '> "3.7.0"', '!= "3.6"'),
+        ('!= "3.6"', '>= "3.7.0"', '>= "3.7.0"', '!= "3.6"'),
+        ('!= "3.6"', '!= "3.7.0"', None, ""),
+        ('!= "3.6"', '<= "3.7.1"', None, ""),
+        ('!= "3.6"', '< "3.7.1"', None, ""),
+        ('!= "3.6"', '> "3.7.1"', '> "3.7.1"', '!= "3.6"'),
+        ('!= "3.6"', '>= "3.7.1"', '>= "3.7.1"', '!= "3.6"'),
+        ('!= "3.6"', '!= "3.7.1"', None, ""),
+    ],
+)
+def test_merging_python_version_and_python_full_version(
+    python_version: str,
+    python_full_version: str,
+    expected_intersection_version: str,
+    expected_union_version: str,
+) -> None:
+    m = f"python_version {python_version}"
+    m2 = f"python_full_version {python_full_version}"
+
+    def get_expected_marker(expected_version: str, op: str) -> str:
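+        # Sentinels in the expected columns: None means the two markers
+        # cannot be merged (both are kept verbatim), "" is AnyMarker and
+        # "<empty>" is EmptyMarker.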
+        if expected_version is None:
+            expected = f"{m} {op} {m2}"
+        elif expected_version in ("", "<empty>"):
+            expected = expected_version
+        else:
+            expected_marker_name = (
+                "python_version"
+                if expected_version.count(".") < 2
+                else "python_full_version"
+            )
+            expected = f"{expected_marker_name} {expected_version}"
+        return expected
+
+    expected_intersection = get_expected_marker(expected_intersection_version, "and")
+    expected_union = get_expected_marker(expected_union_version, "or")
+
+    intersection = parse_marker(m).intersect(parse_marker(m2))
+    assert str(intersection) == expected_intersection
+
+    union = parse_marker(m).union(parse_marker(m2))
+    assert str(union) == expected_union
diff --git a/vendor/poetry-core/tests/version/test_requirements.py b/vendor/poetry-core/tests/version/test_requirements.py
index 9a779bd1..cdbea457 100644
--- a/vendor/poetry-core/tests/version/test_requirements.py
+++ b/vendor/poetry-core/tests/version/test_requirements.py
@@ -1,13 +1,24 @@
+from __future__ import annotations
+
 import re
 
+from typing import Any
+
 import pytest
 
-from poetry.core.semver import parse_constraint
+from poetry.core.semver.helpers import parse_constraint
 from poetry.core.version.requirements import InvalidRequirement
 from poetry.core.version.requirements import Requirement
 
 
-def assert_requirement(req, name, url=None, extras=None, constraint="*", marker=None):
+def assert_requirement(
+    req: Requirement,
+    name: str,
+    url: str | None = None,
+    extras: list[str] | None = None,
+    constraint: str = "*",
+    marker: str | None = None,
+) -> None:
     if extras is None:
         extras = []
 
@@ -28,12 +39,16 @@ def assert_requirement(req, name, url=None, extras=None, constraint="*", marker=
         ("name", {"name": "name"}),
         ("foo-bar.quux_baz", {"name": "foo-bar.quux_baz"}),
         ("name>=3", {"name": "name", "constraint": ">=3"}),
-        ("name==1.0.org1", {"name": "name", "constraint": "==1.0.org1"}),
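+        # wildcard constraints are normalized; PEP 440 post and dev releases parse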
+        ("name>=3.*", {"name": "name", "constraint": ">=3.0"}),
+        ("name<3.*", {"name": "name", "constraint": "<3.0"}),
+        ("name>3.5.*", {"name": "name", "constraint": ">3.5"}),
+        ("name==1.0.post1", {"name": "name", "constraint": "==1.0.post1"}),
+        ("name==1.2.0b1.dev0", {"name": "name", "constraint": "==1.2.0b1.dev0"}),
         (
-            "name>=1.x.y;python_version=='2.6'",
+            "name>=1.2.3;python_version=='2.6'",
             {
                 "name": "name",
-                "constraint": ">=1.x.y",
+                "constraint": ">=1.2.3",
                 "marker": 'python_version == "2.6"',
             },
         ),
@@ -56,7 +71,10 @@ def assert_requirement(req, name, url=None, extras=None, constraint="*", marker=
             "name @ file:///absolute/path",
             {"name": "name", "url": "file:///absolute/path"},
         ),
-        ("name @ file://.", {"name": "name", "url": "file://."},),
+        (
+            "name @ file://.",
+            {"name": "name", "url": "file://."},
+        ),
         (
             "name [fred,bar] @ http://foo.com ; python_version=='2.7'",
             {
@@ -67,7 +85,8 @@ def assert_requirement(req, name, url=None, extras=None, constraint="*", marker=
             },
         ),
         (
-            "foo @ https://example.com/name;v=1.1/?query=foo&bar=baz#blah ; python_version=='3.4'",
+            "foo @ https://example.com/name;v=1.1/?query=foo&bar=baz#blah ;"
+            " python_version=='3.4'",
             {
                 "name": "foo",
                 "url": "https://example.com/name;v=1.1/?query=foo&bar=baz#blah",
@@ -75,16 +94,20 @@ def assert_requirement(req, name, url=None, extras=None, constraint="*", marker=
             },
         ),
         (
-            'foo (>=1.2.3) ; python_version >= "2.7" and python_version < "2.8" or python_version >= "3.4" and python_version < "3.5"',
+            'foo (>=1.2.3) ; python_version >= "2.7" and python_version < "2.8" or'
+            ' python_version >= "3.4" and python_version < "3.5"',
             {
                 "name": "foo",
                 "constraint": ">=1.2.3",
-                "marker": 'python_version >= "2.7" and python_version < "2.8" or python_version >= "3.4" and python_version < "3.5"',
+                "marker": (
+                    'python_version >= "2.7" and python_version < "2.8" or'
+                    ' python_version >= "3.4" and python_version < "3.5"'
+                ),
             },
         ),
     ],
 )
-def test_requirement(string, expected):
+def test_requirement(string: str, expected: dict[str, Any]) -> None:
     req = Requirement(string)
 
     assert_requirement(req, **expected)
@@ -99,9 +122,9 @@ def test_requirement(string, expected):
         ("name @ file:/.", "invalid URL"),
     ],
 )
-def test_invalid_requirement(string, exception):
+def test_invalid_requirement(string: str, exception: str) -> None:
     with pytest.raises(
         InvalidRequirement,
-        match=re.escape("The requirement is invalid: {}".format(exception)),
+        match=re.escape(f"The requirement is invalid: {exception}"),
     ):
         Requirement(string)
diff --git a/vendor/poetry-core/tox.ini b/vendor/poetry-core/tox.ini
index 822055fb..082a6161 100644
--- a/vendor/poetry-core/tox.ini
+++ b/vendor/poetry-core/tox.ini
@@ -1,7 +1,7 @@
 [tox]
 minversion = 3.3.0
 isolated_build = True
-envlist = py27, py35, py36, py37, py38, pypy, pypy3, integration
+envlist = py37, py38, py39, py310, pypy3, integration
 
 [testenv]
 whitelist_externals = poetry
@@ -13,9 +13,10 @@ commands =
 
 [testenv:integration]
 basepython = python3
+skip_install = false
 deps =
     pytest
-    pep517
+    build
     virtualenv
 commands =
     pytest --integration {posargs} tests/integration
diff --git a/vendor/poetry-core/vendors/deps.txt b/vendor/poetry-core/vendors/deps.txt
new file mode 100644
index 00000000..5d242f5c
--- /dev/null
+++ b/vendor/poetry-core/vendors/deps.txt
@@ -0,0 +1,2 @@
+packaging==21.3
+tomlkit==0.11.4
diff --git a/vendor/poetry-core/vendors/patches/jsonschema.patch b/vendor/poetry-core/vendors/patches/jsonschema.patch
index 423e641d..3af40eb3 100644
--- a/vendor/poetry-core/vendors/patches/jsonschema.patch
+++ b/vendor/poetry-core/vendors/patches/jsonschema.patch
@@ -1,42 +1,36 @@
-diff --git a/poetry/core/_vendor/jsonschema/__init__.py b/poetry/core/_vendor/jsonschema/__init__.py
-index 7dd3598..c604eed 100644
---- a/poetry/core/_vendor/jsonschema/__init__.py
-+++ b/poetry/core/_vendor/jsonschema/__init__.py
-@@ -27,8 +27,5 @@ from jsonschema.validators import (
-     RefResolver,
-     validate,
- )
--try:
--    from importlib import metadata
--except ImportError: # for Python<3.8
--    import importlib_metadata as metadata
--__version__ = metadata.version("jsonschema")
-+
-+__version__ = "3.2.0"
-diff --git a/poetry/core/_vendor/jsonschema/_utils.py b/poetry/core/_vendor/jsonschema/_utils.py
-index 8fb8593..368474a 100644
---- a/poetry/core/_vendor/jsonschema/_utils.py
-+++ b/poetry/core/_vendor/jsonschema/_utils.py
-@@ -1,6 +1,6 @@
+diff --git b/src/poetry/core/_vendor/jsonschema/_utils.py a/src/poetry/core/_vendor/jsonschema/_utils.py
+index a2ad5a9..d4f5697 100644
+--- b/src/poetry/core/_vendor/jsonschema/_utils.py
++++ a/src/poetry/core/_vendor/jsonschema/_utils.py
+@@ -2,15 +2,8 @@ from collections.abc import Mapping, MutableMapping, Sequence
+ from urllib.parse import urlsplit
  import itertools
  import json
--import pkgutil
 +import os
  import re
-
- from jsonschema.compat import MutableMapping, str_types, urlsplit
-@@ -50,9 +50,12 @@ def load_schema(name):
+-import sys
+-
+-# The files() API was added in Python 3.9.
+-if sys.version_info >= (3, 9):  # pragma: no cover
+-    from importlib import resources
+-else:  # pragma: no cover
+-    import importlib_resources as resources  # type: ignore
+-
+ 
+ class URIDict(MutableMapping):
+     """
+@@ -56,9 +49,12 @@ def load_schema(name):
      """
      Load a schema from ./schemas/``name``.json and return it.
      """
 +    with open(
-+        os.path.join(os.path.dirname(__file__), "schemas", "{0}.json".format(name))
++        os.path.join(os.path.dirname(__file__), "schemas", "{0}.json".format(name)),
++        encoding="utf-8"
 +    ) as f:
 +        data = f.read()
-
--    data = pkgutil.get_data("jsonschema", "schemas/{0}.json".format(name))
--    return json.loads(data.decode("utf-8"))
-+    return json.loads(data)
-
-
- def indent(string, times=1):
+ 
+-    path = resources.files(__package__).joinpath(f"schemas/{name}.json")
+-    data = path.read_text(encoding="utf-8")
+     return json.loads(data)
+ 
+ 
diff --git a/vendor/poetry-core/vendors/poetry.lock b/vendor/poetry-core/vendors/poetry.lock
index ea3e4b63..3011a0a2 100644
--- a/vendor/poetry-core/vendors/poetry.lock
+++ b/vendor/poetry-core/vendors/poetry.lock
@@ -1,54 +1,72 @@
 [[package]]
 name = "attrs"
-version = "20.3.0"
+version = "22.1.0"
 description = "Classes Without Boilerplate"
 category = "main"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=3.5"
 
 [package.extras]
-dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "furo", "sphinx", "pre-commit"]
-docs = ["furo", "sphinx", "zope.interface"]
-tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"]
-tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"]
+dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
+docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
+tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
+tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"]
 
 [[package]]
 name = "importlib-metadata"
-version = "3.4.0"
+version = "4.12.0"
 description = "Read metadata from Python packages"
 category = "main"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
 
 [package.dependencies]
 typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
 zipp = ">=0.5"
 
 [package.extras]
-docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
-testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"]
+docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"]
+perf = ["ipython"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"]
+
+[[package]]
+name = "importlib-resources"
+version = "5.9.0"
+description = "Read resources from Python packages"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}
+
+[package.extras]
+docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"]
 
 [[package]]
 name = "jsonschema"
-version = "3.2.0"
+version = "4.10.0"
 description = "An implementation of JSON Schema validation for Python"
 category = "main"
 optional = false
-python-versions = "*"
+python-versions = ">=3.7"
 
 [package.dependencies]
 attrs = ">=17.4.0"
 importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
-pyrsistent = ">=0.14.0"
-six = ">=1.11.0"
+importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""}
+pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""}
+pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2"
+typing-extensions = {version = "*", markers = "python_version < \"3.8\""}
 
 [package.extras]
-format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"]
-format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator (>0.1.0)", "rfc3339-validator"]
+format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
+format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"]
 
 [[package]]
-name = "lark-parser"
-version = "0.9.0"
+name = "lark"
+version = "1.1.2"
 description = "a modern parsing library"
 category = "main"
 optional = false
@@ -56,119 +74,145 @@ python-versions = "*"
 
 [package.extras]
 regex = ["regex"]
+nearley = ["js2py"]
+atomic_cache = ["atomicwrites"]
 
 [[package]]
 name = "packaging"
-version = "20.9"
+version = "21.3"
 description = "Core utilities for Python packages"
 category = "main"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=3.6"
 
 [package.dependencies]
-pyparsing = ">=2.0.2"
+pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
 
 [[package]]
-name = "pyparsing"
-version = "2.4.7"
-description = "Python parsing module"
+name = "pkgutil-resolve-name"
+version = "1.3.10"
+description = "Resolve a name to an object."
 category = "main"
 optional = false
-python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+python-versions = ">=3.6"
 
 [[package]]
-name = "pyrsistent"
-version = "0.16.1"
-description = "Persistent/Functional/Immutable data structures"
+name = "pyparsing"
+version = "3.0.9"
+description = "pyparsing module - Classes and methods to define and execute parsing grammars"
 category = "main"
 optional = false
-python-versions = ">=2.7"
+python-versions = ">=3.6.8"
 
-[package.dependencies]
-six = "*"
+[package.extras]
+diagrams = ["railroad-diagrams", "jinja2"]
 
 [[package]]
-name = "six"
-version = "1.15.0"
-description = "Python 2 and 3 compatibility utilities"
+name = "pyrsistent"
+version = "0.18.1"
+description = "Persistent/Functional/Immutable data structures"
 category = "main"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+python-versions = ">=3.7"
 
 [[package]]
 name = "tomlkit"
-version = "0.7.0"
+version = "0.11.4"
 description = "Style preserving TOML library"
 category = "main"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = ">=3.6,<4.0"
 
 [[package]]
 name = "typing-extensions"
-version = "3.7.4.3"
-description = "Backported and Experimental Type Hints for Python 3.5+"
+version = "4.3.0"
+description = "Backported and Experimental Type Hints for Python 3.7+"
 category = "main"
 optional = false
-python-versions = "*"
+python-versions = ">=3.7"
 
 [[package]]
 name = "zipp"
-version = "3.4.0"
+version = "3.8.1"
 description = "Backport of pathlib-compatible object wrapper for zip files"
 category = "main"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
 
 [package.extras]
-docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"]
-testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "jaraco.test (>=3.2.0)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"]
+docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"]
 
 [metadata]
 lock-version = "1.1"
-python-versions = "^3.6"
-content-hash = "6790cea1370c2296b96f05af2218de159499268429c0c81757de8e4e90bfa9b0"
+python-versions = "^3.7"
+content-hash = "23292eec87b0c9b74619143ac531f78902eb7bf928ac406b931a481c8a25a10d"
 
 [metadata.files]
 attrs = [
-    {file = "attrs-20.3.0-py2.py3-none-any.whl", hash = "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6"},
-    {file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"},
+    {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"},
+    {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"},
 ]
 importlib-metadata = [
-    {file = "importlib_metadata-3.4.0-py3-none-any.whl", hash = "sha256:ace61d5fc652dc280e7b6b4ff732a9c2d40db2c0f92bc6cb74e07b73d53a1771"},
-    {file = "importlib_metadata-3.4.0.tar.gz", hash = "sha256:fa5daa4477a7414ae34e95942e4dd07f62adf589143c875c133c1e53c4eff38d"},
+    {file = "importlib_metadata-4.12.0-py3-none-any.whl", hash = "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"},
+    {file = "importlib_metadata-4.12.0.tar.gz", hash = "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670"},
+]
+importlib-resources = [
+    {file = "importlib_resources-5.9.0-py3-none-any.whl", hash = "sha256:f78a8df21a79bcc30cfd400bdc38f314333de7c0fb619763f6b9dabab8268bb7"},
+    {file = "importlib_resources-5.9.0.tar.gz", hash = "sha256:5481e97fb45af8dcf2f798952625591c58fe599d0735d86b10f54de086a61681"},
 ]
 jsonschema = [
-    {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"},
-    {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"},
+    {file = "jsonschema-4.10.0-py3-none-any.whl", hash = "sha256:92128509e5b700bf0f1fd08a7d018252b16a1454465dfa6b899558eeae584241"},
+    {file = "jsonschema-4.10.0.tar.gz", hash = "sha256:8ff7b44c6a99c6bfd55ca9ac45261c649cefd40aaba1124c29aaef1bcb378d84"},
 ]
-lark-parser = [
-    {file = "lark-parser-0.9.0.tar.gz", hash = "sha256:9e7589365d6b6de1cca40b0eaec31104a3fb96a37a11a9dfd5098e95b50aa6cd"},
+lark = [
+    {file = "lark-1.1.2-py2.py3-none-any.whl", hash = "sha256:c1ab213fc5e2d273fe2d91da218ccc8b5b92d065b17faa5e743499cb16594b7d"},
+    {file = "lark-1.1.2.tar.gz", hash = "sha256:7a8d0c07d663da9391d7faee1bf1d7df4998c47ca43a593cbef5c7566acd057a"},
 ]
 packaging = [
-    {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"},
-    {file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"},
+    {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
+    {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
+]
+pkgutil-resolve-name = [
+    {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"},
+    {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"},
 ]
 pyparsing = [
-    {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"},
-    {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"},
+    {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"},
+    {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"},
 ]
 pyrsistent = [
-    {file = "pyrsistent-0.16.1.tar.gz", hash = "sha256:aa2ae1c2e496f4d6777f869ea5de7166a8ccb9c2e06ebcf6c7ff1b670c98c5ef"},
-]
-six = [
-    {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"},
-    {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"},
+    {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"},
+    {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"},
+    {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"},
+    {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"},
+    {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"},
+    {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"},
+    {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"},
+    {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"},
+    {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"},
+    {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"},
+    {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"},
+    {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"},
+    {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"},
+    {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"},
+    {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"},
+    {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"},
+    {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"},
+    {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"},
+    {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"},
+    {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"},
+    {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"},
 ]
 tomlkit = [
-    {file = "tomlkit-0.7.0-py2.py3-none-any.whl", hash = "sha256:6babbd33b17d5c9691896b0e68159215a9387ebfa938aa3ac42f4a4beeb2b831"},
-    {file = "tomlkit-0.7.0.tar.gz", hash = "sha256:ac57f29693fab3e309ea789252fcce3061e19110085aa31af5446ca749325618"},
+    {file = "tomlkit-0.11.4-py3-none-any.whl", hash = "sha256:25d4e2e446c453be6360c67ddfb88838cfc42026322770ba13d1fbd403a93a5c"},
+    {file = "tomlkit-0.11.4.tar.gz", hash = "sha256:3235a9010fae54323e727c3ac06fb720752fe6635b3426e379daec60fbd44a83"},
 ]
 typing-extensions = [
-    {file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"},
-    {file = "typing_extensions-3.7.4.3-py3-none-any.whl", hash = "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918"},
-    {file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"},
+    {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"},
+    {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"},
 ]
 zipp = [
-    {file = "zipp-3.4.0-py3-none-any.whl", hash = "sha256:102c24ef8f171fd729d46599845e95c7ab894a4cf45f5de11a44cc7444fb1108"},
-    {file = "zipp-3.4.0.tar.gz", hash = "sha256:ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb"},
+    {file = "zipp-3.8.1-py3-none-any.whl", hash = "sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009"},
+    {file = "zipp-3.8.1.tar.gz", hash = "sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2"},
 ]
diff --git a/vendor/poetry-core/vendors/pyproject.toml b/vendor/poetry-core/vendors/pyproject.toml
index 2671380a..f830c24f 100644
--- a/vendor/poetry-core/vendors/pyproject.toml
+++ b/vendor/poetry-core/vendors/pyproject.toml
@@ -19,10 +19,14 @@ classifiers = [
 ]
 
 [tool.poetry.dependencies]
-python = "^3.6"
+python = "^3.7"
 
-jsonschema = "^3.2.0"
-lark-parser = "^0.9.0"
-packaging = "^20.9"
-pyrsistent = "^0.16.0"
-tomlkit = ">=0.7.0,<1.0.0"
+jsonschema = "^4.5.1"
+lark = "^1.1.2"
+packaging = ">=20.9"
+tomlkit = ">=0.11.1,<1.0.0"
+
+# Needed by jsonschema and only at python < 3.8, but to make
+# sure that it is always delivered we add an unconditional
+# dependency here.
+typing-extensions = "^4.2.0"
diff --git a/vendor/poetry/.cirrus.yml b/vendor/poetry/.cirrus.yml
index c95db272..5c020794 100644
--- a/vendor/poetry/.cirrus.yml
+++ b/vendor/poetry/.cirrus.yml
@@ -1,79 +1,38 @@
 freebsd_instance:
-  image_family: freebsd-12-2
+  image_family: freebsd-13-0
+  cpu: 1
+  memory: 4G
 
 test_task:
   name: "Tests / FreeBSD / "
   only_if: $CIRRUS_TAG == ''
   skip: "!changesInclude('.cirrus.yml', 'poetry.lock', 'pyproject.toml', '**.json','**.py')"
   env:
+    # `SHELL` environment variable is not set by default, so we explicitly set it to
+    # avoid failures on tests that depend on it.
+    SHELL: sh
     matrix:
-      - PYTHON: python2.7
       - PYTHON: python3.7
-  python_script:
+      - PYTHON: python3.8
+      - PYTHON: python3.9
+      - PYTHON: python3.10
+  pkg_script:
     - PYPACKAGE=$(printf '%s' $PYTHON | tr -d '.')
     - SQLPACKAGE=$(printf '%s-sqlite3' $PYPACKAGE | sed 's/thon//')
-    - pkg install -y git-lite $PYPACKAGE $SQLPACKAGE
+    - pkg install -y git-lite curl $PYPACKAGE $SQLPACKAGE
   pip_script:
     - $PYTHON -m ensurepip
-    - $PYTHON -m pip install -U pip tox
-    - $PYTHON -m pip install -U --pre poetry
+    - $PYTHON -m pip --disable-pip-version-check install -U pip
+  env_script:
+    - echo "PATH=/.local/bin:${PATH}" >> $CIRRUS_ENV
+  poetry_script:
+    - curl -sL https://install.python-poetry.org | $PYTHON - -y
     - poetry config virtualenvs.in-project true
-  tox_script: $PYTHON -m tox -e py -- -q --junitxml=junit.xml tests
+  test_script:
+    - poetry install
+    - poetry run pytest -n auto -q --junitxml=junit.xml tests
   on_failure:
     annotate_failure_artifacts:
       path: junit.xml
       format: junit
       type: text/xml
-
-release_task:
-  name: "Release / FreeBSD"
-  only_if: $CIRRUS_TAG != ''
-  env:
-    GITHUB_TOKEN: ENCRYPTED[2b573a2d28a03523ac6fb5b3c2f513a41c0a98db81e40e50e1d103b171f85c57e58ae38d957499dbf7fd7635cfcfd7be]
-    PYTHON: python3.8
-    PYTHON27: python2.7
-    PYTHON36: python3.6
-    PYTHON37: python3.7
-    PYTHON38: python3.8
-  freebsd_instance:
-    matrix:
-      - image_family: freebsd-12-2
-      - image_family: freebsd-13-0-snap
-      - image_family: freebsd-11-4-snap
-  python_script: pkg install -y curl bash jq python3 python27 python36 python37 python38
-  pip_script:
-    - python2.7 -m ensurepip
-    - python3.6 -m ensurepip
-    - python3.7 -m ensurepip
-    - python3.8 -m ensurepip
-  build_script: bash ./make-nix-release.sh
-  upload_script: |
-    #!/usr/bin/env bash
-
-    if [[ "$CIRRUS_RELEASE" == "" ]]; then
-      CIRRUS_RELEASE=$(curl -sL https://api.github.com/repos/$CIRRUS_REPO_FULL_NAME/releases/tags/$CIRRUS_TAG | jq -r '.id')
-      if [[ "$CIRRUS_RELEASE" == "null" ]]; then
-        echo "Failed to find a release associated with this tag!"
-        exit 0
-      fi
-    fi
-
-    if [[ "$GITHUB_TOKEN" == "" ]]; then
-      echo "Please provide GitHub access token via GITHUB_TOKEN environment variable!"
-      exit 1
-    fi
-
-    for fpath in releases/*
-    do
-      echo "Uploading $fpath..."
-      name=$(basename "$fpath")
-      url_to_upload="https://uploads.github.com/repos/$CIRRUS_REPO_FULL_NAME/releases/$CIRRUS_RELEASE/assets?name=$name"
-      echo "Uploading to $url_to_upload"
-      curl -X POST \
-        --data-binary @$fpath \
-        --header "Authorization: token $GITHUB_TOKEN" \
-        --header "Content-Type: application/octet-stream" \
-        $url_to_upload
-    done
-  archive_artifacts:
-    path: "releases/*"
diff --git a/vendor/poetry/.flake8 b/vendor/poetry/.flake8
index 130c44c6..c2389cff 100644
--- a/vendor/poetry/.flake8
+++ b/vendor/poetry/.flake8
@@ -1,22 +1,32 @@
 [flake8]
+min_python_version = 3.7.0
 max-line-length = 88
-ignore = E501, E203, W503
-per-file-ignores = __init__.py:F401
-exclude =
-    .git
-    __pycache__
-    setup.py
-    build
-    dist
-    releases
-    .venv
-    .tox
-    .mypy_cache
-    .pytest_cache
-    .vscode
-    .github
-    poetry/utils/_compat.py
-    poetry/utils/env_scripts/tags.py
-    tests/fixtures/
-    tests/repositories/fixtures/
-    tests/utils/fixtures/
+ban-relative-imports = true
+# flake8-use-fstring: https://github.com/MichaelKim0407/flake8-use-fstring#--percent-greedy-and---format-greedy
+format-greedy = 1
+inline-quotes = double
+enable-extensions = TC, TC1
+type-checking-exempt-modules = typing, typing-extensions
+eradicate-whitelist-extend = ^-.*;
+extend-ignore =
+    # E203: Whitespace before ':' (pycqa/pycodestyle#373)
+    E203,
+    # SIM106: Handle error-cases first
+    SIM106,
+    # ANN101: Missing type annotation for self in method
+    ANN101,
+    # ANN102: Missing type annotation for cls in classmethod
+    ANN102,
+per-file-ignores =
+    # TC002: Move third-party import '...' into a type-checking block
+    __init__.py:TC002,
+    # ANN201: Missing return type annotation for public function
+    tests/test_*:ANN201
+    tests/**/test_*:ANN201
+extend-exclude =
+    # Frozen and not subject to change in this repo:
+    get-poetry.py,
+    install-poetry.py,
+    # External to the project's coding standards:
+    tests/fixtures/*,
+    tests/**/fixtures/*,
diff --git a/vendor/poetry/.github/dependabot.yml b/vendor/poetry/.github/dependabot.yml
new file mode 100644
index 00000000..f4369a40
--- /dev/null
+++ b/vendor/poetry/.github/dependabot.yml
@@ -0,0 +1,11 @@
+version: 2
+
+updates:
+  - package-ecosystem: "pip"
+    directory: "/"
+    schedule:
+      interval: "monthly"
+    # keep dependency updates manual for now
+    open-pull-requests-limit: 0
+    reviewers:
+      - "python-poetry/triage"
diff --git a/vendor/poetry/.github/workflows/code-quality.yaml b/vendor/poetry/.github/workflows/code-quality.yaml
deleted file mode 100644
index 4c1e4d56..00000000
--- a/vendor/poetry/.github/workflows/code-quality.yaml
+++ /dev/null
@@ -1,21 +0,0 @@
-name: Code Quality
-
-on:
-  pull_request:
-    paths-ignore:
-      - 'docs/**'
-      - '.cirrus.yml'
-  push:
-    branches: [master]
-    paths-ignore:
-      - 'docs/**'
-      - '.cirrus.yml'
-
-jobs:
-  pre-commit:
-    name: Linting
-    runs-on: ubuntu-latest
-    steps:
-    - uses: actions/checkout@v1
-    - uses: actions/setup-python@v1
-    - uses: pre-commit/action@v2.0.0
diff --git a/vendor/poetry/.github/workflows/docs.yml b/vendor/poetry/.github/workflows/docs.yml
new file mode 100644
index 00000000..73e701c4
--- /dev/null
+++ b/vendor/poetry/.github/workflows/docs.yml
@@ -0,0 +1,75 @@
+name: "Documentation Preview"
+
+on:
+  pull_request:
+    # allow repository maintainers to modify and test workflow
+    paths-ignore:
+      - "**"
+      - "!.github/workflows/docs.yml"
+  pull_request_target:
+    # enable runs for this workflow when labeled as documentation only
+    # prevent execution when the workflow itself is modified from a fork
+    types:
+      - labeled
+      - synchronize
+    paths-ignore:
+      - "**"
+      - "!docs/**"
+
+jobs:
+  deploy:
+    name: Build & Deploy
+    runs-on: ubuntu-latest
+    if: |
+      (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'Documentation'))
+      || (github.event_name != 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository)
+    steps:
+      - name: Checkout Website Source
+        uses: actions/checkout@v3
+        with:
+          repository: python-poetry/website
+
+      - name: Checkout Poetry Source
+        uses: actions/checkout@v3
+        with:
+          path: poetry
+          ref: ${{ github.event.pull_request.head.sha }}
+
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: "3.9"
+
+      - name: Setup Node
+        uses: actions/setup-node@v2
+        with:
+          node-version: "14"
+
+      - name: Build Assets
+        run: npm ci && npm run prod
+
+      - name: Fetch Documentation
+        run: |
+          python -m pip install poetry
+          poetry install --no-dev
+          poetry run python bin/website build --local ./poetry
+
+      - name: Install Hugo
+        uses: peaceiris/actions-hugo@v2
+        with:
+          hugo-version: '0.83.1'
+
+      - name: Build
+        run: hugo -v --minify
+
+      - name: Deploy
+        uses: amondnet/vercel-action@v20
+        id: vercel-action
+        with:
+          vercel-token: ${{ secrets.VERCEL_TOKEN }}
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          vercel-org-id: ${{ secrets.VERCEL_ORG_ID }}
+          vercel-project-id: ${{ secrets.VERCEL_PROJECT_ID }}
+          scope: python-poetry
+          github-comment: true
+          working-directory: public
diff --git a/vendor/poetry/.github/workflows/main.yml b/vendor/poetry/.github/workflows/main.yml
index 6ff9bd9e..8459a54e 100644
--- a/vendor/poetry/.github/workflows/main.yml
+++ b/vendor/poetry/.github/workflows/main.yml
@@ -5,6 +5,7 @@ on:
     paths-ignore:
       - 'docs/**'
       - '.cirrus.yml'
+      - '.github/workflows/docs.yml'
     branches:
       - master
       - develop
@@ -12,17 +13,33 @@ on:
     paths-ignore:
       - 'docs/**'
       - '.cirrus.yml'
+      - '.github/workflows/docs.yml'
     branches:
       - '**'
 
+concurrency:
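+  # one test run per branch/PR at a time; superseded PR runs are cancelled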
+  group: tests-${{ github.head_ref || github.ref }}
+  cancel-in-progress: ${{ github.event_name == 'pull_request' }}
+
 jobs:
   tests:
-    name: ${{ matrix.os }} / ${{ matrix.python-version }}
-    runs-on: ${{ matrix.os }}-latest
+    name: ${{ matrix.os }} / ${{ matrix.python-version }} ${{ matrix.suffix }}
+    runs-on: ${{ matrix.image }}
     strategy:
       matrix:
-        os: [Ubuntu, MacOS, Windows]
-        python-version: ["2.7", "3.5","3.6", "3.7", "3.8", "3.9", "3.10"]
+        os: [Ubuntu, macOS, Windows]
+        python-version: ["3.7", "3.8", "3.9", "3.10"]
+        include:
+          - os: Ubuntu
+            image: ubuntu-22.04
+          - os: Windows
+            image: windows-2022
+          - os: macOS
+            image: macos-12
+      fail-fast: false
+    defaults:
+      run:
+        shell: bash
     steps:
       - uses: actions/checkout@v2
 
@@ -33,19 +50,22 @@ jobs:
 
       - name: Get full Python version
         id: full-python-version
-        shell: bash
         run: echo ::set-output name=version::$(python -c "import sys; print('-'.join(str(v) for v in sys.version_info))")
 
       - name: Bootstrap poetry
-        shell: bash
         run: |
-          python -m ensurepip
-          python -m pip install --upgrade pip
-          python -m pip install .
+          curl -sL https://install.python-poetry.org | python - -y ${{ matrix.bootstrap-args }}
+
+      - name: Update PATH
+        if: ${{ matrix.os != 'Windows' }}
+        run: echo "$HOME/.local/bin" >> $GITHUB_PATH
+
+      - name: Update Path for Windows
+        if: ${{ matrix.os == 'Windows' }}
+        run: echo "$APPDATA\Python\Scripts" >> $GITHUB_PATH
 
       - name: Configure poetry
-        shell: bash
-        run: python -m poetry config virtualenvs.in-project true
+        run: poetry config virtualenvs.in-project true
 
       - name: Set up cache
         uses: actions/cache@v2
@@ -56,13 +76,43 @@ jobs:
 
       - name: Ensure cache is healthy
         if: steps.cache.outputs.cache-hit == 'true'
-        shell: bash
-        run: timeout 10s python -m poetry run pip --version || rm -rf .venv
+        run: timeout 10s poetry run pip --version || rm -rf .venv
 
       - name: Install dependencies
-        shell: bash
-        run: python -m poetry install
+        run: poetry install
+
+      - name: Run mypy
+        run: poetry run mypy
+
+      - name: Install pytest plugin
+        run: poetry run pip install pytest-github-actions-annotate-failures
 
       - name: Run pytest
-        shell: bash
-        run: python -m poetry run python -m pytest -v tests
+        run: poetry run python -m pytest -n auto -p no:sugar -q tests/
+
+      - name: Run pytest (integration suite)
+        env:
+          POETRY_TEST_INTEGRATION_GIT_USERNAME: ${GITHUB_ACTOR}
+          POETRY_TEST_INTEGRATION_GIT_PASSWORD: ${{ secrets.GITHUB_TOKEN }}
+        run: poetry run python -m pytest -n auto -p no:sugar -q --integration tests/integration
+
+      - name: Get Plugin Version (poetry-plugin-export)
+        id: poetry-plugin-export-version
+        run: |
+          echo ::set-output name=version::$(\
+            poetry show poetry-plugin-export  | grep version | cut -d : -f 2 | xargs)
+
+      - name: Checkout Plugin Source (poetry-plugin-export)
+        uses: actions/checkout@v2
+        with:
+          path: poetry-plugin-export
+          repository: python-poetry/poetry-plugin-export
+          ref: refs/tags/${{ steps.poetry-plugin-export-version.outputs.version }}
+
+      - name: Run pytest (poetry-plugin-export)
+        run: poetry run python -m pytest -p no:sugar -q poetry-plugin-export/tests/
+
+      - name: Check for clean working tree
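+        # fail if the test runs have modified any tracked files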
+        run: |
+          git diff --exit-code --stat HEAD
+          git -C poetry-plugin-export diff --exit-code --stat HEAD
diff --git a/vendor/poetry/.github/workflows/release.yml b/vendor/poetry/.github/workflows/release.yml
index a2faa9b4..2f8bbdde 100644
--- a/vendor/poetry/.github/workflows/release.yml
+++ b/vendor/poetry/.github/workflows/release.yml
@@ -4,289 +4,44 @@ on:
   push:
     tags:
       - '*.*.*'
-  pull_request:
-    paths:
-      - '!**'
-      - '.github/workflows/release.yml'
-      - 'pyproject.toml'
-      - 'poetry.lock'
-      - 'sonnet'
-      - 'make-nix-release.sh'
 
 jobs:
-
-  Linux:
+  release:
+    name: Release
     runs-on: ubuntu-latest
-
     steps:
-    - uses: actions/checkout@v2
-    - name: Get tag
-      id: tag
-      run: |
-        if [[ ${GITHUB_REF} == refs/tags/* ]]; then
-          echo ::set-output name=tag::${GITHUB_REF#refs/tags/}
-        else
-          echo ::set-output name=tag::${GITHUB_SHA::7}
-        fi
-    - name: Building release
-      run: |
-        make linux_release
-    - name: Upload release file
-      uses: actions/upload-artifact@v2
-      with:
-        name: poetry-${{ steps.tag.outputs.tag }}-linux.tar.gz
-        path: releases/poetry-*-linux.tar.gz
-    - name: Upload checksum file
-      uses: actions/upload-artifact@v2
-      with:
-        name: poetry-${{ steps.tag.outputs.tag }}-linux.sha256sum
-        path: releases/poetry-*-linux.sha256sum
-
-  MacOS:
-    runs-on: macos-latest
+      - name: Checkout code
+        uses: actions/checkout@v2
 
-    steps:
-    - uses: actions/checkout@v2
-    - name: Get tag
-      id: tag
-      run: |
-        if [[ ${GITHUB_REF} == refs/tags/* ]]; then
-          echo ::set-output name=tag::${GITHUB_REF#refs/tags/}
-        else
-          echo ::set-output name=tag::${GITHUB_SHA::7}
-        fi
-    - name: Set up Python 3.8
-      uses: actions/setup-python@v2
-      with:
-        python-version: "3.8"
-    - name: Install Poetry
-      run: |
-        python get-poetry.py -y
-        source $HOME/.poetry/env
-    - name: Install dependencies
-      run: |
-        source $HOME/.poetry/env
-        poetry install --no-dev
-    - name: Preparing Python executables
-      run: |
-        curl -L https://github.com/sdispater/python-binaries/releases/download/2.7.18/python-2.7.18.macos.tar.xz -o python-2.7.18.tar.xz
-        curl -L https://github.com/sdispater/python-binaries/releases/download/3.5.9/python-3.5.9.macos.tar.xz -o python-3.5.9.tar.xz
-        curl -L https://github.com/sdispater/python-binaries/releases/download/3.6.8/python-3.6.8.macos.tar.xz -o python-3.6.8.tar.xz
-        curl -L https://github.com/sdispater/python-binaries/releases/download/3.7.6/python-3.7.6.macos.tar.xz -o python-3.7.6.tar.xz
-        curl -L https://github.com/sdispater/python-binaries/releases/download/3.8.3/python-3.8.3.macos.tar.xz -o python-3.8.3.tar.xz
-        curl -L https://github.com/sdispater/python-binaries/releases/download/3.9.5/python-3.9.5.macos.tar.xz -o python-3.9.5.tar.xz
-        curl -L https://github.com/sdispater/python-binaries/releases/download/3.10.2/python-3.10.2.macos.tar.xz -o python-3.10.2.tar.xz
-        tar -zxf python-2.7.18.tar.xz
-        tar -zxf python-3.5.9.tar.xz
-        tar -zxf python-3.6.8.tar.xz
-        tar -zxf python-3.7.6.tar.xz
-        tar -zxf python-3.8.3.tar.xz
-        tar -zxf python-3.9.5.tar.xz
-        tar -zxf python-3.10.2.tar.xz
-    - name: Build specific release
-      run: |
-        source $HOME/.poetry/env
-        poetry run python sonnet make release --ansi -P "2.7:python-2.7.18/bin/python" -P "3.5:python-3.5.9/bin/python" -P "3.6:python-3.6.8/bin/python" -P "3.7:python-3.7.6/bin/python" -P "3.8:python-3.8.3/bin/python" -P "3.9:python-3.9.5/bin/python" -P "3.10:python-3.10.2/bin/python"
-    - name: Upload release file
-      uses: actions/upload-artifact@v2
-      with:
-        name: poetry-${{ steps.tag.outputs.tag }}-darwin.tar.gz
-        path: releases/poetry-*-darwin.tar.gz
-    - name: Upload checksum file
-      uses: actions/upload-artifact@v2
-      with:
-        name: poetry-${{ steps.tag.outputs.tag }}-darwin.sha256sum
-        path: releases/poetry-*-darwin.sha256sum
+      - name: Set up Python 3.9
+        uses: actions/setup-python@v2
+        with:
+          python-version: "3.9"
 
-  Windows:
-    runs-on: windows-latest
+      - name: Install Poetry
+        run: python install-poetry.py -y
 
-    steps:
-    - uses: actions/checkout@v2
-    - name: Get tag
-      id: tag
-      shell: bash
-      run: |
-        if [[ ${GITHUB_REF} == refs/tags/* ]]; then
-          echo ::set-output name=tag::${GITHUB_REF#refs/tags/}
-        else
-          echo ::set-output name=tag::${GITHUB_SHA::7}
-        fi
-    - name: Set up Python 3.8
-      uses: actions/setup-python@v2
-      with:
-        python-version: "3.8"
-    - name: Install Poetry
-      run: |
-        python get-poetry.py -y
-        $env:Path += ";$env:Userprofile\.poetry\bin"
-    - name: Install dependencies
-      run: |
-        $env:Path += ";$env:Userprofile\.poetry\bin"
-        poetry install --no-dev
-    - name: Preparing Python executables
-      run: |
-        Invoke-WebRequest https://github.com/sdispater/python-binaries/releases/download/2.7.17/python-2.7.17.windows.tar.xz -O python-2.7.17.tar.xz
-        Invoke-WebRequest https://github.com/sdispater/python-binaries/releases/download/3.5.4/python-3.5.4.windows.tar.xz -O python-3.5.4.tar.xz
-        Invoke-WebRequest https://github.com/sdispater/python-binaries/releases/download/3.6.8/python-3.6.8.windows.tar.xz -O python-3.6.8.tar.xz
-        Invoke-WebRequest https://github.com/sdispater/python-binaries/releases/download/3.7.6/python-3.7.6.windows.tar.xz -O python-3.7.6.tar.xz
-        Invoke-WebRequest https://github.com/sdispater/python-binaries/releases/download/3.8.3/python-3.8.3.windows.tar.xz -O python-3.8.3.tar.xz
-        Invoke-WebRequest https://github.com/sdispater/python-binaries/releases/download/3.9.5/python-3.9.5.windows.tar.xz -O python-3.9.5.tar.xz
-        Invoke-WebRequest https://github.com/sdispater/python-binaries/releases/download/3.10.2/python-3.10.2.windows.tar.xz -O python-3.10.2.tar.xz
-        7z x python-2.7.17.tar.xz
-        7z x python-3.5.4.tar.xz
-        7z x python-3.6.8.tar.xz
-        7z x python-3.7.6.tar.xz
-        7z x python-3.8.3.tar.xz
-        7z x python-3.9.5.tar.xz
-        7z x python-3.10.2.tar.xz
-        7z x python-2.7.17.tar
-        7z x python-3.4.4.tar
-        7z x python-3.5.4.tar
-        7z x python-3.6.8.tar
-        7z x python-3.7.6.tar
-        7z x python-3.8.3.tar
-        7z x python-3.9.5.tar
-        7z x python-3.10.2.tar
-    - name: Build specific release
-      run: |
-        $env:Path += ";$env:Userprofile\.poetry\bin"
-        poetry run python sonnet make release --ansi -P "2.7:python-2.7.17\python.exe" -P "3.5:python-3.5.4\python.exe" -P "3.6:python-3.6.8\python.exe" -P "3.7:python-3.7.6\python.exe" -P "3.8:python-3.8.3\python.exe" -P "3.9:python-3.9.5\python.exe" -P "3.10:python-3.10.2\python.exe"
-    - name: Upload release file
-      uses: actions/upload-artifact@v2
-      with:
-        name: poetry-${{ steps.tag.outputs.tag }}-win32.tar.gz
-        path: releases/poetry-*-win32.tar.gz
-    - name: Upload checksum file
-      uses: actions/upload-artifact@v2
-      with:
-        name: poetry-${{ steps.tag.outputs.tag }}-win32.sha256sum
-        path: releases/poetry-*-win32.sha256sum
+      - name: Update PATH
+        run: echo "$HOME/.local/bin" >> $GITHUB_PATH
 
-  Release:
-    needs: [Linux, MacOS, Windows]
-    runs-on: ubuntu-latest
-    if: startsWith(github.ref, 'refs/tags/')
+      - name: Build project for distribution
+        run: poetry build
 
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v2
-      - name: Get tag
-        id: tag
+      - name: Check Version
+        id: check-version
         run: |
-          echo ::set-output name=tag::${GITHUB_REF#refs/tags/}
-      - name: Download Linux release file
-        uses: actions/download-artifact@master
-        with:
-          name: poetry-${{ steps.tag.outputs.tag }}-linux.tar.gz
-          path: releases/
-      - name: Download Linux checksum file
-        uses: actions/download-artifact@master
-        with:
-          name: poetry-${{ steps.tag.outputs.tag }}-linux.sha256sum
-          path: releases/
-      - name: Download MacOS release file
-        uses: actions/download-artifact@master
-        with:
-          name: poetry-${{ steps.tag.outputs.tag }}-darwin.tar.gz
-          path: releases/
-      - name: Download MacOS checksum file
-        uses: actions/download-artifact@master
-        with:
-          name: poetry-${{ steps.tag.outputs.tag }}-darwin.sha256sum
-          path: releases/
-      - name: Download Windows release file
-        uses: actions/download-artifact@master
-        with:
-          name: poetry-${{ steps.tag.outputs.tag }}-win32.tar.gz
-          path: releases/
-      - name: Download Windows checksum file
-        uses: actions/download-artifact@master
-        with:
-          name: poetry-${{ steps.tag.outputs.tag }}-win32.sha256sum
-          path: releases/
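+          # anything other than a final X.Y.Z version is marked as a prerelease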
+          [[ "$(poetry version --short)" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]] \
+            || echo ::set-output name=prerelease::true
+
       - name: Create Release
-        id: create_release
-        uses: actions/create-release@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        uses: ncipollo/release-action@v1
         with:
-          tag_name: ${{ steps.tag.outputs.tag }}
-          release_name: ${{ steps.tag.outputs.tag }}
+          artifacts: "dist/*"
+          token: ${{ secrets.GITHUB_TOKEN }}
           draft: false
-          prerelease: false
-      - name: Upload Linux release file asset
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: releases/poetry-${{ steps.tag.outputs.tag }}-linux.tar.gz
-          asset_name: poetry-${{ steps.tag.outputs.tag }}-linux.tar.gz
-          asset_content_type: application/gzip
-      - name: Upload Linux checksum file asset
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: releases/poetry-${{ steps.tag.outputs.tag }}-linux.sha256sum
-          asset_name: poetry-${{ steps.tag.outputs.tag }}-linux.sha256sum
-          asset_content_type: text/plain
-      - name: Upload MacOS release file asset
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: releases/poetry-${{ steps.tag.outputs.tag }}-darwin.tar.gz
-          asset_name: poetry-${{ steps.tag.outputs.tag }}-darwin.tar.gz
-          asset_content_type: application/gzip
-      - name: Upload MacOS checksum file asset
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: releases/poetry-${{ steps.tag.outputs.tag }}-darwin.sha256sum
-          asset_name: poetry-${{ steps.tag.outputs.tag }}-darwin.sha256sum
-          asset_content_type: text/plain
-      - name: Upload Windows release file asset
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: releases/poetry-${{ steps.tag.outputs.tag }}-win32.tar.gz
-          asset_name: poetry-${{ steps.tag.outputs.tag }}-win32.tar.gz
-          asset_content_type: application/gzip
-      - name: Upload Windows checksum file asset
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: releases/poetry-${{ steps.tag.outputs.tag }}-win32.sha256sum
-          asset_name: poetry-${{ steps.tag.outputs.tag }}-win32.sha256sum
-          asset_content_type: text/plain
-      - name: Set up Python 3.8
-        uses: actions/setup-python@v2
-        with:
-          python-version: "3.8"
-      - name: Install Poetry
-        run: |
-          python get-poetry.py -y
-      - name: Install dependencies
-        run: |
-          source $HOME/.poetry/env
-          poetry install --no-dev
-      - name: Build project for distribution
-        run: |
-          source $HOME/.poetry/env
-          poetry run poetry build
+          prerelease: steps.check-version.outputs.prerelease == 'true'
+
       - name: Publish to PyPI
         env:
           POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_TOKEN }}
-        run: |
-          source $HOME/.poetry/env
-          poetry run poetry publish
+        run: poetry publish
diff --git a/vendor/poetry/.github/workflows/skip.yml b/vendor/poetry/.github/workflows/skip.yml
index a9b384e3..40225d5a 100644
--- a/vendor/poetry/.github/workflows/skip.yml
+++ b/vendor/poetry/.github/workflows/skip.yml
@@ -3,35 +3,31 @@ name: Skip All Jobs
 
 on:
   push:
-    paths:
-      - '!**'
-      - 'docs/**'
-      - '.cirrus.yml'
+    paths-ignore:
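+      # '**' ignores every path while the '!' entries re-include the docs-only
+      # paths, so these no-op jobs run only for docs-only changes (presumably
+      # to satisfy required status checks without running the full test matrix).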
+      - '**'
+      - '!docs/**'
+      - '!.cirrus.yml'
+      - '!.github/workflows/docs.yml'
     branches:
       - master
       - develop
   pull_request:
-    paths:
-      - '!**'
-      - 'docs/**'
-      - '.cirrus.yml'
+    paths-ignore:
+      - '**'
+      - '!docs/**'
+      - '!.cirrus.yml'
+      - '!.github/workflows/docs.yml'
     branches:
       - '**'
 
 jobs:
-  pre-commit:
-    name: Linting
-    runs-on: ubuntu-latest
-    steps:
-      - run: exit 0
-
   tests:
     name: ${{ matrix.os }} / ${{ matrix.python-version }}
     # we do not need os specific runners here, using linux is quicker
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        os: [Ubuntu, MacOS, Windows]
-        python-version: [2.7, 3.5, 3.6, 3.7, 3.8]
+        os: [Ubuntu, macOS, Windows]
+        python-version: ["3.7", "3.8", "3.9", "3.10", "3.11-dev"]
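+        # versions are quoted so YAML does not parse 3.10 as the float 3.1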
     steps:
       - run: exit 0
diff --git a/vendor/poetry/.gitignore b/vendor/poetry/.gitignore
index fcbd92e9..0004c5bc 100644
--- a/vendor/poetry/.gitignore
+++ b/vendor/poetry/.gitignore
@@ -38,3 +38,5 @@ MANIFEST.in
 /releases/*
 pip-wheel-metadata
 /poetry.toml
+
+poetry/core/*
diff --git a/vendor/poetry/.pre-commit-config.yaml b/vendor/poetry/.pre-commit-config.yaml
index 07a33646..eeac4684 100644
--- a/vendor/poetry/.pre-commit-config.yaml
+++ b/vendor/poetry/.pre-commit-config.yaml
@@ -1,29 +1,91 @@
+ci:
+  autofix_prs: false
+
 repos:
-  - repo: https://github.com/psf/black
-    rev: 19.10b0
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.3.0
     hooks:
-      - id: black
-        # Pinning click because of https://github.com/psf/black/issues/2964
-        additional_dependencies: [click==8.0.4]
+      - id: trailing-whitespace
+      - id: end-of-file-fixer
+        exclude: ^.*\.egg-info/
+      - id: check-merge-conflict
+      - id: check-case-conflict
+      - id: check-json
+      - id: check-toml
+      - id: check-yaml
+      - id: pretty-format-json
+        args: [--autofix, --no-ensure-ascii, --no-sort-keys]
+      - id: check-ast
+      - id: debug-statements
+      - id: check-docstring-first
 
+  - repo: https://github.com/pre-commit/pygrep-hooks
+    rev: v1.9.0
+    hooks:
+      - id: python-check-mock-methods
+      - id: python-use-type-annotations
+      - id: python-check-blanket-noqa
 
-  - repo: https://gitlab.com/pycqa/flake8
-    rev: 3.8.3
+  - repo: https://github.com/asottile/yesqa
+    rev: v1.4.0
     hooks:
-      - id: flake8
+      - id: yesqa
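+        # &flake8_deps defines a YAML anchor; the flake8 hook below reuses the
+        # same plugin list via the *flake8_deps alias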
+        additional_dependencies: &flake8_deps
+          - flake8-annotations==2.9.0
+          - flake8-broken-line==0.5.0
+          - flake8-bugbear==22.7.1
+          - flake8-comprehensions==3.10.0
+          - flake8-eradicate==1.3.0
+          - flake8-quotes==3.3.1
+          - flake8-simplify==0.19.3
+          - flake8-tidy-imports==4.8.0
+          - flake8-type-checking==2.1.2
+          - flake8-typing-imports==1.12.0
+          - flake8-use-fstring==1.4
+          - pep8-naming==0.13.1
+
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v2.37.3
+    hooks:
+      - id: pyupgrade
+        args: [--py37-plus]
+        exclude: ^(install|get)-poetry.py$
+
+  - repo: https://github.com/hadialqattan/pycln
+    rev: v2.1.1
+    hooks:
+      - id: pycln
+        args: [--all]
 
-  - repo: https://github.com/timothycrosley/isort
-    rev: 5.4.2
+  - repo: https://github.com/pycqa/isort
+    rev: 5.10.1
     hooks:
       - id: isort
-        additional_dependencies: [toml]
-        exclude: ^.*/?setup\.py$
+        name: "isort (python)"
+        types: [python]
+        args: [--add-import, from __future__ import annotations]
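+        # --add-import makes isort insert `from __future__ import annotations`
+        # into each module, which is valid on Python 3.7+ and matches the
+        # pyupgrade --py37-plus target above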
+        exclude: |
+          (?x)(
+             ^(install|get)-poetry.py$
+              | ^src/poetry/__init__.py$
+          )
+      - id: isort
+        name: "isort (pyi)"
+        types: [pyi]
+        args: [--lines-after-imports, "-1"]
 
-  - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v3.2.0
+  - repo: https://github.com/psf/black
+    rev: 22.6.0
     hooks:
-      - id: trailing-whitespace
-        exclude: ^tests/.*/fixtures/.*
-      - id: end-of-file-fixer
-        exclude: ^tests/.*/fixtures/.*
-      - id: debug-statements
+      - id: black
+
+  - repo: https://github.com/pycqa/flake8
+    rev: 5.0.4
+    hooks:
+      - id: flake8
+        additional_dependencies: *flake8_deps
+
+  - repo: https://github.com/pre-commit/pre-commit
+    rev: v2.20.0
+    hooks:
+      - id: validate_manifest
diff --git a/vendor/poetry/.pre-commit-hooks.yaml b/vendor/poetry/.pre-commit-hooks.yaml
new file mode 100644
index 00000000..d2662b8a
--- /dev/null
+++ b/vendor/poetry/.pre-commit-hooks.yaml
@@ -0,0 +1,26 @@
+- id: poetry-check
+  name: poetry-check
+  description: run poetry check to validate config
+  entry: poetry check
+  language: python
+  language_version: python3
+  pass_filenames: false
+  files: ^pyproject.toml$
+
+- id: poetry-lock
+  name: poetry-lock
+  description: run poetry lock to update lock file
+  entry: poetry lock
+  language: python
+  language_version: python3
+  pass_filenames: false
+
+- id: poetry-export
+  name: poetry-export
+  description: run poetry export to sync lock file with requirements.txt
+  entry: poetry export
+  language: python
+  language_version: python3
+  pass_filenames: false
+  files: ^poetry.lock$
+  args: ["-f", "requirements.txt", "-o", "requirements.txt"]
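+
+# Downstream projects can consume these hooks from their own
+# .pre-commit-config.yaml; an illustrative sketch (the `rev` tag here is
+# hypothetical and should point at a real Poetry release):
+#
+#   repos:
+#     - repo: https://github.com/python-poetry/poetry
+#       rev: "1.2.0"
+#       hooks:
+#         - id: poetry-check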
diff --git a/vendor/poetry/CHANGELOG.md b/vendor/poetry/CHANGELOG.md
index 89320749..c78dc231 100644
--- a/vendor/poetry/CHANGELOG.md
+++ b/vendor/poetry/CHANGELOG.md
@@ -1,24 +1,302 @@
 # Change Log
 
-## [1.1.15] - 2022-08-22
+## [1.2.0] - 2022-08-31
+
+### Docs
+
+- Added note about how to add a git dependency with a subdirectory ([#6218](https://github.com/python-poetry/poetry/pull/6218))
+- Fixed several style issues in the docs ([#6255](https://github.com/python-poetry/poetry/pull/6255))
+- Fixed outdated info about `--only` parameter ([#6264](https://github.com/python-poetry/poetry/pull/6264))
+
+
+## [1.2.0rc2] - 2022-08-26
+
+### Fixed
+
+- Fixed an issue where virtual environments were created unnecessarily when running `poetry self` commands ([#6226](https://github.com/python-poetry/poetry/pull/6226))
+- Ensure that packages' `pretty_name` are written to the lock file ([#6243](https://github.com/python-poetry/poetry/pull/6243))
+
+### Improvements
+
+- Improved the consistency of `Pool().remove_repository()` to make it easier to write poetry plugins ([#6231](https://github.com/python-poetry/poetry/pull/6231))
+
+### Docs
+
+- Removed mentions of Python 2.7 from docs ([#6235](https://github.com/python-poetry/poetry/pull/6235))
+- Added note about the difference between groups and extras ([#6232](https://github.com/python-poetry/poetry/pull/6232))
+
+
+## [1.2.0rc1] - 2022-08-22
+
+### Added
+
+- Added support for subdirectories in git dependencies ([#5172](https://github.com/python-poetry/poetry/pull/5172))
+- Added support for yanked releases and files (PEP-592) ([#5841](https://github.com/python-poetry/poetry/pull/5841))
+- Virtual environments can now be created even with empty project names ([#5856](https://github.com/python-poetry/poetry/pull/5856))
+- Added support for `nushell` in `poetry shell` ([#6063](https://github.com/python-poetry/poetry/pull/6063))
+
+### Changed
+
+- Poetry now falls back to gathering metadata for dependencies via PEP 517 if parsing `pyproject.toml` fails ([#5834](https://github.com/python-poetry/poetry/pull/5834))
+- Replaced Poetry's helper method `canonicalize_name()` with `packaging.utils.canonicalize_name()` ([#6022](https://github.com/python-poetry/poetry/pull/6022))
+- Removed code for the `export` command, which is now provided via plugin ([#6128](https://github.com/python-poetry/poetry/pull/6128))
+- Extras and extras dependencies are now sorted in the lock file ([#6169](https://github.com/python-poetry/poetry/pull/6169))
+- Removed deprecated (1.2-only) CLI options ([#6210](https://github.com/python-poetry/poetry/pull/6210))
+
+### Fixed
+
+- Fixed an issue where symlinks in the lock file were not resolved ([#5850](https://github.com/python-poetry/poetry/pull/5850))
+- Fixed a `tomlkit` regression resulting in inconsistent line endings ([#5870](https://github.com/python-poetry/poetry/pull/5870))
+- Fixed an issue where the `POETRY_PYPI_TOKEN_PYPI` environment variable wasn't respected ([#5911](https://github.com/python-poetry/poetry/pull/5911))
+- Fixed an issue where neither Python nor a managed venv could be found when using Python from the MS Store ([#5931](https://github.com/python-poetry/poetry/pull/5931))
+- Improved the error message of `poetry publish` in the event of an upload error ([#6043](https://github.com/python-poetry/poetry/pull/6043))
+- Fixed an issue where `poetry lock` fails without output ([#6058](https://github.com/python-poetry/poetry/pull/6058))
+- Fixed an issue where Windows drive mappings break virtual environment names ([#6110](https://github.com/python-poetry/poetry/pull/6110))
+- `tomlkit` versions with a memory leak are now avoided ([#6160](https://github.com/python-poetry/poetry/pull/6160))
+- Fixed an infinite loop in the solver ([#6178](https://github.com/python-poetry/poetry/pull/6178))
+- Fixed an issue where the latest version was used instead of the locked one for VCS dependencies with extras ([#6185](https://github.com/python-poetry/poetry/pull/6185))
+
+### Docs
+
+- Document use of the `subdirectory` parameter ([#5949](https://github.com/python-poetry/poetry/pull/5949))
+- Document suggested `tox` config for different use cases ([#6026](https://github.com/python-poetry/poetry/pull/6026))
+
+
+## [1.2.0b3] - 2022-07-13
+
+**Important**: This release fixes a critical issue that prevented hashes from being retrieved when locking dependencies,
+due to a breaking change on PyPI JSON API (see [#5972](https://github.com/python-poetry/poetry/pull/5972)
+and [the upstream change](https://github.com/pypi/warehouse/pull/11775) for more details).
+
+After upgrading, you have to clear the Poetry cache manually to get that feature working correctly again:
+
+```bash
+$ poetry cache clear pypi --all
+```
+
+### Added
+
+- Added `--only-root` to `poetry install` to install a project without its
+  dependencies ([#5783](https://github.com/python-poetry/poetry/pull/5783))
 
 ### Changed
 
-- Poetry now fallback to gather metadata for dependencies via pep517 if parsing pyproject.toml fail ([#6206](https://github.com/python-poetry/poetry/pull/6206))
-- Extras and extras dependencies are now sorted in lock file ([#6207](https://github.com/python-poetry/poetry/pull/6207))
+- Improved user experience of `poetry init` ([#5838](https://github.com/python-poetry/poetry/pull/5838))
+- Added a default timeout for all HTTP requests to avoid hanging
+  requests ([#5881](https://github.com/python-poetry/poetry/pull/5881))
+- Updated `poetry init` to better specify how to skip adding
+  dependencies ([#5946](https://github.com/python-poetry/poetry/pull/5946))
+- Updated Poetry repository names to avoid clashes with user-defined
+  repositories ([#5910](https://github.com/python-poetry/poetry/pull/5910))
+
+### Fixed
+
+- Fixed an issue where extras were not handled if they did not match the case-sensitive name of the
+  packages ([#4122](https://github.com/python-poetry/poetry/pull/4122))
+- Fixed configuration of `experimental.system-git-client` option
+  through `poetry config` ([#5818](https://github.com/python-poetry/poetry/pull/5818))
+- Fixed uninstallation of git dependencies on Windows ([#5836](https://github.com/python-poetry/poetry/pull/5836))
+- Fixed an issue where `~` was not correctly expanded
+  in `virtualenvs.path` ([#5848](https://github.com/python-poetry/poetry/pull/5848))
+- Fixed an issue where installing/locking dependencies would hang when setting an incorrect git
+  repository ([#5880](https://github.com/python-poetry/poetry/pull/5880))
+- Fixed an issue in `poetry publish` when keyring was not properly
+  configured ([#5889](https://github.com/python-poetry/poetry/pull/5889))
+- Fixed duplicated line output in console ([#5890](https://github.com/python-poetry/poetry/pull/5890))
+- Fixed an issue where the same wheels were downloaded multiple times during
+  installation ([#5871](https://github.com/python-poetry/poetry/pull/5871))
+- Fixed an issue where dependencies hashes could not be retrieved when locking due to a breaking change on PyPI JSON
+  API ([#5973](https://github.com/python-poetry/poetry/pull/5973))
+- Fixed an issue where a dependency with non-requested extras could not be installed if it is requested with extras by
+  another dependency ([#5770](https://github.com/python-poetry/poetry/pull/5770))
+- Updated git backend to correctly read local/global git config when using dulwich as a git
+  backend ([#5935](https://github.com/python-poetry/poetry/pull/5935))
+- Fixed an issue where optional dependencies were not correctly exported when defining
+  groups ([#5819](https://github.com/python-poetry/poetry/pull/5819))
+
+### Docs
+
+- Fixed configuration instructions for repositories
+  specification ([#5809](https://github.com/python-poetry/poetry/pull/5809))
+- Added a link to dependency specification
+  from `pyproject.toml` ([#5815](https://github.com/python-poetry/poetry/pull/5815))
+- Improved `zsh` autocompletion instructions ([#5859](https://github.com/python-poetry/poetry/pull/5859))
+- Improved installation and update documentation ([#5857](https://github.com/python-poetry/poetry/pull/5857))
+- Improved exact requirements documentation ([#5874](https://github.com/python-poetry/poetry/pull/5874))
+- Added documentation for `@` operator ([#5822](https://github.com/python-poetry/poetry/pull/5822))
+- Improved autocompletion documentation ([#5879](https://github.com/python-poetry/poetry/pull/5879))
+- Improved `scripts` definition documentation ([#5884](https://github.com/python-poetry/poetry/pull/5884))
 
 ## [1.1.14] - 2022-07-08
 
-### Fixed
+### Fixed
 
 - Fixed an issue where dependencies hashes could not be retrieved when locking due to a breaking change on PyPI JSON API ([#5973](https://github.com/python-poetry/poetry/pull/5973))
 
+## [1.2.0b2] - 2022-06-07
+
+### Added
+
+- Added support for multiple-constraint direct origin dependencies with the same
+  version ([#5715](https://github.com/python-poetry/poetry/pull/5715))
+- Added support for disabling TLS verification for custom package sources via `poetry config certificates.<repository>.cert false` ([#5719](https://github.com/python-poetry/poetry/pull/5719))
+- Added new configuration (`virtualenvs.prompt`) to customize the prompt of the Poetry-managed virtual environment ([#5606](https://github.com/python-poetry/poetry/pull/5606))
+- Added progress indicator to `download_file` (used when downloading dists) ([#5451](https://github.com/python-poetry/poetry/pull/5451))
+- Added `--dry-run` to `poetry version` command ([#5603](https://github.com/python-poetry/poetry/pull/5603))
+- Added `--why` to `poetry show` ([#5444](https://github.com/python-poetry/poetry/pull/5444))
+- Added support for single page (html) repositories ([#5517](https://github.com/python-poetry/poetry/pull/5517))
+- Added support for PEP 508 strings when adding
+  dependencies via `poetry add` command ([#5554](https://github.com/python-poetry/poetry/pull/5554))
+- Added `--no-cache` as a global option ([#5519](https://github.com/python-poetry/poetry/pull/5519))
+- Added cert retrieval for HTTP requests made by Poetry ([#5320](https://github.com/python-poetry/poetry/pull/5320))
+- Added `--skip-existing` to `poetry publish` ([#2812](https://github.com/python-poetry/poetry/pull/2812))
+- Added `--all-extras` to `poetry install` ([#5452](https://github.com/python-poetry/poetry/pull/5452))
+- Added new `poetry self` sub-commands to manage plugins and/or system environment packages, e.g. keyring backends ([#5450](https://github.com/python-poetry/poetry/pull/5450))
+- Added new configuration (`installer.no-binary`) to allow selection of non-binary distributions when installing a dependency ([#5609](https://github.com/python-poetry/poetry/pull/5609))
+
+### Changed
+
+- `poetry plugin` commands are now deprecated in favor of the more generic `poetry self`
+  commands ([#5450](https://github.com/python-poetry/poetry/pull/5450))
+- When creating new projects, Poetry no longer restricts README extensions to `md` and `rst` ([#5357](https://github.com/python-poetry/poetry/pull/5357))
+- Changed the provider to allow fallback to installed packages ([#5704](https://github.com/python-poetry/poetry/pull/5704))
+- Solver now correctly handles and prefers direct reference constraints (vcs, file etc.) over public version identifiers ([#5654](https://github.com/python-poetry/poetry/pull/5654))
+- Changed the build script behavior to create an ephemeral build environment when a build script is
+  specified ([#5401](https://github.com/python-poetry/poetry/pull/5401))
+- Improved performance when determining PEP 517 metadata from sources ([#5601](https://github.com/python-poetry/poetry/pull/5601))
+- Project package sources no longer need to be redefined as global repositories when configuring credentials ([#5563](https://github.com/python-poetry/poetry/pull/5563))
+- Replaced external git command use with dulwich; to force the legacy behaviour, set the `experimental.system-git-client` configuration to `true` ([#5428](https://github.com/python-poetry/poetry/pull/5428))
+- Improved HTTP request handling for sources and multiple paths on the same netloc ([#5518](https://github.com/python-poetry/poetry/pull/5518))
+- Made `no-pip` and `no-setuptools` configuration explicit ([#5455](https://github.com/python-poetry/poetry/pull/5455))
+- Improved application logging, use of `-vv` now provides more debug information ([#5503](https://github.com/python-poetry/poetry/pull/5503))
+- Renamed implicit group `default` to `main` ([#5465](https://github.com/python-poetry/poetry/pull/5465))
+- Replaced in-tree implementation of `poetry export`
+  with `poetry-plugin-export` ([#5413](https://github.com/python-poetry/poetry/pull/5413))
+- Changed the password manager behavior to use a `"null"` keyring when
+  disabled ([#5251](https://github.com/python-poetry/poetry/pull/5251))
+- Incremental improvement of Solver performance ([#5335](https://github.com/python-poetry/poetry/pull/5335))
+- Newly created virtual environments on macOS are now excluded from Time Machine backups ([#4599](https://github.com/python-poetry/poetry/pull/4599))
+- Poetry no longer raises an exception when a package is not found on PyPI ([#5698](https://github.com/python-poetry/poetry/pull/5698))
+- Updated the `packaging` dependency to major version 21; this change forces Poetry to drop support for managing Python 2.7 environments ([#4749](https://github.com/python-poetry/poetry/pull/4749))
+
+### Fixed
+
+- Fixed `poetry update --dry-run` to not modify `poetry.lock` ([#5718](https://github.com/python-poetry/poetry/pull/5718), [#3666](https://github.com/python-poetry/poetry/issues/3666), [#3766](https://github.com/python-poetry/poetry/issues/3766))
+- Fixed [#5537](https://github.com/python-poetry/poetry/issues/5537) where export fails to resolve dependencies with more than one
+  path ([#5688](https://github.com/python-poetry/poetry/pull/5688))
+- Fixed an issue where the environment variables `POETRY_CONFIG_DIR` and `POETRY_CACHE_DIR` were not being respected ([#5672](https://github.com/python-poetry/poetry/pull/5672))
+- Fixed [#3628](https://github.com/python-poetry/poetry/issues/3628) and [#4702](https://github.com/python-poetry/poetry/issues/4702) by handling invalid distributions
+  gracefully ([#5645](https://github.com/python-poetry/poetry/pull/5645))
+- Fixed an issue where the provider ignored subdirectory when merging and improve subdirectory support for vcs
+  deps ([#5648](https://github.com/python-poetry/poetry/pull/5648))
+- Fixed an issue where users could not select an empty choice when selecting
+  dependencies ([#4606](https://github.com/python-poetry/poetry/pull/4606))
+- Fixed an issue where `poetry init -n` crashes in a root directory ([#5612](https://github.com/python-poetry/poetry/pull/5612))
+- Fixed an issue where Solver errors arise due to wheels having different Python
+  constraints ([#5616](https://github.com/python-poetry/poetry/pull/5616))
+- Fixed an issue where editable path dependencies using `setuptools` could not be correctly installed ([#5590](https://github.com/python-poetry/poetry/pull/5590))
+- Fixed flicker when displaying executor operations ([#5556](https://github.com/python-poetry/poetry/pull/5556))
+- Fixed an issue where `poetry lock --no-update` only sorted by name and not by name and
+  version ([#5446](https://github.com/python-poetry/poetry/pull/5446))
+- Fixed an issue where the Solver fails when a dependency has multiple constrained dependency definitions for the same
+  package ([#5403](https://github.com/python-poetry/poetry/pull/5403))
+- Fixed an issue where dependency resolution takes a while because Poetry checks all possible combinations
+  even when markers are mutually exclusive ([#4695](https://github.com/python-poetry/poetry/pull/4695))
+- Fixed incorrect version selector constraint ([#5500](https://github.com/python-poetry/poetry/pull/5500))
+- Fixed an issue where `poetry lock --no-update` dropped
+  packages ([#5435](https://github.com/python-poetry/poetry/pull/5435))
+- Fixed an issue where packages were incorrectly grouped when
+  exporting ([#5156](https://github.com/python-poetry/poetry/pull/5156))
+- Fixed an issue where lockfile always updates when using private
+  sources ([#5362](https://github.com/python-poetry/poetry/pull/5362))
+- Fixed an issue where the solver did not account for selected package features ([#5305](https://github.com/python-poetry/poetry/pull/5305))
+- Fixed an issue with console script execution of editable dependencies on Windows ([#3339](https://github.com/python-poetry/poetry/pull/3339))
+- Fixed an issue where editable builder did not write PEP-610 metadata ([#5703](https://github.com/python-poetry/poetry/pull/5703))
+- Fixed an issue where Poetry 1.1 lock files were incorrectly identified as not fresh ([#5458](https://github.com/python-poetry/poetry/pull/5458))
+
+### Docs
+
+- Updated plugin management commands ([#5450](https://github.com/python-poetry/poetry/pull/5450))
+- Added the `--readme` flag to documentation ([#5357](https://github.com/python-poetry/poetry/pull/5357))
+- Added example for multiple maintainers ([#5661](https://github.com/python-poetry/poetry/pull/5661))
+- Updated documentation for issues [#4800](https://github.com/python-poetry/poetry/issues/4800), [#3709](https://github.com/python-poetry/poetry/issues/3709), [#3573](https://github.com/python-poetry/poetry/issues/3573), [#2211](https://github.com/python-poetry/poetry/issues/2211) and [#2414](https://github.com/python-poetry/poetry/pull/2414) ([#5656](https://github.com/python-poetry/poetry/pull/5656))
+- Added `poetry.toml` note in configuration ([#5492](https://github.com/python-poetry/poetry/pull/5492))
+- Added documentation for `poetry about`, `poetry help`, `poetry list`, and the `--full-path` and `--all`
+  options ([#5664](https://github.com/python-poetry/poetry/pull/5664))
+- Added more clarification to the `--why` flag ([#5653](https://github.com/python-poetry/poetry/pull/5653))
+- Updated documentation to refer to PowerShell for Windows, including
+  instructions ([#3978](https://github.com/python-poetry/poetry/pull/3978), [#5618](https://github.com/python-poetry/poetry/pull/5618))
+- Added PEP 508 name requirement ([#5642](https://github.com/python-poetry/poetry/pull/5642))
+- Added example for each section of `pyproject.toml` ([#5585](https://github.com/python-poetry/poetry/pull/5585))
+- Added documentation for `--local` to fix issue [#5623](https://github.com/python-poetry/poetry/issues/5623) ([#5629](https://github.com/python-poetry/poetry/pull/5629))
+- Added troubleshooting documentation for using proper quotation with
+  ZSH ([#4847](https://github.com/python-poetry/poetry/pull/4847))
+- Added information on git and basic http auth ([#5578](https://github.com/python-poetry/poetry/pull/5578))
+- Removed ambiguity about PEP 440 and semver ([#5576](https://github.com/python-poetry/poetry/pull/5576))
+- Removed Pipenv comparison ([#5561](https://github.com/python-poetry/poetry/pull/5561))
+- Improved dependency group related documentation ([#5338](https://github.com/python-poetry/poetry/pull/5338))
+- Added documentation for default directories used by Poetry ([#5391](https://github.com/python-poetry/poetry/pull/5391))
+- Added warning about credentials preserved in shell history ([#5726](https://github.com/python-poetry/poetry/pull/5726))
+- Improved documentation of the `readme` option, including multiple files and additional formats ([#5158](https://github.com/python-poetry/poetry/pull/5158))
+- Improved contributing documentation ([#5708](https://github.com/python-poetry/poetry/pull/5708))
+- Removed all references to the `--dev-only` option ([#5771](https://github.com/python-poetry/poetry/pull/5771))
+
+## [1.2.0b1] - 2022-03-17
+
+### Fixed
+
+- Fixed an issue where the system environment couldn't be detected ([#4406](https://github.com/python-poetry/poetry/pull/4406)).
+- Fixed another issue where the system environment couldn't be detected ([#4433](https://github.com/python-poetry/poetry/pull/4433)).
+- Replaced a deprecated `requests` parameter in the uploader ([#4580](https://github.com/python-poetry/poetry/pull/4580)).
+- Fixed an issue where venvs were detected as broken when using MSYS2 on Windows ([#4482](https://github.com/python-poetry/poetry/pull/4482)).
+- Fixed an issue where the cache breaks on Windows ([#4531](https://github.com/python-poetry/poetry/pull/4531)).
+- Fixed an issue where a whitespace before a semicolon was missing on `poetry export` ([#4575](https://github.com/python-poetry/poetry/issues/4575)).
+- Fixed an issue where markers were not correctly assigned to nested dependencies ([#3511](https://github.com/python-poetry/poetry/issues/3511)).
+- Recognize one-digit versions in wheel filenames ([#3338](https://github.com/python-poetry/poetry/pull/3338)).
+- Fixed an issue when `locale` is unset ([#4038](https://github.com/python-poetry/poetry/pull/4038)).
+- Fixed an issue where the fallback to another interpreter didn't work ([#3475](https://github.com/python-poetry/poetry/pull/3475)).
+- Merge any marker constraints into constraints with specific markers ([#4590](https://github.com/python-poetry/poetry/pull/4590)).
+- Normalize path before hashing so that the generated venv name is independent of case on Windows ([#4813](https://github.com/python-poetry/poetry/pull/4813)).
+- Fixed an issue where a dependency wasn't upgraded when using `@latest` on `poetry update` ([#4945](https://github.com/python-poetry/poetry/pull/4945)).
+- Fixed an issue where conda envs on Windows are always reported as broken ([#5007](https://github.com/python-poetry/poetry/pull/5007)).
+- Fixed an issue where Poetry doesn't find its own venv on `poetry self update` ([#5049](https://github.com/python-poetry/poetry/pull/5049)).
+- Fixed misuse of `pretty_constraint` ([#4932](https://github.com/python-poetry/poetry/pull/4932)).
+- Fixed an issue where the reported Python version used for venv creation wasn't correct ([#5086](https://github.com/python-poetry/poetry/pull/5086)).
+- Fixed an issue where the searched package wasn't displayed in the interactive dialog of `poetry init` ([#5076](https://github.com/python-poetry/poetry/pull/5076)).
+- Fixed an issue where Poetry raises an exception on `poetry show` when no lock file exists ([#5242](https://github.com/python-poetry/poetry/pull/5242)).
+- Fixed an issue where Poetry crashes when optional `vcs_info.requested_version` in `direct_url.json` wasn't included ([#5274](https://github.com/python-poetry/poetry/pull/5274)).
+- Fixed an issue where dependencies with extras were updated despite using `--no-update` ([#4618](https://github.com/python-poetry/poetry/pull/4618)).
+- Fixed various places where Poetry wrote messages to stdout instead of stderr ([#4110](https://github.com/python-poetry/poetry/pull/4110), [#5179](https://github.com/python-poetry/poetry/pull/5179)).
+- Ensured that when complete packages are created dependency inherits source and resolved refs from package ([#4604](https://github.com/python-poetry/poetry/pull/4604)).
+- Ensured that when complete packages are created dependency inherits subdirectory from package if supported ([#4604](https://github.com/python-poetry/poetry/pull/4604)).
+- Fixed an issue where `POETRY_EXPERIMENTAL_NEW_INSTALLER` needs to be set to an empty string to disable it ([#3811](https://github.com/python-poetry/poetry/pull/3811)).
+
+### Added
+
+- `poetry show <package>` now also shows which packages depend on it ([#2351](https://github.com/python-poetry/poetry/pull/2351)).
+- The info dialog shown by `poetry about` now contains version information about the installed poetry and poetry-core ([#5288](https://github.com/python-poetry/poetry/pull/5288)).
+- Print error message when `poetry publish` fails ([#3549](https://github.com/python-poetry/poetry/pull/3549)).
+- Added info output to `poetry lock --check` ([#5081](https://github.com/python-poetry/poetry/pull/5081)).
+- Added a new argument `--all` for `poetry env remove` to delete all venvs of a project at once ([#3212](https://github.com/python-poetry/poetry/pull/3212)).
+- Added a new argument `--without-urls` for `poetry export` to exclude source repository URLs from the exported file ([#4763](https://github.com/python-poetry/poetry/pull/4763)).
+- Added a new `installer.max-workers` property to the configuration ([#3516](https://github.com/python-poetry/poetry/pull/3516)).
+- Added the experimental option `virtualenvs.prefer-active-python` to detect the currently activated Python ([#4852](https://github.com/python-poetry/poetry/pull/4852)).
+- Added better Windows shell support ([#5053](https://github.com/python-poetry/poetry/pull/5053)).
+
+### Changed
+
+- Dropped Python 3.6 support ([#5055](https://github.com/python-poetry/poetry/pull/5055)).
+- Exit with the callable's return code in generated scripts ([#4456](https://github.com/python-poetry/poetry/pull/4456)).
+- Internal use of the `pep517` high-level interfaces for package metadata inspections has been replaced with the `build` package ([#5155](https://github.com/python-poetry/poetry/pull/5155)).
+- Poetry now raises an error if the python version in the project environment is no longer compatible with the project ([#4520](https://github.com/python-poetry/poetry/pull/4520)).
+
+
 ## [1.1.13] - 2022-02-09
 
 ### Fixed
 
 - Fixed an issue where envs in MSYS2 always reported as broken ([#4942](https://github.com/python-poetry/poetry/pull/4942))
-- Fixed an issue where conda envs in windows are always reported as broken ([#5008](https://github.com/python-poetry/poetry/pull/5008))
+- Fixed an issue where conda envs on Windows are always reported as broken ([#5008](https://github.com/python-poetry/poetry/pull/5008))
 - Fixed an issue where Poetry doesn't find its own venv on `poetry self update` ([#5048](https://github.com/python-poetry/poetry/pull/5048))
 
 ## [1.1.12] - 2021-11-27
@@ -29,7 +307,6 @@
 - Fixed `JSONDecodeError` when installing packages by updating `cachecontrol` version ([#4831](https://github.com/python-poetry/poetry/pull/4831))
 - Fixed dropped markers in dependency walk ([#4686](https://github.com/python-poetry/poetry/pull/4686))
 
-
 ## [1.1.11] - 2021-10-04
 
 ### Fixed
@@ -38,74 +315,116 @@
 - Fixed an issue where the wrong `git` executable could be used on Windows. ([python-poetry/poetry-core#213](https://github.com/python-poetry/poetry-core/pull/213))
 - Fixed an issue where the Python 3.10 classifier was not automatically added. ([python-poetry/poetry-core#215](https://github.com/python-poetry/poetry-core/pull/215))
 
-
 ## [1.1.10] - 2021-09-21
 
 ### Fixed
 
-- Fixed an issue where non-sha256 hashes were not checked. ([#4529](https://github.com/python-poetry/poetry/pull/4529))
-
+-   Fixed an issue where non-sha256 hashes were not checked. ([#4529](https://github.com/python-poetry/poetry/pull/4529))
 
 ## [1.1.9] - 2021-09-18
 
 ### Fixed
 
-- Fixed a security issue where file hashes were not checked prior to installation. ([#4420](https://github.com/python-poetry/poetry/pull/4420), [#4444](https://github.com/python-poetry/poetry/pull/4444), [python-poetry/poetry-core#193](https://github.com/python-poetry/poetry-core/pull/193))
-- Fixed the detection of the system environment when the setting `virtualenvs.create` is deactivated. ([#4507](https://github.com/python-poetry/poetry/pull/4507))
-- Fixed an issue where unsafe parameters could be passed to `git` commands. ([python-poetry/poetry-core#203](https://github.com/python-poetry/poetry-core/pull/203))
-- Fixed an issue where the wrong `git` executable could be used on Windows. ([python-poetry/poetry-core#205](https://github.com/python-poetry/poetry-core/pull/205))
-
+-   Fixed a security issue where file hashes were not checked prior to installation. ([#4420](https://github.com/python-poetry/poetry/pull/4420), [#4444](https://github.com/python-poetry/poetry/pull/4444), [python-poetry/poetry-core#193](https://github.com/python-poetry/poetry-core/pull/193))
+-   Fixed the detection of the system environment when the setting `virtualenvs.create` is deactivated. ([#4507](https://github.com/python-poetry/poetry/pull/4507))
+-   Fixed an issue where unsafe parameters could be passed to `git` commands. ([python-poetry/poetry-core#203](https://github.com/python-poetry/poetry-core/pull/203))
+-   Fixed an issue where the wrong `git` executable could be used on Windows. ([python-poetry/poetry-core#205](https://github.com/python-poetry/poetry-core/pull/205))
 
 ## [1.1.8] - 2021-08-19
 
 ### Fixed
 
-- Fixed an error with repository prioritization when specifying secondary repositories. ([#4241](https://github.com/python-poetry/poetry/pull/4241))
-- Fixed the detection of the system environment when the setting `virtualenvs.create` is deactivated. ([#4330](https://github.com/python-poetry/poetry/pull/4330), [#4407](https://github.com/python-poetry/poetry/pull/4407))
-- Fixed the evaluation of relative path dependencies. ([#4246](https://github.com/python-poetry/poetry/pull/4246))
-- Fixed environment detection for Python 3.10 environments. ([#4387](https://github.com/python-poetry/poetry/pull/4387))
-- Fixed an error in the evaluation of `in/not in` markers ([python-poetry/poetry-core#189](https://github.com/python-poetry/poetry-core/pull/189))
+-   Fixed an error with repository prioritization when specifying secondary repositories. ([#4241](https://github.com/python-poetry/poetry/pull/4241))
+-   Fixed the detection of the system environment when the setting `virtualenvs.create` is deactivated. ([#4330](https://github.com/python-poetry/poetry/pull/4330), [#4407](https://github.com/python-poetry/poetry/pull/4407))
+-   Fixed the evaluation of relative path dependencies. ([#4246](https://github.com/python-poetry/poetry/pull/4246))
+-   Fixed environment detection for Python 3.10 environments. ([#4387](https://github.com/python-poetry/poetry/pull/4387))
+-   Fixed an error in the evaluation of `in/not in` markers ([python-poetry/poetry-core#189](https://github.com/python-poetry/poetry-core/pull/189))
 
+## [1.2.0a2] - 2021-08-01
 
-## [1.1.7] - 2021-06-25
+### Added
 
-**Note**: Lock files might need to be regenerated for the first fix below to take effect.
-You can use `poetry lock` to do so **without** the `--no-update` option.
+- Poetry now supports dependency groups. ([#4260](https://github.com/python-poetry/poetry/pull/4260))
+- The `install` command now supports a `--sync` option to synchronize the environment with the lock file. ([#4336](https://github.com/python-poetry/poetry/pull/4336))
 
 ### Changed
 
-- This release is compatible with the `install-poetry.py` installation script to ease the migration path from `1.1` releases to `1.2` releases. ([#4192](https://github.com/python-poetry/poetry/pull/4192))
+- Improved the way credentials are retrieved to better support keyring backends. ([#4086](https://github.com/python-poetry/poetry/pull/4086))
+- The `--remove-untracked` option of the `install` command is now deprecated in favor of the new `--sync` option. ([#4336](https://github.com/python-poetry/poetry/pull/4336))
+- The user experience when installing dependency groups has been improved. ([#4336](https://github.com/python-poetry/poetry/pull/4336))
 
 ### Fixed
 
-- Fixed an issue where transitive dependencies of directory or VCS dependencies were not installed or otherwise removed. ([#4203](https://github.com/python-poetry/poetry/pull/4203))
-- Fixed an issue where the combination of the `--tree` and `--no-dev` options for the show command was still displaying development dependencies. ([#3992](https://github.com/python-poetry/poetry/pull/3992))
+- Fixed performance issues when resolving dependencies. ([#3839](https://github.com/python-poetry/poetry/pull/3839))
+- Fixed an issue where transitive dependencies of directory or VCS dependencies were not installed or otherwise removed. ([#4202](https://github.com/python-poetry/poetry/pull/4202))
+- Fixed the behavior of the `init` command in non-interactive mode. ([#2899](https://github.com/python-poetry/poetry/pull/2899))
+- Fixed the detection of the system environment when the setting `virtualenvs.create` is deactivated. ([#4329](https://github.com/python-poetry/poetry/pull/4329))
+- Fixed the display of possible solutions for some common errors. ([#4332](https://github.com/python-poetry/poetry/pull/4332))
 
 
-## [1.1.6] - 2021-04-14
+## [1.1.7] - 2021-06-25
+
+**Note**: Lock files might need to be regenerated for the first fix below to take effect.\
+You can use `poetry lock` to do so **without** the `--no-update` option.
+
+### Changed
+
+-   This release is compatible with the `install-poetry.py` installation script to ease the migration path from `1.1` releases to `1.2` releases. ([#4192](https://github.com/python-poetry/poetry/pull/4192))
 
 ### Fixed
 
-- Fixed export format for path dependencies. ([#3121](https://github.com/python-poetry/poetry/pull/3121))
-- Fixed errors caused by environment modification when executing some commands. ([#3253](https://github.com/python-poetry/poetry/pull/3253))
-- Fixed handling of wheel files with single-digit versions. ([#3338](https://github.com/python-poetry/poetry/pull/3338))
-- Fixed an error when handling single-digit Python markers. ([poetry-core#156](https://github.com/python-poetry/poetry-core/pull/156))
-- Fixed dependency markers not being properly copied when changing the constraint leading to resolution errors. ([poetry-core#163](https://github.com/python-poetry/poetry-core/pull/163))
-- Fixed an error where VCS dependencies were always updated. ([#3947](https://github.com/python-poetry/poetry/pull/3947))
-- Fixed an error where the incorrect version of a package was locked when using environment markers. ([#3945](https://github.com/python-poetry/poetry/pull/3945))
+-   Fixed an issue where transitive dependencies of directory or VCS dependencies were not installed or otherwise removed. ([#4203](https://github.com/python-poetry/poetry/pull/4203))
+-   Fixed an issue where the combination of the `--tree` and `--no-dev` options for the show command was still displaying development dependencies. ([#3992](https://github.com/python-poetry/poetry/pull/3992))
+
+## [1.2.0a1] - 2021-05-21
+
+This release is the first testing release of the upcoming 1.2.0 version.
+
+It **drops** support for Python 2.7 and 3.5.
+
+### Added
+
+- Poetry now supports a plugin system to alter or expand Poetry's functionality. ([#3733](https://github.com/python-poetry/poetry/pull/3733))
+- Poetry now supports [PEP 610](https://www.python.org/dev/peps/pep-0610/). ([#3876](https://github.com/python-poetry/poetry/pull/3876))
+- Several configuration options to better control the way virtual environments are created are now available. ([#3157](https://github.com/python-poetry/poetry/pull/3157), [#3711](https://github.com/python-poetry/poetry/pull/3711)).
+- The `new` command now supports namespace packages. ([#2768](https://github.com/python-poetry/poetry/pull/2768))
+- The `add` command now supports the `--editable` option to add packages in editable mode. ([#3940](https://github.com/python-poetry/poetry/pull/3940))
 
+### Changed
 
-## [1.1.5] - 2021-03-03
+- Python 2.7 and 3.5 are no longer supported. ([#3405](https://github.com/python-poetry/poetry/pull/3405))
+- The usage of the `get-poetry.py` script is now deprecated and is replaced by the `install-poetry.py` script. ([#3706](https://github.com/python-poetry/poetry/pull/3706))
+- Directory dependencies are now in non-develop mode by default. ([poetry-core#98](https://github.com/python-poetry/poetry-core/pull/98))
+- Improved support for PEP 440 specific versions that do not abide by semantic versioning. ([poetry-core#140](https://github.com/python-poetry/poetry-core/pull/140))
+- Improved the CLI experience and performance by migrating to the latest version of Cleo. ([#3618](https://github.com/python-poetry/poetry/pull/3618))
+- Packages previously considered unsafe (`pip`, `setuptools`, `wheel` and `distribute`) can now be managed as any other package. ([#2826](https://github.com/python-poetry/poetry/pull/2826))
+- The `new` command now defaults to the Markdown format for README files. ([#2768](https://github.com/python-poetry/poetry/pull/2768))
 
 ### Fixed
 
-- Fixed an error in the `export` command when no lock file existed and a verbose flag was passed to the command. ([#3310](https://github.com/python-poetry/poetry/pull/3310))
-- Fixed an error where the `pyproject.toml` was not reverted when using the `add` command. ([#3622](https://github.com/python-poetry/poetry/pull/3622))
-- Fixed errors when using non-HTTPS indices. ([#3622](https://github.com/python-poetry/poetry/pull/3622))
-- Fixed errors when handling simple indices redirection. ([#3622](https://github.com/python-poetry/poetry/pull/3622))
-- Fixed errors when trying to handle newer wheels by using the latest version of `poetry-core` and `packaging`. ([#3677](https://github.com/python-poetry/poetry/pull/3677))
-- Fixed an error when using some versions of `poetry-core` due to an incorrect import . ([#3696](https://github.com/python-poetry/poetry/pull/3696))
+- Fixed an error where command line options were not taken into account when using the `run` command. ([#3618](https://github.com/python-poetry/poetry/pull/3618))
+- Fixed an error in the way custom repositories were resolved. ([#3406](https://github.com/python-poetry/poetry/pull/3406))
+
+## [1.1.6] - 2021-04-14
 
+### Fixed
+
+-   Fixed export format for path dependencies. ([#3121](https://github.com/python-poetry/poetry/pull/3121))
+-   Fixed errors caused by environment modification when executing some commands. ([#3253](https://github.com/python-poetry/poetry/pull/3253))
+-   Fixed handling of wheel files with single-digit versions. ([#3338](https://github.com/python-poetry/poetry/pull/3338))
+-   Fixed an error when handling single-digit Python markers. ([poetry-core#156](https://github.com/python-poetry/poetry-core/pull/156))
+-   Fixed dependency markers not being properly copied when changing the constraint leading to resolution errors. ([poetry-core#163](https://github.com/python-poetry/poetry-core/pull/163))
+-   Fixed an error where VCS dependencies were always updated. ([#3947](https://github.com/python-poetry/poetry/pull/3947))
+-   Fixed an error where the incorrect version of a package was locked when using environment markers. ([#3945](https://github.com/python-poetry/poetry/pull/3945))
+
+## [1.1.5] - 2021-03-04
+
+### Fixed
+- Fixed an error in the `export` command when no lock file existed and a verbose flag was passed to the command. ([#3310](https://github.com/python-poetry/poetry/pull/3310))
+- Fixed an error where the `pyproject.toml` was not reverted when using the `add` command. ([#3622](https://github.com/python-poetry/poetry/pull/3622))
+- Fixed errors when using non-HTTPS indices. ([#3622](https://github.com/python-poetry/poetry/pull/3622))
+- Fixed errors when handling simple indices redirection. ([#3622](https://github.com/python-poetry/poetry/pull/3622))
+- Fixed errors when trying to handle newer wheels by using the latest version of `poetry-core` and `packaging`. ([#3677](https://github.com/python-poetry/poetry/pull/3677))
+- Fixed an error when using some versions of `poetry-core` due to an incorrect import. ([#3696](https://github.com/python-poetry/poetry/pull/3696))
 
 ## [1.1.4] - 2020-10-23
 
@@ -1016,7 +1335,7 @@ commands in project subdirectories.
 - Improved dependency resolution to avoid unnecessary operations.
 - Improved dependency resolution speed.
 - Improved CLI reactivity by deferring imports.
-- License classifer is not automatically added to classifers.
+- License classifier is not automatically added to classifiers.
 
 ### Fixed
 
@@ -1061,7 +1380,7 @@ commands in project subdirectories.
 
 ### Changed
 
-- Changed how wilcard constraints are handled.
+- Changed how wildcard constraints are handled.
 
 ### Fixed
 
@@ -1190,8 +1509,15 @@ Initial release
 
 
 
-[Unreleased]: https://github.com/python-poetry/poetry/compare/1.1.15...1.1
-[1.1.15]: https://github.com/python-poetry/poetry/releases/tag/1.1.15
+[Unreleased]: https://github.com/python-poetry/poetry/compare/1.2.0...master
+[1.2.0]: https://github.com/python-poetry/poetry/releases/tag/1.2.0
+[1.2.0rc2]: https://github.com/python-poetry/poetry/releases/tag/1.2.0rc2
+[1.2.0rc1]: https://github.com/python-poetry/poetry/releases/tag/1.2.0rc1
+[1.2.0b3]: https://github.com/python-poetry/poetry/releases/tag/1.2.0b3
+[1.2.0b2]: https://github.com/python-poetry/poetry/releases/tag/1.2.0b2
+[1.2.0b1]: https://github.com/python-poetry/poetry/releases/tag/1.2.0b1
+[1.2.0a2]: https://github.com/python-poetry/poetry/releases/tag/1.2.0a2
+[1.2.0a1]: https://github.com/python-poetry/poetry/releases/tag/1.2.0a1
 [1.1.14]: https://github.com/python-poetry/poetry/releases/tag/1.1.14
 [1.1.13]: https://github.com/python-poetry/poetry/releases/tag/1.1.13
 [1.1.12]: https://github.com/python-poetry/poetry/releases/tag/1.1.12
diff --git a/vendor/poetry/CONTRIBUTING.md b/vendor/poetry/CONTRIBUTING.md
index 73358148..80a4b1cc 100644
--- a/vendor/poetry/CONTRIBUTING.md
+++ b/vendor/poetry/CONTRIBUTING.md
@@ -1,3 +1,7 @@
+
+
 # Contributing to Poetry
 
 First off, thanks for taking the time to contribute!
@@ -55,7 +59,7 @@ Provide more context by answering these questions:
 Include details about your configuration and environment:
 
 * **Which version of Poetry are you using?** You can get the exact version by running `poetry -V` in your terminal.
-* **Which Python version Poetry has been installed for?** Execute the `debug:info` to get the information.
+* **Which Python version has Poetry been installed for?** Execute `poetry debug info` to get the information.
 * **What's the name and version of the OS you're using**?
 
 
@@ -104,11 +108,13 @@ the code base, join us on our [Discord Server](https://discordapp.com/invite/awx
 
 You will need Poetry to start contributing on the Poetry codebase. Refer to the [documentation](https://python-poetry.org/docs/#introduction) to start using Poetry.
 
+> **Note:** Local development of Poetry requires Python 3.8 or newer.
+
 You will first need to clone the repository using `git` and place yourself in its directory:
 
 ```bash
-$ git clone git@github.com:python-poetry/poetry.git
-$ cd poetry
+git clone git@github.com:python-poetry/poetry.git
+cd poetry
 ```
 
 > **Note:** We recommend that you use a personal [fork](https://docs.github.com/en/free-pro-team@latest/github/getting-started-with-github/fork-a-repo) for this step. If you are new to GitHub collaboration,
@@ -118,8 +124,15 @@ Now, you will need to install the required dependency for Poetry and be sure tha
 tests are passing on your machine:
 
 ```bash
-$ poetry install
-$ poetry run pytest tests/
+poetry install
+poetry run pytest tests/
+```
+
+Poetry uses [mypy](https://github.com/python/mypy) for typechecking, and the CI
+will fail if it finds any errors. To run mypy locally:
+
+```bash
+poetry run mypy
 ```
 
 Poetry uses the [black](https://github.com/psf/black) coding style and you must ensure that your
@@ -132,13 +145,13 @@ To make sure that you don't accidentally commit code that does not follow the co
 install a pre-commit hook that will check that everything is in order:
 
 ```bash
-$ poetry run pre-commit install
+poetry run pre-commit install
 ```
 
 You can also run it anytime using:
 
 ```bash
-$ poetry run pre-commit run --all-files
+poetry run pre-commit run --all-files
 ```
 
 Your code must always be accompanied by corresponding tests, if tests are not present your code
@@ -178,7 +191,7 @@ If you are helping with the triage of reported issues, this section provides som
 
 #### Multiple versions
 
-Often times you would want to attempt to reproduce issues with multiple versions of `poetry` at the same time. For these use cases, the [pipx project](https://pipxproject.github.io/pipx/) is useful.
+Oftentimes you will want to attempt to reproduce issues with multiple versions of `poetry` at the same time. For these use cases, the [pipx project](https://pypa.github.io/pipx/) is useful.
 
 You can set your environment up like so.
 
diff --git a/vendor/poetry/Makefile b/vendor/poetry/Makefile
deleted file mode 100644
index cc8b4ffa..00000000
--- a/vendor/poetry/Makefile
+++ /dev/null
@@ -1,70 +0,0 @@
-# This file is part of Poetry
-# https://github.com/python-poetry/poetry
-
-# Licensed under the MIT license:
-# http://www.opensource.org/licenses/MIT-license
-# Copyright (c) 2018 Sébastien Eustace
-
-POETRY_RELEASE := $$(sed -n -E "s/__version__ = '(.+)'/\1/p" poetry/__version__.py)
-
-# lists all available targets
-list:
-	@sh -c "$(MAKE) -p no_targets__ | \
-		awk -F':' '/^[a-zA-Z0-9][^\$$#\/\\t=]*:([^=]|$$)/ {\
-			split(\$$1,A,/ /);for(i in A)print A[i]\
-		}' | grep -v '__\$$' | grep -v 'make\[1\]' | grep -v 'Makefile' | sort"
-# required for list
-no_targets__:
-
-clean:
-	@rm -rf build dist .eggs *.egg-info
-	@rm -rf .benchmarks .coverage coverage.xml htmlcov report.xml .tox
-	@find . -type d -name '.mypy_cache' -exec rm -rf {} +
-	@find . -type d -name '__pycache__' -exec rm -rf {} +
-	@find . -type d -name '*pytest_cache*' -exec rm -rf {} +
-	@find . -type f -name "*.py[co]" -exec rm -rf {} +
-
-format: clean
-	@poetry run black poetry/ tests/
-
-# install all dependencies
-setup: setup-python
-
-# test your application (tests in the tests/ directory)
-test:
-	@poetry run pytest --cov=poetry --cov-config .coveragerc tests/ -sq
-
-release: build linux_release osx_release
-
-build:
-	@poetry build
-	@python sonnet make release
-
-publish:
-	@poetry publish
-
-wheel:
-	@poetry build -v
-
-linux_release:
-	docker pull quay.io/pypa/manylinux2010_x86_64:2021-02-06-3d322a5
-	docker pull quay.io/pypa/manylinux2010_x86_64:2022-02-05-4cb577c
-	docker run --rm -i -v `pwd`:/io \
-		-e VENDOR_ONLY=1 \
-		-e PYTHON=/opt/python/cp38-cp38/bin/python \
-		-e PYTHON27=/opt/python/cp27-cp27m/bin/python \
-		-e PYTHON35=/opt/python/cp35-cp35m/bin/python \
-		quay.io/pypa/manylinux2010_x86_64:2021-02-06-3d322a5 sh -c "cd /io && ./make-nix-release.sh"
-	docker run --rm -i -v `pwd`:/io \
-		-e VENDOR_REUSE=1 \
-		-e PYTHON=/opt/python/cp38-cp38/bin/python \
-		-e PYTHON36=/opt/python/cp36-cp36m/bin/python \
-		-e PYTHON37=/opt/python/cp37-cp37m/bin/python \
-		-e PYTHON38=/opt/python/cp38-cp38/bin/python \
-		-e PYTHON39=/opt/python/cp39-cp39/bin/python \
-		-e PYTHON310=/opt/python/cp310-cp310/bin/python \
-		quay.io/pypa/manylinux2010_x86_64:2022-02-05-4cb577c sh -c "cd /io && ./make-nix-release.sh"
-
-# run tests against all supported python versions
-tox:
-	@tox
diff --git a/vendor/poetry/README.md b/vendor/poetry/README.md
index 2ad207fd..264e4aa2 100644
--- a/vendor/poetry/README.md
+++ b/vendor/poetry/README.md
@@ -1,138 +1,34 @@
 # Poetry: Dependency Management for Python
 
+[![Tests Status](https://github.com/python-poetry/poetry/workflows/Tests/badge.svg?branch=master&event=push)](https://github.com/python-poetry/poetry/actions?query=workflow%3ATests+branch%3Amaster+event%3Apush)
+[![Stable Version](https://img.shields.io/pypi/v/poetry?label=stable)](https://pypi.org/project/poetry/)
+[![Pre-release Version](https://img.shields.io/github/v/release/python-poetry/poetry?label=pre-release&include_prereleases&sort=semver)](https://pypi.org/project/poetry/#history)
+[![Downloads](https://img.shields.io/pypi/dm/poetry)](https://pypistats.org/packages/poetry)
+[![Discord](https://img.shields.io/discord/487711540787675139?logo=discord)](https://discord.com/invite/awxPgve)
+
 Poetry helps you declare, manage and install dependencies of Python projects,
 ensuring you have the right stack everywhere.
 
-![Poetry Install](https://raw.githubusercontent.com/python-poetry/poetry/master/assets/install.gif)
+It requires Python 3.7+ to run.
 
-It supports Python 2.7 and 3.5+.
-
-**Note**: Python 2.7 and 3.5 will no longer be supported in the next feature release (1.2).
-You should consider updating your Python version to a supported one.
+![Poetry Install](https://raw.githubusercontent.com/python-poetry/poetry/master/assets/install.gif)
 
-[![Tests Status](https://github.com/python-poetry/poetry/workflows/Tests/badge.svg?branch=master&event=push)](https://github.com/python-poetry/poetry/actions?query=workflow%3ATests+branch%3Amaster+event%3Apush)
+## Documentation
 
 The [complete documentation](https://python-poetry.org/docs/) is available on the [official website](https://python-poetry.org).
 
 ## Installation
 
-Poetry provides a custom installer that will install `poetry` isolated
-from the rest of your system by vendorizing its dependencies. This is the
-recommended way of installing `poetry`.
-
-*Note:*
-    The `get-poetry.py` script described here will be replaced in Poetry 1.2 by `install-poetry.py`.
-    From Poetry **1.1.7 onwards**, you can already use this script as described [here](https://python-poetry.org/docs/master/#installation).
-
-```bash
-curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python
-```
-
-Alternatively, you can download the `get-poetry.py` file and execute it separately.
-
-The setup script must be able to find one of following executables in your shell's path environment:
-
-- `python` (which can be a py3 or py2 interpreter)
-- `python3`
-- `py.exe -3` (Windows)
-- `py.exe -2` (Windows)
-
-If you want to install prerelease versions, you can do so by passing `--preview` to `get-poetry.py`:
-
-```bash
-python get-poetry.py --preview
-```
-
-Similarly, if you want to install a specific version, you can use `--version`:
-
-```bash
-python get-poetry.py --version 0.7.0
-```
-
-Using `pip` to install `poetry` is also possible.
-
-```bash
-pip install --user poetry
-```
-
-Be aware, however, that it will also install poetry's dependencies
-which might cause conflicts.
-
-## Updating `poetry`
-
-Updating poetry to the latest stable version is as simple as calling the `self update` command.
-
-```bash
-poetry self update
-```
-
-If you want to install prerelease versions, you can use the `--preview` option.
-
-```bash
-poetry self update --preview
-```
-
-And finally, if you want to install a specific version you can pass it as an argument
-to `self update`.
-
-```bash
-poetry self update 1.0.0
-```
-
-*Note:*
-
-    If you are still on poetry version < 1.0 use `poetry self:update` instead.
-
-
-## Enable tab completion for Bash, Fish, or Zsh
-
-`poetry` supports generating completion scripts for Bash, Fish, and Zsh.
-See `poetry help completions` for full details, but the gist is as simple as using one of the following:
-
-```bash
-# Bash
-poetry completions bash > /etc/bash_completion.d/poetry.bash-completion
-
-# Bash (Homebrew)
-poetry completions bash > $(brew --prefix)/etc/bash_completion.d/poetry.bash-completion
-
-# Fish
-poetry completions fish > ~/.config/fish/completions/poetry.fish
-
-# Fish (Homebrew)
-poetry completions fish > (brew --prefix)/share/fish/vendor_completions.d/poetry.fish
-
-# Zsh
-poetry completions zsh > ~/.zfunc/_poetry
-
-# Zsh (Homebrew)
-poetry completions zsh > $(brew --prefix)/share/zsh/site-functions/_poetry
-
-# Zsh (Oh-My-Zsh)
-mkdir $ZSH_CUSTOM/plugins/poetry
-poetry completions zsh > $ZSH_CUSTOM/plugins/poetry/_poetry
-
-# Zsh (prezto)
-poetry completions zsh > ~/.zprezto/modules/completion/external/src/_poetry
-```
-
-*Note:* you may need to restart your shell in order for the changes to take
-effect.
-
-For `zsh`, you must then add the following line in your `~/.zshrc` before
-`compinit` (not for homebrew setup):
-
-```zsh
-fpath+=~/.zfunc
-```
-
+Instructions on how to install `poetry` can be found [here](https://python-poetry.org/docs/master/#installation).
+You can also refer [here](https://python-poetry.org/docs/master/#enable-tab-completion-for-bash-fish-or-zsh) for
+information on how to enable tab completion in your environment.
 
 ## Introduction
 
 `poetry` is a tool to handle dependency installation as well as building and packaging of Python packages.
 It only needs one file to do all of that: the new, [standardized](https://www.python.org/dev/peps/pep-0518/) `pyproject.toml`.
 
-In other words, poetry uses `pyproject.toml` to replace `setup.py`, `requirements.txt`, `setup.cfg`, `MANIFEST.in` and the newly added `Pipfile`.
+In other words, poetry uses `pyproject.toml` to replace `setup.py`, `requirements.txt`, `setup.cfg`, `MANIFEST.in` and `Pipfile`.
 
 ```toml
 [tool.poetry]
@@ -146,32 +42,32 @@ authors = [
     "Sébastien Eustace "
 ]
 
-readme = 'README.md'  # Markdown files are supported
+readme = "README.md"
 
 repository = "https://github.com/python-poetry/poetry"
-homepage = "https://github.com/python-poetry/poetry"
+homepage = "https://python-poetry.org"
 
-keywords = ['packaging', 'poetry']
+keywords = ["packaging", "poetry"]
 
 [tool.poetry.dependencies]
-python = "~2.7 || ^3.2"  # Compatible python versions must be declared here
-toml = "^0.9"
+python = "^3.8"  # Compatible python versions must be declared here
+aiohttp = "^3.8.1"
 # Dependencies with extras
-requests = { version = "^2.13", extras = [ "security" ] }
+requests = { version = "^2.28", extras = [ "security" ] }
 # Python specific dependencies with prereleases allowed
-pathlib2 = { version = "^2.2", python = "~2.7", allow-prereleases = true }
+tomli = { version = "^2.0.1", python = "<3.11", allow-prereleases = true }
 # Git dependencies
-cleo = { git = "https://github.com/sdispater/cleo.git", branch = "master" }
+cleo = { git = "https://github.com/python-poetry/cleo.git", branch = "master" }
 
 # Optional dependencies (extras)
-pendulum = { version = "^1.4", optional = true }
+pendulum = { version = "^2.1.2", optional = true }
 
 [tool.poetry.dev-dependencies]
-pytest = "^3.0"
-pytest-cov = "^2.4"
+pytest = "^7.1.2"
+pytest-cov = "^3.0"
 
 [tool.poetry.scripts]
-my-script = 'my_package:main'
+my-script = "my_package:main"
 ```
 
 There are some things we can notice here:
@@ -179,7 +75,7 @@ There are some things we can notice here:
 * It will try to enforce [semantic versioning]() as the best practice in version naming.
 * You can specify the readme, included and excluded files: no more `MANIFEST.in`.
 `poetry` will also use VCS ignore files (like `.gitignore`) to populate the `exclude` section.
-* Keywords (up to 5) can be specified and will act as tags on the packaging site.
+* Keywords can be specified and will act as tags on the packaging site.
 * The dependencies sections support caret, tilde, wildcard, inequality and multiple requirements.
 * You must specify the python versions for which your package is compatible.
 
@@ -192,84 +88,18 @@ So, `poetry` can be installed globally and used everywhere.
 
 Packaging systems and dependency management in Python are rather convoluted and hard to understand for newcomers.
 Even for seasoned developers it might be cumbersome at times to create all files needed in a Python project: `setup.py`,
-`requirements.txt`, `setup.cfg`, `MANIFEST.in` and the newly added `Pipfile`.
+`requirements.txt`, `setup.cfg`, `MANIFEST.in` and `Pipfile`.
 
 So I wanted a tool that would limit everything to a single configuration file to do:
 dependency management, packaging and publishing.
 
 It takes inspiration in tools that exist in other languages, like `composer` (PHP) or `cargo` (Rust).
 
-And, finally, there is no reliable tool to properly resolve dependencies in Python, so I started `poetry`
-to bring an exhaustive dependency resolver to the Python community.
-
-### What about Pipenv?
-
-In short: I do not like the CLI it provides, or some of the decisions made,
-and I think we can make a better and more intuitive one. Here are a few things
-that I don't like.
-
-#### Dependency resolution
-
-The dependency resolution is erratic and will fail even if there is a solution. Let's take an example:
-
-```bash
-pipenv install oslo.utils==1.4.0
-```
-
-will fail with this error:
-
-```text
-Could not find a version that matches pbr!=0.7,!=2.1.0,<1.0,>=0.6,>=2.0.0
-```
-
-while Poetry will get you the right set of packages:
-
-```bash
-poetry add oslo.utils=1.4.0
-```
-
-results in :
-
-```text
-  - Installing pytz (2018.3)
-  - Installing netifaces (0.10.6)
-  - Installing netaddr (0.7.19)
-  - Installing oslo.i18n (2.1.0)
-  - Installing iso8601 (0.1.12)
-  - Installing six (1.11.0)
-  - Installing babel (2.5.3)
-  - Installing pbr (0.11.1)
-  - Installing oslo.utils (1.4.0)
-```
-
-This is possible thanks to the efficient dependency resolver at the heart of Poetry.
-
-Here is a breakdown of what exactly happens here:
-
-`oslo.utils (1.4.0)` depends on:
-
-- `pbr (>=0.6,!=0.7,<1.0)`
-- `Babel (>=1.3)`
-- `six (>=1.9.0)`
-- `iso8601 (>=0.1.9)`
-- `oslo.i18n (>=1.3.0)`
-- `netaddr (>=0.7.12)`
-- `netifaces (>=0.10.4)`
-
-What interests us is `pbr (>=0.6,!=0.7,<1.0)`.
-
-At this point, poetry will choose `pbr==0.11.1` which is the latest version that matches the constraint.
-
-Next it will try to select `oslo.i18n==3.20.0` which is the latest version that matches `oslo.i18n (>=1.3.0)`.
-
-However this version requires `pbr (!=2.1.0,>=2.0.0)` which is incompatible with `pbr==0.11.1`,
-so `poetry` will try to find a version of `oslo.i18n` that satisfies `pbr (>=0.6,!=0.7,<1.0)`.
-
-By analyzing the releases of `oslo.i18n`, it will find `oslo.i18n==2.1.0` which requires `pbr (>=0.11,<2.0)`.
-At this point the rest of the resolution is straightforward since there is no more conflict.
+And, finally, I started `poetry` to bring another exhaustive dependency resolver to the Python community apart from
+[Conda's](https://conda.io).
 
 ## Resources
 
 * [Official Website](https://python-poetry.org)
 * [Issue Tracker](https://github.com/python-poetry/poetry/issues)
-* [Discord](https://discordapp.com/invite/awxPgve)
+* [Discord](https://discord.com/invite/awxPgve)
diff --git a/vendor/poetry/docs/_index.md b/vendor/poetry/docs/_index.md
index 6773c17f..dd0bb652 100644
--- a/vendor/poetry/docs/_index.md
+++ b/vendor/poetry/docs/_index.md
@@ -13,218 +13,315 @@ menu:
 
 Poetry is a tool for **dependency management** and **packaging** in Python.
 It allows you to declare the libraries your project depends on and it will manage (install/update) them for you.
+Poetry offers a lockfile to ensure repeatable installs, and can build your project for distribution.
 
 
 ## System requirements
 
-Poetry requires Python 2.7 or 3.5+. It is multi-platform and the goal is to make it work equally well
-on Windows, Linux and OSX.
+Poetry requires **Python 3.7+**. It is multi-platform and the goal is to make it work equally well
+on Linux, macOS and Windows.
+
+## Installation
 
 {{% note %}}
-Python 2.7 and 3.5 will no longer be supported in the next feature release (1.2).
-You should consider updating your Python version to a supported one.
+If you are viewing documentation for the development branch, you may wish to install a preview or development version of Poetry.
+See the **advanced** installation instructions to use a preview or alternate version of Poetry.
 {{% /note %}}
 
+{{< tabs tabTotal="3" tabID1="installing-with-the-official-installer" tabID2="installing-with-pipx" tabID3="installing-manually" tabName1="With the official installer" tabName2="With pipx" tabName3="Manually (advanced)" >}}
 
-## Installation
+{{< tab tabID="installing-with-the-official-installer" >}}
+
+We provide a custom installer that will install Poetry in a new virtual environment to isolate it
+from the rest of your system. This ensures that dependencies will not be accidentally upgraded or
+uninstalled, and allows Poetry to manage its own environment.
 
-Poetry provides a custom installer that will install `poetry` isolated
-from the rest of your system by vendorizing its dependencies. This is the
-recommended way of installing `poetry`.
+{{< steps >}}
+{{< step >}}
+**Install Poetry**
+
+The installer script is available directly at [install.python-poetry.org](https://install.python-poetry.org),
+and is developed in [its own repository](https://github.com/python-poetry/install.python-poetry.org).
+The script can be executed directly (i.e. piped from `curl` straight into `python`) or downloaded and then
+executed from disk (e.g. in a CI environment).
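+
+For example, a minimal download-then-run flow (the `installer.py` filename is only illustrative) might look like:
+
+```bash
+# Download the installer once so it can be inspected or cached,
+# then execute it from disk.
+curl -sSL https://install.python-poetry.org -o installer.py
+python3 installer.py
+```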
 
 {{% warning %}}
-The `get-poetry.py` script described here will be replaced in Poetry 1.2 by `install.python-poetry.org`.
-From Poetry **1.1.7 onwards**, you can already use this script as described [here]({{< relref "docs/master/#installation" >}}).
+The previous `get-poetry.py` and `install-poetry.py` installers are deprecated. Any installs performed
+using `get-poetry.py` should be uninstalled and reinstalled using `install.python-poetry.org` to ensure
+in-place upgrades are possible.
 {{% /warning %}}
 
-### osx / linux / bashonwindows install instructions
+**Linux, macOS, Windows (WSL)**
+
 ```bash
-curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python -
+curl -sSL https://install.python-poetry.org | python3 -
 ```
-### windows powershell install instructions
+
+{{% note %}}
+On some systems, `python` may still refer to Python 2 instead of Python 3. We always suggest the
+`python3` binary to avoid ambiguity.
+{{% /note %}}
+
+**Windows (Powershell)**
 ```powershell
-(Invoke-WebRequest -Uri https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py -UseBasicParsing).Content | python -
+(Invoke-WebRequest -Uri https://install.python-poetry.org -UseBasicParsing).Content | py -
 ```
 
 {{% note %}}
-You only need to install Poetry once. It will automatically pick up the current
-Python version and use it to [create virtualenvs]({{< relref "managing-environments" >}}) accordingly.
+If you have installed Python through the Microsoft Store, replace `py` with `python` in the command
+above.
 {{% /note %}}
 
-The installer installs the `poetry` tool to Poetry's `bin` directory.
-On Unix it is located at `$HOME/.poetry/bin` and on Windows at `%USERPROFILE%\.poetry\bin`.
+{{< /step >}}
+{{< step >}}
+**Install Poetry (advanced)**
+
+By default, Poetry is installed into a platform- and user-specific directory:
 
-This directory will be automatically added to your `$PATH` environment variable,
-by appending a statement to your `$HOME/.profile` configuration (or equivalent files).
-If you do not feel comfortable with this, please pass the `--no-modify-path` flag to
-the installer and manually add the Poetry's `bin` directory to your path.
+- `~/Library/Application Support/pypoetry` on macOS.
+- `~/.local/share/pypoetry` on Linux/Unix.
+- `%APPDATA%\pypoetry` on Windows.
 
-Finally, open a new shell and type the following:
+If you wish to change this, you may define the `$POETRY_HOME` environment variable:
 
 ```bash
-poetry --version
+curl -sSL https://install.python-poetry.org | POETRY_HOME=/etc/poetry python3 -
 ```
 
-If you see something like `Poetry 0.12.0` then you are ready to use Poetry.
-If you decide Poetry isn't your thing, you can completely remove it from your system
-by running the installer again with the `--uninstall` option or by setting
-the `POETRY_UNINSTALL` environment variable before executing the installer.
+If you want to install prerelease versions, you can do so by passing the `--preview` option to the install script
+or by using the `$POETRY_PREVIEW` environment variable:
 
 ```bash
-python get-poetry.py --uninstall
-POETRY_UNINSTALL=1 python get-poetry.py
+curl -sSL https://install.python-poetry.org | python3 - --preview
+curl -sSL https://install.python-poetry.org | POETRY_PREVIEW=1 python3 -
 ```
 
-By default, Poetry is installed into the user's platform-specific home directory. If you wish to change this, you may define the `POETRY_HOME` environment variable:
+Similarly, if you want to install a specific version, you can use the `--version` option or the `$POETRY_VERSION`
+environment variable:
 
 ```bash
-POETRY_HOME=/etc/poetry python get-poetry.py
+curl -sSL https://install.python-poetry.org | python3 - --version 1.2.0
+curl -sSL https://install.python-poetry.org | POETRY_VERSION=1.2.0 python3 -
 ```
 
-If you want to install prerelease versions, you can do so by passing `--preview` to `get-poetry.py`
-or by using the `POETRY_PREVIEW` environment variable:
+You can also install Poetry from a `git` repository by using the `--git` option:
 
 ```bash
-python get-poetry.py --preview
-POETRY_PREVIEW=1 python get-poetry.py
-```
+curl -sSL https://install.python-poetry.org | python3 - --git https://github.com/python-poetry/poetry.git@master
+```
+{{< /step >}}
+{{< step >}}
+**Add Poetry to your PATH**
 
-Similarly, if you want to install a specific version, you can use `--version` or the `POETRY_VERSION`
-environment variable:
+The installer creates a `poetry` wrapper in a well-known, platform-specific directory:
+
+- `$HOME/.local/bin` on Unix.
+- `%APPDATA%\Python\Scripts` on Windows.
+
+If this directory is not present in your `$PATH`, you can add it in order to invoke Poetry
+as `poetry`.
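+
+On Unix, a sketch of this (assuming the default `$HOME/.local/bin` location) is:
+
+```bash
+# Make the poetry wrapper discoverable in the current session; add this
+# line to your shell profile (e.g. ~/.profile) to make it permanent.
+export PATH="$HOME/.local/bin:$PATH"
+```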
+
+Alternatively, the full path to the `poetry` binary can always be used:
+
+- `$POETRY_HOME/bin/poetry` if `$POETRY_HOME` is set.
+- `~/Library/Application Support/pypoetry/bin/poetry` on macOS.
+- `~/.local/share/pypoetry/bin/poetry` on Linux/Unix.
+- `%APPDATA%\pypoetry\Scripts\poetry` on Windows.
+
+{{< /step >}}
+{{< step >}}
+**Use Poetry**
+
+Once Poetry is installed and in your `$PATH`, you can execute the following:
 
 ```bash
-python get-poetry.py --version 0.12.0
-POETRY_VERSION=0.12.0 python get-poetry.py
+poetry --version
 ```
 
-{{% note %}}
-Note that the installer does not support Poetry releases < 0.12.0.
-{{% /note %}}
+If you see something like `Poetry (version 1.2.0)`, your install is ready to use!
+{{< /step >}}
+{{< step >}}
+**Update Poetry**
 
-{{% note %}}
-The setup script must be able to find one of following executables in your shell's path environment:
+Poetry is able to update itself when installed using the official installer.
 
-- `python` (which can be a py3 or py2 interpreter)
-- `python3`
-- `py.exe -3` (Windows)
-- `py.exe -2` (Windows)
-{{% /note %}}
+```bash
+poetry self update
+```
 
-### Alternative installation methods (not recommended)
+If you want to install pre-release versions, you can use the `--preview` option.
 
-{{% note %}}
-Using alternative installation methods will make Poetry always
-use the Python version for which it has been installed to create
-virtualenvs.
+```bash
+poetry self update --preview
+```
 
-So, you will need to install Poetry for each Python version you
-want to use and switch between them.
-{{% /note %}}
+And finally, if you want to install a specific version, you can pass it as an argument
+to `self update`.
 
-#### Installing with `pip`
+```bash
+poetry self update 1.2.0
+```
 
-Using `pip` to install Poetry is possible.
+{{% warning %}}
+Poetry `1.1` series releases are not able to update in-place to `1.2` or newer series releases.
+To migrate to newer releases, uninstall using your original install method, and then reinstall
+using the [methods above]({{< ref "#installation" >}} "Installation").
+{{% /warning %}}
+{{< /step >}}
+{{< step >}}
+**Uninstall Poetry**
+
+If you decide Poetry isn't your thing, you can completely remove it from your system
+by running the installer again with the `--uninstall` option or by setting
+the `POETRY_UNINSTALL` environment variable before executing the installer.
 
 ```bash
-pip install --user poetry
+curl -sSL https://install.python-poetry.org | python3 - --uninstall
+curl -sSL https://install.python-poetry.org | POETRY_UNINSTALL=1 python3 -
 ```
 
 {{% warning %}}
-Be aware that it will also install Poetry's dependencies
-which might cause conflicts with other packages.
+If you installed using the deprecated `get-poetry.py` script, you should use it to uninstall instead:
+
+```bash
+curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python3 - --uninstall
+```
 {{% /warning %}}
 
-#### Installing with `pipx`
+{{< /step >}}
+{{< /steps >}}
+
+{{< /tab >}}
+{{< tab tabID="installing-with-pipx" >}}
+
+Using [`pipx`](https://github.com/pypa/pipx) to install Poetry is also possible and fully supported.
 
-Using [`pipx`](https://github.com/cs01/pipx) to install Poetry is also possible. `pipx` is used to install Python CLI applications globally while still isolating them in virtual environments. This allows for clean upgrades and uninstalls. pipx supports Python 3.6 and later. If using an earlier version of Python, consider [pipsi](https://github.com/mitsuhiko/pipsi).
+`pipx` is used to install Python CLI applications globally while still isolating them in virtual environments.
+`pipx` will manage upgrades and uninstalls when used to install Poetry.
+
+{{< steps >}}
+{{< step >}}
+**Install Poetry**
 
 ```bash
 pipx install poetry
 ```
+{{< /step >}}
+{{< step >}}
+**Install Poetry (advanced)**
+
+`pipx` can install different versions of Poetry, using the same syntax as `pip`:
 
 ```bash
-pipx upgrade poetry
+pipx install poetry==1.2.0
 ```
 
+`pipx` can also install versions of Poetry in parallel, which allows for easy testing of alternate or prerelease
+versions. Each version is given a unique, user-specified suffix, which will be used to create a unique binary name:
+
 ```bash
-pipx uninstall poetry
+pipx install --suffix=@1.2.0 poetry==1.2.0
+poetry@1.2.0 --version
 ```
 
-[Github repository](https://github.com/cs01/pipx).
+```bash
+pipx install --suffix=@preview --pip-args=--pre poetry
+poetry@preview --version
+```
 
+Finally, `pipx` can install any valid [pip requirement spec](https://pip.pypa.io/en/stable/cli/pip_install/), which
+allows for installations of the development version from `git`, or even for local testing of pull requests:
 
-## Updating `poetry`
+```bash
+pipx install --suffix @master git+https://github.com/python-poetry/poetry.git@master
+pipx install --suffix @pr1234 git+https://github.com/python-poetry/poetry.git@refs/pull/1234/head
 
-Updating Poetry to the latest stable version is as simple as calling the `self update` command.
+```
+{{< /step >}}
+{{< step >}}
+**Update Poetry**
 
 ```bash
-poetry self update
+pipx upgrade poetry
 ```
-
-If you want to install pre-release versions, you can use the `--preview` option.
+{{< /step >}}
+{{< step >}}
+**Uninstall Poetry**
 
 ```bash
-poetry self update --preview
+pipx uninstall poetry
 ```
+{{< /step >}}
+{{< /steps >}}
 
-And finally, if you want to install a specific version, you can pass it as an argument
-to `self update`.
+{{< /tab >}}
+{{< tab tabID="installing-manually" >}}
+
+Poetry can be installed manually using `pip` and the `venv` module. By doing so you will essentially perform the steps carried
+out by the official installer. As this is an advanced installation method, these instructions are Unix-only and omit specific
+examples such as installing from `git`.
+
+The variable `$VENV_PATH` will be used to indicate the path at which the virtual environment was created.
 
 ```bash
-poetry self update 0.8.0
+python3 -m venv $VENV_PATH
+$VENV_PATH/bin/pip install -U pip setuptools
+$VENV_PATH/bin/pip install poetry
 ```
 
-{{% note %}}
-The `self update` command will only work if you used the recommended
-installer to install Poetry.
-{{% /note %}}
+Poetry will be available at `$VENV_PATH/bin/poetry` and can be invoked directly or symlinked elsewhere.
+
+To uninstall Poetry, simply delete the entire `$VENV_PATH` directory.
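+
+A sketch, assuming the same `$VENV_PATH` as above:
+
+```bash
+# Removing the virtual environment removes the Poetry install with it.
+rm -rf "$VENV_PATH"
+```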
+
+{{< /tab >}}
+{{< /tabs >}}
 
-{{% note %}}
-If you are still on poetry version < 1.0 use `poetry self:update` instead.
-{{% /note %}}
 
 ## Enable tab completion for Bash, Fish, or Zsh
 
 `poetry` supports generating completion scripts for Bash, Fish, and Zsh.
 See `poetry help completions` for full details, but the gist is as simple as using one of the following:
 
+### Bash
 
-```bash
-# Bash
-poetry completions bash > /etc/bash_completion.d/poetry.bash-completion
+#### Auto-loaded (recommended)
 
-# Bash (Homebrew)
-poetry completions bash > $(brew --prefix)/etc/bash_completion.d/poetry.bash-completion
+```bash
+poetry completions bash >> ~/.bash_completion
+```
 
-# Fish
-poetry completions fish > ~/.config/fish/completions/poetry.fish
+#### Lazy-loaded
 
-# Fish (Homebrew)
-poetry completions fish > (brew --prefix)/share/fish/vendor_completions.d/poetry.fish
+```bash
+poetry completions bash > ${XDG_DATA_HOME:-~/.local/share}/bash_completion/completions/poetry
+```
 
-# Zsh
-poetry completions zsh > ~/.zfunc/_poetry
+### Fish
 
-# Oh-My-Zsh
-mkdir $ZSH_CUSTOM/plugins/poetry
-poetry completions zsh > $ZSH_CUSTOM/plugins/poetry/_poetry
+```fish
+poetry completions fish > ~/.config/fish/completions/poetry.fish
+```
 
-# prezto
-poetry completions zsh > ~/.zprezto/modules/completion/external/src/_poetry
+### Zsh
 
+```zsh
+poetry completions zsh > ~/.zfunc/_poetry
 ```
 
-{{% note %}}
-You may need to restart your shell in order for the changes to take effect.
-{{% /note %}}
-
-For `zsh`, you must then add the following line in your `~/.zshrc` before `compinit`:
+You must then add the following lines to your `~/.zshrc`, if they are not already present:
 
 ```bash
 fpath+=~/.zfunc
+autoload -Uz compinit && compinit
 ```
 
-For `oh-my-zsh`, you must then enable poetry in your `~/.zshrc` plugins
+#### Oh My Zsh
+
+```zsh
+mkdir $ZSH_CUSTOM/plugins/poetry
+poetry completions zsh > $ZSH_CUSTOM/plugins/poetry/_poetry
+```
+You must then add `poetry` to your plugins array in `~/.zshrc`:
 
 ```text
 plugins(
@@ -232,3 +329,13 @@ plugins(
 	...
 	)
 ```
+
+#### prezto
+
+```zsh
+poetry completions zsh > ~/.zprezto/modules/completion/external/src/_poetry
+```
+
+{{% note %}}
+You may need to restart your shell in order for these changes to take effect.
+{{% /note %}}
diff --git a/vendor/poetry/docs/basic-usage.md b/vendor/poetry/docs/basic-usage.md
index 76f57c33..316be879 100644
--- a/vendor/poetry/docs/basic-usage.md
+++ b/vendor/poetry/docs/basic-usage.md
@@ -12,7 +12,7 @@ menu:
 # Basic usage
 
 For the basic usage introduction we will be installing `pendulum`, a datetime library.
-If you have not yet installed Poetry, refer to the [Introduction]({{< relref "." >}} "Introduction") chapter.
+If you have not yet installed Poetry, refer to the [Introduction]({{< relref "docs" >}} "Introduction") chapter.
 
 ## Project setup
 
@@ -27,12 +27,11 @@ This will create the `poetry-demo` directory with the following content:
 ```text
 poetry-demo
 ├── pyproject.toml
-├── README.rst
+├── README.md
 ├── poetry_demo
 │   └── __init__.py
 └── tests
-    ├── __init__.py
-    └── test_poetry_demo.py
+    └── __init__.py
 ```
 
 The `pyproject.toml` file is what is the most important here. This will orchestrate
@@ -44,14 +43,23 @@ name = "poetry-demo"
 version = "0.1.0"
 description = ""
 authors = ["Sébastien Eustace "]
+readme = "README.md"
+packages = [{include = "poetry_demo"}]
 
 [tool.poetry.dependencies]
-python = "*"
+python = "^3.7"
 
-[tool.poetry.dev-dependencies]
-pytest = "^3.4"
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
 ```
 
+Poetry assumes your project contains a package with the same name as `tool.poetry.name`.
+If this is not the case, populate `tool.poetry.packages` to specify your package or packages.
+
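+As an illustrative sketch (the `my_package` name and `src` directory are hypothetical), a package
+under a different name or in a `src` layout can be declared like so:
+
+```toml
+[tool.poetry]
+# ...
+packages = [
+    { include = "my_package", from = "src" },
+]
+```
+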
+See [Packages]({{< relref "pyproject#packages" >}}) for more information.
+
 ### Initialising a pre-existing project
 
 Instead of creating a new project, Poetry can be used to 'initialise' a pre-populated
@@ -68,13 +76,13 @@ If you want to add dependencies to your project, you can specify them in the `to
 
 ```toml
 [tool.poetry.dependencies]
-pendulum = "^1.4"
+pendulum = "^2.1"
 ```
 
 As you can see, it takes a mapping of **package names** and **version constraints**.
 
 Poetry uses this information to search for the right set of files in package "repositories" that you register
-in the `tool.poetry.repositories` section, or on [PyPI](https://pypi.org) by default.
+in the `tool.poetry.source` section, or on [PyPI](https://pypi.org) by default.
 
 Also, instead of modifying the `pyproject.toml` file by hand, you can use the `add` command.
 
@@ -82,14 +90,14 @@ Also, instead of modifying the `pyproject.toml` file by hand, you can use the `a
 $ poetry add pendulum
 ```
 
-It will automatically find a suitable version constraint **and install** the package and subdependencies.
+It will automatically find a suitable version constraint **and install** the package and sub-dependencies.
 
 
 ## Using your virtual environment
 
 By default, poetry creates a virtual environment in `{cache-dir}/virtualenvs` (`{cache-dir}\virtualenvs` on Windows).
 You can change the [`cache-dir`]({{< relref "configuration#cache-dir" >}} "cache-dir configuration documentation") value by editing the poetry config.
-Additionally, you can use the [`virtualenvs.in-project`]({{< relref "configuration#virtualenvsin-project" >}} "virtualenv.in-project configuration documentation") configuration variable
+Additionally, you can use the [`virtualenvs.in-project`]({{< relref "configuration#virtualenvsin-project" >}} "virtualenvs.in-project configuration documentation") configuration variable
 to create virtual environment within your project directory.
 
 
@@ -117,27 +125,28 @@ cannot modify the environment of the shell that it has been called from such
 that an activated virtual environment remains active after the Poetry command
 has completed execution.
 
-Therefore, Poetry has to create a sub-shell with the virtual envrionment activated
+Therefore, Poetry has to create a sub-shell with the virtual environment activated
 in order for the subsequent commands to run from within the virtual environment.
 {{% /note %}}
 
+
 Alternatively, to avoid creating a new shell, you can manually activate the
-virtual environment by running `source {path_to_venv}/bin/activate` (`source {path_to_venv}\Scripts\activate.bat` on Windows).
+virtual environment by running `source {path_to_venv}/bin/activate` (`{path_to_venv}\Scripts\activate.ps1` on Windows PowerShell).
 To get the path to your virtual environment run `poetry env info --path`.
 You can also combine these into a nice one-liner, `source $(poetry env info --path)/bin/activate`
 To deactivate this virtual environment simply use `deactivate`.
 
-|                   | POSIX Shell                                      | Windows                                     | Exit/Deactivate |
-|-------------------|------------------------------------------------|---------------------------------------------|-----------------|
-| New Shell         | `poetry shell`                                 | `poetry shell`                              | `exit`          |
-| Manual Activation | `source {path_to_venv}/bin/activate`           | `source {path_to_venv}\Scripts\activate.bat`| `deactivate`    |
-| One-liner         | ```source`poetry env info --path`/bin/activate```|                                             | `deactivate`    |
+|                   | POSIX Shell                                     | Windows (PowerShell)                  | Exit/Deactivate |
+| ----------------- | ----------------------------------------------- | ------------------------------------- | --------------- |
+| New Shell         | `poetry shell`                                  | `poetry shell`                        | `exit`          |
+| Manual Activation | `source {path_to_venv}/bin/activate`            | `{path_to_venv}\Scripts\activate.ps1` | `deactivate`    |
+| One-liner         | `source $(poetry env info --path)/bin/activate` |                                       | `deactivate`    |
 
 
 ### Version constraints
 
-In our example, we are requesting the `pendulum` package with the version constraint `^1.4`.
-This means any version greater or equal to 1.4.0 and less than 2.0.0 (`>=1.4.0 <2.0.0`).
+In our example, we are requesting the `pendulum` package with the version constraint `^2.1`.
+This means any version greater or equal to 2.1.0 and less than 3.0.0 (`>=2.1.0 <3.0.0`).
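+
+Written out explicitly in `pyproject.toml`, the same caret constraint is equivalent to:
+
+```toml
+[tool.poetry.dependencies]
+pendulum = ">=2.1.0,<3.0.0"  # the expanded form of "^2.1"
+```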
 
 Please read [Dependency specification]({{< relref "dependency-specification" >}} "Dependency specification documentation") for more in-depth information on versions,
 how versions relate to each other, and on the different ways you can specify dependencies.
@@ -158,7 +167,7 @@ for the version constraint you have specified.
 
 ## Installing dependencies
 
-To install the defined dependencies for your project, just run the `install` command.
+To install the defined dependencies for your project, just run the [`install`]({{< relref "cli#install" >}}) command.
 
 ```bash
 poetry install
@@ -171,7 +180,7 @@ When you run this command, one of two things may happen:
 If you have never run the command before and there is also no `poetry.lock` file present,
 Poetry simply resolves all dependencies listed in your `pyproject.toml` file and downloads the latest version of their files.
 
-When Poetry has finished installing, it writes all of the packages and the exact versions of them that it downloaded to the `poetry.lock` file,
+When Poetry has finished installing, it writes all the packages and their exact versions that it downloaded to the `poetry.lock` file,
 locking the project to those specific versions.
 You should commit the `poetry.lock` file to your project repo so that all people working on the project are locked to the same versions of dependencies (more below).
 
@@ -186,7 +195,7 @@ Either way, running `install` when a `poetry.lock` file is present resolves and
 but Poetry uses the exact versions listed in `poetry.lock` to ensure that the package versions are consistent for everyone working on your project.
 As a result you will have all dependencies requested by your `pyproject.toml` file,
 but they may not all be at the very latest available versions
-(some of the dependencies listed in the `poetry.lock` file may have released newer versions since the file was created).
+(some dependencies listed in the `poetry.lock` file may have released newer versions since the file was created).
 This is by design, it ensures that your project does not break because of unexpected changes in dependencies.
 
 ### Commit your `poetry.lock` file to version control
@@ -206,7 +215,7 @@ For libraries it is not necessary to commit the lock file.
 
 ### Installing dependencies only
 
-The current project is installed in [editable](https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs) mode by default.
+The current project is installed in [editable](https://pip.pypa.io/en/stable/topics/local-project-installs/) mode by default.
 
 If you want to install the dependencies only, run the `install` command with the `--no-root` flag:
 
diff --git a/vendor/poetry/docs/cli.md b/vendor/poetry/docs/cli.md
index fb353ebc..3ae708c4 100644
--- a/vendor/poetry/docs/cli.md
+++ b/vendor/poetry/docs/cli.md
@@ -9,6 +9,7 @@ menu:
     weight: 30
 ---
 
+
 # Commands
 
 You've already learned how to use the command-line interface to do some things.
@@ -25,6 +26,9 @@ then `--help` combined with any of those can give you more information.
 * `--ansi`: Force ANSI output.
 * `--no-ansi`: Disable ANSI output.
 * `--version (-V)`: Display this application version.
+* `--no-interaction (-n)`: Do not ask any interactive question.
+* `--no-plugins`: Disables plugins.
+* `--no-cache`: Disables Poetry source caches.
 
 
 ## new
@@ -41,12 +45,11 @@ will create a folder as follows:
 ```text
 my-package
 ├── pyproject.toml
-├── README.rst
+├── README.md
 ├── my_package
 │   └── __init__.py
 └── tests
-    ├── __init__.py
-    └── test_my_package.py
+    └── __init__.py
 ```
 
 If you want to name your project differently than the folder, you can pass
@@ -67,15 +70,44 @@ That will create a folder structure as follows:
 ```text
 my-package
 ├── pyproject.toml
-├── README.rst
+├── README.md
 ├── src
 │   └── my_package
 │       └── __init__.py
 └── tests
-    ├── __init__.py
-    └── test_my_package.py
+    └── __init__.py
+```
+
+The `--name` option is smart enough to detect namespace packages and create
+the required structure for you.
+
+```bash
+poetry new --src --name my.package my-package
+```
+
+will create the following structure:
+
+```text
+my-package
+├── pyproject.toml
+├── README.md
+├── src
+│   └── my
+│       └── package
+│           └── __init__.py
+└── tests
+    └── __init__.py
 ```
 
+### Options
+
+* `--name`: Set the resulting package name.
+* `--src`: Use the src layout for the project.
+* `--readme`: Specify the readme file extension. Default is `md`. If you intend to publish to PyPI
+  keep the [recommendations for a PyPI-friendly README](https://packaging.python.org/en/latest/guides/making-a-pypi-friendly-readme/)
+  in mind.
+
+
 ## init
 
 This command will help you create a `pyproject.toml` file interactively
@@ -112,29 +144,64 @@ This ensures that everyone using the library will get the same versions of the d
 
 If there is no `poetry.lock` file, Poetry will create one after dependency resolution.
 
-You can specify to the command that you do not want the development dependencies installed by passing
-the `--no-dev` option.
+If you want to exclude one or more dependency groups from the installation, you can use
+the `--without` option.
+
+```bash
+poetry install --without test,docs
+```
+
+{{% note %}}
+The `--no-dev` option is now deprecated. You should use the `--without dev` notation instead.
+{{% /note %}}
+
+You can also select optional dependency groups with the `--with` option.
 
 ```bash
-poetry install --no-dev
+poetry install --with test,docs
 ```
 
-If you want to remove old dependencies no longer present in the lock file, use the
-`--remove-untracked` option.
+It's also possible to install only specific dependency groups by using the `--only` option.
 
 ```bash
-poetry install --remove-untracked
+poetry install --only test,docs
+```
+
+To only install the project itself with no dependencies, use the `--only-root` flag.
+
+```bash
+poetry install --only-root
+```
+
+See [Dependency groups]({{< relref "managing-dependencies#dependency-groups" >}}) for more information
+about dependency groups.
+
+If you want to synchronize your environment – and ensure it matches the lock file – use the
+`--sync` option.
+
+```bash
+poetry install --sync
+```
+
+The `--sync` option can be combined with group-related options:
+
+```bash
+poetry install --without dev --sync
+poetry install --with docs --sync
+poetry install --only dev
 ```
 
 You can also specify the extras you want installed
-by passing the `-E|--extras` option (See [Extras]({{< relref "pyproject#extras" >}}) for more info)
+by passing the `-E|--extras` option (See [Extras]({{< relref "pyproject#extras" >}}) for more info).
+Pass `--all-extras` to install all defined extras for a project.
 
 ```bash
 poetry install --extras "mysql pgsql"
 poetry install -E mysql -E pgsql
+poetry install --all-extras
 ```
 
-By default `poetry` will install your project's package everytime you run `install`:
+By default `poetry` will install your project's package every time you run `install`:
 
 ```bash
 $ poetry install
@@ -153,9 +220,22 @@ poetry install --no-root
 
 ### Options
 
-* `--no-dev`: Do not install dev dependencies.
+* `--without`: The dependency groups to ignore.
+* `--with`: The optional dependency groups to include.
+* `--only`: The only dependency groups to include.
+* `--only-root`: Install only the root project, exclude all dependencies.
+* `--sync`: Synchronize the environment with the locked packages and the specified groups.
 * `--no-root`: Do not install the root package (your project).
+* `--dry-run`: Output the operations but do not execute anything (implicitly enables --verbose).
 * `--extras (-E)`: Features to install (multiple values allowed).
+* `--all-extras`: Install all extra features (conflicts with --extras).
+* `--no-dev`: Do not install dev dependencies. (**Deprecated**)
+* `--remove-untracked`: Remove dependencies not present in the lock file. (**Deprecated**)
+
+{{% note %}}
+When `--only` is specified, `--with` and `--without` options are ignored.
+{{% /note %}}
+
 
 ## update
 
@@ -181,10 +261,17 @@ update the constraint, for example `^2.3`. You can do this using the `add` comma
 
 ### Options
 
+* `--without`: The dependency groups to ignore.
+* `--with`: The optional dependency groups to include.
+* `--only`: The only dependency groups to include.
 * `--dry-run` : Outputs the operations but will not execute anything (implicitly enables --verbose).
-* `--no-dev` : Do not install dev dependencies.
+* `--no-dev` : Do not update the development dependencies. (**Deprecated**)
 * `--lock` : Do not perform install (only update the lockfile).
 
+{{% note %}}
+When `--only` is specified, `--with` and `--without` options are ignored.
+{{% /note %}}
+
 ## add
 
 The `add` command adds required packages to your `pyproject.toml` and installs them.
@@ -196,22 +283,41 @@ poetry will choose a suitable one based on the available package versions.
 poetry add requests pendulum
 ```
 
-You also can specify a constraint when adding a package, like so:
+You can also specify a constraint when adding a package:
 
 ```bash
+# Allow >=2.0.5, <3.0.0 versions
 poetry add pendulum@^2.0.5
+
+# Allow >=2.0.5, <2.1.0 versions
+poetry add pendulum@~2.0.5
+
+# Allow >=2.0.5 versions, without upper bound
 poetry add "pendulum>=2.0.5"
+
+# Allow only 2.0.5 version
+poetry add pendulum==2.0.5
 ```
 
+{{% note %}}
+See the [Dependency specification]({{< relref "dependency-specification#using-the--operator" >}}) page for more information about the `@` operator.
+{{% /note %}}
+
 If you try to add a package that is already present, you will get an error.
 However, if you specify a constraint, like above, the dependency will be updated
-by using the specified constraint. If you want to get the latest version of an already
-present dependency you can use the special `latest` constraint:
+by using the specified constraint.
+
+If you want to get the latest version of an already
+present dependency, you can use the special `latest` constraint:
 
 ```bash
 poetry add pendulum@latest
 ```
 
+{{% note %}}
+See the [Dependency specification]({{< relref "dependency-specification" >}}) for more information on setting the version constraints for a package.
+{{% /note %}}
+
 You can also add `git` dependencies:
 
 ```bash
@@ -222,6 +328,9 @@ or use ssh instead of https:
 
 ```bash
 poetry add git+ssh://git@github.com/sdispater/pendulum.git
+
+# or alternatively:
+poetry add git+ssh://git@github.com:sdispater/pendulum.git
 ```
 
 If you need to checkout a specific branch, tag or revision,
@@ -230,9 +339,19 @@ you can specify it when using `add`:
 ```bash
 poetry add git+https://github.com/sdispater/pendulum.git#develop
 poetry add git+https://github.com/sdispater/pendulum.git#2.0.5
+
+# or using SSH instead:
+poetry add git+ssh://git@github.com/sdispater/pendulum.git#develop
+poetry add git+ssh://git@github.com/sdispater/pendulum.git#2.0.5
 ```
 
-or make them point to a local directory or file:
+or reference a subdirectory:
+
+```bash
+poetry add git+https://github.com/myorg/mypackage_with_subdirs.git@main#subdirectory=subdir
+```
+
+You can also add a local directory or file:
 
 ```bash
 poetry add ./my-package/
@@ -240,7 +359,14 @@ poetry add ../my-package/dist/my-package-0.1.0.tar.gz
 poetry add ../my-package/dist/my_package-0.1.0.whl
 ```
 
-If you want the dependency to be installed in editable mode you can specify it in the `pyproject.toml` file. It means that changes in the local directory will be reflected directly in environment.
+If you want the dependency to be installed in editable mode you can use the `--editable` option.
+
+```bash
+poetry add --editable ./my-package/
+poetry add --editable git+ssh://github.com/sdispater/pendulum.git#develop
+```
+
+Alternatively, you can specify it in the `pyproject.toml` file. This means that changes in the local directory will be reflected directly in the environment.
 
 ```toml
 [tool.poetry.dependencies]
@@ -248,7 +374,7 @@ my-package = {path = "../my/path", develop = true}
 ```
 
 {{% note %}}
-Before poetry 1.1 path dependencies were installed in editable mode by default. You should always set the `develop` attribute explicit,
+Before poetry 1.1 path dependencies were installed in editable mode by default. You should always set the `develop` attribute explicitly,
 to make sure the behavior is the same for all poetry versions.
 {{% /note %}}
 
@@ -256,18 +382,37 @@ If the package(s) you want to install provide extras, you can specify them
 when adding the package:
 
 ```bash
-poetry add requests[security,socks]
+poetry add "requests[security,socks]"
 poetry add "requests[security,socks]~=2.22.0"
 poetry add "git+https://github.com/pallets/flask.git@1.1.1[dotenv,dev]"
 ```
 
+{{% warning %}}
+Some shells may treat square brackets (`[` and `]`) as special characters. It is suggested to always quote arguments containing these characters to prevent unexpected shell expansion.
+{{% /warning %}}
+
+If you want to add a package to a specific group of dependencies, you can use the `--group (-G)` option:
+
+```bash
+poetry add mkdocs --group docs
+```
+
+See [Dependency groups]({{< relref "managing-dependencies#dependency-groups" >}}) for more information
+about dependency groups.
+
 ### Options
 
-* `--dev (-D)`: Add package as development dependency.
-* `--path`: The path to a dependency.
-* `--optional` : Add as an optional dependency.
-* `--dry-run` : Outputs the operations but will not execute anything (implicitly enables --verbose).
-* `--lock` : Do not perform install (only update the lockfile).
+* `--group (-G)`: The group to add the dependency to.
+* `--dev (-D)`: Add package as development dependency. (**Deprecated**)
+* `--editable (-e)`: Add vcs/path dependencies as editable.
+* `--extras (-E)`: Extras to activate for the dependency. (multiple values allowed)
+* `--optional`: Add as an optional dependency.
+* `--python`: Python version for which the dependency must be installed.
+* `--platform`: Platforms for which the dependency must be installed.
+* `--source`: Name of the source to use to install the package.
+* `--allow-prereleases`: Accept prereleases.
+* `--dry-run`: Output the operations but do not execute anything (implicitly enables --verbose).
+* `--lock`: Do not perform install (only update the lockfile).
 
 
 ## remove
@@ -279,9 +424,19 @@ list of installed packages.
 poetry remove pendulum
 ```
 
+If you want to remove a package from a specific group of dependencies, you can use the `--group (-G)` option:
+
+```bash
+poetry remove mkdocs --group docs
+```
+
+See [Dependency groups]({{< relref "managing-dependencies#dependency-groups" >}}) for more information
+about dependency groups.
+
 ### Options
 
-* `--dev (-D)`: Removes a package from the development dependencies.
+* `--group (-G)`: The group to remove the dependency from.
+* `--dev (-D)`: Removes a package from the development dependencies. (**Deprecated**)
 * `--dry-run` : Outputs the operations but will not execute anything (implicitly enables --verbose).
 
 
@@ -302,18 +457,30 @@ name        : pendulum
 version     : 1.4.2
 description : Python datetimes made easy
 
-dependencies:
+dependencies
  - python-dateutil >=2.6.1
  - tzlocal >=1.4
  - pytzdata >=2017.2.2
+
+required by
+ - calendar >=1.4.0
 ```
 
 ### Options
 
-* `--no-dev`: Do not list the dev dependencies.
+* `--without`: The dependency groups to ignore.
+* `--why`: When showing the full list, or a `--tree` for a single package, display why a package is included.
+* `--with`: The optional dependency groups to include.
+* `--only`: The only dependency groups to include.
+* `--no-dev`: Do not list the dev dependencies. (**Deprecated**)
 * `--tree`: List the dependencies as a tree.
 * `--latest (-l)`: Show the latest version.
 * `--outdated (-o)`: Show the latest version but only for packages that are outdated.
+* `--all (-a)`: Show all packages (even those not compatible with current system).
+
+{{% note %}}
+When `--only` is specified, `--with` and `--without` options are ignored.
+{{% /note %}}
 
 
 ## build
@@ -348,7 +515,11 @@ It can also build the package if you pass it the `--build` option.
 Should match a repository name set by the [`config`](#config) command.
 * `--username (-u)`: The username to access the repository.
 * `--password (-p)`: The password to access the repository.
+* `--cert`: Certificate authority to access the repository.
+* `--client-cert`: Client certificate to access the repository.
+* `--build`: Build the package before publishing.
 * `--dry-run`: Perform all actions except upload the package.
+* `--skip-existing`: Ignore errors from files already existing in the repository.
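+
+As a hedged sketch combining these options (the `my-repo` name is a placeholder for a repository
+configured beforehand via the [`config`](#config) command):
+
+```bash
+# Build the distribution files, then upload them to the configured
+# repository, skipping any files that already exist there.
+poetry publish --build -r my-repo --skip-existing
+```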
 
 ## config
 
@@ -371,6 +542,7 @@ See [Configuration]({{< relref "configuration" >}}) for all available settings.
 
 * `--unset`: Remove the configuration element named by `setting-key`.
 * `--list`: Show the list of current config variables.
+* `--local`: Set/Get settings that are specific to a project (in the local configuration file `poetry.toml`).
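+
+For example, to pin a setting for the current project only (written to `poetry.toml`), using the
+`virtualenvs.in-project` setting mentioned elsewhere in these docs:
+
+```bash
+# Stored in the project's poetry.toml instead of the global configuration.
+poetry config virtualenvs.in-project true --local
+```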
 
 ## run
 
@@ -408,11 +580,19 @@ If one doesn't exist yet, it will be created.
 poetry shell
 ```
 
+Note that this command starts a new shell and activates the virtual environment.
+
+As such, `exit` should be used to properly exit the shell and the virtual environment instead of `deactivate`.
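+
+A minimal session illustrating the difference:
+
+```bash
+poetry shell   # spawns a sub-shell with the virtual environment activated
+# ... work inside the environment ...
+exit           # leaves the sub-shell, deactivating the environment with it
+```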
+
 ## check
 
 The `check` command validates the structure of the `pyproject.toml` file
 and returns a detailed report if there are any errors.
 
+{{% note %}}
+This command is also available as a pre-commit hook. See [pre-commit hooks]({{< relref "pre-commit-hooks#poetry-check">}}) for more information.
+{{% /note %}}
+
 ```bash
 poetry check
 ```
@@ -431,6 +611,7 @@ This command locks (without installing) the dependencies specified in `pyproject
 
 {{% note %}}
 By default, this will lock all dependencies to the latest available compatible versions. To only refresh the lock file, use the `--no-update` option.
+This command is also available as a pre-commit hook. See [pre-commit hooks]({{< relref "pre-commit-hooks#poetry-lock">}}) for more information.
 {{% /note %}}
 
 ```bash
@@ -439,6 +620,7 @@ poetry lock
 
 ### Options
 
+* `--check`: Verify that `poetry.lock` is consistent with `pyproject.toml`.
 * `--no-update`: Do not update locked versions, only refresh lock file.
 
 ## version
@@ -447,26 +629,35 @@ This command shows the current version of the project or bumps the version of
 the project and writes the new version back to `pyproject.toml` if a valid
 bump rule is provided.
 
-The new version should ideally be a valid [semver](https://semver.org/) string or a valid bump rule:
-`patch`, `minor`, `major`, `prepatch`, `preminor`, `premajor`, `prerelease`.
+The new version should be a valid [PEP 440](https://peps.python.org/pep-0440/)
+string or a valid bump rule: `patch`, `minor`, `major`, `prepatch`, `preminor`,
+`premajor`, `prerelease`.
+
+{{% note %}}
+
+If you would like to use semantic versioning for your project, please see
+[here]({{< relref "libraries#versioning" >}}).
+
+{{% /note %}}
 
 The table below illustrates the effect of these rules with concrete examples.
 
-| rule       |        before | after         |
-|------------|---------------|---------------|
-| major      |         1.3.0 | 2.0.0         |
-| minor      |         2.1.4 | 2.2.0         |
-| patch      |         4.1.1 | 4.1.2         |
-| premajor   |         1.0.2 | 2.0.0-alpha.0 |
-| preminor   |         1.0.2 | 1.1.0-alpha.0 |
-| prepatch   |         1.0.2 | 1.0.3-alpha.0 |
-| prerelease |         1.0.2 | 1.0.3-alpha.0 |
-| prerelease | 1.0.3-alpha.0 | 1.0.3-alpha.1 |
-| prerelease |  1.0.3-beta.0 | 1.0.3-beta.1  |
+| rule       | before  | after   |
+| ---------- | ------- | ------- |
+| major      | 1.3.0   | 2.0.0   |
+| minor      | 2.1.4   | 2.2.0   |
+| patch      | 4.1.1   | 4.1.2   |
+| premajor   | 1.0.2   | 2.0.0a0 |
+| preminor   | 1.0.2   | 1.1.0a0 |
+| prepatch   | 1.0.2   | 1.0.3a0 |
+| prerelease | 1.0.2   | 1.0.3a0 |
+| prerelease | 1.0.3a0 | 1.0.3a1 |
+| prerelease | 1.0.3b0 | 1.0.3b1 |
 
-## Options
+### Options
 
 * `--short (-s)`: Output the version number only.
+* `--dry-run`: Do not update the `pyproject.toml` file.
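+
+For example, to preview a patch bump without writing to `pyproject.toml`:
+
+```bash
+# Prints the would-be version (e.g. 4.1.1 -> 4.1.2) without saving it.
+poetry version patch --dry-run
+```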
 
 ## export
 
@@ -477,7 +668,13 @@ poetry export -f requirements.txt --output requirements.txt
 ```
 
 {{% note %}}
-Only the `requirements.txt` format is currently supported.
+This command is provided by the [Export Poetry Plugin](https://github.com/python-poetry/poetry-plugin-export)
+and is also available as a pre-commit hook. See [pre-commit hooks]({{< relref "pre-commit-hooks#poetry-export" >}}) for more information.
+{{% /note %}}
+
+{{% note %}}
+Unlike the `install` command, this command only includes the project's dependencies from the implicit `main`
+group (those defined in `tool.poetry.dependencies`) when used without specifying any options.
 {{% /note %}}
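+
+To include other groups in the export, use the group options listed below; for example (assuming your
+project defines a `dev` group):
+
+```bash
+# Export the main group plus the dev group to a separate file.
+poetry export --with dev -f requirements.txt --output requirements-dev.txt
+```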
 
 ### Options
@@ -486,9 +683,13 @@ Only the `requirements.txt` format is currently supported.
   Currently, only `requirements.txt` is supported.
 * `--output (-o)`: The name of the output file.  If omitted, print to standard
   output.
-* `--dev`: Include development dependencies.
+* `--dev`: Include development dependencies. (**Deprecated**)
 * `--extras (-E)`: Extra sets of dependencies to include.
+* `--without`: The dependency groups to ignore.
+* `--with`: The optional dependency groups to include.
+* `--only`: The only dependency groups to include.
 * `--without-hashes`: Exclude hashes from the exported file.
+* `--without-urls`: Exclude source repository urls from the exported file.
 * `--with-credentials`: Include credentials for extra indices.
 
 ## env
@@ -509,3 +710,254 @@ The `cache list` command lists Poetry's available caches.
 ```bash
 poetry cache list
 ```
+
+### cache clear
+
+The `cache clear` command removes packages from a cached repository.
+
+For example, to clear the whole cache of packages from the `pypi` repository, run:
+
+```bash
+poetry cache clear pypi --all
+```
+
+To remove only a specific package from a cache, specify the cache entry in the form `cache:package:version`:
+
+```bash
+poetry cache clear pypi:requests:2.24.0
+```
+
+## source
+
+The `source` namespace groups subcommands that manage repository sources for a Poetry project.
+
+### source add
+
+The `source add` command adds source configuration to the project.
+
+For example, to add the `pypi-test` source, you can run:
+
+```bash
+poetry source add pypi-test https://test.pypi.org/simple/
+```
+
+{{% note %}}
+You cannot use the name `pypi` as it is reserved for use by the default PyPI source.
+{{% /note %}}
+
+#### Options
+
+* `--default`: Set this source as the [default]({{< relref "repositories#disabling-the-pypi-repository" >}}) (disable PyPI).
+* `--secondary`: Set this source as a [secondary]({{< relref "repositories#install-dependencies-from-a-private-repository" >}}) source.
+
+{{% note %}}
+You cannot set a source as both `default` and `secondary`.
+{{% /note %}}
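+
+For example, a sketch of adding a private repository as a secondary source (the name and URL are
+placeholders):
+
+```bash
+# Secondary sources are searched in addition to PyPI.
+poetry source add --secondary my-private https://example.com/simple/
+```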
+
+### source show
+
+The `source show` command displays information on all configured sources for the project.
+
+```bash
+poetry source show
+```
+
+Optionally, you can show information about one or more sources by specifying their names.
+
+```bash
+poetry source show pypi-test
+```
+
+{{% note %}}
+This command will only show sources configured via the `pyproject.toml` and does not include PyPI.
+{{% /note %}}
+
+### source remove
+
+The `source remove` command removes a configured source from your `pyproject.toml`.
+
+```bash
+poetry source remove pypi-test
+```
+
+## about
+
+The `about` command displays global information about Poetry, including the current version and version of `poetry-core`.
+
+```bash
+poetry about
+```
+
+## help
+
+The `help` command displays global help, or help for a specific command.
+
+To display global help:
+
+```bash
+poetry help
+```
+
+To display help for a specific command, for instance `show`:
+
+```bash
+poetry help show
+```
+
+{{% note %}}
+The `--help` option can also be passed to any command to get help for a specific command.
+
+For instance:
+
+```bash
+poetry show --help
+```
+{{% /note %}}
+
+## list
+
+The `list` command displays all the available Poetry commands.
+
+```bash
+poetry list
+```
+
+## self
+
+The `self` namespace groups subcommands to manage the Poetry installation itself.
+
+{{% note %}}
+Use of these commands will create the required `pyproject.toml` and `poetry.lock` files in your
+[configuration directory]({{< relref "configuration" >}}).
+{{% /note %}}
+
+### self add
+
+The `self add` command installs Poetry plugins and makes them available at runtime. Additionally, it can
+also be used to upgrade Poetry's own dependencies or inject additional packages into the runtime
+environment.
+
+{{% note %}}
+The `self add` command works exactly like the [`add` command](#add). However, it differs in that the
+packages it manages are for Poetry's runtime environment.
+
+The package specification formats supported by the `self add` command are the same as the ones supported
+by the [`add` command](#add).
+{{% /note %}}
+
+For example, to install the `poetry-plugin-export` plugin, you can run:
+
+```bash
+poetry self add poetry-plugin-export
+```
+
+To update to the latest `poetry-core` version, you can run:
+
+```bash
+poetry self add poetry-core@latest
+```
+
+To add a keyring provider `artifacts-keyring`, you can run:
+
+```bash
+poetry self add artifacts-keyring
+```
+
+#### Options
+
+* `--editable (-e)`: Add vcs/path dependencies as editable.
+* `--extras (-E)`: Extras to activate for the dependency. (multiple values allowed)
+* `--allow-prereleases`: Accept prereleases.
+* `--source`: Name of the source to use to install the package.
+* `--dry-run`: Output the operations but do not execute anything (implicitly enables --verbose).
+
+### self update
+
+The `self update` command updates the Poetry version in its current runtime environment.
+
+{{% note %}}
+The `self update` command works exactly like the [`update` command](#update). However,
+it differs in that the packages it manages are for Poetry's runtime environment.
+{{% /note %}}
+
+```bash
+poetry self update
+```
+
+#### Options
+
+* `--preview`: Allow the installation of pre-release versions.
+* `--dry-run`: Output the operations but do not execute anything (implicitly enables --verbose).
+
+### self lock
+
+The `self lock` command reads this Poetry installation's system `pyproject.toml` file. The system
+dependencies are locked in the corresponding `poetry.lock` file.
+
+```bash
+poetry self lock
+```
+
+#### Options
+
+* `--check`: Verify that `poetry.lock` is consistent with `pyproject.toml`.
+* `--no-update`: Do not update locked versions, only refresh lock file.
+
+### self show
+
+The `self show` command behaves similarly to the [`show` command](#show), but
+works within Poetry's runtime environment. It lists all packages installed within
+the Poetry install environment.
+
+To show only the additional packages that have been added via `self add`, together with their
+dependencies, use `self show --addons`.
+
+```bash
+poetry self show
+```
+
+#### Options
+
+* `--addons`: List only add-on packages installed.
+* `--tree`: List the dependencies as a tree.
+* `--latest (-l)`: Show the latest version.
+* `--outdated (-o)`: Show the latest version but only for packages that are outdated.
+
+### self show plugins
+
+The `self show plugins` command lists all the currently installed plugins.
+
+```bash
+poetry self show plugins
+```
+
+### self remove
+
+The `self remove` command removes an installed add-on package.
+
+```bash
+poetry self remove poetry-plugin-export
+```
+
+#### Options
+
+* `--dry-run`: Output the operations but do not execute anything (implicitly enables `--verbose`).
+
+### self install
+
+The `self install` command ensures all additional packages specified are installed in the current
+runtime environment.
+
+{{% note %}}
+The `self install` command works similarly to the [`install` command](#install). However,
+it differs in that the packages it manages are for Poetry's runtime environment.
+{{% /note %}}
+
+```bash
+poetry self install --sync
+```
+
+#### Options
+
+* `--sync`: Synchronize the environment with the locked packages and the specified groups.
+* `--dry-run`: Output the operations but do not execute anything (implicitly enables --verbose).
diff --git a/vendor/poetry/docs/configuration.md b/vendor/poetry/docs/configuration.md
index c89af267..9b601677 100644
--- a/vendor/poetry/docs/configuration.md
+++ b/vendor/poetry/docs/configuration.md
@@ -11,7 +11,7 @@ menu:
 
 # Configuration
 
-Poetry can be configured via the `config` command ([see more about its usage here]({{< relref "docs/cli#config" >}} "config command documentation"))
+Poetry can be configured via the `config` command ([see more about its usage here]({{< relref "cli#config" >}} "config command documentation"))
 or directly in the `config.toml` file that will automatically be created when you first run that command.
 This file can typically be found in one of the following directories:
 
@@ -30,6 +30,15 @@ by passing the `--local` option to the `config` command.
 poetry config virtualenvs.create false --local
 ```
 
+{{% note %}}
+Your local configuration of the Poetry application is stored in the `poetry.toml` file,
+which is separate from `pyproject.toml`.
+{{% /note %}}
+
+{{% warning %}}
+Be mindful about checking this file into your repository since it may contain user-specific or sensitive information.
+{{% /warning %}}
+
 ## Listing the current configuration
 
 To list the current configuration you can use the `--list` option
@@ -45,7 +54,13 @@ which will give you something similar to this:
 cache-dir = "/path/to/cache/directory"
 virtualenvs.create = true
 virtualenvs.in-project = null
+virtualenvs.options.always-copy = true
+virtualenvs.options.no-pip = false
+virtualenvs.options.no-setuptools = false
+virtualenvs.options.system-site-packages = false
 virtualenvs.path = "{cache-dir}/virtualenvs"  # /path/to/cache/directory/virtualenvs
+virtualenvs.prefer-active-python = false
+virtualenvs.prompt = "{project_name}-py{python_version}"
 ```
 
 ## Displaying a single configuration setting
@@ -100,12 +115,39 @@ This also works for secret settings, like credentials:
 export POETRY_HTTP_BASIC_MY_REPOSITORY_PASSWORD=secret
 ```
 
+## Default Directories
+
+Poetry uses the following default directories:
+
+### Config Directory
+
+- Linux: `$XDG_CONFIG_HOME/pypoetry` or `~/.config/pypoetry`
+- Windows: `%APPDATA%\pypoetry`
+- MacOS: `~/Library/Preferences/pypoetry`
+
+You can override the Config directory by setting the `POETRY_CONFIG_DIR` environment variable.
+
+### Data Directory
+
+- Linux: `$XDG_DATA_HOME/pypoetry` or `~/.local/share/pypoetry`
+- Windows: `%APPDATA%\pypoetry`
+- MacOS: `~/Library/Application Support/pypoetry`
+
+You can override the Data directory by setting the `POETRY_DATA_DIR` or `POETRY_HOME` environment variables. If `POETRY_HOME` is set, it will be given higher priority.
+
+### Cache Directory
+
+- Linux: `$XDG_CACHE_HOME/pypoetry` or `~/.cache/pypoetry`
+- Windows: `%APPDATA%\pypoetry\Cache`
+- MacOS: `~/Library/Caches/pypoetry`
+
+You can override the Cache directory by setting the `POETRY_CACHE_DIR` environment variable.
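+
+For example, a sketch overriding these locations for a single shell session (paths are hypothetical):
+
+```bash
+export POETRY_CONFIG_DIR=~/custom/poetry-config
+export POETRY_CACHE_DIR=/tmp/poetry-cache
+```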
 
 ## Available settings
 
 ### `cache-dir`
 
-**Type**: string
+**Type**: `string`
 
 The path to the cache directory used by Poetry.
 
@@ -115,51 +157,248 @@ Defaults to one of the following directories:
 - Windows: `C:\Users\\AppData\Local\pypoetry\Cache`
 - Unix:    `~/.cache/pypoetry`
 
-### `installer.parallel`
+### `experimental.system-git-client`
 
-**Type**: boolean
+**Type**: `boolean`
 
-Use parallel execution when using the new (`>=1.1.0`) installer.
-Defaults to `true`.
+**Default**: `false`
+
+*Introduced in 1.2.0*
+
+Use the system git client backend for git-related tasks.
+
+Poetry uses `dulwich` by default for git-related tasks, so as not to rely on the availability of a git client.
+
+If you encounter any problems with it, set this to `true` to use the system git backend.
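+
+For example:
+
+```bash
+poetry config experimental.system-git-client true
+```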
+
+### `installer.max-workers`
+
+**Type**: `int`
+
+**Default**: `number_of_cores + 4`
+
+*Introduced in 1.2.0*
+
+Set the maximum number of workers while using the parallel installer.
+The `number_of_cores` is determined by `os.cpu_count()`.
+If this raises a `NotImplementedError` exception, `number_of_cores` is assumed to be 1.
+
+If this configuration parameter is set to a value greater than `number_of_cores + 4`,
+the maximum number of workers is still limited to `number_of_cores + 4`.
 
 {{% note %}}
-This configuration will be ignored, and parallel execution disabled when running
-Python 2.7 under Windows.
+This configuration is ignored when `installer.parallel` is set to `false`.
 {{% /note %}}
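+
+For example, a sketch capping the installer at a fixed number of workers:
+
+```bash
+poetry config installer.max-workers 4
+```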
 
-### `virtualenvs.create`
+### `installer.no-binary`
 
-**Type**: boolean
+**Type**: `string | boolean`
 
-Create a new virtual environment if one doesn't already exist.
-Defaults to `true`.
+**Default**: `false`
+
+*Introduced in 1.2.0*
+
+When set, this configuration allows users to configure a package distribution format policy for all or
+specific packages.
+
+| Configuration          | Description                                                |
+|------------------------|------------------------------------------------------------|
+| `:all:` or `true`      | Disallow binary distributions for all packages.            |
+| `:none:` or `false`    | Allow binary distributions for all packages.               |
+| `package[,package,..]` | Disallow binary distributions for specified packages only. |
+
+{{% note %}}
+This configuration is only respected when using the new installer. If you have disabled it, please
+consider re-enabling it.
+
+As with all configurations described here, this is a user-specific configuration. This means that it
+is not taken into consideration when a lockfile is generated or dependencies are resolved. It is
+applied only when selecting which distribution of a dependency should be installed into a Poetry-managed
+environment.
+{{% /note %}}
 
 {{% note %}}
-When setting this configuration to `false`, the Python environment used must have `pip`
-installed and available.
+For project-specific usage, it is recommended that this be configured with the `--local` option.
+
+```bash
+poetry config --local installer.no-binary :all:
+```
 {{% /note %}}
 
+{{% note %}}
+For CI or container environments, using an [environment variable](#using-environment-variables)
+to configure this might be useful.
+
+```bash
+export POETRY_INSTALLER_NO_BINARY=:all:
+```
+{{% /note %}}
+
+{{% warning %}}
+Unless this is required system-wide, configure this per project. If set incorrectly in the global
+configuration, you could encounter slower install times across all your projects.
+{{% /warning %}}
+
+### `installer.parallel`
+
+**Type**: `boolean`
+
+**Default**: `true`
+
+*Introduced in 1.1.4*
+
+Use parallel execution when using the new (`>=1.1.0`) installer.
+
+### `virtualenvs.create`
+
+**Type**: `boolean`
+
+**Default**: `true`
+
+Create a new virtual environment if one doesn't already exist.
+
+If set to `false`, Poetry will install dependencies into the current Python environment.
+
 ### `virtualenvs.in-project`
 
-**Type**: boolean
+**Type**: `boolean`
+
+**Default**: `None`
 
 Create the virtualenv inside the project's root directory.
-Defaults to `None`.
 
-If set to `true`, the virtualenv wil be created and expected in a folder named `.venv` within the root directory of the project.
+If not set explicitly, `poetry` will by default create a virtual environment under
+`{cache-dir}/virtualenvs` or use the `{project-dir}/.venv` directory when one is available.
+
+If set to `true`, the virtualenv will be created and expected in a folder named
+`.venv` within the root directory of the project.
+
+If set to `false`, `poetry` will ignore any existing `.venv` directory.
+
+### `virtualenvs.options.always-copy`
+
+**Type**: `boolean`
+
+**Default**: `false`
+
+*Introduced in 1.2.0*
+
+If set to `true` the `--always-copy` parameter is passed to `virtualenv` on creation of the virtual environment, so that
+all needed files are copied into it instead of symlinked.
+
+### `virtualenvs.options.no-pip`
+
+**Type**: `boolean`
+
+**Default**: `false`
+
+*Introduced in 1.2.0*
+
+If set to `true` the `--no-pip` parameter is passed to `virtualenv` on creation of the virtual environment. This means
+when a new virtual environment is created, `pip` will not be installed in the environment.
+
+{{% note %}}
+Poetry, for its internal operations, uses the `pip` wheel embedded in the `virtualenv` package installed as a dependency
+in Poetry's runtime environment. If a user runs `poetry run pip` when this option is set to `true`, the
+embedded instance of `pip` is used.
+
+You can safely set this, along with `no-setuptools`, to `true`, if you desire a virtual environment with no additional
+packages. This is desirable for production environments.
+{{% /note %}}
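+
+For example, a sketch provisioning a lean virtual environment with both options:
+
+```bash
+poetry config virtualenvs.options.no-pip true
+poetry config virtualenvs.options.no-setuptools true
+```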
+
+### `virtualenvs.options.no-setuptools`
+
+**Type**: `boolean`
+
+**Default**: `false`
+
+*Introduced in 1.2.0*
+
+If set to `true` the `--no-setuptools` parameter is passed to `virtualenv` on creation of the virtual environment. This
+means when a new virtual environment is created, `setuptools` will not be installed in the environment. Poetry, for its
+internal operations, does not require `setuptools` and this can safely be set to `true`.
 
-If not set explicitly (default), `poetry` will use the virtualenv from the `.venv` directory when one is available. If set to `false`, `poetry` will ignore any existing `.venv` directory.
+{{% warning %}}
+Some development tools, like IDEs, assume that `setuptools` (and other) packages are always present and
+available within a virtual environment. This can cause some features in these tools to not work as expected.
+{{% /warning %}}
 
+### `virtualenvs.options.system-site-packages`
+
+**Type**: `boolean`
+
+**Default**: `false`
+
+Give the virtual environment access to the system site-packages directory.
+Applies on virtualenv creation.
 
 ### `virtualenvs.path`
 
-**Type**: string
+**Type**: `string`
+
+**Default**: `{cache-dir}/virtualenvs`
 
 Directory where virtual environments will be created.
-Defaults to `{cache-dir}/virtualenvs` (`{cache-dir}\virtualenvs` on Windows).
+
+### `virtualenvs.prefer-active-python` (experimental)
+
+**Type**: `boolean`
+
+**Default**: `false`
+
+*Introduced in 1.2.0*
+
+Use the currently activated Python version to create a new virtual environment.
+If set to `false`, the Python version used during Poetry's installation is used.
+
+### `virtualenvs.prompt`
+
+**Type**: `string`
+
+**Default**: `{project_name}-py{python_version}`
+
+*Introduced in 1.2.0*
+
+Format string defining the prompt to be displayed when the virtual environment is activated.
+The variables `project_name` and `python_version` are available for formatting.
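+
+For example, a sketch setting a custom prompt format (the format string is just an illustration):
+
+```bash
+poetry config virtualenvs.prompt "{project_name}-env"
+```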
 
 ### `repositories.<name>`
 
-**Type**: string
+**Type**: `string`
 
 Set a new alternative repository. See [Repositories]({{< relref "repositories" >}}) for more information.
+
+### `http-basic.<name>`:
+
+**Type**: `(string, string)`
+
+Set repository credentials (`username` and `password`) for `<name>`.
+See [Repositories - Configuring credentials]({{< relref "repositories#configuring-credentials" >}})
+for more information.
+
+### `pypi-token.<name>`:
+
+**Type**: `string`
+
+Set repository credentials (using an API token) for `<name>`.
+See [Repositories - Configuring credentials]({{< relref "repositories#configuring-credentials" >}})
+for more information.
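+
+For example, a sketch setting both kinds of credentials (the `my-repo` name and token are hypothetical):
+
+```bash
+poetry config http-basic.my-repo username password
+poetry config pypi-token.my-repo my-api-token
+```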
+
+### `certificates.<repository>.cert`:
+
+**Type**: `string | boolean`
+
+Set custom certificate authority for repository `<repository>`.
+See [Repositories - Configuring credentials - Custom certificate authority]({{< relref "repositories#custom-certificate-authority-and-mutual-tls-authentication" >}})
+for more information.
+
+This configuration can be set to `false` if TLS certificate verification should be skipped for this
+repository.
+
+### `certificates.<repository>.client-cert`:
+
+**Type**: `string`
+
+Set client certificate for repository `<repository>`.
+See [Repositories - Configuring credentials - Custom certificate authority]({{< relref "repositories#custom-certificate-authority-and-mutual-tls-authentication" >}})
+for more information.
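+
+A sketch configuring both certificate options for a hypothetical `my-repo` repository (paths are placeholders):
+
+```bash
+poetry config certificates.my-repo.cert /path/to/ca.pem
+poetry config certificates.my-repo.client-cert /path/to/client.pem
+```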
diff --git a/vendor/poetry/docs/contributing.md b/vendor/poetry/docs/contributing.md
index ebd0774f..a8f80e77 100644
--- a/vendor/poetry/docs/contributing.md
+++ b/vendor/poetry/docs/contributing.md
@@ -9,6 +9,10 @@ menu:
     weight: 100
 ---
 
+
+
 # Contributing to Poetry
 
 First off, thanks for taking the time to contribute!
@@ -37,6 +41,7 @@ Following these guidelines helps maintainers and the community understand your r
 Before creating bug reports, please check [this list](#before-submitting-a-bug-report) to be sure that you need to create one. When you are creating a bug report, please include as many details as possible. Fill out the [required template](https://github.com/python-poetry/poetry/blob/master/.github/ISSUE_TEMPLATE/---bug-report.md), as the information it asks for helps the maintainers resolve the issue faster.
 
 > **Note:** If you find a **Closed** issue that seems like it is the same thing that you're experiencing, open a new issue and include a link to the original issue in the body of your new one.
+
 #### Before submitting a bug report
 
 * **Check the [FAQs on the official website](https://python-poetry.org/docs/faq)** for a list of common questions and problems.
@@ -65,7 +70,7 @@ Provide more context by answering these questions:
 Include details about your configuration and environment:
 
 * **Which version of Poetry are you using?** You can get the exact version by running `poetry -V` in your terminal.
-* **Which Python version Poetry has been installed for?** Execute the `debug:info` to get the information.
+* **Which Python version has Poetry been installed for?** Run `poetry debug info` to get this information.
 * **What's the name and version of the OS you're using**?
 
 
@@ -118,11 +123,15 @@ the code base, join us on our [Discord Server](https://discordapp.com/invite/awx
 
 You will need Poetry to start contributing on the Poetry codebase. Refer to the [documentation](https://python-poetry.org/docs/#introduction) to start using Poetry.
 
+{{% note %}}
+Local development of Poetry requires Python 3.8 or newer.
+{{% /note %}}
+
 You will first need to clone the repository using `git` and place yourself in its directory:
 
 ```bash
-$ git clone git@github.com:python-poetry/poetry.git
-$ cd poetry
+git clone git@github.com:python-poetry/poetry.git
+cd poetry
 ```
 
 {{% note %}}
@@ -134,8 +143,15 @@ Now, you will need to install the required dependency for Poetry and be sure tha
 tests are passing on your machine:
 
 ```bash
-$ poetry install
-$ poetry run pytest tests/
+poetry install
+poetry run pytest tests/
+```
+
+Poetry uses [mypy](https://github.com/python/mypy) for typechecking, and the CI
+will fail if it finds any errors. To run mypy locally:
+
+```bash
+poetry run mypy
 ```
 
 Poetry uses the [black](https://github.com/psf/black) coding style and you must ensure that your
@@ -148,13 +164,13 @@ To make sure that you don't accidentally commit code that does not follow the co
 install a pre-commit hook that will check that everything is in order:
 
 ```bash
-$ poetry run pre-commit install
+poetry run pre-commit install
 ```
 
 You can also run it anytime using:
 
 ```bash
-$ poetry run pre-commit run --all-files
+poetry run pre-commit run --all-files
 ```
 
 Your code must always be accompanied by corresponding tests; if tests are not present, your code
@@ -197,7 +213,7 @@ If you are helping with the triage of reported issues, this section provides som
 
 #### Multiple versions
 
-Often times you would want to attempt to reproduce issues with multiple versions of `poetry` at the same time. For these use cases, the [pipx project](https://pipxproject.github.io/pipx/) is useful.
+Oftentimes you may want to reproduce issues with multiple versions of `poetry` at the same time. For these use cases, the [pipx project](https://pypa.github.io/pipx/) is useful.
 
 You can set your environment up like so.
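+
+A hypothetical sketch using `pipx` suffixes to install two versions side by side (versions are placeholders):
+
+```bash
+pipx install poetry==1.1.15
+pipx install --suffix=@1.2.0 poetry==1.2.0
+poetry --version         # runs 1.1.15
+poetry@1.2.0 --version   # runs 1.2.0
+```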
 
diff --git a/vendor/poetry/docs/dependency-specification.md b/vendor/poetry/docs/dependency-specification.md
index 5591a830..694582ef 100644
--- a/vendor/poetry/docs/dependency-specification.md
+++ b/vendor/poetry/docs/dependency-specification.md
@@ -18,12 +18,7 @@ of the dependency and on the optional constraints that might be needed for it to
 
 ### Caret requirements
 
-**Caret requirements** allow SemVer compatible updates to a specified version.
-An update is allowed if the new version number does not modify the left-most non-zero digit in the major, minor, patch grouping.
-In this case, if we ran `poetry update requests`, poetry would update us to version `2.14.0` if it was available,
-but would not update us to `3.0.0`.
-If instead we had specified the version string as `^0.1.13`, poetry would update to `0.1.14` but not `0.2.0`.
-`0.0.x` is not considered compatible with any other version.
+**Caret requirements** allow [SemVer](https://semver.org/) compatible updates to a specified version. An update is allowed if the new version number does not modify the left-most non-zero digit in the major, minor, patch grouping. For instance, if we previously ran `poetry add requests@^2.13.0` and wanted to update the library and ran `poetry update requests`, poetry would update us to version `2.14.0` if it was available, but would not update us to `3.0.0`. If instead we had specified the version string as `^0.1.13`, poetry would update to `0.1.14` but not `0.2.0`. `0.0.x` is not considered compatible with any other version.
 
 Here are some more examples of caret requirements and the versions that would be allowed with them:
 
@@ -76,15 +71,51 @@ Here are some examples of inequality requirements:
 != 1.2.3
 ```
 
+#### Multiple requirements
+
+Multiple version requirements can also be separated with a comma, e.g. `>= 1.2, < 1.5`.
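+
+In `pyproject.toml`, such a constraint might look like this (package name hypothetical):
+
+```toml
+[tool.poetry.dependencies]
+foo = ">=1.2,<1.5"
+```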
+
 ### Exact requirements
 
 You can specify the exact version of a package.
+
+`==1.2.3` is an example of an exact version specification.
+
 This will tell Poetry to install this version and this version only.
 If other dependencies require a different version, the solver will ultimately fail and abort any install or update procedures.
 
-#### Multiple requirements
+### Using the `@` operator
 
-Multiple version requirements can also be separated with a comma, e.g. `>= 1.2, < 1.5`.
+When adding dependencies via `poetry add`, you can use the `@` operator.
+This is understood similarly to the `==` syntax, but also allows prefixing any
+specifiers that are valid in `pyproject.toml`. For example:
+
+```shell
+poetry add django@^4.0.0
+```
+
+The above would translate to the following entry in `pyproject.toml`:
+```toml
+Django = "^4.0.0"
+```
+
+The special keyword `latest` is also understood by the `@` operator:
+```shell
+poetry add django@latest
+```
+
+The above would translate to the following entry in `pyproject.toml`, assuming the latest release of `django` is `4.0.5`:
+```toml
+Django = "^4.0.5"
+```
+
+#### Extras
+
+Extras and `@` can be combined as one might expect (`package[extra]@version`):
+
+```shell
+poetry add django[bcrypt]@^4.0.0
+```
 
 ## `git` dependencies
 
@@ -114,6 +145,55 @@ flask = { git = "https://github.com/pallets/flask.git", rev = "38eb5d3b" }
 numpy = { git = "https://github.com/numpy/numpy.git", tag = "v0.13.2" }
 ```
 
+In cases where the package you want to install is located in a subdirectory of the VCS repository, you can use the `subdirectory` option, similarly to what [pip](https://pip.pypa.io/en/stable/topics/vcs-support/#url-fragments) provides:
+
+```toml
+[tool.poetry.dependencies]
+# Install a package named `subdir_package` from a folder called `subdir` within the repository
+subdir_package = { git = "https://github.com/myorg/mypackage_with_subdirs.git", subdirectory = "subdir" }
+```
+
+with the corresponding `add` call:
+
+```bash
+poetry add "https://github.com/myorg/mypackage_with_subdirs.git#subdirectory=subdir"
+```
+
+To use an SSH connection, for example in the case of private repositories, use the following example syntax:
+
+```toml
+[tool.poetry.dependencies]
+requests = { git = "git@github.com:requests/requests.git" }
+```
+
+To use HTTP basic authentication with your git repositories, you can configure credentials similar to
+how [repository credentials]({{< relref "repositories#configuring-credentials" >}}) are configured.
+
+```bash
+poetry config repositories.git-org-project https://github.com/org/project.git
+poetry config http-basic.git-org-project username token
+poetry add git+https://github.com/org/project.git
+```
+
+{{% note %}}
+With Poetry 1.2 releases, the default git client used is [Dulwich](https://www.dulwich.io/).
+
+We fall back to the legacy system git client implementation in cases where
+[gitcredentials](https://git-scm.com/docs/gitcredentials) is used. This fallback will be removed in
+a future release once `gitcredentials` helpers can be better supported natively.
+
+In cases where you encounter issues with the default implementation that used to work prior to
+Poetry 1.2, you may wish to explicitly configure the use of the system git client via a shell
+subprocess call.
+
+```bash
+poetry config experimental.system-git-client true
+```
+
+Keep in mind, however, that doing so will surface bugs that existed in versions prior to 1.2 which
+were caused by the use of the system git client.
+{{% /note %}}
+
 ## `path` dependencies
 
 To depend on a library located in a local directory or file,
@@ -150,6 +230,46 @@ with the corresponding `add` call:
 poetry add https://example.com/my-package-0.1.0.tar.gz
 ```
 
+## Dependency `extras`
+
+You can specify [PEP-508 Extras](https://www.python.org/dev/peps/pep-0508/#extras)
+for a dependency as shown here.
+
+```toml
+[tool.poetry.dependencies]
+gunicorn = { version = "^20.1", extras = ["gevent"] }
+```
+
+{{% note %}}
+These activate the extras defined for the dependency. To configure an optional dependency
+for extras in your project, refer to [`extras`]({{< relref "pyproject#extras" >}}).
+{{% /note %}}
+
+## `source` dependencies
+
+To depend on a package from an [alternate repository]({{< relref "repositories/#install-dependencies-from-a-private-repository" >}}),
+you can use the `source` property:
+
+```toml
+[[tool.poetry.source]]
+name = "foo"
+url = "https://foo.bar/simple/"
+secondary = true
+
+[tool.poetry.dependencies]
+my-cool-package = { version = "*", source = "foo" }
+```
+
+with the corresponding `add` call:
+
+```sh
+poetry add my-cool-package --source foo
+```
+
+{{% note %}}
+In this example, we expect `foo` to be configured correctly. See [using a private repository]({{< relref "repositories#install-dependencies-from-a-private-repository" >}})
+for further information.
+{{% /note %}}
 
 ## Python restricted dependencies
 
@@ -157,12 +277,12 @@ You can also specify that a dependency should be installed only for specific Pyt
 
 ```toml
 [tool.poetry.dependencies]
-pathlib2 = { version = "^2.2", python = "~2.7" }
+tomli = { version = "^2.0.1", python = "<3.11" }
 ```
 
 ```toml
 [tool.poetry.dependencies]
-pathlib2 = { version = "^2.2", python = "~2.7 || ^3.2" }
+pathlib2 = { version = "^2.2", python = "^3.2" }
 ```
 
 ## Using environment markers
@@ -173,10 +293,9 @@ via the `markers` property:
 
 ```toml
 [tool.poetry.dependencies]
-pathlib2 = { version = "^2.2", markers = "python_version ~= '2.7' or sys_platform == 'win32'" }
+pathlib2 = { version = "^2.2", markers = "python_version <= '3.4' or sys_platform == 'win32'" }
 ```
 
-
 ## Multiple constraints dependencies
 
 Sometimes, one of your dependencies may have different version ranges depending
@@ -189,11 +308,16 @@ you would declare it like so:
 ```toml
 [tool.poetry.dependencies]
 foo = [
-    {version = "<=1.9", python = "^2.7"},
-    {version = "^2.0", python = "^3.4"}
+    {version = "<=1.9", python = "^3.6"},
+    {version = "^2.0", python = "^3.8"}
 ]
 ```
 
+{{% note %}}
+The constraints **must** have different requirements (like `python`);
+otherwise it will cause an error when resolving dependencies.
+{{% /note %}}
+
 ## Expanded dependency specification syntax
 
 In the case of more complex dependency specifications, you may find that you
@@ -203,26 +327,21 @@ you can shift from using "inline table" syntax, to the "standard table" syntax.
 An example where this might be useful is the following:
 
 ```toml
-[tool.poetry.dev-dependencies]
-black = {version = "19.10b0", allow-prereleases = true, python = "^3.6", markers = "platform_python_implementation == 'CPython'"}
+[tool.poetry.group.dev.dependencies]
+black = {version = "19.10b0", allow-prereleases = true, python = "^3.7", markers = "platform_python_implementation == 'CPython'"}
 ```
 
-As a single line, this is a lot to digest. To make this a little bit easier to
+As a single line, this is a lot to digest. To make this a bit easier to
 work with, you can do the following:
 
 ```toml
-[tool.poetry.dev-dependencies.black]
+[tool.poetry.group.dev.dependencies.black]
 version = "19.10b0"
 allow-prereleases = true
-python = "^3.6"
+python = "^3.7"
 markers = "platform_python_implementation == 'CPython'"
 ```
 
-All of the same information is still present, and ends up providing the exact
+The same information is still present, and ends up providing the exact
 same specification. It's simply split into multiple, slightly more readable,
 lines.
-
-{{% note %}}
-The constraints **must** have different requirements (like `python`)
-otherwise it will cause an error when resolving dependencies.
-{{% /note %}}
diff --git a/vendor/poetry/docs/faq.md b/vendor/poetry/docs/faq.md
index 122a6856..780e7a7c 100644
--- a/vendor/poetry/docs/faq.md
+++ b/vendor/poetry/docs/faq.md
@@ -14,12 +14,12 @@ menu:
 ### Why is the dependency resolution process slow?
 
 While the dependency resolver at the heart of Poetry is highly optimized and
-should be fast enough for most cases, sometimes, with some specific set of dependencies,
+should be fast enough for most cases, with certain sets of dependencies
 it can take time to find a valid solution.
 
 This is due to the fact that not all libraries on PyPI have properly declared their metadata
 and, as such, they are not available via the PyPI JSON API. At this point, Poetry has no choice
-but downloading the packages and inspect them to get the necessary information. This is an expensive
+but to download the packages and inspect them to get the necessary information. This is an expensive
 operation, both in bandwidth and time, which is why it seems this is a long process.
 
 At the moment there is no way around it.
@@ -41,7 +41,7 @@ The only good alternative is to define an upper bound on your constraints,
 which you can increase in a new release after testing that your package is compatible
 with the new major version of your dependency.
 
-For example instead of using `>=3.4` you should use `~3.4` which allows all versions `<4.0`.
+For example instead of using `>=3.4` you should use `^3.4` which allows all versions `<4.0`.
 The `^` operator works very well with libraries following [semantic versioning](https://semver.org).
 
 ### Is tox supported?
@@ -57,20 +57,58 @@ requires = ["poetry-core>=1.0.0"]
 build-backend = "poetry.core.masonry.api"
 ```
 
-And use a `tox.ini` configuration file similar to this:
+`tox` can be configured in multiple ways. It depends on what the code under test should be and which dependencies
+should be installed.
 
-```INI
+#### Use case #1
+```ini
 [tox]
 isolated_build = true
-envlist = py27, py36
 
 [testenv]
+deps =
+    pytest
+commands =
+    pytest tests/ --import-mode importlib
+```
+
+`tox` will create an `sdist` package of the project and use `pip` to install it in a fresh environment.
+Thus, dependencies are resolved by `pip`.
+
+#### Use case #2
+```ini
+[tox]
+isolated_build = true
+
+[testenv]
+whitelist_externals = poetry
+commands_pre =
+    poetry install --no-root --sync
+commands =
+    poetry run pytest tests/ --import-mode importlib
+```
+
+`tox` will create an `sdist` package of the project and use `pip` to install it in a fresh environment.
+Thus, dependencies are resolved by `pip` in the first place. But afterwards we run Poetry,
+which will install the locked dependencies into the environment.
+
+#### Use case #3
+```ini
+[tox]
+isolated_build = true
+
+[testenv]
+skip_install = true
 whitelist_externals = poetry
+commands_pre =
+    poetry install
 commands =
-    poetry install -v
-    poetry run pytest tests/
+    poetry run pytest tests/ --import-mode importlib
 ```
 
+`tox` will not do any install. Poetry installs all the dependencies and the current package in editable mode.
+Thus, tests run against the local files and not the built and installed package.
+
 ### I don't want Poetry to manage my virtual environments. Can I disable it?
 
 While Poetry automatically creates virtual environments to always work isolated
@@ -82,3 +120,35 @@ In this case, you can disable this feature by setting the `virtualenvs.create` s
 ```bash
 poetry config virtualenvs.create false
 ```
+
+### Why is Poetry telling me that the current project's Python requirement is not compatible with one or more packages' Python requirements?
+
+Unlike `pip`, Poetry doesn't resolve for just the Python in the current environment. Instead, it makes sure that a dependency
+is resolvable within the given Python version range in `pyproject.toml`.
+
+Assume you have the following `pyproject.toml`:
+
+```toml
+[tool.poetry.dependencies]
+python = "^3.7"
+```
+
+This means your project aims to be compatible with any Python version >=3.7,<4.0. Whenever you try to add a dependency
+whose Python requirement doesn't match the whole range, Poetry will tell you this, e.g.:
+
+```
+The current project's Python requirement (>=3.7.0,<4.0.0) is not compatible with some of the required packages Python requirement:
+    - scipy requires Python >=3.7,<3.11, so it will not be satisfied for Python >=3.11,<4.0.0
+```
+
+Usually you will want to match the Python requirement of your project with the upper bound of the failing dependency.
+Alternatively, you can tell Poetry to install this dependency [only for a specific range of Python versions]({{< relref "dependency-specification#multiple-constraints-dependencies" >}}),
+if you know that it's not needed for all versions.
+
+
+### Why does Poetry enforce PEP 440 versions?
+
+This is done to be compliant with the broader Python ecosystem.
+
+For example, if Poetry builds a distribution for a project that uses a version that is not valid according to
+[PEP 440](https://peps.python.org/pep-0440), third party tools will be unable to parse the version correctly.
diff --git a/vendor/poetry/docs/libraries.md b/vendor/poetry/docs/libraries.md
index df23e68b..ab569795 100644
--- a/vendor/poetry/docs/libraries.md
+++ b/vendor/poetry/docs/libraries.md
@@ -9,6 +9,7 @@ menu:
     weight: 20
 ---
 
+
 # Libraries
 
 This chapter will tell you how to make your library installable through Poetry.
@@ -16,11 +17,19 @@ This chapter will tell you how to make your library installable through Poetry.
 
 ## Versioning
 
-While Poetry does not enforce any convention regarding package versioning,
-it **strongly** recommends to follow [semantic versioning](https://semver.org).
+Poetry requires [PEP 440](https://peps.python.org/pep-0440)-compliant versions for all projects.
+
+While Poetry does not enforce any release convention, it does encourage the use of
+[semantic versioning](https://semver.org/) within the scope of
+[PEP 440](https://peps.python.org/pep-0440/#semantic-versioning). This has many advantages for the end users
+and allows them to set appropriate [version constraints]({{< relref "dependency-specification#version-constraints" >}}).
+
+{{% note %}}
+
+As an example, `1.0.0-hotfix.1` is not compatible with [PEP 440](https://peps.python.org/pep-0440). You can instead
+choose to use `1.0.0-post1` or `1.0.0.post1`.
 
-This has many advantages for the end users and allows them to set appropriate
-[version constraints]({{< relref "dependency-specification#version-constraints" >}}).
+{{% /note %}}
 
 ## Lock file
 
diff --git a/vendor/poetry/docs/managing-dependencies.md b/vendor/poetry/docs/managing-dependencies.md
new file mode 100644
index 00000000..1bb35310
--- /dev/null
+++ b/vendor/poetry/docs/managing-dependencies.md
@@ -0,0 +1,211 @@
+---
+draft: false
+layout: single
+menu:
+  docs:
+    weight: 11
+title: Managing dependencies
+type: docs
+---
+
+
+# Managing dependencies
+
+## Dependency groups
+
+Poetry provides a way to **organize** your dependencies by **groups**. For instance, you might have
+dependencies that are only needed to test your project or to build the documentation.
+
+To declare a new dependency group, use a `tool.poetry.group.<group>` section
+where `<group>` is the name of your dependency group (for instance, `test`):
+
+```toml
+[tool.poetry.group.test]  # This part can be left out
+
+[tool.poetry.group.test.dependencies]
+pytest = "^6.0.0"
+pytest-mock = "*"
+```
+
+{{% note %}}
+All dependencies **must be compatible with each other** across groups since they will
+be resolved regardless of whether they are required for installation or not (see [Installing group dependencies]({{< relref "#installing-group-dependencies" >}})).
+
+Think of dependency groups as **labels** associated with your dependencies: they don't have any bearing
+on whether their dependencies will be resolved and installed **by default**; they are simply a way to organize
+the dependencies logically.
+{{% /note %}}
+
+{{% note %}}
+The dependencies declared in `tool.poetry.dependencies` are part of an implicit `main` group.
+
+```toml
+[tool.poetry.dependencies]  # main dependency group
+httpx = "*"
+pendulum = "*"
+
+[tool.poetry.group.test.dependencies]
+pytest = "^6.0.0"
+pytest-mock = "*"
+```
+{{% /note %}}
+
+{{% note %}}
+Dependency groups, other than the implicit `main` group, must only contain dependencies you need in your development
+process. Installing them is only possible by using Poetry.
+
+To declare a set of dependencies, which add additional functionality to the project during runtime,
+use [extras]({{< relref "pyproject#extras" >}}) instead. Extras can be installed by the end user using `pip`.
+{{% /note %}}
+
+{{% note %}}
+**A note about the `dev-dependencies` section**
+
+Any dependency declared in the `dev-dependencies` section will automatically be added to a `dev` group.
+So the two following notations are equivalent:
+
+```toml
+[tool.poetry.dev-dependencies]
+pytest = "^6.0.0"
+pytest-mock = "*"
+```
+
+```toml
+[tool.poetry.group.dev.dependencies]
+pytest = "^6.0.0"
+pytest-mock = "*"
+```
+
+Poetry will slowly transition away from the `dev-dependencies` notation which will soon be deprecated,
+so it's advised to migrate your existing development dependencies to the new `group` notation.
+{{% /note %}}
+
+### Optional groups
+
+A dependency group can be declared as optional. This makes sense when you have
+a group of dependencies that are only required in a particular environment or for
+a specific purpose.
+
+```toml
+[tool.poetry.group.docs]
+optional = true
+
+[tool.poetry.group.docs.dependencies]
+mkdocs = "*"
+```
+
+Optional groups can be installed in addition to the **default** dependencies by using the `--with`
+option of the [`install`]({{< relref "cli#install" >}}) command.
+
+```bash
+poetry install --with docs
+```
+
+{{% warning %}}
+Optional group dependencies will **still** be resolved alongside other dependencies, so
+special care should be taken to ensure they are compatible with each other.
+{{% /warning %}}
+
+### Adding a dependency to a group
+
+The [`add`]({{< relref "cli#add" >}}) command is the preferred way to add dependencies
+to a group. This is done by using the `--group (-G)` option.
+
+```bash
+poetry add pytest --group test
+```
+
+If the group does not already exist, it will be created automatically.
+
+### Installing group dependencies
+
+**By default**, dependencies across **all non-optional groups** will be installed when executing
+`poetry install`.
+
+{{% note %}}
+The default set of dependencies for a project includes the implicit `main` group defined in
+`tool.poetry.dependencies` as well as all groups that are not explicitly marked as an
+[optional group]({{< relref "#optional-groups" >}}).
+{{% /note %}}
+
+You can **exclude** one or more groups with the `--without` option:
+
+```bash
+poetry install --without test,docs
+```
+
+You can also opt in [optional groups]({{< relref "#optional-groups" >}}) by using the `--with` option:
+
+```bash
+poetry install --with docs
+```
+
+{{% warning %}}
+When used together, `--without` takes precedence over `--with`. For example, the following command
+will only install the dependencies of the implicit `main` group, since both `test` and `docs` end up excluded.
+
+```bash
+poetry install --with docs --without test,docs
+```
+{{% /warning %}}
+
+Finally, in some cases you might want to install **only specific groups** of dependencies
+without installing the default set of dependencies. For that purpose, you can use
+the `--only` option.
+
+```bash
+poetry install --only docs
+```
+
+{{% note %}}
+If you only want to install the project's runtime dependencies, you can do so with the
+`--only main` notation:
+
+```bash
+poetry install --only main
+```
+{{% /note %}}
+
+{{% note %}}
+If you want to install the project root, and no other dependencies, you can use
+the `--only-root` option.
+
+```bash
+poetry install --only-root
+```
+{{% /note %}}
+
+### Removing dependencies from a group
+
+The [`remove`]({{< relref "cli#remove" >}}) command supports a `--group` option
+to remove packages from a specific group:
+
+```bash
+poetry remove mkdocs --group docs
+```
+
+
+## Synchronizing dependencies
+
+Poetry supports what's called dependency synchronization. This ensures
+that the locked dependencies in the `poetry.lock` file are the only ones present
+in the environment, removing anything that's not necessary.
+
+This is done by using the `--sync` option of the `install` command:
+
+```bash
+poetry install --sync
+```
+
+The `--sync` option can be combined with any [dependency groups]({{< relref "#dependency-groups" >}}) related options
+to synchronize the environment with specific groups.
+
+```bash
+poetry install --without dev --sync
+poetry install --with docs --sync
+poetry install --only dev
+```
+
+{{% note %}}
+The `--sync` option replaces the `--remove-untracked` option which is now deprecated.
+{{% /note %}}
diff --git a/vendor/poetry/docs/managing-environments.md b/vendor/poetry/docs/managing-environments.md
index 7d3e599b..1716a2f9 100644
--- a/vendor/poetry/docs/managing-environments.md
+++ b/vendor/poetry/docs/managing-environments.md
@@ -18,7 +18,7 @@ To achieve this, it will first check if it's currently running inside a virtual
 If it is, it will use it directly without creating a new one. But if it's not, it will use
 one that it has already created or create a brand new one for you.
 
-By default, Poetry will try to use the currently activated Python version
+By default, Poetry will try to use the Python version used during Poetry's installation
 to create the virtual environment for the current project.
 
 However, for various reasons, this Python version might not be compatible
@@ -26,6 +26,25 @@ with the `python` requirement of the project. In this case, Poetry will try
 to find one that is and use it. If it's unable to do so then you will be prompted
 to activate one explicitly, see [Switching environments](#switching-between-environments).
 
+{{% note %}}
+If you use a tool like [pyenv](https://github.com/pyenv/pyenv) to manage different Python versions,
+you can set the experimental `virtualenvs.prefer-active-python` option to `true`. Poetry
+will then try to find the current `python` of your shell.
+
+For instance, if your project requires a newer Python than is available with
+your system, a standard workflow would be:
+
+```bash
+pyenv install 3.9.8
+pyenv local 3.9.8  # Activate Python 3.9 for the current project
+poetry install
+```
+{{% /note %}}
+
+{{% note %}}
+Since version 1.2, Poetry no longer supports managing environments for Python 2.7.
+{{% /note %}}
+
 ## Switching between environments
 
 Sometimes this might not be feasible for your system, especially Windows where `pyenv`
@@ -99,11 +118,16 @@ poetry env list
 will output something like the following:
 
 ```text
-test-O3eWbxRl-py2.7
 test-O3eWbxRl-py3.6
 test-O3eWbxRl-py3.7 (Activated)
 ```
 
+You can pass the option `--full-path` to display the full path to the environments:
+
+```bash
+poetry env list --full-path
+```
+
 ## Deleting the environments
 
 Finally, you can delete existing virtual environments by using `env remove`:
@@ -115,4 +139,13 @@ poetry env remove 3.7
 poetry env remove test-O3eWbxRl-py3.7
 ```
 
+You can delete more than one environment at a time.
+```bash
+poetry env remove python3.6 python3.7 python3.8
+```
+Use the `--all` option to delete all virtual environments at once.
+```bash
+poetry env remove --all
+```
+
 If you remove the currently activated virtual environment, it will be automatically deactivated.
diff --git a/vendor/poetry/docs/plugins.md b/vendor/poetry/docs/plugins.md
new file mode 100644
index 00000000..d9eee07a
--- /dev/null
+++ b/vendor/poetry/docs/plugins.md
@@ -0,0 +1,251 @@
+---
+title: "Plugins"
+draft: false
+type: docs
+layout: single
+
+menu:
+  docs:
+    weight: 80
+---
+
+# Plugins
+
+Poetry supports using and building plugins if you wish to
+alter or expand Poetry's functionality with your own.
+
+For example, your environment may pose special requirements on the behaviour of Poetry
+which do not apply to the majority of its users, or you may wish to accomplish
+something with Poetry in a way that is not desired by most users.
+
+In these cases you could consider creating a plugin to handle your specific logic.
+
+
+## Creating a plugin
+
+A plugin is a regular Python package which ships its code as part of the package
+and may also depend on further packages.
+
+### Plugin package
+
+The plugin package must depend on Poetry
+and declare a proper [plugin]({{< relref "pyproject#plugins" >}}) in the `pyproject.toml` file.
+
+```toml
+[tool.poetry]
+name = "my-poetry-plugin"
+version = "1.0.0"
+
+# ...
+[tool.poetry.dependencies]
+python = "^3.7"
+poetry = "^1.0"
+
+[tool.poetry.plugins."poetry.plugin"]
+demo = "poetry_demo_plugin.plugin:MyPlugin"
+```
+
+### Generic plugins
+
+Every plugin has to supply a class which implements the `poetry.plugins.Plugin` interface.
+
+The `activate()` method of the plugin is called after the plugin is loaded
+and receives an instance of `Poetry` as well as an instance of `cleo.io.IO`.
+
+Using these two objects all configuration can be read
+and all public internal objects and state can be manipulated as desired.
+
+Example:
+
+```python
+from cleo.io.io import IO
+
+from poetry.plugins.plugin import Plugin
+from poetry.poetry import Poetry
+
+
+class MyPlugin(Plugin):
+
+    def activate(self, poetry: Poetry, io: IO):
+        io.write_line("Setting readme")
+        poetry.package.readme = "README.md"
+        ...
+```
+
+### Application plugins
+
+If you want to add commands or options to the `poetry` script you need
+to create an application plugin which implements the `poetry.plugins.ApplicationPlugin` interface.
+
+The `activate()` method of the application plugin is called after the plugin is loaded
+and receives an instance of `console.Application`.
+
+```python
+from cleo.commands.command import Command
+from poetry.plugins.application_plugin import ApplicationPlugin
+
+
+class CustomCommand(Command):
+
+    name = "my-command"
+
+    def handle(self) -> int:
+        self.line("My command")
+
+        return 0
+
+
+def factory():
+    return CustomCommand()
+
+
+class MyApplicationPlugin(ApplicationPlugin):
+    def activate(self, application):
+        application.command_loader.register_factory("my-command", factory)
+```
+
+{{% note %}}
+It's possible to do the following to register the command:
+
+```python
+application.add(MyCommand())
+```
+
+However, it is **strongly** recommended to register a new factory
+in the command loader so that the loading of the command is deferred until it is actually
+called.
+
+This will help keep Poetry's performance good.
+{{% /note %}}
+
+The plugin also must be declared in the `pyproject.toml` file of the plugin package
+as an `application.plugin` plugin:
+
+```toml
+[tool.poetry.plugins."poetry.application.plugin"]
+foo-command = "poetry_demo_plugin.plugin:MyApplicationPlugin"
+```
+
+{{% warning %}}
+A plugin **must not** remove or modify in any way the core commands of Poetry.
+{{% /warning %}}
+
+
+### Event handler
+
+Plugins can also listen to specific events and act on them if necessary.
+
+These events are fired by [Cleo](https://github.com/sdispater/cleo)
+and are accessible from the `cleo.events.console_events` module.
+
+- `COMMAND`: this event allows attaching listeners before any command is executed.
+- `SIGNAL`: this event allows some actions to be performed after the command execution is interrupted.
+- `TERMINATE`: this event allows listeners to be attached after the command.
+- `ERROR`: this event occurs when an uncaught exception is raised.
+
+Let's see how to implement an application event handler. For this example
+we will see how to load environment variables from a `.env` file before executing
+a command.
+
+
+```python
+from cleo.events.console_events import COMMAND
+from cleo.events.console_command_event import ConsoleCommandEvent
+from cleo.events.event_dispatcher import EventDispatcher
+from dotenv import load_dotenv
+from poetry.console.application import Application
+from poetry.console.commands.env_command import EnvCommand
+from poetry.plugins.application_plugin import ApplicationPlugin
+
+
+class MyApplicationPlugin(ApplicationPlugin):
+    def activate(self, application: Application):
+        application.event_dispatcher.add_listener(
+            COMMAND, self.load_dotenv
+        )
+
+    def load_dotenv(
+        self,
+        event: ConsoleCommandEvent,
+        event_name: str,
+        dispatcher: EventDispatcher
+    ) -> None:
+        command = event.command
+        if not isinstance(command, EnvCommand):
+            return
+
+        io = event.io
+
+        if io.is_debug():
+            io.write_line(
+                "Loading environment variables."
+            )
+
+        load_dotenv()
+```
+
+
+## Using plugins
+
+Installed plugin packages are automatically loaded when Poetry starts up.
+
+There are multiple ways to install plugins for Poetry:
+
+### The `self add` command
+
+This is the easiest way and should account for all the ways Poetry can be installed.
+
+```bash
+poetry self add poetry-plugin
+```
+
+The `self add` command will ensure that the plugin is compatible with the current version of Poetry
+and install the needed packages for the plugin to work.
+
+The package specification formats supported by the `self add` command are the same as the ones supported
+by the [`add` command]({{< relref "cli#add" >}}).
+
+If you no longer need a plugin and want to uninstall it, you can use the `self remove` command.
+
+```shell
+poetry self remove poetry-plugin
+```
+
+You can also list all currently installed plugins by running:
+
+```shell
+poetry self show
+```
+
+### With `pipx inject`
+
+If you used `pipx` to install Poetry you can add the plugin packages via the `pipx inject` command.
+
+```shell
+pipx inject poetry poetry-plugin
+```
+
+If you want to uninstall a plugin, you can run:
+
+```shell
+pipx runpip poetry uninstall poetry-plugin
+```
+
+### With `pip`
+
+The `pip` binary in Poetry's virtual environment can also be used to install and remove plugins.
+The environment variable `$POETRY_HOME` here is used to represent the path to the virtual environment.
+The [installation instructions](/docs/) can be referenced if you are not
+sure where Poetry has been installed.
+
+To add a plugin, you can use `pip install`:
+
+```shell
+$POETRY_HOME/bin/pip install --user poetry-plugin
+```
+
+If you want to uninstall a plugin, you can run:
+
+```shell
+$POETRY_HOME/bin/pip uninstall poetry-plugin
+```
diff --git a/vendor/poetry/docs/pre-commit-hooks.md b/vendor/poetry/docs/pre-commit-hooks.md
new file mode 100644
index 00000000..a032c10e
--- /dev/null
+++ b/vendor/poetry/docs/pre-commit-hooks.md
@@ -0,0 +1,101 @@
+---
+title: "pre-commit hooks"
+draft: false
+type: docs
+layout: single
+
+menu:
+  docs:
+    weight: 120
+---
+
+# pre-commit hooks
+
+pre-commit is a framework for building and running
+[git hooks](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks).
+See the official documentation for more information: [pre-commit.com](https://pre-commit.com/)
+
+This document provides a list of available pre-commit hooks provided by Poetry.
+
+
+{{% note %}}
+If you specify the `args:` for a hook in your `.pre-commit-config.yaml`,
+the defaults are overwritten. You must fully specify all arguments for
+your hook if you make use of `args:`.
+{{% /note %}}
+
+
+## poetry-check
+
+The `poetry-check` hook calls the `poetry check` command
+to make sure the poetry configuration does not get committed in a broken state.
+
+### Arguments
+
+The hook takes the same arguments as the poetry command.
+For more information see the [check command]({{< relref "cli#check" >}}).
+
+
+## poetry-lock
+
+The `poetry-lock` hook calls the `poetry lock` command
+to make sure the lock file is up-to-date when committing changes.
+
+### Arguments
+
+The hook takes the same arguments as the poetry command.
+For more information see the [lock command]({{< relref "cli#lock" >}}).
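+
+For example, the `--no-update` flag of `poetry lock` could be passed through the hook to only refresh the lock file (remember that `args:` overrides the defaults):
+
+```yaml
+hooks:
+  - id: poetry-lock
+    args: ["--no-update"]
+```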
+
+
+## poetry-export
+
+The `poetry-export` hook calls the `poetry export` command
+to sync your `requirements.txt` file with your current dependencies.
+
+{{% note %}}
+It is recommended to run the [`poetry-lock`](#poetry-lock) hook prior to this one.
+{{% /note %}}
+
+### Arguments
+
+The hook takes the same arguments as the poetry command.
+For more information see the [export command]({{< relref "cli#export" >}}).
+
+The default arguments are `args: ["-f", "requirements.txt", "-o", "requirements.txt"]`,
+which will create/update the requirements.txt file in the current working directory.
+
+You may add `verbose: true` in your `.pre-commit-config.yaml` in order to output to the
+console:
+
+```yaml
+hooks:
+  - id: poetry-export
+    args: ["-f", "requirements.txt"]
+    verbose: true
+```
+
+Also, `--dev` can be added to `args` to write dev-dependencies to `requirements.txt`:
+
+```yaml
+hooks:
+  - id: poetry-export
+    args: ["--dev", "-f", "requirements.txt", "-o", "requirements.txt"]
+```
+
+
+## Usage
+
+For more information on how to use pre-commit please see the [official documentation](https://pre-commit.com/).
+
+A full `.pre-commit-config.yaml` example:
+
+```yaml
+repos:
+  - repo: https://github.com/python-poetry/poetry
+    rev: ''  # add version here
+    hooks:
+      - id: poetry-check
+      - id: poetry-lock
+      - id: poetry-export
+        args: ["-f", "requirements.txt", "-o", "requirements.txt"]
+```
diff --git a/vendor/poetry/docs/pyproject.md b/vendor/poetry/docs/pyproject.md
index 6dedc953..defb4636 100644
--- a/vendor/poetry/docs/pyproject.md
+++ b/vendor/poetry/docs/pyproject.md
@@ -17,17 +17,38 @@ The `tool.poetry` section of the `pyproject.toml` file is composed of multiple s
 
 The name of the package. **Required**
 
+This should be a valid name as defined by [PEP 508](https://peps.python.org/pep-0508/#names).
+
+
+```toml
+name = "my-package"
+```
+
 ## version
 
 The version of the package. **Required**
 
-This should follow [semantic versioning](http://semver.org/). However it will not be enforced and you remain
-free to follow another specification.
+This should be a valid [PEP 440](https://peps.python.org/pep-0440/) string.
+
+```toml
+version = "0.1.0"
+```
+
+{{% note %}}
+
+If you would like to use semantic versioning for your project, please see
+[here]({{< relref "libraries#versioning" >}}).
+
+{{% /note %}}
 
 ## description
 
 A short description of the package. **Required**
 
+```toml
+description = "A short description of the package."
+```
+
 ## license
 
 The license of the package.
@@ -51,6 +72,9 @@ The recommended notation for the most common licenses is (alphabetical):
 Optional, but it is highly recommended to supply this.
 More identifiers are listed at the [SPDX Open Source License Registry](https://spdx.org/licenses/).
 
+```toml
+license = "MIT"
+```
 {{% note %}}
 If your project is proprietary and does not use a specific licence, you can set this value as `Proprietary`.
 {{% /note %}}
@@ -61,33 +85,83 @@ The authors of the package. **Required**
 
 This is a list of authors and should contain at least one author. Authors must be in the form `name <email>`.
 
+```toml
+authors = [
+    "Sébastien Eustace ",
+]
+```
+
 ## maintainers
 
 The maintainers of the package. **Optional**
 
 This is a list of maintainers and should be distinct from authors. Maintainers may contain an email and be in the form `name <email>`.
 
+```toml
+maintainers = [
+    "John Smith ",
+    "Jane Smith ",
+]
+```
+
 ## readme
 
-The readme file of the package. **Optional**
+A path, or list of paths, corresponding to the README file(s) of the package.
+**Optional**
+
+The file(s) can be of any format, but if you intend to publish to PyPI keep the
+[recommendations for a PyPI-friendly README](
+https://packaging.python.org/en/latest/guides/making-a-pypi-friendly-readme/) in
+mind. README paths are implicitly relative to `pyproject.toml`.
 
-The file can be either `README.rst` or `README.md`.
+The contents of the README file(s) are used to populate the [Description
+field](https://packaging.python.org/en/latest/specifications/core-metadata/#description-optional)
+of your distribution's metadata (similar to `long_description` in setuptools).
+When multiple files are specified they are concatenated with newlines.
+
+```toml
+[tool.poetry]
+# ...
+readme = "README.md"
+```
+
+```toml
+[tool.poetry]
+# ...
+readme = ["docs/README1.md", "docs/README2.md"]
+```
 
 ## homepage
 
 A URL to the website of the project. **Optional**
 
+```toml
+homepage = "https://python-poetry.org/"
+```
+
 ## repository
 
 A URL to the repository of the project. **Optional**
 
+```toml
+repository = "https://github.com/python-poetry/poetry"
+```
+
 ## documentation
 
 A URL to the documentation of the project. **Optional**
 
+```toml
+documentation = "https://python-poetry.org/docs/"
+```
+
 ## keywords
 
-A list of keywords (max: 5) that the package is related to. **Optional**
+A list of keywords that the package is related to. **Optional**
+
+```toml
+keywords = ["packaging", "poetry"]
+```
 
 ## classifiers
 
@@ -163,6 +237,12 @@ packages = [
 ```
 {{% /note %}}
 
+{{% note %}}
+Poetry is clever enough to detect Python subpackages.
+
+Thus, you only have to specify the directory where your root package resides.
+{{% /note %}}
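+
+For example, with a layout like `my_package/` containing `my_package/submodule/`, you
+only declare the root package (a sketch; adjust to your layout):
+
+```toml
+packages = [
+    { include = "my_package" },
+]
+```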
+
 ## include and exclude
 
 A list of patterns that will be included in the final package.
@@ -172,19 +252,36 @@ The globs specified in the exclude field identify a set of files that are not in
 
 If a VCS is being used for a package, the exclude field will be seeded with the VCS’ ignore settings (`.gitignore` for git for example).
 
+{{% note %}}
+Explicitly declaring entries in `include` will negate VCS' ignore settings.
+{{% /note %}}
+
 ```toml
 [tool.poetry]
 # ...
 include = ["CHANGELOG.md"]
 ```
 
+You can also specify the formats for which these patterns have to be included, as shown here:
+
+```toml
+[tool.poetry]
+# ...
+include = [
+    { path = "tests", format = "sdist" },
+    { path = "for_wheel.txt", format = ["sdist", "wheel"] }
+]
+```
+
+If no format is specified, the pattern will be included in both `sdist` and `wheel` by default.
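+
+To check which files end up in each archive, you can build and inspect the result; an
+illustrative check:
+
+```bash
+poetry build --format sdist
+tar -tzf dist/*.tar.gz
+```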
+
 ```toml
 exclude = ["my_package/excluded.py"]
 ```
 
-## `dependencies` and `dev-dependencies`
+## dependencies and dependency groups
 
-Poetry is configured to look for dependencies on [PyPi](https://pypi.org) by default.
+Poetry is configured to look for dependencies on [PyPI](https://pypi.org) by default.
 Only the name and a version string are required in this case.
 
 ```toml
@@ -192,12 +289,20 @@ Only the name and a version string are required in this case.
 requests = "^2.13.0"
 ```
 
-If you want to use a private repository, you can add it to your `pyproject.toml` file, like so:
+If you want to use a [private repository]({{< relref "repositories#using-a-private-repository" >}}),
+you can add it to your `pyproject.toml` file, like so:
 
 ```toml
 [[tool.poetry.source]]
-name = 'private'
-url = 'http://example.com/simple'
+name = "private"
+url = "http://example.com/simple"
+```
+
+If you have multiple repositories configured, you can explicitly tell poetry where to look for a specific package:
+
+```toml
+[tool.poetry.dependencies]
+requests = { version = "^2.13.0", source = "private" }
 ```
 
 {{% note %}}
@@ -206,20 +311,42 @@ is compatible is mandatory:
 
 ```toml
 [tool.poetry.dependencies]
-python = "^3.6"
+python = "^3.7"
 ```
 {{% /note %}}
 
+You can organize your dependencies in [groups]({{< relref "managing-dependencies#dependency-groups" >}})
+to manage them in a more granular way.
+
+```toml
+[tool.poetry.group.test.dependencies]
+pytest = "*"
+
+[tool.poetry.group.docs.dependencies]
+mkdocs = "*"
+```
+
+See [Dependency groups]({{< relref "managing-dependencies#dependency-groups" >}}) for a more in-depth look
+at how to manage dependency groups and [Dependency specification]({{< relref "dependency-specification" >}})
+for more information on other keys and specifying version ranges.
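+
+A sketch of installing with or without specific groups (using the group names defined
+above):
+
+```bash
+poetry install --with docs
+poetry install --without test,docs
+```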
+
 ## `scripts`
 
 This section describes the scripts or executables that will be installed when installing the package.
 
 ```toml
 [tool.poetry.scripts]
-poetry = 'poetry.console:run'
+my_package_cli = 'my_package.console:run'
 ```
 
-Here, we will have the `poetry` script installed which will execute `console.run` in the `poetry` package.
+Here, we will have the `my_package_cli` script installed which will execute the `run` function in the `console` module in the `my_package` package.
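+
+A minimal sketch of the corresponding module (contents illustrative, matching the
+entry point above):
+
+```python
+# my_package/console.py
+def run() -> None:
+    print("Hello from my_package!")
+```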
+
+To specify a script that [depends on an extra](#extras), you may provide an entry as an inline table:
+
+```toml
+[tool.poetry.scripts]
+devtest = { callable = "mypackage:test.run_tests", extras = ["test"] }
+```
 
 {{% note %}}
 When a script is added or updated, run `poetry install` to make them available in the project's virtualenv.
@@ -242,21 +369,45 @@ mandatory = "^1.0"
 
 # A list of all of the optional dependencies, some of which are included in the
 # below `extras`. They can be opted into by apps.
-psycopg2 = { version = "^2.7", optional = true }
+psycopg2 = { version = "^2.9", optional = true }
 mysqlclient = { version = "^1.3", optional = true }
 
 [tool.poetry.extras]
 mysql = ["mysqlclient"]
 pgsql = ["psycopg2"]
+databases = ["mysqlclient", "psycopg2"]
 ```
 
-When installing packages, you can specify extras by using the `-E|--extras` option:
+When installing packages with Poetry, you can specify extras by using the `-E|--extras` option:
 
 ```bash
 poetry install --extras "mysql pgsql"
 poetry install -E mysql -E pgsql
 ```
 
+Or, you can install all extras with the `--all-extras` option:
+
+```bash
+poetry install --all-extras
+```
+
+When installing or specifying Poetry-built packages, the extras defined in this section can be activated
+as described in [PEP 508](https://www.python.org/dev/peps/pep-0508/#extras).
+
+For example, when installing the package using `pip`, the dependencies required by
+the `databases` extra can be installed as shown below.
+
+```bash
+pip install awesome[databases]
+```
+
+{{% note %}}
+The dependencies specified for each `extra` must already be defined as project dependencies.
+
+Dependencies listed in [dependency groups]({{< relref "managing-dependencies#dependency-groups" >}}) cannot be specified as extras.
+{{% /note %}}
+
+
 ## `plugins`
 
 Poetry supports arbitrary plugins which work similarly to
@@ -280,7 +431,7 @@ any custom url in the `urls` section.
 "Bug Tracker" = "https://github.com/python-poetry/poetry/issues"
 ```
 
-If you publish you package on PyPI, they will appear in the `Project Links` section.
+If you publish your package on PyPI, they will appear in the `Project Links` section.
 
 ## Poetry and PEP-517
 
@@ -293,7 +444,7 @@ it in the `build-system` section of the `pyproject.toml` file like so:
 
 ```toml
 [build-system]
-requires = ["poetry_core>=1.0.0"]
+requires = ["poetry-core>=1.0.0"]
 build-backend = "poetry.core.masonry.api"
 ```
 
diff --git a/vendor/poetry/docs/repositories.md b/vendor/poetry/docs/repositories.md
index f2abe573..1b4128fa 100644
--- a/vendor/poetry/docs/repositories.md
+++ b/vendor/poetry/docs/repositories.md
@@ -11,7 +11,8 @@ menu:
 
 # Repositories
 
-## Using the PyPI repository
+Poetry supports the use of [PyPI](https://pypi.org) and private repositories for discovery of
+packages as well as for publishing your projects.
 
 By default, Poetry is configured to use the [PyPI](https://pypi.org) repository,
 for package installation and publishing.
@@ -21,34 +22,299 @@ on PyPI.
 
 This represents most cases and will likely be enough for most users.
 
+### Private Repository Example
 
-## Using a private repository
+#### Installing from private package sources
+By default, Poetry discovers and installs packages from [PyPI](https://pypi.org). But what if you
+want to install a dependency for your project from a [simple API repository](#simple-api-repository)?
+Let's do it.
 
-However, at times, you may need to keep your package private while still being
-able to share it with your teammates. In this case, you will need to use a private
-repository.
+First, [configure](#project-configuration) the [package source](#package-sources) repository for your
+project.
 
-### Adding a repository
+```bash
+poetry source add foo https://pypi.example.org/simple/
+```
+
+Then, assuming the repository requires authentication, configure credentials for it.
+
+```bash
+poetry config http-basic.foo <username> <password>
+```
+
+{{% warning %}}
+Depending on your system configuration, credentials might be saved in your command line history.
+Many shells do not save commands to history when they are prefixed by a space character. For more information, please refer to your shell's documentation.
+{{% /warning %}}
+
+Once this is done, you can add dependencies to your project from this source.
+
+```bash
+poetry add --source foo private-package
+```
+
+#### Publishing to a private repository
+
+Great, now all that is left is to publish your package. Assuming you'd want to share it privately
+with your team, you can configure the
+[Upload API](https://warehouse.pypa.io/api-reference/legacy.html#upload-api) endpoint for your
+[publishable repository](#publishable-repositories).
+
+```bash
+poetry config repositories.foo https://pypi.example.org/legacy/
+```
+
+{{% note %}}
+
+If you need to use a different credential for your [package source](#package-sources), then it is
+recommended to use a different name for your publishing repository.
+
+```bash
+poetry config repositories.foo-pub https://pypi.example.org/legacy/
+poetry config http-basic.foo-pub <username> <password>
+```
+
+{{% /note %}}
+
+Now, all that is left is to build and publish your project using the
+[`publish`]({{< relref "cli#publish" >}}) command.
+
+```bash
+poetry publish --build --repository foo-pub
+```
+
+## Package Sources
+
+By default, Poetry is configured to use the Python ecosystem's canonical package index
+[PyPI](https://pypi.org).
+
+{{% note %}}
+
+With the exception of the implicitly configured source for [PyPI](https://pypi.org) named `pypi`,
+package sources are local to a project and must be configured within the project's
+[`pyproject.toml`]({{< relref "pyproject" >}}) file. This is **not** the same configuration used
+when publishing a package.
+
+{{% /note %}}
+
+### Project Configuration
+
+These package sources may be managed using the [`source`]({{< relref "cli#source" >}}) command for
+your project.
+
+```bash
+poetry source add foo https://foo.bar/simple/
+```
+
+This will generate the following configuration snippet in your
+[`pyproject.toml`]({{< relref "pyproject" >}}) file.
+
+```toml
+[[tool.poetry.source]]
+name = "foo"
+url = "https://foo.bar/simple/"
+default = false
+secondary = false
+```
+
+{{% warning %}}
+
+If package sources are defined for a project, these will take precedence over
+[PyPI](https://pypi.org). If you do not want this to be the case, you should declare **all** package
+sources to be [secondary](#secondary-package-sources).
+
+{{% /warning %}}
+
+See [Supported Package Sources](#supported-package-sources) for source type specific information.
+
+{{% note %}}
+
+If your package source requires [credentials](#configuring-credentials) or
+[certificates](#certificates), please refer to the relevant sections below.
+
+{{% /note %}}
+
+#### Default Package Source
+
+By default, Poetry configures [PyPI](https://pypi.org) as the default package source for your
+project. You can alter this behaviour and look up packages exclusively from the configured
+package sources by adding a **single** source with `default = true`.
+
+```bash
+poetry source add --default foo https://foo.bar/simple/
+```
+
+{{% warning %}}
+
+Configuring a custom package source as default will effectively disable [PyPI](https://pypi.org)
+as a package source for your project.
+
+{{% /warning %}}
+
+#### Secondary Package Sources
+
+If package sources are configured as secondary, they are simply given a lower priority when
+selecting a compatible package distribution that also exists in your default package
+source.
+
+You can configure a package source as a secondary source with `secondary = true` in your package
+source configuration.
+
+```bash
+poetry source add --secondary foo https://foo.bar/simple/
+```
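+
+This mirrors the configuration snippet generated earlier, now with `secondary = true`:
+
+```toml
+[[tool.poetry.source]]
+name = "foo"
+url = "https://foo.bar/simple/"
+default = false
+secondary = true
+```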
+
+There can be more than one secondary package source.
+
+{{% note %}}
+
+All package sources (including secondary sources) will be searched during the package lookup
+process. These network requests will occur for all sources, regardless of whether the package is
+found at one or more sources.
+
+If you wish to avoid this, you may explicitly specify which source to search in for a particular
+package.
+
+```bash
+poetry add --source pypi httpx
+```
+
+{{% /note %}}
+
+### Supported Package Sources
+
+#### Python Package Index (PyPI)
+
+Poetry interacts with [PyPI](https://pypi.org) via its
+[JSON API](https://warehouse.pypa.io/api-reference/json.html). This is used to retrieve a requested
+package's versions, metadata, files, etc.
+
+{{% note %}}
+
+If the package's published metadata is invalid, Poetry will download the available bdist/sdist to
+inspect it locally to identify the relevant metadata.
+
+{{% /note %}}
 
-Adding a new repository is easy with the `config` command.
+If you want to explicitly select a package from [PyPI](https://pypi.org), you can use the `--source`
+option with the [`add`]({{< relref "cli#add" >}}) command, as shown below.
 
 ```bash
-poetry config repositories.foo https://foo.bar/simple/
+poetry add --source pypi httpx@^0.22.0
 ```
 
-This will set the url for repository `foo` to `https://foo.bar/simple/`.
+This will generate the following configuration snippet in your `pyproject.toml` file.
 
-### Configuring credentials
+```toml
+httpx = {version = "^0.22.0", source = "pypi"}
+```
+
+{{% warning %}}
+
+If any source within a project is configured with `default = true`, the implicit `pypi` source will
+be disabled and not used for any packages.
+
+{{% /warning %}}
+
+#### Simple API Repository
+
+Poetry can fetch and install package dependencies from public or private custom repositories that
+implement the simple repository API as described in [PEP 503](https://peps.python.org/pep-0503/).
+
+{{% warning %}}
+
+When using sources that distribute large wheels without providing a file checksum in the file URLs,
+Poetry will download each candidate wheel at least once in order to generate the checksum. This can
+manifest as long dependency resolution times when adding packages from this source.
+
+{{% /warning %}}
+
+These package sources may be configured via the following command in your project.
+
+```bash
+poetry source add testpypi https://test.pypi.org/simple/
+```
+
+{{% note %}}
+
+Note the trailing `/simple/`. This is important when configuring
+[PEP 503](https://peps.python.org/pep-0503/) compliant package sources.
+
+{{% /note %}}
+
+In addition to [PEP 503](https://peps.python.org/pep-0503/), Poetry can also handle simple API
+repositories that implement [PEP 658](https://peps.python.org/pep-0658/) (*Introduced in 1.2.0*).
+This is helpful in reducing dependency resolution time for packages from these sources as Poetry can
+avoid having to download each candidate distribution in order to determine the associated metadata.
+
+{{% note %}}
+
+*Why does Poetry insist on downloading all candidate distributions for all platforms when metadata
+is not available?*
+
+The need for this stems from the fact that Poetry's lock file is platform-agnostic. This means, in
+order to resolve dependencies for a project, Poetry needs metadata for all platform-specific
+distributions. And when this metadata is not readily available, downloading the distribution and
+inspecting it locally is the only remaining option.
+
+{{% /note %}}
+
+#### Single Page Link Source
+
+*Introduced in 1.2.0*
+
+Some projects choose to release their binary distributions via a single page link source that
+partially follows the structure of a package page in [PEP 503](https://peps.python.org/pep-0503/).
+
+These package sources may be configured via the following command in your project.
+
+```bash
+poetry source add jax https://storage.googleapis.com/jax-releases/jax_releases.html
+```
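+
+Once added, packages from this source can be selected explicitly, e.g.
+`poetry add --source jax jaxlib` (the package name here is illustrative).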
+
+{{% note %}}
+
+All caveats regarding slower resolution times described for simple API repositories do apply here as
+well.
+
+{{% /note %}}
+
+
+## Publishable Repositories
+
+Unlike [package sources](#package-sources), Poetry treats repositories to which you publish
+packages as user-specific, not project-specific, configuration. Today, Poetry only supports the
+[Legacy Upload API](https://warehouse.pypa.io/api-reference/legacy.html#upload-api) when publishing
+your project.
+
+These are configured using the [`config`]({{< relref "cli#config" >}}) command, under the
+`repositories` key.
+
+```bash
+poetry config repositories.testpypi https://test.pypi.org/legacy/
+```
+
+{{% note %}}
+
+[Legacy Upload API](https://warehouse.pypa.io/api-reference/legacy.html#upload-api) URLs are
+typically different from the simple API URLs provided by the same repository. You'll note that
+in the example of [Test PyPI](https://test.pypi.org/), both the host (`test.pypi.org`) and
+the path (`/legacy`) differ from its simple API (`https://test.pypi.org/simple`).
+
+{{% /note %}}
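+
+Once configured, you can target this repository when publishing your project:
+
+```bash
+poetry publish --repository testpypi
+```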
+
+## Configuring Credentials
 
 If you want to store your credentials for a specific repository, you can do so easily:
 
 ```bash
-poetry config http-basic.foo username password
+poetry config http-basic.foo <username> <password>
 ```
 
 If you do not specify the password you will be prompted to write it.
 
 {{% note %}}
+
 To publish to PyPI, you can set your credentials for the repository named `pypi`.
 
 Note that it is recommended to use [API tokens](https://pypi.org/help/#apitoken)
@@ -59,12 +325,13 @@ Once you have created a new token, you can tell Poetry to use it:
 poetry config pypi-token.pypi my-token
 ```
 
-If you still want to use you username and password, you can do so with the following
+If you still want to use your username and password, you can do so with the following
 call to `config`.
 
 ```bash
-poetry config http-basic.pypi username password
+poetry config http-basic.pypi <username> <password>
 ```
+
 {{% /note %}}
 
 You can also specify the username and password when using the `publish` command
@@ -74,18 +341,43 @@ If a system keyring is available and supported, the password is stored to and re
 
 Keyring support is enabled using the [keyring library](https://pypi.org/project/keyring/). For more information on supported backends refer to the [library documentation](https://keyring.readthedocs.io/en/latest/?badge=latest).
 
+{{% note %}}
+
+Poetry will fall back to pip-style use of keyring so that backends like
+Microsoft's [artifacts-keyring](https://pypi.org/project/artifacts-keyring/) get a chance to retrieve
+valid credentials. The backend will need to be properly installed into Poetry's virtualenv,
+preferably by installing a plugin.
+
+If you are letting Poetry manage your virtual environments, you will want a virtualenv
+seeder installed in Poetry's virtualenv that installs the desired keyring backend
+during `poetry install`. To again use Azure DevOps as an example: [azure-devops-artifacts-helpers](https://pypi.org/project/azure-devops-artifacts-helpers/)
+provides such a seeder. This is of course best achieved by installing a Poetry plugin
+if one exists for your use case instead of doing it yourself.
+
+{{% /note %}}
+
 Alternatively, you can use environment variables to provide the credentials:
 
 ```bash
 export POETRY_PYPI_TOKEN_PYPI=my-token
-export POETRY_HTTP_BASIC_PYPI_USERNAME=username
-export POETRY_HTTP_BASIC_PYPI_PASSWORD=password
+export POETRY_HTTP_BASIC_PYPI_USERNAME=<username>
+export POETRY_HTTP_BASIC_PYPI_PASSWORD=<password>
 ```
 
 See [Using environment variables]({{< relref "configuration#using-environment-variables" >}}) for more information
 on how to configure Poetry with environment variables.
 
-#### Custom certificate authority and mutual TLS authentication
+If your password starts with a dash (e.g. a randomly generated token in a CI environment), it will
+be parsed as a command line option instead of a password.
+You can avoid this by adding a double dash (`--`) so that no following argument is parsed as an option.
+
+```bash
+poetry config -- http-basic.pypi myUsername -myPasswordStartingWithDash
+```
+
+## Certificates
+
+### Custom certificate authority and mutual TLS authentication
 
 Poetry supports repositories that are secured by a custom certificate authority as well as those that require
 certificate-based client authentication.  The following will configure the "foo" repository to validate the repository's
@@ -97,53 +389,42 @@ poetry config certificates.foo.cert /path/to/ca.pem
 poetry config certificates.foo.client-cert /path/to/client.pem
 ```
 
-### Install dependencies from a private repository
-
-Now that you can publish to your private repository, you need to be able to
-install dependencies from it.
-
-For that, you have to edit your `pyproject.toml` file, like so
-
-```toml
-[[tool.poetry.source]]
-name = "foo"
-url = "https://foo.bar/simple/"
-```
-
-From now on, Poetry will also look for packages in your private repository.
-
 {{% note %}}
-Any custom repository will have precedence over PyPI.
-
-If you still want PyPI to be your primary source for your packages
-you can declare custom repositories as secondary.
+The value of `certificates.<repository>.cert` can be set to `false` if certificate verification
+needs to be skipped. This is useful for cases where a package source with self-signed certificates
+is used.
 
-```toml
-[[tool.poetry.source]]
-name = "foo"
-url = "https://foo.bar/simple/"
-secondary = true
+```bash
+poetry config certificates.foo.cert false
 ```
+
+{{% warning %}}
+Disabling certificate verification is not recommended as it does not conform to security
+best practices.
+{{% /warning %}}
 {{% /note %}}
 
-If your private repository requires HTTP Basic Auth be sure to add the username and
-password to your `http-basic` configuration using the example above (be sure to use the
-same name that is in the `tool.poetry.source` section). If your repository requires either
-a custom certificate authority or client certificates, similarly refer to the example above to configure the
-`certificates` section. Poetry will use these values to authenticate to your private repository when downloading or
-looking for packages.
+## Caches
 
+Poetry employs multiple caches for package sources in order to improve user experience and avoid duplicate network
+requests.
 
-### Disabling the PyPI repository
+The first-level cache is a [Cache-Control](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control)
+header-based cache for almost all HTTP requests.
 
-If you want your packages to be exclusively looked up from a private
-repository, you can set it as the default one by using the `default` keyword
+Further, every HTTP-backed package source caches metadata associated with a package once it is fetched or generated.
+Additionally, downloaded files (package distributions) are also cached.
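+
+You can inspect and clear these caches with the `poetry cache` subcommands, for example:
+
+```bash
+poetry cache list
+poetry cache clear pypi --all
+```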
 
-```toml
-[[tool.poetry.source]]
-name = "foo"
-url = "https://foo.bar/simple/"
-default = true
+## Debugging Issues
+
+If you encounter issues with package sources, one of the simplest debugging steps is to rerun
+your command with the `--no-cache` flag.
+
+```bash
+poetry --no-cache add pycowsay
 ```
 
-A default source will also be the fallback source if you add other sources.
+If this solves your issue, you can consider clearing your cache using the [`cache`]({{< relref "cli#cache-clear" >}})
+command.
+
+Alternatively, you can enable very verbose logging (`-vvv`) along with `--no-cache` to see the
+network requests being made in the logs.
diff --git a/vendor/poetry/get-poetry.py b/vendor/poetry/get-poetry.py
index c14c9d4d..51eacadb 100644
--- a/vendor/poetry/get-poetry.py
+++ b/vendor/poetry/get-poetry.py
@@ -125,8 +125,8 @@ def is_decorated():
     if platform.system().lower() == "windows":
         return (
             os.getenv("ANSICON") is not None
-            or "ON" == os.getenv("ConEmuANSI")
-            or "xterm" == os.getenv("Term")
+            or os.getenv("ConEmuANSI") == "ON"
+            or os.getenv("Term") == "xterm"
         )
 
     if not hasattr(sys.stdout, "fileno"):
@@ -309,7 +309,6 @@ def expanduser(path):
 
 
 class Installer:
-
     CURRENT_PYTHON = sys.executable
     CURRENT_PYTHON_VERSION = sys.version_info[:2]
     METADATA_URL = "https://pypi.org/pypi/poetry/json"
@@ -363,7 +362,7 @@ def run(self):
                 version, upgrade=current_version is not None, file=self._offline_file
             )
         except subprocess.CalledProcessError as e:
-            print(colorize("error", "An error has occured: {}".format(str(e))))
+            print(colorize("error", "An error has occurred: {}".format(str(e))))
             print(e.output.decode())
 
             return e.returncode
@@ -449,6 +448,35 @@ def _compare_versions(x, y):
 
                 break
 
+        def _is_supported(x):
+            mx = self.VERSION_REGEX.match(x)
+            vx = tuple(int(p) for p in mx.groups()[:3]) + (mx.group(5),)
+            return vx < (1, 2, 0)
+
+        if not _is_supported(version):
+            print(
+                colorize(
+                    "error",
+                    "Version {version} does not support this installation method."
+                    " Please specify a version prior to 1.2.0a1 explicitly using the"
+                    " '--version' option.\nPlease see"
+                    " https://python-poetry.org/blog/announcing-poetry-1-2-0a1.html#deprecation-of-the-get-poetry-py-script"
+                    " for more information.".format(version=version),
+                )
+            )
+            return None, None
+
+        print(
+            colorize(
+                "warning",
+                "This installer is deprecated. Poetry versions installed using this"
+                " script will not be able to use 'self update' command to upgrade to"
+                " 1.2.0a1 or later. It is recommended to use"
+                " https://install.python-poetry.org instead. Instructions are"
+                " available at https://python-poetry.org/docs/#installation",
+            )
+        )
+
         current_version = None
         if os.path.exists(POETRY_LIB):
             with open(
@@ -638,7 +666,7 @@ def extract_lib(self, filename):
 
     def _which_python(self):
         """Decides which python executable we'll embed in the launcher script."""
-        allowed_executables = ["python", "python3"]
+        allowed_executables = ["python3", "python"]
         if WINDOWS:
             allowed_executables += ["py.exe -3", "py.exe -2"]
 
@@ -658,7 +686,8 @@ def _which_python(self):
                 return executable
 
             if fallback is None:
-                # keep this one as the fallback; it was the first valid executable we found.
+                # keep this one as the fallback; it was the first valid executable we
+                # found.
                 fallback = executable
 
         if fallback is None:
@@ -746,7 +775,8 @@ def add_to_fish_path(self):
             print(
                 colorize(
                     "warning",
-                    "\nUnable to get the PATH value. It will not be updated automatically.",
+                    "\nUnable to get the PATH value. It will not be updated"
+                    " automatically.",
                 )
             )
             self._modify_path = False
@@ -781,7 +811,8 @@ def add_to_windows_path(self):
             print(
                 colorize(
                     "warning",
-                    "Unable to get the PATH value. It will not be updated automatically",
+                    "Unable to get the PATH value. It will not be updated"
+                    " automatically",
                 )
             )
             self._modify_path = False
@@ -824,7 +855,7 @@ def set_windows_path_var(self, value):
             HWND_BROADCAST,
             WM_SETTINGCHANGE,
             0,
-            u"Environment",
+            "Environment",
             SMTO_ABORTIFHUNG,
             5000,
             ctypes.byref(result),
@@ -903,7 +934,7 @@ def get_unix_profiles(self):
 
         if "zsh" in SHELL:
             zdotdir = os.getenv("ZDOTDIR", HOME)
-            profiles.append(os.path.join(zdotdir, ".zprofile"))
+            profiles.append(os.path.join(zdotdir, ".zshrc"))
 
         bash_profile = os.path.join(HOME, ".bash_profile")
         if os.path.exists(bash_profile):
@@ -1047,8 +1078,10 @@ def main():
         "--file",
         dest="file",
         action="store",
-        help="Install from a local file instead of fetching the latest version "
-        "of Poetry available online.",
+        help=(
+            "Install from a local file instead of fetching the latest version "
+            "of Poetry available online."
+        ),
     )
 
     args = parser.parse_args()
diff --git a/vendor/poetry/install-poetry.py b/vendor/poetry/install-poetry.py
new file mode 100644
index 00000000..a5e56fd1
--- /dev/null
+++ b/vendor/poetry/install-poetry.py
@@ -0,0 +1,908 @@
+"""
+This script will install Poetry and its dependencies.
+
+It does, in order:
+
+  - Creates a virtual environment using venv (or virtualenv zipapp) in the correct OS data dir which will be
+      - `%APPDATA%\\pypoetry` on Windows
+      - `~/Library/Application Support/pypoetry` on macOS
+      - `${XDG_DATA_HOME}/pypoetry` (or `~/.local/share/pypoetry` if it's not set) on UNIX systems
+      - In `${POETRY_HOME}` if it's set.
+  - Installs the latest or given version of Poetry inside this virtual environment.
+  - Installs a `poetry` script in the Python user directory (or `${POETRY_HOME}/bin` if `POETRY_HOME` is set).
+  - On failure, the error log is written to poetry-installer-error-*.log and any previously existing environment
+    is restored.
+"""
+
+import argparse
+import json
+import os
+import re
+import shutil
+import site
+import subprocess
+import sys
+import tempfile
+
+from contextlib import closing
+from contextlib import contextmanager
+from functools import cmp_to_key
+from io import UnsupportedOperation
+from pathlib import Path
+from typing import Iterator
+from typing import Optional
+from urllib.request import Request
+from urllib.request import urlopen
+
+
+SHELL = os.getenv("SHELL", "")
+WINDOWS = sys.platform.startswith("win") or (sys.platform == "cli" and os.name == "nt")
+MACOS = sys.platform == "darwin"
+
+FOREGROUND_COLORS = {
+    "black": 30,
+    "red": 31,
+    "green": 32,
+    "yellow": 33,
+    "blue": 34,
+    "magenta": 35,
+    "cyan": 36,
+    "white": 37,
+}
+
+BACKGROUND_COLORS = {
+    "black": 40,
+    "red": 41,
+    "green": 42,
+    "yellow": 43,
+    "blue": 44,
+    "magenta": 45,
+    "cyan": 46,
+    "white": 47,
+}
+
+OPTIONS = {"bold": 1, "underscore": 4, "blink": 5, "reverse": 7, "conceal": 8}
+
+
+def style(fg, bg, options):
+    codes = []
+
+    if fg:
+        codes.append(FOREGROUND_COLORS[fg])
+
+    if bg:
+        codes.append(BACKGROUND_COLORS[bg])
+
+    if options:
+        if not isinstance(options, (list, tuple)):
+            options = [options]
+
+        for option in options:
+            codes.append(OPTIONS[option])
+
+    return "\033[{}m".format(";".join(map(str, codes)))
+
+
+STYLES = {
+    "info": style("cyan", None, None),
+    "comment": style("yellow", None, None),
+    "success": style("green", None, None),
+    "error": style("red", None, None),
+    "warning": style("yellow", None, None),
+    "b": style(None, None, ("bold",)),
+}
+
+
+def is_decorated():
+    if WINDOWS:
+        return (
+            os.getenv("ANSICON") is not None
+            or os.getenv("ConEmuANSI") == "ON"
+            or os.getenv("Term") == "xterm"
+        )
+
+    if not hasattr(sys.stdout, "fileno"):
+        return False
+
+    try:
+        return os.isatty(sys.stdout.fileno())
+    except UnsupportedOperation:
+        return False
+
+
+def is_interactive():
+    if not hasattr(sys.stdin, "fileno"):
+        return False
+
+    try:
+        return os.isatty(sys.stdin.fileno())
+    except UnsupportedOperation:
+        return False
+
+
+def colorize(style, text):
+    if not is_decorated():
+        return text
+
+    return "{}{}\033[0m".format(STYLES[style], text)
+
+
+def string_to_bool(value):
+    value = value.lower()
+
+    return value in {"true", "1", "y", "yes"}
+
+
+def data_dir(version: Optional[str] = None) -> Path:
+    if os.getenv("POETRY_HOME"):
+        return Path(os.getenv("POETRY_HOME")).expanduser()
+
+    if WINDOWS:
+        const = "CSIDL_APPDATA"
+        path = os.path.normpath(_get_win_folder(const))
+        path = os.path.join(path, "pypoetry")
+    elif MACOS:
+        path = os.path.expanduser("~/Library/Application Support/pypoetry")
+    else:
+        path = os.getenv("XDG_DATA_HOME", os.path.expanduser("~/.local/share"))
+        path = os.path.join(path, "pypoetry")
+
+    if version:
+        path = os.path.join(path, version)
+
+    return Path(path)
+
+
+def bin_dir(version: Optional[str] = None) -> Path:
+    if os.getenv("POETRY_HOME"):
+        return Path(os.getenv("POETRY_HOME"), "bin").expanduser()
+
+    user_base = site.getuserbase()
+
+    if WINDOWS:
+        bin_dir = os.path.join(user_base, "Scripts")
+    else:
+        bin_dir = os.path.join(user_base, "bin")
+
+    return Path(bin_dir)
+
+
+def _get_win_folder_from_registry(csidl_name):
+    import winreg as _winreg
+
+    shell_folder_name = {
+        "CSIDL_APPDATA": "AppData",
+        "CSIDL_COMMON_APPDATA": "Common AppData",
+        "CSIDL_LOCAL_APPDATA": "Local AppData",
+    }[csidl_name]
+
+    key = _winreg.OpenKey(
+        _winreg.HKEY_CURRENT_USER,
+        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders",
+    )
+    dir, type = _winreg.QueryValueEx(key, shell_folder_name)
+
+    return dir
+
+
+def _get_win_folder_with_ctypes(csidl_name):
+    import ctypes
+
+    csidl_const = {
+        "CSIDL_APPDATA": 26,
+        "CSIDL_COMMON_APPDATA": 35,
+        "CSIDL_LOCAL_APPDATA": 28,
+    }[csidl_name]
+
+    buf = ctypes.create_unicode_buffer(1024)
+    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
+
+    # Downgrade to short path name if it has highbit chars. See
+    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
+    has_high_char = False
+    for c in buf:
+        if ord(c) > 255:
+            has_high_char = True
+            break
+    if has_high_char:
+        buf2 = ctypes.create_unicode_buffer(1024)
+        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
+            buf = buf2
+
+    return buf.value
+
+
+if WINDOWS:
+    try:
+        from ctypes import windll  # noqa: F401
+
+        _get_win_folder = _get_win_folder_with_ctypes
+    except ImportError:
+        _get_win_folder = _get_win_folder_from_registry
+
+
+PRE_MESSAGE = """# Welcome to {poetry}!
+
+This will download and install the latest version of {poetry},
+a dependency and package manager for Python.
+
+It will add the `poetry` command to {poetry}'s bin directory, located at:
+
+{poetry_home_bin}
+
+You can uninstall at any time by executing this script with the --uninstall option,
+and these changes will be reverted.
+"""
+
+POST_MESSAGE = """{poetry} ({version}) is installed now. Great!
+
+You can test that everything is set up by executing:
+
+`{test_command}`
+"""
+
+POST_MESSAGE_NOT_IN_PATH = """{poetry} ({version}) is installed now. Great!
+
+To get started you need {poetry}'s bin directory ({poetry_home_bin}) in your `PATH`
+environment variable.
+{configure_message}
+Alternatively, you can call {poetry} explicitly with `{poetry_executable}`.
+
+You can test that everything is set up by executing:
+
+`{test_command}`
+"""
+
+POST_MESSAGE_CONFIGURE_UNIX = """
+Add `export PATH="{poetry_home_bin}:$PATH"` to your shell configuration file.
+"""
+
+POST_MESSAGE_CONFIGURE_FISH = """
+You can execute `set -U fish_user_paths {poetry_home_bin} $fish_user_paths`
+"""
+
+POST_MESSAGE_CONFIGURE_WINDOWS = """"""
+
+
+class PoetryInstallationError(RuntimeError):
+    def __init__(self, return_code: int = 0, log: Optional[str] = None):
+        super(PoetryInstallationError, self).__init__()
+        self.return_code = return_code
+        self.log = log
+
+
+class VirtualEnvironment:
+    def __init__(self, path: Path) -> None:
+        self._path = path
+        # str is required for compatibility with subprocess run on CPython <= 3.7 on
+        # Windows
+        self._python = str(
+            self._path.joinpath("Scripts/python.exe" if WINDOWS else "bin/python")
+        )
+
+    @property
+    def path(self):
+        return self._path
+
+    @classmethod
+    def make(cls, target: Path) -> "VirtualEnvironment":
+        try:
+            import venv
+
+            builder = venv.EnvBuilder(clear=True, with_pip=True, symlinks=False)
+            builder.ensure_directories(target)
+            builder.create(target)
+        except ImportError:
+            # fallback to using virtualenv package if venv is not available, eg: ubuntu
+            python_version = f"{sys.version_info.major}.{sys.version_info.minor}"
+            virtualenv_bootstrap_url = (
+                f"https://bootstrap.pypa.io/virtualenv/{python_version}/virtualenv.pyz"
+            )
+
+            with tempfile.TemporaryDirectory(prefix="poetry-installer") as temp_dir:
+                virtualenv_pyz = Path(temp_dir) / "virtualenv.pyz"
+                request = Request(
+                    virtualenv_bootstrap_url, headers={"User-Agent": "Python Poetry"}
+                )
+                virtualenv_pyz.write_bytes(urlopen(request).read())
+                cls.run(
+                    sys.executable, virtualenv_pyz, "--clear", "--always-copy", target
+                )
+
+        # We add a special file so that Poetry can detect
+        # its own virtual environment
+        target.joinpath("poetry_env").touch()
+
+        env = cls(target)
+
+        # we do this here to ensure that outdated system default pip does not trigger
+        # older bugs
+        env.pip("install", "--disable-pip-version-check", "--upgrade", "pip")
+
+        return env
+
+    @staticmethod
+    def run(*args, **kwargs) -> subprocess.CompletedProcess:
+        completed_process = subprocess.run(
+            args,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT,
+            **kwargs,
+        )
+        if completed_process.returncode != 0:
+            raise PoetryInstallationError(
+                return_code=completed_process.returncode,
+                log=completed_process.stdout.decode(),
+            )
+        return completed_process
+
+    def python(self, *args, **kwargs) -> subprocess.CompletedProcess:
+        return self.run(self._python, *args, **kwargs)
+
+    def pip(self, *args, **kwargs) -> subprocess.CompletedProcess:
+        return self.python("-m", "pip", "--isolated", *args, **kwargs)
+
+
+class Cursor:
+    def __init__(self) -> None:
+        self._output = sys.stdout
+
+    def move_up(self, lines: int = 1) -> "Cursor":
+        self._output.write("\x1b[{}A".format(lines))
+
+        return self
+
+    def move_down(self, lines: int = 1) -> "Cursor":
+        self._output.write("\x1b[{}B".format(lines))
+
+        return self
+
+    def move_right(self, columns: int = 1) -> "Cursor":
+        self._output.write("\x1b[{}C".format(columns))
+
+        return self
+
+    def move_left(self, columns: int = 1) -> "Cursor":
+        self._output.write("\x1b[{}D".format(columns))
+
+        return self
+
+    def move_to_column(self, column: int) -> "Cursor":
+        self._output.write("\x1b[{}G".format(column))
+
+        return self
+
+    def move_to_position(self, column: int, row: int) -> "Cursor":
+        self._output.write("\x1b[{};{}H".format(row + 1, column))
+
+        return self
+
+    def save_position(self) -> "Cursor":
+        self._output.write("\x1b7")
+
+        return self
+
+    def restore_position(self) -> "Cursor":
+        self._output.write("\x1b8")
+
+        return self
+
+    def hide(self) -> "Cursor":
+        self._output.write("\x1b[?25l")
+
+        return self
+
+    def show(self) -> "Cursor":
+        self._output.write("\x1b[?25h\x1b[?0c")
+
+        return self
+
+    def clear_line(self) -> "Cursor":
+        """
+        Clears all the output from the current line.
+        """
+        self._output.write("\x1b[2K")
+
+        return self
+
+    def clear_line_after(self) -> "Cursor":
+        """
+        Clears all the output from the current line after the current position.
+        """
+        self._output.write("\x1b[K")
+
+        return self
+
+    def clear_output(self) -> "Cursor":
+        """
+        Clears all the output from the cursors' current position
+        to the end of the screen.
+        """
+        self._output.write("\x1b[0J")
+
+        return self
+
+    def clear_screen(self) -> "Cursor":
+        """
+        Clears the entire screen.
+        """
+        self._output.write("\x1b[2J")
+
+        return self
+
+
+class Installer:
+    METADATA_URL = "https://pypi.org/pypi/poetry/json"
+    VERSION_REGEX = re.compile(
+        r"v?(\d+)(?:\.(\d+))?(?:\.(\d+))?(?:\.(\d+))?"
+        "("
+        "[._-]?"
+        r"(?:(stable|beta|b|rc|RC|alpha|a|patch|pl|p)((?:[.-]?\d+)*)?)?"
+        "([.-]?dev)?"
+        ")?"
+        r"(?:\+[^\s]+)?"
+    )
+
+    def __init__(
+        self,
+        version: Optional[str] = None,
+        preview: bool = False,
+        force: bool = False,
+        accept_all: bool = False,
+        git: Optional[str] = None,
+        path: Optional[str] = None,
+    ) -> None:
+        self._version = version
+        self._preview = preview
+        self._force = force
+        self._accept_all = accept_all
+        self._git = git
+        self._path = path
+        self._data_dir = data_dir()
+        self._bin_dir = bin_dir()
+        self._cursor = Cursor()
+
+    def allows_prereleases(self) -> bool:
+        return self._preview
+
+    def run(self) -> int:
+        if self._git:
+            version = self._git
+        elif self._path:
+            version = self._path
+        else:
+            version, current_version = self.get_version()
+
+        if version is None:
+            return 0
+
+        self.display_pre_message()
+        self.ensure_directories()
+
+        def _is_self_upgrade_supported(x):
+            mx = self.VERSION_REGEX.match(x)
+
+            if mx is None:
+                # the version is not semver, perhaps scm or file, we assume upgrade is
+                # supported
+                return True
+
+            vx = tuple(int(p) for p in mx.groups()[:3]) + (mx.group(5),)
+            return vx >= (1, 1, 7)
+
+        if version and not _is_self_upgrade_supported(version):
+            self._write(
+                colorize(
+                    "warning",
+                    f"You are installing {version}. When using the current installer,"
+                    " this version does not support updating using the 'self update'"
+                    " command. Please use 1.1.7 or later.",
+                )
+            )
+            if not self._accept_all:
+                continue_install = input("Do you want to continue? ([y]/n) ") or "y"
+                if continue_install.lower() in {"n", "no"}:
+                    return 0
+
+        try:
+            self.install(version)
+        except subprocess.CalledProcessError as e:
+            raise PoetryInstallationError(
+                return_code=e.returncode, log=e.output.decode()
+            )
+
+        self._write("")
+        self.display_post_message(version)
+
+        return 0
+
+    def install(self, version, upgrade=False):
+        """
+        Installs Poetry in $POETRY_HOME.
+        """
+        self._write(
+            "Installing {} ({})".format(
+                colorize("info", "Poetry"), colorize("info", version)
+            )
+        )
+
+        with self.make_env(version) as env:
+            self.install_poetry(version, env)
+            self.make_bin(version, env)
+            self._data_dir.joinpath("VERSION").write_text(version)
+            self._install_comment(version, "Done")
+
+            return 0
+
+    def uninstall(self) -> int:
+        if not self._data_dir.exists():
+            self._write(
+                "{} is not currently installed.".format(colorize("info", "Poetry"))
+            )
+
+            return 1
+
+        version = None
+        if self._data_dir.joinpath("VERSION").exists():
+            version = self._data_dir.joinpath("VERSION").read_text().strip()
+
+        if version:
+            self._write(
+                "Removing {} ({})".format(
+                    colorize("info", "Poetry"), colorize("b", version)
+                )
+            )
+        else:
+            self._write("Removing {}".format(colorize("info", "Poetry")))
+
+        shutil.rmtree(str(self._data_dir))
+        for script in ["poetry", "poetry.bat"]:
+            if self._bin_dir.joinpath(script).exists():
+                self._bin_dir.joinpath(script).unlink()
+
+        return 0
+
+    def _install_comment(self, version: str, message: str):
+        self._overwrite(
+            "Installing {} ({}): {}".format(
+                colorize("info", "Poetry"),
+                colorize("b", version),
+                colorize("comment", message),
+            )
+        )
+
+    @contextmanager
+    def make_env(self, version: str) -> Iterator[VirtualEnvironment]:
+        env_path = self._data_dir.joinpath("venv")
+        env_path_saved = env_path.with_suffix(".save")
+
+        if env_path.exists():
+            self._install_comment(version, "Saving existing environment")
+            if env_path_saved.exists():
+                shutil.rmtree(env_path_saved)
+            shutil.move(env_path, env_path_saved)
+
+        try:
+            self._install_comment(version, "Creating environment")
+            yield VirtualEnvironment.make(env_path)
+        except Exception as e:
+            if env_path.exists():
+                self._install_comment(
+                    version, "An error occurred. Removing partial environment."
+                )
+                shutil.rmtree(env_path)
+
+            if env_path_saved.exists():
+                self._install_comment(
+                    version, "Restoring previously saved environment."
+                )
+                shutil.move(env_path_saved, env_path)
+
+            raise e
+        else:
+            if env_path_saved.exists():
+                shutil.rmtree(env_path_saved, ignore_errors=True)
+
+    def make_bin(self, version: str, env: VirtualEnvironment) -> None:
+        self._install_comment(version, "Creating script")
+        self._bin_dir.mkdir(parents=True, exist_ok=True)
+
+        script = "poetry"
+        script_bin = "bin"
+        if WINDOWS:
+            script = "poetry.exe"
+            script_bin = "Scripts"
+        target_script = env.path.joinpath(script_bin, script)
+
+        if self._bin_dir.joinpath(script).exists():
+            self._bin_dir.joinpath(script).unlink()
+
+        try:
+            self._bin_dir.joinpath(script).symlink_to(target_script)
+        except OSError:
+            # This can happen if the user
+            # does not have the correct permission on Windows
+            shutil.copy(target_script, self._bin_dir.joinpath(script))
+
+    def install_poetry(self, version: str, env: VirtualEnvironment) -> None:
+        self._install_comment(version, "Installing Poetry")
+
+        if self._git:
+            specification = "git+" + version
+        elif self._path:
+            specification = version
+        else:
+            specification = f"poetry=={version}"
+
+        env.pip("install", specification)
+
+    def display_pre_message(self) -> None:
+        kwargs = {
+            "poetry": colorize("info", "Poetry"),
+            "poetry_home_bin": colorize("comment", self._bin_dir),
+        }
+        self._write(PRE_MESSAGE.format(**kwargs))
+
+    def display_post_message(self, version: str) -> None:
+        if WINDOWS:
+            return self.display_post_message_windows(version)
+
+        if SHELL == "fish":
+            return self.display_post_message_fish(version)
+
+        return self.display_post_message_unix(version)
+
+    def display_post_message_windows(self, version: str) -> None:
+        path = self.get_windows_path_var()
+
+        message = POST_MESSAGE_NOT_IN_PATH
+        if path and str(self._bin_dir) in path:
+            message = POST_MESSAGE
+
+        self._write(
+            message.format(
+                poetry=colorize("info", "Poetry"),
+                version=colorize("b", version),
+                poetry_home_bin=colorize("comment", self._bin_dir),
+                poetry_executable=colorize("b", self._bin_dir.joinpath("poetry")),
+                configure_message=POST_MESSAGE_CONFIGURE_WINDOWS.format(
+                    poetry_home_bin=colorize("comment", self._bin_dir)
+                ),
+                test_command=colorize("b", "poetry --version"),
+            )
+        )
+
+    def get_windows_path_var(self) -> Optional[str]:
+        import winreg
+
+        with winreg.ConnectRegistry(None, winreg.HKEY_CURRENT_USER) as root:
+            with winreg.OpenKey(root, "Environment", 0, winreg.KEY_ALL_ACCESS) as key:
+                path, _ = winreg.QueryValueEx(key, "PATH")
+
+                return path
+
+    def display_post_message_fish(self, version: str) -> None:
+        fish_user_paths = subprocess.check_output(
+            ["fish", "-c", "echo $fish_user_paths"]
+        ).decode("utf-8")
+
+        message = POST_MESSAGE_NOT_IN_PATH
+        if fish_user_paths and str(self._bin_dir) in fish_user_paths:
+            message = POST_MESSAGE
+
+        self._write(
+            message.format(
+                poetry=colorize("info", "Poetry"),
+                version=colorize("b", version),
+                poetry_home_bin=colorize("comment", self._bin_dir),
+                poetry_executable=colorize("b", self._bin_dir.joinpath("poetry")),
+                configure_message=POST_MESSAGE_CONFIGURE_FISH.format(
+                    poetry_home_bin=colorize("comment", self._bin_dir)
+                ),
+                test_command=colorize("b", "poetry --version"),
+            )
+        )
+
+    def display_post_message_unix(self, version: str) -> None:
+        paths = os.getenv("PATH", "").split(":")
+
+        message = POST_MESSAGE_NOT_IN_PATH
+        if paths and str(self._bin_dir) in paths:
+            message = POST_MESSAGE
+
+        self._write(
+            message.format(
+                poetry=colorize("info", "Poetry"),
+                version=colorize("b", version),
+                poetry_home_bin=colorize("comment", self._bin_dir),
+                poetry_executable=colorize("b", self._bin_dir.joinpath("poetry")),
+                configure_message=POST_MESSAGE_CONFIGURE_UNIX.format(
+                    poetry_home_bin=colorize("comment", self._bin_dir)
+                ),
+                test_command=colorize("b", "poetry --version"),
+            )
+        )
+
+    def ensure_directories(self) -> None:
+        self._data_dir.mkdir(parents=True, exist_ok=True)
+        self._bin_dir.mkdir(parents=True, exist_ok=True)
+
+    def get_version(self):
+        current_version = None
+        if self._data_dir.joinpath("VERSION").exists():
+            current_version = self._data_dir.joinpath("VERSION").read_text().strip()
+
+        self._write(colorize("info", "Retrieving Poetry metadata"))
+
+        metadata = json.loads(self._get(self.METADATA_URL).decode())
+
+        def _compare_versions(x, y):
+            mx = self.VERSION_REGEX.match(x)
+            my = self.VERSION_REGEX.match(y)
+
+            vx = tuple(int(p) for p in mx.groups()[:3]) + (mx.group(5),)
+            vy = tuple(int(p) for p in my.groups()[:3]) + (my.group(5),)
+
+            if vx < vy:
+                return -1
+            elif vx > vy:
+                return 1
+
+            return 0
+
+        self._write("")
+        releases = sorted(
+            metadata["releases"].keys(), key=cmp_to_key(_compare_versions)
+        )
+
+        if self._version and self._version not in releases:
+            self._write(
+                colorize("error", "Version {} does not exist.".format(self._version))
+            )
+
+            return None, None
+
+        version = self._version
+        if not version:
+            for release in reversed(releases):
+                m = self.VERSION_REGEX.match(release)
+                if m.group(5) and not self.allows_prereleases():
+                    continue
+
+                version = release
+
+                break
+
+        if current_version == version and not self._force:
+            self._write(
+                "The latest version ({}) is already installed.".format(
+                    colorize("b", version)
+                )
+            )
+
+            return None, current_version
+
+        return version, current_version
+
+    def _write(self, line) -> None:
+        sys.stdout.write(line + "\n")
+
+    def _overwrite(self, line) -> None:
+        if not is_decorated():
+            return self._write(line)
+
+        self._cursor.move_up()
+        self._cursor.clear_line()
+        self._write(line)
+
+    def _get(self, url):
+        request = Request(url, headers={"User-Agent": "Python Poetry"})
+
+        with closing(urlopen(request)) as r:
+            return r.read()
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description="Installs the latest (or given) version of poetry"
+    )
+    parser.add_argument(
+        "-p",
+        "--preview",
+        help="install preview version",
+        dest="preview",
+        action="store_true",
+        default=False,
+    )
+    parser.add_argument("--version", help="install named version", dest="version")
+    parser.add_argument(
+        "-f",
+        "--force",
+        help="install on top of existing version",
+        dest="force",
+        action="store_true",
+        default=False,
+    )
+    parser.add_argument(
+        "-y",
+        "--yes",
+        help="accept all prompts",
+        dest="accept_all",
+        action="store_true",
+        default=False,
+    )
+    parser.add_argument(
+        "--uninstall",
+        help="uninstall poetry",
+        dest="uninstall",
+        action="store_true",
+        default=False,
+    )
+    parser.add_argument(
+        "--path",
+        dest="path",
+        action="store",
+        help=(
+            "Install from a given path (file or directory) instead of "
+            "fetching the latest version of Poetry available online."
+        ),
+    )
+    parser.add_argument(
+        "--git",
+        dest="git",
+        action="store",
+        help=(
+            "Install from a git repository instead of fetching the latest version "
+            "of Poetry available online."
+        ),
+    )
+
+    args = parser.parse_args()
+
+    installer = Installer(
+        version=args.version or os.getenv("POETRY_VERSION"),
+        preview=args.preview or string_to_bool(os.getenv("POETRY_PREVIEW", "0")),
+        force=args.force,
+        accept_all=args.accept_all
+        or string_to_bool(os.getenv("POETRY_ACCEPT", "0"))
+        or not is_interactive(),
+        path=args.path,
+        git=args.git,
+    )
+
+    if args.uninstall or string_to_bool(os.getenv("POETRY_UNINSTALL", "0")):
+        return installer.uninstall()
+
+    try:
+        return installer.run()
+    except PoetryInstallationError as e:
+        installer._write(colorize("error", "Poetry installation failed."))
+
+        if e.log is not None:
+            import traceback
+
+            _, path = tempfile.mkstemp(
+                suffix=".log",
+                prefix="poetry-installer-error-",
+                dir=str(Path.cwd()),
+                text=True,
+            )
+            installer._write(colorize("error", f"See {path} for error logs."))
+            text = (
+                f"{e.log}\n"
+                f"Traceback:\n\n{''.join(traceback.format_tb(e.__traceback__))}"
+            )
+            Path(path).write_text(text)
+
+        return e.return_code
+
+
+if __name__ == "__main__":
+    sys.stdout.write(
+        colorize(
+            "warning",
+            "The canonical source for Poetry's installation script is now"
+            " https://install.python-poetry.org. Please update your usage to reflect"
+            " this.\n",
+        )
+    )
+    sys.exit(main())
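For reference, the release-selection logic added above reduces to: sort the available release strings with a PEP 440-aware comparator, then walk the list newest-first and take the first entry that is not a pre-release (unless pre-releases are allowed); a pinned --version short-circuits the loop entirely. A minimal, self-contained sketch of that idea, in which the regex and comparator are simplified stand-ins for the script's VERSION_REGEX and _compare_versions (which handle the full PEP 440 grammar):

import re
from functools import cmp_to_key

# Simplified stand-in: group 4 captures a pre-release tag (a/b/rc).
VERSION_RE = re.compile(r"^(\d+)\.(\d+)\.(\d+)(?:(a|b|rc)(\d+))?$")

def _compare(a, b):
    ka, kb = VERSION_RE.match(a).groups(), VERSION_RE.match(b).groups()
    # Finals sort after their own pre-releases:
    # (major, minor, patch, is_final, pre_number)
    na = (int(ka[0]), int(ka[1]), int(ka[2]), ka[3] is None, int(ka[4] or 0))
    nb = (int(kb[0]), int(kb[1]), int(kb[2]), kb[3] is None, int(kb[4] or 0))
    return (na > nb) - (na < nb)

releases = sorted(["1.2.0b3", "1.1.14", "1.2.0"], key=cmp_to_key(_compare))

version = None
for release in reversed(releases):          # newest first
    if VERSION_RE.match(release).group(4):  # skip pre-releases
        continue
    version = release
    break

print(version)  # -> 1.2.0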
diff --git a/vendor/poetry/make-nix-release.sh b/vendor/poetry/make-nix-release.sh
deleted file mode 100755
index 074babb2..00000000
--- a/vendor/poetry/make-nix-release.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/sh
-
-set -ex
-
-RUNTIMES[0]="${PYTHON27:+-P "2.7:$PYTHON27"}"
-RUNTIMES[1]="${PYTHON35:+-P "3.5:$PYTHON35"}"
-RUNTIMES[2]="${PYTHON36:+-P "3.6:$PYTHON36"}"
-RUNTIMES[3]="${PYTHON37:+-P "3.7:$PYTHON37"}"
-RUNTIMES[4]="${PYTHON38:+-P "3.8:$PYTHON38"}"
-
-test -n "$PYTHON" || PYTHON="python3"
-
-if [ "$OSTYPE" == "linux-gnu" ]; then
-  $PYTHON get-poetry.py -y
-  POETRY="$PYTHON $HOME/.poetry/bin/poetry"
-  RUNTIMES[5]="${PYTHON39:+-P "3.9:$PYTHON39"}"
-  RUNTIMES[6]="${PYTHON310:+-P "3.10:$PYTHON310"}"
-else
-  $PYTHON -m pip install poetry -U
-  POETRY="$PYTHON -m poetry"
-fi
-
-$POETRY config virtualenvs.in-project true
-$POETRY install --no-dev
-$POETRY run python sonnet make release ${RUNTIMES[@]}
diff --git a/vendor/poetry/mypy.ini b/vendor/poetry/mypy.ini
deleted file mode 100644
index 9ab0f390..00000000
--- a/vendor/poetry/mypy.ini
+++ /dev/null
@@ -1,8 +0,0 @@
-[mypy]
-check_untyped_defs = True
-ignore_errors = False
-ignore_missing_imports = True
-strict_optional = True
-warn_unused_ignores = True
-warn_redundant_casts = True
-warn_unused_configs = True
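If this configuration is being relocated rather than dropped, every removed key except ignore_errors has a direct command-line twin, so the same checks can be reproduced without the ini file. A sketch using mypy's programmatic API (illustration only, not part of this change; somefile.py is a hypothetical target):

from mypy import api

# Flags mirror the deleted ini keys; ignore_errors is config-file-only
# (it is applied per module) and has no CLI equivalent.
report, errors, status = api.run([
    "--check-untyped-defs",
    "--ignore-missing-imports",
    "--strict-optional",
    "--warn-unused-ignores",
    "--warn-redundant-casts",
    "--warn-unused-configs",
    "somefile.py",  # hypothetical target
])
print(report or errors, "exit status:", status)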
diff --git a/vendor/poetry/poetry.lock b/vendor/poetry/poetry.lock
index 21a67566..03294acc 100644
--- a/vendor/poetry/poetry.lock
+++ b/vendor/poetry/poetry.lock
@@ -1,6 +1,6 @@
 [[package]]
 name = "atomicwrites"
-version = "1.4.0"
+version = "1.4.1"
 description = "Atomic file writes."
 category = "dev"
 optional = false
@@ -8,50 +8,21 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 
 [[package]]
 name = "attrs"
-version = "21.4.0"
+version = "22.1.0"
 description = "Classes Without Boilerplate"
-category = "dev"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-
-[package.extras]
-dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
-docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
-tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
-tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"]
-
-[[package]]
-name = "backports.functools-lru-cache"
-version = "1.6.4"
-description = "Backport of functools.lru_cache"
-category = "dev"
-optional = false
-python-versions = ">=2.6"
-
-[package.extras]
-docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
-testing = ["pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-mypy", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-checkdocs (>=2.4)"]
-
-[[package]]
-name = "cachecontrol"
-version = "0.12.6"
-description = "httplib2 caching for requests"
 category = "main"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-
-[package.dependencies]
-lockfile = {version = ">=0.9", optional = true, markers = "extra == \"filecache\""}
-msgpack = ">=0.5.2"
-requests = "*"
+python-versions = ">=3.5"
 
 [package.extras]
-filecache = ["lockfile (>=0.9)"]
-redis = ["redis (>=2.10.5)"]
+dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope-interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
+docs = ["furo", "sphinx", "zope-interface", "sphinx-notfound-page"]
+tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope-interface", "cloudpickle"]
+tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"]
 
 [[package]]
 name = "cachecontrol"
-version = "0.12.10"
+version = "0.12.11"
 description = "httplib2 caching for requests"
 category = "main"
 optional = false
@@ -75,21 +46,21 @@ optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 
 [package.extras]
-redis = ["redis (>=3.3.6,<4.0.0)"]
-memcached = ["python-memcached (>=1.59,<2.0)"]
 msgpack = ["msgpack-python (>=0.5,<0.6)"]
+memcached = ["python-memcached (>=1.59,<2.0)"]
+redis = ["redis (>=3.3.6,<4.0.0)"]
 
 [[package]]
 name = "certifi"
-version = "2021.10.8"
+version = "2022.6.15"
 description = "Python package for providing Mozilla's CA Bundle."
 category = "main"
 optional = false
-python-versions = "*"
+python-versions = ">=3.6"
 
 [[package]]
 name = "cffi"
-version = "1.15.0"
+version = "1.15.1"
 description = "Foreign Function Interface for Python calling C code."
 category = "main"
 optional = false
@@ -107,78 +78,49 @@ optional = false
 python-versions = ">=3.6.1"
 
 [[package]]
-name = "chardet"
-version = "4.0.0"
-description = "Universal encoding detector for Python 2 and 3"
+name = "charset-normalizer"
+version = "2.1.1"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
 category = "main"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = ">=3.6.0"
+
+[package.extras]
+unicode_backport = ["unicodedata2"]
 
 [[package]]
 name = "cleo"
-version = "0.8.1"
+version = "1.0.0a5"
 description = "Cleo allows you to create beautiful and testable command-line interfaces."
 category = "main"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=3.7,<4.0"
 
 [package.dependencies]
-clikit = ">=0.6.0,<0.7.0"
-
-[[package]]
-name = "clikit"
-version = "0.6.2"
-description = "CliKit is a group of utilities to build beautiful and testable command line interfaces."
-category = "main"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-
-[package.dependencies]
-crashtest = {version = ">=0.3.0,<0.4.0", markers = "python_version >= \"3.6\" and python_version < \"4.0\""}
-enum34 = {version = ">=1.1,<2.0", markers = "python_version >= \"2.7\" and python_version < \"2.8\""}
-pastel = ">=0.2.0,<0.3.0"
-pylev = ">=1.3,<2.0"
-typing = {version = ">=3.6,<4.0", markers = "python_version >= \"2.7\" and python_version < \"2.8\" or python_version >= \"3.4\" and python_version < \"3.5\""}
-typing-extensions = {version = ">=3.6,<4.0", markers = "python_version >= \"3.5\" and python_full_version < \"3.5.4\""}
+crashtest = ">=0.3.1,<0.4.0"
+pylev = ">=1.3.0,<2.0.0"
 
 [[package]]
 name = "colorama"
-version = "0.4.4"
+version = "0.4.5"
 description = "Cross-platform colored terminal text."
 category = "dev"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
 
-[[package]]
-name = "configparser"
-version = "4.0.2"
-description = "Updated configparser from Python 3.7 for Python 2.6+."
-category = "main"
-optional = false
-python-versions = ">=2.6"
-
-[package.extras]
-docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"]
-testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2)", "pytest-flake8", "pytest-black-multipy"]
-
-[[package]]
-name = "contextlib2"
-version = "0.6.0.post1"
-description = "Backports and enhancements for the contextlib module"
-category = "main"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-
 [[package]]
 name = "coverage"
-version = "5.5"
+version = "6.4.4"
 description = "Code coverage measurement for Python"
 category = "dev"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
+python-versions = ">=3.7"
+
+[package.dependencies]
+tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
 
 [package.extras]
-toml = ["toml"]
+toml = ["tomli"]
 
 [[package]]
 name = "crashtest"
@@ -190,134 +132,94 @@ python-versions = ">=3.6,<4.0"
 
 [[package]]
 name = "cryptography"
-version = "3.2.1"
+version = "37.0.4"
 description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
 category = "main"
 optional = false
-python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"
-
-[package.dependencies]
-cffi = ">=1.8,<1.11.3 || >1.11.3"
-six = ">=1.4.1"
-
-[package.extras]
-docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"]
-docstest = ["doc8", "pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"]
-pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"]
-ssh = ["bcrypt (>=3.1.5)"]
-test = ["pytest (>=3.6.0,!=3.9.0,!=3.9.1,!=3.9.2)", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"]
-
-[[package]]
-name = "cryptography"
-version = "3.3.2"
-description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
-category = "main"
-optional = false
-python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*"
+python-versions = ">=3.6"
 
 [package.dependencies]
 cffi = ">=1.12"
-enum34 = {version = "*", markers = "python_version < \"3\""}
-ipaddress = {version = "*", markers = "python_version < \"3\""}
-six = ">=1.4.1"
 
 [package.extras]
 docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"]
-docstest = ["doc8", "pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"]
+docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"]
 pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"]
+sdist = ["setuptools_rust (>=0.11.4)"]
 ssh = ["bcrypt (>=3.1.5)"]
-test = ["pytest (>=3.6.0,!=3.9.0,!=3.9.1,!=3.9.2)", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"]
+test = ["pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"]
 
 [[package]]
-name = "cryptography"
-version = "36.0.1"
-description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
-category = "main"
+name = "deepdiff"
+version = "5.8.1"
+description = "Deep Difference and Search of any Python object/data."
+category = "dev"
 optional = false
 python-versions = ">=3.6"
 
 [package.dependencies]
-cffi = ">=1.12"
+ordered-set = ">=4.1.0,<4.2.0"
 
 [package.extras]
-docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"]
-docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"]
-pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"]
-sdist = ["setuptools_rust (>=0.11.4)"]
-ssh = ["bcrypt (>=3.1.5)"]
-test = ["pytest (>=6.2.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"]
+cli = ["click (==8.0.3)", "pyyaml (==5.4.1)", "toml (==0.10.2)", "clevercsv (==0.7.1)"]
 
 [[package]]
 name = "distlib"
-version = "0.3.4"
+version = "0.3.6"
 description = "Distribution utilities"
 category = "main"
 optional = false
 python-versions = "*"
 
 [[package]]
-name = "entrypoints"
-version = "0.3"
-description = "Discover and load entry points from installed packages."
+name = "dulwich"
+version = "0.20.45"
+description = "Python Git Library"
 category = "main"
 optional = false
-python-versions = ">=2.7"
+python-versions = ">=3.6"
 
 [package.dependencies]
-configparser = {version = ">=3.5", markers = "python_version == \"2.7\""}
+certifi = "*"
+urllib3 = ">=1.24.1"
+
+[package.extras]
+fastimport = ["fastimport"]
+https = ["urllib3[secure] (>=1.24.1)"]
+paramiko = ["paramiko"]
+pgp = ["gpg"]
 
 [[package]]
-name = "enum34"
-version = "1.1.10"
-description = "Python 3.4 Enum backported to 3.3, 3.2, 3.1, 2.7, 2.6, 2.5, and 2.4"
-category = "main"
+name = "execnet"
+version = "1.9.0"
+description = "execnet: rapid multi-Python deployment"
+category = "dev"
 optional = false
-python-versions = "*"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+
+[package.extras]
+testing = ["pre-commit"]
 
 [[package]]
 name = "filelock"
-version = "3.2.1"
+version = "3.8.0"
 description = "A platform independent file lock."
 category = "main"
 optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+python-versions = ">=3.7"
 
 [package.extras]
-docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"]
-testing = ["coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"]
+docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"]
+testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"]
 
 [[package]]
-name = "funcsigs"
-version = "1.0.2"
-description = "Python function signatures from PEP362 for Python 2.6, 2.7 and 3.2+"
+name = "flatdict"
+version = "4.0.1"
+description = "Python module for interacting with nested dicts as a single level dict with delimited keys."
 category = "dev"
 optional = false
 python-versions = "*"
 
-[[package]]
-name = "functools32"
-version = "3.2.3-2"
-description = "Backport of the functools module from Python 3.2.3 for use on 2.7 and PyPy."
-category = "main"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "futures"
-version = "3.3.0"
-description = "Backport of the concurrent.futures package from Python 3"
-category = "main"
-optional = false
-python-versions = ">=2.6, <3"
-
-[[package]]
-name = "glob2"
-version = "0.6"
-description = "Version of the glob module that can capture patterns and supports recursive wildcards"
-category = "main"
-optional = false
-python-versions = "*"
-
 [[package]]
 name = "html5lib"
 version = "1.1"
@@ -331,76 +233,69 @@ six = ">=1.9"
 webencodings = "*"
 
 [package.extras]
-all = ["genshi", "chardet (>=2.2)", "lxml"]
-chardet = ["chardet (>=2.2)"]
-genshi = ["genshi"]
 lxml = ["lxml"]
+genshi = ["genshi"]
+chardet = ["chardet (>=2.2)"]
+all = ["lxml", "chardet (>=2.2)", "genshi"]
 
 [[package]]
 name = "httpretty"
-version = "0.9.7"
+version = "1.1.4"
 description = "HTTP client mock for Python"
 category = "dev"
 optional = false
-python-versions = "*"
-
-[package.dependencies]
-six = "*"
+python-versions = ">=3"
 
 [[package]]
 name = "identify"
-version = "2.4.4"
+version = "2.5.3"
 description = "File identification library for Python"
 category = "dev"
 optional = false
-python-versions = ">=3.6.1"
+python-versions = ">=3.7"
 
 [package.extras]
 license = ["ukkonen"]
 
 [[package]]
 name = "idna"
-version = "2.10"
+version = "3.3"
 description = "Internationalized Domain Names in Applications (IDNA)"
 category = "main"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=3.5"
 
 [[package]]
 name = "importlib-metadata"
-version = "1.7.0"
+version = "4.12.0"
 description = "Read metadata from Python packages"
 category = "main"
 optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+python-versions = ">=3.7"
 
 [package.dependencies]
-configparser = {version = ">=3.5", markers = "python_version < \"3\""}
-contextlib2 = {version = "*", markers = "python_version < \"3\""}
-pathlib2 = {version = "*", markers = "python_version < \"3\""}
+typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
 zipp = ">=0.5"
 
 [package.extras]
-docs = ["sphinx", "rst.linker"]
-testing = ["packaging", "pep517", "importlib-resources (>=1.3)"]
+docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"]
+perf = ["ipython"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl-flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"]
 
 [[package]]
 name = "importlib-resources"
-version = "3.2.1"
+version = "5.9.0"
 description = "Read resources from Python packages"
 category = "main"
 optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+python-versions = ">=3.7"
 
 [package.dependencies]
-contextlib2 = {version = "*", markers = "python_version < \"3\""}
-pathlib2 = {version = "*", markers = "python_version < \"3\""}
-singledispatch = {version = "*", markers = "python_version < \"3.4\""}
-typing = {version = "*", markers = "python_version < \"3.5\""}
-zipp = {version = ">=0.4", markers = "python_version < \"3.8\""}
+zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}
 
 [package.extras]
-docs = ["sphinx", "rst.linker", "jaraco.packaging"]
+docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"]
 
 [[package]]
 name = "iniconfig"
@@ -410,88 +305,55 @@ category = "dev"
 optional = false
 python-versions = "*"
 
-[[package]]
-name = "ipaddress"
-version = "1.0.23"
-description = "IPv4/IPv6 manipulation library"
-category = "main"
-optional = false
-python-versions = "*"
-
 [[package]]
 name = "jeepney"
-version = "0.4.3"
+version = "0.8.0"
 description = "Low-level, pure Python DBus protocol wrapper."
 category = "main"
 optional = false
-python-versions = ">=3.5"
+python-versions = ">=3.7"
 
 [package.extras]
-dev = ["testpath"]
+trio = ["async-generator", "trio"]
+test = ["async-timeout", "trio", "testpath", "pytest-asyncio (>=0.17)", "pytest-trio", "pytest"]
 
 [[package]]
-name = "jeepney"
-version = "0.7.1"
-description = "Low-level, pure Python DBus protocol wrapper."
+name = "jsonschema"
+version = "4.14.0"
+description = "An implementation of JSON Schema validation for Python"
 category = "main"
 optional = false
-python-versions = ">=3.6"
-
-[package.extras]
-test = ["pytest", "pytest-trio", "pytest-asyncio", "testpath", "trio", "async-timeout"]
-trio = ["trio", "async-generator"]
-
-[[package]]
-name = "keyring"
-version = "18.0.1"
-description = "Store and access your passwords safely."
-category = "main"
-optional = false
-python-versions = ">=2.7"
-
-[package.dependencies]
-entrypoints = "*"
-pywin32-ctypes = {version = "<0.1.0 || >0.1.0,<0.1.1 || >0.1.1", markers = "sys_platform == \"win32\""}
-secretstorage = {version = "<3", markers = "(sys_platform == \"linux2\" or sys_platform == \"linux\") and python_version < \"3.5\""}
-
-[package.extras]
-docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"]
-testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs", "pytest-flake8"]
-
-[[package]]
-name = "keyring"
-version = "20.0.1"
-description = "Store and access your passwords safely."
-category = "main"
-optional = false
-python-versions = ">=3.5"
+python-versions = ">=3.7"
 
 [package.dependencies]
+attrs = ">=17.4.0"
 importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
-pywin32-ctypes = {version = "<0.1.0 || >0.1.0,<0.1.1 || >0.1.1", markers = "sys_platform == \"win32\""}
-secretstorage = {version = "*", markers = "sys_platform == \"linux\""}
+importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""}
+pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""}
+pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2"
+typing-extensions = {version = "*", markers = "python_version < \"3.8\""}
 
 [package.extras]
-docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"]
-testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-black-multipy", "pytest-cov"]
+format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
+format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"]
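Many entries in this lock file gate dependencies on PEP 508 environment markers (python_version < "3.8", sys_platform == "win32", extra == "toml", and so on). A sketch of how such marker strings evaluate, shown with the packaging library for illustration — Poetry ships its own marker handling, but the semantics are the same:

from packaging.markers import Marker

# Evaluated against the running interpreter: False on CPython 3.10,
# so a backport gated this way is skipped during resolution.
print(Marker('python_version < "3.8"').evaluate())

# "extra" is not part of the interpreter environment; the resolver
# supplies it when an extra such as coverage[toml] is requested.
print(Marker('extra == "toml"').evaluate({"extra": "toml"}))  # True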
 
 [[package]]
 name = "keyring"
-version = "22.3.0"
+version = "23.8.2"
 description = "Store and access your passwords safely."
 category = "main"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
 
 [package.dependencies]
-importlib-metadata = {version = ">=1", markers = "python_version < \"3.8\""}
+importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""}
 jeepney = {version = ">=0.4.2", markers = "sys_platform == \"linux\""}
 pywin32-ctypes = {version = "<0.1.0 || >0.1.0,<0.1.1 || >0.1.1", markers = "sys_platform == \"win32\""}
 SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""}
 
 [package.extras]
-docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
-testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "pytest-black (>=0.3.7)", "pytest-mypy"]
+docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"]
 
 [[package]]
 name = "lockfile"
@@ -502,95 +364,72 @@ optional = false
 python-versions = "*"
 
 [[package]]
-name = "mock"
-version = "3.0.5"
-description = "Rolling backport of unittest.mock for all Pythons"
-category = "dev"
+name = "msgpack"
+version = "1.0.4"
+description = "MessagePack serializer"
+category = "main"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-
-[package.dependencies]
-funcsigs = {version = ">=1", markers = "python_version < \"3.3\""}
-six = "*"
-
-[package.extras]
-build = ["twine", "wheel", "blurb"]
-docs = ["sphinx"]
-test = ["pytest", "pytest-cov"]
+python-versions = "*"
 
 [[package]]
-name = "more-itertools"
-version = "5.0.0"
-description = "More routines for operating on iterables, beyond itertools"
+name = "mypy"
+version = "0.971"
+description = "Optional static typing for Python"
 category = "dev"
 optional = false
-python-versions = "*"
+python-versions = ">=3.6"
 
 [package.dependencies]
-six = ">=1.0.0,<2.0.0"
+mypy-extensions = ">=0.4.3"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""}
+typing-extensions = ">=3.10"
 
-[[package]]
-name = "more-itertools"
-version = "7.2.0"
-description = "More routines for operating on iterables, beyond itertools"
-category = "dev"
-optional = false
-python-versions = ">=3.4"
+[package.extras]
+dmypy = ["psutil (>=4.0)"]
+python2 = ["typed-ast (>=1.4.0,<2)"]
+reports = ["lxml"]
 
 [[package]]
-name = "more-itertools"
-version = "8.12.0"
-description = "More routines for operating on iterables, beyond itertools"
+name = "mypy-extensions"
+version = "0.4.3"
+description = "Experimental type system extensions for programs checked with the mypy typechecker."
 category = "dev"
 optional = false
-python-versions = ">=3.5"
-
-[[package]]
-name = "msgpack"
-version = "1.0.3"
-description = "MessagePack (de)serializer."
-category = "main"
-optional = false
 python-versions = "*"
 
 [[package]]
 name = "nodeenv"
-version = "1.6.0"
+version = "1.7.0"
 description = "Node.js virtual environment builder"
 category = "dev"
 optional = false
-python-versions = "*"
-
-[[package]]
-name = "packaging"
-version = "20.9"
-description = "Core utilities for Python packages"
-category = "main"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
 
 [package.dependencies]
-pyparsing = ">=2.0.2"
+setuptools = "*"
 
 [[package]]
-name = "pastel"
-version = "0.2.1"
-description = "Bring colors to your terminal."
-category = "main"
+name = "ordered-set"
+version = "4.1.0"
+description = "An OrderedSet is a custom MutableSet that remembers its order, so that every"
+category = "dev"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=3.7"
+
+[package.extras]
+dev = ["pytest", "black", "mypy"]
 
 [[package]]
-name = "pathlib2"
-version = "2.3.6"
-description = "Object-oriented filesystem paths"
+name = "packaging"
+version = "21.3"
+description = "Core utilities for Python packages"
 category = "main"
 optional = false
-python-versions = "*"
+python-versions = ">=3.6"
 
 [package.dependencies]
-scandir = {version = "*", markers = "python_version < \"3.5\""}
-six = "*"
+pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
 
 [[package]]
 name = "pexpect"
@@ -605,70 +444,101 @@ ptyprocess = ">=0.5"
 
 [[package]]
 name = "pkginfo"
-version = "1.8.2"
+version = "1.8.3"
 description = "Query metadatdata from sdists / bdists / installed packages."
 category = "main"
 optional = false
-python-versions = "*"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
 
 [package.extras]
-testing = ["coverage", "nose"]
+testing = ["nose", "coverage"]
+
+[[package]]
+name = "pkgutil-resolve-name"
+version = "1.3.10"
+description = "Resolve a name to an object."
+category = "main"
+optional = false
+python-versions = ">=3.6"
 
 [[package]]
 name = "platformdirs"
-version = "2.0.2"
+version = "2.5.2"
 description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
 category = "main"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = ">=3.7"
+
+[package.extras]
+docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"]
+test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"]
 
 [[package]]
 name = "pluggy"
-version = "0.13.1"
+version = "1.0.0"
 description = "plugin and hook calling mechanisms for python"
 category = "dev"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=3.6"
 
 [package.dependencies]
 importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
 
 [package.extras]
-dev = ["pre-commit", "tox"]
+testing = ["pytest-benchmark", "pytest"]
+dev = ["tox", "pre-commit"]
 
 [[package]]
 name = "poetry-core"
-version = "1.0.7"
+version = "1.1.0"
 description = "Poetry PEP 517 Build Backend"
 category = "main"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = ">=3.7,<4.0"
 
 [package.dependencies]
-enum34 = {version = ">=1.1.10,<2.0.0", markers = "python_version >= \"2.7\" and python_version < \"2.8\""}
-functools32 = {version = ">=3.2.3-2,<4.0.0", markers = "python_version >= \"2.7\" and python_version < \"2.8\""}
-importlib-metadata = {version = ">=1.7.0,<2.0.0", markers = "python_version >= \"2.7\" and python_version < \"2.8\" or python_version >= \"3.5\" and python_version < \"3.8\""}
-pathlib2 = {version = ">=2.3.5,<3.0.0", markers = "python_version >= \"2.7\" and python_version < \"2.8\""}
-typing = {version = ">=3.7.4.1,<4.0.0.0", markers = "python_version >= \"2.7\" and python_version < \"2.8\""}
+importlib-metadata = {version = ">=1.7.0", markers = "python_version < \"3.8\""}
+
+[[package]]
+name = "poetry-plugin-export"
+version = "1.0.6"
+description = "Poetry plugin to export the dependencies to various formats"
+category = "main"
+optional = false
+python-versions = ">=3.7,<4.0"
+
+[package.dependencies]
+poetry = ">=1.2.0b3,<2.0.0"
+poetry-core = ">=1.1.0b3,<2.0.0"
 
 [[package]]
 name = "pre-commit"
-version = "2.17.0"
+version = "2.20.0"
 description = "A framework for managing and maintaining multi-language pre-commit hooks."
 category = "dev"
 optional = false
-python-versions = ">=3.6.1"
+python-versions = ">=3.7"
 
 [package.dependencies]
 cfgv = ">=2.0.0"
 identify = ">=1.0.0"
 importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
-importlib-resources = {version = "<5.3", markers = "python_version < \"3.7\""}
 nodeenv = ">=0.11.1"
 pyyaml = ">=5.1"
 toml = "*"
 virtualenv = ">=20.0.8"
 
+[[package]]
+name = "psutil"
+version = "5.9.1"
+description = "Cross-platform lib for process and system monitoring in Python."
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
+[package.extras]
+test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"]
+
 [[package]]
 name = "ptyprocess"
 version = "0.7.0"
@@ -703,71 +573,30 @@ python-versions = "*"
 
 [[package]]
 name = "pyparsing"
-version = "2.4.7"
-description = "Python parsing module"
+version = "3.0.9"
+description = "pyparsing module - Classes and methods to define and execute parsing grammars"
 category = "main"
 optional = false
-python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
-
-[[package]]
-name = "pytest"
-version = "4.6.11"
-description = "pytest: simple powerful testing with Python"
-category = "dev"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
-
-[package.dependencies]
-atomicwrites = ">=1.0"
-attrs = ">=17.4.0"
-colorama = {version = "*", markers = "sys_platform == \"win32\" and python_version != \"3.4\""}
-funcsigs = {version = ">=1.0", markers = "python_version < \"3.0\""}
-importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
-more-itertools = [
-    {version = ">=4.0.0,<6.0.0", markers = "python_version <= \"2.7\""},
-    {version = ">=4.0.0", markers = "python_version > \"2.7\""},
-]
-packaging = "*"
-pathlib2 = {version = ">=2.2.0", markers = "python_version < \"3.6\""}
-pluggy = ">=0.12,<1.0"
-py = ">=1.5.0"
-six = ">=1.10.0"
-wcwidth = "*"
+python-versions = ">=3.6.8"
 
 [package.extras]
-testing = ["argcomplete", "hypothesis (>=3.56)", "nose", "requests", "mock"]
+diagrams = ["railroad-diagrams", "jinja2"]
 
 [[package]]
-name = "pytest"
-version = "5.4.3"
-description = "pytest: simple powerful testing with Python"
-category = "dev"
+name = "pyrsistent"
+version = "0.18.1"
+description = "Persistent/Functional/Immutable data structures"
+category = "main"
 optional = false
-python-versions = ">=3.5"
-
-[package.dependencies]
-atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
-attrs = ">=17.4.0"
-colorama = {version = "*", markers = "sys_platform == \"win32\""}
-importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
-more-itertools = ">=4.0.0"
-packaging = "*"
-pathlib2 = {version = ">=2.2.0", markers = "python_version < \"3.6\""}
-pluggy = ">=0.12,<1.0"
-py = ">=1.5.0"
-wcwidth = "*"
-
-[package.extras]
-checkqa-mypy = ["mypy (==v0.761)"]
-testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
+python-versions = ">=3.7"
 
 [[package]]
 name = "pytest"
-version = "6.2.5"
+version = "7.1.2"
 description = "pytest: simple powerful testing with Python"
 category = "dev"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
 
 [package.dependencies]
 atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
@@ -778,45 +607,67 @@ iniconfig = "*"
 packaging = "*"
 pluggy = ">=0.12,<2.0"
 py = ">=1.8.2"
-toml = "*"
+tomli = ">=1.0.0"
 
 [package.extras]
-testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
+testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
 
 [[package]]
 name = "pytest-cov"
-version = "2.12.1"
+version = "3.0.0"
 description = "Pytest plugin for measuring coverage."
 category = "dev"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = ">=3.6"
 
 [package.dependencies]
-coverage = ">=5.2.1"
+coverage = {version = ">=5.2.1", extras = ["toml"]}
 pytest = ">=4.6"
-toml = "*"
 
 [package.extras]
-testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"]
+testing = ["virtualenv", "pytest-xdist", "six", "process-tests", "hunter", "fields"]
+
+[[package]]
+name = "pytest-forked"
+version = "1.4.0"
+description = "run tests in isolated forked subprocesses"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+py = "*"
+pytest = ">=3.10"
 
 [[package]]
 name = "pytest-mock"
-version = "1.13.0"
-description = "Thin-wrapper around the mock package for easier use with py.test"
+version = "3.8.2"
+description = "Thin-wrapper around the mock package for easier use with pytest"
 category = "dev"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=3.7"
 
 [package.dependencies]
-mock = {version = "*", markers = "python_version < \"3.0\""}
-pytest = ">=2.7"
+pytest = ">=5.0"
 
 [package.extras]
-dev = ["pre-commit", "tox"]
+dev = ["pre-commit", "tox", "pytest-asyncio"]
+
+[[package]]
+name = "pytest-randomly"
+version = "3.12.0"
+description = "Pytest plugin to randomly order tests and control random.seed."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""}
+pytest = "*"
 
 [[package]]
 name = "pytest-sugar"
-version = "0.9.4"
+version = "0.9.5"
 description = "pytest-sugar is a plugin for pytest that changes the default look and feel of pytest (e.g. progressbar, show tests that fail instantly)."
 category = "dev"
 optional = false
@@ -827,6 +678,25 @@ packaging = ">=14.1"
 pytest = ">=2.9"
 termcolor = ">=1.1.0"
 
+[[package]]
+name = "pytest-xdist"
+version = "2.5.0"
+description = "pytest xdist plugin for distributed testing and loop-on-failing modes"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+execnet = ">=1.1"
+psutil = {version = ">=3.0", optional = true, markers = "extra == \"psutil\""}
+pytest = ">=6.2.0"
+pytest-forked = "*"
+
+[package.extras]
+psutil = ["psutil (>=3.0)"]
+setproctitle = ["setproctitle"]
+testing = ["filelock"]
+
 [[package]]
 name = "pywin32-ctypes"
 version = "0.2.0"
@@ -845,21 +715,21 @@ python-versions = ">=3.6"
 
 [[package]]
 name = "requests"
-version = "2.25.1"
+version = "2.28.1"
 description = "Python HTTP for Humans."
 category = "main"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = ">=3.7, <4"
 
 [package.dependencies]
 certifi = ">=2017.4.17"
-chardet = ">=3.0.2,<5"
-idna = ">=2.5,<3"
+charset-normalizer = ">=2,<3"
+idna = ">=2.5,<4"
 urllib3 = ">=1.21.1,<1.27"
 
 [package.extras]
-security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"]
-socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
+socks = ["PySocks (>=1.5.6,!=1.5.7)"]
+use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
 
 [[package]]
 name = "requests-toolbelt"
@@ -872,74 +742,38 @@ python-versions = "*"
 [package.dependencies]
 requests = ">=2.0.1,<3.0.0"
 
-[[package]]
-name = "scandir"
-version = "1.10.0"
-description = "scandir, a better directory iterator and faster os.walk()"
-category = "main"
-optional = false
-python-versions = "*"
-
 [[package]]
 name = "secretstorage"
-version = "2.3.1"
+version = "3.3.3"
 description = "Python bindings to FreeDesktop.org Secret Service API"
 category = "main"
 optional = false
-python-versions = "*"
-
-[package.dependencies]
-cryptography = "*"
-
-[package.extras]
-dbus-python = ["dbus-python"]
-
-[[package]]
-name = "secretstorage"
-version = "3.2.0"
-description = "Python bindings to FreeDesktop.org Secret Service API"
-category = "main"
-optional = false
-python-versions = ">=3.5"
+python-versions = ">=3.6"
 
 [package.dependencies]
 cryptography = ">=2.0"
-jeepney = ">=0.4.2"
+jeepney = ">=0.6"
 
 [[package]]
-name = "secretstorage"
-version = "3.3.1"
-description = "Python bindings to FreeDesktop.org Secret Service API"
-category = "main"
+name = "setuptools"
+version = "65.3.0"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+category = "dev"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
 
-[package.dependencies]
-cryptography = ">=2.0"
-jeepney = ">=0.6"
+[package.extras]
+docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)", "sphinx-notfound-page (==0.8.3)", "sphinx-hoverxref (<2)", "pygments-github-lexers (==0.0.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-reredirects", "sphinxcontrib-towncrier", "furo"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "flake8 (<5)", "pytest-enabler (>=1.3)", "pytest-perf", "mock", "flake8-2020", "virtualenv (>=13.0.0)", "wheel", "pip (>=19.1)", "jaraco.envs (>=2.2)", "pytest-xdist", "jaraco.path (>=3.2.0)", "build", "filelock (>=3.4.0)", "pip-run (>=8.8)", "ini2toml[lite] (>=0.9)", "tomli-w (>=1.0.0)", "pytest-black (>=0.3.7)", "pytest-cov", "pytest-mypy (>=0.9.1)"]
+testing-integration = ["pytest", "pytest-xdist", "pytest-enabler", "virtualenv (>=13.0.0)", "tomli", "wheel", "jaraco.path (>=3.2.0)", "jaraco.envs (>=2.2)", "build", "filelock (>=3.4.0)"]
 
 [[package]]
 name = "shellingham"
-version = "1.4.0"
+version = "1.5.0"
 description = "Tool to Detect Surrounding Shell"
 category = "main"
 optional = false
-python-versions = "!=3.0,!=3.1,!=3.2,!=3.3,>=2.6"
-
-[[package]]
-name = "singledispatch"
-version = "3.7.0"
-description = "Backport functools.singledispatch from Python 3.4 to Python 2.6-3.3."
-category = "main"
-optional = false
-python-versions = ">=2.6"
-
-[package.dependencies]
-six = "*"
-
-[package.extras]
-docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
-testing = ["pytest (>=4.6)", "pytest-flake8", "pytest-cov", "pytest-black (>=0.3.7)", "unittest2", "pytest-checkdocs (>=2.4)"]
+python-versions = ">=3.4"
 
 [[package]]
 name = "six"
@@ -949,14 +783,6 @@ category = "main"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
 
-[[package]]
-name = "subprocess32"
-version = "3.5.4"
-description = "A backport of the subprocess module from Python 3 for use on 2.x."
-category = "main"
-optional = false
-python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, <4"
-
 [[package]]
 name = "termcolor"
 version = "1.1.0"
@@ -973,22 +799,25 @@ category = "dev"
 optional = false
 python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
 
+[[package]]
+name = "tomli"
+version = "2.0.1"
+description = "A lil' TOML parser"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
 [[package]]
 name = "tomlkit"
-version = "0.7.2"
+version = "0.11.4"
 description = "Style preserving TOML library"
 category = "main"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-
-[package.dependencies]
-enum34 = {version = ">=1.1,<2.0", markers = "python_version >= \"2.7\" and python_version < \"2.8\""}
-functools32 = {version = ">=3.2.3,<4.0.0", markers = "python_version >= \"2.7\" and python_version < \"2.8\""}
-typing = {version = ">=3.6,<4.0", markers = "python_version >= \"2.7\" and python_version < \"2.8\" or python_version >= \"3.4\" and python_version < \"3.5\""}
+python-versions = ">=3.6,<4.0"
 
 [[package]]
 name = "tox"
-version = "3.24.5"
+version = "3.25.1"
 description = "tox is a generic virtualenv management and test command line tool"
 category = "dev"
 optional = false
@@ -1010,652 +839,203 @@ docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-a
 testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)", "psutil (>=5.6.1)", "pathlib2 (>=2.3.3)"]
 
 [[package]]
-name = "typing"
-version = "3.10.0.0"
-description = "Type Hints for Python"
-category = "main"
+name = "typed-ast"
+version = "1.5.4"
+description = "a fork of Python 2 and 3 ast modules with type comment support"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[[package]]
+name = "types-html5lib"
+version = "1.1.10"
+description = "Typing stubs for html5lib"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "types-jsonschema"
+version = "4.14.0"
+description = "Typing stubs for jsonschema"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "types-requests"
+version = "2.28.9"
+description = "Typing stubs for requests"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+types-urllib3 = "<1.27"
+
+[[package]]
+name = "types-urllib3"
+version = "1.26.23"
+description = "Typing stubs for urllib3"
+category = "dev"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <3.5"
+python-versions = "*"
 
 [[package]]
 name = "typing-extensions"
-version = "3.10.0.2"
-description = "Backported and Experimental Type Hints for Python 3.5+"
+version = "4.3.0"
+description = "Backported and Experimental Type Hints for Python 3.7+"
 category = "main"
 optional = false
-python-versions = "*"
+python-versions = ">=3.7"
 
 [[package]]
 name = "urllib3"
-version = "1.25.11"
+version = "1.26.12"
 description = "HTTP library with thread-safe connection pooling, file post, and more."
 category = "main"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4"
 
 [package.extras]
-brotli = ["brotlipy (>=0.6.0)"]
-secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
+brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"]
+secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"]
 socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
 
 [[package]]
 name = "virtualenv"
-version = "20.13.1"
+version = "20.16.4"
 description = "Virtual Python Environment builder"
 category = "main"
 optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+python-versions = ">=3.6"
 
 [package.dependencies]
-distlib = ">=0.3.1,<1"
-filelock = ">=3.2,<4"
-importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
-importlib-resources = {version = ">=1.0", markers = "python_version < \"3.7\""}
-pathlib2 = {version = ">=2.3.3,<3", markers = "python_version < \"3.4\" and sys_platform != \"win32\""}
-platformdirs = ">=2,<3"
-six = ">=1.9.0,<2"
+distlib = ">=0.3.5,<1"
+filelock = ">=3.4.1,<4"
+importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.8\""}
+platformdirs = ">=2.4,<3"
 
 [package.extras]
-docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"]
-testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"]
+docs = ["proselint (>=0.13)", "sphinx (>=5.1.1)", "sphinx-argparse (>=0.3.1)", "sphinx-rtd-theme (>=1)", "towncrier (>=21.9)"]
+testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"]
 
 [[package]]
-name = "wcwidth"
-version = "0.2.5"
-description = "Measures the displayed width of unicode strings in a terminal"
-category = "dev"
+name = "webencodings"
+version = "0.5.1"
+description = "Character encoding aliases for legacy web content"
+category = "main"
 optional = false
 python-versions = "*"
 
-[package.dependencies]
-"backports.functools-lru-cache" = {version = ">=1.2.1", markers = "python_version < \"3.2\""}
-
 [[package]]
-name = "webencodings"
-version = "0.5.1"
-description = "Character encoding aliases for legacy web content"
+name = "xattr"
+version = "0.9.9"
+description = "Python wrapper for extended filesystem attributes"
 category = "main"
 optional = false
 python-versions = "*"
 
+[package.dependencies]
+cffi = ">=1.0"
+
 [[package]]
 name = "zipp"
-version = "1.2.0"
+version = "3.8.1"
 description = "Backport of pathlib-compatible object wrapper for zip files"
 category = "main"
 optional = false
-python-versions = ">=2.7"
-
-[package.dependencies]
-contextlib2 = {version = "*", markers = "python_version < \"3.4\""}
+python-versions = ">=3.7"
 
 [package.extras]
-docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"]
-testing = ["pathlib2", "unittest2", "jaraco.itertools", "func-timeout"]
+docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco-itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"]
 
 [metadata]
 lock-version = "1.1"
-python-versions = "~2.7 || ^3.5"
-content-hash = "3de9a28e5a2f53d26b75a9aa3eb333b360eb04470769675fb435183ab871798c"
+python-versions = "^3.7"
+content-hash = "59769b5de61c824843ca787bc97b8eb3fdc15883aad6fdc3523c1180c0075af2"
 
 [metadata.files]
-atomicwrites = [
-    {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
-    {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
-]
-attrs = [
-    {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
-    {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
-]
-"backports.functools-lru-cache" = [
-    {file = "backports.functools_lru_cache-1.6.4-py2.py3-none-any.whl", hash = "sha256:dbead04b9daa817909ec64e8d2855fb78feafe0b901d4568758e3a60559d8978"},
-    {file = "backports.functools_lru_cache-1.6.4.tar.gz", hash = "sha256:d5ed2169378b67d3c545e5600d363a923b09c456dab1593914935a68ad478271"},
-]
-cachecontrol = [
-    {file = "CacheControl-0.12.6-py2.py3-none-any.whl", hash = "sha256:10d056fa27f8563a271b345207402a6dcce8efab7e5b377e270329c62471b10d"},
-    {file = "CacheControl-0.12.6.tar.gz", hash = "sha256:be9aa45477a134aee56c8fac518627e1154df063e85f67d4f83ce0ccc23688e8"},
-    {file = "CacheControl-0.12.10-py2.py3-none-any.whl", hash = "sha256:b0d43d8f71948ef5ebdee5fe236b86c6ffc7799370453dccb0e894c20dfa487c"},
-    {file = "CacheControl-0.12.10.tar.gz", hash = "sha256:d8aca75b82eec92d84b5d6eb8c8f66ea16f09d2adb09dbca27fe2d5fc8d3732d"},
-]
-cachy = [
-    {file = "cachy-0.3.0-py2.py3-none-any.whl", hash = "sha256:338ca09c8860e76b275aff52374330efedc4d5a5e45dc1c5b539c1ead0786fe7"},
-    {file = "cachy-0.3.0.tar.gz", hash = "sha256:186581f4ceb42a0bbe040c407da73c14092379b1e4c0e327fdb72ae4a9b269b1"},
-]
-certifi = [
-    {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"},
-    {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"},
-]
-cffi = [
-    {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"},
-    {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"},
-    {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"},
-    {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"},
-    {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"},
-    {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"},
-    {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"},
-    {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"},
-    {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"},
-    {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"},
-    {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"},
-    {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"},
-    {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"},
-    {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"},
-    {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"},
-    {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"},
-    {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"},
-    {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"},
-    {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"},
-    {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"},
-    {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"},
-    {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"},
-    {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"},
-    {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"},
-    {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"},
-    {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"},
-    {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"},
-    {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"},
-    {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"},
-    {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"},
-    {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"},
-    {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"},
-    {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"},
-    {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"},
-    {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"},
-    {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"},
-    {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"},
-    {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"},
-    {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"},
-    {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"},
-    {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"},
-    {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"},
-    {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"},
-    {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"},
-    {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"},
-    {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"},
-    {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"},
-    {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"},
-    {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"},
-    {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"},
-]
-cfgv = [
-    {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"},
-    {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"},
-]
-chardet = [
-    {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"},
-    {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"},
-]
-cleo = [
-    {file = "cleo-0.8.1-py2.py3-none-any.whl", hash = "sha256:141cda6dc94a92343be626bb87a0b6c86ae291dfc732a57bf04310d4b4201753"},
-    {file = "cleo-0.8.1.tar.gz", hash = "sha256:3d0e22d30117851b45970b6c14aca4ab0b18b1b53c8af57bed13208147e4069f"},
-]
-clikit = [
-    {file = "clikit-0.6.2-py2.py3-none-any.whl", hash = "sha256:71268e074e68082306e23d7369a7b99f824a0ef926e55ba2665e911f7208489e"},
-    {file = "clikit-0.6.2.tar.gz", hash = "sha256:442ee5db9a14120635c5990bcdbfe7c03ada5898291f0c802f77be71569ded59"},
-]
-colorama = [
-    {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
-    {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
-]
-configparser = [
-    {file = "configparser-4.0.2-py2.py3-none-any.whl", hash = "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c"},
-    {file = "configparser-4.0.2.tar.gz", hash = "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df"},
-]
-contextlib2 = [
-    {file = "contextlib2-0.6.0.post1-py2.py3-none-any.whl", hash = "sha256:3355078a159fbb44ee60ea80abd0d87b80b78c248643b49aa6d94673b413609b"},
-    {file = "contextlib2-0.6.0.post1.tar.gz", hash = "sha256:01f490098c18b19d2bd5bb5dc445b2054d2fa97f09a4280ba2c5f3c394c8162e"},
-]
-coverage = [
-    {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"},
-    {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"},
-    {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"},
-    {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"},
-    {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"},
-    {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"},
-    {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"},
-    {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"},
-    {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"},
-    {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"},
-    {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"},
-    {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"},
-    {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"},
-    {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"},
-    {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"},
-    {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"},
-    {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"},
-    {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"},
-    {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"},
-    {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"},
-    {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"},
-    {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"},
-    {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"},
-    {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"},
-    {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"},
-    {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"},
-    {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"},
-    {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"},
-    {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"},
-    {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"},
-    {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"},
-    {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"},
-    {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"},
-    {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"},
-    {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"},
-    {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"},
-    {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"},
-    {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"},
-    {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"},
-    {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"},
-    {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"},
-    {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"},
-    {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"},
-    {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"},
-    {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"},
-    {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"},
-    {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"},
-    {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"},
-    {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"},
-    {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"},
-    {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"},
-    {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"},
-]
-crashtest = [
-    {file = "crashtest-0.3.1-py3-none-any.whl", hash = "sha256:300f4b0825f57688b47b6d70c6a31de33512eb2fa1ac614f780939aa0cf91680"},
-    {file = "crashtest-0.3.1.tar.gz", hash = "sha256:42ca7b6ce88b6c7433e2ce47ea884e91ec93104a4b754998be498a8e6c3d37dd"},
-]
-cryptography = [
-    {file = "cryptography-3.2.1-cp27-cp27m-macosx_10_10_x86_64.whl", hash = "sha256:6dc59630ecce8c1f558277ceb212c751d6730bd12c80ea96b4ac65637c4f55e7"},
-    {file = "cryptography-3.2.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:75e8e6684cf0034f6bf2a97095cb95f81537b12b36a8fedf06e73050bb171c2d"},
-    {file = "cryptography-3.2.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4e7268a0ca14536fecfdf2b00297d4e407da904718658c1ff1961c713f90fd33"},
-    {file = "cryptography-3.2.1-cp27-cp27m-win32.whl", hash = "sha256:7117319b44ed1842c617d0a452383a5a052ec6aa726dfbaffa8b94c910444297"},
-    {file = "cryptography-3.2.1-cp27-cp27m-win_amd64.whl", hash = "sha256:a733671100cd26d816eed39507e585c156e4498293a907029969234e5e634bc4"},
-    {file = "cryptography-3.2.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:a75f306a16d9f9afebfbedc41c8c2351d8e61e818ba6b4c40815e2b5740bb6b8"},
-    {file = "cryptography-3.2.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:5849d59358547bf789ee7e0d7a9036b2d29e9a4ddf1ce5e06bb45634f995c53e"},
-    {file = "cryptography-3.2.1-cp35-abi3-macosx_10_10_x86_64.whl", hash = "sha256:bd717aa029217b8ef94a7d21632a3bb5a4e7218a4513d2521c2a2fd63011e98b"},
-    {file = "cryptography-3.2.1-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:efe15aca4f64f3a7ea0c09c87826490e50ed166ce67368a68f315ea0807a20df"},
-    {file = "cryptography-3.2.1-cp35-abi3-manylinux2010_x86_64.whl", hash = "sha256:32434673d8505b42c0de4de86da8c1620651abd24afe91ae0335597683ed1b77"},
-    {file = "cryptography-3.2.1-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:7b8d9d8d3a9bd240f453342981f765346c87ade811519f98664519696f8e6ab7"},
-    {file = "cryptography-3.2.1-cp35-cp35m-win32.whl", hash = "sha256:d3545829ab42a66b84a9aaabf216a4dce7f16dbc76eb69be5c302ed6b8f4a29b"},
-    {file = "cryptography-3.2.1-cp35-cp35m-win_amd64.whl", hash = "sha256:a4e27ed0b2504195f855b52052eadcc9795c59909c9d84314c5408687f933fc7"},
-    {file = "cryptography-3.2.1-cp36-abi3-win32.whl", hash = "sha256:13b88a0bd044b4eae1ef40e265d006e34dbcde0c2f1e15eb9896501b2d8f6c6f"},
-    {file = "cryptography-3.2.1-cp36-abi3-win_amd64.whl", hash = "sha256:07ca431b788249af92764e3be9a488aa1d39a0bc3be313d826bbec690417e538"},
-    {file = "cryptography-3.2.1-cp36-cp36m-win32.whl", hash = "sha256:a035a10686532b0587d58a606004aa20ad895c60c4d029afa245802347fab57b"},
-    {file = "cryptography-3.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:d26a2557d8f9122f9bf445fc7034242f4375bd4e95ecda007667540270965b13"},
-    {file = "cryptography-3.2.1-cp37-cp37m-win32.whl", hash = "sha256:545a8550782dda68f8cdc75a6e3bf252017aa8f75f19f5a9ca940772fc0cb56e"},
-    {file = "cryptography-3.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:55d0b896631412b6f0c7de56e12eb3e261ac347fbaa5d5e705291a9016e5f8cb"},
-    {file = "cryptography-3.2.1-cp38-cp38-win32.whl", hash = "sha256:3cd75a683b15576cfc822c7c5742b3276e50b21a06672dc3a800a2d5da4ecd1b"},
-    {file = "cryptography-3.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:d25cecbac20713a7c3bc544372d42d8eafa89799f492a43b79e1dfd650484851"},
-    {file = "cryptography-3.2.1.tar.gz", hash = "sha256:d3d5e10be0cf2a12214ddee45c6bd203dab435e3d83b4560c03066eda600bfe3"},
-    {file = "cryptography-3.3.2-cp27-cp27m-macosx_10_10_x86_64.whl", hash = "sha256:541dd758ad49b45920dda3b5b48c968f8b2533d8981bcdb43002798d8f7a89ed"},
-    {file = "cryptography-3.3.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:49570438e60f19243e7e0d504527dd5fe9b4b967b5a1ff21cc12b57602dd85d3"},
-    {file = "cryptography-3.3.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a9a4ac9648d39ce71c2f63fe7dc6db144b9fa567ddfc48b9fde1b54483d26042"},
-    {file = "cryptography-3.3.2-cp27-cp27m-win32.whl", hash = "sha256:aa4969f24d536ae2268c902b2c3d62ab464b5a66bcb247630d208a79a8098e9b"},
-    {file = "cryptography-3.3.2-cp27-cp27m-win_amd64.whl", hash = "sha256:1bd0ccb0a1ed775cd7e2144fe46df9dc03eefd722bbcf587b3e0616ea4a81eff"},
-    {file = "cryptography-3.3.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e18e6ab84dfb0ab997faf8cca25a86ff15dfea4027b986322026cc99e0a892da"},
-    {file = "cryptography-3.3.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:c7390f9b2119b2b43160abb34f63277a638504ef8df99f11cb52c1fda66a2e6f"},
-    {file = "cryptography-3.3.2-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:0d7b69674b738068fa6ffade5c962ecd14969690585aaca0a1b1fc9058938a72"},
-    {file = "cryptography-3.3.2-cp36-abi3-manylinux1_x86_64.whl", hash = "sha256:922f9602d67c15ade470c11d616f2b2364950602e370c76f0c94c94ae672742e"},
-    {file = "cryptography-3.3.2-cp36-abi3-manylinux2010_x86_64.whl", hash = "sha256:a0f0b96c572fc9f25c3f4ddbf4688b9b38c69836713fb255f4a2715d93cbaf44"},
-    {file = "cryptography-3.3.2-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:a777c096a49d80f9d2979695b835b0f9c9edab73b59e4ceb51f19724dda887ed"},
-    {file = "cryptography-3.3.2-cp36-abi3-win32.whl", hash = "sha256:3c284fc1e504e88e51c428db9c9274f2da9f73fdf5d7e13a36b8ecb039af6e6c"},
-    {file = "cryptography-3.3.2-cp36-abi3-win_amd64.whl", hash = "sha256:7951a966613c4211b6612b0352f5bf29989955ee592c4a885d8c7d0f830d0433"},
-    {file = "cryptography-3.3.2.tar.gz", hash = "sha256:5a60d3780149e13b7a6ff7ad6526b38846354d11a15e21068e57073e29e19bed"},
-    {file = "cryptography-36.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:73bc2d3f2444bcfeac67dd130ff2ea598ea5f20b40e36d19821b4df8c9c5037b"},
-    {file = "cryptography-36.0.1-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:2d87cdcb378d3cfed944dac30596da1968f88fb96d7fc34fdae30a99054b2e31"},
-    {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74d6c7e80609c0f4c2434b97b80c7f8fdfaa072ca4baab7e239a15d6d70ed73a"},
-    {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:6c0c021f35b421ebf5976abf2daacc47e235f8b6082d3396a2fe3ccd537ab173"},
-    {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d59a9d55027a8b88fd9fd2826c4392bd487d74bf628bb9d39beecc62a644c12"},
-    {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a817b961b46894c5ca8a66b599c745b9a3d9f822725221f0e0fe49dc043a3a3"},
-    {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:94ae132f0e40fe48f310bba63f477f14a43116f05ddb69d6fa31e93f05848ae2"},
-    {file = "cryptography-36.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7be0eec337359c155df191d6ae00a5e8bbb63933883f4f5dffc439dac5348c3f"},
-    {file = "cryptography-36.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e0344c14c9cb89e76eb6a060e67980c9e35b3f36691e15e1b7a9e58a0a6c6dc3"},
-    {file = "cryptography-36.0.1-cp36-abi3-win32.whl", hash = "sha256:4caa4b893d8fad33cf1964d3e51842cd78ba87401ab1d2e44556826df849a8ca"},
-    {file = "cryptography-36.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:391432971a66cfaf94b21c24ab465a4cc3e8bf4a939c1ca5c3e3a6e0abebdbcf"},
-    {file = "cryptography-36.0.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bb5829d027ff82aa872d76158919045a7c1e91fbf241aec32cb07956e9ebd3c9"},
-    {file = "cryptography-36.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc15b1c22e55c4d5566e3ca4db8689470a0ca2babef8e3a9ee057a8b82ce4b1"},
-    {file = "cryptography-36.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:596f3cd67e1b950bc372c33f1a28a0692080625592ea6392987dba7f09f17a94"},
-    {file = "cryptography-36.0.1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:30ee1eb3ebe1644d1c3f183d115a8c04e4e603ed6ce8e394ed39eea4a98469ac"},
-    {file = "cryptography-36.0.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec63da4e7e4a5f924b90af42eddf20b698a70e58d86a72d943857c4c6045b3ee"},
-    {file = "cryptography-36.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca238ceb7ba0bdf6ce88c1b74a87bffcee5afbfa1e41e173b1ceb095b39add46"},
-    {file = "cryptography-36.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:ca28641954f767f9822c24e927ad894d45d5a1e501767599647259cbf030b903"},
-    {file = "cryptography-36.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:39bdf8e70eee6b1c7b289ec6e5d84d49a6bfa11f8b8646b5b3dfe41219153316"},
-    {file = "cryptography-36.0.1.tar.gz", hash = "sha256:53e5c1dc3d7a953de055d77bef2ff607ceef7a2aac0353b5d630ab67f7423638"},
-]
-distlib = [
-    {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"},
-    {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"},
-]
-entrypoints = [
-    {file = "entrypoints-0.3-py2.py3-none-any.whl", hash = "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19"},
-    {file = "entrypoints-0.3.tar.gz", hash = "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"},
-]
-enum34 = [
-    {file = "enum34-1.1.10-py2-none-any.whl", hash = "sha256:a98a201d6de3f2ab3db284e70a33b0f896fbf35f8086594e8c9e74b909058d53"},
-    {file = "enum34-1.1.10-py3-none-any.whl", hash = "sha256:c3858660960c984d6ab0ebad691265180da2b43f07e061c0f8dca9ef3cffd328"},
-    {file = "enum34-1.1.10.tar.gz", hash = "sha256:cce6a7477ed816bd2542d03d53db9f0db935dd013b70f336a95c73979289f248"},
-]
-filelock = [
-    {file = "filelock-3.2.1-py2.py3-none-any.whl", hash = "sha256:7f07b08d731907441ff40d0c5b81f9512cd968842e0b6264c8bd18a8ce877760"},
-    {file = "filelock-3.2.1.tar.gz", hash = "sha256:9cdd29c411ab196cf4c35a1da684f7b9da723696cb356efa45bf5eb1ff313ee3"},
-]
-funcsigs = [
-    {file = "funcsigs-1.0.2-py2.py3-none-any.whl", hash = "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca"},
-    {file = "funcsigs-1.0.2.tar.gz", hash = "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"},
-]
-functools32 = [
-    {file = "functools32-3.2.3-2.tar.gz", hash = "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d"},
-    {file = "functools32-3.2.3-2.zip", hash = "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0"},
-]
-futures = [
-    {file = "futures-3.3.0-py2-none-any.whl", hash = "sha256:49b3f5b064b6e3afc3316421a3f25f66c137ae88f068abbf72830170033c5e16"},
-    {file = "futures-3.3.0.tar.gz", hash = "sha256:7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794"},
-]
-glob2 = [
-    {file = "glob2-0.6.tar.gz", hash = "sha256:f5b0a686ff21f820c4d3f0c4edd216704cea59d79d00fa337e244a2f2ff83ed6"},
-]
-html5lib = [
-    {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"},
-    {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"},
-]
-httpretty = [
-    {file = "httpretty-0.9.7.tar.gz", hash = "sha256:66216f26b9d2c52e81808f3e674a6fb65d4bf719721394a1a9be926177e55fbe"},
-]
-identify = [
-    {file = "identify-2.4.4-py2.py3-none-any.whl", hash = "sha256:aa68609c7454dbcaae60a01ff6b8df1de9b39fe6e50b1f6107ec81dcda624aa6"},
-    {file = "identify-2.4.4.tar.gz", hash = "sha256:6b4b5031f69c48bf93a646b90de9b381c6b5f560df4cbe0ed3cf7650ae741e4d"},
-]
-idna = [
-    {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"},
-    {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"},
-]
-importlib-metadata = [
-    {file = "importlib_metadata-1.7.0-py2.py3-none-any.whl", hash = "sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070"},
-    {file = "importlib_metadata-1.7.0.tar.gz", hash = "sha256:90bb658cdbbf6d1735b6341ce708fc7024a3e14e99ffdc5783edea9f9b077f83"},
-]
-importlib-resources = [
-    {file = "importlib_resources-3.2.1-py2.py3-none-any.whl", hash = "sha256:e2860cf0c4bc999947228d18be154fa3779c5dde0b882bd2d7b3f4d25e698bd6"},
-    {file = "importlib_resources-3.2.1.tar.gz", hash = "sha256:a9fe213ab6452708ec1b3f4ec6f2881b8ab3645cb4e5efb7fea2bbf05a91db3b"},
-]
-iniconfig = [
-    {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
-    {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
-]
-ipaddress = [
-    {file = "ipaddress-1.0.23-py2.py3-none-any.whl", hash = "sha256:6e0f4a39e66cb5bb9a137b00276a2eff74f93b71dcbdad6f10ff7df9d3557fcc"},
-    {file = "ipaddress-1.0.23.tar.gz", hash = "sha256:b7f8e0369580bb4a24d5ba1d7cc29660a4a6987763faf1d8a8046830e020e7e2"},
-]
-jeepney = [
-    {file = "jeepney-0.4.3-py3-none-any.whl", hash = "sha256:d6c6b49683446d2407d2fe3acb7a368a77ff063f9182fe427da15d622adc24cf"},
-    {file = "jeepney-0.4.3.tar.gz", hash = "sha256:3479b861cc2b6407de5188695fa1a8d57e5072d7059322469b62628869b8e36e"},
-    {file = "jeepney-0.7.1-py3-none-any.whl", hash = "sha256:1b5a0ea5c0e7b166b2f5895b91a08c14de8915afda4407fb5022a195224958ac"},
-    {file = "jeepney-0.7.1.tar.gz", hash = "sha256:fa9e232dfa0c498bd0b8a3a73b8d8a31978304dcef0515adc859d4e096f96f4f"},
-]
-keyring = [
-    {file = "keyring-18.0.1-py2.py3-none-any.whl", hash = "sha256:7b29ebfcf8678c4da531b2478a912eea01e80007e5ddca9ee0c7038cb3489ec6"},
-    {file = "keyring-18.0.1.tar.gz", hash = "sha256:67d6cc0132bd77922725fae9f18366bb314fd8f95ff4d323a4df41890a96a838"},
-    {file = "keyring-20.0.1-py2.py3-none-any.whl", hash = "sha256:c674f032424b4bffc62abeac5523ec49cc84aed07a480c3233e0baf618efc15c"},
-    {file = "keyring-20.0.1.tar.gz", hash = "sha256:963bfa7f090269d30bdc5e25589e5fd9dad2cf2a7c6f176a7f2386910e5d0d8d"},
-    {file = "keyring-22.3.0-py3-none-any.whl", hash = "sha256:2bc8363ebdd63886126a012057a85c8cb6e143877afa02619ac7dbc9f38a207b"},
-    {file = "keyring-22.3.0.tar.gz", hash = "sha256:16927a444b2c73f983520a48dec79ddab49fe76429ea05b8d528d778c8339522"},
-]
-lockfile = [
-    {file = "lockfile-0.12.2-py2.py3-none-any.whl", hash = "sha256:6c3cb24f344923d30b2785d5ad75182c8ea7ac1b6171b08657258ec7429d50fa"},
-    {file = "lockfile-0.12.2.tar.gz", hash = "sha256:6aed02de03cba24efabcd600b30540140634fc06cfa603822d508d5361e9f799"},
-]
-mock = [
-    {file = "mock-3.0.5-py2.py3-none-any.whl", hash = "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"},
-    {file = "mock-3.0.5.tar.gz", hash = "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3"},
-]
-more-itertools = [
-    {file = "more-itertools-5.0.0.tar.gz", hash = "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4"},
-    {file = "more_itertools-5.0.0-py2-none-any.whl", hash = "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc"},
-    {file = "more_itertools-5.0.0-py3-none-any.whl", hash = "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"},
-    {file = "more-itertools-7.2.0.tar.gz", hash = "sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832"},
-    {file = "more_itertools-7.2.0-py3-none-any.whl", hash = "sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4"},
-    {file = "more-itertools-8.12.0.tar.gz", hash = "sha256:7dc6ad46f05f545f900dd59e8dfb4e84a4827b97b3cfecb175ea0c7d247f6064"},
-    {file = "more_itertools-8.12.0-py3-none-any.whl", hash = "sha256:43e6dd9942dffd72661a2c4ef383ad7da1e6a3e968a927ad7a6083ab410a688b"},
-]
-msgpack = [
-    {file = "msgpack-1.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:96acc674bb9c9be63fa8b6dabc3248fdc575c4adc005c440ad02f87ca7edd079"},
-    {file = "msgpack-1.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2c3ca57c96c8e69c1a0d2926a6acf2d9a522b41dc4253a8945c4c6cd4981a4e3"},
-    {file = "msgpack-1.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0a792c091bac433dfe0a70ac17fc2087d4595ab835b47b89defc8bbabcf5c73"},
-    {file = "msgpack-1.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c58cdec1cb5fcea8c2f1771d7b5fec79307d056874f746690bd2bdd609ab147"},
-    {file = "msgpack-1.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f97c0f35b3b096a330bb4a1a9247d0bd7e1f3a2eba7ab69795501504b1c2c39"},
-    {file = "msgpack-1.0.3-cp310-cp310-win32.whl", hash = "sha256:36a64a10b16c2ab31dcd5f32d9787ed41fe68ab23dd66957ca2826c7f10d0b85"},
-    {file = "msgpack-1.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c1ba333b4024c17c7591f0f372e2daa3c31db495a9b2af3cf664aef3c14354f7"},
-    {file = "msgpack-1.0.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c2140cf7a3ec475ef0938edb6eb363fa704159e0bf71dde15d953bacc1cf9d7d"},
-    {file = "msgpack-1.0.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f4c22717c74d44bcd7af353024ce71c6b55346dad5e2cc1ddc17ce8c4507c6b"},
-    {file = "msgpack-1.0.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d733a15ade190540c703de209ffbc42a3367600421b62ac0c09fde594da6ec"},
-    {file = "msgpack-1.0.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7e03b06f2982aa98d4ddd082a210c3db200471da523f9ac197f2828e80e7770"},
-    {file = "msgpack-1.0.3-cp36-cp36m-win32.whl", hash = "sha256:3d875631ecab42f65f9dce6f55ce6d736696ced240f2634633188de2f5f21af9"},
-    {file = "msgpack-1.0.3-cp36-cp36m-win_amd64.whl", hash = "sha256:40fb89b4625d12d6027a19f4df18a4de5c64f6f3314325049f219683e07e678a"},
-    {file = "msgpack-1.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6eef0cf8db3857b2b556213d97dd82de76e28a6524853a9beb3264983391dc1a"},
-    {file = "msgpack-1.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d8c332f53ffff01953ad25131272506500b14750c1d0ce8614b17d098252fbc"},
-    {file = "msgpack-1.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c0903bd93cbd34653dd63bbfcb99d7539c372795201f39d16fdfde4418de43a"},
-    {file = "msgpack-1.0.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf1e6bfed4860d72106f4e0a1ab519546982b45689937b40257cfd820650b920"},
-    {file = "msgpack-1.0.3-cp37-cp37m-win32.whl", hash = "sha256:d02cea2252abc3756b2ac31f781f7a98e89ff9759b2e7450a1c7a0d13302ff50"},
-    {file = "msgpack-1.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:2f30dd0dc4dfe6231ad253b6f9f7128ac3202ae49edd3f10d311adc358772dba"},
-    {file = "msgpack-1.0.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f201d34dc89342fabb2a10ed7c9a9aaaed9b7af0f16a5923f1ae562b31258dea"},
-    {file = "msgpack-1.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bb87f23ae7d14b7b3c21009c4b1705ec107cb21ee71975992f6aca571fb4a42a"},
-    {file = "msgpack-1.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a3a5c4b16e9d0edb823fe54b59b5660cc8d4782d7bf2c214cb4b91a1940a8ef"},
-    {file = "msgpack-1.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f74da1e5fcf20ade12c6bf1baa17a2dc3604958922de8dc83cbe3eff22e8b611"},
-    {file = "msgpack-1.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73a80bd6eb6bcb338c1ec0da273f87420829c266379c8c82fa14c23fb586cfa1"},
-    {file = "msgpack-1.0.3-cp38-cp38-win32.whl", hash = "sha256:9fce00156e79af37bb6db4e7587b30d11e7ac6a02cb5bac387f023808cd7d7f4"},
-    {file = "msgpack-1.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:9b6f2d714c506e79cbead331de9aae6837c8dd36190d02da74cb409b36162e8a"},
-    {file = "msgpack-1.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:89908aea5f46ee1474cc37fbc146677f8529ac99201bc2faf4ef8edc023c2bf3"},
-    {file = "msgpack-1.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:973ad69fd7e31159eae8f580f3f707b718b61141838321c6fa4d891c4a2cca52"},
-    {file = "msgpack-1.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da24375ab4c50e5b7486c115a3198d207954fe10aaa5708f7b65105df09109b2"},
-    {file = "msgpack-1.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a598d0685e4ae07a0672b59792d2cc767d09d7a7f39fd9bd37ff84e060b1a996"},
-    {file = "msgpack-1.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4c309a68cb5d6bbd0c50d5c71a25ae81f268c2dc675c6f4ea8ab2feec2ac4e2"},
-    {file = "msgpack-1.0.3-cp39-cp39-win32.whl", hash = "sha256:494471d65b25a8751d19c83f1a482fd411d7ca7a3b9e17d25980a74075ba0e88"},
-    {file = "msgpack-1.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:f01b26c2290cbd74316990ba84a14ac3d599af9cebefc543d241a66e785cf17d"},
-    {file = "msgpack-1.0.3.tar.gz", hash = "sha256:51fdc7fb93615286428ee7758cecc2f374d5ff363bdd884c7ea622a7a327a81e"},
-]
-nodeenv = [
-    {file = "nodeenv-1.6.0-py2.py3-none-any.whl", hash = "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"},
-    {file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"},
-]
-packaging = [
-    {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"},
-    {file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"},
-]
-pastel = [
-    {file = "pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364"},
-    {file = "pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d"},
-]
-pathlib2 = [
-    {file = "pathlib2-2.3.6-py2.py3-none-any.whl", hash = "sha256:3a130b266b3a36134dcc79c17b3c7ac9634f083825ca6ea9d8f557ee6195c9c8"},
-    {file = "pathlib2-2.3.6.tar.gz", hash = "sha256:7d8bcb5555003cdf4a8d2872c538faa3a0f5d20630cb360e518ca3b981795e5f"},
-]
-pexpect = [
-    {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"},
-    {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"},
-]
-pkginfo = [
-    {file = "pkginfo-1.8.2-py2.py3-none-any.whl", hash = "sha256:c24c487c6a7f72c66e816ab1796b96ac6c3d14d49338293d2141664330b55ffc"},
-    {file = "pkginfo-1.8.2.tar.gz", hash = "sha256:542e0d0b6750e2e21c20179803e40ab50598d8066d51097a0e382cba9eb02bff"},
-]
-platformdirs = [
-    {file = "platformdirs-2.0.2-py2.py3-none-any.whl", hash = "sha256:0b9547541f599d3d242078ae60b927b3e453f0ad52f58b4d4bc3be86aed3ec41"},
-    {file = "platformdirs-2.0.2.tar.gz", hash = "sha256:3b00d081227d9037bbbca521a5787796b5ef5000faea1e43fd76f1d44b06fcfa"},
-]
-pluggy = [
-    {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"},
-    {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"},
-]
-poetry-core = [
-    {file = "poetry-core-1.0.7.tar.gz", hash = "sha256:98c11c755a16ef6c5673c22ca94a3802a7df4746a0853a70b6fae8b9f5cac206"},
-    {file = "poetry_core-1.0.7-py2.py3-none-any.whl", hash = "sha256:4f8a7f5390d772f42c4c4c3f188e6424b802cb4b57466c6633a1b9ac36f18a43"},
-]
-pre-commit = [
-    {file = "pre_commit-2.17.0-py2.py3-none-any.whl", hash = "sha256:725fa7459782d7bec5ead072810e47351de01709be838c2ce1726b9591dad616"},
-    {file = "pre_commit-2.17.0.tar.gz", hash = "sha256:c1a8040ff15ad3d648c70cc3e55b93e4d2d5b687320955505587fd79bbaed06a"},
-]
-ptyprocess = [
-    {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"},
-    {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"},
-]
-py = [
-    {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
-    {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
-]
-pycparser = [
-    {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
-    {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
-]
-pylev = [
-    {file = "pylev-1.4.0-py2.py3-none-any.whl", hash = "sha256:7b2e2aa7b00e05bb3f7650eb506fc89f474f70493271a35c242d9a92188ad3dd"},
-    {file = "pylev-1.4.0.tar.gz", hash = "sha256:9e77e941042ad3a4cc305dcdf2b2dec1aec2fbe3dd9015d2698ad02b173006d1"},
-]
-pyparsing = [
-    {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"},
-    {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"},
-]
-pytest = [
-    {file = "pytest-4.6.11-py2.py3-none-any.whl", hash = "sha256:a00a7d79cbbdfa9d21e7d0298392a8dd4123316bfac545075e6f8f24c94d8c97"},
-    {file = "pytest-4.6.11.tar.gz", hash = "sha256:50fa82392f2120cc3ec2ca0a75ee615be4c479e66669789771f1758332be4353"},
-    {file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"},
-    {file = "pytest-5.4.3.tar.gz", hash = "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"},
-    {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"},
-    {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"},
-]
-pytest-cov = [
-    {file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"},
-    {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"},
-]
-pytest-mock = [
-    {file = "pytest-mock-1.13.0.tar.gz", hash = "sha256:e24a911ec96773022ebcc7030059b57cd3480b56d4f5d19b7c370ec635e6aed5"},
-    {file = "pytest_mock-1.13.0-py2.py3-none-any.whl", hash = "sha256:67e414b3caef7bff6fc6bd83b22b5bc39147e4493f483c2679bc9d4dc485a94d"},
-]
-pytest-sugar = [
-    {file = "pytest-sugar-0.9.4.tar.gz", hash = "sha256:b1b2186b0a72aada6859bea2a5764145e3aaa2c1cfbb23c3a19b5f7b697563d3"},
-]
-pywin32-ctypes = [
-    {file = "pywin32-ctypes-0.2.0.tar.gz", hash = "sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942"},
-    {file = "pywin32_ctypes-0.2.0-py2.py3-none-any.whl", hash = "sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98"},
-]
-pyyaml = [
-    {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
-    {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"},
-    {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"},
-    {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"},
-    {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
-    {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
-    {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
-    {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"},
-    {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"},
-    {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"},
-    {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"},
-    {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"},
-    {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"},
-    {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"},
-    {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"},
-    {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"},
-    {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"},
-    {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"},
-    {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"},
-    {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"},
-    {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"},
-    {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"},
-    {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"},
-    {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"},
-    {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"},
-    {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"},
-    {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"},
-    {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"},
-    {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"},
-    {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"},
-    {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"},
-    {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
-    {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
-]
-requests = [
-    {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"},
-    {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"},
-]
-requests-toolbelt = [
-    {file = "requests-toolbelt-0.9.1.tar.gz", hash = "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"},
-    {file = "requests_toolbelt-0.9.1-py2.py3-none-any.whl", hash = "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f"},
-]
-scandir = [
-    {file = "scandir-1.10.0-cp27-cp27m-win32.whl", hash = "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188"},
-    {file = "scandir-1.10.0-cp27-cp27m-win_amd64.whl", hash = "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"},
-    {file = "scandir-1.10.0-cp34-cp34m-win32.whl", hash = "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f"},
-    {file = "scandir-1.10.0-cp34-cp34m-win_amd64.whl", hash = "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e"},
-    {file = "scandir-1.10.0-cp35-cp35m-win32.whl", hash = "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f"},
-    {file = "scandir-1.10.0-cp35-cp35m-win_amd64.whl", hash = "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32"},
-    {file = "scandir-1.10.0-cp36-cp36m-win32.whl", hash = "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022"},
-    {file = "scandir-1.10.0-cp36-cp36m-win_amd64.whl", hash = "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4"},
-    {file = "scandir-1.10.0-cp37-cp37m-win32.whl", hash = "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173"},
-    {file = "scandir-1.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d"},
-    {file = "scandir-1.10.0.tar.gz", hash = "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae"},
-]
-secretstorage = [
-    {file = "SecretStorage-2.3.1.tar.gz", hash = "sha256:3af65c87765323e6f64c83575b05393f9e003431959c9395d1791d51497f29b6"},
-    {file = "SecretStorage-3.2.0-py3-none-any.whl", hash = "sha256:ed5279d788af258e4676fa26b6efb6d335a31f1f9f529b6f1e200f388fac33e1"},
-    {file = "SecretStorage-3.2.0.tar.gz", hash = "sha256:46305c3847ee3f7252b284e0eee5590fa6341c891104a2fd2313f8798c615a82"},
-    {file = "SecretStorage-3.3.1-py3-none-any.whl", hash = "sha256:422d82c36172d88d6a0ed5afdec956514b189ddbfb72fefab0c8a1cee4eaf71f"},
-    {file = "SecretStorage-3.3.1.tar.gz", hash = "sha256:fd666c51a6bf200643495a04abb261f83229dcb6fd8472ec393df7ffc8b6f195"},
-]
-shellingham = [
-    {file = "shellingham-1.4.0-py2.py3-none-any.whl", hash = "sha256:536b67a0697f2e4af32ab176c00a50ac2899c5a05e0d8e2dadac8e58888283f9"},
-    {file = "shellingham-1.4.0.tar.gz", hash = "sha256:4855c2458d6904829bd34c299f11fdeed7cfefbf8a2c522e4caea6cd76b3171e"},
-]
-singledispatch = [
-    {file = "singledispatch-3.7.0-py2.py3-none-any.whl", hash = "sha256:bc77afa97c8a22596d6d4fc20f1b7bdd2b86edc2a65a4262bdd7cc3cc19aa989"},
-    {file = "singledispatch-3.7.0.tar.gz", hash = "sha256:c1a4d5c1da310c3fd8fccfb8d4e1cb7df076148fd5d858a819e37fffe44f3092"},
-]
-six = [
-    {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
-    {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
-]
-subprocess32 = [
-    {file = "subprocess32-3.5.4-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:88e37c1aac5388df41cc8a8456bb49ebffd321a3ad4d70358e3518176de3a56b"},
-    {file = "subprocess32-3.5.4-cp27-cp27mu-manylinux2014_x86_64.whl", hash = "sha256:e45d985aef903c5b7444d34350b05da91a9e0ea015415ab45a21212786c649d0"},
-    {file = "subprocess32-3.5.4.tar.gz", hash = "sha256:eb2937c80497978d181efa1b839ec2d9622cf9600a039a79d0e108d1f9aec79d"},
-]
-termcolor = [
-    {file = "termcolor-1.1.0.tar.gz", hash = "sha256:1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b"},
-]
-toml = [
-    {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
-    {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
-]
-tomlkit = [
-    {file = "tomlkit-0.7.2-py2.py3-none-any.whl", hash = "sha256:173ad840fa5d2aac140528ca1933c29791b79a374a0861a80347f42ec9328117"},
-    {file = "tomlkit-0.7.2.tar.gz", hash = "sha256:d7a454f319a7e9bd2e249f239168729327e4dd2d27b17dc68be264ad1ce36754"},
-]
-tox = [
-    {file = "tox-3.24.5-py2.py3-none-any.whl", hash = "sha256:be3362472a33094bce26727f5f771ca0facf6dafa217f65875314e9a6600c95c"},
-    {file = "tox-3.24.5.tar.gz", hash = "sha256:67e0e32c90e278251fea45b696d0fef3879089ccbe979b0c556d35d5a70e2993"},
-]
-typing = [
-    {file = "typing-3.10.0.0-py2-none-any.whl", hash = "sha256:c7219ef20c5fbf413b4567092adfc46fa6203cb8454eda33c3fc1afe1398a308"},
-    {file = "typing-3.10.0.0-py3-none-any.whl", hash = "sha256:12fbdfbe7d6cca1a42e485229afcb0b0c8259258cfb919b8a5e2a5c953742f89"},
-    {file = "typing-3.10.0.0.tar.gz", hash = "sha256:13b4ad211f54ddbf93e5901a9967b1e07720c1d1b78d596ac6a439641aa1b130"},
-]
-typing-extensions = [
-    {file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"},
-    {file = "typing_extensions-3.10.0.2-py3-none-any.whl", hash = "sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34"},
-    {file = "typing_extensions-3.10.0.2.tar.gz", hash = "sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e"},
-]
-urllib3 = [
-    {file = "urllib3-1.25.11-py2.py3-none-any.whl", hash = "sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e"},
-    {file = "urllib3-1.25.11.tar.gz", hash = "sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2"},
-]
-virtualenv = [
-    {file = "virtualenv-20.13.1-py2.py3-none-any.whl", hash = "sha256:45e1d053cad4cd453181ae877c4ffc053546ae99e7dd049b9ff1d9be7491abf7"},
-    {file = "virtualenv-20.13.1.tar.gz", hash = "sha256:e0621bcbf4160e4e1030f05065c8834b4e93f4fcc223255db2a823440aca9c14"},
-]
-wcwidth = [
-    {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"},
-    {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"},
-]
-webencodings = [
-    {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"},
-    {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"},
-]
-zipp = [
-    {file = "zipp-1.2.0-py2.py3-none-any.whl", hash = "sha256:e0d9e63797e483a30d27e09fffd308c59a700d365ec34e93cc100844168bf921"},
-    {file = "zipp-1.2.0.tar.gz", hash = "sha256:c70410551488251b0fee67b460fb9a536af8d6f9f008ad10ac51f615b6a521b1"},
-]
+atomicwrites = []
+attrs = []
+cachecontrol = []
+cachy = []
+certifi = []
+cffi = []
+cfgv = []
+charset-normalizer = []
+cleo = []
+colorama = []
+coverage = []
+crashtest = []
+cryptography = []
+deepdiff = []
+distlib = []
+dulwich = []
+execnet = []
+filelock = []
+flatdict = []
+html5lib = []
+httpretty = []
+identify = []
+idna = []
+importlib-metadata = []
+importlib-resources = []
+iniconfig = []
+jeepney = []
+jsonschema = []
+keyring = []
+lockfile = []
+msgpack = []
+mypy = []
+mypy-extensions = []
+nodeenv = []
+ordered-set = []
+packaging = []
+pexpect = []
+pkginfo = []
+pkgutil-resolve-name = []
+platformdirs = []
+pluggy = []
+poetry-core = []
+poetry-plugin-export = []
+pre-commit = []
+psutil = []
+ptyprocess = []
+py = []
+pycparser = []
+pylev = []
+pyparsing = []
+pyrsistent = []
+pytest = []
+pytest-cov = []
+pytest-forked = []
+pytest-mock = []
+pytest-randomly = []
+pytest-sugar = []
+pytest-xdist = []
+pywin32-ctypes = []
+pyyaml = []
+requests = []
+requests-toolbelt = []
+secretstorage = []
+setuptools = []
+shellingham = []
+six = []
+termcolor = []
+toml = []
+tomli = []
+tomlkit = []
+tox = []
+typed-ast = []
+types-html5lib = []
+types-jsonschema = []
+types-requests = []
+types-urllib3 = []
+typing-extensions = []
+urllib3 = []
+virtualenv = []
+webencodings = []
+xattr = []
+zipp = []
diff --git a/vendor/poetry/poetry/__init__.py b/vendor/poetry/poetry/__init__.py
deleted file mode 100644
index 26cfe405..00000000
--- a/vendor/poetry/poetry/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from pkgutil import extend_path
-
-
-__path__ = extend_path(__path__, __name__)
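The deleted `__init__.py` made `poetry` an old-style namespace package via
`pkgutil.extend_path`, so the vendored tree and any separately installed copy
could both contribute submodules. A minimal sketch of the mechanism, placed in
a package's `__init__.py` (illustration only, not part of the patch):

    # extend_path scans every sys.path entry for a directory named after
    # this package and appends the matches to __path__. With two roots
    # both containing poetry/, __path__ becomes a two-element list and
    # "import poetry.config" searches both directories in order.
    from pkgutil import extend_path

    __path__ = extend_path(__path__, __name__)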
diff --git a/vendor/poetry/poetry/__main__.py b/vendor/poetry/poetry/__main__.py
deleted file mode 100644
index b280ed84..00000000
--- a/vendor/poetry/poetry/__main__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-import sys
-
-
-if __name__ == "__main__":
-    from .console import main
-
-    sys.exit(main())
diff --git a/vendor/poetry/poetry/__version__.py b/vendor/poetry/poetry/__version__.py
deleted file mode 100644
index 316ae3d0..00000000
--- a/vendor/poetry/poetry/__version__.py
+++ /dev/null
@@ -1 +0,0 @@
-__version__ = "1.1.15"
diff --git a/vendor/poetry/poetry/config/config.py b/vendor/poetry/poetry/config/config.py
deleted file mode 100644
index e8c4f0b3..00000000
--- a/vendor/poetry/poetry/config/config.py
+++ /dev/null
@@ -1,156 +0,0 @@
-from __future__ import absolute_import
-
-import os
-import re
-
-from copy import deepcopy
-from typing import Any
-from typing import Callable
-from typing import Dict
-from typing import Optional
-
-from poetry.locations import CACHE_DIR
-from poetry.utils._compat import Path
-from poetry.utils._compat import basestring
-
-from .config_source import ConfigSource
-from .dict_config_source import DictConfigSource
-
-
-_NOT_SET = object()
-
-
-def boolean_validator(val):
-    return val in {"true", "false", "1", "0"}
-
-
-def boolean_normalizer(val):
-    return val in ["true", "1"]
-
-
-class Config(object):
-
-    default_config = {
-        "cache-dir": str(CACHE_DIR),
-        "virtualenvs": {
-            "create": True,
-            "in-project": None,
-            "path": os.path.join("{cache-dir}", "virtualenvs"),
-        },
-        "experimental": {"new-installer": True},
-        "installer": {"parallel": True},
-    }
-
-    def __init__(
-        self, use_environment=True, base_dir=None
-    ):  # type: (bool, Optional[Path]) -> None
-        self._config = deepcopy(self.default_config)
-        self._use_environment = use_environment
-        self._base_dir = base_dir
-        self._config_source = DictConfigSource()
-        self._auth_config_source = DictConfigSource()
-
-    @property
-    def name(self):
-        return str(self._file.path)
-
-    @property
-    def config(self):
-        return self._config
-
-    @property
-    def config_source(self):  # type: () -> ConfigSource
-        return self._config_source
-
-    @property
-    def auth_config_source(self):  # type: () -> ConfigSource
-        return self._auth_config_source
-
-    def set_config_source(self, config_source):  # type: (ConfigSource) -> Config
-        self._config_source = config_source
-
-        return self
-
-    def set_auth_config_source(self, config_source):  # type: (ConfigSource) -> Config
-        self._auth_config_source = config_source
-
-        return self
-
-    def merge(self, config):  # type: (Dict[str, Any]) -> None
-        from poetry.utils.helpers import merge_dicts
-
-        merge_dicts(self._config, config)
-
-    def all(self):  # type: () -> Dict[str, Any]
-        def _all(config, parent_key=""):
-            all_ = {}
-
-            for key in config:
-                value = self.get(parent_key + key)
-                if isinstance(value, dict):
-                    all_[key] = _all(config[key], parent_key=key + ".")
-                    continue
-
-                all_[key] = value
-
-            return all_
-
-        return _all(self.config)
-
-    def raw(self):  # type: () -> Dict[str, Any]
-        return self._config
-
-    def get(self, setting_name, default=None):  # type: (str, Any) -> Any
-        """
-        Retrieve a setting value.
-        """
-        keys = setting_name.split(".")
-
-        # Looking in the environment if the setting
-        # is set via a POETRY_* environment variable
-        if self._use_environment:
-            env = "POETRY_{}".format(
-                "_".join(k.upper().replace("-", "_") for k in keys)
-            )
-            value = os.getenv(env, _NOT_SET)
-            if value is not _NOT_SET:
-                return self.process(self._get_normalizer(setting_name)(value))
-
-        value = self._config
-        for key in keys:
-            if key not in value:
-                return self.process(default)
-
-            value = value[key]
-
-        return self.process(value)
-
-    def process(self, value):  # type: (Any) -> Any
-        if not isinstance(value, basestring):
-            return value
-
-        return re.sub(r"{(.+?)}", lambda m: self.get(m.group(1)), value)
-
-    def _get_validator(self, name):  # type: (str) -> Callable
-        if name in {
-            "virtualenvs.create",
-            "virtualenvs.in-project",
-            "installer.parallel",
-        }:
-            return boolean_validator
-
-        if name == "virtualenvs.path":
-            return str
-
-    def _get_normalizer(self, name):  # type: (str) -> Callable
-        if name in {
-            "virtualenvs.create",
-            "virtualenvs.in-project",
-            "installer.parallel",
-        }:
-            return boolean_normalizer
-
-        if name == "virtualenvs.path":
-            return lambda val: str(Path(val))
-
-        return lambda val: val
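The removed `Config.get` resolves a dotted key in three steps: check for a
`POETRY_*` environment variable, walk the nested config dict, then let
`process()` interpolate `{...}` references such as `{cache-dir}`. A standalone
sketch of that resolution order (simplified: no normalizers, and it assumes
the relevant `POETRY_*` variables are unset):

    import os
    import re

    _NOT_SET = object()

    def get_setting(config, name, default=None):
        """Resolve "virtualenvs.path"-style keys the way the deleted
        Config.get did: environment first, nested dict second."""
        keys = name.split(".")

        # "virtualenvs.path" -> POETRY_VIRTUALENVS_PATH
        env = "POETRY_" + "_".join(k.upper().replace("-", "_") for k in keys)
        value = os.getenv(env, _NOT_SET)
        if value is not _NOT_SET:
            return value

        node = config
        for key in keys:
            if key not in node:
                return default
            node = node[key]

        # {cache-dir}-style references are re-resolved recursively.
        if isinstance(node, str):
            node = re.sub(r"{(.+?)}",
                          lambda m: str(get_setting(config, m.group(1))), node)
        return node

    cfg = {"cache-dir": "/tmp/cache",
           "virtualenvs": {"path": "{cache-dir}/virtualenvs"}}
    assert get_setting(cfg, "virtualenvs.path") == "/tmp/cache/virtualenvs"

Because interpolation re-enters the lookup, exporting POETRY_CACHE_DIR would
flow through to `virtualenvs.path` as well, matching the deleted
implementation.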
diff --git a/vendor/poetry/poetry/config/config_source.py b/vendor/poetry/poetry/config/config_source.py
deleted file mode 100644
index 63a4ad6b..00000000
--- a/vendor/poetry/poetry/config/config_source.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from typing import Any
-
-
-class ConfigSource(object):
-    def add_property(self, key, value):  # type: (str, Any) -> None
-        raise NotImplementedError()
-
-    def remove_property(self, key):  # type: (str) -> None
-        raise NotImplementedError()
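`ConfigSource` is the two-method interface the concrete sources in this
package implement. The plain `raise NotImplementedError()` style (rather than
`abc`) fits the Python 2 compatibility the vendored 1.1 line still carried; a
modern equivalent would look roughly like this (a sketch, not what the repo
ships):

    from abc import ABC, abstractmethod
    from typing import Any

    class ConfigSource(ABC):
        @abstractmethod
        def add_property(self, key: str, value: Any) -> None:
            ...

        @abstractmethod
        def remove_property(self, key: str) -> None:
            ...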
diff --git a/vendor/poetry/poetry/config/dict_config_source.py b/vendor/poetry/poetry/config/dict_config_source.py
deleted file mode 100644
index aaa6ee3b..00000000
--- a/vendor/poetry/poetry/config/dict_config_source.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from typing import Any
-from typing import Dict
-
-from .config_source import ConfigSource
-
-
-class DictConfigSource(ConfigSource):
-    def __init__(self):  # type: () -> None
-        self._config = {}
-
-    @property
-    def config(self):  # type: () -> Dict[str, Any]
-        return self._config
-
-    def add_property(self, key, value):  # type: (str, Any) -> None
-        keys = key.split(".")
-        config = self._config
-
-        for i, key in enumerate(keys):
-            if key not in config and i < len(keys) - 1:
-                config[key] = {}
-
-            if i == len(keys) - 1:
-                config[key] = value
-                break
-
-            config = config[key]
-
-    def remove_property(self, key):  # type: (str) -> None
-        keys = key.split(".")
-
-        config = self._config
-        for i, key in enumerate(keys):
-            if key not in config:
-                return
-
-            if i == len(keys) - 1:
-                del config[key]
-
-                break
-
-            config = config[key]
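`DictConfigSource.add_property` splits the dotted key and materialises
intermediate dicts on the way down, so writing a deep key into an empty source
never raises; `remove_property` deletes only the leaf and leaves emptied
parents in place. A usage sketch against the class as deleted above:

    source = DictConfigSource()
    source.add_property("virtualenvs.in-project", True)
    assert source.config == {"virtualenvs": {"in-project": True}}

    source.remove_property("virtualenvs.in-project")
    assert source.config == {"virtualenvs": {}}  # parent dict survives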
diff --git a/vendor/poetry/poetry/config/file_config_source.py b/vendor/poetry/poetry/config/file_config_source.py
deleted file mode 100644
index ed4e3a85..00000000
--- a/vendor/poetry/poetry/config/file_config_source.py
+++ /dev/null
@@ -1,83 +0,0 @@
-from contextlib import contextmanager
-from typing import TYPE_CHECKING
-from typing import Any
-
-from tomlkit import document
-from tomlkit import table
-
-from .config_source import ConfigSource
-
-
-if TYPE_CHECKING:
-    from poetry.core.toml.file import TOMLFile  # noqa
-
-
-class FileConfigSource(ConfigSource):
-    def __init__(self, file, auth_config=False):  # type: ("TOMLFile", bool) -> None
-        self._file = file
-        self._auth_config = auth_config
-
-    @property
-    def name(self):  # type: () -> str
-        return str(self._file.path)
-
-    @property
-    def file(self):  # type: () -> "TOMLFile"
-        return self._file
-
-    def add_property(self, key, value):  # type: (str, Any) -> None
-        with self.secure() as config:
-            keys = key.split(".")
-
-            for i, key in enumerate(keys):
-                if key not in config and i < len(keys) - 1:
-                    config[key] = table()
-
-                if i == len(keys) - 1:
-                    config[key] = value
-                    break
-
-                config = config[key]
-
-    def remove_property(self, key):  # type: (str) -> None
-        with self.secure() as config:
-            keys = key.split(".")
-
-            current_config = config
-            for i, key in enumerate(keys):
-                if key not in current_config:
-                    return
-
-                if i == len(keys) - 1:
-                    del current_config[key]
-
-                    break
-
-                current_config = current_config[key]
-
-    @contextmanager
-    def secure(self):
-        if self.file.exists():
-            initial_config = self.file.read()
-            config = self.file.read()
-        else:
-            initial_config = document()
-            config = document()
-
-        new_file = not self.file.exists()
-
-        yield config
-
-        try:
-            # Ensuring the file is only readable and writable
-            # by the current user
-            mode = 0o600
-
-            if new_file:
-                self.file.touch(mode=mode)
-
-            self.file.write(config)
-        except Exception:
-            self.file.write(initial_config)
-
-            raise
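`FileConfigSource.secure()` is a write-or-rollback context manager: it snapshots the current document, yields a mutable copy, then writes the result back with `0o600` permissions, restoring the snapshot if the write fails. The same pattern reduced to plain text files (a sketch; the generic `Path` handling here stands in for the vendored `TOMLFile` API):

```python
from contextlib import contextmanager
from pathlib import Path

@contextmanager
def secure(path: Path):
    # Snapshot the current content so a failed write can be rolled back.
    initial = path.read_text() if path.exists() else ""
    state = {"text": initial}
    new_file = not path.exists()

    yield state  # the caller mutates state["text"]

    try:
        if new_file:
            path.touch(mode=0o600)  # owner read/write only
        path.write_text(state["text"])
    except Exception:
        path.write_text(initial)  # roll back to the snapshot
        raise
```

Note that, as in the deleted code, an exception raised inside the `with` body propagates through the `yield` and skips the write entirely, so the file is never touched.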
diff --git a/vendor/poetry/poetry/console/application.py b/vendor/poetry/poetry/console/application.py
deleted file mode 100644
index 8fb32480..00000000
--- a/vendor/poetry/poetry/console/application.py
+++ /dev/null
@@ -1,114 +0,0 @@
-import sys
-
-from cleo import Application as BaseApplication
-
-from poetry.__version__ import __version__
-
-from .commands.about import AboutCommand
-from .commands.add import AddCommand
-from .commands.build import BuildCommand
-from .commands.cache.cache import CacheCommand
-from .commands.check import CheckCommand
-from .commands.config import ConfigCommand
-from .commands.debug.debug import DebugCommand
-from .commands.env.env import EnvCommand
-from .commands.export import ExportCommand
-from .commands.init import InitCommand
-from .commands.install import InstallCommand
-from .commands.lock import LockCommand
-from .commands.new import NewCommand
-from .commands.publish import PublishCommand
-from .commands.remove import RemoveCommand
-from .commands.run import RunCommand
-from .commands.search import SearchCommand
-from .commands.self.self import SelfCommand
-from .commands.shell import ShellCommand
-from .commands.show import ShowCommand
-from .commands.update import UpdateCommand
-from .commands.version import VersionCommand
-from .config import ApplicationConfig
-
-
-class Application(BaseApplication):
-    def __init__(self):
-        super(Application, self).__init__(
-            "poetry", __version__, config=ApplicationConfig("poetry", __version__)
-        )
-
-        self._poetry = None
-
-        for command in self.get_default_commands():
-            self.add(command)
-
-        if sys.version_info[:2] < (3, 6):
-            python_version = "{}".format(
-                ".".join(str(v) for v in sys.version_info[:2])
-            )
-            poetry_feature_release = "1.2"
-            message = (
-                "\n"
-                "Python {} will no longer be supported "
-                "in the next feature release of Poetry ({}).\n"
-                "You should consider updating your Python version to a supported one.\n\n"
-                ""
-                "Note that you will still be able to manage Python {} projects "
-                "by using the env command.\n"
-                "See https://python-poetry.org/docs/managing-environments/ "
-                "for more information."
-            ).format(python_version, poetry_feature_release, python_version)
-            self._preliminary_io.error_line("{}\n".format(message))
-
-    @property
-    def poetry(self):
-        from poetry.factory import Factory
-        from poetry.utils._compat import Path
-
-        if self._poetry is not None:
-            return self._poetry
-
-        self._poetry = Factory().create_poetry(Path.cwd())
-
-        return self._poetry
-
-    def reset_poetry(self):  # type: () -> None
-        self._poetry = None
-
-    def get_default_commands(self):  # type: () -> list
-        commands = [
-            AboutCommand(),
-            AddCommand(),
-            BuildCommand(),
-            CheckCommand(),
-            ConfigCommand(),
-            ExportCommand(),
-            InitCommand(),
-            InstallCommand(),
-            LockCommand(),
-            NewCommand(),
-            PublishCommand(),
-            RemoveCommand(),
-            RunCommand(),
-            SearchCommand(),
-            ShellCommand(),
-            ShowCommand(),
-            UpdateCommand(),
-            VersionCommand(),
-        ]
-
-        # Cache commands
-        commands += [CacheCommand()]
-
-        # Debug command
-        commands += [DebugCommand()]
-
-        # Env command
-        commands += [EnvCommand()]
-
-        # Self commands
-        commands += [SelfCommand()]
-
-        return commands
-
-
-if __name__ == "__main__":
-    Application().run()
diff --git a/vendor/poetry/poetry/console/args/run_args_parser.py b/vendor/poetry/poetry/console/args/run_args_parser.py
deleted file mode 100644
index 9f8cff8b..00000000
--- a/vendor/poetry/poetry/console/args/run_args_parser.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from clikit.api.args import Args
-from clikit.api.args import RawArgs
-from clikit.api.args.format import ArgsFormat
-from clikit.api.args.format import ArgsFormatBuilder
-from clikit.args import DefaultArgsParser
-
-
-class RunArgsParser(DefaultArgsParser):
-    """
-    Parser that just parses command names and leaves the rest
-    alone to be passed to the command.
-    """
-
-    def parse(
-        self, args, fmt, lenient=False
-    ):  # type: (RawArgs, ArgsFormat, bool) -> Args
-        builder = ArgsFormatBuilder()
-        builder.set_command_names(*fmt.get_command_names())
-        builder.set_arguments(*fmt.get_arguments().values())
-        fmt = builder.format
-
-        return super(RunArgsParser, self).parse(args, fmt, True)
-
-    def _parse(
-        self, raw_args, fmt, lenient
-    ):  # type: (RawArgs, ArgsFormat, bool) -> None
-        """
-        Parse everything as a single, multi-valued argument.
-        """
-        tokens = raw_args.tokens[:]
-
-        last_arg = list(fmt.get_arguments().values())[-1]
-        self._arguments[last_arg.name] = []
-
-        while True:
-            try:
-                token = tokens.pop(0)
-            except IndexError:
-                break
-
-            self._arguments[last_arg.name].append(token)
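The effect of `RunArgsParser` is that `poetry run` never interprets what follows the command name — options and all are collected into one multi-valued argument and handed to the subprocess. The equivalent behavior expressed with the stdlib `argparse` rather than clikit (illustration only):

```python
import argparse

parser = argparse.ArgumentParser(prog="poetry")
sub = parser.add_subparsers(dest="command")
run = sub.add_parser("run")
# REMAINDER swallows everything after "run", options included.
run.add_argument("args", nargs=argparse.REMAINDER)

ns = parser.parse_args(["run", "pytest", "-x", "--maxfail=1"])
assert ns.command == "run"
assert ns.args == ["pytest", "-x", "--maxfail=1"]
```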
diff --git a/vendor/poetry/poetry/console/commands/__init__.py b/vendor/poetry/poetry/console/commands/__init__.py
deleted file mode 100644
index b8cb3f4e..00000000
--- a/vendor/poetry/poetry/console/commands/__init__.py
+++ /dev/null
@@ -1,18 +0,0 @@
-from .about import AboutCommand
-from .add import AddCommand
-from .build import BuildCommand
-from .check import CheckCommand
-from .config import ConfigCommand
-from .export import ExportCommand
-from .init import InitCommand
-from .install import InstallCommand
-from .lock import LockCommand
-from .new import NewCommand
-from .publish import PublishCommand
-from .remove import RemoveCommand
-from .run import RunCommand
-from .search import SearchCommand
-from .shell import ShellCommand
-from .show import ShowCommand
-from .update import UpdateCommand
-from .version import VersionCommand
diff --git a/vendor/poetry/poetry/console/commands/about.py b/vendor/poetry/poetry/console/commands/about.py
deleted file mode 100644
index a84a2b6f..00000000
--- a/vendor/poetry/poetry/console/commands/about.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from .command import Command
-
-
-class AboutCommand(Command):
-
-    name = "about"
-
-    description = "Shows information about Poetry."
-
-    def handle(self):
-        self.line(
-            """Poetry - Package Management for Python
-
-Poetry is a dependency manager tracking local dependencies of your projects and libraries.
-See https://github.com/python-poetry/poetry for more information."""
-        )
diff --git a/vendor/poetry/poetry/console/commands/add.py b/vendor/poetry/poetry/console/commands/add.py
deleted file mode 100644
index 29cf07d2..00000000
--- a/vendor/poetry/poetry/console/commands/add.py
+++ /dev/null
@@ -1,215 +0,0 @@
-# -*- coding: utf-8 -*-
-from typing import Dict
-from typing import List
-
-from cleo import argument
-from cleo import option
-
-from .init import InitCommand
-from .installer_command import InstallerCommand
-
-
-class AddCommand(InstallerCommand, InitCommand):
-
-    name = "add"
-    description = "Adds a new dependency to pyproject.toml."
-
-    arguments = [argument("name", "The packages to add.", multiple=True)]
-    options = [
-        option("dev", "D", "Add as a development dependency."),
-        option(
-            "extras",
-            "E",
-            "Extras to activate for the dependency.",
-            flag=False,
-            multiple=True,
-        ),
-        option("optional", None, "Add as an optional dependency."),
-        option(
-            "python",
-            None,
-            "Python version for which the dependency must be installed.",
-            flag=False,
-        ),
-        option(
-            "platform",
-            None,
-            "Platforms for which the dependency must be installed.",
-            flag=False,
-        ),
-        option(
-            "source",
-            None,
-            "Name of the source to use to install the package.",
-            flag=False,
-        ),
-        option("allow-prereleases", None, "Accept prereleases."),
-        option(
-            "dry-run",
-            None,
-            "Output the operations but do not execute anything (implicitly enables --verbose).",
-        ),
-        option("lock", None, "Do not perform operations (only update the lockfile)."),
-    ]
-    help = (
-        "The add command adds required packages to your pyproject.toml and installs them.\n\n"
-        "If you do not specify a version constraint, poetry will choose a suitable one based on the available package versions.\n\n"
-        "You can specify a package in the following forms:\n"
-        "  - A single name (requests)\n"
-        "  - A name and a constraint (requests@^2.23.0)\n"
-        "  - A git url (git+https://github.com/python-poetry/poetry.git)\n"
-        "  - A git url with a revision (git+https://github.com/python-poetry/poetry.git#develop)\n"
-        "  - A file path (../my-package/my-package.whl)\n"
-        "  - A directory (../my-package/)\n"
-        "  - A url (https://example.com/packages/my-package-0.1.0.tar.gz)\n"
-    )
-
-    loggers = ["poetry.repositories.pypi_repository", "poetry.inspection.info"]
-
-    def handle(self):
-        from tomlkit import inline_table
-
-        from poetry.core.semver import parse_constraint
-
-        packages = self.argument("name")
-        is_dev = self.option("dev")
-
-        if self.option("extras") and len(packages) > 1:
-            raise ValueError(
-                "You can only specify one package " "when using the --extras option"
-            )
-
-        section = "dependencies"
-        if is_dev:
-            section = "dev-dependencies"
-
-        original_content = self.poetry.file.read()
-        content = self.poetry.file.read()
-        poetry_content = content["tool"]["poetry"]
-
-        if section not in poetry_content:
-            poetry_content[section] = {}
-
-        existing_packages = self.get_existing_packages_from_input(
-            packages, poetry_content, section
-        )
-
-        if existing_packages:
-            self.notify_about_existing_packages(existing_packages)
-
-        packages = [name for name in packages if name not in existing_packages]
-
-        if not packages:
-            self.line("Nothing to add.")
-            return 0
-
-        requirements = self._determine_requirements(
-            packages,
-            allow_prereleases=self.option("allow-prereleases"),
-            source=self.option("source"),
-        )
-
-        for _constraint in requirements:
-            if "version" in _constraint:
-                # Validate version constraint
-                parse_constraint(_constraint["version"])
-
-            constraint = inline_table()
-            for name, value in _constraint.items():
-                if name == "name":
-                    continue
-
-                constraint[name] = value
-
-            if self.option("optional"):
-                constraint["optional"] = True
-
-            if self.option("allow-prereleases"):
-                constraint["allow-prereleases"] = True
-
-            if self.option("extras"):
-                extras = []
-                for extra in self.option("extras"):
-                    if " " in extra:
-                        extras += [e.strip() for e in extra.split(" ")]
-                    else:
-                        extras.append(extra)
-
-                constraint["extras"] = self.option("extras")
-
-            if self.option("python"):
-                constraint["python"] = self.option("python")
-
-            if self.option("platform"):
-                constraint["platform"] = self.option("platform")
-
-            if self.option("source"):
-                constraint["source"] = self.option("source")
-
-            if len(constraint) == 1 and "version" in constraint:
-                constraint = constraint["version"]
-
-            poetry_content[section][_constraint["name"]] = constraint
-
-        try:
-            # Write new content
-            self.poetry.file.write(content)
-
-            # Cosmetic new line
-            self.line("")
-
-            # Update packages
-            self.reset_poetry()
-
-            self._installer.set_package(self.poetry.package)
-            self._installer.dry_run(self.option("dry-run"))
-            self._installer.verbose(self._io.is_verbose())
-            self._installer.update(True)
-            if self.option("lock"):
-                self._installer.lock()
-
-            self._installer.whitelist([r["name"] for r in requirements])
-
-            status = self._installer.run()
-        except BaseException:
-            # Using BaseException here as some exceptions, eg: KeyboardInterrupt, do not inherit from Exception
-            self.poetry.file.write(original_content)
-            raise
-
-        if status != 0 or self.option("dry-run"):
-            # Revert changes
-            if not self.option("dry-run"):
-                self.line_error(
-                    "\n"
-                    "Failed to add packages, reverting the pyproject.toml file "
-                    "to its original content."
-                )
-
-            self.poetry.file.write(original_content)
-
-        return status
-
-    def get_existing_packages_from_input(
-        self, packages, poetry_content, target_section
-    ):  # type: (List[str], Dict, str) -> List[str]
-        existing_packages = []
-
-        for name in packages:
-            for key in poetry_content[target_section]:
-                if key.lower() == name.lower():
-                    existing_packages.append(name)
-
-        return existing_packages
-
-    def notify_about_existing_packages(
-        self, existing_packages
-    ):  # type: (List[str]) -> None
-        self.line(
-            "The following packages are already present in the pyproject.toml and will be skipped:\n"
-        )
-        for name in existing_packages:
-            self.line("  • {name}".format(name=name))
-        self.line(
-            "\nIf you want to update it to the latest compatible version, you can use `poetry update package`.\n"
-            "If you prefer to upgrade it to the latest available version, you can use `poetry add package@latest`.\n"
-        )
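In `handle` above, each resolved requirement becomes either a bare version string or a tomlkit inline table, depending on how many fields it carries. A condensed sketch of that conversion (the two sample requirement dicts are invented for illustration):

```python
import tomlkit

doc = tomlkit.document()
deps = tomlkit.table()
doc["dependencies"] = deps

for req in [
    {"name": "requests", "version": "^2.28.0"},
    {"name": "flask", "version": "^2.2.0", "extras": ["async"]},
]:
    constraint = tomlkit.inline_table()
    for field, value in req.items():
        if field != "name":
            constraint[field] = value
    # A constraint that is only a version collapses to the short string form.
    if len(constraint) == 1 and "version" in constraint:
        constraint = constraint["version"]
    deps[req["name"]] = constraint

print(tomlkit.dumps(doc))
# prints roughly:
# [dependencies]
# requests = "^2.28.0"
# flask = {version = "^2.2.0", extras = ["async"]}
```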
diff --git a/vendor/poetry/poetry/console/commands/build.py b/vendor/poetry/poetry/console/commands/build.py
deleted file mode 100644
index 72b7319f..00000000
--- a/vendor/poetry/poetry/console/commands/build.py
+++ /dev/null
@@ -1,36 +0,0 @@
-from cleo import option
-
-from .env_command import EnvCommand
-
-
-class BuildCommand(EnvCommand):
-
-    name = "build"
-    description = "Builds a package, as a tarball and a wheel by default."
-
-    options = [
-        option("format", "f", "Limit the format to either sdist or wheel.", flag=False)
-    ]
-
-    loggers = [
-        "poetry.core.masonry.builders.builder",
-        "poetry.core.masonry.builders.sdist",
-        "poetry.core.masonry.builders.wheel",
-    ]
-
-    def handle(self):
-        from poetry.core.masonry import Builder
-
-        fmt = "all"
-        if self.option("format"):
-            fmt = self.option("format")
-
-        package = self.poetry.package
-        self.line(
-            "Building {} ({})".format(
-                package.pretty_name, package.version
-            )
-        )
-
-        builder = Builder(self.poetry)
-        builder.build(fmt, executable=self.env.python)
diff --git a/vendor/poetry/poetry/console/commands/cache/cache.py b/vendor/poetry/poetry/console/commands/cache/cache.py
deleted file mode 100644
index 695e27e0..00000000
--- a/vendor/poetry/poetry/console/commands/cache/cache.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from poetry.console.commands.cache.list import CacheListCommand
-
-from ..command import Command
-from .clear import CacheClearCommand
-
-
-class CacheCommand(Command):
-
-    name = "cache"
-    description = "Interact with Poetry's cache"
-
-    commands = [CacheClearCommand(), CacheListCommand()]
-
-    def handle(self):
-        return self.call("help", self._config.name)
diff --git a/vendor/poetry/poetry/console/commands/cache/clear.py b/vendor/poetry/poetry/console/commands/cache/clear.py
deleted file mode 100644
index 42e71091..00000000
--- a/vendor/poetry/poetry/console/commands/cache/clear.py
+++ /dev/null
@@ -1,84 +0,0 @@
-import os
-
-from cleo import argument
-from cleo import option
-
-from ..command import Command
-
-
-class CacheClearCommand(Command):
-
-    name = "clear"
-    description = "Clears Poetry's cache."
-
-    arguments = [argument("cache", description="The name of the cache to clear.")]
-    options = [option("all", description="Clear all entries in the cache.")]
-
-    def handle(self):
-        from cachy import CacheManager
-
-        from poetry.locations import REPOSITORY_CACHE_DIR
-
-        cache = self.argument("cache")
-
-        parts = cache.split(":")
-        root = parts[0]
-
-        cache_dir = REPOSITORY_CACHE_DIR / root
-
-        try:
-            cache_dir.relative_to(REPOSITORY_CACHE_DIR)
-        except ValueError:
-            raise ValueError("{} is not a valid repository cache".format(root))
-
-        cache = CacheManager(
-            {
-                "default": parts[0],
-                "serializer": "json",
-                "stores": {parts[0]: {"driver": "file", "path": str(cache_dir)}},
-            }
-        )
-
-        if len(parts) == 1:
-            if not self.option("all"):
-                raise RuntimeError(
-                    "Add the --all option if you want to clear all "
-                    "{} caches".format(parts[0])
-                )
-
-            if not os.path.exists(str(cache_dir)):
-                self.line("No cache entries for {}".format(parts[0]))
-                return 0
-
-            # Calculate number of entries
-            entries_count = 0
-            for path, dirs, files in os.walk(str(cache_dir)):
-                entries_count += len(files)
-
-            delete = self.confirm(
-                "Delete {} entries?".format(entries_count)
-            )
-            if not delete:
-                return 0
-
-            cache.flush()
-        elif len(parts) == 2:
-            raise RuntimeError(
-                "Only specifying the package name is not yet supported. "
-                "Add a specific version to clear"
-            )
-        elif len(parts) == 3:
-            package = parts[1]
-            version = parts[2]
-
-            if not cache.has("{}:{}".format(package, version)):
-                self.line("No cache entries for {}:{}".format(package, version))
-                return 0
-
-            delete = self.confirm("Delete cache entry {}:{}".format(package, version))
-            if not delete:
-                return 0
-
-            cache.forget("{}:{}".format(package, version))
-        else:
-            raise ValueError("Invalid cache key")
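`CacheClearCommand` dispatches on the shape of the colon-separated cache key: `repo` flushes a whole repository cache (and insists on `--all`), `repo:package` is rejected, and `repo:package:version` forgets a single entry. The routing logic in isolation, as a hypothetical helper that is not part of the vendored code:

```python
def parse_cache_key(key, clear_all=False):
    parts = key.split(":")
    if len(parts) == 1:
        if not clear_all:
            raise RuntimeError(
                "Add the --all option if you want to clear all {} caches".format(parts[0])
            )
        return ("flush", parts[0])
    if len(parts) == 2:
        raise RuntimeError("Only specifying the package name is not yet supported")
    if len(parts) == 3:
        return ("forget", "{}:{}".format(parts[1], parts[2]))
    raise ValueError("Invalid cache key")

assert parse_cache_key("pypi", clear_all=True) == ("flush", "pypi")
assert parse_cache_key("pypi:requests:2.28.0") == ("forget", "requests:2.28.0")
```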
diff --git a/vendor/poetry/poetry/console/commands/cache/list.py b/vendor/poetry/poetry/console/commands/cache/list.py
deleted file mode 100644
index 6a030fa2..00000000
--- a/vendor/poetry/poetry/console/commands/cache/list.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import os
-
-from ..command import Command
-
-
-class CacheListCommand(Command):
-
-    name = "list"
-    description = "List Poetry's caches."
-
-    def handle(self):
-        from poetry.locations import REPOSITORY_CACHE_DIR
-
-        if os.path.exists(str(REPOSITORY_CACHE_DIR)):
-            caches = list(sorted(REPOSITORY_CACHE_DIR.iterdir()))
-            if caches:
-                for cache in caches:
-                    self.line("{}".format(cache.name))
-                return 0
-
-        self.line("No caches found")
diff --git a/vendor/poetry/poetry/console/commands/check.py b/vendor/poetry/poetry/console/commands/check.py
deleted file mode 100644
index bb97da14..00000000
--- a/vendor/poetry/poetry/console/commands/check.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from poetry.core.pyproject.toml import PyProjectTOML
-from poetry.factory import Factory
-from poetry.utils._compat import Path
-
-from .command import Command
-
-
-class CheckCommand(Command):
-
-    name = "check"
-    description = "Checks the validity of the pyproject.toml file."
-
-    def handle(self):
-        # Load poetry config and display errors, if any
-        poetry_file = Factory.locate(Path.cwd())
-        config = PyProjectTOML(poetry_file).poetry_config
-        check_result = Factory.validate(config, strict=True)
-        if not check_result["errors"] and not check_result["warnings"]:
-            self.info("All set!")
-
-            return 0
-
-        for error in check_result["errors"]:
-            self.line("Error: {}".format(error))
-
-        for error in check_result["warnings"]:
-            self.line("Warning: {}".format(error))
-
-        return 1
diff --git a/vendor/poetry/poetry/console/commands/command.py b/vendor/poetry/poetry/console/commands/command.py
deleted file mode 100644
index 1e221423..00000000
--- a/vendor/poetry/poetry/console/commands/command.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from cleo import Command as BaseCommand
-
-
-class Command(BaseCommand):
-
-    loggers = []
-
-    @property
-    def poetry(self):
-        return self.application.poetry
-
-    def reset_poetry(self):  # type: () -> None
-        self.application.reset_poetry()
diff --git a/vendor/poetry/poetry/console/commands/config.py b/vendor/poetry/poetry/console/commands/config.py
deleted file mode 100644
index fd8a4e23..00000000
--- a/vendor/poetry/poetry/console/commands/config.py
+++ /dev/null
@@ -1,356 +0,0 @@
-import json
-import re
-
-from cleo import argument
-from cleo import option
-
-from poetry.core.pyproject import PyProjectException
-from poetry.core.toml.file import TOMLFile
-from poetry.factory import Factory
-
-from .command import Command
-
-
-class ConfigCommand(Command):
-
-    name = "config"
-    description = "Manages configuration settings."
-
-    arguments = [
-        argument("key", "Setting key.", optional=True),
-        argument("value", "Setting value.", optional=True, multiple=True),
-    ]
-
-    options = [
-        option("list", None, "List configuration settings."),
-        option("unset", None, "Unset configuration setting."),
-        option("local", None, "Set/Get from the project's local configuration."),
-    ]
-
-    help = """This command allows you to edit the poetry config settings and repositories.
-
-To add a repository:
-
-    poetry config repositories.foo https://bar.com/simple/
-
-To remove a repository (repo is a short alias for repositories):
-
-    poetry config --unset repo.foo"""
-
-    LIST_PROHIBITED_SETTINGS = {"http-basic", "pypi-token"}
-
-    @property
-    def unique_config_values(self):
-        from poetry.config.config import boolean_normalizer
-        from poetry.config.config import boolean_validator
-        from poetry.locations import CACHE_DIR
-        from poetry.utils._compat import Path
-
-        unique_config_values = {
-            "cache-dir": (
-                str,
-                lambda val: str(Path(val)),
-                str(Path(CACHE_DIR) / "virtualenvs"),
-            ),
-            "virtualenvs.create": (boolean_validator, boolean_normalizer, True),
-            "virtualenvs.in-project": (boolean_validator, boolean_normalizer, False),
-            "virtualenvs.path": (
-                str,
-                lambda val: str(Path(val)),
-                str(Path(CACHE_DIR) / "virtualenvs"),
-            ),
-            "experimental.new-installer": (
-                boolean_validator,
-                boolean_normalizer,
-                True,
-            ),
-            "installer.parallel": (boolean_validator, boolean_normalizer, True,),
-        }
-
-        return unique_config_values
-
-    def handle(self):
-        from poetry.config.file_config_source import FileConfigSource
-        from poetry.locations import CONFIG_DIR
-        from poetry.utils._compat import Path
-        from poetry.utils._compat import basestring
-
-        config = Factory.create_config(self.io)
-        config_file = TOMLFile(Path(CONFIG_DIR) / "config.toml")
-
-        try:
-            local_config_file = TOMLFile(self.poetry.file.parent / "poetry.toml")
-            if local_config_file.exists():
-                config.merge(local_config_file.read())
-        except (RuntimeError, PyProjectException):
-            local_config_file = TOMLFile(Path.cwd() / "poetry.toml")
-
-        if self.option("local"):
-            config.set_config_source(FileConfigSource(local_config_file))
-
-        if not config_file.exists():
-            config_file.path.parent.mkdir(parents=True, exist_ok=True)
-            config_file.touch(mode=0o0600)
-
-        if self.option("list"):
-            self._list_configuration(config.all(), config.raw())
-
-            return 0
-
-        setting_key = self.argument("key")
-        if not setting_key:
-            return 0
-
-        if self.argument("value") and self.option("unset"):
-            raise RuntimeError("You can not combine a setting value with --unset")
-
-        # show the value if no value is provided
-        if not self.argument("value") and not self.option("unset"):
-            m = re.match(r"^repos?(?:itories)?(?:\.(.+))?", self.argument("key"))
-            if m:
-                if not m.group(1):
-                    value = {}
-                    if config.get("repositories") is not None:
-                        value = config.get("repositories")
-                else:
-                    repo = config.get("repositories.{}".format(m.group(1)))
-                    if repo is None:
-                        raise ValueError(
-                            "There is no {} repository defined".format(m.group(1))
-                        )
-
-                    value = repo
-
-                self.line(str(value))
-            else:
-                values = self.unique_config_values
-                if setting_key not in values:
-                    raise ValueError("There is no {} setting.".format(setting_key))
-
-                value = config.get(setting_key)
-
-                if not isinstance(value, basestring):
-                    value = json.dumps(value)
-
-                self.line(value)
-
-            return 0
-
-        values = self.argument("value")
-
-        unique_config_values = self.unique_config_values
-        if setting_key in unique_config_values:
-            if self.option("unset"):
-                return config.config_source.remove_property(setting_key)
-
-            return self._handle_single_value(
-                config.config_source,
-                setting_key,
-                unique_config_values[setting_key],
-                values,
-            )
-
-        # handle repositories
-        m = re.match(r"^repos?(?:itories)?(?:\.(.+))?", self.argument("key"))
-        if m:
-            if not m.group(1):
-                raise ValueError("You cannot remove the [repositories] section")
-
-            if self.option("unset"):
-                repo = config.get("repositories.{}".format(m.group(1)))
-                if repo is None:
-                    raise ValueError(
-                        "There is no {} repository defined".format(m.group(1))
-                    )
-
-                config.config_source.remove_property(
-                    "repositories.{}".format(m.group(1))
-                )
-
-                return 0
-
-            if len(values) == 1:
-                url = values[0]
-
-                config.config_source.add_property(
-                    "repositories.{}.url".format(m.group(1)), url
-                )
-
-                return 0
-
-            raise ValueError(
-                "You must pass the url. "
-                "Example: poetry config repositories.foo https://bar.com"
-            )
-
-        # handle auth
-        m = re.match(r"^(http-basic|pypi-token)\.(.+)", self.argument("key"))
-        if m:
-            from poetry.utils.password_manager import PasswordManager
-
-            password_manager = PasswordManager(config)
-            if self.option("unset"):
-                if m.group(1) == "http-basic":
-                    password_manager.delete_http_password(m.group(2))
-                elif m.group(1) == "pypi-token":
-                    password_manager.delete_pypi_token(m.group(2))
-
-                return 0
-
-            if m.group(1) == "http-basic":
-                if len(values) == 1:
-                    username = values[0]
-                    # Only username, so we prompt for password
-                    password = self.secret("Password:")
-                elif len(values) != 2:
-                    raise ValueError(
-                        "Expected one or two arguments "
-                        "(username, password), got {}".format(len(values))
-                    )
-                else:
-                    username = values[0]
-                    password = values[1]
-
-                password_manager.set_http_password(m.group(2), username, password)
-            elif m.group(1) == "pypi-token":
-                if len(values) != 1:
-                    raise ValueError(
-                        "Expected only one argument (token), got {}".format(len(values))
-                    )
-
-                token = values[0]
-
-                password_manager.set_pypi_token(m.group(2), token)
-
-            return 0
-
-        # handle certs
-        m = re.match(
-            r"(?:certificates)\.([^.]+)\.(cert|client-cert)", self.argument("key")
-        )
-        if m:
-            if self.option("unset"):
-                config.auth_config_source.remove_property(
-                    "certificates.{}.{}".format(m.group(1), m.group(2))
-                )
-
-                return 0
-
-            if len(values) == 1:
-                config.auth_config_source.add_property(
-                    "certificates.{}.{}".format(m.group(1), m.group(2)), values[0]
-                )
-            else:
-                raise ValueError("You must pass exactly 1 value")
-
-            return 0
-
-        raise ValueError("Setting {} does not exist".format(self.argument("key")))
-
-    def _handle_single_value(self, source, key, callbacks, values):
-        validator, normalizer, _ = callbacks
-
-        if len(values) > 1:
-            raise RuntimeError("You can only pass one value.")
-
-        value = values[0]
-        if not validator(value):
-            raise RuntimeError('"{}" is an invalid value for {}'.format(value, key))
-
-        source.add_property(key, normalizer(value))
-
-        return 0
-
-    def _list_configuration(self, config, raw, k=""):
-        from poetry.utils._compat import basestring
-
-        orig_k = k
-        for key, value in sorted(config.items()):
-            if k + key in self.LIST_PROHIBITED_SETTINGS:
-                continue
-
-            raw_val = raw.get(key)
-
-            if isinstance(value, dict):
-                k += "{}.".format(key)
-                self._list_configuration(value, raw_val, k=k)
-                k = orig_k
-
-                continue
-            elif isinstance(value, list):
-                value = [
-                    json.dumps(val) if isinstance(val, list) else val for val in value
-                ]
-
-                value = "[{}]".format(", ".join(value))
-
-            if k.startswith("repositories."):
-                message = "{} = {}".format(
-                    k + key, json.dumps(raw_val)
-                )
-            elif isinstance(raw_val, basestring) and raw_val != value:
-                message = "{} = {}  # {}".format(
-                    k + key, json.dumps(raw_val), value
-                )
-            else:
-                message = "{} = {}".format(k + key, json.dumps(value))
-
-            self.line(message)
-
-    def _list_setting(self, contents, setting=None, k=None, default=None):
-        values = self._get_setting(contents, setting, k, default)
-
-        for value in values:
-            self.line(
-                "{} = {}".format(value[0], value[1])
-            )
-
-    def _get_setting(self, contents, setting=None, k=None, default=None):
-        orig_k = k
-
-        if setting and setting.split(".")[0] not in contents:
-            value = json.dumps(default)
-
-            return [((k or "") + setting, value)]
-        else:
-            values = []
-            for key, value in contents.items():
-                if setting and key != setting.split(".")[0]:
-                    continue
-
-                if isinstance(value, dict) or key == "repositories" and k is None:
-                    if k is None:
-                        k = ""
-
-                    k += re.sub(r"^config\.", "", key + ".")
-                    if setting and len(setting) > 1:
-                        setting = ".".join(setting.split(".")[1:])
-
-                    values += self._get_setting(
-                        value, k=k, setting=setting, default=default
-                    )
-                    k = orig_k
-
-                    continue
-
-                if isinstance(value, list):
-                    value = [
-                        json.dumps(val) if isinstance(val, list) else val
-                        for val in value
-                    ]
-
-                    value = "[{}]".format(", ".join(value))
-
-                value = json.dumps(value)
-
-                values.append(((k or "") + key, value))
-
-            return values
-
-    def _get_formatted_value(self, value):
-        if isinstance(value, list):
-            value = [json.dumps(val) if isinstance(val, list) else val for val in value]
-
-            value = "[{}]".format(", ".join(value))
-
-        return json.dumps(value)
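The key routing in `ConfigCommand.handle` leans on the regex `^repos?(?:itories)?(?:\.(.+))?`, which accepts `repo`, `repos`, and `repositories` as aliases and captures everything after the first dot as group 1. A quick demonstration of what it matches:

```python
import re

pattern = re.compile(r"^repos?(?:itories)?(?:\.(.+))?")

for key in ("repositories", "repo.foo", "repos.foo.url", "virtualenvs.create"):
    m = pattern.match(key)
    print(key, "->", m.group(1) if m else "no match")
# repositories -> None          (group 1 empty: show all repositories)
# repo.foo -> foo               (a single named repository)
# repos.foo.url -> foo.url
# virtualenvs.create -> no match (falls through to the other handlers)
```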
diff --git a/vendor/poetry/poetry/console/commands/debug/debug.py b/vendor/poetry/poetry/console/commands/debug/debug.py
deleted file mode 100644
index 468e2faa..00000000
--- a/vendor/poetry/poetry/console/commands/debug/debug.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from ..command import Command
-from .info import DebugInfoCommand
-from .resolve import DebugResolveCommand
-
-
-class DebugCommand(Command):
-
-    name = "debug"
-    description = "Debug various elements of Poetry."
-
-    commands = [DebugInfoCommand().default(), DebugResolveCommand()]
diff --git a/vendor/poetry/poetry/console/commands/debug/info.py b/vendor/poetry/poetry/console/commands/debug/info.py
deleted file mode 100644
index 81096a6f..00000000
--- a/vendor/poetry/poetry/console/commands/debug/info.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import sys
-
-from clikit.args import StringArgs
-
-from ..command import Command
-
-
-class DebugInfoCommand(Command):
-
-    name = "info"
-    description = "Shows debug information."
-
-    def handle(self):
-        poetry_python_version = ".".join(str(s) for s in sys.version_info[:3])
-
-        self.line("")
-        self.line("Poetry")
-        self.line(
-            "\n".join(
-                [
-                    "Version: {}".format(self.poetry.VERSION),
-                    "Python:  {}".format(
-                        poetry_python_version
-                    ),
-                ]
-            )
-        )
-        args = StringArgs("")
-        command = self.application.get_command("env").get_sub_command("info")
-
-        return command.run(args, self._io)
diff --git a/vendor/poetry/poetry/console/commands/debug/resolve.py b/vendor/poetry/poetry/console/commands/debug/resolve.py
deleted file mode 100644
index 52ae1951..00000000
--- a/vendor/poetry/poetry/console/commands/debug/resolve.py
+++ /dev/null
@@ -1,137 +0,0 @@
-from cleo import argument
-from cleo import option
-
-from ..init import InitCommand
-
-
-class DebugResolveCommand(InitCommand):
-
-    name = "resolve"
-    description = "Debugs dependency resolution."
-
-    arguments = [
-        argument("package", "The packages to resolve.", optional=True, multiple=True)
-    ]
-    options = [
-        option(
-            "extras",
-            "E",
-            "Extras to activate for the dependency.",
-            flag=False,
-            multiple=True,
-        ),
-        option("python", None, "Python version(s) to use for resolution.", flag=False),
-        option("tree", None, "Display the dependency tree."),
-        option("install", None, "Show what would be installed for the current system."),
-    ]
-
-    loggers = ["poetry.repositories.pypi_repository", "poetry.inspection.info"]
-
-    def handle(self):
-        from poetry.core.packages.project_package import ProjectPackage
-        from poetry.factory import Factory
-        from poetry.io.null_io import NullIO
-        from poetry.puzzle import Solver
-        from poetry.repositories.pool import Pool
-        from poetry.repositories.repository import Repository
-        from poetry.utils.env import EnvManager
-
-        packages = self.argument("package")
-
-        if not packages:
-            package = self.poetry.package
-        else:
-            # Using current pool for determine_requirements()
-            self._pool = self.poetry.pool
-
-            package = ProjectPackage(
-                self.poetry.package.name, self.poetry.package.version
-            )
-
-            # Silencing output
-            is_quiet = self.io.output.is_quiet()
-            if not is_quiet:
-                self.io.output.set_quiet(True)
-
-            requirements = self._determine_requirements(packages)
-
-            if not is_quiet:
-                self.io.output.set_quiet(False)
-
-            for constraint in requirements:
-                name = constraint.pop("name")
-                extras = []
-                for extra in self.option("extras"):
-                    if " " in extra:
-                        extras += [e.strip() for e in extra.split(" ")]
-                    else:
-                        extras.append(extra)
-
-                constraint["extras"] = extras
-
-                package.add_dependency(Factory.create_dependency(name, constraint))
-
-        package.python_versions = self.option("python") or (
-            self.poetry.package.python_versions
-        )
-
-        pool = self.poetry.pool
-
-        solver = Solver(package, pool, Repository(), Repository(), self._io)
-
-        ops = solver.solve()
-
-        self.line("")
-        self.line("Resolution results:")
-        self.line("")
-
-        if self.option("tree"):
-            show_command = self.application.find("show")
-            show_command.init_styles(self.io)
-
-            packages = [op.package for op in ops]
-            repo = Repository(packages)
-
-            requires = package.requires + package.dev_requires
-            for pkg in repo.packages:
-                for require in requires:
-                    if pkg.name == require.name:
-                        show_command.display_package_tree(self.io, pkg, repo)
-                        break
-
-            return 0
-
-        table = self.table([], style="borderless")
-        rows = []
-
-        if self.option("install"):
-            env = EnvManager(self.poetry).get()
-            pool = Pool()
-            locked_repository = Repository()
-            for op in ops:
-                locked_repository.add_package(op.package)
-
-            pool.add_repository(locked_repository)
-
-            solver = Solver(package, pool, Repository(), Repository(), NullIO())
-            with solver.use_environment(env):
-                ops = solver.solve()
-
-        for op in ops:
-            if self.option("install") and op.skipped:
-                continue
-
-            pkg = op.package
-            row = [
-                "{}".format(pkg.complete_name),
-                "{}".format(pkg.version),
-                "",
-            ]
-
-            if not pkg.marker.is_any():
-                row[2] = str(pkg.marker)
-
-            rows.append(row)
-
-        table.set_rows(rows)
-        table.render(self.io)
diff --git a/vendor/poetry/poetry/console/commands/env/env.py b/vendor/poetry/poetry/console/commands/env/env.py
deleted file mode 100644
index f979b66e..00000000
--- a/vendor/poetry/poetry/console/commands/env/env.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from ..command import Command
-from .info import EnvInfoCommand
-from .list import EnvListCommand
-from .remove import EnvRemoveCommand
-from .use import EnvUseCommand
-
-
-class EnvCommand(Command):
-
-    name = "env"
-    description = "Interact with Poetry's project environments."
-
-    commands = [EnvInfoCommand(), EnvListCommand(), EnvRemoveCommand(), EnvUseCommand()]
-
-    def handle(self):  # type: () -> int
-        return self.call("help", self._config.name)
diff --git a/vendor/poetry/poetry/console/commands/env/info.py b/vendor/poetry/poetry/console/commands/env/info.py
deleted file mode 100644
index b80ee500..00000000
--- a/vendor/poetry/poetry/console/commands/env/info.py
+++ /dev/null
@@ -1,60 +0,0 @@
-from cleo import option
-
-from ..command import Command
-
-
-class EnvInfoCommand(Command):
-
-    name = "info"
-    description = "Displays information about the current environment."
-
-    options = [option("path", "p", "Only display the environment's path.")]
-
-    def handle(self):
-        from poetry.utils.env import EnvManager
-
-        env = EnvManager(self.poetry).get()
-
-        if self.option("path"):
-            if not env.is_venv():
-                return 1
-
-            self.line(str(env.path))
-
-            return
-
-        self._display_complete_info(env)
-
-    def _display_complete_info(self, env):
-        env_python_version = ".".join(str(s) for s in env.version_info[:3])
-        self.line("")
-        self.line("Virtualenv")
-        listing = [
-            "Python:         {}".format(env_python_version),
-            "Implementation: {}".format(
-                env.python_implementation
-            ),
-            "Path:           {}".format(
-                env.path if env.is_venv() else "NA"
-            ),
-        ]
-        if env.is_venv():
-            listing.append(
-                "Valid:          <{tag}>{is_valid}".format(
-                    tag="comment" if env.is_sane() else "error", is_valid=env.is_sane()
-                )
-            )
-        self.line("\n".join(listing))
-
-        self.line("")
-
-        self.line("System")
-        self.line(
-            "\n".join(
-                [
-                    "Platform: {}".format(env.platform),
-                    "OS:       {}".format(env.os),
-                    "Python:   {}".format(env.python),
-                ]
-            )
-        )
diff --git a/vendor/poetry/poetry/console/commands/env/list.py b/vendor/poetry/poetry/console/commands/env/list.py
deleted file mode 100644
index 272a853b..00000000
--- a/vendor/poetry/poetry/console/commands/env/list.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from cleo import option
-
-from ..command import Command
-
-
-class EnvListCommand(Command):
-
-    name = "list"
-    description = "Lists all virtualenvs associated with the current project."
-
-    options = [option("full-path", None, "Output the full paths of the virtualenvs.")]
-
-    def handle(self):
-        from poetry.utils.env import EnvManager
-
-        manager = EnvManager(self.poetry)
-        current_env = manager.get()
-
-        for venv in manager.list():
-            name = venv.path.name
-            if self.option("full-path"):
-                name = str(venv.path)
-
-            if venv == current_env:
-                self.line("{} (Activated)".format(name))
-
-                continue
-
-            self.line(name)
diff --git a/vendor/poetry/poetry/console/commands/env/remove.py b/vendor/poetry/poetry/console/commands/env/remove.py
deleted file mode 100644
index 5f208851..00000000
--- a/vendor/poetry/poetry/console/commands/env/remove.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from cleo import argument
-
-from ..command import Command
-
-
-class EnvRemoveCommand(Command):
-
-    name = "remove"
-    description = "Removes a specific virtualenv associated with the project."
-
-    arguments = [
-        argument("python", "The python executable to remove the virtualenv for.")
-    ]
-
-    def handle(self):
-        from poetry.utils.env import EnvManager
-
-        manager = EnvManager(self.poetry)
-        venv = manager.remove(self.argument("python"))
-
-        self.line("Deleted virtualenv: {}".format(venv.path))
diff --git a/vendor/poetry/poetry/console/commands/env/use.py b/vendor/poetry/poetry/console/commands/env/use.py
deleted file mode 100644
index ef9cf3de..00000000
--- a/vendor/poetry/poetry/console/commands/env/use.py
+++ /dev/null
@@ -1,25 +0,0 @@
-from cleo import argument
-
-from ..command import Command
-
-
-class EnvUseCommand(Command):
-
-    name = "use"
-    description = "Activates or creates a new virtualenv for the current project."
-
-    arguments = [argument("python", "The python executable to use.")]
-
-    def handle(self):
-        from poetry.utils.env import EnvManager
-
-        manager = EnvManager(self.poetry)
-
-        if self.argument("python") == "system":
-            manager.deactivate(self._io)
-
-            return
-
-        env = manager.activate(self.argument("python"), self._io)
-
-        self.line("Using virtualenv: {}".format(env.path))
diff --git a/vendor/poetry/poetry/console/commands/env_command.py b/vendor/poetry/poetry/console/commands/env_command.py
deleted file mode 100644
index 2fb298d7..00000000
--- a/vendor/poetry/poetry/console/commands/env_command.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from .command import Command
-
-
-class EnvCommand(Command):
-    def __init__(self):
-        self._env = None
-
-        super(EnvCommand, self).__init__()
-
-    @property
-    def env(self):
-        return self._env
-
-    def set_env(self, env):
-        self._env = env
diff --git a/vendor/poetry/poetry/console/commands/export.py b/vendor/poetry/poetry/console/commands/export.py
deleted file mode 100644
index 9918cd43..00000000
--- a/vendor/poetry/poetry/console/commands/export.py
+++ /dev/null
@@ -1,74 +0,0 @@
-from cleo import option
-
-from poetry.utils.exporter import Exporter
-
-from .command import Command
-
-
-class ExportCommand(Command):
-
-    name = "export"
-    description = "Exports the lock file to alternative formats."
-
-    options = [
-        option(
-            "format",
-            "f",
-            "Format to export to. Currently, only requirements.txt is supported.",
-            flag=False,
-            default=Exporter.FORMAT_REQUIREMENTS_TXT,
-        ),
-        option("output", "o", "The name of the output file.", flag=False),
-        option("without-hashes", None, "Exclude hashes from the exported file."),
-        option("dev", None, "Include development dependencies."),
-        option(
-            "extras",
-            "E",
-            "Extra sets of dependencies to include.",
-            flag=False,
-            multiple=True,
-        ),
-        option("with-credentials", None, "Include credentials for extra indices."),
-    ]
-
-    def handle(self):
-        fmt = self.option("format")
-
-        if fmt not in Exporter.ACCEPTED_FORMATS:
-            raise ValueError("Invalid export format: {}".format(fmt))
-
-        output = self.option("output")
-
-        locker = self.poetry.locker
-        if not locker.is_locked():
-            self.line("The lock file does not exist. Locking.")
-            options = []
-            if self.io.is_debug():
-                options.append("-vvv")
-            elif self.io.is_very_verbose():
-                options.append("-vv")
-            elif self.io.is_verbose():
-                options.append("-v")
-
-            self.call("lock", options)
-
-        if not locker.is_fresh():
-            self.line(
-                ""
-                "Warning: The lock file is not up to date with "
-                "the latest changes in pyproject.toml. "
-                "You may be getting outdated dependencies. "
-                "Run update to update them."
-                ""
-            )
-
-        exporter = Exporter(self.poetry)
-        exporter.export(
-            fmt,
-            self.poetry.file.parent,
-            output or self.io,
-            with_hashes=not self.option("without-hashes"),
-            dev=self.option("dev"),
-            extras=self.option("extras"),
-            with_credentials=self.option("with-credentials"),
-        )
diff --git a/vendor/poetry/poetry/console/commands/init.py b/vendor/poetry/poetry/console/commands/init.py
deleted file mode 100644
index 1d616682..00000000
--- a/vendor/poetry/poetry/console/commands/init.py
+++ /dev/null
@@ -1,547 +0,0 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-import os
-import re
-import sys
-
-from typing import Dict
-from typing import List
-from typing import Tuple
-from typing import Union
-
-from cleo import option
-from tomlkit import inline_table
-
-from poetry.core.pyproject import PyProjectException
-from poetry.core.pyproject.toml import PyProjectTOML
-from poetry.utils._compat import OrderedDict
-from poetry.utils._compat import Path
-from poetry.utils._compat import urlparse
-
-from .command import Command
-from .env_command import EnvCommand
-
-
-class InitCommand(Command):
-    name = "init"
-    description = (
-        "Creates a basic pyproject.toml file in the current directory."
-    )
-
-    options = [
-        option("name", None, "Name of the package.", flag=False),
-        option("description", None, "Description of the package.", flag=False),
-        option("author", None, "Author name of the package.", flag=False),
-        option("python", None, "Compatible Python versions.", flag=False),
-        option(
-            "dependency",
-            None,
-            "Package to require, with an optional version constraint, "
-            "e.g. requests:^2.10.0 or requests=2.11.1.",
-            flag=False,
-            multiple=True,
-        ),
-        option(
-            "dev-dependency",
-            None,
-            "Package to require for development, with an optional version constraint, "
-            "e.g. requests:^2.10.0 or requests=2.11.1.",
-            flag=False,
-            multiple=True,
-        ),
-        option("license", "l", "License of the package.", flag=False),
-    ]
-
-    help = """\
-The init command creates a basic pyproject.toml file in the current directory.
-"""
-
-    def __init__(self):
-        super(InitCommand, self).__init__()
-
-        self._pool = None
-
-    def handle(self):
-        from poetry.core.vcs.git import GitConfig
-        from poetry.layouts import layout
-        from poetry.utils._compat import Path
-        from poetry.utils.env import SystemEnv, InterpreterLookup
-
-        pyproject = PyProjectTOML(Path.cwd() / "pyproject.toml")
-
-        if pyproject.file.exists():
-            if pyproject.is_poetry_project():
-                self.line(
-                    "A pyproject.toml file with a poetry section already exists."
-                )
-                return 1
-
-            if pyproject.data.get("build-system"):
-                self.line(
-                    "A pyproject.toml file with a defined build-system already exists."
-                )
-                return 1
-
-        vcs_config = GitConfig()
-
-        self.line("")
-        self.line(
-            "This command will guide you through creating your pyproject.toml config."
-        )
-        self.line("")
-
-        name = self.option("name")
-        if not name:
-            name = Path.cwd().name.lower()
-
-            question = self.create_question(
-                "Package name [{}]: ".format(name), default=name
-            )
-            name = self.ask(question)
-
-        version = "0.1.0"
-        question = self.create_question(
-            "Version [{}]: ".format(version), default=version
-        )
-        version = self.ask(question)
-
-        description = self.option("description") or ""
-        question = self.create_question(
-            "Description [{}]: ".format(description),
-            default=description,
-        )
-        description = self.ask(question)
-
-        author = self.option("author")
-        if not author and vcs_config and vcs_config.get("user.name"):
-            author = vcs_config["user.name"]
-            author_email = vcs_config.get("user.email")
-            if author_email:
-                author += " <{}>".format(author_email)
-
-        question = self.create_question(
-            "Author [{}, n to skip]: ".format(author), default=author
-        )
-        question.set_validator(lambda v: self._validate_author(v, author))
-        author = self.ask(question)
-
-        if not author:
-            authors = []
-        else:
-            authors = [author]
-
-        license = self.option("license") or ""
-
-        question = self.create_question(
-            "License [{}]: ".format(license), default=license
-        )
-        question.set_validator(self._validate_license)
-        license = self.ask(question)
-
-        python = self.option("python")
-        if not python:
-            executable, py_minor, py_patch = InterpreterLookup.find()
-            current_env = SystemEnv(executable)
-            default_python = "^{}".format(
-                ".".join(str(v) for v in current_env.version_info[:2])
-            )
-            question = self.create_question(
-                "Compatible Python versions [{}]: ".format(
-                    default_python
-                ),
-                default=default_python,
-            )
-            python = self.ask(question)
-
-        self.line("")
-
-        requirements = {}
-        if self.option("dependency"):
-            requirements = self._format_requirements(
-                self._determine_requirements(self.option("dependency"))
-            )
-
-        question = "Would you like to define your main dependencies interactively?"
-        help_message = (
-            "You can specify a package in the following forms:\n"
-            "  - A single name (requests)\n"
-            "  - A name and a constraint (requests@^2.23.0)\n"
-            "  - A git url (git+https://github.com/python-poetry/poetry.git)\n"
-            "  - A git url with a revision (git+https://github.com/python-poetry/poetry.git#develop)\n"
-            "  - A file path (../my-package/my-package.whl)\n"
-            "  - A directory (../my-package/)\n"
-            "  - A url (https://example.com/packages/my-package-0.1.0.tar.gz)\n"
-        )
-        help_displayed = False
-        if self.confirm(question, True):
-            self.line(help_message)
-            help_displayed = True
-            requirements.update(
-                self._format_requirements(self._determine_requirements([]))
-            )
-            self.line("")
-
-        dev_requirements = {}
-        if self.option("dev-dependency"):
-            dev_requirements = self._format_requirements(
-                self._determine_requirements(self.option("dev-dependency"))
-            )
-
-        question = (
-            "Would you like to define your development dependencies interactively?"
-        )
-        if self.confirm(question, True):
-            if not help_displayed:
-                self.line(help_message)
-
-            dev_requirements.update(
-                self._format_requirements(self._determine_requirements([]))
-            )
-            self.line("")
-
-        layout_ = layout("standard")(
-            name,
-            version,
-            description=description,
-            author=authors[0] if authors else None,
-            license=license,
-            python=python,
-            dependencies=requirements,
-            dev_dependencies=dev_requirements,
-        )
-
-        content = layout_.generate_poetry_content(original=pyproject)
-        if self.io.is_interactive():
-            self.line("Generated file")
-            self.line("")
-            self.line(content)
-            self.line("")
-
-        if not self.confirm("Do you confirm generation?", True):
-            self.line("Command aborted")
-
-            return 1
-
-        with (Path.cwd() / "pyproject.toml").open("w", encoding="utf-8") as f:
-            f.write(content)
-
-    def _determine_requirements(
-        self, requires, allow_prereleases=False, source=None
-    ):  # type: (List[str], bool) -> List[Dict[str, str]]
-        if not requires:
-            requires = []
-
-            package = self.ask(
-                "Search for package to add (or leave blank to continue):"
-            )
-            while package is not None:
-                constraint = self._parse_requirements([package])[0]
-                if (
-                    "git" in constraint
-                    or "url" in constraint
-                    or "path" in constraint
-                    or "version" in constraint
-                ):
-                    self.line("Adding {}".format(package))
-                    requires.append(constraint)
-                    package = self.ask("\nAdd a package:")
-                    continue
-
-                matches = self._get_pool().search(constraint["name"])
-
-                if not matches:
-                    self.line("Unable to find package")
-                    package = False
-                else:
-                    choices = []
-                    matches_names = [p.name for p in matches]
-                    exact_match = constraint["name"] in matches_names
-                    if exact_match:
-                        choices.append(
-                            matches[matches_names.index(constraint["name"])].pretty_name
-                        )
-
-                    for found_package in matches:
-                        if len(choices) >= 10:
-                            break
-
-                        if found_package.name.lower() == constraint["name"].lower():
-                            continue
-
-                        choices.append(found_package.pretty_name)
-
-                    self.line(
-                        "Found {} packages matching {}".format(
-                            len(matches), package
-                        )
-                    )
-
-                    package = self.choice(
-                        "\nEnter package # to add, or the complete package name if it is not listed",
-                        choices,
-                        attempts=3,
-                    )
-
-                    # package selected by user, set constraint name to package name
-                    if package is not False:
-                        constraint["name"] = package
-
-                # no constraint yet, determine the best version automatically
-                if package is not False and "version" not in constraint:
-                    question = self.create_question(
-                        "Enter the version constraint to require "
-                        "(or leave blank to use the latest version):"
-                    )
-                    question.attempts = 3
-                    question.validator = lambda x: (x or "").strip() or False
-
-                    package_constraint = self.ask(question)
-
-                    if package_constraint is None:
-                        _, package_constraint = self._find_best_version_for_package(
-                            package
-                        )
-
-                        self.line(
-                            "Using version {} for {}".format(
-                                package_constraint, package
-                            )
-                        )
-
-                    constraint["version"] = package_constraint
-
-                if package is not False:
-                    requires.append(constraint)
-
-                package = self.ask("\nAdd a package:")
-
-            return requires
-
-        requires = self._parse_requirements(requires)
-        result = []
-        for requirement in requires:
-            if "git" in requirement or "url" in requirement or "path" in requirement:
-                result.append(requirement)
-                continue
-            elif "version" not in requirement:
-                # determine the best version automatically
-                name, version = self._find_best_version_for_package(
-                    requirement["name"],
-                    allow_prereleases=allow_prereleases,
-                    source=source,
-                )
-                requirement["version"] = version
-                requirement["name"] = name
-
-                self.line(
-                    "Using version {} for {}".format(version, name)
-                )
-            else:
-                # check that the specified version/constraint exists
-                # before we proceed
-                name, _ = self._find_best_version_for_package(
-                    requirement["name"],
-                    requirement["version"],
-                    allow_prereleases=allow_prereleases,
-                    source=source,
-                )
-
-                requirement["name"] = name
-
-            result.append(requirement)
-
-        return result
-
-    def _find_best_version_for_package(
-        self, name, required_version=None, allow_prereleases=False, source=None
-    ):  # type: (...) -> Tuple[str, str]
-        from poetry.version.version_selector import VersionSelector
-
-        selector = VersionSelector(self._get_pool())
-        package = selector.find_best_candidate(
-            name, required_version, allow_prereleases=allow_prereleases, source=source
-        )
-
-        if not package:
-            # TODO: find similar
-            raise ValueError(
-                "Could not find a matching version of package {}".format(name)
-            )
-
-        return package.pretty_name, selector.find_recommended_require_version(package)
-
-    def _parse_requirements(
-        self, requirements
-    ):  # type: (List[str]) -> List[Dict[str, str]]
-        from poetry.puzzle.provider import Provider
-
-        result = []
-
-        try:
-            cwd = self.poetry.file.parent
-        except (PyProjectException, RuntimeError):
-            cwd = Path.cwd()
-
-        for requirement in requirements:
-            requirement = requirement.strip()
-            extras = []
-            extras_m = re.search(r"\[([\w\d,-_ ]+)\]$", requirement)
-            if extras_m:
-                extras = [e.strip() for e in extras_m.group(1).split(",")]
-                requirement, _ = requirement.split("[")
-
-            url_parsed = urlparse.urlparse(requirement)
-            if url_parsed.scheme and url_parsed.netloc:
-                # Url
-                if url_parsed.scheme in ["git+https", "git+ssh"]:
-                    from poetry.core.vcs.git import Git
-                    from poetry.core.vcs.git import ParsedUrl
-
-                    parsed = ParsedUrl.parse(requirement)
-                    url = Git.normalize_url(requirement)
-
-                    pair = OrderedDict([("name", parsed.name), ("git", url.url)])
-                    if parsed.rev:
-                        pair["rev"] = url.revision
-
-                    if extras:
-                        pair["extras"] = extras
-
-                    package = Provider.get_package_from_vcs(
-                        "git", url.url, rev=pair.get("rev")
-                    )
-                    pair["name"] = package.name
-                    result.append(pair)
-
-                    continue
-                elif url_parsed.scheme in ["http", "https"]:
-                    package = Provider.get_package_from_url(requirement)
-
-                    pair = OrderedDict(
-                        [("name", package.name), ("url", package.source_url)]
-                    )
-                    if extras:
-                        pair["extras"] = extras
-
-                    result.append(pair)
-                    continue
-            elif (os.path.sep in requirement or "/" in requirement) and cwd.joinpath(
-                requirement
-            ).exists():
-                path = cwd.joinpath(requirement)
-                if path.is_file():
-                    package = Provider.get_package_from_file(path.resolve())
-                else:
-                    package = Provider.get_package_from_directory(path)
-
-                result.append(
-                    OrderedDict(
-                        [
-                            ("name", package.name),
-                            ("path", path.relative_to(cwd).as_posix()),
-                        ]
-                        + ([("extras", extras)] if extras else [])
-                    )
-                )
-
-                continue
-
-            pair = re.sub(
-                "^([^@=: ]+)(?:@|==|(?~!])=|:| )(.*)$", "\\1 \\2", requirement
-            )
-            pair = pair.strip()
-
-            require = OrderedDict()
-            if " " in pair:
-                name, version = pair.split(" ", 2)
-                extras_m = re.search(r"\[([\w\d,-_]+)\]$", name)
-                if extras_m:
-                    extras = [e.strip() for e in extras_m.group(1).split(",")]
-                    name, _ = name.split("[")
-
-                require["name"] = name
-                if version != "latest":
-                    require["version"] = version
-            else:
-                m = re.match(
-                    r"^([^><=!: ]+)((?:>=|<=|>|<|!=|~=|~|\^).*)$", requirement.strip()
-                )
-                if m:
-                    name, constraint = m.group(1), m.group(2)
-                    extras_m = re.search(r"\[([\w\d,-_]+)\]$", name)
-                    if extras_m:
-                        extras = [e.strip() for e in extras_m.group(1).split(",")]
-                        name, _ = name.split("[")
-
-                    require["name"] = name
-                    require["version"] = constraint
-                else:
-                    extras_m = re.search(r"\[([\w\d,-_]+)\]$", pair)
-                    if extras_m:
-                        extras = [e.strip() for e in extras_m.group(1).split(",")]
-                        pair, _ = pair.split("[")
-
-                    require["name"] = pair
-
-            if extras:
-                require["extras"] = extras
-
-            result.append(require)
-
-        return result
-
-    def _format_requirements(
-        self, requirements
-    ):  # type: (List[Dict[str, str]]) -> Dict[str, Union[str, Dict[str, str]]]
-        requires = {}
-        for requirement in requirements:
-            name = requirement.pop("name")
-            if "version" in requirement and len(requirement) == 1:
-                constraint = requirement["version"]
-            else:
-                constraint = inline_table()
-                constraint.trivia.trail = "\n"
-                constraint.update(requirement)
-
-            requires[name] = constraint
-
-        return requires
-
-    def _validate_author(self, author, default):
-        from poetry.core.packages.package import AUTHOR_REGEX
-
-        author = author or default
-
-        if author in ["n", "no"]:
-            return
-
-        m = AUTHOR_REGEX.match(author)
-        if not m:
-            raise ValueError(
-                "Invalid author string. Must be in the format: "
-                "John Smith "
-            )
-
-        return author
-
-    def _validate_license(self, license):
-        from poetry.core.spdx import license_by_id
-
-        if license:
-            license_by_id(license)
-
-        return license
-
-    def _get_pool(self):
-        from poetry.repositories import Pool
-        from poetry.repositories.pypi_repository import PyPiRepository
-
-        if isinstance(self, EnvCommand):
-            return self.poetry.pool
-
-        if self._pool is None:
-            self._pool = Pool()
-            self._pool.add_repository(PyPiRepository())
-
-        return self._pool
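
Aside on the deletion above: the heart of `_parse_requirements` is the regex pass that splits a spec such as `requests@^2.23.0` into a name and a constraint. A minimal stand-alone sketch of that split, reusing the deleted code's own patterns (the helper name is illustrative, not Poetry's API):

import re

def split_requirement(requirement):
    # Normalize "name@constraint", "name==constraint", "name constraint"
    # down to "name constraint"; unmatched input passes through unchanged.
    pair = re.sub(
        r"^([^@=: ]+)(?:@|==|(?<![<>~!])=|:| )(.*)$", r"\1 \2", requirement.strip()
    )
    if " " in pair:
        name, constraint = pair.split(" ", 1)
        return name, constraint
    # No separator: either a bare name or an operator-style spec like "foo>=1.0".
    m = re.match(r"^([^><=!: ]+)((?:>=|<=|>|<|!=|~=|~|\^).*)$", pair)
    if m:
        return m.group(1), m.group(2)
    return pair, None

assert split_requirement("requests@^2.23.0") == ("requests", "^2.23.0")
assert split_requirement("requests>=2.0") == ("requests", ">=2.0")
assert split_requirement("requests") == ("requests", None)
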
diff --git a/vendor/poetry/poetry/console/commands/install.py b/vendor/poetry/poetry/console/commands/install.py
deleted file mode 100644
index 6a9ef2cb..00000000
--- a/vendor/poetry/poetry/console/commands/install.py
+++ /dev/null
@@ -1,115 +0,0 @@
-from cleo import option
-
-from .installer_command import InstallerCommand
-
-
-class InstallCommand(InstallerCommand):
-
-    name = "install"
-    description = "Installs the project dependencies."
-
-    options = [
-        option("no-dev", None, "Do not install the development dependencies."),
-        option(
-            "no-root", None, "Do not install the root package (the current project)."
-        ),
-        option(
-            "dry-run",
-            None,
-            "Output the operations but do not execute anything "
-            "(implicitly enables --verbose).",
-        ),
-        option(
-            "remove-untracked", None, "Removes packages not present in the lock file.",
-        ),
-        option(
-            "extras",
-            "E",
-            "Extra sets of dependencies to install.",
-            flag=False,
-            multiple=True,
-        ),
-    ]
-
-    help = """The install command reads the poetry.lock file from
-the current directory, processes it, and downloads and installs all the
-libraries and dependencies outlined in that file. If the file does not
-exist it will look for pyproject.toml and do the same.
-
-poetry install
-
-By default, the above command will also install the current project. To install only the
-dependencies and not including the current project, run the command with the
---no-root option like below:
-
- poetry install --no-root
-"""
-
-    _loggers = ["poetry.repositories.pypi_repository", "poetry.inspection.info"]
-
-    def handle(self):
-        from poetry.core.masonry.utils.module import ModuleOrPackageNotFound
-        from poetry.masonry.builders import EditableBuilder
-
-        self._installer.use_executor(
-            self.poetry.config.get("experimental.new-installer", False)
-        )
-
-        extras = []
-        for extra in self.option("extras"):
-            if " " in extra:
-                extras += [e.strip() for e in extra.split(" ")]
-            else:
-                extras.append(extra)
-
-        self._installer.extras(extras)
-        self._installer.dev_mode(not self.option("no-dev"))
-        self._installer.dry_run(self.option("dry-run"))
-        self._installer.remove_untracked(self.option("remove-untracked"))
-        self._installer.verbose(self._io.is_verbose())
-
-        return_code = self._installer.run()
-
-        if return_code != 0:
-            return return_code
-
-        if self.option("no-root"):
-            return 0
-
-        try:
-            builder = EditableBuilder(self.poetry, self._env, self._io)
-        except ModuleOrPackageNotFound:
-            # This is likely due to the fact that the project is an application
-            # not following the structure expected by Poetry
-            # If this is a true error it will be picked up later by build anyway.
-            return 0
-
-        self.line("")
-        if not self._io.supports_ansi() or self.io.is_debug():
-            self.line(
-                "Installing the current project: {} ({})".format(
-                    self.poetry.package.pretty_name, self.poetry.package.pretty_version
-                )
-            )
-        else:
-            self.write(
-                "Installing the current project: {} ({})".format(
-                    self.poetry.package.pretty_name, self.poetry.package.pretty_version
-                )
-            )
-
-        if self.option("dry-run"):
-            self.line("")
-            return 0
-
-        builder.build()
-
-        if self._io.supports_ansi() and not self.io.is_debug():
-            self.overwrite(
-                "Installing the current project: {} ({})".format(
-                    self.poetry.package.pretty_name, self.poetry.package.pretty_version
-                )
-            )
-            self.line("")
-
-        return 0
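
A small detail in the deleted `handle()` above: `--extras` accepts both repeated `-E` flags and a single space-separated value, and the loop flattens the two spellings into one list. The same normalization as a stand-alone sketch (the function name is made up for illustration):

def normalize_extras(raw_extras):
    """Flatten -E values: ["mysql pgsql", "redis"] -> ["mysql", "pgsql", "redis"]."""
    extras = []
    for extra in raw_extras:
        if " " in extra:
            extras += [e.strip() for e in extra.split(" ")]
        else:
            extras.append(extra)
    return extras

assert normalize_extras(["mysql pgsql", "redis"]) == ["mysql", "pgsql", "redis"]
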
diff --git a/vendor/poetry/poetry/console/commands/installer_command.py b/vendor/poetry/poetry/console/commands/installer_command.py
deleted file mode 100644
index 51647eff..00000000
--- a/vendor/poetry/poetry/console/commands/installer_command.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from typing import TYPE_CHECKING
-
-from .env_command import EnvCommand
-
-
-if TYPE_CHECKING:
-    from poetry.installation.installer import Installer
-    from poetry.installation.installer import Optional
-
-
-class InstallerCommand(EnvCommand):
-    def __init__(self):
-        self._installer = None  # type: Optional[Installer]
-
-        super(InstallerCommand, self).__init__()
-
-    def reset_poetry(self):
-        super(InstallerCommand, self).reset_poetry()
-
-        self._installer.set_package(self.poetry.package)
-        self._installer.set_locker(self.poetry.locker)
-
-    @property
-    def installer(self):  # type: () -> Installer
-        return self._installer
-
-    def set_installer(self, installer):  # type: (Installer) -> None
-        self._installer = installer
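
`InstallerCommand` above is a small dependency-injection seam: the application constructs one `Installer` and hands it to each command through `set_installer()`, while `reset_poetry()` re-points it at the reloaded project. A stripped-down sketch of the same pattern with placeholder classes (none of these names are Poetry's):

class FakeInstaller:
    """Stand-in for the real Installer, for illustration only."""
    def run(self):
        return 0

class BaseCommand:
    def __init__(self):
        self._installer = None  # injected later by the application

    def set_installer(self, installer):
        self._installer = installer

class LockLikeCommand(BaseCommand):
    def handle(self):
        return self._installer.run()

cmd = LockLikeCommand()
cmd.set_installer(FakeInstaller())
assert cmd.handle() == 0
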
diff --git a/vendor/poetry/poetry/console/commands/lock.py b/vendor/poetry/poetry/console/commands/lock.py
deleted file mode 100644
index 4157c02c..00000000
--- a/vendor/poetry/poetry/console/commands/lock.py
+++ /dev/null
@@ -1,34 +0,0 @@
-from cleo import option
-
-from .installer_command import InstallerCommand
-
-
-class LockCommand(InstallerCommand):
-
-    name = "lock"
-    description = "Locks the project dependencies."
-
-    options = [
-        option(
-            "no-update", None, "Do not update locked versions, only refresh lock file."
-        ),
-    ]
-
-    help = """
-The lock command reads the pyproject.toml file from the
-current directory, processes it, and locks the dependencies in the poetry.lock
-file.
-
-poetry lock
-"""
-
-    loggers = ["poetry.repositories.pypi_repository"]
-
-    def handle(self):
-        self._installer.use_executor(
-            self.poetry.config.get("experimental.new-installer", False)
-        )
-
-        self._installer.lock(update=not self.option("no-update"))
-
-        return self._installer.run()
diff --git a/vendor/poetry/poetry/console/commands/new.py b/vendor/poetry/poetry/console/commands/new.py
deleted file mode 100644
index e77fad50..00000000
--- a/vendor/poetry/poetry/console/commands/new.py
+++ /dev/null
@@ -1,84 +0,0 @@
-import sys
-
-from cleo import argument
-from cleo import option
-
-from poetry.utils.helpers import module_name
-
-from .command import Command
-
-
-class NewCommand(Command):
-
-    name = "new"
-    description = "Creates a new Python project at ."
-
-    arguments = [argument("path", "The path to create the project at.")]
-    options = [
-        option("name", None, "Set the resulting package name.", flag=False),
-        option("src", None, "Use the src layout for the project."),
-    ]
-
-    def handle(self):
-        from poetry.core.semver import parse_constraint
-        from poetry.core.vcs.git import GitConfig
-        from poetry.layouts import layout
-        from poetry.utils._compat import Path
-        from poetry.utils.env import SystemEnv, InterpreterLookup
-
-        if self.option("src"):
-            layout_ = layout("src")
-        else:
-            layout_ = layout("standard")
-
-        path = Path.cwd() / Path(self.argument("path"))
-        name = self.option("name")
-        if not name:
-            name = path.name
-
-        if path.exists():
-            if list(path.glob("*")):
-                # Directory is not empty. Aborting.
-                raise RuntimeError(
-                    "Destination {} "
-                    "exists and is not empty".format(path)
-                )
-
-        readme_format = "rst"
-
-        config = GitConfig()
-        author = None
-        if config.get("user.name"):
-            author = config["user.name"]
-            author_email = config.get("user.email")
-            if author_email:
-                author += " <{}>".format(author_email)
-
-        executable, py_minor, py_patch = InterpreterLookup.find()
-        current_env = SystemEnv(executable)
-        default_python = "^{}".format(
-            ".".join(str(v) for v in current_env.version_info[:2])
-        )
-
-        dev_dependencies = {}
-        python_constraint = parse_constraint(default_python)
-        if parse_constraint("<3.5").allows_any(python_constraint):
-            dev_dependencies["pytest"] = "^4.6"
-        if parse_constraint(">=3.5").allows_all(python_constraint):
-            dev_dependencies["pytest"] = "^5.2"
-
-        layout_ = layout_(
-            name,
-            "0.1.0",
-            author=author,
-            readme_format=readme_format,
-            python=default_python,
-            dev_dependencies=dev_dependencies,
-        )
-        layout_.create(path)
-
-        self.line(
-            "Created package {} in {}".format(
-                module_name(name), path.relative_to(Path.cwd())
-            )
-        )
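
The dev-dependency block in `NewCommand.handle()` above derives a pytest pin from the detected Python: a constraint that still admits Python < 3.5 gets `pytest ^4.6`, and one lying entirely at >= 3.5 gets `^5.2`, with the second check winning when both fire. A sketch of that decision using the same `poetry.core.semver` helpers the deleted file imports (assuming the vendored API is unchanged):

from poetry.core.semver import parse_constraint

def pick_pytest(python_requirement):
    python_constraint = parse_constraint(python_requirement)
    pytest = None
    if parse_constraint("<3.5").allows_any(python_constraint):
        pytest = "^4.6"  # some allowed interpreter predates pytest 5
    if parse_constraint(">=3.5").allows_all(python_constraint):
        pytest = "^5.2"  # every allowed interpreter is modern enough
    return pytest

assert pick_pytest("^2.7") == "^4.6"
assert pick_pytest("^3.8") == "^5.2"
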
diff --git a/vendor/poetry/poetry/console/commands/remove.py b/vendor/poetry/poetry/console/commands/remove.py
deleted file mode 100644
index d9a289cb..00000000
--- a/vendor/poetry/poetry/console/commands/remove.py
+++ /dev/null
@@ -1,90 +0,0 @@
-from cleo import argument
-from cleo import option
-
-from .installer_command import InstallerCommand
-
-
-class RemoveCommand(InstallerCommand):
-
-    name = "remove"
-    description = "Removes a package from the project dependencies."
-
-    arguments = [argument("packages", "The packages to remove.", multiple=True)]
-    options = [
-        option("dev", "D", "Remove a package from the development dependencies."),
-        option(
-            "dry-run",
-            None,
-            "Output the operations but do not execute anything "
-            "(implicitly enables --verbose).",
-        ),
-    ]
-
-    help = """The remove command removes a package from the current
-list of installed packages
-
-poetry remove"""
-
-    loggers = ["poetry.repositories.pypi_repository", "poetry.inspection.info"]
-
-    def handle(self):
-        packages = self.argument("packages")
-        is_dev = self.option("dev")
-
-        original_content = self.poetry.file.read()
-        content = self.poetry.file.read()
-        poetry_content = content["tool"]["poetry"]
-        section = "dependencies"
-        if is_dev:
-            section = "dev-dependencies"
-
-        # Deleting entries
-        requirements = {}
-        for name in packages:
-            found = False
-            for key in poetry_content[section]:
-                if key.lower() == name.lower():
-                    found = True
-                    requirements[key] = poetry_content[section][key]
-                    break
-
-            if not found:
-                raise ValueError("Package {} not found".format(name))
-
-        for key in requirements:
-            del poetry_content[section][key]
-
-        # Write the new content back
-        self.poetry.file.write(content)
-
-        # Update packages
-        self.reset_poetry()
-
-        self._installer.use_executor(
-            self.poetry.config.get("experimental.new-installer", False)
-        )
-
-        self._installer.dry_run(self.option("dry-run"))
-        self._installer.verbose(self._io.is_verbose())
-        self._installer.update(True)
-        self._installer.whitelist(requirements)
-
-        try:
-            status = self._installer.run()
-        except Exception:
-            self.poetry.file.write(original_content)
-
-            raise
-
-        if status != 0 or self.option("dry-run"):
-            # Revert changes
-            if not self.option("dry-run"):
-                self.line_error(
-                    "\n"
-                    "Removal failed, reverting pyproject.toml "
-                    "to its original content."
-                )
-
-            self.poetry.file.write(original_content)
-
-        return status
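
The shape worth keeping from `RemoveCommand.handle()` above is its two-level rollback: the original `pyproject.toml` content is restored both when the installer raises and when it exits non-zero (or on `--dry-run`). The same transaction in plain Python (the path and callback are illustrative):

def write_with_rollback(path, new_content, run_installer):
    # path is a pathlib.Path; run_installer returns an exit status.
    original = path.read_text()
    path.write_text(new_content)
    try:
        status = run_installer()
    except Exception:
        path.write_text(original)  # hard failure: restore and re-raise
        raise
    if status != 0:
        path.write_text(original)  # soft failure: restore, keep the status
    return status
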
diff --git a/vendor/poetry/poetry/console/commands/run.py b/vendor/poetry/poetry/console/commands/run.py
deleted file mode 100644
index fda01114..00000000
--- a/vendor/poetry/poetry/console/commands/run.py
+++ /dev/null
@@ -1,60 +0,0 @@
-from cleo import argument
-
-from .env_command import EnvCommand
-
-
-class RunCommand(EnvCommand):
-
-    name = "run"
-    description = "Runs a command in the appropriate environment."
-
-    arguments = [
-        argument("args", "The command and arguments/options to run.", multiple=True)
-    ]
-
-    def __init__(self):  # type: () -> None
-        from poetry.console.args.run_args_parser import RunArgsParser
-
-        super(RunCommand, self).__init__()
-
-        self.config.set_args_parser(RunArgsParser())
-
-    def handle(self):
-        args = self.argument("args")
-        script = args[0]
-        scripts = self.poetry.local_config.get("scripts")
-
-        if scripts and script in scripts:
-            return self.run_script(scripts[script], args)
-
-        return self.env.execute(*args)
-
-    def run_script(self, script, args):
-        if isinstance(script, dict):
-            script = script["callable"]
-
-        module, callable_ = script.split(":")
-
-        src_in_sys_path = "sys.path.append('src'); " if self._module.is_in_src() else ""
-
-        cmd = ["python", "-c"]
-
-        cmd += [
-            "import sys; "
-            "from importlib import import_module; "
-            "sys.argv = {!r}; {}"
-            "import_module('{}').{}()".format(args, src_in_sys_path, module, callable_)
-        ]
-
-        return self.env.execute(*cmd)
-
-    @property
-    def _module(self):
-        from poetry.core.masonry.utils.module import Module
-
-        poetry = self.poetry
-        package = poetry.package
-        path = poetry.file.parent
-        module = Module(package.name, path.as_posix(), package.packages)
-
-        return module
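
`run_script()` above turns a `[tool.poetry.scripts]` entry of the form `"module:callable"` into a `python -c` one-liner that patches `sys.argv` before importing the module. Reproducing just the command construction (names are illustrative, but the string template mirrors the deleted code):

def build_script_cmd(script, args, in_src_layout=False):
    module, callable_ = script.split(":")
    src_in_sys_path = "sys.path.append('src'); " if in_src_layout else ""
    code = (
        "import sys; "
        "from importlib import import_module; "
        "sys.argv = {!r}; {}"
        "import_module('{}').{}()".format(args, src_in_sys_path, module, callable_)
    )
    return ["python", "-c", code]

# e.g. scripts = { serve = "myapp.cli:main" } and `poetry run serve --port 8000`:
print(build_script_cmd("myapp.cli:main", ["serve", "--port", "8000"]))
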
diff --git a/vendor/poetry/poetry/console/commands/search.py b/vendor/poetry/poetry/console/commands/search.py
deleted file mode 100644
index 299dee6a..00000000
--- a/vendor/poetry/poetry/console/commands/search.py
+++ /dev/null
@@ -1,27 +0,0 @@
-from cleo import argument
-
-from .command import Command
-
-
-class SearchCommand(Command):
-
-    name = "search"
-    description = "Searches for packages on remote repositories."
-
-    arguments = [argument("tokens", "The tokens to search for.", multiple=True)]
-
-    def handle(self):
-        from poetry.repositories.pypi_repository import PyPiRepository
-
-        results = PyPiRepository().search(self.argument("tokens"))
-
-        for result in results:
-            self.line("")
-            name = "{}".format(result.name)
-
-            name += " ({})".format(result.version)
-
-            self.line(name)
-
-            if result.description:
-                self.line(" {}".format(result.description))
diff --git a/vendor/poetry/poetry/console/commands/self/self.py b/vendor/poetry/poetry/console/commands/self/self.py
deleted file mode 100644
index ff1abefe..00000000
--- a/vendor/poetry/poetry/console/commands/self/self.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from ..command import Command
-
-
-class SelfCommand(Command):
-
-    name = "self"
-    description = "Interact with Poetry directly."
-
-    commands = []
-
-    def handle(self):
-        return self.call("help", self._config.name)
diff --git a/vendor/poetry/poetry/console/commands/self/update.py b/vendor/poetry/poetry/console/commands/self/update.py
deleted file mode 100644
index ff5ec678..00000000
--- a/vendor/poetry/poetry/console/commands/self/update.py
+++ /dev/null
@@ -1,472 +0,0 @@
-from __future__ import unicode_literals
-
-import hashlib
-import os
-import re
-import shutil
-import site
-import stat
-import subprocess
-import sys
-import tarfile
-
-from functools import cmp_to_key
-from gzip import GzipFile
-
-from cleo import argument
-from cleo import option
-
-from poetry.core.packages import Dependency
-from poetry.utils._compat import PY2
-from poetry.utils._compat import Path
-
-from ..command import Command
-
-
-try:
-    from urllib.error import HTTPError
-    from urllib.request import urlopen
-except ImportError:
-    from urllib2 import HTTPError
-    from urllib2 import urlopen
-
-
-BIN = """# -*- coding: utf-8 -*-
-import glob
-import sys
-import os
-
-lib = os.path.normpath(os.path.join(os.path.realpath(__file__), "../..", "lib"))
-vendors = os.path.join(lib, "poetry", "_vendor")
-current_vendors = os.path.join(
-    vendors, "py{}".format(".".join(str(v) for v in sys.version_info[:2]))
-)
-sys.path.insert(0, lib)
-sys.path.insert(0, current_vendors)
-
-if __name__ == "__main__":
-    from poetry.console import main
-    main()
-"""
-
-BAT = '@echo off\r\n{python_executable} "{poetry_bin}" %*\r\n'
-
-
-class SelfUpdateCommand(Command):
-
-    name = "update"
-    description = "Updates Poetry to the latest version."
-
-    arguments = [argument("version", "The version to update to.", optional=True)]
-    options = [option("preview", None, "Install prereleases.")]
-
-    REPOSITORY_URL = "https://github.com/python-poetry/poetry"
-    BASE_URL = REPOSITORY_URL + "/releases/download"
-
-    _data_dir = None
-    _bin_dir = None
-    _pool = None
-
-    @property
-    def home(self):
-        from poetry.utils._compat import Path
-
-        return Path(os.environ.get("POETRY_HOME", "~/.poetry")).expanduser()
-
-    @property
-    def bin(self):
-        return self.home / "bin"
-
-    @property
-    def lib(self):
-        return self.home / "lib"
-
-    @property
-    def lib_backup(self):
-        return self.home / "lib-backup"
-
-    @property
-    def data_dir(self):  # type: () -> Path
-        if self._data_dir is not None:
-            return self._data_dir
-
-        from poetry.locations import data_dir
-
-        self._data_dir = data_dir()
-
-        return self._data_dir
-
-    @property
-    def bin_dir(self):  # type: () -> Path
-        if self._data_dir is not None:
-            return self._data_dir
-
-        from poetry.utils._compat import WINDOWS
-
-        if os.getenv("POETRY_HOME"):
-            return Path(os.getenv("POETRY_HOME"), "bin").expanduser()
-
-        user_base = site.getuserbase()
-
-        if WINDOWS:
-            bin_dir = os.path.join(user_base, "Scripts")
-        else:
-            bin_dir = os.path.join(user_base, "bin")
-
-        self._bin_dir = Path(bin_dir)
-
-        return self._bin_dir
-
-    @property
-    def pool(self):
-        if self._pool is not None:
-            return self._pool
-
-        from poetry.repositories.pool import Pool
-        from poetry.repositories.pypi_repository import PyPiRepository
-
-        pool = Pool()
-        pool.add_repository(PyPiRepository(fallback=False))
-
-        self._pool = pool
-
-        return self._pool
-
-    def handle(self):
-        from poetry.__version__ import __version__
-        from poetry.core.semver import Version
-        from poetry.utils.env import EnvManager
-
-        new_update_method = False
-        try:
-            self._check_recommended_installation()
-        except RuntimeError as e:
-            env = EnvManager.get_system_env(naive=True)
-            try:
-                env.path.relative_to(self.data_dir)
-            except ValueError:
-                raise e
-
-            new_update_method = True
-
-        version = self.argument("version")
-        if not version:
-            version = ">=" + __version__
-
-        repo = self.pool.repositories[0]
-        packages = repo.find_packages(
-            Dependency("poetry", version, allows_prereleases=self.option("preview"))
-        )
-        if not packages:
-            self.line("No release found for the specified version")
-            return
-
-        packages.sort(
-            key=cmp_to_key(
-                lambda x, y: 0
-                if x.version == y.version
-                else int(x.version < y.version or -1)
-            )
-        )
-
-        release = None
-        for package in packages:
-            if package.is_prerelease():
-                if self.option("preview"):
-                    release = package
-
-                    break
-
-                continue
-
-            release = package
-
-            break
-
-        if release is None:
-            self.line("No new release found")
-            return
-
-        if release.version == Version.parse(__version__):
-            self.line("You are using the latest version")
-            return
-
-        if new_update_method:
-            return self.update_with_new_method(release.version)
-
-        self.update(release)
-
-    def update(self, release):
-        version = release.version
-        self.line("Updating to {}".format(version))
-
-        if self.lib_backup.exists():
-            shutil.rmtree(str(self.lib_backup))
-
-        # Backup the current installation
-        if self.lib.exists():
-            shutil.copytree(str(self.lib), str(self.lib_backup))
-            shutil.rmtree(str(self.lib))
-
-        try:
-            self._update(version)
-        except Exception:
-            if not self.lib_backup.exists():
-                raise
-
-            shutil.copytree(str(self.lib_backup), str(self.lib))
-            shutil.rmtree(str(self.lib_backup))
-
-            raise
-        finally:
-            if self.lib_backup.exists():
-                shutil.rmtree(str(self.lib_backup))
-
-        self.make_bin()
-
-        self.line("")
-        self.line("")
-        self.line(
-            "Poetry ({}) is installed now. Great!".format(
-                version
-            )
-        )
-
-    def update_with_new_method(self, version):
-        self.line("Updating Poetry to {}".format(version))
-        self.line("")
-
-        self._update_with_new_method(version)
-        self._make_bin()
-
-        self.line("")
-        self.line(
-            "Poetry ({}) is installed now. Great!".format(version)
-        )
-
-    def _update(self, version):
-        from poetry.utils.helpers import temporary_directory
-
-        release_name = self._get_release_name(version)
-
-        checksum = "{}.sha256sum".format(release_name)
-
-        base_url = self.BASE_URL
-
-        try:
-            r = urlopen(base_url + "/{}/{}".format(version, checksum))
-        except HTTPError as e:
-            if e.code == 404:
-                raise RuntimeError("Could not find {} file".format(checksum))
-
-            raise
-
-        checksum = r.read().decode().strip()
-
-        # We get the payload from the remote host
-        name = "{}.tar.gz".format(release_name)
-        try:
-            r = urlopen(base_url + "/{}/{}".format(version, name))
-        except HTTPError as e:
-            if e.code == 404:
-                raise RuntimeError("Could not find {} file".format(name))
-
-            raise
-
-        meta = r.info()
-        size = int(meta["Content-Length"])
-        current = 0
-        block_size = 8192
-
-        bar = self.progress_bar(max=size)
-        bar.set_format(" - Downloading {} %percent%%".format(name))
-        bar.start()
-
-        sha = hashlib.sha256()
-        with temporary_directory(prefix="poetry-updater-") as dir_:
-            tar = os.path.join(dir_, name)
-            with open(tar, "wb") as f:
-                while True:
-                    buffer = r.read(block_size)
-                    if not buffer:
-                        break
-
-                    current += len(buffer)
-                    f.write(buffer)
-                    sha.update(buffer)
-
-                    bar.set_progress(current)
-
-            bar.finish()
-
-            # Checking hashes
-            if checksum != sha.hexdigest():
-                raise RuntimeError(
-                    "Hashes for {} do not match: {} != {}".format(
-                        name, checksum, sha.hexdigest()
-                    )
-                )
-
-            gz = GzipFile(tar, mode="rb")
-            try:
-                with tarfile.TarFile(tar, fileobj=gz, format=tarfile.PAX_FORMAT) as f:
-                    f.extractall(str(self.lib))
-            finally:
-                gz.close()
-
-    def _update_with_new_method(self, version):
-        from poetry.config.config import Config
-        from poetry.core.packages.dependency import Dependency
-        from poetry.core.packages.project_package import ProjectPackage
-        from poetry.installation.installer import Installer
-        from poetry.packages.locker import NullLocker
-        from poetry.repositories.installed_repository import InstalledRepository
-        from poetry.utils.env import EnvManager
-
-        env = EnvManager.get_system_env(naive=True)
-        installed = InstalledRepository.load(env)
-
-        root = ProjectPackage("poetry-updater", "0.0.0")
-        root.python_versions = ".".join(str(c) for c in env.version_info[:3])
-        root.add_dependency(Dependency("poetry", version.text))
-
-        installer = Installer(
-            self.io,
-            env,
-            root,
-            NullLocker(self.data_dir.joinpath("poetry.lock"), {}),
-            self.pool,
-            Config(),
-            installed=installed,
-        )
-        installer.update(True)
-        installer.run()
-
-    def _make_bin(self):
-        from poetry.utils._compat import WINDOWS
-
-        self.line("")
-        self.line("Updating the poetry script")
-
-        self.bin_dir.mkdir(parents=True, exist_ok=True)
-
-        script = "poetry"
-        target_script = "venv/bin/poetry"
-        if WINDOWS:
-            script = "poetry.exe"
-            target_script = "venv/Scripts/poetry.exe"
-
-        if self.bin_dir.joinpath(script).exists():
-            self.bin_dir.joinpath(script).unlink()
-
-        if not PY2 and not WINDOWS:
-            try:
-                self.bin_dir.joinpath(script).symlink_to(
-                    self.data_dir.joinpath(target_script)
-                )
-            except OSError:
-                # This can happen if the user
-                # does not have the correct permission on Windows
-                shutil.copy(
-                    self.data_dir.joinpath(target_script), self.bin_dir.joinpath(script)
-                )
-        else:
-            shutil.copy(
-                str(self.data_dir.joinpath(target_script)),
-                str(self.bin_dir.joinpath(script)),
-            )
-
-    def process(self, *args):
-        return subprocess.check_output(list(args), stderr=subprocess.STDOUT)
-
-    def _check_recommended_installation(self):
-        from poetry.utils._compat import Path
-
-        current = Path(__file__)
-        try:
-            current.relative_to(self.home)
-        except ValueError:
-            raise RuntimeError(
-                "Poetry was not installed with the recommended installer. "
-                "Cannot update automatically."
-            )
-
-    def _get_release_name(self, version):
-        platform = sys.platform
-        if platform == "linux2":
-            platform = "linux"
-
-        return "poetry-{}-{}".format(version, platform)
-
-    def _bin_path(self, base_path, bin):
-        from poetry.utils._compat import WINDOWS
-
-        if WINDOWS:
-            return (base_path / "Scripts" / bin).with_suffix(".exe")
-
-        return base_path / "bin" / bin
-
-    def make_bin(self):
-        from poetry.utils._compat import WINDOWS
-
-        self.bin.mkdir(0o755, parents=True, exist_ok=True)
-
-        python_executable = self._which_python()
-
-        if WINDOWS:
-            with self.bin.joinpath("poetry.bat").open("w", newline="") as f:
-                f.write(
-                    BAT.format(
-                        python_executable=python_executable,
-                        poetry_bin=str(self.bin / "poetry").replace(
-                            os.environ["USERPROFILE"], "%USERPROFILE%"
-                        ),
-                    )
-                )
-
-        bin_content = BIN
-        if not WINDOWS:
-            bin_content = "#!/usr/bin/env {}\n".format(python_executable) + bin_content
-
-        self.bin.joinpath("poetry").write_text(bin_content, encoding="utf-8")
-
-        if not WINDOWS:
-            # Making the file executable
-            st = os.stat(str(self.bin.joinpath("poetry")))
-            os.chmod(str(self.bin.joinpath("poetry")), st.st_mode | stat.S_IEXEC)
-
-    def _which_python(self):
-        """
-        Decides which python executable we'll embed in the launcher script.
-        """
-        from poetry.utils._compat import WINDOWS
-
-        allowed_executables = ["python", "python3"]
-        if WINDOWS:
-            allowed_executables += ["py.exe -3", "py.exe -2"]
-
-        # \d in regex ensures we can convert to int later
-        version_matcher = re.compile(r"^Python (?P\d+)\.(?P\d+)\..+$")
-        fallback = None
-        for executable in allowed_executables:
-            try:
-                raw_version = subprocess.check_output(
-                    executable + " --version", stderr=subprocess.STDOUT, shell=True
-                ).decode("utf-8")
-            except subprocess.CalledProcessError:
-                continue
-
-            match = version_matcher.match(raw_version.strip())
-            if match and tuple(map(int, match.groups())) >= (3, 0):
-                # favor the first py3 executable we can find.
-                return executable
-
-            if fallback is None:
-                # keep this one as the fallback; it was the first valid executable we found.
-                fallback = executable
-
-        if fallback is None:
-            # Avoid breaking existing scripts
-            fallback = "python"
-
-        return fallback
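
Most of `_update()` above is a streaming download with an incremental SHA-256: read fixed-size blocks, feed each block to both the output file and the hash, then compare the digest with the published `.sha256sum` before unpacking. The core loop without the progress-bar plumbing, Python 3 only (the helper is illustrative):

import hashlib
from urllib.request import urlopen

def download_verified(url, dest, expected_sha256, block_size=8192):
    sha = hashlib.sha256()
    with urlopen(url) as response, open(dest, "wb") as f:
        while True:
            buffer = response.read(block_size)
            if not buffer:
                break
            f.write(buffer)
            sha.update(buffer)
    if sha.hexdigest() != expected_sha256:
        raise RuntimeError(
            "Hashes do not match: {} != {}".format(expected_sha256, sha.hexdigest())
        )
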
diff --git a/vendor/poetry/poetry/console/commands/shell.py b/vendor/poetry/poetry/console/commands/shell.py
deleted file mode 100644
index 033ab207..00000000
--- a/vendor/poetry/poetry/console/commands/shell.py
+++ /dev/null
@@ -1,40 +0,0 @@
-import sys
-
-from distutils.util import strtobool
-from os import environ
-
-from .env_command import EnvCommand
-
-
-class ShellCommand(EnvCommand):
-
-    name = "shell"
-    description = "Spawns a shell within the virtual environment."
-
-    help = """The shell command spawns a shell, according to the
-$SHELL environment variable, within the virtual environment.
-If one doesn't exist yet, it will be created.
-"""
-
-    def handle(self):
-        from poetry.utils.shell import Shell
-
-        # Check if it's already activated or doesn't exist and won't be created
-        venv_activated = strtobool(environ.get("POETRY_ACTIVE", "0")) or getattr(
-            sys, "real_prefix", sys.prefix
-        ) == str(self.env.path)
-        if venv_activated:
-            self.line(
-                "Virtual environment already activated: "
-                "{}".format(self.env.path)
-            )
-
-            return
-
-        self.line("Spawning shell within {}".format(self.env.path))
-
-        # Setting this to avoid spawning unnecessary nested shells
-        environ["POETRY_ACTIVE"] = "1"
-        shell = Shell.get()
-        shell.activate(self.env)
-        environ.pop("POETRY_ACTIVE")
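
The activation test above combines two signals: the `POETRY_ACTIVE` marker this command exports for its child shell, and the interpreter check that `sys.real_prefix` (set by virtualenv, hence the `getattr` fallback to `sys.prefix`) already points at the target environment. As a stand-alone predicate (simplified: the deleted code parses the marker with `strtobool`, and stock `venv` interpreters would need `sys.base_prefix`, which it does not consult):

import sys
from os import environ

def venv_already_active(env_path):
    marker_set = environ.get("POETRY_ACTIVE", "0") == "1"
    prefix = getattr(sys, "real_prefix", sys.prefix)
    return marker_set or prefix == str(env_path)
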
diff --git a/vendor/poetry/poetry/console/commands/show.py b/vendor/poetry/poetry/console/commands/show.py
deleted file mode 100644
index 86be1ae7..00000000
--- a/vendor/poetry/poetry/console/commands/show.py
+++ /dev/null
@@ -1,413 +0,0 @@
-# -*- coding: utf-8 -*-
-from cleo import argument
-from cleo import option
-
-from .env_command import EnvCommand
-
-
-class ShowCommand(EnvCommand):
-
-    name = "show"
-    description = "Shows information about packages."
-
-    arguments = [argument("package", "The package to inspect", optional=True)]
-    options = [
-        option("no-dev", None, "Do not list the development dependencies."),
-        option("tree", "t", "List the dependencies as a tree."),
-        option("latest", "l", "Show the latest version."),
-        option(
-            "outdated",
-            "o",
-            "Show the latest version but only for packages that are outdated.",
-        ),
-        option(
-            "all",
-            "a",
-            "Show all packages (even those not compatible with current system).",
-        ),
-    ]
-
-    help = """The show command displays detailed information about a package, or
-lists all packages available."""
-
-    colors = ["cyan", "yellow", "green", "magenta", "blue"]
-
-    def handle(self):
-        from clikit.utils.terminal import Terminal
-
-        from poetry.io.null_io import NullIO
-        from poetry.puzzle.solver import Solver
-        from poetry.repositories.installed_repository import InstalledRepository
-        from poetry.repositories.pool import Pool
-        from poetry.repositories.repository import Repository
-        from poetry.utils.helpers import get_package_version_display_string
-
-        package = self.argument("package")
-
-        if self.option("tree"):
-            self.init_styles(self.io)
-
-        if self.option("outdated"):
-            self._args.set_option("latest", True)
-
-        include_dev = not self.option("no-dev")
-        locked_repo = self.poetry.locker.locked_repository(True)
-
-        # Show tree view if requested
-        if self.option("tree") and not package:
-            requires = self.poetry.package.requires
-            if include_dev:
-                requires += self.poetry.package.dev_requires
-            packages = locked_repo.packages
-            for package in packages:
-                for require in requires:
-                    if package.name == require.name:
-                        self.display_package_tree(self._io, package, locked_repo)
-                        break
-
-            return 0
-
-        table = self.table(style="compact")
-        # table.style.line_vc_char = ""
-        locked_packages = locked_repo.packages
-        pool = Pool(ignore_repository_names=True)
-        pool.add_repository(locked_repo)
-        solver = Solver(
-            self.poetry.package,
-            pool=pool,
-            installed=Repository(),
-            locked=locked_repo,
-            io=NullIO(),
-        )
-        solver.provider.load_deferred(False)
-        with solver.use_environment(self.env):
-            ops = solver.solve()
-
-        required_locked_packages = set([op.package for op in ops if not op.skipped])
-
-        if self.option("no-dev"):
-            required_locked_packages = [
-                p for p in locked_packages if p.category == "main"
-            ]
-
-        if package:
-            pkg = None
-            for locked in locked_packages:
-                if package.lower() == locked.name:
-                    pkg = locked
-                    break
-
-            if not pkg:
-                raise ValueError("Package {} not found".format(package))
-
-            if self.option("tree"):
-                self.display_package_tree(self.io, pkg, locked_repo)
-
-                return 0
-
-            rows = [
-                ["name", " : {}".format(pkg.pretty_name)],
-                ["version", " : {}".format(pkg.pretty_version)],
-                ["description", " : {}".format(pkg.description)],
-            ]
-
-            table.add_rows(rows)
-            table.render(self.io)
-
-            if pkg.requires:
-                self.line("")
-                self.line("dependencies")
-                for dependency in pkg.requires:
-                    self.line(
-                        " - {} {}".format(
-                            dependency.pretty_name, dependency.pretty_constraint
-                        )
-                    )
-
-            return 0
-
-        show_latest = self.option("latest")
-        show_all = self.option("all")
-        terminal = Terminal()
-        width = terminal.width
-        name_length = version_length = latest_length = 0
-        latest_packages = {}
-        latest_statuses = {}
-        installed_repo = InstalledRepository.load(self.env)
-
-        # Computing widths
-        for locked in locked_packages:
-            if locked not in required_locked_packages and not show_all:
-                continue
-
-            current_length = len(locked.pretty_name)
-            if not self._io.output.supports_ansi():
-                installed_status = self.get_installed_status(locked, installed_repo)
-
-                if installed_status == "not-installed":
-                    current_length += 4
-
-            if show_latest:
-                latest = self.find_latest_package(locked, include_dev)
-                if not latest:
-                    latest = locked
-
-                latest_packages[locked.pretty_name] = latest
-                update_status = latest_statuses[
-                    locked.pretty_name
-                ] = self.get_update_status(latest, locked)
-
-                if not self.option("outdated") or update_status != "up-to-date":
-                    name_length = max(name_length, current_length)
-                    version_length = max(
-                        version_length,
-                        len(
-                            get_package_version_display_string(
-                                locked, root=self.poetry.file.parent
-                            )
-                        ),
-                    )
-                    latest_length = max(
-                        latest_length,
-                        len(
-                            get_package_version_display_string(
-                                latest, root=self.poetry.file.parent
-                            )
-                        ),
-                    )
-            else:
-                name_length = max(name_length, current_length)
-                version_length = max(
-                    version_length,
-                    len(
-                        get_package_version_display_string(
-                            locked, root=self.poetry.file.parent
-                        )
-                    ),
-                )
-
-        write_version = name_length + version_length + 3 <= width
-        write_latest = name_length + version_length + latest_length + 3 <= width
-        write_description = name_length + version_length + latest_length + 24 <= width
-
-        for locked in locked_packages:
-            color = "cyan"
-            name = locked.pretty_name
-            install_marker = ""
-            if locked not in required_locked_packages:
-                if not show_all:
-                    continue
-
-                color = "black;options=bold"
-            else:
-                installed_status = self.get_installed_status(locked, installed_repo)
-                if installed_status == "not-installed":
-                    color = "red"
-
-                    if not self._io.output.supports_ansi():
-                        # Non installed in non decorated mode
-                        install_marker = " (!)"
-
-            if (
-                show_latest
-                and self.option("outdated")
-                and latest_statuses[locked.pretty_name] == "up-to-date"
-            ):
-                continue
-
-            line = "{:{}}{}".format(
-                color, name, name_length - len(install_marker), install_marker
-            )
-            if write_version:
-                line += " {:{}}".format(
-                    get_package_version_display_string(
-                        locked, root=self.poetry.file.parent
-                    ),
-                    version_length,
-                )
-            if show_latest:
-                latest = latest_packages[locked.pretty_name]
-                update_status = latest_statuses[locked.pretty_name]
-
-                if write_latest:
-                    color = "green"
-                    if update_status == "semver-safe-update":
-                        color = "red"
-                    elif update_status == "update-possible":
-                        color = "yellow"
-
-                    line += " {:{}}".format(
-                        color,
-                        get_package_version_display_string(
-                            latest, root=self.poetry.file.parent
-                        ),
-                        latest_length,
-                    )
-
-            if write_description:
-                description = locked.description
-                remaining = width - name_length - version_length - 4
-                if show_latest:
-                    remaining -= latest_length
-
-                if len(locked.description) > remaining:
-                    description = description[: remaining - 3] + "..."
-
-                line += " " + description
-
-            self.line(line)
-
-    def display_package_tree(self, io, package, installed_repo):
-        io.write("{}".format(package.pretty_name))
-        description = ""
-        if package.description:
-            description = " " + package.description
-
-        io.write_line(" {}{}".format(package.pretty_version, description))
-
-        dependencies = package.requires
-        dependencies = sorted(dependencies, key=lambda x: x.name)
-        tree_bar = "├"
-        j = 0
-        total = len(dependencies)
-        for dependency in dependencies:
-            j += 1
-            if j == total:
-                tree_bar = "â””"
-
-            level = 1
-            color = self.colors[level]
-            info = "{tree_bar}── <{color}>{name} {constraint}".format(
-                tree_bar=tree_bar,
-                color=color,
-                name=dependency.name,
-                constraint=dependency.pretty_constraint,
-            )
-            self._write_tree_line(io, info)
-
-            tree_bar = tree_bar.replace("â””", " ")
-            packages_in_tree = [package.name, dependency.name]
-
-            self._display_tree(
-                io, dependency, installed_repo, packages_in_tree, tree_bar, level + 1
-            )
-
-    def _display_tree(
-        self,
-        io,
-        dependency,
-        installed_repo,
-        packages_in_tree,
-        previous_tree_bar="├",
-        level=1,
-    ):
-        previous_tree_bar = previous_tree_bar.replace("├", "│")
-
-        dependencies = []
-        for package in installed_repo.packages:
-            if package.name == dependency.name:
-                dependencies = package.requires
-
-                break
-
-        dependencies = sorted(dependencies, key=lambda x: x.name)
-        tree_bar = previous_tree_bar + "   ├"
-        i = 0
-        total = len(dependencies)
-        for dependency in dependencies:
-            i += 1
-            current_tree = packages_in_tree
-            if i == total:
-                tree_bar = previous_tree_bar + "   └"
-
-            color_ident = level % len(self.colors)
-            color = self.colors[color_ident]
-
-            circular_warn = ""
-            if dependency.name in current_tree:
-                circular_warn = "(circular dependency aborted here)"
-
-            info = "{tree_bar}── <{color}>{name} {constraint} {warn}".format(
-                tree_bar=tree_bar,
-                color=color,
-                name=dependency.name,
-                constraint=dependency.pretty_constraint,
-                warn=circular_warn,
-            )
-            self._write_tree_line(io, info)
-
-            tree_bar = tree_bar.replace("└", " ")
-
-            if dependency.name not in current_tree:
-                current_tree.append(dependency.name)
-
-                self._display_tree(
-                    io, dependency, installed_repo, current_tree, tree_bar, level + 1
-                )
-
-    def _write_tree_line(self, io, line):
-        if not io.output.supports_ansi():
-            line = line.replace("â””", "`-")
-            line = line.replace("├", "|-")
-            line = line.replace("──", "-")
-            line = line.replace("│", "|")
-
-        io.write_line(line)
-
-    def init_styles(self, io):
-        from clikit.api.formatter import Style
-
-        for color in self.colors:
-            style = Style(color).fg(color)
-            io.output.formatter.add_style(style)
-            io.error_output.formatter.add_style(style)
-
-    def find_latest_package(self, package, include_dev):
-        from clikit.io import NullIO
-
-        from poetry.puzzle.provider import Provider
-        from poetry.version.version_selector import VersionSelector
-
-        # find the latest version allowed in this pool
-        if package.source_type in ("git", "file", "directory"):
-            requires = self.poetry.package.requires
-            if include_dev:
-                requires = requires + self.poetry.package.dev_requires
-
-            for dep in requires:
-                if dep.name == package.name:
-                    provider = Provider(self.poetry.package, self.poetry.pool, NullIO())
-
-                    if dep.is_vcs():
-                        return provider.search_for_vcs(dep)[0]
-                    if dep.is_file():
-                        return provider.search_for_file(dep)[0]
-                    if dep.is_directory():
-                        return provider.search_for_directory(dep)[0]
-
-        name = package.name
-        selector = VersionSelector(self.poetry.pool)
-
-        return selector.find_best_candidate(name, ">={}".format(package.pretty_version))
-
-    def get_update_status(self, latest, package):
-        from poetry.core.semver import parse_constraint
-
-        if latest.full_pretty_version == package.full_pretty_version:
-            return "up-to-date"
-
-        constraint = parse_constraint("^" + package.pretty_version)
-
-        if latest.version and constraint.allows(latest.version):
-            # It needs an immediate semver-compliant upgrade
-            return "semver-safe-update"
-
-        # it needs an upgrade but has potential BC breaks so is not urgent
-        return "update-possible"
-
-    def get_installed_status(self, locked, installed_repo):
-        for package in installed_repo.packages:
-            if locked.name == package.name:
-                return "installed"
-
-        return "not-installed"
diff --git a/vendor/poetry/poetry/console/commands/update.py b/vendor/poetry/poetry/console/commands/update.py
deleted file mode 100644
index 9e18feb7..00000000
--- a/vendor/poetry/poetry/console/commands/update.py
+++ /dev/null
@@ -1,47 +0,0 @@
-from cleo import argument
-from cleo import option
-
-from .installer_command import InstallerCommand
-
-
-class UpdateCommand(InstallerCommand):
-
-    name = "update"
-    description = (
-        "Update the dependencies as according to the pyproject.toml file."
-    )
-
-    arguments = [
-        argument("packages", "The packages to update", optional=True, multiple=True)
-    ]
-    options = [
-        option("no-dev", None, "Do not update the development dependencies."),
-        option(
-            "dry-run",
-            None,
-            "Output the operations but do not execute anything "
-            "(implicitly enables --verbose).",
-        ),
-        option("lock", None, "Do not perform operations (only update the lockfile)."),
-    ]
-
-    loggers = ["poetry.repositories.pypi_repository"]
-
-    def handle(self):
-        packages = self.argument("packages")
-
-        self._installer.use_executor(
-            self.poetry.config.get("experimental.new-installer", False)
-        )
-
-        if packages:
-            self._installer.whitelist({name: "*" for name in packages})
-
-        self._installer.dev_mode(not self.option("no-dev"))
-        self._installer.dry_run(self.option("dry-run"))
-        self._installer.execute_operations(not self.option("lock"))
-
-        # Force update
-        self._installer.update(True)
-
-        return self._installer.run()
diff --git a/vendor/poetry/poetry/console/commands/version.py b/vendor/poetry/poetry/console/commands/version.py
deleted file mode 100644
index 21ed676f..00000000
--- a/vendor/poetry/poetry/console/commands/version.py
+++ /dev/null
@@ -1,109 +0,0 @@
-from cleo import argument
-from cleo import option
-
-from .command import Command
-
-
-class VersionCommand(Command):
-
-    name = "version"
-    description = (
-        "Shows the version of the project or bumps it when a valid "
-        "bump rule is provided."
-    )
-
-    arguments = [
-        argument(
-            "version",
-            "The version number or the rule to update the version.",
-            optional=True,
-        )
-    ]
-    options = [option("short", "s", "Output the version number only")]
-
-    help = """\
-The version command shows the current version of the project or bumps the version of
-the project and writes the new version back to pyproject.toml if a valid
-bump rule is provided.
-
-The new version should ideally be a valid semver string or a valid bump rule:
-patch, minor, major, prepatch, preminor, premajor, prerelease.
-"""
-
-    RESERVED = {
-        "major",
-        "minor",
-        "patch",
-        "premajor",
-        "preminor",
-        "prepatch",
-        "prerelease",
-    }
-
-    def handle(self):
-        version = self.argument("version")
-
-        if version:
-            version = self.increment_version(
-                self.poetry.package.pretty_version, version
-            )
-
-            self.line(
-                "Bumping version from {} to {}".format(
-                    self.poetry.package.pretty_version, version
-                )
-            )
-
-            content = self.poetry.file.read()
-            poetry_content = content["tool"]["poetry"]
-            poetry_content["version"] = version.text
-
-            self.poetry.file.write(content)
-        else:
-            if self.option("short"):
-                self.line("{}".format(self.poetry.package.pretty_version))
-            else:
-                self.line(
-                    "{} {}".format(
-                        self.poetry.package.name, self.poetry.package.pretty_version
-                    )
-                )
-
-    def increment_version(self, version, rule):
-        from poetry.core.semver import Version
-
-        try:
-            version = Version.parse(version)
-        except ValueError:
-            raise ValueError("The project's version doesn't seem to follow semver")
-
-        if rule in {"major", "premajor"}:
-            new = version.next_major
-            if rule == "premajor":
-                new = new.first_prerelease
-        elif rule in {"minor", "preminor"}:
-            new = version.next_minor
-            if rule == "preminor":
-                new = new.first_prerelease
-        elif rule in {"patch", "prepatch"}:
-            new = version.next_patch
-            if rule == "prepatch":
-                new = new.first_prerelease
-        elif rule == "prerelease":
-            if version.is_prerelease():
-                pre = version.prerelease
-                new_prerelease = int(pre[1]) + 1
-                new = Version.parse(
-                    "{}.{}.{}-{}".format(
-                        version.major,
-                        version.minor,
-                        version.patch,
-                        ".".join([pre[0], str(new_prerelease)]),
-                    )
-                )
-            else:
-                new = version.next_patch.first_prerelease
-        else:
-            new = Version.parse(rule)
-
-        return new
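
`increment_version` above dispatches on a small table of reserved rules: `major`/`minor`/`patch` move to the next release, the `pre*` variants move there and start a first prerelease, and `prerelease` either bumps an existing prerelease counter or starts one on the next patch. A rough sketch of the same table on `(major, minor, patch, prerelease)` tuples, where `prerelease` is an optional counter (illustrative only; poetry's `Version` also tracks prerelease tags such as `alpha`):

    def bump(version, rule):
        major, minor, patch, pre = version
        nexts = {
            "major": (major + 1, 0, 0),
            "minor": (major, minor + 1, 0),
            "patch": (major, minor, patch + 1),
        }
        if rule in nexts:
            return nexts[rule] + (None,)
        if rule in ("premajor", "preminor", "prepatch"):
            # First prerelease of the corresponding next version.
            return nexts[rule[3:]] + (0,)
        if rule == "prerelease":
            if pre is not None:
                return (major, minor, patch, pre + 1)
            return (major, minor, patch + 1, 0)
        raise ValueError("unknown rule: {}".format(rule))

    assert bump((1, 2, 3, None), "preminor") == (1, 3, 0, 0)
    assert bump((1, 2, 3, 0), "prerelease") == (1, 2, 3, 1)
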
diff --git a/vendor/poetry/poetry/console/config/__init__.py b/vendor/poetry/poetry/console/config/__init__.py
deleted file mode 100644
index 14e86b43..00000000
--- a/vendor/poetry/poetry/console/config/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .application_config import ApplicationConfig
diff --git a/vendor/poetry/poetry/console/config/application_config.py b/vendor/poetry/poetry/console/config/application_config.py
deleted file mode 100644
index 09cc2cb1..00000000
--- a/vendor/poetry/poetry/console/config/application_config.py
+++ /dev/null
@@ -1,249 +0,0 @@
-import logging
-
-from typing import Any
-
-from cleo.config import ApplicationConfig as BaseApplicationConfig
-from clikit.api.application.application import Application
-from clikit.api.args.raw_args import RawArgs
-from clikit.api.event import PRE_HANDLE
-from clikit.api.event import PreHandleEvent
-from clikit.api.event import PreResolveEvent
-from clikit.api.event.event_dispatcher import EventDispatcher
-from clikit.api.exceptions import CliKitException
-from clikit.api.formatter import Style
-from clikit.api.io import Input
-from clikit.api.io import InputStream
-from clikit.api.io import Output
-from clikit.api.io import OutputStream
-from clikit.api.io.flags import DEBUG
-from clikit.api.io.flags import VERBOSE
-from clikit.api.io.flags import VERY_VERBOSE
-from clikit.api.io.io import IO
-from clikit.formatter import AnsiFormatter
-from clikit.formatter import PlainFormatter
-from clikit.io.input_stream import StandardInputStream
-from clikit.io.output_stream import ErrorOutputStream
-from clikit.io.output_stream import StandardOutputStream
-
-from poetry.console.commands.command import Command
-from poetry.console.commands.env_command import EnvCommand
-from poetry.console.commands.installer_command import InstallerCommand
-from poetry.console.logging.io_formatter import IOFormatter
-from poetry.console.logging.io_handler import IOHandler
-from poetry.utils._compat import PY36
-
-
-class ApplicationConfig(BaseApplicationConfig):
-    def configure(self):
-        super(ApplicationConfig, self).configure()
-
-        self.add_style(Style("c1").fg("cyan"))
-        self.add_style(Style("c2").fg("default").bold())
-        self.add_style(Style("info").fg("blue"))
-        self.add_style(Style("comment").fg("green"))
-        self.add_style(Style("error").fg("red").bold())
-        self.add_style(Style("warning").fg("yellow").bold())
-        self.add_style(Style("debug").fg("default").dark())
-        self.add_style(Style("success").fg("green"))
-
-        # Dark variants
-        self.add_style(Style("c1_dark").fg("cyan").dark())
-        self.add_style(Style("c2_dark").fg("default").bold().dark())
-        self.add_style(Style("success_dark").fg("green").dark())
-
-        self.add_event_listener(PRE_HANDLE, self.register_command_loggers)
-        self.add_event_listener(PRE_HANDLE, self.set_env)
-        self.add_event_listener(PRE_HANDLE, self.set_installer)
-
-        if PY36:
-            from poetry.mixology.solutions.providers import (
-                PythonRequirementSolutionProvider,
-            )
-
-            self._solution_provider_repository.register_solution_providers(
-                [PythonRequirementSolutionProvider]
-            )
-
-    def register_command_loggers(
-        self, event, event_name, _
-    ):  # type: (PreHandleEvent, str, Any) -> None
-        command = event.command.config.handler
-        if not isinstance(command, Command):
-            return
-
-        io = event.io
-
-        loggers = [
-            "poetry.packages.locker",
-            "poetry.packages.package",
-            "poetry.utils.password_manager",
-        ]
-
-        loggers += command.loggers
-
-        handler = IOHandler(io)
-        handler.setFormatter(IOFormatter())
-
-        for logger in loggers:
-            logger = logging.getLogger(logger)
-
-            logger.handlers = [handler]
-
-            level = logging.WARNING
-            # The builder loggers are special and can actually
-            # start at the INFO level.
-            if logger.name.startswith("poetry.core.masonry.builders"):
-                level = logging.INFO
-
-            if io.is_debug():
-                level = logging.DEBUG
-            elif io.is_very_verbose() or io.is_verbose():
-                level = logging.INFO
-
-            logger.setLevel(level)
-
-    def set_env(self, event, event_name, _):  # type: (PreHandleEvent, str, Any) -> None
-        from poetry.utils.env import EnvManager
-
-        command = event.command.config.handler  # type: EnvCommand
-        if not isinstance(command, EnvCommand):
-            return
-
-        if command.env is not None:
-            return
-
-        io = event.io
-        poetry = command.poetry
-
-        env_manager = EnvManager(poetry)
-        env = env_manager.create_venv(io)
-
-        if env.is_venv() and io.is_verbose():
-            io.write_line("Using virtualenv: {}".format(env.path))
-
-        command.set_env(env)
-
-    def set_installer(
-        self, event, event_name, _
-    ):  # type: (PreHandleEvent, str, Any) -> None
-        command = event.command.config.handler  # type: InstallerCommand
-        if not isinstance(command, InstallerCommand):
-            return
-
-        # If the command already has an installer
-        # we skip this step
-        if command.installer is not None:
-            return
-
-        from poetry.installation.installer import Installer
-
-        poetry = command.poetry
-        installer = Installer(
-            event.io,
-            command.env,
-            poetry.package,
-            poetry.locker,
-            poetry.pool,
-            poetry.config,
-        )
-        installer.use_executor(poetry.config.get("experimental.new-installer", False))
-        command.set_installer(installer)
-
-    def resolve_help_command(
-        self, event, event_name, dispatcher
-    ):  # type: (PreResolveEvent, str, EventDispatcher) -> None
-        args = event.raw_args
-        application = event.application
-
-        if args.has_option_token("-h") or args.has_option_token("--help"):
-            from clikit.api.resolver import ResolvedCommand
-
-            try:
-                resolved_command = self.command_resolver.resolve(args, application)
-            except CliKitException:
-                # We weren't able to resolve the command,
-                # due to a parse error most likely,
-                # so we fall back on the default behavior
-                return super(ApplicationConfig, self).resolve_help_command(
-                    event, event_name, dispatcher
-                )
-
-            # If the current command is the run one, skip the option
-            # check and treat the options as part of the executed command
-            if resolved_command.command.name == "run":
-                event.set_resolved_command(resolved_command)
-
-                return event.stop_propagation()
-
-            command = application.get_command("help")
-
-            # Enable lenient parsing
-            parsed_args = command.parse(args, True)
-
-            event.set_resolved_command(ResolvedCommand(command, parsed_args))
-            event.stop_propagation()
-
-    def create_io(
-        self,
-        application,
-        args,
-        input_stream=None,
-        output_stream=None,
-        error_stream=None,
-    ):  # type: (Application, RawArgs, InputStream, OutputStream, OutputStream) -> IO
-        if input_stream is None:
-            input_stream = StandardInputStream()
-
-        if output_stream is None:
-            output_stream = StandardOutputStream()
-
-        if error_stream is None:
-            error_stream = ErrorOutputStream()
-
-        style_set = application.config.style_set
-
-        if output_stream.supports_ansi():
-            output_formatter = AnsiFormatter(style_set)
-        else:
-            output_formatter = PlainFormatter(style_set)
-
-        if error_stream.supports_ansi():
-            error_formatter = AnsiFormatter(style_set)
-        else:
-            error_formatter = PlainFormatter(style_set)
-
-        io = self.io_class(
-            Input(input_stream),
-            Output(output_stream, output_formatter),
-            Output(error_stream, error_formatter),
-        )
-
-        resolved_command = application.resolve_command(args)
-        # If the current command is the run one, skip the option
-        # check and treat the options as part of the executed command
-        if resolved_command.command.name == "run":
-            return io
-
-        if args.has_option_token("--no-ansi"):
-            formatter = PlainFormatter(style_set)
-            io.output.set_formatter(formatter)
-            io.error_output.set_formatter(formatter)
-        elif args.has_option_token("--ansi"):
-            formatter = AnsiFormatter(style_set, True)
-            io.output.set_formatter(formatter)
-            io.error_output.set_formatter(formatter)
-
-        if args.has_option_token("-vvv") or self.is_debug():
-            io.set_verbosity(DEBUG)
-        elif args.has_option_token("-vv"):
-            io.set_verbosity(VERY_VERBOSE)
-        elif args.has_option_token("-v"):
-            io.set_verbosity(VERBOSE)
-
-        if args.has_option_token("--quiet") or args.has_option_token("-q"):
-            io.set_quiet(True)
-
-        if args.has_option_token("--no-interaction") or args.has_option_token("-n"):
-            io.set_interactive(False)
-
-        return io
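
Note how `create_io` above reads verbosity straight from the raw argv tokens rather than from parsed options, so `-vvv` takes effect even when the command itself cannot be resolved. A condensed sketch of just that token-to-verbosity mapping (the constants stand in for clikit's `VERBOSE`/`VERY_VERBOSE`/`DEBUG` flags; their values here are placeholders):

    VERBOSE, VERY_VERBOSE, DEBUG = 64, 128, 256  # placeholder flag values

    def verbosity(tokens, app_is_debug=False):
        if "-vvv" in tokens or app_is_debug:
            return DEBUG
        if "-vv" in tokens:
            return VERY_VERBOSE
        if "-v" in tokens:
            return VERBOSE
        return 0

    assert verbosity(["update", "-vv"]) == VERY_VERBOSE
    assert verbosity(["show"]) == 0
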
diff --git a/vendor/poetry/poetry/console/logging/formatters/__init__.py b/vendor/poetry/poetry/console/logging/formatters/__init__.py
deleted file mode 100644
index 20593e38..00000000
--- a/vendor/poetry/poetry/console/logging/formatters/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from .builder_formatter import BuilderLogFormatter
-
-
-FORMATTERS = {
-    "poetry.core.masonry.builders.builder": BuilderLogFormatter(),
-    "poetry.core.masonry.builders.sdist": BuilderLogFormatter(),
-    "poetry.core.masonry.builders.wheel": BuilderLogFormatter(),
-}
diff --git a/vendor/poetry/poetry/console/logging/formatters/formatter.py b/vendor/poetry/poetry/console/logging/formatters/formatter.py
deleted file mode 100644
index 35b59374..00000000
--- a/vendor/poetry/poetry/console/logging/formatters/formatter.py
+++ /dev/null
@@ -1,6 +0,0 @@
-import logging
-
-
-class Formatter(object):
-    def format(self, record):  # type: (logging.LogRecord) -> str
-        raise NotImplementedError()
diff --git a/vendor/poetry/poetry/console/logging/io_formatter.py b/vendor/poetry/poetry/console/logging/io_formatter.py
deleted file mode 100644
index 9ff57fec..00000000
--- a/vendor/poetry/poetry/console/logging/io_formatter.py
+++ /dev/null
@@ -1,27 +0,0 @@
-import logging
-
-from .formatters import FORMATTERS
-
-
-class IOFormatter(logging.Formatter):
-
-    _colors = {
-        "error": "fg=red",
-        "warning": "fg=yellow",
-        "debug": "debug",
-        "info": "fg=blue",
-    }
-
-    def format(self, record):
-        if not record.exc_info:
-            level = record.levelname.lower()
-            msg = record.msg
-
-            if record.name in FORMATTERS:
-                msg = FORMATTERS[record.name].format(msg)
-            elif level in self._colors:
-                msg = "<{}>{}".format(self._colors[level], msg)
-
-            return msg
-
-        return super(IOFormatter, self).format(record)
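
`IOFormatter` above only decorates records that carry no exception info: known logger names are delegated to a dedicated formatter, otherwise the level name picks a markup tag. A small self-contained illustration of the level-based branch (the `decorate` helper is invented for the example):

    import logging

    COLORS = {"error": "fg=red", "warning": "fg=yellow", "debug": "debug", "info": "fg=blue"}

    def decorate(record):
        # Same idea as IOFormatter.format() for records without exc_info.
        level = record.levelname.lower()
        if level in COLORS:
            return "<{}>{}</>".format(COLORS[level], record.msg)
        return record.msg

    rec = logging.LogRecord("poetry.demo", logging.WARNING, __file__, 0,
                            "deprecated option", None, None)
    assert decorate(rec) == "<fg=yellow>deprecated option</>"
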
diff --git a/vendor/poetry/poetry/console/logging/io_handler.py b/vendor/poetry/poetry/console/logging/io_handler.py
deleted file mode 100644
index 14fd1769..00000000
--- a/vendor/poetry/poetry/console/logging/io_handler.py
+++ /dev/null
@@ -1,20 +0,0 @@
-import logging
-
-
-class IOHandler(logging.Handler):
-    def __init__(self, io):
-        self._io = io
-
-        super(IOHandler, self).__init__()
-
-    def emit(self, record):
-        try:
-            msg = self.format(record)
-            level = record.levelname.lower()
-            err = level in ("warning", "error", "exception", "critical")
-            if err:
-                self._io.error_line(msg)
-            else:
-                self._io.write_line(msg)
-        except Exception:
-            self.handleError(record)
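
`IOHandler` above is the bridge between the standard `logging` module and clikit's IO: warning-and-above records are routed to the error output, everything else to standard output. A runnable sketch of that routing with a stand-in IO object (`FakeIO` and `DemoIOHandler` are hypothetical names used only to keep the example self-contained):

    import logging
    import sys

    class FakeIO(object):
        # Stand-in for clikit's IO: write_line -> stdout, error_line -> stderr.
        def write_line(self, msg):
            sys.stdout.write(msg + "\n")

        def error_line(self, msg):
            sys.stderr.write(msg + "\n")

    class DemoIOHandler(logging.Handler):
        def __init__(self, io):
            self._io = io
            super(DemoIOHandler, self).__init__()

        def emit(self, record):
            msg = self.format(record)
            if record.levelname.lower() in ("warning", "error", "exception", "critical"):
                self._io.error_line(msg)
            else:
                self._io.write_line(msg)

    logger = logging.getLogger("poetry.demo")
    logger.addHandler(DemoIOHandler(FakeIO()))
    logger.warning("this line goes to the error output")
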
diff --git a/vendor/poetry/poetry/exceptions.py b/vendor/poetry/poetry/exceptions.py
deleted file mode 100644
index 0bbaeb80..00000000
--- a/vendor/poetry/poetry/exceptions.py
+++ /dev/null
@@ -1,8 +0,0 @@
-class PoetryException(Exception):
-
-    pass
-
-
-class InvalidProjectFile(PoetryException):
-
-    pass
diff --git a/vendor/poetry/poetry/factory.py b/vendor/poetry/poetry/factory.py
deleted file mode 100755
index cef04eba..00000000
--- a/vendor/poetry/poetry/factory.py
+++ /dev/null
@@ -1,162 +0,0 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-from typing import Dict
-from typing import Optional
-
-from clikit.api.io.io import IO
-
-from poetry.core.factory import Factory as BaseFactory
-from poetry.core.toml.file import TOMLFile
-
-from .config.config import Config
-from .config.file_config_source import FileConfigSource
-from .io.null_io import NullIO
-from .locations import CONFIG_DIR
-from .packages.locker import Locker
-from .poetry import Poetry
-from .repositories.pypi_repository import PyPiRepository
-from .utils._compat import Path
-
-
-class Factory(BaseFactory):
-    """
-    Factory class to create various elements needed by Poetry.
-    """
-
-    def create_poetry(
-        self, cwd=None, io=None
-    ):  # type: (Optional[Path], Optional[IO]) -> Poetry
-        if io is None:
-            io = NullIO()
-
-        base_poetry = super(Factory, self).create_poetry(cwd)
-
-        locker = Locker(
-            base_poetry.file.parent / "poetry.lock", base_poetry.local_config
-        )
-
-        # Loading global configuration
-        config = self.create_config(io)
-
-        # Loading local configuration
-        local_config_file = TOMLFile(base_poetry.file.parent / "poetry.toml")
-        if local_config_file.exists():
-            if io.is_debug():
-                io.write_line(
-                    "Loading configuration file {}".format(local_config_file.path)
-                )
-
-            config.merge(local_config_file.read())
-
-        # Load local sources
-        repositories = {}
-        existing_repositories = config.get("repositories", {})
-        for source in base_poetry.pyproject.poetry_config.get("source", []):
-            name = source.get("name")
-            url = source.get("url")
-            if name and url:
-                if name not in existing_repositories:
-                    repositories[name] = {"url": url}
-
-        config.merge({"repositories": repositories})
-
-        poetry = Poetry(
-            base_poetry.file.path,
-            base_poetry.local_config,
-            base_poetry.package,
-            locker,
-            config,
-        )
-
-        # Configuring sources
-        sources = poetry.local_config.get("source", [])
-        for source in sources:
-            repository = self.create_legacy_repository(source, config)
-            is_default = source.get("default", False)
-            is_secondary = source.get("secondary", False)
-            if io.is_debug():
-                message = "Adding repository {} ({})".format(
-                    repository.name, repository.url
-                )
-                if is_default:
-                    message += " and setting it as the default one"
-                elif is_secondary:
-                    message += " and setting it as secondary"
-
-                io.write_line(message)
-
-            poetry.pool.add_repository(repository, is_default, secondary=is_secondary)
-
-        # Put PyPI last to prefer private repositories
-        # unless we have no default source AND no primary sources
-        # (default = false, secondary = false)
-        if poetry.pool.has_default():
-            if io.is_debug():
-                io.write_line("Deactivating the PyPI repository")
-        else:
-            default = not poetry.pool.has_primary_repositories()
-            poetry.pool.add_repository(PyPiRepository(), default, not default)
-
-        return poetry
-
-    @classmethod
-    def create_config(cls, io=None):  # type: (Optional[IO]) -> Config
-        if io is None:
-            io = NullIO()
-
-        config = Config()
-        # Load global config
-        config_file = TOMLFile(Path(CONFIG_DIR) / "config.toml")
-        if config_file.exists():
-            if io.is_debug():
-                io.write_line(
-                    "Loading configuration file {}".format(
-                        config_file.path
-                    )
-                )
-
-            config.merge(config_file.read())
-
-        config.set_config_source(FileConfigSource(config_file))
-
-        # Load global auth config
-        auth_config_file = TOMLFile(Path(CONFIG_DIR) / "auth.toml")
-        if auth_config_file.exists():
-            if io.is_debug():
-                io.write_line(
-                    "Loading configuration file {}".format(
-                        auth_config_file.path
-                    )
-                )
-
-            config.merge(auth_config_file.read())
-
-        config.set_auth_config_source(FileConfigSource(auth_config_file))
-
-        return config
-
-    def create_legacy_repository(
-        self, source, auth_config
-    ):  # type: (Dict[str, str], Config) -> LegacyRepository
-        from .repositories.legacy_repository import LegacyRepository
-        from .utils.helpers import get_cert
-        from .utils.helpers import get_client_cert
-
-        if "url" in source:
-            # PyPI-like repository
-            if "name" not in source:
-                raise RuntimeError("Missing [name] in source.")
-        else:
-            raise RuntimeError("Unsupported source specified")
-
-        name = source["name"]
-        url = source["url"]
-
-        return LegacyRepository(
-            name,
-            url,
-            config=auth_config,
-            cert=get_cert(auth_config, name),
-            client_cert=get_client_cert(auth_config, name),
-        )
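
The tail of `create_poetry` above decides where PyPI sits in the repository pool: it is left out as the default when an explicit default source exists, becomes the default when no primary sources are configured, and is otherwise appended as a secondary fallback so that private repositories are consulted first. A simplified model of that ordering using plain dicts instead of repository objects (`assemble_pool` is an illustrative name):

    def assemble_pool(sources):
        # sources: dicts like {"name": ..., "default": bool, "secondary": bool}
        names = [s["name"] for s in sources]
        has_default = any(s.get("default") for s in sources)
        has_primary = any(
            not s.get("default") and not s.get("secondary") for s in sources
        )
        if not has_default:
            if not has_primary:
                names.insert(0, "PyPI")  # PyPI becomes the default source
            else:
                names.append("PyPI")     # PyPI is only a fallback
        return names

    assert assemble_pool([{"name": "private"}]) == ["private", "PyPI"]
    assert assemble_pool([]) == ["PyPI"]
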
diff --git a/vendor/poetry/poetry/inspection/info.py b/vendor/poetry/poetry/inspection/info.py
deleted file mode 100644
index c8a77af1..00000000
--- a/vendor/poetry/poetry/inspection/info.py
+++ /dev/null
@@ -1,610 +0,0 @@
-import glob
-import logging
-import os
-import tarfile
-import zipfile
-
-from typing import Dict
-from typing import Iterator
-from typing import List
-from typing import Optional
-from typing import Union
-
-import pkginfo
-
-from poetry.core.factory import Factory
-from poetry.core.packages import Package
-from poetry.core.packages import ProjectPackage
-from poetry.core.packages import dependency_from_pep_508
-from poetry.core.pyproject.toml import PyProjectTOML
-from poetry.core.utils._compat import PY35
-from poetry.core.utils._compat import Path
-from poetry.core.utils.helpers import parse_requires
-from poetry.core.utils.helpers import temporary_directory
-from poetry.core.version.markers import InvalidMarker
-from poetry.utils.env import EnvCommandError
-from poetry.utils.env import EnvManager
-from poetry.utils.env import VirtualEnv
-from poetry.utils.setup_reader import SetupReader
-
-
-logger = logging.getLogger(__name__)
-
-PEP517_META_BUILD = """\
-import pep517.build
-import pep517.meta
-
-path='{source}'
-system=pep517.build.compat_system(path)
-pep517.meta.build(source_dir=path, dest='{dest}', system=system)
-"""
-
-PEP517_META_BUILD_DEPS = ["pep517==0.8.2", "toml==0.10.1"]
-
-
-class PackageInfoError(ValueError):
-    def __init__(
-        self, path, *reasons
-    ):  # type: (Union[Path, str], *Union[BaseException, str]) -> None
-        reasons = (
-            "Unable to determine package info for path: {}".format(str(path)),
-        ) + reasons
-        super(PackageInfoError, self).__init__(
-            "\n\n".join(str(msg).strip() for msg in reasons if msg)
-        )
-
-
-class PackageInfo:
-    def __init__(
-        self,
-        name=None,  # type: Optional[str]
-        version=None,  # type: Optional[str]
-        summary=None,  # type: Optional[str]
-        platform=None,  # type: Optional[str]
-        requires_dist=None,  # type: Optional[List[str]]
-        requires_python=None,  # type: Optional[str]
-        files=None,  # type: Optional[List[str]]
-        cache_version=None,  # type: Optional[str]
-    ):
-        self.name = name
-        self.version = version
-        self.summary = summary
-        self.platform = platform
-        self.requires_dist = requires_dist
-        self.requires_python = requires_python
-        self.files = files or []
-        self._cache_version = cache_version
-        self._source_type = None
-        self._source_url = None
-        self._source_reference = None
-
-    @property
-    def cache_version(self):  # type: () -> Optional[str]
-        return self._cache_version
-
-    def update(self, other):  # type: (PackageInfo) -> PackageInfo
-        self.name = other.name or self.name
-        self.version = other.version or self.version
-        self.summary = other.summary or self.summary
-        self.platform = other.platform or self.platform
-        self.requires_dist = other.requires_dist or self.requires_dist
-        self.requires_python = other.requires_python or self.requires_python
-        self.files = other.files or self.files
-        self._cache_version = other.cache_version or self._cache_version
-        return self
-
-    def asdict(self):  # type: () -> Dict[str, Optional[Union[str, List[str]]]]
-        """
-        Helper method to convert package info into a dictionary used for caching.
-        """
-        return {
-            "name": self.name,
-            "version": self.version,
-            "summary": self.summary,
-            "platform": self.platform,
-            "requires_dist": self.requires_dist,
-            "requires_python": self.requires_python,
-            "files": self.files,
-            "_cache_version": self._cache_version,
-        }
-
-    @classmethod
-    def load(
-        cls, data
-    ):  # type: (Dict[str, Optional[Union[str, List[str]]]]) -> PackageInfo
-        """
-        Helper method to load data from a dictionary produced by `PackageInfo.asdict()`.
-
-        :param data: Data to load. This is expected to be a `dict` object output by `asdict()`.
-        """
-        cache_version = data.pop("_cache_version", None)
-        return cls(cache_version=cache_version, **data)
-
-    @classmethod
-    def _log(cls, msg, level="info"):
-        """Internal helper method to log information."""
-        getattr(logger, level)("{}: {}".format(cls.__name__, msg))
-
-    def to_package(
-        self, name=None, extras=None, root_dir=None
-    ):  # type: (Optional[str], Optional[List[str]], Optional[Path]) -> Package
-        """
-        Create a new `poetry.core.packages.package.Package` instance using metadata from this instance.
-
-        :param name: Name to use for the package, if not specified name from this instance is used.
-        :param extras: Extras to activate for this package.
-        :param root_dir:  Optional root directory to use for the package. If set, dependency strings
-            will be parsed relative to this directory.
-        """
-        name = name or self.name
-
-        if not self.version:
-            # The version could not be determined, so we raise an error since it is mandatory.
-            raise RuntimeError(
-                "Unable to retrieve the package version for {}".format(name)
-            )
-
-        package = Package(
-            name=name,
-            version=self.version,
-            source_type=self._source_type,
-            source_url=self._source_url,
-            source_reference=self._source_reference,
-        )
-        package.description = self.summary
-        package.root_dir = root_dir
-        package.python_versions = self.requires_python or "*"
-        package.files = self.files
-
-        if root_dir or (self._source_type in {"directory"} and self._source_url):
-            # this is a local poetry project, which means we can extract "richer"
-            # requirement information, e.g. development requirements
-            poetry_package = self._get_poetry_package(path=root_dir or self._source_url)
-            if poetry_package:
-                package.extras = poetry_package.extras
-                package.requires = poetry_package.requires
-                return package
-
-        seen_requirements = set()
-
-        for req in self.requires_dist or []:
-            try:
-                # Attempt to parse the PEP-508 requirement string
-                dependency = dependency_from_pep_508(req, relative_to=root_dir)
-            except InvalidMarker:
-                # Invalid marker; we strip the markers, hoping for the best
-                req = req.split(";")[0]
-                dependency = dependency_from_pep_508(req, relative_to=root_dir)
-            except ValueError:
-                # Likely unable to parse constraint so we skip it
-                self._log(
-                    "Invalid constraint ({}) found in {}-{} dependencies, "
-                    "skipping".format(req, package.name, package.version),
-                    level="warning",
-                )
-                continue
-
-            if dependency.in_extras:
-                # this dependency is required by an extra package
-                for extra in dependency.in_extras:
-                    if extra not in package.extras:
-                        # this is the first time we encounter this extra for this package
-                        package.extras[extra] = []
-
-                    package.extras[extra].append(dependency)
-
-            req = dependency.to_pep_508(with_extras=True)
-
-            if req not in seen_requirements:
-                package.requires.append(dependency)
-                seen_requirements.add(req)
-
-        return package
-
-    @classmethod
-    def _from_distribution(
-        cls, dist
-    ):  # type: (Union[pkginfo.BDist, pkginfo.SDist, pkginfo.Wheel]) -> PackageInfo
-        """
-        Helper method to parse package information from a `pkginfo.Distribution` instance.
-
-        :param dist: The distribution instance to parse information from.
-        """
-        requirements = None
-
-        if dist.requires_dist:
-            requirements = list(dist.requires_dist)
-        else:
-            requires = Path(dist.filename) / "requires.txt"
-            if requires.exists():
-                with requires.open(encoding="utf-8") as f:
-                    requirements = parse_requires(f.read())
-
-        info = cls(
-            name=dist.name,
-            version=dist.version,
-            summary=dist.summary,
-            platform=dist.supported_platforms,
-            requires_dist=requirements,
-            requires_python=dist.requires_python,
-        )
-
-        info._source_type = "file"
-        info._source_url = Path(dist.filename).resolve().as_posix()
-
-        return info
-
-    @classmethod
-    def _from_sdist_file(cls, path):  # type: (Path) -> PackageInfo
-        """
-        Helper method to parse package information from an sdist file. We attempt to first inspect the
-        file using `pkginfo.SDist`. If this does not provide us with package requirements, we extract the
-        source and handle it as a directory.
-
-        :param path: The sdist file to parse information from.
-        """
-        info = None
-
-        try:
-            info = cls._from_distribution(pkginfo.SDist(str(path)))
-        except ValueError:
-            # Unable to determine dependencies
-            # We pass and go deeper
-            pass
-        else:
-            if info.requires_dist is not None:
-                # we successfully retrieved dependencies from sdist metadata
-                return info
-
-        # Still no dependencies found
-        # So, we unpack and introspect
-        suffix = path.suffix
-
-        if suffix == ".zip":
-            context = zipfile.ZipFile
-        else:
-            if suffix == ".bz2":
-                suffixes = path.suffixes
-                if len(suffixes) > 1 and suffixes[-2] == ".tar":
-                    suffix = ".tar.bz2"
-            else:
-                suffix = ".tar.gz"
-
-            context = tarfile.open
-
-        with temporary_directory() as tmp:
-            tmp = Path(tmp)
-            with context(path.as_posix()) as archive:
-                archive.extractall(tmp.as_posix())
-
-            # a little bit of guess work to determine the directory we care about
-            elements = list(tmp.glob("*"))
-
-            if len(elements) == 1 and elements[0].is_dir():
-                sdist_dir = elements[0]
-            else:
-                sdist_dir = tmp / path.name[: -len(suffix)]
-                if not sdist_dir.is_dir():
-                    sdist_dir = tmp
-
-            # now this is an unpacked directory we know how to deal with
-            new_info = cls.from_directory(path=sdist_dir)
-
-        if not info:
-            return new_info
-
-        return info.update(new_info)
-
-    @staticmethod
-    def has_setup_files(path):  # type: (Path) -> bool
-        return any((path / f).exists() for f in SetupReader.FILES)
-
-    @classmethod
-    def from_setup_files(cls, path):  # type: (Path) -> PackageInfo
-        """
-        Mechanism to parse package information from a `setup.[py|cfg]` file. This uses the implementation
-        at `poetry.utils.setup_reader.SetupReader` in order to parse the file. This is not reliable for
-        complex setup files and should only be attempted as a fallback.
-
-        :param path: Path to `setup.py` file
-        """
-        if not cls.has_setup_files(path):
-            raise PackageInfoError(
-                path, "No setup files (setup.py, setup.cfg) was found."
-            )
-
-        try:
-            result = SetupReader.read_from_directory(path)
-        except Exception as e:
-            raise PackageInfoError(path, e)
-
-        python_requires = result["python_requires"]
-        if python_requires is None:
-            python_requires = "*"
-
-        requires = ""
-        for dep in result["install_requires"]:
-            requires += dep + "\n"
-
-        if result["extras_require"]:
-            requires += "\n"
-
-        for extra_name, deps in result["extras_require"].items():
-            requires += "[{}]\n".format(extra_name)
-
-            for dep in deps:
-                requires += dep + "\n"
-
-            requires += "\n"
-
-        requirements = parse_requires(requires)
-
-        info = cls(
-            name=result.get("name"),
-            version=result.get("version"),
-            summary=result.get("description", ""),
-            requires_dist=requirements or None,
-            requires_python=python_requires,
-        )
-
-        if not (info.name and info.version) and not info.requires_dist:
-            # there is nothing useful here
-            raise PackageInfoError(
-                path,
-                "No core metadata (name, version, requires-dist) could be retrieved.",
-            )
-
-        return info
-
-    @staticmethod
-    def _find_dist_info(path):  # type: (Path) -> Iterator[Path]
-        """
-        Discover all `*.*-info` directories in a given path.
-
-        :param path: Path to search.
-        """
-        pattern = "**/*.*-info"
-        if PY35:
-            # Sometimes pathlib will fail on recursive symbolic links, so we need to work around it
-            # and use the glob module instead. Note that this does not happen with pathlib2,
-            # so it's safe to use it for Python < 3.5.
-            directories = glob.iglob(path.joinpath(pattern).as_posix(), recursive=True)
-        else:
-            directories = path.glob(pattern)
-
-        for d in directories:
-            yield Path(d)
-
-    @classmethod
-    def from_metadata(cls, path):  # type: (Path) -> Optional[PackageInfo]
-        """
-        Helper method to parse package information from an unpacked metadata directory.
-
-        :param path: The metadata directory to parse information from.
-        """
-        if path.suffix in {".dist-info", ".egg-info"}:
-            directories = [path]
-        else:
-            directories = cls._find_dist_info(path=path)
-
-        for directory in directories:
-            try:
-                if directory.suffix == ".egg-info":
-                    dist = pkginfo.UnpackedSDist(directory.as_posix())
-                elif directory.suffix == ".dist-info":
-                    dist = pkginfo.Wheel(directory.as_posix())
-                else:
-                    continue
-                break
-            except ValueError:
-                continue
-        else:
-            try:
-                # handle PKG-INFO in unpacked sdist root
-                dist = pkginfo.UnpackedSDist(path.as_posix())
-            except ValueError:
-                return
-
-        info = cls._from_distribution(dist=dist)
-        if info:
-            return info
-
-    @classmethod
-    def from_package(cls, package):  # type: (Package) -> PackageInfo
-        """
-        Helper method to inspect a `Package` object, in order to generate package info.
-
-        :param package: This must be a poetry package instance.
-        """
-        requires = {dependency.to_pep_508() for dependency in package.requires}
-
-        for extra_requires in package.extras.values():
-            for dependency in extra_requires:
-                requires.add(dependency.to_pep_508())
-
-        return cls(
-            name=package.name,
-            version=str(package.version),
-            summary=package.description,
-            platform=package.platform,
-            requires_dist=list(requires),
-            requires_python=package.python_versions,
-            files=package.files,
-        )
-
-    @staticmethod
-    def _get_poetry_package(path):  # type: (Path) -> Optional[ProjectPackage]
-        # Note: we ignore any setup.py file at this step
-        # TODO: add support for handling non-poetry PEP-517 builds
-        if PyProjectTOML(path.joinpath("pyproject.toml")).is_poetry_project():
-            try:
-                return Factory().create_poetry(path).package
-            except RuntimeError:
-                return None
-
-        return None
-
-    @classmethod
-    def _pep517_metadata(cls, path):  # type: (Path) -> PackageInfo
-        """
-        Helper method to use PEP-517 library to build and read package metadata.
-
-        :param path: Path to package source to build and read metadata for.
-        """
-        info = None
-        try:
-            info = cls.from_setup_files(path)
-            if all([info.version, info.name, info.requires_dist]):
-                return info
-        except PackageInfoError:
-            pass
-
-        with temporary_directory() as tmp_dir:
-            # TODO: cache PEP 517 build environment corresponding to each project venv
-            venv_dir = Path(tmp_dir) / ".venv"
-            EnvManager.build_venv(venv_dir.as_posix())
-            venv = VirtualEnv(venv_dir, venv_dir)
-
-            dest_dir = Path(tmp_dir) / "dist"
-            dest_dir.mkdir()
-
-            try:
-                venv.run_python(
-                    "-m",
-                    "pip",
-                    "install",
-                    "--disable-pip-version-check",
-                    "--ignore-installed",
-                    *PEP517_META_BUILD_DEPS
-                )
-                venv.run_python(
-                    "-",
-                    input_=PEP517_META_BUILD.format(
-                        source=path.as_posix(), dest=dest_dir.as_posix()
-                    ),
-                )
-                return cls.from_metadata(dest_dir)
-            except EnvCommandError as e:
-                # something went wrong while attempting pep517 metadata build
-                # fallback to egg_info if setup.py available
-                cls._log("PEP517 build failed: {}".format(e), level="debug")
-                setup_py = path / "setup.py"
-                if not setup_py.exists():
-                    raise PackageInfoError(
-                        path,
-                        e,
-                        "No fallback setup.py file was found to generate egg_info.",
-                    )
-
-                cwd = Path.cwd()
-                os.chdir(path.as_posix())
-                try:
-                    venv.run_python("setup.py", "egg_info")
-                    return cls.from_metadata(path)
-                except EnvCommandError as fbe:
-                    raise PackageInfoError(
-                        path, "Fallback egg_info generation failed.", fbe
-                    )
-                finally:
-                    os.chdir(cwd.as_posix())
-
-        if info:
-            cls._log(
-                "Falling back to parsed setup.py file for {}".format(path), "debug"
-            )
-            return info
-
-        # if we reach here, everything has failed and all hope is lost
-        raise PackageInfoError(path, "Exhausted all core metadata sources.")
-
-    @classmethod
-    def from_directory(
-        cls, path, disable_build=False
-    ):  # type: (Path, bool) -> PackageInfo
-        """
-        Generate package information from a package source directory. If `disable_build` is not `True` and
-        introspection of all available metadata fails, a build of the package is attempted in an isolated
-        environment in order to generate the required metadata.
-
-        :param path: Path to generate package information from.
-        :param disable_build: If not `True` and setup reader fails, PEP 517 isolated build is attempted in
-            order to gather metadata.
-        """
-        project_package = cls._get_poetry_package(path)
-        if project_package:
-            info = cls.from_package(project_package)
-        else:
-            info = cls.from_metadata(path)
-
-            if not info or info.requires_dist is None:
-                try:
-                    if disable_build:
-                        info = cls.from_setup_files(path)
-                    else:
-                        info = cls._pep517_metadata(path)
-                except PackageInfoError:
-                    if not info:
-                        raise
-
-                    # we discovered PkgInfo but no requirements were listed
-
-        info._source_type = "directory"
-        info._source_url = path.as_posix()
-
-        return info
-
-    @classmethod
-    def from_sdist(cls, path):  # type: (Path) -> PackageInfo
-        """
-        Gather package information from an sdist file, packed or unpacked.
-
-        :param path: Path to an sdist file or unpacked directory.
-        """
-        if path.is_file():
-            return cls._from_sdist_file(path=path)
-
-        # if we get here then the path is not a file,
-        # so we assume it is a directory
-        return cls.from_directory(path=path)
-
-    @classmethod
-    def from_wheel(cls, path):  # type: (Path) -> PackageInfo
-        """
-        Gather package information from a wheel.
-
-        :param path: Path to wheel.
-        """
-        try:
-            return cls._from_distribution(pkginfo.Wheel(str(path)))
-        except ValueError:
-            return PackageInfo()
-
-    @classmethod
-    def from_bdist(cls, path):  # type: (Path) -> PackageInfo
-        """
-        Gather package information from a bdist (wheel etc.).
-
-        :param path: Path to bdist.
-        """
-        if isinstance(path, (pkginfo.BDist, pkginfo.Wheel)):
-            return cls._from_distribution(dist=path)
-
-        if path.suffix == ".whl":
-            return cls.from_wheel(path=path)
-
-        try:
-            return cls._from_distribution(pkginfo.BDist(str(path)))
-        except ValueError as e:
-            raise PackageInfoError(path, e)
-
-    @classmethod
-    def from_path(cls, path):  # type: (Path) -> PackageInfo
-        """
-        Gather package information from a given path (bdist, sdist, directory).
-
-        :param path: Path to inspect.
-        """
-        try:
-            return cls.from_bdist(path=path)
-        except PackageInfoError:
-            return cls.from_sdist(path=path)
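
`from_path` above tries the bdist readers first and falls back to sdist handling, which in turn falls back to treating the path as a source directory; the routing is effectively driven by the path type and file suffix. A sketch of just that classification, with no metadata parsing (`classify` is an invented helper):

    from pathlib import Path

    def classify(path):
        # Mirrors the from_path -> from_bdist/from_sdist/from_directory routing.
        path = Path(path)
        if path.is_dir():
            return "directory"
        if path.suffix == ".whl":
            return "wheel"
        if path.suffix in (".zip", ".gz", ".bz2"):
            return "sdist"
        return "unknown"

    assert classify("requests-2.28.1-py3-none-any.whl") == "wheel"
    assert classify("requests-2.28.1.tar.gz") == "sdist"
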
diff --git a/vendor/poetry/poetry/installation/__init__.py b/vendor/poetry/poetry/installation/__init__.py
deleted file mode 100644
index 385d7b8c..00000000
--- a/vendor/poetry/poetry/installation/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .installer import Installer
diff --git a/vendor/poetry/poetry/installation/authenticator.py b/vendor/poetry/poetry/installation/authenticator.py
deleted file mode 100644
index 038f14e7..00000000
--- a/vendor/poetry/poetry/installation/authenticator.py
+++ /dev/null
@@ -1,165 +0,0 @@
-import logging
-import time
-
-from typing import TYPE_CHECKING
-
-import requests
-import requests.auth
-import requests.exceptions
-
-from poetry.exceptions import PoetryException
-from poetry.utils._compat import urlparse
-from poetry.utils.password_manager import PasswordManager
-
-
-if TYPE_CHECKING:
-    from typing import Any
-    from typing import Optional
-    from typing import Tuple
-
-    from clikit.api.io import IO
-
-    from poetry.config.config import Config
-
-
-logger = logging.getLogger()
-
-
-class Authenticator(object):
-    def __init__(self, config, io=None):  # type: (Config, Optional[IO]) -> None
-        self._config = config
-        self._io = io
-        self._credentials = {}
-        self._password_manager = PasswordManager(self._config)
-
-    def _log(self, message, level="debug"):  # type: (str, str) -> None
-        if self._io is not None:
-            self._io.write_line(
-                "<{level:s}>{message:s}".format(
-                    message=message, level=level
-                )
-            )
-        else:
-            getattr(logger, level, logger.debug)(message)
-
-    @property
-    def session(self):  # type: () -> requests.Session
-        return requests.Session()
-
-    def request(
-        self, method, url, **kwargs
-    ):  # type: (str, str, Any) -> requests.Response
-        request = requests.Request(method, url)
-        username, password = self.get_credentials_for_url(url)
-
-        if username is not None and password is not None:
-            request = requests.auth.HTTPBasicAuth(username, password)(request)
-
-        session = self.session
-        prepared_request = session.prepare_request(request)
-
-        proxies = kwargs.get("proxies", {})
-        stream = kwargs.get("stream")
-        verify = kwargs.get("verify")
-        cert = kwargs.get("cert")
-
-        settings = session.merge_environment_settings(
-            prepared_request.url, proxies, stream, verify, cert
-        )
-
-        # Send the request.
-        send_kwargs = {
-            "timeout": kwargs.get("timeout"),
-            "allow_redirects": kwargs.get("allow_redirects", True),
-        }
-        send_kwargs.update(settings)
-
-        attempt = 0
-
-        while True:
-            is_last_attempt = attempt >= 5
-            try:
-                resp = session.send(prepared_request, **send_kwargs)
-            except (requests.exceptions.ConnectionError, OSError) as e:
-                if is_last_attempt:
-                    raise e
-            else:
-                if resp.status_code not in [502, 503, 504] or is_last_attempt:
-                    resp.raise_for_status()
-                    return resp
-
-            if not is_last_attempt:
-                attempt += 1
-                delay = 0.5 * attempt
-                self._log(
-                    "Retrying HTTP request in {} seconds.".format(delay), level="debug"
-                )
-                time.sleep(delay)
-                continue
-
-        # this should never really be hit under any sane circumstance
-        raise PoetryException("Failed HTTP {} request".format(method.upper()))
-
-    def get_credentials_for_url(
-        self, url
-    ):  # type: (str) -> Tuple[Optional[str], Optional[str]]
-        parsed_url = urlparse.urlsplit(url)
-
-        netloc = parsed_url.netloc
-
-        credentials = self._credentials.get(netloc, (None, None))
-
-        if credentials == (None, None):
-            if "@" not in netloc:
-                credentials = self._get_credentials_for_netloc_from_config(netloc)
-            else:
-                # Split from the right because that's how urllib.parse.urlsplit()
-                # behaves if more than one @ is present (which can be checked using
-                # the password attribute of urlsplit()'s return value).
-                auth, netloc = netloc.rsplit("@", 1)
-                if ":" in auth:
-                    # Split from the left because that's how urllib.parse.urlsplit()
-                    # behaves if more than one : is present (which again can be checked
-                    # using the password attribute of the return value)
-                    credentials = auth.split(":", 1)
-                else:
-                    credentials = auth, None
-
-                credentials = tuple(
-                    None if x is None else urlparse.unquote(x) for x in credentials
-                )
-
-        if credentials[0] is not None or credentials[1] is not None:
-            credentials = (credentials[0] or "", credentials[1] or "")
-
-            self._credentials[netloc] = credentials
-
-        return credentials[0], credentials[1]
-
-    def _get_credentials_for_netloc_from_config(
-        self, netloc
-    ):  # type: (str) -> Tuple[Optional[str], Optional[str]]
-        credentials = (None, None)
-
-        for repository_name in self._config.get("repositories", []):
-            repository_config = self._config.get(
-                "repositories.{}".format(repository_name)
-            )
-            if not repository_config:
-                continue
-
-            url = repository_config.get("url")
-            if not url:
-                continue
-
-            parsed_url = urlparse.urlsplit(url)
-
-            if netloc == parsed_url.netloc:
-                auth = self._password_manager.get_http_auth(repository_name)
-
-                if auth is None:
-                    continue
-
-                return auth["username"], auth["password"]
-
-        return credentials
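
`get_credentials_for_url` above prefers credentials embedded in the URL itself, splitting the netloc from the right on `@` and from the left on `:` so the result matches what `urlsplit` itself would report, then URL-decodes both parts. A standalone sketch of that extraction with the config and keyring lookup omitted (`credentials_from_url` is an illustrative name):

    from urllib.parse import unquote, urlsplit

    def credentials_from_url(url):
        netloc = urlsplit(url).netloc
        if "@" not in netloc:
            return None, None
        auth, _host = netloc.rsplit("@", 1)      # rightmost "@" separates the host
        if ":" in auth:
            user, password = auth.split(":", 1)  # leftmost ":" separates the user
            return unquote(user), unquote(password)
        return unquote(auth), None

    assert credentials_from_url("https://user:s%40fe@pypi.example.com/simple") == ("user", "s@fe")
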
diff --git a/vendor/poetry/poetry/installation/base_installer.py b/vendor/poetry/poetry/installation/base_installer.py
deleted file mode 100644
index 1e068d07..00000000
--- a/vendor/poetry/poetry/installation/base_installer.py
+++ /dev/null
@@ -1,9 +0,0 @@
-class BaseInstaller:
-    def install(self, package):
-        raise NotImplementedError
-
-    def update(self, source, target):
-        raise NotImplementedError
-
-    def remove(self, package):
-        raise NotImplementedError
diff --git a/vendor/poetry/poetry/installation/chef.py b/vendor/poetry/poetry/installation/chef.py
deleted file mode 100644
index 669ce177..00000000
--- a/vendor/poetry/poetry/installation/chef.py
+++ /dev/null
@@ -1,110 +0,0 @@
-import hashlib
-import json
-
-from typing import TYPE_CHECKING
-
-from poetry.core.packages.utils.link import Link
-from poetry.utils._compat import Path
-
-from .chooser import InvalidWheelName
-from .chooser import Wheel
-
-
-if TYPE_CHECKING:
-    from typing import List
-    from typing import Optional
-
-    from poetry.config.config import Config
-    from poetry.utils.env import Env
-
-
-class Chef:
-    def __init__(self, config, env):  # type: (Config, Env) -> None
-        self._config = config
-        self._env = env
-        self._cache_dir = (
-            Path(config.get("cache-dir")).expanduser().joinpath("artifacts")
-        )
-
-    def prepare(self, archive):  # type: (Path) -> Path
-        return archive
-
-    def prepare_sdist(self, archive):  # type: (Path) -> Path
-        return archive
-
-    def prepare_wheel(self, archive):  # type: (Path) -> Path
-        return archive
-
-    def should_prepare(self, archive):  # type: (Path) -> bool
-        return not self.is_wheel(archive)
-
-    def is_wheel(self, archive):  # type: (Path) -> bool
-        return archive.suffix == ".whl"
-
-    def get_cached_archive_for_link(self, link):  # type: (Link) -> Optional[Link]
-        # If the archive is already a wheel, there is no need to cache it.
-        if link.is_wheel:
-            pass
-
-        archives = self.get_cached_archives_for_link(link)
-
-        if not archives:
-            return link
-
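-        # Rank the cached archives: sdists get an infinite sort key so any
-        # supported wheel outranks them, and min() below picks the wheel
-        # whose tags rank best (lowest index) for this environment.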
-        candidates = []
-        for archive in archives:
-            if not archive.is_wheel:
-                candidates.append((float("inf"), archive))
-                continue
-
-            try:
-                wheel = Wheel(archive.filename)
-            except InvalidWheelName:
-                continue
-
-            if not wheel.is_supported_by_environment(self._env):
-                continue
-
-            candidates.append(
-                (wheel.get_minimum_supported_index(self._env.supported_tags), archive),
-            )
-
-        if not candidates:
-            return link
-
-        return min(candidates)[1]
-
-    def get_cached_archives_for_link(self, link):  # type: (Link) -> List[Link]
-        cache_dir = self.get_cache_directory_for_link(link)
-
-        archive_types = ["whl", "tar.gz", "tar.bz2", "bz2", "zip"]
-        links = []
-        for archive_type in archive_types:
-            for archive in cache_dir.glob("*.{}".format(archive_type)):
-                links.append(Link(archive.as_uri()))
-
-        return links
-
-    def get_cache_directory_for_link(self, link):  # type: (Link) -> Path
-        key_parts = {"url": link.url_without_fragment}
-
-        if link.hash_name is not None and link.hash is not None:
-            key_parts[link.hash_name] = link.hash
-
-        if link.subdirectory_fragment:
-            key_parts["subdirectory"] = link.subdirectory_fragment
-
-        key_parts["interpreter_name"] = self._env.marker_env["interpreter_name"]
-        key_parts["interpreter_version"] = "".join(
-            self._env.marker_env["interpreter_version"].split(".")[:2]
-        )
-
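-        # Build a deterministic cache key: the canonical JSON of the link
-        # URL, its hash (if any), any subdirectory fragment, and the
-        # interpreter name/version, digested with SHA-256.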
-        key = hashlib.sha256(
-            json.dumps(
-                key_parts, sort_keys=True, separators=(",", ":"), ensure_ascii=True
-            ).encode("ascii")
-        ).hexdigest()
-
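-        # Fan the digest out into nested directories (aa/bb/cc/rest),
-        # presumably to keep any single cache directory from growing huge.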
-        split_key = [key[:2], key[2:4], key[4:6], key[6:]]
-
-        return self._cache_dir.joinpath(*split_key)
diff --git a/vendor/poetry/poetry/installation/chooser.py b/vendor/poetry/poetry/installation/chooser.py
deleted file mode 100644
index 0e97d0ea..00000000
--- a/vendor/poetry/poetry/installation/chooser.py
+++ /dev/null
@@ -1,187 +0,0 @@
-import re
-
-from typing import List
-from typing import Tuple
-
-from packaging.tags import Tag
-
-from poetry.core.packages.package import Package
-from poetry.core.packages.utils.link import Link
-from poetry.repositories.pool import Pool
-from poetry.utils.env import Env
-from poetry.utils.patterns import wheel_file_re
-
-
-class InvalidWheelName(Exception):
-    pass
-
-
-class Wheel(object):
-    def __init__(self, filename):  # type: (str) -> None
-        wheel_info = wheel_file_re.match(filename)
-        if not wheel_info:
-            raise InvalidWheelName("{} is not a valid wheel filename.".format(filename))
-
-        self.filename = filename
-        self.name = wheel_info.group("name").replace("_", "-")
-        self.version = wheel_info.group("ver").replace("_", "-")
-        self.build_tag = wheel_info.group("build")
-        self.pyversions = wheel_info.group("pyver").split(".")
-        self.abis = wheel_info.group("abi").split(".")
-        self.plats = wheel_info.group("plat").split(".")
-
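-        # Expand the compressed tag sets from the filename into the full
-        # cross-product of (python, abi, platform) tags this wheel supports.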
-        self.tags = {
-            Tag(x, y, z) for x in self.pyversions for y in self.abis for z in self.plats
-        }
-
-    def get_minimum_supported_index(self, tags):
-        indexes = [tags.index(t) for t in self.tags if t in tags]
-
-        return min(indexes) if indexes else None
-
-    def is_supported_by_environment(self, env):
-        return bool(set(env.supported_tags).intersection(self.tags))
-
-
-class Chooser:
-    """
-    A Chooser chooses an appropriate release archive for packages.
-    """
-
-    def __init__(self, pool, env):  # type: (Pool, Env) -> None
-        self._pool = pool
-        self._env = env
-
-    def choose_for(self, package):  # type: (Package) -> Link
-        """
-        Return the url of the selected archive for a given package.
-        """
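-        # Keep only links that are installable here: wheels must be
-        # supported by the current environment, and non-installable archive
-        # formats (.egg, .exe, .msi, .rpm, .srpm) are skipped.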
-        links = []
-        for link in self._get_links(package):
-            if link.is_wheel and not Wheel(link.filename).is_supported_by_environment(
-                self._env
-            ):
-                continue
-
-            if link.ext in {".egg", ".exe", ".msi", ".rpm", ".srpm"}:
-                continue
-
-            links.append(link)
-
-        if not links:
-            raise RuntimeError(
-                "Unable to find installation candidates for {}".format(package)
-            )
-
-        # Get the best link
-        chosen = max(links, key=lambda link: self._sort_key(package, link))
-        if not chosen:
-            raise RuntimeError(
-                "Unable to find installation candidates for {}".format(package)
-            )
-
-        return chosen
-
-    def _get_links(self, package):  # type: (Package) -> List[Link]
-        if not package.source_type:
-            if not self._pool.has_repository("pypi"):
-                repository = self._pool.repositories[0]
-            else:
-                repository = self._pool.repository("pypi")
-        else:
-            repository = self._pool.repository(package.source_reference)
-
-        links = repository.find_links_for_package(package)
-
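-        # Filter the candidate links against the hashes pinned in the lock
-        # file; links that advertise no hash are kept and verified after
-        # download instead.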
-        hashes = [f["hash"] for f in package.files]
-        if not hashes:
-            return links
-
-        selected_links = []
-        for link in links:
-            if not link.hash:
-                selected_links.append(link)
-                continue
-
-            h = link.hash_name + ":" + link.hash
-            if h not in hashes:
-                continue
-
-            selected_links.append(link)
-
-        if links and not selected_links:
-            raise RuntimeError(
-                "Retrieved digest for link {}({}) not in poetry.lock metadata {}".format(
-                    link.filename, h, hashes
-                )
-            )
-
-        return selected_links
-
-    def _sort_key(self, package, link):  # type: (Package, Link) -> Tuple
-        """
-        Function to pass as the `key` argument to a call to sorted() to sort
-        InstallationCandidates by preference.
-        Returns a tuple such that tuples sorting as greater using Python's
-        default comparison operator are more preferred.
-        The preference is as follows:
-        First and foremost, candidates with allowed (matching) hashes are
-        always preferred over candidates without matching hashes. This is
-        because e.g. if the only candidate with an allowed hash is yanked,
-        we still want to use that candidate.
-        Second, excepting hash considerations, candidates that have been
-        yanked (in the sense of PEP 592) are always less preferred than
-        candidates that haven't been yanked. Then:
-        If not finding wheels, they are sorted by version only.
-        If finding wheels, then the sort order is by version, then:
-          1. existing installs
-          2. wheels ordered via Wheel.support_index_min(self._supported_tags)
-          3. source archives
-        If prefer_binary was set, then all wheels are sorted above sources.
-        Note: it was considered to embed this logic into the Link
-              comparison operators, but then different sdist links
-              with the same version would have to be considered equal
-        """
-        support_num = len(self._env.supported_tags)
-        build_tag = ()
-        binary_preference = 0
-        if link.is_wheel:
-            wheel = Wheel(link.filename)
-            if not wheel.is_supported_by_environment(self._env):
-                raise RuntimeError(
-                    "{} is not a supported wheel for this platform. It "
-                    "can't be sorted.".format(wheel.filename)
-                )
-
-            # TODO: Binary preference
-            pri = -(wheel.get_minimum_supported_index(self._env.supported_tags))
-            if wheel.build_tag is not None:
-                match = re.match(r"^(\d+)(.*)$", wheel.build_tag)
-                build_tag_groups = match.groups()
-                build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
-        else:  # sdist
-            pri = -support_num
-
-        has_allowed_hash = int(self._is_link_hash_allowed_for_package(link, package))
-
-        # TODO: Proper yank value
-        yank_value = 0
-
-        return (
-            has_allowed_hash,
-            yank_value,
-            binary_preference,
-            package.version,
-            build_tag,
-            pri,
-        )
-
-    def _is_link_hash_allowed_for_package(
-        self, link, package
-    ):  # type: (Link, Package) -> bool
-        if not link.hash:
-            return True
-
-        h = link.hash_name + ":" + link.hash
-
-        return h in {f["hash"] for f in package.files}
diff --git a/vendor/poetry/poetry/installation/executor.py b/vendor/poetry/poetry/installation/executor.py
deleted file mode 100644
index cf4856ec..00000000
--- a/vendor/poetry/poetry/installation/executor.py
+++ /dev/null
@@ -1,694 +0,0 @@
-# -*- coding: utf-8 -*-
-from __future__ import division
-
-import itertools
-import os
-import threading
-
-from concurrent.futures import ThreadPoolExecutor
-from concurrent.futures import wait
-from subprocess import CalledProcessError
-
-from poetry.core.packages.file_dependency import FileDependency
-from poetry.core.packages.utils.link import Link
-from poetry.core.packages.utils.utils import url_to_path
-from poetry.core.pyproject.toml import PyProjectTOML
-from poetry.io.null_io import NullIO
-from poetry.utils._compat import PY2
-from poetry.utils._compat import WINDOWS
-from poetry.utils._compat import OrderedDict
-from poetry.utils._compat import Path
-from poetry.utils._compat import cpu_count
-from poetry.utils._compat import decode
-from poetry.utils.env import EnvCommandError
-from poetry.utils.helpers import safe_rmtree
-
-from .authenticator import Authenticator
-from .chef import Chef
-from .chooser import Chooser
-from .operations.install import Install
-from .operations.operation import Operation
-from .operations.uninstall import Uninstall
-from .operations.update import Update
-
-
-class Executor(object):
-    def __init__(self, env, pool, config, io, parallel=None):
-        self._env = env
-        self._io = io
-        self._dry_run = False
-        self._enabled = True
-        self._verbose = False
-        self._authenticator = Authenticator(config, self._io)
-        self._chef = Chef(config, self._env)
-        self._chooser = Chooser(pool, self._env)
-
-        if parallel is None:
-            parallel = config.get("installer.parallel", True)
-
-        if parallel and not (PY2 and WINDOWS):
-            # This should be handled directly by ThreadPoolExecutor;
-            # however, on some systems the number of CPUs cannot be determined
-            # (cpu_count() raises NotImplementedError), in which case we
-            # assume the system has a single CPU.
-            try:
-                self._max_workers = cpu_count() + 4
-            except NotImplementedError:
-                self._max_workers = 5
-        else:
-            self._max_workers = 1
-
-        self._executor = ThreadPoolExecutor(max_workers=self._max_workers)
-        self._total_operations = 0
-        self._executed_operations = 0
-        self._executed = {"install": 0, "update": 0, "uninstall": 0}
-        self._skipped = {"install": 0, "update": 0, "uninstall": 0}
-        self._sections = OrderedDict()
-        self._lock = threading.Lock()
-        self._shutdown = False
-
-    @property
-    def installations_count(self):  # type: () -> int
-        return self._executed["install"]
-
-    @property
-    def updates_count(self):  # type: () -> int
-        return self._executed["update"]
-
-    @property
-    def removals_count(self):  # type: () -> int
-        return self._executed["uninstall"]
-
-    def supports_fancy_output(self):  # type: () -> bool
-        return self._io.supports_ansi() and not self._dry_run
-
-    def disable(self):
-        self._enabled = False
-
-        return self
-
-    def dry_run(self, dry_run=True):
-        self._dry_run = dry_run
-
-        return self
-
-    def verbose(self, verbose=True):
-        self._verbose = verbose
-
-        return self
-
-    def execute(self, operations):  # type: (List[Operation]) -> int
-        self._total_operations = len(operations)
-        for job_type in self._executed:
-            self._executed[job_type] = 0
-            self._skipped[job_type] = 0
-
-        if operations and (self._enabled or self._dry_run):
-            self._display_summary(operations)
-
-        # We group operations by priority
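-        # (itertools.groupby only merges adjacent items, so this assumes
-        # the solver yields operations already ordered by priority.)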
-        groups = itertools.groupby(operations, key=lambda o: -o.priority)
-        self._sections = OrderedDict()
-        for _, group in groups:
-            tasks = []
-            serial_operations = []
-            for operation in group:
-                if self._shutdown:
-                    break
-
-                # Some operations are unsafe, we must execute them serially in a group
-                # https://github.com/python-poetry/poetry/issues/3086
-                # https://github.com/python-poetry/poetry/issues/2658
-                #
-                # We need to explicitly check source type here, see:
-                # https://github.com/python-poetry/poetry-core/pull/98
-                is_parallel_unsafe = operation.job_type == "uninstall" or (
-                    operation.package.develop
-                    and operation.package.source_type in {"directory", "git"}
-                )
-                if not operation.skipped and is_parallel_unsafe:
-                    serial_operations.append(operation)
-                    continue
-
-                tasks.append(self._executor.submit(self._execute_operation, operation))
-
-            try:
-                wait(tasks)
-
-                for operation in serial_operations:
-                    wait([self._executor.submit(self._execute_operation, operation)])
-
-            except KeyboardInterrupt:
-                self._shutdown = True
-
-            if self._shutdown:
-                # Cancel any queued tasks so that no further operations execute
-                [task.cancel() for task in tasks]
-                self._executor.shutdown(wait=True)
-
-                break
-
-        return 1 if self._shutdown else 0
-
-    def _write(self, operation, line):
-        if not self.supports_fancy_output() or not self._should_write_operation(
-            operation
-        ):
-            return
-
-        if self._io.is_debug():
-            with self._lock:
-                section = self._sections[id(operation)]
-                section.write_line(line)
-
-            return
-
-        with self._lock:
-            section = self._sections[id(operation)]
-            section.output.clear()
-            section.write(line)
-
-    def _execute_operation(self, operation):
-        try:
-            if self.supports_fancy_output():
-                if id(operation) not in self._sections:
-                    if self._should_write_operation(operation):
-                        with self._lock:
-                            self._sections[id(operation)] = self._io.section()
-                            self._sections[id(operation)].write_line(
-                                "  • {message}: Pending...".format(
-                                    message=self.get_operation_message(operation),
-                                ),
-                            )
-            else:
-                if self._should_write_operation(operation):
-                    if not operation.skipped:
-                        self._io.write_line(
-                            "  • {message}".format(
-                                message=self.get_operation_message(operation),
-                            ),
-                        )
-                    else:
-                        self._io.write_line(
-                            "  • {message}: "
-                            "Skipped "
-                            "for the following reason: "
-                            "{reason}".format(
-                                message=self.get_operation_message(operation),
-                                reason=operation.skip_reason,
-                            )
-                        )
-
-            try:
-                result = self._do_execute_operation(operation)
-            except EnvCommandError as e:
-                if e.e.returncode == -2:
-                    result = -2
-                else:
-                    raise
-
-            # If we have a result of -2 it means a KeyboardInterrupt
-            # in any python subprocess, so we raise a KeyboardInterrupt
-            # error to be picked up by the error handler.
-            if result == -2:
-                raise KeyboardInterrupt
-        except Exception as e:
-            try:
-                from clikit.ui.components.exception_trace import ExceptionTrace
-
-                if not self.supports_fancy_output():
-                    io = self._io
-                else:
-                    message = "  • {message}: Failed".format(
-                        message=self.get_operation_message(operation, error=True),
-                    )
-                    self._write(operation, message)
-                    io = self._sections.get(id(operation), self._io)
-
-                with self._lock:
-                    trace = ExceptionTrace(e)
-                    trace.render(io)
-                    io.write_line("")
-            finally:
-                with self._lock:
-                    self._shutdown = True
-        except KeyboardInterrupt:
-            try:
-                message = "  • {message}: Cancelled".format(
-                    message=self.get_operation_message(operation, warning=True),
-                )
-                if not self.supports_fancy_output():
-                    self._io.write_line(message)
-                else:
-                    self._write(operation, message)
-            finally:
-                with self._lock:
-                    self._shutdown = True
-
-    def _do_execute_operation(self, operation):
-        method = operation.job_type
-
-        operation_message = self.get_operation_message(operation)
-        if operation.skipped:
-            if self.supports_fancy_output():
-                self._write(
-                    operation,
-                    "  • {message}: "
-                    "Skipped "
-                    "for the following reason: "
-                    "{reason}".format(
-                        message=operation_message, reason=operation.skip_reason,
-                    ),
-                )
-
-            self._skipped[operation.job_type] += 1
-
-            return 0
-
-        if not self._enabled or self._dry_run:
-            self._io.write_line(
-                "  • {message}".format(
-                    message=operation_message,
-                )
-            )
-
-            return 0
-
-        result = getattr(self, "_execute_{}".format(method))(operation)
-
-        if result != 0:
-            return result
-
-        message = "  • {message}".format(
-            message=self.get_operation_message(operation, done=True),
-        )
-        self._write(operation, message)
-
-        self._increment_operations_count(operation, True)
-
-        return result
-
-    def _increment_operations_count(self, operation, executed):
-        with self._lock:
-            if executed:
-                self._executed_operations += 1
-                self._executed[operation.job_type] += 1
-            else:
-                self._skipped[operation.job_type] += 1
-
-    def run_pip(self, *args, **kwargs):  # type: (...) -> int
-        try:
-            self._env.run_pip(*args, **kwargs)
-        except EnvCommandError as e:
-            output = decode(e.e.output)
-            if (
-                "KeyboardInterrupt" in output
-                or "ERROR: Operation cancelled by user" in output
-            ):
-                return -2
-
-            raise
-
-        return 0
-
-    def get_operation_message(self, operation, done=False, error=False, warning=False):
-        base_tag = "fg=default"
-        operation_color = "c2"
-        source_operation_color = "c2"
-        package_color = "c1"
-
-        if error:
-            operation_color = "error"
-        elif warning:
-            operation_color = "warning"
-        elif done:
-            operation_color = "success"
-
-        if operation.skipped:
-            base_tag = "fg=default;options=dark"
-            operation_color += "_dark"
-            source_operation_color += "_dark"
-            package_color += "_dark"
-
-        if operation.job_type == "install":
-            return "<{}>Installing <{}>{}</{}> (<{}>{}</>)".format(
-                base_tag,
-                package_color,
-                operation.package.name,
-                package_color,
-                operation_color,
-                operation.package.full_pretty_version,
-            )
-
-        if operation.job_type == "uninstall":
-            return "<{}>Removing <{}>{}</{}> (<{}>{}</>)".format(
-                base_tag,
-                package_color,
-                operation.package.name,
-                package_color,
-                operation_color,
-                operation.package.full_pretty_version,
-            )
-
-        if operation.job_type == "update":
-            return "<{}>Updating <{}>{}</{}> (<{}>{}</{}> -> <{}>{}</>)".format(
-                base_tag,
-                package_color,
-                operation.initial_package.name,
-                package_color,
-                source_operation_color,
-                operation.initial_package.full_pretty_version,
-                source_operation_color,
-                operation_color,
-                operation.target_package.full_pretty_version,
-            )
-
-        return ""
-
-    def _display_summary(self, operations):
-        installs = 0
-        updates = 0
-        uninstalls = 0
-        skipped = 0
-        for op in operations:
-            if op.skipped:
-                skipped += 1
-                continue
-
-            if op.job_type == "install":
-                installs += 1
-            elif op.job_type == "update":
-                updates += 1
-            elif op.job_type == "uninstall":
-                uninstalls += 1
-
-        if not installs and not updates and not uninstalls and not self._verbose:
-            self._io.write_line("")
-            self._io.write_line("No dependencies to install or update")
-
-            return
-
-        self._io.write_line("")
-        self._io.write_line(
-            "Package operations: "
-            "{} install{}, "
-            "{} update{}, "
-            "{} removal{}"
-            "{}".format(
-                installs,
-                "" if installs == 1 else "s",
-                updates,
-                "" if updates == 1 else "s",
-                uninstalls,
-                "" if uninstalls == 1 else "s",
-                ", {} skipped".format(skipped)
-                if skipped and self._verbose
-                else "",
-            )
-        )
-        self._io.write_line("")
-
-    def _execute_install(self, operation):  # type: (Install) -> None
-        return self._install(operation)
-
-    def _execute_update(self, operation):  # type: (Update) -> None
-        return self._update(operation)
-
-    def _execute_uninstall(self, operation):  # type: (Uninstall) -> None
-        message = "  • {message}: Removing...".format(
-            message=self.get_operation_message(operation),
-        )
-        self._write(operation, message)
-
-        return self._remove(operation)
-
-    def _install(self, operation):
-        package = operation.package
-        if package.source_type == "directory":
-            return self._install_directory(operation)
-
-        if package.source_type == "git":
-            return self._install_git(operation)
-
-        if package.source_type == "file":
-            archive = self._prepare_file(operation)
-        elif package.source_type == "url":
-            archive = self._download_link(operation, Link(package.source_url))
-        else:
-            archive = self._download(operation)
-
-        operation_message = self.get_operation_message(operation)
-        message = "  • {message}: Installing...".format(
-            message=operation_message,
-        )
-        self._write(operation, message)
-
-        args = ["install", "--no-deps", str(archive)]
-        if operation.job_type == "update":
-            args.insert(2, "-U")
-
-        return self.run_pip(*args)
-
-    def _update(self, operation):
-        return self._install(operation)
-
-    def _remove(self, operation):
-        package = operation.package
-
-        # If we have a VCS package, remove its source directory
-        if package.source_type == "git":
-            src_dir = self._env.path / "src" / package.name
-            if src_dir.exists():
-                safe_rmtree(str(src_dir))
-
-        try:
-            return self.run_pip("uninstall", package.name, "-y")
-        except CalledProcessError as e:
-            if "not installed" in str(e):
-                return 0
-
-            raise
-
-    def _prepare_file(self, operation):
-        package = operation.package
-
-        message = "  • {message}: Preparing...".format(
-            message=self.get_operation_message(operation),
-        )
-        self._write(operation, message)
-
-        archive = Path(package.source_url)
-        if not Path(package.source_url).is_absolute() and package.root_dir:
-            archive = package.root_dir / archive
-
-        archive = self._chef.prepare(archive)
-
-        return archive
-
-    def _install_directory(self, operation):
-        from poetry.factory import Factory
-
-        package = operation.package
-        operation_message = self.get_operation_message(operation)
-
-        message = "  • {message}: Building...".format(
-            message=operation_message,
-        )
-        self._write(operation, message)
-
-        if package.root_dir:
-            req = os.path.join(str(package.root_dir), package.source_url)
-        else:
-            req = os.path.realpath(package.source_url)
-
-        args = ["install", "--no-deps", "-U"]
-
-        pyproject = PyProjectTOML(os.path.join(req, "pyproject.toml"))
-
-        if pyproject.is_poetry_project():
-            # Even if there is a build system specified,
-            # some versions of pip (< 19.0.0) don't understand it,
-            # so we need to check the version of pip to know
-            # whether we can rely on the build system
-            legacy_pip = self._env.pip_version < self._env.pip_version.__class__(
-                19, 0, 0
-            )
-
-            try:
-                package_poetry = Factory().create_poetry(pyproject.file.path.parent)
-            except RuntimeError:
-                package_poetry = None
-
-            if package_poetry is not None:
-                if package.develop and not package_poetry.package.build_script:
-                    from poetry.masonry.builders.editable import EditableBuilder
-
-                    # This is a Poetry package in editable mode
-                    # we can use the EditableBuilder without going through pip
-                    # to install it, unless it has a build script.
-                    builder = EditableBuilder(package_poetry, self._env, NullIO())
-                    builder.build()
-
-                    return 0
-                elif legacy_pip or package_poetry.package.build_script:
-                    from poetry.core.masonry.builders.sdist import SdistBuilder
-
-                    # We need to rely on creating a temporary setup.py file,
-                    # since this version of pip does not support build systems.
-                    # We also need it for non-PEP-517 packages.
-                    builder = SdistBuilder(package_poetry)
-
-                    with builder.setup_py():
-                        if package.develop:
-                            args.append("-e")
-
-                        args.append(req)
-
-                        return self.run_pip(*args)
-
-        if package.develop:
-            args.append("-e")
-
-        args.append(req)
-
-        return self.run_pip(*args)
-
-    def _install_git(self, operation):
-        from poetry.core.vcs import Git
-
-        package = operation.package
-        operation_message = self.get_operation_message(operation)
-
-        message = "  • {message}: Cloning...".format(
-            message=operation_message,
-        )
-        self._write(operation, message)
-
-        src_dir = self._env.path / "src" / package.name
-        if src_dir.exists():
-            safe_rmtree(str(src_dir))
-
-        src_dir.parent.mkdir(exist_ok=True)
-
-        git = Git()
-        git.clone(package.source_url, src_dir)
-
-        reference = package.source_resolved_reference
-        if not reference:
-            reference = package.source_reference
-
-        git.checkout(reference, src_dir)
-
-        # Now we just need to install from the source directory
-        package._source_url = str(src_dir)
-
-        return self._install_directory(operation)
-
-    def _download(self, operation):  # type: (Operation) -> Path
-        link = self._chooser.choose_for(operation.package)
-
-        return self._download_link(operation, link)
-
-    def _download_link(self, operation, link):
-        package = operation.package
-
-        archive = self._chef.get_cached_archive_for_link(link)
-        if archive is link:
-            # No cached distribution was found, so we download and prepare it
-            try:
-                archive = self._download_archive(operation, link)
-            except BaseException:
-                cache_directory = self._chef.get_cache_directory_for_link(link)
-                cached_file = cache_directory.joinpath(link.filename)
-                # We can't use unlink(missing_ok=True) because it's not available
-                # in pathlib2 for Python 2.7
-                if cached_file.exists():
-                    cached_file.unlink()
-
-                raise
-
-            # TODO: Check readability of the created archive
-
-            if not link.is_wheel:
-                archive = self._chef.prepare(archive)
-
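-        # Verify the downloaded (or cached) archive against every hash
-        # algorithm pinned in poetry.lock; disjoint sets mean the artifact
-        # does not match the lock file, so we abort.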
-        if package.files:
-            hashes = {f["hash"] for f in package.files}
-            hash_types = {h.split(":")[0] for h in hashes}
-            archive_hashes = set()
-            archive_path = (
-                url_to_path(archive.url) if isinstance(archive, Link) else archive
-            )
-            for hash_type in hash_types:
-                archive_hashes.add(
-                    "{}:{}".format(
-                        hash_type,
-                        FileDependency(package.name, archive_path).hash(hash_type),
-                    )
-                )
-
-            if archive_hashes.isdisjoint(hashes):
-                raise RuntimeError(
-                    "Invalid hashes ({}) for {} using archive {}. Expected one of {}.".format(
-                        ", ".join(sorted(archive_hashes)),
-                        package,
-                        archive_path.name,
-                        ", ".join(sorted(hashes)),
-                    )
-                )
-
-        return archive
-
-    def _download_archive(self, operation, link):  # type: (Operation, Link) -> Path
-        response = self._authenticator.request(
-            "get", link.url, stream=True, io=self._sections.get(id(operation), self._io)
-        )
-        wheel_size = response.headers.get("content-length")
-        operation_message = self.get_operation_message(operation)
-        message = "  • {message}: Downloading...".format(
-            message=operation_message,
-        )
-        progress = None
-        if self.supports_fancy_output():
-            if wheel_size is None:
-                self._write(operation, message)
-            else:
-                from clikit.ui.components.progress_bar import ProgressBar
-
-                progress = ProgressBar(
-                    self._sections[id(operation)].output, max=int(wheel_size)
-                )
-                progress.set_format(message + " %percent%%")
-
-        if progress:
-            with self._lock:
-                progress.start()
-
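-        # Stream the response body into the cache location in 4 KiB chunks,
-        # updating the progress bar under the lock as bytes arrive.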
-        done = 0
-        archive = self._chef.get_cache_directory_for_link(link) / link.filename
-        archive.parent.mkdir(parents=True, exist_ok=True)
-        with archive.open("wb") as f:
-            for chunk in response.iter_content(chunk_size=4096):
-                if not chunk:
-                    break
-
-                done += len(chunk)
-
-                if progress:
-                    with self._lock:
-                        progress.set_progress(done)
-
-                f.write(chunk)
-
-        if progress:
-            with self._lock:
-                progress.finish()
-
-        return archive
-
-    def _should_write_operation(self, operation):  # type: (Operation) -> bool
-        if not operation.skipped:
-            return True
-
-        return self._dry_run or self._verbose
diff --git a/vendor/poetry/poetry/installation/installer.py b/vendor/poetry/poetry/installation/installer.py
deleted file mode 100644
index 8cef65d5..00000000
--- a/vendor/poetry/poetry/installation/installer.py
+++ /dev/null
@@ -1,558 +0,0 @@
-from typing import List
-from typing import Optional
-from typing import Union
-
-from clikit.api.io import IO
-
-from poetry.config.config import Config
-from poetry.core.packages.project_package import ProjectPackage
-from poetry.io.null_io import NullIO
-from poetry.packages import Locker
-from poetry.repositories import Pool
-from poetry.repositories import Repository
-from poetry.repositories.installed_repository import InstalledRepository
-from poetry.utils.extras import get_extra_package_names
-from poetry.utils.helpers import canonicalize_name
-
-from .base_installer import BaseInstaller
-from .executor import Executor
-from .operations import Install
-from .operations import Uninstall
-from .operations import Update
-from .operations.operation import Operation
-from .pip_installer import PipInstaller
-
-
-class Installer:
-    def __init__(
-        self,
-        io,  # type: IO
-        env,
-        package,  # type: ProjectPackage
-        locker,  # type: Locker
-        pool,  # type: Pool
-        config,  # type: Config
-        installed=None,  # type: Union[InstalledRepository, None]
-        executor=None,  # type: Optional[Executor]
-    ):
-        self._io = io
-        self._env = env
-        self._package = package
-        self._locker = locker
-        self._pool = pool
-
-        self._dry_run = False
-        self._remove_untracked = False
-        self._update = False
-        self._verbose = False
-        self._write_lock = True
-        self._dev_mode = True
-        self._execute_operations = True
-        self._lock = False
-
-        self._whitelist = []
-
-        self._extras = []
-
-        if executor is None:
-            executor = Executor(self._env, self._pool, config, self._io)
-
-        self._executor = executor
-        self._use_executor = False
-
-        self._installer = self._get_installer()
-        if installed is None:
-            installed = self._get_installed()
-
-        self._installed_repository = installed
-
-    @property
-    def executor(self):
-        return self._executor
-
-    @property
-    def installer(self):
-        return self._installer
-
-    def set_package(self, package):  # type: (ProjectPackage) -> Installer
-        self._package = package
-
-        return self
-
-    def set_locker(self, locker):  # type: (Locker) -> Installer
-        self._locker = locker
-
-        return self
-
-    def run(self):
-        # Check if refresh
-        if not self._update and self._lock and self._locker.is_locked():
-            return self._do_refresh()
-
-        # Force update if there is no lock file present
-        if not self._update and not self._locker.is_locked():
-            self._update = True
-
-        if self.is_dry_run():
-            self.verbose(True)
-            self._write_lock = False
-            self._execute_operations = False
-
-        local_repo = Repository()
-
-        return self._do_install(local_repo)
-
-    def dry_run(self, dry_run=True):  # type: (bool) -> Installer
-        self._dry_run = dry_run
-        self._executor.dry_run(dry_run)
-
-        return self
-
-    def is_dry_run(self):  # type: () -> bool
-        return self._dry_run
-
-    def remove_untracked(self, remove_untracked=True):  # type: (bool) -> Installer
-        self._remove_untracked = remove_untracked
-
-        return self
-
-    def is_remove_untracked(self):  # type: () -> bool
-        return self._remove_untracked
-
-    def verbose(self, verbose=True):  # type: (bool) -> Installer
-        self._verbose = verbose
-        self._executor.verbose(verbose)
-
-        return self
-
-    def is_verbose(self):  # type: () -> bool
-        return self._verbose
-
-    def dev_mode(self, dev_mode=True):  # type: (bool) -> Installer
-        self._dev_mode = dev_mode
-
-        return self
-
-    def is_dev_mode(self):  # type: () -> bool
-        return self._dev_mode
-
-    def update(self, update=True):  # type: (bool) -> Installer
-        self._update = update
-
-        return self
-
-    def lock(self, update=True):  # type: (bool) -> Installer
-        """
-        Prepare the installer for locking only.
-        """
-        self.update(update=update)
-        self.execute_operations(False)
-        self._lock = True
-
-        return self
-
-    def is_updating(self):  # type: () -> bool
-        return self._update
-
-    def execute_operations(self, execute=True):  # type: (bool) -> Installer
-        self._execute_operations = execute
-
-        if not execute:
-            self._executor.disable()
-
-        return self
-
-    def whitelist(self, packages):  # type: (dict) -> Installer
-        self._whitelist = [canonicalize_name(p) for p in packages]
-
-        return self
-
-    def extras(self, extras):  # type: (list) -> Installer
-        self._extras = extras
-
-        return self
-
-    def use_executor(self, use_executor=True):  # type: (bool) -> Installer
-        self._use_executor = use_executor
-
-        return self
-
-    def _do_refresh(self):
-        from poetry.puzzle import Solver
-
-        # Checking extras
-        for extra in self._extras:
-            if extra not in self._package.extras:
-                raise ValueError("Extra [{}] is not specified.".format(extra))
-
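-        # A refresh re-solves strictly from the lock file (passed as both
-        # the installed and the locked repository) so the lock can be
-        # rewritten without changing any pinned versions.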
-        locked_repository = self._locker.locked_repository(True)
-        solver = Solver(
-            self._package,
-            self._pool,
-            locked_repository,
-            locked_repository,
-            self._io,  # noqa
-        )
-
-        ops = solver.solve(use_latest=[])
-
-        local_repo = Repository()
-        self._populate_local_repo(local_repo, ops)
-
-        self._write_lock_file(local_repo, force=True)
-
-        return 0
-
-    def _do_install(self, local_repo):
-        from poetry.puzzle import Solver
-
-        locked_repository = Repository()
-        if self._update:
-            if self._locker.is_locked() and not self._lock:
-                locked_repository = self._locker.locked_repository(True)
-
-                # If no packages have been whitelisted (the ones we want to update),
-                # we whitelist every package in the lock file.
-                if not self._whitelist:
-                    for pkg in locked_repository.packages:
-                        self._whitelist.append(pkg.name)
-
-            # Checking extras
-            for extra in self._extras:
-                if extra not in self._package.extras:
-                    raise ValueError("Extra [{}] is not specified.".format(extra))
-
-            self._io.write_line("Updating dependencies")
-            solver = Solver(
-                self._package,
-                self._pool,
-                self._installed_repository,
-                locked_repository,
-                self._io,
-                remove_untracked=self._remove_untracked,
-            )
-
-            ops = solver.solve(use_latest=self._whitelist)
-        else:
-            self._io.write_line("Installing dependencies from lock file")
-
-            locked_repository = self._locker.locked_repository(True)
-
-            if not self._locker.is_fresh():
-                self._io.write_line(
-                    "<warning>"
-                    "Warning: The lock file is not up to date with "
-                    "the latest changes in pyproject.toml. "
-                    "You may be getting outdated dependencies. "
-                    "Run update to update them."
-                    "</warning>"
-                )
-
-            for extra in self._extras:
-                if extra not in self._locker.lock_data.get("extras", {}):
-                    raise ValueError("Extra [{}] is not specified.".format(extra))
-
-            # If we are installing from the lock file, filter the
-            # operations by comparing them with what is currently installed
-            ops = self._get_operations_from_lock(locked_repository)
-
-        self._populate_local_repo(local_repo, ops)
-
-        if self._update:
-            self._write_lock_file(local_repo)
-
-            if self._lock:
-                # If we are only in lock mode, no need to go any further
-                return 0
-
-        root = self._package
-        if not self.is_dev_mode():
-            root = root.clone()
-            del root.dev_requires[:]
-
-        if self._io.is_verbose():
-            self._io.write_line("")
-            self._io.write_line(
-                "Finding the necessary packages for the current system"
-            )
-
-        # We resolve again by only using the lock file
-        pool = Pool(ignore_repository_names=True)
-
-        # Making a new repo containing the packages
-        # newly resolved and the ones from the current lock file
-        repo = Repository()
-        for package in local_repo.packages + locked_repository.packages:
-            if not repo.has_package(package):
-                repo.add_package(package)
-
-        pool.add_repository(repo)
-
-        solver = Solver(
-            root,
-            pool,
-            self._installed_repository,
-            locked_repository,
-            NullIO(),
-            remove_untracked=self._remove_untracked,
-        )
-        # Everything is resolved at this point, so we no longer need
-        # to load deferred dependencies (i.e. VCS, URL and path dependencies)
-        solver.provider.load_deferred(False)
-
-        with solver.use_environment(self._env):
-            ops = solver.solve(use_latest=self._whitelist)
-
-        # We need to filter operations so that packages
-        # not compatible with the current system,
-        # or optional and not requested, are dropped
-        self._filter_operations(ops, local_repo)
-
-        # Execute operations
-        return self._execute(ops)
-
-    def _write_lock_file(self, repo, force=True):  # type: (Repository, bool) -> None
-        if force or (self._update and self._write_lock):
-            updated_lock = self._locker.set_lock_data(self._package, repo.packages)
-
-            if updated_lock:
-                self._io.write_line("")
-                self._io.write_line("Writing lock file")
-
-    def _execute(self, operations):
-        if self._use_executor:
-            return self._executor.execute(operations)
-
-        if not operations and (self._execute_operations or self._dry_run):
-            self._io.write_line("No dependencies to install or update")
-
-        if operations and (self._execute_operations or self._dry_run):
-            installs = 0
-            updates = 0
-            uninstalls = 0
-            skipped = 0
-            for op in operations:
-                if op.skipped:
-                    skipped += 1
-                elif op.job_type == "install":
-                    installs += 1
-                elif op.job_type == "update":
-                    updates += 1
-                elif op.job_type == "uninstall":
-                    uninstalls += 1
-
-            self._io.write_line("")
-            self._io.write_line(
-                "Package operations: "
-                "{} install{}, "
-                "{} update{}, "
-                "{} removal{}"
-                "{}".format(
-                    installs,
-                    "" if installs == 1 else "s",
-                    updates,
-                    "" if updates == 1 else "s",
-                    uninstalls,
-                    "" if uninstalls == 1 else "s",
-                    ", {} skipped".format(skipped)
-                    if skipped and self.is_verbose()
-                    else "",
-                )
-            )
-
-        self._io.write_line("")
-
-        for op in operations:
-            self._execute_operation(op)
-
-        return 0
-
-    def _execute_operation(self, operation):  # type: (Operation) -> None
-        """
-        Execute a given operation.
-        """
-        method = operation.job_type
-
-        getattr(self, "_execute_{}".format(method))(operation)
-
-    def _execute_install(self, operation):  # type: (Install) -> None
-        if operation.skipped:
-            if self.is_verbose() and (self._execute_operations or self.is_dry_run()):
-                self._io.write_line(
-                    "  - Skipping {} ({}) {}".format(
-                        operation.package.pretty_name,
-                        operation.package.full_pretty_version,
-                        operation.skip_reason,
-                    )
-                )
-
-            return
-
-        if self._execute_operations or self.is_dry_run():
-            self._io.write_line(
-                "  - Installing {} ({})".format(
-                    operation.package.pretty_name, operation.package.full_pretty_version
-                )
-            )
-
-        if not self._execute_operations:
-            return
-
-        self._installer.install(operation.package)
-
-    def _execute_update(self, operation):  # type: (Update) -> None
-        source = operation.initial_package
-        target = operation.target_package
-
-        if operation.skipped:
-            if self.is_verbose() and (self._execute_operations or self.is_dry_run()):
-                self._io.write_line(
-                    "  - Skipping {} ({}) {}".format(
-                        target.pretty_name,
-                        target.full_pretty_version,
-                        operation.skip_reason,
-                    )
-                )
-
-            return
-
-        if self._execute_operations or self.is_dry_run():
-            self._io.write_line(
-                "  - Updating {} ({} -> {})".format(
-                    target.pretty_name,
-                    source.full_pretty_version,
-                    target.full_pretty_version,
-                )
-            )
-
-        if not self._execute_operations:
-            return
-
-        self._installer.update(source, target)
-
-    def _execute_uninstall(self, operation):  # type: (Uninstall) -> None
-        if operation.skipped:
-            if self.is_verbose() and (self._execute_operations or self.is_dry_run()):
-                self._io.write_line(
-                    "  - Not removing {} ({}) {}".format(
-                        operation.package.pretty_name,
-                        operation.package.full_pretty_version,
-                        operation.skip_reason,
-                    )
-                )
-
-            return
-
-        if self._execute_operations or self.is_dry_run():
-            self._io.write_line(
-                "  - Removing {} ({})".format(
-                    operation.package.pretty_name, operation.package.full_pretty_version
-                )
-            )
-
-        if not self._execute_operations:
-            return
-
-        self._installer.remove(operation.package)
-
-    def _populate_local_repo(self, local_repo, ops):
-        for op in ops:
-            if isinstance(op, Uninstall):
-                continue
-            elif isinstance(op, Update):
-                package = op.target_package
-            else:
-                package = op.package
-
-            if not local_repo.has_package(package):
-                local_repo.add_package(package)
-
-    def _get_operations_from_lock(
-        self, locked_repository  # type: Repository
-    ):  # type: (...) -> List[Operation]
-        installed_repo = self._installed_repository
-        ops = []
-
-        extra_packages = self._get_extra_packages(locked_repository)
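-        # Walk the locked packages and compare them with what is installed
-        # to decide, per package, between install, update, uninstall or skip.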
-        for locked in locked_repository.packages:
-            is_installed = False
-            for installed in installed_repo.packages:
-                if locked.name == installed.name:
-                    is_installed = True
-                    if locked.category == "dev" and not self.is_dev_mode():
-                        ops.append(Uninstall(locked))
-                    elif locked.optional and locked.name not in extra_packages:
-                        # Installed but optional and not requested in extras
-                        ops.append(Uninstall(locked))
-                    elif locked.version != installed.version:
-                        ops.append(Update(installed, locked))
-
-            # If it's optional and not required by any requested extra,
-            # we do not install it
-            if locked.optional and locked.name not in extra_packages:
-                continue
-
-            op = Install(locked)
-            if is_installed:
-                op.skip("Already installed")
-
-            ops.append(op)
-
-        return ops
-
-    def _filter_operations(
-        self, ops, repo
-    ):  # type: (List[Operation], Repository) -> None
-        extra_packages = self._get_extra_packages(repo)
-        for op in ops:
-            if isinstance(op, Update):
-                package = op.target_package
-            else:
-                package = op.package
-
-            if op.job_type == "uninstall":
-                continue
-
-            if not self._env.is_valid_for_marker(package.marker):
-                op.skip("Not needed for the current environment")
-                continue
-
-            if self._update:
-                extras = {}
-                for extra, deps in self._package.extras.items():
-                    extras[extra] = [dep.name for dep in deps]
-            else:
-                extras = {}
-                for extra, deps in self._locker.lock_data.get("extras", {}).items():
-                    extras[extra] = [dep.lower() for dep in deps]
-
-            # If a package is optional and not requested
-            # in any extra we skip it
-            if package.optional:
-                if package.name not in extra_packages:
-                    op.skip("Not required")
-
-            # If the package is a dev package and dev packages
-            # are not requested, we skip it
-            if package.category == "dev" and not self.is_dev_mode():
-                op.skip("Dev dependencies not requested")
-
-    def _get_extra_packages(self, repo):  # type: (Repository) -> List[str]
-        """
-        Returns all package names required by extras.
-
-        Maybe we just let the solver handle it?
-        """
-        if self._update:
-            extras = {k: [d.name for d in v] for k, v in self._package.extras.items()}
-        else:
-            extras = self._locker.lock_data.get("extras", {})
-
-        return list(get_extra_package_names(repo.packages, extras, self._extras))
-
-    def _get_installer(self):  # type: () -> BaseInstaller
-        return PipInstaller(self._env, self._io, self._pool)
-
-    def _get_installed(self):  # type: () -> InstalledRepository
-        return InstalledRepository.load(self._env)
diff --git a/vendor/poetry/poetry/installation/noop_installer.py b/vendor/poetry/poetry/installation/noop_installer.py
deleted file mode 100644
index 0f0c6cda..00000000
--- a/vendor/poetry/poetry/installation/noop_installer.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from .base_installer import BaseInstaller
-
-
-class NoopInstaller(BaseInstaller):
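-    # Records every requested operation without touching the environment
-    # (presumably as a stand-in for dry runs and tests).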
-    def __init__(self):
-        self._installs = []
-        self._updates = []
-        self._removals = []
-
-    @property
-    def installs(self):
-        return self._installs
-
-    @property
-    def updates(self):
-        return self._updates
-
-    @property
-    def removals(self):
-        return self._removals
-
-    def install(self, package):
-        self._installs.append(package)
-
-    def update(self, source, target):
-        self._updates.append((source, target))
-
-    def remove(self, package):
-        self._removals.append(package)
diff --git a/vendor/poetry/poetry/installation/operations/__init__.py b/vendor/poetry/poetry/installation/operations/__init__.py
deleted file mode 100644
index 42573c10..00000000
--- a/vendor/poetry/poetry/installation/operations/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from .install import Install
-from .uninstall import Uninstall
-from .update import Update
diff --git a/vendor/poetry/poetry/installation/operations/install.py b/vendor/poetry/poetry/installation/operations/install.py
deleted file mode 100644
index 48097c7c..00000000
--- a/vendor/poetry/poetry/installation/operations/install.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from .operation import Operation
-
-
-class Install(Operation):
-    def __init__(self, package, reason=None, priority=0):
-        super(Install, self).__init__(reason, priority=priority)
-
-        self._package = package
-
-    @property
-    def package(self):
-        return self._package
-
-    @property
-    def job_type(self):
-        return "install"
-
-    def __str__(self):
-        return "Installing {} ({})".format(
-            self.package.pretty_name, self.format_version(self.package)
-        )
-
-    def __repr__(self):
-        return "<Install {} ({})>".format(
-            self.package.pretty_name, self.format_version(self.package)
-        )
diff --git a/vendor/poetry/poetry/installation/operations/operation.py b/vendor/poetry/poetry/installation/operations/operation.py
deleted file mode 100644
index 0c72cc8c..00000000
--- a/vendor/poetry/poetry/installation/operations/operation.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from typing import Union
-
-
-class Operation(object):
-    def __init__(
-        self, reason=None, priority=0
-    ):  # type: (Union[str, None], int) -> None
-        self._reason = reason
-
-        self._skipped = False
-        self._skip_reason = None
-        self._priority = priority
-
-    @property
-    def job_type(self):  # type: () -> str
-        raise NotImplementedError
-
-    @property
-    def reason(self):  # type: () -> str
-        return self._reason
-
-    @property
-    def skipped(self):  # type: () -> bool
-        return self._skipped
-
-    @property
-    def skip_reason(self):  # type: () -> Union[str, None]
-        return self._skip_reason
-
-    @property
-    def priority(self):  # type: () -> int
-        return self._priority
-
-    @property
-    def package(self):
-        raise NotImplementedError()
-
-    def format_version(self, package):  # type: (...) -> str
-        return package.full_pretty_version
-
-    def skip(self, reason):  # type: (str) -> Operation
-        self._skipped = True
-        self._skip_reason = reason
-
-        return self
-
-    def unskip(self):  # type: () -> Operation
-        self._skipped = False
-        self._skip_reason = None
-
-        return self
diff --git a/vendor/poetry/poetry/installation/operations/uninstall.py b/vendor/poetry/poetry/installation/operations/uninstall.py
deleted file mode 100644
index b7e40bc6..00000000
--- a/vendor/poetry/poetry/installation/operations/uninstall.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from .operation import Operation
-
-
-class Uninstall(Operation):
-    def __init__(self, package, reason=None, priority=float("inf")):
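-        # A default priority of infinity appears intended to make uninstalls
-        # sort after installs and updates when operations are ordered by
-        # priority.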
-        super(Uninstall, self).__init__(reason, priority=priority)
-
-        self._package = package
-
-    @property
-    def package(self):
-        return self._package
-
-    @property
-    def job_type(self):
-        return "uninstall"
-
-    def __str__(self):
-        return "Uninstalling {} ({})".format(
-            self.package.pretty_name, self.format_version(self._package)
-        )
-
-    def __repr__(self):
-        return "".format(
-            self.package.pretty_name, self.format_version(self.package)
-        )
diff --git a/vendor/poetry/poetry/installation/operations/update.py b/vendor/poetry/poetry/installation/operations/update.py
deleted file mode 100644
index 87803fd7..00000000
--- a/vendor/poetry/poetry/installation/operations/update.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from .operation import Operation
-
-
-class Update(Operation):
-    def __init__(self, initial, target, reason=None, priority=0):
-        self._initial_package = initial
-        self._target_package = target
-
-        super(Update, self).__init__(reason, priority=priority)
-
-    @property
-    def initial_package(self):
-        return self._initial_package
-
-    @property
-    def target_package(self):
-        return self._target_package
-
-    @property
-    def package(self):
-        return self._target_package
-
-    @property
-    def job_type(self):
-        return "update"
-
-    def __str__(self):
-        return "Updating {} ({}) to {} ({})".format(
-            self.initial_package.pretty_name,
-            self.format_version(self.initial_package),
-            self.target_package.pretty_name,
-            self.format_version(self.target_package),
-        )
-
-    def __repr__(self):
-        return "".format(
-            self.initial_package.pretty_name,
-            self.format_version(self.initial_package),
-            self.target_package.pretty_name,
-            self.format_version(self.target_package),
-        )
diff --git a/vendor/poetry/poetry/installation/pip_installer.py b/vendor/poetry/poetry/installation/pip_installer.py
deleted file mode 100644
index 7f4f6793..00000000
--- a/vendor/poetry/poetry/installation/pip_installer.py
+++ /dev/null
@@ -1,271 +0,0 @@
-import os
-import tempfile
-
-from subprocess import CalledProcessError
-
-from clikit.api.io import IO
-
-from poetry.core.pyproject.toml import PyProjectTOML
-from poetry.repositories.pool import Pool
-from poetry.utils._compat import encode
-from poetry.utils.env import Env
-from poetry.utils.helpers import safe_rmtree
-
-from .base_installer import BaseInstaller
-
-
-try:
-    import urllib.parse as urlparse
-except ImportError:
-    import urlparse
-
-
-class PipInstaller(BaseInstaller):
-    def __init__(self, env, io, pool):  # type: (Env, IO, Pool) -> None
-        self._env = env
-        self._io = io
-        self._pool = pool
-
-    def install(self, package, update=False):
-        if package.source_type == "directory":
-            self.install_directory(package)
-
-            return
-
-        if package.source_type == "git":
-            self.install_git(package)
-
-            return
-
-        args = ["install", "--no-deps"]
-
-        if (
-            package.source_type not in {"git", "directory", "file", "url"}
-            and package.source_url
-        ):
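-            # The package is pinned to a custom repository: point pip at it
-            # with --index-url, adding the pool's default repository as an
-            # --extra-index-url fallback when it differs.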
-            repository = self._pool.repository(package.source_reference)
-            parsed = urlparse.urlparse(package.source_url)
-            if parsed.scheme == "http":
-                self._io.error(
-                    "    Installing from unsecure host: {}".format(
-                        parsed.hostname
-                    )
-                )
-                args += ["--trusted-host", parsed.hostname]
-
-            if repository.cert:
-                args += ["--cert", str(repository.cert)]
-
-            if repository.client_cert:
-                args += ["--client-cert", str(repository.client_cert)]
-
-            index_url = repository.authenticated_url
-
-            args += ["--index-url", index_url]
-            if self._pool.has_default():
-                if repository.name != self._pool.repositories[0].name:
-                    args += [
-                        "--extra-index-url",
-                        self._pool.repositories[0].authenticated_url,
-                    ]
-
-        if update:
-            args.append("-U")
-
-        if package.files and not package.source_url:
-            # Format as a requirements.txt
-            # We need to create a requirements.txt file
-            # for each package in order to check hashes.
-            # This is far from optimal but we do not have any
-            # other choice since this is the only way for pip
-            # to verify hashes.
-            req = self.create_temporary_requirement(package)
-            args += ["-r", req]
-
-            try:
-                self.run(*args)
-            finally:
-                os.unlink(req)
-        else:
-            req = self.requirement(package)
-            if not isinstance(req, list):
-                args.append(req)
-            else:
-                args += req
-
-            self.run(*args)
-
-    def update(self, package, target):
-        if package.source_type != target.source_type:
-            # If the source type has changed, we remove the current
-            # package to avoid perpetual updates in some cases
-            self.remove(package)
-
-        self.install(target, update=True)
-
-    def remove(self, package):
-        try:
-            self.run("uninstall", package.name, "-y")
-        except CalledProcessError as e:
-            if "not installed" in str(e):
-                return
-
-            raise
-
-        # This is a workaround for https://github.com/pypa/pip/issues/4176
-        nspkg_pth_file = self._env.site_packages.path / "{}-nspkg.pth".format(
-            package.name
-        )
-        if nspkg_pth_file.exists():
-            nspkg_pth_file.unlink()
-
-        # If we have a VCS package, remove its source directory
-        if package.source_type == "git":
-            src_dir = self._env.path / "src" / package.name
-            if src_dir.exists():
-                safe_rmtree(str(src_dir))
-
-    def run(self, *args, **kwargs):  # type: (...) -> str
-        return self._env.run_pip(*args, **kwargs)
-
-    def requirement(self, package, formatted=False):
-        if formatted and not package.source_type:
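-            # Emit a requirements.txt style line with " --hash <type>:<digest>"
-            # options appended; a requirements file is the only place pip
-            # accepts hashes for verification.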
-            req = "{}=={}".format(package.name, package.version)
-            for f in package.files:
-                hash_type = "sha256"
-                h = f["hash"]
-                if ":" in h:
-                    hash_type, h = h.split(":")
-
-                req += " --hash {}:{}".format(hash_type, h)
-
-            req += "\n"
-
-            return req
-
-        if package.source_type in ["file", "directory"]:
-            if package.root_dir:
-                req = (package.root_dir / package.source_url).as_posix()
-            else:
-                req = os.path.realpath(package.source_url)
-
-            if package.develop and package.source_type == "directory":
-                req = ["-e", req]
-
-            return req
-
-        if package.source_type == "git":
-            req = "git+{}@{}#egg={}".format(
-                package.source_url, package.source_reference, package.name
-            )
-
-            if package.develop:
-                req = ["-e", req]
-
-            return req
-
-        if package.source_type == "url":
-            return "{}#egg={}".format(package.source_url, package.name)
-
-        return "{}=={}".format(package.name, package.version)
-
-    def create_temporary_requirement(self, package):
-        fd, name = tempfile.mkstemp(
-            "reqs.txt", "{}-{}".format(package.name, package.version)
-        )
-
-        try:
-            os.write(fd, encode(self.requirement(package, formatted=True)))
-        finally:
-            os.close(fd)
-
-        return name
-
-    def install_directory(self, package):
-        from poetry.factory import Factory
-        from poetry.io.null_io import NullIO
-
-        if package.root_dir:
-            req = (package.root_dir / package.source_url).as_posix()
-        else:
-            req = os.path.realpath(package.source_url)
-
-        args = ["install", "--no-deps", "-U"]
-
-        pyproject = PyProjectTOML(os.path.join(req, "pyproject.toml"))
-
-        if pyproject.is_poetry_project():
-            # Even if there is a build system specified
-            # some versions of pip (< 19.0.0) don't understand it
-            # so we need to check the version of pip to know
-            # if we can rely on the build system
-            legacy_pip = self._env.pip_version < self._env.pip_version.__class__(
-                19, 0, 0
-            )
-
-            try:
-                package_poetry = Factory().create_poetry(pyproject.file.path.parent)
-            except RuntimeError:
-                package_poetry = None
-
-            if package_poetry is not None:
-                if package.develop and not package_poetry.package.build_script:
-                    from poetry.masonry.builders.editable import EditableBuilder
-
-                    # This is a Poetry package in editable mode
-                    # we can use the EditableBuilder without going through pip
-                    # to install it, unless it has a build script.
-                    builder = EditableBuilder(package_poetry, self._env, NullIO())
-                    builder.build()
-
-                    return 0
-                elif legacy_pip or package_poetry.package.build_script:
-                    from poetry.core.masonry.builders.sdist import SdistBuilder
-
-                    # We need to rely on creating a temporary setup.py
-                    # file since the version of pip does not support
-                    # build-systems
-                    # We also need it for non-PEP-517 packages
-                    builder = SdistBuilder(package_poetry)
-
-                    with builder.setup_py():
-                        if package.develop:
-                            args.append("-e")
-
-                        args.append(req)
-
-                        return self.run(*args)
-
-        if package.develop:
-            args.append("-e")
-
-        args.append(req)
-
-        return self.run(*args)
-
-    def install_git(self, package):
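-        # Clone into the environment's src/ directory, check out the resolved
-        # (or declared) reference, then install it like a directory package.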
-        from poetry.core.packages import Package
-        from poetry.core.vcs import Git
-
-        src_dir = self._env.path / "src" / package.name
-        if src_dir.exists():
-            safe_rmtree(str(src_dir))
-
-        src_dir.parent.mkdir(exist_ok=True)
-
-        git = Git()
-        git.clone(package.source_url, src_dir)
-
-        reference = package.source_resolved_reference
-        if not reference:
-            reference = package.source_reference
-
-        git.checkout(reference, src_dir)
-
-        # Now we just need to install from the source directory
-        pkg = Package(package.name, package.version)
-        pkg._source_type = "directory"
-        pkg._source_url = str(src_dir)
-        pkg.develop = package.develop
-
-        self.install_directory(pkg)
diff --git a/vendor/poetry/poetry/io/null_io.py b/vendor/poetry/poetry/io/null_io.py
deleted file mode 100644
index d81cd595..00000000
--- a/vendor/poetry/poetry/io/null_io.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from cleo.io.io_mixin import IOMixin
-from clikit.io import NullIO as BaseNullIO
-
-
-class NullIO(IOMixin, BaseNullIO):
-    """
-    A wrapper around CliKit's NullIO.
-    """
-
-    def __init__(self, *args, **kwargs):
-        super(NullIO, self).__init__(*args, **kwargs)
diff --git a/vendor/poetry/poetry/json/__init__.py b/vendor/poetry/poetry/json/__init__.py
deleted file mode 100644
index d50eb7a7..00000000
--- a/vendor/poetry/poetry/json/__init__.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import json
-import os
-
-from io import open
-from typing import List
-
-import jsonschema
-
-
-SCHEMA_DIR = os.path.join(os.path.dirname(__file__), "schemas")
-
-
-class ValidationError(ValueError):
-
-    pass
-
-
-def validate_object(obj, schema_name):  # type: (dict, str) -> List[str]
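-    # Note: the bundled schema declares draft-04, but a Draft7Validator is
-    # instantiated directly, which bypasses $schema dispatch and validates
-    # under draft-7 semantics.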
-    schema = os.path.join(SCHEMA_DIR, "{}.json".format(schema_name))
-
-    if not os.path.exists(schema):
-        raise ValueError("Schema {} does not exist.".format(schema_name))
-
-    with open(schema, encoding="utf-8") as f:
-        schema = json.loads(f.read())
-
-    validator = jsonschema.Draft7Validator(schema)
-    validation_errors = sorted(validator.iter_errors(obj), key=lambda e: e.path)
-
-    errors = []
-
-    for error in validation_errors:
-        message = error.message
-        if error.path:
-            message = "[{}] {}".format(
-                ".".join(str(x) for x in error.absolute_path), message
-            )
-
-        errors.append(message)
-
-    return errors
diff --git a/vendor/poetry/poetry/json/schemas/poetry-schema.json b/vendor/poetry/poetry/json/schemas/poetry-schema.json
deleted file mode 100644
index e94b90d2..00000000
--- a/vendor/poetry/poetry/json/schemas/poetry-schema.json
+++ /dev/null
@@ -1,530 +0,0 @@
-{
-    "$schema": "http://json-schema.org/draft-04/schema#",
-    "name": "Package",
-    "type": "object",
-    "additionalProperties": false,
-    "required": [
-        "name",
-        "version",
-        "description"
-    ],
-    "properties": {
-        "name": {
-            "type": "string",
-            "description": "Package name."
-        },
-        "version": {
-            "type": "string",
-            "description": "Package version."
-        },
-        "description": {
-            "type": "string",
-            "description": "Short package description."
-        },
-        "keywords": {
-            "type": "array",
-            "items": {
-                "type": "string",
-                "description": "A tag/keyword that this package relates to."
-            }
-        },
-        "homepage": {
-            "type": "string",
-            "description": "Homepage URL for the project.",
-            "format": "uri"
-        },
-        "repository": {
-            "type": "string",
-            "description": "Repository URL for the project.",
-            "format": "uri"
-        },
-        "documentation": {
-            "type": "string",
-            "description": "Documentation URL for the project.",
-            "format": "uri"
-        },
-        "license": {
-            "type": "string",
-            "description": "License name."
-        },
-        "authors": {
-            "$ref": "#/definitions/authors"
-        },
-        "maintainers": {
-            "$ref": "#/definitions/maintainers"
-        },
-        "readme": {
-            "type": "string",
-            "description": "The path to the README file"
-        },
-        "classifiers": {
-            "type": "array",
-            "description": "A list of trove classifers."
-        },
-        "packages": {
-            "type": "array",
-            "description": "A list of packages to include in the final distribution.",
-            "items": {
-                "type": "object",
-                "description": "Information about where the package resides.",
-                "additionalProperties": false,
-                "required": [
-                    "include"
-                ],
-                "properties": {
-                    "include": {
-                        "type": "string",
-                        "description": "What to include in the package."
-                    },
-                    "from": {
-                        "type": "string",
-                        "description": "Where the source directory of the package resides."
-                    },
-                    "format": {
-                        "oneOf": [
-                            {"type": "string"},
-                            {"type":  "array", "items": {"type":  "string"}}
-                        ],
-                        "description": "The format(s) for which the package must be included."
-                    }
-                }
-            }
-        },
-        "include": {
-            "type": "array",
-            "description": "A list of files and folders to include."
-        },
-        "exclude": {
-            "type": "array",
-            "description": "A list of files and folders to exclude."
-        },
-        "dependencies": {
-            "type": "object",
-            "description": "This is a hash of package name (keys) and version constraints (values) that are required to run this package.",
-            "required": [
-                "python"
-            ],
-            "properties": {
-                "python": {
-                    "type": "string",
-                    "description": "The Python versions the package is compatible with."
-                }
-            },
-            "$ref": "#/definitions/dependencies",
-            "additionalProperties": false
-        },
-        "dev-dependencies": {
-            "type": "object",
-            "description": "This is a hash of package name (keys) and version constraints (values) that this package requires for developing it (testing tools and such).",
-            "$ref": "#/definitions/dependencies",
-            "additionalProperties": false
-        },
-        "extras": {
-            "type": "object",
-            "patternProperties": {
-                "^[a-zA-Z-_.0-9]+$": {
-                    "type": "array",
-                    "items": {
-                        "type": "string"
-                    }
-                }
-            }
-        },
-        "build": {
-            "type": "string",
-            "description": "The file used to build extensions."
-        },
-        "source": {
-            "type": "array",
-            "description": "A set of additional repositories where packages can be found.",
-            "additionalProperties": {
-                "$ref": "#/definitions/repository"
-            },
-            "items": {
-                "$ref": "#/definitions/repository"
-            }
-        },
-        "scripts": {
-            "type": "object",
-            "description": "A hash of scripts to be installed.",
-            "items": {
-                "type": "string"
-            }
-        },
-        "plugins": {
-            "type": "object",
-            "description": "A hash of hashes representing plugins",
-            "patternProperties": {
-                "^[a-zA-Z-_.0-9]+$": {
-                    "type": "object",
-                    "patternProperties": {
-                        "^[a-zA-Z-_.0-9]+$": {
-                            "type": "string"
-                        }
-                    }
-                }
-            }
-        },
-        "urls": {
-            "type": "object",
-            "patternProperties": {
-                "^.+$": {
-                    "type": "string",
-                    "description": "The full url of the custom url."
-                }
-            }
-        }
-    },
-    "definitions": {
-        "authors": {
-            "type": "array",
-            "description": "List of authors that contributed to the package. This is typically the main maintainers, not the full list.",
-            "items": {
-                "type": "string"
-            }
-        },
-        "maintainers": {
-            "type": "array",
-            "description": "List of maintainers, other than the original author(s), that upkeep the package.",
-            "items": {
-                "type": "string"
-            }
-        },
-        "dependencies": {
-            "type": "object",
-            "patternProperties": {
-                "^[a-zA-Z-_.0-9]+$": {
-                    "oneOf": [
-                        {
-                            "$ref": "#/definitions/dependency"
-                        },
-                        {
-                            "$ref": "#/definitions/long-dependency"
-                        },
-                        {
-                            "$ref": "#/definitions/git-dependency"
-                        },
-                        {
-                            "$ref": "#/definitions/file-dependency"
-                        },
-                        {
-                            "$ref": "#/definitions/path-dependency"
-                        },
-                        {
-                            "$ref": "#/definitions/url-dependency"
-                        },
-                        {
-                            "$ref": "#/definitions/multiple-constraints-dependency"
-                        }
-                    ]
-                }
-            }
-        },
-        "dependency": {
-            "type": "string",
-            "description": "The constraint of the dependency."
-        },
-        "long-dependency": {
-            "type": "object",
-            "required": [
-                "version"
-            ],
-            "additionalProperties": false,
-            "properties": {
-                "version": {
-                    "type": "string",
-                    "description": "The constraint of the dependency."
-                },
-                "python": {
-                    "type": "string",
-                    "description": "The python versions for which the dependency should be installed."
-                },
-                "platform": {
-                    "type": "string",
-                    "description": "The platform(s) for which the dependency should be installed."
-                },
-                "markers": {
-                    "type": "string",
-                    "description": "The PEP 508 compliant environment markers for which the dependency should be installed."
-                },
-                "allow-prereleases": {
-                    "type": "boolean",
-                    "description": "Whether the dependency allows prereleases or not."
-                },
-                "allows-prereleases": {
-                    "type": "boolean",
-                    "description": "Whether the dependency allows prereleases or not."
-                },
-                "optional": {
-                    "type": "boolean",
-                    "description": "Whether the dependency is optional or not."
-                },
-                "extras": {
-                    "type": "array",
-                    "description": "The required extras for this dependency.",
-                    "items": {
-                        "type": "string"
-                    }
-                },
-                "source": {
-                    "type": "string",
-                    "description": "The exclusive source used to search for this dependency."
-                }
-            }
-        },
-        "git-dependency": {
-            "type": "object",
-            "required": [
-                "git"
-            ],
-            "additionalProperties": false,
-            "properties": {
-                "git": {
-                    "type": "string",
-                    "description": "The url of the git repository.",
-                    "format": "uri"
-                },
-                "branch": {
-                    "type": "string",
-                    "description": "The branch to checkout."
-                },
-                "tag": {
-                    "type": "string",
-                    "description": "The tag to checkout."
-                },
-                "rev": {
-                    "type": "string",
-                    "description": "The revision to checkout."
-                },
-                "python": {
-                    "type": "string",
-                    "description": "The python versions for which the dependency should be installed."
-                },
-                "platform": {
-                    "type": "string",
-                    "description": "The platform(s) for which the dependency should be installed."
-                },
-                "markers": {
-                    "type": "string",
-                    "description": "The PEP 508 compliant environment markers for which the dependency should be installed."
-                },
-                "allow-prereleases": {
-                    "type": "boolean",
-                    "description": "Whether the dependency allows prereleases or not."
-                },
-                "allows-prereleases": {
-                    "type": "boolean",
-                    "description": "Whether the dependency allows prereleases or not."
-                },
-                "optional": {
-                    "type": "boolean",
-                    "description": "Whether the dependency is optional or not."
-                },
-                "extras": {
-                    "type": "array",
-                    "description": "The required extras for this dependency.",
-                    "items": {
-                        "type": "string"
-                    }
-                }
-            }
-        },
-        "file-dependency": {
-            "type": "object",
-            "required": [
-                "file"
-            ],
-            "additionalProperties": false,
-            "properties": {
-                "file": {
-                    "type": "string",
-                    "description": "The path to the file."
-                },
-                "python": {
-                    "type": "string",
-                    "description": "The python versions for which the dependency should be installed."
-                },
-                "platform": {
-                    "type": "string",
-                    "description": "The platform(s) for which the dependency should be installed."
-                },
-                "markers": {
-                    "type": "string",
-                    "description": "The PEP 508 compliant environment markers for which the dependency should be installed."
-                },
-                "optional": {
-                    "type": "boolean",
-                    "description": "Whether the dependency is optional or not."
-                },
-                "extras": {
-                    "type": "array",
-                    "description": "The required extras for this dependency.",
-                    "items": {
-                        "type": "string"
-                    }
-                }
-            }
-        },
-        "path-dependency": {
-            "type": "object",
-            "required": [
-                "path"
-            ],
-            "additionalProperties": false,
-            "properties": {
-                "path": {
-                    "type": "string",
-                    "description": "The path to the dependency."
-                },
-                "python": {
-                    "type": "string",
-                    "description": "The python versions for which the dependency should be installed."
-                },
-                "platform": {
-                    "type": "string",
-                    "description": "The platform(s) for which the dependency should be installed."
-                },
-                "markers": {
-                    "type": "string",
-                    "description": "The PEP 508 compliant environment markers for which the dependency should be installed."
-                },
-                "optional": {
-                    "type": "boolean",
-                    "description": "Whether the dependency is optional or not."
-                },
-                "extras": {
-                    "type": "array",
-                    "description": "The required extras for this dependency.",
-                    "items": {
-                        "type": "string"
-                    }
-                },
-                "develop": {
-                    "type": "boolean",
-                    "description": "Whether to install the dependency in development mode."
-                }
-            }
-        },
-        "url-dependency": {
-            "type": "object",
-            "required": [
-                "url"
-            ],
-            "additionalProperties": false,
-            "properties": {
-                "url": {
-                    "type": "string",
-                    "description": "The url to the file."
-                },
-                "python": {
-                    "type": "string",
-                    "description": "The python versions for which the dependency should be installed."
-                },
-                "platform": {
-                    "type": "string",
-                    "description": "The platform(s) for which the dependency should be installed."
-                },
-                "markers": {
-                    "type": "string",
-                    "description": "The PEP 508 compliant environment markers for which the dependency should be installed."
-                },
-                "optional": {
-                    "type": "boolean",
-                    "description": "Whether the dependency is optional or not."
-                },
-                "extras": {
-                    "type": "array",
-                    "description": "The required extras for this dependency.",
-                    "items": {
-                        "type": "string"
-                    }
-                }
-            }
-        },
-        "multiple-constraints-dependency": {
-            "type": "array",
-            "minItems": 1,
-            "items": {
-                "oneOf": [
-                    {
-                        "$ref": "#/definitions/dependency"
-                    },
-                    {
-                        "$ref": "#/definitions/long-dependency"
-                    },
-                    {
-                        "$ref": "#/definitions/git-dependency"
-                    },
-                    {
-                        "$ref": "#/definitions/file-dependency"
-                    },
-                    {
-                        "$ref": "#/definitions/path-dependency"
-                    },
-                    {
-                        "$ref": "#/definitions/url-dependency"
-                    }
-                ]
-            }
-        },
-        "scripts": {
-            "type": "object",
-            "patternProperties": {
-                "^[a-zA-Z-_.0-9]+$": {
-                    "oneOf": [
-                        {
-                            "$ref": "#/definitions/script"
-                        },
-                        {
-                            "$ref": "#/definitions/extra-script"
-                        }
-                    ]
-                }
-            }
-        },
-        "script": {
-            "type": "string",
-            "description": "A simple script pointing to a callable object."
-        },
-        "extra-script": {
-            "type": "object",
-            "description": "A script that should be installed only if extras are activated.",
-            "additionalProperties": false,
-            "properties": {
-                "callable": {
-                    "$ref": "#/definitions/script"
-                },
-                "extras": {
-                    "type": "array",
-                    "description": "The required extras for this script.",
-                    "items": {
-                        "type": "string"
-                    }
-                }
-            }
-        },
-        "repository": {
-            "type": "object",
-            "additionalProperties": false,
-            "properties": {
-                "name": {
-                    "type": "string",
-                    "description": "The name of the repository"
-                },
-                "url": {
-                    "type": "string",
-                    "description": "The url of the repository",
-                    "format": "uri"
-                },
-                "default": {
-                    "type": "boolean",
-                    "description": "Make this repository the default (disable PyPI)"
-                },
-                "secondary": {
-                    "type": "boolean",
-                    "description": "Declare this repository as secondary, i.e. it will only be looked up last for packages."
-                }
-            }
-        }
-    }
-}
diff --git a/vendor/poetry/poetry/layouts/__init__.py b/vendor/poetry/poetry/layouts/__init__.py
deleted file mode 100644
index 9969ce5e..00000000
--- a/vendor/poetry/poetry/layouts/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from typing import Type
-
-from .layout import Layout
-from .src import SrcLayout
-from .standard import StandardLayout
-
-
-_LAYOUTS = {"src": SrcLayout, "standard": StandardLayout}
-
-
-def layout(name):  # type: (str) -> Type[Layout]
-    if name not in _LAYOUTS:
-        raise ValueError("Invalid layout")
-
-    return _LAYOUTS[name]
diff --git a/vendor/poetry/poetry/layouts/layout.py b/vendor/poetry/poetry/layouts/layout.py
deleted file mode 100644
index 8a74060f..00000000
--- a/vendor/poetry/poetry/layouts/layout.py
+++ /dev/null
@@ -1,164 +0,0 @@
-from typing import TYPE_CHECKING
-from typing import Optional
-
-from tomlkit import dumps
-from tomlkit import loads
-from tomlkit import table
-
-from poetry.utils.helpers import module_name
-
-
-if TYPE_CHECKING:
-    from poetry.core.pyproject.toml import PyProjectTOML
-
-TESTS_DEFAULT = u"""from {package_name} import __version__
-
-
-def test_version():
-    assert __version__ == '{version}'
-"""
-
-
-POETRY_DEFAULT = """\
-[tool.poetry]
-name = ""
-version = ""
-description = ""
-authors = []
-
-[tool.poetry.dependencies]
-
-[tool.poetry.dev-dependencies]
-"""
-
-POETRY_WITH_LICENSE = """\
-[tool.poetry]
-name = ""
-version = ""
-description = ""
-authors = []
-license = ""
-
-[tool.poetry.dependencies]
-
-[tool.poetry.dev-dependencies]
-"""
-
-BUILD_SYSTEM_MIN_VERSION = "1.0.0"
-BUILD_SYSTEM_MAX_VERSION = None
-
-
-class Layout(object):
-    def __init__(
-        self,
-        project,
-        version="0.1.0",
-        description="",
-        readme_format="md",
-        author=None,
-        license=None,
-        python="*",
-        dependencies=None,
-        dev_dependencies=None,
-    ):
-        self._project = project
-        self._package_name = module_name(project)
-        self._version = version
-        self._description = description
-        self._readme_format = readme_format
-        self._license = license
-        self._python = python
-        self._dependencies = dependencies or {}
-        self._dev_dependencies = dev_dependencies or {}
-
-        if not author:
-            author = "Your Name "
-
-        self._author = author
-
-    def create(self, path, with_tests=True):
-        path.mkdir(parents=True, exist_ok=True)
-
-        self._create_default(path)
-        self._create_readme(path)
-
-        if with_tests:
-            self._create_tests(path)
-
-        self._write_poetry(path)
-
-    def generate_poetry_content(
-        self, original=None
-    ):  # type: (Optional["PyProjectTOML"]) -> str
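-        # Fill in a TOML template via tomlkit, then append a [build-system]
-        # table that pins poetry-core as the build backend.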
-        template = POETRY_DEFAULT
-        if self._license:
-            template = POETRY_WITH_LICENSE
-
-        content = loads(template)
-        poetry_content = content["tool"]["poetry"]
-        poetry_content["name"] = self._project
-        poetry_content["version"] = self._version
-        poetry_content["description"] = self._description
-        poetry_content["authors"].append(self._author)
-        if self._license:
-            poetry_content["license"] = self._license
-
-        poetry_content["dependencies"]["python"] = self._python
-
-        for dep_name, dep_constraint in self._dependencies.items():
-            poetry_content["dependencies"][dep_name] = dep_constraint
-
-        for dep_name, dep_constraint in self._dev_dependencies.items():
-            poetry_content["dev-dependencies"][dep_name] = dep_constraint
-
-        # Add build system
-        build_system = table()
-        build_system_version = ">=" + BUILD_SYSTEM_MIN_VERSION
-        if BUILD_SYSTEM_MAX_VERSION is not None:
-            build_system_version += ",<" + BUILD_SYSTEM_MAX_VERSION
-
-        build_system.add("requires", ["poetry-core" + build_system_version])
-        build_system.add("build-backend", "poetry.core.masonry.api")
-
-        content.add("build-system", build_system)
-
-        content = dumps(content)
-
-        if original and original.file.exists():
-            content = dumps(original.data) + "\n" + content
-
-        return content
-
-    def _create_default(self, path, src=True):
-        raise NotImplementedError()
-
-    def _create_readme(self, path):
-        if self._readme_format == "rst":
-            readme_file = path / "README.rst"
-        else:
-            readme_file = path / "README.md"
-
-        readme_file.touch()
-
-    def _create_tests(self, path):
-        tests = path / "tests"
-        tests_init = tests / "__init__.py"
-        tests_default = tests / "test_{}.py".format(self._package_name)
-
-        tests.mkdir()
-        tests_init.touch(exist_ok=False)
-
-        with tests_default.open("w", encoding="utf-8") as f:
-            f.write(
-                TESTS_DEFAULT.format(
-                    package_name=self._package_name, version=self._version
-                )
-            )
-
-    def _write_poetry(self, path):
-        content = self.generate_poetry_content()
-
-        poetry = path / "pyproject.toml"
-
-        with poetry.open("w", encoding="utf-8") as f:
-            f.write(content)
diff --git a/vendor/poetry/poetry/layouts/src.py b/vendor/poetry/poetry/layouts/src.py
deleted file mode 100644
index 06db7a71..00000000
--- a/vendor/poetry/poetry/layouts/src.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from .layout import Layout
-
-
-DEFAULT = u"""__version__ = '{version}'
-"""
-
-
-class SrcLayout(Layout):
-    def _create_default(self, path):
-        package_path = path / "src" / self._package_name
-
-        package_init = package_path / "__init__.py"
-
-        package_path.mkdir(parents=True)
-
-        with package_init.open("w", encoding="utf-8") as f:
-            f.write(DEFAULT.format(version=self._version))
diff --git a/vendor/poetry/poetry/layouts/standard.py b/vendor/poetry/poetry/layouts/standard.py
deleted file mode 100644
index eca4c435..00000000
--- a/vendor/poetry/poetry/layouts/standard.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from .layout import Layout
-
-
-DEFAULT = u"""__version__ = '{version}'
-"""
-
-
-class StandardLayout(Layout):
-    def _create_default(self, path):
-        package_path = path / self._package_name
-
-        package_init = package_path / "__init__.py"
-
-        package_path.mkdir()
-
-        with package_init.open("w", encoding="utf-8") as f:
-            f.write(DEFAULT.format(version=self._version))
diff --git a/vendor/poetry/poetry/locations.py b/vendor/poetry/poetry/locations.py
deleted file mode 100644
index 001e1a9e..00000000
--- a/vendor/poetry/poetry/locations.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import os
-
-from .utils._compat import Path
-from .utils.appdirs import user_cache_dir
-from .utils.appdirs import user_config_dir
-from .utils.appdirs import user_data_dir
-
-
-CACHE_DIR = user_cache_dir("pypoetry")
-CONFIG_DIR = user_config_dir("pypoetry")
-
-REPOSITORY_CACHE_DIR = Path(CACHE_DIR) / "cache" / "repositories"
-
-
-def data_dir():  # type: () -> Path
-    if os.getenv("POETRY_HOME"):
-        return Path(os.getenv("POETRY_HOME")).expanduser()
-
-    return Path(user_data_dir("pypoetry", roaming=True))
diff --git a/vendor/poetry/poetry/masonry/api.py b/vendor/poetry/poetry/masonry/api.py
deleted file mode 100644
index 417def2c..00000000
--- a/vendor/poetry/poetry/masonry/api.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from poetry.core.masonry.api import build_sdist
-from poetry.core.masonry.api import build_wheel
-from poetry.core.masonry.api import get_requires_for_build_sdist
-from poetry.core.masonry.api import get_requires_for_build_wheel
-from poetry.core.masonry.api import prepare_metadata_for_build_wheel
-
-
-__all__ = [
-    "build_sdist",
-    "build_wheel",
-    "get_requires_for_build_sdist",
-    "get_requires_for_build_wheel",
-    "prepare_metadata_for_build_wheel",
-]
diff --git a/vendor/poetry/poetry/masonry/builders/__init__.py b/vendor/poetry/poetry/masonry/builders/__init__.py
deleted file mode 100644
index f1f02b72..00000000
--- a/vendor/poetry/poetry/masonry/builders/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .editable import EditableBuilder
diff --git a/vendor/poetry/poetry/masonry/builders/editable.py b/vendor/poetry/poetry/masonry/builders/editable.py
deleted file mode 100644
index 74d1f69c..00000000
--- a/vendor/poetry/poetry/masonry/builders/editable.py
+++ /dev/null
@@ -1,257 +0,0 @@
-from __future__ import unicode_literals
-
-import hashlib
-import os
-import shutil
-
-from base64 import urlsafe_b64encode
-
-from poetry.core.masonry.builders.builder import Builder
-from poetry.core.masonry.builders.sdist import SdistBuilder
-from poetry.core.masonry.utils.package_include import PackageInclude
-from poetry.core.semver.version import Version
-from poetry.utils._compat import WINDOWS
-from poetry.utils._compat import Path
-from poetry.utils._compat import decode
-from poetry.utils.helpers import is_dir_writable
-
-
-SCRIPT_TEMPLATE = """\
-#!{python}
-from {module} import {callable_holder}
-
-if __name__ == '__main__':
-    {callable_}()
-"""
-
-WINDOWS_CMD_TEMPLATE = """\
-@echo off\r\n"{python}" "%~dp0\\{script}" %*\r\n
-"""
-
-
-class EditableBuilder(Builder):
-    def __init__(self, poetry, env, io):
-        super(EditableBuilder, self).__init__(poetry)
-
-        self._env = env
-        self._io = io
-
-    def build(self):
-        self._debug(
-            "  - Building package {} in editable mode".format(
-                self._package.name
-            )
-        )
-
-        if self._package.build_script:
-            if self._package.build_should_generate_setup():
-                self._debug(
-                    "  - Falling back on using a setup.py"
-                )
-
-                return self._setup_build()
-
-            self._run_build_script(self._package.build_script)
-
-        added_files = []
-        added_files += self._add_pth()
-        added_files += self._add_scripts()
-        self._add_dist_info(added_files)
-
-    def _run_build_script(self, build_script):
-        self._debug("  - Executing build script: {}".format(build_script))
-        self._env.run("python", str(self._path.joinpath(build_script)), call=True)
-
-    def _setup_build(self):
-        builder = SdistBuilder(self._poetry)
-        setup = self._path / "setup.py"
-        has_setup = setup.exists()
-
-        if has_setup:
-            self._io.write_line(
-                "A setup.py file already exists. Using it."
-            )
-        else:
-            with setup.open("w", encoding="utf-8") as f:
-                f.write(decode(builder.build_setup()))
-
-        try:
-            if self._env.pip_version < Version(19, 0):
-                self._env.run_pip("install", "-e", str(self._path), "--no-deps")
-            else:
-                # Temporarily rename pyproject.toml
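-                # so that pip uses the generated setup.py instead of the
-                # PEP 517 backend declared there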
-                shutil.move(
-                    str(self._poetry.file), str(self._poetry.file.with_suffix(".tmp"))
-                )
-                try:
-                    self._env.run_pip("install", "-e", str(self._path), "--no-deps")
-                finally:
-                    shutil.move(
-                        str(self._poetry.file.with_suffix(".tmp")),
-                        str(self._poetry.file),
-                    )
-        finally:
-            if not has_setup:
-                os.remove(str(setup))
-
-    def _add_pth(self):
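-        # Editable mode is implemented with a .pth file: every path listed in
-        # it is appended to sys.path at interpreter startup, so the project
-        # sources stay importable without being copied into site-packages.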
-        paths = set()
-        for include in self._module.includes:
-            if isinstance(include, PackageInclude) and (
-                include.is_module() or include.is_package()
-            ):
-                paths.add(include.base.resolve().as_posix())
-
-        content = ""
-        for path in paths:
-            content += decode(path + os.linesep)
-
-        pth_file = Path(self._module.name).with_suffix(".pth")
-        try:
-            pth_file = self._env.site_packages.write_text(
-                pth_file, content, encoding="utf-8"
-            )
-            self._debug(
-                "  - Adding {} to {} for {}".format(
-                    pth_file.name, pth_file.parent, self._poetry.file.parent
-                )
-            )
-            return [pth_file]
-        except OSError:
-            # TODO: Replace with PermissionError
-            self._io.error_line(
-                "  - Failed to create {} for {}".format(
-                    pth_file.name, self._poetry.file.parent
-                )
-            )
-            return []
-
-    def _add_scripts(self):
-        added = []
-        entry_points = self.convert_entry_points()
-
-        for scripts_path in self._env.script_dirs:
-            if is_dir_writable(path=scripts_path, create=True):
-                break
-        else:
-            self._io.error_line(
-                "  - Failed to find a suitable script installation directory for {}".format(
-                    self._poetry.file.parent
-                )
-            )
-            return []
-
-        scripts = entry_points.get("console_scripts", [])
-        for script in scripts:
-            name, script = script.split(" = ")
-            module, callable_ = script.split(":")
-            callable_holder = callable_.split(".", 1)[0]
-
-            script_file = scripts_path.joinpath(name)
-            self._debug(
-                "  - Adding the {} script to {}".format(
-                    name, scripts_path
-                )
-            )
-            with script_file.open("w", encoding="utf-8") as f:
-                f.write(
-                    decode(
-                        SCRIPT_TEMPLATE.format(
-                            python=self._env.python,
-                            module=module,
-                            callable_holder=callable_holder,
-                            callable_=callable_,
-                        )
-                    )
-                )
-
-            script_file.chmod(0o755)
-
-            added.append(script_file)
-
-            if WINDOWS:
-                cmd_script = script_file.with_suffix(".cmd")
-                cmd = WINDOWS_CMD_TEMPLATE.format(python=self._env.python, script=name)
-                self._debug(
-                    "  - Adding the {} script wrapper to {}".format(
-                        cmd_script.name, scripts_path
-                    )
-                )
-
-                with cmd_script.open("w", encoding="utf-8") as f:
-                    f.write(decode(cmd))
-
-                added.append(cmd_script)
-
-        return added
-
-    def _add_dist_info(self, added_files):
-        from poetry.core.masonry.builders.wheel import WheelBuilder
-
-        added_files = added_files[:]
-
-        builder = WheelBuilder(self._poetry)
-
-        dist_info_path = Path(builder.dist_info)
-        for dist_info in self._env.site_packages.find(
-            dist_info_path, writable_only=True
-        ):
-            if dist_info.exists():
-                self._debug(
-                    "  - Removing existing {} directory from {}".format(
-                        dist_info.name, dist_info.parent
-                    )
-                )
-                shutil.rmtree(str(dist_info))
-
-        dist_info = self._env.site_packages.mkdir(dist_info_path)
-
-        self._debug(
-            "  - Adding the {} directory to {}".format(
-                dist_info.name, dist_info.parent
-            )
-        )
-
-        with dist_info.joinpath("METADATA").open("w", encoding="utf-8") as f:
-            builder._write_metadata_file(f)
-
-        added_files.append(dist_info.joinpath("METADATA"))
-
-        with dist_info.joinpath("INSTALLER").open("w", encoding="utf-8") as f:
-            f.write("poetry")
-
-        added_files.append(dist_info.joinpath("INSTALLER"))
-
-        if self.convert_entry_points():
-            with dist_info.joinpath("entry_points.txt").open(
-                "w", encoding="utf-8"
-            ) as f:
-                builder._write_entry_points(f)
-
-            added_files.append(dist_info.joinpath("entry_points.txt"))
-
-        with dist_info.joinpath("RECORD").open("w", encoding="utf-8") as f:
-            for path in added_files:
-                hash = self._get_file_hash(path)
-                size = path.stat().st_size
-                f.write("{},sha256={},{}\n".format(str(path), hash, size))
-
-            # RECORD itself is recorded with no hash or size
-            f.write("{},,\n".format(dist_info.joinpath("RECORD")))
-
-    def _get_file_hash(self, filepath):
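-        # RECORD entries use the wheel convention: sha256 digests encoded as
-        # unpadded urlsafe base64 (PEP 376 / the wheel specification).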
-        hashsum = hashlib.sha256()
-        with filepath.open("rb") as src:
-            while True:
-                buf = src.read(1024 * 8)
-                if not buf:
-                    break
-                hashsum.update(buf)
-
-            src.seek(0)
-
-        return urlsafe_b64encode(hashsum.digest()).decode("ascii").rstrip("=")
-
-    def _debug(self, msg):
-        if self._io.is_debug():
-            self._io.write_line(msg)
diff --git a/vendor/poetry/poetry/mixology/__init__.py b/vendor/poetry/poetry/mixology/__init__.py
deleted file mode 100644
index 50fbffb2..00000000
--- a/vendor/poetry/poetry/mixology/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from .version_solver import VersionSolver
-
-
-def resolve_version(root, provider, locked=None, use_latest=None):
-    solver = VersionSolver(root, provider, locked=locked, use_latest=use_latest)
-
-    return solver.solve()
diff --git a/vendor/poetry/poetry/mixology/assignment.py b/vendor/poetry/poetry/mixology/assignment.py
deleted file mode 100644
index e288c5da..00000000
--- a/vendor/poetry/poetry/mixology/assignment.py
+++ /dev/null
@@ -1,44 +0,0 @@
-from typing import Any
-
-from .incompatibility import Incompatibility
-from .term import Term
-
-
-class Assignment(Term):
-    """
-    A term in a PartialSolution that tracks some additional metadata.
-    """
-
-    def __init__(self, dependency, is_positive, decision_level, index, cause=None):
-        super(Assignment, self).__init__(dependency, is_positive)
-
-        self._decision_level = decision_level
-        self._index = index
-        self._cause = cause
-
-    @property
-    def decision_level(self):  # type: () -> int
-        return self._decision_level
-
-    @property
-    def index(self):  # type: () -> int
-        return self._index
-
-    @property
-    def cause(self):  # type: () -> Incompatibility
-        return self._cause
-
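-    # PubGrub distinguishes two kinds of assignments: decisions (concrete
-    # package selections, with no cause) and derivations (terms inferred from
-    # an incompatibility, recorded as the cause).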
-    @classmethod
-    def decision(
-        cls, package, decision_level, index
-    ):  # type: (Any, int, int) -> Assignment
-        return cls(package.to_dependency(), True, decision_level, index)
-
-    @classmethod
-    def derivation(
-        cls, dependency, is_positive, cause, decision_level, index
-    ):  # type: (Any, bool, Incompatibility, int, int) -> Assignment
-        return cls(dependency, is_positive, decision_level, index, cause)
-
-    def is_decision(self):  # type: () -> bool
-        return self._cause is None
diff --git a/vendor/poetry/poetry/mixology/failure.py b/vendor/poetry/poetry/mixology/failure.py
deleted file mode 100644
index afa33208..00000000
--- a/vendor/poetry/poetry/mixology/failure.py
+++ /dev/null
@@ -1,290 +0,0 @@
-from typing import Dict
-from typing import List
-from typing import Tuple
-
-from poetry.core.semver import parse_constraint
-
-from .incompatibility import Incompatibility
-from .incompatibility_cause import ConflictCause
-from .incompatibility_cause import PythonCause
-
-
-class SolveFailure(Exception):
-    def __init__(self, incompatibility):  # type: (Incompatibility) -> None
-        self._incompatibility = incompatibility
-
-    @property
-    def message(self):
-        return str(self)
-
-    def __str__(self):
-        return _Writer(self._incompatibility).write()
-
-
-class _Writer:
-    def __init__(self, root):  # type: (Incompatibility) -> None
-        self._root = root
-        self._derivations = {}  # type: Dict[Incompatibility, int]
-        self._lines = []  # type: List[Tuple[str, int]]
-        self._line_numbers = {}  # type: Dict[Incompatibility, int]
-
-        self._count_derivations(self._root)
-
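-    # Renders the root incompatibility's derivation tree as a readable
-    # explanation; any incompatibility referenced more than once is given a
-    # line number so later steps can refer back to it.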
-    def write(self):
-        buffer = []
-
-        required_python_version_notification = False
-        for incompatibility in self._root.external_incompatibilities:
-            if isinstance(incompatibility.cause, PythonCause):
-                if not required_python_version_notification:
-                    buffer.append(
-                        "The current project's Python requirement ({}) "
-                        "is not compatible with some of the required "
-                        "packages Python requirement:".format(
-                            incompatibility.cause.root_python_version
-                        )
-                    )
-                    required_python_version_notification = True
-
-                root_constraint = parse_constraint(
-                    incompatibility.cause.root_python_version
-                )
-                constraint = parse_constraint(incompatibility.cause.python_version)
-                buffer.append(
-                    "  - {} requires Python {}, so it will not be satisfied for Python {}".format(
-                        incompatibility.terms[0].dependency.name,
-                        incompatibility.cause.python_version,
-                        root_constraint.difference(constraint),
-                    )
-                )
-
-        if required_python_version_notification:
-            buffer.append("")
-
-        if isinstance(self._root.cause, ConflictCause):
-            self._visit(self._root, {})
-        else:
-            self._write(
-                self._root, "Because {}, version solving failed.".format(self._root)
-            )
-
-        padding = (
-            0
-            if not self._line_numbers
-            else len("({}) ".format(list(self._line_numbers.values())[-1]))
-        )
-
-        last_was_empty = False
-        for line in self._lines:
-            message = line[0]
-            if not message:
-                if not last_was_empty:
-                    buffer.append("")
-
-                last_was_empty = True
-                continue
-
-            last_was_empty = False
-
-            number = line[-1]
-            if number is not None:
-                message = "({})".format(number).ljust(padding) + message
-            else:
-                message = " " * padding + message
-
-            buffer.append(message)
-
-        return "\n".join(buffer)
-
-    def _write(
-        self, incompatibility, message, numbered=False
-    ):  # type: (Incompatibility, str, bool) -> None
-        if numbered:
-            number = len(self._line_numbers) + 1
-            self._line_numbers[incompatibility] = number
-            self._lines.append((message, number))
-        else:
-            self._lines.append((message, None))
-
-    def _visit(
-        self, incompatibility, details_for_incompatibility, conclusion=False
-    ):  # type: (Incompatibility, Dict, bool) -> None
-        numbered = conclusion or self._derivations[incompatibility] > 1
-        conjunction = "So," if conclusion or incompatibility == self._root else "And"
-        incompatibility_string = str(incompatibility)
-
-        cause = incompatibility.cause  # type: ConflictCause
-        details_for_cause = {}
-        if isinstance(cause.conflict.cause, ConflictCause) and isinstance(
-            cause.other.cause, ConflictCause
-        ):
-            conflict_line = self._line_numbers.get(cause.conflict)
-            other_line = self._line_numbers.get(cause.other)
-
-            if conflict_line is not None and other_line is not None:
-                self._write(
-                    incompatibility,
-                    "Because {}, {}.".format(
-                        cause.conflict.and_to_string(
-                            cause.other, details_for_cause, conflict_line, other_line
-                        ),
-                        incompatibility_string,
-                    ),
-                    numbered=numbered,
-                )
-            elif conflict_line is not None or other_line is not None:
-                if conflict_line is not None:
-                    with_line = cause.conflict
-                    without_line = cause.other
-                    line = conflict_line
-                else:
-                    with_line = cause.other
-                    without_line = cause.conflict
-                    line = other_line
-
-                self._visit(without_line, details_for_cause)
-                self._write(
-                    incompatibility,
-                    "{} because {} ({}), {}.".format(
-                        conjunction, str(with_line), line, incompatibility_string
-                    ),
-                    numbered=numbered,
-                )
-            else:
-                single_line_conflict = self._is_single_line(cause.conflict.cause)
-                single_line_other = self._is_single_line(cause.other.cause)
-
-                if single_line_other or single_line_conflict:
-                    first = cause.conflict if single_line_other else cause.other
-                    second = cause.other if single_line_other else cause.conflict
-                    self._visit(first, details_for_cause)
-                    self._visit(second, details_for_cause)
-                    self._write(
-                        incompatibility,
-                        "Thus, {}.".format(incompatibility_string),
-                        numbered=numbered,
-                    )
-                else:
-                    self._visit(cause.conflict, {}, conclusion=True)
-                    self._lines.append(("", None))
-
-                    self._visit(cause.other, details_for_cause)
-
-                    self._write(
-                        incompatibility,
-                        "{} because {} ({}), {}".format(
-                            conjunction,
-                            str(cause.conflict),
-                            self._line_numbers[cause.conflict],
-                            incompatibility_string,
-                        ),
-                        numbered=numbered,
-                    )
-        elif isinstance(cause.conflict.cause, ConflictCause) or isinstance(
-            cause.other.cause, ConflictCause
-        ):
-            derived = (
-                cause.conflict
-                if isinstance(cause.conflict.cause, ConflictCause)
-                else cause.other
-            )
-            ext = (
-                cause.other
-                if isinstance(cause.conflict.cause, ConflictCause)
-                else cause.conflict
-            )
-
-            derived_line = self._line_numbers.get(derived)
-            if derived_line is not None:
-                self._write(
-                    incompatibility,
-                    "Because {}, {}.".format(
-                        ext.and_to_string(
-                            derived, details_for_cause, None, derived_line
-                        ),
-                        incompatibility_string,
-                    ),
-                    numbered=numbered,
-                )
-            elif self._is_collapsible(derived):
-                derived_cause = derived.cause  # type: ConflictCause
-                if isinstance(derived_cause.conflict.cause, ConflictCause):
-                    collapsed_derived = derived_cause.conflict
-                else:
-                    collapsed_derived = derived_cause.other
-
-                if isinstance(derived_cause.conflict.cause, ConflictCause):
-                    collapsed_ext = derived_cause.other
-                else:
-                    collapsed_ext = derived_cause.conflict
-
-                details_for_cause = {}
-
-                self._visit(collapsed_derived, details_for_cause)
-                self._write(
-                    incompatibility,
-                    "{} because {}, {}.".format(
-                        conjunction,
-                        collapsed_ext.and_to_string(ext, details_for_cause, None, None),
-                        incompatibility_string,
-                    ),
-                    numbered=numbered,
-                )
-            else:
-                self._visit(derived, details_for_cause)
-                self._write(
-                    incompatibility,
-                    "{} because {}, {}.".format(
-                        conjunction, str(ext), incompatibility_string
-                    ),
-                    numbered=numbered,
-                )
-        else:
-            self._write(
-                incompatibility,
-                "Because {}, {}.".format(
-                    cause.conflict.and_to_string(
-                        cause.other, details_for_cause, None, None
-                    ),
-                    incompatibility_string,
-                ),
-                numbered=numbered,
-            )
-
-    def _is_collapsible(self, incompatibility):  # type: (Incompatibility) -> bool
-        if self._derivations[incompatibility] > 1:
-            return False
-
-        cause = incompatibility.cause  # type: ConflictCause
-        if isinstance(cause.conflict.cause, ConflictCause) and isinstance(
-            cause.other.cause, ConflictCause
-        ):
-            return False
-
-        if not isinstance(cause.conflict.cause, ConflictCause) and not isinstance(
-            cause.other.cause, ConflictCause
-        ):
-            return False
-
-        complex = (
-            cause.conflict
-            if isinstance(cause.conflict.cause, ConflictCause)
-            else cause.other
-        )
-
-        return complex not in self._line_numbers
-
-    def _is_single_line(self, cause):  # type: (ConflictCause) -> bool
-        return not isinstance(cause.conflict.cause, ConflictCause) and not isinstance(
-            cause.other.cause, ConflictCause
-        )
-
-    def _count_derivations(self, incompatibility):  # type: (Incompatibility) -> None
-        if incompatibility in self._derivations:
-            self._derivations[incompatibility] += 1
-        else:
-            self._derivations[incompatibility] = 1
-            cause = incompatibility.cause
-            if isinstance(cause, ConflictCause):
-                self._count_derivations(cause.conflict)
-                self._count_derivations(cause.other)
diff --git a/vendor/poetry/poetry/mixology/incompatibility.py b/vendor/poetry/poetry/mixology/incompatibility.py
deleted file mode 100644
index bba55bb2..00000000
--- a/vendor/poetry/poetry/mixology/incompatibility.py
+++ /dev/null
@@ -1,458 +0,0 @@
-from typing import Dict
-from typing import Generator
-from typing import List
-
-from .incompatibility_cause import ConflictCause
-from .incompatibility_cause import DependencyCause
-from .incompatibility_cause import IncompatibilityCause
-from .incompatibility_cause import NoVersionsCause
-from .incompatibility_cause import PackageNotFoundCause
-from .incompatibility_cause import PlatformCause
-from .incompatibility_cause import PythonCause
-from .incompatibility_cause import RootCause
-from .term import Term
-
-
-class Incompatibility:
-    def __init__(
-        self, terms, cause
-    ):  # type: (List[Term], IncompatibilityCause) -> None
-        # Remove the root package from generated incompatibilities, since it will
-        # always be satisfied. This makes error reporting clearer, and may also
-        # make solving more efficient.
-        if (
-            len(terms) != 1
-            and isinstance(cause, ConflictCause)
-            and any([term.is_positive() and term.dependency.is_root for term in terms])
-        ):
-            terms = [
-                term
-                for term in terms
-                if not term.is_positive() or not term.dependency.is_root
-            ]
-
-        if (
-            len(terms) == 1
-            # Short-circuit in the common case of a two-term incompatibility with
-            # two different packages (for example, a dependency).
-            or len(terms) == 2
-            and terms[0].dependency.complete_name != terms[-1].dependency.complete_name
-        ):
-            pass
-        else:
-            # Coalesce multiple terms about the same package if possible.
-            by_name = {}  # type: Dict[str, Dict[str, Term]]
-            for term in terms:
-                if term.dependency.complete_name not in by_name:
-                    by_name[term.dependency.complete_name] = {}
-
-                by_ref = by_name[term.dependency.complete_name]
-                ref = term.dependency.complete_name
-
-                if ref in by_ref:
-                    by_ref[ref] = by_ref[ref].intersect(term)
-
-                    # If we have two terms that refer to the same package but have a null
-                    # intersection, they're mutually exclusive, making this incompatibility
-                    # irrelevant, since we already know that mutually exclusive version
-                    # ranges are incompatible. We should never derive an irrelevant
-                    # incompatibility.
-                    assert by_ref[ref] is not None
-                else:
-                    by_ref[ref] = term
-
-            new_terms = []
-            for by_ref in by_name.values():
-                positive_terms = [
-                    term for term in by_ref.values() if term.is_positive()
-                ]
-                if positive_terms:
-                    new_terms += positive_terms
-                    continue
-
-                new_terms += list(by_ref.values())
-
-            terms = new_terms
-
-        self._terms = terms
-        self._cause = cause
-
-    @property
-    def terms(self):  # type: () -> List[Term]
-        return self._terms
-
-    @property
-    def cause(self):  # type: () -> IncompatibilityCause
-        return self._cause
-
-    @property
-    def external_incompatibilities(self):  # type: () -> Generator[Incompatibility]
-        """
-        Returns all external incompatibilities in this incompatibility's
-        derivation graph.
-        """
-        if isinstance(self._cause, ConflictCause):
-            cause = self._cause  # type: ConflictCause
-            for incompatibility in cause.conflict.external_incompatibilities:
-                yield incompatibility
-
-            for incompatibility in cause.other.external_incompatibilities:
-                yield incompatibility
-        else:
-            yield self
-
-    def is_failure(self):  # type: () -> bool
-        return len(self._terms) == 0 or (
-            len(self._terms) == 1 and self._terms[0].dependency.is_root
-        )
-
-    def __str__(self):
-        if isinstance(self._cause, DependencyCause):
-            assert len(self._terms) == 2
-
-            depender = self._terms[0]
-            dependee = self._terms[1]
-            assert depender.is_positive()
-            assert not dependee.is_positive()
-
-            return "{} depends on {}".format(
-                self._terse(depender, allow_every=True), self._terse(dependee)
-            )
-        elif isinstance(self._cause, PythonCause):
-            assert len(self._terms) == 1
-            assert self._terms[0].is_positive()
-
-            cause = self._cause  # type: PythonCause
-            text = "{} requires ".format(self._terse(self._terms[0], allow_every=True))
-            text += "Python {}".format(cause.python_version)
-
-            return text
-        elif isinstance(self._cause, PlatformCause):
-            assert len(self._terms) == 1
-            assert self._terms[0].is_positive()
-
-            cause = self._cause  # type: PlatformCause
-            text = "{} requires ".format(self._terse(self._terms[0], allow_every=True))
-            text += "platform {}".format(cause.platform)
-
-            return text
-        elif isinstance(self._cause, NoVersionsCause):
-            assert len(self._terms) == 1
-            assert self._terms[0].is_positive()
-
-            return "no versions of {} match {}".format(
-                self._terms[0].dependency.name, self._terms[0].constraint
-            )
-        elif isinstance(self._cause, PackageNotFoundCause):
-            assert len(self._terms) == 1
-            assert self._terms[0].is_positive()
-
-            return "{} doesn't exist".format(self._terms[0].dependency.name)
-        elif isinstance(self._cause, RootCause):
-            assert len(self._terms) == 1
-            assert not self._terms[0].is_positive()
-            assert self._terms[0].dependency.is_root
-
-            return "{} is {}".format(
-                self._terms[0].dependency.name, self._terms[0].dependency.constraint
-            )
-        elif self.is_failure():
-            return "version solving failed"
-
-        if len(self._terms) == 1:
-            term = self._terms[0]
-            if term.constraint.is_any():
-                return "{} is {}".format(
-                    term.dependency.name,
-                    "forbidden" if term.is_positive() else "required",
-                )
-            else:
-                return "{} is {}".format(
-                    term.dependency.name,
-                    "forbidden" if term.is_positive() else "required",
-                )
-
-        if len(self._terms) == 2:
-            term1 = self._terms[0]
-            term2 = self._terms[1]
-
-            if term1.is_positive() == term2.is_positive():
-                if term1.is_positive():
-                    package1 = (
-                        term1.dependency.name
-                        if term1.constraint.is_any()
-                        else self._terse(term1)
-                    )
-                    package2 = (
-                        term2.dependency.name
-                        if term2.constraint.is_any()
-                        else self._terse(term2)
-                    )
-
-                    return "{} is incompatible with {}".format(package1, package2)
-                else:
-                    return "either {} or {}".format(
-                        self._terse(term1), self._terse(term2)
-                    )
-
-        positive = []
-        negative = []
-
-        for term in self._terms:
-            if term.is_positive():
-                positive.append(self._terse(term))
-            else:
-                negative.append(self._terse(term))
-
-        if positive and negative:
-            if len(positive) == 1:
-                positive_term = [term for term in self._terms if term.is_positive()][0]
-
-                return "{} requires {}".format(
-                    self._terse(positive_term, allow_every=True), " or ".join(negative)
-                )
-            else:
-                return "if {} then {}".format(
-                    " and ".join(positive), " or ".join(negative)
-                )
-        elif positive:
-            return "one of {} must be false".format(" or ".join(positive))
-        else:
-            return "one of {} must be true".format(" or ".join(negative))
-
-    def and_to_string(
-        self, other, details, this_line, other_line
-    ):  # type: (Incompatibility, dict, int, int) -> str
-        requires_both = self._try_requires_both(other, details, this_line, other_line)
-        if requires_both is not None:
-            return requires_both
-
-        requires_through = self._try_requires_through(
-            other, details, this_line, other_line
-        )
-        if requires_through is not None:
-            return requires_through
-
-        requires_forbidden = self._try_requires_forbidden(
-            other, details, this_line, other_line
-        )
-        if requires_forbidden is not None:
-            return requires_forbidden
-
-        buffer = [str(self)]
-        if this_line is not None:
-            buffer.append(" " + str(this_line))
-
-        buffer.append(" and {}".format(str(other)))
-
-        if other_line is not None:
-            buffer.append(" " + str(other_line))
-
-        return "\n".join(buffer)
-
-    def _try_requires_both(
-        self, other, details, this_line, other_line
-    ):  # type: (Incompatibility, dict, int, int) -> str
-        if len(self._terms) == 1 or len(other.terms) == 1:
-            return
-
-        this_positive = self._single_term_where(lambda term: term.is_positive())
-        if this_positive is None:
-            return
-
-        other_positive = other._single_term_where(lambda term: term.is_positive())
-        if other_positive is None:
-            return
-
-        if this_positive.dependency != other_positive.dependency:
-            return
-
-        this_negatives = " or ".join(
-            [self._terse(term) for term in self._terms if not term.is_positive()]
-        )
-
-        other_negatives = " or ".join(
-            [self._terse(term) for term in other.terms if not term.is_positive()]
-        )
-
-        buffer = [self._terse(this_positive, allow_every=True) + " "]
-        is_dependency = isinstance(self.cause, DependencyCause) and isinstance(
-            other.cause, DependencyCause
-        )
-
-        if is_dependency:
-            buffer.append("depends on")
-        else:
-            buffer.append("requires")
-
-        buffer.append(" both {}".format(this_negatives))
-        if this_line is not None:
-            buffer.append(" ({})".format(this_line))
-
-        buffer.append(" and {}".format(other_negatives))
-
-        if other_line is not None:
-            buffer.append(" ({})".format(other_line))
-
-        return "".join(buffer)
-
-    def _try_requires_through(
-        self, other, details, this_line, other_line
-    ):  # type: (Incompatibility, dict, int, int) -> str
-        if len(self._terms) == 1 or len(other.terms) == 1:
-            return
-
-        this_negative = self._single_term_where(lambda term: not term.is_positive())
-        other_negative = other._single_term_where(lambda term: not term.is_positive())
-
-        if this_negative is None and other_negative is None:
-            return
-
-        this_positive = self._single_term_where(lambda term: term.is_positive())
-        other_positive = self._single_term_where(lambda term: term.is_positive())
-
-        if (
-            this_negative is not None
-            and other_positive is not None
-            and this_negative.dependency.name == other_positive.dependency.name
-            and this_negative.inverse.satisfies(other_positive)
-        ):
-            prior = self
-            prior_negative = this_negative
-            prior_line = this_line
-            latter = other
-            latter_line = other_line
-        elif (
-            other_negative is not None
-            and this_positive is not None
-            and other_negative.dependency.name == this_positive.dependency.name
-            and other_negative.inverse.satisfies(this_positive)
-        ):
-            prior = other
-            prior_negative = other_negative
-            prior_line = other_line
-            latter = self
-            latter_line = this_line
-        else:
-            return
-
-        prior_positives = [term for term in prior.terms if term.is_positive()]
-
-        buffer = []
-        if len(prior_positives) > 1:
-            prior_string = " or ".join([self._terse(term) for term in prior_positives])
-            buffer.append("if {} then ".format(prior_string))
-        else:
-            if isinstance(prior.cause, DependencyCause):
-                verb = "depends on"
-            else:
-                verb = "requires"
-
-            buffer.append(
-                "{} {} ".format(self._terse(prior_positives[0], allow_every=True), verb)
-            )
-
-        buffer.append(self._terse(prior_negative))
-        if prior_line is not None:
-            buffer.append(" ({})".format(prior_line))
-
-        buffer.append(" which ")
-
-        if isinstance(latter.cause, DependencyCause):
-            buffer.append("depends on ")
-        else:
-            buffer.append("requires ")
-
-        buffer.append(
-            " or ".join(
-                [self._terse(term) for term in latter.terms if not term.is_positive()]
-            )
-        )
-
-        if latter_line is not None:
-            buffer.append(" ({})".format(latter_line))
-
-        return "".join(buffer)
-
-    def _try_requires_forbidden(
-        self, other, details, this_line, other_line
-    ):  # type: (Incompatibility, dict, int, int) -> str
-        if len(self._terms) != 1 and len(other.terms) != 1:
-            return None
-
-        if len(self.terms) == 1:
-            prior = other
-            latter = self
-            prior_line = other_line
-            latter_line = this_line
-        else:
-            prior = self
-            latter = other
-            prior_line = this_line
-            latter_line = other_line
-
-        negative = prior._single_term_where(lambda term: not term.is_positive())
-        if negative is None:
-            return
-
-        if not negative.inverse.satisfies(latter.terms[0]):
-            return
-
-        positives = [t for t in prior.terms if t.is_positive()]
-
-        buffer = []
-        if len(positives) > 1:
-            prior_string = " or ".join([self._terse(term) for term in positives])
-            buffer.append("if {} then ".format(prior_string))
-        else:
-            buffer.append(self._terse(positives[0], allow_every=True))
-            if isinstance(prior.cause, DependencyCause):
-                buffer.append(" depends on ")
-            else:
-                buffer.append(" requires ")
-
-        buffer.append(self._terse(latter.terms[0]) + " ")
-        if prior_line is not None:
-            buffer.append("({}) ".format(prior_line))
-
-        if isinstance(latter.cause, PythonCause):
-            cause = latter.cause  # type: PythonCause
-            buffer.append("which requires Python {}".format(cause.python_version))
-        elif isinstance(latter.cause, NoVersionsCause):
-            buffer.append("which doesn't match any versions")
-        elif isinstance(latter.cause, PackageNotFoundCause):
-            buffer.append("which doesn't exist")
-        else:
-            buffer.append("which is forbidden")
-
-        if latter_line is not None:
-            buffer.append(" ({})".format(latter_line))
-
-        return "".join(buffer)
-
-    def _terse(self, term, allow_every=False):
-        if allow_every and term.constraint.is_any():
-            return "every version of {}".format(term.dependency.complete_name)
-
-        if term.dependency.is_root:
-            return term.dependency.pretty_name
-
-        return "{} ({})".format(
-            term.dependency.pretty_name, term.dependency.pretty_constraint
-        )
-
-    def _single_term_where(self, callable):  # type: (callable) -> Term
-        found = None
-        for term in self._terms:
-            if not callable(term):
-                continue
-
-            if found is not None:
-                return
-
-            found = term
-
-        return found
-
-    def __repr__(self):
-        return "".format(str(self))
diff --git a/vendor/poetry/poetry/mixology/incompatibility_cause.py b/vendor/poetry/poetry/mixology/incompatibility_cause.py
deleted file mode 100644
index 8156b4fa..00000000
--- a/vendor/poetry/poetry/mixology/incompatibility_cause.py
+++ /dev/null
@@ -1,89 +0,0 @@
-class IncompatibilityCause(Exception):
-    """
-    The reason and Incompatibility's terms are incompatible.
-    """
-
-
-class RootCause(IncompatibilityCause):
-
-    pass
-
-
-class NoVersionsCause(IncompatibilityCause):
-
-    pass
-
-
-class DependencyCause(IncompatibilityCause):
-
-    pass
-
-
-class ConflictCause(IncompatibilityCause):
-    """
-    The incompatibility was derived from two existing incompatibilities
-    during conflict resolution.
-    """
-
-    def __init__(self, conflict, other):
-        self._conflict = conflict
-        self._other = other
-
-    @property
-    def conflict(self):
-        return self._conflict
-
-    @property
-    def other(self):
-        return self._other
-
-    def __str__(self):
-        return str(self._conflict)
-
-
-class PythonCause(IncompatibilityCause):
-    """
-    The incompatibility represents a package's python constraint
-    (Python versions) being incompatible
-    with the current python version.
-    """
-
-    def __init__(self, python_version, root_python_version):
-        self._python_version = python_version
-        self._root_python_version = root_python_version
-
-    @property
-    def python_version(self):
-        return self._python_version
-
-    @property
-    def root_python_version(self):
-        return self._root_python_version
-
-
-class PlatformCause(IncompatibilityCause):
-    """
-    The incompatibility represents a package's platform constraint
-    (OS most likely) being incompatible with the current platform.
-    """
-
-    def __init__(self, platform):
-        self._platform = platform
-
-    @property
-    def platform(self):
-        return self._platform
-
-
-class PackageNotFoundCause(IncompatibilityCause):
-    """
-    The incompatibility represents a package that couldn't be found by its
-    source.
-    """
-
-    def __init__(self, error):
-        self._error = error
-
-    @property
-    def error(self):
-        return self._error
diff --git a/vendor/poetry/poetry/mixology/partial_solution.py b/vendor/poetry/poetry/mixology/partial_solution.py
deleted file mode 100755
index df17f718..00000000
--- a/vendor/poetry/poetry/mixology/partial_solution.py
+++ /dev/null
@@ -1,218 +0,0 @@
-from collections import OrderedDict
-from typing import Dict
-from typing import List
-
-from poetry.core.packages import Dependency
-from poetry.core.packages import Package
-
-from .assignment import Assignment
-from .incompatibility import Incompatibility
-from .set_relation import SetRelation
-from .term import Term
-
-
-class PartialSolution:
-    """
-    # A list of Assignments that represent the solver's current best guess about
-    # what's true for the eventual set of package versions that will comprise the
-    # total solution.
-    #
-    # See https://github.com/dart-lang/mixology/tree/master/doc/solver.md#partial-solution.
-    """
-
-    def __init__(self):
-        # The assignments that have been made so far, in the order they were
-        # assigned.
-        self._assignments = []  # type: List[Assignment]
-
-        # The decisions made for each package.
-        self._decisions = OrderedDict()  # type: Dict[str, Package]
-
-        # The intersection of all positive Assignments for each package, minus any
-        # negative Assignments that refer to that package.
-        #
-        # This is derived from self._assignments.
-        self._positive = OrderedDict()  # type: Dict[str, Term]
-
-        # The union of all negative Assignments for each package.
-        #
-        # If a package has any positive Assignments, it doesn't appear in this
-        # map.
-        #
-        # This is derived from self._assignments.
-        self._negative = OrderedDict()  # type: Dict[str, Dict[str, Term]]
-
-        # The number of distinct solutions that have been attempted so far.
-        self._attempted_solutions = 1
-
-        # Whether the solver is currently backtracking.
-        self._backtracking = False
-
-    @property
-    def decisions(self):  # type: () -> List[Package]
-        return list(self._decisions.values())
-
-    @property
-    def decision_level(self):  # type: () -> int
-        return len(self._decisions)
-
-    @property
-    def attempted_solutions(self):  # type: () -> int
-        return self._attempted_solutions
-
-    @property
-    def unsatisfied(self):  # type: () -> List[Dependency]
-        return [
-            term.dependency
-            for term in self._positive.values()
-            if term.dependency.complete_name not in self._decisions
-        ]
-
-    def decide(self, package):  # type: (Package) -> None
-        """
-        Adds an assignment of package as a decision
-        and increments the decision level.
-        """
-        # When we make a new decision after backtracking, count an additional
-        # attempted solution. If we backtrack multiple times in a row, though, we
-        # only want to count one, since we haven't actually started attempting a
-        # new solution.
-        if self._backtracking:
-            self._attempted_solutions += 1
-
-        self._backtracking = False
-        self._decisions[package.complete_name] = package
-
-        self._assign(
-            Assignment.decision(package, self.decision_level, len(self._assignments))
-        )
-
-    def derive(
-        self, dependency, is_positive, cause
-    ):  # type: (Dependency, bool, Incompatibility) -> None
-        """
-        Adds an assignment of package as a derivation.
-        """
-        self._assign(
-            Assignment.derivation(
-                dependency,
-                is_positive,
-                cause,
-                self.decision_level,
-                len(self._assignments),
-            )
-        )
-
-    def _assign(self, assignment):  # type: (Assignment) -> None
-        """
-        Adds an Assignment to _assignments and _positive or _negative.
-        """
-        self._assignments.append(assignment)
-        self._register(assignment)
-
-    def backtrack(self, decision_level):  # type: (int) -> None
-        """
-        Resets the current decision level to decision_level, and removes all
-        assignments made after that level.
-        """
-        self._backtracking = True
-
-        packages = set()
-        while self._assignments[-1].decision_level > decision_level:
-            removed = self._assignments.pop(-1)
-            packages.add(removed.dependency.complete_name)
-            if removed.is_decision():
-                del self._decisions[removed.dependency.complete_name]
-
-        # Re-compute _positive and _negative for the packages that were removed.
-        for package in packages:
-            if package in self._positive:
-                del self._positive[package]
-
-            if package in self._negative:
-                del self._negative[package]
-
-        for assignment in self._assignments:
-            if assignment.dependency.complete_name in packages:
-                self._register(assignment)
-
-    def _register(self, assignment):  # type: (Assignment) -> None
-        """
-        Registers an Assignment in _positive or _negative.
-        """
-        name = assignment.dependency.complete_name
-        old_positive = self._positive.get(name)
-        if old_positive is not None:
-            self._positive[name] = old_positive.intersect(assignment)
-
-            return
-
-        ref = assignment.dependency.complete_name
-        negative_by_ref = self._negative.get(name)
-        old_negative = None if negative_by_ref is None else negative_by_ref.get(ref)
-        if old_negative is None:
-            term = assignment
-        else:
-            term = assignment.intersect(old_negative)
-
-        if term.is_positive():
-            if name in self._negative:
-                del self._negative[name]
-
-            self._positive[name] = term
-        else:
-            if name not in self._negative:
-                self._negative[name] = {}
-
-            self._negative[name][ref] = term
-
-    def satisfier(self, term):  # type: (Term) -> Assignment
-        """
-        Returns the first Assignment in this solution such that the sublist of
-        assignments up to and including that entry collectively satisfies term.
-        """
-        assigned_term = None  # type: Term
-
-        for assignment in self._assignments:
-            if assignment.dependency.complete_name != term.dependency.complete_name:
-                continue
-
-            if (
-                not assignment.dependency.is_root
-                and not assignment.dependency.is_same_package_as(term.dependency)
-            ):
-                if not assignment.is_positive():
-                    continue
-
-                assert not term.is_positive()
-
-                return assignment
-
-            if assigned_term is None:
-                assigned_term = assignment
-            else:
-                assigned_term = assigned_term.intersect(assignment)
-
-            # As soon as we have enough assignments to satisfy term, return them.
-            if assigned_term.satisfies(term):
-                return assignment
-
-        raise RuntimeError("[BUG] {} is not satisfied.".format(term))
-
-    def satisfies(self, term):  # type: (Term) -> bool
-        return self.relation(term) == SetRelation.SUBSET
-
-    def relation(self, term):  # type: (Term) -> int
-        positive = self._positive.get(term.dependency.complete_name)
-        if positive is not None:
-            return positive.relation(term)
-
-        by_ref = self._negative.get(term.dependency.complete_name)
-        if by_ref is None:
-            return SetRelation.OVERLAPPING
-
-        negative = by_ref[term.dependency.complete_name]
-        if negative is None:
-            return SetRelation.OVERLAPPING
-
-        return negative.relation(term)
diff --git a/vendor/poetry/poetry/mixology/result.py b/vendor/poetry/poetry/mixology/result.py
deleted file mode 100644
index 5eadeb75..00000000
--- a/vendor/poetry/poetry/mixology/result.py
+++ /dev/null
@@ -1,13 +0,0 @@
-class SolverResult:
-    def __init__(self, root, packages, attempted_solutions):
-        self._root = root
-        self._packages = packages
-        self._attempted_solutions = attempted_solutions
-
-    @property
-    def packages(self):
-        return self._packages
-
-    @property
-    def attempted_solutions(self):
-        return self._attempted_solutions
diff --git a/vendor/poetry/poetry/mixology/solutions/providers/__init__.py b/vendor/poetry/poetry/mixology/solutions/providers/__init__.py
deleted file mode 100644
index 3faec7b6..00000000
--- a/vendor/poetry/poetry/mixology/solutions/providers/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .python_requirement_solution_provider import PythonRequirementSolutionProvider
diff --git a/vendor/poetry/poetry/mixology/solutions/providers/python_requirement_solution_provider.py b/vendor/poetry/poetry/mixology/solutions/providers/python_requirement_solution_provider.py
deleted file mode 100644
index 4c903677..00000000
--- a/vendor/poetry/poetry/mixology/solutions/providers/python_requirement_solution_provider.py
+++ /dev/null
@@ -1,30 +0,0 @@
-import re
-
-from typing import List
-
-from crashtest.contracts.has_solutions_for_exception import HasSolutionsForException
-from crashtest.contracts.solution import Solution
-
-
-class PythonRequirementSolutionProvider(HasSolutionsForException):
-    def can_solve(self, exception):  # type: (Exception) -> bool
-        from poetry.puzzle.exceptions import SolverProblemError
-
-        if not isinstance(exception, SolverProblemError):
-            return False
-
-        m = re.match(
-            "^The current project's Python requirement (.+) is not compatible "
-            "with some of the required packages Python requirement",
-            str(exception),
-        )
-
-        if not m:
-            return False
-
-        return True
-
-    def get_solutions(self, exception):  # type: (Exception) -> List[Solution]
-        from ..solutions.python_requirement_solution import PythonRequirementSolution
-
-        return [PythonRequirementSolution(exception)]
diff --git a/vendor/poetry/poetry/mixology/solutions/solutions/__init__.py b/vendor/poetry/poetry/mixology/solutions/solutions/__init__.py
deleted file mode 100644
index 838e77b0..00000000
--- a/vendor/poetry/poetry/mixology/solutions/solutions/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .python_requirement_solution import PythonRequirementSolution
diff --git a/vendor/poetry/poetry/mixology/solutions/solutions/python_requirement_solution.py b/vendor/poetry/poetry/mixology/solutions/solutions/python_requirement_solution.py
deleted file mode 100644
index 9ec7cf22..00000000
--- a/vendor/poetry/poetry/mixology/solutions/solutions/python_requirement_solution.py
+++ /dev/null
@@ -1,52 +0,0 @@
-from crashtest.contracts.solution import Solution
-
-
-class PythonRequirementSolution(Solution):
-    def __init__(self, exception):
-        from poetry.core.semver import parse_constraint
-        from poetry.mixology.incompatibility_cause import PythonCause
-
-        self._title = "Check your dependencies Python requirement."
-
-        failure = exception.error
-        version_solutions = []
-        for incompatibility in failure._incompatibility.external_incompatibilities:
-            if isinstance(incompatibility.cause, PythonCause):
-                root_constraint = parse_constraint(
-                    incompatibility.cause.root_python_version
-                )
-                constraint = parse_constraint(incompatibility.cause.python_version)
-
-                version_solutions.append(
-                    "For {}, a possible solution would be "
-                    'to set the `python` property to "{}"'.format(
-                        incompatibility.terms[0].dependency.name,
-                        root_constraint.intersect(constraint),
-                    )
-                )
-
-        description = (
-            "The Python requirement can be specified via the `python` "
-            "or `markers` properties"
-        )
-        if version_solutions:
-            description += "\n\n" + "\n".join(version_solutions)
-
-        description += "\n"
-
-        self._description = description
-
-    @property
-    def solution_title(self) -> str:
-        return self._title
-
-    @property
-    def solution_description(self):
-        return self._description
-
-    @property
-    def documentation_links(self):
-        return [
-            "https://python-poetry.org/docs/dependency-specification/#python-restricted-dependencies",
-            "https://python-poetry.org/docs/dependency-specification/#using-environment-markers",
-        ]
diff --git a/vendor/poetry/poetry/mixology/term.py b/vendor/poetry/poetry/mixology/term.py
deleted file mode 100755
index 29413fce..00000000
--- a/vendor/poetry/poetry/mixology/term.py
+++ /dev/null
@@ -1,171 +0,0 @@
-# -*- coding: utf-8 -*-
-from typing import Union
-
-from poetry.core.packages import Dependency
-
-from .set_relation import SetRelation
-
-
-class Term(object):
-    """
-    A statement about a package which is true or false for a given selection of
-    package versions.
-
-    See https://github.com/dart-lang/pub/tree/master/doc/solver.md#term.
-    """
-
-    def __init__(self, dependency, is_positive):  # type: (Dependency, bool)  -> None
-        self._dependency = dependency
-        self._positive = is_positive
-
-    @property
-    def inverse(self):  # type: () -> Term
-        return Term(self._dependency, not self.is_positive())
-
-    @property
-    def dependency(self):
-        return self._dependency
-
-    @property
-    def constraint(self):
-        return self._dependency.constraint
-
-    def is_positive(self):  # type: () -> bool
-        return self._positive
-
-    def satisfies(self, other):  # type: (Term) -> bool
-        """
-        Returns whether this term satisfies another.
-        """
-        return (
-            self.dependency.complete_name == other.dependency.complete_name
-            and self.relation(other) == SetRelation.SUBSET
-        )
-
-    def relation(self, other):  # type: (Term) -> int
-        """
-        Returns the relationship between the package versions
-        allowed by this term and another.
-        """
-        if self.dependency.complete_name != other.dependency.complete_name:
-            raise ValueError(
-                "{} should refer to {}".format(other, self.dependency.complete_name)
-            )
-
-        other_constraint = other.constraint
-
-        if other.is_positive():
-            if self.is_positive():
-                if not self._compatible_dependency(other.dependency):
-                    return SetRelation.DISJOINT
-
-                # foo ^1.5.0 is a subset of foo ^1.0.0
-                if other_constraint.allows_all(self.constraint):
-                    return SetRelation.SUBSET
-
-                # foo ^2.0.0 is disjoint with foo ^1.0.0
-                if not self.constraint.allows_any(other_constraint):
-                    return SetRelation.DISJOINT
-
-                return SetRelation.OVERLAPPING
-            else:
-                if not self._compatible_dependency(other.dependency):
-                    return SetRelation.OVERLAPPING
-
-                # not foo ^1.0.0 is disjoint with foo ^1.5.0
-                if self.constraint.allows_all(other_constraint):
-                    return SetRelation.DISJOINT
-
-                # not foo ^1.5.0 overlaps foo ^1.0.0
-                # not foo ^2.0.0 is a superset of foo ^1.5.0
-                return SetRelation.OVERLAPPING
-        else:
-            if self.is_positive():
-                if not self._compatible_dependency(other.dependency):
-                    return SetRelation.SUBSET
-
-                # foo ^2.0.0 is a subset of not foo ^1.0.0
-                if not other_constraint.allows_any(self.constraint):
-                    return SetRelation.SUBSET
-
-                # foo ^1.5.0 is disjoint with not foo ^1.0.0
-                if other_constraint.allows_all(self.constraint):
-                    return SetRelation.DISJOINT
-
-                # foo ^1.0.0 overlaps not foo ^1.5.0
-                return SetRelation.OVERLAPPING
-            else:
-                if not self._compatible_dependency(other.dependency):
-                    return SetRelation.OVERLAPPING
-
-                # not foo ^1.0.0 is a subset of not foo ^1.5.0
-                if self.constraint.allows_all(other_constraint):
-                    return SetRelation.SUBSET
-
-                # not foo ^2.0.0 overlaps not foo ^1.0.0
-                # not foo ^1.5.0 is a superset of not foo ^1.0.0
-                return SetRelation.OVERLAPPING
-
-    def intersect(self, other):  # type: (Term) -> Union[Term, None]
-        """
-        Returns a Term that represents the packages
-        allowed by both this term and another
-        """
-        if self.dependency.complete_name != other.dependency.complete_name:
-            raise ValueError(
-                "{} should refer to {}".format(other, self.dependency.complete_name)
-            )
-
-        if self._compatible_dependency(other.dependency):
-            if self.is_positive() != other.is_positive():
-                # foo ^1.0.0 ∩ not foo ^1.5.0 → foo >=1.0.0 <1.5.0
-                positive = self if self.is_positive() else other
-                negative = other if self.is_positive() else self
-
-                return self._non_empty_term(
-                    positive.constraint.difference(negative.constraint), True
-                )
-            elif self.is_positive():
-                # foo ^1.0.0 ∩ foo >=1.5.0 <3.0.0 → foo ^1.5.0
-                return self._non_empty_term(
-                    self.constraint.intersect(other.constraint), True
-                )
-            else:
-                # not foo ^1.0.0 ∩ not foo >=1.5.0 <3.0.0 → not foo >=1.0.0 <3.0.0
-                return self._non_empty_term(
-                    self.constraint.union(other.constraint), False
-                )
-        elif self.is_positive() != other.is_positive():
-            return self if self.is_positive() else other
-        else:
-            return
-
-    def difference(self, other):  # type: (Term) -> Term
-        """
-        Returns a Term that represents packages
-        allowed by this term and not by the other
-        """
-        return self.intersect(other.inverse)
-
-    def _compatible_dependency(self, other):
-        return (
-            self.dependency.is_root
-            or other.is_root
-            or other.is_same_package_as(self.dependency)
-        )
-
-    def _non_empty_term(self, constraint, is_positive):
-        if constraint.is_empty():
-            return
-
-        return Term(self.dependency.with_constraint(constraint), is_positive)
-
-    def __str__(self):
-        return "{} {} ({})".format(
-            "not " if not self.is_positive() else "",
-            self._dependency.pretty_name,
-            self._dependency.pretty_constraint,
-        )
-
-    def __repr__(self):
-        return "".format(str(self))
diff --git a/vendor/poetry/poetry/mixology/version_solver.py b/vendor/poetry/poetry/mixology/version_solver.py
deleted file mode 100755
index e19fe577..00000000
--- a/vendor/poetry/poetry/mixology/version_solver.py
+++ /dev/null
@@ -1,475 +0,0 @@
-# -*- coding: utf-8 -*-
-import time
-
-from typing import TYPE_CHECKING
-from typing import Any
-from typing import Dict
-from typing import List
-from typing import Union
-
-from poetry.core.packages import Dependency
-from poetry.core.packages import Package
-from poetry.core.packages import ProjectPackage
-from poetry.core.semver import Version
-from poetry.core.semver import VersionRange
-
-from .failure import SolveFailure
-from .incompatibility import Incompatibility
-from .incompatibility_cause import ConflictCause
-from .incompatibility_cause import NoVersionsCause
-from .incompatibility_cause import PackageNotFoundCause
-from .incompatibility_cause import RootCause
-from .partial_solution import PartialSolution
-from .result import SolverResult
-from .set_relation import SetRelation
-from .term import Term
-
-
-if TYPE_CHECKING:
-    from poetry.puzzle.provider import Provider
-
-
-_conflict = object()
-
-
-class VersionSolver:
-    """
-    The version solver that finds a set of package versions that satisfy the
-    root package's dependencies.
-
-    See https://github.com/dart-lang/pub/tree/master/doc/solver.md for details
-    on how this solver works.
-    """
-
-    def __init__(
-        self,
-        root,  # type: ProjectPackage
-        provider,  # type: Provider
-        locked=None,  # type: Dict[str, Package]
-        use_latest=None,  # type: List[str]
-    ):
-        self._root = root
-        self._provider = provider
-        self._locked = locked or {}
-
-        if use_latest is None:
-            use_latest = []
-
-        self._use_latest = use_latest
-
-        self._incompatibilities = {}  # type: Dict[str, List[Incompatibility]]
-        self._solution = PartialSolution()
-
-    @property
-    def solution(self):  # type: () -> PartialSolution
-        return self._solution
-
-    def solve(self):  # type: () -> SolverResult
-        """
-        Finds a set of dependencies that match the root package's constraints,
-        or raises an error if no such set is available.
-        """
-        start = time.time()
-        root_dependency = Dependency(self._root.name, self._root.version)
-        root_dependency.is_root = True
-
-        self._add_incompatibility(
-            Incompatibility([Term(root_dependency, False)], RootCause())
-        )
-
-        try:
-            next = self._root.name
-            while next is not None:
-                self._propagate(next)
-                next = self._choose_package_version()
-
-            return self._result()
-        except Exception:
-            raise
-        finally:
-            self._log(
-                "Version solving took {:.3f} seconds.\n"
-                "Tried {} solutions.".format(
-                    time.time() - start, self._solution.attempted_solutions
-                )
-            )
-
-    def _propagate(self, package):  # type: (str) -> None
-        """
-        Performs unit propagation on incompatibilities transitively
-        related to package to derive new assignments for _solution.
-        """
-        changed = set()
-        changed.add(package)
-
-        while changed:
-            package = changed.pop()
-
-            # Iterate in reverse because conflict resolution tends to produce more
-            # general incompatibilities as time goes on. If we look at those first,
-            # we can derive stronger assignments sooner and more eagerly find
-            # conflicts.
-            for incompatibility in reversed(self._incompatibilities[package]):
-                result = self._propagate_incompatibility(incompatibility)
-
-                if result is _conflict:
-                    # If the incompatibility is satisfied by the solution, we use
-                    # _resolve_conflict() to determine the root cause of the conflict as a
-                    # new incompatibility.
-                    #
-                    # It also backjumps to a point in the solution
-                    # where that incompatibility will allow us to derive new assignments
-                    # that avoid the conflict.
-                    root_cause = self._resolve_conflict(incompatibility)
-
-                    # Back jumping erases all the assignments we did at the previous
-                    # decision level, so we clear [changed] and refill it with the
-                    # newly-propagated assignment.
-                    changed.clear()
-                    changed.add(str(self._propagate_incompatibility(root_cause)))
-                    break
-                elif result is not None:
-                    changed.add(result)
-
-    def _propagate_incompatibility(
-        self, incompatibility
-    ):  # type: (Incompatibility) -> Union[str, _conflict, None]
-        """
-        If incompatibility is almost satisfied by _solution, adds the
-        negation of the unsatisfied term to _solution.
-
-        If incompatibility is satisfied by _solution, returns _conflict. If
-        incompatibility is almost satisfied by _solution, returns the
-        unsatisfied term's package name.
-
-        Otherwise, returns None.
-        """
-        # The first entry in incompatibility.terms that's not yet satisfied by
-        # _solution, if one exists. If we find more than one, _solution is
-        # inconclusive for incompatibility and we can't deduce anything.
-        unsatisfied = None
-
-        for term in incompatibility.terms:
-            relation = self._solution.relation(term)
-
-            if relation == SetRelation.DISJOINT:
-                # If term is already contradicted by _solution, then
-                # incompatibility is contradicted as well and there's nothing new we
-                # can deduce from it.
-                return
-            elif relation == SetRelation.OVERLAPPING:
-                # If more than one term is inconclusive, we can't deduce anything about
-                # incompatibility.
-                if unsatisfied is not None:
-                    return
-
-                # If exactly one term in incompatibility is inconclusive, then it's
-                # almost satisfied and [term] is the unsatisfied term. We can add the
-                # inverse of the term to _solution.
-                unsatisfied = term
-
-        # If *all* terms in incompatibility are satisfied by _solution, then
-        # incompatibility is satisfied and we have a conflict.
-        if unsatisfied is None:
-            return _conflict
-
-        self._log(
-            "derived: {}{}".format(
-                "not " if unsatisfied.is_positive() else "", unsatisfied.dependency
-            )
-        )
-
-        self._solution.derive(
-            unsatisfied.dependency, not unsatisfied.is_positive(), incompatibility
-        )
-
-        return unsatisfied.dependency.complete_name
-
-    def _resolve_conflict(
-        self, incompatibility
-    ):  # type: (Incompatibility) -> Incompatibility
-        """
-        Given an incompatibility that's satisfied by _solution,
-        The `conflict resolution`_ constructs a new incompatibility that encapsulates the root
-        cause of the conflict and backtracks _solution until the new
-        incompatibility will allow _propagate() to deduce new assignments.
-
-        Adds the new incompatibility to _incompatibilities and returns it.
-
-        .. _conflict resolution: https://github.com/dart-lang/pub/tree/master/doc/solver.md#conflict-resolution
-        """
-        self._log("conflict: {}".format(incompatibility))
-
-        new_incompatibility = False
-        while not incompatibility.is_failure():
-            # The term in incompatibility.terms that was most recently satisfied by
-            # _solution.
-            most_recent_term = None
-
-            # The earliest assignment in _solution such that incompatibility is
-            # satisfied by _solution up to and including this assignment.
-            most_recent_satisfier = None
-
-            # The difference between most_recent_satisfier and most_recent_term;
-            # that is, the versions that are allowed by most_recent_satisfier and not
-            # by most_recent_term. This is None if most_recent_satisfier totally
-            # satisfies most_recent_term.
-            difference = None
-
-            # The decision level of the earliest assignment in _solution *before*
-            # most_recent_satisfier such that incompatibility is satisfied by
-            # _solution up to and including this assignment plus
-            # most_recent_satisfier.
-            #
-            # Decision level 1 is the level where the root package was selected. It's
-            # safe to go back to decision level 0, but stopping at 1 tends to produce
-            # better error messages, because references to the root package end up
-            # closer to the final conclusion that no solution exists.
-            previous_satisfier_level = 1
-
-            for term in incompatibility.terms:
-                satisfier = self._solution.satisfier(term)
-
-                if most_recent_satisfier is None:
-                    most_recent_term = term
-                    most_recent_satisfier = satisfier
-                elif most_recent_satisfier.index < satisfier.index:
-                    previous_satisfier_level = max(
-                        previous_satisfier_level, most_recent_satisfier.decision_level
-                    )
-                    most_recent_term = term
-                    most_recent_satisfier = satisfier
-                    difference = None
-                else:
-                    previous_satisfier_level = max(
-                        previous_satisfier_level, satisfier.decision_level
-                    )
-
-                if most_recent_term == term:
-                    # If most_recent_satisfier doesn't satisfy most_recent_term on its
-                    # own, then the next-most-recent satisfier may be the one that
-                    # satisfies the remainder.
-                    difference = most_recent_satisfier.difference(most_recent_term)
-                    if difference is not None:
-                        previous_satisfier_level = max(
-                            previous_satisfier_level,
-                            self._solution.satisfier(difference.inverse).decision_level,
-                        )
-
-            # If most_recent_identifier is the only satisfier left at its decision
-            # level, or if it has no cause (indicating that it's a decision rather
-            # than a derivation), then incompatibility is the root cause. We then
-            # backjump to previous_satisfier_level, where incompatibility is
-            # guaranteed to allow _propagate to produce more assignments.
-            if (
-                previous_satisfier_level < most_recent_satisfier.decision_level
-                or most_recent_satisfier.cause is None
-            ):
-                self._solution.backtrack(previous_satisfier_level)
-                if new_incompatibility:
-                    self._add_incompatibility(incompatibility)
-
-                return incompatibility
-
-            # Create a new incompatibility by combining incompatibility with the
-            # incompatibility that caused most_recent_satisfier to be assigned. Doing
-            # this iteratively constructs an incompatibility that's guaranteed to be
-            # true (that is, we know for sure no solution will satisfy the
-            # incompatibility) while also approximating the intuitive notion of the
-            # "root cause" of the conflict.
-            new_terms = []
-            for term in incompatibility.terms:
-                if term != most_recent_term:
-                    new_terms.append(term)
-
-            for term in most_recent_satisfier.cause.terms:
-                if term.dependency != most_recent_satisfier.dependency:
-                    new_terms.append(term)
-
-            # The most_recent_satisfier may not satisfy most_recent_term on its own
-            # if a collection of constraints on most_recent_term only
-            # satisfies it together. For example, if most_recent_term is
-            # `foo ^1.0.0` and _solution contains `[foo >=1.0.0,
-            # foo <2.0.0]`, then most_recent_satisfier will be `foo <2.0.0` even
-            # though it doesn't totally satisfy `foo ^1.0.0`.
-            #
-            # In this case, we add `not (most_recent_satisfier \ most_recent_term)` to
-            # the incompatibility as well. See the `algorithm documentation`_ for
-            # details.
-            #
-            # .. _algorithm documentation: https://github.com/dart-lang/pub/tree/master/doc/solver.md#conflict-resolution
-            if difference is not None:
-                new_terms.append(difference.inverse)
-
-            incompatibility = Incompatibility(
-                new_terms, ConflictCause(incompatibility, most_recent_satisfier.cause)
-            )
-            new_incompatibility = True
-
-            partially = "" if difference is None else " partially"
-            bang = "!"
-            self._log(
-                "{} {} is{} satisfied by {}".format(
-                    bang, most_recent_term, partially, most_recent_satisfier
-                )
-            )
-            self._log(
-                '{} which is caused by "{}"'.format(bang, most_recent_satisfier.cause)
-            )
-            self._log("{} thus: {}".format(bang, incompatibility))
-
-        raise SolveFailure(incompatibility)
-
-    def _choose_package_version(self):  # type: () -> Union[str, None]
-        """
-        Tries to select a version of a required package.
-
-        Returns the name of the package whose incompatibilities should be
-        propagated by _propagate(), or None indicating that version solving is
-        complete and a solution has been found.
-        """
-        unsatisfied = self._solution.unsatisfied
-        if not unsatisfied:
-            return
-
-        # Prefer packages with as few remaining versions as possible,
-        # so that if a conflict is necessary it's forced quickly.
-        def _get_min(dependency):
-            if dependency.name in self._use_latest:
-                # If we're forced to use the latest version of a package, it effectively
-                # only has one version to choose from.
-                return not dependency.marker.is_any(), 1
-
-            locked = self._get_locked(dependency)
-            if locked and (
-                dependency.constraint.allows(locked.version)
-                or locked.is_prerelease()
-                and dependency.constraint.allows(locked.version.next_patch)
-            ):
-                return not dependency.marker.is_any(), 1
-
-            # VCS, URL, File or Directory dependencies
-            # represent a single version
-            if (
-                dependency.is_vcs()
-                or dependency.is_url()
-                or dependency.is_file()
-                or dependency.is_directory()
-            ):
-                return not dependency.marker.is_any(), 1
-
-            try:
-                return (
-                    not dependency.marker.is_any(),
-                    len(self._provider.search_for(dependency)),
-                )
-            except ValueError:
-                return not dependency.marker.is_any(), 0
-
-        if len(unsatisfied) == 1:
-            dependency = unsatisfied[0]
-        else:
-            dependency = min(unsatisfied, key=_get_min)
-
-        locked = self._get_locked(dependency)
-        if locked is None or not dependency.constraint.allows(locked.version):
-            try:
-                packages = self._provider.search_for(dependency)
-            except ValueError as e:
-                self._add_incompatibility(
-                    Incompatibility([Term(dependency, True)], PackageNotFoundCause(e))
-                )
-                return dependency.complete_name
-
-            try:
-                version = packages[0]
-            except IndexError:
-                version = None
-
-            if version is None:
-                # If there are no versions that satisfy the constraint,
-                # add an incompatibility that indicates that.
-                self._add_incompatibility(
-                    Incompatibility([Term(dependency, True)], NoVersionsCause())
-                )
-
-                return dependency.complete_name
-        else:
-            version = locked
-
-        version = self._provider.complete_package(version)
-
-        conflict = False
-        for incompatibility in self._provider.incompatibilities_for(version):
-            self._add_incompatibility(incompatibility)
-
-            # If an incompatibility is already satisfied, then selecting version
-            # would cause a conflict.
-            #
-            # We'll continue adding its dependencies, then go back to
-            # unit propagation which will guide us to choose a better version.
-            conflict = conflict or all(
-                [
-                    term.dependency.complete_name == dependency.complete_name
-                    or self._solution.satisfies(term)
-                    for term in incompatibility.terms
-                ]
-            )
-
-        if not conflict:
-            self._solution.decide(version)
-            self._log(
-                "selecting {} ({})".format(
-                    version.complete_name, version.full_pretty_version
-                )
-            )
-
-        return dependency.complete_name
-
-    def _excludes_single_version(self, constraint):  # type: (Any) -> bool
-        return isinstance(VersionRange().difference(constraint), Version)
-
-    def _result(self):  # type: () -> SolverResult
-        """
-        Creates a #SolverResult from the decisions in _solution
-        """
-        decisions = self._solution.decisions
-
-        return SolverResult(
-            self._root,
-            [p for p in decisions if not p.is_root()],
-            self._solution.attempted_solutions,
-        )
-
-    def _add_incompatibility(self, incompatibility):  # type: (Incompatibility) -> None
-        self._log("fact: {}".format(incompatibility))
-
-        for term in incompatibility.terms:
-            if term.dependency.complete_name not in self._incompatibilities:
-                self._incompatibilities[term.dependency.complete_name] = []
-
-            if (
-                incompatibility
-                in self._incompatibilities[term.dependency.complete_name]
-            ):
-                continue
-
-            self._incompatibilities[term.dependency.complete_name].append(
-                incompatibility
-            )
-
-    def _get_locked(self, dependency):  # type: (Dependency) -> Union[Package, None]
-        if dependency.name in self._use_latest:
-            return
-
-        locked = self._locked.get(dependency.name)
-        if not locked:
-            return
-
-        if not dependency.is_same_package_as(locked):
-            return
-
-        return locked
-
-    def _log(self, text):
-        self._provider.debug(text, self._solution.attempted_solutions)
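
The `_choose_package_version` method deleted above drives the solver's search
order: among all currently unsatisfied dependencies it picks the one with the
fewest candidate versions, so that an unavoidable conflict surfaces as early as
possible. A minimal sketch of that heuristic, assuming a hypothetical
`candidates` callable in place of `Provider.search_for`:

# Sketch of the "fewest remaining versions" heuristic from
# _choose_package_version. `candidates` is a hypothetical stand-in for
# Provider.search_for; forced-latest dependencies count as having exactly
# one version to choose from.
def pick_next(unsatisfied, candidates, use_latest=frozenset()):
    def weight(dep):
        if dep.name in use_latest:
            return 1
        return len(candidates(dep))

    # Fewer candidates first, so that if a conflict is necessary
    # it is forced as quickly as possible.
    return min(unsatisfied, key=weight)
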
diff --git a/vendor/poetry/poetry/packages/__init__.py b/vendor/poetry/poetry/packages/__init__.py
deleted file mode 100644
index 555a8317..00000000
--- a/vendor/poetry/poetry/packages/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from .dependency_package import DependencyPackage
-from .locker import Locker
-from .package_collection import PackageCollection
diff --git a/vendor/poetry/poetry/packages/dependency_package.py b/vendor/poetry/poetry/packages/dependency_package.py
deleted file mode 100644
index 60c51007..00000000
--- a/vendor/poetry/poetry/packages/dependency_package.py
+++ /dev/null
@@ -1,51 +0,0 @@
-from typing import List
-
-from poetry.core.packages.dependency import Dependency
-from poetry.core.packages.package import Package
-
-
-class DependencyPackage(object):
-    def __init__(self, dependency, package):  # type: (Dependency, Package) -> None
-        self._dependency = dependency
-        self._package = package
-
-    @property
-    def dependency(self):  # type: () -> Dependency
-        return self._dependency
-
-    @property
-    def package(self):  # type: () -> Package
-        return self._package
-
-    def clone(self):  # type: () -> DependencyPackage
-        return self.__class__(self._dependency, self._package.clone())
-
-    def with_features(self, features):  # type: (List[str]) -> "DependencyPackage"
-        return self.__class__(self._dependency, self._package.with_features(features))
-
-    def without_features(self):  # type: () -> "DependencyPackage"
-        return self.with_features([])
-
-    def __getattr__(self, name):
-        return getattr(self._package, name)
-
-    def __setattr__(self, key, value):
-        if key in {"_dependency", "_package"}:
-            return super(DependencyPackage, self).__setattr__(key, value)
-
-        setattr(self._package, key, value)
-
-    def __str__(self):
-        return str(self._package)
-
-    def __repr__(self):
-        return repr(self._package)
-
-    def __hash__(self):
-        return hash(self._package)
-
-    def __eq__(self, other):
-        if isinstance(other, DependencyPackage):
-            other = other.package
-
-        return self._package == other
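
`DependencyPackage` above is a transparent proxy: reads and writes of ordinary
attributes fall through to the wrapped `Package`, while the proxy's own private
slots stay on the wrapper. The same delegation pattern in isolation, as a
minimal sketch:

# Stand-alone sketch of the delegation used by DependencyPackage.
class Proxy(object):
    def __init__(self, wrapped):
        self._wrapped = wrapped  # routed through __setattr__ below

    def __getattr__(self, name):
        # Only reached when normal lookup fails, i.e. for attributes
        # that live on the wrapped object.
        return getattr(self._wrapped, name)

    def __setattr__(self, key, value):
        if key == "_wrapped":
            return super(Proxy, self).__setattr__(key, value)
        setattr(self._wrapped, key, value)
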
diff --git a/vendor/poetry/poetry/packages/locker.py b/vendor/poetry/poetry/packages/locker.py
deleted file mode 100644
index 4ab23342..00000000
--- a/vendor/poetry/poetry/packages/locker.py
+++ /dev/null
@@ -1,615 +0,0 @@
-import json
-import logging
-import os
-import re
-
-from copy import deepcopy
-from hashlib import sha256
-from typing import Dict
-from typing import Iterable
-from typing import Iterator
-from typing import List
-from typing import Optional
-from typing import Sequence
-from typing import Set
-from typing import Tuple
-from typing import Union
-
-from tomlkit import array
-from tomlkit import document
-from tomlkit import inline_table
-from tomlkit import item
-from tomlkit import table
-from tomlkit.exceptions import TOMLKitError
-
-import poetry.repositories
-
-from poetry.core.packages import dependency_from_pep_508
-from poetry.core.packages.dependency import Dependency
-from poetry.core.packages.package import Package
-from poetry.core.semver import parse_constraint
-from poetry.core.semver.version import Version
-from poetry.core.toml.file import TOMLFile
-from poetry.core.version.markers import parse_marker
-from poetry.core.version.requirements import InvalidRequirement
-from poetry.packages import DependencyPackage
-from poetry.utils._compat import OrderedDict
-from poetry.utils._compat import Path
-from poetry.utils.extras import get_extra_package_names
-
-
-logger = logging.getLogger(__name__)
-
-
-class Locker(object):
-
-    _VERSION = "1.1"
-
-    _relevant_keys = ["dependencies", "dev-dependencies", "source", "extras"]
-
-    def __init__(self, lock, local_config):  # type: (Path, dict) -> None
-        self._lock = TOMLFile(lock)
-        self._local_config = local_config
-        self._lock_data = None
-        self._content_hash = self._get_content_hash()
-
-    @property
-    def lock(self):  # type: () -> TOMLFile
-        return self._lock
-
-    @property
-    def lock_data(self):
-        if self._lock_data is None:
-            self._lock_data = self._get_lock_data()
-
-        return self._lock_data
-
-    def is_locked(self):  # type: () -> bool
-        """
-        Checks whether the locker has been locked (lockfile found).
-        """
-        if not self._lock.exists():
-            return False
-
-        return "package" in self.lock_data
-
-    def is_fresh(self):  # type: () -> bool
-        """
-        Checks whether the lock file is still up to date with the current hash.
-        """
-        lock = self._lock.read()
-        metadata = lock.get("metadata", {})
-
-        if "content-hash" in metadata:
-            return self._content_hash == lock["metadata"]["content-hash"]
-
-        return False
-
-    def locked_repository(
-        self, with_dev_reqs=False
-    ):  # type: (bool) -> poetry.repositories.Repository
-        """
-        Searches and returns a repository of locked packages.
-        """
-        from poetry.factory import Factory
-
-        if not self.is_locked():
-            return poetry.repositories.Repository()
-
-        lock_data = self.lock_data
-        packages = poetry.repositories.Repository()
-
-        if with_dev_reqs:
-            locked_packages = lock_data["package"]
-        else:
-            locked_packages = [
-                p for p in lock_data["package"] if p["category"] == "main"
-            ]
-
-        if not locked_packages:
-            return packages
-
-        for info in locked_packages:
-            source = info.get("source", {})
-            source_type = source.get("type")
-            url = source.get("url")
-            if source_type in ["directory", "file"]:
-                url = self._lock.path.parent.joinpath(url).resolve().as_posix()
-
-            package = Package(
-                info["name"],
-                info["version"],
-                info["version"],
-                source_type=source_type,
-                source_url=url,
-                source_reference=source.get("reference"),
-                source_resolved_reference=source.get("resolved_reference"),
-            )
-            package.description = info.get("description", "")
-            package.category = info["category"]
-            package.optional = info["optional"]
-            if "hashes" in lock_data["metadata"]:
-                # Old lock so we create dummy files from the hashes
-                package.files = [
-                    {"name": h, "hash": h}
-                    for h in lock_data["metadata"]["hashes"][info["name"]]
-                ]
-            else:
-                package.files = lock_data["metadata"]["files"][info["name"]]
-
-            package.python_versions = info["python-versions"]
-            extras = info.get("extras", {})
-            if extras:
-                for name, deps in extras.items():
-                    package.extras[name] = []
-
-                    for dep in deps:
-                        try:
-                            dependency = dependency_from_pep_508(dep)
-                        except InvalidRequirement:
-                            # handle lock files with invalid PEP 508
-                            m = re.match(r"^(.+?)(?:\[(.+?)])?(?:\s+\((.+)\))?$", dep)
-                            dep_name = m.group(1)
-                            extras = m.group(2) or ""
-                            constraint = m.group(3) or "*"
-                            dependency = Dependency(
-                                dep_name, constraint, extras=extras.split(",")
-                            )
-                        package.extras[name].append(dependency)
-
-            if "marker" in info:
-                package.marker = parse_marker(info["marker"])
-            else:
-                # Compatibility for old locks
-                if "requirements" in info:
-                    dep = Dependency("foo", "0.0.0")
-                    for name, value in info["requirements"].items():
-                        if name == "python":
-                            dep.python_versions = value
-                        elif name == "platform":
-                            dep.platform = value
-
-                    split_dep = dep.to_pep_508(False).split(";")
-                    if len(split_dep) > 1:
-                        package.marker = parse_marker(split_dep[1].strip())
-
-            for dep_name, constraint in info.get("dependencies", {}).items():
-
-                root_dir = self._lock.path.parent
-                if package.source_type == "directory":
-                    # the root dir should be the package's source directory relative to the lock file path
-                    root_dir = Path(package.source_url)
-
-                if isinstance(constraint, list):
-                    for c in constraint:
-                        package.add_dependency(
-                            Factory.create_dependency(dep_name, c, root_dir=root_dir)
-                        )
-
-                    continue
-
-                package.add_dependency(
-                    Factory.create_dependency(dep_name, constraint, root_dir=root_dir)
-                )
-
-            if "develop" in info:
-                package.develop = info["develop"]
-
-            packages.add_package(package)
-
-        return packages
-
-    @staticmethod
-    def __get_locked_package(
-        _dependency, packages_by_name
-    ):  # type: (Dependency, Dict[str, List[Package]]) -> Optional[Package]
-        """
-        Internal helper to identify the corresponding locked package using the
-        dependency's version constraint.
-        """
-        for _package in packages_by_name.get(_dependency.name, []):
-            if _dependency.constraint.allows(_package.version):
-                return _package
-        return None
-
-    @classmethod
-    def __walk_dependency_level(
-        cls,
-        dependencies,
-        level,
-        pinned_versions,
-        packages_by_name,
-        project_level_dependencies,
-        nested_dependencies,
-    ):  # type: (List[Dependency], int,  bool, Dict[str, List[Package]], Set[str], Dict[Tuple[str, str], Dependency]) -> Dict[Tuple[str, str], Dependency]
-        if not dependencies:
-            return nested_dependencies
-
-        next_level_dependencies = []
-
-        for requirement in dependencies:
-            key = (requirement.name, requirement.pretty_constraint)
-            locked_package = cls.__get_locked_package(requirement, packages_by_name)
-
-            if locked_package:
-                # Create the dependency from the locked package to retain its metadata;
-                # otherwise we can end up with incorrect nested dependencies.
-                marker = requirement.marker
-                requirement = locked_package.to_dependency()
-                requirement.marker = requirement.marker.intersect(marker)
-
-                key = (requirement.name, requirement.pretty_constraint)
-
-                if pinned_versions:
-                    requirement.set_constraint(
-                        locked_package.to_dependency().constraint
-                    )
-
-                for require in locked_package.requires:
-                    if require.marker.is_empty():
-                        require.marker = requirement.marker
-                    else:
-                        require.marker = require.marker.intersect(requirement.marker)
-
-                    require.marker = require.marker.intersect(locked_package.marker)
-
-                    if key not in nested_dependencies:
-                        next_level_dependencies.append(require)
-
-            if requirement.name in project_level_dependencies and level == 0:
-                # project level dependencies take precedence
-                continue
-
-            if not locked_package:
-                # we make a copy to avoid any side-effects
-                requirement = deepcopy(requirement)
-
-            if key not in nested_dependencies:
-                nested_dependencies[key] = requirement
-            else:
-                nested_dependencies[key].marker = nested_dependencies[
-                    key
-                ].marker.intersect(requirement.marker)
-
-        return cls.__walk_dependency_level(
-            dependencies=next_level_dependencies,
-            level=level + 1,
-            pinned_versions=pinned_versions,
-            packages_by_name=packages_by_name,
-            project_level_dependencies=project_level_dependencies,
-            nested_dependencies=nested_dependencies,
-        )
-
-    @classmethod
-    def get_project_dependencies(
-        cls, project_requires, locked_packages, pinned_versions=False, with_nested=False
-    ):  # type: (List[Dependency], List[Package], bool, bool) -> Iterable[Dependency]
-        # Group package entries by name; this is required because requirements might use different constraints.
-        packages_by_name = {}
-        for pkg in locked_packages:
-            if pkg.name not in packages_by_name:
-                packages_by_name[pkg.name] = []
-            packages_by_name[pkg.name].append(pkg)
-
-        project_level_dependencies = set()
-        dependencies = []
-
-        for dependency in project_requires:
-            dependency = deepcopy(dependency)
-            locked_package = cls.__get_locked_package(dependency, packages_by_name)
-            if locked_package:
-                locked_dependency = locked_package.to_dependency()
-                locked_dependency.marker = dependency.marker.intersect(
-                    locked_package.marker
-                )
-
-                if not pinned_versions:
-                    locked_dependency.set_constraint(dependency.constraint)
-
-                dependency = locked_dependency
-
-            project_level_dependencies.add(dependency.name)
-            dependencies.append(dependency)
-
-        if not with_nested:
-            # return only the project-level dependencies
-            return dependencies
-
-        nested_dependencies = cls.__walk_dependency_level(
-            dependencies=dependencies,
-            level=0,
-            pinned_versions=pinned_versions,
-            packages_by_name=packages_by_name,
-            project_level_dependencies=project_level_dependencies,
-            nested_dependencies=dict(),
-        )
-
-        # Merge same dependencies using marker union
-        for requirement in dependencies:
-            key = (requirement.name, requirement.pretty_constraint)
-            if key not in nested_dependencies:
-                nested_dependencies[key] = requirement
-            else:
-                nested_dependencies[key].marker = nested_dependencies[key].marker.union(
-                    requirement.marker
-                )
-
-        return sorted(nested_dependencies.values(), key=lambda x: x.name.lower())
-
-    def get_project_dependency_packages(
-        self, project_requires, dev=False, extras=None
-    ):  # type: (List[Dependency], bool, Optional[Union[bool, Sequence[str]]]) -> Iterator[DependencyPackage]
-        repository = self.locked_repository(with_dev_reqs=dev)
-
-        # Build a set of all packages required by our selected extras
-        extra_package_names = (
-            None if (isinstance(extras, bool) and extras is True) else ()
-        )
-
-        if extra_package_names is not None:
-            extra_package_names = set(
-                get_extra_package_names(
-                    repository.packages, self.lock_data.get("extras", {}), extras or (),
-                )
-            )
-
-        # If a package is optional and we haven't opted in to it, do not select it.
-        selected = []
-        for dependency in project_requires:
-            try:
-                package = repository.find_packages(dependency=dependency)[0]
-            except IndexError:
-                continue
-
-            if extra_package_names is not None and (
-                package.optional and package.name not in extra_package_names
-            ):
-                # a package is locked as optional, but is not activated via extras
-                continue
-
-            selected.append(dependency)
-
-        for dependency in self.get_project_dependencies(
-            project_requires=selected,
-            locked_packages=repository.packages,
-            with_nested=True,
-        ):
-            try:
-                package = repository.find_packages(dependency=dependency)[0]
-            except IndexError:
-                continue
-
-            for extra in dependency.extras:
-                package.requires_extras.append(extra)
-
-            yield DependencyPackage(dependency=dependency, package=package)
-
-    def set_lock_data(self, root, packages):  # type: (Package, List[Package]) -> bool
-        files = table()
-        packages = self._lock_packages(packages)
-        # Retrieving hashes
-        for package in packages:
-            if package["name"] not in files:
-                files[package["name"]] = []
-
-            for f in package["files"]:
-                file_metadata = inline_table()
-                for k, v in sorted(f.items()):
-                    file_metadata[k] = v
-
-                files[package["name"]].append(file_metadata)
-
-            if files[package["name"]]:
-                files[package["name"]] = item(files[package["name"]]).multiline(True)
-
-            del package["files"]
-
-        lock = document()
-        lock["package"] = packages
-
-        if root.extras:
-            lock["extras"] = {
-                extra: [dep.pretty_name for dep in deps]
-                for extra, deps in sorted(root.extras.items())
-            }
-
-        lock["metadata"] = OrderedDict(
-            [
-                ("lock-version", self._VERSION),
-                ("python-versions", root.python_versions),
-                ("content-hash", self._content_hash),
-                ("files", files),
-            ]
-        )
-
-        if not self.is_locked() or lock != self.lock_data:
-            self._write_lock_data(lock)
-
-            return True
-
-        return False
-
-    def _write_lock_data(self, data):
-        self.lock.write(data)
-
-        # Checking lock file data consistency
-        if data != self.lock.read():
-            raise RuntimeError("Inconsistent lock file data.")
-
-        self._lock_data = None
-
-    def _get_content_hash(self):  # type: () -> str
-        """
-        Returns the sha256 hash of the sorted content of the pyproject file.
-        """
-        content = self._local_config
-
-        relevant_content = {}
-        for key in self._relevant_keys:
-            relevant_content[key] = content.get(key)
-
-        content_hash = sha256(
-            json.dumps(relevant_content, sort_keys=True).encode()
-        ).hexdigest()
-
-        return content_hash
-
-    def _get_lock_data(self):  # type: () -> dict
-        if not self._lock.exists():
-            raise RuntimeError("No lockfile found. Unable to read locked packages")
-
-        try:
-            lock_data = self._lock.read()
-        except TOMLKitError as e:
-            raise RuntimeError("Unable to read the lock file ({}).".format(e))
-
-        lock_version = Version.parse(lock_data["metadata"].get("lock-version", "1.0"))
-        current_version = Version.parse(self._VERSION)
-        # We expect the locker to be able to read lock files
-        # from the same semantic versioning range
-        accepted_versions = parse_constraint(
-            "^{}".format(Version(current_version.major, 0))
-        )
-        lock_version_allowed = accepted_versions.allows(lock_version)
-        if lock_version_allowed and current_version < lock_version:
-            logger.warning(
-                "The lock file might not be compatible with the current version of Poetry.\n"
-                "Upgrade Poetry to ensure the lock file is read properly or, alternatively, "
-                "regenerate the lock file with the `poetry lock` command."
-            )
-        elif not lock_version_allowed:
-            raise RuntimeError(
-                "The lock file is not compatible with the current version of Poetry.\n"
-                "Upgrade Poetry to be able to read the lock file or, alternatively, "
-                "regenerate the lock file with the `poetry lock` command."
-            )
-
-        return lock_data
-
-    def _lock_packages(
-        self, packages
-    ):  # type: (List['poetry.packages.Package']) -> list
-        locked = []
-
-        for package in sorted(packages, key=lambda x: x.name):
-            spec = self._dump_package(package)
-
-            locked.append(spec)
-
-        return locked
-
-    def _dump_package(self, package):  # type: (Package) -> dict
-        dependencies = OrderedDict()
-        for dependency in sorted(package.requires, key=lambda d: d.name):
-            if dependency.pretty_name not in dependencies:
-                dependencies[dependency.pretty_name] = []
-
-            constraint = inline_table()
-
-            if dependency.is_directory() or dependency.is_file():
-                constraint["path"] = dependency.path.as_posix()
-
-                if dependency.is_directory() and dependency.develop:
-                    constraint["develop"] = True
-            elif dependency.is_url():
-                constraint["url"] = dependency.url
-            elif dependency.is_vcs():
-                constraint[dependency.vcs] = dependency.source
-
-                if dependency.branch:
-                    constraint["branch"] = dependency.branch
-                elif dependency.tag:
-                    constraint["tag"] = dependency.tag
-                elif dependency.rev:
-                    constraint["rev"] = dependency.rev
-            else:
-                constraint["version"] = str(dependency.pretty_constraint)
-
-            if dependency.extras:
-                constraint["extras"] = sorted(dependency.extras)
-
-            if dependency.is_optional():
-                constraint["optional"] = True
-
-            if not dependency.marker.is_any():
-                constraint["markers"] = str(dependency.marker)
-
-            dependencies[dependency.pretty_name].append(constraint)
-
-        # All the constraints should have the same type,
-        # but we want to simplify them if possible
-        for dependency, constraints in tuple(dependencies.items()):
-            if all(
-                len(constraint) == 1 and "version" in constraint
-                for constraint in constraints
-            ):
-                dependencies[dependency] = [
-                    constraint["version"] for constraint in constraints
-                ]
-
-        data = OrderedDict(
-            [
-                ("name", package.pretty_name),
-                ("version", package.pretty_version),
-                ("description", package.description or ""),
-                ("category", package.category),
-                ("optional", package.optional),
-                ("python-versions", package.python_versions),
-                ("files", sorted(package.files, key=lambda x: x["file"])),
-            ]
-        )
-
-        if dependencies:
-            data["dependencies"] = table()
-            for k, constraints in dependencies.items():
-                if len(constraints) == 1:
-                    data["dependencies"][k] = constraints[0]
-                else:
-                    data["dependencies"][k] = array().multiline(True)
-                    for constraint in constraints:
-                        data["dependencies"][k].append(constraint)
-
-        if package.extras:
-            extras = OrderedDict()
-            for name, deps in sorted(package.extras.items()):
-                # TODO: This should use dep.to_pep_508() once this is fixed
-                # https://github.com/python-poetry/poetry-core/pull/102
-                extras[name] = sorted(
-                    dep.base_pep_508_name if not dep.constraint.is_any() else dep.name
-                    for dep in deps
-                )
-
-            data["extras"] = extras
-
-        if package.source_url:
-            url = package.source_url
-            if package.source_type in ["file", "directory"]:
-                # The lock file should only store paths relative to the root project
-                url = Path(
-                    os.path.relpath(
-                        Path(url).as_posix(), self._lock.path.parent.as_posix()
-                    )
-                ).as_posix()
-
-            data["source"] = OrderedDict()
-
-            if package.source_type:
-                data["source"]["type"] = package.source_type
-
-            data["source"]["url"] = url
-
-            if package.source_reference:
-                data["source"]["reference"] = package.source_reference
-
-            if package.source_resolved_reference:
-                data["source"]["resolved_reference"] = package.source_resolved_reference
-
-            if package.source_type in ["directory", "git"]:
-                data["develop"] = package.develop
-
-        return data
-
-
-class NullLocker(Locker):
-    def set_lock_data(self, root, packages):  # type: (Package, List[Package]) -> None
-        pass
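
`Locker.is_fresh` above compares a hash that `_get_content_hash` computes over
only the resolution-relevant keys of the local pyproject config, so cosmetic
edits to the file do not invalidate the lock. The hashing step in isolation, as
a minimal sketch:

import json
from hashlib import sha256

# Sketch of Locker._get_content_hash: only keys that can affect resolution
# are hashed, with sorted keys for a deterministic digest.
RELEVANT_KEYS = ["dependencies", "dev-dependencies", "source", "extras"]

def content_hash(local_config):
    relevant = {key: local_config.get(key) for key in RELEVANT_KEYS}
    return sha256(json.dumps(relevant, sort_keys=True).encode()).hexdigest()
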
diff --git a/vendor/poetry/poetry/packages/package_collection.py b/vendor/poetry/poetry/packages/package_collection.py
deleted file mode 100644
index e10ea635..00000000
--- a/vendor/poetry/poetry/packages/package_collection.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from .dependency_package import DependencyPackage
-
-
-class PackageCollection(list):
-    def __init__(self, dependency, packages=None):
-        self._dependency = dependency
-
-        if packages is None:
-            packages = []
-
-        super(PackageCollection, self).__init__()
-
-        for package in packages:
-            self.append(package)
-
-    def append(self, package):
-        if isinstance(package, DependencyPackage):
-            package = package.package
-
-        package = DependencyPackage(self._dependency, package)
-
-        return super(PackageCollection, self).append(package)
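
`PackageCollection.append` above normalizes every element: a bare package is
wrapped for this collection's dependency, and an already-wrapped
`DependencyPackage` is unwrapped first so it is re-bound. The same
normalization with tiny stand-in classes, as a sketch:

# Stand-in classes so the normalization in PackageCollection.append is
# visible in isolation; Bound plays the role of DependencyPackage.
class Bound(object):
    def __init__(self, dep, pkg):
        self.dep, self.pkg = dep, pkg

class Collection(list):
    def __init__(self, dep, packages=()):
        self._dep = dep
        super(Collection, self).__init__()
        for p in packages:
            self.append(p)

    def append(self, package):
        if isinstance(package, Bound):
            package = package.pkg  # unwrap, then re-bind below
        return super(Collection, self).append(Bound(self._dep, package))
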
diff --git a/vendor/poetry/poetry/poetry.py b/vendor/poetry/poetry/poetry.py
deleted file mode 100644
index 4878f0a2..00000000
--- a/vendor/poetry/poetry/poetry.py
+++ /dev/null
@@ -1,57 +0,0 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-from poetry.core.packages import ProjectPackage
-from poetry.core.poetry import Poetry as BasePoetry
-
-from .__version__ import __version__
-from .config.config import Config
-from .packages import Locker
-from .repositories.pool import Pool
-from .utils._compat import Path
-
-
-class Poetry(BasePoetry):
-
-    VERSION = __version__
-
-    def __init__(
-        self,
-        file,  # type: Path
-        local_config,  # type: dict
-        package,  # type: ProjectPackage
-        locker,  # type: Locker
-        config,  # type: Config
-    ):
-        super(Poetry, self).__init__(file, local_config, package)
-
-        self._locker = locker
-        self._config = config
-        self._pool = Pool()
-
-    @property
-    def locker(self):  # type: () -> Locker
-        return self._locker
-
-    @property
-    def pool(self):  # type: () -> Pool
-        return self._pool
-
-    @property
-    def config(self):  # type: () -> Config
-        return self._config
-
-    def set_locker(self, locker):  # type: (Locker) -> Poetry
-        self._locker = locker
-
-        return self
-
-    def set_pool(self, pool):  # type: (Pool) -> Poetry
-        self._pool = pool
-
-        return self
-
-    def set_config(self, config):  # type: (Config) -> Poetry
-        self._config = config
-
-        return self
diff --git a/vendor/poetry/poetry/publishing/__init__.py b/vendor/poetry/poetry/publishing/__init__.py
deleted file mode 100644
index 43d6b249..00000000
--- a/vendor/poetry/poetry/publishing/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .publisher import Publisher
diff --git a/vendor/poetry/poetry/publishing/publisher.py b/vendor/poetry/poetry/publishing/publisher.py
deleted file mode 100644
index 67515f77..00000000
--- a/vendor/poetry/poetry/publishing/publisher.py
+++ /dev/null
@@ -1,98 +0,0 @@
-import logging
-
-from typing import Optional
-
-from poetry.utils._compat import Path
-from poetry.utils.helpers import get_cert
-from poetry.utils.helpers import get_client_cert
-from poetry.utils.password_manager import PasswordManager
-
-from .uploader import Uploader
-
-
-logger = logging.getLogger(__name__)
-
-
-class Publisher:
-    """
-    Registers and publishes packages to remote repositories.
-    """
-
-    def __init__(self, poetry, io):
-        self._poetry = poetry
-        self._package = poetry.package
-        self._io = io
-        self._uploader = Uploader(poetry, io)
-        self._password_manager = PasswordManager(poetry.config)
-
-    @property
-    def files(self):
-        return self._uploader.files
-
-    def publish(
-        self,
-        repository_name,
-        username,
-        password,
-        cert=None,
-        client_cert=None,
-        dry_run=False,
-    ):  # type: (Optional[str], Optional[str], Optional[str], Optional[Path], Optional[Path], Optional[bool]) -> None
-        if not repository_name:
-            url = "https://upload.pypi.org/legacy/"
-            repository_name = "pypi"
-        else:
-            # Retrieving config information
-            url = self._poetry.config.get("repositories.{}.url".format(repository_name))
-            if url is None:
-                raise RuntimeError(
-                    "Repository {} is not defined".format(repository_name)
-                )
-
-        if not (username and password):
-            # Check if we have a token first
-            token = self._password_manager.get_pypi_token(repository_name)
-            if token:
-                logger.debug("Found an API token for {}.".format(repository_name))
-                username = "__token__"
-                password = token
-            else:
-                auth = self._password_manager.get_http_auth(repository_name)
-                if auth:
-                    logger.debug(
-                        "Found authentication information for {}.".format(
-                            repository_name
-                        )
-                    )
-                    username = auth["username"]
-                    password = auth["password"]
-
-        resolved_client_cert = client_cert or get_client_cert(
-            self._poetry.config, repository_name
-        )
-        # Request missing credentials, but only if no client cert is defined.
-        if not resolved_client_cert:
-            if username is None:
-                username = self._io.ask("Username:")
-
-            # Skip the password prompt if no username is provided; assume unauthenticated.
-            if username and password is None:
-                password = self._io.ask_hidden("Password:")
-
-        self._uploader.auth(username, password)
-
-        self._io.write_line(
-            "Publishing {} ({}) "
-            "to {}".format(
-                self._package.pretty_name,
-                self._package.pretty_version,
-                "PyPI" if repository_name == "pypi" else repository_name,
-            )
-        )
-
-        self._uploader.upload(
-            url,
-            cert=cert or get_cert(self._poetry.config, repository_name),
-            client_cert=resolved_client_cert,
-            dry_run=dry_run,
-        )
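
`Publisher.publish` above resolves credentials in a fixed order: explicit
username/password, then a stored API token (sent as the reserved `__token__`
username), then stored HTTP auth, and only then interactive prompts. That
order as a minimal sketch, with `get_token` and `get_http_auth` as hypothetical
stand-ins for `PasswordManager`:

# Sketch of the credential-resolution order in Publisher.publish.
def resolve_credentials(repo, username, password, get_token, get_http_auth):
    if username and password:
        return username, password

    token = get_token(repo)
    if token:
        # PyPI-style token auth uses the reserved "__token__" username.
        return "__token__", token

    auth = get_http_auth(repo)
    if auth:
        return auth["username"], auth["password"]

    # May still be (None, None); the caller falls back to prompting.
    return username, password
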
diff --git a/vendor/poetry/poetry/publishing/uploader.py b/vendor/poetry/poetry/publishing/uploader.py
deleted file mode 100644
index b817676f..00000000
--- a/vendor/poetry/poetry/publishing/uploader.py
+++ /dev/null
@@ -1,336 +0,0 @@
-import hashlib
-import io
-
-from typing import Any
-from typing import Dict
-from typing import List
-from typing import Optional
-from typing import Union
-
-import requests
-
-from requests import adapters
-from requests.exceptions import ConnectionError
-from requests.exceptions import HTTPError
-from requests.packages.urllib3 import util
-from requests_toolbelt import user_agent
-from requests_toolbelt.multipart import MultipartEncoder
-from requests_toolbelt.multipart import MultipartEncoderMonitor
-
-from poetry.__version__ import __version__
-from poetry.core.masonry.metadata import Metadata
-from poetry.core.masonry.utils.helpers import escape_name
-from poetry.core.masonry.utils.helpers import escape_version
-from poetry.utils._compat import Path
-from poetry.utils.helpers import normalize_version
-from poetry.utils.patterns import wheel_file_re
-
-
-_has_blake2 = hasattr(hashlib, "blake2b")
-
-
-class UploadError(Exception):
-    def __init__(self, error):  # type: (Union[ConnectionError, HTTPError]) -> None
-        if isinstance(error, HTTPError):
-            message = "HTTP Error {}: {}".format(
-                error.response.status_code, error.response.reason
-            )
-        elif isinstance(error, ConnectionError):
-            message = (
-                "Connection Error: We were unable to connect to the repository, "
-                "ensure the url is correct and can be reached."
-            )
-        else:
-            message = str(error)
-        super(UploadError, self).__init__(message)
-
-
-class Uploader:
-    def __init__(self, poetry, io):
-        self._poetry = poetry
-        self._package = poetry.package
-        self._io = io
-        self._username = None
-        self._password = None
-
-    @property
-    def user_agent(self):
-        return user_agent("poetry", __version__)
-
-    @property
-    def adapter(self):
-        retry = util.Retry(
-            connect=5,
-            total=10,
-            method_whitelist=["GET"],
-            status_forcelist=[500, 501, 502, 503],
-        )
-
-        return adapters.HTTPAdapter(max_retries=retry)
-
-    @property
-    def files(self):  # type: () -> List[Path]
-        dist = self._poetry.file.parent / "dist"
-        version = normalize_version(self._package.version.text)
-
-        wheels = list(
-            dist.glob(
-                "{}-{}-*.whl".format(
-                    escape_name(self._package.pretty_name), escape_version(version)
-                )
-            )
-        )
-        tars = list(
-            dist.glob("{}-{}.tar.gz".format(self._package.pretty_name, version))
-        )
-
-        return sorted(wheels + tars)
-
-    def auth(self, username, password):
-        self._username = username
-        self._password = password
-
-    def make_session(self):  # type: () -> requests.Session
-        session = requests.session()
-        if self.is_authenticated():
-            session.auth = (self._username, self._password)
-
-        session.headers["User-Agent"] = self.user_agent
-        for scheme in ("http://", "https://"):
-            session.mount(scheme, self.adapter)
-
-        return session
-
-    def is_authenticated(self):
-        return self._username is not None and self._password is not None
-
-    def upload(
-        self, url, cert=None, client_cert=None, dry_run=False
-    ):  # type: (str, Optional[Path], Optional[Path], bool) -> None
-        session = self.make_session()
-
-        if cert:
-            session.verify = str(cert)
-
-        if client_cert:
-            session.cert = str(client_cert)
-
-        try:
-            self._upload(session, url, dry_run)
-        finally:
-            session.close()
-
-    def post_data(self, file):  # type: (Path) -> Dict[str, Any]
-        meta = Metadata.from_package(self._package)
-
-        file_type = self._get_type(file)
-
-        if _has_blake2:
-            blake2_256_hash = hashlib.blake2b(digest_size=256 // 8)
-
-        md5_hash = hashlib.md5()
-        sha256_hash = hashlib.sha256()
-        with file.open("rb") as fp:
-            for content in iter(lambda: fp.read(io.DEFAULT_BUFFER_SIZE), b""):
-                md5_hash.update(content)
-                sha256_hash.update(content)
-
-                if _has_blake2:
-                    blake2_256_hash.update(content)
-
-        md5_digest = md5_hash.hexdigest()
-        sha2_digest = sha256_hash.hexdigest()
-        if _has_blake2:
-            blake2_256_digest = blake2_256_hash.hexdigest()
-        else:
-            blake2_256_digest = None
-
-        if file_type == "bdist_wheel":
-            wheel_info = wheel_file_re.match(file.name)
-            py_version = wheel_info.group("pyver")
-        else:
-            py_version = None
-
-        data = {
-            # identify release
-            "name": meta.name,
-            "version": meta.version,
-            # file content
-            "filetype": file_type,
-            "pyversion": py_version,
-            # additional meta-data
-            "metadata_version": meta.metadata_version,
-            "summary": meta.summary,
-            "home_page": meta.home_page,
-            "author": meta.author,
-            "author_email": meta.author_email,
-            "maintainer": meta.maintainer,
-            "maintainer_email": meta.maintainer_email,
-            "license": meta.license,
-            "description": meta.description,
-            "keywords": meta.keywords,
-            "platform": meta.platforms,
-            "classifiers": meta.classifiers,
-            "download_url": meta.download_url,
-            "supported_platform": meta.supported_platforms,
-            "comment": None,
-            "md5_digest": md5_digest,
-            "sha256_digest": sha2_digest,
-            "blake2_256_digest": blake2_256_digest,
-            # PEP 314
-            "provides": meta.provides,
-            "requires": meta.requires,
-            "obsoletes": meta.obsoletes,
-            # Metadata 1.2
-            "project_urls": meta.project_urls,
-            "provides_dist": meta.provides_dist,
-            "obsoletes_dist": meta.obsoletes_dist,
-            "requires_dist": meta.requires_dist,
-            "requires_external": meta.requires_external,
-            "requires_python": meta.requires_python,
-        }
-
-        # Metadata 2.1
-        if meta.description_content_type:
-            data["description_content_type"] = meta.description_content_type
-
-        # TODO: Provides extra
-
-        return data
-
-    def _upload(
-        self, session, url, dry_run=False
-    ):  # type: (requests.Session, str, Optional[bool]) -> None
-        try:
-            self._do_upload(session, url, dry_run)
-        except HTTPError as e:
-            if (
-                e.response.status_code == 400
-                and "was ever registered" in e.response.text
-            ):
-                try:
-                    self._register(session, url)
-                except HTTPError as e:
-                    raise UploadError(e)
-
-            raise UploadError(e)
-
-    def _do_upload(
-        self, session, url, dry_run=False
-    ):  # type: (requests.Session, str, Optional[bool]) -> None
-        for file in self.files:
-            # TODO: Check existence
-
-            resp = self._upload_file(session, url, file, dry_run)
-
-            if not dry_run:
-                resp.raise_for_status()
-
-    def _upload_file(
-        self, session, url, file, dry_run=False
-    ):  # type: (requests.Session, str, Path, Optional[bool]) -> requests.Response
-        data = self.post_data(file)
-        data.update(
-            {
-                # action
-                ":action": "file_upload",
-                "protocol_version": "1",
-            }
-        )
-
-        data_to_send = self._prepare_data(data)
-
-        with file.open("rb") as fp:
-            data_to_send.append(
-                ("content", (file.name, fp, "application/octet-stream"))
-            )
-            encoder = MultipartEncoder(data_to_send)
-            bar = self._io.progress_bar(encoder.len)
-            bar.set_format(
-                " - Uploading {0} %percent%%".format(file.name)
-            )
-            monitor = MultipartEncoderMonitor(
-                encoder, lambda monitor: bar.set_progress(monitor.bytes_read)
-            )
-
-            bar.start()
-
-            resp = None
-
-            try:
-                if not dry_run:
-                    resp = session.post(
-                        url,
-                        data=monitor,
-                        allow_redirects=False,
-                        headers={"Content-Type": monitor.content_type},
-                    )
-                if dry_run or resp.ok:
-                    bar.set_format(
-                        " - Uploading {0} %percent%%".format(
-                            file.name
-                        )
-                    )
-                    bar.finish()
-            except (requests.ConnectionError, requests.HTTPError) as e:
-                if self._io.output.supports_ansi():
-                    self._io.overwrite(
-                        " - Uploading {0} {1}".format(
-                            file.name, "FAILED"
-                        )
-                    )
-                raise UploadError(e)
-            finally:
-                self._io.write_line("")
-
-        return resp
-
-    def _register(
-        self, session, url
-    ):  # type: (requests.Session, str) -> requests.Response
-        """
-        Register a package to a repository.
-        """
-        dist = self._poetry.file.parent / "dist"
-        file = dist / "{}-{}.tar.gz".format(
-            self._package.name, normalize_version(self._package.version.text)
-        )
-
-        if not file.exists():
-            raise RuntimeError('"{0}" does not exist.'.format(file.name))
-
-        data = self.post_data(file)
-        data.update({":action": "submit", "protocol_version": "1"})
-
-        data_to_send = self._prepare_data(data)
-        encoder = MultipartEncoder(data_to_send)
-        resp = session.post(
-            url,
-            data=encoder,
-            allow_redirects=False,
-            headers={"Content-Type": encoder.content_type},
-        )
-
-        resp.raise_for_status()
-
-        return resp
-
-    def _prepare_data(self, data):
-        data_to_send = []
-        for key, value in data.items():
-            if not isinstance(value, (list, tuple)):
-                data_to_send.append((key, value))
-            else:
-                for item in value:
-                    data_to_send.append((key, item))
-
-        return data_to_send
-
-    def _get_type(self, file):
-        exts = file.suffixes
-        if exts[-1] == ".whl":
-            return "bdist_wheel"
-        elif len(exts) >= 2 and "".join(exts[-2:]) == ".tar.gz":
-            return "sdist"
-
-        raise ValueError("Unknown distribution format {}".format("".join(exts)))
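
`Uploader._upload_file` above streams each archive through a
`MultipartEncoder` and attaches a `MultipartEncoderMonitor`, so the progress
bar can track `bytes_read` without buffering the whole file in memory. A
minimal sketch of that wiring, with a plain print in place of the clikit
progress bar:

import requests

from requests_toolbelt.multipart import MultipartEncoder
from requests_toolbelt.multipart import MultipartEncoderMonitor

# Sketch of the streaming upload in Uploader._upload_file.
def upload_with_progress(session, url, path):
    with open(path, "rb") as fp:
        encoder = MultipartEncoder(
            [("content", (path, fp, "application/octet-stream"))]
        )
        monitor = MultipartEncoderMonitor(
            encoder,
            lambda m: print("{}/{} bytes".format(m.bytes_read, encoder.len)),
        )
        return session.post(
            url,
            data=monitor,
            allow_redirects=False,
            headers={"Content-Type": monitor.content_type},
        )
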
diff --git a/vendor/poetry/poetry/puzzle/__init__.py b/vendor/poetry/poetry/puzzle/__init__.py
deleted file mode 100644
index 70089f30..00000000
--- a/vendor/poetry/poetry/puzzle/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .solver import Solver
diff --git a/vendor/poetry/poetry/puzzle/exceptions.py b/vendor/poetry/poetry/puzzle/exceptions.py
deleted file mode 100644
index e2e0b0dc..00000000
--- a/vendor/poetry/poetry/puzzle/exceptions.py
+++ /dev/null
@@ -1,18 +0,0 @@
-class SolverProblemError(Exception):
-    def __init__(self, error):
-        self._error = error
-
-        super(SolverProblemError, self).__init__(str(error))
-
-    @property
-    def error(self):
-        return self._error
-
-
-class OverrideNeeded(Exception):
-    def __init__(self, *overrides):
-        self._overrides = overrides
-
-    @property
-    def overrides(self):
-        return self._overrides
diff --git a/vendor/poetry/poetry/puzzle/provider.py b/vendor/poetry/poetry/puzzle/provider.py
deleted file mode 100755
index c05efbd6..00000000
--- a/vendor/poetry/poetry/puzzle/provider.py
+++ /dev/null
@@ -1,792 +0,0 @@
-import logging
-import os
-import re
-import time
-
-from contextlib import contextmanager
-from tempfile import mkdtemp
-from typing import Any
-from typing import List
-from typing import Optional
-
-from clikit.ui.components import ProgressIndicator
-
-from poetry.core.packages import Dependency
-from poetry.core.packages import DirectoryDependency
-from poetry.core.packages import FileDependency
-from poetry.core.packages import Package
-from poetry.core.packages import URLDependency
-from poetry.core.packages import VCSDependency
-from poetry.core.packages.utils.utils import get_python_constraint_from_marker
-from poetry.core.semver.version import Version
-from poetry.core.vcs.git import Git
-from poetry.core.version.markers import MarkerUnion
-from poetry.inspection.info import PackageInfo
-from poetry.inspection.info import PackageInfoError
-from poetry.mixology.incompatibility import Incompatibility
-from poetry.mixology.incompatibility_cause import DependencyCause
-from poetry.mixology.incompatibility_cause import PythonCause
-from poetry.mixology.term import Term
-from poetry.packages import DependencyPackage
-from poetry.packages.package_collection import PackageCollection
-from poetry.puzzle.exceptions import OverrideNeeded
-from poetry.repositories import Pool
-from poetry.utils._compat import OrderedDict
-from poetry.utils._compat import Path
-from poetry.utils._compat import urlparse
-from poetry.utils.env import Env
-from poetry.utils.helpers import download_file
-from poetry.utils.helpers import safe_rmtree
-from poetry.utils.helpers import temporary_directory
-
-
-logger = logging.getLogger(__name__)
-
-
-class Indicator(ProgressIndicator):
-    def _formatter_elapsed(self):
-        elapsed = time.time() - self._start_time
-
-        return "{:.1f}s".format(elapsed)
-
-
-class Provider:
-
-    UNSAFE_PACKAGES = {"setuptools", "distribute", "pip", "wheel"}
-
-    def __init__(
-        self, package, pool, io, env=None
-    ):  # type: (Package, Pool, Any, Optional[Env]) -> None
-        self._package = package
-        self._pool = pool
-        self._io = io
-        self._env = env
-        self._python_constraint = package.python_constraint
-        self._search_for = {}
-        self._is_debugging = self._io.is_debug() or self._io.is_very_verbose()
-        self._in_progress = False
-        self._overrides = {}
-        self._deferred_cache = {}
-        self._load_deferred = True
-
-    @property
-    def pool(self):  # type: () -> Pool
-        return self._pool
-
-    def is_debugging(self):
-        return self._is_debugging
-
-    def set_overrides(self, overrides):
-        self._overrides = overrides
-
-    def load_deferred(self, load_deferred):  # type: (bool) -> None
-        self._load_deferred = load_deferred
-
-    @contextmanager
-    def use_environment(self, env):  # type: (Env) -> Provider
-        original_env = self._env
-        original_python_constraint = self._python_constraint
-
-        self._env = env
-        self._python_constraint = Version.parse(env.marker_env["python_full_version"])
-
-        yield self
-
-        self._env = original_env
-        self._python_constraint = original_python_constraint
-
-    def search_for(self, dependency):  # type: (Dependency) -> List[Package]
-        """
-        Search for the specifications that match the given dependency.
-
-        The specifications in the returned list will be considered in reverse
-        order, so the latest version ought to be last.
-        """
-        if dependency.is_root:
-            return PackageCollection(dependency, [self._package])
-
-        for constraint in self._search_for.keys():
-            if (
-                constraint.is_same_package_as(dependency)
-                and constraint.constraint.intersect(dependency.constraint)
-                == dependency.constraint
-            ):
-                packages = [
-                    p
-                    for p in self._search_for[constraint]
-                    if dependency.constraint.allows(p.version)
-                ]
-
-                packages.sort(
-                    key=lambda p: (
-                        not p.is_prerelease() and not dependency.allows_prereleases(),
-                        p.version,
-                    ),
-                    reverse=True,
-                )
-
-                return PackageCollection(dependency, packages)
-
-        if dependency.is_vcs():
-            packages = self.search_for_vcs(dependency)
-        elif dependency.is_file():
-            packages = self.search_for_file(dependency)
-        elif dependency.is_directory():
-            packages = self.search_for_directory(dependency)
-        elif dependency.is_url():
-            packages = self.search_for_url(dependency)
-        else:
-            packages = self._pool.find_packages(dependency)
-
-            packages.sort(
-                key=lambda p: (
-                    not p.is_prerelease() and not dependency.allows_prereleases(),
-                    p.version,
-                ),
-                reverse=True,
-            )
-
-        self._search_for[dependency] = packages
-
-        return PackageCollection(dependency, packages)
-
-    def search_for_vcs(self, dependency):  # type: (VCSDependency) -> List[Package]
-        """
-        Search for the specifications that match the given VCS dependency.
-
-        Basically, we clone the repository in a temporary directory
-        and get the information we need by checking out the specified reference.
-        """
-        if dependency in self._deferred_cache:
-            return [self._deferred_cache[dependency]]
-
-        package = self.get_package_from_vcs(
-            dependency.vcs,
-            dependency.source,
-            branch=dependency.branch,
-            tag=dependency.tag,
-            rev=dependency.rev,
-            name=dependency.name,
-        )
-        package.develop = dependency.develop
-
-        dependency._constraint = package.version
-        dependency._pretty_constraint = package.version.text
-
-        self._deferred_cache[dependency] = package
-
-        return [package]
-
-    @classmethod
-    def get_package_from_vcs(
-        cls, vcs, url, branch=None, tag=None, rev=None, name=None
-    ):  # type: (str, str, Optional[str], Optional[str], Optional[str], Optional[str]) -> Package
-        if vcs != "git":
-            raise ValueError("Unsupported VCS dependency {}".format(vcs))
-
-        tmp_dir = Path(
-            mkdtemp(prefix="pypoetry-git-{}".format(url.split("/")[-1].rstrip(".git")))
-        )
-
-        try:
-            git = Git()
-            git.clone(url, tmp_dir)
-            reference = branch or tag or rev
-            if reference is not None:
-                git.checkout(reference, tmp_dir)
-            else:
-                reference = "HEAD"
-
-            revision = git.rev_parse(reference, tmp_dir).strip()
-
-            package = cls.get_package_from_directory(tmp_dir, name=name)
-            package._source_type = "git"
-            package._source_url = url
-            package._source_reference = reference
-            package._source_resolved_reference = revision
-        except Exception:
-            raise
-        finally:
-            safe_rmtree(str(tmp_dir))
-
-        return package
-
-    def search_for_file(self, dependency):  # type: (FileDependency) -> List[Package]
-        if dependency in self._deferred_cache:
-            dependency, _package = self._deferred_cache[dependency]
-
-            package = _package.clone()
-        else:
-            package = self.get_package_from_file(dependency.full_path)
-
-            dependency._constraint = package.version
-            dependency._pretty_constraint = package.version.text
-
-            self._deferred_cache[dependency] = (dependency, package)
-
-        if dependency.name != package.name:
-            # For now, the dependency's name must match the actual package's name
-            raise RuntimeError(
-                "The dependency name for {} does not match the actual package's name: {}".format(
-                    dependency.name, package.name
-                )
-            )
-
-        if dependency.base is not None:
-            package.root_dir = dependency.base
-
-        package.files = [
-            {"file": dependency.path.name, "hash": "sha256:" + dependency.hash()}
-        ]
-
-        return [package]
-
-    @classmethod
-    def get_package_from_file(cls, file_path):  # type: (Path) -> Package
-        try:
-            package = PackageInfo.from_path(path=file_path).to_package(
-                root_dir=file_path
-            )
-        except PackageInfoError:
-            raise RuntimeError(
-                "Unable to determine package info from path: {}".format(file_path)
-            )
-
-        return package
-
-    def search_for_directory(
-        self, dependency
-    ):  # type: (DirectoryDependency) -> List[Package]
-        if dependency in self._deferred_cache:
-            dependency, _package = self._deferred_cache[dependency]
-
-            package = _package.clone()
-        else:
-            package = self.get_package_from_directory(
-                dependency.full_path, name=dependency.name
-            )
-
-            dependency._constraint = package.version
-            dependency._pretty_constraint = package.version.text
-
-            self._deferred_cache[dependency] = (dependency, package)
-
-        package.develop = dependency.develop
-
-        if dependency.base is not None:
-            package.root_dir = dependency.base
-
-        return [package]
-
-    @classmethod
-    def get_package_from_directory(
-        cls, directory, name=None
-    ):  # type: (Path, Optional[str]) -> Package
-        package = PackageInfo.from_directory(path=directory).to_package(
-            root_dir=directory
-        )
-
-        if name and name != package.name:
-            # For now, the dependency's name must match the actual package's name
-            raise RuntimeError(
-                "The dependency name for {} does not match the actual package's name: {}".format(
-                    name, package.name
-                )
-            )
-
-        return package
-
-    def search_for_url(self, dependency):  # type: (URLDependency) -> List[Package]
-        if dependency in self._deferred_cache:
-            return [self._deferred_cache[dependency]]
-
-        package = self.get_package_from_url(dependency.url)
-
-        if dependency.name != package.name:
-            # For now, the dependency's name must match the actual package's name
-            raise RuntimeError(
-                "The dependency name for {} does not match the actual package's name: {}".format(
-                    dependency.name, package.name
-                )
-            )
-
-        for extra in dependency.extras:
-            if extra in package.extras:
-                for dep in package.extras[extra]:
-                    dep.activate()
-
-                package.requires += package.extras[extra]
-
-        dependency._constraint = package.version
-        dependency._pretty_constraint = package.version.text
-
-        self._deferred_cache[dependency] = package
-
-        return [package]
-
-    @classmethod
-    def get_package_from_url(cls, url):  # type: (str) -> Package
-        with temporary_directory() as temp_dir:
-            temp_dir = Path(temp_dir)
-            file_name = os.path.basename(urlparse.urlparse(url).path)
-            download_file(url, str(temp_dir / file_name))
-
-            package = cls.get_package_from_file(temp_dir / file_name)
-
-        package._source_type = "url"
-        package._source_url = url
-
-        return package
-
-    def incompatibilities_for(
-        self, package
-    ):  # type: (DependencyPackage) -> List[Incompatibility]
-        """
-        Returns incompatibilities that encapsulate a given package's
-        dependencies, or that indicate it can't be safely selected.
-
-        If multiple subsequent versions of this package have the same
-        dependencies, this will return incompatibilities that reflect that. It
-        won't return incompatibilities that have already been returned by a
-        previous call to incompatibilities_for().
-        """
-        if package.is_root():
-            dependencies = package.all_requires
-        else:
-            dependencies = package.requires
-
-            if not package.python_constraint.allows_all(self._python_constraint):
-                transitive_python_constraint = get_python_constraint_from_marker(
-                    package.dependency.transitive_marker
-                )
-                intersection = package.python_constraint.intersect(
-                    transitive_python_constraint
-                )
-                difference = transitive_python_constraint.difference(intersection)
-
-                # The difference is only relevant if it intersects
-                # the root package python constraint
-                difference = difference.intersect(self._python_constraint)
-                if (
-                    transitive_python_constraint.is_any()
-                    or self._python_constraint.intersect(
-                        package.dependency.python_constraint
-                    ).is_empty()
-                    or intersection.is_empty()
-                    or not difference.is_empty()
-                ):
-                    return [
-                        Incompatibility(
-                            [Term(package.to_dependency(), True)],
-                            PythonCause(
-                                package.python_versions, str(self._python_constraint)
-                            ),
-                        )
-                    ]
-
-        _dependencies = [
-            dep
-            for dep in dependencies
-            if dep.name not in self.UNSAFE_PACKAGES
-            and self._python_constraint.allows_any(dep.python_constraint)
-            and (not self._env or dep.marker.validate(self._env.marker_env))
-        ]
-
-        overrides = self._overrides.get(package, {})
-        dependencies = []
-        overridden = []
-        for dep in _dependencies:
-            if dep.name in overrides:
-                if dep.name in overridden:
-                    continue
-
-                dependencies.append(overrides[dep.name])
-                overridden.append(dep.name)
-
-                continue
-
-            dependencies.append(dep)
-
-        return [
-            Incompatibility(
-                [Term(package.to_dependency(), True), Term(dep, False)],
-                DependencyCause(),
-            )
-            for dep in dependencies
-        ]
-
-    def complete_package(
-        self, package
-    ):  # type: (DependencyPackage) -> DependencyPackage
-
-        if package.is_root():
-            package = package.clone()
-            requires = package.all_requires
-        elif not package.is_root() and package.source_type not in {
-            "directory",
-            "file",
-            "url",
-            "git",
-        }:
-            package = DependencyPackage(
-                package.dependency,
-                self._pool.package(
-                    package.name,
-                    package.version.text,
-                    extras=list(package.dependency.extras),
-                    repository=package.dependency.source_name,
-                ),
-            )
-            requires = package.requires
-        else:
-            requires = package.requires
-
-        if self._load_deferred:
-            # Retrieving constraints for deferred dependencies
-            for r in requires:
-                if r.is_directory():
-                    self.search_for_directory(r)
-                elif r.is_file():
-                    self.search_for_file(r)
-                elif r.is_vcs():
-                    self.search_for_vcs(r)
-                elif r.is_url():
-                    self.search_for_url(r)
-
-        optional_dependencies = []
-        _dependencies = []
-
-        # If some extras/features were required, we need to
-        # add a special dependency representing the base package
-        # to the current package
-        if package.dependency.extras:
-            for extra in package.dependency.extras:
-                if extra not in package.extras:
-                    continue
-
-                optional_dependencies += [d.name for d in package.extras[extra]]
-
-            package = package.with_features(list(package.dependency.extras))
-            _dependencies.append(package.without_features().to_dependency())
-
-        for dep in requires:
-            if not self._python_constraint.allows_any(dep.python_constraint):
-                continue
-
-            if dep.name in self.UNSAFE_PACKAGES:
-                continue
-
-            if self._env and not dep.marker.validate(self._env.marker_env):
-                continue
-
-            if not package.is_root():
-                if (dep.is_optional() and dep.name not in optional_dependencies) or (
-                    dep.in_extras
-                    and not set(dep.in_extras).intersection(package.dependency.extras)
-                ):
-                    continue
-
-            _dependencies.append(dep)
-
-        overrides = self._overrides.get(package, {})
-        dependencies = []
-        overridden = []
-        for dep in _dependencies:
-            if dep.name in overrides:
-                if dep.name in overridden:
-                    continue
-
-                dependencies.append(overrides[dep.name])
-                overridden.append(dep.name)
-
-                continue
-
-            dependencies.append(dep)
-
-        # Searching for duplicate dependencies
-        #
-        # If the duplicate dependencies have the same constraint,
-        # the requirements will be merged.
-        #
-        # For instance:
-        #   - enum34; python_version=="2.7"
-        #   - enum34; python_version=="3.3"
-        #
-        # will become:
-        #   - enum34; python_version=="2.7" or python_version=="3.3"
-        #
-        # If the duplicate dependencies have different constraints
-        # we have to split the dependency graph.
-        #
-        # An example of this is:
-        #   - pypiwin32 (220); sys_platform == "win32" and python_version >= "3.6"
-        #   - pypiwin32 (219); sys_platform == "win32" and python_version < "3.6"
-        duplicates = OrderedDict()
-        for dep in dependencies:
-            if dep.name not in duplicates:
-                duplicates[dep.name] = []
-
-            duplicates[dep.name].append(dep)
-
-        dependencies = []
-        for dep_name, deps in duplicates.items():
-            if len(deps) == 1:
-                dependencies.append(deps[0])
-                continue
-
-            self.debug("Duplicate dependencies for {}".format(dep_name))
-
-            # Regrouping by constraint
-            by_constraint = OrderedDict()
-            for dep in deps:
-                if dep.constraint not in by_constraint:
-                    by_constraint[dep.constraint] = []
-
-                by_constraint[dep.constraint].append(dep)
-
-            # We merge by constraint
-            for constraint, _deps in by_constraint.items():
-                new_markers = []
-                for dep in _deps:
-                    marker = dep.marker.without_extras()
-                    if marker.is_any():
-                        # No marker or only extras
-                        continue
-
-                    new_markers.append(marker)
-
-                if not new_markers:
-                    continue
-
-                dep = _deps[0]
-                dep.marker = dep.marker.union(MarkerUnion(*new_markers))
-                by_constraint[constraint] = [dep]
-
-                continue
-
-            if len(by_constraint) == 1:
-                self.debug(
-                    "Merging requirements for {}".format(str(deps[0]))
-                )
-                dependencies.append(list(by_constraint.values())[0][0])
-                continue
-
-            # We leave dependencies as-is if they have the same
-            # python/platform constraints.
-            # That way the resolver will pick up the conflict
-            # and display a proper error.
-            _deps = [value[0] for value in by_constraint.values()]
-            seen = set()
-            for _dep in _deps:
-                pep_508_dep = _dep.to_pep_508(False)
-                if ";" not in pep_508_dep:
-                    _requirements = ""
-                else:
-                    _requirements = pep_508_dep.split(";")[1].strip()
-
-                if _requirements not in seen:
-                    seen.add(_requirements)
-
-            if len(_deps) != len(seen):
-                for _dep in _deps:
-                    dependencies.append(_dep)
-
-                continue
-
-            # At this point, we raise an exception that will
-            # tell the solver to make new resolutions with specific overrides.
-            #
-            # For instance, if the foo (1.2.3) package has the following dependencies:
-            #   - bar (>=2.0) ; python_version >= "3.6"
-            #   - bar (<2.0) ; python_version < "3.6"
-            #
-            # then the solver will need to make two new resolutions
-            # with the following overrides:
-            #   - {<Package foo (1.2.3): {"bar": <Dependency bar (>=2.0)>}
-            #   - {<Package foo (1.2.3): {"bar": <Dependency bar (<2.0)>}
-            markers = []
-            for constraint, _deps in by_constraint.items():
-                markers.append(_deps[0].marker)
-
-            _deps = [_dep[0] for _dep in by_constraint.values()]
-            self.debug(
-                "<warning>Different requirements found for {}.</warning>".format(
-                    ", ".join(
-                        "<c1>{}</c1> (<c2>{}</c2>) with markers <b>{}</b>".format(
-                            d.name,
-                            d.pretty_constraint,
-                            d.marker if not d.marker.is_any() else "*",
-                        )
-                        for d in _deps[:-1]
-                    )
-                    + " and "
-                    + "<c1>{}</c1> (<c2>{}</c2>) with markers <b>{}</b>".format(
-                        _deps[-1].name,
-                        _deps[-1].pretty_constraint,
-                        _deps[-1].marker if not _deps[-1].marker.is_any() else "*",
-                    )
-                )
-            )
-
-            # We need to check if one of the duplicate dependencies
-            # has no markers. If there is one, we need to change its
-            # environment markers to the inverse of the union of the
-            # other dependencies markers.
-            # For instance, if we have the following dependencies:
-            #   - ipython
-            #   - ipython (1.2.4) ; implementation_name == "pypy"
-            #
-            # the marker for `ipython` will become `implementation_name != "pypy"`.
-            any_markers_dependencies = [d for d in _deps if d.marker.is_any()]
-            other_markers_dependencies = [d for d in _deps if not d.marker.is_any()]
-
-            if any_markers_dependencies:
-                marker = other_markers_dependencies[0].marker
-                for other_dep in other_markers_dependencies[1:]:
-                    marker = marker.union(other_dep.marker)
-
-                for i, d in enumerate(_deps):
-                    if d.marker.is_any():
-                        _deps[i].marker = marker.invert()
-
-            overrides = []
-            for _dep in _deps:
-                current_overrides = self._overrides.copy()
-                package_overrides = current_overrides.get(package, {}).copy()
-                package_overrides.update({_dep.name: _dep})
-                current_overrides.update({package: package_overrides})
-                overrides.append(current_overrides)
-
-            raise OverrideNeeded(*overrides)
-
-        # Modifying dependencies as needed
-        clean_dependencies = []
-        for dep in dependencies:
-            if not package.dependency.transitive_marker.without_extras().is_any():
-                marker_intersection = package.dependency.transitive_marker.without_extras().intersect(
-                    dep.marker.without_extras()
-                )
-                if marker_intersection.is_empty():
-                    # The dependency is not needed, since the markers specified
-                    # for the current package selection are not compatible with
-                    # the markers for the current dependency, so we skip it
-                    continue
-
-                dep.transitive_marker = marker_intersection
-
-            if not package.dependency.python_constraint.is_any():
-                python_constraint_intersection = dep.python_constraint.intersect(
-                    package.dependency.python_constraint
-                )
-                if python_constraint_intersection.is_empty():
-                    # This dependency is not needed under current python constraint.
-                    continue
-                dep.transitive_python_versions = str(python_constraint_intersection)
-
-            clean_dependencies.append(dep)
-
-        package.requires = clean_dependencies
-
-        return package
-
-    def debug(self, message, depth=0):
-        if not (self._io.is_very_verbose() or self._io.is_debug()):
-            return
-
-        if message.startswith("fact:"):
-            if "depends on" in message:
-                m = re.match(r"fact: (.+?) depends on (.+?) \((.+?)\)", message)
-                m2 = re.match(r"(.+?) \((.+?)\)", m.group(1))
-                if m2:
-                    name = m2.group(1)
-                    version = " ({})".format(m2.group(2))
-                else:
-                    name = m.group(1)
-                    version = ""
-
-                message = (
-                    "<fg=blue>fact</>: <c1>{}</c1>{} "
-                    "depends on <c1>{}</c1> (<c2>{}</c2>)".format(
-                        name, version, m.group(2), m.group(3)
-                    )
-                )
-            elif " is " in message:
-                message = re.sub(
-                    "fact: (.+) is (.+)",
-                    "<fg=blue>fact</>: <c1>\\1</c1> is <c2>\\2</c2>",
-                    message,
-                )
-            else:
-                message = re.sub(
-                    r"(?<=: )(.+?) \((.+?)\)", "<c1>\\1</c1> (<c2>\\2</c2>)", message
-                )
-                message = "<fg=blue>fact</>: {}".format(message.split("fact: ")[1])
-        elif message.startswith("selecting "):
-            message = re.sub(
-                r"selecting (.+?) \((.+?)\)",
-                "selecting <c1>\\1</c1> (<c2>\\2</c2>)",
-                message,
-            )
-        elif message.startswith("derived:"):
-            m = re.match(r"derived: (.+?) \((.+?)\)$", message)
-            if m:
-                message = "derived: {} ({})".format(
-                    m.group(1), m.group(2)
-                )
-            else:
-                message = "derived: {}".format(
-                    message.split("derived: ")[1]
-                )
-        elif message.startswith("conflict:"):
-            m = re.match(r"conflict: (.+?) depends on (.+?) \((.+?)\)", message)
-            if m:
-                m2 = re.match(r"(.+?) \((.+?)\)", m.group(1))
-                if m2:
-                    name = m2.group(1)
-                    version = " ({})".format(m2.group(2))
-                else:
-                    name = m.group(1)
-                    version = ""
-
-                message = (
-                    "<fg=red;options=bold>conflict</>: <c1>{}</c1>{} "
-                    "depends on <c1>{}</c1> (<c2>{}</c2>)".format(
-                        name, version, m.group(2), m.group(3)
-                    )
-                )
-            else:
-                message = "conflict: {}".format(
-                    message.split("conflict: ")[1]
-                )
-
-        message = message.replace("! ", "! ")
-
-        if self.is_debugging():
-            debug_info = str(message)
-            debug_info = (
-                "\n".join(
-                    [
-                        "{}: {}".format(str(depth).rjust(4), s)
-                        for s in debug_info.split("\n")
-                    ]
-                )
-                + "\n"
-            )
-
-            self._io.write(debug_info)
-
-    @contextmanager
-    def progress(self):
-        if not self._io.output.supports_ansi() or self.is_debugging():
-            self._io.write_line("Resolving dependencies...")
-            yield
-        else:
-            indicator = Indicator(self._io, "{message} ({elapsed:2s})")
-
-            with indicator.auto(
-                "Resolving dependencies...",
-                "Resolving dependencies...",
-            ):
-                yield
-
-        self._in_progress = False
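Context for the duplicate-dependency handling deleted above: requirements on the same package that share a version constraint get their environment markers OR-ed into a single requirement, while duplicates with different constraints make the provider raise OverrideNeeded so the solver can retry once per branch. A minimal, self-contained sketch of that decision rule (Req is a hypothetical stand-in, not Poetry's Dependency API):

    from collections import defaultdict
    from dataclasses import dataclass

    @dataclass
    class Req:  # hypothetical stand-in for a Dependency
        name: str
        constraint: str  # e.g. ">=2.0"
        marker: str = ""  # e.g. 'python_version >= "3.6"'

    def merge_duplicates(reqs):
        by_name = defaultdict(list)
        for r in reqs:
            by_name[r.name].append(r)

        merged, overrides = [], []
        for name, deps in by_name.items():
            by_constraint = defaultdict(list)
            for d in deps:
                by_constraint[d.constraint].append(d)

            # Same constraint: markers are unioned into one requirement.
            collapsed = [
                Req(name, c, " or ".join(d.marker for d in ds if d.marker))
                for c, ds in by_constraint.items()
            ]
            if len(collapsed) == 1:
                merged.append(collapsed[0])
            else:
                # Different constraints: resolution must branch, one
                # override per constraint (the OverrideNeeded case).
                overrides.extend(collapsed)
        return merged, overrides

    reqs = [
        Req("enum34", "*", 'python_version == "2.7"'),
        Req("enum34", "*", 'python_version == "3.3"'),
        Req("pypiwin32", "==220", 'python_version >= "3.6"'),
        Req("pypiwin32", "==219", 'python_version < "3.6"'),
    ]
    merged, overrides = merge_duplicates(reqs)
    # merged:    one enum34 entry with marker
    #            'python_version == "2.7" or python_version == "3.3"'
    # overrides: two pypiwin32 branches, one per constraint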
diff --git a/vendor/poetry/poetry/puzzle/solver.py b/vendor/poetry/poetry/puzzle/solver.py
deleted file mode 100644
index 31858bb3..00000000
--- a/vendor/poetry/poetry/puzzle/solver.py
+++ /dev/null
@@ -1,441 +0,0 @@
-import enum
-import time
-
-from collections import defaultdict
-from contextlib import contextmanager
-from typing import List
-from typing import Optional
-
-from clikit.io import ConsoleIO
-
-from poetry.core.packages import Package
-from poetry.core.packages.project_package import ProjectPackage
-from poetry.installation.operations import Install
-from poetry.installation.operations import Uninstall
-from poetry.installation.operations import Update
-from poetry.installation.operations.operation import Operation
-from poetry.mixology import resolve_version
-from poetry.mixology.failure import SolveFailure
-from poetry.packages import DependencyPackage
-from poetry.repositories import Pool
-from poetry.repositories import Repository
-from poetry.utils.env import Env
-
-from .exceptions import OverrideNeeded
-from .exceptions import SolverProblemError
-from .provider import Provider
-
-
-class Solver:
-    def __init__(
-        self,
-        package,  # type: ProjectPackage
-        pool,  # type: Pool
-        installed,  # type: Repository
-        locked,  # type: Repository
-        io,  # type: ConsoleIO
-        remove_untracked=False,  # type: bool
-        provider=None,  # type: Optional[Provider]
-    ):
-        self._package = package
-        self._pool = pool
-        self._installed = installed
-        self._locked = locked
-        self._io = io
-
-        if provider is None:
-            provider = Provider(self._package, self._pool, self._io)
-
-        self._provider = provider
-        self._overrides = []
-        self._remove_untracked = remove_untracked
-
-    @property
-    def provider(self):  # type: () -> Provider
-        return self._provider
-
-    @contextmanager
-    def use_environment(self, env):  # type: (Env) -> None
-        with self.provider.use_environment(env):
-            yield
-
-    def solve(self, use_latest=None):  # type: (...) -> List[Operation]
-        with self._provider.progress():
-            start = time.time()
-            packages, depths = self._solve(use_latest=use_latest)
-            end = time.time()
-
-            if len(self._overrides) > 1:
-                self._provider.debug(
-                    "Complete version solving took {:.3f} seconds with {} overrides".format(
-                        end - start, len(self._overrides)
-                    )
-                )
-                self._provider.debug(
-                    "Resolved with overrides: {}".format(
-                        ", ".join("({})".format(b) for b in self._overrides)
-                    )
-                )
-
-        operations = []
-        for i, package in enumerate(packages):
-            installed = False
-            for pkg in self._installed.packages:
-                if package.name == pkg.name:
-                    installed = True
-
-                    if pkg.source_type == "git" and package.source_type == "git":
-                        from poetry.core.vcs.git import Git
-
-                        # Trying to find the currently installed version
-                        pkg_source_url = Git.normalize_url(pkg.source_url)
-                        package_source_url = Git.normalize_url(package.source_url)
-                        for locked in self._locked.packages:
-                            if locked.name != pkg.name or locked.source_type != "git":
-                                continue
-
-                            locked_source_url = Git.normalize_url(locked.source_url)
-                            if (
-                                locked.name == pkg.name
-                                and locked.source_type == pkg.source_type
-                                and locked_source_url == pkg_source_url
-                                and locked.source_reference == pkg.source_reference
-                                and locked.source_resolved_reference
-                                == pkg.source_resolved_reference
-                            ):
-                                pkg = Package(
-                                    pkg.name,
-                                    locked.version,
-                                    source_type="git",
-                                    source_url=locked.source_url,
-                                    source_reference=locked.source_reference,
-                                    source_resolved_reference=locked.source_resolved_reference,
-                                )
-                                break
-
-                        if pkg_source_url != package_source_url or (
-                            (
-                                not pkg.source_resolved_reference
-                                or not package.source_resolved_reference
-                            )
-                            and pkg.source_reference != package.source_reference
-                            and not pkg.source_reference.startswith(
-                                package.source_reference
-                            )
-                            or (
-                                pkg.source_resolved_reference
-                                and package.source_resolved_reference
-                                and pkg.source_resolved_reference
-                                != package.source_resolved_reference
-                                and not pkg.source_resolved_reference.startswith(
-                                    package.source_resolved_reference
-                                )
-                            )
-                        ):
-                            operations.append(Update(pkg, package, priority=depths[i]))
-                        else:
-                            operations.append(
-                                Install(package).skip("Already installed")
-                            )
-                    elif package.version != pkg.version:
-                        # Checking version
-                        operations.append(Update(pkg, package, priority=depths[i]))
-                    elif pkg.source_type and package.source_type != pkg.source_type:
-                        operations.append(Update(pkg, package, priority=depths[i]))
-                    else:
-                        operations.append(
-                            Install(package, priority=depths[i]).skip(
-                                "Already installed"
-                            )
-                        )
-
-                    break
-
-            if not installed:
-                operations.append(Install(package, priority=depths[i]))
-
-        # Checking for removals
-        for pkg in self._locked.packages:
-            remove = True
-            for package in packages:
-                if pkg.name == package.name:
-                    remove = False
-                    break
-
-            if remove:
-                skip = True
-                for installed in self._installed.packages:
-                    if installed.name == pkg.name:
-                        skip = False
-                        break
-
-                op = Uninstall(pkg)
-                if skip:
-                    op.skip("Not currently installed")
-
-                operations.append(op)
-
-        if self._remove_untracked:
-            locked_names = {locked.name for locked in self._locked.packages}
-
-            for installed in self._installed.packages:
-                if installed.name == self._package.name:
-                    continue
-                if installed.name in Provider.UNSAFE_PACKAGES:
-                    # Never remove pip, setuptools etc.
-                    continue
-                if installed.name not in locked_names:
-                    operations.append(Uninstall(installed))
-
-        return sorted(
-            operations, key=lambda o: (-o.priority, o.package.name, o.package.version,),
-        )
-
-    def solve_in_compatibility_mode(self, overrides, use_latest=None):
-        locked = {}
-        for package in self._locked.packages:
-            locked[package.name] = DependencyPackage(package.to_dependency(), package)
-
-        packages = []
-        depths = []
-        for override in overrides:
-            self._provider.debug(
-                "Retrying dependency resolution "
-                "with the following overrides ({}).".format(override)
-            )
-            self._provider.set_overrides(override)
-            _packages, _depths = self._solve(use_latest=use_latest)
-            for index, package in enumerate(_packages):
-                if package not in packages:
-                    packages.append(package)
-                    depths.append(_depths[index])
-                    continue
-                else:
-                    idx = packages.index(package)
-                    pkg = packages[idx]
-                    depths[idx] = max(depths[idx], _depths[index])
-
-                    for dep in package.requires:
-                        if dep not in pkg.requires:
-                            pkg.requires.append(dep)
-
-        return packages, depths
-
-    def _solve(self, use_latest=None):
-        if self._provider._overrides:
-            self._overrides.append(self._provider._overrides)
-
-        locked = {}
-        for package in self._locked.packages:
-            locked[package.name] = DependencyPackage(package.to_dependency(), package)
-
-        try:
-            result = resolve_version(
-                self._package, self._provider, locked=locked, use_latest=use_latest
-            )
-
-            packages = result.packages
-        except OverrideNeeded as e:
-            return self.solve_in_compatibility_mode(e.overrides, use_latest=use_latest)
-        except SolveFailure as e:
-            raise SolverProblemError(e)
-
-        results = dict(
-            depth_first_search(
-                PackageNode(self._package, packages), aggregate_package_nodes
-            )
-        )
-
-        # Merging feature packages with base packages
-        final_packages = []
-        depths = []
-        for package in packages:
-            if package.features:
-                for _package in packages:
-                    if (
-                        _package.name == package.name
-                        and not _package.is_same_package_as(package)
-                        and _package.version == package.version
-                    ):
-                        for dep in package.requires:
-                            if dep.is_same_package_as(_package):
-                                continue
-
-                            if dep not in _package.requires:
-                                _package.requires.append(dep)
-
-                continue
-
-            final_packages.append(package)
-            depths.append(results[package])
-
-        # Return the packages in their original order with associated depths
-        return final_packages, depths
-
-
-class DFSNode(object):
-    def __init__(self, id, name, base_name):
-        self.id = id
-        self.name = name
-        self.base_name = base_name
-
-    def reachable(self):
-        return []
-
-    def visit(self, parents):
-        pass
-
-    def __str__(self):
-        return str(self.id)
-
-
-class VisitedState(enum.Enum):
-    Unvisited = 0
-    PartiallyVisited = 1
-    Visited = 2
-
-
-def depth_first_search(source, aggregator):
-    back_edges = defaultdict(list)
-    visited = {}
-    topo_sorted_nodes = []
-
-    dfs_visit(source, back_edges, visited, topo_sorted_nodes)
-
-    # Combine the nodes by name
-    combined_nodes = defaultdict(list)
-    name_children = defaultdict(list)
-    for node in topo_sorted_nodes:
-        node.visit(back_edges[node.id])
-        name_children[node.name].extend(node.reachable())
-        combined_nodes[node.name].append(node)
-
-    combined_topo_sorted_nodes = []
-    for node in topo_sorted_nodes:
-        if node.name in combined_nodes:
-            combined_topo_sorted_nodes.append(combined_nodes.pop(node.name))
-
-    results = [
-        aggregator(nodes, name_children[nodes[0].name])
-        for nodes in combined_topo_sorted_nodes
-    ]
-    return results
-
-
-def dfs_visit(node, back_edges, visited, sorted_nodes):
-    if visited.get(node.id, VisitedState.Unvisited) == VisitedState.Visited:
-        return True
-    if visited.get(node.id, VisitedState.Unvisited) == VisitedState.PartiallyVisited:
-        # We have a circular dependency.
-        # Since the dependencies are resolved we can
-        # simply skip it because we already have it
-        return True
-
-    visited[node.id] = VisitedState.PartiallyVisited
-    for neighbor in node.reachable():
-        back_edges[neighbor.id].append(node)
-        if not dfs_visit(neighbor, back_edges, visited, sorted_nodes):
-            return False
-    visited[node.id] = VisitedState.Visited
-    sorted_nodes.insert(0, node)
-    return True
-
-
-class PackageNode(DFSNode):
-    def __init__(
-        self, package, packages, previous=None, previous_dep=None, dep=None,
-    ):
-        self.package = package
-        self.packages = packages
-
-        self.previous = previous
-        self.previous_dep = previous_dep
-        self.dep = dep
-        self.depth = -1
-
-        if not previous:
-            self.category = "dev"
-            self.optional = True
-        else:
-            self.category = dep.category
-            self.optional = dep.is_optional()
-
-        super(PackageNode, self).__init__(
-            (package.complete_name, self.category, self.optional),
-            package.complete_name,
-            package.name,
-        )
-
-    def reachable(self):
-        children = []  # type: List[PackageNode]
-
-        if (
-            self.previous_dep
-            and self.previous_dep is not self.dep
-            and self.previous_dep.name == self.dep.name
-        ):
-            return []
-
-        for dependency in self.package.all_requires:
-            if self.previous and self.previous.name == dependency.name:
-                # We have a circular dependency.
-                # Since the dependencies are resolved we can
-                # simply skip it because we already have it
-                # N.B. this only catches cycles of length 2;
-                # dependency cycles in general are handled by the DFS traversal
-                continue
-
-            for pkg in self.packages:
-                if pkg.complete_name == dependency.complete_name and (
-                    dependency.constraint.allows(pkg.version)
-                    or dependency.allows_prereleases()
-                    and pkg.version.is_prerelease()
-                    and dependency.constraint.allows(pkg.version.stable)
-                ):
-                    # If there is already a child with this name
-                    # we merge the requirements
-                    if any(
-                        child.package.name == pkg.name
-                        and child.category == dependency.category
-                        for child in children
-                    ):
-                        continue
-
-                    children.append(
-                        PackageNode(
-                            pkg,
-                            self.packages,
-                            self,
-                            dependency,
-                            self.dep or dependency,
-                        )
-                    )
-
-        return children
-
-    def visit(self, parents):
-        # The root package, which has no parents, is defined as having depth -1,
-        # so that the root package's top-level dependencies have depth 0.
-        self.depth = 1 + max(
-            [
-                parent.depth if parent.base_name != self.base_name else parent.depth - 1
-                for parent in parents
-            ]
-            + [-2]
-        )
-
-
-def aggregate_package_nodes(nodes, children):
-    package = nodes[0].package
-    depth = max(node.depth for node in nodes)
-    category = (
-        "main" if any(node.category == "main" for node in children + nodes) else "dev"
-    )
-    optional = all(node.optional for node in children + nodes)
-    for node in nodes:
-        node.depth = depth
-        node.category = category
-        node.optional = optional
-    package.category = category
-    package.optional = optional
-    return package, depth
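For orientation, the solver deleted above assigns each resolved package a depth: the root is -1, its direct requirements are 0, and every other package gets its longest distance from the root (feature-package merging aside). A simplified sketch of just that bookkeeping, assuming an acyclic name graph rather than the real PackageNode/DFS machinery:

    def dependency_depths(graph, root):
        # graph: name -> list of direct dependency names (assumed acyclic;
        # the real traversal tolerates cycles by skipping visited nodes)
        depths = {root: -1}
        queue = [root]
        while queue:
            node = queue.pop(0)
            for child in graph.get(node, []):
                depth = depths[node] + 1
                if child not in depths or depth > depths[child]:
                    depths[child] = depth  # keep the longest path found
                    queue.append(child)
        return depths

    graph = {"root": ["requests"], "requests": ["urllib3", "idna"]}
    assert dependency_depths(graph, "root") == {
        "root": -1, "requests": 0, "urllib3": 1, "idna": 1
    }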
diff --git a/vendor/poetry/poetry/repositories/__init__.py b/vendor/poetry/poetry/repositories/__init__.py
deleted file mode 100644
index ab92fb11..00000000
--- a/vendor/poetry/poetry/repositories/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from .pool import Pool
-from .repository import Repository
diff --git a/vendor/poetry/poetry/repositories/base_repository.py b/vendor/poetry/poetry/repositories/base_repository.py
deleted file mode 100644
index 46422ca0..00000000
--- a/vendor/poetry/poetry/repositories/base_repository.py
+++ /dev/null
@@ -1,19 +0,0 @@
-class BaseRepository(object):
-    def __init__(self):
-        self._packages = []
-
-    @property
-    def packages(self):
-        return self._packages
-
-    def has_package(self, package):
-        raise NotImplementedError()
-
-    def package(self, name, version, extras=None):
-        raise NotImplementedError()
-
-    def find_packages(self, dependency):
-        raise NotImplementedError()
-
-    def search(self, query):
-        raise NotImplementedError()
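BaseRepository above is a pure interface: every lookup raises NotImplementedError. A toy subclass (illustration only, using (name, version) tuples and a hypothetical SimpleDep instead of Poetry's Package/Dependency objects, and assuming the BaseRepository definition above is in scope) shows the contract a concrete repository fills in:

    from dataclasses import dataclass
    from typing import Callable

    @dataclass
    class SimpleDep:  # hypothetical stand-in for a Dependency
        name: str
        allows: Callable[[str], bool]  # version predicate

    class DictRepository(BaseRepository):  # BaseRepository as defined above
        def add_package(self, name, version):
            self._packages.append((name, version))

        def has_package(self, package):
            return package in self._packages

        def find_packages(self, dependency):
            return [
                (n, v)
                for n, v in self._packages
                if n == dependency.name and dependency.allows(v)
            ]

    repo = DictRepository()
    repo.add_package("jsonschema", "4.10.3")
    repo.add_package("jsonschema", "3.2.0")
    # naive string comparison on versions, fine for this demo only
    assert repo.find_packages(SimpleDep("jsonschema", lambda v: v >= "4")) == [
        ("jsonschema", "4.10.3")
    ]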
diff --git a/vendor/poetry/poetry/repositories/exceptions.py b/vendor/poetry/poetry/repositories/exceptions.py
deleted file mode 100644
index 170303f3..00000000
--- a/vendor/poetry/poetry/repositories/exceptions.py
+++ /dev/null
@@ -1,8 +0,0 @@
-class RepositoryError(Exception):
-
-    pass
-
-
-class PackageNotFound(Exception):
-
-    pass
diff --git a/vendor/poetry/poetry/repositories/installed_repository.py b/vendor/poetry/poetry/repositories/installed_repository.py
deleted file mode 100644
index 03513103..00000000
--- a/vendor/poetry/poetry/repositories/installed_repository.py
+++ /dev/null
@@ -1,167 +0,0 @@
-import itertools
-
-from typing import Set
-from typing import Union
-
-from poetry.core.packages import Package
-from poetry.core.utils.helpers import module_name
-from poetry.utils._compat import Path
-from poetry.utils._compat import metadata
-from poetry.utils.env import Env
-
-from . import __path__
-from .repository import Repository
-
-
-_VENDORS = Path(__path__[0]).parent.joinpath("_vendor")
-
-
-try:
-    FileNotFoundError
-except NameError:
-    FileNotFoundError = OSError
-
-
-class InstalledRepository(Repository):
-    @classmethod
-    def get_package_paths(cls, env, name):  # type: (Env, str) -> Set[Path]
-        """
-        Process a .pth file within the site-packages directories, and return any valid
-        paths. We skip executable .pth files as there is no reliable means to do this
-        without side-effects to current run-time. No check is made that the item refers
-        to a directory rather than a file; however, in order to maintain backwards
-        compatibility, we allow non-existing paths to be discovered. The latter
-        behaviour is different to how Python's site-specific hook configuration works.
-
-        Reference: https://docs.python.org/3.8/library/site.html
-
-        :param env: The environment to search for the .pth file in.
-        :param name: The name of the package to search .pth file for.
-        :return: A `Set` of valid `Path` objects.
-        """
-        paths = set()
-
-        # we identify the candidate .pth files to check; this is done to handle cases
-        # where the .pth file for foo-bar might have been installed as either foo-bar.pth
-        # or foo_bar.pth (expected) in either the pure or platform lib directories.
-        candidates = itertools.product(
-            {env.purelib, env.platlib}, {name, module_name(name)},
-        )
-
-        for lib, module in candidates:
-            pth_file = lib.joinpath(module).with_suffix(".pth")
-            if not pth_file.exists():
-                continue
-
-            with pth_file.open() as f:
-                for line in f:
-                    line = line.strip()
-                    if line and not line.startswith(("#", "import ", "import\t")):
-                        path = Path(line)
-                        if not path.is_absolute():
-                            try:
-                                path = lib.joinpath(path).resolve()
-                            except FileNotFoundError:
-                                # this is required to handle pathlib oddity on win32 python==3.5
-                                path = lib.joinpath(path)
-                        paths.add(path)
-        return paths
-
-    @classmethod
-    def set_package_vcs_properties_from_path(
-        cls, src, package
-    ):  # type: (Path, Package) -> None
-        from poetry.core.vcs.git import Git
-
-        git = Git()
-        revision = git.rev_parse("HEAD", src).strip()
-        url = git.remote_url(src)
-
-        package._source_type = "git"
-        package._source_url = url
-        package._source_reference = revision
-
-    @classmethod
-    def set_package_vcs_properties(cls, package, env):  # type: (Package, Env) -> None
-        src = env.path / "src" / package.name
-        cls.set_package_vcs_properties_from_path(src, package)
-
-    @classmethod
-    def is_vcs_package(cls, package, env):  # type: (Union[Path, Package], Env) -> bool
-        # A VCS dependency should have been installed
-        # in the src directory.
-        src = env.path / "src"
-        if isinstance(package, Package):
-            return src.joinpath(package.name).is_dir()
-
-        try:
-            package.relative_to(env.path / "src")
-        except ValueError:
-            return False
-        else:
-            return True
-
-    @classmethod
-    def load(cls, env):  # type: (Env) -> InstalledRepository
-        """
-        Load installed packages.
-        """
-        repo = cls()
-        seen = set()
-
-        for entry in reversed(env.sys_path):
-            for distribution in sorted(
-                metadata.distributions(path=[entry]), key=lambda d: str(d._path),
-            ):
-                name = distribution.metadata["name"]
-                path = Path(str(distribution._path))
-                version = distribution.metadata["version"]
-                package = Package(name, version, version)
-                package.description = distribution.metadata.get("summary", "")
-
-                if package.name in seen:
-                    continue
-
-                try:
-                    path.relative_to(_VENDORS)
-                except ValueError:
-                    pass
-                else:
-                    continue
-
-                seen.add(package.name)
-
-                repo.add_package(package)
-
-                is_standard_package = env.is_path_relative_to_lib(path)
-
-                if is_standard_package:
-                    if path.name.endswith(".dist-info"):
-                        paths = cls.get_package_paths(env=env, name=package.pretty_name)
-                        if paths:
-                            is_editable_package = False
-                            for src in paths:
-                                if cls.is_vcs_package(src, env):
-                                    cls.set_package_vcs_properties(package, env)
-                                    break
-
-                                if not (
-                                    is_editable_package
-                                    or env.is_path_relative_to_lib(src)
-                                ):
-                                    is_editable_package = True
-                            else:
-                                # TODO: handle multiple source directories?
-                                if is_editable_package:
-                                    package._source_type = "directory"
-                                    package._source_url = paths.pop().as_posix()
-                    continue
-
-                if cls.is_vcs_package(path, env):
-                    cls.set_package_vcs_properties(package, env)
-                else:
-                    # If not, it's a path dependency
-                    package._source_type = "directory"
-                    package._source_url = str(path.parent)
-
-        return repo
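The .pth handling in get_package_paths above mirrors the site module's format: blank lines, comments, and import lines are skipped, and relative entries are interpreted relative to the lib directory that contains the .pth file. A standalone sketch of just that parsing step (a hypothetical helper, not part of the deleted module):

    from pathlib import Path

    def paths_from_pth(pth_file: Path, lib: Path) -> set:
        paths = set()
        for line in pth_file.read_text().splitlines():
            line = line.strip()
            # per site's format, comments and import lines are not path entries
            if not line or line.startswith(("#", "import ", "import\t")):
                continue
            path = Path(line)
            if not path.is_absolute():
                # relative entries resolve against the lib dir holding the .pth
                path = lib.joinpath(path).resolve()
            paths.add(path)
        return paths

    # e.g. a foo_bar.pth containing "../src/foo-bar" yields the resolved
    # <lib>/../src/foo-bar, even if that directory does not (yet) exist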
diff --git a/vendor/poetry/poetry/repositories/legacy_repository.py b/vendor/poetry/poetry/repositories/legacy_repository.py
deleted file mode 100755
index 75d0a83f..00000000
--- a/vendor/poetry/poetry/repositories/legacy_repository.py
+++ /dev/null
@@ -1,410 +0,0 @@
-import cgi
-import re
-import warnings
-
-from collections import defaultdict
-from typing import Generator
-from typing import Optional
-from typing import Union
-
-import requests
-import requests.auth
-
-from cachecontrol import CacheControl
-from cachecontrol.caches.file_cache import FileCache
-from cachy import CacheManager
-
-from poetry.core.packages import Package
-from poetry.core.packages.utils.link import Link
-from poetry.core.semver import Version
-from poetry.core.semver import VersionConstraint
-from poetry.core.semver import VersionRange
-from poetry.core.semver import parse_constraint
-from poetry.locations import REPOSITORY_CACHE_DIR
-from poetry.utils._compat import Path
-from poetry.utils.helpers import canonicalize_name
-from poetry.utils.patterns import wheel_file_re
-
-from ..config.config import Config
-from ..inspection.info import PackageInfo
-from ..installation.authenticator import Authenticator
-from .exceptions import PackageNotFound
-from .exceptions import RepositoryError
-from .pypi_repository import PyPiRepository
-
-
-try:
-    import urllib.parse as urlparse
-except ImportError:
-    import urlparse
-
-try:
-    from html import unescape
-except ImportError:
-    try:
-        from html.parser import HTMLParser
-    except ImportError:
-        from HTMLParser import HTMLParser
-
-    unescape = HTMLParser().unescape
-
-
-try:
-    from urllib.parse import quote
-except ImportError:
-    from urllib import quote
-
-
-with warnings.catch_warnings():
-    warnings.simplefilter("ignore")
-    import html5lib
-
-
-class Page:
-
-    VERSION_REGEX = re.compile(r"(?i)([a-z0-9_\-.]+?)-(?=\d)([a-z0-9_.!+-]+)")
-    SUPPORTED_FORMATS = [
-        ".tar.gz",
-        ".whl",
-        ".zip",
-        ".tar.bz2",
-        ".tar.xz",
-        ".tar.Z",
-        ".tar",
-    ]
-
-    def __init__(self, url, content, headers):
-        if not url.endswith("/"):
-            url += "/"
-
-        self._url = url
-        encoding = None
-        if headers and "Content-Type" in headers:
-            content_type, params = cgi.parse_header(headers["Content-Type"])
-
-            if "charset" in params:
-                encoding = params["charset"]
-
-        self._content = content
-
-        if encoding is None:
-            self._parsed = html5lib.parse(content, namespaceHTMLElements=False)
-        else:
-            self._parsed = html5lib.parse(
-                content, transport_encoding=encoding, namespaceHTMLElements=False
-            )
-
-    @property
-    def versions(self):  # type: () -> Generator[Version]
-        seen = set()
-        for link in self.links:
-            version = self.link_version(link)
-
-            if not version:
-                continue
-
-            if version in seen:
-                continue
-
-            seen.add(version)
-
-            yield version
-
-    @property
-    def links(self):  # type: () -> Generator[Link]
-        for anchor in self._parsed.findall(".//a"):
-            if anchor.get("href"):
-                href = anchor.get("href")
-                url = self.clean_link(urlparse.urljoin(self._url, href))
-                pyrequire = anchor.get("data-requires-python")
-                pyrequire = unescape(pyrequire) if pyrequire else None
-
-                link = Link(url, self, requires_python=pyrequire)
-
-                if link.ext not in self.SUPPORTED_FORMATS:
-                    continue
-
-                yield link
-
-    def links_for_version(self, version):  # type: (Version) -> Generator[Link]
-        for link in self.links:
-            if self.link_version(link) == version:
-                yield link
-
-    def link_version(self, link):  # type: (Link) -> Union[Version, None]
-        m = wheel_file_re.match(link.filename)
-        if m:
-            version = m.group("ver")
-        else:
-            info, ext = link.splitext()
-            match = self.VERSION_REGEX.match(info)
-            if not match:
-                return
-
-            version = match.group(2)
-
-        try:
-            version = Version.parse(version)
-        except ValueError:
-            return
-
-        return version
-
-    _clean_re = re.compile(r"[^a-z0-9$&+,/:;=?@.#%_\\|-]", re.I)
-
-    def clean_link(self, url):
-        """Makes sure a link is fully encoded.  That is, if a ' ' shows up in
-        the link, it will be rewritten to %20 (while not over-quoting
-        % or other characters)."""
-        return self._clean_re.sub(lambda match: "%%%2x" % ord(match.group(0)), url)
-
-
-class LegacyRepository(PyPiRepository):
-    def __init__(
-        self, name, url, config=None, disable_cache=False, cert=None, client_cert=None
-    ):  # type: (str, str, Optional[Config], bool, Optional[Path], Optional[Path]) -> None
-        if name == "pypi":
-            raise ValueError("The name [pypi] is reserved for repositories")
-
-        self._packages = []
-        self._name = name
-        self._url = url.rstrip("/")
-        self._client_cert = client_cert
-        self._cert = cert
-        self._cache_dir = REPOSITORY_CACHE_DIR / name
-        self._cache = CacheManager(
-            {
-                "default": "releases",
-                "serializer": "json",
-                "stores": {
-                    "releases": {"driver": "file", "path": str(self._cache_dir)},
-                    "packages": {"driver": "dict"},
-                    "matches": {"driver": "dict"},
-                },
-            }
-        )
-
-        self._authenticator = Authenticator(
-            config=config or Config(use_environment=True)
-        )
-        self._basic_auth = None
-        username, password = self._authenticator.get_credentials_for_url(self._url)
-        if username is not None and password is not None:
-            self._basic_auth = requests.auth.HTTPBasicAuth(username, password)
-
-        self._disable_cache = disable_cache
-
-    @property
-    def cert(self):  # type: () -> Optional[Path]
-        return self._cert
-
-    @property
-    def client_cert(self):  # type: () -> Optional[Path]
-        return self._client_cert
-
-    @property
-    def session(self):
-        session = self._authenticator.session
-
-        if self._basic_auth:
-            session.auth = self._basic_auth
-
-        if self._cert:
-            session.verify = str(self._cert)
-
-        if self._client_cert:
-            session.cert = str(self._client_cert)
-
-        return CacheControl(session, cache=FileCache(str(self._cache_dir / "_http")))
-
-    @property
-    def authenticated_url(self):  # type: () -> str
-        if not self._basic_auth:
-            return self.url
-
-        parsed = urlparse.urlparse(self.url)
-
-        return "{scheme}://{username}:{password}@{netloc}{path}".format(
-            scheme=parsed.scheme,
-            username=quote(self._basic_auth.username, safe=""),
-            password=quote(self._basic_auth.password, safe=""),
-            netloc=parsed.netloc,
-            path=parsed.path,
-        )
-
-    def find_packages(self, dependency):
-        packages = []
-
-        constraint = dependency.constraint
-        if constraint is None:
-            constraint = "*"
-
-        if not isinstance(constraint, VersionConstraint):
-            constraint = parse_constraint(constraint)
-
-        allow_prereleases = dependency.allows_prereleases()
-        if isinstance(constraint, VersionRange):
-            if (
-                constraint.max is not None
-                and constraint.max.is_prerelease()
-                or constraint.min is not None
-                and constraint.min.is_prerelease()
-            ):
-                allow_prereleases = True
-
-        key = dependency.name
-        if not constraint.is_any():
-            key = "{}:{}".format(key, str(constraint))
-
-        ignored_pre_release_versions = []
-
-        if self._cache.store("matches").has(key):
-            versions = self._cache.store("matches").get(key)
-        else:
-            page = self._get("/{}/".format(dependency.name.replace(".", "-")))
-            if page is None:
-                return []
-
-            versions = []
-            for version in page.versions:
-                if version.is_prerelease() and not allow_prereleases:
-                    if constraint.is_any():
-                        # we need this when all versions of the package are pre-releases
-                        ignored_pre_release_versions.append(version)
-                    continue
-
-                if constraint.allows(version):
-                    versions.append(version)
-
-            self._cache.store("matches").put(key, versions, 5)
-
-        for package_versions in (versions, ignored_pre_release_versions):
-            for version in package_versions:
-                package = Package(
-                    dependency.name,
-                    version,
-                    source_type="legacy",
-                    source_reference=self.name,
-                    source_url=self._url,
-                )
-
-                packages.append(package)
-
-            self._log(
-                "{} packages found for {} {}".format(
-                    len(packages), dependency.name, str(constraint)
-                ),
-                level="debug",
-            )
-
-            if packages or not constraint.is_any():
-                # we have matching packages, or constraint is not (*)
-                break
-
-        return packages
-
-    def package(self, name, version, extras=None):  # type: (...) -> Package
-        """
-        Retrieve the release information.
-
-        This is a heavy task which takes time.
-        We have to download a package to get the dependencies.
-        We also need to download every file matching this release
-        to get the various hashes.
-
-        Note that this will be cached so the subsequent operations
-        should be much faster.
-        """
-        try:
-            index = self._packages.index(Package(name, version, version))
-
-            return self._packages[index]
-        except ValueError:
-            package = super(LegacyRepository, self).package(name, version, extras)
-            package._source_type = "legacy"
-            package._source_url = self._url
-            package._source_reference = self.name
-
-            return package
-
-    def find_links_for_package(self, package):
-        page = self._get("/{}/".format(package.name.replace(".", "-")))
-        if page is None:
-            return []
-
-        return list(page.links_for_version(package.version))
-
-    def _get_release_info(self, name, version):  # type: (str, str) -> dict
-        page = self._get("/{}/".format(canonicalize_name(name).replace(".", "-")))
-        if page is None:
-            raise PackageNotFound('No package named "{}"'.format(name))
-
-        data = PackageInfo(
-            name=name,
-            version=version,
-            summary="",
-            platform=None,
-            requires_dist=[],
-            requires_python=None,
-            files=[],
-            cache_version=str(self.CACHE_VERSION),
-        )
-
-        links = list(page.links_for_version(Version.parse(version)))
-        if not links:
-            raise PackageNotFound(
-                'No valid distribution links found for package: "{}" version: "{}"'.format(
-                    name, version
-                )
-            )
-        urls = defaultdict(list)
-        files = []
-        for link in links:
-            if link.is_wheel:
-                urls["bdist_wheel"].append(link.url)
-            elif link.filename.endswith(
-                (".tar.gz", ".zip", ".bz2", ".xz", ".Z", ".tar")
-            ):
-                urls["sdist"].append(link.url)
-
-            h = link.hash
-            if h:
-                h = link.hash_name + ":" + link.hash
-                files.append({"file": link.filename, "hash": h})
-
-        data.files = files
-
-        info = self._get_info_from_urls(urls)
-
-        data.summary = info.summary
-        data.requires_dist = info.requires_dist
-        data.requires_python = info.requires_python
-
-        return data.asdict()
-
-    def _get(self, endpoint):  # type: (str) -> Union[Page, None]
-        url = self._url + endpoint
-        try:
-            response = self.session.get(url)
-            if response.status_code == 404:
-                return
-            response.raise_for_status()
-        except requests.HTTPError as e:
-            raise RepositoryError(e)
-
-        if response.status_code in (401, 403):
-            self._log(
-                "Authorization error accessing {url}".format(url=response.url),
-                level="warn",
-            )
-            return
-
-        if response.url != url:
-            self._log(
-                "Response URL {response_url} differs from request URL {url}".format(
-                    response_url=response.url, url=url
-                ),
-                level="debug",
-            )
-
-        return Page(response.url, response.content, response.headers)
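
For reference, the `authenticated_url` property removed above percent-quotes the credentials before splicing them into the netloc. A minimal standalone sketch of that technique (the URL and credentials here are made up):

```python
# Embed basic-auth credentials into an index URL, percent-quoting them
# so characters like "/" or "@" cannot corrupt the URL structure.
from urllib.parse import quote, urlparse

def authenticated_url(url, username, password):
    parsed = urlparse(url)
    return "{scheme}://{username}:{password}@{netloc}{path}".format(
        scheme=parsed.scheme,
        username=quote(username, safe=""),
        password=quote(password, safe=""),
        netloc=parsed.netloc,
        path=parsed.path,
    )

print(authenticated_url("https://pypi.example.com/simple", "alice", "s3cret/!"))
# -> https://alice:s3cret%2F%21@pypi.example.com/simple
```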
diff --git a/vendor/poetry/poetry/repositories/pool.py b/vendor/poetry/poetry/repositories/pool.py
deleted file mode 100755
index d9b7f8e1..00000000
--- a/vendor/poetry/poetry/repositories/pool.py
+++ /dev/null
@@ -1,184 +0,0 @@
-from typing import TYPE_CHECKING
-from typing import Dict
-from typing import List
-from typing import Optional
-
-from .base_repository import BaseRepository
-from .exceptions import PackageNotFound
-from .repository import Repository
-
-
-if TYPE_CHECKING:
-    from poetry.core.packages import Package
-
-
-class Pool(BaseRepository):
-    def __init__(
-        self, repositories=None, ignore_repository_names=False
-    ):  # type: (Optional[List[Repository]], bool) -> None
-        if repositories is None:
-            repositories = []
-
-        self._lookup = {}  # type: Dict[str, int]
-        self._repositories = []  # type: List[Repository]
-        self._default = False
-        self._has_primary_repositories = False
-        self._secondary_start_idx = None
-
-        for repository in repositories:
-            self.add_repository(repository)
-
-        self._ignore_repository_names = ignore_repository_names
-
-        super(Pool, self).__init__()
-
-    @property
-    def repositories(self):  # type: () -> List[Repository]
-        return self._repositories
-
-    def has_default(self):  # type: () -> bool
-        return self._default
-
-    def has_primary_repositories(self):  # type: () -> bool
-        return self._has_primary_repositories
-
-    def has_repository(self, name):  # type: (str) -> bool
-        name = name.lower() if name is not None else None
-
-        return name in self._lookup
-
-    def repository(self, name):  # type: (str) -> Repository
-        if name is not None:
-            name = name.lower()
-
-        if name in self._lookup:
-            return self._repositories[self._lookup[name]]
-
-        raise ValueError('Repository "{}" does not exist.'.format(name))
-
-    def add_repository(
-        self, repository, default=False, secondary=False
-    ):  # type: (Repository, bool, bool) -> Pool
-        """
-        Adds a repository to the pool.
-        """
-        repository_name = (
-            repository.name.lower() if repository.name is not None else None
-        )
-        if default:
-            if self.has_default():
-                raise ValueError("Only one repository can be the default")
-
-            self._default = True
-            self._repositories.insert(0, repository)
-            for name in self._lookup:
-                self._lookup[name] += 1
-
-            if self._secondary_start_idx is not None:
-                self._secondary_start_idx += 1
-
-            self._lookup[repository_name] = 0
-        elif secondary:
-            if self._secondary_start_idx is None:
-                self._secondary_start_idx = len(self._repositories)
-
-            self._repositories.append(repository)
-            self._lookup[repository_name] = len(self._repositories) - 1
-        else:
-            self._has_primary_repositories = True
-            if self._secondary_start_idx is None:
-                self._repositories.append(repository)
-                self._lookup[repository_name] = len(self._repositories) - 1
-            else:
-                self._repositories.insert(self._secondary_start_idx, repository)
-
-                for name, idx in self._lookup.items():
-                    if idx < self._secondary_start_idx:
-                        continue
-
-                    self._lookup[name] += 1
-
-                self._lookup[repository_name] = self._secondary_start_idx
-                self._secondary_start_idx += 1
-
-        return self
-
-    def remove_repository(self, repository_name):  # type: (str) -> Pool
-        if repository_name is not None:
-            repository_name = repository_name.lower()
-
-        idx = self._lookup.pop(repository_name, None)
-        if idx is not None:
-            del self._repositories[idx]
-
-            # Keep the cached indices consistent for the
-            # repositories that followed the removed one.
-            for name, index in self._lookup.items():
-                if index > idx:
-                    self._lookup[name] = index - 1
-
-        return self
-
-    def has_package(self, package):
-        raise NotImplementedError()
-
-    def package(
-        self, name, version, extras=None, repository=None
-    ):  # type: (str, str, List[str], str) -> Package
-        if repository is not None:
-            repository = repository.lower()
-
-        if (
-            repository is not None
-            and repository not in self._lookup
-            and not self._ignore_repository_names
-        ):
-            raise ValueError('Repository "{}" does not exist.'.format(repository))
-
-        if repository is not None and not self._ignore_repository_names:
-            try:
-                return self.repository(repository).package(name, version, extras=extras)
-            except PackageNotFound:
-                pass
-        else:
-            for idx, repo in enumerate(self._repositories):
-                try:
-                    package = repo.package(name, version, extras=extras)
-                except PackageNotFound:
-                    continue
-
-                if package:
-                    self._packages.append(package)
-
-                    return package
-
-        raise PackageNotFound("Package {} ({}) not found.".format(name, version))
-
-    def find_packages(
-        self, dependency,
-    ):
-        repository = dependency.source_name
-        if repository is not None:
-            repository = repository.lower()
-
-        if (
-            repository is not None
-            and repository not in self._lookup
-            and not self._ignore_repository_names
-        ):
-            raise ValueError('Repository "{}" does not exist.'.format(repository))
-
-        if repository is not None and not self._ignore_repository_names:
-            return self.repository(repository).find_packages(dependency)
-
-        packages = []
-        for repo in self._repositories:
-            packages += repo.find_packages(dependency)
-
-        return packages
-
-    def search(self, query):
-        from .legacy_repository import LegacyRepository
-
-        results = []
-        for repository in self._repositories:
-            if isinstance(repository, LegacyRepository):
-                continue
-
-            results += repository.search(query)
-
-        return results
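
The ordering rule enforced by the deleted `Pool.add_repository` is easy to miss: the default repository is always searched first, primaries keep insertion order after it, and secondaries always trail. A toy model of just that rule (illustrative only, not Poetry's API):

```python
# Toy pool that only tracks search order: default first, then
# primaries in insertion order, then secondaries.
class ToyPool:
    def __init__(self):
        self._names = []              # repositories in search order
        self._secondary_start = None  # index where secondaries begin

    def add(self, name, default=False, secondary=False):
        if default:
            self._names.insert(0, name)
            if self._secondary_start is not None:
                self._secondary_start += 1
        elif secondary:
            if self._secondary_start is None:
                self._secondary_start = len(self._names)
            self._names.append(name)
        else:  # primary: insert before any secondaries
            if self._secondary_start is None:
                self._names.append(name)
            else:
                self._names.insert(self._secondary_start, name)
                self._secondary_start += 1
        return self

pool = ToyPool()
pool.add("mirror", secondary=True).add("internal").add("pypi", default=True)
print(pool._names)  # ['pypi', 'internal', 'mirror']
```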
diff --git a/vendor/poetry/poetry/repositories/pypi_repository.py b/vendor/poetry/poetry/repositories/pypi_repository.py
deleted file mode 100755
index 2ee18bc7..00000000
--- a/vendor/poetry/poetry/repositories/pypi_repository.py
+++ /dev/null
@@ -1,453 +0,0 @@
-import logging
-import os
-
-from collections import defaultdict
-from typing import Dict
-from typing import List
-from typing import Union
-
-import requests
-
-from cachecontrol import CacheControl
-from cachecontrol.caches.file_cache import FileCache
-from cachecontrol.controller import logger as cache_control_logger
-from cachy import CacheManager
-from html5lib.html5parser import parse
-
-from poetry.core.packages import Dependency
-from poetry.core.packages import Package
-from poetry.core.packages import dependency_from_pep_508
-from poetry.core.packages.utils.link import Link
-from poetry.core.semver import VersionConstraint
-from poetry.core.semver import VersionRange
-from poetry.core.semver import parse_constraint
-from poetry.core.semver.exceptions import ParseVersionError
-from poetry.core.version.markers import parse_marker
-from poetry.locations import REPOSITORY_CACHE_DIR
-from poetry.utils._compat import Path
-from poetry.utils._compat import to_str
-from poetry.utils.helpers import download_file
-from poetry.utils.helpers import temporary_directory
-from poetry.utils.patterns import wheel_file_re
-
-from ..inspection.info import PackageInfo
-from .exceptions import PackageNotFound
-from .remote_repository import RemoteRepository
-
-
-try:
-    import urllib.parse as urlparse
-except ImportError:
-    import urlparse
-
-
-cache_control_logger.setLevel(logging.ERROR)
-
-logger = logging.getLogger(__name__)
-
-
-class PyPiRepository(RemoteRepository):
-
-    CACHE_VERSION = parse_constraint("1.0.0")
-
-    def __init__(self, url="https://pypi.org/", disable_cache=False, fallback=True):
-        super(PyPiRepository, self).__init__(url.rstrip("/") + "/simple/")
-
-        self._base_url = url
-        self._disable_cache = disable_cache
-        self._fallback = fallback
-
-        release_cache_dir = REPOSITORY_CACHE_DIR / "pypi"
-        self._cache = CacheManager(
-            {
-                "default": "releases",
-                "serializer": "json",
-                "stores": {
-                    "releases": {"driver": "file", "path": str(release_cache_dir)},
-                    "packages": {"driver": "dict"},
-                },
-            }
-        )
-
-        self._cache_control_cache = FileCache(str(release_cache_dir / "_http"))
-        self._name = "PyPI"
-
-    @property
-    def session(self):
-        return CacheControl(requests.session(), cache=self._cache_control_cache)
-
-    def find_packages(self, dependency):  # type: (Dependency) -> List[Package]
-        """
-        Find packages on the remote server.
-        """
-        constraint = dependency.constraint
-        if constraint is None:
-            constraint = "*"
-
-        if not isinstance(constraint, VersionConstraint):
-            constraint = parse_constraint(constraint)
-
-        allow_prereleases = dependency.allows_prereleases()
-        if isinstance(constraint, VersionRange):
-            if (
-                constraint.max is not None
-                and constraint.max.is_prerelease()
-                or constraint.min is not None
-                and constraint.min.is_prerelease()
-            ):
-                allow_prereleases = True
-
-        try:
-            info = self.get_package_info(dependency.name)
-        except PackageNotFound:
-            self._log(
-                "No packages found for {} {}".format(dependency.name, str(constraint)),
-                level="debug",
-            )
-            return []
-
-        packages = []
-        ignored_pre_release_packages = []
-
-        for version, release in info["releases"].items():
-            if not release:
-                # Bad release
-                self._log(
-                    "No release information found for {}-{}, skipping".format(
-                        dependency.name, version
-                    ),
-                    level="debug",
-                )
-                continue
-
-            try:
-                package = Package(info["info"]["name"], version)
-            except ParseVersionError:
-                self._log(
-                    'Unable to parse version "{}" for the {} package, skipping'.format(
-                        version, dependency.name
-                    ),
-                    level="debug",
-                )
-                continue
-
-            if package.is_prerelease() and not allow_prereleases:
-                if constraint.is_any():
-                    # we need this when all versions of the package are pre-releases
-                    ignored_pre_release_packages.append(package)
-                continue
-
-            if not constraint or (constraint and constraint.allows(package.version)):
-                packages.append(package)
-
-        self._log(
-            "{} packages found for {} {}".format(
-                len(packages), dependency.name, str(constraint)
-            ),
-            level="debug",
-        )
-
-        return packages or ignored_pre_release_packages
-
-    def package(
-        self,
-        name,  # type: str
-        version,  # type: str
-        extras=None,  # type: (Union[list, None])
-    ):  # type: (...) -> Package
-        return self.get_release_info(name, version).to_package(name=name, extras=extras)
-
-    def search(self, query):
-        results = []
-
-        search = {"q": query}
-
-        response = requests.session().get(self._base_url + "search", params=search)
-        content = parse(response.content, namespaceHTMLElements=False)
-        for result in content.findall(".//*[@class='package-snippet']"):
-            name = result.find("h3/*[@class='package-snippet__name']").text
-            version = result.find("h3/*[@class='package-snippet__version']").text
-
-            if not name or not version:
-                continue
-
-            description = result.find("p[@class='package-snippet__description']").text
-            if not description:
-                description = ""
-
-            try:
-                result = Package(name, version, description)
-                result.description = to_str(description.strip())
-                results.append(result)
-            except ParseVersionError:
-                self._log(
-                    'Unable to parse version "{}" for the {} package, skipping'.format(
-                        version, name
-                    ),
-                    level="debug",
-                )
-
-        return results
-
-    def get_package_info(self, name):  # type: (str) -> dict
-        """
-        Return the package information given its name.
-
-        The information is returned from the cache if it exists
-        or retrieved from the remote server.
-        """
-        if self._disable_cache:
-            return self._get_package_info(name)
-
-        return self._cache.store("packages").remember_forever(
-            name, lambda: self._get_package_info(name)
-        )
-
-    def _get_package_info(self, name):  # type: (str) -> dict
-        data = self._get("pypi/{}/json".format(name))
-        if data is None:
-            raise PackageNotFound("Package [{}] not found.".format(name))
-
-        return data
-
-    def get_release_info(self, name, version):  # type: (str, str) -> PackageInfo
-        """
-        Return the release information given a package name and a version.
-
-        The information is returned from the cache if it exists
-        or retrieved from the remote server.
-        """
-        if self._disable_cache:
-            return PackageInfo.load(self._get_release_info(name, version))
-
-        cached = self._cache.remember_forever(
-            "{}:{}".format(name, version), lambda: self._get_release_info(name, version)
-        )
-
-        cache_version = cached.get("_cache_version", "0.0.0")
-        if parse_constraint(cache_version) != self.CACHE_VERSION:
-            # The cache must be updated
-            self._log(
-                "The cache for {} {} is outdated. Refreshing.".format(name, version),
-                level="debug",
-            )
-            cached = self._get_release_info(name, version)
-
-            self._cache.forever("{}:{}".format(name, version), cached)
-
-        return PackageInfo.load(cached)
-
-    def find_links_for_package(self, package):
-        json_data = self._get("pypi/{}/{}/json".format(package.name, package.version))
-        if json_data is None:
-            return []
-
-        links = []
-        for url in json_data["urls"]:
-            h = "sha256={}".format(url["digests"]["sha256"])
-            links.append(Link(url["url"] + "#" + h))
-
-        return links
-
-    def _get_release_info(self, name, version):  # type: (str, str) -> dict
-        self._log("Getting info for {} ({}) from PyPI".format(name, version), "debug")
-
-        json_data = self._get("pypi/{}/{}/json".format(name, version))
-        if json_data is None:
-            raise PackageNotFound("Package [{}] not found.".format(name))
-
-        info = json_data["info"]
-
-        data = PackageInfo(
-            name=info["name"],
-            version=info["version"],
-            summary=info["summary"],
-            platform=info["platform"],
-            requires_dist=info["requires_dist"],
-            requires_python=info["requires_python"],
-            files=info.get("files", []),
-            cache_version=str(self.CACHE_VERSION),
-        )
-
-        try:
-            version_info = json_data["urls"]
-        except KeyError:
-            version_info = []
-
-        for file_info in version_info:
-            data.files.append(
-                {
-                    "file": file_info["filename"],
-                    "hash": "sha256:" + file_info["digests"]["sha256"],
-                }
-            )
-
-        if self._fallback and data.requires_dist is None:
-            self._log("No dependencies found, downloading archives", level="debug")
-            # No dependencies set (along with other information).
-            # This might be because the package truly has no dependencies
-            # or because the metadata was badly set when uploading.
-            # So we make sure there really are none by introspecting
-            # the downloaded distributions.
-            urls = defaultdict(list)
-            for url in json_data["urls"]:
-                # Only get sdist and wheels if they exist
-                dist_type = url["packagetype"]
-                if dist_type not in ["sdist", "bdist_wheel"]:
-                    continue
-
-                urls[dist_type].append(url["url"])
-
-            if not urls:
-                return data.asdict()
-
-            info = self._get_info_from_urls(urls)
-
-            data.requires_dist = info.requires_dist
-
-            if not data.requires_python:
-                data.requires_python = info.requires_python
-
-        return data.asdict()
-
-    def _get(self, endpoint):  # type: (str) -> Union[dict, None]
-        try:
-            json_response = self.session.get(self._base_url + endpoint)
-        except requests.exceptions.TooManyRedirects:
-            # Cache control redirect loop.
-            # We try to remove the cache and try again
-            self._cache_control_cache.delete(self._base_url + endpoint)
-            json_response = self.session.get(self._base_url + endpoint)
-
-        if json_response.status_code == 404:
-            return None
-
-        json_data = json_response.json()
-
-        return json_data
-
-    def _get_info_from_urls(self, urls):  # type: (Dict[str, List[str]]) -> PackageInfo
-        # Checking wheels first as they are more likely to hold
-        # the necessary information
-        if "bdist_wheel" in urls:
-            # Check for a universal wheel
-            wheels = urls["bdist_wheel"]
-
-            universal_wheel = None
-            universal_python2_wheel = None
-            universal_python3_wheel = None
-            platform_specific_wheels = []
-            for wheel in wheels:
-                link = Link(wheel)
-                m = wheel_file_re.match(link.filename)
-                if not m:
-                    continue
-
-                pyver = m.group("pyver")
-                abi = m.group("abi")
-                plat = m.group("plat")
-                if abi == "none" and plat == "any":
-                    # Universal wheel
-                    if pyver == "py2.py3":
-                        # Any Python
-                        universal_wheel = wheel
-                    elif pyver == "py2":
-                        universal_python2_wheel = wheel
-                    else:
-                        universal_python3_wheel = wheel
-                else:
-                    platform_specific_wheels.append(wheel)
-
-            if universal_wheel is not None:
-                return self._get_info_from_wheel(universal_wheel)
-
-            info = None
-            if universal_python2_wheel and universal_python3_wheel:
-                info = self._get_info_from_wheel(universal_python2_wheel)
-
-                py3_info = self._get_info_from_wheel(universal_python3_wheel)
-                if py3_info.requires_dist:
-                    if not info.requires_dist:
-                        info.requires_dist = py3_info.requires_dist
-
-                        return info
-
-                    py2_requires_dist = set(
-                        dependency_from_pep_508(r).to_pep_508()
-                        for r in info.requires_dist
-                    )
-                    py3_requires_dist = set(
-                        dependency_from_pep_508(r).to_pep_508()
-                        for r in py3_info.requires_dist
-                    )
-                    base_requires_dist = py2_requires_dist & py3_requires_dist
-                    py2_only_requires_dist = py2_requires_dist - py3_requires_dist
-                    py3_only_requires_dist = py3_requires_dist - py2_requires_dist
-
-                    # Normalizing requires_dist
-                    requires_dist = list(base_requires_dist)
-                    for requirement in py2_only_requires_dist:
-                        dep = dependency_from_pep_508(requirement)
-                        dep.marker = dep.marker.intersect(
-                            parse_marker("python_version == '2.7'")
-                        )
-                        requires_dist.append(dep.to_pep_508())
-
-                    for requirement in py3_only_requires_dist:
-                        dep = dependency_from_pep_508(requirement)
-                        dep.marker = dep.marker.intersect(
-                            parse_marker("python_version >= '3'")
-                        )
-                        requires_dist.append(dep.to_pep_508())
-
-                    info.requires_dist = sorted(list(set(requires_dist)))
-
-            if info:
-                return info
-
-            # Prefer non-platform-specific wheels
-            if universal_python3_wheel:
-                return self._get_info_from_wheel(universal_python3_wheel)
-
-            if universal_python2_wheel:
-                return self._get_info_from_wheel(universal_python2_wheel)
-
-            if platform_specific_wheels and "sdist" not in urls:
-                # Pick the first wheel available and hope for the best
-                return self._get_info_from_wheel(platform_specific_wheels[0])
-
-        return self._get_info_from_sdist(urls["sdist"][0])
-
-    def _get_info_from_wheel(self, url):  # type: (str) -> PackageInfo
-        self._log(
-            "Downloading wheel: {}".format(urlparse.urlparse(url).path.rsplit("/")[-1]),
-            level="debug",
-        )
-
-        filename = os.path.basename(urlparse.urlparse(url).path.rsplit("/")[-1])
-
-        with temporary_directory() as temp_dir:
-            filepath = Path(temp_dir) / filename
-            self._download(url, str(filepath))
-
-            return PackageInfo.from_wheel(filepath)
-
-    def _get_info_from_sdist(self, url):  # type: (str) -> PackageInfo
-        self._log(
-            "Downloading sdist: {}".format(urlparse.urlparse(url).path.rsplit("/")[-1]),
-            level="debug",
-        )
-
-        filename = os.path.basename(urlparse.urlparse(url).path)
-
-        with temporary_directory() as temp_dir:
-            filepath = Path(temp_dir) / filename
-            self._download(url, str(filepath))
-
-            return PackageInfo.from_sdist(filepath)
-
-    def _download(self, url, dest):  # type: (str, str) -> None
-        return download_file(url, dest, session=self.session)
-
-    def _log(self, msg, level="info"):
-        getattr(logger, level)("{}: {}".format(self._name, msg))
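
`_get_info_from_urls` above encodes a preference order: a universal (`py2.py3-none-any`) wheel, then a merged py2/py3 pair, then a version-specific universal wheel, then a platform wheel, and finally the sdist. A sketch of the same priority using `packaging`'s wheel-filename parser (an assumption here; the deleted code uses Poetry's own `wheel_file_re`, and the merge/sdist steps are omitted):

```python
# Pick the "best" wheel from a list of filenames, mirroring the
# preference order of the deleted _get_info_from_urls.
from packaging.utils import parse_wheel_filename

def pick_wheel(filenames):
    universal = py2_only = py3_only = None
    platform_specific = []
    for fname in filenames:
        _, _, _, tags = parse_wheel_filename(fname)
        interpreters = {t.interpreter for t in tags}
        if all(t.abi == "none" and t.platform == "any" for t in tags):
            if {"py2", "py3"} <= interpreters:
                universal = fname
            elif "py2" in interpreters:
                py2_only = fname
            else:
                py3_only = fname
        else:
            platform_specific.append(fname)
    fallback = platform_specific[0] if platform_specific else None
    return universal or py3_only or py2_only or fallback

print(pick_wheel([
    "demo-1.0-cp310-cp310-manylinux_2_17_x86_64.whl",
    "demo-1.0-py2.py3-none-any.whl",
]))  # -> demo-1.0-py2.py3-none-any.whl
```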
diff --git a/vendor/poetry/poetry/repositories/remote_repository.py b/vendor/poetry/poetry/repositories/remote_repository.py
deleted file mode 100644
index 7717740d..00000000
--- a/vendor/poetry/poetry/repositories/remote_repository.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from .repository import Repository
-
-
-class RemoteRepository(Repository):
-    def __init__(self, url):  # type: (str) -> None
-        self._url = url
-
-        super(RemoteRepository, self).__init__()
-
-    @property
-    def url(self):  # type: () -> str
-        return self._url
-
-    @property
-    def authenticated_url(self):  # type: () -> str
-        return self._url
diff --git a/vendor/poetry/poetry/repositories/repository.py b/vendor/poetry/poetry/repositories/repository.py
deleted file mode 100755
index 1ebe702b..00000000
--- a/vendor/poetry/poetry/repositories/repository.py
+++ /dev/null
@@ -1,111 +0,0 @@
-from poetry.core.semver import VersionConstraint
-from poetry.core.semver import VersionRange
-from poetry.core.semver import parse_constraint
-
-from .base_repository import BaseRepository
-
-
-class Repository(BaseRepository):
-    def __init__(self, packages=None, name=None):
-        super(Repository, self).__init__()
-
-        self._name = name
-
-        if packages is None:
-            packages = []
-
-        for package in packages:
-            self.add_package(package)
-
-    @property
-    def name(self):
-        return self._name
-
-    def package(self, name, version, extras=None):
-        name = name.lower()
-
-        for package in self.packages:
-            if name == package.name and package.version.text == version:
-                return package.clone()
-
-    def find_packages(self, dependency):
-        constraint = dependency.constraint
-        packages = []
-        ignored_pre_release_packages = []
-
-        if constraint is None:
-            constraint = "*"
-
-        if not isinstance(constraint, VersionConstraint):
-            constraint = parse_constraint(constraint)
-
-        allow_prereleases = dependency.allows_prereleases()
-        if isinstance(constraint, VersionRange):
-            if (
-                constraint.max is not None
-                and constraint.max.is_prerelease()
-                or constraint.min is not None
-                and constraint.min.is_prerelease()
-            ):
-                allow_prereleases = True
-
-        for package in self.packages:
-            if dependency.name == package.name:
-                if (
-                    package.is_prerelease()
-                    and not allow_prereleases
-                    and not package.source_type
-                ):
-                    # If prereleases are not allowed and the package is a prerelease
-                    # and is a standard package then we skip it
-                    if constraint.is_any():
-                        # we need this when all versions of the package are pre-releases
-                        ignored_pre_release_packages.append(package)
-                    continue
-
-                if constraint.allows(package.version) or (
-                    package.is_prerelease()
-                    and constraint.allows(package.version.next_patch)
-                ):
-                    packages.append(package)
-
-        return packages or ignored_pre_release_packages
-
-    def has_package(self, package):
-        package_id = package.unique_name
-
-        for repo_package in self.packages:
-            if package_id == repo_package.unique_name:
-                return True
-
-        return False
-
-    def add_package(self, package):
-        self._packages.append(package)
-
-    def remove_package(self, package):
-        package_id = package.unique_name
-
-        index = None
-        for i, repo_package in enumerate(self.packages):
-            if package_id == repo_package.unique_name:
-                index = i
-                break
-
-        if index is not None:
-            del self._packages[index]
-
-    def find_links_for_package(self, package):
-        return []
-
-    def search(self, query):
-        results = []
-
-        for package in self.packages:
-            if query in package.name:
-                results.append(package)
-
-        return results
-
-    def __len__(self):
-        return len(self._packages)
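
`find_packages` above (and its twins in the other repository classes) applies a subtle pre-release rule: pre-releases are skipped unless explicitly allowed, but when the constraint is "any" they are remembered, so a package whose releases are *all* pre-releases still resolves. A condensed sketch with `packaging` standing in for `poetry.core.semver` (an assumption, not the vendored implementation):

```python
# Filter versions against a specifier, falling back to ignored
# pre-releases only when nothing else matched an "any" constraint.
from packaging.specifiers import SpecifierSet
from packaging.version import Version

def find_versions(versions, spec="", allow_prereleases=False):
    constraint = SpecifierSet(spec)
    matches, ignored_prereleases = [], []
    for text in versions:
        version = Version(text)
        if version.is_prerelease and not allow_prereleases:
            if not spec:  # "any" constraint: keep as a last resort
                ignored_prereleases.append(text)
            continue
        if version in constraint:
            matches.append(text)
    return matches or ignored_prereleases

print(find_versions(["1.0", "2.0a1"]))    # ['1.0']
print(find_versions(["2.0a1", "2.0b2"]))  # ['2.0a1', '2.0b2']
```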
diff --git a/vendor/poetry/poetry/utils/_compat.py b/vendor/poetry/poetry/utils/_compat.py
deleted file mode 100644
index 40e41514..00000000
--- a/vendor/poetry/poetry/utils/_compat.py
+++ /dev/null
@@ -1,287 +0,0 @@
-import sys
-import importlib_metadata as metadata
-
-try:
-    from functools32 import lru_cache
-except ImportError:
-    from functools import lru_cache
-
-try:
-    from glob2 import glob
-except ImportError:
-    from glob import glob
-
-try:
-    import zipfile as zipp
-except ImportError:
-    import zipp
-
-try:
-    import urllib.parse as urlparse
-except ImportError:
-    import urlparse
-
-try:
-    from os import cpu_count
-except ImportError:  # Python 2
-    from multiprocessing import cpu_count
-
-try:  # Python 2
-    long = long
-    unicode = unicode
-    basestring = basestring
-except NameError:  # Python 3
-    long = int
-    unicode = str
-    basestring = str
-
-
-PY2 = sys.version_info[0] == 2
-PY34 = sys.version_info >= (3, 4)
-PY35 = sys.version_info >= (3, 5)
-PY36 = sys.version_info >= (3, 6)
-
-WINDOWS = sys.platform == "win32"
-
-try:
-    from shlex import quote
-except ImportError:
-    # PY2
-    from pipes import quote  # noqa
-
-if PY34:
-    from importlib.machinery import EXTENSION_SUFFIXES
-else:
-    from imp import get_suffixes
-
-    EXTENSION_SUFFIXES = [suffix[0] for suffix in get_suffixes()]
-
-
-if PY35:
-    from pathlib import Path
-else:
-    from pathlib2 import Path
-
-if not PY36:
-    from collections import OrderedDict
-else:
-    OrderedDict = dict
-
-
-if PY35:
-    import subprocess as subprocess
-
-    from subprocess import CalledProcessError
-else:
-    import subprocess32 as subprocess
-
-    from subprocess32 import CalledProcessError
-
-
-if PY34:
-    # subprocess32 passes the calls directly to subprocess
-    # on Python 3.3+ but Python 3.4 does not provide run()
-    # so we backport it
-    import signal
-
-    from subprocess import PIPE
-    from subprocess import Popen
-    from subprocess import SubprocessError
-    from subprocess import TimeoutExpired
-
-    class CalledProcessError(SubprocessError):
-        """Raised when run() is called with check=True and the process
-        returns a non-zero exit status.
-
-        Attributes:
-          cmd, returncode, stdout, stderr, output
-        """
-
-        def __init__(self, returncode, cmd, output=None, stderr=None):
-            self.returncode = returncode
-            self.cmd = cmd
-            self.output = output
-            self.stderr = stderr
-
-        def __str__(self):
-            if self.returncode and self.returncode < 0:
-                try:
-                    return "Command '%s' died with %r." % (
-                        self.cmd,
-                        signal.Signals(-self.returncode),
-                    )
-                except ValueError:
-                    return "Command '%s' died with unknown signal %d." % (
-                        self.cmd,
-                        -self.returncode,
-                    )
-            else:
-                return "Command '%s' returned non-zero exit status %d." % (
-                    self.cmd,
-                    self.returncode,
-                )
-
-        @property
-        def stdout(self):
-            """Alias for output attribute, to match stderr"""
-            return self.output
-
-        @stdout.setter
-        def stdout(self, value):
-            # There's no obvious reason to set this, but allow it anyway so
-            # .stdout is a transparent alias for .output
-            self.output = value
-
-    class CompletedProcess(object):
-        """A process that has finished running.
-        This is returned by run().
-        Attributes:
-          args: The list or str args passed to run().
-          returncode: The exit code of the process, negative for signals.
-          stdout: The standard output (None if not captured).
-          stderr: The standard error (None if not captured).
-        """
-
-        def __init__(self, args, returncode, stdout=None, stderr=None):
-            self.args = args
-            self.returncode = returncode
-            self.stdout = stdout
-            self.stderr = stderr
-
-        def __repr__(self):
-            args = [
-                "args={!r}".format(self.args),
-                "returncode={!r}".format(self.returncode),
-            ]
-            if self.stdout is not None:
-                args.append("stdout={!r}".format(self.stdout))
-            if self.stderr is not None:
-                args.append("stderr={!r}".format(self.stderr))
-            return "{}({})".format(type(self).__name__, ", ".join(args))
-
-        def check_returncode(self):
-            """Raise CalledProcessError if the exit code is non-zero."""
-            if self.returncode:
-                raise CalledProcessError(
-                    self.returncode, self.args, self.stdout, self.stderr
-                )
-
-    def run(*popenargs, **kwargs):
-        """Run command with arguments and return a CompletedProcess instance.
-        The returned instance will have attributes args, returncode, stdout and
-        stderr. By default, stdout and stderr are not captured, and those attributes
-        will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them.
-        If check is True and the exit code was non-zero, it raises a
-        CalledProcessError. The CalledProcessError object will have the return code
-        in the returncode attribute, and output & stderr attributes if those streams
-        were captured.
-        If timeout is given, and the process takes too long, a TimeoutExpired
-        exception will be raised.
-        There is an optional argument "input", allowing you to
-        pass a string to the subprocess's stdin.  If you use this argument
-        you may not also use the Popen constructor's "stdin" argument, as
-        it will be used internally.
-        The other arguments are the same as for the Popen constructor.
-        If universal_newlines=True is passed, the "input" argument must be a
-        string and stdout/stderr in the returned object will be strings rather than
-        bytes.
-        """
-        input = kwargs.pop("input", None)
-        timeout = kwargs.pop("timeout", None)
-        check = kwargs.pop("check", False)
-        if input is not None:
-            if "stdin" in kwargs:
-                raise ValueError("stdin and input arguments may not both be used.")
-            kwargs["stdin"] = PIPE
-
-        process = Popen(*popenargs, **kwargs)
-        try:
-            process.__enter__()  # No-Op really... illustrate "with in 2.4"
-            try:
-                stdout, stderr = process.communicate(input, timeout=timeout)
-            except TimeoutExpired:
-                process.kill()
-                stdout, stderr = process.communicate()
-                raise TimeoutExpired(
-                    process.args, timeout, output=stdout, stderr=stderr
-                )
-            except:
-                process.kill()
-                process.wait()
-                raise
-            retcode = process.poll()
-            if check and retcode:
-                raise CalledProcessError(
-                    retcode, process.args, output=stdout, stderr=stderr
-                )
-        finally:
-            # None because our context manager __exit__ does not use them.
-            process.__exit__(None, None, None)
-
-        return CompletedProcess(process.args, retcode, stdout, stderr)
-
-    subprocess.run = run
-    subprocess.CalledProcessError = CalledProcessError
-
-
-def decode(string, encodings=None):
-    if not PY2 and not isinstance(string, bytes):
-        return string
-
-    if PY2 and isinstance(string, unicode):
-        return string
-
-    encodings = encodings or ["utf-8", "latin1", "ascii"]
-
-    for encoding in encodings:
-        try:
-            return string.decode(encoding)
-        except (UnicodeEncodeError, UnicodeDecodeError):
-            pass
-
-    return string.decode(encodings[0], errors="ignore")
-
-
-def encode(string, encodings=None):
-    if not PY2 and isinstance(string, bytes):
-        return string
-
-    if PY2 and isinstance(string, str):
-        return string
-
-    encodings = encodings or ["utf-8", "latin1", "ascii"]
-
-    for encoding in encodings:
-        try:
-            return string.encode(encoding)
-        except (UnicodeEncodeError, UnicodeDecodeError):
-            pass
-
-    return string.encode(encodings[0], errors="ignore")
-
-
-def to_str(string):
-    if isinstance(string, str) or not isinstance(string, (unicode, bytes)):
-        return string
-
-    if PY2:
-        method = "encode"
-    else:
-        method = "decode"
-
-    encodings = ["utf-8", "latin1", "ascii"]
-
-    for encoding in encodings:
-        try:
-            return getattr(string, method)(encoding)
-        except (UnicodeEncodeError, UnicodeDecodeError):
-            pass
-
-    return getattr(string, method)(encodings[0], errors="ignore")
-
-
-def list_to_shell_command(cmd):
-    return " ".join(
-        '"{}"'.format(token) if " " in token and token[0] not in {"'", '"'} else token
-        for token in cmd
-    )
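
The `decode`/`encode` helpers above walk a codec list and end with a lossy fallback so the caller never sees a `UnicodeDecodeError`. The same idea in Python 3-only form (a sketch, without the `PY2` branches):

```python
# Try a list of codecs in order; as a last resort, decode lossily
# with the first codec instead of raising.
def decode(raw, encodings=("utf-8", "latin-1", "ascii")):
    if not isinstance(raw, bytes):
        return raw
    for encoding in encodings:
        try:
            return raw.decode(encoding)
        except UnicodeDecodeError:
            continue
    return raw.decode(encodings[0], errors="ignore")

print(decode("héllo".encode("utf-8")))    # héllo
print(decode("héllo".encode("latin-1")))  # héllo (utf-8 fails, latin-1 succeeds)
```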
diff --git a/vendor/poetry/poetry/utils/appdirs.py b/vendor/poetry/poetry/utils/appdirs.py
deleted file mode 100644
index 5b9da0cd..00000000
--- a/vendor/poetry/poetry/utils/appdirs.py
+++ /dev/null
@@ -1,252 +0,0 @@
-"""
-This code was taken from https://github.com/ActiveState/appdirs and modified
-to suit our purposes.
-"""
-import os
-import sys
-
-
-WINDOWS = sys.platform.startswith("win") or (sys.platform == "cli" and os.name == "nt")
-
-
-def expanduser(path):
-    """
-    Expand ~ and ~user constructions.
-
-    Includes a workaround for http://bugs.python.org/issue14768
-    """
-    expanded = os.path.expanduser(path)
-    if path.startswith("~/") and expanded.startswith("//"):
-        expanded = expanded[1:]
-    return expanded
-
-
-def user_cache_dir(appname):
-    r"""
-    Return full path to the user-specific cache dir for this application.
-
-        "appname" is the name of application.
-
-    Typical user cache directories are:
-        macOS:      ~/Library/Caches/<AppName>
-        Unix:       ~/.cache/<AppName> (XDG default)
-        Windows:    C:\Users\<username>\AppData\Local\<AppName>\Cache
-
-    On Windows the only suggestion in the MSDN docs is that local settings go
-    in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the
-    non-roaming app data dir (the default returned by `user_data_dir`). Apps
-    typically put cache data somewhere *under* the given dir here. Some
-    examples:
-        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
-        ...\Acme\SuperApp\Cache\1.0
-
-    OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
-    """
-    if WINDOWS:
-        # Get the base path
-        path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
-
-        # Add our app name and Cache directory to it
-        path = os.path.join(path, appname, "Cache")
-    elif sys.platform == "darwin":
-        # Get the base path
-        path = expanduser("~/Library/Caches")
-
-        # Add our app name to it
-        path = os.path.join(path, appname)
-    else:
-        # Get the base path
-        path = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache"))
-
-        # Add our app name to it
-        path = os.path.join(path, appname)
-
-    return path
-
-
-def user_data_dir(appname, roaming=False):
-    r"""
-    Return full path to the user-specific data dir for this application.
-
-        "appname" is the name of application.
-            If None, just the system directory is returned.
-        "roaming" (boolean, default False) can be set True to use the Windows
-            roaming appdata directory. That means that for users on a Windows
-            network setup for roaming profiles, this user data will be
-            sync'd on login. See
-            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
-            for a discussion of issues.
-
-    Typical user data directories are:
-        macOS:                  ~/Library/Application Support/<AppName>
-        Unix:                   ~/.local/share/<AppName>    # or in
-                                $XDG_DATA_HOME, if defined
-        Win XP (not roaming):   C:\Documents and Settings\<username>\ ...
-                                ...Application Data\<AppName>
-        Win XP (roaming):       C:\Documents and Settings\<username>\Local ...
-                                ...Settings\Application Data\<AppName>
-        Win 7  (not roaming):   C:\Users\<username>\AppData\Local\<AppName>
-        Win 7  (roaming):       C:\Users\<username>\AppData\Roaming\<AppName>
-
-    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
-    That means, by default "~/.local/share/<AppName>".
-    """
-    if WINDOWS:
-        const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
-        path = os.path.join(os.path.normpath(_get_win_folder(const)), appname)
-    elif sys.platform == "darwin":
-        path = os.path.join(expanduser("~/Library/Application Support/"), appname)
-    else:
-        path = os.path.join(
-            os.getenv("XDG_DATA_HOME", expanduser("~/.local/share")), appname
-        )
-
-    return path
-
-
-def user_config_dir(appname, roaming=True):
-    """Return full path to the user-specific config dir for this application.
-
-        "appname" is the name of application.
-            If None, just the system directory is returned.
-        "roaming" (boolean, default True) can be set False to not use the
-            Windows roaming appdata directory. That means that for users on a
-            Windows network setup for roaming profiles, this user data will be
-            sync'd on login. See
-            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
-            for a discussion of issues.
-
-    Typical user data directories are:
-        macOS:                  same as user_data_dir
-        Unix:                   ~/.config/<AppName>
-        Win *:                  same as user_data_dir
-
-    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
-    That means, by default "~/.config/<AppName>".
-    """
-    if WINDOWS:
-        path = user_data_dir(appname, roaming=roaming)
-    elif sys.platform == "darwin":
-        path = user_data_dir(appname)
-    else:
-        path = os.getenv("XDG_CONFIG_HOME", expanduser("~/.config"))
-        path = os.path.join(path, appname)
-
-    return path
-
-
-# for the discussion regarding site_config_dirs locations
-# see <https://github.com/pypa/pip/issues/1733>
-def site_config_dirs(appname):
-    r"""Return a list of potential user-shared config dirs for this application.
-
-        "appname" is the name of application.
-
-    Typical user config directories are:
-        macOS:      /Library/Application Support/<AppName>/
-        Unix:       /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in
-                    $XDG_CONFIG_DIRS
-        Win XP:     C:\Documents and Settings\All Users\Application ...
-                    ...Data\<AppName>\
-        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory
-                    on Vista.)
-        Win 7:      Hidden, but writeable on Win 7:
-                    C:\ProgramData\<AppName>\
-    """
-    if WINDOWS:
-        path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
-        pathlist = [os.path.join(path, appname)]
-    elif sys.platform == "darwin":
-        pathlist = [os.path.join("/Library/Application Support", appname)]
-    else:
-        # try looking in $XDG_CONFIG_DIRS
-        xdg_config_dirs = os.getenv("XDG_CONFIG_DIRS", "/etc/xdg")
-        if xdg_config_dirs:
-            pathlist = [
-                os.path.join(expanduser(x), appname)
-                for x in xdg_config_dirs.split(os.pathsep)
-            ]
-        else:
-            pathlist = []
-
-        # always look in /etc directly as well
-        pathlist.append("/etc")
-
-    return pathlist
-
-
-# -- Windows support functions --
-
-
-def _get_win_folder_from_registry(csidl_name):
-    """
-    This is a fallback technique at best. I'm not sure if using the
-    registry for this guarantees us the correct answer for all CSIDL_*
-    names.
-    """
-    import _winreg
-
-    shell_folder_name = {
-        "CSIDL_APPDATA": "AppData",
-        "CSIDL_COMMON_APPDATA": "Common AppData",
-        "CSIDL_LOCAL_APPDATA": "Local AppData",
-    }[csidl_name]
-
-    key = _winreg.OpenKey(
-        _winreg.HKEY_CURRENT_USER,
-        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders",
-    )
-    directory, _type = _winreg.QueryValueEx(key, shell_folder_name)
-    return directory
-
-
-def _get_win_folder_with_ctypes(csidl_name):
-    csidl_const = {
-        "CSIDL_APPDATA": 26,
-        "CSIDL_COMMON_APPDATA": 35,
-        "CSIDL_LOCAL_APPDATA": 28,
-    }[csidl_name]
-
-    buf = ctypes.create_unicode_buffer(1024)
-    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
-
-    # Downgrade to short path name if have highbit chars. See
-    # <http://bugs.python.org/issue3426>.
-    has_high_char = False
-    for c in buf:
-        if ord(c) > 255:
-            has_high_char = True
-            break
-    if has_high_char:
-        buf2 = ctypes.create_unicode_buffer(1024)
-        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
-            buf = buf2
-
-    return buf.value
-
-
-if WINDOWS:
-    try:
-        import ctypes
-
-        _get_win_folder = _get_win_folder_with_ctypes
-    except ImportError:
-        _get_win_folder = _get_win_folder_from_registry
-
-
-def _win_path_to_bytes(path):
-    """Encode Windows paths to bytes. Only used on Python 2.
-
-    Motivation is to be consistent with other operating systems where paths
-    are also returned as bytes. This avoids problems mixing bytes and Unicode
-    elsewhere in the codebase. For more details and discussion see
-    <https://github.com/pypa/pip/issues/3463>.
-
-    If encoding using ASCII and MBCS fails, return the original Unicode path.
-    """
-    for encoding in ("ASCII", "MBCS"):
-        try:
-            return path.encode(encoding)
-        except (UnicodeEncodeError, LookupError):
-            pass
-    return path
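
On Unix the deleted helpers all follow the same XDG pattern: honour the relevant `$XDG_*` variable when set, otherwise fall back to a dotted home directory. A minimal, Unix-only sketch of the cache case:

```python
# XDG cache-directory rule: $XDG_CACHE_HOME wins, ~/.cache is the default.
import os

def xdg_cache_dir(appname):
    base = os.getenv("XDG_CACHE_HOME", os.path.expanduser("~/.cache"))
    return os.path.join(base, appname)

print(xdg_cache_dir("pypoetry"))  # e.g. /home/alice/.cache/pypoetry
```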
diff --git a/vendor/poetry/poetry/utils/env.py b/vendor/poetry/poetry/utils/env.py
deleted file mode 100644
index 1f533f44..00000000
--- a/vendor/poetry/poetry/utils/env.py
+++ /dev/null
@@ -1,1704 +0,0 @@
-import base64
-import hashlib
-import json
-import os
-import platform
-import re
-import shutil
-import sys
-import sysconfig
-
-from contextlib import contextmanager
-from copy import deepcopy
-from importlib import resources
-from typing import Any
-from typing import Dict
-from typing import List
-from typing import Optional
-from typing import Tuple
-from typing import Union
-
-import tomlkit
-import virtualenv
-
-from clikit.api.io import IO
-from packaging.tags import Tag
-from packaging.tags import interpreter_name
-from packaging.tags import interpreter_version
-from packaging.tags import sys_tags
-
-from poetry import __path__ as __pkgpath__
-from poetry.core.packages import Package
-from poetry.core.semver import parse_constraint
-from poetry.core.semver.version import Version
-from poetry.core.toml.file import TOMLFile
-from poetry.core.version.markers import BaseMarker
-from poetry.locations import CACHE_DIR
-from poetry.poetry import Poetry
-from poetry.utils._compat import CalledProcessError
-from poetry.utils._compat import Path
-from poetry.utils._compat import decode
-from poetry.utils._compat import encode
-from poetry.utils._compat import list_to_shell_command
-from poetry.utils._compat import subprocess
-from poetry.utils.helpers import is_dir_writable
-from poetry.utils.helpers import paths_csv
-
-from . import __name__ as _pkg
-
-
-GET_ENVIRONMENT_INFO = """\
-import json
-import os
-import platform
-import sys
-import sysconfig
-
-INTERPRETER_SHORT_NAMES = {
-    "python": "py",
-    "cpython": "cp",
-    "pypy": "pp",
-    "ironpython": "ip",
-    "jython": "jy",
-}
-
-
-def interpreter_version():
-    version = sysconfig.get_config_var("interpreter_version")
-    if version:
-        version = str(version)
-    else:
-        version = _version_nodot(sys.version_info[:2])
-
-    return version
-
-
-def _version_nodot(version):
-    # type: (PythonVersion) -> str
-    if any(v >= 10 for v in version):
-        sep = "_"
-    else:
-        sep = ""
-
-    return sep.join(map(str, version))
-
-
-if hasattr(sys, "implementation"):
-    info = sys.implementation.version
-    iver = "{0.major}.{0.minor}.{0.micro}".format(info)
-    kind = info.releaselevel
-    if kind != "final":
-        iver += kind[0] + str(info.serial)
-
-    implementation_name = sys.implementation.name
-else:
-    iver = "0"
-    implementation_name = platform.python_implementation().lower()
-
-env = {
-    "implementation_name": implementation_name,
-    "implementation_version": iver,
-    "os_name": os.name,
-    "platform_machine": platform.machine(),
-    "platform_release": platform.release(),
-    "platform_system": platform.system(),
-    "platform_version": platform.version(),
-    "python_full_version": platform.python_version(),
-    "platform_python_implementation": platform.python_implementation(),
-    "python_version": ".".join(platform.python_version_tuple()[:2]),
-    "sys_platform": sys.platform,
-    "version_info": tuple(sys.version_info),
-    # Extra information
-    "interpreter_name": INTERPRETER_SHORT_NAMES.get(implementation_name, implementation_name),
-    "interpreter_version": interpreter_version(),
-}
-
-print(json.dumps(env))
-"""
-
-
-GET_BASE_PREFIX = """\
-import sys
-
-if hasattr(sys, "real_prefix"):
-    print(sys.real_prefix)
-elif hasattr(sys, "base_prefix"):
-    print(sys.base_prefix)
-else:
-    print(sys.prefix)
-"""
-
-GET_PYTHON_VERSION = """\
-import sys
-
-print('.'.join([str(s) for s in sys.version_info[:3]]))
-"""
-
-GET_SYS_PATH = """\
-import json
-import sys
-
-print(json.dumps(sys.path))
-"""
-
-GET_PATHS = """\
-import json
-import sysconfig
-
-print(json.dumps(sysconfig.get_paths()))
-"""
-
-GET_PATHS_FOR_GENERIC_ENVS = """\
-# We can't use sysconfig.get_paths() because
-# on some distributions it does not return the proper paths
-# (those used by pip for instance). We go through distutils
-# to get the proper ones.
-import json
-import site
-import sysconfig
-
-from distutils.command.install import SCHEME_KEYS  # noqa
-from distutils.core import Distribution
-
-d = Distribution()
-d.parse_config_files()
-obj = d.get_command_obj("install", create=True)
-obj.finalize_options()
-
-paths = sysconfig.get_paths().copy()
-for key in SCHEME_KEYS:
-    if key == "headers":
-        # headers is not a path returned by sysconfig.get_paths()
-        continue
-
-    paths[key] = getattr(obj, f"install_{key}")
-
-if site.check_enableusersite() and hasattr(obj, "install_usersite"):
-    paths["usersite"] = getattr(obj, "install_usersite")
-    paths["userbase"] = getattr(obj, "install_userbase")
-
-print(json.dumps(paths))
-"""
-
-
-class SitePackages:
-    def __init__(
-        self, path, fallbacks=None, skip_write_checks=False
-    ):  # type: (Path, List[Path], bool) -> None
-        self._path = path
-        self._fallbacks = fallbacks or []
-        self._skip_write_checks = skip_write_checks
-        self._candidates = [self._path] + self._fallbacks
-        self._writable_candidates = None if not skip_write_checks else self._candidates
-
-    @property
-    def path(self):  # type: () -> Path
-        return self._path
-
-    @property
-    def candidates(self):  # type: () -> List[Path]
-        return self._candidates
-
-    @property
-    def writable_candidates(self):  # type: () -> List[Path]
-        if self._writable_candidates is not None:
-            return self._writable_candidates
-
-        self._writable_candidates = []
-        for candidate in self._candidates:
-            if not is_dir_writable(path=candidate, create=True):
-                continue
-            self._writable_candidates.append(candidate)
-
-        return self._writable_candidates
-
-    def make_candidates(
-        self, path, writable_only=False
-    ):  # type: (Path, bool) -> List[Path]
-        candidates = self._candidates if not writable_only else self.writable_candidates
-        if path.is_absolute():
-            for candidate in candidates:
-                try:
-                    path.relative_to(candidate)
-                    return [path]
-                except ValueError:
-                    pass
-            else:
-                raise ValueError(
-                    "{} is not relative to any discovered {}sites".format(
-                        path, "writable " if writable_only else ""
-                    )
-                )
-
-        return [candidate / path for candidate in candidates if candidate]
-
-    def _path_method_wrapper(
-        self, path, method, *args, **kwargs
-    ):  # type: (Path, str, *Any, **Any) -> Union[Tuple[Path, Any], List[Tuple[Path, Any]]]
-
-        # TODO: Move to parameters after dropping Python 2.7
-        return_first = kwargs.pop("return_first", True)
-        writable_only = kwargs.pop("writable_only", False)
-
-        candidates = self.make_candidates(path, writable_only=writable_only)
-
-        if not candidates:
-            raise RuntimeError(
-                'Unable to find a suitable destination for "{}" in {}'.format(
-                    str(path), paths_csv(self._candidates)
-                )
-            )
-
-        results = []
-
-        for candidate in candidates:
-            try:
-                result = candidate, getattr(candidate, method)(*args, **kwargs)
-                if return_first:
-                    return result
-                else:
-                    results.append(result)
-            except (IOError, OSError):
-                # TODO: Replace with PermissionError
-                pass
-
-        if results:
-            return results
-
-        raise OSError("Unable to access any of {}".format(paths_csv(candidates)))
-
-    def write_text(self, path, *args, **kwargs):  # type: (Path, *Any, **Any) -> Path
-        return self._path_method_wrapper(path, "write_text", *args, **kwargs)[0]
-
-    def mkdir(self, path, *args, **kwargs):  # type: (Path, *Any, **Any) -> Path
-        return self._path_method_wrapper(path, "mkdir", *args, **kwargs)[0]
-
-    def exists(self, path):  # type: (Path) -> bool
-        return any(
-            value[-1]
-            for value in self._path_method_wrapper(path, "exists", return_first=False)
-        )
-
-    def find(self, path, writable_only=False):  # type: (Path, bool) -> List[Path]
-        return [
-            value[0]
-            for value in self._path_method_wrapper(
-                path, "exists", return_first=False, writable_only=writable_only
-            )
-            if value[-1] is True
-        ]
-
-    def __getattr__(self, item):
-        try:
-            return super(SitePackages, self).__getattribute__(item)
-        except AttributeError:
-            return getattr(self.path, item)
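-
-    # Usage sketch (illustrative): SitePackages writes to the first candidate
-    # that accepts the operation, falling back to the user site when the
-    # primary purelib directory is read-only:
-    #
-    #     site_packages = SitePackages(env.purelib, fallbacks=[env.usersite])
-    #     written_to = site_packages.write_text(Path("my.pth"), "/some/path\n")
-    #
-    # write_text() returns the candidate path that took the write; exists()
-    # and find() aggregate results across all candidates instead.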
-
-
-class EnvError(Exception):
-
-    pass
-
-
-class EnvCommandError(EnvError):
-    def __init__(self, e, input=None):  # type: (CalledProcessError) -> None
-        self.e = e
-
-        message = "Command {} errored with the following return code {}, and output: \n{}".format(
-            e.cmd, e.returncode, decode(e.output)
-        )
-        if input:
-            message += "input was : {}".format(input)
-        super(EnvCommandError, self).__init__(message)
-
-
-class NoCompatiblePythonVersionFound(EnvError):
-    def __init__(self, expected, given=None):
-        if given:
-            message = (
-                "The specified Python version ({}) "
-                "is not supported by the project ({}).\n"
-                "Please choose a compatible version "
-                "or loosen the python constraint specified "
-                "in the pyproject.toml file.".format(given, expected)
-            )
-        else:
-            message = (
-                "Poetry was unable to find a compatible version. "
-                "If you have one, you can explicitly use it "
-                'via the "env use" command.'
-            )
-
-        super(NoCompatiblePythonVersionFound, self).__init__(message)
-
-
-class EnvManager(object):
-    """
-    Manages the project's Python environments.
-    """
-
-    _env = None
-
-    ENVS_FILE = "envs.toml"
-
-    def __init__(self, poetry):  # type: (Poetry) -> None
-        self._poetry = poetry
-
-    def activate(self, python, io):  # type: (str, IO) -> Env
-        venv_path = self._poetry.config.get("virtualenvs.path")
-        if venv_path is None:
-            venv_path = Path(CACHE_DIR) / "virtualenvs"
-        else:
-            venv_path = Path(venv_path)
-
-        cwd = self._poetry.file.parent
-
-        envs_file = TOMLFile(venv_path / self.ENVS_FILE)
-
-        try:
-            python_version = Version.parse(python)
-            python = "python{}".format(python_version.major)
-            if python_version.precision > 1:
-                python += ".{}".format(python_version.minor)
-        except ValueError:
-            # Executable in PATH or full executable path
-            pass
-
-        try:
-            python_version = decode(
-                subprocess.check_output(
-                    list_to_shell_command(
-                        [
-                            python,
-                            "-c",
-                            "\"import sys; print('.'.join([str(s) for s in sys.version_info[:3]]))\"",
-                        ]
-                    ),
-                    shell=True,
-                )
-            )
-        except CalledProcessError as e:
-            raise EnvCommandError(e)
-
-        python_version = Version.parse(python_version.strip())
-        minor = "{}.{}".format(python_version.major, python_version.minor)
-        patch = python_version.text
-
-        create = False
-        is_root_venv = self._poetry.config.get("virtualenvs.in-project")
-        # If we are required to create the virtual environment in the root folder,
-        # create or recreate it if needed
-        if is_root_venv:
-            create = False
-            venv = self._poetry.file.parent / ".venv"
-            if venv.exists():
-                # We need to check if the patch version is correct
-                _venv = VirtualEnv(venv)
-                current_patch = ".".join(str(v) for v in _venv.version_info[:3])
-
-                if patch != current_patch:
-                    create = True
-
-            self.create_venv(io, executable=python, force=create)
-
-            return self.get(reload=True)
-
-        envs = tomlkit.document()
-        base_env_name = self.generate_env_name(self._poetry.package.name, str(cwd))
-        if envs_file.exists():
-            envs = envs_file.read()
-            current_env = envs.get(base_env_name)
-            if current_env is not None:
-                current_minor = current_env["minor"]
-                current_patch = current_env["patch"]
-
-                if current_minor == minor and current_patch != patch:
-                    # We need to recreate
-                    create = True
-
-        name = "{}-py{}".format(base_env_name, minor)
-        venv = venv_path / name
-
-        # Create if needed
-        if not venv.exists() or create:
-            in_venv = os.environ.get("VIRTUAL_ENV") is not None
-            if in_venv or not venv.exists():
-                create = True
-
-            if venv.exists():
-                # We need to check if the patch version is correct
-                _venv = VirtualEnv(venv)
-                current_patch = ".".join(str(v) for v in _venv.version_info[:3])
-
-                if patch != current_patch:
-                    create = True
-
-            self.create_venv(io, executable=python, force=create)
-
-        # Activate
-        envs[base_env_name] = {"minor": minor, "patch": patch}
-        envs_file.write(envs)
-
-        return self.get(reload=True)
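-
-    # The envs.toml record written above is keyed by the generated base env
-    # name and remembers which Python the project was activated with, e.g.
-    # (values hypothetical):
-    #
-    #     [my-project-EjNk49lY]
-    #     minor = "3.10"
-    #     patch = "3.10.4"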
-
-    def deactivate(self, io):  # type: (IO) -> None
-        venv_path = self._poetry.config.get("virtualenvs.path")
-        if venv_path is None:
-            venv_path = Path(CACHE_DIR) / "virtualenvs"
-        else:
-            venv_path = Path(venv_path)
-
-        name = self._poetry.package.name
-        name = self.generate_env_name(name, str(self._poetry.file.parent))
-
-        envs_file = TOMLFile(venv_path / self.ENVS_FILE)
-        if envs_file.exists():
-            envs = envs_file.read()
-            env = envs.get(name)
-            if env is not None:
-                io.write_line(
-                    "Deactivating virtualenv: {}".format(
-                        venv_path / (name + "-py{}".format(env["minor"]))
-                    )
-                )
-                del envs[name]
-
-                envs_file.write(envs)
-
-    def get(self, reload=False):  # type: (bool) -> Env
-        if self._env is not None and not reload:
-            return self._env
-
-        python_minor = (
-            InterpreterLookup.find()[1] or
-            ".".join(str(c) for c in sys.version_info[:2])
-        )
-
-        venv_path = self._poetry.config.get("virtualenvs.path")
-        if venv_path is None:
-            venv_path = Path(CACHE_DIR) / "virtualenvs"
-        else:
-            venv_path = Path(venv_path)
-
-        cwd = self._poetry.file.parent
-        envs_file = TOMLFile(venv_path / self.ENVS_FILE)
-        env = None
-        base_env_name = self.generate_env_name(self._poetry.package.name, str(cwd))
-        if envs_file.exists():
-            envs = envs_file.read()
-            env = envs.get(base_env_name)
-            if env:
-                python_minor = env["minor"]
-
-        # Check if we are inside a virtualenv or not
-        # Conda sets CONDA_PREFIX in its envs, see
-        # https://github.com/conda/conda/issues/2764
-        env_prefix = os.environ.get("VIRTUAL_ENV", os.environ.get("CONDA_PREFIX"))
-        conda_env_name = os.environ.get("CONDA_DEFAULT_ENV")
-        # It's probably not a good idea to pollute Conda's global "base" env, since
-        # most users have it activated all the time.
-        in_venv = env_prefix is not None and conda_env_name != "base"
-
-        if not in_venv or env is not None:
-            # Checking if a local virtualenv exists
-            if self._poetry.config.get("virtualenvs.in-project") is not False:
-                if (cwd / ".venv").exists() and (cwd / ".venv").is_dir():
-                    venv = cwd / ".venv"
-
-                    return VirtualEnv(venv)
-
-            create_venv = self._poetry.config.get("virtualenvs.create", True)
-
-            if not create_venv:
-                return self.get_system_env()
-
-            venv_path = self._poetry.config.get("virtualenvs.path")
-            if venv_path is None:
-                venv_path = Path(CACHE_DIR) / "virtualenvs"
-            else:
-                venv_path = Path(venv_path)
-
-            name = "{}-py{}".format(base_env_name, python_minor.strip())
-
-            venv = venv_path / name
-
-            if not venv.exists():
-                return self.get_system_env()
-
-            return VirtualEnv(venv)
-
-        if env_prefix is not None:
-            prefix = Path(env_prefix)
-            base_prefix = None
-        else:
-            prefix = Path(sys.prefix)
-            base_prefix = self.get_base_prefix()
-
-        return VirtualEnv(prefix, base_prefix)
-
-    def list(self, name=None):  # type: (Optional[str]) -> List[VirtualEnv]
-        if name is None:
-            name = self._poetry.package.name
-
-        venv_name = self.generate_env_name(name, str(self._poetry.file.parent))
-
-        venv_path = self._poetry.config.get("virtualenvs.path")
-        if venv_path is None:
-            venv_path = Path(CACHE_DIR) / "virtualenvs"
-        else:
-            venv_path = Path(venv_path)
-
-        env_list = [
-            VirtualEnv(Path(p))
-            for p in sorted(venv_path.glob("{}-py*".format(venv_name)))
-        ]
-
-        venv = self._poetry.file.parent / ".venv"
-        if (
-            self._poetry.config.get("virtualenvs.in-project")
-            and venv.exists()
-            and venv.is_dir()
-        ):
-            env_list.insert(0, VirtualEnv(venv))
-        return env_list
-
-    def remove(self, python):  # type: (str) -> Env
-        venv_path = self._poetry.config.get("virtualenvs.path")
-        if venv_path is None:
-            venv_path = Path(CACHE_DIR) / "virtualenvs"
-        else:
-            venv_path = Path(venv_path)
-
-        cwd = self._poetry.file.parent
-        envs_file = TOMLFile(venv_path / self.ENVS_FILE)
-        base_env_name = self.generate_env_name(self._poetry.package.name, str(cwd))
-
-        if python.startswith(base_env_name):
-            venvs = self.list()
-            for venv in venvs:
-                if venv.path.name == python:
-                    # Exact virtualenv name
-                    if not envs_file.exists():
-                        self.remove_venv(venv.path)
-
-                        return venv
-
-                    venv_minor = ".".join(str(v) for v in venv.version_info[:2])
-                    base_env_name = self.generate_env_name(cwd.name, str(cwd))
-                    envs = envs_file.read()
-
-                    current_env = envs.get(base_env_name)
-                    if not current_env:
-                        self.remove_venv(venv.path)
-
-                        return venv
-
-                    if current_env["minor"] == venv_minor:
-                        del envs[base_env_name]
-                        envs_file.write(envs)
-
-                    self.remove_venv(venv.path)
-
-                    return venv
-
-            raise ValueError(
-                'Environment "{}" does not exist.'.format(python)
-            )
-
-        try:
-            python_version = Version.parse(python)
-            python = "python{}".format(python_version.major)
-            if python_version.precision > 1:
-                python += ".{}".format(python_version.minor)
-        except ValueError:
-            # Executable in PATH or full executable path
-            pass
-
-        try:
-            python_version = decode(
-                subprocess.check_output(
-                    list_to_shell_command(
-                        [
-                            python,
-                            "-c",
-                            "\"import sys; print('.'.join([str(s) for s in sys.version_info[:3]]))\"",
-                        ]
-                    ),
-                    shell=True,
-                )
-            )
-        except CalledProcessError as e:
-            raise EnvCommandError(e)
-
-        python_version = Version.parse(python_version.strip())
-        minor = "{}.{}".format(python_version.major, python_version.minor)
-
-        name = "{}-py{}".format(base_env_name, minor)
-        venv = venv_path / name
-
-        if not venv.exists():
-            raise ValueError(
-                'Environment "{}" does not exist.'.format(name)
-            )
-
-        if envs_file.exists():
-            envs = envs_file.read()
-            current_env = envs.get(base_env_name)
-            if current_env is not None:
-                current_minor = current_env["minor"]
-
-                if current_minor == minor:
-                    del envs[base_env_name]
-                    envs_file.write(envs)
-
-        self.remove_venv(venv)
-
-        return VirtualEnv(venv, venv)
-
-    def create_venv(
-        self, io, name=None, executable=None, force=False
-    ):  # type: (IO, Optional[str], Optional[str], bool) -> Env
-        if self._env is not None and not force:
-            return self._env
-
-        cwd = self._poetry.file.parent
-        env = self.get(reload=True)
-
-        if not env.is_sane():
-            force = True
-
-        if env.is_venv() and not force:
-            # Already inside a virtualenv.
-            return env
-
-        create_venv = self._poetry.config.get("virtualenvs.create")
-        root_venv = self._poetry.config.get("virtualenvs.in-project")
-
-        venv_path = self._poetry.config.get("virtualenvs.path")
-        if root_venv:
-            venv_path = cwd / ".venv"
-        elif venv_path is None:
-            venv_path = Path(CACHE_DIR) / "virtualenvs"
-        else:
-            venv_path = Path(venv_path)
-
-        if not name:
-            name = self._poetry.package.name
-
-        python_patch, python_minor = None, None
-        if executable:
-            python_patch = decode(
-                subprocess.check_output(
-                    list_to_shell_command(
-                        [
-                            executable,
-                            "-c",
-                            "\"import sys; print('.'.join([str(s) for s in sys.version_info[:3]]))\"",
-                        ]
-                    ),
-                    shell=True,
-                ).strip()
-            )
-            python_minor = ".".join(python_patch.split(".")[:2])
-
-        supported_python = self._poetry.package.python_constraint
-        if not python_patch or not supported_python.allows(
-            Version.parse(python_patch)
-        ):
-            # The currently activated or chosen Python version
-            # is not compatible with the Python constraint specified
-            # for the project.
-            # If an executable has been specified, we stop there
-            # and notify the user of the incompatibility.
-            # Otherwise, we try to find a compatible Python version.
-            if executable:
-                raise NoCompatiblePythonVersionFound(
-                    self._poetry.package.python_versions, python_patch
-                )
-
-            executable, python_minor, python_patch = InterpreterLookup.find(supported_python)
-
-            if not executable:
-                raise NoCompatiblePythonVersionFound(
-                    self._poetry.package.python_versions
-                )
-
-        if root_venv:
-            venv = venv_path
-        else:
-            name = self.generate_env_name(name, str(cwd))
-            name = "{}-py{}".format(name, python_minor.strip())
-            venv = venv_path / name
-
-        if not venv.exists():
-            if create_venv is False:
-                io.write_line(
-                    ""
-                    "Skipping virtualenv creation, "
-                    "as specified in config file."
-                    ""
-                )
-
-                return self.get_system_env()
-
-            io.write_line(
-                "Creating virtualenv {} in {}".format(name, str(venv_path))
-            )
-
-            self.build_venv(venv, executable=executable)
-
-            if not root_venv:
-                envs = tomlkit.document()
-                envs_file = TOMLFile(venv_path / self.ENVS_FILE)
-                if envs_file.exists():
-                    envs = envs_file.read()
-                envs[name] = {"minor": python_minor, "patch": python_patch}
-                envs_file.write(envs)
-        else:
-            if force:
-                if not env.is_sane():
-                    io.write_line(
-                        "The virtual environment found in {} seems to be broken.".format(
-                            env.path
-                        )
-                    )
-                io.write_line(
-                    "Recreating virtualenv {} in {}".format(name, str(venv))
-                )
-                self.remove_venv(venv)
-                self.build_venv(venv, executable=executable)
-            elif io.is_very_verbose():
-                io.write_line("Virtualenv {} already exists.".format(name))
-
-        return VirtualEnv(venv)
-
-    @classmethod
-    def build_venv(
-        cls, path, executable=None
-    ):  # type: (Union[Path,str], Optional[Union[str, Path]]) -> virtualenv.run.session.Session
-        if isinstance(executable, Path):
-            executable = executable.resolve().as_posix()
-        return virtualenv.cli_run(
-            [
-                "--no-download",
-                "--no-periodic-update",
-                "--python",
-                executable or "python",
-                str(path),
-            ]
-        )
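-
-    # build_venv() is roughly equivalent to the CLI invocation (assuming a
-    # python3.10 on PATH):
-    #
-    #     virtualenv --no-download --no-periodic-update --python python3.10 <path>
-    #
-    # except that it drives virtualenv's in-process API (cli_run) instead of
-    # spawning a subprocess.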
-
-    @classmethod
-    def remove_venv(cls, path):  # type: (Union[Path,str]) -> None
-        if isinstance(path, str):
-            path = Path(path)
-        assert path.is_dir()
-        try:
-            shutil.rmtree(str(path))
-            return
-        except OSError as e:
-            # Continue only if e.errno == 16
-            if e.errno != 16:  # ERRNO 16: Device or resource busy
-                raise e
-
-        # Delete all files and folders but the toplevel one. This is because sometimes
-        # the venv folder is mounted by the OS, such as in a docker volume. In such
-        # cases, an attempt to delete the folder itself will result in an `OSError`.
-        # See https://github.com/python-poetry/poetry/pull/2064
-        for file_path in path.iterdir():
-            if file_path.is_file() or file_path.is_symlink():
-                file_path.unlink()
-            elif file_path.is_dir():
-                shutil.rmtree(str(file_path))
-
-    @classmethod
-    def get_system_env(
-        cls, naive=False
-    ):  # type: (bool) -> Union["SystemEnv", "GenericEnv"]
-        """
-        Retrieve the current Python environment.
-        This can be the base Python environment or an activated virtual environment.
-        This method also works around the issue that the virtual environment
-        used by Poetry internally (when installed via the custom installer)
-        is incorrectly detected as the system environment. Note that this workaround
-        happens only when `naive` is False since there are times where we actually
-        want to retrieve Poetry's custom virtual environment
-        (e.g. plugin installation or self update).
-        """
-        pydef_executable, _, _ = InterpreterLookup.find()
-        prefix, base_prefix = (
-            Path(pydef_executable) if pydef_executable else None,
-            Path(cls.get_base_prefix())
-        )
-        env = SystemEnv(prefix) if prefix else NullEnv()
-        if not naive and prefix:
-            if prefix.joinpath("poetry_env").exists():
-                env = GenericEnv(base_prefix, child_env=env)
-            else:
-                from poetry.locations import data_dir
-
-                try:
-                    prefix.relative_to(data_dir())
-                except ValueError:
-                    pass
-                else:
-                    env = GenericEnv(base_prefix, child_env=env)
-
-        return env
-
-    @classmethod
-    def get_base_prefix(cls):  # type: () -> str
-        if hasattr(sys, "real_prefix"):
-            return sys.real_prefix
-
-        if hasattr(sys, "base_prefix"):
-            return sys.base_prefix
-
-        return sys.prefix
-
-    @classmethod
-    def generate_env_name(cls, name, cwd):  # type: (str, str) -> str
-        name = name.lower()
-        sanitized_name = re.sub(r'[ $`!*@"\\\r\n\t]', "_", name)[:42]
-        h = hashlib.sha256(encode(cwd)).digest()
-        h = base64.urlsafe_b64encode(h).decode()[:8]
-
-        return "{}-{}".format(sanitized_name, h)
-
-
-class Env(object):
-    """
-    An abstract Python environment.
-    """
-
-    def __init__(self, path, base=None):  # type: (Path, Optional[Path]) -> None
-        self._is_windows = sys.platform == "win32"
-        self._is_mingw = sysconfig.get_platform().startswith("mingw")
-        self._is_conda = bool(os.environ.get("CONDA_DEFAULT_ENV"))
-
-        if not self._is_windows or self._is_mingw:
-            bin_dir = "bin"
-        else:
-            bin_dir = "Scripts"
-
-        self._path = path
-        self._bin_dir = self._path / bin_dir
-
-        self._base = base or path
-
-        self._executable = "python"
-        self._pip_executable = "pip"
-
-        self.find_executables()
-
-        self._marker_env = None
-        self._pip_version = None
-        self._site_packages = None
-        self._paths = None
-        self._supported_tags = None
-        self._purelib = None
-        self._platlib = None
-        self._script_dirs = None
-
-    @property
-    def path(self):  # type: () -> Path
-        return self._path
-
-    @property
-    def base(self):  # type: () -> Path
-        return self._base
-
-    @property
-    def version_info(self):  # type: () -> Tuple[int, ...]
-        return tuple(self.marker_env["version_info"])
-
-    @property
-    def python_implementation(self):  # type: () -> str
-        return self.marker_env["platform_python_implementation"]
-
-    @property
-    def python(self):  # type: () -> str
-        """
-        Path to current python executable
-        """
-        return self._bin(self._executable)
-
-    @property
-    def marker_env(self):
-        if self._marker_env is None:
-            self._marker_env = self.get_marker_env()
-
-        return self._marker_env
-
-    @property
-    def parent_env(self):  # type: () -> GenericEnv
-        return GenericEnv(self.base, child_env=self)
-
-    @property
-    def pip(self):  # type: () -> str
-        """
-        Path to current pip executable
-        """
-        return self._bin(self._pip_executable)
-
-    @property
-    def platform(self):  # type: () -> str
-        return sys.platform
-
-    @property
-    def os(self):  # type: () -> str
-        return os.name
-
-    @property
-    def pip_version(self):
-        if self._pip_version is None:
-            self._pip_version = self.get_pip_version()
-
-        return self._pip_version
-
-    @property
-    def site_packages(self):  # type: () -> SitePackages
-        if self._site_packages is None:
-            # we disable write checks if no user site exists
-            fallbacks = [self.usersite] if self.usersite else []
-            self._site_packages = SitePackages(
-                self.purelib, fallbacks, skip_write_checks=False if fallbacks else True
-            )
-        return self._site_packages
-
-    @property
-    def usersite(self):  # type: () -> Optional[Path]
-        if "usersite" in self.paths:
-            return Path(self.paths["usersite"])
-
-    @property
-    def userbase(self):  # type: () -> Optional[Path]
-        if "userbase" in self.paths:
-            return Path(self.paths["userbase"])
-
-    @property
-    def purelib(self):  # type: () -> Path
-        if self._purelib is None:
-            self._purelib = Path(self.paths["purelib"])
-
-        return self._purelib
-
-    @property
-    def platlib(self):  # type: () -> Path
-        if self._platlib is None:
-            if "platlib" in self.paths:
-                self._platlib = Path(self.paths["platlib"])
-            else:
-                self._platlib = self.purelib
-
-        return self._platlib
-
-    def is_path_relative_to_lib(self, path):  # type: (Path) -> bool
-        for lib_path in [self.purelib, self.platlib]:
-            try:
-                path.relative_to(lib_path)
-                return True
-            except ValueError:
-                pass
-
-        return False
-
-    @property
-    def sys_path(self):  # type: () -> List[str]
-        raise NotImplementedError()
-
-    @property
-    def paths(self):  # type: () -> Dict[str, str]
-        if self._paths is None:
-            self._paths = self.get_paths()
-
-        return self._paths
-
-    @property
-    def supported_tags(self):  # type: () -> List[Tag]
-        if self._supported_tags is None:
-            self._supported_tags = self.get_supported_tags()
-
-        return self._supported_tags
-
-    @classmethod
-    def get_base_prefix(cls):  # type: () -> str
-        if hasattr(sys, "real_prefix"):
-            return sys.real_prefix
-
-        if hasattr(sys, "base_prefix"):
-            return sys.base_prefix
-
-        return sys.prefix
-
-    def _find_python_executable(self):  # type: () -> None
-        bin_dir = self._bin_dir
-
-        if self._is_windows and self._is_conda:
-            bin_dir = self._path
-
-        python_executables = sorted(
-            p.name
-            for p in bin_dir.glob("python*")
-            if re.match(r"python(?:\d+(?:\.\d+)?)?(?:\.exe)?$", p.name)
-        )
-        if python_executables:
-            executable = python_executables[0]
-            if executable.endswith(".exe"):
-                executable = executable[:-4]
-
-            self._executable = executable
-
-    def _find_pip_executable(self):  # type: () -> None
-        pip_executables = sorted(
-            p.name
-            for p in self._bin_dir.glob("pip*")
-            if re.match(r"pip(?:\d+(?:\.\d+)?)?(?:\.exe)?$", p.name)
-        )
-        if pip_executables:
-            pip_executable = pip_executables[0]
-            if pip_executable.endswith(".exe"):
-                pip_executable = pip_executable[:-4]
-
-            self._pip_executable = pip_executable
-
-    def find_executables(self):  # type: () -> None
-        self._find_python_executable()
-        self._find_pip_executable()
-
-    def get_version_info(self):  # type: () -> Tuple[int, ...]
-        raise NotImplementedError()
-
-    def get_python_implementation(self):  # type: () -> str
-        raise NotImplementedError()
-
-    def get_marker_env(self):  # type: () -> Dict[str, Any]
-        raise NotImplementedError()
-
-    def get_pip_command(self):  # type: () -> List[str]
-        raise NotImplementedError()
-
-    def get_supported_tags(self):  # type: () -> List[Tag]
-        raise NotImplementedError()
-
-    def get_pip_version(self):  # type: () -> Version
-        raise NotImplementedError()
-
-    def get_paths(self):  # type: () -> Dict[str, str]
-        raise NotImplementedError()
-
-    def is_valid_for_marker(self, marker):  # type: (BaseMarker) -> bool
-        return marker.validate(self.marker_env)
-
-    def is_sane(self):  # type: () -> bool
-        """
-        Checks whether the current environment is sane or not.
-        """
-        return True
-
-    def run(self, bin, *args, **kwargs):
-        bin = self._bin(bin)
-        cmd = [bin] + list(args)
-        return self._run(cmd, **kwargs)
-
-    def run_python(self, *args, **kwargs):
-        return self.run(self._executable, *args, **kwargs)
-
-    def run_pip(self, *args, **kwargs):
-        pip = self.get_pip_command()
-        cmd = pip + list(args)
-        return self._run(cmd, **kwargs)
-
-    def run_python_script(self, content, **kwargs):  # type: (str, Any) -> str
-        return self.run(self._executable, "-W", "ignore", "-", input_=content, **kwargs)
-
-    def _run(self, cmd, **kwargs):
-        """
-        Run a command inside the Python environment.
-        """
-        call = kwargs.pop("call", False)
-        input_ = kwargs.pop("input_", None)
-
-        try:
-            if self._is_windows:
-                kwargs["shell"] = True
-
-            if kwargs.get("shell", False):
-                cmd = list_to_shell_command(cmd)
-
-            if input_:
-                output = subprocess.run(
-                    cmd,
-                    stdout=subprocess.PIPE,
-                    stderr=subprocess.STDOUT,
-                    input=encode(input_),
-                    check=True,
-                    **kwargs
-                ).stdout
-            elif call:
-                return subprocess.call(cmd, stderr=subprocess.STDOUT, **kwargs)
-            else:
-                output = subprocess.check_output(
-                    cmd, stderr=subprocess.STDOUT, **kwargs
-                )
-        except CalledProcessError as e:
-            raise EnvCommandError(e, input=input_)
-
-        return decode(output)
-
-    def execute(self, bin, *args, **kwargs):
-        bin = self._bin(bin)
-        env = kwargs.pop("env", {k: v for k, v in os.environ.items()})
-
-        if not self._is_windows:
-            args = [bin] + list(args)
-            return os.execvpe(bin, args, env=env)
-        else:
-            exe = subprocess.Popen([bin] + list(args), env=env, **kwargs)
-            exe.communicate()
-            return exe.returncode
-
-    def is_venv(self):  # type: () -> bool
-        raise NotImplementedError()
-
-    @property
-    def script_dirs(self):  # type: () -> List[Path]
-        if self._script_dirs is None:
-            self._script_dirs = (
-                [Path(self.paths["scripts"])]
-                if "scripts" in self.paths
-                else [self._bin_dir]  # a list, to match the declared type and the append() below
-            )
-            if self.userbase:
-                self._script_dirs.append(self.userbase / self._script_dirs[0].name)
-        return self._script_dirs
-
-    def _bin(self, bin):  # type: (str) -> str
-        """
-        Return path to the given executable.
-        """
-        if self._is_windows and not bin.endswith(".exe"):
-            bin_path = self._bin_dir / (bin + ".exe")
-        else:
-            bin_path = self._bin_dir / bin
-
-        if not bin_path.exists():
-            # On Windows, some executables can be in the base path
-            # This is especially true when installing Python with
-            # the official installer, where python.exe will be at
-            # the root of the env path.
-            # This is an edge case and should not be encountered
-            # in normal uses but this happens in the sonnet script
-            # that creates a fake virtual environment pointing to
-            # a base Python install.
-            if self._is_windows:
-                if not bin.endswith(".exe"):
-                    bin_path = self._path / (bin + ".exe")
-                else:
-                    bin_path = self._path / bin
-
-                if bin_path.exists():
-                    return str(bin_path)
-
-            return bin
-
-        return str(bin_path)
-
-    def __eq__(self, other):  # type: (Env) -> bool
-        return other.__class__ == self.__class__ and other.path == self.path
-
-    def __repr__(self):
-        return '{}("{}")'.format(self.__class__.__name__, self._path)
-
-
-class SystemEnv(Env):
-    """
-    A system (i.e. not a virtualenv) Python environment.
-    """
-
-    def __init__(self, path, base=None, auto_path=True):
-        self._is_windows = sys.platform == "win32"
-        if auto_path and path:
-            path = Path(self._run([str(path), "-"], input_=GET_BASE_PREFIX).strip())
-        super().__init__(path, base=base)
-
-    @property
-    def sys_path(self):
-        output = self.run("python", "-", input_=GET_SYS_PATH)
-        return json.loads(output)
-
-    def get_version_info(self):
-        output = self.run("python", "-", input_=GET_PYTHON_VERSION)
-        return tuple([int(s) for s in output.strip().split(".")])
-
-    def get_python_implementation(self):
-        return self.marker_env["platform_python_implementation"]
-
-    def get_marker_env(self):
-        output = self.run("python", "-", input_=GET_ENVIRONMENT_INFO)
-        return json.loads(output)
-
-    def get_paths(self):
-        output = self.run("python", "-", input_=GET_PATHS)
-        return json.loads(output)
-
-    def get_supported_tags(self):  # type: () -> List[Tag]
-        script = resources.read_text(_pkg, "packaging_tags.py.template")
-
-        output = self.run("python", "-", input_=script)
-
-        return [Tag(*t) for t in json.loads(output)]
-
-    def get_pip_command(self):
-        return [self.python, "-m", "pip"]
-
-    def get_pip_version(self):
-        output = self.run_pip("--version").strip()
-        m = re.match("pip (.+?)(?: from .+)?$", output)
-        if not m:
-            return Version.parse("0.0")
-        return Version.parse(m.group(1))
-
-    def is_venv(self):
-        return self._path != self._base
-
-
-class VirtualEnv(Env):
-    """
-    A virtual Python environment.
-    """
-
-    def __init__(self, path, base=None):  # type: (Path, Optional[Path]) -> None
-        super(VirtualEnv, self).__init__(path, base)
-
-        # If base is None, it probably means this is
-        # a virtualenv created from VIRTUAL_ENV.
-        # In this case we need to get sys.base_prefix
-        # from inside the virtualenv.
-        if base is None:
-            self._base = Path(self.run_python_script(GET_BASE_PREFIX).strip())
-
-    @property
-    def sys_path(self):  # type: () -> List[str]
-        output = self.run_python_script(GET_SYS_PATH)
-
-        return json.loads(output)
-
-    def get_version_info(self):  # type: () -> Tuple[int, ...]
-        output = self.run_python_script(GET_PYTHON_VERSION)
-
-        return tuple([int(s) for s in output.strip().split(".")])
-
-    def get_python_implementation(self):  # type: () -> str
-        return self.marker_env["platform_python_implementation"]
-
-    def get_pip_command(self):  # type: () -> List[str]
-        # We're in a virtualenv that is known to be sane,
-        # so assume that we have a functional pip
-        return [self._bin(self._pip_executable)]
-
-    def get_supported_tags(self):  # type: () -> List[Tag]
-        script = resources.read_text(_pkg, "packaging_tags.py.template")
-
-        output = self.run_python_script(script)
-
-        return [Tag(*t) for t in json.loads(output)]
-
-    def get_marker_env(self):  # type: () -> Dict[str, Any]
-        output = self.run(self._executable, "-", input_=GET_ENVIRONMENT_INFO)
-
-        return json.loads(output)
-
-    def get_pip_version(self):  # type: () -> Version
-        output = self.run_pip("--version").strip()
-        m = re.match("pip (.+?)(?: from .+)?$", output)
-        if not m:
-            return Version.parse("0.0")
-
-        return Version.parse(m.group(1))
-
-    def get_paths(self):  # type: () -> Dict[str, str]
-        output = self.run_python_script(GET_PATHS)
-
-        return json.loads(output)
-
-    def is_venv(self):  # type: () -> bool
-        return True
-
-    def is_sane(self):
-        # A virtualenv is considered sane if both "python" and "pip" exist.
-        return os.path.exists(self.python) and os.path.exists(self._bin("pip"))
-
-    def _run(self, cmd, **kwargs):
-        kwargs["env"] = self.get_temp_environ(environ=kwargs.get("env"))
-        return super(VirtualEnv, self)._run(cmd, **kwargs)
-
-    def get_temp_environ(
-        self, environ=None, exclude=None, **kwargs
-    ):  # type: (Optional[Dict[str, str]], Optional[List[str]], **str) -> Dict[str, str]
-        exclude = exclude or []
-        exclude.extend(["PYTHONHOME", "__PYVENV_LAUNCHER__"])
-
-        if environ:
-            environ = deepcopy(environ)
-            for key in exclude:
-                environ.pop(key, None)
-        else:
-            environ = {k: v for k, v in os.environ.items() if k not in exclude}
-
-        environ.update(kwargs)
-
-        environ["PATH"] = self._updated_path()
-        environ["VIRTUAL_ENV"] = str(self._path)
-
-        return environ
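-
-    # get_temp_environ() reproduces what "activating" the venv in a shell
-    # would do, without sourcing an activate script: the venv's bin directory
-    # is prepended to PATH, VIRTUAL_ENV points at the venv, and PYTHONHOME /
-    # __PYVENV_LAUNCHER__ are removed so the child interpreter resolves its
-    # own standard library.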
-
-    def execute(self, bin, *args, **kwargs):
-        kwargs["env"] = self.get_temp_environ(environ=kwargs.get("env"))
-        return super(VirtualEnv, self).execute(bin, *args, **kwargs)
-
-    @contextmanager
-    def temp_environ(self):
-        environ = dict(os.environ)
-        try:
-            yield
-        finally:
-            os.environ.clear()
-            os.environ.update(environ)
-
-    def _updated_path(self):
-        return os.pathsep.join([str(self._bin_dir), os.environ.get("PATH", "")])
-
-
-class GenericEnv(VirtualEnv):
-    def __init__(
-        self, path, base=None, child_env=None
-    ):  # type: (Path, Optional[Path], Optional[Env]) -> None
-        self._child_env = child_env
-
-        super(GenericEnv, self).__init__(path, base=base)
-
-    def find_executables(self):  # type: () -> None
-        patterns = [("python*", "pip*")]
-
-        if self._child_env:
-            minor_version = "{}.{}".format(
-                self._child_env.version_info[0], self._child_env.version_info[1]
-            )
-            major_version = "{}".format(self._child_env.version_info[0])
-            patterns = [
-                ("python{}".format(minor_version), "pip{}".format(minor_version)),
-                ("python{}".format(major_version), "pip{}".format(major_version)),
-            ]
-
-        python_executable = None
-        pip_executable = None
-
-        for python_pattern, pip_pattern in patterns:
-            if python_executable and pip_executable:
-                break
-
-            if not python_executable:
-                python_executables = sorted(
-                    [
-                        p.name
-                        for p in self._bin_dir.glob(python_pattern)
-                        if re.match(r"python(?:\d+(?:\.\d+)?)?(?:\.exe)?$", p.name)
-                    ]
-                )
-
-                if python_executables:
-                    executable = python_executables[0]
-                    if executable.endswith(".exe"):
-                        executable = executable[:-4]
-
-                    python_executable = executable
-
-            if not pip_executable:
-                pip_executables = sorted(
-                    [
-                        p.name
-                        for p in self._bin_dir.glob(pip_pattern)
-                        if re.match(r"pip(?:\d+(?:\.\d+)?)?(?:\.exe)?$", p.name)
-                    ]
-                )
-                if pip_executables:
-                    pip_executable = pip_executables[0]
-                    if pip_executable.endswith(".exe"):
-                        pip_executable = pip_executable[:-4]
-
-            if python_executable:
-                self._executable = python_executable
-
-            if pip_executable:
-                self._pip_executable = pip_executable
-
-    def get_paths(self):  # type: () -> Dict[str, str]
-        output = self.run_python_script(GET_PATHS_FOR_GENERIC_ENVS)
-
-        return json.loads(output)
-
-    def execute(self, bin, *args, **kwargs):  # type: (str, str, Any) -> Optional[int]
-        return super(VirtualEnv, self).execute(bin, *args, **kwargs)
-
-    def _run(self, cmd, **kwargs):  # type: (List[str], Any) -> Optional[int]
-        return super(VirtualEnv, self)._run(cmd, **kwargs)
-
-    def is_venv(self):  # type: () -> bool
-        return self._path != self._base
-
-
-class NullEnv(SystemEnv):
-    def __init__(self, path=None, base=None, execute=False):
-        if path is None:
-            path = Path(sys.prefix)
-
-        super().__init__(path, base=base, auto_path=False)
-
-        self._execute = execute
-        self.executed = []
-
-    @property
-    def python(self):  # type: () -> str
-        return sys.executable
-
-    @property
-    def sys_path(self):  # type: () -> List[str]
-        return sys.path
-
-    def get_version_info(self):  # type: () -> Tuple[int, ...]
-        return sys.version_info
-
-    def get_python_implementation(self):  # type: () -> str
-        return platform.python_implementation()
-
-    def get_paths(self):  # type: () -> Dict[str, str]
-        # We can't use sysconfig.get_paths() because
-        # on some distributions it does not return the proper paths
-        # (those used by pip for instance). We go through distutils
-        # to get the proper ones.
-        import site
-
-        from distutils.command.install import SCHEME_KEYS  # noqa
-        from distutils.core import Distribution
-
-        d = Distribution()
-        d.parse_config_files()
-        obj = d.get_command_obj("install", create=True)
-        obj.finalize_options()
-
-        paths = sysconfig.get_paths().copy()
-        for key in SCHEME_KEYS:
-            if key == "headers":
-                # headers is not a path returned by sysconfig.get_paths()
-                continue
-
-            paths[key] = getattr(obj, "install_{}".format(key))
-
-        if site.check_enableusersite() and hasattr(obj, "install_usersite"):
-            paths["usersite"] = getattr(obj, "install_usersite")
-            paths["userbase"] = getattr(obj, "install_userbase")
-
-        return paths
-
-    def get_supported_tags(self):  # type: () -> List[Tag]
-        return list(sys_tags())
-
-    def get_marker_env(self):  # type: () -> Dict[str, Any]
-        if hasattr(sys, "implementation"):
-            info = sys.implementation.version
-            iver = "{0.major}.{0.minor}.{0.micro}".format(info)
-            kind = info.releaselevel
-            if kind != "final":
-                iver += kind[0] + str(info.serial)
-
-            implementation_name = sys.implementation.name
-        else:
-            iver = "0"
-            implementation_name = ""
-
-        return {
-            "implementation_name": implementation_name,
-            "implementation_version": iver,
-            "os_name": os.name,
-            "platform_machine": platform.machine(),
-            "platform_release": platform.release(),
-            "platform_system": platform.system(),
-            "platform_version": platform.version(),
-            "python_full_version": platform.python_version(),
-            "platform_python_implementation": platform.python_implementation(),
-            "python_version": ".".join(
-                v for v in platform.python_version().split(".")[:2]
-            ),
-            "sys_platform": sys.platform,
-            "version_info": sys.version_info,
-            # Extra information
-            "interpreter_name": interpreter_name(),
-            "interpreter_version": interpreter_version(),
-        }
-
-    def get_pip_version(self):  # type: () -> Version
-        from pip import __version__
-
-        return Version.parse(__version__)
-
-    def is_venv(self):  # type: () -> bool
-        return self._path != self._base
-
-    def get_pip_command(self):  # type: () -> List[str]
-        return [self._bin("python"), "-m", "pip"]
-
-    def _run(self, cmd, **kwargs):
-        self.executed.append(cmd)
-
-        if self._execute:
-            return super(NullEnv, self)._run(cmd, **kwargs)
-
-    def execute(self, bin, *args, **kwargs):
-        self.executed.append([bin] + list(args))
-
-        if self._execute:
-            return super(NullEnv, self).execute(bin, *args, **kwargs)
-
-    def _bin(self, bin):
-        return bin
-
-
-class MockEnv(NullEnv):
-    def __init__(
-        self,
-        version_info=(3, 7, 0),
-        python_implementation="CPython",
-        platform="darwin",
-        os_name="posix",
-        is_venv=False,
-        pip_version="19.1",
-        sys_path=None,
-        marker_env=None,
-        supported_tags=None,
-        **kwargs
-    ):
-        super(MockEnv, self).__init__(**kwargs)
-
-        self._version_info = version_info
-        self._python_implementation = python_implementation
-        self._platform = platform
-        self._os_name = os_name
-        self._is_venv = is_venv
-        self._pip_version = Version.parse(pip_version)
-        self._sys_path = sys_path
-        self._mock_marker_env = marker_env
-        self._supported_tags = supported_tags
-
-    @property
-    def python(self):  # type: () -> str
-        return self._base
-
-    @property
-    def platform(self):  # type: () -> str
-        return self._platform
-
-    @property
-    def os(self):  # type: () -> str
-        return self._os_name
-
-    @property
-    def pip_version(self):
-        return self._pip_version
-
-    @property
-    def sys_path(self):
-        if self._sys_path is None:
-            return super(MockEnv, self).sys_path
-
-        return self._sys_path
-
-    def get_marker_env(self):  # type: () -> Dict[str, Any]
-        if self._mock_marker_env is not None:
-            return self._mock_marker_env
-
-        marker_env = super(MockEnv, self).get_marker_env()
-        marker_env["python_implementation"] = self._python_implementation
-        marker_env["version_info"] = self._version_info
-        marker_env["python_version"] = ".".join(str(v) for v in self._version_info[:2])
-        marker_env["python_full_version"] = ".".join(str(v) for v in self._version_info)
-        marker_env["sys_platform"] = self._platform
-        marker_env["interpreter_name"] = self._python_implementation.lower()
-        marker_env["interpreter_version"] = "cp" + "".join(
-            str(v) for v in self._version_info[:2]
-        )
-
-        return marker_env
-
-    def is_venv(self):  # type: () -> bool
-        return self._is_venv
-
-
-class InterpreterLookup:
-    @staticmethod
-    def _version_check(executable, supported_python=None):
-        try:
-            python_patch = decode(
-                subprocess.check_output(
-                    list_to_shell_command(
-                        [
-                            executable,
-                            "-c",
-                            "\"import sys; print('.'.join([str(s) for s in sys.version_info[:3]]))\"",
-                        ]
-                    ),
-                    stderr=subprocess.STDOUT,
-                    shell=True,
-                ).strip()
-            )
-        except CalledProcessError:
-            return False, None, None
-
-        if not python_patch:
-            return False, None, None
-
-        if not supported_python or supported_python.allows(
-            Version.parse(python_patch)
-        ):
-            python_minor = ".".join(python_patch.split(".")[:2])
-            return True, python_minor, python_patch
-
-        return False, None, None
-
-    @classmethod
-    def find(cls, constraint=None):
-        executable, minor, patch = None, None, None
-
-        for guess in ["python", "python3", "python2"]:
-            match, minor, patch = cls._version_check(guess, constraint)
-            if match:
-                return guess, minor, patch
-
-        for python_to_try in reversed(
-            sorted(
-                Package.AVAILABLE_PYTHONS,
-                key=lambda v: (v.startswith("3"), -len(v), v),
-            )
-        ):
-            if constraint:
-                if len(python_to_try) == 1:
-                    if not parse_constraint("^{}.0".format(python_to_try)).allows_any(
-                        constraint
-                    ):
-                        continue
-                elif not constraint.allows_all(
-                    parse_constraint(python_to_try + ".*")
-                ):
-                    continue
-
-            guess = "python" + python_to_try
-            match, minor, patch = cls._version_check(guess, constraint)
-            if match:
-                executable = guess
-                break
-
-        return executable, minor, patch
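-
-# Probe order sketch: find() first tries the generic names "python",
-# "python3" and "python2" on PATH, then versioned names built from
-# Package.AVAILABLE_PYTHONS ("python3.10", "python2.7", ...), preferring
-# Python 3 candidates and skipping any version the project's constraint
-# rules out before the interpreter is even executed.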
diff --git a/vendor/poetry/poetry/utils/exporter.py b/vendor/poetry/poetry/utils/exporter.py
deleted file mode 100644
index b8cced20..00000000
--- a/vendor/poetry/poetry/utils/exporter.py
+++ /dev/null
@@ -1,169 +0,0 @@
-from typing import Optional
-from typing import Sequence
-from typing import Union
-
-from clikit.api.io import IO
-
-from poetry.core.packages.utils.utils import path_to_url
-from poetry.poetry import Poetry
-from poetry.utils._compat import Path
-from poetry.utils._compat import decode
-from poetry.utils._compat import urlparse
-
-
-class Exporter(object):
-    """
-    Exporter class to export a lock file to alternative formats.
-    """
-
-    FORMAT_REQUIREMENTS_TXT = "requirements.txt"
-    #: The names of the supported export formats.
-    ACCEPTED_FORMATS = (FORMAT_REQUIREMENTS_TXT,)
-    ALLOWED_HASH_ALGORITHMS = ("sha256", "sha384", "sha512")
-
-    def __init__(self, poetry):  # type: (Poetry) -> None
-        self._poetry = poetry
-
-    def export(
-        self,
-        fmt,
-        cwd,
-        output,
-        with_hashes=True,
-        dev=False,
-        extras=None,
-        with_credentials=False,
-    ):  # type: (str, Path, Union[IO, str], bool, bool, Optional[Union[bool, Sequence[str]]], bool) -> None
-        if fmt not in self.ACCEPTED_FORMATS:
-            raise ValueError("Invalid export format: {}".format(fmt))
-
-        getattr(self, "_export_{}".format(fmt.replace(".", "_")))(
-            cwd,
-            output,
-            with_hashes=with_hashes,
-            dev=dev,
-            extras=extras,
-            with_credentials=with_credentials,
-        )
-
-    def _export_requirements_txt(
-        self,
-        cwd,
-        output,
-        with_hashes=True,
-        dev=False,
-        extras=None,
-        with_credentials=False,
-    ):  # type: (Path, Union[IO, str], bool, bool, Optional[Union[bool, Sequence[str]]], bool) -> None
-        indexes = set()
-        content = ""
-        dependency_lines = set()
-
-        for dependency_package in self._poetry.locker.get_project_dependency_packages(
-            project_requires=self._poetry.package.all_requires, dev=dev, extras=extras
-        ):
-            line = ""
-
-            dependency = dependency_package.dependency
-            package = dependency_package.package
-
-            if package.develop:
-                line += "-e "
-
-            requirement = dependency.to_pep_508(with_extras=False)
-            is_direct_local_reference = (
-                dependency.is_file() or dependency.is_directory()
-            )
-            is_direct_remote_reference = dependency.is_vcs() or dependency.is_url()
-
-            if is_direct_remote_reference:
-                line = requirement
-            elif is_direct_local_reference:
-                dependency_uri = path_to_url(dependency.source_url)
-                line = "{} @ {}".format(dependency.name, dependency_uri)
-            else:
-                line = "{}=={}".format(package.name, package.version)
-
-            if not is_direct_remote_reference:
-                if ";" in requirement:
-                    markers = requirement.split(";", 1)[1].strip()
-                    if markers:
-                        line += "; {}".format(markers)
-
-            if (
-                not is_direct_remote_reference
-                and not is_direct_local_reference
-                and package.source_url
-            ):
-                indexes.add(package.source_url)
-
-            if package.files and with_hashes:
-                hashes = []
-                for f in package.files:
-                    h = f["hash"]
-                    algorithm = "sha256"
-                    if ":" in h:
-                        algorithm, h = h.split(":")
-
-                        if algorithm not in self.ALLOWED_HASH_ALGORITHMS:
-                            continue
-
-                    hashes.append("{}:{}".format(algorithm, h))
-
-                if hashes:
-                    line += " \\\n"
-                    for i, h in enumerate(hashes):
-                        line += "    --hash={}{}".format(
-                            h, " \\\n" if i < len(hashes) - 1 else ""
-                        )
-            dependency_lines.add(line)
-
-        content += "\n".join(sorted(dependency_lines))
-        content += "\n"
-
-        if indexes:
-            # If we have extra indexes, we add them to the beginning of the output
-            indexes_header = ""
-            for index in sorted(indexes):
-                repositories = [
-                    r
-                    for r in self._poetry.pool.repositories
-                    if r.url == index.rstrip("/")
-                ]
-                if not repositories:
-                    continue
-                repository = repositories[0]
-                if (
-                    self._poetry.pool.has_default()
-                    and repository is self._poetry.pool.repositories[0]
-                ):
-                    url = (
-                        repository.authenticated_url
-                        if with_credentials
-                        else repository.url
-                    )
-                    indexes_header = "--index-url {}\n".format(url)
-                    continue
-
-                url = (
-                    repository.authenticated_url if with_credentials else repository.url
-                )
-                parsed_url = urlparse.urlsplit(url)
-                if parsed_url.scheme == "http":
-                    indexes_header += "--trusted-host {}\n".format(parsed_url.netloc)
-                indexes_header += "--extra-index-url {}\n".format(url)
-
-            content = indexes_header + "\n" + content
-
-        self._output(content, cwd, output)
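-
-    # Output sketch (names and hashes hypothetical): a hashed, marker-bearing
-    # entry plus an extra index header would render as:
-    #
-    #     --extra-index-url https://example.org/simple
-    #
-    #     foo==1.2.3; python_version >= "3.7" \
-    #         --hash=sha256:aaaa... \
-    #         --hash=sha256:bbbb...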
-
-    def _output(
-        self, content, cwd, output
-    ):  # type: (str, Path, Union[IO, str]) -> None
-        decoded = decode(content)
-        try:
-            output.write(decoded)
-        except AttributeError:
-            filepath = cwd / output
-            with filepath.open("w", encoding="utf-8") as f:
-                f.write(decoded)
diff --git a/vendor/poetry/poetry/utils/extras.py b/vendor/poetry/poetry/utils/extras.py
deleted file mode 100644
index c97d6ee7..00000000
--- a/vendor/poetry/poetry/utils/extras.py
+++ /dev/null
@@ -1,59 +0,0 @@
-from typing import Iterator
-from typing import List
-from typing import Mapping
-from typing import Sequence
-
-from poetry.core.packages import Package
-from poetry.utils.helpers import canonicalize_name
-
-
-def get_extra_package_names(
-    packages,  # type: Sequence[Package]
-    extras,  # type: Mapping[str, List[str]]
-    extra_names,  # type: Sequence[str]
-):  # type: (...) -> Iterator[str]
-    """
-    Returns all package names required by the given extras.
-
-    :param packages: A collection of packages, such as from Repository.packages
-    :param extras: A mapping of `extras` names to lists of package names, as defined
-        in the `extras` section of `poetry.lock`.
-    :param extra_names: A list of strings specifying names of extra groups to resolve.
-    """
-    if not extra_names:
-        return []
-
-    # lookup for packages by name, faster than looping over packages repeatedly
-    packages_by_name = {package.name: package for package in packages}
-
-    # get and flatten names of packages we've opted into as extras
-    extra_package_names = [
-        canonicalize_name(extra_package_name)
-        for extra_name in extra_names
-        for extra_package_name in extras.get(extra_name, ())
-    ]
-
-    # keep record of packages seen during recursion in order to avoid recursion error
-    seen_package_names = set()
-
-    def _extra_packages(package_names):
-        """Recursively find dependencies for packages names"""
-        # for each extra pacakge name
-        for package_name in package_names:
-            # Find the actual Package object. A missing key indicates an implicit
-            # dependency (like setuptools), which should be ignored
-            package = packages_by_name.get(canonicalize_name(package_name))
-            if package:
-                if package.name not in seen_package_names:
-                    seen_package_names.add(package.name)
-                    yield package.name
-                # Recurse for dependencies
-                for dependency_package_name in _extra_packages(
-                    dependency.name
-                    for dependency in package.requires
-                    if dependency.name not in seen_package_names
-                ):
-                    seen_package_names.add(dependency_package_name)
-                    yield dependency_package_name
-
-    return _extra_packages(extra_package_names)
diff --git a/vendor/poetry/poetry/utils/helpers.py b/vendor/poetry/poetry/utils/helpers.py
deleted file mode 100644
index 232e65b7..00000000
--- a/vendor/poetry/poetry/utils/helpers.py
+++ /dev/null
@@ -1,135 +0,0 @@
-import os
-import re
-import shutil
-import stat
-import tempfile
-
-from contextlib import contextmanager
-from typing import List
-from typing import Optional
-
-import requests
-
-from poetry.config.config import Config
-from poetry.core.packages.package import Package
-from poetry.core.version import Version
-from poetry.utils._compat import Path
-
-
-try:
-    from collections.abc import Mapping
-except ImportError:
-    from collections import Mapping
-
-
-_canonicalize_regex = re.compile("[-_]+")
-
-
-def canonicalize_name(name):  # type: (str) -> str
-    return _canonicalize_regex.sub("-", name).lower()
-
-
-def module_name(name):  # type: (str) -> str
-    return canonicalize_name(name).replace(".", "_").replace("-", "_")
-
-
-def normalize_version(version):  # type: (str) -> str
-    return str(Version(version))
-
-
-def _del_ro(action, name, exc):
-    os.chmod(name, stat.S_IWRITE)
-    os.remove(name)
-
-
-@contextmanager
-def temporary_directory(*args, **kwargs):
-    name = tempfile.mkdtemp(*args, **kwargs)
-
-    yield name
-
-    shutil.rmtree(name, onerror=_del_ro)
-
-
-def get_cert(config, repository_name):  # type: (Config, str) -> Optional[Path]
-    cert = config.get("certificates.{}.cert".format(repository_name))
-    if cert:
-        return Path(cert)
-    else:
-        return None
-
-
-def get_client_cert(config, repository_name):  # type: (Config, str) -> Optional[Path]
-    client_cert = config.get("certificates.{}.client-cert".format(repository_name))
-    if client_cert:
-        return Path(client_cert)
-    else:
-        return None
-
-
-def _on_rm_error(func, path, exc_info):
-    if not os.path.exists(path):
-        return
-
-    os.chmod(path, stat.S_IWRITE)
-    func(path)
-
-
-def safe_rmtree(path):
-    if Path(path).is_symlink():
-        return os.unlink(str(path))
-
-    shutil.rmtree(path, onerror=_on_rm_error)
-
-
-def merge_dicts(d1, d2):
-    for k, v in d2.items():
-        if k in d1 and isinstance(d1[k], dict) and isinstance(d2[k], Mapping):
-            merge_dicts(d1[k], d2[k])
-        else:
-            d1[k] = d2[k]
-
-
-def download_file(
-    url, dest, session=None, chunk_size=1024
-):  # type: (str, str, Optional[requests.Session], int) -> None
-    get = requests.get if not session else session.get
-
-    with get(url, stream=True) as response:
-        response.raise_for_status()
-
-        with open(dest, "wb") as f:
-            for chunk in response.iter_content(chunk_size=chunk_size):
-                if chunk:
-                    f.write(chunk)
-
-
-def get_package_version_display_string(
-    package, root=None
-):  # type: (Package, Optional[Path]) -> str
-    if package.source_type in ["file", "directory"] and root:
-        return "{} {}".format(
-            package.version,
-            Path(os.path.relpath(package.source_url, root.as_posix())).as_posix(),
-        )
-
-    return package.full_pretty_version
-
-
-def paths_csv(paths):  # type: (List[Path]) -> str
-    return ", ".join('"{}"'.format(str(c)) for c in paths)
-
-
-def is_dir_writable(path, create=False):  # type: (Path, bool) -> bool
-    try:
-        if not path.exists():
-            if not create:
-                return False
-            path.mkdir(parents=True, exist_ok=True)
-
-        with tempfile.TemporaryFile(dir=str(path)):
-            pass
-    except (IOError, OSError):
-        return False
-    else:
-        return True
diff --git a/vendor/poetry/poetry/utils/packaging_tags.py.template b/vendor/poetry/poetry/utils/packaging_tags.py.template
deleted file mode 100644
index 15804593..00000000
--- a/vendor/poetry/poetry/utils/packaging_tags.py.template
+++ /dev/null
@@ -1,872 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-
-from __future__ import absolute_import
-
-import distutils.util
-
-try:
-    from importlib.machinery import EXTENSION_SUFFIXES
-except ImportError:  # pragma: no cover
-    import imp
-
-    EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()]
-    del imp
-import collections
-import json
-import logging
-import os
-import platform
-import re
-import struct
-import sys
-import sysconfig
-import warnings
-
-TYPE_CHECKING = False
-cast = lambda type_, value: value
-
-if TYPE_CHECKING:  # pragma: no cover
-    from typing import (
-        IO,
-        Dict,
-        FrozenSet,
-        Iterable,
-        Iterator,
-        List,
-        Optional,
-        Sequence,
-        Tuple,
-        Union,
-    )
-
-    PythonVersion = Sequence[int]
-    MacVersion = Tuple[int, int]
-    GlibcVersion = Tuple[int, int]
-
-
-logger = logging.getLogger(__name__)
-
-INTERPRETER_SHORT_NAMES = {
-    "python": "py",  # Generic.
-    "cpython": "cp",
-    "pypy": "pp",
-    "ironpython": "ip",
-    "jython": "jy",
-}  # type: Dict[str, str]
-
-
-_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32
-
-
-_LEGACY_MANYLINUX_MAP = {
-    # CentOS 7 w/ glibc 2.17 (PEP 599)
-    (2, 17): "manylinux2014",
-    # CentOS 6 w/ glibc 2.12 (PEP 571)
-    (2, 12): "manylinux2010",
-    # CentOS 5 w/ glibc 2.5 (PEP 513)
-    (2, 5): "manylinux1",
-}
-
-# If glibc ever changes its major version, we need to know what the last
-# minor version was, so we can build the complete list of all versions.
-# For now, guess what the highest minor version might be, assume it will
-# be 50 for testing. Once this actually happens, update the dictionary
-# with the actual value.
-_LAST_GLIBC_MINOR = collections.defaultdict(lambda: 50)  # type: Dict[int, int]
-glibcVersion = collections.namedtuple("Version", ["major", "minor"])
-
-
-class Tag(object):
-    """
-    A representation of the tag triple for a wheel.
-
-    Instances are considered immutable and thus are hashable. Equality checking
-    is also supported.
-    """
-
-    __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]
-
-    def __init__(self, interpreter, abi, platform):
-        # type: (str, str, str) -> None
-        self._interpreter = interpreter.lower()
-        self._abi = abi.lower()
-        self._platform = platform.lower()
-        # The __hash__ of every single element in a Set[Tag] will be evaluated each time
-        # that a set calls its `.disjoint()` method, which may be called hundreds of
-        # times when scanning a page of links for packages with tags matching that
-        # Set[Tag]. Pre-computing the value here produces significant speedups for
-        # downstream consumers.
-        self._hash = hash((self._interpreter, self._abi, self._platform))
-
-    @property
-    def interpreter(self):
-        # type: () -> str
-        return self._interpreter
-
-    @property
-    def abi(self):
-        # type: () -> str
-        return self._abi
-
-    @property
-    def platform(self):
-        # type: () -> str
-        return self._platform
-
-    def __eq__(self, other):
-        # type: (object) -> bool
-        if not isinstance(other, Tag):
-            return NotImplemented
-
-        return (
-            (self.platform == other.platform)
-            and (self.abi == other.abi)
-            and (self.interpreter == other.interpreter)
-        )
-
-    def __hash__(self):
-        # type: () -> int
-        return self._hash
-
-    def __str__(self):
-        # type: () -> str
-        return "{}-{}-{}".format(self._interpreter, self._abi, self._platform)
-
-    def __repr__(self):
-        # type: () -> str
-        return "<{self} @ {self_id}>".format(self=self, self_id=id(self))
-
-
-def parse_tag(tag):
-    # type: (str) -> FrozenSet[Tag]
-    """
-    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.
-
-    Returning a set is required due to the possibility that the tag is a
-    compressed tag set.
-    """
-    tags = set()
-    interpreters, abis, platforms = tag.split("-")
-    for interpreter in interpreters.split("."):
-        for abi in abis.split("."):
-            for platform_ in platforms.split("."):
-                tags.add(Tag(interpreter, abi, platform_))
-    return frozenset(tags)
-
-
-def _warn_keyword_parameter(func_name, kwargs):
-    # type: (str, Dict[str, bool]) -> bool
-    """
-    Backwards-compatibility with Python 2.7 to allow treating 'warn' as keyword-only.
-    """
-    if not kwargs:
-        return False
-    elif len(kwargs) > 1 or "warn" not in kwargs:
-        kwargs.pop("warn", None)
-        arg = next(iter(kwargs.keys()))
-        raise TypeError(
-            "{}() got an unexpected keyword argument {!r}".format(func_name, arg)
-        )
-    return kwargs["warn"]
-
-
-def _get_config_var(name, warn=False):
-    # type: (str, bool) -> Union[int, str, None]
-    value = sysconfig.get_config_var(name)
-    if value is None and warn:
-        logger.debug(
-            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
-        )
-    return value
-
-
-def _normalize_string(string):
-    # type: (str) -> str
-    return string.replace(".", "_").replace("-", "_")
-
-
-def _abi3_applies(python_version):
-    # type: (PythonVersion) -> bool
-    """
-    Determine if the Python version supports abi3.
-
-    PEP 384 was first implemented in Python 3.2.
-    """
-    return len(python_version) > 1 and tuple(python_version) >= (3, 2)
-
-
-def _cpython_abis(py_version, warn=False):
-    # type: (PythonVersion, bool) -> List[str]
-    py_version = tuple(py_version)  # To allow for version comparison.
-    abis = []
-    version = _version_nodot(py_version[:2])
-    debug = pymalloc = ucs4 = ""
-    with_debug = _get_config_var("Py_DEBUG", warn)
-    has_refcount = hasattr(sys, "gettotalrefcount")
-    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
-    # extension modules is the best option.
-    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
-    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
-    if with_debug or (with_debug is None and (has_refcount or has_ext)):
-        debug = "d"
-    if py_version < (3, 8):
-        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
-        if with_pymalloc or with_pymalloc is None:
-            pymalloc = "m"
-        if py_version < (3, 3):
-            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
-            if unicode_size == 4 or (
-                unicode_size is None and sys.maxunicode == 0x10FFFF
-            ):
-                ucs4 = "u"
-    elif debug:
-        # Debug builds can also load "normal" extension modules.
-        # We can also assume no UCS-4 or pymalloc requirement.
-        abis.append("cp{version}".format(version=version))
-    abis.insert(
-        0,
-        "cp{version}{debug}{pymalloc}{ucs4}".format(
-            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
-        ),
-    )
-    return abis
-
-
-def cpython_tags(
-    python_version=None,  # type: Optional[PythonVersion]
-    abis=None,  # type: Optional[Iterable[str]]
-    platforms=None,  # type: Optional[Iterable[str]]
-    **kwargs  # type: bool
-):
-    # type: (...) -> Iterator[Tag]
-    """
-    Yields the tags for a CPython interpreter.
-
-    The tags consist of:
-    - cp<python_version>-<abi>-<platform>
-    - cp<python_version>-abi3-<platform>
-    - cp<python_version>-none-<platform>
-    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.
-
-    If python_version only specifies a major version then user-provided ABIs and
-    the 'none' ABI will be used.
-
-    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
-    their normal position and not at the beginning.
-    """
-    warn = _warn_keyword_parameter("cpython_tags", kwargs)
-    if not python_version:
-        python_version = sys.version_info[:2]
-
-    interpreter = "cp{}".format(_version_nodot(python_version[:2]))
-
-    if abis is None:
-        if len(python_version) > 1:
-            abis = _cpython_abis(python_version, warn)
-        else:
-            abis = []
-    abis = list(abis)
-    # 'abi3' and 'none' are explicitly handled later.
-    for explicit_abi in ("abi3", "none"):
-        try:
-            abis.remove(explicit_abi)
-        except ValueError:
-            pass
-
-    platforms = list(platforms or _platform_tags())
-    for abi in abis:
-        for platform_ in platforms:
-            yield Tag(interpreter, abi, platform_)
-    if _abi3_applies(python_version):
-        for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms):
-            yield tag
-    for tag in (Tag(interpreter, "none", platform_) for platform_ in platforms):
-        yield tag
-
-    if _abi3_applies(python_version):
-        for minor_version in range(python_version[1] - 1, 1, -1):
-            for platform_ in platforms:
-                interpreter = "cp{version}".format(
-                    version=_version_nodot((python_version[0], minor_version))
-                )
-                yield Tag(interpreter, "abi3", platform_)
-
-
-def _generic_abi():
-    # type: () -> Iterator[str]
-    abi = sysconfig.get_config_var("SOABI")
-    if abi:
-        yield _normalize_string(abi)
-
-
-def generic_tags(
-    interpreter=None,  # type: Optional[str]
-    abis=None,  # type: Optional[Iterable[str]]
-    platforms=None,  # type: Optional[Iterable[str]]
-    **kwargs  # type: bool
-):
-    # type: (...) -> Iterator[Tag]
-    """
-    Yields the tags for a generic interpreter.
-
-    The tags consist of:
-    - <interpreter>-<abi>-<platform>
-
-    The "none" ABI will be added if it was not explicitly provided.
-    """
-    warn = _warn_keyword_parameter("generic_tags", kwargs)
-    if not interpreter:
-        interp_name = interpreter_name()
-        interp_version = interpreter_version(warn=warn)
-        interpreter = "".join([interp_name, interp_version])
-    if abis is None:
-        abis = _generic_abi()
-    platforms = list(platforms or _platform_tags())
-    abis = list(abis)
-    if "none" not in abis:
-        abis.append("none")
-    for abi in abis:
-        for platform_ in platforms:
-            yield Tag(interpreter, abi, platform_)
-
-
-def _py_interpreter_range(py_version):
-    # type: (PythonVersion) -> Iterator[str]
-    """
-    Yields Python versions in descending order.
-
-    After the latest version, the major-only version will be yielded, and then
-    all previous versions of that major version.
-    """
-    if len(py_version) > 1:
-        yield "py{version}".format(version=_version_nodot(py_version[:2]))
-    yield "py{major}".format(major=py_version[0])
-    if len(py_version) > 1:
-        for minor in range(py_version[1] - 1, -1, -1):
-            yield "py{version}".format(version=_version_nodot((py_version[0], minor)))
-
-
-def compatible_tags(
-    python_version=None,  # type: Optional[PythonVersion]
-    interpreter=None,  # type: Optional[str]
-    platforms=None,  # type: Optional[Iterable[str]]
-):
-    # type: (...) -> Iterator[Tag]
-    """
-    Yields the sequence of tags that are compatible with a specific version of Python.
-
-    The tags consist of:
-    - py*-none-<platform>
-    - <interpreter>-none-any  # ... if `interpreter` is provided.
-    - py*-none-any
-    """
-    if not python_version:
-        python_version = sys.version_info[:2]
-    platforms = list(platforms or _platform_tags())
-    for version in _py_interpreter_range(python_version):
-        for platform_ in platforms:
-            yield Tag(version, "none", platform_)
-    if interpreter:
-        yield Tag(interpreter, "none", "any")
-    for version in _py_interpreter_range(python_version):
-        yield Tag(version, "none", "any")
-
-
-def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER):
-    # type: (str, bool) -> str
-    if not is_32bit:
-        return arch
-
-    if arch.startswith("ppc"):
-        return "ppc"
-
-    return "i386"
-
-
-def _mac_binary_formats(version, cpu_arch):
-    # type: (MacVersion, str) -> List[str]
-    formats = [cpu_arch]
-    if cpu_arch == "x86_64":
-        if version < (10, 4):
-            return []
-        formats.extend(["intel", "fat64", "fat32"])
-
-    elif cpu_arch == "i386":
-        if version < (10, 4):
-            return []
-        formats.extend(["intel", "fat32", "fat"])
-
-    elif cpu_arch == "ppc64":
-        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
-        if version > (10, 5) or version < (10, 4):
-            return []
-        formats.append("fat64")
-
-    elif cpu_arch == "ppc":
-        if version > (10, 6):
-            return []
-        formats.extend(["fat32", "fat"])
-
-    if cpu_arch in {"arm64", "x86_64"}:
-        formats.append("universal2")
-
-    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
-        formats.append("universal")
-
-    return formats
-
-
-def mac_platforms(version=None, arch=None):
-    # type: (Optional[MacVersion], Optional[str]) -> Iterator[str]
-    """
-    Yields the platform tags for a macOS system.
-
-    The `version` parameter is a two-item tuple specifying the macOS version to
-    generate platform tags for. The `arch` parameter is the CPU architecture to
-    generate platform tags for. Both parameters default to the appropriate value
-    for the current system.
-    """
-    version_str, _, cpu_arch = platform.mac_ver()  # type: ignore
-    if version is None:
-        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
-    else:
-        version = version
-    if arch is None:
-        arch = _mac_arch(cpu_arch)
-    else:
-        arch = arch
-
-    if (10, 0) <= version and version < (11, 0):
-        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
-        # "minor" version number.  The major version was always 10.
-        for minor_version in range(version[1], -1, -1):
-            compat_version = 10, minor_version
-            binary_formats = _mac_binary_formats(compat_version, arch)
-            for binary_format in binary_formats:
-                yield "macosx_{major}_{minor}_{binary_format}".format(
-                    major=10, minor=minor_version, binary_format=binary_format
-                )
-
-    if version >= (11, 0):
-        # Starting with Mac OS 11, each yearly release bumps the major version
-        # number.   The minor versions are now the midyear updates.
-        for major_version in range(version[0], 10, -1):
-            compat_version = major_version, 0
-            binary_formats = _mac_binary_formats(compat_version, arch)
-            for binary_format in binary_formats:
-                yield "macosx_{major}_{minor}_{binary_format}".format(
-                    major=major_version, minor=0, binary_format=binary_format
-                )
-
-    if version >= (11, 0):
-        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
-        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
-        # releases exist.
-        #
-        # However, the "universal2" binary format can have a
-        # macOS version earlier than 11.0 when the x86_64 part of the binary supports
-        # that version of macOS.
-        if arch == "x86_64":
-            for minor_version in range(16, 3, -1):
-                compat_version = 10, minor_version
-                binary_formats = _mac_binary_formats(compat_version, arch)
-                for binary_format in binary_formats:
-                    yield "macosx_{major}_{minor}_{binary_format}".format(
-                        major=compat_version[0],
-                        minor=compat_version[1],
-                        binary_format=binary_format,
-                    )
-        else:
-            for minor_version in range(16, 3, -1):
-                compat_version = 10, minor_version
-                binary_format = "universal2"
-                yield "macosx_{major}_{minor}_{binary_format}".format(
-                    major=compat_version[0],
-                    minor=compat_version[1],
-                    binary_format=binary_format,
-                )
-
-
-# From PEP 513, PEP 600
-def _is_manylinux_compatible(name, arch, glibc_version):
-    # type: (str, str, GlibcVersion) -> bool
-    sys_glibc = _get_glibc_version()
-    if sys_glibc < glibc_version:
-        return False
-    # Check for presence of _manylinux module.
-    try:
-        import _manylinux  # noqa
-    except ImportError:
-        pass
-    else:
-        if hasattr(_manylinux, "manylinux_compatible"):
-            result = _manylinux.manylinux_compatible(
-                glibc_version[0], glibc_version[1], arch
-            )
-            if result is not None:
-                return bool(result)
-        else:
-            if glibc_version == (2, 5):
-                if hasattr(_manylinux, "manylinux1_compatible"):
-                    return bool(_manylinux.manylinux1_compatible)
-            if glibc_version == (2, 12):
-                if hasattr(_manylinux, "manylinux2010_compatible"):
-                    return bool(_manylinux.manylinux2010_compatible)
-            if glibc_version == (2, 17):
-                if hasattr(_manylinux, "manylinux2014_compatible"):
-                    return bool(_manylinux.manylinux2014_compatible)
-    return True
-
-
-def _glibc_version_string():
-    # type: () -> Optional[str]
-    # Returns glibc version string, or None if not using glibc.
-    return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
-
-
-def _glibc_version_string_confstr():
-    # type: () -> Optional[str]
-    """
-    Primary implementation of glibc_version_string using os.confstr.
-    """
-    # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
-    # to be broken or missing. This strategy is used in the standard library
-    # platform module.
-    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183
-    try:
-        # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17".
-        version_string = os.confstr(  # type: ignore[attr-defined] # noqa: F821
-            "CS_GNU_LIBC_VERSION"
-        )
-        assert version_string is not None
-        _, version = version_string.split()  # type: Tuple[str, str]
-    except (AssertionError, AttributeError, OSError, ValueError):
-        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
-        return None
-    return version
-
-
-def _glibc_version_string_ctypes():
-    # type: () -> Optional[str]
-    """
-    Fallback implementation of glibc_version_string using ctypes.
-    """
-    try:
-        import ctypes
-    except ImportError:
-        return None
-
-    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
-    # manpage says, "If filename is NULL, then the returned handle is for the
-    # main program". This way we can let the linker do the work to figure out
-    # which libc our process is actually using.
-    #
-    # We must also handle the special case where the executable is not a
-    # dynamically linked executable. This can occur when using musl libc,
-    # for example. In this situation, dlopen() will error, leading to an
-    # OSError. Interestingly, at least in the case of musl, there is no
-    # errno set on the OSError. The single string argument used to construct
-    # OSError comes from libc itself and is therefore not portable to
-    # hard code here. In any case, failure to call dlopen() means we
-    # can proceed, so we bail on our attempt.
-    try:
-        # Note: typeshed is wrong here so we are ignoring this line.
-        process_namespace = ctypes.CDLL(None)  # type: ignore
-    except OSError:
-        return None
-
-    try:
-        gnu_get_libc_version = process_namespace.gnu_get_libc_version
-    except AttributeError:
-        # Symbol doesn't exist -> therefore, we are not linked to
-        # glibc.
-        return None
-
-    # Call gnu_get_libc_version, which returns a string like "2.5"
-    gnu_get_libc_version.restype = ctypes.c_char_p
-    version_str = gnu_get_libc_version()  # type: str
-    # py2 / py3 compatibility:
-    if not isinstance(version_str, str):
-        version_str = version_str.decode("ascii")
-
-    return version_str
-
-
-def _parse_glibc_version(version_str):
-    # type: (str) -> Tuple[int, int]
-    # Parse glibc version.
-    #
-    # We use a regexp instead of str.split because we want to discard any
-    # random junk that might come after the minor version -- this might happen
-    # in patched/forked versions of glibc (e.g. Linaro's version of glibc
-    # uses version strings like "2.20-2014.11"). See gh-3588.
-    m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str)
-    if not m:
-        warnings.warn(
-            "Expected glibc version with 2 components major.minor,"
-            " got: %s" % version_str,
-            RuntimeWarning,
-        )
-        return -1, -1
-    return (int(m.group("major")), int(m.group("minor")))
-
-
-_glibc_version = []  #  type: List[Tuple[int, int]]
-
-
-def _get_glibc_version():
-    # type: () -> Tuple[int, int]
-    if _glibc_version:
-        return _glibc_version[0]
-    version_str = _glibc_version_string()
-    if version_str is None:
-        _glibc_version.append((-1, -1))
-    else:
-        _glibc_version.append(_parse_glibc_version(version_str))
-    return _glibc_version[0]
-
-
-# Python does not provide platform information at sufficient granularity to
-# identify the architecture of the running executable in some cases, so we
-# determine it dynamically by reading the information from the running
-# process. This only applies on Linux, which uses the ELF format.
-class _ELFFileHeader(object):
-    # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
-    class _InvalidELFFileHeader(ValueError):
-        """
-        An invalid ELF file header was found.
-        """
-
-    ELF_MAGIC_NUMBER = 0x7F454C46
-    ELFCLASS32 = 1
-    ELFCLASS64 = 2
-    ELFDATA2LSB = 1
-    ELFDATA2MSB = 2
-    EM_386 = 3
-    EM_S390 = 22
-    EM_ARM = 40
-    EM_X86_64 = 62
-    EF_ARM_ABIMASK = 0xFF000000
-    EF_ARM_ABI_VER5 = 0x05000000
-    EF_ARM_ABI_FLOAT_HARD = 0x00000400
-
-    def __init__(self, file):
-        # type: (IO[bytes]) -> None
-        def unpack(fmt):
-            # type: (str) -> int
-            try:
-                (result,) = struct.unpack(
-                    fmt, file.read(struct.calcsize(fmt))
-                )  # type: (int, )
-            except struct.error:
-                raise _ELFFileHeader._InvalidELFFileHeader()
-            return result
-
-        self.e_ident_magic = unpack(">I")
-        if self.e_ident_magic != self.ELF_MAGIC_NUMBER:
-            raise _ELFFileHeader._InvalidELFFileHeader()
-        self.e_ident_class = unpack("B")
-        if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}:
-            raise _ELFFileHeader._InvalidELFFileHeader()
-        self.e_ident_data = unpack("B")
-        if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}:
-            raise _ELFFileHeader._InvalidELFFileHeader()
-        self.e_ident_version = unpack("B")
-        self.e_ident_osabi = unpack("B")
-        self.e_ident_abiversion = unpack("B")
-        self.e_ident_pad = file.read(7)
-        format_h = "H"
-        format_i = "I"
-        format_q = "Q"
-        format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q
-        self.e_type = unpack(format_h)
-        self.e_machine = unpack(format_h)
-        self.e_version = unpack(format_i)
-        self.e_entry = unpack(format_p)
-        self.e_phoff = unpack(format_p)
-        self.e_shoff = unpack(format_p)
-        self.e_flags = unpack(format_i)
-        self.e_ehsize = unpack(format_h)
-        self.e_phentsize = unpack(format_h)
-        self.e_phnum = unpack(format_h)
-        self.e_shentsize = unpack(format_h)
-        self.e_shnum = unpack(format_h)
-        self.e_shstrndx = unpack(format_h)
-
-
-def _get_elf_header():
-    # type: () -> Optional[_ELFFileHeader]
-    try:
-        with open(sys.executable, "rb") as f:
-            elf_header = _ELFFileHeader(f)
-    except (IOError, OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader):
-        return None
-    return elf_header
-
-
-def _is_linux_armhf():
-    # type: () -> bool
-    # hard-float ABI can be detected from the ELF header of the running
-    # process
-    # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
-    elf_header = _get_elf_header()
-    if elf_header is None:
-        return False
-    result = elf_header.e_ident_class == elf_header.ELFCLASS32
-    result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
-    result &= elf_header.e_machine == elf_header.EM_ARM
-    result &= (
-        elf_header.e_flags & elf_header.EF_ARM_ABIMASK
-    ) == elf_header.EF_ARM_ABI_VER5
-    result &= (
-        elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD
-    ) == elf_header.EF_ARM_ABI_FLOAT_HARD
-    return result
-
-
-def _is_linux_i686():
-    # type: () -> bool
-    elf_header = _get_elf_header()
-    if elf_header is None:
-        return False
-    result = elf_header.e_ident_class == elf_header.ELFCLASS32
-    result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
-    result &= elf_header.e_machine == elf_header.EM_386
-    return result
-
-
-def _have_compatible_manylinux_abi(arch):
-    # type: (str) -> bool
-    if arch == "armv7l":
-        return _is_linux_armhf()
-    if arch == "i686":
-        return _is_linux_i686()
-    return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
-
-
-def _manylinux_tags(linux, arch):
-    # type: (str, str) -> Iterator[str]
-    # Oldest glibc to be supported regardless of architecture is (2, 17).
-    too_old_glibc2 = glibcVersion(2, 16)
-    if arch in {"x86_64", "i686"}:
-        # On x86/i686 also oldest glibc to be supported is (2, 5).
-        too_old_glibc2 = glibcVersion(2, 4)
-    current_glibc = glibcVersion(*_get_glibc_version())
-    glibc_max_list = [current_glibc]
-    # We can assume compatibility across glibc major versions.
-    # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
-    #
-    # Build a list of maximum glibc versions so that we can
-    # output the canonical list of all glibc from current_glibc
-    # down to too_old_glibc2, including all intermediary versions.
-    for glibc_major in range(current_glibc.major - 1, 1, -1):
-        glibc_max_list.append(glibcVersion(glibc_major, _LAST_GLIBC_MINOR[glibc_major]))
-    for glibc_max in glibc_max_list:
-        if glibc_max.major == too_old_glibc2.major:
-            min_minor = too_old_glibc2.minor
-        else:
-            # For other glibc major versions oldest supported is (x, 0).
-            min_minor = -1
-        for glibc_minor in range(glibc_max.minor, min_minor, -1):
-            glibc_version = (glibc_max.major, glibc_minor)
-            tag = "manylinux_{}_{}".format(*glibc_version)
-            if _is_manylinux_compatible(tag, arch, glibc_version):
-                yield linux.replace("linux", tag)
-            # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
-            if glibc_version in _LEGACY_MANYLINUX_MAP:
-                legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
-                if _is_manylinux_compatible(legacy_tag, arch, glibc_version):
-                    yield linux.replace("linux", legacy_tag)
-
-
-def _linux_platforms(is_32bit=_32_BIT_INTERPRETER):
-    # type: (bool) -> Iterator[str]
-    linux = _normalize_string(distutils.util.get_platform())
-    if is_32bit:
-        if linux == "linux_x86_64":
-            linux = "linux_i686"
-        elif linux == "linux_aarch64":
-            linux = "linux_armv7l"
-    _, arch = linux.split("_", 1)
-    if _have_compatible_manylinux_abi(arch):
-        for tag in _manylinux_tags(linux, arch):
-            yield tag
-    yield linux
-
-
-def _generic_platforms():
-    # type: () -> Iterator[str]
-    yield _normalize_string(distutils.util.get_platform())
-
-
-def _platform_tags():
-    # type: () -> Iterator[str]
-    """
-    Provides the platform tags for this installation.
-    """
-    if platform.system() == "Darwin":
-        return mac_platforms()
-    elif platform.system() == "Linux":
-        return _linux_platforms()
-    else:
-        return _generic_platforms()
-
-
-def interpreter_name():
-    # type: () -> str
-    """
-    Returns the name of the running interpreter.
-    """
-    try:
-        name = sys.implementation.name  # type: ignore
-    except AttributeError:  # pragma: no cover
-        # Python 2.7 compatibility.
-        name = platform.python_implementation().lower()
-    return INTERPRETER_SHORT_NAMES.get(name) or name
-
-
-def interpreter_version(**kwargs):
-    # type: (bool) -> str
-    """
-    Returns the version of the running interpreter.
-    """
-    warn = _warn_keyword_parameter("interpreter_version", kwargs)
-    version = _get_config_var("py_version_nodot", warn=warn)
-    if version:
-        version = str(version)
-    else:
-        version = _version_nodot(sys.version_info[:2])
-    return version
-
-
-def _version_nodot(version):
-    # type: (PythonVersion) -> str
-    return "".join(map(str, version))
-
-
-def sys_tags(**kwargs):
-    # type: (bool) -> Iterator[Tag]
-    """
-    Returns the sequence of tag triples for the running interpreter.
-
-    The order of the sequence corresponds to priority order for the
-    interpreter, from most to least important.
-    """
-    warn = _warn_keyword_parameter("sys_tags", kwargs)
-
-    interp_name = interpreter_name()
-    if interp_name == "cp":
-        for tag in cpython_tags(warn=warn):
-            yield tag
-    else:
-        for tag in generic_tags():
-            yield tag
-
-    for tag in compatible_tags():
-        yield tag
-
-
-if __name__ == "__main__":
-    print(json.dumps([(t.interpreter, t.abi, t.platform) for t in sys_tags()]))
diff --git a/vendor/poetry/poetry/utils/password_manager.py b/vendor/poetry/poetry/utils/password_manager.py
deleted file mode 100644
index 24a615a4..00000000
--- a/vendor/poetry/poetry/utils/password_manager.py
+++ /dev/null
@@ -1,192 +0,0 @@
-import logging
-
-
-logger = logging.getLogger(__name__)
-
-
-class PasswordManagerError(Exception):
-
-    pass
-
-
-class KeyRingError(Exception):
-
-    pass
-
-
-class KeyRing:
-    def __init__(self, namespace):
-        self._namespace = namespace
-        self._is_available = True
-
-        self._check()
-
-    def is_available(self):
-        return self._is_available
-
-    def get_password(self, name, username):
-        if not self.is_available():
-            return
-
-        import keyring
-        import keyring.errors
-
-        name = self.get_entry_name(name)
-
-        try:
-            return keyring.get_password(name, username)
-        except (RuntimeError, keyring.errors.KeyringError):
-            raise KeyRingError(
-                "Unable to retrieve the password for {} from the key ring".format(name)
-            )
-
-    def set_password(self, name, username, password):
-        if not self.is_available():
-            return
-
-        import keyring
-        import keyring.errors
-
-        name = self.get_entry_name(name)
-
-        try:
-            keyring.set_password(name, username, password)
-        except (RuntimeError, keyring.errors.KeyringError) as e:
-            raise KeyRingError(
-                "Unable to store the password for {} in the key ring: {}".format(
-                    name, str(e)
-                )
-            )
-
-    def delete_password(self, name, username):
-        if not self.is_available():
-            return
-
-        import keyring
-        import keyring.errors
-
-        name = self.get_entry_name(name)
-
-        try:
-            keyring.delete_password(name, username)
-        except (RuntimeError, keyring.errors.KeyringError):
-            raise KeyRingError(
-                "Unable to delete the password for {} from the key ring".format(name)
-            )
-
-    def get_entry_name(self, name):
-        return "{}-{}".format(self._namespace, name)
-
-    def _check(self):
-        try:
-            import keyring
-        except Exception as e:
-            logger.debug("An error occurred while importing keyring: {}".format(str(e)))
-            self._is_available = False
-
-            return
-
-        backend = keyring.get_keyring()
-        name = backend.name.split(" ")[0]
-        if name == "fail":
-            logger.debug("No suitable keyring backend found")
-            self._is_available = False
-        elif "plaintext" in backend.name.lower():
-            logger.debug("Only a plaintext keyring backend is available. Not using it.")
-            self._is_available = False
-        elif name == "chainer":
-            try:
-                import keyring.backend
-
-                backends = keyring.backend.get_all_keyring()
-
-                self._is_available = any(
-                    [
-                        b.name.split(" ")[0] not in ["chainer", "fail"]
-                        and "plaintext" not in b.name.lower()
-                        for b in backends
-                    ]
-                )
-            except Exception:
-                self._is_available = False
-
-        if not self._is_available:
-            logger.warning("No suitable keyring backends were found")
-
-
-class PasswordManager:
-    def __init__(self, config):
-        self._config = config
-        self._keyring = None
-
-    @property
-    def keyring(self):
-        if self._keyring is None:
-            self._keyring = KeyRing("poetry-repository")
-            if not self._keyring.is_available():
-                logger.warning(
-                    "Using a plaintext file to store and retrieve credentials"
-                )
-
-        return self._keyring
-
-    def set_pypi_token(self, name, token):
-        if not self.keyring.is_available():
-            self._config.auth_config_source.add_property(
-                "pypi-token.{}".format(name), token
-            )
-        else:
-            self.keyring.set_password(name, "__token__", token)
-
-    def get_pypi_token(self, name):
-        if not self.keyring.is_available():
-            return self._config.get("pypi-token.{}".format(name))
-
-        return self.keyring.get_password(name, "__token__")
-
-    def delete_pypi_token(self, name):
-        if not self.keyring.is_available():
-            return self._config.auth_config_source.remove_property(
-                "pypi-token.{}".format(name)
-            )
-
-        self.keyring.delete_password(name, "__token__")
-
-    def get_http_auth(self, name):
-        auth = self._config.get("http-basic.{}".format(name))
-        if not auth:
-            username = self._config.get("http-basic.{}.username".format(name))
-            password = self._config.get("http-basic.{}.password".format(name))
-            if not username and not password:
-                return None
-        else:
-            username, password = auth["username"], auth.get("password")
-            if password is None:
-                password = self.keyring.get_password(name, username)
-
-        return {
-            "username": username,
-            "password": password,
-        }
-
-    def set_http_password(self, name, username, password):
-        auth = {"username": username}
-
-        if not self.keyring.is_available():
-            auth["password"] = password
-        else:
-            self.keyring.set_password(name, username, password)
-
-        self._config.auth_config_source.add_property("http-basic.{}".format(name), auth)
-
-    def delete_http_password(self, name):
-        auth = self.get_http_auth(name)
-        if not auth or "username" not in auth:
-            return
-
-        try:
-            self.keyring.delete_password(name, auth["username"])
-        except KeyRingError:
-            pass
-
-        self._config.auth_config_source.remove_property("http-basic.{}".format(name))
diff --git a/vendor/poetry/poetry/utils/patterns.py b/vendor/poetry/poetry/utils/patterns.py
deleted file mode 100644
index ec6c53d7..00000000
--- a/vendor/poetry/poetry/utils/patterns.py
+++ /dev/null
@@ -1,12 +0,0 @@
-import re
-
-
-wheel_file_re = re.compile(
-    r"^(?P(?P.+?)-(?P\d.*?))"
-    r"(-(?P\d.*?))?"
-    r"-(?P.+?)"
-    r"-(?P.+?)"
-    r"-(?P.+?)"
-    r"\.whl|\.dist-info$",
-    re.VERBOSE,
-)
diff --git a/vendor/poetry/poetry/utils/setup_reader.py b/vendor/poetry/poetry/utils/setup_reader.py
deleted file mode 100644
index 6a45103c..00000000
--- a/vendor/poetry/poetry/utils/setup_reader.py
+++ /dev/null
@@ -1,381 +0,0 @@
-import ast
-
-from typing import Any
-from typing import Dict
-from typing import Iterable
-from typing import List
-from typing import Optional
-from typing import Tuple
-from typing import Union
-
-from poetry.core.semver import Version
-
-from ._compat import PY35
-from ._compat import Path
-from ._compat import basestring
-
-
-try:
-    from configparser import ConfigParser
-except ImportError:
-    from ConfigParser import ConfigParser
-
-
-class SetupReader(object):
-    """
-    Class that reads a setup.py file without executing it.
-    """
-
-    DEFAULT = {
-        "name": None,
-        "version": None,
-        "install_requires": [],
-        "extras_require": {},
-        "python_requires": None,
-    }
-
-    FILES = ["setup.py", "setup.cfg"]
-
-    @classmethod
-    def read_from_directory(
-        cls, directory
-    ):  # type: (Union[basestring, Path]) -> Dict[str, Union[List, Dict]]
-        if isinstance(directory, basestring):
-            directory = Path(directory)
-
-        result = cls.DEFAULT.copy()
-        for filename in cls.FILES:
-            filepath = directory / filename
-            if not filepath.exists():
-                continue
-
-            new_result = getattr(cls(), "read_{}".format(filename.replace(".", "_")))(
-                filepath
-            )
-
-            for key in result.keys():
-                if new_result[key]:
-                    result[key] = new_result[key]
-
-        return result
-
-    @classmethod
-    def _is_empty_result(cls, result):  # type: (Dict[str, Any]) -> bool
-        return (
-            not result["install_requires"]
-            and not result["extras_require"]
-            and not result["python_requires"]
-        )
-
-    def read_setup_py(
-        self, filepath
-    ):  # type: (Union[basestring, Path]) -> Dict[str, Union[List, Dict]]
-        if not PY35:
-            return self.DEFAULT
-
-        if isinstance(filepath, basestring):
-            filepath = Path(filepath)
-
-        with filepath.open(encoding="utf-8") as f:
-            content = f.read()
-
-        result = {}
-
-        body = ast.parse(content).body
-
-        setup_call, body = self._find_setup_call(body)
-        if not setup_call:
-            return self.DEFAULT
-
-        # Inspecting keyword arguments
-        result["name"] = self._find_single_string(setup_call, body, "name")
-        result["version"] = self._find_single_string(setup_call, body, "version")
-        result["install_requires"] = self._find_install_requires(setup_call, body)
-        result["extras_require"] = self._find_extras_require(setup_call, body)
-        result["python_requires"] = self._find_single_string(
-            setup_call, body, "python_requires"
-        )
-
-        return result
-
-    def read_setup_cfg(
-        self, filepath
-    ):  # type: (Union[basestring, Path]) -> Dict[str, Union[List, Dict]]
-        parser = ConfigParser()
-
-        parser.read(str(filepath))
-
-        name = None
-        version = None
-        if parser.has_option("metadata", "name"):
-            name = parser.get("metadata", "name")
-
-        if parser.has_option("metadata", "version"):
-            version = Version.parse(parser.get("metadata", "version")).text
-
-        install_requires = []
-        extras_require = {}
-        python_requires = None
-        if parser.has_section("options"):
-            if parser.has_option("options", "install_requires"):
-                for dep in parser.get("options", "install_requires").split("\n"):
-                    dep = dep.strip()
-                    if not dep:
-                        continue
-
-                    install_requires.append(dep)
-
-            if parser.has_option("options", "python_requires"):
-                python_requires = parser.get("options", "python_requires")
-
-        if parser.has_section("options.extras_require"):
-            for group in parser.options("options.extras_require"):
-                extras_require[group] = []
-                deps = parser.get("options.extras_require", group)
-                for dep in deps.split("\n"):
-                    dep = dep.strip()
-                    if not dep:
-                        continue
-
-                    extras_require[group].append(dep)
-
-        return {
-            "name": name,
-            "version": version,
-            "install_requires": install_requires,
-            "extras_require": extras_require,
-            "python_requires": python_requires,
-        }
-
-    def _find_setup_call(
-        self, elements
-    ):  # type: (List[Any]) -> Tuple[Optional[ast.Call], Optional[List[Any]]]
-        funcdefs = []
-        for i, element in enumerate(elements):
-            if isinstance(element, ast.If) and i == len(elements) - 1:
-                # Checking if the last element is an if statement
-                # and if it is 'if __name__ == "__main__"' which
-                # could contain the call to setup()
-                test = element.test
-                if not isinstance(test, ast.Compare):
-                    continue
-
-                left = test.left
-                if not isinstance(left, ast.Name):
-                    continue
-
-                if left.id != "__name__":
-                    continue
-
-                setup_call, body = self._find_sub_setup_call([element])
-                if not setup_call:
-                    continue
-
-                return setup_call, body + elements
-            if not isinstance(element, ast.Expr):
-                if isinstance(element, ast.FunctionDef):
-                    funcdefs.append(element)
-
-                continue
-
-            value = element.value
-            if not isinstance(value, ast.Call):
-                continue
-
-            func = value.func
-            if not (isinstance(func, ast.Name) and func.id == "setup") and not (
-                isinstance(func, ast.Attribute)
-                and hasattr(func.value, "id")
-                and func.value.id == "setuptools"
-                and func.attr == "setup"
-            ):
-                continue
-
-            return value, elements
-
-        # Nothing, we inspect the function definitions
-        return self._find_sub_setup_call(funcdefs)
-
-    def _find_sub_setup_call(
-        self, elements
-    ):  # type: (List[Any]) -> Tuple[Optional[ast.Call], Optional[List[Any]]]
-        for element in elements:
-            if not isinstance(element, (ast.FunctionDef, ast.If)):
-                continue
-
-            setup_call = self._find_setup_call(element.body)
-            if setup_call != (None, None):
-                setup_call, body = setup_call
-
-                body = elements + body
-
-                return setup_call, body
-
-        return None, None
-
-    def _find_install_requires(
-        self, call, body
-    ):  # type: (ast.Call, Iterable[Any]) -> List[str]
-        install_requires = []
-        value = self._find_in_call(call, "install_requires")
-        if value is None:
-            # Trying to find in kwargs
-            kwargs = self._find_call_kwargs(call)
-
-            if kwargs is None or not isinstance(kwargs, ast.Name):
-                return install_requires
-
-            variable = self._find_variable_in_body(body, kwargs.id)
-            if not isinstance(variable, (ast.Dict, ast.Call)):
-                return install_requires
-
-            if isinstance(variable, ast.Call):
-                if not isinstance(variable.func, ast.Name):
-                    return install_requires
-
-                if variable.func.id != "dict":
-                    return install_requires
-
-                value = self._find_in_call(variable, "install_requires")
-            else:
-                value = self._find_in_dict(variable, "install_requires")
-
-        if value is None:
-            return install_requires
-
-        if isinstance(value, ast.List):
-            for el in value.elts:
-                install_requires.append(el.s)
-        elif isinstance(value, ast.Name):
-            variable = self._find_variable_in_body(body, value.id)
-
-            if variable is not None and isinstance(variable, ast.List):
-                for el in variable.elts:
-                    install_requires.append(el.s)
-
-        return install_requires
-
-    def _find_extras_require(
-        self, call, body
-    ):  # type: (ast.Call, Iterable[Any]) -> Dict[str, List]
-        extras_require = {}
-        value = self._find_in_call(call, "extras_require")
-        if value is None:
-            # Trying to find in kwargs
-            kwargs = self._find_call_kwargs(call)
-
-            if kwargs is None or not isinstance(kwargs, ast.Name):
-                return extras_require
-
-            variable = self._find_variable_in_body(body, kwargs.id)
-            if not isinstance(variable, (ast.Dict, ast.Call)):
-                return extras_require
-
-            if isinstance(variable, ast.Call):
-                if not isinstance(variable.func, ast.Name):
-                    return extras_require
-
-                if variable.func.id != "dict":
-                    return extras_require
-
-                value = self._find_in_call(variable, "extras_require")
-            else:
-                value = self._find_in_dict(variable, "extras_require")
-
-        if value is None:
-            return extras_require
-
-        if isinstance(value, ast.Dict):
-            for key, val in zip(value.keys, value.values):
-                if isinstance(val, ast.Name):
-                    val = self._find_variable_in_body(body, val.id)
-
-                if isinstance(val, ast.List):
-                    extras_require[key.s] = [e.s for e in val.elts]
-        elif isinstance(value, ast.Name):
-            variable = self._find_variable_in_body(body, value.id)
-
-            if variable is None or not isinstance(variable, ast.Dict):
-                return extras_require
-
-            for key, val in zip(variable.keys, variable.values):
-                if isinstance(val, ast.Name):
-                    val = self._find_variable_in_body(body, val.id)
-
-                if isinstance(val, ast.List):
-                    extras_require[key.s] = [e.s for e in val.elts]
-
-        return extras_require
-
-    def _find_single_string(
-        self, call, body, name
-    ):  # type: (ast.Call, List[Any], str) -> Optional[str]
-        value = self._find_in_call(call, name)
-        if value is None:
-            # Trying to find in kwargs
-            kwargs = self._find_call_kwargs(call)
-
-            if kwargs is None or not isinstance(kwargs, ast.Name):
-                return
-
-            variable = self._find_variable_in_body(body, kwargs.id)
-            if not isinstance(variable, (ast.Dict, ast.Call)):
-                return
-
-            if isinstance(variable, ast.Call):
-                if not isinstance(variable.func, ast.Name):
-                    return
-
-                if variable.func.id != "dict":
-                    return
-
-                value = self._find_in_call(variable, name)
-            else:
-                value = self._find_in_dict(variable, name)
-
-        if value is None:
-            return
-
-        if isinstance(value, ast.Str):
-            return value.s
-        elif isinstance(value, ast.Name):
-            variable = self._find_variable_in_body(body, value.id)
-
-            if variable is not None and isinstance(variable, ast.Str):
-                return variable.s
-
-    def _find_in_call(self, call, name):  # type: (ast.Call, str) -> Optional[Any]
-        for keyword in call.keywords:
-            if keyword.arg == name:
-                return keyword.value
-
-    def _find_call_kwargs(self, call):  # type: (ast.Call) -> Optional[Any]
-        kwargs = None
-        for keyword in call.keywords:
-            if keyword.arg is None:
-                kwargs = keyword.value
-
-        return kwargs
-
-    def _find_variable_in_body(
-        self, body, name
-    ):  # type: (Iterable[Any], str) -> Optional[Any]
-        found = None
-        for elem in body:
-            if found:
-                break
-
-            if not isinstance(elem, ast.Assign):
-                continue
-
-            for target in elem.targets:
-                if not isinstance(target, ast.Name):
-                    continue
-
-                if target.id == name:
-                    return elem.value
-
-    def _find_in_dict(self, dict_, name):  # type: (ast.Dict, str) -> Optional[Any]
-        for key, val in zip(dict_.keys, dict_.values):
-            if isinstance(key, ast.Str) and key.s == name:
-                return val
diff --git a/vendor/poetry/poetry/utils/shell.py b/vendor/poetry/poetry/utils/shell.py
deleted file mode 100644
index 2b2fe91f..00000000
--- a/vendor/poetry/poetry/utils/shell.py
+++ /dev/null
@@ -1,115 +0,0 @@
-import os
-import signal
-import sys
-
-import pexpect
-
-from clikit.utils.terminal import Terminal
-from shellingham import ShellDetectionFailure
-from shellingham import detect_shell
-
-from ._compat import WINDOWS
-from ._compat import Path
-from .env import VirtualEnv
-
-
-class Shell:
-    """
-    Represents the current shell.
-    """
-
-    _shell = None
-
-    def __init__(self, name, path):  # type: (str, str) -> None
-        self._name = name
-        self._path = path
-
-    @property
-    def name(self):  # type: () -> str
-        return self._name
-
-    @property
-    def path(self):  # type: () -> str
-        return self._path
-
-    @classmethod
-    def get(cls):  # type: () -> Shell
-        """
-        Retrieve the current shell.
-        """
-        if cls._shell is not None:
-            return cls._shell
-
-        try:
-            name, path = detect_shell(os.getpid())
-        except (RuntimeError, ShellDetectionFailure):
-            shell = None
-
-            if os.name == "posix":
-                shell = os.environ.get("SHELL")
-            elif os.name == "nt":
-                shell = os.environ.get("COMSPEC")
-
-            if not shell:
-                raise RuntimeError("Unable to detect the current shell.")
-
-            name, path = Path(shell).stem, shell
-
-        cls._shell = cls(name, path)
-
-        return cls._shell
-
-    def activate(self, env):  # type: (VirtualEnv) -> None
-        if WINDOWS:
-            return env.execute(self.path)
-
-        terminal = Terminal()
-        with env.temp_environ():
-            c = pexpect.spawn(
-                self._path, ["-i"], dimensions=(terminal.height, terminal.width)
-            )
-
-        if self._name == "zsh":
-            c.setecho(False)
-
-        activate_script = self._get_activate_script()
-        bin_dir = "Scripts" if WINDOWS else "bin"
-        activate_path = env.path / bin_dir / activate_script
-        c.sendline("{} {}".format(self._get_source_command(), activate_path))
-
-        def resize(sig, data):
-            terminal = Terminal()
-            c.setwinsize(terminal.height, terminal.width)
-
-        signal.signal(signal.SIGWINCH, resize)
-
-        # Interact with the new shell.
-        c.interact(escape_character=None)
-        c.close()
-
-        sys.exit(c.exitstatus)
-
-    def _get_activate_script(self):
-        if "fish" == self._name:
-            suffix = ".fish"
-        elif "csh" == self._name:
-            suffix = ".csh"
-        elif "tcsh" == self._name:
-            suffix = ".csh"
-        else:
-            suffix = ""
-
-        return "activate" + suffix
-
-    def _get_source_command(self):
-        if "fish" == self._name:
-            return "source"
-        elif "csh" == self._name:
-            return "source"
-        elif "tcsh" == self._name:
-            return "source"
-
-        return "."
-
-    def __repr__(self):  # type: () -> str
-        return '{}("{}", "{}")'.format(self.__class__.__name__, self._name, self._path)
diff --git a/vendor/poetry/poetry/version/version_selector.py b/vendor/poetry/poetry/version/version_selector.py
deleted file mode 100644
index ea002860..00000000
--- a/vendor/poetry/poetry/version/version_selector.py
+++ /dev/null
@@ -1,77 +0,0 @@
-from typing import Union
-
-from poetry.core.packages import Package
-from poetry.core.semver import Version
-
-
-class VersionSelector(object):
-    def __init__(self, pool):
-        self._pool = pool
-
-    def find_best_candidate(
-        self,
-        package_name,  # type: str
-        target_package_version=None,  # type:  Union[str, None]
-        allow_prereleases=False,  # type: bool
-        source=None,  # type: str
-    ):  # type: (...) -> Union[Package, bool]
-        """
-        Given a package name and optional version,
-        returns the latest Package that matches
-        """
-        from poetry.factory import Factory
-
-        dependency = Factory.create_dependency(
-            package_name,
-            {
-                "version": target_package_version or "*",
-                "allow_prereleases": allow_prereleases,
-                "source": source,
-            },
-        )
-        candidates = self._pool.find_packages(dependency)
-        only_prereleases = all([c.version.is_prerelease() for c in candidates])
-
-        if not candidates:
-            return False
-
-        package = None
-        for candidate in candidates:
-            if (
-                candidate.is_prerelease()
-                and not dependency.allows_prereleases()
-                and not only_prereleases
-            ):
-                continue
-
-            # Select highest version of the two
-            if package is None or package.version < candidate.version:
-                package = candidate
-
-        if package is None:
-            return False
-        return package
-
-    def find_recommended_require_version(self, package):
-        version = package.version
-
-        return self._transform_version(version.text, package.pretty_version)
-
-    def _transform_version(self, version, pretty_version):
-        try:
-            parsed = Version.parse(version)
-            parts = [parsed.major, parsed.minor, parsed.patch]
-        except ValueError:
-            return pretty_version
-
-        parts = parts[: parsed.precision]
-
-        # check to see if we have a semver-looking version
-        if len(parts) < 3:
-            version = pretty_version
-        else:
-            version = ".".join(str(p) for p in parts)
-            if parsed.is_prerelease():
-                version += "-{}".format(".".join(str(p) for p in parsed.prerelease))
-
-        return "^{}".format(version)
diff --git a/vendor/poetry/pyproject.toml b/vendor/poetry/pyproject.toml
index 0ff512a9..4c894530 100644
--- a/vendor/poetry/pyproject.toml
+++ b/vendor/poetry/pyproject.toml
@@ -1,14 +1,31 @@
 [tool.poetry]
 name = "poetry"
-version = "1.1.15"
+version = "1.2.0"
 description = "Python dependency management and packaging made easy."
 authors = [
-    "Sébastien Eustace "
+    "Sébastien Eustace ",
+]
+maintainers = [
+    "Arun Babu Neelicattu ",
+    "Bjorn Neergaard ",
+    "Branch Vincent ",
+    "Bryce Drennan ",
+    "Daniel Eades ",
+    "Randy Döring ",
+    "Steph Samson ",
+    "finswimmer ",
 ]
 license = "MIT"
 
 readme = "README.md"
 
+packages = [
+    { include = "poetry", from = "src" }
+]
+include = [
+    { path = "tests", format = "sdist" }
+]
+
 homepage = "https://python-poetry.org/"
 repository = "https://github.com/python-poetry/poetry"
 documentation = "https://python-poetry.org/docs"
@@ -20,68 +37,61 @@ classifiers = [
     "Topic :: Software Development :: Libraries :: Python Modules"
 ]
 
+[tool.poetry.build]
+generate-setup-file = false
+
 # Requirements
 [tool.poetry.dependencies]
-python = "~2.7 || ^3.5"
+python = "^3.7"
 
-poetry-core = "~1.0.7"
-cleo = "^0.8.1"
-clikit = "^0.6.2"
-crashtest = { version = "^0.3.0", python = "^3.6" }
-requests = "^2.18"
+poetry-core = "1.1.0"
+poetry-plugin-export = "^1.0.6"
+cachecontrol = { version = "^0.12.9", extras = ["filecache"] }
 cachy = "^0.3.0"
-requests-toolbelt = "^0.9.1"
-cachecontrol = [
-    { version = "^0.12.4", extras = ["filecache"], python = "<3.6" },
-    { version = "^0.12.9", extras = ["filecache"], python = "^3.6" }
-]
-pkginfo = "^1.4"
+cleo = "^1.0.0a5"
+crashtest = "^0.3.0"
 html5lib = "^1.0"
-shellingham = "^1.1"
-tomlkit = ">=0.7.0,<1.0.0"
+importlib-metadata = { version = "^4.4", python = "<3.10" }
+jsonschema = "^4.10.0"
+# keyring uses calver; pinned here to the 22.3 series
+keyring = "~22.3.0"
+# packaging uses calver, so version is unclamped
+packaging = ">=20.4"
 pexpect = "^4.7.0"
-packaging = "^20.4"
-virtualenv = { version = "^20.0.26" }
-
-# The typing module is not in the stdlib in Python 2.7
-typing = { version = "^3.6", python = "~2.7" }
-
-# Use pathlib2 for Python 2.7
-pathlib2 = { version = "^2.3", python = "~2.7" }
-# Use futures on Python 2.7
-futures = { version = "^3.3.0", python = "~2.7" }
-# Use glob2 for Python 2.7 and 3.4
-glob2 = { version = "^0.6", python = "~2.7" }
-# functools32 is needed for Python 2.7
-functools32 = { version = "^3.2.3", python = "~2.7" }
-keyring = [
-    { version = "^18.0.1", python = "~2.7" },
-    { version = "^20.0.1", python = "~3.5" },
-    { version = "~22.3.0", python = "^3.6" }
-]
-# Use subprocess32 for Python 2.7
-subprocess32 = { version = "^3.5", python = "~2.7" }
-importlib-metadata = {version = "^1.6.0", python = "<3.8"}
+pkginfo = "^1.5"
+platformdirs = "^2.5.2"
+requests = "^2.18"
+requests-toolbelt = "^0.9.1"
+shellingham = "^1.5"
+# exclude 0.11.2 and 0.11.3 due to https://github.com/sdispater/tomlkit/issues/225
+tomlkit = ">=0.11.1,<1.0.0,!=0.11.2,!=0.11.3"
+# exclude 20.4.5 - 20.4.6 due to https://github.com/pypa/pip/issues/9953
+virtualenv = "(>=20.4.3,<20.4.5 || >=20.4.7)"
+xattr = { version = "^0.9.7", markers = "sys_platform == 'darwin'" }
+urllib3 = "^1.26.0"
+dulwich = "^0.20.44"
 
 [tool.poetry.dev-dependencies]
-pytest = [
-    {version = "^4.1", python = "<3.5"},
-    {version = "^5.4.3", python = "~3.5"},
-    {version = "^6.2.5", python = ">=3.6"}
-]
-pytest-cov = "^2.5"
-pytest-mock = "^1.9"
-pre-commit = { version = "^2.6", python = "^3.6.1" }
-tox = "^3.0"
-pytest-sugar = "^0.9.2"
-httpretty = "^0.9.6"
-# We need to restrict the version of urllib3 to avoid
-# httpretty breaking. This is fixed in httpretty >= 1.0.3
-# but it's not compatible with Python 2.7 and 3.5.
-urllib3 = "~1.25.10"
+tox = "^3.18"
+pytest = "^7.1"
+pytest-cov = "^3.0"
+pytest-mock = "^3.5"
+pytest-randomly = "^3.10"
+pytest-sugar = "^0.9"
+pytest-xdist = { version = "^2.5", extras = ["psutil"] }
+pre-commit = "^2.6"
+deepdiff = "^5.0"
+httpretty = "^1.0"
+typing-extensions = { version = "^4.0.0", python = "<3.8" }
+zipp = { version = "^3.4", python = "<3.8" }
+flatdict = "^4.0.1"
+mypy = ">=0.971"
+types-html5lib = ">=1.1.9"
+types-jsonschema = ">=4.9.0"
+types-requests = ">=2.28.8"
 
 [tool.poetry.scripts]
-poetry = "poetry.console:main"
+poetry = "poetry.console.application:main"
 
 
 [build-system]
@@ -90,34 +100,72 @@ build-backend = "poetry.core.masonry.api"
 
 
 [tool.isort]
+py_version = 37
 profile = "black"
 force_single_line = true
-atomic = true
-include_trailing_comma = true
-lines_after_imports = 2
+combine_as_imports = true
 lines_between_types = 1
-use_parentheses = true
-src_paths = ["poetry", "tests"]
-skip_glob = ["*/setup.py"]
-filter_files = true
-known_first_party = "poetry"
+lines_after_imports = 2
+src_paths = ["src", "tests"]
+extend_skip = ["setup.py"]
+known_third_party = ["poetry.core"]
 
 
 [tool.black]
-line-length = 88
-include = '\.pyi?$'
-exclude = '''
-/(
-    \.eggs
-  | \.git
-  | \.hg
-  | \.mypy_cache
-  | \.tox
-  | \.venv
-  | _build
-  | buck-out
-  | build
-  | dist
-  | tests/.*/setup.py
-)/
+target-version = ['py37']
+preview = true
+force-exclude = '''
+.*/setup\.py$
 '''
+
+
+[tool.mypy]
+files = "src"
+mypy_path = "src"
+namespace_packages = true
+explicit_package_bases = true
+show_error_codes = true
+strict = true
+enable_error_code = [
+    "ignore-without-code",
+    "redundant-expr",
+    "truthy-bool",
+]
+
+# use of importlib-metadata backport at python3.7 makes it impossible to
+# satisfy mypy without some ignores: but we get a different set of ignores at
+# different python versions.
+#
+# Meanwhile, suppress that warning.
+[[tool.mypy.overrides]]
+module = [
+  'poetry.console.commands.self.show.plugins',
+  'poetry.installation.executor',
+  'poetry.mixology.version_solver',
+  'poetry.plugins.plugin_manager',
+  'poetry.repositories.installed_repository',
+  'poetry.utils.env',
+]
+warn_unused_ignores = false
+
+[[tool.mypy.overrides]]
+module = [
+  'cachecontrol.*',
+  'cachy.*',
+  'cleo.*',
+  'crashtest.*',
+  'pexpect.*',
+  'pkginfo.*',
+  'requests_toolbelt.*',
+  'shellingham.*',
+  'virtualenv.*',
+  'xattr.*',
+]
+ignore_missing_imports = true
+
+[tool.coverage.report]
+exclude_lines = [
+    "pragma: no cover",
+    "if TYPE_CHECKING:"
+]
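
The rewritten dependency table leans on compound constraints: the tomlkit range above carves out two broken patch releases, and virtualenv is a union of two ranges. Since packaging is already a runtime dependency, the tomlkit rule can be sanity-checked as a PEP 440 specifier; this snippet is an illustration, not part of the patch:

from packaging.specifiers import SpecifierSet
from packaging.version import Version

# PEP 440 form of: >=0.11.1,<1.0.0 excluding 0.11.2 and 0.11.3
spec = SpecifierSet(">=0.11.1,<1.0.0,!=0.11.2,!=0.11.3")

for candidate in ("0.11.1", "0.11.2", "0.11.3", "0.11.4"):
    print(candidate, Version(candidate) in spec)
# 0.11.1 True, 0.11.2 False, 0.11.3 False, 0.11.4 True
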
diff --git a/vendor/poetry/sonnet b/vendor/poetry/sonnet
deleted file mode 100755
index efec3e5e..00000000
--- a/vendor/poetry/sonnet
+++ /dev/null
@@ -1,291 +0,0 @@
-#!/usr/bin/env python
-import hashlib
-import os
-import shutil
-import subprocess
-import sys
-import tarfile
-
-from gzip import GzipFile
-
-from cleo import Application
-from cleo import Command
-from clikit.api.formatter import Style
-
-
-WINDOWS = sys.platform.startswith("win") or (sys.platform == "cli" and os.name == "nt")
-VENDOR_ONLY = "VENDOR_ONLY" in os.environ
-VENDOR_REUSE = "VENDOR_REUSE" in os.environ
-
-
-class MakeReleaseCommand(Command):
-    """
-    Makes a self-contained package of Poetry.
-
-    release
-        {--P|python=?* : Python version to use}
-    """
-
-    PYTHON = {
-        "2.7": "python2.7",
-        "3.5": "python3.5",
-        "3.6": "python3.6",
-        "3.7": "python3.7",
-        "3.8": "python3.8",
-        "3.9": "python3.9",
-        "3.10": "python3.10",
-    }
-
-    def handle(self):
-        pythons = self.PYTHON
-        if self.option("python"):
-            pythons = {}
-            for python in self.option("python"):
-                parts = python.split(":", 1)
-                if len(parts) == 1:
-                    python = self.PYTHON[parts[0]]
-                    version = parts[0]
-                else:
-                    version, python = parts
-                pythons[version] = python
-
-        self.check_system(pythons)
-
-        from poetry.__version__ import __version__
-        from poetry.core.vcs import get_vcs
-        from poetry.factory import Factory
-        from poetry.puzzle import Solver
-        from poetry.repositories.pool import Pool
-        from poetry.repositories.repository import Repository
-        from poetry.utils._compat import Path
-        from poetry.utils.env import EnvManager
-        from poetry.utils.env import VirtualEnv
-        from poetry.utils.helpers import temporary_directory
-
-        project = Factory().create_poetry(Path.cwd())
-        package = project.package
-        del package.dev_requires[:]
-
-        # We only use the lock file to resolve the dependencies
-        pool = Pool()
-        pool.add_repository(project.locker.locked_repository(with_dev_reqs=True))
-
-        vcs = get_vcs(Path(__file__).parent)
-        poetry_vendor_relative = os.path.join("poetry", "_vendor")
-
-        if vcs:
-            # if vendor reuse is enabled do not exclude these files
-            vcs_excluded = [
-                str(f)
-                for f in vcs.get_ignored_files()
-                if not (VENDOR_REUSE and str(f).startswith(poetry_vendor_relative))
-            ]
-        else:
-            vcs_excluded = []
-
-        created_files = []
-        poetry_dir_src = os.path.join(os.path.dirname(__file__), "poetry")
-        poetry_vendor_src = os.path.join(poetry_dir_src, "_vendor")
-
-        with temporary_directory() as tmp_dir:
-            # Copy poetry to tmp dir
-            poetry_dir = os.path.join(tmp_dir, "poetry")
-            shutil.copytree(
-                poetry_dir_src,
-                poetry_dir,
-                ignore=lambda dir_, names: set(vcs_excluded).intersection(
-                    set([os.path.join(dir_, name) for name in names])
-                ),
-            )
-            created_files += [
-                p.relative_to(Path(tmp_dir))
-                for p in Path(poetry_dir).glob("**/*")
-                if p.is_file()
-                and p.suffix != ".pyc"
-                and str(p.relative_to(Path(tmp_dir))) not in vcs_excluded
-            ]
-            for version, python in sorted(pythons.items()):
-                self.line(
-                    "Preparing files for Python {}".format(
-                        version
-                    )
-                )
-
-                with temporary_directory() as tmp_venv_dir:
-                    venv_dir = Path(tmp_venv_dir) / ".venv"
-                    EnvManager.build_venv(venv_dir.as_posix(), executable=python)
-
-                    env = VirtualEnv(venv_dir, venv_dir)
-                    solver = Solver(package, pool, Repository(), Repository(), self.io)
-                    with solver.use_environment(env):
-                        ops = solver.solve()
-                        for op in ops:
-                            if not env.is_valid_for_marker(op.package.marker):
-                                op.skip("Not needed for the current environment")
-
-                    vendor_dir = Path(
-                        self.vendorize_for_python(
-                            env,
-                            [op.package for op in ops if not op.skipped],
-                            poetry_dir,
-                            version,
-                        )
-                    )
-                    created_files += [
-                        p.relative_to(Path(tmp_dir))
-                        for p in vendor_dir.glob("**/*")
-                        if p.is_file()
-                        and p.suffix != ".pyc"
-                        and str(p.relative_to(Path(tmp_dir))) not in vcs_excluded
-                    ]
-
-                self.line("")
-
-            if VENDOR_ONLY:
-                self.line("Preserving generated vendor directory")
-                shutil.copytree(
-                    os.path.join(poetry_dir, "_vendor"),
-                    poetry_vendor_src,
-                    dirs_exist_ok=True,
-                )
-                self.line("Skipping packaging")
-                return
-
-            self.line("Packaging files")
-            with temporary_directory() as tmp_dir2:
-                base_name = "poetry-{}-{}".format(__version__, sys.platform)
-                name = "{}.tar.gz".format(base_name)
-                gz = GzipFile(os.path.join(tmp_dir2, name), mode="wb")
-                try:
-                    with tarfile.TarFile(
-                        os.path.join(tmp_dir2, name),
-                        mode="w",
-                        fileobj=gz,
-                        format=tarfile.PAX_FORMAT,
-                    ) as tar:
-                        for root, dirs, files in os.walk(tmp_dir):
-                            for f in files:
-                                if f.endswith(".pyc"):
-                                    continue
-
-                                path = os.path.join(os.path.realpath(root), f)
-
-                                relpath = os.path.relpath(
-                                    path, os.path.realpath(tmp_dir)
-                                )
-
-                                if relpath in vcs_excluded:
-                                    continue
-
-                                tar_info = tar.gettarinfo(str(path), arcname=relpath)
-
-                                if tar_info.isreg():
-                                    with open(path, "rb") as f:
-                                        tar.addfile(tar_info, f)
-                                else:
-                                    tar.addfile(tar_info)  # Symlinks & ?
-                finally:
-                    gz.close()
-
-                self.line("Checking release file")
-                missing_files = []
-                with tarfile.open(os.path.join(tmp_dir2, name), "r") as tar:
-                    names = tar.getnames()
-
-                    for created_file in created_files:
-                        if created_file.as_posix() not in names:
-                            missing_files.append(created_file.as_posix())
-
-                if missing_files:
-                    self.line("Some files are missing:")
-                    for missing_file in missing_files:
-                        self.line("  - {}".format(missing_file))
-
-                    return 1
-
-                releases_dir = os.path.join(os.path.dirname(__file__), "releases")
-                if not os.path.exists(releases_dir):
-                    os.mkdir(releases_dir)
-
-                shutil.copyfile(
-                    os.path.join(tmp_dir2, name), os.path.join(releases_dir, name)
-                )
-
-                # Compute hash
-                sha = hashlib.sha256()
-                with open(os.path.join(releases_dir, name), "rb") as f:
-                    while True:
-                        buffer = f.read(8192)
-                        if not buffer:
-                            break
-
-                        sha.update(buffer)
-
-                with open(
-                    os.path.join(releases_dir, "{}.sha256sum".format(base_name)), "w"
-                ) as f:
-                    f.write(sha.hexdigest())
-
-                self.line("Built {}".format(name))
-
-    def check_system(self, pythons):
-        for version, python in sorted(pythons.items()):
-            try:
-                subprocess.check_output(
-                    [python, "-V"], stderr=subprocess.STDOUT, shell=WINDOWS
-                )
-                if version == "3.4" and WINDOWS:
-                    continue
-
-                subprocess.check_output([python, "-m", "pip", "install", "pip", "-U"])
-            except subprocess.CalledProcessError:
-                raise RuntimeError("Python {} is not available".format(version))
-
-    def vendorize_for_python(self, env, packages, dest, python_version):
-        vendor_dir = os.path.join(dest, "_vendor", "py{}".format(python_version))
-
-        bar = self.progress_bar(max=len(packages))
-        bar.set_format("%message% %current%/%max%")
-        bar.set_message(
-            "Vendorizing dependencies for Python {}".format(
-                python_version
-            )
-        )
-        bar.start()
-        for package in packages:
-            env.run_pip(
-                "install",
-                "{}=={}".format(package.name, package.version),
-                "--no-deps",
-                "--target",
-                vendor_dir,
-            )
-            bar.advance()
-
-        bar.finish()
-
-        self.line("")
-
-        return vendor_dir
-
-
-class MakeCommand(Command):
-    """
-    Build poetry releases.
-
-    make
-    """
-
-    commands = [MakeReleaseCommand()]
-
-    def handle(self):
-        return self.call("help", self.config.name)
-
-
-app = Application("sonnet")
-app.config.add_style(Style("debug").fg("default").dark())
-
-app.add(MakeCommand())
-
-if __name__ == "__main__":
-    app.run()
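
Besides vendoring dependencies per Python version, the deleted sonnet script published a .sha256sum next to each release tarball, streaming the file through SHA-256 in 8 KiB chunks. The checksum step in isolation; the artifact name below is hypothetical, following the script's poetry-{version}-{platform}.tar.gz convention:

import hashlib
from pathlib import Path


def sha256sum(path: Path, chunk_size: int = 8192) -> str:
    """Stream a file through SHA-256 without loading it into memory."""
    sha = hashlib.sha256()
    with path.open("rb") as f:
        while chunk := f.read(chunk_size):
            sha.update(chunk)
    return sha.hexdigest()


print(sha256sum(Path("poetry-1.2.0-linux.tar.gz")))  # hypothetical artifact name
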
diff --git a/vendor/poetry/src/poetry/__main__.py b/vendor/poetry/src/poetry/__main__.py
new file mode 100644
index 00000000..5ae15783
--- /dev/null
+++ b/vendor/poetry/src/poetry/__main__.py
@@ -0,0 +1,9 @@
+from __future__ import annotations
+
+import sys
+
+
+if __name__ == "__main__":
+    from poetry.console.application import main
+
+    sys.exit(main())
diff --git a/vendor/poetry/src/poetry/__version__.py b/vendor/poetry/src/poetry/__version__.py
new file mode 100644
index 00000000..4ab11ba1
--- /dev/null
+++ b/vendor/poetry/src/poetry/__version__.py
@@ -0,0 +1 @@
+__version__ = "1.2.0rc2"
diff --git a/vendor/poetry/poetry/_vendor/.gitignore b/vendor/poetry/src/poetry/_vendor/.gitignore
similarity index 100%
rename from vendor/poetry/poetry/_vendor/.gitignore
rename to vendor/poetry/src/poetry/_vendor/.gitignore
diff --git a/vendor/poetry/src/poetry/config/__init__.py b/vendor/poetry/src/poetry/config/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/config/config.py b/vendor/poetry/src/poetry/config/config.py
new file mode 100644
index 00000000..7cbcba3f
--- /dev/null
+++ b/vendor/poetry/src/poetry/config/config.py
@@ -0,0 +1,308 @@
+from __future__ import annotations
+
+import dataclasses
+import logging
+import os
+import re
+
+from copy import deepcopy
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
+
+from packaging.utils import canonicalize_name
+from poetry.core.toml import TOMLFile
+
+from poetry.config.dict_config_source import DictConfigSource
+from poetry.config.file_config_source import FileConfigSource
+from poetry.locations import CONFIG_DIR
+from poetry.locations import DEFAULT_CACHE_DIR
+
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+
+    from poetry.config.config_source import ConfigSource
+
+
+def boolean_validator(val: str) -> bool:
+    return val in {"true", "false", "1", "0"}
+
+
+def boolean_normalizer(val: str) -> bool:
+    return val in ["true", "1"]
+
+
+def int_normalizer(val: str) -> int:
+    return int(val)
+
+
+@dataclasses.dataclass
+class PackageFilterPolicy:
+    policy: dataclasses.InitVar[str | list[str] | None]
+    packages: list[str] = dataclasses.field(init=False)
+
+    def __post_init__(self, policy: str | list[str] | None) -> None:
+        if not policy:
+            policy = []
+        elif isinstance(policy, str):
+            policy = self.normalize(policy)
+        self.packages = policy
+
+    def allows(self, package_name: str) -> bool:
+        if ":all:" in self.packages:
+            return False
+
+        return (
+            not self.packages
+            or ":none:" in self.packages
+            or canonicalize_name(package_name) not in self.packages
+        )
+
+    @classmethod
+    def is_reserved(cls, name: str) -> bool:
+        return bool(re.match(r":(all|none):", name))
+
+    @classmethod
+    def normalize(cls, policy: str) -> list[str]:
+        if boolean_validator(policy):
+            if boolean_normalizer(policy):
+                return [":all:"]
+            else:
+                return [":none:"]
+
+        return list(
+            {
+                name.strip() if cls.is_reserved(name) else canonicalize_name(name)
+                for name in policy.strip().split(",")
+                if name
+            }
+        )
+
+    @classmethod
+    def validator(cls, policy: str) -> bool:
+        if boolean_validator(policy):
+            return True
+
+        names = policy.strip().split(",")
+
+        for name in names:
+            if (
+                not name
+                or (cls.is_reserved(name) and len(names) == 1)
+                or re.match(r"^[a-zA-Z\d_-]+$", name)
+            ):
+                continue
+            return False
+
+        return True
+
+
+logger = logging.getLogger(__name__)
+
+
+_default_config: Config | None = None
+
+
+class Config:
+    default_config: dict[str, Any] = {
+        "cache-dir": str(DEFAULT_CACHE_DIR),
+        "virtualenvs": {
+            "create": True,
+            "in-project": None,
+            "path": os.path.join("{cache-dir}", "virtualenvs"),
+            "options": {
+                "always-copy": False,
+                "system-site-packages": False,
+                # we default to False here in order to prevent development environment
+                # breakages for IDEs etc. as when working in these environments
+                # assumptions are often made about virtual environments having pip and
+                # setuptools.
+                "no-pip": False,
+                "no-setuptools": False,
+            },
+            "prefer-active-python": False,
+            "prompt": "{project_name}-py{python_version}",
+        },
+        "experimental": {"new-installer": True, "system-git-client": False},
+        "installer": {"parallel": True, "max-workers": None, "no-binary": None},
+    }
+
+    def __init__(
+        self, use_environment: bool = True, base_dir: Path | None = None
+    ) -> None:
+        self._config = deepcopy(self.default_config)
+        self._use_environment = use_environment
+        self._base_dir = base_dir
+        self._config_source: ConfigSource = DictConfigSource()
+        self._auth_config_source: ConfigSource = DictConfigSource()
+
+    @property
+    def config(self) -> dict[str, Any]:
+        return self._config
+
+    @property
+    def config_source(self) -> ConfigSource:
+        return self._config_source
+
+    @property
+    def auth_config_source(self) -> ConfigSource:
+        return self._auth_config_source
+
+    def set_config_source(self, config_source: ConfigSource) -> Config:
+        self._config_source = config_source
+
+        return self
+
+    def set_auth_config_source(self, config_source: ConfigSource) -> Config:
+        self._auth_config_source = config_source
+
+        return self
+
+    def merge(self, config: dict[str, Any]) -> None:
+        from poetry.utils.helpers import merge_dicts
+
+        merge_dicts(self._config, config)
+
+    def all(self) -> dict[str, Any]:
+        def _all(config: dict[str, Any], parent_key: str = "") -> dict[str, Any]:
+            all_ = {}
+
+            for key in config:
+                value = self.get(parent_key + key)
+                if isinstance(value, dict):
+                    if parent_key != "":
+                        current_parent = parent_key + key + "."
+                    else:
+                        current_parent = key + "."
+                    all_[key] = _all(config[key], parent_key=current_parent)
+                    continue
+
+                all_[key] = value
+
+            return all_
+
+        return _all(self.config)
+
+    def raw(self) -> dict[str, Any]:
+        return self._config
+
+    @staticmethod
+    def _get_environment_repositories() -> dict[str, dict[str, str]]:
+        repositories = {}
+        pattern = re.compile(r"POETRY_REPOSITORIES_(?P<name>[A-Z_]+)_URL")
+
+        for env_key in os.environ.keys():
+            match = pattern.match(env_key)
+            if match:
+                repositories[match.group("name").lower().replace("_", "-")] = {
+                    "url": os.environ[env_key]
+                }
+
+        return repositories
+
+    @property
+    def repository_cache_directory(self) -> Path:
+        return Path(self.get("cache-dir")) / "cache" / "repositories"
+
+    @property
+    def virtualenvs_path(self) -> Path:
+        path = self.get("virtualenvs.path")
+        if path is None:
+            path = Path(self.get("cache-dir")) / "virtualenvs"
+        return Path(path).expanduser()
+
+    def get(self, setting_name: str, default: Any = None) -> Any:
+        """
+        Retrieve a setting value.
+        """
+        keys = setting_name.split(".")
+
+        # Looking in the environment if the setting
+        # is set via a POETRY_* environment variable
+        if self._use_environment:
+            if setting_name == "repositories":
+                # repositories setting is special for now
+                repositories = self._get_environment_repositories()
+                if repositories:
+                    return repositories
+
+            env = "POETRY_" + "_".join(k.upper().replace("-", "_") for k in keys)
+            env_value = os.getenv(env)
+            if env_value is not None:
+                return self.process(self._get_normalizer(setting_name)(env_value))
+
+        value = self._config
+        for key in keys:
+            if key not in value:
+                return self.process(default)
+
+            value = value[key]
+
+        return self.process(value)
+
+    def process(self, value: Any) -> Any:
+        if not isinstance(value, str):
+            return value
+
+        def resolve_from_config(match: re.Match[str]) -> Any:
+            key = match.group(1)
+            config_value = self.get(key)
+            if config_value:
+                return config_value
+
+            # The key doesn't exist in the config but might be resolved later,
+            # so we keep it as a format variable.
+            return f"{{{key}}}"
+
+        return re.sub(r"{(.+?)}", resolve_from_config, value)
+
+    @staticmethod
+    def _get_normalizer(name: str) -> Callable[[str], Any]:
+        if name in {
+            "virtualenvs.create",
+            "virtualenvs.in-project",
+            "virtualenvs.options.always-copy",
+            "virtualenvs.options.system-site-packages",
+            "virtualenvs.options.prefer-active-python",
+            "experimental.new-installer",
+            "experimental.system-git-client",
+            "installer.parallel",
+        }:
+            return boolean_normalizer
+
+        if name == "virtualenvs.path":
+            return lambda val: str(Path(val))
+
+        if name == "installer.max-workers":
+            return int_normalizer
+
+        if name == "installer.no-binary":
+            return PackageFilterPolicy.normalize
+
+        return lambda val: val
+
+    @classmethod
+    def create(cls, reload: bool = False) -> Config:
+        global _default_config
+
+        if _default_config is None or reload:
+            _default_config = cls()
+
+            # Load global config
+            config_file = TOMLFile(CONFIG_DIR / "config.toml")
+            if config_file.exists():
+                logger.debug("Loading configuration file %s", config_file.path)
+                _default_config.merge(config_file.read())
+
+            _default_config.set_config_source(FileConfigSource(config_file))
+
+            # Load global auth config
+            auth_config_file = TOMLFile(CONFIG_DIR / "auth.toml")
+            if auth_config_file.exists():
+                logger.debug("Loading configuration file %s", auth_config_file.path)
+                _default_config.merge(auth_config_file.read())
+
+            _default_config.set_auth_config_source(FileConfigSource(auth_config_file))
+
+        return _default_config
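
Config.get resolves dotted keys against three layers: POETRY_* environment variables (run through the type normalizers above), merged file config, and the class defaults. A short sketch of the environment override path, assuming the vendored package is importable as poetry:

import os

from poetry.config.config import Config

# Dotted keys map to POETRY_<KEY>: dots become underscores, dashes too.
os.environ["POETRY_VIRTUALENVS_CREATE"] = "false"
os.environ["POETRY_INSTALLER_MAX_WORKERS"] = "4"

config = Config()
print(config.get("virtualenvs.create"))      # False, via boolean_normalizer
print(config.get("installer.max-workers"))   # 4, via int_normalizer
print(config.get("virtualenvs.in-project"))  # None, the class default
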
diff --git a/vendor/poetry/src/poetry/config/config_source.py b/vendor/poetry/src/poetry/config/config_source.py
new file mode 100644
index 00000000..ed97fa91
--- /dev/null
+++ b/vendor/poetry/src/poetry/config/config_source.py
@@ -0,0 +1,11 @@
+from __future__ import annotations
+
+from typing import Any
+
+
+class ConfigSource:
+    def add_property(self, key: str, value: Any) -> None:
+        raise NotImplementedError()
+
+    def remove_property(self, key: str) -> None:
+        raise NotImplementedError()
diff --git a/vendor/poetry/src/poetry/config/dict_config_source.py b/vendor/poetry/src/poetry/config/dict_config_source.py
new file mode 100644
index 00000000..942d76ea
--- /dev/null
+++ b/vendor/poetry/src/poetry/config/dict_config_source.py
@@ -0,0 +1,43 @@
+from __future__ import annotations
+
+from typing import Any
+
+from poetry.config.config_source import ConfigSource
+
+
+class DictConfigSource(ConfigSource):
+    def __init__(self) -> None:
+        self._config: dict[str, Any] = {}
+
+    @property
+    def config(self) -> dict[str, Any]:
+        return self._config
+
+    def add_property(self, key: str, value: Any) -> None:
+        keys = key.split(".")
+        config = self._config
+
+        for i, key in enumerate(keys):
+            if key not in config and i < len(keys) - 1:
+                config[key] = {}
+
+            if i == len(keys) - 1:
+                config[key] = value
+                break
+
+            config = config[key]
+
+    def remove_property(self, key: str) -> None:
+        keys = key.split(".")
+
+        config = self._config
+        for i, key in enumerate(keys):
+            if key not in config:
+                return
+
+            if i == len(keys) - 1:
+                del config[key]
+
+                break
+
+            config = config[key]
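
DictConfigSource materializes dotted keys into nested dictionaries on write and prunes only the leaf on removal. For example (paths and values made up):

from poetry.config.dict_config_source import DictConfigSource

source = DictConfigSource()
source.add_property("virtualenvs.options.always-copy", True)
print(source.config)
# {'virtualenvs': {'options': {'always-copy': True}}}

source.remove_property("virtualenvs.options.always-copy")
print(source.config)
# {'virtualenvs': {'options': {}}}  -- parent tables are left in place
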
diff --git a/vendor/poetry/src/poetry/config/file_config_source.py b/vendor/poetry/src/poetry/config/file_config_source.py
new file mode 100644
index 00000000..7119fa19
--- /dev/null
+++ b/vendor/poetry/src/poetry/config/file_config_source.py
@@ -0,0 +1,90 @@
+from __future__ import annotations
+
+from contextlib import contextmanager
+from typing import TYPE_CHECKING
+from typing import Any
+
+from tomlkit import document
+from tomlkit import table
+
+from poetry.config.config_source import ConfigSource
+
+
+if TYPE_CHECKING:
+    from collections.abc import Iterator
+
+    from poetry.core.toml.file import TOMLFile
+    from tomlkit.toml_document import TOMLDocument
+
+
+class FileConfigSource(ConfigSource):
+    def __init__(self, file: TOMLFile, auth_config: bool = False) -> None:
+        self._file = file
+        self._auth_config = auth_config
+
+    @property
+    def name(self) -> str:
+        return str(self._file.path)
+
+    @property
+    def file(self) -> TOMLFile:
+        return self._file
+
+    def add_property(self, key: str, value: Any) -> None:
+        with self.secure() as toml:
+            config: dict[str, Any] = toml
+            keys = key.split(".")
+
+            for i, key in enumerate(keys):
+                if key not in config and i < len(keys) - 1:
+                    config[key] = table()
+
+                if i == len(keys) - 1:
+                    config[key] = value
+                    break
+
+                config = config[key]
+
+    def remove_property(self, key: str) -> None:
+        with self.secure() as toml:
+            config: dict[str, Any] = toml
+            keys = key.split(".")
+
+            current_config = config
+            for i, key in enumerate(keys):
+                if key not in current_config:
+                    return
+
+                if i == len(keys) - 1:
+                    del current_config[key]
+
+                    break
+
+                current_config = current_config[key]
+
+    @contextmanager
+    def secure(self) -> Iterator[TOMLDocument]:
+        if self.file.exists():
+            initial_config = self.file.read()
+            config = self.file.read()
+        else:
+            initial_config = document()
+            config = document()
+
+        new_file = not self.file.exists()
+
+        yield config
+
+        try:
+            # Ensuring the file is only readable and writable
+            # by the current user
+            mode = 0o600
+
+            if new_file:
+                self.file.touch(mode=mode)
+
+            self.file.write(config)
+        except Exception:
+            self.file.write(initial_config)
+
+            raise
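
secure() is the interesting part of FileConfigSource: it creates new files with mode 0o600 and, if writing the mutated document raises, writes the original document back before re-raising. A sketch against a throwaway path, assuming the vendored poetry and poetry-core packages are importable:

from pathlib import Path
from tempfile import TemporaryDirectory

from poetry.core.toml import TOMLFile

from poetry.config.file_config_source import FileConfigSource

with TemporaryDirectory() as tmp:
    config_path = Path(tmp) / "config.toml"
    source = FileConfigSource(TOMLFile(config_path))

    # Both calls go through secure(): the file is created 0o600 and the
    # previous content is restored if the write fails part-way.
    source.add_property("virtualenvs.in-project", True)
    print(config_path.read_text())  # [virtualenvs]\nin-project = true

    source.remove_property("virtualenvs.in-project")
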
diff --git a/vendor/poetry/src/poetry/config/source.py b/vendor/poetry/src/poetry/config/source.py
new file mode 100644
index 00000000..f3af0c58
--- /dev/null
+++ b/vendor/poetry/src/poetry/config/source.py
@@ -0,0 +1,14 @@
+from __future__ import annotations
+
+import dataclasses
+
+
+@dataclasses.dataclass(order=True, eq=True)
+class Source:
+    name: str
+    url: str
+    default: bool = dataclasses.field(default=False)
+    secondary: bool = dataclasses.field(default=False)
+
+    def to_dict(self) -> dict[str, str | bool]:
+        return dataclasses.asdict(self)
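
Source models a single [[tool.poetry.source]] entry, and to_dict feeds serialization back to pyproject.toml. For illustration (the repository values are made up):

from poetry.config.source import Source

source = Source(name="internal", url="https://pypi.example.com/simple", secondary=True)
print(source.to_dict())
# {'name': 'internal', 'url': 'https://pypi.example.com/simple',
#  'default': False, 'secondary': True}
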
diff --git a/vendor/poetry/src/poetry/console/__init__.py b/vendor/poetry/src/poetry/console/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/console/application.py b/vendor/poetry/src/poetry/console/application.py
new file mode 100644
index 00000000..151043d5
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/application.py
@@ -0,0 +1,392 @@
+from __future__ import annotations
+
+import logging
+import re
+
+from contextlib import suppress
+from importlib import import_module
+from typing import TYPE_CHECKING
+from typing import Any
+from typing import cast
+
+from cleo.application import Application as BaseApplication
+from cleo.events.console_events import COMMAND
+from cleo.events.event_dispatcher import EventDispatcher
+from cleo.exceptions import CleoException
+from cleo.formatters.style import Style
+from cleo.io.null_io import NullIO
+
+from poetry.__version__ import __version__
+from poetry.console.command_loader import CommandLoader
+from poetry.console.commands.command import Command
+
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+
+    from cleo.events.console_command_event import ConsoleCommandEvent
+    from cleo.io.inputs.argv_input import ArgvInput
+    from cleo.io.inputs.definition import Definition
+    from cleo.io.inputs.input import Input
+    from cleo.io.io import IO
+    from cleo.io.outputs.output import Output
+    from crashtest.solution_providers.solution_provider_repository import (
+        SolutionProviderRepository,
+    )
+
+    from poetry.console.commands.installer_command import InstallerCommand
+    from poetry.poetry import Poetry
+
+
+def load_command(name: str) -> Callable[[], Command]:
+    def _load() -> Command:
+        words = name.split(" ")
+        module = import_module("poetry.console.commands." + ".".join(words))
+        command_class = getattr(module, "".join(c.title() for c in words) + "Command")
+        command: Command = command_class()
+        return command
+
+    return _load
+
+
+COMMANDS = [
+    "about",
+    "add",
+    "build",
+    "check",
+    "config",
+    "init",
+    "install",
+    "lock",
+    "new",
+    "publish",
+    "remove",
+    "run",
+    "search",
+    "shell",
+    "show",
+    "update",
+    "version",
+    # Cache commands
+    "cache clear",
+    "cache list",
+    # Debug commands
+    "debug info",
+    "debug resolve",
+    # Env commands
+    "env info",
+    "env list",
+    "env remove",
+    "env use",
+    # Self commands
+    # "self add",
+    # "self install",
+    # "self lock",
+    # "self remove",
+    # "self update",
+    # "self show",
+    # "self show plugins",
+    # Source commands
+    "source add",
+    "source remove",
+    "source show",
+]
+
+
+class Application(BaseApplication):  # type: ignore[misc]
+    def __init__(self) -> None:
+        super().__init__("poetry", __version__)
+
+        self._poetry: Poetry | None = None
+        self._io: IO | None = None
+        self._disable_plugins = False
+        self._disable_cache = False
+        self._plugins_loaded = False
+
+        dispatcher = EventDispatcher()
+        dispatcher.add_listener(COMMAND, self.register_command_loggers)
+        dispatcher.add_listener(COMMAND, self.configure_env)
+        dispatcher.add_listener(COMMAND, self.configure_installer_for_event)
+        self.set_event_dispatcher(dispatcher)
+
+        command_loader = CommandLoader({name: load_command(name) for name in COMMANDS})
+        self.set_command_loader(command_loader)
+
+    @property
+    def poetry(self) -> Poetry:
+        from pathlib import Path
+
+        from poetry.factory import Factory
+
+        if self._poetry is not None:
+            return self._poetry
+
+        self._poetry = Factory().create_poetry(
+            Path.cwd(),
+            io=self._io,
+            disable_plugins=self._disable_plugins,
+            disable_cache=self._disable_cache,
+        )
+
+        return self._poetry
+
+    @property
+    def command_loader(self) -> CommandLoader:
+        command_loader: CommandLoader | None = self._command_loader
+        assert command_loader is not None
+        return command_loader
+
+    def reset_poetry(self) -> None:
+        self._poetry = None
+
+    def create_io(
+        self,
+        input: Input | None = None,
+        output: Output | None = None,
+        error_output: Output | None = None,
+    ) -> IO:
+        io = super().create_io(input, output, error_output)
+
+        # Set our own CLI styles
+        formatter = io.output.formatter
+        formatter.set_style("c1", Style("cyan"))
+        formatter.set_style("c2", Style("default", options=["bold"]))
+        formatter.set_style("info", Style("blue"))
+        formatter.set_style("comment", Style("green"))
+        formatter.set_style("warning", Style("yellow"))
+        formatter.set_style("debug", Style("default", options=["dark"]))
+        formatter.set_style("success", Style("green"))
+
+        # Dark variants
+        formatter.set_style("c1_dark", Style("cyan", options=["dark"]))
+        formatter.set_style("c2_dark", Style("default", options=["bold", "dark"]))
+        formatter.set_style("success_dark", Style("green", options=["dark"]))
+
+        io.output.set_formatter(formatter)
+        io.error_output.set_formatter(formatter)
+
+        self._io = io
+
+        return io
+
+    def render_error(self, error: Exception, io: IO) -> None:
+        # We set the solution provider repository here to load providers
+        # only when an error occurs
+        self.set_solution_provider_repository(self._get_solution_provider_repository())
+
+        super().render_error(error, io)
+
+    def _run(self, io: IO) -> int:
+        self._disable_plugins = io.input.parameter_option("--no-plugins")
+        self._disable_cache = io.input.has_parameter_option("--no-cache")
+
+        self._load_plugins(io)
+
+        exit_code: int = super()._run(io)
+        return exit_code
+
+    def _configure_io(self, io: IO) -> None:
+        # We need to check if the command being run
+        # is the "run" command.
+        definition = self.definition
+        with suppress(CleoException):
+            io.input.bind(definition)
+
+        name = io.input.first_argument
+        if name == "run":
+            from poetry.console.io.inputs.run_argv_input import RunArgvInput
+
+            input = cast("ArgvInput", io.input)
+            run_input = RunArgvInput([self._name or ""] + input._tokens)
+            # For the run command reset the definition
+            # with only the set options (i.e. the options given before the command)
+            for option_name, value in input.options.items():
+                if value:
+                    option = definition.option(option_name)
+                    run_input.add_parameter_option("--" + option.name)
+                    if option.shortcut:
+                        shortcuts = re.split(r"\|-?", option.shortcut.lstrip("-"))
+                        shortcuts = [s for s in shortcuts if s]
+                        for shortcut in shortcuts:
+                            run_input.add_parameter_option("-" + shortcut.lstrip("-"))
+
+            with suppress(CleoException):
+                run_input.bind(definition)
+
+            for option_name, value in input.options.items():
+                if value:
+                    run_input.set_option(option_name, value)
+
+            io.set_input(run_input)
+
+        super()._configure_io(io)
+
+    def register_command_loggers(
+        self, event: ConsoleCommandEvent, event_name: str, _: Any
+    ) -> None:
+        from poetry.console.logging.filters import POETRY_FILTER
+        from poetry.console.logging.io_formatter import IOFormatter
+        from poetry.console.logging.io_handler import IOHandler
+
+        command = event.command
+        if not isinstance(command, Command):
+            return
+
+        io = event.io
+
+        loggers = [
+            "poetry.packages.locker",
+            "poetry.packages.package",
+            "poetry.utils.password_manager",
+        ]
+
+        loggers += command.loggers
+
+        handler = IOHandler(io)
+        handler.setFormatter(IOFormatter())
+
+        level = logging.WARNING
+
+        if io.is_debug():
+            level = logging.DEBUG
+        elif io.is_very_verbose() or io.is_verbose():
+            level = logging.INFO
+
+        logging.basicConfig(level=level, handlers=[handler])
+
+        # only log third-party packages when very verbose
+        if not io.is_very_verbose():
+            handler.addFilter(POETRY_FILTER)
+
+        for name in loggers:
+            logger = logging.getLogger(name)
+
+            _level = level
+            # The builders loggers are special and we can actually
+            # start at the INFO level.
+            if (
+                logger.name.startswith("poetry.core.masonry.builders")
+                and _level > logging.INFO
+            ):
+                _level = logging.INFO
+
+            logger.setLevel(_level)
+
+    def configure_env(
+        self, event: ConsoleCommandEvent, event_name: str, _: Any
+    ) -> None:
+        from poetry.console.commands.env_command import EnvCommand
+        from poetry.console.commands.self.self_command import SelfCommand
+
+        command = event.command
+        if not isinstance(command, EnvCommand) or isinstance(command, SelfCommand):
+            return
+
+        if command._env is not None:
+            return
+
+        from poetry.utils.env import EnvManager
+
+        io = event.io
+        poetry = command.poetry
+
+        env_manager = EnvManager(poetry)
+        env = env_manager.create_venv(io)
+
+        if env.is_venv() and io.is_verbose():
+            io.write_line(f"Using virtualenv: {env.path}")
+
+        command.set_env(env)
+
+    @classmethod
+    def configure_installer_for_event(
+        cls, event: ConsoleCommandEvent, event_name: str, _: Any
+    ) -> None:
+        from poetry.console.commands.installer_command import InstallerCommand
+
+        command = event.command
+        if not isinstance(command, InstallerCommand):
+            return
+
+        # If the command already has an installer
+        # we skip this step
+        if command._installer is not None:
+            return
+
+        cls.configure_installer_for_command(command, event.io)
+
+    @staticmethod
+    def configure_installer_for_command(command: InstallerCommand, io: IO) -> None:
+        from poetry.installation.installer import Installer
+
+        poetry = command.poetry
+        installer = Installer(
+            io,
+            command.env,
+            poetry.package,
+            poetry.locker,
+            poetry.pool,
+            poetry.config,
+        )
+        installer.use_executor(poetry.config.get("experimental.new-installer", False))
+        command.set_installer(installer)
+
+    def _load_plugins(self, io: IO | None = None) -> None:
+        if self._plugins_loaded:
+            return
+
+        if io is None:
+            io = NullIO()
+
+        self._disable_plugins = io.input.has_parameter_option("--no-plugins")
+
+        if not self._disable_plugins:
+            from poetry.plugins.application_plugin import ApplicationPlugin
+            from poetry.plugins.plugin_manager import PluginManager
+
+            manager = PluginManager(ApplicationPlugin.group)
+            manager.load_plugins()
+            manager.activate(self)
+
+        self._plugins_loaded = True
+
+    @property
+    def _default_definition(self) -> Definition:
+        from cleo.io.inputs.option import Option
+
+        definition = super()._default_definition
+
+        definition.add_option(
+            Option("--no-plugins", flag=True, description="Disables plugins.")
+        )
+
+        definition.add_option(
+            Option(
+                "--no-cache", flag=True, description="Disables Poetry source caches."
+            )
+        )
+
+        return definition
+
+    def _get_solution_provider_repository(self) -> SolutionProviderRepository:
+        from crashtest.solution_providers.solution_provider_repository import (
+            SolutionProviderRepository,
+        )
+
+        from poetry.mixology.solutions.providers.python_requirement_solution_provider import (  # noqa: E501
+            PythonRequirementSolutionProvider,
+        )
+
+        repository = SolutionProviderRepository()
+        repository.register_solution_providers([PythonRequirementSolutionProvider])
+
+        return repository
+
+
+def main() -> int:
+    exit_code: int = Application().run()
+    return exit_code
+
+
+if __name__ == "__main__":
+    main()
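
load_command above is what keeps startup cheap: commands are registered as factories, and a command module is only imported when its name is actually dispatched. The naming convention in isolation, assuming the vendored package is importable:

from importlib import import_module
from typing import Callable


def lazy_factory(name: str) -> Callable[[], object]:
    """Defer importing a command module until the command is first used."""

    def _load() -> object:
        words = name.split(" ")
        module = import_module("poetry.console.commands." + ".".join(words))
        cls = getattr(module, "".join(w.title() for w in words) + "Command")
        return cls()

    return _load


factory = lazy_factory("env info")  # -> poetry.console.commands.env.info.EnvInfoCommand
command = factory()                 # the import happens here, not at startup
print(type(command).__name__)       # EnvInfoCommand
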
diff --git a/vendor/poetry/src/poetry/console/command_loader.py b/vendor/poetry/src/poetry/console/command_loader.py
new file mode 100644
index 00000000..40f6b7f3
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/command_loader.py
@@ -0,0 +1,22 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from cleo.exceptions import LogicException
+from cleo.loaders.factory_command_loader import FactoryCommandLoader
+
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+
+    from poetry.console.commands.command import Command
+
+
+class CommandLoader(FactoryCommandLoader):  # type: ignore[misc]
+    def register_factory(
+        self, command_name: str, factory: Callable[[], Command]
+    ) -> None:
+        if command_name in self._factories:
+            raise LogicException(f'The command "{command_name}" already exists.')
+
+        self._factories[command_name] = factory
diff --git a/vendor/poetry/src/poetry/console/commands/__init__.py b/vendor/poetry/src/poetry/console/commands/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/console/commands/about.py b/vendor/poetry/src/poetry/console/commands/about.py
new file mode 100644
index 00000000..cf2548d6
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/about.py
@@ -0,0 +1,37 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.console.commands.command import Command
+
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+
+
+class AboutCommand(Command):
+    name = "about"
+
+    description = "Shows information about Poetry."
+
+    def handle(self) -> int:
+        from poetry.utils._compat import metadata
+
+        # The metadata.version that we import for Python 3.7 is untyped, work around
+        # that.
+        version: Callable[[str], str] = metadata.version
+
+        self.line(
+            f"""\
+Poetry - Package Management for Python
+
+Version: {version('poetry')}
+Poetry-Core Version: {version('poetry-core')}
+
+Poetry is a dependency manager tracking local dependencies of your projects\
+ and libraries.
+See https://github.com/python-poetry/poetry for more information.\
+"""
+        )
+
+        return 0
diff --git a/vendor/poetry/src/poetry/console/commands/add.py b/vendor/poetry/src/poetry/console/commands/add.py
new file mode 100644
index 00000000..0df11618
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/add.py
@@ -0,0 +1,286 @@
+from __future__ import annotations
+
+import contextlib
+
+from typing import Any
+
+from cleo.helpers import argument
+from cleo.helpers import option
+from packaging.utils import canonicalize_name
+from poetry.core.packages.dependency_group import MAIN_GROUP
+from tomlkit.toml_document import TOMLDocument
+
+from poetry.console.commands.init import InitCommand
+from poetry.console.commands.installer_command import InstallerCommand
+
+
+class AddCommand(InstallerCommand, InitCommand):
+    name = "add"
+    description = "Adds a new dependency to pyproject.toml."
+
+    arguments = [argument("name", "The packages to add.", multiple=True)]
+    options = [
+        option(
+            "group",
+            "-G",
+            "The group to add the dependency to.",
+            flag=False,
+            default=MAIN_GROUP,
+        ),
+        option("dev", "D", "Add as a development dependency."),
+        option("editable", "e", "Add vcs/path dependencies as editable."),
+        option(
+            "extras",
+            "E",
+            "Extras to activate for the dependency.",
+            flag=False,
+            multiple=True,
+        ),
+        option("optional", None, "Add as an optional dependency."),
+        option(
+            "python",
+            None,
+            "Python version for which the dependency must be installed.",
+            flag=False,
+        ),
+        option(
+            "platform",
+            None,
+            "Platforms for which the dependency must be installed.",
+            flag=False,
+        ),
+        option(
+            "source",
+            None,
+            "Name of the source to use to install the package.",
+            flag=False,
+        ),
+        option("allow-prereleases", None, "Accept prereleases."),
+        option(
+            "dry-run",
+            None,
+            "Output the operations but do not execute anything (implicitly enables"
+            " --verbose).",
+        ),
+        option("lock", None, "Do not perform operations (only update the lockfile)."),
+    ]
+    examples = """\
+If you do not specify a version constraint, poetry will choose a suitable one based on\
+ the available package versions.
+
+You can specify a package in the following forms:
+  - A single name (requests)
+  - A name and a constraint (requests@^2.23.0)
+  - A git url (git+https://github.com/python-poetry/poetry.git)
+  - A git url with a revision\
+ (git+https://github.com/python-poetry/poetry.git#develop)
+  - A git SSH url (git+ssh://github.com/python-poetry/poetry.git)
+  - A git SSH url with a revision\
+ (git+ssh://github.com/python-poetry/poetry.git#develop)
+  - A file path (../my-package/my-package.whl)
+  - A directory (../my-package/)
+  - A url (https://example.com/packages/my-package-0.1.0.tar.gz)
+"""
+    help = f"""\
+The add command adds required packages to your pyproject.toml and installs\
+ them.
+
+{examples}
+"""
+
+    loggers = ["poetry.repositories.pypi_repository", "poetry.inspection.info"]
+
+    def handle(self) -> int:
+        from poetry.core.semver.helpers import parse_constraint
+        from tomlkit import inline_table
+        from tomlkit import parse as parse_toml
+        from tomlkit import table
+
+        from poetry.factory import Factory
+
+        packages = self.argument("name")
+        if self.option("dev"):
+            self.line_error(
+                "The --dev option is deprecated, "
+                "use the `--group dev` notation instead."
+            )
+            group = "dev"
+        else:
+            group = self.option("group", self.default_group or MAIN_GROUP)
+
+        if self.option("extras") and len(packages) > 1:
+            raise ValueError(
+                "You can only specify one package when using the --extras option"
+            )
+
+        # tomlkit types are awkward to work with, treat content as a mostly untyped
+        # dictionary.
+        content: dict[str, Any] = self.poetry.file.read()
+        poetry_content = content["tool"]["poetry"]
+
+        if group == MAIN_GROUP:
+            if "dependencies" not in poetry_content:
+                poetry_content["dependencies"] = table()
+
+            section = poetry_content["dependencies"]
+        else:
+            if "group" not in poetry_content:
+                poetry_content["group"] = table(is_super_table=True)
+
+            groups = poetry_content["group"]
+            if group not in groups:
+                dependencies_toml: dict[str, Any] = parse_toml(
+                    f"[tool.poetry.group.{group}.dependencies]\n\n"
+                )
+                group_table = dependencies_toml["tool"]["poetry"]["group"][group]
+                poetry_content["group"][group] = group_table
+
+            if "dependencies" not in poetry_content["group"][group]:
+                poetry_content["group"][group]["dependencies"] = table()
+
+            section = poetry_content["group"][group]["dependencies"]
+
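+        # Compare requested names against the section by canonicalized name and
+        # skip anything that is already declared there.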
+        existing_packages = self.get_existing_packages_from_input(packages, section)
+
+        if existing_packages:
+            self.notify_about_existing_packages(existing_packages)
+
+        packages = [name for name in packages if name not in existing_packages]
+
+        if not packages:
+            self.line("Nothing to add.")
+            return 0
+
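+        # Resolve each remaining package to a concrete requirement, letting
+        # poetry pick a suitable version constraint when none was given.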
+        requirements = self._determine_requirements(
+            packages,
+            allow_prereleases=self.option("allow-prereleases"),
+            source=self.option("source"),
+        )
+
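+        # Translate each requirement into a TOML inline table, mapping the
+        # command line options (--optional, --extras, --editable, ...) onto
+        # constraint keys.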
+        for _constraint in requirements:
+            version = _constraint.get("version")
+            if version is not None:
+                # Validate version constraint
+                assert isinstance(version, str)
+                parse_constraint(version)
+
+            constraint: dict[str, Any] = inline_table()
+            for name, value in _constraint.items():
+                if name == "name":
+                    continue
+
+                constraint[name] = value
+
+            if self.option("optional"):
+                constraint["optional"] = True
+
+            if self.option("allow-prereleases"):
+                constraint["allow-prereleases"] = True
+
+            if self.option("extras"):
+                extras = []
+                for extra in self.option("extras"):
+                    if " " in extra:
+                        extras += [e.strip() for e in extra.split(" ")]
+                    else:
+                        extras.append(extra)
+
+                constraint["extras"] = self.option("extras")
+
+            if self.option("editable"):
+                if "git" in _constraint or "path" in _constraint:
+                    constraint["develop"] = True
+                else:
+                    self.line_error(
+                        "\n"
+                        "Failed to add packages. "
+                        "Only vcs/path dependencies support editable installs. "
+                        f"{_constraint['name']} is neither."
+                    )
+                    self.line_error("\nNo changes were applied.")
+                    return 1
+
+            if self.option("python"):
+                constraint["python"] = self.option("python")
+
+            if self.option("platform"):
+                constraint["platform"] = self.option("platform")
+
+            if self.option("source"):
+                constraint["source"] = self.option("source")
+
+            if len(constraint) == 1 and "version" in constraint:
+                constraint = constraint["version"]
+
+            constraint_name = _constraint["name"]
+            assert isinstance(constraint_name, str)
+            section[constraint_name] = constraint
+
+            with contextlib.suppress(ValueError):
+                self.poetry.package.dependency_group(group).remove_dependency(
+                    constraint_name
+                )
+
+            self.poetry.package.add_dependency(
+                Factory.create_dependency(
+                    constraint_name,
+                    constraint,
+                    groups=[group],
+                    root_dir=self.poetry.file.parent,
+                )
+            )
+
+        # Refresh the locker
+        self.poetry.set_locker(
+            self.poetry.locker.__class__(self.poetry.locker.lock.path, poetry_content)
+        )
+        self.installer.set_locker(self.poetry.locker)
+
+        # Cosmetic new line
+        self.line("")
+
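+        # Configure and run the installer; with --lock only the lock file is
+        # refreshed, and with --dry-run nothing is written back to pyproject.toml.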
+        self.installer.set_package(self.poetry.package)
+        self.installer.dry_run(self.option("dry-run"))
+        self.installer.verbose(self.io.is_verbose())
+        self.installer.update(True)
+        if self.option("lock"):
+            self.installer.lock()
+
+        self.installer.whitelist([r["name"] for r in requirements])
+
+        status = self.installer.run()
+
+        if status == 0 and not self.option("dry-run"):
+            assert isinstance(content, TOMLDocument)
+            self.poetry.file.write(content)
+
+        return status
+
+    def get_existing_packages_from_input(
+        self, packages: list[str], section: dict[str, Any]
+    ) -> list[str]:
+        existing_packages = []
+
+        for name in packages:
+            for key in section:
+                if canonicalize_name(key) == canonicalize_name(name):
+                    existing_packages.append(name)
+
+        return existing_packages
+
+    @property
+    def _hint_update_packages(self) -> str:
+        return (
+            "\nIf you want to update it to the latest compatible version, you can use"
+            " `poetry update package`.\nIf you prefer to upgrade it to the latest"
+            " available version, you can use `poetry add package@latest`.\n"
+        )
+
+    def notify_about_existing_packages(self, existing_packages: list[str]) -> None:
+        self.line(
+            "The following packages are already present in the pyproject.toml and will"
+            " be skipped:\n"
+        )
+        for name in existing_packages:
+            self.line(f"  • {name}")
+        self.line(self._hint_update_packages)
diff --git a/vendor/poetry/src/poetry/console/commands/build.py b/vendor/poetry/src/poetry/console/commands/build.py
new file mode 100644
index 00000000..5339a865
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/build.py
@@ -0,0 +1,36 @@
+from __future__ import annotations
+
+from cleo.helpers import option
+
+from poetry.console.commands.env_command import EnvCommand
+from poetry.utils.env import build_environment
+
+
+class BuildCommand(EnvCommand):
+    name = "build"
+    description = "Builds a package, as a tarball and a wheel by default."
+
+    options = [
+        option("format", "f", "Limit the format to either sdist or wheel.", flag=False)
+    ]
+
+    loggers = [
+        "poetry.core.masonry.builders.builder",
+        "poetry.core.masonry.builders.sdist",
+        "poetry.core.masonry.builders.wheel",
+    ]
+
+    def handle(self) -> int:
+        from poetry.core.masonry.builder import Builder
+
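+        # Run the builders with the interpreter of the resolved build
+        # environment so sdists and wheels are built against it.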
+        with build_environment(poetry=self.poetry, env=self.env, io=self.io) as env:
+            fmt = self.option("format") or "all"
+            package = self.poetry.package
+            self.line(
+                f"Building {package.pretty_name} ({package.version})"
+            )
+
+            builder = Builder(self.poetry)
+            builder.build(fmt, executable=env.python)
+
+        return 0
diff --git a/vendor/poetry/src/poetry/console/commands/cache/__init__.py b/vendor/poetry/src/poetry/console/commands/cache/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/console/commands/cache/clear.py b/vendor/poetry/src/poetry/console/commands/cache/clear.py
new file mode 100644
index 00000000..ef16387a
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/cache/clear.py
@@ -0,0 +1,84 @@
+from __future__ import annotations
+
+import os
+
+from cleo.helpers import argument
+from cleo.helpers import option
+
+from poetry.config.config import Config
+from poetry.console.commands.command import Command
+
+
+class CacheClearCommand(Command):
+    name = "cache clear"
+    description = "Clears Poetry's cache."
+
+    arguments = [argument("cache", description="The name of the cache to clear.")]
+    options = [option("all", description="Clear all entries in the cache.")]
+
+    def handle(self) -> int:
+        from cachy import CacheManager
+
+        cache = self.argument("cache")
+
+        parts = cache.split(":")
+        root = parts[0]
+
+        config = Config.create()
+        cache_dir = config.repository_cache_directory / root
+
+        try:
+            cache_dir.relative_to(config.repository_cache_directory)
+        except ValueError:
+            raise ValueError(f"{root} is not a valid repository cache")
+
+        cache = CacheManager(
+            {
+                "default": parts[0],
+                "serializer": "json",
+                "stores": {parts[0]: {"driver": "file", "path": str(cache_dir)}},
+            }
+        )
+
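+        # The cache key is "<repo>" or "<repo>:<package>:<version>"; clearing a
+        # whole package ("<repo>:<package>") is not supported yet.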
+        if len(parts) == 1:
+            if not self.option("all"):
+                raise RuntimeError(
+                    f"Add the --all option if you want to clear all {parts[0]} caches"
+                )
+
+            if not os.path.exists(cache_dir):
+                self.line(f"No cache entries for {parts[0]}")
+                return 0
+
+            # Calculate number of entries
+            entries_count = sum(
+                len(files) for _path, _dirs, files in os.walk(str(cache_dir))
+            )
+
+            delete = self.confirm(f"Delete {entries_count} entries?")
+            if not delete:
+                return 0
+
+            cache.flush()
+        elif len(parts) == 2:
+            raise RuntimeError(
+                "Only specifying the package name is not yet supported. "
+                "Add a specific version to clear"
+            )
+        elif len(parts) == 3:
+            package = parts[1]
+            version = parts[2]
+
+            if not cache.has(f"{package}:{version}"):
+                self.line(f"No cache entries for {package}:{version}")
+                return 0
+
+            delete = self.confirm(f"Delete cache entry {package}:{version}")
+            if not delete:
+                return 0
+
+            cache.forget(f"{package}:{version}")
+        else:
+            raise ValueError("Invalid cache key")
+
+        return 0
diff --git a/vendor/poetry/src/poetry/console/commands/cache/list.py b/vendor/poetry/src/poetry/console/commands/cache/list.py
new file mode 100644
index 00000000..b3f0800a
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/cache/list.py
@@ -0,0 +1,21 @@
+from __future__ import annotations
+
+from poetry.config.config import Config
+from poetry.console.commands.command import Command
+
+
+class CacheListCommand(Command):
+    name = "cache list"
+    description = "List Poetry's caches."
+
+    def handle(self) -> int:
+        config = Config.create()
+        if config.repository_cache_directory.exists():
+            caches = sorted(config.repository_cache_directory.iterdir())
+            if caches:
+                for cache in caches:
+                    self.line(f"{cache.name}")
+                return 0
+
+        self.line_error("No caches found")
+        return 0
diff --git a/vendor/poetry/src/poetry/console/commands/check.py b/vendor/poetry/src/poetry/console/commands/check.py
new file mode 100644
index 00000000..3b3718dc
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/check.py
@@ -0,0 +1,32 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+from poetry.console.commands.command import Command
+
+
+class CheckCommand(Command):
+    name = "check"
+    description = "Checks the validity of the pyproject.toml file."
+
+    def handle(self) -> int:
+        from poetry.core.pyproject.toml import PyProjectTOML
+
+        from poetry.factory import Factory
+
+        # Load poetry config and display errors, if any
+        poetry_file = Factory.locate(Path.cwd())
+        config = PyProjectTOML(poetry_file).poetry_config
+        check_result = Factory.validate(config, strict=True)
+        if not check_result["errors"] and not check_result["warnings"]:
+            self.info("All set!")
+
+            return 0
+
+        for error in check_result["errors"]:
+            self.line_error(f"Error: {error}")
+
+        for error in check_result["warnings"]:
+            self.line_error(f"Warning: {error}")
+
+        return 1
diff --git a/vendor/poetry/src/poetry/console/commands/command.py b/vendor/poetry/src/poetry/console/commands/command.py
new file mode 100644
index 00000000..4bc26ad5
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/command.py
@@ -0,0 +1,41 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from typing import Any
+
+from cleo.commands.command import Command as BaseCommand
+from cleo.exceptions import ValueException
+
+
+if TYPE_CHECKING:
+    from poetry.console.application import Application
+    from poetry.poetry import Poetry
+
+
+class Command(BaseCommand):  # type: ignore[misc]
+    loggers: list[str] = []
+
+    _poetry: Poetry | None = None
+
+    @property
+    def poetry(self) -> Poetry:
+        if self._poetry is None:
+            return self.get_application().poetry
+
+        return self._poetry
+
+    def set_poetry(self, poetry: Poetry) -> None:
+        self._poetry = poetry
+
+    def get_application(self) -> Application:
+        application: Application = self.application
+        return application
+
+    def reset_poetry(self) -> None:
+        self.get_application().reset_poetry()
+
+    def option(self, name: str, default: Any = None) -> Any:
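+        # Unlike the base implementation, fall back to the given default instead
+        # of raising when the option is not defined for this command.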
+        try:
+            return super().option(name)
+        except ValueException:
+            return default
diff --git a/vendor/poetry/src/poetry/console/commands/config.py b/vendor/poetry/src/poetry/console/commands/config.py
new file mode 100644
index 00000000..03773a5e
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/config.py
@@ -0,0 +1,361 @@
+from __future__ import annotations
+
+import json
+import re
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
+from typing import cast
+
+from cleo.helpers import argument
+from cleo.helpers import option
+
+from poetry.config.config import PackageFilterPolicy
+from poetry.config.config import boolean_normalizer
+from poetry.config.config import boolean_validator
+from poetry.config.config import int_normalizer
+from poetry.console.commands.command import Command
+from poetry.locations import DEFAULT_CACHE_DIR
+
+
+if TYPE_CHECKING:
+    from poetry.config.config_source import ConfigSource
+
+
+class ConfigCommand(Command):
+    name = "config"
+    description = "Manages configuration settings."
+
+    arguments = [
+        argument("key", "Setting key.", optional=True),
+        argument("value", "Setting value.", optional=True, multiple=True),
+    ]
+
+    options = [
+        option("list", None, "List configuration settings."),
+        option("unset", None, "Unset configuration setting."),
+        option("local", None, "Set/Get from the project's local configuration."),
+    ]
+
+    help = """\
+This command allows you to edit the poetry config settings and repositories.
+
+To add a repository:
+
+    poetry config repositories.foo https://bar.com/simple/
+
+To remove a repository (repo is a short alias for repositories):
+
+    poetry config --unset repo.foo"""
+
+    LIST_PROHIBITED_SETTINGS = {"http-basic", "pypi-token"}
+
+    @property
+    def unique_config_values(self) -> dict[str, tuple[Any, Any, Any]]:
+        unique_config_values = {
+            "cache-dir": (
+                str,
+                lambda val: str(Path(val)),
+                str(DEFAULT_CACHE_DIR / "virtualenvs"),
+            ),
+            "virtualenvs.create": (boolean_validator, boolean_normalizer, True),
+            "virtualenvs.in-project": (boolean_validator, boolean_normalizer, False),
+            "virtualenvs.options.always-copy": (
+                boolean_validator,
+                boolean_normalizer,
+                False,
+            ),
+            "virtualenvs.options.system-site-packages": (
+                boolean_validator,
+                boolean_normalizer,
+                False,
+            ),
+            "virtualenvs.options.no-pip": (
+                boolean_validator,
+                boolean_normalizer,
+                False,
+            ),
+            "virtualenvs.options.no-setuptools": (
+                boolean_validator,
+                boolean_normalizer,
+                False,
+            ),
+            "virtualenvs.path": (
+                str,
+                lambda val: str(Path(val)),
+                str(DEFAULT_CACHE_DIR / "virtualenvs"),
+            ),
+            "virtualenvs.prefer-active-python": (
+                boolean_validator,
+                boolean_normalizer,
+                False,
+            ),
+            "experimental.new-installer": (
+                boolean_validator,
+                boolean_normalizer,
+                True,
+            ),
+            "experimental.system-git-client": (
+                boolean_validator,
+                boolean_normalizer,
+                False,
+            ),
+            "installer.parallel": (
+                boolean_validator,
+                boolean_normalizer,
+                True,
+            ),
+            "installer.max-workers": (
+                lambda val: int(val) > 0,
+                int_normalizer,
+                None,
+            ),
+            "virtualenvs.prompt": (
+                str,
+                lambda val: str(val),
+                "{project_name}-py{python_version}",
+            ),
+            "installer.no-binary": (
+                PackageFilterPolicy.validator,
+                PackageFilterPolicy.normalize,
+                None,
+            ),
+        }
+
+        return unique_config_values
+
+    def handle(self) -> int:
+        from pathlib import Path
+
+        from poetry.core.pyproject.exceptions import PyProjectException
+        from poetry.core.toml.file import TOMLFile
+
+        from poetry.config.config import Config
+        from poetry.config.file_config_source import FileConfigSource
+        from poetry.locations import CONFIG_DIR
+
+        config = Config.create()
+        config_file = TOMLFile(CONFIG_DIR / "config.toml")
+
+        try:
+            local_config_file = TOMLFile(self.poetry.file.parent / "poetry.toml")
+            if local_config_file.exists():
+                config.merge(local_config_file.read())
+        except (RuntimeError, PyProjectException):
+            local_config_file = TOMLFile(Path.cwd() / "poetry.toml")
+
+        if self.option("local"):
+            config.set_config_source(FileConfigSource(local_config_file))
+
+        if not config_file.exists():
+            config_file.path.parent.mkdir(parents=True, exist_ok=True)
+            config_file.touch(mode=0o0600)
+
+        if self.option("list"):
+            self._list_configuration(config.all(), config.raw())
+
+            return 0
+
+        setting_key = self.argument("key")
+        if not setting_key:
+            return 0
+
+        if self.argument("value") and self.option("unset"):
+            raise RuntimeError("You can not combine a setting value with --unset")
+
+        # show the value if no value is provided
+        if not self.argument("value") and not self.option("unset"):
+            m = re.match(r"^repos?(?:itories)?(?:\.(.+))?", self.argument("key"))
+            value: str | dict[str, Any]
+            if m:
+                if not m.group(1):
+                    value = {}
+                    if config.get("repositories") is not None:
+                        value = config.get("repositories")
+                else:
+                    repo = config.get(f"repositories.{m.group(1)}")
+                    if repo is None:
+                        raise ValueError(f"There is no {m.group(1)} repository defined")
+
+                    value = repo
+
+                self.line(str(value))
+            else:
+                if setting_key not in self.unique_config_values:
+                    raise ValueError(f"There is no {setting_key} setting.")
+
+                value = config.get(setting_key)
+
+                if not isinstance(value, str):
+                    value = json.dumps(value)
+
+                self.line(value)
+
+            return 0
+
+        values: list[str] = self.argument("value")
+
+        unique_config_values = self.unique_config_values
+        if setting_key in unique_config_values:
+            if self.option("unset"):
+                config.config_source.remove_property(setting_key)
+                return 0
+
+            return self._handle_single_value(
+                config.config_source,
+                setting_key,
+                unique_config_values[setting_key],
+                values,
+            )
+
+        # handle repositories
+        m = re.match(r"^repos?(?:itories)?(?:\.(.+))?", self.argument("key"))
+        if m:
+            if not m.group(1):
+                raise ValueError("You cannot remove the [repositories] section")
+
+            if self.option("unset"):
+                repo = config.get(f"repositories.{m.group(1)}")
+                if repo is None:
+                    raise ValueError(f"There is no {m.group(1)} repository defined")
+
+                config.config_source.remove_property(f"repositories.{m.group(1)}")
+
+                return 0
+
+            if len(values) == 1:
+                url = values[0]
+
+                config.config_source.add_property(f"repositories.{m.group(1)}.url", url)
+
+                return 0
+
+            raise ValueError(
+                "You must pass the url. "
+                "Example: poetry config repositories.foo https://bar.com"
+            )
+
+        # handle auth
+        m = re.match(r"^(http-basic|pypi-token)\.(.+)", self.argument("key"))
+        if m:
+            from poetry.utils.password_manager import PasswordManager
+
+            password_manager = PasswordManager(config)
+            if self.option("unset"):
+                if m.group(1) == "http-basic":
+                    password_manager.delete_http_password(m.group(2))
+                elif m.group(1) == "pypi-token":
+                    password_manager.delete_pypi_token(m.group(2))
+
+                return 0
+
+            if m.group(1) == "http-basic":
+                if len(values) == 1:
+                    username = values[0]
+                    # Only username, so we prompt for password
+                    password = self.secret("Password:")
+                elif len(values) != 2:
+                    raise ValueError(
+                        "Expected one or two arguments "
+                        f"(username, password), got {len(values)}"
+                    )
+                else:
+                    username = values[0]
+                    password = values[1]
+
+                password_manager.set_http_password(m.group(2), username, password)
+            elif m.group(1) == "pypi-token":
+                if len(values) != 1:
+                    raise ValueError(
+                        f"Expected only one argument (token), got {len(values)}"
+                    )
+
+                token = values[0]
+
+                password_manager.set_pypi_token(m.group(2), token)
+
+            return 0
+
+        # handle certs
+        m = re.match(r"certificates\.([^.]+)\.(cert|client-cert)", self.argument("key"))
+        if m:
+            repository = m.group(1)
+            key = m.group(2)
+
+            if self.option("unset"):
+                config.auth_config_source.remove_property(
+                    f"certificates.{repository}.{key}"
+                )
+
+                return 0
+
+            if len(values) == 1:
+                new_value: str | bool = values[0]
+
+                if key == "cert" and boolean_validator(values[0]):
+                    new_value = boolean_normalizer(values[0])
+
+                config.auth_config_source.add_property(
+                    f"certificates.{repository}.{key}", new_value
+                )
+            else:
+                raise ValueError("You must pass exactly 1 value")
+
+            return 0
+
+        raise ValueError(f"Setting {self.argument('key')} does not exist")
+
+    def _handle_single_value(
+        self,
+        source: ConfigSource,
+        key: str,
+        callbacks: tuple[Any, Any, Any],
+        values: list[Any],
+    ) -> int:
+        validator, normalizer, _ = callbacks
+
+        if len(values) > 1:
+            raise RuntimeError("You can only pass one value.")
+
+        value = values[0]
+        if not validator(value):
+            raise RuntimeError(f'"{value}" is an invalid value for {key}')
+
+        source.add_property(key, normalizer(value))
+
+        return 0
+
+    def _list_configuration(
+        self, config: dict[str, Any], raw: dict[str, Any], k: str = ""
+    ) -> None:
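+        # Recursively walk the merged configuration; for each leaf print
+        # "key = value", showing the raw value with the computed value as a
+        # trailing comment when the two differ.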
+        orig_k = k
+        for key, value in sorted(config.items()):
+            if k + key in self.LIST_PROHIBITED_SETTINGS:
+                continue
+
+            raw_val = raw.get(key)
+
+            if isinstance(value, dict):
+                k += f"{key}."
+                raw_val = cast("dict[str, Any]", raw_val)
+                self._list_configuration(value, raw_val, k=k)
+                k = orig_k
+
+                continue
+            elif isinstance(value, list):
+                value = ", ".join(
+                    json.dumps(val) if isinstance(val, list) else val for val in value
+                )
+                value = f"[{value}]"
+
+            if k.startswith("repositories."):
+                message = f"{k + key} = {json.dumps(raw_val)}"
+            elif isinstance(raw_val, str) and raw_val != value:
+                message = (
+                    f"{k + key} = {json.dumps(raw_val)}  # {value}"
+                )
+            else:
+                message = f"{k + key} = {json.dumps(value)}"
+
+            self.line(message)
diff --git a/vendor/poetry/src/poetry/console/commands/debug/__init__.py b/vendor/poetry/src/poetry/console/commands/debug/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/console/commands/debug/info.py b/vendor/poetry/src/poetry/console/commands/debug/info.py
new file mode 100644
index 00000000..f90d8e79
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/debug/info.py
@@ -0,0 +1,28 @@
+from __future__ import annotations
+
+import sys
+
+from poetry.console.commands.command import Command
+
+
+class DebugInfoCommand(Command):
+    name = "debug info"
+    description = "Shows debug information."
+
+    def handle(self) -> int:
+        poetry_python_version = ".".join(str(s) for s in sys.version_info[:3])
+
+        self.line("")
+        self.line("Poetry")
+        self.line(
+            "\n".join(
+                [
+                    f"Version: {self.poetry.VERSION}",
+                    f"Python:  {poetry_python_version}",
+                ]
+            )
+        )
+        command = self.application.get("env info")
+
+        exit_code: int = command.run(self.io)
+        return exit_code
diff --git a/vendor/poetry/src/poetry/console/commands/debug/resolve.py b/vendor/poetry/src/poetry/console/commands/debug/resolve.py
new file mode 100644
index 00000000..00628002
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/debug/resolve.py
@@ -0,0 +1,142 @@
+from __future__ import annotations
+
+from cleo.helpers import argument
+from cleo.helpers import option
+from cleo.io.outputs.output import Verbosity
+
+from poetry.console.commands.init import InitCommand
+from poetry.console.commands.show import ShowCommand
+
+
+class DebugResolveCommand(InitCommand):
+    name = "debug resolve"
+    description = "Debugs dependency resolution."
+
+    arguments = [
+        argument("package", "The packages to resolve.", optional=True, multiple=True)
+    ]
+    options = [
+        option(
+            "extras",
+            "E",
+            "Extras to activate for the dependency.",
+            flag=False,
+            multiple=True,
+        ),
+        option("python", None, "Python version(s) to use for resolution.", flag=False),
+        option("tree", None, "Display the dependency tree."),
+        option("install", None, "Show what would be installed for the current system."),
+    ]
+
+    loggers = ["poetry.repositories.pypi_repository", "poetry.inspection.info"]
+
+    def handle(self) -> int:
+        from cleo.io.null_io import NullIO
+        from poetry.core.packages.project_package import ProjectPackage
+
+        from poetry.factory import Factory
+        from poetry.puzzle.solver import Solver
+        from poetry.repositories.pool import Pool
+        from poetry.repositories.repository import Repository
+        from poetry.utils.env import EnvManager
+
+        packages = self.argument("package")
+
+        if not packages:
+            package = self.poetry.package
+        else:
+            # Using current pool for determine_requirements()
+            self._pool = self.poetry.pool
+
+            package = ProjectPackage(
+                self.poetry.package.name, self.poetry.package.version
+            )
+
+            # Silencing output
+            verbosity = self.io.output.verbosity
+            self.io.output.set_verbosity(Verbosity.QUIET)
+
+            requirements = self._determine_requirements(packages)
+
+            self.io.output.set_verbosity(verbosity)
+
+            for constraint in requirements:
+                name = constraint.pop("name")
+                assert isinstance(name, str)
+                extras = []
+                for extra in self.option("extras"):
+                    if " " in extra:
+                        extras += [e.strip() for e in extra.split(" ")]
+                    else:
+                        extras.append(extra)
+
+                constraint["extras"] = extras
+
+                package.add_dependency(Factory.create_dependency(name, constraint))
+
+        package.python_versions = self.option("python") or (
+            self.poetry.package.python_versions
+        )
+
+        pool = self.poetry.pool
+
+        solver = Solver(package, pool, [], [], self.io)
+
+        ops = solver.solve().calculate_operations()
+
+        self.line("")
+        self.line("Resolution results:")
+        self.line("")
+
+        if self.option("tree"):
+            show_command = self.application.find("show")
+            assert isinstance(show_command, ShowCommand)
+            show_command.init_styles(self.io)
+
+            packages = [op.package for op in ops]
+
+            requires = package.all_requires
+            for pkg in packages:
+                for require in requires:
+                    if pkg.name == require.name:
+                        show_command.display_package_tree(self.io, pkg, packages)
+                        break
+
+            return 0
+
+        table = self.table(style="compact")
+        table.style.set_vertical_border_chars("", " ")
+        rows = []
+
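+        # With --install, re-solve against a pool containing only the resolved
+        # packages, using the current environment, so skipped operations reflect
+        # what would actually be installed on this system.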
+        if self.option("install"):
+            env = EnvManager(self.poetry).get()
+            pool = Pool()
+            locked_repository = Repository("poetry-locked")
+            for op in ops:
+                locked_repository.add_package(op.package)
+
+            pool.add_repository(locked_repository)
+
+            solver = Solver(package, pool, [], [], NullIO())
+            with solver.use_environment(env):
+                ops = solver.solve().calculate_operations()
+
+        for op in ops:
+            if self.option("install") and op.skipped:
+                continue
+
+            pkg = op.package
+            row = [
+                f"{pkg.complete_name}",
+                f"{pkg.version}",
+                "",
+            ]
+
+            if not pkg.marker.is_any():
+                row[2] = str(pkg.marker)
+
+            rows.append(row)
+
+        table.set_rows(rows)
+        table.render()
+
+        return 0
diff --git a/vendor/poetry/src/poetry/console/commands/env/__init__.py b/vendor/poetry/src/poetry/console/commands/env/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/console/commands/env/info.py b/vendor/poetry/src/poetry/console/commands/env/info.py
new file mode 100644
index 00000000..d114c505
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/env/info.py
@@ -0,0 +1,70 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from cleo.helpers import option
+
+from poetry.console.commands.command import Command
+
+
+if TYPE_CHECKING:
+    from poetry.utils.env import Env
+
+
+class EnvInfoCommand(Command):
+    name = "env info"
+    description = "Displays information about the current environment."
+
+    options = [option("path", "p", "Only display the environment's path.")]
+
+    def handle(self) -> int:
+        from poetry.utils.env import EnvManager
+
+        env = EnvManager(self.poetry).get()
+
+        if self.option("path"):
+            if not env.is_venv():
+                return 1
+
+            self.line(str(env.path))
+
+            return 0
+
+        self._display_complete_info(env)
+        return 0
+
+    def _display_complete_info(self, env: Env) -> None:
+        env_python_version = ".".join(str(s) for s in env.version_info[:3])
+        self.line("")
+        self.line("Virtualenv")
+        listing = [
+            f"Python:         {env_python_version}",
+            f"Implementation: {env.python_implementation}",
+            "Path:          "
+            f" {env.path if env.is_venv() else 'NA'}",
+            "Executable:    "
+            f" {env.python if env.is_venv() else 'NA'}",
+        ]
+        if env.is_venv():
+            listing.append(
+                "Valid:         "
+                f" <{'comment' if env.is_sane() else 'error'}>{env.is_sane()}"
+            )
+        self.line("\n".join(listing))
+
+        self.line("")
+
+        system_env = env.parent_env
+        python = ".".join(str(v) for v in system_env.version_info[:3])
+        self.line("System")
+        self.line(
+            "\n".join(
+                [
+                    f"Platform:   {env.platform}",
+                    f"OS:         {env.os}",
+                    f"Python:     {python}",
+                    f"Path:       {system_env.path}",
+                    f"Executable: {system_env.python}",
+                ]
+            )
+        )
diff --git a/vendor/poetry/src/poetry/console/commands/env/list.py b/vendor/poetry/src/poetry/console/commands/env/list.py
new file mode 100644
index 00000000..e8c89e8b
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/env/list.py
@@ -0,0 +1,32 @@
+from __future__ import annotations
+
+from cleo.helpers import option
+
+from poetry.console.commands.command import Command
+
+
+class EnvListCommand(Command):
+    name = "env list"
+    description = "Lists all virtualenvs associated with the current project."
+
+    options = [option("full-path", None, "Output the full paths of the virtualenvs.")]
+
+    def handle(self) -> int:
+        from poetry.utils.env import EnvManager
+
+        manager = EnvManager(self.poetry)
+        current_env = manager.get()
+
+        for venv in manager.list():
+            name = venv.path.name
+            if self.option("full-path"):
+                name = str(venv.path)
+
+            if venv == current_env:
+                self.line(f"{name} (Activated)")
+
+                continue
+
+            self.line(name)
+
+        return 0
diff --git a/vendor/poetry/src/poetry/console/commands/env/remove.py b/vendor/poetry/src/poetry/console/commands/env/remove.py
new file mode 100644
index 00000000..d23fafe5
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/env/remove.py
@@ -0,0 +1,49 @@
+from __future__ import annotations
+
+from cleo.helpers import argument
+from cleo.helpers import option
+
+from poetry.console.commands.command import Command
+
+
+class EnvRemoveCommand(Command):
+    name = "env remove"
+    description = "Remove virtual environments associated with the project."
+
+    arguments = [
+        argument(
+            "python",
+            "The python executables associated with, or names of the virtual"
+            " environments which are to be removed.",
+            optional=True,
+            multiple=True,
+        )
+    ]
+    options = [
+        option(
+            "all",
+            description=(
+                "Remove all managed virtual environments associated with the project."
+            ),
+        ),
+    ]
+
+    def handle(self) -> int:
+        from poetry.utils.env import EnvManager
+
+        pythons = self.argument("python")
+        all = self.option("all")
+        if not (pythons or all):
+            self.line("No virtualenv provided.")
+
+        manager = EnvManager(self.poetry)
+        # TODO: refactor env.py to allow removal with one loop
+        for python in pythons:
+            venv = manager.remove(python)
+            self.line(f"Deleted virtualenv: {venv.path}")
+        if all:
+            for venv in manager.list():
+                manager.remove_venv(venv.path)
+                self.line(f"Deleted virtualenv: {venv.path}")
+
+        return 0
diff --git a/vendor/poetry/src/poetry/console/commands/env/use.py b/vendor/poetry/src/poetry/console/commands/env/use.py
new file mode 100644
index 00000000..cdfc8cbe
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/env/use.py
@@ -0,0 +1,28 @@
+from __future__ import annotations
+
+from cleo.helpers import argument
+
+from poetry.console.commands.command import Command
+
+
+class EnvUseCommand(Command):
+    name = "env use"
+    description = "Activates or creates a new virtualenv for the current project."
+
+    arguments = [argument("python", "The python executable to use.")]
+
+    def handle(self) -> int:
+        from poetry.utils.env import EnvManager
+
+        manager = EnvManager(self.poetry)
+
+        if self.argument("python") == "system":
+            manager.deactivate(self.io)
+
+            return 0
+
+        env = manager.activate(self.argument("python"), self.io)
+
+        self.line(f"Using virtualenv: {env.path}")
+
+        return 0
diff --git a/vendor/poetry/src/poetry/console/commands/env_command.py b/vendor/poetry/src/poetry/console/commands/env_command.py
new file mode 100644
index 00000000..f6723236
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/env_command.py
@@ -0,0 +1,25 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.console.commands.command import Command
+
+
+if TYPE_CHECKING:
+    from poetry.utils.env import Env
+
+
+class EnvCommand(Command):
+    def __init__(self) -> None:
+        # Set in poetry.console.application.Application.configure_env
+        self._env: Env | None = None
+
+        super().__init__()
+
+    @property
+    def env(self) -> Env:
+        assert self._env is not None
+        return self._env
+
+    def set_env(self, env: Env) -> None:
+        self._env = env
diff --git a/vendor/poetry/src/poetry/console/commands/group_command.py b/vendor/poetry/src/poetry/console/commands/group_command.py
new file mode 100644
index 00000000..4c99ca80
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/group_command.py
@@ -0,0 +1,109 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from cleo.helpers import option
+from poetry.core.packages.dependency_group import MAIN_GROUP
+
+from poetry.console.commands.env_command import EnvCommand
+
+
+if TYPE_CHECKING:
+    from cleo.io.inputs.option import Option
+    from poetry.core.packages.project_package import ProjectPackage
+
+
+class GroupCommand(EnvCommand):
+    @staticmethod
+    def _group_dependency_options() -> list[Option]:
+        return [
+            option(
+                "without",
+                None,
+                "The dependency groups to ignore.",
+                flag=False,
+                multiple=True,
+            ),
+            option(
+                "with",
+                None,
+                "The optional dependency groups to include.",
+                flag=False,
+                multiple=True,
+            ),
+            option(
+                "only",
+                None,
+                "The only dependency groups to include.",
+                flag=False,
+                multiple=True,
+            ),
+        ]
+
+    @property
+    def non_optional_groups(self) -> set[str]:
+        # TODO: this should move into poetry-core
+        return {
+            group.name
+            for group in self.poetry.package._dependency_groups.values()
+            if not group.is_optional()
+        }
+
+    @property
+    def default_group(self) -> str | None:
+        """
+        The default group to use when no group is specified. This is useful
+        for commands that have the `--group` option, e.g. add, remove.
+
+        Can be overridden to adapt behavior.
+        """
+        return None
+
+    @property
+    def default_groups(self) -> set[str]:
+        """
+        The groups that are considered by the command by default.
+
+        Can be overridden to adapt behavior.
+        """
+        return self.non_optional_groups
+
+    @property
+    def activated_groups(self) -> set[str]:
+        groups = {}
+
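+        # Collect the group names passed via --with/--without/--only, accepting
+        # both repeated options and comma-separated values.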
+        for key in {"with", "without", "only"}:
+            groups[key] = {
+                group.strip()
+                for groups in self.option(key, "")
+                for group in groups.split(",")
+            }
+
+        for opt, new, group in [
+            ("no-dev", "only", MAIN_GROUP),
+            ("dev", "with", "dev"),
+        ]:
+            if self.io.input.has_option(opt) and self.option(opt):
+                self.line_error(
+                    f"The `--{opt}` option is"
+                    f" deprecated, use the `--{new} {group}`"
+                    " notation instead."
+                )
+                groups[new].add(group)
+
+        if groups["only"] and (groups["with"] or groups["without"]):
+            self.line_error(
+                "The `--with` and "
+                "`--without` options are ignored when used"
+                " along with the `--only` option."
+                ""
+            )
+
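+        # --only wins outright; otherwise start from the default groups, add the
+        # --with groups and drop the --without groups.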
+        return groups["only"] or self.default_groups.union(groups["with"]).difference(
+            groups["without"]
+        )
+
+    def project_with_activated_groups_only(self) -> ProjectPackage:
+        return self.poetry.package.with_dependency_groups(
+            list(self.activated_groups), only=True
+        )
diff --git a/vendor/poetry/src/poetry/console/commands/init.py b/vendor/poetry/src/poetry/console/commands/init.py
new file mode 100644
index 00000000..ef4c98f1
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/init.py
@@ -0,0 +1,477 @@
+from __future__ import annotations
+
+import sys
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
+from typing import Dict
+from typing import Mapping
+from typing import Union
+
+from cleo.helpers import option
+from packaging.utils import canonicalize_name
+from tomlkit import inline_table
+
+from poetry.console.commands.command import Command
+from poetry.console.commands.env_command import EnvCommand
+from poetry.utils.dependency_specification import parse_dependency_specification
+
+
+if TYPE_CHECKING:
+    from packaging.utils import NormalizedName
+    from poetry.core.packages.package import Package
+    from tomlkit.items import InlineTable
+
+    from poetry.repositories import Pool
+
+Requirements = Dict[str, Union[str, Mapping[str, Any]]]
+
+
+class InitCommand(Command):
+    name = "init"
+    description = (
+        "Creates a basic pyproject.toml file in the current directory."
+    )
+
+    options = [
+        option("name", None, "Name of the package.", flag=False),
+        option("description", None, "Description of the package.", flag=False),
+        option("author", None, "Author name of the package.", flag=False),
+        option("python", None, "Compatible Python versions.", flag=False),
+        option(
+            "dependency",
+            None,
+            "Package to require, with an optional version constraint, "
+            "e.g. requests:^2.10.0 or requests=2.11.1.",
+            flag=False,
+            multiple=True,
+        ),
+        option(
+            "dev-dependency",
+            None,
+            "Package to require for development, with an optional version constraint, "
+            "e.g. requests:^2.10.0 or requests=2.11.1.",
+            flag=False,
+            multiple=True,
+        ),
+        option("license", "l", "License of the package.", flag=False),
+    ]
+
+    help = """\
+The init command creates a basic pyproject.toml file in the\
+ current directory.
+"""
+
+    def __init__(self) -> None:
+        super().__init__()
+
+        self._pool: Pool | None = None
+
+    def handle(self) -> int:
+        from pathlib import Path
+
+        from poetry.core.pyproject.toml import PyProjectTOML
+        from poetry.core.vcs.git import GitConfig
+
+        from poetry.layouts import layout
+        from poetry.utils.env import SystemEnv, InterpreterLookup
+
+        pyproject = PyProjectTOML(Path.cwd() / "pyproject.toml")
+
+        if pyproject.file.exists():
+            if pyproject.is_poetry_project():
+                self.line_error(
+                    "A pyproject.toml file with a poetry section already"
+                    " exists."
+                )
+                return 1
+
+            if pyproject.data.get("build-system"):
+                self.line_error(
+                    "A pyproject.toml file with a defined build-system already"
+                    " exists."
+                )
+                return 1
+
+        vcs_config = GitConfig()
+
+        if self.io.is_interactive():
+            self.line("")
+            self.line(
+                "This command will guide you through creating your"
+                " pyproject.toml config."
+            )
+            self.line("")
+
+        name = self.option("name")
+        if not name:
+            name = Path.cwd().name.lower()
+
+            question = self.create_question(
+                f"Package name [{name}]: ", default=name
+            )
+            name = self.ask(question)
+
+        version = "0.1.0"
+        question = self.create_question(
+            f"Version [{version}]: ", default=version
+        )
+        version = self.ask(question)
+
+        description = self.option("description")
+        if not description:
+            description = self.ask(self.create_question("Description []: ", default=""))
+
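+        # Default the author to the local git user.name (and user.email, if set).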
+        author = self.option("author")
+        if not author and vcs_config.get("user.name"):
+            author = vcs_config["user.name"]
+            author_email = vcs_config.get("user.email")
+            if author_email:
+                author += f" <{author_email}>"
+
+        question = self.create_question(
+            f"Author [{author}, n to skip]: ", default=author
+        )
+        question.set_validator(lambda v: self._validate_author(v, author))
+        author = self.ask(question)
+
+        if not author:
+            authors = []
+        else:
+            authors = [author]
+
+        license = self.option("license")
+        if not license:
+            license = self.ask(self.create_question("License []: ", default=""))
+
+        python = self.option("python")
+        if not python:
+            executable, _, _ = InterpreterLookup.find()
+            current_env = SystemEnv(Path(executable))
+            default_python = "^" + ".".join(
+                str(v) for v in current_env.version_info[:2]
+            )
+            question = self.create_question(
+                f"Compatible Python versions [{default_python}]: ",
+                default=default_python,
+            )
+            python = self.ask(question)
+
+        if self.io.is_interactive():
+            self.line("")
+
+        requirements: Requirements = {}
+        if self.option("dependency"):
+            requirements = self._format_requirements(
+                self._determine_requirements(self.option("dependency"))
+            )
+
+        question = "Would you like to define your main dependencies interactively?"
+        help_message = """\
+You can specify a package in the following forms:
+  - A single name (requests): this will search for matches on PyPI
+  - A name and a constraint (requests@^2.23.0)
+  - A git url (git+https://github.com/python-poetry/poetry.git)
+  - A git url with a revision\
+ (git+https://github.com/python-poetry/poetry.git#develop)
+  - A file path (../my-package/my-package.whl)
+  - A directory (../my-package/)
+  - A url (https://example.com/packages/my-package-0.1.0.tar.gz)
+"""
+
+        help_displayed = False
+        if self.confirm(question, True):
+            if self.io.is_interactive():
+                self.line(help_message)
+                help_displayed = True
+            requirements.update(
+                self._format_requirements(self._determine_requirements([]))
+            )
+            if self.io.is_interactive():
+                self.line("")
+
+        dev_requirements: Requirements = {}
+        if self.option("dev-dependency"):
+            dev_requirements = self._format_requirements(
+                self._determine_requirements(self.option("dev-dependency"))
+            )
+
+        question = (
+            "Would you like to define your development dependencies interactively?"
+        )
+        if self.confirm(question, True):
+            if self.io.is_interactive() and not help_displayed:
+                self.line(help_message)
+
+            dev_requirements.update(
+                self._format_requirements(self._determine_requirements([]))
+            )
+            if self.io.is_interactive():
+                self.line("")
+
+        layout_ = layout("standard")(
+            name,
+            version,
+            description=description,
+            author=authors[0] if authors else None,
+            license=license,
+            python=python,
+            dependencies=requirements,
+            dev_dependencies=dev_requirements,
+        )
+
+        content = layout_.generate_poetry_content()
+        for section in content:
+            pyproject.data.append(section, content[section])
+        if self.io.is_interactive():
+            self.line("Generated file")
+            self.line("")
+            self.line(pyproject.data.as_string().replace("\r\n", "\n"))
+            self.line("")
+
+        if not self.confirm("Do you confirm generation?", True):
+            self.line_error("Command aborted")
+
+            return 1
+
+        pyproject.save()
+
+        return 0
+
+    def _generate_choice_list(
+        self, matches: list[Package], canonicalized_name: NormalizedName
+    ) -> list[str]:
+        choices = []
+        matches_names = [p.name for p in matches]
+        exact_match = canonicalized_name in matches_names
+        if exact_match:
+            choices.append(matches[matches_names.index(canonicalized_name)].pretty_name)
+
+        for found_package in matches:
+            if len(choices) >= 10:
+                break
+
+            if found_package.name == canonicalized_name:
+                continue
+
+            choices.append(found_package.pretty_name)
+
+        return choices
+
+    def _determine_requirements(
+        self,
+        requires: list[str],
+        allow_prereleases: bool = False,
+        source: str | None = None,
+    ) -> list[dict[str, Any]]:
+        if not requires:
+            result = []
+
+            question = self.create_question(
+                "Package to add or search for (leave blank to skip):"
+            )
+            question.set_validator(self._validate_package)
+
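+            # Interactive mode: keep prompting until the user submits a blank
+            # package name.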
+            package = self.ask(question)
+            while package:
+                constraint = self._parse_requirements([package])[0]
+                if (
+                    "git" in constraint
+                    or "url" in constraint
+                    or "path" in constraint
+                    or "version" in constraint
+                ):
+                    self.line(f"Adding {package}")
+                    result.append(constraint)
+                    package = self.ask("\nAdd a package (leave blank to skip):")
+                    continue
+
+                canonicalized_name = canonicalize_name(constraint["name"])
+                matches = self._get_pool().search(canonicalized_name)
+                if not matches:
+                    self.line_error("Unable to find package")
+                    package = False
+                else:
+                    choices = self._generate_choice_list(matches, canonicalized_name)
+
+                    info_string = (
+                        f"Found {len(matches)} packages matching"
+                        f" {package}"
+                    )
+
+                    if len(matches) > 10:
+                        info_string += "\nShowing the first 10 matches"
+
+                    self.line(info_string)
+
+                    # Default to an empty value to signal no package was selected
+                    choices.append("")
+
+                    package = self.choice(
+                        "\nEnter package # to add, or the complete package name if it"
+                        " is not listed",
+                        choices,
+                        attempts=3,
+                        default=len(choices) - 1,
+                    )
+
+                    if not package:
+                        self.line("No package selected")
+
+                    # package selected by user, set constraint name to package name
+                    if package:
+                        constraint["name"] = package
+
+                # no constraint yet, determine the best version automatically
+                if package and "version" not in constraint:
+                    question = self.create_question(
+                        "Enter the version constraint to require "
+                        "(or leave blank to use the latest version):"
+                    )
+                    question.attempts = 3
+                    question.validator = lambda x: (x or "").strip() or False
+
+                    package_constraint = self.ask(question)
+
+                    if package_constraint is None:
+                        _, package_constraint = self._find_best_version_for_package(
+                            package
+                        )
+
+                        self.line(
+                            f"Using version {package_constraint} for"
+                            f" {package}"
+                        )
+
+                    constraint["version"] = package_constraint
+
+                if package:
+                    result.append(constraint)
+
+                if self.io.is_interactive():
+                    package = self.ask("\nAdd a package (leave blank to skip):")
+
+            return result
+
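+        # Non-interactive path: parse the given requirement strings and fill in
+        # a version constraint for anything that is not a git/url/path dependency.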
+        result = []
+        for requirement in self._parse_requirements(requires):
+            if "git" in requirement or "url" in requirement or "path" in requirement:
+                result.append(requirement)
+                continue
+            elif "version" not in requirement:
+                # determine the best version automatically
+                name, version = self._find_best_version_for_package(
+                    requirement["name"],
+                    allow_prereleases=allow_prereleases,
+                    source=source,
+                )
+                requirement["version"] = version
+                requirement["name"] = name
+
+                self.line(f"Using version {version} for {name}")
+            else:
+                # check that the specified version/constraint exists
+                # before we proceed
+                name, _ = self._find_best_version_for_package(
+                    requirement["name"],
+                    requirement["version"],
+                    allow_prereleases=allow_prereleases,
+                    source=source,
+                )
+
+                requirement["name"] = name
+
+            result.append(requirement)
+
+        return result
+
+    def _find_best_version_for_package(
+        self,
+        name: str,
+        required_version: str | None = None,
+        allow_prereleases: bool = False,
+        source: str | None = None,
+    ) -> tuple[str, str]:
+        from poetry.version.version_selector import VersionSelector
+
+        selector = VersionSelector(self._get_pool())
+        package = selector.find_best_candidate(
+            name, required_version, allow_prereleases=allow_prereleases, source=source
+        )
+
+        if not package:
+            # TODO: find similar
+            raise ValueError(f"Could not find a matching version of package {name}")
+
+        return package.pretty_name, selector.find_recommended_require_version(package)
+
+    def _parse_requirements(self, requirements: list[str]) -> list[dict[str, Any]]:
+        from poetry.core.pyproject.exceptions import PyProjectException
+
+        try:
+            cwd = self.poetry.file.parent
+        except (PyProjectException, RuntimeError):
+            cwd = Path.cwd()
+
+        return [
+            parse_dependency_specification(
+                requirement=requirement,
+                env=self.env if isinstance(self, EnvCommand) else None,
+                cwd=cwd,
+            )
+            for requirement in requirements
+        ]
+
+    def _format_requirements(self, requirements: list[dict[str, str]]) -> Requirements:
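+        # A requirement with only a version becomes a plain string constraint;
+        # anything with more fields is written as an inline TOML table.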
+        requires: Requirements = {}
+        for requirement in requirements:
+            name = requirement.pop("name")
+            constraint: str | InlineTable
+            if "version" in requirement and len(requirement) == 1:
+                constraint = requirement["version"]
+            else:
+                constraint = inline_table()
+                constraint.trivia.trail = "\n"
+                constraint.update(requirement)
+
+            requires[name] = constraint
+
+        return requires
+
+    def _validate_author(self, author: str, default: str) -> str | None:
+        from poetry.core.packages.package import AUTHOR_REGEX
+
+        author = author or default
+
+        if author in ["n", "no"]:
+            return None
+
+        m = AUTHOR_REGEX.match(author)
+        if not m:
+            raise ValueError(
+                "Invalid author string. Must be in the format: "
+                "John Smith <john@example.com>"
+            )
+
+        return author
+
+    @staticmethod
+    def _validate_package(package: str | None) -> str | None:
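+        # A definition may be at most two whitespace-separated tokens: a name
+        # optionally followed by a version constraint.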
+        if package and len(package.split()) > 2:
+            raise ValueError("Invalid package definition.")
+
+        return package
+
+    def _get_pool(self) -> Pool:
+        from poetry.repositories import Pool
+        from poetry.repositories.pypi_repository import PyPiRepository
+
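+        # Env-aware commands reuse the project's configured pool; otherwise a
+        # PyPI-only pool is created lazily and cached.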
+        if isinstance(self, EnvCommand):
+            return self.poetry.pool
+
+        if self._pool is None:
+            self._pool = Pool()
+            self._pool.add_repository(PyPiRepository())
+
+        return self._pool
diff --git a/vendor/poetry/src/poetry/console/commands/install.py b/vendor/poetry/src/poetry/console/commands/install.py
new file mode 100644
index 00000000..7ce0776a
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/install.py
@@ -0,0 +1,182 @@
+from __future__ import annotations
+
+from cleo.helpers import option
+
+from poetry.console.commands.installer_command import InstallerCommand
+
+
+class InstallCommand(InstallerCommand):
+    name = "install"
+    description = "Installs the project dependencies."
+
+    options = [
+        *InstallerCommand._group_dependency_options(),
+        option(
+            "no-dev",
+            None,
+            "Do not install the development dependencies."
+            " (Deprecated)",
+        ),
+        option(
+            "sync",
+            None,
+            "Synchronize the environment with the locked packages and the specified"
+            " groups.",
+        ),
+        option(
+            "no-root", None, "Do not install the root package (the current project)."
+        ),
+        option(
+            "dry-run",
+            None,
+            "Output the operations but do not execute anything "
+            "(implicitly enables --verbose).",
+        ),
+        option(
+            "remove-untracked",
+            None,
+            "Removes packages not present in the lock file."
+            " (Deprecated)",
+        ),
+        option(
+            "extras",
+            "E",
+            "Extra sets of dependencies to install.",
+            flag=False,
+            multiple=True,
+        ),
+        option("all-extras", None, "Install all extra dependencies."),
+        option(
+            "only-root",
+            None,
+            "Exclude all dependencies.",
+            flag=True,
+            multiple=False,
+        ),
+    ]
+
+    help = """\
+The install command reads the poetry.lock file from
+the current directory, processes it, and downloads and installs all the
+libraries and dependencies outlined in that file. If the file does not
+exist it will look for pyproject.toml and do the same.
+
+poetry install
+
+By default, the above command will also install the current project. To install
+only the dependencies and not the current project, run the command with the
+--no-root option like below:
+
+ poetry install --no-root
+"""
+
+    loggers = ["poetry.repositories.pypi_repository", "poetry.inspection.info"]
+
+    @property
+    def activated_groups(self) -> set[str]:
+        if self.option("only-root"):
+            return set()
+        else:
+            return super().activated_groups
+
+    def handle(self) -> int:
+        from poetry.core.masonry.utils.module import ModuleOrPackageNotFound
+
+        from poetry.masonry.builders.editable import EditableBuilder
+
+        self.installer.use_executor(
+            self.poetry.config.get("experimental.new-installer", False)
+        )
+
+        if self.option("extras") and self.option("all-extras"):
+            self.line_error(
+                "You cannot specify explicit"
+                " `--extras` while installing"
+                " using `--all-extras`."
+            )
+            return 1
+
+        if self.option("only-root") and any(
+            self.option(key) for key in {"with", "without", "only"}
+        ):
+            self.line_error(
+                "The `--with`,"
+                " `--without` and"
+                " `--only` options cannot be used with"
+                " the `--only-root`"
+                " option."
+            )
+            return 1
+
+        if self.option("only-root") and self.option("no-root"):
+            self.line_error(
+                "You cannot specify `--no-root`"
+                " when using `--only-root`."
+            )
+            return 1
+
+        if self.option("all-extras"):
+            extras = list(self.poetry.package.extras.keys())
+        else:
+            extras = []
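+            # -E/--extras may be passed several times, and each value may
+            # itself be a space-separated list of extras.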
+            for extra in self.option("extras", []):
+                if " " in extra:
+                    extras += [e.strip() for e in extra.split(" ")]
+                else:
+                    extras.append(extra)
+
+        self.installer.extras(extras)
+
+        with_synchronization = self.option("sync")
+        if self.option("remove-untracked"):
+            self.line_error(
+                "The `--remove-untracked` option is"
+                " deprecated, use the `--sync` option"
+                " instead."
+            )
+
+            with_synchronization = True
+
+        self.installer.only_groups(self.activated_groups)
+        self.installer.dry_run(self.option("dry-run"))
+        self.installer.requires_synchronization(with_synchronization)
+        self.installer.verbose(self.io.is_verbose())
+
+        return_code = self.installer.run()
+
+        if return_code != 0:
+            return return_code
+
+        if self.option("no-root"):
+            return 0
+
+        try:
+            builder = EditableBuilder(self.poetry, self.env, self.io)
+        except ModuleOrPackageNotFound:
+            # This is likely due to the fact that the project is an application
+            # not following the structure expected by Poetry
+            # If this is a true error it will be picked up later by build anyway.
+            return 0
+
+        log_install = (
+            "Installing the current project:"
+            f" <c1>{self.poetry.package.pretty_name}</c1>"
+            f" (<{{tag}}>{self.poetry.package.pretty_version}</>)"
+        )
+        overwrite = self.io.output.is_decorated() and not self.io.is_debug()
+        self.line("")
+        self.write(log_install.format(tag="c2"))
+        if not overwrite:
+            self.line("")
+
+        if self.option("dry-run"):
+            self.line("")
+            return 0
+
+        builder.build()
+
+        if overwrite:
+            self.overwrite(log_install.format(tag="success"))
+            self.line("")
+
+        return 0
diff --git a/vendor/poetry/src/poetry/console/commands/installer_command.py b/vendor/poetry/src/poetry/console/commands/installer_command.py
new file mode 100644
index 00000000..b361737d
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/installer_command.py
@@ -0,0 +1,31 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.console.commands.group_command import GroupCommand
+
+
+if TYPE_CHECKING:
+    from poetry.installation.installer import Installer
+
+
+class InstallerCommand(GroupCommand):
+    def __init__(self) -> None:
+        # Set in poetry.console.application.Application.configure_installer
+        self._installer: Installer | None = None
+
+        super().__init__()
+
+    def reset_poetry(self) -> None:
+        super().reset_poetry()
+
+        self.installer.set_package(self.poetry.package)
+        self.installer.set_locker(self.poetry.locker)
+
+    @property
+    def installer(self) -> Installer:
+        assert self._installer is not None
+        return self._installer
+
+    def set_installer(self, installer: Installer) -> None:
+        self._installer = installer
diff --git a/vendor/poetry/src/poetry/console/commands/lock.py b/vendor/poetry/src/poetry/console/commands/lock.py
new file mode 100644
index 00000000..57c9ed74
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/lock.py
@@ -0,0 +1,54 @@
+from __future__ import annotations
+
+from cleo.helpers import option
+
+from poetry.console.commands.installer_command import InstallerCommand
+
+
+class LockCommand(InstallerCommand):
+    name = "lock"
+    description = "Locks the project dependencies."
+
+    options = [
+        option(
+            "no-update", None, "Do not update locked versions, only refresh lock file."
+        ),
+        option(
+            "check",
+            None,
+            "Check that the poetry.lock file corresponds to the current"
+            " version of pyproject.toml.",
+        ),
+    ]
+
+    help = """
+The lock command reads the pyproject.toml file from the
+current directory, processes it, and locks the dependencies in the\
+ poetry.lock
+file.
+
+poetry lock
+"""
+
+    loggers = ["poetry.repositories.pypi_repository"]
+
+    def handle(self) -> int:
+        self.installer.use_executor(
+            self.poetry.config.get("experimental.new-installer", False)
+        )
+
+        if self.option("check"):
+            if self.poetry.locker.is_locked() and self.poetry.locker.is_fresh():
+                self.line("poetry.lock is consistent with pyproject.toml.")
+                return 0
+            self.line_error(
+                "Error: poetry.lock is not consistent with pyproject.toml. "
+                "Run `poetry lock [--no-update]` to fix it."
+            )
+            return 1
+
+        self.installer.lock(update=not self.option("no-update"))
+
+        return self.installer.run()
diff --git a/vendor/poetry/src/poetry/console/commands/new.py b/vendor/poetry/src/poetry/console/commands/new.py
new file mode 100644
index 00000000..0a25b2a0
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/new.py
@@ -0,0 +1,91 @@
+from __future__ import annotations
+
+import sys
+
+from contextlib import suppress
+
+from cleo.helpers import argument
+from cleo.helpers import option
+
+from poetry.console.commands.command import Command
+
+
+class NewCommand(Command):
+    name = "new"
+    description = "Creates a new Python project at <path>."
+
+    arguments = [argument("path", "The path to create the project at.")]
+    options = [
+        option("name", None, "Set the resulting package name.", flag=False),
+        option("src", None, "Use the src layout for the project."),
+        option(
+            "readme",
+            None,
+            "Specify the readme file format. One of md (default) or rst",
+            flag=False,
+        ),
+    ]
+
+    def handle(self) -> int:
+        from pathlib import Path
+
+        from poetry.core.vcs.git import GitConfig
+
+        from poetry.layouts import layout
+        from poetry.utils.env import InterpreterLookup
+        from poetry.utils.env import SystemEnv
+
+        if self.option("src"):
+            layout_cls = layout("src")
+        else:
+            layout_cls = layout("standard")
+
+        path = Path(self.argument("path"))
+        if not path.is_absolute():
+            # we do not use resolve here due to compatibility issues
+            # for path.resolve(strict=False)
+            path = Path.cwd().joinpath(path)
+
+        name = self.option("name")
+        if not name:
+            name = path.name
+
+        if path.exists() and list(path.glob("*")):
+            # Directory is not empty. Aborting.
+            raise RuntimeError(
+                f"Destination {path} exists and is not empty"
+            )
+
+        readme_format = self.option("readme") or "md"
+
+        config = GitConfig()
+        author = None
+        if config.get("user.name"):
+            author = config["user.name"]
+            author_email = config.get("user.email")
+            if author_email:
+                author += f" <{author_email}>"
+
+        executable, _, _ = InterpreterLookup.find()
+        current_env = SystemEnv(Path(executable))
+        default_python = "^" + ".".join(str(v) for v in current_env.version_info[:2])
+
+        layout_ = layout_cls(
+            name,
+            "0.1.0",
+            author=author,
+            readme_format=readme_format,
+            python=default_python,
+        )
+        layout_.create(path)
+
+        path = path.resolve()
+
+        with suppress(ValueError):
+            path = path.relative_to(Path.cwd())
+
+        self.line(
+            f"Created package {layout_._package_name} in"
+            f" {path.as_posix()}"
+        )
+
+        return 0
diff --git a/vendor/poetry/poetry/console/commands/publish.py b/vendor/poetry/src/poetry/console/commands/publish.py
similarity index 76%
rename from vendor/poetry/poetry/console/commands/publish.py
rename to vendor/poetry/src/poetry/console/commands/publish.py
index 557cd1d7..b98c58bf 100644
--- a/vendor/poetry/poetry/console/commands/publish.py
+++ b/vendor/poetry/src/poetry/console/commands/publish.py
@@ -1,12 +1,13 @@
-from cleo import option
+from __future__ import annotations
 
-from poetry.utils._compat import Path
+from pathlib import Path
 
-from .command import Command
+from cleo.helpers import option
 
+from poetry.console.commands.command import Command
 
-class PublishCommand(Command):
 
+class PublishCommand(Command):
     name = "publish"
     description = "Publishes a package to a remote repository."
 
@@ -27,6 +28,11 @@ class PublishCommand(Command):
         ),
         option("build", None, "Build the package before publishing."),
         option("dry-run", None, "Perform all actions except upload the package."),
+        option(
+            "skip-existing",
+            None,
+            "Ignore errors from files already existing in the repository.",
+        ),
     ]
 
     help = """The publish command builds and uploads the package to a remote repository.
@@ -40,21 +46,20 @@ class PublishCommand(Command):
 
     loggers = ["poetry.masonry.publishing.publisher"]
 
-    def handle(self):
+    def handle(self) -> int:
         from poetry.publishing.publisher import Publisher
 
         publisher = Publisher(self.poetry, self.io)
 
         # Building package first, if told
         if self.option("build"):
-            if publisher.files:
-                if not self.confirm(
-                    "There are {} files ready for publishing. "
-                    "Build anyway?".format(len(publisher.files))
-                ):
-                    self.line_error("Aborted!")
+            if publisher.files and not self.confirm(
+                f"There are {len(publisher.files)} files ready for"
+                " publishing. Build anyway?"
+            ):
+                self.line_error("Aborted!")
 
-                    return 1
+                return 1
 
             self.call("build")
 
@@ -81,4 +86,7 @@ def handle(self):
             cert,
             client_cert,
             self.option("dry-run"),
+            self.option("skip-existing"),
         )
+
+        return 0
diff --git a/vendor/poetry/src/poetry/console/commands/remove.py b/vendor/poetry/src/poetry/console/commands/remove.py
new file mode 100644
index 00000000..01810f80
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/remove.py
@@ -0,0 +1,133 @@
+from __future__ import annotations
+
+from typing import Any
+
+from cleo.helpers import argument
+from cleo.helpers import option
+from packaging.utils import canonicalize_name
+from poetry.core.packages.dependency_group import MAIN_GROUP
+from tomlkit.toml_document import TOMLDocument
+
+from poetry.console.commands.installer_command import InstallerCommand
+
+
+class RemoveCommand(InstallerCommand):
+    name = "remove"
+    description = "Removes a package from the project dependencies."
+
+    arguments = [argument("packages", "The packages to remove.", multiple=True)]
+    options = [
+        option("group", "G", "The group to remove the dependency from.", flag=False),
+        option("dev", "D", "Remove a package from the development dependencies."),
+        option(
+            "dry-run",
+            None,
+            "Output the operations but do not execute anything "
+            "(implicitly enables --verbose).",
+        ),
+    ]
+
+    help = """The remove command removes a package from the current
+list of installed packages
+
+poetry remove"""
+
+    loggers = ["poetry.repositories.pypi_repository", "poetry.inspection.info"]
+
+    def handle(self) -> int:
+        packages = self.argument("packages")
+
+        if self.option("dev"):
+            self.line_error(
+                "The --dev option is deprecated, "
+                "use the `--group dev` notation instead."
+            )
+            group = "dev"
+        else:
+            group = self.option("group", self.default_group)
+
+        content: dict[str, Any] = self.poetry.file.read()
+        poetry_content = content["tool"]["poetry"]
+
+        if group is None:
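+            # No group specified: try to remove the packages from the main
+            # dependencies section and from every dependency group.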
+            removed = []
+            group_sections = [
+                (group_name, group_section.get("dependencies", {}))
+                for group_name, group_section in poetry_content.get("group", {}).items()
+            ]
+
+            for group_name, section in [
+                (MAIN_GROUP, poetry_content["dependencies"])
+            ] + group_sections:
+                removed += self._remove_packages(packages, section, group_name)
+                if group_name != MAIN_GROUP:
+                    if not section:
+                        del poetry_content["group"][group_name]
+                    else:
+                        poetry_content["group"][group_name]["dependencies"] = section
+        elif group == "dev" and "dev-dependencies" in poetry_content:
+            # We need to account for the old `dev-dependencies` section
+            removed = self._remove_packages(
+                packages, poetry_content["dev-dependencies"], "dev"
+            )
+
+            if not poetry_content["dev-dependencies"]:
+                del poetry_content["dev-dependencies"]
+        else:
+            removed = []
+            if "group" in poetry_content:
+                if group in poetry_content["group"]:
+                    removed = self._remove_packages(
+                        packages,
+                        poetry_content["group"][group].get("dependencies", {}),
+                        group,
+                    )
+
+                if not poetry_content["group"][group]:
+                    del poetry_content["group"][group]
+
+        if "group" in poetry_content and not poetry_content["group"]:
+            del poetry_content["group"]
+
+        removed_set = set(removed)
+        not_found = set(packages).difference(removed_set)
+        if not_found:
+            raise ValueError(
+                "The following packages were not found: " + ", ".join(sorted(not_found))
+            )
+
+        # Refresh the locker
+        self.poetry.set_locker(
+            self.poetry.locker.__class__(self.poetry.locker.lock.path, poetry_content)
+        )
+        self.installer.set_locker(self.poetry.locker)
+
+        self.installer.set_package(self.poetry.package)
+        self.installer.dry_run(self.option("dry-run", False))
+        self.installer.verbose(self.io.is_verbose())
+        self.installer.update(True)
+        self.installer.whitelist(removed_set)
+
+        status = self.installer.run()
+
+        if not self.option("dry-run") and status == 0:
+            assert isinstance(content, TOMLDocument)
+            self.poetry.file.write(content)
+
+        return status
+
+    def _remove_packages(
+        self, packages: list[str], section: dict[str, Any], group_name: str
+    ) -> list[str]:
+        removed = []
+        group = self.poetry.package.dependency_group(group_name)
+        section_keys = list(section.keys())
+
+        for package in packages:
+            for existing_package in section_keys:
+                if canonicalize_name(existing_package) == canonicalize_name(package):
+                    del section[existing_package]
+                    removed.append(package)
+                    group.remove_dependency(package)
+
+        return removed
diff --git a/vendor/poetry/src/poetry/console/commands/run.py b/vendor/poetry/src/poetry/console/commands/run.py
new file mode 100644
index 00000000..6496b45a
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/run.py
@@ -0,0 +1,64 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from cleo.helpers import argument
+
+from poetry.console.commands.env_command import EnvCommand
+
+
+if TYPE_CHECKING:
+    from poetry.core.masonry.utils.module import Module
+
+
+class RunCommand(EnvCommand):
+    name = "run"
+    description = "Runs a command in the appropriate environment."
+
+    arguments = [
+        argument("args", "The command and arguments/options to run.", multiple=True)
+    ]
+
+    def handle(self) -> int:
+        args = self.argument("args")
+        script = args[0]
+        scripts = self.poetry.local_config.get("scripts")
+
+        if scripts and script in scripts:
+            return self.run_script(scripts[script], args)
+
+        try:
+            return self.env.execute(*args)
+        except FileNotFoundError:
+            self.line_error(f"Command not found: {script}")
+            return 1
+
+    @property
+    def _module(self) -> Module:
+        from poetry.core.masonry.utils.module import Module
+
+        poetry = self.poetry
+        package = poetry.package
+        path = poetry.file.parent
+        module = Module(package.name, path.as_posix(), package.packages)
+
+        return module
+
+    def run_script(self, script: str | dict[str, str], args: list[str]) -> int:
+        if isinstance(script, dict):
+            script = script["callable"]
+
+        module, callable_ = script.split(":")
+
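+        # For src-layout projects the package lives under ./src, which is not
+        # importable by default; the generated one-liner appends it to sys.path.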
+        src_in_sys_path = "sys.path.append('src'); " if self._module.is_in_src() else ""
+
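+        # The script is executed as, roughly:
+        #   python -c "import sys; from importlib import import_module;
+        #       sys.argv = [...]; import_module('<module>').<callable>()"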
+        cmd = ["python", "-c"]
+
+        cmd += [
+            "import sys; "
+            "from importlib import import_module; "
+            f"sys.argv = {args!r}; {src_in_sys_path}"
+            f"import_module('{module}').{callable_}()"
+        ]
+
+        return self.env.execute(*cmd)
diff --git a/vendor/poetry/src/poetry/console/commands/search.py b/vendor/poetry/src/poetry/console/commands/search.py
new file mode 100644
index 00000000..429f6868
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/search.py
@@ -0,0 +1,30 @@
+from __future__ import annotations
+
+from cleo.helpers import argument
+
+from poetry.console.commands.command import Command
+
+
+class SearchCommand(Command):
+    name = "search"
+    description = "Searches for packages on remote repositories."
+
+    arguments = [argument("tokens", "The tokens to search for.", multiple=True)]
+
+    def handle(self) -> int:
+        from poetry.repositories.pypi_repository import PyPiRepository
+
+        results = PyPiRepository().search(self.argument("tokens"))
+
+        for result in results:
+            self.line("")
+            name = f"{result.name}"
+
+            name += f" ({result.version})"
+
+            self.line(name)
+
+            if result.description:
+                self.line(f" {result.description}")
+
+        return 0
diff --git a/vendor/poetry/src/poetry/console/commands/self/__init__.py b/vendor/poetry/src/poetry/console/commands/self/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/console/commands/self/add.py b/vendor/poetry/src/poetry/console/commands/self/add.py
new file mode 100644
index 00000000..487239d4
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/self/add.py
@@ -0,0 +1,40 @@
+from __future__ import annotations
+
+from poetry.core.semver.version import Version
+
+from poetry.__version__ import __version__
+from poetry.console.commands.add import AddCommand
+from poetry.console.commands.self.self_command import SelfCommand
+
+
+class SelfAddCommand(SelfCommand, AddCommand):
+    name = "self add"
+    description = "Add additional packages to Poetry's runtime environment."
+    options = [
+        o
+        for o in AddCommand.options
+        if o.name in {"editable", "extras", "source", "dry-run", "allow-prereleases"}
+    ]
+    help = f"""\
+The self add command installs additional packages to Poetry's runtime \
+environment.
+
+This is managed in the {SelfCommand.get_default_system_pyproject_file()} \
+file.
+
+{AddCommand.examples}
+"""
+
+    @property
+    def _hint_update_packages(self) -> str:
+        version = Version.parse(__version__)
+        flags = ""
+
+        if not version.is_stable():
+            flags = " --preview"
+
+        return (
+            "\nIf you want to update it to the latest compatible version, you can use"
+            f" `poetry self update{flags}`.\nIf you prefer to upgrade it to the latest"
+            " available version, you can use `poetry self add package@latest`.\n"
+        )
diff --git a/vendor/poetry/src/poetry/console/commands/self/install.py b/vendor/poetry/src/poetry/console/commands/self/install.py
new file mode 100644
index 00000000..ec70f734
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/self/install.py
@@ -0,0 +1,28 @@
+from __future__ import annotations
+
+from poetry.core.packages.dependency_group import MAIN_GROUP
+
+from poetry.console.commands.install import InstallCommand
+from poetry.console.commands.self.self_command import SelfCommand
+
+
+class SelfInstallCommand(SelfCommand, InstallCommand):
+    name = "self install"
+    description = (
+        "Install locked packages (incl. addons) required by this Poetry installation."
+    )
+    options = [o for o in InstallCommand.options if o.name in {"sync", "dry-run"}]
+    help = f"""\
+The self install command ensures all additional packages specified are \
+installed in the current runtime environment.
+
+This is managed in the {SelfCommand.get_default_system_pyproject_file()} \
+file.
+
+You can add more packages using the self add command and remove them using \
+the self remove command.
+"""
+
+    @property
+    def activated_groups(self) -> set[str]:
+        return {MAIN_GROUP, self.default_group}
diff --git a/vendor/poetry/src/poetry/console/commands/self/lock.py b/vendor/poetry/src/poetry/console/commands/self/lock.py
new file mode 100644
index 00000000..13743fa7
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/self/lock.py
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+from poetry.console.commands.lock import LockCommand
+from poetry.console.commands.self.self_command import SelfCommand
+
+
+class SelfLockCommand(SelfCommand, LockCommand):
+    name = "self lock"
+    description = "Lock the Poetry installation's system requirements."
+    help = f"""\
+The self lock command reads this Poetry installation's system requirements as \
+specified in the {SelfCommand.get_default_system_pyproject_file()} file.
+
+The system dependencies are locked in the \
+{SelfCommand.get_default_system_pyproject_file().parent.joinpath("poetry.lock")} \
+file.
+"""
diff --git a/vendor/poetry/src/poetry/console/commands/self/remove.py b/vendor/poetry/src/poetry/console/commands/self/remove.py
new file mode 100644
index 00000000..59f1d2fa
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/self/remove.py
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+from poetry.console.commands.remove import RemoveCommand
+from poetry.console.commands.self.self_command import SelfCommand
+
+
+class SelfRemoveCommand(SelfCommand, RemoveCommand):
+    name = "self remove"
+    description = "Remove additional packages from Poetry's runtime environment."
+    options = [o for o in RemoveCommand.options if o.name in {"dry-run"}]
+    help = f"""\
+The self remove command removes additional packages from Poetry's runtime \
+environment.
+
+This is managed in the {SelfCommand.get_default_system_pyproject_file()} \
+file.
+"""
diff --git a/vendor/poetry/src/poetry/console/commands/self/self_command.py b/vendor/poetry/src/poetry/console/commands/self/self_command.py
new file mode 100644
index 00000000..37738f44
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/self/self_command.py
@@ -0,0 +1,130 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+from poetry.core.packages.dependency import Dependency
+from poetry.core.packages.project_package import ProjectPackage
+from poetry.core.pyproject.toml import PyProjectTOML
+
+from poetry.__version__ import __version__
+from poetry.console.commands.installer_command import InstallerCommand
+from poetry.factory import Factory
+from poetry.utils.env import EnvManager
+from poetry.utils.env import SystemEnv
+from poetry.utils.helpers import directory
+
+
+if TYPE_CHECKING:
+    from poetry.poetry import Poetry
+    from poetry.utils.env import Env
+
+
+class SelfCommand(InstallerCommand):
+    ADDITIONAL_PACKAGE_GROUP = "additional"
+
+    @staticmethod
+    def get_default_system_pyproject_file() -> Path:
+        # We separate this out to avoid unwanted side effects during testing while
+        # maintaining dynamic use in help text.
+        #
+        # This is not ideal, but is the simplest solution for now.
+        from poetry.locations import CONFIG_DIR
+
+        return Path(CONFIG_DIR).joinpath("pyproject.toml")
+
+    @property
+    def system_pyproject(self) -> Path:
+        file = self.get_default_system_pyproject_file()
+        file.parent.mkdir(parents=True, exist_ok=True)
+        return file
+
+    def reset_env(self) -> None:
+        self._env = EnvManager.get_system_env(naive=True)
+
+    @property
+    def env(self) -> Env:
+        if not isinstance(self._env, SystemEnv):
+            self.reset_env()
+        assert self._env is not None
+        return self._env
+
+    @property
+    def default_group(self) -> str:
+        return self.ADDITIONAL_PACKAGE_GROUP
+
+    @property
+    def activated_groups(self) -> set[str]:
+        return {self.default_group}
+
+    def generate_system_pyproject(self) -> None:
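+        # Regenerate the system pyproject.toml from scratch, preserving any
+        # user-defined dependency groups and sources from the existing file.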
+        preserved = {}
+
+        if self.system_pyproject.exists():
+            content = PyProjectTOML(self.system_pyproject).poetry_config
+
+            for key in {"group", "source"}:
+                if key in content:
+                    preserved[key] = content[key]
+
+        package = ProjectPackage(name="poetry-instance", version=__version__)
+        package.add_dependency(Dependency(name="poetry", constraint=f"{__version__}"))
+
+        package.python_versions = ".".join(str(v) for v in self.env.version_info[:3])
+
+        content = Factory.create_pyproject_from_package(package=package)
+
+        for key in preserved:
+            content[key] = preserved[key]
+
+        self.system_pyproject.write_text(content.as_string(), encoding="utf-8")
+
+    def reset_poetry(self) -> None:
+        with directory(self.system_pyproject.parent):
+            self.generate_system_pyproject()
+            self._poetry = Factory().create_poetry(
+                self.system_pyproject.parent, io=self.io, disable_plugins=True
+            )
+
+    @property
+    def poetry(self) -> Poetry:
+        if self._poetry is None:
+            self.reset_poetry()
+
+        assert self._poetry is not None
+        return self._poetry
+
+    def _system_project_handle(self) -> int:
+        """
+        This is a helper method that by default calls the handle method implemented in
+        the child class's next sibling in the MRO. Override this if you need special
+        handling before calling handle() from the superclass, or custom logic to
+        handle the command.
+
+        The default implementation handles cases where a `self` command delegates
+        handling to an existing command, e.g. `SelfAddCommand(SelfCommand, AddCommand)`.
+        """
+        return_code: int = super().handle()
+        return return_code
+
+    def reset(self) -> None:
+        """
+        Reset current command instance's environment and poetry instances to ensure
+        use of the system specific ones.
+        """
+        self.reset_env()
+        self.reset_poetry()
+
+    def handle(self) -> int:
+        # We override the base class's handle() method to ensure that poetry and env
+        # are reset to work within the system project instead of current context.
+        # Further, during execution, the working directory is temporarily changed
+        # to parent directory of Poetry system pyproject.toml file.
+        #
+        # This method **should not** be overridden in child classes as it may have
+        # unexpected consequences.
+
+        self.reset()
+
+        with directory(self.system_pyproject.parent):
+            return self._system_project_handle()
diff --git a/vendor/poetry/src/poetry/console/commands/self/show/__init__.py b/vendor/poetry/src/poetry/console/commands/self/show/__init__.py
new file mode 100644
index 00000000..7212d69c
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/self/show/__init__.py
@@ -0,0 +1,34 @@
+from __future__ import annotations
+
+from cleo.helpers import option
+
+from poetry.console.commands.self.self_command import SelfCommand
+from poetry.console.commands.show import ShowCommand
+
+
+class SelfShowCommand(SelfCommand, ShowCommand):
+    name = "self show"
+    options = [
+        option("addons", None, "List only add-on packages installed."),
+        *[o for o in ShowCommand.options if o.name in {"tree", "latest", "outdated"}],
+    ]
+    description = "Show packages from Poetry's runtime environment."
+    help = f"""\
+The self show command behaves similarly to the show command, but
+works within Poetry's runtime environment. It lists all packages installed within
+the Poetry install environment.
+
+To show only additional packages that have been added via self add and their
+dependencies use self show --addons.
+
+This is managed in the {SelfCommand.get_default_system_pyproject_file()} \
+file.
+"""
+
+    @property
+    def activated_groups(self) -> set[str]:
+        if self.option("addons", False):
+            return {SelfCommand.ADDITIONAL_PACKAGE_GROUP}
+
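+        # Start the lookup after ShowCommand in the MRO so group resolution
+        # falls through to GroupCommand's option-driven implementation.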
+        groups: set[str] = super(ShowCommand, self).activated_groups
+        return groups
diff --git a/vendor/poetry/src/poetry/console/commands/self/show/plugins.py b/vendor/poetry/src/poetry/console/commands/self/show/plugins.py
new file mode 100644
index 00000000..fa265343
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/self/show/plugins.py
@@ -0,0 +1,118 @@
+from __future__ import annotations
+
+import dataclasses
+
+from typing import TYPE_CHECKING
+
+from poetry.console.commands.self.self_command import SelfCommand
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.package import Package
+
+    from poetry.utils._compat import metadata
+
+
+@dataclasses.dataclass
+class PluginPackage:
+    package: Package
+    plugins: list[metadata.EntryPoint] = dataclasses.field(default_factory=list)
+    application_plugins: list[metadata.EntryPoint] = dataclasses.field(
+        default_factory=list
+    )
+
+    def append(self, entry_point: metadata.EntryPoint) -> None:
+        from poetry.plugins.application_plugin import ApplicationPlugin
+        from poetry.plugins.plugin import Plugin
+
+        group = entry_point.group  # type: ignore[attr-defined]
+
+        if group == ApplicationPlugin.group:
+            self.application_plugins.append(entry_point)
+        elif group == Plugin.group:
+            self.plugins.append(entry_point)
+        else:
+            name = entry_point.name  # type: ignore[attr-defined]
+            raise ValueError(f"Unknown plugin group ({group}) for {name}")
+
+
+class SelfShowPluginsCommand(SelfCommand):
+    name = "self show plugins"
+    description = "Shows information about the currently installed plugins."
+    help = """\
+The self show plugins command lists all installed Poetry plugins.
+
+Plugins can be added and removed using the self add and self remove \
+commands respectively.
+
+This command does not list packages that do not provide a Poetry plugin.
+"""
+
+    def _system_project_handle(self) -> int:
+        from packaging.utils import canonicalize_name
+
+        from poetry.plugins.application_plugin import ApplicationPlugin
+        from poetry.plugins.plugin import Plugin
+        from poetry.plugins.plugin_manager import PluginManager
+        from poetry.repositories.installed_repository import InstalledRepository
+        from poetry.utils.env import EnvManager
+        from poetry.utils.helpers import pluralize
+
+        plugins: dict[str, PluginPackage] = {}
+
+        system_env = EnvManager.get_system_env(naive=True)
+        installed_repository = InstalledRepository.load(
+            system_env, with_dependencies=True
+        )
+
+        packages_by_name: dict[str, Package] = {
+            pkg.name: pkg for pkg in installed_repository.packages
+        }
+
+        for group in [ApplicationPlugin.group, Plugin.group]:
+            for entry_point in PluginManager(group).get_plugin_entry_points(
+                env=system_env
+            ):
+                assert entry_point.dist is not None
+
+                package = packages_by_name[canonicalize_name(entry_point.dist.name)]
+
+                name = package.pretty_name
+
+                info = plugins.get(name) or PluginPackage(package=package)
+                info.append(entry_point)
+
+                plugins[name] = info
+
+        for name, info in plugins.items():
+            package = info.package
+            description = " " + package.description if package.description else ""
+            self.line("")
+            self.line(f"  • {name} ({package.version}){description}")
+            provide_line = "     "
+
+            if info.plugins:
+                count = len(info.plugins)
+                provide_line += f" {count} plugin{pluralize(count)}"
+
+            if info.application_plugins:
+                if info.plugins:
+                    provide_line += " and"
+
+                count = len(info.application_plugins)
+                provide_line += (
+                    f" {count} application plugin{pluralize(count)}"
+                )
+
+            self.line(provide_line)
+
+            if package.requires:
+                self.line("")
+                self.line("      Dependencies")
+                for dependency in package.requires:
+                    self.line(
+                        f"        - {dependency.pretty_name}"
+                        f" ({dependency.pretty_constraint})"
+                    )
+
+        return 0
diff --git a/vendor/poetry/src/poetry/console/commands/self/update.py b/vendor/poetry/src/poetry/console/commands/self/update.py
new file mode 100644
index 00000000..f42a3d47
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/self/update.py
@@ -0,0 +1,58 @@
+from __future__ import annotations
+
+from cleo.helpers import argument
+from cleo.helpers import option
+from cleo.io.inputs.string_input import StringInput
+from cleo.io.io import IO
+
+from poetry.console.commands.add import AddCommand
+from poetry.console.commands.self.self_command import SelfCommand
+
+
+class SelfUpdateCommand(SelfCommand):
+    name = "self update"
+    description = "Updates Poetry to the latest version."
+
+    arguments = [
+        argument(
+            "version", "The version to update to.", optional=True, default="latest"
+        )
+    ]
+    options = [
+        option("preview", None, "Allow the installation of pre-release versions."),
+        option(
+            "dry-run",
+            None,
+            "Output the operations but do not execute anything "
+            "(implicitly enables --verbose).",
+        ),
+    ]
+    help = """\
+The self update command updates the Poetry version in its current runtime \
+environment.
+"""
+
+    def _system_project_handle(self) -> int:
+        self.write("Updating Poetry version ...\n\n")
+        application = self.get_application()
+        add_command = application.find("add")
+        assert isinstance(add_command, AddCommand)
+        add_command.set_env(self.env)
+        application.configure_installer_for_command(add_command, self.io)
+
+        argv = ["add", f"poetry@{self.argument('version')}"]
+
+        if self.option("dry-run"):
+            argv.append("--dry-run")
+
+        if self.option("preview"):
+            argv.append("--allow-prereleases")
+
+        exit_code: int = add_command.run(
+            IO(
+                StringInput(" ".join(argv)),
+                self.io.output,
+                self.io.error_output,
+            )
+        )
+        return exit_code
diff --git a/vendor/poetry/src/poetry/console/commands/shell.py b/vendor/poetry/src/poetry/console/commands/shell.py
new file mode 100644
index 00000000..a1be57c7
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/shell.py
@@ -0,0 +1,54 @@
+from __future__ import annotations
+
+import sys
+
+from os import environ
+from typing import TYPE_CHECKING
+from typing import cast
+
+from poetry.console.commands.env_command import EnvCommand
+
+
+if TYPE_CHECKING:
+    from poetry.utils.env import VirtualEnv
+
+
+class ShellCommand(EnvCommand):
+    name = "shell"
+    description = "Spawns a shell within the virtual environment."
+
+    help = """The shell command spawns a shell, according to the
+$SHELL environment variable, within the virtual environment.
+If one doesn't exist yet, it will be created.
+"""
+
+    def handle(self) -> int:
+        from poetry.utils.shell import Shell
+
+        # Check if it's already activated or doesn't exist and won't be created
+        if self._is_venv_activated():
+            self.line(
+                f"Virtual environment already activated: {self.env.path}"
+            )
+
+            return 0
+
+        self.line(f"Spawning shell within {self.env.path}")
+
+        # Be sure that we have the right type of environment.
+        env = self.env
+        assert env.is_venv()
+        env = cast("VirtualEnv", env)
+
+        # Setting this to avoid spawning unnecessary nested shells
+        environ["POETRY_ACTIVE"] = "1"
+        shell = Shell.get()
+        shell.activate(env)
+        environ.pop("POETRY_ACTIVE")
+
+        return 0
+
+    def _is_venv_activated(self) -> bool:
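+        # Activated either when Poetry itself set POETRY_ACTIVE for a spawned
+        # shell, or when the interpreter's prefix already points at this env.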
+        return bool(environ.get("POETRY_ACTIVE")) or getattr(
+            sys, "real_prefix", sys.prefix
+        ) == str(self.env.path)
diff --git a/vendor/poetry/src/poetry/console/commands/show.py b/vendor/poetry/src/poetry/console/commands/show.py
new file mode 100644
index 00000000..1b6463c5
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/show.py
@@ -0,0 +1,539 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from cleo.helpers import argument
+from cleo.helpers import option
+from packaging.utils import canonicalize_name
+
+from poetry.console.commands.group_command import GroupCommand
+
+
+if TYPE_CHECKING:
+    from cleo.io.io import IO
+    from packaging.utils import NormalizedName
+    from poetry.core.packages.dependency import Dependency
+    from poetry.core.packages.package import Package
+    from poetry.core.packages.project_package import ProjectPackage
+
+    from poetry.repositories.repository import Repository
+
+
+def reverse_deps(pkg: Package, repo: Repository) -> dict[str, str]:
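+    """Map each package in repo that depends on pkg to its constraint on pkg."""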
+    required_by = {}
+    for locked in repo.packages:
+        dependencies = {d.name: d.pretty_constraint for d in locked.requires}
+
+        if pkg.name in dependencies:
+            required_by[locked.pretty_name] = dependencies[pkg.name]
+
+    return required_by
+
+
+class ShowCommand(GroupCommand):
+    name = "show"
+    description = "Shows information about packages."
+
+    arguments = [argument("package", "The package to inspect", optional=True)]
+    options = [
+        *GroupCommand._group_dependency_options(),
+        option(
+            "no-dev",
+            None,
+            "Do not list the development dependencies. (Deprecated)",
+        ),
+        option("tree", "t", "List the dependencies as a tree."),
+        option(
+            "why",
+            None,
+            "When showing the full list, or a --tree for a single package,"
+            " also display why it's included.",
+        ),
+        option("latest", "l", "Show the latest version."),
+        option(
+            "outdated",
+            "o",
+            "Show the latest version but only for packages that are outdated.",
+        ),
+        option(
+            "all",
+            "a",
+            "Show all packages (even those not compatible with current system).",
+        ),
+    ]
+
+    help = """The show command displays detailed information about a package, or
+lists all packages available."""
+
+    colors = ["cyan", "yellow", "green", "magenta", "blue"]
+
+    def handle(self) -> int:
+        from cleo.io.null_io import NullIO
+        from cleo.terminal import Terminal
+
+        from poetry.puzzle.solver import Solver
+        from poetry.repositories.installed_repository import InstalledRepository
+        from poetry.repositories.pool import Pool
+        from poetry.utils.helpers import get_package_version_display_string
+
+        package = self.argument("package")
+
+        if self.option("tree"):
+            self.init_styles(self.io)
+
+        if self.option("why"):
+            if self.option("tree") and package is None:
+                self.line_error(
+                    "Error: --why requires a package when combined with"
+                    " --tree."
+                )
+
+                return 1
+
+            if not self.option("tree") and package:
+                self.line_error(
+                    "Error: --why cannot be used without --tree when displaying"
+                    " a single package."
+                )
+
+                return 1
+
+        if self.option("outdated"):
+            self.io.input.set_option("latest", True)
+
+        if not self.poetry.locker.is_locked():
+            self.line_error(
+                "Error: poetry.lock not found. Run `poetry lock` to create"
+                " it."
+            )
+            return 1
+
+        locked_repo = self.poetry.locker.locked_repository()
+        root = self.project_with_activated_groups_only()
+
+        # Show tree view if requested
+        if self.option("tree") and package is None:
+            requires = root.all_requires
+            packages = locked_repo.packages
+            for p in packages:
+                for require in requires:
+                    if p.name == require.name:
+                        self.display_package_tree(self.io, p, packages)
+                        break
+
+            return 0
+
+        table = self.table(style="compact")
+        locked_packages = locked_repo.packages
+        pool = Pool(ignore_repository_names=True)
+        pool.add_repository(locked_repo)
+        solver = Solver(
+            root,
+            pool=pool,
+            installed=[],
+            locked=locked_packages,
+            io=NullIO(),
+        )
+        solver.provider.load_deferred(False)
+        with solver.use_environment(self.env):
+            ops = solver.solve().calculate_operations()
+
+        required_locked_packages = {op.package for op in ops if not op.skipped}
+
+        if package:
+            pkg = None
+            for locked in locked_packages:
+                if canonicalize_name(package) == locked.name:
+                    pkg = locked
+                    break
+
+            if not pkg:
+                raise ValueError(f"Package {package} not found")
+
+            required_by = reverse_deps(pkg, locked_repo)
+
+            if self.option("tree"):
+                if self.option("why"):
+                    # The default case, when there are no reverse dependencies,
+                    # is to query the subtree for pkg; if any rev-deps exist we
+                    # query each of them in turn.
+                    packages = [pkg]
+                    if required_by:
+                        packages = [
+                            p
+                            for p in locked_packages
+                            for r in required_by.keys()
+                            if p.name == r
+                        ]
+                    else:
+                        # if no rev-deps exist we'll make this clear as it can otherwise
+                        # look very odd for packages that also have no or few direct
+                        # dependencies
+                        self.io.write_line(f"Package {package} is a direct dependency.")
+
+                    for p in packages:
+                        self.display_package_tree(
+                            self.io, p, locked_packages, why_package=pkg
+                        )
+
+                else:
+                    self.display_package_tree(self.io, pkg, locked_packages)
+
+                return 0
+
+            rows = [
+                ["name", f" : {pkg.pretty_name}"],
+                ["version", f" : {pkg.pretty_version}"],
+                ["description", f" : {pkg.description}"],
+            ]
+
+            table.add_rows(rows)
+            table.render()
+
+            if pkg.requires:
+                self.line("")
+                self.line("dependencies")
+                for dependency in pkg.requires:
+                    self.line(
+                        f" - {dependency.pretty_name}"
+                        f" {dependency.pretty_constraint}"
+                    )
+
+            if required_by:
+                self.line("")
+                self.line("required by")
+                for parent, requires_version in required_by.items():
+                    self.line(f" - {parent} {requires_version}")
+
+            return 0
+
+        show_latest = self.option("latest")
+        show_all = self.option("all")
+        terminal = Terminal()
+        width = terminal.width
+        name_length = version_length = latest_length = required_by_length = 0
+        latest_packages = {}
+        latest_statuses = {}
+        installed_repo = InstalledRepository.load(self.env)
+
+        # Computing widths
+        for locked in locked_packages:
+            if locked not in required_locked_packages and not show_all:
+                continue
+
+            current_length = len(locked.pretty_name)
+            if not self.io.output.is_decorated():
+                installed_status = self.get_installed_status(
+                    locked, installed_repo.packages
+                )
+
+                if installed_status == "not-installed":
+                    current_length += 4
+
+            if show_latest:
+                latest = self.find_latest_package(locked, root)
+                if not latest:
+                    latest = locked
+
+                latest_packages[locked.pretty_name] = latest
+                update_status = latest_statuses[
+                    locked.pretty_name
+                ] = self.get_update_status(latest, locked)
+
+                if not self.option("outdated") or update_status != "up-to-date":
+                    name_length = max(name_length, current_length)
+                    version_length = max(
+                        version_length,
+                        len(
+                            get_package_version_display_string(
+                                locked, root=self.poetry.file.parent
+                            )
+                        ),
+                    )
+                    latest_length = max(
+                        latest_length,
+                        len(
+                            get_package_version_display_string(
+                                latest, root=self.poetry.file.parent
+                            )
+                        ),
+                    )
+
+                    if self.option("why"):
+                        required_by = reverse_deps(locked, locked_repo)
+                        required_by_length = max(
+                            required_by_length,
+                            len(" from " + ",".join(required_by.keys())),
+                        )
+            else:
+                name_length = max(name_length, current_length)
+                version_length = max(
+                    version_length,
+                    len(
+                        get_package_version_display_string(
+                            locked, root=self.poetry.file.parent
+                        )
+                    ),
+                )
+
+                if self.option("why"):
+                    required_by = reverse_deps(locked, locked_repo)
+                    required_by_length = max(
+                        required_by_length, len(" from " + ",".join(required_by.keys()))
+                    )
+
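+        # Emit a column only when the accumulated widths still fit the terminal.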
+        write_version = name_length + version_length + 3 <= width
+        write_latest = name_length + version_length + latest_length + 3 <= width
+
+        why_end_column = (
+            name_length + version_length + latest_length + required_by_length
+        )
+        write_why = self.option("why") and (why_end_column + 3) <= width
+        write_description = (why_end_column + 24) <= width
+
+        for locked in locked_packages:
+            color = "cyan"
+            name = locked.pretty_name
+            install_marker = ""
+            if locked not in required_locked_packages:
+                if not show_all:
+                    continue
+
+                color = "black;options=bold"
+            else:
+                installed_status = self.get_installed_status(
+                    locked, installed_repo.packages
+                )
+                if installed_status == "not-installed":
+                    color = "red"
+
+                    if not self.io.output.is_decorated():
+                        # Not installed; flag it in non-decorated mode
+                        install_marker = " (!)"
+
+            if (
+                show_latest
+                and self.option("outdated")
+                and latest_statuses[locked.pretty_name] == "up-to-date"
+            ):
+                continue
+
+            line = (
+                f"<fg={color}>"
+                f"{name:{name_length - len(install_marker)}}{install_marker}</>"
+            )
+            if write_version:
+                version = get_package_version_display_string(
+                    locked, root=self.poetry.file.parent
+                )
+                line += f" {version:{version_length}}"
+            if show_latest:
+                latest = latest_packages[locked.pretty_name]
+                update_status = latest_statuses[locked.pretty_name]
+
+                if write_latest:
+                    color = "green"
+                    if update_status == "semver-safe-update":
+                        color = "red"
+                    elif update_status == "update-possible":
+                        color = "yellow"
+
+                    version = get_package_version_display_string(
+                        latest, root=self.poetry.file.parent
+                    )
+                    line += f" <fg={color}>{version:{latest_length}}</>"
+
+            if write_why:
+                required_by = reverse_deps(locked, locked_repo)
+                if required_by:
+                    content = ",".join(required_by.keys())
+                    # subtract 6 for ' from '
+                    line += f" from {content:{required_by_length - 6}}"
+                else:
+                    line += " " * required_by_length
+
+            if write_description:
+                description = locked.description
+                remaining = (
+                    width - name_length - version_length - required_by_length - 4
+                )
+
+                if show_latest:
+                    remaining -= latest_length
+
+                if len(locked.description) > remaining:
+                    description = description[: remaining - 3] + "..."
+
+                line += " " + description
+
+            self.line(line)
+
+        return 0
+
+    def display_package_tree(
+        self,
+        io: IO,
+        package: Package,
+        installed_packages: list[Package],
+        why_package: Package | None = None,
+    ) -> None:
+        io.write(f"{package.pretty_name}")
+        description = ""
+        if package.description:
+            description = " " + package.description
+
+        io.write_line(f" {package.pretty_version}{description}")
+
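+        # When a "why" package is given, only follow the dependency that
+        # leads to it; otherwise walk all requirements in sorted order.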
+        if why_package is not None:
+            dependencies = [p for p in package.requires if p.name == why_package.name]
+        else:
+            dependencies = package.requires
+            dependencies = sorted(
+                dependencies,
+                key=lambda x: x.name,
+            )
+
+        tree_bar = "├"
+        total = len(dependencies)
+        for i, dependency in enumerate(dependencies, 1):
+            if i == total:
+                tree_bar = "└"
+
+            level = 1
+            color = self.colors[level]
+            info = (
+                f"{tree_bar}── <{color}>{dependency.name}</{color}>"
+                f" {dependency.pretty_constraint}"
+            )
+            self._write_tree_line(io, info)
+
+            tree_bar = tree_bar.replace("└", " ")
+            packages_in_tree = [package.name, dependency.name]
+
+            self._display_tree(
+                io,
+                dependency,
+                installed_packages,
+                packages_in_tree,
+                tree_bar,
+                level + 1,
+            )
+
+    def _display_tree(
+        self,
+        io: IO,
+        dependency: Dependency,
+        installed_packages: list[Package],
+        packages_in_tree: list[NormalizedName],
+        previous_tree_bar: str = "├",
+        level: int = 1,
+    ) -> None:
+        previous_tree_bar = previous_tree_bar.replace("├", "│")
+
+        dependencies = []
+        for package in installed_packages:
+            if package.name == dependency.name:
+                dependencies = package.requires
+
+                break
+
+        dependencies = sorted(
+            dependencies,
+            key=lambda x: x.name,
+        )
+        tree_bar = previous_tree_bar + "   ├"
+        total = len(dependencies)
+        for i, dependency in enumerate(dependencies, 1):
+            current_tree = packages_in_tree
+            if i == total:
+                tree_bar = previous_tree_bar + "   └"
+
+            color_ident = level % len(self.colors)
+            color = self.colors[color_ident]
+
+            circular_warn = ""
+            if dependency.name in current_tree:
+                circular_warn = "(circular dependency aborted here)"
+
+            info = (
+                f"{tree_bar}── <{color}>{dependency.name}</{color}>"
+                f" {dependency.pretty_constraint} {circular_warn}"
+            )
+            self._write_tree_line(io, info)
+
+            tree_bar = tree_bar.replace("└", " ")
+
+            if dependency.name not in current_tree:
+                current_tree.append(dependency.name)
+
+                self._display_tree(
+                    io,
+                    dependency,
+                    installed_packages,
+                    current_tree,
+                    tree_bar,
+                    level + 1,
+                )
+
+    def _write_tree_line(self, io: IO, line: str) -> None:
+        if not io.output.supports_utf8():
+            line = line.replace("└", "`-")
+            line = line.replace("├", "|-")
+            line = line.replace("──", "-")
+            line = line.replace("│", "|")
+
+        io.write_line(line)
+
+    def init_styles(self, io: IO) -> None:
+        from cleo.formatters.style import Style
+
+        for color in self.colors:
+            style = Style(color)
+            io.output.formatter.set_style(color, style)
+            io.error_output.formatter.set_style(color, style)
+
+    def find_latest_package(
+        self, package: Package, root: ProjectPackage
+    ) -> Package | None:
+        from cleo.io.null_io import NullIO
+
+        from poetry.puzzle.provider import Provider
+        from poetry.version.version_selector import VersionSelector
+
+        # find the latest version allowed in this pool
+        if package.source_type in ("git", "file", "directory"):
+            requires = root.all_requires
+
+            for dep in requires:
+                if dep.name == package.name:
+                    provider = Provider(root, self.poetry.pool, NullIO())
+                    return provider.search_for_direct_origin_dependency(dep)
+
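+        # Fall back to the best candidate at or above the locked version.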
+        name = package.name
+        selector = VersionSelector(self.poetry.pool)
+
+        return selector.find_best_candidate(name, f">={package.pretty_version}")
+
+    def get_update_status(self, latest: Package, package: Package) -> str:
+        from poetry.core.semver.helpers import parse_constraint
+
+        if latest.full_pretty_version == package.full_pretty_version:
+            return "up-to-date"
+
+        constraint = parse_constraint("^" + package.pretty_version)
+
+        if constraint.allows(latest.version):
+            # It needs an immediate semver-compliant upgrade
+            return "semver-safe-update"
+
+        # it needs an upgrade but has potential BC breaks so is not urgent
+        return "update-possible"
+
+    def get_installed_status(
+        self, locked: Package, installed_packages: list[Package]
+    ) -> str:
+        for package in installed_packages:
+            if locked.name == package.name:
+                return "installed"
+
+        return "not-installed"
diff --git a/vendor/poetry/src/poetry/console/commands/source/__init__.py b/vendor/poetry/src/poetry/console/commands/source/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/console/commands/source/add.py b/vendor/poetry/src/poetry/console/commands/source/add.py
new file mode 100644
index 00000000..3011980c
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/source/add.py
@@ -0,0 +1,101 @@
+from __future__ import annotations
+
+from cleo.helpers import argument
+from cleo.helpers import option
+from cleo.io.null_io import NullIO
+from tomlkit.items import AoT
+
+from poetry.config.source import Source
+from poetry.console.commands.command import Command
+
+
+class SourceAddCommand(Command):
+    name = "source add"
+    description = "Add source configuration for the project."
+
+    arguments = [
+        argument(
+            "name",
+            "Source repository name.",
+        ),
+        argument("url", "Source repository url."),
+    ]
+
+    options = [
+        option(
+            "default",
+            "d",
+            "Set this source as the default (disable PyPI). A "
+            "default source will also be the fallback source if "
+            "you add other sources.",
+        ),
+        option("secondary", "s", "Set this source as secondary."),
+    ]
+
+    def handle(self) -> int:
+        from poetry.factory import Factory
+        from poetry.repositories import Pool
+        from poetry.utils.source import source_to_table
+
+        name = self.argument("name")
+        url = self.argument("url")
+        is_default = self.option("default")
+        is_secondary = self.option("secondary")
+
+        if is_default and is_secondary:
+            self.line_error(
+                "Cannot configure a source as both default and"
+                " secondary."
+            )
+            return 1
+
+        new_source: Source | None = Source(
+            name=name, url=url, default=is_default, secondary=is_secondary
+        )
+        existing_sources = self.poetry.get_sources()
+
+        sources = AoT([])
+
+        for source in existing_sources:
+            if source == new_source:
+                self.line(
+                    f"Source with name {name} already exists. Skipping"
+                    " addition."
+                )
+                return 0
+            elif source.default and is_default:
+                self.line_error(
+                    f"Source with name {source.name} is already set to"
+                    " default. Only one default source can be configured at a"
+                    " time."
+                )
+                return 1
+
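+            # A source with the same name replaces the existing entry in
+            # place; new_source is cleared so it is not appended again below.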
+            if new_source and source.name == name:
+                self.line(f"Source with name {name} already exists. Updating.")
+                source = new_source
+                new_source = None
+
+            sources.append(source_to_table(source))
+
+        if new_source is not None:
+            self.line(f"Adding source with name {name}.")
+            sources.append(source_to_table(new_source))
+
+        # ensure new source is valid. eg: invalid name etc.
+        self.poetry._pool = Pool()
+        try:
+            Factory.configure_sources(
+                self.poetry, sources, self.poetry.config, NullIO()
+            )
+            self.poetry.pool.repository(name)
+        except ValueError as e:
+            self.line_error(
+                f"Failed to validate addition of {name}: {e}"
+            )
+            return 1
+
+        self.poetry.pyproject.poetry_config["source"] = sources
+        self.poetry.pyproject.save()
+
+        return 0
diff --git a/vendor/poetry/src/poetry/console/commands/source/remove.py b/vendor/poetry/src/poetry/console/commands/source/remove.py
new file mode 100644
index 00000000..7d185bf1
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/source/remove.py
@@ -0,0 +1,44 @@
+from __future__ import annotations
+
+from cleo.helpers import argument
+from tomlkit.items import AoT
+
+from poetry.console.commands.command import Command
+
+
+class SourceRemoveCommand(Command):
+    name = "source remove"
+    description = "Remove source configured for the project."
+
+    arguments = [
+        argument(
+            "name",
+            "Source repository name.",
+        ),
+    ]
+
+    def handle(self) -> int:
+        from poetry.utils.source import source_to_table
+
+        name = self.argument("name")
+
+        sources = AoT([])
+        removed = False
+
+        for source in self.poetry.get_sources():
+            if source.name == name:
+                self.line(f"Removing source with name {source.name}.")
+                removed = True
+                continue
+            sources.append(source_to_table(source))
+
+        if not removed:
+            self.line_error(
+                f"Source with name {name} was not found."
+            )
+            return 1
+
+        self.poetry.pyproject.poetry_config["source"] = sources
+        self.poetry.pyproject.save()
+
+        return 0
diff --git a/vendor/poetry/src/poetry/console/commands/source/show.py b/vendor/poetry/src/poetry/console/commands/source/show.py
new file mode 100644
index 00000000..9643118c
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/source/show.py
@@ -0,0 +1,59 @@
+from __future__ import annotations
+
+from cleo.helpers import argument
+
+from poetry.console.commands.command import Command
+
+
+class SourceShowCommand(Command):
+    name = "source show"
+    description = "Show information about sources configured for the project."
+
+    arguments = [
+        argument(
+            "source",
+            "Source(s) to show information for. Defaults to showing all sources.",
+            optional=True,
+            multiple=True,
+        ),
+    ]
+
+    def handle(self) -> int:
+        sources = self.poetry.get_sources()
+        names = self.argument("source")
+
+        if not sources:
+            self.line("No sources configured for this project.")
+            return 0
+
+        if names and not any(s.name in names for s in sources):
+            self.line_error(f"No source found with name(s): {', '.join(names)}")
+            return 1
+
+        bool_string = {
+            True: "yes",
+            False: "no",
+        }
+
+        for source in sources:
+            if names and source.name not in names:
+                continue
+
+            table = self.table(style="compact")
+            rows = [
+                ["name", f" : {source.name}"],
+                ["url", f" : {source.url}"],
+                [
+                    "default",
+                    f" : {bool_string.get(source.default, False)}",
+                ],
+                [
+                    "secondary",
+                    f" : {bool_string.get(source.secondary, False)}",
+                ],
+            ]
+            table.add_rows(rows)
+            table.render()
+            self.line("")
+
+        return 0
diff --git a/vendor/poetry/src/poetry/console/commands/source/update.py b/vendor/poetry/src/poetry/console/commands/source/update.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/console/commands/update.py b/vendor/poetry/src/poetry/console/commands/update.py
new file mode 100644
index 00000000..b880f5a8
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/update.py
@@ -0,0 +1,54 @@
+from __future__ import annotations
+
+from cleo.helpers import argument
+from cleo.helpers import option
+
+from poetry.console.commands.installer_command import InstallerCommand
+
+
+class UpdateCommand(InstallerCommand):
+    name = "update"
+    description = "Update the dependencies according to the pyproject.toml file."
+
+    arguments = [
+        argument("packages", "The packages to update", optional=True, multiple=True)
+    ]
+    options = [
+        *InstallerCommand._group_dependency_options(),
+        option(
+            "no-dev",
+            None,
+            "Do not update the development dependencies."
+            " (Deprecated)",
+        ),
+        option(
+            "dry-run",
+            None,
+            "Output the operations but do not execute anything "
+            "(implicitly enables --verbose).",
+        ),
+        option("lock", None, "Do not perform operations (only update the lockfile)."),
+    ]
+
+    loggers = ["poetry.repositories.pypi_repository"]
+
+    def handle(self) -> int:
+        packages = self.argument("packages")
+
+        self.installer.use_executor(
+            self.poetry.config.get("experimental.new-installer", False)
+        )
+
+        if packages:
+            self.installer.whitelist({name: "*" for name in packages})
+
+        self.installer.only_groups(self.activated_groups)
+        self.installer.dry_run(self.option("dry-run"))
+        self.installer.execute_operations(not self.option("lock"))
+
+        # Force update
+        self.installer.update(True)
+
+        return self.installer.run()
diff --git a/vendor/poetry/src/poetry/console/commands/version.py b/vendor/poetry/src/poetry/console/commands/version.py
new file mode 100644
index 00000000..fbccd8a8
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/commands/version.py
@@ -0,0 +1,123 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from typing import Any
+
+from cleo.helpers import argument
+from cleo.helpers import option
+from tomlkit.toml_document import TOMLDocument
+
+from poetry.console.commands.command import Command
+
+
+if TYPE_CHECKING:
+    from poetry.core.semver.version import Version
+
+
+class VersionCommand(Command):
+    name = "version"
+    description = (
+        "Shows the version of the project or bumps it when a valid "
+        "bump rule is provided."
+    )
+
+    arguments = [
+        argument(
+            "version",
+            "The version number or the rule to update the version.",
+            optional=True,
+        )
+    ]
+    options = [
+        option("short", "s", "Output the version number only"),
+        option(
+            "dry-run",
+            None,
+            "Do not update pyproject.toml file",
+        ),
+    ]
+
+    help = """\
+The version command shows the current version of the project or bumps the version of
+the project and writes the new version back to pyproject.toml if a valid
+bump rule is provided.
+
+The new version should ideally be a valid semver string or a valid bump rule:
+patch, minor, major, prepatch, preminor, premajor, prerelease.
+"""
+
+    RESERVED = {
+        "major",
+        "minor",
+        "patch",
+        "premajor",
+        "preminor",
+        "prepatch",
+        "prerelease",
+    }
+
+    def handle(self) -> int:
+        version = self.argument("version")
+
+        if version:
+            version = self.increment_version(
+                self.poetry.package.pretty_version, version
+            )
+
+            if self.option("short"):
+                self.line(version.to_string())
+            else:
+                self.line(
+                    f"Bumping version from {self.poetry.package.pretty_version}"
+                    f" to {version}"
+                )
+
+            if not self.option("dry-run"):
+                content: dict[str, Any] = self.poetry.file.read()
+                poetry_content = content["tool"]["poetry"]
+                poetry_content["version"] = version.text
+
+                assert isinstance(content, TOMLDocument)
+                self.poetry.file.write(content)
+        else:
+            if self.option("short"):
+                self.line(self.poetry.package.pretty_version)
+            else:
+                self.line(
+                    f"{self.poetry.package.name}"
+                    f" {self.poetry.package.pretty_version}"
+                )
+
+        return 0
+
+    def increment_version(self, version: str, rule: str) -> Version:
+        from poetry.core.semver.version import Version
+
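+        # The "pre*" rules bump the corresponding component and then move to
+        # its first prerelease; a bare "prerelease" advances an existing
+        # prerelease or starts one on the next patch version.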
+        try:
+            parsed = Version.parse(version)
+        except ValueError:
+            raise ValueError("The project's version doesn't seem to follow semver")
+
+        if rule in {"major", "premajor"}:
+            new = parsed.next_major()
+            if rule == "premajor":
+                new = new.first_prerelease()
+        elif rule in {"minor", "preminor"}:
+            new = parsed.next_minor()
+            if rule == "preminor":
+                new = new.first_prerelease()
+        elif rule in {"patch", "prepatch"}:
+            new = parsed.next_patch()
+            if rule == "prepatch":
+                new = new.first_prerelease()
+        elif rule == "prerelease":
+            if parsed.is_unstable():
+                pre = parsed.pre
+                assert pre is not None
+                new = Version(parsed.epoch, parsed.release, pre.next())
+            else:
+                new = parsed.next_patch().first_prerelease()
+        else:
+            new = Version.parse(rule)
+
+        return new
diff --git a/vendor/poetry/src/poetry/console/events/__init__.py b/vendor/poetry/src/poetry/console/events/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/console/events/console_events.py b/vendor/poetry/src/poetry/console/events/console_events.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/console/exceptions.py b/vendor/poetry/src/poetry/console/exceptions.py
new file mode 100644
index 00000000..09fa60ad
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/exceptions.py
@@ -0,0 +1,7 @@
+from __future__ import annotations
+
+from cleo.exceptions import CleoSimpleException
+
+
+class PoetrySimpleConsoleException(CleoSimpleException):  # type: ignore[misc]
+    pass
diff --git a/vendor/poetry/src/poetry/console/io/__init__.py b/vendor/poetry/src/poetry/console/io/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/console/io/inputs/__init__.py b/vendor/poetry/src/poetry/console/io/inputs/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/console/io/inputs/run_argv_input.py b/vendor/poetry/src/poetry/console/io/inputs/run_argv_input.py
new file mode 100644
index 00000000..b27f19ca
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/io/inputs/run_argv_input.py
@@ -0,0 +1,86 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from cleo.io.inputs.argv_input import ArgvInput
+
+
+if TYPE_CHECKING:
+    from cleo.io.inputs.definition import Definition
+
+
+class RunArgvInput(ArgvInput):  # type: ignore[misc]
+    def __init__(
+        self,
+        argv: list[str] | None = None,
+        definition: Definition | None = None,
+    ) -> None:
+        super().__init__(argv, definition=definition)
+
+        self._parameter_options: list[str] = []
+
+    @property
+    def first_argument(self) -> str | None:
+        return "run"
+
+    def add_parameter_option(self, name: str) -> None:
+        self._parameter_options.append(name)
+
+    def has_parameter_option(
+        self, values: str | list[str], only_params: bool = False
+    ) -> bool:
+        if not isinstance(values, list):
+            values = [values]
+
+        for token in self._tokens:
+            if only_params and token == "--":
+                return False
+
+            for value in values:
+                if value not in self._parameter_options:
+                    continue
+
+                # Options with values:
+                # For long options, test for '--option=' at beginning
+                # For short options, test for '-o' at beginning
+                if value.find("--") == 0:
+                    leading = value + "="
+                else:
+                    leading = value
+
+                if token == value or (leading != "" and token.find(leading) == 0):
+                    return True
+
+        return False
+
+    def _parse(self) -> None:
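+        # Unlike the base ArgvInput, only options registered through
+        # add_parameter_option() are parsed as options; every other token is
+        # treated as a positional argument and passed through untouched.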
+        parse_options = True
+        self._parsed = self._tokens[:]
+
+        try:
+            token = self._parsed.pop(0)
+        except IndexError:
+            token = None
+
+        while token is not None:
+            if parse_options and token == "":
+                self._parse_argument(token)
+            elif parse_options and token == "--":
+                parse_options = False
+            elif parse_options and token.find("--") == 0:
+                if token in self._parameter_options:
+                    self._parse_long_option(token)
+                else:
+                    self._parse_argument(token)
+            elif parse_options and token[0] == "-" and token != "-":
+                if token in self._parameter_options:
+                    self._parse_short_option(token)
+                else:
+                    self._parse_argument(token)
+            else:
+                self._parse_argument(token)
+
+            try:
+                token = self._parsed.pop(0)
+            except IndexError:
+                token = None
diff --git a/vendor/poetry/src/poetry/console/logging/__init__.py b/vendor/poetry/src/poetry/console/logging/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/console/logging/filters.py b/vendor/poetry/src/poetry/console/logging/filters.py
new file mode 100644
index 00000000..3ae42b5e
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/logging/filters.py
@@ -0,0 +1,6 @@
+from __future__ import annotations
+
+import logging
+
+
+POETRY_FILTER = logging.Filter(name="poetry")
diff --git a/vendor/poetry/src/poetry/console/logging/formatters/__init__.py b/vendor/poetry/src/poetry/console/logging/formatters/__init__.py
new file mode 100644
index 00000000..b26ae50c
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/logging/formatters/__init__.py
@@ -0,0 +1,10 @@
+from __future__ import annotations
+
+from poetry.console.logging.formatters.builder_formatter import BuilderLogFormatter
+
+
+FORMATTERS = {
+    "poetry.core.masonry.builders.builder": BuilderLogFormatter(),
+    "poetry.core.masonry.builders.sdist": BuilderLogFormatter(),
+    "poetry.core.masonry.builders.wheel": BuilderLogFormatter(),
+}
diff --git a/vendor/poetry/poetry/console/logging/formatters/builder_formatter.py b/vendor/poetry/src/poetry/console/logging/formatters/builder_formatter.py
similarity index 82%
rename from vendor/poetry/poetry/console/logging/formatters/builder_formatter.py
rename to vendor/poetry/src/poetry/console/logging/formatters/builder_formatter.py
index 56bed9b6..541bb2fa 100644
--- a/vendor/poetry/poetry/console/logging/formatters/builder_formatter.py
+++ b/vendor/poetry/src/poetry/console/logging/formatters/builder_formatter.py
@@ -1,10 +1,12 @@
+from __future__ import annotations
+
 import re
 
-from .formatter import Formatter
+from poetry.console.logging.formatters.formatter import Formatter
 
 
 class BuilderLogFormatter(Formatter):
-    def format(self, msg):  # type: (str) -> str
+    def format(self, msg: str) -> str:
         if msg.startswith("Building "):
             msg = re.sub("Building (.+)", "  - Building \\1", msg)
         elif msg.startswith("Built "):
diff --git a/vendor/poetry/src/poetry/console/logging/formatters/formatter.py b/vendor/poetry/src/poetry/console/logging/formatters/formatter.py
new file mode 100644
index 00000000..8b595137
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/logging/formatters/formatter.py
@@ -0,0 +1,6 @@
+from __future__ import annotations
+
+
+class Formatter:
+    def format(self, record: str) -> str:
+        raise NotImplementedError()
diff --git a/vendor/poetry/src/poetry/console/logging/io_formatter.py b/vendor/poetry/src/poetry/console/logging/io_formatter.py
new file mode 100644
index 00000000..274e29e2
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/logging/io_formatter.py
@@ -0,0 +1,39 @@
+from __future__ import annotations
+
+import logging
+
+from typing import TYPE_CHECKING
+
+from poetry.console.logging.filters import POETRY_FILTER
+from poetry.console.logging.formatters import FORMATTERS
+
+
+if TYPE_CHECKING:
+    from logging import LogRecord
+
+
+class IOFormatter(logging.Formatter):
+    _colors = {
+        "error": "fg=red",
+        "warning": "fg=yellow",
+        "debug": "debug",
+        "info": "fg=blue",
+    }
+
+    def format(self, record: LogRecord) -> str:
+        if not record.exc_info:
+            level = record.levelname.lower()
+            msg = record.msg
+
+            if record.name in FORMATTERS:
+                msg = FORMATTERS[record.name].format(msg)
+            elif level in self._colors:
+                msg = f"<{self._colors[level]}>{msg}</>"
+
+            record.msg = msg
+
+        if not POETRY_FILTER.filter(record):
+            # prefix third-party packages with name for easier debugging
+            record.msg = f"[{record.name}] {record.msg}"
+
+        return super().format(record)
diff --git a/vendor/poetry/src/poetry/console/logging/io_handler.py b/vendor/poetry/src/poetry/console/logging/io_handler.py
new file mode 100644
index 00000000..213a87a3
--- /dev/null
+++ b/vendor/poetry/src/poetry/console/logging/io_handler.py
@@ -0,0 +1,30 @@
+from __future__ import annotations
+
+import logging
+
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from logging import LogRecord
+
+    from cleo.io.io import IO
+
+
+class IOHandler(logging.Handler):
+    def __init__(self, io: IO) -> None:
+        self._io = io
+
+        super().__init__()
+
+    def emit(self, record: LogRecord) -> None:
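+        # Route warning-and-above records to the error output and
+        # everything else to the standard output.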
+        try:
+            msg = self.format(record)
+            level = record.levelname.lower()
+            err = level in ("warning", "error", "exception", "critical")
+            if err:
+                self._io.write_error_line(msg)
+            else:
+                self._io.write_line(msg)
+        except Exception:
+            self.handleError(record)
diff --git a/vendor/poetry/src/poetry/exceptions.py b/vendor/poetry/src/poetry/exceptions.py
new file mode 100644
index 00000000..0d755667
--- /dev/null
+++ b/vendor/poetry/src/poetry/exceptions.py
@@ -0,0 +1,9 @@
+from __future__ import annotations
+
+
+class PoetryException(Exception):
+    pass
+
+
+class InvalidProjectFile(PoetryException):
+    pass
diff --git a/vendor/poetry/src/poetry/factory.py b/vendor/poetry/src/poetry/factory.py
new file mode 100644
index 00000000..dfa0a4d6
--- /dev/null
+++ b/vendor/poetry/src/poetry/factory.py
@@ -0,0 +1,315 @@
+from __future__ import annotations
+
+import contextlib
+import logging
+import re
+import warnings
+
+from typing import TYPE_CHECKING
+from typing import Any
+from typing import cast
+
+from cleo.io.null_io import NullIO
+from poetry.core.factory import Factory as BaseFactory
+from poetry.core.packages.dependency_group import MAIN_GROUP
+from poetry.core.packages.project_package import ProjectPackage
+from poetry.core.toml.file import TOMLFile
+
+from poetry.config.config import Config
+from poetry.json import validate_object
+from poetry.packages.locker import Locker
+from poetry.plugins.plugin import Plugin
+from poetry.plugins.plugin_manager import PluginManager
+from poetry.poetry import Poetry
+
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from cleo.io.io import IO
+    from poetry.core.packages.package import Package
+    from tomlkit.toml_document import TOMLDocument
+
+    from poetry.repositories.legacy_repository import LegacyRepository
+    from poetry.utils.dependency_specification import DependencySpec
+
+
+logger = logging.getLogger(__name__)
+
+
+class Factory(BaseFactory):
+    """
+    Factory class to create various elements needed by Poetry.
+    """
+
+    def create_poetry(
+        self,
+        cwd: Path | None = None,
+        with_groups: bool = True,
+        io: IO | None = None,
+        disable_plugins: bool = False,
+        disable_cache: bool = False,
+    ) -> Poetry:
+        if io is None:
+            io = NullIO()
+
+        base_poetry = super().create_poetry(cwd=cwd, with_groups=with_groups)
+
+        locker = Locker(
+            base_poetry.file.parent / "poetry.lock", base_poetry.local_config
+        )
+
+        # Loading global configuration
+        with warnings.catch_warnings():
+            # This is preserved to ensure the export plugin tests pass in CI;
+            # once poetry-plugin-export is updated to a version that does not
+            # use Factory.create_config(), this can be safely removed.
+            warnings.filterwarnings("ignore", category=DeprecationWarning)
+            config = self.create_config()
+
+        # Loading local configuration
+        local_config_file = TOMLFile(base_poetry.file.parent / "poetry.toml")
+        if local_config_file.exists():
+            if io.is_debug():
+                io.write_line(f"Loading configuration file {local_config_file.path}")
+
+            config.merge(local_config_file.read())
+
+        # Load local sources
+        repositories = {}
+        existing_repositories = config.get("repositories", {})
+        for source in base_poetry.pyproject.poetry_config.get("source", []):
+            name = source.get("name")
+            url = source.get("url")
+            if name and url and name not in existing_repositories:
+                repositories[name] = {"url": url}
+
+        config.merge({"repositories": repositories})
+
+        poetry = Poetry(
+            base_poetry.file.path,
+            base_poetry.local_config,
+            base_poetry.package,
+            locker,
+            config,
+        )
+
+        # Configuring sources
+        self.configure_sources(
+            poetry,
+            poetry.local_config.get("source", []),
+            config,
+            io,
+            disable_cache=disable_cache,
+        )
+
+        plugin_manager = PluginManager(Plugin.group, disable_plugins=disable_plugins)
+        plugin_manager.load_plugins()
+        poetry.set_plugin_manager(plugin_manager)
+        plugin_manager.activate(poetry, io)
+
+        return poetry
+
+    @classmethod
+    def get_package(cls, name: str, version: str) -> ProjectPackage:
+        return ProjectPackage(name, version, version)
+
+    @classmethod
+    def create_config(cls, io: IO | None = None) -> Config:
+        if io is not None:
+            logger.debug("Ignoring provided io when creating config.")
+        warnings.warn(
+            "Use of Factory.create_config() is deprecated, use Config.create() instead",
+            DeprecationWarning,
+        )
+        return Config.create()
+
+    @classmethod
+    def configure_sources(
+        cls,
+        poetry: Poetry,
+        sources: list[dict[str, str]],
+        config: Config,
+        io: IO,
+        disable_cache: bool = False,
+    ) -> None:
+        if disable_cache:
+            logger.debug("Disabling source caches")
+
+        for source in sources:
+            repository = cls.create_package_source(
+                source, config, disable_cache=disable_cache
+            )
+            is_default = bool(source.get("default", False))
+            is_secondary = bool(source.get("secondary", False))
+            if io.is_debug():
+                message = f"Adding repository {repository.name} ({repository.url})"
+                if is_default:
+                    message += " and setting it as the default one"
+                elif is_secondary:
+                    message += " and setting it as secondary"
+
+                io.write_line(message)
+
+            poetry.pool.add_repository(repository, is_default, secondary=is_secondary)
+
+        # Put PyPI last to prefer private repositories
+        # unless we have no default source AND no primary sources
+        # (default = false, secondary = false)
+        if poetry.pool.has_default():
+            if io.is_debug():
+                io.write_line("Deactivating the PyPI repository")
+        else:
+            from poetry.repositories.pypi_repository import PyPiRepository
+
+            default = not poetry.pool.has_primary_repositories()
+            poetry.pool.add_repository(
+                PyPiRepository(disable_cache=disable_cache), default, not default
+            )
+
+    @classmethod
+    def create_package_source(
+        cls, source: dict[str, str], auth_config: Config, disable_cache: bool = False
+    ) -> LegacyRepository:
+        from poetry.repositories.legacy_repository import LegacyRepository
+        from poetry.repositories.single_page_repository import SinglePageRepository
+
+        if "url" not in source:
+            raise RuntimeError("Unsupported source specified")
+
+        # PyPI-like repository
+        if "name" not in source:
+            raise RuntimeError("Missing [name] in source.")
+        name = source["name"]
+        url = source["url"]
+
+        repository_class = LegacyRepository
+
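+        # A url pointing directly at an HTML page is handled by the
+        # single-page repository class rather than the regular index lookup.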
+        if re.match(r".*\.(htm|html)$", url):
+            repository_class = SinglePageRepository
+
+        return repository_class(
+            name,
+            url,
+            config=auth_config,
+            disable_cache=disable_cache,
+        )
+
+    @classmethod
+    def create_pyproject_from_package(
+        cls, package: Package, path: Path | None = None
+    ) -> TOMLDocument:
+        import tomlkit
+
+        from poetry.utils.dependency_specification import dependency_to_specification
+
+        pyproject: dict[str, Any] = tomlkit.document()
+
+        pyproject["tool"] = tomlkit.table(is_super_table=True)
+
+        content: dict[str, Any] = tomlkit.table()
+        pyproject["tool"]["poetry"] = content
+
+        content["name"] = package.name
+        content["version"] = package.version.text
+        content["description"] = package.description
+        content["authors"] = package.authors
+        content["license"] = package.license.id if package.license else ""
+
+        if package.classifiers:
+            content["classifiers"] = package.classifiers
+
+        for key, attr in {
+            ("documentation", "documentation_url"),
+            ("repository", "repository_url"),
+            ("homepage", "homepage"),
+            ("maintainers", "maintainers"),
+            ("keywords", "keywords"),
+        }:
+            value = getattr(package, attr, None)
+            if value:
+                content[key] = value
+
+        readmes = []
+
+        for readme in package.readmes:
+            readme_posix_path = readme.as_posix()
+
+            with contextlib.suppress(ValueError):
+                if package.root_dir:
+                    readme_posix_path = readme.relative_to(package.root_dir).as_posix()
+
+            readmes.append(readme_posix_path)
+
+        if readmes:
+            content["readme"] = readmes
+
+        optional_dependencies = set()
+        extras_section = None
+
+        if package.extras:
+            extras_section = tomlkit.table()
+
+            for extra in package.extras:
+                _dependencies = []
+                for dependency in package.extras[extra]:
+                    _dependencies.append(dependency.name)
+                    optional_dependencies.add(dependency.name)
+
+                extras_section[extra] = _dependencies
+
+        dependency_section = content["dependencies"] = tomlkit.table()
+        dependency_section["python"] = package.python_versions
+
+        for dep in package.all_requires:
+            constraint: DependencySpec | str = dependency_to_specification(
+                dep, tomlkit.inline_table()
+            )
+
+            if not isinstance(constraint, str):
+                if dep.name in optional_dependencies:
+                    constraint["optional"] = True
+
+                if len(constraint) == 1 and "version" in constraint:
+                    constraint = cast(str, constraint["version"])
+                elif not constraint:
+                    constraint = "*"
+
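+            # Main-group dependencies go directly under [tool.poetry.dependencies];
+            # other groups are nested under [tool.poetry.group.<name>.dependencies].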
+            for group in dep.groups:
+                if group == MAIN_GROUP:
+                    dependency_section[dep.name] = constraint
+                else:
+                    if "group" not in content:
+                        content["group"] = tomlkit.table(is_super_table=True)
+
+                    if group not in content["group"]:
+                        content["group"][group] = tomlkit.table(is_super_table=True)
+
+                    if "dependencies" not in content["group"][group]:
+                        content["group"][group]["dependencies"] = tomlkit.table()
+
+                    content["group"][group]["dependencies"][dep.name] = constraint
+
+        if extras_section:
+            content["extras"] = extras_section
+
+        pyproject = cast("TOMLDocument", pyproject)
+        pyproject.add(tomlkit.nl())
+
+        if path:
+            path.joinpath("pyproject.toml").write_text(
+                pyproject.as_string(), encoding="utf-8"
+            )
+
+        return pyproject
+
+    @classmethod
+    def validate(
+        cls, config: dict[str, Any], strict: bool = False
+    ) -> dict[str, list[str]]:
+        results = super().validate(config, strict)
+
+        results["errors"].extend(validate_object(config))
+
+        return results
diff --git a/vendor/poetry/src/poetry/inspection/__init__.py b/vendor/poetry/src/poetry/inspection/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/inspection/info.py b/vendor/poetry/src/poetry/inspection/info.py
new file mode 100644
index 00000000..0d3e760b
--- /dev/null
+++ b/vendor/poetry/src/poetry/inspection/info.py
@@ -0,0 +1,631 @@
+from __future__ import annotations
+
+import contextlib
+import functools
+import glob
+import logging
+import os
+import tarfile
+import zipfile
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
+
+import pkginfo
+
+from poetry.core.factory import Factory
+from poetry.core.packages.dependency import Dependency
+from poetry.core.packages.package import Package
+from poetry.core.pyproject.toml import PyProjectTOML
+from poetry.core.utils.helpers import parse_requires
+from poetry.core.utils.helpers import temporary_directory
+from poetry.core.version.markers import InvalidMarker
+
+from poetry.utils.env import EnvCommandError
+from poetry.utils.env import ephemeral_environment
+from poetry.utils.setup_reader import SetupReader
+
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+    from collections.abc import Iterator
+    from contextlib import AbstractContextManager
+
+    from poetry.core.packages.project_package import ProjectPackage
+
+
+logger = logging.getLogger(__name__)
+
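+# Script run in an isolated build environment to produce only the PEP 517
+# metadata for a project (no wheel is built); `source` and `dest` are
+# interpolated before execution.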
+PEP517_META_BUILD = """\
+import build
+import build.env
+import pep517
+
+source = '{source}'
+dest = '{dest}'
+
+with build.env.IsolatedEnvBuilder() as env:
+    builder = build.ProjectBuilder(
+        srcdir=source,
+        scripts_dir=env.scripts_dir,
+        python_executable=env.executable,
+        runner=pep517.quiet_subprocess_runner,
+    )
+    env.install(builder.build_system_requires)
+    env.install(builder.get_requires_for_build('wheel'))
+    builder.metadata_path(dest)
+"""
+
+PEP517_META_BUILD_DEPS = ["build===0.7.0", "pep517==0.12.0"]
+
+
+class PackageInfoError(ValueError):
+    def __init__(self, path: Path | str, *reasons: BaseException | str) -> None:
+        reasons = (f"Unable to determine package info for path: {path!s}",) + reasons
+        super().__init__("\n\n".join(str(msg).strip() for msg in reasons if msg))
+
+
+class PackageInfo:
+    def __init__(
+        self,
+        *,
+        name: str | None = None,
+        version: str | None = None,
+        summary: str | None = None,
+        platform: str | None = None,
+        requires_dist: list[str] | None = None,
+        requires_python: str | None = None,
+        files: list[dict[str, str]] | None = None,
+        yanked: str | bool = False,
+        cache_version: str | None = None,
+    ) -> None:
+        self.name = name
+        self.version = version
+        self.summary = summary
+        self.platform = platform
+        self.requires_dist = requires_dist
+        self.requires_python = requires_python
+        self.files = files or []
+        self.yanked = yanked
+        self._cache_version = cache_version
+        self._source_type: str | None = None
+        self._source_url: str | None = None
+        self._source_reference: str | None = None
+
+    @property
+    def cache_version(self) -> str | None:
+        return self._cache_version
+
+    def update(self, other: PackageInfo) -> PackageInfo:
+        self.name = other.name or self.name
+        self.version = other.version or self.version
+        self.summary = other.summary or self.summary
+        self.platform = other.platform or self.platform
+        self.requires_dist = other.requires_dist or self.requires_dist
+        self.requires_python = other.requires_python or self.requires_python
+        self.files = other.files or self.files
+        self._cache_version = other.cache_version or self._cache_version
+        return self
+
+    def asdict(self) -> dict[str, Any]:
+        """
+        Helper method to convert package info into a dictionary used for caching.
+        """
+        return {
+            "name": self.name,
+            "version": self.version,
+            "summary": self.summary,
+            "platform": self.platform,
+            "requires_dist": self.requires_dist,
+            "requires_python": self.requires_python,
+            "files": self.files,
+            "yanked": self.yanked,
+            "_cache_version": self._cache_version,
+        }
+
+    @classmethod
+    def load(cls, data: dict[str, Any]) -> PackageInfo:
+        """
+        Helper method to load data from a dictionary produced by `PackageInfo.asdict()`.
+
+        :param data: Data to load. This is expected to be a `dict` object output by
+            `asdict()`.
+        """
+        cache_version = data.pop("_cache_version", None)
+        return cls(cache_version=cache_version, **data)
+
+    def to_package(
+        self,
+        name: str | None = None,
+        extras: list[str] | None = None,
+        root_dir: Path | None = None,
+    ) -> Package:
+        """
+        Create a new `poetry.core.packages.package.Package` instance using metadata from
+        this instance.
+
+        :param name: Name to use for the package, if not specified name from this
+            instance is used.
+        :param extras: Extras to activate for this package.
+        :param root_dir:  Optional root directory to use for the package. If set,
+            dependency strings will be parsed relative to this directory.
+        """
+        name = name or self.name
+
+        if not name:
+            raise RuntimeError("Unable to create package with no name")
+
+        if not self.version:
+            # The version could not be determined, so we raise an error since it is
+            # mandatory.
+            raise RuntimeError(f"Unable to retrieve the package version for {name}")
+
+        package = Package(
+            name=name,
+            version=self.version,
+            source_type=self._source_type,
+            source_url=self._source_url,
+            source_reference=self._source_reference,
+            yanked=self.yanked,
+        )
+        if self.summary is not None:
+            package.description = self.summary
+        package.root_dir = root_dir
+        package.python_versions = self.requires_python or "*"
+        package.files = self.files
+
+        # If this is a local poetry project, we can extract "richer" requirement
+        # information, eg: development requirements etc.
+        if root_dir is not None:
+            path = root_dir
+        elif self._source_type == "directory" and self._source_url is not None:
+            path = Path(self._source_url)
+        else:
+            path = None
+
+        if path is not None:
+            poetry_package = self._get_poetry_package(path=path)
+            if poetry_package:
+                package.extras = poetry_package.extras
+                for dependency in poetry_package.requires:
+                    package.add_dependency(dependency)
+
+                return package
+
+        seen_requirements = set()
+
+        for req in self.requires_dist or []:
+            try:
+                # Attempt to parse the PEP-508 requirement string
+                dependency = Dependency.create_from_pep_508(req, relative_to=root_dir)
+            except InvalidMarker:
+                # Invalid marker: strip the markers and hope for the best
+                req = req.split(";")[0]
+                dependency = Dependency.create_from_pep_508(req, relative_to=root_dir)
+            except ValueError:
+                # Likely unable to parse constraint so we skip it
+                logger.debug(
+                    f"Invalid constraint ({req}) found in"
+                    f" {package.name}-{package.version} dependencies, skipping",
+                )
+                continue
+
+            if dependency.in_extras:
+                # this dependency is required by an extra package
+                for extra in dependency.in_extras:
+                    if extra not in package.extras:
+                        # this is the first time we encounter this extra for this
+                        # package
+                        package.extras[extra] = []
+
+                    package.extras[extra].append(dependency)
+
+            req = dependency.to_pep_508(with_extras=True)
+
+            if req not in seen_requirements:
+                package.add_dependency(dependency)
+                seen_requirements.add(req)
+
+        return package
+
+    @classmethod
+    def _from_distribution(
+        cls, dist: pkginfo.BDist | pkginfo.SDist | pkginfo.Wheel
+    ) -> PackageInfo:
+        """
+        Helper method to parse package information from a `pkginfo.Distribution`
+        instance.
+
+        :param dist: The distribution instance to parse information from.
+        """
+        requirements = None
+
+        if dist.requires_dist:
+            requirements = list(dist.requires_dist)
+        else:
+            requires = Path(dist.filename) / "requires.txt"
+            if requires.exists():
+                with requires.open(encoding="utf-8") as f:
+                    requirements = parse_requires(f.read())
+
+        info = cls(
+            name=dist.name,
+            version=dist.version,
+            summary=dist.summary,
+            platform=dist.supported_platforms,
+            requires_dist=requirements,
+            requires_python=dist.requires_python,
+        )
+
+        info._source_type = "file"
+        info._source_url = Path(dist.filename).resolve().as_posix()
+
+        return info
+
+    @classmethod
+    def _from_sdist_file(cls, path: Path) -> PackageInfo:
+        """
+        Helper method to parse package information from an sdist file. We attempt to
+        first inspect the file using `pkginfo.SDist`. If this does not provide us with
+        package requirements, we extract the source and handle it as a directory.
+
+        :param path: The sdist file to parse information from.
+        """
+        info = None
+
+        try:
+            info = cls._from_distribution(pkginfo.SDist(str(path)))
+        except ValueError:
+            # Unable to determine dependencies from the sdist metadata alone,
+            # so we fall through and unpack the archive instead
+            pass
+        else:
+            if info.requires_dist is not None:
+                # we successfully retrieved dependencies from sdist metadata
+                return info
+
+        # Still no dependencies found, so we unpack and introspect the sdist
+        suffix = path.suffix
+
+        context: Callable[
+            [str], AbstractContextManager[zipfile.ZipFile | tarfile.TarFile]
+        ]
+        if suffix == ".zip":
+            context = zipfile.ZipFile
+        else:
+            if suffix == ".bz2":
+                suffixes = path.suffixes
+                if len(suffixes) > 1 and suffixes[-2] == ".tar":
+                    suffix = ".tar.bz2"
+            else:
+                suffix = ".tar.gz"
+
+            context = tarfile.open
+
+        with temporary_directory() as tmp_str:
+            tmp = Path(tmp_str)
+            with context(path.as_posix()) as archive:
+                archive.extractall(tmp.as_posix())
+
+            # a little bit of guess work to determine the directory we care about
+            elements = list(tmp.glob("*"))
+
+            if len(elements) == 1 and elements[0].is_dir():
+                sdist_dir = elements[0]
+            else:
+                sdist_dir = tmp / path.name.removesuffix(suffix)
+                if not sdist_dir.is_dir():
+                    sdist_dir = tmp
+
+            # now this is an unpacked directory we know how to deal with
+            new_info = cls.from_directory(path=sdist_dir)
+
+        if not info:
+            return new_info
+
+        return info.update(new_info)
+
+    @staticmethod
+    def has_setup_files(path: Path) -> bool:
+        return any((path / f).exists() for f in SetupReader.FILES)
+
+    @classmethod
+    def from_setup_files(cls, path: Path) -> PackageInfo:
+        """
+        Mechanism to parse package information from a `setup.[py|cfg]` file. This uses
+        the implementation at `poetry.utils.setup_reader.SetupReader` in order to parse
+        the file. This is not reliable for complex setup files and should only be
+        attempted as a fallback.
+
+        :param path: Path to `setup.py` file
+        """
+        if not cls.has_setup_files(path):
+            raise PackageInfoError(
+                path, "No setup files (setup.py, setup.cfg) were found."
+            )
+
+        try:
+            result = SetupReader.read_from_directory(path)
+        except Exception as e:
+            raise PackageInfoError(path, e)
+
+        python_requires = result["python_requires"]
+        if python_requires is None:
+            python_requires = "*"
+
+        requires = "".join(dep + "\n" for dep in result["install_requires"])
+        if result["extras_require"]:
+            requires += "\n"
+
+        for extra_name, deps in result["extras_require"].items():
+            requires += f"[{extra_name}]\n"
+
+            for dep in deps:
+                requires += dep + "\n"
+
+            requires += "\n"
+
+        requirements = parse_requires(requires)
+
+        info = cls(
+            name=result.get("name"),
+            version=result.get("version"),
+            summary=result.get("description", ""),
+            requires_dist=requirements or None,
+            requires_python=python_requires,
+        )
+
+        if not (info.name and info.version) and not info.requires_dist:
+            # there is nothing useful here
+            raise PackageInfoError(
+                path,
+                "No core metadata (name, version, requires-dist) could be retrieved.",
+            )
+
+        return info
+
+    @staticmethod
+    def _find_dist_info(path: Path) -> Iterator[Path]:
+        """
+        Discover all `*.*-info` directories in a given path.
+
+        :param path: Path to search.
+        """
+        pattern = "**/*.*-info"
+        # Sometimes pathlib will fail on recursive symbolic links, so we need
+        # to work around it and use the glob module instead.
+        directories = glob.iglob(path.joinpath(pattern).as_posix(), recursive=True)
+
+        for d in directories:
+            yield Path(d)
+
+    @classmethod
+    def from_metadata(cls, path: Path) -> PackageInfo | None:
+        """
+        Helper method to parse package information from an unpacked metadata directory.
+
+        :param path: The metadata directory to parse information from.
+        """
+        if path.suffix in {".dist-info", ".egg-info"}:
+            directories = [path]
+        else:
+            directories = list(cls._find_dist_info(path=path))
+
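+        # Try each candidate metadata directory; the for/else falls through to
+        # treating the path itself as an unpacked sdist when none of them parse.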
+        for directory in directories:
+            try:
+                if directory.suffix == ".egg-info":
+                    dist = pkginfo.UnpackedSDist(directory.as_posix())
+                elif directory.suffix == ".dist-info":
+                    dist = pkginfo.Wheel(directory.as_posix())
+                else:
+                    continue
+                break
+            except ValueError:
+                continue
+        else:
+            try:
+                # handle PKG-INFO in unpacked sdist root
+                dist = pkginfo.UnpackedSDist(path.as_posix())
+            except ValueError:
+                return None
+
+        return cls._from_distribution(dist=dist)
+
+    @classmethod
+    def from_package(cls, package: Package) -> PackageInfo:
+        """
+        Helper method to inspect a `Package` object, in order to generate package info.
+
+        :param package: This must be a poetry package instance.
+        """
+        requires = {dependency.to_pep_508() for dependency in package.requires}
+
+        for extra_requires in package.extras.values():
+            for dependency in extra_requires:
+                requires.add(dependency.to_pep_508())
+
+        return cls(
+            name=package.name,
+            version=str(package.version),
+            summary=package.description,
+            platform=package.platform,
+            requires_dist=list(requires),
+            requires_python=package.python_versions,
+            files=package.files,
+            yanked=package.yanked_reason if package.yanked else False,
+        )
+
+    @staticmethod
+    def _get_poetry_package(path: Path) -> ProjectPackage | None:
+        # Note: we ignore any setup.py file at this step
+        # TODO: add support for handling non-poetry PEP-517 builds
+        if PyProjectTOML(path.joinpath("pyproject.toml")).is_poetry_project():
+            with contextlib.suppress(RuntimeError):
+                return Factory().create_poetry(path).package
+
+        return None
+
+    @classmethod
+    def from_directory(cls, path: Path, disable_build: bool = False) -> PackageInfo:
+        """
+        Generate package information from a package source directory. If `disable_build`
+        is not `True` and introspection of all available metadata fails, an attempt is
+        made to build the package in an isolated environment in order to generate the
+        required metadata.
+
+        :param path: Path to generate package information from.
+        :param disable_build: If not `True` and setup reader fails, PEP 517 isolated
+            build is attempted in order to gather metadata.
+        """
+        project_package = cls._get_poetry_package(path)
+        info: PackageInfo | None
+        if project_package:
+            info = cls.from_package(project_package)
+        else:
+            info = cls.from_metadata(path)
+
+            if not info or info.requires_dist is None:
+                try:
+                    if disable_build:
+                        info = cls.from_setup_files(path)
+                    else:
+                        info = get_pep517_metadata(path)
+                except PackageInfoError:
+                    if not info:
+                        raise
+
+                    # we discovered PkgInfo but no requirements were listed
+
+        info._source_type = "directory"
+        info._source_url = path.as_posix()
+
+        return info
+
+    @classmethod
+    def from_sdist(cls, path: Path) -> PackageInfo:
+        """
+        Gather package information from an sdist file, packed or unpacked.
+
+        :param path: Path to an sdist file or unpacked directory.
+        """
+        if path.is_file():
+            return cls._from_sdist_file(path=path)
+
+        # if we get here then the path is not a file, so we assume it is an
+        # unpacked sdist directory
+        return cls.from_directory(path=path)
+
+    @classmethod
+    def from_wheel(cls, path: Path) -> PackageInfo:
+        """
+        Gather package information from a wheel.
+
+        :param path: Path to wheel.
+        """
+        try:
+            return cls._from_distribution(pkginfo.Wheel(str(path)))
+        except ValueError:
+            return PackageInfo()
+
+    @classmethod
+    def from_bdist(cls, path: Path) -> PackageInfo:
+        """
+        Gather package information from a bdist (wheel etc.).
+
+        :param path: Path to bdist.
+        """
+        if isinstance(path, (pkginfo.BDist, pkginfo.Wheel)):
+            return cls._from_distribution(dist=path)
+
+        if path.suffix == ".whl":
+            return cls.from_wheel(path=path)
+
+        try:
+            return cls._from_distribution(pkginfo.BDist(str(path)))
+        except ValueError as e:
+            raise PackageInfoError(path, e)
+
+    @classmethod
+    def from_path(cls, path: Path) -> PackageInfo:
+        """
+        Gather package information from a given path (bdist, sdist, directory).
+
+        :param path: Path to inspect.
+        """
+        try:
+            return cls.from_bdist(path=path)
+        except PackageInfoError:
+            return cls.from_sdist(path=path)
+
+
+@functools.lru_cache(maxsize=None)
+def get_pep517_metadata(path: Path) -> PackageInfo:
+    """
+    Helper method that uses a PEP 517 build to generate and read package metadata.
+
+    :param path: Path to package source to build and read metadata for.
+    """
+    info = None
+
+    with contextlib.suppress(PackageInfoError):
+        info = PackageInfo.from_setup_files(path)
+        if all([info.version, info.name, info.requires_dist]):
+            return info
+
+    with ephemeral_environment(
+        flags={"no-pip": False, "no-setuptools": False, "no-wheel": False}
+    ) as venv:
+        # TODO: cache PEP 517 build environment corresponding to each project venv
+        dest_dir = venv.path.parent / "dist"
+        dest_dir.mkdir()
+
+        pep517_meta_build_script = PEP517_META_BUILD.format(
+            source=path.as_posix(), dest=dest_dir.as_posix()
+        )
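+        # PEP517_META_BUILD (defined earlier in this module) is a small Python
+        # script that asks the project's build backend to write *.dist-info
+        # metadata for `source` into `dest`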
+
+        try:
+            venv.run_pip(
+                "install",
+                "--disable-pip-version-check",
+                "--ignore-installed",
+                *PEP517_META_BUILD_DEPS,
+            )
+            venv.run(
+                "python",
+                "-",
+                input_=pep517_meta_build_script,
+            )
+            info = PackageInfo.from_metadata(dest_dir)
+        except EnvCommandError as e:
+            # something went wrong while attempting pep517 metadata build
+            # fallback to egg_info if setup.py available
+            logger.debug("PEP517 build failed: %s", e)
+            setup_py = path / "setup.py"
+            if not setup_py.exists():
+                raise PackageInfoError(
+                    path,
+                    e,
+                    "No fallback setup.py file was found to generate egg_info.",
+                )
+
+            cwd = Path.cwd()
+            os.chdir(path.as_posix())
+            try:
+                venv.run("python", "setup.py", "egg_info")
+                info = PackageInfo.from_metadata(path)
+            except EnvCommandError as fbe:
+                raise PackageInfoError(
+                    path, "Fallback egg_info generation failed.", fbe
+                )
+            finally:
+                os.chdir(cwd.as_posix())
+
+    if info:
+        logger.debug("Falling back to parsed setup.py file for %s", path)
+        return info
+
+    # if we reach here, everything has failed and all hope is lost
+    raise PackageInfoError(path, "Exhausted all core metadata sources.")
diff --git a/vendor/poetry/src/poetry/installation/__init__.py b/vendor/poetry/src/poetry/installation/__init__.py
new file mode 100644
index 00000000..42ff15e3
--- /dev/null
+++ b/vendor/poetry/src/poetry/installation/__init__.py
@@ -0,0 +1,6 @@
+from __future__ import annotations
+
+from poetry.installation.installer import Installer
+
+
+__all__ = ["Installer"]
diff --git a/vendor/poetry/src/poetry/installation/base_installer.py b/vendor/poetry/src/poetry/installation/base_installer.py
new file mode 100644
index 00000000..8e1d3197
--- /dev/null
+++ b/vendor/poetry/src/poetry/installation/base_installer.py
@@ -0,0 +1,18 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.package import Package
+
+
+class BaseInstaller:
+    def install(self, package: Package) -> None:
+        raise NotImplementedError
+
+    def update(self, source: Package, target: Package) -> None:
+        raise NotImplementedError
+
+    def remove(self, package: Package) -> None:
+        raise NotImplementedError
diff --git a/vendor/poetry/src/poetry/installation/chef.py b/vendor/poetry/src/poetry/installation/chef.py
new file mode 100644
index 00000000..fa3eb267
--- /dev/null
+++ b/vendor/poetry/src/poetry/installation/chef.py
@@ -0,0 +1,88 @@
+from __future__ import annotations
+
+import hashlib
+import json
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+from poetry.installation.chooser import InvalidWheelName
+from poetry.installation.chooser import Wheel
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.utils.link import Link
+
+    from poetry.config.config import Config
+    from poetry.utils.env import Env
+
+
+class Chef:
+    def __init__(self, config: Config, env: Env) -> None:
+        self._env = env
+        self._cache_dir = (
+            Path(config.get("cache-dir")).expanduser().joinpath("artifacts")
+        )
+
+    def get_cached_archive_for_link(self, link: Link) -> Path | None:
+        archives = self.get_cached_archives_for_link(link)
+        if not archives:
+            return None
+
+        candidates: list[tuple[float | None, Path]] = []
+        for archive in archives:
+            if archive.suffix != ".whl":
+                candidates.append((float("inf"), archive))
+                continue
+
+            try:
+                wheel = Wheel(archive.name)
+            except InvalidWheelName:
+                continue
+
+            if not wheel.is_supported_by_environment(self._env):
+                continue
+
+            candidates.append(
+                (wheel.get_minimum_supported_index(self._env.supported_tags), archive),
+            )
+
+        if not candidates:
+            return None
+
+        return min(candidates)[1]
+
+    def get_cached_archives_for_link(self, link: Link) -> list[Path]:
+        cache_dir = self.get_cache_directory_for_link(link)
+
+        archive_types = ["whl", "tar.gz", "tar.bz2", "bz2", "zip"]
+        paths = []
+        for archive_type in archive_types:
+            for archive in cache_dir.glob(f"*.{archive_type}"):
+                paths.append(Path(archive))
+
+        return paths
+
+    def get_cache_directory_for_link(self, link: Link) -> Path:
+        key_parts = {"url": link.url_without_fragment}
+
+        if link.hash_name is not None and link.hash is not None:
+            key_parts[link.hash_name] = link.hash
+
+        if link.subdirectory_fragment:
+            key_parts["subdirectory"] = link.subdirectory_fragment
+
+        key_parts["interpreter_name"] = self._env.marker_env["interpreter_name"]
+        key_parts["interpreter_version"] = "".join(
+            self._env.marker_env["interpreter_version"].split(".")[:2]
+        )
+
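+        # Illustrative example (hypothetical values): for a link such as
+        # "https://example.org/foo-1.0.tar.gz#sha256=abc..." on CPython 3.10,
+        # key_parts resembles {"url": "...", "sha256": "abc...",
+        # "interpreter_name": "cpython", "interpreter_version": "310"}; the
+        # JSON-encoded dict is hashed and the digest split into 2/2/2/rest
+        # path segments below.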
+        key = hashlib.sha256(
+            json.dumps(
+                key_parts, sort_keys=True, separators=(",", ":"), ensure_ascii=True
+            ).encode("ascii")
+        ).hexdigest()
+
+        split_key = [key[:2], key[2:4], key[4:6], key[6:]]
+
+        return self._cache_dir.joinpath(*split_key)
diff --git a/vendor/poetry/src/poetry/installation/chooser.py b/vendor/poetry/src/poetry/installation/chooser.py
new file mode 100644
index 00000000..53e746fd
--- /dev/null
+++ b/vendor/poetry/src/poetry/installation/chooser.py
@@ -0,0 +1,219 @@
+from __future__ import annotations
+
+import logging
+import re
+
+from typing import TYPE_CHECKING
+from typing import Any
+
+from packaging.tags import Tag
+
+from poetry.config.config import Config
+from poetry.config.config import PackageFilterPolicy
+from poetry.utils.patterns import wheel_file_re
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.package import Package
+    from poetry.core.packages.utils.link import Link
+    from poetry.core.semver.version import Version
+
+    from poetry.repositories.pool import Pool
+    from poetry.utils.env import Env
+
+
+logger = logging.getLogger(__name__)
+
+
+class InvalidWheelName(Exception):
+    pass
+
+
+class Wheel:
+    def __init__(self, filename: str) -> None:
+        wheel_info = wheel_file_re.match(filename)
+        if not wheel_info:
+            raise InvalidWheelName(f"{filename} is not a valid wheel filename.")
+
+        self.filename = filename
+        self.name = wheel_info.group("name").replace("_", "-")
+        self.version = wheel_info.group("ver").replace("_", "-")
+        self.build_tag = wheel_info.group("build")
+        self.pyversions = wheel_info.group("pyver").split(".")
+        self.abis = wheel_info.group("abi").split(".")
+        self.plats = wheel_info.group("plat").split(".")
+
+        self.tags = {
+            Tag(x, y, z) for x in self.pyversions for y in self.abis for z in self.plats
+        }
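+        # e.g. (illustrative) "demo-1.0-py2.py3-none-any.whl" yields
+        # pyversions ["py2", "py3"], abis ["none"], plats ["any"] and thus
+        # the tags py2-none-any and py3-none-any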
+
+    def get_minimum_supported_index(self, tags: list[Tag]) -> int | None:
+        indexes = [tags.index(t) for t in self.tags if t in tags]
+
+        return min(indexes) if indexes else None
+
+    def is_supported_by_environment(self, env: Env) -> bool:
+        return bool(set(env.supported_tags).intersection(self.tags))
+
+
+class Chooser:
+    """
+    A Chooser chooses an appropriate release archive for packages.
+    """
+
+    def __init__(self, pool: Pool, env: Env, config: Config | None = None) -> None:
+        self._pool = pool
+        self._env = env
+        self._config = config or Config.create()
+        self._no_binary_policy: PackageFilterPolicy = PackageFilterPolicy(
+            self._config.get("installer.no-binary", [])
+        )
+
+    def choose_for(self, package: Package) -> Link:
+        """
+        Return the link of the selected archive for a given package.
+        """
+        links = []
+        for link in self._get_links(package):
+            if link.is_wheel:
+                if not self._no_binary_policy.allows(package.name):
+                    logger.debug(
+                        "Skipping wheel for %s as requested in no binary policy for"
+                        " package (%s)",
+                        link.filename,
+                        package.name,
+                    )
+                    continue
+
+                if not Wheel(link.filename).is_supported_by_environment(self._env):
+                    logger.debug(
+                        "Skipping wheel %s as this is not supported by the current"
+                        " environment",
+                        link.filename,
+                    )
+                    continue
+
+            if link.ext in {".egg", ".exe", ".msi", ".rpm", ".srpm"}:
+                logger.debug("Skipping unsupported distribution %s", link.filename)
+                continue
+
+            links.append(link)
+
+        if not links:
+            raise RuntimeError(f"Unable to find installation candidates for {package}")
+
+        # Get the best link
+        chosen = max(links, key=lambda link: self._sort_key(package, link))
+
+        return chosen
+
+    def _get_links(self, package: Package) -> list[Link]:
+        if package.source_type:
+            assert package.source_reference is not None
+            repository = self._pool.repository(package.source_reference)
+
+        elif not self._pool.has_repository("pypi"):
+            repository = self._pool.repositories[0]
+        else:
+            repository = self._pool.repository("pypi")
+        links = repository.find_links_for_package(package)
+
+        hashes = [f["hash"] for f in package.files]
+        if not hashes:
+            return links
+
+        selected_links = []
+        for link in links:
+            if not link.hash:
+                selected_links.append(link)
+                continue
+
+            assert link.hash_name is not None
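+            # e.g. (illustrative) "sha256:deadbeef...", matching the format
+            # stored in package.files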
+            h = link.hash_name + ":" + link.hash
+            if h not in hashes:
+                logger.debug(
+                    "Skipping %s as %s checksum does not match expected value",
+                    link.filename,
+                    link.hash_name,
+                )
+                continue
+
+            selected_links.append(link)
+
+        if links and not selected_links:
+            raise RuntimeError(
+                f"Retrieved digest for link {link.filename}({h}) not in poetry.lock"
+                f" metadata {hashes}"
+            )
+
+        return selected_links
+
+    def _sort_key(
+        self, package: Package, link: Link
+    ) -> tuple[int, int, int, Version, tuple[Any, ...], int]:
+        """
+        Function to pass as the `key` argument to a call to sorted() to sort
+        InstallationCandidates by preference.
+        Returns a tuple such that tuples sorting as greater using Python's
+        default comparison operator are more preferred.
+        The preference is as follows:
+        First and foremost, candidates with allowed (matching) hashes are
+        always preferred over candidates without matching hashes. This is
+        because e.g. if the only candidate with an allowed hash is yanked,
+        we still want to use that candidate.
+        Second, excepting hash considerations, candidates that have been
+        yanked (in the sense of PEP 592) are always less preferred than
+        candidates that haven't been yanked. Then:
+        If no wheels are found, candidates are sorted by version only.
+        If wheels are found, then the sort order is by version, then:
+          1. existing installs
+          2. wheels ordered via Wheel.get_minimum_supported_index(
+             self._env.supported_tags)
+          3. source archives
+        If prefer_binary was set, then all wheels are sorted above sources.
+        Note: it was considered to embed this logic into the Link
+              comparison operators, but then different sdist links
+              with the same version would have to be considered equal.
+        """
+        build_tag: tuple[Any, ...] = ()
+        binary_preference = 0
+        if link.is_wheel:
+            wheel = Wheel(link.filename)
+            if not wheel.is_supported_by_environment(self._env):
+                raise RuntimeError(
+                    f"{wheel.filename} is not a supported wheel for this platform. It "
+                    "can't be sorted."
+                )
+
+            # TODO: Binary preference
+            pri = -(wheel.get_minimum_supported_index(self._env.supported_tags) or 0)
+            if wheel.build_tag is not None:
+                match = re.match(r"^(\d+)(.*)$", wheel.build_tag)
+                if not match:
+                    raise ValueError(f"Unable to parse build tag: {wheel.build_tag}")
+                build_tag_groups = match.groups()
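+                # e.g. (illustrative) a build tag of "36abc" is split into
+                # (36, "abc") so that the numeric part sorts numerically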
+                build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
+        else:  # sdist
+            support_num = len(self._env.supported_tags)
+            pri = -support_num
+
+        has_allowed_hash = int(self._is_link_hash_allowed_for_package(link, package))
+
+        yank_value = int(not link.yanked)
+
+        return (
+            has_allowed_hash,
+            yank_value,
+            binary_preference,
+            package.version,
+            build_tag,
+            pri,
+        )
+
+    def _is_link_hash_allowed_for_package(self, link: Link, package: Package) -> bool:
+        if not link.hash:
+            return True
+
+        assert link.hash_name is not None
+        h = link.hash_name + ":" + link.hash
+
+        return h in {f["hash"] for f in package.files}
diff --git a/vendor/poetry/src/poetry/installation/executor.py b/vendor/poetry/src/poetry/installation/executor.py
new file mode 100644
index 00000000..14f31c55
--- /dev/null
+++ b/vendor/poetry/src/poetry/installation/executor.py
@@ -0,0 +1,825 @@
+from __future__ import annotations
+
+import csv
+import itertools
+import json
+import os
+import threading
+
+from concurrent.futures import ThreadPoolExecutor
+from concurrent.futures import wait
+from pathlib import Path
+from subprocess import CalledProcessError
+from typing import TYPE_CHECKING
+from typing import Any
+from typing import cast
+
+from cleo.io.null_io import NullIO
+from poetry.core.packages.file_dependency import FileDependency
+from poetry.core.packages.utils.link import Link
+from poetry.core.pyproject.toml import PyProjectTOML
+
+from poetry.installation.chef import Chef
+from poetry.installation.chooser import Chooser
+from poetry.installation.operations import Install
+from poetry.installation.operations import Uninstall
+from poetry.installation.operations import Update
+from poetry.utils._compat import decode
+from poetry.utils.authenticator import Authenticator
+from poetry.utils.env import EnvCommandError
+from poetry.utils.helpers import pluralize
+from poetry.utils.helpers import remove_directory
+from poetry.utils.pip import pip_install
+
+
+if TYPE_CHECKING:
+    from cleo.io.io import IO
+    from cleo.io.outputs.section_output import SectionOutput
+    from poetry.core.masonry.builders.builder import Builder
+    from poetry.core.packages.package import Package
+
+    from poetry.config.config import Config
+    from poetry.installation.operations.operation import Operation
+    from poetry.repositories import Pool
+    from poetry.utils.env import Env
+
+
+class Executor:
+    def __init__(
+        self,
+        env: Env,
+        pool: Pool,
+        config: Config,
+        io: IO,
+        parallel: bool | None = None,
+    ) -> None:
+        self._env = env
+        self._io = io
+        self._dry_run = False
+        self._enabled = True
+        self._verbose = False
+        self._authenticator = Authenticator(config, self._io)
+        self._chef = Chef(config, self._env)
+        self._chooser = Chooser(pool, self._env, config)
+
+        if parallel is None:
+            parallel = config.get("installer.parallel", True)
+
+        if parallel:
+            self._max_workers = self._get_max_workers(
+                desired_max_workers=config.get("installer.max-workers")
+            )
+        else:
+            self._max_workers = 1
+
+        self._executor = ThreadPoolExecutor(max_workers=self._max_workers)
+        self._total_operations = 0
+        self._executed_operations = 0
+        self._executed = {"install": 0, "update": 0, "uninstall": 0}
+        self._skipped = {"install": 0, "update": 0, "uninstall": 0}
+        self._sections: dict[int, SectionOutput] = {}
+        self._yanked_warnings: list[str] = []
+        self._lock = threading.Lock()
+        self._shutdown = False
+        self._hashes: dict[str, str] = {}
+
+    @property
+    def installations_count(self) -> int:
+        return self._executed["install"]
+
+    @property
+    def updates_count(self) -> int:
+        return self._executed["update"]
+
+    @property
+    def removals_count(self) -> int:
+        return self._executed["uninstall"]
+
+    def supports_fancy_output(self) -> bool:
+        return self._io.output.is_decorated() and not self._dry_run
+
+    def disable(self) -> Executor:
+        self._enabled = False
+
+        return self
+
+    def dry_run(self, dry_run: bool = True) -> Executor:
+        self._dry_run = dry_run
+
+        return self
+
+    def verbose(self, verbose: bool = True) -> Executor:
+        self._verbose = verbose
+
+        return self
+
+    def pip_install(
+        self, req: Path, upgrade: bool = False, editable: bool = False
+    ) -> int:
+        try:
+            pip_install(req, self._env, upgrade=upgrade, editable=editable)
+        except EnvCommandError as e:
+            output = decode(e.e.output)
+            if (
+                "KeyboardInterrupt" in output
+                or "ERROR: Operation cancelled by user" in output
+            ):
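+                # -2 is used as a sentinel for a user-initiated interrupt;
+                # _execute_operation converts it back into a KeyboardInterrupt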
+                return -2
+            raise
+
+        return 0
+
+    def execute(self, operations: list[Operation]) -> int:
+        self._total_operations = len(operations)
+        for job_type in self._executed:
+            self._executed[job_type] = 0
+            self._skipped[job_type] = 0
+
+        if operations and (self._enabled or self._dry_run):
+            self._display_summary(operations)
+
+        # We group operations by priority
+        groups = itertools.groupby(operations, key=lambda o: -o.priority)
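+        # note that itertools.groupby only groups consecutive elements, so
+        # this relies on `operations` already being ordered by priority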
+        self._sections = {}
+        self._yanked_warnings = []
+        for _, group in groups:
+            tasks = []
+            serial_operations = []
+            for operation in group:
+                if self._shutdown:
+                    break
+
+                # Some operations are unsafe; we must execute them serially in a group
+                # https://github.com/python-poetry/poetry/issues/3086
+                # https://github.com/python-poetry/poetry/issues/2658
+                #
+                # We need to explicitly check source type here, see:
+                # https://github.com/python-poetry/poetry-core/pull/98
+                is_parallel_unsafe = operation.job_type == "uninstall" or (
+                    operation.package.develop
+                    and operation.package.source_type in {"directory", "git"}
+                )
+                if not operation.skipped and is_parallel_unsafe:
+                    serial_operations.append(operation)
+                    continue
+
+                tasks.append(self._executor.submit(self._execute_operation, operation))
+
+            try:
+                wait(tasks)
+
+                for operation in serial_operations:
+                    wait([self._executor.submit(self._execute_operation, operation)])
+
+            except KeyboardInterrupt:
+                self._shutdown = True
+
+            if self._shutdown:
+                # cancel any tasks that have not started executing yet
+                for task in tasks:
+                    task.cancel()
+                self._executor.shutdown(wait=True)
+
+                break
+
+        for warning in self._yanked_warnings:
+            self._io.write_error_line(f"Warning: {warning}")
+
+        return 1 if self._shutdown else 0
+
+    @staticmethod
+    def _get_max_workers(desired_max_workers: int | None = None) -> int:
+        # This should be handled directly by ThreadPoolExecutor; however, on
+        # some systems the number of CPUs cannot be determined (it raises a
+        # NotImplementedError), so in that case we assume the system only has
+        # one CPU.
+        try:
+            default_max_workers = (os.cpu_count() or 1) + 4
+        except NotImplementedError:
+            default_max_workers = 5
+
+        if desired_max_workers is None:
+            return default_max_workers
+        return min(default_max_workers, desired_max_workers)
+
+    def _write(self, operation: Operation, line: str) -> None:
+        if not self.supports_fancy_output() or not self._should_write_operation(
+            operation
+        ):
+            return
+
+        if self._io.is_debug():
+            with self._lock:
+                section = self._sections[id(operation)]
+                section.write_line(line)
+
+            return
+
+        with self._lock:
+            section = self._sections[id(operation)]
+            section.clear()
+            section.write(line)
+
+    def _execute_operation(self, operation: Operation) -> None:
+        try:
+            op_message = self.get_operation_message(operation)
+            if self.supports_fancy_output():
+                if id(operation) not in self._sections and self._should_write_operation(
+                    operation
+                ):
+                    with self._lock:
+                        self._sections[id(operation)] = self._io.section()
+                        self._sections[id(operation)].write_line(
+                            f"  • {op_message}:"
+                            " Pending..."
+                        )
+            else:
+                if self._should_write_operation(operation):
+                    if not operation.skipped:
+                        self._io.write_line(f"  • {op_message}")
+                    else:
+                        self._io.write_line(
+                            f"  • {op_message}: Skipped for the following"
+                            f" reason: {operation.skip_reason}"
+                        )
+
+            try:
+                result = self._do_execute_operation(operation)
+            except EnvCommandError as e:
+                if e.e.returncode == -2:
+                    result = -2
+                else:
+                    raise
+
+            # A result of -2 means a KeyboardInterrupt was raised in a Python
+            # subprocess, so we re-raise KeyboardInterrupt here to be picked
+            # up by the error handler.
+            if result == -2:
+                raise KeyboardInterrupt
+        except Exception as e:
+            try:
+                from cleo.ui.exception_trace import ExceptionTrace
+
+                io: IO | SectionOutput
+                if not self.supports_fancy_output():
+                    io = self._io
+                else:
+                    message = (
+                        "  •"
+                        f" {self.get_operation_message(operation, error=True)}:"
+                        " Failed"
+                    )
+                    self._write(operation, message)
+                    io = self._sections.get(id(operation), self._io)
+
+                with self._lock:
+                    trace = ExceptionTrace(e)
+                    trace.render(io)
+                    io.write_line("")
+            finally:
+                with self._lock:
+                    self._shutdown = True
+        except KeyboardInterrupt:
+            try:
+                message = (
+                    "  •"
+                    f" {self.get_operation_message(operation, warning=True)}:"
+                    " Cancelled"
+                )
+                if not self.supports_fancy_output():
+                    self._io.write_line(message)
+                else:
+                    self._write(operation, message)
+            finally:
+                with self._lock:
+                    self._shutdown = True
+
+    def _do_execute_operation(self, operation: Operation) -> int:
+        method = operation.job_type
+
+        operation_message = self.get_operation_message(operation)
+        if operation.skipped:
+            if self.supports_fancy_output():
+                self._write(
+                    operation,
+                    f"  • {operation_message}: "
+                    "Skipped "
+                    "for the following reason: "
+                    f"{operation.skip_reason}",
+                )
+
+            self._skipped[operation.job_type] += 1
+
+            return 0
+
+        if not self._enabled or self._dry_run:
+            self._io.write_line(f"  • {operation_message}")
+
+            return 0
+
+        result: int = getattr(self, f"_execute_{method}")(operation)
+
+        if result != 0:
+            return result
+
+        operation_message = self.get_operation_message(operation, done=True)
+        message = f"  • {operation_message}"
+        self._write(operation, message)
+
+        self._increment_operations_count(operation, True)
+
+        return result
+
+    def _increment_operations_count(self, operation: Operation, executed: bool) -> None:
+        with self._lock:
+            if executed:
+                self._executed_operations += 1
+                self._executed[operation.job_type] += 1
+            else:
+                self._skipped[operation.job_type] += 1
+
+    def run_pip(self, *args: Any, **kwargs: Any) -> int:
+        try:
+            self._env.run_pip(*args, **kwargs)
+        except EnvCommandError as e:
+            output = decode(e.e.output)
+            if (
+                "KeyboardInterrupt" in output
+                or "ERROR: Operation cancelled by user" in output
+            ):
+                return -2
+
+            raise
+
+        return 0
+
+    def get_operation_message(
+        self,
+        operation: Operation,
+        done: bool = False,
+        error: bool = False,
+        warning: bool = False,
+    ) -> str:
+        base_tag = "fg=default"
+        operation_color = "c2"
+        source_operation_color = "c2"
+        package_color = "c1"
+
+        if error:
+            operation_color = "error"
+        elif warning:
+            operation_color = "warning"
+        elif done:
+            operation_color = "success"
+
+        if operation.skipped:
+            base_tag = "fg=default;options=dark"
+            operation_color += "_dark"
+            source_operation_color += "_dark"
+            package_color += "_dark"
+
+        if isinstance(operation, Install):
+            return (
+                f"<{base_tag}>Installing"
+                f" <{package_color}>{operation.package.name}</{package_color}>"
+                f" (<{operation_color}>{operation.package.full_pretty_version}</>)</>"
+            )
+
+        if isinstance(operation, Uninstall):
+            return (
+                f"<{base_tag}>Removing"
+                f" <{package_color}>{operation.package.name}</{package_color}>"
+                f" (<{operation_color}>{operation.package.full_pretty_version}</>)</>"
+            )
+
+        if isinstance(operation, Update):
+            return (
+                f"<{base_tag}>Updating"
+                f" <{package_color}>{operation.initial_package.name}</{package_color}> "
+                f"(<{source_operation_color}>"
+                f"{operation.initial_package.full_pretty_version}</>"
+                f" -> <{operation_color}>"
+                f"{operation.target_package.full_pretty_version}</>)</>"
+            )
+        return ""
+
+    def _display_summary(self, operations: list[Operation]) -> None:
+        installs = 0
+        updates = 0
+        uninstalls = 0
+        skipped = 0
+        for op in operations:
+            if op.skipped:
+                skipped += 1
+                continue
+
+            if op.job_type == "install":
+                installs += 1
+            elif op.job_type == "update":
+                updates += 1
+            elif op.job_type == "uninstall":
+                uninstalls += 1
+
+        if not installs and not updates and not uninstalls and not self._verbose:
+            self._io.write_line("")
+            self._io.write_line("No dependencies to install or update")
+
+            return
+
+        self._io.write_line("")
+        self._io.write("Package operations: ")
+        self._io.write(f"{installs} install{pluralize(installs)}, ")
+        self._io.write(f"{updates} update{pluralize(updates)}, ")
+        self._io.write(f"{uninstalls} removal{pluralize(uninstalls)}")
+        if skipped and self._verbose:
+            self._io.write(f", {skipped} skipped")
+        self._io.write_line("")
+        self._io.write_line("")
+
+    def _execute_install(self, operation: Install | Update) -> int:
+        status_code = self._install(operation)
+
+        self._save_url_reference(operation)
+
+        return status_code
+
+    def _execute_update(self, operation: Install | Update) -> int:
+        status_code = self._update(operation)
+
+        self._save_url_reference(operation)
+
+        return status_code
+
+    def _execute_uninstall(self, operation: Uninstall) -> int:
+        op_msg = self.get_operation_message(operation)
+        message = f"  • {op_msg}: Removing..."
+        self._write(operation, message)
+
+        return self._remove(operation)
+
+    def _install(self, operation: Install | Update) -> int:
+        package = operation.package
+        if package.source_type == "directory":
+            return self._install_directory(operation)
+
+        if package.source_type == "git":
+            return self._install_git(operation)
+
+        if package.source_type == "file":
+            archive = self._prepare_file(operation)
+        elif package.source_type == "url":
+            assert package.source_url is not None
+            archive = self._download_link(operation, Link(package.source_url))
+        else:
+            archive = self._download(operation)
+
+        operation_message = self.get_operation_message(operation)
+        message = f"  • {operation_message}: Installing..."
+        self._write(operation, message)
+        return self.pip_install(archive, upgrade=operation.job_type == "update")
+
+    def _update(self, operation: Install | Update) -> int:
+        return self._install(operation)
+
+    def _remove(self, operation: Uninstall) -> int:
+        package = operation.package
+
+        # If we have a VCS package, remove its source directory
+        if package.source_type == "git":
+            src_dir = self._env.path / "src" / package.name
+            if src_dir.exists():
+                remove_directory(src_dir, force=True)
+
+        try:
+            return self.run_pip("uninstall", package.name, "-y")
+        except CalledProcessError as e:
+            if "not installed" in str(e):
+                return 0
+
+            raise
+
+    def _prepare_file(self, operation: Install | Update) -> Path:
+        package = operation.package
+        operation_message = self.get_operation_message(operation)
+
+        message = f"  • {operation_message}: Preparing..."
+        self._write(operation, message)
+
+        assert package.source_url is not None
+        archive = Path(package.source_url)
+        if not Path(package.source_url).is_absolute() and package.root_dir:
+            archive = package.root_dir / archive
+
+        return archive
+
+    def _install_directory(self, operation: Install | Update) -> int:
+        from poetry.factory import Factory
+
+        package = operation.package
+        operation_message = self.get_operation_message(operation)
+
+        message = f"  • {operation_message}: Building..."
+        self._write(operation, message)
+
+        assert package.source_url is not None
+        if package.root_dir:
+            req = package.root_dir / package.source_url
+        else:
+            req = Path(package.source_url).resolve(strict=False)
+
+        if package.source_subdirectory:
+            req /= package.source_subdirectory
+
+        pyproject = PyProjectTOML(os.path.join(req, "pyproject.toml"))
+
+        if pyproject.is_poetry_project():
+            # Even if there is a build system specified
+            # some versions of pip (< 19.0.0) don't understand it
+            # so we need to check the version of pip to know
+            # if we can rely on the build system
+            legacy_pip = (
+                self._env.pip_version
+                < self._env.pip_version.__class__.from_parts(19, 0, 0)
+            )
+
+            try:
+                package_poetry = Factory().create_poetry(pyproject.file.path.parent)
+            except RuntimeError:
+                package_poetry = None
+
+            if package_poetry is not None:
+                builder: Builder
+                if package.develop and not package_poetry.package.build_script:
+                    from poetry.masonry.builders.editable import EditableBuilder
+
+                    # This is a Poetry package in editable mode
+                    # we can use the EditableBuilder without going through pip
+                    # to install it, unless it has a build script.
+                    builder = EditableBuilder(package_poetry, self._env, NullIO())
+                    builder.build()
+
+                    return 0
+                elif legacy_pip or package_poetry.package.build_script:
+                    from poetry.core.masonry.builders.sdist import SdistBuilder
+
+                    # We need to rely on creating a temporary setup.py
+                    # file since the version of pip does not support
+                    # build-systems
+                    # We also need it for non-PEP-517 packages
+                    builder = SdistBuilder(package_poetry)
+
+                    with builder.setup_py():
+                        if package.develop:
+                            return self.pip_install(req, upgrade=True, editable=True)
+                        return self.pip_install(req, upgrade=True)
+
+        if package.develop:
+            return self.pip_install(req, upgrade=True, editable=True)
+
+        return self.pip_install(req, upgrade=True)
+
+    def _install_git(self, operation: Install | Update) -> int:
+        from poetry.vcs.git import Git
+
+        package = operation.package
+        operation_message = self.get_operation_message(operation)
+
+        message = f"  • {operation_message}: Cloning..."
+        self._write(operation, message)
+
+        assert package.source_url is not None
+        source = Git.clone(
+            url=package.source_url,
+            source_root=self._env.path / "src",
+            revision=package.source_resolved_reference or package.source_reference,
+        )
+
+        # Now we just need to install from the source directory
+        original_url = package.source_url
+        package._source_url = str(source.path)
+
+        status_code = self._install_directory(operation)
+
+        package._source_url = original_url
+
+        return status_code
+
+    def _download(self, operation: Install | Update) -> Path:
+        link = self._chooser.choose_for(operation.package)
+
+        if link.yanked:
+            # Store yanked warnings in a list and print them after installing, so they
+            # cannot be overlooked. Printing them in the operation's own section would
+            # risk the warning being overwritten and thus only briefly visible.
+            message = (
+                f"The file chosen for install of {operation.package.pretty_name} "
+                f"{operation.package.pretty_version} ({link.show_url}) is yanked."
+            )
+            if link.yanked_reason:
+                message += f" Reason for being yanked: {link.yanked_reason}"
+            self._yanked_warnings.append(message)
+
+        return self._download_link(operation, link)
+
+    def _download_link(self, operation: Install | Update, link: Link) -> Path:
+        package = operation.package
+
+        archive = self._chef.get_cached_archive_for_link(link)
+        if archive is None:
+            # No cached distribution was found, so we download and prepare it
+            try:
+                archive = self._download_archive(operation, link)
+            except BaseException:
+                cache_directory = self._chef.get_cache_directory_for_link(link)
+                cached_file = cache_directory.joinpath(link.filename)
+                # We can't use unlink(missing_ok=True) because it's not available
+                # prior to Python 3.8
+                if cached_file.exists():
+                    cached_file.unlink()
+
+                raise
+
+        if package.files:
+            archive_hash = self._validate_archive_hash(archive, package)
+
+            self._hashes[package.name] = archive_hash
+
+        return archive
+
+    @staticmethod
+    def _validate_archive_hash(archive: Path, package: Package) -> str:
+        file_dep = FileDependency(package.name, archive)
+        archive_hash: str = "sha256:" + file_dep.hash()
+        known_hashes = {f["hash"] for f in package.files}
+
+        if archive_hash not in known_hashes:
+            raise RuntimeError(
+                f"Hash for {package} from archive {archive.name} not found in"
+                f" known hashes (was: {archive_hash})"
+            )
+
+        return archive_hash
+
+    def _download_archive(self, operation: Install | Update, link: Link) -> Path:
+        response = self._authenticator.request(
+            "get", link.url, stream=True, io=self._sections.get(id(operation), self._io)
+        )
+        wheel_size = response.headers.get("content-length")
+        operation_message = self.get_operation_message(operation)
+        message = f"  • {operation_message}: Downloading..."
+        progress = None
+        if self.supports_fancy_output():
+            if wheel_size is None:
+                self._write(operation, message)
+            else:
+                from cleo.ui.progress_bar import ProgressBar
+
+                progress = ProgressBar(
+                    self._sections[id(operation)], max=int(wheel_size)
+                )
+                progress.set_format(message + " %percent%%")
+
+        if progress:
+            with self._lock:
+                self._sections[id(operation)].clear()
+                progress.start()
+
+        done = 0
+        archive = self._chef.get_cache_directory_for_link(link) / link.filename
+        archive.parent.mkdir(parents=True, exist_ok=True)
+        with archive.open("wb") as f:
+            for chunk in response.iter_content(chunk_size=4096):
+                if not chunk:
+                    break
+
+                done += len(chunk)
+
+                if progress:
+                    with self._lock:
+                        progress.set_progress(done)
+
+                f.write(chunk)
+
+        if progress:
+            with self._lock:
+                progress.finish()
+
+        return archive
+
+    def _should_write_operation(self, operation: Operation) -> bool:
+        return not operation.skipped or self._dry_run or self._verbose
+
+    def _save_url_reference(self, operation: Operation) -> None:
+        """
+        Create and store a PEP-610 `direct_url.json` file, if needed.
+        """
+        if operation.job_type not in {"install", "update"}:
+            return
+
+        package = operation.package
+
+        if not package.source_url or package.source_type == "legacy":
+            # Since we are installing from our own distribution cache
+            # pip will write a `direct_url.json` file pointing to the cache
+            # distribution.
+            # That's not what we want, so we remove the direct_url.json file,
+            # if it exists.
+            for (
+                direct_url_json
+            ) in self._env.site_packages.find_distribution_direct_url_json_files(
+                distribution_name=package.name, writable_only=True
+            ):
+                # We can't use unlink(missing_ok=True) because it's not always available
+                if direct_url_json.exists():
+                    direct_url_json.unlink()
+            return
+
+        url_reference: dict[str, Any] | None = None
+
+        if package.source_type == "git":
+            url_reference = self._create_git_url_reference(package)
+        elif package.source_type == "url":
+            url_reference = self._create_url_url_reference(package)
+        elif package.source_type == "directory":
+            url_reference = self._create_directory_url_reference(package)
+        elif package.source_type == "file":
+            url_reference = self._create_file_url_reference(package)
+
+        if url_reference:
+            for dist in self._env.site_packages.distributions(
+                name=package.name, writable_only=True
+            ):
+                dist_path = cast(Path, dist._path)  # type: ignore[attr-defined]
+                url = dist_path / "direct_url.json"
+                url.write_text(json.dumps(url_reference), encoding="utf-8")
+
+                record = dist_path / "RECORD"
+                if record.exists():
+                    with record.open(mode="a", encoding="utf-8", newline="") as f:
+                        writer = csv.writer(f)
+                        path = url.relative_to(record.parent.parent)
+                        writer.writerow([str(path), "", ""])
+
+    def _create_git_url_reference(self, package: Package) -> dict[str, Any]:
+        reference = {
+            "url": package.source_url,
+            "vcs_info": {
+                "vcs": "git",
+                "requested_revision": package.source_reference,
+                "commit_id": package.source_resolved_reference,
+            },
+        }
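+        # e.g. (illustrative) the resulting PEP-610 payload looks like
+        # {"url": "https://github.com/org/repo.git", "vcs_info": {"vcs": "git",
+        #  "requested_revision": "main", "commit_id": "abc123"}}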
+        if package.source_subdirectory:
+            reference["subdirectory"] = package.source_subdirectory
+
+        return reference
+
+    def _create_url_url_reference(self, package: Package) -> dict[str, Any]:
+        archive_info = {}
+
+        if package.name in self._hashes:
+            archive_info["hash"] = self._hashes[package.name]
+
+        reference = {"url": package.source_url, "archive_info": archive_info}
+
+        return reference
+
+    def _create_file_url_reference(self, package: Package) -> dict[str, Any]:
+        archive_info = {}
+
+        if package.name in self._hashes:
+            archive_info["hash"] = self._hashes[package.name]
+
+        assert package.source_url is not None
+        return {
+            "url": Path(package.source_url).as_uri(),
+            "archive_info": archive_info,
+        }
+
+    def _create_directory_url_reference(self, package: Package) -> dict[str, Any]:
+        dir_info = {}
+
+        if package.develop:
+            dir_info["editable"] = True
+
+        assert package.source_url is not None
+        return {
+            "url": Path(package.source_url).as_uri(),
+            "dir_info": dir_info,
+        }
diff --git a/vendor/poetry/src/poetry/installation/installer.py b/vendor/poetry/src/poetry/installation/installer.py
new file mode 100644
index 00000000..b590483b
--- /dev/null
+++ b/vendor/poetry/src/poetry/installation/installer.py
@@ -0,0 +1,559 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from cleo.io.null_io import NullIO
+from packaging.utils import canonicalize_name
+
+from poetry.installation.executor import Executor
+from poetry.installation.operations import Install
+from poetry.installation.operations import Uninstall
+from poetry.installation.operations import Update
+from poetry.installation.pip_installer import PipInstaller
+from poetry.repositories import Pool
+from poetry.repositories import Repository
+from poetry.repositories.installed_repository import InstalledRepository
+from poetry.repositories.lockfile_repository import LockfileRepository
+from poetry.utils.extras import get_extra_package_names
+from poetry.utils.helpers import pluralize
+
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable
+    from collections.abc import Sequence
+
+    from cleo.io.io import IO
+    from poetry.core.packages.project_package import ProjectPackage
+
+    from poetry.config.config import Config
+    from poetry.installation.base_installer import BaseInstaller
+    from poetry.installation.operations.operation import Operation
+    from poetry.packages import Locker
+    from poetry.utils.env import Env
+
+
+class Installer:
+    def __init__(
+        self,
+        io: IO,
+        env: Env,
+        package: ProjectPackage,
+        locker: Locker,
+        pool: Pool,
+        config: Config,
+        installed: Repository | None = None,
+        executor: Executor | None = None,
+    ) -> None:
+        self._io = io
+        self._env = env
+        self._package = package
+        self._locker = locker
+        self._pool = pool
+
+        self._dry_run = False
+        self._requires_synchronization = False
+        self._update = False
+        self._verbose = False
+        self._write_lock = True
+        self._groups: Iterable[str] | None = None
+
+        self._execute_operations = True
+        self._lock = False
+
+        self._whitelist: list[str] = []
+
+        self._extras: list[str] = []
+
+        if executor is None:
+            executor = Executor(self._env, self._pool, config, self._io)
+
+        self._executor = executor
+        self._use_executor = False
+
+        self._installer = self._get_installer()
+        if installed is None:
+            installed = self._get_installed()
+
+        self._installed_repository = installed
+
+    @property
+    def executor(self) -> Executor:
+        return self._executor
+
+    @property
+    def installer(self) -> BaseInstaller:
+        return self._installer
+
+    def set_package(self, package: ProjectPackage) -> Installer:
+        self._package = package
+
+        return self
+
+    def set_locker(self, locker: Locker) -> Installer:
+        self._locker = locker
+
+        return self
+
+    def run(self) -> int:
+        # Check if refresh
+        if not self._update and self._lock and self._locker.is_locked():
+            return self._do_refresh()
+
+        # Force update if there is no lock file present
+        if not self._update and not self._locker.is_locked():
+            self._update = True
+
+        if self.is_dry_run():
+            self.verbose(True)
+            self._write_lock = False
+            self._execute_operations = False
+
+        return self._do_install()
+
+    def dry_run(self, dry_run: bool = True) -> Installer:
+        self._dry_run = dry_run
+        self._executor.dry_run(dry_run)
+
+        return self
+
+    def is_dry_run(self) -> bool:
+        return self._dry_run
+
+    def requires_synchronization(
+        self, requires_synchronization: bool = True
+    ) -> Installer:
+        self._requires_synchronization = requires_synchronization
+
+        return self
+
+    def verbose(self, verbose: bool = True) -> Installer:
+        self._verbose = verbose
+        self._executor.verbose(verbose)
+
+        return self
+
+    def is_verbose(self) -> bool:
+        return self._verbose
+
+    def only_groups(self, groups: Iterable[str]) -> Installer:
+        self._groups = groups
+
+        return self
+
+    def update(self, update: bool = True) -> Installer:
+        self._update = update
+
+        return self
+
+    def lock(self, update: bool = True) -> Installer:
+        """
+        Prepare the installer for locking only.
+        """
+        self.update(update=update)
+        self.execute_operations(False)
+        self._lock = True
+
+        return self
+
+    def is_updating(self) -> bool:
+        return self._update
+
+    def execute_operations(self, execute: bool = True) -> Installer:
+        self._execute_operations = execute
+
+        if not execute:
+            self._executor.disable()
+
+        return self
+
+    def whitelist(self, packages: Iterable[str]) -> Installer:
+        self._whitelist = [canonicalize_name(p) for p in packages]
+
+        return self
+
+    def extras(self, extras: list[str]) -> Installer:
+        self._extras = extras
+
+        return self
+
+    def use_executor(self, use_executor: bool = True) -> Installer:
+        self._use_executor = use_executor
+
+        return self
+
+    def _do_refresh(self) -> int:
+        from poetry.puzzle.solver import Solver
+
+        # Checking extras
+        for extra in self._extras:
+            if extra not in self._package.extras:
+                raise ValueError(f"Extra [{extra}] is not specified.")
+
+        locked_repository = self._locker.locked_repository()
+        solver = Solver(
+            self._package,
+            self._pool,
+            locked_repository.packages,
+            locked_repository.packages,
+            self._io,
+        )
+
+        with solver.provider.use_source_root(
+            source_root=self._env.path.joinpath("src")
+        ):
+            ops = solver.solve(use_latest=[]).calculate_operations()
+
+        lockfile_repo = LockfileRepository()
+        self._populate_lockfile_repo(lockfile_repo, ops)
+
+        self._write_lock_file(lockfile_repo, force=True)
+
+        return 0
+
+    def _do_install(self) -> int:
+        from poetry.puzzle.solver import Solver
+
+        locked_repository = Repository("poetry-locked")
+        if self._update:
+            if self._locker.is_locked() and not self._lock:
+                locked_repository = self._locker.locked_repository()
+
+                # If no packages have been whitelisted (the ones we want to update),
+                # we whitelist every package in the lock file.
+                if not self._whitelist:
+                    for pkg in locked_repository.packages:
+                        self._whitelist.append(pkg.name)
+
+            # Checking extras
+            for extra in self._extras:
+                if extra not in self._package.extras:
+                    raise ValueError(f"Extra [{extra}] is not specified.")
+
+            self._io.write_line("Updating dependencies")
+            solver = Solver(
+                self._package,
+                self._pool,
+                self._installed_repository.packages,
+                locked_repository.packages,
+                self._io,
+            )
+
+            with solver.provider.use_source_root(
+                source_root=self._env.path.joinpath("src")
+            ):
+                ops = solver.solve(use_latest=self._whitelist).calculate_operations()
+        else:
+            self._io.write_line("Installing dependencies from lock file")
+
+            locked_repository = self._locker.locked_repository()
+
+            if not self._locker.is_fresh():
+                self._io.write_error_line(
+                    "<warning>"
+                    "Warning: poetry.lock is not consistent with pyproject.toml. "
+                    "You may be getting improper dependencies. "
+                    "Run `poetry lock [--no-update]` to fix it."
+                    "</warning>"
+                )
+
+            for extra in self._extras:
+                if extra not in self._locker.lock_data.get("extras", {}):
+                    raise ValueError(f"Extra [{extra}] is not specified.")
+
+            # If we are installing from the lock file, filter the operations
+            # by comparing them with what is currently installed
+            ops = self._get_operations_from_lock(locked_repository)
+
+        lockfile_repo = LockfileRepository()
+        self._populate_lockfile_repo(lockfile_repo, ops)
+
+        if self._update:
+            self._write_lock_file(lockfile_repo)
+
+            if self._lock:
+                # If we are only in lock mode, no need to go any further
+                return 0
+
+        if self._groups is not None:
+            root = self._package.with_dependency_groups(list(self._groups), only=True)
+        else:
+            root = self._package.without_optional_dependency_groups()
+
+        if self._io.is_verbose():
+            self._io.write_line("")
+            self._io.write_line(
+                "Finding the necessary packages for the current system"
+            )
+
+        # We resolve again by only using the lock file
+        pool = Pool(ignore_repository_names=True)
+
+        # Making a new repo containing the packages
+        # newly resolved and the ones from the current lock file
+        repo = Repository("poetry-repo")
+        for package in lockfile_repo.packages + locked_repository.packages:
+            if not package.is_direct_origin() and not repo.has_package(package):
+                repo.add_package(package)
+
+        pool.add_repository(repo)
+
+        solver = Solver(
+            root,
+            pool,
+            self._installed_repository.packages,
+            locked_repository.packages,
+            NullIO(),
+        )
+        # Everything is resolved at this point, so we no longer need
+        # to load deferred dependencies (i.e. VCS, URL and path dependencies)
+        solver.provider.load_deferred(False)
+
+        with solver.use_environment(self._env):
+            ops = solver.solve(use_latest=self._whitelist).calculate_operations(
+                with_uninstalls=self._requires_synchronization,
+                synchronize=self._requires_synchronization,
+            )
+
+        if not self._requires_synchronization:
+            # If no package synchronisation has been requested, we need
+            # to calculate the uninstall operations
+            from poetry.puzzle.transaction import Transaction
+
+            transaction = Transaction(
+                locked_repository.packages,
+                [(package, 0) for package in lockfile_repo.packages],
+                installed_packages=self._installed_repository.packages,
+                root_package=root,
+            )
+
+            ops = [
+                op
+                for op in transaction.calculate_operations(with_uninstalls=True)
+                if op.job_type == "uninstall"
+            ] + ops
+
+        # We need to filter operations so that packages
+        # not compatible with the current system,
+        # or optional and not requested, are dropped
+        self._filter_operations(ops, lockfile_repo)
+
+        # Execute operations
+        return self._execute(ops)
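+
+    # Note: ``_do_install`` resolves twice: the first solve (or the lock file
+    # itself) determines the complete package set, then a second solve against
+    # a lock-only ``Pool`` narrows that set to what the current environment
+    # actually needs.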
+
+    def _write_lock_file(self, repo: LockfileRepository, force: bool = False) -> None:
+        if self._write_lock and (force or self._update):
+            updated_lock = self._locker.set_lock_data(self._package, repo.packages)
+
+            if updated_lock:
+                self._io.write_line("")
+                self._io.write_line("Writing lock file")
+
+    def _execute(self, operations: list[Operation]) -> int:
+        if self._use_executor:
+            return self._executor.execute(operations)
+
+        if not operations and (self._execute_operations or self._dry_run):
+            self._io.write_line("No dependencies to install or update")
+
+        if operations and (self._execute_operations or self._dry_run):
+            installs = 0
+            updates = 0
+            uninstalls = 0
+            skipped = 0
+            for op in operations:
+                if op.skipped:
+                    skipped += 1
+                elif op.job_type == "install":
+                    installs += 1
+                elif op.job_type == "update":
+                    updates += 1
+                elif op.job_type == "uninstall":
+                    uninstalls += 1
+
+            self._io.write_line("")
+            self._io.write("Package operations: ")
+            self._io.write(f"{installs} install{pluralize(installs)}, ")
+            self._io.write(f"{updates} update{pluralize(updates)}, ")
+            self._io.write(f"{uninstalls} removal{pluralize(uninstalls)}")
+            if skipped and self.is_verbose():
+                self._io.write(f", {skipped} skipped")
+            self._io.write_line("")
+
+        self._io.write_line("")
+
+        for op in operations:
+            self._execute_operation(op)
+
+        return 0
+
+    def _execute_operation(self, operation: Operation) -> None:
+        """
+        Execute a given operation.
+        """
+        method = operation.job_type
+
+        getattr(self, f"_execute_{method}")(operation)
+
+    def _execute_install(self, operation: Install) -> None:
+        target = operation.package
+        if operation.skipped:
+            if self.is_verbose() and (self._execute_operations or self.is_dry_run()):
+                self._io.write_line(
+                    f"  - Skipping {target.pretty_name}"
+                    f" ({target.full_pretty_version}) {operation.skip_reason}"
+                )
+
+            return
+
+        if self._execute_operations or self.is_dry_run():
+            self._io.write_line(
+                f"  - Installing {target.pretty_name}"
+                f" ({target.full_pretty_version})"
+            )
+
+        if not self._execute_operations:
+            return
+
+        self._installer.install(operation.package)
+
+    def _execute_update(self, operation: Update) -> None:
+        source = operation.initial_package
+        target = operation.target_package
+
+        if operation.skipped:
+            if self.is_verbose() and (self._execute_operations or self.is_dry_run()):
+                self._io.write_line(
+                    f"  - Skipping {target.pretty_name} "
+                    f"({target.full_pretty_version}) {operation.skip_reason}"
+                )
+
+            return
+
+        if self._execute_operations or self.is_dry_run():
+            self._io.write_line(
+                f"  - Updating {target.pretty_name}"
+                f" ({source.full_pretty_version} ->"
+                f" {target.full_pretty_version})"
+            )
+
+        if not self._execute_operations:
+            return
+
+        self._installer.update(source, target)
+
+    def _execute_uninstall(self, operation: Uninstall) -> None:
+        target = operation.package
+        if operation.skipped:
+            if self.is_verbose() and (self._execute_operations or self.is_dry_run()):
+                self._io.write_line(
+                    f"  - Not removing {target.pretty_name}"
+                    f" ({target.pretty_version}) {operation.skip_reason}"
+                )
+
+            return
+
+        if self._execute_operations or self.is_dry_run():
+            self._io.write_line(
+                f"  - Removing {target.pretty_name}"
+                f" ({target.pretty_version})"
+            )
+
+        if not self._execute_operations:
+            return
+
+        self._installer.remove(operation.package)
+
+    def _populate_lockfile_repo(
+        self, repo: LockfileRepository, ops: Sequence[Operation]
+    ) -> None:
+        for op in ops:
+            if isinstance(op, Uninstall):
+                continue
+            elif isinstance(op, Update):
+                package = op.target_package
+            else:
+                package = op.package
+
+            if not repo.has_package(package):
+                repo.add_package(package)
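+
+    # Note: ``Uninstall`` operations are skipped above on purpose, so the
+    # lockfile repository describes the target state after all operations run.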
+
+    def _get_operations_from_lock(
+        self, locked_repository: Repository
+    ) -> list[Operation]:
+        installed_repo = self._installed_repository
+        ops: list[Operation] = []
+
+        extra_packages = self._get_extra_packages(locked_repository)
+        for locked in locked_repository.packages:
+            is_installed = False
+            for installed in installed_repo.packages:
+                if locked.name == installed.name:
+                    is_installed = True
+                    if locked.optional and locked.name not in extra_packages:
+                        # Installed but optional and not requested in extras
+                        ops.append(Uninstall(locked))
+                    elif locked.version != installed.version:
+                        ops.append(Update(installed, locked))
+
+            # If it's optional and not in required extras
+            # we do not install
+            if locked.optional and locked.name not in extra_packages:
+                continue
+
+            op = Install(locked)
+            if is_installed:
+                op.skip("Already installed")
+
+            ops.append(op)
+
+        return ops
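+
+    # For illustration: a locked package becomes an ``Update`` when installed
+    # at a different version, an ``Uninstall`` when installed but optional and
+    # not requested via extras, and otherwise an ``Install`` that is skipped
+    # with "Already installed" if it is already present.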
+
+    def _filter_operations(self, ops: Sequence[Operation], repo: Repository) -> None:
+        extra_packages = self._get_extra_packages(repo)
+        for op in ops:
+            if isinstance(op, Update):
+                package = op.target_package
+            else:
+                package = op.package
+
+            if op.job_type == "uninstall":
+                continue
+
+            if not self._env.is_valid_for_marker(package.marker):
+                op.skip("Not needed for the current environment")
+                continue
+
+            if self._update:
+                extras = {}
+                for extra, dependencies in self._package.extras.items():
+                    extras[extra] = [dependency.name for dependency in dependencies]
+            else:
+                extras = {}
+                for extra, deps in self._locker.lock_data.get("extras", {}).items():
+                    extras[extra] = [dep.lower() for dep in deps]
+
+            # If a package is optional and not requested
+            # in any extra we skip it
+            if package.optional and package.name not in extra_packages:
+                op.skip("Not required")
+
+    def _get_extra_packages(self, repo: Repository) -> list[str]:
+        """
+        Returns all package names required by extras.
+
+        Maybe we just let the solver handle it?
+        """
+        extras: dict[str, list[str]]
+        if self._update:
+            extras = {k: [d.name for d in v] for k, v in self._package.extras.items()}
+        else:
+            extras = self._locker.lock_data.get("extras", {})
+
+        return list(get_extra_package_names(repo.packages, extras, self._extras))
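+
+    # The ``extras`` mapping passed above has the shape
+    # ``{extra_name: [package_name, ...]}``, e.g. (hypothetical names)
+    # ``{"docs": ["sphinx"], "test": ["pytest"]}``.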
+
+    def _get_installer(self) -> BaseInstaller:
+        return PipInstaller(self._env, self._io, self._pool)
+
+    def _get_installed(self) -> InstalledRepository:
+        return InstalledRepository.load(self._env)
diff --git a/vendor/poetry/src/poetry/installation/noop_installer.py b/vendor/poetry/src/poetry/installation/noop_installer.py
new file mode 100644
index 00000000..0a994f14
--- /dev/null
+++ b/vendor/poetry/src/poetry/installation/noop_installer.py
@@ -0,0 +1,37 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.installation.base_installer import BaseInstaller
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.package import Package
+
+
+class NoopInstaller(BaseInstaller):
+    def __init__(self) -> None:
+        self._installs: list[Package] = []
+        self._updates: list[tuple[Package, Package]] = []
+        self._removals: list[Package] = []
+
+    @property
+    def installs(self) -> list[Package]:
+        return self._installs
+
+    @property
+    def updates(self) -> list[tuple[Package, Package]]:
+        return self._updates
+
+    @property
+    def removals(self) -> list[Package]:
+        return self._removals
+
+    def install(self, package: Package) -> None:
+        self._installs.append(package)
+
+    def update(self, source: Package, target: Package) -> None:
+        self._updates.append((source, target))
+
+    def remove(self, package: Package) -> None:
+        self._removals.append(package)
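+
+
+# Note: ``NoopInstaller`` performs no real work; it only records the requested
+# operations, which makes it useful for dry runs and for tests that assert on
+# the recorded lists afterwards (hypothetical usage):
+#
+#     installer = NoopInstaller()
+#     installer.install(package)
+#     assert installer.installs == [package]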
diff --git a/vendor/poetry/src/poetry/installation/operations/__init__.py b/vendor/poetry/src/poetry/installation/operations/__init__.py
new file mode 100644
index 00000000..d579ac6b
--- /dev/null
+++ b/vendor/poetry/src/poetry/installation/operations/__init__.py
@@ -0,0 +1,8 @@
+from __future__ import annotations
+
+from poetry.installation.operations.install import Install
+from poetry.installation.operations.uninstall import Uninstall
+from poetry.installation.operations.update import Update
+
+
+__all__ = ["Install", "Uninstall", "Update"]
diff --git a/vendor/poetry/src/poetry/installation/operations/install.py b/vendor/poetry/src/poetry/installation/operations/install.py
new file mode 100644
index 00000000..3abb0fd5
--- /dev/null
+++ b/vendor/poetry/src/poetry/installation/operations/install.py
@@ -0,0 +1,38 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.installation.operations.operation import Operation
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.package import Package
+
+
+class Install(Operation):
+    def __init__(
+        self, package: Package, reason: str | None = None, priority: int = 0
+    ) -> None:
+        super().__init__(reason, priority=priority)
+
+        self._package = package
+
+    @property
+    def package(self) -> Package:
+        return self._package
+
+    @property
+    def job_type(self) -> str:
+        return "install"
+
+    def __str__(self) -> str:
+        return (
+            "Installing"
+            f" {self.package.pretty_name} ({self.format_version(self.package)})"
+        )
+
+    def __repr__(self) -> str:
+        return (
+            "<Install"
+            f" {self.package.pretty_name} ({self.format_version(self.package)})>"
+        )
diff --git a/vendor/poetry/src/poetry/installation/operations/operation.py b/vendor/poetry/src/poetry/installation/operations/operation.py
new file mode 100644
index 00000000..816618a1
--- /dev/null
+++ b/vendor/poetry/src/poetry/installation/operations/operation.py
@@ -0,0 +1,59 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from typing import TypeVar
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.package import Package
+
+T = TypeVar("T", bound="Operation")
+
+
+class Operation:
+    def __init__(self, reason: str | None = None, priority: int | float = 0) -> None:
+        self._reason = reason
+
+        self._skipped = False
+        self._skip_reason: str | None = None
+        self._priority = priority
+
+    @property
+    def job_type(self) -> str:
+        raise NotImplementedError
+
+    @property
+    def reason(self) -> str | None:
+        return self._reason
+
+    @property
+    def skipped(self) -> bool:
+        return self._skipped
+
+    @property
+    def skip_reason(self) -> str | None:
+        return self._skip_reason
+
+    @property
+    def priority(self) -> float | int:
+        return self._priority
+
+    @property
+    def package(self) -> Package:
+        raise NotImplementedError()
+
+    def format_version(self, package: Package) -> str:
+        version: str = package.full_pretty_version
+        return version
+
+    def skip(self: T, reason: str) -> T:
+        self._skipped = True
+        self._skip_reason = reason
+
+        return self
+
+    def unskip(self: T) -> T:
+        self._skipped = False
+        self._skip_reason = None
+
+        return self
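+
+
+# Note: ``skip()`` and ``unskip()`` return ``self`` so they can be chained;
+# the installer, for example, calls ``op.skip("Already installed")`` on
+# ``Install`` operations for packages that are already present.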
diff --git a/vendor/poetry/src/poetry/installation/operations/uninstall.py b/vendor/poetry/src/poetry/installation/operations/uninstall.py
new file mode 100644
index 00000000..9b4a4981
--- /dev/null
+++ b/vendor/poetry/src/poetry/installation/operations/uninstall.py
@@ -0,0 +1,41 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.installation.operations.operation import Operation
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.package import Package
+
+
+class Uninstall(Operation):
+    def __init__(
+        self,
+        package: Package,
+        reason: str | None = None,
+        priority: float | int = float("inf"),
+    ) -> None:
+        super().__init__(reason, priority=priority)
+
+        self._package = package
+
+    @property
+    def package(self) -> Package:
+        return self._package
+
+    @property
+    def job_type(self) -> str:
+        return "uninstall"
+
+    def __str__(self) -> str:
+        return (
+            "Uninstalling"
+            f" {self.package.pretty_name} ({self.format_version(self._package)})"
+        )
+
+    def __repr__(self) -> str:
+        return (
+            "<Uninstall"
+            f" {self.package.pretty_name} ({self.format_version(self._package)})>"
+        )
diff --git a/vendor/poetry/src/poetry/installation/operations/update.py b/vendor/poetry/src/poetry/installation/operations/update.py
new file mode 100644
index 00000000..e67cc869
--- /dev/null
+++ b/vendor/poetry/src/poetry/installation/operations/update.py
@@ -0,0 +1,55 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.installation.operations.operation import Operation
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.package import Package
+
+
+class Update(Operation):
+    def __init__(
+        self,
+        initial: Package,
+        target: Package,
+        reason: str | None = None,
+        priority: int = 0,
+    ) -> None:
+        self._initial_package = initial
+        self._target_package = target
+
+        super().__init__(reason, priority=priority)
+
+    @property
+    def initial_package(self) -> Package:
+        return self._initial_package
+
+    @property
+    def target_package(self) -> Package:
+        return self._target_package
+
+    @property
+    def package(self) -> Package:
+        return self._target_package
+
+    @property
+    def job_type(self) -> str:
+        return "update"
+
+    def __str__(self) -> str:
+        init_version = self.format_version(self.initial_package)
+        target_version = self.format_version(self.target_package)
+        return (
+            f"Updating {self.initial_package.pretty_name} ({init_version}) "
+            f"to {self.target_package.pretty_name} ({target_version})"
+        )
+
+    def __repr__(self) -> str:
+        init_version = self.format_version(self.initial_package)
+        target_version = self.format_version(self.target_package)
+        return (
+            f"<Update {self.initial_package.pretty_name} ({init_version}) "
+            f"to {self.target_package.pretty_name} ({target_version})>"
+        )
diff --git a/vendor/poetry/src/poetry/installation/pip_installer.py b/vendor/poetry/src/poetry/installation/pip_installer.py
new file mode 100644
index 00000000..a6ea6f46
--- /dev/null
+++ b/vendor/poetry/src/poetry/installation/pip_installer.py
@@ -0,0 +1,301 @@
+from __future__ import annotations
+
+import os
+import tempfile
+import urllib.parse
+
+from pathlib import Path
+from subprocess import CalledProcessError
+from typing import TYPE_CHECKING
+from typing import Any
+
+from poetry.core.pyproject.toml import PyProjectTOML
+from poetry.core.semver.version import Version
+
+from poetry.installation.base_installer import BaseInstaller
+from poetry.repositories.http import HTTPRepository
+from poetry.utils._compat import encode
+from poetry.utils.helpers import remove_directory
+from poetry.utils.pip import pip_install
+
+
+if TYPE_CHECKING:
+    from cleo.io.io import IO
+    from poetry.core.masonry.builders.builder import Builder
+    from poetry.core.packages.package import Package
+
+    from poetry.repositories.pool import Pool
+    from poetry.utils.env import Env
+
+
+class PipInstaller(BaseInstaller):
+    def __init__(self, env: Env, io: IO, pool: Pool) -> None:
+        self._env = env
+        self._io = io
+        self._pool = pool
+
+    def install(self, package: Package, update: bool = False) -> None:
+        if package.source_type == "directory":
+            self.install_directory(package)
+
+            return
+
+        if package.source_type == "git":
+            self.install_git(package)
+
+            return
+
+        args = ["install", "--no-deps"]
+
+        if (
+            package.source_type not in {"git", "directory", "file", "url"}
+            and package.source_url
+        ):
+            assert package.source_reference is not None
+            repository = self._pool.repository(package.source_reference)
+            parsed = urllib.parse.urlparse(package.source_url)
+            if parsed.scheme == "http":
+                assert parsed.hostname is not None
+                self._io.write_error(
+                    "    Installing from unsecure host:"
+                    f" {parsed.hostname}"
+                )
+                args += ["--trusted-host", parsed.hostname]
+
+            if isinstance(repository, HTTPRepository):
+                certificates = repository.certificates
+
+                if certificates.cert:
+                    args += ["--cert", str(certificates.cert)]
+
+                if parsed.scheme == "https" and not certificates.verify:
+                    assert parsed.hostname is not None
+                    args += ["--trusted-host", parsed.hostname]
+
+                if certificates.client_cert:
+                    args += ["--client-cert", str(certificates.client_cert)]
+
+                index_url = repository.authenticated_url
+
+                args += ["--index-url", index_url]
+
+            if (
+                self._pool.has_default()
+                and repository.name != self._pool.repositories[0].name
+            ):
+                first_repository = self._pool.repositories[0]
+
+                if isinstance(first_repository, HTTPRepository):
+                    args += [
+                        "--extra-index-url",
+                        first_repository.authenticated_url,
+                    ]
+
+        if update:
+            args.append("-U")
+
+        req: str | list[str]
+        if package.files and not package.source_url:
+            # Format as a requirements.txt
+            # We need to create a requirements.txt file
+            # for each package in order to check hashes.
+            # This is far from optimal but we do not have any
+            # other choice since this is the only way for pip
+            # to verify hashes.
+            req = self.create_temporary_requirement(package)
+            args += ["-r", req]
+
+            try:
+                self.run(*args)
+            finally:
+                os.unlink(req)
+        else:
+            req = self.requirement(package)
+            if not isinstance(req, list):
+                args.append(req)
+            else:
+                args += req
+
+            self.run(*args)
+
+    def update(self, package: Package, target: Package) -> None:
+        if package.source_type != target.source_type:
+            # If the source type has changed, we remove the current
+            # package to avoid perpetual updates in some cases
+            self.remove(package)
+
+        self.install(target, update=True)
+
+    def remove(self, package: Package) -> None:
+        try:
+            self.run("uninstall", package.name, "-y")
+        except CalledProcessError as e:
+            if "not installed" in str(e):
+                return
+
+            raise
+
+        # This is a workaround for https://github.com/pypa/pip/issues/4176
+        for nspkg_pth_file in self._env.site_packages.find_distribution_nspkg_pth_files(
+            distribution_name=package.name
+        ):
+            nspkg_pth_file.unlink()
+
+        # If we have a VCS package, remove its source directory
+        if package.source_type == "git":
+            src_dir = self._env.path / "src" / package.name
+            if src_dir.exists():
+                remove_directory(src_dir, force=True)
+
+    def run(self, *args: Any, **kwargs: Any) -> int | str:
+        return self._env.run_pip(*args, **kwargs)
+
+    def requirement(self, package: Package, formatted: bool = False) -> str | list[str]:
+        if formatted and not package.source_type:
+            req = f"{package.name}=={package.version}"
+            for f in package.files:
+                hash_type = "sha256"
+                h = f["hash"]
+                if ":" in h:
+                    hash_type, h = h.split(":")
+
+                req += f" --hash {hash_type}:{h}"
+
+            req += "\n"
+
+            return req
+
+        if package.source_type in ["file", "directory"]:
+            assert package.source_url is not None
+            if package.root_dir:
+                req = (package.root_dir / package.source_url).as_posix()
+            else:
+                req = os.path.realpath(package.source_url)
+
+            if package.develop and package.source_type == "directory":
+                return ["-e", req]
+
+            return req
+
+        if package.source_type == "git":
+            req = (
+                f"git+{package.source_url}@{package.source_reference}"
+                f"#egg={package.name}"
+            )
+
+            if package.source_subdirectory:
+                req += f"&subdirectory={package.source_subdirectory}"
+
+            if package.develop:
+                return ["-e", req]
+
+            return req
+
+        if package.source_type == "url":
+            return f"{package.source_url}#egg={package.name}"
+
+        return f"{package.name}=={package.version}"
+
+    def create_temporary_requirement(self, package: Package) -> str:
+        fd, name = tempfile.mkstemp("reqs.txt", f"{package.name}-{package.version}")
+        req = self.requirement(package, formatted=True)
+        if isinstance(req, list):
+            req = " ".join(req)
+
+        try:
+            os.write(fd, encode(req))
+        finally:
+            os.close(fd)
+
+        return name
+
+    def install_directory(self, package: Package) -> str | int:
+        from cleo.io.null_io import NullIO
+
+        from poetry.factory import Factory
+
+        assert package.source_url is not None
+        if package.root_dir:
+            req = package.root_dir / package.source_url
+        else:
+            req = Path(package.source_url).resolve(strict=False)
+
+        if package.source_subdirectory:
+            req /= package.source_subdirectory
+
+        pyproject = PyProjectTOML(os.path.join(req, "pyproject.toml"))
+
+        if pyproject.is_poetry_project():
+            # Even if there is a build system specified
+            # some versions of pip (< 19.0.0) don't understand it
+            # so we need to check the version of pip to know
+            # if we can rely on the build system
+            legacy_pip = self._env.pip_version < Version.from_parts(19, 0, 0)
+
+            try:
+                package_poetry = Factory().create_poetry(pyproject.file.path.parent)
+            except RuntimeError:
+                package_poetry = None
+
+            if package_poetry is not None:
+                builder: Builder
+                if package.develop and not package_poetry.package.build_script:
+                    from poetry.masonry.builders.editable import EditableBuilder
+
+                    # This is a Poetry package in editable mode
+                    # we can use the EditableBuilder without going through pip
+                    # to install it, unless it has a build script.
+                    builder = EditableBuilder(package_poetry, self._env, NullIO())
+                    builder.build()
+
+                    return 0
+                elif legacy_pip or package_poetry.package.build_script:
+                    from poetry.core.masonry.builders.sdist import SdistBuilder
+
+                    # We need to rely on creating a temporary setup.py
+                    # file since the version of pip does not support
+                    # build-systems
+                    # We also need it for non-PEP-517 packages
+                    builder = SdistBuilder(package_poetry)
+
+                    with builder.setup_py():
+                        if package.develop:
+                            return pip_install(
+                                path=req,
+                                environment=self._env,
+                                upgrade=True,
+                                editable=True,
+                            )
+                        return pip_install(
+                            path=req, environment=self._env, deps=False, upgrade=True
+                        )
+
+        if package.develop:
+            return pip_install(
+                path=req, environment=self._env, upgrade=True, editable=True
+            )
+        return pip_install(path=req, environment=self._env, deps=False, upgrade=True)
+
+    def install_git(self, package: Package) -> None:
+        from poetry.core.packages.package import Package
+
+        from poetry.vcs.git import Git
+
+        assert package.source_url is not None
+        source = Git.clone(
+            url=package.source_url,
+            source_root=self._env.path / "src",
+            revision=package.source_resolved_reference or package.source_reference,
+        )
+
+        # Now we just need to install from the source directory
+        pkg = Package(
+            name=package.name,
+            version=package.version,
+            source_type="directory",
+            source_url=str(source.path),
+            source_subdirectory=package.source_subdirectory,
+            develop=package.develop,
+        )
+
+        self.install_directory(pkg)
diff --git a/vendor/poetry/src/poetry/json/__init__.py b/vendor/poetry/src/poetry/json/__init__.py
new file mode 100644
index 00000000..a5849e00
--- /dev/null
+++ b/vendor/poetry/src/poetry/json/__init__.py
@@ -0,0 +1,47 @@
+from __future__ import annotations
+
+import json
+
+from importlib import resources
+from typing import Any
+
+import jsonschema
+
+
+class ValidationError(ValueError):
+    pass
+
+
+def validate_object(obj: dict[str, Any]) -> list[str]:
+    schema = json.loads(resources.read_text(f"{__name__}.schemas", "poetry.json"))
+
+    validator = jsonschema.Draft7Validator(schema)
+    validation_errors = sorted(
+        validator.iter_errors(obj),
+        key=lambda e: e.path,  # type: ignore[no-any-return]
+    )
+
+    errors = []
+
+    for error in validation_errors:
+        message = error.message
+        if error.path:
+            path = ".".join(str(x) for x in error.absolute_path)
+            message = f"[{path}] {message}"
+
+        errors.append(message)
+
+    core_schema = json.loads(
+        resources.read_text(f"poetry.core.json.schemas", "poetry-schema.json")
+    )
+
+    if core_schema["additionalProperties"]:
+        # TODO: make this unconditional once core updates to >1.1.0b2
+        properties = {*schema["properties"].keys(), *core_schema["properties"].keys()}
+        additional_properties = set(obj.keys()) - properties
+        for key in additional_properties:
+            errors.append(
+                f"Additional properties are not allowed ('{key}' was unexpected)"
+            )
+
+    return errors
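+
+
+# ``validate_object`` returns a flat list of human-readable messages; an empty
+# list means the object passed the poetry schema as well as the
+# additional-properties check against the poetry-core schema.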
diff --git a/vendor/poetry/src/poetry/json/schemas/__init__.py b/vendor/poetry/src/poetry/json/schemas/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/json/schemas/poetry.json b/vendor/poetry/src/poetry/json/schemas/poetry.json
new file mode 100644
index 00000000..7532fd83
--- /dev/null
+++ b/vendor/poetry/src/poetry/json/schemas/poetry.json
@@ -0,0 +1,55 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "additionalProperties": true,
+  "type": "object",
+  "required": [],
+  "properties": {
+    "source": {
+      "type": "array",
+      "description": "A set of additional repositories where packages can be found.",
+      "additionalProperties": {
+        "$ref": "#/definitions/repository"
+      },
+      "items": {
+        "$ref": "#/definitions/repository"
+      }
+    }
+  },
+  "definitions": {
+    "repository": {
+      "type": "object",
+      "additionalProperties": false,
+      "required": [
+        "name",
+        "url"
+      ],
+      "properties": {
+        "name": {
+          "type": "string",
+          "description": "The name of the repository"
+        },
+        "url": {
+          "type": "string",
+          "description": "The url of the repository",
+          "format": "uri"
+        },
+        "default": {
+          "type": "boolean",
+          "description": "Make this repository the default (disable PyPI)"
+        },
+        "secondary": {
+          "type": "boolean",
+          "description": "Declare this repository as secondary, i.e. it will only be looked up last for packages."
+        },
+        "links": {
+          "type": "boolean",
+          "description": "Declare this as a link source. Links at uri/path can point to sdist or bdist archives."
+        },
+        "indexed": {
+          "type": "boolean",
+          "description": "For PEP 503 simple API repositories, pre-fetch and index the available packages. (experimental)"
+        }
+      }
+    }
+  }
+}
diff --git a/vendor/poetry/src/poetry/layouts/__init__.py b/vendor/poetry/src/poetry/layouts/__init__.py
new file mode 100644
index 00000000..033b16a6
--- /dev/null
+++ b/vendor/poetry/src/poetry/layouts/__init__.py
@@ -0,0 +1,14 @@
+from __future__ import annotations
+
+from poetry.layouts.layout import Layout
+from poetry.layouts.src import SrcLayout
+
+
+_LAYOUTS = {"src": SrcLayout, "standard": Layout}
+
+
+def layout(name: str) -> type[Layout]:
+    if name not in _LAYOUTS:
+        raise ValueError("Invalid layout")
+
+    return _LAYOUTS[name]
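+
+
+# For example, ``layout("src")`` returns the ``SrcLayout`` class (not an
+# instance); callers instantiate it with the project parameters themselves.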
diff --git a/vendor/poetry/src/poetry/layouts/layout.py b/vendor/poetry/src/poetry/layouts/layout.py
new file mode 100644
index 00000000..66b5ea94
--- /dev/null
+++ b/vendor/poetry/src/poetry/layouts/layout.py
@@ -0,0 +1,199 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
+
+from packaging.utils import canonicalize_name
+from poetry.core.pyproject.toml import PyProjectTOML
+from poetry.core.utils.helpers import module_name
+from tomlkit import inline_table
+from tomlkit import loads
+from tomlkit import table
+from tomlkit.toml_document import TOMLDocument
+
+
+if TYPE_CHECKING:
+    from typing import Mapping
+
+    from tomlkit.items import InlineTable
+
+
+POETRY_DEFAULT = """\
+[tool.poetry]
+name = ""
+version = ""
+description = ""
+authors = []
+license = ""
+readme = ""
+packages = []
+
+[tool.poetry.dependencies]
+
+[tool.poetry.group.dev.dependencies]
+"""
+
+BUILD_SYSTEM_MIN_VERSION: str | None = None
+BUILD_SYSTEM_MAX_VERSION: str | None = None
+
+
+class Layout:
+    def __init__(
+        self,
+        project: str,
+        version: str = "0.1.0",
+        description: str = "",
+        readme_format: str = "md",
+        author: str | None = None,
+        license: str | None = None,
+        python: str = "*",
+        dependencies: dict[str, str | Mapping[str, Any]] | None = None,
+        dev_dependencies: dict[str, str | Mapping[str, Any]] | None = None,
+    ) -> None:
+        self._project = canonicalize_name(project)
+        self._package_path_relative = Path(
+            *(module_name(part) for part in project.split("."))
+        )
+        self._package_name = ".".join(self._package_path_relative.parts)
+        self._version = version
+        self._description = description
+
+        self._readme_format = readme_format.lower()
+
+        self._license = license
+        self._python = python
+        self._dependencies = dependencies or {}
+        self._dev_dependencies = dev_dependencies or {}
+
+        if not author:
+            author = "Your Name "
+
+        self._author = author
+
+    @property
+    def basedir(self) -> Path:
+        return Path()
+
+    @property
+    def package_path(self) -> Path:
+        return self.basedir / self._package_path_relative
+
+    def get_package_include(self) -> InlineTable | None:
+        package = inline_table()
+
+        # If a project is created in the root directory (this is reasonable,
+        # eg inside a docker container), then parts will be empty.
+        parts = self._package_path_relative.parts
+        if not parts:
+            return None
+
+        include = parts[0]
+        package.append("include", include)  # type: ignore[no-untyped-call]
+
+        if self.basedir != Path():
+            package.append(  # type: ignore[no-untyped-call]
+                "from",
+                self.basedir.as_posix(),
+            )
+        else:
+            if include == self._project:
+                # package include and package name are the same,
+                # packages table is redundant here.
+                return None
+
+        return package
+
+    def create(self, path: Path, with_tests: bool = True) -> None:
+        path.mkdir(parents=True, exist_ok=True)
+
+        self._create_default(path)
+        self._create_readme(path)
+
+        if with_tests:
+            self._create_tests(path)
+
+        self._write_poetry(path)
+
+    def generate_poetry_content(self) -> TOMLDocument:
+        template = POETRY_DEFAULT
+
+        content: dict[str, Any] = loads(template)
+
+        poetry_content = content["tool"]["poetry"]
+        poetry_content["name"] = self._project
+        poetry_content["version"] = self._version
+        poetry_content["description"] = self._description
+        poetry_content["authors"].append(self._author)
+
+        if self._license:
+            poetry_content["license"] = self._license
+        else:
+            poetry_content.remove("license")
+
+        poetry_content["readme"] = f"README.{self._readme_format}"
+        packages = self.get_package_include()
+        if packages:
+            poetry_content["packages"].append(packages)
+        else:
+            poetry_content.remove("packages")
+
+        poetry_content["dependencies"]["python"] = self._python
+
+        for dep_name, dep_constraint in self._dependencies.items():
+            poetry_content["dependencies"][dep_name] = dep_constraint
+
+        if self._dev_dependencies:
+            for dep_name, dep_constraint in self._dev_dependencies.items():
+                poetry_content["group"]["dev"]["dependencies"][
+                    dep_name
+                ] = dep_constraint
+        else:
+            del poetry_content["group"]
+
+        # Add build system
+        build_system = table()
+        build_system_version = ""
+
+        if BUILD_SYSTEM_MIN_VERSION is not None:
+            build_system_version = ">=" + BUILD_SYSTEM_MIN_VERSION
+        if BUILD_SYSTEM_MAX_VERSION is not None:
+            if build_system_version:
+                build_system_version += ","
+            build_system_version += "<" + BUILD_SYSTEM_MAX_VERSION
+
+        build_system.add("requires", ["poetry-core" + build_system_version])
+        build_system.add("build-backend", "poetry.core.masonry.api")
+
+        assert isinstance(content, TOMLDocument)
+        content.add("build-system", build_system)
+
+        return content
+
+    def _create_default(self, path: Path, src: bool = True) -> None:
+        package_path = path / self.package_path
+        package_path.mkdir(parents=True)
+
+        package_init = package_path / "__init__.py"
+        package_init.touch()
+
+    def _create_readme(self, path: Path) -> Path:
+        readme_file = path.joinpath(f"README.{self._readme_format}")
+        readme_file.touch()
+        return readme_file
+
+    @staticmethod
+    def _create_tests(path: Path) -> None:
+        tests = path / "tests"
+        tests.mkdir()
+
+        tests_init = tests / "__init__.py"
+        tests_init.touch(exist_ok=False)
+
+    def _write_poetry(self, path: Path) -> None:
+        pyproject = PyProjectTOML(path / "pyproject.toml")
+        content = self.generate_poetry_content()
+        for section in content:
+            pyproject.data.append(section, content[section])
+        pyproject.save()
diff --git a/vendor/poetry/src/poetry/layouts/src.py b/vendor/poetry/src/poetry/layouts/src.py
new file mode 100644
index 00000000..fbaee68d
--- /dev/null
+++ b/vendor/poetry/src/poetry/layouts/src.py
@@ -0,0 +1,11 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+from poetry.layouts.layout import Layout
+
+
+class SrcLayout(Layout):
+    @property
+    def basedir(self) -> Path:
+        return Path("src")
diff --git a/vendor/poetry/src/poetry/layouts/standard.py b/vendor/poetry/src/poetry/layouts/standard.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/locations.py b/vendor/poetry/src/poetry/locations.py
new file mode 100644
index 00000000..0e3b884e
--- /dev/null
+++ b/vendor/poetry/src/poetry/locations.py
@@ -0,0 +1,50 @@
+from __future__ import annotations
+
+import logging
+import os
+import sys
+
+from pathlib import Path
+
+from platformdirs import user_cache_path
+from platformdirs import user_config_path
+from platformdirs import user_data_path
+
+
+logger = logging.getLogger(__name__)
+
+_APP_NAME = "pypoetry"
+
+DEFAULT_CACHE_DIR = user_cache_path(_APP_NAME, appauthor=False)
+CONFIG_DIR = Path(
+    os.getenv("POETRY_CONFIG_DIR")
+    or user_config_path(_APP_NAME, appauthor=False, roaming=True)
+)
+
+# platformdirs 2.0.0 corrected the OSX/macOS config directory from
+# /Users/<user>/Library/Application Support/<appname> to
+# /Users/<user>/Library/Preferences/<appname>.
+#
+# For now we only deprecate use of the old directory.
+if sys.platform == "darwin":
+    _LEGACY_CONFIG_DIR = CONFIG_DIR.parent.parent / "Application Support" / _APP_NAME
+    config_toml = _LEGACY_CONFIG_DIR / "config.toml"
+    auth_toml = _LEGACY_CONFIG_DIR / "auth.toml"
+
+    if any(file.exists() for file in (auth_toml, config_toml)):
+        logger.warning(
+            "Configuration file exists at %s, reusing this directory.\n\nConsider"
+            " moving configuration to %s, as support for the legacy directory will be"
+            " removed in an upcoming release.",
+            _LEGACY_CONFIG_DIR,
+            CONFIG_DIR,
+        )
+        CONFIG_DIR = _LEGACY_CONFIG_DIR
+
+
+def data_dir() -> Path:
+    poetry_home = os.getenv("POETRY_HOME")
+    if poetry_home:
+        return Path(poetry_home).expanduser()
+
+    return user_data_path(_APP_NAME, appauthor=False, roaming=True)
diff --git a/vendor/poetry/src/poetry/masonry/__init__.py b/vendor/poetry/src/poetry/masonry/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/masonry/api.py b/vendor/poetry/src/poetry/masonry/api.py
new file mode 100644
index 00000000..807585ae
--- /dev/null
+++ b/vendor/poetry/src/poetry/masonry/api.py
@@ -0,0 +1,16 @@
+from __future__ import annotations
+
+from poetry.core.masonry.api import build_sdist
+from poetry.core.masonry.api import build_wheel
+from poetry.core.masonry.api import get_requires_for_build_sdist
+from poetry.core.masonry.api import get_requires_for_build_wheel
+from poetry.core.masonry.api import prepare_metadata_for_build_wheel
+
+
+__all__ = [
+    "build_sdist",
+    "build_wheel",
+    "get_requires_for_build_sdist",
+    "get_requires_for_build_wheel",
+    "prepare_metadata_for_build_wheel",
+]
diff --git a/vendor/poetry/src/poetry/masonry/builders/__init__.py b/vendor/poetry/src/poetry/masonry/builders/__init__.py
new file mode 100644
index 00000000..61662422
--- /dev/null
+++ b/vendor/poetry/src/poetry/masonry/builders/__init__.py
@@ -0,0 +1,6 @@
+from __future__ import annotations
+
+from poetry.masonry.builders.editable import EditableBuilder
+
+
+__all__ = ["EditableBuilder"]
diff --git a/vendor/poetry/src/poetry/masonry/builders/editable.py b/vendor/poetry/src/poetry/masonry/builders/editable.py
new file mode 100644
index 00000000..fac1569e
--- /dev/null
+++ b/vendor/poetry/src/poetry/masonry/builders/editable.py
@@ -0,0 +1,284 @@
+from __future__ import annotations
+
+import csv
+import hashlib
+import json
+import os
+import shutil
+
+from base64 import urlsafe_b64encode
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+from poetry.core.masonry.builders.builder import Builder
+from poetry.core.masonry.builders.sdist import SdistBuilder
+from poetry.core.masonry.utils.package_include import PackageInclude
+from poetry.core.semver.version import Version
+
+from poetry.utils._compat import WINDOWS
+from poetry.utils._compat import decode
+from poetry.utils.env import build_environment
+from poetry.utils.helpers import is_dir_writable
+from poetry.utils.pip import pip_install
+
+
+if TYPE_CHECKING:
+    from cleo.io.io import IO
+
+    from poetry.poetry import Poetry
+    from poetry.utils.env import Env
+
+SCRIPT_TEMPLATE = """\
+#!{python}
+import sys
+from {module} import {callable_holder}
+
+if __name__ == '__main__':
+    sys.exit({callable_}())
+"""
+
+WINDOWS_CMD_TEMPLATE = """\
+@echo off\r\n"{python}" "%~dp0\\{script}" %*\r\n
+"""
+
+
+class EditableBuilder(Builder):
+    def __init__(self, poetry: Poetry, env: Env, io: IO) -> None:
+        super().__init__(poetry)
+
+        self._env = env
+        self._io = io
+
+    def build(self, target_dir: Path | None = None) -> Path:
+        self._debug(
+            f"  - Building package {self._package.name} in"
+            " editable mode"
+        )
+
+        if self._package.build_script:
+            if self._package.build_should_generate_setup():
+                self._debug(
+                    "  - Falling back on using a setup.py"
+                )
+                self._setup_build()
+                path: Path = self._path
+                return path
+
+            self._run_build_script(self._package.build_script)
+
+        for removed in self._env.site_packages.remove_distribution_files(
+            distribution_name=self._package.name
+        ):
+            self._debug(
+                f"  - Removed {removed.name} directory from"
+                f" {removed.parent}"
+            )
+
+        added_files = []
+        added_files += self._add_pth()
+        added_files += self._add_scripts()
+        self._add_dist_info(added_files)
+
+        path = self._path
+        return path
+
+    def _run_build_script(self, build_script: str) -> None:
+        with build_environment(poetry=self._poetry, env=self._env, io=self._io) as env:
+            self._debug(f"  - Executing build script: {build_script}")
+            env.run("python", str(self._path.joinpath(build_script)), call=True)
+
+    def _setup_build(self) -> None:
+        builder = SdistBuilder(self._poetry)
+        setup = self._path / "setup.py"
+        has_setup = setup.exists()
+
+        if has_setup:
+            self._io.write_error_line(
+                "A setup.py file already exists. Using it."
+            )
+        else:
+            with setup.open("w", encoding="utf-8") as f:
+                f.write(decode(builder.build_setup()))
+
+        try:
+            if self._env.pip_version < Version.from_parts(19, 0):
+                pip_install(self._path, self._env, upgrade=True, editable=True)
+            else:
+                # Temporarily rename pyproject.toml
+                shutil.move(
+                    str(self._poetry.file), str(self._poetry.file.with_suffix(".tmp"))
+                )
+                try:
+                    pip_install(self._path, self._env, upgrade=True, editable=True)
+                finally:
+                    shutil.move(
+                        str(self._poetry.file.with_suffix(".tmp")),
+                        str(self._poetry.file),
+                    )
+        finally:
+            if not has_setup:
+                os.remove(str(setup))
+
+    def _add_pth(self) -> list[Path]:
+        paths = {
+            include.base.resolve().as_posix()
+            for include in self._module.includes
+            if isinstance(include, PackageInclude)
+            and (include.is_module() or include.is_package())
+        }
+
+        content = "".join(decode(path + os.linesep) for path in paths)
+        pth_file = Path(self._module.name).with_suffix(".pth")
+
+        # remove any pre-existing pth files for this package
+        for file in self._env.site_packages.find(path=pth_file, writable_only=True):
+            self._debug(
+                f"  - Removing existing {file.name} from {file.parent}"
+                f" for {self._poetry.file.parent}"
+            )
+            # We can't use unlink(missing_ok=True) because it's not always available
+            if file.exists():
+                file.unlink()
+
+        try:
+            pth_file = self._env.site_packages.write_text(
+                pth_file, content, encoding="utf-8"
+            )
+            self._debug(
+                f"  - Adding {pth_file.name} to {pth_file.parent} for"
+                f" {self._poetry.file.parent}"
+            )
+            return [pth_file]
+        except OSError:
+            # TODO: Replace with PermissionError
+            self._io.write_error_line(
+                f"  - Failed to create {pth_file.name} for"
+                f" {self._poetry.file.parent}"
+            )
+            return []
+
+    def _add_scripts(self) -> list[Path]:
+        added = []
+        entry_points = self.convert_entry_points()
+
+        for scripts_path in self._env.script_dirs:
+            if is_dir_writable(path=scripts_path, create=True):
+                break
+        else:
+            self._io.write_error_line(
+                "  - Failed to find a suitable script installation directory for"
+                f" {self._poetry.file.parent}"
+            )
+            return []
+
+        scripts = entry_points.get("console_scripts", [])
+        for script in scripts:
+            name, script = script.split(" = ")
+            module, callable_ = script.split(":")
+            callable_holder = callable_.split(".", 1)[0]
+
+            script_file = scripts_path.joinpath(name)
+            self._debug(
+                f"  - Adding the {name} script to {scripts_path}"
+            )
+            with script_file.open("w", encoding="utf-8") as f:
+                f.write(
+                    decode(
+                        SCRIPT_TEMPLATE.format(
+                            python=self._env.python,
+                            module=module,
+                            callable_holder=callable_holder,
+                            callable_=callable_,
+                        )
+                    )
+                )
+
+            script_file.chmod(0o755)
+
+            added.append(script_file)
+
+            if WINDOWS:
+                cmd_script = script_file.with_suffix(".cmd")
+                cmd = WINDOWS_CMD_TEMPLATE.format(python=self._env.python, script=name)
+                self._debug(
+                    f"  - Adding the {cmd_script.name} script wrapper to"
+                    f" {scripts_path}"
+                )
+
+                with cmd_script.open("w", encoding="utf-8") as f:
+                    f.write(decode(cmd))
+
+                added.append(cmd_script)
+
+        return added
+
+    def _add_dist_info(self, added_files: list[Path]) -> None:
+        from poetry.core.masonry.builders.wheel import WheelBuilder
+
+        added_files = added_files[:]
+
+        builder = WheelBuilder(self._poetry)
+        dist_info = self._env.site_packages.mkdir(Path(builder.dist_info))
+
+        self._debug(
+            f"  - Adding the {dist_info.name} directory to"
+            f" {dist_info.parent}"
+        )
+
+        with dist_info.joinpath("METADATA").open("w", encoding="utf-8") as f:
+            builder._write_metadata_file(f)
+
+        added_files.append(dist_info.joinpath("METADATA"))
+
+        with dist_info.joinpath("INSTALLER").open("w", encoding="utf-8") as f:
+            f.write("poetry")
+
+        added_files.append(dist_info.joinpath("INSTALLER"))
+
+        if self.convert_entry_points():
+            with dist_info.joinpath("entry_points.txt").open(
+                "w", encoding="utf-8"
+            ) as f:
+                builder._write_entry_points(f)
+
+            added_files.append(dist_info.joinpath("entry_points.txt"))
+
+        # write PEP 610 metadata
+        direct_url_json = dist_info.joinpath("direct_url.json")
+        direct_url_json.write_text(
+            json.dumps(
+                {
+                    "dir_info": {"editable": True},
+                    "url": self._poetry.file.path.parent.as_uri(),
+                }
+            )
+        )
+        added_files.append(direct_url_json)
+
+        record = dist_info.joinpath("RECORD")
+        with record.open("w", encoding="utf-8", newline="") as f:
+            csv_writer = csv.writer(f)
+            for path in added_files:
+                hash = self._get_file_hash(path)
+                size = path.stat().st_size
+                csv_writer.writerow((path, f"sha256={hash}", size))
+
+            # RECORD itself is recorded with no hash or size
+            csv_writer.writerow((record, "", ""))
+
+    def _get_file_hash(self, filepath: Path) -> str:
+        hashsum = hashlib.sha256()
+        with filepath.open("rb") as src:
+            while True:
+                buf = src.read(1024 * 8)
+                if not buf:
+                    break
+                hashsum.update(buf)
+
+            src.seek(0)
+
+        return urlsafe_b64encode(hashsum.digest()).decode("ascii").rstrip("=")
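+
+    # Note: the digest is encoded as unpadded URL-safe base64, matching the
+    # ``sha256=...`` entries that wheel ``RECORD`` files use (PEP 376/427).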
+
+    def _debug(self, msg: str) -> None:
+        if self._io.is_debug():
+            self._io.write_line(msg)
diff --git a/vendor/poetry/src/poetry/mixology/__init__.py b/vendor/poetry/src/poetry/mixology/__init__.py
new file mode 100644
index 00000000..88d9d174
--- /dev/null
+++ b/vendor/poetry/src/poetry/mixology/__init__.py
@@ -0,0 +1,24 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.mixology.version_solver import VersionSolver
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.project_package import ProjectPackage
+
+    from poetry.mixology.result import SolverResult
+    from poetry.packages import DependencyPackage
+    from poetry.puzzle.provider import Provider
+
+
+def resolve_version(
+    root: ProjectPackage,
+    provider: Provider,
+    locked: dict[str, list[DependencyPackage]] | None = None,
+    use_latest: list[str] | None = None,
+) -> SolverResult:
+    solver = VersionSolver(root, provider, locked=locked, use_latest=use_latest)
+
+    return solver.solve()
diff --git a/vendor/poetry/src/poetry/mixology/assignment.py b/vendor/poetry/src/poetry/mixology/assignment.py
new file mode 100644
index 00000000..9c0e0b1a
--- /dev/null
+++ b/vendor/poetry/src/poetry/mixology/assignment.py
@@ -0,0 +1,62 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.mixology.term import Term
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.dependency import Dependency
+    from poetry.core.packages.package import Package
+
+    from poetry.mixology.incompatibility import Incompatibility
+
+
+class Assignment(Term):
+    """
+    A term in a PartialSolution that tracks some additional metadata.
+    """
+
+    def __init__(
+        self,
+        dependency: Dependency,
+        is_positive: bool,
+        decision_level: int,
+        index: int,
+        cause: Incompatibility | None = None,
+    ) -> None:
+        super().__init__(dependency, is_positive)
+
+        self._decision_level = decision_level
+        self._index = index
+        self._cause = cause
+
+    @property
+    def decision_level(self) -> int:
+        return self._decision_level
+
+    @property
+    def index(self) -> int:
+        return self._index
+
+    @property
+    def cause(self) -> Incompatibility | None:
+        return self._cause
+
+    @classmethod
+    def decision(cls, package: Package, decision_level: int, index: int) -> Assignment:
+        return cls(package.to_dependency(), True, decision_level, index)
+
+    @classmethod
+    def derivation(
+        cls,
+        dependency: Dependency,
+        is_positive: bool,
+        cause: Incompatibility,
+        decision_level: int,
+        index: int,
+    ) -> Assignment:
+        return cls(dependency, is_positive, decision_level, index, cause)
+
+    def is_decision(self) -> bool:
+        return self._cause is None
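A small sketch of the decision/derivation split, assuming poetry-core and the vendored mixology package are importable; `demo` is an arbitrary name:

    from poetry.core.packages.package import Package
    from poetry.mixology.assignment import Assignment

    # A decision pins a concrete package version and carries no cause.
    decision = Assignment.decision(Package("demo", "1.0.0"), decision_level=0, index=0)
    assert decision.is_decision()  # no Incompatibility cause attached
    assert decision.is_positive()  # decisions are always positive terms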
diff --git a/vendor/poetry/src/poetry/mixology/failure.py b/vendor/poetry/src/poetry/mixology/failure.py
new file mode 100644
index 00000000..6c11350a
--- /dev/null
+++ b/vendor/poetry/src/poetry/mixology/failure.py
@@ -0,0 +1,268 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from typing import cast
+
+from poetry.core.semver.helpers import parse_constraint
+
+from poetry.mixology.incompatibility_cause import ConflictCause
+from poetry.mixology.incompatibility_cause import PythonCause
+
+
+if TYPE_CHECKING:
+    from poetry.mixology.incompatibility import Incompatibility
+
+
+class SolveFailure(Exception):
+    def __init__(self, incompatibility: Incompatibility) -> None:
+        self._incompatibility = incompatibility
+
+    @property
+    def message(self) -> str:
+        return str(self)
+
+    def __str__(self) -> str:
+        return _Writer(self._incompatibility).write()
+
+
+class _Writer:
+    def __init__(self, root: Incompatibility) -> None:
+        self._root = root
+        self._derivations: dict[Incompatibility, int] = {}
+        self._lines: list[tuple[str, int | None]] = []
+        self._line_numbers: dict[Incompatibility, int] = {}
+
+        self._count_derivations(self._root)
+
+    def write(self) -> str:
+        buffer = []
+
+        required_python_version_notification = False
+        for incompatibility in self._root.external_incompatibilities:
+            if isinstance(incompatibility.cause, PythonCause):
+                if not required_python_version_notification:
+                    buffer.append(
+                        "The current project's Python requirement"
+                        f" ({incompatibility.cause.root_python_version}) is not"
+                        " compatible with some of the required packages Python"
+                        " requirement:"
+                    )
+                    required_python_version_notification = True
+
+                root_constraint = parse_constraint(
+                    incompatibility.cause.root_python_version
+                )
+                constraint = parse_constraint(incompatibility.cause.python_version)
+                buffer.append(
+                    f"  - {incompatibility.terms[0].dependency.name} requires Python"
+                    f" {incompatibility.cause.python_version}, so it will not be"
+                    f" satisfied for Python {root_constraint.difference(constraint)}"
+                )
+
+        if required_python_version_notification:
+            buffer.append("")
+
+        if isinstance(self._root.cause, ConflictCause):
+            self._visit(self._root)
+        else:
+            self._write(self._root, f"Because {self._root}, version solving failed.")
+
+        padding = (
+            0
+            if not self._line_numbers
+            else len(f"({list(self._line_numbers.values())[-1]}) ")
+        )
+
+        last_was_empty = False
+        for line in self._lines:
+            message = line[0]
+            if not message:
+                if not last_was_empty:
+                    buffer.append("")
+
+                last_was_empty = True
+                continue
+
+            last_was_empty = False
+
+            number = line[-1]
+            if number is not None:
+                message = f"({number})".ljust(padding) + message
+            else:
+                message = " " * padding + message
+
+            buffer.append(message)
+
+        return "\n".join(buffer)
+
+    def _write(
+        self, incompatibility: Incompatibility, message: str, numbered: bool = False
+    ) -> None:
+        if numbered:
+            number = len(self._line_numbers) + 1
+            self._line_numbers[incompatibility] = number
+            self._lines.append((message, number))
+        else:
+            self._lines.append((message, None))
+
+    def _visit(
+        self,
+        incompatibility: Incompatibility,
+        conclusion: bool = False,
+    ) -> None:
+        numbered = conclusion or self._derivations[incompatibility] > 1
+        conjunction = "So," if conclusion or incompatibility == self._root else "And"
+        incompatibility_string = str(incompatibility)
+
+        cause: ConflictCause = cast(ConflictCause, incompatibility.cause)
+
+        if isinstance(cause.conflict.cause, ConflictCause) and isinstance(
+            cause.other.cause, ConflictCause
+        ):
+            conflict_line = self._line_numbers.get(cause.conflict)
+            other_line = self._line_numbers.get(cause.other)
+
+            if conflict_line is not None and other_line is not None:
+                reason = cause.conflict.and_to_string(
+                    cause.other, conflict_line, other_line
+                )
+                self._write(
+                    incompatibility,
+                    f"Because {reason}, {incompatibility_string}.",
+                    numbered=numbered,
+                )
+            elif conflict_line is not None or other_line is not None:
+                if conflict_line is not None:
+                    with_line = cause.conflict
+                    without_line = cause.other
+                    line = conflict_line
+                elif other_line is not None:
+                    with_line = cause.other
+                    without_line = cause.conflict
+                    line = other_line
+
+                self._visit(without_line)
+                self._write(
+                    incompatibility,
+                    f"{conjunction} because {with_line!s} ({line}),"
+                    f" {incompatibility_string}.",
+                    numbered=numbered,
+                )
+            else:
+                single_line_conflict = self._is_single_line(cause.conflict.cause)
+                single_line_other = self._is_single_line(cause.other.cause)
+
+                if single_line_other or single_line_conflict:
+                    first = cause.conflict if single_line_other else cause.other
+                    second = cause.other if single_line_other else cause.conflict
+                    self._visit(first)
+                    self._visit(second)
+                    self._write(
+                        incompatibility,
+                        f"Thus, {incompatibility_string}.",
+                        numbered=numbered,
+                    )
+                else:
+                    self._visit(cause.conflict, conclusion=True)
+                    self._lines.append(("", None))
+
+                    self._visit(cause.other)
+
+                    self._write(
+                        incompatibility,
+                        f"{conjunction} because"
+                        f" {cause.conflict!s} ({self._line_numbers[cause.conflict]}),"
+                        f" {incompatibility_string}",
+                        numbered=numbered,
+                    )
+        elif isinstance(cause.conflict.cause, ConflictCause) or isinstance(
+            cause.other.cause, ConflictCause
+        ):
+            derived = (
+                cause.conflict
+                if isinstance(cause.conflict.cause, ConflictCause)
+                else cause.other
+            )
+            ext = (
+                cause.other
+                if isinstance(cause.conflict.cause, ConflictCause)
+                else cause.conflict
+            )
+
+            derived_line = self._line_numbers.get(derived)
+            if derived_line is not None:
+                reason = ext.and_to_string(derived, None, derived_line)
+                self._write(
+                    incompatibility,
+                    f"Because {reason}, {incompatibility_string}.",
+                    numbered=numbered,
+                )
+            elif self._is_collapsible(derived):
+                derived_cause: ConflictCause = cast(ConflictCause, derived.cause)
+                if isinstance(derived_cause.conflict.cause, ConflictCause):
+                    collapsed_derived = derived_cause.conflict
+                    collapsed_ext = derived_cause.other
+                else:
+                    collapsed_derived = derived_cause.other
+
+                    collapsed_ext = derived_cause.conflict
+
+                self._visit(collapsed_derived)
+                reason = collapsed_ext.and_to_string(ext, None, None)
+                self._write(
+                    incompatibility,
+                    f"{conjunction} because {reason}, {incompatibility_string}.",
+                    numbered=numbered,
+                )
+            else:
+                self._visit(derived)
+                self._write(
+                    incompatibility,
+                    f"{conjunction} because {ext!s}, {incompatibility_string}.",
+                    numbered=numbered,
+                )
+        else:
+            reason = cause.conflict.and_to_string(cause.other, None, None)
+            self._write(
+                incompatibility,
+                f"Because {reason}, {incompatibility_string}.",
+                numbered=numbered,
+            )
+
+    def _is_collapsible(self, incompatibility: Incompatibility) -> bool:
+        if self._derivations[incompatibility] > 1:
+            return False
+
+        cause: ConflictCause = cast(ConflictCause, incompatibility.cause)
+        if isinstance(cause.conflict.cause, ConflictCause) and isinstance(
+            cause.other.cause, ConflictCause
+        ):
+            return False
+
+        if not isinstance(cause.conflict.cause, ConflictCause) and not isinstance(
+            cause.other.cause, ConflictCause
+        ):
+            return False
+
+        complex = (
+            cause.conflict
+            if isinstance(cause.conflict.cause, ConflictCause)
+            else cause.other
+        )
+
+        return complex not in self._line_numbers
+
+    def _is_single_line(self, cause: ConflictCause) -> bool:
+        return not isinstance(cause.conflict.cause, ConflictCause) and not isinstance(
+            cause.other.cause, ConflictCause
+        )
+
+    def _count_derivations(self, incompatibility: Incompatibility) -> None:
+        if incompatibility in self._derivations:
+            self._derivations[incompatibility] += 1
+        else:
+            self._derivations[incompatibility] = 1
+            cause = incompatibility.cause
+            if isinstance(cause, ConflictCause):
+                self._count_derivations(cause.conflict)
+                self._count_derivations(cause.other)
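The Python-requirement hint emitted by `_Writer.write` leans on constraint arithmetic from poetry-core; a standalone illustration with arbitrary versions:

    from poetry.core.semver.helpers import parse_constraint

    root = parse_constraint(">=3.8")     # the project's python requirement
    needed = parse_constraint(">=3.10")  # a dependency's python requirement

    # The slice of the project's range the dependency can never satisfy,
    # which is what the report interpolates above:
    print(root.difference(needed))  # >=3.8,<3.10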
diff --git a/vendor/poetry/src/poetry/mixology/incompatibility.py b/vendor/poetry/src/poetry/mixology/incompatibility.py
new file mode 100644
index 00000000..b629ea7a
--- /dev/null
+++ b/vendor/poetry/src/poetry/mixology/incompatibility.py
@@ -0,0 +1,459 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.mixology.incompatibility_cause import ConflictCause
+from poetry.mixology.incompatibility_cause import DependencyCause
+from poetry.mixology.incompatibility_cause import NoVersionsCause
+from poetry.mixology.incompatibility_cause import PackageNotFoundCause
+from poetry.mixology.incompatibility_cause import PlatformCause
+from poetry.mixology.incompatibility_cause import PythonCause
+from poetry.mixology.incompatibility_cause import RootCause
+
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+    from collections.abc import Iterator
+
+    from poetry.mixology.incompatibility_cause import IncompatibilityCause
+    from poetry.mixology.term import Term
+
+
+class Incompatibility:
+    def __init__(self, terms: list[Term], cause: IncompatibilityCause) -> None:
+        # Remove the root package from generated incompatibilities, since it will
+        # always be satisfied. This makes error reporting clearer, and may also
+        # make solving more efficient.
+        if (
+            len(terms) != 1
+            and isinstance(cause, ConflictCause)
+            and any(term.is_positive() and term.dependency.is_root for term in terms)
+        ):
+            terms = [
+                term
+                for term in terms
+                if not term.is_positive() or not term.dependency.is_root
+            ]
+
+        if len(terms) != 1 and (
+            # Short-circuit in the common case of a two-term incompatibility with
+            # two different packages (for example, a dependency).
+            len(terms) != 2
+            or terms[0].dependency.complete_name == terms[-1].dependency.complete_name
+        ):
+            # Coalesce multiple terms about the same package if possible.
+            by_name: dict[str, dict[str, Term]] = {}
+            for term in terms:
+                if term.dependency.complete_name not in by_name:
+                    by_name[term.dependency.complete_name] = {}
+
+                by_ref = by_name[term.dependency.complete_name]
+                ref = term.dependency.complete_name
+
+                if ref in by_ref:
+                    value = by_ref[ref].intersect(term)
+
+                    # If we have two terms that refer to the same package but have a
+                    # null intersection, they're mutually exclusive, making this
+                    # incompatibility irrelevant, since we already know that mutually
+                    # exclusive version ranges are incompatible. We should never derive
+                    # an irrelevant incompatibility.
+                    err_msg = f"Package '{ref}' is listed as a dependency of itself."
+                    assert value is not None, err_msg
+                    by_ref[ref] = value
+                else:
+                    by_ref[ref] = term
+
+            new_terms = []
+            for by_ref in by_name.values():
+                positive_terms = [
+                    term for term in by_ref.values() if term.is_positive()
+                ]
+                if positive_terms:
+                    new_terms += positive_terms
+                    continue
+
+                new_terms += list(by_ref.values())
+
+            terms = new_terms
+
+        self._terms = terms
+        self._cause = cause
+
+    @property
+    def terms(self) -> list[Term]:
+        return self._terms
+
+    @property
+    def cause(self) -> IncompatibilityCause:
+        return self._cause
+
+    @property
+    def external_incompatibilities(
+        self,
+    ) -> Iterator[Incompatibility]:
+        """
+        Returns all external incompatibilities in this incompatibility's
+        derivation graph.
+        """
+        if isinstance(self._cause, ConflictCause):
+            cause: ConflictCause = self._cause
+            yield from cause.conflict.external_incompatibilities
+
+            yield from cause.other.external_incompatibilities
+        else:
+            yield self
+
+    def is_failure(self) -> bool:
+        return len(self._terms) == 0 or (
+            len(self._terms) == 1 and self._terms[0].dependency.is_root
+        )
+
+    def __str__(self) -> str:
+        if isinstance(self._cause, DependencyCause):
+            assert len(self._terms) == 2
+
+            depender = self._terms[0]
+            dependee = self._terms[1]
+            assert depender.is_positive()
+            assert not dependee.is_positive()
+
+            return (
+                f"{self._terse(depender, allow_every=True)} depends on"
+                f" {self._terse(dependee)}"
+            )
+        elif isinstance(self._cause, PythonCause):
+            assert len(self._terms) == 1
+            assert self._terms[0].is_positive()
+
+            text = f"{self._terse(self._terms[0], allow_every=True)} requires "
+            text += f"Python {self._cause.python_version}"
+
+            return text
+        elif isinstance(self._cause, PlatformCause):
+            assert len(self._terms) == 1
+            assert self._terms[0].is_positive()
+
+            text = f"{self._terse(self._terms[0], allow_every=True)} requires "
+            text += f"platform {self._cause.platform}"
+
+            return text
+        elif isinstance(self._cause, NoVersionsCause):
+            assert len(self._terms) == 1
+            assert self._terms[0].is_positive()
+
+            return (
+                f"no versions of {self._terms[0].dependency.name} match"
+                f" {self._terms[0].constraint}"
+            )
+        elif isinstance(self._cause, PackageNotFoundCause):
+            assert len(self._terms) == 1
+            assert self._terms[0].is_positive()
+
+            return f"{self._terms[0].dependency.name} doesn't exist"
+        elif isinstance(self._cause, RootCause):
+            assert len(self._terms) == 1
+            assert not self._terms[0].is_positive()
+            assert self._terms[0].dependency.is_root
+
+            return (
+                f"{self._terms[0].dependency.name} is"
+                f" {self._terms[0].dependency.constraint}"
+            )
+        elif self.is_failure():
+            return "version solving failed"
+
+        if len(self._terms) == 1:
+            term = self._terms[0]
+            verb = "forbidden" if term.is_positive() else "required"
+            return f"{term.dependency.name} is {verb}"
+
+        if len(self._terms) == 2:
+            term1 = self._terms[0]
+            term2 = self._terms[1]
+
+            if term1.is_positive() == term2.is_positive():
+                if not term1.is_positive():
+                    return f"either {self._terse(term1)} or {self._terse(term2)}"
+
+                package1 = (
+                    term1.dependency.name
+                    if term1.constraint.is_any()
+                    else self._terse(term1)
+                )
+                package2 = (
+                    term2.dependency.name
+                    if term2.constraint.is_any()
+                    else self._terse(term2)
+                )
+
+                return f"{package1} is incompatible with {package2}"
+
+        positive = []
+        negative = []
+
+        for term in self._terms:
+            if term.is_positive():
+                positive.append(self._terse(term))
+            else:
+                negative.append(self._terse(term))
+
+        if positive and negative:
+            if len(positive) != 1:
+                return f"if {' and '.join(positive)} then {' or '.join(negative)}"
+
+            positive_term = [term for term in self._terms if term.is_positive()][0]
+            return (
+                f"{self._terse(positive_term, allow_every=True)} requires"
+                f" {' or '.join(negative)}"
+            )
+        elif positive:
+            return f"one of {' or '.join(positive)} must be false"
+        else:
+            return f"one of {' or '.join(negative)} must be true"
+
+    def and_to_string(
+        self,
+        other: Incompatibility,
+        this_line: int | None,
+        other_line: int | None,
+    ) -> str:
+        requires_both = self._try_requires_both(other, this_line, other_line)
+        if requires_both is not None:
+            return requires_both
+
+        requires_through = self._try_requires_through(other, this_line, other_line)
+        if requires_through is not None:
+            return requires_through
+
+        requires_forbidden = self._try_requires_forbidden(other, this_line, other_line)
+        if requires_forbidden is not None:
+            return requires_forbidden
+
+        buffer = [str(self)]
+        if this_line is not None:
+            buffer.append(f" {this_line!s}")
+
+        buffer.append(f" and {other!s}")
+
+        if other_line is not None:
+            buffer.append(f" {other_line!s}")
+
+        return "\n".join(buffer)
+
+    def _try_requires_both(
+        self,
+        other: Incompatibility,
+        this_line: int | None,
+        other_line: int | None,
+    ) -> str | None:
+        if len(self._terms) == 1 or len(other.terms) == 1:
+            return None
+
+        this_positive = self._single_term_where(lambda term: term.is_positive())
+        if this_positive is None:
+            return None
+
+        other_positive = other._single_term_where(lambda term: term.is_positive())
+        if other_positive is None:
+            return None
+
+        if this_positive.dependency != other_positive.dependency:
+            return None
+
+        this_negatives = " or ".join(
+            [self._terse(term) for term in self._terms if not term.is_positive()]
+        )
+
+        other_negatives = " or ".join(
+            [self._terse(term) for term in other.terms if not term.is_positive()]
+        )
+
+        buffer = [self._terse(this_positive, allow_every=True) + " "]
+        is_dependency = isinstance(self.cause, DependencyCause) and isinstance(
+            other.cause, DependencyCause
+        )
+
+        if is_dependency:
+            buffer.append("depends on")
+        else:
+            buffer.append("requires")
+
+        buffer.append(f" both {this_negatives}")
+        if this_line is not None:
+            buffer.append(f" ({this_line})")
+
+        buffer.append(f" and {other_negatives}")
+
+        if other_line is not None:
+            buffer.append(f" ({other_line})")
+
+        return "".join(buffer)
+
+    def _try_requires_through(
+        self,
+        other: Incompatibility,
+        this_line: int | None,
+        other_line: int | None,
+    ) -> str | None:
+        if len(self._terms) == 1 or len(other.terms) == 1:
+            return None
+
+        this_negative = self._single_term_where(lambda term: not term.is_positive())
+        other_negative = other._single_term_where(lambda term: not term.is_positive())
+
+        if this_negative is None and other_negative is None:
+            return None
+
+        this_positive = self._single_term_where(lambda term: term.is_positive())
+        other_positive = other._single_term_where(lambda term: term.is_positive())
+
+        if (
+            this_negative is not None
+            and other_positive is not None
+            and this_negative.dependency.name == other_positive.dependency.name
+            and this_negative.inverse.satisfies(other_positive)
+        ):
+            prior = self
+            prior_negative = this_negative
+            prior_line = this_line
+            latter = other
+            latter_line = other_line
+        elif (
+            other_negative is not None
+            and this_positive is not None
+            and other_negative.dependency.name == this_positive.dependency.name
+            and other_negative.inverse.satisfies(this_positive)
+        ):
+            prior = other
+            prior_negative = other_negative
+            prior_line = other_line
+            latter = self
+            latter_line = this_line
+        else:
+            return None
+
+        prior_positives = [term for term in prior.terms if term.is_positive()]
+
+        buffer = []
+        if len(prior_positives) > 1:
+            prior_string = " or ".join([self._terse(term) for term in prior_positives])
+            buffer.append(f"if {prior_string} then ")
+        else:
+            if isinstance(prior.cause, DependencyCause):
+                verb = "depends on"
+            else:
+                verb = "requires"
+
+            buffer.append(
+                f"{self._terse(prior_positives[0], allow_every=True)} {verb} "
+            )
+
+        buffer.append(self._terse(prior_negative))
+        if prior_line is not None:
+            buffer.append(f" ({prior_line})")
+
+        buffer.append(" which ")
+
+        if isinstance(latter.cause, DependencyCause):
+            buffer.append("depends on ")
+        else:
+            buffer.append("requires ")
+
+        buffer.append(
+            " or ".join(
+                [self._terse(term) for term in latter.terms if not term.is_positive()]
+            )
+        )
+
+        if latter_line is not None:
+            buffer.append(f" ({latter_line})")
+
+        return "".join(buffer)
+
+    def _try_requires_forbidden(
+        self,
+        other: Incompatibility,
+        this_line: int | None,
+        other_line: int | None,
+    ) -> str | None:
+        if len(self._terms) != 1 and len(other.terms) != 1:
+            return None
+
+        if len(self.terms) == 1:
+            prior = other
+            latter = self
+            prior_line = other_line
+            latter_line = this_line
+        else:
+            prior = self
+            latter = other
+            prior_line = this_line
+            latter_line = other_line
+
+        negative = prior._single_term_where(lambda term: not term.is_positive())
+        if negative is None:
+            return None
+
+        if not negative.inverse.satisfies(latter.terms[0]):
+            return None
+
+        positives = [t for t in prior.terms if t.is_positive()]
+
+        buffer = []
+        if len(positives) > 1:
+            prior_string = " or ".join([self._terse(term) for term in positives])
+            buffer.append(f"if {prior_string} then ")
+        else:
+            buffer.append(self._terse(positives[0], allow_every=True))
+            if isinstance(prior.cause, DependencyCause):
+                buffer.append(" depends on ")
+            else:
+                buffer.append(" requires ")
+
+        buffer.append(self._terse(latter.terms[0]) + " ")
+        if prior_line is not None:
+            buffer.append(f"({prior_line}) ")
+
+        if isinstance(latter.cause, PythonCause):
+            cause: PythonCause = latter.cause
+            buffer.append(f"which requires Python {cause.python_version}")
+        elif isinstance(latter.cause, NoVersionsCause):
+            buffer.append("which doesn't match any versions")
+        elif isinstance(latter.cause, PackageNotFoundCause):
+            buffer.append("which doesn't exist")
+        else:
+            buffer.append("which is forbidden")
+
+        if latter_line is not None:
+            buffer.append(f" ({latter_line})")
+
+        return "".join(buffer)
+
+    def _terse(self, term: Term, allow_every: bool = False) -> str:
+        if allow_every and term.constraint.is_any():
+            return f"every version of {term.dependency.complete_name}"
+
+        if term.dependency.is_root:
+            pretty_name: str = term.dependency.pretty_name
+            return pretty_name
+
+        if term.dependency.source_type:
+            return str(term.dependency)
+        return f"{term.dependency.pretty_name} ({term.dependency.pretty_constraint})"
+
+    def _single_term_where(self, callable: Callable[[Term], bool]) -> Term | None:
+        found = None
+        for term in self._terms:
+            if not callable(term):
+                continue
+
+            if found is not None:
+                return None
+
+            found = term
+
+        return found
+
+    def __repr__(self) -> str:
+        return f""
diff --git a/vendor/poetry/src/poetry/mixology/incompatibility_cause.py b/vendor/poetry/src/poetry/mixology/incompatibility_cause.py
new file mode 100644
index 00000000..87eee3ee
--- /dev/null
+++ b/vendor/poetry/src/poetry/mixology/incompatibility_cause.py
@@ -0,0 +1,95 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from poetry.mixology.incompatibility import Incompatibility
+
+
+class IncompatibilityCause(Exception):
+    """
+    The reason an Incompatibility's terms are incompatible.
+    """
+
+
+class RootCause(IncompatibilityCause):
+    pass
+
+
+class NoVersionsCause(IncompatibilityCause):
+    pass
+
+
+class DependencyCause(IncompatibilityCause):
+    pass
+
+
+class ConflictCause(IncompatibilityCause):
+    """
+    The incompatibility was derived from two existing incompatibilities
+    during conflict resolution.
+    """
+
+    def __init__(self, conflict: Incompatibility, other: Incompatibility) -> None:
+        self._conflict = conflict
+        self._other = other
+
+    @property
+    def conflict(self) -> Incompatibility:
+        return self._conflict
+
+    @property
+    def other(self) -> Incompatibility:
+        return self._other
+
+    def __str__(self) -> str:
+        return str(self._conflict)
+
+
+class PythonCause(IncompatibilityCause):
+    """
+    The incompatibility represents a package's python constraint
+    (Python versions) being incompatible with the current python
+    version.
+    """
+
+    def __init__(self, python_version: str, root_python_version: str) -> None:
+        self._python_version = python_version
+        self._root_python_version = root_python_version
+
+    @property
+    def python_version(self) -> str:
+        return self._python_version
+
+    @property
+    def root_python_version(self) -> str:
+        return self._root_python_version
+
+
+class PlatformCause(IncompatibilityCause):
+    """
+    The incompatibility represents a package's platform constraint
+    (OS most likely) being incompatible with the current platform.
+    """
+
+    def __init__(self, platform: str) -> None:
+        self._platform = platform
+
+    @property
+    def platform(self) -> str:
+        return self._platform
+
+
+class PackageNotFoundCause(IncompatibilityCause):
+    """
+    The incompatibility represents a package that couldn't be found by its
+    source.
+    """
+
+    def __init__(self, error: Exception) -> None:
+        self._error = error
+
+    @property
+    def error(self) -> Exception:
+        return self._error
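These cause classes are plain carriers of context; a quick sketch of the attributes the reporting code above reads, with illustrative values:

    from poetry.mixology.incompatibility_cause import PlatformCause, PythonCause

    py = PythonCause(python_version=">=3.10", root_python_version=">=3.8")
    assert (py.python_version, py.root_python_version) == (">=3.10", ">=3.8")

    plat = PlatformCause("linux")
    assert plat.platform == "linux"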
diff --git a/vendor/poetry/src/poetry/mixology/partial_solution.py b/vendor/poetry/src/poetry/mixology/partial_solution.py
new file mode 100644
index 00000000..135a7658
--- /dev/null
+++ b/vendor/poetry/src/poetry/mixology/partial_solution.py
@@ -0,0 +1,215 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.mixology.assignment import Assignment
+from poetry.mixology.set_relation import SetRelation
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.dependency import Dependency
+    from poetry.core.packages.package import Package
+
+    from poetry.mixology.incompatibility import Incompatibility
+    from poetry.mixology.term import Term
+
+
+class PartialSolution:
+    """
+    A list of Assignments that represent the solver's current best guess about
+    what's true for the eventual set of package versions that will comprise the
+    total solution.
+
+    See:
+    https://github.com/dart-lang/pub/tree/master/doc/solver.md#partial-solution.
+    """
+
+    def __init__(self) -> None:
+        # The assignments that have been made so far, in the order they were
+        # assigned.
+        self._assignments: list[Assignment] = []
+
+        # The decisions made for each package.
+        self._decisions: dict[str, Package] = {}
+
+        # The intersection of all positive Assignments for each package, minus any
+        # negative Assignments that refer to that package.
+        #
+        # This is derived from self._assignments.
+        self._positive: dict[str, Term] = {}
+
+        # The union of all negative Assignments for each package.
+        #
+        # If a package has any positive Assignments, it doesn't appear in this
+        # map.
+        #
+        # This is derived from self._assignments.
+        self._negative: dict[str, Term] = {}
+
+        # The number of distinct solutions that have been attempted so far.
+        self._attempted_solutions = 1
+
+        # Whether the solver is currently backtracking.
+        self._backtracking = False
+
+    @property
+    def decisions(self) -> list[Package]:
+        return list(self._decisions.values())
+
+    @property
+    def decision_level(self) -> int:
+        return len(self._decisions)
+
+    @property
+    def attempted_solutions(self) -> int:
+        return self._attempted_solutions
+
+    @property
+    def unsatisfied(self) -> list[Dependency]:
+        return [
+            term.dependency
+            for term in self._positive.values()
+            if term.dependency.complete_name not in self._decisions
+        ]
+
+    def decide(self, package: Package) -> None:
+        """
+        Adds an assignment of package as a decision
+        and increments the decision level.
+        """
+        # When we make a new decision after backtracking, count an additional
+        # attempted solution. If we backtrack multiple times in a row, though, we
+        # only want to count one, since we haven't actually started attempting a
+        # new solution.
+        if self._backtracking:
+            self._attempted_solutions += 1
+
+        self._backtracking = False
+        self._decisions[package.complete_name] = package
+
+        self._assign(
+            Assignment.decision(package, self.decision_level, len(self._assignments))
+        )
+
+    def derive(
+        self, dependency: Dependency, is_positive: bool, cause: Incompatibility
+    ) -> None:
+        """
+        Adds an assignment of package as a derivation.
+        """
+        self._assign(
+            Assignment.derivation(
+                dependency,
+                is_positive,
+                cause,
+                self.decision_level,
+                len(self._assignments),
+            )
+        )
+
+    def _assign(self, assignment: Assignment) -> None:
+        """
+        Adds an Assignment to _assignments and _positive or _negative.
+        """
+        self._assignments.append(assignment)
+        self._register(assignment)
+
+    def backtrack(self, decision_level: int) -> None:
+        """
+        Resets the current decision level to decision_level, and removes all
+        assignments made after that level.
+        """
+        self._backtracking = True
+
+        packages = set()
+        while self._assignments[-1].decision_level > decision_level:
+            removed = self._assignments.pop(-1)
+            packages.add(removed.dependency.complete_name)
+            if removed.is_decision():
+                del self._decisions[removed.dependency.complete_name]
+
+        # Re-compute _positive and _negative for the packages that were removed.
+        for package in packages:
+            if package in self._positive:
+                del self._positive[package]
+
+            if package in self._negative:
+                del self._negative[package]
+
+        for assignment in self._assignments:
+            if assignment.dependency.complete_name in packages:
+                self._register(assignment)
+
+    def _register(self, assignment: Assignment) -> None:
+        """
+        Registers an Assignment in _positive or _negative.
+        """
+        name = assignment.dependency.complete_name
+        old_positive = self._positive.get(name)
+        if old_positive is not None:
+            value = old_positive.intersect(assignment)
+            assert value is not None
+            self._positive[name] = value
+
+            return
+
+        old_negative = self._negative.get(name)
+        term = (
+            assignment if old_negative is None else assignment.intersect(old_negative)
+        )
+        assert term is not None
+
+        if term.is_positive():
+            if name in self._negative:
+                del self._negative[name]
+
+            self._positive[name] = term
+        else:
+            self._negative[name] = term
+
+    def satisfier(self, term: Term) -> Assignment:
+        """
+        Returns the first Assignment in this solution such that the sublist of
+        assignments up to and including that entry collectively satisfies term.
+        """
+        assigned_term = None
+
+        for assignment in self._assignments:
+            if assignment.dependency.complete_name != term.dependency.complete_name:
+                continue
+
+            if (
+                not assignment.dependency.is_root
+                and not assignment.dependency.is_same_package_as(term.dependency)
+            ):
+                if not assignment.is_positive():
+                    continue
+
+                assert not term.is_positive()
+
+                return assignment
+
+            if assigned_term is None:
+                assigned_term = assignment
+            else:
+                assigned_term = assigned_term.intersect(assignment)
+
+            # As soon as we have enough assignments to satisfy term, return them.
+            if assigned_term.satisfies(term):
+                return assignment
+
+        raise RuntimeError(f"[BUG] {term} is not satisfied.")
+
+    def satisfies(self, term: Term) -> bool:
+        return self.relation(term) == SetRelation.SUBSET
+
+    def relation(self, term: Term) -> str:
+        positive = self._positive.get(term.dependency.complete_name)
+        if positive is not None:
+            return positive.relation(term)
+
+        negative = self._negative.get(term.dependency.complete_name)
+        if negative is None:
+            return SetRelation.OVERLAPPING
+
+        return negative.relation(term)
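A minimal sketch of the bookkeeping, assuming poetry-core and the vendored mixology package are importable; the package is hypothetical:

    from poetry.core.packages.package import Package
    from poetry.mixology.partial_solution import PartialSolution

    solution = PartialSolution()
    solution.decide(Package("demo", "1.0.0"))

    assert solution.decision_level == 1
    assert [p.name for p in solution.decisions] == ["demo"]
    # The decision satisfies its own positive term, so nothing is unsatisfied.
    assert solution.unsatisfied == []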
diff --git a/vendor/poetry/src/poetry/mixology/result.py b/vendor/poetry/src/poetry/mixology/result.py
new file mode 100644
index 00000000..b4db7acb
--- /dev/null
+++ b/vendor/poetry/src/poetry/mixology/result.py
@@ -0,0 +1,28 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.package import Package
+    from poetry.core.packages.project_package import ProjectPackage
+
+
+class SolverResult:
+    def __init__(
+        self,
+        root: ProjectPackage,
+        packages: list[Package],
+        attempted_solutions: int,
+    ) -> None:
+        self._root = root
+        self._packages = packages
+        self._attempted_solutions = attempted_solutions
+
+    @property
+    def packages(self) -> list[Package]:
+        return self._packages
+
+    @property
+    def attempted_solutions(self) -> int:
+        return self._attempted_solutions
diff --git a/vendor/poetry/poetry/mixology/set_relation.py b/vendor/poetry/src/poetry/mixology/set_relation.py
similarity index 82%
rename from vendor/poetry/poetry/mixology/set_relation.py
rename to vendor/poetry/src/poetry/mixology/set_relation.py
index 4bd333bc..a71e8261 100644
--- a/vendor/poetry/poetry/mixology/set_relation.py
+++ b/vendor/poetry/src/poetry/mixology/set_relation.py
@@ -1,3 +1,6 @@
+from __future__ import annotations
+
+
 class SetRelation:
     """
     An enum of possible relationships between two sets.
diff --git a/vendor/poetry/src/poetry/mixology/solutions/__init__.py b/vendor/poetry/src/poetry/mixology/solutions/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/mixology/solutions/providers/__init__.py b/vendor/poetry/src/poetry/mixology/solutions/providers/__init__.py
new file mode 100644
index 00000000..cfbd1873
--- /dev/null
+++ b/vendor/poetry/src/poetry/mixology/solutions/providers/__init__.py
@@ -0,0 +1,8 @@
+from __future__ import annotations
+
+from poetry.mixology.solutions.providers.python_requirement_solution_provider import (
+    PythonRequirementSolutionProvider,
+)
+
+
+__all__ = ["PythonRequirementSolutionProvider"]
diff --git a/vendor/poetry/src/poetry/mixology/solutions/providers/python_requirement_solution_provider.py b/vendor/poetry/src/poetry/mixology/solutions/providers/python_requirement_solution_provider.py
new file mode 100644
index 00000000..dba0d584
--- /dev/null
+++ b/vendor/poetry/src/poetry/mixology/solutions/providers/python_requirement_solution_provider.py
@@ -0,0 +1,36 @@
+from __future__ import annotations
+
+import re
+
+from typing import TYPE_CHECKING
+
+from crashtest.contracts.has_solutions_for_exception import HasSolutionsForException
+
+
+if TYPE_CHECKING:
+    from crashtest.contracts.solution import Solution
+
+    from poetry.puzzle.exceptions import SolverProblemError
+
+
+class PythonRequirementSolutionProvider(HasSolutionsForException):  # type: ignore[misc]
+    def can_solve(self, exception: Exception) -> bool:
+        from poetry.puzzle.exceptions import SolverProblemError
+
+        if not isinstance(exception, SolverProblemError):
+            return False
+
+        m = re.match(
+            "^The current project's Python requirement (.+) is not compatible "
+            "with some of the required packages Python requirement",
+            str(exception),
+        )
+
+        return bool(m)
+
+    def get_solutions(self, exception: SolverProblemError) -> list[Solution]:
+        from poetry.mixology.solutions.solutions.python_requirement_solution import (
+            PythonRequirementSolution,
+        )
+
+        return [PythonRequirementSolution(exception)]
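The `can_solve` check is purely textual: the regex must keep matching the first line that `_Writer.write` produces for Python-requirement conflicts. A standalone check of that coupling, mirroring the message text from failure.py above:

    import re

    PATTERN = (
        "^The current project's Python requirement (.+) is not compatible "
        "with some of the required packages Python requirement"
    )
    message = (
        "The current project's Python requirement (>=3.8) is not compatible"
        " with some of the required packages Python requirement:"
    )
    assert re.match(PATTERN, message)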
diff --git a/vendor/poetry/src/poetry/mixology/solutions/solutions/__init__.py b/vendor/poetry/src/poetry/mixology/solutions/solutions/__init__.py
new file mode 100644
index 00000000..e78e9a53
--- /dev/null
+++ b/vendor/poetry/src/poetry/mixology/solutions/solutions/__init__.py
@@ -0,0 +1,8 @@
+from __future__ import annotations
+
+from poetry.mixology.solutions.solutions.python_requirement_solution import (
+    PythonRequirementSolution,
+)
+
+
+__all__ = ["PythonRequirementSolution"]
diff --git a/vendor/poetry/src/poetry/mixology/solutions/solutions/python_requirement_solution.py b/vendor/poetry/src/poetry/mixology/solutions/solutions/python_requirement_solution.py
new file mode 100644
index 00000000..cfade766
--- /dev/null
+++ b/vendor/poetry/src/poetry/mixology/solutions/solutions/python_requirement_solution.py
@@ -0,0 +1,63 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from crashtest.contracts.solution import Solution
+
+
+if TYPE_CHECKING:
+    from poetry.mixology.failure import SolveFailure
+    from poetry.puzzle.exceptions import SolverProblemError
+
+
+class PythonRequirementSolution(Solution):  # type: ignore[misc]
+    def __init__(self, exception: SolverProblemError) -> None:
+        from poetry.core.semver.helpers import parse_constraint
+
+        from poetry.mixology.incompatibility_cause import PythonCause
+
+        self._title = "Check your dependencies Python requirement."
+
+        failure: SolveFailure = exception.error
+        version_solutions = []
+        for incompatibility in failure._incompatibility.external_incompatibilities:
+            if isinstance(incompatibility.cause, PythonCause):
+                root_constraint = parse_constraint(
+                    incompatibility.cause.root_python_version
+                )
+                constraint = parse_constraint(incompatibility.cause.python_version)
+
+                version_solutions.append(
+                    "For "
+                    f"{incompatibility.terms[0].dependency.name},"
+                    " a possible solution would be to set the"
+                    " `python` property to"
+                    f' "{root_constraint.intersect(constraint)}"'
+                )
+
+        description = (
+            "The Python requirement can be specified via the"
+            " `python` or"
+            " `markers` properties"
+        )
+        if version_solutions:
+            description += "\n\n" + "\n".join(version_solutions)
+
+        description += "\n"
+
+        self._description = description
+
+    @property
+    def solution_title(self) -> str:
+        return self._title
+
+    @property
+    def solution_description(self) -> str:
+        return self._description
+
+    @property
+    def documentation_links(self) -> list[str]:
+        return [
+            "https://python-poetry.org/docs/dependency-specification/#python-restricted-dependencies",  # noqa: E501
+            "https://python-poetry.org/docs/dependency-specification/#using-environment-markers",  # noqa: E501
+        ]
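The suggested `python` value is just the intersection of the project's and the dependency's constraints; standalone, with arbitrary versions:

    from poetry.core.semver.helpers import parse_constraint

    root = parse_constraint(">=3.8,<4.0")  # project python constraint
    pkg = parse_constraint(">=3.10")       # dependency python constraint

    # The narrowest `python` property that satisfies both:
    print(root.intersect(pkg))  # >=3.10,<4.0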
diff --git a/vendor/poetry/src/poetry/mixology/term.py b/vendor/poetry/src/poetry/mixology/term.py
new file mode 100644
index 00000000..d5c4577c
--- /dev/null
+++ b/vendor/poetry/src/poetry/mixology/term.py
@@ -0,0 +1,187 @@
+from __future__ import annotations
+
+import functools
+
+from typing import TYPE_CHECKING
+
+from poetry.mixology.set_relation import SetRelation
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.dependency import Dependency
+    from poetry.core.semver.version_constraint import VersionConstraint
+
+
+class Term:
+    """
+    A statement about a package which is true or false for a given selection of
+    package versions.
+
+    See https://github.com/dart-lang/pub/tree/master/doc/solver.md#term.
+    """
+
+    def __init__(self, dependency: Dependency, is_positive: bool) -> None:
+        self._dependency = dependency
+        self._positive = is_positive
+        self.relation = functools.lru_cache(maxsize=None)(self._relation)
+        self.intersect = functools.lru_cache(maxsize=None)(self._intersect)
+
+    @property
+    def inverse(self) -> Term:
+        return Term(self._dependency, not self.is_positive())
+
+    @property
+    def dependency(self) -> Dependency:
+        return self._dependency
+
+    @property
+    def constraint(self) -> VersionConstraint:
+        return self._dependency.constraint
+
+    def is_positive(self) -> bool:
+        return self._positive
+
+    def satisfies(self, other: Term) -> bool:
+        """
+        Returns whether this term satisfies another.
+        """
+        return (
+            self.dependency.complete_name == other.dependency.complete_name
+            and self.relation(other) == SetRelation.SUBSET
+        )
+
+    def _relation(self, other: Term) -> str:
+        """
+        Returns the relationship between the package versions
+        allowed by this term and another.
+        """
+        if self.dependency.complete_name != other.dependency.complete_name:
+            raise ValueError(f"{other} should refer to {self.dependency.complete_name}")
+
+        other_constraint = other.constraint
+
+        if other.is_positive():
+            if self.is_positive():
+                if not self._compatible_dependency(other.dependency):
+                    return SetRelation.DISJOINT
+
+                # foo ^1.5.0 is a subset of foo ^1.0.0
+                if other_constraint.allows_all(self.constraint):
+                    return SetRelation.SUBSET
+
+                # foo ^2.0.0 is disjoint with foo ^1.0.0
+                if not self.constraint.allows_any(other_constraint):
+                    return SetRelation.DISJOINT
+
+                return SetRelation.OVERLAPPING
+            else:
+                if not self._compatible_dependency(other.dependency):
+                    return SetRelation.OVERLAPPING
+
+                # not foo ^1.0.0 is disjoint with foo ^1.5.0
+                if self.constraint.allows_all(other_constraint):
+                    return SetRelation.DISJOINT
+
+                # not foo ^1.5.0 overlaps foo ^1.0.0
+                # not foo ^2.0.0 is a superset of foo ^1.5.0
+                return SetRelation.OVERLAPPING
+        else:
+            if self.is_positive():
+                if not self._compatible_dependency(other.dependency):
+                    return SetRelation.SUBSET
+
+                # foo ^2.0.0 is a subset of not foo ^1.0.0
+                if not other_constraint.allows_any(self.constraint):
+                    return SetRelation.SUBSET
+
+                # foo ^1.5.0 is disjoint with not foo ^1.0.0
+                if other_constraint.allows_all(self.constraint):
+                    return SetRelation.DISJOINT
+
+                # foo ^1.0.0 overlaps not foo ^1.5.0
+                return SetRelation.OVERLAPPING
+            else:
+                if not self._compatible_dependency(other.dependency):
+                    return SetRelation.OVERLAPPING
+
+                # not foo ^1.0.0 is a subset of not foo ^1.5.0
+                if self.constraint.allows_all(other_constraint):
+                    return SetRelation.SUBSET
+
+                # not foo ^2.0.0 overlaps not foo ^1.0.0
+                # not foo ^1.5.0 is a superset of not foo ^1.0.0
+                return SetRelation.OVERLAPPING
+
+    def _intersect(self, other: Term) -> Term | None:
+        """
+        Returns a Term that represents the packages
+        allowed by both this term and another
+        """
+        if self.dependency.complete_name != other.dependency.complete_name:
+            raise ValueError(f"{other} should refer to {self.dependency.complete_name}")
+
+        if self._compatible_dependency(other.dependency):
+            if self.is_positive() != other.is_positive():
+                # foo ^1.0.0 ∩ not foo ^1.5.0 → foo >=1.0.0 <1.5.0
+                positive = self if self.is_positive() else other
+                negative = other if self.is_positive() else self
+
+                return self._non_empty_term(
+                    positive.constraint.difference(negative.constraint), True, other
+                )
+            elif self.is_positive():
+                # foo ^1.0.0 ∩ foo >=1.5.0 <3.0.0 → foo ^1.5.0
+                return self._non_empty_term(
+                    self.constraint.intersect(other.constraint), True, other
+                )
+            else:
+                # not foo ^1.0.0 ∩ not foo >=1.5.0 <3.0.0 → not foo >=1.0.0 <3.0.0
+                return self._non_empty_term(
+                    self.constraint.union(other.constraint), False, other
+                )
+        elif self.is_positive() != other.is_positive():
+            return self if self.is_positive() else other
+        else:
+            return None
+
+    def difference(self, other: Term) -> Term | None:
+        """
+        Returns a Term that represents packages
+        allowed by this term and not by the other
+        """
+        return self.intersect(other.inverse)
+
+    def _compatible_dependency(self, other: Dependency) -> bool:
+        return (
+            self.dependency.is_root
+            or other.is_root
+            or other.is_same_package_as(self.dependency)
+            or (
+                # we do this here to indicate direct origin dependencies are
+                # compatible with NVR dependencies
+                self.dependency.complete_name == other.complete_name
+                and self.dependency.is_direct_origin() != other.is_direct_origin()
+            )
+        )
+
+    def _non_empty_term(
+        self, constraint: VersionConstraint, is_positive: bool, other: Term
+    ) -> Term | None:
+        if constraint.is_empty():
+            return None
+
+        # when creating a new term prefer direct-reference dependencies
+        dependency = (
+            other.dependency
+            if not self.dependency.is_direct_origin()
+            and other.dependency.is_direct_origin()
+            else self.dependency
+        )
+        return Term(dependency.with_constraint(constraint), is_positive)
+
+    def __str__(self) -> str:
+        prefix = "not " if not self.is_positive() else ""
+        return f"{prefix}{self._dependency}"
+
+    def __repr__(self) -> str:
+        return f""
diff --git a/vendor/poetry/src/poetry/mixology/version_solver.py b/vendor/poetry/src/poetry/mixology/version_solver.py
new file mode 100644
index 00000000..7ff1d13b
--- /dev/null
+++ b/vendor/poetry/src/poetry/mixology/version_solver.py
@@ -0,0 +1,521 @@
+from __future__ import annotations
+
+import functools
+import time
+
+from contextlib import suppress
+from typing import TYPE_CHECKING
+
+from poetry.core.packages.dependency import Dependency
+
+from poetry.mixology.failure import SolveFailure
+from poetry.mixology.incompatibility import Incompatibility
+from poetry.mixology.incompatibility_cause import ConflictCause
+from poetry.mixology.incompatibility_cause import NoVersionsCause
+from poetry.mixology.incompatibility_cause import PackageNotFoundCause
+from poetry.mixology.incompatibility_cause import RootCause
+from poetry.mixology.partial_solution import PartialSolution
+from poetry.mixology.result import SolverResult
+from poetry.mixology.set_relation import SetRelation
+from poetry.mixology.term import Term
+from poetry.packages import DependencyPackage
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.project_package import ProjectPackage
+
+    from poetry.puzzle.provider import Provider
+
+
+_conflict = object()
+
+
+class DependencyCache:
+    """
+    A cache of the valid dependencies.
+
+    The key observation here is that during the search - except at backtracking
+    - once we have decided that a dependency is invalid, we never need to check
+    it again.
+    """
+
+    def __init__(self, provider: Provider) -> None:
+        self.provider = provider
+        self.cache: dict[
+            tuple[str, str | None, str | None, str | None, str | None],
+            list[DependencyPackage],
+        ] = {}
+
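+        # Two layers of caching: `search_for` memoizes exact Dependency
+        # lookups via lru_cache, while self.cache stores results per package
+        # source so that a later, narrower constraint can be answered by
+        # filtering earlier results instead of re-querying the provider.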
+        self.search_for = functools.lru_cache(maxsize=128)(self._search_for)
+
+    def _search_for(self, dependency: Dependency) -> list[DependencyPackage]:
+        key = (
+            dependency.complete_name,
+            dependency.source_type,
+            dependency.source_url,
+            dependency.source_reference,
+            dependency.source_subdirectory,
+        )
+
+        packages = self.cache.get(key)
+        if packages is None:
+            packages = self.provider.search_for(dependency)
+        else:
+            packages = [
+                p for p in packages if dependency.constraint.allows(p.package.version)
+            ]
+
+        self.cache[key] = packages
+
+        return packages
+
+    def clear(self) -> None:
+        self.cache.clear()
+
+
+class VersionSolver:
+    """
+    The version solver that finds a set of package versions that satisfy the
+    root package's dependencies.
+
+    See https://github.com/dart-lang/pub/tree/master/doc/solver.md for details
+    on how this solver works.
+    """
+
+    def __init__(
+        self,
+        root: ProjectPackage,
+        provider: Provider,
+        locked: dict[str, list[DependencyPackage]] | None = None,
+        use_latest: list[str] | None = None,
+    ) -> None:
+        self._root = root
+        self._provider = provider
+        self._dependency_cache = DependencyCache(provider)
+        self._locked = locked or {}
+
+        if use_latest is None:
+            use_latest = []
+
+        self._use_latest = use_latest
+
+        self._incompatibilities: dict[str, list[Incompatibility]] = {}
+        self._contradicted_incompatibilities: set[Incompatibility] = set()
+        self._solution = PartialSolution()
+
+    @property
+    def solution(self) -> PartialSolution:
+        return self._solution
+
+    def solve(self) -> SolverResult:
+        """
+        Finds a set of dependencies that match the root package's constraints,
+        or raises an error if no such set is available.
+        """
+        start = time.time()
+        root_dependency = Dependency(self._root.name, self._root.version)
+        root_dependency.is_root = True
+
+        self._add_incompatibility(
+            Incompatibility([Term(root_dependency, False)], RootCause())
+        )
+
+        try:
+            next: str | None = self._root.name
+            while next is not None:
+                self._propagate(next)
+                next = self._choose_package_version()
+
+            return self._result()
+        except Exception:
+            raise
+        finally:
+            self._log(
+                f"Version solving took {time.time() - start:.3f} seconds.\n"
+                f"Tried {self._solution.attempted_solutions} solutions."
+            )
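+
+    # Call-site sketch (illustrative; `root` and `provider` come from the
+    # puzzle solver that owns this VersionSolver):
+    #
+    #     version_solver = VersionSolver(root, provider)
+    #     try:
+    #         result = version_solver.solve()
+    #     except SolveFailure as e:
+    #         ...  # str(e) describes the root incompatibility
+    #     else:
+    #         packages = result.packages  # the decided package versions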
+
+    def _propagate(self, package: str) -> None:
+        """
+        Performs unit propagation on incompatibilities transitively
+        related to package to derive new assignments for _solution.
+        """
+        changed = {package}
+        while changed:
+            package = changed.pop()
+
+            # Iterate in reverse because conflict resolution tends to produce more
+            # general incompatibilities as time goes on. If we look at those first,
+            # we can derive stronger assignments sooner and more eagerly find
+            # conflicts.
+            for incompatibility in reversed(self._incompatibilities[package]):
+                if incompatibility in self._contradicted_incompatibilities:
+                    continue
+
+                result = self._propagate_incompatibility(incompatibility)
+
+                if result is _conflict:
+                    # If the incompatibility is satisfied by the solution, we use
+                    # _resolve_conflict() to determine the root cause of the conflict as
+                    # a new incompatibility.
+                    #
+                    # It also backjumps to a point in the solution
+                    # where that incompatibility will allow us to derive new assignments
+                    # that avoid the conflict.
+                    root_cause = self._resolve_conflict(incompatibility)
+
+                    # Back jumping erases all the assignments we did at the previous
+                    # decision level, so we clear [changed] and refill it with the
+                    # newly-propagated assignment.
+                    changed.clear()
+                    changed.add(str(self._propagate_incompatibility(root_cause)))
+                    break
+                elif result is not None:
+                    changed.add(str(result))
+
+    def _propagate_incompatibility(
+        self, incompatibility: Incompatibility
+    ) -> str | object | None:
+        """
+        If incompatibility is almost satisfied by _solution, adds the
+        negation of the unsatisfied term to _solution.
+
+        If incompatibility is satisfied by _solution, returns _conflict. If
+        incompatibility is almost satisfied by _solution, returns the
+        unsatisfied term's package name.
+
+        Otherwise, returns None.
+        """
+        # The first entry in incompatibility.terms that's not yet satisfied by
+        # _solution, if one exists. If we find more than one, _solution is
+        # inconclusive for incompatibility and we can't deduce anything.
+        unsatisfied = None
+
+        for term in incompatibility.terms:
+            relation = self._solution.relation(term)
+
+            if relation == SetRelation.DISJOINT:
+                # If term is already contradicted by _solution, then
+                # incompatibility is contradicted as well and there's nothing new we
+                # can deduce from it.
+                self._contradicted_incompatibilities.add(incompatibility)
+                return None
+            elif relation == SetRelation.OVERLAPPING:
+                # If more than one term is inconclusive, we can't deduce anything about
+                # incompatibility.
+                if unsatisfied is not None:
+                    return None
+
+                # If exactly one term in incompatibility is inconclusive, then it's
+                # almost satisfied and [term] is the unsatisfied term. We can add the
+                # inverse of the term to _solution.
+                unsatisfied = term
+
+        # If *all* terms in incompatibility are satisfied by _solution, then
+        # incompatibility is satisfied and we have a conflict.
+        if unsatisfied is None:
+            return _conflict
+
+        self._contradicted_incompatibilities.add(incompatibility)
+
+        adverb = "not " if unsatisfied.is_positive() else ""
+        self._log(f"derived: {adverb}{unsatisfied.dependency}")
+
+        self._solution.derive(
+            unsatisfied.dependency, not unsatisfied.is_positive(), incompatibility
+        )
+
+        complete_name: str = unsatisfied.dependency.complete_name
+        return complete_name
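+
+    # Worked example: for the incompatibility {foo any, not bar ^1.0}
+    # ("foo depends on bar ^1.0"), once the solution selects any version of
+    # foo, the only unsatisfied term is "not bar ^1.0". Its inverse, bar ^1.0,
+    # is derived, and "bar" is returned so propagation can continue there.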
+
+    def _resolve_conflict(self, incompatibility: Incompatibility) -> Incompatibility:
+        """
+        Given an incompatibility that's satisfied by _solution,
+        `conflict resolution`_ constructs a new incompatibility that encapsulates
+        the root cause of the conflict and backtracks _solution until the new
+        incompatibility will allow _propagate() to deduce new assignments.
+
+        Adds the new incompatibility to _incompatibilities and returns it.
+
+        .. _conflict resolution:
+        https://github.com/dart-lang/pub/tree/master/doc/solver.md#conflict-resolution
+        """
+        self._log(f"conflict: {incompatibility}")
+
+        new_incompatibility = False
+        while not incompatibility.is_failure():
+            # The term in incompatibility.terms that was most recently satisfied by
+            # _solution.
+            most_recent_term = None
+
+            # The earliest assignment in _solution such that incompatibility is
+            # satisfied by _solution up to and including this assignment.
+            most_recent_satisfier = None
+
+            # The difference between most_recent_satisfier and most_recent_term;
+            # that is, the versions that are allowed by most_recent_satisfier and not
+            # by most_recent_term. This is None if most_recent_satisfier totally
+            # satisfies most_recent_term.
+            difference = None
+
+            # The decision level of the earliest assignment in _solution *before*
+            # most_recent_satisfier such that incompatibility is satisfied by
+            # _solution up to and including this assignment plus
+            # most_recent_satisfier.
+            #
+            # Decision level 1 is the level where the root package was selected. It's
+            # safe to go back to decision level 0, but stopping at 1 tends to produce
+            # better error messages, because references to the root package end up
+            # closer to the final conclusion that no solution exists.
+            previous_satisfier_level = 1
+
+            for term in incompatibility.terms:
+                satisfier = self._solution.satisfier(term)
+
+                if most_recent_satisfier is None:
+                    most_recent_term = term
+                    most_recent_satisfier = satisfier
+                elif most_recent_satisfier.index < satisfier.index:
+                    previous_satisfier_level = max(
+                        previous_satisfier_level, most_recent_satisfier.decision_level
+                    )
+                    most_recent_term = term
+                    most_recent_satisfier = satisfier
+                    difference = None
+                else:
+                    previous_satisfier_level = max(
+                        previous_satisfier_level, satisfier.decision_level
+                    )
+
+                if most_recent_term == term:
+                    # If most_recent_satisfier doesn't satisfy most_recent_term on its
+                    # own, then the next-most-recent satisfier may be the one that
+                    # satisfies the remainder.
+                    difference = most_recent_satisfier.difference(most_recent_term)
+                    if difference is not None:
+                        previous_satisfier_level = max(
+                            previous_satisfier_level,
+                            self._solution.satisfier(difference.inverse).decision_level,
+                        )
+
+            # If most_recent_satisfier is the only satisfier left at its decision
+            # level, or if it has no cause (indicating that it's a decision rather
+            # than a derivation), then incompatibility is the root cause. We then
+            # backjump to previous_satisfier_level, where incompatibility is
+            # guaranteed to allow _propagate to produce more assignments.
+
+            # using assert to suppress mypy [union-attr]
+            assert most_recent_satisfier is not None
+            if (
+                previous_satisfier_level < most_recent_satisfier.decision_level
+                or most_recent_satisfier.cause is None
+            ):
+                self._solution.backtrack(previous_satisfier_level)
+                self._contradicted_incompatibilities.clear()
+                self._dependency_cache.clear()
+                if new_incompatibility:
+                    self._add_incompatibility(incompatibility)
+
+                return incompatibility
+
+            # Create a new incompatibility by combining incompatibility with the
+            # incompatibility that caused most_recent_satisfier to be assigned. Doing
+            # this iteratively constructs an incompatibility that's guaranteed to be
+            # true (that is, we know for sure no solution will satisfy the
+            # incompatibility) while also approximating the intuitive notion of the
+            # "root cause" of the conflict.
+            new_terms = [
+                term for term in incompatibility.terms if term != most_recent_term
+            ]
+
+            for term in most_recent_satisfier.cause.terms:
+                if term.dependency != most_recent_satisfier.dependency:
+                    new_terms.append(term)
+
+            # The most_recent_satisfier may not satisfy most_recent_term on its own
+            # if there is a collection of constraints on most_recent_term that
+            # only satisfy it together. For example, if most_recent_term is
+            # `foo ^1.0.0` and _solution contains `[foo >=1.0.0,
+            # foo <2.0.0]`, then most_recent_satisfier will be `foo <2.0.0` even
+            # though it doesn't totally satisfy `foo ^1.0.0`.
+            #
+            # In this case, we add `not (most_recent_satisfier \ most_recent_term)` to
+            # the incompatibility as well. See the `algorithm documentation`_ for
+            # details.
+            #
+            # .. _algorithm documentation:
+            # https://github.com/dart-lang/pub/tree/master/doc/solver.md#conflict-resolution  # noqa: E501
+            if difference is not None:
+                new_terms.append(difference.inverse)
+
+            incompatibility = Incompatibility(
+                new_terms, ConflictCause(incompatibility, most_recent_satisfier.cause)
+            )
+            new_incompatibility = True
+
+            partially = "" if difference is None else " partially"
+            self._log(
+                f"! {most_recent_term} is{partially} satisfied by"
+                f" {most_recent_satisfier}"
+            )
+            self._log(f'! which is caused by "{most_recent_satisfier.cause}"')
+            self._log(f"! thus: {incompatibility}")
+
+        raise SolveFailure(incompatibility)
+
+    def _choose_package_version(self) -> str | None:
+        """
+        Tries to select a version of a required package.
+
+        Returns the name of the package whose incompatibilities should be
+        propagated by _propagate(), or None indicating that version solving is
+        complete and a solution has been found.
+        """
+        unsatisfied = self._solution.unsatisfied
+        if not unsatisfied:
+            return None
+
+        # Prefer packages with as few remaining versions as possible,
+        # so that if a conflict is necessary it's forced quickly.
+        def _get_min(dependency: Dependency) -> tuple[bool, int]:
+            # Direct origin dependencies must be handled first: we don't want to resolve
+            # a regular dependency for some package only to find later that we had a
+            # direct-origin dependency.
+            if dependency.is_direct_origin():
+                return False, -1
+
+            if dependency.name in self._use_latest:
+                # If we're forced to use the latest version of a package, it effectively
+                # only has one version to choose from.
+                return not dependency.marker.is_any(), 1
+
+            locked = self._get_locked(dependency)
+            if locked:
+                return not dependency.marker.is_any(), 1
+
+            try:
+                return (
+                    not dependency.marker.is_any(),
+                    len(self._dependency_cache.search_for(dependency)),
+                )
+            except ValueError:
+                return not dependency.marker.is_any(), 0
+
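+        # Resulting sort keys from _get_min (illustrative): a direct-origin
+        # dependency maps to (False, -1) and is always handled first; locked
+        # and use-latest dependencies count as a single candidate version;
+        # everything else is ordered by marker specificity, then by how few
+        # candidate versions remain.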
+        if len(unsatisfied) == 1:
+            dependency = unsatisfied[0]
+        else:
+            dependency = min(*unsatisfied, key=_get_min)
+
+        locked = self._get_locked(dependency)
+        if locked is None:
+            try:
+                packages = self._dependency_cache.search_for(dependency)
+            except ValueError as e:
+                self._add_incompatibility(
+                    Incompatibility([Term(dependency, True)], PackageNotFoundCause(e))
+                )
+                complete_name: str = dependency.complete_name
+                return complete_name
+
+            # No locked version exists on this branch, so take the first
+            # candidate returned by the provider, if any.
+            package = None
+            with suppress(IndexError):
+                package = packages[0]
+
+            if package is None:
+                # If there are no versions that satisfy the constraint,
+                # add an incompatibility that indicates that.
+                self._add_incompatibility(
+                    Incompatibility([Term(dependency, True)], NoVersionsCause())
+                )
+
+                complete_name = dependency.complete_name
+                return complete_name
+        else:
+            package = locked
+
+        package = self._provider.complete_package(package)
+
+        conflict = False
+        for incompatibility in self._provider.incompatibilities_for(package):
+            self._add_incompatibility(incompatibility)
+
+            # If an incompatibility is already satisfied, then selecting this
+            # version would cause a conflict.
+            #
+            # We'll continue adding its dependencies, then go back to
+            # unit propagation which will guide us to choose a better version.
+            conflict = conflict or all(
+                term.dependency.complete_name == dependency.complete_name
+                or self._solution.satisfies(term)
+                for term in incompatibility.terms
+            )
+
+        if not conflict:
+            self._solution.decide(package.package)
+            self._log(
+                f"selecting {package.package.complete_name}"
+                f" ({package.package.full_pretty_version})"
+            )
+
+        complete_name = dependency.complete_name
+        return complete_name
+
+    def _result(self) -> SolverResult:
+        """
+        Creates a #SolverResult from the decisions in _solution
+        """
+        decisions = self._solution.decisions
+
+        return SolverResult(
+            self._root,
+            [p for p in decisions if not p.is_root()],
+            self._solution.attempted_solutions,
+        )
+
+    def _add_incompatibility(self, incompatibility: Incompatibility) -> None:
+        self._log(f"fact: {incompatibility}")
+
+        for term in incompatibility.terms:
+            if term.dependency.complete_name not in self._incompatibilities:
+                self._incompatibilities[term.dependency.complete_name] = []
+
+            if (
+                incompatibility
+                in self._incompatibilities[term.dependency.complete_name]
+            ):
+                continue
+
+            self._incompatibilities[term.dependency.complete_name].append(
+                incompatibility
+            )
+
+    def _get_locked(self, dependency: Dependency) -> DependencyPackage | None:
+        if dependency.name in self._use_latest:
+            return None
+
+        locked = self._locked.get(dependency.name, [])
+        for dependency_package in locked:
+            package = dependency_package.package
+            if (
+                # Locked dependencies are always without features.
+                # Thus, we can't use is_same_package_as() here because it compares
+                # the complete_name (including features).
+                dependency.name == package.name
+                and dependency.is_same_source_as(package)
+                and dependency.constraint.allows(package.version)
+            ):
+                return DependencyPackage(dependency, package)
+        return None
+
+    def _log(self, text: str) -> None:
+        self._provider.debug(text, self._solution.attempted_solutions)
diff --git a/vendor/poetry/src/poetry/packages/__init__.py b/vendor/poetry/src/poetry/packages/__init__.py
new file mode 100644
index 00000000..b97ce25b
--- /dev/null
+++ b/vendor/poetry/src/poetry/packages/__init__.py
@@ -0,0 +1,8 @@
+from __future__ import annotations
+
+from poetry.packages.dependency_package import DependencyPackage
+from poetry.packages.locker import Locker
+from poetry.packages.package_collection import PackageCollection
+
+
+__all__ = ["DependencyPackage", "Locker", "PackageCollection"]
diff --git a/vendor/poetry/src/poetry/packages/dependency_package.py b/vendor/poetry/src/poetry/packages/dependency_package.py
new file mode 100644
index 00000000..a06d28b6
--- /dev/null
+++ b/vendor/poetry/src/poetry/packages/dependency_package.py
@@ -0,0 +1,47 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.dependency import Dependency
+    from poetry.core.packages.package import Package
+
+
+class DependencyPackage:
+    def __init__(self, dependency: Dependency, package: Package) -> None:
+        self._dependency = dependency
+        self._package = package
+
+    @property
+    def dependency(self) -> Dependency:
+        return self._dependency
+
+    @property
+    def package(self) -> Package:
+        return self._package
+
+    def clone(self) -> DependencyPackage:
+        return self.__class__(self._dependency, self._package.clone())
+
+    def with_features(self, features: list[str]) -> DependencyPackage:
+        return self.__class__(self._dependency, self._package.with_features(features))
+
+    def without_features(self) -> DependencyPackage:
+        return self.with_features([])
+
+    def __str__(self) -> str:
+        return str(self._package)
+
+    def __repr__(self) -> str:
+        return repr(self._package)
+
+    def __hash__(self) -> int:
+        return hash(self._package)
+
+    def __eq__(self, other: object) -> bool:
+        if isinstance(other, DependencyPackage):
+            other = other.package
+
+        equal: bool = self._package == other
+        return equal
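+
+# Behaviour sketch (illustrative): the wrapper hashes and compares like the
+# wrapped package, so it can stand in for a Package in sets and dicts.
+#
+#     dp = DependencyPackage(dependency, package)
+#     assert dp == package
+#     assert hash(dp) == hash(package)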
diff --git a/vendor/poetry/src/poetry/packages/locker.py b/vendor/poetry/src/poetry/packages/locker.py
new file mode 100644
index 00000000..5a71f8a4
--- /dev/null
+++ b/vendor/poetry/src/poetry/packages/locker.py
@@ -0,0 +1,452 @@
+from __future__ import annotations
+
+import json
+import logging
+import os
+import re
+
+from hashlib import sha256
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
+from typing import cast
+
+from poetry.core.packages.dependency import Dependency
+from poetry.core.packages.package import Package
+from poetry.core.semver.helpers import parse_constraint
+from poetry.core.semver.version import Version
+from poetry.core.toml.file import TOMLFile
+from poetry.core.version.markers import parse_marker
+from poetry.core.version.requirements import InvalidRequirement
+from tomlkit import array
+from tomlkit import document
+from tomlkit import inline_table
+from tomlkit import item
+from tomlkit import table
+from tomlkit.exceptions import TOMLKitError
+from tomlkit.items import Array
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.directory_dependency import DirectoryDependency
+    from poetry.core.packages.file_dependency import FileDependency
+    from poetry.core.packages.url_dependency import URLDependency
+    from poetry.core.packages.vcs_dependency import VCSDependency
+    from tomlkit.items import Table
+    from tomlkit.toml_document import TOMLDocument
+
+    from poetry.repositories import Repository
+
+logger = logging.getLogger(__name__)
+
+
+class Locker:
+    _VERSION = "1.1"
+
+    _legacy_keys = ["dependencies", "source", "extras", "dev-dependencies"]
+    _relevant_keys = [*_legacy_keys, "group"]
+
+    def __init__(self, lock: str | Path, local_config: dict[str, Any]) -> None:
+        self._lock = TOMLFile(lock)
+        self._local_config = local_config
+        self._lock_data: TOMLDocument | None = None
+        self._content_hash = self._get_content_hash()
+
+    @property
+    def lock(self) -> TOMLFile:
+        return self._lock
+
+    @property
+    def lock_data(self) -> TOMLDocument:
+        if self._lock_data is None:
+            self._lock_data = self._get_lock_data()
+
+        return self._lock_data
+
+    def is_locked(self) -> bool:
+        """
+        Checks whether the locker has been locked (lockfile found).
+        """
+        if not self._lock.exists():
+            return False
+
+        return "package" in self.lock_data
+
+    def is_fresh(self) -> bool:
+        """
+        Checks whether the lock file is still up to date with the current hash.
+        """
+        lock = self._lock.read()
+        metadata = lock.get("metadata", {})
+
+        if "content-hash" in metadata:
+            fresh: bool = self._content_hash == metadata["content-hash"]
+            return fresh
+
+        return False
+
+    def locked_repository(self) -> Repository:
+        """
+        Searches and returns a repository of locked packages.
+        """
+        from poetry.factory import Factory
+        from poetry.repositories import Repository
+
+        if not self.is_locked():
+            return Repository("poetry-locked")
+
+        lock_data = self.lock_data
+        packages = Repository("poetry-locked")
+        locked_packages = cast("list[dict[str, Any]]", lock_data["package"])
+
+        if not locked_packages:
+            return packages
+
+        for info in locked_packages:
+            source = info.get("source", {})
+            source_type = source.get("type")
+            url = source.get("url")
+            if source_type in ["directory", "file"]:
+                url = self._lock.path.parent.joinpath(url).resolve().as_posix()
+
+            package = Package(
+                info["name"],
+                info["version"],
+                info["version"],
+                source_type=source_type,
+                source_url=url,
+                source_reference=source.get("reference"),
+                source_resolved_reference=source.get("resolved_reference"),
+                source_subdirectory=source.get("subdirectory"),
+            )
+            package.description = info.get("description", "")
+            package.category = info.get("category", "main")
+            package.optional = info["optional"]
+            metadata = cast("dict[str, Any]", lock_data["metadata"])
+            name = info["name"]
+            if "hashes" in metadata:
+                # Old lock so we create dummy files from the hashes
+                hashes = cast("dict[str, Any]", metadata["hashes"])
+                package.files = [{"name": h, "hash": h} for h in hashes[name]]
+            else:
+                files = metadata["files"][name]
+                package.files = files
+
+            package.python_versions = info["python-versions"]
+            extras = info.get("extras", {})
+            if extras:
+                for name, deps in extras.items():
+                    package.extras[name] = []
+
+                    for dep in deps:
+                        try:
+                            dependency = Dependency.create_from_pep_508(dep)
+                        except InvalidRequirement:
+                            # handle lock files with invalid PEP 508
+                            m = re.match(r"^(.+?)(?:\[(.+?)])?(?:\s+\((.+)\))?$", dep)
+                            if not m:
+                                raise
+                            dep_name = m.group(1)
+                            extras = m.group(2) or ""
+                            constraint = m.group(3) or "*"
+                            dependency = Dependency(
+                                dep_name, constraint, extras=extras.split(",")
+                            )
+                        package.extras[name].append(dependency)
+
+            if "marker" in info:
+                package.marker = parse_marker(info["marker"])
+            else:
+                # Compatibility for old locks
+                if "requirements" in info:
+                    dep = Dependency("foo", "0.0.0")
+                    for name, value in info["requirements"].items():
+                        if name == "python":
+                            dep.python_versions = value
+                        elif name == "platform":
+                            dep.platform = value
+
+                    split_dep = dep.to_pep_508(False).split(";")
+                    if len(split_dep) > 1:
+                        package.marker = parse_marker(split_dep[1].strip())
+
+            for dep_name, constraint in info.get("dependencies", {}).items():
+                root_dir = self._lock.path.parent
+                if package.source_type == "directory":
+                    # The root dir should be the source directory of the
+                    # package, relative to the lockfile path.
+                    assert package.source_url is not None
+                    root_dir = Path(package.source_url)
+
+                if isinstance(constraint, list):
+                    for c in constraint:
+                        package.add_dependency(
+                            Factory.create_dependency(dep_name, c, root_dir=root_dir)
+                        )
+
+                    continue
+
+                package.add_dependency(
+                    Factory.create_dependency(dep_name, constraint, root_dir=root_dir)
+                )
+
+            if "develop" in info:
+                package.develop = info["develop"]
+
+            packages.add_package(package)
+
+        return packages
+
+    def set_lock_data(self, root: Package, packages: list[Package]) -> bool:
+        files: dict[str, Any] = table()
+        package_specs = self._lock_packages(packages)
+        # Retrieving hashes
+        for package in package_specs:
+            if package["name"] not in files:
+                files[package["name"]] = []
+
+            for f in package["files"]:
+                file_metadata = inline_table()
+                for k, v in sorted(f.items()):
+                    file_metadata[k] = v
+
+                files[package["name"]].append(file_metadata)
+
+            if files[package["name"]]:
+                package_files = item(files[package["name"]])
+                assert isinstance(package_files, Array)
+                files[package["name"]] = package_files.multiline(True)
+
+            del package["files"]
+
+        lock = document()
+        lock["package"] = package_specs
+
+        if root.extras:
+            lock["extras"] = {
+                extra: [dep.pretty_name for dep in deps]
+                for extra, deps in sorted(root.extras.items())
+            }
+
+        lock["metadata"] = {
+            "lock-version": self._VERSION,
+            "python-versions": root.python_versions,
+            "content-hash": self._content_hash,
+            "files": files,
+        }
+
+        if not self.is_locked() or lock != self.lock_data:
+            self._write_lock_data(lock)
+
+            return True
+
+        return False
+
+    def _write_lock_data(self, data: TOMLDocument) -> None:
+        self.lock.write(data)
+
+        # Checking lock file data consistency
+        if data != self.lock.read():
+            raise RuntimeError("Inconsistent lock file data.")
+
+        self._lock_data = None
+
+    def _get_content_hash(self) -> str:
+        """
+        Returns the sha256 hash of the sorted content of the pyproject file.
+        """
+        content = self._local_config
+
+        relevant_content = {}
+        for key in self._relevant_keys:
+            data = content.get(key)
+
+            if data is None and key not in self._legacy_keys:
+                continue
+
+            relevant_content[key] = data
+
+        return sha256(json.dumps(relevant_content, sort_keys=True).encode()).hexdigest()
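+
+    # Illustrative: only the keys in _relevant_keys feed the hash, so editing
+    # e.g. tool.poetry.description does not invalidate the lock, while
+    # changing "dependencies" or "group" does.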
+
+    def _get_lock_data(self) -> TOMLDocument:
+        if not self._lock.exists():
+            raise RuntimeError("No lockfile found. Unable to read locked packages")
+
+        try:
+            lock_data: TOMLDocument = self._lock.read()
+        except TOMLKitError as e:
+            raise RuntimeError(f"Unable to read the lock file ({e}).")
+
+        metadata = cast("Table", lock_data["metadata"])
+        lock_version = Version.parse(metadata.get("lock-version", "1.0"))
+        current_version = Version.parse(self._VERSION)
+        # We expect the locker to be able to read lock files
+        # from the same semantic versioning range
+        accepted_versions = parse_constraint(
+            f"^{Version.from_parts(current_version.major, 0)}"
+        )
+        lock_version_allowed = accepted_versions.allows(lock_version)
+        if lock_version_allowed and current_version < lock_version:
+            logger.warning(
+                "The lock file might not be compatible with the current version of"
+                " Poetry.\nUpgrade Poetry to ensure the lock file is read properly or,"
+                " alternatively, regenerate the lock file with the `poetry lock`"
+                " command."
+            )
+        elif not lock_version_allowed:
+            raise RuntimeError(
+                "The lock file is not compatible with the current version of Poetry.\n"
+                "Upgrade Poetry to be able to read the lock file or, alternatively, "
+                "regenerate the lock file with the `poetry lock` command."
+            )
+
+        return lock_data
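+
+    # Worked example with _VERSION = "1.1": accepted_versions is "^1.0"
+    # (>=1.0,<2.0), so a lock-version of "1.2" only triggers the warning
+    # above, while "2.0" raises the RuntimeError.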
+
+    def _lock_packages(self, packages: list[Package]) -> list[dict[str, Any]]:
+        locked = []
+
+        for package in sorted(
+            packages,
+            key=lambda x: (
+                x.name,
+                x.version,
+                x.source_type or "",
+                x.source_url or "",
+                x.source_subdirectory or "",
+                x.source_reference or "",
+                x.source_resolved_reference or "",
+            ),
+        ):
+            spec = self._dump_package(package)
+
+            locked.append(spec)
+
+        return locked
+
+    def _dump_package(self, package: Package) -> dict[str, Any]:
+        dependencies: dict[str, list[Any]] = {}
+        for dependency in sorted(
+            package.requires,
+            key=lambda d: d.name,
+        ):
+            if dependency.pretty_name not in dependencies:
+                dependencies[dependency.pretty_name] = []
+
+            constraint = inline_table()
+
+            if dependency.is_directory():
+                dependency = cast("DirectoryDependency", dependency)
+                constraint["path"] = dependency.path.as_posix()
+
+                if dependency.develop:
+                    constraint["develop"] = True
+
+            elif dependency.is_file():
+                dependency = cast("FileDependency", dependency)
+                constraint["path"] = dependency.path.as_posix()
+
+            elif dependency.is_url():
+                dependency = cast("URLDependency", dependency)
+                constraint["url"] = dependency.url
+
+            elif dependency.is_vcs():
+                dependency = cast("VCSDependency", dependency)
+                constraint[dependency.vcs] = dependency.source
+
+                if dependency.branch:
+                    constraint["branch"] = dependency.branch
+                elif dependency.tag:
+                    constraint["tag"] = dependency.tag
+                elif dependency.rev:
+                    constraint["rev"] = dependency.rev
+            else:
+                constraint["version"] = str(dependency.pretty_constraint)
+
+            if dependency.extras:
+                constraint["extras"] = sorted(dependency.extras)
+
+            if dependency.is_optional():
+                constraint["optional"] = True
+
+            if not dependency.marker.is_any():
+                constraint["markers"] = str(dependency.marker)
+
+            dependencies[dependency.pretty_name].append(constraint)
+
+        # All the constraints should have the same type,
+        # but we want to simplify them if it's possible
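+        # e.g. [{"version": ">=2.0,<3.0"}] collapses to [">=2.0,<3.0"]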
+        for dependency_name, constraints in dependencies.items():
+            if all(
+                len(constraint) == 1 and "version" in constraint
+                for constraint in constraints
+            ):
+                dependencies[dependency_name] = [
+                    constraint["version"] for constraint in constraints
+                ]
+
+        data: dict[str, Any] = {
+            "name": package.pretty_name,
+            "version": package.pretty_version,
+            "description": package.description or "",
+            "category": package.category,
+            "optional": package.optional,
+            "python-versions": package.python_versions,
+            "files": sorted(
+                package.files,
+                key=lambda x: x["file"],  # type: ignore[no-any-return]
+            ),
+        }
+
+        if dependencies:
+            data["dependencies"] = table()
+            for k, constraints in dependencies.items():
+                if len(constraints) == 1:
+                    data["dependencies"][k] = constraints[0]
+                else:
+                    data["dependencies"][k] = array().multiline(True)
+                    for constraint in constraints:
+                        data["dependencies"][k].append(constraint)
+
+        if package.extras:
+            extras = {}
+            for name, deps in sorted(package.extras.items()):
+                extras[name] = sorted(dep.base_pep_508_name for dep in deps)
+
+            data["extras"] = extras
+
+        if package.source_url:
+            url = package.source_url
+            if package.source_type in ["file", "directory"]:
+                # The lock file should only store paths relative to the root project
+                url = Path(
+                    os.path.relpath(
+                        Path(url).resolve(),
+                        Path(self._lock.path.parent).resolve(),
+                    )
+                ).as_posix()
+
+            data["source"] = {}
+
+            if package.source_type:
+                data["source"]["type"] = package.source_type
+
+            data["source"]["url"] = url
+
+            if package.source_reference:
+                data["source"]["reference"] = package.source_reference
+
+            if package.source_resolved_reference:
+                data["source"]["resolved_reference"] = package.source_resolved_reference
+
+            if package.source_subdirectory:
+                data["source"]["subdirectory"] = package.source_subdirectory
+
+            if package.source_type in ["directory", "git"]:
+                data["develop"] = package.develop
+
+        return data
+
+
+class NullLocker(Locker):
+    def set_lock_data(self, root: Package, packages: list[Package]) -> bool:
+        return False
diff --git a/vendor/poetry/src/poetry/packages/package_collection.py b/vendor/poetry/src/poetry/packages/package_collection.py
new file mode 100644
index 00000000..e34d3dde
--- /dev/null
+++ b/vendor/poetry/src/poetry/packages/package_collection.py
@@ -0,0 +1,38 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from typing import List
+
+from poetry.packages.dependency_package import DependencyPackage
+
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+    from poetry.core.packages.dependency import Dependency
+    from poetry.core.packages.package import Package
+
+
+class PackageCollection(List[DependencyPackage]):
+    def __init__(
+        self,
+        dependency: Dependency,
+        packages: Sequence[Package | DependencyPackage] | None = None,
+    ) -> None:
+        self._dependency = dependency
+
+        if packages is None:
+            packages = []
+
+        super().__init__()
+
+        for package in packages:
+            self.append(package)
+
+    def append(self, package: Package | DependencyPackage) -> None:
+        if isinstance(package, DependencyPackage):
+            package = package.package
+
+        package = DependencyPackage(self._dependency, package)
+
+        return super().append(package)
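+
+# Usage sketch (illustrative): plain Package instances are coerced into
+# DependencyPackage wrappers tied to the collection's dependency.
+#
+#     collection = PackageCollection(dependency, [package])
+#     assert isinstance(collection[0], DependencyPackage)
+#     assert collection[0].dependency is dependency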
diff --git a/vendor/poetry/src/poetry/plugins/__init__.py b/vendor/poetry/src/poetry/plugins/__init__.py
new file mode 100644
index 00000000..bf62b4e3
--- /dev/null
+++ b/vendor/poetry/src/poetry/plugins/__init__.py
@@ -0,0 +1,7 @@
+from __future__ import annotations
+
+from poetry.plugins.application_plugin import ApplicationPlugin
+from poetry.plugins.plugin import Plugin
+
+
+__all__ = ["ApplicationPlugin", "Plugin"]
diff --git a/vendor/poetry/src/poetry/plugins/application_plugin.py b/vendor/poetry/src/poetry/plugins/application_plugin.py
new file mode 100644
index 00000000..4dbdb92e
--- /dev/null
+++ b/vendor/poetry/src/poetry/plugins/application_plugin.py
@@ -0,0 +1,29 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.plugins.base_plugin import BasePlugin
+
+
+if TYPE_CHECKING:
+    from poetry.console.application import Application
+    from poetry.console.commands.command import Command
+
+
+class ApplicationPlugin(BasePlugin):
+    """
+    Base class for application plugins.
+    """
+
+    group = "poetry.application.plugin"
+
+    @property
+    def commands(self) -> list[type[Command]]:
+        return []
+
+    def activate(self, application: Application) -> None:
+        for command in self.commands:
+            assert command.name is not None
+            application.command_loader.register_factory(
+                command.name, lambda: command()  # noqa: B023
+            )
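+
+# Sketch of a concrete application plugin (MyCommand is a hypothetical
+# Command subclass with its `name` attribute set):
+#
+#     class HelloPlugin(ApplicationPlugin):
+#         @property
+#         def commands(self) -> list[type[Command]]:
+#             return [MyCommand]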
diff --git a/vendor/poetry/src/poetry/plugins/base_plugin.py b/vendor/poetry/src/poetry/plugins/base_plugin.py
new file mode 100644
index 00000000..07146060
--- /dev/null
+++ b/vendor/poetry/src/poetry/plugins/base_plugin.py
@@ -0,0 +1,21 @@
+from __future__ import annotations
+
+from abc import abstractmethod
+
+
+class BasePlugin:
+    """
+    Base class for all plugin types.
+
+    The `activate()` method must be implemented; its arguments depend on the
+    plugin type.
+    """
+
+    PLUGIN_API_VERSION = "1.0.0"
+
+    @property
+    @abstractmethod
+    def group(self) -> str:
+        """
+        Name of the entry point group the plugin belongs to.
+        """
+        raise NotImplementedError()
diff --git a/vendor/poetry/src/poetry/plugins/plugin.py b/vendor/poetry/src/poetry/plugins/plugin.py
new file mode 100644
index 00000000..ea72662c
--- /dev/null
+++ b/vendor/poetry/src/poetry/plugins/plugin.py
@@ -0,0 +1,24 @@
+from __future__ import annotations
+
+from abc import abstractmethod
+from typing import TYPE_CHECKING
+
+from poetry.plugins.base_plugin import BasePlugin
+
+
+if TYPE_CHECKING:
+    from cleo.io.io import IO
+
+    from poetry.poetry import Poetry
+
+
+class Plugin(BasePlugin):
+    """
+    Generic plugin not related to the console application.
+    """
+
+    group = "poetry.plugin"
+
+    @abstractmethod
+    def activate(self, poetry: Poetry, io: IO) -> None:
+        raise NotImplementedError()
diff --git a/vendor/poetry/src/poetry/plugins/plugin_manager.py b/vendor/poetry/src/poetry/plugins/plugin_manager.py
new file mode 100644
index 00000000..99d83ee9
--- /dev/null
+++ b/vendor/poetry/src/poetry/plugins/plugin_manager.py
@@ -0,0 +1,83 @@
+from __future__ import annotations
+
+import logging
+
+from typing import TYPE_CHECKING
+
+from poetry.plugins.application_plugin import ApplicationPlugin
+from poetry.plugins.plugin import Plugin
+from poetry.utils._compat import metadata
+
+
+if TYPE_CHECKING:
+    from typing import Any
+
+    from poetry.utils.env import Env
+
+
+logger = logging.getLogger(__name__)
+
+
+class PluginManager:
+    """
+    This class registers and activates plugins.
+    """
+
+    def __init__(self, group: str, disable_plugins: bool = False) -> None:
+        self._group = group
+        self._disable_plugins = disable_plugins
+        self._plugins: list[Plugin] = []
+
+    def load_plugins(self, env: Env | None = None) -> None:
+        if self._disable_plugins:
+            return
+
+        plugin_entrypoints = self.get_plugin_entry_points(env=env)
+
+        for ep in plugin_entrypoints:
+            self._load_plugin_entry_point(ep)
+
+    @staticmethod
+    def _is_plugin_candidate(ep: metadata.EntryPoint, env: Env | None = None) -> bool:
+        """
+        Helper method to check if a given entry point is valid as a plugin candidate.
+        When an environment is specified, the entry point's associated distribution
+        should be installed, and discoverable in the given environment.
+        """
+        return env is None or (
+            ep.dist is not None
+            and env.site_packages.find_distribution(ep.dist.name) is not None
+        )
+
+    def get_plugin_entry_points(
+        self, env: Env | None = None
+    ) -> list[metadata.EntryPoint]:
+        return [
+            ep
+            for ep in metadata.entry_points(group=self._group)
+            if self._is_plugin_candidate(ep, env)
+        ]
+
+    def add_plugin(self, plugin: Plugin) -> None:
+        if not isinstance(plugin, (Plugin, ApplicationPlugin)):
+            raise ValueError(
+                "The Poetry plugin must be an instance of Plugin or ApplicationPlugin"
+            )
+
+        self._plugins.append(plugin)
+
+    def activate(self, *args: Any, **kwargs: Any) -> None:
+        for plugin in self._plugins:
+            plugin.activate(*args, **kwargs)
+
+    def _load_plugin_entry_point(self, ep: metadata.EntryPoint) -> None:
+        logger.debug(f"Loading the {ep.name} plugin")  # type: ignore[attr-defined]
+
+        plugin = ep.load()  # type: ignore[no-untyped-call]
+
+        if not issubclass(plugin, (Plugin, ApplicationPlugin)):
+            raise ValueError(
+                "The Poetry plugin must be an instance of Plugin or ApplicationPlugin"
+            )
+
+        self.add_plugin(plugin())
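+
+# Activation sketch (illustrative; `poetry` and `io` are assumed to exist in
+# the caller's scope):
+#
+#     manager = PluginManager(Plugin.group)  # entry point group "poetry.plugin"
+#     manager.load_plugins()                 # discover installed entry points
+#     manager.activate(poetry, io)           # call each plugin's activate()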
diff --git a/vendor/poetry/src/poetry/poetry.py b/vendor/poetry/src/poetry/poetry.py
new file mode 100644
index 00000000..4459bdd4
--- /dev/null
+++ b/vendor/poetry/src/poetry/poetry.py
@@ -0,0 +1,79 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from typing import Any
+
+from poetry.core.poetry import Poetry as BasePoetry
+
+from poetry.__version__ import __version__
+from poetry.config.source import Source
+
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from poetry.core.packages.project_package import ProjectPackage
+
+    from poetry.config.config import Config
+    from poetry.packages.locker import Locker
+    from poetry.plugins.plugin_manager import PluginManager
+    from poetry.repositories.pool import Pool
+
+
+class Poetry(BasePoetry):
+    VERSION = __version__
+
+    def __init__(
+        self,
+        file: Path,
+        local_config: dict[str, Any],
+        package: ProjectPackage,
+        locker: Locker,
+        config: Config,
+    ) -> None:
+        from poetry.repositories.pool import Pool
+
+        super().__init__(file, local_config, package)
+
+        self._locker = locker
+        self._config = config
+        self._pool = Pool()
+        self._plugin_manager: PluginManager | None = None
+
+    @property
+    def locker(self) -> Locker:
+        return self._locker
+
+    @property
+    def pool(self) -> Pool:
+        return self._pool
+
+    @property
+    def config(self) -> Config:
+        return self._config
+
+    def set_locker(self, locker: Locker) -> Poetry:
+        self._locker = locker
+
+        return self
+
+    def set_pool(self, pool: Pool) -> Poetry:
+        self._pool = pool
+
+        return self
+
+    def set_config(self, config: Config) -> Poetry:
+        self._config = config
+
+        return self
+
+    def set_plugin_manager(self, plugin_manager: PluginManager) -> Poetry:
+        self._plugin_manager = plugin_manager
+
+        return self
+
+    def get_sources(self) -> list[Source]:
+        return [
+            Source(**source)
+            for source in self.pyproject.poetry_config.get("source", [])
+        ]
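+
+# The set_* methods return self, so callers can chain configuration
+# (illustrative):
+#
+#     poetry.set_locker(locker).set_pool(pool).set_config(config)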
diff --git a/vendor/poetry/src/poetry/publishing/__init__.py b/vendor/poetry/src/poetry/publishing/__init__.py
new file mode 100644
index 00000000..2cb619a2
--- /dev/null
+++ b/vendor/poetry/src/poetry/publishing/__init__.py
@@ -0,0 +1,6 @@
+from __future__ import annotations
+
+from poetry.publishing.publisher import Publisher
+
+
+__all__ = ["Publisher"]
diff --git a/vendor/poetry/src/poetry/publishing/publisher.py b/vendor/poetry/src/poetry/publishing/publisher.py
new file mode 100644
index 00000000..cf8515bc
--- /dev/null
+++ b/vendor/poetry/src/poetry/publishing/publisher.py
@@ -0,0 +1,92 @@
+from __future__ import annotations
+
+import logging
+
+from typing import TYPE_CHECKING
+
+from poetry.publishing.uploader import Uploader
+from poetry.utils.authenticator import Authenticator
+
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from cleo.io.io import IO
+
+    from poetry.poetry import Poetry
+
+logger = logging.getLogger(__name__)
+
+
+class Publisher:
+    """
+    Registers and publishes packages to remote repositories.
+    """
+
+    def __init__(self, poetry: Poetry, io: IO) -> None:
+        self._poetry = poetry
+        self._package = poetry.package
+        self._io = io
+        self._uploader = Uploader(poetry, io)
+        self._authenticator = Authenticator(poetry.config, self._io)
+
+    @property
+    def files(self) -> list[Path]:
+        return self._uploader.files
+
+    def publish(
+        self,
+        repository_name: str | None,
+        username: str | None,
+        password: str | None,
+        cert: Path | None = None,
+        client_cert: Path | None = None,
+        dry_run: bool = False,
+        skip_existing: bool = False,
+    ) -> None:
+        if not repository_name:
+            url = "https://upload.pypi.org/legacy/"
+            repository_name = "pypi"
+        else:
+            # Retrieving config information
+            url = self._poetry.config.get(f"repositories.{repository_name}.url")
+            if url is None:
+                raise RuntimeError(f"Repository {repository_name} is not defined")
+
+        if not (username and password):
+            # Check if we have a token first
+            token = self._authenticator.get_pypi_token(repository_name)
+            if token:
+                logger.debug(f"Found an API token for {repository_name}.")
+                username = "__token__"
+                password = token
+            else:
+                auth = self._authenticator.get_http_auth(repository_name)
+                if auth:
+                    logger.debug(
+                        f"Found authentication information for {repository_name}."
+                    )
+                    username = auth.username
+                    password = auth.password
+
+        certificates = self._authenticator.get_certs_for_repository(repository_name)
+        resolved_cert = cert or certificates.cert or certificates.verify
+        resolved_client_cert = client_cert or certificates.client_cert
+
+        self._uploader.auth(username, password)
+
+        if repository_name == "pypi":
+            repository_name = "PyPI"
+        self._io.write_line(
+            f"Publishing {self._package.pretty_name}"
+            f" ({self._package.pretty_version}) to"
+            f" {repository_name}"
+        )
+
+        self._uploader.upload(
+            url,
+            cert=resolved_cert,
+            client_cert=resolved_client_cert,
+            dry_run=dry_run,
+            skip_existing=skip_existing,
+        )
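+
+# Call-site sketch (illustrative): credentials fall back to a stored API
+# token, then to configured HTTP auth, when not passed explicitly.
+#
+#     publisher = Publisher(poetry, io)
+#     publisher.publish(None, None, None, dry_run=True)  # PyPI, dry run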
diff --git a/vendor/poetry/src/poetry/publishing/uploader.py b/vendor/poetry/src/poetry/publishing/uploader.py
new file mode 100644
index 00000000..566f50ee
--- /dev/null
+++ b/vendor/poetry/src/poetry/publishing/uploader.py
@@ -0,0 +1,371 @@
+from __future__ import annotations
+
+import hashlib
+import io
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
+
+import requests
+
+from poetry.core.masonry.metadata import Metadata
+from poetry.core.masonry.utils.helpers import escape_name
+from poetry.core.masonry.utils.helpers import escape_version
+from poetry.core.utils.helpers import normalize_version
+from requests import adapters
+from requests.exceptions import ConnectionError
+from requests.exceptions import HTTPError
+from requests_toolbelt import user_agent
+from requests_toolbelt.multipart import MultipartEncoder
+from requests_toolbelt.multipart import MultipartEncoderMonitor
+from urllib3 import util
+
+from poetry.__version__ import __version__
+from poetry.utils.constants import REQUESTS_TIMEOUT
+from poetry.utils.patterns import wheel_file_re
+
+
+if TYPE_CHECKING:
+    from cleo.io.io import IO
+
+    from poetry.poetry import Poetry
+
+_has_blake2 = hasattr(hashlib, "blake2b")
+
+
+class UploadError(Exception):
+    def __init__(self, error: ConnectionError | HTTPError | str) -> None:
+        if isinstance(error, HTTPError):
+            message = (
+                f"HTTP Error {error.response.status_code}: {error.response.reason} |"
+                f" {error.response.content!r}"
+            )
+        elif isinstance(error, ConnectionError):
+            message = (
+                "Connection Error: We were unable to connect to the repository, "
+                "ensure the url is correct and can be reached."
+            )
+        else:
+            message = str(error)
+        super().__init__(message)
+
+
+class Uploader:
+    def __init__(self, poetry: Poetry, io: IO) -> None:
+        self._poetry = poetry
+        self._package = poetry.package
+        self._io = io
+        self._username: str | None = None
+        self._password: str | None = None
+
+    @property
+    def user_agent(self) -> str:
+        agent: str = user_agent("poetry", __version__)
+        return agent
+
+    @property
+    def adapter(self) -> adapters.HTTPAdapter:
+        retry = util.Retry(
+            connect=5,
+            total=10,
+            allowed_methods=["GET"],
+            status_forcelist=[500, 501, 502, 503],
+        )
+
+        return adapters.HTTPAdapter(max_retries=retry)
+
+    @property
+    def files(self) -> list[Path]:
+        dist = self._poetry.file.parent / "dist"
+        version = normalize_version(self._package.version.text)
+
+        wheels = list(
+            dist.glob(
+                f"{escape_name(self._package.pretty_name)}-{escape_version(version)}"
+                "-*.whl"
+            )
+        )
+        tars = list(dist.glob(f"{self._package.pretty_name}-{version}.tar.gz"))
+
+        return sorted(wheels + tars)
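+
+    # Illustrative: for package "my-pkg" at version 1.2.3 this matches
+    # dist/my_pkg-1.2.3-*.whl and dist/my-pkg-1.2.3.tar.gz.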
+
+    def auth(self, username: str | None, password: str | None) -> None:
+        self._username = username
+        self._password = password
+
+    def make_session(self) -> requests.Session:
+        session = requests.session()
+        auth = self.get_auth()
+        if auth is not None:
+            session.auth = auth
+
+        session.headers["User-Agent"] = self.user_agent
+        for scheme in ("http://", "https://"):
+            session.mount(scheme, self.adapter)
+
+        return session
+
+    def get_auth(self) -> tuple[str, str] | None:
+        if self._username is None or self._password is None:
+            return None
+
+        return (self._username, self._password)
+
+    def upload(
+        self,
+        url: str,
+        cert: Path | bool = True,
+        client_cert: Path | None = None,
+        dry_run: bool = False,
+        skip_existing: bool = False,
+    ) -> None:
+        session = self.make_session()
+
+        session.verify = str(cert) if isinstance(cert, Path) else cert
+
+        if client_cert:
+            session.cert = str(client_cert)
+
+        try:
+            self._upload(session, url, dry_run, skip_existing)
+        finally:
+            session.close()
+
+    def post_data(self, file: Path) -> dict[str, Any]:
+        meta = Metadata.from_package(self._package)
+
+        file_type = self._get_type(file)
+
+        if _has_blake2:
+            blake2_256_hash = hashlib.blake2b(digest_size=256 // 8)
+
+        md5_hash = hashlib.md5()
+        sha256_hash = hashlib.sha256()
+        with file.open("rb") as fp:
+            for content in iter(lambda: fp.read(io.DEFAULT_BUFFER_SIZE), b""):
+                md5_hash.update(content)
+                sha256_hash.update(content)
+
+                if _has_blake2:
+                    blake2_256_hash.update(content)
+
+        md5_digest = md5_hash.hexdigest()
+        sha2_digest = sha256_hash.hexdigest()
+        blake2_256_digest: str | None = None
+        if _has_blake2:
+            blake2_256_digest = blake2_256_hash.hexdigest()
+
+        py_version: str | None = None
+        if file_type == "bdist_wheel":
+            wheel_info = wheel_file_re.match(file.name)
+            if wheel_info is not None:
+                py_version = wheel_info.group("pyver")
+
+        data = {
+            # identify release
+            "name": meta.name,
+            "version": meta.version,
+            # file content
+            "filetype": file_type,
+            "pyversion": py_version,
+            # additional meta-data
+            "metadata_version": meta.metadata_version,
+            "summary": meta.summary,
+            "home_page": meta.home_page,
+            "author": meta.author,
+            "author_email": meta.author_email,
+            "maintainer": meta.maintainer,
+            "maintainer_email": meta.maintainer_email,
+            "license": meta.license,
+            "description": meta.description,
+            "keywords": meta.keywords,
+            "platform": meta.platforms,
+            "classifiers": meta.classifiers,
+            "download_url": meta.download_url,
+            "supported_platform": meta.supported_platforms,
+            "comment": None,
+            "md5_digest": md5_digest,
+            "sha256_digest": sha2_digest,
+            "blake2_256_digest": blake2_256_digest,
+            # PEP 314
+            "provides": meta.provides,
+            "requires": meta.requires,
+            "obsoletes": meta.obsoletes,
+            # Metadata 1.2
+            "project_urls": meta.project_urls,
+            "provides_dist": meta.provides_dist,
+            "obsoletes_dist": meta.obsoletes_dist,
+            "requires_dist": meta.requires_dist,
+            "requires_external": meta.requires_external,
+            "requires_python": meta.requires_python,
+        }
+
+        # Metadata 2.1
+        if meta.description_content_type:
+            data["description_content_type"] = meta.description_content_type
+
+        # TODO: Provides extra
+
+        return data
+
+    def _upload(
+        self,
+        session: requests.Session,
+        url: str,
+        dry_run: bool = False,
+        skip_existing: bool = False,
+    ) -> None:
+        for file in self.files:
+            # TODO: Check existence
+
+            self._upload_file(session, url, file, dry_run, skip_existing)
+
+    def _upload_file(
+        self,
+        session: requests.Session,
+        url: str,
+        file: Path,
+        dry_run: bool = False,
+        skip_existing: bool = False,
+    ) -> None:
+        from cleo.ui.progress_bar import ProgressBar
+
+        data = self.post_data(file)
+        data.update(
+            {
+                # action
+                ":action": "file_upload",
+                "protocol_version": "1",
+            }
+        )
+
+        data_to_send: list[tuple[str, Any]] = self._prepare_data(data)
+
+        with file.open("rb") as fp:
+            data_to_send.append(
+                ("content", (file.name, fp, "application/octet-stream"))
+            )
+            encoder = MultipartEncoder(data_to_send)
+            bar = ProgressBar(self._io, max=encoder.len)
+            bar.set_format(f" - Uploading {file.name} %percent%%")
+            monitor = MultipartEncoderMonitor(
+                encoder, lambda monitor: bar.set_progress(monitor.bytes_read)
+            )
+
+            bar.start()
+
+            resp = None
+
+            try:
+                if not dry_run:
+                    resp = session.post(
+                        url,
+                        data=monitor,
+                        allow_redirects=False,
+                        headers={"Content-Type": monitor.content_type},
+                        timeout=REQUESTS_TIMEOUT,
+                    )
+                if resp is None or 200 <= resp.status_code < 300:
+                    bar.set_format(
+                        f" - Uploading {file.name} %percent%%"
+                    )
+                    bar.finish()
+                elif resp.status_code == 301:
+                    if self._io.output.is_decorated():
+                        self._io.overwrite(
+                            f" - Uploading {file.name} FAILED"
+                        )
+                    raise UploadError(
+                        "Redirects are not supported. "
+                        "Is the URL missing a trailing slash?"
+                    )
+                elif resp.status_code == 400 and "was ever registered" in resp.text:
+                    self._register(session, url)
+                    resp.raise_for_status()
+                elif skip_existing and self._is_file_exists_error(resp):
+                    bar.set_format(
+                        f" - Uploading {file.name} File exists."
+                        " Skipping"
+                    )
+                    bar.display()
+                else:
+                    resp.raise_for_status()
+            except (requests.ConnectionError, requests.HTTPError) as e:
+                if self._io.output.is_decorated():
+                    self._io.overwrite(
+                        f" - Uploading {file.name} FAILED"
+                    )
+                raise UploadError(e)
+            finally:
+                self._io.write_line("")
+
+    def _register(self, session: requests.Session, url: str) -> requests.Response:
+        """
+        Register a package to a repository.
+        """
+        dist = self._poetry.file.parent / "dist"
+        file = (
+            dist
+            / f"{self._package.name}-{normalize_version(self._package.version.text)}.tar.gz"  # noqa: E501
+        )
+
+        if not file.exists():
+            raise RuntimeError(f'"{file.name}" does not exist.')
+
+        data = self.post_data(file)
+        data.update({":action": "submit", "protocol_version": "1"})
+
+        data_to_send = self._prepare_data(data)
+        encoder = MultipartEncoder(data_to_send)
+        resp = session.post(
+            url,
+            data=encoder,
+            allow_redirects=False,
+            headers={"Content-Type": encoder.content_type},
+            timeout=REQUESTS_TIMEOUT,
+        )
+
+        resp.raise_for_status()
+
+        return resp
+
+    def _prepare_data(self, data: dict[str, Any]) -> list[tuple[str, str]]:
+        data_to_send = []
+        for key, value in data.items():
+            if not isinstance(value, (list, tuple)):
+                data_to_send.append((key, value))
+            else:
+                for item in value:
+                    data_to_send.append((key, item))
+
+        return data_to_send
+
+    def _get_type(self, file: Path) -> str:
+        exts = file.suffixes
+        if exts[-1] == ".whl":
+            return "bdist_wheel"
+        elif len(exts) >= 2 and "".join(exts[-2:]) == ".tar.gz":
+            return "sdist"
+
+        raise ValueError("Unknown distribution format " + "".join(exts))
+
+    def _is_file_exists_error(self, response: requests.Response) -> bool:
+        # based on https://github.com/pypa/twine/blob/a6dd69c79f7b5abfb79022092a5d3776a499e31b/twine/commands/upload.py#L32  # noqa: E501
+        status = response.status_code
+        reason = response.reason.lower()
+        text = response.text.lower()
+        reason_and_text = reason + text
+
+        return (
+            # pypiserver (https://pypi.org/project/pypiserver)
+            status == 409
+            # PyPI / TestPyPI / GCP Artifact Registry
+            or (status == 400 and "already exist" in reason_and_text)
+            # Nexus Repository OSS (https://www.sonatype.com/nexus-repository-oss)
+            or (status == 400 and "updating asset" in reason_and_text)
+            # Artifactory (https://jfrog.com/artifactory/)
+            or (status == 403 and "overwrite artifact" in reason_and_text)
+            # Gitlab Enterprise Edition (https://about.gitlab.com)
+            or (status == 400 and "already been taken" in reason_and_text)
+        )
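+
+
+# Illustrative sketch (not part of poetry): the streaming-hash pattern used by
+# post_data() above, shown standalone. The file is read once in fixed-size
+# chunks and every chunk is fed to all digests, so memory use stays constant
+# regardless of the artifact's size. `demo_digests` is a hypothetical helper,
+# and it assumes hashlib.blake2b is available (the code above guards this with
+# _has_blake2).
+def demo_digests(path: Path) -> dict[str, str]:
+    md5 = hashlib.md5()
+    sha256 = hashlib.sha256()
+    blake2 = hashlib.blake2b(digest_size=256 // 8)  # 32-byte digest, as PyPI expects
+    with path.open("rb") as fp:
+        for chunk in iter(lambda: fp.read(io.DEFAULT_BUFFER_SIZE), b""):
+            md5.update(chunk)
+            sha256.update(chunk)
+            blake2.update(chunk)
+    return {
+        "md5_digest": md5.hexdigest(),
+        "sha256_digest": sha256.hexdigest(),
+        "blake2_256_digest": blake2.hexdigest(),
+    }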
diff --git a/vendor/poetry/src/poetry/puzzle/__init__.py b/vendor/poetry/src/poetry/puzzle/__init__.py
new file mode 100644
index 00000000..d5bc6595
--- /dev/null
+++ b/vendor/poetry/src/poetry/puzzle/__init__.py
@@ -0,0 +1,6 @@
+from __future__ import annotations
+
+from poetry.puzzle.solver import Solver
+
+
+__all__ = ["Solver"]
diff --git a/vendor/poetry/src/poetry/puzzle/exceptions.py b/vendor/poetry/src/poetry/puzzle/exceptions.py
new file mode 100644
index 00000000..58087c4a
--- /dev/null
+++ b/vendor/poetry/src/poetry/puzzle/exceptions.py
@@ -0,0 +1,32 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.dependency import Dependency
+
+    from poetry.mixology.failure import SolveFailure
+    from poetry.packages import DependencyPackage
+
+
+class SolverProblemError(Exception):
+    def __init__(self, error: SolveFailure) -> None:
+        self._error = error
+
+        super().__init__(str(error))
+
+    @property
+    def error(self) -> SolveFailure:
+        return self._error
+
+
+class OverrideNeeded(Exception):
+    def __init__(
+        self, *overrides: dict[DependencyPackage, dict[str, Dependency]]
+    ) -> None:
+        self._overrides = overrides
+
+    @property
+    def overrides(self) -> tuple[dict[DependencyPackage, dict[str, Dependency]], ...]:
+        return self._overrides
diff --git a/vendor/poetry/src/poetry/puzzle/provider.py b/vendor/poetry/src/poetry/puzzle/provider.py
new file mode 100644
index 00000000..92db9901
--- /dev/null
+++ b/vendor/poetry/src/poetry/puzzle/provider.py
@@ -0,0 +1,959 @@
+from __future__ import annotations
+
+import functools
+import logging
+import os
+import re
+import tempfile
+import time
+import urllib.parse
+
+from collections import defaultdict
+from contextlib import contextmanager
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import cast
+
+from cleo.ui.progress_indicator import ProgressIndicator
+from poetry.core.packages.utils.utils import get_python_constraint_from_marker
+from poetry.core.semver.empty_constraint import EmptyConstraint
+from poetry.core.semver.version import Version
+from poetry.core.version.markers import AnyMarker
+from poetry.core.version.markers import MarkerUnion
+
+from poetry.inspection.info import PackageInfo
+from poetry.inspection.info import PackageInfoError
+from poetry.mixology.incompatibility import Incompatibility
+from poetry.mixology.incompatibility_cause import DependencyCause
+from poetry.mixology.incompatibility_cause import PythonCause
+from poetry.mixology.term import Term
+from poetry.packages import DependencyPackage
+from poetry.packages.package_collection import PackageCollection
+from poetry.puzzle.exceptions import OverrideNeeded
+from poetry.repositories.exceptions import PackageNotFound
+from poetry.utils.helpers import download_file
+from poetry.utils.helpers import safe_extra
+from poetry.vcs.git import Git
+
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+    from collections.abc import Iterable
+    from collections.abc import Iterator
+
+    from cleo.io.io import IO
+    from poetry.core.packages.dependency import Dependency
+    from poetry.core.packages.directory_dependency import DirectoryDependency
+    from poetry.core.packages.file_dependency import FileDependency
+    from poetry.core.packages.package import Package
+    from poetry.core.packages.url_dependency import URLDependency
+    from poetry.core.packages.vcs_dependency import VCSDependency
+    from poetry.core.semver.version_constraint import VersionConstraint
+    from poetry.core.version.markers import BaseMarker
+
+    from poetry.repositories import Pool
+    from poetry.utils.env import Env
+
+
+logger = logging.getLogger(__name__)
+
+
+class Indicator(ProgressIndicator):  # type: ignore[misc]
+    CONTEXT: str | None = None
+
+    @staticmethod
+    @contextmanager
+    def context() -> Iterator[Callable[[str | None], None]]:
+        def _set_context(context: str | None) -> None:
+            Indicator.CONTEXT = context
+
+        yield _set_context
+
+        _set_context(None)
+
+    def _formatter_context(self) -> str:
+        if Indicator.CONTEXT is None:
+            return " "
+        else:
+            return f" {Indicator.CONTEXT} "
+
+    def _formatter_elapsed(self) -> str:
+        assert self._start_time is not None
+        elapsed = time.time() - self._start_time
+
+        return f"{elapsed:.1f}s"
+
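+
+# Illustrative sketch (not part of poetry): how Indicator.context() is meant
+# to be used. The context manager yields a setter that updates the class-level
+# CONTEXT string rendered by _formatter_context(), and resets it to None on
+# exit. `_demo_indicator_context` is a hypothetical helper for illustration.
+def _demo_indicator_context() -> None:
+    with Indicator.context() as set_context:
+        set_context("resolving markers")
+        # while inside the block, the spinner renders " resolving markers "
+    # on exit, CONTEXT is None again and the spinner renders a bare " "
+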
+
+@functools.lru_cache(maxsize=None)
+def _get_package_from_git(
+    url: str,
+    branch: str | None = None,
+    tag: str | None = None,
+    rev: str | None = None,
+    subdirectory: str | None = None,
+    source_root: Path | None = None,
+) -> Package:
+    source = Git.clone(
+        url=url,
+        source_root=source_root,
+        branch=branch,
+        tag=tag,
+        revision=rev,
+        clean=False,
+    )
+    revision = Git.get_revision(source)
+
+    path = Path(source.path)
+    if subdirectory:
+        path = path.joinpath(subdirectory)
+
+    package = Provider.get_package_from_directory(path)
+    package._source_type = "git"
+    package._source_url = url
+    package._source_reference = rev or tag or branch or "HEAD"
+    package._source_resolved_reference = revision
+    package._source_subdirectory = subdirectory
+
+    return package
+
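+
+# Note on the cache above: because _get_package_from_git() is wrapped in
+# functools.lru_cache, resolving the same git dependency several times during
+# a single solve clones and inspects the repository only once. The cache key
+# is the full argument tuple, which is why every parameter (including
+# source_root, a hashable Path) must stay hashable.
+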
+
+class Provider:
+    UNSAFE_PACKAGES: set[str] = set()
+
+    def __init__(
+        self,
+        package: Package,
+        pool: Pool,
+        io: IO,
+        env: Env | None = None,
+        installed: list[Package] | None = None,
+    ) -> None:
+        self._package = package
+        self._pool = pool
+        self._io = io
+        self._env = env
+        self._python_constraint = package.python_constraint
+        self._is_debugging: bool = self._io.is_debug() or self._io.is_very_verbose()
+        self._in_progress = False
+        self._overrides: dict[DependencyPackage, dict[str, Dependency]] = {}
+        self._deferred_cache: dict[Dependency, Package] = {}
+        self._load_deferred = True
+        self._source_root: Path | None = None
+        self._installed_packages = installed if installed is not None else []
+        self._direct_origin_packages: dict[str, Package] = {}
+
+    @property
+    def pool(self) -> Pool:
+        return self._pool
+
+    def is_debugging(self) -> bool:
+        return self._is_debugging
+
+    def set_overrides(
+        self, overrides: dict[DependencyPackage, dict[str, Dependency]]
+    ) -> None:
+        self._overrides = overrides
+
+    def load_deferred(self, load_deferred: bool) -> None:
+        self._load_deferred = load_deferred
+
+    @contextmanager
+    def use_source_root(self, source_root: Path) -> Iterator[Provider]:
+        original_source_root = self._source_root
+        self._source_root = source_root
+
+        yield self
+
+        self._source_root = original_source_root
+
+    @contextmanager
+    def use_environment(self, env: Env) -> Iterator[Provider]:
+        original_env = self._env
+        original_python_constraint = self._python_constraint
+
+        self._env = env
+        self._python_constraint = Version.parse(env.marker_env["python_full_version"])
+
+        yield self
+
+        self._env = original_env
+        self._python_constraint = original_python_constraint
+
+    @staticmethod
+    def validate_package_for_dependency(
+        dependency: Dependency, package: Package
+    ) -> None:
+        if dependency.name != package.name:
+            # For now, the dependency's name must match the actual package's name
+            raise RuntimeError(
+                f"The dependency name for {dependency.name} does not match the actual"
+                f" package's name: {package.name}"
+            )
+
+    def search_for_installed_packages(
+        self,
+        dependency: Dependency,
+    ) -> list[Package]:
+        """
+        Search for installed packages, when available, that satisfy the given
+        dependency.
+
+        This is useful when dealing with packages that are under development, not
+        published on package sources and/or only available via system installations.
+        """
+        if not self._installed_packages:
+            return []
+
+        logger.debug(
+            "Falling back to installed packages to discover metadata for %s",
+            dependency.complete_name,
+        )
+        packages = [
+            package
+            for package in self._installed_packages
+            if package.satisfies(dependency, ignore_source_type=True)
+        ]
+        logger.debug(
+            "Found %d compatible packages for %s",
+            len(packages),
+            dependency.complete_name,
+        )
+        return packages
+
+    def search_for_direct_origin_dependency(self, dependency: Dependency) -> Package:
+        package = self._deferred_cache.get(dependency)
+        if package is not None:
+            pass
+
+        elif dependency.is_vcs():
+            dependency = cast("VCSDependency", dependency)
+            package = self._search_for_vcs(dependency)
+
+        elif dependency.is_file():
+            dependency = cast("FileDependency", dependency)
+            package = self._search_for_file(dependency)
+
+        elif dependency.is_directory():
+            dependency = cast("DirectoryDependency", dependency)
+            package = self._search_for_directory(dependency)
+
+        elif dependency.is_url():
+            dependency = cast("URLDependency", dependency)
+            package = self._search_for_url(dependency)
+
+        else:
+            raise RuntimeError(
+                f"Unknown direct dependency type {dependency.source_type}"
+            )
+
+        if dependency.is_vcs():
+            dependency._source_reference = package.source_reference
+            dependency._source_resolved_reference = package.source_resolved_reference
+            dependency._source_subdirectory = package.source_subdirectory
+
+        dependency._constraint = package.version
+        dependency._pretty_constraint = package.version.text
+
+        self._deferred_cache[dependency] = package
+
+        return package
+
+    def search_for(self, dependency: Dependency) -> list[DependencyPackage]:
+        """
+        Search for the specifications that match the given dependency.
+
+        The specifications in the returned list will be considered in reverse
+        order, so the latest version ought to be last.
+        """
+        if dependency.is_root:
+            return PackageCollection(dependency, [self._package])
+
+        if dependency.is_direct_origin():
+            package = self.search_for_direct_origin_dependency(dependency)
+            self._direct_origin_packages[dependency.name] = package
+            return PackageCollection(dependency, [package])
+
+        # If we've previously found a direct-origin package that meets this dependency,
+        # use it.
+        #
+        # We rely on the VersionSolver resolving direct-origin dependencies first.
+        direct_origin_package = self._direct_origin_packages.get(dependency.name)
+        if direct_origin_package is not None:
+            packages = (
+                [direct_origin_package]
+                if dependency.constraint.allows(direct_origin_package.version)
+                else []
+            )
+            return PackageCollection(dependency, packages)
+
+        packages = self._pool.find_packages(dependency)
+
+        packages.sort(
+            key=lambda p: (
+                not p.yanked,
+                not p.is_prerelease() and not dependency.allows_prereleases(),
+                p.version,
+            ),
+            reverse=True,
+        )
+
+        if not packages:
+            packages = self.search_for_installed_packages(dependency)
+
+        return PackageCollection(dependency, packages)
+
+    def _search_for_vcs(self, dependency: VCSDependency) -> Package:
+        """
+        Search for the specifications that match the given VCS dependency.
+
+        Basically, we clone the repository in a temporary directory
+        and get the information we need by checking out the specified reference.
+        """
+        package = self.get_package_from_vcs(
+            dependency.vcs,
+            dependency.source,
+            branch=dependency.branch,
+            tag=dependency.tag,
+            rev=dependency.rev,
+            subdirectory=dependency.source_subdirectory,
+            source_root=self._source_root
+            or (self._env.path.joinpath("src") if self._env else None),
+        )
+
+        self.validate_package_for_dependency(dependency=dependency, package=package)
+
+        package.develop = dependency.develop
+
+        return package
+
+    @staticmethod
+    def get_package_from_vcs(
+        vcs: str,
+        url: str,
+        branch: str | None = None,
+        tag: str | None = None,
+        rev: str | None = None,
+        subdirectory: str | None = None,
+        source_root: Path | None = None,
+    ) -> Package:
+        if vcs != "git":
+            raise ValueError(f"Unsupported VCS dependency {vcs}")
+
+        return _get_package_from_git(
+            url=url,
+            branch=branch,
+            tag=tag,
+            rev=rev,
+            subdirectory=subdirectory,
+            source_root=source_root,
+        )
+
+    def _search_for_file(self, dependency: FileDependency) -> Package:
+        package = self.get_package_from_file(dependency.full_path)
+
+        self.validate_package_for_dependency(dependency=dependency, package=package)
+
+        if dependency.base is not None:
+            package.root_dir = dependency.base
+
+        package.files = [
+            {"file": dependency.path.name, "hash": "sha256:" + dependency.hash()}
+        ]
+
+        return package
+
+    @classmethod
+    def get_package_from_file(cls, file_path: Path) -> Package:
+        try:
+            package = PackageInfo.from_path(path=file_path).to_package(
+                root_dir=file_path
+            )
+        except PackageInfoError:
+            raise RuntimeError(
+                f"Unable to determine package info from path: {file_path}"
+            )
+
+        return package
+
+    def _search_for_directory(self, dependency: DirectoryDependency) -> Package:
+        package = self.get_package_from_directory(dependency.full_path)
+
+        self.validate_package_for_dependency(dependency=dependency, package=package)
+
+        package.develop = dependency.develop
+
+        if dependency.base is not None:
+            package.root_dir = dependency.base
+
+        return package
+
+    @classmethod
+    def get_package_from_directory(cls, directory: Path) -> Package:
+        return PackageInfo.from_directory(path=directory).to_package(root_dir=directory)
+
+    def _search_for_url(self, dependency: URLDependency) -> Package:
+        package = self.get_package_from_url(dependency.url)
+
+        self.validate_package_for_dependency(dependency=dependency, package=package)
+
+        for extra in dependency.extras:
+            if extra in package.extras:
+                for dep in package.extras[extra]:
+                    dep.activate()
+
+                for extra_dep in package.extras[extra]:
+                    package.add_dependency(extra_dep)
+
+        return package
+
+    @classmethod
+    def get_package_from_url(cls, url: str) -> Package:
+        file_name = os.path.basename(urllib.parse.urlparse(url).path)
+        with tempfile.TemporaryDirectory() as temp_dir:
+            dest = Path(temp_dir) / file_name
+            download_file(url, dest)
+            package = cls.get_package_from_file(dest)
+
+        package._source_type = "url"
+        package._source_url = url
+
+        return package
+
+    def _get_dependencies_with_overrides(
+        self, dependencies: list[Dependency], package: DependencyPackage
+    ) -> list[Dependency]:
+        overrides = self._overrides.get(package, {})
+        _dependencies = []
+        overridden = []
+        for dep in dependencies:
+            if dep.name in overrides:
+                if dep.name in overridden:
+                    continue
+
+                # empty constraint is used in overrides to mark that the package has
+                # already been handled and is not required for the attached markers
+                if not overrides[dep.name].constraint.is_empty():
+                    _dependencies.append(overrides[dep.name])
+                overridden.append(dep.name)
+
+                continue
+
+            _dependencies.append(dep)
+        return _dependencies
+
+    def incompatibilities_for(
+        self, dependency_package: DependencyPackage
+    ) -> list[Incompatibility]:
+        """
+        Returns incompatibilities that encapsulate a given package's dependencies,
+        or that indicate it can't be safely selected.
+
+        If multiple subsequent versions of this package have the same
+        dependencies, this will return incompatibilities that reflect that. It
+        won't return incompatibilities that have already been returned by a
+        previous call to incompatibilities_for().
+        """
+        package = dependency_package.package
+        if package.is_root():
+            dependencies = package.all_requires
+        else:
+            dependencies = package.requires
+
+            if not package.python_constraint.allows_all(self._python_constraint):
+                transitive_python_constraint = get_python_constraint_from_marker(
+                    dependency_package.dependency.transitive_marker
+                )
+                intersection = package.python_constraint.intersect(
+                    transitive_python_constraint
+                )
+                difference = transitive_python_constraint.difference(intersection)
+
+                # The difference is only relevant if it intersects
+                # the root package python constraint
+                difference = difference.intersect(self._python_constraint)
+                if (
+                    transitive_python_constraint.is_any()
+                    or self._python_constraint.intersect(
+                        dependency_package.dependency.python_constraint
+                    ).is_empty()
+                    or intersection.is_empty()
+                    or not difference.is_empty()
+                ):
+                    return [
+                        Incompatibility(
+                            [Term(package.to_dependency(), True)],
+                            PythonCause(
+                                package.python_versions, str(self._python_constraint)
+                            ),
+                        )
+                    ]
+
+        _dependencies = [
+            dep
+            for dep in dependencies
+            if dep.name not in self.UNSAFE_PACKAGES
+            and self._python_constraint.allows_any(dep.python_constraint)
+            and (not self._env or dep.marker.validate(self._env.marker_env))
+        ]
+        dependencies = self._get_dependencies_with_overrides(
+            _dependencies, dependency_package
+        )
+
+        return [
+            Incompatibility(
+                [Term(package.to_dependency(), True), Term(dep, False)],
+                DependencyCause(),
+            )
+            for dep in dependencies
+        ]
+
+    def complete_package(
+        self, dependency_package: DependencyPackage
+    ) -> DependencyPackage:
+        package = dependency_package.package
+        dependency = dependency_package.dependency
+
+        if package.is_root():
+            dependency_package = dependency_package.clone()
+            package = dependency_package.package
+            dependency = dependency_package.dependency
+            requires = package.all_requires
+        elif package.source_type not in {
+            "directory",
+            "file",
+            "url",
+            "git",
+        }:
+            try:
+                dependency_package = DependencyPackage(
+                    dependency,
+                    self._pool.package(
+                        package.pretty_name,
+                        package.version,
+                        extras=list(dependency.extras),
+                        repository=dependency.source_name,
+                    ),
+                )
+            except PackageNotFound as e:
+                try:
+                    dependency_package = next(
+                        DependencyPackage(dependency, pkg)
+                        for pkg in self.search_for_installed_packages(dependency)
+                    )
+                except StopIteration:
+                    raise e from e
+
+            package = dependency_package.package
+            dependency = dependency_package.dependency
+            requires = package.requires
+        else:
+            requires = package.requires
+
+        if self._load_deferred:
+            # Retrieving constraints for deferred dependencies
+            for r in requires:
+                if r.is_direct_origin():
+                    self.search_for_direct_origin_dependency(r)
+
+        optional_dependencies = []
+        _dependencies = []
+
+        # If some extras/features were required, we need to
+        # add a special dependency representing the base package
+        # to the current package
+        if dependency.extras:
+            for extra in dependency.extras:
+                extra = safe_extra(extra)
+                if extra not in package.extras:
+                    continue
+
+                optional_dependencies += [d.name for d in package.extras[extra]]
+
+            dependency_package = dependency_package.with_features(
+                list(dependency.extras)
+            )
+            package = dependency_package.package
+            dependency = dependency_package.dependency
+            _dependencies.append(package.without_features().to_dependency())
+
+        for dep in requires:
+            if not self._python_constraint.allows_any(dep.python_constraint):
+                continue
+
+            if dep.name in self.UNSAFE_PACKAGES:
+                continue
+
+            if self._env and not dep.marker.validate(self._env.marker_env):
+                continue
+
+            if not package.is_root() and (
+                (dep.is_optional() and dep.name not in optional_dependencies)
+                or (
+                    dep.in_extras
+                    and not set(dep.in_extras).intersection(
+                        {safe_extra(extra) for extra in dependency.extras}
+                    )
+                )
+            ):
+                continue
+
+            _dependencies.append(dep)
+
+        dependencies = self._get_dependencies_with_overrides(
+            _dependencies, dependency_package
+        )
+
+        # Searching for duplicate dependencies
+        #
+        # If the duplicate dependencies have the same constraint,
+        # the requirements will be merged.
+        #
+        # For instance:
+        #   - enum34; python_version=="2.7"
+        #   - enum34; python_version=="3.3"
+        #
+        # will become:
+        #   - enum34; python_version=="2.7" or python_version=="3.3"
+        #
+        # If the duplicate dependencies have different constraints
+        # we have to split the dependency graph.
+        #
+        # An example of this is:
+        #   - pypiwin32 (220); sys_platform == "win32" and python_version >= "3.6"
+        #   - pypiwin32 (219); sys_platform == "win32" and python_version < "3.6"
+        duplicates: dict[str, list[Dependency]] = defaultdict(list)
+        for dep in dependencies:
+            duplicates[dep.complete_name].append(dep)
+
+        dependencies = []
+        for dep_name, deps in duplicates.items():
+            if len(deps) == 1:
+                dependencies.append(deps[0])
+                continue
+
+            self.debug(f"Duplicate dependencies for {dep_name}")
+
+            non_direct_origin_deps: list[Dependency] = []
+            direct_origin_deps: list[Dependency] = []
+            for dep in deps:
+                if dep.is_direct_origin():
+                    direct_origin_deps.append(dep)
+                else:
+                    non_direct_origin_deps.append(dep)
+            deps = (
+                self._merge_dependencies_by_constraint(
+                    self._merge_dependencies_by_marker(non_direct_origin_deps)
+                )
+                + direct_origin_deps
+            )
+            if len(deps) == 1:
+                self.debug(f"Merging requirements for {deps[0]!s}")
+                dependencies.append(deps[0])
+                continue
+
+            # We leave dependencies as-is if they have the same
+            # python/platform constraints.
+            # That way the resolver will pick up the conflict
+            # and display a proper error.
+            seen = set()
+            for dep in deps:
+                pep_508_dep = dep.to_pep_508(False)
+                if ";" not in pep_508_dep:
+                    _requirements = ""
+                else:
+                    _requirements = pep_508_dep.split(";")[1].strip()
+
+                if _requirements not in seen:
+                    seen.add(_requirements)
+
+            if len(deps) != len(seen):
+                for dep in deps:
+                    dependencies.append(dep)
+
+                continue
+
+            # At this point, we raise an exception that will
+            # tell the solver to make new resolutions with specific overrides.
+            #
+            # For instance, if the foo (1.2.3) package has the following dependencies:
+            #   - bar (>=2.0) ; python_version >= "3.6"
+            #   - bar (<2.0) ; python_version < "3.6"
+            #
+            # then the solver will need to make two new resolutions
+            # with the following overrides:
+            #   - {<Package foo (1.2.3): {"bar": <Dependency bar (>=2.0)>}
+            #   - {<Package foo (1.2.3): {"bar": <Dependency bar (<2.0)>}
+
+            def fmt_warning(d: Dependency) -> str:
+                dependency_marker = d.marker if not d.marker.is_any() else "*"
+                return (
+                    f"{d.name} ({d.pretty_constraint})"
+                    f" with markers {dependency_marker}"
+                )
+
+            warnings = ", ".join(fmt_warning(d) for d in deps[:-1])
+            warnings += f" and {fmt_warning(deps[-1])}"
+            self.debug(
+                f"Different requirements found for {warnings}."
+            )
+
+            # We need to check if one of the duplicate dependencies
+            # has no markers. If there is one, we need to change its
+            # environment markers to the inverse of the union of the
+            # other dependencies markers.
+            # For instance, if we have the following dependencies:
+            #   - ipython
+            #   - ipython (1.2.4) ; implementation_name == "pypy"
+            #
+            # the marker for `ipython` will become `implementation_name != "pypy"`.
+            #
+            # Further, we have to merge the constraints of the requirements
+            # without markers into the constraints of the requirements with markers.
+            # For instance, if we have the following dependencies:
+            #   - foo (>= 1.2)
+            #   - foo (!= 1.2.1) ; python == 3.10
+            #
+            # the constraint for the second entry will become (!= 1.2.1, >= 1.2)
+            any_markers_dependencies = [d for d in deps if d.marker.is_any()]
+            other_markers_dependencies = [d for d in deps if not d.marker.is_any()]
+
+            marker = other_markers_dependencies[0].marker
+            for other_dep in other_markers_dependencies[1:]:
+                marker = marker.union(other_dep.marker)
+            inverted_marker = marker.invert()
+
+            if any_markers_dependencies:
+                for dep_any in any_markers_dependencies:
+                    dep_any.marker = inverted_marker
+                    for dep_other in other_markers_dependencies:
+                        dep_other.constraint = dep_other.constraint.intersect(
+                            dep_any.constraint
+                        )
+            elif not inverted_marker.is_empty() and self._python_constraint.allows_any(
+                get_python_constraint_from_marker(inverted_marker)
+            ):
+                # if there is no any marker dependency
+                # and the inverted marker is not empty,
+                # a dependency with the inverted union of all markers is required
+                # in order to not miss other dependencies later, for instance:
+                #   - foo (1.0) ; python == 3.7
+                #   - foo (2.0) ; python == 3.8
+                #   - bar (2.0) ; python == 3.8
+                #   - bar (3.0) ; python == 3.9
+                #
+                # the last dependency would be missed without this,
+                # because the intersection with both foo dependencies is empty
+                inverted_marker_dep = deps[0].with_constraint(EmptyConstraint())
+                inverted_marker_dep.marker = inverted_marker
+                deps.append(inverted_marker_dep)
+
+            overrides = []
+            overrides_marker_intersection: BaseMarker = AnyMarker()
+            for dep_overrides in self._overrides.values():
+                for dep in dep_overrides.values():
+                    overrides_marker_intersection = (
+                        overrides_marker_intersection.intersect(dep.marker)
+                    )
+            for dep in deps:
+                if not overrides_marker_intersection.intersect(dep.marker).is_empty():
+                    current_overrides = self._overrides.copy()
+                    package_overrides = current_overrides.get(
+                        dependency_package, {}
+                    ).copy()
+                    package_overrides.update({dep.name: dep})
+                    current_overrides.update({dependency_package: package_overrides})
+                    overrides.append(current_overrides)
+
+            if overrides:
+                raise OverrideNeeded(*overrides)
+
+        # Modifying dependencies as needed
+        clean_dependencies = []
+        for dep in dependencies:
+            if not dependency.transitive_marker.without_extras().is_any():
+                marker_intersection = (
+                    dependency.transitive_marker.without_extras().intersect(
+                        dep.marker.without_extras()
+                    )
+                )
+                if marker_intersection.is_empty():
+                    # The dependency is not needed, since the markers specified
+                    # for the current package selection are not compatible with
+                    # the markers for the current dependency, so we skip it
+                    continue
+
+                dep.transitive_marker = marker_intersection
+
+            if not dependency.python_constraint.is_any():
+                python_constraint_intersection = dep.python_constraint.intersect(
+                    dependency.python_constraint
+                )
+                if python_constraint_intersection.is_empty():
+                    # This dependency is not needed under current python constraint.
+                    continue
+                dep.transitive_python_versions = str(python_constraint_intersection)
+
+            clean_dependencies.append(dep)
+
+        package = package.with_dependency_groups([], only=True)
+        dependency_package = DependencyPackage(dependency, package)
+
+        for dep in clean_dependencies:
+            package.add_dependency(dep)
+
+        return dependency_package
+
+    def debug(self, message: str, depth: int = 0) -> None:
+        if not (self._io.is_very_verbose() or self._io.is_debug()):
+            return
+
+        if message.startswith("fact:"):
+            if "depends on" in message:
+                m = re.match(r"fact: (.+?) depends on (.+?) \((.+?)\)", message)
+                if m is None:
+                    raise ValueError(f"Unable to parse fact: {message}")
+                m2 = re.match(r"(.+?) \((.+?)\)", m.group(1))
+                if m2:
+                    name = m2.group(1)
+                    version = f" ({m2.group(2)})"
+                else:
+                    name = m.group(1)
+                    version = ""
+
+                message = (
+                    f"fact: {name}{version} "
+                    f"depends on {m.group(2)} ({m.group(3)})"
+                )
+            elif " is " in message:
+                message = re.sub(
+                    "fact: (.+) is (.+)",
+                    "fact: \\1 is \\2",
+                    message,
+                )
+            else:
+                message = re.sub(
+                    r"(?<=: )(.+?) \((.+?)\)", "\\1 (\\2)", message
+                )
+                message = f"fact: {message.split('fact: ')[1]}"
+        elif message.startswith("selecting "):
+            message = re.sub(
+                r"selecting (.+?) \((.+?)\)",
+                "selecting \\1 (\\2)",
+                message,
+            )
+        elif message.startswith("derived:"):
+            m = re.match(r"derived: (.+?) \((.+?)\)$", message)
+            if m:
+                message = (
+                    f"derived: {m.group(1)}"
+                    f" ({m.group(2)})"
+                )
+            else:
+                message = (
+                    f"derived: {message.split('derived: ')[1]}"
+                )
+        elif message.startswith("conflict:"):
+            m = re.match(r"conflict: (.+?) depends on (.+?) \((.+?)\)", message)
+            if m:
+                m2 = re.match(r"(.+?) \((.+?)\)", m.group(1))
+                if m2:
+                    name = m2.group(1)
+                    version = f" ({m2.group(2)})"
+                else:
+                    name = m.group(1)
+                    version = ""
+
+                message = (
+                    f"conflict: {name}{version} "
+                    f"depends on {m.group(2)} ({m.group(3)})"
+                )
+            else:
+                message = (
+                    "conflict:"
+                    f" {message.split('conflict: ')[1]}"
+                )
+
+        message = message.replace("! ", "! ")
+
+        if self.is_debugging():
+            debug_info = str(message)
+            debug_info = (
+                "\n".join(
+                    [
+                        f"{str(depth).rjust(4)}: {s}"
+                        for s in debug_info.split("\n")
+                    ]
+                )
+                + "\n"
+            )
+
+            self._io.write(debug_info)
+
+    @contextmanager
+    def progress(self) -> Iterator[None]:
+        if not self._io.output.is_decorated() or self.is_debugging():
+            self._io.write_line("Resolving dependencies...")
+            yield
+        else:
+            indicator = Indicator(
+                self._io, "{message}{context}({elapsed:2s})"
+            )
+
+            with indicator.auto(
+                "Resolving dependencies...",
+                "Resolving dependencies...",
+            ):
+                yield
+
+        self._in_progress = False
+
+    def _merge_dependencies_by_constraint(
+        self, dependencies: Iterable[Dependency]
+    ) -> list[Dependency]:
+        by_constraint: dict[VersionConstraint, list[Dependency]] = defaultdict(list)
+        for dep in dependencies:
+            by_constraint[dep.constraint].append(dep)
+        for constraint, _deps in by_constraint.items():
+            new_markers = []
+            for dep in _deps:
+                marker = dep.marker.without_extras()
+                if marker.is_any():
+                    # No marker or only extras
+                    continue
+
+                new_markers.append(marker)
+
+            if not new_markers:
+                continue
+
+            dep = _deps[0]
+            dep.marker = dep.marker.union(MarkerUnion(*new_markers))
+            by_constraint[constraint] = [dep]
+
+        return [value[0] for value in by_constraint.values()]
+
+    def _merge_dependencies_by_marker(
+        self, dependencies: Iterable[Dependency]
+    ) -> list[Dependency]:
+        by_marker: dict[BaseMarker, list[Dependency]] = defaultdict(list)
+        for dep in dependencies:
+            by_marker[dep.marker].append(dep)
+        deps = []
+        for _deps in by_marker.values():
+            if len(_deps) == 1:
+                deps.extend(_deps)
+            else:
+                new_constraint = _deps[0].constraint
+                for dep in _deps[1:]:
+                    new_constraint = new_constraint.intersect(dep.constraint)
+                if new_constraint.is_empty():
+                    # leave dependencies as-is so the resolver will pick up
+                    # the conflict and display a proper error.
+                    deps.extend(_deps)
+                else:
+                    self.debug(
+                        f"Merging constraints for {_deps[0].name} for"
+                        f" marker {_deps[0].marker}"
+                    )
+                    deps.append(_deps[0].with_constraint(new_constraint))
+        return deps
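+
+
+# Illustrative sketch (not part of poetry): the marker-inversion step from
+# complete_package() in isolation, using poetry-core's marker parser. Given
+#   - ipython
+#   - ipython (1.2.4) ; implementation_name == "pypy"
+# the unmarked dependency receives the inverse of the union of the other
+# dependencies' markers. `_demo_invert_marker` is a hypothetical helper.
+def _demo_invert_marker() -> None:
+    from poetry.core.version.markers import parse_marker
+
+    marker = parse_marker('implementation_name == "pypy"')
+    inverted = marker.invert()
+    # `inverted` now represents implementation_name != "pypy", which is what
+    # the plain `ipython` entry ends up carrying after the split
+    print(inverted)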
diff --git a/vendor/poetry/src/poetry/puzzle/solver.py b/vendor/poetry/src/poetry/puzzle/solver.py
new file mode 100644
index 00000000..fb3c389f
--- /dev/null
+++ b/vendor/poetry/src/poetry/puzzle/solver.py
@@ -0,0 +1,359 @@
+from __future__ import annotations
+
+import time
+
+from collections import defaultdict
+from contextlib import contextmanager
+from typing import TYPE_CHECKING
+from typing import FrozenSet
+from typing import Tuple
+from typing import TypeVar
+
+from poetry.core.packages.dependency_group import MAIN_GROUP
+
+from poetry.mixology import resolve_version
+from poetry.mixology.failure import SolveFailure
+from poetry.packages import DependencyPackage
+from poetry.puzzle.exceptions import OverrideNeeded
+from poetry.puzzle.exceptions import SolverProblemError
+from poetry.puzzle.provider import Provider
+
+
+if TYPE_CHECKING:
+    from collections.abc import Iterator
+
+    from cleo.io.io import IO
+    from poetry.core.packages.dependency import Dependency
+    from poetry.core.packages.package import Package
+    from poetry.core.packages.project_package import ProjectPackage
+
+    from poetry.puzzle.transaction import Transaction
+    from poetry.repositories import Pool
+    from poetry.utils.env import Env
+
+
+class Solver:
+    def __init__(
+        self,
+        package: ProjectPackage,
+        pool: Pool,
+        installed: list[Package],
+        locked: list[Package],
+        io: IO,
+        provider: Provider | None = None,
+    ) -> None:
+        self._package = package
+        self._pool = pool
+        self._installed_packages = installed
+        self._locked_packages = locked
+        self._io = io
+
+        if provider is None:
+            provider = Provider(
+                self._package, self._pool, self._io, installed=installed
+            )
+
+        self._provider = provider
+        self._overrides: list[dict[DependencyPackage, dict[str, Dependency]]] = []
+
+    @property
+    def provider(self) -> Provider:
+        return self._provider
+
+    @contextmanager
+    def use_environment(self, env: Env) -> Iterator[None]:
+        with self.provider.use_environment(env):
+            yield
+
+    def solve(self, use_latest: list[str] | None = None) -> Transaction:
+        from poetry.puzzle.transaction import Transaction
+
+        with self._provider.progress():
+            start = time.time()
+            packages, depths = self._solve(use_latest=use_latest)
+            end = time.time()
+
+            if len(self._overrides) > 1:
+                self._provider.debug(
+                    f"Complete version solving took {end - start:.3f} seconds with"
+                    f" {len(self._overrides)} overrides"
+                )
+                self._provider.debug(
+                    "Resolved with overrides:"
+                    f" {', '.join(f'({b})' for b in self._overrides)}"
+                )
+
+        for p in packages:
+            if p.yanked:
+                message = (
+                    f"The locked version {p.pretty_version} for {p.pretty_name} is a"
+                    " yanked version."
+                )
+                if p.yanked_reason:
+                    message += f" Reason for being yanked: {p.yanked_reason}"
+                self._io.write_error_line(f"Warning: {message}")
+
+        return Transaction(
+            self._locked_packages,
+            list(zip(packages, depths)),
+            installed_packages=self._installed_packages,
+            root_package=self._package,
+        )
+
+    def solve_in_compatibility_mode(
+        self,
+        overrides: tuple[dict[DependencyPackage, dict[str, Dependency]], ...],
+        use_latest: list[str] | None = None,
+    ) -> tuple[list[Package], list[int]]:
+        packages = []
+        depths = []
+        for override in overrides:
+            self._provider.debug(
+                "Retrying dependency resolution "
+                f"with the following overrides ({override})."
+            )
+            self._provider.set_overrides(override)
+            _packages, _depths = self._solve(use_latest=use_latest)
+            for index, package in enumerate(_packages):
+                if package not in packages:
+                    packages.append(package)
+                    depths.append(_depths[index])
+                    continue
+                else:
+                    idx = packages.index(package)
+                    pkg = packages[idx]
+                    depths[idx] = max(depths[idx], _depths[index])
+
+                    for dep in package.requires:
+                        if dep not in pkg.requires:
+                            pkg.add_dependency(dep)
+
+        return packages, depths
+
+    def _solve(
+        self, use_latest: list[str] | None = None
+    ) -> tuple[list[Package], list[int]]:
+        if self._provider._overrides:
+            self._overrides.append(self._provider._overrides)
+
+        locked: dict[str, list[DependencyPackage]] = defaultdict(list)
+        for package in self._locked_packages:
+            locked[package.name].append(
+                DependencyPackage(package.to_dependency(), package)
+            )
+        for dependency_packages in locked.values():
+            dependency_packages.sort(
+                key=lambda p: p.package.version,
+                reverse=True,
+            )
+
+        try:
+            result = resolve_version(
+                self._package, self._provider, locked=locked, use_latest=use_latest
+            )
+
+            packages = result.packages
+        except OverrideNeeded as e:
+            return self.solve_in_compatibility_mode(e.overrides, use_latest=use_latest)
+        except SolveFailure as e:
+            raise SolverProblemError(e)
+
+        combined_nodes = depth_first_search(PackageNode(self._package, packages))
+        results = dict(aggregate_package_nodes(nodes) for nodes in combined_nodes)
+
+        # Merging feature packages with base packages
+        final_packages = []
+        depths = []
+        for package in packages:
+            if package.features:
+                for _package in packages:
+                    if (
+                        not _package.features
+                        and _package.name == package.name
+                        and _package.version == package.version
+                    ):
+                        for dep in package.requires:
+                            # Prevent adding base package as a dependency to itself
+                            if _package.name == dep.name:
+                                continue
+
+                            if dep not in _package.requires:
+                                _package.add_dependency(dep)
+            else:
+                final_packages.append(package)
+                depths.append(results[package])
+
+        # Return the packages in their original order with associated depths
+        return final_packages, depths
+
+
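+# DFSNodeID is evaluated at runtime (an assignment, not an annotation), so it
+# uses typing.Tuple and typing.FrozenSet; the subscripted builtins would raise
+# a TypeError on the older interpreters poetry still supports.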
+DFSNodeID = Tuple[str, FrozenSet[str], bool]
+
+T = TypeVar("T", bound="DFSNode")
+
+
+class DFSNode:
+    def __init__(self, id: DFSNodeID, name: str, base_name: str) -> None:
+        self.id = id
+        self.name = name
+        self.base_name = base_name
+
+    def reachable(self: T) -> list[T]:
+        return []
+
+    def visit(self, parents: list[PackageNode]) -> None:
+        pass
+
+    def __str__(self) -> str:
+        return str(self.id)
+
+
+def depth_first_search(source: PackageNode) -> list[list[PackageNode]]:
+    back_edges: dict[DFSNodeID, list[PackageNode]] = defaultdict(list)
+    visited: set[DFSNodeID] = set()
+    topo_sorted_nodes: list[PackageNode] = []
+
+    dfs_visit(source, back_edges, visited, topo_sorted_nodes)
+
+    # Combine the nodes by name
+    combined_nodes: dict[str, list[PackageNode]] = defaultdict(list)
+    for node in topo_sorted_nodes:
+        node.visit(back_edges[node.id])
+        combined_nodes[node.name].append(node)
+
+    combined_topo_sorted_nodes: list[list[PackageNode]] = [
+        combined_nodes.pop(node.name)
+        for node in topo_sorted_nodes
+        if node.name in combined_nodes
+    ]
+
+    return combined_topo_sorted_nodes
+
+
+def dfs_visit(
+    node: PackageNode,
+    back_edges: dict[DFSNodeID, list[PackageNode]],
+    visited: set[DFSNodeID],
+    sorted_nodes: list[PackageNode],
+) -> None:
+    if node.id in visited:
+        return
+    visited.add(node.id)
+
+    for neighbor in node.reachable():
+        back_edges[neighbor.id].append(node)
+        dfs_visit(neighbor, back_edges, visited, sorted_nodes)
+    sorted_nodes.insert(0, node)
+
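+
+# Illustrative sketch (not part of poetry): dfs_visit() above produces a
+# topological order by prepending each node *after* all of its neighbours have
+# been visited (a reverse post-order). The same trick on a toy string graph;
+# `_demo_topo_order` is a hypothetical helper for illustration.
+def _demo_topo_order() -> None:
+    edges = {"a": ["b", "c"], "b": ["c"], "c": []}
+    order: list[str] = []
+    seen: set[str] = set()
+
+    def visit(node: str) -> None:
+        if node in seen:
+            return
+        seen.add(node)
+        for neighbor in edges[node]:
+            visit(neighbor)
+        order.insert(0, node)  # prepend, as dfs_visit does with sorted_nodes
+
+    visit("a")
+    assert order == ["a", "b", "c"]
+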
+
+class PackageNode(DFSNode):
+    def __init__(
+        self,
+        package: Package,
+        packages: list[Package],
+        previous: PackageNode | None = None,
+        previous_dep: Dependency | None = None,
+        dep: Dependency | None = None,
+    ) -> None:
+        self.package = package
+        self.packages = packages
+
+        self.previous = previous
+        self.previous_dep = previous_dep
+        self.dep = dep
+        self.depth = -1
+
+        if not previous:
+            self.category = "dev"
+            self.groups: frozenset[str] = frozenset()
+            self.optional = True
+        elif dep:
+            self.category = "main" if MAIN_GROUP in dep.groups else "dev"
+            self.groups = dep.groups
+            self.optional = dep.is_optional()
+        else:
+            raise ValueError("Both previous and dep must be passed")
+
+        super().__init__(
+            (package.complete_name, self.groups, self.optional),
+            package.complete_name,
+            package.name,
+        )
+
+    def reachable(self) -> list[PackageNode]:
+        children: list[PackageNode] = []
+
+        if (
+            self.dep
+            and self.previous_dep
+            and self.previous_dep is not self.dep
+            and self.previous_dep.name == self.dep.name
+        ):
+            return []
+
+        for dependency in self.package.all_requires:
+            if self.previous and self.previous.name == dependency.name:
+                # We have a circular dependency.
+                # Since the dependencies are resolved we can
+                # simply skip it because we already have it
+                # N.B. this only catches cycles of length 2;
+                # dependency cycles in general are handled by the DFS traversal
+                continue
+
+            for pkg in self.packages:
+                if (
+                    pkg.complete_name == dependency.complete_name
+                    and (
+                        dependency.constraint.allows(pkg.version)
+                        or dependency.allows_prereleases()
+                        and pkg.version.is_unstable()
+                        and dependency.constraint.allows(pkg.version.stable)
+                    )
+                    and not any(
+                        child.package.complete_name == pkg.complete_name
+                        and child.groups == dependency.groups
+                        for child in children
+                    )
+                ):
+                    children.append(
+                        PackageNode(
+                            pkg,
+                            self.packages,
+                            self,
+                            dependency,
+                            self.dep or dependency,
+                        )
+                    )
+
+        return children
+
+    def visit(self, parents: list[PackageNode]) -> None:
+        # The root package, which has no parents, is defined as having depth -1,
+        # so that the root package's top-level dependencies have depth 0.
+        self.depth = 1 + max(
+            [
+                parent.depth if parent.base_name != self.base_name else parent.depth - 1
+                for parent in parents
+            ]
+            + [-2]
+        )
+
+
+def aggregate_package_nodes(nodes: list[PackageNode]) -> tuple[Package, int]:
+    package = nodes[0].package
+    depth = max(node.depth for node in nodes)
+    groups: list[str] = []
+    for node in nodes:
+        groups.extend(node.groups)
+
+    category = "main" if any(MAIN_GROUP in node.groups for node in nodes) else "dev"
+    optional = all(node.optional for node in nodes)
+    for node in nodes:
+        node.depth = depth
+        node.category = category
+        node.optional = optional
+
+    package.category = category
+    package.optional = optional
+
+    return package, depth
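+
+
+# Note on depths (not part of poetry): PackageNode.visit() computes
+# depth = 1 + max(parent depths, -2). The root has no parents, so it gets
+# 1 + (-2) = -1; its direct dependencies then get 1 + (-1) = 0; and a parent
+# with the same base name (a feature package such as "foo[bar]") contributes
+# depth - 1, so extras do not add an artificial level over their base package.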
diff --git a/vendor/poetry/src/poetry/puzzle/transaction.py b/vendor/poetry/src/poetry/puzzle/transaction.py
new file mode 100644
index 00000000..5c5dbd06
--- /dev/null
+++ b/vendor/poetry/src/poetry/puzzle/transaction.py
@@ -0,0 +1,121 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.package import Package
+
+    from poetry.installation.operations.operation import Operation
+
+
+class Transaction:
+    def __init__(
+        self,
+        current_packages: list[Package],
+        result_packages: list[tuple[Package, int]],
+        installed_packages: list[Package] | None = None,
+        root_package: Package | None = None,
+    ) -> None:
+        self._current_packages = current_packages
+        self._result_packages = result_packages
+
+        if installed_packages is None:
+            installed_packages = []
+
+        self._installed_packages = installed_packages
+        self._root_package = root_package
+
+    def calculate_operations(
+        self, with_uninstalls: bool = True, synchronize: bool = False
+    ) -> list[Operation]:
+        from poetry.installation.operations import Install
+        from poetry.installation.operations import Uninstall
+        from poetry.installation.operations import Update
+
+        operations: list[Operation] = []
+
+        for result_package, priority in self._result_packages:
+            installed = False
+
+            for installed_package in self._installed_packages:
+                if result_package.name == installed_package.name:
+                    installed = True
+
+                    # We have to perform an update if the version or another
+                    # attribute of the package has changed (source type, url, ref, ...).
+                    if result_package.version != installed_package.version or (
+                        (
+                            # This check is needed because installed packages can
+                            # never have the source type "legacy": when a "legacy"
+                            # package is installed, the installed package has no
+                            # source_type at all. Thus, if installed_package has no
+                            # source_type and result_package has source_type
+                            # "legacy" (the negation of the condition below), no
+                            # update must be performed. A side effect of this quirk
+                            # is that no update happens when switching between PyPI
+                            # and a legacy source (in either direction).
+                            installed_package.source_type
+                            or result_package.source_type != "legacy"
+                        )
+                        and not result_package.is_same_package_as(installed_package)
+                    ):
+                        operations.append(
+                            Update(installed_package, result_package, priority=priority)
+                        )
+                    else:
+                        operations.append(
+                            Install(result_package).skip("Already installed")
+                        )
+
+                    break
+
+            if not installed:
+                operations.append(Install(result_package, priority=priority))
+
+        if with_uninstalls:
+            for current_package in self._current_packages:
+                found = any(
+                    current_package.name == result_package.name
+                    for result_package, _ in self._result_packages
+                )
+
+                if not found:
+                    for installed_package in self._installed_packages:
+                        if installed_package.name == current_package.name:
+                            operations.append(Uninstall(current_package))
+
+            if synchronize:
+                current_package_names = {
+                    current_package.name for current_package in self._current_packages
+                }
+                # We preserve pip/setuptools/wheel when not managed by poetry, this is
+                # done to avoid externally managed virtual environments causing
+                # unnecessary removals.
+                preserved_package_names = {
+                    "pip",
+                    "setuptools",
+                    "wheel",
+                } - current_package_names
+
+                for installed_package in self._installed_packages:
+                    if (
+                        self._root_package
+                        and installed_package.name == self._root_package.name
+                    ):
+                        continue
+
+                    if installed_package.name in preserved_package_names:
+                        continue
+
+                    if installed_package.name not in current_package_names:
+                        operations.append(Uninstall(installed_package))
+
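+        # Higher-priority operations come first (note the negation); ties are
+        # broken by package name and version for deterministic output.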
+        return sorted(
+            operations,
+            key=lambda o: (
+                -o.priority,
+                o.package.name,
+                o.package.version,
+            ),
+        )
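+
+
+# Usage sketch (illustrative; the package lists are hypothetical inputs from
+# the solver, the lock file and the environment scan):
+#
+#     transaction = Transaction(
+#         current_packages=locked_packages,       # from the lock file
+#         result_packages=solved_packages,        # list of (Package, depth)
+#         installed_packages=env_packages,        # from InstalledRepository.load
+#         root_package=project_package,
+#     )
+#     operations = transaction.calculate_operations(with_uninstalls=True)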
diff --git a/vendor/poetry/src/poetry/py.typed b/vendor/poetry/src/poetry/py.typed
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/repositories/__init__.py b/vendor/poetry/src/poetry/repositories/__init__.py
new file mode 100644
index 00000000..ca887e51
--- /dev/null
+++ b/vendor/poetry/src/poetry/repositories/__init__.py
@@ -0,0 +1,7 @@
+from __future__ import annotations
+
+from poetry.repositories.pool import Pool
+from poetry.repositories.repository import Repository
+
+
+__all__ = ["Pool", "Repository"]
diff --git a/vendor/poetry/src/poetry/repositories/cached.py b/vendor/poetry/src/poetry/repositories/cached.py
new file mode 100644
index 00000000..8c5b74a6
--- /dev/null
+++ b/vendor/poetry/src/poetry/repositories/cached.py
@@ -0,0 +1,88 @@
+from __future__ import annotations
+
+from abc import ABC
+from abc import abstractmethod
+from typing import TYPE_CHECKING
+from typing import Any
+
+from cachy import CacheManager
+from packaging.utils import canonicalize_name
+from poetry.core.semver.helpers import parse_constraint
+
+from poetry.config.config import Config
+from poetry.repositories.repository import Repository
+
+
+if TYPE_CHECKING:
+    from packaging.utils import NormalizedName
+    from poetry.core.packages.package import Package
+    from poetry.core.semver.version import Version
+
+    from poetry.inspection.info import PackageInfo
+
+
+class CachedRepository(Repository, ABC):
+    CACHE_VERSION = parse_constraint("1.1.0")
+
+    def __init__(
+        self, name: str, disable_cache: bool = False, config: Config | None = None
+    ) -> None:
+        super().__init__(name)
+        self._disable_cache = disable_cache
+        self._cache_dir = (config or Config.create()).repository_cache_directory / name
+        self._cache = CacheManager(
+            {
+                "default": "releases",
+                "serializer": "json",
+                "stores": {
+                    "releases": {"driver": "file", "path": str(self._cache_dir)},
+                    "packages": {"driver": "dict"},
+                    "matches": {"driver": "dict"},
+                },
+            }
+        )
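+        # The "releases" store is persisted on disk in the repository cache
+        # directory; "packages" and "matches" are in-memory, per-process stores.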
+
+    @abstractmethod
+    def _get_release_info(
+        self, name: NormalizedName, version: Version
+    ) -> dict[str, Any]:
+        raise NotImplementedError()
+
+    def get_release_info(self, name: NormalizedName, version: Version) -> PackageInfo:
+        """
+        Return the release information given a package name and a version.
+
+        The information is returned from the cache if it exists
+        or retrieved from the remote server.
+        """
+        from poetry.inspection.info import PackageInfo
+
+        if self._disable_cache:
+            return PackageInfo.load(self._get_release_info(name, version))
+
+        cached = self._cache.remember_forever(
+            f"{name}:{version}", lambda: self._get_release_info(name, version)
+        )
+
+        cache_version = cached.get("_cache_version", "0.0.0")
+        if parse_constraint(cache_version) != self.CACHE_VERSION:
+            # The cache must be updated
+            self._log(
+                f"The cache for {name} {version} is outdated. Refreshing.",
+                level="debug",
+            )
+            cached = self._get_release_info(name, version)
+
+            self._cache.forever(f"{name}:{version}", cached)
+
+        return PackageInfo.load(cached)
+
+    def package(
+        self,
+        name: str,
+        version: Version,
+        extras: list[str] | None = None,
+    ) -> Package:
+        return self.get_release_info(canonicalize_name(name), version).to_package(
+            name=name, extras=extras
+        )
diff --git a/vendor/poetry/src/poetry/repositories/exceptions.py b/vendor/poetry/src/poetry/repositories/exceptions.py
new file mode 100644
index 00000000..10ad3c46
--- /dev/null
+++ b/vendor/poetry/src/poetry/repositories/exceptions.py
@@ -0,0 +1,9 @@
+from __future__ import annotations
+
+
+class RepositoryError(Exception):
+    pass
+
+
+class PackageNotFound(Exception):
+    pass
diff --git a/vendor/poetry/src/poetry/repositories/http.py b/vendor/poetry/src/poetry/repositories/http.py
new file mode 100644
index 00000000..c63f2b19
--- /dev/null
+++ b/vendor/poetry/src/poetry/repositories/http.py
@@ -0,0 +1,293 @@
+from __future__ import annotations
+
+import hashlib
+import os
+import urllib
+import urllib.parse
+
+from abc import ABC
+from collections import defaultdict
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
+
+import requests
+
+from poetry.core.packages.dependency import Dependency
+from poetry.core.packages.utils.link import Link
+from poetry.core.semver.helpers import parse_constraint
+from poetry.core.utils.helpers import temporary_directory
+from poetry.core.version.markers import parse_marker
+
+from poetry.repositories.cached import CachedRepository
+from poetry.repositories.exceptions import PackageNotFound
+from poetry.repositories.exceptions import RepositoryError
+from poetry.repositories.link_sources.html import HTMLPage
+from poetry.utils.authenticator import Authenticator
+from poetry.utils.constants import REQUESTS_TIMEOUT
+from poetry.utils.helpers import download_file
+from poetry.utils.patterns import wheel_file_re
+
+
+if TYPE_CHECKING:
+    from poetry.config.config import Config
+    from poetry.inspection.info import PackageInfo
+    from poetry.utils.authenticator import RepositoryCertificateConfig
+
+
+class HTTPRepository(CachedRepository, ABC):
+    def __init__(
+        self,
+        name: str,
+        url: str,
+        config: Config | None = None,
+        disable_cache: bool = False,
+    ) -> None:
+        super().__init__(name, disable_cache, config)
+        self._url = url
+        self._authenticator = Authenticator(
+            config=config,
+            cache_id=name,
+            disable_cache=disable_cache,
+        )
+        self._authenticator.add_repository(name, url)
+
+    @property
+    def session(self) -> Authenticator:
+        return self._authenticator
+
+    @property
+    def url(self) -> str:
+        return self._url
+
+    @property
+    def certificates(self) -> RepositoryCertificateConfig:
+        return self._authenticator.get_certs_for_url(self.url)
+
+    @property
+    def authenticated_url(self) -> str:
+        return self._authenticator.authenticated_url(url=self.url)
+
+    def _download(self, url: str, dest: Path) -> None:
+        return download_file(url, dest, session=self.session)
+
+    def _get_info_from_wheel(self, url: str) -> PackageInfo:
+        from poetry.inspection.info import PackageInfo
+
+        wheel_name = urllib.parse.urlparse(url).path.rsplit("/")[-1]
+        self._log(f"Downloading wheel: {wheel_name}", level="debug")
+
+        filename = os.path.basename(wheel_name)
+
+        with temporary_directory() as temp_dir:
+            filepath = Path(temp_dir) / filename
+            self._download(url, filepath)
+
+            return PackageInfo.from_wheel(filepath)
+
+    def _get_info_from_sdist(self, url: str) -> PackageInfo:
+        from poetry.inspection.info import PackageInfo
+
+        sdist_name = urllib.parse.urlparse(url).path
+        sdist_name_log = sdist_name.rsplit("/")[-1]
+
+        self._log(f"Downloading sdist: {sdist_name_log}", level="debug")
+
+        filename = os.path.basename(sdist_name)
+
+        with temporary_directory() as temp_dir:
+            filepath = Path(temp_dir) / filename
+            self._download(url, filepath)
+
+            return PackageInfo.from_sdist(filepath)
+
+    def _get_info_from_urls(self, urls: dict[str, list[str]]) -> PackageInfo:
+        # Checking wheels first as they are more likely to hold
+        # the necessary information
+        if "bdist_wheel" in urls:
+            # Check for a universal wheel
+            wheels = urls["bdist_wheel"]
+
+            universal_wheel = None
+            universal_python2_wheel = None
+            universal_python3_wheel = None
+            platform_specific_wheels = []
+            for wheel in wheels:
+                link = Link(wheel)
+                m = wheel_file_re.match(link.filename)
+                if not m:
+                    continue
+
+                pyver = m.group("pyver")
+                abi = m.group("abi")
+                plat = m.group("plat")
+                if abi == "none" and plat == "any":
+                    # Universal wheel
+                    if pyver == "py2.py3":
+                        # Any Python
+                        universal_wheel = wheel
+                    elif pyver == "py2":
+                        universal_python2_wheel = wheel
+                    else:
+                        universal_python3_wheel = wheel
+                else:
+                    platform_specific_wheels.append(wheel)
+
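+            # For example (illustrative filenames): "foo-1.0-py2.py3-none-any.whl"
+            # is classified as a universal wheel, "foo-1.0-py3-none-any.whl" as a
+            # Python-3-only universal wheel, and
+            # "foo-1.0-cp310-cp310-manylinux2014_x86_64.whl" as platform-specific.
+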
+            if universal_wheel is not None:
+                return self._get_info_from_wheel(universal_wheel)
+
+            info = None
+            if universal_python2_wheel and universal_python3_wheel:
+                info = self._get_info_from_wheel(universal_python2_wheel)
+
+                py3_info = self._get_info_from_wheel(universal_python3_wheel)
+
+                if info.requires_python or py3_info.requires_python:
+                    info.requires_python = str(
+                        parse_constraint(info.requires_python or "^2.7").union(
+                            parse_constraint(py3_info.requires_python or "^3")
+                        )
+                    )
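+                    # e.g. (illustrative): ">=2.7,<3" from the py2 wheel and
+                    # ">=3.6" from the py3 wheel combine to ">=2.7,<3 || >=3.6"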
+
+                if py3_info.requires_dist:
+                    if not info.requires_dist:
+                        info.requires_dist = py3_info.requires_dist
+
+                        return info
+
+                    py2_requires_dist = {
+                        Dependency.create_from_pep_508(r).to_pep_508()
+                        for r in info.requires_dist
+                    }
+                    py3_requires_dist = {
+                        Dependency.create_from_pep_508(r).to_pep_508()
+                        for r in py3_info.requires_dist
+                    }
+                    base_requires_dist = py2_requires_dist & py3_requires_dist
+                    py2_only_requires_dist = py2_requires_dist - py3_requires_dist
+                    py3_only_requires_dist = py3_requires_dist - py2_requires_dist
+
+                    # Normalizing requires_dist
+                    requires_dist = list(base_requires_dist)
+                    for requirement in py2_only_requires_dist:
+                        dep = Dependency.create_from_pep_508(requirement)
+                        dep.marker = dep.marker.intersect(
+                            parse_marker("python_version == '2.7'")
+                        )
+                        requires_dist.append(dep.to_pep_508())
+
+                    for requirement in py3_only_requires_dist:
+                        dep = Dependency.create_from_pep_508(requirement)
+                        dep.marker = dep.marker.intersect(
+                            parse_marker("python_version >= '3'")
+                        )
+                        requires_dist.append(dep.to_pep_508())
+
+                    info.requires_dist = sorted(set(requires_dist))
+
+            if info:
+                return info
+
+            # Prefer non platform specific wheels
+            if universal_python3_wheel:
+                return self._get_info_from_wheel(universal_python3_wheel)
+
+            if universal_python2_wheel:
+                return self._get_info_from_wheel(universal_python2_wheel)
+
+            if platform_specific_wheels and "sdist" not in urls:
+                # Pick the first wheel available and hope for the best
+                return self._get_info_from_wheel(platform_specific_wheels[0])
+
+        return self._get_info_from_sdist(urls["sdist"][0])
+
+    def _links_to_data(self, links: list[Link], data: PackageInfo) -> dict[str, Any]:
+        if not links:
+            raise PackageNotFound(
+                f'No valid distribution links found for package: "{data.name}" version:'
+                f' "{data.version}"'
+            )
+        urls = defaultdict(list)
+        files: list[dict[str, Any]] = []
+        for link in links:
+            if link.yanked and not data.yanked:
+                # drop yanked files unless the entire release is yanked
+                continue
+            if link.is_wheel:
+                urls["bdist_wheel"].append(link.url)
+            elif link.filename.endswith(
+                (".tar.gz", ".zip", ".bz2", ".xz", ".Z", ".tar")
+            ):
+                urls["sdist"].append(link.url)
+
+            file_hash = f"{link.hash_name}:{link.hash}" if link.hash else None
+
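+            # If the index provided no hash at all, or it advertised a hash whose
+            # algorithm we do not accept in lock data (anything other than
+            # sha256/sha384/sha512) but which hashlib can still verify, download
+            # the file, verify the advertised hash if there is one, and record a
+            # locally computed sha256 instead.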
+            if not link.hash or (
+                link.hash_name is not None
+                and link.hash_name not in ("sha256", "sha384", "sha512")
+                and hasattr(hashlib, link.hash_name)
+            ):
+                with temporary_directory() as temp_dir:
+                    filepath = Path(temp_dir) / link.filename
+                    self._download(link.url, filepath)
+
+                    known_hash = (
+                        getattr(hashlib, link.hash_name)() if link.hash_name else None
+                    )
+                    required_hash = hashlib.sha256()
+
+                    chunksize = 4096
+                    with filepath.open("rb") as f:
+                        while True:
+                            chunk = f.read(chunksize)
+                            if not chunk:
+                                break
+                            if known_hash:
+                                known_hash.update(chunk)
+                            required_hash.update(chunk)
+
+                    if not known_hash or known_hash.hexdigest() == link.hash:
+                        file_hash = f"{required_hash.name}:{required_hash.hexdigest()}"
+
+            files.append({"file": link.filename, "hash": file_hash})
+
+        data.files = files
+
+        info = self._get_info_from_urls(urls)
+
+        data.summary = info.summary
+        data.requires_dist = info.requires_dist
+        data.requires_python = info.requires_python
+
+        return data.asdict()
+
+    def _get_response(self, endpoint: str) -> requests.Response | None:
+        url = self._url + endpoint
+        try:
+            response: requests.Response = self.session.get(
+                url, raise_for_status=False, timeout=REQUESTS_TIMEOUT
+            )
+            if response.status_code in (401, 403):
+                self._log(
+                    f"Authorization error accessing {url}",
+                    level="warning",
+                )
+                return None
+            if response.status_code == 404:
+                return None
+            response.raise_for_status()
+        except requests.exceptions.HTTPError as e:
+            raise RepositoryError(e)
+
+        if response.url != url:
+            self._log(
+                f"Response URL {response.url} differs from request URL {url}",
+                level="debug",
+            )
+        return response
+
+    def _get_page(self, endpoint: str) -> HTMLPage | None:
+        response = self._get_response(endpoint)
+        if not response:
+            return None
+        return HTMLPage(response.url, response.text)
diff --git a/vendor/poetry/src/poetry/repositories/installed_repository.py b/vendor/poetry/src/poetry/repositories/installed_repository.py
new file mode 100644
index 00000000..087d01de
--- /dev/null
+++ b/vendor/poetry/src/poetry/repositories/installed_repository.py
@@ -0,0 +1,291 @@
+from __future__ import annotations
+
+import itertools
+import json
+import logging
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+from packaging.utils import canonicalize_name
+from poetry.core.packages.package import Package
+from poetry.core.packages.utils.utils import url_to_path
+from poetry.core.utils.helpers import module_name
+
+from poetry.repositories.repository import Repository
+from poetry.utils._compat import metadata
+
+
+if TYPE_CHECKING:
+    from poetry.utils.env import Env
+
+
+try:
+    FileNotFoundError
+except NameError:
+    FileNotFoundError = OSError
+
+
+logger = logging.getLogger(__name__)
+
+
+class InstalledRepository(Repository):
+    def __init__(self) -> None:
+        super().__init__("poetry-installed")
+
+    @classmethod
+    def get_package_paths(cls, env: Env, name: str) -> set[Path]:
+        """
+        Process a .pth file within the site-packages directories, and return any valid
+        paths. We skip executable .pth files as there is no reliable means to do this
+        without side effects to the current run-time. No check is made that the item
+        refers to a directory rather than a file; however, in order to maintain
+        backwards compatibility, we allow non-existing paths to be discovered. The latter
+        behaviour is different to how Python's site-specific hook configuration works.
+
+        Reference: https://docs.python.org/3.8/library/site.html
+
+        :param env: The environment to search for the .pth file in.
+        :param name: The name of the package to search .pth file for.
+        :return: A `Set` of valid `Path` objects.
+        """
+        paths = set()
+
+        # We identify the candidate .pth files to check; this handles cases where
+        # the pth file for foo-bar might have been installed as either foo-bar.pth
+        # or foo_bar.pth (expected), in either the pure or the platform lib directory.
+        candidates = itertools.product(
+            {env.purelib, env.platlib},
+            {name, module_name(name)},
+        )
+
+        for lib, module in candidates:
+            pth_file = lib.joinpath(module).with_suffix(".pth")
+            if not pth_file.exists():
+                continue
+
+            with pth_file.open() as f:
+                for line in f:
+                    line = line.strip()
+                    if line and not line.startswith(("#", "import ", "import\t")):
+                        path = Path(line)
+                        if not path.is_absolute():
+                            path = lib.joinpath(path).resolve()
+                        paths.add(path)
+
+        src_path = env.path / "src" / name
+        if not paths and src_path.exists():
+            paths.add(src_path)
+
+        return paths
+
+    @classmethod
+    def get_package_vcs_properties_from_path(cls, src: Path) -> tuple[str, str, str]:
+        from poetry.vcs.git import Git
+
+        info = Git.info(repo=src)
+        return "git", info.origin, info.revision
+
+    @classmethod
+    def is_vcs_package(cls, package: Path | Package, env: Env) -> bool:
+        # A VCS dependency should have been installed
+        # in the src directory.
+        src = env.path / "src"
+        if isinstance(package, Package):
+            return src.joinpath(package.name).is_dir()
+
+        try:
+            package.relative_to(env.path / "src")
+        except ValueError:
+            return False
+        else:
+            return True
+
+    @classmethod
+    def create_package_from_distribution(
+        cls, distribution: metadata.Distribution, env: Env
+    ) -> Package:
+        # We first check for a direct_url.json file to determine
+        # the type of package.
+        path = Path(str(distribution._path))  # type: ignore[attr-defined]
+
+        if (
+            path.name.endswith(".dist-info")
+            and path.joinpath("direct_url.json").exists()
+        ):
+            return cls.create_package_from_pep610(distribution)
+
+        is_standard_package = env.is_path_relative_to_lib(path)
+
+        source_type = None
+        source_url = None
+        source_reference = None
+        source_resolved_reference = None
+        source_subdirectory = None
+        if is_standard_package:
+            if path.name.endswith(".dist-info"):
+                paths = cls.get_package_paths(
+                    env=env, name=distribution.metadata["name"]
+                )
+                if paths:
+                    is_editable_package = False
+                    for src in paths:
+                        if cls.is_vcs_package(src, env):
+                            (
+                                source_type,
+                                source_url,
+                                source_reference,
+                            ) = cls.get_package_vcs_properties_from_path(src)
+                            break
+
+                        if not (
+                            is_editable_package or env.is_path_relative_to_lib(src)
+                        ):
+                            is_editable_package = True
+                    else:
+                        # TODO: handle multiple source directories?
+                        if is_editable_package:
+                            source_type = "directory"
+                            source_url = paths.pop().as_posix()
+        elif cls.is_vcs_package(path, env):
+            (
+                source_type,
+                source_url,
+                source_reference,
+            ) = cls.get_package_vcs_properties_from_path(
+                env.path / "src" / canonicalize_name(distribution.metadata["name"])
+            )
+        else:
+            # If not, it's a path dependency
+            source_type = "directory"
+            source_url = str(path.parent)
+
+        package = Package(
+            distribution.metadata["name"],
+            distribution.metadata["version"],
+            source_type=source_type,
+            source_url=source_url,
+            source_reference=source_reference,
+            source_resolved_reference=source_resolved_reference,
+            source_subdirectory=source_subdirectory,
+        )
+
+        package.description = distribution.metadata.get(  # type: ignore[attr-defined]
+            "summary",
+            "",
+        )
+
+        return package
+
+    @classmethod
+    def create_package_from_pep610(cls, distribution: metadata.Distribution) -> Package:
+        path = Path(str(distribution._path))  # type: ignore[attr-defined]
+        source_type = None
+        source_url = None
+        source_reference = None
+        source_resolved_reference = None
+        source_subdirectory = None
+        develop = False
+
+        url_reference = json.loads(
+            path.joinpath("direct_url.json").read_text(encoding="utf-8")
+        )
+        if "archive_info" in url_reference:
+            # File or URL distribution
+            if url_reference["url"].startswith("file:"):
+                # File distribution
+                source_type = "file"
+                source_url = url_to_path(url_reference["url"]).as_posix()
+            else:
+                # URL distribution
+                source_type = "url"
+                source_url = url_reference["url"]
+        elif "dir_info" in url_reference:
+            # Directory distribution
+            source_type = "directory"
+            source_url = url_to_path(url_reference["url"]).as_posix()
+            develop = url_reference["dir_info"].get("editable", False)
+        elif "vcs_info" in url_reference:
+            # VCS distribution
+            source_type = url_reference["vcs_info"]["vcs"]
+            source_url = url_reference["url"]
+            source_resolved_reference = url_reference["vcs_info"]["commit_id"]
+            source_reference = url_reference["vcs_info"].get(
+                "requested_revision", source_resolved_reference
+            )
+        source_subdirectory = url_reference.get("subdirectory")
+
+        package = Package(
+            distribution.metadata["name"],
+            distribution.metadata["version"],
+            source_type=source_type,
+            source_url=source_url,
+            source_reference=source_reference,
+            source_resolved_reference=source_resolved_reference,
+            source_subdirectory=source_subdirectory,
+            develop=develop,
+        )
+
+        package.description = distribution.metadata.get(  # type: ignore[attr-defined]
+            "summary",
+            "",
+        )
+
+        return package
+
+    @classmethod
+    def load(cls, env: Env, with_dependencies: bool = False) -> InstalledRepository:
+        """
+        Load installed packages.
+        """
+        from poetry.core.packages.dependency import Dependency
+
+        repo = cls()
+        seen = set()
+        skipped = set()
+
+        for entry in reversed(env.sys_path):
+            if not entry.strip():
+                logger.debug(
+                    "Project environment contains an empty path in sys_path,"
+                    " ignoring."
+                )
+                continue
+
+            for distribution in sorted(
+                metadata.distributions(  # type: ignore[no-untyped-call]
+                    path=[entry],
+                ),
+                key=lambda d: str(d._path),  # type: ignore[attr-defined]
+            ):
+                path = Path(str(distribution._path))  # type: ignore[attr-defined]
+
+                if path in skipped:
+                    continue
+
+                try:
+                    name = canonicalize_name(distribution.metadata["name"])
+                except TypeError:
+                    logger.warning(
+                        "Project environment contains an invalid distribution"
+                        " (%s). Consider removing it manually or recreate the"
+                        " environment.",
+                        path,
+                    )
+                    skipped.add(path)
+                    continue
+
+                if name in seen:
+                    continue
+
+                package = cls.create_package_from_distribution(distribution, env)
+
+                if with_dependencies:
+                    for require in distribution.metadata.get_all("requires-dist", []):
+                        dep = Dependency.create_from_pep_508(require)
+                        package.add_dependency(dep)
+
+                seen.add(package.name)
+                repo.add_package(package)
+
+        return repo
diff --git a/vendor/poetry/src/poetry/repositories/legacy_repository.py b/vendor/poetry/src/poetry/repositories/legacy_repository.py
new file mode 100644
index 00000000..c4019b6f
--- /dev/null
+++ b/vendor/poetry/src/poetry/repositories/legacy_repository.py
@@ -0,0 +1,140 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from typing import Any
+
+from poetry.core.packages.package import Package
+
+from poetry.inspection.info import PackageInfo
+from poetry.repositories.exceptions import PackageNotFound
+from poetry.repositories.http import HTTPRepository
+from poetry.repositories.link_sources.html import SimpleRepositoryPage
+
+
+if TYPE_CHECKING:
+    from packaging.utils import NormalizedName
+    from poetry.core.packages.utils.link import Link
+    from poetry.core.semver.version import Version
+    from poetry.core.semver.version_constraint import VersionConstraint
+
+    from poetry.config.config import Config
+
+
+class LegacyRepository(HTTPRepository):
+    def __init__(
+        self,
+        name: str,
+        url: str,
+        config: Config | None = None,
+        disable_cache: bool = False,
+    ) -> None:
+        if name == "pypi":
+            raise ValueError("The name [pypi] is reserved for repositories")
+
+        super().__init__(name, url.rstrip("/"), config, disable_cache)
+
+    def package(
+        self, name: str, version: Version, extras: list[str] | None = None
+    ) -> Package:
+        """
+        Retrieve the release information.
+
+        This is a heavy task which takes time.
+        We have to download a package to get the dependencies.
+        We also need to download every file matching this release
+        to get the various hashes.
+
+        Note that this will be cached so the subsequent operations
+        should be much faster.
+        """
+        try:
+            index = self._packages.index(Package(name, version))
+
+            return self._packages[index]
+        except ValueError:
+            package = super().package(name, version, extras)
+            package._source_type = "legacy"
+            package._source_url = self._url
+            package._source_reference = self.name
+
+            return package
+
+    def find_links_for_package(self, package: Package) -> list[Link]:
+        page = self._get_page(f"/{package.name}/")
+        if page is None:
+            return []
+
+        return list(page.links_for_version(package.name, package.version))
+
+    def _find_packages(
+        self, name: NormalizedName, constraint: VersionConstraint
+    ) -> list[Package]:
+        """
+        Find packages on the remote server.
+        """
+        versions: list[tuple[Version, str | bool]]
+
+        key: str = name
+        if not constraint.is_any():
+            key = f"{key}:{constraint!s}"
+
+        if self._cache.store("matches").has(key):
+            versions = self._cache.store("matches").get(key)
+        else:
+            page = self._get_page(f"/{name}/")
+            if page is None:
+                self._log(
+                    f"No packages found for {name}",
+                    level="debug",
+                )
+                return []
+
+            versions = [
+                (version, page.yanked(name, version))
+                for version in page.versions(name)
+                if constraint.allows(version)
+            ]
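+            # Cache the matched versions briefly (cachy's third argument to
+            # put() is a lifetime in minutes).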
+            self._cache.store("matches").put(key, versions, 5)
+
+        return [
+            Package(
+                name,
+                version,
+                source_type="legacy",
+                source_reference=self.name,
+                source_url=self._url,
+                yanked=yanked,
+            )
+            for version, yanked in versions
+        ]
+
+    def _get_release_info(
+        self, name: NormalizedName, version: Version
+    ) -> dict[str, Any]:
+        page = self._get_page(f"/{name}/")
+        if page is None:
+            raise PackageNotFound(f'No package named "{name}"')
+
+        links = list(page.links_for_version(name, version))
+        yanked = page.yanked(name, version)
+
+        return self._links_to_data(
+            links,
+            PackageInfo(
+                name=name,
+                version=version.text,
+                summary="",
+                platform=None,
+                requires_dist=[],
+                requires_python=None,
+                files=[],
+                yanked=yanked,
+                cache_version=str(self.CACHE_VERSION),
+            ),
+        )
+
+    def _get_page(self, endpoint: str) -> SimpleRepositoryPage | None:
+        response = self._get_response(endpoint)
+        if not response:
+            return None
+        return SimpleRepositoryPage(response.url, response.text)
diff --git a/vendor/poetry/src/poetry/repositories/link_sources/__init__.py b/vendor/poetry/src/poetry/repositories/link_sources/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/repositories/link_sources/base.py b/vendor/poetry/src/poetry/repositories/link_sources/base.py
new file mode 100644
index 00000000..6e07de3c
--- /dev/null
+++ b/vendor/poetry/src/poetry/repositories/link_sources/base.py
@@ -0,0 +1,129 @@
+from __future__ import annotations
+
+import logging
+import re
+
+from abc import abstractmethod
+from typing import TYPE_CHECKING
+
+from packaging.utils import canonicalize_name
+from poetry.core.packages.package import Package
+from poetry.core.semver.version import Version
+
+from poetry.utils.patterns import sdist_file_re
+from poetry.utils.patterns import wheel_file_re
+
+
+if TYPE_CHECKING:
+    from collections.abc import Iterator
+
+    from packaging.utils import NormalizedName
+    from poetry.core.packages.utils.link import Link
+
+
+logger = logging.getLogger(__name__)
+
+
+class LinkSource:
+    VERSION_REGEX = re.compile(r"(?i)([a-z0-9_\-.]+?)-(?=\d)([a-z0-9_.!+-]+)")
+    CLEAN_REGEX = re.compile(r"[^a-z0-9$&+,/:;=?@.#%_\\|-]", re.I)
+    SUPPORTED_FORMATS = [
+        ".tar.gz",
+        ".whl",
+        ".zip",
+        ".tar.bz2",
+        ".tar.xz",
+        ".tar.Z",
+        ".tar",
+    ]
+
+    def __init__(self, url: str) -> None:
+        self._url = url
+
+    @property
+    def url(self) -> str:
+        return self._url
+
+    def versions(self, name: str) -> Iterator[Version]:
+        name = canonicalize_name(name)
+        seen: set[Version] = set()
+
+        for link in self.links:
+            pkg = self.link_package_data(link)
+
+            if pkg and pkg.name == name and pkg.version not in seen:
+                seen.add(pkg.version)
+                yield pkg.version
+
+    @property
+    def packages(self) -> Iterator[Package]:
+        for link in self.links:
+            pkg = self.link_package_data(link)
+
+            if pkg:
+                yield pkg
+
+    @property
+    @abstractmethod
+    def links(self) -> Iterator[Link]:
+        raise NotImplementedError()
+
+    @classmethod
+    def link_package_data(cls, link: Link) -> Package | None:
+        name: str | None = None
+        version_string: str | None = None
+        version: Version | None = None
+        m = wheel_file_re.match(link.filename) or sdist_file_re.match(link.filename)
+
+        if m:
+            name = canonicalize_name(m.group("name"))
+            version_string = m.group("ver")
+        else:
+            info, ext = link.splitext()
+            match = cls.VERSION_REGEX.match(info)
+            if match:
+                name = match.group(1)
+                version_string = match.group(2)
+
+        if version_string:
+            try:
+                version = Version.parse(version_string)
+            except ValueError:
+                logger.debug(
+                    "Skipping url (%s) due to invalid version (%s)",
+                    link.url,
+                    version_string,
+                )
+                return None
+
+        pkg = None
+        if name and version:
+            pkg = Package(name, version, source_url=link.url)
+        return pkg
+
+    def links_for_version(
+        self, name: NormalizedName, version: Version
+    ) -> Iterator[Link]:
+        for link in self.links:
+            pkg = self.link_package_data(link)
+
+            if pkg and pkg.name == name and pkg.version == version:
+                yield link
+
+    def clean_link(self, url: str) -> str:
+        """Makes sure a link is fully encoded.  That is, if a ' ' shows up in
+        the link, it will be rewritten to %20 (while not over-quoting
+        % or other characters)."""
+        return self.CLEAN_REGEX.sub(lambda match: f"%{ord(match.group(0)):02x}", url)
+
+    def yanked(self, name: NormalizedName, version: Version) -> str | bool:
+        reasons = set()
+        for link in self.links_for_version(name, version):
+            if link.yanked:
+                if link.yanked_reason:
+                    reasons.add(link.yanked_reason)
+            else:
+                # release is not yanked if at least one file is not yanked
+                return False
+        # if all files are yanked (or there are no files) the release is yanked
+        if reasons:
+            return "\n".join(sorted(reasons))
+        return True
diff --git a/vendor/poetry/src/poetry/repositories/link_sources/html.py b/vendor/poetry/src/poetry/repositories/link_sources/html.py
new file mode 100644
index 00000000..d72a8e06
--- /dev/null
+++ b/vendor/poetry/src/poetry/repositories/link_sources/html.py
@@ -0,0 +1,54 @@
+from __future__ import annotations
+
+import urllib.parse
+import warnings
+
+from html import unescape
+from typing import TYPE_CHECKING
+
+from poetry.core.packages.utils.link import Link
+
+from poetry.repositories.link_sources.base import LinkSource
+
+
+if TYPE_CHECKING:
+    from collections.abc import Iterator
+
+with warnings.catch_warnings():
+    warnings.simplefilter("ignore")
+    import html5lib
+
+
+class HTMLPage(LinkSource):
+    def __init__(self, url: str, content: str) -> None:
+        super().__init__(url=url)
+
+        self._parsed = html5lib.parse(content, namespaceHTMLElements=False)
+
+    @property
+    def links(self) -> Iterator[Link]:
+        for anchor in self._parsed.findall(".//a"):
+            if anchor.get("href"):
+                href = anchor.get("href")
+                url = self.clean_link(urllib.parse.urljoin(self._url, href))
+                pyrequire = anchor.get("data-requires-python")
+                pyrequire = unescape(pyrequire) if pyrequire else None
+                yanked_value = anchor.get("data-yanked")
+                yanked: str | bool
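+                # PEP 592: a data-yanked attribute carrying a value holds the
+                # yank reason; a bare attribute means yanked without a reason.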
+                if yanked_value:
+                    yanked = unescape(yanked_value)
+                else:
+                    yanked = "data-yanked" in anchor.attrib
+                link = Link(url, requires_python=pyrequire, yanked=yanked)
+
+                if link.ext not in self.SUPPORTED_FORMATS:
+                    continue
+
+                yield link
+
+
+class SimpleRepositoryPage(HTMLPage):
+    def __init__(self, url: str, content: str) -> None:
+        if not url.endswith("/"):
+            url += "/"
+        super().__init__(url=url, content=content)
diff --git a/vendor/poetry/src/poetry/repositories/lockfile_repository.py b/vendor/poetry/src/poetry/repositories/lockfile_repository.py
new file mode 100644
index 00000000..25eff224
--- /dev/null
+++ b/vendor/poetry/src/poetry/repositories/lockfile_repository.py
@@ -0,0 +1,32 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.repositories import Repository
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.package import Package
+
+
+class LockfileRepository(Repository):
+    """
+    Special repository that distinguishes packages not only by name and version,
+    but also by source type, url, etc.
+    """
+
+    def __init__(self) -> None:
+        super().__init__("poetry-lockfile")
+
+    def has_package(self, package: Package) -> bool:
+        return any(p == package for p in self.packages)
+
+    def remove_package(self, package: Package) -> None:
+        index = None
+        for i, repo_package in enumerate(self.packages):
+            if repo_package == package:
+                index = i
+                break
+
+        if index is not None:
+            del self._packages[index]
diff --git a/vendor/poetry/src/poetry/repositories/pool.py b/vendor/poetry/src/poetry/repositories/pool.py
new file mode 100644
index 00000000..edc53e71
--- /dev/null
+++ b/vendor/poetry/src/poetry/repositories/pool.py
@@ -0,0 +1,195 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.repositories.exceptions import PackageNotFound
+from poetry.repositories.repository import Repository
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.dependency import Dependency
+    from poetry.core.packages.package import Package
+    from poetry.core.semver.version import Version
+
+
+class Pool(Repository):
+    def __init__(
+        self,
+        repositories: list[Repository] | None = None,
+        ignore_repository_names: bool = False,
+    ) -> None:
+        super().__init__("poetry-pool")
+
+        if repositories is None:
+            repositories = []
+
+        self._lookup: dict[str, int] = {}
+        self._repositories: list[Repository] = []
+        self._default = False
+        self._has_primary_repositories = False
+        self._secondary_start_idx: int | None = None
+
+        for repository in repositories:
+            self.add_repository(repository)
+
+        self._ignore_repository_names = ignore_repository_names
+
+    @property
+    def repositories(self) -> list[Repository]:
+        return self._repositories
+
+    def has_default(self) -> bool:
+        return self._default
+
+    def has_primary_repositories(self) -> bool:
+        return self._has_primary_repositories
+
+    def has_repository(self, name: str) -> bool:
+        return name.lower() in self._lookup
+
+    def repository(self, name: str) -> Repository:
+        name = name.lower()
+
+        lookup = self._lookup.get(name)
+        if lookup is not None:
+            return self._repositories[lookup]
+
+        raise ValueError(f'Repository "{name}" does not exist.')
+
+    def add_repository(
+        self, repository: Repository, default: bool = False, secondary: bool = False
+    ) -> Pool:
+        """
+        Adds a repository to the pool.
+        """
+        repository_name = repository.name.lower()
+        if repository_name in self._lookup:
+            raise ValueError(f"{repository_name} already added")
+
+        if default:
+            if self.has_default():
+                raise ValueError("Only one repository can be the default")
+
+            self._default = True
+            self._repositories.insert(0, repository)
+            for name in self._lookup:
+                self._lookup[name] += 1
+
+            if self._secondary_start_idx is not None:
+                self._secondary_start_idx += 1
+
+            self._lookup[repository_name] = 0
+        elif secondary:
+            if self._secondary_start_idx is None:
+                self._secondary_start_idx = len(self._repositories)
+
+            self._repositories.append(repository)
+            self._lookup[repository_name] = len(self._repositories) - 1
+        else:
+            self._has_primary_repositories = True
+            if self._secondary_start_idx is None:
+                self._repositories.append(repository)
+                self._lookup[repository_name] = len(self._repositories) - 1
+            else:
+                self._repositories.insert(self._secondary_start_idx, repository)
+
+                for name, idx in self._lookup.items():
+                    if idx < self._secondary_start_idx:
+                        continue
+
+                    self._lookup[name] += 1
+
+                self._lookup[repository_name] = self._secondary_start_idx
+                self._secondary_start_idx += 1
+
+        return self
+
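+    # Ordering sketch (illustrative; the repository variables are hypothetical
+    # Repository instances). The pool keeps a default repository first, primary
+    # repositories next and secondary repositories last:
+    #
+    #     pool = Pool()
+    #     pool.add_repository(secondary_repo, secondary=True)
+    #     pool.add_repository(primary_repo)
+    #     pool.add_repository(default_repo, default=True)
+    #     [repo.name for repo in pool.repositories]
+    #     # -> [default_repo.name, primary_repo.name, secondary_repo.name]
+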
+    def remove_repository(self, repository_name: str) -> Pool:
+        if repository_name is not None:
+            repository_name = repository_name.lower()
+
+        idx = self._lookup.get(repository_name)
+        if idx is not None:
+            del self._repositories[idx]
+            del self._lookup[repository_name]
+
+            if idx == 0:
+                self._default = False
+
+            for name in self._lookup:
+                if self._lookup[name] > idx:
+                    self._lookup[name] -= 1
+
+            if (
+                self._secondary_start_idx is not None
+                and self._secondary_start_idx > idx
+            ):
+                self._secondary_start_idx -= 1
+
+        return self
+
+    def has_package(self, package: Package) -> bool:
+        raise NotImplementedError()
+
+    def package(
+        self,
+        name: str,
+        version: Version,
+        extras: list[str] | None = None,
+        repository: str | None = None,
+    ) -> Package:
+        if repository is not None:
+            repository = repository.lower()
+
+        if (
+            repository is not None
+            and repository not in self._lookup
+            and not self._ignore_repository_names
+        ):
+            raise ValueError(f'Repository "{repository}" does not exist.')
+
+        if repository is not None and not self._ignore_repository_names:
+            return self.repository(repository).package(name, version, extras=extras)
+
+        for repo in self._repositories:
+            try:
+                package = repo.package(name, version, extras=extras)
+            except PackageNotFound:
+                continue
+
+            return package
+
+        raise PackageNotFound(f"Package {name} ({version}) not found.")
+
+    def find_packages(self, dependency: Dependency) -> list[Package]:
+        repository = dependency.source_name
+        if repository is not None:
+            repository = repository.lower()
+
+        if (
+            repository is not None
+            and repository not in self._lookup
+            and not self._ignore_repository_names
+        ):
+            raise ValueError(f'Repository "{repository}" does not exist.')
+
+        if repository is not None and not self._ignore_repository_names:
+            return self.repository(repository).find_packages(dependency)
+
+        packages = []
+        for repo in self._repositories:
+            packages += repo.find_packages(dependency)
+
+        return packages
+
+    def search(self, query: str) -> list[Package]:
+        from poetry.repositories.legacy_repository import LegacyRepository
+
+        results = []
+        for repository in self._repositories:
+            if isinstance(repository, LegacyRepository):
+                continue
+
+            results += repository.search(query)
+
+        return results
diff --git a/vendor/poetry/src/poetry/repositories/pypi_repository.py b/vendor/poetry/src/poetry/repositories/pypi_repository.py
new file mode 100644
index 00000000..277df3cf
--- /dev/null
+++ b/vendor/poetry/src/poetry/repositories/pypi_repository.py
@@ -0,0 +1,266 @@
+from __future__ import annotations
+
+import logging
+
+from collections import defaultdict
+from typing import TYPE_CHECKING
+from typing import Any
+
+import requests
+
+from cachecontrol.controller import logger as cache_control_logger
+from html5lib.html5parser import parse
+from poetry.core.packages.package import Package
+from poetry.core.packages.utils.link import Link
+from poetry.core.semver.version import Version
+from poetry.core.version.exceptions import InvalidVersion
+
+from poetry.repositories.exceptions import PackageNotFound
+from poetry.repositories.http import HTTPRepository
+from poetry.utils._compat import to_str
+from poetry.utils.constants import REQUESTS_TIMEOUT
+
+
+cache_control_logger.setLevel(logging.ERROR)
+
+logger = logging.getLogger(__name__)
+
+
+if TYPE_CHECKING:
+    from packaging.utils import NormalizedName
+    from poetry.core.semver.version_constraint import VersionConstraint
+
+
+class PyPiRepository(HTTPRepository):
+    def __init__(
+        self,
+        url: str = "https://pypi.org/",
+        disable_cache: bool = False,
+        fallback: bool = True,
+    ) -> None:
+        super().__init__(
+            "PyPI", url.rstrip("/") + "/simple/", disable_cache=disable_cache
+        )
+
+        self._base_url = url
+        self._fallback = fallback
+
+    def search(self, query: str) -> list[Package]:
+        results = []
+
+        search = {"q": query}
+
+        response = requests.session().get(
+            self._base_url + "search", params=search, timeout=REQUESTS_TIMEOUT
+        )
+        content = parse(response.content, namespaceHTMLElements=False)
+        for result in content.findall(".//*[@class='package-snippet']"):
+            name_element = result.find("h3/*[@class='package-snippet__name']")
+            version_element = result.find("h3/*[@class='package-snippet__version']")
+
+            if (
+                name_element is None
+                or version_element is None
+                or not name_element.text
+                or not version_element.text
+            ):
+                continue
+
+            name = name_element.text
+            version = version_element.text
+
+            description_element = result.find(
+                "p[@class='package-snippet__description']"
+            )
+            description = (
+                description_element.text
+                if description_element is not None and description_element.text
+                else ""
+            )
+
+            try:
+                package = Package(name, version)
+                package.description = to_str(description.strip())
+                results.append(package)
+            except InvalidVersion:
+                self._log(
+                    f'Unable to parse version "{version}" for the {name} package,'
+                    " skipping",
+                    level="debug",
+                )
+
+        return results
+
+    def get_package_info(self, name: NormalizedName) -> dict[str, Any]:
+        """
+        Return the package information given its name.
+
+        The information is returned from the cache if it exists
+        or retrieved from the remote server.
+        """
+        if self._disable_cache:
+            return self._get_package_info(name)
+
+        package_info: dict[str, Any] = self._cache.store("packages").remember_forever(
+            name, lambda: self._get_package_info(name)
+        )
+        return package_info
+
+    def _find_packages(
+        self, name: NormalizedName, constraint: VersionConstraint
+    ) -> list[Package]:
+        """
+        Find packages on the remote server.
+        """
+        try:
+            info = self.get_package_info(name)
+        except PackageNotFound:
+            self._log(
+                f"No packages found for {name} {constraint!s}",
+                level="debug",
+            )
+            return []
+
+        packages = []
+
+        for version_string, release in info["releases"].items():
+            if not release:
+                # Bad release
+                self._log(
+                    f"No release information found for {name}-{version_string},"
+                    " skipping",
+                    level="debug",
+                )
+                continue
+
+            try:
+                version = Version.parse(version_string)
+            except InvalidVersion:
+                self._log(
+                    f'Unable to parse version "{version_string}" for the'
+                    f" {name} package, skipping",
+                    level="debug",
+                )
+                continue
+
+            if constraint.allows(version):
+                # PEP 592: PyPI always yanks entire releases, not individual files,
+                # so we just have to look for the first file
+                yanked = self._get_yanked(release[0])
+                packages.append(Package(info["info"]["name"], version, yanked=yanked))
+
+        return packages
+
+    def _get_package_info(self, name: NormalizedName) -> dict[str, Any]:
+        data = self._get(f"pypi/{name}/json")
+        if data is None:
+            raise PackageNotFound(f"Package [{name}] not found.")
+
+        return data
+
+    def find_links_for_package(self, package: Package) -> list[Link]:
+        json_data = self._get(f"pypi/{package.name}/{package.version}/json")
+        if json_data is None:
+            return []
+
+        links = []
+        for url in json_data["urls"]:
+            h = f"sha256={url['digests']['sha256']}"
+            links.append(Link(url["url"] + "#" + h, yanked=self._get_yanked(url)))
+
+        return links
+
+    def _get_release_info(
+        self, name: NormalizedName, version: Version
+    ) -> dict[str, str | list[str] | None]:
+        from poetry.inspection.info import PackageInfo
+
+        self._log(f"Getting info for {name} ({version}) from PyPI", "debug")
+
+        json_data = self._get(f"pypi/{name}/{version}/json")
+        if json_data is None:
+            raise PackageNotFound(f"Package [{name}] not found.")
+
+        info = json_data["info"]
+
+        data = PackageInfo(
+            name=info["name"],
+            version=info["version"],
+            summary=info["summary"],
+            platform=info["platform"],
+            requires_dist=info["requires_dist"],
+            requires_python=info["requires_python"],
+            files=info.get("files", []),
+            yanked=self._get_yanked(info),
+            cache_version=str(self.CACHE_VERSION),
+        )
+
+        try:
+            version_info = json_data["urls"]
+        except KeyError:
+            version_info = []
+
+        for file_info in version_info:
+            data.files.append(
+                {
+                    "file": file_info["filename"],
+                    "hash": "sha256:" + file_info["digests"]["sha256"],
+                }
+            )
+
+        if self._fallback and data.requires_dist is None:
+            self._log("No dependencies found, downloading archives", level="debug")
+            # No dependencies set (along with other information).
+            # This might mean the package truly has no dependencies, or that
+            # its metadata was badly set when uploading.
+            # So we need to make sure there are actually no dependencies
+            # by introspecting the distribution files themselves.
+            urls = defaultdict(list)
+            for url in json_data["urls"]:
+                # Only get sdist and wheels if they exist
+                dist_type = url["packagetype"]
+                if dist_type not in ["sdist", "bdist_wheel"]:
+                    continue
+
+                urls[dist_type].append(url["url"])
+
+            if not urls:
+                return data.asdict()
+
+            info = self._get_info_from_urls(urls)
+
+            data.requires_dist = info.requires_dist
+
+            if not data.requires_python:
+                data.requires_python = info.requires_python
+
+        return data.asdict()
+
+    def _get(self, endpoint: str) -> dict[str, Any] | None:
+        try:
+            json_response = self.session.get(
+                self._base_url + endpoint,
+                raise_for_status=False,
+                timeout=REQUESTS_TIMEOUT,
+            )
+        except requests.exceptions.TooManyRedirects:
+            # Cache-control redirect loop:
+            # delete the cached entry and retry once
+            self.session.delete_cache(self._base_url + endpoint)
+            json_response = self.session.get(
+                self._base_url + endpoint,
+                raise_for_status=False,
+                timeout=REQUESTS_TIMEOUT,
+            )
+
+        if json_response.status_code != 200:
+            return None
+
+        json: dict[str, Any] = json_response.json()
+        return json
+
+    @staticmethod
+    def _get_yanked(json_data: dict[str, Any]) -> str | bool:
+        if json_data.get("yanked", False):
+            return json_data.get("yanked_reason") or True  # noqa: SIM222
+        return False
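As a quick illustration of the yanked handling above, here is a self-contained sketch (hedged: `get_yanked` below is a standalone copy of the `_get_yanked` logic, not an import from the vendored tree). Per PEP 592 a file's JSON metadata carries a boolean `yanked` plus an optional `yanked_reason` string, which collapses to `False`, `True`, or the reason:

```python
from __future__ import annotations

from typing import Any


def get_yanked(json_data: dict[str, Any]) -> str | bool:
    # Standalone copy for illustration: reason string if one was given,
    # True if merely yanked, False otherwise.
    if json_data.get("yanked", False):
        return json_data.get("yanked_reason") or True
    return False


print(get_yanked({"yanked": True, "yanked_reason": "bad metadata"}))  # bad metadata
print(get_yanked({"yanked": True}))                                   # True
print(get_yanked({"filename": "pkg-1.0.tar.gz"}))                     # False
```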
diff --git a/vendor/poetry/src/poetry/repositories/repository.py b/vendor/poetry/src/poetry/repositories/repository.py
new file mode 100644
index 00000000..ee60bcf1
--- /dev/null
+++ b/vendor/poetry/src/poetry/repositories/repository.py
@@ -0,0 +1,150 @@
+from __future__ import annotations
+
+import logging
+
+from typing import TYPE_CHECKING
+
+from packaging.utils import canonicalize_name
+from poetry.core.semver.helpers import parse_constraint
+from poetry.core.semver.version import Version
+from poetry.core.semver.version_constraint import VersionConstraint
+from poetry.core.semver.version_range import VersionRange
+
+from poetry.repositories.exceptions import PackageNotFound
+
+
+if TYPE_CHECKING:
+    from packaging.utils import NormalizedName
+    from poetry.core.packages.dependency import Dependency
+    from poetry.core.packages.package import Package
+    from poetry.core.packages.utils.link import Link
+
+
+class Repository:
+    def __init__(self, name: str, packages: list[Package] | None = None) -> None:
+        self._name = name
+        self._packages: list[Package] = []
+
+        for package in packages or []:
+            self.add_package(package)
+
+    @property
+    def name(self) -> str:
+        return self._name
+
+    @property
+    def packages(self) -> list[Package]:
+        return self._packages
+
+    def find_packages(self, dependency: Dependency) -> list[Package]:
+        packages = []
+        constraint, allow_prereleases = self._get_constraints_from_dependency(
+            dependency
+        )
+        ignored_pre_release_packages = []
+
+        for package in self._find_packages(dependency.name, constraint):
+            if package.yanked and not isinstance(constraint, Version):
+                # PEP 592: yanked files are always ignored, unless they are the only
+                # file that matches a version specifier that "pins" to an exact
+                # version
+                continue
+            if (
+                package.is_prerelease()
+                and not allow_prereleases
+                and not package.is_direct_origin()
+            ):
+                if constraint.is_any():
+                    # we need this when all versions of the package are pre-releases
+                    ignored_pre_release_packages.append(package)
+                continue
+
+            packages.append(package)
+
+        self._log(
+            f"{len(packages)} packages found for {dependency.name} {constraint!s}",
+            level="debug",
+        )
+
+        return packages or ignored_pre_release_packages
+
+    def has_package(self, package: Package) -> bool:
+        package_id = package.unique_name
+        return any(
+            package_id == repo_package.unique_name for repo_package in self.packages
+        )
+
+    def add_package(self, package: Package) -> None:
+        self._packages.append(package)
+
+    def remove_package(self, package: Package) -> None:
+        package_id = package.unique_name
+
+        index = None
+        for i, repo_package in enumerate(self.packages):
+            if package_id == repo_package.unique_name:
+                index = i
+                break
+
+        if index is not None:
+            del self._packages[index]
+
+    def search(self, query: str) -> list[Package]:
+        results: list[Package] = []
+
+        for package in self.packages:
+            if query in package.name:
+                results.append(package)
+
+        return results
+
+    @staticmethod
+    def _get_constraints_from_dependency(
+        dependency: Dependency,
+    ) -> tuple[VersionConstraint, bool]:
+        constraint = dependency.constraint
+        if constraint is None:
+            constraint = "*"
+
+        if not isinstance(constraint, VersionConstraint):
+            constraint = parse_constraint(constraint)
+
+        allow_prereleases = dependency.allows_prereleases()
+        if isinstance(constraint, VersionRange) and (
+            (constraint.max is not None and constraint.max.is_unstable())
+            or (constraint.min is not None and constraint.min.is_unstable())
+        ):
+            allow_prereleases = True
+
+        return constraint, allow_prereleases
+
+    def _find_packages(
+        self, name: NormalizedName, constraint: VersionConstraint
+    ) -> list[Package]:
+        return [
+            package
+            for package in self._packages
+            if package.name == name and constraint.allows(package.version)
+        ]
+
+    def _log(self, msg: str, level: str = "info") -> None:
+        logger = logging.getLogger(f"{__name__}.{self.__class__.__name__}")
+        getattr(logger, level)(f"Source ({self.name}): {msg}")
+
+    def __len__(self) -> int:
+        return len(self._packages)
+
+    def find_links_for_package(self, package: Package) -> list[Link]:
+        return []
+
+    def package(
+        self, name: str, version: Version, extras: list[str] | None = None
+    ) -> Package:
+        canonicalized_name = canonicalize_name(name)
+        for package in self.packages:
+            if canonicalized_name == package.name and package.version == version:
+                return package.clone()
+
+        raise PackageNotFound(f"Package {name} ({version}) not found.")
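A hedged usage sketch of this in-memory repository (it assumes the vendored `poetry` and `poetry-core` packages are importable; `widget` is a made-up package name):

```python
# Hedged sketch, not part of the vendored tree.
from poetry.core.packages.dependency import Dependency
from poetry.core.packages.package import Package

from poetry.repositories.repository import Repository

repo = Repository("demo")
for version in ("1.0.0", "1.1.0", "2.0.0"):
    repo.add_package(Package("widget", version))

# find_packages() parses the dependency's constraint ("^1.0" means
# ">=1.0.0,<2.0.0") and keeps only matching, non-yanked versions.
dep = Dependency("widget", "^1.0")
print([str(p.version) for p in repo.find_packages(dep)])  # ['1.0.0', '1.1.0']
```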
diff --git a/vendor/poetry/src/poetry/repositories/single_page_repository.py b/vendor/poetry/src/poetry/repositories/single_page_repository.py
new file mode 100644
index 00000000..e8de0b14
--- /dev/null
+++ b/vendor/poetry/src/poetry/repositories/single_page_repository.py
@@ -0,0 +1,15 @@
+from __future__ import annotations
+
+from poetry.repositories.legacy_repository import LegacyRepository
+from poetry.repositories.link_sources.html import SimpleRepositoryPage
+
+
+class SinglePageRepository(LegacyRepository):
+    def _get_page(self, endpoint: str | None = None) -> SimpleRepositoryPage | None:
+        """
+        Single page repositories only have one page irrespective of endpoint.
+        """
+        response = self._get_response("")
+        if not response:
+            return None
+        return SimpleRepositoryPage(response.url, response.text)
diff --git a/vendor/poetry/src/poetry/utils/__init__.py b/vendor/poetry/src/poetry/utils/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/utils/_compat.py b/vendor/poetry/src/poetry/utils/_compat.py
new file mode 100644
index 00000000..b06ce4fb
--- /dev/null
+++ b/vendor/poetry/src/poetry/utils/_compat.py
@@ -0,0 +1,49 @@
+from __future__ import annotations
+
+import sys
+
+from contextlib import suppress
+
+import importlib_metadata as metadata
+
+WINDOWS = sys.platform == "win32"
+
+
+def decode(string: bytes | str, encodings: list[str] | None = None) -> str:
+    if not isinstance(string, bytes):
+        return string
+
+    encodings = encodings or ["utf-8", "latin1", "ascii"]
+
+    for encoding in encodings:
+        with suppress(UnicodeEncodeError, UnicodeDecodeError):
+            return string.decode(encoding)
+
+    return string.decode(encodings[0], errors="ignore")
+
+
+def encode(string: str, encodings: list[str] | None = None) -> bytes:
+    if isinstance(string, bytes):
+        return string
+
+    encodings = encodings or ["utf-8", "latin1", "ascii"]
+
+    for encoding in encodings:
+        with suppress(UnicodeEncodeError, UnicodeDecodeError):
+            return string.encode(encoding)
+
+    return string.encode(encodings[0], errors="ignore")
+
+
+def to_str(string: str) -> str:
+    return decode(string)
+
+
+def list_to_shell_command(cmd: list[str]) -> str:
+    return " ".join(
+        f'"{token}"' if " " in token and token[0] not in {"'", '"'} else token
+        for token in cmd
+    )
+
+
+__all__ = ["WINDOWS", "decode", "encode", "list_to_shell_command", "metadata", "to_str"]
diff --git a/vendor/poetry/src/poetry/utils/authenticator.py b/vendor/poetry/src/poetry/utils/authenticator.py
new file mode 100644
index 00000000..8e5239cd
--- /dev/null
+++ b/vendor/poetry/src/poetry/utils/authenticator.py
@@ -0,0 +1,433 @@
+from __future__ import annotations
+
+import contextlib
+import dataclasses
+import functools
+import logging
+import time
+import urllib.parse
+
+from os.path import commonprefix
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
+
+import requests
+import requests.auth
+import requests.exceptions
+
+from cachecontrol import CacheControl
+from cachecontrol.caches import FileCache
+
+from poetry.config.config import Config
+from poetry.exceptions import PoetryException
+from poetry.utils.constants import REQUESTS_TIMEOUT
+from poetry.utils.password_manager import HTTPAuthCredential
+from poetry.utils.password_manager import PasswordManager
+
+
+if TYPE_CHECKING:
+    from cleo.io.io import IO
+
+
+logger = logging.getLogger(__name__)
+
+
+@dataclasses.dataclass(frozen=True)
+class RepositoryCertificateConfig:
+    cert: Path | None = dataclasses.field(default=None)
+    client_cert: Path | None = dataclasses.field(default=None)
+    verify: bool = dataclasses.field(default=True)
+
+    @classmethod
+    def create(
+        cls, repository: str, config: Config | None
+    ) -> RepositoryCertificateConfig:
+        config = config if config else Config.create()
+
+        verify: str | bool = config.get(
+            f"certificates.{repository}.verify",
+            config.get(f"certificates.{repository}.cert", True),
+        )
+        client_cert: str = config.get(f"certificates.{repository}.client-cert")
+
+        return cls(
+            cert=Path(verify) if isinstance(verify, str) else None,
+            client_cert=Path(client_cert) if client_cert else None,
+            verify=verify if isinstance(verify, bool) else True,
+        )
+
+
+@dataclasses.dataclass
+class AuthenticatorRepositoryConfig:
+    name: str
+    url: str
+    netloc: str = dataclasses.field(init=False)
+    path: str = dataclasses.field(init=False)
+
+    def __post_init__(self) -> None:
+        parsed_url = urllib.parse.urlsplit(self.url)
+        self.netloc = parsed_url.netloc
+        self.path = parsed_url.path
+
+    def certs(self, config: Config) -> RepositoryCertificateConfig:
+        return RepositoryCertificateConfig.create(self.name, config)
+
+    @property
+    def http_credential_keys(self) -> list[str]:
+        return [self.url, self.netloc, self.name]
+
+    def get_http_credentials(
+        self, password_manager: PasswordManager, username: str | None = None
+    ) -> HTTPAuthCredential:
+        # try with the repository name via the password manager
+        credential = HTTPAuthCredential(
+            **(password_manager.get_http_auth(self.name) or {})
+        )
+
+        if credential.password is None:
+            # fallback to url and netloc based keyring entries
+            credential = password_manager.keyring.get_credential(
+                self.url, self.netloc, username=credential.username
+            )
+
+            if credential.password is not None:
+                return HTTPAuthCredential(
+                    username=credential.username, password=credential.password
+                )
+
+        return credential
+
+
+class Authenticator:
+    def __init__(
+        self,
+        config: Config | None = None,
+        io: IO | None = None,
+        cache_id: str | None = None,
+        disable_cache: bool = False,
+    ) -> None:
+        self._config = config or Config.create()
+        self._io = io
+        self._sessions_for_netloc: dict[str, requests.Session] = {}
+        self._credentials: dict[str, HTTPAuthCredential] = {}
+        self._certs: dict[str, RepositoryCertificateConfig] = {}
+        self._configured_repositories: dict[
+            str, AuthenticatorRepositoryConfig
+        ] | None = None
+        self._password_manager = PasswordManager(self._config)
+        self._cache_control = (
+            FileCache(
+                str(
+                    self._config.repository_cache_directory
+                    / (cache_id or "_default_cache")
+                    / "_http"
+                )
+            )
+            if not disable_cache
+            else None
+        )
+        self.get_repository_config_for_url = functools.lru_cache(maxsize=None)(
+            self._get_repository_config_for_url
+        )
+
+    def create_session(self) -> requests.Session:
+        session = requests.Session()
+
+        if self._cache_control is None:
+            return session
+
+        session = CacheControl(sess=session, cache=self._cache_control)
+        return session
+
+    def get_session(self, url: str | None = None) -> requests.Session:
+        if not url:
+            return self.create_session()
+
+        parsed_url = urllib.parse.urlsplit(url)
+        netloc = parsed_url.netloc
+
+        if netloc not in self._sessions_for_netloc:
+            logger.debug("Creating new session for %s", netloc)
+            self._sessions_for_netloc[netloc] = self.create_session()
+
+        return self._sessions_for_netloc[netloc]
+
+    def close(self) -> None:
+        for session in self._sessions_for_netloc.values():
+            if session is not None:
+                with contextlib.suppress(AttributeError):
+                    session.close()
+
+    def __del__(self) -> None:
+        self.close()
+
+    def delete_cache(self, url: str) -> None:
+        if self._cache_control is not None:
+            self._cache_control.delete(key=url)
+
+    def authenticated_url(self, url: str) -> str:
+        parsed = urllib.parse.urlparse(url)
+        credential = self.get_credentials_for_url(url)
+
+        if credential.username is not None and credential.password is not None:
+            username = urllib.parse.quote(credential.username, safe="")
+            password = urllib.parse.quote(credential.password, safe="")
+
+            return (
+                f"{parsed.scheme}://{username}:{password}@{parsed.netloc}{parsed.path}"
+            )
+
+        return url
+
+    def request(
+        self, method: str, url: str, raise_for_status: bool = True, **kwargs: Any
+    ) -> requests.Response:
+        request = requests.Request(method, url)
+        credential = self.get_credentials_for_url(url)
+
+        if credential.username is not None or credential.password is not None:
+            request = requests.auth.HTTPBasicAuth(
+                credential.username or "", credential.password or ""
+            )(request)
+
+        session = self.get_session(url=url)
+        prepared_request = session.prepare_request(request)
+
+        proxies: dict[str, str] = kwargs.get("proxies", {})
+        stream: bool | None = kwargs.get("stream")
+
+        certs = self.get_certs_for_url(url)
+        verify: bool | str | Path = kwargs.get("verify") or certs.cert or certs.verify
+        cert: str | Path | None = kwargs.get("cert") or certs.client_cert
+
+        if cert is not None:
+            cert = str(cert)
+
+        verify = str(verify) if isinstance(verify, Path) else verify
+
+        settings = session.merge_environment_settings(
+            prepared_request.url, proxies, stream, verify, cert
+        )
+
+        # Send the request.
+        send_kwargs = {
+            "timeout": kwargs.get("timeout", REQUESTS_TIMEOUT),
+            "allow_redirects": kwargs.get("allow_redirects", True),
+        }
+        send_kwargs.update(settings)
+
+        attempt = 0
+
+        while True:
+            is_last_attempt = attempt >= 5
+            try:
+                resp = session.send(prepared_request, **send_kwargs)
+            except (requests.exceptions.ConnectionError, OSError) as e:
+                if is_last_attempt:
+                    raise e
+            else:
+                if resp.status_code not in [502, 503, 504] or is_last_attempt:
+                    if raise_for_status:
+                        resp.raise_for_status()
+                    return resp
+
+            if not is_last_attempt:
+                attempt += 1
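+                # Linear backoff: successive retries sleep 0.5s, 1.0s, ... 2.5s,
+                # so transient connection errors and 502/503/504 responses get
+                # at most six attempts in total (one initial + five retries).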
+                delay = 0.5 * attempt
+                logger.debug(f"Retrying HTTP request in {delay} seconds.")
+                time.sleep(delay)
+                continue
+
+        # this should never really be hit under any sane circumstance
+        raise PoetryException(f"Failed HTTP {method.upper()} request")
+
+    def get(self, url: str, **kwargs: Any) -> requests.Response:
+        return self.request("get", url, **kwargs)
+
+    def post(self, url: str, **kwargs: Any) -> requests.Response:
+        return self.request("post", url, **kwargs)
+
+    def _get_credentials_for_repository(
+        self, repository: AuthenticatorRepositoryConfig, username: str | None = None
+    ) -> HTTPAuthCredential:
+        # cache repository credentials by repository url to avoid multiple keyring
+        # backend queries when packages are being downloaded from the same source
+        key = f"{repository.url}#username={username or ''}"
+
+        if key not in self._credentials:
+            self._credentials[key] = repository.get_http_credentials(
+                password_manager=self._password_manager, username=username
+            )
+
+        return self._credentials[key]
+
+    def _get_credentials_for_url(
+        self, url: str, exact_match: bool = False
+    ) -> HTTPAuthCredential:
+        repository = self.get_repository_config_for_url(url, exact_match)
+
+        credential = (
+            self._get_credentials_for_repository(repository=repository)
+            if repository is not None
+            else HTTPAuthCredential()
+        )
+
+        if credential.password is None:
+            parsed_url = urllib.parse.urlsplit(url)
+            netloc = parsed_url.netloc
+            credential = self._password_manager.keyring.get_credential(
+                url, netloc, username=credential.username
+            )
+
+            return HTTPAuthCredential(
+                username=credential.username, password=credential.password
+            )
+
+        return credential
+
+    def get_credentials_for_git_url(self, url: str) -> HTTPAuthCredential:
+        parsed_url = urllib.parse.urlsplit(url)
+
+        if parsed_url.scheme not in {"http", "https"}:
+            return HTTPAuthCredential()
+
+        key = f"git+{url}"
+
+        if key not in self._credentials:
+            self._credentials[key] = self._get_credentials_for_url(url, True)
+
+        return self._credentials[key]
+
+    def get_credentials_for_url(self, url: str) -> HTTPAuthCredential:
+        parsed_url = urllib.parse.urlsplit(url)
+        netloc = parsed_url.netloc
+
+        if url not in self._credentials:
+            if "@" not in netloc:
+                # no credentials were provided in the url, try finding the
+                # best repository configuration
+                self._credentials[url] = self._get_credentials_for_url(url)
+            else:
+                # Split from the right because that's how urllib.parse.urlsplit()
+                # behaves if more than one @ is present (which can be checked using
+                # the password attribute of urlsplit()'s return value).
+                auth, netloc = netloc.rsplit("@", 1)
+                # Split from the left because that's how urllib.parse.urlsplit()
+                # behaves if more than one : is present (which again can be checked
+                # using the password attribute of the return value)
+                user, password = auth.split(":", 1) if ":" in auth else (auth, "")
+                self._credentials[url] = HTTPAuthCredential(
+                    urllib.parse.unquote(user),
+                    urllib.parse.unquote(password),
+                )
+
+        return self._credentials[url]
+
+    def get_pypi_token(self, name: str) -> str | None:
+        return self._password_manager.get_pypi_token(name)
+
+    def get_http_auth(
+        self, name: str, username: str | None = None
+    ) -> HTTPAuthCredential | None:
+        if name == "pypi":
+            repository = AuthenticatorRepositoryConfig(
+                name, "https://upload.pypi.org/legacy/"
+            )
+        else:
+            if name not in self.configured_repositories:
+                return None
+            repository = self.configured_repositories[name]
+
+        return self._get_credentials_for_repository(
+            repository=repository, username=username
+        )
+
+    def get_certs_for_repository(self, name: str) -> RepositoryCertificateConfig:
+        if name.lower() == "pypi" or name not in self.configured_repositories:
+            return RepositoryCertificateConfig()
+        return self.configured_repositories[name].certs(self._config)
+
+    @property
+    def configured_repositories(self) -> dict[str, AuthenticatorRepositoryConfig]:
+        if self._configured_repositories is None:
+            self._configured_repositories = {}
+            for repository_name in self._config.get("repositories", []):
+                url = self._config.get(f"repositories.{repository_name}.url")
+                self._configured_repositories[
+                    repository_name
+                ] = AuthenticatorRepositoryConfig(repository_name, url)
+
+        return self._configured_repositories
+
+    def reset_credentials_cache(self) -> None:
+        self.get_repository_config_for_url.cache_clear()
+        self._credentials = {}
+
+    def add_repository(self, name: str, url: str) -> None:
+        self.configured_repositories[name] = AuthenticatorRepositoryConfig(name, url)
+        self.reset_credentials_cache()
+
+    def get_certs_for_url(self, url: str) -> RepositoryCertificateConfig:
+        if url not in self._certs:
+            self._certs[url] = self._get_certs_for_url(url)
+        return self._certs[url]
+
+    def _get_repository_config_for_url(
+        self, url: str, exact_match: bool = False
+    ) -> AuthenticatorRepositoryConfig | None:
+        parsed_url = urllib.parse.urlsplit(url)
+        candidates_netloc_only = []
+        candidates_path_match = []
+
+        for repository in self.configured_repositories.values():
+            if exact_match:
+                if parsed_url.path == repository.path:
+                    return repository
+                continue
+
+            if repository.netloc == parsed_url.netloc:
+                if parsed_url.path.startswith(repository.path) or commonprefix(
+                    (parsed_url.path, repository.path)
+                ):
+                    candidates_path_match.append(repository)
+                    continue
+                candidates_netloc_only.append(repository)
+
+        if candidates_path_match:
+            candidates = candidates_path_match
+        elif candidates_netloc_only:
+            candidates = candidates_netloc_only
+        else:
+            return None
+
+        if len(candidates) > 1:
+            logger.debug(
+                "Multiple source configurations found for %s - %s",
+                parsed_url.netloc,
+                ", ".join(c.name for c in candidates),
+            )
+            # prefer the more specific path
+            candidates.sort(
+                key=lambda c: len(commonprefix([parsed_url.path, c.path])), reverse=True
+            )
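+            # e.g. for https://host/simple/foo, a source configured at
+            # /simple/ outranks one configured at /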
+
+        return candidates[0]
+
+    def _get_certs_for_url(self, url: str) -> RepositoryCertificateConfig:
+        selected = self.get_repository_config_for_url(url)
+        if selected:
+            return selected.certs(config=self._config)
+        return RepositoryCertificateConfig()
+
+
+_authenticator: Authenticator | None = None
+
+
+def get_default_authenticator() -> Authenticator:
+    global _authenticator
+
+    if _authenticator is None:
+        _authenticator = Authenticator()
+
+    return _authenticator
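The userinfo parsing done by `get_credentials_for_url` can be sketched standalone (illustrative only; `credentials_from_url` is a hypothetical helper mirroring the logic above, not part of the vendored API):

```python
from __future__ import annotations

import urllib.parse


def credentials_from_url(url: str) -> tuple[str, str] | None:
    # Hypothetical helper, for illustration only.
    netloc = urllib.parse.urlsplit(url).netloc
    if "@" not in netloc:
        return None
    # The rightmost "@" separates the userinfo from the host.
    auth, _ = netloc.rsplit("@", 1)
    user, password = auth.split(":", 1) if ":" in auth else (auth, "")
    return urllib.parse.unquote(user), urllib.parse.unquote(password)


print(credentials_from_url("https://user:p%40ss@pypi.example.com/simple/"))
# ('user', 'p@ss')
```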
diff --git a/vendor/poetry/src/poetry/utils/constants.py b/vendor/poetry/src/poetry/utils/constants.py
new file mode 100644
index 00000000..0f799b16
--- /dev/null
+++ b/vendor/poetry/src/poetry/utils/constants.py
@@ -0,0 +1,5 @@
+from __future__ import annotations
+
+
+# Timeout, in seconds, for HTTP requests made with the requests library.
+REQUESTS_TIMEOUT = 15
diff --git a/vendor/poetry/src/poetry/utils/dependency_specification.py b/vendor/poetry/src/poetry/utils/dependency_specification.py
new file mode 100644
index 00000000..e80a2b97
--- /dev/null
+++ b/vendor/poetry/src/poetry/utils/dependency_specification.py
@@ -0,0 +1,226 @@
+from __future__ import annotations
+
+import contextlib
+import os
+import re
+import urllib.parse
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Dict
+from typing import List
+from typing import TypeVar
+from typing import Union
+from typing import cast
+
+from poetry.core.packages.dependency import Dependency
+from tomlkit.items import InlineTable
+
+from poetry.puzzle.provider import Provider
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.vcs_dependency import VCSDependency
+
+    from poetry.utils.env import Env
+
+
+DependencySpec = Dict[str, Union[str, bool, Dict[str, Union[str, bool]], List[str]]]
+
+
+def _parse_dependency_specification_git_url(
+    requirement: str, env: Env | None = None
+) -> DependencySpec | None:
+    from poetry.core.vcs.git import Git
+    from poetry.core.vcs.git import ParsedUrl
+
+    parsed = ParsedUrl.parse(requirement)
+    url = Git.normalize_url(requirement)
+
+    pair = {"name": parsed.name, "git": url.url}
+
+    if parsed.rev:
+        pair["rev"] = url.revision
+
+    if parsed.subdirectory:
+        pair["subdirectory"] = parsed.subdirectory
+
+    source_root = env.path.joinpath("src") if env else None
+    package = Provider.get_package_from_vcs(
+        "git",
+        url=url.url,
+        rev=pair.get("rev"),
+        subdirectory=parsed.subdirectory,
+        source_root=source_root,
+    )
+    pair["name"] = package.name
+    return pair
+
+
+def _parse_dependency_specification_url(
+    requirement: str, env: Env | None = None
+) -> DependencySpec | None:
+    url_parsed = urllib.parse.urlparse(requirement)
+    if not (url_parsed.scheme and url_parsed.netloc):
+        return None
+
+    if url_parsed.scheme in ["git+https", "git+ssh"]:
+        return _parse_dependency_specification_git_url(requirement, env)
+
+    if url_parsed.scheme in ["http", "https"]:
+        package = Provider.get_package_from_url(requirement)
+        assert package.source_url is not None
+        return {"name": package.name, "url": package.source_url}
+
+    return None
+
+
+def _parse_dependency_specification_path(
+    requirement: str, cwd: Path
+) -> DependencySpec | None:
+    if (os.path.sep in requirement or "/" in requirement) and (
+        cwd.joinpath(requirement).exists()
+        or Path(requirement).expanduser().exists()
+        and Path(requirement).expanduser().is_absolute()
+    ):
+        path = Path(requirement).expanduser()
+        is_absolute = path.is_absolute()
+
+        if not path.is_absolute():
+            path = cwd.joinpath(requirement)
+
+        if path.is_file():
+            package = Provider.get_package_from_file(path.resolve())
+        else:
+            package = Provider.get_package_from_directory(path.resolve())
+
+        return {
+            "name": package.name,
+            "path": path.relative_to(cwd).as_posix()
+            if not is_absolute
+            else path.as_posix(),
+        }
+
+    return None
+
+
+def _parse_dependency_specification_simple(
+    requirement: str,
+) -> DependencySpec | None:
+    extras: list[str] = []
+    pair = re.sub("^([^@=: ]+)(?:@|==|(?<![<>~!])=|:| )(.*)$", "\\1 \\2", requirement)
+    pair = pair.strip()
+
+    require: DependencySpec = {}
+
+    if " " in pair:
+        name, version = pair.split(" ", 2)
+        extras_m = re.search(r"\[([\w\d,-_]+)\]$", name)
+        if extras_m:
+            extras = [e.strip() for e in extras_m.group(1).split(",")]
+            name, _ = name.split("[")
+
+        require["name"] = name
+        if version != "latest":
+            require["version"] = version
+    else:
+        m = re.match(r"^([^><=!: ]+)((?:>=|<=|>|<|!=|~=|~|\^).*)$", requirement.strip())
+        if m:
+            name, constraint = m.group(1), m.group(2)
+            extras_m = re.search(r"\[([\w\d,-_]+)\]$", name)
+            if extras_m:
+                extras = [e.strip() for e in extras_m.group(1).split(",")]
+                name, _ = name.split("[")
+
+            require["name"] = name
+            require["version"] = constraint
+        else:
+            extras_m = re.search(r"\[([\w\d,-_]+)\]$", pair)
+            if extras_m:
+                extras = [e.strip() for e in extras_m.group(1).split(",")]
+                pair, _ = pair.split("[")
+
+            require["name"] = pair
+
+    if extras:
+        require["extras"] = extras
+
+    return require
+
+
+BaseSpec = TypeVar("BaseSpec", DependencySpec, InlineTable)
+
+
+def dependency_to_specification(
+    dependency: Dependency, specification: BaseSpec
+) -> BaseSpec:
+    if dependency.is_vcs():
+        dependency = cast("VCSDependency", dependency)
+        assert dependency.source_url is not None
+        specification[dependency.vcs] = dependency.source_url
+        if dependency.reference:
+            specification["rev"] = dependency.reference
+    elif dependency.is_file() or dependency.is_directory():
+        assert dependency.source_url is not None
+        specification["path"] = dependency.source_url
+    elif dependency.is_url():
+        assert dependency.source_url is not None
+        specification["url"] = dependency.source_url
+    elif dependency.pretty_constraint != "*" and not dependency.constraint.is_empty():
+        specification["version"] = dependency.pretty_constraint
+
+    if not dependency.marker.is_any():
+        specification["markers"] = str(dependency.marker)
+
+    if dependency.extras:
+        specification["extras"] = sorted(dependency.extras)
+
+    return specification
+
+
+def pep508_to_dependency_specification(requirement: str) -> DependencySpec | None:
+    if " ; " not in requirement and re.search(r"@[\^~!=<>\d]", requirement):
+        # this is of the form package@<semver>, do not attempt to parse it
+        return None
+
+    with contextlib.suppress(ValueError):
+        dependency = Dependency.create_from_pep_508(requirement)
+        specification: DependencySpec = {}
+        specification = dependency_to_specification(dependency, specification)
+
+        if specification:
+            specification["name"] = dependency.name
+            return specification
+
+    return None
+
+
+def parse_dependency_specification(
+    requirement: str, env: Env | None = None, cwd: Path | None = None
+) -> DependencySpec:
+    requirement = requirement.strip()
+    cwd = cwd or Path.cwd()
+
+    specification = pep508_to_dependency_specification(requirement)
+
+    if specification is not None:
+        return specification
+
+    extras = []
+    extras_m = re.search(r"\[([\w\d,-_ ]+)\]$", requirement)
+    if extras_m:
+        extras = [e.strip() for e in extras_m.group(1).split(",")]
+        requirement, _ = requirement.split("[")
+
+    specification = (
+        _parse_dependency_specification_url(requirement, env=env)
+        or _parse_dependency_specification_path(requirement, cwd=cwd)
+        or _parse_dependency_specification_simple(requirement)
+    )
+
+    if specification:
+        if extras and "extras" not in specification:
+            specification["extras"] = extras
+        return specification
+
+    raise ValueError(f"Invalid dependency specification: {requirement}")
diff --git a/vendor/poetry/src/poetry/utils/env.py b/vendor/poetry/src/poetry/utils/env.py
new file mode 100644
index 00000000..3b8e444b
--- /dev/null
+++ b/vendor/poetry/src/poetry/utils/env.py
@@ -0,0 +1,2034 @@
+from __future__ import annotations
+
+import base64
+import contextlib
+import hashlib
+import itertools
+import json
+import os
+import platform
+import plistlib
+import re
+import subprocess
+import sys
+import sysconfig
+import warnings
+
+from contextlib import contextmanager
+from copy import deepcopy
+from importlib import resources
+from pathlib import Path
+from subprocess import CalledProcessError
+from typing import TYPE_CHECKING
+from typing import Any
+
+import packaging.tags
+import tomlkit
+import virtualenv
+
+from cleo.io.outputs.output import Verbosity
+from packaging.tags import Tag
+from packaging.tags import interpreter_name
+from packaging.tags import interpreter_version
+from packaging.tags import sys_tags
+from poetry.core.packages.package import Package
+from poetry.core.semver.helpers import parse_constraint
+from poetry.core.semver.version import Version
+from poetry.core.toml.file import TOMLFile
+from poetry.core.utils.helpers import temporary_directory
+from virtualenv.seed.wheels.embed import get_embed_wheel
+
+from poetry.utils._compat import WINDOWS
+from poetry.utils._compat import decode
+from poetry.utils._compat import encode
+from poetry.utils._compat import list_to_shell_command
+from poetry.utils._compat import metadata
+from poetry.utils.helpers import get_real_windows_path
+from poetry.utils.helpers import is_dir_writable
+from poetry.utils.helpers import paths_csv
+from poetry.utils.helpers import remove_directory
+
+from . import __name__ as _pkg
+
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable
+    from collections.abc import Iterator
+
+    from cleo.io.io import IO
+    from poetry.core.poetry import Poetry as CorePoetry
+    from poetry.core.version.markers import BaseMarker
+    from virtualenv.seed.wheels.util import Wheel
+
+    from poetry.poetry import Poetry
+
+
+GET_SYS_TAGS = resources.read_text(_pkg, "packaging_tags.py.template")
+
+
+GET_ENVIRONMENT_INFO = """\
+import json
+import os
+import platform
+import sys
+import sysconfig
+
+INTERPRETER_SHORT_NAMES = {
+    "python": "py",
+    "cpython": "cp",
+    "pypy": "pp",
+    "ironpython": "ip",
+    "jython": "jy",
+}
+
+
+def interpreter_version():
+    version = sysconfig.get_config_var("interpreter_version")
+    if version:
+        version = str(version)
+    else:
+        version = _version_nodot(sys.version_info[:2])
+
+    return version
+
+
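+# For example, _version_nodot((3, 9)) gives "39" while _version_nodot((3, 10))
+# gives "3_10": once a component reaches 10, a separator keeps it unambiguous.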
+def _version_nodot(version):
+    if any(v >= 10 for v in version):
+        sep = "_"
+    else:
+        sep = ""
+
+    return sep.join(map(str, version))
+
+
+if hasattr(sys, "implementation"):
+    info = sys.implementation.version
+    iver = "{0.major}.{0.minor}.{0.micro}".format(info)
+    kind = info.releaselevel
+    if kind != "final":
+        iver += kind[0] + str(info.serial)
+
+    implementation_name = sys.implementation.name
+else:
+    iver = "0"
+    implementation_name = platform.python_implementation().lower()
+
+env = {
+    "implementation_name": implementation_name,
+    "implementation_version": iver,
+    "os_name": os.name,
+    "platform_machine": platform.machine(),
+    "platform_release": platform.release(),
+    "platform_system": platform.system(),
+    "platform_version": platform.version(),
+    "python_full_version": platform.python_version(),
+    "platform_python_implementation": platform.python_implementation(),
+    "python_version": ".".join(platform.python_version_tuple()[:2]),
+    "sys_platform": sys.platform,
+    "version_info": tuple(sys.version_info),
+    # Extra information
+    "interpreter_name": INTERPRETER_SHORT_NAMES.get(
+        implementation_name, implementation_name
+    ),
+    "interpreter_version": interpreter_version(),
+}
+
+print(json.dumps(env))
+"""
+
+
+GET_BASE_PREFIX = """\
+import sys
+
+if hasattr(sys, "real_prefix"):
+    print(sys.real_prefix)
+elif hasattr(sys, "base_prefix"):
+    print(sys.base_prefix)
+else:
+    print(sys.prefix)
+"""
+
+GET_PYTHON_VERSION = """\
+import sys
+
+print('.'.join([str(s) for s in sys.version_info[:3]]))
+"""
+
+GET_PYTHON_VERSION_ONELINER = (
+    "\"import sys; print('.'.join([str(s) for s in sys.version_info[:3]]))\""
+)
+
+GET_SYS_PATH = """\
+import json
+import sys
+
+print(json.dumps(sys.path))
+"""
+
+GET_PATHS = """\
+import json
+import sysconfig
+
+print(json.dumps(sysconfig.get_paths()))
+"""
+
+GET_PATHS_FOR_GENERIC_ENVS = """\
+# We can't use sysconfig.get_paths() because
+# on some distributions it does not return the proper paths
+# (those used by pip for instance). We go through distutils
+# to get the proper ones.
+import json
+import site
+import sysconfig
+
+from distutils.command.install import SCHEME_KEYS
+from distutils.core import Distribution
+
+d = Distribution()
+d.parse_config_files()
+obj = d.get_command_obj("install", create=True)
+obj.finalize_options()
+
+paths = sysconfig.get_paths().copy()
+for key in SCHEME_KEYS:
+    if key == "headers":
+        # headers is not a path returned by sysconfig.get_paths()
+        continue
+
+    paths[key] = getattr(obj, f"install_{key}")
+
+if site.check_enableusersite() and hasattr(obj, "install_usersite"):
+    paths["usersite"] = getattr(obj, "install_usersite")
+    paths["userbase"] = getattr(obj, "install_userbase")
+
+print(json.dumps(paths))
+"""
+
+
+class SitePackages:
+    def __init__(
+        self,
+        purelib: Path,
+        platlib: Path | None = None,
+        fallbacks: list[Path] | None = None,
+        skip_write_checks: bool = False,
+    ) -> None:
+        self._purelib = purelib
+        self._platlib = platlib or purelib
+
+        if platlib and platlib.resolve() == purelib.resolve():
+            self._platlib = purelib
+
+        self._fallbacks = fallbacks or []
+        self._skip_write_checks = skip_write_checks
+
+        self._candidates: list[Path] = []
+        for path in itertools.chain([self._purelib, self._platlib], self._fallbacks):
+            if path not in self._candidates:
+                self._candidates.append(path)
+
+        self._writable_candidates = None if not skip_write_checks else self._candidates
+
+    @property
+    def path(self) -> Path:
+        return self._purelib
+
+    @property
+    def purelib(self) -> Path:
+        return self._purelib
+
+    @property
+    def platlib(self) -> Path:
+        return self._platlib
+
+    @property
+    def candidates(self) -> list[Path]:
+        return self._candidates
+
+    @property
+    def writable_candidates(self) -> list[Path]:
+        if self._writable_candidates is not None:
+            return self._writable_candidates
+
+        self._writable_candidates = []
+        for candidate in self._candidates:
+            if not is_dir_writable(path=candidate, create=True):
+                continue
+            self._writable_candidates.append(candidate)
+
+        return self._writable_candidates
+
+    def make_candidates(
+        self, path: Path, writable_only: bool = False, strict: bool = False
+    ) -> list[Path]:
+        candidates = self._candidates if not writable_only else self.writable_candidates
+        if path.is_absolute():
+            for candidate in candidates:
+                with contextlib.suppress(ValueError):
+                    path.relative_to(candidate)
+                    return [path]
+            site_type = "writable " if writable_only else ""
+            raise ValueError(
+                f"{path} is not relative to any discovered {site_type}sites"
+            )
+
+        results = [candidate / path for candidate in candidates]
+
+        if not results and strict:
+            raise RuntimeError(
+                f'Unable to find a suitable destination for "{path}" in'
+                f" {paths_csv(self._candidates)}"
+            )
+
+        return results
+
+    def distributions(
+        self, name: str | None = None, writable_only: bool = False
+    ) -> Iterable[metadata.Distribution]:
+        path = list(
+            map(
+                str, self._candidates if not writable_only else self.writable_candidates
+            )
+        )
+
+        yield from metadata.PathDistribution.discover(  # type: ignore[no-untyped-call]
+            name=name,
+            path=path,
+        )
+
+    def find_distribution(
+        self, name: str, writable_only: bool = False
+    ) -> metadata.Distribution | None:
+        for distribution in self.distributions(name=name, writable_only=writable_only):
+            return distribution
+        return None
+
+    def find_distribution_files_with_suffix(
+        self, distribution_name: str, suffix: str, writable_only: bool = False
+    ) -> Iterable[Path]:
+        for distribution in self.distributions(
+            name=distribution_name, writable_only=writable_only
+        ):
+            assert distribution.files is not None
+            for file in distribution.files:
+                if file.name.endswith(suffix):
+                    yield Path(
+                        distribution.locate_file(file),  # type: ignore[no-untyped-call]
+                    )
+
+    def find_distribution_files_with_name(
+        self, distribution_name: str, name: str, writable_only: bool = False
+    ) -> Iterable[Path]:
+        for distribution in self.distributions(
+            name=distribution_name, writable_only=writable_only
+        ):
+            assert distribution.files is not None
+            for file in distribution.files:
+                if file.name == name:
+                    yield Path(
+                        distribution.locate_file(file),  # type: ignore[no-untyped-call]
+                    )
+
+    def find_distribution_nspkg_pth_files(
+        self, distribution_name: str, writable_only: bool = False
+    ) -> Iterable[Path]:
+        return self.find_distribution_files_with_suffix(
+            distribution_name=distribution_name,
+            suffix="-nspkg.pth",
+            writable_only=writable_only,
+        )
+
+    def find_distribution_direct_url_json_files(
+        self, distribution_name: str, writable_only: bool = False
+    ) -> Iterable[Path]:
+        return self.find_distribution_files_with_name(
+            distribution_name=distribution_name,
+            name="direct_url.json",
+            writable_only=writable_only,
+        )
+
+    def remove_distribution_files(self, distribution_name: str) -> list[Path]:
+        paths = []
+
+        for distribution in self.distributions(
+            name=distribution_name, writable_only=True
+        ):
+            assert distribution.files is not None
+            for file in distribution.files:
+                path = Path(
+                    distribution.locate_file(file),  # type: ignore[no-untyped-call]
+                )
+                # We can't use unlink(missing_ok=True) because it's not always available
+                if path.exists():
+                    path.unlink()
+
+            distribution_path: Path = distribution._path  # type: ignore[attr-defined]
+            if distribution_path.exists():
+                remove_directory(str(distribution_path), force=True)
+
+            paths.append(distribution_path)
+
+        return paths
+
+    def _path_method_wrapper(
+        self,
+        path: str | Path,
+        method: str,
+        *args: Any,
+        return_first: bool = True,
+        writable_only: bool = False,
+        **kwargs: Any,
+    ) -> tuple[Path, Any] | list[tuple[Path, Any]]:
+        if isinstance(path, str):
+            path = Path(path)
+
+        candidates = self.make_candidates(
+            path, writable_only=writable_only, strict=True
+        )
+
+        results = []
+
+        for candidate in candidates:
+            try:
+                result = candidate, getattr(candidate, method)(*args, **kwargs)
+                if return_first:
+                    return result
+                results.append(result)
+            except OSError:
+                # TODO: Replace with PermissionError
+                pass
+
+        if results:
+            return results
+
+        raise OSError(f"Unable to access any of {paths_csv(candidates)}")
+
+    def write_text(self, path: str | Path, *args: Any, **kwargs: Any) -> Path:
+        paths = self._path_method_wrapper(path, "write_text", *args, **kwargs)
+        assert isinstance(paths, tuple)
+        return paths[0]
+
+    def mkdir(self, path: str | Path, *args: Any, **kwargs: Any) -> Path:
+        paths = self._path_method_wrapper(path, "mkdir", *args, **kwargs)
+        assert isinstance(paths, tuple)
+        return paths[0]
+
+    def exists(self, path: str | Path) -> bool:
+        return any(
+            value[-1]
+            for value in self._path_method_wrapper(path, "exists", return_first=False)
+        )
+
+    def find(
+        self,
+        path: str | Path,
+        writable_only: bool = False,
+    ) -> list[Path]:
+        return [
+            value[0]
+            for value in self._path_method_wrapper(
+                path, "exists", return_first=False, writable_only=writable_only
+            )
+            if value[-1] is True
+        ]
+
+    def __getattr__(self, item: str) -> Any:
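+        # Attribute lookups that fail on SitePackages itself are proxied to
+        # the purelib Path, so e.g. site_packages.glob(...) acts like Path.glob.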
+        try:
+            return super().__getattribute__(item)
+        except AttributeError:
+            return getattr(self.path, item)
+
+
+class EnvError(Exception):
+    pass
+
+
+class EnvCommandError(EnvError):
+    def __init__(self, e: CalledProcessError, input: str | None = None) -> None:
+        self.e = e
+
+        message = (
+            f"Command {e.cmd} errored with the following return code {e.returncode},"
+            f" and output: \n{decode(e.output)}"
+        )
+        if input:
+            message += f"input was : {input}"
+        super().__init__(message)
+
+
+class NoCompatiblePythonVersionFound(EnvError):
+    def __init__(self, expected: str, given: str | None = None) -> None:
+        if given:
+            message = (
+                f"The specified Python version ({given}) "
+                f"is not supported by the project ({expected}).\n"
+                "Please choose a compatible version "
+                "or loosen the python constraint specified "
+                "in the pyproject.toml file."
+            )
+        else:
+            message = (
+                "Poetry was unable to find a compatible version. "
+                "If you have one, you can explicitly use it "
+                'via the "env use" command.'
+            )
+
+        super().__init__(message)
+
+
+class InvalidCurrentPythonVersionError(EnvError):
+    def __init__(self, expected: str, given: str) -> None:
+        message = (
+            f"Current Python version ({given}) "
+            f"is not allowed by the project ({expected}).\n"
+            'Please change python executable via the "env use" command.'
+        )
+
+        super().__init__(message)
+
+
+class EnvManager:
+    """
+    Environments manager
+    """
+
+    _env = None
+
+    ENVS_FILE = "envs.toml"
+
+    def __init__(self, poetry: Poetry) -> None:
+        self._poetry = poetry
+
+    def _full_python_path(self, python: str) -> str:
+        try:
+            executable = decode(
+                subprocess.check_output(
+                    list_to_shell_command(
+                        [python, "-c", '"import sys; print(sys.executable)"']
+                    ),
+                    shell=True,
+                ).strip()
+            )
+        except CalledProcessError as e:
+            raise EnvCommandError(e)
+
+        return executable
+
+    def _detect_active_python(self, io: IO) -> str | None:
+        executable = None
+
+        try:
+            io.write_line(
+                "Trying to detect current active python executable as specified in the"
+                " config.",
+                verbosity=Verbosity.VERBOSE,
+            )
+            executable = self._full_python_path("python")
+            io.write_line(f"Found: {executable}", verbosity=Verbosity.VERBOSE)
+        except CalledProcessError:
+            io.write_line(
+                "Unable to detect the current active python executable. Falling back to"
+                " default.",
+                verbosity=Verbosity.VERBOSE,
+            )
+        return executable
+
+    def activate(self, python: str, io: IO) -> Env:
+        venv_path = self._poetry.config.virtualenvs_path
+        cwd = self._poetry.file.parent
+
+        envs_file = TOMLFile(venv_path / self.ENVS_FILE)
+
+        try:
+            python_version = Version.parse(python)
+            python = f"python{python_version.major}"
+            if python_version.precision > 1:
+                python += f".{python_version.minor}"
+        except ValueError:
+            # Executable in PATH or full executable path
+            pass
+
+        python = self._full_python_path(python)
+
+        try:
+            python_version_string = decode(
+                subprocess.check_output(
+                    list_to_shell_command([python, "-c", GET_PYTHON_VERSION_ONELINER]),
+                    shell=True,
+                )
+            )
+        except CalledProcessError as e:
+            raise EnvCommandError(e)
+
+        python_version = Version.parse(python_version_string.strip())
+        minor = f"{python_version.major}.{python_version.minor}"
+        patch = python_version.text
+
+        create = False
+        is_root_venv = self._poetry.config.get("virtualenvs.in-project")
+        # If we are required to create the virtual environment in the root folder,
+        # create or recreate it if needed
+        if is_root_venv:
+            create = False
+            venv = self._poetry.file.parent / ".venv"
+            if venv.exists():
+                # We need to check if the patch version is correct
+                _venv = VirtualEnv(venv)
+                current_patch = ".".join(str(v) for v in _venv.version_info[:3])
+
+                if patch != current_patch:
+                    create = True
+
+            self.create_venv(io, executable=python, force=create)
+
+            return self.get(reload=True)
+
+        envs = tomlkit.document()
+        base_env_name = self.generate_env_name(self._poetry.package.name, str(cwd))
+        if envs_file.exists():
+            envs = envs_file.read()
+            current_env = envs.get(base_env_name)
+            if current_env is not None:
+                current_minor = current_env["minor"]
+                current_patch = current_env["patch"]
+
+                if current_minor == minor and current_patch != patch:
+                    # We need to recreate
+                    create = True
+
+        name = f"{base_env_name}-py{minor}"
+        venv = venv_path / name
+
+        # Create if needed
+        if not venv.exists() or (venv.exists() and create):
+            in_venv = os.environ.get("VIRTUAL_ENV") is not None
+            if in_venv or not venv.exists():
+                create = True
+
+            if venv.exists():
+                # We need to check if the patch version is correct
+                _venv = VirtualEnv(venv)
+                current_patch = ".".join(str(v) for v in _venv.version_info[:3])
+
+                if patch != current_patch:
+                    create = True
+
+            self.create_venv(io, executable=python, force=create)
+
+        # Activate
+        envs[base_env_name] = {"minor": minor, "patch": patch}
+        envs_file.write(envs)
+
+        return self.get(reload=True)
+
+    def deactivate(self, io: IO) -> None:
+        venv_path = self._poetry.config.virtualenvs_path
+        name = self.generate_env_name(
+            self._poetry.package.name, str(self._poetry.file.parent)
+        )
+
+        envs_file = TOMLFile(venv_path / self.ENVS_FILE)
+        if envs_file.exists():
+            envs = envs_file.read()
+            env = envs.get(name)
+            if env is not None:
+                venv = venv_path / f"{name}-py{env['minor']}"
+                io.write_line(f"Deactivating virtualenv: {venv}")
+                del envs[name]
+
+                envs_file.write(envs)
+
+    def get(self, reload: bool = False) -> Env:
+        if self._env is not None and not reload:
+            return self._env
+
+        python_minor = (
+            InterpreterLookup.find()[1]
+            or ".".join(str(c) for c in sys.version_info[:2])
+        )
+
+        venv_path = self._poetry.config.virtualenvs_path
+
+        cwd = self._poetry.file.parent
+        envs_file = TOMLFile(venv_path / self.ENVS_FILE)
+        env = None
+        base_env_name = self.generate_env_name(self._poetry.package.name, str(cwd))
+        if envs_file.exists():
+            envs = envs_file.read()
+            env = envs.get(base_env_name)
+            if env:
+                python_minor = env["minor"]
+
+        # Check if we are inside a virtualenv or not
+        # Conda sets CONDA_PREFIX in its envs, see
+        # https://github.com/conda/conda/issues/2764
+        env_prefix = os.environ.get("VIRTUAL_ENV", os.environ.get("CONDA_PREFIX"))
+        conda_env_name = os.environ.get("CONDA_DEFAULT_ENV")
+        # It's probably not a good idea to pollute Conda's global "base" env, since
+        # most users have it activated all the time.
+        in_venv = env_prefix is not None and conda_env_name != "base"
+
+        if not in_venv or env is not None:
+            # Checking if a local virtualenv exists
+            if (
+                self._poetry.config.get("virtualenvs.in-project") is not False
+                and (cwd / ".venv").exists()
+                and (cwd / ".venv").is_dir()
+            ):
+                venv = cwd / ".venv"
+
+                return VirtualEnv(venv)
+
+            create_venv = self._poetry.config.get("virtualenvs.create", True)
+
+            if not create_venv:
+                return self.get_system_env()
+
+            venv_path = self._poetry.config.virtualenvs_path
+
+            name = f"{base_env_name}-py{python_minor.strip()}"
+
+            venv = venv_path / name
+
+            if not venv.exists():
+                return self.get_system_env()
+
+            return VirtualEnv(venv)
+
+        if env_prefix is not None:
+            prefix = Path(env_prefix)
+            base_prefix = None
+        else:
+            prefix = Path(sys.prefix)
+            base_prefix = self.get_base_prefix()
+
+        return VirtualEnv(prefix, base_prefix)
+
+    def list(self, name: str | None = None) -> list[VirtualEnv]:
+        if name is None:
+            name = self._poetry.package.name
+
+        venv_name = self.generate_env_name(name, str(self._poetry.file.parent))
+        venv_path = self._poetry.config.virtualenvs_path
+        env_list = [
+            VirtualEnv(Path(p)) for p in sorted(venv_path.glob(f"{venv_name}-py*"))
+        ]
+
+        venv = self._poetry.file.parent / ".venv"
+        if (
+            self._poetry.config.get("virtualenvs.in-project")
+            and venv.exists()
+            and venv.is_dir()
+        ):
+            env_list.insert(0, VirtualEnv(venv))
+        return env_list
+
+    def remove(self, python: str) -> Env:
+        venv_path = self._poetry.config.virtualenvs_path
+
+        cwd = self._poetry.file.parent
+        envs_file = TOMLFile(venv_path / self.ENVS_FILE)
+        base_env_name = self.generate_env_name(self._poetry.package.name, str(cwd))
+
+        if python.startswith(base_env_name):
+            venvs = self.list()
+            for venv in venvs:
+                if venv.path.name == python:
+                    # Exact virtualenv name
+                    if not envs_file.exists():
+                        self.remove_venv(venv.path)
+
+                        return venv
+
+                    venv_minor = ".".join(str(v) for v in venv.version_info[:2])
+                    base_env_name = self.generate_env_name(cwd.name, str(cwd))
+                    envs = envs_file.read()
+
+                    current_env = envs.get(base_env_name)
+                    if not current_env:
+                        self.remove_venv(venv.path)
+
+                        return venv
+
+                    if current_env["minor"] == venv_minor:
+                        del envs[base_env_name]
+                        envs_file.write(envs)
+
+                    self.remove_venv(venv.path)
+
+                    return venv
+
+            raise ValueError(f'Environment "{python}" does not exist.')
+
+        try:
+            python_version = Version.parse(python)
+            python = f"python{python_version.major}"
+            if python_version.precision > 1:
+                python += f".{python_version.minor}"
+        except ValueError:
+            # Executable in PATH or full executable path
+            pass
+
+        try:
+            python_version_string = decode(
+                subprocess.check_output(
+                    list_to_shell_command([python, "-c", GET_PYTHON_VERSION_ONELINER]),
+                    shell=True,
+                )
+            )
+        except CalledProcessError as e:
+            raise EnvCommandError(e)
+
+        python_version = Version.parse(python_version_string.strip())
+        minor = f"{python_version.major}.{python_version.minor}"
+
+        name = f"{base_env_name}-py{minor}"
+        venv_path = venv_path / name
+
+        if not venv_path.exists():
+            raise ValueError(f'Environment "{name}" does not exist.')
+
+        if envs_file.exists():
+            envs = envs_file.read()
+            current_env = envs.get(base_env_name)
+            if current_env is not None:
+                current_minor = current_env["minor"]
+
+                if current_minor == minor:
+                    del envs[base_env_name]
+                    envs_file.write(envs)
+
+        self.remove_venv(venv_path)
+
+        return VirtualEnv(venv_path, venv_path)
+
+    def create_venv(
+        self,
+        io: IO,
+        name: str | None = None,
+        executable: str | None = None,
+        force: bool = False,
+    ) -> Env:
+        if self._env is not None and not force:
+            return self._env
+
+        cwd = self._poetry.file.parent
+        env = self.get(reload=True)
+
+        if not env.is_sane():
+            force = True
+
+        if env.is_venv() and not force:
+            # Already inside a virtualenv.
+            current_python = Version.parse(
+                ".".join(str(c) for c in env.version_info[:3])
+            )
+            if not self._poetry.package.python_constraint.allows(current_python):
+                raise InvalidCurrentPythonVersionError(
+                    self._poetry.package.python_versions, str(current_python)
+                )
+            return env
+
+        create_venv = self._poetry.config.get("virtualenvs.create")
+        root_venv = self._poetry.config.get("virtualenvs.in-project")
+        prefer_active_python = self._poetry.config.get(
+            "virtualenvs.prefer-active-python"
+        )
+        venv_prompt = self._poetry.config.get("virtualenvs.prompt")
+
+        if not executable and prefer_active_python:
+            executable = self._detect_active_python(io)
+
+        venv_path = cwd / ".venv" if root_venv else self._poetry.config.virtualenvs_path
+        if not name:
+            name = self._poetry.package.name
+        assert name is not None
+
+        python_patch, python_minor = None, None
+        if executable:
+            python_patch = decode(
+                subprocess.check_output(
+                    list_to_shell_command(
+                        [executable, "-c", GET_PYTHON_VERSION_ONELINER]
+                    ),
+                    shell=True,
+                ).strip()
+            )
+            python_minor = ".".join(python_patch.split(".")[:2])
+
+        supported_python = self._poetry.package.python_constraint
+        if not python_patch or not supported_python.allows(
+            Version.parse(python_patch)
+        ):
+            # The currently activated or chosen Python version
+            # is not compatible with the Python constraint specified
+            # for the project.
+            # If an executable has been specified, we stop there
+            # and notify the user of the incompatibility.
+            # Otherwise, we try to find a compatible Python version.
+            if executable and not prefer_active_python:
+                raise NoCompatiblePythonVersionFound(
+                    self._poetry.package.python_versions, python_patch
+                )
+
+            executable, python_minor, python_patch = InterpreterLookup.find(
+                supported_python
+            )
+
+            if not executable:
+                raise NoCompatiblePythonVersionFound(
+                    self._poetry.package.python_versions
+                )
+
+        if root_venv:
+            venv = venv_path
+        else:
+            name = self.generate_env_name(name, str(cwd))
+            name = f"{name}-py{python_minor.strip()}"
+            venv = venv_path / name
+
+        if venv_prompt is not None:
+            venv_prompt = venv_prompt.format(
+                project_name=self._poetry.package.name or "virtualenv",
+                python_version=python_minor,
+            )
+
+        if not venv.exists():
+            if create_venv is False:
+                io.write_line(
+                    ""
+                    "Skipping virtualenv creation, "
+                    "as specified in config file."
+                    ""
+                )
+
+                return self.get_system_env()
+
+            io.write_line(
+                f"Creating virtualenv {name} in"
+                f" {venv_path if not WINDOWS else get_real_windows_path(venv_path)!s}"
+            )
+        else:
+            create_venv = False
+            if force:
+                if not env.is_sane():
+                    io.write_error_line(
+                        f"The virtual environment found in {env.path} seems to"
+                        " be broken."
+                    )
+                io.write_line(f"Recreating virtualenv {name} in {venv!s}")
+                self.remove_venv(venv)
+                create_venv = True
+            elif io.is_very_verbose():
+                io.write_line(f"Virtualenv {name} already exists.")
+
+        if create_venv:
+            self.build_venv(
+                venv,
+                executable=executable,
+                flags=self._poetry.config.get("virtualenvs.options"),
+                prompt=venv_prompt,
+            )
+
+        return VirtualEnv(venv)
+
+    @classmethod
+    def build_venv(
+        cls,
+        path: Path | str,
+        executable: str | Path | None = None,
+        flags: dict[str, bool] | None = None,
+        with_pip: bool | None = None,
+        with_wheel: bool | None = None,
+        with_setuptools: bool | None = None,
+        prompt: str | None = None,
+    ) -> virtualenv.run.session.Session:
+        if WINDOWS:
+            path = get_real_windows_path(path)
+            executable = get_real_windows_path(executable) if executable else None
+
+        flags = flags or {}
+
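+        # virtualenv seed packages are skipped by default (--no-pip,
+        # --no-setuptools) unless the caller opts in explicitly or has preset
+        # the corresponding flag.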
+        flags["no-pip"] = (
+            not with_pip if with_pip is not None else flags.pop("no-pip", True)
+        )
+
+        flags["no-setuptools"] = (
+            not with_setuptools
+            if with_setuptools is not None
+            else flags.pop("no-setuptools", True)
+        )
+
+        # we want wheels to be enabled when pip is required and it has not been
+        # explicitly disabled
+        flags["no-wheel"] = (
+            not with_wheel
+            if with_wheel is not None
+            else flags.pop("no-wheel", flags["no-pip"])
+        )
+
+        if isinstance(executable, Path):
+            executable = executable.resolve().as_posix()
+
+        args = [
+            "--no-download",
+            "--no-periodic-update",
+            "--python",
+            executable or "python",
+        ]
+
+        if prompt is not None:
+            args.extend(["--prompt", prompt])
+
+        for flag, value in flags.items():
+            if value is True:
+                args.append(f"--{flag}")
+
+        args.append(str(path))
+
+        cli_result = virtualenv.cli_run(args)
+
+        # Exclude the venv folder from macOS Time Machine backups
+        # TODO: Add backup-ignore markers for other platforms too
+        if sys.platform == "darwin":
+            import xattr
+
+            xattr.setxattr(
+                str(path),
+                "com.apple.metadata:com_apple_backup_excludeItem",
+                plistlib.dumps("com.apple.backupd", fmt=plistlib.FMT_BINARY),
+            )
+
+        return cli_result
+
+    @classmethod
+    def remove_venv(cls, path: Path | str) -> None:
+        if isinstance(path, str):
+            path = Path(path)
+        assert path.is_dir()
+        try:
+            remove_directory(path)
+            return
+        except OSError as e:
+            # Continue only if the directory itself is busy
+            # (errno 16: device or resource busy), e.g. a mounted volume.
+            if e.errno != 16:
+                raise
+
+        # Delete all files and folders but the top-level one. This is because
+        # sometimes the venv folder is mounted by the OS, such as in a Docker
+        # volume. In such cases, an attempt to delete the folder itself will
+        # result in an `OSError`.
+        # See https://github.com/python-poetry/poetry/pull/2064
+        for file_path in path.iterdir():
+            if file_path.is_file() or file_path.is_symlink():
+                file_path.unlink()
+            elif file_path.is_dir():
+                remove_directory(file_path, force=True)
+
+    @classmethod
+    def get_system_env(cls, naive: bool = False) -> Env:
+        """
+        Retrieve the current Python environment.
+
+        This can be the base Python environment or an activated virtual environment.
+
+        This method also works around the issue that the virtual environment
+        used by Poetry internally (when installed via the custom installer)
+        is incorrectly detected as the system environment. Note that this
+        workaround only happens when `naive` is False, since there are times
+        when we actually want to retrieve Poetry's custom virtual environment
+        (e.g. plugin installation or self update).
+        """
+        pydef_executable, _, _ = InterpreterLookup.find()
+        prefix, base_prefix = (
+            Path(pydef_executable) if pydef_executable else None,
+            Path(cls.get_base_prefix())
+        )
+        env: Env = SystemEnv(prefix) if prefix else NullEnv()
+        if not naive and prefix:
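+            # A "poetry_env" marker file, or a prefix living inside Poetry's
+            # data directory, means we are running from Poetry's own internal
+            # virtualenv and should report its parent interpreter instead.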
+            if prefix.joinpath("poetry_env").exists():
+                env = GenericEnv(base_prefix, child_env=env)
+            else:
+                from poetry.locations import data_dir
+
+                try:
+                    prefix.relative_to(data_dir())
+                except ValueError:
+                    pass
+                else:
+                    env = GenericEnv(base_prefix, child_env=env)
+
+        return env
+
+    @classmethod
+    def get_base_prefix(cls) -> Path:
+        real_prefix = getattr(sys, "real_prefix", None)
+        if real_prefix is not None:
+            return Path(real_prefix)
+
+        base_prefix = getattr(sys, "base_prefix", None)
+        if base_prefix is not None:
+            return Path(base_prefix)
+
+        return Path(sys.prefix)
+
+    @classmethod
+    def generate_env_name(cls, name: str, cwd: str) -> str:
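+        # Build a deterministic, filesystem-safe name: the lowercased package
+        # name is sanitized and truncated, then suffixed with a short hash of
+        # the normalized project path so that same-named projects in different
+        # directories get distinct environments (names of the form
+        # "<name>-<8-char hash>").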
+        name = name.lower()
+        sanitized_name = re.sub(r'[ $`!*@"\\\r\n\t]', "_", name)[:42]
+        normalized_cwd = os.path.normcase(os.path.realpath(cwd))
+        h_bytes = hashlib.sha256(encode(normalized_cwd)).digest()
+        h_str = base64.urlsafe_b64encode(h_bytes).decode()[:8]
+
+        return f"{sanitized_name}-{h_str}"
+
+
+class Env:
+    """
+    An abstract Python environment.
+    """
+
+    def __init__(self, path: Path, base: Path | None = None) -> None:
+        self._is_windows = sys.platform == "win32"
+        self._is_mingw = sysconfig.get_platform().startswith("mingw")
+        self._is_conda = bool(os.environ.get("CONDA_DEFAULT_ENV"))
+
+        if self._is_windows:
+            path = get_real_windows_path(path)
+            base = get_real_windows_path(base) if base else None
+
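+        # POSIX layouts (and MinGW builds on Windows) keep executables in
+        # "bin"; native Windows environments use "Scripts".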
+        if not self._is_windows or self._is_mingw:
+            bin_dir = "bin"
+        else:
+            bin_dir = "Scripts"
+        self._path = path
+        self._bin_dir = self._path / bin_dir
+
+        self._executable = "python"
+        self._pip_executable = "pip"
+
+        self.find_executables()
+
+        self._base = base or path
+
+        self._marker_env: dict[str, Any] | None = None
+        self._pip_version: Version | None = None
+        self._site_packages: SitePackages | None = None
+        self._paths: dict[str, str] | None = None
+        self._supported_tags: list[Tag] | None = None
+        self._purelib: Path | None = None
+        self._platlib: Path | None = None
+        self._script_dirs: list[Path] | None = None
+
+        self._embedded_pip_path: str | None = None
+
+    @property
+    def path(self) -> Path:
+        return self._path
+
+    @property
+    def base(self) -> Path:
+        return self._base
+
+    @property
+    def version_info(self) -> tuple[Any, ...]:
+        return tuple(self.marker_env["version_info"])
+
+    @property
+    def python_implementation(self) -> str:
+        implementation: str = self.marker_env["platform_python_implementation"]
+        return implementation
+
+    @property
+    def python(self) -> str:
+        """
+        Path to current python executable
+        """
+        return self._bin(self._executable)
+
+    @property
+    def marker_env(self) -> dict[str, Any]:
+        if self._marker_env is None:
+            self._marker_env = self.get_marker_env()
+
+        return self._marker_env
+
+    @property
+    def parent_env(self) -> GenericEnv:
+        return GenericEnv(self.base, child_env=self)
+
+    def _find_python_executable(self) -> None:
+        bin_dir = self._bin_dir
+
+        if self._is_windows and self._is_conda:
+            bin_dir = self._path
+
+        python_executables = sorted(
+            p.name
+            for p in bin_dir.glob("python*")
+            if re.match(r"python(?:\d+(?:\.\d+)?)?(?:\.exe)?$", p.name)
+        )
+        if python_executables:
+            executable = python_executables[0]
+            if executable.endswith(".exe"):
+                executable = executable[:-4]
+
+            self._executable = executable
+
+    def _find_pip_executable(self) -> None:
+        pip_executables = sorted(
+            p.name
+            for p in self._bin_dir.glob("pip*")
+            if re.match(r"pip(?:\d+(?:\.\d+)?)?(?:\.exe)?$", p.name)
+        )
+        if pip_executables:
+            pip_executable = pip_executables[0]
+            if pip_executable.endswith(".exe"):
+                pip_executable = pip_executable[:-4]
+
+            self._pip_executable = pip_executable
+
+    def find_executables(self) -> None:
+        self._find_python_executable()
+        self._find_pip_executable()
+
+    def get_embedded_wheel(self, distribution: str) -> Path:
+        wheel: Wheel = get_embed_wheel(
+            distribution, f"{self.version_info[0]}.{self.version_info[1]}"
+        )
+        path: Path = wheel.path
+        return path
+
+    @property
+    def pip_embedded(self) -> str:
+        if self._embedded_pip_path is None:
+            self._embedded_pip_path = str(self.get_embedded_wheel("pip") / "pip")
+        return self._embedded_pip_path
+
+    @property
+    def pip(self) -> str:
+        """
+        Path to current pip executable
+        """
+        # we do not use as_posix() here due to issues with the Windows
+        # pathlib2 implementation
+        path = self._bin(self._pip_executable)
+        if not Path(path).exists():
+            return str(self.pip_embedded)
+        return path
+
+    @property
+    def platform(self) -> str:
+        return sys.platform
+
+    @property
+    def os(self) -> str:
+        return os.name
+
+    @property
+    def pip_version(self) -> Version:
+        if self._pip_version is None:
+            self._pip_version = self.get_pip_version()
+
+        return self._pip_version
+
+    @property
+    def site_packages(self) -> SitePackages:
+        if self._site_packages is None:
+            # we disable write checks if no user site exists
+            fallbacks = [self.usersite] if self.usersite else []
+            self._site_packages = SitePackages(
+                self.purelib,
+                self.platlib,
+                fallbacks,
+                skip_write_checks=not fallbacks,
+            )
+        return self._site_packages
+
+    @property
+    def usersite(self) -> Path | None:
+        if "usersite" in self.paths:
+            return Path(self.paths["usersite"])
+        return None
+
+    @property
+    def userbase(self) -> Path | None:
+        if "userbase" in self.paths:
+            return Path(self.paths["userbase"])
+        return None
+
+    @property
+    def purelib(self) -> Path:
+        if self._purelib is None:
+            self._purelib = Path(self.paths["purelib"])
+
+        return self._purelib
+
+    @property
+    def platlib(self) -> Path:
+        if self._platlib is None:
+            if "platlib" in self.paths:
+                self._platlib = Path(self.paths["platlib"])
+            else:
+                self._platlib = self.purelib
+
+        return self._platlib
+
+    def is_path_relative_to_lib(self, path: Path) -> bool:
+        for lib_path in [self.purelib, self.platlib]:
+            with contextlib.suppress(ValueError):
+                path.relative_to(lib_path)
+                return True
+
+        return False
+
+    @property
+    def sys_path(self) -> list[str]:
+        raise NotImplementedError()
+
+    @property
+    def paths(self) -> dict[str, str]:
+        if self._paths is None:
+            self._paths = self.get_paths()
+
+        return self._paths
+
+    @property
+    def supported_tags(self) -> list[Tag]:
+        if self._supported_tags is None:
+            self._supported_tags = self.get_supported_tags()
+
+        return self._supported_tags
+
+    @classmethod
+    def get_base_prefix(cls) -> Path:
+        real_prefix = getattr(sys, "real_prefix", None)
+        if real_prefix is not None:
+            return Path(real_prefix)
+
+        base_prefix = getattr(sys, "base_prefix", None)
+        if base_prefix is not None:
+            return Path(base_prefix)
+
+        return Path(sys.prefix)
+
+    def get_version_info(self) -> tuple[Any, ...]:
+        raise NotImplementedError()
+
+    def get_python_implementation(self) -> str:
+        raise NotImplementedError()
+
+    def get_marker_env(self) -> dict[str, Any]:
+        raise NotImplementedError()
+
+    def get_pip_command(self, embedded: bool = False) -> list[str]:
+        raise NotImplementedError()
+
+    def get_supported_tags(self) -> list[Tag]:
+        raise NotImplementedError()
+
+    def get_pip_version(self) -> Version:
+        raise NotImplementedError()
+
+    def get_paths(self) -> dict[str, str]:
+        raise NotImplementedError()
+
+    def is_valid_for_marker(self, marker: BaseMarker) -> bool:
+        valid: bool = marker.validate(self.marker_env)
+        return valid
+
+    def is_sane(self) -> bool:
+        """
+        Checks whether the current environment is sane or not.
+        """
+        return True
+
+    def get_command_from_bin(self, bin: str) -> list[str]:
+        if bin == "pip":
+            # when pip is required we need to ensure that we fall back to the
+            # embedded pip when pip is not available in the environment
+            return self.get_pip_command()
+
+        return [self._bin(bin)]
+
+    def run(self, bin: str, *args: str, **kwargs: Any) -> str | int:
+        cmd = self.get_command_from_bin(bin) + list(args)
+        return self._run(cmd, **kwargs)
+
+    def run_pip(self, *args: str, **kwargs: Any) -> int | str:
+        pip = self.get_pip_command(embedded=True)
+        cmd = pip + list(args)
+        return self._run(cmd, **kwargs)
+
+    def run_python_script(self, content: str, **kwargs: Any) -> int | str:
+        return self.run(self._executable, "-W", "ignore", "-", input_=content, **kwargs)
+
+    def _run(self, cmd: list[str], **kwargs: Any) -> int | str:
+        """
+        Run a command inside the Python environment.
+        """
+        call = kwargs.pop("call", False)
+        input_ = kwargs.pop("input_", None)
+        env = kwargs.pop("env", dict(os.environ))
+
+        try:
+            if self._is_windows:
+                kwargs["shell"] = True
+
+            command: str | list[str]
+            if kwargs.get("shell", False):
+                command = list_to_shell_command(cmd)
+            else:
+                command = cmd
+
+            if input_:
+                output = subprocess.run(
+                    command,
+                    stdout=subprocess.PIPE,
+                    stderr=subprocess.STDOUT,
+                    input=encode(input_),
+                    check=True,
+                    env=env,
+                    **kwargs,
+                ).stdout
+            elif call:
+                return subprocess.call(
+                    command, stderr=subprocess.STDOUT, env=env, **kwargs
+                )
+            else:
+                output = subprocess.check_output(
+                    command, stderr=subprocess.STDOUT, env=env, **kwargs
+                )
+        except CalledProcessError as e:
+            raise EnvCommandError(e, input=input_)
+
+        return decode(output)
+
+    def execute(self, bin: str, *args: str, **kwargs: Any) -> int:
+        command = self.get_command_from_bin(bin) + list(args)
+        env = kwargs.pop("env", dict(os.environ))
+
+        if not self._is_windows:
+            return os.execvpe(command[0], command, env=env)
+
+        kwargs["shell"] = True
+        exe = subprocess.Popen([command[0]] + command[1:], env=env, **kwargs)
+        exe.communicate()
+        return exe.returncode
+
+    def is_venv(self) -> bool:
+        raise NotImplementedError()
+
+    @property
+    def script_dirs(self) -> list[Path]:
+        if self._script_dirs is None:
+            scripts = self.paths.get("scripts")
+            self._script_dirs = [
+                Path(scripts) if scripts is not None else self._bin_dir
+            ]
+            if self.userbase:
+                self._script_dirs.append(self.userbase / self._script_dirs[0].name)
+        return self._script_dirs
+
+    def _bin(self, bin: str) -> str:
+        """
+        Return path to the given executable.
+        """
+        if self._is_windows and not bin.endswith(".exe"):
+            bin_path = self._bin_dir / (bin + ".exe")
+        else:
+            bin_path = self._bin_dir / bin
+
+        if not bin_path.exists():
+            # On Windows, some executables can be in the base path
+            # This is especially true when installing Python with
+            # the official installer, where python.exe will be at
+            # the root of the env path.
+            if self._is_windows:
+                if not bin.endswith(".exe"):
+                    bin_path = self._path / (bin + ".exe")
+                else:
+                    bin_path = self._path / bin
+
+                if bin_path.exists():
+                    return str(bin_path)
+
+            return bin
+
+        return str(bin_path)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Env):
+            return False
+
+        return other.__class__ == self.__class__ and other.path == self.path
+
+    def __repr__(self) -> str:
+        return f'{self.__class__.__name__}("{self._path}")'
+
+
+class SystemEnv(Env):
+    """
+    A system (i.e. not a virtualenv) Python environment.
+    """
+
+    def __init__(
+        self, path: Path, base: Path | None = None, auto_path: bool = True
+    ) -> None:
+        self._is_windows = sys.platform == "win32"
+        if auto_path and path:
+            path = Path(
+                self._run(
+                    [str(path), "-W", "ignore", "-"],
+                    input_=GET_BASE_PREFIX
+                ).strip()
+            )
+        super().__init__(path, base=base)
+
+    @property
+    def sys_path(self) -> list[str]:
+        output = self.run_python_script(GET_SYS_PATH)
+        return json.loads(output)
+
+    def get_version_info(self) -> tuple[Any, ...]:
+        output = self.run_python_script(GET_PYTHON_VERSION)
+        return tuple(int(s) for s in output.strip().split("."))
+
+    def get_python_implementation(self) -> str:
+        return self.marker_env["platform_python_implementation"]
+
+    def get_marker_env(self) -> dict[str, Any]:
+        output = self.run_python_script(GET_ENVIRONMENT_INFO)
+        return json.loads(output)
+
+    def get_paths(self) -> dict[str, str]:
+        output = self.run_python_script(GET_PATHS)
+        return json.loads(output)
+
+    def get_supported_tags(self) -> list[Tag]:
+        output = self.run_python_script(GET_SYS_TAGS)
+        return [Tag(*t) for t in json.loads(output)]
+
+    def get_pip_command(self, embedded: bool = False) -> list[str]:
+        return [
+            self._bin(self._executable),
+            self.pip_embedded if embedded else self.pip,
+        ]
+
+    def get_pip_version(self) -> Version:
+        output = self.run_pip("--version").strip()
+        m = re.match("pip (.+?)(?: from .+)?$", output)
+        if not m:
+            return Version.parse("0.0")
+        return Version.parse(m.group(1))
+
+    def is_venv(self) -> bool:
+        return self._path != self._base
+
+
+class VirtualEnv(Env):
+    """
+    A virtual Python environment.
+    """
+
+    def __init__(self, path: Path, base: Path | None = None) -> None:
+        super().__init__(path, base)
+
+        # If base is None, it probably means this is
+        # a virtualenv created from VIRTUAL_ENV.
+        # In this case we need to get sys.base_prefix
+        # from inside the virtualenv.
+        if base is None:
+            output = self.run_python_script(GET_BASE_PREFIX)
+            assert isinstance(output, str)
+            self._base = Path(output.strip())
+
+    @property
+    def sys_path(self) -> list[str]:
+        output = self.run_python_script(GET_SYS_PATH)
+        assert isinstance(output, str)
+        paths: list[str] = json.loads(output)
+        return paths
+
+    def get_version_info(self) -> tuple[Any, ...]:
+        output = self.run_python_script(GET_PYTHON_VERSION)
+        assert isinstance(output, str)
+
+        return tuple(int(s) for s in output.strip().split("."))
+
+    def get_python_implementation(self) -> str:
+        implementation: str = self.marker_env["platform_python_implementation"]
+        return implementation
+
+    def get_pip_command(self, embedded: bool = False) -> list[str]:
+        # We're in a virtualenv that is known to be sane,
+        # so assume that we have a functional pip
+        return [
+            self._bin(self._executable),
+            self.pip_embedded if embedded else self.pip,
+        ]
+
+    def get_supported_tags(self) -> list[Tag]:
+        output = self.run_python_script(GET_SYS_TAGS)
+        assert isinstance(output, str)
+
+        return [Tag(*t) for t in json.loads(output)]
+
+    def get_marker_env(self) -> dict[str, Any]:
+        output = self.run_python_script(GET_ENVIRONMENT_INFO)
+        assert isinstance(output, str)
+
+        env: dict[str, Any] = json.loads(output)
+        return env
+
+    def get_pip_version(self) -> Version:
+        output = self.run_pip("--version")
+        assert isinstance(output, str)
+        output = output.strip()
+
+        m = re.match("pip (.+?)(?: from .+)?$", output)
+        if not m:
+            return Version.parse("0.0")
+
+        return Version.parse(m.group(1))
+
+    def get_paths(self) -> dict[str, str]:
+        output = self.run_python_script(GET_PATHS)
+        assert isinstance(output, str)
+        paths: dict[str, str] = json.loads(output)
+        return paths
+
+    def is_venv(self) -> bool:
+        return True
+
+    def is_sane(self) -> bool:
+        # A virtualenv is considered sane if the "python" executable exists.
+        return os.path.exists(self.python)
+
+    def _run(self, cmd: list[str], **kwargs: Any) -> int | str:
+        kwargs["env"] = self.get_temp_environ(environ=kwargs.get("env"))
+        return super()._run(cmd, **kwargs)
+
+    def get_temp_environ(
+        self,
+        environ: dict[str, str] | None = None,
+        exclude: list[str] | None = None,
+        **kwargs: str,
+    ) -> dict[str, str]:
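+        # Build an environment mapping that simulates an activated virtualenv:
+        # variables that would confuse the interpreter are stripped, PATH gets
+        # the venv's bin directory prepended, and VIRTUAL_ENV is set.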
+        exclude = exclude or []
+        exclude.extend(["PYTHONHOME", "__PYVENV_LAUNCHER__"])
+
+        if environ:
+            environ = deepcopy(environ)
+            for key in exclude:
+                environ.pop(key, None)
+        else:
+            environ = {k: v for k, v in os.environ.items() if k not in exclude}
+
+        environ.update(kwargs)
+
+        environ["PATH"] = self._updated_path()
+        environ["VIRTUAL_ENV"] = str(self._path)
+
+        return environ
+
+    def execute(self, bin: str, *args: str, **kwargs: Any) -> int:
+        kwargs["env"] = self.get_temp_environ(environ=kwargs.get("env"))
+        return super().execute(bin, *args, **kwargs)
+
+    @contextmanager
+    def temp_environ(self) -> Iterator[None]:
+        environ = dict(os.environ)
+        try:
+            yield
+        finally:
+            os.environ.clear()
+            os.environ.update(environ)
+
+    def _updated_path(self) -> str:
+        return os.pathsep.join([str(self._bin_dir), os.environ.get("PATH", "")])
+
+
+class GenericEnv(VirtualEnv):
+    def __init__(
+        self, path: Path, base: Path | None = None, child_env: Env | None = None
+    ) -> None:
+        self._child_env = child_env
+
+        super().__init__(path, base=base)
+
+    def find_executables(self) -> None:
+        patterns = [("python*", "pip*")]
+
+        if self._child_env:
+            minor_version = (
+                f"{self._child_env.version_info[0]}.{self._child_env.version_info[1]}"
+            )
+            major_version = f"{self._child_env.version_info[0]}"
+            patterns = [
+                (f"python{minor_version}", f"pip{minor_version}"),
+                (f"python{major_version}", f"pip{major_version}"),
+            ]
+
+        python_executable = None
+        pip_executable = None
+
+        for python_pattern, pip_pattern in patterns:
+            if python_executable and pip_executable:
+                break
+
+            if not python_executable:
+                python_executables = sorted(
+                    p.name
+                    for p in self._bin_dir.glob(python_pattern)
+                    if re.match(r"python(?:\d+(?:\.\d+)?)?(?:\.exe)?$", p.name)
+                )
+
+                if python_executables:
+                    executable = python_executables[0]
+                    if executable.endswith(".exe"):
+                        executable = executable[:-4]
+
+                    python_executable = executable
+
+            if not pip_executable:
+                pip_executables = sorted(
+                    p.name
+                    for p in self._bin_dir.glob(pip_pattern)
+                    if re.match(r"pip(?:\d+(?:\.\d+)?)?(?:\.exe)?$", p.name)
+                )
+                if pip_executables:
+                    pip_executable = pip_executables[0]
+                    if pip_executable.endswith(".exe"):
+                        pip_executable = pip_executable[:-4]
+
+            if python_executable:
+                self._executable = python_executable
+
+            if pip_executable:
+                self._pip_executable = pip_executable
+
+    def get_paths(self) -> dict[str, str]:
+        output = self.run_python_script(GET_PATHS_FOR_GENERIC_ENVS)
+        assert isinstance(output, str)
+
+        paths: dict[str, str] = json.loads(output)
+        return paths
+
+    def execute(self, bin: str, *args: str, **kwargs: Any) -> int:
+        command = self.get_command_from_bin(bin) + list(args)
+        env = kwargs.pop("env", dict(os.environ))
+
+        if not self._is_windows:
+            return os.execvpe(command[0], command, env=env)
+
+        exe = subprocess.Popen([command[0]] + command[1:], env=env, **kwargs)
+        exe.communicate()
+
+        return exe.returncode
+
+    def _run(self, cmd: list[str], **kwargs: Any) -> int | str:
+        return super(VirtualEnv, self)._run(cmd, **kwargs)
+
+    def is_venv(self) -> bool:
+        return self._path != self._base
+
+
+class NullEnv(SystemEnv):
+    def __init__(
+        self, path: Path | None = None, base: Path | None = None, execute: bool = False
+    ) -> None:
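+        # NullEnv records every command it is asked to run (see self.executed)
+        # and only actually executes them when execute=True, which makes it
+        # usable as a dry-run or test double.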
+        if path is None:
+            path = Path(sys.prefix)
+
+        super().__init__(path, base=base, auto_path=False)
+
+        self._execute = execute
+        self.executed: list[list[str]] = []
+
+    @property
+    def python(self) -> str:
+        return sys.executable
+
+    @property
+    def sys_path(self) -> list[str]:
+        return sys.path
+
+    def get_version_info(self) -> tuple[Any, ...]:
+        return tuple(sys.version_info)
+
+    def get_python_implementation(self) -> str:
+        return platform.python_implementation()
+
+    def get_pip_command(self, embedded: bool = False) -> list[str]:
+        return [sys.executable, self.pip_embedded if embedded else self.pip]
+
+    def get_paths(self) -> dict[str, str]:
+        # We can't use sysconfig.get_paths() because
+        # on some distributions it does not return the proper paths
+        # (those used by pip for instance). We go through distutils
+        # to get the proper ones.
+        import site
+
+        from distutils.command.install import SCHEME_KEYS
+        from distutils.core import Distribution
+
+        d = Distribution()
+        d.parse_config_files()
+        with warnings.catch_warnings():
+            warnings.filterwarnings("ignore", "setup.py install is deprecated")
+            obj = d.get_command_obj("install", create=True)
+        assert obj is not None
+        obj.finalize_options()
+
+        paths = sysconfig.get_paths().copy()
+        for key in SCHEME_KEYS:
+            if key == "headers":
+                # headers is not a path returned by sysconfig.get_paths()
+                continue
+
+            paths[key] = getattr(obj, f"install_{key}")
+
+        if site.check_enableusersite():
+            usersite = getattr(obj, "install_usersite", None)
+            userbase = getattr(obj, "install_userbase", None)
+            if usersite is not None and userbase is not None:
+                paths["usersite"] = usersite
+                paths["userbase"] = userbase
+
+        return paths
+
+    def get_supported_tags(self) -> list[Tag]:
+        return list(sys_tags())
+
+    def get_marker_env(self) -> dict[str, Any]:
+        if hasattr(sys, "implementation"):
+            info = sys.implementation.version
+            iver = f"{info.major}.{info.minor}.{info.micro}"
+            kind = info.releaselevel
+            if kind != "final":
+                iver += kind[0] + str(info.serial)
+
+            implementation_name = sys.implementation.name
+        else:
+            iver = "0"
+            implementation_name = ""
+
+        return {
+            "implementation_name": implementation_name,
+            "implementation_version": iver,
+            "os_name": os.name,
+            "platform_machine": platform.machine(),
+            "platform_release": platform.release(),
+            "platform_system": platform.system(),
+            "platform_version": platform.version(),
+            "python_full_version": platform.python_version(),
+            "platform_python_implementation": platform.python_implementation(),
+            "python_version": ".".join(platform.python_version().split(".")[:2]),
+            "sys_platform": sys.platform,
+            "version_info": sys.version_info,
+            "interpreter_name": interpreter_name(),
+            "interpreter_version": interpreter_version(),
+        }
+
+    def get_pip_version(self) -> Version:
+        from pip import __version__
+
+        return Version.parse(__version__)
+
+    def _run(self, cmd: list[str], **kwargs: Any) -> int | str:
+        self.executed.append(cmd)
+
+        if self._execute:
+            return super()._run(cmd, **kwargs)
+        return 0
+
+    def execute(self, bin: str, *args: str, **kwargs: Any) -> int:
+        self.executed.append([bin] + list(args))
+
+        if self._execute:
+            return super().execute(bin, *args, **kwargs)
+        return 0
+
+    def _bin(self, bin: str) -> str:
+        return bin
+
+
+@contextmanager
+def ephemeral_environment(
+    executable: str | Path | None = None,
+    flags: dict[str, bool] | None = None,
+) -> Iterator[VirtualEnv]:
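+    # Typical (hypothetical) usage:
+    #     with ephemeral_environment(executable="python3") as venv:
+    #         venv.run_pip("install", "build")
+    # The virtualenv lives in a temporary directory that is removed on exit.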
+    with temporary_directory() as tmp_dir:
+        # TODO: cache PEP 517 build environment corresponding to each project venv
+        venv_dir = Path(tmp_dir) / ".venv"
+        EnvManager.build_venv(
+            path=venv_dir.as_posix(),
+            executable=executable,
+            flags=flags,
+        )
+        yield VirtualEnv(venv_dir, venv_dir)
+
+
+@contextmanager
+def build_environment(
+    poetry: CorePoetry, env: Env | None = None, io: IO | None = None
+) -> Iterator[Env]:
+    """
+    If a build script is specified for the project, there could be additional
+    build-time dependencies, e.g. cython or setuptools. In these cases, we
+    create an ephemeral build environment with all requirements specified under
+    `build-system.requires` and return it. Otherwise, the given default project
+    environment is returned.
+    """
+    if not env or poetry.package.build_script:
+        with ephemeral_environment(executable=env.python if env else None) as venv:
+            overwrite = (
+                io is not None and io.output.is_decorated() and not io.is_debug()
+            )
+
+            if io:
+                if not overwrite:
+                    io.write_line("")
+
+                requires = [
+                    f"{requirement}"
+                    for requirement in poetry.pyproject.build_system.requires
+                ]
+
+                io.overwrite(
+                    "Preparing build environment with build-system requirements"
+                    f" {', '.join(requires)}"
+                )
+
+            venv.run_pip(
+                "install",
+                "--disable-pip-version-check",
+                "--ignore-installed",
+                *poetry.pyproject.build_system.requires,
+            )
+
+            if overwrite:
+                assert io is not None
+                io.write_line("")
+
+            yield venv
+    else:
+        yield env
+
+
+class MockEnv(NullEnv):
+    def __init__(
+        self,
+        version_info: tuple[int, int, int] = (3, 7, 0),
+        python_implementation: str = "CPython",
+        platform: str = "darwin",
+        os_name: str = "posix",
+        is_venv: bool = False,
+        pip_version: str = "19.1",
+        sys_path: list[str] | None = None,
+        marker_env: dict[str, Any] | None = None,
+        supported_tags: list[Tag] | None = None,
+        **kwargs: Any,
+    ) -> None:
+        super().__init__(**kwargs)
+
+        self._version_info = version_info
+        self._python_implementation = python_implementation
+        self._platform = platform
+        self._os_name = os_name
+        self._is_venv = is_venv
+        self._pip_version: Version = Version.parse(pip_version)
+        self._sys_path = sys_path
+        self._mock_marker_env = marker_env
+        self._supported_tags = supported_tags
+
+    @property
+    def platform(self) -> str:
+        return self._platform
+
+    @property
+    def os(self) -> str:
+        return self._os_name
+
+    @property
+    def pip_version(self) -> Version:
+        return self._pip_version
+
+    @property
+    def sys_path(self) -> list[str]:
+        if self._sys_path is None:
+            return super().sys_path
+
+        return self._sys_path
+
+    def get_marker_env(self) -> dict[str, Any]:
+        if self._mock_marker_env is not None:
+            return self._mock_marker_env
+
+        marker_env = super().get_marker_env()
+        marker_env["python_implementation"] = self._python_implementation
+        marker_env["version_info"] = self._version_info
+        marker_env["python_version"] = ".".join(str(v) for v in self._version_info[:2])
+        marker_env["python_full_version"] = ".".join(str(v) for v in self._version_info)
+        marker_env["sys_platform"] = self._platform
+        marker_env["interpreter_name"] = self._python_implementation.lower()
+        marker_env["interpreter_version"] = "cp" + "".join(
+            str(v) for v in self._version_info[:2]
+        )
+
+        return marker_env
+
+    def is_venv(self) -> bool:
+        return self._is_venv
+
+
+class InterpreterLookup:
+    @staticmethod
+    def _version_check(executable, supported_python=None):
+        try:
+            python_patch = decode(
+                subprocess.check_output(
+                    list_to_shell_command(
+                        [executable, "-c", GET_PYTHON_VERSION_ONELINER]
+                    ),
+                    shell=True
+                )
+            )
+        except CalledProcessError:
+            return False, None, None
+
+        if not python_patch:
+            return False, None, None
+
+        if not supported_python or supported_python.allows(
+            Version.parse(python_patch)
+        ):
+            python_minor = ".".join(python_patch.split(".")[:2])
+            return True, python_minor, python_patch
+
+        return False, None, None
+
+    @classmethod
+    def find(cls, constraint=None):
+        executable, minor, patch = None, None, None
+
+        for guess in ["python", "python3", "python2"]:
+            match, minor, patch = cls._version_check(guess, constraint)
+            if match:
+                return guess, minor, patch
+
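+        # Fall back to explicitly versioned interpreters, preferring Python 3
+        # over Python 2, a bare major version ("python3") over minor-versioned
+        # names, and newer minors first (e.g. python3.10 before python3.9).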
+        for python_to_try in sorted(
+            Package.AVAILABLE_PYTHONS,
+            key=lambda v: (
+                v.startswith("3"),
+                len(v) == 1,
+                int(v.split(".")[0]) * 100 + int((v.split(".") + ["0"])[1])
+            ),
+            reverse=True
+        ):
+            guess = f"python{python_to_try}"
+            match, minor, patch = cls._version_check(guess, constraint)
+            if match:
+                executable = guess
+                break
+
+        return executable, minor, patch
diff --git a/vendor/poetry/src/poetry/utils/extras.py b/vendor/poetry/src/poetry/utils/extras.py
new file mode 100644
index 00000000..e0681d46
--- /dev/null
+++ b/vendor/poetry/src/poetry/utils/extras.py
@@ -0,0 +1,69 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable
+    from collections.abc import Iterator
+    from collections.abc import Sequence
+    from typing import Mapping
+
+    from packaging.utils import NormalizedName
+    from poetry.core.packages.package import Package
+
+
+def get_extra_package_names(
+    packages: Sequence[Package],
+    extras: Mapping[str, list[str]],
+    extra_names: Sequence[str],
+) -> Iterable[str]:
+    """
+    Returns all package names required by the given extras.
+
+    :param packages: A collection of packages, such as from Repository.packages
+    :param extras: A mapping of `extras` names to lists of package names, as defined
+        in the `extras` section of `poetry.lock`.
+    :param extra_names: A list of strings specifying names of extra groups to resolve.
+    """
+    from packaging.utils import canonicalize_name
+
+    if not extra_names:
+        return []
+
+    # lookup for packages by name, faster than looping over packages repeatedly
+    packages_by_name = {package.name: package for package in packages}
+
+    # get and flatten names of packages we've opted into as extras
+    extra_package_names = [
+        canonicalize_name(extra_package_name)
+        for extra_name in extra_names
+        for extra_package_name in extras.get(extra_name, ())
+    ]
+
+    # keep a record of packages seen during recursion to avoid infinite
+    # recursion on dependency cycles
+    seen_package_names = set()
+
+    def _extra_packages(
+        package_names: Iterable[NormalizedName],
+    ) -> Iterator[NormalizedName]:
+        """Recursively find dependencies for packages names"""
+        # for each extra package name
+        for package_name in package_names:
+            # Find the actual Package object. A missing key indicates an implicit
+            # dependency (like setuptools), which should be ignored
+            package = packages_by_name.get(package_name)
+            if package:
+                if package.name not in seen_package_names:
+                    seen_package_names.add(package.name)
+                    yield package.name
+                # Recurse for dependencies
+                for dependency_package_name in _extra_packages(
+                    dependency.name
+                    for dependency in package.requires
+                    if dependency.name not in seen_package_names
+                ):
+                    seen_package_names.add(dependency_package_name)
+                    yield dependency_package_name
+
+    return _extra_packages(extra_package_names)
diff --git a/vendor/poetry/src/poetry/utils/helpers.py b/vendor/poetry/src/poetry/utils/helpers.py
new file mode 100644
index 00000000..caee1b27
--- /dev/null
+++ b/vendor/poetry/src/poetry/utils/helpers.py
@@ -0,0 +1,248 @@
+from __future__ import annotations
+
+import os
+import re
+import shutil
+import stat
+import sys
+import tempfile
+
+from contextlib import contextmanager
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
+from typing import Iterator
+from typing import Mapping
+from typing import cast
+
+from poetry.utils.constants import REQUESTS_TIMEOUT
+
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+
+    from poetry.core.packages.package import Package
+    from requests import Session
+
+    from poetry.utils.authenticator import Authenticator
+
+
+@contextmanager
+def directory(path: Path) -> Iterator[Path]:
+    cwd = Path.cwd()
+    try:
+        os.chdir(path)
+        yield path
+    finally:
+        os.chdir(cwd)
+
+
+def _on_rm_error(func: Callable[[str], None], path: str, exc_info: Exception) -> None:
+    if not os.path.exists(path):
+        return
+
+    os.chmod(path, stat.S_IWRITE)
+    func(path)
+
+
+def remove_directory(
+    path: Path | str, *args: Any, force: bool = False, **kwargs: Any
+) -> None:
+    """
+    Helper function that handles safe removal and optionally forces removal of
+    stubborn files.
+    This is particularly useful when dist files are read-only or git writes read-only
+    files on Windows.
+
+    Internally, all arguments are passed to `shutil.rmtree`.
+    """
+    if Path(path).is_symlink():
+        return os.unlink(str(path))
+
+    kwargs["onerror"] = kwargs.pop("onerror", _on_rm_error if force else None)
+    shutil.rmtree(path, *args, **kwargs)
+
+
+def merge_dicts(d1: dict[str, Any], d2: dict[str, Any]) -> None:
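+    # Recursively merge d2 into d1 in place: nested mappings are merged
+    # key by key, any other value in d2 overwrites the one in d1.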
+    for k in d2.keys():
+        if k in d1 and isinstance(d1[k], dict) and isinstance(d2[k], Mapping):
+            merge_dicts(d1[k], d2[k])
+        else:
+            d1[k] = d2[k]
+
+
+def download_file(
+    url: str,
+    dest: Path,
+    session: Authenticator | Session | None = None,
+    chunk_size: int = 1024,
+) -> None:
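+    # Stream the response to disk in chunks; percentage progress is shown via
+    # the Indicator context only when the server reports a Content-Length and
+    # the file is larger than 1 MB.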
+    import requests
+
+    from poetry.puzzle.provider import Indicator
+
+    get = requests.get if not session else session.get
+
+    response = get(url, stream=True, timeout=REQUESTS_TIMEOUT)
+    response.raise_for_status()
+
+    set_indicator = False
+    with Indicator.context() as update_context:
+        update_context(f"Downloading {url}")
+
+        if "Content-Length" in response.headers:
+            try:
+                total_size = int(response.headers["Content-Length"])
+            except ValueError:
+                total_size = 0
+
+            fetched_size = 0
+            last_percent = 0
+
+            # if less than 1 MB, we simply show that we're downloading
+            # but skip the percentage updates
+            set_indicator = total_size > 1024 * 1024
+
+        with open(dest, "wb") as f:
+            for chunk in response.iter_content(chunk_size=chunk_size):
+                if chunk:
+                    f.write(chunk)
+
+                    if set_indicator:
+                        fetched_size += len(chunk)
+                        percent = (fetched_size * 100) // total_size
+                        if percent > last_percent:
+                            last_percent = percent
+                            update_context(f"Downloading {url} {percent:3}%")
+
+
+def get_package_version_display_string(
+    package: Package, root: Path | None = None
+) -> str:
+    if package.source_type in ["file", "directory"] and root:
+        assert package.source_url is not None
+        path = Path(os.path.relpath(package.source_url, root)).as_posix()
+        return f"{package.version} {path}"
+
+    pretty_version: str = package.full_pretty_version
+    return pretty_version
+
+
+def paths_csv(paths: list[Path]) -> str:
+    return ", ".join(f'"{c!s}"' for c in paths)
+
+
+def is_dir_writable(path: Path, create: bool = False) -> bool:
+    try:
+        if not path.exists():
+            if not create:
+                return False
+            path.mkdir(parents=True, exist_ok=True)
+
+        with tempfile.TemporaryFile(dir=str(path)):
+            pass
+    except OSError:
+        return False
+    else:
+        return True
+
+
+def pluralize(count: int, word: str = "") -> str:
+    if count == 1:
+        return word
+    return word + "s"
+
+
+def safe_extra(extra: str) -> str:
+    """Convert an arbitrary string to a standard 'extra' name.
+
+    Any runs of non-alphanumeric characters are replaced with a single '_',
+    and the result is always lowercased.
+
+    See
+    https://github.com/pypa/setuptools/blob/452e13c/pkg_resources/__init__.py#L1423-L1431.
+    """
+    return re.sub("[^A-Za-z0-9.-]+", "_", extra).lower()
+
+
+def _get_win_folder_from_registry(csidl_name: str) -> str:
+    if sys.platform != "win32":
+        raise RuntimeError("Method can only be called on Windows.")
+
+    import winreg as _winreg
+
+    shell_folder_name = {
+        "CSIDL_APPDATA": "AppData",
+        "CSIDL_COMMON_APPDATA": "Common AppData",
+        "CSIDL_LOCAL_APPDATA": "Local AppData",
+        "CSIDL_PROGRAM_FILES": "Program Files",
+    }[csidl_name]
+
+    key = _winreg.OpenKey(
+        _winreg.HKEY_CURRENT_USER,
+        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders",
+    )
+    dir, type = _winreg.QueryValueEx(key, shell_folder_name)
+
+    return cast(str, dir)
+
+
+def _get_win_folder_with_ctypes(csidl_name: str) -> str:
+    if sys.platform != "win32":
+        raise RuntimeError("Method can only be called on Windows.")
+
+    import ctypes
+
+    csidl_const = {
+        "CSIDL_APPDATA": 26,
+        "CSIDL_COMMON_APPDATA": 35,
+        "CSIDL_LOCAL_APPDATA": 28,
+        "CSIDL_PROGRAM_FILES": 38,
+    }[csidl_name]
+
+    buf = ctypes.create_unicode_buffer(1024)
+    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
+
+    # Downgrade to the short path name if the folder name contains
+    # high-bit characters.
+    has_high_char = False
+    for c in buf:
+        if ord(c) > 255:
+            has_high_char = True
+            break
+    if has_high_char:
+        buf2 = ctypes.create_unicode_buffer(1024)
+        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
+            buf = buf2
+
+    return buf.value
+
+
+def get_win_folder(csidl_name: str) -> Path:
+    if sys.platform == "win32":
+        try:
+            from ctypes import windll  # noqa: F401
+
+            _get_win_folder = _get_win_folder_with_ctypes
+        except ImportError:
+            _get_win_folder = _get_win_folder_from_registry
+
+        return Path(_get_win_folder(csidl_name))
+
+    raise RuntimeError("Method can only be called on Windows.")
+
+
+def get_real_windows_path(path: str | Path) -> Path:
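+    # Paths under "Program Files\WindowsApps" (used by Store-installed
+    # Pythons) are aliases for the per-user location under
+    # "Local AppData\Microsoft\WindowsApps"; rewrite the path accordingly and
+    # resolve it so the app-execution alias is followed.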
+    program_files = get_win_folder("CSIDL_PROGRAM_FILES")
+    local_appdata = get_win_folder("CSIDL_LOCAL_APPDATA")
+
+    path = Path(
+        str(path).replace(
+            str(program_files / "WindowsApps"),
+            str(local_appdata / "Microsoft/WindowsApps"),
+        )
+    )
+
+    if path.as_posix().startswith(local_appdata.as_posix()):
+        path = path.resolve()
+
+    return path
diff --git a/vendor/poetry/src/poetry/utils/packaging_tags.py.template b/vendor/poetry/src/poetry/utils/packaging_tags.py.template
new file mode 100644
index 00000000..4e98b8b3
--- /dev/null
+++ b/vendor/poetry/src/poetry/utils/packaging_tags.py.template
@@ -0,0 +1,905 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import json
+import logging
+import platform
+import sys
+import sysconfig
+from importlib.machinery import EXTENSION_SUFFIXES
+from typing import (
+    Dict,
+    FrozenSet,
+    Iterable,
+    Iterator,
+    List,
+    Optional,
+    Sequence,
+    Tuple,
+    Union,
+    cast,
+)
+
+import collections
+import contextlib
+import functools
+import operator
+import os
+import re
+import struct
+import subprocess
+import warnings
+from typing import IO, NamedTuple
+
+
+# Python does not provide platform information at sufficient granularity to
+# identify the architecture of the running executable in some cases, so we
+# determine it dynamically by reading the information from the running
+# process. This only applies on Linux, which uses the ELF format.
+class _ELFFileHeader:
+    # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
+    class _InvalidELFFileHeader(ValueError):
+        """
+        An invalid ELF file header was found.
+        """
+
+    ELF_MAGIC_NUMBER = 0x7F454C46
+    ELFCLASS32 = 1
+    ELFCLASS64 = 2
+    ELFDATA2LSB = 1
+    ELFDATA2MSB = 2
+    EM_386 = 3
+    EM_S390 = 22
+    EM_ARM = 40
+    EM_X86_64 = 62
+    EF_ARM_ABIMASK = 0xFF000000
+    EF_ARM_ABI_VER5 = 0x05000000
+    EF_ARM_ABI_FLOAT_HARD = 0x00000400
+
+    def __init__(self, file: IO[bytes]) -> None:
+        def unpack(fmt: str) -> int:
+            try:
+                data = file.read(struct.calcsize(fmt))
+                result: Tuple[int, ...] = struct.unpack(fmt, data)
+            except struct.error:
+                raise _ELFFileHeader._InvalidELFFileHeader()
+            return result[0]
+
+        self.e_ident_magic = unpack(">I")
+        if self.e_ident_magic != self.ELF_MAGIC_NUMBER:
+            raise _ELFFileHeader._InvalidELFFileHeader()
+        self.e_ident_class = unpack("B")
+        if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}:
+            raise _ELFFileHeader._InvalidELFFileHeader()
+        self.e_ident_data = unpack("B")
+        if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}:
+            raise _ELFFileHeader._InvalidELFFileHeader()
+        self.e_ident_version = unpack("B")
+        self.e_ident_osabi = unpack("B")
+        self.e_ident_abiversion = unpack("B")
+        self.e_ident_pad = file.read(7)
+        format_h = "H"
+        format_i = "I"
+        format_q = "Q"
+        format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q
+        self.e_type = unpack(format_h)
+        self.e_machine = unpack(format_h)
+        self.e_version = unpack(format_i)
+        self.e_entry = unpack(format_p)
+        self.e_phoff = unpack(format_p)
+        self.e_shoff = unpack(format_p)
+        self.e_flags = unpack(format_i)
+        self.e_ehsize = unpack(format_h)
+        self.e_phentsize = unpack(format_h)
+        self.e_phnum = unpack(format_h)
+        self.e_shentsize = unpack(format_h)
+        self.e_shnum = unpack(format_h)
+        self.e_shstrndx = unpack(format_h)
+
+
+def _get_elf_header() -> Optional[_ELFFileHeader]:
+    try:
+        with open(sys.executable, "rb") as f:
+            elf_header = _ELFFileHeader(f)
+    except (OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader):
+        return None
+    return elf_header
+
+
+def _is_linux_armhf() -> bool:
+    # hard-float ABI can be detected from the ELF header of the running
+    # process
+    # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
+    elf_header = _get_elf_header()
+    if elf_header is None:
+        return False
+    result = elf_header.e_ident_class == elf_header.ELFCLASS32
+    result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
+    result &= elf_header.e_machine == elf_header.EM_ARM
+    result &= (
+        elf_header.e_flags & elf_header.EF_ARM_ABIMASK
+    ) == elf_header.EF_ARM_ABI_VER5
+    result &= (
+        elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD
+    ) == elf_header.EF_ARM_ABI_FLOAT_HARD
+    return result
+
+
+def _is_linux_i686() -> bool:
+    elf_header = _get_elf_header()
+    if elf_header is None:
+        return False
+    result = elf_header.e_ident_class == elf_header.ELFCLASS32
+    result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
+    result &= elf_header.e_machine == elf_header.EM_386
+    return result
+
+
+def _have_compatible_abi(arch: str) -> bool:
+    if arch == "armv7l":
+        return _is_linux_armhf()
+    if arch == "i686":
+        return _is_linux_i686()
+    return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
+
+
+# If glibc ever changes its major version, we need to know what the last
+# minor version was, so we can build the complete list of all versions.
+# For now, guess what the highest minor version might be, assume it will
+# be 50 for testing. Once this actually happens, update the dictionary
+# with the actual value.
+_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)
+
+
+class _GLibCVersion(NamedTuple):
+    major: int
+    minor: int
+
+
+def _glibc_version_string_confstr() -> Optional[str]:
+    """
+    Primary implementation of glibc_version_string using os.confstr.
+    """
+    # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
+    # to be broken or missing. This strategy is used in the standard library
+    # platform module.
+    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
+    try:
+        # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17".
+        version_string = os.confstr("CS_GNU_LIBC_VERSION")
+        assert version_string is not None
+        _, version = version_string.split()
+    except (AssertionError, AttributeError, OSError, ValueError):
+        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
+        return None
+    return version
+
+
+def _glibc_version_string_ctypes() -> Optional[str]:
+    """
+    Fallback implementation of glibc_version_string using ctypes.
+    """
+    try:
+        import ctypes
+    except ImportError:
+        return None
+
+    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
+    # manpage says, "If filename is NULL, then the returned handle is for the
+    # main program". This way we can let the linker do the work to figure out
+    # which libc our process is actually using.
+    #
+    # We must also handle the special case where the executable is not a
+    # dynamically linked executable. This can occur when using musl libc,
+    # for example. In this situation, dlopen() will error, leading to an
+    # OSError. Interestingly, at least in the case of musl, there is no
+    # errno set on the OSError. The single string argument used to construct
+    # OSError comes from libc itself and is therefore not portable to
+    # hard code here. In any case, failure to call dlopen() means we
+    # can proceed, so we bail on our attempt.
+    try:
+        process_namespace = ctypes.CDLL(None)
+    except OSError:
+        return None
+
+    try:
+        gnu_get_libc_version = process_namespace.gnu_get_libc_version
+    except AttributeError:
+        # Symbol doesn't exist -> therefore, we are not linked to
+        # glibc.
+        return None
+
+    # Call gnu_get_libc_version, which returns a string like "2.5"
+    gnu_get_libc_version.restype = ctypes.c_char_p
+    version_str: str = gnu_get_libc_version()
+    # py2 / py3 compatibility:
+    if not isinstance(version_str, str):
+        version_str = version_str.decode("ascii")
+
+    return version_str
+
+
+def _glibc_version_string() -> Optional[str]:
+    """Returns glibc version string, or None if not using glibc."""
+    return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
+
+
+def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
+    """Parse glibc version.
+
+    We use a regexp instead of str.split because we want to discard any
+    random junk that might come after the minor version -- this might happen
+    in patched/forked versions of glibc (e.g. Linaro's version of glibc
+    uses version strings like "2.20-2014.11"). See gh-3588.
+    """
+    m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str)
+    if not m:
+        warnings.warn(
+            "Expected glibc version with 2 components major.minor,"
+            " got: %s" % version_str,
+            RuntimeWarning,
+        )
+        return -1, -1
+    return int(m.group("major")), int(m.group("minor"))
+
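+# Illustrative doctest-style examples (not part of the upstream packaging
+# module): the regexp keeps only the leading major.minor pair, so vendor
+# suffixes such as Linaro's are discarded.
+#
+#     >>> _parse_glibc_version("2.20-2014.11")
+#     (2, 20)
+#     >>> _parse_glibc_version("2.31")
+#     (2, 31)
+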
+
+@functools.lru_cache()
+def _get_glibc_version() -> Tuple[int, int]:
+    version_str = _glibc_version_string()
+    if version_str is None:
+        return (-1, -1)
+    return _parse_glibc_version(version_str)
+
+
+# From PEP 513, PEP 600
+def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
+    sys_glibc = _get_glibc_version()
+    if sys_glibc < version:
+        return False
+    # Check for presence of _manylinux module.
+    try:
+        import _manylinux  # noqa
+    except ImportError:
+        return True
+    if hasattr(_manylinux, "manylinux_compatible"):
+        result = _manylinux.manylinux_compatible(version[0], version[1], arch)
+        if result is not None:
+            return bool(result)
+        return True
+    if version == _GLibCVersion(2, 5):
+        if hasattr(_manylinux, "manylinux1_compatible"):
+            return bool(_manylinux.manylinux1_compatible)
+    if version == _GLibCVersion(2, 12):
+        if hasattr(_manylinux, "manylinux2010_compatible"):
+            return bool(_manylinux.manylinux2010_compatible)
+    if version == _GLibCVersion(2, 17):
+        if hasattr(_manylinux, "manylinux2014_compatible"):
+            return bool(_manylinux.manylinux2014_compatible)
+    return True
+
+
+_LEGACY_MANYLINUX_MAP = {
+    # CentOS 7 w/ glibc 2.17 (PEP 599)
+    (2, 17): "manylinux2014",
+    # CentOS 6 w/ glibc 2.12 (PEP 571)
+    (2, 12): "manylinux2010",
+    # CentOS 5 w/ glibc 2.5 (PEP 513)
+    (2, 5): "manylinux1",
+}
+
+
+def _manylinux_platform_tags(linux: str, arch: str) -> Iterator[str]:
+    if not _have_compatible_abi(arch):
+        return
+    # Oldest glibc to be supported regardless of architecture is (2, 17).
+    too_old_glibc2 = _GLibCVersion(2, 16)
+    if arch in {"x86_64", "i686"}:
+        # On x86/i686 also oldest glibc to be supported is (2, 5).
+        too_old_glibc2 = _GLibCVersion(2, 4)
+    current_glibc = _GLibCVersion(*_get_glibc_version())
+    glibc_max_list = [current_glibc]
+    # We can assume compatibility across glibc major versions.
+    # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
+    #
+    # Build a list of maximum glibc versions so that we can
+    # output the canonical list of all glibc from current_glibc
+    # down to too_old_glibc2, including all intermediary versions.
+    for glibc_major in range(current_glibc.major - 1, 1, -1):
+        glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
+        glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
+    for glibc_max in glibc_max_list:
+        if glibc_max.major == too_old_glibc2.major:
+            min_minor = too_old_glibc2.minor
+        else:
+            # For other glibc major versions oldest supported is (x, 0).
+            min_minor = -1
+        for glibc_minor in range(glibc_max.minor, min_minor, -1):
+            glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
+            tag = "manylinux_{}_{}".format(*glibc_version)
+            if _is_compatible(tag, arch, glibc_version):
+                yield linux.replace("linux", tag)
+            # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
+            if glibc_version in _LEGACY_MANYLINUX_MAP:
+                legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
+                if _is_compatible(legacy_tag, arch, glibc_version):
+                    yield linux.replace("linux", legacy_tag)
+
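+# Illustrative example (hypothetical runtime, not part of the upstream
+# module): on an x86_64 host whose interpreter links glibc 2.31, the
+# generator above would yield manylinux_2_31_x86_64 down through
+# manylinux_2_5_x86_64, interleaving the legacy aliases
+# manylinux2014_x86_64 (at glibc 2.17), manylinux2010_x86_64 (2.12) and
+# manylinux1_x86_64 (2.5).
+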
+
+def _read_unpacked(f: IO[bytes], fmt: str) -> Tuple[int, ...]:
+    return struct.unpack(fmt, f.read(struct.calcsize(fmt)))
+
+
+def _parse_ld_musl_from_elf(f: IO[bytes]) -> Optional[str]:
+    """Detect musl libc location by parsing the Python executable.
+
+    Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
+    ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
+    """
+    f.seek(0)
+    try:
+        ident = _read_unpacked(f, "16B")
+    except struct.error:
+        return None
+    if ident[:4] != tuple(b"\x7fELF"):  # Invalid magic, not ELF.
+        return None
+    f.seek(struct.calcsize("HHI"), 1)  # Skip file type, machine, and version.
+
+    try:
+        # e_fmt: Format for program header.
+        # p_fmt: Format for section header.
+        # p_idx: Indexes to find p_type, p_offset, and p_filesz.
+        e_fmt, p_fmt, p_idx = {
+            1: ("IIIIHHH", "IIIIIIII", (0, 1, 4)),  # 32-bit.
+            2: ("QQQIHHH", "IIQQQQQQ", (0, 2, 5)),  # 64-bit.
+        }[ident[4]]
+    except KeyError:
+        return None
+    else:
+        p_get = operator.itemgetter(*p_idx)
+
+    # Find the interpreter section and return its content.
+    try:
+        _, e_phoff, _, _, _, e_phentsize, e_phnum = _read_unpacked(f, e_fmt)
+    except struct.error:
+        return None
+    for i in range(e_phnum + 1):
+        f.seek(e_phoff + e_phentsize * i)
+        try:
+            p_type, p_offset, p_filesz = p_get(_read_unpacked(f, p_fmt))
+        except struct.error:
+            return None
+        if p_type != 3:  # Not PT_INTERP.
+            continue
+        f.seek(p_offset)
+        interpreter = os.fsdecode(f.read(p_filesz)).strip("\0")
+        if "musl" not in interpreter:
+            return None
+        return interpreter
+    return None
+
+
+class _MuslVersion(NamedTuple):
+    major: int
+    minor: int
+
+
+def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
+    lines = [n for n in (n.strip() for n in output.splitlines()) if n]
+    if len(lines) < 2 or lines[0][:4] != "musl":
+        return None
+    m = re.match(r"Version (\d+)\.(\d+)", lines[1])
+    if not m:
+        return None
+    return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
+
+
+@functools.lru_cache()
+def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
+    """Detect currently-running musl runtime version.
+
+    This is done by checking the specified executable's dynamic linking
+    information, and invoking the loader to parse its output for a version
+    string. If the loader is musl, the output would be something like::
+
+        musl libc (x86_64)
+        Version 1.2.2
+        Dynamic Program Loader
+    """
+    with contextlib.ExitStack() as stack:
+        try:
+            f = stack.enter_context(open(executable, "rb"))
+        except OSError:
+            return None
+        ld = _parse_ld_musl_from_elf(f)
+    if not ld:
+        return None
+    proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True)
+    return _parse_musl_version(proc.stderr)
+
+
+def _musllinux_platform_tags(arch: str) -> Iterator[str]:
+    """Generate musllinux tags compatible to the current platform.
+
+    :param arch: Should be the part of platform tag after the ``linux_``
+        prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a
+        prerequisite for the current platform to be musllinux-compatible.
+
+    :returns: An iterator of compatible musllinux tags.
+    """
+    sys_musl = _get_musl_version(sys.executable)
+    if sys_musl is None:  # Python not dynamically linked against musl.
+        return
+    for minor in range(sys_musl.minor, -1, -1):
+        yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
+
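+# Illustrative example (hypothetical runtime, not part of the upstream
+# module): with musl 1.2 detected on x86_64, the generator above would
+# yield musllinux_1_2_x86_64, musllinux_1_1_x86_64 and musllinux_1_0_x86_64.
+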
+
+logger = logging.getLogger(__name__)
+
+PythonVersion = Sequence[int]
+MacVersion = Tuple[int, int]
+
+INTERPRETER_SHORT_NAMES: Dict[str, str] = {
+    "python": "py",  # Generic.
+    "cpython": "cp",
+    "pypy": "pp",
+    "ironpython": "ip",
+    "jython": "jy",
+}
+
+
+_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32
+
+
+class Tag:
+    """
+    A representation of the tag triple for a wheel.
+
+    Instances are considered immutable and thus are hashable. Equality checking
+    is also supported.
+    """
+
+    __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]
+
+    def __init__(self, interpreter: str, abi: str, platform: str) -> None:
+        self._interpreter = interpreter.lower()
+        self._abi = abi.lower()
+        self._platform = platform.lower()
+        # The __hash__ of every single element in a Set[Tag] will be evaluated each time
+        # that a set calls its `.disjoint()` method, which may be called hundreds of
+        # times when scanning a page of links for packages with tags matching that
+        # Set[Tag]. Pre-computing the value here produces significant speedups for
+        # downstream consumers.
+        self._hash = hash((self._interpreter, self._abi, self._platform))
+
+    @property
+    def interpreter(self) -> str:
+        return self._interpreter
+
+    @property
+    def abi(self) -> str:
+        return self._abi
+
+    @property
+    def platform(self) -> str:
+        return self._platform
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Tag):
+            return NotImplemented
+
+        return (
+            (self._hash == other._hash)  # Short-circuit ASAP for perf reasons.
+            and (self._platform == other._platform)
+            and (self._abi == other._abi)
+            and (self._interpreter == other._interpreter)
+        )
+
+    def __hash__(self) -> int:
+        return self._hash
+
+    def __str__(self) -> str:
+        return f"{self._interpreter}-{self._abi}-{self._platform}"
+
+    def __repr__(self) -> str:
+        return f"<{self} @ {id(self)}>"
+
+
+def parse_tag(tag: str) -> FrozenSet[Tag]:
+    """
+    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.
+
+    Returning a set is required due to the possibility that the tag is a
+    compressed tag set.
+    """
+    tags = set()
+    interpreters, abis, platforms = tag.split("-")
+    for interpreter in interpreters.split("."):
+        for abi in abis.split("."):
+            for platform_ in platforms.split("."):
+                tags.add(Tag(interpreter, abi, platform_))
+    return frozenset(tags)
+
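+# Illustrative doctest-style example (not part of the upstream module):
+# a compressed tag set expands to one Tag per combination.
+#
+#     >>> sorted(str(t) for t in parse_tag("py2.py3-none-any"))
+#     ['py2-none-any', 'py3-none-any']
+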
+
+def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
+    value = sysconfig.get_config_var(name)
+    if value is None and warn:
+        logger.debug(
+            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
+        )
+    return value
+
+
+def _normalize_string(string: str) -> str:
+    return string.replace(".", "_").replace("-", "_")
+
+
+def _abi3_applies(python_version: PythonVersion) -> bool:
+    """
+    Determine if the Python version supports abi3.
+
+    PEP 384 was first implemented in Python 3.2.
+    """
+    return len(python_version) > 1 and tuple(python_version) >= (3, 2)
+
+
+def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
+    py_version = tuple(py_version)  # To allow for version comparison.
+    abis = []
+    version = _version_nodot(py_version[:2])
+    debug = pymalloc = ucs4 = ""
+    with_debug = _get_config_var("Py_DEBUG", warn)
+    has_refcount = hasattr(sys, "gettotalrefcount")
+    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
+    # extension modules is the best option.
+    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
+    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
+    if with_debug or (with_debug is None and (has_refcount or has_ext)):
+        debug = "d"
+    if py_version < (3, 8):
+        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
+        if with_pymalloc or with_pymalloc is None:
+            pymalloc = "m"
+        if py_version < (3, 3):
+            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
+            if unicode_size == 4 or (
+                unicode_size is None and sys.maxunicode == 0x10FFFF
+            ):
+                ucs4 = "u"
+    elif debug:
+        # Debug builds can also load "normal" extension modules.
+        # We can also assume no UCS-4 or pymalloc requirement.
+        abis.append(f"cp{version}")
+    abis.insert(
+        0,
+        "cp{version}{debug}{pymalloc}{ucs4}".format(
+            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
+        ),
+    )
+    return abis
+
+
+def cpython_tags(
+    python_version: Optional[PythonVersion] = None,
+    abis: Optional[Iterable[str]] = None,
+    platforms: Optional[Iterable[str]] = None,
+    *,
+    warn: bool = False,
+) -> Iterator[Tag]:
+    """
+    Yields the tags for a CPython interpreter.
+
+    The tags consist of:
+    - cp<python_version>-<abi>-<platform>
+    - cp<python_version>-abi3-<platform>
+    - cp<python_version>-none-<platform>
+    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.
+
+    If python_version only specifies a major version then user-provided ABIs and
+    the 'none' ABI will be used.
+
+    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
+    their normal position and not at the beginning.
+    """
+    if not python_version:
+        python_version = sys.version_info[:2]
+
+    interpreter = f"cp{_version_nodot(python_version[:2])}"
+
+    if abis is None:
+        if len(python_version) > 1:
+            abis = _cpython_abis(python_version, warn)
+        else:
+            abis = []
+    abis = list(abis)
+    # 'abi3' and 'none' are explicitly handled later.
+    for explicit_abi in ("abi3", "none"):
+        try:
+            abis.remove(explicit_abi)
+        except ValueError:
+            pass
+
+    platforms = list(platforms or platform_tags())
+    for abi in abis:
+        for platform_ in platforms:
+            yield Tag(interpreter, abi, platform_)
+    if _abi3_applies(python_version):
+        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
+    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)
+
+    if _abi3_applies(python_version):
+        for minor_version in range(python_version[1] - 1, 1, -1):
+            for platform_ in platforms:
+                interpreter = "cp{version}".format(
+                    version=_version_nodot((python_version[0], minor_version))
+                )
+                yield Tag(interpreter, "abi3", platform_)
+
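+# Illustrative example (hypothetical inputs, not part of the upstream
+# module): restricting the platform list makes the priority order visible.
+# For a non-debug CPython 3.10 build the call below would yield
+# cp310-cp310-manylinux_2_17_x86_64 first, then cp310-abi3-..., then
+# cp310-none-..., and finally the abi3 tags of older interpreters down to
+# cp32.
+#
+#     tags = cpython_tags((3, 10), platforms=["manylinux_2_17_x86_64"])
+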
+
+def _generic_abi() -> Iterator[str]:
+    abi = sysconfig.get_config_var("SOABI")
+    if abi:
+        yield _normalize_string(abi)
+
+
+def generic_tags(
+    interpreter: Optional[str] = None,
+    abis: Optional[Iterable[str]] = None,
+    platforms: Optional[Iterable[str]] = None,
+    *,
+    warn: bool = False,
+) -> Iterator[Tag]:
+    """
+    Yields the tags for a generic interpreter.
+
+    The tags consist of:
+    - <interpreter>-<abi>-<platform>
+
+    The "none" ABI will be added if it was not explicitly provided.
+    """
+    if not interpreter:
+        interp_name = interpreter_name()
+        interp_version = interpreter_version(warn=warn)
+        interpreter = "".join([interp_name, interp_version])
+    if abis is None:
+        abis = _generic_abi()
+    platforms = list(platforms or platform_tags())
+    abis = list(abis)
+    if "none" not in abis:
+        abis.append("none")
+    for abi in abis:
+        for platform_ in platforms:
+            yield Tag(interpreter, abi, platform_)
+
+
+def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
+    """
+    Yields Python versions in descending order.
+
+    After the latest version, the major-only version will be yielded, and then
+    all previous versions of that major version.
+    """
+    if len(py_version) > 1:
+        yield f"py{_version_nodot(py_version[:2])}"
+    yield f"py{py_version[0]}"
+    if len(py_version) > 1:
+        for minor in range(py_version[1] - 1, -1, -1):
+            yield f"py{_version_nodot((py_version[0], minor))}"
+
+
+def compatible_tags(
+    python_version: Optional[PythonVersion] = None,
+    interpreter: Optional[str] = None,
+    platforms: Optional[Iterable[str]] = None,
+) -> Iterator[Tag]:
+    """
+    Yields the sequence of tags that are compatible with a specific version of Python.
+
+    The tags consist of:
+    - py*-none-<platform>
+    - <interpreter>-none-any  # ... if `interpreter` is provided.
+    - py*-none-any
+    """
+    if not python_version:
+        python_version = sys.version_info[:2]
+    platforms = list(platforms or platform_tags())
+    for version in _py_interpreter_range(python_version):
+        for platform_ in platforms:
+            yield Tag(version, "none", platform_)
+    if interpreter:
+        yield Tag(interpreter, "none", "any")
+    for version in _py_interpreter_range(python_version):
+        yield Tag(version, "none", "any")
+
+
+def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
+    if not is_32bit:
+        return arch
+
+    if arch.startswith("ppc"):
+        return "ppc"
+
+    return "i386"
+
+
+def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
+    formats = [cpu_arch]
+    if cpu_arch == "x86_64":
+        if version < (10, 4):
+            return []
+        formats.extend(["intel", "fat64", "fat32"])
+
+    elif cpu_arch == "i386":
+        if version < (10, 4):
+            return []
+        formats.extend(["intel", "fat32", "fat"])
+
+    elif cpu_arch == "ppc64":
+        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
+        if version > (10, 5) or version < (10, 4):
+            return []
+        formats.append("fat64")
+
+    elif cpu_arch == "ppc":
+        if version > (10, 6):
+            return []
+        formats.extend(["fat32", "fat"])
+
+    if cpu_arch in {"arm64", "x86_64"}:
+        formats.append("universal2")
+
+    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
+        formats.append("universal")
+
+    return formats
+
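+# Illustrative doctest-style example (not part of the upstream module):
+#
+#     >>> _mac_binary_formats((10, 15), "x86_64")
+#     ['x86_64', 'intel', 'fat64', 'fat32', 'universal2', 'universal']
+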
+
+def mac_platforms(
+    version: Optional[MacVersion] = None, arch: Optional[str] = None
+) -> Iterator[str]:
+    """
+    Yields the platform tags for a macOS system.
+
+    The `version` parameter is a two-item tuple specifying the macOS version to
+    generate platform tags for. The `arch` parameter is the CPU architecture to
+    generate platform tags for. Both parameters default to the appropriate value
+    for the current system.
+    """
+    version_str, _, cpu_arch = platform.mac_ver()
+    if version is None:
+        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
+    else:
+        version = version
+    if arch is None:
+        arch = _mac_arch(cpu_arch)
+    else:
+        arch = arch
+
+    if (10, 0) <= version and version < (11, 0):
+        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
+        # "minor" version number.  The major version was always 10.
+        for minor_version in range(version[1], -1, -1):
+            compat_version = 10, minor_version
+            binary_formats = _mac_binary_formats(compat_version, arch)
+            for binary_format in binary_formats:
+                yield "macosx_{major}_{minor}_{binary_format}".format(
+                    major=10, minor=minor_version, binary_format=binary_format
+                )
+
+    if version >= (11, 0):
+        # Starting with Mac OS 11, each yearly release bumps the major version
+        # number.  The minor versions are now the midyear updates.
+        for major_version in range(version[0], 10, -1):
+            compat_version = major_version, 0
+            binary_formats = _mac_binary_formats(compat_version, arch)
+            for binary_format in binary_formats:
+                yield "macosx_{major}_{minor}_{binary_format}".format(
+                    major=major_version, minor=0, binary_format=binary_format
+                )
+
+    if version >= (11, 0):
+        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
+        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
+        # releases exist.
+        #
+        # However, the "universal2" binary format can have a
+        # macOS version earlier than 11.0 when the x86_64 part of the binary supports
+        # that version of macOS.
+        if arch == "x86_64":
+            for minor_version in range(16, 3, -1):
+                compat_version = 10, minor_version
+                binary_formats = _mac_binary_formats(compat_version, arch)
+                for binary_format in binary_formats:
+                    yield "macosx_{major}_{minor}_{binary_format}".format(
+                        major=compat_version[0],
+                        minor=compat_version[1],
+                        binary_format=binary_format,
+                    )
+        else:
+            for minor_version in range(16, 3, -1):
+                compat_version = 10, minor_version
+                binary_format = "universal2"
+                yield "macosx_{major}_{minor}_{binary_format}".format(
+                    major=compat_version[0],
+                    minor=compat_version[1],
+                    binary_format=binary_format,
+                )
+
+
+def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
+    linux = _normalize_string(sysconfig.get_platform())
+    if is_32bit:
+        if linux == "linux_x86_64":
+            linux = "linux_i686"
+        elif linux == "linux_aarch64":
+            linux = "linux_armv7l"
+    _, arch = linux.split("_", 1)
+    yield from _manylinux_platform_tags(linux, arch)
+    yield from _musllinux_platform_tags(arch)
+    yield linux
+
+
+def _generic_platforms() -> Iterator[str]:
+    yield _normalize_string(sysconfig.get_platform())
+
+
+def platform_tags() -> Iterator[str]:
+    """
+    Provides the platform tags for this installation.
+    """
+    if platform.system() == "Darwin":
+        return mac_platforms()
+    elif platform.system() == "Linux":
+        return _linux_platforms()
+    else:
+        return _generic_platforms()
+
+
+def interpreter_name() -> str:
+    """
+    Returns the name of the running interpreter.
+    """
+    name = sys.implementation.name
+    return INTERPRETER_SHORT_NAMES.get(name) or name
+
+
+def interpreter_version(*, warn: bool = False) -> str:
+    """
+    Returns the version of the running interpreter.
+    """
+    version = _get_config_var("py_version_nodot", warn=warn)
+    if version:
+        version = str(version)
+    else:
+        version = _version_nodot(sys.version_info[:2])
+    return version
+
+
+def _version_nodot(version: PythonVersion) -> str:
+    return "".join(map(str, version))
+
+
+def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
+    """
+    Returns the sequence of tag triples for the running interpreter.
+
+    The order of the sequence corresponds to priority order for the
+    interpreter, from most to least important.
+    """
+
+    interp_name = interpreter_name()
+    if interp_name == "cp":
+        yield from cpython_tags(warn=warn)
+    else:
+        yield from generic_tags()
+
+    if interp_name == "pp":
+        yield from compatible_tags(interpreter="pp3")
+    else:
+        yield from compatible_tags()
+
+
+print(
+    json.dumps([(t.interpreter, t.abi, t.platform) for t in sys_tags()])
+)
diff --git a/vendor/poetry/src/poetry/utils/password_manager.py b/vendor/poetry/src/poetry/utils/password_manager.py
new file mode 100644
index 00000000..e687711b
--- /dev/null
+++ b/vendor/poetry/src/poetry/utils/password_manager.py
@@ -0,0 +1,235 @@
+from __future__ import annotations
+
+import dataclasses
+import logging
+
+from contextlib import suppress
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from poetry.config.config import Config
+
+logger = logging.getLogger(__name__)
+
+
+class PasswordManagerError(Exception):
+    pass
+
+
+class PoetryKeyringError(Exception):
+    pass
+
+
+@dataclasses.dataclass
+class HTTPAuthCredential:
+    username: str | None = dataclasses.field(default=None)
+    password: str | None = dataclasses.field(default=None)
+
+
+class PoetryKeyring:
+    def __init__(self, namespace: str) -> None:
+        self._namespace = namespace
+        self._is_available = True
+
+        self._check()
+
+    def is_available(self) -> bool:
+        return self._is_available
+
+    def get_credential(
+        self, *names: str, username: str | None = None
+    ) -> HTTPAuthCredential:
+        default = HTTPAuthCredential(username=username, password=None)
+
+        if not self.is_available():
+            return default
+
+        import keyring
+
+        for name in names:
+            credential = keyring.get_credential(name, username)
+            if credential:
+                return HTTPAuthCredential(
+                    username=credential.username, password=credential.password
+                )
+
+        return default
+
+    def get_password(self, name: str, username: str) -> str | None:
+        if not self.is_available():
+            return None
+
+        import keyring
+        import keyring.errors
+
+        name = self.get_entry_name(name)
+
+        try:
+            return keyring.get_password(name, username)
+        except (RuntimeError, keyring.errors.KeyringError):
+            raise PoetryKeyringError(
+                f"Unable to retrieve the password for {name} from the key ring"
+            )
+
+    def set_password(self, name: str, username: str, password: str) -> None:
+        if not self.is_available():
+            return
+
+        import keyring
+        import keyring.errors
+
+        name = self.get_entry_name(name)
+
+        try:
+            keyring.set_password(name, username, password)
+        except (RuntimeError, keyring.errors.KeyringError) as e:
+            raise PoetryKeyringError(
+                f"Unable to store the password for {name} in the key ring: {e}"
+            )
+
+    def delete_password(self, name: str, username: str) -> None:
+        if not self.is_available():
+            return
+
+        import keyring.errors
+
+        name = self.get_entry_name(name)
+
+        try:
+            keyring.delete_password(name, username)
+        except (RuntimeError, keyring.errors.KeyringError):
+            raise PoetryKeyringError(
+                f"Unable to delete the password for {name} from the key ring"
+            )
+
+    def get_entry_name(self, name: str) -> str:
+        return f"{self._namespace}-{name}"
+
+    def _check(self) -> None:
+        try:
+            import keyring
+        except Exception as e:
+            logger.debug(f"An error occurred while importing keyring: {e!s}")
+            self._is_available = False
+
+            return
+
+        backend = keyring.get_keyring()
+        name = backend.name.split(" ")[0]
+        if name in ("fail", "null"):
+            logger.debug("No suitable keyring backend found")
+            self._is_available = False
+        elif "plaintext" in backend.name.lower():
+            logger.debug("Only a plaintext keyring backend is available. Not using it.")
+            self._is_available = False
+        elif name == "chainer":
+            try:
+                import keyring.backend
+
+                backends = keyring.backend.get_all_keyring()
+
+                self._is_available = any(
+                    b.name.split(" ")[0] not in ["chainer", "fail", "null"]
+                    and "plaintext" not in b.name.lower()
+                    for b in backends
+                )
+            except Exception:
+                self._is_available = False
+
+        if not self._is_available:
+            logger.debug("No suitable keyring backends were found")
+
+
+class PasswordManager:
+    def __init__(self, config: Config) -> None:
+        self._config = config
+        self._keyring: PoetryKeyring | None = None
+
+    @property
+    def keyring(self) -> PoetryKeyring:
+        if self._keyring is None:
+            self._keyring = PoetryKeyring("poetry-repository")
+
+            if not self._keyring.is_available():
+                logger.debug(
+                    "Keyring is not available, credentials will be stored and "
+                    "retrieved from configuration files as plaintext."
+                )
+
+        return self._keyring
+
+    @staticmethod
+    def warn_plaintext_credentials_stored() -> None:
+        logger.warning("Using a plaintext file to store credentials")
+
+    def set_pypi_token(self, name: str, token: str) -> None:
+        if not self.keyring.is_available():
+            self.warn_plaintext_credentials_stored()
+            self._config.auth_config_source.add_property(f"pypi-token.{name}", token)
+        else:
+            self.keyring.set_password(name, "__token__", token)
+
+    def get_pypi_token(self, repo_name: str) -> str | None:
+        """Get PyPi token.
+
+        First checks the environment variables for a token,
+        then the configured username/password and the
+        available keyring.
+
+        :param repo_name:  Name of repository.
+        :return: Returns a token as a string if found, otherwise None.
+        """
+        token: str | None = self._config.get(f"pypi-token.{repo_name}")
+        if token:
+            return token
+
+        return self.keyring.get_password(repo_name, "__token__")
+
+    def delete_pypi_token(self, name: str) -> None:
+        if not self.keyring.is_available():
+            return self._config.auth_config_source.remove_property(f"pypi-token.{name}")
+
+        self.keyring.delete_password(name, "__token__")
+
+    def get_http_auth(self, name: str) -> dict[str, str | None] | None:
+        auth = self._config.get(f"http-basic.{name}")
+        if not auth:
+            username = self._config.get(f"http-basic.{name}.username")
+            password = self._config.get(f"http-basic.{name}.password")
+            if not username and not password:
+                return None
+        else:
+            username, password = auth["username"], auth.get("password")
+            if password is None:
+                password = self.keyring.get_password(name, username)
+
+        return {
+            "username": username,
+            "password": password,
+        }
+
+    def set_http_password(self, name: str, username: str, password: str) -> None:
+        auth = {"username": username}
+
+        if not self.keyring.is_available():
+            self.warn_plaintext_credentials_stored()
+            auth["password"] = password
+        else:
+            self.keyring.set_password(name, username, password)
+
+        self._config.auth_config_source.add_property(f"http-basic.{name}", auth)
+
+    def delete_http_password(self, name: str) -> None:
+        auth = self.get_http_auth(name)
+        if not auth:
+            return
+
+        username = auth.get("username")
+        if username is None:
+            return
+
+        with suppress(PoetryKeyringError):
+            self.keyring.delete_password(name, username)
+
+        self._config.auth_config_source.remove_property(f"http-basic.{name}")
diff --git a/vendor/poetry/src/poetry/utils/patterns.py b/vendor/poetry/src/poetry/utils/patterns.py
new file mode 100644
index 00000000..bf88e51b
--- /dev/null
+++ b/vendor/poetry/src/poetry/utils/patterns.py
@@ -0,0 +1,19 @@
+from __future__ import annotations
+
+import re
+
+
+wheel_file_re = re.compile(
+    r"^(?P(?P.+?)-(?P\d.*?))"
+    r"(-(?P\d.*?))?"
+    r"-(?P.+?)"
+    r"-(?P.+?)"
+    r"-(?P.+?)"
+    r"\.whl|\.dist-info$",
+    re.VERBOSE,
+)
+
+sdist_file_re = re.compile(
+    r"^(?P(?P.+?)-(?P\d.*?))"
+    r"(\.sdist)?\.(?P(zip|tar(\.(gz|bz2|xz|Z))?))$"
+)
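+
+
+# Illustrative doctest-style example (not part of this module): the named
+# groups expose the usual wheel filename components.
+#
+#     >>> m = wheel_file_re.match("poetry-1.2.0-py3-none-any.whl")
+#     >>> (m.group("name"), m.group("ver"), m.group("pyver"),
+#     ...  m.group("abi"), m.group("plat"))
+#     ('poetry', '1.2.0', 'py3', 'none', 'any')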
diff --git a/vendor/poetry/src/poetry/utils/pip.py b/vendor/poetry/src/poetry/utils/pip.py
new file mode 100644
index 00000000..24f9cbac
--- /dev/null
+++ b/vendor/poetry/src/poetry/utils/pip.py
@@ -0,0 +1,51 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.exceptions import PoetryException
+from poetry.utils.env import EnvCommandError
+
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from poetry.utils.env import Env
+
+
+def pip_install(
+    path: Path,
+    environment: Env,
+    editable: bool = False,
+    deps: bool = False,
+    upgrade: bool = False,
+) -> int | str:
+    is_wheel = path.suffix == ".whl"
+
+    # We disable version check here as we are already pinning to version available in
+    # either the virtual environment or the virtualenv package embedded wheel. Version
+    # checks are a wasteful network call that adds a lot of wait time when installing a
+    # lot of packages.
+    args = ["install", "--disable-pip-version-check", "--prefix", str(environment.path)]
+
+    if not is_wheel and not editable:
+        args.insert(1, "--use-pep517")
+
+    if upgrade:
+        args.append("--upgrade")
+
+    if not deps:
+        args.append("--no-deps")
+
+    if editable:
+        if not path.is_dir():
+            raise PoetryException(
+                "Cannot install non directory dependencies in editable mode"
+            )
+        args.append("-e")
+
+    args.append(str(path))
+
+    try:
+        return environment.run_pip(*args)
+    except EnvCommandError as e:
+        raise PoetryException(f"Failed to install {path.as_posix()}") from e
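+
+
+# Illustrative usage sketch (hypothetical objects, not part of this module):
+# installing a built wheel into an existing poetry-managed environment.
+#
+#     from pathlib import Path
+#     from poetry.utils.env import EnvManager
+#
+#     env = EnvManager(poetry).get()  # `poetry` being a Poetry instance
+#     pip_install(Path("dist/demo-0.1.0-py3-none-any.whl"), env)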
diff --git a/vendor/poetry/src/poetry/utils/setup_reader.py b/vendor/poetry/src/poetry/utils/setup_reader.py
new file mode 100644
index 00000000..8676b1ee
--- /dev/null
+++ b/vendor/poetry/src/poetry/utils/setup_reader.py
@@ -0,0 +1,367 @@
+from __future__ import annotations
+
+import ast
+
+from configparser import ConfigParser
+from pathlib import Path
+from typing import Any
+
+from poetry.core.semver.version import Version
+
+
+class SetupReader:
+    """
+    Class that reads a setup.py file without executing it.
+    """
+
+    DEFAULT: dict[str, Any] = {
+        "name": None,
+        "version": None,
+        "install_requires": [],
+        "extras_require": {},
+        "python_requires": None,
+    }
+
+    FILES = ["setup.py", "setup.cfg"]
+
+    @classmethod
+    def read_from_directory(cls, directory: str | Path) -> dict[str, Any]:
+        if isinstance(directory, str):
+            directory = Path(directory)
+
+        result = cls.DEFAULT.copy()
+        for filename in cls.FILES:
+            filepath = directory / filename
+            if not filepath.exists():
+                continue
+
+            read_file_func = getattr(cls(), "read_" + filename.replace(".", "_"))
+            new_result = read_file_func(filepath)
+
+            for key in result.keys():
+                if new_result[key]:
+                    result[key] = new_result[key]
+
+        return result
+
+    def read_setup_py(self, filepath: str | Path) -> dict[str, Any]:
+        if isinstance(filepath, str):
+            filepath = Path(filepath)
+
+        with filepath.open(encoding="utf-8") as f:
+            content = f.read()
+
+        result: dict[str, Any] = {}
+
+        body = ast.parse(content).body
+
+        setup_call = self._find_setup_call(body)
+        if setup_call is None:
+            return self.DEFAULT
+
+        # Inspecting keyword arguments
+        call, body = setup_call
+        result["name"] = self._find_single_string(call, body, "name")
+        result["version"] = self._find_single_string(call, body, "version")
+        result["install_requires"] = self._find_install_requires(call, body)
+        result["extras_require"] = self._find_extras_require(call, body)
+        result["python_requires"] = self._find_single_string(
+            call, body, "python_requires"
+        )
+
+        return result
+
+    def read_setup_cfg(self, filepath: str | Path) -> dict[str, Any]:
+        parser = ConfigParser()
+
+        parser.read(str(filepath))
+
+        name = None
+        version = None
+        if parser.has_option("metadata", "name"):
+            name = parser.get("metadata", "name")
+
+        if parser.has_option("metadata", "version"):
+            version = Version.parse(parser.get("metadata", "version")).text
+
+        install_requires = []
+        extras_require: dict[str, list[str]] = {}
+        python_requires = None
+        if parser.has_section("options"):
+            if parser.has_option("options", "install_requires"):
+                for dep in parser.get("options", "install_requires").split("\n"):
+                    dep = dep.strip()
+                    if not dep:
+                        continue
+
+                    install_requires.append(dep)
+
+            if parser.has_option("options", "python_requires"):
+                python_requires = parser.get("options", "python_requires")
+
+        if parser.has_section("options.extras_require"):
+            for group in parser.options("options.extras_require"):
+                extras_require[group] = []
+                deps = parser.get("options.extras_require", group)
+                for dep in deps.split("\n"):
+                    dep = dep.strip()
+                    if not dep:
+                        continue
+
+                    extras_require[group].append(dep)
+
+        return {
+            "name": name,
+            "version": version,
+            "install_requires": install_requires,
+            "extras_require": extras_require,
+            "python_requires": python_requires,
+        }
+
+    def _find_setup_call(
+        self, elements: list[ast.stmt]
+    ) -> tuple[ast.Call, list[ast.stmt]] | None:
+        funcdefs: list[ast.stmt] = []
+        for i, element in enumerate(elements):
+            if isinstance(element, ast.If) and i == len(elements) - 1:
+                # Checking if the last element is an if statement
+                # and if it is 'if __name__ == "__main__"' which
+                # could contain the call to setup()
+                test = element.test
+                if not isinstance(test, ast.Compare):
+                    continue
+
+                left = test.left
+                if not isinstance(left, ast.Name):
+                    continue
+
+                if left.id != "__name__":
+                    continue
+
+                setup_call = self._find_sub_setup_call([element])
+                if setup_call is None:
+                    continue
+
+                call, body = setup_call
+                return call, body + elements
+
+            if not isinstance(element, ast.Expr):
+                if isinstance(element, ast.FunctionDef):
+                    funcdefs.append(element)
+
+                continue
+
+            value = element.value
+            if not isinstance(value, ast.Call):
+                continue
+
+            func = value.func
+            if not (isinstance(func, ast.Name) and func.id == "setup") and not (
+                isinstance(func, ast.Attribute)
+                and getattr(func.value, "id", None) == "setuptools"
+                and func.attr == "setup"
+            ):
+                continue
+
+            return value, elements
+
+        # Nothing, we inspect the function definitions
+        return self._find_sub_setup_call(funcdefs)
+
+    def _find_sub_setup_call(
+        self, elements: list[ast.stmt]
+    ) -> tuple[ast.Call, list[ast.stmt]] | None:
+        for element in elements:
+            if not isinstance(element, (ast.FunctionDef, ast.If)):
+                continue
+
+            setup_call = self._find_setup_call(element.body)
+            if setup_call is not None:
+                sub_call, body = setup_call
+
+                body = elements + body
+
+                return sub_call, body
+
+        return None
+
+    def _find_install_requires(self, call: ast.Call, body: list[ast.stmt]) -> list[str]:
+        install_requires: list[str] = []
+        value = self._find_in_call(call, "install_requires")
+        if value is None:
+            # Trying to find in kwargs
+            kwargs = self._find_call_kwargs(call)
+
+            if kwargs is None or not isinstance(kwargs, ast.Name):
+                return install_requires
+
+            variable = self._find_variable_in_body(body, kwargs.id)
+            if not isinstance(variable, (ast.Dict, ast.Call)):
+                return install_requires
+
+            if isinstance(variable, ast.Call):
+                if not isinstance(variable.func, ast.Name):
+                    return install_requires
+
+                if variable.func.id != "dict":
+                    return install_requires
+
+                value = self._find_in_call(variable, "install_requires")
+            else:
+                value = self._find_in_dict(variable, "install_requires")
+
+        if value is None:
+            return install_requires
+
+        if isinstance(value, ast.List):
+            for el in value.elts:
+                if isinstance(el, ast.Str):
+                    install_requires.append(el.s)
+        elif isinstance(value, ast.Name):
+            variable = self._find_variable_in_body(body, value.id)
+
+            if variable is not None and isinstance(variable, ast.List):
+                for el in variable.elts:
+                    if isinstance(el, ast.Str):
+                        install_requires.append(el.s)
+
+        return install_requires
+
+    def _find_extras_require(
+        self, call: ast.Call, body: list[ast.stmt]
+    ) -> dict[str, list[str]]:
+        extras_require: dict[str, list[str]] = {}
+        value = self._find_in_call(call, "extras_require")
+        if value is None:
+            # Trying to find in kwargs
+            kwargs = self._find_call_kwargs(call)
+
+            if kwargs is None or not isinstance(kwargs, ast.Name):
+                return extras_require
+
+            variable = self._find_variable_in_body(body, kwargs.id)
+            if not isinstance(variable, (ast.Dict, ast.Call)):
+                return extras_require
+
+            if isinstance(variable, ast.Call):
+                if not isinstance(variable.func, ast.Name):
+                    return extras_require
+
+                if variable.func.id != "dict":
+                    return extras_require
+
+                value = self._find_in_call(variable, "extras_require")
+            else:
+                value = self._find_in_dict(variable, "extras_require")
+
+        if value is None:
+            return extras_require
+
+        if isinstance(value, ast.Dict):
+            val: ast.expr | None
+            for key, val in zip(value.keys, value.values):
+                if not isinstance(key, ast.Str):
+                    continue
+
+                if isinstance(val, ast.Name):
+                    val = self._find_variable_in_body(body, val.id)
+
+                if isinstance(val, ast.List):
+                    extras_require[key.s] = [
+                        e.s for e in val.elts if isinstance(e, ast.Str)
+                    ]
+        elif isinstance(value, ast.Name):
+            variable = self._find_variable_in_body(body, value.id)
+
+            if variable is None or not isinstance(variable, ast.Dict):
+                return extras_require
+
+            for key, val in zip(variable.keys, variable.values):
+                if not isinstance(key, ast.Str):
+                    continue
+
+                if isinstance(val, ast.Name):
+                    val = self._find_variable_in_body(body, val.id)
+
+                if isinstance(val, ast.List):
+                    extras_require[key.s] = [
+                        e.s for e in val.elts if isinstance(e, ast.Str)
+                    ]
+
+        return extras_require
+
+    def _find_single_string(
+        self, call: ast.Call, body: list[ast.stmt], name: str
+    ) -> str | None:
+        value = self._find_in_call(call, name)
+        if value is None:
+            # Trying to find in kwargs
+            kwargs = self._find_call_kwargs(call)
+
+            if kwargs is None or not isinstance(kwargs, ast.Name):
+                return None
+
+            variable = self._find_variable_in_body(body, kwargs.id)
+            if not isinstance(variable, (ast.Dict, ast.Call)):
+                return None
+
+            if isinstance(variable, ast.Call):
+                if not isinstance(variable.func, ast.Name):
+                    return None
+
+                if variable.func.id != "dict":
+                    return None
+
+                value = self._find_in_call(variable, name)
+            else:
+                value = self._find_in_dict(variable, name)
+
+        if value is None:
+            return None
+
+        if isinstance(value, ast.Str):
+            return value.s
+        elif isinstance(value, ast.Name):
+            variable = self._find_variable_in_body(body, value.id)
+
+            if variable is not None and isinstance(variable, ast.Str):
+                return variable.s
+
+        return None
+
+    def _find_in_call(self, call: ast.Call, name: str) -> Any | None:
+        for keyword in call.keywords:
+            if keyword.arg == name:
+                return keyword.value
+        return None
+
+    def _find_call_kwargs(self, call: ast.Call) -> Any | None:
+        kwargs = None
+        for keyword in call.keywords:
+            if keyword.arg is None:
+                kwargs = keyword.value
+
+        return kwargs
+
+    def _find_variable_in_body(
+        self, body: list[ast.stmt], name: str
+    ) -> ast.expr | None:
+        for elem in body:
+            if not isinstance(elem, ast.Assign):
+                continue
+
+            for target in elem.targets:
+                if not isinstance(target, ast.Name):
+                    continue
+
+                if target.id == name:
+                    return elem.value
+
+        return None
+
+    def _find_in_dict(self, dict_: ast.Dict, name: str) -> ast.expr | None:
+        for key, val in zip(dict_.keys, dict_.values):
+            if isinstance(key, ast.Str) and key.s == name:
+                return val
+
+        return None
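+
+
+# Illustrative usage sketch (hypothetical project path, not part of this
+# module): the inspection is purely static and never executes the setup
+# script.
+#
+#     info = SetupReader.read_from_directory("/path/to/checkout")
+#     print(info["name"], info["version"], info["install_requires"])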
diff --git a/vendor/poetry/src/poetry/utils/shell.py b/vendor/poetry/src/poetry/utils/shell.py
new file mode 100644
index 00000000..bce274f3
--- /dev/null
+++ b/vendor/poetry/src/poetry/utils/shell.py
@@ -0,0 +1,135 @@
+from __future__ import annotations
+
+import os
+import signal
+import subprocess
+import sys
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
+
+import pexpect
+
+from cleo.terminal import Terminal
+from shellingham import ShellDetectionFailure
+from shellingham import detect_shell
+
+from poetry.utils._compat import WINDOWS
+
+
+if TYPE_CHECKING:
+    from poetry.utils.env import VirtualEnv
+
+
+class Shell:
+    """
+    Represents the current shell.
+    """
+
+    _shell = None
+
+    def __init__(self, name: str, path: str) -> None:
+        self._name = name
+        self._path = path
+
+    @property
+    def name(self) -> str:
+        return self._name
+
+    @property
+    def path(self) -> str:
+        return self._path
+
+    @classmethod
+    def get(cls) -> Shell:
+        """
+        Retrieve the current shell.
+        """
+        if cls._shell is not None:
+            return cls._shell
+
+        try:
+            name, path = detect_shell(os.getpid())
+        except (RuntimeError, ShellDetectionFailure):
+            shell = None
+
+            if os.name == "posix":
+                shell = os.environ.get("SHELL")
+            elif os.name == "nt":
+                shell = os.environ.get("COMSPEC")
+
+            if not shell:
+                raise RuntimeError("Unable to detect the current shell.")
+
+            name, path = Path(shell).stem, shell
+
+        cls._shell = cls(name, path)
+
+        return cls._shell
+
+    def activate(self, env: VirtualEnv) -> int | None:
+        activate_script = self._get_activate_script()
+        bin_dir = "Scripts" if WINDOWS else "bin"
+        activate_path = env.path / bin_dir / activate_script
+
+        # mypy requires using sys.platform instead of WINDOWS constant
+        # in if statements to properly type check on Windows
+        if sys.platform == "win32":
+            if self._name in ("powershell", "pwsh"):
+                args = ["-NoExit", "-File", str(activate_path)]
+            else:
+                # /K will execute the bat file and
+                # keep the cmd process from terminating
+                args = ["/K", str(activate_path)]
+            completed_proc = subprocess.run([self.path, *args])
+            return completed_proc.returncode
+
+        import shlex
+
+        terminal = Terminal()
+        with env.temp_environ():
+            c = pexpect.spawn(
+                self._path, ["-i"], dimensions=(terminal.height, terminal.width)
+            )
+
+        if self._name in ["zsh", "nu"]:
+            c.setecho(False)
+
+        c.sendline(f"{self._get_source_command()} {shlex.quote(str(activate_path))}")
+
+        def resize(sig: Any, data: Any) -> None:
+            terminal = Terminal()
+            c.setwinsize(terminal.height, terminal.width)
+
+        signal.signal(signal.SIGWINCH, resize)
+
+        # Interact with the new shell.
+        c.interact(escape_character=None)
+        c.close()
+
+        sys.exit(c.exitstatus)
+
+    def _get_activate_script(self) -> str:
+        if self._name == "fish":
+            suffix = ".fish"
+        elif self._name in ("csh", "tcsh"):
+            suffix = ".csh"
+        elif self._name in ("powershell", "pwsh"):
+            suffix = ".ps1"
+        elif self._name == "cmd":
+            suffix = ".bat"
+        elif self._name == "nu":
+            suffix = ".nu"
+        else:
+            suffix = ""
+
+        return "activate" + suffix
+
+    def _get_source_command(self) -> str:
+        if self._name in ("fish", "csh", "tcsh", "nu"):
+            return "source"
+        return "."
+
+    def __repr__(self) -> str:
+        return f'{self.__class__.__name__}("{self._name}", "{self._path}")'
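+
+
+# Illustrative usage sketch (assuming an existing VirtualEnv instance `env`,
+# not part of this module):
+#
+#     shell = Shell.get()  # detect the invoking shell once and cache it
+#     shell.activate(env)  # spawn it with the environment activated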
diff --git a/vendor/poetry/src/poetry/utils/source.py b/vendor/poetry/src/poetry/utils/source.py
new file mode 100644
index 00000000..dc8e1c8c
--- /dev/null
+++ b/vendor/poetry/src/poetry/utils/source.py
@@ -0,0 +1,20 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from tomlkit.items import Table
+
+    from poetry.config.source import Source
+
+
+def source_to_table(source: Source) -> Table:
+    from tomlkit import nl
+    from tomlkit import table
+
+    source_table: Table = table()
+    for key, value in source.to_dict().items():
+        source_table.add(key, value)
+    source_table.add(nl())
+    return source_table
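+
+
+# Illustrative example (hypothetical Source, not part of this module): a
+# Source(name="foo", url="https://foo.bar/simple/") would serialize to a
+# TOML table along the lines of:
+#
+#     name = "foo"
+#     url = "https://foo.bar/simple/"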
diff --git a/vendor/poetry/src/poetry/vcs/__init__.py b/vendor/poetry/src/poetry/vcs/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/vcs/git/__init__.py b/vendor/poetry/src/poetry/vcs/git/__init__.py
new file mode 100644
index 00000000..9882df85
--- /dev/null
+++ b/vendor/poetry/src/poetry/vcs/git/__init__.py
@@ -0,0 +1,6 @@
+from __future__ import annotations
+
+from poetry.vcs.git.backend import Git
+
+
+__all__ = ["Git"]
diff --git a/vendor/poetry/src/poetry/vcs/git/backend.py b/vendor/poetry/src/poetry/vcs/git/backend.py
new file mode 100644
index 00000000..4eba0b37
--- /dev/null
+++ b/vendor/poetry/src/poetry/vcs/git/backend.py
@@ -0,0 +1,444 @@
+from __future__ import annotations
+
+import dataclasses
+import logging
+import re
+
+from pathlib import Path
+from subprocess import CalledProcessError
+from typing import TYPE_CHECKING
+
+from dulwich import porcelain
+from dulwich.client import HTTPUnauthorized
+from dulwich.client import get_transport_and_path
+from dulwich.config import ConfigFile
+from dulwich.config import parse_submodules
+from dulwich.errors import NotGitRepository
+from dulwich.refs import ANNOTATED_TAG_SUFFIX
+from dulwich.repo import Repo
+
+from poetry.console.exceptions import PoetrySimpleConsoleException
+from poetry.utils.authenticator import get_default_authenticator
+from poetry.utils.helpers import remove_directory
+
+
+if TYPE_CHECKING:
+    from dulwich.client import FetchPackResult
+    from dulwich.client import GitClient
+
+
+logger = logging.getLogger(__name__)
+
+
+def is_revision_sha(revision: str | None) -> bool:
+    return re.match(r"^\b[0-9a-f]{5,40}\b$", revision or "") is not None
+
+
+def annotated_tag(ref: str | bytes) -> bytes:
+    if isinstance(ref, str):
+        ref = ref.encode("utf-8")
+    return ref + ANNOTATED_TAG_SUFFIX
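+
+
+# Doctest-style illustrations (assumptions, not part of the upstream change;
+# dulwich's ANNOTATED_TAG_SUFFIX is b"^{}"):
+#
+#     >>> is_revision_sha("9cf87a285a2d3fbb0b9fa621997b3acc3631ed24")
+#     True
+#     >>> is_revision_sha("main")
+#     False
+#     >>> annotated_tag("refs/tags/v1.0")
+#     b'refs/tags/v1.0^{}'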
+
+
+@dataclasses.dataclass
+class GitRefSpec:
+    branch: str | None = None
+    revision: str | None = None
+    tag: str | None = None
+    ref: bytes = dataclasses.field(default_factory=lambda: b"HEAD")
+
+    def resolve(self, remote_refs: FetchPackResult) -> None:
+        """
+        Resolve the ref using the provided remote refs.
+        """
+        self._normalise(remote_refs=remote_refs)
+        self._set_head(remote_refs=remote_refs)
+
+    def _normalise(self, remote_refs: FetchPackResult) -> None:
+        """
+        Internal helper method to determine if the given revision is
+            1. a branch or tag; if so, set the corresponding properties.
+            2. a short sha; if so, resolve the full sha and set it as the revision.
+        """
+        if self.revision:
+            ref = f"refs/tags/{self.revision}".encode()
+            if ref in remote_refs.refs or annotated_tag(ref) in remote_refs.refs:
+                # this is a tag incorrectly specified as a revision; tags take priority
+                self.tag = self.revision
+                self.revision = None
+            elif (
+                self.revision.encode("utf-8") in remote_refs.refs
+                or f"refs/heads/{self.revision}".encode() in remote_refs.refs
+            ):
+                # this is most likely a ref spec or a branch incorrectly specified
+                self.branch = self.revision
+                self.revision = None
+        elif (
+            self.branch
+            and f"refs/heads/{self.branch}".encode() not in remote_refs.refs
+            and (
+                f"refs/tags/{self.branch}".encode() in remote_refs.refs
+                or annotated_tag(f"refs/tags/{self.branch}") in remote_refs.refs
+            )
+        ):
+            # this is a tag incorrectly specified as a branch
+            self.tag = self.branch
+            self.branch = None
+
+        if self.revision and self.is_sha_short:
+            # revision is a short sha, resolve to full sha
+            short_sha = self.revision.encode("utf-8")
+            for sha in remote_refs.refs.values():
+                if sha.startswith(short_sha):
+                    self.revision = sha.decode("utf-8")
+                    break
+
+    def _set_head(self, remote_refs: FetchPackResult) -> None:
+        """
+        Internal helper method to populate the ref and set its sha as the remote's
+        head and default ref.
+        """
+        self.ref = remote_refs.symrefs[b"HEAD"]
+
+        if self.revision:
+            head = self.revision.encode("utf-8")
+        else:
+            if self.tag:
+                ref = f"refs/tags/{self.tag}".encode()
+                annotated = annotated_tag(ref)
+                self.ref = annotated if annotated in remote_refs.refs else ref
+            elif self.branch:
+                self.ref = (
+                    self.branch.encode("utf-8")
+                    if self.is_ref
+                    else f"refs/heads/{self.branch}".encode()
+                )
+            head = remote_refs.refs[self.ref]
+
+        remote_refs.refs[self.ref] = remote_refs.refs[b"HEAD"] = head
+
+    @property
+    def key(self) -> str:
+        return self.revision or self.branch or self.tag or self.ref.decode("utf-8")
+
+    @property
+    def is_sha(self) -> bool:
+        return is_revision_sha(revision=self.revision)
+
+    @property
+    def is_ref(self) -> bool:
+        return self.branch is not None and self.branch.startswith("refs/")
+
+    @property
+    def is_sha_short(self) -> bool:
+        return self.revision is not None and self.is_sha and len(self.revision) < 40
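+
+
+# Resolution sketch (illustrative): given a hypothetical FetchPackResult whose
+# refs contain b"refs/tags/v1.0", a revision that is really a tag is re-homed:
+#
+#     >>> spec = GitRefSpec(revision="v1.0")
+#     >>> spec.resolve(remote_refs=remote_refs)
+#     >>> spec.tag, spec.revision
+#     ('v1.0', None)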
+
+
+@dataclasses.dataclass
+class GitRepoLocalInfo:
+    repo: dataclasses.InitVar[Repo | Path | str]
+    origin: str = dataclasses.field(init=False)
+    revision: str = dataclasses.field(init=False)
+
+    def __post_init__(self, repo: Repo | Path | str) -> None:
+        repo = Git.as_repo(repo=repo) if not isinstance(repo, Repo) else repo
+        self.origin = Git.get_remote_url(repo=repo, remote="origin")
+        self.revision = Git.get_revision(repo=repo)
+
+
+class Git:
+    @staticmethod
+    def as_repo(repo: Path | str) -> Repo:
+        return Repo(str(repo))
+
+    @staticmethod
+    def get_remote_url(repo: Repo, remote: str = "origin") -> str:
+        with repo:
+            config = repo.get_config()
+            section = (b"remote", remote.encode("utf-8"))
+
+            url = ""
+            if config.has_section(section):
+                value = config.get(section, b"url")
+                url = value.decode("utf-8")
+
+            return url
+
+    @staticmethod
+    def get_revision(repo: Repo) -> str:
+        with repo:
+            return repo.head().decode("utf-8")
+
+    @classmethod
+    def info(cls, repo: Repo | Path | str) -> GitRepoLocalInfo:
+        return GitRepoLocalInfo(repo=repo)
+
+    @staticmethod
+    def get_name_from_source_url(url: str) -> str:
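+        # illustrative examples (hypothetical URLs):
+        #   "https://github.com/python-poetry/poetry.git" -> "poetry"
+        #   "git@github.com:org/repo"                     -> "repo"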
+        return re.sub(r"(.git)?$", "", url.rsplit("/", 1)[-1])
+
+    @classmethod
+    def _fetch_remote_refs(cls, url: str, local: Repo) -> FetchPackResult:
+        """
+        Helper method to fetch remote refs.
+        """
+        client: GitClient
+        path: str
+
+        kwargs: dict[str, str] = {}
+        credentials = get_default_authenticator().get_credentials_for_git_url(url=url)
+
+        if credentials.password and credentials.username:
+            # we do this conditionally as otherwise dulwich might complain if these
+            # parameters are passed in for an ssh url
+            kwargs["username"] = credentials.username
+            kwargs["password"] = credentials.password
+
+        config = local.get_config_stack()
+        client, path = get_transport_and_path(url, config=config, **kwargs)
+
+        with local:
+            result: FetchPackResult = client.fetch(
+                path,
+                local,
+                determine_wants=local.object_store.determine_wants_all,
+            )
+            return result
+
+    @staticmethod
+    def _clone_legacy(url: str, refspec: GitRefSpec, target: Path) -> Repo:
+        """
+        Helper method to facilitate fallback to the system-provided git client via
+        subprocess calls.
+        """
+        from poetry.vcs.git.system import SystemGit
+
+        logger.debug("Cloning '%s' using system git client", url)
+
+        if target.exists():
+            remove_directory(path=target, force=True)
+
+        revision = refspec.tag or refspec.branch or refspec.revision or "HEAD"
+
+        try:
+            SystemGit.clone(url, target)
+        except CalledProcessError:
+            raise PoetrySimpleConsoleException(
+                f"Failed to clone {url}, check your git configuration and permissions"
+                " for this repository."
+            )
+
+        if revision:
+            # strip any ref prefix; str.replace returns a new string, so reassign
+            revision = revision.replace("refs/heads/", "")
+            revision = revision.replace("refs/tags/", "")
+
+        try:
+            SystemGit.checkout(revision, target)
+        except CalledProcessError:
+            raise PoetrySimpleConsoleException(
+                f"Failed to checkout {url} at '{revision}'"
+            )
+
+        repo = Repo(str(target))
+        return repo
+
+    @classmethod
+    def _clone(cls, url: str, refspec: GitRefSpec, target: Path) -> Repo:
+        """
+        Helper method to clone a remote repository at the given `url` at the specified
+        ref spec.
+        """
+        local: Repo
+        if not target.exists():
+            local = Repo.init(str(target), mkdir=True)
+            porcelain.remote_add(local, "origin", url)
+        else:
+            local = Repo(str(target))
+
+        remote_refs = cls._fetch_remote_refs(url=url, local=local)
+
+        logger.debug("Cloning %s at '%s' to %s", url, refspec.key, target)
+
+        try:
+            refspec.resolve(remote_refs=remote_refs)
+        except KeyError:  # branch / ref does not exist
+            raise PoetrySimpleConsoleException(
+                f"Failed to clone {url} at '{refspec.key}', verify ref exists on"
+                " remote."
+            )
+
+        # ensure local HEAD matches remote
+        local.refs[b"HEAD"] = remote_refs.refs[b"HEAD"]
+
+        if refspec.is_ref:
+            # set ref to current HEAD
+            local.refs[refspec.ref] = local.refs[b"HEAD"]
+
+        for base, prefix in {
+            (b"refs/remotes/origin", b"refs/heads/"),
+            (b"refs/tags", b"refs/tags"),
+        }:
+            local.refs.import_refs(
+                base=base,
+                other={
+                    n[len(prefix) :]: v
+                    for (n, v) in remote_refs.refs.items()
+                    if n.startswith(prefix) and not n.endswith(ANNOTATED_TAG_SUFFIX)
+                },
+            )
+
+        try:
+            with local:
+                local.reset_index()
+        except (AssertionError, KeyError) as e:
+            # this implies the ref we need does not exist or is invalid
+            if isinstance(e, KeyError):
+                # the local copy is in a bad state; let's remove it
+                logger.debug(
+                    "Removing local clone (%s) of repository as it is in a"
+                    " broken state.",
+                    local.path,
+                )
+                remove_directory(local.path, force=True)
+
+            if isinstance(e, AssertionError) and "Invalid object name" not in str(e):
+                raise
+
+            logger.debug(
+                "\nRequested ref (%s) was not fetched to local copy and cannot"
+                " be used. The following error was raised:\n\n\t%s",
+                refspec.key,
+                e,
+            )
+
+            raise PoetrySimpleConsoleException(
+                f"Failed to clone {url} at '{refspec.key}', verify ref exists on"
+                " remote."
+            )
+
+        return local
+
+    @classmethod
+    def _clone_submodules(cls, repo: Repo) -> None:
+        """
+        Helper method to identify configured submodules and clone them recursively.
+        """
+        repo_root = Path(repo.path)
+        modules_config = repo_root.joinpath(".gitmodules")
+
+        if modules_config.exists():
+            config = ConfigFile.from_path(str(modules_config))
+
+            url: bytes
+            path: bytes
+            submodules = parse_submodules(config)
+            for path, url, name in submodules:
+                path_relative = Path(path.decode("utf-8"))
+                path_absolute = repo_root.joinpath(path_relative)
+
+                source_root = path_absolute.parent
+                source_root.mkdir(parents=True, exist_ok=True)
+
+                with repo:
+                    try:
+                        revision = repo.open_index()[path].sha.decode("utf-8")
+                    except KeyError:
+                        logger.debug(
+                            "Skip submodule %s in %s, path %s not found",
+                            name,
+                            repo.path,
+                            path,
+                        )
+                        continue
+
+                cls.clone(
+                    url=url.decode("utf-8"),
+                    source_root=source_root,
+                    name=path_relative.name,
+                    revision=revision,
+                    clean=path_absolute.exists()
+                    and not path_absolute.joinpath(".git").is_dir(),
+                )
+
+    @staticmethod
+    def is_using_legacy_client() -> bool:
+        from poetry.config.config import Config
+
+        legacy_client: bool = (
+            Config.create().get("experimental", {}).get("system-git-client", False)
+        )
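+        # the user-facing toggle this reads (illustrative):
+        #     poetry config experimental.system-git-client true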
+        return legacy_client
+
+    @staticmethod
+    def get_default_source_root() -> Path:
+        from poetry.config.config import Config
+
+        return Path(Config.create().get("cache-dir")) / "src"
+
+    @classmethod
+    def clone(
+        cls,
+        url: str,
+        name: str | None = None,
+        branch: str | None = None,
+        tag: str | None = None,
+        revision: str | None = None,
+        source_root: Path | None = None,
+        clean: bool = False,
+    ) -> Repo:
+        source_root = source_root or cls.get_default_source_root()
+        source_root.mkdir(parents=True, exist_ok=True)
+
+        name = name or cls.get_name_from_source_url(url=url)
+        target = source_root / name
+        refspec = GitRefSpec(branch=branch, revision=revision, tag=tag)
+
+        if target.exists():
+            if clean:
+                # force clean the local copy if it exists, do not reuse
+                remove_directory(target, force=True)
+            else:
+                # check if the current local copy matches the requested ref spec
+                try:
+                    current_repo = Repo(str(target))
+
+                    with current_repo:
+                        current_sha = current_repo.head().decode("utf-8")
+                except (NotGitRepository, AssertionError, KeyError):
+                    # something is wrong with the current checkout, clean it
+                    remove_directory(target, force=True)
+                else:
+                    if not is_revision_sha(revision=current_sha):
+                        # head is not a sha; this will cause issues later, so reset
+                        remove_directory(target, force=True)
+                    elif (
+                        refspec.is_sha
+                        and refspec.revision is not None
+                        and current_sha.startswith(refspec.revision)
+                    ):
+                        # the requested revision is already checked out; skip the
+                        # remote fetch and reuse the local copy
+                        return current_repo
+
+        try:
+            if not cls.is_using_legacy_client():
+                local = cls._clone(url=url, refspec=refspec, target=target)
+                cls._clone_submodules(repo=local)
+                return local
+        except HTTPUnauthorized:
+            # we do this here to handle http authenticated repositories as dulwich
+            # does not currently support using credentials from git-credential helpers.
+            # upstream issue: https://github.com/jelmer/dulwich/issues/873
+            #
+            # this is a little inefficient; however, it is preferred as it is
+            # transparent, requiring no additional configuration or changes for
+            # existing projects that use http basic auth credentials.
+            logger.debug(
+                "Unable to fetch from private repository '%s', falling back to"
+                " system git",
+                url,
+            )
+
+        # fallback to legacy git client
+        return cls._clone_legacy(url=url, refspec=refspec, target=target)
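+
+
+# End-to-end sketch (illustrative; the URL, tag and source root are
+# hypothetical):
+#
+#     >>> repo = Git.clone(
+#     ...     url="https://github.com/demo/demo.git",
+#     ...     tag="v0.1.2",
+#     ...     source_root=Path("/tmp/poetry-src"),
+#     ... )
+#     >>> Git.get_revision(repo)  # full sha the tag points at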
diff --git a/vendor/poetry/src/poetry/vcs/git/system.py b/vendor/poetry/src/poetry/vcs/git/system.py
new file mode 100644
index 00000000..5ed84733
--- /dev/null
+++ b/vendor/poetry/src/poetry/vcs/git/system.py
@@ -0,0 +1,71 @@
+from __future__ import annotations
+
+import os
+import subprocess
+
+from typing import TYPE_CHECKING
+
+from dulwich.client import find_git_command
+
+
+if TYPE_CHECKING:
+    from pathlib import Path
+    from typing import Any
+
+
+class SystemGit:
+    @classmethod
+    def clone(cls, repository: str, dest: Path) -> str:
+        cls._check_parameter(repository)
+
+        return cls.run("clone", "--recurse-submodules", "--", repository, str(dest))
+
+    @classmethod
+    def checkout(cls, rev: str, target: Path | None = None) -> str:
+        args = []
+
+        if target:
+            args += [
+                "--git-dir",
+                (target / ".git").as_posix(),
+                "--work-tree",
+                target.as_posix(),
+            ]
+
+        cls._check_parameter(rev)
+
+        args += ["checkout", rev]
+
+        return cls.run(*args)
+
+    @staticmethod
+    def run(*args: Any, **kwargs: Any) -> str:
+        folder = kwargs.pop("folder", None)
+        if folder:
+            args = (
+                "--git-dir",
+                (folder / ".git").as_posix(),
+                "--work-tree",
+                folder.as_posix(),
+            ) + args
+
+        git_command = find_git_command()
+        env = os.environ.copy()
+        env["GIT_TERMINAL_PROMPT"] = "0"
+        return (
+            subprocess.check_output(
+                git_command + list(args),
+                stderr=subprocess.STDOUT,
+                env=env,
+            )
+            .decode()
+            .strip()
+        )
+
+    @staticmethod
+    def _check_parameter(parameter: str) -> None:
+        """
+        Checks a git parameter to avoid unwanted code execution.
+        """
+        if parameter.strip().startswith("-"):
+            raise RuntimeError(f"Invalid Git parameter: {parameter}")
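+
+
+# Illustrative guard behaviour (the parameter value is hypothetical):
+#
+#     >>> SystemGit._check_parameter("--upload-pack=/bin/sh")
+#     Traceback (most recent call last):
+#     RuntimeError: Invalid Git parameter: --upload-pack=/bin/sh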
diff --git a/vendor/poetry/src/poetry/version/__init__.py b/vendor/poetry/src/poetry/version/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/src/poetry/version/version_selector.py b/vendor/poetry/src/poetry/version/version_selector.py
new file mode 100644
index 00000000..adac9fcb
--- /dev/null
+++ b/vendor/poetry/src/poetry/version/version_selector.py
@@ -0,0 +1,69 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.core.semver.version import Version
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.package import Package
+
+    from poetry.repositories import Pool
+
+
+class VersionSelector:
+    def __init__(self, pool: Pool) -> None:
+        self._pool = pool
+
+    def find_best_candidate(
+        self,
+        package_name: str,
+        target_package_version: str | None = None,
+        allow_prereleases: bool = False,
+        source: str | None = None,
+    ) -> Package | None:
+        """
+        Given a package name and optional version,
+        returns the latest Package that matches
+        """
+        from poetry.factory import Factory
+
+        dependency = Factory.create_dependency(
+            package_name,
+            {
+                "version": target_package_version or "*",
+                "allow-prereleases": allow_prereleases,
+                "source": source,
+            },
+        )
+        candidates = self._pool.find_packages(dependency)
+        only_prereleases = all(c.version.is_unstable() for c in candidates)
+
+        if not candidates:
+            return None
+
+        package = None
+        for candidate in candidates:
+            if (
+                candidate.is_prerelease()
+                and not dependency.allows_prereleases()
+                and not only_prereleases
+            ):
+                continue
+
+            # Select highest version of the two
+            if package is None or package.version < candidate.version:
+                package = candidate
+
+        return package
+
+    def find_recommended_require_version(self, package: Package) -> str:
+        version = package.version
+
+        return self._transform_version(version.text, package.pretty_version)
+
+    def _transform_version(self, version: str, pretty_version: str) -> str:
+        try:
+            return f"^{Version.parse(version).to_string()}"
+        except ValueError:
+            return pretty_version
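+
+
+# Illustrative caret transform (not part of this change; a pool is not needed
+# for this helper):
+#
+#     >>> VersionSelector(pool=None)._transform_version("1.2.3", "1.2.3")
+#     '^1.2.3'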
diff --git a/vendor/poetry/tests/compat.py b/vendor/poetry/tests/compat.py
new file mode 100644
index 00000000..7bcd4e4c
--- /dev/null
+++ b/vendor/poetry/tests/compat.py
@@ -0,0 +1,14 @@
+from __future__ import annotations
+
+import sys
+
+
+if sys.version_info < (3, 8):
+    import zipp as zipfile  # nopycln: import
+else:
+    import zipfile  # noqa: F401
+
+try:
+    from typing import Protocol  # nopycln: import
+except ImportError:
+    from typing_extensions import Protocol  # noqa: F401
diff --git a/vendor/poetry/tests/config/test_config.py b/vendor/poetry/tests/config/test_config.py
index d79e8631..ce691470 100644
--- a/vendor/poetry/tests/config/test_config.py
+++ b/vendor/poetry/tests/config/test_config.py
@@ -1,31 +1,79 @@
+from __future__ import annotations
+
 import os
+import re
 
 from pathlib import Path
+from typing import TYPE_CHECKING
 
 import pytest
 
+from flatdict import FlatDict
+
+from poetry.config.config import Config
+from poetry.config.config import boolean_normalizer
+from poetry.config.config import int_normalizer
+
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+    from collections.abc import Iterator
+
+
+def get_options_based_on_normalizer(normalizer: Callable) -> Iterator[str]:
+    flattened_config = FlatDict(Config.default_config, delimiter=".")
+
+    for k in flattened_config:
+        if Config._get_normalizer(k) == normalizer:
+            yield k
+
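+# For example (illustrative): get_options_based_on_normalizer(boolean_normalizer)
+# yields the keys of boolean options in the default config, such as
+# "installer.parallel" and "virtualenvs.create".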
 
 @pytest.mark.parametrize(
     ("name", "value"), [("installer.parallel", True), ("virtualenvs.create", True)]
 )
-def test_config_get_default_value(config, name, value):
+def test_config_get_default_value(config: Config, name: str, value: bool):
     assert config.get(name) is value
 
 
-def test_config_get_processes_depended_on_values(config):
-    assert str(Path.cwd() / ".pypoetrycache" / "virtualenvs") == config.get("virtualenvs.path")
+def test_config_get_processes_depended_on_values(
+    config: Config, config_cache_dir: Path
+):
+    assert str(config_cache_dir / "virtualenvs") == config.get("virtualenvs.path")
+
+
+def generate_environment_variable_tests() -> Iterator[tuple[str, str, str, bool | int]]:
+    for normalizer, values in [
+        (boolean_normalizer, [("true", True), ("false", False)]),
+        (int_normalizer, [("4", 4), ("2", 2)]),
+    ]:
+        for env_value, value in values:
+            for name in get_options_based_on_normalizer(normalizer=normalizer):
+                env_var = "POETRY_" + re.sub("[.-]+", "_", name).upper()
+                yield name, env_var, env_value, value
 
 
 @pytest.mark.parametrize(
-    ("name", "env_value", "value"),
-    [
-        ("installer.parallel", "true", True),
-        ("installer.parallel", "false", False),
-        ("virtualenvs.create", "true", True),
-        ("virtualenvs.create", "false", False),
-    ],
+    ("name", "env_var", "env_value", "value"),
+    list(generate_environment_variable_tests()),
 )
-def test_config_get_from_environment_variable(config, environ, name, env_value, value):
-    env_var = "POETRY_{}".format("_".join(k.upper() for k in name.split(".")))
+def test_config_get_from_environment_variable(
+    config: Config,
+    environ: None,
+    name: str,
+    env_var: str,
+    env_value: str,
+    value: bool | int,
+):
     os.environ[env_var] = env_value
     assert config.get(name) is value
+
+
+@pytest.mark.parametrize(
+    ("path_config", "expected"),
+    [("~/.venvs", Path.home() / ".venvs"), ("venv", Path("venv"))],
+)
+def test_config_expands_tilde_for_virtualenvs_path(
+    config: Config, path_config: str, expected: Path
+):
+    config.merge({"virtualenvs": {"path": path_config}})
+    assert config.virtualenvs_path == expected
diff --git a/vendor/poetry/tests/conftest.py b/vendor/poetry/tests/conftest.py
index 22741344..8e588937 100644
--- a/vendor/poetry/tests/conftest.py
+++ b/vendor/poetry/tests/conftest.py
@@ -1,76 +1,203 @@
+from __future__ import annotations
+
+import logging
 import os
 import re
 import shutil
 import sys
 import tempfile
 
+from contextlib import suppress
 from pathlib import Path
+from typing import TYPE_CHECKING
 from typing import Any
-from typing import Dict
+from typing import TextIO
 
 import httpretty
 import pytest
 
-from cleo import CommandTester
+from keyring.backend import KeyringBackend
 
 from poetry.config.config import Config as BaseConfig
 from poetry.config.dict_config_source import DictConfigSource
 from poetry.factory import Factory
 from poetry.inspection.info import PackageInfo
 from poetry.inspection.info import PackageInfoError
-from poetry.installation import Installer
 from poetry.layouts import layout
 from poetry.repositories import Pool
 from poetry.repositories import Repository
-from poetry.utils._compat import Path
 from poetry.utils.env import EnvManager
 from poetry.utils.env import SystemEnv
 from poetry.utils.env import VirtualEnv
-from tests.helpers import TestExecutor
+from poetry.utils.helpers import remove_directory
+from tests.helpers import MOCK_DEFAULT_GIT_REVISION
 from tests.helpers import TestLocker
 from tests.helpers import TestRepository
 from tests.helpers import get_package
+from tests.helpers import isolated_environment
 from tests.helpers import mock_clone
 from tests.helpers import mock_download
 
 
+if TYPE_CHECKING:
+    from collections.abc import Iterator
+
+    from _pytest.config import Config as PyTestConfig
+    from _pytest.config.argparsing import Parser
+    from pytest_mock import MockerFixture
+
+    from poetry.poetry import Poetry
+    from tests.types import FixtureCopier
+    from tests.types import FixtureDirGetter
+    from tests.types import ProjectFactory
+
+
+def pytest_addoption(parser: Parser) -> None:
+    parser.addoption(
+        "--integration",
+        action="store_true",
+        dest="integration",
+        default=False,
+        help="enable integration tests",
+    )
+
+
+def pytest_configure(config: PyTestConfig) -> None:
+    config.addinivalue_line("markers", "integration: mark integration tests")
+
+    if not config.option.integration:
+        config.option.markexpr = "not integration"
+
+
 class Config(BaseConfig):
-    def get(self, setting_name, default=None):  # type: (str, Any) -> Any
+    def get(self, setting_name: str, default: Any = None) -> Any:
         self.merge(self._config_source.config)
         self.merge(self._auth_config_source.config)
 
-        return super(Config, self).get(setting_name, default=default)
+        return super().get(setting_name, default=default)
 
-    def raw(self):  # type: () -> Dict[str, Any]
+    def raw(self) -> dict[str, Any]:
         self.merge(self._config_source.config)
         self.merge(self._auth_config_source.config)
 
-        return super(Config, self).raw()
+        return super().raw()
 
-    def all(self):  # type: () -> Dict[str, Any]
+    def all(self) -> dict[str, Any]:
         self.merge(self._config_source.config)
         self.merge(self._auth_config_source.config)
 
-        return super(Config, self).all()
+        return super().all()
+
+
+class DummyBackend(KeyringBackend):
+    def __init__(self) -> None:
+        self._passwords = {}
+
+    @classmethod
+    def priority(cls) -> int:
+        return 42
+
+    def set_password(self, service: str, username: str | None, password: Any) -> None:
+        self._passwords[service] = {username: password}
+
+    def get_password(self, service: str, username: str | None) -> Any:
+        return self._passwords.get(service, {}).get(username)
+
+    def get_credential(self, service: str, username: str | None) -> Any:
+        return self._passwords.get(service, {}).get(username)
+
+    def delete_password(self, service: str, username: str | None) -> None:
+        if service in self._passwords and username in self._passwords[service]:
+            del self._passwords[service][username]
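+
+
+# Illustrative round trip through the dummy backend (hypothetical service and
+# user names):
+#
+#     >>> backend = DummyBackend()
+#     >>> backend.set_password("poetry-repository-pypi", "__token__", "secret")
+#     >>> backend.get_password("poetry-repository-pypi", "__token__")
+#     'secret'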
+
+
+@pytest.fixture()
+def dummy_keyring() -> DummyBackend:
+    return DummyBackend()
+
+
+@pytest.fixture()
+def with_simple_keyring(dummy_keyring: DummyBackend) -> None:
+    import keyring
+
+    keyring.set_keyring(dummy_keyring)
+
+
+@pytest.fixture()
+def with_fail_keyring() -> None:
+    import keyring
+
+    from keyring.backends.fail import Keyring
+
+    keyring.set_keyring(Keyring())
+
+
+@pytest.fixture()
+def with_null_keyring() -> None:
+    import keyring
+
+    from keyring.backends.null import Keyring
+
+    keyring.set_keyring(Keyring())
+
+
+@pytest.fixture()
+def with_chained_fail_keyring(mocker: MockerFixture) -> None:
+    from keyring.backends.fail import Keyring
+
+    mocker.patch("keyring.backend.get_all_keyring", lambda: [Keyring()])
+    import keyring
+
+    from keyring.backends.chainer import ChainerBackend
+
+    keyring.set_keyring(ChainerBackend())
+
+
+@pytest.fixture()
+def with_chained_null_keyring(mocker: MockerFixture) -> None:
+    from keyring.backends.null import Keyring
+
+    mocker.patch("keyring.backend.get_all_keyring", lambda: [Keyring()])
+    import keyring
+
+    from keyring.backends.chainer import ChainerBackend
+
+    keyring.set_keyring(ChainerBackend())
+
+
+@pytest.fixture
+def config_cache_dir(tmp_dir: str) -> Path:
+    path = Path(tmp_dir) / ".cache" / "pypoetry"
+    path.mkdir(parents=True)
+    return path
+
+
+@pytest.fixture
+def config_virtualenvs_path(config_cache_dir: Path) -> Path:
+    return config_cache_dir / "virtualenvs"
 
 
 @pytest.fixture
-def config_source():
+def config_source(config_cache_dir: Path) -> DictConfigSource:
     source = DictConfigSource()
-    source.add_property("cache-dir", str(Path.cwd() / ".pypoetrycache"))
+    source.add_property("cache-dir", str(config_cache_dir))
 
     return source
 
 
 @pytest.fixture
-def auth_config_source():
+def auth_config_source() -> DictConfigSource:
     source = DictConfigSource()
 
     return source
 
 
-@pytest.fixture
-def config(config_source, auth_config_source, mocker):
+@pytest.fixture(autouse=True)
+def config(
+    config_source: DictConfigSource,
+    auth_config_source: DictConfigSource,
+    mocker: MockerFixture,
+) -> Config:
     import keyring
 
     from keyring.backends.fail import Keyring
@@ -82,56 +209,71 @@ def config(config_source, auth_config_source, mocker):
     c.set_config_source(config_source)
     c.set_auth_config_source(auth_config_source)
 
-    mocker.patch("poetry.factory.Factory.create_config", return_value=c)
+    mocker.patch("poetry.config.config.Config.create", return_value=c)
     mocker.patch("poetry.config.config.Config.set_config_source")
 
     return c
 
 
+@pytest.fixture()
+def config_dir(tmp_dir: str) -> Path:
+    return Path(tempfile.mkdtemp(prefix="poetry_config_", dir=tmp_dir))
+
+
 @pytest.fixture(autouse=True)
-def download_mock(mocker):
+def mock_user_config_dir(mocker: MockerFixture, config_dir: Path) -> None:
+    mocker.patch("poetry.locations.CONFIG_DIR", new=config_dir)
+    mocker.patch("poetry.config.config.CONFIG_DIR", new=config_dir)
+
+
+@pytest.fixture(autouse=True)
+def download_mock(mocker: MockerFixture) -> None:
     # Patch download to not download anything but to just copy from fixtures
     mocker.patch("poetry.utils.helpers.download_file", new=mock_download)
     mocker.patch("poetry.puzzle.provider.download_file", new=mock_download)
-    mocker.patch("poetry.repositories.pypi_repository.download_file", new=mock_download)
+    mocker.patch("poetry.repositories.http.download_file", new=mock_download)
 
 
 @pytest.fixture(autouse=True)
-def pep517_metadata_mock(mocker):
-    @classmethod  # noqa
-    def _pep517_metadata(cls, path):
-        try:
+def pep517_metadata_mock(mocker: MockerFixture) -> None:
+    def get_pep517_metadata(path: Path) -> PackageInfo:
+        with suppress(PackageInfoError):
             return PackageInfo.from_setup_files(path)
-        except PackageInfoError:
-            pass
         return PackageInfo(name="demo", version="0.1.2")
 
     mocker.patch(
-        "poetry.inspection.info.PackageInfo._pep517_metadata", _pep517_metadata,
+        "poetry.inspection.info.get_pep517_metadata",
+        get_pep517_metadata,
     )
 
 
 @pytest.fixture
-def environ():
-    original_environ = dict(os.environ)
+def environ() -> Iterator[None]:
+    with isolated_environment():
+        yield
 
-    yield
 
-    os.environ.clear()
-    os.environ.update(original_environ)
+@pytest.fixture(autouse=True)
+def isolate_environ() -> Iterator[None]:
+    """Ensure the environment is isolated from user configuration."""
+    with isolated_environment():
+        # iterate over a snapshot: deleting from os.environ while iterating it
+        # directly would raise a RuntimeError
+        for var in list(os.environ):
+            if var.startswith("POETRY_") or var in {"PYTHONPATH", "VIRTUAL_ENV"}:
+                del os.environ[var]
+
+        yield
 
 
 @pytest.fixture(autouse=True)
-def git_mock(mocker):
+def git_mock(mocker: MockerFixture) -> None:
     # Patch git module to not actually clone projects
-    mocker.patch("poetry.core.vcs.git.Git.clone", new=mock_clone)
-    mocker.patch("poetry.core.vcs.git.Git.checkout", new=lambda *_: None)
-    p = mocker.patch("poetry.core.vcs.git.Git.rev_parse")
-    p.return_value = "9cf87a285a2d3fbb0b9fa621997b3acc3631ed24"
+    mocker.patch("poetry.vcs.git.Git.clone", new=mock_clone)
+    p = mocker.patch("poetry.vcs.git.Git.get_revision")
+    p.return_value = MOCK_DEFAULT_GIT_REVISION
 
 
 @pytest.fixture
-def http():
+def http() -> Iterator[type[httpretty.httpretty]]:
     httpretty.reset()
     httpretty.enable(allow_net_connect=False)
 
@@ -142,44 +284,44 @@ def http():
 
 
 @pytest.fixture
-def fixture_base():
+def fixture_base() -> Path:
     return Path(__file__).parent / "fixtures"
 
 
 @pytest.fixture
-def fixture_dir(fixture_base):
-    def _fixture_dir(name):
+def fixture_dir(fixture_base: Path) -> FixtureDirGetter:
+    def _fixture_dir(name: str) -> Path:
         return fixture_base / name
 
     return _fixture_dir
 
 
 @pytest.fixture
-def tmp_dir():
+def tmp_dir() -> Iterator[str]:
     dir_ = tempfile.mkdtemp(prefix="poetry_")
 
-    yield dir_
+    yield Path(dir_).resolve().as_posix()
 
-    shutil.rmtree(dir_)
+    remove_directory(dir_, force=True)
 
 
 @pytest.fixture
-def mocked_open_files(mocker):
+def mocked_open_files(mocker: MockerFixture) -> list:
     files = []
     original = Path.open
 
-    def mocked_open(self, *args, **kwargs):
+    def mocked_open(self: Path, *args: Any, **kwargs: Any) -> TextIO:
         if self.name in {"pyproject.toml"}:
             return mocker.MagicMock()
         return original(self, *args, **kwargs)
 
-    mocker.patch("poetry.utils._compat.Path.open", mocked_open)
+    mocker.patch("pathlib.Path.open", mocked_open)
 
-    yield files
+    return files
 
 
 @pytest.fixture
-def tmp_venv(tmp_dir):
+def tmp_venv(tmp_dir: str) -> Iterator[VirtualEnv]:
     venv_path = Path(tmp_dir) / "venv"
 
     EnvManager.build_venv(str(venv_path))
@@ -191,54 +333,71 @@ def tmp_venv(tmp_dir):
 
 
 @pytest.fixture
-def installed():
-    return Repository()
+def installed() -> Repository:
+    return Repository("installed")
 
 
 @pytest.fixture(scope="session")
-def current_env():
+def current_env() -> SystemEnv:
     return SystemEnv(Path(sys.executable))
 
 
 @pytest.fixture(scope="session")
-def current_python(current_env):
+def current_python(current_env: SystemEnv) -> tuple[int, int, int]:
     return current_env.version_info[:3]
 
 
 @pytest.fixture(scope="session")
-def default_python(current_python):
-    return "^{}".format(".".join(str(v) for v in current_python[:2]))
+def default_python(current_python: tuple[int, int, int]) -> str:
+    return "^" + ".".join(str(v) for v in current_python[:2])
 
 
 @pytest.fixture
-def repo(http):
+def repo(http: type[httpretty.httpretty]) -> TestRepository:
     http.register_uri(
-        http.GET, re.compile("^https?://foo.bar/(.+?)$"),
+        http.GET,
+        re.compile("^https?://foo.bar/(.+?)$"),
     )
     return TestRepository(name="foo")
 
 
 @pytest.fixture
-def project_factory(tmp_dir, config, repo, installed, default_python):
+def project_factory(
+    tmp_dir: str,
+    config: Config,
+    repo: TestRepository,
+    installed: Repository,
+    default_python: str,
+    load_required_fixtures: None,
+) -> ProjectFactory:
     workspace = Path(tmp_dir)
 
     def _factory(
-        name=None,
-        dependencies=None,
-        dev_dependencies=None,
-        pyproject_content=None,
-        install_deps=True,
-    ):
-        project_dir = workspace / "poetry-fixture-{}".format(name)
+        name: str | None = None,
+        dependencies: dict[str, str] | None = None,
+        dev_dependencies: dict[str, str] | None = None,
+        pyproject_content: str | None = None,
+        poetry_lock_content: str | None = None,
+        install_deps: bool = True,
+        source: Path | None = None,
+        locker_config: dict[str, Any] | None = None,
+    ) -> Poetry:
+        project_dir = workspace / f"poetry-fixture-{name}"
         dependencies = dependencies or {}
         dev_dependencies = dev_dependencies or {}
 
-        if pyproject_content:
-            project_dir.mkdir(parents=True, exist_ok=True)
-            with project_dir.joinpath("pyproject.toml").open(
-                "w", encoding="utf-8"
-            ) as f:
-                f.write(pyproject_content)
+        if pyproject_content or source:
+            if source:
+                project_dir.parent.mkdir(parents=True, exist_ok=True)
+                shutil.copytree(source, project_dir)
+            else:
+                project_dir.mkdir(parents=True, exist_ok=True)
+
+            if pyproject_content:
+                with project_dir.joinpath("pyproject.toml").open(
+                    "w", encoding="utf-8"
+                ) as f:
+                    f.write(pyproject_content)
         else:
             layout("src")(
                 name,
@@ -250,11 +409,15 @@ def _factory(
                 dev_dependencies=dev_dependencies,
             ).create(project_dir, with_tests=False)
 
+        if poetry_lock_content:
+            lock_file = project_dir / "poetry.lock"
+            lock_file.write_text(data=poetry_lock_content, encoding="utf-8")
+
         poetry = Factory().create_poetry(project_dir)
 
         locker = TestLocker(
-            poetry.locker.lock.path, poetry.locker._local_config
-        )  # noqa
+            poetry.locker.lock.path, locker_config or poetry.locker._local_config
+        )
         locker.write()
 
         poetry.set_locker(locker)
@@ -278,40 +441,49 @@ def _factory(
 
 
 @pytest.fixture
-def command_tester_factory(app, env):
-    def _tester(command, poetry=None, installer=None, executor=None, environment=None):
-        command = app.find(command)
-        tester = CommandTester(command)
+def project_root() -> Path:
+    return Path(__file__).parent.parent
 
-        if poetry:
-            app._poetry = poetry
 
-        poetry = app.poetry
-        command._pool = poetry.pool
+@pytest.fixture(autouse=True)
+def set_simple_log_formatter() -> None:
+    """
+    This fixture removes any formatting added via IOFormatter.
+    """
+    for name in logging.Logger.manager.loggerDict:
+        for handler in logging.getLogger(name).handlers:
+            # replace formatter with simple formatter for testing
+            handler.setFormatter(logging.Formatter(fmt="%(message)s"))
 
-        if hasattr(command, "set_env"):
-            command.set_env(environment or env)
 
-        if hasattr(command, "set_installer"):
-            installer = installer or Installer(
-                tester.io,
-                env,
-                poetry.package,
-                poetry.locker,
-                poetry.pool,
-                poetry.config,
-                executor=executor
-                or TestExecutor(env, poetry.pool, poetry.config, tester.io),
-            )
-            installer.use_executor(True)
-            command.set_installer(installer)
+@pytest.fixture
+def fixture_copier(fixture_base: Path, tmp_dir: str) -> FixtureCopier:
+    def _copy(relative_path: str, target: Path | None = None) -> Path:
+        path = fixture_base.joinpath(relative_path)
+        target = target or Path(tmp_dir, relative_path)
+        target.parent.mkdir(parents=True, exist_ok=True)
 
-        return tester
+        if target.exists():
+            return target
+
+        if path.is_dir():
+            shutil.copytree(path, target)
+        else:
+            shutil.copyfile(path, target)
 
-    return _tester
+        return target
+
+    return _copy
 
 
 @pytest.fixture
-def do_lock(command_tester_factory, poetry):
-    command_tester_factory("lock").execute()
-    assert poetry.locker.lock.exists()
+def required_fixtures() -> list[str]:
+    return []
+
+
+@pytest.fixture(autouse=True)
+def load_required_fixtures(
+    required_fixtures: list[str], fixture_copier: FixtureCopier
+) -> None:
+    for fixture in required_fixtures:
+        fixture_copier(fixture)
diff --git a/vendor/poetry/tests/console/commands/cache/__init__.py b/vendor/poetry/tests/console/commands/cache/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/tests/console/commands/cache/conftest.py b/vendor/poetry/tests/console/commands/cache/conftest.py
new file mode 100644
index 00000000..c3d89ba0
--- /dev/null
+++ b/vendor/poetry/tests/console/commands/cache/conftest.py
@@ -0,0 +1,65 @@
+from __future__ import annotations
+
+import uuid
+
+from typing import TYPE_CHECKING
+
+import pytest
+
+from cachy import CacheManager
+
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from _pytest.monkeypatch import MonkeyPatch
+
+    from tests.conftest import Config
+
+
+@pytest.fixture
+def repository_cache_dir(monkeypatch: MonkeyPatch, config: Config) -> Path:
+    return config.repository_cache_directory
+
+
+@pytest.fixture
+def repository_one() -> str:
+    return f"01_{uuid.uuid4()}"
+
+
+@pytest.fixture
+def repository_two() -> str:
+    return f"02_{uuid.uuid4()}"
+
+
+@pytest.fixture
+def mock_caches(
+    repository_cache_dir: Path,
+    repository_one: str,
+    repository_two: str,
+) -> None:
+    (repository_cache_dir / repository_one).mkdir(parents=True)
+    (repository_cache_dir / repository_two).mkdir(parents=True)
+
+
+@pytest.fixture
+def cache(
+    repository_cache_dir: Path,
+    repository_one: str,
+    mock_caches: None,
+) -> CacheManager:
+    cache = CacheManager(
+        {
+            "default": repository_one,
+            "serializer": "json",
+            "stores": {
+                repository_one: {
+                    "driver": "file",
+                    "path": str(repository_cache_dir / repository_one),
+                }
+            },
+        }
+    )
+    cache.remember_forever("cachy:0.1", lambda: {"name": "cachy", "version": "0.1"})
+    cache.remember_forever("cleo:0.2", lambda: {"name": "cleo", "version": "0.2"})
+    return cache
diff --git a/vendor/poetry/tests/console/commands/cache/test_clear.py b/vendor/poetry/tests/console/commands/cache/test_clear.py
new file mode 100644
index 00000000..ce0243f4
--- /dev/null
+++ b/vendor/poetry/tests/console/commands/cache/test_clear.py
@@ -0,0 +1,81 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+
+from cleo.testers.application_tester import ApplicationTester
+
+from poetry.console.application import Application
+
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from cachy import CacheManager
+
+
+@pytest.fixture
+def tester() -> ApplicationTester:
+    app = Application()
+
+    tester = ApplicationTester(app)
+    return tester
+
+
+def test_cache_clear_all(
+    tester: ApplicationTester,
+    repository_one: str,
+    repository_cache_dir: Path,
+    cache: CacheManager,
+):
+    exit_code = tester.execute(f"cache clear {repository_one} --all", inputs="yes")
+
+    assert exit_code == 0
+    assert tester.io.fetch_output() == ""
+    # ensure directory is empty
+    assert not any((repository_cache_dir / repository_one).iterdir())
+    assert not cache.has("cachy:0.1")
+    assert not cache.has("cleo:0.2")
+
+
+def test_cache_clear_all_no(
+    tester: ApplicationTester,
+    repository_one: str,
+    repository_cache_dir: Path,
+    cache: CacheManager,
+):
+    exit_code = tester.execute(f"cache clear {repository_one} --all", inputs="no")
+
+    assert exit_code == 0
+    assert tester.io.fetch_output() == ""
+    # ensure directory is not empty
+    assert any((repository_cache_dir / repository_one).iterdir())
+    assert cache.has("cachy:0.1")
+    assert cache.has("cleo:0.2")
+
+
+def test_cache_clear_pkg(
+    tester: ApplicationTester,
+    repository_one: str,
+    cache: CacheManager,
+):
+    exit_code = tester.execute(f"cache clear {repository_one}:cachy:0.1", inputs="yes")
+
+    assert exit_code == 0
+    assert tester.io.fetch_output() == ""
+    assert not cache.has("cachy:0.1")
+    assert cache.has("cleo:0.2")
+
+
+def test_cache_clear_pkg_no(
+    tester: ApplicationTester,
+    repository_one: str,
+    cache: CacheManager,
+):
+    exit_code = tester.execute(f"cache clear {repository_one}:cachy:0.1", inputs="no")
+
+    assert exit_code == 0
+    assert tester.io.fetch_output() == ""
+    assert cache.has("cachy:0.1")
+    assert cache.has("cleo:0.2")
diff --git a/vendor/poetry/tests/console/commands/cache/test_list.py b/vendor/poetry/tests/console/commands/cache/test_list.py
new file mode 100644
index 00000000..22db34cb
--- /dev/null
+++ b/vendor/poetry/tests/console/commands/cache/test_list.py
@@ -0,0 +1,41 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from cleo.testers.command_tester import CommandTester
+
+    from tests.types import CommandTesterFactory
+
+
+@pytest.fixture
+def tester(command_tester_factory: CommandTesterFactory) -> CommandTester:
+    return command_tester_factory("cache list")
+
+
+def test_cache_list(
+    tester: CommandTester, mock_caches: None, repository_one: str, repository_two: str
+):
+    tester.execute()
+
+    expected = f"""\
+{repository_one}
+{repository_two}
+"""
+
+    assert tester.io.fetch_output() == expected
+
+
+def test_cache_list_empty(tester: CommandTester, repository_cache_dir: Path):
+    tester.execute()
+
+    expected = """\
+No caches found
+"""
+
+    assert tester.io.fetch_error() == expected
diff --git a/vendor/poetry/tests/console/commands/debug/test_resolve.py b/vendor/poetry/tests/console/commands/debug/test_resolve.py
index ad7e9432..ddb45d4d 100644
--- a/vendor/poetry/tests/console/commands/debug/test_resolve.py
+++ b/vendor/poetry/tests/console/commands/debug/test_resolve.py
@@ -1,22 +1,39 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
 import pytest
 
 from poetry.factory import Factory
 from tests.helpers import get_package
 
 
+if TYPE_CHECKING:
+    from cleo.testers.command_tester import CommandTester
+
+    from tests.helpers import TestRepository
+    from tests.types import CommandTesterFactory
+
+
 @pytest.fixture()
-def tester(command_tester_factory):
+def tester(command_tester_factory: CommandTesterFactory) -> CommandTester:
     return command_tester_factory("debug resolve")
 
 
-def test_debug_resolve_gives_resolution_results(tester, repo):
-    cachy2 = get_package("cachy", "0.2.0")
-    cachy2.add_dependency(Factory.create_dependency("msgpack-python", ">=0.5 <0.6"))
+@pytest.fixture(autouse=True)
+def __add_packages(repo: TestRepository) -> None:
+    cachy020 = get_package("cachy", "0.2.0")
+    cachy020.add_dependency(Factory.create_dependency("msgpack-python", ">=0.5 <0.6"))
 
     repo.add_package(get_package("cachy", "0.1.0"))
-    repo.add_package(cachy2)
+    repo.add_package(cachy020)
     repo.add_package(get_package("msgpack-python", "0.5.3"))
 
+    repo.add_package(get_package("pendulum", "2.0.3"))
+    repo.add_package(get_package("cleo", "0.6.5"))
+
+
+def test_debug_resolve_gives_resolution_results(tester: CommandTester):
     tester.execute("cachy")
 
     expected = """\
@@ -28,17 +45,10 @@ def test_debug_resolve_gives_resolution_results(tester, repo):
 cachy          0.2.0
 """
 
-    assert expected == tester.io.fetch_output()
-
+    assert tester.io.fetch_output() == expected
 
-def test_debug_resolve_tree_option_gives_the_dependency_tree(tester, repo):
-    cachy2 = get_package("cachy", "0.2.0")
-    cachy2.add_dependency(Factory.create_dependency("msgpack-python", ">=0.5 <0.6"))
-
-    repo.add_package(get_package("cachy", "0.1.0"))
-    repo.add_package(cachy2)
-    repo.add_package(get_package("msgpack-python", "0.5.3"))
 
+def test_debug_resolve_tree_option_gives_the_dependency_tree(tester: CommandTester):
     tester.execute("cachy --tree")
 
     expected = """\
@@ -47,16 +57,13 @@ def test_debug_resolve_tree_option_gives_the_dependency_tree(tester, repo):
 Resolution results:
 
 cachy 0.2.0
-`-- msgpack-python >=0.5 <0.6
+└── msgpack-python >=0.5 <0.6
 """
 
-    assert expected == tester.io.fetch_output()
-
+    assert tester.io.fetch_output() == expected
 
-def test_debug_resolve_git_dependency(tester, repo):
-    repo.add_package(get_package("pendulum", "2.0.3"))
-    repo.add_package(get_package("cleo", "0.6.5"))
 
+def test_debug_resolve_git_dependency(tester: CommandTester):
     tester.execute("git+https://github.com/demo/demo.git")
 
     expected = """\
@@ -68,4 +75,4 @@ def test_debug_resolve_git_dependency(tester, repo):
 demo     0.1.2
 """
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
diff --git a/vendor/poetry/tests/console/commands/env/conftest.py b/vendor/poetry/tests/console/commands/env/conftest.py
index 5fbddf1a..d7caccd7 100644
--- a/vendor/poetry/tests/console/commands/env/conftest.py
+++ b/vendor/poetry/tests/console/commands/env/conftest.py
@@ -1,45 +1,59 @@
+from __future__ import annotations
+
 import os
 
+from pathlib import Path
+from typing import TYPE_CHECKING
+
 import pytest
 
-from poetry.utils._compat import Path
 from poetry.utils.env import EnvManager
 
 
+if TYPE_CHECKING:
+    from collections.abc import Iterator
+
+    from tests.helpers import PoetryTestApplication
+
+
 @pytest.fixture
-def venv_name(app):
+def venv_name(app: PoetryTestApplication) -> str:
     return EnvManager.generate_env_name("simple-project", str(app.poetry.file.parent))
 
 
 @pytest.fixture
-def venv_cache(tmp_dir):
+def venv_cache(tmp_dir: str) -> Path:
     return Path(tmp_dir)
 
 
 @pytest.fixture(scope="module")
-def python_versions():
+def python_versions() -> list[str]:
     return ["3.6", "3.7"]
 
 
 @pytest.fixture
-def venvs_in_cache_config(app, venv_cache):
+def venvs_in_cache_config(app: PoetryTestApplication, venv_cache: Path) -> None:
     app.poetry.config.merge({"virtualenvs": {"path": str(venv_cache)}})
 
 
 @pytest.fixture
 def venvs_in_cache_dirs(
-    app, venvs_in_cache_config, venv_name, venv_cache, python_versions
-):
+    app: PoetryTestApplication,
+    venvs_in_cache_config: None,
+    venv_name: str,
+    venv_cache: Path,
+    python_versions: list[str],
+) -> list[str]:
     directories = []
     for version in python_versions:
-        directory = venv_cache.joinpath("{}-py{}".format(venv_name, version))
+        directory = venv_cache.joinpath(f"{venv_name}-py{version}")
         directory.mkdir(parents=True, exist_ok=True)
         directories.append(directory.name)
     return directories
 
 
 @pytest.fixture
-def venvs_in_project_dir(app):
+def venvs_in_project_dir(app: PoetryTestApplication) -> Iterator[Path]:
     os.environ.pop("VIRTUAL_ENV", None)
     venv_dir = app.poetry.file.parent.joinpath(".venv")
     venv_dir.mkdir(exist_ok=True)
diff --git a/vendor/poetry/tests/console/commands/env/helpers.py b/vendor/poetry/tests/console/commands/env/helpers.py
index 17d4c2ac..c9dc0a7a 100644
--- a/vendor/poetry/tests/console/commands/env/helpers.py
+++ b/vendor/poetry/tests/console/commands/env/helpers.py
@@ -1,20 +1,34 @@
-from typing import Optional
-from typing import Union
+from __future__ import annotations
 
-from poetry.core.semver import Version
-from poetry.utils._compat import Path
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
 
+from poetry.core.semver.version import Version
 
-def build_venv(path, executable=None):  # type: (Union[Path,str], Optional[str]) -> ()
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+
+    from poetry.core.version.pep440.version import PEP440Version
+
+VERSION_3_7_1 = Version.parse("3.7.1")
+
+
+def build_venv(path: Path | str, **_: Any) -> None:
     Path(path).mkdir(parents=True, exist_ok=True)
 
 
-def check_output_wrapper(version=Version.parse("3.7.1")):
-    def check_output(cmd, *args, **kwargs):
+def check_output_wrapper(
+    version: PEP440Version = VERSION_3_7_1,
+) -> Callable[[str, Any, Any], str]:
+    def check_output(cmd: str, *_: Any, **__: Any) -> str:
         if "sys.version_info[:3]" in cmd:
             return version.text
         elif "sys.version_info[:2]" in cmd:
-            return "{}.{}".format(version.major, version.minor)
+            return f"{version.major}.{version.minor}"
+        elif '-c "import sys; print(sys.executable)"' in cmd:
+            return f"/usr/bin/{cmd.split()[0]}"
         else:
             return str(Path("/prefix"))
 
diff --git a/vendor/poetry/tests/console/commands/env/test_info.py b/vendor/poetry/tests/console/commands/env/test_info.py
index 9d1a0c84..e4f5826e 100644
--- a/vendor/poetry/tests/console/commands/env/test_info.py
+++ b/vendor/poetry/tests/console/commands/env/test_info.py
@@ -1,11 +1,24 @@
+from __future__ import annotations
+
+import sys
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+
 import pytest
 
-from poetry.utils._compat import Path
 from poetry.utils.env import MockEnv
 
 
+if TYPE_CHECKING:
+    from cleo.testers.command_tester import CommandTester
+    from pytest_mock import MockerFixture
+
+    from tests.types import CommandTesterFactory
+
+
 @pytest.fixture(autouse=True)
-def setup(mocker):
+def setup(mocker: MockerFixture) -> None:
     mocker.patch(
         "poetry.utils.env.EnvManager.get",
         return_value=MockEnv(
@@ -15,32 +28,33 @@ def setup(mocker):
 
 
 @pytest.fixture
-def tester(command_tester_factory):
+def tester(command_tester_factory: CommandTesterFactory) -> CommandTester:
     return command_tester_factory("env info")
 
 
-def test_env_info_displays_complete_info(tester):
+def test_env_info_displays_complete_info(tester: CommandTester):
     tester.execute()
 
-    expected = """
+    expected = f"""
 Virtualenv
 Python:         3.7.0
 Implementation: CPython
-Path:           {prefix}
+Path:           {Path('/prefix')}
+Executable:     {sys.executable}
 Valid:          True
 
 System
-Platform: darwin
-OS:       posix
-Python:   {base_prefix}
-""".format(
-        prefix=str(Path("/prefix")), base_prefix=str(Path("/base/prefix"))
-    )
+Platform:   darwin
+OS:         posix
+Python:     {'.'.join(str(v) for v in sys.version_info[:3])}
+Path:       {Path('/base/prefix')}
+Executable: python
+"""
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
 
 
-def test_env_info_displays_path_only(tester):
+def test_env_info_displays_path_only(tester: CommandTester):
     tester.execute("--path")
-    expected = str(Path("/prefix"))
-    assert expected + "\n" == tester.io.fetch_output()
+    expected = str(Path("/prefix")) + "\n"
+    assert tester.io.fetch_output() == expected
diff --git a/vendor/poetry/tests/console/commands/env/test_list.py b/vendor/poetry/tests/console/commands/env/test_list.py
index 2495c1d3..2f284470 100644
--- a/vendor/poetry/tests/console/commands/env/test_list.py
+++ b/vendor/poetry/tests/console/commands/env/test_list.py
@@ -1,11 +1,25 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
 import pytest
 import tomlkit
 
 from poetry.core.toml.file import TOMLFile
 
 
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from cleo.testers.command_tester import CommandTester
+    from pytest_mock import MockerFixture
+
+    from poetry.utils.env import MockEnv
+    from tests.types import CommandTesterFactory
+
+
 @pytest.fixture
-def venv_activate_37(venv_cache, venv_name):
+def venv_activate_37(venv_cache: Path, venv_name: str) -> None:
     envs_file = TOMLFile(venv_cache / "envs.toml")
     doc = tomlkit.document()
     doc[venv_name] = {"minor": "3.7", "patch": "3.7.0"}
@@ -13,26 +27,36 @@ def venv_activate_37(venv_cache, venv_name):
 
 
 @pytest.fixture
-def tester(command_tester_factory):
+def tester(command_tester_factory: CommandTesterFactory) -> CommandTester:
     return command_tester_factory("env list")
 
 
-def test_none_activated(tester, venvs_in_cache_dirs, mocker, env):
+def test_none_activated(
+    tester: CommandTester,
+    venvs_in_cache_dirs: list[str],
+    mocker: MockerFixture,
+    env: MockEnv,
+):
     mocker.patch("poetry.utils.env.EnvManager.get", return_value=env)
     tester.execute()
     expected = "\n".join(venvs_in_cache_dirs).strip()
-    assert expected == tester.io.fetch_output().strip()
+    assert tester.io.fetch_output().strip() == expected
 
 
-def test_activated(tester, venvs_in_cache_dirs, venv_cache, venv_activate_37):
+def test_activated(
+    tester: CommandTester,
+    venvs_in_cache_dirs: list[str],
+    venv_cache: Path,
+    venv_activate_37: None,
+):
     tester.execute()
     expected = (
         "\n".join(venvs_in_cache_dirs).strip().replace("py3.7", "py3.7 (Activated)")
     )
-    assert expected == tester.io.fetch_output().strip()
+    assert tester.io.fetch_output().strip() == expected
 
 
-def test_in_project_venv(tester, venvs_in_project_dir):
+def test_in_project_venv(tester: CommandTester, venvs_in_project_dir: list[str]):
     tester.execute()
     expected = ".venv (Activated)\n"
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
diff --git a/vendor/poetry/tests/console/commands/env/test_remove.py b/vendor/poetry/tests/console/commands/env/test_remove.py
index 2b4f3ae7..0511e4ec 100644
--- a/vendor/poetry/tests/console/commands/env/test_remove.py
+++ b/vendor/poetry/tests/console/commands/env/test_remove.py
@@ -1,34 +1,55 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
 import pytest
 
 from poetry.core.semver.version import Version
+
 from tests.console.commands.env.helpers import check_output_wrapper
 
 
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from cleo.testers.command_tester import CommandTester
+    from pytest_mock import MockerFixture
+
+    from tests.types import CommandTesterFactory
+
+
 @pytest.fixture
-def tester(command_tester_factory):
+def tester(command_tester_factory: CommandTesterFactory) -> CommandTester:
     return command_tester_factory("env remove")
 
 
 def test_remove_by_python_version(
-    mocker, tester, venvs_in_cache_dirs, venv_name, venv_cache
+    mocker: MockerFixture,
+    tester: CommandTester,
+    venvs_in_cache_dirs: list[str],
+    venv_name: str,
+    venv_cache: Path,
 ):
     check_output = mocker.patch(
-        "poetry.utils._compat.subprocess.check_output",
+        "subprocess.check_output",
         side_effect=check_output_wrapper(Version.parse("3.6.6")),
     )
 
     tester.execute("3.6")
 
     assert check_output.called
-    assert not (venv_cache / "{}-py3.6".format(venv_name)).exists()
+    assert not (venv_cache / f"{venv_name}-py3.6").exists()
 
-    expected = "Deleted virtualenv: {}\n".format(
-        (venv_cache / "{}-py3.6".format(venv_name))
-    )
-    assert expected == tester.io.fetch_output()
+    expected = f"Deleted virtualenv: {venv_cache / venv_name}-py3.6\n"
+    assert tester.io.fetch_output() == expected
 
 
-def test_remove_by_name(tester, venvs_in_cache_dirs, venv_name, venv_cache):
+def test_remove_by_name(
+    tester: CommandTester,
+    venvs_in_cache_dirs: list[str],
+    venv_name: str,
+    venv_cache: Path,
+):
     expected = ""
 
     for name in venvs_in_cache_dirs:
@@ -36,6 +57,52 @@ def test_remove_by_name(tester, venvs_in_cache_dirs, venv_name, venv_cache):
 
         assert not (venv_cache / name).exists()
 
-        expected += "Deleted virtualenv: {}\n".format((venv_cache / name))
+        expected += f"Deleted virtualenv: {venv_cache / name}\n"
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
+
+
+def test_remove_all(
+    tester: CommandTester,
+    venvs_in_cache_dirs: list[str],
+    venv_name: str,
+    venv_cache: Path,
+):
+    expected = {""}
+    tester.execute("--all")
+    for name in venvs_in_cache_dirs:
+        assert not (venv_cache / name).exists()
+        expected.add(f"Deleted virtualenv: {venv_cache / name}")
+    assert set(tester.io.fetch_output().split("\n")) == expected
+
+
+def test_remove_all_and_version(
+    tester: CommandTester,
+    venvs_in_cache_dirs: list[str],
+    venv_name: str,
+    venv_cache: Path,
+):
+    expected = {""}
+    tester.execute(f"--all {venvs_in_cache_dirs[0]}")
+    for name in venvs_in_cache_dirs:
+        assert not (venv_cache / name).exists()
+        expected.add(f"Deleted virtualenv: {venv_cache / name}")
+    assert set(tester.io.fetch_output().split("\n")) == expected
+
+
+def test_remove_multiple(
+    tester: CommandTester,
+    venvs_in_cache_dirs: list[str],
+    venv_name: str,
+    venv_cache: Path,
+):
+    expected = {""}
+    removed_envs = venvs_in_cache_dirs[0:2]
+    remaining_envs = venvs_in_cache_dirs[2:]
+    tester.execute(" ".join(removed_envs))
+    for name in removed_envs:
+        assert not (venv_cache / name).exists()
+        expected.add(f"Deleted virtualenv: {venv_cache / name}")
+    for name in remaining_envs:
+        assert (venv_cache / name).exists()
+    assert set(tester.io.fetch_output().split("\n")) == expected
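
Editorial aside, not part of the patch: the new removal tests seed the expected
set with an empty string because splitting output that ends in a newline yields a
trailing "" element, and comparing sets also makes the assertion independent of
deletion order. For example:

    output = "Deleted virtualenv: /cache/a\nDeleted virtualenv: /cache/b\n"
    expected = {""}
    for name in ("b", "a"):  # insertion order is irrelevant to the comparison
        expected.add(f"Deleted virtualenv: /cache/{name}")
    assert set(output.split("\n")) == expected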
diff --git a/vendor/poetry/tests/console/commands/env/test_use.py b/vendor/poetry/tests/console/commands/env/test_use.py
index 563e0ac7..e3287736 100644
--- a/vendor/poetry/tests/console/commands/env/test_use.py
+++ b/vendor/poetry/tests/console/commands/env/test_use.py
@@ -1,45 +1,63 @@
+from __future__ import annotations
+
 import os
 
+from pathlib import Path
+from typing import TYPE_CHECKING
+
 import pytest
 import tomlkit
 
-from poetry.core.semver import Version
+from poetry.core.semver.version import Version
 from poetry.core.toml.file import TOMLFile
-from poetry.utils._compat import Path
+
 from poetry.utils.env import MockEnv
 from tests.console.commands.env.helpers import build_venv
 from tests.console.commands.env.helpers import check_output_wrapper
 
 
+if TYPE_CHECKING:
+    from cleo.testers.command_tester import CommandTester
+    from pytest_mock import MockerFixture
+
+    from tests.types import CommandTesterFactory
+
+
 @pytest.fixture(autouse=True)
-def setup(mocker):
+def setup(mocker: MockerFixture) -> None:
     mocker.stopall()
     if "VIRTUAL_ENV" in os.environ:
         del os.environ["VIRTUAL_ENV"]
 
 
 @pytest.fixture(autouse=True)
-def mock_subprocess_calls(setup, current_python, mocker):
+def mock_subprocess_calls(
+    setup: None, current_python: tuple[int, int, int], mocker: MockerFixture
+) -> None:
     mocker.patch(
-        "poetry.utils._compat.subprocess.check_output",
-        side_effect=check_output_wrapper(Version(*current_python)),
+        "subprocess.check_output",
+        side_effect=check_output_wrapper(Version.from_parts(*current_python)),
     )
     mocker.patch(
-        "poetry.utils._compat.subprocess.Popen.communicate",
+        "subprocess.Popen.communicate",
         side_effect=[("/prefix", None), ("/prefix", None), ("/prefix", None)],
     )
 
 
 @pytest.fixture
-def tester(command_tester_factory):
+def tester(command_tester_factory: CommandTesterFactory) -> CommandTester:
     return command_tester_factory("env use")
 
 
 def test_activate_activates_non_existing_virtualenv_no_envs_file(
-    mocker, tester, venv_cache, venv_name, venvs_in_cache_config
+    mocker: MockerFixture,
+    tester: CommandTester,
+    venv_cache: Path,
+    venv_name: str,
+    venvs_in_cache_config: None,
 ):
     mocker.patch(
-        "poetry.utils._compat.subprocess.check_output",
+        "subprocess.check_output",
         side_effect=check_output_wrapper(),
     )
 
@@ -49,8 +67,18 @@ def test_activate_activates_non_existing_virtualenv_no_envs_file(
 
     tester.execute("3.7")
 
-    venv_py37 = venv_cache / "{}-py3.7".format(venv_name)
-    mock_build_env.assert_called_with(venv_py37, executable="python3.7")
+    venv_py37 = venv_cache / f"{venv_name}-py3.7"
+    mock_build_env.assert_called_with(
+        venv_py37,
+        executable="/usr/bin/python3.7",
+        flags={
+            "always-copy": False,
+            "system-site-packages": False,
+            "no-pip": False,
+            "no-setuptools": False,
+        },
+        prompt="simple-project-py3.7",
+    )
 
     envs_file = TOMLFile(venv_cache / "envs.toml")
     assert envs_file.exists()
@@ -58,24 +86,26 @@ def test_activate_activates_non_existing_virtualenv_no_envs_file(
     assert envs[venv_name]["minor"] == "3.7"
     assert envs[venv_name]["patch"] == "3.7.1"
 
-    expected = """\
-Creating virtualenv {} in {}
-Using virtualenv: {}
-""".format(
-        venv_py37.name, venv_py37.parent, venv_py37,
-    )
+    expected = f"""\
+Creating virtualenv {venv_py37.name} in {venv_py37.parent}
+Using virtualenv: {venv_py37}
+"""
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
 
 
 def test_get_prefers_explicitly_activated_virtualenvs_over_env_var(
-    tester, current_python, venv_cache, venv_name, venvs_in_cache_config
+    tester: CommandTester,
+    current_python: tuple[int, int, int],
+    venv_cache: Path,
+    venv_name: str,
+    venvs_in_cache_config: None,
 ):
     os.environ["VIRTUAL_ENV"] = "/environment/prefix"
 
     python_minor = ".".join(str(v) for v in current_python[:2])
     python_patch = ".".join(str(v) for v in current_python[:3])
-    venv_dir = venv_cache / "{}-py{}".format(venv_name, python_minor)
+    venv_dir = venv_cache / f"{venv_name}-py{python_minor}"
     venv_dir.mkdir(parents=True, exist_ok=True)
 
     envs_file = TOMLFile(venv_cache / "envs.toml")
@@ -85,22 +115,25 @@ def test_get_prefers_explicitly_activated_virtualenvs_over_env_var(
 
     tester.execute(python_minor)
 
-    expected = """\
-Using virtualenv: {}
-""".format(
-        venv_dir
-    )
+    expected = f"""\
+Using virtualenv: {venv_dir}
+"""
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
 
 
 def test_get_prefers_explicitly_activated_non_existing_virtualenvs_over_env_var(
-    mocker, tester, current_python, venv_cache, venv_name, venvs_in_cache_config
+    mocker: MockerFixture,
+    tester: CommandTester,
+    current_python: tuple[int, int, int],
+    venv_cache: Path,
+    venv_name: str,
+    venvs_in_cache_config: None,
 ):
     os.environ["VIRTUAL_ENV"] = "/environment/prefix"
 
     python_minor = ".".join(str(v) for v in current_python[:2])
-    venv_dir = venv_cache / "{}-py{}".format(venv_name, python_minor)
+    venv_dir = venv_cache / f"{venv_name}-py{python_minor}"
 
     mocker.patch(
         "poetry.utils.env.EnvManager._env",
@@ -116,11 +149,9 @@ def test_get_prefers_explicitly_activated_non_existing_virtualenvs_over_env_var(
 
     tester.execute(python_minor)
 
-    expected = """\
-Creating virtualenv {} in {}
-Using virtualenv: {}
-""".format(
-        venv_dir.name, venv_dir.parent, venv_dir,
-    )
+    expected = f"""\
+Creating virtualenv {venv_dir.name} in {venv_dir.parent}
+Using virtualenv: {venv_dir}
+"""
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
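
Editorial aside, not part of the patch: replacing Version(*current_python) with
Version.from_parts(*current_python) tracks poetry-core 1.x, where versions are
constructed via classmethods rather than positional __init__ arguments. Assuming
poetry-core is importable, the two entry points agree:

    from poetry.core.semver.version import Version

    assert Version.from_parts(3, 7, 1) == Version.parse("3.7.1")
    assert Version.parse("3.7.1").major == 3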
diff --git a/vendor/poetry/tests/console/commands/self/__init__.py b/vendor/poetry/tests/console/commands/self/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/tests/console/commands/self/conftest.py b/vendor/poetry/tests/console/commands/self/conftest.py
new file mode 100644
index 00000000..6a175b5f
--- /dev/null
+++ b/vendor/poetry/tests/console/commands/self/conftest.py
@@ -0,0 +1,58 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+
+from poetry.core.packages.package import Package
+
+from poetry.__version__ import __version__
+from poetry.repositories import Pool
+from poetry.utils.env import EnvManager
+
+
+if TYPE_CHECKING:
+    from collections.abc import Iterator
+
+    import httpretty
+
+    from pytest_mock import MockerFixture
+
+    from poetry.repositories.repository import Repository
+    from poetry.utils.env import VirtualEnv
+    from tests.helpers import TestRepository
+
+
+@pytest.fixture(autouse=True)
+def _patch_repos(repo: TestRepository, installed: Repository) -> None:
+    poetry = Package("poetry", __version__)
+    repo.add_package(poetry)
+    installed.add_package(poetry)
+
+
+@pytest.fixture(autouse=True)
+def save_environ(environ: None) -> Iterator[None]:
+    yield
+
+
+@pytest.fixture()
+def pool(repo: TestRepository) -> Pool:
+    return Pool([repo])
+
+
+@pytest.fixture(autouse=True)
+def setup_mocks(
+    mocker: MockerFixture,
+    tmp_venv: VirtualEnv,
+    installed: Repository,
+    pool: Pool,
+    http: type[httpretty.httpretty],
+) -> None:
+    mocker.patch.object(EnvManager, "get_system_env", return_value=tmp_venv)
+    mocker.patch("poetry.repositories.pool.Pool.find_packages", pool.find_packages)
+    mocker.patch("poetry.repositories.pool.Pool.package", pool.package)
+    mocker.patch("poetry.installation.executor.pip_install")
+    mocker.patch(
+        "poetry.installation.installer.Installer._get_installed",
+        return_value=installed,
+    )
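
Editorial aside, not part of the patch: setup_mocks stubs the environment and
installer layers so the `self` command tests never touch the real system. The
core mechanism is pytest-mock's patching; a minimal runnable illustration
(EnvManagerLike is a stand-in class; run under pytest with pytest-mock installed):

    class EnvManagerLike:
        def get_system_env(self) -> str:
            return "real-env"

    def test_patch_object(mocker):
        # Replace the method on the class for the duration of the test.
        mocker.patch.object(EnvManagerLike, "get_system_env", return_value="tmp-venv")
        assert EnvManagerLike().get_system_env() == "tmp-venv"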
diff --git a/vendor/poetry/tests/console/commands/self/utils.py b/vendor/poetry/tests/console/commands/self/utils.py
new file mode 100644
index 00000000..793f36a0
--- /dev/null
+++ b/vendor/poetry/tests/console/commands/self/utils.py
@@ -0,0 +1,33 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+from poetry.factory import Factory
+
+
+if TYPE_CHECKING:
+    from tomlkit.container import Table as TOMLTable
+
+
+def get_self_command_dependencies(locked: bool = True) -> TOMLTable:
+    from poetry.console.commands.self.self_command import SelfCommand
+    from poetry.locations import CONFIG_DIR
+
+    system_pyproject_file = SelfCommand.get_default_system_pyproject_file()
+
+    assert system_pyproject_file.exists()
+    assert system_pyproject_file.parent == Path(CONFIG_DIR)
+
+    if locked:
+        assert system_pyproject_file.parent.joinpath("poetry.lock").exists()
+
+    poetry = Factory().create_poetry(system_pyproject_file.parent, disable_plugins=True)
+
+    content = poetry.file.read()["tool"]["poetry"]
+
+    assert "group" in content
+    assert SelfCommand.ADDITIONAL_PACKAGE_GROUP in content["group"]
+    assert "dependencies" in content["group"][SelfCommand.ADDITIONAL_PACKAGE_GROUP]
+
+    return content["group"][SelfCommand.ADDITIONAL_PACKAGE_GROUP]["dependencies"]
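
Editorial aside, not part of the patch: the helper returns the dependencies table
of the additional-package group from the system pyproject. Its shape can be
reproduced with tomlkit alone (the group name "app" and the pin below are
illustrative):

    import tomlkit

    doc = tomlkit.parse('[tool.poetry.group.app.dependencies]\npoetry-plugin = "^1.2.3"\n')
    deps = doc["tool"]["poetry"]["group"]["app"]["dependencies"]
    assert deps["poetry-plugin"] == "^1.2.3"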
diff --git a/vendor/poetry/tests/console/commands/source/__init__.py b/vendor/poetry/tests/console/commands/source/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/tests/console/commands/source/conftest.py b/vendor/poetry/tests/console/commands/source/conftest.py
new file mode 100644
index 00000000..254b3454
--- /dev/null
+++ b/vendor/poetry/tests/console/commands/source/conftest.py
@@ -0,0 +1,76 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+
+from poetry.config.source import Source
+
+
+if TYPE_CHECKING:
+    from poetry.poetry import Poetry
+    from tests.types import CommandTesterFactory
+    from tests.types import ProjectFactory
+
+
+@pytest.fixture
+def source_one() -> Source:
+    return Source(name="one", url="https://one.com")
+
+
+@pytest.fixture
+def source_two() -> Source:
+    return Source(name="two", url="https://two.com")
+
+
+@pytest.fixture
+def source_default() -> Source:
+    return Source(name="default", url="https://default.com", default=True)
+
+
+@pytest.fixture
+def source_secondary() -> Source:
+    return Source(name="secondary", url="https://secondary.com", secondary=True)
+
+
+_existing_source = Source(name="existing", url="https://existing.com")
+
+
+@pytest.fixture
+def source_existing() -> Source:
+    return _existing_source
+
+
+PYPROJECT_WITH_SOURCES = f"""
+[tool.poetry]
+name = "source-command-test"
+version = "0.1.0"
+description = ""
+authors = ["Poetry Tester "]
+
+[tool.poetry.dependencies]
+python = "^3.9"
+
+[tool.poetry.dev-dependencies]
+
+[[tool.poetry.source]]
+name = "{_existing_source.name}"
+url = "{_existing_source.url}"
+"""
+
+
+@pytest.fixture
+def poetry_with_source(project_factory: ProjectFactory) -> Poetry:
+    return project_factory(pyproject_content=PYPROJECT_WITH_SOURCES)
+
+
+@pytest.fixture
+def add_multiple_sources(
+    command_tester_factory: CommandTesterFactory,
+    poetry_with_source: Poetry,
+    source_one: Source,
+    source_two: Source,
+) -> None:
+    add = command_tester_factory("source add", poetry=poetry_with_source)
+    for source in [source_one, source_two]:
+        add.execute(f"{source.name} {source.url}")
diff --git a/vendor/poetry/tests/console/commands/source/test_add.py b/vendor/poetry/tests/console/commands/source/test_add.py
new file mode 100644
index 00000000..7e43b9e2
--- /dev/null
+++ b/vendor/poetry/tests/console/commands/source/test_add.py
@@ -0,0 +1,104 @@
+from __future__ import annotations
+
+import dataclasses
+
+from typing import TYPE_CHECKING
+
+import pytest
+
+
+if TYPE_CHECKING:
+    from cleo.testers.command_tester import CommandTester
+
+    from poetry.config.source import Source
+    from poetry.poetry import Poetry
+    from tests.types import CommandTesterFactory
+
+
+@pytest.fixture
+def tester(
+    command_tester_factory: CommandTesterFactory, poetry_with_source: Poetry
+) -> CommandTester:
+    return command_tester_factory("source add", poetry=poetry_with_source)
+
+
+def assert_source_added(
+    tester: CommandTester,
+    poetry: Poetry,
+    source_existing: Source,
+    source_added: Source,
+) -> None:
+    assert (
+        tester.io.fetch_output().strip()
+        == f"Adding source with name {source_added.name}."
+    )
+    poetry.pyproject.reload()
+    sources = poetry.get_sources()
+    assert sources == [source_existing, source_added]
+    assert tester.status_code == 0
+
+
+def test_source_add_simple(
+    tester: CommandTester,
+    source_existing: Source,
+    source_one: Source,
+    poetry_with_source: Poetry,
+):
+    tester.execute(f"{source_one.name} {source_one.url}")
+    assert_source_added(tester, poetry_with_source, source_existing, source_one)
+
+
+def test_source_add_default(
+    tester: CommandTester,
+    source_existing: Source,
+    source_default: Source,
+    poetry_with_source: Poetry,
+):
+    tester.execute(f"--default {source_default.name} {source_default.url}")
+    assert_source_added(tester, poetry_with_source, source_existing, source_default)
+
+
+def test_source_add_secondary(
+    tester: CommandTester,
+    source_existing: Source,
+    source_secondary: Source,
+    poetry_with_source: Poetry,
+):
+    tester.execute(f"--secondary {source_secondary.name} {source_secondary.url}")
+    assert_source_added(tester, poetry_with_source, source_existing, source_secondary)
+
+
+def test_source_add_error_default_and_secondary(tester: CommandTester):
+    tester.execute("--default --secondary error https://error.com")
+    assert (
+        tester.io.fetch_error().strip()
+        == "Cannot configure a source as both default and secondary."
+    )
+    assert tester.status_code == 1
+
+
+def test_source_add_error_pypi(tester: CommandTester):
+    tester.execute("pypi https://test.pypi.org/simple/")
+    assert (
+        tester.io.fetch_error().strip()
+        == "Failed to validate addition of pypi: The name [pypi] is reserved for"
+        " repositories"
+    )
+    assert tester.status_code == 1
+
+
+def test_source_add_existing(
+    tester: CommandTester, source_existing: Source, poetry_with_source: Poetry
+):
+    tester.execute(f"--default {source_existing.name} {source_existing.url}")
+    assert (
+        tester.io.fetch_output().strip()
+        == f"Source with name {source_existing.name} already exists. Updating."
+    )
+
+    poetry_with_source.pyproject.reload()
+    sources = poetry_with_source.get_sources()
+
+    assert len(sources) == 1
+    assert sources[0] != source_existing
+    assert sources[0] == dataclasses.replace(source_existing, default=True)
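
Editorial aside, not part of the patch: the final assertions rely on
dataclasses.replace returning a new instance with the named fields overridden, so
the stored source now equals the original plus default=True. A self-contained
analogue (this Source is a simplified stand-in for poetry.config.source.Source):

    import dataclasses

    @dataclasses.dataclass
    class Source:
        name: str
        url: str
        default: bool = False
        secondary: bool = False

    existing = Source("existing", "https://existing.com")
    updated = dataclasses.replace(existing, default=True)
    assert updated != existing
    assert updated == Source("existing", "https://existing.com", default=True)
    assert existing.default is False  # the original instance is untouched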
diff --git a/vendor/poetry/tests/console/commands/source/test_remove.py b/vendor/poetry/tests/console/commands/source/test_remove.py
new file mode 100644
index 00000000..914eb3e0
--- /dev/null
+++ b/vendor/poetry/tests/console/commands/source/test_remove.py
@@ -0,0 +1,48 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+
+
+if TYPE_CHECKING:
+    from cleo.testers.command_tester import CommandTester
+
+    from poetry.config.source import Source
+    from poetry.poetry import Poetry
+    from tests.types import CommandTesterFactory
+
+
+@pytest.fixture
+def tester(
+    command_tester_factory: CommandTesterFactory,
+    poetry_with_source: Poetry,
+    add_multiple_sources: None,
+) -> CommandTester:
+    return command_tester_factory("source remove", poetry=poetry_with_source)
+
+
+def test_source_remove_simple(
+    tester: CommandTester,
+    poetry_with_source: Poetry,
+    source_existing: Source,
+    source_one: Source,
+    source_two: Source,
+):
+    tester.execute(f"{source_existing.name}")
+    assert (
+        tester.io.fetch_output().strip()
+        == f"Removing source with name {source_existing.name}."
+    )
+
+    poetry_with_source.pyproject.reload()
+    sources = poetry_with_source.get_sources()
+    assert sources == [source_one, source_two]
+
+    assert tester.status_code == 0
+
+
+def test_source_remove_error(tester: CommandTester):
+    tester.execute("error")
+    assert tester.io.fetch_error().strip() == "Source with name error was not found."
+    assert tester.status_code == 1
diff --git a/vendor/poetry/tests/console/commands/source/test_show.py b/vendor/poetry/tests/console/commands/source/test_show.py
new file mode 100644
index 00000000..8b975cf4
--- /dev/null
+++ b/vendor/poetry/tests/console/commands/source/test_show.py
@@ -0,0 +1,88 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+
+
+if TYPE_CHECKING:
+    from cleo.testers.command_tester import CommandTester
+
+    from poetry.config.source import Source
+    from poetry.poetry import Poetry
+    from tests.types import CommandTesterFactory
+
+
+@pytest.fixture
+def tester(
+    command_tester_factory: CommandTesterFactory,
+    poetry_with_source: Poetry,
+    add_multiple_sources: None,
+) -> CommandTester:
+    return command_tester_factory("source show", poetry=poetry_with_source)
+
+
+def test_source_show_simple(tester: CommandTester):
+    tester.execute("")
+
+    expected = """\
+name       : existing
+url        : https://existing.com
+default    : no
+secondary  : no
+
+name       : one
+url        : https://one.com
+default    : no
+secondary  : no
+
+name       : two
+url        : https://two.com
+default    : no
+secondary  : no
+""".splitlines()
+    assert [
+        line.strip() for line in tester.io.fetch_output().strip().splitlines()
+    ] == expected
+    assert tester.status_code == 0
+
+
+def test_source_show_one(tester: CommandTester, source_one: Source):
+    tester.execute(f"{source_one.name}")
+
+    expected = """\
+name       : one
+url        : https://one.com
+default    : no
+secondary  : no
+""".splitlines()
+    assert [
+        line.strip() for line in tester.io.fetch_output().strip().splitlines()
+    ] == expected
+    assert tester.status_code == 0
+
+
+def test_source_show_two(tester: CommandTester, source_one: Source, source_two: Source):
+    tester.execute(f"{source_one.name} {source_two.name}")
+
+    expected = """\
+name       : one
+url        : https://one.com
+default    : no
+secondary  : no
+
+name       : two
+url        : https://two.com
+default    : no
+secondary  : no
+""".splitlines()
+    assert [
+        line.strip() for line in tester.io.fetch_output().strip().splitlines()
+    ] == expected
+    assert tester.status_code == 0
+
+
+def test_source_show_error(tester: CommandTester):
+    tester.execute("error")
+    assert tester.io.fetch_error().strip() == "No source found with name(s): error"
+    assert tester.status_code == 1
diff --git a/vendor/poetry/tests/console/commands/test_about.py b/vendor/poetry/tests/console/commands/test_about.py
index d61308ab..8f6902c3 100644
--- a/vendor/poetry/tests/console/commands/test_about.py
+++ b/vendor/poetry/tests/console/commands/test_about.py
@@ -1,18 +1,35 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
 import pytest
 
 
+if TYPE_CHECKING:
+    from cleo.testers.command_tester import CommandTester
+
+    from tests.types import CommandTesterFactory
+
+
 @pytest.fixture()
-def tester(command_tester_factory):
+def tester(command_tester_factory: CommandTesterFactory) -> CommandTester:
     return command_tester_factory("about")
 
 
-def test_about(tester):
+def test_about(tester: CommandTester):
+    from poetry.utils._compat import metadata
+
     tester.execute()
-    expected = """\
+
+    expected = f"""\
 Poetry - Package Management for Python
 
-Poetry is a dependency manager tracking local dependencies of your projects and libraries.
+Version: {metadata.version('poetry')}
+Poetry-Core Version: {metadata.version('poetry-core')}
+
+Poetry is a dependency manager tracking local dependencies of your projects and\
+ libraries.
 See https://github.com/python-poetry/poetry for more information.
 """
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
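
Editorial aside, not part of the patch: the rewritten expectation derives version
numbers from installed package metadata instead of hard-coding them;
poetry.utils._compat.metadata resolves to the standard library's
importlib.metadata on Python 3.8+. Stdlib equivalent (the queried distribution
must be installed, or PackageNotFoundError is raised):

    from importlib import metadata

    assert metadata.version("pytest")  # e.g. "7.1.2"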
diff --git a/vendor/poetry/tests/console/commands/test_add.py b/vendor/poetry/tests/console/commands/test_add.py
index 19998592..72dc8836 100644
--- a/vendor/poetry/tests/console/commands/test_add.py
+++ b/vendor/poetry/tests/console/commands/test_add.py
@@ -1,30 +1,62 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
+from __future__ import annotations
 
 import sys
 
+from pathlib import Path
+from typing import TYPE_CHECKING
+
 import pytest
 
-from poetry.core.semver import Version
+from poetry.core.semver.version import Version
+
 from poetry.repositories.legacy_repository import LegacyRepository
-from poetry.utils._compat import Path
 from tests.helpers import get_dependency
 from tests.helpers import get_package
 
 
+if TYPE_CHECKING:
+    from cleo.testers.command_tester import CommandTester
+    from pytest_mock import MockerFixture
+
+    from poetry.installation.noop_installer import NoopInstaller
+    from poetry.poetry import Poetry
+    from poetry.utils.env import MockEnv
+    from poetry.utils.env import VirtualEnv
+    from tests.helpers import PoetryTestApplication
+    from tests.helpers import TestRepository
+    from tests.types import CommandTesterFactory
+    from tests.types import FixtureDirGetter
+    from tests.types import ProjectFactory
+
+
+@pytest.fixture
+def poetry_with_up_to_date_lockfile(
+    project_factory: ProjectFactory, fixture_dir: FixtureDirGetter
+) -> Poetry:
+    source = fixture_dir("up_to_date_lock")
+
+    return project_factory(
+        name="foobar",
+        pyproject_content=(source / "pyproject.toml").read_text(encoding="utf-8"),
+        poetry_lock_content=(source / "poetry.lock").read_text(encoding="utf-8"),
+    )
+
+
 @pytest.fixture()
-def tester(command_tester_factory):
+def tester(command_tester_factory: CommandTesterFactory) -> CommandTester:
     return command_tester_factory("add")
 
 
 @pytest.fixture()
-def old_tester(tester):
-    tester._command.installer.use_executor(False)
+def old_tester(tester: CommandTester) -> CommandTester:
+    tester.command.installer.use_executor(False)
 
     return tester
 
 
-def test_add_no_constraint(app, repo, tester):
+def test_add_no_constraint(
+    app: PoetryTestApplication, repo: TestRepository, tester: CommandTester
+):
     repo.add_package(get_package("cachy", "0.1.0"))
     repo.add_package(get_package("cachy", "0.2.0"))
 
@@ -43,8 +75,8 @@ def test_add_no_constraint(app, repo, tester):
   • Installing cachy (0.2.0)
 """
 
-    assert expected == tester.io.fetch_output()
-    assert 1 == tester._command.installer.executor.installations_count
+    assert tester.io.fetch_output() == expected
+    assert tester.command.installer.executor.installations_count == 1
 
     content = app.poetry.file.read()["tool"]["poetry"]
 
@@ -52,7 +84,77 @@ def test_add_no_constraint(app, repo, tester):
     assert content["dependencies"]["cachy"] == "^0.2.0"
 
 
-def test_add_equal_constraint(app, repo, tester):
+def test_add_replace_by_constraint(
+    app: PoetryTestApplication, repo: TestRepository, tester: CommandTester
+):
+    repo.add_package(get_package("cachy", "0.1.0"))
+    repo.add_package(get_package("cachy", "0.2.0"))
+
+    tester.execute("cachy")
+
+    expected = """\
+Using version ^0.2.0 for cachy
+
+Updating dependencies
+Resolving dependencies...
+
+Writing lock file
+
+Package operations: 1 install, 0 updates, 0 removals
+
+  • Installing cachy (0.2.0)
+"""
+    assert tester.io.fetch_output() == expected
+    assert tester.command.installer.executor.installations_count == 1
+
+    content = app.poetry.file.read()["tool"]["poetry"]
+
+    assert "cachy" in content["dependencies"]
+    assert content["dependencies"]["cachy"] == "^0.2.0"
+
+    tester.execute("cachy@0.1.0")
+    expected = """
+Updating dependencies
+Resolving dependencies...
+
+Writing lock file
+
+Package operations: 1 install, 0 updates, 0 removals
+
+  • Installing cachy (0.1.0)
+"""
+    assert tester.io.fetch_output() == expected
+
+    content = app.poetry.file.read()["tool"]["poetry"]
+
+    assert "cachy" in content["dependencies"]
+    assert content["dependencies"]["cachy"] == "0.1.0"
+
+
+def test_add_no_constraint_editable_error(
+    app: PoetryTestApplication, repo: TestRepository, tester: CommandTester
+):
+    content = app.poetry.file.read()["tool"]["poetry"]
+
+    repo.add_package(get_package("cachy", "0.2.0"))
+
+    tester.execute("-e cachy")
+
+    expected = """
+Failed to add packages. Only vcs/path dependencies support editable installs.\
+ cachy is neither.
+
+No changes were applied.
+"""
+    assert tester.status_code == 1
+    assert tester.io.fetch_error() == expected
+    assert tester.command.installer.executor.installations_count == 0
+    assert content == app.poetry.file.read()["tool"]["poetry"]
+
+
+def test_add_equal_constraint(
+    app: PoetryTestApplication, repo: TestRepository, tester: CommandTester
+):
     repo.add_package(get_package("cachy", "0.1.0"))
     repo.add_package(get_package("cachy", "0.2.0"))
 
@@ -70,11 +172,13 @@ def test_add_equal_constraint(app, repo, tester):
   • Installing cachy (0.1.0)
 """
 
-    assert expected == tester.io.fetch_output()
-    assert 1 == tester._command.installer.executor.installations_count
+    assert tester.io.fetch_output() == expected
+    assert tester.command.installer.executor.installations_count == 1
 
 
-def test_add_greater_constraint(app, repo, tester):
+def test_add_greater_constraint(
+    app: PoetryTestApplication, repo: TestRepository, tester: CommandTester
+):
     repo.add_package(get_package("cachy", "0.1.0"))
     repo.add_package(get_package("cachy", "0.2.0"))
 
@@ -92,21 +196,27 @@ def test_add_greater_constraint(app, repo, tester):
   • Installing cachy (0.2.0)
 """
 
-    assert expected == tester.io.fetch_output()
-    assert 1 == tester._command.installer.executor.installations_count
+    assert tester.io.fetch_output() == expected
+    assert tester.command.installer.executor.installations_count == 1
 
 
-def test_add_constraint_with_extras(app, repo, tester):
+@pytest.mark.parametrize("extra_name", ["msgpack", "MsgPack"])
+def test_add_constraint_with_extras(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    tester: CommandTester,
+    extra_name: str,
+):
     cachy1 = get_package("cachy", "0.1.0")
     cachy1.extras = {"msgpack": [get_dependency("msgpack-python")]}
     msgpack_dep = get_dependency("msgpack-python", ">=0.5 <0.6", optional=True)
-    cachy1.requires = [msgpack_dep]
+    cachy1.add_dependency(msgpack_dep)
 
     repo.add_package(get_package("cachy", "0.2.0"))
     repo.add_package(cachy1)
     repo.add_package(get_package("msgpack-python", "0.5.3"))
 
-    tester.execute("cachy[msgpack]>=0.1.0,<0.2.0")
+    tester.execute(f"cachy[{extra_name}]>=0.1.0,<0.2.0")
 
     expected = """\
 
@@ -121,14 +231,16 @@ def test_add_constraint_with_extras(app, repo, tester):
   • Installing cachy (0.1.0)
 """
 
-    assert expected == tester.io.fetch_output()
-    assert 2 == tester._command.installer.executor.installations_count
+    assert tester.io.fetch_output() == expected
+    assert tester.command.installer.executor.installations_count == 2
 
 
-def test_add_constraint_dependencies(app, repo, tester):
+def test_add_constraint_dependencies(
+    app: PoetryTestApplication, repo: TestRepository, tester: CommandTester
+):
     cachy2 = get_package("cachy", "0.2.0")
     msgpack_dep = get_dependency("msgpack-python", ">=0.5 <0.6")
-    cachy2.requires = [msgpack_dep]
+    cachy2.add_dependency(msgpack_dep)
 
     repo.add_package(get_package("cachy", "0.1.0"))
     repo.add_package(cachy2)
@@ -149,12 +261,17 @@ def test_add_constraint_dependencies(app, repo, tester):
   • Installing cachy (0.2.0)
 """
 
-    assert expected == tester.io.fetch_output()
-    assert 2 == tester._command.installer.executor.installations_count
+    assert tester.io.fetch_output() == expected
+    assert tester.command.installer.executor.installations_count == 2
 
 
-def test_add_git_constraint(app, repo, tester, tmp_venv):
-    tester._command.set_env(tmp_venv)
+def test_add_git_constraint(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    tester: CommandTester,
+    tmp_venv: VirtualEnv,
+):
+    tester.command.set_env(tmp_venv)
 
     repo.add_package(get_package("pendulum", "1.4.4"))
     repo.add_package(get_package("cleo", "0.6.5"))
@@ -174,8 +291,8 @@ def test_add_git_constraint(app, repo, tester, tmp_venv):
   • Installing demo (0.1.2 9cf87a2)
 """
 
-    assert expected == tester.io.fetch_output()
-    assert 2 == tester._command.installer.executor.installations_count
+    assert tester.io.fetch_output() == expected
+    assert tester.command.installer.executor.installations_count == 2
 
     content = app.poetry.file.read()["tool"]["poetry"]
 
@@ -185,8 +302,13 @@ def test_add_git_constraint(app, repo, tester, tmp_venv):
     }
 
 
-def test_add_git_constraint_with_poetry(app, repo, tester, tmp_venv):
-    tester._command.set_env(tmp_venv)
+def test_add_git_constraint_with_poetry(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    tester: CommandTester,
+    tmp_venv: VirtualEnv,
+):
+    tester.command.set_env(tmp_venv)
 
     repo.add_package(get_package("pendulum", "1.4.4"))
 
@@ -205,18 +327,25 @@ def test_add_git_constraint_with_poetry(app, repo, tester, tmp_venv):
   • Installing demo (0.1.2 9cf87a2)
 """
 
-    assert expected == tester.io.fetch_output()
-    assert 2 == tester._command.installer.executor.installations_count
+    assert tester.io.fetch_output() == expected
+    assert tester.command.installer.executor.installations_count == 2
 
 
-def test_add_git_constraint_with_extras(app, repo, tester, tmp_venv):
-    tester._command.set_env(tmp_venv)
+@pytest.mark.parametrize("extra_name", ["foo", "FOO"])
+def test_add_git_constraint_with_extras(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    tester: CommandTester,
+    tmp_venv: VirtualEnv,
+    extra_name: str,
+):
+    tester.command.set_env(tmp_venv)
 
     repo.add_package(get_package("pendulum", "1.4.4"))
     repo.add_package(get_package("cleo", "0.6.5"))
     repo.add_package(get_package("tomlkit", "0.5.5"))
 
-    tester.execute("git+https://github.com/demo/demo.git[foo,bar]")
+    tester.execute(f"git+https://github.com/demo/demo.git[{extra_name},bar]")
 
     expected = """\
 
@@ -233,25 +362,80 @@ def test_add_git_constraint_with_extras(app, repo, tester, tmp_venv):
   • Installing demo (0.1.2 9cf87a2)
 """
 
-    assert expected.strip() == tester.io.fetch_output().strip()
-    assert 4 == tester._command.installer.executor.installations_count
+    assert tester.io.fetch_output().strip() == expected.strip()
+    assert tester.command.installer.executor.installations_count == 4
 
     content = app.poetry.file.read()["tool"]["poetry"]
 
     assert "demo" in content["dependencies"]
     assert content["dependencies"]["demo"] == {
         "git": "https://github.com/demo/demo.git",
-        "extras": ["foo", "bar"],
+        "extras": [extra_name, "bar"],
+    }
+
+
+@pytest.mark.parametrize(
+    "url, rev",
+    [
+        ("git+https://github.com/demo/subdirectories.git#subdirectory=two", None),
+        (
+            "git+https://github.com/demo/subdirectories.git@master#subdirectory=two",
+            "master",
+        ),
+    ],
+)
+def test_add_git_constraint_with_subdirectory(
+    url: str,
+    rev: str | None,
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    tester: CommandTester,
+    env: MockEnv,
+):
+    tester.execute(url)
+
+    expected = """\
+Updating dependencies
+Resolving dependencies...
+
+Writing lock file
+
+Package operations: 1 install, 0 updates, 0 removals
+
+  • Installing two (2.0.0 9cf87a2)
+"""
+    assert tester.io.fetch_output().strip() == expected.strip()
+    assert tester.command.installer.executor.installations_count == 1
+
+    content = app.poetry.file.read()["tool"]["poetry"]
+
+    constraint = {
+        "git": "https://github.com/demo/subdirectories.git",
+        "subdirectory": "two",
     }
 
+    if rev:
+        constraint["rev"] = rev
+
+    assert "two" in content["dependencies"]
+    assert content["dependencies"]["two"] == constraint
+
 
-def test_add_git_ssh_constraint(app, repo, tester, tmp_venv):
-    tester._command.set_env(tmp_venv)
+@pytest.mark.parametrize("editable", [False, True])
+def test_add_git_ssh_constraint(
+    editable: bool,
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    tester: CommandTester,
+    tmp_venv: VirtualEnv,
+):
+    tester.command.set_env(tmp_venv)
 
     repo.add_package(get_package("pendulum", "1.4.4"))
     repo.add_package(get_package("cleo", "0.6.5"))
 
-    tester.execute("git+ssh://git@github.com/demo/demo.git@develop")
+    url = "git+ssh://git@github.com/demo/demo.git@develop"
+    tester.execute(f"{url}" if not editable else f"-e {url}")
 
     expected = """\
 
@@ -266,29 +450,41 @@ def test_add_git_ssh_constraint(app, repo, tester, tmp_venv):
   • Installing demo (0.1.2 9cf87a2)
 """
 
-    assert expected == tester.io.fetch_output()
-    assert 2 == tester._command.installer.executor.installations_count
+    assert tester.io.fetch_output() == expected
+    assert tester.command.installer.executor.installations_count == 2
 
     content = app.poetry.file.read()["tool"]["poetry"]
 
     assert "demo" in content["dependencies"]
-    assert content["dependencies"]["demo"] == {
+
+    expected = {
         "git": "ssh://git@github.com/demo/demo.git",
         "rev": "develop",
     }
-
-
-def test_add_directory_constraint(app, repo, tester, mocker):
-    p = mocker.patch("poetry.utils._compat.Path.cwd")
-    p.return_value = Path(__file__).parent
-
+    if editable:
+        expected["develop"] = True
+
+    assert content["dependencies"]["demo"] == expected
+
+
+@pytest.mark.parametrize(
+    "required_fixtures",
+    [["git/github.com/demo/demo"]],
+)
+@pytest.mark.parametrize("editable", [False, True])
+def test_add_directory_constraint(
+    editable: bool,
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    tester: CommandTester,
+):
     repo.add_package(get_package("pendulum", "1.4.4"))
     repo.add_package(get_package("cleo", "0.6.5"))
 
     path = "../git/github.com/demo/demo"
-    tester.execute("{}".format(path))
+    tester.execute(f"{path}" if not editable else f"-e {path}")
 
-    expected = """\
+    expected = f"""\
 
 Updating dependencies
 Resolving dependencies...
@@ -298,30 +494,38 @@ def test_add_directory_constraint(app, repo, tester, mocker):
 Package operations: 2 installs, 0 updates, 0 removals
 
   • Installing pendulum (1.4.4)
-  • Installing demo (0.1.2 {})
-""".format(
-        app.poetry.file.parent.joinpath(path).resolve().as_posix()
-    )
+  • Installing demo (0.1.2 {app.poetry.file.parent.joinpath(path).resolve().as_posix()})
+"""
 
-    assert expected == tester.io.fetch_output()
-    assert 2 == tester._command.installer.executor.installations_count
+    assert tester.io.fetch_output() == expected
+    assert tester.command.installer.executor.installations_count == 2
 
     content = app.poetry.file.read()["tool"]["poetry"]
 
     assert "demo" in content["dependencies"]
-    assert content["dependencies"]["demo"] == {"path": "../git/github.com/demo/demo"}
 
+    expected = {"path": path}
+    if editable:
+        expected["develop"] = True
 
-def test_add_directory_with_poetry(app, repo, tester, mocker):
-    p = mocker.patch("poetry.utils._compat.Path.cwd")
-    p.return_value = Path(__file__) / ".."
+    assert content["dependencies"]["demo"] == expected
 
+
+@pytest.mark.parametrize(
+    "required_fixtures",
+    [["git/github.com/demo/pyproject-demo"]],
+)
+def test_add_directory_with_poetry(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    tester: CommandTester,
+):
     repo.add_package(get_package("pendulum", "1.4.4"))
 
     path = "../git/github.com/demo/pyproject-demo"
-    tester.execute("{}".format(path))
+    tester.execute(f"{path}")
 
-    expected = """\
+    expected = f"""\
 
 Updating dependencies
 Resolving dependencies...
@@ -331,25 +535,29 @@ def test_add_directory_with_poetry(app, repo, tester, mocker):
 Package operations: 2 installs, 0 updates, 0 removals
 
   • Installing pendulum (1.4.4)
-  • Installing demo (0.1.2 {})
-""".format(
-        app.poetry.file.parent.joinpath(path).resolve().as_posix()
-    )
-
-    assert expected == tester.io.fetch_output()
-    assert 2 == tester._command.installer.executor.installations_count
+  • Installing demo (0.1.2 {app.poetry.file.parent.joinpath(path).resolve().as_posix()})
+"""
 
+    assert tester.io.fetch_output() == expected
+    assert tester.command.installer.executor.installations_count == 2
 
-def test_add_file_constraint_wheel(app, repo, tester, mocker, poetry):
-    p = mocker.patch("poetry.utils._compat.Path.cwd")
-    p.return_value = poetry.file.parent
 
+@pytest.mark.parametrize(
+    "required_fixtures",
+    [["distributions/demo-0.1.0-py2.py3-none-any.whl"]],
+)
+def test_add_file_constraint_wheel(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    tester: CommandTester,
+    poetry: Poetry,
+):
     repo.add_package(get_package("pendulum", "1.4.4"))
 
     path = "../distributions/demo-0.1.0-py2.py3-none-any.whl"
-    tester.execute("{}".format(path))
+    tester.execute(f"{path}")
 
-    expected = """\
+    expected = f"""\
 
 Updating dependencies
 Resolving dependencies...
@@ -359,32 +567,33 @@ def test_add_file_constraint_wheel(app, repo, tester, mocker, poetry):
 Package operations: 2 installs, 0 updates, 0 removals
 
   • Installing pendulum (1.4.4)
-  • Installing demo (0.1.0 {})
-""".format(
-        app.poetry.file.parent.joinpath(path).resolve().as_posix()
-    )
+  • Installing demo (0.1.0 {app.poetry.file.parent.joinpath(path).resolve().as_posix()})
+"""
 
-    assert expected == tester.io.fetch_output()
-    assert 2 == tester._command.installer.executor.installations_count
+    assert tester.io.fetch_output() == expected
+    assert tester.command.installer.executor.installations_count == 2
 
     content = app.poetry.file.read()["tool"]["poetry"]
 
     assert "demo" in content["dependencies"]
-    assert content["dependencies"]["demo"] == {
-        "path": "../distributions/demo-0.1.0-py2.py3-none-any.whl"
-    }
+    assert content["dependencies"]["demo"] == {"path": path}
 
 
-def test_add_file_constraint_sdist(app, repo, tester, mocker):
-    p = mocker.patch("poetry.utils._compat.Path.cwd")
-    p.return_value = Path(__file__) / ".."
-
+@pytest.mark.parametrize(
+    "required_fixtures",
+    [["distributions/demo-0.1.0.tar.gz"]],
+)
+def test_add_file_constraint_sdist(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    tester: CommandTester,
+):
     repo.add_package(get_package("pendulum", "1.4.4"))
 
     path = "../distributions/demo-0.1.0.tar.gz"
-    tester.execute("{}".format(path))
+    tester.execute(f"{path}")
 
-    expected = """\
+    expected = f"""\
 
 Updating dependencies
 Resolving dependencies...
@@ -394,33 +603,35 @@ def test_add_file_constraint_sdist(app, repo, tester, mocker):
 Package operations: 2 installs, 0 updates, 0 removals
 
   • Installing pendulum (1.4.4)
-  • Installing demo (0.1.0 {})
-""".format(
-        app.poetry.file.parent.joinpath(path).resolve().as_posix()
-    )
+  • Installing demo (0.1.0 {app.poetry.file.parent.joinpath(path).resolve().as_posix()})
+"""
 
-    assert expected == tester.io.fetch_output()
-    assert 2 == tester._command.installer.executor.installations_count
+    assert tester.io.fetch_output() == expected
+    assert tester.command.installer.executor.installations_count == 2
 
     content = app.poetry.file.read()["tool"]["poetry"]
 
     assert "demo" in content["dependencies"]
-    assert content["dependencies"]["demo"] == {
-        "path": "../distributions/demo-0.1.0.tar.gz"
-    }
+    assert content["dependencies"]["demo"] == {"path": path}
 
 
-def test_add_constraint_with_extras_option(app, repo, tester):
+@pytest.mark.parametrize("extra_name", ["msgpack", "MsgPack"])
+def test_add_constraint_with_extras_option(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    tester: CommandTester,
+    extra_name: str,
+):
     cachy2 = get_package("cachy", "0.2.0")
     cachy2.extras = {"msgpack": [get_dependency("msgpack-python")]}
     msgpack_dep = get_dependency("msgpack-python", ">=0.5 <0.6", optional=True)
-    cachy2.requires = [msgpack_dep]
+    cachy2.add_dependency(msgpack_dep)
 
     repo.add_package(get_package("cachy", "0.1.0"))
     repo.add_package(cachy2)
     repo.add_package(get_package("msgpack-python", "0.5.3"))
 
-    tester.execute("cachy=0.2.0 --extras msgpack")
+    tester.execute(f"cachy=0.2.0 --extras {extra_name}")
 
     expected = """\
 
@@ -435,20 +646,25 @@ def test_add_constraint_with_extras_option(app, repo, tester):
   • Installing cachy (0.2.0)
 """
 
-    assert expected == tester.io.fetch_output()
-    assert 2 == tester._command.installer.executor.installations_count
+    assert tester.io.fetch_output() == expected
+    assert tester.command.installer.executor.installations_count == 2
 
     content = app.poetry.file.read()["tool"]["poetry"]
 
     assert "cachy" in content["dependencies"]
     assert content["dependencies"]["cachy"] == {
         "version": "0.2.0",
-        "extras": ["msgpack"],
+        "extras": [extra_name],
     }
 
 
-def test_add_url_constraint_wheel(app, repo, tester, mocker):
-    p = mocker.patch("poetry.utils._compat.Path.cwd")
+def test_add_url_constraint_wheel(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    tester: CommandTester,
+    mocker: MockerFixture,
+):
+    p = mocker.patch("pathlib.Path.cwd")
     p.return_value = Path(__file__) / ".."
 
     repo.add_package(get_package("pendulum", "1.4.4"))
@@ -467,11 +683,12 @@ def test_add_url_constraint_wheel(app, repo, tester, mocker):
 Package operations: 2 installs, 0 updates, 0 removals
 
   • Installing pendulum (1.4.4)
-  • Installing demo (0.1.0 https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl)
+  • Installing demo\
+ (0.1.0 https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl)
 """
 
-    assert expected == tester.io.fetch_output()
-    assert 2 == tester._command.installer.executor.installations_count
+    assert tester.io.fetch_output() == expected
+    assert tester.command.installer.executor.installations_count == 2
 
     content = app.poetry.file.read()["tool"]["poetry"]
 
@@ -481,13 +698,21 @@ def test_add_url_constraint_wheel(app, repo, tester, mocker):
     }
 
 
-def test_add_url_constraint_wheel_with_extras(app, repo, tester, mocker):
+@pytest.mark.parametrize("extra_name", ["foo", "FOO"])
+def test_add_url_constraint_wheel_with_extras(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    tester: CommandTester,
+    extra_name: str,
+    mocker: MockerFixture,
+):
     repo.add_package(get_package("pendulum", "1.4.4"))
     repo.add_package(get_package("cleo", "0.6.5"))
     repo.add_package(get_package("tomlkit", "0.5.5"))
 
     tester.execute(
-        "https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl[foo,bar]"
+        "https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl"
+        f"[{extra_name},bar]"
     )
 
     expected = """\
@@ -502,24 +727,29 @@ def test_add_url_constraint_wheel_with_extras(app, repo, tester, mocker):
   • Installing cleo (0.6.5)
   • Installing pendulum (1.4.4)
   • Installing tomlkit (0.5.5)
-  • Installing demo (0.1.0 https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl)
+  • Installing demo\
+ (0.1.0 https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl)
 """
     # Order might be different, split into lines and compare the overall output.
     expected = set(expected.splitlines())
     output = set(tester.io.fetch_output().splitlines())
-    assert expected == output
-    assert 4 == tester._command.installer.executor.installations_count
+    assert output == expected
+    assert tester.command.installer.executor.installations_count == 4
 
     content = app.poetry.file.read()["tool"]["poetry"]
 
     assert "demo" in content["dependencies"]
     assert content["dependencies"]["demo"] == {
-        "url": "https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl",
-        "extras": ["foo", "bar"],
+        "url": (
+            "https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl"
+        ),
+        "extras": [extra_name, "bar"],
     }
 
 
-def test_add_constraint_with_python(app, repo, tester):
+def test_add_constraint_with_python(
+    app: PoetryTestApplication, repo: TestRepository, tester: CommandTester
+):
     cachy2 = get_package("cachy", "0.2.0")
 
     repo.add_package(get_package("cachy", "0.1.0"))
@@ -539,8 +769,8 @@ def test_add_constraint_with_python(app, repo, tester):
   • Installing cachy (0.2.0)
 """
 
-    assert expected == tester.io.fetch_output()
-    assert 1 == tester._command.installer.executor.installations_count
+    assert tester.io.fetch_output() == expected
+    assert tester.command.installer.executor.installations_count == 1
 
     content = app.poetry.file.read()["tool"]["poetry"]
 
@@ -548,7 +778,12 @@ def test_add_constraint_with_python(app, repo, tester):
     assert content["dependencies"]["cachy"] == {"version": "0.2.0", "python": ">=2.7"}
 
 
-def test_add_constraint_with_platform(app, repo, tester, env):
+def test_add_constraint_with_platform(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    tester: CommandTester,
+    env: MockEnv,
+):
     platform = sys.platform
     env._platform = platform
 
@@ -557,7 +792,7 @@ def test_add_constraint_with_platform(app, repo, tester, env):
     repo.add_package(get_package("cachy", "0.1.0"))
     repo.add_package(cachy2)
 
-    tester.execute("cachy=0.2.0 --platform {} -vvv".format(platform))
+    tester.execute(f"cachy=0.2.0 --platform {platform} -vvv")
 
     expected = """\
 
@@ -571,8 +806,8 @@ def test_add_constraint_with_platform(app, repo, tester, env):
   • Installing cachy (0.2.0)
 """
 
-    assert expected == tester.io.fetch_output()
-    assert 1 == tester._command.installer.executor.installations_count
+    assert tester.io.fetch_output() == expected
+    assert tester.command.installer.executor.installations_count == 1
 
     content = app.poetry.file.read()["tool"]["poetry"]
 
@@ -583,10 +818,14 @@ def test_add_constraint_with_platform(app, repo, tester, env):
     }
 
 
-def test_add_constraint_with_source(app, poetry, tester):
+def test_add_constraint_with_source(
+    app: PoetryTestApplication, poetry: Poetry, tester: CommandTester
+):
     repo = LegacyRepository(name="my-index", url="https://my-index.fake")
     repo.add_package(get_package("cachy", "0.2.0"))
-    repo._cache.store("matches").put("cachy:0.2.0", [Version.parse("0.2.0")], 5)
+    repo._cache.store("matches").put(
+        "cachy:0.2.0", [(Version.parse("0.2.0"), False)], 5
+    )
 
     poetry.pool.add_repository(repo)
 
@@ -604,8 +843,8 @@ def test_add_constraint_with_source(app, poetry, tester):
   • Installing cachy (0.2.0)
 """
 
-    assert expected == tester.io.fetch_output()
-    assert 1 == tester._command.installer.executor.installations_count
+    assert tester.io.fetch_output() == expected
+    assert tester.command.installer.executor.installations_count == 1
 
     content = app.poetry.file.read()["tool"]["poetry"]
 
@@ -616,14 +855,21 @@ def test_add_constraint_with_source(app, poetry, tester):
     }
 
 
-def test_add_constraint_with_source_that_does_not_exist(app, tester):
+def test_add_constraint_with_source_that_does_not_exist(
+    app: PoetryTestApplication, tester: CommandTester
+):
     with pytest.raises(ValueError) as e:
         tester.execute("foo --source i-dont-exist")
 
-    assert 'Repository "i-dont-exist" does not exist.' == str(e.value)
+    assert str(e.value) == 'Repository "i-dont-exist" does not exist.'
 
 
-def test_add_constraint_not_found_with_source(app, poetry, mocker, tester):
+def test_add_constraint_not_found_with_source(
+    app: PoetryTestApplication,
+    poetry: Poetry,
+    mocker: MockerFixture,
+    tester: CommandTester,
+):
     repo = LegacyRepository(name="my-index", url="https://my-index.fake")
     mocker.patch.object(repo, "find_packages", return_value=[])
 
@@ -635,15 +881,64 @@ def test_add_constraint_not_found_with_source(app, poetry, mocker, tester):
     with pytest.raises(ValueError) as e:
         tester.execute("cachy --source my-index")
 
-    assert "Could not find a matching version of package cachy" == str(e.value)
+    assert str(e.value) == "Could not find a matching version of package cachy"
+
+
+def test_add_to_section_that_does_not_exist_yet(
+    app: PoetryTestApplication, repo: TestRepository, tester: CommandTester
+):
+    repo.add_package(get_package("cachy", "0.1.0"))
+    repo.add_package(get_package("cachy", "0.2.0"))
+
+    tester.execute("cachy --group dev")
+
+    expected = """\
+Using version ^0.2.0 for cachy
+
+Updating dependencies
+Resolving dependencies...
+
+Writing lock file
+
+Package operations: 1 install, 0 updates, 0 removals
+
+  • Installing cachy (0.2.0)
+"""
+
+    assert tester.io.fetch_output() == expected
+    assert tester.command.installer.executor.installations_count == 1
+
+    content = app.poetry.file.read()["tool"]["poetry"]
+
+    assert "cachy" in content["group"]["dev"]["dependencies"]
+    assert content["group"]["dev"]["dependencies"]["cachy"] == "^0.2.0"
+
+    expected = """\
+
+[tool.poetry.group.dev.dependencies]
+cachy = "^0.2.0"
+
+"""
+    string_content = content.as_string()
+    if "\r\n" in string_content:
+        # consistent line endings
+        expected = expected.replace("\n", "\r\n")
+
+    assert expected in string_content
 
 
-def test_add_to_section_that_does_no_exist_yet(app, repo, tester):
+def test_add_to_dev_section_deprecated(
+    app: PoetryTestApplication, repo: TestRepository, tester: CommandTester
+):
     repo.add_package(get_package("cachy", "0.1.0"))
     repo.add_package(get_package("cachy", "0.2.0"))
 
     tester.execute("cachy --dev")
 
+    warning = """\
+The --dev option is deprecated, use the `--group dev` notation instead.
+"""
+
     expected = """\
 Using version ^0.2.0 for cachy
 
@@ -657,16 +952,19 @@ def test_add_to_section_that_does_no_exist_yet(app, repo, tester):
   • Installing cachy (0.2.0)
 """
 
-    assert expected == tester.io.fetch_output()
-    assert 1 == tester._command.installer.executor.installations_count
+    assert tester.io.fetch_error() == warning
+    assert tester.io.fetch_output() == expected
+    assert tester.command.installer.executor.installations_count == 1
 
     content = app.poetry.file.read()["tool"]["poetry"]
 
-    assert "cachy" in content["dev-dependencies"]
-    assert content["dev-dependencies"]["cachy"] == "^0.2.0"
+    assert "cachy" in content["group"]["dev"]["dependencies"]
+    assert content["group"]["dev"]["dependencies"]["cachy"] == "^0.2.0"
 
 
-def test_add_should_not_select_prereleases(app, repo, tester):
+def test_add_should_not_select_prereleases(
+    app: PoetryTestApplication, repo: TestRepository, tester: CommandTester
+):
     repo.add_package(get_package("pyyaml", "3.13"))
     repo.add_package(get_package("pyyaml", "4.2b2"))
 
@@ -685,8 +983,8 @@ def test_add_should_not_select_prereleases(app, repo, tester):
   • Installing pyyaml (3.13)
 """
 
-    assert expected == tester.io.fetch_output()
-    assert 1 == tester._command.installer.executor.installations_count
+    assert tester.io.fetch_output() == expected
+    assert tester.command.installer.executor.installations_count == 1
 
     content = app.poetry.file.read()["tool"]["poetry"]
 
@@ -695,7 +993,7 @@ def test_add_should_not_select_prereleases(app, repo, tester):
 
 
 def test_add_should_skip_when_adding_existing_package_with_no_constraint(
-    app, repo, tester
+    app: PoetryTestApplication, repo: TestRepository, tester: CommandTester
 ):
     content = app.poetry.file.read()
     content["tool"]["poetry"]["dependencies"]["foo"] = "^1.0"
@@ -709,15 +1007,41 @@ def test_add_should_skip_when_adding_existing_package_with_no_constraint(
 
   • foo
 
-If you want to update it to the latest compatible version, you can use `poetry update package`.
-If you prefer to upgrade it to the latest available version, you can use `poetry add package@latest`.
+If you want to update it to the latest compatible version,\
+ you can use `poetry update package`.
+If you prefer to upgrade it to the latest available version,\
+ you can use `poetry add package@latest`.
+"""
+
+    assert expected in tester.io.fetch_output()
+
+
+def test_add_should_skip_when_adding_canonicalized_existing_package_with_no_constraint(
+    app: PoetryTestApplication, repo: TestRepository, tester: CommandTester
+):
+    content = app.poetry.file.read()
+    content["tool"]["poetry"]["dependencies"]["foo-bar"] = "^1.0"
+    app.poetry.file.write(content)
+
+    repo.add_package(get_package("foo-bar", "1.1.2"))
+    tester.execute("Foo_Bar")
+
+    expected = """\
+The following packages are already present in the pyproject.toml and will be skipped:
+
+  • Foo_Bar
+
+If you want to update it to the latest compatible version,\
+ you can use `poetry update package`.
+If you prefer to upgrade it to the latest available version,\
+ you can use `poetry add package@latest`.
 """
 
     assert expected in tester.io.fetch_output()
 
 
 def test_add_should_work_when_adding_existing_package_with_latest_constraint(
-    app, repo, tester
+    app: PoetryTestApplication, repo: TestRepository, tester: CommandTester
 ):
     content = app.poetry.file.read()
     content["tool"]["poetry"]["dependencies"]["foo"] = "^1.0"
@@ -748,14 +1072,16 @@ def test_add_should_work_when_adding_existing_package_with_latest_constraint(
     assert content["dependencies"]["foo"] == "^1.1.2"
 
 
-def test_add_chooses_prerelease_if_only_prereleases_are_available(app, repo, tester):
+def test_add_chooses_prerelease_if_only_prereleases_are_available(
+    app: PoetryTestApplication, repo: TestRepository, tester: CommandTester
+):
     repo.add_package(get_package("foo", "1.2.3b0"))
     repo.add_package(get_package("foo", "1.2.3b1"))
 
     tester.execute("foo")
 
     expected = """\
-Using version ^1.2.3-beta.1 for foo
+Using version ^1.2.3b1 for foo
 
 Updating dependencies
 Resolving dependencies...
@@ -766,11 +1092,12 @@ def test_add_chooses_prerelease_if_only_prereleases_are_available(app, repo, tes
 
   • Installing foo (1.2.3b1)
 """
-
     assert expected in tester.io.fetch_output()
 
 
-def test_add_prefers_stable_releases(app, repo, tester):
+def test_add_prefers_stable_releases(
+    app: PoetryTestApplication, repo: TestRepository, tester: CommandTester
+):
     repo.add_package(get_package("foo", "1.2.3"))
     repo.add_package(get_package("foo", "1.2.4b1"))
 
@@ -792,7 +1119,9 @@ def test_add_prefers_stable_releases(app, repo, tester):
     assert expected in tester.io.fetch_output()
 
 
-def test_add_with_lock(app, repo, tester):
+def test_add_with_lock(
+    app: PoetryTestApplication, repo: TestRepository, tester: CommandTester
+):
     content_hash = app.poetry.locker._get_content_hash()
     repo.add_package(get_package("cachy", "0.2.0"))
 
@@ -807,11 +1136,16 @@ def test_add_with_lock(app, repo, tester):
 Writing lock file
 """
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
     assert content_hash != app.poetry.locker.lock_data["metadata"]["content-hash"]
 
 
-def test_add_no_constraint_old_installer(app, repo, installer, old_tester):
+def test_add_no_constraint_old_installer(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
+):
     repo.add_package(get_package("cachy", "0.1.0"))
     repo.add_package(get_package("cachy", "0.2.0"))
 
@@ -830,7 +1164,7 @@ def test_add_no_constraint_old_installer(app, repo, installer, old_tester):
   - Installing cachy (0.2.0)
 """
 
-    assert expected == old_tester.io.fetch_output()
+    assert old_tester.io.fetch_output() == expected
 
     assert len(installer.installs) == 1
 
@@ -840,7 +1174,12 @@ def test_add_no_constraint_old_installer(app, repo, installer, old_tester):
     assert content["dependencies"]["cachy"] == "^0.2.0"
 
 
-def test_add_equal_constraint_old_installer(app, repo, installer, old_tester):
+def test_add_equal_constraint_old_installer(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
+):
     repo.add_package(get_package("cachy", "0.1.0"))
     repo.add_package(get_package("cachy", "0.2.0"))
 
@@ -858,12 +1197,17 @@ def test_add_equal_constraint_old_installer(app, repo, installer, old_tester):
   - Installing cachy (0.1.0)
 """
 
-    assert expected == old_tester.io.fetch_output()
+    assert old_tester.io.fetch_output() == expected
 
     assert len(installer.installs) == 1
 
 
-def test_add_greater_constraint_old_installer(app, repo, installer, old_tester):
+def test_add_greater_constraint_old_installer(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
+):
     repo.add_package(get_package("cachy", "0.1.0"))
     repo.add_package(get_package("cachy", "0.2.0"))
 
@@ -881,22 +1225,29 @@ def test_add_greater_constraint_old_installer(app, repo, installer, old_tester):
   - Installing cachy (0.2.0)
 """
 
-    assert expected == old_tester.io.fetch_output()
+    assert old_tester.io.fetch_output() == expected
 
     assert len(installer.installs) == 1
 
 
-def test_add_constraint_with_extras_old_installer(app, repo, installer, old_tester):
+@pytest.mark.parametrize("extra_name", ["msgpack", "MsgPack"])
+def test_add_constraint_with_extras_old_installer(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
+    extra_name: str,
+):
     cachy1 = get_package("cachy", "0.1.0")
     cachy1.extras = {"msgpack": [get_dependency("msgpack-python")]}
     msgpack_dep = get_dependency("msgpack-python", ">=0.5 <0.6", optional=True)
-    cachy1.requires = [msgpack_dep]
+    cachy1.add_dependency(msgpack_dep)
 
     repo.add_package(get_package("cachy", "0.2.0"))
     repo.add_package(cachy1)
     repo.add_package(get_package("msgpack-python", "0.5.3"))
 
-    old_tester.execute("cachy[msgpack]>=0.1.0,<0.2.0")
+    old_tester.execute(f"cachy[{extra_name}]>=0.1.0,<0.2.0")
 
     expected = """\
 
@@ -911,15 +1262,20 @@ def test_add_constraint_with_extras_old_installer(app, repo, installer, old_test
   - Installing cachy (0.1.0)
 """
 
-    assert expected == old_tester.io.fetch_output()
+    assert old_tester.io.fetch_output() == expected
 
     assert len(installer.installs) == 2
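The `["msgpack", "MsgPack"]` parametrization appearing throughout these hunks exercises case-insensitive extras: extra names are compared in canonical form. The suite leans on `packaging.utils.canonicalize_name` (imported in the `test_init.py` hunk further down); its behaviour in isolation:

```python
from packaging.utils import canonicalize_name

assert canonicalize_name("MsgPack") == "msgpack"
assert canonicalize_name("msgpack-Python") == "msgpack-python"
```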
 
 
-def test_add_constraint_dependencies_old_installer(app, repo, installer, old_tester):
+def test_add_constraint_dependencies_old_installer(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
+):
     cachy2 = get_package("cachy", "0.2.0")
     msgpack_dep = get_dependency("msgpack-python", ">=0.5 <0.6")
-    cachy2.requires = [msgpack_dep]
+    cachy2.add_dependency(msgpack_dep)
 
     repo.add_package(get_package("cachy", "0.1.0"))
     repo.add_package(cachy2)
@@ -940,12 +1296,17 @@ def test_add_constraint_dependencies_old_installer(app, repo, installer, old_tes
   - Installing cachy (0.2.0)
 """
 
-    assert expected == old_tester.io.fetch_output()
+    assert old_tester.io.fetch_output() == expected
 
     assert len(installer.installs) == 2
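Another substitution repeated across these hunks: assigning to `package.requires` becomes `package.add_dependency(...)`, in line with poetry-core 1.1 exposing `requires` as a read-only property. A hedged sketch of the equivalent setup:

```python
# Assumes poetry-core 1.1 import paths.
from poetry.core.packages.dependency import Dependency
from poetry.core.packages.package import Package

cachy = Package("cachy", "0.2.0")
cachy.add_dependency(Dependency("msgpack-python", ">=0.5,<0.6"))

assert [dep.name for dep in cachy.requires] == ["msgpack-python"]
```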
 
 
-def test_add_git_constraint_old_installer(app, repo, installer, old_tester):
+def test_add_git_constraint_old_installer(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
+):
     repo.add_package(get_package("pendulum", "1.4.4"))
     repo.add_package(get_package("cleo", "0.6.5"))
 
@@ -964,7 +1325,7 @@ def test_add_git_constraint_old_installer(app, repo, installer, old_tester):
   - Installing demo (0.1.2 9cf87a2)
 """
 
-    assert expected == old_tester.io.fetch_output()
+    assert old_tester.io.fetch_output() == expected
 
     assert len(installer.installs) == 2
 
@@ -976,7 +1337,12 @@ def test_add_git_constraint_old_installer(app, repo, installer, old_tester):
     }
 
 
-def test_add_git_constraint_with_poetry_old_installer(app, repo, installer, old_tester):
+def test_add_git_constraint_with_poetry_old_installer(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
+):
     repo.add_package(get_package("pendulum", "1.4.4"))
 
     old_tester.execute("git+https://github.com/demo/pyproject-demo.git")
@@ -994,17 +1360,24 @@ def test_add_git_constraint_with_poetry_old_installer(app, repo, installer, old_
   - Installing demo (0.1.2 9cf87a2)
 """
 
-    assert expected == old_tester.io.fetch_output()
+    assert old_tester.io.fetch_output() == expected
 
     assert len(installer.installs) == 2
 
 
-def test_add_git_constraint_with_extras_old_installer(app, repo, installer, old_tester):
+@pytest.mark.parametrize("extra_name", ["foo", "FOO"])
+def test_add_git_constraint_with_extras_old_installer(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
+    extra_name: str,
+):
     repo.add_package(get_package("pendulum", "1.4.4"))
     repo.add_package(get_package("cleo", "0.6.5"))
     repo.add_package(get_package("tomlkit", "0.5.5"))
 
-    old_tester.execute("git+https://github.com/demo/demo.git[foo,bar]")
+    old_tester.execute(f"git+https://github.com/demo/demo.git[{extra_name},bar]")
 
     expected = """\
 
@@ -1021,7 +1394,7 @@ def test_add_git_constraint_with_extras_old_installer(app, repo, installer, old_
   - Installing demo (0.1.2 9cf87a2)
 """
 
-    assert expected == old_tester.io.fetch_output()
+    assert old_tester.io.fetch_output() == expected
 
     assert len(installer.installs) == 4
 
@@ -1030,11 +1403,16 @@ def test_add_git_constraint_with_extras_old_installer(app, repo, installer, old_
     assert "demo" in content["dependencies"]
     assert content["dependencies"]["demo"] == {
         "git": "https://github.com/demo/demo.git",
-        "extras": ["foo", "bar"],
+        "extras": [extra_name, "bar"],
     }
 
 
-def test_add_git_ssh_constraint_old_installer(app, repo, installer, old_tester):
+def test_add_git_ssh_constraint_old_installer(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
+):
     repo.add_package(get_package("pendulum", "1.4.4"))
     repo.add_package(get_package("cleo", "0.6.5"))
 
@@ -1053,7 +1431,7 @@ def test_add_git_ssh_constraint_old_installer(app, repo, installer, old_tester):
   - Installing demo (0.1.2 9cf87a2)
 """
 
-    assert expected == old_tester.io.fetch_output()
+    assert old_tester.io.fetch_output() == expected
 
     assert len(installer.installs) == 2
 
@@ -1066,19 +1444,23 @@ def test_add_git_ssh_constraint_old_installer(app, repo, installer, old_tester):
     }
 
 
+@pytest.mark.parametrize(
+    "required_fixtures",
+    [["git/github.com/demo/demo"]],
+)
 def test_add_directory_constraint_old_installer(
-    app, repo, installer, mocker, old_tester
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
 ):
-    p = mocker.patch("poetry.utils._compat.Path.cwd")
-    p.return_value = Path(__file__) / ".."
-
     repo.add_package(get_package("pendulum", "1.4.4"))
     repo.add_package(get_package("cleo", "0.6.5"))
 
     path = "../git/github.com/demo/demo"
-    old_tester.execute("{}".format(path))
+    old_tester.execute(f"{path}")
 
-    expected = """\
+    expected = f"""\
 
 Updating dependencies
 Resolving dependencies...
@@ -1088,33 +1470,35 @@ def test_add_directory_constraint_old_installer(
 Package operations: 2 installs, 0 updates, 0 removals
 
   - Installing pendulum (1.4.4)
-  - Installing demo (0.1.2 {})
-""".format(
-        app.poetry.file.parent.joinpath(path).resolve().as_posix()
-    )
+  - Installing demo (0.1.2 {app.poetry.file.parent.joinpath(path).resolve().as_posix()})
+"""
 
-    assert expected == old_tester.io.fetch_output()
+    assert old_tester.io.fetch_output() == expected
 
     assert len(installer.installs) == 2
 
     content = app.poetry.file.read()["tool"]["poetry"]
 
     assert "demo" in content["dependencies"]
-    assert content["dependencies"]["demo"] == {"path": "../git/github.com/demo/demo"}
+    assert content["dependencies"]["demo"] == {"path": path}
 
 
+@pytest.mark.parametrize(
+    "required_fixtures",
+    [["git/github.com/demo/pyproject-demo"]],
+)
 def test_add_directory_with_poetry_old_installer(
-    app, repo, installer, mocker, old_tester
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
 ):
-    p = mocker.patch("poetry.utils._compat.Path.cwd")
-    p.return_value = Path(__file__) / ".."
-
     repo.add_package(get_package("pendulum", "1.4.4"))
 
     path = "../git/github.com/demo/pyproject-demo"
-    old_tester.execute("{}".format(path))
+    old_tester.execute(f"{path}")
 
-    expected = """\
+    expected = f"""\
 
 Updating dependencies
 Resolving dependencies...
@@ -1124,28 +1508,30 @@ def test_add_directory_with_poetry_old_installer(
 Package operations: 2 installs, 0 updates, 0 removals
 
   - Installing pendulum (1.4.4)
-  - Installing demo (0.1.2 {})
-""".format(
-        app.poetry.file.parent.joinpath(path).resolve().as_posix()
-    )
+  - Installing demo (0.1.2 {app.poetry.file.parent.joinpath(path).resolve().as_posix()})
+"""
 
-    assert expected == old_tester.io.fetch_output()
+    assert old_tester.io.fetch_output() == expected
 
     assert len(installer.installs) == 2
 
 
+@pytest.mark.parametrize(
+    "required_fixtures",
+    [["distributions/demo-0.1.0-py2.py3-none-any.whl"]],
+)
 def test_add_file_constraint_wheel_old_installer(
-    app, repo, installer, mocker, old_tester
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
 ):
-    p = mocker.patch("poetry.utils._compat.Path.cwd")
-    p.return_value = Path(__file__) / ".."
-
     repo.add_package(get_package("pendulum", "1.4.4"))
 
     path = "../distributions/demo-0.1.0-py2.py3-none-any.whl"
-    old_tester.execute("{}".format(path))
+    old_tester.execute(f"{path}")
 
-    expected = """\
+    expected = f"""\
 
 Updating dependencies
 Resolving dependencies...
@@ -1155,35 +1541,35 @@ def test_add_file_constraint_wheel_old_installer(
 Package operations: 2 installs, 0 updates, 0 removals
 
   - Installing pendulum (1.4.4)
-  - Installing demo (0.1.0 {})
-""".format(
-        app.poetry.file.parent.joinpath(path).resolve().as_posix()
-    )
+  - Installing demo (0.1.0 {app.poetry.file.parent.joinpath(path).resolve().as_posix()})
+"""
 
-    assert expected == old_tester.io.fetch_output()
+    assert old_tester.io.fetch_output() == expected
 
     assert len(installer.installs) == 2
 
     content = app.poetry.file.read()["tool"]["poetry"]
 
     assert "demo" in content["dependencies"]
-    assert content["dependencies"]["demo"] == {
-        "path": "../distributions/demo-0.1.0-py2.py3-none-any.whl"
-    }
+    assert content["dependencies"]["demo"] == {"path": path}
 
 
+@pytest.mark.parametrize(
+    "required_fixtures",
+    [["distributions/demo-0.1.0.tar.gz"]],
+)
 def test_add_file_constraint_sdist_old_installer(
-    app, repo, installer, mocker, old_tester
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
 ):
-    p = mocker.patch("poetry.utils._compat.Path.cwd")
-    p.return_value = Path(__file__) / ".."
-
     repo.add_package(get_package("pendulum", "1.4.4"))
 
     path = "../distributions/demo-0.1.0.tar.gz"
-    old_tester.execute("{}".format(path))
+    old_tester.execute(f"{path}")
 
-    expected = """\
+    expected = f"""\
 
 Updating dependencies
 Resolving dependencies...
@@ -1193,36 +1579,37 @@ def test_add_file_constraint_sdist_old_installer(
 Package operations: 2 installs, 0 updates, 0 removals
 
   - Installing pendulum (1.4.4)
-  - Installing demo (0.1.0 {})
-""".format(
-        app.poetry.file.parent.joinpath(path).resolve().as_posix()
-    )
+  - Installing demo (0.1.0 {app.poetry.file.parent.joinpath(path).resolve().as_posix()})
+"""
 
-    assert expected == old_tester.io.fetch_output()
+    assert old_tester.io.fetch_output() == expected
 
     assert len(installer.installs) == 2
 
     content = app.poetry.file.read()["tool"]["poetry"]
 
     assert "demo" in content["dependencies"]
-    assert content["dependencies"]["demo"] == {
-        "path": "../distributions/demo-0.1.0.tar.gz"
-    }
+    assert content["dependencies"]["demo"] == {"path": path}
 
 
+@pytest.mark.parametrize("extra_name", ["msgpack", "MsgPack"])
 def test_add_constraint_with_extras_option_old_installer(
-    app, repo, installer, old_tester
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
+    extra_name: str,
 ):
     cachy2 = get_package("cachy", "0.2.0")
     cachy2.extras = {"msgpack": [get_dependency("msgpack-python")]}
     msgpack_dep = get_dependency("msgpack-python", ">=0.5 <0.6", optional=True)
-    cachy2.requires = [msgpack_dep]
+    cachy2.add_dependency(msgpack_dep)
 
     repo.add_package(get_package("cachy", "0.1.0"))
     repo.add_package(cachy2)
     repo.add_package(get_package("msgpack-python", "0.5.3"))
 
-    old_tester.execute("cachy=0.2.0 --extras msgpack")
+    old_tester.execute(f"cachy=0.2.0 --extras {extra_name}")
 
     expected = """\
 
@@ -1237,7 +1624,7 @@ def test_add_constraint_with_extras_option_old_installer(
   - Installing cachy (0.2.0)
 """
 
-    assert expected == old_tester.io.fetch_output()
+    assert old_tester.io.fetch_output() == expected
 
     assert len(installer.installs) == 2
 
@@ -1246,14 +1633,18 @@ def test_add_constraint_with_extras_option_old_installer(
     assert "cachy" in content["dependencies"]
     assert content["dependencies"]["cachy"] == {
         "version": "0.2.0",
-        "extras": ["msgpack"],
+        "extras": [extra_name],
     }
 
 
 def test_add_url_constraint_wheel_old_installer(
-    app, repo, installer, mocker, old_tester
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    mocker: MockerFixture,
+    old_tester: CommandTester,
 ):
-    p = mocker.patch("poetry.utils._compat.Path.cwd")
+    p = mocker.patch("pathlib.Path.cwd")
     p.return_value = Path(__file__) / ".."
 
     repo.add_package(get_package("pendulum", "1.4.4"))
@@ -1272,10 +1663,11 @@ def test_add_url_constraint_wheel_old_installer(
 Package operations: 2 installs, 0 updates, 0 removals
 
   - Installing pendulum (1.4.4)
-  - Installing demo (0.1.0 https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl)
+  - Installing demo\
+ (0.1.0 https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl)
 """
 
-    assert expected == old_tester.io.fetch_output()
+    assert old_tester.io.fetch_output() == expected
 
     assert len(installer.installs) == 2
 
@@ -1287,15 +1679,21 @@ def test_add_url_constraint_wheel_old_installer(
     }
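With the Python 2 shim `poetry.utils._compat.Path` removed, `Path` is the stdlib class, so the `cwd()` monkeypatch now targets `pathlib.Path.cwd` directly. A minimal illustration (the `mocker` fixture comes from pytest-mock):

```python
from pathlib import Path


def test_cwd_is_patched(mocker):
    mocker.patch("pathlib.Path.cwd", return_value=Path("/tmp/project"))
    assert Path.cwd() == Path("/tmp/project")
```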
 
 
+@pytest.mark.parametrize("extra_name", ["foo", "FOO"])
 def test_add_url_constraint_wheel_with_extras_old_installer(
-    app, repo, installer, old_tester
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
+    extra_name: str,
 ):
     repo.add_package(get_package("pendulum", "1.4.4"))
     repo.add_package(get_package("cleo", "0.6.5"))
     repo.add_package(get_package("tomlkit", "0.5.5"))
 
     old_tester.execute(
-        "https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl[foo,bar]"
+        "https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl"
+        f"[{extra_name},bar]"
     )
 
     expected = """\
@@ -1310,10 +1708,11 @@ def test_add_url_constraint_wheel_with_extras_old_installer(
   - Installing cleo (0.6.5)
   - Installing pendulum (1.4.4)
   - Installing tomlkit (0.5.5)
-  - Installing demo (0.1.0 https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl)
+  - Installing demo\
+ (0.1.0 https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl)
 """
 
-    assert expected == old_tester.io.fetch_output()
+    assert old_tester.io.fetch_output() == expected
 
     assert len(installer.installs) == 4
 
@@ -1321,12 +1720,19 @@ def test_add_url_constraint_wheel_with_extras_old_installer(
 
     assert "demo" in content["dependencies"]
     assert content["dependencies"]["demo"] == {
-        "url": "https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl",
-        "extras": ["foo", "bar"],
+        "url": (
+            "https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl"
+        ),
+        "extras": [extra_name, "bar"],
     }
 
 
-def test_add_constraint_with_python_old_installer(app, repo, installer, old_tester):
+def test_add_constraint_with_python_old_installer(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
+):
     cachy2 = get_package("cachy", "0.2.0")
 
     repo.add_package(get_package("cachy", "0.1.0"))
@@ -1346,7 +1752,7 @@ def test_add_constraint_with_python_old_installer(app, repo, installer, old_test
   - Installing cachy (0.2.0)
 """
 
-    assert expected == old_tester.io.fetch_output()
+    assert old_tester.io.fetch_output() == expected
 
     assert len(installer.installs) == 1
 
@@ -1357,7 +1763,11 @@ def test_add_constraint_with_python_old_installer(app, repo, installer, old_test
 
 
 def test_add_constraint_with_platform_old_installer(
-    app, repo, installer, env, old_tester
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    env: MockEnv,
+    old_tester: CommandTester,
 ):
     platform = sys.platform
     env._platform = platform
@@ -1367,7 +1777,7 @@ def test_add_constraint_with_platform_old_installer(
     repo.add_package(get_package("cachy", "0.1.0"))
     repo.add_package(cachy2)
 
-    old_tester.execute("cachy=0.2.0 --platform {} -vvv".format(platform))
+    old_tester.execute(f"cachy=0.2.0 --platform {platform} -vvv")
 
     expected = """\
 
@@ -1381,7 +1791,7 @@ def test_add_constraint_with_platform_old_installer(
   - Installing cachy (0.2.0)
 """
 
-    assert expected == old_tester.io.fetch_output()
+    assert old_tester.io.fetch_output() == expected
 
     assert len(installer.installs) == 1
 
@@ -1394,10 +1804,17 @@ def test_add_constraint_with_platform_old_installer(
     }
 
 
-def test_add_constraint_with_source_old_installer(app, poetry, installer, old_tester):
+def test_add_constraint_with_source_old_installer(
+    app: PoetryTestApplication,
+    poetry: Poetry,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
+):
     repo = LegacyRepository(name="my-index", url="https://my-index.fake")
     repo.add_package(get_package("cachy", "0.2.0"))
-    repo._cache.store("matches").put("cachy:0.2.0", [Version.parse("0.2.0")], 5)
+    repo._cache.store("matches").put(
+        "cachy:0.2.0", [(Version.parse("0.2.0"), False)], 5
+    )
 
     poetry.pool.add_repository(repo)
 
@@ -1415,7 +1832,7 @@ def test_add_constraint_with_source_old_installer(app, poetry, installer, old_te
   - Installing cachy (0.2.0)
 """
 
-    assert expected == old_tester.io.fetch_output()
+    assert old_tester.io.fetch_output() == expected
 
     assert len(installer.installs) == 1
 
@@ -1428,15 +1845,20 @@ def test_add_constraint_with_source_old_installer(app, poetry, installer, old_te
     }
 
 
-def test_add_constraint_with_source_that_does_not_exist_old_installer(app, old_tester):
+def test_add_constraint_with_source_that_does_not_exist_old_installer(
+    app: PoetryTestApplication, old_tester: CommandTester
+):
     with pytest.raises(ValueError) as e:
         old_tester.execute("foo --source i-dont-exist")
 
-    assert 'Repository "i-dont-exist" does not exist.' == str(e.value)
+    assert str(e.value) == 'Repository "i-dont-exist" does not exist.'
 
 
 def test_add_constraint_not_found_with_source_old_installer(
-    app, poetry, mocker, old_tester
+    app: PoetryTestApplication,
+    poetry: Poetry,
+    mocker: MockerFixture,
+    old_tester: CommandTester,
 ):
     repo = LegacyRepository(name="my-index", url="https://my-index.fake")
     mocker.patch.object(repo, "find_packages", return_value=[])
@@ -1449,16 +1871,19 @@ def test_add_constraint_not_found_with_source_old_installer(
     with pytest.raises(ValueError) as e:
         old_tester.execute("cachy --source my-index")
 
-    assert "Could not find a matching version of package cachy" == str(e.value)
+    assert str(e.value) == "Could not find a matching version of package cachy"
 
 
 def test_add_to_section_that_does_no_exist_yet_old_installer(
-    app, repo, installer, old_tester
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
 ):
     repo.add_package(get_package("cachy", "0.1.0"))
     repo.add_package(get_package("cachy", "0.2.0"))
 
-    old_tester.execute("cachy --dev")
+    old_tester.execute("cachy --group dev")
 
     expected = """\
 Using version ^0.2.0 for cachy
@@ -1473,18 +1898,21 @@ def test_add_to_section_that_does_no_exist_yet_old_installer(
   - Installing cachy (0.2.0)
 """
 
-    assert expected == old_tester.io.fetch_output()
+    assert old_tester.io.fetch_output() == expected
 
     assert len(installer.installs) == 1
 
     content = app.poetry.file.read()["tool"]["poetry"]
 
-    assert "cachy" in content["dev-dependencies"]
-    assert content["dev-dependencies"]["cachy"] == "^0.2.0"
+    assert "cachy" in content["group"]["dev"]["dependencies"]
+    assert content["group"]["dev"]["dependencies"]["cachy"] == "^0.2.0"
 
 
 def test_add_should_not_select_prereleases_old_installer(
-    app, repo, installer, old_tester
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
 ):
     repo.add_package(get_package("pyyaml", "3.13"))
     repo.add_package(get_package("pyyaml", "4.2b2"))
@@ -1504,7 +1932,7 @@ def test_add_should_not_select_prereleases_old_installer(
   - Installing pyyaml (3.13)
 """
 
-    assert expected == old_tester.io.fetch_output()
+    assert old_tester.io.fetch_output() == expected
 
     assert len(installer.installs) == 1
 
@@ -1515,7 +1943,10 @@ def test_add_should_not_select_prereleases_old_installer(
 
 
 def test_add_should_skip_when_adding_existing_package_with_no_constraint_old_installer(
-    app, repo, installer, old_tester
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
 ):
     content = app.poetry.file.read()
     content["tool"]["poetry"]["dependencies"]["foo"] = "^1.0"
@@ -1530,15 +1961,20 @@ def test_add_should_skip_when_adding_existing_package_with_no_constraint_old_ins
 
   • foo
 
-If you want to update it to the latest compatible version, you can use `poetry update package`.
-If you prefer to upgrade it to the latest available version, you can use `poetry add package@latest`.
+If you want to update it to the latest compatible version,\
+ you can use `poetry update package`.
+If you prefer to upgrade it to the latest available version,\
+ you can use `poetry add package@latest`.
 """
 
     assert expected in old_tester.io.fetch_output()
 
 
-def test_add_should_work_when_adding_existing_package_with_latest_constraint_old_installer(
-    app, repo, installer, old_tester
+def test_add_should_work_when_adding_existing_package_with_latest_constraint_old_installer(  # noqa: E501
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
 ):
     content = app.poetry.file.read()
     content["tool"]["poetry"]["dependencies"]["foo"] = "^1.0"
@@ -1570,7 +2006,10 @@ def test_add_should_work_when_adding_existing_package_with_latest_constraint_old
 
 
 def test_add_chooses_prerelease_if_only_prereleases_are_available_old_installer(
-    app, repo, installer, old_tester
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
 ):
     repo.add_package(get_package("foo", "1.2.3b0"))
     repo.add_package(get_package("foo", "1.2.3b1"))
@@ -1578,7 +2017,7 @@ def test_add_chooses_prerelease_if_only_prereleases_are_available_old_installer(
     old_tester.execute("foo")
 
     expected = """\
-Using version ^1.2.3-beta.1 for foo
+Using version ^1.2.3b1 for foo
 
 Updating dependencies
 Resolving dependencies...
@@ -1589,11 +2028,15 @@ def test_add_chooses_prerelease_if_only_prereleases_are_available_old_installer(
 
   - Installing foo (1.2.3b1)
 """
-
     assert expected in old_tester.io.fetch_output()
 
 
-def test_add_preferes_stable_releases_old_installer(app, repo, installer, old_tester):
+def test_add_preferes_stable_releases_old_installer(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
+):
     repo.add_package(get_package("foo", "1.2.3"))
     repo.add_package(get_package("foo", "1.2.4b1"))
 
@@ -1615,7 +2058,12 @@ def test_add_preferes_stable_releases_old_installer(app, repo, installer, old_te
     assert expected in old_tester.io.fetch_output()
 
 
-def test_add_with_lock_old_installer(app, repo, installer, old_tester):
+def test_add_with_lock_old_installer(
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    installer: NoopInstaller,
+    old_tester: CommandTester,
+):
     repo.add_package(get_package("cachy", "0.2.0"))
 
     old_tester.execute("cachy --lock")
@@ -1629,27 +2077,58 @@ def test_add_with_lock_old_installer(app, repo, installer, old_tester):
 Writing lock file
 """
 
-    assert expected == old_tester.io.fetch_output()
+    assert old_tester.io.fetch_output() == expected
 
 
-def test_add_keyboard_interrupt_restore_content(app, repo, installer, tester, mocker):
+def test_add_keyboard_interrupt_restore_content(
+    poetry_with_up_to_date_lockfile: Poetry,
+    repo: TestRepository,
+    command_tester_factory: CommandTesterFactory,
+    mocker: MockerFixture,
+):
+    tester = command_tester_factory("add", poetry=poetry_with_up_to_date_lockfile)
+
     mocker.patch(
         "poetry.installation.installer.Installer.run", side_effect=KeyboardInterrupt()
     )
-    original_content = app.poetry.file.read()
+    original_pyproject_content = poetry_with_up_to_date_lockfile.file.read()
+    original_lockfile_content = poetry_with_up_to_date_lockfile._locker.lock_data
 
     repo.add_package(get_package("cachy", "0.2.0"))
+    repo.add_package(get_package("docker", "4.3.1"))
 
-    tester.execute("cachy --dry-run")
+    tester.execute("cachy")
 
-    assert original_content == app.poetry.file.read()
+    assert poetry_with_up_to_date_lockfile.file.read() == original_pyproject_content
+    assert (
+        poetry_with_up_to_date_lockfile._locker.lock_data == original_lockfile_content
+    )
 
 
-def test_dry_run_restore_original_content(app, repo, installer, tester):
-    original_content = app.poetry.file.read()
+@pytest.mark.parametrize(
+    "command",
+    [
+        "cachy --dry-run",
+        "cachy --lock --dry-run",
+    ],
+)
+def test_add_with_dry_run_keep_files_intact(
+    command: str,
+    poetry_with_up_to_date_lockfile: Poetry,
+    repo: TestRepository,
+    command_tester_factory: CommandTesterFactory,
+):
+    tester = command_tester_factory("add", poetry=poetry_with_up_to_date_lockfile)
+
+    original_pyproject_content = poetry_with_up_to_date_lockfile.file.read()
+    original_lockfile_content = poetry_with_up_to_date_lockfile._locker.lock_data
 
     repo.add_package(get_package("cachy", "0.2.0"))
+    repo.add_package(get_package("docker", "4.3.1"))
 
-    tester.execute("cachy --dry-run")
+    tester.execute(command)
 
-    assert original_content == app.poetry.file.read()
+    assert poetry_with_up_to_date_lockfile.file.read() == original_pyproject_content
+    assert (
+        poetry_with_up_to_date_lockfile._locker.lock_data == original_lockfile_content
+    )
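Both the rewritten KeyboardInterrupt test and the new parametrized dry-run test share one shape: snapshot the `pyproject.toml` content and `lock_data` before running, then assert both are unchanged afterwards. Reduced to its essentials as a standalone sketch:

```python
import copy


def assert_state_unchanged(state: dict, action) -> None:
    before = copy.deepcopy(state)
    try:
        action(state)
    except KeyboardInterrupt:
        pass  # an interrupted run must still leave the files intact
    assert state == before


assert_state_unchanged({"lock": {"content-hash": "abc"}}, lambda s: None)
```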
diff --git a/vendor/poetry/tests/console/commands/test_cache.py b/vendor/poetry/tests/console/commands/test_cache.py
deleted file mode 100644
index a8d47842..00000000
--- a/vendor/poetry/tests/console/commands/test_cache.py
+++ /dev/null
@@ -1,58 +0,0 @@
-import uuid
-
-import pytest
-
-
-@pytest.fixture
-def repository_cache_dir(monkeypatch, tmpdir):
-    import poetry.locations
-
-    from poetry.utils._compat import Path
-
-    path = Path(str(tmpdir))
-    monkeypatch.setattr(poetry.locations, "REPOSITORY_CACHE_DIR", path)
-    return path
-
-
-@pytest.fixture
-def repository_one():
-    return "01_{}".format(uuid.uuid4())
-
-
-@pytest.fixture
-def repository_two():
-    return "02_{}".format(uuid.uuid4())
-
-
-@pytest.fixture
-def mock_caches(repository_cache_dir, repository_one, repository_two):
-    (repository_cache_dir / repository_one).mkdir()
-    (repository_cache_dir / repository_two).mkdir()
-
-
-@pytest.fixture
-def tester(command_tester_factory):
-    return command_tester_factory("cache list")
-
-
-def test_cache_list(tester, mock_caches, repository_one, repository_two):
-    tester.execute()
-
-    expected = """\
-{}
-{}
-""".format(
-        repository_one, repository_two
-    )
-
-    assert expected == tester.io.fetch_output()
-
-
-def test_cache_list_empty(tester, repository_cache_dir):
-    tester.execute()
-
-    expected = """\
-No caches found
-"""
-
-    assert expected == tester.io.fetch_output()
diff --git a/vendor/poetry/tests/console/commands/test_check.py b/vendor/poetry/tests/console/commands/test_check.py
index caa54851..9c913616 100644
--- a/vendor/poetry/tests/console/commands/test_check.py
+++ b/vendor/poetry/tests/console/commands/test_check.py
@@ -1,25 +1,34 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+
 import pytest
 
-from poetry.utils._compat import PY2
-from poetry.utils._compat import Path
+
+if TYPE_CHECKING:
+    from cleo.testers.command_tester import CommandTester
+    from pytest_mock import MockerFixture
+
+    from tests.types import CommandTesterFactory
 
 
 @pytest.fixture()
-def tester(command_tester_factory):
+def tester(command_tester_factory: CommandTesterFactory) -> CommandTester:
     return command_tester_factory("check")
 
 
-def test_check_valid(tester):
+def test_check_valid(tester: CommandTester):
     tester.execute()
 
     expected = """\
 All set!
 """
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
 
 
-def test_check_invalid(mocker, tester):
+def test_check_invalid(mocker: MockerFixture, tester: CommandTester):
     mocker.patch(
         "poetry.factory.Factory.locate",
         return_value=Path(__file__).parent.parent.parent
@@ -30,17 +39,12 @@ def test_check_invalid(mocker, tester):
 
     tester.execute()
 
-    if PY2:
-        expected = """\
-Error: u'description' is a required property
-Warning: A wildcard Python dependency is ambiguous. Consider specifying a more explicit one.
-Warning: The "pendulum" dependency specifies the "allows-prereleases" property, which is deprecated. Use "allow-prereleases" instead.
-"""
-    else:
-        expected = """\
+    expected = """\
 Error: 'description' is a required property
-Warning: A wildcard Python dependency is ambiguous. Consider specifying a more explicit one.
-Warning: The "pendulum" dependency specifies the "allows-prereleases" property, which is deprecated. Use "allow-prereleases" instead.
+Warning: A wildcard Python dependency is ambiguous.\
+ Consider specifying a more explicit one.
+Warning: The "pendulum" dependency specifies the "allows-prereleases" property,\
+ which is deprecated. Use "allow-prereleases" instead.
 """
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_error() == expected
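Two changes land in this expected string: the `PY2` branch disappears, and trailing-backslash continuations keep the source under the E501 limit without adding newlines to the value; the assertion also moves from `fetch_output()` to `fetch_error()`, since the diagnostics are emitted on stderr. The continuation behaviour in isolation:

```python
expected = """\
Warning: A wildcard Python dependency is ambiguous.\
 Consider specifying a more explicit one.
"""
assert expected == (
    "Warning: A wildcard Python dependency is ambiguous."
    " Consider specifying a more explicit one.\n"
)
```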
diff --git a/vendor/poetry/tests/console/commands/test_config.py b/vendor/poetry/tests/console/commands/test_config.py
index df86d165..052b726c 100644
--- a/vendor/poetry/tests/console/commands/test_config.py
+++ b/vendor/poetry/tests/console/commands/test_config.py
@@ -1,81 +1,117 @@
+from __future__ import annotations
+
 import json
 import os
 
-from pathlib import Path
+from typing import TYPE_CHECKING
 
 import pytest
 
+from deepdiff import DeepDiff
+from poetry.core.pyproject.exceptions import PyProjectException
+
 from poetry.config.config_source import ConfigSource
-from poetry.core.pyproject import PyProjectException
 from poetry.factory import Factory
-from poetry.utils._compat import PY2
-from poetry.utils._compat import WINDOWS
+from tests.conftest import Config
+
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from cleo.testers.command_tester import CommandTester
+    from pytest_mock import MockerFixture
+
+    from poetry.config.dict_config_source import DictConfigSource
+    from tests.types import CommandTesterFactory
+    from tests.types import FixtureDirGetter
 
 
 @pytest.fixture()
-def tester(command_tester_factory):
+def tester(command_tester_factory: CommandTesterFactory) -> CommandTester:
     return command_tester_factory("config")
 
 
-def test_show_config_with_local_config_file_empty(tester, mocker):
+def test_show_config_with_local_config_file_empty(
+    tester: CommandTester, mocker: MockerFixture
+):
     mocker.patch(
         "poetry.factory.Factory.create_poetry",
         side_effect=PyProjectException("[tool.poetry] section not found"),
     )
     tester.execute()
 
-    assert "" == tester.io.fetch_output()
+    assert tester.io.fetch_output() == ""
 
 
-def test_list_displays_default_value_if_not_set(tester, config):
+def test_list_displays_default_value_if_not_set(
+    tester: CommandTester, config: Config, config_cache_dir: Path
+):
     tester.execute("--list")
 
-    expected = """cache-dir = "{cache_dir}"
+    cache_dir = json.dumps(str(config_cache_dir))
+    venv_path = json.dumps(os.path.join("{cache-dir}", "virtualenvs"))
+    expected = f"""cache-dir = {cache_dir}
 experimental.new-installer = true
+experimental.system-git-client = false
+installer.max-workers = null
+installer.no-binary = null
 installer.parallel = true
 virtualenvs.create = true
 virtualenvs.in-project = null
-virtualenvs.path = {path}  # {cache_dir}{sep}virtualenvs
-""".format(
-        path=json.dumps(os.path.join("{cache-dir}", "virtualenvs")),
-        sep=os.path.sep,
-        cache_dir=str(Path.cwd() / ".pypoetrycache")
-    )
+virtualenvs.options.always-copy = false
+virtualenvs.options.no-pip = false
+virtualenvs.options.no-setuptools = false
+virtualenvs.options.system-site-packages = false
+virtualenvs.path = {venv_path}  # {config_cache_dir / 'virtualenvs'}
+virtualenvs.prefer-active-python = false
+virtualenvs.prompt = "{{project_name}}-py{{python_version}}"
+"""
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
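The rewritten expectations in this file are f-strings, so literal placeholders such as `{project_name}` in the config values must be written with doubled braces. In isolation:

```python
venv_prompt = f'virtualenvs.prompt = "{{project_name}}-py{{python_version}}"'
assert venv_prompt == 'virtualenvs.prompt = "{project_name}-py{python_version}"'
```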
 
 
-def test_list_displays_set_get_setting(tester, config):
+def test_list_displays_set_get_setting(
+    tester: CommandTester, config: Config, config_cache_dir: Path
+):
     tester.execute("virtualenvs.create false")
 
     tester.execute("--list")
 
-    expected = """cache-dir = "{cache_dir}"
+    cache_dir = json.dumps(str(config_cache_dir))
+    venv_path = json.dumps(os.path.join("{cache-dir}", "virtualenvs"))
+    expected = f"""cache-dir = {cache_dir}
 experimental.new-installer = true
+experimental.system-git-client = false
+installer.max-workers = null
+installer.no-binary = null
 installer.parallel = true
 virtualenvs.create = false
 virtualenvs.in-project = null
-virtualenvs.path = {path}  # {cache_dir}{sep}virtualenvs
-""".format(
-        path=json.dumps(os.path.join("{cache-dir}", "virtualenvs")),
-        sep=os.path.sep,
-        cache_dir=str(Path.cwd() / ".pypoetrycache")
-    )
+virtualenvs.options.always-copy = false
+virtualenvs.options.no-pip = false
+virtualenvs.options.no-setuptools = false
+virtualenvs.options.system-site-packages = false
+virtualenvs.path = {venv_path}  # {config_cache_dir / 'virtualenvs'}
+virtualenvs.prefer-active-python = false
+virtualenvs.prompt = "{{project_name}}-py{{python_version}}"
+"""
 
-    assert 0 == config.set_config_source.call_count
-    assert expected == tester.io.fetch_output()
+    assert config.set_config_source.call_count == 0
+    assert tester.io.fetch_output() == expected
 
 
-def test_display_single_setting(tester, config):
+def test_display_single_setting(tester: CommandTester, config: Config):
     tester.execute("virtualenvs.create")
 
     expected = """true
 """
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
 
 
-def test_display_single_local_setting(command_tester_factory, fixture_dir):
+def test_display_single_local_setting(
+    command_tester_factory: CommandTesterFactory, fixture_dir: FixtureDirGetter
+):
     tester = command_tester_factory(
         "config", poetry=Factory().create_poetry(fixture_dir("with_local_config"))
     )
@@ -84,66 +120,93 @@ def test_display_single_local_setting(command_tester_factory, fixture_dir):
     expected = """false
 """
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
 
 
-def test_list_displays_set_get_local_setting(tester, config):
+def test_list_displays_set_get_local_setting(
+    tester: CommandTester, config: Config, config_cache_dir: Path
+):
     tester.execute("virtualenvs.create false --local")
 
     tester.execute("--list")
 
-    expected = """cache-dir = "{cache_dir}"
+    cache_dir = json.dumps(str(config_cache_dir))
+    venv_path = json.dumps(os.path.join("{cache-dir}", "virtualenvs"))
+    expected = f"""cache-dir = {cache_dir}
 experimental.new-installer = true
+experimental.system-git-client = false
+installer.max-workers = null
+installer.no-binary = null
 installer.parallel = true
 virtualenvs.create = false
 virtualenvs.in-project = null
-virtualenvs.path = {path}  # {cache_dir}{sep}virtualenvs
-""".format(
-        path=json.dumps(os.path.join("{cache-dir}", "virtualenvs")),
-        sep=os.path.sep,
-        cache_dir=str(Path.cwd() / ".pypoetrycache")
-    )
+virtualenvs.options.always-copy = false
+virtualenvs.options.no-pip = false
+virtualenvs.options.no-setuptools = false
+virtualenvs.options.system-site-packages = false
+virtualenvs.path = {venv_path}  # {config_cache_dir / 'virtualenvs'}
+virtualenvs.prefer-active-python = false
+virtualenvs.prompt = "{{project_name}}-py{{python_version}}"
+"""
 
-    assert 1 == config.set_config_source.call_count
-    assert expected == tester.io.fetch_output()
+    assert config.set_config_source.call_count == 1
+    assert tester.io.fetch_output() == expected
 
 
-def test_set_pypi_token(tester, auth_config_source):
+def test_set_pypi_token(tester: CommandTester, auth_config_source: DictConfigSource):
     tester.execute("pypi-token.pypi mytoken")
     tester.execute("--list")
 
-    assert "mytoken" == auth_config_source.config["pypi-token"]["pypi"]
+    assert auth_config_source.config["pypi-token"]["pypi"] == "mytoken"
 
 
-def test_set_client_cert(tester, auth_config_source, mocker):
+def test_set_client_cert(
+    tester: CommandTester,
+    auth_config_source: DictConfigSource,
+    mocker: MockerFixture,
+):
     mocker.spy(ConfigSource, "__init__")
 
     tester.execute("certificates.foo.client-cert path/to/cert.pem")
 
     assert (
-        "path/to/cert.pem"
-        == auth_config_source.config["certificates"]["foo"]["client-cert"]
+        auth_config_source.config["certificates"]["foo"]["client-cert"]
+        == "path/to/cert.pem"
     )
 
 
-def test_set_cert(tester, auth_config_source, mocker):
+@pytest.mark.parametrize(
+    ("value", "result"),
+    [
+        ("path/to/ca.pem", "path/to/ca.pem"),
+        ("true", True),
+        ("false", False),
+    ],
+)
+def test_set_cert(
+    tester: CommandTester,
+    auth_config_source: DictConfigSource,
+    mocker: MockerFixture,
+    value: str,
+    result: str | bool,
+):
     mocker.spy(ConfigSource, "__init__")
 
-    tester.execute("certificates.foo.cert path/to/ca.pem")
+    tester.execute(f"certificates.foo.cert {value}")
 
-    assert "path/to/ca.pem" == auth_config_source.config["certificates"]["foo"]["cert"]
+    assert auth_config_source.config["certificates"]["foo"]["cert"] == result
 
 
-def test_config_installer_parallel(tester, command_tester_factory):
-    serial_enforced = PY2 and WINDOWS
-
+def test_config_installer_parallel(
+    tester: CommandTester, command_tester_factory: CommandTesterFactory
+):
     tester.execute("--local installer.parallel")
     assert tester.io.fetch_output().strip() == "true"
 
     workers = command_tester_factory(
         "install"
     )._command._installer._executor._max_workers
-    assert workers > 1 or (serial_enforced and workers == 1)
+    assert workers > 1
 
     tester.io.clear_output()
     tester.execute("--local installer.parallel false")
@@ -154,3 +217,33 @@ def test_config_installer_parallel(tester, command_tester_factory):
         "install"
     )._command._installer._executor._max_workers
     assert workers == 1
+
+
+@pytest.mark.parametrize(
+    ("value", "expected"),
+    [
+        ("true", [":all:"]),
+        ("1", [":all:"]),
+        ("false", [":none:"]),
+        ("0", [":none:"]),
+        ("pytest", ["pytest"]),
+        ("PyTest", ["pytest"]),
+        ("pytest,black", ["pytest", "black"]),
+        ("", []),
+    ],
+)
+def test_config_installer_no_binary(
+    tester: CommandTester, value: str, expected: list[str]
+) -> None:
+    setting = "installer.no-binary"
+
+    tester.execute(setting)
+    assert tester.io.fetch_output().strip() == "null"
+
+    config = Config.create()
+    assert not config.get(setting)
+
+    tester.execute(f"{setting} '{value}'")
+
+    config = Config.create(reload=True)
+    assert not DeepDiff(config.get(setting), expected, ignore_order=True)
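`DeepDiff` (the third-party `deepdiff` package, newly imported at the top of this file) returns an empty, hence falsy, result for equal structures; with `ignore_order=True`, list comparison becomes order-insensitive, which plain `==` is not:

```python
from deepdiff import DeepDiff

assert not DeepDiff(["pytest", "black"], ["black", "pytest"], ignore_order=True)
assert DeepDiff(["pytest"], ["black"], ignore_order=True)
```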
diff --git a/vendor/poetry/tests/console/commands/test_export.py b/vendor/poetry/tests/console/commands/test_export.py
deleted file mode 100644
index f5b9fba4..00000000
--- a/vendor/poetry/tests/console/commands/test_export.py
+++ /dev/null
@@ -1,115 +0,0 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-import pytest
-
-from tests.helpers import get_package
-
-
-PYPROJECT_CONTENT = """\
-[tool.poetry]
-name = "simple-project"
-version = "1.2.3"
-description = "Some description."
-authors = [
-    "Sébastien Eustace "
-]
-license = "MIT"
-
-readme = "README.rst"
-
-homepage = "https://python-poetry.org"
-repository = "https://github.com/python-poetry/poetry"
-documentation = "https://python-poetry.org/docs"
-
-keywords = ["packaging", "dependency", "poetry"]
-
-classifiers = [
-    "Topic :: Software Development :: Build Tools",
-    "Topic :: Software Development :: Libraries :: Python Modules"
-]
-
-# Requirements
-[tool.poetry.dependencies]
-python = "~2.7 || ^3.4"
-foo = "^1.0"
-bar = { version = "^1.1", optional = true }
-
-[tool.poetry.extras]
-feature_bar = ["bar"]
-"""
-
-
-@pytest.fixture(autouse=True)
-def setup(repo):
-    repo.add_package(get_package("foo", "1.0.0"))
-    repo.add_package(get_package("bar", "1.1.0"))
-
-
-@pytest.fixture
-def poetry(project_factory):
-    return project_factory(name="export", pyproject_content=PYPROJECT_CONTENT)
-
-
-@pytest.fixture
-def tester(command_tester_factory, poetry):
-    return command_tester_factory("export", poetry=poetry)
-
-
-def _export_requirements(tester, poetry):
-    tester.execute("--format requirements.txt --output requirements.txt")
-
-    requirements = poetry.file.parent / "requirements.txt"
-    assert requirements.exists()
-
-    with requirements.open(encoding="utf-8") as f:
-        content = f.read()
-
-    assert poetry.locker.lock.exists()
-
-    expected = """\
-foo==1.0.0
-"""
-
-    assert expected == content
-
-
-def test_export_exports_requirements_txt_file_locks_if_no_lock_file(tester, poetry):
-    assert not poetry.locker.lock.exists()
-    _export_requirements(tester, poetry)
-    assert "The lock file does not exist. Locking." in tester.io.fetch_output()
-
-
-def test_export_exports_requirements_txt_uses_lock_file(tester, poetry, do_lock):
-    _export_requirements(tester, poetry)
-    assert "The lock file does not exist. Locking." not in tester.io.fetch_output()
-
-
-def test_export_fails_on_invalid_format(tester, do_lock):
-    with pytest.raises(ValueError):
-        tester.execute("--format invalid")
-
-
-def test_export_prints_to_stdout_by_default(tester, do_lock):
-    tester.execute("--format requirements.txt")
-    expected = """\
-foo==1.0.0
-"""
-    assert expected == tester.io.fetch_output()
-
-
-def test_export_uses_requirements_txt_format_by_default(tester, do_lock):
-    tester.execute()
-    expected = """\
-foo==1.0.0
-"""
-    assert expected == tester.io.fetch_output()
-
-
-def test_export_includes_extras_by_flag(tester, do_lock):
-    tester.execute("--format requirements.txt --extras feature_bar")
-    expected = """\
-bar==1.1.0
-foo==1.0.0
-"""
-    assert expected == tester.io.fetch_output()
diff --git a/vendor/poetry/tests/console/commands/test_init.py b/vendor/poetry/tests/console/commands/test_init.py
index 7a9212ab..0a300185 100644
--- a/vendor/poetry/tests/console/commands/test_init.py
+++ b/vendor/poetry/tests/console/commands/test_init.py
@@ -1,20 +1,38 @@
+from __future__ import annotations
+
 import os
 import shutil
 import sys
 
+from pathlib import Path
+from typing import TYPE_CHECKING
+
 import pytest
 
-from cleo import CommandTester
+from cleo.testers.command_tester import CommandTester
+from packaging.utils import canonicalize_name
 
+from poetry.console.commands.init import InitCommand
 from poetry.repositories import Pool
-from poetry.utils._compat import Path
 from poetry.utils._compat import decode
-from tests.helpers import TestApplication
+from tests.helpers import PoetryTestApplication
 from tests.helpers import get_package
 
 
+if TYPE_CHECKING:
+    from collections.abc import Iterator
+
+    from _pytest.fixtures import FixtureRequest
+    from poetry.core.packages.package import Package
+    from pytest_mock import MockerFixture
+
+    from poetry.poetry import Poetry
+    from tests.helpers import TestRepository
+    from tests.types import FixtureDirGetter
+
+
 @pytest.fixture
-def source_dir(tmp_path):  # type: (...) -> Path
+def source_dir(tmp_path: Path) -> Iterator[Path]:
     cwd = os.getcwd()
 
     try:
@@ -25,22 +43,22 @@ def source_dir(tmp_path):  # type: (...) -> Path
 
 
 @pytest.fixture
-def patches(mocker, source_dir, repo):
-    mocker.patch("poetry.utils._compat.Path.cwd", return_value=source_dir)
+def patches(mocker: MockerFixture, source_dir: Path, repo: TestRepository) -> None:
+    mocker.patch("pathlib.Path.cwd", return_value=source_dir)
     mocker.patch(
         "poetry.console.commands.init.InitCommand._get_pool", return_value=Pool([repo])
     )
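The `source_dir` fixture retyped earlier in this file (its body is split across the hunk boundary above) is the usual chdir-and-restore generator, which is why the annotation becomes `Iterator[Path]`. A hedged reconstruction of the full fixture:

```python
import os
from collections.abc import Iterator
from pathlib import Path

import pytest


@pytest.fixture
def source_dir(tmp_path: Path) -> Iterator[Path]:
    cwd = os.getcwd()
    try:
        os.chdir(str(tmp_path))
        yield tmp_path
    finally:
        os.chdir(cwd)
```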
 
 
 @pytest.fixture
-def tester(patches):
+def tester(patches: None) -> CommandTester:
     # we need a test application without poetry here.
-    app = TestApplication(None)
+    app = PoetryTestApplication(None)
     return CommandTester(app.find("init"))
 
 
 @pytest.fixture
-def init_basic_inputs():
+def init_basic_inputs() -> str:
     return "\n".join(
         [
             "my-package",  # Package name
@@ -57,7 +75,7 @@ def init_basic_inputs():
 
 
 @pytest.fixture()
-def init_basic_toml():
+def init_basic_toml() -> str:
     return """\
 [tool.poetry]
 name = "my-package"
@@ -65,23 +83,54 @@ def init_basic_toml():
 description = "This is a description"
 authors = ["Your Name "]
 license = "MIT"
+readme = "README.md"
+packages = [{include = "my_package"}]
 
 [tool.poetry.dependencies]
 python = "~2.7 || ^3.6"
-
-[tool.poetry.dev-dependencies]
 """
 
 
-def test_basic_interactive(tester, init_basic_inputs, init_basic_toml):
+def test_basic_interactive(
+    tester: CommandTester, init_basic_inputs: str, init_basic_toml: str
+):
     tester.execute(inputs=init_basic_inputs)
     assert init_basic_toml in tester.io.fetch_output()
 
 
-def test_interactive_with_dependencies(tester, repo):
+def test_noninteractive(
+    app: PoetryTestApplication,
+    mocker: MockerFixture,
+    poetry: Poetry,
+    repo: TestRepository,
+    tmp_path: Path,
+):
+    command = app.find("init")
+    command._pool = poetry.pool
+
+    repo.add_package(get_package("pytest", "3.6.0"))
+
+    p = mocker.patch("pathlib.Path.cwd")
+    p.return_value = tmp_path
+
+    tester = CommandTester(command)
+    args = "--name my-package --dependency pytest"
+    tester.execute(args=args, interactive=False)
+
+    expected = "Using version ^3.6.0 for pytest\n"
+    assert tester.io.fetch_output() == expected
+    assert tester.io.fetch_error() == ""
+
+    toml_content = (tmp_path / "pyproject.toml").read_text()
+    assert 'name = "my-package"' in toml_content
+    assert 'pytest = "^3.6.0"' in toml_content
+
+
+def test_interactive_with_dependencies(tester: CommandTester, repo: TestRepository):
     repo.add_package(get_package("django-pendulum", "0.1.6-pre4"))
     repo.add_package(get_package("pendulum", "2.0.0"))
     repo.add_package(get_package("pytest", "3.6.0"))
+    repo.add_package(get_package("flask", "2.0.0"))
 
     inputs = [
         "my-package",  # Package name
@@ -94,6 +143,9 @@ def test_interactive_with_dependencies(tester, repo):
         "pendulu",  # Search for package
         "1",  # Second option is pendulum
         "",  # Do not set constraint
+        "Flask",
+        "0",
+        "",
         "",  # Stop searching for packages
         "",  # Interactive dev packages
         "pytest",  # Search for package
@@ -111,19 +163,66 @@ def test_interactive_with_dependencies(tester, repo):
 description = "This is a description"
 authors = ["Your Name "]
 license = "MIT"
+readme = "README.md"
+packages = [{include = "my_package"}]
 
 [tool.poetry.dependencies]
 python = "~2.7 || ^3.6"
 pendulum = "^2.0.0"
+flask = "^2.0.0"
 
-[tool.poetry.dev-dependencies]
+[tool.poetry.group.dev.dependencies]
 pytest = "^3.6.0"
 """
 
     assert expected in tester.io.fetch_output()
 
 
-def test_empty_license(tester):
+# Regression test for https://github.com/python-poetry/poetry/issues/2355
+def test_interactive_with_dependencies_and_no_selection(
+    tester: CommandTester, repo: TestRepository
+):
+    repo.add_package(get_package("django-pendulum", "0.1.6-pre4"))
+    repo.add_package(get_package("pendulum", "2.0.0"))
+    repo.add_package(get_package("pytest", "3.6.0"))
+
+    inputs = [
+        "my-package",  # Package name
+        "1.2.3",  # Version
+        "This is a description",  # Description
+        "n",  # Author
+        "MIT",  # License
+        "~2.7 || ^3.6",  # Python
+        "",  # Interactive packages
+        "pendulu",  # Search for package
+        "",  # Do not select an option
+        "",  # Stop searching for packages
+        "",  # Interactive dev packages
+        "pytest",  # Search for package
+        "",  # Do not select an option
+        "",
+        "",
+        "\n",  # Generate
+    ]
+    tester.execute(inputs="\n".join(inputs))
+    expected = """\
+[tool.poetry]
+name = "my-package"
+version = "1.2.3"
+description = "This is a description"
+authors = ["Your Name "]
+license = "MIT"
+readme = "README.md"
+packages = [{include = "my_package"}]
+
+[tool.poetry.dependencies]
+python = "~2.7 || ^3.6"
+"""
+
+    assert expected in tester.io.fetch_output()
+
+
+def test_empty_license(tester: CommandTester):
     inputs = [
         "my-package",  # Package name
         "1.2.3",  # Version
@@ -137,25 +236,23 @@ def test_empty_license(tester):
     ]
     tester.execute(inputs="\n".join(inputs))
 
-    expected = """\
+    python = ".".join(str(c) for c in sys.version_info[:2])
+    expected = f"""\
 [tool.poetry]
 name = "my-package"
 version = "1.2.3"
 description = ""
 authors = ["Your Name "]
+readme = "README.md"
+packages = [{{include = "my_package"}}]
 
 [tool.poetry.dependencies]
 python = "^{python}"
-
-[tool.poetry.dev-dependencies]
-""".format(
-        python=".".join(str(c) for c in sys.version_info[:2])
-    )
-
+"""
     assert expected in tester.io.fetch_output()
 
 
-def test_interactive_with_git_dependencies(tester, repo):
+def test_interactive_with_git_dependencies(tester: CommandTester, repo: TestRepository):
     repo.add_package(get_package("pendulum", "2.0.0"))
     repo.add_package(get_package("pytest", "3.6.0"))
 
@@ -185,19 +282,68 @@ def test_interactive_with_git_dependencies(tester, repo):
 description = "This is a description"
 authors = ["Your Name "]
 license = "MIT"
+readme = "README.md"
+packages = [{include = "my_package"}]
 
 [tool.poetry.dependencies]
 python = "~2.7 || ^3.6"
 demo = {git = "https://github.com/demo/demo.git"}
 
-[tool.poetry.dev-dependencies]
+[tool.poetry.group.dev.dependencies]
 pytest = "^3.6.0"
 """
 
     assert expected in tester.io.fetch_output()
 
 
-def test_interactive_with_git_dependencies_with_reference(tester, repo):
+_generate_choice_list_packages_params: list[list[Package]] = [
+    [
+        get_package("flask-blacklist", "1.0.0"),
+        get_package("Flask-Shelve", "1.0.0"),
+        get_package("flask-pwa", "1.0.0"),
+        get_package("Flask-test1", "1.0.0"),
+        get_package("Flask-test2", "1.0.0"),
+        get_package("Flask-test3", "1.0.0"),
+        get_package("Flask-test4", "1.0.0"),
+        get_package("Flask-test5", "1.0.0"),
+        get_package("Flask", "1.0.0"),
+        get_package("Flask-test6", "1.0.0"),
+        get_package("Flask-test7", "1.0.0"),
+    ],
+    [
+        get_package("flask-blacklist", "1.0.0"),
+        get_package("Flask-Shelve", "1.0.0"),
+        get_package("flask-pwa", "1.0.0"),
+        get_package("Flask-test1", "1.0.0"),
+        get_package("Flask", "1.0.0"),
+    ],
+]
+
+
+@pytest.fixture(params=_generate_choice_list_packages_params)
+def _generate_choice_list_packages(request: FixtureRequest) -> list[Package]:
+    return request.param
+
+
+@pytest.mark.parametrize("package_name", ["flask", "Flask", "flAsK"])
+def test_generate_choice_list(
+    tester: CommandTester,
+    package_name: str,
+    _generate_choice_list_packages: list[Package],
+):
+    init_command = tester.command
+
+    packages = _generate_choice_list_packages
+    choices = init_command._generate_choice_list(
+        packages, canonicalize_name(package_name)
+    )
+
+    assert choices[0] == "Flask"
+
+
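The parametrized cases above pin down an ordering contract rather than a full algorithm: the search result whose canonicalized name equals the query is offered first. A minimal sketch of that contract, assuming packaging's canonicalize_name stands in for the helper the test imports (illustrative only, not InitCommand._generate_choice_list itself):

    from packaging.utils import canonicalize_name

    def generate_choice_list_sketch(matches, query):
        # Exact (canonicalized) match first; remaining results keep their order.
        names = [pkg.name for pkg in matches]
        exact = [n for n in names if canonicalize_name(n) == canonicalize_name(query)]
        return exact + [n for n in names if n not in exact]
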
+def test_interactive_with_git_dependencies_with_reference(
+    tester: CommandTester, repo: TestRepository
+):
     repo.add_package(get_package("pendulum", "2.0.0"))
     repo.add_package(get_package("pytest", "3.6.0"))
 
@@ -227,19 +373,23 @@ def test_interactive_with_git_dependencies_with_reference(tester, repo):
 description = "This is a description"
 authors = ["Your Name "]
 license = "MIT"
+readme = "README.md"
+packages = [{include = "my_package"}]
 
 [tool.poetry.dependencies]
 python = "~2.7 || ^3.6"
 demo = {git = "https://github.com/demo/demo.git", rev = "develop"}
 
-[tool.poetry.dev-dependencies]
+[tool.poetry.group.dev.dependencies]
 pytest = "^3.6.0"
 """
 
     assert expected in tester.io.fetch_output()
 
 
-def test_interactive_with_git_dependencies_and_other_name(tester, repo):
+def test_interactive_with_git_dependencies_and_other_name(
+    tester: CommandTester, repo: TestRepository
+):
     repo.add_package(get_package("pendulum", "2.0.0"))
     repo.add_package(get_package("pytest", "3.6.0"))
 
@@ -269,19 +419,26 @@ def test_interactive_with_git_dependencies_and_other_name(tester, repo):
 description = "This is a description"
 authors = ["Your Name "]
 license = "MIT"
+readme = "README.md"
+packages = [{include = "my_package"}]
 
 [tool.poetry.dependencies]
 python = "~2.7 || ^3.6"
 demo = {git = "https://github.com/demo/pyproject-demo.git"}
 
-[tool.poetry.dev-dependencies]
+[tool.poetry.group.dev.dependencies]
 pytest = "^3.6.0"
 """
 
     assert expected in tester.io.fetch_output()
 
 
-def test_interactive_with_directory_dependency(tester, repo, source_dir, fixture_dir):
+def test_interactive_with_directory_dependency(
+    tester: CommandTester,
+    repo: TestRepository,
+    source_dir: Path,
+    fixture_dir: FixtureDirGetter,
+):
     repo.add_package(get_package("pendulum", "2.0.0"))
     repo.add_package(get_package("pytest", "3.6.0"))
 
@@ -314,19 +471,24 @@ def test_interactive_with_directory_dependency(tester, repo, source_dir, fixture
 description = "This is a description"
 authors = ["Your Name "]
 license = "MIT"
+readme = "README.md"
+packages = [{include = "my_package"}]
 
 [tool.poetry.dependencies]
 python = "~2.7 || ^3.6"
 demo = {path = "demo"}
 
-[tool.poetry.dev-dependencies]
+[tool.poetry.group.dev.dependencies]
 pytest = "^3.6.0"
 """
     assert expected in tester.io.fetch_output()
 
 
 def test_interactive_with_directory_dependency_and_other_name(
-    tester, repo, source_dir, fixture_dir
+    tester: CommandTester,
+    repo: TestRepository,
+    source_dir: Path,
+    fixture_dir: FixtureDirGetter,
 ):
     repo.add_package(get_package("pendulum", "2.0.0"))
     repo.add_package(get_package("pytest", "3.6.0"))
@@ -360,19 +522,26 @@ def test_interactive_with_directory_dependency_and_other_name(
 description = "This is a description"
 authors = ["Your Name "]
 license = "MIT"
+readme = "README.md"
+packages = [{include = "my_package"}]
 
 [tool.poetry.dependencies]
 python = "~2.7 || ^3.6"
 demo = {path = "pyproject-demo"}
 
-[tool.poetry.dev-dependencies]
+[tool.poetry.group.dev.dependencies]
 pytest = "^3.6.0"
 """
 
     assert expected in tester.io.fetch_output()
 
 
-def test_interactive_with_file_dependency(tester, repo, source_dir, fixture_dir):
+def test_interactive_with_file_dependency(
+    tester: CommandTester,
+    repo: TestRepository,
+    source_dir: Path,
+    fixture_dir: FixtureDirGetter,
+):
     repo.add_package(get_package("pendulum", "2.0.0"))
     repo.add_package(get_package("pytest", "3.6.0"))
 
@@ -405,19 +574,71 @@ def test_interactive_with_file_dependency(tester, repo, source_dir, fixture_dir)
 description = "This is a description"
 authors = ["Your Name "]
 license = "MIT"
+readme = "README.md"
+packages = [{include = "my_package"}]
 
 [tool.poetry.dependencies]
 python = "~2.7 || ^3.6"
 demo = {path = "demo-0.1.0-py2.py3-none-any.whl"}
 
-[tool.poetry.dev-dependencies]
+[tool.poetry.group.dev.dependencies]
 pytest = "^3.6.0"
 """
 
     assert expected in tester.io.fetch_output()
 
 
-def test_python_option(tester):
+def test_interactive_with_wrong_dependency_inputs(
+    tester: CommandTester, repo: TestRepository
+):
+    repo.add_package(get_package("pendulum", "2.0.0"))
+    repo.add_package(get_package("pytest", "3.6.0"))
+
+    inputs = [
+        "my-package",  # Package name
+        "1.2.3",  # Version
+        "This is a description",  # Description
+        "n",  # Author
+        "MIT",  # License
+        "^3.8",  # Python
+        "",  # Interactive packages
+        "pendulum 2.0.0 foo",  # Package name and constraint (invalid)
+        "pendulum 2.0.0",  # Package name and constraint (invalid)
+        "pendulum 2.0.0",  # Package name and constraint (invalid)
+        "pendulum 2.0.0",  # Package name and constraint (invalid)
+        "pendulum@^2.0.0",  # Package name and constraint (valid)
+        "",  # End package selection
+        "",  # Interactive dev packages
+        "pytest 3.6.0 foo",  # Dev package name and constraint (invalid)
+        "pytest 3.6.0",  # Dev package name and constraint (invalid)
+        "pytest@3.6.0",  # Dev package name and constraint (valid)
+        "",  # End package selection
+        "\n",  # Generate
+    ]
+    tester.execute(inputs="\n".join(inputs))
+
+    expected = """\
+[tool.poetry]
+name = "my-package"
+version = "1.2.3"
+description = "This is a description"
+authors = ["Your Name "]
+license = "MIT"
+readme = "README.md"
+packages = [{include = "my_package"}]
+
+[tool.poetry.dependencies]
+python = "^3.8"
+pendulum = "^2.0.0"
+
+[tool.poetry.group.dev.dependencies]
+pytest = "3.6.0"
+"""
+
+    assert expected in tester.io.fetch_output()
+
+
+def test_python_option(tester: CommandTester):
     inputs = [
         "my-package",  # Package name
         "1.2.3",  # Version
@@ -437,17 +658,17 @@ def test_python_option(tester):
 description = "This is a description"
 authors = ["Your Name "]
 license = "MIT"
+readme = "README.md"
+packages = [{include = "my_package"}]
 
 [tool.poetry.dependencies]
 python = "~2.7 || ^3.6"
-
-[tool.poetry.dev-dependencies]
 """
 
     assert expected in tester.io.fetch_output()
 
 
-def test_predefined_dependency(tester, repo):
+def test_predefined_dependency(tester: CommandTester, repo: TestRepository):
     repo.add_package(get_package("pendulum", "2.0.0"))
 
     inputs = [
@@ -470,18 +691,20 @@ def test_predefined_dependency(tester, repo):
 description = "This is a description"
 authors = ["Your Name "]
 license = "MIT"
+readme = "README.md"
+packages = [{include = "my_package"}]
 
 [tool.poetry.dependencies]
 python = "~2.7 || ^3.6"
 pendulum = "^2.0.0"
-
-[tool.poetry.dev-dependencies]
 """
 
     assert expected in tester.io.fetch_output()
 
 
-def test_predefined_and_interactive_dependencies(tester, repo):
+def test_predefined_and_interactive_dependencies(
+    tester: CommandTester, repo: TestRepository
+):
     repo.add_package(get_package("pendulum", "2.0.0"))
     repo.add_package(get_package("pyramid", "1.10"))
 
@@ -510,6 +733,8 @@ def test_predefined_and_interactive_dependencies(tester, repo):
 description = "This is a description"
 authors = ["Your Name "]
 license = "MIT"
+readme = "README.md"
+packages = [{include = "my_package"}]
 
 [tool.poetry.dependencies]
 python = "~2.7 || ^3.6"
@@ -520,7 +745,7 @@ def test_predefined_and_interactive_dependencies(tester, repo):
     assert 'pyramid = "^1.10"' in output
 
 
-def test_predefined_dev_dependency(tester, repo):
+def test_predefined_dev_dependency(tester: CommandTester, repo: TestRepository):
     repo.add_package(get_package("pytest", "3.6.0"))
 
     inputs = [
@@ -544,18 +769,22 @@ def test_predefined_dev_dependency(tester, repo):
 description = "This is a description"
 authors = ["Your Name "]
 license = "MIT"
+readme = "README.md"
+packages = [{include = "my_package"}]
 
 [tool.poetry.dependencies]
 python = "~2.7 || ^3.6"
 
-[tool.poetry.dev-dependencies]
+[tool.poetry.group.dev.dependencies]
 pytest = "^3.6.0"
 """
 
     assert expected in tester.io.fetch_output()
 
 
-def test_predefined_and_interactive_dev_dependencies(tester, repo):
+def test_predefined_and_interactive_dev_dependencies(
+    tester: CommandTester, repo: TestRepository
+):
     repo.add_package(get_package("pytest", "3.6.0"))
     repo.add_package(get_package("pytest-requests", "0.2.0"))
 
@@ -584,11 +813,15 @@ def test_predefined_and_interactive_dev_dependencies(tester, repo):
 description = "This is a description"
 authors = ["Your Name "]
 license = "MIT"
+readme = "README.md"
+packages = [{include = "my_package"}]
 
 [tool.poetry.dependencies]
 python = "~2.7 || ^3.6"
 
-[tool.poetry.dev-dependencies]
+[tool.poetry.group.dev.dependencies]
+pytest = "^3.6.0"
+pytest-requests = "^0.2.0"
 """
 
     output = tester.io.fetch_output()
@@ -597,8 +830,53 @@ def test_predefined_and_interactive_dev_dependencies(tester, repo):
     assert 'pytest = "^3.6.0"' in output
 
 
-def test_add_package_with_extras_and_whitespace(tester):
-    result = tester._command._parse_requirements(["databases[postgresql, sqlite]"])
+def test_predefined_all_options(tester: CommandTester, repo: TestRepository):
+    repo.add_package(get_package("pendulum", "2.0.0"))
+    repo.add_package(get_package("pytest", "3.6.0"))
+
+    inputs = [
+        "1.2.3",  # Version
+        "",  # Author
+        "n",  # Interactive packages
+        "n",  # Interactive dev packages
+        "\n",  # Generate
+    ]
+
+    tester.execute(
+        "--name my-package "
+        "--description 'This is a description' "
+        "--author 'Foo Bar ' "
+        "--python '^3.8' "
+        "--license MIT "
+        "--dependency pendulum "
+        "--dev-dependency pytest",
+        inputs="\n".join(inputs),
+    )
+
+    expected = """\
+[tool.poetry]
+name = "my-package"
+version = "1.2.3"
+description = "This is a description"
+authors = ["Foo Bar "]
+license = "MIT"
+readme = "README.md"
+packages = [{include = "my_package"}]
+
+[tool.poetry.dependencies]
+python = "^3.8"
+pendulum = "^2.0.0"
+
+[tool.poetry.group.dev.dependencies]
+pytest = "^3.6.0"
+"""
+
+    output = tester.io.fetch_output()
+    assert expected in output
+
+
+def test_add_package_with_extras_and_whitespace(tester: CommandTester):
+    result = tester.command._parse_requirements(["databases[postgresql, sqlite]"])
 
     assert result[0]["name"] == "databases"
     assert len(result[0]["extras"]) == 2
@@ -607,7 +885,10 @@ def test_add_package_with_extras_and_whitespace(tester):
 
 
 def test_init_existing_pyproject_simple(
-    tester, source_dir, init_basic_inputs, init_basic_toml
+    tester: CommandTester,
+    source_dir: Path,
+    init_basic_inputs: str,
+    init_basic_toml: str,
 ):
     pyproject_file = source_dir / "pyproject.toml"
     existing_section = """
@@ -616,13 +897,38 @@ def test_init_existing_pyproject_simple(
 """
     pyproject_file.write_text(decode(existing_section))
     tester.execute(inputs=init_basic_inputs)
-    assert (
-        "{}\n{}".format(existing_section, init_basic_toml) in pyproject_file.read_text()
+    assert f"{existing_section}\n{init_basic_toml}" in pyproject_file.read_text()
+
+
+@pytest.mark.parametrize("linesep", ["\n", "\r\n"])
+def test_init_existing_pyproject_consistent_linesep(
+    tester: CommandTester,
+    source_dir: Path,
+    init_basic_inputs: str,
+    init_basic_toml: str,
+    linesep: str,
+):
+    pyproject_file = source_dir / "pyproject.toml"
+    existing_section = """
+[tool.black]
+line-length = 88
+""".replace(
+        "\n", linesep
     )
+    with open(pyproject_file, "w", newline="") as f:
+        f.write(existing_section)
+    tester.execute(inputs=init_basic_inputs)
+    with open(pyproject_file, newline="") as f:
+        content = f.read()
+    init_basic_toml = init_basic_toml.replace("\n", linesep)
+    assert f"{existing_section}{linesep}{init_basic_toml}" in content
 
 
 def test_init_non_interactive_existing_pyproject_add_dependency(
-    tester, source_dir, init_basic_inputs, repo
+    tester: CommandTester,
+    source_dir: Path,
+    init_basic_inputs: str,
+    repo: TestRepository,
 ):
     pyproject_file = source_dir / "pyproject.toml"
     existing_section = """
@@ -647,18 +953,18 @@ def test_init_non_interactive_existing_pyproject_add_dependency(
 version = "0.1.0"
 description = ""
 authors = ["Your Name "]
+readme = "README.md"
+packages = [{include = "my_package"}]
 
 [tool.poetry.dependencies]
 python = "^3.6"
 foo = "^1.19.2"
-
-[tool.poetry.dev-dependencies]
 """
-    assert "{}\n{}".format(existing_section, expected) in pyproject_file.read_text()
+    assert f"{existing_section}\n{expected}" in pyproject_file.read_text()
 
 
 def test_init_existing_pyproject_with_build_system_fails(
-    tester, source_dir, init_basic_inputs
+    tester: CommandTester, source_dir: Path, init_basic_inputs: str
 ):
     pyproject_file = source_dir / "pyproject.toml"
     existing_section = """
@@ -669,7 +975,33 @@ def test_init_existing_pyproject_with_build_system_fails(
     pyproject_file.write_text(decode(existing_section))
     tester.execute(inputs=init_basic_inputs)
     assert (
-        tester.io.fetch_output().strip()
+        tester.io.fetch_error().strip()
         == "A pyproject.toml file with a defined build-system already exists."
     )
-    assert "{}".format(existing_section) in pyproject_file.read_text()
+    assert existing_section in pyproject_file.read_text()
+
+
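The guard exercised by the test above can be pictured with tomlkit; a hedged sketch (not the command's actual check), assuming the presence of a `[build-system]` table is the whole criterion:

    import tomlkit

    def has_build_system(pyproject_text: str) -> bool:
        # True when the document already defines [build-system]; in that case
        # `poetry init` refuses to continue rather than clobber the file.
        return "build-system" in tomlkit.parse(pyproject_text)
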
+@pytest.mark.parametrize(
+    "name",
+    [
+        None,
+        "",
+        "foo",
+        "   foo  ",
+        "foo==2.0",
+        "foo@2.0",
+        "  foo@2.0   ",
+        "foo 2.0",
+        "   foo 2.0  ",
+    ],
+)
+def test__validate_package_valid(name: str | None):
+    assert InitCommand._validate_package(name) == name
+
+
+@pytest.mark.parametrize(
+    "name", ["foo bar 2.0", "   foo bar 2.0   ", "foo bar foobar 2.0"]
+)
+def test__validate_package_invalid(name: str):
+    with pytest.raises(ValueError):
+        InitCommand._validate_package(name)
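Taken together, the valid and invalid cases imply a simple rule. The reconstruction below is consistent with the table but is only a sketch, not `InitCommand._validate_package` itself:

    from __future__ import annotations

    def validate_package_sketch(name: str | None) -> str | None:
        # "name", "name constraint", and "name@constraint" pass through
        # untouched (as do None and ""); three or more whitespace-separated
        # tokens are rejected.
        if name and len(name.split()) > 2:
            raise ValueError(f"Invalid package definition: {name}")
        return name
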
diff --git a/vendor/poetry/tests/console/commands/test_install.py b/vendor/poetry/tests/console/commands/test_install.py
new file mode 100644
index 00000000..09efe8ec
--- /dev/null
+++ b/vendor/poetry/tests/console/commands/test_install.py
@@ -0,0 +1,185 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+
+from poetry.core.masonry.utils.module import ModuleOrPackageNotFound
+from poetry.core.packages.dependency_group import MAIN_GROUP
+
+
+if TYPE_CHECKING:
+    from cleo.testers.command_tester import CommandTester
+    from pytest_mock import MockerFixture
+
+    from poetry.poetry import Poetry
+    from tests.types import CommandTesterFactory
+    from tests.types import ProjectFactory
+
+
+PYPROJECT_CONTENT = """\
+[tool.poetry]
+name = "simple-project"
+version = "1.2.3"
+description = "Some description."
+authors = [
+    "Python Poetry "
+]
+license = "MIT"
+readme = "README.rst"
+
+[tool.poetry.dependencies]
+python = "~2.7 || ^3.4"
+fizz = { version = "^1.0", optional = true }
+buzz = { version = "^2.0", optional = true }
+
+[tool.poetry.group.foo.dependencies]
+foo = "^1.0"
+
+[tool.poetry.group.bar.dependencies]
+bar = "^1.1"
+
+[tool.poetry.group.baz.dependencies]
+baz = "^1.2"
+
+[tool.poetry.group.bim.dependencies]
+bim = "^1.3"
+
+[tool.poetry.group.bam]
+optional = true
+
+[tool.poetry.group.bam.dependencies]
+bam = "^1.4"
+
+[tool.poetry.extras]
+extras_a = [ "fizz" ]
+extras_b = [ "buzz" ]
+"""
+
+
+@pytest.fixture
+def poetry(project_factory: ProjectFactory) -> Poetry:
+    return project_factory(name="export", pyproject_content=PYPROJECT_CONTENT)
+
+
+@pytest.fixture
+def tester(
+    command_tester_factory: CommandTesterFactory, poetry: Poetry
+) -> CommandTester:
+    return command_tester_factory("install")
+
+
+@pytest.mark.parametrize(
+    ("options", "groups"),
+    [
+        ("", {MAIN_GROUP, "foo", "bar", "baz", "bim"}),
+        ("--only-root", set()),
+        (f"--only {MAIN_GROUP}", {MAIN_GROUP}),
+        ("--only foo", {"foo"}),
+        ("--only foo,bar", {"foo", "bar"}),
+        ("--only bam", {"bam"}),
+        ("--with bam", {MAIN_GROUP, "foo", "bar", "baz", "bim", "bam"}),
+        ("--without foo,bar", {MAIN_GROUP, "baz", "bim"}),
+        (f"--without {MAIN_GROUP}", {"foo", "bar", "baz", "bim"}),
+        ("--with foo,bar --without baz --without bim --only bam", {"bam"}),
+        # net result zero options
+        ("--with foo", {MAIN_GROUP, "foo", "bar", "baz", "bim"}),
+        ("--without bam", {MAIN_GROUP, "foo", "bar", "baz", "bim"}),
+        ("--with bam --without bam", {MAIN_GROUP, "foo", "bar", "baz", "bim"}),
+        ("--with foo --without foo", {MAIN_GROUP, "bar", "baz", "bim"}),
+        # deprecated options
+        ("--no-dev", {MAIN_GROUP}),
+    ],
+)
+@pytest.mark.parametrize("with_root", [True, False])
+def test_group_options_are_passed_to_the_installer(
+    options: str,
+    groups: set[str],
+    with_root: bool,
+    tester: CommandTester,
+    mocker: MockerFixture,
+):
+    """
+    Group options are passed properly to the installer.
+    """
+    mocker.patch.object(tester.command.installer, "run", return_value=0)
+    editable_builder_mock = mocker.patch(
+        "poetry.masonry.builders.editable.EditableBuilder",
+        side_effect=ModuleOrPackageNotFound(),
+    )
+
+    if not with_root:
+        options = f"--no-root {options}"
+
+    status_code = tester.execute(options)
+
+    if options == "--no-root --only-root":
+        assert status_code == 1
+        return
+
+    assert status_code == 0
+
+    package_groups = set(tester.command.poetry.package._dependency_groups.keys())
+    installer_groups = set(tester.command.installer._groups)
+
+    assert installer_groups <= package_groups
+    assert set(installer_groups) == groups
+
+    if with_root:
+        assert editable_builder_mock.call_count == 1
+        assert editable_builder_mock.call_args_list[0][0][0] == tester.command.poetry
+    else:
+        assert editable_builder_mock.call_count == 0
+
+
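The parametrization above reads as a truth table for group resolution. A hedged sketch of that set arithmetic (names are illustrative; the installer's real code differs): `--only` replaces the selection outright; otherwise the non-optional groups form the base, `--with` adds groups (which is how the optional `bam` group gets in), and `--without` subtracts them.

    def resolve_groups_sketch(
        non_optional: set[str], with_: set[str], without: set[str], only: set[str]
    ) -> set[str]:
        if only:
            return set(only)  # --only wins outright
        return (non_optional | with_) - without
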
+def test_sync_option_is_passed_to_the_installer(
+    tester: CommandTester, mocker: MockerFixture
+):
+    """
+    The --sync option is passed properly to the installer.
+    """
+    mocker.patch.object(tester.command.installer, "run", return_value=1)
+
+    tester.execute("--sync")
+
+    assert tester.command.installer._requires_synchronization
+
+
+def test_no_all_extras_doesnt_populate_installer(
+    tester: CommandTester, mocker: MockerFixture
+):
+    """
+    Not passing --all-extras means the installer doesn't see any extras.
+    """
+    mocker.patch.object(tester.command.installer, "run", return_value=1)
+
+    tester.execute()
+
+    assert not tester.command.installer._extras
+
+
+def test_all_extras_populates_installer(tester: CommandTester, mocker: MockerFixture):
+    """
+    The --all-extras option results in extras passed to the installer.
+    """
+    mocker.patch.object(tester.command.installer, "run", return_value=1)
+
+    tester.execute("--all-extras")
+
+    assert tester.command.installer._extras == ["extras_a", "extras_b"]
+
+
+def test_extras_conflicts_all_extras(tester: CommandTester, mocker: MockerFixture):
+    """
+    The --extras option doesn't make sense combined with --all-extras.
+    """
+    mocker.patch.object(tester.command.installer, "run", return_value=0)
+
+    tester.execute("--extras foo --all-extras")
+
+    assert tester.status_code == 1
+    assert (
+        tester.io.fetch_error()
+        == "You cannot specify explicit `--extras` while installing using"
+        " `--all-extras`.\n"
+    )
diff --git a/vendor/poetry/tests/console/commands/test_lock.py b/vendor/poetry/tests/console/commands/test_lock.py
index 823a8ba4..dae977a1 100644
--- a/vendor/poetry/tests/console/commands/test_lock.py
+++ b/vendor/poetry/tests/console/commands/test_lock.py
@@ -1,40 +1,135 @@
-import shutil
-import sys
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING
 
 import pytest
 
-from poetry.factory import Factory
 from poetry.packages import Locker
-from poetry.utils._compat import Path
+from tests.helpers import get_package
+
+
+if TYPE_CHECKING:
+    import httpretty
+
+    from cleo.testers.command_tester import CommandTester
+
+    from poetry.poetry import Poetry
+    from tests.helpers import TestRepository
+    from tests.types import CommandTesterFactory
+    from tests.types import FixtureDirGetter
+    from tests.types import ProjectFactory
 
 
 @pytest.fixture
-def source_dir(tmp_path):  # type: (Path) -> Path
-    yield Path(tmp_path.as_posix())
+def source_dir(tmp_path: Path) -> Path:
+    return Path(tmp_path.as_posix())
 
 
 @pytest.fixture
-def tester(command_tester_factory):
+def tester(command_tester_factory: CommandTesterFactory) -> CommandTester:
     return command_tester_factory("lock")
 
 
+def _project_factory(
+    fixture_name: str,
+    project_factory: ProjectFactory,
+    fixture_dir: FixtureDirGetter,
+) -> Poetry:
+    source = fixture_dir(fixture_name)
+    pyproject_content = (source / "pyproject.toml").read_text(encoding="utf-8")
+    poetry_lock_content = (source / "poetry.lock").read_text(encoding="utf-8")
+    return project_factory(
+        name="foobar",
+        pyproject_content=pyproject_content,
+        poetry_lock_content=poetry_lock_content,
+    )
+
+
 @pytest.fixture
-def poetry_with_old_lockfile(fixture_dir, source_dir):
-    project_dir = source_dir / "project"
-    shutil.copytree(str(fixture_dir("old_lock")), str(project_dir))
-    poetry = Factory().create_poetry(cwd=project_dir)
-    return poetry
+def poetry_with_outdated_lockfile(
+    project_factory: ProjectFactory, fixture_dir: FixtureDirGetter
+) -> Poetry:
+    return _project_factory("outdated_lock", project_factory, fixture_dir)
+
+
+@pytest.fixture
+def poetry_with_up_to_date_lockfile(
+    project_factory: ProjectFactory, fixture_dir: FixtureDirGetter
+) -> Poetry:
+    return _project_factory("up_to_date_lock", project_factory, fixture_dir)
+
+
+@pytest.fixture
+def poetry_with_old_lockfile(
+    project_factory: ProjectFactory, fixture_dir: FixtureDirGetter
+) -> Poetry:
+    return _project_factory("old_lock", project_factory, fixture_dir)
+
+
+def test_lock_check_outdated(
+    command_tester_factory: CommandTesterFactory,
+    poetry_with_outdated_lockfile: Poetry,
+    http: type[httpretty.httpretty],
+):
+    http.disable()
 
+    locker = Locker(
+        lock=poetry_with_outdated_lockfile.pyproject.file.path.parent / "poetry.lock",
+        local_config=poetry_with_outdated_lockfile.locker._local_config,
+    )
+    poetry_with_outdated_lockfile.set_locker(locker)
+
+    tester = command_tester_factory("lock", poetry=poetry_with_outdated_lockfile)
+    status_code = tester.execute("--check")
+    expected = (
+        "Error: poetry.lock is not consistent with pyproject.toml. "
+        "Run `poetry lock [--no-update]` to fix it.\n"
+    )
+
+    assert tester.io.fetch_error() == expected
 
-@pytest.mark.skipif(
-    sys.platform == "win32", reason="does not work on windows under ci environments"
-)
-def test_lock_no_update(command_tester_factory, poetry_with_old_lockfile, http):
+    # exit with an error
+    assert status_code == 1
+
+
+def test_lock_check_up_to_date(
+    command_tester_factory: CommandTesterFactory,
+    poetry_with_up_to_date_lockfile: Poetry,
+    http: type[httpretty.httpretty],
+):
     http.disable()
 
-    locked_repository = poetry_with_old_lockfile.locker.locked_repository(
-        with_dev_reqs=True
+    locker = Locker(
+        lock=poetry_with_up_to_date_lockfile.pyproject.file.path.parent / "poetry.lock",
+        local_config=poetry_with_up_to_date_lockfile.locker._local_config,
     )
+    poetry_with_up_to_date_lockfile.set_locker(locker)
+
+    tester = command_tester_factory("lock", poetry=poetry_with_up_to_date_lockfile)
+    status_code = tester.execute("--check")
+    expected = "poetry.lock is consistent with pyproject.toml.\n"
+    assert tester.io.fetch_output() == expected
+
+    # exit without an error
+    assert status_code == 0
+
+
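What `--check` compares is, in essence, a content hash stored in the lock file's metadata against one freshly computed from the lockfile-relevant pyproject data. A sketch of that comparison, assuming a sha256-over-JSON hash scheme:

    import hashlib
    import json

    def lock_is_consistent_sketch(lock_metadata: dict, relevant_content: dict) -> bool:
        fresh = hashlib.sha256(
            json.dumps(relevant_content, sort_keys=True).encode()
        ).hexdigest()
        return lock_metadata.get("content-hash") == fresh
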
+def test_lock_no_update(
+    command_tester_factory: CommandTesterFactory,
+    poetry_with_old_lockfile: Poetry,
+    repo: TestRepository,
+):
+    repo.add_package(get_package("sampleproject", "1.3.1"))
+    repo.add_package(get_package("sampleproject", "2.0.0"))
+
+    locker = Locker(
+        lock=poetry_with_old_lockfile.pyproject.file.path.parent / "poetry.lock",
+        local_config=poetry_with_old_lockfile.locker._local_config,
+    )
+    poetry_with_old_lockfile.set_locker(locker)
+
+    locked_repository = poetry_with_old_lockfile.locker.locked_repository()
     assert (
         poetry_with_old_lockfile.locker.lock_data["metadata"].get("lock-version")
         == "1.0"
@@ -47,7 +142,7 @@ def test_lock_no_update(command_tester_factory, poetry_with_old_lockfile, http):
         lock=poetry_with_old_lockfile.pyproject.file.path.parent / "poetry.lock",
         local_config={},
     )
-    packages = locker.locked_repository(True).packages
+    packages = locker.locked_repository().packages
 
     assert len(packages) == len(locked_repository.packages)
 
diff --git a/vendor/poetry/tests/console/commands/test_new.py b/vendor/poetry/tests/console/commands/test_new.py
new file mode 100644
index 00000000..79f09e20
--- /dev/null
+++ b/vendor/poetry/tests/console/commands/test_new.py
@@ -0,0 +1,172 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+import pytest
+
+from poetry.factory import Factory
+
+
+if TYPE_CHECKING:
+    from cleo.testers.command_tester import CommandTester
+
+    from poetry.poetry import Poetry
+    from tests.types import CommandTesterFactory
+
+
+@pytest.fixture
+def tester(command_tester_factory: CommandTesterFactory) -> CommandTester:
+    return command_tester_factory("new")
+
+
+def verify_project_directory(
+    path: Path, package_name: str, package_path: str, include_from: str | None = None
+) -> Poetry:
+    package_path = Path(package_path)
+    assert path.is_dir()
+
+    pyproject = path / "pyproject.toml"
+    assert pyproject.is_file()
+
+    init_file = path / package_path / "__init__.py"
+    assert init_file.is_file()
+
+    tests_init_file = path / "tests" / "__init__.py"
+    assert tests_init_file.is_file()
+
+    poetry = Factory().create_poetry(cwd=path)
+    assert poetry.package.name == package_name
+
+    if include_from:
+        package_include = {
+            "include": package_path.relative_to(include_from).parts[0],
+            "from": include_from,
+        }
+    else:
+        package_include = {"include": package_path.parts[0]}
+
+    packages = poetry.local_config.get("packages")
+
+    if not packages:
+        assert poetry.local_config.get("name") == package_include.get("include")
+    else:
+        assert len(packages) == 1
+        assert packages[0] == package_include
+
+    return poetry
+
+
+@pytest.mark.parametrize(
+    "options,directory,package_name,package_path,include_from",
+    [
+        ([], "package", "package", "package", None),
+        (["--src"], "package", "package", "src/package", "src"),
+        (
+            ["--name namespace.package"],
+            "namespace-package",
+            "namespace-package",
+            "namespace/package",
+            None,
+        ),
+        (
+            ["--src", "--name namespace.package"],
+            "namespace-package",
+            "namespace-package",
+            "src/namespace/package",
+            "src",
+        ),
+        (
+            ["--name namespace.package_a"],
+            "namespace-package_a",
+            "namespace-package-a",
+            "namespace/package_a",
+            None,
+        ),
+        (
+            ["--src", "--name namespace.package_a"],
+            "namespace-package_a",
+            "namespace-package-a",
+            "src/namespace/package_a",
+            "src",
+        ),
+        (
+            ["--name namespace_package"],
+            "namespace-package",
+            "namespace-package",
+            "namespace_package",
+            None,
+        ),
+        (
+            ["--name namespace_package", "--src"],
+            "namespace-package",
+            "namespace-package",
+            "src/namespace_package",
+            "src",
+        ),
+        (
+            ["--name namespace.package"],
+            "package",
+            "namespace-package",
+            "namespace/package",
+            None,
+        ),
+        (
+            ["--name namespace.package", "--src"],
+            "package",
+            "namespace-package",
+            "src/namespace/package",
+            "src",
+        ),
+        (
+            ["--name namespace.package"],
+            "package",
+            "namespace-package",
+            "namespace/package",
+            None,
+        ),
+        (
+            ["--name namespace.package", "--src"],
+            "package",
+            "namespace-package",
+            "src/namespace/package",
+            "src",
+        ),
+        ([], "namespace_package", "namespace-package", "namespace_package", None),
+        (
+            ["--src", "--name namespace_package"],
+            "namespace_package",
+            "namespace-package",
+            "src/namespace_package",
+            "src",
+        ),
+    ],
+)
+def test_command_new(
+    options: list[str],
+    directory: str,
+    package_name: str,
+    package_path: str,
+    include_from: str | None,
+    tester: CommandTester,
+    tmp_dir: str,
+):
+    path = Path(tmp_dir) / directory
+    options.append(path.as_posix())
+    tester.execute(" ".join(options))
+    verify_project_directory(path, package_name, package_path, include_from)
+
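The long parametrization encodes a naming convention for `poetry new`; a hedged sketch of the mapping it implies (illustrative, with the dash-to-underscore normalization an assumption, not the command's code):

    def package_path_sketch(name: str, src_layout: bool) -> str:
        # Dots become directory separators (each segment normalized to a
        # valid module name), and --src nests everything under src/.
        parts = [segment.replace("-", "_") for segment in name.split(".")]
        path = "/".join(parts)
        return f"src/{path}" if src_layout else path
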
+
+@pytest.mark.parametrize(("fmt",), [(None,), ("md",), ("rst",), ("adoc",), ("creole",)])
+def test_command_new_with_readme(fmt: str | None, tester: CommandTester, tmp_dir: str):
+    package = "package"
+    path = Path(tmp_dir) / package
+    options = [path.as_posix()]
+
+    if fmt:
+        options.insert(0, f"--readme {fmt}")
+
+    tester.execute(" ".join(options))
+
+    poetry = verify_project_directory(path, package, package, None)
+    assert poetry.local_config.get("readme") == f"README.{fmt or 'md'}"
diff --git a/vendor/poetry/tests/console/commands/test_publish.py b/vendor/poetry/tests/console/commands/test_publish.py
index dccc43ee..559f58f3 100644
--- a/vendor/poetry/tests/console/commands/test_publish.py
+++ b/vendor/poetry/tests/console/commands/test_publish.py
@@ -1,37 +1,55 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
+
 import pytest
 import requests
 
 from poetry.publishing.uploader import UploadError
-from poetry.utils._compat import PY36
-from poetry.utils._compat import Path
 
 
-@pytest.mark.skipif(
-    not PY36, reason="Improved error rendering is only available on Python >=3.6"
-)
-def test_publish_returns_non_zero_code_for_upload_errors(app, app_tester, http):
+if TYPE_CHECKING:
+    import httpretty
+
+    from cleo.testers.application_tester import ApplicationTester
+    from pytest_mock import MockerFixture
+
+    from tests.helpers import PoetryTestApplication
+
+
+def test_publish_returns_non_zero_code_for_upload_errors(
+    app: PoetryTestApplication,
+    app_tester: ApplicationTester,
+    http: type[httpretty.httpretty],
+):
     http.register_uri(
         http.POST, "https://upload.pypi.org/legacy/", status=400, body="Bad Request"
     )
 
     exit_code = app_tester.execute("publish --username foo --password bar")
 
-    assert 1 == exit_code
+    assert exit_code == 1
 
-    expected = """
+    expected_output = """
 Publishing simple-project (1.2.3) to PyPI
-
-
-  UploadError
-
-  HTTP Error 400: Bad Request
+"""
+    expected_error_output = """\
+HTTP Error 400: Bad Request | b'Bad Request'
 """
 
-    assert expected in app_tester.io.fetch_output()
+    assert expected_output in app_tester.io.fetch_output()
+    assert expected_error_output in app_tester.io.fetch_error()
 
 
-def test_publish_returns_non_zero_code_for_connection_errors(app, app_tester, http):
-    def request_callback(*_, **__):
+@pytest.mark.filterwarnings("ignore::pytest.PytestUnhandledThreadExceptionWarning")
+def test_publish_returns_non_zero_code_for_connection_errors(
+    app: PoetryTestApplication,
+    app_tester: ApplicationTester,
+    http: type[httpretty.httpretty],
+):
+    def request_callback(*_: Any, **__: Any) -> None:
         raise requests.ConnectionError()
 
     http.register_uri(
@@ -40,66 +58,50 @@ def request_callback(*_, **__):
 
     exit_code = app_tester.execute("publish --username foo --password bar")
 
-    assert 1 == exit_code
+    assert exit_code == 1
 
     expected = str(UploadError(error=requests.ConnectionError()))
 
-    assert expected in app_tester.io.fetch_output()
-
-
-@pytest.mark.skipif(
-    PY36, reason="Improved error rendering is not available on Python <3.6"
-)
-def test_publish_returns_non_zero_code_for_upload_errors_older_python(
-    app, app_tester, http
-):
-    http.register_uri(
-        http.POST, "https://upload.pypi.org/legacy/", status=400, body="Bad Request"
-    )
-
-    exit_code = app_tester.execute("publish --username foo --password bar")
-
-    assert 1 == exit_code
-
-    expected = """
-Publishing simple-project (1.2.3) to PyPI
-
+    assert expected in app_tester.io.fetch_error()
 
-UploadError
 
-HTTP Error 400: Bad Request
-"""
-
-    assert app_tester.io.fetch_output() == expected
-
-
-def test_publish_with_cert(app_tester, mocker):
+def test_publish_with_cert(app_tester: ApplicationTester, mocker: MockerFixture):
     publisher_publish = mocker.patch("poetry.publishing.Publisher.publish")
 
     app_tester.execute("publish --cert path/to/ca.pem")
 
     assert [
-        (None, None, None, Path("path/to/ca.pem"), None, False)
+        (None, None, None, Path("path/to/ca.pem"), None, False, False)
     ] == publisher_publish.call_args
 
 
-def test_publish_with_client_cert(app_tester, mocker):
+def test_publish_with_client_cert(app_tester: ApplicationTester, mocker: MockerFixture):
     publisher_publish = mocker.patch("poetry.publishing.Publisher.publish")
 
     app_tester.execute("publish --client-cert path/to/client.pem")
     assert [
-        (None, None, None, None, Path("path/to/client.pem"), False)
+        (None, None, None, None, Path("path/to/client.pem"), False, False)
     ] == publisher_publish.call_args
 
 
-def test_publish_dry_run(app_tester, http):
+@pytest.mark.parametrize(
+    "options",
+    [
+        "--dry-run",
+        "--skip-existing",
+        "--dry-run --skip-existing",
+    ],
+)
+def test_publish_dry_run_skip_existing(
+    app_tester: ApplicationTester, http: type[httpretty.httpretty], options: str
+):
     http.register_uri(
-        http.POST, "https://upload.pypi.org/legacy/", status=403, body="Forbidden"
+        http.POST, "https://upload.pypi.org/legacy/", status=409, body="Conflict"
     )
 
-    exit_code = app_tester.execute("publish --dry-run --username foo --password bar")
+    exit_code = app_tester.execute(f"publish {options} --username foo --password bar")
 
-    assert 0 == exit_code
+    assert exit_code == 0
 
     output = app_tester.io.fetch_output()
     error = app_tester.io.fetch_error()
@@ -107,3 +109,20 @@ def test_publish_dry_run(app_tester, http):
     assert "Publishing simple-project (1.2.3) to PyPI" in output
     assert "- Uploading simple-project-1.2.3.tar.gz" in error
     assert "- Uploading simple_project-1.2.3-py2.py3-none-any.whl" in error
+
+
+def test_skip_existing_output(
+    app_tester: ApplicationTester, http: type[httpretty.httpretty]
+):
+    http.register_uri(
+        http.POST, "https://upload.pypi.org/legacy/", status=409, body="Conflict"
+    )
+
+    exit_code = app_tester.execute(
+        "publish --skip-existing --username foo --password bar"
+    )
+
+    assert exit_code == 0
+
+    error = app_tester.io.fetch_error()
+    assert "- Uploading simple-project-1.2.3.tar.gz File exists. Skipping" in error
diff --git a/vendor/poetry/tests/console/commands/test_remove.py b/vendor/poetry/tests/console/commands/test_remove.py
new file mode 100644
index 00000000..0a2fa1b6
--- /dev/null
+++ b/vendor/poetry/tests/console/commands/test_remove.py
@@ -0,0 +1,301 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+import tomlkit
+
+from poetry.core.packages.package import Package
+
+from poetry.factory import Factory
+from tests.helpers import get_package
+
+
+if TYPE_CHECKING:
+    from cleo.testers.command_tester import CommandTester
+    from pytest_mock import MockerFixture
+
+    from poetry.poetry import Poetry
+    from poetry.repositories import Repository
+    from tests.helpers import PoetryTestApplication
+    from tests.helpers import TestRepository
+    from tests.types import CommandTesterFactory
+    from tests.types import FixtureDirGetter
+    from tests.types import ProjectFactory
+
+
+@pytest.fixture
+def poetry_with_up_to_date_lockfile(
+    project_factory: ProjectFactory, fixture_dir: FixtureDirGetter
+) -> Poetry:
+    source = fixture_dir("up_to_date_lock")
+
+    return project_factory(
+        name="foobar",
+        pyproject_content=(source / "pyproject.toml").read_text(encoding="utf-8"),
+        poetry_lock_content=(source / "poetry.lock").read_text(encoding="utf-8"),
+    )
+
+
+@pytest.fixture()
+def tester(command_tester_factory: CommandTesterFactory) -> CommandTester:
+    return command_tester_factory("remove")
+
+
+def test_remove_without_specific_group_removes_from_all_groups(
+    tester: CommandTester,
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    command_tester_factory: CommandTesterFactory,
+    installed: Repository,
+):
+    """
+    Removing without specifying a group removes packages from all groups.
+    """
+    installed.add_package(Package("foo", "2.0.0"))
+    repo.add_package(Package("foo", "2.0.0"))
+    repo.add_package(Package("baz", "1.0.0"))
+
+    content = app.poetry.file.read()
+
+    groups_content = tomlkit.parse(
+        """\
+[tool.poetry.group.bar.dependencies]
+foo = "^2.0.0"
+baz = "^1.0.0"
+
+"""
+    )
+    content["tool"]["poetry"]["dependencies"]["foo"] = "^2.0.0"
+    content["tool"]["poetry"]["group"] = groups_content["tool"]["poetry"]["group"]
+    app.poetry.file.write(content)
+
+    app.poetry.package.add_dependency(Factory.create_dependency("foo", "^2.0.0"))
+    app.poetry.package.add_dependency(
+        Factory.create_dependency("foo", "^2.0.0", groups=["bar"])
+    )
+    app.poetry.package.add_dependency(
+        Factory.create_dependency("baz", "^1.0.0", groups=["bar"])
+    )
+
+    tester.execute("foo")
+
+    content = app.poetry.file.read()["tool"]["poetry"]
+    assert "foo" not in content["dependencies"]
+    assert "foo" not in content["group"]["bar"]["dependencies"]
+    assert "baz" in content["group"]["bar"]["dependencies"]
+
+    expected = """\
+
+[tool.poetry.group.bar.dependencies]
+baz = "^1.0.0"
+
+"""
+    string_content = content.as_string()
+    if "\r\n" in string_content:
+        # consistent line endings
+        expected = expected.replace("\n", "\r\n")
+
+    assert expected in string_content
+
+
+def test_remove_without_specific_group_removes_from_specific_groups(
+    tester: CommandTester,
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    command_tester_factory: CommandTesterFactory,
+    installed: Repository,
+):
+    """
+    Removing with a specific group given removes packages only from this group.
+    """
+    installed.add_package(Package("foo", "2.0.0"))
+    repo.add_package(Package("foo", "2.0.0"))
+    repo.add_package(Package("baz", "1.0.0"))
+
+    content = app.poetry.file.read()
+
+    groups_content = tomlkit.parse(
+        """\
+[tool.poetry.group.bar.dependencies]
+foo = "^2.0.0"
+baz = "^1.0.0"
+
+"""
+    )
+    content["tool"]["poetry"]["dependencies"]["foo"] = "^2.0.0"
+    content["tool"]["poetry"]["group"] = groups_content["tool"]["poetry"]["group"]
+    app.poetry.file.write(content)
+
+    app.poetry.package.add_dependency(Factory.create_dependency("foo", "^2.0.0"))
+    app.poetry.package.add_dependency(
+        Factory.create_dependency("foo", "^2.0.0", groups=["bar"])
+    )
+    app.poetry.package.add_dependency(
+        Factory.create_dependency("baz", "^1.0.0", groups=["bar"])
+    )
+
+    tester.execute("foo --group bar")
+
+    content = app.poetry.file.read()["tool"]["poetry"]
+    assert "foo" in content["dependencies"]
+    assert "foo" not in content["group"]["bar"]["dependencies"]
+    assert "baz" in content["group"]["bar"]["dependencies"]
+
+    expected = """\
+
+[tool.poetry.group.bar.dependencies]
+baz = "^1.0.0"
+
+"""
+    string_content = content.as_string()
+    if "\r\n" in string_content:
+        # consistent line endings
+        expected = expected.replace("\n", "\r\n")
+
+    assert expected in string_content
+
+
+def test_remove_does_not_leave_empty_groups(
+    tester: CommandTester,
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    command_tester_factory: CommandTesterFactory,
+    installed: Repository,
+):
+    """
+    Empty groups are automatically discarded after package removal.
+    """
+    installed.add_package(Package("foo", "2.0.0"))
+    repo.add_package(Package("foo", "2.0.0"))
+    repo.add_package(Package("baz", "1.0.0"))
+
+    content = app.poetry.file.read()
+
+    groups_content = tomlkit.parse(
+        """\
+[tool.poetry.group.bar.dependencies]
+foo = "^2.0.0"
+baz = "^1.0.0"
+
+"""
+    )
+    content["tool"]["poetry"]["dependencies"]["foo"] = "^2.0.0"
+    content["tool"]["poetry"]["group"] = groups_content["tool"]["poetry"]["group"]
+    app.poetry.file.write(content)
+
+    app.poetry.package.add_dependency(Factory.create_dependency("foo", "^2.0.0"))
+    app.poetry.package.add_dependency(
+        Factory.create_dependency("foo", "^2.0.0", groups=["bar"])
+    )
+    app.poetry.package.add_dependency(
+        Factory.create_dependency("baz", "^1.0.0", groups=["bar"])
+    )
+
+    tester.execute("foo baz --group bar")
+
+    content = app.poetry.file.read()["tool"]["poetry"]
+    assert "foo" in content["dependencies"]
+    assert "foo" not in content["group"]["bar"]["dependencies"]
+    assert "baz" not in content["group"]["bar"]["dependencies"]
+    assert "[tool.poetry.group.bar]" not in content.as_string()
+    assert "[tool.poetry.group]" not in content.as_string()
+
+
+def test_remove_canonicalized_name_removes_dependency_correctly(
+    tester: CommandTester,
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    command_tester_factory: CommandTesterFactory,
+    installed: Repository,
+):
+    """
+    Removing a dependency using its canonicalized name removes the dependency.
+    """
+    installed.add_package(Package("foo-bar", "2.0.0"))
+    repo.add_package(Package("foo-bar", "2.0.0"))
+    repo.add_package(Package("baz", "1.0.0"))
+
+    content = app.poetry.file.read()
+
+    groups_content = tomlkit.parse(
+        """\
+[tool.poetry.group.bar.dependencies]
+foo-bar = "^2.0.0"
+baz = "^1.0.0"
+
+"""
+    )
+    content["tool"]["poetry"]["dependencies"]["foo-bar"] = "^2.0.0"
+    content["tool"]["poetry"].value._insert_after(
+        "dependencies", "group", groups_content["tool"]["poetry"]["group"]
+    )
+    app.poetry.file.write(content)
+
+    app.poetry.package.add_dependency(Factory.create_dependency("foo-bar", "^2.0.0"))
+    app.poetry.package.add_dependency(
+        Factory.create_dependency("foo-bar", "^2.0.0", groups=["bar"])
+    )
+    app.poetry.package.add_dependency(
+        Factory.create_dependency("baz", "^1.0.0", groups=["bar"])
+    )
+
+    tester.execute("Foo_Bar")
+
+    content = app.poetry.file.read()["tool"]["poetry"]
+    assert "foo-bar" not in content["dependencies"]
+    assert "foo-bar" not in content["group"]["bar"]["dependencies"]
+    assert "baz" in content["group"]["bar"]["dependencies"]
+
+    expected = """\
+
+[tool.poetry.group.bar.dependencies]
+baz = "^1.0.0"
+
+"""
+    string_content = content.as_string()
+    if "\r\n" in string_content:
+        # consistent line endings
+        expected = expected.replace("\n", "\r\n")
+
+    assert expected in string_content
+
+
+def test_remove_command_should_not_write_changes_upon_installer_errors(
+    tester: CommandTester,
+    app: PoetryTestApplication,
+    repo: TestRepository,
+    command_tester_factory: CommandTesterFactory,
+    mocker: MockerFixture,
+):
+    repo.add_package(Package("foo", "2.0.0"))
+
+    command_tester_factory("add").execute("foo")
+
+    mocker.patch("poetry.installation.installer.Installer.run", return_value=1)
+
+    original_content = app.poetry.file.read().as_string()
+
+    tester.execute("foo")
+
+    assert app.poetry.file.read().as_string() == original_content
+
+
+def test_remove_with_dry_run_keeps_files_intact(
+    poetry_with_up_to_date_lockfile: Poetry,
+    repo: TestRepository,
+    command_tester_factory: CommandTesterFactory,
+):
+    tester = command_tester_factory("remove", poetry=poetry_with_up_to_date_lockfile)
+
+    original_pyproject_content = poetry_with_up_to_date_lockfile.file.read()
+    original_lockfile_content = poetry_with_up_to_date_lockfile._locker.lock_data
+
+    repo.add_package(get_package("docker", "4.3.1"))
+
+    tester.execute("docker --dry-run")
+
+    assert poetry_with_up_to_date_lockfile.file.read() == original_pyproject_content
+    assert (
+        poetry_with_up_to_date_lockfile._locker.lock_data == original_lockfile_content
+    )
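The empty-group pruning asserted in test_remove_does_not_leave_empty_groups can be pictured as a tomlkit round-trip: delete the dependency, then drop any group table (and the parent `group` table) left empty. A simplified sketch under those assumptions:

    import tomlkit

    def remove_group_dependency_sketch(pyproject_text: str, group: str, name: str) -> str:
        doc = tomlkit.parse(pyproject_text)
        deps = doc["tool"]["poetry"]["group"][group]["dependencies"]
        del deps[name]
        if not deps:
            del doc["tool"]["poetry"]["group"][group]  # drop the empty group
            if not doc["tool"]["poetry"]["group"]:
                del doc["tool"]["poetry"]["group"]  # and the empty parent table
        return tomlkit.dumps(doc)
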
diff --git a/vendor/poetry/tests/console/commands/test_run.py b/vendor/poetry/tests/console/commands/test_run.py
index 351d869d..d86e8552 100644
--- a/vendor/poetry/tests/console/commands/test_run.py
+++ b/vendor/poetry/tests/console/commands/test_run.py
@@ -1,16 +1,105 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
 import pytest
 
+from poetry.utils._compat import WINDOWS
+
+
+if TYPE_CHECKING:
+    from cleo.testers.application_tester import ApplicationTester
+    from cleo.testers.command_tester import CommandTester
+    from pytest_mock import MockerFixture
+
+    from poetry.utils.env import MockEnv
+    from poetry.utils.env import VirtualEnv
+    from tests.types import CommandTesterFactory
+
 
 @pytest.fixture
-def tester(command_tester_factory):
+def tester(command_tester_factory: CommandTesterFactory) -> CommandTester:
     return command_tester_factory("run")
 
 
 @pytest.fixture(autouse=True)
-def patches(mocker, env):
+def patches(mocker: MockerFixture, env: MockEnv) -> None:
     mocker.patch("poetry.utils.env.EnvManager.get", return_value=env)
 
 
-def test_run_passes_all_args(tester, env):
-    tester.execute("python -V")
+def test_run_passes_all_args(app_tester: ApplicationTester, env: MockEnv):
+    app_tester.execute("run python -V")
     assert [["python", "-V"]] == env.executed
+
+
+def test_run_keeps_options_passed_before_command(
+    app_tester: ApplicationTester, env: MockEnv
+):
+    app_tester.execute("-V --no-ansi run python", decorated=True)
+
+    assert not app_tester.io.is_decorated()
+    assert app_tester.io.fetch_output() == app_tester.io.remove_format(
+        app_tester.application.long_version + "\n"
+    )
+    assert [] == env.executed
+
+
+def test_run_has_helpful_error_when_command_not_found(
+    app_tester: ApplicationTester, env: MockEnv, capfd: pytest.CaptureFixture[str]
+):
+    env._execute = True
+    app_tester.execute("run nonexistent-command")
+
+    assert env.executed == [["nonexistent-command"]]
+    assert app_tester.status_code == 1
+    if WINDOWS:
+        # On Windows we run commands through a shell. When a command is not
+        # found, the shell emits its own error message; that message is not
+        # captured by the ApplicationTester but is captured by pytest, so we
+        # can access it via capfd. The expected string in this assertion
+        # assumes Command Prompt (cmd.exe) is the shell used.
+        assert capfd.readouterr().err.splitlines() == [
+            "'nonexistent-command' is not recognized as an internal or external"
+            " command,",
+            "operable program or batch file.",
+        ]
+    else:
+        assert app_tester.io.fetch_error() == "Command not found: nonexistent-command\n"
+
+
+@pytest.mark.skipif(
+    not WINDOWS,
+    reason=(
+        "Poetry only installs CMD script files for console scripts of editable"
+        " dependencies on Windows"
+    ),
+)
+def test_run_console_scripts_of_editable_dependencies_on_windows(
+    tmp_venv: VirtualEnv,
+    command_tester_factory: CommandTesterFactory,
+):
+    """
+    On Windows, Poetry installs console scripts of editable dependencies by creating
+    in the environment's `Scripts/` directory both:
+
+        A) a Python file named after the console script (no `.py` extension) which
+            imports and calls the console script using Python code
+        B) a CMD script file also named after the console script
+            (with `.cmd` extension) which calls `python.exe` to execute (A)
+
+    This configuration enables calling the console script by name from `cmd.exe`
+    because the `.cmd` file extension appears by default in the PATHEXT environment
+    variable that `cmd.exe` uses to determine which file should be executed if a
+    filename without an extension is executed as a command.
+
+    This test validates that you can also run such a CMD script file via `poetry run`
+    just by providing the script's name without the `.cmd` extension.
+    """
+    tester = command_tester_factory("run", environment=tmp_venv)
+
+    cmd_script_file = tmp_venv._bin_dir / "quix.cmd"
+    # `/b` ensures we only exit the script instead of any cmd.exe proc that called it
+    cmd_script_file.write_text("exit /b 123")
+    # We prove that the CMD script executed successfully by verifying the exit code
+    # matches what we wrote in the script
+    assert tester.execute("quix") == 123
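For concreteness, the launcher pair the docstring describes might be materialized like this for a console script named `quix` (hypothetical contents and entry point; the launcher Poetry actually writes is more involved):

    from pathlib import Path

    scripts = Path("Scripts")
    scripts.mkdir(exist_ok=True)
    # (A) extensionless Python launcher that imports and calls the entry point.
    (scripts / "quix").write_text(
        "import sys\n"
        "from quix_package.cli import main  # assumed entry point\n"
        "sys.exit(main())\n"
    )
    # (B) CMD wrapper found via PATHEXT; it re-invokes python.exe on (A).
    (scripts / "quix.cmd").write_text(
        '@echo off\r\n"%~dp0python.exe" "%~dp0quix" %*\r\n'
    )
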
diff --git a/vendor/poetry/tests/console/commands/test_search.py b/vendor/poetry/tests/console/commands/test_search.py
index 9b61476c..f41021f1 100644
--- a/vendor/poetry/tests/console/commands/test_search.py
+++ b/vendor/poetry/tests/console/commands/test_search.py
@@ -1,7 +1,17 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+
 import pytest
 
-from poetry.utils._compat import Path
 
+if TYPE_CHECKING:
+    import httpretty
+
+    from cleo.testers.command_tester import CommandTester
+
+    from tests.types import CommandTesterFactory
 
 TESTS_DIRECTORY = Path(__file__).parent.parent.parent
 FIXTURES_DIRECTORY = (
@@ -10,19 +20,17 @@
 
 
 @pytest.fixture(autouse=True)
-def mock_search_http_response(http):
+def mock_search_http_response(http: type[httpretty.httpretty]) -> None:
     with FIXTURES_DIRECTORY.joinpath("search.html").open(encoding="utf-8") as f:
         http.register_uri("GET", "https://pypi.org/search", f.read())
 
 
 @pytest.fixture
-def tester(command_tester_factory):
+def tester(command_tester_factory: CommandTesterFactory) -> CommandTester:
     return command_tester_factory("search")
 
 
-def test_search(
-    tester, http,
-):
+def test_search(tester: CommandTester, http: type[httpretty.httpretty]):
     tester.execute("sqlalchemy")
 
     expected = """
@@ -44,7 +52,7 @@ def test_search(
 sqlalchemy-audit (0.1.0)
  sqlalchemy-audit provides an easy way to set up revision tracking for your data.
 
-transmogrify.sqlalchemy (1.0.2)
+transmogrify-sqlalchemy (1.0.2)
  Feed data from SQLAlchemy into a transmogrifier pipeline
 
 sqlalchemy-schemadisplay (1.3)
@@ -60,7 +68,8 @@ def test_search(
  Python wrapper for the CircleCI API
 
 sqlalchemy-nav (0.0.2)
- SQLAlchemy-Nav provides SQLAlchemy Mixins for creating navigation bars compatible with Bootstrap
+ SQLAlchemy-Nav provides SQLAlchemy Mixins for creating navigation bars compatible with\
+ Bootstrap
 
 sqlalchemy-repr (0.0.1)
  Automatically generates pretty repr of a SQLAlchemy model.
@@ -87,4 +96,6 @@ def test_search(
  SAP Sybase SQL Anywhere dialect for SQLAlchemy
 """
 
-    assert expected == tester.io.fetch_output()
+    output = tester.io.fetch_output()
+
+    assert output == expected
diff --git a/vendor/poetry/tests/console/commands/test_shell.py b/vendor/poetry/tests/console/commands/test_shell.py
new file mode 100644
index 00000000..abdaec46
--- /dev/null
+++ b/vendor/poetry/tests/console/commands/test_shell.py
@@ -0,0 +1,84 @@
+from __future__ import annotations
+
+import os
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+import pytest
+
+
+if TYPE_CHECKING:
+    from cleo.testers.command_tester import CommandTester
+    from pytest_mock import MockerFixture
+
+    from tests.types import CommandTesterFactory
+
+
+@pytest.fixture
+def tester(command_tester_factory: CommandTesterFactory) -> CommandTester:
+    return command_tester_factory("shell")
+
+
+def test_shell(tester: CommandTester, mocker: MockerFixture):
+    shell_activate = mocker.patch("poetry.utils.shell.Shell.activate")
+
+    tester.execute()
+
+    expected_output = f"Spawning shell within {tester.command.env.path}\n"
+
+    shell_activate.assert_called_once_with(tester.command.env)
+    assert tester.io.fetch_output() == expected_output
+    assert tester.status_code == 0
+
+
+def test_shell_already_active(tester: CommandTester, mocker: MockerFixture):
+    os.environ["POETRY_ACTIVE"] = "1"
+    shell_activate = mocker.patch("poetry.utils.shell.Shell.activate")
+
+    tester.execute()
+
+    expected_output = (
+        f"Virtual environment already activated: {tester.command.env.path}\n"
+    )
+
+    shell_activate.assert_not_called()
+    assert tester.io.fetch_output() == expected_output
+    assert tester.status_code == 0
+
+
+@pytest.mark.parametrize(
+    ("poetry_active", "real_prefix", "prefix", "expected"),
+    [
+        (None, None, "", False),
+        ("", None, "", False),
+        (" ", None, "", True),
+        ("0", None, "", True),
+        ("1", None, "", True),
+        ("foobar", None, "", True),
+        ("1", "foobar", "foobar", True),
+        (None, "foobar", "foobar", True),
+        (None, "foobar", "foo", True),
+        (None, None, "foobar", True),
+        (None, "foo", "foobar", False),
+        (None, "foo", "foo", False),
+    ],
+)
+def test__is_venv_activated(
+    tester: CommandTester,
+    mocker: MockerFixture,
+    poetry_active: str | None,
+    real_prefix: str | None,
+    prefix: str,
+    expected: bool,
+):
+    mocker.patch.object(tester.command.env, "_path", Path("foobar"))
+    mocker.patch("sys.prefix", prefix)
+
+    if real_prefix is not None:
+        mocker.patch("sys.real_prefix", real_prefix, create=True)
+
+    if poetry_active is not None:
+        os.environ["POETRY_ACTIVE"] = poetry_active
+
+    assert tester.command._is_venv_activated() is expected
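The truth table above pins the detection logic down completely. Consistent with those expectations, the command's check presumably reduces to the sketch below (illustrative, not the verbatim implementation):

import os
import sys


def is_venv_activated(env_path: str) -> bool:
    # any non-empty POETRY_ACTIVE value counts, including "0" and " ";
    # only an unset or empty variable falls through to the prefix check
    if os.environ.get("POETRY_ACTIVE"):
        return True
    # sys.real_prefix (set by old-style virtualenv) takes precedence over
    # sys.prefix when present, which is why (None, "foo", "foobar") is False
    return getattr(sys, "real_prefix", sys.prefix) == env_path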
diff --git a/vendor/poetry/tests/console/commands/test_show.py b/vendor/poetry/tests/console/commands/test_show.py
index 56e964ab..5040b5f0 100644
--- a/vendor/poetry/tests/console/commands/test_show.py
+++ b/vendor/poetry/tests/console/commands/test_show.py
@@ -1,21 +1,38 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
 import pytest
 
-from clikit.formatter.ansi_formatter import AnsiFormatter
+from poetry.core.packages.dependency_group import MAIN_GROUP
+from poetry.core.packages.dependency_group import DependencyGroup
 
 from poetry.factory import Factory
+from tests.helpers import MOCK_DEFAULT_GIT_REVISION
 from tests.helpers import get_package
 
 
+if TYPE_CHECKING:
+    from cleo.testers.command_tester import CommandTester
+
+    from poetry.poetry import Poetry
+    from poetry.repositories import Repository
+    from tests.helpers import TestRepository
+    from tests.types import CommandTesterFactory
+
+
 @pytest.fixture
-def tester(command_tester_factory):
+def tester(command_tester_factory: CommandTesterFactory) -> CommandTester:
     return command_tester_factory("show")
 
 
-def test_show_basic_with_installed_packages(tester, poetry, installed):
+def test_show_basic_with_installed_packages(
+    tester: CommandTester, poetry: Poetry, installed: Repository
+):
     poetry.package.add_dependency(Factory.create_dependency("cachy", "^0.1.0"))
     poetry.package.add_dependency(Factory.create_dependency("pendulum", "^2.0.0"))
     poetry.package.add_dependency(
-        Factory.create_dependency("pytest", "^3.7.3", category="dev")
+        Factory.create_dependency("pytest", "^3.7.3", groups=["dev"])
     )
 
     cachy_010 = get_package("cachy", "0.1.0")
@@ -83,11 +100,257 @@ def test_show_basic_with_installed_packages(tester, poetry, installed):
 pytest   3.7.3 Pytest package
 """
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
+
+
+def _configure_project_with_groups(poetry: Poetry, installed: Repository) -> None:
+    poetry.package.add_dependency(Factory.create_dependency("cachy", "^0.1.0"))
+
+    poetry.package.add_dependency_group(DependencyGroup(name="time", optional=True))
+    poetry.package.add_dependency(
+        Factory.create_dependency("pendulum", "^2.0.0", groups=["time"])
+    )
+
+    poetry.package.add_dependency(
+        Factory.create_dependency("pytest", "^3.7.3", groups=["test"])
+    )
+
+    cachy_010 = get_package("cachy", "0.1.0")
+    cachy_010.description = "Cachy package"
+
+    pendulum_200 = get_package("pendulum", "2.0.0")
+    pendulum_200.description = "Pendulum package"
+    pendulum_200.category = "dev"
+
+    pytest_373 = get_package("pytest", "3.7.3")
+    pytest_373.description = "Pytest package"
+    pytest_373.category = "dev"
+
+    installed.add_package(cachy_010)
+    installed.add_package(pendulum_200)
+    installed.add_package(pytest_373)
+
+    poetry.locker.mock_lock_data(
+        {
+            "package": [
+                {
+                    "name": "cachy",
+                    "version": "0.1.0",
+                    "description": "Cachy package",
+                    "category": "main",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                },
+                {
+                    "name": "pendulum",
+                    "version": "2.0.0",
+                    "description": "Pendulum package",
+                    "category": "dev",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                },
+                {
+                    "name": "pytest",
+                    "version": "3.7.3",
+                    "description": "Pytest package",
+                    "category": "dev",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                },
+            ],
+            "metadata": {
+                "python-versions": "*",
+                "platform": "*",
+                "content-hash": "123456789",
+                "hashes": {"cachy": [], "pendulum": [], "pytest": []},
+            },
+        }
+    )
+
+
+@pytest.mark.parametrize(
+    ("options", "expected"),
+    [
+        (
+            "",
+            """\
+cachy  0.1.0 Cachy package
+pytest 3.7.3 Pytest package
+""",
+        ),
+        (
+            "--with time",
+            """\
+cachy    0.1.0 Cachy package
+pendulum 2.0.0 Pendulum package
+pytest   3.7.3 Pytest package
+""",
+        ),
+        (
+            "--without test",
+            """\
+cachy 0.1.0 Cachy package
+""",
+        ),
+        (
+            f"--without {MAIN_GROUP}",
+            """\
+pytest 3.7.3 Pytest package
+""",
+        ),
+        (
+            f"--only {MAIN_GROUP}",
+            """\
+cachy 0.1.0 Cachy package
+""",
+        ),
+        (
+            "--no-dev",
+            """\
+cachy 0.1.0 Cachy package
+""",
+        ),
+        (
+            "--with time --without test",
+            """\
+cachy    0.1.0 Cachy package
+pendulum 2.0.0 Pendulum package
+""",
+        ),
+        (
+            f"--with time --without {MAIN_GROUP},test",
+            """\
+pendulum 2.0.0 Pendulum package
+""",
+        ),
+        (
+            "--only time",
+            """\
+pendulum 2.0.0 Pendulum package
+""",
+        ),
+        (
+            "--only time --with test",
+            """\
+pendulum 2.0.0 Pendulum package
+""",
+        ),
+        (
+            "--with time",
+            """\
+cachy    0.1.0 Cachy package
+pendulum 2.0.0 Pendulum package
+pytest   3.7.3 Pytest package
+""",
+        ),
+    ],
+)
+def test_show_basic_with_group_options(
+    options: str,
+    expected: str,
+    tester: CommandTester,
+    poetry: Poetry,
+    installed: Repository,
+):
+    _configure_project_with_groups(poetry, installed)
+
+    tester.execute(options)
+
+    assert tester.io.fetch_output() == expected
+
+
+def test_show_basic_with_installed_packages_single(
+    tester: CommandTester, poetry: Poetry, installed: Repository
+):
+    poetry.package.add_dependency(Factory.create_dependency("cachy", "^0.1.0"))
+
+    cachy_010 = get_package("cachy", "0.1.0")
+    cachy_010.description = "Cachy package"
+
+    installed.add_package(cachy_010)
+
+    poetry.locker.mock_lock_data(
+        {
+            "package": [
+                {
+                    "name": "cachy",
+                    "version": "0.1.0",
+                    "description": "Cachy package",
+                    "category": "main",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                },
+            ],
+            "metadata": {
+                "python-versions": "*",
+                "platform": "*",
+                "content-hash": "123456789",
+                "hashes": {"cachy": []},
+            },
+        }
+    )
+
+    tester.execute("cachy")
+
+    assert [
+        "name         : cachy",
+        "version      : 0.1.0",
+        "description  : Cachy package",
+    ] == [line.strip() for line in tester.io.fetch_output().splitlines()]
+
+
+def test_show_basic_with_installed_packages_single_canonicalized(
+    tester: CommandTester, poetry: Poetry, installed: Repository
+):
+    poetry.package.add_dependency(Factory.create_dependency("foo-bar", "^0.1.0"))
+
+    foo_bar = get_package("foo-bar", "0.1.0")
+    foo_bar.description = "Foobar package"
+
+    installed.add_package(foo_bar)
+
+    poetry.locker.mock_lock_data(
+        {
+            "package": [
+                {
+                    "name": "foo-bar",
+                    "version": "0.1.0",
+                    "description": "Foobar package",
+                    "category": "main",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                },
+            ],
+            "metadata": {
+                "python-versions": "*",
+                "platform": "*",
+                "content-hash": "123456789",
+                "hashes": {"foo-bar": []},
+            },
+        }
+    )
+
+    tester.execute("Foo_Bar")
+
+    assert [
+        "name         : foo-bar",
+        "version      : 0.1.0",
+        "description  : Foobar package",
+    ] == [line.strip() for line in tester.io.fetch_output().splitlines()]
 
 
 def test_show_basic_with_not_installed_packages_non_decorated(
-    tester, poetry, installed
+    tester: CommandTester, poetry: Poetry, installed: Repository
 ):
     poetry.package.add_dependency(Factory.create_dependency("cachy", "^0.1.0"))
     poetry.package.add_dependency(Factory.create_dependency("pendulum", "^2.0.0"))
@@ -139,10 +402,12 @@ def test_show_basic_with_not_installed_packages_non_decorated(
 pendulum (!) 2.0.0 Pendulum package
 """
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
 
 
-def test_show_basic_with_not_installed_packages_decorated(tester, poetry, installed):
+def test_show_basic_with_not_installed_packages_decorated(
+    tester: CommandTester, poetry: Poetry, installed: Repository
+):
     poetry.package.add_dependency(Factory.create_dependency("cachy", "^0.1.0"))
     poetry.package.add_dependency(Factory.create_dependency("pendulum", "^2.0.0"))
 
@@ -186,18 +451,22 @@ def test_show_basic_with_not_installed_packages_decorated(tester, poetry, instal
         }
     )
 
-    tester.io.set_formatter(AnsiFormatter(forced=True))
-    tester.execute()
+    tester.execute(decorated=True)
 
     expected = """\
-\033[36mcachy   \033[0m \033[1m0.1.0\033[0m Cachy package
-\033[31mpendulum\033[0m \033[1m2.0.0\033[0m Pendulum package
+\033[36mcachy   \033[39m \033[39;1m0.1.0\033[39;22m Cachy package
+\033[31mpendulum\033[39m \033[39;1m2.0.0\033[39;22m Pendulum package
 """
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
 
 
-def test_show_latest_non_decorated(tester, poetry, installed, repo):
+def test_show_latest_non_decorated(
+    tester: CommandTester,
+    poetry: Poetry,
+    installed: Repository,
+    repo: TestRepository,
+):
     poetry.package.add_dependency(Factory.create_dependency("cachy", "^0.1.0"))
     poetry.package.add_dependency(Factory.create_dependency("pendulum", "^2.0.0"))
 
@@ -259,10 +528,15 @@ def test_show_latest_non_decorated(tester, poetry, installed, repo):
 pendulum 2.0.0 2.0.1 Pendulum package
 """
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
 
 
-def test_show_latest_decorated(tester, poetry, installed, repo):
+def test_show_latest_decorated(
+    tester: CommandTester,
+    poetry: Poetry,
+    installed: Repository,
+    repo: TestRepository,
+):
     poetry.package.add_dependency(Factory.create_dependency("cachy", "^0.1.0"))
     poetry.package.add_dependency(Factory.create_dependency("pendulum", "^2.0.0"))
 
@@ -317,18 +591,24 @@ def test_show_latest_decorated(tester, poetry, installed, repo):
         }
     )
 
-    tester.io.set_formatter(AnsiFormatter(forced=True))
-    tester.execute("--latest")
+    tester.execute("--latest", decorated=True)
 
     expected = """\
-\033[36mcachy   \033[0m \033[1m0.1.0\033[0m \033[33m0.2.0\033[0m Cachy package
-\033[36mpendulum\033[0m \033[1m2.0.0\033[0m \033[31m2.0.1\033[0m Pendulum package
+\033[36mcachy   \033[39m \033[39;1m0.1.0\033[39;22m\
+ \033[33m0.2.0\033[39m Cachy package
+\033[36mpendulum\033[39m \033[39;1m2.0.0\033[39;22m\
+ \033[31m2.0.1\033[39m Pendulum package
 """
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
 
 
-def test_show_outdated(tester, poetry, installed, repo):
+def test_show_outdated(
+    tester: CommandTester,
+    poetry: Poetry,
+    installed: Repository,
+    repo: TestRepository,
+):
     poetry.package.add_dependency(Factory.create_dependency("cachy", "^0.1.0"))
     poetry.package.add_dependency(Factory.create_dependency("pendulum", "^2.0.0"))
 
@@ -386,10 +666,15 @@ def test_show_outdated(tester, poetry, installed, repo):
 cachy 0.1.0 0.2.0 Cachy package
 """
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
 
 
-def test_show_outdated_with_only_up_to_date_packages(tester, poetry, installed, repo):
+def test_show_outdated_with_only_up_to_date_packages(
+    tester: CommandTester,
+    poetry: Poetry,
+    installed: Repository,
+    repo: TestRepository,
+):
     cachy_020 = get_package("cachy", "0.2.0")
     cachy_020.description = "Cachy package"
 
@@ -423,10 +708,15 @@ def test_show_outdated_with_only_up_to_date_packages(tester, poetry, installed,
 
     expected = ""
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
 
 
-def test_show_outdated_has_prerelease_but_not_allowed(tester, poetry, installed, repo):
+def test_show_outdated_has_prerelease_but_not_allowed(
+    tester: CommandTester,
+    poetry: Poetry,
+    installed: Repository,
+    repo: TestRepository,
+):
     poetry.package.add_dependency(Factory.create_dependency("cachy", "^0.1.0"))
     poetry.package.add_dependency(Factory.create_dependency("pendulum", "^2.0.0"))
 
@@ -489,13 +779,18 @@ def test_show_outdated_has_prerelease_but_not_allowed(tester, poetry, installed,
 cachy 0.1.0 0.2.0 Cachy package
 """
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
 
 
-def test_show_outdated_has_prerelease_and_allowed(tester, poetry, installed, repo):
+def test_show_outdated_has_prerelease_and_allowed(
+    tester: CommandTester,
+    poetry: Poetry,
+    installed: Repository,
+    repo: TestRepository,
+):
     poetry.package.add_dependency(
         Factory.create_dependency(
-            "cachy", {"version": "^0.1.0", "allow-prereleases": True}
+            "cachy", {"version": ">=0.0.1", "allow-prereleases": True}
         )
     )
     poetry.package.add_dependency(Factory.create_dependency("pendulum", "^2.0.0"))
@@ -559,10 +854,15 @@ def test_show_outdated_has_prerelease_and_allowed(tester, poetry, installed, rep
 cachy 0.1.0.dev1 0.3.0.dev123 Cachy package
 """
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
 
 
-def test_show_outdated_formatting(tester, poetry, installed, repo):
+def test_show_outdated_formatting(
+    tester: CommandTester,
+    poetry: Poetry,
+    installed: Repository,
+    repo: TestRepository,
+):
     poetry.package.add_dependency(Factory.create_dependency("cachy", "^0.1.0"))
     poetry.package.add_dependency(Factory.create_dependency("pendulum", "^2.0.0"))
 
@@ -624,11 +924,24 @@ def test_show_outdated_formatting(tester, poetry, installed, repo):
 pendulum 2.0.0 2.0.1 Pendulum package
 """
 
-    assert expected == tester.io.fetch_output()
-
-
-@pytest.mark.parametrize("project_directory", ["project_with_local_dependencies"])
-def test_show_outdated_local_dependencies(tester, poetry, installed, repo):
+    assert tester.io.fetch_output() == expected
+
+
+@pytest.mark.parametrize(
+    ("project_directory", "required_fixtures"),
+    [
+        (
+            "project_with_local_dependencies",
+            ["distributions/demo-0.1.0-py2.py3-none-any.whl", "project_with_setup"],
+        ),
+    ],
+)
+def test_show_outdated_local_dependencies(
+    tester: CommandTester,
+    poetry: Poetry,
+    installed: Repository,
+    repo: TestRepository,
+):
     cachy_010 = get_package("cachy", "0.1.0")
     cachy_010.description = "Cachy package"
     cachy_020 = get_package("cachy", "0.2.0")
@@ -642,13 +955,13 @@ def test_show_outdated_local_dependencies(tester, poetry, installed, repo):
     demo_010 = get_package("demo", "0.1.0")
     demo_010.description = ""
 
-    my_package_012 = get_package("project-with-setup", "0.1.2")
-    my_package_012.description = "Demo project."
+    my_package_011 = get_package("project-with-setup", "0.1.1")
+    my_package_011.description = "Demo project."
 
     installed.add_package(cachy_020)
     installed.add_package(pendulum_200)
     installed.add_package(demo_010)
-    installed.add_package(my_package_012)
+    installed.add_package(my_package_011)
 
     repo.add_package(cachy_020)
     repo.add_package(cachy_030)
@@ -732,13 +1045,19 @@ def test_show_outdated_local_dependencies(tester, poetry, installed, repo):
 cachy              0.2.0                       0.3.0
 project-with-setup 0.1.1 ../project_with_setup 0.1.2 ../project_with_setup
 """
-    assert expected.rstrip() == "\n".join(
-        line.rstrip() for line in tester.io.fetch_output().splitlines()
+    assert (
+        "\n".join(line.rstrip() for line in tester.io.fetch_output().splitlines())
+        == expected.rstrip()
     )
 
 
 @pytest.mark.parametrize("project_directory", ["project_with_git_dev_dependency"])
-def test_show_outdated_git_dev_dependency(tester, poetry, installed, repo):
+def test_show_outdated_git_dev_dependency(
+    tester: CommandTester,
+    poetry: Poetry,
+    installed: Repository,
+    repo: TestRepository,
+):
     cachy_010 = get_package("cachy", "0.1.0")
     cachy_010.description = "Cachy package"
     cachy_020 = get_package("cachy", "0.2.0")
@@ -797,8 +1116,8 @@ def test_show_outdated_git_dev_dependency(tester, poetry, installed, repo):
                     "checksum": [],
                     "source": {
                         "type": "git",
-                        "reference": "9cf87a285a2d3fbb0b9fa621997b3acc3631ed24",
-                        "resolved_reference": "9cf87a285a2d3fbb0b9fa621997b3acc3631ed24",
+                        "reference": MOCK_DEFAULT_GIT_REVISION,
+                        "resolved_reference": MOCK_DEFAULT_GIT_REVISION,
                         "url": "https://github.com/demo/demo.git",
                     },
                 },
@@ -829,11 +1148,16 @@ def test_show_outdated_git_dev_dependency(tester, poetry, installed, repo):
 demo  0.1.1 9cf87a2 0.1.2 9cf87a2 Demo package
 """
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
 
 
 @pytest.mark.parametrize("project_directory", ["project_with_git_dev_dependency"])
-def test_show_outdated_no_dev_git_dev_dependency(tester, poetry, installed, repo):
+def test_show_outdated_no_dev_git_dev_dependency(
+    tester: CommandTester,
+    poetry: Poetry,
+    installed: Repository,
+    repo: TestRepository,
+):
     cachy_010 = get_package("cachy", "0.1.0")
     cachy_010.description = "Cachy package"
     cachy_020 = get_package("cachy", "0.2.0")
@@ -892,7 +1216,7 @@ def test_show_outdated_no_dev_git_dev_dependency(tester, poetry, installed, repo
                     "checksum": [],
                     "source": {
                         "type": "git",
-                        "reference": "9cf87a285a2d3fbb0b9fa621997b3acc3631ed24",
+                        "reference": MOCK_DEFAULT_GIT_REVISION,
                         "url": "https://github.com/demo/pyproject-demo.git",
                     },
                 },
@@ -916,16 +1240,21 @@ def test_show_outdated_no_dev_git_dev_dependency(tester, poetry, installed, repo
         }
     )
 
-    tester.execute("--outdated --no-dev")
+    tester.execute("--outdated --without dev")
 
     expected = """\
 cachy 0.1.0 0.2.0 Cachy package
 """
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
 
 
-def test_show_hides_incompatible_package(tester, poetry, installed, repo):
+def test_show_hides_incompatible_package(
+    tester: CommandTester,
+    poetry: Poetry,
+    installed: Repository,
+    repo: TestRepository,
+):
     poetry.package.add_dependency(
         Factory.create_dependency("cachy", {"version": "^0.1.0", "python": "< 2.0"})
     )
@@ -978,10 +1307,15 @@ def test_show_hides_incompatible_package(tester, poetry, installed, repo):
 pendulum 2.0.0 Pendulum package
 """
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
 
 
-def test_show_all_shows_incompatible_package(tester, poetry, installed, repo):
+def test_show_all_shows_incompatible_package(
+    tester: CommandTester,
+    poetry: Poetry,
+    installed: Repository,
+    repo: TestRepository,
+):
     cachy_010 = get_package("cachy", "0.1.0")
     cachy_010.description = "Cachy package"
 
@@ -1031,10 +1365,18 @@ def test_show_all_shows_incompatible_package(tester, poetry, installed, repo):
 pendulum  2.0.0 Pendulum package
 """
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
 
 
-def test_show_non_dev_with_basic_installed_packages(tester, poetry, installed):
+def test_show_non_dev_with_basic_installed_packages(
+    tester: CommandTester, poetry: Poetry, installed: Repository
+):
+    poetry.package.add_dependency(Factory.create_dependency("cachy", "^0.1.0"))
+    poetry.package.add_dependency(Factory.create_dependency("pendulum", "^2.0.0"))
+    poetry.package.add_dependency(
+        Factory.create_dependency("pytest", "*", groups=["dev"])
+    )
+
     cachy_010 = get_package("cachy", "0.1.0")
     cachy_010.description = "Cachy package"
 
@@ -1092,17 +1434,178 @@ def test_show_non_dev_with_basic_installed_packages(tester, poetry, installed):
         }
     )
 
-    tester.execute("--no-dev")
+    tester.execute("--without dev")
 
     expected = """\
 cachy    0.1.0 Cachy package
 pendulum 2.0.0 Pendulum package
 """
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
+
+
+def test_show_with_group_only(
+    tester: CommandTester, poetry: Poetry, installed: Repository
+):
+    poetry.package.add_dependency(Factory.create_dependency("cachy", "^0.1.0"))
+    poetry.package.add_dependency(Factory.create_dependency("pendulum", "^2.0.0"))
+    poetry.package.add_dependency(
+        Factory.create_dependency("pytest", "*", groups=["dev"])
+    )
+
+    cachy_010 = get_package("cachy", "0.1.0")
+    cachy_010.description = "Cachy package"
+
+    pendulum_200 = get_package("pendulum", "2.0.0")
+    pendulum_200.description = "Pendulum package"
 
+    pytest_373 = get_package("pytest", "3.7.3")
+    pytest_373.description = "Pytest package"
+    pytest_373.category = "dev"
+
+    installed.add_package(cachy_010)
+    installed.add_package(pendulum_200)
+    installed.add_package(pytest_373)
 
-def test_show_tree(tester, poetry, installed):
+    poetry.locker.mock_lock_data(
+        {
+            "package": [
+                {
+                    "name": "cachy",
+                    "version": "0.1.0",
+                    "description": "Cachy package",
+                    "category": "main",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                },
+                {
+                    "name": "pendulum",
+                    "version": "2.0.0",
+                    "description": "Pendulum package",
+                    "category": "main",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                },
+                {
+                    "name": "pytest",
+                    "version": "3.7.3",
+                    "description": "Pytest package",
+                    "category": "dev",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                },
+            ],
+            "metadata": {
+                "python-versions": "*",
+                "platform": "*",
+                "content-hash": "123456789",
+                "hashes": {"cachy": [], "pendulum": [], "pytest": []},
+            },
+        }
+    )
+
+    tester.execute("--only dev")
+
+    expected = """\
+pytest 3.7.3 Pytest package
+"""
+
+    assert tester.io.fetch_output() == expected
+
+
+def test_show_with_optional_group(
+    tester: CommandTester, poetry: Poetry, installed: Repository
+):
+    poetry.package.add_dependency(Factory.create_dependency("cachy", "^0.1.0"))
+    poetry.package.add_dependency(Factory.create_dependency("pendulum", "^2.0.0"))
+    group = DependencyGroup("dev", optional=True)
+    group.add_dependency(Factory.create_dependency("pytest", "*", groups=["dev"]))
+    poetry.package.add_dependency_group(group)
+
+    cachy_010 = get_package("cachy", "0.1.0")
+    cachy_010.description = "Cachy package"
+
+    pendulum_200 = get_package("pendulum", "2.0.0")
+    pendulum_200.description = "Pendulum package"
+
+    pytest_373 = get_package("pytest", "3.7.3")
+    pytest_373.description = "Pytest package"
+    pytest_373.category = "dev"
+
+    installed.add_package(cachy_010)
+    installed.add_package(pendulum_200)
+    installed.add_package(pytest_373)
+
+    poetry.locker.mock_lock_data(
+        {
+            "package": [
+                {
+                    "name": "cachy",
+                    "version": "0.1.0",
+                    "description": "Cachy package",
+                    "category": "main",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                },
+                {
+                    "name": "pendulum",
+                    "version": "2.0.0",
+                    "description": "Pendulum package",
+                    "category": "main",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                },
+                {
+                    "name": "pytest",
+                    "version": "3.7.3",
+                    "description": "Pytest package",
+                    "category": "dev",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                },
+            ],
+            "metadata": {
+                "python-versions": "*",
+                "platform": "*",
+                "content-hash": "123456789",
+                "hashes": {"cachy": [], "pendulum": [], "pytest": []},
+            },
+        }
+    )
+
+    tester.execute()
+
+    expected = """\
+cachy    0.1.0 Cachy package
+pendulum 2.0.0 Pendulum package
+"""
+
+    assert tester.io.fetch_output() == expected
+
+    tester.execute("--with dev")
+
+    expected = """\
+cachy    0.1.0 Cachy package
+pendulum 2.0.0 Pendulum package
+pytest   3.7.3 Pytest package
+"""
+
+    assert tester.io.fetch_output() == expected
+
+
+def test_show_tree(tester: CommandTester, poetry: Poetry, installed: Repository):
     poetry.package.add_dependency(Factory.create_dependency("cachy", "^0.2.0"))
 
     cachy2 = get_package("cachy", "0.2.0")
@@ -1144,20 +1647,20 @@ def test_show_tree(tester, poetry, installed):
         }
     )
 
-    tester.execute("--tree")
+    tester.execute("--tree", supports_utf8=False)
 
     expected = """\
 cachy 0.2.0
 `-- msgpack-python >=0.5 <0.6
 """
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
 
 
-def test_show_tree_no_dev(tester, poetry, installed):
+def test_show_tree_no_dev(tester: CommandTester, poetry: Poetry, installed: Repository):
     poetry.package.add_dependency(Factory.create_dependency("cachy", "^0.2.0"))
     poetry.package.add_dependency(
-        Factory.create_dependency("pytest", "^6.1.0", category="dev")
+        Factory.create_dependency("pytest", "^6.1.0", groups=["dev"])
     )
 
     cachy2 = get_package("cachy", "0.2.0")
@@ -1211,11 +1714,213 @@ def test_show_tree_no_dev(tester, poetry, installed):
         }
     )
 
-    tester.execute("--tree --no-dev")
+    tester.execute("--tree --without dev")
 
     expected = """\
 cachy 0.2.0
-`-- msgpack-python >=0.5 <0.6
+└── msgpack-python >=0.5 <0.6
 """
 
-    assert expected == tester.io.fetch_output()
+    assert tester.io.fetch_output() == expected
+
+
+def test_show_tree_why_package(
+    tester: CommandTester, poetry: Poetry, installed: Repository
+):
+    poetry.package.add_dependency(Factory.create_dependency("a", "=0.0.1"))
+
+    a = get_package("a", "0.0.1")
+    installed.add_package(a)
+    a.add_dependency(Factory.create_dependency("b", "=0.0.1"))
+
+    b = get_package("b", "0.0.1")
+    a.add_dependency(Factory.create_dependency("c", "=0.0.1"))
+    installed.add_package(b)
+
+    c = get_package("c", "0.0.1")
+    installed.add_package(c)
+
+    poetry.locker.mock_lock_data(
+        {
+            "package": [
+                {
+                    "name": "a",
+                    "version": "0.0.1",
+                    "dependencies": {"b": "=0.0.1"},
+                    "python-versions": "*",
+                    "optional": False,
+                },
+                {
+                    "name": "b",
+                    "version": "0.0.1",
+                    "dependencies": {"c": "=0.0.1"},
+                    "python-versions": "*",
+                    "optional": False,
+                },
+                {
+                    "name": "c",
+                    "version": "0.0.1",
+                    "python-versions": "*",
+                    "optional": False,
+                },
+            ],
+            "metadata": {
+                "python-versions": "*",
+                "platform": "*",
+                "content-hash": "123456789",
+                "hashes": {"a": [], "b": [], "c": []},
+            },
+        }
+    )
+
+    tester.execute("--tree --why b")
+
+    expected = """\
+a 0.0.1
+└── b =0.0.1
+    └── c =0.0.1 \n"""
+
+    assert tester.io.fetch_output() == expected
+
+
+def test_show_tree_why(tester: CommandTester, poetry: Poetry, installed: Repository):
+    poetry.package.add_dependency(Factory.create_dependency("a", "=0.0.1"))
+
+    a = get_package("a", "0.0.1")
+    installed.add_package(a)
+    a.add_dependency(Factory.create_dependency("b", "=0.0.1"))
+
+    b = get_package("b", "0.0.1")
+    a.add_dependency(Factory.create_dependency("c", "=0.0.1"))
+    installed.add_package(b)
+
+    c = get_package("c", "0.0.1")
+    installed.add_package(c)
+
+    poetry.locker.mock_lock_data(
+        {
+            "package": [
+                {
+                    "name": "a",
+                    "version": "0.0.1",
+                    "dependencies": {"b": "=0.0.1"},
+                    "python-versions": "*",
+                    "optional": False,
+                },
+                {
+                    "name": "b",
+                    "version": "0.0.1",
+                    "dependencies": {"c": "=0.0.1"},
+                    "python-versions": "*",
+                    "optional": False,
+                },
+                {
+                    "name": "c",
+                    "version": "0.0.1",
+                    "python-versions": "*",
+                    "optional": False,
+                },
+            ],
+            "metadata": {
+                "python-versions": "*",
+                "platform": "*",
+                "content-hash": "123456789",
+                "hashes": {"a": [], "b": [], "c": []},
+            },
+        }
+    )
+
+    tester.execute("--why")
+
+    # this has to be on a single line due to the padding whitespace, which gets stripped
+    # by pre-commit.
+    expected = """a 0.0.1        \nb 0.0.1 from a \nc 0.0.1 from b \n"""
+
+    assert tester.io.fetch_output() == expected
+
+
+def test_show_required_by_deps(
+    tester: CommandTester, poetry: Poetry, installed: Repository
+):
+    poetry.package.add_dependency(Factory.create_dependency("cachy", "^0.2.0"))
+    poetry.package.add_dependency(Factory.create_dependency("pendulum", "2.0.0"))
+
+    cachy2 = get_package("cachy", "0.2.0")
+    cachy2.add_dependency(Factory.create_dependency("msgpack-python", ">=0.5 <0.6"))
+
+    pendulum = get_package("pendulum", "2.0.0")
+    pendulum.add_dependency(Factory.create_dependency("CachY", "^0.2.0"))
+
+    installed.add_package(cachy2)
+    installed.add_package(pendulum)
+
+    poetry.locker.mock_lock_data(
+        {
+            "package": [
+                {
+                    "name": "cachy",
+                    "version": "0.2.0",
+                    "description": "",
+                    "category": "main",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                    "dependencies": {"msgpack-python": ">=0.5 <0.6"},
+                },
+                {
+                    "name": "pendulum",
+                    "version": "2.0.0",
+                    "description": "Pendulum package",
+                    "category": "main",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                    "dependencies": {"cachy": ">=0.2.0 <0.3.0"},
+                },
+                {
+                    "name": "msgpack-python",
+                    "version": "0.5.1",
+                    "description": "",
+                    "category": "main",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                },
+            ],
+            "metadata": {
+                "python-versions": "*",
+                "platform": "*",
+                "content-hash": "123456789",
+                "hashes": {"cachy": [], "pendulum": [], "msgpack-python": []},
+            },
+        }
+    )
+
+    tester.execute("cachy")
+
+    expected = """\
+ name         : cachy
+ version      : 0.2.0
+ description  :
+
+dependencies
+ - msgpack-python >=0.5 <0.6
+
+required by
+ - pendulum >=0.2.0 <0.3.0
+""".splitlines()
+    actual = [line.rstrip() for line in tester.io.fetch_output().splitlines()]
+    assert actual == expected
+
+
+def test_show_errors_without_lock_file(tester: CommandTester, poetry: Poetry):
+    assert not poetry.locker.lock.exists()
+
+    tester.execute()
+
+    expected = "Error: poetry.lock not found. Run `poetry lock` to create it.\n"
+    assert tester.io.fetch_error() == expected
+    assert tester.status_code == 1
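One detail worth flagging in the hunks above: test_show_basic_with_installed_packages_single_canonicalized looks up "Foo_Bar" and test_show_required_by_deps declares a dependency on "CachY", yet both resolve, because package names are normalized per PEP 503. A quick sketch with `packaging` (poetry uses an equivalent helper internally):

from packaging.utils import canonicalize_name

# lowercase, and collapse runs of "-", "_" and "." into a single "-"
assert canonicalize_name("Foo_Bar") == "foo-bar"
assert canonicalize_name("CachY") == "cachy"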
diff --git a/vendor/poetry/tests/console/commands/test_update.py b/vendor/poetry/tests/console/commands/test_update.py
new file mode 100644
index 00000000..dd3306cf
--- /dev/null
+++ b/vendor/poetry/tests/console/commands/test_update.py
@@ -0,0 +1,57 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+
+from tests.helpers import get_package
+
+
+if TYPE_CHECKING:
+    from poetry.poetry import Poetry
+    from tests.helpers import TestRepository
+    from tests.types import CommandTesterFactory
+    from tests.types import FixtureDirGetter
+    from tests.types import ProjectFactory
+
+
+@pytest.fixture
+def poetry_with_up_to_date_lockfile(
+    project_factory: ProjectFactory, fixture_dir: FixtureDirGetter
+) -> Poetry:
+    source = fixture_dir("outdated_lock")
+
+    return project_factory(
+        name="foobar",
+        pyproject_content=(source / "pyproject.toml").read_text(encoding="utf-8"),
+        poetry_lock_content=(source / "poetry.lock").read_text(encoding="utf-8"),
+    )
+
+
+@pytest.mark.parametrize(
+    "command",
+    [
+        "--dry-run",
+        "docker --dry-run",
+    ],
+)
+def test_update_with_dry_run_keep_files_intact(
+    command: str,
+    poetry_with_up_to_date_lockfile: Poetry,
+    repo: TestRepository,
+    command_tester_factory: CommandTesterFactory,
+):
+    tester = command_tester_factory("update", poetry=poetry_with_up_to_date_lockfile)
+
+    original_pyproject_content = poetry_with_up_to_date_lockfile.file.read()
+    original_lockfile_content = poetry_with_up_to_date_lockfile._locker.lock_data
+
+    repo.add_package(get_package("docker", "4.3.0"))
+    repo.add_package(get_package("docker", "4.3.1"))
+
+    tester.execute(command)
+
+    assert poetry_with_up_to_date_lockfile.file.read() == original_pyproject_content
+    assert (
+        poetry_with_up_to_date_lockfile._locker.lock_data == original_lockfile_content
+    )
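The two parametrized commands correspond to `poetry update --dry-run` and `poetry update docker --dry-run`; the invariant is that a dry run resolves dependencies without writing anything. The same invariant, sketched at the CLI level outside the test harness (hypothetical project directory):

import subprocess
from pathlib import Path

before = Path("poetry.lock").read_bytes()
subprocess.run(["poetry", "update", "docker", "--dry-run"], check=True)
# a dry run must leave the lock file (and pyproject.toml) byte-identical
assert Path("poetry.lock").read_bytes() == before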
diff --git a/vendor/poetry/tests/console/commands/test_version.py b/vendor/poetry/tests/console/commands/test_version.py
index 77f6d8aa..d578b7fd 100644
--- a/vendor/poetry/tests/console/commands/test_version.py
+++ b/vendor/poetry/tests/console/commands/test_version.py
@@ -1,15 +1,25 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
 import pytest
 
-from poetry.console.commands import VersionCommand
+from poetry.console.commands.version import VersionCommand
+
+
+if TYPE_CHECKING:
+    from cleo.testers.command_tester import CommandTester
+
+    from tests.types import CommandTesterFactory
 
 
 @pytest.fixture()
-def command():
+def command() -> VersionCommand:
     return VersionCommand()
 
 
 @pytest.fixture
-def tester(command_tester_factory):
+def tester(command_tester_factory: CommandTesterFactory) -> CommandTester:
     return command_tester_factory("version")
 
 
@@ -25,29 +35,50 @@ def tester(command_tester_factory):
         ("1.2.3", "patch", "1.2.4"),
         ("1.2.3", "minor", "1.3.0"),
         ("1.2.3", "major", "2.0.0"),
-        ("1.2.3", "prepatch", "1.2.4-alpha.0"),
-        ("1.2.3", "preminor", "1.3.0-alpha.0"),
-        ("1.2.3", "premajor", "2.0.0-alpha.0"),
+        ("1.2.3", "prepatch", "1.2.4a0"),
+        ("1.2.3", "preminor", "1.3.0a0"),
+        ("1.2.3", "premajor", "2.0.0a0"),
         ("1.2.3-beta.1", "patch", "1.2.3"),
         ("1.2.3-beta.1", "minor", "1.3.0"),
         ("1.2.3-beta.1", "major", "2.0.0"),
-        ("1.2.3-beta.1", "prerelease", "1.2.3-beta.2"),
-        ("1.2.3-beta1", "prerelease", "1.2.3-beta.2"),
-        ("1.2.3beta1", "prerelease", "1.2.3-beta.2"),
-        ("1.2.3b1", "prerelease", "1.2.3-beta.2"),
-        ("1.2.3", "prerelease", "1.2.4-alpha.0"),
+        ("1.2.3-beta.1", "prerelease", "1.2.3b2"),
+        ("1.2.3-beta1", "prerelease", "1.2.3b2"),
+        ("1.2.3beta1", "prerelease", "1.2.3b2"),
+        ("1.2.3b1", "prerelease", "1.2.3b2"),
+        ("1.2.3", "prerelease", "1.2.4a0"),
         ("0.0.0", "1.2.3", "1.2.3"),
     ],
 )
-def test_increment_version(version, rule, expected, command):
-    assert expected == command.increment_version(version, rule).text
+def test_increment_version(
+    version: str, rule: str, expected: str, command: VersionCommand
+):
+    assert command.increment_version(version, rule).text == expected
 
 
-def test_version_show(tester):
+def test_version_show(tester: CommandTester):
     tester.execute()
-    assert "simple-project 1.2.3\n" == tester.io.fetch_output()
+    assert tester.io.fetch_output() == "simple-project 1.2.3\n"
 
 
-def test_short_version_show(tester):
+def test_short_version_show(tester: CommandTester):
     tester.execute("--short")
-    assert "1.2.3\n" == tester.io.fetch_output()
+    assert tester.io.fetch_output() == "1.2.3\n"
+
+
+def test_version_update(tester: CommandTester):
+    tester.execute("2.0.0")
+    assert tester.io.fetch_output() == "Bumping version from 1.2.3 to 2.0.0\n"
+
+
+def test_short_version_update(tester: CommandTester):
+    tester.execute("--short 2.0.0")
+    assert tester.io.fetch_output() == "2.0.0\n"
+
+
+def test_dry_run(tester: CommandTester):
+    old_pyproject = tester.command.poetry.file.path.read_text()
+    tester.execute("--dry-run major")
+
+    new_pyproject = tester.command.poetry.file.path.read_text()
+    assert tester.io.fetch_output() == "Bumping version from 1.2.3 to 2.0.0\n"
+    assert old_pyproject == new_pyproject
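The parametrization above fully determines the bump semantics, now spelled per PEP 440 ("1.2.4a0" rather than "1.2.4-alpha.0"). A compact sketch that reproduces exactly these cases (poetry delegates the real work to poetry-core's Version, so this is illustrative only):

import re


def bump(version: str, rule: str) -> str:
    m = re.match(r"^(\d+)\.(\d+)\.(\d+)(?:[-.]?(alpha|beta|a|b|rc)\.?(\d+))?", version)
    major, minor, patch = int(m.group(1)), int(m.group(2)), int(m.group(3))
    phase, pre = m.group(4), m.group(5)
    if rule == "major":
        return f"{major + 1}.0.0"
    if rule == "minor":
        return f"{major}.{minor + 1}.0"
    if rule == "patch":
        # a prerelease "graduates" to its final release instead of bumping again
        return f"{major}.{minor}.{patch}" if phase else f"{major}.{minor}.{patch + 1}"
    if rule == "premajor":
        return f"{major + 1}.0.0a0"
    if rule == "preminor":
        return f"{major}.{minor + 1}.0a0"
    if rule == "prepatch":
        return f"{major}.{minor}.{patch + 1}a0"
    if rule == "prerelease":
        if phase:  # bump the existing prerelease number, normalizing the phase
            short = {"alpha": "a", "beta": "b"}.get(phase, phase)
            return f"{major}.{minor}.{patch}{short}{int(pre) + 1}"
        return f"{major}.{minor}.{patch + 1}a0"
    return rule  # an explicit version such as "1.2.3" is used as-is


assert bump("1.2.3", "prerelease") == "1.2.4a0"
assert bump("1.2.3-beta.1", "prerelease") == "1.2.3b2"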
diff --git a/vendor/poetry/tests/console/conftest.py b/vendor/poetry/tests/console/conftest.py
index 8a3a5cba..a78ceaa8 100644
--- a/vendor/poetry/tests/console/conftest.py
+++ b/vendor/poetry/tests/console/conftest.py
@@ -1,33 +1,59 @@
+from __future__ import annotations
+
 import os
 
+from pathlib import Path
+from typing import TYPE_CHECKING
+
 import pytest
 
-from cleo import ApplicationTester
+from cleo.io.null_io import NullIO
+from cleo.testers.application_tester import ApplicationTester
+from cleo.testers.command_tester import CommandTester
 
-from poetry.factory import Factory
+from poetry.installation import Installer
 from poetry.installation.noop_installer import NoopInstaller
-from poetry.io.null_io import NullIO
-from poetry.repositories import Pool
-from poetry.utils._compat import Path
 from poetry.utils.env import MockEnv
-from tests.helpers import TestApplication
+from tests.helpers import MOCK_DEFAULT_GIT_REVISION
+from tests.helpers import PoetryTestApplication
 from tests.helpers import TestExecutor
-from tests.helpers import TestLocker
 from tests.helpers import mock_clone
 
 
+if TYPE_CHECKING:
+    from collections.abc import Iterator
+
+    from pytest_mock import MockerFixture
+
+    from poetry.installation.executor import Executor
+    from poetry.poetry import Poetry
+    from poetry.repositories import Repository
+    from poetry.utils.env import Env
+    from tests.conftest import Config
+    from tests.types import CommandTesterFactory
+    from tests.types import ProjectFactory
+
+
 @pytest.fixture()
-def installer():
+def installer() -> NoopInstaller:
     return NoopInstaller()
 
 
 @pytest.fixture
-def env():
-    return MockEnv(path=Path("/prefix"), base=Path("/base/prefix"), is_venv=True)
+def env(tmp_dir: str) -> MockEnv:
+    path = Path(tmp_dir) / ".venv"
+    path.mkdir(parents=True)
+    return MockEnv(path=path, is_venv=True)
 
 
 @pytest.fixture(autouse=True)
-def setup(mocker, installer, installed, config, env):
+def setup(
+    mocker: MockerFixture,
+    installer: NoopInstaller,
+    installed: Repository,
+    config: Config,
+    env: MockEnv,
+) -> Iterator[None]:
     # Set Installer's installer
     p = mocker.patch("poetry.installation.installer.Installer._get_installer")
     p.return_value = installer
@@ -44,10 +70,9 @@ def setup(mocker, installer, installed, config, env):
     p.return_value = installed
 
     # Patch git module to not actually clone projects
-    mocker.patch("poetry.core.vcs.git.Git.clone", new=mock_clone)
-    mocker.patch("poetry.core.vcs.git.Git.checkout", new=lambda *_: None)
-    p = mocker.patch("poetry.core.vcs.git.Git.rev_parse")
-    p.return_value = "9cf87a285a2d3fbb0b9fa621997b3acc3631ed24"
+    mocker.patch("poetry.vcs.git.Git.clone", new=mock_clone)
+    p = mocker.patch("poetry.vcs.git.Git.get_revision")
+    p.return_value = MOCK_DEFAULT_GIT_REVISION
 
     # Patch the virtual environment creation to actually do nothing
     mocker.patch("poetry.utils.env.EnvManager.create_venv", return_value=env)
@@ -66,50 +91,90 @@ def setup(mocker, installer, installed, config, env):
 
 
 @pytest.fixture
-def project_directory():
+def project_directory() -> str:
     return "simple_project"
 
 
 @pytest.fixture
-def poetry(repo, project_directory, config):
-    p = Factory().create_poetry(
-        Path(__file__).parent.parent / "fixtures" / project_directory
+def poetry(project_directory: str, project_factory: ProjectFactory) -> Poetry:
+    return project_factory(
+        name="simple",
+        source=Path(__file__).parent.parent / "fixtures" / project_directory,
     )
-    p.set_locker(TestLocker(p.locker.lock.path, p.locker._local_config))
-
-    with p.file.path.open(encoding="utf-8") as f:
-        content = f.read()
-
-    p.set_config(config)
-
-    pool = Pool()
-    pool.add_repository(repo)
-    p.set_pool(pool)
-
-    yield p
-
-    with p.file.path.open("w", encoding="utf-8") as f:
-        f.write(content)
 
 
 @pytest.fixture
-def app(poetry):
-    app_ = TestApplication(poetry)
-    app_.config.set_terminate_after_run(False)
-
+def app(poetry: Poetry) -> PoetryTestApplication:
+    app_ = PoetryTestApplication(poetry)
+    app_._load_plugins()
     return app_
 
 
 @pytest.fixture
-def app_tester(app):
+def app_tester(app: PoetryTestApplication) -> ApplicationTester:
     return ApplicationTester(app)
 
 
 @pytest.fixture
-def new_installer_disabled(config):
+def new_installer_disabled(config: Config) -> None:
     config.merge({"experimental": {"new-installer": False}})
 
 
 @pytest.fixture()
-def executor(poetry, config, env):
+def executor(poetry: Poetry, config: Config, env: MockEnv) -> TestExecutor:
     return TestExecutor(env, poetry.pool, config, NullIO())
+
+
+@pytest.fixture
+def command_tester_factory(
+    app: PoetryTestApplication, env: MockEnv
+) -> CommandTesterFactory:
+    def _tester(
+        command: str,
+        poetry: Poetry | None = None,
+        installer: Installer | None = None,
+        executor: Executor | None = None,
+        environment: Env | None = None,
+    ) -> CommandTester:
+        command = app.find(command)
+        tester = CommandTester(command)
+
+        # Setting the formatter from the application
+        # TODO: Find a better way to do this in Cleo
+        app_io = app.create_io()
+        formatter = app_io.output.formatter
+        tester.io.output.set_formatter(formatter)
+        tester.io.error_output.set_formatter(formatter)
+
+        if poetry:
+            app._poetry = poetry
+
+        poetry = app.poetry
+        command._pool = poetry.pool
+
+        if hasattr(command, "set_env"):
+            command.set_env(environment or env)
+
+        if hasattr(command, "set_installer"):
+            installer = installer or Installer(
+                tester.io,
+                env,
+                poetry.package,
+                poetry.locker,
+                poetry.pool,
+                poetry.config,
+                executor=executor
+                or TestExecutor(env, poetry.pool, poetry.config, tester.io),
+            )
+            installer.use_executor(True)
+            command.set_installer(installer)
+
+        return tester
+
+    return _tester
+
+
+@pytest.fixture
+def do_lock(command_tester_factory: CommandTesterFactory, poetry: Poetry) -> None:
+    command_tester_factory("lock").execute()
+    assert poetry.locker.lock.exists()
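This factory is the hub of the console suite: it builds a CommandTester against the shared application, propagates the application's formatter, swaps in an alternative Poetry, environment, installer, or executor when given one, and backs installer-aware commands with a TestExecutor. Typical usage from a test, as seen throughout this diff (a sketch; the command string is arbitrary):

def test_example(command_tester_factory, tmp_venv):
    # fixture names here match this conftest
    tester = command_tester_factory("env info", environment=tmp_venv)

    status = tester.execute()

    assert status == 0
    assert tester.io.fetch_output()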
diff --git a/vendor/poetry/tests/console/test_application.py b/vendor/poetry/tests/console/test_application.py
new file mode 100644
index 00000000..06a99421
--- /dev/null
+++ b/vendor/poetry/tests/console/test_application.py
@@ -0,0 +1,101 @@
+from __future__ import annotations
+
+import re
+
+from typing import TYPE_CHECKING
+
+import pytest
+
+from cleo.testers.application_tester import ApplicationTester
+
+from poetry.console.application import Application
+from poetry.console.commands.command import Command
+from poetry.plugins.application_plugin import ApplicationPlugin
+from tests.helpers import mock_metadata_entry_points
+
+
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
+
+
+class FooCommand(Command):
+    name = "foo"
+
+    description = "Foo Command"
+
+    def handle(self) -> int:
+        self.line("foo called")
+
+        return 0
+
+
+class AddCommandPlugin(ApplicationPlugin):
+    commands = [FooCommand]
+
+
+@pytest.fixture
+def with_add_command_plugin(mocker: MockerFixture) -> None:
+    mock_metadata_entry_points(mocker, AddCommandPlugin)
+
+
+def test_application_with_plugins(with_add_command_plugin: None):
+    app = Application()
+
+    tester = ApplicationTester(app)
+    tester.execute("")
+
+    assert re.search(r"\s+foo\s+Foo Command", tester.io.fetch_output()) is not None
+    assert tester.status_code == 0
+
+
+def test_application_with_plugins_disabled(with_add_command_plugin: None):
+    app = Application()
+
+    tester = ApplicationTester(app)
+    tester.execute("--no-plugins")
+
+    assert re.search(r"\s+foo\s+Foo Command", tester.io.fetch_output()) is None
+    assert tester.status_code == 0
+
+
+def test_application_execute_plugin_command(with_add_command_plugin: None):
+    app = Application()
+
+    tester = ApplicationTester(app)
+    tester.execute("foo")
+
+    assert tester.io.fetch_output() == "foo called\n"
+    assert tester.status_code == 0
+
+
+def test_application_execute_plugin_command_with_plugins_disabled(
+    with_add_command_plugin: None,
+):
+    app = Application()
+
+    tester = ApplicationTester(app)
+    tester.execute("foo --no-plugins")
+
+    assert tester.io.fetch_output() == ""
+    assert tester.io.fetch_error() == '\nThe command "foo" does not exist.\n'
+    assert tester.status_code == 1
+
+
+@pytest.mark.parametrize("disable_cache", [True, False])
+def test_application_verify_source_cache_flag(disable_cache: bool):
+    app = Application()
+
+    tester = ApplicationTester(app)
+    command = "debug info"
+
+    if disable_cache:
+        command = f"{command} --no-cache"
+
+    assert not app._poetry
+
+    tester.execute(command)
+
+    assert app.poetry.pool.repositories
+
+    for repo in app.poetry.pool.repositories:
+        assert repo._disable_cache == disable_cache
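mock_metadata_entry_points simulates what Application._load_plugins does at startup: scan an entry-point group for ApplicationPlugin implementations and register their commands, unless --no-plugins is passed. The distribution-side contract, sketched here reusing AddCommandPlugin from the test above (the group name "poetry.application.plugin" is taken from poetry's plugin documentation):

# In a real plugin's pyproject.toml:
#
#   [tool.poetry.plugins."poetry.application.plugin"]
#   foo-plugin = "my_plugin:AddCommandPlugin"
#
# Plugin loading then amounts to roughly this for each discovered class:
from poetry.console.application import Application

app = Application()
AddCommandPlugin().activate(app)  # registers FooCommand with the command loader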
diff --git a/vendor/poetry/tests/fixtures/directory/project_with_transitive_directory_dependencies/setup.py b/vendor/poetry/tests/fixtures/directory/project_with_transitive_directory_dependencies/setup.py
index 24a8f05b..7cf236bd 100644
--- a/vendor/poetry/tests/fixtures/directory/project_with_transitive_directory_dependencies/setup.py
+++ b/vendor/poetry/tests/fixtures/directory/project_with_transitive_directory_dependencies/setup.py
@@ -1,6 +1,8 @@
-# -*- coding: utf-8 -*-
+from __future__ import annotations
+
 from distutils.core import setup
 
+
 packages = ["project_with_extras"]
 
 package_data = {"": ["*"]}
diff --git a/vendor/poetry/tests/fixtures/excluded_subpackage/example/__init__.py b/vendor/poetry/tests/fixtures/excluded_subpackage/example/__init__.py
index 3dc1f76b..4e562f46 100644
--- a/vendor/poetry/tests/fixtures/excluded_subpackage/example/__init__.py
+++ b/vendor/poetry/tests/fixtures/excluded_subpackage/example/__init__.py
@@ -1 +1,4 @@
+from __future__ import annotations
+
+
 __version__ = "0.1.0"
diff --git a/vendor/poetry/tests/fixtures/excluded_subpackage/example/test/excluded.py b/vendor/poetry/tests/fixtures/excluded_subpackage/example/test/excluded.py
index bf6e1f89..de77b168 100644
--- a/vendor/poetry/tests/fixtures/excluded_subpackage/example/test/excluded.py
+++ b/vendor/poetry/tests/fixtures/excluded_subpackage/example/test/excluded.py
@@ -1,4 +1,6 @@
-from .. import __version__
+from __future__ import annotations
+
+from tests.fixtures.excluded_subpackage.example import __version__
 
 
 def test_version():
diff --git a/vendor/poetry/tests/fixtures/extended_project/build.py b/vendor/poetry/tests/fixtures/extended_project/build.py
index e69de29b..7a12c342 100644
--- a/vendor/poetry/tests/fixtures/extended_project/build.py
+++ b/vendor/poetry/tests/fixtures/extended_project/build.py
@@ -0,0 +1,12 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import Any
+
+
+def build(setup_kwargs: dict[str, Any]):
+    assert setup_kwargs["name"] == "extended-project"
+    assert setup_kwargs["version"] == "1.2.3"
+
+    dynamic_module = Path(__file__).parent / "extended_project" / "built.py"
+    dynamic_module.write_text("# Generated by build.py")
diff --git a/vendor/poetry/tests/fixtures/extended_project/pyproject.toml b/vendor/poetry/tests/fixtures/extended_project/pyproject.toml
index 954b12b3..15b72917 100644
--- a/vendor/poetry/tests/fixtures/extended_project/pyproject.toml
+++ b/vendor/poetry/tests/fixtures/extended_project/pyproject.toml
@@ -20,11 +20,10 @@ classifiers = [
     "Topic :: Software Development :: Libraries :: Python Modules"
 ]
 
-build = "build.py"
+[tool.poetry.build]
+script = "build.py"
+generate-setup-file = true
 
 # Requirements
 [tool.poetry.dependencies]
-python = "~2.7 || ^3.4"
-
-[tool.poetry.scripts]
-foo = "foo:bar"
+python = "^3.7"
diff --git a/vendor/poetry/tests/fixtures/extended_project_without_setup/pyproject.toml b/vendor/poetry/tests/fixtures/extended_project_without_setup/pyproject.toml
index 42375c03..b3f4818f 100644
--- a/vendor/poetry/tests/fixtures/extended_project_without_setup/pyproject.toml
+++ b/vendor/poetry/tests/fixtures/extended_project_without_setup/pyproject.toml
@@ -28,5 +28,6 @@ generate-setup-file = false
 [tool.poetry.dependencies]
 python = "~2.7 || ^3.4"
 
-[tool.poetry.scripts]
-foo = "foo:bar"
+[build-system]
+requires = ["poetry-core", "cython"]
+build-backend = "poetry.core.masonry.api"
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/demo/demo.egg-info/PKG-INFO b/vendor/poetry/tests/fixtures/git/github.com/demo/demo/demo.egg-info/PKG-INFO
old mode 100755
new mode 100644
index 7f543c49..c6d97810
--- a/vendor/poetry/tests/fixtures/git/github.com/demo/demo/demo.egg-info/PKG-INFO
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/demo/demo.egg-info/PKG-INFO
@@ -6,7 +6,5 @@ Home-page: https://github.com/demo/demo
 Author: Sébastien Eustace
 Author-email: sebastien@eustace.io
 License: MIT
-Description: UNKNOWN
-Platform: UNKNOWN
-Provides-Extra: bar
 Provides-Extra: foo
+Provides-Extra: bar
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/demo/demo.egg-info/SOURCES.txt b/vendor/poetry/tests/fixtures/git/github.com/demo/demo/demo.egg-info/SOURCES.txt
old mode 100755
new mode 100644
index 6eb8a3ef..3c0a858a
--- a/vendor/poetry/tests/fixtures/git/github.com/demo/demo/demo.egg-info/SOURCES.txt
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/demo/demo.egg-info/SOURCES.txt
@@ -4,4 +4,4 @@ demo.egg-info/PKG-INFO
 demo.egg-info/SOURCES.txt
 demo.egg-info/dependency_links.txt
 demo.egg-info/requires.txt
-demo.egg-info/top_level.txt
+demo.egg-info/top_level.txt
\ No newline at end of file
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/demo/demo.egg-info/dependency_links.txt b/vendor/poetry/tests/fixtures/git/github.com/demo/demo/demo.egg-info/dependency_links.txt
old mode 100755
new mode 100644
index e69de29b..8b137891
--- a/vendor/poetry/tests/fixtures/git/github.com/demo/demo/demo.egg-info/dependency_links.txt
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/demo/demo.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/demo/demo.egg-info/requires.txt b/vendor/poetry/tests/fixtures/git/github.com/demo/demo/demo.egg-info/requires.txt
old mode 100755
new mode 100644
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/demo/demo.egg-info/top_level.txt b/vendor/poetry/tests/fixtures/git/github.com/demo/demo/demo.egg-info/top_level.txt
old mode 100755
new mode 100644
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/demo/demo/__init__.py b/vendor/poetry/tests/fixtures/git/github.com/demo/demo/demo/__init__.py
index 10aa336c..54887185 100644
--- a/vendor/poetry/tests/fixtures/git/github.com/demo/demo/demo/__init__.py
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/demo/demo/__init__.py
@@ -1 +1,4 @@
+from __future__ import annotations
+
+
 __version__ = "1.2.3"
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/demo/setup.py b/vendor/poetry/tests/fixtures/git/github.com/demo/demo/setup.py
index faebbc83..7a4d0142 100644
--- a/vendor/poetry/tests/fixtures/git/github.com/demo/demo/setup.py
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/demo/setup.py
@@ -1,4 +1,5 @@
-# -*- coding: utf-8 -*-
+from __future__ import annotations
+
 from setuptools import setup
 
 
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/namespace-package-one/namespace_package/__init__.py b/vendor/poetry/tests/fixtures/git/github.com/demo/namespace-package-one/namespace_package/__init__.py
index 5284146e..77e038d7 100644
--- a/vendor/poetry/tests/fixtures/git/github.com/demo/namespace-package-one/namespace_package/__init__.py
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/namespace-package-one/namespace_package/__init__.py
@@ -1 +1,4 @@
+from __future__ import annotations
+
+
 __import__("pkg_resources").declare_namespace(__name__)
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/namespace-package-one/namespace_package/one/__init__.py b/vendor/poetry/tests/fixtures/git/github.com/demo/namespace-package-one/namespace_package/one/__init__.py
index 1ac9243c..29fc8168 100644
--- a/vendor/poetry/tests/fixtures/git/github.com/demo/namespace-package-one/namespace_package/one/__init__.py
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/namespace-package-one/namespace_package/one/__init__.py
@@ -1 +1,4 @@
+from __future__ import annotations
+
+
 name = "one"
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/namespace-package-one/setup.py b/vendor/poetry/tests/fixtures/git/github.com/demo/namespace-package-one/setup.py
index 871fae6c..b869dbc8 100644
--- a/vendor/poetry/tests/fixtures/git/github.com/demo/namespace-package-one/setup.py
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/namespace-package-one/setup.py
@@ -1,4 +1,7 @@
-from setuptools import setup, find_packages
+from __future__ import annotations
+
+from setuptools import find_packages
+from setuptools import setup
 
 
 setup(
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/no-dependencies/demo.egg-info/PKG-INFO b/vendor/poetry/tests/fixtures/git/github.com/demo/no-dependencies/demo.egg-info/PKG-INFO
index 7ba97897..f25539da 100644
--- a/vendor/poetry/tests/fixtures/git/github.com/demo/no-dependencies/demo.egg-info/PKG-INFO
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/no-dependencies/demo.egg-info/PKG-INFO
@@ -6,5 +6,7 @@ Home-page: https://github.com/demo/demo
 Author: Sébastien Eustace
 Author-email: sebastien@eustace.io
 License: MIT
-Description: UNKNOWN
 Platform: UNKNOWN
+
+UNKNOWN
+
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/no-dependencies/demo.egg-info/SOURCES.txt b/vendor/poetry/tests/fixtures/git/github.com/demo/no-dependencies/demo.egg-info/SOURCES.txt
index e19f4a4e..82ad0111 100644
--- a/vendor/poetry/tests/fixtures/git/github.com/demo/no-dependencies/demo.egg-info/SOURCES.txt
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/no-dependencies/demo.egg-info/SOURCES.txt
@@ -3,4 +3,4 @@ demo/__init__.py
 demo.egg-info/PKG-INFO
 demo.egg-info/SOURCES.txt
 demo.egg-info/dependency_links.txt
-demo.egg-info/top_level.txt
+demo.egg-info/top_level.txt
\ No newline at end of file
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/no-dependencies/demo.egg-info/dependency_links.txt b/vendor/poetry/tests/fixtures/git/github.com/demo/no-dependencies/demo.egg-info/dependency_links.txt
index e69de29b..8b137891 100644
--- a/vendor/poetry/tests/fixtures/git/github.com/demo/no-dependencies/demo.egg-info/dependency_links.txt
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/no-dependencies/demo.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/no-dependencies/demo/__init__.py b/vendor/poetry/tests/fixtures/git/github.com/demo/no-dependencies/demo/__init__.py
index 10aa336c..54887185 100644
--- a/vendor/poetry/tests/fixtures/git/github.com/demo/no-dependencies/demo/__init__.py
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/no-dependencies/demo/__init__.py
@@ -1 +1,4 @@
+from __future__ import annotations
+
+
 __version__ = "1.2.3"
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/no-dependencies/setup.py b/vendor/poetry/tests/fixtures/git/github.com/demo/no-dependencies/setup.py
index da86b53b..60da31e8 100644
--- a/vendor/poetry/tests/fixtures/git/github.com/demo/no-dependencies/setup.py
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/no-dependencies/setup.py
@@ -1,4 +1,5 @@
-# -*- coding: utf-8 -*-
+from __future__ import annotations
+
 from setuptools import setup
 
 
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/no-version/demo/__init__.py b/vendor/poetry/tests/fixtures/git/github.com/demo/no-version/demo/__init__.py
index 10aa336c..54887185 100644
--- a/vendor/poetry/tests/fixtures/git/github.com/demo/no-version/demo/__init__.py
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/no-version/demo/__init__.py
@@ -1 +1,4 @@
+from __future__ import annotations
+
+
 __version__ = "1.2.3"
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/no-version/setup.py b/vendor/poetry/tests/fixtures/git/github.com/demo/no-version/setup.py
index 4e1aea30..e85c711c 100644
--- a/vendor/poetry/tests/fixtures/git/github.com/demo/no-version/setup.py
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/no-version/setup.py
@@ -1,4 +1,5 @@
-# -*- coding: utf-8 -*-
+from __future__ import annotations
+
 import ast
 import os
 
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/non-canonical-name/Demo.egg-info/PKG-INFO b/vendor/poetry/tests/fixtures/git/github.com/demo/non-canonical-name/Demo.egg-info/PKG-INFO
index 996589ec..662e9141 100644
--- a/vendor/poetry/tests/fixtures/git/github.com/demo/non-canonical-name/Demo.egg-info/PKG-INFO
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/non-canonical-name/Demo.egg-info/PKG-INFO
@@ -6,7 +6,9 @@ Home-page: https://github.com/demo/demo
 Author: Sébastien Eustace
 Author-email: sebastien@eustace.io
 License: MIT
-Description: UNKNOWN
 Platform: UNKNOWN
 Provides-Extra: foo
 Provides-Extra: bar
+
+UNKNOWN
+
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/non-canonical-name/Demo.egg-info/SOURCES.txt b/vendor/poetry/tests/fixtures/git/github.com/demo/non-canonical-name/Demo.egg-info/SOURCES.txt
index 8134b008..a435a717 100644
--- a/vendor/poetry/tests/fixtures/git/github.com/demo/non-canonical-name/Demo.egg-info/SOURCES.txt
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/non-canonical-name/Demo.egg-info/SOURCES.txt
@@ -4,4 +4,4 @@ Demo.egg-info/SOURCES.txt
 Demo.egg-info/dependency_links.txt
 Demo.egg-info/requires.txt
 Demo.egg-info/top_level.txt
-demo/__init__.py
+demo/__init__.py
\ No newline at end of file
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/non-canonical-name/Demo.egg-info/dependency_links.txt b/vendor/poetry/tests/fixtures/git/github.com/demo/non-canonical-name/Demo.egg-info/dependency_links.txt
index e69de29b..8b137891 100644
--- a/vendor/poetry/tests/fixtures/git/github.com/demo/non-canonical-name/Demo.egg-info/dependency_links.txt
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/non-canonical-name/Demo.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/non-canonical-name/demo/__init__.py b/vendor/poetry/tests/fixtures/git/github.com/demo/non-canonical-name/demo/__init__.py
index 10aa336c..54887185 100644
--- a/vendor/poetry/tests/fixtures/git/github.com/demo/non-canonical-name/demo/__init__.py
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/non-canonical-name/demo/__init__.py
@@ -1 +1,4 @@
+from __future__ import annotations
+
+
 __version__ = "1.2.3"
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/non-canonical-name/setup.py b/vendor/poetry/tests/fixtures/git/github.com/demo/non-canonical-name/setup.py
index 3e6da62e..f2688f6e 100644
--- a/vendor/poetry/tests/fixtures/git/github.com/demo/non-canonical-name/setup.py
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/non-canonical-name/setup.py
@@ -1,4 +1,5 @@
-# -*- coding: utf-8 -*-
+from __future__ import annotations
+
 from setuptools import setup
 
 
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/poetry-plugin/poetry_plugin/__init__.py b/vendor/poetry/tests/fixtures/git/github.com/demo/poetry-plugin/poetry_plugin/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/poetry-plugin/pyproject.toml b/vendor/poetry/tests/fixtures/git/github.com/demo/poetry-plugin/pyproject.toml
new file mode 100644
index 00000000..b45d9d97
--- /dev/null
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/poetry-plugin/pyproject.toml
@@ -0,0 +1,18 @@
+[tool.poetry]
+name = "poetry-plugin"
+version = "0.1.2"
+description = "Some description."
+authors = [
+    "Sébastien Eustace "
+]
+license = "MIT"
+
+[tool.poetry.dependencies]
+python = "^3.6"
+pendulum = "^2.0"
+tomlkit = {version = "^0.7.0", optional = true}
+
+[tool.poetry.extras]
+foo = ["tomlkit"]
+
+[tool.poetry.dev-dependencies]
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/poetry-plugin2/subdir/poetry_plugin/__init__.py b/vendor/poetry/tests/fixtures/git/github.com/demo/poetry-plugin2/subdir/poetry_plugin/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/poetry-plugin2/subdir/pyproject.toml b/vendor/poetry/tests/fixtures/git/github.com/demo/poetry-plugin2/subdir/pyproject.toml
new file mode 100644
index 00000000..b45d9d97
--- /dev/null
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/poetry-plugin2/subdir/pyproject.toml
@@ -0,0 +1,18 @@
+[tool.poetry]
+name = "poetry-plugin"
+version = "0.1.2"
+description = "Some description."
+authors = [
+    "Sébastien Eustace "
+]
+license = "MIT"
+
+[tool.poetry.dependencies]
+python = "^3.6"
+pendulum = "^2.0"
+tomlkit = {version = "^0.7.0", optional = true}
+
+[tool.poetry.extras]
+foo = ["tomlkit"]
+
+[tool.poetry.dev-dependencies]
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/subdirectories/one-copy/one/__init__.py b/vendor/poetry/tests/fixtures/git/github.com/demo/subdirectories/one-copy/one/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/subdirectories/one-copy/pyproject.toml b/vendor/poetry/tests/fixtures/git/github.com/demo/subdirectories/one-copy/pyproject.toml
new file mode 100644
index 00000000..1548c3a3
--- /dev/null
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/subdirectories/one-copy/pyproject.toml
@@ -0,0 +1,13 @@
+[tool.poetry]
+name = "one"
+version = "1.0.0"
+description = "Some description."
+authors = []
+license = "MIT"
+
+[tool.poetry.dependencies]
+python = "^3.7"
+
+[build-system]
+requires = ["poetry-core>=1.0.0"]
+build-backend = "poetry.core.masonry.api"
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/subdirectories/one/one/__init__.py b/vendor/poetry/tests/fixtures/git/github.com/demo/subdirectories/one/one/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/subdirectories/one/pyproject.toml b/vendor/poetry/tests/fixtures/git/github.com/demo/subdirectories/one/pyproject.toml
new file mode 100644
index 00000000..1548c3a3
--- /dev/null
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/subdirectories/one/pyproject.toml
@@ -0,0 +1,13 @@
+[tool.poetry]
+name = "one"
+version = "1.0.0"
+description = "Some description."
+authors = []
+license = "MIT"
+
+[tool.poetry.dependencies]
+python = "^3.7"
+
+[build-system]
+requires = ["poetry-core>=1.0.0"]
+build-backend = "poetry.core.masonry.api"
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/subdirectories/two/pyproject.toml b/vendor/poetry/tests/fixtures/git/github.com/demo/subdirectories/two/pyproject.toml
new file mode 100644
index 00000000..6a54d893
--- /dev/null
+++ b/vendor/poetry/tests/fixtures/git/github.com/demo/subdirectories/two/pyproject.toml
@@ -0,0 +1,13 @@
+[tool.poetry]
+name = "two"
+version = "2.0.0"
+description = "Some description."
+authors = []
+license = "MIT"
+
+[tool.poetry.dependencies]
+python = "~2.7 || ^3.4"
+
+[build-system]
+requires = ["poetry-core>=1.0.0"]
+build-backend = "poetry.core.masonry.api"
diff --git a/vendor/poetry/tests/fixtures/git/github.com/demo/subdirectories/two/two/__init__.py b/vendor/poetry/tests/fixtures/git/github.com/demo/subdirectories/two/two/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/tests/fixtures/no_name_project/README.rst b/vendor/poetry/tests/fixtures/no_name_project/README.rst
new file mode 100644
index 00000000..8b194670
--- /dev/null
+++ b/vendor/poetry/tests/fixtures/no_name_project/README.rst
@@ -0,0 +1,2 @@
+No name project
+===============
diff --git a/vendor/poetry/tests/fixtures/no_name_project/pyproject.toml b/vendor/poetry/tests/fixtures/no_name_project/pyproject.toml
new file mode 100644
index 00000000..f18fa403
--- /dev/null
+++ b/vendor/poetry/tests/fixtures/no_name_project/pyproject.toml
@@ -0,0 +1,18 @@
+[tool.poetry]
+name = ""
+version = "1.2.3"
+description = "This project has no name"
+authors = [
+    "Sébastien Eustace "
+]
+license = "MIT"
+
+readme = "README.rst"
+
+
+# Requirements
+[tool.poetry.dependencies]
+python = "~2.7 || ^3.6"
+
+[tool.poetry.group.dev.dependencies]
+pytest = "~3.4"
diff --git a/vendor/poetry/tests/fixtures/old_lock/poetry.lock b/vendor/poetry/tests/fixtures/old_lock/poetry.lock
index 57d58570..498df2ed 100644
--- a/vendor/poetry/tests/fixtures/old_lock/poetry.lock
+++ b/vendor/poetry/tests/fixtures/old_lock/poetry.lock
@@ -1,153 +1,19 @@
 [[package]]
 category = "main"
-description = "Python package for providing Mozilla's CA Bundle."
-name = "certifi"
-optional = false
-python-versions = "*"
-version = "2020.6.20"
-
-[[package]]
-category = "main"
-description = "Universal encoding detector for Python 2 and 3"
-name = "chardet"
-optional = false
-python-versions = "*"
-version = "3.0.4"
-
-[[package]]
-category = "main"
-description = "A Python library for the Docker Engine API."
-name = "docker"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-version = "4.3.1"
-
-[package.dependencies]
-pywin32 = "227"
-requests = ">=2.14.2,<2.18.0 || >2.18.0"
-six = ">=1.4.0"
-websocket-client = ">=0.32.0"
-
-[package.extras]
-ssh = ["paramiko (>=2.4.2)"]
-tls = ["pyOpenSSL (>=17.5.0)", "cryptography (>=1.3.4)", "idna (>=2.0.0)"]
-
-[[package]]
-category = "main"
-description = "Internationalized Domain Names in Applications (IDNA)"
-name = "idna"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-version = "2.10"
-
-[[package]]
-category = "main"
-description = "Python for Window Extensions"
-marker = "sys_platform == \"win32\""
-name = "pywin32"
-optional = false
-python-versions = "*"
-version = "227"
-
-[[package]]
-category = "main"
-description = "Python HTTP for Humans."
-name = "requests"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-version = "2.24.0"
-
-[package.dependencies]
-certifi = ">=2017.4.17"
-chardet = ">=3.0.2,<4"
-idna = ">=2.5,<3"
-urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26"
-
-[package.extras]
-security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"]
-socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"]
-
-[[package]]
-category = "main"
-description = "Python 2 and 3 compatibility utilities"
-name = "six"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
-version = "1.15.0"
-
-[[package]]
-category = "main"
-description = "HTTP library with thread-safe connection pooling, file post, and more."
-name = "urllib3"
+description = "A sample Python project"
+name = "sampleproject"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
-version = "1.25.10"
-
-[package.extras]
-brotli = ["brotlipy (>=0.6.0)"]
-secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=0.14)", "ipaddress"]
-socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"]
-
-[[package]]
-category = "main"
-description = "WebSocket client for Python. hybi13 is supported."
-name = "websocket-client"
-optional = false
-python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-version = "0.57.0"
-
-[package.dependencies]
-six = "*"
+version = "1.3.1"
 
 [metadata]
-content-hash = "bb4c2f3c089b802c1930b6acbeed04711d93e9cdfd9a003eb17518a6d9f350c6"
+content-hash = "c8c2c9d899e47bac3972e029ef0e71b75d5df98a28eebef25a75640a19aac177"
 lock-version = "1.0"
 python-versions = "^3.8"
 
 [metadata.files]
-certifi = [
-    {file = "certifi-2020.6.20-py2.py3-none-any.whl", hash = "sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41"},
-    {file = "certifi-2020.6.20.tar.gz", hash = "sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3"},
-]
-chardet = [
-    {file = "chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"},
-    {file = "chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"},
-]
-docker = [
-    {file = "docker-4.3.1-py2.py3-none-any.whl", hash = "sha256:13966471e8bc23b36bfb3a6fb4ab75043a5ef1dac86516274777576bed3b9828"},
-    {file = "docker-4.3.1.tar.gz", hash = "sha256:bad94b8dd001a8a4af19ce4becc17f41b09f228173ffe6a4e0355389eef142f2"},
-]
-idna = [
-    {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"},
-    {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"},
-]
-pywin32 = [
-    {file = "pywin32-227-cp27-cp27m-win32.whl", hash = "sha256:371fcc39416d736401f0274dd64c2302728c9e034808e37381b5e1b22be4a6b0"},
-    {file = "pywin32-227-cp27-cp27m-win_amd64.whl", hash = "sha256:4cdad3e84191194ea6d0dd1b1b9bdda574ff563177d2adf2b4efec2a244fa116"},
-    {file = "pywin32-227-cp35-cp35m-win32.whl", hash = "sha256:f4c5be1a293bae0076d93c88f37ee8da68136744588bc5e2be2f299a34ceb7aa"},
-    {file = "pywin32-227-cp35-cp35m-win_amd64.whl", hash = "sha256:a929a4af626e530383a579431b70e512e736e9588106715215bf685a3ea508d4"},
-    {file = "pywin32-227-cp36-cp36m-win32.whl", hash = "sha256:300a2db938e98c3e7e2093e4491439e62287d0d493fe07cce110db070b54c0be"},
-    {file = "pywin32-227-cp36-cp36m-win_amd64.whl", hash = "sha256:9b31e009564fb95db160f154e2aa195ed66bcc4c058ed72850d047141b36f3a2"},
-    {file = "pywin32-227-cp37-cp37m-win32.whl", hash = "sha256:47a3c7551376a865dd8d095a98deba954a98f326c6fe3c72d8726ca6e6b15507"},
-    {file = "pywin32-227-cp37-cp37m-win_amd64.whl", hash = "sha256:31f88a89139cb2adc40f8f0e65ee56a8c585f629974f9e07622ba80199057511"},
-    {file = "pywin32-227-cp38-cp38-win32.whl", hash = "sha256:7f18199fbf29ca99dff10e1f09451582ae9e372a892ff03a28528a24d55875bc"},
-    {file = "pywin32-227-cp38-cp38-win_amd64.whl", hash = "sha256:7c1ae32c489dc012930787f06244426f8356e129184a02c25aef163917ce158e"},
-    {file = "pywin32-227-cp39-cp39-win32.whl", hash = "sha256:c054c52ba46e7eb6b7d7dfae4dbd987a1bb48ee86debe3f245a2884ece46e295"},
-    {file = "pywin32-227-cp39-cp39-win_amd64.whl", hash = "sha256:f27cec5e7f588c3d1051651830ecc00294f90728d19c3bf6916e6dba93ea357c"},
-]
-requests = [
-    {file = "requests-2.24.0-py2.py3-none-any.whl", hash = "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898"},
-    {file = "requests-2.24.0.tar.gz", hash = "sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b"},
-]
-six = [
-    {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"},
-    {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"},
-]
-urllib3 = [
-    {file = "urllib3-1.25.10-py2.py3-none-any.whl", hash = "sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461"},
-    {file = "urllib3-1.25.10.tar.gz", hash = "sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a"},
-]
-websocket-client = [
-    {file = "websocket_client-0.57.0-py2.py3-none-any.whl", hash = "sha256:0fc45c961324d79c781bab301359d5a1b00b13ad1b10415a4780229ef71a5549"},
-    {file = "websocket_client-0.57.0.tar.gz", hash = "sha256:d735b91d6d1692a6a181f2a8c9e0238e5f6373356f561bb9dc4c7af36f452010"},
+sampleproject = [
+    {file = "sampleproject-1.3.1-py2.py3-none-any.whl", hash = "sha256:26c9172e08244873b0e09c574a229bf2c251c67723a05e08fd3ec0c5ee423796"},
+    {file = "sampleproject-1.3.1-py3-none-any.whl", hash = "sha256:75bb5bb4e74a1b77dc0cff25ebbacb54fe1318aaf99a86a036cefc86ed885ced"},
+    {file = "sampleproject-1.3.1.tar.gz", hash = "sha256:3593ca2f1e057279d70d6144b14472fb28035b1da213dde60906b703d6f82c55"},
 ]
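
> Editor's note: the rewritten old_lock fixture deliberately keeps `lock-version = "1.0"` — the tests need a lock file in the legacy format. A hedged sketch of how such a lock can be recognized from the [metadata] table shown above (the lock file is plain TOML; tomllib ships with Python 3.11+):

```python
# Sketch: recognizing a legacy lock file from its [metadata] table.
import tomllib  # Python 3.11+; assumption for this sketch

with open("poetry.lock", "rb") as f:
    lock = tomllib.load(f)

major, minor = (int(part) for part in lock["metadata"]["lock-version"].split("."))
if (major, minor) < (1, 1):
    print("legacy lock format; a newer Poetry will want to re-lock")
```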
diff --git a/vendor/poetry/tests/fixtures/old_lock/pyproject.toml b/vendor/poetry/tests/fixtures/old_lock/pyproject.toml
index 56ea6350..377aa676 100644
--- a/vendor/poetry/tests/fixtures/old_lock/pyproject.toml
+++ b/vendor/poetry/tests/fixtures/old_lock/pyproject.toml
@@ -6,7 +6,7 @@ authors = ["Poetry Developer "]
 
 [tool.poetry.dependencies]
 python = "^3.8"
-docker = "^4.3.1"
+sampleproject = ">=1.3.1"
 
 [tool.poetry.dev-dependencies]
 
diff --git a/vendor/poetry/tests/fixtures/outdated_lock/poetry.lock b/vendor/poetry/tests/fixtures/outdated_lock/poetry.lock
new file mode 100644
index 00000000..1d950ca7
--- /dev/null
+++ b/vendor/poetry/tests/fixtures/outdated_lock/poetry.lock
@@ -0,0 +1,152 @@
+[[package]]
+name = "certifi"
+version = "2020.6.20"
+description = "Python package for providing Mozilla's CA Bundle."
+category = "main"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "chardet"
+version = "3.0.4"
+description = "Universal encoding detector for Python 2 and 3"
+category = "main"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "docker"
+version = "4.3.0"
+description = "A Python library for the Docker Engine API."
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+
+[package.dependencies]
+pywin32 = {version = "227", markers = "sys_platform == \"win32\""}
+requests = ">=2.14.2,<2.18.0 || >2.18.0"
+six = ">=1.4.0"
+websocket-client = ">=0.32.0"
+
+[package.extras]
+ssh = ["paramiko (>=2.4.2)"]
+tls = ["pyOpenSSL (>=17.5.0)", "cryptography (>=1.3.4)", "idna (>=2.0.0)"]
+
+[[package]]
+name = "idna"
+version = "2.10"
+description = "Internationalized Domain Names in Applications (IDNA)"
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
+[[package]]
+name = "pywin32"
+version = "227"
+description = "Python for Window Extensions"
+category = "main"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "requests"
+version = "2.24.0"
+description = "Python HTTP for Humans."
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+
+[package.dependencies]
+certifi = ">=2017.4.17"
+chardet = ">=3.0.2,<4"
+idna = ">=2.5,<3"
+urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26"
+
+[package.extras]
+security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"]
+socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"]
+
+[[package]]
+name = "six"
+version = "1.15.0"
+description = "Python 2 and 3 compatibility utilities"
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+
+[[package]]
+name = "urllib3"
+version = "1.25.10"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
+
+[package.extras]
+brotli = ["brotlipy (>=0.6.0)"]
+secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=0.14)", "ipaddress"]
+socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"]
+
+[[package]]
+name = "websocket-client"
+version = "0.57.0"
+description = "WebSocket client for Python. hybi13 is supported."
+category = "main"
+optional = false
+python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
+[package.dependencies]
+six = "*"
+
+[metadata]
+lock-version = "1.1"
+python-versions = "^3.8"
+content-hash = "2f47de5e052dabeff3c1362d3a37b5cfcaf9bbe9d9ce1681207e72ca1f4dab55"
+
+[metadata.files]
+certifi = [
+    {file = "certifi-2020.6.20-py2.py3-none-any.whl", hash = "sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41"},
+    {file = "certifi-2020.6.20.tar.gz", hash = "sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3"},
+]
+chardet = [
+    {file = "chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"},
+    {file = "chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"},
+]
+docker = [
+    {file = "docker-4.3.0-py2.py3-none-any.whl", hash = "sha256:ba118607b0ba6bfc1b236ec32019a355c47b5d012d01d976467d4692ef443929"},
+    {file = "docker-4.3.0.tar.gz", hash = "sha256:431a268f2caf85aa30613f9642da274c62f6ee8bae7d70d968e01529f7d6af93"},
+]
+idna = [
+    {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"},
+    {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"},
+]
+pywin32 = [
+    {file = "pywin32-227-cp27-cp27m-win32.whl", hash = "sha256:371fcc39416d736401f0274dd64c2302728c9e034808e37381b5e1b22be4a6b0"},
+    {file = "pywin32-227-cp27-cp27m-win_amd64.whl", hash = "sha256:4cdad3e84191194ea6d0dd1b1b9bdda574ff563177d2adf2b4efec2a244fa116"},
+    {file = "pywin32-227-cp35-cp35m-win32.whl", hash = "sha256:f4c5be1a293bae0076d93c88f37ee8da68136744588bc5e2be2f299a34ceb7aa"},
+    {file = "pywin32-227-cp35-cp35m-win_amd64.whl", hash = "sha256:a929a4af626e530383a579431b70e512e736e9588106715215bf685a3ea508d4"},
+    {file = "pywin32-227-cp36-cp36m-win32.whl", hash = "sha256:300a2db938e98c3e7e2093e4491439e62287d0d493fe07cce110db070b54c0be"},
+    {file = "pywin32-227-cp36-cp36m-win_amd64.whl", hash = "sha256:9b31e009564fb95db160f154e2aa195ed66bcc4c058ed72850d047141b36f3a2"},
+    {file = "pywin32-227-cp37-cp37m-win32.whl", hash = "sha256:47a3c7551376a865dd8d095a98deba954a98f326c6fe3c72d8726ca6e6b15507"},
+    {file = "pywin32-227-cp37-cp37m-win_amd64.whl", hash = "sha256:31f88a89139cb2adc40f8f0e65ee56a8c585f629974f9e07622ba80199057511"},
+    {file = "pywin32-227-cp38-cp38-win32.whl", hash = "sha256:7f18199fbf29ca99dff10e1f09451582ae9e372a892ff03a28528a24d55875bc"},
+    {file = "pywin32-227-cp38-cp38-win_amd64.whl", hash = "sha256:7c1ae32c489dc012930787f06244426f8356e129184a02c25aef163917ce158e"},
+    {file = "pywin32-227-cp39-cp39-win32.whl", hash = "sha256:c054c52ba46e7eb6b7d7dfae4dbd987a1bb48ee86debe3f245a2884ece46e295"},
+    {file = "pywin32-227-cp39-cp39-win_amd64.whl", hash = "sha256:f27cec5e7f588c3d1051651830ecc00294f90728d19c3bf6916e6dba93ea357c"},
+]
+requests = [
+    {file = "requests-2.24.0-py2.py3-none-any.whl", hash = "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898"},
+    {file = "requests-2.24.0.tar.gz", hash = "sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b"},
+]
+six = [
+    {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"},
+    {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"},
+]
+urllib3 = [
+    {file = "urllib3-1.25.10-py2.py3-none-any.whl", hash = "sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461"},
+    {file = "urllib3-1.25.10.tar.gz", hash = "sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a"},
+]
+websocket-client = [
+    {file = "websocket_client-0.57.0-py2.py3-none-any.whl", hash = "sha256:0fc45c961324d79c781bab301359d5a1b00b13ad1b10415a4780229ef71a5549"},
+    {file = "websocket_client-0.57.0.tar.gz", hash = "sha256:d735b91d6d1692a6a181f2a8c9e0238e5f6373356f561bb9dc4c7af36f452010"},
+]
diff --git a/vendor/poetry/tests/fixtures/outdated_lock/pyproject.toml b/vendor/poetry/tests/fixtures/outdated_lock/pyproject.toml
new file mode 100644
index 00000000..55514760
--- /dev/null
+++ b/vendor/poetry/tests/fixtures/outdated_lock/pyproject.toml
@@ -0,0 +1,15 @@
+[tool.poetry]
+name = "foobar"
+version = "0.1.0"
+description = ""
+authors = ["Poetry Developer "]
+
+[tool.poetry.dependencies]
+python = "^3.8"
+docker = "4.3.1"
+
+[tool.poetry.dev-dependencies]
+
+[build-system]
+requires = ["poetry-core>=1.0.0"]
+build-backend = "poetry.core.masonry.api"
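
> Editor's note: what makes this fixture "outdated" is the mismatch between its two files — the lock records docker 4.3.0 while the pyproject.toml above pins exactly 4.3.1 (the up_to_date_lock fixture later in this patch locks 4.3.1 against the same pyproject). A small sketch of the comparison, again assuming tomllib from Python 3.11+:

```python
# Sketch: the staleness check this fixture exercises.
import tomllib

with open("pyproject.toml", "rb") as f:
    required = tomllib.load(f)["tool"]["poetry"]["dependencies"]["docker"]

with open("poetry.lock", "rb") as f:
    locked = {p["name"]: p["version"] for p in tomllib.load(f)["package"]}

print(required)          # 4.3.1
print(locked["docker"])  # 4.3.0 -> the lock no longer matches the project
```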
diff --git a/vendor/poetry/tests/fixtures/project_with_setup/setup.py b/vendor/poetry/tests/fixtures/project_with_setup/setup.py
index 0f9e0d09..c4b84259 100644
--- a/vendor/poetry/tests/fixtures/project_with_setup/setup.py
+++ b/vendor/poetry/tests/fixtures/project_with_setup/setup.py
@@ -1,4 +1,4 @@
-# -*- coding: utf-8 -*-
+from __future__ import annotations
 
 from setuptools import setup
 
diff --git a/vendor/poetry/tests/fixtures/sample_project/pyproject.toml b/vendor/poetry/tests/fixtures/sample_project/pyproject.toml
index aff10e12..5a5c1356 100644
--- a/vendor/poetry/tests/fixtures/sample_project/pyproject.toml
+++ b/vendor/poetry/tests/fixtures/sample_project/pyproject.toml
@@ -46,7 +46,7 @@ functools32 = { version = "^3.2.3", markers = "python_version ~= '2.7' and sys_p
 [tool.poetry.extras]
 db = [ "orator" ]
 
-[tool.poetry.dev-dependencies]
+[tool.poetry.group.dev.dependencies]
 pytest = "~3.4"
 
 
diff --git a/vendor/poetry/tests/fixtures/simple_project/pyproject.toml b/vendor/poetry/tests/fixtures/simple_project/pyproject.toml
index 0fd938e4..45a61d43 100644
--- a/vendor/poetry/tests/fixtures/simple_project/pyproject.toml
+++ b/vendor/poetry/tests/fixtures/simple_project/pyproject.toml
@@ -7,7 +7,7 @@ authors = [
 ]
 license = "MIT"
 
-readme = "README.rst"
+readme = ["README.rst"]
 
 homepage = "https://python-poetry.org"
 repository = "https://github.com/python-poetry/poetry"
@@ -28,3 +28,8 @@ python = "~2.7 || ^3.4"
 foo = "foo:bar"
 baz = "bar:baz.boom.bim"
 fox = "fuz.foo:bar.baz"
+
+
+[build-system]
+requires = ["poetry-core>=1.1.0a7"]
+build-backend = "poetry.core.masonry.api"
diff --git a/vendor/poetry/tests/fixtures/up_to_date_lock/poetry.lock b/vendor/poetry/tests/fixtures/up_to_date_lock/poetry.lock
new file mode 100644
index 00000000..a896b5d0
--- /dev/null
+++ b/vendor/poetry/tests/fixtures/up_to_date_lock/poetry.lock
@@ -0,0 +1,152 @@
+[[package]]
+name = "certifi"
+version = "2020.12.5"
+description = "Python package for providing Mozilla's CA Bundle."
+category = "main"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "chardet"
+version = "4.0.0"
+description = "Universal encoding detector for Python 2 and 3"
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+
+[[package]]
+name = "docker"
+version = "4.3.1"
+description = "A Python library for the Docker Engine API."
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+
+[package.dependencies]
+pywin32 = {version = "227", markers = "sys_platform == \"win32\""}
+requests = ">=2.14.2,<2.18.0 || >2.18.0"
+six = ">=1.4.0"
+websocket-client = ">=0.32.0"
+
+[package.extras]
+ssh = ["paramiko (>=2.4.2)"]
+tls = ["pyOpenSSL (>=17.5.0)", "cryptography (>=1.3.4)", "idna (>=2.0.0)"]
+
+[[package]]
+name = "idna"
+version = "2.10"
+description = "Internationalized Domain Names in Applications (IDNA)"
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
+[[package]]
+name = "pywin32"
+version = "227"
+description = "Python for Window Extensions"
+category = "main"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "requests"
+version = "2.25.1"
+description = "Python HTTP for Humans."
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+
+[package.dependencies]
+certifi = ">=2017.4.17"
+chardet = ">=3.0.2,<5"
+idna = ">=2.5,<3"
+urllib3 = ">=1.21.1,<1.27"
+
+[package.extras]
+security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"]
+socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
+
+[[package]]
+name = "six"
+version = "1.15.0"
+description = "Python 2 and 3 compatibility utilities"
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+
+[[package]]
+name = "urllib3"
+version = "1.26.3"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
+
+[package.extras]
+brotli = ["brotlipy (>=0.6.0)"]
+secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
+socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
+
+[[package]]
+name = "websocket-client"
+version = "0.58.0"
+description = "WebSocket client for Python with low level API options"
+category = "main"
+optional = false
+python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
+[package.dependencies]
+six = "*"
+
+[metadata]
+lock-version = "1.1"
+python-versions = "^3.8"
+content-hash = "0cd068218f235c162f7b74bc8faf4ce3387b82daee1c1bb7a97af034f27ee116"
+
+[metadata.files]
+certifi = [
+    {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"},
+    {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"},
+]
+chardet = [
+    {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"},
+    {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"},
+]
+docker = [
+    {file = "docker-4.3.1-py2.py3-none-any.whl", hash = "sha256:13966471e8bc23b36bfb3a6fb4ab75043a5ef1dac86516274777576bed3b9828"},
+    {file = "docker-4.3.1.tar.gz", hash = "sha256:bad94b8dd001a8a4af19ce4becc17f41b09f228173ffe6a4e0355389eef142f2"},
+]
+idna = [
+    {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"},
+    {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"},
+]
+pywin32 = [
+    {file = "pywin32-227-cp27-cp27m-win32.whl", hash = "sha256:371fcc39416d736401f0274dd64c2302728c9e034808e37381b5e1b22be4a6b0"},
+    {file = "pywin32-227-cp27-cp27m-win_amd64.whl", hash = "sha256:4cdad3e84191194ea6d0dd1b1b9bdda574ff563177d2adf2b4efec2a244fa116"},
+    {file = "pywin32-227-cp35-cp35m-win32.whl", hash = "sha256:f4c5be1a293bae0076d93c88f37ee8da68136744588bc5e2be2f299a34ceb7aa"},
+    {file = "pywin32-227-cp35-cp35m-win_amd64.whl", hash = "sha256:a929a4af626e530383a579431b70e512e736e9588106715215bf685a3ea508d4"},
+    {file = "pywin32-227-cp36-cp36m-win32.whl", hash = "sha256:300a2db938e98c3e7e2093e4491439e62287d0d493fe07cce110db070b54c0be"},
+    {file = "pywin32-227-cp36-cp36m-win_amd64.whl", hash = "sha256:9b31e009564fb95db160f154e2aa195ed66bcc4c058ed72850d047141b36f3a2"},
+    {file = "pywin32-227-cp37-cp37m-win32.whl", hash = "sha256:47a3c7551376a865dd8d095a98deba954a98f326c6fe3c72d8726ca6e6b15507"},
+    {file = "pywin32-227-cp37-cp37m-win_amd64.whl", hash = "sha256:31f88a89139cb2adc40f8f0e65ee56a8c585f629974f9e07622ba80199057511"},
+    {file = "pywin32-227-cp38-cp38-win32.whl", hash = "sha256:7f18199fbf29ca99dff10e1f09451582ae9e372a892ff03a28528a24d55875bc"},
+    {file = "pywin32-227-cp38-cp38-win_amd64.whl", hash = "sha256:7c1ae32c489dc012930787f06244426f8356e129184a02c25aef163917ce158e"},
+    {file = "pywin32-227-cp39-cp39-win32.whl", hash = "sha256:c054c52ba46e7eb6b7d7dfae4dbd987a1bb48ee86debe3f245a2884ece46e295"},
+    {file = "pywin32-227-cp39-cp39-win_amd64.whl", hash = "sha256:f27cec5e7f588c3d1051651830ecc00294f90728d19c3bf6916e6dba93ea357c"},
+]
+requests = [
+    {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"},
+    {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"},
+]
+six = [
+    {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"},
+    {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"},
+]
+urllib3 = [
+    {file = "urllib3-1.26.3-py2.py3-none-any.whl", hash = "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80"},
+    {file = "urllib3-1.26.3.tar.gz", hash = "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73"},
+]
+websocket-client = [
+    {file = "websocket_client-0.58.0-py2.py3-none-any.whl", hash = "sha256:44b5df8f08c74c3d82d28100fdc81f4536809ce98a17f0757557813275fbb663"},
+    {file = "websocket_client-0.58.0.tar.gz", hash = "sha256:63509b41d158ae5b7f67eb4ad20fecbb4eee99434e73e140354dc3ff8e09716f"},
+]
diff --git a/vendor/poetry/tests/fixtures/up_to_date_lock/pyproject.toml b/vendor/poetry/tests/fixtures/up_to_date_lock/pyproject.toml
new file mode 100644
index 00000000..55514760
--- /dev/null
+++ b/vendor/poetry/tests/fixtures/up_to_date_lock/pyproject.toml
@@ -0,0 +1,15 @@
+[tool.poetry]
+name = "foobar"
+version = "0.1.0"
+description = ""
+authors = ["Poetry Developer "]
+
+[tool.poetry.dependencies]
+python = "^3.8"
+docker = "4.3.1"
+
+[tool.poetry.dev-dependencies]
+
+[build-system]
+requires = ["poetry-core>=1.0.0"]
+build-backend = "poetry.core.masonry.api"
diff --git a/vendor/poetry/tests/fixtures/with-include/package_with_include/__init__.py b/vendor/poetry/tests/fixtures/with-include/package_with_include/__init__.py
index 10aa336c..54887185 100644
--- a/vendor/poetry/tests/fixtures/with-include/package_with_include/__init__.py
+++ b/vendor/poetry/tests/fixtures/with-include/package_with_include/__init__.py
@@ -1 +1,4 @@
+from __future__ import annotations
+
+
 __version__ = "1.2.3"
diff --git a/vendor/poetry/tests/fixtures/with_conditional_path_deps/demo_one/pyproject.toml b/vendor/poetry/tests/fixtures/with_conditional_path_deps/demo_one/pyproject.toml
new file mode 100644
index 00000000..85dab468
--- /dev/null
+++ b/vendor/poetry/tests/fixtures/with_conditional_path_deps/demo_one/pyproject.toml
@@ -0,0 +1,9 @@
+[tool.poetry]
+name = "demo"
+version = "1.2.3"
+description = "Some description."
+authors = []
+license = "MIT"
+
+[tool.poetry.dependencies]
+python = "^3.7"
diff --git a/vendor/poetry/tests/fixtures/with_conditional_path_deps/demo_two/pyproject.toml b/vendor/poetry/tests/fixtures/with_conditional_path_deps/demo_two/pyproject.toml
new file mode 100644
index 00000000..85dab468
--- /dev/null
+++ b/vendor/poetry/tests/fixtures/with_conditional_path_deps/demo_two/pyproject.toml
@@ -0,0 +1,9 @@
+[tool.poetry]
+name = "demo"
+version = "1.2.3"
+description = "Some description."
+authors = []
+license = "MIT"
+
+[tool.poetry.dependencies]
+python = "^3.7"
diff --git a/vendor/poetry/tests/fixtures/with_conditional_path_deps/pyproject.toml b/vendor/poetry/tests/fixtures/with_conditional_path_deps/pyproject.toml
new file mode 100644
index 00000000..1e2ea754
--- /dev/null
+++ b/vendor/poetry/tests/fixtures/with_conditional_path_deps/pyproject.toml
@@ -0,0 +1,13 @@
+[tool.poetry]
+name = "sample"
+version = "1.0.0"
+description = "Sample Project"
+authors = []
+license = "MIT"
+
+[tool.poetry.dependencies]
+python = "^3.7"
+demo = [
+    { path = "demo_one", platform = "linux" },
+    { path = "demo_two", platform = "win32" },
+]
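
> Editor's note: the new with_conditional_path_deps fixture declares the same dependency twice with mutually exclusive `platform` constraints, so demo_one is selected on Linux and demo_two on Windows. Under the hood this corresponds to a `sys_platform` environment marker; a minimal illustration using the packaging library (an assumption for this sketch — Poetry's own marker handling is richer):

```python
# Minimal illustration: platform = "linux" above behaves like the
# environment marker below (sketch via the packaging library).
from packaging.markers import Marker

on_linux = Marker('sys_platform == "linux"')
on_windows = Marker('sys_platform == "win32"')

# Exactly one of these evaluates to True for a given interpreter.
print(on_linux.evaluate(), on_windows.evaluate())
```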
diff --git a/vendor/poetry/tests/helpers.py b/vendor/poetry/tests/helpers.py
index 1de03111..05c0ea7b 100644
--- a/vendor/poetry/tests/helpers.py
+++ b/vendor/poetry/tests/helpers.py
@@ -1,147 +1,183 @@
+from __future__ import annotations
+
+import contextlib
 import os
+import re
 import shutil
+import sys
+import urllib.parse
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
 
-from poetry.console.application import Application
 from poetry.core.masonry.utils.helpers import escape_name
 from poetry.core.masonry.utils.helpers import escape_version
-from poetry.core.packages import Dependency
-from poetry.core.packages import Link
-from poetry.core.packages import Package
+from poetry.core.packages.package import Package
+from poetry.core.packages.utils.link import Link
 from poetry.core.toml.file import TOMLFile
 from poetry.core.vcs.git import ParsedUrl
+
+from poetry.config.config import Config
+from poetry.console.application import Application
 from poetry.factory import Factory
 from poetry.installation.executor import Executor
 from poetry.packages import Locker
 from poetry.repositories import Repository
 from poetry.repositories.exceptions import PackageNotFound
-from poetry.utils._compat import PY2
-from poetry.utils._compat import WINDOWS
-from poetry.utils._compat import Path
-from poetry.utils._compat import urlparse
+from poetry.utils._compat import metadata
 
 
+if TYPE_CHECKING:
+    from collections.abc import Iterator
+
+    from poetry.core.packages.dependency import Dependency
+    from poetry.core.semver.version import Version
+    from pytest_mock import MockerFixture
+    from tomlkit.toml_document import TOMLDocument
+
+    from poetry.installation.operations.operation import Operation
+    from poetry.poetry import Poetry
+
 FIXTURE_PATH = Path(__file__).parent / "fixtures"
 
+# Used as a mock for latest git revision.
+MOCK_DEFAULT_GIT_REVISION = "9cf87a285a2d3fbb0b9fa621997b3acc3631ed24"
+
 
-def get_package(name, version):
-    return Package(name, version)
+def get_package(
+    name: str, version: str | Version, yanked: str | bool = False
+) -> Package:
+    return Package(name, version, yanked=yanked)
 
 
 def get_dependency(
-    name, constraint=None, category="main", optional=False, allows_prereleases=False
-):
-    return Dependency(
-        name,
-        constraint or "*",
-        category=category,
-        optional=optional,
-        allows_prereleases=allows_prereleases,
-    )
+    name: str,
+    constraint: str | dict[str, Any] | None = None,
+    groups: list[str] | None = None,
+    optional: bool = False,
+    allows_prereleases: bool = False,
+) -> Dependency:
+    if constraint is None:
+        constraint = "*"
+
+    if isinstance(constraint, str):
+        constraint = {"version": constraint}
+
+    constraint["optional"] = optional
+    constraint["allow-prereleases"] = allows_prereleases
+
+    return Factory.create_dependency(name, constraint or "*", groups=groups)
 
 
-def fixture(path=None):
+def fixture(path: str | None = None) -> Path:
     if path:
         return FIXTURE_PATH / path
     else:
         return FIXTURE_PATH
 
 
-def copy_or_symlink(source, dest):
-    if dest.exists():
-        if dest.is_symlink():
-            os.unlink(str(dest))
-        elif dest.is_dir():
-            shutil.rmtree(str(dest))
-        else:
-            os.unlink(str(dest))
+def copy_or_symlink(source: Path, dest: Path) -> None:
+    if dest.is_symlink() or dest.is_file():
+        dest.unlink()  # missing_ok is only available in Python >= 3.8
+    elif dest.is_dir():
+        shutil.rmtree(dest)
 
-    # Python2 does not support os.symlink on Windows whereas Python3 does.
     # os.symlink requires either administrative privileges or developer mode on Win10,
     # throwing an OSError if neither is active.
-    if WINDOWS:
-        if PY2:
+    if sys.platform == "win32":
+        try:
+            os.symlink(str(source), str(dest), target_is_directory=source.is_dir())
+        except OSError:
             if source.is_dir():
                 shutil.copytree(str(source), str(dest))
             else:
                 shutil.copyfile(str(source), str(dest))
-        else:
-            try:
-                os.symlink(str(source), str(dest), target_is_directory=source.is_dir())
-            except OSError:
-                if source.is_dir():
-                    shutil.copytree(str(source), str(dest))
-                else:
-                    shutil.copyfile(str(source), str(dest))
     else:
         os.symlink(str(source), str(dest))
 
 
-def mock_clone(_, source, dest):
+class MockDulwichRepo:
+    def __init__(self, root: Path | str, **__: Any) -> None:
+        self.path = str(root)
+
+    def head(self) -> bytes:
+        return MOCK_DEFAULT_GIT_REVISION.encode()
+
+
+def mock_clone(
+    url: str,
+    *_: Any,
+    source_root: Path | None = None,
+    **__: Any,
+) -> MockDulwichRepo:
     # Checking source to determine which folder we need to copy
-    parsed = ParsedUrl.parse(source)
-
-    folder = (
-        Path(__file__).parent
-        / "fixtures"
-        / "git"
-        / parsed.resource
-        / parsed.pathname.lstrip("/").rstrip(".git")
-    )
+    parsed = ParsedUrl.parse(url)
+    path = re.sub(r"(.git)?$", "", parsed.pathname.lstrip("/"))
+
+    folder = Path(__file__).parent / "fixtures" / "git" / parsed.resource / path
+
+    if not source_root:
+        source_root = Path(Config.create().get("cache-dir")) / "src"
+
+    dest = source_root / path
+    dest.parent.mkdir(parents=True, exist_ok=True)
 
     copy_or_symlink(folder, dest)
+    return MockDulwichRepo(dest)
 
 
-def mock_download(url, dest, **__):
-    parts = urlparse.urlparse(url)
+def mock_download(url: str, dest: Path) -> None:
+    parts = urllib.parse.urlparse(url)
 
     fixtures = Path(__file__).parent / "fixtures"
     fixture = fixtures / parts.path.lstrip("/")
 
-    copy_or_symlink(fixture, Path(dest))
+    copy_or_symlink(fixture, dest)
 
 
 class TestExecutor(Executor):
-    def __init__(self, *args, **kwargs):
-        super(TestExecutor, self).__init__(*args, **kwargs)
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
 
         self._installs = []
         self._updates = []
         self._uninstalls = []
 
     @property
-    def installations(self):
+    def installations(self) -> list[Package]:
         return self._installs
 
     @property
-    def updates(self):
+    def updates(self) -> list[Package]:
         return self._updates
 
     @property
-    def removals(self):
+    def removals(self) -> list[Package]:
         return self._uninstalls
 
-    def _do_execute_operation(self, operation):
-        super(TestExecutor, self)._do_execute_operation(operation)
+    def _do_execute_operation(self, operation: Operation) -> None:
+        super()._do_execute_operation(operation)
 
         if not operation.skipped:
-            getattr(self, "_{}s".format(operation.job_type)).append(operation.package)
+            getattr(self, f"_{operation.job_type}s").append(operation.package)
 
-    def _execute_install(self, operation):
+    def _execute_install(self, operation: Operation) -> int:
         return 0
 
-    def _execute_update(self, operation):
+    def _execute_update(self, operation: Operation) -> int:
         return 0
 
-    def _execute_remove(self, operation):
+    def _execute_remove(self, operation: Operation) -> int:
         return 0
 
 
-class TestApplication(Application):
-    def __init__(self, poetry):
-        super(TestApplication, self).__init__()
+class PoetryTestApplication(Application):
+    def __init__(self, poetry: Poetry) -> None:
+        super().__init__()
         self._poetry = poetry
 
-    def reset_poetry(self):
+    def reset_poetry(self) -> None:
         poetry = self._poetry
         self._poetry = Factory().create_poetry(self._poetry.file.path.parent)
         self._poetry.set_pool(poetry.pool)
@@ -152,7 +188,7 @@ def reset_poetry(self):
 
 
 class TestLocker(Locker):
-    def __init__(self, lock, local_config):  # noqa
+    def __init__(self, lock: str | Path, local_config: dict) -> None:
         self._lock = TOMLFile(lock)
         self._local_config = local_config
         self._lock_data = None
@@ -161,28 +197,28 @@ def __init__(self, lock, local_config):  # noqa
         self._lock_data = None
         self._write = False
 
-    def write(self, write=True):
+    def write(self, write: bool = True) -> None:
         self._write = write
 
-    def is_locked(self):
+    def is_locked(self) -> bool:
         return self._locked
 
-    def locked(self, is_locked=True):
+    def locked(self, is_locked: bool = True) -> TestLocker:
         self._locked = is_locked
 
         return self
 
-    def mock_lock_data(self, data):
+    def mock_lock_data(self, data: dict) -> None:
         self.locked()
 
         self._lock_data = data
 
-    def is_fresh(self):
+    def is_fresh(self) -> bool:
         return True
 
-    def _write_lock_data(self, data):
+    def _write_lock_data(self, data: TOMLDocument) -> None:
         if self._write:
-            super(TestLocker, self)._write_lock_data(data)
+            super()._write_lock_data(data)
             self._locked = True
             return
 
@@ -190,18 +226,63 @@ def _write_lock_data(self, data):
 
 
 class TestRepository(Repository):
-    def find_packages(self, dependency):
-        packages = super(TestRepository, self).find_packages(dependency)
+    def find_packages(self, dependency: Dependency) -> list[Package]:
+        packages = super().find_packages(dependency)
         if len(packages) == 0:
-            raise PackageNotFound("Package [{}] not found.".format(dependency.name))
+            raise PackageNotFound(f"Package [{dependency.name}] not found.")
 
         return packages
 
-    def find_links_for_package(self, package):
+    def find_links_for_package(self, package: Package) -> list[Link]:
         return [
             Link(
-                "https://foo.bar/files/{}-{}-py2.py3-none-any.whl".format(
-                    escape_name(package.name), escape_version(package.version.text)
-                )
+                f"https://foo.bar/files/{escape_name(package.name)}"
+                f"-{escape_version(package.version.text)}-py2.py3-none-any.whl"
             )
         ]
+
+
+@contextlib.contextmanager
+def isolated_environment(
+    environ: dict[str, Any] | None = None, clear: bool = False
+) -> Iterator[None]:
+    original_environ = dict(os.environ)
+
+    if clear:
+        os.environ.clear()
+
+    if environ:
+        os.environ.update(environ)
+
+    yield
+
+    os.environ.clear()
+    os.environ.update(original_environ)
+
+
+def make_entry_point_from_plugin(
+    name: str, cls: type[Any], dist: metadata.Distribution | None = None
+) -> metadata.EntryPoint:
+    ep = metadata.EntryPoint(
+        name=name,
+        group=getattr(cls, "group", None),
+        value=f"{cls.__module__}:{cls.__name__}",
+    )
+
+    if dist:
+        return ep._for(dist)
+
+    return ep
+
+
+def mock_metadata_entry_points(
+    mocker: MockerFixture,
+    cls: type[Any],
+    name: str = "my-plugin",
+    dist: metadata.Distribution | None = None,
+) -> None:
+    mocker.patch.object(
+        metadata,
+        "entry_points",
+        return_value=[make_entry_point_from_plugin(name, cls, dist)],
+    )
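
> Editor's note: a short usage sketch for the new isolated_environment helper added above (values are illustrative). One design observation: the yield is not wrapped in try/finally, so the saved environment is restored only on normal exit from the block.

```python
# Usage sketch for isolated_environment (values are illustrative).
import os

with isolated_environment({"POETRY_VIRTUALENVS_CREATE": "false"}, clear=True):
    # Inside the block the process environment contains only the
    # variables passed in (clear=True wiped the rest first).
    assert os.environ["POETRY_VIRTUALENVS_CREATE"] == "false"

# On normal exit the original environment is restored; an exception
# raised inside the block would skip the restore.
```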
diff --git a/vendor/poetry/tests/inspection/__init__.py b/vendor/poetry/tests/inspection/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/tests/inspection/test_info.py b/vendor/poetry/tests/inspection/test_info.py
index 0cdf6e79..4c7b34bd 100644
--- a/vendor/poetry/tests/inspection/test_info.py
+++ b/vendor/poetry/tests/inspection/test_info.py
@@ -1,17 +1,21 @@
-from typing import Set
+from __future__ import annotations
+
+from pathlib import Path
+from subprocess import CalledProcessError
+from typing import TYPE_CHECKING
 
 import pytest
 
 from poetry.inspection.info import PackageInfo
 from poetry.inspection.info import PackageInfoError
-from poetry.utils._compat import PY35
-from poetry.utils._compat import CalledProcessError
-from poetry.utils._compat import Path
 from poetry.utils._compat import decode
 from poetry.utils.env import EnvCommandError
 from poetry.utils.env import VirtualEnv
 
 
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
+
 FIXTURE_DIR_BASE = Path(__file__).parent.parent / "fixtures"
 FIXTURE_DIR_INSPECTIONS = FIXTURE_DIR_BASE / "inspection"
 
@@ -22,22 +26,22 @@ def pep517_metadata_mock():
 
 
 @pytest.fixture
-def demo_sdist():  # type: () -> Path
+def demo_sdist() -> Path:
     return FIXTURE_DIR_BASE / "distributions" / "demo-0.1.0.tar.gz"
 
 
 @pytest.fixture
-def demo_wheel():  # type: () -> Path
+def demo_wheel() -> Path:
     return FIXTURE_DIR_BASE / "distributions" / "demo-0.1.0-py2.py3-none-any.whl"
 
 
 @pytest.fixture
-def source_dir(tmp_path):  # type: (Path) -> Path
-    yield Path(tmp_path.as_posix())
+def source_dir(tmp_path: Path) -> Path:
+    return Path(tmp_path.as_posix())
 
 
 @pytest.fixture
-def demo_setup(source_dir):  # type: (Path) -> Path
+def demo_setup(source_dir: Path) -> Path:
     setup_py = source_dir / "setup.py"
     setup_py.write_text(
         decode(
@@ -47,11 +51,11 @@ def demo_setup(source_dir):  # type: (Path) -> Path
             'install_requires=["package"])'
         )
     )
-    yield source_dir
+    return source_dir
 
 
 @pytest.fixture
-def demo_setup_cfg(source_dir):  # type: (Path) -> Path
+def demo_setup_cfg(source_dir: Path) -> Path:
     setup_cfg = source_dir / "setup.cfg"
     setup_cfg.write_text(
         decode(
@@ -66,11 +70,11 @@ def demo_setup_cfg(source_dir):  # type: (Path) -> Path
             )
         )
     )
-    yield source_dir
+    return source_dir
 
 
 @pytest.fixture
-def demo_setup_complex(source_dir):  # type: (Path) -> Path
+def demo_setup_complex(source_dir: Path) -> Path:
     setup_py = source_dir / "setup.py"
     setup_py.write_text(
         decode(
@@ -80,19 +84,19 @@ def demo_setup_complex(source_dir):  # type: (Path) -> Path
             'install_requires=[i for i in ["package"]])'
         )
     )
-    yield source_dir
+    return source_dir
 
 
 @pytest.fixture
-def demo_setup_complex_pep517_legacy(demo_setup_complex):  # type: (Path) -> Path
+def demo_setup_complex_pep517_legacy(demo_setup_complex: Path) -> Path:
     pyproject_toml = demo_setup_complex / "pyproject.toml"
     pyproject_toml.write_text(
-        decode("[build-system]\n" 'requires = ["setuptools", "wheel"]')
+        decode('[build-system]\nrequires = ["setuptools", "wheel"]')
     )
-    yield demo_setup_complex
+    return demo_setup_complex
 
 
-def demo_check_info(info, requires_dist=None):  # type: (PackageInfo, Set[str]) -> None
+def demo_check_info(info: PackageInfo, requires_dist: set[str] | None = None) -> None:
     assert info.name == "demo"
     assert info.version == "0.1.0"
     assert info.requires_dist
@@ -105,17 +109,17 @@ def demo_check_info(info, requires_dist=None):  # type: (PackageInfo, Set[str])
     assert set(info.requires_dist) == requires_dist
 
 
-def test_info_from_sdist(demo_sdist):
+def test_info_from_sdist(demo_sdist: Path):
     info = PackageInfo.from_sdist(demo_sdist)
     demo_check_info(info)
 
 
-def test_info_from_wheel(demo_wheel):
+def test_info_from_wheel(demo_wheel: Path):
     info = PackageInfo.from_wheel(demo_wheel)
     demo_check_info(info)
 
 
-def test_info_from_bdist(demo_wheel):
+def test_info_from_bdist(demo_wheel: Path):
     info = PackageInfo.from_bdist(demo_wheel)
     demo_check_info(info)
 
@@ -127,13 +131,15 @@ def test_info_from_poetry_directory():
     demo_check_info(info)
 
 
-def test_info_from_poetry_directory_fallback_on_poetry_create_error(mocker):
+def test_info_from_poetry_directory_fallback_on_poetry_create_error(
+    mocker: MockerFixture,
+):
     mock_create_poetry = mocker.patch(
         "poetry.inspection.info.Factory.create_poetry", side_effect=RuntimeError
     )
     mock_get_poetry_package = mocker.spy(PackageInfo, "_get_poetry_package")
     mock_get_pep517_metadata = mocker.patch(
-        "poetry.inspection.info.PackageInfo._pep517_metadata"
+        "poetry.inspection.info.get_pep517_metadata"
     )
 
     PackageInfo.from_directory(FIXTURE_DIR_INSPECTIONS / "demo_poetry_package")
@@ -150,60 +156,48 @@ def test_info_from_requires_txt():
     demo_check_info(info)
 
 
-@pytest.mark.skipif(not PY35, reason="Parsing of setup.py is skipped for Python < 3.5")
-def test_info_from_setup_py(demo_setup):
+def test_info_from_setup_py(demo_setup: Path):
     info = PackageInfo.from_setup_files(demo_setup)
     demo_check_info(info, requires_dist={"package"})
 
 
-@pytest.mark.skipif(not PY35, reason="Parsing of setup.cfg is skipped for Python < 3.5")
-def test_info_from_setup_cfg(demo_setup_cfg):
+def test_info_from_setup_cfg(demo_setup_cfg: Path):
     info = PackageInfo.from_setup_files(demo_setup_cfg)
     demo_check_info(info, requires_dist={"package"})
 
 
 def test_info_no_setup_pkg_info_no_deps():
     info = PackageInfo.from_directory(
-        FIXTURE_DIR_INSPECTIONS / "demo_no_setup_pkg_info_no_deps", disable_build=True,
+        FIXTURE_DIR_INSPECTIONS / "demo_no_setup_pkg_info_no_deps",
+        disable_build=True,
     )
     assert info.name == "demo"
     assert info.version == "0.1.0"
     assert info.requires_dist is None
 
 
-@pytest.mark.skipif(not PY35, reason="Parsing of setup.py is skipped for Python < 3.5")
-def test_info_setup_simple(mocker, demo_setup):
+def test_info_setup_simple(mocker: MockerFixture, demo_setup: Path):
     spy = mocker.spy(VirtualEnv, "run")
     info = PackageInfo.from_directory(demo_setup)
     assert spy.call_count == 0
     demo_check_info(info, requires_dist={"package"})
 
 
-@pytest.mark.skipif(
-    PY35,
-    reason="For projects with setup.py using Python < 3.5 fallback to pep517 build",
-)
-def test_info_setup_simple_py2(mocker, demo_setup):
-    spy = mocker.spy(VirtualEnv, "run")
-    info = PackageInfo.from_directory(demo_setup)
-    assert spy.call_count == 2
-    demo_check_info(info, requires_dist={"package"})
-
-
-@pytest.mark.skipif(not PY35, reason="Parsing of setup.cfg is skipped for Python < 3.5")
-def test_info_setup_cfg(mocker, demo_setup_cfg):
+def test_info_setup_cfg(mocker: MockerFixture, demo_setup_cfg: Path):
     spy = mocker.spy(VirtualEnv, "run")
     info = PackageInfo.from_directory(demo_setup_cfg)
     assert spy.call_count == 0
     demo_check_info(info, requires_dist={"package"})
 
 
-def test_info_setup_complex(demo_setup_complex):
+def test_info_setup_complex(demo_setup_complex: Path):
     info = PackageInfo.from_directory(demo_setup_complex)
     demo_check_info(info, requires_dist={"package"})
 
 
-def test_info_setup_complex_pep517_error(mocker, demo_setup_complex):
+def test_info_setup_complex_pep517_error(
+    mocker: MockerFixture, demo_setup_complex: Path
+):
     mocker.patch(
         "poetry.utils.env.VirtualEnv.run",
         autospec=True,
@@ -214,13 +208,14 @@ def test_info_setup_complex_pep517_error(mocker, demo_setup_complex):
         PackageInfo.from_directory(demo_setup_complex)
 
 
-def test_info_setup_complex_pep517_legacy(demo_setup_complex_pep517_legacy):
+def test_info_setup_complex_pep517_legacy(demo_setup_complex_pep517_legacy: Path):
     info = PackageInfo.from_directory(demo_setup_complex_pep517_legacy)
     demo_check_info(info, requires_dist={"package"})
 
 
-@pytest.mark.skipif(not PY35, reason="Parsing of setup.py is skipped for Python < 3.5")
-def test_info_setup_complex_disable_build(mocker, demo_setup_complex):
+def test_info_setup_complex_disable_build(
+    mocker: MockerFixture, demo_setup_complex: Path
+):
     spy = mocker.spy(VirtualEnv, "run")
     info = PackageInfo.from_directory(demo_setup_complex, disable_build=True)
     assert spy.call_count == 0
@@ -229,10 +224,9 @@ def test_info_setup_complex_disable_build(mocker, demo_setup_complex):
     assert info.requires_dist is None
 
 
-@pytest.mark.skipif(not PY35, reason="Parsing of setup.py is skipped for Python < 3.5")
 @pytest.mark.parametrize("missing", ["version", "name", "install_requires"])
 def test_info_setup_missing_mandatory_should_trigger_pep517(
-    mocker, source_dir, missing
+    mocker: MockerFixture, source_dir: Path, missing: str
 ):
     setup = "from setuptools import setup; "
     setup += "setup("
diff --git a/vendor/poetry/tests/installation/fixtures/update-with-locked-extras.test b/vendor/poetry/tests/installation/fixtures/update-with-locked-extras.test
index 4872311c..3609212e 100644
--- a/vendor/poetry/tests/installation/fixtures/update-with-locked-extras.test
+++ b/vendor/poetry/tests/installation/fixtures/update-with-locked-extras.test
@@ -11,7 +11,7 @@ python-versions = "*"
 "C" = {version = "^1.0", markers = "python_version >= \"2.7\" and python_version < \"2.8\""}
 
 [package.extras]
-foo = ["b"]
+foo = ["B"]
 
 [[package]]
 name = "B"
diff --git a/vendor/poetry/tests/installation/fixtures/with-dependencies-nested-extras.test b/vendor/poetry/tests/installation/fixtures/with-dependencies-nested-extras.test
index 48a22a7c..369aa3cd 100644
--- a/vendor/poetry/tests/installation/fixtures/with-dependencies-nested-extras.test
+++ b/vendor/poetry/tests/installation/fixtures/with-dependencies-nested-extras.test
@@ -10,7 +10,7 @@ python-versions = "*"
 B = {version = "^1.0", optional = true, extras = ["C"]}
 
 [package.extras]
-B = ["B[C] (>=1.0,<2.0)"]
+b = ["B[C] (>=1.0,<2.0)"]
 
 [[package]]
 name = "B"
@@ -24,7 +24,7 @@ python-versions = "*"
 C = {version = "^1.0", optional = true}
 
 [package.extras]
-C = ["C (>=1.0,<2.0)"]
+c = ["C (>=1.0,<2.0)"]
 
 [[package]]
 name = "C"
diff --git a/vendor/poetry/tests/installation/fixtures/with-pypi-repository.test b/vendor/poetry/tests/installation/fixtures/with-pypi-repository.test
index 37a5b528..1bde9b82 100644
--- a/vendor/poetry/tests/installation/fixtures/with-pypi-repository.test
+++ b/vendor/poetry/tests/installation/fixtures/with-pypi-repository.test
@@ -66,11 +66,24 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 py = ">=1.5.0"
 six = ">=1.10.0"
 attrs = ">=17.4.0"
+setuptools = "*"
 more-itertools = ">=4.0.0"
 pluggy = ">=0.5,<0.7"
 funcsigs = {"version" = "*", "markers" = "python_version < \"3.0\""}
 colorama = {"version" = "*", "markers" = "sys_platform == \"win32\""}
 
+[[package]]
+name = "setuptools"
+version = "39.2.0"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+category = "dev"
+optional = false
+python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*"
+
+[package.extras]
+certs = ["certifi (==2016.9.26)"]
+ssl = ["wincertstore (==0.2)"]
+
 [[package]]
 name = "six"
 version = "1.11.0"
@@ -113,6 +126,10 @@ pytest = [
     {file = "pytest-3.5.0-py2.py3-none-any.whl", hash = "sha256:6266f87ab64692112e5477eba395cfedda53b1933ccd29478e671e73b420c19c"},
     {file = "pytest-3.5.0.tar.gz", hash = "sha256:fae491d1874f199537fd5872b5e1f0e74a009b979df9d53d1553fd03da1703e1"},
 ]
+setuptools = [
+    {file = "setuptools-39.2.0-py2.py3-none-any.whl", hash = "sha256:8fca9275c89964f13da985c3656cb00ba029d7f3916b37990927ffdf264e7926"},
+    {file = "setuptools-39.2.0.zip", hash = "sha256:f7cddbb5f5c640311eb00eab6e849f7701fa70bf6a183fc8a2c33dd1d1672fb2"},
+]
 six = [
     {file = "six-1.11.0-py2.py3-none-any.whl", hash = "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb"},
     {file = "six-1.11.0.tar.gz", hash = "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9"},
diff --git a/vendor/poetry/tests/installation/fixtures/with-same-version-url-dependencies.test b/vendor/poetry/tests/installation/fixtures/with-same-version-url-dependencies.test
new file mode 100644
index 00000000..aec9fefa
--- /dev/null
+++ b/vendor/poetry/tests/installation/fixtures/with-same-version-url-dependencies.test
@@ -0,0 +1,54 @@
+[[package]]
+name = "demo"
+version = "0.1.0"
+description = ""
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
+[package.source]
+type = "url"
+url = "https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl"
+
+[package.dependencies]
+pendulum = ">=1.4.4"
+
+[package.extras]
+bar = ["tomlkit"]
+foo = ["cleo"]
+
+[[package]]
+name = "demo"
+version = "0.1.0"
+description = ""
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
+[package.source]
+type = "url"
+url = "https://python-poetry.org/distributions/demo-0.1.0.tar.gz"
+
+[package.dependencies]
+pendulum = ">=1.4.4"
+
+[package.extras]
+bar = ["tomlkit"]
+foo = ["cleo"]
+
+[[package]]
+name = "pendulum"
+version = "1.4.4"
+description = ""
+category = "main"
+optional = false
+python-versions = "*"
+
+[metadata]
+python-versions = "*"
+lock-version = "1.1"
+content-hash = "123456789"
+
+[metadata.files]
+demo = []
+pendulum = []
diff --git a/vendor/poetry/tests/installation/fixtures/with-vcs-dependency-with-extras.test b/vendor/poetry/tests/installation/fixtures/with-vcs-dependency-with-extras.test
new file mode 100644
index 00000000..f64ba8e8
--- /dev/null
+++ b/vendor/poetry/tests/installation/fixtures/with-vcs-dependency-with-extras.test
@@ -0,0 +1,47 @@
+[[package]]
+name = "demo"
+version = "0.1.2"
+description = ""
+category = "main"
+optional = false
+python-versions = "*"
+develop = false
+
+[package.dependencies]
+cleo = {version = "*", optional = true, markers = "extra == \"foo\""}
+pendulum = ">=1.4.4"
+
+[package.extras]
+foo = ["cleo"]
+
+[package.source]
+type = "git"
+url = "https://github.com/demo/demo.git"
+reference = "master"
+resolved_reference = "123456"
+
+[[package]]
+name = "pendulum"
+version = "1.4.4"
+description = ""
+category = "main"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "cleo"
+version = "1.0.0"
+description = ""
+category = "main"
+optional = false
+python-versions = "*"
+
+[metadata]
+python-versions = "*"
+lock-version = "1.1"
+content-hash = "123456789"
+
+[metadata.files]
+demo = []
+pendulum = []
+cleo = []
diff --git a/vendor/poetry/tests/installation/fixtures/with-vcs-dependency-without-ref.test b/vendor/poetry/tests/installation/fixtures/with-vcs-dependency-without-ref.test
new file mode 100644
index 00000000..16521bde
--- /dev/null
+++ b/vendor/poetry/tests/installation/fixtures/with-vcs-dependency-without-ref.test
@@ -0,0 +1,34 @@
+[[package]]
+name = "demo"
+version = "0.1.2"
+description = ""
+category = "main"
+optional = false
+python-versions = "*"
+develop = false
+
+[package.dependencies]
+pendulum = ">=1.4.4"
+
+[package.source]
+type = "git"
+url = "https://github.com/demo/demo.git"
+reference = "HEAD"
+resolved_reference = "123456"
+
+[[package]]
+name = "pendulum"
+version = "1.4.4"
+description = ""
+category = "main"
+optional = false
+python-versions = "*"
+
+[metadata]
+python-versions = "*"
+lock-version = "1.1"
+content-hash = "123456789"
+
+[metadata.files]
+demo = []
+pendulum = []
diff --git a/vendor/poetry/tests/installation/test_authenticator.py b/vendor/poetry/tests/installation/test_authenticator.py
deleted file mode 100644
index d1936474..00000000
--- a/vendor/poetry/tests/installation/test_authenticator.py
+++ /dev/null
@@ -1,201 +0,0 @@
-import re
-import uuid
-
-import httpretty
-import pytest
-import requests
-
-from poetry.installation.authenticator import Authenticator
-from poetry.io.null_io import NullIO
-
-
-@pytest.fixture()
-def mock_remote(http):
-    http.register_uri(
-        http.GET, re.compile("^https?://foo.bar/(.+?)$"),
-    )
-
-
-def test_authenticator_uses_url_provided_credentials(config, mock_remote, http):
-    config.merge(
-        {
-            "repositories": {"foo": {"url": "https://foo.bar/simple/"}},
-            "http-basic": {"foo": {"username": "bar", "password": "baz"}},
-        }
-    )
-
-    authenticator = Authenticator(config, NullIO())
-    authenticator.request("get", "https://foo001:bar002@foo.bar/files/foo-0.1.0.tar.gz")
-
-    request = http.last_request()
-
-    assert "Basic Zm9vMDAxOmJhcjAwMg==" == request.headers["Authorization"]
-
-
-def test_authenticator_uses_credentials_from_config_if_not_provided(
-    config, mock_remote, http
-):
-    config.merge(
-        {
-            "repositories": {"foo": {"url": "https://foo.bar/simple/"}},
-            "http-basic": {"foo": {"username": "bar", "password": "baz"}},
-        }
-    )
-
-    authenticator = Authenticator(config, NullIO())
-    authenticator.request("get", "https://foo.bar/files/foo-0.1.0.tar.gz")
-
-    request = http.last_request()
-
-    assert "Basic YmFyOmJheg==" == request.headers["Authorization"]
-
-
-def test_authenticator_uses_username_only_credentials(config, mock_remote, http):
-    config.merge(
-        {
-            "repositories": {"foo": {"url": "https://foo.bar/simple/"}},
-            "http-basic": {"foo": {"username": "bar", "password": "baz"}},
-        }
-    )
-
-    authenticator = Authenticator(config, NullIO())
-    authenticator.request("get", "https://foo001@foo.bar/files/foo-0.1.0.tar.gz")
-
-    request = http.last_request()
-
-    assert "Basic Zm9vMDAxOg==" == request.headers["Authorization"]
-
-
-def test_authenticator_uses_password_only_credentials(config, mock_remote, http):
-    config.merge(
-        {
-            "repositories": {"foo": {"url": "https://foo.bar/simple/"}},
-            "http-basic": {"foo": {"username": "bar", "password": "baz"}},
-        }
-    )
-
-    authenticator = Authenticator(config, NullIO())
-    authenticator.request("get", "https://:bar002@foo.bar/files/foo-0.1.0.tar.gz")
-
-    request = http.last_request()
-
-    assert "Basic OmJhcjAwMg==" == request.headers["Authorization"]
-
-
-def test_authenticator_uses_empty_strings_as_default_password(
-    config, mock_remote, http
-):
-    config.merge(
-        {
-            "repositories": {"foo": {"url": "https://foo.bar/simple/"}},
-            "http-basic": {"foo": {"username": "bar"}},
-        }
-    )
-
-    authenticator = Authenticator(config, NullIO())
-    authenticator.request("get", "https://foo.bar/files/foo-0.1.0.tar.gz")
-
-    request = http.last_request()
-
-    assert "Basic YmFyOg==" == request.headers["Authorization"]
-
-
-def test_authenticator_uses_empty_strings_as_default_username(
-    config, mock_remote, http
-):
-    config.merge(
-        {
-            "repositories": {"foo": {"url": "https://foo.bar/simple/"}},
-            "http-basic": {"foo": {"username": None, "password": "bar"}},
-        }
-    )
-
-    authenticator = Authenticator(config, NullIO())
-    authenticator.request("get", "https://foo.bar/files/foo-0.1.0.tar.gz")
-
-    request = http.last_request()
-
-    assert "Basic OmJhcg==" == request.headers["Authorization"]
-
-
-def test_authenticator_request_retries_on_exception(mocker, config, http):
-    sleep = mocker.patch("time.sleep")
-    sdist_uri = "https://foo.bar/files/{}/foo-0.1.0.tar.gz".format(str(uuid.uuid4()))
-    content = str(uuid.uuid4())
-    seen = list()
-
-    def callback(request, uri, response_headers):
-        if seen.count(uri) < 2:
-            seen.append(uri)
-            raise requests.exceptions.ConnectionError("Disconnected")
-        return [200, response_headers, content]
-
-    httpretty.register_uri(httpretty.GET, sdist_uri, body=callback)
-
-    authenticator = Authenticator(config, NullIO())
-    response = authenticator.request("get", sdist_uri)
-    assert response.text == content
-    assert sleep.call_count == 2
-
-
-def test_authenticator_request_raises_exception_when_attempts_exhausted(
-    mocker, config, http
-):
-    sleep = mocker.patch("time.sleep")
-    sdist_uri = "https://foo.bar/files/{}/foo-0.1.0.tar.gz".format(str(uuid.uuid4()))
-
-    def callback(*_, **__):
-        raise requests.exceptions.ConnectionError(str(uuid.uuid4()))
-
-    httpretty.register_uri(httpretty.GET, sdist_uri, body=callback)
-    authenticator = Authenticator(config, NullIO())
-
-    with pytest.raises(requests.exceptions.ConnectionError):
-        authenticator.request("get", sdist_uri)
-
-    assert sleep.call_count == 5
-
-
-@pytest.mark.parametrize(
-    "status, attempts",
-    [(400, 0), (401, 0), (403, 0), (404, 0), (500, 0), (502, 5), (503, 5), (504, 5)],
-)
-def test_authenticator_request_retries_on_status_code(
-    mocker, config, http, status, attempts
-):
-    sleep = mocker.patch("time.sleep")
-    sdist_uri = "https://foo.bar/files/{}/foo-0.1.0.tar.gz".format(str(uuid.uuid4()))
-    content = str(uuid.uuid4())
-
-    def callback(request, uri, response_headers):
-        return [status, response_headers, content]
-
-    httpretty.register_uri(httpretty.GET, sdist_uri, body=callback)
-    authenticator = Authenticator(config, NullIO())
-
-    with pytest.raises(requests.exceptions.HTTPError) as excinfo:
-        authenticator.request("get", sdist_uri)
-
-    assert excinfo.value.response.status_code == status
-    assert excinfo.value.response.text == content
-
-    assert sleep.call_count == attempts
-
-
-@pytest.fixture
-def environment_repository_credentials(monkeypatch):
-    monkeypatch.setenv("POETRY_HTTP_BASIC_FOO_USERNAME", "bar")
-    monkeypatch.setenv("POETRY_HTTP_BASIC_FOO_PASSWORD", "baz")
-
-
-def test_authenticator_uses_env_provided_credentials(
-    config, environ, mock_remote, http, environment_repository_credentials
-):
-    config.merge({"repositories": {"foo": {"url": "https://foo.bar/simple/"}}})
-
-    authenticator = Authenticator(config, NullIO())
-    authenticator.request("get", "https://foo.bar/files/foo-0.1.0.tar.gz")
-
-    request = http.last_request()
-
-    assert "Basic YmFyOmJheg==" == request.headers["Authorization"]
diff --git a/vendor/poetry/tests/installation/test_chef.py b/vendor/poetry/tests/installation/test_chef.py
index 7d0d98b1..6af8f682 100644
--- a/vendor/poetry/tests/installation/test_chef.py
+++ b/vendor/poetry/tests/installation/test_chef.py
@@ -1,12 +1,39 @@
-from packaging.tags import Tag
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING
 
+import pytest
+
+from packaging.tags import Tag
 from poetry.core.packages.utils.link import Link
+
 from poetry.installation.chef import Chef
-from poetry.utils._compat import Path
 from poetry.utils.env import MockEnv
 
 
-def test_get_cached_archive_for_link(config, mocker):
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
+
+    from tests.conftest import Config
+
+
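+# Whether the requested link is an sdist or an arch-specific wheel, the chef
+# should return the cached wheel that best matches the environment's tags.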
+@pytest.mark.parametrize(
+    ("link", "cached"),
+    [
+        (
+            "https://files.python-poetry.org/demo-0.1.0.tar.gz",
+            "/cache/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl",
+        ),
+        (
+            "https://example.com/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl",
+            "/cache/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl",
+        ),
+    ],
+)
+def test_get_cached_archive_for_link(
+    config: Config, mocker: MockerFixture, link: str, cached: str
+):
     chef = Chef(
         config,
         MockEnv(
@@ -19,27 +46,23 @@ def test_get_cached_archive_for_link(config, mocker):
         ),
     )
 
-    cwd = Path.cwd() / ".pypoetrycache"
-
     mocker.patch.object(
         chef,
         "get_cached_archives_for_link",
         return_value=[
-            Link(f"file:///{cwd}demo-0.1.0-py2.py3-none-any"),
-            Link(f"file:///{cwd}demo-0.1.0.tar.gz"),
-            Link(f"file:///{cwd}demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl"),
-            Link(f"file:///{cwd}demo-0.1.0-cp37-cp37-macosx_10_15_x86_64.whl"),
+            Path("/cache/demo-0.1.0-py2.py3-none-any"),
+            Path("/cache/demo-0.1.0.tar.gz"),
+            Path("/cache/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl"),
+            Path("/cache/demo-0.1.0-cp37-cp37-macosx_10_15_x86_64.whl"),
         ],
     )
 
-    archive = chef.get_cached_archive_for_link(
-        Link("https://files.python-poetry.org/demo-0.1.0.tar.gz")
-    )
+    archive = chef.get_cached_archive_for_link(Link(link))
 
-    assert Link(f"file:///{cwd}demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl") == archive
+    assert Path(cached) == archive
 
 
-def test_get_cached_archives_for_link(config, mocker):
+def test_get_cached_archives_for_link(config: Config, mocker: MockerFixture):
     chef = Chef(
         config,
         MockEnv(
@@ -49,7 +72,9 @@ def test_get_cached_archives_for_link(config, mocker):
 
     distributions = Path(__file__).parent.parent.joinpath("fixtures/distributions")
     mocker.patch.object(
-        chef, "get_cache_directory_for_link", return_value=distributions,
+        chef,
+        "get_cache_directory_for_link",
+        return_value=distributions,
     )
 
     archives = chef.get_cached_archives_for_link(
@@ -57,12 +82,10 @@ def test_get_cached_archives_for_link(config, mocker):
     )
 
     assert archives
-    assert set(archives) == {
-        Link(path.as_uri()) for path in distributions.glob("demo-0.1.0*")
-    }
+    assert set(archives) == {Path(path) for path in distributions.glob("demo-0.1.0*")}
 
 
-def test_get_cache_directory_for_link(config):
+def test_get_cache_directory_for_link(config: Config, config_cache_dir: Path):
     chef = Chef(
         config,
         MockEnv(
@@ -70,13 +93,13 @@ def test_get_cache_directory_for_link(config):
         ),
     )
 
-    cwd = Path.cwd() / ".pypoetrycache"
-
     directory = chef.get_cache_directory_for_link(
         Link("https://files.python-poetry.org/poetry-1.1.0.tar.gz")
     )
+
     expected = Path(
-        f"{cwd}/artifacts/ba/63/13/283a3b3b7f95f05e9e6f84182d276f7bb0951d5b0cc24422b33f7a4648"
+        f"{config_cache_dir.as_posix()}/artifacts/ba/63/13/"
+        "283a3b3b7f95f05e9e6f84182d276f7bb0951d5b0cc24422b33f7a4648"
     )
 
-    assert expected == directory
+    assert directory == expected
diff --git a/vendor/poetry/tests/installation/test_chooser.py b/vendor/poetry/tests/installation/test_chooser.py
index 7586d27c..b4a8d36b 100644
--- a/vendor/poetry/tests/installation/test_chooser.py
+++ b/vendor/poetry/tests/installation/test_chooser.py
@@ -1,18 +1,31 @@
+from __future__ import annotations
+
 import re
 
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
+
 import pytest
 
 from packaging.tags import Tag
-
 from poetry.core.packages.package import Package
+
 from poetry.installation.chooser import Chooser
 from poetry.repositories.legacy_repository import LegacyRepository
 from poetry.repositories.pool import Pool
 from poetry.repositories.pypi_repository import PyPiRepository
-from poetry.utils._compat import Path
 from poetry.utils.env import MockEnv
 
 
+if TYPE_CHECKING:
+    import httpretty
+
+    from httpretty.core import HTTPrettyRequest
+
+    from tests.conftest import Config
+
+
 JSON_FIXTURES = (
     Path(__file__).parent.parent / "repositories" / "fixtures" / "pypi.org" / "json"
 )
@@ -21,7 +34,7 @@
 
 
 @pytest.fixture()
-def env():
+def env() -> MockEnv:
     return MockEnv(
         supported_tags=[
             Tag("cp37", "cp37", "macosx_10_15_x86_64"),
@@ -31,8 +44,10 @@ def env():
 
 
 @pytest.fixture()
-def mock_pypi(http):
-    def callback(request, uri, headers):
+def mock_pypi(http: type[httpretty.httpretty]) -> None:
+    def callback(
+        request: HTTPrettyRequest, uri: str, headers: dict[str, Any]
+    ) -> list[int | dict[str, Any] | str] | None:
         parts = uri.rsplit("/")
 
         name = parts[-3]
@@ -49,13 +64,17 @@ def callback(request, uri, headers):
             return [200, headers, f.read()]
 
     http.register_uri(
-        http.GET, re.compile("^https://pypi.org/(.+?)/(.+?)/json$"), body=callback,
+        http.GET,
+        re.compile("^https://pypi.org/(.+?)/(.+?)/json$"),
+        body=callback,
     )
 
 
 @pytest.fixture()
-def mock_legacy(http):
-    def callback(request, uri, headers):
+def mock_legacy(http: type[httpretty.httpretty]) -> None:
+    def callback(
+        request: HTTPrettyRequest, uri: str, headers: dict[str, Any]
+    ) -> list[int | dict[str, Any] | str]:
         parts = uri.rsplit("/")
         name = parts[-2]
 
@@ -65,25 +84,50 @@ def callback(request, uri, headers):
             return [200, headers, f.read()]
 
     http.register_uri(
-        http.GET, re.compile("^https://foo.bar/simple/(.+?)$"), body=callback,
+        http.GET,
+        re.compile("^https://foo.bar/simple/(.+?)$"),
+        body=callback,
+    )
+
+
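+# Serves the "<name>_partial_yank.html" fixtures: index pages on which only
+# some of a release's files are marked as yanked.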
+@pytest.fixture()
+def mock_legacy_partial_yank(http: type[httpretty.httpretty]) -> None:
+    def callback(
+        request: HTTPrettyRequest, uri: str, headers: dict[str, Any]
+    ) -> list[int | dict[str, Any] | str]:
+        parts = uri.rsplit("/")
+        name = parts[-2]
+
+        fixture = LEGACY_FIXTURES / (name + "_partial_yank" + ".html")
+
+        with fixture.open(encoding="utf-8") as f:
+            return [200, headers, f.read()]
+
+    http.register_uri(
+        http.GET,
+        re.compile("^https://foo2.bar/simple/(.+?)$"),
+        body=callback,
     )
 
 
 @pytest.fixture()
-def pool():
+def pool() -> Pool:
     pool = Pool()
 
     pool.add_repository(PyPiRepository(disable_cache=True))
     pool.add_repository(
         LegacyRepository("foo", "https://foo.bar/simple/", disable_cache=True)
     )
+    pool.add_repository(
+        LegacyRepository("foo2", "https://foo2.bar/simple/", disable_cache=True)
+    )
 
     return pool
 
 
 @pytest.mark.parametrize("source_type", ["", "legacy"])
 def test_chooser_chooses_universal_wheel_link_if_available(
-    env, mock_pypi, mock_legacy, source_type, pool
+    env: MockEnv, mock_pypi: None, mock_legacy: None, source_type: str, pool: Pool
 ):
     chooser = Chooser(pool, env)
 
@@ -99,12 +143,52 @@ def test_chooser_chooses_universal_wheel_link_if_available(
 
     link = chooser.choose_for(package)
 
-    assert "pytest-3.5.0-py2.py3-none-any.whl" == link.filename
+    assert link.filename == "pytest-3.5.0-py2.py3-none-any.whl"
+
+
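+# "no-binary" accepts the special values ":all:" (never use wheels) and
+# ":none:" (always allow wheels), or a list of package names forced to sdist.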
+@pytest.mark.parametrize(
+    ("policy", "filename"),
+    [
+        (":all:", "pytest-3.5.0.tar.gz"),
+        (":none:", "pytest-3.5.0-py2.py3-none-any.whl"),
+        ("black", "pytest-3.5.0-py2.py3-none-any.whl"),
+        ("pytest", "pytest-3.5.0.tar.gz"),
+        ("pytest,black", "pytest-3.5.0.tar.gz"),
+    ],
+)
+@pytest.mark.parametrize("source_type", ["", "legacy"])
+def test_chooser_no_binary_policy(
+    env: MockEnv,
+    mock_pypi: None,
+    mock_legacy: None,
+    source_type: str,
+    pool: Pool,
+    policy: str,
+    filename: str,
+    config: Config,
+):
+    config.merge({"installer": {"no-binary": policy.split(",")}})
+
+    chooser = Chooser(pool, env, config)
+
+    package = Package("pytest", "3.5.0")
+    if source_type == "legacy":
+        package = Package(
+            package.name,
+            package.version.text,
+            source_type="legacy",
+            source_reference="foo",
+            source_url="https://foo.bar/simple/",
+        )
+
+    link = chooser.choose_for(package)
+
+    assert link.filename == filename
 
 
 @pytest.mark.parametrize("source_type", ["", "legacy"])
 def test_chooser_chooses_specific_python_universal_wheel_link_if_available(
-    env, mock_pypi, mock_legacy, source_type, pool
+    env: MockEnv, mock_pypi: None, mock_legacy: None, source_type: str, pool: Pool
 ):
     chooser = Chooser(pool, env)
 
@@ -120,12 +204,12 @@ def test_chooser_chooses_specific_python_universal_wheel_link_if_available(
 
     link = chooser.choose_for(package)
 
-    assert "isort-4.3.4-py3-none-any.whl" == link.filename
+    assert link.filename == "isort-4.3.4-py3-none-any.whl"
 
 
 @pytest.mark.parametrize("source_type", ["", "legacy"])
 def test_chooser_chooses_system_specific_wheel_link_if_available(
-    mock_pypi, mock_legacy, source_type, pool
+    mock_pypi: None, mock_legacy: None, source_type: str, pool: Pool
 ):
     env = MockEnv(
         supported_tags=[Tag("cp37", "cp37m", "win32"), Tag("py3", "none", "any")]
@@ -144,12 +228,16 @@ def test_chooser_chooses_system_specific_wheel_link_if_available(
 
     link = chooser.choose_for(package)
 
-    assert "PyYAML-3.13-cp37-cp37m-win32.whl" == link.filename
+    assert link.filename == "PyYAML-3.13-cp37-cp37m-win32.whl"
 
 
 @pytest.mark.parametrize("source_type", ["", "legacy"])
 def test_chooser_chooses_sdist_if_no_compatible_wheel_link_is_available(
-    env, mock_pypi, mock_legacy, source_type, pool,
+    env: MockEnv,
+    mock_pypi: None,
+    mock_legacy: None,
+    source_type: str,
+    pool: Pool,
 ):
     chooser = Chooser(pool, env)
 
@@ -165,19 +253,23 @@ def test_chooser_chooses_sdist_if_no_compatible_wheel_link_is_available(
 
     link = chooser.choose_for(package)
 
-    assert "PyYAML-3.13.tar.gz" == link.filename
+    assert link.filename == "PyYAML-3.13.tar.gz"
 
 
 @pytest.mark.parametrize("source_type", ["", "legacy"])
 def test_chooser_chooses_distributions_that_match_the_package_hashes(
-    env, mock_pypi, mock_legacy, source_type, pool,
+    env: MockEnv,
+    mock_pypi: None,
+    mock_legacy: None,
+    source_type: str,
+    pool: Pool,
 ):
     chooser = Chooser(pool, env)
 
     package = Package("isort", "4.3.4")
     files = [
         {
-            "hash": "sha256:b9c40e9750f3d77e6e4d441d8b0266cf555e7cdabdcff33c4fd06366ca761ef8",
+            "hash": "sha256:b9c40e9750f3d77e6e4d441d8b0266cf555e7cdabdcff33c4fd06366ca761ef8",  # noqa: E501
             "filename": "isort-4.3.4.tar.gz",
         }
     ]
@@ -194,19 +286,100 @@ def test_chooser_chooses_distributions_that_match_the_package_hashes(
 
     link = chooser.choose_for(package)
 
-    assert "isort-4.3.4.tar.gz" == link.filename
+    assert link.filename == "isort-4.3.4.tar.gz"
+
+
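+# Per PEP 592, a yanked file may still be chosen when it is the only
+# candidate satisfying a pinned version.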
+@pytest.mark.parametrize("source_type", ["", "legacy"])
+def test_chooser_chooses_yanked_if_no_others(
+    env: MockEnv,
+    mock_pypi: None,
+    mock_legacy: None,
+    source_type: str,
+    pool: Pool,
+) -> None:
+    chooser = Chooser(pool, env)
+
+    package = Package("black", "21.11b0")
+    files = [
+        {
+            "filename": "black-21.11b0-py3-none-any.whl",
+            "hash": "sha256:0b1f66cbfadcd332ceeaeecf6373d9991d451868d2e2219ad0ac1213fb701117",  # noqa: E501
+        }
+    ]
+    if source_type == "legacy":
+        package = Package(
+            package.name,
+            package.version.text,
+            source_type="legacy",
+            source_reference="foo",
+            source_url="https://foo.bar/simple/",
+        )
+
+    package.files = files
+
+    link = chooser.choose_for(package)
+
+    assert link.filename == "black-21.11b0-py3-none-any.whl"
+    assert link.yanked
+
+
+def test_chooser_does_not_choose_yanked_if_others(
+    mock_legacy: None,
+    mock_legacy_partial_yank: None,
+    pool: Pool,
+) -> None:
+    chooser = Chooser(pool, MockEnv(supported_tags=[Tag("py2", "none", "any")]))
+
+    package = Package("futures", "3.2.0")
+    files = [
+        {
+            "filename": "futures-3.2.0-py2-none-any.whl",
+            "hash": "sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1",  # noqa: E501
+        },
+        {
+            "filename": "futures-3.2.0.tar.gz",
+            "hash": "sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265",  # noqa: E501
+        },
+    ]
+    package = Package(
+        package.name,
+        package.version.text,
+        source_type="legacy",
+        source_reference="foo",
+        source_url="https://foo.bar/simple/",
+    )
+    package_partial_yank = Package(
+        package.name,
+        package.version.text,
+        source_type="legacy",
+        source_reference="foo2",
+        source_url="https://foo2.bar/simple/",
+    )
+
+    package.files = files
+    package_partial_yank.files = files
+
+    link = chooser.choose_for(package)
+    link_partial_yank = chooser.choose_for(package_partial_yank)
+
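+    # On "foo" the wheel is not yanked, so it wins; on "foo2" the wheel is
+    # yanked, so the non-yanked sdist is preferred instead.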
+    assert link.filename == "futures-3.2.0-py2-none-any.whl"
+    assert link_partial_yank.filename == "futures-3.2.0.tar.gz"
 
 
 @pytest.mark.parametrize("source_type", ["", "legacy"])
 def test_chooser_throws_an_error_if_package_hashes_do_not_match(
-    env, mock_pypi, mock_legacy, source_type, pool,
+    env: MockEnv,
+    mock_pypi: None,
+    mock_legacy: None,
+    source_type: str,
+    pool: Pool,
 ):
     chooser = Chooser(pool, env)
 
     package = Package("isort", "4.3.4")
     files = [
         {
-            "hash": "sha256:0000000000000000000000000000000000000000000000000000000000000000",
+            "hash": "sha256:0000000000000000000000000000000000000000000000000000000000000000",  # noqa: E501
             "filename": "isort-4.3.4.tar.gz",
         }
     ]
diff --git a/vendor/poetry/tests/installation/test_executor.py b/vendor/poetry/tests/installation/test_executor.py
index 21404f72..b2e06de6 100644
--- a/vendor/poetry/tests/installation/test_executor.py
+++ b/vendor/poetry/tests/installation/test_executor.py
@@ -1,42 +1,79 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
+from __future__ import annotations
 
+import csv
+import json
 import re
 import shutil
 
-import pytest
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
 
-from clikit.api.formatter.style import Style
-from clikit.io.buffered_io import BufferedIO
+import pytest
 
-from poetry.config.config import Config
+from cleo.formatters.style import Style
+from cleo.io.buffered_io import BufferedIO
 from poetry.core.packages.package import Package
 from poetry.core.packages.utils.link import Link
-from poetry.installation.chef import Chef
+
 from poetry.installation.executor import Executor
 from poetry.installation.operations import Install
 from poetry.installation.operations import Uninstall
 from poetry.installation.operations import Update
 from poetry.repositories.pool import Pool
-from poetry.utils._compat import PY36
-from poetry.utils._compat import Path
 from poetry.utils.env import MockEnv
 from tests.repositories.test_pypi_repository import MockRepository
 
 
+if TYPE_CHECKING:
+    import httpretty
+
+    from httpretty.core import HTTPrettyRequest
+    from pytest_mock import MockerFixture
+
+    from poetry.config.config import Config
+    from poetry.installation.operations.operation import Operation
+    from poetry.utils.env import VirtualEnv
+    from tests.types import FixtureDirGetter
+
+
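+# A MockEnv backed by a real on-disk .venv path, so path-based operations in
+# the executor have somewhere to work.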
+@pytest.fixture
+def env(tmp_dir: str) -> MockEnv:
+    path = Path(tmp_dir) / ".venv"
+    path.mkdir(parents=True)
+
+    return MockEnv(path=path, is_venv=True)
+
+
 @pytest.fixture()
-def io():
+def io() -> BufferedIO:
     io = BufferedIO()
-    io.formatter.add_style(Style("c1_dark").fg("cyan").dark())
-    io.formatter.add_style(Style("c2_dark").fg("default").bold().dark())
-    io.formatter.add_style(Style("success_dark").fg("green").dark())
-    io.formatter.add_style(Style("warning").fg("yellow"))
+    io.output.formatter.set_style("c1_dark", Style("cyan", options=["dark"]))
+    io.output.formatter.set_style("c2_dark", Style("default", options=["bold", "dark"]))
+    io.output.formatter.set_style("success_dark", Style("green", options=["dark"]))
+    io.output.formatter.set_style("warning", Style("yellow"))
+
+    return io
+
+
+@pytest.fixture()
+def io_decorated() -> BufferedIO:
+    io = BufferedIO(decorated=True)
+    io.output.formatter.set_style("c1", Style("cyan"))
+    io.output.formatter.set_style("success", Style("green"))
+
+    return io
+
+
+@pytest.fixture()
+def io_not_decorated() -> BufferedIO:
+    io = BufferedIO(decorated=False)
 
     return io
 
 
 @pytest.fixture()
-def pool():
+def pool() -> Pool:
     pool = Pool()
     pool.add_repository(MockRepository())
 
@@ -44,8 +81,10 @@ def pool():
 
 
 @pytest.fixture()
-def mock_file_downloads(http):
-    def callback(request, uri, headers):
+def mock_file_downloads(http: type[httpretty.httpretty]) -> None:
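+    # Answer every files.pythonhosted.org request with the local demo wheel so
+    # the tests never touch the network.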
+    def callback(
+        request: HTTPrettyRequest, uri: str, headers: dict[str, Any]
+    ) -> list[int | dict[str, Any] | str]:
         fixture = Path(__file__).parent.parent.joinpath(
             "fixtures/distributions/demo-0.1.0-py2.py3-none-any.whl"
         )
@@ -54,17 +93,25 @@ def callback(request, uri, headers):
             return [200, headers, f.read()]
 
     http.register_uri(
-        http.GET, re.compile("^https://files.pythonhosted.org/.*$"), body=callback,
+        http.GET,
+        re.compile("^https://files.pythonhosted.org/.*$"),
+        body=callback,
     )
 
 
 def test_execute_executes_a_batch_of_operations(
-    config, pool, io, tmp_dir, mock_file_downloads
+    mocker: MockerFixture,
+    config: Config,
+    pool: Pool,
+    io: BufferedIO,
+    tmp_dir: str,
+    mock_file_downloads: None,
+    env: MockEnv,
 ):
-    config = Config()
+    pip_install = mocker.patch("poetry.installation.executor.pip_install")
+
     config.merge({"cache-dir": tmp_dir})
 
-    env = MockEnv(path=Path(tmp_dir))
     executor = Executor(env, pool, config, io)
 
     file_package = Package(
@@ -95,9 +142,10 @@ def test_execute_executes_a_batch_of_operations(
         source_type="git",
         source_reference="master",
         source_url="https://github.com/demo/demo.git",
+        develop=True,
     )
 
-    assert 0 == executor.execute(
+    return_code = executor.execute(
         [
             Install(Package("pytest", "3.5.2")),
             Uninstall(Package("attrs", "17.4.0")),
@@ -109,35 +157,95 @@ def test_execute_executes_a_batch_of_operations(
         ]
     )
 
-    expected = """
+    expected = f"""
 Package operations: 4 installs, 1 update, 1 removal
 
   • Installing pytest (3.5.2)
   • Removing attrs (17.4.0)
   • Updating requests (2.18.3 -> 2.18.4)
-  • Installing demo (0.1.0 {})
-  • Installing simple-project (1.2.3 {})
+  • Installing demo (0.1.0 {file_package.source_url})
+  • Installing simple-project (1.2.3 {directory_package.source_url})
   • Installing demo (0.1.0 master)
-""".format(
-        file_package.source_url, directory_package.source_url
-    )
+"""
 
     expected = set(expected.splitlines())
     output = set(io.fetch_output().splitlines())
-    assert expected == output
-    assert 5 == len(env.executed)
+    assert output == expected
+    assert len(env.executed) == 1
+    assert return_code == 0
+    assert pip_install.call_count == 5
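+    # call_args reflects the last pip_install call, expected to be the
+    # develop-mode git package installed as an editable upgrade.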
+    assert pip_install.call_args.kwargs.get("upgrade", False)
+    assert pip_install.call_args.kwargs.get("editable", False)
+
+
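+# Yanked-file warnings should be emitted for installs and updates only;
+# removals never fetch files, so no warning is expected for them.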
+@pytest.mark.parametrize(
+    "operations, has_warning",
+    [
+        (
+            [Install(Package("black", "21.11b0")), Install(Package("pytest", "3.5.2"))],
+            True,
+        ),
+        (
+            [
+                Uninstall(Package("black", "21.11b0")),
+                Uninstall(Package("pytest", "3.5.2")),
+            ],
+            False,
+        ),
+        (
+            [
+                Update(Package("black", "19.10b0"), Package("black", "21.11b0")),
+                Update(Package("pytest", "3.5.1"), Package("pytest", "3.5.2")),
+            ],
+            True,
+        ),
+    ],
+)
+def test_execute_prints_warning_for_yanked_package(
+    config: Config,
+    pool: Pool,
+    io: BufferedIO,
+    tmp_dir: str,
+    mock_file_downloads: None,
+    env: MockEnv,
+    operations: list[Operation],
+    has_warning: bool,
+):
+    config.merge({"cache-dir": tmp_dir})
+
+    executor = Executor(env, pool, config, io)
+
+    return_code = executor.execute(operations)
+
+    expected = (
+        "Warning: The file chosen for install of black 21.11b0 "
+        "(black-21.11b0-py3-none-any.whl) is yanked. Reason for being yanked: "
+        "Broken regex dependency. Use 21.11b1 instead."
+    )
+    error = io.fetch_error()
+    assert return_code == 0
+    assert "pytest" not in error
+    if has_warning:
+        assert expected in error
+        assert error.count("is yanked") == 1
+    else:
+        assert expected not in error
+        assert error.count("yanked") == 0
 
 
-def test_execute_shows_skipped_operations_if_verbose(config, pool, io):
-    config = Config()
-    config.merge({"cache-dir": "/foo"})
+def test_execute_shows_skipped_operations_if_verbose(
+    config: Config, pool: Pool, io: BufferedIO, config_cache_dir: Path, env: MockEnv
+):
+    config.merge({"cache-dir": config_cache_dir.as_posix()})
 
-    env = MockEnv()
     executor = Executor(env, pool, config, io)
     executor.verbose()
 
-    assert 0 == executor.execute(
-        [Uninstall(Package("clikit", "0.2.3")).skip("Not currently installed")]
+    assert (
+        executor.execute(
+            [Uninstall(Package("clikit", "0.2.3")).skip("Not currently installed")]
+        )
+        == 0
     )
 
     expected = """
@@ -145,21 +253,19 @@ def test_execute_shows_skipped_operations_if_verbose(config, pool, io):
 
   • Removing clikit (0.2.3): Skipped for the following reason: Not currently installed
 """
-    assert expected == io.fetch_output()
-    assert 0 == len(env.executed)
+    assert io.fetch_output() == expected
+    assert len(env.executed) == 0
 
 
-@pytest.mark.skipif(
-    not PY36, reason="Improved error rendering is only available on Python >=3.6"
-)
-def test_execute_should_show_errors(config, mocker, io):
-    env = MockEnv()
+def test_execute_should_show_errors(
+    config: Config, pool: Pool, mocker: MockerFixture, io: BufferedIO, env: MockEnv
+):
     executor = Executor(env, pool, config, io)
     executor.verbose()
 
     mocker.patch.object(executor, "_install", side_effect=Exception("It failed!"))
 
-    assert 1 == executor.execute([Install(Package("clikit", "0.2.3"))])
+    assert executor.execute([Install(Package("clikit", "0.2.3"))]) == 1
 
     expected = """
 Package operations: 1 install, 0 updates, 0 removals
@@ -174,17 +280,93 @@ def test_execute_should_show_errors(config, mocker, io):
     assert expected in io.fetch_output()
 
 
+def test_execute_works_with_ansi_output(
+    mocker: MockerFixture,
+    config: Config,
+    pool: Pool,
+    io_decorated: BufferedIO,
+    tmp_dir: str,
+    mock_file_downloads: None,
+    env: MockEnv,
+):
+    config.merge({"cache-dir": tmp_dir})
+
+    executor = Executor(env, pool, config, io_decorated)
+
+    install_output = (
+        "some string that does not contain a keyb0ard !nterrupt or cance11ed by u$er"
+    )
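+    # Presumably obfuscated so the output cannot accidentally match the
+    # executor's cancellation / keyboard-interrupt detection.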
+    mocker.patch.object(env, "_run", return_value=install_output)
+    return_code = executor.execute(
+        [
+            Install(Package("pytest", "3.5.2")),
+        ]
+    )
+    env._run.assert_called_once()
+
+    # fmt: off
+    expected = [
+        "\x1b[39;1mPackage operations\x1b[39;22m: \x1b[34m1\x1b[39m install, \x1b[34m0\x1b[39m updates, \x1b[34m0\x1b[39m removals",  # noqa: E501
+        "\x1b[34;1m•\x1b[39;22m \x1b[39mInstalling \x1b[39m\x1b[36mpytest\x1b[39m\x1b[39m (\x1b[39m\x1b[39;1m3.5.2\x1b[39;22m\x1b[39m)\x1b[39m: \x1b[34mPending...\x1b[39m",  # noqa: E501
+        "\x1b[34;1m•\x1b[39;22m \x1b[39mInstalling \x1b[39m\x1b[36mpytest\x1b[39m\x1b[39m (\x1b[39m\x1b[39;1m3.5.2\x1b[39;22m\x1b[39m)\x1b[39m: \x1b[34mDownloading...\x1b[39m",  # noqa: E501
+        "\x1b[34;1m•\x1b[39;22m \x1b[39mInstalling \x1b[39m\x1b[36mpytest\x1b[39m\x1b[39m (\x1b[39m\x1b[39;1m3.5.2\x1b[39;22m\x1b[39m)\x1b[39m: \x1b[34mInstalling...\x1b[39m",  # noqa: E501
+        "\x1b[32;1m•\x1b[39;22m \x1b[39mInstalling \x1b[39m\x1b[36mpytest\x1b[39m\x1b[39m (\x1b[39m\x1b[32m3.5.2\x1b[39m\x1b[39m)\x1b[39m",  # finished  # noqa: E501
+    ]
+    # fmt: on
+
+    output = io_decorated.fetch_output()
+    # hint: use print(repr(output)) if you need to debug this
+
+    for line in expected:
+        assert line in output
+    assert return_code == 0
+
+
+def test_execute_works_with_no_ansi_output(
+    mocker: MockerFixture,
+    config: Config,
+    pool: Pool,
+    io_not_decorated: BufferedIO,
+    tmp_dir: str,
+    mock_file_downloads: None,
+    env: MockEnv,
+):
+    config.merge({"cache-dir": tmp_dir})
+
+    executor = Executor(env, pool, config, io_not_decorated)
+
+    install_output = (
+        "some string that does not contain a keyb0ard !nterrupt or cance11ed by u$er"
+    )
+    mocker.patch.object(env, "_run", return_value=install_output)
+    return_code = executor.execute(
+        [
+            Install(Package("pytest", "3.5.2")),
+        ]
+    )
+    env._run.assert_called_once()
+
+    expected = """
+Package operations: 1 install, 0 updates, 0 removals
+
+  • Installing pytest (3.5.2)
+"""
+    expected = set(expected.splitlines())
+    output = set(io_not_decorated.fetch_output().splitlines())
+    assert output == expected
+    assert return_code == 0
+
+
 def test_execute_should_show_operation_as_cancelled_on_subprocess_keyboard_interrupt(
-    config, mocker, io
+    config: Config, pool: Pool, mocker: MockerFixture, io: BufferedIO, env: MockEnv
 ):
-    env = MockEnv()
     executor = Executor(env, pool, config, io)
     executor.verbose()
 
     # A return code of -2 means KeyboardInterrupt in the pip subprocess
     mocker.patch.object(executor, "_install", return_value=-2)
 
-    assert 1 == executor.execute([Install(Package("clikit", "0.2.3"))])
+    assert executor.execute([Install(Package("clikit", "0.2.3"))]) == 1
 
     expected = """
 Package operations: 1 install, 0 updates, 0 removals
@@ -193,24 +375,25 @@ def test_execute_should_show_operation_as_cancelled_on_subprocess_keyboard_inter
   • Installing clikit (0.2.3): Cancelled
 """
 
-    assert expected == io.fetch_output()
+    assert io.fetch_output() == expected
 
 
-def test_execute_should_gracefully_handle_io_error(config, mocker, io):
-    env = MockEnv()
+def test_execute_should_gracefully_handle_io_error(
+    config: Config, pool: Pool, mocker: MockerFixture, io: BufferedIO, env: MockEnv
+):
     executor = Executor(env, pool, config, io)
     executor.verbose()
 
     original_write_line = executor._io.write_line
 
-    def write_line(string, flags=None):
+    def write_line(string: str, **kwargs: Any) -> None:
         # Simulate UnicodeEncodeError
         string.encode("ascii")
-        original_write_line(string, flags)
+        original_write_line(string, **kwargs)
 
     mocker.patch.object(io, "write_line", side_effect=write_line)
 
-    assert 1 == executor.execute([Install(Package("clikit", "0.2.3"))])
+    assert executor.execute([Install(Package("clikit", "0.2.3"))]) == 1
 
     expected = r"""
 Package operations: 1 install, 0 updates, 0 removals
@@ -223,7 +406,13 @@ def write_line(string, flags=None):
 
 
 def test_executor_should_delete_incomplete_downloads(
-    config, io, tmp_dir, mocker, pool, mock_file_downloads
+    config: Config,
+    io: BufferedIO,
+    tmp_dir: str,
+    mocker: MockerFixture,
+    pool: Pool,
+    mock_file_downloads: None,
+    env: MockEnv,
 ):
     fixture = Path(__file__).parent.parent.joinpath(
         "fixtures/distributions/demo-0.1.0-py2.py3-none-any.whl"
@@ -236,17 +425,15 @@ def test_executor_should_delete_incomplete_downloads(
     )
     mocker.patch(
         "poetry.installation.chef.Chef.get_cached_archive_for_link",
-        side_effect=lambda link: link,
+        side_effect=lambda link: None,
     )
     mocker.patch(
         "poetry.installation.chef.Chef.get_cache_directory_for_link",
         return_value=Path(tmp_dir),
     )
 
-    config = Config()
     config.merge({"cache-dir": tmp_dir})
 
-    env = MockEnv(path=Path(tmp_dir))
     executor = Executor(env, pool, config, io)
 
     with pytest.raises(Exception, match="Download error"):
@@ -255,119 +442,280 @@ def test_executor_should_delete_incomplete_downloads(
     assert not destination_fixture.exists()
 
 
-def test_executor_should_check_every_possible_hash_types(
-    config, io, pool, mocker, fixture_dir, tmp_dir
+def verify_installed_distribution(
+    venv: VirtualEnv, package: Package, url_reference: dict[str, Any] | None = None
 ):
-    mocker.patch.object(
-        Chef, "get_cached_archive_for_link", side_effect=lambda link: link,
-    )
-    mocker.patch.object(
-        Executor,
-        "_download_archive",
-        return_value=fixture_dir("distributions").joinpath(
-            "demo-0.1.0-py2.py3-none-any.whl"
+    distributions = list(venv.site_packages.distributions(name=package.name))
+    assert len(distributions) == 1
+
+    distribution = distributions[0]
+    metadata = distribution.metadata
+    assert metadata["Name"] == package.name
+    assert metadata["Version"] == package.version.text
+
+    direct_url_file = distribution._path.joinpath("direct_url.json")
+
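+    # PEP 610: direct-URL installs must write direct_url.json into dist-info
+    # and list it in RECORD; index-based installs must not create the file.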
+    if url_reference is not None:
+        record_file = distribution._path.joinpath("RECORD")
+        with open(record_file, encoding="utf-8", newline="") as f:
+            reader = csv.reader(f)
+            rows = list(reader)
+        assert all(len(row) == 3 for row in rows)
+        record_entries = {row[0] for row in rows}
+        direct_url_entry = direct_url_file.relative_to(record_file.parent.parent)
+        assert direct_url_file.exists()
+        assert str(direct_url_entry) in record_entries
+        assert json.loads(direct_url_file.read_text(encoding="utf-8")) == url_reference
+    else:
+        assert not direct_url_file.exists()
+
+
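+# Packages resolved from an index (PyPI or a private simple index) are not
+# direct-URL installs, so no direct_url.json should be written for them.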
+@pytest.mark.parametrize(
+    "package",
+    [
+        Package("demo", "0.1.0"),  # PyPI
+        Package(  # private source
+            "demo",
+            "0.1.0",
+            source_type="legacy",
+            source_url="http://localhost:3141/root/pypi/+simple",
+            source_reference="private",
         ),
+    ],
+)
+def test_executor_should_not_write_pep610_url_references_for_cached_package(
+    package: Package,
+    mocker: MockerFixture,
+    fixture_dir: FixtureDirGetter,
+    tmp_venv: VirtualEnv,
+    pool: Pool,
+    config: Config,
+    io: BufferedIO,
+):
+    link_cached = fixture_dir("distributions") / "demo-0.1.0-py2.py3-none-any.whl"
+
+    mocker.patch(
+        "poetry.installation.executor.Executor._download", return_value=link_cached
     )
 
-    env = MockEnv(path=Path(tmp_dir))
-    executor = Executor(env, pool, config, io)
+    executor = Executor(tmp_venv, pool, config, io)
+    executor.execute([Install(package)])
+    verify_installed_distribution(tmp_venv, package)
 
-    package = Package("demo", "0.1.0")
-    package.files = [
-        {
-            "file": "demo-0.1.0-py2.py3-none-any.whl",
-            "hash": "md5:15507846fd4299596661d0197bfb4f90",
-        }
-    ]
 
-    archive = executor._download_link(
-        Install(package), Link("https://example.com/demo-0.1.0-py2.py3-none-any.whl")
+def test_executor_should_write_pep610_url_references_for_files(
+    tmp_venv: VirtualEnv, pool: Pool, config: Config, io: BufferedIO
+):
+    url = (
+        Path(__file__)
+        .parent.parent.joinpath(
+            "fixtures/distributions/demo-0.1.0-py2.py3-none-any.whl"
+        )
+        .resolve()
     )
+    package = Package("demo", "0.1.0", source_type="file", source_url=url.as_posix())
 
-    assert archive == fixture_dir("distributions").joinpath(
-        "demo-0.1.0-py2.py3-none-any.whl"
+    executor = Executor(tmp_venv, pool, config, io)
+    executor.execute([Install(package)])
+    verify_installed_distribution(
+        tmp_venv, package, {"archive_info": {}, "url": url.as_uri()}
     )
 
 
-def test_executor_should_check_every_possible_hash_types_before_failing(
-    config, io, pool, mocker, fixture_dir, tmp_dir
+def test_executor_should_write_pep610_url_references_for_directories(
+    tmp_venv: VirtualEnv, pool: Pool, config: Config, io: BufferedIO
 ):
-    mocker.patch.object(
-        Chef, "get_cached_archive_for_link", side_effect=lambda link: link,
-    )
-    mocker.patch.object(
-        Executor,
-        "_download_archive",
-        return_value=fixture_dir("distributions").joinpath(
-            "demo-0.1.0-py2.py3-none-any.whl"
-        ),
+    url = Path(__file__).parent.parent.joinpath("fixtures/simple_project").resolve()
+    package = Package(
+        "simple-project", "1.2.3", source_type="directory", source_url=url.as_posix()
     )
 
-    env = MockEnv(path=Path(tmp_dir))
-    executor = Executor(env, pool, config, io)
+    executor = Executor(tmp_venv, pool, config, io)
+    executor.execute([Install(package)])
+    verify_installed_distribution(
+        tmp_venv, package, {"dir_info": {}, "url": url.as_uri()}
+    )
 
-    package = Package("demo", "0.1.0")
-    package.files = [
-        {"file": "demo-0.1.0-py2.py3-none-any.whl", "hash": "md5:123456"},
-        {"file": "demo-0.1.0-py2.py3-none-any.whl", "hash": "sha256:123456"},
-    ]
 
-    expected_message = (
-        "Invalid hashes "
-        "("
-        "md5:15507846fd4299596661d0197bfb4f90, "
-        "sha256:70e704135718fffbcbf61ed1fc45933cfd86951a744b681000eaaa75da31f17a"
-        ") "
-        "for demo (0.1.0) using archive demo-0.1.0-py2.py3-none-any.whl. "
-        "Expected one of md5:123456, sha256:123456."
+def test_executor_should_write_pep610_url_references_for_editable_directories(
+    tmp_venv: VirtualEnv, pool: Pool, config: Config, io: BufferedIO
+):
+    url = Path(__file__).parent.parent.joinpath("fixtures/simple_project").resolve()
+    package = Package(
+        "simple-project",
+        "1.2.3",
+        source_type="directory",
+        source_url=url.as_posix(),
+        develop=True,
     )
 
-    with pytest.raises(RuntimeError, match=re.escape(expected_message)):
-        executor._download_link(
-            Install(package),
-            Link("https://example.com/demo-0.1.0-py2.py3-none-any.whl"),
-        )
+    executor = Executor(tmp_venv, pool, config, io)
+    executor.execute([Install(package)])
+    verify_installed_distribution(
+        tmp_venv, package, {"dir_info": {"editable": True}, "url": url.as_uri()}
+    )
 
 
-def test_executor_should_use_cached_link_and_hash(
-    config, io, pool, mocker, fixture_dir, tmp_dir
+def test_executor_should_write_pep610_url_references_for_urls(
+    tmp_venv: VirtualEnv,
+    pool: Pool,
+    config: Config,
+    io: BufferedIO,
+    mock_file_downloads: None,
 ):
-    # Produce a file:/// URI that is a valid link
-    link_cached = Link(
-        fixture_dir("distributions")
-        .joinpath("demo-0.1.0-py2.py3-none-any.whl")
-        .as_uri()
+    package = Package(
+        "demo",
+        "0.1.0",
+        source_type="url",
+        source_url="https://files.pythonhosted.org/demo-0.1.0-py2.py3-none-any.whl",
     )
-    mocker.patch.object(
-        Chef, "get_cached_archive_for_link", side_effect=lambda _: link_cached
+
+    executor = Executor(tmp_venv, pool, config, io)
+    executor.execute([Install(package)])
+    verify_installed_distribution(
+        tmp_venv, package, {"archive_info": {}, "url": package.source_url}
     )
 
-    env = MockEnv(path=Path(tmp_dir))
-    executor = Executor(env, pool, config, io)
+
+def test_executor_should_write_pep610_url_references_for_git(
+    tmp_venv: VirtualEnv,
+    pool: Pool,
+    config: Config,
+    io: BufferedIO,
+    mock_file_downloads: None,
+):
+    package = Package(
+        "demo",
+        "0.1.2",
+        source_type="git",
+        source_reference="master",
+        source_resolved_reference="123456",
+        source_url="https://github.com/demo/demo.git",
+    )
+
+    executor = Executor(tmp_venv, pool, config, io)
+    executor.execute([Install(package)])
+    verify_installed_distribution(
+        tmp_venv,
+        package,
+        {
+            "vcs_info": {
+                "vcs": "git",
+                "requested_revision": "master",
+                "commit_id": "123456",
+            },
+            "url": package.source_url,
+        },
+    )
+
+
+def test_executor_should_write_pep610_url_references_for_git_with_subdirectories(
+    tmp_venv: VirtualEnv,
+    pool: Pool,
+    config: Config,
+    io: BufferedIO,
+    mock_file_downloads: None,
+):
+    package = Package(
+        "two",
+        "2.0.0",
+        source_type="git",
+        source_reference="master",
+        source_resolved_reference="123456",
+        source_url="https://github.com/demo/subdirectories.git",
+        source_subdirectory="two",
+    )
+
+    executor = Executor(tmp_venv, pool, config, io)
+    executor.execute([Install(package)])
+    verify_installed_distribution(
+        tmp_venv,
+        package,
+        {
+            "vcs_info": {
+                "vcs": "git",
+                "requested_revision": "master",
+                "commit_id": "123456",
+            },
+            "url": package.source_url,
+            "subdirectory": package.source_subdirectory,
+        },
+    )
+
+
+def test_executor_should_use_cached_link_and_hash(
+    tmp_venv: VirtualEnv,
+    pool: Pool,
+    config: Config,
+    io: BufferedIO,
+    mocker: MockerFixture,
+    fixture_dir: FixtureDirGetter,
+):
+    link_cached = fixture_dir("distributions") / "demo-0.1.0-py2.py3-none-any.whl"
+
+    mocker.patch(
+        "poetry.installation.chef.Chef.get_cached_archive_for_link",
+        return_value=link_cached,
+    )
 
     package = Package("demo", "0.1.0")
+    # Set package.files so the executor will attempt to hash the package
     package.files = [
         {
             "file": "demo-0.1.0-py2.py3-none-any.whl",
-            "hash": "md5:15507846fd4299596661d0197bfb4f90",
+            "hash": "sha256:70e704135718fffbcbf61ed1fc45933cfd86951a744b681000eaaa75da31f17a",  # noqa: E501
         }
     ]
 
+    executor = Executor(tmp_venv, pool, config, io)
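+    # With the Chef cache patched, _download_link should return the local wheel
+    # without fetching anything from the link.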
     archive = executor._download_link(
-        Install(package), Link("https://example.com/demo-0.1.0-py2.py3-none-any.whl")
+        Install(package),
+        Link("https://example.com/demo-0.1.0-py2.py3-none-any.whl"),
     )
-
     assert archive == link_cached
 
 
-def test_executer_fallback_on_poetry_create_error(
-    mocker, config, pool, io, tmp_dir, mock_file_downloads,
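+# The expected pool size follows min(max-workers, cpu_count + 4), with a failing
+# os.cpu_count() treated as a single CPU.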
+@pytest.mark.parametrize(
+    ("max_workers", "cpu_count", "side_effect", "expected_workers"),
+    [
+        (None, 3, None, 7),
+        (3, 4, None, 3),
+        (8, 3, None, 7),
+        (None, 8, NotImplementedError(), 5),
+        (2, 8, NotImplementedError(), 2),
+        (8, 8, NotImplementedError(), 5),
+    ],
+)
+def test_executor_should_be_initialized_with_correct_workers(
+    tmp_venv: VirtualEnv,
+    pool: Pool,
+    config: Config,
+    io: BufferedIO,
+    mocker: MockerFixture,
+    max_workers: int | None,
+    cpu_count: int | None,
+    side_effect: Exception | None,
+    expected_workers: int,
 ):
-    import poetry.installation.executor
+    config.merge({"installer": {"max-workers": max_workers}})
 
-    mock_pip_install = mocker.patch.object(
-        poetry.installation.executor.Executor, "run_pip"
-    )
+    mocker.patch("os.cpu_count", return_value=cpu_count, side_effect=side_effect)
+
+    executor = Executor(tmp_venv, pool, config, io)
+
+    assert executor._max_workers == expected_workers
+
+
+def test_executor_fallback_on_poetry_create_error(
+    mocker: MockerFixture,
+    config: Config,
+    pool: Pool,
+    io: BufferedIO,
+    tmp_dir: str,
+    mock_file_downloads: None,
+    env: MockEnv,
+):
+    mock_pip_install = mocker.patch("poetry.installation.executor.pip_install")
     mock_sdist_builder = mocker.patch("poetry.core.masonry.builders.sdist.SdistBuilder")
     mock_editable_builder = mocker.patch(
         "poetry.masonry.builders.editable.EditableBuilder"
@@ -378,7 +726,6 @@ def test_executer_fallback_on_poetry_create_error(
 
     config.merge({"cache-dir": tmp_dir})
 
-    env = MockEnv(path=Path(tmp_dir))
     executor = Executor(env, pool, config, io)
 
     directory_package = Package(
@@ -391,14 +738,17 @@ def test_executer_fallback_on_poetry_create_error(
         .as_posix(),
     )
 
-    return_code = executor.execute([Install(directory_package)])
+    return_code = executor.execute(
+        [
+            Install(directory_package),
+        ]
+    )
 
-    expected = """
+    expected = f"""
 Package operations: 1 install, 0 updates, 0 removals
-  • Installing simple-project (1.2.3 {source_url})
-""".format(
-        source_url=directory_package.source_url
-    )
+
+  • Installing simple-project (1.2.3 {directory_package.source_url})
+"""
 
     expected = set(expected.splitlines())
     output = set(io.fetch_output().splitlines())
@@ -408,3 +758,5 @@ def test_executer_fallback_on_poetry_create_error(
     assert mock_sdist_builder.call_count == 0
     assert mock_editable_builder.call_count == 0
     assert mock_pip_install.call_count == 1
+    assert mock_pip_install.call_args[1].get("upgrade") is True
+    assert mock_pip_install.call_args[1].get("editable") is False
diff --git a/vendor/poetry/tests/installation/test_installer.py b/vendor/poetry/tests/installation/test_installer.py
index 77fb58da..1b8938d4 100644
--- a/vendor/poetry/tests/installation/test_installer.py
+++ b/vendor/poetry/tests/installation/test_installer.py
@@ -1,15 +1,25 @@
-from __future__ import unicode_literals
+from __future__ import annotations
 
+import itertools
 import json
-import sys
 
-import pytest
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
 
-from clikit.io import NullIO
+import pytest
 
-from poetry.core.packages import ProjectPackage
+from cleo.io.inputs.input import Input
+from cleo.io.io import IO
+from cleo.io.null_io import NullIO
+from cleo.io.outputs.buffered_output import BufferedOutput
+from cleo.io.outputs.output import Verbosity
+from poetry.core.packages.dependency_group import MAIN_GROUP
+from poetry.core.packages.dependency_group import DependencyGroup
 from poetry.core.packages.package import Package
+from poetry.core.packages.project_package import ProjectPackage
 from poetry.core.toml.file import TOMLFile
+
 from poetry.factory import Factory
 from poetry.installation import Installer as BaseInstaller
 from poetry.installation.executor import Executor as BaseExecutor
@@ -18,10 +28,9 @@
 from poetry.repositories import Pool
 from poetry.repositories import Repository
 from poetry.repositories.installed_repository import InstalledRepository
-from poetry.utils._compat import PY2
-from poetry.utils._compat import Path
 from poetry.utils.env import MockEnv
 from poetry.utils.env import NullEnv
+from tests.helpers import MOCK_DEFAULT_GIT_REVISION
 from tests.helpers import get_dependency
 from tests.helpers import get_package
 from tests.repositories.test_legacy_repository import (
@@ -30,98 +39,103 @@
 from tests.repositories.test_pypi_repository import MockRepository
 
 
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
+
+    from poetry.installation.operations.operation import Operation
+    from poetry.packages import DependencyPackage
+    from poetry.utils.env import Env
+    from tests.conftest import Config
+    from tests.types import FixtureDirGetter
+
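+# Bootstrap packages that synchronization preserves unless they are explicitly
+# managed through the lock file.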
+RESERVED_PACKAGES = ("pip", "setuptools", "wheel")
+
+
 class Installer(BaseInstaller):
-    def _get_installer(self):
+    def _get_installer(self) -> NoopInstaller:
         return NoopInstaller()
 
 
 class Executor(BaseExecutor):
-    def __init__(self, *args, **kwargs):
-        super(Executor, self).__init__(*args, **kwargs)
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
 
-        self._installs = []
-        self._updates = []
-        self._uninstalls = []
+        self._installs: list[DependencyPackage] = []
+        self._updates: list[DependencyPackage] = []
+        self._uninstalls: list[DependencyPackage] = []
 
     @property
-    def installations(self):
+    def installations(self) -> list[DependencyPackage]:
         return self._installs
 
     @property
-    def updates(self):
+    def updates(self) -> list[DependencyPackage]:
         return self._updates
 
     @property
-    def removals(self):
+    def removals(self) -> list[DependencyPackage]:
         return self._uninstalls
 
-    def _do_execute_operation(self, operation):
-        super(Executor, self)._do_execute_operation(operation)
+    def _do_execute_operation(self, operation: Operation) -> None:
+        super()._do_execute_operation(operation)
 
         if not operation.skipped:
-            getattr(self, "_{}s".format(operation.job_type)).append(operation.package)
+            getattr(self, f"_{operation.job_type}s").append(operation.package)
 
-    def _execute_install(self, operation):
+    def _execute_install(self, operation: Operation) -> int:
         return 0
 
-    def _execute_update(self, operation):
+    def _execute_update(self, operation: Operation) -> int:
         return 0
 
-    def _execute_uninstall(self, operation):
+    def _execute_uninstall(self, operation: Operation) -> int:
         return 0
 
 
 class CustomInstalledRepository(InstalledRepository):
     @classmethod
-    def load(cls, env):
+    def load(
+        cls, env: Env, with_dependencies: bool = False
+    ) -> CustomInstalledRepository:
         return cls()
 
 
 class Locker(BaseLocker):
-    def __init__(self):
-        self._lock = TOMLFile(Path.cwd().joinpath("poetry.lock"))
+    def __init__(self, lock_path: str | Path) -> None:
+        self._lock = TOMLFile(Path(lock_path).joinpath("poetry.lock"))
         self._written_data = None
         self._locked = False
         self._content_hash = self._get_content_hash()
 
     @property
-    def written_data(self):
+    def written_data(self) -> dict | None:
         return self._written_data
 
-    def set_lock_path(self, lock):
+    def set_lock_path(self, lock: str | Path) -> Locker:
         self._lock = TOMLFile(Path(lock).joinpath("poetry.lock"))
 
         return self
 
-    def locked(self, is_locked=True):
+    def locked(self, is_locked: bool = True) -> Locker:
         self._locked = is_locked
 
         return self
 
-    def mock_lock_data(self, data):
+    def mock_lock_data(self, data: dict) -> None:
         self._lock_data = data
 
-    def is_locked(self):
+    def is_locked(self) -> bool:
         return self._locked
 
-    def is_fresh(self):
+    def is_fresh(self) -> bool:
         return True
 
-    def _get_content_hash(self):
+    def _get_content_hash(self) -> str:
         return "123456789"
 
-    def _write_lock_data(self, data):
+    def _write_lock_data(self, data: dict) -> None:
         for package in data["package"]:
             python_versions = str(package["python-versions"])
-            if PY2:
-                python_versions = python_versions.decode()
-                if "requirements" in package:
-                    requirements = {}
-                    for key, value in package["requirements"].items():
-                        requirements[key.decode()] = value.decode()
-
-                    package["requirements"] = requirements
-
             package["python-versions"] = python_versions
 
         self._written_data = json.loads(json.dumps(data))
@@ -129,7 +143,7 @@ def _write_lock_data(self, data):
 
 
 @pytest.fixture()
-def package():
+def package() -> ProjectPackage:
     p = ProjectPackage("root", "1.0")
     p.root_dir = Path.cwd()
 
@@ -137,12 +151,12 @@ def package():
 
 
 @pytest.fixture()
-def repo():
-    return Repository()
+def repo() -> Repository:
+    return Repository("repo")
 
 
 @pytest.fixture()
-def pool(repo):
+def pool(repo: Repository) -> Pool:
     pool = Pool()
     pool.add_repository(repo)
 
@@ -150,22 +164,29 @@ def pool(repo):
 
 
 @pytest.fixture()
-def installed():
+def installed() -> CustomInstalledRepository:
     return CustomInstalledRepository()
 
 
 @pytest.fixture()
-def locker():
-    return Locker()
+def locker(project_root: Path) -> Locker:
+    return Locker(lock_path=project_root)
 
 
 @pytest.fixture()
-def env():
+def env() -> NullEnv:
     return NullEnv()
 
 
 @pytest.fixture()
-def installer(package, pool, locker, env, installed, config):
+def installer(
+    package: ProjectPackage,
+    pool: Pool,
+    locker: Locker,
+    env: NullEnv,
+    installed: CustomInstalledRepository,
+    config: Config,
+) -> Installer:
     installer = Installer(
         NullIO(),
         env,
@@ -181,20 +202,22 @@ def installer(package, pool, locker, env, installed, config):
     return installer
 
 
-def fixture(name):
-    file = TOMLFile(Path(__file__).parent / "fixtures" / "{}.test".format(name))
+def fixture(name: str) -> dict:
+    file = TOMLFile(Path(__file__).parent / "fixtures" / f"{name}.test")
 
     return json.loads(json.dumps(file.read()))
 
 
-def test_run_no_dependencies(installer, locker):
+def test_run_no_dependencies(installer: Installer, locker: Locker):
     installer.run()
     expected = fixture("no-dependencies")
 
     assert locker.written_data == expected
 
 
-def test_run_with_dependencies(installer, locker, repo, package):
+def test_run_with_dependencies(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     package_a = get_package("A", "1.0")
     package_b = get_package("B", "1.1")
     repo.add_package(package_a)
@@ -210,7 +233,11 @@ def test_run_with_dependencies(installer, locker, repo, package):
 
 
 def test_run_update_after_removing_dependencies(
-    installer, locker, repo, package, installed
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
 ):
     locker.locked(True)
     locker.mock_lock_data(
@@ -272,12 +299,22 @@ def test_run_update_after_removing_dependencies(
 
     assert locker.written_data == expected
 
-    assert 0 == installer.executor.installations_count
-    assert 0 == installer.executor.updates_count
-    assert 1 == installer.executor.removals_count
-
-
-def test_run_install_no_dev(installer, locker, repo, package, installed):
+    assert installer.executor.installations_count == 0
+    assert installer.executor.updates_count == 0
+    assert installer.executor.removals_count == 1
+
+
+def _configure_run_install_dev(
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
+    with_optional_group: bool = False,
+    with_packages_installed: bool = False,
+) -> None:
+    """
+    Perform common test setup for the `test_run_install_*dev*()` tests.
+    """
     locker.locked(True)
     locker.mock_lock_data(
         {
@@ -325,72 +362,367 @@ def test_run_install_no_dev(installer, locker, repo, package, installed):
     repo.add_package(package_b)
     repo.add_package(package_c)
 
-    installed.add_package(package_a)
-    installed.add_package(package_b)
-    installed.add_package(package_c)
+    if with_packages_installed:
+        installed.add_package(package_a)
+        installed.add_package(package_b)
+        installed.add_package(package_c)
 
     package.add_dependency(Factory.create_dependency("A", "~1.0"))
     package.add_dependency(Factory.create_dependency("B", "~1.1"))
-    package.add_dependency(Factory.create_dependency("C", "~1.2", category="dev"))
 
-    installer.dev_mode(False)
+    group = DependencyGroup("dev", optional=with_optional_group)
+    group.add_dependency(Factory.create_dependency("C", "~1.2", groups=["dev"]))
+    package.add_dependency_group(group)
+
+
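+# groups=None leaves the installer's default group selection untouched; every
+# other case pins the active groups explicitly via only_groups().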
+@pytest.mark.parametrize(
+    ("groups", "installs", "updates", "removals", "with_packages_installed"),
+    [
+        (None, 2, 0, 0, False),
+        (None, 0, 0, 1, True),
+        ([], 0, 0, 0, False),
+        ([], 0, 0, 3, True),
+        (["dev"], 1, 0, 0, False),
+        (["dev"], 0, 0, 2, True),
+        ([MAIN_GROUP], 2, 0, 0, False),
+        ([MAIN_GROUP], 0, 0, 1, True),
+        ([MAIN_GROUP, "dev"], 3, 0, 0, False),
+        ([MAIN_GROUP, "dev"], 0, 0, 0, True),
+    ],
+)
+def test_run_install_with_dependency_groups(
+    groups: list[str] | None,
+    installs: int,
+    updates: int,
+    removals: int,
+    with_packages_installed: bool,
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
+):
+    _configure_run_install_dev(
+        locker,
+        repo,
+        package,
+        installed,
+        with_optional_group=True,
+        with_packages_installed=with_packages_installed,
+    )
+
+    if groups is not None:
+        installer.only_groups(groups)
+
+    installer.requires_synchronization(True)
     installer.run()
 
-    assert 0 == installer.executor.installations_count
-    assert 0 == installer.executor.updates_count
-    assert 1 == installer.executor.removals_count
+    assert installer.executor.installations_count == installs
+    assert installer.executor.updates_count == updates
+    assert installer.executor.removals_count == removals
+
 
+def test_run_install_does_not_remove_locked_packages_if_installed_but_not_required(
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
+):
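+    # b and c stay locked and installed but are no longer required; without
+    # requires_synchronization(True) the installer must leave them in place.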
+    package_a = get_package("a", "1.0")
+    package_b = get_package("b", "1.1")
+    package_c = get_package("c", "1.2")
+
+    repo.add_package(package_a)
+    installed.add_package(package_a)
+    repo.add_package(package_b)
+    installed.add_package(package_b)
+    repo.add_package(package_c)
+    installed.add_package(package_c)
+
+    installed.add_package(package)  # Root package never removed.
+
+    package.add_dependency(Factory.create_dependency(package_a.name, package_a.version))
 
-def test_run_install_remove_untracked(installer, locker, repo, package, installed):
     locker.locked(True)
     locker.mock_lock_data(
         {
             "package": [
                 {
-                    "name": "a",
-                    "version": "1.0",
+                    "name": package_a.name,
+                    "version": package_a.version.text,
                     "category": "main",
                     "optional": False,
                     "platform": "*",
                     "python-versions": "*",
                     "checksum": [],
-                }
+                },
+                {
+                    "name": package_b.name,
+                    "version": package_b.version.text,
+                    "category": "main",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                },
+                {
+                    "name": package_c.name,
+                    "version": package_c.version.text,
+                    "category": "main",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                },
             ],
             "metadata": {
                 "python-versions": "*",
                 "platform": "*",
                 "content-hash": "123456789",
-                "hashes": {"a": []},
+                "hashes": {package_a.name: [], package_b.name: [], package_c.name: []},
             },
         }
     )
+
+    installer.run()
+
+    assert installer.executor.installations_count == 0
+    assert installer.executor.updates_count == 0
+    assert installer.executor.removals_count == 0
+
+
+def test_run_install_removes_locked_packages_if_installed_and_synchronization_is_required(  # noqa: E501
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
+):
     package_a = get_package("a", "1.0")
     package_b = get_package("b", "1.1")
     package_c = get_package("c", "1.2")
-    package_pip = get_package("pip", "20.0.0")
+
     repo.add_package(package_a)
+    installed.add_package(package_a)
     repo.add_package(package_b)
+    installed.add_package(package_b)
     repo.add_package(package_c)
-    repo.add_package(package_pip)
+    installed.add_package(package_c)
+
+    installed.add_package(package)  # Root package never removed.
+
+    package.add_dependency(Factory.create_dependency(package_a.name, package_a.version))
+
+    locker.locked(True)
+    locker.mock_lock_data(
+        {
+            "package": [
+                {
+                    "name": package_a.name,
+                    "version": package_a.version.text,
+                    "category": "main",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                },
+                {
+                    "name": package_b.name,
+                    "version": package_b.version.text,
+                    "category": "main",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                },
+                {
+                    "name": package_c.name,
+                    "version": package_c.version.text,
+                    "category": "main",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                },
+            ],
+            "metadata": {
+                "python-versions": "*",
+                "platform": "*",
+                "content-hash": "123456789",
+                "hashes": {package_a.name: [], package_b.name: [], package_c.name: []},
+            },
+        }
+    )
+
+    installer.requires_synchronization(True)
+    installer.run()
+
+    assert installer.executor.installations_count == 0
+    assert installer.executor.updates_count == 0
+    assert installer.executor.removals_count == 2
+
 
+def test_run_install_removes_no_longer_locked_packages_if_installed(
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
+):
+    package_a = get_package("a", "1.0")
+    package_b = get_package("b", "1.1")
+    package_c = get_package("c", "1.2")
+
+    repo.add_package(package_a)
     installed.add_package(package_a)
+    repo.add_package(package_b)
     installed.add_package(package_b)
+    repo.add_package(package_c)
     installed.add_package(package_c)
-    installed.add_package(package_pip)  # Always required and never removed.
+
     installed.add_package(package)  # Root package never removed.
 
-    package.add_dependency(Factory.create_dependency("A", "~1.0"))
+    package.add_dependency(Factory.create_dependency(package_a.name, package_a.version))
+
+    locker.locked(True)
+    locker.mock_lock_data(
+        {
+            "package": [
+                {
+                    "name": package_a.name,
+                    "version": package_a.version.text,
+                    "category": "main",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                },
+                {
+                    "name": package_b.name,
+                    "version": package_b.version.text,
+                    "category": "main",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                },
+                {
+                    "name": package_c.name,
+                    "version": package_c.version.text,
+                    "category": "main",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                },
+            ],
+            "metadata": {
+                "python-versions": "*",
+                "platform": "*",
+                "content-hash": "123456789",
+                "hashes": {package_a.name: [], package_b.name: [], package_c.name: []},
+            },
+        }
+    )
+
+    installer.update(True)
+    installer.run()
+
+    assert installer.executor.installations_count == 0
+    assert installer.executor.updates_count == 0
+    assert installer.executor.removals_count == 2
+
+
+@pytest.mark.parametrize(
+    "managed_reserved_package_names",
+    itertools.chain(
+        [()],
+        itertools.permutations(RESERVED_PACKAGES, 1),
+        itertools.permutations(RESERVED_PACKAGES, 2),
+        [RESERVED_PACKAGES],
+    ),
+)
+def test_run_install_with_synchronization(
+    managed_reserved_package_names: tuple[str, ...],
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
+):
+    package_a = get_package("a", "1.0")
+    package_b = get_package("b", "1.1")
+    package_c = get_package("c", "1.2")
+    package_pip = get_package("pip", "20.0.0")
+    package_setuptools = get_package("setuptools", "20.0.0")
+    package_wheel = get_package("wheel", "20.0.0")
+
+    all_packages = [
+        package_a,
+        package_b,
+        package_c,
+        package_pip,
+        package_setuptools,
+        package_wheel,
+    ]
+
+    managed_reserved_packages = [
+        pkg for pkg in all_packages if pkg.name in managed_reserved_package_names
+    ]
+    locked_packages = [package_a, *managed_reserved_packages]
+
+    for pkg in all_packages:
+        repo.add_package(pkg)
+        installed.add_package(pkg)
+
+    installed.add_package(package)  # Root package never removed.
+
+    package.add_dependency(Factory.create_dependency(package_a.name, package_a.version))
 
-    installer.dev_mode(True).remove_untracked(True)
+    locker.locked(True)
+    locker.mock_lock_data(
+        {
+            "package": [
+                {
+                    "name": pkg.name,
+                    "version": pkg.version,
+                    "category": "main",
+                    "optional": False,
+                    "platform": "*",
+                    "python-versions": "*",
+                    "checksum": [],
+                }
+                for pkg in locked_packages
+            ],
+            "metadata": {
+                "python-versions": "*",
+                "platform": "*",
+                "content-hash": "123456789",
+                "hashes": {pkg.name: [] for pkg in locked_packages},
+            },
+        }
+    )
+
+    installer.requires_synchronization(True)
     installer.run()
 
-    assert 0 == installer.executor.installations_count
-    assert 0 == installer.executor.updates_count
-    assert 2 == installer.executor.removals_count
-    assert {"b", "c"} == set(r.name for r in installer.executor.removals)
+    assert installer.executor.installations_count == 0
+    assert installer.executor.updates_count == 0
+    assert installer.executor.removals_count == 2 + len(managed_reserved_packages)
 
+    expected_removals = {
+        package_b.name,
+        package_c.name,
+        *managed_reserved_package_names,
+    }
 
-def test_run_whitelist_add(installer, locker, repo, package):
+    assert {r.name for r in installer.executor.removals} == expected_removals
+
+
+def test_run_whitelist_add(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     locker.locked(True)
     locker.mock_lock_data(
         {
@@ -432,7 +764,13 @@ def test_run_whitelist_add(installer, locker, repo, package):
     assert locker.written_data == expected
 
 
-def test_run_whitelist_remove(installer, locker, repo, package, installed):
+def test_run_whitelist_remove(
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
+):
     locker.locked(True)
     locker.mock_lock_data(
         {
@@ -479,12 +817,14 @@ def test_run_whitelist_remove(installer, locker, repo, package, installed):
     expected = fixture("remove")
 
     assert locker.written_data == expected
-    assert 1 == installer.executor.installations_count
-    assert 0 == installer.executor.updates_count
-    assert 1 == installer.executor.removals_count
+    assert installer.executor.installations_count == 1
+    assert installer.executor.updates_count == 0
+    assert installer.executor.removals_count == 1
 
 
-def test_add_with_sub_dependencies(installer, locker, repo, package):
+def test_add_with_sub_dependencies(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     package_a = get_package("A", "1.0")
     package_b = get_package("B", "1.1")
     package_c = get_package("C", "1.2")
@@ -506,7 +846,9 @@ def test_add_with_sub_dependencies(installer, locker, repo, package):
     assert locker.written_data == expected
 
 
-def test_run_with_python_versions(installer, locker, repo, package):
+def test_run_with_python_versions(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     package.python_versions = "~2.7 || ^3.4"
 
     package_a = get_package("A", "1.0")
@@ -532,7 +874,7 @@ def test_run_with_python_versions(installer, locker, repo, package):
 
 
 def test_run_with_optional_and_python_restricted_dependencies(
-    installer, locker, repo, package
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
 ):
     package.python_versions = "~2.7 || ^3.4"
 
@@ -568,13 +910,17 @@ def test_run_with_optional_and_python_restricted_dependencies(
     # We should only have 2 installs:
     # C,D since python version is not compatible
     # with B's python constraint and A is optional
-    assert 2 == installer.executor.installations_count
-    assert "d" == installer.executor.installations[0].name
-    assert "c" == installer.executor.installations[1].name
+    assert installer.executor.installations_count == 2
+    assert installer.executor.installations[0].name == "d"
+    assert installer.executor.installations[1].name == "c"
 
 
 def test_run_with_optional_and_platform_restricted_dependencies(
-    installer, locker, repo, package, mocker
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    mocker: MockerFixture,
 ):
     mocker.patch("sys.platform", "darwin")
 
@@ -610,12 +956,14 @@ def test_run_with_optional_and_platform_restricted_dependencies(
     # We should only have 2 installs:
     # C,D since the mocked python version is not compatible
     # with B's python constraint and A is optional
-    assert 2 == installer.executor.installations_count
-    assert "d" == installer.executor.installations[0].name
-    assert "c" == installer.executor.installations[1].name
+    assert installer.executor.installations_count == 2
+    assert installer.executor.installations[0].name == "d"
+    assert installer.executor.installations[1].name == "c"
 
 
-def test_run_with_dependencies_extras(installer, locker, repo, package):
+def test_run_with_dependencies_extras(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     package_a = get_package("A", "1.0")
     package_b = get_package("B", "1.0")
     package_c = get_package("C", "1.0")
@@ -640,7 +988,9 @@ def test_run_with_dependencies_extras(installer, locker, repo, package):
     assert locker.written_data == expected
 
 
-def test_run_with_dependencies_nested_extras(installer, locker, repo, package):
+def test_run_with_dependencies_nested_extras(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     package_a = get_package("A", "1.0")
     package_b = get_package("B", "1.0")
     package_c = get_package("C", "1.0")
@@ -651,11 +1001,11 @@ def test_run_with_dependencies_nested_extras(installer, locker, repo, package):
     )
     dependency_a = Factory.create_dependency("A", {"version": "^1.0", "extras": ["B"]})
 
-    package_b.extras = {"C": [dependency_c]}
+    package_b.extras = {"c": [dependency_c]}
     package_b.add_dependency(dependency_c)
 
     package_a.add_dependency(dependency_b)
-    package_a.extras = {"B": [dependency_b]}
+    package_a.extras = {"b": [dependency_b]}
 
     repo.add_package(package_a)
     repo.add_package(package_b)
@@ -669,7 +1019,9 @@ def test_run_with_dependencies_nested_extras(installer, locker, repo, package):
     assert locker.written_data == expected
 
 
-def test_run_does_not_install_extras_if_not_requested(installer, locker, repo, package):
+def test_run_does_not_install_extras_if_not_requested(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     package.extras["foo"] = [get_dependency("D")]
     package_a = get_package("A", "1.0")
     package_b = get_package("B", "1.0")
@@ -695,10 +1047,12 @@ def test_run_does_not_install_extras_if_not_requested(installer, locker, repo, p
     assert locker.written_data == expected
 
     # But should not be installed
-    assert 3 == installer.executor.installations_count  # A, B, C
+    assert installer.executor.installations_count == 3  # A, B, C
 
 
-def test_run_installs_extras_if_requested(installer, locker, repo, package):
+def test_run_installs_extras_if_requested(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     package.extras["foo"] = [get_dependency("D")]
     package_a = get_package("A", "1.0")
     package_b = get_package("B", "1.0")
@@ -725,10 +1079,12 @@ def test_run_installs_extras_if_requested(installer, locker, repo, package):
     assert locker.written_data == expected
 
     # But should not be installed
-    assert 4 == installer.executor.installations_count  # A, B, C, D
+    assert installer.executor.installations_count == 4  # A, B, C, D
 
 
-def test_run_installs_extras_with_deps_if_requested(installer, locker, repo, package):
+def test_run_installs_extras_with_deps_if_requested(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     package.extras["foo"] = [get_dependency("C")]
     package_a = get_package("A", "1.0")
     package_b = get_package("B", "1.0")
@@ -756,11 +1112,11 @@ def test_run_installs_extras_with_deps_if_requested(installer, locker, repo, pac
     assert locker.written_data == expected
 
     # But should not be installed
-    assert 4 == installer.executor.installations_count  # A, B, C, D
+    assert installer.executor.installations_count == 4  # A, B, C, D
 
 
 def test_run_installs_extras_with_deps_if_requested_locked(
-    installer, locker, repo, package
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
 ):
     locker.locked(True)
     locker.mock_lock_data(fixture("extras-with-dependencies"))
@@ -787,10 +1143,15 @@ def test_run_installs_extras_with_deps_if_requested_locked(
     installer.run()
 
     # But should not be installed
-    assert 4 == installer.executor.installations_count  # A, B, C, D
+    assert installer.executor.installations_count == 4  # A, B, C, D
 
 
-def test_installer_with_pypi_repository(package, locker, installed, config):
+def test_installer_with_pypi_repository(
+    package: ProjectPackage,
+    locker: Locker,
+    installed: CustomInstalledRepository,
+    config: Config,
+):
     pool = Pool()
     pool.add_repository(MockRepository())
 
@@ -798,17 +1159,30 @@ def test_installer_with_pypi_repository(package, locker, installed, config):
         NullIO(), NullEnv(), package, locker, pool, config, installed=installed
     )
 
-    package.add_dependency(Factory.create_dependency("pytest", "^3.5", category="dev"))
+    package.add_dependency(Factory.create_dependency("pytest", "^3.5", groups=["dev"]))
     installer.run()
 
     expected = fixture("with-pypi-repository")
 
     assert locker.written_data == expected
 
 
-def test_run_installs_with_local_file(installer, locker, repo, package, fixture_dir):
+def test_run_installs_with_local_file(
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    fixture_dir: FixtureDirGetter,
+):
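+    # Anchor the project at the repository root so the file dependency is
+    # recorded relative to it in the lock data.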
+    root_dir = Path(__file__).parent.parent.parent
+    package.root_dir = root_dir
+    locker.set_lock_path(root_dir)
     file_path = fixture_dir("distributions/demo-0.1.0-py2.py3-none-any.whl")
-    package.add_dependency(Factory.create_dependency("demo", {"file": str(file_path)}))
+    package.add_dependency(
+        Factory.create_dependency(
+            "demo", {"file": str(file_path.relative_to(root_dir))}, root_dir=root_dir
+        )
+    )
 
     repo.add_package(get_package("pendulum", "1.4.4"))
 
@@ -817,16 +1191,27 @@ def test_run_installs_with_local_file(installer, locker, repo, package, fixture_
     expected = fixture("with-file-dependency")
 
     assert locker.written_data == expected
-    assert 2 == installer.executor.installations_count
+    assert installer.executor.installations_count == 2
 
 
 def test_run_installs_wheel_with_no_requires_dist(
-    installer, locker, repo, package, fixture_dir
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    fixture_dir: FixtureDirGetter,
 ):
+    root_dir = Path(__file__).parent.parent.parent
+    package.root_dir = root_dir
+    locker.set_lock_path(root_dir)
     file_path = fixture_dir(
         "wheel_with_no_requires_dist/demo-0.1.0-py2.py3-none-any.whl"
     )
-    package.add_dependency(Factory.create_dependency("demo", {"file": str(file_path)}))
+    package.add_dependency(
+        Factory.create_dependency(
+            "demo", {"file": str(file_path.relative_to(root_dir))}, root_dir=root_dir
+        )
+    )
 
     installer.run()
 
@@ -834,16 +1219,26 @@ def test_run_installs_wheel_with_no_requires_dist(
 
     assert locker.written_data == expected
 
-    assert 1 == installer.executor.installations_count
+    assert installer.executor.installations_count == 1
 
 
 def test_run_installs_with_local_poetry_directory_and_extras(
-    installer, locker, repo, package, tmpdir, fixture_dir
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    tmpdir: Path,
+    fixture_dir: FixtureDirGetter,
 ):
+    root_dir = Path(__file__).parent.parent.parent
+    package.root_dir = root_dir
+    locker.set_lock_path(root_dir)
     file_path = fixture_dir("project_with_extras")
     package.add_dependency(
         Factory.create_dependency(
-            "project-with-extras", {"path": str(file_path), "extras": ["extras_a"]}
+            "project-with-extras",
+            {"path": str(file_path.relative_to(root_dir)), "extras": ["extras_a"]},
+            root_dir=root_dir,
         )
     )
 
@@ -854,11 +1249,16 @@ def test_run_installs_with_local_poetry_directory_and_extras(
     expected = fixture("with-directory-dependency-poetry")
     assert locker.written_data == expected
 
-    assert 2 == installer.executor.installations_count
+    assert installer.executor.installations_count == 2
 
 
 def test_run_installs_with_local_poetry_directory_transitive(
-    installer, locker, repo, package, tmpdir, fixture_dir
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    tmpdir: Path,
+    fixture_dir: FixtureDirGetter,
 ):
     root_dir = fixture_dir("directory")
     package.root_dir = root_dir
@@ -881,11 +1281,16 @@ def test_run_installs_with_local_poetry_directory_transitive(
 
     assert locker.written_data == expected
 
-    assert 6 == installer.executor.installations_count
+    assert installer.executor.installations_count == 6
 
 
 def test_run_installs_with_local_poetry_file_transitive(
-    installer, locker, repo, package, tmpdir, fixture_dir
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    tmpdir: str,
+    fixture_dir: FixtureDirGetter,
 ):
     root_dir = fixture_dir("directory")
     package.root_dir = root_dir
@@ -910,15 +1315,27 @@ def test_run_installs_with_local_poetry_file_transitive(
 
     assert locker.written_data == expected
 
-    assert 4 == installer.executor.installations_count
+    assert installer.executor.installations_count == 4
 
 
 def test_run_installs_with_local_setuptools_directory(
-    installer, locker, repo, package, tmpdir, fixture_dir
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    tmpdir: Path,
+    fixture_dir: FixtureDirGetter,
 ):
+    root_dir = Path(__file__).parent.parent.parent
+    package.root_dir = root_dir
+    locker.set_lock_path(root_dir)
     file_path = fixture_dir("project_with_setup/")
     package.add_dependency(
-        Factory.create_dependency("project-with-setup", {"path": str(file_path)})
+        Factory.create_dependency(
+            "project-with-setup",
+            {"path": str(file_path.relative_to(root_dir))},
+            root_dir=root_dir,
+        )
     )
 
     repo.add_package(get_package("pendulum", "1.4.4"))
@@ -929,10 +1346,12 @@ def test_run_installs_with_local_setuptools_directory(
     expected = fixture("with-directory-dependency-setuptools")
 
     assert locker.written_data == expected
-    assert 3 == installer.executor.installations_count
+    assert installer.executor.installations_count == 3
 
 
-def test_run_with_prereleases(installer, locker, repo, package):
+def test_run_with_prereleases(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     locker.locked(True)
     locker.mock_lock_data(
         {
@@ -974,7 +1393,9 @@ def test_run_with_prereleases(installer, locker, repo, package):
     assert locker.written_data == expected
 
 
-def test_run_changes_category_if_needed(installer, locker, repo, package):
+def test_run_changes_category_if_needed(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     locker.locked(True)
     locker.mock_lock_data(
         {
@@ -1005,7 +1426,7 @@ def test_run_changes_category_if_needed(installer, locker, repo, package):
 
     package.add_dependency(
         Factory.create_dependency(
-            "A", {"version": "^1.0", "optional": True}, category="dev"
+            "A", {"version": "^1.0", "optional": True}, groups=["dev"]
         )
     )
     package.add_dependency(Factory.create_dependency("B", "^1.1"))
@@ -1019,7 +1440,9 @@ def test_run_changes_category_if_needed(installer, locker, repo, package):
     assert locker.written_data == expected
 
 
-def test_run_update_all_with_lock(installer, locker, repo, package):
+def test_run_update_all_with_lock(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     locker.locked(True)
     locker.mock_lock_data(
         {
@@ -1056,7 +1479,9 @@ def test_run_update_all_with_lock(installer, locker, repo, package):
     assert locker.written_data == expected
 
 
-def test_run_update_with_locked_extras(installer, locker, repo, package):
+def test_run_update_with_locked_extras(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     locker.locked(True)
     locker.mock_lock_data(
         {
@@ -1105,8 +1530,8 @@ def test_run_update_with_locked_extras(installer, locker, repo, package):
     b_dependency.in_extras.append("foo")
     c_dependency = get_dependency("C", "^1.0")
     c_dependency.python_versions = "~2.7"
-    package_a.requires.append(b_dependency)
-    package_a.requires.append(c_dependency)
+    package_a.add_dependency(b_dependency)
+    package_a.add_dependency(c_dependency)
 
     repo.add_package(package_a)
     repo.add_package(get_package("B", "1.0"))
@@ -1128,7 +1553,7 @@ def test_run_update_with_locked_extras(installer, locker, repo, package):
 
 
 def test_run_install_duplicate_dependencies_different_constraints(
-    installer, locker, repo, package
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
 ):
     package.add_dependency(Factory.create_dependency("A", "*"))
 
@@ -1161,17 +1586,17 @@ def test_run_install_duplicate_dependencies_different_constraints(
     assert locker.written_data == expected
 
     installs = installer.executor.installations
-    assert 3 == installer.executor.installations_count
+    assert installer.executor.installations_count == 3
     assert installs[0] == package_c12
     assert installs[1] == package_b10
     assert installs[2] == package_a
 
-    assert 0 == installer.executor.updates_count
-    assert 0 == installer.executor.removals_count
+    assert installer.executor.updates_count == 0
+    assert installer.executor.removals_count == 0
 
 
 def test_run_install_duplicate_dependencies_different_constraints_with_lock(
-    installer, locker, repo, package
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
 ):
     locker.locked(True)
     locker.mock_lock_data(
@@ -1272,13 +1697,17 @@ def test_run_install_duplicate_dependencies_different_constraints_with_lock(
 
     assert locker.written_data == expected
 
-    assert 3 == installer.executor.installations_count
-    assert 0 == installer.executor.updates_count
-    assert 0 == installer.executor.removals_count
+    assert installer.executor.installations_count == 3
+    assert installer.executor.updates_count == 0
+    assert installer.executor.removals_count == 0
 
 
 def test_run_update_uninstalls_after_removal_transient_dependency(
-    installer, locker, repo, package, installed
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
 ):
     locker.locked(True)
     locker.mock_lock_data(
@@ -1330,13 +1759,17 @@ def test_run_update_uninstalls_after_removal_transient_dependency(
     installer.update(True)
     installer.run()
 
-    assert 0 == installer.executor.installations_count
-    assert 0 == installer.executor.updates_count
-    assert 1 == installer.executor.removals_count
+    assert installer.executor.installations_count == 0
+    assert installer.executor.updates_count == 0
+    assert installer.executor.removals_count == 1
 
 
 def test_run_install_duplicate_dependencies_different_constraints_with_lock_update(
-    installer, locker, repo, package, installed
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
 ):
     locker.locked(True)
     locker.mock_lock_data(
@@ -1435,16 +1868,20 @@ def test_run_install_duplicate_dependencies_different_constraints_with_lock_upda
 
     assert locker.written_data == expected
 
-    assert 2 == installer.executor.installations_count
-    assert 1 == installer.executor.updates_count
-    assert 0 == installer.executor.removals_count
+    assert installer.executor.installations_count == 2
+    assert installer.executor.updates_count == 1
+    assert installer.executor.removals_count == 0
 
 
 @pytest.mark.skip(
     "This is not working at the moment due to limitations in the resolver"
 )
-def test_installer_test_solver_finds_compatible_package_for_dependency_python_not_fully_compatible_with_package_python(
-    installer, locker, repo, package, installed
+def test_installer_test_solver_finds_compatible_package_for_dependency_python_not_fully_compatible_with_package_python(  # noqa: E501
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
 ):
     package.python_versions = "~2.7 || ^3.4"
     package.add_dependency(
@@ -1464,15 +1901,18 @@ def test_installer_test_solver_finds_compatible_package_for_dependency_python_no
 
     expected = fixture("with-conditional-dependency")
     assert locker.written_data == expected
-
-    if sys.version_info >= (3, 5, 0):
-        assert 1 == installer.executor.installations_count
-    else:
-        assert 0 == installer.executor.installations_count
-
-
-def test_installer_required_extras_should_not_be_removed_when_updating_single_dependency(
-    installer, locker, repo, package, installed, env, pool, config
+    assert installer.executor.installations_count == 1
+
+
+def test_installer_required_extras_should_not_be_removed_when_updating_single_dependency(  # noqa: E501
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
+    env: NullEnv,
+    pool: Pool,
+    config: Config,
 ):
     package.add_dependency(Factory.create_dependency("A", {"version": "^1.0"}))
 
@@ -1498,9 +1938,9 @@ def test_installer_required_extras_should_not_be_removed_when_updating_single_de
     installer.update(True)
     installer.run()
 
-    assert 3 == installer.executor.installations_count
-    assert 0 == installer.executor.updates_count
-    assert 0 == installer.executor.removals_count
+    assert installer.executor.installations_count == 3
+    assert installer.executor.updates_count == 0
+    assert installer.executor.removals_count == 0
 
     package.add_dependency(Factory.create_dependency("D", "^1.0"))
     locker.locked(True)
@@ -1526,13 +1966,19 @@ def test_installer_required_extras_should_not_be_removed_when_updating_single_de
     installer.whitelist(["D"])
     installer.run()
 
-    assert 1 == installer.executor.installations_count
-    assert 0 == installer.executor.updates_count
-    assert 0 == installer.executor.removals_count
+    assert installer.executor.installations_count == 1
+    assert installer.executor.updates_count == 0
+    assert installer.executor.removals_count == 0
 
 
-def test_installer_required_extras_should_not_be_removed_when_updating_single_dependency_pypi_repository(
-    locker, repo, package, installed, env, mocker, config
+def test_installer_required_extras_should_not_be_removed_when_updating_single_dependency_pypi_repository(  # noqa: E501
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
+    env: NullEnv,
+    mocker: MockerFixture,
+    config: Config,
 ):
     mocker.patch("sys.platform", "darwin")
 
@@ -1556,9 +2002,9 @@ def test_installer_required_extras_should_not_be_removed_when_updating_single_de
     installer.update(True)
     installer.run()
 
-    assert 3 == installer.executor.installations_count
-    assert 0 == installer.executor.updates_count
-    assert 0 == installer.executor.removals_count
+    assert installer.executor.installations_count == 3
+    assert installer.executor.updates_count == 0
+    assert installer.executor.removals_count == 0
 
     package.add_dependency(Factory.create_dependency("pytest", "^3.5"))
 
@@ -1584,13 +2030,18 @@ def test_installer_required_extras_should_not_be_removed_when_updating_single_de
     installer.whitelist(["pytest"])
     installer.run()
 
-    assert (6 if not PY2 else 7) == installer.executor.installations_count
-    assert 0 == installer.executor.updates_count
-    assert 0 == installer.executor.removals_count
+    assert installer.executor.installations_count == 7
+    assert installer.executor.updates_count == 0
+    assert installer.executor.removals_count == 0
 
 
 def test_installer_required_extras_should_be_installed(
-    locker, repo, package, installed, env, config
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
+    env: NullEnv,
+    config: Config,
 ):
     pool = Pool()
     pool.add_repository(MockRepository())
@@ -1616,9 +2067,9 @@ def test_installer_required_extras_should_be_installed(
     installer.update(True)
     installer.run()
 
-    assert 2 == installer.executor.installations_count
-    assert 0 == installer.executor.updates_count
-    assert 0 == installer.executor.removals_count
+    assert installer.executor.installations_count == 2
+    assert installer.executor.updates_count == 0
+    assert installer.executor.removals_count == 0
 
     locker.locked(True)
     locker.mock_lock_data(locker.written_data)
@@ -1638,13 +2089,13 @@ def test_installer_required_extras_should_be_installed(
     installer.update(True)
     installer.run()
 
-    assert 2 == installer.executor.installations_count
-    assert 0 == installer.executor.updates_count
-    assert 0 == installer.executor.removals_count
+    assert installer.executor.installations_count == 2
+    assert installer.executor.updates_count == 0
+    assert installer.executor.removals_count == 0
 
 
 def test_update_multiple_times_with_split_dependencies_is_idempotent(
-    installer, locker, repo, package
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
 ):
     locker.locked(True)
     locker.mock_lock_data(
@@ -1705,25 +2156,29 @@ def test_update_multiple_times_with_split_dependencies_is_idempotent(
     installer.update(True)
     installer.run()
 
-    assert expected == locker.written_data
+    assert locker.written_data == expected
 
     locker.mock_lock_data(locker.written_data)
 
     installer.update(True)
     installer.run()
 
-    assert expected == locker.written_data
+    assert locker.written_data == expected
 
     locker.mock_lock_data(locker.written_data)
 
     installer.update(True)
     installer.run()
 
-    assert expected == locker.written_data
+    assert locker.written_data == expected
 
 
 def test_installer_can_install_dependencies_from_forced_source(
-    locker, package, installed, env, config
+    locker: Locker,
+    package: Package,
+    installed: CustomInstalledRepository,
+    env: NullEnv,
+    config: Config,
 ):
     package.python_versions = "^3.7"
     package.add_dependency(
@@ -1749,12 +2204,14 @@ def test_installer_can_install_dependencies_from_forced_source(
     installer.update(True)
     installer.run()
 
-    assert 1 == installer.executor.installations_count
-    assert 0 == installer.executor.updates_count
-    assert 0 == installer.executor.removals_count
+    assert installer.executor.installations_count == 1
+    assert installer.executor.updates_count == 0
+    assert installer.executor.removals_count == 0
 
 
-def test_run_installs_with_url_file(installer, locker, repo, package):
+def test_run_installs_with_url_file(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     url = "https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl"
     package.add_dependency(Factory.create_dependency("demo", {"url": url}))
 
@@ -1766,11 +2223,61 @@ def test_run_installs_with_url_file(installer, locker, repo, package):
 
     assert locker.written_data == expected
 
-    assert 2 == installer.executor.installations_count
+    assert installer.executor.installations_count == 2
+
+
+@pytest.mark.parametrize("env_platform", ["linux", "win32"])
+def test_run_installs_with_same_version_url_files(
+    pool: Pool,
+    locker: Locker,
+    installed: CustomInstalledRepository,
+    config: Config,
+    repo: Repository,
+    package: ProjectPackage,
+    env_platform: str,
+) -> None:
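+    # The same demo version is reachable via two URLs with disjoint sys_platform
+    # markers; the executor must pick the artifact matching the mocked platform.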
+    urls = {
+        "linux": "https://python-poetry.org/distributions/demo-0.1.0.tar.gz",
+        "win32": (
+            "https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl"
+        ),
+    }
+    for platform, url in urls.items():
+        package.add_dependency(
+            Factory.create_dependency(
+                "demo",
+                {"url": url, "markers": f"sys_platform == '{platform}'"},
+            )
+        )
+    repo.add_package(get_package("pendulum", "1.4.4"))
+
+    installer = Installer(
+        NullIO(),
+        MockEnv(platform=env_platform),
+        package,
+        locker,
+        pool,
+        config,
+        installed=installed,
+        executor=Executor(
+            MockEnv(platform=env_platform),
+            pool,
+            config,
+            NullIO(),
+        ),
+    )
+    installer.use_executor(True)
+    installer.run()
+
+    expected = fixture("with-same-version-url-dependencies")
+    assert locker.written_data == expected
+    assert installer.executor.installations_count == 2
+    demo_package = next(p for p in installer.executor.installations if p.name == "demo")
+    assert demo_package.source_url == urls[env_platform]
 
 
 def test_installer_uses_prereleases_if_they_are_compatible(
-    installer, locker, package, repo
+    installer: Installer, locker: Locker, package: ProjectPackage, repo: Repository
 ):
     package.python_versions = "~2.7 || ^3.4"
     package.add_dependency(
@@ -1796,16 +2303,20 @@ def test_installer_uses_prereleases_if_they_are_compatible(
     installer.update(True)
     installer.run()
 
-    assert 2 == installer.executor.installations_count
+    assert installer.executor.installations_count == 2
 
 
 def test_installer_can_handle_old_lock_files(
-    installer, locker, package, repo, installed, config
+    locker: Locker,
+    package: ProjectPackage,
+    repo: Repository,
+    installed: CustomInstalledRepository,
+    config: Config,
 ):
     pool = Pool()
     pool.add_repository(MockRepository())
 
-    package.add_dependency(Factory.create_dependency("pytest", "^3.5", category="dev"))
+    package.add_dependency(Factory.create_dependency("pytest", "^3.5", groups=["dev"]))
 
     locker.locked()
     locker.mock_lock_data(fixture("old-lock"))
@@ -1824,7 +2335,7 @@ def test_installer_can_handle_old_lock_files(
 
     installer.run()
 
-    assert 6 == installer.executor.installations_count
+    assert installer.executor.installations_count == 6
 
     installer = Installer(
         NullIO(),
@@ -1834,14 +2345,19 @@ def test_installer_can_handle_old_lock_files(
         pool,
         config,
         installed=installed,
-        executor=Executor(MockEnv(version_info=(2, 7, 18)), pool, config, NullIO(),),
+        executor=Executor(
+            MockEnv(version_info=(2, 7, 18)),
+            pool,
+            config,
+            NullIO(),
+        ),
     )
     installer.use_executor()
 
     installer.run()
 
     # funcsigs will be added
-    assert 7 == installer.executor.installations_count
+    assert installer.executor.installations_count == 7
 
     installer = Installer(
         NullIO(),
@@ -1852,7 +2368,10 @@ def test_installer_can_handle_old_lock_files(
         config,
         installed=installed,
         executor=Executor(
-            MockEnv(version_info=(2, 7, 18), platform="win32"), pool, config, NullIO(),
+            MockEnv(version_info=(2, 7, 18), platform="win32"),
+            pool,
+            config,
+            NullIO(),
         ),
     )
     installer.use_executor()
@@ -1860,11 +2379,42 @@ def test_installer_can_handle_old_lock_files(
     installer.run()
 
     # colorama will be added
-    assert 8 == installer.executor.installations_count
+    assert installer.executor.installations_count == 8
+
+
+@pytest.mark.parametrize("quiet", [True, False])
+def test_run_with_dependencies_quiet(
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    quiet: bool,
+):
+    package_a = get_package("A", "1.0")
+    package_b = get_package("B", "1.1")
+    repo.add_package(package_a)
+    repo.add_package(package_b)
+
+    installer._io = IO(Input(), BufferedOutput(), BufferedOutput())
+    installer._io.set_verbosity(Verbosity.QUIET if quiet else Verbosity.NORMAL)
+
+    package.add_dependency(Factory.create_dependency("A", "~1.0"))
+    package.add_dependency(Factory.create_dependency("B", "^1.0"))
+
+    installer.run()
+    expected = fixture("with-dependencies")
+
+    assert locker.written_data == expected
+
+    installer._io.output._buffer.seek(0)
+    if quiet:
+        assert installer._io.output._buffer.read() == ""
+    else:
+        assert installer._io.output._buffer.read() != ""
 
 
 def test_installer_should_use_the_locked_version_of_git_dependencies(
-    installer, locker, package, repo
+    installer: Installer, locker: Locker, package: ProjectPackage, repo: Repository
 ):
     locker.locked(True)
     locker.mock_lock_data(
@@ -1924,3 +2474,83 @@ def test_installer_should_use_the_locked_version_of_git_dependencies(
         source_reference="master",
         source_resolved_reference="123456",
     )
+
+
+@pytest.mark.parametrize("is_locked", [False, True])
+def test_installer_should_use_the_locked_version_of_git_dependencies_with_extras(
+    installer: Installer,
+    locker: Locker,
+    package: ProjectPackage,
+    repo: Repository,
+    is_locked: bool,
+):
+    if is_locked:
+        locker.locked(True)
+        locker.mock_lock_data(fixture("with-vcs-dependency-with-extras"))
+        expected_reference = "123456"
+    else:
+        expected_reference = MOCK_DEFAULT_GIT_REVISION
+
+    package.add_dependency(
+        Factory.create_dependency(
+            "demo",
+            {
+                "git": "https://github.com/demo/demo.git",
+                "branch": "master",
+                "extras": ["foo"],
+            },
+        )
+    )
+
+    repo.add_package(get_package("pendulum", "1.4.4"))
+    repo.add_package(get_package("cleo", "1.0.0"))
+
+    installer.run()
+
+    assert len(installer.executor.installations) == 3
+    assert installer.executor.installations[-1] == Package(
+        "demo",
+        "0.1.2",
+        source_type="git",
+        source_url="https://github.com/demo/demo.git",
+        source_reference="master",
+        source_resolved_reference=expected_reference,
+    )
+
+
+@pytest.mark.parametrize("is_locked", [False, True])
+def test_installer_should_use_the_locked_version_of_git_dependencies_without_reference(
+    installer: Installer,
+    locker: Locker,
+    package: ProjectPackage,
+    repo: Repository,
+    is_locked: bool,
+):
+    """
+    If there is no explicit reference (branch or tag or rev) in pyproject.toml,
+    HEAD is used.
+    """
+    if is_locked:
+        locker.locked(True)
+        locker.mock_lock_data(fixture("with-vcs-dependency-without-ref"))
+        expected_reference = "123456"
+    else:
+        expected_reference = MOCK_DEFAULT_GIT_REVISION
+
+    package.add_dependency(
+        Factory.create_dependency("demo", {"git": "https://github.com/demo/demo.git"})
+    )
+
+    repo.add_package(get_package("pendulum", "1.4.4"))
+
+    installer.run()
+
+    assert len(installer.executor.installations) == 2
+    assert installer.executor.installations[-1] == Package(
+        "demo",
+        "0.1.2",
+        source_type="git",
+        source_url="https://github.com/demo/demo.git",
+        source_reference="HEAD",
+        source_resolved_reference=expected_reference,
+    )
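
The two dependencies in `test_run_installs_with_same_version_url_files` above resolve the same version to different artifacts by attaching `sys_platform` markers to each URL. A minimal sketch of that selection logic, using `packaging.markers` as a stand-in for Poetry's own marker handling (an assumption for illustration, not the installer's real code path):

```python
# Sketch: choose between same-version artifacts via sys_platform markers,
# mirroring what the parametrized test above expects per platform.
from packaging.markers import Marker

URLS = {
    "sys_platform == 'linux'": (
        "https://python-poetry.org/distributions/demo-0.1.0.tar.gz"
    ),
    "sys_platform == 'win32'": (
        "https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl"
    ),
}


def select_url(env_platform: str) -> str:
    for marker, url in URLS.items():
        # Marker.evaluate accepts an environment override mapping.
        if Marker(marker).evaluate({"sys_platform": env_platform}):
            return url
    raise LookupError(f"no URL for sys_platform == {env_platform!r}")


assert select_url("linux").endswith(".tar.gz")
assert select_url("win32").endswith(".whl")
```
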
diff --git a/vendor/poetry/tests/installation/test_installer_old.py b/vendor/poetry/tests/installation/test_installer_old.py
index b92bdce4..baa59649 100644
--- a/vendor/poetry/tests/installation/test_installer_old.py
+++ b/vendor/poetry/tests/installation/test_installer_old.py
@@ -1,13 +1,16 @@
-from __future__ import unicode_literals
+from __future__ import annotations
 
-import sys
+import itertools
 
-import pytest
+from pathlib import Path
+from typing import TYPE_CHECKING
 
-from clikit.io import NullIO
+import pytest
 
-from poetry.core.packages import ProjectPackage
+from cleo.io.null_io import NullIO
+from poetry.core.packages.project_package import ProjectPackage
 from poetry.core.toml.file import TOMLFile
+
 from poetry.factory import Factory
 from poetry.installation import Installer as BaseInstaller
 from poetry.installation.noop_installer import NoopInstaller
@@ -15,8 +18,6 @@
 from poetry.repositories import Pool
 from poetry.repositories import Repository
 from poetry.repositories.installed_repository import InstalledRepository
-from poetry.utils._compat import PY2
-from poetry.utils._compat import Path
 from poetry.utils.env import MockEnv
 from poetry.utils.env import NullEnv
 from tests.helpers import get_dependency
@@ -27,62 +28,66 @@
 from tests.repositories.test_pypi_repository import MockRepository
 
 
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
+
+    from poetry.utils.env import Env
+    from tests.conftest import Config
+    from tests.types import FixtureDirGetter
+
+
+RESERVED_PACKAGES = ("pip", "setuptools", "wheel")
+
+
 class Installer(BaseInstaller):
-    def _get_installer(self):
+    def _get_installer(self) -> NoopInstaller:
         return NoopInstaller()
 
 
 class CustomInstalledRepository(InstalledRepository):
     @classmethod
-    def load(cls, env):
+    def load(
+        cls, env: Env, with_dependencies: bool = False
+    ) -> CustomInstalledRepository:
         return cls()
 
 
 class Locker(BaseLocker):
-    def __init__(self):
-        self._lock = TOMLFile(Path.cwd().joinpath("poetry.lock"))
+    def __init__(self, lock_path: str | Path) -> None:
+        self._lock = TOMLFile(Path(lock_path).joinpath("poetry.lock"))
         self._written_data = None
         self._locked = False
         self._content_hash = self._get_content_hash()
 
     @property
-    def written_data(self):
+    def written_data(self) -> dict | None:
         return self._written_data
 
-    def set_lock_path(self, lock):
+    def set_lock_path(self, lock: str | Path) -> Locker:
         self._lock = TOMLFile(Path(lock).joinpath("poetry.lock"))
 
         return self
 
-    def locked(self, is_locked=True):
+    def locked(self, is_locked: bool = True) -> Locker:
         self._locked = is_locked
 
         return self
 
-    def mock_lock_data(self, data):
+    def mock_lock_data(self, data: dict) -> None:
         self._lock_data = data
 
-    def is_locked(self):
+    def is_locked(self) -> bool:
         return self._locked
 
-    def is_fresh(self):
+    def is_fresh(self) -> bool:
         return True
 
-    def _get_content_hash(self):
+    def _get_content_hash(self) -> str:
         return "123456789"
 
-    def _write_lock_data(self, data):
+    def _write_lock_data(self, data: dict) -> None:
         for package in data["package"]:
             python_versions = str(package["python-versions"])
-            if PY2:
-                python_versions = python_versions.decode()
-                if "requirements" in package:
-                    requirements = {}
-                    for key, value in package["requirements"].items():
-                        requirements[key.decode()] = value.decode()
-
-                    package["requirements"] = requirements
-
             package["python-versions"] = python_versions
 
         self._written_data = data
@@ -90,7 +95,7 @@ def _write_lock_data(self, data):
 
 
 @pytest.fixture()
-def package():
+def package() -> ProjectPackage:
     p = ProjectPackage("root", "1.0")
     p.root_dir = Path.cwd()
 
@@ -98,12 +103,12 @@ def package():
 
 
 @pytest.fixture()
-def repo():
-    return Repository()
+def repo() -> Repository:
+    return Repository("repo")
 
 
 @pytest.fixture()
-def pool(repo):
+def pool(repo: Repository) -> Pool:
     pool = Pool()
     pool.add_repository(repo)
 
@@ -111,39 +116,48 @@ def pool(repo):
 
 
 @pytest.fixture()
-def installed():
+def installed() -> CustomInstalledRepository:
     return CustomInstalledRepository()
 
 
 @pytest.fixture()
-def locker():
-    return Locker()
+def locker(project_root: Path) -> Locker:
+    return Locker(lock_path=project_root)
 
 
 @pytest.fixture()
-def env():
+def env() -> NullEnv:
     return NullEnv()
 
 
 @pytest.fixture()
-def installer(package, pool, locker, env, installed, config):
+def installer(
+    package: ProjectPackage,
+    pool: Pool,
+    locker: Locker,
+    env: NullEnv,
+    installed: CustomInstalledRepository,
+    config: Config,
+):
     return Installer(NullIO(), env, package, locker, pool, config, installed=installed)
 
 
-def fixture(name):
-    file = TOMLFile(Path(__file__).parent / "fixtures" / "{}.test".format(name))
+def fixture(name: str) -> dict:
+    file = TOMLFile(Path(__file__).parent / "fixtures" / f"{name}.test")
 
     return file.read()
 
 
-def test_run_no_dependencies(installer, locker):
+def test_run_no_dependencies(installer: Installer, locker: Locker):
     installer.run()
     expected = fixture("no-dependencies")
 
     assert locker.written_data == expected
 
 
-def test_run_with_dependencies(installer, locker, repo, package):
+def test_run_with_dependencies(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     package_a = get_package("A", "1.0")
     package_b = get_package("B", "1.1")
     repo.add_package(package_a)
@@ -159,7 +173,11 @@ def test_run_with_dependencies(installer, locker, repo, package):
 
 
 def test_run_update_after_removing_dependencies(
-    installer, locker, repo, package, installed
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
 ):
     locker.locked(True)
     locker.mock_lock_data(
@@ -231,7 +249,13 @@ def test_run_update_after_removing_dependencies(
     assert len(removals) == 1
 
 
-def test_run_install_no_dev(installer, locker, repo, package, installed):
+def test_run_install_no_group(
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
+):
     locker.locked(True)
     locker.mock_lock_data(
         {
@@ -285,9 +309,9 @@ def test_run_install_no_dev(installer, locker, repo, package, installed):
 
     package.add_dependency(Factory.create_dependency("A", "~1.0"))
     package.add_dependency(Factory.create_dependency("B", "~1.1"))
-    package.add_dependency(Factory.create_dependency("C", "~1.2", category="dev"))
+    package.add_dependency(Factory.create_dependency("C", "~1.2", groups=["dev"]))
 
-    installer.dev_mode(False)
+    installer.only_groups([])
     installer.run()
 
     installs = installer.installer.installs
@@ -297,50 +321,80 @@ def test_run_install_no_dev(installer, locker, repo, package, installed):
     assert len(updates) == 0
 
     removals = installer.installer.removals
-    assert len(removals) == 1
+    assert len(removals) == 0
 
 
-def test_run_install_remove_untracked(installer, locker, repo, package, installed):
+@pytest.mark.parametrize(
+    "managed_reserved_package_names",
+    itertools.chain(
+        [()],
+        itertools.permutations(RESERVED_PACKAGES, 1),
+        itertools.permutations(RESERVED_PACKAGES, 2),
+        [RESERVED_PACKAGES],
+    ),
+)
+def test_run_install_with_synchronization(
+    managed_reserved_package_names: tuple[str, ...],
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
+):
+    package_a = get_package("a", "1.0")
+    package_b = get_package("b", "1.1")
+    package_c = get_package("c", "1.2")
+    package_pip = get_package("pip", "20.0.0")
+    package_setuptools = get_package("setuptools", "20.0.0")
+    package_wheel = get_package("wheel", "20.0.0")
+
+    all_packages = [
+        package_a,
+        package_b,
+        package_c,
+        package_pip,
+        package_setuptools,
+        package_wheel,
+    ]
+
+    managed_reserved_packages = [
+        pkg for pkg in all_packages if pkg.name in managed_reserved_package_names
+    ]
+    locked_packages = [package_a, *managed_reserved_packages]
+
+    for pkg in all_packages:
+        repo.add_package(pkg)
+        installed.add_package(pkg)
+
+    installed.add_package(package)  # Root package never removed.
+
+    package.add_dependency(Factory.create_dependency(package_a.name, package_a.version))
+
     locker.locked(True)
     locker.mock_lock_data(
         {
             "package": [
                 {
-                    "name": "a",
-                    "version": "1.0",
+                    "name": pkg.name,
+                    "version": pkg.version,
                     "category": "main",
                     "optional": False,
                     "platform": "*",
                     "python-versions": "*",
                     "checksum": [],
                 }
+                for pkg in locked_packages
             ],
             "metadata": {
                 "python-versions": "*",
                 "platform": "*",
                 "content-hash": "123456789",
-                "hashes": {"a": []},
+                "hashes": {pkg.name: [] for pkg in locked_packages},
             },
         }
     )
-    package_a = get_package("a", "1.0")
-    package_b = get_package("b", "1.1")
-    package_c = get_package("c", "1.2")
-    package_pip = get_package("pip", "20.0.0")
-    repo.add_package(package_a)
-    repo.add_package(package_b)
-    repo.add_package(package_c)
-    repo.add_package(package_pip)
 
-    installed.add_package(package_a)
-    installed.add_package(package_b)
-    installed.add_package(package_c)
-    installed.add_package(package_pip)  # Always required and never removed.
-    installed.add_package(package)  # Root package never removed.
-
-    package.add_dependency(Factory.create_dependency("A", "~1.0"))
-
-    installer.dev_mode(True).remove_untracked(True)
+    installer.requires_synchronization(True)
     installer.run()
 
     installs = installer.installer.installs
@@ -350,10 +404,18 @@ def test_run_install_remove_untracked(installer, locker, repo, package, installe
     assert len(updates) == 0
 
     removals = installer.installer.removals
-    assert set(r.name for r in removals) == {"b", "c"}
+
+    expected_removals = {
+        package_b.name,
+        package_c.name,
+        *managed_reserved_package_names,
+    }
+    assert {r.name for r in removals} == expected_removals
 
 
-def test_run_whitelist_add(installer, locker, repo, package):
+def test_run_whitelist_add(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     locker.locked(True)
     locker.mock_lock_data(
         {
@@ -395,7 +457,13 @@ def test_run_whitelist_add(installer, locker, repo, package):
     assert locker.written_data == expected
 
 
-def test_run_whitelist_remove(installer, locker, repo, package, installed):
+def test_run_whitelist_remove(
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
+):
     locker.locked(True)
     locker.mock_lock_data(
         {
@@ -447,7 +515,9 @@ def test_run_whitelist_remove(installer, locker, repo, package, installed):
     assert len(installer.installer.removals) == 1
 
 
-def test_add_with_sub_dependencies(installer, locker, repo, package):
+def test_add_with_sub_dependencies(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     package_a = get_package("A", "1.0")
     package_b = get_package("B", "1.1")
     package_c = get_package("C", "1.2")
@@ -469,7 +539,9 @@ def test_add_with_sub_dependencies(installer, locker, repo, package):
     assert locker.written_data == expected
 
 
-def test_run_with_python_versions(installer, locker, repo, package):
+def test_run_with_python_versions(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     package.python_versions = "~2.7 || ^3.4"
 
     package_a = get_package("A", "1.0")
@@ -495,7 +567,7 @@ def test_run_with_python_versions(installer, locker, repo, package):
 
 
 def test_run_with_optional_and_python_restricted_dependencies(
-    installer, locker, repo, package
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
 ):
     package.python_versions = "~2.7 || ^3.4"
 
@@ -538,7 +610,11 @@ def test_run_with_optional_and_python_restricted_dependencies(
 
 
 def test_run_with_optional_and_platform_restricted_dependencies(
-    installer, locker, repo, package, mocker
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    mocker: MockerFixture,
 ):
     mocker.patch("sys.platform", "darwin")
 
@@ -580,7 +656,9 @@ def test_run_with_optional_and_platform_restricted_dependencies(
     assert installer.installs[1].name == "c"
 
 
-def test_run_with_dependencies_extras(installer, locker, repo, package):
+def test_run_with_dependencies_extras(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     package_a = get_package("A", "1.0")
     package_b = get_package("B", "1.0")
     package_c = get_package("C", "1.0")
@@ -605,7 +683,9 @@ def test_run_with_dependencies_extras(installer, locker, repo, package):
     assert locker.written_data == expected
 
 
-def test_run_does_not_install_extras_if_not_requested(installer, locker, repo, package):
+def test_run_does_not_install_extras_if_not_requested(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     package.extras["foo"] = [get_dependency("D")]
     package_a = get_package("A", "1.0")
     package_b = get_package("B", "1.0")
@@ -635,7 +715,9 @@ def test_run_does_not_install_extras_if_not_requested(installer, locker, repo, p
     assert len(installer.installs) == 3  # A, B, C
 
 
-def test_run_installs_extras_if_requested(installer, locker, repo, package):
+def test_run_installs_extras_if_requested(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     package.extras["foo"] = [get_dependency("D")]
     package_a = get_package("A", "1.0")
     package_b = get_package("B", "1.0")
@@ -666,7 +748,9 @@ def test_run_installs_extras_if_requested(installer, locker, repo, package):
     assert len(installer.installs) == 4  # A, B, C, D
 
 
-def test_run_installs_extras_with_deps_if_requested(installer, locker, repo, package):
+def test_run_installs_extras_with_deps_if_requested(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     package.extras["foo"] = [get_dependency("C")]
     package_a = get_package("A", "1.0")
     package_b = get_package("B", "1.0")
@@ -699,7 +783,7 @@ def test_run_installs_extras_with_deps_if_requested(installer, locker, repo, pac
 
 
 def test_run_installs_extras_with_deps_if_requested_locked(
-    installer, locker, repo, package
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
 ):
     locker.locked(True)
     locker.mock_lock_data(fixture("extras-with-dependencies"))
@@ -730,7 +814,12 @@ def test_run_installs_extras_with_deps_if_requested_locked(
     assert len(installer.installs) == 4  # A, B, C, D
 
 
-def test_installer_with_pypi_repository(package, locker, installed, config):
+def test_installer_with_pypi_repository(
+    package: ProjectPackage,
+    locker: Locker,
+    installed: CustomInstalledRepository,
+    config: Config,
+):
     pool = Pool()
     pool.add_repository(MockRepository())
 
@@ -738,15 +827,21 @@ def test_installer_with_pypi_repository(package, locker, installed, config):
         NullIO(), NullEnv(), package, locker, pool, config, installed=installed
     )
 
-    package.add_dependency(Factory.create_dependency("pytest", "^3.5", category="dev"))
+    package.add_dependency(Factory.create_dependency("pytest", "^3.5", groups=["dev"]))
     installer.run()
 
     expected = fixture("with-pypi-repository")
 
-    assert expected == locker.written_data
+    assert locker.written_data == expected
 
 
-def test_run_installs_with_local_file(installer, locker, repo, package, fixture_dir):
+def test_run_installs_with_local_file(
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    fixture_dir: FixtureDirGetter,
+):
     file_path = fixture_dir("distributions/demo-0.1.0-py2.py3-none-any.whl")
     package.add_dependency(Factory.create_dependency("demo", {"file": str(file_path)}))
 
@@ -762,7 +857,11 @@ def test_run_installs_with_local_file(installer, locker, repo, package, fixture_
 
 
 def test_run_installs_wheel_with_no_requires_dist(
-    installer, locker, repo, package, fixture_dir
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    fixture_dir: FixtureDirGetter,
 ):
     file_path = fixture_dir(
         "wheel_with_no_requires_dist/demo-0.1.0-py2.py3-none-any.whl"
@@ -779,7 +878,12 @@ def test_run_installs_wheel_with_no_requires_dist(
 
 
 def test_run_installs_with_local_poetry_directory_and_extras(
-    installer, locker, repo, package, tmpdir, fixture_dir
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    tmpdir: Path,
+    fixture_dir: FixtureDirGetter,
 ):
     file_path = fixture_dir("project_with_extras")
     package.add_dependency(
@@ -800,7 +904,12 @@ def test_run_installs_with_local_poetry_directory_and_extras(
 
 
 def test_run_installs_with_local_poetry_directory_transitive(
-    installer, locker, repo, package, tmpdir, fixture_dir
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    tmpdir: Path,
+    fixture_dir: FixtureDirGetter,
 ):
     root_dir = fixture_dir("directory")
     package.root_dir = root_dir
@@ -827,7 +936,12 @@ def test_run_installs_with_local_poetry_directory_transitive(
 
 
 def test_run_installs_with_local_poetry_file_transitive(
-    installer, locker, repo, package, tmpdir, fixture_dir
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    tmpdir: Path,
+    fixture_dir: FixtureDirGetter,
 ):
     root_dir = fixture_dir("directory")
     package.root_dir = root_dir
@@ -854,7 +968,12 @@ def test_run_installs_with_local_poetry_file_transitive(
 
 
 def test_run_installs_with_local_setuptools_directory(
-    installer, locker, repo, package, tmpdir, fixture_dir
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    tmpdir: Path,
+    fixture_dir: FixtureDirGetter,
 ):
     file_path = fixture_dir("project_with_setup/")
     package.add_dependency(
@@ -873,7 +992,9 @@ def test_run_installs_with_local_setuptools_directory(
     assert len(installer.installer.installs) == 3
 
 
-def test_run_with_prereleases(installer, locker, repo, package):
+def test_run_with_prereleases(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     locker.locked(True)
     locker.mock_lock_data(
         {
@@ -915,7 +1036,9 @@ def test_run_with_prereleases(installer, locker, repo, package):
     assert locker.written_data == expected
 
 
-def test_run_changes_category_if_needed(installer, locker, repo, package):
+def test_run_changes_category_if_needed(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     locker.locked(True)
     locker.mock_lock_data(
         {
@@ -946,7 +1069,7 @@ def test_run_changes_category_if_needed(installer, locker, repo, package):
 
     package.add_dependency(
         Factory.create_dependency(
-            "A", {"version": "^1.0", "optional": True}, category="dev"
+            "A", {"version": "^1.0", "optional": True}, groups=["dev"]
         )
     )
     package.add_dependency(Factory.create_dependency("B", "^1.1"))
@@ -960,7 +1083,9 @@ def test_run_changes_category_if_needed(installer, locker, repo, package):
     assert locker.written_data == expected
 
 
-def test_run_update_all_with_lock(installer, locker, repo, package):
+def test_run_update_all_with_lock(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     locker.locked(True)
     locker.mock_lock_data(
         {
@@ -997,7 +1122,9 @@ def test_run_update_all_with_lock(installer, locker, repo, package):
     assert locker.written_data == expected
 
 
-def test_run_update_with_locked_extras(installer, locker, repo, package):
+def test_run_update_with_locked_extras(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     locker.locked(True)
     locker.mock_lock_data(
         {
@@ -1046,8 +1173,8 @@ def test_run_update_with_locked_extras(installer, locker, repo, package):
     b_dependency.in_extras.append("foo")
     c_dependency = get_dependency("C", "^1.0")
     c_dependency.python_versions = "~2.7"
-    package_a.requires.append(b_dependency)
-    package_a.requires.append(c_dependency)
+    package_a.add_dependency(b_dependency)
+    package_a.add_dependency(c_dependency)
 
     repo.add_package(package_a)
     repo.add_package(get_package("B", "1.0"))
@@ -1069,7 +1196,7 @@ def test_run_update_with_locked_extras(installer, locker, repo, package):
 
 
 def test_run_install_duplicate_dependencies_different_constraints(
-    installer, locker, repo, package
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
 ):
     package.add_dependency(Factory.create_dependency("A", "*"))
 
@@ -1114,7 +1241,7 @@ def test_run_install_duplicate_dependencies_different_constraints(
 
 
 def test_run_install_duplicate_dependencies_different_constraints_with_lock(
-    installer, locker, repo, package
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
 ):
     locker.locked(True)
     locker.mock_lock_data(
@@ -1224,7 +1351,11 @@ def test_run_install_duplicate_dependencies_different_constraints_with_lock(
 
 
 def test_run_update_uninstalls_after_removal_transient_dependency(
-    installer, locker, repo, package, installed
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
 ):
     locker.locked(True)
     locker.mock_lock_data(
@@ -1285,7 +1416,11 @@ def test_run_update_uninstalls_after_removal_transient_dependency(
 
 
 def test_run_install_duplicate_dependencies_different_constraints_with_lock_update(
-    installer, locker, repo, package, installed
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
 ):
     locker.locked(True)
     locker.mock_lock_data(
@@ -1395,8 +1530,12 @@ def test_run_install_duplicate_dependencies_different_constraints_with_lock_upda
 @pytest.mark.skip(
     "This is not working at the moment due to limitations in the resolver"
 )
-def test_installer_test_solver_finds_compatible_package_for_dependency_python_not_fully_compatible_with_package_python(
-    installer, locker, repo, package, installed
+def test_installer_test_solver_finds_compatible_package_for_dependency_python_not_fully_compatible_with_package_python(  # noqa: E501
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
 ):
     package.python_versions = "~2.7 || ^3.4"
     package.add_dependency(
@@ -1418,15 +1557,18 @@ def test_installer_test_solver_finds_compatible_package_for_dependency_python_no
     assert locker.written_data == expected
 
     installs = installer.installer.installs
-
-    if sys.version_info >= (3, 5, 0):
-        assert len(installs) == 1
-    else:
-        assert len(installs) == 0
-
-
-def test_installer_required_extras_should_not_be_removed_when_updating_single_dependency(
-    installer, locker, repo, package, installed, env, pool, config
+    assert len(installs) == 1
+
+
+def test_installer_required_extras_should_not_be_removed_when_updating_single_dependency(  # noqa: E501
+    installer: Installer,
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
+    env: NullEnv,
+    pool: Pool,
+    config: Config,
 ):
     package.add_dependency(Factory.create_dependency("A", {"version": "^1.0"}))
 
@@ -1477,8 +1619,14 @@ def test_installer_required_extras_should_not_be_removed_when_updating_single_de
     assert len(installer.installer.removals) == 0
 
 
-def test_installer_required_extras_should_not_be_removed_when_updating_single_dependency_pypi_repository(
-    locker, repo, package, installed, env, mocker, config
+def test_installer_required_extras_should_not_be_removed_when_updating_single_dependency_pypi_repository(  # noqa: E501
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
+    env: NullEnv,
+    mocker: MockerFixture,
+    config: Config,
 ):
     mocker.patch("sys.platform", "darwin")
 
@@ -1514,13 +1662,18 @@ def test_installer_required_extras_should_not_be_removed_when_updating_single_de
     installer.whitelist(["pytest"])
     installer.run()
 
-    assert len(installer.installer.installs) == 6 if not PY2 else 7
+    assert len(installer.installer.installs) == 7
     assert len(installer.installer.updates) == 0
     assert len(installer.installer.removals) == 0
 
 
 def test_installer_required_extras_should_be_installed(
-    locker, repo, package, installed, env, config
+    locker: Locker,
+    repo: Repository,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
+    env: NullEnv,
+    config: Config,
 ):
     pool = Pool()
     pool.add_repository(MockRepository())
@@ -1558,7 +1711,7 @@ def test_installer_required_extras_should_be_installed(
 
 
 def test_update_multiple_times_with_split_dependencies_is_idempotent(
-    installer, locker, repo, package
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
 ):
     locker.locked(True)
     locker.mock_lock_data(
@@ -1619,25 +1772,29 @@ def test_update_multiple_times_with_split_dependencies_is_idempotent(
     installer.update(True)
     installer.run()
 
-    assert expected == locker.written_data
+    assert locker.written_data == expected
 
     locker.mock_lock_data(locker.written_data)
 
     installer.update(True)
     installer.run()
 
-    assert expected == locker.written_data
+    assert locker.written_data == expected
 
     locker.mock_lock_data(locker.written_data)
 
     installer.update(True)
     installer.run()
 
-    assert expected == locker.written_data
+    assert locker.written_data == expected
 
 
 def test_installer_can_install_dependencies_from_forced_source(
-    locker, package, installed, env, config
+    locker: Locker,
+    package: ProjectPackage,
+    installed: CustomInstalledRepository,
+    env: NullEnv,
+    config: Config,
 ):
     package.python_versions = "^3.7"
     package.add_dependency(
@@ -1660,7 +1817,9 @@ def test_installer_can_install_dependencies_from_forced_source(
     assert len(installer.installer.removals) == 0
 
 
-def test_run_installs_with_url_file(installer, locker, repo, package):
+def test_run_installs_with_url_file(
+    installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage
+):
     url = "https://python-poetry.org/distributions/demo-0.1.0-py2.py3-none-any.whl"
     package.add_dependency(Factory.create_dependency("demo", {"url": url}))
 
@@ -1676,7 +1835,7 @@ def test_run_installs_with_url_file(installer, locker, repo, package):
 
 
 def test_installer_uses_prereleases_if_they_are_compatible(
-    installer, locker, package, repo
+    installer: Installer, locker: Locker, package: ProjectPackage, repo: Repository
 ):
     package.python_versions = "~2.7 || ^3.4"
     package.add_dependency(
@@ -1706,12 +1865,16 @@ def test_installer_uses_prereleases_if_they_are_compatible(
 
 
 def test_installer_can_handle_old_lock_files(
-    installer, locker, package, repo, installed, config
+    locker: Locker,
+    package: ProjectPackage,
+    repo: Repository,
+    installed: CustomInstalledRepository,
+    config: Config,
 ):
     pool = Pool()
     pool.add_repository(MockRepository())
 
-    package.add_dependency(Factory.create_dependency("pytest", "^3.5", category="dev"))
+    package.add_dependency(Factory.create_dependency("pytest", "^3.5", groups=["dev"]))
 
     locker.locked()
     locker.mock_lock_data(fixture("old-lock"))
@@ -1722,7 +1885,7 @@ def test_installer_can_handle_old_lock_files(
 
     installer.run()
 
-    assert 6 == len(installer.installer.installs)
+    assert len(installer.installer.installs) == 6
 
     installer = Installer(
         NullIO(),
@@ -1737,7 +1900,7 @@ def test_installer_can_handle_old_lock_files(
     installer.run()
 
     # funcsigs will be added
-    assert 7 == len(installer.installer.installs)
+    assert len(installer.installer.installs) == 7
 
     installer = Installer(
         NullIO(),
@@ -1752,4 +1915,4 @@ def test_installer_can_handle_old_lock_files(
     installer.run()
 
     # colorama will be added
-    assert 8 == len(installer.installer.installs)
+    assert len(installer.installer.installs) == 8
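
The new `test_run_install_with_synchronization` above replaces `remove_untracked` with `requires_synchronization` and treats pip/setuptools/wheel as removable once they are managed. Its expected-removals arithmetic reduces to a set difference; a standalone sketch with plain sets (no Poetry objects, names chosen for illustration):

```python
# Sketch of the synchronization expectation from the test above: installed
# packages that are neither resolved from the project's dependencies nor in
# the still-protected reserved set get removed.
RESERVED = {"pip", "setuptools", "wheel"}


def expected_removals(installed, resolved, managed_reserved):
    # Reserved packages stay protected unless explicitly managed.
    protected = RESERVED - set(managed_reserved)
    return set(installed) - set(resolved) - protected


installed = {"a", "b", "c", "pip", "setuptools", "wheel"}
resolved = {"a"}  # only "a" is a project dependency

assert expected_removals(installed, resolved, ()) == {"b", "c"}
assert expected_removals(installed, resolved, ("pip",)) == {"b", "c", "pip"}
```
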
diff --git a/vendor/poetry/tests/installation/test_pip_installer.py b/vendor/poetry/tests/installation/test_pip_installer.py
index 4cb00c49..62624b68 100644
--- a/vendor/poetry/tests/installation/test_pip_installer.py
+++ b/vendor/poetry/tests/installation/test_pip_installer.py
@@ -1,18 +1,32 @@
+from __future__ import annotations
+
+import re
 import shutil
 
+from pathlib import Path
+from typing import TYPE_CHECKING
+
 import pytest
 
+from cleo.io.null_io import NullIO
 from poetry.core.packages.package import Package
+
 from poetry.installation.pip_installer import PipInstaller
-from poetry.io.null_io import NullIO
 from poetry.repositories.legacy_repository import LegacyRepository
 from poetry.repositories.pool import Pool
-from poetry.utils._compat import Path
+from poetry.utils.authenticator import RepositoryCertificateConfig
 from poetry.utils.env import NullEnv
 
 
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
+
+    from poetry.utils.env import VirtualEnv
+    from tests.conftest import Config
+
+
 @pytest.fixture
-def package_git():
+def package_git() -> Package:
     package = Package(
         "demo",
         "1.0.0",
@@ -25,16 +39,30 @@ def package_git():
 
 
 @pytest.fixture
-def pool():
+def package_git_with_subdirectory() -> Package:
+    package = Package(
+        "subdirectories",
+        "2.0.0",
+        source_type="git",
+        source_url="https://github.com/demo/subdirectories.git",
+        source_reference="master",
+        source_subdirectory="two",
+    )
+
+    return package
+
+
+@pytest.fixture
+def pool() -> Pool:
     return Pool()
 
 
 @pytest.fixture
-def installer(pool):
+def installer(pool: Pool) -> PipInstaller:
     return PipInstaller(NullEnv(), NullIO(), pool)
 
 
-def test_requirement(installer):
+def test_requirement(installer: PipInstaller):
     package = Package("ipython", "7.5.0")
     package.files = [
         {"file": "foo-0.1.0.tar.gz", "hash": "md5:dbdc53e3918f28fa335a173432402a00"},
@@ -52,7 +80,7 @@ def test_requirement(installer):
         "\n"
     )
 
-    assert expected == result
+    assert result == expected
 
 
 def test_requirement_source_type_url():
@@ -62,24 +90,42 @@ def test_requirement_source_type_url():
         "foo",
         "0.0.0",
         source_type="url",
-        source_url="https://somehwere.com/releases/foo-1.0.0.tar.gz",
+        source_url="https://somewhere.com/releases/foo-1.0.0.tar.gz",
     )
 
     result = installer.requirement(foo, formatted=True)
-    expected = "{}#egg={}".format(foo.source_url, foo.name)
+    expected = f"{foo.source_url}#egg={foo.name}"
+
+    assert result == expected
+
+
+def test_requirement_git_subdirectory(
+    pool: Pool, package_git_with_subdirectory: Package
+) -> None:
+    null_env = NullEnv()
+    installer = PipInstaller(null_env, NullIO(), pool)
+    result = installer.requirement(package_git_with_subdirectory)
+    expected = (
+        "git+https://github.com/demo/subdirectories.git"
+        "@master#egg=subdirectories&subdirectory=two"
+    )
 
-    assert expected == result
+    assert result == expected
+    installer.install(package_git_with_subdirectory)
+    assert len(null_env.executed) == 1
+    cmd = null_env.executed[0]
+    assert Path(cmd[-1]).parts[-3:] == ("demo", "subdirectories", "two")
 
 
-def test_requirement_git_develop_false(installer, package_git):
+def test_requirement_git_develop_false(installer: PipInstaller, package_git: Package):
     package_git.develop = False
     result = installer.requirement(package_git)
     expected = "git+git@github.com:demo/demo.git@master#egg=demo"
 
-    assert expected == result
+    assert result == expected
 
 
-def test_install_with_non_pypi_default_repository(pool, installer):
+def test_install_with_non_pypi_default_repository(pool: Pool, installer: PipInstaller):
     default = LegacyRepository("default", "https://default.com")
     another = LegacyRepository("another", "https://another.com")
 
@@ -105,44 +151,22 @@ def test_install_with_non_pypi_default_repository(pool, installer):
     installer.install(bar)
 
 
-def test_install_with_cert():
-    ca_path = "path/to/cert.pem"
-    pool = Pool()
-
-    default = LegacyRepository("default", "https://foo.bar", cert=Path(ca_path))
-
-    pool.add_repository(default, default=True)
-
-    null_env = NullEnv()
-
-    installer = PipInstaller(null_env, NullIO(), pool)
-
-    foo = Package(
-        "foo",
-        "0.0.0",
-        source_type="legacy",
-        source_reference=default.name,
-        source_url=default.url,
-    )
-
-    installer.install(foo)
-
-    assert len(null_env.executed) == 1
-    cmd = null_env.executed[0]
-    assert "--cert" in cmd
-    cert_index = cmd.index("--cert")
-    # Need to do the str(Path()) bit because Windows paths get modified by Path
-    assert cmd[cert_index + 1] == str(Path(ca_path))
-
-
-def test_install_with_client_cert():
+@pytest.mark.parametrize(
+    ("key", "option"),
+    [
+        ("client_cert", "client-cert"),
+        ("cert", "cert"),
+    ],
+)
+def test_install_with_certs(mocker: MockerFixture, key: str, option: str):
     client_path = "path/to/client.pem"
-    pool = Pool()
-
-    default = LegacyRepository(
-        "default", "https://foo.bar", client_cert=Path(client_path)
+    mocker.patch(
+        "poetry.utils.authenticator.Authenticator.get_certs_for_url",
+        return_value=RepositoryCertificateConfig(**{key: Path(client_path)}),
     )
 
+    default = LegacyRepository("default", "https://foo.bar")
+    pool = Pool()
     pool.add_repository(default, default=True)
 
     null_env = NullEnv()
@@ -161,25 +185,27 @@ def test_install_with_client_cert():
 
     assert len(null_env.executed) == 1
     cmd = null_env.executed[0]
-    assert "--client-cert" in cmd
-    cert_index = cmd.index("--client-cert")
+    assert f"--{option}" in cmd
+    cert_index = cmd.index(f"--{option}")
     # Need to do the str(Path()) bit because Windows paths get modified by Path
     assert cmd[cert_index + 1] == str(Path(client_path))
 
 
-def test_requirement_git_develop_true(installer, package_git):
+def test_requirement_git_develop_true(installer: PipInstaller, package_git: Package):
     package_git.develop = True
     result = installer.requirement(package_git)
     expected = ["-e", "git+git@github.com:demo/demo.git@master#egg=demo"]
 
-    assert expected == result
+    assert result == expected
 
 
-def test_uninstall_git_package_nspkg_pth_cleanup(mocker, tmp_venv, pool):
+def test_uninstall_git_package_nspkg_pth_cleanup(
+    mocker: MockerFixture, tmp_venv: VirtualEnv, pool: Pool
+):
     # this test scenario requires a real installation using the pip installer
     installer = PipInstaller(tmp_venv, NullIO(), pool)
 
-    # use a namepspace package
+    # use a namespace package
     package = Package(
         "namespace-package-one",
         "1.0.0",
@@ -188,15 +214,10 @@ def test_uninstall_git_package_nspkg_pth_cleanup(mocker, tmp_venv, pool):
         source_reference="master",
     )
 
-    # we do this here because the virtual env might not be usable if failure case is triggered
-    pth_file_candidate = tmp_venv.site_packages.path / "{}-nspkg.pth".format(
-        package.name
-    )
-
     # in order to reproduce the scenario where the git source is removed prior to proper
     # clean up of nspkg.pth file, we need to make sure the fixture is copied and not
     # symlinked into the git src directory
-    def copy_only(source, dest):
+    def copy_only(source: Path, dest: Path) -> None:
         if dest.exists():
             dest.unlink()
 
@@ -211,16 +232,45 @@ def copy_only(source, dest):
     installer.install(package)
     installer.remove(package)
 
-    assert not Path(pth_file_candidate).exists()
+    pth_file = f"{package.name}-nspkg.pth"
+    assert not tmp_venv.site_packages.exists(pth_file)
 
     # any command in the virtual environment should trigger the error message
     output = tmp_venv.run("python", "-m", "site")
-    assert "Error processing line 1 of {}".format(pth_file_candidate) not in output
+    assert not re.match(rf"Error processing line 1 of .*{pth_file}", output)
 
 
-def test_install_directory_fallback_on_poetry_create_error(mocker, tmp_venv, pool):
-    import poetry.installation.pip_installer
+def test_install_with_trusted_host(config: Config):
+    config.merge({"certificates": {"default": {"cert": False}}})
 
+    default = LegacyRepository("default", "https://foo.bar")
+    pool = Pool()
+    pool.add_repository(default, default=True)
+
+    null_env = NullEnv()
+
+    installer = PipInstaller(null_env, NullIO(), pool)
+
+    foo = Package(
+        "foo",
+        "0.0.0",
+        source_type="legacy",
+        source_reference=default.name,
+        source_url=default.url,
+    )
+
+    installer.install(foo)
+
+    assert len(null_env.executed) == 1
+    cmd = null_env.executed[0]
+    assert "--trusted-host" in cmd
+    cert_index = cmd.index("--trusted-host")
+    assert cmd[cert_index + 1] == "foo.bar"
+
+
+def test_install_directory_fallback_on_poetry_create_error(
+    mocker: MockerFixture, tmp_venv: VirtualEnv, pool: Pool
+):
     mock_create_poetry = mocker.patch(
         "poetry.factory.Factory.create_poetry", side_effect=RuntimeError
     )
@@ -228,9 +278,7 @@ def test_install_directory_fallback_on_poetry_create_error(mocker, tmp_venv, poo
     mock_editable_builder = mocker.patch(
         "poetry.masonry.builders.editable.EditableBuilder"
     )
-    mock_pip_install = mocker.patch.object(
-        poetry.installation.pip_installer.PipInstaller, "run"
-    )
+    mock_pip_install = mocker.patch("poetry.installation.pip_installer.pip_install")
 
     package = Package(
         "demo",
@@ -248,3 +296,5 @@ def test_install_directory_fallback_on_poetry_create_error(mocker, tmp_venv, poo
     assert mock_sdist_builder.call_count == 0
     assert mock_editable_builder.call_count == 0
     assert mock_pip_install.call_count == 1
+    assert mock_pip_install.call_args[1].get("deps") is False
+    assert mock_pip_install.call_args[1].get("upgrade") is True
diff --git a/vendor/poetry/tests/integration/__init__.py b/vendor/poetry/tests/integration/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/tests/integration/test_utils_vcs_git.py b/vendor/poetry/tests/integration/test_utils_vcs_git.py
new file mode 100644
index 00000000..334ae04a
--- /dev/null
+++ b/vendor/poetry/tests/integration/test_utils_vcs_git.py
@@ -0,0 +1,352 @@
+from __future__ import annotations
+
+import os
+import uuid
+
+from copy import deepcopy
+from hashlib import sha1
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+import pytest
+
+from dulwich.client import HTTPUnauthorized
+from dulwich.client import get_transport_and_path
+from dulwich.config import ConfigFile
+from dulwich.repo import Repo
+from poetry.core.pyproject.toml import PyProjectTOML
+
+from poetry.console.exceptions import PoetrySimpleConsoleException
+from poetry.utils.authenticator import Authenticator
+from poetry.vcs.git import Git
+from poetry.vcs.git.backend import GitRefSpec
+
+
+if TYPE_CHECKING:
+    from _pytest.tmpdir import TempdirFactory
+    from dulwich.client import FetchPackResult
+    from dulwich.client import GitClient
+    from pytest_mock import MockerFixture
+
+    from tests.conftest import Config
+
+
+# these tests are integration as they rely on an external repository
+# see `source_url` fixture
+pytestmark = pytest.mark.integration
+
+
+@pytest.fixture(autouse=True)
+def git_mock() -> None:
+    pass
+
+
+@pytest.fixture(autouse=True)
+def setup(config: Config) -> None:
+    pass
+
+
+REVISION_TO_VERSION_MAP = {
+    "b6204750a763268e941cec1f05f8986b6c66913e": "0.1.0",  # Annotated Tag
+    "18d3ff247d288da701fc7f9ce2ec718388fca266": "0.1.1-alpha.0",
+    "dd07e8d4efb82690e7975b289917a7782fbef29b": "0.2.0-alpha.0",
+    "7263819922b4cd008afbb447f425a562432dad7d": "0.2.0-alpha.1",
+}
+
+BRANCH_TO_REVISION_MAP = {"0.1": "18d3ff247d288da701fc7f9ce2ec718388fca266"}
+
+TAG_TO_REVISION_MAP = {"v0.1.0": "b6204750a763268e941cec1f05f8986b6c66913e"}
+
+REF_TO_REVISION_MAP = {
+    "branch": BRANCH_TO_REVISION_MAP,
+    "tag": TAG_TO_REVISION_MAP,
+}
+
+
+@pytest.fixture
+def use_system_git_client(config: Config) -> None:
+    config.merge({"experimental": {"system-git-client": True}})
+
+
+@pytest.fixture(scope="module")
+def source_url() -> str:
+    return "https://github.com/python-poetry/test-fixture-vcs-repository.git"
+
+
+@pytest.fixture(scope="module")
+def source_directory_name(source_url: str) -> str:
+    return Git.get_name_from_source_url(url=source_url)
+
+
+@pytest.fixture(scope="module")
+def local_repo(tmpdir_factory: TempdirFactory, source_directory_name: str) -> Repo:
+    with Repo.init(
+        tmpdir_factory.mktemp("src") / source_directory_name, mkdir=True
+    ) as repo:
+        yield repo
+
+
+@pytest.fixture(scope="module")
+def _remote_refs(source_url: str, local_repo: Repo) -> FetchPackResult:
+    client: GitClient
+    path: str
+    client, path = get_transport_and_path(source_url)
+    return client.fetch(
+        path, local_repo, determine_wants=local_repo.object_store.determine_wants_all
+    )
+
+
+@pytest.fixture
+def remote_refs(_remote_refs: FetchPackResult) -> FetchPackResult:
+    return deepcopy(_remote_refs)
+
+
+@pytest.fixture(scope="module")
+def remote_default_ref(_remote_refs: FetchPackResult) -> bytes:
+    return _remote_refs.symrefs[b"HEAD"]
+
+
+@pytest.fixture(scope="module")
+def remote_default_branch(remote_default_ref: bytes) -> str:
+    return remote_default_ref.decode("utf-8").replace("refs/heads/", "")
+
+
+def test_git_local_info(
+    source_url: str, remote_refs: FetchPackResult, remote_default_ref: bytes
+) -> None:
+    with Git.clone(url=source_url) as repo:
+        info = Git.info(repo=repo)
+        assert info.origin == source_url
+        assert info.revision == remote_refs.refs[remote_default_ref].decode("utf-8")
+
+
+def test_git_clone_default_branch_head(
+    source_url: str,
+    remote_refs: FetchPackResult,
+    remote_default_ref: bytes,
+    mocker: MockerFixture,
+):
+    spy = mocker.spy(Git, "_clone")
+    spy_legacy = mocker.spy(Git, "_clone_legacy")
+
+    with Git.clone(url=source_url) as repo:
+        assert remote_refs.refs[remote_default_ref] == repo.head()
+
+    spy_legacy.assert_not_called()
+    spy.assert_called()
+
+
+def test_git_clone_fails_for_non_existent_branch(source_url: str):
+    branch = uuid.uuid4().hex
+
+    with pytest.raises(PoetrySimpleConsoleException) as e:
+        Git.clone(url=source_url, branch=branch)
+
+    assert f"Failed to clone {source_url} at '{branch}'" in str(e.value)
+
+
+def test_git_clone_fails_for_non_existent_revision(source_url: str):
+    revision = sha1(uuid.uuid4().bytes).hexdigest()
+
+    with pytest.raises(PoetrySimpleConsoleException) as e:
+        Git.clone(url=source_url, revision=revision)
+
+    assert f"Failed to clone {source_url} at '{revision}'" in str(e.value)
+
+
+def assert_version(repo: Repo, expected_revision: str) -> None:
+    version = PyProjectTOML(
+        path=Path(repo.path).joinpath("pyproject.toml")
+    ).poetry_config["version"]
+
+    revision = Git.get_revision(repo=repo)
+
+    assert revision == expected_revision
+    assert revision in REVISION_TO_VERSION_MAP
+    assert version == REVISION_TO_VERSION_MAP[revision]
+
+
+def test_git_clone_when_branch_is_ref(source_url: str) -> None:
+    with Git.clone(url=source_url, branch="refs/heads/0.1") as repo:
+        assert_version(repo, BRANCH_TO_REVISION_MAP["0.1"])
+
+
+@pytest.mark.parametrize("branch", [*BRANCH_TO_REVISION_MAP.keys()])
+def test_git_clone_branch(
+    source_url: str, remote_refs: FetchPackResult, branch: str
+) -> None:
+    with Git.clone(url=source_url, branch=branch) as repo:
+        assert_version(repo, BRANCH_TO_REVISION_MAP[branch])
+
+
+@pytest.mark.parametrize("tag", [*TAG_TO_REVISION_MAP.keys()])
+def test_git_clone_tag(source_url: str, remote_refs: FetchPackResult, tag: str) -> None:
+    with Git.clone(url=source_url, tag=tag) as repo:
+        assert_version(repo, TAG_TO_REVISION_MAP[tag])
+
+
+def test_git_clone_multiple_times(
+    source_url: str, remote_refs: FetchPackResult
+) -> None:
+    for revision in REVISION_TO_VERSION_MAP:
+        with Git.clone(url=source_url, revision=revision) as repo:
+            assert_version(repo, revision)
+
+
+def test_git_clone_revision_is_branch(
+    source_url: str, remote_refs: FetchPackResult
+) -> None:
+    with Git.clone(url=source_url, revision="0.1") as repo:
+        assert_version(repo, BRANCH_TO_REVISION_MAP["0.1"])
+
+
+def test_git_clone_revision_is_ref(
+    source_url: str, remote_refs: FetchPackResult
+) -> None:
+    with Git.clone(url=source_url, revision="refs/heads/0.1") as repo:
+        assert_version(repo, BRANCH_TO_REVISION_MAP["0.1"])
+
+
+@pytest.mark.parametrize(
+    ("revision", "expected_revision"),
+    [
+        ("0.1", BRANCH_TO_REVISION_MAP["0.1"]),
+        ("v0.1.0", TAG_TO_REVISION_MAP["v0.1.0"]),
+        *zip(REVISION_TO_VERSION_MAP, REVISION_TO_VERSION_MAP),
+    ],
+)
+def test_git_clone_revision_is_tag(
+    source_url: str, remote_refs: FetchPackResult, revision: str, expected_revision: str
+) -> None:
+    with Git.clone(url=source_url, revision=revision) as repo:
+        assert_version(repo, expected_revision)
+
+
+def test_git_clone_clones_submodules(source_url: str) -> None:
+    with Git.clone(url=source_url) as repo:
+        submodule_package_directory = (
+            Path(repo.path) / "submodules" / "sample-namespace-packages"
+        )
+
+    assert submodule_package_directory.exists()
+    assert submodule_package_directory.joinpath("README.md").exists()
+    assert len(list(submodule_package_directory.glob("*"))) > 1
+
+
+def test_system_git_fallback_on_http_401(
+    mocker: MockerFixture,
+    source_url: str,
+) -> None:
+    spy = mocker.spy(Git, "_clone_legacy")
+    mocker.patch.object(Git, "_clone", side_effect=HTTPUnauthorized(None, None))
+
+    with Git.clone(url=source_url, branch="0.1") as repo:
+        path = Path(repo.path)
+        assert_version(repo, BRANCH_TO_REVISION_MAP["0.1"])
+
+    spy.assert_called_with(
+        url="https://github.com/python-poetry/test-fixture-vcs-repository.git",
+        target=path,
+        refspec=GitRefSpec(branch="0.1", revision=None, tag=None, ref=b"HEAD"),
+    )
+    spy.assert_called_once()
+
+
+GIT_USERNAME = os.environ.get("POETRY_TEST_INTEGRATION_GIT_USERNAME")
+GIT_PASSWORD = os.environ.get("POETRY_TEST_INTEGRATION_GIT_PASSWORD")
+HTTP_AUTH_CREDENTIALS_NOT_AVAILABLE = not (GIT_USERNAME and GIT_PASSWORD)
+
+
+@pytest.mark.skipif(
+    HTTP_AUTH_CREDENTIALS_NOT_AVAILABLE,
+    reason="HTTP authentication credentials not available",
+)
+def test_configured_repository_http_auth(
+    mocker: MockerFixture, source_url: str, config: Config
+) -> None:
+    from poetry.vcs.git import backend
+
+    spy_clone_legacy = mocker.spy(Git, "_clone_legacy")
+    spy_get_transport_and_path = mocker.spy(backend, "get_transport_and_path")
+
+    config.merge(
+        {
+            "repositories": {"git-repo": {"url": source_url}},
+            "http-basic": {
+                "git-repo": {
+                    "username": GIT_USERNAME,
+                    "password": GIT_PASSWORD,
+                }
+            },
+        }
+    )
+
+    dummy_git_config = ConfigFile()
+    mocker.patch(
+        "poetry.vcs.git.backend.Repo.get_config_stack",
+        return_value=dummy_git_config,
+    )
+
+    mocker.patch(
+        "poetry.vcs.git.backend.get_default_authenticator",
+        return_value=Authenticator(config=config),
+    )
+
+    with Git.clone(url=source_url, branch="0.1") as repo:
+        assert_version(repo, BRANCH_TO_REVISION_MAP["0.1"])
+
+    spy_clone_legacy.assert_not_called()
+
+    spy_get_transport_and_path.assert_called_with(
+        location=source_url,
+        config=dummy_git_config,
+        username=GIT_USERNAME,
+        password=GIT_PASSWORD,
+    )
+    spy_get_transport_and_path.assert_called_once()
+
+
+def test_username_password_parameter_is_not_passed_to_dulwich(
+    mocker: MockerFixture, source_url: str, config: Config
+) -> None:
+    from poetry.vcs.git import backend
+
+    spy_clone = mocker.spy(Git, "_clone")
+    spy_get_transport_and_path = mocker.spy(backend, "get_transport_and_path")
+
+    dummy_git_config = ConfigFile()
+    mocker.patch(
+        "poetry.vcs.git.backend.Repo.get_config_stack",
+        return_value=dummy_git_config,
+    )
+
+    with Git.clone(url=source_url, branch="0.1") as repo:
+        assert_version(repo, BRANCH_TO_REVISION_MAP["0.1"])
+
+    spy_clone.assert_called_once()
+
+    spy_get_transport_and_path.assert_called_with(
+        location=source_url,
+        config=dummy_git_config,
+    )
+    spy_get_transport_and_path.assert_called_once()
+
+
+def test_system_git_called_when_configured(
+    mocker: MockerFixture, source_url: str, use_system_git_client: None
+) -> None:
+    spy_legacy = mocker.spy(Git, "_clone_legacy")
+    spy = mocker.spy(Git, "_clone")
+
+    with Git.clone(url=source_url, branch="0.1") as repo:
+        path = Path(repo.path)
+        assert_version(repo, BRANCH_TO_REVISION_MAP["0.1"])
+
+    spy.assert_not_called()
+
+    spy_legacy.assert_called_once()
+    spy_legacy.assert_called_with(
+        url=source_url,
+        target=path,
+        refspec=GitRefSpec(branch="0.1", revision=None, tag=None, ref=b"HEAD"),
+    )
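
The integration tests above gate the HTTP-auth scenario on two environment variables, and `skipif` fires when either is missing. A minimal standalone version of that gate, assuming the same variable names (the test body here is illustrative):

```python
# Standalone sketch of the env-var gate used above: skip integration tests
# unless both credentials are present in the environment.
import os

import pytest

USERNAME = os.environ.get("POETRY_TEST_INTEGRATION_GIT_USERNAME")
PASSWORD = os.environ.get("POETRY_TEST_INTEGRATION_GIT_PASSWORD")

requires_git_credentials = pytest.mark.skipif(
    not (USERNAME and PASSWORD),
    reason="HTTP authentication credentials not available",
)


@requires_git_credentials
def test_something_with_auth() -> None:
    assert USERNAME and PASSWORD
```
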
diff --git a/vendor/poetry/tests/json/__init__.py b/vendor/poetry/tests/json/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/tests/json/fixtures/source/complete_invalid.toml b/vendor/poetry/tests/json/fixtures/source/complete_invalid.toml
new file mode 100644
index 00000000..bd70bc6a
--- /dev/null
+++ b/vendor/poetry/tests/json/fixtures/source/complete_invalid.toml
@@ -0,0 +1,17 @@
+[tool.poetry]
+name = "foobar"
+version = "0.1.0"
+description = ""
+authors = ["Your Name "]
+
+[tool.poetry.dependencies]
+python = "^3.10"
+
+[[tool.poetry.source]]
+name = "pypi-simple"
+default = false
+secondary = false
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
diff --git a/vendor/poetry/tests/json/fixtures/source/complete_valid.toml b/vendor/poetry/tests/json/fixtures/source/complete_valid.toml
new file mode 100644
index 00000000..d0b4565f
--- /dev/null
+++ b/vendor/poetry/tests/json/fixtures/source/complete_valid.toml
@@ -0,0 +1,18 @@
+[tool.poetry]
+name = "foobar"
+version = "0.1.0"
+description = ""
+authors = ["Your Name "]
+
+[tool.poetry.dependencies]
+python = "^3.10"
+
+[[tool.poetry.source]]
+name = "pypi-simple"
+url = "https://pypi.org/simple/"
+default = false
+secondary = false
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
diff --git a/vendor/poetry/tests/json/test_schema_sources.py b/vendor/poetry/tests/json/test_schema_sources.py
new file mode 100644
index 00000000..4f20a0b3
--- /dev/null
+++ b/vendor/poetry/tests/json/test_schema_sources.py
@@ -0,0 +1,25 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+from poetry.core.toml import TOMLFile
+
+from poetry.factory import Factory
+
+
+FIXTURE_DIR = Path(__file__).parent / "fixtures" / "source"
+
+
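+# The two fixtures differ only in that complete_invalid.toml omits the
+# required "url" key from its [[tool.poetry.source]] entry.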
+def test_pyproject_toml_valid() -> None:
+    toml = TOMLFile(FIXTURE_DIR / "complete_valid.toml").read()
+    content = toml["tool"]["poetry"]
+    assert Factory.validate(content) == {"errors": [], "warnings": []}
+
+
+def test_pyproject_toml_invalid() -> None:
+    toml = TOMLFile(FIXTURE_DIR / "complete_invalid.toml").read()
+    content = toml["tool"]["poetry"]
+    assert Factory.validate(content) == {
+        "errors": ["[source.0] 'url' is a required property"],
+        "warnings": [],
+    }
diff --git a/vendor/poetry/tests/masonry/builders/__init__.py b/vendor/poetry/tests/masonry/builders/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/tests/masonry/builders/fixtures/excluded_subpackage/example/__init__.py b/vendor/poetry/tests/masonry/builders/fixtures/excluded_subpackage/example/__init__.py
index 3dc1f76b..4e562f46 100644
--- a/vendor/poetry/tests/masonry/builders/fixtures/excluded_subpackage/example/__init__.py
+++ b/vendor/poetry/tests/masonry/builders/fixtures/excluded_subpackage/example/__init__.py
@@ -1 +1,4 @@
+from __future__ import annotations
+
+
 __version__ = "0.1.0"
diff --git a/vendor/poetry/tests/masonry/builders/fixtures/excluded_subpackage/example/test/excluded.py b/vendor/poetry/tests/masonry/builders/fixtures/excluded_subpackage/example/test/excluded.py
index bf6e1f89..3bf5a933 100644
--- a/vendor/poetry/tests/masonry/builders/fixtures/excluded_subpackage/example/test/excluded.py
+++ b/vendor/poetry/tests/masonry/builders/fixtures/excluded_subpackage/example/test/excluded.py
@@ -1,4 +1,6 @@
-from .. import __version__
+from __future__ import annotations
+
+from tests.masonry.builders.fixtures.excluded_subpackage.example import __version__
 
 
 def test_version():
diff --git a/vendor/poetry/tests/masonry/builders/test_editable_builder.py b/vendor/poetry/tests/masonry/builders/test_editable_builder.py
index 70f4e125..d8375198 100644
--- a/vendor/poetry/tests/masonry/builders/test_editable_builder.py
+++ b/vendor/poetry/tests/masonry/builders/test_editable_builder.py
@@ -1,22 +1,37 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
+from __future__ import annotations
 
+import csv
+import json
 import os
 import shutil
 
+from pathlib import Path
+from typing import TYPE_CHECKING
+
 import pytest
 
+from cleo.io.null_io import NullIO
+from deepdiff import DeepDiff
+from poetry.core.semver.version import Version
+
 from poetry.factory import Factory
-from poetry.io.null_io import NullIO
 from poetry.masonry.builders.editable import EditableBuilder
-from poetry.utils._compat import Path
+from poetry.repositories.installed_repository import InstalledRepository
+from poetry.utils.env import EnvCommandError
 from poetry.utils.env import EnvManager
 from poetry.utils.env import MockEnv
 from poetry.utils.env import VirtualEnv
+from poetry.utils.env import ephemeral_environment
+
+
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
+
+    from poetry.poetry import Poetry
 
 
 @pytest.fixture()
-def simple_poetry():
+def simple_poetry() -> Poetry:
     poetry = Factory().create_poetry(
         Path(__file__).parent.parent.parent / "fixtures" / "simple_project"
     )
@@ -25,7 +40,7 @@ def simple_poetry():
 
 
 @pytest.fixture()
-def project_with_include():
+def project_with_include() -> Poetry:
     poetry = Factory().create_poetry(
         Path(__file__).parent.parent.parent / "fixtures" / "with-include"
     )
@@ -34,7 +49,7 @@ def project_with_include():
 
 
 @pytest.fixture()
-def extended_poetry():
+def extended_poetry() -> Poetry:
     poetry = Factory().create_poetry(
         Path(__file__).parent.parent.parent / "fixtures" / "extended_project"
     )
@@ -43,7 +58,7 @@ def extended_poetry():
 
 
 @pytest.fixture()
-def extended_without_setup_poetry():
+def extended_without_setup_poetry() -> Poetry:
     poetry = Factory().create_poetry(
         Path(__file__).parent.parent.parent
         / "fixtures"
@@ -54,12 +69,12 @@ def extended_without_setup_poetry():
 
 
 @pytest.fixture()
-def env_manager(simple_poetry):
+def env_manager(simple_poetry: Poetry) -> EnvManager:
     return EnvManager(simple_poetry)
 
 
 @pytest.fixture
-def tmp_venv(tmp_dir, env_manager):
+def tmp_venv(tmp_dir: str, env_manager: EnvManager) -> VirtualEnv:
     venv_path = Path(tmp_dir) / "venv"
 
     env_manager.build_venv(str(venv_path))
@@ -70,30 +85,45 @@ def tmp_venv(tmp_dir, env_manager):
     shutil.rmtree(str(venv.path))
 
 
-def test_builder_installs_proper_files_for_standard_packages(simple_poetry, tmp_venv):
+def test_builder_installs_proper_files_for_standard_packages(
+    simple_poetry: Poetry, tmp_venv: VirtualEnv
+):
     builder = EditableBuilder(simple_poetry, tmp_venv, NullIO())
 
     builder.build()
 
     assert tmp_venv._bin_dir.joinpath("foo").exists()
-    assert tmp_venv.site_packages.path.joinpath("simple_project.pth").exists()
-    assert simple_poetry.file.parent.resolve().as_posix() == tmp_venv.site_packages.path.joinpath(
-        "simple_project.pth"
-    ).read_text().strip(
-        os.linesep
+    pth_file = "simple_project.pth"
+    assert tmp_venv.site_packages.exists(pth_file)
+    assert (
+        simple_poetry.file.parent.resolve().as_posix()
+        == tmp_venv.site_packages.find(pth_file)[0].read_text().strip(os.linesep)
     )
 
-    dist_info = tmp_venv.site_packages.path.joinpath("simple_project-1.2.3.dist-info")
-    assert dist_info.exists()
+    dist_info = "simple_project-1.2.3.dist-info"
+    assert tmp_venv.site_packages.exists(dist_info)
+
+    dist_info = tmp_venv.site_packages.find(dist_info)[0]
+
     assert dist_info.joinpath("INSTALLER").exists()
     assert dist_info.joinpath("METADATA").exists()
     assert dist_info.joinpath("RECORD").exists()
     assert dist_info.joinpath("entry_points.txt").exists()
+    assert dist_info.joinpath("direct_url.json").exists()
+
+    assert not DeepDiff(
+        {
+            "dir_info": {"editable": True},
+            "url": simple_poetry.file.path.parent.as_uri(),
+        },
+        json.loads(dist_info.joinpath("direct_url.json").read_text()),
+    )
 
-    assert "poetry" == dist_info.joinpath("INSTALLER").read_text()
+    assert dist_info.joinpath("INSTALLER").read_text() == "poetry"
     assert (
-        "[console_scripts]\nbaz=bar:baz.boom.bim\nfoo=foo:bar\nfox=fuz.foo:bar.baz\n\n"
-        == dist_info.joinpath("entry_points.txt").read_text()
+        dist_info.joinpath("entry_points.txt").read_text()
+        == "[console_scripts]\nbaz=bar:baz.boom.bim\nfoo=foo:bar\n"
+        "fox=fuz.foo:bar.baz\n\n"
     )
 
     metadata = """\
@@ -111,13 +141,13 @@ def test_builder_installs_proper_files_for_standard_packages(simple_poetry, tmp_
 Classifier: Programming Language :: Python :: 2
 Classifier: Programming Language :: Python :: 2.7
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.4
 Classifier: Programming Language :: Python :: 3.5
 Classifier: Programming Language :: Python :: 3.6
 Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
 Classifier: Topic :: Software Development :: Build Tools
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Project-URL: Documentation, https://python-poetry.org/docs
@@ -130,81 +160,117 @@ def test_builder_installs_proper_files_for_standard_packages(simple_poetry, tmp_
 """
     assert metadata == dist_info.joinpath("METADATA").read_text(encoding="utf-8")
 
-    records = dist_info.joinpath("RECORD").read_text()
-    assert str(tmp_venv.site_packages.path.joinpath("simple_project.pth")) in records
-    assert str(tmp_venv._bin_dir.joinpath("foo")) in records
-    assert str(tmp_venv._bin_dir.joinpath("baz")) in records
-    assert str(dist_info.joinpath("METADATA")) in records
-    assert str(dist_info.joinpath("INSTALLER")) in records
-    assert str(dist_info.joinpath("entry_points.txt")) in records
-    assert str(dist_info.joinpath("RECORD")) in records
-
-    baz_script = """\
-#!{python}
+    with open(dist_info.joinpath("RECORD"), encoding="utf-8", newline="") as f:
+        reader = csv.reader(f)
+        records = list(reader)
+
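+    # Per PEP 376, each RECORD row has exactly three fields: path, hash, size.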
+    assert all(len(row) == 3 for row in records)
+    record_entries = {row[0] for row in records}
+    pth_file = "simple_project.pth"
+    assert tmp_venv.site_packages.exists(pth_file)
+    assert str(tmp_venv.site_packages.find(pth_file)[0]) in record_entries
+    assert str(tmp_venv._bin_dir.joinpath("foo")) in record_entries
+    assert str(tmp_venv._bin_dir.joinpath("baz")) in record_entries
+    assert str(dist_info.joinpath("METADATA")) in record_entries
+    assert str(dist_info.joinpath("INSTALLER")) in record_entries
+    assert str(dist_info.joinpath("entry_points.txt")) in record_entries
+    assert str(dist_info.joinpath("RECORD")) in record_entries
+    assert str(dist_info.joinpath("direct_url.json")) in record_entries
+
+    baz_script = f"""\
+#!{tmp_venv.python}
+import sys
 from bar import baz
 
 if __name__ == '__main__':
-    baz.boom.bim()
-""".format(
-        python=tmp_venv._bin("python")
-    )
+    sys.exit(baz.boom.bim())
+"""
 
     assert baz_script == tmp_venv._bin_dir.joinpath("baz").read_text()
 
-    foo_script = """\
-#!{python}
+    foo_script = f"""\
+#!{tmp_venv.python}
+import sys
 from foo import bar
 
 if __name__ == '__main__':
-    bar()
-""".format(
-        python=tmp_venv._bin("python")
-    )
+    sys.exit(bar())
+"""
 
     assert foo_script == tmp_venv._bin_dir.joinpath("foo").read_text()
 
-    fox_script = """\
-#!{python}
+    fox_script = f"""\
+#!{tmp_venv.python}
+import sys
 from fuz.foo import bar
 
 if __name__ == '__main__':
-    bar.baz()
-""".format(
-        python=tmp_venv._bin("python")
-    )
+    sys.exit(bar.baz())
+"""
 
     assert fox_script == tmp_venv._bin_dir.joinpath("fox").read_text()
 
 
 def test_builder_falls_back_on_setup_and_pip_for_packages_with_build_scripts(
-    extended_poetry,
+    mocker: MockerFixture, extended_poetry: Poetry, tmp_dir: str
 ):
-    env = MockEnv(path=Path("/foo"))
+    pip_install = mocker.patch("poetry.masonry.builders.editable.pip_install")
+    env = MockEnv(path=Path(tmp_dir) / "foo")
     builder = EditableBuilder(extended_poetry, env, NullIO())
 
     builder.build()
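+    # The fallback must delegate the editable install to pip; nothing may be
+    # executed in the environment directly (env.executed stays empty).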
+    pip_install.assert_called_once_with(
+        extended_poetry.pyproject.file.path.parent, env, upgrade=True, editable=True
+    )
+    assert [] == env.executed
 
-    assert [
-        [
-            "python",
-            "-m",
-            "pip",
-            "install",
-            "-e",
-            str(extended_poetry.file.parent),
-            "--no-deps",
-        ]
-    ] == env.executed
+
+def test_builder_setup_generation_runs_with_pip_editable(tmp_dir: str) -> None:
+    # create an isolated copy of the project
+    fixture = Path(__file__).parent.parent.parent / "fixtures" / "extended_project"
+    extended_project = Path(tmp_dir) / "extended_project"
+
+    shutil.copytree(fixture, extended_project)
+    assert extended_project.exists()
+
+    poetry = Factory().create_poetry(extended_project)
+
+    # we need a venv with setuptools since we are verifying setup.py builds
+    with ephemeral_environment(flags={"no-setuptools": False}) as venv:
+        builder = EditableBuilder(poetry, venv, NullIO())
+        builder.build()
+
+        # is the package installed?
+        repository = InstalledRepository.load(venv)
+        package = repository.package("extended-project", Version.parse("1.2.3"))
+        assert package.name == "extended-project"
+
+        # check for the module built by build.py
+        try:
+            output = venv.run_python_script(
+                "from extended_project import built; print(built.__file__)"
+            ).strip()
+        except EnvCommandError:
+            pytest.fail("Unable to import built module")
+        else:
+            built_py = Path(output).resolve()
+
+        expected = extended_project / "extended_project" / "built.py"
+
+        # ensure the package was installed as editable
+        assert built_py == expected.resolve()
 
 
 def test_builder_installs_proper_files_when_packages_configured(
-    project_with_include, tmp_venv
+    project_with_include: Poetry, tmp_venv: VirtualEnv
 ):
     builder = EditableBuilder(project_with_include, tmp_venv, NullIO())
     builder.build()
 
-    pth_file = tmp_venv.site_packages.path.joinpath("with_include.pth")
-    assert pth_file.is_file()
+    pth_file = "with_include.pth"
+    assert tmp_venv.site_packages.exists(pth_file)
+
+    pth_file = tmp_venv.site_packages.find(pth_file)[0]
 
     paths = set()
     with pth_file.open() as f:
@@ -220,8 +286,14 @@ def test_builder_installs_proper_files_when_packages_configured(
     assert len(paths) == len(expected)
 
 
-def test_builder_should_execute_build_scripts(extended_without_setup_poetry):
-    env = MockEnv(path=Path("/foo"))
+def test_builder_should_execute_build_scripts(
+    mocker: MockerFixture, extended_without_setup_poetry: Poetry, tmp_dir: str
+):
+    env = MockEnv(path=Path(tmp_dir) / "foo")
+    mocker.patch(
+        "poetry.masonry.builders.editable.build_environment"
+    ).return_value.__enter__.return_value = env
+
     builder = EditableBuilder(extended_without_setup_poetry, env, NullIO())
 
     builder.build()
diff --git a/vendor/poetry/tests/mixology/helpers.py b/vendor/poetry/tests/mixology/helpers.py
index fcde6701..225807e2 100644
--- a/vendor/poetry/tests/mixology/helpers.py
+++ b/vendor/poetry/tests/mixology/helpers.py
@@ -1,12 +1,32 @@
-from poetry.core.packages import Package
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.core.packages.package import Package
+
 from poetry.factory import Factory
 from poetry.mixology.failure import SolveFailure
 from poetry.mixology.version_solver import VersionSolver
 from poetry.packages import DependencyPackage
 
 
-def add_to_repo(repository, name, version, deps=None, python=None):
-    package = Package(name, version)
+if TYPE_CHECKING:
+    from poetry.core.packages.project_package import ProjectPackage
+
+    from poetry.mixology import SolverResult
+    from poetry.repositories import Repository
+    from tests.mixology.version_solver.conftest import Provider
+
+
+def add_to_repo(
+    repository: Repository,
+    name: str,
+    version: str,
+    deps: dict[str, str] | None = None,
+    python: str | None = None,
+    yanked: bool = False,
+) -> None:
+    package = Package(name, version, yanked=yanked)
     if python:
         package.python_versions = python
 
@@ -18,13 +38,20 @@ def add_to_repo(repository, name, version, deps=None, python=None):
 
 
 def check_solver_result(
-    root, provider, result=None, error=None, tries=None, locked=None, use_latest=None
-):
+    root: ProjectPackage,
+    provider: Provider,
+    result: dict[str, str] | None = None,
+    error: str | None = None,
+    tries: int | None = None,
+    locked: dict[str, Package] | None = None,
+    use_latest: list[str] | None = None,
+) -> SolverResult | None:
     if locked is not None:
-        locked = {k: DependencyPackage(l.to_dependency(), l) for k, l in locked.items()}
+        locked = {
+            k: [DependencyPackage(l.to_dependency(), l)] for k, l in locked.items()
+        }
 
     solver = VersionSolver(root, provider, locked=locked, use_latest=use_latest)
-
     try:
         solution = solver.solve()
     except SolveFailure as e:
@@ -34,15 +61,22 @@ def check_solver_result(
             if tries is not None:
                 assert solver.solution.attempted_solutions == tries
 
-            return
+            return None
 
         raise
+    except AssertionError as e:
+        if error:
+            assert str(e) == error
+            return None
+        raise
 
     packages = {}
     for package in solution.packages:
         packages[package.name] = str(package.version)
 
-    assert result == packages
+    assert packages == result
 
     if tries is not None:
         assert solution.attempted_solutions == tries
+
+    return solution
diff --git a/vendor/poetry/tests/mixology/solutions/providers/test_python_requirement_solution_provider.py b/vendor/poetry/tests/mixology/solutions/providers/test_python_requirement_solution_provider.py
index 81c11d21..c34e9322 100644
--- a/vendor/poetry/tests/mixology/solutions/providers/test_python_requirement_solution_provider.py
+++ b/vendor/poetry/tests/mixology/solutions/providers/test_python_requirement_solution_provider.py
@@ -1,18 +1,15 @@
-import pytest
+from __future__ import annotations
 
 from poetry.core.packages.dependency import Dependency
+
 from poetry.mixology.failure import SolveFailure
 from poetry.mixology.incompatibility import Incompatibility
 from poetry.mixology.incompatibility_cause import NoVersionsCause
 from poetry.mixology.incompatibility_cause import PythonCause
 from poetry.mixology.term import Term
 from poetry.puzzle.exceptions import SolverProblemError
-from poetry.utils._compat import PY36
 
 
-@pytest.mark.skipif(
-    not PY36, reason="Error solutions are only available for Python ^3.6"
-)
 def test_it_can_solve_python_incompatibility_solver_errors():
     from poetry.mixology.solutions.providers import PythonRequirementSolutionProvider
     from poetry.mixology.solutions.solutions import PythonRequirementSolution
@@ -27,9 +24,6 @@ def test_it_can_solve_python_incompatibility_solver_errors():
     assert isinstance(provider.get_solutions(exception)[0], PythonRequirementSolution)
 
 
-@pytest.mark.skipif(
-    not PY36, reason="Error solutions are only available for Python ^3.6"
-)
 def test_it_cannot_solve_other_solver_errors():
     from poetry.mixology.solutions.providers import PythonRequirementSolutionProvider
 
diff --git a/vendor/poetry/tests/mixology/solutions/solutions/test_python_requirement_solution.py b/vendor/poetry/tests/mixology/solutions/solutions/test_python_requirement_solution.py
index e264ad8d..9c8950ae 100644
--- a/vendor/poetry/tests/mixology/solutions/solutions/test_python_requirement_solution.py
+++ b/vendor/poetry/tests/mixology/solutions/solutions/test_python_requirement_solution.py
@@ -1,19 +1,15 @@
-import pytest
-
-from clikit.io.buffered_io import BufferedIO
+from __future__ import annotations
 
+from cleo.io.buffered_io import BufferedIO
 from poetry.core.packages.dependency import Dependency
+
 from poetry.mixology.failure import SolveFailure
 from poetry.mixology.incompatibility import Incompatibility
 from poetry.mixology.incompatibility_cause import PythonCause
 from poetry.mixology.term import Term
 from poetry.puzzle.exceptions import SolverProblemError
-from poetry.utils._compat import PY36
 
 
-@pytest.mark.skipif(
-    not PY36, reason="Error solutions are only available for Python ^3.6"
-)
 def test_it_provides_the_correct_solution():
     from poetry.mixology.solutions.solutions import PythonRequirementSolution
 
@@ -30,8 +26,8 @@ def test_it_provides_the_correct_solution():
 For foo, a possible solution would be to set the `python` property to ">=3.6,<4.0"\
 """
     links = [
-        "https://python-poetry.org/docs/dependency-specification/#python-restricted-dependencies",
-        "https://python-poetry.org/docs/dependency-specification/#using-environment-markers",
+        "https://python-poetry.org/docs/dependency-specification/#python-restricted-dependencies",  # noqa: E501
+        "https://python-poetry.org/docs/dependency-specification/#using-environment-markers",  # noqa: E501
     ]
 
     assert title == solution.solution_title
diff --git a/vendor/poetry/tests/mixology/version_solver/__init__.py b/vendor/poetry/tests/mixology/version_solver/__init__.py
index 2382bd6f..b7473c5b 100644
--- a/vendor/poetry/tests/mixology/version_solver/__init__.py
+++ b/vendor/poetry/tests/mixology/version_solver/__init__.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import pytest
 
 
diff --git a/vendor/poetry/tests/mixology/version_solver/conftest.py b/vendor/poetry/tests/mixology/version_solver/conftest.py
index b31634b8..453cc004 100644
--- a/vendor/poetry/tests/mixology/version_solver/conftest.py
+++ b/vendor/poetry/tests/mixology/version_solver/conftest.py
@@ -1,26 +1,34 @@
-import pytest
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
 
-from clikit.io import NullIO
+import pytest
 
+from cleo.io.null_io import NullIO
 from poetry.core.packages.project_package import ProjectPackage
+
 from poetry.puzzle.provider import Provider as BaseProvider
 from poetry.repositories import Pool
 from poetry.repositories import Repository
 
 
+if TYPE_CHECKING:
+    from tests.helpers import TestRepository
+
+
 class Provider(BaseProvider):
-    def set_package_python_versions(self, python_versions):
+    def set_package_python_versions(self, python_versions: str) -> None:
         self._package.python_versions = python_versions
         self._python_constraint = self._package.python_constraint
 
 
 @pytest.fixture
-def repo():
-    return Repository()
+def repo() -> Repository:
+    return Repository("repo")
 
 
 @pytest.fixture
-def pool(repo):
+def pool(repo: TestRepository) -> Pool:
     pool = Pool()
     pool.add_repository(repo)
 
@@ -28,10 +36,10 @@ def pool(repo):
 
 
 @pytest.fixture
-def root():
+def root() -> ProjectPackage:
     return ProjectPackage("myapp", "0.0.0")
 
 
 @pytest.fixture
-def provider(pool, root):
+def provider(pool: Pool, root: ProjectPackage) -> Provider:
     return Provider(root, pool, NullIO())
diff --git a/vendor/poetry/tests/mixology/version_solver/test_backtracking.py b/vendor/poetry/tests/mixology/version_solver/test_backtracking.py
index 1716ca35..6354d4c6 100644
--- a/vendor/poetry/tests/mixology/version_solver/test_backtracking.py
+++ b/vendor/poetry/tests/mixology/version_solver/test_backtracking.py
@@ -1,10 +1,22 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
 from poetry.factory import Factory
+from tests.mixology.helpers import add_to_repo
+from tests.mixology.helpers import check_solver_result
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.project_package import ProjectPackage
 
-from ..helpers import add_to_repo
-from ..helpers import check_solver_result
+    from poetry.repositories import Repository
+    from tests.mixology.version_solver.conftest import Provider
 
 
-def test_circular_dependency_on_older_version(root, provider, repo):
+def test_circular_dependency_on_older_version(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
     root.add_dependency(Factory.create_dependency("a", ">=1.0.0"))
 
     add_to_repo(repo, "a", "1.0.0")
@@ -14,7 +26,9 @@ def test_circular_dependency_on_older_version(root, provider, repo):
     check_solver_result(root, provider, {"a": "1.0.0"}, tries=2)
 
 
-def test_diamond_dependency_graph(root, provider, repo):
+def test_diamond_dependency_graph(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
     root.add_dependency(Factory.create_dependency("a", "*"))
     root.add_dependency(Factory.create_dependency("b", "*"))
 
@@ -31,7 +45,9 @@ def test_diamond_dependency_graph(root, provider, repo):
     check_solver_result(root, provider, {"a": "1.0.0", "b": "2.0.0", "c": "3.0.0"})
 
 
-def test_backjumps_after_partial_satisfier(root, provider, repo):
+def test_backjumps_after_partial_satisfier(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
     # c 2.0.0 is incompatible with y 2.0.0 because it requires x 1.0.0, but that
     # requirement only exists because of both a and b. The solver should be able
     # to deduce c 2.0.0's incompatibility and select c 1.0.0 instead.
@@ -54,7 +70,9 @@ def test_backjumps_after_partial_satisfier(root, provider, repo):
     check_solver_result(root, provider, {"c": "1.0.0", "y": "2.0.0"}, tries=2)
 
 
-def test_rolls_back_leaf_versions_first(root, provider, repo):
+def test_rolls_back_leaf_versions_first(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
     # The latest versions of a and b disagree on c. An older version of either
     # will resolve the problem. This test validates that b, which is farther
     # in the dependency graph from myapp, is downgraded first.
@@ -70,7 +88,7 @@ def test_rolls_back_leaf_versions_first(root, provider, repo):
     check_solver_result(root, provider, {"a": "2.0.0", "b": "1.0.0", "c": "2.0.0"})
 
 
-def test_simple_transitive(root, provider, repo):
+def test_simple_transitive(root: ProjectPackage, provider: Provider, repo: Repository):
     # Only one version of baz, so foo and bar will have to downgrade
     # until they reach it
     root.add_dependency(Factory.create_dependency("foo", "*"))
@@ -90,7 +108,9 @@ def test_simple_transitive(root, provider, repo):
     )
 
 
-def test_backjump_to_nearer_unsatisfied_package(root, provider, repo):
+def test_backjump_to_nearer_unsatisfied_package(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
     # This ensures it doesn't exhaustively search all versions of b when it's
     # a-2.0.0 whose dependency on c-2.0.0-nonexistent led to the problem. We
     # make sure b has more versions than a so that the solver tries a first
@@ -99,7 +119,7 @@ def test_backjump_to_nearer_unsatisfied_package(root, provider, repo):
     root.add_dependency(Factory.create_dependency("b", "*"))
 
     add_to_repo(repo, "a", "1.0.0", deps={"c": "1.0.0"})
-    add_to_repo(repo, "a", "2.0.0", deps={"c": "2.0.0-nonexistent"})
+    add_to_repo(repo, "a", "2.0.0", deps={"c": "2.0.0-1"})
     add_to_repo(repo, "b", "1.0.0")
     add_to_repo(repo, "b", "2.0.0")
     add_to_repo(repo, "b", "3.0.0")
@@ -110,7 +130,9 @@ def test_backjump_to_nearer_unsatisfied_package(root, provider, repo):
     )
 
 
-def test_traverse_into_package_with_fewer_versions_first(root, provider, repo):
+def test_traverse_into_package_with_fewer_versions_first(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
     # Dependencies are ordered so that packages with fewer versions are tried
     # first. Here, there are two valid solutions (either a or b must be
     # downgraded once). The chosen one depends on which dep is traversed first.
@@ -134,7 +156,9 @@ def test_traverse_into_package_with_fewer_versions_first(root, provider, repo):
     check_solver_result(root, provider, {"a": "4.0.0", "b": "4.0.0", "c": "2.0.0"})
 
 
-def test_backjump_past_failed_package_on_disjoint_constraint(root, provider, repo):
+def test_backjump_past_failed_package_on_disjoint_constraint(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
     root.add_dependency(Factory.create_dependency("a", "*"))
     root.add_dependency(Factory.create_dependency("foo", ">2.0.0"))
 
diff --git a/vendor/poetry/tests/mixology/version_solver/test_basic_graph.py b/vendor/poetry/tests/mixology/version_solver/test_basic_graph.py
index 0e70e938..210abc37 100644
--- a/vendor/poetry/tests/mixology/version_solver/test_basic_graph.py
+++ b/vendor/poetry/tests/mixology/version_solver/test_basic_graph.py
@@ -1,10 +1,24 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+
 from poetry.factory import Factory
+from tests.mixology.helpers import add_to_repo
+from tests.mixology.helpers import check_solver_result
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.project_package import ProjectPackage
 
-from ..helpers import add_to_repo
-from ..helpers import check_solver_result
+    from poetry.repositories import Repository
+    from tests.mixology.version_solver.conftest import Provider
 
 
-def test_simple_dependencies(root, provider, repo):
+def test_simple_dependencies(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
     root.add_dependency(Factory.create_dependency("a", "1.0.0"))
     root.add_dependency(Factory.create_dependency("b", "1.0.0"))
 
@@ -29,7 +43,9 @@ def test_simple_dependencies(root, provider, repo):
     )
 
 
-def test_shared_dependencies_with_overlapping_constraints(root, provider, repo):
+def test_shared_dependencies_with_overlapping_constraints(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
     root.add_dependency(Factory.create_dependency("a", "1.0.0"))
     root.add_dependency(Factory.create_dependency("b", "1.0.0"))
 
@@ -45,7 +61,7 @@ def test_shared_dependencies_with_overlapping_constraints(root, provider, repo):
 
 
 def test_shared_dependency_where_dependent_version_affects_other_dependencies(
-    root, provider, repo
+    root: ProjectPackage, provider: Provider, repo: Repository
 ):
     root.add_dependency(Factory.create_dependency("foo", "<=1.0.2"))
     root.add_dependency(Factory.create_dependency("bar", "1.0.0"))
@@ -64,10 +80,53 @@ def test_shared_dependency_where_dependent_version_affects_other_dependencies(
     )
 
 
-def test_circular_dependency(root, provider, repo):
+def test_circular_dependency(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
     root.add_dependency(Factory.create_dependency("foo", "1.0.0"))
 
     add_to_repo(repo, "foo", "1.0.0", deps={"bar": "1.0.0"})
     add_to_repo(repo, "bar", "1.0.0", deps={"foo": "1.0.0"})
 
     check_solver_result(root, provider, {"foo": "1.0.0", "bar": "1.0.0"})
+
+
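+# Yanked releases (PEP 592) are avoided during version selection; one is only
+# chosen when an exact pin (e.g. "==2") leaves no non-yanked candidate.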
+@pytest.mark.parametrize(
+    "constraint, versions, yanked_versions, expected",
+    [
+        (">=1", ["1", "2"], [], "2"),
+        (">=1", ["1", "2"], ["2"], "1"),
+        (">=1", ["1", "2", "3"], ["2"], "3"),
+        (">=1", ["1", "2", "3"], ["2", "3"], "1"),
+        (">1", ["1", "2"], ["2"], "error"),
+        (">1", ["2"], ["2"], "error"),
+        (">=2", ["2"], ["2"], "error"),
+        ("==2", ["2"], ["2"], "2"),
+        ("==2", ["2", "2+local"], [], "2+local"),
+        ("==2", ["2", "2+local"], ["2+local"], "2"),
+    ],
+)
+def test_yanked_release(
+    root: ProjectPackage,
+    provider: Provider,
+    repo: Repository,
+    constraint: str,
+    versions: list[str],
+    yanked_versions: list[str],
+    expected: str,
+) -> None:
+    root.add_dependency(Factory.create_dependency("foo", constraint))
+
+    for version in versions:
+        add_to_repo(repo, "foo", version, yanked=version in yanked_versions)
+
+    if expected == "error":
+        result = None
+        error = (
+            f"Because myapp depends on foo ({constraint}) which doesn't match any "
+            "versions, version solving failed."
+        )
+    else:
+        result = {"foo": expected}
+        error = None
+    check_solver_result(root, provider, result, error)
diff --git a/vendor/poetry/tests/mixology/version_solver/test_dependency_cache.py b/vendor/poetry/tests/mixology/version_solver/test_dependency_cache.py
new file mode 100644
index 00000000..4606911e
--- /dev/null
+++ b/vendor/poetry/tests/mixology/version_solver/test_dependency_cache.py
@@ -0,0 +1,126 @@
+from __future__ import annotations
+
+from copy import deepcopy
+from typing import TYPE_CHECKING
+
+from poetry.factory import Factory
+from poetry.mixology.version_solver import DependencyCache
+from tests.helpers import MOCK_DEFAULT_GIT_REVISION
+from tests.mixology.helpers import add_to_repo
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.project_package import ProjectPackage
+
+    from poetry.repositories import Repository
+    from tests.mixology.version_solver.conftest import Provider
+
+
+def test_solver_dependency_cache_respects_source_type(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
+    dependency_pypi = Factory.create_dependency("demo", ">=0.1.0")
+    dependency_git = Factory.create_dependency(
+        "demo", {"git": "https://github.com/demo/demo.git"}, groups=["dev"]
+    )
+    root.add_dependency(dependency_pypi)
+    root.add_dependency(dependency_git)
+
+    add_to_repo(repo, "demo", "1.0.0")
+
+    cache = DependencyCache(provider)
+    cache.search_for.cache_clear()
+
+    # ensure the cache was not hit for either call
+    cache.search_for(dependency_pypi)
+    cache.search_for(dependency_git)
+    assert not cache.search_for.cache_info().hits
+
+    # increase test coverage by searching for copies
+    # (when searching for the exact same object, __eq__ is never called)
+    packages_pypi = cache.search_for(deepcopy(dependency_pypi))
+    packages_git = cache.search_for(deepcopy(dependency_git))
+
+    assert cache.search_for.cache_info().hits == 2
+    assert cache.search_for.cache_info().currsize == 2
+
+    assert len(packages_pypi) == len(packages_git) == 1
+    assert packages_pypi != packages_git
+
+    package_pypi = packages_pypi[0]
+    package_git = packages_git[0]
+
+    assert package_pypi.package.name == dependency_pypi.name
+    assert package_pypi.package.version.text == "1.0.0"
+
+    assert package_git.package.name == dependency_git.name
+    assert package_git.package.version.text == "0.1.2"
+    assert package_git.package.source_type == dependency_git.source_type
+    assert package_git.package.source_url == dependency_git.source_url
+    assert package_git.package.source_resolved_reference == MOCK_DEFAULT_GIT_REVISION
+
+
+def test_solver_dependency_cache_respects_subdirectories(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
+    dependency_one = Factory.create_dependency(
+        "one",
+        {
+            "git": "https://github.com/demo/subdirectories.git",
+            "subdirectory": "one",
+            "platform": "linux",
+        },
+    )
+    dependency_one_copy = Factory.create_dependency(
+        "one",
+        {
+            "git": "https://github.com/demo/subdirectories.git",
+            "subdirectory": "one-copy",
+            "platform": "win32",
+        },
+    )
+
+    root.add_dependency(dependency_one)
+    root.add_dependency(dependency_one_copy)
+
+    cache = DependencyCache(provider)
+    cache.search_for.cache_clear()
+
+    # ensure the cache was not hit for either call
+    cache.search_for(dependency_one)
+    cache.search_for(dependency_one_copy)
+    assert not cache.search_for.cache_info().hits
+
+    # increase test coverage by searching for copies
+    # (when searching for the exact same object, __eq__ is never called)
+    packages_one = cache.search_for(deepcopy(dependency_one))
+    packages_one_copy = cache.search_for(deepcopy(dependency_one_copy))
+
+    assert cache.search_for.cache_info().hits == 2
+    assert cache.search_for.cache_info().currsize == 2
+
+    assert len(packages_one) == len(packages_one_copy) == 1
+
+    package_one = packages_one[0]
+    package_one_copy = packages_one_copy[0]
+
+    assert package_one.package.name == package_one_copy.package.name
+    assert package_one.package.version.text == package_one_copy.package.version.text
+    assert (
+        package_one.package.source_type == package_one_copy.package.source_type == "git"
+    )
+    assert (
+        package_one.package.source_resolved_reference
+        == package_one_copy.package.source_resolved_reference
+        == MOCK_DEFAULT_GIT_REVISION
+    )
+    assert (
+        package_one.package.source_subdirectory
+        != package_one_copy.package.source_subdirectory
+    )
+    assert package_one.package.source_subdirectory == "one"
+    assert package_one_copy.package.source_subdirectory == "one-copy"
+
+    assert package_one.dependency.marker.intersect(
+        package_one_copy.dependency.marker
+    ).is_empty()
diff --git a/vendor/poetry/tests/mixology/version_solver/test_python_constraint.py b/vendor/poetry/tests/mixology/version_solver/test_python_constraint.py
index 739b1150..52bbdd7b 100644
--- a/vendor/poetry/tests/mixology/version_solver/test_python_constraint.py
+++ b/vendor/poetry/tests/mixology/version_solver/test_python_constraint.py
@@ -1,17 +1,30 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
 from poetry.factory import Factory
+from tests.mixology.helpers import add_to_repo
+from tests.mixology.helpers import check_solver_result
+
+
+if TYPE_CHECKING:
+    from poetry.core.packages.project_package import ProjectPackage
 
-from ..helpers import add_to_repo
-from ..helpers import check_solver_result
+    from poetry.repositories import Repository
+    from tests.mixology.version_solver.conftest import Provider
 
 
-def test_dependency_does_not_match_root_python_constraint(root, provider, repo):
+def test_dependency_does_not_match_root_python_constraint(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
     provider.set_package_python_versions("^3.6")
     root.add_dependency(Factory.create_dependency("foo", "*"))
 
     add_to_repo(repo, "foo", "1.0.0", python="<3.5")
 
-    error = """The current project's Python requirement (>=3.6,<4.0) \
-is not compatible with some of the required packages Python requirement:
+    error = """\
+The current project's Python requirement (>=3.6,<4.0) is not compatible with some of\
+ the required packages Python requirement:
   - foo requires Python <3.5, so it will not be satisfied for Python >=3.6,<4.0
 
 Because no versions of foo match !=1.0.0
diff --git a/vendor/poetry/tests/mixology/version_solver/test_unsolvable.py b/vendor/poetry/tests/mixology/version_solver/test_unsolvable.py
index 6baa7880..75ff37da 100644
--- a/vendor/poetry/tests/mixology/version_solver/test_unsolvable.py
+++ b/vendor/poetry/tests/mixology/version_solver/test_unsolvable.py
@@ -1,10 +1,23 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+
 from poetry.factory import Factory
+from tests.mixology.helpers import add_to_repo
+from tests.mixology.helpers import check_solver_result
+
 
-from ..helpers import add_to_repo
-from ..helpers import check_solver_result
+if TYPE_CHECKING:
+    from poetry.core.packages.project_package import ProjectPackage
 
+    from poetry.repositories import Repository
+    from tests.mixology.version_solver.conftest import Provider
 
-def test_no_version_matching_constraint(root, provider, repo):
+
+def test_no_version_matching_constraint(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
     root.add_dependency(Factory.create_dependency("foo", "^1.0"))
 
     add_to_repo(repo, "foo", "2.0.0")
@@ -20,7 +33,9 @@ def test_no_version_matching_constraint(root, provider, repo):
     )
 
 
-def test_no_version_that_matches_combined_constraints(root, provider, repo):
+def test_no_version_that_matches_combined_constraints(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
     root.add_dependency(Factory.create_dependency("foo", "1.0.0"))
     root.add_dependency(Factory.create_dependency("bar", "1.0.0"))
 
@@ -31,14 +46,19 @@ def test_no_version_that_matches_combined_constraints(root, provider, repo):
 
     error = """\
 Because foo (1.0.0) depends on shared (>=2.0.0 <3.0.0)
- and no versions of shared match >=2.9.0,<3.0.0, foo (1.0.0) requires shared (>=2.0.0,<2.9.0).
-And because bar (1.0.0) depends on shared (>=2.9.0 <4.0.0), bar (1.0.0) is incompatible with foo (1.0.0).
-So, because myapp depends on both foo (1.0.0) and bar (1.0.0), version solving failed."""
+ and no versions of shared match >=2.9.0,<3.0.0,\
+ foo (1.0.0) requires shared (>=2.0.0,<2.9.0).
+And because bar (1.0.0) depends on shared (>=2.9.0 <4.0.0),\
+ bar (1.0.0) is incompatible with foo (1.0.0).
+So, because myapp depends on both foo (1.0.0) and bar (1.0.0), version solving failed.\
+"""
 
     check_solver_result(root, provider, error=error)
 
 
-def test_disjoint_constraints(root, provider, repo):
+def test_disjoint_constraints(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
     root.add_dependency(Factory.create_dependency("foo", "1.0.0"))
     root.add_dependency(Factory.create_dependency("bar", "1.0.0"))
 
@@ -49,14 +69,18 @@ def test_disjoint_constraints(root, provider, repo):
 
     error = """\
 Because bar (1.0.0) depends on shared (>3.0.0)
- and foo (1.0.0) depends on shared (<=2.0.0), bar (1.0.0) is incompatible with foo (1.0.0).
-So, because myapp depends on both foo (1.0.0) and bar (1.0.0), version solving failed."""
+ and foo (1.0.0) depends on shared (<=2.0.0),\
+ bar (1.0.0) is incompatible with foo (1.0.0).
+So, because myapp depends on both foo (1.0.0) and bar (1.0.0), version solving failed.\
+"""
 
     check_solver_result(root, provider, error=error)
     check_solver_result(root, provider, error=error)
 
 
-def test_disjoint_root_constraints(root, provider, repo):
+def test_disjoint_root_constraints(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
     root.add_dependency(Factory.create_dependency("foo", "1.0.0"))
     root.add_dependency(Factory.create_dependency("foo", "2.0.0"))
 
@@ -69,7 +93,26 @@ def test_disjoint_root_constraints(root, provider, repo):
     check_solver_result(root, provider, error=error)
 
 
-def test_no_valid_solution(root, provider, repo):
+def test_disjoint_root_constraints_path_dependencies(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
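+    # Both path dependencies are named "demo" but point at different projects,
+    # so the root's constraints can never be satisfied simultaneously.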
+    provider.set_package_python_versions("^3.7")
+    fixtures = Path(__file__).parent.parent.parent / "fixtures"
+    project_dir = fixtures.joinpath("with_conditional_path_deps")
+    dependency1 = Factory.create_dependency("demo", {"path": project_dir / "demo_one"})
+    root.add_dependency(dependency1)
+    dependency2 = Factory.create_dependency("demo", {"path": project_dir / "demo_two"})
+    root.add_dependency(dependency2)
+
+    error = (
+        f"Because myapp depends on both {str(dependency1).replace('*', '1.2.3')} "
+        f"and {str(dependency2).replace('*', '1.2.3')}, version solving failed."
+    )
+
+    check_solver_result(root, provider, error=error)
+
+
+def test_no_valid_solution(root: ProjectPackage, provider: Provider, repo: Repository):
     root.add_dependency(Factory.create_dependency("a", "*"))
     root.add_dependency(Factory.create_dependency("b", "*"))
 
@@ -88,3 +131,13 @@ def test_no_valid_solution(root, provider, repo):
 So, because myapp depends on b (*), version solving failed."""
 
     check_solver_result(root, provider, error=error, tries=2)
+
+
+def test_package_with_the_same_name_gives_clear_error_message(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
+    pkg_name = "a"
+    root.add_dependency(Factory.create_dependency(pkg_name, "*"))
+    add_to_repo(repo, pkg_name, "1.0.0", deps={pkg_name: "1.0.0"})
+    error = f"Package '{pkg_name}' is listed as a dependency of itself."
+    check_solver_result(root, provider, error=error)
diff --git a/vendor/poetry/tests/mixology/version_solver/test_with_lock.py b/vendor/poetry/tests/mixology/version_solver/test_with_lock.py
index ee8535a8..fa0e536c 100644
--- a/vendor/poetry/tests/mixology/version_solver/test_with_lock.py
+++ b/vendor/poetry/tests/mixology/version_solver/test_with_lock.py
@@ -1,11 +1,23 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
 from poetry.factory import Factory
+from tests.helpers import get_package
+from tests.mixology.helpers import add_to_repo
+from tests.mixology.helpers import check_solver_result
+
 
-from ...helpers import get_package
-from ..helpers import add_to_repo
-from ..helpers import check_solver_result
+if TYPE_CHECKING:
+    from poetry.core.packages.project_package import ProjectPackage
 
+    from poetry.repositories import Repository
+    from tests.mixology.version_solver.conftest import Provider
 
-def test_with_compatible_locked_dependencies(root, provider, repo):
+
+def test_with_compatible_locked_dependencies(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
     root.add_dependency(Factory.create_dependency("foo", "*"))
 
     add_to_repo(repo, "foo", "1.0.0", deps={"bar": "1.0.0"})
@@ -23,7 +35,9 @@ def test_with_compatible_locked_dependencies(root, provider, repo):
     )
 
 
-def test_with_incompatible_locked_dependencies(root, provider, repo):
+def test_with_incompatible_locked_dependencies(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
     root.add_dependency(Factory.create_dependency("foo", ">1.0.1"))
 
     add_to_repo(repo, "foo", "1.0.0", deps={"bar": "1.0.0"})
@@ -41,7 +55,9 @@ def test_with_incompatible_locked_dependencies(root, provider, repo):
     )
 
 
-def test_with_unrelated_locked_dependencies(root, provider, repo):
+def test_with_unrelated_locked_dependencies(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
     root.add_dependency(Factory.create_dependency("foo", "*"))
 
     add_to_repo(repo, "foo", "1.0.0", deps={"bar": "1.0.0"})
@@ -60,8 +76,8 @@ def test_with_unrelated_locked_dependencies(root, provider, repo):
     )
 
 
-def test_unlocks_dependencies_if_necessary_to_ensure_that_a_new_dependency_is_statisfied(
-    root, provider, repo
+def test_unlocks_dependencies_if_necessary_to_ensure_that_a_new_dependency_is_satisfied(
+    root: ProjectPackage, provider: Provider, repo: Repository
 ):
     root.add_dependency(Factory.create_dependency("foo", "*"))
     root.add_dependency(Factory.create_dependency("newdep", "2.0.0"))
@@ -95,7 +111,9 @@ def test_unlocks_dependencies_if_necessary_to_ensure_that_a_new_dependency_is_st
     )
 
 
-def test_with_compatible_locked_dependencies_use_latest(root, provider, repo):
+def test_with_compatible_locked_dependencies_use_latest(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
     root.add_dependency(Factory.create_dependency("foo", "*"))
     root.add_dependency(Factory.create_dependency("baz", "*"))
 
@@ -119,3 +137,63 @@ def test_with_compatible_locked_dependencies_use_latest(root, provider, repo):
         },
         use_latest=["foo"],
     )
+
+
+def test_with_compatible_locked_dependencies_with_extras(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
+    root.add_dependency(Factory.create_dependency("foo", "^1.0"))
+
+    package_foo_0 = get_package("foo", "1.0.0")
+    package_foo_1 = get_package("foo", "1.0.1")
+    bar_extra_dep = Factory.create_dependency(
+        "bar", {"version": "^1.0", "extras": "extra"}
+    )
+    for package_foo in (package_foo_0, package_foo_1):
+        package_foo.add_dependency(bar_extra_dep)
+        repo.add_package(package_foo)
+
+    bar_deps = {"baz": {"version": "^1.0", "extras": ["extra"]}}
+    add_to_repo(repo, "bar", "1.0.0", bar_deps)
+    add_to_repo(repo, "bar", "1.0.1", bar_deps)
+    add_to_repo(repo, "baz", "1.0.0")
+    add_to_repo(repo, "baz", "1.0.1")
+
+    check_solver_result(
+        root,
+        provider,
+        result={"foo": "1.0.0", "bar": "1.0.0", "baz": "1.0.0"},
+        locked={
+            "foo": get_package("foo", "1.0.0"),
+            "bar": get_package("bar", "1.0.0"),
+            "baz": get_package("baz", "1.0.0"),
+        },
+    )
+
+
+def test_with_yanked_package_in_lock(
+    root: ProjectPackage, provider: Provider, repo: Repository
+):
+    root.add_dependency(Factory.create_dependency("foo", "*"))
+
+    add_to_repo(repo, "foo", "1")
+    add_to_repo(repo, "foo", "2", yanked=True)
+
+    # yanked version is kept in lock file
+    locked_foo = get_package("foo", "2")
+    assert not locked_foo.yanked
+    result = check_solver_result(
+        root,
+        provider,
+        result={"foo": "2"},
+        locked={"foo": locked_foo},
+    )
+    foo = result.packages[0]
+    assert foo.yanked
+
+    # without considering the lock file, the other version is chosen
+    check_solver_result(
+        root,
+        provider,
+        result={"foo": "1"},
+    )
diff --git a/vendor/poetry/tests/packages/test_locker.py b/vendor/poetry/tests/packages/test_locker.py
index ecb24f5f..f41fc2ba 100644
--- a/vendor/poetry/tests/packages/test_locker.py
+++ b/vendor/poetry/tests/packages/test_locker.py
@@ -1,6 +1,15 @@
+from __future__ import annotations
+
+import json
 import logging
+import os
 import sys
 import tempfile
+import uuid
+
+from hashlib import sha256
+from pathlib import Path
+from typing import TYPE_CHECKING
 
 import pytest
 import tomlkit
@@ -8,16 +17,20 @@
 from poetry.core.packages.package import Package
 from poetry.core.packages.project_package import ProjectPackage
 from poetry.core.semver.version import Version
+
 from poetry.factory import Factory
 from poetry.packages.locker import Locker
-from poetry.utils._compat import Path
+from tests.helpers import get_dependency
+from tests.helpers import get_package
 
-from ..helpers import get_dependency
-from ..helpers import get_package
+
+if TYPE_CHECKING:
+    from _pytest.logging import LogCaptureFixture
+    from pytest_mock import MockerFixture
 
 
 @pytest.fixture
-def locker():
+def locker() -> Locker:
     with tempfile.NamedTemporaryFile() as f:
         f.close()
         locker = Locker(f.name, {})
@@ -26,14 +39,16 @@ def locker():
 
 
 @pytest.fixture
-def root():
+def root() -> ProjectPackage:
     return ProjectPackage("root", "1.2.3")
 
 
-def test_lock_file_data_is_ordered(locker, root):
+def test_lock_file_data_is_ordered(locker: Locker, root: ProjectPackage):
     package_a = get_package("A", "1.0.0")
     package_a.add_dependency(Factory.create_dependency("B", "^1.0"))
     package_a.files = [{"file": "foo", "hash": "456"}, {"file": "bar", "hash": "123"}]
+    package_a2 = get_package("A", "2.0.0")
+    package_a2.files = [{"file": "baz", "hash": "345"}]
     package_git = Package(
         "git-package",
         "1.2.3",
@@ -42,14 +57,44 @@ def test_lock_file_data_is_ordered(locker, root):
         source_reference="develop",
         source_resolved_reference="123456",
     )
-    packages = [package_a, get_package("B", "1.2"), package_git]
+    package_git_with_subdirectory = Package(
+        "git-package-subdir",
+        "1.2.3",
+        source_type="git",
+        source_url="https://github.com/python-poetry/poetry.git",
+        source_reference="develop",
+        source_resolved_reference="123456",
+        source_subdirectory="subdir",
+    )
+    package_url_linux = Package(
+        "url-package",
+        "1.0",
+        source_type="url",
+        source_url="https://example.org/url-package-1.0-cp39-manylinux_2_17_x86_64.whl",
+    )
+    package_url_win32 = Package(
+        "url-package",
+        "1.0",
+        source_type="url",
+        source_url="https://example.org/url-package-1.0-cp39-win_amd64.whl",
+    )
+    packages = [
+        package_a2,
+        package_a,
+        get_package("B", "1.2"),
+        package_git,
+        package_git_with_subdirectory,
+        package_url_win32,
+        package_url_linux,
+    ]
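+    # The list above is deliberately unsorted; the expected lock file below
+    # asserts entries come out ordered by name, version, and source URL.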
 
     locker.set_lock_data(root, packages)
 
     with locker.lock.open(encoding="utf-8") as f:
         content = f.read()
 
-    expected = """[[package]]
+    expected = """\
+[[package]]
 name = "A"
 version = "1.0.0"
 description = ""
@@ -60,6 +105,14 @@ def test_lock_file_data_is_ordered(locker, root):
 [package.dependencies]
 B = "^1.0"
 
+[[package]]
+name = "A"
+version = "2.0.0"
+description = ""
+category = "main"
+optional = false
+python-versions = "*"
+
 [[package]]
 name = "B"
 version = "1.2"
@@ -75,13 +128,53 @@ def test_lock_file_data_is_ordered(locker, root):
 category = "main"
 optional = false
 python-versions = "*"
-develop = true
+develop = false
+
+[package.source]
+type = "git"
+url = "https://github.com/python-poetry/poetry.git"
+reference = "develop"
+resolved_reference = "123456"
+
+[[package]]
+name = "git-package-subdir"
+version = "1.2.3"
+description = ""
+category = "main"
+optional = false
+python-versions = "*"
+develop = false
 
 [package.source]
 type = "git"
 url = "https://github.com/python-poetry/poetry.git"
 reference = "develop"
 resolved_reference = "123456"
+subdirectory = "subdir"
+
+[[package]]
+name = "url-package"
+version = "1.0"
+description = ""
+category = "main"
+optional = false
+python-versions = "*"
+
+[package.source]
+type = "url"
+url = "https://example.org/url-package-1.0-cp39-manylinux_2_17_x86_64.whl"
+
+[[package]]
+name = "url-package"
+version = "1.0"
+description = ""
+category = "main"
+optional = false
+python-versions = "*"
+
+[package.source]
+type = "url"
+url = "https://example.org/url-package-1.0-cp39-win_amd64.whl"
 
 [metadata]
 lock-version = "1.1"
@@ -92,15 +185,18 @@ def test_lock_file_data_is_ordered(locker, root):
 A = [
     {file = "bar", hash = "123"},
     {file = "foo", hash = "456"},
+    {file = "baz", hash = "345"},
 ]
 B = []
 git-package = []
+git-package-subdir = []
+url-package = []
 """
 
-    assert expected == content
+    assert content == expected
 
 
-def test_locker_properly_loads_extras(locker):
+def test_locker_properly_loads_extras(locker: Locker):
     content = """\
 [[package]]
 name = "cachecontrol"
@@ -135,17 +231,17 @@ def test_locker_properly_loads_extras(locker):
 
     packages = locker.locked_repository().packages
 
-    assert 1 == len(packages)
+    assert len(packages) == 1
 
     package = packages[0]
-    assert 3 == len(package.requires)
-    assert 2 == len(package.extras)
+    assert len(package.requires) == 3
+    assert len(package.extras) == 2
 
     lockfile_dep = package.extras["filecache"][0]
     assert lockfile_dep.name == "lockfile"
 
 
-def test_locker_properly_loads_nested_extras(locker):
+def test_locker_properly_loads_nested_extras(locker: Locker):
     content = """\
 [[package]]
 name = "a"
@@ -197,7 +293,7 @@ def test_locker_properly_loads_nested_extras(locker):
     locker.lock.write(tomlkit.parse(content))
 
     repository = locker.locked_repository()
-    assert 3 == len(repository.packages)
+    assert len(repository.packages) == 3
 
     packages = repository.find_packages(get_dependency("a", "1.0"))
     assert len(packages) == 1
@@ -225,7 +321,7 @@ def test_locker_properly_loads_nested_extras(locker):
     assert len(packages) == 1
 
 
-def test_locker_properly_loads_extras_legacy(locker):
+def test_locker_properly_loads_extras_legacy(locker: Locker):
     content = """\
 [[package]]
 name = "a"
@@ -262,7 +358,7 @@ def test_locker_properly_loads_extras_legacy(locker):
     locker.lock.write(tomlkit.parse(content))
 
     repository = locker.locked_repository()
-    assert 2 == len(repository.packages)
+    assert len(repository.packages) == 2
 
     packages = repository.find_packages(get_dependency("a", "1.0"))
     assert len(packages) == 1
@@ -275,7 +371,45 @@ def test_locker_properly_loads_extras_legacy(locker):
     assert dependency_b.name == "b"
 
 
-def test_lock_packages_with_null_description(locker, root):
+def test_locker_properly_loads_subdir(locker: Locker) -> None:
+    content = """\
+[[package]]
+name = "git-package-subdir"
+version = "1.2.3"
+description = ""
+category = "main"
+optional = false
+python-versions = "*"
+develop = false
+
+[package.source]
+type = "git"
+url = "https://github.com/python-poetry/poetry.git"
+reference = "develop"
+resolved_reference = "123456"
+subdirectory = "subdir"
+
+[metadata]
+lock-version = "1.1"
+python-versions = "*"
+content-hash = "115cf985d932e9bf5f540555bbdd75decbb62cac81e399375fc19f6277f8c1d8"
+
+[metadata.files]
+git-package-subdir = []
+"""
+    locker.lock.write(tomlkit.parse(content))
+
+    repository = locker.locked_repository()
+    assert len(repository.packages) == 1
+
+    packages = repository.find_packages(get_dependency("git-package-subdir", "1.2.3"))
+    assert len(packages) == 1
+
+    package = packages[0]
+    assert package.source_subdirectory == "subdir"
+
+
+def test_lock_packages_with_null_description(locker: Locker, root: ProjectPackage):
     package_a = get_package("A", "1.0.0")
     package_a.description = None
 
@@ -301,10 +435,10 @@ def test_lock_packages_with_null_description(locker, root):
 A = []
 """
 
-    assert expected == content
+    assert content == expected
 
 
-def test_lock_file_should_not_have_mixed_types(locker, root):
+def test_lock_file_should_not_have_mixed_types(locker: Locker, root: ProjectPackage):
     package_a = get_package("A", "1.0.0")
     package_a.add_dependency(Factory.create_dependency("B", "^1.0.0"))
     package_a.add_dependency(
@@ -344,11 +478,11 @@ def test_lock_file_should_not_have_mixed_types(locker, root):
     with locker.lock.open(encoding="utf-8") as f:
         content = f.read()
 
-    assert expected == content
+    assert content == expected
 
 
-def test_reading_lock_file_should_raise_an_error_on_invalid_data(locker):
-    content = u"""[[package]]
+def test_reading_lock_file_should_raise_an_error_on_invalid_data(locker: Locker):
+    content = """[[package]]
 name = "A"
 version = "1.0.0"
 description = ""
@@ -379,7 +513,9 @@ def test_reading_lock_file_should_raise_an_error_on_invalid_data(locker):
     assert "Unable to read the lock file" in str(e.value)
 
 
-def test_locking_legacy_repository_package_should_include_source_section(root, locker):
+def test_locking_legacy_repository_package_should_include_source_section(
+    root: ProjectPackage, locker: Locker
+):
     package_a = Package(
         "A",
         "1.0.0",
@@ -416,43 +552,42 @@ def test_locking_legacy_repository_package_should_include_source_section(root, l
 A = []
 """
 
-    assert expected == content
+    assert content == expected
 
 
 def test_locker_should_emit_warnings_if_lock_version_is_newer_but_allowed(
-    locker, caplog
+    locker: Locker, caplog: LogCaptureFixture
 ):
-    content = """\
+    version = ".".join(Version.parse(Locker._VERSION).next_minor().text.split(".")[:2])
+    content = f"""\
 [metadata]
 lock-version = "{version}"
 python-versions = "~2.7 || ^3.4"
 content-hash = "c3d07fca33fba542ef2b2a4d75bf5b48d892d21a830e2ad9c952ba5123a52f77"
 
 [metadata.files]
-""".format(
-        version=".".join(Version.parse(Locker._VERSION).next_minor.text.split(".")[:2])
-    )
+"""
     caplog.set_level(logging.WARNING, logger="poetry.packages.locker")
 
     locker.lock.write(tomlkit.parse(content))
 
     _ = locker.lock_data
 
-    assert 1 == len(caplog.records)
+    assert len(caplog.records) == 1
 
     record = caplog.records[0]
-    assert "WARNING" == record.levelname
+    assert record.levelname == "WARNING"
 
     expected = """\
 The lock file might not be compatible with the current version of Poetry.
 Upgrade Poetry to ensure the lock file is read properly or, alternatively, \
 regenerate the lock file with the `poetry lock` command.\
 """
-    assert expected == record.message
+    assert record.message == expected
 
 
 def test_locker_should_raise_an_error_if_lock_version_is_newer_and_not_allowed(
-    locker, caplog
+    locker: Locker, caplog: LogCaptureFixture
 ):
     content = """\
 [metadata]
@@ -470,7 +605,7 @@ def test_locker_should_raise_an_error_if_lock_version_is_newer_and_not_allowed(
         _ = locker.lock_data
 
 
-def test_extras_dependencies_are_ordered(locker, root):
+def test_extras_dependencies_are_ordered(locker: Locker, root: ProjectPackage):
     package_a = get_package("A", "1.0.0")
     package_a.add_dependency(
         Factory.create_dependency(
@@ -504,36 +639,36 @@ def test_extras_dependencies_are_ordered(locker, root):
     with locker.lock.open(encoding="utf-8") as f:
         content = f.read()
 
-    assert expected == content
+    assert content == expected
 
 
-def test_locker_should_neither_emit_warnings_nor_raise_error_for_lower_compatible_versions(
-    locker, caplog
+def test_locker_should_neither_emit_warnings_nor_raise_error_for_lower_compatible_versions(  # noqa: E501
+    locker: Locker, caplog: LogCaptureFixture
 ):
     current_version = Version.parse(Locker._VERSION)
     older_version = ".".join(
         [str(current_version.major), str(current_version.minor - 1)]
     )
-    content = """\
+    content = f"""\
 [metadata]
-lock-version = "{version}"
+lock-version = "{older_version}"
 python-versions = "~2.7 || ^3.4"
 content-hash = "c3d07fca33fba542ef2b2a4d75bf5b48d892d21a830e2ad9c952ba5123a52f77"
 
 [metadata.files]
-""".format(
-        version=older_version
-    )
+"""
     caplog.set_level(logging.WARNING, logger="poetry.packages.locker")
 
     locker.lock.write(tomlkit.parse(content))
 
     _ = locker.lock_data
 
-    assert 0 == len(caplog.records)
+    assert len(caplog.records) == 0
 
 
-def test_locker_dumps_dependency_information_correctly(locker, root):
+def test_locker_dumps_dependency_information_correctly(
+    locker: Locker, root: ProjectPackage
+):
     root_dir = Path(__file__).parent.parent.joinpath("fixtures")
     package_a = get_package("A", "1.0.0")
     package_a.add_dependency(
@@ -595,10 +730,57 @@ def test_locker_dumps_dependency_information_correctly(locker, root):
 A = []
 """
 
-    assert expected == content
+    assert content == expected
 
 
-def test_locker_dumps_dependency_extras_in_correct_order(locker, root):
+def test_locker_dumps_subdir(locker: Locker, root: ProjectPackage) -> None:
+    package_git_with_subdirectory = Package(
+        "git-package-subdir",
+        "1.2.3",
+        source_type="git",
+        source_url="https://github.com/python-poetry/poetry.git",
+        source_reference="develop",
+        source_resolved_reference="123456",
+        source_subdirectory="subdir",
+    )
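+    # source_subdirectory is expected to be serialised as the `subdirectory`
+    # key of the [package.source] table in the lock file (see expected below).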
+
+    locker.set_lock_data(root, [package_git_with_subdirectory])
+
+    with locker.lock.open(encoding="utf-8") as f:
+        content = f.read()
+
+    expected = """\
+[[package]]
+name = "git-package-subdir"
+version = "1.2.3"
+description = ""
+category = "main"
+optional = false
+python-versions = "*"
+develop = false
+
+[package.source]
+type = "git"
+url = "https://github.com/python-poetry/poetry.git"
+reference = "develop"
+resolved_reference = "123456"
+subdirectory = "subdir"
+
+[metadata]
+lock-version = "1.1"
+python-versions = "*"
+content-hash = "115cf985d932e9bf5f540555bbdd75decbb62cac81e399375fc19f6277f8c1d8"
+
+[metadata.files]
+git-package-subdir = []
+"""
+
+    assert content == expected
+
+
+def test_locker_dumps_dependency_extras_in_correct_order(
+    locker: Locker, root: ProjectPackage
+):
     root_dir = Path(__file__).parent.parent.joinpath("fixtures")
     package_a = get_package("A", "1.0.0")
     Factory.create_dependency("B", "1.0.0", root_dir=root_dir)
@@ -641,8 +823,9 @@ def test_locker_dumps_dependency_extras_in_correct_order(locker, root):
     assert content == expected
 
 
-@pytest.mark.skipif(sys.version_info[:2] == (3, 5), reason="Skip for Python 3.5")
-def test_locked_repository_uses_root_dir_of_package(locker, mocker):
+def test_locked_repository_uses_root_dir_of_package(
+    locker: Locker, mocker: MockerFixture
+):
     content = """\
 [[package]]
 name = "lib-a"
@@ -671,7 +854,9 @@ def test_locked_repository_uses_root_dir_of_package(locker, mocker):
 """
 
     locker.lock.write(tomlkit.parse(content))
-    create_dependency_patch = mocker.patch("poetry.factory.Factory.create_dependency")
+    create_dependency_patch = mocker.patch(
+        "poetry.factory.Factory.create_dependency", autospec=True
+    )
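+    # With autospec=True the mock enforces the real create_dependency
+    # signature, so the assert_called_once_with below also catches
+    # signature drift.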
     locker.locked_repository()
 
     create_dependency_patch.assert_called_once_with(
@@ -682,3 +867,134 @@ def test_locked_repository_uses_root_dir_of_package(locker, mocker):
     assert root_dir.match("*/lib/libA")
     # relative_to raises an exception if not relative - is_relative_to comes in py3.9
     assert root_dir.relative_to(locker.lock.path.parent.resolve()) is not None
+
+
+@pytest.mark.parametrize(
+    ("local_config", "fresh"),
+    [
+        ({}, True),
+        ({"dependencies": [uuid.uuid4().hex]}, True),
+        (
+            {
+                "dependencies": [uuid.uuid4().hex],
+                "dev-dependencies": [uuid.uuid4().hex],
+            },
+            True,
+        ),
+        (
+            {
+                "dependencies": [uuid.uuid4().hex],
+                "dev-dependencies": None,
+            },
+            True,
+        ),
+        ({"dependencies": [uuid.uuid4().hex], "groups": [uuid.uuid4().hex]}, False),
+    ],
+)
+def test_content_hash_with_legacy_is_compatible(
+    local_config: dict[str, list[str]], fresh: bool, locker: Locker
+) -> None:
+    # old hash generation
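+    # Legacy hash: a sha256 digest over the JSON dump (sorted keys) of just
+    # the sections listed in Locker._legacy_keys, reproduced verbatim below.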
+    relevant_content = {}
+    for key in locker._legacy_keys:
+        relevant_content[key] = local_config.get(key)
+
+    locker = locker.__class__(
+        lock=locker.lock.path,
+        local_config=local_config,
+    )
+
+    old_content_hash = sha256(
+        json.dumps(relevant_content, sort_keys=True).encode()
+    ).hexdigest()
+    content_hash = locker._get_content_hash()
+
+    assert (content_hash == old_content_hash) or fresh
+
+
+def test_lock_file_resolves_file_url_symlinks(root: ProjectPackage):
+    """
+    Create directories and file structure as follows:
+
+    d1/
+    d1/testsymlink -> d1/d2/d3
+    d1/d2/d3/lock_file
+    d1/d4/source_file
+
+    Using the testsymlink as the Locker.lock file path should correctly resolve
+    to the real physical path of the source_file when calculating the relative
+    path from the lock_file, i.e. "../../d4/source_file" instead of the
+    unresolved path from the symlink itself, which would have been
+    "../d4/source_file".
+
+    See https://github.com/python-poetry/poetry/issues/5849
+    """
+    with tempfile.TemporaryDirectory() as d1:
+        symlink_path = Path(d1).joinpath("testsymlink")
+        with (
+            tempfile.TemporaryDirectory(dir=d1) as d2,
+            tempfile.TemporaryDirectory(dir=d1) as d4,
+            tempfile.TemporaryDirectory(dir=d2) as d3,
+            tempfile.NamedTemporaryFile(dir=d4) as source_file,
+            tempfile.NamedTemporaryFile(dir=d3) as lock_file,
+        ):
+            lock_file.close()
+            try:
+                os.symlink(Path(d3), symlink_path)
+            except OSError:
+                if sys.platform == "win32":
+                    # os.symlink requires either administrative privileges or developer
+                    # mode on Win10, throwing an OSError if neither is active.
+                    # Test is not possible in that case.
+                    return
+                raise
+            locker = Locker(str(symlink_path) + os.sep + Path(lock_file.name).name, {})
+
+            package_local = Package(
+                "local-package",
+                "1.2.3",
+                source_type="file",
+                source_url=source_file.name,
+                source_reference="develop",
+                source_resolved_reference="123456",
+            )
+            packages = [
+                package_local,
+            ]
+
+            locker.set_lock_data(root, packages)
+
+            with locker.lock.open(encoding="utf-8") as f:
+                content = f.read()
+
+            expected = f"""\
+[[package]]
+name = "local-package"
+version = "1.2.3"
+description = ""
+category = "main"
+optional = false
+python-versions = "*"
+
+[package.source]
+type = "file"
+url = "{
+    Path(
+        os.path.relpath(
+            Path(source_file.name).resolve().as_posix(),
+            Path(Path(lock_file.name).parent).resolve().as_posix(),
+        )
+    ).as_posix()
+}"
+reference = "develop"
+resolved_reference = "123456"
+
+[metadata]
+lock-version = "1.1"
+python-versions = "*"
+content-hash = "115cf985d932e9bf5f540555bbdd75decbb62cac81e399375fc19f6277f8c1d8"
+
+[metadata.files]
+local-package = []
+"""
+
+            assert content == expected
diff --git a/vendor/poetry/tests/plugins/__init__.py b/vendor/poetry/tests/plugins/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/tests/plugins/test_plugin_manager.py b/vendor/poetry/tests/plugins/test_plugin_manager.py
new file mode 100644
index 00000000..868d61f2
--- /dev/null
+++ b/vendor/poetry/tests/plugins/test_plugin_manager.py
@@ -0,0 +1,125 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+import pytest
+
+from cleo.io.buffered_io import BufferedIO
+from poetry.core.packages.project_package import ProjectPackage
+
+from poetry.packages.locker import Locker
+from poetry.plugins import ApplicationPlugin
+from poetry.plugins import Plugin
+from poetry.plugins.plugin_manager import PluginManager
+from poetry.poetry import Poetry
+from tests.compat import Protocol
+from tests.helpers import mock_metadata_entry_points
+
+
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
+
+    from tests.conftest import Config
+
+CWD = Path(__file__).parent.parent / "fixtures" / "simple_project"
+
+
+class ManagerFactory(Protocol):
+    def __call__(self, group: str = Plugin.group) -> PluginManager:
+        ...
+
+
+class MyPlugin(Plugin):
+    def activate(self, poetry: Poetry, io: BufferedIO) -> None:
+        io.write_line("Setting readmes")
+        poetry.package.readmes = ("README.md",)
+
+
+class MyCommandPlugin(ApplicationPlugin):
+    commands = []
+
+
+class InvalidPlugin:
+    def activate(self, poetry: Poetry, io: BufferedIO) -> None:
+        io.write_line("Updating version")
+        poetry.package.version = "9.9.9"
+
+
+@pytest.fixture()
+def poetry(tmp_dir: str, config: Config) -> Poetry:
+    poetry = Poetry(
+        CWD / "pyproject.toml",
+        {},
+        ProjectPackage("simple-project", "1.2.3"),
+        Locker(CWD / "poetry.lock", {}),
+        config,
+    )
+
+    return poetry
+
+
+@pytest.fixture()
+def io() -> BufferedIO:
+    return BufferedIO()
+
+
+@pytest.fixture()
+def manager_factory(poetry: Poetry, io: BufferedIO) -> ManagerFactory:
+    def _manager(group: str = Plugin.group) -> PluginManager:
+        return PluginManager(group)
+
+    return _manager
+
+
+@pytest.fixture()
+def no_plugin_manager(poetry: Poetry, io: BufferedIO) -> PluginManager:
+    return PluginManager(Plugin.group, disable_plugins=True)
+
+
+def test_load_plugins_and_activate(
+    manager_factory: ManagerFactory,
+    poetry: Poetry,
+    io: BufferedIO,
+    with_my_plugin: None,
+):
+    manager = manager_factory()
+    manager.load_plugins()
+    manager.activate(poetry, io)
+
+    assert poetry.package.readmes == ("README.md",)
+    assert io.fetch_output() == "Setting readmes\n"
+
+
+@pytest.fixture
+def with_my_plugin(mocker: MockerFixture) -> None:
+    mock_metadata_entry_points(mocker, MyPlugin)
+
+
+@pytest.fixture
+def with_invalid_plugin(mocker: MockerFixture) -> None:
+    mock_metadata_entry_points(mocker, InvalidPlugin)
+
+
+def test_load_plugins_with_invalid_plugin(
+    manager_factory: ManagerFactory,
+    poetry: Poetry,
+    io: BufferedIO,
+    with_invalid_plugin: None,
+):
+    manager = manager_factory()
+
+    with pytest.raises(ValueError):
+        manager.load_plugins()
+
+
+def test_load_plugins_with_plugins_disabled(
+    no_plugin_manager: PluginManager,
+    poetry: Poetry,
+    io: BufferedIO,
+    with_my_plugin: None,
+):
+    no_plugin_manager.load_plugins()
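+    # With disable_plugins=True nothing is loaded, so MyPlugin.activate() never
+    # runs: the package is left untouched and no output is written.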
+
+    assert poetry.package.version.text == "1.2.3"
+    assert io.fetch_output() == ""
diff --git a/vendor/poetry/tests/publishing/test_publisher.py b/vendor/poetry/tests/publishing/test_publisher.py
index ceb9e7d4..afc992f8 100644
--- a/vendor/poetry/tests/publishing/test_publisher.py
+++ b/vendor/poetry/tests/publishing/test_publisher.py
@@ -1,16 +1,29 @@
+from __future__ import annotations
+
 import os
 
+from pathlib import Path
+from typing import TYPE_CHECKING
+
 import pytest
 
-from cleo.io import BufferedIO
+from cleo.io.buffered_io import BufferedIO
+from cleo.io.null_io import NullIO
 
 from poetry.factory import Factory
-from poetry.io.null_io import NullIO
 from poetry.publishing.publisher import Publisher
-from poetry.utils._compat import Path
 
 
-def test_publish_publishes_to_pypi_by_default(fixture_dir, mocker, config):
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
+
+    from tests.conftest import Config
+    from tests.types import FixtureDirGetter
+
+
+def test_publish_publishes_to_pypi_by_default(
+    fixture_dir: FixtureDirGetter, mocker: MockerFixture, config: Config
+):
     uploader_auth = mocker.patch("poetry.publishing.uploader.Uploader.auth")
     uploader_upload = mocker.patch("poetry.publishing.uploader.Uploader.upload")
     poetry = Factory().create_poetry(fixture_dir("sample_project"))
@@ -25,15 +38,16 @@ def test_publish_publishes_to_pypi_by_default(fixture_dir, mocker, config):
     assert [("foo", "bar")] == uploader_auth.call_args
     assert [
         ("https://upload.pypi.org/legacy/",),
-        {"cert": None, "client_cert": None, "dry_run": False},
+        {"cert": True, "client_cert": None, "dry_run": False, "skip_existing": False},
     ] == uploader_upload.call_args
 
 
-@pytest.mark.parametrize(
-    ("fixture_name",), [("sample_project",), ("with_default_source",)]
-)
+@pytest.mark.parametrize("fixture_name", ["sample_project", "with_default_source"])
 def test_publish_can_publish_to_given_repository(
-    fixture_dir, mocker, config, fixture_name
+    fixture_dir: FixtureDirGetter,
+    mocker: MockerFixture,
+    config: Config,
+    fixture_name: str,
 ):
     uploader_auth = mocker.patch("poetry.publishing.uploader.Uploader.auth")
     uploader_upload = mocker.patch("poetry.publishing.uploader.Uploader.upload")
@@ -45,7 +59,7 @@ def test_publish_can_publish_to_given_repository(
         }
     )
 
-    mocker.patch("poetry.factory.Factory.create_config", return_value=config)
+    mocker.patch("poetry.config.config.Config.create", return_value=config)
     poetry = Factory().create_poetry(fixture_dir(fixture_name))
 
     io = BufferedIO()
@@ -56,12 +70,14 @@ def test_publish_can_publish_to_given_repository(
     assert [("foo", "bar")] == uploader_auth.call_args
     assert [
         ("http://foo.bar",),
-        {"cert": None, "client_cert": None, "dry_run": False},
+        {"cert": True, "client_cert": None, "dry_run": False, "skip_existing": False},
     ] == uploader_upload.call_args
     assert "Publishing my-package (1.2.3) to foo" in io.fetch_output()
 
 
-def test_publish_raises_error_for_undefined_repository(fixture_dir, mocker, config):
+def test_publish_raises_error_for_undefined_repository(
+    fixture_dir: FixtureDirGetter, config: Config
+):
     poetry = Factory().create_poetry(fixture_dir("sample_project"))
     poetry._config = config
     poetry.config.merge(
@@ -73,7 +89,9 @@ def test_publish_raises_error_for_undefined_repository(fixture_dir, mocker, conf
         publisher.publish("my-repo", None, None)
 
 
-def test_publish_uses_token_if_it_exists(fixture_dir, mocker, config):
+def test_publish_uses_token_if_it_exists(
+    fixture_dir: FixtureDirGetter, mocker: MockerFixture, config: Config
+):
     uploader_auth = mocker.patch("poetry.publishing.uploader.Uploader.auth")
     uploader_upload = mocker.patch("poetry.publishing.uploader.Uploader.upload")
     poetry = Factory().create_poetry(fixture_dir("sample_project"))
@@ -86,11 +104,13 @@ def test_publish_uses_token_if_it_exists(fixture_dir, mocker, config):
     assert [("__token__", "my-token")] == uploader_auth.call_args
     assert [
         ("https://upload.pypi.org/legacy/",),
-        {"cert": None, "client_cert": None, "dry_run": False},
+        {"cert": True, "client_cert": None, "dry_run": False, "skip_existing": False},
     ] == uploader_upload.call_args
 
 
-def test_publish_uses_cert(fixture_dir, mocker, config):
+def test_publish_uses_cert(
+    fixture_dir: FixtureDirGetter, mocker: MockerFixture, config: Config
+):
     cert = "path/to/ca.pem"
     uploader_auth = mocker.patch("poetry.publishing.uploader.Uploader.auth")
     uploader_upload = mocker.patch("poetry.publishing.uploader.Uploader.upload")
@@ -110,11 +130,18 @@ def test_publish_uses_cert(fixture_dir, mocker, config):
     assert [("foo", "bar")] == uploader_auth.call_args
     assert [
         ("https://foo.bar",),
-        {"cert": Path(cert), "client_cert": None, "dry_run": False},
+        {
+            "cert": Path(cert),
+            "client_cert": None,
+            "dry_run": False,
+            "skip_existing": False,
+        },
     ] == uploader_upload.call_args
 
 
-def test_publish_uses_client_cert(fixture_dir, mocker, config):
+def test_publish_uses_client_cert(
+    fixture_dir: FixtureDirGetter, mocker: MockerFixture, config: Config
+):
     client_cert = "path/to/client.pem"
     uploader_upload = mocker.patch("poetry.publishing.uploader.Uploader.upload")
     poetry = Factory().create_poetry(fixture_dir("sample_project"))
@@ -131,11 +158,21 @@ def test_publish_uses_client_cert(fixture_dir, mocker, config):
 
     assert [
         ("https://foo.bar",),
-        {"cert": None, "client_cert": Path(client_cert), "dry_run": False},
+        {
+            "cert": True,
+            "client_cert": Path(client_cert),
+            "dry_run": False,
+            "skip_existing": False,
+        },
     ] == uploader_upload.call_args
 
 
-def test_publish_read_from_environment_variable(fixture_dir, environ, mocker, config):
+def test_publish_read_from_environment_variable(
+    fixture_dir: FixtureDirGetter,
+    environ: None,
+    mocker: MockerFixture,
+    config: Config,
+):
     os.environ["POETRY_REPOSITORIES_FOO_URL"] = "https://foo.bar"
     os.environ["POETRY_HTTP_BASIC_FOO_USERNAME"] = "bar"
     os.environ["POETRY_HTTP_BASIC_FOO_PASSWORD"] = "baz"
@@ -149,5 +186,5 @@ def test_publish_read_from_environment_variable(fixture_dir, environ, mocker, co
     assert [("bar", "baz")] == uploader_auth.call_args
     assert [
         ("https://foo.bar",),
-        {"cert": None, "client_cert": None, "dry_run": False},
+        {"cert": True, "client_cert": None, "dry_run": False, "skip_existing": False},
     ] == uploader_upload.call_args
diff --git a/vendor/poetry/tests/publishing/test_uploader.py b/vendor/poetry/tests/publishing/test_uploader.py
index 8c46057b..9a734682 100644
--- a/vendor/poetry/tests/publishing/test_uploader.py
+++ b/vendor/poetry/tests/publishing/test_uploader.py
@@ -1,47 +1,121 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
 import pytest
 
+from cleo.io.null_io import NullIO
+
 from poetry.factory import Factory
-from poetry.io.null_io import NullIO
 from poetry.publishing.uploader import Uploader
 from poetry.publishing.uploader import UploadError
-from poetry.utils._compat import Path
 
 
-fixtures_dir = Path(__file__).parent.parent / "fixtures"
+if TYPE_CHECKING:
+    import httpretty
+
+    from pytest_mock import MockerFixture
 
+    from tests.types import FixtureDirGetter
 
-def project(name):
-    return fixtures_dir / name
 
+@pytest.fixture
+def uploader(fixture_dir: FixtureDirGetter) -> Uploader:
+    return Uploader(Factory().create_poetry(fixture_dir("simple_project")), NullIO())
 
-def test_uploader_properly_handles_400_errors(http):
+
+def test_uploader_properly_handles_400_errors(
+    http: type[httpretty.httpretty], uploader: Uploader
+):
     http.register_uri(http.POST, "https://foo.com", status=400, body="Bad request")
-    uploader = Uploader(Factory().create_poetry(project("simple_project")), NullIO())
 
     with pytest.raises(UploadError) as e:
         uploader.upload("https://foo.com")
 
-    assert "HTTP Error 400: Bad Request" == str(e.value)
+    assert str(e.value) == "HTTP Error 400: Bad Request | b'Bad request'"
 
 
-def test_uploader_properly_handles_403_errors(http):
+def test_uploader_properly_handles_403_errors(
+    http: type[httpretty.httpretty], uploader: Uploader
+):
     http.register_uri(http.POST, "https://foo.com", status=403, body="Unauthorized")
-    uploader = Uploader(Factory().create_poetry(project("simple_project")), NullIO())
 
     with pytest.raises(UploadError) as e:
         uploader.upload("https://foo.com")
 
-    assert "HTTP Error 403: Forbidden" == str(e.value)
+    assert str(e.value) == "HTTP Error 403: Forbidden | b'Unauthorized'"
+
+
+def test_uploader_properly_handles_nonstandard_errors(
+    http: type[httpretty.httpretty], uploader: Uploader
+):
+    # Content based on a true story.
+    # Message changed to protect the ~~innocent~~ guilty.
+    content = (
+        b'{\n "errors": [ {\n '
+        b'"status": 400,'
+        b'"message": "I cant let you do that, dave"\n'
+        b"} ]\n}"
+    )
+    http.register_uri(http.POST, "https://foo.com", status=400, body=content)
+
+    with pytest.raises(UploadError) as e:
+        uploader.upload("https://foo.com")
+
+    assert str(e.value) == f"HTTP Error 400: Bad Request | {content}"
+
 
+def test_uploader_properly_handles_301_redirects(
+    http: type[httpretty.httpretty], uploader: Uploader
+):
+    http.register_uri(http.POST, "https://foo.com", status=301, body="Redirect")
 
-def test_uploader_registers_for_appropriate_400_errors(mocker, http):
+    with pytest.raises(UploadError) as e:
+        uploader.upload("https://foo.com")
+
+    assert (
+        str(e.value)
+        == "Redirects are not supported. Is the URL missing a trailing slash?"
+    )
+
+
+def test_uploader_registers_for_appropriate_400_errors(
+    mocker: MockerFixture, http: type[httpretty.httpretty], uploader: Uploader
+):
     register = mocker.patch("poetry.publishing.uploader.Uploader._register")
     http.register_uri(
         http.POST, "https://foo.com", status=400, body="No package was ever registered"
     )
-    uploader = Uploader(Factory().create_poetry(project("simple_project")), NullIO())
 
     with pytest.raises(UploadError):
         uploader.upload("https://foo.com")
 
-    assert 1 == register.call_count
+    assert register.call_count == 1
+
+
+@pytest.mark.parametrize(
+    "status, body",
+    [
+        (409, ""),
+        (400, "File already exists"),
+        (400, "Repository does not allow updating assets"),
+        (403, "Not enough permissions to overwrite artifact"),
+        (400, "file name has already been taken"),
+    ],
+)
+def test_uploader_skips_existing(
+    http: type[httpretty.httpretty], uploader: Uploader, status: int, body: str
+):
+    http.register_uri(http.POST, "https://foo.com", status=status, body=body)
+
+    # should not raise
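+    # (each parametrised status/body pair above is a response shape that the
+    # skip_existing heuristic should recognise as "already published")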
+    uploader.upload("https://foo.com", skip_existing=True)
+
+
+def test_uploader_skip_existing_bubbles_unskippable_errors(
+    http: type[httpretty.httpretty], uploader: Uploader
+):
+    http.register_uri(http.POST, "https://foo.com", status=403, body="Unauthorized")
+
+    with pytest.raises(UploadError):
+        uploader.upload("https://foo.com", skip_existing=True)
diff --git a/vendor/poetry/tests/puzzle/conftest.py b/vendor/poetry/tests/puzzle/conftest.py
index e3812530..9023893a 100644
--- a/vendor/poetry/tests/puzzle/conftest.py
+++ b/vendor/poetry/tests/puzzle/conftest.py
@@ -1,38 +1,20 @@
-import shutil
-
-import pytest
-
-from poetry.utils._compat import Path
+from __future__ import annotations
 
+from typing import TYPE_CHECKING
 
-try:
-    import urllib.parse as urlparse
-except ImportError:
-    import urlparse
-
+import pytest
 
-def mock_clone(self, source, dest):
-    # Checking source to determine which folder we need to copy
-    parts = urlparse.urlparse(source)
+from tests.helpers import MOCK_DEFAULT_GIT_REVISION
+from tests.helpers import mock_clone
 
-    folder = (
-        Path(__file__).parent.parent
-        / "fixtures"
-        / "git"
-        / parts.netloc
-        / parts.path.lstrip("/").rstrip(".git")
-    )
 
-    shutil.rmtree(str(dest))
-    shutil.copytree(str(folder), str(dest))
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
 
 
 @pytest.fixture(autouse=True)
-def setup(mocker):
+def setup(mocker: MockerFixture) -> None:
     # Patch git module to not actually clone projects
-    mocker.patch("poetry.core.vcs.git.Git.clone", new=mock_clone)
-    mocker.patch("poetry.core.vcs.git.Git.checkout", new=lambda *_: None)
-    p = mocker.patch("poetry.core.vcs.git.Git.rev_parse")
-    p.return_value = "9cf87a285a2d3fbb0b9fa621997b3acc3631ed24"
-
-    yield
+    mocker.patch("poetry.vcs.git.Git.clone", new=mock_clone)
+    p = mocker.patch("poetry.vcs.git.Git.get_revision")
+    p.return_value = MOCK_DEFAULT_GIT_REVISION
diff --git a/vendor/poetry/tests/puzzle/test_provider.py b/vendor/poetry/tests/puzzle/test_provider.py
index ecab7f3a..60a45e9f 100644
--- a/vendor/poetry/tests/puzzle/test_provider.py
+++ b/vendor/poetry/tests/puzzle/test_provider.py
@@ -1,41 +1,55 @@
+from __future__ import annotations
+
+from pathlib import Path
 from subprocess import CalledProcessError
+from typing import TYPE_CHECKING
 
 import pytest
 
-from clikit.io import NullIO
-
-from poetry.core.packages import ProjectPackage
+from cleo.io.null_io import NullIO
+from poetry.core.packages.dependency import Dependency
 from poetry.core.packages.directory_dependency import DirectoryDependency
 from poetry.core.packages.file_dependency import FileDependency
+from poetry.core.packages.package import Package
+from poetry.core.packages.project_package import ProjectPackage
+from poetry.core.packages.url_dependency import URLDependency
 from poetry.core.packages.vcs_dependency import VCSDependency
+
+from poetry.factory import Factory
 from poetry.inspection.info import PackageInfo
+from poetry.packages import DependencyPackage
 from poetry.puzzle.provider import Provider
 from poetry.repositories.pool import Pool
 from poetry.repositories.repository import Repository
-from poetry.utils._compat import PY35
-from poetry.utils._compat import Path
 from poetry.utils.env import EnvCommandError
 from poetry.utils.env import MockEnv as BaseMockEnv
 from tests.helpers import get_dependency
 
 
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
+
+
+SOME_URL = "https://example.com/path.tar.gz"
+
+
 class MockEnv(BaseMockEnv):
-    def run(self, bin, *args):
+    def run(self, bin: str, *args: str) -> None:
         raise EnvCommandError(CalledProcessError(1, "python", output=""))
 
 
 @pytest.fixture
-def root():
+def root() -> ProjectPackage:
     return ProjectPackage("root", "1.2.3")
 
 
 @pytest.fixture
-def repository():
-    return Repository()
+def repository() -> Repository:
+    return Repository("repo")
 
 
 @pytest.fixture
-def pool(repository):
+def pool(repository: Repository) -> Pool:
     pool = Pool()
     pool.add_repository(repository)
 
@@ -43,23 +57,125 @@ def pool(repository):
 
 
 @pytest.fixture
-def provider(root, pool):
+def provider(root: ProjectPackage, pool: Pool) -> Provider:
     return Provider(root, pool, NullIO())
 
 
+@pytest.mark.parametrize(
+    "dependency, expected",
+    [
+        (Dependency("foo", "<2"), [Package("foo", "1")]),
+        (Dependency("foo", "<2", extras=["bar"]), [Package("foo", "1")]),
+        (Dependency("foo", ">=1"), [Package("foo", "2"), Package("foo", "1")]),
+        (
+            Dependency("foo", ">=1a"),
+            [
+                Package("foo", "3a"),
+                Package("foo", "2"),
+                Package("foo", "2a"),
+                Package("foo", "1"),
+            ],
+        ),
+        (
+            Dependency("foo", ">=1", allows_prereleases=True),
+            [
+                Package("foo", "3a"),
+                Package("foo", "2"),
+                Package("foo", "2a"),
+                Package("foo", "1"),
+            ],
+        ),
+    ],
+)
+def test_search_for(
+    provider: Provider,
+    repository: Repository,
+    dependency: Dependency,
+    expected: list[Package],
+) -> None:
+    foo1 = Package("foo", "1")
+    foo2a = Package("foo", "2a")
+    foo2 = Package("foo", "2")
+    foo3a = Package("foo", "3a")
+    repository.add_package(foo1)
+    repository.add_package(foo2a)
+    repository.add_package(foo2)
+    repository.add_package(foo3a)
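+    # The parametrised expectations encode two properties: results come back
+    # newest-first, and pre-releases (2a, 3a) only appear when the constraint
+    # itself mentions one (">=1a") or allows_prereleases=True is set.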
+    assert provider.search_for(dependency) == expected
+
+
+@pytest.mark.parametrize(
+    "dependency, direct_origin_dependency, expected_before, expected_after",
+    [
+        (
+            Dependency("foo", ">=1"),
+            URLDependency("foo", SOME_URL),
+            [Package("foo", "3")],
+            [Package("foo", "2a", source_type="url", source_url=SOME_URL)],
+        ),
+        (
+            Dependency("foo", ">=2"),
+            URLDependency("foo", SOME_URL),
+            [Package("foo", "3")],
+            [],
+        ),
+        (
+            Dependency("foo", ">=1", extras=["bar"]),
+            URLDependency("foo", SOME_URL),
+            [Package("foo", "3")],
+            [Package("foo", "2a", source_type="url", source_url=SOME_URL)],
+        ),
+        (
+            Dependency("foo", ">=1"),
+            URLDependency("foo", SOME_URL, extras=["baz"]),
+            [Package("foo", "3")],
+            [Package("foo", "2a", source_type="url", source_url=SOME_URL)],
+        ),
+        (
+            Dependency("foo", ">=1", extras=["bar"]),
+            URLDependency("foo", SOME_URL, extras=["baz"]),
+            [Package("foo", "3")],
+            [Package("foo", "2a", source_type="url", source_url=SOME_URL)],
+        ),
+    ],
+)
+def test_search_for_direct_origin_and_extras(
+    provider: Provider,
+    repository: Repository,
+    mocker: MockerFixture,
+    dependency: Dependency,
+    direct_origin_dependency: Dependency,
+    expected_before: list[Package],
+    expected_after: list[Package],
+) -> None:
+    foo2a_direct_origin = Package("foo", "2a", source_type="url", source_url=SOME_URL)
+    mocker.patch(
+        "poetry.puzzle.provider.Provider.search_for_direct_origin_dependency",
+        return_value=foo2a_direct_origin,
+    )
+    foo2a = Package("foo", "2a")
+    foo3 = Package("foo", "3")
+    repository.add_package(foo2a)
+    repository.add_package(foo3)
+
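+    # Before the direct-origin dependency is seen, search_for() consults the
+    # repository (expected_before); once search_for_direct_origin_dependency
+    # has returned a package, that package is preferred for later searches of
+    # the same name, or filtered out entirely if it no longer satisfies the
+    # constraint (expected_after).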
+    assert provider.search_for(dependency) == expected_before
+    assert provider.search_for(direct_origin_dependency) == [foo2a_direct_origin]
+    assert provider.search_for(dependency) == expected_after
+
+
 @pytest.mark.parametrize("value", [True, False])
-def test_search_for_vcs_retains_develop_flag(provider, value):
+def test_search_for_vcs_retains_develop_flag(provider: Provider, value: bool):
     dependency = VCSDependency(
         "demo", "git", "https://github.com/demo/demo.git", develop=value
     )
-    package = provider.search_for_vcs(dependency)[0]
+    package = provider.search_for_direct_origin_dependency(dependency)
     assert package.develop == value
 
 
-def test_search_for_vcs_setup_egg_info(provider):
+def test_search_for_vcs_setup_egg_info(provider: Provider):
     dependency = VCSDependency("demo", "git", "https://github.com/demo/demo.git")
 
-    package = provider.search_for_vcs(dependency)[0]
+    package = provider.search_for_direct_origin_dependency(dependency)
 
     assert package.name == "demo"
     assert package.version.text == "0.1.2"
@@ -74,12 +190,12 @@ def test_search_for_vcs_setup_egg_info(provider):
     }
 
 
-def test_search_for_vcs_setup_egg_info_with_extras(provider):
+def test_search_for_vcs_setup_egg_info_with_extras(provider: Provider):
     dependency = VCSDependency(
         "demo", "git", "https://github.com/demo/demo.git", extras=["foo"]
     )
 
-    package = provider.search_for_vcs(dependency)[0]
+    package = provider.search_for_direct_origin_dependency(dependency)
 
     assert package.name == "demo"
     assert package.version.text == "0.1.2"
@@ -94,13 +210,12 @@ def test_search_for_vcs_setup_egg_info_with_extras(provider):
     }
 
 
-@pytest.mark.skipif(not PY35, reason="AST parsing does not work for Python <3.4")
-def test_search_for_vcs_read_setup(provider, mocker):
+def test_search_for_vcs_read_setup(provider: Provider, mocker: MockerFixture):
     mocker.patch("poetry.utils.env.EnvManager.get", return_value=MockEnv())
 
     dependency = VCSDependency("demo", "git", "https://github.com/demo/demo.git")
 
-    package = provider.search_for_vcs(dependency)[0]
+    package = provider.search_for_direct_origin_dependency(dependency)
 
     assert package.name == "demo"
     assert package.version.text == "0.1.2"
@@ -115,15 +230,16 @@ def test_search_for_vcs_read_setup(provider, mocker):
     }
 
 
-@pytest.mark.skipif(not PY35, reason="AST parsing does not work for Python <3.4")
-def test_search_for_vcs_read_setup_with_extras(provider, mocker):
+def test_search_for_vcs_read_setup_with_extras(
+    provider: Provider, mocker: MockerFixture
+):
     mocker.patch("poetry.utils.env.EnvManager.get", return_value=MockEnv())
 
     dependency = VCSDependency(
         "demo", "git", "https://github.com/demo/demo.git", extras=["foo"]
     )
 
-    package = provider.search_for_vcs(dependency)[0]
+    package = provider.search_for_direct_origin_dependency(dependency)
 
     assert package.name == "demo"
     assert package.version.text == "0.1.2"
@@ -134,20 +250,22 @@ def test_search_for_vcs_read_setup_with_extras(provider, mocker):
     assert optional == [get_dependency("tomlkit"), get_dependency("cleo")]
 
 
-def test_search_for_vcs_read_setup_raises_error_if_no_version(provider, mocker):
+def test_search_for_vcs_read_setup_raises_error_if_no_version(
+    provider: Provider, mocker: MockerFixture
+):
     mocker.patch(
-        "poetry.inspection.info.PackageInfo._pep517_metadata",
+        "poetry.inspection.info.get_pep517_metadata",
         return_value=PackageInfo(name="demo", version=None),
     )
 
     dependency = VCSDependency("demo", "git", "https://github.com/demo/no-version.git")
 
     with pytest.raises(RuntimeError):
-        provider.search_for_vcs(dependency)
+        provider.search_for_direct_origin_dependency(dependency)
 
 
 @pytest.mark.parametrize("directory", ["demo", "non-canonical-name"])
-def test_search_for_directory_setup_egg_info(provider, directory):
+def test_search_for_directory_setup_egg_info(provider: Provider, directory: str):
     dependency = DirectoryDependency(
         "demo",
         Path(__file__).parent.parent
@@ -158,7 +276,7 @@ def test_search_for_directory_setup_egg_info(provider, directory):
         / directory,
     )
 
-    package = provider.search_for_directory(dependency)[0]
+    package = provider.search_for_direct_origin_dependency(dependency)
 
     assert package.name == "demo"
     assert package.version.text == "0.1.2"
@@ -173,7 +291,7 @@ def test_search_for_directory_setup_egg_info(provider, directory):
     }
 
 
-def test_search_for_directory_setup_egg_info_with_extras(provider):
+def test_search_for_directory_setup_egg_info_with_extras(provider: Provider):
     dependency = DirectoryDependency(
         "demo",
         Path(__file__).parent.parent
@@ -185,7 +303,7 @@ def test_search_for_directory_setup_egg_info_with_extras(provider):
         extras=["foo"],
     )
 
-    package = provider.search_for_directory(dependency)[0]
+    package = provider.search_for_direct_origin_dependency(dependency)
 
     assert package.name == "demo"
     assert package.version.text == "0.1.2"
@@ -201,7 +319,7 @@ def test_search_for_directory_setup_egg_info_with_extras(provider):
 
 
 @pytest.mark.parametrize("directory", ["demo", "non-canonical-name"])
-def test_search_for_directory_setup_with_base(provider, directory):
+def test_search_for_directory_setup_with_base(provider: Provider, directory: str):
     dependency = DirectoryDependency(
         "demo",
         Path(__file__).parent.parent
@@ -218,7 +336,7 @@ def test_search_for_directory_setup_with_base(provider, directory):
         / directory,
     )
 
-    package = provider.search_for_directory(dependency)[0]
+    package = provider.search_for_direct_origin_dependency(dependency)
 
     assert package.name == "demo"
     assert package.version.text == "0.1.2"
@@ -241,8 +359,9 @@ def test_search_for_directory_setup_with_base(provider, directory):
     )
 
 
-@pytest.mark.skipif(not PY35, reason="AST parsing does not work for Python <3.4")
-def test_search_for_directory_setup_read_setup(provider, mocker):
+def test_search_for_directory_setup_read_setup(
+    provider: Provider, mocker: MockerFixture
+):
     mocker.patch("poetry.utils.env.EnvManager.get", return_value=MockEnv())
 
     dependency = DirectoryDependency(
@@ -255,7 +374,7 @@ def test_search_for_directory_setup_read_setup(provider, mocker):
         / "demo",
     )
 
-    package = provider.search_for_directory(dependency)[0]
+    package = provider.search_for_direct_origin_dependency(dependency)
 
     assert package.name == "demo"
     assert package.version.text == "0.1.2"
@@ -270,8 +389,9 @@ def test_search_for_directory_setup_read_setup(provider, mocker):
     }
 
 
-@pytest.mark.skipif(not PY35, reason="AST parsing does not work for Python <3.4")
-def test_search_for_directory_setup_read_setup_with_extras(provider, mocker):
+def test_search_for_directory_setup_read_setup_with_extras(
+    provider: Provider, mocker: MockerFixture
+):
     mocker.patch("poetry.utils.env.EnvManager.get", return_value=MockEnv())
 
     dependency = DirectoryDependency(
@@ -285,7 +405,7 @@ def test_search_for_directory_setup_read_setup_with_extras(provider, mocker):
         extras=["foo"],
     )
 
-    package = provider.search_for_directory(dependency)[0]
+    package = provider.search_for_direct_origin_dependency(dependency)
 
     assert package.name == "demo"
     assert package.version.text == "0.1.2"
@@ -300,8 +420,7 @@ def test_search_for_directory_setup_read_setup_with_extras(provider, mocker):
     }
 
 
-@pytest.mark.skipif(not PY35, reason="AST parsing does not work for Python <3.4")
-def test_search_for_directory_setup_read_setup_with_no_dependencies(provider):
+def test_search_for_directory_setup_read_setup_with_no_dependencies(provider: Provider):
     dependency = DirectoryDependency(
         "demo",
         Path(__file__).parent.parent
@@ -312,7 +431,7 @@ def test_search_for_directory_setup_read_setup_with_no_dependencies(provider):
         / "no-dependencies",
     )
 
-    package = provider.search_for_directory(dependency)[0]
+    package = provider.search_for_direct_origin_dependency(dependency)
 
     assert package.name == "demo"
     assert package.version.text == "0.1.2"
@@ -320,13 +439,13 @@ def test_search_for_directory_setup_read_setup_with_no_dependencies(provider):
     assert package.extras == {}
 
 
-def test_search_for_directory_poetry(provider):
+def test_search_for_directory_poetry(provider: Provider):
     dependency = DirectoryDependency(
         "project-with-extras",
         Path(__file__).parent.parent / "fixtures" / "project_with_extras",
     )
 
-    package = provider.search_for_directory(dependency)[0]
+    package = provider.search_for_direct_origin_dependency(dependency)
 
     assert package.name == "project-with-extras"
     assert package.version.text == "1.2.3"
@@ -348,14 +467,14 @@ def test_search_for_directory_poetry(provider):
     }
 
 
-def test_search_for_directory_poetry_with_extras(provider):
+def test_search_for_directory_poetry_with_extras(provider: Provider):
     dependency = DirectoryDependency(
         "project-with-extras",
         Path(__file__).parent.parent / "fixtures" / "project_with_extras",
         extras=["extras_a"],
     )
 
-    package = provider.search_for_directory(dependency)[0]
+    package = provider.search_for_direct_origin_dependency(dependency)
 
     assert package.name == "project-with-extras"
     assert package.version.text == "1.2.3"
@@ -377,7 +496,7 @@ def test_search_for_directory_poetry_with_extras(provider):
     }
 
 
-def test_search_for_file_sdist(provider):
+def test_search_for_file_sdist(provider: Provider):
     dependency = FileDependency(
         "demo",
         Path(__file__).parent.parent
@@ -386,7 +505,7 @@ def test_search_for_file_sdist(provider):
         / "demo-0.1.0.tar.gz",
     )
 
-    package = provider.search_for_file(dependency)[0]
+    package = provider.search_for_direct_origin_dependency(dependency)
 
     assert package.name == "demo"
     assert package.version.text == "0.1.0"
@@ -408,7 +527,7 @@ def test_search_for_file_sdist(provider):
     }
 
 
-def test_search_for_file_sdist_with_extras(provider):
+def test_search_for_file_sdist_with_extras(provider: Provider):
     dependency = FileDependency(
         "demo",
         Path(__file__).parent.parent
@@ -418,7 +537,7 @@ def test_search_for_file_sdist_with_extras(provider):
         extras=["foo"],
     )
 
-    package = provider.search_for_file(dependency)[0]
+    package = provider.search_for_direct_origin_dependency(dependency)
 
     assert package.name == "demo"
     assert package.version.text == "0.1.0"
@@ -440,7 +559,7 @@ def test_search_for_file_sdist_with_extras(provider):
     }
 
 
-def test_search_for_file_wheel(provider):
+def test_search_for_file_wheel(provider: Provider):
     dependency = FileDependency(
         "demo",
         Path(__file__).parent.parent
@@ -449,7 +568,7 @@ def test_search_for_file_wheel(provider):
         / "demo-0.1.0-py2.py3-none-any.whl",
     )
 
-    package = provider.search_for_file(dependency)[0]
+    package = provider.search_for_direct_origin_dependency(dependency)
 
     assert package.name == "demo"
     assert package.version.text == "0.1.0"
@@ -471,7 +590,7 @@ def test_search_for_file_wheel(provider):
     }
 
 
-def test_search_for_file_wheel_with_extras(provider):
+def test_search_for_file_wheel_with_extras(provider: Provider):
     dependency = FileDependency(
         "demo",
         Path(__file__).parent.parent
@@ -481,7 +600,7 @@ def test_search_for_file_wheel_with_extras(provider):
         extras=["foo"],
     )
 
-    package = provider.search_for_file(dependency)[0]
+    package = provider.search_for_direct_origin_dependency(dependency)
 
     assert package.name == "demo"
     assert package.version.text == "0.1.0"
@@ -501,3 +620,71 @@ def test_search_for_file_wheel_with_extras(provider):
         "foo": [get_dependency("cleo")],
         "bar": [get_dependency("tomlkit")],
     }
+
+
+def test_complete_package_preserves_source_type(
+    provider: Provider, root: ProjectPackage
+) -> None:
+    fixtures = Path(__file__).parent.parent / "fixtures"
+    project_dir = fixtures.joinpath("with_conditional_path_deps")
+    for folder in ["demo_one", "demo_two"]:
+        path = (project_dir / folder).as_posix()
+        root.add_dependency(Factory.create_dependency("demo", {"path": path}))
+
+    complete_package = provider.complete_package(
+        DependencyPackage(root.to_dependency(), root)
+    )
+
+    requires = complete_package.package.all_requires
+    assert len(requires) == 2
+    assert {requires[0].source_url, requires[1].source_url} == {
+        project_dir.joinpath("demo_one").as_posix(),
+        project_dir.joinpath("demo_two").as_posix(),
+    }
+
+
+def test_complete_package_preserves_source_type_with_subdirectories(
+    provider: Provider, root: ProjectPackage
+) -> None:
+    dependency_one = Factory.create_dependency(
+        "one",
+        {
+            "git": "https://github.com/demo/subdirectories.git",
+            "subdirectory": "one",
+        },
+    )
+    dependency_one_copy = Factory.create_dependency(
+        "one",
+        {
+            "git": "https://github.com/demo/subdirectories.git",
+            "subdirectory": "one-copy",
+        },
+    )
+    dependency_two = Factory.create_dependency(
+        "two",
+        {"git": "https://github.com/demo/subdirectories.git", "subdirectory": "two"},
+    )
+
+    root.add_dependency(
+        Factory.create_dependency(
+            "one",
+            {
+                "git": "https://github.com/demo/subdirectories.git",
+                "subdirectory": "one",
+            },
+        )
+    )
+    root.add_dependency(dependency_one_copy)
+    root.add_dependency(dependency_two)
+
+    complete_package = provider.complete_package(
+        DependencyPackage(root.to_dependency(), root)
+    )
+
+    requires = complete_package.package.all_requires
+    assert len(requires) == 3
+    assert {r.to_pep_508() for r in requires} == {
+        dependency_one.to_pep_508(),
+        dependency_one_copy.to_pep_508(),
+        dependency_two.to_pep_508(),
+    }
diff --git a/vendor/poetry/tests/puzzle/test_solver.py b/vendor/poetry/tests/puzzle/test_solver.py
index c832ac6a..12a8fdb2 100644
--- a/vendor/poetry/tests/puzzle/test_solver.py
+++ b/vendor/poetry/tests/puzzle/test_solver.py
@@ -1,20 +1,29 @@
-import pytest
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Any
 
-from clikit.io import NullIO
+import pytest
 
-from poetry.core.packages import Package
-from poetry.core.packages import ProjectPackage
-from poetry.core.packages import dependency_from_pep_508
+from cleo.io.buffered_io import BufferedIO
+from cleo.io.null_io import NullIO
+from poetry.core.packages.dependency import Dependency
+from poetry.core.packages.package import Package
+from poetry.core.packages.project_package import ProjectPackage
+from poetry.core.packages.vcs_dependency import VCSDependency
 from poetry.core.version.markers import parse_marker
+
 from poetry.factory import Factory
+from poetry.packages import DependencyPackage
 from poetry.puzzle import Solver
 from poetry.puzzle.exceptions import SolverProblemError
 from poetry.puzzle.provider import Provider as BaseProvider
 from poetry.repositories.installed_repository import InstalledRepository
 from poetry.repositories.pool import Pool
 from poetry.repositories.repository import Repository
-from poetry.utils._compat import Path
 from poetry.utils.env import MockEnv
+from tests.helpers import MOCK_DEFAULT_GIT_REVISION
 from tests.helpers import get_dependency
 from tests.helpers import get_package
 from tests.repositories.test_legacy_repository import (
@@ -23,57 +32,85 @@
 from tests.repositories.test_pypi_repository import MockRepository as MockPyPIRepository
 
 
+if TYPE_CHECKING:
+    import httpretty
+
+    from poetry.installation.operations.operation import Operation
+    from poetry.puzzle.transaction import Transaction
+
+DEFAULT_SOURCE_REF = (
+    VCSDependency("poetry", "git", "git@github.com:python-poetry/poetry.git").branch
+    or "HEAD"
+)
+
+
 class Provider(BaseProvider):
-    def set_package_python_versions(self, python_versions):
+    def set_package_python_versions(self, python_versions: str) -> None:
         self._package.python_versions = python_versions
         self._python_constraint = self._package.python_constraint
 
 
 @pytest.fixture()
-def io():
+def io() -> NullIO:
     return NullIO()
 
 
 @pytest.fixture()
-def package():
+def package() -> ProjectPackage:
     return ProjectPackage("root", "1.0")
 
 
 @pytest.fixture()
-def installed():
+def installed() -> InstalledRepository:
     return InstalledRepository()
 
 
 @pytest.fixture()
-def locked():
-    return Repository()
+def locked() -> Repository:
+    return Repository("locked")
 
 
 @pytest.fixture()
-def repo():
-    return Repository()
+def repo() -> Repository:
+    return Repository("repo")
 
 
 @pytest.fixture()
-def pool(repo):
+def pool(repo: Repository) -> Pool:
     return Pool([repo])
 
 
 @pytest.fixture()
-def solver(package, pool, installed, locked, io):
+def solver(
+    package: ProjectPackage,
+    pool: Pool,
+    installed: InstalledRepository,
+    locked: Repository,
+    io: NullIO,
+) -> Solver:
     return Solver(
-        package, pool, installed, locked, io, provider=Provider(package, pool, io)
+        package,
+        pool,
+        installed.packages,
+        locked.packages,
+        io,
+        provider=Provider(package, pool, io, installed=installed.packages),
     )
 
 
-def check_solver_result(ops, expected):
+def check_solver_result(
+    transaction: Transaction,
+    expected: list[dict[str, Any]],
+    synchronize: bool = False,
+) -> list[Operation]:
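+    # solver.solve() now returns a Transaction rather than a list of
+    # operations; the concrete install/update/remove ops are derived here via
+    # calculate_operations(), so the tests share a single decoding path.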
     for e in expected:
         if "skipped" not in e:
             e["skipped"] = False
 
     result = []
+    ops = transaction.calculate_operations(synchronize=synchronize)
     for op in ops:
-        if "update" == op.job_type:
+        if op.job_type == "update":
             result.append(
                 {
                     "job": "update",
@@ -89,31 +126,37 @@ def check_solver_result(ops, expected):
 
             result.append({"job": job, "package": op.package, "skipped": op.skipped})
 
-    assert expected == result
+    assert result == expected
+
+    return ops
 
 
-def test_solver_install_single(solver, repo, package):
+def test_solver_install_single(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     package.add_dependency(Factory.create_dependency("A", "*"))
 
     package_a = get_package("A", "1.0")
     repo.add_package(package_a)
 
-    ops = solver.solve([get_dependency("A")])
+    transaction = solver.solve([get_dependency("A")])
 
-    check_solver_result(ops, [{"job": "install", "package": package_a}])
+    check_solver_result(transaction, [{"job": "install", "package": package_a}])
 
 
-def test_solver_remove_if_no_longer_locked(solver, locked, installed):
+def test_solver_remove_if_no_longer_locked(
+    solver: Solver, locked: Repository, installed: InstalledRepository
+):
     package_a = get_package("A", "1.0")
     installed.add_package(package_a)
     locked.add_package(package_a)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(ops, [{"job": "remove", "package": package_a}])
+    check_solver_result(transaction, [{"job": "remove", "package": package_a}])
 
 
-def test_remove_non_installed(solver, repo, locked):
+def test_remove_non_installed(solver: Solver, repo: Repository, locked: Repository):
     package_a = get_package("A", "1.0")
     locked.add_package(package_a)
 
@@ -121,12 +164,14 @@ def test_remove_non_installed(solver, repo, locked):
 
     request = []
 
-    ops = solver.solve(request)
+    transaction = solver.solve(request)
 
-    check_solver_result(ops, [{"job": "remove", "package": package_a, "skipped": True}])
+    check_solver_result(transaction, [])
 
 
-def test_install_non_existing_package_fail(solver, repo, package):
+def test_install_non_existing_package_fail(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     package.add_dependency(Factory.create_dependency("B", "1"))
 
     package_a = get_package("A", "1.0")
@@ -136,7 +181,37 @@ def test_install_non_existing_package_fail(solver, repo, package):
         solver.solve()
 
 
-def test_solver_with_deps(solver, repo, package):
+def test_install_unpublished_package_does_not_fail(
+    installed: InstalledRepository,
+    solver: Solver,
+    repo: Repository,
+    package: ProjectPackage,
+):
+    package.add_dependency(Factory.create_dependency("B", "1"))
+
+    package_a = get_package("A", "1.0")
+    package_b = get_package("B", "1")
+    package_b.add_dependency(Factory.create_dependency("A", "1.0"))
+
+    repo.add_package(package_a)
+    installed.add_package(package_b)
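+    # B is only present in the installed repository, not in the pool; the
+    # solver should fall back to the installed copy rather than failing.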
+
+    transaction = solver.solve()
+
+    check_solver_result(
+        transaction,
+        [
+            {"job": "install", "package": package_a},
+            {
+                "job": "install",
+                "package": package_b,
+                "skipped": True,  # already installed
+            },
+        ],
+    )
+
+
+def test_solver_with_deps(solver: Solver, repo: Repository, package: ProjectPackage):
     package.add_dependency(Factory.create_dependency("A", "*"))
 
     package_a = get_package("A", "1.0")
@@ -147,12 +222,12 @@ def test_solver_with_deps(solver, repo, package):
     repo.add_package(package_b)
     repo.add_package(new_package_b)
 
-    package_a.requires.append(get_dependency("B", "<1.1"))
+    package_a.add_dependency(get_dependency("B", "<1.1"))
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": package_b},
             {"job": "install", "package": package_a},
@@ -160,7 +235,9 @@ def test_solver_with_deps(solver, repo, package):
     )
 
 
-def test_install_honours_not_equal(solver, repo, package):
+def test_install_honours_not_equal(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     package.add_dependency(Factory.create_dependency("A", "*"))
 
     package_a = get_package("A", "1.0")
@@ -175,12 +252,12 @@ def test_install_honours_not_equal(solver, repo, package):
     repo.add_package(new_package_b12)
     repo.add_package(new_package_b13)
 
-    package_a.requires.append(get_dependency("B", "<=1.3,!=1.3,!=1.2"))
+    package_a.add_dependency(get_dependency("B", "<=1.3,!=1.3,!=1.2"))
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": new_package_b11},
             {"job": "install", "package": package_a},
@@ -188,7 +265,9 @@ def test_install_honours_not_equal(solver, repo, package):
     )
 
 
-def test_install_with_deps_in_order(solver, repo, package):
+def test_install_with_deps_in_order(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     package.add_dependency(Factory.create_dependency("A", "*"))
     package.add_dependency(Factory.create_dependency("B", "*"))
     package.add_dependency(Factory.create_dependency("C", "*"))
@@ -200,15 +279,15 @@ def test_install_with_deps_in_order(solver, repo, package):
     repo.add_package(package_b)
     repo.add_package(package_c)
 
-    package_b.requires.append(get_dependency("A", ">=1.0"))
-    package_b.requires.append(get_dependency("C", ">=1.0"))
+    package_b.add_dependency(get_dependency("A", ">=1.0"))
+    package_b.add_dependency(get_dependency("C", ">=1.0"))
 
-    package_c.requires.append(get_dependency("A", ">=1.0"))
+    package_c.add_dependency(get_dependency("A", ">=1.0"))
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": package_a},
             {"job": "install", "package": package_c},
@@ -217,21 +296,31 @@ def test_install_with_deps_in_order(solver, repo, package):
     )
 
 
-def test_install_installed(solver, repo, installed, package):
+def test_install_installed(
+    solver: Solver,
+    repo: Repository,
+    installed: InstalledRepository,
+    package: ProjectPackage,
+):
     package.add_dependency(Factory.create_dependency("A", "*"))
 
     package_a = get_package("A", "1.0")
     installed.add_package(package_a)
     repo.add_package(package_a)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops, [{"job": "install", "package": package_a, "skipped": True}]
+        transaction, [{"job": "install", "package": package_a, "skipped": True}]
     )
 
 
-def test_update_installed(solver, repo, installed, package):
+def test_update_installed(
+    solver: Solver,
+    repo: Repository,
+    installed: InstalledRepository,
+    package: ProjectPackage,
+):
     package.add_dependency(Factory.create_dependency("A", "*"))
 
     installed.add_package(get_package("A", "1.0"))
@@ -241,14 +330,20 @@ def test_update_installed(solver, repo, installed, package):
     repo.add_package(package_a)
     repo.add_package(new_package_a)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops, [{"job": "update", "from": package_a, "to": new_package_a}]
+        transaction, [{"job": "update", "from": package_a, "to": new_package_a}]
     )
 
 
-def test_update_with_use_latest(solver, repo, installed, package, locked):
+def test_update_with_use_latest(
+    solver: Solver,
+    repo: Repository,
+    installed: InstalledRepository,
+    package: ProjectPackage,
+    locked: Repository,
+):
     package.add_dependency(Factory.create_dependency("A", "*"))
     package.add_dependency(Factory.create_dependency("B", "*"))
 
@@ -266,10 +361,10 @@ def test_update_with_use_latest(solver, repo, installed, package, locked):
     locked.add_package(package_a)
     locked.add_package(package_b)
 
-    ops = solver.solve(use_latest=[package_b.name])
+    transaction = solver.solve(use_latest=[package_b.name])
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": package_a, "skipped": True},
             {"job": "install", "package": new_package_b},
@@ -277,9 +372,9 @@ def test_update_with_use_latest(solver, repo, installed, package, locked):
     )
 
 
-def test_solver_sets_categories(solver, repo, package):
+def test_solver_sets_groups(solver: Solver, repo: Repository, package: ProjectPackage):
     package.add_dependency(Factory.create_dependency("A", "*"))
-    package.add_dependency(Factory.create_dependency("B", "*", category="dev"))
+    package.add_dependency(Factory.create_dependency("B", "*", groups=["dev"]))
 
     package_a = get_package("A", "1.0")
     package_b = get_package("B", "1.0")
@@ -290,10 +385,10 @@ def test_solver_sets_categories(solver, repo, package):
     repo.add_package(package_b)
     repo.add_package(package_c)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [
             {"job": "install", "package": package_c},
             {"job": "install", "package": package_a},
@@ -306,7 +401,9 @@ def test_solver_sets_categories(solver, repo, package):
     assert ops[1].package.category == "main"
 
 
-def test_solver_respects_root_package_python_versions(solver, repo, package):
+def test_solver_respects_root_package_python_versions(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     solver.provider.set_package_python_versions("~3.4")
     package.add_dependency(Factory.create_dependency("A", "*"))
     package.add_dependency(Factory.create_dependency("B", "*"))
@@ -325,10 +422,10 @@ def test_solver_respects_root_package_python_versions(solver, repo, package):
     repo.add_package(package_c)
     repo.add_package(package_c11)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": package_c},
             {"job": "install", "package": package_a},
@@ -337,7 +434,9 @@ def test_solver_respects_root_package_python_versions(solver, repo, package):
     )
 
 
-def test_solver_fails_if_mismatch_root_python_versions(solver, repo, package):
+def test_solver_fails_if_mismatch_root_python_versions(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     solver.provider.set_package_python_versions("^3.4")
     package.add_dependency(Factory.create_dependency("A", "*"))
     package.add_dependency(Factory.create_dependency("B", "*"))
@@ -357,7 +456,33 @@ def test_solver_fails_if_mismatch_root_python_versions(solver, repo, package):
         solver.solve()
 
 
-def test_solver_solves_optional_and_compatible_packages(solver, repo, package):
+def test_solver_ignores_python_restricted_if_mismatch_root_package_python_versions(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
+    solver.provider.set_package_python_versions("~3.8")
+    package.add_dependency(
+        Factory.create_dependency("A", {"version": "1.0", "python": "<3.8"})
+    )
+    package.add_dependency(
+        Factory.create_dependency(
+            "B", {"version": "1.0", "markers": "python_version < '3.8'"}
+        )
+    )
+
+    package_a = get_package("A", "1.0")
+    package_b = get_package("B", "1.0")
+
+    repo.add_package(package_a)
+    repo.add_package(package_b)
+
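+    # Both dependencies are restricted to python < 3.8 while the root package
+    # requires ~3.8, so neither A nor B should be resolved.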
+    transaction = solver.solve()
+
+    check_solver_result(transaction, [])
+
+
+def test_solver_solves_optional_and_compatible_packages(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     solver.provider.set_package_python_versions("~3.4")
     package.extras["foo"] = [get_dependency("B")]
     package.add_dependency(
@@ -378,10 +503,10 @@ def test_solver_solves_optional_and_compatible_packages(solver, repo, package):
     repo.add_package(package_b)
     repo.add_package(package_c)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": package_c},
             {"job": "install", "package": package_a},
@@ -390,7 +515,9 @@ def test_solver_solves_optional_and_compatible_packages(solver, repo, package):
     )
 
 
-def test_solver_does_not_return_extras_if_not_requested(solver, repo, package):
+def test_solver_does_not_return_extras_if_not_requested(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     package.add_dependency(Factory.create_dependency("A", "*"))
     package.add_dependency(Factory.create_dependency("B", "*"))
 
@@ -404,10 +531,10 @@ def test_solver_does_not_return_extras_if_not_requested(solver, repo, package):
     repo.add_package(package_b)
     repo.add_package(package_c)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": package_a},
             {"job": "install", "package": package_b},
@@ -415,7 +542,9 @@ def test_solver_does_not_return_extras_if_not_requested(solver, repo, package):
     )
 
 
-def test_solver_returns_extras_if_requested(solver, repo, package):
+def test_solver_returns_extras_if_requested(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     package.add_dependency(Factory.create_dependency("A", "*"))
     package.add_dependency(
         Factory.create_dependency("B", {"version": "*", "extras": ["foo"]})
@@ -428,16 +557,16 @@ def test_solver_returns_extras_if_requested(solver, repo, package):
     dep = get_dependency("C", "^1.0", optional=True)
     dep.marker = parse_marker("extra == 'foo'")
     package_b.extras = {"foo": [dep]}
-    package_b.requires.append(dep)
+    package_b.add_dependency(dep)
 
     repo.add_package(package_a)
     repo.add_package(package_b)
     repo.add_package(package_c)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [
             {"job": "install", "package": package_c},
             {"job": "install", "package": package_a},
@@ -449,8 +578,13 @@ def test_solver_returns_extras_if_requested(solver, repo, package):
     assert ops[0].package.marker.is_any()
 
 
-@pytest.mark.parametrize(("enabled_extra",), [("one",), ("two",), (None,)])
-def test_solver_returns_extras_only_requested(solver, repo, package, enabled_extra):
+@pytest.mark.parametrize("enabled_extra", ["one", "two", None])
+def test_solver_returns_extras_only_requested(
+    solver: Solver,
+    repo: Repository,
+    package: ProjectPackage,
+    enabled_extra: str | None,
+):
     extras = [enabled_extra] if enabled_extra is not None else []
 
     package.add_dependency(Factory.create_dependency("A", "*"))
@@ -473,15 +607,15 @@ def test_solver_returns_extras_only_requested(solver, repo, package, enabled_ext
 
     package_b.extras = {"one": [dep10], "two": [dep20]}
 
-    package_b.requires.append(dep10)
-    package_b.requires.append(dep20)
+    package_b.add_dependency(dep10)
+    package_b.add_dependency(dep20)
 
     repo.add_package(package_a)
     repo.add_package(package_b)
     repo.add_package(package_c10)
     repo.add_package(package_c20)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     expected = [
         {"job": "install", "package": package_a},
@@ -497,17 +631,21 @@ def test_solver_returns_extras_only_requested(solver, repo, package, enabled_ext
             },
         )
 
-    check_solver_result(
-        ops, expected,
+    ops = check_solver_result(
+        transaction,
+        expected,
     )
 
     assert ops[-1].package.marker.is_any()
     assert ops[0].package.marker.is_any()
 
 
-@pytest.mark.parametrize(("enabled_extra",), [("one",), ("two",), (None,)])
+@pytest.mark.parametrize("enabled_extra", ["one", "two", None])
 def test_solver_returns_extras_when_multiple_extras_use_same_dependency(
-    solver, repo, package, enabled_extra
+    solver: Solver,
+    repo: Repository,
+    package: ProjectPackage,
+    enabled_extra: str | None,
 ):
     package.add_dependency(Factory.create_dependency("A", "*"))
 
@@ -521,7 +659,7 @@ def test_solver_returns_extras_when_multiple_extras_use_same_dependency(
 
     package_b.extras = {"one": [dep], "two": [dep]}
 
-    package_b.requires.append(dep)
+    package_b.add_dependency(dep)
 
     extras = [enabled_extra] if enabled_extra is not None else []
     package_a.add_dependency(
@@ -532,7 +670,7 @@ def test_solver_returns_extras_when_multiple_extras_use_same_dependency(
     repo.add_package(package_b)
     repo.add_package(package_c)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     expected = [
         {"job": "install", "package": package_b},
@@ -542,17 +680,21 @@ def test_solver_returns_extras_when_multiple_extras_use_same_dependency(
     if enabled_extra is not None:
         expected.insert(0, {"job": "install", "package": package_c})
 
-    check_solver_result(
-        ops, expected,
+    ops = check_solver_result(
+        transaction,
+        expected,
     )
 
     assert ops[-1].package.marker.is_any()
     assert ops[0].package.marker.is_any()
 
 
-@pytest.mark.parametrize(("enabled_extra",), [("one",), ("two",), (None,)])
+@pytest.mark.parametrize("enabled_extra", ["one", "two", None])
 def test_solver_returns_extras_only_requested_nested(
-    solver, repo, package, enabled_extra
+    solver: Solver,
+    repo: Repository,
+    package: ProjectPackage,
+    enabled_extra: str | None,
 ):
     package.add_dependency(Factory.create_dependency("A", "*"))
 
@@ -571,8 +713,8 @@ def test_solver_returns_extras_only_requested_nested(
 
     package_b.extras = {"one": [dep10], "two": [dep20]}
 
-    package_b.requires.append(dep10)
-    package_b.requires.append(dep20)
+    package_b.add_dependency(dep10)
+    package_b.add_dependency(dep20)
 
     extras = [enabled_extra] if enabled_extra is not None else []
     package_a.add_dependency(
@@ -584,7 +726,7 @@ def test_solver_returns_extras_only_requested_nested(
     repo.add_package(package_c10)
     repo.add_package(package_c20)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     expected = [
         {"job": "install", "package": package_b},
@@ -600,13 +742,94 @@ def test_solver_returns_extras_only_requested_nested(
             },
         )
 
-    check_solver_result(ops, expected)
+    ops = check_solver_result(transaction, expected)
 
     assert ops[-1].package.marker.is_any()
     assert ops[0].package.marker.is_any()
 
 
-def test_solver_returns_prereleases_if_requested(solver, repo, package):
+def test_solver_finds_extras_next_to_non_extras(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
+    # Root depends on A[foo]
+    package.add_dependency(
+        Factory.create_dependency("A", {"version": "*", "extras": ["foo"]})
+    )
+
+    package_a = get_package("A", "1.0")
+    package_b = get_package("B", "1.0")
+    package_c = get_package("C", "1.0")
+    package_d = get_package("D", "1.0")
+
+    # A depends on B; A[foo] depends on B[bar].
+    package_a.add_dependency(Factory.create_dependency("B", "*"))
+    package_a.add_dependency(
+        Factory.create_dependency(
+            "B", {"version": "*", "extras": ["bar"], "markers": "extra == 'foo'"}
+        )
+    )
+    package_a.extras = {"foo": [get_dependency("B", "*")]}
+
+    # B depends on C; B[bar] depends on D.
+    package_b.add_dependency(Factory.create_dependency("C", "*"))
+    package_b.add_dependency(
+        Factory.create_dependency("D", {"version": "*", "markers": 'extra == "bar"'})
+    )
+    package_b.extras = {"bar": [get_dependency("D", "*")]}
+
+    repo.add_package(package_a)
+    repo.add_package(package_b)
+    repo.add_package(package_c)
+    repo.add_package(package_d)
+
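+    # The extra chain A[foo] -> B[bar] -> D has to be resolved in addition to
+    # the plain chain A -> B -> C.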
+    transaction = solver.solve()
+
+    check_solver_result(
+        transaction,
+        [
+            {"job": "install", "package": package_c},
+            {"job": "install", "package": package_d},
+            {"job": "install", "package": package_b},
+            {"job": "install", "package": package_a},
+        ],
+    )
+
+
+def test_solver_merge_extras_into_base_package_multiple_repos_fixes_5727(
+    solver: Solver, repo: Repository, pool: Pool, package: ProjectPackage
+):
+    package.add_dependency(
+        Factory.create_dependency("A", {"version": "*", "source": "legacy"})
+    )
+    package.add_dependency(Factory.create_dependency("B", {"version": "*"}))
+
+    package_a = get_package("A", "1.0")
+    package_a.extras = {"foo": []}
+
+    repo.add_package(package_a)
+
+    package_b = Package("B", "1.0", source_type="legacy")
+    package_b.add_dependency(package_a.with_features(["foo"]).to_dependency())
+
+    package_a = Package("A", "1.0", source_type="legacy")
+    package_a.extras = {"foo": []}
+
+    repo = Repository("legacy")
+    repo.add_package(package_a)
+    repo.add_package(package_b)
+
+    pool.add_repository(repo)
+
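+    # Both repositories provide a package "A" with a "foo" extra. Merging the
+    # extra package into its base package must not leave A depending on
+    # itself (regression test for issue 5727).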
+    transaction = solver.solve()
+
+    ops = transaction.calculate_operations(synchronize=True)
+
+    assert len(ops[0].package.requires) == 0, "a should not require itself"
+
+
+def test_solver_returns_prereleases_if_requested(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     package.add_dependency(Factory.create_dependency("A", "*"))
     package.add_dependency(Factory.create_dependency("B", "*"))
     package.add_dependency(
@@ -623,10 +846,10 @@ def test_solver_returns_prereleases_if_requested(solver, repo, package):
     repo.add_package(package_c)
     repo.add_package(package_c_dev)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": package_a},
             {"job": "install", "package": package_b},
@@ -635,7 +858,9 @@ def test_solver_returns_prereleases_if_requested(solver, repo, package):
     )
 
 
-def test_solver_does_not_return_prereleases_if_not_requested(solver, repo, package):
+def test_solver_does_not_return_prereleases_if_not_requested(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     package.add_dependency(Factory.create_dependency("A", "*"))
     package.add_dependency(Factory.create_dependency("B", "*"))
     package.add_dependency(Factory.create_dependency("C", "*"))
@@ -650,10 +875,10 @@ def test_solver_does_not_return_prereleases_if_not_requested(solver, repo, packa
     repo.add_package(package_c)
     repo.add_package(package_c_dev)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": package_a},
             {"job": "install", "package": package_b},
@@ -662,7 +887,9 @@ def test_solver_does_not_return_prereleases_if_not_requested(solver, repo, packa
     )
 
 
-def test_solver_sub_dependencies_with_requirements(solver, repo, package):
+def test_solver_sub_dependencies_with_requirements(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     package.add_dependency(Factory.create_dependency("A", "*"))
     package.add_dependency(Factory.create_dependency("B", "*"))
 
@@ -682,10 +909,10 @@ def test_solver_sub_dependencies_with_requirements(solver, repo, package):
     repo.add_package(package_c)
     repo.add_package(package_d)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [
             {"job": "install", "package": package_d},
             {"job": "install", "package": package_c},
@@ -698,7 +925,9 @@ def test_solver_sub_dependencies_with_requirements(solver, repo, package):
     assert op.package.marker.is_any()
 
 
-def test_solver_sub_dependencies_with_requirements_complex(solver, repo, package):
+def test_solver_sub_dependencies_with_requirements_complex(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     package.add_dependency(
         Factory.create_dependency("A", {"version": "^1.0", "python": "<5.0"})
     )
@@ -740,10 +969,10 @@ def test_solver_sub_dependencies_with_requirements_complex(solver, repo, package
     repo.add_package(package_e)
     repo.add_package(package_f)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": package_e},
             {"job": "install", "package": package_f},
@@ -756,7 +985,7 @@ def test_solver_sub_dependencies_with_requirements_complex(solver, repo, package
 
 
 def test_solver_sub_dependencies_with_not_supported_python_version(
-    solver, repo, package
+    solver: Solver, repo: Repository, package: Package
 ):
     solver.provider.set_package_python_versions("^3.5")
     package.add_dependency(Factory.create_dependency("A", "*"))
@@ -772,19 +1001,63 @@ def test_solver_sub_dependencies_with_not_supported_python_version(
     repo.add_package(package_a)
     repo.add_package(package_b)
 
-    ops = solver.solve()
+    transaction = solver.solve()
+
+    check_solver_result(transaction, [{"job": "install", "package": package_a}])
 
-    check_solver_result(ops, [{"job": "install", "package": package_a}])
+
+def test_solver_sub_dependencies_with_not_supported_python_version_transitive(
+    solver: Solver, repo: Repository, package: Package
+):
+    solver.provider.set_package_python_versions("^3.4")
+
+    package.add_dependency(
+        Factory.create_dependency("httpx", {"version": "^0.17.1", "python": "^3.6"})
+    )
+
+    httpx = get_package("httpx", "0.17.1")
+    httpx.python_versions = ">=3.6"
+
+    httpcore = get_package("httpcore", "0.12.3")
+    httpcore.python_versions = ">=3.6"
+
+    sniffio_1_1_0 = get_package("sniffio", "1.1.0")
+    sniffio_1_1_0.python_versions = ">=3.5"
+
+    sniffio = get_package("sniffio", "1.2.0")
+    sniffio.python_versions = ">=3.5"
+
+    httpx.add_dependency(
+        Factory.create_dependency("httpcore", {"version": ">=0.12.1,<0.13"})
+    )
+    httpx.add_dependency(Factory.create_dependency("sniffio", {"version": "*"}))
+    httpcore.add_dependency(Factory.create_dependency("sniffio", {"version": "==1.*"}))
+
+    repo.add_package(httpx)
+    repo.add_package(httpcore)
+    repo.add_package(sniffio)
+    repo.add_package(sniffio_1_1_0)
+
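+    # The root package allows python ^3.4, but the httpx subtree is only
+    # required (and available) for python >= 3.6; it must still resolve for
+    # that narrower range instead of conflicting.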
+    transaction = solver.solve()
+
+    check_solver_result(
+        transaction,
+        [
+            {"job": "install", "package": sniffio, "skipped": False},
+            {"job": "install", "package": httpcore, "skipped": False},
+            {"job": "install", "package": httpx, "skipped": False},
+        ],
+    )
 
 
 def test_solver_with_dependency_in_both_main_and_dev_dependencies(
-    solver, repo, package
+    solver: Solver, repo: Repository, package: Package
 ):
     solver.provider.set_package_python_versions("^3.5")
     package.add_dependency(Factory.create_dependency("A", "*"))
     package.add_dependency(
         Factory.create_dependency(
-            "A", {"version": "*", "extras": ["foo"]}, category="dev"
+            "A", {"version": "*", "extras": ["foo"]}, groups=["dev"]
         )
     )
 
@@ -807,10 +1080,10 @@ def test_solver_with_dependency_in_both_main_and_dev_dependencies(
     repo.add_package(package_c)
     repo.add_package(package_d)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [
             {"job": "install", "package": package_d},
             {"job": "install", "package": package_b},
@@ -819,25 +1092,25 @@ def test_solver_with_dependency_in_both_main_and_dev_dependencies(
         ],
     )
 
+    d = ops[0].package
     b = ops[1].package
     c = ops[2].package
-    d = ops[0].package
     a = ops[3].package
 
     assert d.category == "dev"
-    assert c.category == "dev"
     assert b.category == "main"
+    assert c.category == "dev"
     assert a.category == "main"
 
 
-def test_solver_with_dependency_in_both_main_and_dev_dependencies_with_one_more_dependent(
-    solver, repo, package
+def test_solver_with_dependency_in_both_main_and_dev_dependencies_with_one_more_dependent(  # noqa: E501
+    solver: Solver, repo: Repository, package: Package
 ):
     package.add_dependency(Factory.create_dependency("A", "*"))
     package.add_dependency(Factory.create_dependency("E", "*"))
     package.add_dependency(
         Factory.create_dependency(
-            "A", {"version": "*", "extras": ["foo"]}, category="dev"
+            "A", {"version": "*", "extras": ["foo"]}, groups=["dev"]
         )
     )
 
@@ -864,10 +1137,10 @@ def test_solver_with_dependency_in_both_main_and_dev_dependencies_with_one_more_
     repo.add_package(package_d)
     repo.add_package(package_e)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [
             {"job": "install", "package": package_b},
             {"job": "install", "package": package_d},
@@ -878,19 +1151,21 @@ def test_solver_with_dependency_in_both_main_and_dev_dependencies_with_one_more_
     )
 
     b = ops[0].package
-    c = ops[3].package
     d = ops[1].package
     a = ops[2].package
+    c = ops[3].package
     e = ops[4].package
 
-    assert d.category == "dev"
-    assert c.category == "dev"
     assert b.category == "main"
+    assert d.category == "dev"
     assert a.category == "main"
+    assert c.category == "dev"
     assert e.category == "main"
 
 
-def test_solver_with_dependency_and_prerelease_sub_dependencies(solver, repo, package):
+def test_solver_with_dependency_and_prerelease_sub_dependencies(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     package.add_dependency(Factory.create_dependency("A", "*"))
 
     package_a = get_package("A", "1.0")
@@ -904,10 +1179,10 @@ def test_solver_with_dependency_and_prerelease_sub_dependencies(solver, repo, pa
     package_b = get_package("B", "1.0.0.dev4")
     repo.add_package(package_b)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": package_b},
             {"job": "install", "package": package_a},
@@ -915,7 +1190,9 @@ def test_solver_with_dependency_and_prerelease_sub_dependencies(solver, repo, pa
     )
 
 
-def test_solver_circular_dependency(solver, repo, package):
+def test_solver_circular_dependency(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     package.add_dependency(Factory.create_dependency("A", "*"))
 
     package_a = get_package("A", "1.0")
@@ -931,10 +1208,10 @@ def test_solver_circular_dependency(solver, repo, package):
     repo.add_package(package_b)
     repo.add_package(package_c)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [
             {"job": "install", "package": package_c},
             {"job": "install", "package": package_b},
@@ -942,10 +1219,12 @@ def test_solver_circular_dependency(solver, repo, package):
         ],
     )
 
-    assert "main" == ops[0].package.category
+    assert ops[0].package.category == "main"
 
 
-def test_solver_circular_dependency_chain(solver, repo, package):
+def test_solver_circular_dependency_chain(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     package.add_dependency(Factory.create_dependency("A", "*"))
 
     package_a = get_package("A", "1.0")
@@ -965,10 +1244,10 @@ def test_solver_circular_dependency_chain(solver, repo, package):
     repo.add_package(package_c)
     repo.add_package(package_d)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [
             {"job": "install", "package": package_d},
             {"job": "install", "package": package_c},
@@ -977,10 +1256,12 @@ def test_solver_circular_dependency_chain(solver, repo, package):
         ],
     )
 
-    assert "main" == ops[0].package.category
+    assert ops[0].package.category == "main"
 
 
-def test_solver_dense_dependencies(solver, repo, package):
+def test_solver_dense_dependencies(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     # The root package depends on packages A0...An-1,
     # and package Ai depends on packages A0...Ai-1.
     # This graph is a transitive tournament
@@ -994,14 +1275,16 @@ def test_solver_dense_dependencies(solver, repo, package):
         for j in range(i):
             package_ai.add_dependency(Factory.create_dependency("a" + str(j), "^1.0"))
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops, [{"job": "install", "package": packages[i]} for i in range(n)]
+        transaction, [{"job": "install", "package": packages[i]} for i in range(n)]
     )
 
 
-def test_solver_duplicate_dependencies_same_constraint(solver, repo, package):
+def test_solver_duplicate_dependencies_same_constraint(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     package.add_dependency(Factory.create_dependency("A", "*"))
 
     package_a = get_package("A", "1.0")
@@ -1017,10 +1300,10 @@ def test_solver_duplicate_dependencies_same_constraint(solver, repo, package):
     repo.add_package(package_a)
     repo.add_package(package_b)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": package_b},
             {"job": "install", "package": package_a},
@@ -1028,7 +1311,9 @@ def test_solver_duplicate_dependencies_same_constraint(solver, repo, package):
     )
 
 
-def test_solver_duplicate_dependencies_different_constraints(solver, repo, package):
+def test_solver_duplicate_dependencies_different_constraints(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     package.add_dependency(Factory.create_dependency("A", "*"))
 
     package_a = get_package("A", "1.0")
@@ -1046,10 +1331,10 @@ def test_solver_duplicate_dependencies_different_constraints(solver, repo, packa
     repo.add_package(package_b10)
     repo.add_package(package_b20)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": package_b10},
             {"job": "install", "package": package_b20},
@@ -1059,7 +1344,7 @@ def test_solver_duplicate_dependencies_different_constraints(solver, repo, packa
 
 
 def test_solver_duplicate_dependencies_different_constraints_same_requirements(
-    solver, repo, package
+    solver: Solver, repo: Repository, package: Package
 ):
     package.add_dependency(Factory.create_dependency("A", "*"))
 
@@ -1085,7 +1370,264 @@ def test_solver_duplicate_dependencies_different_constraints_same_requirements(
     assert str(e.value) == expected
 
 
-def test_solver_duplicate_dependencies_sub_dependencies(solver, repo, package):
+def test_solver_duplicate_dependencies_different_constraints_merge_by_marker(
+    solver: Solver, repo: Repository, package: Package
+):
+    package.add_dependency(Factory.create_dependency("A", "*"))
+
+    package_a = get_package("A", "1.0")
+    package_a.add_dependency(
+        Factory.create_dependency("B", {"version": "^1.0", "python": "<3.4"})
+    )
+    package_a.add_dependency(
+        Factory.create_dependency("B", {"version": "^2.0", "python": ">=3.4"})
+    )
+    package_a.add_dependency(
+        Factory.create_dependency("B", {"version": "!=1.1", "python": "<3.4"})
+    )
+
+    package_b10 = get_package("B", "1.0")
+    package_b11 = get_package("B", "1.1")
+    package_b20 = get_package("B", "2.0")
+
+    repo.add_package(package_a)
+    repo.add_package(package_b10)
+    repo.add_package(package_b11)
+    repo.add_package(package_b20)
+
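+    # For python < 3.4 the constraints ^1.0 and !=1.1 share a marker and are
+    # merged, selecting B 1.0 there, while python >= 3.4 gets B 2.0.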
+    transaction = solver.solve()
+
+    check_solver_result(
+        transaction,
+        [
+            {"job": "install", "package": package_b10},
+            {"job": "install", "package": package_b20},
+            {"job": "install", "package": package_a},
+        ],
+    )
+
+
+def test_solver_duplicate_dependencies_different_sources_types_are_preserved(
+    solver: Solver, repo: Repository, package: Package
+):
+    pendulum = get_package("pendulum", "2.0.3")
+    repo.add_package(pendulum)
+    repo.add_package(get_package("cleo", "1.0.0"))
+    repo.add_package(get_package("demo", "0.1.0"))
+
+    dependency_pypi = Factory.create_dependency("demo", ">=0.1.0")
+    dependency_git = Factory.create_dependency(
+        "demo", {"git": "https://github.com/demo/demo.git"}, groups=["dev"]
+    )
+    package.add_dependency(dependency_git)
+    package.add_dependency(dependency_pypi)
+
+    demo = Package(
+        "demo",
+        "0.1.2",
+        source_type="git",
+        source_url="https://github.com/demo/demo.git",
+        source_reference=DEFAULT_SOURCE_REF,
+        source_resolved_reference=MOCK_DEFAULT_GIT_REVISION,
+    )
+
+    transaction = solver.solve()
+
+    ops = check_solver_result(
+        transaction,
+        [{"job": "install", "package": pendulum}, {"job": "install", "package": demo}],
+    )
+
+    op = ops[1]
+
+    assert op.package.source_type == demo.source_type
+    assert op.package.source_reference == DEFAULT_SOURCE_REF
+    assert op.package.source_resolved_reference.startswith(
+        demo.source_resolved_reference
+    )
+
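+    # Completing the root package must preserve both requirements for "demo":
+    # the PyPI constraint and the git source from the "dev" group.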
+    complete_package = solver.provider.complete_package(
+        DependencyPackage(package.to_dependency(), package)
+    )
+
+    assert len(complete_package.package.all_requires) == 2
+
+    pypi, git = complete_package.package.all_requires
+
+    assert isinstance(pypi, Dependency)
+    assert pypi == dependency_pypi
+
+    assert isinstance(git, VCSDependency)
+    assert git.constraint
+    assert git.constraint != dependency_git.constraint
+    assert (git.name, git.source_type, git.source_url, git.source_reference) == (
+        dependency_git.name,
+        dependency_git.source_type,
+        dependency_git.source_url,
+        DEFAULT_SOURCE_REF,
+    )
+
+
+def test_solver_duplicate_dependencies_different_constraints_merge_no_markers(
+    solver: Solver, repo: Repository, package: Package
+):
+    package.add_dependency(Factory.create_dependency("A", "*"))
+    package.add_dependency(Factory.create_dependency("B", "1.0"))
+
+    package_a10 = get_package("A", "1.0")
+    package_a10.add_dependency(Factory.create_dependency("C", {"version": "^1.0"}))
+
+    package_a20 = get_package("A", "2.0")
+    package_a20.add_dependency(
+        Factory.create_dependency("C", {"version": "^2.0"})  # incompatible with B
+    )
+    package_a20.add_dependency(
+        Factory.create_dependency("C", {"version": "!=2.1", "python": "3.10"})
+    )
+
+    package_b = get_package("B", "1.0")
+    package_b.add_dependency(Factory.create_dependency("C", {"version": "<2.0"}))
+
+    package_c10 = get_package("C", "1.0")
+    package_c20 = get_package("C", "2.0")
+    package_c21 = get_package("C", "2.1")
+
+    repo.add_package(package_a10)
+    repo.add_package(package_a20)
+    repo.add_package(package_b)
+    repo.add_package(package_c10)
+    repo.add_package(package_c20)
+    repo.add_package(package_c21)
+
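+    # A 2.0 requires C ^2.0, which conflicts with B's C <2.0 requirement, so
+    # the solver has to fall back to A 1.0 together with C 1.0.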
+    transaction = solver.solve()
+
+    check_solver_result(
+        transaction,
+        [
+            {"job": "install", "package": package_c10},
+            {"job": "install", "package": package_a10},  # only a10, not a20
+            {"job": "install", "package": package_b},
+        ],
+    )
+
+
+def test_solver_duplicate_dependencies_ignore_overrides_with_empty_marker_intersection(
+    solver: Solver, repo: Repository, package: Package
+):
+    """
+    Distinct requirements per marker:
+    * Python 2.7: A (which requires B) and B
+    * Python 3.6: same as Python 2.7 but with different versions
+    * Python 3.7: only A
+    * Python 3.8: only B
+    """
+    package.add_dependency(
+        Factory.create_dependency("A", {"version": "1.0", "python": "~2.7"})
+    )
+    package.add_dependency(
+        Factory.create_dependency("A", {"version": "2.0", "python": "~3.6"})
+    )
+    package.add_dependency(
+        Factory.create_dependency("A", {"version": "3.0", "python": "~3.7"})
+    )
+    package.add_dependency(
+        Factory.create_dependency("B", {"version": "1.0", "python": "~2.7"})
+    )
+    package.add_dependency(
+        Factory.create_dependency("B", {"version": "2.0", "python": "~3.6"})
+    )
+    package.add_dependency(
+        Factory.create_dependency("B", {"version": "3.0", "python": "~3.8"})
+    )
+
+    package_a10 = get_package("A", "1.0")
+    package_a10.add_dependency(
+        Factory.create_dependency("B", {"version": "^1.0", "python": "~2.7"})
+    )
+
+    package_a20 = get_package("A", "2.0")
+    package_a20.add_dependency(
+        Factory.create_dependency("B", {"version": "^2.0", "python": "~3.6"})
+    )
+
+    package_a30 = get_package("A", "3.0")  # no dependency on B
+
+    package_b10 = get_package("B", "1.0")
+    package_b11 = get_package("B", "1.1")
+    package_b20 = get_package("B", "2.0")
+    package_b21 = get_package("B", "2.1")
+    package_b30 = get_package("B", "3.0")
+
+    repo.add_package(package_a10)
+    repo.add_package(package_a20)
+    repo.add_package(package_a30)
+    repo.add_package(package_b10)
+    repo.add_package(package_b11)
+    repo.add_package(package_b20)
+    repo.add_package(package_b21)
+    repo.add_package(package_b30)
+
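+    # Override combinations whose markers cannot intersect (e.g. a B
+    # requirement for python ~2.7 while solving A for ~3.6) must be ignored
+    # rather than produce conflicts.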
+    transaction = solver.solve()
+
+    check_solver_result(
+        transaction,
+        [
+            {"job": "install", "package": package_b10},
+            {"job": "install", "package": package_b20},
+            {"job": "install", "package": package_a10},
+            {"job": "install", "package": package_a20},
+            {"job": "install", "package": package_a30},
+            {"job": "install", "package": package_b30},
+        ],
+    )
+
+
+def test_solver_duplicate_dependencies_ignore_overrides_with_empty_marker_intersection2(
+    solver: Solver, repo: Repository, package: Package
+):
+    """
+    Empty intersection between top-level dependency and transitive dependency.
+    """
+    package.add_dependency(Factory.create_dependency("A", {"version": "1.0"}))
+    package.add_dependency(
+        Factory.create_dependency("B", {"version": ">=2.0", "python": ">=3.7"})
+    )
+    package.add_dependency(
+        Factory.create_dependency("B", {"version": "*", "python": "<3.7"})
+    )
+
+    package_a10 = get_package("A", "1.0")
+    package_a10.add_dependency(
+        Factory.create_dependency("B", {"version": ">=2.0", "python": ">=3.7"})
+    )
+    package_a10.add_dependency(
+        Factory.create_dependency("B", {"version": "*", "python": "<3.7"})
+    )
+
+    package_b10 = get_package("B", "1.0")
+    package_b10.python_versions = "<3.7"
+    package_b20 = get_package("B", "2.0")
+    package_b20.python_versions = ">=3.7"
+
+    repo.add_package(package_a10)
+    repo.add_package(package_b10)
+    repo.add_package(package_b20)
+
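+    # B 1.0 only supports python < 3.7 and B 2.0 only >= 3.7, matching the
+    # two marker-separated requirements, so both versions are installed.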
+    transaction = solver.solve()
+
+    check_solver_result(
+        transaction,
+        [
+            {"job": "install", "package": package_b10},
+            {"job": "install", "package": package_b20},
+            {"job": "install", "package": package_a10},
+        ],
+    )
+
+
+def test_solver_duplicate_dependencies_sub_dependencies(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     package.add_dependency(Factory.create_dependency("A", "*"))
 
     package_a = get_package("A", "1.0")
@@ -1110,10 +1652,10 @@ def test_solver_duplicate_dependencies_sub_dependencies(solver, repo, package):
     repo.add_package(package_c12)
     repo.add_package(package_c15)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": package_c12},
             {"job": "install", "package": package_c15},
@@ -1124,7 +1666,66 @@ def test_solver_duplicate_dependencies_sub_dependencies(solver, repo, package):
     )
 
 
-def test_solver_fails_if_dependency_name_does_not_match_package(solver, repo, package):
+def test_duplicate_path_dependencies(solver: Solver, package: ProjectPackage) -> None:
+    solver.provider.set_package_python_versions("^3.7")
+    fixtures = Path(__file__).parent.parent / "fixtures"
+    project_dir = fixtures / "with_conditional_path_deps"
+
+    path1 = (project_dir / "demo_one").as_posix()
+    demo1 = Package("demo", "1.2.3", source_type="directory", source_url=path1)
+    package.add_dependency(
+        Factory.create_dependency(
+            "demo", {"path": path1, "markers": "sys_platform == 'linux'"}
+        )
+    )
+
+    path2 = (project_dir / "demo_two").as_posix()
+    demo2 = Package("demo", "1.2.3", source_type="directory", source_url=path2)
+    package.add_dependency(
+        Factory.create_dependency(
+            "demo", {"path": path2, "markers": "sys_platform == 'win32'"}
+        )
+    )
+
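+    # The two path dependencies point to different directories and carry
+    # mutually exclusive sys_platform markers, so both are resolved.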
+    transaction = solver.solve()
+
+    check_solver_result(
+        transaction,
+        [
+            {"job": "install", "package": demo1},
+            {"job": "install", "package": demo2},
+        ],
+    )
+
+
+def test_duplicate_path_dependencies_same_path(
+    solver: Solver, package: ProjectPackage
+) -> None:
+    solver.provider.set_package_python_versions("^3.7")
+    fixtures = Path(__file__).parent.parent / "fixtures"
+    project_dir = fixtures / "with_conditional_path_deps"
+
+    path1 = (project_dir / "demo_one").as_posix()
+    demo1 = Package("demo", "1.2.3", source_type="directory", source_url=path1)
+    package.add_dependency(
+        Factory.create_dependency(
+            "demo", {"path": path1, "markers": "sys_platform == 'linux'"}
+        )
+    )
+    package.add_dependency(
+        Factory.create_dependency(
+            "demo", {"path": path1, "markers": "sys_platform == 'win32'"}
+        )
+    )
+
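+    # Both requirements share the same path and differ only in their markers,
+    # so they collapse into a single install of the demo package.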
+    transaction = solver.solve()
+
+    check_solver_result(transaction, [{"job": "install", "package": demo1}])
+
+
+def test_solver_fails_if_dependency_name_does_not_match_package(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     package.add_dependency(
         Factory.create_dependency(
             "my-demo", {"git": "https://github.com/demo/demo.git"}
@@ -1136,7 +1737,7 @@ def test_solver_fails_if_dependency_name_does_not_match_package(solver, repo, pa
 
 
 def test_solver_does_not_get_stuck_in_recursion_on_circular_dependency(
-    solver, repo, package
+    solver: Solver, repo: Repository, package: Package
 ):
     package_a = get_package("A", "1.0")
     package_a.add_dependency(Factory.create_dependency("B", "^1.0"))
@@ -1151,10 +1752,10 @@ def test_solver_does_not_get_stuck_in_recursion_on_circular_dependency(
 
     package.add_dependency(Factory.create_dependency("A", "^1.0"))
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": package_c},
             {"job": "install", "package": package_b},
@@ -1163,7 +1764,9 @@ def test_solver_does_not_get_stuck_in_recursion_on_circular_dependency(
     )
 
 
-def test_solver_can_resolve_git_dependencies(solver, repo, package):
+def test_solver_can_resolve_git_dependencies(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     pendulum = get_package("pendulum", "2.0.3")
     cleo = get_package("cleo", "1.0.0")
     repo.add_package(pendulum)
@@ -1173,30 +1776,32 @@ def test_solver_can_resolve_git_dependencies(solver, repo, package):
         Factory.create_dependency("demo", {"git": "https://github.com/demo/demo.git"})
     )
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     demo = Package(
         "demo",
         "0.1.2",
         source_type="git",
         source_url="https://github.com/demo/demo.git",
-        source_reference="master",
-        source_resolved_reference="9cf87a285a2d3fbb0b9fa621997b3acc3631ed24",
+        source_reference=DEFAULT_SOURCE_REF,
+        source_resolved_reference=MOCK_DEFAULT_GIT_REVISION,
     )
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [{"job": "install", "package": pendulum}, {"job": "install", "package": demo}],
     )
 
     op = ops[1]
 
     assert op.package.source_type == "git"
-    assert op.package.source_reference == "master"
+    assert op.package.source_reference == DEFAULT_SOURCE_REF
     assert op.package.source_resolved_reference.startswith("9cf87a2")
 
 
-def test_solver_can_resolve_git_dependencies_with_extras(solver, repo, package):
+def test_solver_can_resolve_git_dependencies_with_extras(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     pendulum = get_package("pendulum", "2.0.3")
     cleo = get_package("cleo", "1.0.0")
     repo.add_package(pendulum)
@@ -1208,19 +1813,19 @@ def test_solver_can_resolve_git_dependencies_with_extras(solver, repo, package):
         )
     )
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     demo = Package(
         "demo",
         "0.1.2",
         source_type="git",
         source_url="https://github.com/demo/demo.git",
-        source_reference="master",
-        source_resolved_reference="9cf87a285a2d3fbb0b9fa621997b3acc3631ed24",
+        source_reference=DEFAULT_SOURCE_REF,
+        source_resolved_reference=MOCK_DEFAULT_GIT_REVISION,
     )
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": cleo},
             {"job": "install", "package": pendulum},
@@ -1234,7 +1839,9 @@ def test_solver_can_resolve_git_dependencies_with_extras(solver, repo, package):
     [{"branch": "a-branch"}, {"tag": "a-tag"}, {"rev": "9cf8"}],
     ids=["branch", "tag", "rev"],
 )
-def test_solver_can_resolve_git_dependencies_with_ref(solver, repo, package, ref):
+def test_solver_can_resolve_git_dependencies_with_ref(
+    solver: Solver, repo: Repository, package: Package, ref: dict[str, str]
+):
     pendulum = get_package("pendulum", "2.0.3")
     cleo = get_package("cleo", "1.0.0")
     repo.add_package(pendulum)
@@ -1246,17 +1853,17 @@ def test_solver_can_resolve_git_dependencies_with_ref(solver, repo, package, ref
         source_type="git",
         source_url="https://github.com/demo/demo.git",
         source_reference=ref[list(ref.keys())[0]],
-        source_resolved_reference="9cf87a285a2d3fbb0b9fa621997b3acc3631ed24",
+        source_resolved_reference=MOCK_DEFAULT_GIT_REVISION,
     )
 
     git_config = {demo.source_type: demo.source_url}
     git_config.update(ref)
     package.add_dependency(Factory.create_dependency("demo", git_config))
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [{"job": "install", "package": pendulum}, {"job": "install", "package": demo}],
     )
 
@@ -1267,8 +1874,8 @@ def test_solver_can_resolve_git_dependencies_with_ref(solver, repo, package, ref
     assert op.package.source_resolved_reference.startswith("9cf87a2")
 
 
-def test_solver_does_not_trigger_conflict_for_python_constraint_if_python_requirement_is_compatible(
-    solver, repo, package
+def test_solver_does_not_trigger_conflict_for_python_constraint_if_python_requirement_is_compatible(  # noqa: E501
+    solver: Solver, repo: Repository, package: Package
 ):
     solver.provider.set_package_python_versions("~2.7 || ^3.4")
     package.add_dependency(
@@ -1280,13 +1887,13 @@ def test_solver_does_not_trigger_conflict_for_python_constraint_if_python_requir
 
     repo.add_package(package_a)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(ops, [{"job": "install", "package": package_a}])
+    check_solver_result(transaction, [{"job": "install", "package": package_a}])
 
 
-def test_solver_does_not_trigger_conflict_for_python_constraint_if_python_requirement_is_compatible_multiple(
-    solver, repo, package
+def test_solver_does_not_trigger_conflict_for_python_constraint_if_python_requirement_is_compatible_multiple(  # noqa: E501
+    solver: Solver, repo: Repository, package: Package
 ):
     solver.provider.set_package_python_versions("~2.7 || ^3.4")
     package.add_dependency(
@@ -1306,10 +1913,10 @@ def test_solver_does_not_trigger_conflict_for_python_constraint_if_python_requir
     repo.add_package(package_a)
     repo.add_package(package_b)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": package_b},
             {"job": "install", "package": package_a},
@@ -1317,8 +1924,8 @@ def test_solver_does_not_trigger_conflict_for_python_constraint_if_python_requir
     )
 
 
-def test_solver_triggers_conflict_for_dependency_python_not_fully_compatible_with_package_python(
-    solver, repo, package
+def test_solver_triggers_conflict_for_dependency_python_not_fully_compatible_with_package_python(  # noqa: E501
+    solver: Solver, repo: Repository, package: Package
 ):
     solver.provider.set_package_python_versions("~2.7 || ^3.4")
     package.add_dependency(
@@ -1334,8 +1941,8 @@ def test_solver_triggers_conflict_for_dependency_python_not_fully_compatible_wit
         solver.solve()
 
 
-def test_solver_finds_compatible_package_for_dependency_python_not_fully_compatible_with_package_python(
-    solver, repo, package
+def test_solver_finds_compatible_package_for_dependency_python_not_fully_compatible_with_package_python(  # noqa: E501
+    solver: Solver, repo: Repository, package: Package
 ):
     solver.provider.set_package_python_versions("~2.7 || ^3.4")
     package.add_dependency(
@@ -1351,17 +1958,17 @@ def test_solver_finds_compatible_package_for_dependency_python_not_fully_compati
     repo.add_package(package_a100)
     repo.add_package(package_a101)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(ops, [{"job": "install", "package": package_a100}])
+    check_solver_result(transaction, [{"job": "install", "package": package_a100}])
 
 
-def test_solver_does_not_trigger_new_resolution_on_duplicate_dependencies_if_only_extras(
-    solver, repo, package
+def test_solver_does_not_trigger_new_resolution_on_duplicate_dependencies_if_only_extras(  # noqa: E501
+    solver: Solver, repo: Repository, package: Package
 ):
-    dep1 = dependency_from_pep_508('B (>=1.0); extra == "foo"')
+    dep1 = Dependency.create_from_pep_508('B (>=1.0); extra == "foo"')
     dep1.activate()
-    dep2 = dependency_from_pep_508('B (>=2.0); extra == "bar"')
+    dep2 = Dependency.create_from_pep_508('B (>=2.0); extra == "bar"')
     dep2.activate()
 
     package.add_dependency(
@@ -1370,8 +1977,8 @@ def test_solver_does_not_trigger_new_resolution_on_duplicate_dependencies_if_onl
 
     package_a = get_package("A", "1.0.0")
     package_a.extras = {"foo": [dep1], "bar": [dep2]}
-    package_a.requires.append(dep1)
-    package_a.requires.append(dep2)
+    package_a.add_dependency(dep1)
+    package_a.add_dependency(dep2)
 
     package_b2 = get_package("B", "2.0.0")
     package_b1 = get_package("B", "1.0.0")
@@ -1380,10 +1987,10 @@ def test_solver_does_not_trigger_new_resolution_on_duplicate_dependencies_if_onl
     repo.add_package(package_b1)
     repo.add_package(package_b2)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [
             {"job": "install", "package": package_b2},
             {"job": "install", "package": package_a},
@@ -1395,7 +2002,7 @@ def test_solver_does_not_trigger_new_resolution_on_duplicate_dependencies_if_onl
 
 
 def test_solver_does_not_raise_conflict_for_locked_conditional_dependencies(
-    solver, repo, package
+    solver: Solver, repo: Repository, package: Package
 ):
     solver.provider.set_package_python_versions("~2.7 || ^3.4")
     package.add_dependency(
@@ -1414,11 +2021,11 @@ def test_solver_does_not_raise_conflict_for_locked_conditional_dependencies(
     repo.add_package(package_a)
     repo.add_package(package_b)
 
-    solver._locked = Repository([package_a])
-    ops = solver.solve(use_latest=[package_b.name])
+    solver._locked = Repository("locked", [package_a])
+    transaction = solver.solve(use_latest=[package_b.name])
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": package_a},
             {"job": "install", "package": package_b},
@@ -1427,7 +2034,7 @@ def test_solver_does_not_raise_conflict_for_locked_conditional_dependencies(
 
 
 def test_solver_returns_extras_if_requested_in_dependencies_and_not_in_root_package(
-    solver, repo, package
+    solver: Solver, repo: Repository, package: Package
 ):
     package.add_dependency(Factory.create_dependency("A", "*"))
     package.add_dependency(Factory.create_dependency("B", "*"))
@@ -1452,10 +2059,10 @@ def test_solver_returns_extras_if_requested_in_dependencies_and_not_in_root_pack
     repo.add_package(package_c)
     repo.add_package(package_d)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": package_d},
             {"job": "install", "package": package_c},
@@ -1466,7 +2073,7 @@ def test_solver_returns_extras_if_requested_in_dependencies_and_not_in_root_pack
 
 
 def test_solver_should_not_resolve_prerelease_version_if_not_requested(
-    solver, repo, package
+    solver: Solver, repo: Repository, package: Package
 ):
     package.add_dependency(Factory.create_dependency("A", "~1.8.0"))
     package.add_dependency(Factory.create_dependency("B", "^0.5.0"))
@@ -1485,16 +2092,17 @@ def test_solver_should_not_resolve_prerelease_version_if_not_requested(
 
 
 def test_solver_ignores_dependencies_with_incompatible_python_full_version_marker(
-    solver, repo, package
+    solver: Solver, repo: Repository, package: Package
 ):
     solver.provider.set_package_python_versions("^3.6")
     package.add_dependency(Factory.create_dependency("A", "^1.0"))
     package.add_dependency(Factory.create_dependency("B", "^2.0"))
 
     package_a = get_package("A", "1.0.0")
-    package_a.requires.append(
-        dependency_from_pep_508(
-            'B (<2.0); platform_python_implementation == "PyPy" and python_full_version < "2.7.9"'
+    package_a.add_dependency(
+        Dependency.create_from_pep_508(
+            'B (<2.0); platform_python_implementation == "PyPy" and python_full_version'
+            ' < "2.7.9"'
         )
     )
 
@@ -1505,10 +2113,10 @@ def test_solver_ignores_dependencies_with_incompatible_python_full_version_marke
     repo.add_package(package_b100)
     repo.add_package(package_b200)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": package_a},
             {"job": "install", "package": package_b200},
@@ -1516,7 +2124,9 @@ def test_solver_ignores_dependencies_with_incompatible_python_full_version_marke
     )
 
 
-def test_solver_git_dependencies_update(solver, repo, package, installed):
+def test_solver_git_dependencies_update(
+    solver: Solver, repo: Repository, package: Package, installed: InstalledRepository
+):
     pendulum = get_package("pendulum", "2.0.3")
     cleo = get_package("cleo", "1.0.0")
     repo.add_package(pendulum)
@@ -1527,7 +2137,7 @@ def test_solver_git_dependencies_update(solver, repo, package, installed):
         "0.1.2",
         source_type="git",
         source_url="https://github.com/demo/demo.git",
-        source_reference="master",
+        source_reference=DEFAULT_SOURCE_REF,
         source_resolved_reference="123456",
     )
     demo = Package(
@@ -1535,8 +2145,8 @@ def test_solver_git_dependencies_update(solver, repo, package, installed):
         "0.1.2",
         source_type="git",
         source_url="https://github.com/demo/demo.git",
-        source_reference="master",
-        source_resolved_reference="9cf87a285a2d3fbb0b9fa621997b3acc3631ed24",
+        source_reference=DEFAULT_SOURCE_REF,
+        source_resolved_reference=MOCK_DEFAULT_GIT_REVISION,
     )
     installed.add_package(demo_installed)
 
@@ -1544,10 +2154,10 @@ def test_solver_git_dependencies_update(solver, repo, package, installed):
         Factory.create_dependency("demo", {"git": "https://github.com/demo/demo.git"})
     )
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [
             {"job": "install", "package": pendulum},
             {"job": "update", "from": demo_installed, "to": demo},
@@ -1558,12 +2168,14 @@ def test_solver_git_dependencies_update(solver, repo, package, installed):
 
     assert op.job_type == "update"
     assert op.package.source_type == "git"
-    assert op.package.source_reference == "master"
+    assert op.package.source_reference == DEFAULT_SOURCE_REF
     assert op.package.source_resolved_reference.startswith("9cf87a2")
     assert op.initial_package.source_resolved_reference == "123456"
 
 
-def test_solver_git_dependencies_update_skipped(solver, repo, package, installed):
+def test_solver_git_dependencies_update_skipped(
+    solver: Solver, repo: Repository, package: Package, installed: InstalledRepository
+):
     pendulum = get_package("pendulum", "2.0.3")
     cleo = get_package("cleo", "1.0.0")
     repo.add_package(pendulum)
@@ -1575,7 +2187,7 @@ def test_solver_git_dependencies_update_skipped(solver, repo, package, installed
         source_type="git",
         source_url="https://github.com/demo/demo.git",
         source_reference="master",
-        source_resolved_reference="9cf87a285a2d3fbb0b9fa621997b3acc3631ed24",
+        source_resolved_reference=MOCK_DEFAULT_GIT_REVISION,
     )
     installed.add_package(demo)
 
@@ -1583,10 +2195,10 @@ def test_solver_git_dependencies_update_skipped(solver, repo, package, installed
         Factory.create_dependency("demo", {"git": "https://github.com/demo/demo.git"})
     )
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": pendulum},
             {"job": "install", "package": demo, "skipped": True},
@@ -1595,7 +2207,7 @@ def test_solver_git_dependencies_update_skipped(solver, repo, package, installed
 
 
 def test_solver_git_dependencies_short_hash_update_skipped(
-    solver, repo, package, installed
+    solver: Solver, repo: Repository, package: Package, installed: InstalledRepository
 ):
     pendulum = get_package("pendulum", "2.0.3")
     cleo = get_package("cleo", "1.0.0")
@@ -1607,8 +2219,8 @@ def test_solver_git_dependencies_short_hash_update_skipped(
         "0.1.2",
         source_type="git",
         source_url="https://github.com/demo/demo.git",
-        source_reference="master",
-        source_resolved_reference="9cf87a285a2d3fbb0b9fa621997b3acc3631ed24",
+        source_reference=MOCK_DEFAULT_GIT_REVISION,
+        source_resolved_reference=MOCK_DEFAULT_GIT_REVISION,
     )
     installed.add_package(demo)
 
@@ -1618,10 +2230,10 @@ def test_solver_git_dependencies_short_hash_update_skipped(
         )
     )
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": pendulum},
             {
@@ -1631,8 +2243,8 @@ def test_solver_git_dependencies_short_hash_update_skipped(
                     "0.1.2",
                     source_type="git",
                     source_url="https://github.com/demo/demo.git",
-                    source_reference="9cf87a285a2d3fbb0b9fa621997b3acc3631ed24",
-                    source_resolved_reference="9cf87a285a2d3fbb0b9fa621997b3acc3631ed24",
+                    source_reference=MOCK_DEFAULT_GIT_REVISION,
+                    source_resolved_reference=MOCK_DEFAULT_GIT_REVISION,
                 ),
                 "skipped": True,
             },
@@ -1640,7 +2252,9 @@ def test_solver_git_dependencies_short_hash_update_skipped(
     )
 
 
-def test_solver_can_resolve_directory_dependencies(solver, repo, package):
+def test_solver_can_resolve_directory_dependencies(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     pendulum = get_package("pendulum", "2.0.3")
     repo.add_package(pendulum)
 
@@ -1655,12 +2269,12 @@ def test_solver_can_resolve_directory_dependencies(solver, repo, package):
 
     package.add_dependency(Factory.create_dependency("demo", {"path": path}))
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     demo = Package("demo", "0.1.2", source_type="directory", source_url=path)
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [{"job": "install", "package": pendulum}, {"job": "install", "package": demo}],
     )
 
@@ -1673,20 +2287,29 @@ def test_solver_can_resolve_directory_dependencies(solver, repo, package):
 
 
 def test_solver_can_resolve_directory_dependencies_nested_editable(
-    solver, repo, pool, installed, locked, io
+    repo: Repository,
+    pool: Pool,
+    installed: InstalledRepository,
+    locked: Repository,
+    io: NullIO,
 ):
     base = Path(__file__).parent.parent / "fixtures" / "project_with_nested_local"
     poetry = Factory().create_poetry(cwd=base)
     package = poetry.package
 
     solver = Solver(
-        package, pool, installed, locked, io, provider=Provider(package, pool, io)
+        package,
+        pool,
+        installed.packages,
+        locked.packages,
+        io,
+        provider=Provider(package, pool, io),
     )
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [
             {
                 "job": "install",
@@ -1726,7 +2349,9 @@ def test_solver_can_resolve_directory_dependencies_nested_editable(
         assert op.package.develop is True
 
 
-def test_solver_can_resolve_directory_dependencies_with_extras(solver, repo, package):
+def test_solver_can_resolve_directory_dependencies_with_extras(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     pendulum = get_package("pendulum", "2.0.3")
     cleo = get_package("cleo", "1.0.0")
     repo.add_package(pendulum)
@@ -1745,12 +2370,12 @@ def test_solver_can_resolve_directory_dependencies_with_extras(solver, repo, pac
         Factory.create_dependency("demo", {"path": path, "extras": ["foo"]})
     )
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     demo = Package("demo", "0.1.2", source_type="directory", source_url=path)
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [
             {"job": "install", "package": cleo},
             {"job": "install", "package": pendulum},
@@ -1766,7 +2391,9 @@ def test_solver_can_resolve_directory_dependencies_with_extras(solver, repo, pac
     assert op.package.source_url == path
 
 
-def test_solver_can_resolve_sdist_dependencies(solver, repo, package):
+def test_solver_can_resolve_sdist_dependencies(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     pendulum = get_package("pendulum", "2.0.3")
     repo.add_package(pendulum)
 
@@ -1779,12 +2406,12 @@ def test_solver_can_resolve_sdist_dependencies(solver, repo, package):
 
     package.add_dependency(Factory.create_dependency("demo", {"path": path}))
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     demo = Package("demo", "0.1.0", source_type="file", source_url=path)
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [{"job": "install", "package": pendulum}, {"job": "install", "package": demo}],
     )
 
@@ -1796,7 +2423,9 @@ def test_solver_can_resolve_sdist_dependencies(solver, repo, package):
     assert op.package.source_url == path
 
 
-def test_solver_can_resolve_sdist_dependencies_with_extras(solver, repo, package):
+def test_solver_can_resolve_sdist_dependencies_with_extras(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     pendulum = get_package("pendulum", "2.0.3")
     cleo = get_package("cleo", "1.0.0")
     repo.add_package(pendulum)
@@ -1813,12 +2442,12 @@ def test_solver_can_resolve_sdist_dependencies_with_extras(solver, repo, package
         Factory.create_dependency("demo", {"path": path, "extras": ["foo"]})
     )
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     demo = Package("demo", "0.1.0", source_type="file", source_url=path)
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [
             {"job": "install", "package": cleo},
             {"job": "install", "package": pendulum},
@@ -1834,7 +2463,9 @@ def test_solver_can_resolve_sdist_dependencies_with_extras(solver, repo, package
     assert op.package.source_url == path
 
 
-def test_solver_can_resolve_wheel_dependencies(solver, repo, package):
+def test_solver_can_resolve_wheel_dependencies(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     pendulum = get_package("pendulum", "2.0.3")
     repo.add_package(pendulum)
 
@@ -1847,12 +2478,12 @@ def test_solver_can_resolve_wheel_dependencies(solver, repo, package):
 
     package.add_dependency(Factory.create_dependency("demo", {"path": path}))
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     demo = Package("demo", "0.1.0", source_type="file", source_url=path)
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [{"job": "install", "package": pendulum}, {"job": "install", "package": demo}],
     )
 
@@ -1864,7 +2495,9 @@ def test_solver_can_resolve_wheel_dependencies(solver, repo, package):
     assert op.package.source_url == path
 
 
-def test_solver_can_resolve_wheel_dependencies_with_extras(solver, repo, package):
+def test_solver_can_resolve_wheel_dependencies_with_extras(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     pendulum = get_package("pendulum", "2.0.3")
     cleo = get_package("cleo", "1.0.0")
     repo.add_package(pendulum)
@@ -1881,12 +2514,12 @@ def test_solver_can_resolve_wheel_dependencies_with_extras(solver, repo, package
         Factory.create_dependency("demo", {"path": path, "extras": ["foo"]})
     )
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     demo = Package("demo", "0.1.0", source_type="file", source_url=path)
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [
             {"job": "install", "package": cleo},
             {"job": "install", "package": pendulum},
@@ -1903,19 +2536,22 @@ def test_solver_can_resolve_wheel_dependencies_with_extras(solver, repo, package
 
 
 def test_solver_can_solve_with_legacy_repository_using_proper_dists(
-    package, installed, locked, io
+    package: ProjectPackage,
+    installed: InstalledRepository,
+    locked: Repository,
+    io: NullIO,
 ):
     repo = MockLegacyRepository()
     pool = Pool([repo])
 
-    solver = Solver(package, pool, installed, locked, io)
+    solver = Solver(package, pool, installed.packages, locked.packages, io)
 
     package.add_dependency(Factory.create_dependency("isort", "4.3.4"))
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [
             {
                 "job": "install",
@@ -1945,21 +2581,24 @@ def test_solver_can_solve_with_legacy_repository_using_proper_dists(
 
 
 def test_solver_can_solve_with_legacy_repository_using_proper_python_compatible_dists(
-    package, installed, locked, io
+    package: ProjectPackage,
+    installed: InstalledRepository,
+    locked: Repository,
+    io: NullIO,
 ):
     package.python_versions = "^3.7"
 
     repo = MockLegacyRepository()
     pool = Pool([repo])
 
-    solver = Solver(package, pool, installed, locked, io)
+    solver = Solver(package, pool, installed.packages, locked.packages, io)
 
     package.add_dependency(Factory.create_dependency("isort", "4.3.4"))
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {
                 "job": "install",
@@ -1975,24 +2614,31 @@ def test_solver_can_solve_with_legacy_repository_using_proper_python_compatible_
     )
 
 
-def test_solver_skips_invalid_versions(package, installed, locked, io):
+def test_solver_skips_invalid_versions(
+    package: ProjectPackage,
+    installed: InstalledRepository,
+    locked: Repository,
+    io: NullIO,
+):
     package.python_versions = "^3.7"
 
     repo = MockPyPIRepository()
     pool = Pool([repo])
 
-    solver = Solver(package, pool, installed, locked, io)
+    solver = Solver(package, pool, installed.packages, locked.packages, io)
 
     package.add_dependency(Factory.create_dependency("trackpy", "^0.4"))
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops, [{"job": "install", "package": get_package("trackpy", "0.4.1")}]
+        transaction, [{"job": "install", "package": get_package("trackpy", "0.4.1")}]
     )
 
 
-def test_multiple_constraints_on_root(package, solver, repo):
+def test_multiple_constraints_on_root(
+    package: ProjectPackage, solver: Solver, repo: Repository
+):
     package.add_dependency(
         Factory.create_dependency("foo", {"version": "^1.0", "python": "^2.7"})
     )
@@ -2006,16 +2652,19 @@ def test_multiple_constraints_on_root(package, solver, repo):
     repo.add_package(foo15)
     repo.add_package(foo25)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [{"job": "install", "package": foo15}, {"job": "install", "package": foo25}],
     )
 
 
 def test_solver_chooses_most_recent_version_amongst_repositories(
-    package, installed, locked, io
+    package: ProjectPackage,
+    installed: InstalledRepository,
+    locked: Repository,
+    io: NullIO,
 ):
     package.python_versions = "^3.7"
     package.add_dependency(Factory.create_dependency("tomlkit", {"version": "^0.5"}))
@@ -2023,12 +2672,12 @@ def test_solver_chooses_most_recent_version_amongst_repositories(
     repo = MockLegacyRepository()
     pool = Pool([repo, MockPyPIRepository()])
 
-    solver = Solver(package, pool, installed, locked, io)
+    solver = Solver(package, pool, installed.packages, locked.packages, io)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(
-        ops, [{"job": "install", "package": get_package("tomlkit", "0.5.3")}]
+    ops = check_solver_result(
+        transaction, [{"job": "install", "package": get_package("tomlkit", "0.5.3")}]
     )
 
     assert ops[0].package.source_type is None
@@ -2036,7 +2685,10 @@ def test_solver_chooses_most_recent_version_amongst_repositories(
 
 
 def test_solver_chooses_from_correct_repository_if_forced(
-    package, installed, locked, io
+    package: ProjectPackage,
+    installed: InstalledRepository,
+    locked: Repository,
+    io: NullIO,
 ):
     package.python_versions = "^3.7"
     package.add_dependency(
@@ -2046,12 +2698,12 @@ def test_solver_chooses_from_correct_repository_if_forced(
     repo = MockLegacyRepository()
     pool = Pool([repo, MockPyPIRepository()])
 
-    solver = Solver(package, pool, installed, locked, io)
+    solver = Solver(package, pool, installed.packages, locked.packages, io)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [
             {
                 "job": "install",
@@ -2066,11 +2718,14 @@ def test_solver_chooses_from_correct_repository_if_forced(
         ],
     )
 
-    assert "http://legacy.foo.bar" == ops[0].package.source_url
+    assert ops[0].package.source_url == "http://legacy.foo.bar"
 
 
 def test_solver_chooses_from_correct_repository_if_forced_and_transitive_dependency(
-    package, installed, locked, io
+    package: ProjectPackage,
+    installed: InstalledRepository,
+    locked: Repository,
+    io: NullIO,
 ):
     package.python_versions = "^3.7"
     package.add_dependency(Factory.create_dependency("foo", "^1.0"))
@@ -2078,18 +2733,18 @@ def test_solver_chooses_from_correct_repository_if_forced_and_transitive_depende
         Factory.create_dependency("tomlkit", {"version": "^0.5", "source": "legacy"})
     )
 
-    repo = Repository()
+    repo = Repository("repo")
     foo = get_package("foo", "1.0.0")
     foo.add_dependency(Factory.create_dependency("tomlkit", "^0.5.0"))
     repo.add_package(foo)
     pool = Pool([MockLegacyRepository(), repo, MockPyPIRepository()])
 
-    solver = Solver(package, pool, installed, locked, io)
+    solver = Solver(package, pool, installed.packages, locked.packages, io)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [
             {
                 "job": "install",
@@ -2105,14 +2760,17 @@ def test_solver_chooses_from_correct_repository_if_forced_and_transitive_depende
         ],
     )
 
-    assert "http://legacy.foo.bar" == ops[0].package.source_url
+    assert ops[0].package.source_url == "http://legacy.foo.bar"
 
     assert ops[1].package.source_type is None
     assert ops[1].package.source_url is None
 
 
 def test_solver_does_not_choose_from_secondary_repository_by_default(
-    package, installed, locked, io
+    package: ProjectPackage,
+    installed: InstalledRepository,
+    locked: Repository,
+    io: NullIO,
 ):
     package.python_versions = "^3.7"
     package.add_dependency(Factory.create_dependency("clikit", {"version": "^0.2.0"}))
@@ -2121,12 +2779,12 @@ def test_solver_does_not_choose_from_secondary_repository_by_default(
     pool.add_repository(MockPyPIRepository(), secondary=True)
     pool.add_repository(MockLegacyRepository())
 
-    solver = Solver(package, pool, installed, locked, io)
+    solver = Solver(package, pool, installed.packages, locked.packages, io)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [
             {
                 "job": "install",
@@ -2152,13 +2810,18 @@ def test_solver_does_not_choose_from_secondary_repository_by_default(
         ],
     )
 
-    assert "http://legacy.foo.bar" == ops[0].package.source_url
+    assert ops[0].package.source_url == "http://legacy.foo.bar"
     assert ops[1].package.source_type is None
     assert ops[1].package.source_url is None
-    assert "http://legacy.foo.bar" == ops[2].package.source_url
+    assert ops[2].package.source_url == "http://legacy.foo.bar"
 
 
-def test_solver_chooses_from_secondary_if_explicit(package, installed, locked, io):
+def test_solver_chooses_from_secondary_if_explicit(
+    package: ProjectPackage,
+    installed: InstalledRepository,
+    locked: Repository,
+    io: NullIO,
+):
     package.python_versions = "^3.7"
     package.add_dependency(
         Factory.create_dependency("clikit", {"version": "^0.2.0", "source": "PyPI"})
@@ -2168,12 +2831,12 @@ def test_solver_chooses_from_secondary_if_explicit(package, installed, locked, i
     pool.add_repository(MockPyPIRepository(), secondary=True)
     pool.add_repository(MockLegacyRepository())
 
-    solver = Solver(package, pool, installed, locked, io)
+    solver = Solver(package, pool, installed.packages, locked.packages, io)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(
-        ops,
+    ops = check_solver_result(
+        transaction,
         [
             {
                 "job": "install",
@@ -2190,7 +2853,7 @@ def test_solver_chooses_from_secondary_if_explicit(package, installed, locked, i
         ],
     )
 
-    assert "http://legacy.foo.bar" == ops[0].package.source_url
+    assert ops[0].package.source_url == "http://legacy.foo.bar"
     assert ops[1].package.source_type is None
     assert ops[1].package.source_url is None
     assert ops[2].package.source_type is None
@@ -2198,7 +2861,12 @@ def test_solver_chooses_from_secondary_if_explicit(package, installed, locked, i
 
 
 def test_solver_discards_packages_with_empty_markers(
-    package, installed, locked, io, pool, repo
+    package: ProjectPackage,
+    installed: InstalledRepository,
+    locked: Repository,
+    io: NullIO,
+    pool: Pool,
+    repo: Repository,
 ):
     package.python_versions = "~2.7 || ^3.4"
     package.add_dependency(
@@ -2220,12 +2888,12 @@ def test_solver_discards_packages_with_empty_markers(
     repo.add_package(package_b)
     repo.add_package(package_c)
 
-    solver = Solver(package, pool, installed, locked, io)
+    solver = Solver(package, pool, installed.packages, locked.packages, io)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": package_c},
             {"job": "install", "package": package_a},
@@ -2234,17 +2902,17 @@ def test_solver_discards_packages_with_empty_markers(
 
 
 def test_solver_does_not_raise_conflict_for_conditional_dev_dependencies(
-    solver, repo, package
+    solver: Solver, repo: Repository, package: Package
 ):
     solver.provider.set_package_python_versions("~2.7 || ^3.5")
     package.add_dependency(
         Factory.create_dependency(
-            "A", {"version": "^1.0", "python": "~2.7"}, category="dev"
+            "A", {"version": "^1.0", "python": "~2.7"}, groups=["dev"]
         )
     )
     package.add_dependency(
         Factory.create_dependency(
-            "A", {"version": "^2.0", "python": "^3.5"}, category="dev"
+            "A", {"version": "^2.0", "python": "^3.5"}, groups=["dev"]
         )
     )
 
@@ -2254,10 +2922,10 @@ def test_solver_does_not_raise_conflict_for_conditional_dev_dependencies(
     repo.add_package(package_a100)
     repo.add_package(package_a200)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": package_a100},
             {"job": "install", "package": package_a200},
@@ -2266,7 +2934,7 @@ def test_solver_does_not_raise_conflict_for_conditional_dev_dependencies(
 
 
 def test_solver_does_not_loop_indefinitely_on_duplicate_constraints_with_extras(
-    solver, repo, package
+    solver: Solver, repo: Repository, package: Package
 ):
     solver.provider.set_package_python_versions("~2.7 || ^3.5")
     package.add_dependency(
@@ -2288,16 +2956,21 @@ def test_solver_does_not_loop_indefinitely_on_duplicate_constraints_with_extras(
     repo.add_package(requests)
     repo.add_package(idna)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [{"job": "install", "package": idna}, {"job": "install", "package": requests}],
     )
 
 
 def test_solver_does_not_fail_with_locked_git_and_non_git_dependencies(
-    solver, repo, package, locked, pool, installed, io
+    repo: Repository,
+    package: Package,
+    locked: Repository,
+    pool: Pool,
+    installed: InstalledRepository,
+    io: NullIO,
 ):
     package.add_dependency(
         Factory.create_dependency("demo", {"git": "https://github.com/demo/demo.git"})
@@ -2309,8 +2982,8 @@ def test_solver_does_not_fail_with_locked_git_and_non_git_dependencies(
         "0.1.2",
         source_type="git",
         source_url="https://github.com/demo/demo.git",
-        source_reference="master",
-        source_resolved_reference="commit",
+        source_reference=DEFAULT_SOURCE_REF,
+        source_resolved_reference=MOCK_DEFAULT_GIT_REVISION,
     )
 
     installed.add_package(git_package)
@@ -2321,12 +2994,12 @@ def test_solver_does_not_fail_with_locked_git_and_non_git_dependencies(
     repo.add_package(get_package("a", "1.2.3"))
     repo.add_package(Package("pendulum", "2.1.2"))
 
-    solver = Solver(package, pool, installed, locked, io)
+    solver = Solver(package, pool, installed.packages, locked.packages, io)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": get_package("a", "1.2.3")},
             {"job": "install", "package": git_package, "skipped": True},
@@ -2334,7 +3007,9 @@ def test_solver_does_not_fail_with_locked_git_and_non_git_dependencies(
     )
 
 
-def test_ignore_python_constraint_no_overlap_dependencies(solver, repo, package):
+def test_ignore_python_constraint_no_overlap_dependencies(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     pytest = get_package("demo", "1.0.0")
     pytest.add_dependency(
         Factory.create_dependency(
@@ -2349,15 +3024,16 @@ def test_ignore_python_constraint_no_overlap_dependencies(solver, repo, package)
     repo.add_package(pytest)
     repo.add_package(get_package("configparser", "1.2.3"))
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops, [{"job": "install", "package": pytest}],
+        transaction,
+        [{"job": "install", "package": pytest}],
     )
 
 
 def test_solver_should_not_go_into_an_infinite_loop_on_duplicate_dependencies(
-    solver, repo, package
+    solver: Solver, repo: Repository, package: Package
 ):
     solver.provider.set_package_python_versions("~2.7 || ^3.5")
     package.add_dependency(Factory.create_dependency("A", "^1.0"))
@@ -2377,10 +3053,10 @@ def test_solver_should_not_go_into_an_infinite_loop_on_duplicate_dependencies(
     repo.add_package(package_b10)
     repo.add_package(package_b20)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": package_b10},
             {"job": "install", "package": package_b20},
@@ -2389,30 +3065,43 @@ def test_solver_should_not_go_into_an_infinite_loop_on_duplicate_dependencies(
     )
 
 
-def test_solver_remove_untracked_single(package, pool, installed, locked, io):
-    solver = Solver(package, pool, installed, locked, io, remove_untracked=True)
+def test_solver_synchronize_single(
+    package: ProjectPackage,
+    pool: Pool,
+    installed: InstalledRepository,
+    locked: Repository,
+    io: NullIO,
+):
+    solver = Solver(package, pool, installed.packages, locked.packages, io)
     package_a = get_package("a", "1.0")
     installed.add_package(package_a)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(ops, [{"job": "remove", "package": package_a}])
+    check_solver_result(
+        transaction, [{"job": "remove", "package": package_a}], synchronize=True
+    )
 
 
-def test_solver_remove_untracked_keeps_critical_package(
-    package, pool, installed, locked, io
+@pytest.mark.skip(reason="Poetry no longer has critical package requirements")
+def test_solver_with_synchronization_keeps_critical_package(
+    package: ProjectPackage,
+    pool: Pool,
+    installed: InstalledRepository,
+    locked: Repository,
+    io: NullIO,
 ):
-    solver = Solver(package, pool, installed, locked, io, remove_untracked=True)
-    package_pip = get_package("pip", "1.0")
+    solver = Solver(package, pool, installed.packages, locked.packages, io)
+    package_setuptools = get_package("setuptools", "1.0")
-    installed.add_package(package_pip)
+    installed.add_package(package_setuptools)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(ops, [])
+    check_solver_result(transaction, [])
 
 
 def test_solver_cannot_choose_another_version_for_directory_dependencies(
-    solver, repo, package
+    solver: Solver, repo: Repository, package: Package
 ):
     pendulum = get_package("pendulum", "2.0.3")
     demo = get_package("demo", "0.1.0")
@@ -2441,7 +3130,7 @@ def test_solver_cannot_choose_another_version_for_directory_dependencies(
 
 
 def test_solver_cannot_choose_another_version_for_file_dependencies(
-    solver, repo, package
+    solver: Solver, repo: Repository, package: Package
 ):
     pendulum = get_package("pendulum", "2.0.3")
     demo = get_package("demo", "0.0.8")
@@ -2468,7 +3157,7 @@ def test_solver_cannot_choose_another_version_for_file_dependencies(
 
 
 def test_solver_cannot_choose_another_version_for_git_dependencies(
-    solver, repo, package
+    solver: Solver, repo: Repository, package: Package
 ):
     pendulum = get_package("pendulum", "2.0.3")
     demo = get_package("demo", "0.0.8")
@@ -2490,7 +3179,10 @@ def test_solver_cannot_choose_another_version_for_git_dependencies(
 
 
 def test_solver_cannot_choose_another_version_for_url_dependencies(
-    solver, repo, package, http
+    solver: Solver,
+    repo: Repository,
+    package: Package,
+    http: type[httpretty.httpretty],
 ):
     path = (
         Path(__file__).parent.parent
@@ -2528,7 +3220,7 @@ def test_solver_cannot_choose_another_version_for_url_dependencies(
 
 
 def test_solver_should_not_update_same_version_packages_if_installed_has_no_source_type(
-    solver, repo, package, installed
+    solver: Solver, repo: Repository, package: Package, installed: InstalledRepository
 ):
     package.add_dependency(Factory.create_dependency("foo", "1.0.0"))
 
@@ -2542,13 +3234,15 @@ def test_solver_should_not_update_same_version_packages_if_installed_has_no_sour
     repo.add_package(foo)
     installed.add_package(get_package("foo", "1.0.0"))
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(ops, [{"job": "install", "package": foo, "skipped": True}])
+    check_solver_result(
+        transaction, [{"job": "install", "package": foo, "skipped": True}]
+    )
 
 
 def test_solver_should_use_the_python_constraint_from_the_environment_if_available(
-    solver, repo, package, installed
+    solver: Solver, repo: Repository, package: Package, installed: InstalledRepository
 ):
     solver.provider.set_package_python_versions("~2.7 || ^3.5")
     package.add_dependency(Factory.create_dependency("A", "^1.0"))
@@ -2566,15 +3260,16 @@ def test_solver_should_use_the_python_constraint_from_the_environment_if_availab
     repo.add_package(b)
 
     with solver.use_environment(MockEnv((2, 7, 18))):
-        ops = solver.solve()
+        transaction = solver.solve()
 
     check_solver_result(
-        ops, [{"job": "install", "package": b}, {"job": "install", "package": a}],
+        transaction,
+        [{"job": "install", "package": b}, {"job": "install", "package": a}],
     )
 
 
 def test_solver_should_resolve_all_versions_for_multiple_duplicate_dependencies(
-    solver, repo, package
+    solver: Solver, repo: Repository, package: Package
 ):
     package.python_versions = "~2.7 || ^3.5"
     package.add_dependency(
@@ -2608,10 +3303,10 @@ def test_solver_should_resolve_all_versions_for_multiple_duplicate_dependencies(
     repo.add_package(package_b30)
     repo.add_package(package_b40)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": package_a10},
             {"job": "install", "package": package_a20},
@@ -2622,7 +3317,7 @@ def test_solver_should_resolve_all_versions_for_multiple_duplicate_dependencies(
 
 
 def test_solver_should_not_raise_errors_for_irrelevant_python_constraints(
-    solver, repo, package
+    solver: Solver, repo: Repository, package: Package
 ):
     package.python_versions = "^3.6"
     solver.provider.set_package_python_versions("^3.6")
@@ -2634,12 +3329,14 @@ def test_solver_should_not_raise_errors_for_irrelevant_python_constraints(
     dataclasses.python_versions = ">=3.6, <3.7"
 
     repo.add_package(dataclasses)
-    ops = solver.solve()
+    transaction = solver.solve()
 
-    check_solver_result(ops, [{"job": "install", "package": dataclasses}])
+    check_solver_result(transaction, [{"job": "install", "package": dataclasses}])
 
 
-def test_solver_can_resolve_transitive_extras(solver, repo, package):
+def test_solver_can_resolve_transitive_extras(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     package.add_dependency(Factory.create_dependency("requests", "^2.24.0"))
     package.add_dependency(Factory.create_dependency("PyOTA", "^2.1.0"))
 
@@ -2662,10 +3359,10 @@ def test_solver_can_resolve_transitive_extras(solver, repo, package):
     repo.add_package(get_package("certifi", "2017.4.17"))
     repo.add_package(get_package("pyopenssl", "0.14"))
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": get_package("certifi", "2017.4.17")},
             {"job": "install", "package": get_package("pyopenssl", "0.14")},
@@ -2675,7 +3372,9 @@ def test_solver_can_resolve_transitive_extras(solver, repo, package):
     )
 
 
-def test_solver_can_resolve_for_packages_with_missing_extras(solver, repo, package):
+def test_solver_can_resolve_for_packages_with_missing_extras(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
     package.add_dependency(
         Factory.create_dependency(
             "django-anymail", {"version": "^6.0", "extras": ["postmark"]}
@@ -2698,10 +3397,10 @@ def test_solver_can_resolve_for_packages_with_missing_extras(solver, repo, packa
     repo.add_package(boto3)
     repo.add_package(requests)
 
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": django},
             {"job": "install", "package": requests},
@@ -2711,7 +3410,7 @@ def test_solver_can_resolve_for_packages_with_missing_extras(solver, repo, packa
 
 
 def test_solver_can_resolve_python_restricted_package_dependencies(
-    solver, repo, package, locked
+    solver: Solver, repo: Repository, package: Package, locked: Repository
 ):
     package.add_dependency(
         Factory.create_dependency("futures", {"version": "^3.3.0", "python": "~2.7"})
@@ -2732,10 +3431,10 @@ def test_solver_can_resolve_python_restricted_package_dependencies(
     repo.add_package(futures)
     repo.add_package(pre_commit)
 
-    ops = solver.solve(use_latest=["pre-commit"])
+    transaction = solver.solve(use_latest=["pre-commit"])
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": futures},
             {"job": "install", "package": pre_commit},
@@ -2744,7 +3443,7 @@ def test_solver_can_resolve_python_restricted_package_dependencies(
 
 
 def test_solver_should_not_raise_errors_for_irrelevant_transitive_python_constraints(
-    solver, repo, package
+    solver: Solver, repo: Repository, package: Package
 ):
     package.python_versions = "~2.7 || ^3.5"
     solver.provider.set_package_python_versions("~2.7 || ^3.5")
@@ -2781,13 +3480,245 @@ def test_solver_should_not_raise_errors_for_irrelevant_transitive_python_constra
     repo.add_package(pre_commit)
     repo.add_package(importlib_resources)
     repo.add_package(importlib_resources_3_2_1)
-    ops = solver.solve()
+    transaction = solver.solve()
 
     check_solver_result(
-        ops,
+        transaction,
         [
             {"job": "install", "package": importlib_resources_3_2_1},
             {"job": "install", "package": pre_commit},
             {"job": "install", "package": virtualenv},
         ],
     )
+
+
+@pytest.mark.parametrize("is_locked", [False, True])
+def test_solver_keeps_multiple_locked_dependencies_for_same_package(
+    solver: Solver,
+    repo: Repository,
+    package: Package,
+    locked: Repository,
+    is_locked: bool,
+):
+    solver.provider.set_package_python_versions("^3.6")
+    package.add_dependency(
+        Factory.create_dependency("A", {"version": "~1.1", "python": "<3.7"})
+    )
+    package.add_dependency(
+        Factory.create_dependency("A", {"version": "~1.2", "python": ">=3.7"})
+    )
+
+    a11 = Package("A", "1.1")
+    a12 = Package("A", "1.2")
+
+    a11.add_dependency(Factory.create_dependency("B", {"version": ">=0.3"}))
+    a12.add_dependency(Factory.create_dependency("B", {"version": ">=0.3"}))
+
+    b03 = Package("B", "0.3")
+    b04 = Package("B", "0.4")
+    b04.python_versions = ">=3.6.2,<4.0.0"
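+    # B 0.4 supports only Python >=3.6.2, so within the project's ^3.6 range
+    # the solver has to keep B 0.3 (for Python <3.6.2) alongside B 0.4.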
+
+    repo.add_package(a11)
+    repo.add_package(a12)
+    repo.add_package(b03)
+    repo.add_package(b04)
+
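+    # In the locked variant, a previous lock already pins both marker-split
+    # versions of A as well as both versions of B.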
+    if is_locked:
+        a11_locked = a11.clone()
+        a11_locked.python_versions = "<3.7"
+        locked.add_package(a11_locked)
+        a12_locked = a12.clone()
+        a12_locked.python_versions = ">=3.7"
+        locked.add_package(a12_locked)
+        locked.add_package(b03.clone())
+        locked.add_package(b04.clone())
+
+    transaction = solver.solve()
+
+    check_solver_result(
+        transaction,
+        [
+            {"job": "install", "package": b03},
+            {"job": "install", "package": b04},
+            {"job": "install", "package": a11},
+            {"job": "install", "package": a12},
+        ],
+    )
+
+
+def test_solver_direct_origin_dependency_with_extras_requested_by_other_package(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
+    """
+    Another package requires the same dependency with extras that is required
+    by the project as direct origin dependency without any extras.
+    """
+    pendulum = get_package("pendulum", "2.0.3")  # required by demo
+    cleo = get_package("cleo", "1.0.0")  # required by demo[foo]
+    demo_foo = get_package("demo-foo", "1.2.3")
+    demo_foo.add_dependency(
+        Factory.create_dependency("demo", {"version": ">=0.1", "extras": ["foo"]})
+    )
+    repo.add_package(demo_foo)
+    repo.add_package(pendulum)
+    repo.add_package(cleo)
+
+    path = (
+        Path(__file__).parent.parent
+        / "fixtures"
+        / "git"
+        / "github.com"
+        / "demo"
+        / "demo"
+    ).as_posix()
+
+    # The project requires demo via path while demo-foo requires demo[foo].
+    package.add_dependency(Factory.create_dependency("demo", {"path": path}))
+    package.add_dependency(Factory.create_dependency("demo-foo", "^1.2.3"))
+
+    transaction = solver.solve()
+
+    demo = Package("demo", "0.1.2", source_type="directory", source_url=path)
+
+    ops = check_solver_result(
+        transaction,
+        [
+            {"job": "install", "package": cleo},
+            {"job": "install", "package": pendulum},
+            {"job": "install", "package": demo},
+            {"job": "install", "package": demo_foo},
+        ],
+    )
+
+    op = ops[2]
+
+    assert op.package.name == "demo"
+    assert op.package.version.text == "0.1.2"
+    assert op.package.source_type == "directory"
+    assert op.package.source_url == path
+
+
+def test_solver_incompatible_dependency_with_and_without_extras(
+    solver: Solver, repo: Repository, package: ProjectPackage
+):
+    """
+    The solver first encounters a requirement for google-auth and then later an
+    incompatible requirement for google-auth[aiohttp].
+
+    Test case derived from https://github.com/python-poetry/poetry/issues/6054.
+    """
+    # Incompatible requirements from foo and bar2.
+    foo = get_package("foo", "1.0.0")
+    foo.add_dependency(Factory.create_dependency("google-auth", {"version": "^1"}))
+
+    bar = get_package("bar", "1.0.0")
+
+    bar2 = get_package("bar", "2.0.0")
+    bar2.add_dependency(
+        Factory.create_dependency(
+            "google-auth", {"version": "^2", "extras": ["aiohttp"]}
+        )
+    )
+
+    baz = get_package("baz", "1.0.0")  # required by google-auth[aiohttp]
+
+    google_auth = get_package("google-auth", "1.2.3")
+    google_auth.extras = {"aiohttp": [get_dependency("baz", "^1.0")]}
+
+    google_auth2 = get_package("google-auth", "2.3.4")
+    google_auth2.extras = {"aiohttp": [get_dependency("baz", "^1.0")]}
+
+    repo.add_package(foo)
+    repo.add_package(bar)
+    repo.add_package(bar2)
+    repo.add_package(baz)
+    repo.add_package(google_auth)
+    repo.add_package(google_auth2)
+
+    package.add_dependency(Factory.create_dependency("foo", ">=1"))
+    package.add_dependency(Factory.create_dependency("bar", ">=1"))
+
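+    # bar 2.0.0 would require google-auth[aiohttp] ^2, which conflicts with
+    # foo's google-auth ^1, so the solver has to fall back to bar 1.0.0.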
+    transaction = solver.solve()
+
+    check_solver_result(
+        transaction,
+        [
+            {"job": "install", "package": google_auth},
+            {"job": "install", "package": bar},
+            {"job": "install", "package": foo},
+        ],
+    )
+
+
+def test_update_with_prerelease_and_no_solution(
+    solver: Solver,
+    repo: Repository,
+    installed: InstalledRepository,
+    package: ProjectPackage,
+    locked: Repository,
+):
+    # Locked and installed: cleo, which depends on an old version of crashtest.
+    cleo = get_package("cleo", "1.0.0a5")
+    crashtest = get_package("crashtest", "0.3.0")
+    cleo.add_dependency(Factory.create_dependency("crashtest", {"version": "<0.4.0"}))
+    locked.add_package(cleo)
+    locked.add_package(crashtest)
+
+    installed.add_package(cleo)
+    installed.add_package(crashtest)
+
+    # Try to upgrade to a new version of crashtest; this is disallowed by
+    # cleo's dependency on crashtest <0.4.0.
+    package.add_dependency(Factory.create_dependency("cleo", "^1.0.0a5"))
+    package.add_dependency(Factory.create_dependency("crashtest", "^0.4.0"))
+
+    newer_crashtest = get_package("crashtest", "0.4.0")
+    even_newer_crashtest = get_package("crashtest", "0.4.1")
+    repo.add_package(cleo)
+    repo.add_package(crashtest)
+    repo.add_package(newer_crashtest)
+    repo.add_package(even_newer_crashtest)
+
+    with pytest.raises(SolverProblemError):
+        solver.solve()
+
+
+def test_solver_yanked_warning(
+    package: ProjectPackage,
+    installed: InstalledRepository,
+    locked: Repository,
+    pool: Pool,
+    repo: Repository,
+) -> None:
+    package.add_dependency(Factory.create_dependency("foo", "==1"))
+    package.add_dependency(Factory.create_dependency("bar", "==2"))
+    package.add_dependency(Factory.create_dependency("baz", "==3"))
+    foo = get_package("foo", "1", yanked=False)
+    bar = get_package("bar", "2", yanked=True)
+    baz = get_package("baz", "3", yanked="just wrong")
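+    # `yanked` may be a bool or a reason string: a warning is emitted either
+    # way, but the reason is only echoed when a string is given.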
+    repo.add_package(foo)
+    repo.add_package(bar)
+    repo.add_package(baz)
+
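+    # Use a BufferedIO instead of the NullIO fixture so that the yanked
+    # warnings written to the error output can be inspected below.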
+    io = BufferedIO(decorated=False)
+    solver = Solver(package, pool, installed.packages, locked.packages, io)
+    transaction = solver.solve()
+
+    check_solver_result(
+        transaction,
+        [
+            {"job": "install", "package": bar},
+            {"job": "install", "package": baz},
+            {"job": "install", "package": foo},
+        ],
+    )
+    error = io.fetch_error()
+    assert "foo" not in error
+    assert "The locked version 2 for bar is a yanked version." in error
+    assert (
+        "The locked version 3 for baz is a yanked version. Reason for being yanked:"
+        " just wrong"
+        in error
+    )
+    assert error.count("is a yanked version") == 2
+    assert error.count("Reason for being yanked") == 1
diff --git a/vendor/poetry/tests/puzzle/test_transaction.py b/vendor/poetry/tests/puzzle/test_transaction.py
new file mode 100644
index 00000000..ae4093f5
--- /dev/null
+++ b/vendor/poetry/tests/puzzle/test_transaction.py
@@ -0,0 +1,159 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from typing import Any
+
+from poetry.core.packages.package import Package
+
+from poetry.puzzle.transaction import Transaction
+
+
+if TYPE_CHECKING:
+    from poetry.installation.operations.operation import Operation
+
+
+def check_operations(ops: list[Operation], expected: list[dict[str, Any]]) -> None:
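+    """Normalize ops into plain dicts and assert they match expected, in order."""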
+    for e in expected:
+        if "skipped" not in e:
+            e["skipped"] = False
+
+    result = []
+    for op in ops:
+        if op.job_type == "update":
+            result.append(
+                {
+                    "job": "update",
+                    "from": op.initial_package,
+                    "to": op.target_package,
+                    "skipped": op.skipped,
+                }
+            )
+        else:
+            job = "install"
+            if op.job_type == "uninstall":
+                job = "remove"
+
+            result.append({"job": job, "package": op.package, "skipped": op.skipped})
+
+    assert result == expected
+
+
+def test_it_should_calculate_operations_in_correct_order() -> None:
+    transaction = Transaction(
+        [Package("a", "1.0.0"), Package("b", "2.0.0"), Package("c", "3.0.0")],
+        [
+            (Package("a", "1.0.0"), 1),
+            (Package("b", "2.1.0"), 2),
+            (Package("d", "4.0.0"), 0),
+        ],
+    )
+
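+    # The int paired with each result package is its priority; higher-priority
+    # packages are installed first, hence b (2), then a (1), then d (0).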
+    check_operations(
+        transaction.calculate_operations(),
+        [
+            {"job": "install", "package": Package("b", "2.1.0")},
+            {"job": "install", "package": Package("a", "1.0.0")},
+            {"job": "install", "package": Package("d", "4.0.0")},
+        ],
+    )
+
+
+def test_it_should_calculate_operations_for_installed_packages() -> None:
+    transaction = Transaction(
+        [Package("a", "1.0.0"), Package("b", "2.0.0"), Package("c", "3.0.0")],
+        [
+            (Package("a", "1.0.0"), 1),
+            (Package("b", "2.1.0"), 2),
+            (Package("d", "4.0.0"), 0),
+        ],
+        installed_packages=[
+            Package("a", "1.0.0"),
+            Package("b", "2.0.0"),
+            Package("c", "3.0.0"),
+            Package("e", "5.0.0"),
+        ],
+    )
+
+    check_operations(
+        transaction.calculate_operations(),
+        [
+            {"job": "remove", "package": Package("c", "3.0.0")},
+            {
+                "job": "update",
+                "from": Package("b", "2.0.0"),
+                "to": Package("b", "2.1.0"),
+            },
+            {"job": "install", "package": Package("a", "1.0.0"), "skipped": True},
+            {"job": "install", "package": Package("d", "4.0.0")},
+        ],
+    )
+
+
+def test_it_should_remove_installed_packages_if_required() -> None:
+    transaction = Transaction(
+        [Package("a", "1.0.0"), Package("b", "2.0.0"), Package("c", "3.0.0")],
+        [
+            (Package("a", "1.0.0"), 1),
+            (Package("b", "2.1.0"), 2),
+            (Package("d", "4.0.0"), 0),
+        ],
+        installed_packages=[
+            Package("a", "1.0.0"),
+            Package("b", "2.0.0"),
+            Package("c", "3.0.0"),
+            Package("e", "5.0.0"),
+        ],
+    )
+
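+    # With synchronize=True, installed packages missing from the result set
+    # (here "e") are scheduled for removal as well.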
+    check_operations(
+        transaction.calculate_operations(synchronize=True),
+        [
+            {"job": "remove", "package": Package("c", "3.0.0")},
+            {"job": "remove", "package": Package("e", "5.0.0")},
+            {
+                "job": "update",
+                "from": Package("b", "2.0.0"),
+                "to": Package("b", "2.1.0"),
+            },
+            {"job": "install", "package": Package("a", "1.0.0"), "skipped": True},
+            {"job": "install", "package": Package("d", "4.0.0")},
+        ],
+    )
+
+
+def test_it_should_update_installed_packages_if_sources_are_different() -> None:
+    transaction = Transaction(
+        [Package("a", "1.0.0")],
+        [
+            (
+                Package(
+                    "a",
+                    "1.0.0",
+                    source_url="https://github.com/demo/demo.git",
+                    source_type="git",
+                    source_reference="main",
+                    source_resolved_reference="123456",
+                ),
+                1,
+            )
+        ],
+        installed_packages=[Package("a", "1.0.0")],
+    )
+
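+    # Name and version are unchanged, but the resolved package now comes from
+    # git rather than PyPI, so an update is expected instead of a skip.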
+    check_operations(
+        transaction.calculate_operations(synchronize=True),
+        [
+            {
+                "job": "update",
+                "from": Package("a", "1.0.0"),
+                "to": Package(
+                    "a",
+                    "1.0.0",
+                    source_url="https://github.com/demo/demo.git",
+                    source_type="git",
+                    source_reference="main",
+                    source_resolved_reference="123456",
+                ),
+            }
+        ],
+    )
diff --git a/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/directory_pep_610-1.2.3.dist-info/METADATA b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/directory_pep_610-1.2.3.dist-info/METADATA
new file mode 100644
index 00000000..30928a39
--- /dev/null
+++ b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/directory_pep_610-1.2.3.dist-info/METADATA
@@ -0,0 +1,6 @@
+Metadata-Version: 2.1
+Name: directory-pep-610
+Version: 1.2.3
+Summary: Foo
+License: MIT
+Requires-Python: >=3.6
diff --git a/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/directory_pep_610-1.2.3.dist-info/direct_url.json b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/directory_pep_610-1.2.3.dist-info/direct_url.json
new file mode 100644
index 00000000..3385611c
--- /dev/null
+++ b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/directory_pep_610-1.2.3.dist-info/direct_url.json
@@ -0,0 +1,4 @@
+{
+  "url": "file:///path/to/distributions/directory-pep-610",
+  "dir_info": {}
+}
diff --git a/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/editable_directory_pep_610-1.2.3.dist-info/METADATA b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/editable_directory_pep_610-1.2.3.dist-info/METADATA
new file mode 100644
index 00000000..337c6fc4
--- /dev/null
+++ b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/editable_directory_pep_610-1.2.3.dist-info/METADATA
@@ -0,0 +1,6 @@
+Metadata-Version: 2.1
+Name: editable-directory-pep-610
+Version: 1.2.3
+Summary: Foo
+License: MIT
+Requires-Python: >=3.6
diff --git a/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/editable_directory_pep_610-1.2.3.dist-info/direct_url.json b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/editable_directory_pep_610-1.2.3.dist-info/direct_url.json
new file mode 100644
index 00000000..e45f7c31
--- /dev/null
+++ b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/editable_directory_pep_610-1.2.3.dist-info/direct_url.json
@@ -0,0 +1,6 @@
+{
+  "url": "file:///path/to/distributions/directory-pep-610",
+  "dir_info": {
+    "editable": true
+  }
+}
diff --git a/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/file_pep_610-1.2.3.dist-info/METADATA b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/file_pep_610-1.2.3.dist-info/METADATA
new file mode 100644
index 00000000..9478ca1f
--- /dev/null
+++ b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/file_pep_610-1.2.3.dist-info/METADATA
@@ -0,0 +1,6 @@
+Metadata-Version: 2.1
+Name: file-pep-610
+Version: 1.2.3
+Summary: Foo
+License: MIT
+Requires-Python: >=3.6
diff --git a/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/file_pep_610-1.2.3.dist-info/direct_url.json b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/file_pep_610-1.2.3.dist-info/direct_url.json
new file mode 100644
index 00000000..d481649f
--- /dev/null
+++ b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/file_pep_610-1.2.3.dist-info/direct_url.json
@@ -0,0 +1,6 @@
+{
+  "url": "file:///path/to/distributions/file-pep-610-1.2.3.tar.gz",
+  "archive_info": {
+    "hash": "sha256=2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae"
+  }
+}
diff --git a/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/git_pep_610-1.2.3.dist-info/METADATA b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/git_pep_610-1.2.3.dist-info/METADATA
new file mode 100644
index 00000000..bfc73cf7
--- /dev/null
+++ b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/git_pep_610-1.2.3.dist-info/METADATA
@@ -0,0 +1,6 @@
+Metadata-Version: 2.1
+Name: git-pep-610
+Version: 1.2.3
+Summary: Foo
+License: MIT
+Requires-Python: >=3.6
diff --git a/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/git_pep_610-1.2.3.dist-info/direct_url.json b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/git_pep_610-1.2.3.dist-info/direct_url.json
new file mode 100644
index 00000000..a3115254
--- /dev/null
+++ b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/git_pep_610-1.2.3.dist-info/direct_url.json
@@ -0,0 +1,8 @@
+{
+  "url": "https://github.com/demo/git-pep-610.git",
+  "vcs_info": {
+    "vcs": "git",
+    "requested_revision": "my-branch",
+    "commit_id": "123456"
+  }
+}
diff --git a/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/git_pep_610_no_requested_version-1.2.3.dist-info/METADATA b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/git_pep_610_no_requested_version-1.2.3.dist-info/METADATA
new file mode 100644
index 00000000..986f958a
--- /dev/null
+++ b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/git_pep_610_no_requested_version-1.2.3.dist-info/METADATA
@@ -0,0 +1,6 @@
+Metadata-Version: 2.1
+Name: git-pep-610-no-requested-version
+Version: 1.2.3
+Summary: Foo
+License: MIT
+Requires-Python: >=3.6
diff --git a/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/git_pep_610_no_requested_version-1.2.3.dist-info/direct_url.json b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/git_pep_610_no_requested_version-1.2.3.dist-info/direct_url.json
new file mode 100644
index 00000000..d646852c
--- /dev/null
+++ b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/git_pep_610_no_requested_version-1.2.3.dist-info/direct_url.json
@@ -0,0 +1,7 @@
+{
+  "url": "https://github.com/demo/git-pep-610-no-requested-version.git",
+  "vcs_info": {
+    "vcs": "git",
+    "commit_id": "123456"
+  }
+}
diff --git a/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/git_pep_610_subdirectory-1.2.3.dist-info/METADATA b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/git_pep_610_subdirectory-1.2.3.dist-info/METADATA
new file mode 100644
index 00000000..551158eb
--- /dev/null
+++ b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/git_pep_610_subdirectory-1.2.3.dist-info/METADATA
@@ -0,0 +1,6 @@
+Metadata-Version: 2.1
+Name: git-pep-610-subdirectory
+Version: 1.2.3
+Summary: Foo
+License: MIT
+Requires-Python: >=3.6
diff --git a/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/git_pep_610_subdirectory-1.2.3.dist-info/direct_url.json b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/git_pep_610_subdirectory-1.2.3.dist-info/direct_url.json
new file mode 100644
index 00000000..6b6c93ab
--- /dev/null
+++ b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/git_pep_610_subdirectory-1.2.3.dist-info/direct_url.json
@@ -0,0 +1,9 @@
+{
+  "url": "https://github.com/demo/git-pep-610-subdirectory.git",
+  "vcs_info": {
+    "vcs": "git",
+    "requested_revision": "my-branch",
+    "commit_id": "123456"
+  },
+  "subdirectory": "subdir"
+}
diff --git a/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/standard.pth b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/standard.pth
index aa0bc074..a6a7b9cd 100644
--- a/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/standard.pth
+++ b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/standard.pth
@@ -1 +1 @@
-standard
\ No newline at end of file
+standard
diff --git a/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/url_pep_610-1.2.3.dist-info/METADATA b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/url_pep_610-1.2.3.dist-info/METADATA
new file mode 100644
index 00000000..7b2afd31
--- /dev/null
+++ b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/url_pep_610-1.2.3.dist-info/METADATA
@@ -0,0 +1,6 @@
+Metadata-Version: 2.1
+Name: url-pep-610
+Version: 1.2.3
+Summary: Foo
+License: MIT
+Requires-Python: >=3.6
diff --git a/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/url_pep_610-1.2.3.dist-info/direct_url.json b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/url_pep_610-1.2.3.dist-info/direct_url.json
new file mode 100644
index 00000000..b36e4055
--- /dev/null
+++ b/vendor/poetry/tests/repositories/fixtures/installed/lib/python3.7/site-packages/url_pep_610-1.2.3.dist-info/direct_url.json
@@ -0,0 +1,4 @@
+{
+  "url": "https://python-poetry.org/distributions/url-pep-610-1.2.3.tar.gz",
+  "archive_info": {}
+}
diff --git a/vendor/poetry/tests/repositories/fixtures/installed/lib64/python3.7/site-packages/bender.pth b/vendor/poetry/tests/repositories/fixtures/installed/lib64/python3.7/site-packages/bender.pth
index 8450d61d..7f799e42 100644
--- a/vendor/poetry/tests/repositories/fixtures/installed/lib64/python3.7/site-packages/bender.pth
+++ b/vendor/poetry/tests/repositories/fixtures/installed/lib64/python3.7/site-packages/bender.pth
@@ -1 +1 @@
-../../../src/bender
\ No newline at end of file
+../../../src/bender
diff --git a/vendor/poetry/tests/repositories/fixtures/installed/src/pendulum/pendulum.egg-info/PKG-INFO b/vendor/poetry/tests/repositories/fixtures/installed/src/pendulum/pendulum.egg-info/PKG-INFO
index f7ee228c..8b00a917 100644
--- a/vendor/poetry/tests/repositories/fixtures/installed/src/pendulum/pendulum.egg-info/PKG-INFO
+++ b/vendor/poetry/tests/repositories/fixtures/installed/src/pendulum/pendulum.egg-info/PKG-INFO
@@ -8,228 +8,228 @@ Author-email: sebastien@eustace.io
 License: UNKNOWN
 Description: Pendulum
         ########
-        
+
         .. image:: https://img.shields.io/pypi/v/pendulum.svg
             :target: https://pypi.python.org/pypi/pendulum
-        
+
         .. image:: https://img.shields.io/pypi/l/pendulum.svg
             :target: https://pypi.python.org/pypi/pendulum
-        
+
         .. image:: https://img.shields.io/codecov/c/github/sdispater/pendulum/master.svg
             :target: https://codecov.io/gh/sdispater/pendulum/branch/master
-        
+
         .. image:: https://travis-ci.org/sdispater/pendulum.svg
             :alt: Pendulum Build status
             :target: https://travis-ci.org/sdispater/pendulum
-        
+
         Python datetimes made easy.
-        
+
         Supports Python **2.7** and **3.4+**.
-        
-        
+
+
         .. code-block:: python
-        
+
            >>> import pendulum
-        
+
            >>> now_in_paris = pendulum.now('Europe/Paris')
            >>> now_in_paris
            '2016-07-04T00:49:58.502116+02:00'
-        
+
            # Seamless timezone switching
            >>> now_in_paris.in_timezone('UTC')
            '2016-07-03T22:49:58.502116+00:00'
-        
+
            >>> tomorrow = pendulum.now().add(days=1)
            >>> last_week = pendulum.now().subtract(weeks=1)
-        
+
            >>> past = pendulum.now().subtract(minutes=2)
            >>> past.diff_for_humans()
            '2 minutes ago'
-        
+
            >>> delta = past - last_week
            >>> delta.hours
            23
            >>> delta.in_words(locale='en')
            '6 days 23 hours 58 minutes'
-        
+
            # Proper handling of datetime normalization
            >>> pendulum.datetime(2013, 3, 31, 2, 30, tz='Europe/Paris')
            '2013-03-31T03:30:00+02:00' # 2:30 does not exist (Skipped time)
-        
+
            # Proper handling of dst transitions
            >>> just_before = pendulum.datetime(2013, 3, 31, 1, 59, 59, 999999, tz='Europe/Paris')
            '2013-03-31T01:59:59.999999+01:00'
            >>> just_before.add(microseconds=1)
            '2013-03-31T03:00:00+02:00'
-        
-        
+
+
         Why Pendulum?
         =============
-        
+
         Native ``datetime`` instances are enough for basic cases, but when you face more complex use cases
         they often show limitations and are not so intuitive to work with.
         ``Pendulum`` provides a cleaner and easier-to-use API while still relying on the standard library.
         So it's still ``datetime``, but better.
-        
+
         Unlike other datetime libraries for Python, Pendulum is a drop-in replacement
         for the standard ``datetime`` class (it inherits from it), so, basically, you can replace all your ``datetime``
         instances with ``DateTime`` instances in your code (exceptions exist for libraries that check
         the type of objects with the ``type`` function, such as ``sqlite3`` or ``PyMySQL``).
-        
+
         It also removes the notion of naive datetimes: each ``Pendulum`` instance is timezone-aware
         and by default in ``UTC`` for ease of use.
-        
+
         Pendulum also improves the standard ``timedelta`` class by providing more intuitive methods and properties.
-        
-        
+
+
         Why not Arrow?
         ==============
-        
+
         Arrow is the most popular datetime library for Python right now, however its behavior
         and API can be erratic and unpredictable. The ``get()`` method can receive pretty much anything
         and it will try its best to return something while silently failing to handle some cases:
-        
+
         .. code-block:: python
-        
+
             arrow.get('2016-1-17')
             # <Arrow [2016-01-01T00:00:00+00:00]>
-        
+
             pendulum.parse('2016-1-17')
             # <Pendulum [2016-01-17T00:00:00+00:00]>
-        
+
             arrow.get('20160413')
             # <Arrow [1970-08-22T08:06:53+00:00]>
-        
+
             pendulum.parse('20160413')
             # <Pendulum [2016-04-13T00:00:00+00:00]>
-        
+
             arrow.get('2016-W07-5')
             # <Arrow [2016-01-01T00:00:00+00:00]>
-        
+
             pendulum.parse('2016-W07-5')
             # <Pendulum [2016-02-19T00:00:00+00:00]>
-        
+
             # Working with DST
             just_before = arrow.Arrow(2013, 3, 31, 1, 59, 59, 999999, 'Europe/Paris')
             just_after = just_before.replace(microseconds=1)
             '2013-03-31T02:00:00+02:00'
             # Should be 2013-03-31T03:00:00+02:00
-        
+
             (just_after.to('utc') - just_before.to('utc')).total_seconds()
             -3599.999999
             # Should be 1e-06
-        
+
             just_before = pendulum.datetime(2013, 3, 31, 1, 59, 59, 999999, 'Europe/Paris')
             just_after = just_before.add(microseconds=1)
             '2013-03-31T03:00:00+02:00'
-        
+
             (just_after.in_timezone('utc') - just_before.in_timezone('utc')).total_seconds()
             1e-06
-        
+
         These are a few examples showing that Arrow cannot always be trusted to behave
         consistently with the data you pass to it.
-        
-        
+
+
         Limitations
         ===========
-        
+
         Even though the ``DateTime`` class is a subclass of ``datetime``, there are some rare cases where
         it can't replace the native class directly. Here is a non-exhaustive list of the reported cases,
         with a possible solution, if any:
-        
+
         * ``sqlite3`` will use the ``type()`` function to determine the type of the object by default. To work around it you can register a new adapter:
-        
+
         .. code-block:: python
-        
+
             from pendulum import DateTime
             from sqlite3 import register_adapter
-        
+
             register_adapter(DateTime, lambda val: val.isoformat(' '))
-        
+
         * ``mysqlclient`` (formerly ``MySQLdb``) and ``PyMySQL`` will use the ``type()`` function to determine the type of the object by default. To work around it, you can register a new adapter:
-        
+
         .. code-block:: python
-        
+
             import MySQLdb.converters
             import pymysql.converters
-        
+
             from pendulum import DateTime
-        
+
             MySQLdb.converters.conversions[DateTime] = MySQLdb.converters.DateTime2literal
             pymysql.converters.conversions[DateTime] = pymysql.converters.escape_datetime
-        
+
         * ``django`` will use the ``isoformat()`` method to store datetimes in the database. However, since ``pendulum`` is always timezone-aware, the offset information will always be returned by ``isoformat()``, raising an error, at least for MySQL databases. To work around it you can either create your own ``DateTimeField`` or use the previous workaround for ``MySQLdb``:
-        
+
         .. code-block:: python
-        
+
             from django.db.models import DateTimeField as BaseDateTimeField
             from pendulum import DateTime
-        
-        
+
+
             class DateTimeField(BaseDateTimeField):
-        
+
                 def value_to_string(self, obj):
                     val = self.value_from_object(obj)
-        
+
                 if isinstance(val, DateTime):
                     return val.to_datetime_string()
-        
+
                     return '' if val is None else val.isoformat()
-        
-        
+
+
         Resources
         =========
-        
+
         * `Official Website <https://pendulum.eustace.io>`_
         * `Documentation <https://pendulum.eustace.io/docs/>`_
         * `Issue Tracker <https://github.com/sdispater/pendulum/issues>`_
-        
-        
+
+
         Contributing
         ============
-        
+
         Contributions are welcome, especially with localization.
-        
+
         Getting started
         ---------------
-        
+
         To work on the Pendulum codebase, you'll want to clone the project locally
         and install the required dependencies via `poetry <https://poetry.eustace.io>`_.
-        
+
         .. code-block:: bash
-        
+
             $ git clone git@github.com:sdispater/pendulum.git
             $ poetry install
-        
+
         Localization
         ------------
-        
+
         If you want to help with localization, there are two cases: either the locale already exists,
         or it does not.
-        
+
         If the locale does not exist, you will need to create it using the ``clock`` utility:
-        
+
         .. code-block:: bash
-        
+
             ./clock locale create <locale>
-        
+
         It will generate a directory in ``pendulum/locales`` named after your locale, with the following
         structure:
-        
+
         .. code-block:: text
-        
+
             <locale>/
                 - custom.py
                 - locale.py
-        
+
         The ``locale.py`` file must not be modified. It contains the translations provided by
         the CLDR database.
-        
+
         The ``custom.py`` file is the one you want to modify. It contains the data needed
         by Pendulum that is not provided by the CLDR database. You can take the `en <https://github.com/sdispater/pendulum/tree/master/pendulum/locales/en>`_
         data as a reference to see which data is needed.
-        
+
         You should also add tests for the created or modified locale.
-        
+
 Platform: UNKNOWN
 Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
diff --git a/vendor/poetry/tests/repositories/fixtures/legacy/black.html b/vendor/poetry/tests/repositories/fixtures/legacy/black.html
index 333fd93e..ea050662 100644
--- a/vendor/poetry/tests/repositories/fixtures/legacy/black.html
+++ b/vendor/poetry/tests/repositories/fixtures/legacy/black.html
@@ -4,7 +4,8 @@
     Links for black
 
     

Links for black

-    black-19.10b0.tar.gz
+    black-19.10b0-py36-none-any.whl
+    black-21.11b0-py3-none-any.whl
-
\ No newline at end of file
+
diff --git a/vendor/poetry/tests/repositories/fixtures/legacy/discord-py.html b/vendor/poetry/tests/repositories/fixtures/legacy/discord-py.html
new file mode 100644
index 00000000..af4a2212
--- /dev/null
+++ b/vendor/poetry/tests/repositories/fixtures/legacy/discord-py.html
@@ -0,0 +1,11 @@
+
+
+
+    Links for discord-py
+
+
+
+    Links for discord-py
+
+    discord.py-2.0.0-py3-none-any.whl
+    discord.py-2.0.0.tar.gz
+
+
+
diff --git a/vendor/poetry/tests/repositories/fixtures/legacy/futures_partial_yank.html b/vendor/poetry/tests/repositories/fixtures/legacy/futures_partial_yank.html
new file mode 100644
index 00000000..41fa4ac3
--- /dev/null
+++ b/vendor/poetry/tests/repositories/fixtures/legacy/futures_partial_yank.html
@@ -0,0 +1,12 @@
+
+
+
+    Links for futures
+
+
+
+
+    Links for futures
+
+    futures-3.2.0-py2-none-any.whl
+    futures-3.2.0.tar.gz
+
+
+
diff --git a/vendor/poetry/tests/repositories/fixtures/legacy/invalid-version.html b/vendor/poetry/tests/repositories/fixtures/legacy/invalid-version.html
new file mode 100644
index 00000000..039a6302
--- /dev/null
+++ b/vendor/poetry/tests/repositories/fixtures/legacy/invalid-version.html
@@ -0,0 +1,12 @@
+
+
+
+    Links for poetry
+
+
+
+
+    Links for poetry
+
+    poetry-21.07.28.5ffb65e2ff8067c732e2b178d03b707c7fb27855-py3-none-any.whl
+    poetry-0.1.0-py3-none-any.whl
+
+
+
diff --git a/vendor/poetry/tests/repositories/fixtures/legacy/ipython.html b/vendor/poetry/tests/repositories/fixtures/legacy/ipython.html
index cbdc19e5..5b61c92d 100644
--- a/vendor/poetry/tests/repositories/fixtures/legacy/ipython.html
+++ b/vendor/poetry/tests/repositories/fixtures/legacy/ipython.html
@@ -5,10 +5,10 @@
 
     Links for ipython
 
-    ipython-5.7.0-py2-none-any.whl
+    ipython-5.7.0-py2-none-any.whl
     ipython-5.7.0-py3-none-any.whl
     ipython-5.7.0.tar.gz
-    ipython-7.5.0-py3-none-any.whl
+    ipython-7.5.0-py3-none-any.whl
     ipython-7.5.0.tar.gz
diff --git a/vendor/poetry/tests/repositories/fixtures/legacy/poetry-test-py2-py3-metadata-merge.html b/vendor/poetry/tests/repositories/fixtures/legacy/poetry-test-py2-py3-metadata-merge.html
new file mode 100644
index 00000000..7b43db0f
--- /dev/null
+++ b/vendor/poetry/tests/repositories/fixtures/legacy/poetry-test-py2-py3-metadata-merge.html
@@ -0,0 +1,11 @@
+
+
+
+    Links for poetry-test-py2-py3-metadata-merge
+
+
+
+    Links for ipython
+
+    poetry_test_py2_py3_metadata_merge-0.1.0-py2-none-any.whl
+    poetry_test_py2_py3_metadata_merge-0.1.0-py3-none-any.whl
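The legacy fixtures above are PEP 503 "simple index" pages: one anchor per downloadable file, with the filename as the link text, plus the PEP 592 data-yanked attribute that futures_partial_yank.html exercises. A rough sketch of scraping such a page (illustrative html.parser code, not poetry's link parser; the sample href is hypothetical):

    from html.parser import HTMLParser

    class SimpleIndexParser(HTMLParser):
        # Collects (filename, href, yanked) triples from a PEP 503 page.
        def __init__(self):
            super().__init__()
            self._href = None
            self._yanked = False
            self.links = []

        def handle_starttag(self, tag, attrs):
            if tag == "a":
                attrs = dict(attrs)
                self._href = attrs.get("href")
                self._yanked = "data-yanked" in attrs  # PEP 592 yank marker

        def handle_data(self, data):
            if self._href is not None and data.strip():
                self.links.append((data.strip(), self._href, self._yanked))

        def handle_endtag(self, tag):
            if tag == "a":
                self._href = None

    parser = SimpleIndexParser()
    parser.feed('<a href="/files/futures-3.2.0.tar.gz" data-yanked="">futures-3.2.0.tar.gz</a>')
    print(parser.links)  # [('futures-3.2.0.tar.gz', '/files/futures-3.2.0.tar.gz', True)]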
+ + diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/dists/black-21.11b0-py3-none-any.whl b/vendor/poetry/tests/repositories/fixtures/pypi.org/dists/black-21.11b0-py3-none-any.whl new file mode 100644 index 00000000..f0e3956e Binary files /dev/null and b/vendor/poetry/tests/repositories/fixtures/pypi.org/dists/black-21.11b0-py3-none-any.whl differ diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/dists/discord.py-2.0.0-py3-none-any.whl b/vendor/poetry/tests/repositories/fixtures/pypi.org/dists/discord.py-2.0.0-py3-none-any.whl new file mode 100644 index 00000000..5a9e7fab Binary files /dev/null and b/vendor/poetry/tests/repositories/fixtures/pypi.org/dists/discord.py-2.0.0-py3-none-any.whl differ diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/dists/poetry_test_py2_py3_metadata_merge-0.1.0-py2-none-any.whl b/vendor/poetry/tests/repositories/fixtures/pypi.org/dists/poetry_test_py2_py3_metadata_merge-0.1.0-py2-none-any.whl new file mode 100644 index 00000000..255fcf7d Binary files /dev/null and b/vendor/poetry/tests/repositories/fixtures/pypi.org/dists/poetry_test_py2_py3_metadata_merge-0.1.0-py2-none-any.whl differ diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/dists/poetry_test_py2_py3_metadata_merge-0.1.0-py3-none-any.whl b/vendor/poetry/tests/repositories/fixtures/pypi.org/dists/poetry_test_py2_py3_metadata_merge-0.1.0-py3-none-any.whl new file mode 100644 index 00000000..c4f890ba Binary files /dev/null and b/vendor/poetry/tests/repositories/fixtures/pypi.org/dists/poetry_test_py2_py3_metadata_merge-0.1.0-py3-none-any.whl differ diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/attrs.json b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/attrs.json index 739fdc32..1242aa45 100644 --- a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/attrs.json +++ b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/attrs.json @@ -1,134 +1,134 @@ { - "info": { - "author": "Hynek Schlawack", - "author_email": "hs@ox.cx", - "bugtrack_url": null, - "classifiers": [ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Natural Language :: English", - "Operating System :: OS Independent", - "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: Implementation :: CPython", - "Programming Language :: Python :: Implementation :: PyPy", - "Topic :: Software Development :: Libraries :: Python Modules" - ], - "description": "", - "docs_url": null, - "download_url": "", - "downloads": { - "last_day": -1, - "last_month": -1, - "last_week": -1 - }, - "home_page": "http://www.attrs.org/", - "keywords": "class,attribute,boilerplate", - "license": "MIT", - "maintainer": "", - "maintainer_email": "", - "name": "attrs", - "package_url": "https://pypi.org/project/attrs/", - "platform": "", - "project_url": "https://pypi.org/project/attrs/", - "release_url": "https://pypi.org/project/attrs/17.4.0/", - "requires_dist": [ - "coverage; extra == 'dev'", - "hypothesis; extra == 'dev'", - "pympler; extra == 'dev'", - "pytest; extra == 'dev'", - "six; extra == 'dev'", - "zope.interface; extra == 'dev'", - "sphinx; extra == 'dev'", - "zope.interface; extra == 'dev'", - "sphinx; extra == 
'docs'", - "zope.interface; extra == 'docs'", - "coverage; extra == 'tests'", - "hypothesis; extra == 'tests'", - "pympler; extra == 'tests'", - "pytest; extra == 'tests'", - "six; extra == 'tests'", - "zope.interface; extra == 'tests'" - ], - "requires_python": "", - "summary": "Classes Without Boilerplate", - "version": "17.4.0" - }, - "last_serial": 3451237, - "releases": { - "17.4.0": [ - { - "comment_text": "", - "digests": { - "md5": "5835a573b3f0316e1602dac3fd9c1daf", - "sha256": "a17a9573a6f475c99b551c0e0a812707ddda1ec9653bed04c13841404ed6f450" - }, - "downloads": -1, - "filename": "attrs-17.4.0-py2.py3-none-any.whl", - "has_sig": true, - "md5_digest": "5835a573b3f0316e1602dac3fd9c1daf", - "packagetype": "bdist_wheel", - "python_version": "py2.py3", - "size": 31658, - "upload_time": "2017-12-30T08:20:05", - "url": "https://files.pythonhosted.org/packages/b5/60/4e178c1e790fd60f1229a9b3cb2f8bc2f4cc6ff2c8838054c142c70b5adc/attrs-17.4.0-py2.py3-none-any.whl" - }, - { - "comment_text": "", - "digests": { - "md5": "d7a89063b2e0fd36bd82389c4d82821d", - "sha256": "1c7960ccfd6a005cd9f7ba884e6316b5e430a3f1a6c37c5f87d8b43f83b54ec9" - }, - "downloads": -1, - "filename": "attrs-17.4.0.tar.gz", - "has_sig": true, - "md5_digest": "d7a89063b2e0fd36bd82389c4d82821d", - "packagetype": "sdist", - "python_version": "source", - "size": 97071, - "upload_time": "2017-12-30T08:20:08", - "url": "https://files.pythonhosted.org/packages/8b/0b/a06cfcb69d0cb004fde8bc6f0fd192d96d565d1b8aa2829f0f20adb796e5/attrs-17.4.0.tar.gz" - } - ] + "info": { + "author": "Hynek Schlawack", + "author_email": "hs@ox.cx", + "bugtrack_url": null, + "classifiers": [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", + "Topic :: Software Development :: Libraries :: Python Modules" + ], + "description": "", + "docs_url": null, + "download_url": "", + "downloads": { + "last_day": -1, + "last_month": -1, + "last_week": -1 }, - "urls": [ - { - "comment_text": "", - "digests": { - "md5": "5835a573b3f0316e1602dac3fd9c1daf", - "sha256": "a17a9573a6f475c99b551c0e0a812707ddda1ec9653bed04c13841404ed6f450" - }, - "downloads": -1, - "filename": "attrs-17.4.0-py2.py3-none-any.whl", - "has_sig": true, - "md5_digest": "5835a573b3f0316e1602dac3fd9c1daf", - "packagetype": "bdist_wheel", - "python_version": "py2.py3", - "size": 31658, - "upload_time": "2017-12-30T08:20:05", - "url": "https://files.pythonhosted.org/packages/b5/60/4e178c1e790fd60f1229a9b3cb2f8bc2f4cc6ff2c8838054c142c70b5adc/attrs-17.4.0-py2.py3-none-any.whl" + "home_page": "http://www.attrs.org/", + "keywords": "class,attribute,boilerplate", + "license": "MIT", + "maintainer": "", + "maintainer_email": "", + "name": "attrs", + "package_url": "https://pypi.org/project/attrs/", + "platform": "", + "project_url": "https://pypi.org/project/attrs/", + "release_url": "https://pypi.org/project/attrs/17.4.0/", + "requires_dist": [ + "coverage; extra == 'dev'", + "hypothesis; extra == 'dev'", + "pympler; extra == 'dev'", + "pytest; 
extra == 'dev'", + "six; extra == 'dev'", + "zope.interface; extra == 'dev'", + "sphinx; extra == 'dev'", + "zope.interface; extra == 'dev'", + "sphinx; extra == 'docs'", + "zope.interface; extra == 'docs'", + "coverage; extra == 'tests'", + "hypothesis; extra == 'tests'", + "pympler; extra == 'tests'", + "pytest; extra == 'tests'", + "six; extra == 'tests'", + "zope.interface; extra == 'tests'" + ], + "requires_python": "", + "summary": "Classes Without Boilerplate", + "version": "17.4.0" + }, + "last_serial": 3451237, + "releases": { + "17.4.0": [ + { + "comment_text": "", + "digests": { + "md5": "5835a573b3f0316e1602dac3fd9c1daf", + "sha256": "a17a9573a6f475c99b551c0e0a812707ddda1ec9653bed04c13841404ed6f450" }, - { - "comment_text": "", - "digests": { - "md5": "d7a89063b2e0fd36bd82389c4d82821d", - "sha256": "1c7960ccfd6a005cd9f7ba884e6316b5e430a3f1a6c37c5f87d8b43f83b54ec9" - }, - "downloads": -1, - "filename": "attrs-17.4.0.tar.gz", - "has_sig": true, - "md5_digest": "d7a89063b2e0fd36bd82389c4d82821d", - "packagetype": "sdist", - "python_version": "source", - "size": 97071, - "upload_time": "2017-12-30T08:20:08", - "url": "https://files.pythonhosted.org/packages/8b/0b/a06cfcb69d0cb004fde8bc6f0fd192d96d565d1b8aa2829f0f20adb796e5/attrs-17.4.0.tar.gz" - } + "downloads": -1, + "filename": "attrs-17.4.0-py2.py3-none-any.whl", + "has_sig": true, + "md5_digest": "5835a573b3f0316e1602dac3fd9c1daf", + "packagetype": "bdist_wheel", + "python_version": "py2.py3", + "size": 31658, + "upload_time": "2017-12-30T08:20:05", + "url": "https://files.pythonhosted.org/packages/b5/60/4e178c1e790fd60f1229a9b3cb2f8bc2f4cc6ff2c8838054c142c70b5adc/attrs-17.4.0-py2.py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "d7a89063b2e0fd36bd82389c4d82821d", + "sha256": "1c7960ccfd6a005cd9f7ba884e6316b5e430a3f1a6c37c5f87d8b43f83b54ec9" + }, + "downloads": -1, + "filename": "attrs-17.4.0.tar.gz", + "has_sig": true, + "md5_digest": "d7a89063b2e0fd36bd82389c4d82821d", + "packagetype": "sdist", + "python_version": "source", + "size": 97071, + "upload_time": "2017-12-30T08:20:08", + "url": "https://files.pythonhosted.org/packages/8b/0b/a06cfcb69d0cb004fde8bc6f0fd192d96d565d1b8aa2829f0f20adb796e5/attrs-17.4.0.tar.gz" + } ] + }, + "urls": [ + { + "comment_text": "", + "digests": { + "md5": "5835a573b3f0316e1602dac3fd9c1daf", + "sha256": "a17a9573a6f475c99b551c0e0a812707ddda1ec9653bed04c13841404ed6f450" + }, + "downloads": -1, + "filename": "attrs-17.4.0-py2.py3-none-any.whl", + "has_sig": true, + "md5_digest": "5835a573b3f0316e1602dac3fd9c1daf", + "packagetype": "bdist_wheel", + "python_version": "py2.py3", + "size": 31658, + "upload_time": "2017-12-30T08:20:05", + "url": "https://files.pythonhosted.org/packages/b5/60/4e178c1e790fd60f1229a9b3cb2f8bc2f4cc6ff2c8838054c142c70b5adc/attrs-17.4.0-py2.py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "d7a89063b2e0fd36bd82389c4d82821d", + "sha256": "1c7960ccfd6a005cd9f7ba884e6316b5e430a3f1a6c37c5f87d8b43f83b54ec9" + }, + "downloads": -1, + "filename": "attrs-17.4.0.tar.gz", + "has_sig": true, + "md5_digest": "d7a89063b2e0fd36bd82389c4d82821d", + "packagetype": "sdist", + "python_version": "source", + "size": 97071, + "upload_time": "2017-12-30T08:20:08", + "url": "https://files.pythonhosted.org/packages/8b/0b/a06cfcb69d0cb004fde8bc6f0fd192d96d565d1b8aa2829f0f20adb796e5/attrs-17.4.0.tar.gz" + } + ] } diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/attrs/17.4.0.json 
b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/attrs/17.4.0.json index 87cc6eab..e7cc36a4 100644 --- a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/attrs/17.4.0.json +++ b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/attrs/17.4.0.json @@ -1,98 +1,98 @@ { - "info": { - "author": "Hynek Schlawack", - "author_email": "hs@ox.cx", - "bugtrack_url": null, - "classifiers": [ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Natural Language :: English", - "Operating System :: OS Independent", - "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: Implementation :: CPython", - "Programming Language :: Python :: Implementation :: PyPy", - "Topic :: Software Development :: Libraries :: Python Modules" - ], - "description": "", - "docs_url": null, - "download_url": "", - "downloads": { - "last_day": -1, - "last_month": -1, - "last_week": -1 - }, - "home_page": "http://www.attrs.org/", - "keywords": "class,attribute,boilerplate", - "license": "MIT", - "maintainer": "", - "maintainer_email": "", - "name": "attrs", - "package_url": "https://pypi.org/project/attrs/", - "platform": "", - "project_url": "https://pypi.org/project/attrs/", - "release_url": "https://pypi.org/project/attrs/17.4.0/", - "requires_dist": [ - "coverage; extra == 'dev'", - "hypothesis; extra == 'dev'", - "pympler; extra == 'dev'", - "pytest; extra == 'dev'", - "six; extra == 'dev'", - "zope.interface; extra == 'dev'", - "sphinx; extra == 'dev'", - "zope.interface; extra == 'dev'", - "sphinx; extra == 'docs'", - "zope.interface; extra == 'docs'", - "coverage; extra == 'tests'", - "hypothesis; extra == 'tests'", - "pympler; extra == 'tests'", - "pytest; extra == 'tests'", - "six; extra == 'tests'", - "zope.interface; extra == 'tests'" - ], - "requires_python": "", - "summary": "Classes Without Boilerplate", - "version": "17.4.0" + "info": { + "author": "Hynek Schlawack", + "author_email": "hs@ox.cx", + "bugtrack_url": null, + "classifiers": [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", + "Topic :: Software Development :: Libraries :: Python Modules" + ], + "description": "", + "docs_url": null, + "download_url": "", + "downloads": { + "last_day": -1, + "last_month": -1, + "last_week": -1 }, - "last_serial": 3451237, - "urls": [ - { - "comment_text": "", - "digests": { - "md5": "5835a573b3f0316e1602dac3fd9c1daf", - "sha256": "a17a9573a6f475c99b551c0e0a812707ddda1ec9653bed04c13841404ed6f450" - }, - "downloads": -1, - "filename": "attrs-17.4.0-py2.py3-none-any.whl", - "has_sig": true, - "md5_digest": "5835a573b3f0316e1602dac3fd9c1daf", - "packagetype": "bdist_wheel", - "python_version": 
"py2.py3", - "size": 31658, - "upload_time": "2017-12-30T08:20:05", - "url": "https://files.pythonhosted.org/packages/b5/60/4e178c1e790fd60f1229a9b3cb2f8bc2f4cc6ff2c8838054c142c70b5adc/attrs-17.4.0-py2.py3-none-any.whl" - }, - { - "comment_text": "", - "digests": { - "md5": "d7a89063b2e0fd36bd82389c4d82821d", - "sha256": "1c7960ccfd6a005cd9f7ba884e6316b5e430a3f1a6c37c5f87d8b43f83b54ec9" - }, - "downloads": -1, - "filename": "attrs-17.4.0.tar.gz", - "has_sig": true, - "md5_digest": "d7a89063b2e0fd36bd82389c4d82821d", - "packagetype": "sdist", - "python_version": "source", - "size": 97071, - "upload_time": "2017-12-30T08:20:08", - "url": "https://files.pythonhosted.org/packages/8b/0b/a06cfcb69d0cb004fde8bc6f0fd192d96d565d1b8aa2829f0f20adb796e5/attrs-17.4.0.tar.gz" - } - ] + "home_page": "http://www.attrs.org/", + "keywords": "class,attribute,boilerplate", + "license": "MIT", + "maintainer": "", + "maintainer_email": "", + "name": "attrs", + "package_url": "https://pypi.org/project/attrs/", + "platform": "", + "project_url": "https://pypi.org/project/attrs/", + "release_url": "https://pypi.org/project/attrs/17.4.0/", + "requires_dist": [ + "coverage; extra == 'dev'", + "hypothesis; extra == 'dev'", + "pympler; extra == 'dev'", + "pytest; extra == 'dev'", + "six; extra == 'dev'", + "zope.interface; extra == 'dev'", + "sphinx; extra == 'dev'", + "zope.interface; extra == 'dev'", + "sphinx; extra == 'docs'", + "zope.interface; extra == 'docs'", + "coverage; extra == 'tests'", + "hypothesis; extra == 'tests'", + "pympler; extra == 'tests'", + "pytest; extra == 'tests'", + "six; extra == 'tests'", + "zope.interface; extra == 'tests'" + ], + "requires_python": "", + "summary": "Classes Without Boilerplate", + "version": "17.4.0" + }, + "last_serial": 3451237, + "urls": [ + { + "comment_text": "", + "digests": { + "md5": "5835a573b3f0316e1602dac3fd9c1daf", + "sha256": "a17a9573a6f475c99b551c0e0a812707ddda1ec9653bed04c13841404ed6f450" + }, + "downloads": -1, + "filename": "attrs-17.4.0-py2.py3-none-any.whl", + "has_sig": true, + "md5_digest": "5835a573b3f0316e1602dac3fd9c1daf", + "packagetype": "bdist_wheel", + "python_version": "py2.py3", + "size": 31658, + "upload_time": "2017-12-30T08:20:05", + "url": "https://files.pythonhosted.org/packages/b5/60/4e178c1e790fd60f1229a9b3cb2f8bc2f4cc6ff2c8838054c142c70b5adc/attrs-17.4.0-py2.py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "d7a89063b2e0fd36bd82389c4d82821d", + "sha256": "1c7960ccfd6a005cd9f7ba884e6316b5e430a3f1a6c37c5f87d8b43f83b54ec9" + }, + "downloads": -1, + "filename": "attrs-17.4.0.tar.gz", + "has_sig": true, + "md5_digest": "d7a89063b2e0fd36bd82389c4d82821d", + "packagetype": "sdist", + "python_version": "source", + "size": 97071, + "upload_time": "2017-12-30T08:20:08", + "url": "https://files.pythonhosted.org/packages/8b/0b/a06cfcb69d0cb004fde8bc6f0fd192d96d565d1b8aa2829f0f20adb796e5/attrs-17.4.0.tar.gz" + } + ] } diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/black.json b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/black.json index f4e244d1..38db6d8c 100644 --- a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/black.json +++ b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/black.json @@ -1,6 +1,6 @@ { "info": { - "author": "\u0141ukasz Langa", + "author": "Åukasz Langa", "author_email": "lukasz@langa.pl", "bugtrack_url": null, "classifiers": [ @@ -99,6 +99,48 @@ "yanked": false, "yanked_reason": null } + ], + "21.11b0": [ + { + "comment_text": "", + "digests": { + 
"md5": "945da11b34c11738560fc6698cffa425", + "sha256": "0b1f66cbfadcd332ceeaeecf6373d9991d451868d2e2219ad0ac1213fb701117" + }, + "downloads": -1, + "filename": "black-21.11b0-py3-none-any.whl", + "has_sig": false, + "md5_digest": "945da11b34c11738560fc6698cffa425", + "packagetype": "bdist_wheel", + "python_version": "py3", + "requires_python": ">=3.6.2", + "size": 155131, + "upload_time": "2021-11-17T02:32:14", + "upload_time_iso_8601": "2021-11-17T02:32:14.551680Z", + "url": "https://files.pythonhosted.org/packages/3d/ad/1cf514e7f9ee4c3d8df7c839d7977f7605ad76557f3fca741ec67f76dba6/black-21.11b0-py3-none-any.whl", + "yanked": true, + "yanked_reason": "Broken regex dependency. Use 21.11b1 instead." + }, + { + "comment_text": "", + "digests": { + "md5": "6040b4e4c6ccc4e7eb81bb2634ef299a", + "sha256": "83f3852301c8dcb229e9c444dd79f573c8d31c7c2dad9bbaaa94c808630e32aa" + }, + "downloads": -1, + "filename": "black-21.11b0.tar.gz", + "has_sig": false, + "md5_digest": "6040b4e4c6ccc4e7eb81bb2634ef299a", + "packagetype": "sdist", + "python_version": "source", + "requires_python": ">=3.6.2", + "size": 593164, + "upload_time": "2021-11-17T02:32:16", + "upload_time_iso_8601": "2021-11-17T02:32:16.396821Z", + "url": "https://files.pythonhosted.org/packages/2f/db/03e8cef689ab0ff857576ee2ee288d1ff2110ef7f3a77cac62e61f18acaf/black-21.11b0.tar.gz", + "yanked": true, + "yanked_reason": "Broken regex dependency. Use 21.11b1 instead." + } ] }, "urls": [ diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/black/21.11b0.json b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/black/21.11b0.json new file mode 100644 index 00000000..f816ef3b --- /dev/null +++ b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/black/21.11b0.json @@ -0,0 +1,155 @@ +{ + "info": { + "author": "Åukasz Langa", + "author_email": "lukasz@langa.pl", + "bugtrack_url": null, + "classifiers": [ + "Development Status :: 4 - Beta", + "Environment :: Console", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3 :: Only", + "Topic :: Software Development :: Libraries :: Python Modules", + "Topic :: Software Development :: Quality Assurance" + ], + "description": "", + "description_content_type": "text/markdown", + "docs_url": null, + "download_url": "", + "downloads": { + "last_day": -1, + "last_month": -1, + "last_week": -1 + }, + "home_page": "https://github.com/psf/black", + "keywords": "automation formatter yapf autopep8 pyfmt gofmt rustfmt", + "license": "MIT", + "maintainer": "", + "maintainer_email": "", + "name": "black", + "package_url": "https://pypi.org/project/black/", + "platform": "", + "project_url": "https://pypi.org/project/black/", + "project_urls": { + "Changelog": "https://github.com/psf/black/blob/main/CHANGES.md", + "Homepage": "https://github.com/psf/black" + }, + "release_url": "https://pypi.org/project/black/21.11b0/", + "requires_dist": [ + "click (>=7.1.2)", + "platformdirs (>=2)", + "tomli (<2.0.0,>=0.2.6)", + "regex (>=2020.1.8)", + "pathspec (<1,>=0.9.0)", + "typing-extensions (>=3.10.0.0)", + "mypy-extensions (>=0.4.3)", + "dataclasses (>=0.6) ; python_version < \"3.7\"", + "typed-ast (>=1.4.2) ; python_version < \"3.8\" and implementation_name == \"cpython\"", + "typing-extensions (!=3.10.0.1) ; python_version >= 
\"3.10\"", + "colorama (>=0.4.3) ; extra == 'colorama'", + "aiohttp (>=3.7.4) ; extra == 'd'", + "ipython (>=7.8.0) ; extra == 'jupyter'", + "tokenize-rt (>=3.2.0) ; extra == 'jupyter'", + "typed-ast (>=1.4.3) ; extra == 'python2'", + "uvloop (>=0.15.2) ; extra == 'uvloop'" + ], + "requires_python": ">=3.6.2", + "summary": "The uncompromising code formatter.", + "version": "21.11b0", + "yanked": true, + "yanked_reason": "Broken regex dependency. Use 21.11b1 instead." + }, + "last_serial": 13326107, + "releases": { + "21.11b0": [ + { + "comment_text": "", + "digests": { + "md5": "945da11b34c11738560fc6698cffa425", + "sha256": "0b1f66cbfadcd332ceeaeecf6373d9991d451868d2e2219ad0ac1213fb701117" + }, + "downloads": -1, + "filename": "black-21.11b0-py3-none-any.whl", + "has_sig": false, + "md5_digest": "945da11b34c11738560fc6698cffa425", + "packagetype": "bdist_wheel", + "python_version": "py3", + "requires_python": ">=3.6.2", + "size": 155131, + "upload_time": "2021-11-17T02:32:14", + "upload_time_iso_8601": "2021-11-17T02:32:14.551680Z", + "url": "https://files.pythonhosted.org/packages/3d/ad/1cf514e7f9ee4c3d8df7c839d7977f7605ad76557f3fca741ec67f76dba6/black-21.11b0-py3-none-any.whl", + "yanked": true, + "yanked_reason": "Broken regex dependency. Use 21.11b1 instead." + }, + { + "comment_text": "", + "digests": { + "md5": "6040b4e4c6ccc4e7eb81bb2634ef299a", + "sha256": "83f3852301c8dcb229e9c444dd79f573c8d31c7c2dad9bbaaa94c808630e32aa" + }, + "downloads": -1, + "filename": "black-21.11b0.tar.gz", + "has_sig": false, + "md5_digest": "6040b4e4c6ccc4e7eb81bb2634ef299a", + "packagetype": "sdist", + "python_version": "source", + "requires_python": ">=3.6.2", + "size": 593164, + "upload_time": "2021-11-17T02:32:16", + "upload_time_iso_8601": "2021-11-17T02:32:16.396821Z", + "url": "https://files.pythonhosted.org/packages/2f/db/03e8cef689ab0ff857576ee2ee288d1ff2110ef7f3a77cac62e61f18acaf/black-21.11b0.tar.gz", + "yanked": true, + "yanked_reason": "Broken regex dependency. Use 21.11b1 instead." + } + ] + }, + "urls": [ + { + "comment_text": "", + "digests": { + "md5": "945da11b34c11738560fc6698cffa425", + "sha256": "0b1f66cbfadcd332ceeaeecf6373d9991d451868d2e2219ad0ac1213fb701117" + }, + "downloads": -1, + "filename": "black-21.11b0-py3-none-any.whl", + "has_sig": false, + "md5_digest": "945da11b34c11738560fc6698cffa425", + "packagetype": "bdist_wheel", + "python_version": "py3", + "requires_python": ">=3.6.2", + "size": 155131, + "upload_time": "2021-11-17T02:32:14", + "upload_time_iso_8601": "2021-11-17T02:32:14.551680Z", + "url": "https://files.pythonhosted.org/packages/3d/ad/1cf514e7f9ee4c3d8df7c839d7977f7605ad76557f3fca741ec67f76dba6/black-21.11b0-py3-none-any.whl", + "yanked": true, + "yanked_reason": "Broken regex dependency. Use 21.11b1 instead." + }, + { + "comment_text": "", + "digests": { + "md5": "6040b4e4c6ccc4e7eb81bb2634ef299a", + "sha256": "83f3852301c8dcb229e9c444dd79f573c8d31c7c2dad9bbaaa94c808630e32aa" + }, + "downloads": -1, + "filename": "black-21.11b0.tar.gz", + "has_sig": false, + "md5_digest": "6040b4e4c6ccc4e7eb81bb2634ef299a", + "packagetype": "sdist", + "python_version": "source", + "requires_python": ">=3.6.2", + "size": 593164, + "upload_time": "2021-11-17T02:32:16", + "upload_time_iso_8601": "2021-11-17T02:32:16.396821Z", + "url": "https://files.pythonhosted.org/packages/2f/db/03e8cef689ab0ff857576ee2ee288d1ff2110ef7f3a77cac62e61f18acaf/black-21.11b0.tar.gz", + "yanked": true, + "yanked_reason": "Broken regex dependency. Use 21.11b1 instead." 
+ } + ], + "vulnerabilities": [] +} diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/colorama.json b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/colorama.json index d26defda..5417b41a 100644 --- a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/colorama.json +++ b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/colorama.json @@ -1,118 +1,118 @@ { - "info": { - "author": "Arnon Yaari", - "author_email": "tartley@tartley.com", - "bugtrack_url": null, - "classifiers": [ - "Development Status :: 5 - Production/Stable", - "Environment :: Console", - "Intended Audience :: Developers", - "License :: OSI Approved :: BSD License", - "Operating System :: OS Independent", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.5", - "Programming Language :: Python :: 2.6", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.1", - "Programming Language :: Python :: 3.2", - "Programming Language :: Python :: 3.3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", - "Topic :: Terminals" - ], - "description": "", - "docs_url": null, - "download_url": "UNKNOWN", - "downloads": { - "last_day": -1, - "last_month": -1, - "last_week": -1 - }, - "home_page": "https://github.com/tartley/colorama", - "keywords": "color colour terminal text ansi windows crossplatform xplatform", - "license": "BSD", - "maintainer": null, - "maintainer_email": null, - "name": "colorama", - "package_url": "https://pypi.org/project/colorama/", - "platform": "UNKNOWN", - "project_url": "https://pypi.org/project/colorama/", - "release_url": "https://pypi.org/project/colorama/0.3.9/", - "requires_dist": null, - "requires_python": null, - "summary": "Cross-platform colored terminal text.", - "version": "0.3.9" - }, - "last_serial": 2833818, - "releases": { - "0.3.9": [ - { - "comment_text": "", - "digests": { - "md5": "cc0c01c7b3b34d0354d813e9ab26aca3", - "sha256": "463f8483208e921368c9f306094eb6f725c6ca42b0f97e313cb5d5512459feda" - }, - "downloads": -1, - "filename": "colorama-0.3.9-py2.py3-none-any.whl", - "has_sig": false, - "md5_digest": "cc0c01c7b3b34d0354d813e9ab26aca3", - "packagetype": "bdist_wheel", - "python_version": "2.7", - "size": 20181, - "upload_time": "2017-04-27T07:12:36", - "url": "https://files.pythonhosted.org/packages/db/c8/7dcf9dbcb22429512708fe3a547f8b6101c0d02137acbd892505aee57adf/colorama-0.3.9-py2.py3-none-any.whl" - }, - { - "comment_text": "", - "digests": { - "md5": "3a0e415259690f4dd7455c2683ee5850", - "sha256": "48eb22f4f8461b1df5734a074b57042430fb06e1d61bd1e11b078c0fe6d7a1f1" - }, - "downloads": -1, - "filename": "colorama-0.3.9.tar.gz", - "has_sig": false, - "md5_digest": "3a0e415259690f4dd7455c2683ee5850", - "packagetype": "sdist", - "python_version": "source", - "size": 25053, - "upload_time": "2017-04-27T07:12:12", - "url": "https://files.pythonhosted.org/packages/e6/76/257b53926889e2835355d74fec73d82662100135293e17d382e2b74d1669/colorama-0.3.9.tar.gz" - } - ] + "info": { + "author": "Arnon Yaari", + "author_email": "tartley@tartley.com", + "bugtrack_url": null, + "classifiers": [ + "Development Status :: 5 - Production/Stable", + "Environment :: Console", + "Intended Audience :: Developers", + "License :: OSI Approved :: BSD License", + "Operating System :: OS Independent", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.5", + "Programming Language :: Python :: 2.6", + "Programming Language :: 
Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.1", + "Programming Language :: Python :: 3.2", + "Programming Language :: Python :: 3.3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Topic :: Terminals" + ], + "description": "", + "docs_url": null, + "download_url": "UNKNOWN", + "downloads": { + "last_day": -1, + "last_month": -1, + "last_week": -1 }, - "urls": [ - { - "comment_text": "", - "digests": { - "md5": "cc0c01c7b3b34d0354d813e9ab26aca3", - "sha256": "463f8483208e921368c9f306094eb6f725c6ca42b0f97e313cb5d5512459feda" - }, - "downloads": -1, - "filename": "colorama-0.3.9-py2.py3-none-any.whl", - "has_sig": false, - "md5_digest": "cc0c01c7b3b34d0354d813e9ab26aca3", - "packagetype": "bdist_wheel", - "python_version": "2.7", - "size": 20181, - "upload_time": "2017-04-27T07:12:36", - "url": "https://files.pythonhosted.org/packages/db/c8/7dcf9dbcb22429512708fe3a547f8b6101c0d02137acbd892505aee57adf/colorama-0.3.9-py2.py3-none-any.whl" + "home_page": "https://github.com/tartley/colorama", + "keywords": "color colour terminal text ansi windows crossplatform xplatform", + "license": "BSD", + "maintainer": null, + "maintainer_email": null, + "name": "colorama", + "package_url": "https://pypi.org/project/colorama/", + "platform": "UNKNOWN", + "project_url": "https://pypi.org/project/colorama/", + "release_url": "https://pypi.org/project/colorama/0.3.9/", + "requires_dist": null, + "requires_python": null, + "summary": "Cross-platform colored terminal text.", + "version": "0.3.9" + }, + "last_serial": 2833818, + "releases": { + "0.3.9": [ + { + "comment_text": "", + "digests": { + "md5": "cc0c01c7b3b34d0354d813e9ab26aca3", + "sha256": "463f8483208e921368c9f306094eb6f725c6ca42b0f97e313cb5d5512459feda" }, - { - "comment_text": "", - "digests": { - "md5": "3a0e415259690f4dd7455c2683ee5850", - "sha256": "48eb22f4f8461b1df5734a074b57042430fb06e1d61bd1e11b078c0fe6d7a1f1" - }, - "downloads": -1, - "filename": "colorama-0.3.9.tar.gz", - "has_sig": false, - "md5_digest": "3a0e415259690f4dd7455c2683ee5850", - "packagetype": "sdist", - "python_version": "source", - "size": 25053, - "upload_time": "2017-04-27T07:12:12", - "url": "https://files.pythonhosted.org/packages/e6/76/257b53926889e2835355d74fec73d82662100135293e17d382e2b74d1669/colorama-0.3.9.tar.gz" - } + "downloads": -1, + "filename": "colorama-0.3.9-py2.py3-none-any.whl", + "has_sig": false, + "md5_digest": "cc0c01c7b3b34d0354d813e9ab26aca3", + "packagetype": "bdist_wheel", + "python_version": "2.7", + "size": 20181, + "upload_time": "2017-04-27T07:12:36", + "url": "https://files.pythonhosted.org/packages/db/c8/7dcf9dbcb22429512708fe3a547f8b6101c0d02137acbd892505aee57adf/colorama-0.3.9-py2.py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "3a0e415259690f4dd7455c2683ee5850", + "sha256": "48eb22f4f8461b1df5734a074b57042430fb06e1d61bd1e11b078c0fe6d7a1f1" + }, + "downloads": -1, + "filename": "colorama-0.3.9.tar.gz", + "has_sig": false, + "md5_digest": "3a0e415259690f4dd7455c2683ee5850", + "packagetype": "sdist", + "python_version": "source", + "size": 25053, + "upload_time": "2017-04-27T07:12:12", + "url": "https://files.pythonhosted.org/packages/e6/76/257b53926889e2835355d74fec73d82662100135293e17d382e2b74d1669/colorama-0.3.9.tar.gz" + } ] + }, + "urls": [ + { + "comment_text": "", + "digests": { + "md5": "cc0c01c7b3b34d0354d813e9ab26aca3", + "sha256": "463f8483208e921368c9f306094eb6f725c6ca42b0f97e313cb5d5512459feda" + }, + 
"downloads": -1, + "filename": "colorama-0.3.9-py2.py3-none-any.whl", + "has_sig": false, + "md5_digest": "cc0c01c7b3b34d0354d813e9ab26aca3", + "packagetype": "bdist_wheel", + "python_version": "2.7", + "size": 20181, + "upload_time": "2017-04-27T07:12:36", + "url": "https://files.pythonhosted.org/packages/db/c8/7dcf9dbcb22429512708fe3a547f8b6101c0d02137acbd892505aee57adf/colorama-0.3.9-py2.py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "3a0e415259690f4dd7455c2683ee5850", + "sha256": "48eb22f4f8461b1df5734a074b57042430fb06e1d61bd1e11b078c0fe6d7a1f1" + }, + "downloads": -1, + "filename": "colorama-0.3.9.tar.gz", + "has_sig": false, + "md5_digest": "3a0e415259690f4dd7455c2683ee5850", + "packagetype": "sdist", + "python_version": "source", + "size": 25053, + "upload_time": "2017-04-27T07:12:12", + "url": "https://files.pythonhosted.org/packages/e6/76/257b53926889e2835355d74fec73d82662100135293e17d382e2b74d1669/colorama-0.3.9.tar.gz" + } + ] } diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/colorama/0.3.9.json b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/colorama/0.3.9.json index acf8cd62..6ddef249 100644 --- a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/colorama/0.3.9.json +++ b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/colorama/0.3.9.json @@ -1,82 +1,82 @@ { - "info": { - "author": "Arnon Yaari", - "author_email": "tartley@tartley.com", - "bugtrack_url": null, - "classifiers": [ - "Development Status :: 5 - Production/Stable", - "Environment :: Console", - "Intended Audience :: Developers", - "License :: OSI Approved :: BSD License", - "Operating System :: OS Independent", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.5", - "Programming Language :: Python :: 2.6", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.1", - "Programming Language :: Python :: 3.2", - "Programming Language :: Python :: 3.3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", - "Topic :: Terminals" - ], - "description": "", - "docs_url": null, - "download_url": "UNKNOWN", - "downloads": { - "last_day": -1, - "last_month": -1, - "last_week": -1 - }, - "home_page": "https://github.com/tartley/colorama", - "keywords": "color colour terminal text ansi windows crossplatform xplatform", - "license": "BSD", - "maintainer": null, - "maintainer_email": null, - "name": "colorama", - "package_url": "https://pypi.org/project/colorama/", - "platform": "UNKNOWN", - "project_url": "https://pypi.org/project/colorama/", - "release_url": "https://pypi.org/project/colorama/0.3.9/", - "requires_dist": null, - "requires_python": null, - "summary": "Cross-platform colored terminal text.", - "version": "0.3.9" + "info": { + "author": "Arnon Yaari", + "author_email": "tartley@tartley.com", + "bugtrack_url": null, + "classifiers": [ + "Development Status :: 5 - Production/Stable", + "Environment :: Console", + "Intended Audience :: Developers", + "License :: OSI Approved :: BSD License", + "Operating System :: OS Independent", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.5", + "Programming Language :: Python :: 2.6", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.1", + "Programming Language :: Python :: 3.2", + "Programming Language :: Python :: 3.3", + "Programming Language :: Python :: 3.4", + "Programming 
Language :: Python :: 3.5", + "Topic :: Terminals" + ], + "description": "", + "docs_url": null, + "download_url": "UNKNOWN", + "downloads": { + "last_day": -1, + "last_month": -1, + "last_week": -1 }, - "last_serial": 2833818, - "urls": [ - { - "comment_text": "", - "digests": { - "md5": "cc0c01c7b3b34d0354d813e9ab26aca3", - "sha256": "463f8483208e921368c9f306094eb6f725c6ca42b0f97e313cb5d5512459feda" - }, - "downloads": -1, - "filename": "colorama-0.3.9-py2.py3-none-any.whl", - "has_sig": false, - "md5_digest": "cc0c01c7b3b34d0354d813e9ab26aca3", - "packagetype": "bdist_wheel", - "python_version": "2.7", - "size": 20181, - "upload_time": "2017-04-27T07:12:36", - "url": "https://files.pythonhosted.org/packages/db/c8/7dcf9dbcb22429512708fe3a547f8b6101c0d02137acbd892505aee57adf/colorama-0.3.9-py2.py3-none-any.whl" - }, - { - "comment_text": "", - "digests": { - "md5": "3a0e415259690f4dd7455c2683ee5850", - "sha256": "48eb22f4f8461b1df5734a074b57042430fb06e1d61bd1e11b078c0fe6d7a1f1" - }, - "downloads": -1, - "filename": "colorama-0.3.9.tar.gz", - "has_sig": false, - "md5_digest": "3a0e415259690f4dd7455c2683ee5850", - "packagetype": "sdist", - "python_version": "source", - "size": 25053, - "upload_time": "2017-04-27T07:12:12", - "url": "https://files.pythonhosted.org/packages/e6/76/257b53926889e2835355d74fec73d82662100135293e17d382e2b74d1669/colorama-0.3.9.tar.gz" - } - ] + "home_page": "https://github.com/tartley/colorama", + "keywords": "color colour terminal text ansi windows crossplatform xplatform", + "license": "BSD", + "maintainer": null, + "maintainer_email": null, + "name": "colorama", + "package_url": "https://pypi.org/project/colorama/", + "platform": "UNKNOWN", + "project_url": "https://pypi.org/project/colorama/", + "release_url": "https://pypi.org/project/colorama/0.3.9/", + "requires_dist": null, + "requires_python": null, + "summary": "Cross-platform colored terminal text.", + "version": "0.3.9" + }, + "last_serial": 2833818, + "urls": [ + { + "comment_text": "", + "digests": { + "md5": "cc0c01c7b3b34d0354d813e9ab26aca3", + "sha256": "463f8483208e921368c9f306094eb6f725c6ca42b0f97e313cb5d5512459feda" + }, + "downloads": -1, + "filename": "colorama-0.3.9-py2.py3-none-any.whl", + "has_sig": false, + "md5_digest": "cc0c01c7b3b34d0354d813e9ab26aca3", + "packagetype": "bdist_wheel", + "python_version": "2.7", + "size": 20181, + "upload_time": "2017-04-27T07:12:36", + "url": "https://files.pythonhosted.org/packages/db/c8/7dcf9dbcb22429512708fe3a547f8b6101c0d02137acbd892505aee57adf/colorama-0.3.9-py2.py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "3a0e415259690f4dd7455c2683ee5850", + "sha256": "48eb22f4f8461b1df5734a074b57042430fb06e1d61bd1e11b078c0fe6d7a1f1" + }, + "downloads": -1, + "filename": "colorama-0.3.9.tar.gz", + "has_sig": false, + "md5_digest": "3a0e415259690f4dd7455c2683ee5850", + "packagetype": "sdist", + "python_version": "source", + "size": 25053, + "upload_time": "2017-04-27T07:12:12", + "url": "https://files.pythonhosted.org/packages/e6/76/257b53926889e2835355d74fec73d82662100135293e17d382e2b74d1669/colorama-0.3.9.tar.gz" + } + ] } diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/discord-py/2.0.0.json b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/discord-py/2.0.0.json new file mode 100644 index 00000000..be44790b --- /dev/null +++ b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/discord-py/2.0.0.json @@ -0,0 +1,113 @@ +{ + "info": { + "author": "Rapptz", + "author_email": "", + "bugtrack_url": null, + 
"classifiers": [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Topic :: Internet", + "Topic :: Software Development :: Libraries", + "Topic :: Software Development :: Libraries :: Python Modules", + "Topic :: Utilities", + "Typing :: Typed" + ], + "description": "", + "description_content_type": "text/x-rst", + "docs_url": null, + "download_url": "", + "downloads": { + "last_day": -1, + "last_month": -1, + "last_week": -1 + }, + "home_page": "https://github.com/Rapptz/discord.py", + "keywords": "", + "license": "MIT", + "maintainer": "", + "maintainer_email": "", + "name": "discord.py", + "package_url": "https://pypi.org/project/discord.py/", + "platform": null, + "project_url": "https://pypi.org/project/discord.py/", + "project_urls": { + "Documentation": "https://discordpy.readthedocs.io/en/latest/", + "Homepage": "https://github.com/Rapptz/discord.py", + "Issue tracker": "https://github.com/Rapptz/discord.py/issues" + }, + "release_url": "https://pypi.org/project/discord.py/2.0.0/", + "requires_dist": [ + "PyNaCl (<1.6,>=1.3.0) ; extra == 'voice'", + "typing-extensions (<5,>=4.3) ; extra == 'test'", + "pytest-mock ; extra == 'test'", + "pytest-cov ; extra == 'test'", + "pytest-asyncio ; extra == 'test'", + "pytest ; extra == 'test'", + "coverage[toml] ; extra == 'test'", + "cchardet ; extra == 'speed'", + "Brotli ; extra == 'speed'", + "aiodns (>=1.1) ; extra == 'speed'", + "orjson (>=3.5.4) ; extra == 'speed'", + "typing-extensions (<5,>=4.3) ; extra == 'docs'", + "sphinxcontrib-websupport ; extra == 'docs'", + "sphinxcontrib-trio (==1.1.2) ; extra == 'docs'", + "sphinx (==4.4.0) ; extra == 'docs'", + "aiohttp (<4,>=3.7.4)" + ], + "requires_python": ">=3.8.0", + "summary": "A Python wrapper for the Discord API", + "version": "2.0.0", + "yanked": false, + "yanked_reason": null + }, + "last_serial": 14796560, + "urls": [ + { + "comment_text": "", + "digests": { + "md5": "4df2fceef99934d1fdac5a9b0aa94173", + "sha256": "18b06870bdc85d29e0d55f4a4b2abe9d7cdae2b197e23d49f82886ba27ba1aec" + }, + "downloads": -1, + "filename": "discord.py-2.0.0-py3-none-any.whl", + "has_sig": false, + "md5_digest": "4df2fceef99934d1fdac5a9b0aa94173", + "packagetype": "bdist_wheel", + "python_version": "py3", + "requires_python": ">=3.8.0", + "size": 1059049, + "upload_time": "2022-08-18T03:47:52", + "upload_time_iso_8601": "2022-08-18T03:47:52.438785Z", + "url": "https://files.pythonhosted.org/packages/0e/d9/7b057cab41c16144925ba4f96dab576a8ebb7b80a98d40e06bd94298eb3b/discord.py-2.0.0-py3-none-any.whl", + "yanked": false, + "yanked_reason": null + }, + { + "comment_text": "", + "digests": { + "md5": "3aaca51997210bd2ae4d4b5401c00ab7", + "sha256": "c36f26935938194c3465c2abf8ecfbbf5560c50b189f1b746d6f00d1e78c0d3b" + }, + "downloads": -1, + "filename": "discord.py-2.0.0.tar.gz", + "has_sig": false, + "md5_digest": "3aaca51997210bd2ae4d4b5401c00ab7", + "packagetype": "sdist", + "python_version": "source", + "requires_python": ">=3.8.0", + "size": 955054, + "upload_time": "2022-08-18T03:47:54", + "upload_time_iso_8601": "2022-08-18T03:47:54.173712Z", + "url": "https://files.pythonhosted.org/packages/4c/73/fb89115b07588bf7a46e9eca972b89dd62b5856abd52297fe130b41d9d63/discord.py-2.0.0.tar.gz", + "yanked": false, + "yanked_reason": 
null + } + ], + "vulnerabilities": [] +} diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/funcsigs.json b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/funcsigs.json index 29ec8546..9ee371c2 100644 --- a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/funcsigs.json +++ b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/funcsigs.json @@ -1,117 +1,117 @@ { - "info": { - "author": "Testing Cabal", - "author_email": "testing-in-python@lists.idyll.org", - "bugtrack_url": null, - "classifiers": [ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Operating System :: OS Independent", - "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.6", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: Implementation :: CPython", - "Programming Language :: Python :: Implementation :: PyPy", - "Topic :: Software Development :: Libraries :: Python Modules" - ], - "description": "", - "docs_url": null, - "download_url": "UNKNOWN", - "downloads": { - "last_day": -1, - "last_month": -1, - "last_week": -1 - }, - "home_page": "http://funcsigs.readthedocs.org", - "keywords": null, - "license": "ASL", - "maintainer": null, - "maintainer_email": null, - "name": "funcsigs", - "package_url": "https://pypi.org/project/funcsigs/", - "platform": "UNKNOWN", - "project_url": "https://pypi.org/project/funcsigs/", - "release_url": "https://pypi.org/project/funcsigs/1.0.2/", - "requires_dist": null, - "requires_python": null, - "summary": "Python function signatures from PEP362 for Python 2.6, 2.7 and 3.2+", - "version": "1.0.2" - }, - "last_serial": 2083703, - "releases": { - "1.0.2": [ - { - "comment_text": "", - "digests": { - "md5": "701d58358171f34b6d1197de2923a35a", - "sha256": "330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca" - }, - "downloads": -1, - "filename": "funcsigs-1.0.2-py2.py3-none-any.whl", - "has_sig": true, - "md5_digest": "701d58358171f34b6d1197de2923a35a", - "packagetype": "bdist_wheel", - "python_version": "2.7", - "size": 17697, - "upload_time": "2016-04-25T22:22:05", - "url": "https://files.pythonhosted.org/packages/69/cb/f5be453359271714c01b9bd06126eaf2e368f1fddfff30818754b5ac2328/funcsigs-1.0.2-py2.py3-none-any.whl" - }, - { - "comment_text": "", - "digests": { - "md5": "7e583285b1fb8a76305d6d68f4ccc14e", - "sha256": "a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" - }, - "downloads": -1, - "filename": "funcsigs-1.0.2.tar.gz", - "has_sig": true, - "md5_digest": "7e583285b1fb8a76305d6d68f4ccc14e", - "packagetype": "sdist", - "python_version": "source", - "size": 27947, - "upload_time": "2016-04-25T22:22:33", - "url": "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz" - } - ] + "info": { + "author": "Testing Cabal", + "author_email": "testing-in-python@lists.idyll.org", + "bugtrack_url": null, + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.6", + "Programming 
Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", + "Topic :: Software Development :: Libraries :: Python Modules" + ], + "description": "", + "docs_url": null, + "download_url": "UNKNOWN", + "downloads": { + "last_day": -1, + "last_month": -1, + "last_week": -1 }, - "urls": [ - { - "comment_text": "", - "digests": { - "md5": "701d58358171f34b6d1197de2923a35a", - "sha256": "330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca" - }, - "downloads": -1, - "filename": "funcsigs-1.0.2-py2.py3-none-any.whl", - "has_sig": true, - "md5_digest": "701d58358171f34b6d1197de2923a35a", - "packagetype": "bdist_wheel", - "python_version": "2.7", - "size": 17697, - "upload_time": "2016-04-25T22:22:05", - "url": "https://files.pythonhosted.org/packages/69/cb/f5be453359271714c01b9bd06126eaf2e368f1fddfff30818754b5ac2328/funcsigs-1.0.2-py2.py3-none-any.whl" + "home_page": "http://funcsigs.readthedocs.org", + "keywords": null, + "license": "ASL", + "maintainer": null, + "maintainer_email": null, + "name": "funcsigs", + "package_url": "https://pypi.org/project/funcsigs/", + "platform": "UNKNOWN", + "project_url": "https://pypi.org/project/funcsigs/", + "release_url": "https://pypi.org/project/funcsigs/1.0.2/", + "requires_dist": null, + "requires_python": null, + "summary": "Python function signatures from PEP362 for Python 2.6, 2.7 and 3.2+", + "version": "1.0.2" + }, + "last_serial": 2083703, + "releases": { + "1.0.2": [ + { + "comment_text": "", + "digests": { + "md5": "701d58358171f34b6d1197de2923a35a", + "sha256": "330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca" }, - { - "comment_text": "", - "digests": { - "md5": "7e583285b1fb8a76305d6d68f4ccc14e", - "sha256": "a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" - }, - "downloads": -1, - "filename": "funcsigs-1.0.2.tar.gz", - "has_sig": true, - "md5_digest": "7e583285b1fb8a76305d6d68f4ccc14e", - "packagetype": "sdist", - "python_version": "source", - "size": 27947, - "upload_time": "2016-04-25T22:22:33", - "url": "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz" - } + "downloads": -1, + "filename": "funcsigs-1.0.2-py2.py3-none-any.whl", + "has_sig": true, + "md5_digest": "701d58358171f34b6d1197de2923a35a", + "packagetype": "bdist_wheel", + "python_version": "2.7", + "size": 17697, + "upload_time": "2016-04-25T22:22:05", + "url": "https://files.pythonhosted.org/packages/69/cb/f5be453359271714c01b9bd06126eaf2e368f1fddfff30818754b5ac2328/funcsigs-1.0.2-py2.py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "7e583285b1fb8a76305d6d68f4ccc14e", + "sha256": "a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" + }, + "downloads": -1, + "filename": "funcsigs-1.0.2.tar.gz", + "has_sig": true, + "md5_digest": "7e583285b1fb8a76305d6d68f4ccc14e", + "packagetype": "sdist", + "python_version": "source", + "size": 27947, + "upload_time": "2016-04-25T22:22:33", + "url": "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz" + } ] + }, + "urls": [ + { + "comment_text": "", + "digests": { + "md5": "701d58358171f34b6d1197de2923a35a", + "sha256": 
"330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca" + }, + "downloads": -1, + "filename": "funcsigs-1.0.2-py2.py3-none-any.whl", + "has_sig": true, + "md5_digest": "701d58358171f34b6d1197de2923a35a", + "packagetype": "bdist_wheel", + "python_version": "2.7", + "size": 17697, + "upload_time": "2016-04-25T22:22:05", + "url": "https://files.pythonhosted.org/packages/69/cb/f5be453359271714c01b9bd06126eaf2e368f1fddfff30818754b5ac2328/funcsigs-1.0.2-py2.py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "7e583285b1fb8a76305d6d68f4ccc14e", + "sha256": "a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" + }, + "downloads": -1, + "filename": "funcsigs-1.0.2.tar.gz", + "has_sig": true, + "md5_digest": "7e583285b1fb8a76305d6d68f4ccc14e", + "packagetype": "sdist", + "python_version": "source", + "size": 27947, + "upload_time": "2016-04-25T22:22:33", + "url": "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz" + } + ] } diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/funcsigs/1.0.2.json b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/funcsigs/1.0.2.json index 9f7f8fe8..487ed79f 100644 --- a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/funcsigs/1.0.2.json +++ b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/funcsigs/1.0.2.json @@ -1,81 +1,81 @@ { - "info": { - "author": "Testing Cabal", - "author_email": "testing-in-python@lists.idyll.org", - "bugtrack_url": null, - "classifiers": [ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Operating System :: OS Independent", - "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.6", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: Implementation :: CPython", - "Programming Language :: Python :: Implementation :: PyPy", - "Topic :: Software Development :: Libraries :: Python Modules" - ], - "description": "", - "docs_url": null, - "download_url": "UNKNOWN", - "downloads": { - "last_day": -1, - "last_month": -1, - "last_week": -1 - }, - "home_page": "http://funcsigs.readthedocs.org", - "keywords": null, - "license": "ASL", - "maintainer": null, - "maintainer_email": null, - "name": "funcsigs", - "package_url": "https://pypi.org/project/funcsigs/", - "platform": "UNKNOWN", - "project_url": "https://pypi.org/project/funcsigs/", - "release_url": "https://pypi.org/project/funcsigs/1.0.2/", - "requires_dist": null, - "requires_python": null, - "summary": "Python function signatures from PEP362 for Python 2.6, 2.7 and 3.2+", - "version": "1.0.2" + "info": { + "author": "Testing Cabal", + "author_email": "testing-in-python@lists.idyll.org", + "bugtrack_url": null, + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.6", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.3", + "Programming Language :: Python :: 3.4", + "Programming 
Language :: Python :: 3.5", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", + "Topic :: Software Development :: Libraries :: Python Modules" + ], + "description": "", + "docs_url": null, + "download_url": "UNKNOWN", + "downloads": { + "last_day": -1, + "last_month": -1, + "last_week": -1 }, - "last_serial": 2083703, - "urls": [ - { - "comment_text": "", - "digests": { - "md5": "701d58358171f34b6d1197de2923a35a", - "sha256": "330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca" - }, - "downloads": -1, - "filename": "funcsigs-1.0.2-py2.py3-none-any.whl", - "has_sig": true, - "md5_digest": "701d58358171f34b6d1197de2923a35a", - "packagetype": "bdist_wheel", - "python_version": "2.7", - "size": 17697, - "upload_time": "2016-04-25T22:22:05", - "url": "https://files.pythonhosted.org/packages/69/cb/f5be453359271714c01b9bd06126eaf2e368f1fddfff30818754b5ac2328/funcsigs-1.0.2-py2.py3-none-any.whl" - }, - { - "comment_text": "", - "digests": { - "md5": "7e583285b1fb8a76305d6d68f4ccc14e", - "sha256": "a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" - }, - "downloads": -1, - "filename": "funcsigs-1.0.2.tar.gz", - "has_sig": true, - "md5_digest": "7e583285b1fb8a76305d6d68f4ccc14e", - "packagetype": "sdist", - "python_version": "source", - "size": 27947, - "upload_time": "2016-04-25T22:22:33", - "url": "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz" - } - ] + "home_page": "http://funcsigs.readthedocs.org", + "keywords": null, + "license": "ASL", + "maintainer": null, + "maintainer_email": null, + "name": "funcsigs", + "package_url": "https://pypi.org/project/funcsigs/", + "platform": "UNKNOWN", + "project_url": "https://pypi.org/project/funcsigs/", + "release_url": "https://pypi.org/project/funcsigs/1.0.2/", + "requires_dist": null, + "requires_python": null, + "summary": "Python function signatures from PEP362 for Python 2.6, 2.7 and 3.2+", + "version": "1.0.2" + }, + "last_serial": 2083703, + "urls": [ + { + "comment_text": "", + "digests": { + "md5": "701d58358171f34b6d1197de2923a35a", + "sha256": "330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca" + }, + "downloads": -1, + "filename": "funcsigs-1.0.2-py2.py3-none-any.whl", + "has_sig": true, + "md5_digest": "701d58358171f34b6d1197de2923a35a", + "packagetype": "bdist_wheel", + "python_version": "2.7", + "size": 17697, + "upload_time": "2016-04-25T22:22:05", + "url": "https://files.pythonhosted.org/packages/69/cb/f5be453359271714c01b9bd06126eaf2e368f1fddfff30818754b5ac2328/funcsigs-1.0.2-py2.py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "7e583285b1fb8a76305d6d68f4ccc14e", + "sha256": "a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" + }, + "downloads": -1, + "filename": "funcsigs-1.0.2.tar.gz", + "has_sig": true, + "md5_digest": "7e583285b1fb8a76305d6d68f4ccc14e", + "packagetype": "sdist", + "python_version": "source", + "size": 27947, + "upload_time": "2016-04-25T22:22:33", + "url": "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz" + } + ] } diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/isort.json b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/isort.json index 3f5f91e9..7a6fdc72 100644 --- a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/isort.json +++ 
b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/isort.json @@ -1,160 +1,160 @@ { - "info": { - "author": "Timothy Crosley", - "author_email": "timothy.crosley@gmail.com", - "bugtrack_url": null, - "classifiers": [ - "Development Status :: 6 - Mature", - "Environment :: Console", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Natural Language :: English", - "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: Implementation :: CPython", - "Programming Language :: Python :: Implementation :: PyPy", - "Topic :: Software Development :: Libraries", - "Topic :: Utilities" - ], - "description": "", - "description_content_type": null, - "docs_url": null, - "download_url": "", - "downloads": { - "last_day": -1, - "last_month": -1, - "last_week": -1 - }, - "home_page": "https://github.com/timothycrosley/isort", - "keywords": "Refactor", - "license": "MIT", - "maintainer": "", - "maintainer_email": "", - "name": "isort", - "package_url": "https://pypi.org/project/isort/", - "platform": "", - "project_url": "https://pypi.org/project/isort/", - "project_urls": { - "Homepage": "https://github.com/timothycrosley/isort" - }, - "release_url": "https://pypi.org/project/isort/4.3.4/", - "requires_dist": null, - "requires_python": "", - "summary": "A Python utility / library to sort Python imports.", - "version": "4.3.4" + "info": { + "author": "Timothy Crosley", + "author_email": "timothy.crosley@gmail.com", + "bugtrack_url": null, + "classifiers": [ + "Development Status :: 6 - Mature", + "Environment :: Console", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Programming Language :: Python", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", + "Topic :: Software Development :: Libraries", + "Topic :: Utilities" + ], + "description": "", + "description_content_type": null, + "docs_url": null, + "download_url": "", + "downloads": { + "last_day": -1, + "last_month": -1, + "last_week": -1 }, - "last_serial": 3575149, - "releases": { - "4.3.4": [ - { - "comment_text": "", - "digests": { - "md5": "f0ad7704b6dc947073398ba290c3517f", - "sha256": "ec9ef8f4a9bc6f71eec99e1806bfa2de401650d996c59330782b89a5555c1497" - }, - "downloads": -1, - "filename": "isort-4.3.4-py2-none-any.whl", - "has_sig": false, - "md5_digest": "f0ad7704b6dc947073398ba290c3517f", - "packagetype": "bdist_wheel", - "python_version": "2.7", - "requires_python": null, - "size": 45393, - "upload_time": "2018-02-12T15:06:38", - "url": "https://files.pythonhosted.org/packages/41/d8/a945da414f2adc1d9e2f7d6e7445b27f2be42766879062a2e63616ad4199/isort-4.3.4-py2-none-any.whl" - }, - { - "comment_text": "", - "digests": { - "md5": "fbaac4cd669ac21ea9e21ab1ea3180db", - "sha256": "1153601da39a25b14ddc54955dbbacbb6b2d19135386699e2ad58517953b34af" - }, - "downloads": -1, - "filename": "isort-4.3.4-py3-none-any.whl", - "has_sig": false, - "md5_digest": 
"fbaac4cd669ac21ea9e21ab1ea3180db", - "packagetype": "bdist_wheel", - "python_version": "3.6", - "requires_python": null, - "size": 45352, - "upload_time": "2018-02-12T15:06:20", - "url": "https://files.pythonhosted.org/packages/1f/2c/22eee714d7199ae0464beda6ad5fedec8fee6a2f7ffd1e8f1840928fe318/isort-4.3.4-py3-none-any.whl" - }, - { - "comment_text": "", - "digests": { - "md5": "fb554e9c8f9aa76e333a03d470a5cf52", - "sha256": "b9c40e9750f3d77e6e4d441d8b0266cf555e7cdabdcff33c4fd06366ca761ef8" - }, - "downloads": -1, - "filename": "isort-4.3.4.tar.gz", - "has_sig": false, - "md5_digest": "fb554e9c8f9aa76e333a03d470a5cf52", - "packagetype": "sdist", - "python_version": "source", - "requires_python": null, - "size": 56070, - "upload_time": "2018-02-12T15:06:16", - "url": "https://files.pythonhosted.org/packages/b1/de/a628d16fdba0d38cafb3d7e34d4830f2c9cb3881384ce5c08c44762e1846/isort-4.3.4.tar.gz" - } - ] + "home_page": "https://github.com/timothycrosley/isort", + "keywords": "Refactor", + "license": "MIT", + "maintainer": "", + "maintainer_email": "", + "name": "isort", + "package_url": "https://pypi.org/project/isort/", + "platform": "", + "project_url": "https://pypi.org/project/isort/", + "project_urls": { + "Homepage": "https://github.com/timothycrosley/isort" }, - "urls": [ - { - "comment_text": "", - "digests": { - "md5": "f0ad7704b6dc947073398ba290c3517f", - "sha256": "ec9ef8f4a9bc6f71eec99e1806bfa2de401650d996c59330782b89a5555c1497" - }, - "downloads": -1, - "filename": "isort-4.3.4-py2-none-any.whl", - "has_sig": false, - "md5_digest": "f0ad7704b6dc947073398ba290c3517f", - "packagetype": "bdist_wheel", - "python_version": "2.7", - "requires_python": null, - "size": 45393, - "upload_time": "2018-02-12T15:06:38", - "url": "https://files.pythonhosted.org/packages/41/d8/a945da414f2adc1d9e2f7d6e7445b27f2be42766879062a2e63616ad4199/isort-4.3.4-py2-none-any.whl" + "release_url": "https://pypi.org/project/isort/4.3.4/", + "requires_dist": null, + "requires_python": "", + "summary": "A Python utility / library to sort Python imports.", + "version": "4.3.4" + }, + "last_serial": 3575149, + "releases": { + "4.3.4": [ + { + "comment_text": "", + "digests": { + "md5": "f0ad7704b6dc947073398ba290c3517f", + "sha256": "ec9ef8f4a9bc6f71eec99e1806bfa2de401650d996c59330782b89a5555c1497" + }, + "downloads": -1, + "filename": "isort-4.3.4-py2-none-any.whl", + "has_sig": false, + "md5_digest": "f0ad7704b6dc947073398ba290c3517f", + "packagetype": "bdist_wheel", + "python_version": "2.7", + "requires_python": null, + "size": 45393, + "upload_time": "2018-02-12T15:06:38", + "url": "https://files.pythonhosted.org/packages/41/d8/a945da414f2adc1d9e2f7d6e7445b27f2be42766879062a2e63616ad4199/isort-4.3.4-py2-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "fbaac4cd669ac21ea9e21ab1ea3180db", + "sha256": "1153601da39a25b14ddc54955dbbacbb6b2d19135386699e2ad58517953b34af" }, - { - "comment_text": "", - "digests": { - "md5": "fbaac4cd669ac21ea9e21ab1ea3180db", - "sha256": "1153601da39a25b14ddc54955dbbacbb6b2d19135386699e2ad58517953b34af" - }, - "downloads": -1, - "filename": "isort-4.3.4-py3-none-any.whl", - "has_sig": false, - "md5_digest": "fbaac4cd669ac21ea9e21ab1ea3180db", - "packagetype": "bdist_wheel", - "python_version": "3.6", - "requires_python": null, - "size": 45352, - "upload_time": "2018-02-12T15:06:20", - "url": "https://files.pythonhosted.org/packages/1f/2c/22eee714d7199ae0464beda6ad5fedec8fee6a2f7ffd1e8f1840928fe318/isort-4.3.4-py3-none-any.whl" + "downloads": -1, + "filename": 
"isort-4.3.4-py3-none-any.whl", + "has_sig": false, + "md5_digest": "fbaac4cd669ac21ea9e21ab1ea3180db", + "packagetype": "bdist_wheel", + "python_version": "3.6", + "requires_python": null, + "size": 45352, + "upload_time": "2018-02-12T15:06:20", + "url": "https://files.pythonhosted.org/packages/1f/2c/22eee714d7199ae0464beda6ad5fedec8fee6a2f7ffd1e8f1840928fe318/isort-4.3.4-py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "fb554e9c8f9aa76e333a03d470a5cf52", + "sha256": "b9c40e9750f3d77e6e4d441d8b0266cf555e7cdabdcff33c4fd06366ca761ef8" }, - { - "comment_text": "", - "digests": { - "md5": "fb554e9c8f9aa76e333a03d470a5cf52", - "sha256": "b9c40e9750f3d77e6e4d441d8b0266cf555e7cdabdcff33c4fd06366ca761ef8" - }, - "downloads": -1, - "filename": "isort-4.3.4.tar.gz", - "has_sig": false, - "md5_digest": "fb554e9c8f9aa76e333a03d470a5cf52", - "packagetype": "sdist", - "python_version": "source", - "requires_python": null, - "size": 56070, - "upload_time": "2018-02-12T15:06:16", - "url": "https://files.pythonhosted.org/packages/b1/de/a628d16fdba0d38cafb3d7e34d4830f2c9cb3881384ce5c08c44762e1846/isort-4.3.4.tar.gz" - } + "downloads": -1, + "filename": "isort-4.3.4.tar.gz", + "has_sig": false, + "md5_digest": "fb554e9c8f9aa76e333a03d470a5cf52", + "packagetype": "sdist", + "python_version": "source", + "requires_python": null, + "size": 56070, + "upload_time": "2018-02-12T15:06:16", + "url": "https://files.pythonhosted.org/packages/b1/de/a628d16fdba0d38cafb3d7e34d4830f2c9cb3881384ce5c08c44762e1846/isort-4.3.4.tar.gz" + } ] + }, + "urls": [ + { + "comment_text": "", + "digests": { + "md5": "f0ad7704b6dc947073398ba290c3517f", + "sha256": "ec9ef8f4a9bc6f71eec99e1806bfa2de401650d996c59330782b89a5555c1497" + }, + "downloads": -1, + "filename": "isort-4.3.4-py2-none-any.whl", + "has_sig": false, + "md5_digest": "f0ad7704b6dc947073398ba290c3517f", + "packagetype": "bdist_wheel", + "python_version": "2.7", + "requires_python": null, + "size": 45393, + "upload_time": "2018-02-12T15:06:38", + "url": "https://files.pythonhosted.org/packages/41/d8/a945da414f2adc1d9e2f7d6e7445b27f2be42766879062a2e63616ad4199/isort-4.3.4-py2-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "fbaac4cd669ac21ea9e21ab1ea3180db", + "sha256": "1153601da39a25b14ddc54955dbbacbb6b2d19135386699e2ad58517953b34af" + }, + "downloads": -1, + "filename": "isort-4.3.4-py3-none-any.whl", + "has_sig": false, + "md5_digest": "fbaac4cd669ac21ea9e21ab1ea3180db", + "packagetype": "bdist_wheel", + "python_version": "3.6", + "requires_python": null, + "size": 45352, + "upload_time": "2018-02-12T15:06:20", + "url": "https://files.pythonhosted.org/packages/1f/2c/22eee714d7199ae0464beda6ad5fedec8fee6a2f7ffd1e8f1840928fe318/isort-4.3.4-py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "fb554e9c8f9aa76e333a03d470a5cf52", + "sha256": "b9c40e9750f3d77e6e4d441d8b0266cf555e7cdabdcff33c4fd06366ca761ef8" + }, + "downloads": -1, + "filename": "isort-4.3.4.tar.gz", + "has_sig": false, + "md5_digest": "fb554e9c8f9aa76e333a03d470a5cf52", + "packagetype": "sdist", + "python_version": "source", + "requires_python": null, + "size": 56070, + "upload_time": "2018-02-12T15:06:16", + "url": "https://files.pythonhosted.org/packages/b1/de/a628d16fdba0d38cafb3d7e34d4830f2c9cb3881384ce5c08c44762e1846/isort-4.3.4.tar.gz" + } + ] } diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/jupyter.json b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/jupyter.json index 1e538842..eba1032a 100644 --- 
a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/jupyter.json +++ b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/jupyter.json @@ -1,156 +1,156 @@ { - "info": { - "author": "Jupyter Development Team", - "author_email": "jupyter@googlegroups.org", - "bugtrack_url": null, - "classifiers": [ - "Intended Audience :: Developers", - "Intended Audience :: Science/Research", - "Intended Audience :: System Administrators", - "License :: OSI Approved :: BSD License", - "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.3", - "Programming Language :: Python :: 3.4" - ], - "description": "Install the Jupyter system, including the notebook, qtconsole, and the IPython kernel.", - "description_content_type": null, - "docs_url": null, - "download_url": "UNKNOWN", - "downloads": { - "last_day": -1, - "last_month": -1, - "last_week": -1 - }, - "home_page": "http://jupyter.org", - "keywords": null, - "license": "BSD", - "maintainer": null, - "maintainer_email": null, - "name": "jupyter", - "package_url": "https://pypi.org/project/jupyter/", - "platform": "UNKNOWN", - "project_url": "https://pypi.org/project/jupyter/", - "project_urls": { - "Download": "UNKNOWN", - "Homepage": "http://jupyter.org" - }, - "release_url": "https://pypi.org/project/jupyter/1.0.0/", - "requires_dist": null, - "requires_python": null, - "summary": "Jupyter metapackage. Install all the Jupyter components in one go.", - "version": "1.0.0" + "info": { + "author": "Jupyter Development Team", + "author_email": "jupyter@googlegroups.org", + "bugtrack_url": null, + "classifiers": [ + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "Intended Audience :: System Administrators", + "License :: OSI Approved :: BSD License", + "Programming Language :: Python", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.3", + "Programming Language :: Python :: 3.4" + ], + "description": "Install the Jupyter system, including the notebook, qtconsole, and the IPython kernel.", + "description_content_type": null, + "docs_url": null, + "download_url": "UNKNOWN", + "downloads": { + "last_day": -1, + "last_month": -1, + "last_week": -1 }, - "last_serial": 1673841, - "releases": { - "0.0.0": [], - "1.0.0": [ - { - "comment_text": "", - "digests": { - "md5": "f81d039e084c2c0c4da9e4a86446b863", - "sha256": "5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78" - }, - "downloads": -1, - "filename": "jupyter-1.0.0-py2.py3-none-any.whl", - "has_sig": false, - "md5_digest": "f81d039e084c2c0c4da9e4a86446b863", - "packagetype": "bdist_wheel", - "python_version": "3.4", - "requires_python": null, - "size": 2736, - "upload_time": "2015-08-12T00:42:58", - "url": "https://files.pythonhosted.org/packages/83/df/0f5dd132200728a86190397e1ea87cd76244e42d39ec5e88efd25b2abd7e/jupyter-1.0.0-py2.py3-none-any.whl" - }, - { - "comment_text": "", - "digests": { - "md5": "c6030444c7eb6c05a4d7b1768c72aed7", - "sha256": "d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f" - }, - "downloads": -1, - "filename": "jupyter-1.0.0.tar.gz", - "has_sig": false, - "md5_digest": "c6030444c7eb6c05a4d7b1768c72aed7", - "packagetype": "sdist", - "python_version": "source", - "requires_python": null, - "size": 12916, - "upload_time": "2015-08-12T00:43:08", - "url": 
"https://files.pythonhosted.org/packages/c9/a9/371d0b8fe37dd231cf4b2cff0a9f0f25e98f3a73c3771742444be27f2944/jupyter-1.0.0.tar.gz" - }, - { - "comment_text": "", - "digests": { - "md5": "25142b08e2ad7142b6f920bc8cc8dfeb", - "sha256": "3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7" - }, - "downloads": -1, - "filename": "jupyter-1.0.0.zip", - "has_sig": false, - "md5_digest": "25142b08e2ad7142b6f920bc8cc8dfeb", - "packagetype": "sdist", - "python_version": "source", - "requires_python": null, - "size": 16690, - "upload_time": "2015-08-12T00:43:12", - "url": "https://files.pythonhosted.org/packages/fc/21/a372b73e3a498b41b92ed915ada7de2ad5e16631546329c03e484c3bf4e9/jupyter-1.0.0.zip" - } - ] + "home_page": "http://jupyter.org", + "keywords": null, + "license": "BSD", + "maintainer": null, + "maintainer_email": null, + "name": "jupyter", + "package_url": "https://pypi.org/project/jupyter/", + "platform": "UNKNOWN", + "project_url": "https://pypi.org/project/jupyter/", + "project_urls": { + "Download": "UNKNOWN", + "Homepage": "http://jupyter.org" }, - "urls": [ - { - "comment_text": "", - "digests": { - "md5": "f81d039e084c2c0c4da9e4a86446b863", - "sha256": "5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78" - }, - "downloads": -1, - "filename": "jupyter-1.0.0-py2.py3-none-any.whl", - "has_sig": false, - "md5_digest": "f81d039e084c2c0c4da9e4a86446b863", - "packagetype": "bdist_wheel", - "python_version": "3.4", - "requires_python": null, - "size": 2736, - "upload_time": "2015-08-12T00:42:58", - "url": "https://files.pythonhosted.org/packages/83/df/0f5dd132200728a86190397e1ea87cd76244e42d39ec5e88efd25b2abd7e/jupyter-1.0.0-py2.py3-none-any.whl" + "release_url": "https://pypi.org/project/jupyter/1.0.0/", + "requires_dist": null, + "requires_python": null, + "summary": "Jupyter metapackage. 
Install all the Jupyter components in one go.", + "version": "1.0.0" + }, + "last_serial": 1673841, + "releases": { + "0.0.0": [], + "1.0.0": [ + { + "comment_text": "", + "digests": { + "md5": "f81d039e084c2c0c4da9e4a86446b863", + "sha256": "5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78" }, - { - "comment_text": "", - "digests": { - "md5": "c6030444c7eb6c05a4d7b1768c72aed7", - "sha256": "d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f" - }, - "downloads": -1, - "filename": "jupyter-1.0.0.tar.gz", - "has_sig": false, - "md5_digest": "c6030444c7eb6c05a4d7b1768c72aed7", - "packagetype": "sdist", - "python_version": "source", - "requires_python": null, - "size": 12916, - "upload_time": "2015-08-12T00:43:08", - "url": "https://files.pythonhosted.org/packages/c9/a9/371d0b8fe37dd231cf4b2cff0a9f0f25e98f3a73c3771742444be27f2944/jupyter-1.0.0.tar.gz" + "downloads": -1, + "filename": "jupyter-1.0.0-py2.py3-none-any.whl", + "has_sig": false, + "md5_digest": "f81d039e084c2c0c4da9e4a86446b863", + "packagetype": "bdist_wheel", + "python_version": "3.4", + "requires_python": null, + "size": 2736, + "upload_time": "2015-08-12T00:42:58", + "url": "https://files.pythonhosted.org/packages/83/df/0f5dd132200728a86190397e1ea87cd76244e42d39ec5e88efd25b2abd7e/jupyter-1.0.0-py2.py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "c6030444c7eb6c05a4d7b1768c72aed7", + "sha256": "d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f" }, - { - "comment_text": "", - "digests": { - "md5": "25142b08e2ad7142b6f920bc8cc8dfeb", - "sha256": "3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7" - }, - "downloads": -1, - "filename": "jupyter-1.0.0.zip", - "has_sig": false, - "md5_digest": "25142b08e2ad7142b6f920bc8cc8dfeb", - "packagetype": "sdist", - "python_version": "source", - "requires_python": null, - "size": 16690, - "upload_time": "2015-08-12T00:43:12", - "url": "https://files.pythonhosted.org/packages/fc/21/a372b73e3a498b41b92ed915ada7de2ad5e16631546329c03e484c3bf4e9/jupyter-1.0.0.zip" - } + "downloads": -1, + "filename": "jupyter-1.0.0.tar.gz", + "has_sig": false, + "md5_digest": "c6030444c7eb6c05a4d7b1768c72aed7", + "packagetype": "sdist", + "python_version": "source", + "requires_python": null, + "size": 12916, + "upload_time": "2015-08-12T00:43:08", + "url": "https://files.pythonhosted.org/packages/c9/a9/371d0b8fe37dd231cf4b2cff0a9f0f25e98f3a73c3771742444be27f2944/jupyter-1.0.0.tar.gz" + }, + { + "comment_text": "", + "digests": { + "md5": "25142b08e2ad7142b6f920bc8cc8dfeb", + "sha256": "3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7" + }, + "downloads": -1, + "filename": "jupyter-1.0.0.zip", + "has_sig": false, + "md5_digest": "25142b08e2ad7142b6f920bc8cc8dfeb", + "packagetype": "sdist", + "python_version": "source", + "requires_python": null, + "size": 16690, + "upload_time": "2015-08-12T00:43:12", + "url": "https://files.pythonhosted.org/packages/fc/21/a372b73e3a498b41b92ed915ada7de2ad5e16631546329c03e484c3bf4e9/jupyter-1.0.0.zip" + } ] + }, + "urls": [ + { + "comment_text": "", + "digests": { + "md5": "f81d039e084c2c0c4da9e4a86446b863", + "sha256": "5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78" + }, + "downloads": -1, + "filename": "jupyter-1.0.0-py2.py3-none-any.whl", + "has_sig": false, + "md5_digest": "f81d039e084c2c0c4da9e4a86446b863", + "packagetype": "bdist_wheel", + "python_version": "3.4", + "requires_python": null, + "size": 2736, + "upload_time": "2015-08-12T00:42:58", + "url": 
"https://files.pythonhosted.org/packages/83/df/0f5dd132200728a86190397e1ea87cd76244e42d39ec5e88efd25b2abd7e/jupyter-1.0.0-py2.py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "c6030444c7eb6c05a4d7b1768c72aed7", + "sha256": "d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f" + }, + "downloads": -1, + "filename": "jupyter-1.0.0.tar.gz", + "has_sig": false, + "md5_digest": "c6030444c7eb6c05a4d7b1768c72aed7", + "packagetype": "sdist", + "python_version": "source", + "requires_python": null, + "size": 12916, + "upload_time": "2015-08-12T00:43:08", + "url": "https://files.pythonhosted.org/packages/c9/a9/371d0b8fe37dd231cf4b2cff0a9f0f25e98f3a73c3771742444be27f2944/jupyter-1.0.0.tar.gz" + }, + { + "comment_text": "", + "digests": { + "md5": "25142b08e2ad7142b6f920bc8cc8dfeb", + "sha256": "3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7" + }, + "downloads": -1, + "filename": "jupyter-1.0.0.zip", + "has_sig": false, + "md5_digest": "25142b08e2ad7142b6f920bc8cc8dfeb", + "packagetype": "sdist", + "python_version": "source", + "requires_python": null, + "size": 16690, + "upload_time": "2015-08-12T00:43:12", + "url": "https://files.pythonhosted.org/packages/fc/21/a372b73e3a498b41b92ed915ada7de2ad5e16631546329c03e484c3bf4e9/jupyter-1.0.0.zip" + } + ] } diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/more-itertools.json b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/more-itertools.json index 371ecee5..fb2dbcd1 100644 --- a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/more-itertools.json +++ b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/more-itertools.json @@ -1,148 +1,148 @@ { - "info": { - "author": "Erik Rose", - "author_email": "erikrose@grinchcentral.com", - "bugtrack_url": "", - "classifiers": [ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Natural Language :: English", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.2", - "Programming Language :: Python :: 3.3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", - "Topic :: Software Development :: Libraries" - ], - "description": "", - "docs_url": "https://pythonhosted.org/more-itertools/", - "download_url": "", - "downloads": { - "last_day": -1, - "last_month": -1, - "last_week": -1 - }, - "home_page": "https://github.com/erikrose/more-itertools", - "keywords": "itertools,iterator,iteration,filter,peek,peekable,collate,chunk,chunked", - "license": "MIT", - "maintainer": "", - "maintainer_email": "", - "name": "more-itertools", - "package_url": "https://pypi.org/project/more-itertools/", - "platform": "", - "project_url": "https://pypi.org/project/more-itertools/", - "release_url": "https://pypi.org/project/more-itertools/4.1.0/", - "requires_dist": [ - "six (<2.0.0,>=1.0.0)" - ], - "requires_python": "", - "summary": "More routines for operating on iterables, beyond itertools", - "version": "4.1.0" - }, - "last_serial": 3508946, - "releases": { - "4.1.0": [ - { - "comment_text": "", - "digests": { - "md5": "2a6a4b9abf941edf6d190fc995c0c935", - "sha256": "11a625025954c20145b37ff6309cd54e39ca94f72f6bb9576d1195db6fa2442e" - }, - "downloads": -1, - "filename": "more_itertools-4.1.0-py2-none-any.whl", - "has_sig": false, - "md5_digest": "2a6a4b9abf941edf6d190fc995c0c935", - "packagetype": "bdist_wheel", - 
"python_version": "py2", - "size": 47987, - "upload_time": "2018-01-21T15:34:19", - "url": "https://files.pythonhosted.org/packages/4a/88/c28e2a2da8f3dc3a391d9c97ad949f2ea0c05198222e7e6af176e5bf9b26/more_itertools-4.1.0-py2-none-any.whl" - }, - { - "comment_text": "", - "digests": { - "md5": "3229d872f8d193e36119ec76e1b0c097", - "sha256": "0dd8f72eeab0d2c3bd489025bb2f6a1b8342f9b198f6fc37b52d15cfa4531fea" - }, - "downloads": -1, - "filename": "more_itertools-4.1.0-py3-none-any.whl", - "has_sig": false, - "md5_digest": "3229d872f8d193e36119ec76e1b0c097", - "packagetype": "bdist_wheel", - "python_version": "py3", - "size": 47988, - "upload_time": "2018-01-21T15:34:20", - "url": "https://files.pythonhosted.org/packages/7a/46/886917c6a4ce49dd3fff250c01c5abac5390d57992751384fe61befc4877/more_itertools-4.1.0-py3-none-any.whl" - }, - { - "comment_text": "", - "digests": { - "md5": "246f46686d95879fbad37855c115dc52", - "sha256": "c9ce7eccdcb901a2c75d326ea134e0886abfbea5f93e91cc95de9507c0816c44" - }, - "downloads": -1, - "filename": "more-itertools-4.1.0.tar.gz", - "has_sig": false, - "md5_digest": "246f46686d95879fbad37855c115dc52", - "packagetype": "sdist", - "python_version": "source", - "size": 51310, - "upload_time": "2018-01-21T15:34:22", - "url": "https://files.pythonhosted.org/packages/db/0b/f5660bf6299ec5b9f17bd36096fa8148a1c843fa77ddfddf9bebac9301f7/more-itertools-4.1.0.tar.gz" - } - ] + "info": { + "author": "Erik Rose", + "author_email": "erikrose@grinchcentral.com", + "bugtrack_url": "", + "classifiers": [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.2", + "Programming Language :: Python :: 3.3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Topic :: Software Development :: Libraries" + ], + "description": "", + "docs_url": "https://pythonhosted.org/more-itertools/", + "download_url": "", + "downloads": { + "last_day": -1, + "last_month": -1, + "last_week": -1 }, - "urls": [ - { - "comment_text": "", - "digests": { - "md5": "2a6a4b9abf941edf6d190fc995c0c935", - "sha256": "11a625025954c20145b37ff6309cd54e39ca94f72f6bb9576d1195db6fa2442e" - }, - "downloads": -1, - "filename": "more_itertools-4.1.0-py2-none-any.whl", - "has_sig": false, - "md5_digest": "2a6a4b9abf941edf6d190fc995c0c935", - "packagetype": "bdist_wheel", - "python_version": "py2", - "size": 47987, - "upload_time": "2018-01-21T15:34:19", - "url": "https://files.pythonhosted.org/packages/4a/88/c28e2a2da8f3dc3a391d9c97ad949f2ea0c05198222e7e6af176e5bf9b26/more_itertools-4.1.0-py2-none-any.whl" + "home_page": "https://github.com/erikrose/more-itertools", + "keywords": "itertools,iterator,iteration,filter,peek,peekable,collate,chunk,chunked", + "license": "MIT", + "maintainer": "", + "maintainer_email": "", + "name": "more-itertools", + "package_url": "https://pypi.org/project/more-itertools/", + "platform": "", + "project_url": "https://pypi.org/project/more-itertools/", + "release_url": "https://pypi.org/project/more-itertools/4.1.0/", + "requires_dist": [ + "six (<2.0.0,>=1.0.0)" + ], + "requires_python": "", + "summary": "More routines for operating on iterables, beyond itertools", + "version": "4.1.0" + }, + "last_serial": 3508946, + "releases": { + "4.1.0": [ + { + "comment_text": "", + "digests": { + 
"md5": "2a6a4b9abf941edf6d190fc995c0c935", + "sha256": "11a625025954c20145b37ff6309cd54e39ca94f72f6bb9576d1195db6fa2442e" + }, + "downloads": -1, + "filename": "more_itertools-4.1.0-py2-none-any.whl", + "has_sig": false, + "md5_digest": "2a6a4b9abf941edf6d190fc995c0c935", + "packagetype": "bdist_wheel", + "python_version": "py2", + "size": 47987, + "upload_time": "2018-01-21T15:34:19", + "url": "https://files.pythonhosted.org/packages/4a/88/c28e2a2da8f3dc3a391d9c97ad949f2ea0c05198222e7e6af176e5bf9b26/more_itertools-4.1.0-py2-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "3229d872f8d193e36119ec76e1b0c097", + "sha256": "0dd8f72eeab0d2c3bd489025bb2f6a1b8342f9b198f6fc37b52d15cfa4531fea" }, - { - "comment_text": "", - "digests": { - "md5": "3229d872f8d193e36119ec76e1b0c097", - "sha256": "0dd8f72eeab0d2c3bd489025bb2f6a1b8342f9b198f6fc37b52d15cfa4531fea" - }, - "downloads": -1, - "filename": "more_itertools-4.1.0-py3-none-any.whl", - "has_sig": false, - "md5_digest": "3229d872f8d193e36119ec76e1b0c097", - "packagetype": "bdist_wheel", - "python_version": "py3", - "size": 47988, - "upload_time": "2018-01-21T15:34:20", - "url": "https://files.pythonhosted.org/packages/7a/46/886917c6a4ce49dd3fff250c01c5abac5390d57992751384fe61befc4877/more_itertools-4.1.0-py3-none-any.whl" + "downloads": -1, + "filename": "more_itertools-4.1.0-py3-none-any.whl", + "has_sig": false, + "md5_digest": "3229d872f8d193e36119ec76e1b0c097", + "packagetype": "bdist_wheel", + "python_version": "py3", + "size": 47988, + "upload_time": "2018-01-21T15:34:20", + "url": "https://files.pythonhosted.org/packages/7a/46/886917c6a4ce49dd3fff250c01c5abac5390d57992751384fe61befc4877/more_itertools-4.1.0-py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "246f46686d95879fbad37855c115dc52", + "sha256": "c9ce7eccdcb901a2c75d326ea134e0886abfbea5f93e91cc95de9507c0816c44" }, - { - "comment_text": "", - "digests": { - "md5": "246f46686d95879fbad37855c115dc52", - "sha256": "c9ce7eccdcb901a2c75d326ea134e0886abfbea5f93e91cc95de9507c0816c44" - }, - "downloads": -1, - "filename": "more-itertools-4.1.0.tar.gz", - "has_sig": false, - "md5_digest": "246f46686d95879fbad37855c115dc52", - "packagetype": "sdist", - "python_version": "source", - "size": 51310, - "upload_time": "2018-01-21T15:34:22", - "url": "https://files.pythonhosted.org/packages/db/0b/f5660bf6299ec5b9f17bd36096fa8148a1c843fa77ddfddf9bebac9301f7/more-itertools-4.1.0.tar.gz" - } + "downloads": -1, + "filename": "more-itertools-4.1.0.tar.gz", + "has_sig": false, + "md5_digest": "246f46686d95879fbad37855c115dc52", + "packagetype": "sdist", + "python_version": "source", + "size": 51310, + "upload_time": "2018-01-21T15:34:22", + "url": "https://files.pythonhosted.org/packages/db/0b/f5660bf6299ec5b9f17bd36096fa8148a1c843fa77ddfddf9bebac9301f7/more-itertools-4.1.0.tar.gz" + } ] + }, + "urls": [ + { + "comment_text": "", + "digests": { + "md5": "2a6a4b9abf941edf6d190fc995c0c935", + "sha256": "11a625025954c20145b37ff6309cd54e39ca94f72f6bb9576d1195db6fa2442e" + }, + "downloads": -1, + "filename": "more_itertools-4.1.0-py2-none-any.whl", + "has_sig": false, + "md5_digest": "2a6a4b9abf941edf6d190fc995c0c935", + "packagetype": "bdist_wheel", + "python_version": "py2", + "size": 47987, + "upload_time": "2018-01-21T15:34:19", + "url": "https://files.pythonhosted.org/packages/4a/88/c28e2a2da8f3dc3a391d9c97ad949f2ea0c05198222e7e6af176e5bf9b26/more_itertools-4.1.0-py2-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": 
"3229d872f8d193e36119ec76e1b0c097", + "sha256": "0dd8f72eeab0d2c3bd489025bb2f6a1b8342f9b198f6fc37b52d15cfa4531fea" + }, + "downloads": -1, + "filename": "more_itertools-4.1.0-py3-none-any.whl", + "has_sig": false, + "md5_digest": "3229d872f8d193e36119ec76e1b0c097", + "packagetype": "bdist_wheel", + "python_version": "py3", + "size": 47988, + "upload_time": "2018-01-21T15:34:20", + "url": "https://files.pythonhosted.org/packages/7a/46/886917c6a4ce49dd3fff250c01c5abac5390d57992751384fe61befc4877/more_itertools-4.1.0-py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "246f46686d95879fbad37855c115dc52", + "sha256": "c9ce7eccdcb901a2c75d326ea134e0886abfbea5f93e91cc95de9507c0816c44" + }, + "downloads": -1, + "filename": "more-itertools-4.1.0.tar.gz", + "has_sig": false, + "md5_digest": "246f46686d95879fbad37855c115dc52", + "packagetype": "sdist", + "python_version": "source", + "size": 51310, + "upload_time": "2018-01-21T15:34:22", + "url": "https://files.pythonhosted.org/packages/db/0b/f5660bf6299ec5b9f17bd36096fa8148a1c843fa77ddfddf9bebac9301f7/more-itertools-4.1.0.tar.gz" + } + ] } diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/more-itertools/4.1.0.json b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/more-itertools/4.1.0.json index 32981034..7f86866b 100644 --- a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/more-itertools/4.1.0.json +++ b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/more-itertools/4.1.0.json @@ -1,96 +1,96 @@ { - "info": { - "author": "Erik Rose", - "author_email": "erikrose@grinchcentral.com", - "bugtrack_url": "", - "classifiers": [ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Natural Language :: English", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.2", - "Programming Language :: Python :: 3.3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", - "Topic :: Software Development :: Libraries" - ], - "description": "", - "docs_url": "https://pythonhosted.org/more-itertools/", - "download_url": "", - "downloads": { - "last_day": -1, - "last_month": -1, - "last_week": -1 - }, - "home_page": "https://github.com/erikrose/more-itertools", - "keywords": "itertools,iterator,iteration,filter,peek,peekable,collate,chunk,chunked", - "license": "MIT", - "maintainer": "", - "maintainer_email": "", - "name": "more-itertools", - "package_url": "https://pypi.org/project/more-itertools/", - "platform": "", - "project_url": "https://pypi.org/project/more-itertools/", - "release_url": "https://pypi.org/project/more-itertools/4.1.0/", - "requires_dist": [ - "six (<2.0.0,>=1.0.0)" - ], - "requires_python": "", - "summary": "More routines for operating on iterables, beyond itertools", - "version": "4.1.0" + "info": { + "author": "Erik Rose", + "author_email": "erikrose@grinchcentral.com", + "bugtrack_url": "", + "classifiers": [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.2", + "Programming Language :: Python :: 3.3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + 
"Topic :: Software Development :: Libraries" + ], + "description": "", + "docs_url": "https://pythonhosted.org/more-itertools/", + "download_url": "", + "downloads": { + "last_day": -1, + "last_month": -1, + "last_week": -1 }, - "last_serial": 3508946, - "urls": [ - { - "comment_text": "", - "digests": { - "md5": "2a6a4b9abf941edf6d190fc995c0c935", - "sha256": "11a625025954c20145b37ff6309cd54e39ca94f72f6bb9576d1195db6fa2442e" - }, - "downloads": -1, - "filename": "more_itertools-4.1.0-py2-none-any.whl", - "has_sig": false, - "md5_digest": "2a6a4b9abf941edf6d190fc995c0c935", - "packagetype": "bdist_wheel", - "python_version": "py2", - "size": 47987, - "upload_time": "2018-01-21T15:34:19", - "url": "https://files.pythonhosted.org/packages/4a/88/c28e2a2da8f3dc3a391d9c97ad949f2ea0c05198222e7e6af176e5bf9b26/more_itertools-4.1.0-py2-none-any.whl" - }, - { - "comment_text": "", - "digests": { - "md5": "3229d872f8d193e36119ec76e1b0c097", - "sha256": "0dd8f72eeab0d2c3bd489025bb2f6a1b8342f9b198f6fc37b52d15cfa4531fea" - }, - "downloads": -1, - "filename": "more_itertools-4.1.0-py3-none-any.whl", - "has_sig": false, - "md5_digest": "3229d872f8d193e36119ec76e1b0c097", - "packagetype": "bdist_wheel", - "python_version": "py3", - "size": 47988, - "upload_time": "2018-01-21T15:34:20", - "url": "https://files.pythonhosted.org/packages/7a/46/886917c6a4ce49dd3fff250c01c5abac5390d57992751384fe61befc4877/more_itertools-4.1.0-py3-none-any.whl" - }, - { - "comment_text": "", - "digests": { - "md5": "246f46686d95879fbad37855c115dc52", - "sha256": "c9ce7eccdcb901a2c75d326ea134e0886abfbea5f93e91cc95de9507c0816c44" - }, - "downloads": -1, - "filename": "more-itertools-4.1.0.tar.gz", - "has_sig": false, - "md5_digest": "246f46686d95879fbad37855c115dc52", - "packagetype": "sdist", - "python_version": "source", - "size": 51310, - "upload_time": "2018-01-21T15:34:22", - "url": "https://files.pythonhosted.org/packages/db/0b/f5660bf6299ec5b9f17bd36096fa8148a1c843fa77ddfddf9bebac9301f7/more-itertools-4.1.0.tar.gz" - } - ] + "home_page": "https://github.com/erikrose/more-itertools", + "keywords": "itertools,iterator,iteration,filter,peek,peekable,collate,chunk,chunked", + "license": "MIT", + "maintainer": "", + "maintainer_email": "", + "name": "more-itertools", + "package_url": "https://pypi.org/project/more-itertools/", + "platform": "", + "project_url": "https://pypi.org/project/more-itertools/", + "release_url": "https://pypi.org/project/more-itertools/4.1.0/", + "requires_dist": [ + "six (<2.0.0,>=1.0.0)" + ], + "requires_python": "", + "summary": "More routines for operating on iterables, beyond itertools", + "version": "4.1.0" + }, + "last_serial": 3508946, + "urls": [ + { + "comment_text": "", + "digests": { + "md5": "2a6a4b9abf941edf6d190fc995c0c935", + "sha256": "11a625025954c20145b37ff6309cd54e39ca94f72f6bb9576d1195db6fa2442e" + }, + "downloads": -1, + "filename": "more_itertools-4.1.0-py2-none-any.whl", + "has_sig": false, + "md5_digest": "2a6a4b9abf941edf6d190fc995c0c935", + "packagetype": "bdist_wheel", + "python_version": "py2", + "size": 47987, + "upload_time": "2018-01-21T15:34:19", + "url": "https://files.pythonhosted.org/packages/4a/88/c28e2a2da8f3dc3a391d9c97ad949f2ea0c05198222e7e6af176e5bf9b26/more_itertools-4.1.0-py2-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "3229d872f8d193e36119ec76e1b0c097", + "sha256": "0dd8f72eeab0d2c3bd489025bb2f6a1b8342f9b198f6fc37b52d15cfa4531fea" + }, + "downloads": -1, + "filename": "more_itertools-4.1.0-py3-none-any.whl", + "has_sig": false, + 
"md5_digest": "3229d872f8d193e36119ec76e1b0c097", + "packagetype": "bdist_wheel", + "python_version": "py3", + "size": 47988, + "upload_time": "2018-01-21T15:34:20", + "url": "https://files.pythonhosted.org/packages/7a/46/886917c6a4ce49dd3fff250c01c5abac5390d57992751384fe61befc4877/more_itertools-4.1.0-py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "246f46686d95879fbad37855c115dc52", + "sha256": "c9ce7eccdcb901a2c75d326ea134e0886abfbea5f93e91cc95de9507c0816c44" + }, + "downloads": -1, + "filename": "more-itertools-4.1.0.tar.gz", + "has_sig": false, + "md5_digest": "246f46686d95879fbad37855c115dc52", + "packagetype": "sdist", + "python_version": "source", + "size": 51310, + "upload_time": "2018-01-21T15:34:22", + "url": "https://files.pythonhosted.org/packages/db/0b/f5660bf6299ec5b9f17bd36096fa8148a1c843fa77ddfddf9bebac9301f7/more-itertools-4.1.0.tar.gz" + } + ] } diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/pluggy.json b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/pluggy.json index 15a40eeb..f52ee57b 100644 --- a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/pluggy.json +++ b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/pluggy.json @@ -1,87 +1,87 @@ { - "info": { - "author": "Holger Krekel", - "author_email": "holger@merlinux.eu", - "bugtrack_url": null, - "classifiers": [ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Operating System :: MacOS :: MacOS X", - "Operating System :: Microsoft :: Windows", - "Operating System :: POSIX", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: Implementation :: CPython", - "Programming Language :: Python :: Implementation :: PyPy", - "Topic :: Software Development :: Libraries", - "Topic :: Software Development :: Testing", - "Topic :: Utilities" - ], - "description": "", - "docs_url": null, - "download_url": "", - "downloads": { - "last_day": -1, - "last_month": -1, - "last_week": -1 - }, - "home_page": "https://github.com/pytest-dev/pluggy", - "keywords": "", - "license": "MIT license", - "maintainer": "", - "maintainer_email": "", - "name": "pluggy", - "package_url": "https://pypi.org/project/pluggy/", - "platform": "unix", - "project_url": "https://pypi.org/project/pluggy/", - "release_url": "https://pypi.org/project/pluggy/0.6.0/", - "requires_dist": null, - "requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", - "summary": "plugin and hook calling mechanisms for python", - "version": "0.6.0" - }, - "last_serial": 3361295, - "releases": { - "0.6.0": [ - { - "comment_text": "", - "digests": { - "md5": "ffdde7c3a5ba9a440404570366ffb6d5", - "sha256": "7f8ae7f5bdf75671a718d2daf0a64b7885f74510bcd98b1a0bb420eb9a9d0cff" - }, - "downloads": -1, - "filename": "pluggy-0.6.0.tar.gz", - "has_sig": false, - "md5_digest": "ffdde7c3a5ba9a440404570366ffb6d5", - "packagetype": "sdist", - "python_version": "source", - "size": 19678, - "upload_time": "2017-11-24T16:33:11", - "url": "https://files.pythonhosted.org/packages/11/bf/cbeb8cdfaffa9f2ea154a30ae31a9d04a1209312e2919138b4171a1f8199/pluggy-0.6.0.tar.gz" - } - ] + "info": { + "author": "Holger Krekel", + "author_email": "holger@merlinux.eu", + "bugtrack_url": null, + "classifiers": [ + "Development Status :: 4 - 
Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: Microsoft :: Windows", + "Operating System :: POSIX", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", + "Topic :: Software Development :: Libraries", + "Topic :: Software Development :: Testing", + "Topic :: Utilities" + ], + "description": "", + "docs_url": null, + "download_url": "", + "downloads": { + "last_day": -1, + "last_month": -1, + "last_week": -1 }, - "urls": [ - { - "comment_text": "", - "digests": { - "md5": "ffdde7c3a5ba9a440404570366ffb6d5", - "sha256": "7f8ae7f5bdf75671a718d2daf0a64b7885f74510bcd98b1a0bb420eb9a9d0cff" - }, - "downloads": -1, - "filename": "pluggy-0.6.0.tar.gz", - "has_sig": false, - "md5_digest": "ffdde7c3a5ba9a440404570366ffb6d5", - "packagetype": "sdist", - "python_version": "source", - "size": 19678, - "upload_time": "2017-11-24T16:33:11", - "url": "https://files.pythonhosted.org/packages/11/bf/cbeb8cdfaffa9f2ea154a30ae31a9d04a1209312e2919138b4171a1f8199/pluggy-0.6.0.tar.gz" - } + "home_page": "https://github.com/pytest-dev/pluggy", + "keywords": "", + "license": "MIT license", + "maintainer": "", + "maintainer_email": "", + "name": "pluggy", + "package_url": "https://pypi.org/project/pluggy/", + "platform": "unix", + "project_url": "https://pypi.org/project/pluggy/", + "release_url": "https://pypi.org/project/pluggy/0.6.0/", + "requires_dist": null, + "requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", + "summary": "plugin and hook calling mechanisms for python", + "version": "0.6.0" + }, + "last_serial": 3361295, + "releases": { + "0.6.0": [ + { + "comment_text": "", + "digests": { + "md5": "ffdde7c3a5ba9a440404570366ffb6d5", + "sha256": "7f8ae7f5bdf75671a718d2daf0a64b7885f74510bcd98b1a0bb420eb9a9d0cff" + }, + "downloads": -1, + "filename": "pluggy-0.6.0.tar.gz", + "has_sig": false, + "md5_digest": "ffdde7c3a5ba9a440404570366ffb6d5", + "packagetype": "sdist", + "python_version": "source", + "size": 19678, + "upload_time": "2017-11-24T16:33:11", + "url": "https://files.pythonhosted.org/packages/11/bf/cbeb8cdfaffa9f2ea154a30ae31a9d04a1209312e2919138b4171a1f8199/pluggy-0.6.0.tar.gz" + } ] + }, + "urls": [ + { + "comment_text": "", + "digests": { + "md5": "ffdde7c3a5ba9a440404570366ffb6d5", + "sha256": "7f8ae7f5bdf75671a718d2daf0a64b7885f74510bcd98b1a0bb420eb9a9d0cff" + }, + "downloads": -1, + "filename": "pluggy-0.6.0.tar.gz", + "has_sig": false, + "md5_digest": "ffdde7c3a5ba9a440404570366ffb6d5", + "packagetype": "sdist", + "python_version": "source", + "size": 19678, + "upload_time": "2017-11-24T16:33:11", + "url": "https://files.pythonhosted.org/packages/11/bf/cbeb8cdfaffa9f2ea154a30ae31a9d04a1209312e2919138b4171a1f8199/pluggy-0.6.0.tar.gz" + } + ] } diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/pluggy/0.6.0.json b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/pluggy/0.6.0.json index 7dd8deea..1ce124ce 100644 --- a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/pluggy/0.6.0.json +++ b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/pluggy/0.6.0.json @@ -1,67 +1,67 @@ { - "info": { - "author": "Holger Krekel", - 
"author_email": "holger@merlinux.eu", - "bugtrack_url": null, - "classifiers": [ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Operating System :: MacOS :: MacOS X", - "Operating System :: Microsoft :: Windows", - "Operating System :: POSIX", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: Implementation :: CPython", - "Programming Language :: Python :: Implementation :: PyPy", - "Topic :: Software Development :: Libraries", - "Topic :: Software Development :: Testing", - "Topic :: Utilities" - ], - "description": "", - "docs_url": null, - "download_url": "", - "downloads": { - "last_day": -1, - "last_month": -1, - "last_week": -1 - }, - "home_page": "https://github.com/pytest-dev/pluggy", - "keywords": "", - "license": "MIT license", - "maintainer": "", - "maintainer_email": "", - "name": "pluggy", - "package_url": "https://pypi.org/project/pluggy/", - "platform": "unix", - "project_url": "https://pypi.org/project/pluggy/", - "release_url": "https://pypi.org/project/pluggy/0.6.0/", - "requires_dist": null, - "requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", - "summary": "plugin and hook calling mechanisms for python", - "version": "0.6.0" + "info": { + "author": "Holger Krekel", + "author_email": "holger@merlinux.eu", + "bugtrack_url": null, + "classifiers": [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: Microsoft :: Windows", + "Operating System :: POSIX", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", + "Topic :: Software Development :: Libraries", + "Topic :: Software Development :: Testing", + "Topic :: Utilities" + ], + "description": "", + "docs_url": null, + "download_url": "", + "downloads": { + "last_day": -1, + "last_month": -1, + "last_week": -1 }, - "last_serial": 3361295, - "urls": [ - { - "comment_text": "", - "digests": { - "md5": "ffdde7c3a5ba9a440404570366ffb6d5", - "sha256": "7f8ae7f5bdf75671a718d2daf0a64b7885f74510bcd98b1a0bb420eb9a9d0cff" - }, - "downloads": -1, - "filename": "pluggy-0.6.0.tar.gz", - "has_sig": false, - "md5_digest": "ffdde7c3a5ba9a440404570366ffb6d5", - "packagetype": "sdist", - "python_version": "source", - "size": 19678, - "upload_time": "2017-11-24T16:33:11", - "url": "https://files.pythonhosted.org/packages/11/bf/cbeb8cdfaffa9f2ea154a30ae31a9d04a1209312e2919138b4171a1f8199/pluggy-0.6.0.tar.gz" - } - ] + "home_page": "https://github.com/pytest-dev/pluggy", + "keywords": "", + "license": "MIT license", + "maintainer": "", + "maintainer_email": "", + "name": "pluggy", + "package_url": "https://pypi.org/project/pluggy/", + "platform": "unix", + "project_url": "https://pypi.org/project/pluggy/", + "release_url": "https://pypi.org/project/pluggy/0.6.0/", + "requires_dist": null, + "requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", + "summary": "plugin 
and hook calling mechanisms for python", + "version": "0.6.0" + }, + "last_serial": 3361295, + "urls": [ + { + "comment_text": "", + "digests": { + "md5": "ffdde7c3a5ba9a440404570366ffb6d5", + "sha256": "7f8ae7f5bdf75671a718d2daf0a64b7885f74510bcd98b1a0bb420eb9a9d0cff" + }, + "downloads": -1, + "filename": "pluggy-0.6.0.tar.gz", + "has_sig": false, + "md5_digest": "ffdde7c3a5ba9a440404570366ffb6d5", + "packagetype": "sdist", + "python_version": "source", + "size": 19678, + "upload_time": "2017-11-24T16:33:11", + "url": "https://files.pythonhosted.org/packages/11/bf/cbeb8cdfaffa9f2ea154a30ae31a9d04a1209312e2919138b4171a1f8199/pluggy-0.6.0.tar.gz" + } + ] } diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/py.json b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/py.json index a0d65a6e..41b98bbd 100644 --- a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/py.json +++ b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/py.json @@ -1,120 +1,120 @@ { - "info": { - "author": "holger krekel, Ronny Pfannschmidt, Benjamin Peterson and others", - "author_email": "pytest-dev@python.org", - "bugtrack_url": "https://github.com/pytest-dev/py/issues", - "classifiers": [ - "Development Status :: 6 - Mature", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Operating System :: MacOS :: MacOS X", - "Operating System :: Microsoft :: Windows", - "Operating System :: POSIX", - "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: Implementation :: CPython", - "Programming Language :: Python :: Implementation :: PyPy", - "Topic :: Software Development :: Libraries", - "Topic :: Software Development :: Testing", - "Topic :: Utilities" - ], - "description": "", - "docs_url": null, - "download_url": "", - "downloads": { - "last_day": -1, - "last_month": -1, - "last_week": -1 - }, - "home_page": "http://py.readthedocs.io/", - "keywords": "", - "license": "MIT license", - "maintainer": "", - "maintainer_email": "", - "name": "py", - "package_url": "https://pypi.org/project/py/", - "platform": "unix", - "project_url": "https://pypi.org/project/py/", - "release_url": "https://pypi.org/project/py/1.5.3/", - "requires_dist": null, - "requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", - "summary": "library with cross-python path, ini-parsing, io, code, log facilities", - "version": "1.5.3" - }, - "last_serial": 3694828, - "releases": { - "1.5.3": [ - { - "comment_text": "", - "digests": { - "md5": "3184fb17d224b073117a25336040d7c7", - "sha256": "983f77f3331356039fdd792e9220b7b8ee1aa6bd2b25f567a963ff1de5a64f6a" - }, - "downloads": -1, - "filename": "py-1.5.3-py2.py3-none-any.whl", - "has_sig": false, - "md5_digest": "3184fb17d224b073117a25336040d7c7", - "packagetype": "bdist_wheel", - "python_version": "py2.py3", - "size": 84903, - "upload_time": "2018-03-22T10:06:50", - "url": "https://files.pythonhosted.org/packages/67/a5/f77982214dd4c8fd104b066f249adea2c49e25e8703d284382eb5e9ab35a/py-1.5.3-py2.py3-none-any.whl" - }, - { - "comment_text": "", - "digests": { - "md5": "667d37a148ad9fb81266492903f2d880", - "sha256": "29c9fab495d7528e80ba1e343b958684f4ace687327e6f789a94bf3d1915f881" - }, - "downloads": -1, - "filename": "py-1.5.3.tar.gz", - "has_sig": false, - 
"md5_digest": "667d37a148ad9fb81266492903f2d880", - "packagetype": "sdist", - "python_version": "source", - "size": 202335, - "upload_time": "2018-03-22T10:06:52", - "url": "https://files.pythonhosted.org/packages/f7/84/b4c6e84672c4ceb94f727f3da8344037b62cee960d80e999b1cd9b832d83/py-1.5.3.tar.gz" - } - ] + "info": { + "author": "holger krekel, Ronny Pfannschmidt, Benjamin Peterson and others", + "author_email": "pytest-dev@python.org", + "bugtrack_url": "https://github.com/pytest-dev/py/issues", + "classifiers": [ + "Development Status :: 6 - Mature", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: Microsoft :: Windows", + "Operating System :: POSIX", + "Programming Language :: Python", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", + "Topic :: Software Development :: Libraries", + "Topic :: Software Development :: Testing", + "Topic :: Utilities" + ], + "description": "", + "docs_url": null, + "download_url": "", + "downloads": { + "last_day": -1, + "last_month": -1, + "last_week": -1 }, - "urls": [ - { - "comment_text": "", - "digests": { - "md5": "3184fb17d224b073117a25336040d7c7", - "sha256": "983f77f3331356039fdd792e9220b7b8ee1aa6bd2b25f567a963ff1de5a64f6a" - }, - "downloads": -1, - "filename": "py-1.5.3-py2.py3-none-any.whl", - "has_sig": false, - "md5_digest": "3184fb17d224b073117a25336040d7c7", - "packagetype": "bdist_wheel", - "python_version": "py2.py3", - "size": 84903, - "upload_time": "2018-03-22T10:06:50", - "url": "https://files.pythonhosted.org/packages/67/a5/f77982214dd4c8fd104b066f249adea2c49e25e8703d284382eb5e9ab35a/py-1.5.3-py2.py3-none-any.whl" + "home_page": "http://py.readthedocs.io/", + "keywords": "", + "license": "MIT license", + "maintainer": "", + "maintainer_email": "", + "name": "py", + "package_url": "https://pypi.org/project/py/", + "platform": "unix", + "project_url": "https://pypi.org/project/py/", + "release_url": "https://pypi.org/project/py/1.5.3/", + "requires_dist": null, + "requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", + "summary": "library with cross-python path, ini-parsing, io, code, log facilities", + "version": "1.5.3" + }, + "last_serial": 3694828, + "releases": { + "1.5.3": [ + { + "comment_text": "", + "digests": { + "md5": "3184fb17d224b073117a25336040d7c7", + "sha256": "983f77f3331356039fdd792e9220b7b8ee1aa6bd2b25f567a963ff1de5a64f6a" }, - { - "comment_text": "", - "digests": { - "md5": "667d37a148ad9fb81266492903f2d880", - "sha256": "29c9fab495d7528e80ba1e343b958684f4ace687327e6f789a94bf3d1915f881" - }, - "downloads": -1, - "filename": "py-1.5.3.tar.gz", - "has_sig": false, - "md5_digest": "667d37a148ad9fb81266492903f2d880", - "packagetype": "sdist", - "python_version": "source", - "size": 202335, - "upload_time": "2018-03-22T10:06:52", - "url": "https://files.pythonhosted.org/packages/f7/84/b4c6e84672c4ceb94f727f3da8344037b62cee960d80e999b1cd9b832d83/py-1.5.3.tar.gz" - } + "downloads": -1, + "filename": "py-1.5.3-py2.py3-none-any.whl", + "has_sig": false, + "md5_digest": "3184fb17d224b073117a25336040d7c7", + "packagetype": "bdist_wheel", + "python_version": "py2.py3", + "size": 84903, + 
"upload_time": "2018-03-22T10:06:50", + "url": "https://files.pythonhosted.org/packages/67/a5/f77982214dd4c8fd104b066f249adea2c49e25e8703d284382eb5e9ab35a/py-1.5.3-py2.py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "667d37a148ad9fb81266492903f2d880", + "sha256": "29c9fab495d7528e80ba1e343b958684f4ace687327e6f789a94bf3d1915f881" + }, + "downloads": -1, + "filename": "py-1.5.3.tar.gz", + "has_sig": false, + "md5_digest": "667d37a148ad9fb81266492903f2d880", + "packagetype": "sdist", + "python_version": "source", + "size": 202335, + "upload_time": "2018-03-22T10:06:52", + "url": "https://files.pythonhosted.org/packages/f7/84/b4c6e84672c4ceb94f727f3da8344037b62cee960d80e999b1cd9b832d83/py-1.5.3.tar.gz" + } ] + }, + "urls": [ + { + "comment_text": "", + "digests": { + "md5": "3184fb17d224b073117a25336040d7c7", + "sha256": "983f77f3331356039fdd792e9220b7b8ee1aa6bd2b25f567a963ff1de5a64f6a" + }, + "downloads": -1, + "filename": "py-1.5.3-py2.py3-none-any.whl", + "has_sig": false, + "md5_digest": "3184fb17d224b073117a25336040d7c7", + "packagetype": "bdist_wheel", + "python_version": "py2.py3", + "size": 84903, + "upload_time": "2018-03-22T10:06:50", + "url": "https://files.pythonhosted.org/packages/67/a5/f77982214dd4c8fd104b066f249adea2c49e25e8703d284382eb5e9ab35a/py-1.5.3-py2.py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "667d37a148ad9fb81266492903f2d880", + "sha256": "29c9fab495d7528e80ba1e343b958684f4ace687327e6f789a94bf3d1915f881" + }, + "downloads": -1, + "filename": "py-1.5.3.tar.gz", + "has_sig": false, + "md5_digest": "667d37a148ad9fb81266492903f2d880", + "packagetype": "sdist", + "python_version": "source", + "size": 202335, + "upload_time": "2018-03-22T10:06:52", + "url": "https://files.pythonhosted.org/packages/f7/84/b4c6e84672c4ceb94f727f3da8344037b62cee960d80e999b1cd9b832d83/py-1.5.3.tar.gz" + } + ] } diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/py/1.5.3.json b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/py/1.5.3.json index 8e30b6ff..4c998966 100644 --- a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/py/1.5.3.json +++ b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/py/1.5.3.json @@ -1,84 +1,84 @@ { - "info": { - "author": "holger krekel, Ronny Pfannschmidt, Benjamin Peterson and others", - "author_email": "pytest-dev@python.org", - "bugtrack_url": "https://github.com/pytest-dev/py/issues", - "classifiers": [ - "Development Status :: 6 - Mature", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Operating System :: MacOS :: MacOS X", - "Operating System :: Microsoft :: Windows", - "Operating System :: POSIX", - "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: Implementation :: CPython", - "Programming Language :: Python :: Implementation :: PyPy", - "Topic :: Software Development :: Libraries", - "Topic :: Software Development :: Testing", - "Topic :: Utilities" - ], - "description": "", - "docs_url": null, - "download_url": "", - "downloads": { - "last_day": -1, - "last_month": -1, - "last_week": -1 - }, - "home_page": "http://py.readthedocs.io/", - "keywords": "", - "license": "MIT license", - "maintainer": "", - "maintainer_email": "", - "name": "py", - "package_url": 
"https://pypi.org/project/py/", - "platform": "unix", - "project_url": "https://pypi.org/project/py/", - "release_url": "https://pypi.org/project/py/1.5.3/", - "requires_dist": null, - "requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", - "summary": "library with cross-python path, ini-parsing, io, code, log facilities", - "version": "1.5.3" + "info": { + "author": "holger krekel, Ronny Pfannschmidt, Benjamin Peterson and others", + "author_email": "pytest-dev@python.org", + "bugtrack_url": "https://github.com/pytest-dev/py/issues", + "classifiers": [ + "Development Status :: 6 - Mature", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: Microsoft :: Windows", + "Operating System :: POSIX", + "Programming Language :: Python", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", + "Topic :: Software Development :: Libraries", + "Topic :: Software Development :: Testing", + "Topic :: Utilities" + ], + "description": "", + "docs_url": null, + "download_url": "", + "downloads": { + "last_day": -1, + "last_month": -1, + "last_week": -1 }, - "last_serial": 3694828, - "urls": [ - { - "comment_text": "", - "digests": { - "md5": "3184fb17d224b073117a25336040d7c7", - "sha256": "983f77f3331356039fdd792e9220b7b8ee1aa6bd2b25f567a963ff1de5a64f6a" - }, - "downloads": -1, - "filename": "py-1.5.3-py2.py3-none-any.whl", - "has_sig": false, - "md5_digest": "3184fb17d224b073117a25336040d7c7", - "packagetype": "bdist_wheel", - "python_version": "py2.py3", - "size": 84903, - "upload_time": "2018-03-22T10:06:50", - "url": "https://files.pythonhosted.org/packages/67/a5/f77982214dd4c8fd104b066f249adea2c49e25e8703d284382eb5e9ab35a/py-1.5.3-py2.py3-none-any.whl" - }, - { - "comment_text": "", - "digests": { - "md5": "667d37a148ad9fb81266492903f2d880", - "sha256": "29c9fab495d7528e80ba1e343b958684f4ace687327e6f789a94bf3d1915f881" - }, - "downloads": -1, - "filename": "py-1.5.3.tar.gz", - "has_sig": false, - "md5_digest": "667d37a148ad9fb81266492903f2d880", - "packagetype": "sdist", - "python_version": "source", - "size": 202335, - "upload_time": "2018-03-22T10:06:52", - "url": "https://files.pythonhosted.org/packages/f7/84/b4c6e84672c4ceb94f727f3da8344037b62cee960d80e999b1cd9b832d83/py-1.5.3.tar.gz" - } - ] + "home_page": "http://py.readthedocs.io/", + "keywords": "", + "license": "MIT license", + "maintainer": "", + "maintainer_email": "", + "name": "py", + "package_url": "https://pypi.org/project/py/", + "platform": "unix", + "project_url": "https://pypi.org/project/py/", + "release_url": "https://pypi.org/project/py/1.5.3/", + "requires_dist": null, + "requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", + "summary": "library with cross-python path, ini-parsing, io, code, log facilities", + "version": "1.5.3" + }, + "last_serial": 3694828, + "urls": [ + { + "comment_text": "", + "digests": { + "md5": "3184fb17d224b073117a25336040d7c7", + "sha256": "983f77f3331356039fdd792e9220b7b8ee1aa6bd2b25f567a963ff1de5a64f6a" + }, + "downloads": -1, + "filename": "py-1.5.3-py2.py3-none-any.whl", + "has_sig": false, + "md5_digest": "3184fb17d224b073117a25336040d7c7", + "packagetype": 
"bdist_wheel", + "python_version": "py2.py3", + "size": 84903, + "upload_time": "2018-03-22T10:06:50", + "url": "https://files.pythonhosted.org/packages/67/a5/f77982214dd4c8fd104b066f249adea2c49e25e8703d284382eb5e9ab35a/py-1.5.3-py2.py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "667d37a148ad9fb81266492903f2d880", + "sha256": "29c9fab495d7528e80ba1e343b958684f4ace687327e6f789a94bf3d1915f881" + }, + "downloads": -1, + "filename": "py-1.5.3.tar.gz", + "has_sig": false, + "md5_digest": "667d37a148ad9fb81266492903f2d880", + "packagetype": "sdist", + "python_version": "source", + "size": 202335, + "upload_time": "2018-03-22T10:06:52", + "url": "https://files.pythonhosted.org/packages/f7/84/b4c6e84672c4ceb94f727f3da8344037b62cee960d80e999b1cd9b832d83/py-1.5.3.tar.gz" + } + ] } diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/pygame-music-grid.json b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/pygame-music-grid.json index 1ba97799..47b67614 100644 --- a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/pygame-music-grid.json +++ b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/pygame-music-grid.json @@ -1,81 +1,79 @@ { - "info":{ - "author":"eric dexter", - "author_email":"irc.dexter@gmail.com", - "bugtrack_url":null, - "classifiers":[ - "Topic :: Multimedia :: Sound/Audio :: Editors" - ], - "description":"a clickable grid for drum machines, piano rolls that is customizble from an init \r\nfile)or will be) that will include the script to be ran when a definable button is \r\nhit written in pygame and tested with python 2.5", - "description_content_type":null, - "docs_url":null, - "download_url":"http://www.ziddu.com/download/5498230/pygamepianorollbeta.90.zip.html", - "downloads":{ - "last_day":-1, - "last_month":-1, - "last_week":-1 - }, - "home_page":"http://dexrowem.blogspot.com/search?q=pygame+music+grid", - "keywords":"python, pygame, drum machine, piano roll", - "license":"", - "maintainer":"", - "maintainer_email":"", - "name":"pygame-music-grid", - "package_url":"https://pypi.org/project/pygame-music-grid/", - "platform":"", - "project_url":"https://pypi.org/project/pygame-music-grid/", - "project_urls":{ - "Download":"http://www.ziddu.com/download/5498230/pygamepianorollbeta.90.zip.html", - "Homepage":"http://dexrowem.blogspot.com/search?q=pygame+music+grid" - }, - "release_url":"https://pypi.org/project/pygame-music-grid/.9/", - "requires_dist":null, - "requires_python":null, - "summary":"a grid for music programs", - "version":".9" + "info": { + "author": "eric dexter", + "author_email": "irc.dexter@gmail.com", + "bugtrack_url": null, + "classifiers": [ + "Topic :: Multimedia :: Sound/Audio :: Editors" + ], + "description": "a clickable grid for drum machines, piano rolls that is customizble from an init \r\nfile)or will be) that will include the script to be ran when a definable button is \r\nhit written in pygame and tested with python 2.5", + "description_content_type": null, + "docs_url": null, + "download_url": "http://www.ziddu.com/download/5498230/pygamepianorollbeta.90.zip.html", + "downloads": { + "last_day": -1, + "last_month": -1, + "last_week": -1 }, - "last_serial":710340, - "releases":{ - ".9":[ - { - "comment_text": "", - "digests": { - "md5": "76e2c2e8adea20377d9a7e6b6713c952", - "sha256": "8d6d96001aa7f0a6a4a95e8143225b5d06e41b1131044913fecb8f85a125714b" - }, - "downloads": -1, - "filename": "PyYAML-4.2b4-cp27-cp27m-win32.whl", - "has_sig": false, - "md5_digest": 
"76e2c2e8adea20377d9a7e6b6713c952", - "packagetype": "bdist_wheel", - "python_version": "cp27", - "requires_python": null, - "size": 104988, - "upload_time": "2018-07-02T03:17:55", - "url": "https://files.pythonhosted.org/packages/12/9b/efdbaa3c9694b6315a4410e0d494ad50c5ade22ce33f4b482bfaea3930fd/PyYAML-4.2b4-cp27-cp27m-win32.whl" - } - ], - "1.0":[ - { - "comment_text": "", - "digests": { - "md5": "a83441aa7004e474bed6f6daeb61f27a", - "sha256": "d5eef459e30b09f5a098b9cea68bebfeb268697f78d647bd255a085371ac7f3f" - }, - "downloads": -1, - "filename": "PyYAML-3.13-cp27-cp27m-win32.whl", - "has_sig": false, - "md5_digest": "a83441aa7004e474bed6f6daeb61f27a", - "packagetype": "bdist_wheel", - "python_version": "cp27", - "requires_python": null, - "size": 191712, - "upload_time": "2018-07-05T22:53:15", - "url": "https://files.pythonhosted.org/packages/b8/2e/9c2285870c9de070a1fa5ede702ab5fb329901b3cc4028c24f44eda27c5f/PyYAML-3.13-cp27-cp27m-win32.whl" - } - ] + "home_page": "http://dexrowem.blogspot.com/search?q=pygame+music+grid", + "keywords": "python, pygame, drum machine, piano roll", + "license": "", + "maintainer": "", + "maintainer_email": "", + "name": "pygame-music-grid", + "package_url": "https://pypi.org/project/pygame-music-grid/", + "platform": "", + "project_url": "https://pypi.org/project/pygame-music-grid/", + "project_urls": { + "Download": "http://www.ziddu.com/download/5498230/pygamepianorollbeta.90.zip.html", + "Homepage": "http://dexrowem.blogspot.com/search?q=pygame+music+grid" }, - "urls":[ - + "release_url": "https://pypi.org/project/pygame-music-grid/.9/", + "requires_dist": null, + "requires_python": null, + "summary": "a grid for music programs", + "version": ".9" + }, + "last_serial": 710340, + "releases": { + ".9": [ + { + "comment_text": "", + "digests": { + "md5": "76e2c2e8adea20377d9a7e6b6713c952", + "sha256": "8d6d96001aa7f0a6a4a95e8143225b5d06e41b1131044913fecb8f85a125714b" + }, + "downloads": -1, + "filename": "PyYAML-4.2b4-cp27-cp27m-win32.whl", + "has_sig": false, + "md5_digest": "76e2c2e8adea20377d9a7e6b6713c952", + "packagetype": "bdist_wheel", + "python_version": "cp27", + "requires_python": null, + "size": 104988, + "upload_time": "2018-07-02T03:17:55", + "url": "https://files.pythonhosted.org/packages/12/9b/efdbaa3c9694b6315a4410e0d494ad50c5ade22ce33f4b482bfaea3930fd/PyYAML-4.2b4-cp27-cp27m-win32.whl" + } + ], + "1.0": [ + { + "comment_text": "", + "digests": { + "md5": "a83441aa7004e474bed6f6daeb61f27a", + "sha256": "d5eef459e30b09f5a098b9cea68bebfeb268697f78d647bd255a085371ac7f3f" + }, + "downloads": -1, + "filename": "PyYAML-3.13-cp27-cp27m-win32.whl", + "has_sig": false, + "md5_digest": "a83441aa7004e474bed6f6daeb61f27a", + "packagetype": "bdist_wheel", + "python_version": "cp27", + "requires_python": null, + "size": 191712, + "upload_time": "2018-07-05T22:53:15", + "url": "https://files.pythonhosted.org/packages/b8/2e/9c2285870c9de070a1fa5ede702ab5fb329901b3cc4028c24f44eda27c5f/PyYAML-3.13-cp27-cp27m-win32.whl" + } ] + }, + "urls": [] } diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/pytest.json b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/pytest.json index 069f69cb..e1891d3c 100644 --- a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/pytest.json +++ b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/pytest.json @@ -1,127 +1,127 @@ { - "info": { - "author": "Holger Krekel, Bruno Oliveira, Ronny Pfannschmidt, Floris Bruynooghe, Brianna Laugher, Florian Bruhin and others", - "author_email": "", - 
"bugtrack_url": "https://github.com/pytest-dev/pytest/issues", - "classifiers": [ - "Development Status :: 6 - Mature", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Operating System :: MacOS :: MacOS X", - "Operating System :: Microsoft :: Windows", - "Operating System :: POSIX", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Topic :: Software Development :: Libraries", - "Topic :: Software Development :: Testing", - "Topic :: Utilities" - ], - "description": ".. image:: http://docs.pytest.org/en/latest/_static/pytest1.png\n :target: http://docs.pytest.org\n :align: center\n :alt: pytest\n\n------\n\n.. image:: https://img.shields.io/pypi/v/pytest.svg\n :target: https://pypi.python.org/pypi/pytest\n\n.. image:: https://anaconda.org/conda-forge/pytest/badges/version.svg\n :target: https://anaconda.org/conda-forge/pytest\n\n.. image:: https://img.shields.io/pypi/pyversions/pytest.svg\n :target: https://pypi.python.org/pypi/pytest\n\n.. image:: https://img.shields.io/coveralls/pytest-dev/pytest/master.svg\n :target: https://coveralls.io/r/pytest-dev/pytest\n\n.. image:: https://travis-ci.org/pytest-dev/pytest.svg?branch=master\n :target: https://travis-ci.org/pytest-dev/pytest\n\n.. image:: https://ci.appveyor.com/api/projects/status/mrgbjaua7t33pg6b?svg=true\n :target: https://ci.appveyor.com/project/pytestbot/pytest\n\n.. image:: https://www.codetriage.com/pytest-dev/pytest/badges/users.svg\n :target: https://www.codetriage.com/pytest-dev/pytest\n\nThe ``pytest`` framework makes it easy to write small tests, yet\nscales to support complex functional testing for applications and libraries.\n\nAn example of a simple test:\n\n.. code-block:: python\n\n # content of test_sample.py\n def inc(x):\n return x + 1\n\n def test_answer():\n assert inc(3) == 5\n\n\nTo execute it::\n\n $ pytest\n ============================= test session starts =============================\n collected 1 items\n\n test_sample.py F\n\n ================================== FAILURES ===================================\n _________________________________ test_answer _________________________________\n\n def test_answer():\n > assert inc(3) == 5\n E assert 4 == 5\n E + where 4 = inc(3)\n\n test_sample.py:5: AssertionError\n ========================== 1 failed in 0.04 seconds ===========================\n\n\nDue to ``pytest``'s detailed assertion introspection, only plain ``assert`` statements are used. 
See `getting-started `_ for more examples.\n\n\nFeatures\n--------\n\n- Detailed info on failing `assert statements `_ (no need to remember ``self.assert*`` names);\n\n- `Auto-discovery\n `_\n of test modules and functions;\n\n- `Modular fixtures `_ for\n managing small or parametrized long-lived test resources;\n\n- Can run `unittest `_ (or trial),\n `nose `_ test suites out of the box;\n\n- Python 2.7, Python 3.4+, PyPy 2.3, Jython 2.5 (untested);\n\n- Rich plugin architecture, with over 315+ `external plugins `_ and thriving community;\n\n\nDocumentation\n-------------\n\nFor full documentation, including installation, tutorials and PDF documents, please see http://docs.pytest.org.\n\n\nBugs/Requests\n-------------\n\nPlease use the `GitHub issue tracker `_ to submit bugs or request features.\n\n\nChangelog\n---------\n\nConsult the `Changelog `__ page for fixes and enhancements of each version.\n\n\nLicense\n-------\n\nCopyright Holger Krekel and others, 2004-2017.\n\nDistributed under the terms of the `MIT`_ license, pytest is free and open source software.\n\n.. _`MIT`: https://github.com/pytest-dev/pytest/blob/master/LICENSE\n\n\n", - "docs_url": null, - "download_url": "", - "downloads": { - "last_day": -1, - "last_month": -1, - "last_week": -1 - }, - "home_page": "http://pytest.org", - "keywords": "test unittest", - "license": "MIT license", - "maintainer": "", - "maintainer_email": "", - "name": "pytest", - "package_url": "https://pypi.org/project/pytest/", - "platform": "unix", - "project_url": "https://pypi.org/project/pytest/", - "release_url": "https://pypi.org/project/pytest/3.5.0/", - "requires_dist": [ - "py (>=1.5.0)", - "six (>=1.10.0)", - "setuptools", - "attrs (>=17.4.0)", - "more-itertools (>=4.0.0)", - "pluggy (<0.7,>=0.5)", - "funcsigs; python_version < \"3.0\"", - "colorama; sys_platform == \"win32\"" - ], - "requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", - "summary": "pytest: simple powerful testing with Python", - "version": "3.5.0" - }, - "last_serial": 3697219, - "releases": { - "3.5.0": [ - { - "comment_text": "", - "digests": { - "md5": "c0b6697b7130c495aba71cdfcf939cc9", - "sha256": "6266f87ab64692112e5477eba395cfedda53b1933ccd29478e671e73b420c19c" - }, - "downloads": -1, - "filename": "pytest-3.5.0-py2.py3-none-any.whl", - "has_sig": false, - "md5_digest": "c0b6697b7130c495aba71cdfcf939cc9", - "packagetype": "bdist_wheel", - "python_version": "py2.py3", - "size": 194247, - "upload_time": "2018-03-22T23:47:54", - "url": "https://files.pythonhosted.org/packages/ed/96/271c93f75212c06e2a7ec3e2fa8a9c90acee0a4838dc05bf379ea09aae31/pytest-3.5.0-py2.py3-none-any.whl" - }, - { - "comment_text": "", - "digests": { - "md5": "b8e13a4091f07ff1fda081cf40ff99f1", - "sha256": "fae491d1874f199537fd5872b5e1f0e74a009b979df9d53d1553fd03da1703e1" - }, - "downloads": -1, - "filename": "pytest-3.5.0.tar.gz", - "has_sig": false, - "md5_digest": "b8e13a4091f07ff1fda081cf40ff99f1", - "packagetype": "sdist", - "python_version": "source", - "size": 830816, - "upload_time": "2018-03-22T23:47:56", - "url": "https://files.pythonhosted.org/packages/2d/56/6019153cdd743300c5688ab3b07702355283e53c83fbf922242c053ffb7b/pytest-3.5.0.tar.gz" - } - ] + "info": { + "author": "Holger Krekel, Bruno Oliveira, Ronny Pfannschmidt, Floris Bruynooghe, Brianna Laugher, Florian Bruhin and others", + "author_email": "", + "bugtrack_url": "https://github.com/pytest-dev/pytest/issues", + "classifiers": [ + "Development Status :: 6 - Mature", + "Intended Audience :: Developers", + "License :: 
OSI Approved :: MIT License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: Microsoft :: Windows", + "Operating System :: POSIX", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Topic :: Software Development :: Libraries", + "Topic :: Software Development :: Testing", + "Topic :: Utilities" + ], + "description": ".. image:: http://docs.pytest.org/en/latest/_static/pytest1.png\n :target: http://docs.pytest.org\n :align: center\n :alt: pytest\n\n------\n\n.. image:: https://img.shields.io/pypi/v/pytest.svg\n :target: https://pypi.python.org/pypi/pytest\n\n.. image:: https://anaconda.org/conda-forge/pytest/badges/version.svg\n :target: https://anaconda.org/conda-forge/pytest\n\n.. image:: https://img.shields.io/pypi/pyversions/pytest.svg\n :target: https://pypi.python.org/pypi/pytest\n\n.. image:: https://img.shields.io/coveralls/pytest-dev/pytest/master.svg\n :target: https://coveralls.io/r/pytest-dev/pytest\n\n.. image:: https://travis-ci.org/pytest-dev/pytest.svg?branch=master\n :target: https://travis-ci.org/pytest-dev/pytest\n\n.. image:: https://ci.appveyor.com/api/projects/status/mrgbjaua7t33pg6b?svg=true\n :target: https://ci.appveyor.com/project/pytestbot/pytest\n\n.. image:: https://www.codetriage.com/pytest-dev/pytest/badges/users.svg\n :target: https://www.codetriage.com/pytest-dev/pytest\n\nThe ``pytest`` framework makes it easy to write small tests, yet\nscales to support complex functional testing for applications and libraries.\n\nAn example of a simple test:\n\n.. code-block:: python\n\n # content of test_sample.py\n def inc(x):\n return x + 1\n\n def test_answer():\n assert inc(3) == 5\n\n\nTo execute it::\n\n $ pytest\n ============================= test session starts =============================\n collected 1 items\n\n test_sample.py F\n\n ================================== FAILURES ===================================\n _________________________________ test_answer _________________________________\n\n def test_answer():\n > assert inc(3) == 5\n E assert 4 == 5\n E + where 4 = inc(3)\n\n test_sample.py:5: AssertionError\n ========================== 1 failed in 0.04 seconds ===========================\n\n\nDue to ``pytest``'s detailed assertion introspection, only plain ``assert`` statements are used. 
See `getting-started `_ for more examples.\n\n\nFeatures\n--------\n\n- Detailed info on failing `assert statements `_ (no need to remember ``self.assert*`` names);\n\n- `Auto-discovery\n `_\n of test modules and functions;\n\n- `Modular fixtures `_ for\n managing small or parametrized long-lived test resources;\n\n- Can run `unittest `_ (or trial),\n `nose `_ test suites out of the box;\n\n- Python 2.7, Python 3.4+, PyPy 2.3, Jython 2.5 (untested);\n\n- Rich plugin architecture, with over 315+ `external plugins `_ and thriving community;\n\n\nDocumentation\n-------------\n\nFor full documentation, including installation, tutorials and PDF documents, please see http://docs.pytest.org.\n\n\nBugs/Requests\n-------------\n\nPlease use the `GitHub issue tracker `_ to submit bugs or request features.\n\n\nChangelog\n---------\n\nConsult the `Changelog `__ page for fixes and enhancements of each version.\n\n\nLicense\n-------\n\nCopyright Holger Krekel and others, 2004-2017.\n\nDistributed under the terms of the `MIT`_ license, pytest is free and open source software.\n\n.. _`MIT`: https://github.com/pytest-dev/pytest/blob/master/LICENSE\n\n\n", + "docs_url": null, + "download_url": "", + "downloads": { + "last_day": -1, + "last_month": -1, + "last_week": -1 }, - "urls": [ - { - "comment_text": "", - "digests": { - "md5": "c0b6697b7130c495aba71cdfcf939cc9", - "sha256": "6266f87ab64692112e5477eba395cfedda53b1933ccd29478e671e73b420c19c" - }, - "downloads": -1, - "filename": "pytest-3.5.0-py2.py3-none-any.whl", - "has_sig": false, - "md5_digest": "c0b6697b7130c495aba71cdfcf939cc9", - "packagetype": "bdist_wheel", - "python_version": "py2.py3", - "size": 194247, - "upload_time": "2018-03-22T23:47:54", - "url": "https://files.pythonhosted.org/packages/ed/96/271c93f75212c06e2a7ec3e2fa8a9c90acee0a4838dc05bf379ea09aae31/pytest-3.5.0-py2.py3-none-any.whl" + "home_page": "http://pytest.org", + "keywords": "test unittest", + "license": "MIT license", + "maintainer": "", + "maintainer_email": "", + "name": "pytest", + "package_url": "https://pypi.org/project/pytest/", + "platform": "unix", + "project_url": "https://pypi.org/project/pytest/", + "release_url": "https://pypi.org/project/pytest/3.5.0/", + "requires_dist": [ + "py (>=1.5.0)", + "six (>=1.10.0)", + "setuptools", + "attrs (>=17.4.0)", + "more-itertools (>=4.0.0)", + "pluggy (<0.7,>=0.5)", + "funcsigs; python_version < \"3.0\"", + "colorama; sys_platform == \"win32\"" + ], + "requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", + "summary": "pytest: simple powerful testing with Python", + "version": "3.5.0" + }, + "last_serial": 3697219, + "releases": { + "3.5.0": [ + { + "comment_text": "", + "digests": { + "md5": "c0b6697b7130c495aba71cdfcf939cc9", + "sha256": "6266f87ab64692112e5477eba395cfedda53b1933ccd29478e671e73b420c19c" }, - { - "comment_text": "", - "digests": { - "md5": "b8e13a4091f07ff1fda081cf40ff99f1", - "sha256": "fae491d1874f199537fd5872b5e1f0e74a009b979df9d53d1553fd03da1703e1" - }, - "downloads": -1, - "filename": "pytest-3.5.0.tar.gz", - "has_sig": false, - "md5_digest": "b8e13a4091f07ff1fda081cf40ff99f1", - "packagetype": "sdist", - "python_version": "source", - "size": 830816, - "upload_time": "2018-03-22T23:47:56", - "url": "https://files.pythonhosted.org/packages/2d/56/6019153cdd743300c5688ab3b07702355283e53c83fbf922242c053ffb7b/pytest-3.5.0.tar.gz" - } + "downloads": -1, + "filename": "pytest-3.5.0-py2.py3-none-any.whl", + "has_sig": false, + "md5_digest": "c0b6697b7130c495aba71cdfcf939cc9", + "packagetype": 
"bdist_wheel", + "python_version": "py2.py3", + "size": 194247, + "upload_time": "2018-03-22T23:47:54", + "url": "https://files.pythonhosted.org/packages/ed/96/271c93f75212c06e2a7ec3e2fa8a9c90acee0a4838dc05bf379ea09aae31/pytest-3.5.0-py2.py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "b8e13a4091f07ff1fda081cf40ff99f1", + "sha256": "fae491d1874f199537fd5872b5e1f0e74a009b979df9d53d1553fd03da1703e1" + }, + "downloads": -1, + "filename": "pytest-3.5.0.tar.gz", + "has_sig": false, + "md5_digest": "b8e13a4091f07ff1fda081cf40ff99f1", + "packagetype": "sdist", + "python_version": "source", + "size": 830816, + "upload_time": "2018-03-22T23:47:56", + "url": "https://files.pythonhosted.org/packages/2d/56/6019153cdd743300c5688ab3b07702355283e53c83fbf922242c053ffb7b/pytest-3.5.0.tar.gz" + } ] + }, + "urls": [ + { + "comment_text": "", + "digests": { + "md5": "c0b6697b7130c495aba71cdfcf939cc9", + "sha256": "6266f87ab64692112e5477eba395cfedda53b1933ccd29478e671e73b420c19c" + }, + "downloads": -1, + "filename": "pytest-3.5.0-py2.py3-none-any.whl", + "has_sig": false, + "md5_digest": "c0b6697b7130c495aba71cdfcf939cc9", + "packagetype": "bdist_wheel", + "python_version": "py2.py3", + "size": 194247, + "upload_time": "2018-03-22T23:47:54", + "url": "https://files.pythonhosted.org/packages/ed/96/271c93f75212c06e2a7ec3e2fa8a9c90acee0a4838dc05bf379ea09aae31/pytest-3.5.0-py2.py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "b8e13a4091f07ff1fda081cf40ff99f1", + "sha256": "fae491d1874f199537fd5872b5e1f0e74a009b979df9d53d1553fd03da1703e1" + }, + "downloads": -1, + "filename": "pytest-3.5.0.tar.gz", + "has_sig": false, + "md5_digest": "b8e13a4091f07ff1fda081cf40ff99f1", + "packagetype": "sdist", + "python_version": "source", + "size": 830816, + "upload_time": "2018-03-22T23:47:56", + "url": "https://files.pythonhosted.org/packages/2d/56/6019153cdd743300c5688ab3b07702355283e53c83fbf922242c053ffb7b/pytest-3.5.0.tar.gz" + } + ] } diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/pytest/3.5.0.json b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/pytest/3.5.0.json index 4ef1bd29..d279d129 100644 --- a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/pytest/3.5.0.json +++ b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/pytest/3.5.0.json @@ -1,91 +1,91 @@ { - "info": { - "author": "Holger Krekel, Bruno Oliveira, Ronny Pfannschmidt, Floris Bruynooghe, Brianna Laugher, Florian Bruhin and others", - "author_email": "", - "bugtrack_url": "https://github.com/pytest-dev/pytest/issues", - "classifiers": [ - "Development Status :: 6 - Mature", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Operating System :: MacOS :: MacOS X", - "Operating System :: Microsoft :: Windows", - "Operating System :: POSIX", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Topic :: Software Development :: Libraries", - "Topic :: Software Development :: Testing", - "Topic :: Utilities" - ], - "description": ".. image:: http://docs.pytest.org/en/latest/_static/pytest1.png\n :target: http://docs.pytest.org\n :align: center\n :alt: pytest\n\n------\n\n.. image:: https://img.shields.io/pypi/v/pytest.svg\n :target: https://pypi.python.org/pypi/pytest\n\n.. 
image:: https://anaconda.org/conda-forge/pytest/badges/version.svg\n :target: https://anaconda.org/conda-forge/pytest\n\n.. image:: https://img.shields.io/pypi/pyversions/pytest.svg\n :target: https://pypi.python.org/pypi/pytest\n\n.. image:: https://img.shields.io/coveralls/pytest-dev/pytest/master.svg\n :target: https://coveralls.io/r/pytest-dev/pytest\n\n.. image:: https://travis-ci.org/pytest-dev/pytest.svg?branch=master\n :target: https://travis-ci.org/pytest-dev/pytest\n\n.. image:: https://ci.appveyor.com/api/projects/status/mrgbjaua7t33pg6b?svg=true\n :target: https://ci.appveyor.com/project/pytestbot/pytest\n\n.. image:: https://www.codetriage.com/pytest-dev/pytest/badges/users.svg\n :target: https://www.codetriage.com/pytest-dev/pytest\n\nThe ``pytest`` framework makes it easy to write small tests, yet\nscales to support complex functional testing for applications and libraries.\n\nAn example of a simple test:\n\n.. code-block:: python\n\n # content of test_sample.py\n def inc(x):\n return x + 1\n\n def test_answer():\n assert inc(3) == 5\n\n\nTo execute it::\n\n $ pytest\n ============================= test session starts =============================\n collected 1 items\n\n test_sample.py F\n\n ================================== FAILURES ===================================\n _________________________________ test_answer _________________________________\n\n def test_answer():\n > assert inc(3) == 5\n E assert 4 == 5\n E + where 4 = inc(3)\n\n test_sample.py:5: AssertionError\n ========================== 1 failed in 0.04 seconds ===========================\n\n\nDue to ``pytest``'s detailed assertion introspection, only plain ``assert`` statements are used. See `getting-started `_ for more examples.\n\n\nFeatures\n--------\n\n- Detailed info on failing `assert statements `_ (no need to remember ``self.assert*`` names);\n\n- `Auto-discovery\n `_\n of test modules and functions;\n\n- `Modular fixtures `_ for\n managing small or parametrized long-lived test resources;\n\n- Can run `unittest `_ (or trial),\n `nose `_ test suites out of the box;\n\n- Python 2.7, Python 3.4+, PyPy 2.3, Jython 2.5 (untested);\n\n- Rich plugin architecture, with over 315+ `external plugins `_ and thriving community;\n\n\nDocumentation\n-------------\n\nFor full documentation, including installation, tutorials and PDF documents, please see http://docs.pytest.org.\n\n\nBugs/Requests\n-------------\n\nPlease use the `GitHub issue tracker `_ to submit bugs or request features.\n\n\nChangelog\n---------\n\nConsult the `Changelog `__ page for fixes and enhancements of each version.\n\n\nLicense\n-------\n\nCopyright Holger Krekel and others, 2004-2017.\n\nDistributed under the terms of the `MIT`_ license, pytest is free and open source software.\n\n.. 
_`MIT`: https://github.com/pytest-dev/pytest/blob/master/LICENSE\n\n\n", - "docs_url": null, - "download_url": "", - "downloads": { - "last_day": -1, - "last_month": -1, - "last_week": -1 - }, - "home_page": "http://pytest.org", - "keywords": "test unittest", - "license": "MIT license", - "maintainer": "", - "maintainer_email": "", - "name": "pytest", - "package_url": "https://pypi.org/project/pytest/", - "platform": "unix", - "project_url": "https://pypi.org/project/pytest/", - "release_url": "https://pypi.org/project/pytest/3.5.0/", - "requires_dist": [ - "py (>=1.5.0)", - "six (>=1.10.0)", - "setuptools", - "attrs (>=17.4.0)", - "more-itertools (>=4.0.0)", - "pluggy (<0.7,>=0.5)", - "funcsigs; python_version < \"3.0\"", - "colorama; sys_platform == \"win32\"" - ], - "requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", - "summary": "pytest: simple powerful testing with Python", - "version": "3.5.0" + "info": { + "author": "Holger Krekel, Bruno Oliveira, Ronny Pfannschmidt, Floris Bruynooghe, Brianna Laugher, Florian Bruhin and others", + "author_email": "", + "bugtrack_url": "https://github.com/pytest-dev/pytest/issues", + "classifiers": [ + "Development Status :: 6 - Mature", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: Microsoft :: Windows", + "Operating System :: POSIX", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Topic :: Software Development :: Libraries", + "Topic :: Software Development :: Testing", + "Topic :: Utilities" + ], + "description": ".. image:: http://docs.pytest.org/en/latest/_static/pytest1.png\n :target: http://docs.pytest.org\n :align: center\n :alt: pytest\n\n------\n\n.. image:: https://img.shields.io/pypi/v/pytest.svg\n :target: https://pypi.python.org/pypi/pytest\n\n.. image:: https://anaconda.org/conda-forge/pytest/badges/version.svg\n :target: https://anaconda.org/conda-forge/pytest\n\n.. image:: https://img.shields.io/pypi/pyversions/pytest.svg\n :target: https://pypi.python.org/pypi/pytest\n\n.. image:: https://img.shields.io/coveralls/pytest-dev/pytest/master.svg\n :target: https://coveralls.io/r/pytest-dev/pytest\n\n.. image:: https://travis-ci.org/pytest-dev/pytest.svg?branch=master\n :target: https://travis-ci.org/pytest-dev/pytest\n\n.. image:: https://ci.appveyor.com/api/projects/status/mrgbjaua7t33pg6b?svg=true\n :target: https://ci.appveyor.com/project/pytestbot/pytest\n\n.. image:: https://www.codetriage.com/pytest-dev/pytest/badges/users.svg\n :target: https://www.codetriage.com/pytest-dev/pytest\n\nThe ``pytest`` framework makes it easy to write small tests, yet\nscales to support complex functional testing for applications and libraries.\n\nAn example of a simple test:\n\n.. 
code-block:: python\n\n # content of test_sample.py\n def inc(x):\n return x + 1\n\n def test_answer():\n assert inc(3) == 5\n\n\nTo execute it::\n\n $ pytest\n ============================= test session starts =============================\n collected 1 items\n\n test_sample.py F\n\n ================================== FAILURES ===================================\n _________________________________ test_answer _________________________________\n\n def test_answer():\n > assert inc(3) == 5\n E assert 4 == 5\n E + where 4 = inc(3)\n\n test_sample.py:5: AssertionError\n ========================== 1 failed in 0.04 seconds ===========================\n\n\nDue to ``pytest``'s detailed assertion introspection, only plain ``assert`` statements are used. See `getting-started `_ for more examples.\n\n\nFeatures\n--------\n\n- Detailed info on failing `assert statements `_ (no need to remember ``self.assert*`` names);\n\n- `Auto-discovery\n `_\n of test modules and functions;\n\n- `Modular fixtures `_ for\n managing small or parametrized long-lived test resources;\n\n- Can run `unittest `_ (or trial),\n `nose `_ test suites out of the box;\n\n- Python 2.7, Python 3.4+, PyPy 2.3, Jython 2.5 (untested);\n\n- Rich plugin architecture, with over 315+ `external plugins `_ and thriving community;\n\n\nDocumentation\n-------------\n\nFor full documentation, including installation, tutorials and PDF documents, please see http://docs.pytest.org.\n\n\nBugs/Requests\n-------------\n\nPlease use the `GitHub issue tracker `_ to submit bugs or request features.\n\n\nChangelog\n---------\n\nConsult the `Changelog `__ page for fixes and enhancements of each version.\n\n\nLicense\n-------\n\nCopyright Holger Krekel and others, 2004-2017.\n\nDistributed under the terms of the `MIT`_ license, pytest is free and open source software.\n\n.. 
_`MIT`: https://github.com/pytest-dev/pytest/blob/master/LICENSE\n\n\n", + "docs_url": null, + "download_url": "", + "downloads": { + "last_day": -1, + "last_month": -1, + "last_week": -1 }, - "last_serial": 3697219, - "urls": [ - { - "comment_text": "", - "digests": { - "md5": "c0b6697b7130c495aba71cdfcf939cc9", - "sha256": "6266f87ab64692112e5477eba395cfedda53b1933ccd29478e671e73b420c19c" - }, - "downloads": -1, - "filename": "pytest-3.5.0-py2.py3-none-any.whl", - "has_sig": false, - "md5_digest": "c0b6697b7130c495aba71cdfcf939cc9", - "packagetype": "bdist_wheel", - "python_version": "py2.py3", - "size": 194247, - "upload_time": "2018-03-22T23:47:54", - "url": "https://files.pythonhosted.org/packages/ed/96/271c93f75212c06e2a7ec3e2fa8a9c90acee0a4838dc05bf379ea09aae31/pytest-3.5.0-py2.py3-none-any.whl" - }, - { - "comment_text": "", - "digests": { - "md5": "b8e13a4091f07ff1fda081cf40ff99f1", - "sha256": "fae491d1874f199537fd5872b5e1f0e74a009b979df9d53d1553fd03da1703e1" - }, - "downloads": -1, - "filename": "pytest-3.5.0.tar.gz", - "has_sig": false, - "md5_digest": "b8e13a4091f07ff1fda081cf40ff99f1", - "packagetype": "sdist", - "python_version": "source", - "size": 830816, - "upload_time": "2018-03-22T23:47:56", - "url": "https://files.pythonhosted.org/packages/2d/56/6019153cdd743300c5688ab3b07702355283e53c83fbf922242c053ffb7b/pytest-3.5.0.tar.gz" - } - ] + "home_page": "http://pytest.org", + "keywords": "test unittest", + "license": "MIT license", + "maintainer": "", + "maintainer_email": "", + "name": "pytest", + "package_url": "https://pypi.org/project/pytest/", + "platform": "unix", + "project_url": "https://pypi.org/project/pytest/", + "release_url": "https://pypi.org/project/pytest/3.5.0/", + "requires_dist": [ + "py (>=1.5.0)", + "six (>=1.10.0)", + "setuptools", + "attrs (>=17.4.0)", + "more-itertools (>=4.0.0)", + "pluggy (<0.7,>=0.5)", + "funcsigs; python_version < \"3.0\"", + "colorama; sys_platform == \"win32\"" + ], + "requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", + "summary": "pytest: simple powerful testing with Python", + "version": "3.5.0" + }, + "last_serial": 3697219, + "urls": [ + { + "comment_text": "", + "digests": { + "md5": "c0b6697b7130c495aba71cdfcf939cc9", + "sha256": "6266f87ab64692112e5477eba395cfedda53b1933ccd29478e671e73b420c19c" + }, + "downloads": -1, + "filename": "pytest-3.5.0-py2.py3-none-any.whl", + "has_sig": false, + "md5_digest": "c0b6697b7130c495aba71cdfcf939cc9", + "packagetype": "bdist_wheel", + "python_version": "py2.py3", + "size": 194247, + "upload_time": "2018-03-22T23:47:54", + "url": "https://files.pythonhosted.org/packages/ed/96/271c93f75212c06e2a7ec3e2fa8a9c90acee0a4838dc05bf379ea09aae31/pytest-3.5.0-py2.py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "b8e13a4091f07ff1fda081cf40ff99f1", + "sha256": "fae491d1874f199537fd5872b5e1f0e74a009b979df9d53d1553fd03da1703e1" + }, + "downloads": -1, + "filename": "pytest-3.5.0.tar.gz", + "has_sig": false, + "md5_digest": "b8e13a4091f07ff1fda081cf40ff99f1", + "packagetype": "sdist", + "python_version": "source", + "size": 830816, + "upload_time": "2018-03-22T23:47:56", + "url": "https://files.pythonhosted.org/packages/2d/56/6019153cdd743300c5688ab3b07702355283e53c83fbf922242c053ffb7b/pytest-3.5.0.tar.gz" + } + ] } diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/setuptools.json b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/setuptools.json index bc38a3d3..d679e6d7 100644 --- 
a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/setuptools.json +++ b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/setuptools.json @@ -1,90 +1,55 @@ { - "info": { - "author": "Python Packaging Authority", - "author_email": "distutils-sig@python.org", - "bugtrack_url": "", - "classifiers": [ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Operating System :: OS Independent", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Topic :: Software Development :: Libraries :: Python Modules", - "Topic :: System :: Archiving :: Packaging", - "Topic :: System :: Systems Administration", - "Topic :: Utilities" - ], - "description": "", - "description_content_type": "text/x-rst; charset=UTF-8", - "docs_url": null, - "download_url": "", - "downloads": { - "last_day": -1, - "last_month": -1, - "last_week": -1 - }, - "home_page": "https://github.com/pypa/setuptools", - "keywords": "CPAN PyPI distutils eggs package management", - "license": "", - "maintainer": "", - "maintainer_email": "", - "name": "setuptools", - "package_url": "https://pypi.org/project/setuptools/", - "platform": "", - "project_url": "https://pypi.org/project/setuptools/", - "release_url": "https://pypi.org/project/setuptools/39.2.0/", - "requires_dist": [ - "wincertstore (==0.2); (sys_platform=='win32') and extra == 'ssl'", - "certifi (==2016.9.26); extra == 'certs'" - ], - "requires_python": ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*", - "summary": "Easily download, build, install, upgrade, and uninstall Python packages", - "version": "39.2.0" - }, - "last_serial": 3879671, - "releases": { - "39.2.0": [ - { - "comment_text": "", - "digests": { - "md5": "8d066d2201311ed30be535b473e32fed", - "sha256": "8fca9275c89964f13da985c3656cb00ba029d7f3916b37990927ffdf264e7926" - }, - "downloads": -1, - "filename": "setuptools-39.2.0-py2.py3-none-any.whl", - "has_sig": false, - "md5_digest": "8d066d2201311ed30be535b473e32fed", - "packagetype": "bdist_wheel", - "python_version": "py2.py3", - "size": 567556, - "upload_time": "2018-05-19T19:19:22", - "url": "https://files.pythonhosted.org/packages/7f/e1/820d941153923aac1d49d7fc37e17b6e73bfbd2904959fffbad77900cf92/setuptools-39.2.0-py2.py3-none-any.whl" - }, - { - "comment_text": "", - "digests": { - "md5": "dd4e3fa83a21bf7bf9c51026dc8a4e59", - "sha256": "f7cddbb5f5c640311eb00eab6e849f7701fa70bf6a183fc8a2c33dd1d1672fb2" - }, - "downloads": -1, - "filename": "setuptools-39.2.0.zip", - "has_sig": false, - "md5_digest": "dd4e3fa83a21bf7bf9c51026dc8a4e59", - "packagetype": "sdist", - "python_version": "source", - "size": 851112, - "upload_time": "2018-05-19T19:19:24", - "url": "https://files.pythonhosted.org/packages/1a/04/d6f1159feaccdfc508517dba1929eb93a2854de729fa68da9d5c6b48fa00/setuptools-39.2.0.zip" - } - ] + "info": { + "author": "Python Packaging Authority", + "author_email": "distutils-sig@python.org", + "bugtrack_url": "", + "classifiers": [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: 
Python :: 3.3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Topic :: Software Development :: Libraries :: Python Modules", + "Topic :: System :: Archiving :: Packaging", + "Topic :: System :: Systems Administration", + "Topic :: Utilities" + ], + "description": "", + "description_content_type": "text/x-rst; charset=UTF-8", + "docs_url": null, + "download_url": "", + "downloads": { + "last_day": -1, + "last_month": -1, + "last_week": -1 }, - "urls": [ + "home_page": "https://github.com/pypa/setuptools", + "keywords": "CPAN PyPI distutils eggs package management", + "license": "", + "maintainer": "", + "maintainer_email": "", + "name": "setuptools", + "package_url": "https://pypi.org/project/setuptools/", + "platform": "", + "project_url": "https://pypi.org/project/setuptools/", + "release_url": "https://pypi.org/project/setuptools/39.2.0/", + "requires_dist": [ + "wincertstore (==0.2); (sys_platform=='win32') and extra == 'ssl'", + "certifi (==2016.9.26); extra == 'certs'" + ], + "requires_python": ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*", + "summary": "Easily download, build, install, upgrade, and uninstall Python packages", + "version": "39.2.0" + }, + "last_serial": 3879671, + "releases": { + "39.2.0": [ { "comment_text": "", "digests": { @@ -118,4 +83,39 @@ "url": "https://files.pythonhosted.org/packages/1a/04/d6f1159feaccdfc508517dba1929eb93a2854de729fa68da9d5c6b48fa00/setuptools-39.2.0.zip" } ] - } + }, + "urls": [ + { + "comment_text": "", + "digests": { + "md5": "8d066d2201311ed30be535b473e32fed", + "sha256": "8fca9275c89964f13da985c3656cb00ba029d7f3916b37990927ffdf264e7926" + }, + "downloads": -1, + "filename": "setuptools-39.2.0-py2.py3-none-any.whl", + "has_sig": false, + "md5_digest": "8d066d2201311ed30be535b473e32fed", + "packagetype": "bdist_wheel", + "python_version": "py2.py3", + "size": 567556, + "upload_time": "2018-05-19T19:19:22", + "url": "https://files.pythonhosted.org/packages/7f/e1/820d941153923aac1d49d7fc37e17b6e73bfbd2904959fffbad77900cf92/setuptools-39.2.0-py2.py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "dd4e3fa83a21bf7bf9c51026dc8a4e59", + "sha256": "f7cddbb5f5c640311eb00eab6e849f7701fa70bf6a183fc8a2c33dd1d1672fb2" + }, + "downloads": -1, + "filename": "setuptools-39.2.0.zip", + "has_sig": false, + "md5_digest": "dd4e3fa83a21bf7bf9c51026dc8a4e59", + "packagetype": "sdist", + "python_version": "source", + "size": 851112, + "upload_time": "2018-05-19T19:19:24", + "url": "https://files.pythonhosted.org/packages/1a/04/d6f1159feaccdfc508517dba1929eb93a2854de729fa68da9d5c6b48fa00/setuptools-39.2.0.zip" + } + ] +} diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/six.json b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/six.json index befe565a..92d79c54 100644 --- a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/six.json +++ b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/six.json @@ -1,108 +1,108 @@ { - "info": { - "author": "Benjamin Peterson", - "author_email": "benjamin@python.org", - "bugtrack_url": null, - "classifiers": [ - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 3", - "Topic :: Software Development :: Libraries", - "Topic :: Utilities" - ], - "description": "", - "docs_url": "https://pythonhosted.org/six/", - "download_url": "", - "downloads": { - "last_day": -1, - "last_month": -1, - 
"last_week": -1 - }, - "home_page": "http://pypi.python.org/pypi/six/", - "keywords": "", - "license": "MIT", - "maintainer": "", - "maintainer_email": "", - "name": "six", - "package_url": "https://pypi.org/project/six/", - "platform": "", - "project_url": "https://pypi.org/project/six/", - "release_url": "https://pypi.org/project/six/1.11.0/", - "requires_dist": null, - "requires_python": "", - "summary": "Python 2 and 3 compatibility utilities", - "version": "1.11.0" - }, - "last_serial": 3180827, - "releases": { - "1.11.0": [ - { - "comment_text": "", - "digests": { - "md5": "866ab722be6bdfed6830f3179af65468", - "sha256": "832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb" - }, - "downloads": -1, - "filename": "six-1.11.0-py2.py3-none-any.whl", - "has_sig": false, - "md5_digest": "866ab722be6bdfed6830f3179af65468", - "packagetype": "bdist_wheel", - "python_version": "py2.py3", - "size": 10702, - "upload_time": "2017-09-17T18:46:53", - "url": "https://files.pythonhosted.org/packages/67/4b/141a581104b1f6397bfa78ac9d43d8ad29a7ca43ea90a2d863fe3056e86a/six-1.11.0-py2.py3-none-any.whl" - }, - { - "comment_text": "", - "digests": { - "md5": "d12789f9baf7e9fb2524c0c64f1773f8", - "sha256": "70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9" - }, - "downloads": -1, - "filename": "six-1.11.0.tar.gz", - "has_sig": false, - "md5_digest": "d12789f9baf7e9fb2524c0c64f1773f8", - "packagetype": "sdist", - "python_version": "source", - "size": 29860, - "upload_time": "2017-09-17T18:46:54", - "url": "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz" - } - ] + "info": { + "author": "Benjamin Peterson", + "author_email": "benjamin@python.org", + "bugtrack_url": null, + "classifiers": [ + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 3", + "Topic :: Software Development :: Libraries", + "Topic :: Utilities" + ], + "description": "", + "docs_url": "https://pythonhosted.org/six/", + "download_url": "", + "downloads": { + "last_day": -1, + "last_month": -1, + "last_week": -1 }, - "urls": [ - { - "comment_text": "", - "digests": { - "md5": "866ab722be6bdfed6830f3179af65468", - "sha256": "832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb" - }, - "downloads": -1, - "filename": "six-1.11.0-py2.py3-none-any.whl", - "has_sig": false, - "md5_digest": "866ab722be6bdfed6830f3179af65468", - "packagetype": "bdist_wheel", - "python_version": "py2.py3", - "size": 10702, - "upload_time": "2017-09-17T18:46:53", - "url": "https://files.pythonhosted.org/packages/67/4b/141a581104b1f6397bfa78ac9d43d8ad29a7ca43ea90a2d863fe3056e86a/six-1.11.0-py2.py3-none-any.whl" + "home_page": "http://pypi.python.org/pypi/six/", + "keywords": "", + "license": "MIT", + "maintainer": "", + "maintainer_email": "", + "name": "six", + "package_url": "https://pypi.org/project/six/", + "platform": "", + "project_url": "https://pypi.org/project/six/", + "release_url": "https://pypi.org/project/six/1.11.0/", + "requires_dist": null, + "requires_python": "", + "summary": "Python 2 and 3 compatibility utilities", + "version": "1.11.0" + }, + "last_serial": 3180827, + "releases": { + "1.11.0": [ + { + "comment_text": "", + "digests": { + "md5": "866ab722be6bdfed6830f3179af65468", + "sha256": "832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb" }, - { - "comment_text": "", - "digests": { - "md5": 
"d12789f9baf7e9fb2524c0c64f1773f8", - "sha256": "70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9" - }, - "downloads": -1, - "filename": "six-1.11.0.tar.gz", - "has_sig": false, - "md5_digest": "d12789f9baf7e9fb2524c0c64f1773f8", - "packagetype": "sdist", - "python_version": "source", - "size": 29860, - "upload_time": "2017-09-17T18:46:54", - "url": "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz" - } + "downloads": -1, + "filename": "six-1.11.0-py2.py3-none-any.whl", + "has_sig": false, + "md5_digest": "866ab722be6bdfed6830f3179af65468", + "packagetype": "bdist_wheel", + "python_version": "py2.py3", + "size": 10702, + "upload_time": "2017-09-17T18:46:53", + "url": "https://files.pythonhosted.org/packages/67/4b/141a581104b1f6397bfa78ac9d43d8ad29a7ca43ea90a2d863fe3056e86a/six-1.11.0-py2.py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "d12789f9baf7e9fb2524c0c64f1773f8", + "sha256": "70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9" + }, + "downloads": -1, + "filename": "six-1.11.0.tar.gz", + "has_sig": false, + "md5_digest": "d12789f9baf7e9fb2524c0c64f1773f8", + "packagetype": "sdist", + "python_version": "source", + "size": 29860, + "upload_time": "2017-09-17T18:46:54", + "url": "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz" + } ] + }, + "urls": [ + { + "comment_text": "", + "digests": { + "md5": "866ab722be6bdfed6830f3179af65468", + "sha256": "832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb" + }, + "downloads": -1, + "filename": "six-1.11.0-py2.py3-none-any.whl", + "has_sig": false, + "md5_digest": "866ab722be6bdfed6830f3179af65468", + "packagetype": "bdist_wheel", + "python_version": "py2.py3", + "size": 10702, + "upload_time": "2017-09-17T18:46:53", + "url": "https://files.pythonhosted.org/packages/67/4b/141a581104b1f6397bfa78ac9d43d8ad29a7ca43ea90a2d863fe3056e86a/six-1.11.0-py2.py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "d12789f9baf7e9fb2524c0c64f1773f8", + "sha256": "70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9" + }, + "downloads": -1, + "filename": "six-1.11.0.tar.gz", + "has_sig": false, + "md5_digest": "d12789f9baf7e9fb2524c0c64f1773f8", + "packagetype": "sdist", + "python_version": "source", + "size": 29860, + "upload_time": "2017-09-17T18:46:54", + "url": "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz" + } + ] } diff --git a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/six/1.11.0.json b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/six/1.11.0.json index e914ffee..d6cd8252 100644 --- a/vendor/poetry/tests/repositories/fixtures/pypi.org/json/six/1.11.0.json +++ b/vendor/poetry/tests/repositories/fixtures/pypi.org/json/six/1.11.0.json @@ -1,72 +1,72 @@ { - "info": { - "author": "Benjamin Peterson", - "author_email": "benjamin@python.org", - "bugtrack_url": null, - "classifiers": [ - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 3", - "Topic :: Software Development :: Libraries", - "Topic :: Utilities" - ], - "description": "", - "docs_url": "https://pythonhosted.org/six/", - "download_url": "", - "downloads": { - "last_day": -1, - "last_month": -1, - "last_week": -1 - }, - "home_page": 
"http://pypi.python.org/pypi/six/", - "keywords": "", - "license": "MIT", - "maintainer": "", - "maintainer_email": "", - "name": "six", - "package_url": "https://pypi.org/project/six/", - "platform": "", - "project_url": "https://pypi.org/project/six/", - "release_url": "https://pypi.org/project/six/1.11.0/", - "requires_dist": null, - "requires_python": "", - "summary": "Python 2 and 3 compatibility utilities", - "version": "1.11.0" + "info": { + "author": "Benjamin Peterson", + "author_email": "benjamin@python.org", + "bugtrack_url": null, + "classifiers": [ + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 3", + "Topic :: Software Development :: Libraries", + "Topic :: Utilities" + ], + "description": "", + "docs_url": "https://pythonhosted.org/six/", + "download_url": "", + "downloads": { + "last_day": -1, + "last_month": -1, + "last_week": -1 }, - "last_serial": 3180827, - "urls": [ - { - "comment_text": "", - "digests": { - "md5": "866ab722be6bdfed6830f3179af65468", - "sha256": "832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb" - }, - "downloads": -1, - "filename": "six-1.11.0-py2.py3-none-any.whl", - "has_sig": false, - "md5_digest": "866ab722be6bdfed6830f3179af65468", - "packagetype": "bdist_wheel", - "python_version": "py2.py3", - "size": 10702, - "upload_time": "2017-09-17T18:46:53", - "url": "https://files.pythonhosted.org/packages/67/4b/141a581104b1f6397bfa78ac9d43d8ad29a7ca43ea90a2d863fe3056e86a/six-1.11.0-py2.py3-none-any.whl" - }, - { - "comment_text": "", - "digests": { - "md5": "d12789f9baf7e9fb2524c0c64f1773f8", - "sha256": "70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9" - }, - "downloads": -1, - "filename": "six-1.11.0.tar.gz", - "has_sig": false, - "md5_digest": "d12789f9baf7e9fb2524c0c64f1773f8", - "packagetype": "sdist", - "python_version": "source", - "size": 29860, - "upload_time": "2017-09-17T18:46:54", - "url": "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz" - } - ] + "home_page": "http://pypi.python.org/pypi/six/", + "keywords": "", + "license": "MIT", + "maintainer": "", + "maintainer_email": "", + "name": "six", + "package_url": "https://pypi.org/project/six/", + "platform": "", + "project_url": "https://pypi.org/project/six/", + "release_url": "https://pypi.org/project/six/1.11.0/", + "requires_dist": null, + "requires_python": "", + "summary": "Python 2 and 3 compatibility utilities", + "version": "1.11.0" + }, + "last_serial": 3180827, + "urls": [ + { + "comment_text": "", + "digests": { + "md5": "866ab722be6bdfed6830f3179af65468", + "sha256": "832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb" + }, + "downloads": -1, + "filename": "six-1.11.0-py2.py3-none-any.whl", + "has_sig": false, + "md5_digest": "866ab722be6bdfed6830f3179af65468", + "packagetype": "bdist_wheel", + "python_version": "py2.py3", + "size": 10702, + "upload_time": "2017-09-17T18:46:53", + "url": "https://files.pythonhosted.org/packages/67/4b/141a581104b1f6397bfa78ac9d43d8ad29a7ca43ea90a2d863fe3056e86a/six-1.11.0-py2.py3-none-any.whl" + }, + { + "comment_text": "", + "digests": { + "md5": "d12789f9baf7e9fb2524c0c64f1773f8", + "sha256": "70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9" + }, + "downloads": -1, + "filename": "six-1.11.0.tar.gz", + "has_sig": false, + "md5_digest": "d12789f9baf7e9fb2524c0c64f1773f8", + "packagetype": "sdist", + 
"python_version": "source", + "size": 29860, + "upload_time": "2017-09-17T18:46:54", + "url": "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz" + } + ] } diff --git a/vendor/poetry/tests/repositories/fixtures/single-page/jax_releases.html b/vendor/poetry/tests/repositories/fixtures/single-page/jax_releases.html new file mode 100644 index 00000000..ce3232a9 --- /dev/null +++ b/vendor/poetry/tests/repositories/fixtures/single-page/jax_releases.html @@ -0,0 +1,27 @@ + + + + +nocuda/jaxlib-0.3.0-cp310-none-manylinux2010_x86_64.whl
+<a href="https://storage.googleapis.com/jax-releases/nocuda/jaxlib-0.3.0-cp37-none-manylinux2010_x86_64.whl">nocuda/jaxlib-0.3.0-cp37-none-manylinux2010_x86_64.whl</a><br>
+<a href="https://storage.googleapis.com/jax-releases/nocuda/jaxlib-0.3.0-cp38-none-manylinux2010_x86_64.whl">nocuda/jaxlib-0.3.0-cp38-none-manylinux2010_x86_64.whl</a><br>
+<a href="https://storage.googleapis.com/jax-releases/nocuda/jaxlib-0.3.0-cp39-none-manylinux2010_x86_64.whl">nocuda/jaxlib-0.3.0-cp39-none-manylinux2010_x86_64.whl</a><br>
+<a href="https://storage.googleapis.com/jax-releases/nocuda/jaxlib-0.3.2-cp310-none-manylinux2010_x86_64.whl">nocuda/jaxlib-0.3.2-cp310-none-manylinux2010_x86_64.whl</a><br>
+<a href="https://storage.googleapis.com/jax-releases/nocuda/jaxlib-0.3.2-cp37-none-manylinux2010_x86_64.whl">nocuda/jaxlib-0.3.2-cp37-none-manylinux2010_x86_64.whl</a><br>
+<a href="https://storage.googleapis.com/jax-releases/nocuda/jaxlib-0.3.2-cp38-none-manylinux2010_x86_64.whl">nocuda/jaxlib-0.3.2-cp38-none-manylinux2010_x86_64.whl</a><br>
+<a href="https://storage.googleapis.com/jax-releases/nocuda/jaxlib-0.3.2-cp39-none-manylinux2010_x86_64.whl">nocuda/jaxlib-0.3.2-cp39-none-manylinux2010_x86_64.whl</a><br>
+<a href="https://storage.googleapis.com/jax-releases/nocuda/jaxlib-0.3.5-cp310-none-manylinux2010_x86_64.whl">nocuda/jaxlib-0.3.5-cp310-none-manylinux2010_x86_64.whl</a><br>
+<a href="https://storage.googleapis.com/jax-releases/nocuda/jaxlib-0.3.5-cp37-none-manylinux2010_x86_64.whl">nocuda/jaxlib-0.3.5-cp37-none-manylinux2010_x86_64.whl</a><br>
+<a href="https://storage.googleapis.com/jax-releases/nocuda/jaxlib-0.3.5-cp38-none-manylinux2010_x86_64.whl">nocuda/jaxlib-0.3.5-cp38-none-manylinux2010_x86_64.whl</a><br>
+<a href="https://storage.googleapis.com/jax-releases/nocuda/jaxlib-0.3.5-cp39-none-manylinux2010_x86_64.whl">nocuda/jaxlib-0.3.5-cp39-none-manylinux2010_x86_64.whl</a><br>
+<a href="https://storage.googleapis.com/jax-releases/nocuda/jaxlib-0.3.7-cp310-none-manylinux2014_x86_64.whl">nocuda/jaxlib-0.3.7-cp310-none-manylinux2014_x86_64.whl</a><br>
+<a href="https://storage.googleapis.com/jax-releases/nocuda/jaxlib-0.3.7-cp37-none-manylinux2014_x86_64.whl">nocuda/jaxlib-0.3.7-cp37-none-manylinux2014_x86_64.whl</a><br>
+<a href="https://storage.googleapis.com/jax-releases/nocuda/jaxlib-0.3.7-cp38-none-manylinux2014_x86_64.whl">nocuda/jaxlib-0.3.7-cp38-none-manylinux2014_x86_64.whl</a><br>
+<a href="https://storage.googleapis.com/jax-releases/nocuda/jaxlib-0.3.7-cp39-none-manylinux2014_x86_64.whl">nocuda/jaxlib-0.3.7-cp39-none-manylinux2014_x86_64.whl</a><br>
+<a href="https://storage.googleapis.com/jax-releases/jax/jax-0.3.0.tar.gz">jax/jax-0.3.0.tar.gz</a><br>
+<a href="https://storage.googleapis.com/jax-releases/jax/jax-0.3.2.tar.gz">jax/jax-0.3.2.tar.gz</a><br>
+<a href="https://storage.googleapis.com/jax-releases/jax/jax-0.3.5.tar.gz">jax/jax-0.3.5.tar.gz</a><br>
+<a href="https://storage.googleapis.com/jax-releases/jax/jax-0.3.6.tar.gz">jax/jax-0.3.6.tar.gz</a><br>
+<a href="https://storage.googleapis.com/jax-releases/jax/jax-0.3.7.tar.gz">jax/jax-0.3.7.tar.gz</a><br>
+</body>
+</html>
diff --git a/vendor/poetry/tests/repositories/link_sources/__init__.py b/vendor/poetry/tests/repositories/link_sources/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/vendor/poetry/tests/repositories/link_sources/test_base.py b/vendor/poetry/tests/repositories/link_sources/test_base.py
new file mode 100644
index 00000000..c949f90a
--- /dev/null
+++ b/vendor/poetry/tests/repositories/link_sources/test_base.py
@@ -0,0 +1,94 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from unittest.mock import PropertyMock
+
+import pytest
+
+from packaging.utils import canonicalize_name
+from poetry.core.packages.package import Package
+from poetry.core.packages.utils.link import Link
+from poetry.core.semver.version import Version
+
+from poetry.repositories.link_sources.base import LinkSource
+
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable
+
+    from pytest_mock import MockerFixture
+
+
+@pytest.fixture
+def link_source(mocker: MockerFixture) -> LinkSource:
+    url = "https://example.org"
+    link_source = LinkSource(url)
+    mocker.patch(
+        f"{LinkSource.__module__}.{LinkSource.__qualname__}.links",
+        new_callable=PropertyMock,
+        return_value=iter(
+            [
+                Link(f"{url}/demo-0.1.0.tar.gz"),
+                Link(f"{url}/demo-0.1.0_invalid.tar.gz"),
+                Link(f"{url}/invalid.tar.gz"),
+                Link(f"{url}/demo-0.1.0-py2.py3-none-any.whl"),
+                Link(f"{url}/demo-0.1.1.tar.gz"),
+            ]
+        ),
+    )
+    return link_source
+
+
+@pytest.mark.parametrize(
+    "filename, expected",
+    [
+        ("demo-0.1.0-py2.py3-none-any.whl", Package("demo", "0.1.0")),
+        ("demo-0.1.0.tar.gz", Package("demo", "0.1.0")),
+        ("demo-0.1.0.egg", Package("demo", "0.1.0")),
+        ("demo-0.1.0_invalid-py2.py3-none-any.whl", None),  # invalid version
+        ("demo-0.1.0_invalid.egg", None),  # invalid version
+        ("no-package-at-all.txt", None),
+    ],
+)
+def test_link_package_data(filename: str, expected: Package | None) -> None:
+    link = Link(f"https://example.org/{filename}")
+    assert LinkSource.link_package_data(link) == expected
+
+
+@pytest.mark.parametrize(
+    "name, expected",
+    [
+        ("demo", {Version.parse("0.1.0"), Version.parse("0.1.1")}),
+        ("invalid", set()),
+    ],
+)
+def test_versions(name: str, expected: set[Version], link_source: LinkSource) -> None:
+    assert set(link_source.versions(name)) == expected
+
+
+def test_packages(link_source: LinkSource) -> None:
+    expected = {
+        Package("demo", "0.1.0"),
+        Package("demo", "0.1.0"),
+        Package("demo", "0.1.1"),
+    }
+    assert set(link_source.packages) == expected
+
+
+@pytest.mark.parametrize(
+    "version_string, filenames",
+    [
+        ("0.1.0", ["demo-0.1.0.tar.gz", "demo-0.1.0-py2.py3-none-any.whl"]),
+        ("0.1.1", ["demo-0.1.1.tar.gz"]),
+        ("0.1.2", []),
+    ],
+)
+def test_links_for_version(
+    version_string: str, filenames: Iterable[str], link_source: LinkSource
+) -> None:
+    version = Version.parse(version_string)
+    expected = {Link(f"{link_source.url}/{name}") for name in filenames}
+    assert (
+        set(link_source.links_for_version(canonicalize_name("demo"), version))
+        == expected
+    )
diff --git a/vendor/poetry/tests/repositories/link_sources/test_html.py b/vendor/poetry/tests/repositories/link_sources/test_html.py
new file mode 100644
index 00000000..35b9ebfa
--- /dev/null
+++ b/vendor/poetry/tests/repositories/link_sources/test_html.py
@@ -0,0 +1,93 @@
+from __future__ import annotations
+
+import pytest
+
+from poetry.core.packages.utils.link import Link
+from poetry.core.semver.version import Version
+
+from poetry.repositories.link_sources.html import HTMLPage
+
+
+DEMO_TEMPLATE = """<!DOCTYPE html>
+<html>
+  <head>
+    <title>Links for demo</title>
+  </head>
+  <body>
+    <h1>Links for demo</h1>
+    {}
+  </body>
+</html>
+"""
+
+
+@pytest.mark.parametrize(
+    "attributes, expected_link",
+    [
+        ("", Link("https://example.org/demo-0.1.whl")),
+        (
+            'data-requires-python=">=3.7"',
+            Link("https://example.org/demo-0.1.whl", requires_python=">=3.7"),
+        ),
+        (
+            "data-yanked",
+            Link("https://example.org/demo-0.1.whl", yanked=True),
+        ),
+        (
+            'data-yanked=""',
+            Link("https://example.org/demo-0.1.whl", yanked=True),
+        ),
+        (
+            'data-yanked="<reason>"',
+            Link("https://example.org/demo-0.1.whl", yanked="<reason>"),
+        ),
+        (
+            'data-requires-python=">=3.7" data-yanked',
+            Link(
+                "https://example.org/demo-0.1.whl", requires_python=">=3.7", yanked=True
+            ),
+        ),
+    ],
+)
+def test_link_attributes(attributes: str, expected_link: Link) -> None:
+    anchor = (
+        f'<a href="https://example.org/demo-0.1.whl" {attributes}>demo-0.1.whl</a>'
+    )
+    content = DEMO_TEMPLATE.format(anchor)
+    page = HTMLPage("https://example.org", content)
+
+    assert len(list(page.links)) == 1
+    link = list(page.links)[0]
+    assert link.url == expected_link.url
+    assert link.requires_python == expected_link.requires_python
+    assert link.yanked == expected_link.yanked
+    assert link.yanked_reason == expected_link.yanked_reason
+
+
+@pytest.mark.parametrize(
+    "yanked_attrs, expected",
+    [
+        (("", ""), False),
+        (("data-yanked", ""), False),
+        (("", "data-yanked"), False),
+        (("data-yanked", "data-yanked"), True),
+        (("data-yanked='reason'", "data-yanked"), "reason"),
+        (("data-yanked", "data-yanked='reason'"), "reason"),
+        (("data-yanked='reason'", "data-yanked=''"), "reason"),
+        (("data-yanked=''", "data-yanked='reason'"), "reason"),
+        (("data-yanked='reason'", "data-yanked='reason'"), "reason"),
+        (("data-yanked='reason 1'", "data-yanked='reason 2'"), "reason 1\nreason 2"),
+    ],
+)
+def test_yanked(yanked_attrs: tuple[str, str], expected: bool | str) -> None:
+    anchors = (
+        f'<a href="https://example.org/demo-0.1.tar.gz" {yanked_attrs[0]}>'
+        "demo-0.1.tar.gz</a>"
+        f'<a href="https://example.org/demo-0.1.whl" {yanked_attrs[1]}>demo-0.1.whl</a>'
+    )
+    content = DEMO_TEMPLATE.format(anchors)
+    page = HTMLPage("https://example.org", content)
+
+    assert page.yanked("demo", Version.parse("0.1")) == expected
diff --git a/vendor/poetry/tests/repositories/test_installed_repository.py b/vendor/poetry/tests/repositories/test_installed_repository.py
index 3caa702a..904a11cf 100644
--- a/vendor/poetry/tests/repositories/test_installed_repository.py
+++ b/vendor/poetry/tests/repositories/test_installed_repository.py
@@ -1,17 +1,21 @@
-from typing import Optional
+from __future__ import annotations
 
-import pytest
+from collections import namedtuple
+from pathlib import Path
+from typing import TYPE_CHECKING
 
-from pytest_mock.plugin import MockFixture
+import pytest
 
-from poetry.core.packages import Package
 from poetry.repositories.installed_repository import InstalledRepository
-from poetry.utils._compat import PY36
-from poetry.utils._compat import Path
 from poetry.utils._compat import metadata
-from poetry.utils._compat import zipp
 from poetry.utils.env import MockEnv as BaseMockEnv
+from tests.compat import zipfile
+
+
+if TYPE_CHECKING:
+    from _pytest.logging import LogCaptureFixture
+    from poetry.core.packages.package import Package
+    from pytest_mock.plugin import MockerFixture
 
 FIXTURES_DIR = Path(__file__).parent / "fixtures"
 ENV_DIR = (FIXTURES_DIR / "installed").resolve()
@@ -23,7 +27,7 @@
     metadata.PathDistribution(SITE_PURELIB / "cleo-0.7.6.dist-info"),
     metadata.PathDistribution(SRC / "pendulum" / "pendulum.egg-info"),
     metadata.PathDistribution(
-        zipp.Path(str(SITE_PURELIB / "foo-0.1.0-py3.8.egg"), "EGG-INFO")
+        zipfile.Path(str(SITE_PURELIB / "foo-0.1.0-py3.8.egg"), "EGG-INFO")
     ),
     metadata.PathDistribution(VENDOR_DIR / "attrs-19.3.0.dist-info"),
     metadata.PathDistribution(SITE_PURELIB / "standard-1.2.3.dist-info"),
@@ -31,67 +35,95 @@
     metadata.PathDistribution(SITE_PURELIB / "editable-with-import-2.3.4.dist-info"),
     metadata.PathDistribution(SITE_PLATLIB / "lib64-2.3.4.dist-info"),
     metadata.PathDistribution(SITE_PLATLIB / "bender-2.0.5.dist-info"),
+    metadata.PathDistribution(SITE_PURELIB / "git_pep_610-1.2.3.dist-info"),
+    metadata.PathDistribution(
+        SITE_PURELIB / "git_pep_610_no_requested_version-1.2.3.dist-info"
+    ),
+    metadata.PathDistribution(
+        SITE_PURELIB / "git_pep_610_subdirectory-1.2.3.dist-info"
+    ),
+    metadata.PathDistribution(SITE_PURELIB / "url_pep_610-1.2.3.dist-info"),
+    metadata.PathDistribution(SITE_PURELIB / "file_pep_610-1.2.3.dist-info"),
+
metadata.PathDistribution(SITE_PURELIB / "directory_pep_610-1.2.3.dist-info"), + metadata.PathDistribution( + SITE_PURELIB / "editable_directory_pep_610-1.2.3.dist-info" + ), ] class MockEnv(BaseMockEnv): @property - def paths(self): + def paths(self) -> dict[str, Path]: return { "purelib": SITE_PURELIB, "platlib": SITE_PLATLIB, } @property - def sys_path(self): - return [ENV_DIR, SITE_PLATLIB, SITE_PURELIB] + def sys_path(self) -> list[str]: + return [str(path) for path in [ENV_DIR, SITE_PLATLIB, SITE_PURELIB]] @pytest.fixture -def env(): # type: () -> MockEnv +def env() -> MockEnv: return MockEnv(path=ENV_DIR) +@pytest.fixture(autouse=True) +def mock_git_info(mocker: MockerFixture) -> None: + mocker.patch( + "poetry.vcs.git.Git.info", + return_value=namedtuple("GitRepoLocalInfo", "origin revision")( + origin="https://github.com/sdispater/pendulum.git", + revision="bb058f6b78b2d28ef5d9a5e759cfa179a1a713d6", + ), + ) + + @pytest.fixture -def repository(mocker, env): # type: (MockFixture, MockEnv) -> InstalledRepository +def repository(mocker: MockerFixture, env: MockEnv) -> InstalledRepository: mocker.patch( "poetry.utils._compat.metadata.Distribution.discover", return_value=INSTALLED_RESULTS, ) - mocker.patch( - "poetry.core.vcs.git.Git.rev_parse", - return_value="bb058f6b78b2d28ef5d9a5e759cfa179a1a713d6", - ) - mocker.patch( - "poetry.core.vcs.git.Git.remote_urls", - side_effect=[ - {"remote.origin.url": "https://github.com/sdispater/pendulum.git"}, - {"remote.origin.url": "git@github.com:sdispater/pendulum.git"}, - ], - ) - mocker.patch("poetry.repositories.installed_repository._VENDORS", str(VENDOR_DIR)) return InstalledRepository.load(env) def get_package_from_repository( - name, repository -): # type: (str, InstalledRepository) -> Optional[Package] + name: str, repository: InstalledRepository +) -> Package | None: for pkg in repository.packages: if pkg.name == name: return pkg return None -def test_load_successful(repository): - assert len(repository.packages) == len(INSTALLED_RESULTS) - 1 +def test_load_successful(repository: InstalledRepository): + assert len(repository.packages) == len(INSTALLED_RESULTS) + + +def test_load_successful_with_invalid_distribution( + caplog: LogCaptureFixture, mocker: MockerFixture, env: MockEnv, tmp_dir: str +) -> None: + invalid_dist_info = Path(tmp_dir) / "site-packages" / "invalid-0.1.0.dist-info" + invalid_dist_info.mkdir(parents=True) + mocker.patch( + "poetry.utils._compat.metadata.Distribution.discover", + return_value=INSTALLED_RESULTS + [metadata.PathDistribution(invalid_dist_info)], + ) + repository_with_invalid_distribution = InstalledRepository.load(env) + assert ( + len(repository_with_invalid_distribution.packages) == len(INSTALLED_RESULTS) + ) + assert len(caplog.messages) == 1 -def test_load_ensure_isolation(repository): - package = get_package_from_repository("attrs", repository) - assert package is None + message = caplog.messages[0] + assert message.startswith("Project environment contains an invalid distribution") + assert str(invalid_dist_info) in message -def test_load_standard_package(repository): +def test_load_standard_package(repository: InstalledRepository): cleo = get_package_from_repository("cleo", repository) assert cleo is not None assert cleo.name == "cleo" @@ -106,7 +138,7 @@ def test_load_standard_package(repository): assert foo.version.text == "0.1.0" -def test_load_git_package(repository): +def test_load_git_package(repository: InstalledRepository): pendulum = get_package_from_repository("pendulum", repository) 
assert pendulum is not None assert pendulum.name == "pendulum" @@ -120,7 +152,7 @@ def test_load_git_package(repository): assert pendulum.source_reference == "bb058f6b78b2d28ef5d9a5e759cfa179a1a713d6" -def test_load_git_package_pth(repository): +def test_load_git_package_pth(repository: InstalledRepository): bender = get_package_from_repository("bender", repository) assert bender is not None assert bender.name == "bender" @@ -128,17 +160,14 @@ def test_load_git_package_pth(repository): assert bender.source_type == "git" -def test_load_platlib_package(repository): +def test_load_platlib_package(repository: InstalledRepository): lib64 = get_package_from_repository("lib64", repository) assert lib64 is not None assert lib64.name == "lib64" assert lib64.version.text == "2.3.4" -@pytest.mark.skipif( - not PY36, reason="pathlib.resolve() does not support strict argument" -) -def test_load_editable_package(repository): +def test_load_editable_package(repository: InstalledRepository): # test editable package with text .pth file editable = get_package_from_repository("editable", repository) assert editable is not None @@ -151,7 +180,7 @@ def test_load_editable_package(repository): ) -def test_load_editable_with_import_package(repository): +def test_load_editable_with_import_package(repository: InstalledRepository): # test editable package with executable .pth file editable = get_package_from_repository("editable-with-import", repository) assert editable is not None @@ -161,7 +190,7 @@ def test_load_editable_with_import_package(repository): assert editable.source_url is None -def test_load_standard_package_with_pth_file(repository): +def test_load_standard_package_with_pth_file(repository: InstalledRepository): # test standard packages with .pth file is not treated as editable standard = get_package_from_repository("standard", repository) assert standard is not None @@ -169,3 +198,95 @@ def test_load_standard_package_with_pth_file(repository): assert standard.version.text == "1.2.3" assert standard.source_type is None assert standard.source_url is None + + +def test_load_pep_610_compliant_git_packages(repository: InstalledRepository): + package = get_package_from_repository("git-pep-610", repository) + + assert package is not None + assert package.name == "git-pep-610" + assert package.version.text == "1.2.3" + assert package.source_type == "git" + assert package.source_url == "https://github.com/demo/git-pep-610.git" + assert package.source_reference == "my-branch" + assert package.source_resolved_reference == "123456" + + +def test_load_pep_610_compliant_git_packages_no_requested_version( + repository: InstalledRepository, +): + package = get_package_from_repository( + "git-pep-610-no-requested-version", repository + ) + + assert package is not None + assert package.name == "git-pep-610-no-requested-version" + assert package.version.text == "1.2.3" + assert package.source_type == "git" + assert ( + package.source_url + == "https://github.com/demo/git-pep-610-no-requested-version.git" + ) + assert package.source_resolved_reference == "123456" + assert package.source_reference == package.source_resolved_reference + + +def test_load_pep_610_compliant_git_packages_with_subdirectory( + repository: InstalledRepository, +): + package = get_package_from_repository("git-pep-610-subdirectory", repository) + assert package is not None + assert package.name == "git-pep-610-subdirectory" + assert package.version.text == "1.2.3" + assert package.source_type == "git" + assert package.source_url == 
"https://github.com/demo/git-pep-610-subdirectory.git" + assert package.source_reference == "my-branch" + assert package.source_resolved_reference == "123456" + assert package.source_subdirectory == "subdir" + + +def test_load_pep_610_compliant_url_packages(repository: InstalledRepository): + package = get_package_from_repository("url-pep-610", repository) + + assert package is not None + assert package.name == "url-pep-610" + assert package.version.text == "1.2.3" + assert package.source_type == "url" + assert ( + package.source_url + == "https://python-poetry.org/distributions/url-pep-610-1.2.3.tar.gz" + ) + + +def test_load_pep_610_compliant_file_packages(repository: InstalledRepository): + package = get_package_from_repository("file-pep-610", repository) + + assert package is not None + assert package.name == "file-pep-610" + assert package.version.text == "1.2.3" + assert package.source_type == "file" + assert package.source_url == "/path/to/distributions/file-pep-610-1.2.3.tar.gz" + + +def test_load_pep_610_compliant_directory_packages(repository: InstalledRepository): + package = get_package_from_repository("directory-pep-610", repository) + + assert package is not None + assert package.name == "directory-pep-610" + assert package.version.text == "1.2.3" + assert package.source_type == "directory" + assert package.source_url == "/path/to/distributions/directory-pep-610" + assert not package.develop + + +def test_load_pep_610_compliant_editable_directory_packages( + repository: InstalledRepository, +): + package = get_package_from_repository("editable-directory-pep-610", repository) + + assert package is not None + assert package.name == "editable-directory-pep-610" + assert package.version.text == "1.2.3" + assert package.source_type == "directory" + assert package.source_url == "/path/to/distributions/directory-pep-610" + assert package.develop diff --git a/vendor/poetry/tests/repositories/test_legacy_repository.py b/vendor/poetry/tests/repositories/test_legacy_repository.py index 55aa67c3..06147140 100644 --- a/vendor/poetry/tests/repositories/test_legacy_repository.py +++ b/vendor/poetry/tests/repositories/test_legacy_repository.py @@ -1,16 +1,24 @@ +from __future__ import annotations + +import base64 +import re import shutil +from pathlib import Path +from typing import TYPE_CHECKING + import pytest import requests -from poetry.core.packages import Dependency +from packaging.utils import canonicalize_name +from poetry.core.packages.dependency import Dependency +from poetry.core.semver.version import Version + from poetry.factory import Factory from poetry.repositories.exceptions import PackageNotFound from poetry.repositories.exceptions import RepositoryError from poetry.repositories.legacy_repository import LegacyRepository -from poetry.repositories.legacy_repository import Page -from poetry.utils._compat import PY35 -from poetry.utils._compat import Path +from poetry.repositories.link_sources.html import SimpleRepositoryPage try: @@ -18,73 +26,129 @@ except ImportError: import urlparse +if TYPE_CHECKING: + import httpretty -class MockRepository(LegacyRepository): + from _pytest.monkeypatch import MonkeyPatch + + from poetry.config.config import Config + +@pytest.fixture(autouse=True) +def _use_simple_keyring(with_simple_keyring: None) -> None: + pass + + +class MockRepository(LegacyRepository): FIXTURES = Path(__file__).parent / "fixtures" / "legacy" - def __init__(self): - super(MockRepository, self).__init__( - "legacy", url="http://legacy.foo.bar", disable_cache=True - ) + 
def __init__(self) -> None: + super().__init__("legacy", url="http://legacy.foo.bar", disable_cache=True) - def _get(self, endpoint): + def _get_page(self, endpoint: str) -> SimpleRepositoryPage | None: parts = endpoint.split("/") name = parts[1] fixture = self.FIXTURES / (name + ".html") if not fixture.exists(): - return + return None with fixture.open(encoding="utf-8") as f: - return Page(self._url + endpoint, f.read(), {}) + return SimpleRepositoryPage(self._url + endpoint, f.read()) - def _download(self, url, dest): + def _download(self, url: str, dest: Path) -> None: filename = urlparse.urlparse(url).path.rsplit("/")[-1] filepath = self.FIXTURES.parent / "pypi.org" / "dists" / filename shutil.copyfile(str(filepath), dest) -def test_page_relative_links_path_are_correct(): +def test_page_relative_links_path_are_correct() -> None: repo = MockRepository() - page = repo._get("/relative") + page = repo._get_page("/relative") + assert page is not None for link in page.links: assert link.netloc == "legacy.foo.bar" assert link.path.startswith("/relative/poetry") -def test_page_absolute_links_path_are_correct(): +def test_page_absolute_links_path_are_correct() -> None: repo = MockRepository() - page = repo._get("/absolute") + page = repo._get_page("/absolute") + assert page is not None for link in page.links: assert link.netloc == "files.pythonhosted.org" assert link.path.startswith("/packages/") -def test_sdist_format_support(): +def test_page_clean_link() -> None: + repo = MockRepository() + + page = repo._get_page("/relative") + assert page is not None + + cleaned = page.clean_link('https://legacy.foo.bar/test /the"/cleaning\0') + assert cleaned == "https://legacy.foo.bar/test%20/the%22/cleaning%00" + + +def test_page_invalid_version_link() -> None: + repo = MockRepository() + + page = repo._get_page("/invalid-version") + assert page is not None + + links = list(page.links) + assert len(links) == 2 + + versions = list(page.versions("poetry")) + assert len(versions) == 1 + assert versions[0].to_string() == "0.1.0" + + invalid_link = None + + for link in links: + if link.filename.startswith("poetry-21"): + invalid_link = link + break + + links_010 = list(page.links_for_version(canonicalize_name("poetry"), versions[0])) + assert invalid_link not in links_010 + + assert invalid_link + assert not page.link_package_data(invalid_link) + + packages = list(page.packages) + assert len(packages) == 1 + assert packages[0].name == "poetry" + assert packages[0].version.to_string() == "0.1.0" + + +def test_sdist_format_support() -> None: repo = MockRepository() - page = repo._get("/relative") + page = repo._get_page("/relative") + assert page is not None bz2_links = list(filter(lambda link: link.ext == ".tar.bz2", page.links)) assert len(bz2_links) == 1 assert bz2_links[0].filename == "poetry-0.1.1.tar.bz2" -def test_missing_version(): +def test_missing_version() -> None: repo = MockRepository() with pytest.raises(PackageNotFound): - repo._get_release_info("missing_version", "1.1.0") + repo._get_release_info( + canonicalize_name("missing_version"), Version.parse("1.1.0") + ) -def test_get_package_information_fallback_read_setup(): +def test_get_package_information_fallback_read_setup() -> None: repo = MockRepository() - package = repo.package("jupyter", "1.0.0") + package = repo.package("jupyter", Version.parse("1.0.0")) assert package.source_type == "legacy" assert package.source_reference == repo.name @@ -96,21 +160,11 @@ def test_get_package_information_fallback_read_setup(): == "Jupyter metapackage. 
Install all the Jupyter components in one go." ) - if PY35: - assert package.requires == [ - Dependency("notebook", "*"), - Dependency("qtconsole", "*"), - Dependency("jupyter-console", "*"), - Dependency("nbconvert", "*"), - Dependency("ipykernel", "*"), - Dependency("ipywidgets", "*"), - ] - -def test_get_package_information_skips_dependencies_with_invalid_constraints(): +def test_get_package_information_skips_dependencies_with_invalid_constraints() -> None: repo = MockRepository() - package = repo.package("python-language-server", "0.21.2") + package = repo.package("python-language-server", Version.parse("0.21.2")) assert package.name == "python-language-server" assert package.version.text == "0.21.2" @@ -118,9 +172,9 @@ def test_get_package_information_skips_dependencies_with_invalid_constraints(): package.description == "Python Language Server for the Language Server Protocol" ) - assert 25 == len(package.requires) + assert len(package.requires) == 25 assert sorted( - [r for r in package.requires if not r.is_optional()], key=lambda r: r.name + (r for r in package.requires if not r.is_optional()), key=lambda r: r.name ) == [ Dependency("configparser", "*"), Dependency("future", ">=0.14.0"), @@ -143,7 +197,16 @@ def test_get_package_information_skips_dependencies_with_invalid_constraints(): ] -def test_find_packages_no_prereleases(): +def test_package_not_canonicalized() -> None: + repo = MockRepository() + + package = repo.package("discord.py", Version.parse("2.0.0")) + + assert package.name == "discord-py" + assert package.pretty_name == "discord.py" + + +def test_find_packages_no_prereleases() -> None: repo = MockRepository() packages = repo.find_packages(Factory.create_dependency("pyyaml", "*")) @@ -155,8 +218,10 @@ def test_find_packages_no_prereleases(): assert packages[0].source_url == repo.url -@pytest.mark.parametrize("constraint,count", [("*", 1), (">=1", 0), (">=19.0.0a0", 1)]) -def test_find_packages_only_prereleases(constraint, count): +@pytest.mark.parametrize( + ["constraint", "count"], [("*", 1), (">=1", 0), (">=19.0.0a0", 1)] +) +def test_find_packages_only_prereleases(constraint: str, count: int) -> None: repo = MockRepository() packages = repo.find_packages(Factory.create_dependency("black", constraint)) @@ -169,17 +234,35 @@ def test_find_packages_only_prereleases(constraint, count): assert package.source_url == repo.url -def test_find_packages_only_prereleases_empty_when_not_any(): +def test_find_packages_only_prereleases_empty_when_not_any() -> None: repo = MockRepository() packages = repo.find_packages(Factory.create_dependency("black", ">=1")) assert len(packages) == 0 -def test_get_package_information_chooses_correct_distribution(): +@pytest.mark.parametrize( + ["constraint", "expected"], + [ + # yanked 21.11b0 is ignored except for pinned version + ("*", ["19.10b0"]), + (">=19.0a0", ["19.10b0"]), + (">=20.0a0", []), + (">=21.11b0", []), + ("==21.11b0", ["21.11b0"]), + ], +) +def test_find_packages_yanked(constraint: str, expected: list[str]) -> None: + repo = MockRepository() + packages = repo.find_packages(Factory.create_dependency("black", constraint)) + + assert [str(p.version) for p in packages] == expected + + +def test_get_package_information_chooses_correct_distribution() -> None: repo = MockRepository() - package = repo.package("isort", "4.3.4") + package = repo.package("isort", Version.parse("4.3.4")) assert package.name == "isort" assert package.version.text == "4.3.4" @@ -189,35 +272,37 @@ def 
test_get_package_information_chooses_correct_distribution(): assert futures_dep.python_versions == "~2.7" -def test_get_package_information_includes_python_requires(): +def test_get_package_information_includes_python_requires() -> None: repo = MockRepository() - package = repo.package("futures", "3.2.0") + package = repo.package("futures", Version.parse("3.2.0")) assert package.name == "futures" assert package.version.text == "3.2.0" assert package.python_versions == ">=2.6, <3" -def test_get_package_information_sets_appropriate_python_versions_if_wheels_only(): +def test_get_package_information_sets_appropriate_python_versions_if_wheels_only() -> ( + None +): repo = MockRepository() - package = repo.package("futures", "3.2.0") + package = repo.package("futures", Version.parse("3.2.0")) assert package.name == "futures" assert package.version.text == "3.2.0" assert package.python_versions == ">=2.6, <3" -def test_get_package_from_both_py2_and_py3_specific_wheels(): +def test_get_package_from_both_py2_and_py3_specific_wheels() -> None: repo = MockRepository() - package = repo.package("ipython", "5.7.0") + package = repo.package("ipython", Version.parse("5.7.0")) - assert "ipython" == package.name - assert "5.7.0" == package.version.text - assert "*" == package.python_versions - assert 41 == len(package.requires) + assert package.name == "ipython" + assert package.version.text == "5.7.0" + assert package.python_versions == "*" + assert len(package.requires) == 41 expected = [ Dependency("appnope", "*"), @@ -235,26 +320,36 @@ def test_get_package_from_both_py2_and_py3_specific_wheels(): Dependency("win-unicode-console", ">=0.5"), ] required = [r for r in package.requires if not r.is_optional()] - assert expected == required + assert required == expected - assert 'python_version == "2.7"' == str(required[1].marker) - assert 'sys_platform == "win32" and python_version < "3.6"' == str( - required[12].marker + assert str(required[1].marker) == 'python_version == "2.7"' + assert ( + str(required[12].marker) == 'sys_platform == "win32" and python_version < "3.6"' ) - assert 'python_version == "2.7" or python_version == "3.3"' == str( - required[4].marker + assert ( + str(required[4].marker) == 'python_version == "2.7" or python_version == "3.3"' ) - assert 'sys_platform != "win32"' == str(required[5].marker) + assert str(required[5].marker) == 'sys_platform != "win32"' + + +def test_get_package_from_both_py2_and_py3_specific_wheels_python_constraint() -> None: + repo = MockRepository() + + package = repo.package("poetry-test-py2-py3-metadata-merge", Version.parse("0.1.0")) + assert package.name == "poetry-test-py2-py3-metadata-merge" + assert package.version.text == "0.1.0" + assert package.python_versions == ">=2.7,<2.8 || >=3.7,<4.0" -def test_get_package_with_dist_and_universal_py3_wheel(): + +def test_get_package_with_dist_and_universal_py3_wheel() -> None: repo = MockRepository() - package = repo.package("ipython", "7.5.0") + package = repo.package("ipython", Version.parse("7.5.0")) - assert "ipython" == package.name - assert "7.5.0" == package.version.text - assert ">=3.5" == package.python_versions + assert package.name == "ipython" + assert package.version.text == "7.5.0" + assert package.python_versions == ">=3.5" expected = [ Dependency("appnope", "*"), @@ -272,78 +367,208 @@ def test_get_package_with_dist_and_universal_py3_wheel(): Dependency("win-unicode-console", ">=0.5"), ] required = [r for r in package.requires if not r.is_optional()] - assert expected == sorted(required, key=lambda 
dep: dep.name) + assert sorted(required, key=lambda dep: dep.name) == expected -def test_get_package_retrieves_non_sha256_hashes(): +def test_get_package_retrieves_non_sha256_hashes() -> None: repo = MockRepository() - package = repo.package("ipython", "7.5.0") + package = repo.package("ipython", Version.parse("7.5.0")) expected = [ { "file": "ipython-7.5.0-py3-none-any.whl", - "hash": "md5:dbdc53e3918f28fa335a173432402a00", + "hash": "sha256:78aea20b7991823f6a32d55f4e963a61590820e43f666ad95ad07c7f0c704efa", # noqa: E501 }, { "file": "ipython-7.5.0.tar.gz", - "hash": "sha256:e840810029224b56cd0d9e7719dc3b39cf84d577f8ac686547c8ba7a06eeab26", + "hash": "sha256:e840810029224b56cd0d9e7719dc3b39cf84d577f8ac686547c8ba7a06eeab26", # noqa: E501 }, ] - assert expected == package.files + assert package.files == expected -def test_get_package_retrieves_packages_with_no_hashes(): +def test_get_package_retrieves_non_sha256_hashes_mismatching_known_hash() -> None: repo = MockRepository() - package = repo.package("jupyter", "1.0.0") + package = repo.package("ipython", Version.parse("5.7.0")) + + expected = [ + { + "file": "ipython-5.7.0-py2-none-any.whl", + "hash": "md5:a10a802ef98da741cd6f4f6289d47ba7", + }, + { + "file": "ipython-5.7.0-py3-none-any.whl", + "hash": "sha256:fc0464e68f9c65cd8c453474b4175432cc29ecb6c83775baedf6dbfcee9275ab", # noqa: E501 + }, + { + "file": "ipython-5.7.0.tar.gz", + "hash": "sha256:8db43a7fb7619037c98626613ff08d03dda9d5d12c84814a4504c78c0da8323c", # noqa: E501 + }, + ] + + assert package.files == expected + - assert [] == package.files +def test_get_package_retrieves_packages_with_no_hashes() -> None: + repo = MockRepository() + + package = repo.package("jupyter", Version.parse("1.0.0")) + + assert [ + { + "file": "jupyter-1.0.0.tar.gz", + "hash": "sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f", # noqa: E501 + } + ] == package.files + + +@pytest.mark.parametrize( + "package_name, version, yanked, yanked_reason", + [ + ("black", "19.10b0", False, ""), + ("black", "21.11b0", True, "Broken regex dependency. Use 21.11b1 instead."), + ], +) +def test_package_yanked( + package_name: str, version: str, yanked: bool, yanked_reason: str +) -> None: + repo = MockRepository() + + package = repo.package(package_name, Version.parse(version)) + + assert package.name == package_name + assert str(package.version) == version + assert package.yanked is yanked + assert package.yanked_reason == yanked_reason + + +def test_package_partial_yank(): + class SpecialMockRepository(MockRepository): + def _get_page(self, endpoint: str) -> SimpleRepositoryPage | None: + return super()._get_page(f"/{endpoint.strip('/')}_partial_yank/") + + repo = MockRepository() + package = repo.package("futures", Version.parse("3.2.0")) + assert len(package.files) == 2 + + repo = SpecialMockRepository() + package = repo.package("futures", Version.parse("3.2.0")) + assert len(package.files) == 1 + assert package.files[0]["file"].endswith(".tar.gz") + + +@pytest.mark.parametrize( + "package_name, version, yanked, yanked_reason", + [ + ("black", "19.10b0", False, ""), + ("black", "21.11b0", True, "Broken regex dependency. 
Use 21.11b1 instead."), + ], +) +def test_find_links_for_package_yanked( + package_name: str, version: str, yanked: bool, yanked_reason: str +) -> None: + repo = MockRepository() + + package = repo.package(package_name, Version.parse(version)) + links = repo.find_links_for_package(package) + + assert len(links) == 1 + for link in links: + assert link.yanked == yanked + assert link.yanked_reason == yanked_reason class MockHttpRepository(LegacyRepository): - def __init__(self, endpoint_responses, http): + def __init__( + self, endpoint_responses: dict, http: type[httpretty.httpretty] + ) -> None: base_url = "http://legacy.foo.bar" - super(MockHttpRepository, self).__init__( - "legacy", url=base_url, disable_cache=True - ) + super().__init__("legacy", url=base_url, disable_cache=True) for endpoint, response in endpoint_responses.items(): url = base_url + endpoint http.register_uri(http.GET, url, status=response) -def test_get_200_returns_page(http): +def test_get_200_returns_page(http: type[httpretty.httpretty]) -> None: repo = MockHttpRepository({"/foo": 200}, http) - assert repo._get("/foo") + assert repo._get_page("/foo") -def test_get_404_returns_none(http): - repo = MockHttpRepository({"/foo": 404}, http) +@pytest.mark.parametrize("status_code", [401, 403, 404]) +def test_get_40x_and_returns_none( + http: type[httpretty.httpretty], status_code: int +) -> None: + repo = MockHttpRepository({"/foo": status_code}, http) - assert repo._get("/foo") is None + assert repo._get_page("/foo") is None -def test_get_4xx_and_5xx_raises(http): - endpoints = {"/{}".format(code): code for code in {401, 403, 500}} - repo = MockHttpRepository(endpoints, http) +def test_get_5xx_raises(http: type[httpretty.httpretty]) -> None: + repo = MockHttpRepository({"/foo": 500}, http) - for endpoint in endpoints: - with pytest.raises(RepositoryError): - repo._get(endpoint) + with pytest.raises(RepositoryError): + repo._get_page("/foo") -def test_get_redirected_response_url(http, mocker): +def test_get_redirected_response_url( + http: type[httpretty.httpretty], monkeypatch: MonkeyPatch +) -> None: repo = MockHttpRepository({"/foo": 200}, http) redirect_url = "http://legacy.redirect.bar" - def get_mock(url): + def get_mock( + url: str, raise_for_status: bool = True, timeout: int = 5 + ) -> requests.Response: response = requests.Response() response.status_code = 200 response.url = redirect_url + "/foo" return response - mocker.patch.object(requests.Session, "get", side_effect=get_mock) - assert repo._get("/foo")._url == "http://legacy.redirect.bar/foo/" + monkeypatch.setattr(repo.session, "get", get_mock) + page = repo._get_page("/foo") + assert page is not None + assert page._url == "http://legacy.redirect.bar/foo/" + + +@pytest.mark.parametrize( + ("repositories",), + [ + ({},), + # ensure path is respected + ({"publish": {"url": "https://foo.bar/legacy"}},), + # ensure path length does not give incorrect results + ({"publish": {"url": "https://foo.bar/upload/legacy"}},), + ], +) +def test_authenticator_with_implicit_repository_configuration( + http: type[httpretty.httpretty], + config: Config, + repositories: dict[str, dict[str, str]], +) -> None: + http.register_uri( + http.GET, + re.compile("^https?://foo.bar/(.+?)$"), + ) + + config.merge( + { + "repositories": repositories, + "http-basic": { + "source": {"username": "foo", "password": "bar"}, + "publish": {"username": "baz", "password": "qux"}, + }, + } + ) + + repo = LegacyRepository(name="source", url="https://foo.bar/simple", config=config) + 
repo._get_page("/foo") + + request = http.last_request() + + basic_auth = base64.b64encode(b"foo:bar").decode() + assert request.headers["Authorization"] == f"Basic {basic_auth}" diff --git a/vendor/poetry/tests/repositories/test_lockfile_repository.py b/vendor/poetry/tests/repositories/test_lockfile_repository.py new file mode 100644 index 00000000..200a48f5 --- /dev/null +++ b/vendor/poetry/tests/repositories/test_lockfile_repository.py @@ -0,0 +1,60 @@ +from __future__ import annotations + +from copy import deepcopy + +from poetry.core.packages.package import Package + +from poetry.repositories.lockfile_repository import LockfileRepository + + +def test_has_package(): + repo = LockfileRepository() + + url_package = Package( + "a", "1.0", source_type="url", source_url="https://example.org/a.whl" + ) + assert not repo.has_package(url_package) + repo.add_package(url_package) + + pypi_package = Package("a", "1.0") + assert not repo.has_package(pypi_package) + repo.add_package(pypi_package) + + url_package_2 = Package( + "a", "1.0", source_type="url", source_url="https://example.org/a-1.whl" + ) + assert not repo.has_package(url_package_2) + repo.add_package(url_package_2) + + assert len(repo.packages) == 3 + assert repo.has_package(deepcopy(url_package)) + assert repo.has_package(deepcopy(pypi_package)) + assert repo.has_package(deepcopy(url_package_2)) + + +def test_remove_package(): + url_package = Package( + "a", "1.0", source_type="url", source_url="https://example.org/a.whl" + ) + pypi_package = Package("a", "1.0") + url_package_2 = Package( + "a", "1.0", source_type="url", source_url="https://example.org/a-1.whl" + ) + + repo = LockfileRepository() + repo.add_package(url_package) + repo.add_package(pypi_package) + repo.add_package(url_package_2) + + assert len(repo.packages) == 3 + + repo.remove_package(deepcopy(pypi_package)) + assert len(repo.packages) == 2 + repo.remove_package(pypi_package) + assert len(repo.packages) == 2 + + repo.remove_package(deepcopy(url_package_2)) + assert len(repo.packages) == 1 + assert repo.packages[0] == url_package + repo.remove_package(url_package_2) + assert len(repo.packages) == 1 diff --git a/vendor/poetry/tests/repositories/test_pool.py b/vendor/poetry/tests/repositories/test_pool.py index 54a405e5..74aa6750 100644 --- a/vendor/poetry/tests/repositories/test_pool.py +++ b/vendor/poetry/tests/repositories/test_pool.py @@ -1,31 +1,35 @@ +from __future__ import annotations + import pytest +from poetry.core.semver.version import Version + from poetry.repositories import Pool from poetry.repositories import Repository from poetry.repositories.exceptions import PackageNotFound from poetry.repositories.legacy_repository import LegacyRepository -def test_pool_raises_package_not_found_when_no_package_is_found(): +def test_pool_raises_package_not_found_when_no_package_is_found() -> None: pool = Pool() - pool.add_repository(Repository()) + pool.add_repository(Repository("repo")) with pytest.raises(PackageNotFound): - pool.package("foo", "1.0.0") + pool.package("foo", Version.parse("1.0.0")) def test_pool(): pool = Pool() - assert 0 == len(pool.repositories) + assert len(pool.repositories) == 0 assert not pool.has_default() def test_pool_with_initial_repositories(): - repo = Repository() + repo = Repository("repo") pool = Pool([repo]) - assert 1 == len(pool.repositories) + assert len(pool.repositories) == 1 assert not pool.has_default() @@ -69,3 +73,71 @@ def test_repository_with_normal_default_and_secondary_repositories(): assert pool.repository("foo") is 
repo1 assert pool.repository("bar") is repo2 assert pool.has_default() + + +def test_remove_repository(): + repo1 = LegacyRepository("foo", "https://foo.bar") + repo2 = LegacyRepository("bar", "https://bar.baz") + repo3 = LegacyRepository("baz", "https://baz.quux") + + pool = Pool() + pool.add_repository(repo1) + pool.add_repository(repo2) + pool.add_repository(repo3) + pool.remove_repository("bar") + + assert pool.repository("foo") is repo1 + assert not pool.has_repository("bar") + assert pool.repository("baz") is repo3 + + +def test_remove_default_repository(): + default = LegacyRepository("default", "https://default.com") + repo1 = LegacyRepository("foo", "https://foo.bar") + repo2 = LegacyRepository("bar", "https://bar.baz") + new_default = LegacyRepository("new_default", "https://new.default.com") + + pool = Pool() + pool.add_repository(repo1) + pool.add_repository(repo2) + pool.add_repository(default, default=True) + + assert pool.has_default() + + pool.remove_repository("default") + + assert not pool.has_default() + + pool.add_repository(new_default, default=True) + + assert pool.has_default() + assert pool.repositories[0] is new_default + assert not pool.has_repository("default") + + +def test_repository_ordering(): + default1 = LegacyRepository("default1", "https://default1.com") + default2 = LegacyRepository("default2", "https://default2.com") + primary1 = LegacyRepository("primary1", "https://primary1.com") + primary2 = LegacyRepository("primary2", "https://primary2.com") + primary3 = LegacyRepository("primary3", "https://primary3.com") + secondary1 = LegacyRepository("secondary1", "https://secondary1.com") + secondary2 = LegacyRepository("secondary2", "https://secondary2.com") + secondary3 = LegacyRepository("secondary3", "https://secondary3.com") + + pool = Pool() + pool.add_repository(secondary1, secondary=True) + pool.add_repository(primary1) + pool.add_repository(default1, default=True) + pool.add_repository(primary2) + pool.add_repository(secondary2, secondary=True) + + pool.remove_repository("primary2") + pool.remove_repository("secondary2") + + pool.add_repository(primary3) + pool.add_repository(secondary3, secondary=True) + + assert pool.repositories == [default1, primary1, primary3, secondary1, secondary3] + with pytest.raises(ValueError): + pool.add_repository(default2, default=True) diff --git a/vendor/poetry/tests/repositories/test_pypi_repository.py b/vendor/poetry/tests/repositories/test_pypi_repository.py index 4d9cc43f..0b2ab789 100644 --- a/vendor/poetry/tests/repositories/test_pypi_repository.py +++ b/vendor/poetry/tests/repositories/test_pypi_repository.py @@ -1,32 +1,41 @@ +from __future__ import annotations + import json import shutil from io import BytesIO +from pathlib import Path +from typing import TYPE_CHECKING import pytest +from poetry.core.packages.dependency import Dependency +from poetry.core.semver.version import Version from requests.exceptions import TooManyRedirects from requests.models import Response -from poetry.core.packages import Dependency from poetry.factory import Factory from poetry.repositories.pypi_repository import PyPiRepository -from poetry.utils._compat import PY35 -from poetry.utils._compat import Path from poetry.utils._compat import encode -class MockRepository(PyPiRepository): +if TYPE_CHECKING: + from pytest_mock import MockerFixture + + +@pytest.fixture(autouse=True) +def _use_simple_keyring(with_simple_keyring: None) -> None: + pass + +class MockRepository(PyPiRepository): JSON_FIXTURES = Path(__file__).parent / 
"fixtures" / "pypi.org" / "json" DIST_FIXTURES = Path(__file__).parent / "fixtures" / "pypi.org" / "dists" - def __init__(self, fallback=False): - super(MockRepository, self).__init__( - url="http://foo.bar", disable_cache=True, fallback=fallback - ) + def __init__(self, fallback: bool = False) -> None: + super().__init__(url="http://foo.bar", disable_cache=True, fallback=fallback) - def _get(self, url): + def _get(self, url: str) -> dict | None: parts = url.split("/")[1:] name = parts[0] if len(parts) == 3: @@ -42,12 +51,12 @@ def _get(self, url): fixture = self.JSON_FIXTURES / (name + ".json") if not fixture.exists(): - return + return None with fixture.open(encoding="utf-8") as f: return json.loads(f.read()) - def _download(self, url, dest): + def _download(self, url: str, dest: Path) -> None: filename = url.split("/")[-1] fixture = self.DIST_FIXTURES / filename @@ -55,39 +64,59 @@ def _download(self, url, dest): shutil.copyfile(str(fixture), dest) -def test_find_packages(): +def test_find_packages() -> None: repo = MockRepository() packages = repo.find_packages(Factory.create_dependency("requests", "^2.18")) assert len(packages) == 5 -def test_find_packages_with_prereleases(): +def test_find_packages_with_prereleases() -> None: repo = MockRepository() packages = repo.find_packages(Factory.create_dependency("toga", ">=0.3.0.dev2")) assert len(packages) == 7 -def test_find_packages_does_not_select_prereleases_if_not_allowed(): +def test_find_packages_does_not_select_prereleases_if_not_allowed() -> None: repo = MockRepository() packages = repo.find_packages(Factory.create_dependency("pyyaml", "*")) assert len(packages) == 1 -@pytest.mark.parametrize("constraint,count", [("*", 1), (">=1", 0), (">=19.0.0a0", 1)]) -def test_find_packages_only_prereleases(constraint, count): +@pytest.mark.parametrize( + ["constraint", "count"], [("*", 1), (">=1", 0), (">=19.0.0a0", 1)] +) +def test_find_packages_only_prereleases(constraint: str, count: int) -> None: repo = MockRepository() packages = repo.find_packages(Factory.create_dependency("black", constraint)) assert len(packages) == count -def test_package(): +@pytest.mark.parametrize( + ["constraint", "expected"], + [ + # yanked 21.11b0 is ignored except for pinned version + ("*", ["19.10b0"]), + (">=19.0a0", ["19.10b0"]), + (">=20.0a0", []), + (">=21.11b0", []), + ("==21.11b0", ["21.11b0"]), + ], +) +def test_find_packages_yanked(constraint: str, expected: list[str]) -> None: repo = MockRepository() + packages = repo.find_packages(Factory.create_dependency("black", constraint)) + + assert [str(p.version) for p in packages] == expected - package = repo.package("requests", "2.18.4") + +def test_package() -> None: + repo = MockRepository() + + package = repo.package("requests", Version.parse("2.18.4")) assert package.name == "requests" assert len(package.requires) == 9 @@ -98,32 +127,83 @@ def test_package(): assert package.files == [ { "file": "requests-2.18.4-py2.py3-none-any.whl", - "hash": "sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b", + "hash": "sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b", # noqa: E501 }, { "file": "requests-2.18.4.tar.gz", - "hash": "sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e", + "hash": "sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e", # noqa: E501 }, ] win_inet = package.extras["socks"][0] assert win_inet.name == "win-inet-pton" assert win_inet.python_versions == "~2.7 || ~2.6" - assert str(win_inet.marker) == ( - 
'sys_platform == "win32" and (python_version == "2.7" ' - 'or python_version == "2.6") and extra == "socks"' + assert ( + str(win_inet.marker) + == 'sys_platform == "win32" and (python_version == "2.7"' + ' or python_version == "2.6") and extra == "socks"' ) -def test_fallback_on_downloading_packages(): +@pytest.mark.parametrize( + "package_name, version, yanked, yanked_reason", + [ + ("black", "19.10b0", False, ""), + ("black", "21.11b0", True, "Broken regex dependency. Use 21.11b1 instead."), + ], +) +def test_package_yanked( + package_name: str, version: str, yanked: bool, yanked_reason: str +) -> None: + repo = MockRepository() + + package = repo.package(package_name, Version.parse(version)) + + assert package.name == package_name + assert str(package.version) == version + assert package.yanked is yanked + assert package.yanked_reason == yanked_reason + + +def test_package_not_canonicalized() -> None: + repo = MockRepository() + + package = repo.package("discord.py", Version.parse("2.0.0")) + + assert package.name == "discord-py" + assert package.pretty_name == "discord.py" + + +@pytest.mark.parametrize( + "package_name, version, yanked, yanked_reason", + [ + ("black", "19.10b0", False, ""), + ("black", "21.11b0", True, "Broken regex dependency. Use 21.11b1 instead."), + ], +) +def test_find_links_for_package_yanked( + package_name: str, version: str, yanked: bool, yanked_reason: str +) -> None: + repo = MockRepository() + + package = repo.package(package_name, Version.parse(version)) + links = repo.find_links_for_package(package) + + assert len(links) == 2 + for link in links: + assert link.yanked == yanked + assert link.yanked_reason == yanked_reason + + +def test_fallback_on_downloading_packages() -> None: repo = MockRepository(fallback=True) - package = repo.package("jupyter", "1.0.0") + package = repo.package("jupyter", Version.parse("1.0.0")) assert package.name == "jupyter" assert len(package.requires) == 6 - dependency_names = sorted([dep.name for dep in package.requires]) + dependency_names = sorted(dep.name for dep in package.requires) assert dependency_names == [ "ipykernel", "ipywidgets", @@ -134,10 +214,10 @@ def test_fallback_on_downloading_packages(): ] -def test_fallback_inspects_sdist_first_if_no_matching_wheels_can_be_found(): +def test_fallback_inspects_sdist_first_if_no_matching_wheels_can_be_found() -> None: repo = MockRepository(fallback=True) - package = repo.package("isort", "4.3.4") + package = repo.package("isort", Version.parse("4.3.4")) assert package.name == "isort" assert len(package.requires) == 1 @@ -147,11 +227,10 @@ def test_fallback_inspects_sdist_first_if_no_matching_wheels_can_be_found(): assert dep.python_versions == "~2.7" -@pytest.mark.skipif(not PY35, reason="AST parsing does not work for Python <3.4") -def test_fallback_can_read_setup_to_get_dependencies(): +def test_fallback_can_read_setup_to_get_dependencies() -> None: repo = MockRepository(fallback=True) - package = repo.package("sqlalchemy", "1.2.12") + package = repo.package("sqlalchemy", Version.parse("1.2.12")) assert package.name == "sqlalchemy" assert len(package.requires) == 9 @@ -170,15 +249,15 @@ def test_fallback_can_read_setup_to_get_dependencies(): } -def test_pypi_repository_supports_reading_bz2_files(): +def test_pypi_repository_supports_reading_bz2_files() -> None: repo = MockRepository(fallback=True) - package = repo.package("twisted", "18.9.0") + package = repo.package("twisted", Version.parse("18.9.0")) assert package.name == "twisted" - assert 71 == len(package.requires) + 
assert len(package.requires) == 71 assert sorted( - [r for r in package.requires if not r.is_optional()], key=lambda r: r.name + (r for r in package.requires if not r.is_optional()), key=lambda r: r.name ) == [ Dependency("attrs", ">=17.4.0"), Dependency("Automat", ">=0.3.0"), @@ -204,13 +283,13 @@ def test_pypi_repository_supports_reading_bz2_files(): ] } - for name, deps in expected_extras.items(): - assert expected_extras[name] == sorted( - package.extras[name], key=lambda r: r.name + for name in expected_extras.keys(): + assert ( + sorted(package.extras[name], key=lambda r: r.name) == expected_extras[name] ) -def test_invalid_versions_ignored(): +def test_invalid_versions_ignored() -> None: repo = MockRepository() # the json metadata for this package contains one malformed version @@ -219,14 +298,17 @@ def test_invalid_versions_ignored(): assert len(packages) == 1 -def test_get_should_invalid_cache_on_too_many_redirects_error(mocker): +def test_get_should_invalid_cache_on_too_many_redirects_error( + mocker: MockerFixture, +) -> None: delete_cache = mocker.patch("cachecontrol.caches.file_cache.FileCache.delete") response = Response() + response.status_code = 200 response.encoding = "utf-8" response.raw = BytesIO(encode('{"foo": "bar"}')) mocker.patch( - "cachecontrol.adapter.CacheControlAdapter.send", + "poetry.utils.authenticator.Authenticator.get", side_effect=[TooManyRedirects(), response], ) repository = PyPiRepository() @@ -235,14 +317,14 @@ def test_get_should_invalid_cache_on_too_many_redirects_error(mocker): assert delete_cache.called -def test_urls(): +def test_urls() -> None: repository = PyPiRepository() - assert "https://pypi.org/simple/" == repository.url - assert "https://pypi.org/simple/" == repository.authenticated_url + assert repository.url == "https://pypi.org/simple/" + assert repository.authenticated_url == "https://pypi.org/simple/" -def test_use_pypi_pretty_name(): +def test_use_pypi_pretty_name() -> None: repo = MockRepository(fallback=True) package = repo.find_packages(Factory.create_dependency("twisted", "*")) diff --git a/vendor/poetry/tests/repositories/test_repository.py b/vendor/poetry/tests/repositories/test_repository.py new file mode 100644 index 00000000..aad77894 --- /dev/null +++ b/vendor/poetry/tests/repositories/test_repository.py @@ -0,0 +1,69 @@ +from __future__ import annotations + +import pytest + +from poetry.core.packages.package import Package +from poetry.core.semver.version import Version + +from poetry.factory import Factory +from poetry.repositories import Repository + + +@pytest.fixture(scope="module") +def black_repository() -> Repository: + repo = Repository("repo") + repo.add_package(Package("black", "19.10b0")) + repo.add_package(Package("black", "21.11b0", yanked="reason")) + return repo + + +@pytest.mark.parametrize( + ["constraint", "expected"], + [ + # yanked 21.11b0 is ignored except for pinned version + ("*", ["19.10b0"]), + (">=19.0a0", ["19.10b0"]), + (">=20.0a0", []), + (">=21.11b0", []), + ("==21.11b0", ["21.11b0"]), + ], +) +def test_find_packages_yanked( + black_repository: Repository, constraint: str, expected: list[str] +) -> None: + packages = black_repository.find_packages( + Factory.create_dependency("black", constraint) + ) + + assert [str(p.version) for p in packages] == expected + + +@pytest.mark.parametrize( + "package_name, version, yanked, yanked_reason", + [ + ("black", "19.10b0", False, ""), + ("black", "21.11b0", True, "reason"), + ], +) +def test_package_yanked( + black_repository: Repository, + 
package_name: str, + version: str, + yanked: bool, + yanked_reason: str, +) -> None: + package = black_repository.package(package_name, Version.parse(version)) + + assert package.name == package_name + assert str(package.version) == version + assert package.yanked is yanked + assert package.yanked_reason == yanked_reason + + +def test_package_pretty_name_is_kept() -> None: + pretty_name = "Not_canoni-calized.name" + repo = Repository("repo") + repo.add_package(Package(pretty_name, "1.0")) + package = repo.package(pretty_name, Version.parse("1.0")) + + assert package.pretty_name == pretty_name diff --git a/vendor/poetry/tests/repositories/test_single_page_repository.py b/vendor/poetry/tests/repositories/test_single_page_repository.py new file mode 100644 index 00000000..b90bb29a --- /dev/null +++ b/vendor/poetry/tests/repositories/test_single_page_repository.py @@ -0,0 +1,60 @@ +from __future__ import annotations + +import re + +from pathlib import Path + +from poetry.core.packages.dependency import Dependency + +from poetry.repositories.link_sources.html import SimpleRepositoryPage +from poetry.repositories.single_page_repository import SinglePageRepository + + +class MockSinglePageRepository(SinglePageRepository): + FIXTURES = Path(__file__).parent / "fixtures" / "single-page" + + def __init__(self, page: str) -> None: + super().__init__( + "single-page", + url=f"http://single-page.foo.bar/{page}.html", + disable_cache=True, + ) + + def _get_page(self, endpoint: str = None) -> SimpleRepositoryPage | None: + fixture = self.FIXTURES / self.url.rsplit("/", 1)[-1] + if not fixture.exists(): + return + + with fixture.open(encoding="utf-8") as f: + return SimpleRepositoryPage(self._url, f.read()) + + def _download(self, url: str, dest: Path) -> None: + raise RuntimeError("Tests are not configured for downloads") + + +def test_single_page_repository_get_page(): + repo = MockSinglePageRepository("jax_releases") + + page = repo._get_page("/ignored") + links = list(page.links) + + assert len(links) == 21 + + for link in links: + assert re.match(r"^(jax|jaxlib)-0\.3\.\d.*\.(whl|tar\.gz)$", link.filename) + assert link.netloc == "storage.googleapis.com" + assert link.path.startswith("/jax-releases/") + + +def test_single_page_repository_find_packages(): + repo = MockSinglePageRepository("jax_releases") + + dep = Dependency("jaxlib", "0.3.7") + + packages = repo.find_packages(dep) + + assert len(packages) == 1 + + package = packages[0] + assert package.name == dep.name + assert package.to_dependency().to_pep_508() == dep.to_pep_508() diff --git a/vendor/poetry/tests/test_factory.py b/vendor/poetry/tests/test_factory.py index c26cd92d..63eb150e 100644 --- a/vendor/poetry/tests/test_factory.py +++ b/vendor/poetry/tests/test_factory.py @@ -1,20 +1,37 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import -from __future__ import unicode_literals +from __future__ import annotations + +from pathlib import Path +from typing import TYPE_CHECKING import pytest +from deepdiff import DeepDiff +from poetry.core.semver.helpers import parse_constraint from poetry.core.toml.file import TOMLFile + from poetry.factory import Factory +from poetry.plugins.plugin import Plugin from poetry.repositories.legacy_repository import LegacyRepository from poetry.repositories.pypi_repository import PyPiRepository -from poetry.utils._compat import PY2 -from poetry.utils._compat import Path +from tests.helpers import mock_metadata_entry_points + + +if TYPE_CHECKING: + from cleo.io.io import IO + from pytest_mock import 
MockerFixture + from poetry.poetry import Poetry + from tests.types import FixtureDirGetter fixtures_dir = Path(__file__).parent / "fixtures" +class MyPlugin(Plugin): + def activate(self, poetry: Poetry, io: IO) -> None: + io.write_line("Setting readmes") + poetry.package.readmes = ("README.md",) + + def test_create_poetry(): poetry = Factory().create_poetry(fixtures_dir / "sample_project") @@ -25,10 +42,12 @@ def test_create_poetry(): assert package.description == "Some description." assert package.authors == ["Sébastien Eustace "] assert package.license.id == "MIT" - assert ( - package.readme.relative_to(fixtures_dir).as_posix() - == "sample_project/README.rst" - ) + + for readme in package.readmes: + assert ( + readme.relative_to(fixtures_dir).as_posix() == "sample_project/README.rst" + ) + assert package.homepage == "https://python-poetry.org" assert package.repository_url == "https://github.com/python-poetry/poetry" assert package.keywords == ["packaging", "dependency", "poetry"] @@ -61,7 +80,7 @@ def test_create_poetry(): pathlib2 = dependencies["pathlib2"] assert pathlib2.pretty_constraint == "^2.2" - assert pathlib2.python_versions == "~2.7" + assert parse_constraint(pathlib2.python_versions) == parse_constraint("~2.7") assert not pathlib2.is_optional() demo = dependencies["demo"] @@ -89,7 +108,8 @@ def test_create_poetry(): assert functools32.pretty_constraint == "^3.2.3" assert ( str(functools32.marker) - == 'python_version ~= "2.7" and sys_platform == "win32" or python_version in "3.4 3.5"' + == 'python_version ~= "2.7" and sys_platform == "win32" or python_version in' + ' "3.4 3.5"' ) assert "db" in package.extras @@ -106,16 +126,44 @@ def test_create_poetry(): "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", "Topic :: Software Development :: Build Tools", "Topic :: Software Development :: Libraries :: Python Modules", ] +@pytest.mark.parametrize( + ("project",), + [ + ("simple_project",), + ("project_with_extras",), + ], +) +def test_create_pyproject_from_package(project: str): + poetry = Factory().create_poetry(fixtures_dir / project) + package = poetry.package + + pyproject = Factory.create_pyproject_from_package(package) + + result = pyproject["tool"]["poetry"] + expected = poetry.pyproject.poetry_config + + # packages do not support this at present + expected.pop("scripts", None) + + # remove any empty sections + sections = list(expected.keys()) + for section in sections: + if not expected[section]: + expected.pop(section) + + assert not DeepDiff(expected, result) + + def test_create_poetry_with_packages_and_includes(): poetry = Factory().create_poetry(fixtures_dir / "with-include") @@ -147,13 +195,13 @@ def test_create_poetry_with_multi_constraints_dependency(): assert len(package.requires) == 2 -def test_poetry_with_default_source(): +def test_poetry_with_default_source(with_simple_keyring: None): poetry = Factory().create_poetry(fixtures_dir / "with_default_source") - assert 1 == len(poetry.pool.repositories) + assert len(poetry.pool.repositories) == 1 -def test_poetry_with_non_default_source(): +def test_poetry_with_non_default_source(with_simple_keyring: None): poetry = Factory().create_poetry(fixtures_dir / "with_non_default_source") assert 
len(poetry.pool.repositories) == 2 @@ -167,7 +215,7 @@ def test_poetry_with_non_default_source(): assert isinstance(poetry.pool.repositories[1], PyPiRepository) -def test_poetry_with_non_default_secondary_source(): +def test_poetry_with_non_default_secondary_source(with_simple_keyring: None): poetry = Factory().create_poetry(fixtures_dir / "with_non_default_secondary_source") assert len(poetry.pool.repositories) == 2 @@ -183,7 +231,7 @@ def test_poetry_with_non_default_secondary_source(): assert isinstance(repository, LegacyRepository) -def test_poetry_with_non_default_multiple_secondary_sources(): +def test_poetry_with_non_default_multiple_secondary_sources(with_simple_keyring: None): poetry = Factory().create_poetry( fixtures_dir / "with_non_default_multiple_secondary_sources" ) @@ -205,7 +253,7 @@ def test_poetry_with_non_default_multiple_secondary_sources(): assert isinstance(repository, LegacyRepository) -def test_poetry_with_non_default_multiple_sources(): +def test_poetry_with_non_default_multiple_sources(with_simple_keyring: None): poetry = Factory().create_poetry(fixtures_dir / "with_non_default_multiple_sources") assert len(poetry.pool.repositories) == 3 @@ -236,11 +284,11 @@ def test_poetry_with_no_default_source(): assert isinstance(poetry.pool.repositories[0], PyPiRepository) -def test_poetry_with_two_default_sources(): +def test_poetry_with_two_default_sources(with_simple_keyring: None): with pytest.raises(ValueError) as e: Factory().create_poetry(fixtures_dir / "with_two_default_sources") - assert "Only one repository can be the default" == str(e.value) + assert str(e.value) == "Only one repository can be the default" def test_validate(): @@ -255,16 +303,10 @@ def test_validate_fails(): content = complete.read()["tool"]["poetry"] content["this key is not in the schema"] = "" - if PY2: - expected = ( - "Additional properties are not allowed " - "(u'this key is not in the schema' was unexpected)" - ) - else: - expected = ( - "Additional properties are not allowed " - "('this key is not in the schema' was unexpected)" - ) + expected = ( + "Additional properties are not allowed " + "('this key is not in the schema' was unexpected)" + ) assert Factory.validate(content) == {"errors": [expected], "warnings": []} @@ -275,21 +317,27 @@ def test_create_poetry_fails_on_invalid_configuration(): Path(__file__).parent / "fixtures" / "invalid_pyproject" / "pyproject.toml" ) - if PY2: - expected = """\ -The Poetry configuration is invalid: - - u'description' is a required property -""" - else: - expected = """\ + expected = """\ The Poetry configuration is invalid: - 'description' is a required property """ - assert expected == str(e.value) + assert str(e.value) == expected -def test_create_poetry_with_local_config(fixture_dir): +def test_create_poetry_with_local_config(fixture_dir: FixtureDirGetter): poetry = Factory().create_poetry(fixture_dir("with_local_config")) assert not poetry.config.get("virtualenvs.in-project") assert not poetry.config.get("virtualenvs.create") + assert not poetry.config.get("virtualenvs.options.always-copy") + assert not poetry.config.get("virtualenvs.options.no-pip") + assert not poetry.config.get("virtualenvs.options.no-setuptools") + assert not poetry.config.get("virtualenvs.options.system-site-packages") + + +def test_create_poetry_with_plugins(mocker: MockerFixture): + mock_metadata_entry_points(mocker, MyPlugin) + + poetry = Factory().create_poetry(fixtures_dir / "sample_project") + + assert poetry.package.readmes == ("README.md",) diff --git 
a/vendor/poetry/tests/types.py b/vendor/poetry/tests/types.py new file mode 100644 index 00000000..95ce4cbc --- /dev/null +++ b/vendor/poetry/tests/types.py @@ -0,0 +1,60 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from tests.compat import Protocol + + +if TYPE_CHECKING: + from pathlib import Path + + from cleo.io.io import IO + from cleo.testers.command_tester import CommandTester + + from poetry.config.config import Config + from poetry.config.source import Source + from poetry.installation import Installer + from poetry.installation.executor import Executor + from poetry.poetry import Poetry + from poetry.utils.env import Env + + +class CommandTesterFactory(Protocol): + def __call__( + self, + command: str, + poetry: Poetry | None = None, + installer: Installer | None = None, + executor: Executor | None = None, + environment: Env | None = None, + ) -> CommandTester: + ... + + +class SourcesFactory(Protocol): + def __call__(self, poetry: Poetry, sources: Source, config: Config, io: IO) -> None: + ... + + +class ProjectFactory(Protocol): + def __call__( + self, + name: str | None = None, + dependencies: dict[str, str] | None = None, + dev_dependencies: dict[str, str] | None = None, + pyproject_content: str | None = None, + poetry_lock_content: str | None = None, + install_deps: bool = True, + source: Path | None = None, + ) -> Poetry: + ... + + +class FixtureDirGetter(Protocol): + def __call__(self, name: str) -> Path: + ... + + +class FixtureCopier(Protocol): + def __call__(self, relative_path: str, target: Path | None = None) -> Path: + ... diff --git a/vendor/poetry/tests/utils/fixtures/setups/ansible/setup.py b/vendor/poetry/tests/utils/fixtures/setups/ansible/setup.py index 5d7d1f4e..3bebf1c3 100644 --- a/vendor/poetry/tests/utils/fixtures/setups/ansible/setup.py +++ b/vendor/poetry/tests/utils/fixtures/setups/ansible/setup.py @@ -1,4 +1,4 @@ -from __future__ import print_function +from __future__ import annotations import json import os @@ -11,8 +11,10 @@ from distutils.command.build_scripts import build_scripts as BuildScripts from distutils.command.sdist import sdist as SDist + try: - from setuptools import setup, find_packages + from setuptools import find_packages + from setuptools import setup from setuptools.command.build_py import build_py as BuildPy from setuptools.command.install_lib import install_lib as InstallLib from setuptools.command.install_scripts import install_scripts as InstallScripts @@ -26,7 +28,8 @@ sys.exit(1) sys.path.insert(0, os.path.abspath("lib")) -from ansible.release import __version__, __author__ +from ansible.release import __author__ +from ansible.release import __version__ SYMLINK_CACHE = "SYMLINK_CACHE.json" @@ -63,9 +66,9 @@ def _maintain_symlinks(symlink_type, base_path): try: # Try the cache first because going from git checkout to sdist is the # only time we know that we're going to cache correctly - with open(SYMLINK_CACHE, "r") as f: + with open(SYMLINK_CACHE) as f: symlink_data = json.load(f) - except (IOError, OSError) as e: + except OSError as e: # IOError on py2, OSError on py3. Both have errno if e.errno == 2: # SYMLINKS_CACHE doesn't exist. 
Fallback to trying to create the @@ -139,7 +142,7 @@ def run(self): def read_file(file_name): """Read file and return its contents.""" - with open(file_name, "r") as f: + with open(file_name) as f: return f.read() diff --git a/vendor/poetry/tests/utils/fixtures/setups/extras_require_with_vars/setup.py b/vendor/poetry/tests/utils/fixtures/setups/extras_require_with_vars/setup.py index 5d7962df..e40db81d 100644 --- a/vendor/poetry/tests/utils/fixtures/setups/extras_require_with_vars/setup.py +++ b/vendor/poetry/tests/utils/fixtures/setups/extras_require_with_vars/setup.py @@ -1,5 +1,8 @@ +from __future__ import annotations + from setuptools import setup + tests_require = ["pytest"] setup( diff --git a/vendor/poetry/tests/utils/fixtures/setups/flask/setup.py b/vendor/poetry/tests/utils/fixtures/setups/flask/setup.py index 2117d7cc..74a059de 100644 --- a/vendor/poetry/tests/utils/fixtures/setups/flask/setup.py +++ b/vendor/poetry/tests/utils/fixtures/setups/flask/setup.py @@ -1,15 +1,17 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- -import io +from __future__ import annotations + import re + from collections import OrderedDict from setuptools import setup -with io.open("README.rst", "rt", encoding="utf8") as f: + +with open("README.rst", encoding="utf8") as f: readme = f.read() -with io.open("flask/__init__.py", "rt", encoding="utf8") as f: +with open("flask/__init__.py", encoding="utf8") as f: version = re.search(r"__version__ = \'(.*?)\'", f.read()).group(1) setup( diff --git a/vendor/poetry/tests/utils/fixtures/setups/pendulum/setup.py b/vendor/poetry/tests/utils/fixtures/setups/pendulum/setup.py index 3a6323fb..705bd404 100644 --- a/vendor/poetry/tests/utils/fixtures/setups/pendulum/setup.py +++ b/vendor/poetry/tests/utils/fixtures/setups/pendulum/setup.py @@ -1,7 +1,9 @@ -# -*- coding: utf-8 -*- +from __future__ import annotations + from distutils.core import setup -from build import * +from build import * # nopycln: import + packages = [ "pendulum", diff --git a/vendor/poetry/tests/utils/fixtures/setups/pyyaml/setup.py b/vendor/poetry/tests/utils/fixtures/setups/pyyaml/setup.py index 5285386b..ce865368 100644 --- a/vendor/poetry/tests/utils/fixtures/setups/pyyaml/setup.py +++ b/vendor/poetry/tests/utils/fixtures/setups/pyyaml/setup.py @@ -1,3 +1,6 @@ +from __future__ import annotations + + NAME = "PyYAML" VERSION = "3.13" DESCRIPTION = "YAML parser and emitter for Python" @@ -12,13 +15,13 @@ allow to represent an arbitrary Python object. 
PyYAML is applicable for a broad range of tasks from complex -configuration files to object serialization and persistance.""" +configuration files to object serialization and persistence.""" AUTHOR = "Kirill Simonov" AUTHOR_EMAIL = "xi@resolvent.net" LICENSE = "MIT" PLATFORMS = "Any" URL = "http://pyyaml.org/wiki/PyYAML" -DOWNLOAD_URL = "http://pyyaml.org/download/pyyaml/%s-%s.tar.gz" % (NAME, VERSION) +DOWNLOAD_URL = f"http://pyyaml.org/download/pyyaml/{NAME}-{VERSION}.tar.gz" CLASSIFIERS = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", @@ -53,21 +56,23 @@ """ -import sys, os.path, platform +import os.path +import platform +import sys from distutils import log -from distutils.core import setup, Command +from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm +from distutils.command.build_ext import build_ext as _build_ext +from distutils.core import Command from distutils.core import Distribution as _Distribution from distutils.core import Extension as _Extension +from distutils.core import setup from distutils.dir_util import mkpath -from distutils.command.build_ext import build_ext as _build_ext -from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm -from distutils.errors import ( - DistutilsError, - CompileError, - LinkError, - DistutilsPlatformError, -) +from distutils.errors import CompileError +from distutils.errors import DistutilsError +from distutils.errors import DistutilsPlatformError +from distutils.errors import LinkError + if "setuptools.extension" in sys.modules: _Extension = sys.modules["setuptools.extension"]._Extension @@ -77,8 +82,8 @@ with_cython = False try: - from Cython.Distutils.extension import Extension as _Extension from Cython.Distutils import build_ext as _build_ext + from Cython.Distutils.extension import Extension as _Extension with_cython = True except ImportError: @@ -185,7 +190,7 @@ def get_source_files(self): filenames.append(filename) base = os.path.splitext(filename)[0] for ext in ["c", "h", "pyx", "pxd"]: - filename = "%s.%s" % (base, ext) + filename = f"{base}.{ext}" if filename not in filenames and os.path.isfile(filename): filenames.append(filename) return filenames @@ -308,10 +313,7 @@ def run(self): build_cmd = self.get_finalized_command("build") build_cmd.run() sys.path.insert(0, build_cmd.build_lib) - if sys.version_info[0] < 3: - sys.path.insert(0, "tests/lib") - else: - sys.path.insert(0, "tests/lib3") + sys.path.insert(0, "tests/lib3") import test_all if not test_all.main([]): @@ -337,7 +339,7 @@ def run(self): url=URL, download_url=DOWNLOAD_URL, classifiers=CLASSIFIERS, - package_dir={"": {2: "lib", 3: "lib3"}[sys.version_info[0]]}, + package_dir={"": "lib3"}, packages=["yaml"], ext_modules=[ Extension( diff --git a/vendor/poetry/tests/utils/fixtures/setups/requests/setup.py b/vendor/poetry/tests/utils/fixtures/setups/requests/setup.py index 0591adb8..e8c2df31 100644 --- a/vendor/poetry/tests/utils/fixtures/setups/requests/setup.py +++ b/vendor/poetry/tests/utils/fixtures/setups/requests/setup.py @@ -1,7 +1,8 @@ #!/usr/bin/env python # Learn more: https://github.com/kennethreitz/setup.py +from __future__ import annotations + import os -import re import sys from codecs import open @@ -9,6 +10,7 @@ from setuptools import setup from setuptools.command.test import test as TestCommand + here = os.path.abspath(os.path.dirname(__file__)) diff --git a/vendor/poetry/tests/utils/fixtures/setups/setuptools_setup/setup.py b/vendor/poetry/tests/utils/fixtures/setups/setuptools_setup/setup.py index 
807d8c70..c8e0dafe 100644 --- a/vendor/poetry/tests/utils/fixtures/setups/setuptools_setup/setup.py +++ b/vendor/poetry/tests/utils/fixtures/setups/setuptools_setup/setup.py @@ -1,5 +1,8 @@ +from __future__ import annotations + import setuptools + setuptools.setup( name="my_package", version="0.1.2", diff --git a/vendor/poetry/tests/utils/fixtures/setups/sqlalchemy/setup.py b/vendor/poetry/tests/utils/fixtures/setups/sqlalchemy/setup.py index 8842a482..b44d2150 100644 --- a/vendor/poetry/tests/utils/fixtures/setups/sqlalchemy/setup.py +++ b/vendor/poetry/tests/utils/fixtures/setups/sqlalchemy/setup.py @@ -1,19 +1,23 @@ +from __future__ import annotations + import os import platform import re import sys + from distutils.command.build_ext import build_ext from distutils.errors import CCompilerError from distutils.errors import DistutilsExecError from distutils.errors import DistutilsPlatformError -from setuptools import Distribution as _Distribution, Extension -from setuptools import setup + +from setuptools import Distribution as _Distribution +from setuptools import Extension from setuptools import find_packages +from setuptools import setup from setuptools.command.test import test as TestCommand + cmdclass = {} -if sys.version_info < (2, 7): - raise Exception("SQLAlchemy requires Python 2.7 or higher.") cpython = platform.python_implementation() == "CPython" diff --git a/vendor/poetry/tests/utils/fixtures/setups/with-setup-cfg-attr/setup.py b/vendor/poetry/tests/utils/fixtures/setups/with-setup-cfg-attr/setup.py index 60684932..536cf178 100644 --- a/vendor/poetry/tests/utils/fixtures/setups/with-setup-cfg-attr/setup.py +++ b/vendor/poetry/tests/utils/fixtures/setups/with-setup-cfg-attr/setup.py @@ -1,3 +1,6 @@ +from __future__ import annotations + from setuptools import setup + setup() diff --git a/vendor/poetry/tests/utils/fixtures/setups/with-setup-cfg/setup.py b/vendor/poetry/tests/utils/fixtures/setups/with-setup-cfg/setup.py index 60684932..536cf178 100644 --- a/vendor/poetry/tests/utils/fixtures/setups/with-setup-cfg/setup.py +++ b/vendor/poetry/tests/utils/fixtures/setups/with-setup-cfg/setup.py @@ -1,3 +1,6 @@ +from __future__ import annotations + from setuptools import setup + setup() diff --git a/vendor/poetry/tests/utils/test_authenticator.py b/vendor/poetry/tests/utils/test_authenticator.py new file mode 100644 index 00000000..29af598b --- /dev/null +++ b/vendor/poetry/tests/utils/test_authenticator.py @@ -0,0 +1,647 @@ +from __future__ import annotations + +import base64 +import re +import uuid + +from dataclasses import dataclass +from pathlib import Path +from typing import TYPE_CHECKING +from typing import Any + +import httpretty +import pytest +import requests + +from cleo.io.null_io import NullIO + +from poetry.utils.authenticator import Authenticator +from poetry.utils.authenticator import RepositoryCertificateConfig + + +if TYPE_CHECKING: + from _pytest.monkeypatch import MonkeyPatch + from pytest_mock import MockerFixture + + from tests.conftest import Config + from tests.conftest import DummyBackend + + +@dataclass +class SimpleCredential: + username: str + password: str + + +@pytest.fixture() +def mock_remote(http: type[httpretty.httpretty]) -> None: + http.register_uri( + http.GET, + re.compile("^https?://foo.bar/(.+?)$"), + ) + + +@pytest.fixture() +def repo(): + return {"foo": {"url": "https://foo.bar/simple/"}} + + +@pytest.fixture +def mock_config(config: Config, repo: dict[str, dict[str, str]]): + config.merge( + { + "repositories": repo, + "http-basic": 
{"foo": {"username": "bar", "password": "baz"}}, + } + ) + + return config + + +def test_authenticator_uses_url_provided_credentials( + mock_config: Config, mock_remote: None, http: type[httpretty.httpretty] +): + authenticator = Authenticator(mock_config, NullIO()) + authenticator.request("get", "https://foo001:bar002@foo.bar/files/foo-0.1.0.tar.gz") + + request = http.last_request() + + assert request.headers["Authorization"] == "Basic Zm9vMDAxOmJhcjAwMg==" + + +def test_authenticator_uses_credentials_from_config_if_not_provided( + mock_config: Config, mock_remote: None, http: type[httpretty.httpretty] +): + authenticator = Authenticator(mock_config, NullIO()) + authenticator.request("get", "https://foo.bar/files/foo-0.1.0.tar.gz") + + request = http.last_request() + + assert request.headers["Authorization"] == "Basic YmFyOmJheg==" + + +def test_authenticator_uses_username_only_credentials( + mock_config: Config, + mock_remote: None, + http: type[httpretty.httpretty], + with_simple_keyring: None, +): + authenticator = Authenticator(mock_config, NullIO()) + authenticator.request("get", "https://foo001@foo.bar/files/foo-0.1.0.tar.gz") + + request = http.last_request() + + assert request.headers["Authorization"] == "Basic Zm9vMDAxOg==" + + +def test_authenticator_uses_password_only_credentials( + mock_config: Config, mock_remote: None, http: type[httpretty.httpretty] +): + authenticator = Authenticator(mock_config, NullIO()) + authenticator.request("get", "https://:bar002@foo.bar/files/foo-0.1.0.tar.gz") + + request = http.last_request() + + assert request.headers["Authorization"] == "Basic OmJhcjAwMg==" + + +def test_authenticator_uses_empty_strings_as_default_password( + config: Config, + mock_remote: None, + repo: dict[str, dict[str, str]], + http: type[httpretty.httpretty], + with_simple_keyring: None, +): + config.merge( + { + "repositories": repo, + "http-basic": {"foo": {"username": "bar"}}, + } + ) + + authenticator = Authenticator(config, NullIO()) + authenticator.request("get", "https://foo.bar/files/foo-0.1.0.tar.gz") + + request = http.last_request() + + assert request.headers["Authorization"] == "Basic YmFyOg==" + + +def test_authenticator_uses_empty_strings_as_default_username( + config: Config, + mock_remote: None, + repo: dict[str, dict[str, str]], + http: type[httpretty.httpretty], +): + config.merge( + { + "repositories": repo, + "http-basic": {"foo": {"username": None, "password": "bar"}}, + } + ) + + authenticator = Authenticator(config, NullIO()) + authenticator.request("get", "https://foo.bar/files/foo-0.1.0.tar.gz") + + request = http.last_request() + + assert request.headers["Authorization"] == "Basic OmJhcg==" + + +def test_authenticator_falls_back_to_keyring_url( + config: Config, + mock_remote: None, + repo: dict[str, dict[str, str]], + http: type[httpretty.httpretty], + with_simple_keyring: None, + dummy_keyring: DummyBackend, +): + config.merge( + { + "repositories": repo, + } + ) + + dummy_keyring.set_password( + "https://foo.bar/simple/", None, SimpleCredential(None, "bar") + ) + + authenticator = Authenticator(config, NullIO()) + authenticator.request("get", "https://foo.bar/files/foo-0.1.0.tar.gz") + + request = http.last_request() + + assert request.headers["Authorization"] == "Basic OmJhcg==" + + +def test_authenticator_falls_back_to_keyring_netloc( + config: Config, + mock_remote: None, + repo: dict[str, dict[str, str]], + http: type[httpretty.httpretty], + with_simple_keyring: None, + dummy_keyring: DummyBackend, +): + config.merge( + { + "repositories": 
repo, + } + ) + + dummy_keyring.set_password("foo.bar", None, SimpleCredential(None, "bar")) + + authenticator = Authenticator(config, NullIO()) + authenticator.request("get", "https://foo.bar/files/foo-0.1.0.tar.gz") + + request = http.last_request() + + assert request.headers["Authorization"] == "Basic OmJhcg==" + + +@pytest.mark.filterwarnings("ignore::pytest.PytestUnhandledThreadExceptionWarning") +def test_authenticator_request_retries_on_exception( + mocker: MockerFixture, config: Config, http: type[httpretty.httpretty] +): + sleep = mocker.patch("time.sleep") + sdist_uri = f"https://foo.bar/files/{uuid.uuid4()!s}/foo-0.1.0.tar.gz" + content = str(uuid.uuid4()) + seen = [] + + def callback( + request: requests.Request, uri: str, response_headers: dict + ) -> list[int | dict | str]: + if seen.count(uri) < 2: + seen.append(uri) + raise requests.exceptions.ConnectionError("Disconnected") + return [200, response_headers, content] + + httpretty.register_uri(httpretty.GET, sdist_uri, body=callback) + + authenticator = Authenticator(config, NullIO()) + response = authenticator.request("get", sdist_uri) + assert response.text == content + assert sleep.call_count == 2 + + +@pytest.mark.filterwarnings("ignore::pytest.PytestUnhandledThreadExceptionWarning") +def test_authenticator_request_raises_exception_when_attempts_exhausted( + mocker: MockerFixture, config: Config, http: type[httpretty.httpretty] +): + sleep = mocker.patch("time.sleep") + sdist_uri = f"https://foo.bar/files/{uuid.uuid4()!s}/foo-0.1.0.tar.gz" + + def callback(*_: Any, **___: Any) -> None: + raise requests.exceptions.ConnectionError(str(uuid.uuid4())) + + httpretty.register_uri(httpretty.GET, sdist_uri, body=callback) + authenticator = Authenticator(config, NullIO()) + + with pytest.raises(requests.exceptions.ConnectionError): + authenticator.request("get", sdist_uri) + + assert sleep.call_count == 5 + + +@pytest.mark.parametrize( + ["status", "attempts"], + [ + (400, 0), + (401, 0), + (403, 0), + (404, 0), + (500, 0), + (502, 5), + (503, 5), + (504, 5), + ], +) +def test_authenticator_request_retries_on_status_code( + mocker: MockerFixture, + config: Config, + http: type[httpretty.httpretty], + status: int, + attempts: int, +): + sleep = mocker.patch("time.sleep") + sdist_uri = f"https://foo.bar/files/{uuid.uuid4()!s}/foo-0.1.0.tar.gz" + content = str(uuid.uuid4()) + + def callback( + request: requests.Request, uri: str, response_headers: dict + ) -> list[int | dict | str]: + return [status, response_headers, content] + + httpretty.register_uri(httpretty.GET, sdist_uri, body=callback) + authenticator = Authenticator(config, NullIO()) + + with pytest.raises(requests.exceptions.HTTPError) as excinfo: + authenticator.request("get", sdist_uri) + + assert excinfo.value.response.status_code == status + assert excinfo.value.response.text == content + + assert sleep.call_count == attempts + + +def test_authenticator_uses_env_provided_credentials( + config: Config, + repo: dict[str, dict[str, str]], + environ: None, + mock_remote: type[httpretty.httpretty], + http: type[httpretty.httpretty], + monkeypatch: MonkeyPatch, +): + monkeypatch.setenv("POETRY_HTTP_BASIC_FOO_USERNAME", "bar") + monkeypatch.setenv("POETRY_HTTP_BASIC_FOO_PASSWORD", "baz") + + config.merge({"repositories": repo}) + + authenticator = Authenticator(config, NullIO()) + authenticator.request("get", "https://foo.bar/files/foo-0.1.0.tar.gz") + + request = http.last_request() + + assert request.headers["Authorization"] == "Basic YmFyOmJheg==" + + 
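The hard-coded "Basic ..." strings asserted throughout these authenticator tests are base64-encoded "username:password" pairs. A minimal stand-alone sketch for deriving such expected values, assuming only the standard library (the basic_auth_header helper is illustrative and not part of the patch):

    import base64

    def basic_auth_header(username: str, password: str) -> str:
        # HTTP Basic auth: base64 of "user:pass", prefixed with "Basic ".
        token = base64.b64encode(f"{username}:{password}".encode()).decode()
        return f"Basic {token}"

    # Reproduces the values asserted in the tests above and below:
    assert basic_auth_header("bar", "baz") == "Basic YmFyOmJheg=="
    assert basic_auth_header("foo001", "bar002") == "Basic Zm9vMDAxOmJhcjAwMg=="
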
+@pytest.mark.parametrize( + "cert,client_cert", + [ + (None, None), + (None, "path/to/provided/client-cert"), + ("/path/to/provided/cert", None), + ("/path/to/provided/cert", "path/to/provided/client-cert"), + ], +) +def test_authenticator_uses_certs_from_config_if_not_provided( + config: Config, + mock_remote: type[httpretty.httpretty], + mock_config: Config, + http: type[httpretty.httpretty], + mocker: MockerFixture, + cert: str | None, + client_cert: str | None, +): + configured_cert = "/path/to/cert" + configured_client_cert = "/path/to/client-cert" + + mock_config.merge( + { + "certificates": { + "foo": {"cert": configured_cert, "client-cert": configured_client_cert} + }, + } + ) + + authenticator = Authenticator(mock_config, NullIO()) + url = "https://foo.bar/files/foo-0.1.0.tar.gz" + session = authenticator.get_session(url) + session_send = mocker.patch.object(session, "send") + authenticator.request( + "get", + url, + verify=cert, + cert=client_cert, + ) + kwargs = session_send.call_args[1] + + assert Path(kwargs["verify"]) == Path(cert or configured_cert) + assert Path(kwargs["cert"]) == Path(client_cert or configured_client_cert) + + +def test_authenticator_uses_credentials_from_config_matched_by_url_path( + config: Config, mock_remote: None, http: type[httpretty.httpretty] +): + config.merge( + { + "repositories": { + "foo-alpha": {"url": "https://foo.bar/alpha/files/simple/"}, + "foo-beta": {"url": "https://foo.bar/beta/files/simple/"}, + }, + "http-basic": { + "foo-alpha": {"username": "bar", "password": "alpha"}, + "foo-beta": {"username": "baz", "password": "beta"}, + }, + } + ) + + authenticator = Authenticator(config, NullIO()) + authenticator.request("get", "https://foo.bar/alpha/files/simple/foo-0.1.0.tar.gz") + + request = http.last_request() + + basic_auth = base64.b64encode(b"bar:alpha").decode() + assert request.headers["Authorization"] == f"Basic {basic_auth}" + + # Make request on second repository with the same netloc but different credentials + authenticator.request("get", "https://foo.bar/beta/files/simple/foo-0.1.0.tar.gz") + + request = http.last_request() + + basic_auth = base64.b64encode(b"baz:beta").decode() + assert request.headers["Authorization"] == f"Basic {basic_auth}" + + +def test_authenticator_uses_credentials_from_config_with_at_sign_in_path( + config: Config, mock_remote: None, http: type[httpretty.httpretty] +): + config.merge( + { + "repositories": { + "foo": {"url": "https://foo.bar/beta/files/simple/"}, + }, + "http-basic": { + "foo": {"username": "bar", "password": "baz"}, + }, + } + ) + authenticator = Authenticator(config, NullIO()) + authenticator.request("get", "https://foo.bar/beta/files/simple/f@@-0.1.0.tar.gz") + + request = http.last_request() + + basic_auth = base64.b64encode(b"bar:baz").decode() + assert request.headers["Authorization"] == f"Basic {basic_auth}" + + +def test_authenticator_falls_back_to_keyring_url_matched_by_path( + config: Config, + mock_remote: None, + http: type[httpretty.httpretty], + with_simple_keyring: None, + dummy_keyring: DummyBackend, +): + config.merge( + { + "repositories": { + "foo-alpha": {"url": "https://foo.bar/alpha/files/simple/"}, + "foo-beta": {"url": "https://foo.bar/beta/files/simple/"}, + } + } + ) + + dummy_keyring.set_password( + "https://foo.bar/alpha/files/simple/", None, SimpleCredential(None, "bar") + ) + dummy_keyring.set_password( + "https://foo.bar/beta/files/simple/", None, SimpleCredential(None, "baz") + ) + + authenticator = Authenticator(config, NullIO()) + + 
authenticator.request("get", "https://foo.bar/alpha/files/simple/foo-0.1.0.tar.gz") + request = http.last_request() + + basic_auth = base64.b64encode(b":bar").decode() + assert request.headers["Authorization"] == f"Basic {basic_auth}" + + authenticator.request("get", "https://foo.bar/beta/files/simple/foo-0.1.0.tar.gz") + request = http.last_request() + + basic_auth = base64.b64encode(b":baz").decode() + assert request.headers["Authorization"] == f"Basic {basic_auth}" + + +def test_authenticator_uses_env_provided_credentials_matched_by_url_path( + config: Config, + environ: None, + mock_remote: type[httpretty.httpretty], + http: type[httpretty.httpretty], + monkeypatch: MonkeyPatch, +): + monkeypatch.setenv("POETRY_HTTP_BASIC_FOO_ALPHA_USERNAME", "bar") + monkeypatch.setenv("POETRY_HTTP_BASIC_FOO_ALPHA_PASSWORD", "alpha") + monkeypatch.setenv("POETRY_HTTP_BASIC_FOO_BETA_USERNAME", "baz") + monkeypatch.setenv("POETRY_HTTP_BASIC_FOO_BETA_PASSWORD", "beta") + + config.merge( + { + "repositories": { + "foo-alpha": {"url": "https://foo.bar/alpha/files/simple/"}, + "foo-beta": {"url": "https://foo.bar/beta/files/simple/"}, + } + } + ) + + authenticator = Authenticator(config, NullIO()) + + authenticator.request("get", "https://foo.bar/alpha/files/simple/foo-0.1.0.tar.gz") + request = http.last_request() + + basic_auth = base64.b64encode(b"bar:alpha").decode() + assert request.headers["Authorization"] == f"Basic {basic_auth}" + + authenticator.request("get", "https://foo.bar/beta/files/simple/foo-0.1.0.tar.gz") + request = http.last_request() + + basic_auth = base64.b64encode(b"baz:beta").decode() + assert request.headers["Authorization"] == f"Basic {basic_auth}" + + +def test_authenticator_azure_feed_guid_credentials( + config: Config, + mock_remote: None, + http: type[httpretty.httpretty], + with_simple_keyring: None, + dummy_keyring: DummyBackend, +): + config.merge( + { + "repositories": { + "alpha": { + "url": "https://foo.bar/org-alpha/_packaging/feed/pypi/simple/" + }, + "beta": { + "url": "https://foo.bar/org-beta/_packaging/feed/pypi/simple/" + }, + }, + "http-basic": { + "alpha": {"username": "foo", "password": "bar"}, + "beta": {"username": "baz", "password": "qux"}, + }, + } + ) + + authenticator = Authenticator(config, NullIO()) + + authenticator.request( + "get", + "https://foo.bar/org-alpha/_packaging/GUID/pypi/simple/a/1.0.0/a-1.0.0.whl", + ) + request = http.last_request() + + basic_auth = base64.b64encode(b"foo:bar").decode() + assert request.headers["Authorization"] == f"Basic {basic_auth}" + + authenticator.request( + "get", + "https://foo.bar/org-beta/_packaging/GUID/pypi/simple/b/1.0.0/a-1.0.0.whl", + ) + request = http.last_request() + + basic_auth = base64.b64encode(b"baz:qux").decode() + assert request.headers["Authorization"] == f"Basic {basic_auth}" + + +def test_authenticator_add_repository( + config: Config, + mock_remote: None, + http: type[httpretty.httpretty], + with_simple_keyring: None, + dummy_keyring: DummyBackend, +): + config.merge( + { + "http-basic": { + "source": {"username": "foo", "password": "bar"}, + }, + } + ) + + authenticator = Authenticator(config, NullIO()) + + authenticator.request( + "get", + "https://foo.bar/simple/a/1.0.0/a-1.0.0.whl", + ) + request = http.last_request() + assert "Authorization" not in request.headers + + authenticator.add_repository("source", "https://foo.bar/simple/") + + authenticator.request( + "get", + "https://foo.bar/simple/a/1.0.0/a-1.0.0.whl", + ) + request = http.last_request() + + basic_auth = 
base64.b64encode(b"foo:bar").decode() + assert request.headers["Authorization"] == f"Basic {basic_auth}" + + +def test_authenticator_git_repositories( + config: Config, + mock_remote: None, + http: type[httpretty.httpretty], + with_simple_keyring: None, + dummy_keyring: DummyBackend, +): + config.merge( + { + "repositories": { + "one": {"url": "https://foo.bar/org/one.git"}, + "two": {"url": "https://foo.bar/org/two.git"}, + }, + "http-basic": { + "one": {"username": "foo", "password": "bar"}, + "two": {"username": "baz", "password": "qux"}, + }, + } + ) + + authenticator = Authenticator(config, NullIO()) + + one = authenticator.get_credentials_for_git_url("https://foo.bar/org/one.git") + assert one.username == "foo" + assert one.password == "bar" + + two = authenticator.get_credentials_for_git_url("https://foo.bar/org/two.git") + assert two.username == "baz" + assert two.password == "qux" + + two_ssh = authenticator.get_credentials_for_git_url("ssh://git@foo.bar/org/two.git") + assert not two_ssh.username + assert not two_ssh.password + + three = authenticator.get_credentials_for_git_url("https://foo.bar/org/three.git") + assert not three.username + assert not three.password + + +@pytest.mark.parametrize( + ("ca_cert", "client_cert", "result"), + [ + (None, None, RepositoryCertificateConfig()), + ( + "path/to/ca.pem", + "path/to/client.pem", + RepositoryCertificateConfig( + Path("path/to/ca.pem"), Path("path/to/client.pem") + ), + ), + ( + None, + "path/to/client.pem", + RepositoryCertificateConfig(None, Path("path/to/client.pem")), + ), + ( + "path/to/ca.pem", + None, + RepositoryCertificateConfig(Path("path/to/ca.pem"), None), + ), + (True, None, RepositoryCertificateConfig()), + (False, None, RepositoryCertificateConfig(verify=False)), + ( + False, + "path/to/client.pem", + RepositoryCertificateConfig(None, Path("path/to/client.pem"), verify=False), + ), + ], +) +def test_repository_certificate_configuration_create( + ca_cert: str | bool | None, + client_cert: str | None, + result: RepositoryCertificateConfig, + config: Config, +) -> None: + cert_config = {} + + if ca_cert is not None: + cert_config["cert"] = ca_cert + + if client_cert is not None: + cert_config["client-cert"] = client_cert + + config.merge({"certificates": {"foo": cert_config}}) + + assert RepositoryCertificateConfig.create("foo", config) == result diff --git a/vendor/poetry/tests/utils/test_dependency_specification.py b/vendor/poetry/tests/utils/test_dependency_specification.py new file mode 100644 index 00000000..95cdffea --- /dev/null +++ b/vendor/poetry/tests/utils/test_dependency_specification.py @@ -0,0 +1,118 @@ +from __future__ import annotations + +from pathlib import Path +from typing import TYPE_CHECKING + +import pytest + +from deepdiff import DeepDiff + +from poetry.utils.dependency_specification import parse_dependency_specification + + +if TYPE_CHECKING: + from pytest_mock import MockerFixture + + from poetry.utils.dependency_specification import DependencySpec + + +@pytest.mark.parametrize( + ("requirement", "specification"), + [ + ( + "git+https://github.com/demo/demo.git", + {"git": "https://github.com/demo/demo.git", "name": "demo"}, + ), + ( + "git+ssh://github.com/demo/demo.git", + {"git": "ssh://github.com/demo/demo.git", "name": "demo"}, + ), + ( + "git+https://github.com/demo/demo.git#main", + {"git": "https://github.com/demo/demo.git", "name": "demo", "rev": "main"}, + ), + ( + "git+https://github.com/demo/demo.git@main", + {"git": "https://github.com/demo/demo.git", "name": "demo", "rev": 
"main"}, + ), + ( + "git+https://github.com/demo/subdirectories.git@main#subdirectory=two", + { + "git": "https://github.com/demo/subdirectories.git", + "name": "two", + "rev": "main", + "subdirectory": "two", + }, + ), + ("demo", {"name": "demo"}), + ("demo@1.0.0", {"name": "demo", "version": "1.0.0"}), + ("demo@^1.0.0", {"name": "demo", "version": "^1.0.0"}), + ("demo@==1.0.0", {"name": "demo", "version": "==1.0.0"}), + ("demo@!=1.0.0", {"name": "demo", "version": "!=1.0.0"}), + ("demo@~1.0.0", {"name": "demo", "version": "~1.0.0"}), + ("demo[a,b]@1.0.0", {"name": "demo", "version": "1.0.0", "extras": ["a", "b"]}), + ("demo[a,b]", {"name": "demo", "extras": ["a", "b"]}), + ("../demo", {"name": "demo", "path": "../demo"}), + ("../demo/demo.whl", {"name": "demo", "path": "../demo/demo.whl"}), + ( + "https://example.com/distributions/demo-0.1.0.tar.gz", + { + "name": "demo", + "url": "https://example.com/distributions/demo-0.1.0.tar.gz", + }, + ), + # PEP 508 inputs + ( + "poetry-core (>=1.0.7,<1.1.0)", + {"name": "poetry-core", "version": ">=1.0.7,<1.1.0"}, + ), + ( + 'requests [security,tests] >= 2.8.1, == 2.8.* ; python_version < "2.7"', + { + "name": "requests", + "markers": 'python_version < "2.7"', + "version": ">=2.8.1,<2.9.0", + "extras": ["security", "tests"], + }, + ), + ("name (>=3,<4)", {"name": "name", "version": ">=3,<4"}), + ( + "name@http://foo.com", + {"name": "name", "url": "http://foo.com"}, + ), + ( + "name [fred,bar] @ http://foo.com ; python_version=='2.7'", + { + "name": "name", + "markers": 'python_version == "2.7"', + "url": "http://foo.com", + "extras": ["fred", "bar"], + }, + ), + ( + 'cachecontrol[filecache] (>=0.12.9,<0.13.0); python_version >= "3.6" and' + ' python_version < "4.0"', + { + "version": ">=0.12.9,<0.13.0", + "markers": 'python_version >= "3.6" and python_version < "4.0"', + "extras": ["filecache"], + "name": "cachecontrol", + }, + ), + ], +) +def test_parse_dependency_specification( + requirement: str, specification: DependencySpec, mocker: MockerFixture +) -> None: + original = Path.exists + + def _mock(self: Path) -> bool: + if "/" in requirement and self == Path.cwd().joinpath(requirement): + return True + return original(self) + + mocker.patch("pathlib.Path.exists", _mock) + + assert not DeepDiff( + parse_dependency_specification(requirement), specification, ignore_order=True + ) diff --git a/vendor/poetry/tests/utils/test_env.py b/vendor/poetry/tests/utils/test_env.py index eea60119..2c7f58b1 100644 --- a/vendor/poetry/tests/utils/test_env.py +++ b/vendor/poetry/tests/utils/test_env.py @@ -1,29 +1,45 @@ +from __future__ import annotations + import os -import shutil +import subprocess import sys -from typing import Optional -from typing import Union +from pathlib import Path +from typing import TYPE_CHECKING +from typing import Any import pytest import tomlkit -from clikit.io import NullIO - -from poetry.core.semver import Version +from cleo.io.null_io import NullIO +from poetry.core.semver.version import Version from poetry.core.toml.file import TOMLFile + from poetry.factory import Factory -from poetry.utils._compat import PY2 +from poetry.repositories.installed_repository import InstalledRepository from poetry.utils._compat import WINDOWS -from poetry.utils._compat import Path from poetry.utils.env import GET_BASE_PREFIX from poetry.utils.env import EnvCommandError from poetry.utils.env import EnvManager from poetry.utils.env import GenericEnv +from poetry.utils.env import InvalidCurrentPythonVersionError +from poetry.utils.env import 
MockEnv from poetry.utils.env import NoCompatiblePythonVersionFound from poetry.utils.env import SystemEnv from poetry.utils.env import VirtualEnv +from poetry.utils.env import build_environment +from poetry.utils.helpers import remove_directory + + +if TYPE_CHECKING: + from collections.abc import Callable + from collections.abc import Iterator + + from pytest_mock import MockerFixture + from poetry.poetry import Poetry + from tests.conftest import Config + from tests.types import ProjectFactory MINIMAL_SCRIPT = """\ @@ -39,35 +55,38 @@ class MockVirtualEnv(VirtualEnv): - def __init__(self, path, base=None, sys_path=None): - super(MockVirtualEnv, self).__init__(path, base=base) + def __init__( + self, + path: Path, + base: Path | None = None, + sys_path: list[str] | None = None, + ) -> None: + super().__init__(path, base=base) self._sys_path = sys_path @property - def sys_path(self): + def sys_path(self) -> list[str] | None: if self._sys_path is not None: return self._sys_path - return super(MockVirtualEnv, self).sys_path + return super().sys_path @pytest.fixture() -def poetry(config): - poetry = Factory().create_poetry( - Path(__file__).parent.parent / "fixtures" / "simple_project" - ) - poetry.set_config(config) - - return poetry +def poetry(project_factory: ProjectFactory) -> Poetry: + fixture = Path(__file__).parent.parent / "fixtures" / "simple_project" + return project_factory("simple", source=fixture) @pytest.fixture() -def manager(poetry): +def manager(poetry: Poetry) -> EnvManager: return EnvManager(poetry) -def test_virtualenvs_with_spaces_in_their_path_work_as_expected(tmp_dir, manager): +def test_virtualenvs_with_spaces_in_their_path_work_as_expected( + tmp_dir: str, manager: EnvManager +): venv_path = Path(tmp_dir) / "Virtual Env" manager.build_venv(str(venv_path)) @@ -77,28 +96,65 @@ def test_virtualenvs_with_spaces_in_their_path_work_as_expected(tmp_dir, manager assert venv.run("python", "-V", shell=True).startswith("Python") -def test_env_commands_with_spaces_in_their_arg_work_as_expected(tmp_dir, manager): +@pytest.mark.skipif(sys.platform != "darwin", reason="requires darwin") +def test_venv_backup_exclusion(tmp_dir: str, manager: EnvManager): + import xattr + + venv_path = Path(tmp_dir) / "Virtual Env" + + manager.build_venv(str(venv_path)) + + value = ( + b"bplist00_\x10\x11com.apple.backupd" + b"\x08\x00\x00\x00\x00\x00\x00\x01\x01\x00\x00\x00\x00\x00\x00\x00" + b"\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1c" + ) + assert ( + xattr.getxattr( + str(venv_path), "com.apple.metadata:com_apple_backup_excludeItem" + ) + == value + ) + + +def test_env_commands_with_spaces_in_their_arg_work_as_expected( + tmp_dir: str, manager: EnvManager +): venv_path = Path(tmp_dir) / "Virtual Env" manager.build_venv(str(venv_path)) venv = VirtualEnv(venv_path) assert venv.run("python", venv.pip, "--version", shell=True).startswith( - "pip {} from ".format(venv.pip_version) + f"pip {venv.pip_version} from " ) def test_env_shell_commands_with_stdinput_in_their_arg_work_as_expected( - tmp_dir, manager + tmp_dir: str, manager: EnvManager ): venv_path = Path(tmp_dir) / "Virtual Env" manager.build_venv(str(venv_path)) venv = VirtualEnv(venv_path) - assert venv.run("python", "-", input_=GET_BASE_PREFIX, shell=True).strip() == str( - venv.get_base_prefix() + run_output_path = Path( + venv.run("python", "-", input_=GET_BASE_PREFIX, shell=True).strip() ) + venv_base_prefix_path = Path(str(venv.get_base_prefix())) + assert run_output_path.resolve() == 
venv_base_prefix_path.resolve() + + +def test_env_get_supported_tags_matches_inside_virtualenv( + tmp_dir: str, manager: EnvManager +): + venv_path = Path(tmp_dir) / "Virtual Env" + manager.build_venv(str(venv_path)) + venv = VirtualEnv(venv_path) + + import packaging.tags + + assert venv.get_supported_tags() == list(packaging.tags.sys_tags()) @pytest.fixture -def in_project_venv_dir(poetry): +def in_project_venv_dir(poetry: Poetry) -> Iterator[Path]: os.environ.pop("VIRTUAL_ENV", None) venv_dir = poetry.file.parent.joinpath(".venv") venv_dir.mkdir() @@ -110,7 +166,10 @@ def in_project_venv_dir(poetry): @pytest.mark.parametrize("in_project", [True, False, None]) def test_env_get_venv_with_venv_folder_present( - manager, poetry, in_project_venv_dir, in_project + manager: EnvManager, + poetry: Poetry, + in_project_venv_dir: Path, + in_project: bool | None, ): poetry.config.config["virtualenvs"]["in-project"] = in_project venv = manager.get() @@ -120,16 +179,23 @@ def test_env_get_venv_with_venv_folder_present( assert venv.path == in_project_venv_dir -def build_venv(path, executable=None): # type: (Union[Path,str], Optional[str]) -> () +def build_venv(path: Path | str, **__: Any) -> None: os.mkdir(str(path)) -def check_output_wrapper(version=Version.parse("3.7.1")): - def check_output(cmd, *args, **kwargs): +VERSION_3_7_1 = Version.parse("3.7.1") + + +def check_output_wrapper( + version: Version = VERSION_3_7_1, +) -> Callable[[str, Any, Any], str]: + def check_output(cmd: str, *args: Any, **kwargs: Any) -> str: if "sys.version_info[:3]" in cmd: return version.text elif "sys.version_info[:2]" in cmd: - return "{}.{}".format(version.major, version.minor) + return f"{version.major}.{version.minor}" + elif '-c "import sys; print(sys.executable)"' in cmd: + return f"/usr/bin/{cmd.split()[0]}" else: return str(Path("/prefix")) @@ -137,7 +203,11 @@ def check_output(cmd, *args, **kwargs): def test_activate_activates_non_existing_virtualenv_no_envs_file( - tmp_dir, manager, poetry, config, mocker + tmp_dir: str, + manager: EnvManager, + poetry: Poetry, + config: Config, + mocker: MockerFixture, ): if "VIRTUAL_ENV" in os.environ: del os.environ["VIRTUAL_ENV"] @@ -145,11 +215,11 @@ def test_activate_activates_non_existing_virtualenv_no_envs_file( config.merge({"virtualenvs": {"path": str(tmp_dir)}}) mocker.patch( - "poetry.utils._compat.subprocess.check_output", + "subprocess.check_output", side_effect=check_output_wrapper(), ) mocker.patch( - "poetry.utils._compat.subprocess.Popen.communicate", + "subprocess.Popen.communicate", side_effect=[("/prefix", None), ("/prefix", None), ("/prefix", None)], ) m = mocker.patch("poetry.utils.env.EnvManager.build_venv", side_effect=build_venv) @@ -158,7 +228,15 @@ def test_activate_activates_non_existing_virtualenv_no_envs_file( venv_name = EnvManager.generate_env_name("simple-project", str(poetry.file.parent)) m.assert_called_with( - Path(tmp_dir) / "{}-py3.7".format(venv_name), executable="python3.7" + Path(tmp_dir) / f"{venv_name}-py3.7", + executable="/usr/bin/python3.7", + flags={ + "always-copy": False, + "system-site-packages": False, + "no-pip": False, + "no-setuptools": False, + }, + prompt="simple-project-py3.7", ) envs_file = TOMLFile(Path(tmp_dir) / "envs.toml") @@ -167,28 +245,32 @@ def test_activate_activates_non_existing_virtualenv_no_envs_file( assert envs[venv_name]["minor"] == "3.7" assert envs[venv_name]["patch"] == "3.7.1" - assert env.path == Path(tmp_dir) / "{}-py3.7".format(venv_name) + assert env.path == Path(tmp_dir) / f"{venv_name}-py3.7" 
     assert env.base == Path("/prefix")
 
 
 def test_activate_activates_existing_virtualenv_no_envs_file(
-    tmp_dir, manager, poetry, config, mocker
+    tmp_dir: str,
+    manager: EnvManager,
+    poetry: Poetry,
+    config: Config,
+    mocker: MockerFixture,
 ):
     if "VIRTUAL_ENV" in os.environ:
         del os.environ["VIRTUAL_ENV"]
 
     venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent))
 
-    os.mkdir(os.path.join(tmp_dir, "{}-py3.7".format(venv_name)))
+    os.mkdir(os.path.join(tmp_dir, f"{venv_name}-py3.7"))
 
     config.merge({"virtualenvs": {"path": str(tmp_dir)}})
 
     mocker.patch(
-        "poetry.utils._compat.subprocess.check_output",
+        "subprocess.check_output",
         side_effect=check_output_wrapper(),
     )
     mocker.patch(
-        "poetry.utils._compat.subprocess.Popen.communicate",
+        "subprocess.Popen.communicate",
         side_effect=[("/prefix", None)],
     )
     m = mocker.patch("poetry.utils.env.EnvManager.build_venv", side_effect=build_venv)
@@ -203,12 +285,16 @@ def test_activate_activates_existing_virtualenv_no_envs_file(
     assert envs[venv_name]["minor"] == "3.7"
     assert envs[venv_name]["patch"] == "3.7.1"
 
-    assert env.path == Path(tmp_dir) / "{}-py3.7".format(venv_name)
+    assert env.path == Path(tmp_dir) / f"{venv_name}-py3.7"
     assert env.base == Path("/prefix")
 
 
 def test_activate_activates_same_virtualenv_with_envs_file(
-    tmp_dir, manager, poetry, config, mocker
+    tmp_dir: str,
+    manager: EnvManager,
+    poetry: Poetry,
+    config: Config,
+    mocker: MockerFixture,
 ):
     if "VIRTUAL_ENV" in os.environ:
         del os.environ["VIRTUAL_ENV"]
@@ -220,16 +306,16 @@ def test_activate_activates_same_virtualenv_with_envs_file(
     doc[venv_name] = {"minor": "3.7", "patch": "3.7.1"}
     envs_file.write(doc)
 
-    os.mkdir(os.path.join(tmp_dir, "{}-py3.7".format(venv_name)))
+    os.mkdir(os.path.join(tmp_dir, f"{venv_name}-py3.7"))
 
     config.merge({"virtualenvs": {"path": str(tmp_dir)}})
 
     mocker.patch(
-        "poetry.utils._compat.subprocess.check_output",
+        "subprocess.check_output",
         side_effect=check_output_wrapper(),
     )
     mocker.patch(
-        "poetry.utils._compat.subprocess.Popen.communicate",
+        "subprocess.Popen.communicate",
         side_effect=[("/prefix", None)],
     )
     m = mocker.patch("poetry.utils.env.EnvManager.create_venv")
@@ -243,12 +329,16 @@ def test_activate_activates_same_virtualenv_with_envs_file(
     assert envs[venv_name]["minor"] == "3.7"
     assert envs[venv_name]["patch"] == "3.7.1"
 
-    assert env.path == Path(tmp_dir) / "{}-py3.7".format(venv_name)
+    assert env.path == Path(tmp_dir) / f"{venv_name}-py3.7"
     assert env.base == Path("/prefix")
 
 
 def test_activate_activates_different_virtualenv_with_envs_file(
-    tmp_dir, manager, poetry, config, mocker
+    tmp_dir: str,
+    manager: EnvManager,
+    poetry: Poetry,
+    config: Config,
+    mocker: MockerFixture,
 ):
     if "VIRTUAL_ENV" in os.environ:
         del os.environ["VIRTUAL_ENV"]
@@ -259,16 +349,16 @@ def test_activate_activates_different_virtualenv_with_envs_file(
     doc[venv_name] = {"minor": "3.7", "patch": "3.7.1"}
     envs_file.write(doc)
 
-    os.mkdir(os.path.join(tmp_dir, "{}-py3.7".format(venv_name)))
+    os.mkdir(os.path.join(tmp_dir, f"{venv_name}-py3.7"))
 
     config.merge({"virtualenvs": {"path": str(tmp_dir)}})
 
     mocker.patch(
-        "poetry.utils._compat.subprocess.check_output",
+        "subprocess.check_output",
         side_effect=check_output_wrapper(Version.parse("3.6.6")),
     )
     mocker.patch(
-        "poetry.utils._compat.subprocess.Popen.communicate",
+        "subprocess.Popen.communicate",
         side_effect=[("/prefix", None), ("/prefix", None), ("/prefix", None)],
     )
     m = mocker.patch("poetry.utils.env.EnvManager.build_venv", side_effect=build_venv)
@@ -276,7 +366,15 @@ def test_activate_activates_different_virtualenv_with_envs_file(
     env = manager.activate("python3.6", NullIO())
 
     m.assert_called_with(
-        Path(tmp_dir) / "{}-py3.6".format(venv_name), executable="python3.6"
+        Path(tmp_dir) / f"{venv_name}-py3.6",
+        executable="/usr/bin/python3.6",
+        flags={
+            "always-copy": False,
+            "system-site-packages": False,
+            "no-pip": False,
+            "no-setuptools": False,
+        },
+        prompt="simple-project-py3.6",
     )
 
     assert envs_file.exists()
@@ -284,12 +382,16 @@ def test_activate_activates_different_virtualenv_with_envs_file(
     assert envs[venv_name]["minor"] == "3.6"
     assert envs[venv_name]["patch"] == "3.6.6"
 
-    assert env.path == Path(tmp_dir) / "{}-py3.6".format(venv_name)
+    assert env.path == Path(tmp_dir) / f"{venv_name}-py3.6"
     assert env.base == Path("/prefix")
 
 
 def test_activate_activates_recreates_for_different_patch(
-    tmp_dir, manager, poetry, config, mocker
+    tmp_dir: str,
+    manager: EnvManager,
+    poetry: Poetry,
+    config: Config,
+    mocker: MockerFixture,
 ):
     if "VIRTUAL_ENV" in os.environ:
         del os.environ["VIRTUAL_ENV"]
@@ -300,16 +402,16 @@ def test_activate_activates_recreates_for_different_patch(
     doc[venv_name] = {"minor": "3.7", "patch": "3.7.0"}
     envs_file.write(doc)
 
-    os.mkdir(os.path.join(tmp_dir, "{}-py3.7".format(venv_name)))
+    os.mkdir(os.path.join(tmp_dir, f"{venv_name}-py3.7"))
 
     config.merge({"virtualenvs": {"path": str(tmp_dir)}})
 
     mocker.patch(
-        "poetry.utils._compat.subprocess.check_output",
+        "subprocess.check_output",
         side_effect=check_output_wrapper(),
     )
     mocker.patch(
-        "poetry.utils._compat.subprocess.Popen.communicate",
+        "subprocess.Popen.communicate",
         side_effect=[
             ("/prefix", None),
             ('{"version_info": [3, 7, 0]}', None),
@@ -328,22 +430,34 @@ def test_activate_activates_recreates_for_different_patch(
     env = manager.activate("python3.7", NullIO())
 
     build_venv_m.assert_called_with(
-        Path(tmp_dir) / "{}-py3.7".format(venv_name), executable="python3.7"
-    )
-    remove_venv_m.assert_called_with(Path(tmp_dir) / "{}-py3.7".format(venv_name))
+        Path(tmp_dir) / f"{venv_name}-py3.7",
+        executable="/usr/bin/python3.7",
+        flags={
+            "always-copy": False,
+            "system-site-packages": False,
+            "no-pip": False,
+            "no-setuptools": False,
+        },
+        prompt="simple-project-py3.7",
+    )
+    remove_venv_m.assert_called_with(Path(tmp_dir) / f"{venv_name}-py3.7")
 
     assert envs_file.exists()
     envs = envs_file.read()
     assert envs[venv_name]["minor"] == "3.7"
     assert envs[venv_name]["patch"] == "3.7.1"
 
-    assert env.path == Path(tmp_dir) / "{}-py3.7".format(venv_name)
+    assert env.path == Path(tmp_dir) / f"{venv_name}-py3.7"
     assert env.base == Path("/prefix")
-    assert (Path(tmp_dir) / "{}-py3.7".format(venv_name)).exists()
+    assert (Path(tmp_dir) / f"{venv_name}-py3.7").exists()
 
 
 def test_activate_does_not_recreate_when_switching_minor(
-    tmp_dir, manager, poetry, config, mocker
+    tmp_dir: str,
+    manager: EnvManager,
+    poetry: Poetry,
+    config: Config,
+    mocker: MockerFixture,
 ):
     if "VIRTUAL_ENV" in os.environ:
         del os.environ["VIRTUAL_ENV"]
@@ -354,17 +468,17 @@ def test_activate_does_not_recreate_when_switching_minor(
     doc[venv_name] = {"minor": "3.7", "patch": "3.7.0"}
     envs_file.write(doc)
 
-    os.mkdir(os.path.join(tmp_dir, "{}-py3.7".format(venv_name)))
-    os.mkdir(os.path.join(tmp_dir, "{}-py3.6".format(venv_name)))
+    os.mkdir(os.path.join(tmp_dir, f"{venv_name}-py3.7"))
+    os.mkdir(os.path.join(tmp_dir, f"{venv_name}-py3.6"))
 
     config.merge({"virtualenvs": {"path": str(tmp_dir)}})
 
     mocker.patch(
-        "poetry.utils._compat.subprocess.check_output",
+        "subprocess.check_output",
         side_effect=check_output_wrapper(Version.parse("3.6.6")),
     )
     mocker.patch(
-        "poetry.utils._compat.subprocess.Popen.communicate",
+        "subprocess.Popen.communicate",
         side_effect=[("/prefix", None), ("/prefix", None), ("/prefix", None)],
     )
     build_venv_m = mocker.patch(
@@ -384,59 +498,62 @@
     assert envs[venv_name]["minor"] == "3.6"
     assert envs[venv_name]["patch"] == "3.6.6"
 
-    assert env.path == Path(tmp_dir) / "{}-py3.6".format(venv_name)
+    assert env.path == Path(tmp_dir) / f"{venv_name}-py3.6"
     assert env.base == Path("/prefix")
-    assert (Path(tmp_dir) / "{}-py3.6".format(venv_name)).exists()
+    assert (Path(tmp_dir) / f"{venv_name}-py3.6").exists()
 
 
 def test_deactivate_non_activated_but_existing(
-    tmp_dir, manager, poetry, config, mocker
+    tmp_dir: str,
+    manager: EnvManager,
+    poetry: Poetry,
+    config: Config,
+    mocker: MockerFixture,
 ):
     if "VIRTUAL_ENV" in os.environ:
         del os.environ["VIRTUAL_ENV"]
 
     venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent))
-    (
-        Path(tmp_dir)
-        / "{}-py{}".format(venv_name, "3.9")
-    ).mkdir()
+    python = ".".join(str(c) for c in sys.version_info[:2])
+    (Path(tmp_dir) / f"{venv_name}-py{python}").mkdir()
 
     config.merge({"virtualenvs": {"path": str(tmp_dir)}})
 
     mocker.patch(
-        "poetry.utils._compat.subprocess.check_output",
-        side_effect=check_output_wrapper(Version.parse("3.9.5")),
+        "subprocess.check_output",
        side_effect=check_output_wrapper(Version.parse("3.10.5")),
     )
 
     manager.deactivate(NullIO())
     env = manager.get()
 
-    assert env.path == Path(tmp_dir) / "{}-py{}".format(
-        venv_name, "3.9"
-    )
+    assert env.path == Path(tmp_dir) / f"{venv_name}-py{python}"
     assert Path("/prefix")
 
 
-def test_deactivate_activated(tmp_dir, manager, poetry, config, mocker):
+def test_deactivate_activated(
+    tmp_dir: str,
+    manager: EnvManager,
+    poetry: Poetry,
+    config: Config,
+    mocker: MockerFixture,
+):
     if "VIRTUAL_ENV" in os.environ:
         del os.environ["VIRTUAL_ENV"]
 
     venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent))
-    version = Version.parse("3.9.5")
-    other_version = Version.parse("3.4.0")
+    version = Version.from_parts(*sys.version_info[:3])
+    other_version = Version.parse("3.4") if version.major == 2 else version.next_minor()
+    (Path(tmp_dir) / f"{venv_name}-py{version.major}.{version.minor}").mkdir()
     (
-        Path(tmp_dir) / "{}-py{}.{}".format(venv_name, version.major, version.minor)
-    ).mkdir()
-    (
-        Path(tmp_dir)
-        / "{}-py{}.{}".format(venv_name, other_version.major, other_version.minor)
+        Path(tmp_dir) / f"{venv_name}-py{other_version.major}.{other_version.minor}"
     ).mkdir()
 
     envs_file = TOMLFile(Path(tmp_dir) / "envs.toml")
     doc = tomlkit.document()
     doc[venv_name] = {
-        "minor": "{}.{}".format(other_version.major, other_version.minor),
+        "minor": f"{other_version.major}.{other_version.minor}",
         "patch": other_version.text,
     }
     envs_file.write(doc)
@@ -444,16 +561,14 @@ def test_deactivate_activated(tmp_dir, manager, poetry, config, mocker):
     config.merge({"virtualenvs": {"path": str(tmp_dir)}})
 
     mocker.patch(
-        "poetry.utils._compat.subprocess.check_output",
-        side_effect=check_output_wrapper(Version.parse("3.9.5")),
+        "subprocess.check_output",
+        side_effect=check_output_wrapper(Version.parse("3.10.5")),
     )
 
     manager.deactivate(NullIO())
     env = manager.get()
 
-    assert env.path == Path(tmp_dir) / "{}-py{}.{}".format(
-        venv_name, version.major, version.minor
-    )
+    assert env.path == Path(tmp_dir) / f"{venv_name}-py{version.major}.{version.minor}"
     assert Path("/prefix")
 
     envs = envs_file.read()
@@ -461,14 +576,18 @@ def test_deactivate_activated(tmp_dir, manager, poetry, config, mocker):
 
 
 def test_get_prefers_explicitly_activated_virtualenvs_over_env_var(
-    tmp_dir, manager, poetry, config, mocker
+    tmp_dir: str,
+    manager: EnvManager,
+    poetry: Poetry,
+    config: Config,
+    mocker: MockerFixture,
 ):
     os.environ["VIRTUAL_ENV"] = "/environment/prefix"
 
     venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent))
 
     config.merge({"virtualenvs": {"path": str(tmp_dir)}})
-    (Path(tmp_dir) / "{}-py3.7".format(venv_name)).mkdir()
+    (Path(tmp_dir) / f"{venv_name}-py3.7").mkdir()
 
     envs_file = TOMLFile(Path(tmp_dir) / "envs.toml")
     doc = tomlkit.document()
@@ -476,79 +595,99 @@
     envs_file.write(doc)
 
     mocker.patch(
-        "poetry.utils._compat.subprocess.check_output",
+        "subprocess.check_output",
         side_effect=check_output_wrapper(),
     )
     mocker.patch(
-        "poetry.utils._compat.subprocess.Popen.communicate",
+        "subprocess.Popen.communicate",
         side_effect=[("/prefix", None)],
     )
 
     env = manager.get()
 
-    assert env.path == Path(tmp_dir) / "{}-py3.7".format(venv_name)
+    assert env.path == Path(tmp_dir) / f"{venv_name}-py3.7"
     assert env.base == Path("/prefix")
 
 
-def test_list(tmp_dir, manager, poetry, config):
+def test_list(tmp_dir: str, manager: EnvManager, poetry: Poetry, config: Config):
     config.merge({"virtualenvs": {"path": str(tmp_dir)}})
 
     venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent))
-    (Path(tmp_dir) / "{}-py3.7".format(venv_name)).mkdir()
-    (Path(tmp_dir) / "{}-py3.6".format(venv_name)).mkdir()
+    (Path(tmp_dir) / f"{venv_name}-py3.7").mkdir()
+    (Path(tmp_dir) / f"{venv_name}-py3.6").mkdir()
 
     venvs = manager.list()
 
-    assert 2 == len(venvs)
-    assert (Path(tmp_dir) / "{}-py3.6".format(venv_name)) == venvs[0].path
-    assert (Path(tmp_dir) / "{}-py3.7".format(venv_name)) == venvs[1].path
+    assert len(venvs) == 2
+    assert venvs[0].path == (Path(tmp_dir) / f"{venv_name}-py3.6")
+    assert venvs[1].path == (Path(tmp_dir) / f"{venv_name}-py3.7")
 
 
-def test_remove_by_python_version(tmp_dir, manager, poetry, config, mocker):
+def test_remove_by_python_version(
+    tmp_dir: str,
+    manager: EnvManager,
+    poetry: Poetry,
+    config: Config,
+    mocker: MockerFixture,
+):
     config.merge({"virtualenvs": {"path": str(tmp_dir)}})
 
     venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent))
-    (Path(tmp_dir) / "{}-py3.7".format(venv_name)).mkdir()
-    (Path(tmp_dir) / "{}-py3.6".format(venv_name)).mkdir()
+    (Path(tmp_dir) / f"{venv_name}-py3.7").mkdir()
+    (Path(tmp_dir) / f"{venv_name}-py3.6").mkdir()
 
     mocker.patch(
-        "poetry.utils._compat.subprocess.check_output",
+        "subprocess.check_output",
         side_effect=check_output_wrapper(Version.parse("3.6.6")),
     )
 
     venv = manager.remove("3.6")
 
-    assert (Path(tmp_dir) / "{}-py3.6".format(venv_name)) == venv.path
-    assert not (Path(tmp_dir) / "{}-py3.6".format(venv_name)).exists()
+    expected_venv_path = Path(tmp_dir) / f"{venv_name}-py3.6"
+    assert venv.path == expected_venv_path
+    assert not expected_venv_path.exists()
 
 
-def test_remove_by_name(tmp_dir, manager, poetry, config, mocker):
+def test_remove_by_name(
+    tmp_dir: str,
+    manager: EnvManager,
+    poetry: Poetry,
+    config: Config,
+    mocker: MockerFixture,
+):
     config.merge({"virtualenvs": {"path": str(tmp_dir)}})
 
     venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent))
-    (Path(tmp_dir) / "{}-py3.7".format(venv_name)).mkdir()
-    (Path(tmp_dir) / "{}-py3.6".format(venv_name)).mkdir()
+    (Path(tmp_dir) / f"{venv_name}-py3.7").mkdir()
+    (Path(tmp_dir) / f"{venv_name}-py3.6").mkdir()
 
     mocker.patch(
-        "poetry.utils._compat.subprocess.check_output",
+        "subprocess.check_output",
         side_effect=check_output_wrapper(Version.parse("3.6.6")),
     )
 
-    venv = manager.remove("{}-py3.6".format(venv_name))
+    venv = manager.remove(f"{venv_name}-py3.6")
 
-    assert (Path(tmp_dir) / "{}-py3.6".format(venv_name)) == venv.path
-    assert not (Path(tmp_dir) / "{}-py3.6".format(venv_name)).exists()
+    expected_venv_path = Path(tmp_dir) / f"{venv_name}-py3.6"
+    assert venv.path == expected_venv_path
+    assert not expected_venv_path.exists()
 
 
-def test_remove_also_deactivates(tmp_dir, manager, poetry, config, mocker):
+def test_remove_also_deactivates(
+    tmp_dir: str,
+    manager: EnvManager,
+    poetry: Poetry,
+    config: Config,
+    mocker: MockerFixture,
+):
     config.merge({"virtualenvs": {"path": str(tmp_dir)}})
 
     venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent))
-    (Path(tmp_dir) / "{}-py3.7".format(venv_name)).mkdir()
-    (Path(tmp_dir) / "{}-py3.6".format(venv_name)).mkdir()
+    (Path(tmp_dir) / f"{venv_name}-py3.7").mkdir()
+    (Path(tmp_dir) / f"{venv_name}-py3.6").mkdir()
 
     mocker.patch(
-        "poetry.utils._compat.subprocess.check_output",
+        "subprocess.check_output",
         side_effect=check_output_wrapper(Version.parse("3.6.6")),
     )
@@ -559,20 +698,27 @@ def test_remove_also_deactivates(tmp_dir, manager, poetry, config, mocker):
     venv = manager.remove("python3.6")
 
-    assert (Path(tmp_dir) / "{}-py3.6".format(venv_name)) == venv.path
-    assert not (Path(tmp_dir) / "{}-py3.6".format(venv_name)).exists()
+    expected_venv_path = Path(tmp_dir) / f"{venv_name}-py3.6"
+    assert venv.path == expected_venv_path
+    assert not expected_venv_path.exists()
 
     envs = envs_file.read()
     assert venv_name not in envs
 
 
-def test_remove_keeps_dir_if_not_deleteable(tmp_dir, manager, poetry, config, mocker):
+def test_remove_keeps_dir_if_not_deleteable(
+    tmp_dir: str,
+    manager: EnvManager,
+    poetry: Poetry,
+    config: Config,
+    mocker: MockerFixture,
+):
     # Ensure we empty rather than delete folder if its is an active mount point.
     # See https://github.com/python-poetry/poetry/pull/2064
     config.merge({"virtualenvs": {"path": str(tmp_dir)}})
 
     venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent))
-    venv_path = Path(tmp_dir) / "{}-py3.6".format(venv_name)
+    venv_path = Path(tmp_dir) / f"{venv_name}-py3.6"
     venv_path.mkdir()
 
     folder1_path = venv_path / "folder1"
@@ -585,23 +731,23 @@ def test_remove_keeps_dir_if_not_deleteable(tmp_dir, manager, poetry, config, mo
     file2_path.touch(exist_ok=False)
 
     mocker.patch(
-        "poetry.utils._compat.subprocess.check_output",
+        "subprocess.check_output",
         side_effect=check_output_wrapper(Version.parse("3.6.6")),
     )
 
-    original_rmtree = shutil.rmtree
-
-    def err_on_rm_venv_only(path, *args, **kwargs):
-        if path == str(venv_path):
+    def err_on_rm_venv_only(path: Path | str, *args: Any, **kwargs: Any) -> None:
+        if str(path) == str(venv_path):
             raise OSError(16, "Test error")  # ERRNO 16: Device or resource busy
         else:
-            original_rmtree(path)
+            remove_directory(path)
 
-    m = mocker.patch("shutil.rmtree", side_effect=err_on_rm_venv_only)
+    m = mocker.patch(
+        "poetry.utils.env.remove_directory", side_effect=err_on_rm_venv_only
+    )
 
-    venv = manager.remove("{}-py3.6".format(venv_name))
+    venv = manager.remove(f"{venv_name}-py3.6")
 
-    m.assert_any_call(str(venv_path))
+    m.assert_any_call(venv_path)
 
     assert venv_path == venv.path
     assert venv_path.exists()
@@ -610,33 +756,98 @@ def err_on_rm_venv_only(path, *args, **kwargs):
     assert not file1_path.exists()
     assert not file2_path.exists()
 
-    m.side_effect = original_rmtree  # Avoid teardown using `err_on_rm_venv_only`
+    m.side_effect = remove_directory  # Avoid teardown using `err_on_rm_venv_only`
 
 
-@pytest.mark.skipif(
-    os.name == "nt" or PY2, reason="Symlinks are not support for Windows"
-)
-def test_env_has_symlinks_on_nix(tmp_dir, tmp_venv):
+@pytest.mark.skipif(os.name == "nt", reason="Symlinks are not supported on Windows")
+def test_env_has_symlinks_on_nix(tmp_dir: str, tmp_venv: VirtualEnv):
     assert os.path.islink(tmp_venv.python)
 
 
-def test_run_with_input(tmp_dir, tmp_venv):
+def test_run_with_input(tmp_dir: str, tmp_venv: VirtualEnv):
     result = tmp_venv.run("python", "-", input_=MINIMAL_SCRIPT)
     assert result == "Minimal Output" + os.linesep
 
 
-def test_run_with_input_non_zero_return(tmp_dir, tmp_venv):
-
-    with pytest.raises(EnvCommandError) as processError:
+def test_run_with_input_non_zero_return(tmp_dir: str, tmp_venv: VirtualEnv):
+    with pytest.raises(EnvCommandError) as process_error:
         # Test command that will return non-zero returncode.
tmp_venv.run("python", "-", input_=ERRORING_SCRIPT) - assert processError.value.e.returncode == 1 + assert process_error.value.e.returncode == 1 + + +def test_run_with_keyboard_interrupt( + tmp_dir: str, tmp_venv: VirtualEnv, mocker: MockerFixture +): + mocker.patch("subprocess.run", side_effect=KeyboardInterrupt()) + with pytest.raises(KeyboardInterrupt): + tmp_venv.run("python", "-", input_=MINIMAL_SCRIPT) + subprocess.run.assert_called_once() + + +def test_call_with_input_and_keyboard_interrupt( + tmp_dir: str, tmp_venv: VirtualEnv, mocker: MockerFixture +): + mocker.patch("subprocess.run", side_effect=KeyboardInterrupt()) + kwargs = {"call": True} + with pytest.raises(KeyboardInterrupt): + tmp_venv.run("python", "-", input_=MINIMAL_SCRIPT, **kwargs) + subprocess.run.assert_called_once() + + +def test_call_no_input_with_keyboard_interrupt( + tmp_dir: str, tmp_venv: VirtualEnv, mocker: MockerFixture +): + mocker.patch("subprocess.call", side_effect=KeyboardInterrupt()) + kwargs = {"call": True} + with pytest.raises(KeyboardInterrupt): + tmp_venv.run("python", "-", **kwargs) + subprocess.call.assert_called_once() + + +def test_run_with_called_process_error( + tmp_dir: str, tmp_venv: VirtualEnv, mocker: MockerFixture +): + mocker.patch( + "subprocess.run", side_effect=subprocess.CalledProcessError(42, "some_command") + ) + with pytest.raises(EnvCommandError): + tmp_venv.run("python", "-", input_=MINIMAL_SCRIPT) + subprocess.run.assert_called_once() + + +def test_call_with_input_and_called_process_error( + tmp_dir: str, tmp_venv: VirtualEnv, mocker: MockerFixture +): + mocker.patch( + "subprocess.run", side_effect=subprocess.CalledProcessError(42, "some_command") + ) + kwargs = {"call": True} + with pytest.raises(EnvCommandError): + tmp_venv.run("python", "-", input_=MINIMAL_SCRIPT, **kwargs) + subprocess.run.assert_called_once() + + +def test_call_no_input_with_called_process_error( + tmp_dir: str, tmp_venv: VirtualEnv, mocker: MockerFixture +): + mocker.patch( + "subprocess.call", side_effect=subprocess.CalledProcessError(42, "some_command") + ) + kwargs = {"call": True} + with pytest.raises(EnvCommandError): + tmp_venv.run("python", "-", **kwargs) + subprocess.call.assert_called_once() -def test_create_venv_tries_to_find_a_compatible_python_executable_using_generic_ones_first( - manager, poetry, config, mocker +def test_create_venv_tries_to_find_a_compatible_python_executable_using_generic_ones_first( # noqa: E501 + manager: EnvManager, + poetry: Poetry, + config: Config, + mocker: MockerFixture, + config_virtualenvs_path: Path, ): if "VIRTUAL_ENV" in os.environ: del os.environ["VIRTUAL_ENV"] @@ -646,7 +857,7 @@ def test_create_venv_tries_to_find_a_compatible_python_executable_using_generic_ mocker.patch("sys.version_info", (2, 7, 16)) mocker.patch( - "poetry.utils._compat.subprocess.check_output", + "subprocess.check_output", side_effect=["3.7.5", "3.7.5", "2.7.16", "3.7.5"], ) m = mocker.patch( @@ -656,12 +867,24 @@ def test_create_venv_tries_to_find_a_compatible_python_executable_using_generic_ manager.create_venv(NullIO()) m.assert_called_with( - Path("{}/virtualenvs/{}-py3.7".format(config.get("cache-dir"), venv_name)), executable="python3" + config_virtualenvs_path / f"{venv_name}-py3.7", + executable="python3", + flags={ + "always-copy": False, + "system-site-packages": False, + "no-pip": False, + "no-setuptools": False, + }, + prompt="simple-project-py3.7", ) def test_create_venv_tries_to_find_a_compatible_python_executable_using_specific_ones( - manager, poetry, config, mocker + 
manager: EnvManager, + poetry: Poetry, + config: Config, + mocker: MockerFixture, + config_virtualenvs_path: Path, ): if "VIRTUAL_ENV" in os.environ: del os.environ["VIRTUAL_ENV"] @@ -670,17 +893,15 @@ def test_create_venv_tries_to_find_a_compatible_python_executable_using_specific venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent)) mocker.patch("sys.version_info", (2, 7, 16)) - mocker.patch( - "poetry.utils._compat.subprocess.check_output", side_effect=[ - "3.5.3", - "3.9.0", - "3.5.3", - "3.5.3", - "2.7.16", - "3.5.3", - "3.9.0" - ] - ) + mocker.patch("subprocess.check_output", side_effect=[ + "3.5.3", + "3.10.0", + "3.5.3", + "3.5.3", + "2.7.16", + "3.5.3", + "3.10.0" + ]) m = mocker.patch( "poetry.utils.env.EnvManager.build_venv", side_effect=lambda *args, **kwargs: "" ) @@ -688,21 +909,27 @@ def test_create_venv_tries_to_find_a_compatible_python_executable_using_specific manager.create_venv(NullIO()) m.assert_called_with( - Path("{}/virtualenvs/{}-py3.9".format(config.get("cache-dir"), venv_name)), executable="python3.9" + config_virtualenvs_path / f"{venv_name}-py3.10", + executable="python3.10", + flags={ + "always-copy": False, + "system-site-packages": False, + "no-pip": False, + "no-setuptools": False, + }, + prompt="simple-project-py3.10", ) def test_create_venv_fails_if_no_compatible_python_version_could_be_found( - manager, poetry, config, mocker + manager: EnvManager, poetry: Poetry, config: Config, mocker: MockerFixture ): if "VIRTUAL_ENV" in os.environ: del os.environ["VIRTUAL_ENV"] poetry.package.python_versions = "^4.8" - mocker.patch( - "poetry.utils._compat.subprocess.check_output", side_effect=["" for _ in range(4 + 2 * 13)] - ) + mocker.patch("subprocess.check_output", side_effect=lambda *args, **kwargs: "") m = mocker.patch( "poetry.utils.env.EnvManager.build_venv", side_effect=lambda *args, **kwargs: "" ) @@ -716,19 +943,19 @@ def test_create_venv_fails_if_no_compatible_python_version_could_be_found( 'via the "env use" command.' ) - assert expected_message == str(e.value) - assert 0 == m.call_count + assert str(e.value) == expected_message + assert m.call_count == 0 def test_create_venv_does_not_try_to_find_compatible_versions_with_executable( - manager, poetry, config, mocker + manager: EnvManager, poetry: Poetry, config: Config, mocker: MockerFixture ): if "VIRTUAL_ENV" in os.environ: del os.environ["VIRTUAL_ENV"] poetry.package.python_versions = "^4.8" - mocker.patch("poetry.utils._compat.subprocess.check_output", side_effect=["3.8.0" for _ in range(1 + 12)]) + mocker.patch("subprocess.check_output", side_effect=["3.8.0" for _ in range(1 + 12)]) m = mocker.patch( "poetry.utils.env.EnvManager.build_venv", side_effect=lambda *args, **kwargs: "" ) @@ -742,24 +969,28 @@ def test_create_venv_does_not_try_to_find_compatible_versions_with_executable( "specified in the pyproject.toml file." 
     )
 
-    assert expected_message == str(e.value)
-    assert 0 == m.call_count
+    assert str(e.value) == expected_message
+    assert m.call_count == 0
 
 
 def test_create_venv_uses_patch_version_to_detect_compatibility(
-    manager, poetry, config, mocker
+    manager: EnvManager,
+    poetry: Poetry,
+    config: Config,
+    mocker: MockerFixture,
+    config_virtualenvs_path: Path,
 ):
     if "VIRTUAL_ENV" in os.environ:
         del os.environ["VIRTUAL_ENV"]
 
-    version = Version.parse(".".join(str(c) for c in sys.version_info[:3]))
-    poetry.package.python_versions = "^{}".format(
-        ".".join(str(c) for c in sys.version_info[:3])
+    version = Version.from_parts(*sys.version_info[:3])
+    poetry.package.python_versions = "^" + ".".join(
+        str(c) for c in sys.version_info[:3]
     )
     venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent))
 
-    mocker.patch(
-        "poetry.utils._compat.subprocess.check_output",
+    check_output = mocker.patch(
+        "subprocess.check_output",
         side_effect=["2.7.16" for _ in range(3)] + [f"{version.major}.{version.minor}.{version.patch + 1}"],
     )
     m = mocker.patch(
@@ -769,31 +1000,36 @@
     manager.create_venv(NullIO())
 
     m.assert_called_with(
-        Path(
-            "{}/virtualenvs/{}-py{}.{}".format(
-                config.get("cache-dir"), venv_name, version.major, version.minor
-            )
-        ),
+        config_virtualenvs_path / f"{venv_name}-py{version.major}.{version.minor}",
         executable="python3",
+        flags={
+            "always-copy": False,
+            "system-site-packages": False,
+            "no-pip": False,
+            "no-setuptools": False,
+        },
+        prompt=f"simple-project-py{version.major}.{version.minor}",
     )
 
 
 def test_create_venv_uses_patch_version_to_detect_compatibility_with_executable(
-    manager, poetry, config, mocker
+    manager: EnvManager,
+    poetry: Poetry,
+    config: Config,
+    mocker: MockerFixture,
+    config_virtualenvs_path: Path,
 ):
     if "VIRTUAL_ENV" in os.environ:
         del os.environ["VIRTUAL_ENV"]
 
-    version = Version.parse(".".join(str(c) for c in sys.version_info[:3]))
-    poetry.package.python_versions = "~{}".format(
-        ".".join(str(c) for c in (version.major, version.minor - 1, 0))
-    )
+    version = Version.from_parts(*sys.version_info[:3])
+    poetry.package.python_versions = f"~{version.major}.{version.minor - 1}.0"
     venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent))
 
     check_output = mocker.patch(
-        "poetry.utils._compat.subprocess.check_output",
+        "subprocess.check_output",
         side_effect=check_output_wrapper(
-            Version.parse("{}.{}.0".format(version.major, version.minor - 1))
+            Version.parse(f"{version.major}.{version.minor - 1}.0")
        ),
     )
     m = mocker.patch(
@@ -801,22 +1037,56 @@
     )
 
     manager.create_venv(
-        NullIO(), executable="python{}.{}".format(version.major, version.minor - 1)
+        NullIO(), executable=f"python{version.major}.{version.minor - 1}"
     )
 
     assert check_output.called
     m.assert_called_with(
-        Path(
-            "{}/virtualenvs/{}-py{}.{}".format(
-                config.get("cache-dir"), venv_name, version.major, version.minor - 1
-            )
-        ),
-        executable="python{}.{}".format(version.major, version.minor - 1),
+        config_virtualenvs_path / f"{venv_name}-py{version.major}.{version.minor - 1}",
+        executable=f"python{version.major}.{version.minor - 1}",
+        flags={
+            "always-copy": False,
+            "system-site-packages": False,
+            "no-pip": False,
+            "no-setuptools": False,
+        },
+        prompt=f"simple-project-py{version.major}.{version.minor - 1}",
     )
 
 
+def test_create_venv_fails_if_current_python_version_is_not_supported(
+    manager: EnvManager, poetry: Poetry
+):
+    if "VIRTUAL_ENV" in os.environ:
+        del os.environ["VIRTUAL_ENV"]
+
+    manager.create_venv(NullIO())
+
+    current_version = Version.parse(".".join(str(c) for c in sys.version_info[:3]))
+    next_version = ".".join(
+        str(c) for c in (current_version.major, current_version.minor + 1, 0)
+    )
+    package_version = "~" + next_version
+    poetry.package.python_versions = package_version
+
+    with pytest.raises(InvalidCurrentPythonVersionError) as e:
+        manager.create_venv(NullIO())
+
+    expected_message = (
+        f"Current Python version ({current_version}) is not allowed by the project"
+        f' ({package_version}).\nPlease change python executable via the "env use"'
+        " command."
+    )
+
+    assert expected_message == str(e.value)
+
+
 def test_activate_with_in_project_setting_does_not_fail_if_no_venvs_dir(
-    manager, poetry, config, tmp_dir, mocker
+    manager: EnvManager,
+    poetry: Poetry,
+    config: Config,
+    tmp_dir: str,
+    mocker: MockerFixture,
 ):
     if "VIRTUAL_ENV" in os.environ:
         del os.environ["VIRTUAL_ENV"]
@@ -831,18 +1101,28 @@
     )
 
     mocker.patch(
-        "poetry.utils._compat.subprocess.check_output",
+        "subprocess.check_output",
         side_effect=check_output_wrapper(),
     )
     mocker.patch(
-        "poetry.utils._compat.subprocess.Popen.communicate",
+        "subprocess.Popen.communicate",
         side_effect=[("/prefix", None), ("/prefix", None), ("/prefix", None)],
     )
     m = mocker.patch("poetry.utils.env.EnvManager.build_venv")
 
     manager.activate("python3.7", NullIO())
 
-    m.assert_called_with(poetry.file.parent / ".venv", executable="python3.7")
+    m.assert_called_with(
+        poetry.file.parent / ".venv",
+        executable="/usr/bin/python3.7",
+        flags={
+            "always-copy": False,
+            "system-site-packages": False,
+            "no-pip": False,
+            "no-setuptools": False,
+        },
+        prompt="simple-project-py3.7",
+    )
 
     envs_file = TOMLFile(Path(tmp_dir) / "virtualenvs" / "envs.toml")
     assert not envs_file.exists()
@@ -859,7 +1139,7 @@ def test_system_env_has_correct_paths():
     assert env.site_packages.path == Path(paths["purelib"])
 
 
-def test_venv_has_correct_paths(tmp_venv):
+def test_venv_has_correct_paths(tmp_venv: VirtualEnv):
     paths = tmp_venv.paths
 
     assert paths.get("purelib") is not None
@@ -868,21 +1148,54 @@
     assert tmp_venv.site_packages.path == Path(paths["purelib"])
 
 
-def test_env_finds_the_correct_executables(tmp_dir, manager):
+def test_env_system_packages(tmp_path: Path, poetry: Poetry):
+    venv_path = tmp_path / "venv"
+    pyvenv_cfg = venv_path / "pyvenv.cfg"
+
+    EnvManager(poetry).build_venv(path=venv_path, flags={"system-site-packages": True})
+
+    assert "include-system-site-packages = true" in pyvenv_cfg.read_text()
+
+
+@pytest.mark.parametrize(
+    ("flags", "packages"),
+    [
+        ({"no-pip": False}, {"pip", "wheel"}),
+        ({"no-pip": False, "no-wheel": True}, {"pip"}),
+        ({"no-pip": True}, set()),
+        ({"no-setuptools": False}, {"setuptools"}),
+        ({"no-setuptools": True}, set()),
+        ({"no-pip": True, "no-setuptools": False}, {"setuptools"}),
+        ({"no-wheel": False}, {"wheel"}),
+        ({}, set()),
+    ],
+)
+def test_env_no_pip(
+    tmp_path: Path, poetry: Poetry, flags: dict[str, bool], packages: set[str]
+):
+    venv_path = tmp_path / "venv"
+    EnvManager(poetry).build_venv(path=venv_path, flags=flags)
+    env = VirtualEnv(venv_path)
+    installed_repository = InstalledRepository.load(env=env, with_dependencies=True)
+    installed_packages = {
+        package.name
+        for package in installed_repository.packages
+        # workaround for BSD test environments
+        if package.name != "sqlite3"
+    }
+
+    assert installed_packages == packages
+
+
+def test_env_finds_the_correct_executables(tmp_dir: str, manager: EnvManager):
     venv_path = Path(tmp_dir) / "Virtual Env"
-    manager.build_venv(str(venv_path))
+    manager.build_venv(str(venv_path), with_pip=True)
     venv = VirtualEnv(venv_path)
 
-    default_executable = expected_executable = "python" + (".exe" if WINDOWS else "")
-    default_pip_executable = expected_pip_executable = "pip" + (
-        ".exe" if WINDOWS else ""
-    )
-    major_executable = "python{}{}".format(
-        sys.version_info[0], ".exe" if WINDOWS else ""
-    )
-    major_pip_executable = "pip{}{}".format(
-        sys.version_info[0], ".exe" if WINDOWS else ""
-    )
+    default_executable = expected_executable = f"python{'.exe' if WINDOWS else ''}"
+    default_pip_executable = expected_pip_executable = f"pip{'.exe' if WINDOWS else ''}"
+    major_executable = f"python{sys.version_info[0]}{'.exe' if WINDOWS else ''}"
+    major_pip_executable = f"pip{sys.version_info[0]}{'.exe' if WINDOWS else ''}"
 
     if (
         venv._bin_dir.joinpath(default_executable).exists()
@@ -904,19 +1217,23 @@
     assert Path(venv.pip).name.startswith(expected_pip_executable.split(".")[0])
 
 
-def test_env_finds_the_correct_executables_for_generic_env(tmp_dir, manager):
+def test_env_finds_the_correct_executables_for_generic_env(
+    tmp_dir: str, manager: EnvManager
+):
     venv_path = Path(tmp_dir) / "Virtual Env"
     child_venv_path = Path(tmp_dir) / "Child Virtual Env"
-    manager.build_venv(str(venv_path))
+    manager.build_venv(str(venv_path), with_pip=True)
     parent_venv = VirtualEnv(venv_path)
-    manager.build_venv(str(child_venv_path), executable=parent_venv.python)
+    manager.build_venv(
+        str(child_venv_path), executable=parent_venv.python, with_pip=True
+    )
     venv = GenericEnv(parent_venv.path, child_env=VirtualEnv(child_venv_path))
 
-    expected_executable = "python{}.{}{}".format(
-        sys.version_info[0], sys.version_info[1], ".exe" if WINDOWS else ""
+    expected_executable = (
+        f"python{sys.version_info[0]}.{sys.version_info[1]}{'.exe' if WINDOWS else ''}"
     )
-    expected_pip_executable = "pip{}.{}{}".format(
-        sys.version_info[0], sys.version_info[1], ".exe" if WINDOWS else ""
+    expected_pip_executable = (
+        f"pip{sys.version_info[0]}.{sys.version_info[1]}{'.exe' if WINDOWS else ''}"
    )
 
     if WINDOWS:
@@ -927,20 +1244,22 @@
     assert Path(venv.pip).name == expected_pip_executable
 
 
-def test_env_finds_fallback_executables_for_generic_env(tmp_dir, manager):
+def test_env_finds_fallback_executables_for_generic_env(
+    tmp_dir: str, manager: EnvManager
+):
     venv_path = Path(tmp_dir) / "Virtual Env"
     child_venv_path = Path(tmp_dir) / "Child Virtual Env"
-    manager.build_venv(str(venv_path))
+    manager.build_venv(str(venv_path), with_pip=True)
     parent_venv = VirtualEnv(venv_path)
-    manager.build_venv(str(child_venv_path), executable=parent_venv.python)
+    manager.build_venv(
+        str(child_venv_path), executable=parent_venv.python, with_pip=True
+    )
     venv = GenericEnv(parent_venv.path, child_env=VirtualEnv(child_venv_path))
 
-    default_executable = "python" + (".exe" if WINDOWS else "")
-    major_executable = "python{}{}".format(
-        sys.version_info[0], ".exe" if WINDOWS else ""
-    )
-    minor_executable = "python{}.{}{}".format(
-        sys.version_info[0], sys.version_info[1], ".exe" if WINDOWS else ""
+    default_executable = f"python{'.exe' if WINDOWS else ''}"
+    major_executable = f"python{sys.version_info[0]}{'.exe' if WINDOWS else ''}"
+    minor_executable = (
+        f"python{sys.version_info[0]}.{sys.version_info[1]}{'.exe' if WINDOWS else ''}"
     )
     expected_executable = minor_executable
     if (
@@ -957,12 +1276,10 @@
     venv._bin_dir.joinpath(expected_executable).unlink()
     expected_executable = default_executable
 
-    default_pip_executable = "pip" + (".exe" if WINDOWS else "")
-    major_pip_executable = "pip{}{}".format(
-        sys.version_info[0], ".exe" if WINDOWS else ""
-    )
-    minor_pip_executable = "pip{}.{}{}".format(
-        sys.version_info[0], sys.version_info[1], ".exe" if WINDOWS else ""
+    default_pip_executable = f"pip{'.exe' if WINDOWS else ''}"
+    major_pip_executable = f"pip{sys.version_info[0]}{'.exe' if WINDOWS else ''}"
+    minor_pip_executable = (
+        f"pip{sys.version_info[0]}.{sys.version_info[1]}{'.exe' if WINDOWS else ''}"
    )
     expected_pip_executable = minor_pip_executable
     if (
@@ -989,3 +1306,146 @@
     assert Path(venv.python).name == expected_executable
     assert Path(venv.pip).name == expected_pip_executable
+
+
+def test_create_venv_accepts_fallback_version_w_nonzero_patchlevel(
+    manager: EnvManager,
+    poetry: Poetry,
+    config: Config,
+    mocker: MockerFixture,
+    config_virtualenvs_path: Path,
+):
+    if "VIRTUAL_ENV" in os.environ:
+        del os.environ["VIRTUAL_ENV"]
+
+    poetry.package.python_versions = "~3.5.1"
+    venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent))
+
+    check_output = mocker.patch(
+        "subprocess.check_output",
+        side_effect=lambda cmd, *args, **kwargs: str(
+            "3.5.12" if "python3.5" in cmd else "3.7.1"
+        ),
+    )
+    m = mocker.patch(
+        "poetry.utils.env.EnvManager.build_venv", side_effect=lambda *args, **kwargs: ""
+    )
+
+    manager.create_venv(NullIO())
+
+    assert check_output.called
+    m.assert_called_with(
+        config_virtualenvs_path / f"{venv_name}-py3.5",
+        executable="python3.5",
+        flags={
+            "always-copy": False,
+            "system-site-packages": False,
+            "no-pip": False,
+            "no-setuptools": False,
+        },
+        prompt="simple-project-py3.5",
+    )
+
+
+def test_generate_env_name_ignores_case_for_case_insensitive_fs(tmp_dir: str):
+    venv_name1 = EnvManager.generate_env_name("simple-project", "MyDiR")
+    venv_name2 = EnvManager.generate_env_name("simple-project", "mYdIr")
+    if sys.platform == "win32":
+        assert venv_name1 == venv_name2
+    else:
+        assert venv_name1 != venv_name2
+
+
+def test_generate_env_name_uses_real_path(tmp_dir: str, mocker: MockerFixture):
+    mocker.patch("os.path.realpath", return_value="the_real_dir")
+    venv_name1 = EnvManager.generate_env_name("simple-project", "the_real_dir")
+    venv_name2 = EnvManager.generate_env_name("simple-project", "linked_dir")
+    assert venv_name1 == venv_name2
+
+
+@pytest.fixture()
+def extended_without_setup_poetry() -> Poetry:
+    poetry = Factory().create_poetry(
+        Path(__file__).parent.parent / "fixtures" / "extended_project_without_setup"
+    )
+
+    return poetry
+
+
+def test_build_environment_called_build_script_specified(
+    mocker: MockerFixture, extended_without_setup_poetry: Poetry, tmp_dir: str
+):
+    project_env = MockEnv(path=Path(tmp_dir) / "project")
+    ephemeral_env = MockEnv(path=Path(tmp_dir) / "ephemeral")
+
+    mocker.patch(
+        "poetry.utils.env.ephemeral_environment"
+    ).return_value.__enter__.return_value = ephemeral_env
+
+    with build_environment(extended_without_setup_poetry, project_env) as env:
+        assert env == ephemeral_env
+        assert env.executed == [
+            [
+                env.python,
+                env.pip_embedded,
+                "install",
+                "--disable-pip-version-check",
"--ignore-installed", + *extended_without_setup_poetry.pyproject.build_system.requires, + ] + ] + + +def test_build_environment_not_called_without_build_script_specified( + mocker: MockerFixture, poetry: Poetry, tmp_dir: str +): + project_env = MockEnv(path=Path(tmp_dir) / "project") + ephemeral_env = MockEnv(path=Path(tmp_dir) / "ephemeral") + + mocker.patch( + "poetry.utils.env.ephemeral_environment" + ).return_value.__enter__.return_value = ephemeral_env + + with build_environment(poetry, project_env) as env: + assert env == project_env + assert not env.executed + + +def test_create_venv_project_name_empty_sets_correct_prompt( + project_factory: ProjectFactory, + config: Config, + mocker: MockerFixture, + config_virtualenvs_path: Path, +): + if "VIRTUAL_ENV" in os.environ: + del os.environ["VIRTUAL_ENV"] + + fixture = Path(__file__).parent.parent / "fixtures" / "no_name_project" + poetry = project_factory("no", source=fixture) + manager = EnvManager(poetry) + + poetry.package.python_versions = "^3.7" + venv_name = manager.generate_env_name("", str(poetry.file.parent)) + + mocker.patch("sys.version_info", (2, 7, 16)) + mocker.patch( + "subprocess.check_output", + side_effect=check_output_wrapper(Version.parse("3.7.5")), + ) + m = mocker.patch( + "poetry.utils.env.EnvManager.build_venv", side_effect=lambda *args, **kwargs: "" + ) + + manager.create_venv(NullIO()) + + m.assert_called_with( + config_virtualenvs_path / f"{venv_name}-py3.7", + executable="python", + flags={ + "always-copy": False, + "system-site-packages": False, + "no-pip": False, + "no-setuptools": False, + }, + prompt="virtualenv-py3.7", + ) diff --git a/vendor/poetry/tests/utils/test_env_site.py b/vendor/poetry/tests/utils/test_env_site.py index f25e2142..f3fdc9bb 100644 --- a/vendor/poetry/tests/utils/test_env_site.py +++ b/vendor/poetry/tests/utils/test_env_site.py @@ -1,13 +1,21 @@ +from __future__ import annotations + import uuid -from poetry.utils._compat import Path +from pathlib import Path +from typing import TYPE_CHECKING + from poetry.utils._compat import decode from poetry.utils.env import SitePackages -def test_env_site_simple(tmp_dir, mocker): +if TYPE_CHECKING: + from pytest_mock import MockerFixture + + +def test_env_site_simple(tmp_dir: str, mocker: MockerFixture): # emulate permission error when creating directory - mocker.patch("poetry.utils._compat.Path.mkdir", side_effect=OSError()) + mocker.patch("pathlib.Path.mkdir", side_effect=OSError()) site_packages = SitePackages(Path("/non-existent"), fallbacks=[Path(tmp_dir)]) candidates = site_packages.make_candidates(Path("hello.txt"), writable_only=True) hello = Path(tmp_dir) / "hello.txt" @@ -23,7 +31,7 @@ def test_env_site_simple(tmp_dir, mocker): assert not (site_packages.path / "hello.txt").exists() -def test_env_site_select_first(tmp_dir): +def test_env_site_select_first(tmp_dir: str): path = Path(tmp_dir) fallback = path / "fallback" fallback.mkdir(parents=True) diff --git a/vendor/poetry/tests/utils/test_exporter.py b/vendor/poetry/tests/utils/test_exporter.py deleted file mode 100644 index 6cae9928..00000000 --- a/vendor/poetry/tests/utils/test_exporter.py +++ /dev/null @@ -1,1469 +0,0 @@ -import sys - -import pytest - -from poetry.core.packages import dependency_from_pep_508 -from poetry.core.toml.file import TOMLFile -from poetry.factory import Factory -from poetry.packages import Locker as BaseLocker -from poetry.repositories.legacy_repository import LegacyRepository -from poetry.utils._compat import Path -from poetry.utils.exporter import 
Exporter - - -class Locker(BaseLocker): - def __init__(self): - self._lock = TOMLFile(Path.cwd().joinpath("poetry.lock")) - self._locked = True - self._content_hash = self._get_content_hash() - - def locked(self, is_locked=True): - self._locked = is_locked - - return self - - def mock_lock_data(self, data): - self._lock_data = data - - def is_locked(self): - return self._locked - - def is_fresh(self): - return True - - def _get_content_hash(self): - return "123456789" - - -@pytest.fixture -def working_directory(): - return Path(__file__).parent.parent.parent - - -@pytest.fixture(autouse=True) -def mock_path_cwd(mocker, working_directory): - yield mocker.patch( - "poetry.core.utils._compat.Path.cwd", return_value=working_directory - ) - - -@pytest.fixture() -def locker(): - return Locker() - - -@pytest.fixture -def poetry(fixture_dir, locker): - p = Factory().create_poetry(fixture_dir("sample_project")) - p._locker = locker - - return p - - -def set_package_requires(poetry, skip=None): - skip = skip or set() - packages = poetry.locker.locked_repository(with_dev_reqs=True).packages - poetry.package.requires = [ - pkg.to_dependency() - for pkg in packages - if pkg.category == "main" and pkg.name not in skip - ] - poetry.package.dev_requires = [ - pkg.to_dependency() - for pkg in packages - if pkg.category == "dev" and pkg.name not in skip - ] - - -def test_exporter_can_export_requirements_txt_with_standard_packages( - tmp_dir, poetry, mocker -): - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.3", - "category": "main", - "optional": False, - "python-versions": "*", - }, - { - "name": "bar", - "version": "4.5.6", - "category": "main", - "optional": False, - "python-versions": "*", - }, - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": [], "bar": []}, - }, - } - ) - set_package_requires(poetry) - - exporter = Exporter(poetry) - - exporter.export("requirements.txt", Path(tmp_dir), "requirements.txt") - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = """\ -bar==4.5.6 -foo==1.2.3 -""" - - assert expected == content - - -def test_exporter_can_export_requirements_txt_with_standard_packages_and_markers( - tmp_dir, poetry -): - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.3", - "category": "main", - "optional": False, - "python-versions": "*", - "marker": "python_version < '3.7'", - }, - { - "name": "bar", - "version": "4.5.6", - "category": "main", - "optional": False, - "python-versions": "*", - "marker": "extra =='foo'", - }, - { - "name": "baz", - "version": "7.8.9", - "category": "main", - "optional": False, - "python-versions": "*", - "marker": "sys_platform == 'win32'", - }, - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": [], "bar": [], "baz": []}, - }, - } - ) - set_package_requires(poetry) - - exporter = Exporter(poetry) - - exporter.export("requirements.txt", Path(tmp_dir), "requirements.txt") - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = """\ -bar==4.5.6 -baz==7.8.9; sys_platform == "win32" -foo==1.2.3; python_version < "3.7" -""" - - assert expected == content - - -def test_exporter_can_export_requirements_txt_with_nested_packages_and_markers( - tmp_dir, poetry -): - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "a", - "version": "1.2.3", - "category": "main", - 
"optional": False, - "python-versions": "*", - "marker": "python_version < '3.7'", - "dependencies": {"b": ">=0.0.0", "c": ">=0.0.0"}, - }, - { - "name": "b", - "version": "4.5.6", - "category": "main", - "optional": False, - "python-versions": "*", - "marker": "platform_system == 'Windows'", - "dependencies": {"d": ">=0.0.0"}, - }, - { - "name": "c", - "version": "7.8.9", - "category": "main", - "optional": False, - "python-versions": "*", - "marker": "sys_platform == 'win32'", - "dependencies": {"d": ">=0.0.0"}, - }, - { - "name": "d", - "version": "0.0.1", - "category": "main", - "optional": False, - "python-versions": "*", - }, - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"a": [], "b": [], "c": [], "d": []}, - }, - } - ) - set_package_requires(poetry, skip={"b", "c", "d"}) - - exporter = Exporter(poetry) - - exporter.export("requirements.txt", Path(tmp_dir), "requirements.txt") - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = { - "a": dependency_from_pep_508("a==1.2.3; python_version < '3.7'"), - "b": dependency_from_pep_508( - "b==4.5.6; platform_system == 'Windows' and python_version < '3.7'" - ), - "c": dependency_from_pep_508( - "c==7.8.9; sys_platform == 'win32' and python_version < '3.7'" - ), - "d": dependency_from_pep_508( - "d==0.0.1; python_version < '3.7' and platform_system == 'Windows' and sys_platform == 'win32'" - ), - } - - for line in content.strip().split("\n"): - dependency = dependency_from_pep_508(line) - assert dependency.name in expected - expected_dependency = expected.pop(dependency.name) - assert dependency == expected_dependency - assert dependency.marker == expected_dependency.marker - - assert expected == {} - - -@pytest.mark.parametrize( - "dev,lines", - [(False, ['a==1.2.3; python_version < "3.8"']), (True, ["a==1.2.3", "b==4.5.6"])], -) -def test_exporter_can_export_requirements_txt_with_nested_packages_and_markers_any( - tmp_dir, poetry, dev, lines -): - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "a", - "version": "1.2.3", - "category": "main", - "optional": False, - "python-versions": "*", - }, - { - "name": "b", - "version": "4.5.6", - "category": "dev", - "optional": False, - "python-versions": "*", - "dependencies": {"a": ">=1.2.3"}, - }, - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"a": [], "b": []}, - }, - } - ) - - poetry.package.requires = [ - Factory.create_dependency( - name="a", constraint=dict(version="^1.2.3", python="<3.8") - ), - ] - poetry.package.dev_requires = [ - Factory.create_dependency( - name="b", constraint=dict(version="^4.5.6"), category="dev" - ), - ] - - exporter = Exporter(poetry) - - exporter.export("requirements.txt", Path(tmp_dir), "requirements.txt", dev=dev) - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - assert content.strip() == "\n".join(lines) - - -def test_exporter_can_export_requirements_txt_with_standard_packages_and_hashes( - tmp_dir, poetry -): - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.3", - "category": "main", - "optional": False, - "python-versions": "*", - }, - { - "name": "bar", - "version": "4.5.6", - "category": "main", - "optional": False, - "python-versions": "*", - }, - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": ["12345"], "bar": ["67890"]}, - }, - } - ) - 
set_package_requires(poetry) - - exporter = Exporter(poetry) - - exporter.export("requirements.txt", Path(tmp_dir), "requirements.txt") - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = """\ -bar==4.5.6 \\ - --hash=sha256:67890 -foo==1.2.3 \\ - --hash=sha256:12345 -""" - - assert expected == content - - -def test_exporter_can_export_requirements_txt_with_standard_packages_and_hashes_disabled( - tmp_dir, poetry -): - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.3", - "category": "main", - "optional": False, - "python-versions": "*", - }, - { - "name": "bar", - "version": "4.5.6", - "category": "main", - "optional": False, - "python-versions": "*", - }, - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": ["12345"], "bar": ["67890"]}, - }, - } - ) - set_package_requires(poetry) - - exporter = Exporter(poetry) - - exporter.export( - "requirements.txt", Path(tmp_dir), "requirements.txt", with_hashes=False - ) - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = """\ -bar==4.5.6 -foo==1.2.3 -""" - - assert expected == content - - -def test_exporter_exports_requirements_txt_without_dev_packages_by_default( - tmp_dir, poetry -): - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.3", - "category": "main", - "optional": False, - "python-versions": "*", - }, - { - "name": "bar", - "version": "4.5.6", - "category": "dev", - "optional": False, - "python-versions": "*", - }, - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": ["12345"], "bar": ["67890"]}, - }, - } - ) - set_package_requires(poetry) - - exporter = Exporter(poetry) - - exporter.export("requirements.txt", Path(tmp_dir), "requirements.txt") - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = """\ -foo==1.2.3 \\ - --hash=sha256:12345 -""" - - assert expected == content - - -def test_exporter_exports_requirements_txt_with_dev_packages_if_opted_in( - tmp_dir, poetry -): - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.3", - "category": "main", - "optional": False, - "python-versions": "*", - }, - { - "name": "bar", - "version": "4.5.6", - "category": "dev", - "optional": False, - "python-versions": "*", - }, - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": ["12345"], "bar": ["67890"]}, - }, - } - ) - set_package_requires(poetry) - - exporter = Exporter(poetry) - - exporter.export("requirements.txt", Path(tmp_dir), "requirements.txt", dev=True) - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = """\ -bar==4.5.6 \\ - --hash=sha256:67890 -foo==1.2.3 \\ - --hash=sha256:12345 -""" - - assert expected == content - - -def test_exporter_exports_requirements_txt_without_optional_packages(tmp_dir, poetry): - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.3", - "category": "main", - "optional": False, - "python-versions": "*", - }, - { - "name": "bar", - "version": "4.5.6", - "category": "dev", - "optional": True, - "python-versions": "*", - }, - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": ["12345"], "bar": ["67890"]}, - }, - } - ) - set_package_requires(poetry) - - 
exporter = Exporter(poetry) - - exporter.export("requirements.txt", Path(tmp_dir), "requirements.txt", dev=True) - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = """\ -foo==1.2.3 \\ - --hash=sha256:12345 -""" - - assert expected == content - - -@pytest.mark.parametrize( - "extras,lines", - [ - (None, ["foo==1.2.3"]), - (False, ["foo==1.2.3"]), - (True, ["bar==4.5.6", "foo==1.2.3", "spam==0.1.0"]), - (["feature_bar"], ["bar==4.5.6", "foo==1.2.3", "spam==0.1.0"]), - ], -) -def test_exporter_exports_requirements_txt_with_optional_packages( - tmp_dir, poetry, extras, lines -): - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.3", - "category": "main", - "optional": False, - "python-versions": "*", - }, - { - "name": "bar", - "version": "4.5.6", - "category": "main", - "optional": True, - "python-versions": "*", - "dependencies": {"spam": ">=0.1"}, - }, - { - "name": "spam", - "version": "0.1.0", - "category": "main", - "optional": True, - "python-versions": "*", - }, - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": ["12345"], "bar": ["67890"], "spam": ["abcde"]}, - }, - "extras": {"feature_bar": ["bar"]}, - } - ) - set_package_requires(poetry) - - exporter = Exporter(poetry) - - exporter.export( - "requirements.txt", - Path(tmp_dir), - "requirements.txt", - dev=True, - with_hashes=False, - extras=extras, - ) - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = "\n".join(lines) - - assert content.strip() == expected - - -def test_exporter_can_export_requirements_txt_with_git_packages(tmp_dir, poetry): - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.3", - "category": "main", - "optional": False, - "python-versions": "*", - "source": { - "type": "git", - "url": "https://github.com/foo/foo.git", - "reference": "123456", - }, - } - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": []}, - }, - } - ) - set_package_requires(poetry) - - exporter = Exporter(poetry) - - exporter.export("requirements.txt", Path(tmp_dir), "requirements.txt") - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = """\ -foo @ git+https://github.com/foo/foo.git@123456 -""" - - assert expected == content - - -def test_exporter_can_export_requirements_txt_with_nested_packages(tmp_dir, poetry): - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.3", - "category": "main", - "optional": False, - "python-versions": "*", - "source": { - "type": "git", - "url": "https://github.com/foo/foo.git", - "reference": "123456", - }, - }, - { - "name": "bar", - "version": "4.5.6", - "category": "main", - "optional": False, - "python-versions": "*", - "dependencies": {"foo": "rev 123456"}, - }, - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": [], "bar": []}, - }, - } - ) - set_package_requires(poetry, skip={"foo"}) - - exporter = Exporter(poetry) - - exporter.export("requirements.txt", Path(tmp_dir), "requirements.txt") - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = """\ -bar==4.5.6 -foo @ git+https://github.com/foo/foo.git@123456 -""" - - assert expected == content - - -def test_exporter_can_export_requirements_txt_with_nested_packages_cyclic( - 
tmp_dir, poetry -): - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.3", - "category": "main", - "optional": False, - "python-versions": "*", - "dependencies": {"bar": {"version": "4.5.6"}}, - }, - { - "name": "bar", - "version": "4.5.6", - "category": "main", - "optional": False, - "python-versions": "*", - "dependencies": {"baz": {"version": "7.8.9"}}, - }, - { - "name": "baz", - "version": "7.8.9", - "category": "main", - "optional": False, - "python-versions": "*", - "dependencies": {"foo": {"version": "1.2.3"}}, - }, - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": [], "bar": [], "baz": []}, - }, - } - ) - set_package_requires(poetry, skip={"bar", "baz"}) - - exporter = Exporter(poetry) - - exporter.export("requirements.txt", Path(tmp_dir), "requirements.txt") - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = """\ -bar==4.5.6 -baz==7.8.9 -foo==1.2.3 -""" - - assert expected == content - - -def test_exporter_can_export_requirements_txt_with_nested_packages_and_multiple_markers( - tmp_dir, poetry -): - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.3", - "category": "main", - "optional": False, - "python-versions": "*", - "dependencies": { - "bar": [ - { - "version": ">=1.2.3,<7.8.10", - "markers": 'platform_system != "Windows"', - }, - { - "version": ">=4.5.6,<7.8.10", - "markers": 'platform_system == "Windows"', - }, - ] - }, - }, - { - "name": "bar", - "version": "7.8.9", - "category": "main", - "optional": True, - "python-versions": "*", - "dependencies": { - "baz": { - "version": "!=10.11.12", - "markers": 'platform_system == "Windows"', - } - }, - }, - { - "name": "baz", - "version": "10.11.13", - "category": "main", - "optional": True, - "python-versions": "*", - }, - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": [], "bar": [], "baz": []}, - }, - } - ) - set_package_requires(poetry) - - exporter = Exporter(poetry) - - exporter.export( - "requirements.txt", Path(tmp_dir), "requirements.txt", with_hashes=False - ) - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = """\ -bar==7.8.9 -baz==10.11.13; platform_system == "Windows" -foo==1.2.3 -""" - - assert expected == content - - -def test_exporter_can_export_requirements_txt_with_git_packages_and_markers( - tmp_dir, poetry -): - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.3", - "category": "main", - "optional": False, - "python-versions": "*", - "marker": "python_version < '3.7'", - "source": { - "type": "git", - "url": "https://github.com/foo/foo.git", - "reference": "123456", - }, - } - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": []}, - }, - } - ) - set_package_requires(poetry) - - exporter = Exporter(poetry) - - exporter.export("requirements.txt", Path(tmp_dir), "requirements.txt") - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = """\ -foo @ git+https://github.com/foo/foo.git@123456 ; python_version < "3.7" -""" - - assert expected == content - - -def test_exporter_can_export_requirements_txt_with_directory_packages( - tmp_dir, poetry, working_directory -): - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.3", - "category": "main", - 
"optional": False, - "python-versions": "*", - "source": { - "type": "directory", - "url": "tests/fixtures/sample_project", - "reference": "", - }, - } - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": []}, - }, - } - ) - set_package_requires(poetry) - - exporter = Exporter(poetry) - - exporter.export("requirements.txt", Path(tmp_dir), "requirements.txt") - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = """\ -foo @ {}/tests/fixtures/sample_project -""".format( - working_directory.as_uri() - ) - - assert expected == content - - -def test_exporter_can_export_requirements_txt_with_nested_directory_packages( - tmp_dir, poetry, working_directory -): - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.3", - "category": "main", - "optional": False, - "python-versions": "*", - "source": { - "type": "directory", - "url": "tests/fixtures/sample_project", - "reference": "", - }, - }, - { - "name": "bar", - "version": "4.5.6", - "category": "main", - "optional": False, - "python-versions": "*", - "source": { - "type": "directory", - "url": "tests/fixtures/sample_project/../project_with_nested_local/bar", - "reference": "", - }, - }, - { - "name": "baz", - "version": "7.8.9", - "category": "main", - "optional": False, - "python-versions": "*", - "source": { - "type": "directory", - "url": "tests/fixtures/sample_project/../project_with_nested_local/bar/..", - "reference": "", - }, - }, - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": [], "bar": [], "baz": []}, - }, - } - ) - set_package_requires(poetry) - - exporter = Exporter(poetry) - - exporter.export("requirements.txt", Path(tmp_dir), "requirements.txt") - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = """\ -bar @ {}/tests/fixtures/project_with_nested_local/bar -baz @ {}/tests/fixtures/project_with_nested_local -foo @ {}/tests/fixtures/sample_project -""".format( - working_directory.as_uri(), - working_directory.as_uri(), - working_directory.as_uri(), - ) - - assert expected == content - - -def test_exporter_can_export_requirements_txt_with_directory_packages_and_markers( - tmp_dir, poetry, working_directory -): - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.3", - "category": "main", - "optional": False, - "python-versions": "*", - "marker": "python_version < '3.7'", - "source": { - "type": "directory", - "url": "tests/fixtures/sample_project", - "reference": "", - }, - } - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": []}, - }, - } - ) - set_package_requires(poetry) - - exporter = Exporter(poetry) - - exporter.export("requirements.txt", Path(tmp_dir), "requirements.txt") - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = """\ -foo @ {}/tests/fixtures/sample_project; python_version < "3.7" -""".format( - working_directory.as_uri() - ) - - assert expected == content - - -def test_exporter_can_export_requirements_txt_with_file_packages( - tmp_dir, poetry, working_directory -): - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.3", - "category": "main", - "optional": False, - "python-versions": "*", - "source": { - "type": "file", - "url": "tests/fixtures/distributions/demo-0.1.0.tar.gz", - "reference": "", - 
}, - } - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": []}, - }, - } - ) - set_package_requires(poetry) - - exporter = Exporter(poetry) - - exporter.export("requirements.txt", Path(tmp_dir), "requirements.txt") - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = """\ -foo @ {}/tests/fixtures/distributions/demo-0.1.0.tar.gz -""".format( - working_directory.as_uri() - ) - - assert expected == content - - -def test_exporter_can_export_requirements_txt_with_file_packages_and_markers( - tmp_dir, poetry, working_directory -): - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.3", - "category": "main", - "optional": False, - "python-versions": "*", - "marker": "python_version < '3.7'", - "source": { - "type": "file", - "url": "tests/fixtures/distributions/demo-0.1.0.tar.gz", - "reference": "", - }, - } - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": []}, - }, - } - ) - set_package_requires(poetry) - - exporter = Exporter(poetry) - - exporter.export("requirements.txt", Path(tmp_dir), "requirements.txt") - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = """\ -foo @ {}/tests/fixtures/distributions/demo-0.1.0.tar.gz; python_version < "3.7" -""".format( - working_directory.as_uri() - ) - - assert expected == content - - -def test_exporter_exports_requirements_txt_with_legacy_packages(tmp_dir, poetry): - poetry.pool.add_repository( - LegacyRepository("custom", "https://example.com/simple",) - ) - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.3", - "category": "main", - "optional": False, - "python-versions": "*", - }, - { - "name": "bar", - "version": "4.5.6", - "category": "dev", - "optional": False, - "python-versions": "*", - "source": { - "type": "legacy", - "url": "https://example.com/simple", - "reference": "", - }, - }, - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": ["12345"], "bar": ["67890"]}, - }, - } - ) - set_package_requires(poetry) - - exporter = Exporter(poetry) - - exporter.export("requirements.txt", Path(tmp_dir), "requirements.txt", dev=True) - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = """\ ---extra-index-url https://example.com/simple - -bar==4.5.6 \\ - --hash=sha256:67890 -foo==1.2.3 \\ - --hash=sha256:12345 -""" - - assert expected == content - - -def test_exporter_exports_requirements_txt_with_legacy_packages_trusted_host( - tmp_dir, poetry -): - poetry.pool.add_repository(LegacyRepository("custom", "http://example.com/simple",)) - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "bar", - "version": "4.5.6", - "category": "dev", - "optional": False, - "python-versions": "*", - "source": { - "type": "legacy", - "url": "http://example.com/simple", - "reference": "", - }, - }, - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"bar": ["67890"]}, - }, - } - ) - set_package_requires(poetry) - exporter = Exporter(poetry) - - exporter.export("requirements.txt", Path(tmp_dir), "requirements.txt", dev=True) - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = """\ ---trusted-host example.com ---extra-index-url http://example.com/simple - -bar==4.5.6 \\ - 
--hash=sha256:67890 -""" - - assert expected == content - - -@pytest.mark.parametrize( - ("dev", "expected"), - [ - (True, ["bar==1.2.2", "baz==1.2.3", "foo==1.2.1"]), - (False, ["bar==1.2.2", "foo==1.2.1"]), - ], -) -def test_exporter_exports_requirements_txt_with_dev_extras( - tmp_dir, poetry, dev, expected -): - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.1", - "category": "main", - "optional": False, - "python-versions": "*", - }, - { - "name": "bar", - "version": "1.2.2", - "category": "main", - "optional": False, - "python-versions": "*", - "dependencies": { - "baz": { - "version": ">=0.1.0", - "optional": True, - "markers": "extra == 'baz'", - } - }, - "extras": {"baz": ["baz (>=0.1.0)"]}, - }, - { - "name": "baz", - "version": "1.2.3", - "category": "dev", - "optional": False, - "python-versions": "*", - }, - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": [], "bar": [], "baz": []}, - }, - } - ) - set_package_requires(poetry) - - exporter = Exporter(poetry) - - exporter.export("requirements.txt", Path(tmp_dir), "requirements.txt", dev=dev) - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - assert content == "{}\n".format("\n".join(expected)) - - -def test_exporter_exports_requirements_txt_with_legacy_packages_and_duplicate_sources( - tmp_dir, poetry -): - poetry.pool.add_repository( - LegacyRepository("custom", "https://example.com/simple",) - ) - poetry.pool.add_repository(LegacyRepository("custom", "https://foobaz.com/simple",)) - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.3", - "category": "main", - "optional": False, - "python-versions": "*", - "source": { - "type": "legacy", - "url": "https://example.com/simple", - "reference": "", - }, - }, - { - "name": "bar", - "version": "4.5.6", - "category": "dev", - "optional": False, - "python-versions": "*", - "source": { - "type": "legacy", - "url": "https://example.com/simple", - "reference": "", - }, - }, - { - "name": "baz", - "version": "7.8.9", - "category": "dev", - "optional": False, - "python-versions": "*", - "source": { - "type": "legacy", - "url": "https://foobaz.com/simple", - "reference": "", - }, - }, - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": ["12345"], "bar": ["67890"], "baz": ["24680"]}, - }, - } - ) - set_package_requires(poetry) - - exporter = Exporter(poetry) - - exporter.export("requirements.txt", Path(tmp_dir), "requirements.txt", dev=True) - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = """\ ---extra-index-url https://example.com/simple ---extra-index-url https://foobaz.com/simple - -bar==4.5.6 \\ - --hash=sha256:67890 -baz==7.8.9 \\ - --hash=sha256:24680 -foo==1.2.3 \\ - --hash=sha256:12345 -""" - - assert expected == content - - -def test_exporter_exports_requirements_txt_with_legacy_packages_and_credentials( - tmp_dir, poetry, config -): - poetry.config.merge( - { - "repositories": {"custom": {"url": "https://example.com/simple"}}, - "http-basic": {"custom": {"username": "foo", "password": "bar"}}, - } - ) - poetry.pool.add_repository( - LegacyRepository("custom", "https://example.com/simple", config=poetry.config) - ) - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.3", - "category": "main", - "optional": False, - "python-versions": "*", - }, - { - "name": "bar", - 
"version": "4.5.6", - "category": "dev", - "optional": False, - "python-versions": "*", - "source": { - "type": "legacy", - "url": "https://example.com/simple", - "reference": "", - }, - }, - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": ["12345"], "bar": ["67890"]}, - }, - } - ) - set_package_requires(poetry) - - exporter = Exporter(poetry) - - exporter.export( - "requirements.txt", - Path(tmp_dir), - "requirements.txt", - dev=True, - with_credentials=True, - ) - - with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f: - content = f.read() - - expected = """\ ---extra-index-url https://foo:bar@example.com/simple - -bar==4.5.6 \\ - --hash=sha256:67890 -foo==1.2.3 \\ - --hash=sha256:12345 -""" - - assert expected == content - - -def test_exporter_exports_requirements_txt_to_standard_output(tmp_dir, poetry, capsys): - poetry.locker.mock_lock_data( - { - "package": [ - { - "name": "foo", - "version": "1.2.3", - "category": "main", - "optional": False, - "python-versions": "*", - }, - { - "name": "bar", - "version": "4.5.6", - "category": "main", - "optional": False, - "python-versions": "*", - }, - ], - "metadata": { - "python-versions": "*", - "content-hash": "123456789", - "hashes": {"foo": [], "bar": []}, - }, - } - ) - set_package_requires(poetry) - - exporter = Exporter(poetry) - - exporter.export("requirements.txt", Path(tmp_dir), sys.stdout) - - out, err = capsys.readouterr() - expected = """\ -bar==4.5.6 -foo==1.2.3 -""" - - assert out == expected diff --git a/vendor/poetry/tests/utils/test_extras.py b/vendor/poetry/tests/utils/test_extras.py index e06e4809..77701698 100644 --- a/vendor/poetry/tests/utils/test_extras.py +++ b/vendor/poetry/tests/utils/test_extras.py @@ -1,6 +1,9 @@ +from __future__ import annotations + import pytest -from poetry.core.packages import Package +from poetry.core.packages.package import Package + from poetry.factory import Factory from poetry.utils.extras import get_extra_package_names @@ -18,7 +21,7 @@ @pytest.mark.parametrize( - "packages,extras,extra_names,expected_extra_package_names", + ["packages", "extras", "extra_names", "expected_extra_package_names"], [ # Empty edge case ([], {}, [], []), @@ -40,7 +43,7 @@ ["group0"], ["bar", "foo"], ), - # Selecting multpile extras should get us the union of all package names + # Selecting multiple extras should get us the union of all package names ( [_PACKAGE_FOO, _PACKAGE_SPAM, _PACKAGE_BAR], {"group0": ["bar"], "group1": ["spam"]}, @@ -56,8 +59,12 @@ ], ) def test_get_extra_package_names( - packages, extras, extra_names, expected_extra_package_names -): - assert expected_extra_package_names == list( - get_extra_package_names(packages, extras, extra_names) + packages: list[Package], + extras: dict[str, list[str]], + extra_names: list[str], + expected_extra_package_names: list[str], +) -> None: + assert ( + list(get_extra_package_names(packages, extras, extra_names)) + == expected_extra_package_names ) diff --git a/vendor/poetry/tests/utils/test_helpers.py b/vendor/poetry/tests/utils/test_helpers.py index 83bf2030..6aacaa18 100644 --- a/vendor/poetry/tests/utils/test_helpers.py +++ b/vendor/poetry/tests/utils/test_helpers.py @@ -1,7 +1,8 @@ +from __future__ import annotations + from poetry.core.utils.helpers import parse_requires -from poetry.utils._compat import Path -from poetry.utils.helpers import get_cert -from poetry.utils.helpers import get_client_cert + +from poetry.utils.helpers import safe_extra def test_parse_requires(): @@ -20,7 
+21,8 @@ def test_parse_requires(): pyparsing>=2.2.0.0,<3.0.0.0 requests-toolbelt>=0.8.0.0,<0.9.0.0 -[:(python_version >= "2.7.0.0" and python_version < "2.8.0.0") or (python_version >= "3.4.0.0" and python_version < "3.5.0.0")] +[:(python_version >= "2.7.0.0" and python_version < "2.8.0.0")\ + or (python_version >= "3.4.0.0" and python_version < "3.5.0.0")] typing>=3.6.0.0,<4.0.0.0 [:python_version >= "2.7.0.0" and python_version < "2.8.0.0"] @@ -32,8 +34,9 @@ def test_parse_requires(): [dev] isort@ git+git://github.com/timothycrosley/isort.git@e63ae06ec7d70b06df9e528357650281a3d3ec22#egg=isort -""" +""" # noqa: E501 result = parse_requires(requires) + # fmt: off expected = [ "jsonschema>=2.6.0.0,<3.0.0.0", "lockfile>=0.12.0.0,<0.13.0.0", @@ -48,24 +51,18 @@ def test_parse_requires(): "msgpack-python>=0.5.0.0,<0.6.0.0", "pyparsing>=2.2.0.0,<3.0.0.0", "requests-toolbelt>=0.8.0.0,<0.9.0.0", - 'typing>=3.6.0.0,<4.0.0.0 ; (python_version >= "2.7.0.0" and python_version < "2.8.0.0") or (python_version >= "3.4.0.0" and python_version < "3.5.0.0")', - 'virtualenv>=15.2.0.0,<16.0.0.0 ; python_version >= "2.7.0.0" and python_version < "2.8.0.0"', - 'pathlib2>=2.3.0.0,<3.0.0.0 ; python_version >= "2.7.0.0" and python_version < "2.8.0.0"', - 'zipfile36>=0.1.0.0,<0.2.0.0 ; python_version >= "3.4.0.0" and python_version < "3.6.0.0"', - 'isort@ git+git://github.com/timothycrosley/isort.git@e63ae06ec7d70b06df9e528357650281a3d3ec22#egg=isort ; extra == "dev"', + 'typing>=3.6.0.0,<4.0.0.0 ; (python_version >= "2.7.0.0" and python_version < "2.8.0.0") or (python_version >= "3.4.0.0" and python_version < "3.5.0.0")', # noqa: E501 + 'virtualenv>=15.2.0.0,<16.0.0.0 ; python_version >= "2.7.0.0" and python_version < "2.8.0.0"', # noqa: E501 + 'pathlib2>=2.3.0.0,<3.0.0.0 ; python_version >= "2.7.0.0" and python_version < "2.8.0.0"', # noqa: E501 + 'zipfile36>=0.1.0.0,<0.2.0.0 ; python_version >= "3.4.0.0" and python_version < "3.6.0.0"', # noqa: E501 + 'isort@ git+git://github.com/timothycrosley/isort.git@e63ae06ec7d70b06df9e528357650281a3d3ec22#egg=isort ; extra == "dev"', # noqa: E501 ] + # fmt: on assert result == expected -def test_get_cert(config): - ca_cert = "path/to/ca.pem" - config.merge({"certificates": {"foo": {"cert": ca_cert}}}) - - assert get_cert(config, "foo") == Path(ca_cert) - - -def test_get_client_cert(config): - client_cert = "path/to/client.pem" - config.merge({"certificates": {"foo": {"client-cert": client_cert}}}) - - assert get_client_cert(config, "foo") == Path(client_cert) +def test_safe_extra(): + extra = "pandas.CSVDataSet" + result = safe_extra(extra) + expected = "pandas.csvdataset" + assert result == expected diff --git a/vendor/poetry/tests/utils/test_password_manager.py b/vendor/poetry/tests/utils/test_password_manager.py index 31d5812c..468ef14f 100644 --- a/vendor/poetry/tests/utils/test_password_manager.py +++ b/vendor/poetry/tests/utils/test_password_manager.py @@ -1,104 +1,69 @@ +from __future__ import annotations + import os -import pytest +from typing import TYPE_CHECKING -from keyring.backend import KeyringBackend +import pytest -from poetry.utils.password_manager import KeyRing -from poetry.utils.password_manager import KeyRingError from poetry.utils.password_manager import PasswordManager +from poetry.utils.password_manager import PoetryKeyring +from poetry.utils.password_manager import PoetryKeyringError -class DummyBackend(KeyringBackend): - def __init__(self): - self._passwords = {} - - @classmethod - def priority(cls): - return 42 - - def set_password(self, 
service, username, password): - self._passwords[service] = {username: password} - - def get_password(self, service, username): - return self._passwords.get(service, {}).get(username) - - def delete_password(self, service, username): - if service in self._passwords and username in self._passwords[service]: - del self._passwords[service][username] - - -@pytest.fixture() -def backend(): - return DummyBackend() - - -@pytest.fixture() -def mock_available_backend(backend): - import keyring - - keyring.set_keyring(backend) - - -@pytest.fixture() -def mock_unavailable_backend(): - import keyring - - from keyring.backends.fail import Keyring +if TYPE_CHECKING: + from pytest_mock import MockerFixture - keyring.set_keyring(Keyring()) + from tests.conftest import Config + from tests.conftest import DummyBackend -@pytest.fixture() -def mock_chainer_backend(mocker): - from keyring.backends.fail import Keyring - - mocker.patch("keyring.backend.get_all_keyring", [Keyring()]) - import keyring - - from keyring.backends.chainer import ChainerBackend - - keyring.set_keyring(ChainerBackend()) - - -def test_set_http_password(config, mock_available_backend, backend): +def test_set_http_password( + config: Config, with_simple_keyring: None, dummy_keyring: DummyBackend +): manager = PasswordManager(config) assert manager.keyring.is_available() manager.set_http_password("foo", "bar", "baz") - assert "baz" == backend.get_password("poetry-repository-foo", "bar") + assert dummy_keyring.get_password("poetry-repository-foo", "bar") == "baz" auth = config.get("http-basic.foo") - assert "bar" == auth["username"] + assert auth["username"] == "bar" assert "password" not in auth -def test_get_http_auth(config, mock_available_backend, backend): - backend.set_password("poetry-repository-foo", "bar", "baz") +def test_get_http_auth( + config: Config, with_simple_keyring: None, dummy_keyring: DummyBackend +): + dummy_keyring.set_password("poetry-repository-foo", "bar", "baz") config.auth_config_source.add_property("http-basic.foo", {"username": "bar"}) manager = PasswordManager(config) assert manager.keyring.is_available() auth = manager.get_http_auth("foo") - assert "bar" == auth["username"] - assert "baz" == auth["password"] + assert auth["username"] == "bar" + assert auth["password"] == "baz" -def test_delete_http_password(config, mock_available_backend, backend): - backend.set_password("poetry-repository-foo", "bar", "baz") +def test_delete_http_password( + config: Config, with_simple_keyring: None, dummy_keyring: DummyBackend +): + dummy_keyring.set_password("poetry-repository-foo", "bar", "baz") config.auth_config_source.add_property("http-basic.foo", {"username": "bar"}) manager = PasswordManager(config) assert manager.keyring.is_available() manager.delete_http_password("foo") - assert backend.get_password("poetry-repository-foo", "bar") is None + assert dummy_keyring.get_password("poetry-repository-foo", "bar") is None assert config.get("http-basic.foo") is None -def test_set_pypi_token(config, mock_available_backend, backend): +def test_set_pypi_token( + config: Config, with_simple_keyring: None, dummy_keyring: DummyBackend +): manager = PasswordManager(config) assert manager.keyring.is_available() @@ -106,39 +71,47 @@ def test_set_pypi_token(config, mock_available_backend, backend): assert config.get("pypi-token.foo") is None - assert "baz" == backend.get_password("poetry-repository-foo", "__token__") + assert dummy_keyring.get_password("poetry-repository-foo", "__token__") == "baz" -def test_get_pypi_token(config, 
mock_available_backend, backend): - backend.set_password("poetry-repository-foo", "__token__", "baz") +def test_get_pypi_token( + config: Config, with_simple_keyring: None, dummy_keyring: DummyBackend +): + dummy_keyring.set_password("poetry-repository-foo", "__token__", "baz") manager = PasswordManager(config) assert manager.keyring.is_available() - assert "baz" == manager.get_pypi_token("foo") + assert manager.get_pypi_token("foo") == "baz" -def test_delete_pypi_token(config, mock_available_backend, backend): - backend.set_password("poetry-repository-foo", "__token__", "baz") +def test_delete_pypi_token( + config: Config, with_simple_keyring: None, dummy_keyring: DummyBackend +): + dummy_keyring.set_password("poetry-repository-foo", "__token__", "baz") manager = PasswordManager(config) assert manager.keyring.is_available() manager.delete_pypi_token("foo") - assert backend.get_password("poetry-repository-foo", "__token__") is None + assert dummy_keyring.get_password("poetry-repository-foo", "__token__") is None -def test_set_http_password_with_unavailable_backend(config, mock_unavailable_backend): +def test_set_http_password_with_unavailable_backend( + config: Config, with_fail_keyring: None +): manager = PasswordManager(config) assert not manager.keyring.is_available() manager.set_http_password("foo", "bar", "baz") auth = config.get("http-basic.foo") - assert "bar" == auth["username"] - assert "baz" == auth["password"] + assert auth["username"] == "bar" + assert auth["password"] == "baz" -def test_get_http_auth_with_unavailable_backend(config, mock_unavailable_backend): +def test_get_http_auth_with_unavailable_backend( + config: Config, with_fail_keyring: None +): config.auth_config_source.add_property( "http-basic.foo", {"username": "bar", "password": "baz"} ) @@ -147,12 +120,12 @@ def test_get_http_auth_with_unavailable_backend(config, mock_unavailable_backend assert not manager.keyring.is_available() auth = manager.get_http_auth("foo") - assert "bar" == auth["username"] - assert "baz" == auth["password"] + assert auth["username"] == "bar" + assert auth["password"] == "baz" def test_delete_http_password_with_unavailable_backend( - config, mock_unavailable_backend + config: Config, with_fail_keyring: None ): config.auth_config_source.add_property( "http-basic.foo", {"username": "bar", "password": "baz"} @@ -165,24 +138,30 @@ def test_delete_http_password_with_unavailable_backend( assert config.get("http-basic.foo") is None -def test_set_pypi_token_with_unavailable_backend(config, mock_unavailable_backend): +def test_set_pypi_token_with_unavailable_backend( + config: Config, with_fail_keyring: None +): manager = PasswordManager(config) assert not manager.keyring.is_available() manager.set_pypi_token("foo", "baz") - assert "baz" == config.get("pypi-token.foo") + assert config.get("pypi-token.foo") == "baz" -def test_get_pypi_token_with_unavailable_backend(config, mock_unavailable_backend): +def test_get_pypi_token_with_unavailable_backend( + config: Config, with_fail_keyring: None +): config.auth_config_source.add_property("pypi-token.foo", "baz") manager = PasswordManager(config) assert not manager.keyring.is_available() - assert "baz" == manager.get_pypi_token("foo") + assert manager.get_pypi_token("foo") == "baz" -def test_delete_pypi_token_with_unavailable_backend(config, mock_unavailable_backend): +def test_delete_pypi_token_with_unavailable_backend( + config: Config, with_fail_keyring: None +): config.auth_config_source.add_property("pypi-token.foo", "baz") manager = 
PasswordManager(config) @@ -192,30 +171,56 @@ def test_delete_pypi_token_with_unavailable_backend(config, mock_unavailable_bac assert config.get("pypi-token.foo") is None -def test_keyring_raises_errors_on_keyring_errors(mocker, mock_unavailable_backend): - mocker.patch("poetry.utils.password_manager.KeyRing._check") +def test_keyring_raises_errors_on_keyring_errors( + mocker: MockerFixture, with_fail_keyring: None +): + mocker.patch("poetry.utils.password_manager.PoetryKeyring._check") - key_ring = KeyRing("poetry") - with pytest.raises(KeyRingError): + key_ring = PoetryKeyring("poetry") + with pytest.raises(PoetryKeyringError): key_ring.set_password("foo", "bar", "baz") - with pytest.raises(KeyRingError): + with pytest.raises(PoetryKeyringError): key_ring.get_password("foo", "bar") - with pytest.raises(KeyRingError): + with pytest.raises(PoetryKeyringError): key_ring.delete_password("foo", "bar") -def test_keyring_with_chainer_backend_and_not_compatible_only_should_be_unavailable( - mock_chainer_backend, +def test_keyring_with_chainer_backend_and_fail_keyring_should_be_unavailable( + with_chained_fail_keyring: None, +): + key_ring = PoetryKeyring("poetry") + + assert not key_ring.is_available() + + +def test_keyring_with_chainer_backend_and_null_keyring_should_be_unavailable( + with_chained_null_keyring: None, ): - key_ring = KeyRing("poetry") + key_ring = PoetryKeyring("poetry") + + assert not key_ring.is_available() + + +def test_null_keyring_should_be_unavailable( + with_null_keyring: None, +): + key_ring = PoetryKeyring("poetry") + + assert not key_ring.is_available() + + +def test_fail_keyring_should_be_unavailable( + with_fail_keyring: None, +): + key_ring = PoetryKeyring("poetry") assert not key_ring.is_available() def test_get_http_auth_from_environment_variables( - environ, config, mock_available_backend + environ: None, config: Config, with_simple_keyring: None ): os.environ["POETRY_HTTP_BASIC_FOO_USERNAME"] = "bar" os.environ["POETRY_HTTP_BASIC_FOO_PASSWORD"] = "baz" @@ -224,5 +229,33 @@ def test_get_http_auth_from_environment_variables( auth = manager.get_http_auth("foo") - assert "bar" == auth["username"] - assert "baz" == auth["password"] + assert auth["username"] == "bar" + assert auth["password"] == "baz" + + +def test_get_pypi_token_with_env_var_positive( + mocker: MockerFixture, + config: Config, + with_simple_keyring: None, + dummy_keyring: DummyBackend, +): + sample_token = "sampletoken-1234" + repo_name = "foo" + manager = PasswordManager(config) + mocker.patch.dict( + os.environ, + {f"POETRY_PYPI_TOKEN_{repo_name.upper()}": sample_token}, + ) + + assert manager.get_pypi_token(repo_name) == sample_token + + +def test_get_pypi_token_with_env_var_not_available( + config: Config, with_simple_keyring: None, dummy_keyring: DummyBackend +): + repo_name = "foo" + manager = PasswordManager(config) + + result_token = manager.get_pypi_token(repo_name) + + assert result_token is None diff --git a/vendor/poetry/tests/utils/test_patterns.py b/vendor/poetry/tests/utils/test_patterns.py index 9f43db27..098a41d3 100644 --- a/vendor/poetry/tests/utils/test_patterns.py +++ b/vendor/poetry/tests/utils/test_patterns.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import pytest from poetry.utils import patterns @@ -32,7 +34,7 @@ ), ], ) -def test_wheel_file_re(filename, expected): +def test_wheel_file_re(filename: str, expected: dict[str, str | None]): match = patterns.wheel_file_re.match(filename) groups = match.groupdict() diff --git a/vendor/poetry/tests/utils/test_pip.py 
b/vendor/poetry/tests/utils/test_pip.py new file mode 100644 index 00000000..c6bf8422 --- /dev/null +++ b/vendor/poetry/tests/utils/test_pip.py @@ -0,0 +1,38 @@ +from __future__ import annotations + +import subprocess + +from typing import TYPE_CHECKING + +import pytest + +from poetry.utils.pip import pip_install + + +if TYPE_CHECKING: + from pytest_mock import MockerFixture + + from poetry.utils.env import VirtualEnv + from tests.types import FixtureDirGetter + + +def test_pip_install_successful( + tmp_dir: str, tmp_venv: VirtualEnv, fixture_dir: FixtureDirGetter +): + file_path = fixture_dir("distributions/demo-0.1.0-py2.py3-none-any.whl") + result = pip_install(file_path, tmp_venv) + + assert "Successfully installed demo-0.1.0" in result + + +def test_pip_install_with_keyboard_interrupt( + tmp_dir: str, + tmp_venv: VirtualEnv, + fixture_dir: FixtureDirGetter, + mocker: MockerFixture, +): + file_path = fixture_dir("distributions/demo-0.1.0-py2.py3-none-any.whl") + mocker.patch("subprocess.run", side_effect=KeyboardInterrupt()) + with pytest.raises(KeyboardInterrupt): + pip_install(file_path, tmp_venv) + subprocess.run.assert_called_once() diff --git a/vendor/poetry/tests/utils/test_setup_reader.py b/vendor/poetry/tests/utils/test_setup_reader.py index 68fc005a..d72e5386 100644 --- a/vendor/poetry/tests/utils/test_setup_reader.py +++ b/vendor/poetry/tests/utils/test_setup_reader.py @@ -1,22 +1,31 @@ +from __future__ import annotations + import os +from typing import TYPE_CHECKING + import pytest -from poetry.core.semver.exceptions import ParseVersionError -from poetry.utils._compat import PY35 +from poetry.core.version.exceptions import InvalidVersion + from poetry.utils.setup_reader import SetupReader +if TYPE_CHECKING: + from collections.abc import Callable + + @pytest.fixture() -def setup(): - def _setup(name): +def setup() -> Callable[[str], str]: + def _setup(name: str) -> str: return os.path.join(os.path.dirname(__file__), "fixtures", "setups", name) return _setup -@pytest.mark.skipif(not PY35, reason="AST parsing does not work for Python <3.4") -def test_setup_reader_read_first_level_setup_call_with_direct_types(setup): +def test_setup_reader_read_first_level_setup_call_with_direct_types( + setup: Callable[[str], str] +): result = SetupReader.read_from_directory(setup("flask")) expected_name = "Flask" @@ -41,15 +50,16 @@ def test_setup_reader_read_first_level_setup_call_with_direct_types(setup): } expected_python_requires = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" - assert expected_name == result["name"] - assert expected_version == result["version"] - assert expected_install_requires == result["install_requires"] - assert expected_extras_require == result["extras_require"] - assert expected_python_requires == result["python_requires"] + assert result["name"] == expected_name + assert result["version"] == expected_version + assert result["install_requires"] == expected_install_requires + assert result["extras_require"] == expected_extras_require + assert result["python_requires"] == expected_python_requires -@pytest.mark.skipif(not PY35, reason="AST parsing does not work for Python <3.4") -def test_setup_reader_read_first_level_setup_call_with_variables(setup): +def test_setup_reader_read_first_level_setup_call_with_variables( + setup: Callable[[str], str] +): result = SetupReader.read_from_directory(setup("requests")) expected_name = None @@ -67,15 +77,16 @@ def test_setup_reader_read_first_level_setup_call_with_variables(setup): } expected_python_requires = ">=2.7, !=3.0.*, 
!=3.1.*, !=3.2.*, !=3.3.*" - assert expected_name == result["name"] - assert expected_version == result["version"] - assert expected_install_requires == result["install_requires"] - assert expected_extras_require == result["extras_require"] - assert expected_python_requires == result["python_requires"] + assert result["name"] == expected_name + assert result["version"] == expected_version + assert result["install_requires"] == expected_install_requires + assert result["extras_require"] == expected_extras_require + assert result["python_requires"] == expected_python_requires -@pytest.mark.skipif(not PY35, reason="AST parsing does not work for Python <3.4") -def test_setup_reader_read_sub_level_setup_call_with_direct_types(setup): +def test_setup_reader_read_sub_level_setup_call_with_direct_types( + setup: Callable[[str], str] +): result = SetupReader.read_from_directory(setup("sqlalchemy")) expected_name = "SQLAlchemy" @@ -92,14 +103,14 @@ def test_setup_reader_read_sub_level_setup_call_with_direct_types(setup): "mssql_pymssql": ["pymssql"], } - assert expected_name == result["name"] - assert expected_version == result["version"] - assert expected_install_requires == result["install_requires"] - assert expected_extras_require == result["extras_require"] + assert result["name"] == expected_name + assert result["version"] == expected_version + assert result["install_requires"] == expected_install_requires + assert result["extras_require"] == expected_extras_require assert result["python_requires"] is None -def test_setup_reader_read_setup_cfg(setup): +def test_setup_reader_read_setup_cfg(setup: Callable[[str], str]): result = SetupReader.read_from_directory(setup("with-setup-cfg")) expected_name = "with-setup-cfg" @@ -111,20 +122,19 @@ def test_setup_reader_read_setup_cfg(setup): } expected_python_requires = ">=2.6,!=3.0,!=3.1,!=3.2,!=3.3" - assert expected_name == result["name"] - assert expected_version == result["version"] - assert expected_install_requires == result["install_requires"] - assert expected_extras_require == result["extras_require"] - assert expected_python_requires == result["python_requires"] + assert result["name"] == expected_name + assert result["version"] == expected_version + assert result["install_requires"] == expected_install_requires + assert result["extras_require"] == expected_extras_require + assert result["python_requires"] == expected_python_requires -def test_setup_reader_read_setup_cfg_with_attr(setup): - with pytest.raises(ParseVersionError): +def test_setup_reader_read_setup_cfg_with_attr(setup: Callable[[str], str]): + with pytest.raises(InvalidVersion): SetupReader.read_from_directory(setup("with-setup-cfg-attr")) -@pytest.mark.skipif(not PY35, reason="AST parsing does not work for Python <3.4") -def test_setup_reader_read_setup_kwargs(setup): +def test_setup_reader_read_setup_kwargs(setup: Callable[[str], str]): result = SetupReader.read_from_directory(setup("pendulum")) expected_name = "pendulum" @@ -133,15 +143,14 @@ def test_setup_reader_read_setup_kwargs(setup): expected_extras_require = {':python_version < "3.5"': ["typing>=3.6,<4.0"]} expected_python_requires = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - assert expected_name == result["name"] - assert expected_version == result["version"] - assert expected_install_requires == result["install_requires"] - assert expected_extras_require == result["extras_require"] - assert expected_python_requires == result["python_requires"] + assert result["name"] == expected_name + assert result["version"] == 
expected_version + assert result["install_requires"] == expected_install_requires + assert result["extras_require"] == expected_extras_require + assert result["python_requires"] == expected_python_requires -@pytest.mark.skipif(not PY35, reason="AST parsing does not work for Python <3.4") -def test_setup_reader_read_setup_call_in_main(setup): +def test_setup_reader_read_setup_call_in_main(setup: Callable[[str], str]): result = SetupReader.read_from_directory(setup("pyyaml")) expected_name = "PyYAML" @@ -150,15 +159,14 @@ def test_setup_reader_read_setup_call_in_main(setup): expected_extras_require = {} expected_python_requires = None - assert expected_name == result["name"] - assert expected_version == result["version"] - assert expected_install_requires == result["install_requires"] - assert expected_extras_require == result["extras_require"] - assert expected_python_requires == result["python_requires"] + assert result["name"] == expected_name + assert result["version"] == expected_version + assert result["install_requires"] == expected_install_requires + assert result["extras_require"] == expected_extras_require + assert result["python_requires"] == expected_python_requires -@pytest.mark.skipif(not PY35, reason="AST parsing does not work for Python <3.4") -def test_setup_reader_read_extras_require_with_variables(setup): +def test_setup_reader_read_extras_require_with_variables(setup: Callable[[str], str]): result = SetupReader.read_from_directory(setup("extras_require_with_vars")) expected_name = "extras_require_with_vars" @@ -167,19 +175,18 @@ def test_setup_reader_read_extras_require_with_variables(setup): expected_extras_require = {"test": ["pytest"]} expected_python_requires = None - assert expected_name == result["name"] - assert expected_version == result["version"] - assert expected_install_requires == result["install_requires"] - assert expected_extras_require == result["extras_require"] - assert expected_python_requires == result["python_requires"] + assert result["name"] == expected_name + assert result["version"] == expected_version + assert result["install_requires"] == expected_install_requires + assert result["extras_require"] == expected_extras_require + assert result["python_requires"] == expected_python_requires -@pytest.mark.skipif(not PY35, reason="AST parsing does not work for Python <3.4") -def test_setup_reader_setuptools(setup): +def test_setup_reader_setuptools(setup: Callable[[str], str]): result = SetupReader.read_from_directory(setup("setuptools_setup")) expected_name = "my_package" expected_version = "0.1.2" - assert expected_name == result["name"] - assert expected_version == result["version"] + assert result["name"] == expected_name + assert result["version"] == expected_version diff --git a/vendor/poetry/tests/utils/test_source.py b/vendor/poetry/tests/utils/test_source.py new file mode 100644 index 00000000..a970b726 --- /dev/null +++ b/vendor/poetry/tests/utils/test_source.py @@ -0,0 +1,40 @@ +from __future__ import annotations + +import pytest + +from tomlkit.container import Container +from tomlkit.items import Table +from tomlkit.items import Trivia + +from poetry.config.source import Source +from poetry.utils.source import source_to_table + + +@pytest.mark.parametrize( + "source,table_body", + [ + ( + Source("foo", "https://example.com"), + { + "default": False, + "name": "foo", + "secondary": False, + "url": "https://example.com", + }, + ), + ( + Source("bar", "https://example.com/bar", True, True), + { + "default": True, + "name": "bar", + 
"secondary": True, + "url": "https://example.com/bar", + }, + ), + ], +) +def test_source_to_table(source: Source, table_body: dict[str, str | bool]): + table = Table(Container(), Trivia(), False) + table._value = table_body + + assert source_to_table(source) == table diff --git a/vendor/poetry/tox.ini b/vendor/poetry/tox.ini index 24aae8e0..19c8f11d 100644 --- a/vendor/poetry/tox.ini +++ b/vendor/poetry/tox.ini @@ -1,10 +1,10 @@ [tox] -minversion = 3.3.0 +minversion = 3.18.0 isolated_build = True -envlist = py27, py35, py36, py37, py38, py39, py310 +envlist = py37, py38, py39 [testenv] -whitelist_externals = poetry +allowlist_externals = poetry commands = poetry install -vv --no-root poetry run pytest {posargs} tests/ diff --git a/vendor/requests/.coveragerc b/vendor/requests/.coveragerc deleted file mode 100644 index e9b51ab1..00000000 --- a/vendor/requests/.coveragerc +++ /dev/null @@ -1,2 +0,0 @@ -[run] -omit = requests/packages/* \ No newline at end of file diff --git a/vendor/requests/.github/CODE_OF_CONDUCT.md b/vendor/requests/.github/CODE_OF_CONDUCT.md deleted file mode 100644 index ff7f1066..00000000 --- a/vendor/requests/.github/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,6 +0,0 @@ -# Treat each other well - -Everyone participating in the _requests_ project, and in particular in the issue tracker, -pull requests, and social media activity, is expected to treat other people with respect -and more generally to follow the guidelines articulated in the -[Python Community Code of Conduct](https://www.python.org/psf/codeofconduct/). diff --git a/vendor/requests/.github/CONTRIBUTING.md b/vendor/requests/.github/CONTRIBUTING.md deleted file mode 100644 index 3470dfee..00000000 --- a/vendor/requests/.github/CONTRIBUTING.md +++ /dev/null @@ -1,54 +0,0 @@ -# Contribution Guidelines - -Before opening any issues or proposing any pull requests, please read -our [Contributor's Guide](https://requests.readthedocs.io/en/latest/dev/contributing/). - -To get the greatest chance of helpful responses, please also observe the -following additional notes. - -## Questions - -The GitHub issue tracker is for *bug reports* and *feature requests*. Please do -not use it to ask questions about how to use Requests. These questions should -instead be directed to [Stack Overflow](https://stackoverflow.com/). Make sure -that your question is tagged with the `python-requests` tag when asking it on -Stack Overflow, to ensure that it is answered promptly and accurately. - -## Good Bug Reports - -Please be aware of the following things when filing bug reports: - -1. Avoid raising duplicate issues. *Please* use the GitHub issue search feature - to check whether your bug report or feature request has been mentioned in - the past. Duplicate bug reports and feature requests are a huge maintenance - burden on the limited resources of the project. If it is clear from your - report that you would have struggled to find the original, that's ok, but - if searching for a selection of words in your issue title would have found - the duplicate then the issue will likely be closed extremely abruptly. -2. When filing bug reports about exceptions or tracebacks, please include the - *complete* traceback. Partial tracebacks, or just the exception text, are - not helpful. Issues that do not contain complete tracebacks may be closed - without warning. -3. Make sure you provide a suitable amount of information to work with. This - means you should provide: - - - Guidance on **how to reproduce the issue**. 
Ideally, this should be a - *small* code sample that can be run immediately by the maintainers. - Failing that, let us know what you're doing, how often it happens, what - environment you're using, etc. Be thorough: it prevents us needing to ask - further questions. - - Tell us **what you expected to happen**. When we run your example code, - what are we expecting to happen? What does "success" look like for your - code? - - Tell us **what actually happens**. It's not helpful for you to say "it - doesn't work" or "it fails". Tell us *how* it fails: do you get an - exception? A hang? A non-200 status code? How was the actual result - different from your expected result? - - Tell us **what version of Requests you're using**, and - **how you installed it**. Different versions of Requests behave - differently and have different bugs, and some distributors of Requests - ship patches on top of the code we supply. - - If you do not provide all of these things, it will take us much longer to - fix your problem. If we ask you to clarify these and you never respond, we - will close your issue without fixing it. diff --git a/vendor/requests/.github/FUNDING.yml b/vendor/requests/.github/FUNDING.yml deleted file mode 100644 index 603c7ff9..00000000 --- a/vendor/requests/.github/FUNDING.yml +++ /dev/null @@ -1 +0,0 @@ -custom: ['https://www.python.org/psf/sponsorship/'] diff --git a/vendor/requests/.github/ISSUE_TEMPLATE.md b/vendor/requests/.github/ISSUE_TEMPLATE.md deleted file mode 100644 index e23f4c70..00000000 --- a/vendor/requests/.github/ISSUE_TEMPLATE.md +++ /dev/null @@ -1,28 +0,0 @@ -Summary. - -## Expected Result - -What you expected. - -## Actual Result - -What happened instead. - -## Reproduction Steps - -```python -import requests - -``` - -## System Information - - $ python -m requests.help - -``` - -``` - -This command is only available on Requests v2.16.4 and greater. Otherwise, -please provide some basic information about your system (Python version, -operating system, &c). \ No newline at end of file diff --git a/vendor/requests/.github/ISSUE_TEMPLATE/Bug_report.md b/vendor/requests/.github/ISSUE_TEMPLATE/Bug_report.md deleted file mode 100644 index 5133965f..00000000 --- a/vendor/requests/.github/ISSUE_TEMPLATE/Bug_report.md +++ /dev/null @@ -1,34 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve - ---- - -Summary. - -## Expected Result - -What you expected. - -## Actual Result - -What happened instead. - -## Reproduction Steps - -```python -import requests - -``` - -## System Information - - $ python -m requests.help - -``` - -``` - -This command is only available on Requests v2.16.4 and greater. Otherwise, -please provide some basic information about your system (Python version, -operating system, &c). diff --git a/vendor/requests/.github/ISSUE_TEMPLATE/Custom.md b/vendor/requests/.github/ISSUE_TEMPLATE/Custom.md deleted file mode 100644 index 19291c15..00000000 --- a/vendor/requests/.github/ISSUE_TEMPLATE/Custom.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -name: Request for Help -about: Guidance on using Requests. - ---- - -Please refer to our [Stack Overflow tag](https://stackoverflow.com/questions/tagged/python-requests) for guidance. 
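The deleted issue templates above ask reporters to run `python -m requests.help` and paste its output (noted as available in Requests v2.16.4 and greater). As a minimal sketch of what that command gathers, assuming a Requests version that ships the `requests.help` module, the same environment report can be produced programmatically:

```python
# Minimal sketch, assuming requests >= 2.16.4 is installed:
# requests.help.info() returns the environment dict that
# `python -m requests.help` pretty-prints for bug reports.
import json

import requests.help

print(json.dumps(requests.help.info(), sort_keys=True, indent=2))
```

The report covers the Python implementation, requests and urllib3 versions, and TLS details, which is why the templates ask for it verbatim.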
diff --git a/vendor/requests/.github/ISSUE_TEMPLATE/Feature_request.md b/vendor/requests/.github/ISSUE_TEMPLATE/Feature_request.md deleted file mode 100644 index dcf6a445..00000000 --- a/vendor/requests/.github/ISSUE_TEMPLATE/Feature_request.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this project - ---- - -Requests is not accepting feature requests at this time. diff --git a/vendor/requests/.github/SECURITY.md b/vendor/requests/.github/SECURITY.md deleted file mode 100644 index 9021d429..00000000 --- a/vendor/requests/.github/SECURITY.md +++ /dev/null @@ -1,93 +0,0 @@ -# Vulnerability Disclosure - -If you think you have found a potential security vulnerability in -requests, please email [Nate](mailto:nate.prewitt@gmail.com) -and [Seth](mailto:sethmichaellarson@gmail.com) directly. -**Do not file a public issue.** - -Our PGP Key fingerprints are: - -- 8722 7E29 AD9C FF5C FAC3 EA6A 44D3 FF97 B80D C864 ([@nateprewitt](https://keybase.io/nateprewitt)) - -- EDD5 6765 A9D8 4653 CBC8 A134 51B0 6736 1740 F5FC ([@sethmlarson](https://keybase.io/sethmlarson)) - -You can also contact us on [Keybase](https://keybase.io) with the -profiles above if desired. - -If English is not your first language, please try to describe the -problem and its impact to the best of your ability. For greater detail, -please use your native language and we will try our best to translate it -using online services. - -Please also include the code you used to find the problem and the -shortest amount of code necessary to reproduce it. - -Please do not disclose this to anyone else. We will retrieve a CVE -identifier if necessary and give you full credit under whatever name or -alias you provide. We will only request an identifier when we have a fix -and can publish it in a release. - -We will respect your privacy and will only publicize your involvement if -you grant us permission. - -## Process - -This following information discusses the process the requests project -follows in response to vulnerability disclosures. If you are disclosing -a vulnerability, this section of the documentation lets you know how we -will respond to your disclosure. - -### Timeline - -When you report an issue, one of the project members will respond to you -within two days *at the outside*. In most cases responses will be -faster, usually within 12 hours. This initial response will at the very -least confirm receipt of the report. - -If we were able to rapidly reproduce the issue, the initial response -will also contain confirmation of the issue. If we are not, we will -often ask for more information about the reproduction scenario. - -Our goal is to have a fix for any vulnerability released within two -weeks of the initial disclosure. This may potentially involve shipping -an interim release that simply disables function while a more mature fix -can be prepared, but will in the vast majority of cases mean shipping a -complete release as soon as possible. - -Throughout the fix process we will keep you up to speed with how the fix -is progressing. Once the fix is prepared, we will notify you that we -believe we have a fix. Often we will ask you to confirm the fix resolves -the problem in your environment, especially if we are not confident of -our reproduction scenario. - -At this point, we will prepare for the release. We will obtain a CVE -number if one is required, providing you with full credit for the -discovery. We will also decide on a planned release date, and let you -know when it is. 
This release date will *always* be on a weekday. - -At this point we will reach out to our major downstream packagers to -notify them of an impending security-related patch so they can make -arrangements. In addition, these packagers will be provided with the -intended patch ahead of time, to ensure that they are able to promptly -release their downstream packages. Currently the list of people we -actively contact *ahead of a public release* is: - -- Jeremy Cline, Red Hat (@jeremycline) -- Daniele Tricoli, Debian (@eriol) - -We will notify these individuals at least a week ahead of our planned -release date to ensure that they have sufficient time to prepare. If you -believe you should be on this list, please let one of the maintainers -know at one of the email addresses at the top of this article. - -On release day, we will push the patch to our public repository, along -with an updated changelog that describes the issue and credits you. We -will then issue a PyPI release containing the patch. - -At this point, we will publicise the release. This will involve mails to -mailing lists, Tweets, and all other communication mechanisms available -to the core team. - -We will also explicitly mention which commits contain the fix to make it -easier for other distributors and users to easily patch their own -versions of requests if upgrading is not an option. diff --git a/vendor/requests/.github/workflows/codeql-analysis.yml b/vendor/requests/.github/workflows/codeql-analysis.yml deleted file mode 100644 index bf146390..00000000 --- a/vendor/requests/.github/workflows/codeql-analysis.yml +++ /dev/null @@ -1,66 +0,0 @@ -# For most projects, this workflow file will not need changing; you simply need -# to commit it to your repository. -# -# You may wish to alter this file to override the set of languages analyzed, -# or to provide custom queries or build logic. -name: "CodeQL" - -on: - push: - branches: [master] - pull_request: - # The branches below must be a subset of the branches above - branches: [master] - schedule: - - cron: '0 23 * * 0' - -jobs: - analyze: - name: Analyze - runs-on: ubuntu-latest - - strategy: - fail-fast: false - - - steps: - - name: Checkout repository - uses: actions/checkout@v2 - with: - # We must fetch at least the immediate parents so that if this is - # a pull request then we can checkout the head. - fetch-depth: 2 - - # If this run was triggered by a pull request event, then checkout - # the head of the pull request instead of the merge commit. - - run: git checkout HEAD^2 - if: ${{ github.event_name == 'pull_request' }} - - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@v1 - with: - languages: "python" - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. - # queries: ./path/to/local/query, your-org/your-repo/queries@main - - # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). - # If this step fails, then you should remove it and run the build manually (see below) - - name: Autobuild - uses: github/codeql-action/autobuild@v1 - - # ℹ️ Command-line programs to run using the OS shell.
- # 📚 https://git.io/JvXDl - - # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines - # and modify them (or add more) to build your code if your project - # uses a compiled language - - #- run: | - # make bootstrap - # make release - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v1 diff --git a/vendor/requests/.github/workflows/run-tests.yml b/vendor/requests/.github/workflows/run-tests.yml deleted file mode 100644 index cf5f0b4b..00000000 --- a/vendor/requests/.github/workflows/run-tests.yml +++ /dev/null @@ -1,31 +0,0 @@ -name: Tests - -on: [push, pull_request] - -jobs: - build: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - python-version: [2.7, 3.6, 3.7, 3.8, 3.9] - os: [ubuntu-18.04, macOS-latest, windows-latest] - include: - # pypy3 on Mac OS currently fails trying to compile - # brotlipy. Moving pypy3 to only test linux. - - python-version: pypy3 - os: ubuntu-latest - - steps: - - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - make - - name: Run tests - run: | - make ci diff --git a/vendor/requests/.gitignore b/vendor/requests/.gitignore deleted file mode 100644 index de61154e..00000000 --- a/vendor/requests/.gitignore +++ /dev/null @@ -1,37 +0,0 @@ -.coverage -MANIFEST -coverage.xml -nosetests.xml -junit-report.xml -pylint.txt -toy.py -.cache/ -cover/ -build/ -docs/_build -requests.egg-info/ -*.pyc -*.swp -*.egg -env/ -.venv/ -.eggs/ -.tox/ -.pytest_cache/ -.vscode/ -.eggs/ - -.workon - -# in case you work with IntelliJ/PyCharm -.idea -*.iml -.python-version - - -t.py - -t2.py -dist - -/.mypy_cache/ diff --git a/vendor/requests/AUTHORS.rst b/vendor/requests/AUTHORS.rst deleted file mode 100644 index f4349104..00000000 --- a/vendor/requests/AUTHORS.rst +++ /dev/null @@ -1,193 +0,0 @@ -Requests was lovingly created by Kenneth Reitz. - -Keepers of the Crystals -``````````````````````` - -- Nate Prewitt `@nateprewitt `_. -- Seth M. Larson `@sethmlarson `_. - -Previous Keepers of Crystals -```````````````````````````` -- Kenneth Reitz `@ken-reitz `_, reluctant Keeper of the Master Crystal. -- Cory Benfield `@lukasa `_ -- Ian Cordasco `@sigmavirus24 `_. - - -Patches and Suggestions -``````````````````````` - -- Various Pocoo Members -- Chris Adams -- Flavio Percoco Premoli -- Dj Gilcrease -- Justin Murphy -- Rob Madole -- Aram Dulyan -- Johannes Gorset -- 村山めがね (Megane Murayama) -- James Rowe -- Daniel Schauenberg -- Zbigniew Siciarz -- Daniele Tricoli 'Eriol' -- Richard Boulton -- Miguel Olivares -- Alberto Paro -- Jérémy Bethmont -- 潘旭 (Xu Pan) -- Tamás Gulácsi -- Rubén Abad -- Peter Manser -- Jeremy Selier -- Jens Diemer -- Alex (`@alopatin `_) -- Tom Hogans -- Armin Ronacher -- Shrikant Sharat Kandula -- Mikko Ohtamaa -- Den Shabalin -- Daniel Miller -- Alejandro Giacometti -- Rick Mak -- Johan Bergström -- Josselin Jacquard -- Travis N.
Vaught -- Fredrik Möllerstrand -- Daniel Hengeveld -- Dan Head -- Bruno Renié -- David Fischer -- Joseph McCullough -- Juergen Brendel -- Juan Riaza -- Ryan Kelly -- Rolando Espinoza La fuente -- Robert Gieseke -- Idan Gazit -- Ed Summers -- Chris Van Horne -- Christopher Davis -- Ori Livneh -- Jason Emerick -- Bryan Helmig -- Jonas Obrist -- Lucian Ursu -- Tom Moertel -- Frank Kumro Jr -- Chase Sterling -- Marty Alchin -- takluyver -- Ben Toews (`@mastahyeti `_) -- David Kemp -- Brendon Crawford -- Denis (`@Telofy `_) -- Matt Giuca -- Adam Tauber -- Honza Javorek -- Brendan Maguire -- Chris Dary -- Danver Braganza -- Max Countryman -- Nick Chadwick -- Jonathan Drosdeck -- Jiri Machalek -- Steve Pulec -- Michael Kelly -- Michael Newman -- Jonty Wareing -- Shivaram Lingamneni -- Miguel Turner -- Rohan Jain (`@crodjer `_) -- Justin Barber -- Roman Haritonov (`@reclosedev `_) -- Josh Imhoff -- Arup Malakar -- Danilo Bargen (`@dbrgn `_) -- Torsten Landschoff -- Michael Holler (`@apotheos `_) -- Timnit Gebru -- Sarah Gonzalez -- Victoria Mo -- Leila Muhtasib -- Matthias Rahlf -- Jakub Roztocil -- Rhys Elsmore -- André Graf (`@dergraf `_) -- Stephen Zhuang (`@everbird `_) -- Martijn Pieters -- Jonatan Heyman -- David Bonner (`@rascalking `_) -- Vinod Chandru -- Johnny Goodnow -- Denis Ryzhkov -- Wilfred Hughes -- Dmitry Medvinsky -- Bryce Boe (`@bboe `_) -- Colin Dunklau (`@cdunklau `_) -- Bob Carroll (`@rcarz `_) -- Hugo Osvaldo Barrera (`@hobarrera `_) -- Łukasz Langa -- Dave Shawley -- James Clarke (`@jam `_) -- Kevin Burke -- Flavio Curella -- David Pursehouse (`@dpursehouse `_) -- Jon Parise (`@jparise `_) -- Alexander Karpinsky (`@homm86 `_) -- Marc Schlaich (`@schlamar `_) -- Park Ilsu (`@daftshady `_) -- Matt Spitz (`@mattspitz `_) -- Vikram Oberoi (`@voberoi `_) -- Can Ibanoglu (`@canibanoglu `_) -- Thomas Weißschuh (`@t-8ch `_) -- Jayson Vantuyl -- Pengfei.X -- Kamil Madac -- Michael Becker (`@beckerfuffle `_) -- Erik Wickstrom (`@erikwickstrom `_) -- Константин Подшумок (`@podshumok `_) -- Ben Bass (`@codedstructure `_) -- Jonathan Wong (`@ContinuousFunction `_) -- Martin Jul (`@mjul `_) -- Joe Alcorn (`@buttscicles `_) -- Syed Suhail Ahmed (`@syedsuhail `_) -- Scott Sadler (`@ssadler `_) -- Arthur Darcet (`@arthurdarcet `_) -- Ulrich Petri (`@ulope `_) -- Muhammad Yasoob Ullah Khalid (`@yasoob `_) -- Paul van der Linden (`@pvanderlinden `_) -- Colin Dickson (`@colindickson `_) -- Smiley Barry (`@smiley `_) -- Shagun Sodhani (`@shagunsodhani `_) -- Robin Linderborg (`@vienno `_) -- Brian Samek (`@bsamek `_) -- Dmitry Dygalo (`@Stranger6667 `_) -- piotrjurkiewicz -- Jesse Shapiro (`@haikuginger `_) -- Nate Prewitt (`@nateprewitt `_) -- Maik Himstedt -- Michael Hunsinger -- Brian Bamsch (`@bbamsch `_) -- Om Prakash Kumar (`@iamprakashom `_) -- Philipp Konrad (`@gardiac2002 `_) -- Hussain Tamboli (`@hussaintamboli `_) -- Casey Davidson (`@davidsoncasey `_) -- Andrii Soldatenko (`@a_soldatenko `_) -- Moinuddin Quadri (`@moin18 `_) -- Matt Kohl (`@mattkohl `_) -- Jonathan Vanasco (`@jvanasco `_) -- David Fontenot (`@davidfontenot `_) -- Shmuel Amar (`@shmuelamar `_) -- Gary Wu (`@garywu `_) -- Ryan Pineo (`@ryanpineo `_) -- Ed Morley (`@edmorley `_) -- Matt Liu (`@mlcrazy `_) -- Taylor Hoff (`@PrimordialHelios `_) -- Arthur Vigil (`@ahvigil `_) -- Nehal J Wani (`@nehaljwani `_) -- Demetrios Bairaktaris (`@DemetriosBairaktaris `_) -- Darren Dormer (`@ddormer `_) -- Rajiv Mayani (`@mayani `_) -- Antti Kaihola (`@akaihola `_) -- "Dull Bananas" (`@dullbananas `_) -- Alessio Izzo (`@aless10 `_)
-- Sylvain Marié (`@smarie `_) diff --git a/vendor/requests/HISTORY.md b/vendor/requests/HISTORY.md deleted file mode 100644 index 59e4a9f7..00000000 --- a/vendor/requests/HISTORY.md +++ /dev/null @@ -1,1751 +0,0 @@ -Release History -=============== - -dev ---- - -- \[Short description of non-trivial change.\] - -2.26.0 (2021-07-13) ------------------- - -**Improvements** - -- Requests now supports Brotli compression, if either the `brotli` or - `brotlicffi` package is installed. (#5783) - -- `Session.send` now correctly resolves proxy configurations from both - the Session and Request. Behavior now matches `Session.request`. (#5681) - -**Bugfixes** - -- Fixed a race condition in zip extraction when using Requests in parallel - from zip archive. (#5707) - -**Dependencies** - -- Instead of `chardet`, use the MIT-licensed `charset_normalizer` for Python3 - to remove license ambiguity for projects bundling requests. If `chardet` - is already installed on your machine it will be used instead of `charset_normalizer` - to keep backwards compatibility. (#5797) - - You can also install `chardet` while installing requests by - specifying `[use_chardet_on_py3]` extra as follows: - - ```shell - pip install "requests[use_chardet_on_py3]" - ``` - - Python2 still depends upon the `chardet` module. - -- Requests now supports `idna` 3.x on Python 3. `idna` 2.x will continue to - be used on Python 2 installations. (#5711) - -**Deprecations** - -- The `requests[security]` extra has been converted to a no-op install. - PyOpenSSL is no longer the recommended secure option for Requests. (#5867) - -- Requests has officially dropped support for Python 3.5. (#5867) - -2.25.1 (2020-12-16) ------------------- - -**Bugfixes** - -- Requests now treats `application/json` as `utf8` by default, resolving - inconsistencies between `r.text` and `r.json` output. (#5673) - -**Dependencies** - -- Requests now supports chardet v4.x. - -2.25.0 (2020-11-11) ------------------- - -**Improvements** - -- Added support for NETRC environment variable. (#5643) - -**Dependencies** - -- Requests now supports urllib3 v1.26. - -**Deprecations** - -- Requests v2.25.x will be the last release series with support for Python 3.5. -- The `requests[security]` extra is officially deprecated and will be removed - in Requests v2.26.0. - -2.24.0 (2020-06-17) ------------------- - -**Improvements** - -- pyOpenSSL TLS implementation is now only used if Python - either doesn't have an `ssl` module or doesn't support - SNI. Previously pyOpenSSL was unconditionally used if available. - This applies even if pyOpenSSL is installed via the - `requests[security]` extra (#5443) - -- Redirect resolution should now only occur when - `allow_redirects` is True. (#5492) - -- No longer perform unnecessary Content-Length calculation for - requests that won't use it. (#5496) - -2.23.0 (2020-02-19) ------------------- - -**Improvements** - -- Remove defunct reference to `prefetch` in Session `__attrs__` (#5110) - -**Bugfixes** - -- Requests no longer outputs password in basic auth usage warning. (#5099) - -**Dependencies** - -- Pinning for `chardet` and `idna` now uses major version instead of minor. - This hopefully reduces the need for releases every time a dependency is updated. - -2.22.0 (2019-05-15) ------------------- - -**Dependencies** - -- Requests now supports urllib3 v1.25.2. - (note: 1.25.0 and 1.25.1 are incompatible) - -**Deprecations** - -- Requests has officially stopped support for Python 3.4.
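To make the 2.25.1 `application/json` note above concrete, here is a minimal sketch of the behaviour; the httpbin.org endpoint is an assumption used purely for illustration:

```python
import json

import requests

# Sketch of the 2.25.1 behaviour: a response served as `application/json`
# with no explicit charset is decoded as UTF-8, so `r.text` and `r.json()`
# stay consistent. The endpoint below is illustrative only.
r = requests.get("https://httpbin.org/json")
assert r.headers["Content-Type"].startswith("application/json")
assert r.json() == json.loads(r.text)
```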
- -2.21.0 (2018-12-10) ------------------- - -**Dependencies** - -- Requests now supports idna v2.8. - -2.20.1 (2018-11-08) ------------------- - -**Bugfixes** - -- Fixed bug with unintended Authorization header stripping for - redirects using default ports (http/80, https/443). - -2.20.0 (2018-10-18) ------------------- - -**Bugfixes** - -- Content-Type header parsing is now case-insensitive (e.g. - charset=utf8 vs Charset=utf8). -- Fixed exception leak where certain redirect URLs would raise - uncaught urllib3 exceptions. -- Requests removes Authorization header from requests redirected - from https to http on the same hostname. (CVE-2018-18074) -- `should_bypass_proxies` now handles URIs without hostnames (e.g. - files). - -**Dependencies** - -- Requests now supports urllib3 v1.24. - -**Deprecations** - -- Requests has officially stopped support for Python 2.6. - -2.19.1 (2018-06-14) ------------------- - -**Bugfixes** - -- Fixed issue where status\_codes.py's `init` function failed trying - to append to a `__doc__` value of `None`. - -2.19.0 (2018-06-12) ------------------- - -**Improvements** - -- Warn user about possible slowdown when using cryptography version - < 1.3.4. -- Check for invalid host in proxy URL, before forwarding request to - adapter. -- Fragments are now properly maintained across redirects. (RFC7231 - 7.1.2) -- Removed use of cgi module to expedite library load time. -- Added support for SHA-256 and SHA-512 digest auth algorithms. -- Minor performance improvement to `Request.content`. -- Migrate to using collections.abc for 3.7 compatibility. - -**Bugfixes** - -- Parsing empty `Link` headers with `parse_header_links()` no longer - returns one bogus entry. -- Fixed issue where loading the default certificate bundle from a zip - archive would raise an `IOError`. -- Fixed issue with unexpected `ImportError` on Windows systems which do - not support the `winreg` module. -- DNS resolution in proxy bypass no longer includes the username and - password in the request. This also fixes the issue of DNS queries - failing on macOS. -- Properly normalize adapter prefixes for URL comparison. -- Passing `None` as a file pointer to the `files` param no longer - raises an exception. -- Calling `copy` on a `RequestsCookieJar` will now preserve the cookie - policy correctly. - -**Dependencies** - -- We now support idna v2.7. -- We now support urllib3 v1.23. - -2.18.4 (2017-08-15) ------------------- - -**Improvements** - -- Error messages for invalid headers now include the header name for - easier debugging. - -**Dependencies** - -- We now support idna v2.6. - -2.18.3 (2017-08-02) ------------------- - -**Improvements** - -- Running `$ python -m requests.help` now includes the installed - version of idna. - -**Bugfixes** - -- Fixed issue where Requests would raise `ConnectionError` instead of - `SSLError` when encountering SSL problems when using urllib3 v1.22. - -2.18.2 (2017-07-25) ------------------- - -**Bugfixes** - -- `requests.help` no longer fails on Python 2.6 due to the absence of - `ssl.OPENSSL_VERSION_NUMBER`. - -**Dependencies** - -- We now support urllib3 v1.22. - -2.18.1 (2017-06-14) ------------------- - -**Bugfixes** - -- Fix an error in the packaging whereby the `*.whl` contained - incorrect data that regressed the fix in v2.17.3. - -2.18.0 (2017-06-14) ------------------- - -**Improvements** - -- `Response` is now a context manager, so can be used directly in a - `with` statement without first having to be wrapped by - `contextlib.closing()`.
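As a minimal sketch of the context-manager support described in the 2.18.0 entry above (the URL and chunk size are illustrative assumptions, not part of the changelog):

```python
import requests

# Since 2.18.0 a Response is itself a context manager, so a streamed
# response can be released without wrapping it in contextlib.closing().
with requests.get("https://example.com/", stream=True) as r:
    for chunk in r.iter_content(chunk_size=8192):
        pass  # process each chunk here
```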
- -**Bugfixes** - -- Resolve installation failure if multiprocessing is not available -- Resolve tests crash if multiprocessing is not able to determine the - number of CPU cores -- Resolve error swallowing in utils set\_environ generator - -2.17.3 (2017-05-29) ------------------- - -**Improvements** - -- Improved `packages` namespace identity support, for monkeypatching - libraries. - -2.17.2 (2017-05-29) ------------------- - -**Improvements** - -- Improved `packages` namespace identity support, for monkeypatching - libraries. - -2.17.1 (2017-05-29) ------------------- - -**Improvements** - -- Improved `packages` namespace identity support, for monkeypatching - libraries. - -2.17.0 (2017-05-29) ------------------- - -**Improvements** - -- Removal of the 301 redirect cache. This improves thread-safety. - -2.16.5 (2017-05-28) ------------------- - -- Improvements to `$ python -m requests.help`. - -2.16.4 (2017-05-27) ------------------- - -- Introduction of the `$ python -m requests.help` command, for - debugging with maintainers! - -2.16.3 (2017-05-27) ------------------- - -- Further restored the `requests.packages` namespace for compatibility - reasons. - -2.16.2 (2017-05-27) ------------------- - -- Further restored the `requests.packages` namespace for compatibility - reasons. - -No code modification (noted below) should be necessary any longer. - -2.16.1 (2017-05-27) ------------------- - -- Restored the `requests.packages` namespace for compatibility - reasons. -- Bugfix for `urllib3` version parsing. - -**Note**: code that was written to import against the -`requests.packages` namespace previously will have to import code that -rests at this module-level now. - -For example: - - from requests.packages.urllib3.poolmanager import PoolManager - -Will need to be re-written to be: - - from requests.packages import urllib3 - urllib3.poolmanager.PoolManager - -Or, even better: - - from urllib3.poolmanager import PoolManager - -2.16.0 (2017-05-26) ------------------- - -- Unvendor ALL the things! - -2.15.1 (2017-05-26) ------------------- - -- Everyone makes mistakes. - -2.15.0 (2017-05-26) ------------------- - -**Improvements** - -- Introduction of the `Response.next` property, for getting the next - `PreparedRequest` from a redirect chain (when - `allow_redirects=False`). -- Internal refactoring of `__version__` module. - -**Bugfixes** - -- Restored once-optional parameter for - `requests.utils.get_environ_proxies()`. - -2.14.2 (2017-05-10) ------------------- - -**Bugfixes** - -- Changed a less-than to an equal-to and an or in the dependency - markers to widen compatibility with older setuptools releases. - -2.14.1 (2017-05-09) ------------------- - -**Bugfixes** - -- Changed the dependency markers to widen compatibility with older pip - releases. - -2.14.0 (2017-05-09) ------------------- - -**Improvements** - -- It is now possible to pass `no_proxy` as a key to the `proxies` - dictionary to provide handling similar to the `NO_PROXY` environment - variable. -- When users provide invalid paths to certificate bundle files or - directories Requests now raises `IOError`, rather than failing at - the time of the HTTPS request with a fairly inscrutable certificate - validation error. -- The behavior of `SessionRedirectMixin` was slightly altered. - `resolve_redirects` will now detect a redirect by calling - `get_redirect_target(response)` instead of directly querying - `Response.is_redirect` and `Response.headers['location']`.
Advanced - users will be able to process malformed redirects more easily. -- Changed the internal calculation of elapsed request time to have - higher resolution on Windows. -- Added `win_inet_pton` as conditional dependency for the `[socks]` - extra on Windows with Python 2.7. -- Changed the proxy bypass implementation on Windows: the proxy bypass - check doesn't use forward and reverse DNS requests anymore. -- URLs with schemes that begin with `http` but are not `http` or - `https` no longer have their host parts forced to lowercase. - -**Bugfixes** - -- Much improved handling of non-ASCII `Location` header values in - redirects. Fewer `UnicodeDecodeErrors` are encountered on Python 2, - and Python 3 now correctly understands that Latin-1 is unlikely to - be the correct encoding. -- If an attempt to `seek` a file to find out its length fails, we now - appropriately handle that by aborting our content-length - calculations. -- Restricted `HTTPDigestAuth` to only respond to auth challenges made - on 4XX responses, rather than to all auth challenges. -- Fixed some code that was firing `DeprecationWarning` on Python 3.6. -- The dismayed person emoticon (`/o\\`) no longer has a big head. I'm - sure this is what you were all worrying about most. - -**Miscellaneous** - -- Updated bundled urllib3 to v1.21.1. -- Updated bundled chardet to v3.0.2. -- Updated bundled idna to v2.5. -- Updated bundled certifi to 2017.4.17. - -2.13.0 (2017-01-24) ------------------- - -**Features** - -- Only load the `idna` library when we've determined we need it. This - will save some memory for users. - -**Miscellaneous** - -- Updated bundled urllib3 to 1.20. -- Updated bundled idna to 2.2. - -2.12.5 (2017-01-18) ------------------- - -**Bugfixes** - -- Fixed an issue with JSON encoding detection, specifically detecting - big-endian UTF-32 with BOM. - -2.12.4 (2016-12-14) ------------------- - -**Bugfixes** - -- Fixed regression from 2.12.2 where non-string types were rejected in - the basic auth parameters. While support for this behaviour has been - re-added, the behaviour is deprecated and will be removed in the - future. - -2.12.3 (2016-12-01) ------------------- - -**Bugfixes** - -- Fixed regression from v2.12.1 for URLs with schemes that begin with - "http". These URLs have historically been processed as though they - were HTTP-schemed URLs, and so have had parameters added. This was - removed in v2.12.2 in an overzealous attempt to resolve problems - with IDNA-encoding those URLs. This change was reverted: the other - fixes for IDNA-encoding have been judged to be sufficient to return - to the behaviour Requests had before v2.12.0. - -2.12.2 (2016-11-30) ------------------- - -**Bugfixes** - -- Fixed several issues with IDNA-encoding URLs that are technically - invalid but which are widely accepted. Requests will now attempt to - IDNA-encode a URL if it can but, if it fails, and the host contains - only ASCII characters, it will be passed through optimistically. - This will allow users to opt-in to using IDNA2003 themselves if they - want to, and will also allow technically invalid but still common - hostnames. -- Fixed an issue where URLs with leading whitespace would raise - `InvalidSchema` errors. -- Fixed an issue where some URLs without the HTTP or HTTPS schemes - would still have HTTP URL preparation applied to them. -- Fixed an issue where Unicode strings could not be used in basic - auth.
-- Fixed an issue encountered by some Requests plugins where - constructing a Response object would cause `Response.content` to - raise an `AttributeError`. - -2.12.1 (2016-11-16) ------------------- - -**Bugfixes** - -- Updated setuptools 'security' extra for the new PyOpenSSL backend in - urllib3. - -**Miscellaneous** - -- Updated bundled urllib3 to 1.19.1. - -2.12.0 (2016-11-15) ------------------- - -**Improvements** - -- Updated support for internationalized domain names from IDNA2003 to - IDNA2008. This updated support is required for several forms of IDNs - and is mandatory for .de domains. -- Much improved heuristics for guessing content lengths: Requests will - no longer read an entire `StringIO` into memory. -- Much improved logic for recalculating `Content-Length` headers for - `PreparedRequest` objects. -- Improved tolerance for file-like objects that have no `tell` method - but do have a `seek` method. -- Anything that is a subclass of `Mapping` is now treated like a - dictionary by the `data=` keyword argument. -- Requests now tolerates empty passwords in proxy credentials, rather - than stripping the credentials. -- If a request is made with a file-like object as the body and that - request is redirected with a 307 or 308 status code, Requests will - now attempt to rewind the body object so it can be replayed. - -**Bugfixes** - -- When calling `response.close`, the call to `close` will be - propagated through to non-urllib3 backends. -- Fixed issue where the `ALL_PROXY` environment variable would be - preferred over scheme-specific variables like `HTTP_PROXY`. -- Fixed issue where non-UTF8 reason phrases got severely mangled by - falling back to decoding using ISO 8859-1 instead. -- Fixed a bug where Requests would not correctly correlate cookies set - when using custom Host headers if those Host headers did not use the - native string type for the platform. - -**Miscellaneous** - -- Updated bundled urllib3 to 1.19. -- Updated bundled certifi certs to 2016.09.26. - -2.11.1 (2016-08-17) ------------------- - -**Bugfixes** - -- Fixed a bug where using `iter_content` with `decode_unicode=True` for - streamed bodies would raise `AttributeError`. This bug was - introduced in 2.11. -- Strip Content-Type and Transfer-Encoding headers from the header - block when following a redirect that transforms the verb from - POST/PUT to GET. - -2.11.0 (2016-08-08) ------------------- - -**Improvements** - -- Added support for the `ALL_PROXY` environment variable. -- Reject header values that contain leading whitespace or newline - characters to reduce risk of header smuggling. - -**Bugfixes** - -- Fixed occasional `TypeError` when attempting to decode a JSON - response that occurred in an error case. Now correctly raises a - `ValueError`. -- Requests would incorrectly ignore a non-CIDR IP address in the - `NO_PROXY` environment variable: Requests now treats it as a - specific IP. -- Fixed a bug when sending JSON data that could cause us to encounter - obscure OpenSSL errors in certain network conditions (yes, really). -- Added type checks to ensure that `iter_content` only accepts - integers and `None` for chunk sizes. -- Fixed issue where responses whose body had not been fully consumed - would have the underlying connection closed but not returned to the - connection pool, which could cause Requests to hang in situations - where the `HTTPAdapter` had been configured to use a blocking - connection pool. - -**Miscellaneous** - -- Updated bundled urllib3 to 1.16.
-- Some previous releases accidentally accepted non-strings as - acceptable header values. This release does not. - -2.10.0 (2016-04-29) ------------------- - -**New Features** - -- SOCKS Proxy Support! (requires PySocks; - `$ pip install requests[socks]`) - -**Miscellaneous** - -- Updated bundled urllib3 to 1.15.1. - -2.9.2 (2016-04-29) ------------------ - -**Improvements** - -- Change built-in CaseInsensitiveDict (used for headers) to use - OrderedDict as its underlying datastore. - -**Bugfixes** - -- Don't use redirect\_cache if allow\_redirects=False -- When passed objects that throw exceptions from `tell()`, send them - via chunked transfer encoding instead of failing. -- Raise a ProxyError for proxy related connection issues. - -2.9.1 (2015-12-21) ------------------ - -**Bugfixes** - -- Resolve regression introduced in 2.9.0 that made it impossible to - send binary strings as bodies in Python 3. -- Fixed errors when calculating cookie expiration dates in certain - locales. - -**Miscellaneous** - -- Updated bundled urllib3 to 1.13.1. - -2.9.0 (2015-12-15) ------------------ - -**Minor Improvements** (Backwards compatible) - -- The `verify` keyword argument now supports being passed a path to a - directory of CA certificates, not just a single-file bundle. -- Warnings are now emitted when sending files opened in text mode. -- Added the 511 Network Authentication Required status code to the - status code registry. - -**Bugfixes** - -- For file-like objects that are not sought to the very beginning, we - now send the content length for the number of bytes we will actually - read, rather than the total size of the file, allowing partial file - uploads. -- When uploading file-like objects, if they are empty or have no - obvious content length we set `Transfer-Encoding: chunked` rather - than `Content-Length: 0`. -- We correctly receive the response in buffered mode when uploading - chunked bodies. -- We now handle being passed a query string as a bytestring on Python - 3, by decoding it as UTF-8. -- Sessions are now closed in all cases (exceptional and not) when - using the functional API rather than leaking and waiting for the - garbage collector to clean them up. -- Correctly handle digest auth headers with a malformed `qop` - directive that contains no token, by treating it the same as if no - `qop` directive was provided at all. -- Minor performance improvements when removing specific cookies by - name. - -**Miscellaneous** - -- Updated urllib3 to 1.13. - -2.8.1 (2015-10-13) ------------------ - -**Bugfixes** - -- Update certificate bundle to match `certifi` 2015.9.6.2's weak - certificate bundle. -- Fix a bug in 2.8.0 where requests would raise `ConnectTimeout` - instead of `ConnectionError`. -- When using the PreparedRequest flow, requests will now correctly - respect the `json` parameter. Broken in 2.8.0. -- When using the PreparedRequest flow, requests will now correctly - handle a Unicode-string method name on Python 2. Broken in 2.8.0. - -2.8.0 (2015-10-05) ------------------ - -**Minor Improvements** (Backwards Compatible) - -- Requests now supports per-host proxies. This allows the `proxies` - dictionary to have entries of the form - `{'<scheme>://<hostname>': '<proxy_url>'}`. Host-specific proxies will - be used in preference to the previously-supported scheme-specific - ones, but the previous syntax will continue to work. -- `Response.raise_for_status` now prints the URL that failed as part - of the exception message.
-- `requests.utils.get_netrc_auth` now takes a `raise_errors` kwarg, - defaulting to `False`. When `True`, errors parsing `.netrc` files - cause exceptions to be thrown. -- Change to bundled projects import logic to make it easier to - unbundle requests downstream. -- Changed the default User-Agent string to avoid leaking data on - Linux: now contains only the requests version. - -**Bugfixes** - -- The `json` parameter to `post()` and friends will now only be used - if neither `data` nor `files` are present, consistent with the - documentation. -- We now ignore empty fields in the `NO_PROXY` environment variable. -- Fixed problem where `httplib.BadStatusLine` would get raised if - combining `stream=True` with `contextlib.closing`. -- Prevented bugs where we would attempt to return the same connection - back to the connection pool twice when sending a Chunked body. -- Miscellaneous minor internal changes. -- Digest Auth support is now thread safe. - -**Updates** - -- Updated urllib3 to 1.12. - -2.7.0 (2015-05-03) ------------------ - -This is the first release that follows our new release process. For -more, see [our -documentation](https://requests.readthedocs.io/en/latest/community/release-process/). - -**Bugfixes** - -- Updated urllib3 to 1.10.4, resolving several bugs involving chunked - transfer encoding and response framing. - -2.6.2 (2015-04-23) ------------------ - -**Bugfixes** - -- Fix regression where compressed data that was sent as chunked data - was not properly decompressed. (\#2561) - -2.6.1 (2015-04-22) ------------------ - -**Bugfixes** - -- Remove VendorAlias import machinery introduced in v2.5.2. -- Simplify the PreparedRequest.prepare API: We no longer require the - user to pass an empty list to the hooks keyword argument. (c.f. - \#2552) -- Resolve redirects now receives and forwards all of the original - arguments to the adapter. (\#2503) -- Handle UnicodeDecodeErrors when trying to deal with a unicode URL - that cannot be encoded in ASCII. (\#2540) -- Populate the parsed path of the URI field when performing Digest - Authentication. (\#2426) -- Copy a PreparedRequest's CookieJar more reliably when it is not an - instance of RequestsCookieJar. (\#2527) - -2.6.0 (2015-03-14) ------------------ - -**Bugfixes** - -- CVE-2015-2296: Fix handling of cookies on redirect. Previously a - cookie without a host value set would use the hostname for the - redirected URL exposing requests users to session fixation attacks - and potentially cookie stealing. This was disclosed privately by - Matthew Daley of [BugFuzz](https://bugfuzz.com). This affects all - versions of requests from v2.1.0 to v2.5.3 (inclusive on both ends). -- Fix error when requests is an `install_requires` dependency and - `python setup.py test` is run. (\#2462) -- Fix error when urllib3 is unbundled and requests continues to use - the vendored import location. -- Include fixes to `urllib3`'s header handling. -- Requests' handling of unvendored dependencies is now more - restrictive. - -**Features and Improvements** - -- Support bytearrays when passed as parameters in the `files` - argument. (\#2468) -- Avoid data duplication when creating a request with `str`, `bytes`, - or `bytearray` input to the `files` argument. - -2.5.3 (2015-02-24) ------------------ - -**Bugfixes** - -- Revert changes to our vendored certificate bundle. For more context - see (\#2455, \#2456, and ) - -2.5.2 (2015-02-23) ------------------ - -**Features and Improvements** - -- Add sha256 fingerprint support.
- ([shazow/urllib3\#540](https://github.com/shazow/urllib3/pull/540)) -- Improve the performance of headers. - ([shazow/urllib3\#544](https://github.com/shazow/urllib3/pull/544)) - -**Bugfixes** - -- Copy pip's import machinery. When downstream redistributors remove - requests.packages.urllib3 the import machinery will continue to let - those same symbols work. Example usage in requests' documentation - and 3rd-party libraries relying on the vendored copies of urllib3 - will work without having to fall back to the system urllib3. -- Attempt to quote parts of the URL on redirect if unquoting and then - quoting fails. (\#2356) -- Fix filename type check for multipart form-data uploads. (\#2411) -- Properly handle the case where a server issuing digest - authentication challenges provides both auth and auth-int - qop-values. (\#2408) -- Fix a socket leak. - ([shazow/urllib3\#549](https://github.com/shazow/urllib3/pull/549)) -- Fix multiple `Set-Cookie` headers properly. - ([shazow/urllib3\#534](https://github.com/shazow/urllib3/pull/534)) -- Disable the built-in hostname verification. - ([shazow/urllib3\#526](https://github.com/shazow/urllib3/pull/526)) -- Fix the behaviour of decoding an exhausted stream. - ([shazow/urllib3\#535](https://github.com/shazow/urllib3/pull/535)) - -**Security** - -- Pulled in an updated `cacert.pem`. -- Drop RC4 from the default cipher list. - ([shazow/urllib3\#551](https://github.com/shazow/urllib3/pull/551)) - -2.5.1 (2014-12-23) ------------------ - -**Behavioural Changes** - -- Only catch HTTPErrors in raise\_for\_status (\#2382) - -**Bugfixes** - -- Handle LocationParseError from urllib3 (\#2344) -- Handle file-like object filenames that are not strings (\#2379) -- Unbreak HTTPDigestAuth handler. Allow new nonces to be negotiated - (\#2389) - -2.5.0 (2014-12-01) ------------------ - -**Improvements** - -- Allow usage of urllib3's Retry object with HTTPAdapters (\#2216) -- The `iter_lines` method on a response now accepts a delimiter with - which to split the content (\#2295) - -**Behavioural Changes** - -- Add deprecation warnings to functions in requests.utils that will be - removed in 3.0 (\#2309) -- Sessions used by the functional API are always closed (\#2326) -- Restrict requests to HTTP/1.1 and HTTP/1.0 (stop accepting HTTP/0.9) - (\#2323) - -**Bugfixes** - -- Only parse the URL once (\#2353) -- Allow Content-Length header to always be overridden (\#2332) -- Properly handle files in HTTPDigestAuth (\#2333) -- Cap redirect\_cache size to prevent memory abuse (\#2299) -- Fix HTTPDigestAuth handling of redirects after authenticating - successfully (\#2253) -- Fix crash with custom method parameter to Session.request (\#2317) -- Fix how Link headers are parsed using the regular expression library - (\#2271) - -**Documentation** - -- Add more references for interlinking (\#2348) -- Update CSS for theme (\#2290) -- Update width of buttons and sidebar (\#2289) -- Replace references of Gittip with Gratipay (\#2282) -- Add link to changelog in sidebar (\#2273) - -2.4.3 (2014-10-06) ------------------ - -**Bugfixes** - -- Unicode URL improvements for Python 2. -- Re-order JSON param for backwards compat. -- Automatically defrag authentication schemes from host/pass URIs. - ([\#2249](https://github.com/psf/requests/issues/2249)) - -2.4.2 (2014-10-05) ------------------ - -**Improvements** - -- FINALLY! Add json parameter for uploads!
- ([\#2258](https://github.com/psf/requests/pull/2258)) -- Support for bytestring URLs on Python 3.x - ([\#2238](https://github.com/psf/requests/pull/2238)) - -**Bugfixes** - -- Avoid getting stuck in a loop - ([\#2244](https://github.com/psf/requests/pull/2244)) -- Multiple calls to iter\* fail with unhelpful error. - ([\#2240](https://github.com/psf/requests/issues/2240), - [\#2241](https://github.com/psf/requests/issues/2241)) - -**Documentation** - -- Correct redirection introduction - ([\#2245](https://github.com/psf/requests/pull/2245/)) -- Added example of how to send multiple files in one request. - ([\#2227](https://github.com/psf/requests/pull/2227/)) -- Clarify how to pass a custom set of CAs - ([\#2248](https://github.com/psf/requests/pull/2248/)) - -2.4.1 (2014-09-09) ------------------ - -- Now has a "security" package extras set, - `$ pip install requests[security]` -- Requests will now use Certifi if it is available. -- Capture and re-raise urllib3 ProtocolError -- Bugfix for responses that attempt to redirect to themselves forever - (wtf?). - -2.4.0 (2014-08-29) ------------------ - -**Behavioral Changes** - -- `Connection: keep-alive` header is now sent automatically. - -**Improvements** - -- Support for connect timeouts! Timeout now accepts a tuple (connect, - read) which is used to set individual connect and read timeouts. -- Allow copying of PreparedRequests without headers/cookies. -- Updated bundled urllib3 version. -- Refactored settings loading from environment - new - Session.merge\_environment\_settings. -- Handle socket errors in iter\_content. - -2.3.0 (2014-05-16) ------------------ - -**API Changes** - -- New `Response` property `is_redirect`, which is true when the - library could have processed this response as a redirection (whether - or not it actually did). -- The `timeout` parameter now affects requests with both `stream=True` - and `stream=False` equally. -- The change in v2.0.0 to mandate explicit proxy schemes has been - reverted. Proxy schemes now default to `http://`. -- The `CaseInsensitiveDict` used for HTTP headers now behaves like a - normal dictionary when referenced as a string or viewed in the - interpreter. - -**Bugfixes** - -- No longer expose Authorization or Proxy-Authorization headers on - redirect. Fix CVE-2014-1829 and CVE-2014-1830 respectively. -- Authorization is re-evaluated each redirect. -- On redirect, pass url as native strings. -- Fall-back to autodetected encoding for JSON when Unicode detection - fails. -- Headers set to `None` on the `Session` are now correctly not sent. -- Correctly honor `decode_unicode` even if it wasn't used earlier in - the same response. -- Stop advertising `compress` as a supported Content-Encoding. -- The `Response.history` parameter is now always a list. -- Many, many `urllib3` bugfixes. - -2.2.1 (2014-01-23) ------------------ - -**Bugfixes** - -- Fixes incorrect parsing of proxy credentials that contain a literal - or encoded '\#' character. -- Assorted urllib3 fixes. - -2.2.0 (2014-01-09) ------------------ - -**API Changes** - -- New exception: `ContentDecodingError`. Raised instead of `urllib3` - `DecodeError` exceptions. - -**Bugfixes** - -- Avoid many, many exceptions from the buggy implementation of - `proxy_bypass` on OS X in Python 2.6. -- Avoid crashing when attempting to get authentication credentials - from \~/.netrc when running as a user without a home directory. -- Use the correct pool size for pools of connections to proxies. -- Fix iteration of `CookieJar` objects.
-- Ensure that cookies are persisted over redirect. -- Switch back to using chardet, since it has merged with charade. - -2.1.0 (2013-12-05) ------------------ - -- Updated CA Bundle, of course. -- Cookies set on individual Requests through a `Session` (e.g. via - `Session.get()`) are no longer persisted to the `Session`. -- Clean up connections when we hit problems during chunked upload, - rather than leaking them. -- Return connections to the pool when a chunked upload is successful, - rather than leaking it. -- Match the HTTPbis recommendation for HTTP 301 redirects. -- Prevent hanging when using streaming uploads and Digest Auth when a - 401 is received. -- Values of headers set by Requests are now always the native string - type. -- Fix previously broken SNI support. -- Fix accessing HTTP proxies using proxy authentication. -- Unencode HTTP Basic usernames and passwords extracted from URLs. -- Support for IP address ranges for no\_proxy environment variable -- Parse headers correctly when users override the default `Host:` - header. -- Avoid munging the URL in case of case-sensitive servers. -- Looser URL handling for non-HTTP/HTTPS URLs. -- Accept unicode methods in Python 2.6 and 2.7. -- More resilient cookie handling. -- Make `Response` objects pickleable. -- Actually added MD5-sess to Digest Auth instead of pretending to like - last time. -- Updated internal urllib3. -- Fixed @Lukasa's lack of taste. - -2.0.1 (2013-10-24) ------------------ - -- Updated included CA Bundle with new mistrusts and automated process - for the future -- Added MD5-sess to Digest Auth -- Accept per-file headers in multipart file POST messages. -- Fixed: Don't send the full URL on CONNECT messages. -- Fixed: Correctly lowercase a redirect scheme. -- Fixed: Cookies not persisted when set via functional API. -- Fixed: Translate urllib3 ProxyError into a requests ProxyError - derived from ConnectionError. -- Updated internal urllib3 and chardet. - -2.0.0 (2013-09-24) ------------------ - -**API Changes:** - -- Keys in the Headers dictionary are now native strings on all Python - versions, i.e. bytestrings on Python 2, unicode on Python 3. -- Proxy URLs now *must* have an explicit scheme. A `MissingSchema` - exception will be raised if they don't. -- Timeouts now apply to read time if `stream=False`. -- `RequestException` is now a subclass of `IOError`, not - `RuntimeError`. -- Added new method to `PreparedRequest` objects: - `PreparedRequest.copy()`. -- Added new method to `Session` objects: `Session.update_request()`. - This method updates a `Request` object with the data (e.g. cookies) - stored on the `Session`. -- Added new method to `Session` objects: `Session.prepare_request()`. - This method updates and prepares a `Request` object, and returns the - corresponding `PreparedRequest` object. -- Added new method to `HTTPAdapter` objects: - `HTTPAdapter.proxy_headers()`. This should not be called directly, - but improves the subclass interface. -- `httplib.IncompleteRead` exceptions caused by incorrect chunked - encoding will now raise a Requests `ChunkedEncodingError` instead. -- Invalid percent-escape sequences now cause a Requests `InvalidURL` - exception to be raised. -- HTTP 208 no longer uses reason phrase `"im_used"`. Correctly uses - `"already_reported"`. -- HTTP 226 reason added (`"im_used"`). - -**Bugfixes:** - -- Vastly improved proxy support, including the CONNECT verb. Special - thanks to the many contributors who worked towards this improvement.
-- Cookies are now properly managed when 401 authentication responses - are received. -- Chunked encoding fixes. -- Support for mixed case schemes. -- Better handling of streaming downloads. -- Retrieve environment proxies from more locations. -- Minor cookies fixes. -- Improved redirect behaviour. -- Improved streaming behaviour, particularly for compressed data. -- Miscellaneous small Python 3 text encoding bugs. -- `.netrc` no longer overrides explicit auth. -- Cookies set by hooks are now correctly persisted on Sessions. -- Fix problem with cookies that specify port numbers in their host - field. -- `BytesIO` can be used to perform streaming uploads. -- More generous parsing of the `no_proxy` environment variable. -- Non-string objects can be passed in data values alongside files. - -1.2.3 (2013-05-25) ------------------ - -- Simple packaging fix - -1.2.2 (2013-05-23) ------------------ - -- Simple packaging fix - -1.2.1 (2013-05-20) ------------------ - -- 301 and 302 redirects now change the verb to GET for all verbs, not - just POST, improving browser compatibility. -- Python 3.3.2 compatibility -- Always percent-encode location headers -- Fix connection adapter matching to be most-specific first -- new argument to the default connection adapter for passing a block - argument -- prevent a KeyError when there are no link headers - -1.2.0 (2013-03-31) ------------------ - -- Fixed cookies on sessions and on requests -- Significantly change how hooks are dispatched - hooks now receive - all the arguments specified by the user when making a request so - hooks can make a secondary request with the same parameters. This is - especially necessary for authentication handler authors -- certifi support was removed -- Fixed bug where using OAuth 1 with body `signature_type` sent no - data -- Major proxy work thanks to @Lukasa including parsing of proxy - authentication from the proxy url -- Fix DigestAuth handling too many 401s -- Update vendored urllib3 to include SSL bug fixes -- Allow keyword arguments to be passed to `json.loads()` via the - `Response.json()` method -- Don't send `Content-Length` header by default on `GET` or `HEAD` - requests -- Add `elapsed` attribute to `Response` objects to time how long a - request took. -- Fix `RequestsCookieJar` -- Sessions and Adapters are now picklable, i.e., can be used with the - multiprocessing library -- Update charade to version 1.0.3 - -The change in how hooks are dispatched will likely cause a great deal of -issues. - -1.1.0 (2013-01-10) ------------------ - -- CHUNKED REQUESTS -- Support for iterable response bodies -- Assume servers persist redirect params -- Allow explicit content types to be specified for file data -- Make merge\_kwargs case-insensitive when looking up keys - -1.0.3 (2012-12-18) ------------------ - -- Fix file upload encoding bug -- Fix cookie behavior - -1.0.2 (2012-12-17) ------------------ - -- Proxy fix for HTTPAdapter. - -1.0.1 (2012-12-17) ------------------ - -- Cert verification exception bug. -- Proxy fix for HTTPAdapter. - -1.0.0 (2012-12-17) ------------------ - -- Massive Refactor and Simplification -- Switch to Apache 2.0 license -- Swappable Connection Adapters -- Mountable Connection Adapters -- Mutable ProcessedRequest chain -- /s/prefetch/stream -- Removal of all configuration -- Standard library logging -- Make Response.json() callable, not property. -- Usage of new charade project, which provides python 2 and 3 - simultaneous chardet.
-- Removal of all hooks except 'response' -- Removal of all authentication helpers (OAuth, Kerberos) - -This is not a backwards compatible change. - -0.14.2 (2012-10-27) ------------------- - -- Improved mime-compatible JSON handling -- Proxy fixes -- Path hack fixes -- Case-Insensitive Content-Encoding headers -- Support for CJK parameters in form posts - -0.14.1 (2012-10-01) ------------------- - -- Python 3.3 Compatibility -- Simplify default accept-encoding -- Bugfixes - -0.14.0 (2012-09-02) ------------------- - -- No more iter\_content errors if already downloaded. - -0.13.9 (2012-08-25) ------------------- - -- Fix for OAuth + POSTs -- Remove exception eating from dispatch\_hook -- General bugfixes - -0.13.8 (2012-08-21) ------------------- - -- Incredible Link header support :) - -0.13.7 (2012-08-19) ------------------- - -- Support for (key, value) lists everywhere. -- Digest Authentication improvements. -- Ensure proxy exclusions work properly. -- Clearer UnicodeError exceptions. -- Automatic casting of URLs to strings (fURL and such) -- Bugfixes. - -0.13.6 (2012-08-06) ------------------- - -- Long awaited fix for hanging connections! - -0.13.5 (2012-07-27) ------------------- - -- Packaging fix - -0.13.4 (2012-07-27) ------------------- - -- GSSAPI/Kerberos authentication! -- App Engine 2.7 Fixes! -- Fix leaking connections (from urllib3 update) -- OAuthlib path hack fix -- OAuthlib URL parameters fix. - -0.13.3 (2012-07-12) ------------------- - -- Use simplejson if available. -- Do not hide SSLErrors behind Timeouts. -- Fixed param handling with urls containing fragments. -- Significantly improved information in User Agent. -- client certificates are ignored when verify=False - -0.13.2 (2012-06-28) ------------------- - -- Zero dependencies (once again)! -- New: Response.reason -- Sign querystring parameters in OAuth 1.0 -- Client certificates no longer ignored when verify=False -- Add openSUSE certificate support - -0.13.1 (2012-06-07) ------------------- - -- Allow passing a file or file-like object as data. -- Allow hooks to return responses that indicate errors. -- Fix Response.text and Response.json for body-less responses. - -0.13.0 (2012-05-29) ------------------- - -- Removal of Requests.async in favor of - [grequests](https://github.com/kennethreitz/grequests) -- Allow disabling of cookie persistence. -- New implementation of safe\_mode -- cookies.get now supports default argument -- Session cookies not saved when Session.request is called with - return\_response=False -- Env: no\_proxy support. -- RequestsCookieJar improvements. -- Various bug fixes. - -0.12.1 (2012-05-08) ------------------- - -- New `Response.json` property. -- Ability to add string file uploads. -- Fix out-of-range issue with iter\_lines. -- Fix iter\_content default size. -- Fix POST redirects containing files. - -0.12.0 (2012-05-02) ------------------- - -- EXPERIMENTAL OAUTH SUPPORT! -- Proper CookieJar-backed cookies interface with awesome dict-like - interface. -- Speed fix for non-iterated content chunks. -- Move `pre_request` to a more usable place. -- New `pre_send` hook. -- Lazily encode data, params, files. -- Load system Certificate Bundle if `certifi` isn't available. -- Cleanups, fixes. - -0.11.2 (2012-04-22) ------------------- - -- Attempt to use the OS's certificate bundle if `certifi` isn't - available. -- Infinite digest auth redirect fix. -- Multi-part file upload improvements. -- Fix decoding of invalid %encodings in URLs.
-- If there is no content in a response don't throw an error the second - time that content is attempted to be read. -- Upload data on redirects. - -0.11.1 (2012-03-30) ------------------- - -- POST redirects now break RFC to do what browsers do: Follow up with - a GET. -- New `strict_mode` configuration to disable new redirect behavior. - -0.11.0 (2012-03-14) ------------------- - -- Private SSL Certificate support -- Remove select.poll from Gevent monkeypatching -- Remove redundant generator for chunked transfer encoding -- Fix: Response.ok raises Timeout Exception in safe\_mode - -0.10.8 (2012-03-09) ------------------- - -- Generate chunked ValueError fix -- Proxy configuration by environment variables -- Simplification of iter\_lines. -- New trust\_env configuration for disabling system/environment hints. -- Suppress cookie errors. - -0.10.7 (2012-03-07) ------------------- - -- encode\_uri = False - -0.10.6 (2012-02-25) ------------------- - -- Allow '=' in cookies. - -0.10.5 (2012-02-25) ------------------- - -- Response body with 0 content-length fix. -- New async.imap. -- Don't fail on netrc. - -0.10.4 (2012-02-20) ------------------- - -- Honor netrc. - -0.10.3 (2012-02-20) ------------------- - -- HEAD requests don't follow redirects anymore. -- raise\_for\_status() doesn't raise for 3xx anymore. -- Make Session objects picklable. -- ValueError for invalid schema URLs. - -0.10.2 (2012-01-15) ------------------- - -- Vastly improved URL quoting. -- Additional allowed cookie key values. -- Attempted fix for "Too many open files" Error -- Replace unicode errors on first pass, no need for second pass. -- Append '/' to bare-domain urls before query insertion. -- Exceptions now inherit from RuntimeError. -- Binary uploads + auth fix. -- Bugfixes. - -0.10.1 (2012-01-23) ------------------- - -- PYTHON 3 SUPPORT! -- Dropped 2.5 Support. (*Backwards Incompatible*) - -0.10.0 (2012-01-21) ------------------- - -- `Response.content` is now bytes-only. (*Backwards Incompatible*) -- New `Response.text` is unicode-only. -- If no `Response.encoding` is specified and `chardet` is available, - `Response.text` will guess an encoding. -- Default to ISO-8859-1 (Western) encoding for "text" subtypes. -- Removal of decode\_unicode. (*Backwards Incompatible*) -- New multiple-hooks system. -- New `Response.register_hook` for registering hooks within the - pipeline. -- `Response.url` is now Unicode. - -0.9.3 (2012-01-18) ------------------ - -- SSL verify=False bugfix (apparent on windows machines). - -0.9.2 (2012-01-18) ------------------ - -- Asynchronous async.send method. -- Support for proper chunk streams with boundaries. -- session argument for Session classes. -- Print entire hook tracebacks, not just exception instance. -- Fix response.iter\_lines from pending next line. -- Fix bug in HTTP-digest auth w/ URI having query strings. -- Fix in Event Hooks section. -- Urllib3 update. - -0.9.1 (2012-01-06) ------------------ - -- danger\_mode for automatic Response.raise\_for\_status() -- Response.iter\_lines refactor - -0.9.0 (2011-12-28) ------------------ - -- verify ssl is default. - -0.8.9 (2011-12-28) ------------------ - -- Packaging fix. - -0.8.8 (2011-12-28) ------------------ - -- SSL CERT VERIFICATION! -- Release of Certifi: Mozilla's cert list. -- New 'verify' argument for SSL requests. -- Urllib3 update.
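For context, the 'verify' argument introduced in 0.8.8 is the same TLS-verification knob requests still exposes; a minimal sketch, with the bundle path below being a placeholder rather than a real file:

```python
import requests

# verify=True (the default) checks the server certificate against the
# trusted CA bundle; a filesystem path pins a custom bundle instead.
r = requests.get("https://example.com/", verify=True)
# r = requests.get("https://example.com/", verify="/path/to/ca-bundle.pem")
```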
- -0.8.7 (2011-12-24) ------------------ - -- iter\_lines last-line truncation fix -- Force safe\_mode for async requests -- Handle safe\_mode exceptions more consistently -- Fix iteration on null responses in safe\_mode - -0.8.6 (2011-12-18) ------------------ - -- Socket timeout fixes. -- Proxy Authorization support. - -0.8.5 (2011-12-14) ------------------ - -- Response.iter\_lines! - -0.8.4 (2011-12-11) ------------------ - -- Prefetch bugfix. -- Added license to installed version. - -0.8.3 (2011-11-27) ------------------ - -- Converted auth system to use simpler callable objects. -- New session parameter to API methods. -- Display full URL while logging. - -0.8.2 (2011-11-19) ------------------ - -- New Unicode decoding system, based on overridable - Response.encoding. -- Proper URL slash-quote handling. -- Cookies with `[`, `]`, and `_` allowed. - -0.8.1 (2011-11-15) ------------------ - -- URL Request path fix -- Proxy fix. -- Timeouts fix. - -0.8.0 (2011-11-13) ------------------ - -- Keep-alive support! -- Complete removal of Urllib2 -- Complete removal of Poster -- Complete removal of CookieJars -- New ConnectionError raising -- Safe\_mode for error catching -- prefetch parameter for request methods -- OPTION method -- Async pool size throttling -- File uploads send real names -- Vendored in urllib3 - -0.7.6 (2011-11-07) ------------------ - -- Digest authentication bugfix (attach query data to path) - -0.7.5 (2011-11-04) ------------------ - -- Response.content = None if there was an invalid response. -- Redirection auth handling. - -0.7.4 (2011-10-26) ------------------ - -- Session Hooks fix. - -0.7.3 (2011-10-23) ------------------ - -- Digest Auth fix. - -0.7.2 (2011-10-23) ------------------ - -- PATCH Fix. - -0.7.1 (2011-10-23) ------------------ - -- Move away from urllib2 authentication handling. -- Fully Remove AuthManager, AuthObject, &c. -- New tuple-based auth system with handler callbacks. - -0.7.0 (2011-10-22) ------------------ - -- Sessions are now the primary interface. -- Deprecated InvalidMethodException. -- PATCH fix. -- New config system (no more global settings). - -0.6.6 (2011-10-19) ------------------ - -- Session parameter bugfix (params merging). - -0.6.5 (2011-10-18) ------------------ - -- Offline (fast) test suite. -- Session dictionary argument merging. - -0.6.4 (2011-10-13) ------------------ - -- Automatic decoding of unicode, based on HTTP Headers. -- New `decode_unicode` setting. -- Removal of `r.read/close` methods. -- New `r.raw` interface for advanced response usage.\* -- Automatic expansion of parameterized headers. - -0.6.3 (2011-10-13) ------------------ - -- Beautiful `requests.async` module, for making async requests w/ - gevent. - -0.6.2 (2011-10-09) ------------------ - -- GET/HEAD obeys allow\_redirects=False. - -0.6.1 (2011-08-20) ------------------ - -- Enhanced status codes experience `\o/` -- Set a maximum number of redirects (`settings.max_redirects`) -- Full Unicode URL support -- Support for protocol-less redirects. -- Allow for arbitrary request types.
-- Bugfixes - -0.6.0 (2011-08-17) ------------------ - -- New callback hook system -- New persistent sessions object and context manager -- Transparent Dict-cookie handling -- Status code reference object -- Removed Response.cached -- Added Response.request -- All args are kwargs -- Relative redirect support -- HTTPError handling improvements -- Improved https testing -- Bugfixes - -0.5.1 (2011-07-23) ------------------ - -- International Domain Name Support! -- Access headers without fetching entire body (`read()`) -- Use lists as dicts for parameters -- Add Forced Basic Authentication -- Forced Basic is default authentication type -- `python-requests.org` default User-Agent header -- CaseInsensitiveDict lower-case caching -- Response.history bugfix - -0.5.0 (2011-06-21) ------------------ - -- PATCH Support -- Support for Proxies -- HTTPBin Test Suite -- Redirect Fixes -- settings.verbose stream writing -- Querystrings for all methods -- URLErrors (Connection Refused, Timeout, Invalid URLs) are treated as - explicitly raised - `r.requests.get('hwe://blah'); r.raise_for_status()` - -0.4.1 (2011-05-22) ------------------ - -- Improved Redirection Handling -- New 'allow\_redirects' param for following non-GET/HEAD Redirects -- Settings module refactoring - -0.4.0 (2011-05-15) ------------------ - -- Response.history: list of redirected responses -- Case-Insensitive Header Dictionaries! -- Unicode URLs - -0.3.4 (2011-05-14) ------------------ - -- Urllib2 HTTPAuthentication Recursion fix (Basic/Digest) -- Internal Refactor -- Bytes data upload Bugfix - -0.3.3 (2011-05-12) ------------------ - -- Request timeouts -- Unicode url-encoded data -- Settings context manager and module - -0.3.2 (2011-04-15) ------------------ - -- Automatic Decompression of GZip Encoded Content -- AutoAuth Support for Tupled HTTP Auth - -0.3.1 (2011-04-01) ------------------ - -- Cookie Changes -- Response.read() -- Poster fix - -0.3.0 (2011-02-25) ------------------ - -- Automatic Authentication API Change -- Smarter Query URL Parameterization -- Allow file uploads and POST data together -- - - New Authentication Manager System - - : - Simpler Basic HTTP System - - Supports all built-in urllib2 Auths - - Allows for custom Auth Handlers - -0.2.4 (2011-02-19) ------------------ - -- Python 2.5 Support -- PyPy-c v1.4 Support -- Auto-Authentication tests -- Improved Request object constructor - -0.2.3 (2011-02-15) ------------------ - -- - - New HTTPHandling Methods - - : - Response.\_\_nonzero\_\_ (false if bad HTTP Status) - - Response.ok (True if expected HTTP Status) - - Response.error (Logged HTTPError if bad HTTP Status) - - Response.raise\_for\_status() (Raises stored HTTPError) - -0.2.2 (2011-02-14) ------------------ - -- Still handles request in the event of an HTTPError. (Issue \#2) -- Eventlet and Gevent Monkeypatch support. -- Cookie Support (Issue \#1) - -0.2.1 (2011-02-14) ------------------ - -- Added file attribute to POST and PUT requests for multipart-encode - file uploads. -- Added Request.url attribute for context and redirects - -0.2.0 (2011-02-14) ------------------ - -- Birth! - -0.0.1 (2011-02-13) ------------------ - -- Frustration -- Conception diff --git a/vendor/requests/LICENSE b/vendor/requests/LICENSE deleted file mode 100644 index 67db8588..00000000 --- a/vendor/requests/LICENSE +++ /dev/null @@ -1,175 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1.
Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. diff --git a/vendor/requests/MANIFEST.in b/vendor/requests/MANIFEST.in deleted file mode 100644 index 633be369..00000000 --- a/vendor/requests/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -include README.md LICENSE NOTICE HISTORY.md pytest.ini requirements-dev.txt -recursive-include tests *.py diff --git a/vendor/requests/Makefile b/vendor/requests/Makefile deleted file mode 100644 index a3fa4c2e..00000000 --- a/vendor/requests/Makefile +++ /dev/null @@ -1,28 +0,0 @@ -.PHONY: docs -init: - pip install -e .[socks] - pip install -r requirements-dev.txt -test: - # This runs all of the tests, on both Python 2 and Python 3. - detox -ci: - pytest tests --junitxml=report.xml - -test-readme: - python setup.py check --restructuredtext --strict && ([ $$? -eq 0 ] && echo "README.rst and HISTORY.rst ok") || echo "Invalid markup in README.rst or HISTORY.rst!" - -flake8: - flake8 --ignore=E501,F401,E128,E402,E731,F821 requests - -coverage: - pytest --cov-config .coveragerc --verbose --cov-report term --cov-report xml --cov=requests tests - -publish: - pip install 'twine>=1.5.0' - python setup.py sdist bdist_wheel - twine upload dist/* - rm -fr build dist .egg requests.egg-info - -docs: - cd docs && make html - @echo "\033[95m\n\nBuild successful! 
View the docs homepage at docs/_build/html/index.html.\n\033[0m" diff --git a/vendor/requests/NOTICE b/vendor/requests/NOTICE deleted file mode 100644 index 1ff62db6..00000000 --- a/vendor/requests/NOTICE +++ /dev/null @@ -1,2 +0,0 @@ -Requests -Copyright 2019 Kenneth Reitz diff --git a/vendor/requests/README.md b/vendor/requests/README.md deleted file mode 100644 index 1372f1fe..00000000 --- a/vendor/requests/README.md +++ /dev/null @@ -1,78 +0,0 @@ -# Requests - -**Requests** is a simple, yet elegant, HTTP library. - -```python ->>> import requests ->>> r = requests.get('https://api.github.com/user', auth=('user', 'pass')) ->>> r.status_code -200 ->>> r.headers['content-type'] -'application/json; charset=utf8' ->>> r.encoding -'utf-8' ->>> r.text -'{"type":"User"...' ->>> r.json() -{'disk_usage': 368627, 'private_gists': 484, ...} -``` - -Requests allows you to send HTTP/1.1 requests extremely easily. There’s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data — but nowadays, just use the `json` method! - -Requests is one of the most downloaded Python package today, pulling in around `14M downloads / week`— according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `500,000+` repositories. You may certainly put your trust in this code. - -[![Downloads](https://pepy.tech/badge/requests/month)](https://pepy.tech/project/requests/month) -[![Supported Versions](https://img.shields.io/pypi/pyversions/requests.svg)](https://pypi.org/project/requests) -[![Contributors](https://img.shields.io/github/contributors/psf/requests.svg)](https://github.com/psf/requests/graphs/contributors) - -## Installing Requests and Supported Versions - -Requests is available on PyPI: - -```console -$ python -m pip install requests -``` - -Requests officially supports Python 2.7 & 3.6+. - -## Supported Features & Best–Practices - -Requests is ready for the demands of building robust and reliable HTTP–speaking applications, for the needs of today. 
- -- Keep-Alive & Connection Pooling -- International Domains and URLs -- Sessions with Cookie Persistence -- Browser-style TLS/SSL Verification -- Basic & Digest Authentication -- Familiar `dict`–like Cookies -- Automatic Content Decompression and Decoding -- Multi-part File Uploads -- SOCKS Proxy Support -- Connection Timeouts -- Streaming Downloads -- Automatic honoring of `.netrc` -- Chunked HTTP Requests - -## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io) - -[![Read the Docs](https://raw.githubusercontent.com/psf/requests/master/ext/ss.png)](https://requests.readthedocs.io) - -## Cloning the repository - -When cloning the Requests repository, you may need to add the `-c -fetch.fsck.badTimezone=ignore` flag to avoid an error about a bad commit (see -[this issue](https://github.com/psf/requests/issues/2690) for more background): - -```shell -git clone -c fetch.fsck.badTimezone=ignore https://github.com/psf/requests.git -``` - -You can also apply this setting to your global Git config: - -```shell -git config --global fetch.fsck.badTimezone ignore -``` - ---- - -[![Kenneth Reitz](https://raw.githubusercontent.com/psf/requests/master/ext/kr.png)](https://kennethreitz.org) [![Python Software Foundation](https://raw.githubusercontent.com/psf/requests/master/ext/psf.png)](https://www.python.org/psf) diff --git a/vendor/requests/docs/.nojekyll b/vendor/requests/docs/.nojekyll deleted file mode 100644 index 8b137891..00000000 --- a/vendor/requests/docs/.nojekyll +++ /dev/null @@ -1 +0,0 @@ - diff --git a/vendor/requests/docs/Makefile b/vendor/requests/docs/Makefile deleted file mode 100644 index 08a2acf6..00000000 --- a/vendor/requests/docs/Makefile +++ /dev/null @@ -1,216 +0,0 @@ -# Makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = -BUILDDIR = _build - -# User-friendly check for sphinx-build -ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) -$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) -endif - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
- -.PHONY: help -help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " applehelp to make an Apple Help Book" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " xml to make Docutils-native XML files" - @echo " pseudoxml to make pseudoxml-XML files for display purposes" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - @echo " coverage to run coverage check of the documentation (if enabled)" - -.PHONY: clean -clean: - rm -rf $(BUILDDIR)/* - -.PHONY: html -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -.PHONY: dirhtml -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -.PHONY: singlehtml -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." - -.PHONY: pickle -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -.PHONY: json -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -.PHONY: htmlhelp -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -.PHONY: qthelp -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Requests.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Requests.qhc" - -.PHONY: applehelp -applehelp: - $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp - @echo - @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." - @echo "N.B. You won't be able to view it unless you put it in" \ - "~/Library/Documentation/Help or install it in your application" \ - "bundle." - -.PHONY: devhelp -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." 
- @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/Requests" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Requests" - @echo "# devhelp" - -.PHONY: epub -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -.PHONY: latex -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -.PHONY: latexpdf -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -.PHONY: latexpdfja -latexpdfja: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through platex and dvipdfmx..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -.PHONY: text -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -.PHONY: man -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -.PHONY: texinfo -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -.PHONY: info -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -.PHONY: gettext -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -.PHONY: changes -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -.PHONY: linkcheck -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -.PHONY: doctest -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." - -.PHONY: coverage -coverage: - $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage - @echo "Testing of coverage in the sources finished, look at the " \ - "results in $(BUILDDIR)/coverage/python.txt." - -.PHONY: xml -xml: - $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml - @echo - @echo "Build finished. The XML files are in $(BUILDDIR)/xml." - -.PHONY: pseudoxml -pseudoxml: - $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml - @echo - @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
diff --git a/vendor/requests/docs/_static/custom.css b/vendor/requests/docs/_static/custom.css deleted file mode 100644 index 465e8a9a..00000000 --- a/vendor/requests/docs/_static/custom.css +++ /dev/null @@ -1,177 +0,0 @@ -body > div.document > div.sphinxsidebar > div > form > table > tbody > tr:nth-child(2) > td > select { - width: 100%!important; -} - -#python27 > a { - color: white; -} - -/* Carbon by BuySellAds */ -#carbonads { - display: block; - overflow: hidden; - margin: 1.5em 0 2em; - padding: 1em; - border: solid 1px #cccccc; - border-radius: 2px; - background-color: #eeeeee; - text-align: center; - line-height: 1.5; -} - -#carbonads a { - border-bottom: 0; -} - -#carbonads span { - display: block; - overflow: hidden; -} - -.carbon-img { - display: block; - margin: 0 auto 1em; - text-align: center; -} - -.carbon-text { - display: block; - margin-bottom: 1em; -} - -.carbon-poweredby { - display: block; - text-transform: uppercase; - letter-spacing: 1px; - font-size: 10px; - line-height: 1; -} - - -/* Native CPC by BuySellAds */ - -#native-ribbon #_custom_ { - position: fixed; - right: 0; - bottom: 0; - left: 0; - box-shadow: 0 -1px 4px 1px hsla(0, 0%, 0%, .15); - font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen-Sans, Ubuntu, - Cantarell, "Helvetica Neue", Helvetica, Arial, sans-serif; - transition: all .25s ease-in-out; - transform: translateY(calc(100% - 35px)); - - flex-flow: column nowrap; -} - -#native-ribbon #_custom_:hover { - transform: translateY(0); -} - -.native-img { - margin-right: 20px; - max-height: 50px; - border-radius: 3px; -} - -.native-sponsor { - margin: 10px 20px; - text-align: center; - text-transform: uppercase; - letter-spacing: .5px; - font-size: 12px; - transition: all .3s ease-in-out; - transform-origin: left; -} - -#native-ribbon #_custom_:hover .native-sponsor { - margin: 0 20px; - opacity: 0; - transform: scaleY(0); -} - -.native-flex { - display: flex; - padding: 10px 20px 25px; - text-decoration: none; - - flex-flow: row nowrap; - justify-content: center; - align-items: center; -} - -.native-main { - display: flex; - - flex-flow: row nowrap; - align-items: center; -} - -.native-details { - display: flex; - margin-right: 30px; - - flex-flow: column nowrap; -} - -.native-company { - margin-bottom: 4px; - text-transform: uppercase; - letter-spacing: 2px; - font-size: 10px; -} - -.native-desc { - letter-spacing: 1px; - font-weight: 300; - font-size: 14px; - line-height: 1.4; -} - -.native-cta { - padding: 10px 14px; - border-radius: 3px; - box-shadow: 0 6px 13px 0 hsla(0, 0%, 0%, .15); - text-transform: uppercase; - white-space: nowrap; - letter-spacing: 1px; - font-weight: 400; - font-size: 12px; - transition: all .3s ease-in-out; - transform: translateY(-1px); -} - -.native-cta:hover { - box-shadow: none; - transform: translateY(1px); -} - -@media only screen and (min-width: 320px) and (max-width: 759px) { - .native-flex { - padding: 5px 5px 15px; - flex-direction: column; - - flex-wrap: wrap; - } - - .native-img { - margin: 0; - display: none; - } - - .native-details { - margin: 0; - } - - .native-main { - flex-direction: column; - text-align: left; - - flex-wrap: wrap; - align-content: center; - } - - .native-cta { - display: none; - } -} diff --git a/vendor/requests/docs/_templates/hacks.html b/vendor/requests/docs/_templates/hacks.html deleted file mode 100644 index eca5dffc..00000000 --- a/vendor/requests/docs/_templates/hacks.html +++ /dev/null @@ -1,80 +0,0 @@ - - - - - - - - - -
- [hacks.html body: inline HTML/JavaScript sidebar hacks; the markup was lost when this diff was extracted and is not recoverable]
diff --git a/vendor/requests/docs/_templates/sidebarintro.html b/vendor/requests/docs/_templates/sidebarintro.html deleted file mode 100644 index b5f51a0a..00000000 --- a/vendor/requests/docs/_templates/sidebarintro.html +++ /dev/null @@ -1,73 +0,0 @@
- [HTML sidebar template; markup lost in extraction, surviving text follows]
- Requests is an elegant and simple HTTP library for Python, built for human beings.
- Sponsored by CERT Gouvernemental - GOVCERT.LU.
- Useful Links
- Translations
diff --git a/vendor/requests/docs/_templates/sidebarlogo.html b/vendor/requests/docs/_templates/sidebarlogo.html deleted file mode 100644 index 56d61095..00000000 --- a/vendor/requests/docs/_templates/sidebarlogo.html +++ /dev/null @@ -1,71 +0,0 @@
- [HTML sidebar template; markup lost in extraction, surviving text follows]
- Requests is an elegant and simple HTTP library for Python, built for human beings. You are currently looking at the documentation of the development release.
- Sponsored by CERT Gouvernemental - GOVCERT.LU.
- Useful Links
- Translations
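For context, these three deleted templates were consumed by Sphinx through the `html_sidebars` mapping in `docs/conf.py` (itself deleted further down in this diff); Sphinx resolves each name against `templates_path`. A minimal sketch of that wiring, reproduced from the conf.py hunk below:

```python
# Sketch of how the deleted sidebar templates were wired up in docs/conf.py.
# Sphinx looks up each template name relative to templates_path.
templates_path = ["_templates"]

html_sidebars = {
    # The landing page gets the intro sidebar.
    "index": ["sidebarintro.html", "sourcelink.html", "searchbox.html", "hacks.html"],
    # Every other page gets the logo sidebar.
    "**": [
        "sidebarlogo.html",
        "localtoc.html",
        "relations.html",
        "sourcelink.html",
        "searchbox.html",
        "hacks.html",
    ],
}
```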
- - diff --git a/vendor/requests/docs/_themes/.gitignore b/vendor/requests/docs/_themes/.gitignore deleted file mode 100644 index 66b6e4c2..00000000 --- a/vendor/requests/docs/_themes/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -*.pyc -*.pyo -.DS_Store diff --git a/vendor/requests/docs/_themes/LICENSE b/vendor/requests/docs/_themes/LICENSE deleted file mode 100644 index 3d1e04a2..00000000 --- a/vendor/requests/docs/_themes/LICENSE +++ /dev/null @@ -1,45 +0,0 @@ -Modifications: - -Copyright (c) 2011 Kenneth Reitz. - - -Original Project: - -Copyright (c) 2010 by Armin Ronacher. - - -Some rights reserved. - -Redistribution and use in source and binary forms of the theme, with or -without modification, are permitted provided that the following conditions -are met: - -* Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - -* The names of the contributors may not be used to endorse or - promote products derived from this software without specific - prior written permission. - -We kindly ask you to only use these themes in an unmodified manner just -for Flask and Flask-related products, not for unrelated projects. If you -like the visual style and want to use it for your own projects, please -consider making some larger changes to the themes (such as changing -font faces, sizes, colors or margins). - -THIS THEME IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OF THIS THEME, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/requests/docs/_themes/flask_theme_support.py b/vendor/requests/docs/_themes/flask_theme_support.py deleted file mode 100644 index 33f47449..00000000 --- a/vendor/requests/docs/_themes/flask_theme_support.py +++ /dev/null @@ -1,86 +0,0 @@ -# flasky extensions. 
flasky pygments style based on tango style -from pygments.style import Style -from pygments.token import Keyword, Name, Comment, String, Error, \ - Number, Operator, Generic, Whitespace, Punctuation, Other, Literal - - -class FlaskyStyle(Style): - background_color = "#f8f8f8" - default_style = "" - - styles = { - # No corresponding class for the following: - #Text: "", # class: '' - Whitespace: "underline #f8f8f8", # class: 'w' - Error: "#a40000 border:#ef2929", # class: 'err' - Other: "#000000", # class 'x' - - Comment: "italic #8f5902", # class: 'c' - Comment.Preproc: "noitalic", # class: 'cp' - - Keyword: "bold #004461", # class: 'k' - Keyword.Constant: "bold #004461", # class: 'kc' - Keyword.Declaration: "bold #004461", # class: 'kd' - Keyword.Namespace: "bold #004461", # class: 'kn' - Keyword.Pseudo: "bold #004461", # class: 'kp' - Keyword.Reserved: "bold #004461", # class: 'kr' - Keyword.Type: "bold #004461", # class: 'kt' - - Operator: "#582800", # class: 'o' - Operator.Word: "bold #004461", # class: 'ow' - like keywords - - Punctuation: "bold #000000", # class: 'p' - - # because special names such as Name.Class, Name.Function, etc. - # are not recognized as such later in the parsing, we choose them - # to look the same as ordinary variables. - Name: "#000000", # class: 'n' - Name.Attribute: "#c4a000", # class: 'na' - to be revised - Name.Builtin: "#004461", # class: 'nb' - Name.Builtin.Pseudo: "#3465a4", # class: 'bp' - Name.Class: "#000000", # class: 'nc' - to be revised - Name.Constant: "#000000", # class: 'no' - to be revised - Name.Decorator: "#888", # class: 'nd' - to be revised - Name.Entity: "#ce5c00", # class: 'ni' - Name.Exception: "bold #cc0000", # class: 'ne' - Name.Function: "#000000", # class: 'nf' - Name.Property: "#000000", # class: 'py' - Name.Label: "#f57900", # class: 'nl' - Name.Namespace: "#000000", # class: 'nn' - to be revised - Name.Other: "#000000", # class: 'nx' - Name.Tag: "bold #004461", # class: 'nt' - like a keyword - Name.Variable: "#000000", # class: 'nv' - to be revised - Name.Variable.Class: "#000000", # class: 'vc' - to be revised - Name.Variable.Global: "#000000", # class: 'vg' - to be revised - Name.Variable.Instance: "#000000", # class: 'vi' - to be revised - - Number: "#990000", # class: 'm' - - Literal: "#000000", # class: 'l' - Literal.Date: "#000000", # class: 'ld' - - String: "#4e9a06", # class: 's' - String.Backtick: "#4e9a06", # class: 'sb' - String.Char: "#4e9a06", # class: 'sc' - String.Doc: "italic #8f5902", # class: 'sd' - like a comment - String.Double: "#4e9a06", # class: 's2' - String.Escape: "#4e9a06", # class: 'se' - String.Heredoc: "#4e9a06", # class: 'sh' - String.Interpol: "#4e9a06", # class: 'si' - String.Other: "#4e9a06", # class: 'sx' - String.Regex: "#4e9a06", # class: 'sr' - String.Single: "#4e9a06", # class: 's1' - String.Symbol: "#4e9a06", # class: 'ss' - - Generic: "#000000", # class: 'g' - Generic.Deleted: "#a40000", # class: 'gd' - Generic.Emph: "italic #000000", # class: 'ge' - Generic.Error: "#ef2929", # class: 'gr' - Generic.Heading: "bold #000080", # class: 'gh' - Generic.Inserted: "#00A000", # class: 'gi' - Generic.Output: "#888", # class: 'go' - Generic.Prompt: "#745334", # class: 'gp' - Generic.Strong: "bold #000000", # class: 'gs' - Generic.Subheading: "bold #800080", # class: 'gu' - Generic.Traceback: "bold #a40000", # class: 'gt' - } diff --git a/vendor/requests/docs/api.rst b/vendor/requests/docs/api.rst deleted file mode 100644 index 93cc4f0d..00000000 --- a/vendor/requests/docs/api.rst +++ /dev/null @@ 
-1,260 +0,0 @@ -.. _api: - -Developer Interface -=================== - -.. module:: requests - -This part of the documentation covers all the interfaces of Requests. For -parts where Requests depends on external libraries, we document the most -important right here and provide links to the canonical documentation. - - -Main Interface --------------- - -All of Requests' functionality can be accessed by these 7 methods. -They all return an instance of the :class:`Response ` object. - -.. autofunction:: request - -.. autofunction:: head -.. autofunction:: get -.. autofunction:: post -.. autofunction:: put -.. autofunction:: patch -.. autofunction:: delete - -Exceptions ----------- - -.. autoexception:: requests.RequestException -.. autoexception:: requests.ConnectionError -.. autoexception:: requests.HTTPError -.. autoexception:: requests.URLRequired -.. autoexception:: requests.TooManyRedirects -.. autoexception:: requests.ConnectTimeout -.. autoexception:: requests.ReadTimeout -.. autoexception:: requests.Timeout - - -Request Sessions ----------------- - -.. _sessionapi: - -.. autoclass:: Session - :inherited-members: - - -Lower-Level Classes -------------------- - -.. autoclass:: requests.Request - :inherited-members: - -.. autoclass:: Response - :inherited-members: - - -Lower-Lower-Level Classes -------------------------- - -.. autoclass:: requests.PreparedRequest - :inherited-members: - -.. autoclass:: requests.adapters.BaseAdapter - :inherited-members: - -.. autoclass:: requests.adapters.HTTPAdapter - :inherited-members: - -Authentication --------------- - -.. autoclass:: requests.auth.AuthBase -.. autoclass:: requests.auth.HTTPBasicAuth -.. autoclass:: requests.auth.HTTPProxyAuth -.. autoclass:: requests.auth.HTTPDigestAuth - - - -Encodings ---------- - -.. autofunction:: requests.utils.get_encodings_from_content -.. autofunction:: requests.utils.get_encoding_from_headers -.. autofunction:: requests.utils.get_unicode_from_response - - -.. _api-cookies: - -Cookies -------- - -.. autofunction:: requests.utils.dict_from_cookiejar -.. autofunction:: requests.utils.add_dict_to_cookiejar -.. autofunction:: requests.cookies.cookiejar_from_dict - -.. autoclass:: requests.cookies.RequestsCookieJar - :inherited-members: - -.. autoclass:: requests.cookies.CookieConflictError - :inherited-members: - - - -Status Code Lookup ------------------- - -.. autoclass:: requests.codes - -.. automodule:: requests.status_codes - - -Migrating to 1.x ----------------- - -This section details the main differences between 0.x and 1.x and is meant -to ease the pain of upgrading. - - -API Changes -~~~~~~~~~~~ - -* ``Response.json`` is now a callable and not a property of a response. - - :: - - import requests - r = requests.get('https://github.com/timeline.json') - r.json() # This *call* raises an exception if JSON decoding fails - -* The ``Session`` API has changed. Sessions objects no longer take parameters. - ``Session`` is also now capitalized, but it can still be - instantiated with a lowercase ``session`` for backwards compatibility. - - :: - - s = requests.Session() # formerly, session took parameters - s.auth = auth - s.headers.update(headers) - r = s.get('https://httpbin.org/headers') - -* All request hooks have been removed except 'response'. - -* Authentication helpers have been broken out into separate modules. See - requests-oauthlib_ and requests-kerberos_. - -.. _requests-oauthlib: https://github.com/requests/requests-oauthlib -.. 
_requests-kerberos: https://github.com/requests/requests-kerberos - -* The parameter for streaming requests was changed from ``prefetch`` to - ``stream`` and the logic was inverted. In addition, ``stream`` is now - required for raw response reading. - - :: - - # in 0.x, passing prefetch=False would accomplish the same thing - r = requests.get('https://github.com/timeline.json', stream=True) - for chunk in r.iter_content(8192): - ... - -* The ``config`` parameter to the requests method has been removed. Some of - these options are now configured on a ``Session`` such as keep-alive and - maximum number of redirects. The verbosity option should be handled by - configuring logging. - - :: - - import requests - import logging - - # Enabling debugging at http.client level (requests->urllib3->http.client) - # you will see the REQUEST, including HEADERS and DATA, and RESPONSE with HEADERS but without DATA. - # the only thing missing will be the response.body which is not logged. - try: # for Python 3 - from http.client import HTTPConnection - except ImportError: - from httplib import HTTPConnection - HTTPConnection.debuglevel = 1 - - logging.basicConfig() # you need to initialize logging, otherwise you will not see anything from requests - logging.getLogger().setLevel(logging.DEBUG) - requests_log = logging.getLogger("urllib3") - requests_log.setLevel(logging.DEBUG) - requests_log.propagate = True - - requests.get('https://httpbin.org/headers') - - - -Licensing -~~~~~~~~~ - -One key difference that has nothing to do with the API is a change in the -license from the ISC_ license to the `Apache 2.0`_ license. The Apache 2.0 -license ensures that contributions to Requests are also covered by the Apache -2.0 license. - -.. _ISC: https://opensource.org/licenses/ISC -.. _Apache 2.0: https://opensource.org/licenses/Apache-2.0 - - -Migrating to 2.x ----------------- - - -Compared with the 1.0 release, there were relatively few backwards -incompatible changes, but there are still a few issues to be aware of with -this major release. - -For more details on the changes in this release including new APIs, links -to the relevant GitHub issues and some of the bug fixes, read Cory's blog_ -on the subject. - -.. _blog: https://lukasa.co.uk/2013/09/Requests_20/ - - -API Changes -~~~~~~~~~~~ - -* There were a couple changes to how Requests handles exceptions. - ``RequestException`` is now a subclass of ``IOError`` rather than - ``RuntimeError`` as that more accurately categorizes the type of error. - In addition, an invalid URL escape sequence now raises a subclass of - ``RequestException`` rather than a ``ValueError``. - - :: - - requests.get('http://%zz/') # raises requests.exceptions.InvalidURL - - Lastly, ``httplib.IncompleteRead`` exceptions caused by incorrect chunked - encoding will now raise a Requests ``ChunkedEncodingError`` instead. - -* The proxy API has changed slightly. The scheme for a proxy URL is now - required. - - :: - - proxies = { - "http": "10.10.1.10:3128", # use http://10.10.1.10:3128 instead - } - - # In requests 1.x, this was legal, in requests 2.x, - # this raises requests.exceptions.MissingSchema - requests.get("http://example.org", proxies=proxies) - - -Behavioural Changes -~~~~~~~~~~~~~~~~~~~~~~~ - -* Keys in the ``headers`` dictionary are now native strings on all Python - versions, i.e. bytestrings on Python 2 and unicode on Python 3. 
If the - keys are not native strings (unicode on Python 2 or bytestrings on Python 3) - they will be converted to the native string type assuming UTF-8 encoding. - -* Values in the ``headers`` dictionary should always be strings. This has - been the project's position since before 1.0 but a recent change - (since version 2.11.0) enforces this more strictly. It's advised to avoid - passing header values as unicode when possible. diff --git a/vendor/requests/docs/community/faq.rst b/vendor/requests/docs/community/faq.rst deleted file mode 100644 index 177eaec4..00000000 --- a/vendor/requests/docs/community/faq.rst +++ /dev/null @@ -1,95 +0,0 @@ -.. _faq: - -Frequently Asked Questions -========================== - -This part of the documentation answers common questions about Requests. - -Encoded Data? -------------- - -Requests automatically decompresses gzip-encoded responses, and does -its best to decode response content to unicode when possible. - -When either the `brotli `_ or `brotlicffi `_ -package is installed, requests also decodes Brotli-encoded responses. - -You can get direct access to the raw response (and even the socket), -if needed as well. - - -Custom User-Agents? -------------------- - -Requests allows you to easily override User-Agent strings, along with -any other HTTP Header. See `documentation about headers `_. - - - -Why not Httplib2? ------------------ - -Chris Adams gave an excellent summary on -`Hacker News `_: - - httplib2 is part of why you should use requests: it's far more respectable - as a client but not as well documented and it still takes way too much code - for basic operations. I appreciate what httplib2 is trying to do, that - there's a ton of hard low-level annoyances in building a modern HTTP - client, but really, just use requests instead. Kenneth Reitz is very - motivated and he gets the degree to which simple things should be simple - whereas httplib2 feels more like an academic exercise than something - people should use to build production systems[1]. - - Disclosure: I'm listed in the requests AUTHORS file but can claim credit - for, oh, about 0.0001% of the awesomeness. - - 1. http://code.google.com/p/httplib2/issues/detail?id=96 is a good example: - an annoying bug which affect many people, there was a fix available for - months, which worked great when I applied it in a fork and pounded a couple - TB of data through it, but it took over a year to make it into trunk and - even longer to make it onto PyPI where any other project which required " - httplib2" would get the working version. - - -Python 3 Support? ------------------ - -Yes! Requests officially supports Python 2.7 & 3.6+ and PyPy. - -Python 2 Support? ------------------ - -Yes! We do not have immediate plans to `sunset -`_ our support for Python -2.7. We understand that we have a large user base with varying needs, -and intend to maintain Python 2.7 support within Requests until `pip -stops supporting Python 2.7 (there's no estimated date on that yet) -`_. - -That said, it is *highly* recommended users migrate to Python 3.6+ since Python -2.7 is no longer receiving bug fixes or security updates as of January 1, 2020. - -What are "hostname doesn't match" errors? ------------------------------------------ - -These errors occur when :ref:`SSL certificate verification ` -fails to match the certificate the server responds with to the hostname -Requests thinks it's contacting. 
If you're certain the server's SSL setup is -correct (for example, because you can visit the site with your browser) and -you're using Python 2.7, a possible explanation is that you need -Server-Name-Indication. - -`Server-Name-Indication`_, or SNI, is an official extension to SSL where the -client tells the server what hostname it is contacting. This is important -when servers are using `Virtual Hosting`_. When such servers are hosting -more than one SSL site they need to be able to return the appropriate -certificate based on the hostname the client is connecting to. - -Python3 and Python 2.7.9+ include native support for SNI in their SSL modules. -For information on using SNI with Requests on Python < 2.7.9 refer to this -`Stack Overflow answer`_. - -.. _`Server-Name-Indication`: https://en.wikipedia.org/wiki/Server_Name_Indication -.. _`virtual hosting`: https://en.wikipedia.org/wiki/Virtual_hosting -.. _`Stack Overflow answer`: https://stackoverflow.com/questions/18578439/using-requests-with-tls-doesnt-give-sni-support/18579484#18579484 diff --git a/vendor/requests/docs/community/out-there.rst b/vendor/requests/docs/community/out-there.rst deleted file mode 100644 index c33ab3c9..00000000 --- a/vendor/requests/docs/community/out-there.rst +++ /dev/null @@ -1,22 +0,0 @@ -Integrations -============ - -Python for iOS --------------- - -Requests is built into the wonderful `Python for iOS `_ runtime! - -To give it a try, simply:: - - import requests - - -Articles & Talks -================ -- `Python for the Web `_ teaches how to use Python to interact with the web, using Requests. -- `Daniel Greenfeld's Review of Requests `_ -- `My 'Python for Humans' talk `_ ( `audio `_ ) -- `Issac Kelly's 'Consuming Web APIs' talk `_ -- `Blog post about Requests via Yum `_ -- `Russian blog post introducing Requests `_ -- `Sending JSON in Requests `_ diff --git a/vendor/requests/docs/community/recommended.rst b/vendor/requests/docs/community/recommended.rst deleted file mode 100644 index 517f4b18..00000000 --- a/vendor/requests/docs/community/recommended.rst +++ /dev/null @@ -1,62 +0,0 @@ -.. _recommended: - -Recommended Packages and Extensions -=================================== - -Requests has a great variety of powerful and useful third-party extensions. -This page provides an overview of some of the best of them. - -Certifi CA Bundle ------------------ - -`Certifi`_ is a carefully curated collection of Root Certificates for -validating the trustworthiness of SSL certificates while verifying the -identity of TLS hosts. It has been extracted from the Requests project. - -.. _Certifi: https://github.com/certifi/python-certifi - -CacheControl ------------- - -`CacheControl`_ is an extension that adds a full HTTP cache to Requests. This -makes your web requests substantially more efficient, and should be used -whenever you're making a lot of web requests. - -.. _CacheControl: https://cachecontrol.readthedocs.io/en/latest/ - -Requests-Toolbelt ------------------ - -`Requests-Toolbelt`_ is a collection of utilities that some users of Requests may desire, -but do not belong in Requests proper. This library is actively maintained -by members of the Requests core team, and reflects the functionality most -requested by users within the community. - -.. _Requests-Toolbelt: https://toolbelt.readthedocs.io/en/latest/index.html - - -Requests-Threads ----------------- - -`Requests-Threads`_ is a Requests session that returns the amazing Twisted's awaitable Deferreds instead of Response objects. 
This allows the use of ``async``/``await`` keyword usage on Python 3, or Twisted's style of programming, if desired. - -.. _Requests-Threads: https://github.com/requests/requests-threads - -Requests-OAuthlib ------------------ - -`requests-oauthlib`_ makes it possible to do the OAuth dance from Requests -automatically. This is useful for the large number of websites that use OAuth -to provide authentication. It also provides a lot of tweaks that handle ways -that specific OAuth providers differ from the standard specifications. - -.. _requests-oauthlib: https://requests-oauthlib.readthedocs.io/en/latest/ - - -Betamax -------- - -`Betamax`_ records your HTTP interactions so the NSA does not have to. -A VCR imitation designed only for Python-Requests. - -.. _betamax: https://github.com/betamaxpy/betamax diff --git a/vendor/requests/docs/community/release-process.rst b/vendor/requests/docs/community/release-process.rst deleted file mode 100644 index 4aa98f75..00000000 --- a/vendor/requests/docs/community/release-process.rst +++ /dev/null @@ -1,53 +0,0 @@ -Release Process and Rules -========================= - -.. versionadded:: v2.6.2 - -Starting with the version to be released after ``v2.6.2``, the following rules -will govern and describe how the Requests core team produces a new release. - -Major Releases --------------- - -A major release will include breaking changes. When it is versioned, it will -be versioned as ``vX.0.0``. For example, if the previous release was -``v10.2.7`` the next version will be ``v11.0.0``. - -Breaking changes are changes that break backwards compatibility with prior -versions. If the project were to change the ``text`` attribute on a -``Response`` object to a method, that would only happen in a Major release. - -Major releases may also include miscellaneous bug fixes. The core developers of -Requests are committed to providing a good user experience. This means we're -also committed to preserving backwards compatibility as much as possible. Major -releases will be infrequent and will need strong justifications before they are -considered. - -Minor Releases --------------- - -A minor release will not include breaking changes but may include miscellaneous -bug fixes. If the previous version of Requests released was ``v10.2.7`` a minor -release would be versioned as ``v10.3.0``. - -Minor releases will be backwards compatible with releases that have the same -major version number. In other words, all versions that would start with -``v10.`` should be compatible with each other. - -Hotfix Releases ---------------- - -A hotfix release will only include bug fixes that were missed when the project -released the previous version. If the previous version of Requests released -``v10.2.7`` the hotfix release would be versioned as ``v10.2.8``. - -Hotfixes will **not** include upgrades to vendored dependencies after -``v2.6.2`` - -Reasoning ---------- - -In the 2.5 and 2.6 release series, the Requests core team upgraded vendored -dependencies and caused a great deal of headaches for both users and the core -team. To reduce this pain, we're forming a concrete set of procedures so -expectations will be properly set. diff --git a/vendor/requests/docs/community/support.rst b/vendor/requests/docs/community/support.rst deleted file mode 100644 index ee905f54..00000000 --- a/vendor/requests/docs/community/support.rst +++ /dev/null @@ -1,31 +0,0 @@ -.. 
_support: - -Support -======= - -If you have questions or issues about Requests, there are several options: - -Stack Overflow --------------- - -If your question does not contain sensitive (possibly proprietary) -information or can be properly anonymized, please ask a question on -`Stack Overflow `_ -and use the tag ``python-requests``. - - -File an Issue -------------- - -If you notice some unexpected behaviour in Requests, or want to see support -for a new feature, -`file an issue on GitHub `_. - - -Send a Tweet ------------- - -If your question is less than 280 characters, feel free to send a tweet to -`@nateprewitt `_, -`@sethmlarson `_, or -`@sigmavirus24 `_. diff --git a/vendor/requests/docs/community/updates.rst b/vendor/requests/docs/community/updates.rst deleted file mode 100644 index c787c45b..00000000 --- a/vendor/requests/docs/community/updates.rst +++ /dev/null @@ -1,18 +0,0 @@ -.. _updates: - - -Community Updates -================= - -If you'd like to stay up to date on the community and development of Requests, -there are several options: - - -GitHub ------- - -The best way to track the development of Requests is through -`the GitHub repo `_. - - -.. include:: ../../HISTORY.md diff --git a/vendor/requests/docs/community/vulnerabilities.rst b/vendor/requests/docs/community/vulnerabilities.rst deleted file mode 100644 index 63219b57..00000000 --- a/vendor/requests/docs/community/vulnerabilities.rst +++ /dev/null @@ -1,110 +0,0 @@ -Vulnerability Disclosure -======================== - -If you think you have found a potential security vulnerability in requests, -please email `Nate `_ and `Seth `_ directly. **Do not file a public issue.** - -Our PGP Key fingerprints are: - -- 8722 7E29 AD9C FF5C FAC3 EA6A 44D3 FF97 B80D C864 (`@nateprewitt `_) - -- EDD5 6765 A9D8 4653 CBC8 A134 51B0 6736 1740 F5FC (`@sethmlarson `_) - -You can also contact us on `Keybase `_ with the -profiles above if desired. - -If English is not your first language, please try to describe the problem and -its impact to the best of your ability. For greater detail, please use your -native language and we will try our best to translate it using online services. - -Please also include the code you used to find the problem and the shortest -amount of code necessary to reproduce it. - -Please do not disclose this to anyone else. We will retrieve a CVE identifier -if necessary and give you full credit under whatever name or alias you provide. -We will only request an identifier when we have a fix and can publish it in a -release. - -We will respect your privacy and will only publicize your involvement if you -grant us permission. - -Process -------- - -This following information discusses the process the requests project follows -in response to vulnerability disclosures. If you are disclosing a -vulnerability, this section of the documentation lets you know how we will -respond to your disclosure. - -Timeline -~~~~~~~~ - -When you report an issue, one of the project members will respond to you within -two days *at the outside*. In most cases responses will be faster, usually -within 12 hours. This initial response will at the very least confirm receipt -of the report. - -If we were able to rapidly reproduce the issue, the initial response will also -contain confirmation of the issue. If we are not, we will often ask for more -information about the reproduction scenario. - -Our goal is to have a fix for any vulnerability released within two weeks of -the initial disclosure. 
This may potentially involve shipping an interim -release that simply disables function while a more mature fix can be prepared, -but will in the vast majority of cases mean shipping a complete release as soon -as possible. - -Throughout the fix process we will keep you up to speed with how the fix is -progressing. Once the fix is prepared, we will notify you that we believe we -have a fix. Often we will ask you to confirm the fix resolves the problem in -your environment, especially if we are not confident of our reproduction -scenario. - -At this point, we will prepare for the release. We will obtain a CVE number -if one is required, providing you with full credit for the discovery. We will -also decide on a planned release date, and let you know when it is. This -release date will *always* be on a weekday. - -At this point we will reach out to our major downstream packagers to notify -them of an impending security-related patch so they can make arrangements. In -addition, these packagers will be provided with the intended patch ahead of -time, to ensure that they are able to promptly release their downstream -packages. Currently the list of people we actively contact *ahead of a public -release* is: - -- Jeremy Cline, Red Hat (@jeremycline) -- Daniele Tricoli, Debian (@eriol) - -We will notify these individuals at least a week ahead of our planned release -date to ensure that they have sufficient time to prepare. If you believe you -should be on this list, please let one of the maintainers know at one of the -email addresses at the top of this article. - -On release day, we will push the patch to our public repository, along with an -updated changelog that describes the issue and credits you. We will then issue -a PyPI release containing the patch. - -At this point, we will publicise the release. This will involve mails to -mailing lists, Tweets, and all other communication mechanisms available to the -core team. - -We will also explicitly mention which commits contain the fix to make it easier -for other distributors and users to easily patch their own versions of requests -if upgrading is not an option. - -Previous CVEs -------------- - -- Fixed in 2.20.0 - - `CVE 2018-18074 `_ - -- Fixed in 2.6.0 - - - `CVE 2015-2296 `_, - reported by Matthew Daley of `BugFuzz `_. - -- Fixed in 2.3.0 - - - `CVE 2014-1829 `_ - - - `CVE 2014-1830 `_ diff --git a/vendor/requests/docs/conf.py b/vendor/requests/docs/conf.py deleted file mode 100644 index edbd72ba..00000000 --- a/vendor/requests/docs/conf.py +++ /dev/null @@ -1,386 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Requests documentation build configuration file, created by -# sphinx-quickstart on Fri Feb 19 00:05:47 2016. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# sys.path.insert(0, os.path.abspath('.')) - -# Insert Requests' path into the system. 
-sys.path.insert(0, os.path.abspath("..")) -sys.path.insert(0, os.path.abspath("_themes")) - -import requests - - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -# needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.intersphinx", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] -source_suffix = ".rst" - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. -project = u"Requests" -copyright = u'MMXVIX. A Kenneth Reitz Project' -author = u"Kenneth Reitz" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = requests.__version__ -# The full version, including alpha/beta/rc tags. -release = requests.__version__ - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -add_function_parentheses = False - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "flask_theme_support.FlaskyStyle" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. 
-html_theme_options = { - "show_powered_by": False, - "github_user": "requests", - "github_repo": "requests", - "github_banner": True, - "show_related": False, - "note_bg": "#FFF59C", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -html_use_smartypants = False - -# Custom sidebar templates, maps document names to template names. -html_sidebars = { - "index": ["sidebarintro.html", "sourcelink.html", "searchbox.html", "hacks.html"], - "**": [ - "sidebarlogo.html", - "localtoc.html", - "relations.html", - "sourcelink.html", - "searchbox.html", - "hacks.html", - ], -} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -html_show_sourcelink = False - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -html_show_sphinx = False - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. 
-# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "Requestsdoc" - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - #'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - #'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - #'preamble': '', - # Latex figure (float) alignment - #'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, "Requests.tex", u"Requests Documentation", u"Kenneth Reitz", "manual") -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [(master_doc, "requests", u"Requests Documentation", [author], 1)] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "Requests", - u"Requests Documentation", - author, - "Requests", - "One line description of project.", - "Miscellaneous", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# -- Options for Epub output ---------------------------------------------- - -# Bibliographic Dublin Core info. -epub_title = project -epub_author = author -epub_publisher = author -epub_copyright = copyright - -# The basename for the epub file. It defaults to the project name. -# epub_basename = project - -# The HTML theme for the epub output. Since the default themes are not -# optimized for small screen space, using the same theme for HTML and epub -# output is usually not wise. This defaults to 'epub', a theme designed to save -# visual space. -# epub_theme = 'epub' - -# The language of the text. It defaults to the language option -# or 'en' if the language is not set. -# epub_language = '' - -# The scheme of the identifier. Typical schemes are ISBN or URL. -# epub_scheme = '' - -# The unique identifier of the text. This can be a ISBN number -# or the project homepage. -# epub_identifier = '' - -# A unique identification for the text. -# epub_uid = '' - -# A tuple containing the cover image and cover page html template filenames. 
-# epub_cover = () - -# A sequence of (type, uri, title) tuples for the guide element of content.opf. -# epub_guide = () - -# HTML files that should be inserted before the pages created by sphinx. -# The format is a list of tuples containing the path and title. -# epub_pre_files = [] - -# HTML files that should be inserted after the pages created by sphinx. -# The format is a list of tuples containing the path and title. -# epub_post_files = [] - -# A list of files that should not be packed into the epub file. -epub_exclude_files = ["search.html"] - -# The depth of the table of contents in toc.ncx. -# epub_tocdepth = 3 - -# Allow duplicate toc entries. -# epub_tocdup = True - -# Choose between 'default' and 'includehidden'. -# epub_tocscope = 'default' - -# Fix unsupported image types using the Pillow. -# epub_fix_images = False - -# Scale large images. -# epub_max_image_width = 0 - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# epub_show_urls = 'inline' - -# If false, no index is generated. -# epub_use_index = True - -intersphinx_mapping = { - "python": ("https://docs.python.org/3/", None), - "urllib3": ("https://urllib3.readthedocs.io/en/latest", None), -} diff --git a/vendor/requests/docs/dev/authors.rst b/vendor/requests/docs/dev/authors.rst deleted file mode 100644 index e9799a91..00000000 --- a/vendor/requests/docs/dev/authors.rst +++ /dev/null @@ -1,4 +0,0 @@ -Authors -======= - -.. include:: ../../AUTHORS.rst diff --git a/vendor/requests/docs/dev/contributing.rst b/vendor/requests/docs/dev/contributing.rst deleted file mode 100644 index 0cf7baed..00000000 --- a/vendor/requests/docs/dev/contributing.rst +++ /dev/null @@ -1,208 +0,0 @@ -.. _contributing: - -Contributor's Guide -=================== - -If you're reading this, you're probably interested in contributing to Requests. -Thank you very much! Open source projects live-and-die based on the support -they receive from others, and the fact that you're even considering -contributing to the Requests project is *very* generous of you. - -This document lays out guidelines and advice for contributing to this project. -If you're thinking of contributing, please start by reading this document and -getting a feel for how contributing to this project works. If you have any -questions, feel free to reach out to either `Nate Prewitt`_, `Ian Cordasco`_, -or `Seth Michael Larson`_, the primary maintainers. - -.. _Ian Cordasco: http://www.coglib.com/~icordasc/ -.. _Nate Prewitt: https://www.nateprewitt.com/ -.. _Seth Michael Larson: https://sethmlarson.dev/ - -The guide is split into sections based on the type of contribution you're -thinking of making, with a section that covers general guidelines for all -contributors. - -Be Cordial ----------- - - **Be cordial or be on your way**. *—Kenneth Reitz* - -Requests has one very important rule governing all forms of contribution, -including reporting bugs or requesting features. This golden rule is -"`be cordial or be on your way`_". - -**All contributions are welcome**, as long as -everyone involved is treated with respect. - -.. _be cordial or be on your way: https://kenreitz.org/essays/2013/01/27/be-cordial-or-be-on-your-way - -.. _early-feedback: - -Get Early Feedback ------------------- - -If you are contributing, do not feel the need to sit on your contribution until -it is perfectly polished and complete. It helps everyone involved for you to -seek feedback as early as you possibly can. 
Submitting an early, unfinished
-version of your contribution for feedback in no way prejudices your chances of
-getting that contribution accepted, and can save you from putting a lot of work
-into a contribution that is not suitable for the project.
-
-Contribution Suitability
-------------------------
-
-Our project maintainers have the last word on whether or not a contribution is
-suitable for Requests. All contributions will be considered carefully, but from
-time to time, contributions will be rejected because they do not suit the
-current goals or needs of the project.
-
-If your contribution is rejected, don't despair! As long as you followed these
-guidelines, you will have a much better chance of getting your next
-contribution accepted.
-
-
-Code Contributions
-------------------
-
-Steps for Submitting Code
-~~~~~~~~~~~~~~~~~~~~~~~~~
-
-When contributing code, you'll want to follow this checklist:
-
-1. Fork the repository on GitHub.
-2. Run the tests to confirm they all pass on your system. If they don't, you'll
-   need to investigate why they fail. If you're unable to diagnose this
-   yourself, raise it as a bug report by following the guidelines in this
-   document: :ref:`bug-reports`.
-3. Write tests that demonstrate your bug or feature. Ensure that they fail.
-4. Make your change.
-5. Run the entire test suite again, confirming that all tests pass *including
-   the ones you just added*.
-6. Send a GitHub Pull Request to the main repository's ``master`` branch.
-   GitHub Pull Requests are the expected method of code collaboration on this
-   project.
-
-The following sub-sections go into more detail on some of the points above.
-
-Code Review
-~~~~~~~~~~~
-
-Contributions will not be merged until they've been code reviewed. You should
-implement any code review feedback unless you strongly object to it. In the
-event that you object to the code review feedback, you should make your case
-clearly and calmly. If, after doing so, the feedback is judged to still apply,
-you must either apply the feedback or withdraw your contribution.
-
-New Contributors
-~~~~~~~~~~~~~~~~
-
-If you are new or relatively new to Open Source, welcome! Requests aims to
-be a gentle introduction to the world of Open Source. If you're concerned about
-how best to contribute, please consider mailing a maintainer (listed above) and
-asking for help.
-
-Please also check the :ref:`early-feedback` section.
-
-Kenneth Reitz's Code Style™
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The Requests codebase uses the `PEP 8`_ code style.
-
-In addition to the standards outlined in PEP 8, we have a few guidelines:
-
-- Line-length can exceed 79 characters, to 100, when convenient.
-- Line-length can exceed 100 characters, when doing otherwise would be *terribly* inconvenient.
-- Always use single-quoted strings (e.g. ``'#flatearth'``), unless a single quote occurs within the string.
-
-Additionally, one of the styles that PEP 8 recommends for `line continuations`_
-completely lacks all sense of taste, and is not to be permitted within
-the Requests codebase::
-
-    # Aligned with opening delimiter.
-    foo = long_function_name(var_one, var_two,
-                             var_three, var_four)
-
-No. Just don't. Please.
This is much better::
-
-    foo = long_function_name(
-        var_one,
-        var_two,
-        var_three,
-        var_four,
-    )
-
-Docstrings are to follow these syntaxes::
-
-    def the_earth_is_flat():
-        """NASA divided up the seas into thirty-three degrees."""
-        pass
-
-::
-
-    def fibonacci_spiral_tool():
-        """With my feet upon the ground I lose myself / between the sounds
-        and open wide to suck it in. / I feel it move across my skin. / I'm
-        reaching up and reaching out. / I'm reaching for the random or
-        whatever will bewilder me. / Whatever will bewilder me. / And
-        following our will and wind we may just go where no one's been. /
-        We'll ride the spiral to the end and may just go where no one's
-        been.
-
-        Spiral out. Keep going...
-        """
-        pass
-
-All functions, methods, and classes are to contain docstrings. Object data
-model methods (e.g. ``__repr__``) are typically the exception to this rule.
-
-Thanks for helping to make the world a better place!
-
-.. _PEP 8: https://pep8.org/
-.. _line continuations: https://www.python.org/dev/peps/pep-0008/#indentation
-
-Documentation Contributions
----------------------------
-
-Documentation improvements are always welcome! The documentation files live in
-the ``docs/`` directory of the codebase. They're written in
-`reStructuredText`_, and use `Sphinx`_ to generate the full suite of
-documentation.
-
-When contributing documentation, please do your best to follow the style of the
-documentation files. This means a soft limit of 79 characters wide in your text
-files and a semi-formal, yet friendly and approachable, prose style.
-
-When presenting Python code, use single-quoted strings (``'hello'`` instead of
-``"hello"``).
-
-.. _reStructuredText: http://docutils.sourceforge.net/rst.html
-.. _Sphinx: http://sphinx-doc.org/index.html
-
-
-.. _bug-reports:
-
-Bug Reports
------------
-
-Bug reports are hugely important! Before you raise one, though, please check
-through the `GitHub issues`_, **both open and closed**, to confirm that the bug
-hasn't been reported before. Duplicate bug reports are a huge drain on the time
-of other contributors, and should be avoided as much as possible.
-
-.. _GitHub issues: https://github.com/psf/requests/issues
-
-
-Feature Requests
-----------------
-
-Requests is in a perpetual feature freeze; only the BDFL can add or approve
-new features. The maintainers believe that Requests is a feature-complete
-piece of software at this time.
-
-One of the most important skills to have while maintaining a widely-used
-open source project is learning the ability to say "no" to suggested changes,
-while keeping an open ear and mind.
-
-If you believe there is a feature missing, feel free to raise a feature
-request, but please do be aware that the overwhelming likelihood is that your
-feature request will not be accepted.
diff --git a/vendor/requests/docs/index.rst b/vendor/requests/docs/index.rst
deleted file mode 100644
index 49bda6dc..00000000
--- a/vendor/requests/docs/index.rst
+++ /dev/null
@@ -1,136 +0,0 @@
-.. Requests documentation master file, created by
-   sphinx-quickstart on Sun Feb 13 23:54:25 2011.
-   You can adapt this file completely to your liking, but it should at least
-   contain the root `toctree` directive.
-
-Requests: HTTP for Humans™
-==========================
-
-Release v\ |version|. (:ref:`Installation <install>`)
-
-
-.. image:: https://pepy.tech/badge/requests
-    :target: https://pepy.tech/project/requests
-
-..
image:: https://img.shields.io/pypi/l/requests.svg - :target: https://pypi.org/project/requests/ - -.. image:: https://img.shields.io/pypi/wheel/requests.svg - :target: https://pypi.org/project/requests/ - -.. image:: https://img.shields.io/pypi/pyversions/requests.svg - :target: https://pypi.org/project/requests/ - -**Requests** is an elegant and simple HTTP library for Python, built for human beings. - -------------------- - -**Behold, the power of Requests**:: - - >>> r = requests.get('https://api.github.com/user', auth=('user', 'pass')) - >>> r.status_code - 200 - >>> r.headers['content-type'] - 'application/json; charset=utf8' - >>> r.encoding - 'utf-8' - >>> r.text - '{"type":"User"...' - >>> r.json() - {'private_gists': 419, 'total_private_repos': 77, ...} - -See `similar code, sans Requests `_. - - -**Requests** allows you to send HTTP/1.1 requests extremely easily. -There's no need to manually add query strings to your -URLs, or to form-encode your POST data. Keep-alive and HTTP connection pooling -are 100% automatic, thanks to `urllib3 `_. - -Beloved Features ----------------- - -Requests is ready for today's web. - -- Keep-Alive & Connection Pooling -- International Domains and URLs -- Sessions with Cookie Persistence -- Browser-style SSL Verification -- Automatic Content Decoding -- Basic/Digest Authentication -- Elegant Key/Value Cookies -- Automatic Decompression -- Unicode Response Bodies -- HTTP(S) Proxy Support -- Multipart File Uploads -- Streaming Downloads -- Connection Timeouts -- Chunked Requests -- ``.netrc`` Support - -Requests officially supports Python 2.7 & 3.6+, and runs great on PyPy. - - -The User Guide --------------- - -This part of the documentation, which is mostly prose, begins with some -background information about Requests, then focuses on step-by-step -instructions for getting the most out of Requests. - -.. toctree:: - :maxdepth: 2 - - user/install - user/quickstart - user/advanced - user/authentication - - -The Community Guide -------------------- - -This part of the documentation, which is mostly prose, details the -Requests ecosystem and community. - -.. toctree:: - :maxdepth: 2 - - community/recommended - community/faq - community/out-there - community/support - community/vulnerabilities - community/release-process - -.. toctree:: - :maxdepth: 1 - - community/updates - -The API Documentation / Guide ------------------------------ - -If you are looking for information on a specific function, class, or method, -this part of the documentation is for you. - -.. toctree:: - :maxdepth: 2 - - api - - -The Contributor Guide ---------------------- - -If you want to contribute to the project, this part of the documentation is for -you. - -.. toctree:: - :maxdepth: 3 - - dev/contributing - dev/authors - -There are no more guides. You are now guideless. -Good luck. diff --git a/vendor/requests/docs/make.bat b/vendor/requests/docs/make.bat deleted file mode 100644 index 9eaf9b88..00000000 --- a/vendor/requests/docs/make.bat +++ /dev/null @@ -1,263 +0,0 @@ -@ECHO OFF - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set BUILDDIR=_build -set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . -set I18NSPHINXOPTS=%SPHINXOPTS% . -if NOT "%PAPER%" == "" ( - set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% - set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% -) - -if "%1" == "" goto help - -if "%1" == "help" ( - :help - echo.Please use `make ^` where ^ is one of - echo. 
html to make standalone HTML files - echo. dirhtml to make HTML files named index.html in directories - echo. singlehtml to make a single large HTML file - echo. pickle to make pickle files - echo. json to make JSON files - echo. htmlhelp to make HTML files and a HTML help project - echo. qthelp to make HTML files and a qthelp project - echo. devhelp to make HTML files and a Devhelp project - echo. epub to make an epub - echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter - echo. text to make text files - echo. man to make manual pages - echo. texinfo to make Texinfo files - echo. gettext to make PO message catalogs - echo. changes to make an overview over all changed/added/deprecated items - echo. xml to make Docutils-native XML files - echo. pseudoxml to make pseudoxml-XML files for display purposes - echo. linkcheck to check all external links for integrity - echo. doctest to run all doctests embedded in the documentation if enabled - echo. coverage to run coverage check of the documentation if enabled - goto end -) - -if "%1" == "clean" ( - for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i - del /q /s %BUILDDIR%\* - goto end -) - - -REM Check if sphinx-build is available and fallback to Python version if any -%SPHINXBUILD% 1>NUL 2>NUL -if errorlevel 9009 goto sphinx_python -goto sphinx_ok - -:sphinx_python - -set SPHINXBUILD=python -m sphinx.__init__ -%SPHINXBUILD% 2> nul -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. - echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -:sphinx_ok - - -if "%1" == "html" ( - %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/html. - goto end -) - -if "%1" == "dirhtml" ( - %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. - goto end -) - -if "%1" == "singlehtml" ( - %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. - goto end -) - -if "%1" == "pickle" ( - %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the pickle files. - goto end -) - -if "%1" == "json" ( - %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the JSON files. - goto end -) - -if "%1" == "htmlhelp" ( - %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run HTML Help Workshop with the ^ -.hhp project file in %BUILDDIR%/htmlhelp. - goto end -) - -if "%1" == "qthelp" ( - %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp - if errorlevel 1 exit /b 1 - echo. 
- echo.Build finished; now you can run "qcollectiongenerator" with the ^ -.qhcp project file in %BUILDDIR%/qthelp, like this: - echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Requests.qhcp - echo.To view the help file: - echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Requests.ghc - goto end -) - -if "%1" == "devhelp" ( - %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. - goto end -) - -if "%1" == "epub" ( - %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The epub file is in %BUILDDIR%/epub. - goto end -) - -if "%1" == "latex" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "latexpdf" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - cd %BUILDDIR%/latex - make all-pdf - cd %~dp0 - echo. - echo.Build finished; the PDF files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "latexpdfja" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - cd %BUILDDIR%/latex - make all-pdf-ja - cd %~dp0 - echo. - echo.Build finished; the PDF files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "text" ( - %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The text files are in %BUILDDIR%/text. - goto end -) - -if "%1" == "man" ( - %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The manual pages are in %BUILDDIR%/man. - goto end -) - -if "%1" == "texinfo" ( - %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. - goto end -) - -if "%1" == "gettext" ( - %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The message catalogs are in %BUILDDIR%/locale. - goto end -) - -if "%1" == "changes" ( - %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes - if errorlevel 1 exit /b 1 - echo. - echo.The overview file is in %BUILDDIR%/changes. - goto end -) - -if "%1" == "linkcheck" ( - %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck - if errorlevel 1 exit /b 1 - echo. - echo.Link check complete; look for any errors in the above output ^ -or in %BUILDDIR%/linkcheck/output.txt. - goto end -) - -if "%1" == "doctest" ( - %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest - if errorlevel 1 exit /b 1 - echo. - echo.Testing of doctests in the sources finished, look at the ^ -results in %BUILDDIR%/doctest/output.txt. - goto end -) - -if "%1" == "coverage" ( - %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage - if errorlevel 1 exit /b 1 - echo. - echo.Testing of coverage in the sources finished, look at the ^ -results in %BUILDDIR%/coverage/python.txt. - goto end -) - -if "%1" == "xml" ( - %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The XML files are in %BUILDDIR%/xml. - goto end -) - -if "%1" == "pseudoxml" ( - %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 
- goto end -) - -:end diff --git a/vendor/requests/docs/user/advanced.rst b/vendor/requests/docs/user/advanced.rst deleted file mode 100644 index 34d400d5..00000000 --- a/vendor/requests/docs/user/advanced.rst +++ /dev/null @@ -1,1090 +0,0 @@ -.. _advanced: - -Advanced Usage -============== - -This document covers some of Requests more advanced features. - -.. _session-objects: - -Session Objects ---------------- - -The Session object allows you to persist certain parameters across -requests. It also persists cookies across all requests made from the -Session instance, and will use ``urllib3``'s `connection pooling`_. So if -you're making several requests to the same host, the underlying TCP -connection will be reused, which can result in a significant performance -increase (see `HTTP persistent connection`_). - -A Session object has all the methods of the main Requests API. - -Let's persist some cookies across requests:: - - s = requests.Session() - - s.get('https://httpbin.org/cookies/set/sessioncookie/123456789') - r = s.get('https://httpbin.org/cookies') - - print(r.text) - # '{"cookies": {"sessioncookie": "123456789"}}' - - -Sessions can also be used to provide default data to the request methods. This -is done by providing data to the properties on a Session object:: - - s = requests.Session() - s.auth = ('user', 'pass') - s.headers.update({'x-test': 'true'}) - - # both 'x-test' and 'x-test2' are sent - s.get('https://httpbin.org/headers', headers={'x-test2': 'true'}) - - -Any dictionaries that you pass to a request method will be merged with the -session-level values that are set. The method-level parameters override session -parameters. - -Note, however, that method-level parameters will *not* be persisted across -requests, even if using a session. This example will only send the cookies -with the first request, but not the second:: - - s = requests.Session() - - r = s.get('https://httpbin.org/cookies', cookies={'from-my': 'browser'}) - print(r.text) - # '{"cookies": {"from-my": "browser"}}' - - r = s.get('https://httpbin.org/cookies') - print(r.text) - # '{"cookies": {}}' - - -If you want to manually add cookies to your session, use the -:ref:`Cookie utility functions ` to manipulate -:attr:`Session.cookies `. - -Sessions can also be used as context managers:: - - with requests.Session() as s: - s.get('https://httpbin.org/cookies/set/sessioncookie/123456789') - -This will make sure the session is closed as soon as the ``with`` block is -exited, even if unhandled exceptions occurred. - - -.. admonition:: Remove a Value From a Dict Parameter - - Sometimes you'll want to omit session-level keys from a dict parameter. To - do this, you simply set that key's value to ``None`` in the method-level - parameter. It will automatically be omitted. - -All values that are contained within a session are directly available to you. -See the :ref:`Session API Docs ` to learn more. - -.. _request-and-response-objects: - -Request and Response Objects ----------------------------- - -Whenever a call is made to ``requests.get()`` and friends, you are doing two -major things. First, you are constructing a ``Request`` object which will be -sent off to a server to request or query some resource. Second, a ``Response`` -object is generated once Requests gets a response back from the server. -The ``Response`` object contains all of the information returned by the server and -also contains the ``Request`` object you created originally. 
Here is a simple -request to get some very important information from Wikipedia's servers:: - - >>> r = requests.get('https://en.wikipedia.org/wiki/Monty_Python') - -If we want to access the headers the server sent back to us, we do this:: - - >>> r.headers - {'content-length': '56170', 'x-content-type-options': 'nosniff', 'x-cache': - 'HIT from cp1006.eqiad.wmnet, MISS from cp1010.eqiad.wmnet', 'content-encoding': - 'gzip', 'age': '3080', 'content-language': 'en', 'vary': 'Accept-Encoding,Cookie', - 'server': 'Apache', 'last-modified': 'Wed, 13 Jun 2012 01:33:50 GMT', - 'connection': 'close', 'cache-control': 'private, s-maxage=0, max-age=0, - must-revalidate', 'date': 'Thu, 14 Jun 2012 12:59:39 GMT', 'content-type': - 'text/html; charset=UTF-8', 'x-cache-lookup': 'HIT from cp1006.eqiad.wmnet:3128, - MISS from cp1010.eqiad.wmnet:80'} - -However, if we want to get the headers we sent the server, we simply access the -request, and then the request's headers:: - - >>> r.request.headers - {'Accept-Encoding': 'identity, deflate, compress, gzip', - 'Accept': '*/*', 'User-Agent': 'python-requests/1.2.0'} - -.. _prepared-requests: - -Prepared Requests ------------------ - -Whenever you receive a :class:`Response ` object -from an API call or a Session call, the ``request`` attribute is actually the -``PreparedRequest`` that was used. In some cases you may wish to do some extra -work to the body or headers (or anything else really) before sending a -request. The simple recipe for this is the following:: - - from requests import Request, Session - - s = Session() - - req = Request('POST', url, data=data, headers=headers) - prepped = req.prepare() - - # do something with prepped.body - prepped.body = 'No, I want exactly this as the body.' - - # do something with prepped.headers - del prepped.headers['Content-Type'] - - resp = s.send(prepped, - stream=stream, - verify=verify, - proxies=proxies, - cert=cert, - timeout=timeout - ) - - print(resp.status_code) - -Since you are not doing anything special with the ``Request`` object, you -prepare it immediately and modify the ``PreparedRequest`` object. You then -send that with the other parameters you would have sent to ``requests.*`` or -``Session.*``. - -However, the above code will lose some of the advantages of having a Requests -:class:`Session ` object. In particular, -:class:`Session `-level state such as cookies will -not get applied to your request. To get a -:class:`PreparedRequest ` with that state -applied, replace the call to :meth:`Request.prepare() -` with a call to -:meth:`Session.prepare_request() `, like this:: - - from requests import Request, Session - - s = Session() - req = Request('GET', url, data=data, headers=headers) - - prepped = s.prepare_request(req) - - # do something with prepped.body - prepped.body = 'Seriously, send exactly these bytes.' - - # do something with prepped.headers - prepped.headers['Keep-Dead'] = 'parrot' - - resp = s.send(prepped, - stream=stream, - verify=verify, - proxies=proxies, - cert=cert, - timeout=timeout - ) - - print(resp.status_code) - -When you are using the prepared request flow, keep in mind that it does not take into account the environment. -This can cause problems if you are using environment variables to change the behaviour of requests. -For example: Self-signed SSL certificates specified in ``REQUESTS_CA_BUNDLE`` will not be taken into account. -As a result an ``SSL: CERTIFICATE_VERIFY_FAILED`` is thrown. 
-You can get around this behaviour by explicitly merging the environment settings into your session:: - - from requests import Request, Session - - s = Session() - req = Request('GET', url) - - prepped = s.prepare_request(req) - - # Merge environment settings into session - settings = s.merge_environment_settings(prepped.url, {}, None, None, None) - resp = s.send(prepped, **settings) - - print(resp.status_code) - -.. _verification: - -SSL Cert Verification ---------------------- - -Requests verifies SSL certificates for HTTPS requests, just like a web browser. -By default, SSL verification is enabled, and Requests will throw a SSLError if -it's unable to verify the certificate:: - - >>> requests.get('https://requestb.in') - requests.exceptions.SSLError: hostname 'requestb.in' doesn't match either of '*.herokuapp.com', 'herokuapp.com' - -I don't have SSL setup on this domain, so it throws an exception. Excellent. GitHub does though:: - - >>> requests.get('https://github.com') - - -You can pass ``verify`` the path to a CA_BUNDLE file or directory with certificates of trusted CAs:: - - >>> requests.get('https://github.com', verify='/path/to/certfile') - -or persistent:: - - s = requests.Session() - s.verify = '/path/to/certfile' - -.. note:: If ``verify`` is set to a path to a directory, the directory must have been processed using - the ``c_rehash`` utility supplied with OpenSSL. - -This list of trusted CAs can also be specified through the ``REQUESTS_CA_BUNDLE`` environment variable. -If ``REQUESTS_CA_BUNDLE`` is not set, ``CURL_CA_BUNDLE`` will be used as fallback. - -Requests can also ignore verifying the SSL certificate if you set ``verify`` to False:: - - >>> requests.get('https://kennethreitz.org', verify=False) - - -Note that when ``verify`` is set to ``False``, requests will accept any TLS -certificate presented by the server, and will ignore hostname mismatches -and/or expired certificates, which will make your application vulnerable to -man-in-the-middle (MitM) attacks. Setting verify to ``False`` may be useful -during local development or testing. - -By default, ``verify`` is set to True. Option ``verify`` only applies to host certs. - -Client Side Certificates ------------------------- - -You can also specify a local cert to use as client side certificate, as a single -file (containing the private key and the certificate) or as a tuple of both -files' paths:: - - >>> requests.get('https://kennethreitz.org', cert=('/path/client.cert', '/path/client.key')) - - -or persistent:: - - s = requests.Session() - s.cert = '/path/client.cert' - -If you specify a wrong path or an invalid cert, you'll get a SSLError:: - - >>> requests.get('https://kennethreitz.org', cert='/wrong_path/client.pem') - SSLError: [Errno 336265225] _ssl.c:347: error:140B0009:SSL routines:SSL_CTX_use_PrivateKey_file:PEM lib - -.. warning:: The private key to your local certificate *must* be unencrypted. - Currently, Requests does not support using encrypted keys. - -.. _ca-certificates: - -CA Certificates ---------------- - -Requests uses certificates from the package `certifi`_. This allows for users -to update their trusted certificates without changing the version of Requests. - -Before version 2.16, Requests bundled a set of root CAs that it trusted, -sourced from the `Mozilla trust store`_. The certificates were only updated -once for each Requests version. When ``certifi`` was not installed, this led to -extremely out-of-date certificate bundles when using significantly older -versions of Requests. 
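-
-As a worked illustration (an editorial sketch, not text from the original
-docs), the certifi relationship can be made explicit: ``certifi.where()``
-returns the path of certifi's CA bundle file, and passing that path to
-``verify`` pins verification to exactly the bundle Requests would pick up by
-default when certifi is available::
-
-    import certifi
-    import requests
-
-    # certifi.where() is the filesystem path of certifi's CA bundle file.
-    bundle = certifi.where()
-
-    # Verification now uses that bundle explicitly; upgrading certifi
-    # refreshes the set of trusted root certificates.
-    r = requests.get('https://github.com', verify=bundle)
-    print(r.status_code)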
- -For the sake of security we recommend upgrading certifi frequently! - -.. _HTTP persistent connection: https://en.wikipedia.org/wiki/HTTP_persistent_connection -.. _connection pooling: https://urllib3.readthedocs.io/en/latest/reference/index.html#module-urllib3.connectionpool -.. _certifi: https://certifiio.readthedocs.io/ -.. _Mozilla trust store: https://hg.mozilla.org/mozilla-central/raw-file/tip/security/nss/lib/ckfw/builtins/certdata.txt - -.. _body-content-workflow: - -Body Content Workflow ---------------------- - -By default, when you make a request, the body of the response is downloaded -immediately. You can override this behaviour and defer downloading the response -body until you access the :attr:`Response.content ` -attribute with the ``stream`` parameter:: - - tarball_url = 'https://github.com/psf/requests/tarball/master' - r = requests.get(tarball_url, stream=True) - -At this point only the response headers have been downloaded and the connection -remains open, hence allowing us to make content retrieval conditional:: - - if int(r.headers['content-length']) < TOO_LONG: - content = r.content - ... - -You can further control the workflow by use of the :meth:`Response.iter_content() ` -and :meth:`Response.iter_lines() ` methods. -Alternatively, you can read the undecoded body from the underlying -urllib3 :class:`urllib3.HTTPResponse ` at -:attr:`Response.raw `. - -If you set ``stream`` to ``True`` when making a request, Requests cannot -release the connection back to the pool unless you consume all the data or call -:meth:`Response.close `. This can lead to -inefficiency with connections. If you find yourself partially reading request -bodies (or not reading them at all) while using ``stream=True``, you should -make the request within a ``with`` statement to ensure it's always closed:: - - with requests.get('https://httpbin.org/get', stream=True) as r: - # Do things with the response here. - -.. _keep-alive: - -Keep-Alive ----------- - -Excellent news — thanks to urllib3, keep-alive is 100% automatic within a session! -Any requests that you make within a session will automatically reuse the appropriate -connection! - -Note that connections are only released back to the pool for reuse once all body -data has been read; be sure to either set ``stream`` to ``False`` or read the -``content`` property of the ``Response`` object. - -.. _streaming-uploads: - -Streaming Uploads ------------------ - -Requests supports streaming uploads, which allow you to send large streams or -files without reading them into memory. To stream and upload, simply provide a -file-like object for your body:: - - with open('massive-body', 'rb') as f: - requests.post('http://some.url/streamed', data=f) - -.. warning:: It is strongly recommended that you open files in :ref:`binary - mode `. This is because Requests may attempt to provide - the ``Content-Length`` header for you, and if it does this value - will be set to the number of *bytes* in the file. Errors may occur - if you open the file in *text mode*. - - -.. _chunk-encoding: - -Chunk-Encoded Requests ----------------------- - -Requests also supports Chunked transfer encoding for outgoing and incoming requests. -To send a chunk-encoded request, simply provide a generator (or any iterator without -a length) for your body:: - - def gen(): - yield 'hi' - yield 'there' - - requests.post('http://some.url/chunked', data=gen()) - -For chunked encoded responses, it's best to iterate over the data using -:meth:`Response.iter_content() `. 
In
-an ideal situation you'll have set ``stream=True`` on the request, in which
-case you can iterate chunk-by-chunk by calling ``iter_content`` with a ``chunk_size``
-parameter of ``None``. If you want to set a maximum size of the chunk,
-you can set a ``chunk_size`` parameter to any integer.
-
-
-.. _multipart:
-
-POST Multiple Multipart-Encoded Files
--------------------------------------
-
-You can send multiple files in one request. For example, suppose you want to
-upload image files to an HTML form with a multiple file field 'images'::
-
-    <input type="file" name="images" multiple="true" required="true"/>
-
-To do that, just set files to a list of tuples of ``(form_field_name, file_info)``::
-
-    >>> url = 'https://httpbin.org/post'
-    >>> multiple_files = [
-    ...     ('images', ('foo.png', open('foo.png', 'rb'), 'image/png')),
-    ...     ('images', ('bar.png', open('bar.png', 'rb'), 'image/png'))]
-    >>> r = requests.post(url, files=multiple_files)
-    >>> r.text
-    {
-      ...
-      'files': {'images': 'data:image/png;base64,iVBORw ....'}
-      'Content-Type': 'multipart/form-data; boundary=3131623adb2043caaeb5538cc7aa0b3a',
-      ...
-    }
-
-.. warning:: It is strongly recommended that you open files in :ref:`binary
-             mode <tut-files>`. This is because Requests may attempt to provide
-             the ``Content-Length`` header for you, and if it does this value
-             will be set to the number of *bytes* in the file. Errors may occur
-             if you open the file in *text mode*.
-
-
-.. _event-hooks:
-
-Event Hooks
------------
-
-Requests has a hook system that you can use to manipulate portions of
-the request process, or signal event handling.
-
-Available hooks:
-
-``response``:
-    The response generated from a Request.
-
-
-You can assign a hook function on a per-request basis by passing a
-``{hook_name: callback_function}`` dictionary to the ``hooks`` request
-parameter::
-
-    hooks={'response': print_url}
-
-That ``callback_function`` will receive the resulting ``Response`` object as
-its first argument.
-
-::
-
-    def print_url(r, *args, **kwargs):
-        print(r.url)
-
-If an error occurs while executing your callback, a warning is given.
-
-If the callback function returns a value, it is assumed that it is to
-replace the data that was passed in. If the function doesn't return
-anything, nothing else is affected.
-
-::
-
-    def record_hook(r, *args, **kwargs):
-        r.hook_called = True
-        return r
-
-Let's print some request method arguments at runtime::
-
-    >>> requests.get('https://httpbin.org/', hooks={'response': print_url})
-    https://httpbin.org/
-    <Response [200]>
-
-You can add multiple hooks to a single request. Let's call two hooks at once::
-
-    >>> r = requests.get('https://httpbin.org/', hooks={'response': [print_url, record_hook]})
-    >>> r.hook_called
-    True
-
-You can also add hooks to a ``Session`` instance. Any hooks you add will then
-be called on every request made to the session. For example::
-
-    >>> s = requests.Session()
-    >>> s.hooks['response'].append(print_url)
-    >>> s.get('https://httpbin.org/')
-    https://httpbin.org/
-    <Response [200]>
-
-A ``Session`` can have multiple hooks, which will be called in the order
-they are added.
-
-.. _custom-auth:
-
-Custom Authentication
----------------------
-
-Requests allows you to specify your own authentication mechanism.
-
-Any callable which is passed as the ``auth`` argument to a request method will
-have the opportunity to modify the request before it is dispatched.
-
-Authentication implementations are subclasses of :class:`AuthBase <requests.auth.AuthBase>`,
-and are easy to define.
Requests provides two common authentication scheme -implementations in ``requests.auth``: :class:`HTTPBasicAuth ` and -:class:`HTTPDigestAuth `. - -Let's pretend that we have a web service that will only respond if the -``X-Pizza`` header is set to a password value. Unlikely, but just go with it. - -:: - - from requests.auth import AuthBase - - class PizzaAuth(AuthBase): - """Attaches HTTP Pizza Authentication to the given Request object.""" - def __init__(self, username): - # setup any auth-related data here - self.username = username - - def __call__(self, r): - # modify and return the request - r.headers['X-Pizza'] = self.username - return r - -Then, we can make a request using our Pizza Auth:: - - >>> requests.get('http://pizzabin.org/admin', auth=PizzaAuth('kenneth')) - - -.. _streaming-requests: - -Streaming Requests ------------------- - -With :meth:`Response.iter_lines() ` you can easily -iterate over streaming APIs such as the `Twitter Streaming -API `_. Simply -set ``stream`` to ``True`` and iterate over the response with -:meth:`~requests.Response.iter_lines()`:: - - import json - import requests - - r = requests.get('https://httpbin.org/stream/20', stream=True) - - for line in r.iter_lines(): - - # filter out keep-alive new lines - if line: - decoded_line = line.decode('utf-8') - print(json.loads(decoded_line)) - -When using `decode_unicode=True` with -:meth:`Response.iter_lines() ` or -:meth:`Response.iter_content() `, you'll want -to provide a fallback encoding in the event the server doesn't provide one:: - - r = requests.get('https://httpbin.org/stream/20', stream=True) - - if r.encoding is None: - r.encoding = 'utf-8' - - for line in r.iter_lines(decode_unicode=True): - if line: - print(json.loads(line)) - -.. warning:: - - :meth:`~requests.Response.iter_lines()` is not reentrant safe. - Calling this method multiple times causes some of the received data - being lost. In case you need to call it from multiple places, use - the resulting iterator object instead:: - - lines = r.iter_lines() - # Save the first line for later or just skip it - - first_line = next(lines) - - for line in lines: - print(line) - -.. _proxies: - -Proxies -------- - -If you need to use a proxy, you can configure individual requests with the -``proxies`` argument to any request method:: - - import requests - - proxies = { - 'http': 'http://10.10.1.10:3128', - 'https': 'http://10.10.1.10:1080', - } - - requests.get('http://example.org', proxies=proxies) - -Alternatively you can configure it once for an entire -:class:`Session `:: - - import requests - - proxies = { - 'http': 'http://10.10.1.10:3128', - 'https': 'http://10.10.1.10:1080', - } - session = requests.Session() - session.proxies.update(proxies) - - session.get('http://example.org') - -When the proxies configuration is not overridden in python as shown above, -by default Requests relies on the proxy configuration defined by standard -environment variables ``http_proxy``, ``https_proxy``, ``no_proxy`` and -``curl_ca_bundle``. Uppercase variants of these variables are also supported. 
-You can therefore set them to configure Requests (only set the ones relevant -to your needs):: - - $ export HTTP_PROXY="http://10.10.1.10:3128" - $ export HTTPS_PROXY="http://10.10.1.10:1080" - - $ python - >>> import requests - >>> requests.get('http://example.org') - -To use HTTP Basic Auth with your proxy, use the `http://user:password@host/` -syntax in any of the above configuration entries:: - - $ export HTTPS_PROXY="http://user:pass@10.10.1.10:1080" - - $ python - >>> proxies = {'http': 'http://user:pass@10.10.1.10:3128/'} - -.. warning:: Storing sensitive username and password information in an - environment variable or a version-controlled file is a security risk and is - highly discouraged. - -To give a proxy for a specific scheme and host, use the -`scheme://hostname` form for the key. This will match for -any request to the given scheme and exact hostname. - -:: - - proxies = {'http://10.20.1.128': 'http://10.10.1.10:5323'} - -Note that proxy URLs must include the scheme. - -Finally, note that using a proxy for https connections typically requires your -local machine to trust the proxy's root certificate. By default the list of -certificates trusted by Requests can be found with:: - - from requests.utils import DEFAULT_CA_BUNDLE_PATH - print(DEFAULT_CA_BUNDLE_PATH) - -You override this default certificate bundle by setting the standard -``curl_ca_bundle`` environment variable to another file path:: - - $ export curl_ca_bundle="/usr/local/myproxy_info/cacert.pem" - $ export https_proxy="http://10.10.1.10:1080" - - $ python - >>> import requests - >>> requests.get('https://example.org') - -SOCKS -^^^^^ - -.. versionadded:: 2.10.0 - -In addition to basic HTTP proxies, Requests also supports proxies using the -SOCKS protocol. This is an optional feature that requires that additional -third-party libraries be installed before use. - -You can get the dependencies for this feature from ``pip``: - -.. code-block:: bash - - $ python -m pip install requests[socks] - -Once you've installed those dependencies, using a SOCKS proxy is just as easy -as using a HTTP one:: - - proxies = { - 'http': 'socks5://user:pass@host:port', - 'https': 'socks5://user:pass@host:port' - } - -Using the scheme ``socks5`` causes the DNS resolution to happen on the client, rather than on the proxy server. This is in line with curl, which uses the scheme to decide whether to do the DNS resolution on the client or proxy. If you want to resolve the domains on the proxy server, use ``socks5h`` as the scheme. - -.. _compliance: - -Compliance ----------- - -Requests is intended to be compliant with all relevant specifications and -RFCs where that compliance will not cause difficulties for users. This -attention to the specification can lead to some behaviour that may seem -unusual to those not familiar with the relevant specification. - -Encodings -^^^^^^^^^ - -When you receive a response, Requests makes a guess at the encoding to -use for decoding the response when you access the :attr:`Response.text -` attribute. Requests will first check for an -encoding in the HTTP header, and if none is present, will use -`charset_normalizer `_ -or `chardet `_ to attempt to -guess the encoding. - -If ``chardet`` is installed, ``requests`` uses it, however for python3 -``chardet`` is no longer a mandatory dependency. The ``chardet`` -library is an LGPL-licenced dependency and some users of requests -cannot depend on mandatory LGPL-licensed dependencies. 
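-
-To make the guessing behaviour above concrete, here is a short editorial
-sketch (not part of the original document): ``Response.apparent_encoding``
-exposes the detector's guess, and assigning to ``Response.encoding`` before
-accessing ``Response.text`` controls how the body is decoded::
-
-    import requests
-
-    r = requests.get('https://httpbin.org/html')
-
-    # Encoding taken from the HTTP Content-Type header, if one was sent.
-    print(r.encoding)
-
-    # Encoding guessed from the body by charset_normalizer (or chardet).
-    print(r.apparent_encoding)
-
-    # r.text decodes lazily, so overriding the encoding here affects
-    # every later access to r.text.
-    if r.encoding is None:
-        r.encoding = r.apparent_encoding
-    print(r.text[:80])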
-
-When you install ``requests`` without specifying the ``[use_chardet_on_py3]`` extra,
-and ``chardet`` is not already installed, ``requests`` uses ``charset-normalizer``
-(MIT-licensed) to guess the encoding. For Python 2, ``requests`` uses only
-``chardet``, which is a mandatory dependency there.
-
-The only time Requests will not guess the encoding is if no explicit charset
-is present in the HTTP headers **and** the ``Content-Type``
-header contains ``text``. In this situation, `RFC 2616
-<https://tools.ietf.org/html/rfc2616#section-3.7.1>`_ specifies
-that the default charset must be ``ISO-8859-1``. Requests follows the
-specification in this case. If you require a different encoding, you can
-manually set the :attr:`Response.encoding <requests.Response.encoding>`
-property, or use the raw :attr:`Response.content <requests.Response.content>`.
-
-.. _http-verbs:
-
-HTTP Verbs
-----------
-
-Requests provides access to almost the full range of HTTP verbs: GET, OPTIONS,
-HEAD, POST, PUT, PATCH and DELETE. The following provides detailed examples of
-using these various verbs in Requests, using the GitHub API.
-
-We will begin with the verb most commonly used: GET. HTTP GET is an idempotent
-method that returns a resource from a given URL. As a result, it is the verb
-you ought to use when attempting to retrieve data from a web location. An
-example usage would be attempting to get information about a specific commit
-from GitHub. Suppose we wanted commit ``a050faf`` on Requests. We would get it
-like so::
-
-    >>> import requests
-    >>> r = requests.get('https://api.github.com/repos/psf/requests/git/commits/a050faf084662f3a352dd1a941f2c7c9f886d4ad')
-
-We should confirm that GitHub responded correctly. If it has, we want to work
-out what type of content it is. Do this like so::
-
-    >>> if r.status_code == requests.codes.ok:
-    ...     print(r.headers['content-type'])
-    ...
-    application/json; charset=utf-8
-
-So, GitHub returns JSON. That's great, we can use the :meth:`r.json
-<requests.Response.json>` method to parse it into Python objects.
-
-::
-
-    >>> commit_data = r.json()
-
-    >>> print(commit_data.keys())
-    ['committer', 'author', 'url', 'tree', 'sha', 'parents', 'message']
-
-    >>> print(commit_data['committer'])
-    {'date': '2012-05-10T11:10:50-07:00', 'email': 'me@kennethreitz.com', 'name': 'Kenneth Reitz'}
-
-    >>> print(commit_data['message'])
-    makin' history
-
-So far, so simple. Well, let's investigate the GitHub API a little bit. Now,
-we could look at the documentation, but we might have a little more fun if we
-use Requests instead. We can take advantage of the Requests OPTIONS verb to
-see what kinds of HTTP methods are supported on the URL we just used.
-
-::
-
-    >>> verbs = requests.options(r.url)
-    >>> verbs.status_code
-    500
-
-Uh, what? That's unhelpful! Turns out GitHub, like many API providers, doesn't
-actually implement the OPTIONS method. This is an annoying oversight, but it's
-OK, we can just use the boring documentation. If GitHub had correctly
-implemented OPTIONS, however, it would return the allowed methods in the
-headers, e.g.
-
-::
-
-    >>> verbs = requests.options('http://a-good-website.com/api/cats')
-    >>> print(verbs.headers['allow'])
-    GET,HEAD,POST,OPTIONS
-
-Turning to the documentation, we see that the only other method allowed for
-commits is POST, which creates a new commit. As we're using the Requests repo,
-we should probably avoid making ham-handed POSTs to it. Instead, let's play
-with the Issues feature of GitHub.
-
-This documentation was added in response to
-`Issue #482 <https://github.com/psf/requests/issues/482>`_. Given that
-this issue already exists, we will use it as an example. Let's start by getting it.
- -:: - - >>> r = requests.get('https://api.github.com/repos/psf/requests/issues/482') - >>> r.status_code - 200 - - >>> issue = json.loads(r.text) - - >>> print(issue['title']) - Feature any http verb in docs - - >>> print(issue['comments']) - 3 - -Cool, we have three comments. Let's take a look at the last of them. - -:: - - >>> r = requests.get(r.url + '/comments') - >>> r.status_code - 200 - - >>> comments = r.json() - - >>> print(comments[0].keys()) - ['body', 'url', 'created_at', 'updated_at', 'user', 'id'] - - >>> print(comments[2]['body']) - Probably in the "advanced" section - -Well, that seems like a silly place. Let's post a comment telling the poster -that he's silly. Who is the poster, anyway? - -:: - - >>> print(comments[2]['user']['login']) - kennethreitz - -OK, so let's tell this Kenneth guy that we think this example should go in the -quickstart guide instead. According to the GitHub API doc, the way to do this -is to POST to the thread. Let's do it. - -:: - - >>> body = json.dumps({u"body": u"Sounds great! I'll get right on it!"}) - >>> url = u"https://api.github.com/repos/psf/requests/issues/482/comments" - - >>> r = requests.post(url=url, data=body) - >>> r.status_code - 404 - -Huh, that's weird. We probably need to authenticate. That'll be a pain, right? -Wrong. Requests makes it easy to use many forms of authentication, including -the very common Basic Auth. - -:: - - >>> from requests.auth import HTTPBasicAuth - >>> auth = HTTPBasicAuth('fake@example.com', 'not_a_real_password') - - >>> r = requests.post(url=url, data=body, auth=auth) - >>> r.status_code - 201 - - >>> content = r.json() - >>> print(content['body']) - Sounds great! I'll get right on it. - -Brilliant. Oh, wait, no! I meant to add that it would take me a while, because -I had to go feed my cat. If only I could edit this comment! Happily, GitHub -allows us to use another HTTP verb, PATCH, to edit this comment. Let's do -that. - -:: - - >>> print(content[u"id"]) - 5804413 - - >>> body = json.dumps({u"body": u"Sounds great! I'll get right on it once I feed my cat."}) - >>> url = u"https://api.github.com/repos/psf/requests/issues/comments/5804413" - - >>> r = requests.patch(url=url, data=body, auth=auth) - >>> r.status_code - 200 - -Excellent. Now, just to torture this Kenneth guy, I've decided to let him -sweat and not tell him that I'm working on this. That means I want to delete -this comment. GitHub lets us delete comments using the incredibly aptly named -DELETE method. Let's get rid of it. - -:: - - >>> r = requests.delete(url=url, auth=auth) - >>> r.status_code - 204 - >>> r.headers['status'] - '204 No Content' - -Excellent. All gone. The last thing I want to know is how much of my ratelimit -I've used. Let's find out. GitHub sends that information in the headers, so -rather than download the whole page I'll send a HEAD request to get the -headers. - -:: - - >>> r = requests.head(url=url, auth=auth) - >>> print(r.headers) - ... - 'x-ratelimit-remaining': '4995' - 'x-ratelimit-limit': '5000' - ... - -Excellent. Time to write a Python program that abuses the GitHub API in all -kinds of exciting ways, 4995 more times. - -.. _custom-verbs: - -Custom Verbs ------------- - -From time to time you may be working with a server that, for whatever reason, -allows use or even requires use of HTTP verbs not covered above. One example of -this would be the MKCOL method some WEBDAV servers use. Do not fret, these can -still be used with Requests. These make use of the built-in ``.request`` -method. 
For example::
-
-    >>> r = requests.request('MKCOL', url, data=data)
-    >>> r.status_code
-    200 # Assuming your call was correct
-
-Utilising this, you can make use of any method verb that your server allows.
-
-
-.. _link-headers:
-
-Link Headers
-------------
-
-Many HTTP APIs feature Link headers. They make APIs more self-describing and
-discoverable.
-
-GitHub uses these for `pagination `_
-in their API, for example::
-
-    >>> url = 'https://api.github.com/users/kennethreitz/repos?page=1&per_page=10'
-    >>> r = requests.head(url=url)
-    >>> r.headers['link']
-    '<https://api.github.com/users/kennethreitz/repos?page=2&per_page=10>; rel="next", <https://api.github.com/users/kennethreitz/repos?page=7&per_page=10>; rel="last"'
-
-Requests will automatically parse these link headers and make them easily consumable::
-
-    >>> r.links["next"]
-    {'url': 'https://api.github.com/users/kennethreitz/repos?page=2&per_page=10', 'rel': 'next'}
-
-    >>> r.links["last"]
-    {'url': 'https://api.github.com/users/kennethreitz/repos?page=7&per_page=10', 'rel': 'last'}
-
-.. _transport-adapters:
-
-Transport Adapters
-------------------
-
-As of v1.0.0, Requests has moved to a modular internal design. Part of the
-reason this was done was to implement Transport Adapters, originally
-`described here`_. Transport Adapters provide a mechanism to define interaction
-methods for an HTTP service. In particular, they allow you to apply per-service
-configuration.
-
-Requests ships with a single Transport Adapter, the :class:`HTTPAdapter
-<requests.adapters.HTTPAdapter>`. This adapter provides the default Requests
-interaction with HTTP and HTTPS using the powerful `urllib3`_ library. Whenever
-a Requests :class:`Session <requests.Session>` is initialized, one of these is
-attached to the :class:`Session <requests.Session>` object for HTTP, and one
-for HTTPS.
-
-Requests enables users to create and use their own Transport Adapters that
-provide specific functionality. Once created, a Transport Adapter can be
-mounted to a Session object, along with an indication of which web services
-it should apply to.
-
-::
-
-    >>> s = requests.Session()
-    >>> s.mount('https://github.com/', MyAdapter())
-
-The mount call registers a specific instance of a Transport Adapter to a
-prefix. Once mounted, any HTTP request made using that session whose URL starts
-with the given prefix will use the given Transport Adapter.
-
-Many of the details of implementing a Transport Adapter are beyond the scope of
-this documentation, but take a look at the next example for a simple SSL use-
-case. For more than that, you might look at subclassing the
-:class:`BaseAdapter <requests.adapters.BaseAdapter>`.
-
-Example: Specific SSL Version
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-The Requests team has made a specific choice to use whatever SSL version is
-default in the underlying library (`urllib3`_). Normally this is fine, but from
-time to time, you might find yourself needing to connect to a service-endpoint
-that uses a version that isn't compatible with the default.
-
-You can use Transport Adapters for this by taking most of the existing
-implementation of HTTPAdapter, and adding a parameter *ssl_version* that gets
-passed-through to `urllib3`. We'll make a Transport Adapter that instructs the
-library to use SSLv3::
-
-    import ssl
-    from urllib3.poolmanager import PoolManager
-
-    from requests.adapters import HTTPAdapter
-
-
-    class Ssl3HttpAdapter(HTTPAdapter):
-        """"Transport adapter" that allows us to use SSLv3."""
-
-        def init_poolmanager(self, connections, maxsize, block=False):
-            self.poolmanager = PoolManager(
-                num_pools=connections, maxsize=maxsize,
-                block=block, ssl_version=ssl.PROTOCOL_SSLv3)
-
-..
_`described here`: https://www.kennethreitz.org/essays/the-future-of-python-http -.. _`urllib3`: https://github.com/urllib3/urllib3 - -.. _blocking-or-nonblocking: - -Blocking Or Non-Blocking? -------------------------- - -With the default Transport Adapter in place, Requests does not provide any kind -of non-blocking IO. The :attr:`Response.content ` -property will block until the entire response has been downloaded. If -you require more granularity, the streaming features of the library (see -:ref:`streaming-requests`) allow you to retrieve smaller quantities of the -response at a time. However, these calls will still block. - -If you are concerned about the use of blocking IO, there are lots of projects -out there that combine Requests with one of Python's asynchronicity frameworks. -Some excellent examples are `requests-threads`_, `grequests`_, `requests-futures`_, and `httpx`_. - -.. _`requests-threads`: https://github.com/requests/requests-threads -.. _`grequests`: https://github.com/spyoungtech/grequests -.. _`requests-futures`: https://github.com/ross/requests-futures -.. _`httpx`: https://github.com/encode/httpx - -Header Ordering ---------------- - -In unusual circumstances you may want to provide headers in an ordered manner. If you pass an ``OrderedDict`` to the ``headers`` keyword argument, that will provide the headers with an ordering. *However*, the ordering of the default headers used by Requests will be preferred, which means that if you override default headers in the ``headers`` keyword argument, they may appear out of order compared to other headers in that keyword argument. - -If this is problematic, users should consider setting the default headers on a :class:`Session ` object, by setting :attr:`Session ` to a custom ``OrderedDict``. That ordering will always be preferred. - -.. _timeouts: - -Timeouts --------- - -Most requests to external servers should have a timeout attached, in case the -server is not responding in a timely manner. By default, requests do not time -out unless a timeout value is set explicitly. Without a timeout, your code may -hang for minutes or more. - -The **connect** timeout is the number of seconds Requests will wait for your -client to establish a connection to a remote machine (corresponding to the -`connect()`_) call on the socket. It's a good practice to set connect timeouts -to slightly larger than a multiple of 3, which is the default `TCP packet -retransmission window `_. - -Once your client has connected to the server and sent the HTTP request, the -**read** timeout is the number of seconds the client will wait for the server -to send a response. (Specifically, it's the number of seconds that the client -will wait *between* bytes sent from the server. In 99.9% of cases, this is the -time before the server sends the first byte). - -If you specify a single value for the timeout, like this:: - - r = requests.get('https://github.com', timeout=5) - -The timeout value will be applied to both the ``connect`` and the ``read`` -timeouts. Specify a tuple if you would like to set the values separately:: - - r = requests.get('https://github.com', timeout=(3.05, 27)) - -If the remote server is very slow, you can tell Requests to wait forever for -a response, by passing None as a timeout value and then retrieving a cup of -coffee. - -:: - - r = requests.get('https://github.com', timeout=None) - -.. 
.. _`connect()`: https://linux.die.net/man/2/connect diff --git a/vendor/requests/docs/user/authentication.rst b/vendor/requests/docs/user/authentication.rst deleted file mode 100644 index 4d4040eb..00000000 --- a/vendor/requests/docs/user/authentication.rst +++ /dev/null @@ -1,144 +0,0 @@ -.. _authentication: - -Authentication -============== - -This document discusses using various kinds of authentication with Requests. - -Many web services require authentication, and there are many different types. -Below, we outline various forms of authentication available in Requests, from -the simple to the complex. - - -Basic Authentication -------------------- - -Many web services that require authentication accept HTTP Basic Auth. This is -the simplest kind, and Requests supports it straight out of the box. - -Making requests with HTTP Basic Auth is very simple:: - - >>> from requests.auth import HTTPBasicAuth - >>> requests.get('https://api.github.com/user', auth=HTTPBasicAuth('user', 'pass')) - <Response [200]> - -In fact, HTTP Basic Auth is so common that Requests provides a handy shorthand -for using it:: - - >>> requests.get('https://api.github.com/user', auth=('user', 'pass')) - <Response [200]> - -Providing the credentials in a tuple like this is exactly the same as the -``HTTPBasicAuth`` example above. - - -netrc Authentication -~~~~~~~~~~~~~~~~~~~~ - -If no authentication method is given with the ``auth`` argument, Requests will -attempt to get the authentication credentials for the URL's hostname from the -user's netrc file. The netrc file overrides raw HTTP authentication headers -set with `headers=`. - -If credentials for the hostname are found, the request is sent with HTTP Basic -Auth. - - -Digest Authentication --------------------- - -Another very popular form of HTTP Authentication is Digest Authentication, -and Requests supports this out of the box as well:: - - >>> from requests.auth import HTTPDigestAuth - >>> url = 'https://httpbin.org/digest-auth/auth/user/pass' - >>> requests.get(url, auth=HTTPDigestAuth('user', 'pass')) - <Response [200]> - - -OAuth 1 Authentication ---------------------- - -A common form of authentication for several web APIs is OAuth. The ``requests-oauthlib`` -library allows Requests users to easily make OAuth 1 authenticated requests:: - - >>> import requests - >>> from requests_oauthlib import OAuth1 - - >>> url = 'https://api.twitter.com/1.1/account/verify_credentials.json' - >>> auth = OAuth1('YOUR_APP_KEY', 'YOUR_APP_SECRET', - ... 'USER_OAUTH_TOKEN', 'USER_OAUTH_TOKEN_SECRET') - - >>> requests.get(url, auth=auth) - <Response [200]> - -For more information on how the OAuth flow works, please see the official `OAuth`_ website. -For examples and documentation on requests-oauthlib, please see the `requests_oauthlib`_ -repository on GitHub. - -OAuth 2 and OpenID Connect Authentication ----------------------------------------- - -The ``requests-oauthlib`` library also handles OAuth 2, the authentication mechanism -underpinning OpenID Connect. See the `requests-oauthlib OAuth2 documentation`_ for -details of the various OAuth 2 credential management flows: - -* `Web Application Flow`_ -* `Mobile Application Flow`_ -* `Legacy Application Flow`_ -* `Backend Application Flow`_
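For instance, the Backend Application Flow amounts to exchanging the client credentials for a token and letting the session attach it to every request afterwards. A rough sketch using ``requests_oauthlib``; the token URL, ``client_id``, and ``client_secret`` are all placeholders::

    from oauthlib.oauth2 import BackendApplicationClient
    from requests_oauthlib import OAuth2Session

    client_id = 'YOUR_CLIENT_ID'          # placeholder
    client_secret = 'YOUR_CLIENT_SECRET'  # placeholder

    client = BackendApplicationClient(client_id=client_id)
    oauth = OAuth2Session(client=client)
    # Exchange the client credentials for a bearer token.
    token = oauth.fetch_token(
        token_url='https://provider.example/oauth2/token',
        client_id=client_id,
        client_secret=client_secret,
    )
    # The session now attaches 'Authorization: Bearer ...' automatically.
    r = oauth.get('https://provider.example/api/resource')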
Other Authentication -------------------- - -Requests is designed to allow other forms of authentication to be easily and -quickly plugged in. Members of the open-source community frequently write -authentication handlers for more complicated or less commonly-used forms of -authentication. Some of the best have been brought together under the -`Requests organization`_, including: - -- Kerberos_ -- NTLM_ - -If you want to use any of these forms of authentication, go straight to their -GitHub page and follow the instructions. - - -New Forms of Authentication --------------------------- - -If you can't find a good implementation of the form of authentication you -want, you can implement it yourself. Requests makes it easy to add your own -forms of authentication. - -To do so, subclass :class:`AuthBase <requests.auth.AuthBase>` and implement the -``__call__()`` method:: - - >>> import requests - >>> class MyAuth(requests.auth.AuthBase): - ... def __call__(self, r): - ... # Implement my authentication - ... return r - ... - >>> url = 'https://httpbin.org/get' - >>> requests.get(url, auth=MyAuth()) - <Response [200]> - -When an authentication handler is attached to a request, -it is called during request setup. The ``__call__`` method must therefore do -whatever is required to make the authentication work. Some forms of -authentication will additionally add hooks to provide further functionality. - -Further examples can be found under the `Requests organization`_ and in the -``auth.py`` file. - -.. _OAuth: https://oauth.net/ -.. _requests_oauthlib: https://github.com/requests/requests-oauthlib -.. _requests-oauthlib OAuth2 documentation: https://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html -.. _Web Application Flow: https://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html#web-application-flow -.. _Mobile Application Flow: https://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html#mobile-application-flow -.. _Legacy Application Flow: https://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html#legacy-application-flow -.. _Backend Application Flow: https://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html#backend-application-flow -.. _Kerberos: https://github.com/requests/requests-kerberos -.. _NTLM: https://github.com/requests/requests-ntlm -.. _Requests organization: https://github.com/requests diff --git a/vendor/requests/docs/user/install.rst b/vendor/requests/docs/user/install.rst deleted file mode 100644 index ed205dbc..00000000 --- a/vendor/requests/docs/user/install.rst +++ /dev/null @@ -1,36 +0,0 @@ -.. _install: - -Installation of Requests -======================== - -This part of the documentation covers the installation of Requests. -The first step to using any software package is getting it properly installed. - - -$ python -m pip install requests -------------------------------- - -To install Requests, run this simple command in your terminal of choice:: - - $ python -m pip install requests - -Get the Source Code ------------------- - -Requests is actively developed on GitHub, where the code is -`always available <https://github.com/psf/requests>`_. - -You can either clone the public repository:: - - $ git clone git://github.com/psf/requests.git - -Or, download the `tarball <https://github.com/psf/requests/tarball/master>`_:: - - $ curl -OL https://github.com/psf/requests/tarball/master - # optionally, zipball is also available (for Windows users). - -Once you have a copy of the source, you can embed it in your own Python -package, or install it into your site-packages easily:: - - $ cd requests - $ python -m pip install . diff --git a/vendor/requests/docs/user/quickstart.rst b/vendor/requests/docs/user/quickstart.rst deleted file mode 100644 index b4649f00..00000000 --- a/vendor/requests/docs/user/quickstart.rst +++ /dev/null @@ -1,569 +0,0 @@ -.. _quickstart: - -Quickstart -========== - -.. 
module:: requests.models - -Eager to get started? This page gives a good introduction to getting started -with Requests. - -First, make sure that: - -* Requests is :ref:`installed <install>` -* Requests is :ref:`up-to-date <updates>` - - -Let's get started with some simple examples. - - -Make a Request -------------- - -Making a request with Requests is very simple. - -Begin by importing the Requests module:: - - >>> import requests - -Now, let's try to get a webpage. For this example, let's get GitHub's public -timeline:: - - >>> r = requests.get('https://api.github.com/events') - -Now, we have a :class:`Response <requests.Response>` object called ``r``. We can -get all the information we need from this object. - -Requests' simple API means that all forms of HTTP request are as obvious. For -example, this is how you make an HTTP POST request:: - - >>> r = requests.post('https://httpbin.org/post', data = {'key':'value'}) - -Nice, right? What about the other HTTP request types: PUT, DELETE, HEAD and -OPTIONS? These are all just as simple:: - - >>> r = requests.put('https://httpbin.org/put', data = {'key':'value'}) - >>> r = requests.delete('https://httpbin.org/delete') - >>> r = requests.head('https://httpbin.org/get') - >>> r = requests.options('https://httpbin.org/get') - -That's all well and good, but it's also only the start of what Requests can -do. - - -Passing Parameters In URLs -------------------------- - -You often want to send some sort of data in the URL's query string. If -you were constructing the URL by hand, this data would be given as key/value -pairs in the URL after a question mark, e.g. ``httpbin.org/get?key=val``. -Requests allows you to provide these arguments as a dictionary of strings, -using the ``params`` keyword argument. As an example, if you wanted to pass -``key1=value1`` and ``key2=value2`` to ``httpbin.org/get``, you would use the -following code:: - - >>> payload = {'key1': 'value1', 'key2': 'value2'} - >>> r = requests.get('https://httpbin.org/get', params=payload) - -You can see that the URL has been correctly encoded by printing the URL:: - - >>> print(r.url) - https://httpbin.org/get?key2=value2&key1=value1 - -Note that any dictionary key whose value is ``None`` will not be added to the -URL's query string. - -You can also pass a list of items as a value:: - - >>> payload = {'key1': 'value1', 'key2': ['value2', 'value3']} - - >>> r = requests.get('https://httpbin.org/get', params=payload) - >>> print(r.url) - https://httpbin.org/get?key1=value1&key2=value2&key2=value3 - -Response Content ---------------- - -We can read the content of the server's response. Consider the GitHub timeline -again:: - - >>> import requests - - >>> r = requests.get('https://api.github.com/events') - >>> r.text - '[{"repository":{"open_issues":0,"url":"https://github.com/... - -Requests will automatically decode content from the server. Most unicode -charsets are seamlessly decoded. - -When you make a request, Requests makes educated guesses about the encoding of -the response based on the HTTP headers. The text encoding guessed by Requests -is used when you access ``r.text``. You can find out what encoding Requests is -using, and change it, using the ``r.encoding`` property:: - - >>> r.encoding - 'utf-8' - >>> r.encoding = 'ISO-8859-1' - -If you change the encoding, Requests will use the new value of ``r.encoding`` -whenever you call ``r.text``. You might want to do this in any situation where -you can apply special logic to work out what the encoding of the content will -be. For example, HTML and XML have the ability to specify their encoding in -their body. In situations like this, you should use ``r.content`` to find the -encoding, and then set ``r.encoding``. This will let you use ``r.text`` with -the correct encoding.
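As a small sketch of that pattern, you might look for an HTML ``meta`` charset declaration in the raw bytes before decoding; the regular expression here is deliberately simplistic, and the URL is a placeholder::

    import re
    import requests

    r = requests.get('https://example.com/page.html')
    # Sniff the declared charset out of the undecoded body.
    match = re.search(rb'charset=["\']?([\w-]+)', r.content)
    if match:
        r.encoding = match.group(1).decode('ascii')
    text = r.text  # now decoded with the encoding declared in the body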
Requests will also use custom encodings in the event that you need them. If -you have created your own encoding and registered it with the ``codecs`` -module, you can simply use the codec name as the value of ``r.encoding`` and -Requests will handle the decoding for you. - -Binary Response Content ----------------------- - -You can also access the response body as bytes, for non-text requests:: - - >>> r.content - b'[{"repository":{"open_issues":0,"url":"https://github.com/... - -The ``gzip`` and ``deflate`` transfer-encodings are automatically decoded for you. - -The ``br`` transfer-encoding is automatically decoded for you if a Brotli library -like `brotli <https://pypi.org/project/brotli/>`_ or `brotlicffi <https://pypi.org/project/brotlicffi/>`_ is installed. - -For example, to create an image from binary data returned by a request, you can -use the following code:: - - >>> from PIL import Image - >>> from io import BytesIO - - >>> i = Image.open(BytesIO(r.content)) - - -JSON Response Content --------------------- - -There's also a builtin JSON decoder, in case you're dealing with JSON data:: - - >>> import requests - - >>> r = requests.get('https://api.github.com/events') - >>> r.json() - [{'repository': {'open_issues': 0, 'url': 'https://github.com/... - -In case the JSON decoding fails, ``r.json()`` raises an exception. For example, if -the response gets a 204 (No Content), or if the response contains invalid JSON, -attempting ``r.json()`` raises ``simplejson.JSONDecodeError`` if simplejson is -installed or raises ``ValueError: No JSON object could be decoded`` on Python 2 or -``json.JSONDecodeError`` on Python 3. - -It should be noted that the success of the call to ``r.json()`` does **not** -indicate the success of the response. Some servers may return a JSON object in a -failed response (e.g. error details with HTTP 500). Such JSON will be decoded -and returned. To check that a request is successful, use -``r.raise_for_status()`` or check ``r.status_code`` is what you expect. - - -Raw Response Content -------------------- - -In the rare case that you'd like to get the raw socket response from the -server, you can access ``r.raw``. If you want to do this, make sure you set -``stream=True`` in your initial request. Once you do, you can do this:: - - >>> r = requests.get('https://api.github.com/events', stream=True) - - >>> r.raw - <urllib3.response.HTTPResponse object at 0x101194810> - - >>> r.raw.read(10) - '\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\x03' - -In general, however, you should use a pattern like this to save what is being -streamed to a file:: - - with open(filename, 'wb') as fd: - for chunk in r.iter_content(chunk_size=128): - fd.write(chunk) - -Using ``Response.iter_content`` will handle a lot of what you would otherwise -have to handle when using ``Response.raw`` directly. When streaming a -download, the above is the preferred and recommended way to retrieve the -content. Note that ``chunk_size`` can be freely adjusted to a number that -may better fit your use cases. - -.. note:: - - An important note about using ``Response.iter_content`` versus ``Response.raw``. - ``Response.iter_content`` will automatically decode the ``gzip`` and ``deflate`` - transfer-encodings. ``Response.raw`` is a raw stream of bytes -- it does not - transform the response content. If you really need access to the bytes as they - were returned, use ``Response.raw``.
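Putting those pieces together, a typical download helper checks the status code first and then streams the body to disk in chunks. A minimal sketch; the URL and file name are placeholders::

    import requests

    def download(url, filename, chunk_size=128):
        # stream=True defers fetching the body; iter_content then decodes
        # the gzip/deflate transfer-encodings chunk by chunk.
        with requests.get(url, stream=True, timeout=(3.05, 27)) as r:
            r.raise_for_status()
            with open(filename, 'wb') as fd:
                for chunk in r.iter_content(chunk_size=chunk_size):
                    fd.write(chunk)

    download('https://example.com/archive.zip', 'archive.zip')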
- - -Custom Headers --------------- - -If you'd like to add HTTP headers to a request, simply pass in a ``dict`` to the -``headers`` parameter. - -For example, we didn't specify our user-agent in the previous example:: - - >>> url = 'https://api.github.com/some/endpoint' - >>> headers = {'user-agent': 'my-app/0.0.1'} - - >>> r = requests.get(url, headers=headers) - -Note: Custom headers are given less precedence than more specific sources of information. For instance: - -* Authorization headers set with `headers=` will be overridden if credentials - are specified in ``.netrc``, which in turn will be overridden by the ``auth=`` - parameter. Requests will search for the netrc file at `~/.netrc`, `~/_netrc`, - or at the path specified by the `NETRC` environment variable. -* Authorization headers will be removed if you get redirected off-host. -* Proxy-Authorization headers will be overridden by proxy credentials provided in the URL. -* Content-Length headers will be overridden when we can determine the length of the content. - -Furthermore, Requests does not change its behavior at all based on which custom headers are specified. The headers are simply passed on into the final request. - -Note: All header values must be a ``string``, bytestring, or unicode. While permitted, it's advised to avoid passing unicode header values. - -More complicated POST requests ------------------------------- - -Typically, you want to send some form-encoded data — much like an HTML form. -To do this, simply pass a dictionary to the ``data`` argument. Your -dictionary of data will automatically be form-encoded when the request is made:: - - >>> payload = {'key1': 'value1', 'key2': 'value2'} - - >>> r = requests.post("https://httpbin.org/post", data=payload) - >>> print(r.text) - { - ... - "form": { - "key2": "value2", - "key1": "value1" - }, - ... - } - -The ``data`` argument can also have multiple values for each key. This can be -done by making ``data`` either a list of tuples or a dictionary with lists -as values. This is particularly useful when the form has multiple elements that -use the same key:: - - >>> payload_tuples = [('key1', 'value1'), ('key1', 'value2')] - >>> r1 = requests.post('https://httpbin.org/post', data=payload_tuples) - >>> payload_dict = {'key1': ['value1', 'value2']} - >>> r2 = requests.post('https://httpbin.org/post', data=payload_dict) - >>> print(r1.text) - { - ... - "form": { - "key1": [ - "value1", - "value2" - ] - }, - ... - } - >>> r1.text == r2.text - True - -There are times that you may want to send data that is not form-encoded. If -you pass in a ``string`` instead of a ``dict``, that data will be posted directly. - -For example, the GitHub API v3 accepts JSON-Encoded POST/PATCH data:: - - >>> import json - - >>> url = 'https://api.github.com/some/endpoint' - >>> payload = {'some': 'data'} - - >>> r = requests.post(url, data=json.dumps(payload)) - -Instead of encoding the ``dict`` yourself, you can also pass it directly using -the ``json`` parameter (added in version 2.4.2) and it will be encoded automatically:: - - >>> url = 'https://api.github.com/some/endpoint' - >>> payload = {'some': 'data'} - - >>> r = requests.post(url, json=payload) - -Note, the ``json`` parameter is ignored if either ``data`` or ``files`` is passed. - -Using the ``json`` parameter in the request will change the ``Content-Type`` in the header to ``application/json``. 
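The difference between the two parameters is easy to observe against httpbin, which echoes requests back: only the ``json`` variant sets the JSON content type. A small sketch::

    import requests

    payload = {'some': 'data'}

    r1 = requests.post('https://httpbin.org/post', data=payload)
    r2 = requests.post('https://httpbin.org/post', json=payload)

    # The prepared request records what was actually sent on the wire.
    print(r1.request.headers['Content-Type'])  # application/x-www-form-urlencoded
    print(r2.request.headers['Content-Type'])  # application/json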
- -POST a Multipart-Encoded File ----------------------------- - -Requests makes it simple to upload Multipart-encoded files:: - - >>> url = 'https://httpbin.org/post' - >>> files = {'file': open('report.xls', 'rb')} - - >>> r = requests.post(url, files=files) - >>> r.text - { - ... - "files": { - "file": "<censored...binary...data>" - }, - ... - } - -You can set the filename, content_type and headers explicitly:: - - >>> url = 'https://httpbin.org/post' - >>> files = {'file': ('report.xls', open('report.xls', 'rb'), 'application/vnd.ms-excel', {'Expires': '0'})} - - >>> r = requests.post(url, files=files) - >>> r.text - { - ... - "files": { - "file": "<censored...binary...data>" - }, - ... - } - -If you want, you can send strings to be received as files:: - - >>> url = 'https://httpbin.org/post' - >>> files = {'file': ('report.csv', 'some,data,to,send\nanother,row,to,send\n')} - - >>> r = requests.post(url, files=files) - >>> r.text - { - ... - "files": { - "file": "some,data,to,send\\nanother,row,to,send\\n" - }, - ... - } - -In the event you are posting a very large file as a ``multipart/form-data`` -request, you may want to stream the request. By default, ``requests`` does not -support this, but there is a separate package which does - -``requests-toolbelt``. You should read `the toolbelt's documentation -<https://toolbelt.readthedocs.io>`_ for more details about how to use it. - -For sending multiple files in one request refer to the :ref:`advanced <advanced>` -section. - -.. warning:: It is strongly recommended that you open files in :ref:`binary - mode <tut-files>`. This is because Requests may attempt to provide - the ``Content-Length`` header for you, and if it does this value - will be set to the number of *bytes* in the file. Errors may occur - if you open the file in *text mode*. - - -Response Status Codes --------------------- - -We can check the response status code:: - - >>> r = requests.get('https://httpbin.org/get') - >>> r.status_code - 200 - -Requests also comes with a built-in status code lookup object for easy -reference:: - - >>> r.status_code == requests.codes.ok - True - -If we made a bad request (a 4XX client error or 5XX server error response), we -can raise it with -:meth:`Response.raise_for_status() <requests.Response.raise_for_status>`:: - - >>> bad_r = requests.get('https://httpbin.org/status/404') - >>> bad_r.status_code - 404 - - >>> bad_r.raise_for_status() - Traceback (most recent call last): - File "requests/models.py", line 832, in raise_for_status - raise http_error - requests.exceptions.HTTPError: 404 Client Error - -But, since our ``status_code`` for ``r`` was ``200``, when we call -``raise_for_status()`` we get:: - - >>> r.raise_for_status() - None - -All is well. - - -Response Headers ---------------- - -We can view the server's response headers using a Python dictionary:: - - >>> r.headers - { - 'content-encoding': 'gzip', - 'transfer-encoding': 'chunked', - 'connection': 'close', - 'server': 'nginx/1.0.4', - 'x-runtime': '148ms', - 'etag': '"e1ca502697e5c9317743dc078f67693f"', - 'content-type': 'application/json' - } - -The dictionary is special, though: it's made just for HTTP headers. According to -`RFC 7230 <https://tools.ietf.org/html/rfc7230#section-3.2>`_, HTTP Header names -are case-insensitive.
- -So, we can access the headers using any capitalization we want:: - - >>> r.headers['Content-Type'] - 'application/json' - - >>> r.headers.get('content-type') - 'application/json' - -It is also special in that the server could have sent the same header multiple -times with different values, but requests combines them so they can be -represented in the dictionary within a single mapping, as per -`RFC 7230 <https://tools.ietf.org/html/rfc7230#section-3.2>`_: - - A recipient MAY combine multiple header fields with the same field name - into one "field-name: field-value" pair, without changing the semantics - of the message, by appending each subsequent field value to the combined - field value in order, separated by a comma. - -Cookies ------- - -If a response contains some Cookies, you can quickly access them:: - - >>> url = 'http://example.com/some/cookie/setting/url' - >>> r = requests.get(url) - - >>> r.cookies['example_cookie_name'] - 'example_cookie_value' - -To send your own cookies to the server, you can use the ``cookies`` -parameter:: - - >>> url = 'https://httpbin.org/cookies' - >>> cookies = dict(cookies_are='working') - - >>> r = requests.get(url, cookies=cookies) - >>> r.text - '{"cookies": {"cookies_are": "working"}}' - -Cookies are returned in a :class:`~requests.cookies.RequestsCookieJar`, -which acts like a ``dict`` but also offers a more complete interface, -suitable for use over multiple domains or paths. Cookie jars can -also be passed in to requests:: - - >>> jar = requests.cookies.RequestsCookieJar() - >>> jar.set('tasty_cookie', 'yum', domain='httpbin.org', path='/cookies') - >>> jar.set('gross_cookie', 'blech', domain='httpbin.org', path='/elsewhere') - >>> url = 'https://httpbin.org/cookies' - >>> r = requests.get(url, cookies=jar) - >>> r.text - '{"cookies": {"tasty_cookie": "yum"}}' - - -Redirection and History ----------------------- - -By default Requests will perform location redirection for all verbs except -HEAD. - -We can use the ``history`` property of the Response object to track redirection. - -The :attr:`Response.history <requests.Response.history>` list contains the -:class:`Response <requests.Response>` objects that were created in order to -complete the request. The list is sorted from the oldest to the most recent -response. - -For example, GitHub redirects all HTTP requests to HTTPS:: - - >>> r = requests.get('http://github.com/') - - >>> r.url - 'https://github.com/' - - >>> r.status_code - 200 - - >>> r.history - [<Response [301]>] - - -If you're using GET, OPTIONS, POST, PUT, PATCH or DELETE, you can disable -redirection handling with the ``allow_redirects`` parameter:: - - >>> r = requests.get('http://github.com/', allow_redirects=False) - - >>> r.status_code - 301 - - >>> r.history - [] - -If you're using HEAD, you can enable redirection as well:: - - >>> r = requests.head('http://github.com/', allow_redirects=True) - - >>> r.url - 'https://github.com/' - - >>> r.history - [<Response [301]>] - - -Timeouts -------- - -You can tell Requests to stop waiting for a response after a given number of -seconds with the ``timeout`` parameter. Nearly all production code should use -this parameter in nearly all requests. Failure to do so can cause your program -to hang indefinitely:: - - >>> requests.get('https://github.com/', timeout=0.001) - Traceback (most recent call last): - File "<stdin>", line 1, in <module> - requests.exceptions.Timeout: HTTPConnectionPool(host='github.com', port=80): Request timed out. (timeout=0.001)
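In practice the exception is caught like any other. A small sketch that retries once before giving up; the URL is a placeholder::

    import requests

    def get_with_retry(url, timeout=5):
        for attempt in range(2):
            try:
                return requests.get(url, timeout=timeout)
            except requests.exceptions.Timeout:
                if attempt == 1:
                    raise  # second failure: let the caller handle it

    r = get_with_retry('https://api.example.com/data')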
.. admonition:: Note - - ``timeout`` is not a time limit on the entire response download; - rather, an exception is raised if the server has not issued a - response for ``timeout`` seconds (more precisely, if no bytes have been - received on the underlying socket for ``timeout`` seconds). If no timeout is specified explicitly, requests do - not time out. - - -Errors and Exceptions --------------------- - -In the event of a network problem (e.g. DNS failure, refused connection, etc), -Requests will raise a :exc:`~requests.exceptions.ConnectionError` exception. - -:meth:`Response.raise_for_status() <requests.Response.raise_for_status>` will -raise an :exc:`~requests.exceptions.HTTPError` if the HTTP request -returned an unsuccessful status code. - -If a request times out, a :exc:`~requests.exceptions.Timeout` exception is -raised. - -If a request exceeds the configured number of maximum redirections, a -:exc:`~requests.exceptions.TooManyRedirects` exception is raised. - -All exceptions that Requests explicitly raises inherit from -:exc:`requests.exceptions.RequestException`. - ------------------------ - -Ready for more? Check out the :ref:`advanced <advanced>` section. - - -If you're on the job market, consider taking this programming quiz. A substantial donation will be made to this project if you find a job through this platform. diff --git a/vendor/requests/ext/LICENSE b/vendor/requests/ext/LICENSE deleted file mode 100644 index c38aa5c3..00000000 --- a/vendor/requests/ext/LICENSE +++ /dev/null @@ -1 +0,0 @@ -Copyright 2019 Kenneth Reitz. All rights reserved. diff --git a/vendor/requests/ext/flower-of-life.jpg b/vendor/requests/ext/flower-of-life.jpg deleted file mode 100644 index f92cc3b1..00000000 Binary files a/vendor/requests/ext/flower-of-life.jpg and /dev/null differ diff --git a/vendor/requests/ext/kr-compressed.png b/vendor/requests/ext/kr-compressed.png deleted file mode 100644 index 53210649..00000000 Binary files a/vendor/requests/ext/kr-compressed.png and /dev/null differ diff --git a/vendor/requests/ext/kr.png b/vendor/requests/ext/kr.png deleted file mode 100644 index b18d76bf..00000000 Binary files a/vendor/requests/ext/kr.png and /dev/null differ diff --git a/vendor/requests/ext/psf-compressed.png b/vendor/requests/ext/psf-compressed.png deleted file mode 100644 index 3bc0d5c1..00000000 Binary files a/vendor/requests/ext/psf-compressed.png and /dev/null differ diff --git a/vendor/requests/ext/psf.png b/vendor/requests/ext/psf.png deleted file mode 100644 index c5815e26..00000000 Binary files a/vendor/requests/ext/psf.png and /dev/null differ diff --git a/vendor/requests/ext/requests-logo-compressed.png b/vendor/requests/ext/requests-logo-compressed.png deleted file mode 100644 index cb4bc642..00000000 Binary files a/vendor/requests/ext/requests-logo-compressed.png and /dev/null differ diff --git a/vendor/requests/ext/requests-logo.ai b/vendor/requests/ext/requests-logo.ai deleted file mode 100644 index 7da8cb73..00000000 --- a/vendor/requests/ext/requests-logo.ai +++ /dev/null (embedded binary PDF/Illustrator payload omitted)
R\È[ß‚ÜB7 õ·/G7õacÑ躶^« -ÍóIäzXÔÚ°ªcÕ{è·7ÖÑh™«Ø¶y4gK"­‚m.àGÝ©SÌ|ªDº˜OÊÕSÊfó€Ê·\TEÐ@<8ytü½Ú%*䤘UR™-ʧé)½k6èN¸•F³nR¾Í ¶h-Àx3Ac“ø .qIZp™­’Æ)Ç@dáÝ{š´/í4Ý«ÚNóq"Ç’ 0·Ù=YÃ/†šž3&OÖNe¾uj¿Lùß¿?:u_'PQù˜ÇúªíÉz-_U+µõ8õ]›% Ö8©Zƒaêöñè ²d{Qà‘ ?ÔedÐ&ퟖüFHJ“™ä¬;’ÕðsN˜£ø›F -W…|±ÚÊ}ü5«² ¢«r>cDû‰YzC¨W’ha^EÕ9jÛ8Ox8!9ÕòMЊ/T|Ý)áJÂÞþT\§I¥b}E@rÖž -AÉ,”öäX„—ˆ÷Â#³Ó÷ÒhÝ„®' •RCù¤zDƒaI“Ñï_¤3 ¸gÒêdî/¢TgkÆ»zbâV 9ïöš[Ýi€P•…<:¼êìSË«B3}zöâÙ¾mÓ.µJÁ Nöµƒ-t³é²y~8‹àtÆ×|€i‚˜?uÑ@¦)Ô’& Û ±æt+ùŒëeROvñ„8@”Ê(~vDô*A D‰øÌ`ëÁÁ§É:X‚6Øö‡cD¸É¥N0(¢¥—*¼ ¸FOœ%÷[×vÚ¼Åa¹Å#-»+Èiл#AW»¾õ.ËŠBCc•¢&¼¨‚%ZBhðѨooÐþ÷ËÝÅ„ËXªêžL“q}õÞ•Z–^ÀËpŒ‡ºòx"h_×â÷~ÇËmrx€È<#É&ú.ýÆЯë1¨4<©Ãþ Qÿà`iS_;jÄûs ÓYëÃÃA‰©¼}€Ôèn‡Aߪ;¦ä½~&ò>púv®ÚŸ³«ë|ÙZÖ»mãY ÚÖ¶i°Ùgt4E¦…ö›Pw;Ø šeË +°æ&V˜ÁÌ7w î5*¬’\¿™©üïßÍ/D HGô¤ÁüŸÍ|K5…OJ-¶i»¦±ãÆ*Â82`­‡ŽJw„VgsïMÌÌ6ì£Õ§dËQ­¢Þ½ƒLâÔEœ ÓÁq‚1sLùÊvê¢t¤ƒaþ¾mjc:Ê<ŽŒêà+¹ËbVÚ2‰©g”Á¥€ù"»a·ˆ#|@S8vh£žœmê÷ìNL>Ñ"¯O4ÜDÄc bÆÑS ä7 êü”÷kˆÿœ6êeÕ–ç«-§¥ ·Ko; tjÈyL§vÛD¨ßÒ=þbU:¡Þæ/Õ)Ý»ÈeA†¦^@öçÜ¡.1 £ Gõ©wx…Ó“¾lä÷î:(¥"Í®(61|ŠÄ¢ˆØç) 6Ê,ÜwÔã¬âD…‹ˆÚä¸ýii é$uj6Ù£fÊAO‡P§;>‘Çe9a·èNÍâóFøœi`! x¡¸C3›~8f—e(aeêÜôÉÛŽtþ§¼Ì²c¹aºoÀïh 5ì)ûÿ.Xvºý’œä§mõP¥IàÂ@ÂØ·m©ãÉ, -eFç‘U€w}[~¶ïÎfÜ¥%€£ÑP/µ¢ãoE–(sŽYA¤r Î3"‰eœ:8Òï$š¥¡ÝÏ/Êq5.u‹Ý¼#ÍD¦jÙq}CIùùýþº‚µª%ý—ƒÓˆ%×Èe’©PÕ°]ŽëÁh–+Nç˜,{v ?°ÙF[ RÇy˜)qL5¶,ˆ÷-oî××SuÚ®òú5¹ò ì^£‚·®Ï |Þ³A§ÊÌ`} -îdb‡ëöõëþÃ_u±á~‡Ïa^\èVûêéÔD§+ø¸Õ_½p2PÉõdØcú= ic_q¸R~¼¾þTÛô¶n¾urMÌû’±é@T&“'‚ìA¬ÐüØ%zwÓ‘zU"Ò|€©Û¸v¯*!\ÃM¸­ã&a–£mzˆ|kM/WÜçâ#üõäróZVµ£ŠÜ–Ãåiè©,ïJšÁ–šä>/¼´Þš6é¸ydZQ=ÄÝ`h¼z´ ŸÂ«&.ÑmvjéTðM=ŒºSwqu> ”ڕТ†¾ÛÕ˜Û×9E¥ÈφxP¯»u #´ò”­c]Ð9Ñ¥#ët)ït!KtG¹¦S‚VœÖRsNƒÄGæÃég:¹ zw-c;íúæªè±Û E™CkEl:£ìä&²CYBG‹“4’Uù„ͧt¦Ù0pø[âžƼҔIs–Ã9L>m+"#îo_°Ë¿ÃáCÝ—›¹Ø6¿ØÃO[õÿågç˜Uç¹5r¥5Ù f SIðßÿá’bL)ÿqÊ/ìVÔÞY÷$—XÚб*<¨>‘q8GJ $ɧ`¡ÁŠYÅáX3ä™<–*Èrôø¬¤éLì2Åßxr¸¥è‰e| -¢z ázÌÁ-Í«È¿d  ‘”ŠêÔ °MUUw%ÁD’èAÑÕ(²ÖkÚg/ãšL+f”j wàþñj~ée±—–â¦ít{²ã¢×*W2!Y:XŒ® †Ð<¶YZ!Û¹¦b¶‹"Rèl?ÑJø¼ü䟑êdSè²3_ùG[…YuÄü6yOg-Ü@s˜½}Àr0P€åR¡p[¤—9+;ìQoð1?|µ¥C¨€Žàñ8’ ö3 Ë’©úæ°±‡š];Êc¯“ Ó»à¹ëpÔ0dLíØc<{½tÞH˜Ö,‰ƒuð.¼ÀØÑu«o8ùtÌBDß°¥¼¾þ<îä™tu=½šlúöžc-Ö¤mK ºÙ* -:êÙ;ÛahjïìŸÃØ…}NãZ*Ý +Û-_Wl\&³ëKð ø<± f8Ðfü‚TX»¸Vöò!­AåϤÕÓë¾ý mâFc¬:þè Îa (‘J¹ fCMÍþÅxàa馥}1É´Òœ§ßV9;Ý ìLç‘× -€¼„.[ÿÿ¨€àE¶F‰6Žã­¡›&ûûv>³ž¡Û9MÝl]ÄÄäJ°jkwLœÌ(ع[³öO|Ù×6Úa€Ë}Ô+HužW¸Uö#ò»<ãB‹³è(Wø0Lc;¿(pBWc:’ñg"¦Mmž–NmÛÙmREÅàεBÇšži@õ½Žw.VGeÐjm¯î‰Òô©ZK<9Òé±×I„æ3¹…ÊnÊ,mE‹uÙÖQ' `ašÕMxØ|×Î$LÈÀS»AœÎþ^3Bþ÷‡Vþ/õo‡EiAzÚ·ºŸP©œ -§ž=A]ÏË„v -Ì1ªúæÇÒtÊó [Š9hVTÌ¡ªQI`Óú¼'±E“3+ öDóÜv"½½fÅBüi±®ÃØVD]Ô¹ò–ÜRéçs™–äýø–ÚêÞÞÕßxicz‘Sô=Á%ÚY‡Ûí˜h}ª {YÊÔ Žw7åy •V£';κ¶~»ûËV««ÃÔ¦,˜åef‚Ñ;¾B¾_Ô'´Ý;ñäÝs½Ó;ƒ­®ó q04@xÕ^Ò¿‘l„Ãm§WÉüÇÒÎtnŒì‡`t˜Úá;}MkÔP­UÑEú1Ó)K¿Ð‡T•®Sw®õÜ ‡ÅiÉ,éA5!÷#tÆ'É€ËÎîÛÐåÒ½æh%ÌwC†Ã$¶,(YÚ—$wª¶[1H±jt66Yµ;ò¯i¦9¬Œh‚Tê ¸k˜FVcœ&œ={ZcaÆ¢Äþ\i.‹.d±ú{Á’Ìñ×ëo„qp2ý †þ=Pj–BR$&CÞÐn15±í Š§åoáÒvfEzúj´LK÷5 ùëcÕH ]ºÚ¡ƒÓî,»‰£âc¾å7nj]観‡rÁšñŒÈ*§<¨%y© cÝÖœT¾^ü dá„zÁãqYh¢¦ý&¢"šî¼n`¢‡ÈzW;3áž[vbw×n±S•mšF(ù$?Q†TRî-ªæúË ©0Ãj»³û)‘ù( ±4­‚ìâ"é$`-€íý4Ãý…à–àgÿþ¶ŽÎM1Jÿ–??€ÄL賬Ð:ds€­¸Œ×Ÿ“¡(cXÐÒÊ…G_Ìß‹V‡T·8%Õ)£L‚Ïë2h> /Øå&‘*óõ«[.Bg>u}ºÉB£å ŒFD±]{> qªÒÚ›d1¾öõØpVƒ?Wé°&7#˜b—‡`Æo -¸!˜‰ñnFe¼0;’6=hAM¥¡QgHÈII¿L"«šŽ(uÓ&ú8Ñ”©†‹G×eÖV4ÿµò#¯Ó±W¥é¨ÙÁˆ‰aÁ´Ð\S¾¡“êÌâÉ‘à¬úWbÚ»úª¯ -1µªŽ³Òõ!ĸ@Ü(»W:òæ› 6Àw¬hr¡éL¡«[’pl¯›™^ƒ|`æ|L¾††C6k6cZ#Ò\Gz‡©éË Èµã½Y;Ž]o\^eÔ¦Çñ2—A~¸”žÄH´Ä¯Lå¾ztºëÐáÍ,{æêúC/“ º¥çËÚt}]UÞÏËe0ÞHÏIg.{z™P+,¡h­².l£fÇ ½ñØݽÖKv¨ƒ=ÅíLÿ.ÚÕÝ碓KõŽtÇ¥Šº jÇÒ7ã2 `Ã(ÃéõY³ð ¡Ã#Z…z˜}k}-¹R![”sJóÉÿM¨WÍéYê²á}P ;uÎäk–v¸5.F\ÎyNaahc:îŒ}1¥*Úª¾j. 
lO!Ãä}Ūòj^…O½¤#`ºÈ?¾ëçúðü [µ¢ÎhNXu{Šƒ„ìêw×$ -®Ò»f…>ʸ"‰_‰ãiÕ?ƒAFlÍ}Iùf)"ó6Ó‘qÐ’€ÉÀ"áLì 06Ð|ÚÒe´Õ°‘+Ãù^®kåË¿W¾ù'ßå’I®ÃÐù[…7àw$JÔgO½ÿqã‚áªLÛ§'¶Ã!Q$pFoaÒ`Åëa}6C¹Êw¬% ªÔ¶/,¶ ˜3äêèüÔÍji“ÔMGFçk>¶úñ9,®]Òæq¨S³ÆÙ }s¨ŽœÎÆR]ÃT$gÍû5tE¼sáÍZáÌ‚‡©P¯âœi¥dÚÁײéµz0¢y¾.§4ÁK7ÒT­f¥=5Ô]úì Ø•CCˆ³::êxÌ'G2 ù‡Ö}¯®iùLðaÃ|Nѯ¤ãBvéjMúÞ^‚iªÙÈŽÝC¸ðýmTåZÕô ¥×µ†"qƒ(A˜ë–*?UC4 ¶[lsî(%fEºŽÊè¿~âpºX?F¬Lœ9"âÚëlOKÿ ïMô± ‹Ö.¢ç™îÔ^»'âÄ:ŸQ¾,>ô€M„¼õŸ,¨ðXS„/°'rJTÖ%”Äs㨀I¡ÆHáà$£¾ÑK÷*üùö6M¢ÚóÓØsìóþÂx„Ÿn±GŒ^OôÒð€ãëÞ®¯cÖ$y‡‚Ô•Ï¢½§Uåâ½Óêݺ‰Z_÷sûl¨ú½V< YݸûûÁÖ¢þüü®LcÔ<®«~Ëû¢ÿ°§Þ¯g¾î.‚czzeÅ/Ë <{R‘ö*rÖ™s|c<4uŽ±|GéDÓŠù…ör*«Oç×P‡Ô¹áü%ZÌuÖ«ÒÊQ¨†¤|µó”7&˜«kw“I/Ð×-R*,R~¦a‚•½‘°ÉdÁдշŠÍZëý''¸/S†Û¼p•1•((EÅ¡wyºœiR*v ­}XV¦C–Ýâ¹s¨šå£4ñ>fň¡Ó¸F0^~W±Y¨y7D”|Í |m¿Ù0™­œûÐiOe­*©0QôÌ‚d5›"%L7,w_ 0ùo]’Rõ:£s-åc‡?ïX Œ2Þ¨Ôª¥#ƒY±B#®%ð•2¶Ô±Ë»e“oJE™ÊË¥â?¤*¤SöBf_&…6\´•šáú]"z@îºë15¾_Ã’4]Å„l‹ti+“F3€0/ÒdŸyÙšem²ëÑǼ¦eR¾¿:—è8zëñ—ãµÃJ¥·‚†ýÕßtûöòV˜ ¶Uò¸z4ã"¨âuZpZÍð Ç%ÂÎ2F,˜åš8µ‚w*#ÿûó[Y×aU“xf‹WÚ·‚X€¸7E˛ݘ-©éU#•cÌ,tŸÚ½.+fö¤ -ºžlUÕÖ1~ ¦ÐWCª¡ø“«>\ãK~pfU -…—S—•Â •V.l0½ÿiã‘Ϥ•Zò&ÇÓ -:,ÇóG“åô¿Õ•C‹|¾3rÖ?ÐÐpÒÒ¾ËêÔ¹ÃðZ{E‚êÐGU§[6lf©©¥tS{&ÐÏ™*ŸJ— årY´»Ù ® ²X˜â8E73Ìô,©“ÏÇzâȉúO¶nÊ™½ž¹|ýXô^’t!hÙãõ’yIÕ·k°´íÞÆHrmN- BAlY8°}}œÇº¼úýËv™dÇ1tߧФ—Cät§¾ÿÚøˆ¤\*y#›ªÉŒøÈ,%åÙf†äŽ‚#j¨ì«¬GÔJ·¨e¹KOîÅþÑ/Ã* ãd¨9fE;ú§wØ@•õÓ½Á[JÃÇk¿e ®_~þëêáÕ–Êa‘„g\¼þx‘æ¡ù=ôÿÆÛ–æ”Pü -ZæéRÅ`§ê1¤ïru½g&\y¢áKoŒdÅ—2X× ön¶¤òÒVe,`t2lâêÖŽ¬mG\´xý¶¢» -ŸÝ¼A+ -ÓåØgyR×Ñú3xaE‚ëN&2&VãѼç20Æľ·Ù¶¬ðÔÎHf¬ÕÑ…Uâ`ºá…Ó±íjÕ|€~þXh«êýy~©Ûô ×³3çuKkD%QµkÈe –ûÃï¡‚»šn/Vž;µ8"W#,Þ­Y“›dIÇ?Í¢0‘MÊÐrn3{àë…w@C'od-ìÝùˆ˜“ßE" ¢=0v¦„Údí EïÃQ¿°Šæ»ÄÈÑcÌ$·“÷¬ù®BÀÈsJ~µÁ2Ü[}ÍËÄu‡Œ4£_q%…öO:)š^}µÇÓ]FÊâ,@ºEd%ûÉÅ{ž §™=´4‡-nša‹Y¯n¸GMÖÿQ¨búÕMlTÓûá¯Ôïd[üÆåç¾ÛKÿþ|¨uʨP¢¬9`ÈׂCÍhÆÚ3ìê;Q-ºûÐjòSzwxd&ZÕû¹ Õær[<«7Fœ  IúÌ€·t_YŠê³­Ã¼BÍÃjR[¦ÛQÇCr´O¿X) -cå“Ä&GÆñÈV΋Oã› øY§¤mÌð¯$?„ ZúáîÏN«Ã³ªGuð6;‹«Qíˆl¿ÆbÚßà›áJØÝkÝ¿±€[u™W5ÓGã8ÃË@.æÛýê/•6M^a©’%¼oòIYo̲>^þÚd-ßÒïç&B^3­ÿ¬ô$ÛÞÖƒ5Úzp½5ZÉD=5jok )·qD*Ýn´, ž¬€§rŽì‰©Ë3«9ní¡‰…ÜîžR7*Ÿî¸Ï˜¸ÁÚýI3Ý+U³I\´chª§|NÂ"s®ç;I`x™«W³»eä4U¶{×áp™Å³ªËODZ¾ffåR•ûà%¦$ü°-ó¤yèèl3;:„NÖ–Ï-»Gàµ)ú:¼ø³¸Óx’†àj®½ÚkPuÒ5ï¨bX:˜šååÅVµ®<(³¡Ž<ôÛæ×ùñúómBö ÔiËØ5ãGíûž4Á›_7º±Ãø¶AôÖÊ.çĶSà5<)Q‡yŽ]m«gÝ×JBšªÝîòjÿµG5Mz¸¦Ã¤f¡&l:ohõŠËÉÐöL.ÞÀ&8=H3|T­ÄÃÈxá)Á,?‡ô¡9À|ÞªGŠ;)¤F~× -§{µ“)ÊU€á|ißM£§ò3ÖtÞƒó„x«:왃·áÀ&½˜gì¤å„VKM™[æc*µóÛÝžÑ]q‡™©Ì‘³e(›Ý«÷ª–°ea5‰ê`ëø/GjöIš/ÿº†1ô"c¨}¯cTÔöbå.Ë·1Â9‰†¶NéÅ"2¼(ÍܶÞMp˜¨´W5˜ T¿ƒb­5ƒ| g¤Ü²Ëæ˜ÚNž¯Ñ)ý[¯ˆ° Ë¿Ùp,ɾÜÉÖ;¯û´38㦠xºØÁå' øÄxæsk»|6hôJ¹«°¢‘¢»”[’ÐÌç’þ¾,¿òÛЧ* ñ莪žÖ¯W.ðë8#êÍ—ÑŽ)Çßi>9„ÛzÄ¥õo¡4Κ»n3;™I¼€:4½Ó¯ïrù÷ç; h+1%Ž[RÀɧ%k£Ùš1¼r_Æ×5L¤Õà¥AóÕ&›òauüÚ†õn¸rÅÒÆM{.Ê#2ZÎäI@“!Zz v½qËQJ4|Ù›=¬0»\&" ǜՖ_Ù†ÂXK’a4¸Ý"ÂæCúÅžÞÄË3\ëy9.]¶™T3¬­e\£¯¯ƒNÖfCt¼;ôTRElÈ>éî»â0êÉ24Å–=ž­]®ÍZÆîæZ³Ô­ºæ®ÚSpšßÉy2Ù ¯¿¥(áþU@ÎI‘»– L5w]ª²!¯H?á -êŒTåÔÅJã×°å#Ž¼¥O›ŽÆÎ$ê%µLï“EHW%ÃV¥ÑÛäîÁé—…ÿ#U>ú£êžér™,cýÑ[²ZlîúöÀ{èˆíäQs†Ó«/`zèp\]Èpƒ7uswRý?W¯!³½þ ¹„<í1û¤Ìq³rýüz"dzl‹Õ¹±Mõú‡ÚȪlê­•³4Ã[H¶™’o~€8ºgh€˳²ÓŠAÌa/þH$ËãçÍÐåap™n»l%%kiIU¶QR*‹kꕤ“«bÇX¼¿h®Ô}´Ú¨¡•‡ËÃ%«Éâ†ìaÜæÅü1Òƒ -”1<Øý½ZLŽBtÀXÍzq`tQ눕t)Øú ^u·ëŒ-…³×ANY´H1%ÛfË?l—Mz9 C÷s -_`òIÔÿyf›û¯TÇ]îl:)»¬–(xX¦i ÔÒªy\Okú©5$Íh¯§9î»ÝPÞz[ý¦ -Ûs¢x“é‰^ZÆZŒwÒÉ“¯îàΟ;ÛÐh² œåî½åwäÁÒmß=¾þjP³—¾›Ã]÷Ñom¡ -øêD¯Už7a7V`»n|]’v¨VX´Ò°ÉhJek·ÌOf§O¤#W­fJG¤tpæm0;n€’ %'Í|¡õ᎑ +Hˆ–"P'0ïòÖÈžµIÖv»†p~Q•w§‰lÑ7ê­TØŠ¢¾—iæ@ºLë£÷‚iu×µVêoù×)ß|«mmTg@©A Ä«É8@XváŒh!]µì¼çáÒM;C;¶ñ Þz"¼H“íHÀíözµl”Ø«nç8‘ÆΘµÖ¹Ol`)5š‘C£ûg;æ_éÝçä[cÁ–j9hä÷š…™áûó§+…„Ì ÊëaJËY¯.¾vG½( åªÉzžI€ v9[í=slêN6çªõE&? ºNÑ3Üi 7™´ÒS© ð´[Ï.Ò_ÄN_DN¦G¿þ;' -@/.+žyÛ°iŃtQšƒ›™ÅÏvøã…ÔËBÖGƶæŠyê‹;š™m—äŽc}øÕ{bŠq…A‰›7ÞÒ›A|nG0àð¡»g½>ÿ`cPB5’ZÓÀ=öÔ•|;ÙV©¶¸µŒ²gd•OËݬ°Î’ðu`bÔ*“]m‰Pç\$Cœ€ì0&•Äqƒ¿–n‰_9E¾³ð¢ÖhÀ!qnX…3ƒWørèú1/Þ©]oÁÇ×¼ž>§. 
.ê€ÝiB;ÏĶFj‹gïr)Šy¯°»3Øö"?¢ ɆŠ=Ü!íÐ.:Üc¬Œyߟ?nhlíkiò[ev7Æ¢Ÿ#užWú›˜°šg,a4:ÍÒ2¹8§ñêb²"Ô€ëwP]ÙÚ§¥VÌj Ï`·Í›€3¿{qx;Ùª‘ùˆ¢œ|+Ò›÷K‹ðÆØî¦á•Ñf"*£ŽŒ˜}’Ìä““ëb½n³‹®L¢ÎK˜üf¹ý‘nî†êØSŽ+’­ÆãøÍ5aÃòœ KI­>©G–0ÂßR.-:‘tØö’m&ÑB…‡§¢kúÛ# DÚ{¼8 -OÈü&0D·6ò˜Óæ(v?ÇGt¤§q†Á½#?ih;D96x(¸Z|4̪â½.2 ¦™ð·rªÝ±§YcÌ,¬ žÌœÇ²néº$Œ8uÆ«ebPðe(åRÒ¢)•ßé_ä`ž;k𙥼pÇ ïì—yœ'‡³7ü‰v~øxí7Á"oIiPM—¡ ¥ñ_lâOنΠø­CFUW`E6ý¥òƱz¯‰ßbâôŠrË·î^jhŠ–TK1XÇÒ‚n89yÑRIÈ"¡üZ Nú„ê6§e“#ÓP©…JO+cXý5IkŽOÝ /9]ž?·{ñÈ»I±è³Ãß -g_øÀÄà™ ºHNõ­Û£…-/ ‰]çHý>ôéû"³ísëþºó·‘QµŠÌpi‘y|­¥^·MÂS,û"Êp‘˜h¢zJ›§¾–“­ùl’ÞÃÒ˜û¾Þ4¯ÿÞ–ÑíüIµC€ÆÝݹFZï8qmlÝ6ž==§¥uzا@]›þÓÇu°q ~ÿ“Òës"alÔǤI$/'2eÞ³änUÖôÛ5ÓçŸ.RŒeÀ6Ÿû#_ë³æyðl³"«Ýš‹=¬j-r#92˜=Òä/?#³¦B=µÛjA%¦1êµ>âl‹[}IžD—G¾†)ùzÛ¹´Z6¨nwÍt1ÖA«E©:ÔóA3À+uL”ç½o‘Õ¼ýVøsdÉœ§ßcxu2S AÒ3l­ŸÎ¤2ýnyÆõ=g;©9üôüÿVEWæ=ýÝðD\šWÞ›¢¬-‰½Ú:ýÔ« õ?3ÏH·‘î‘H\²:í˜-R½}%¸â¡LŽŠ±šÀÄšãŠRI8hËþG(°=ÛÈ7,ÙÈR£§–3Wè¯ÞŸ™³ ÂÔý\ÞuTŽBH 5'õ¼%¼2%$"39(76ÕJa„´ìÒ j7L;‘@³Qm|ï-ŽÛ˜:F¥¯B'ò¡î5tª¥bÅb†n,ë«m*kçÌí„ÉSï'‰¦QmÃÀ*( hjh_˧™R1G}i §Îüì…’ž“WµŒ<+µãO¶÷Y,®Ø굚„·xbÞØŠ¾ÞùíRFª¥žÎ´¤ê$‘•Y öEuôØÇXdÛ®C5ïµög÷Ær|NxÒÊÁ}?d÷x}þ8äžZ8k„âÉ1ŽPµf³&f!Òa¼ÞŠ›ª†® Ù°¡~œÚS¨787ðå­b -l™f³SC|Waßç…Fë˺ŽNTWäÌ«hºQ6Ða•µtŽM‚Rïn®Gš0ÉH2Q/›iȲÔñµý怪ùTQUÍÊGÃÕ!´ó•`¯§›±û6¸”ÀQ» ¸'­wgOäÁž¡IWµ² ¶t-†—¿'’v-n4aGE8¦ìÕ™•V(íVG©Ó½C¡«Ú™Iås“0ígÏÖ_/ªàÿü -ÈðÝo]s]{w¹½r³ƒ†Nøº9錖MÊ& žô -—YÃ×^÷»vMP(òÊ -¯Vûšï²OÚäÐÞä—°vÿYhæ–õзmë–)ñ¡’ -6Öuƒ¦·ñ~þ8~ã껄«éô§ë‹þœ~ýèpíý¦ÄXM-l˜š7yòŒ+Y%Ÿ ²á>°ä5Wf Ê6->¢!G]›A1è6–™­šõɨø…v¨oAK`Ò{†‰c±-‘n@IŽæîÓɽÐ- ½x~CSYÑ™N2p¹ùö¤*Èb¤9Ç ·óá&ºp’9xW0"Tm×j#GA[é¶6[æ?F²)g®S6;!Ž¦·Jv÷ç•ß?êܨ̹  DBEÛ|oxæO­ÆvaWJ½5f±æ‡1‡®f)U áS¯›­wmšÂô¨3¶°Bµ`%p¼oáöÊŽŸAœ ÄCÅÃÁv{Úùˆøzÿüp©†®J0†jSûÛÁ´bË~ 3©NÒ ³Î'tÜneÅ.—µØ¼õX¹A&NÚ­½·•Ý‹ã²íÚÜêÓÒ¦-Ë©f³?ÜÀ5hF €öÑ7=8Ó¼Ã^vÚmå–…¦ÁË5ïú¼a Kµ¤›¡ÞGºášŠËVyÀª#W^²Nø<£Ù圆½û‚-/ã -á¿ÍþI_¸A†Ö¼£ÙÞø=,£#f±´åoÕ5‘Õe>@÷à]È‚uåcn3>Jk3ý‡½ošõûóGQVAÉ´®üXý_ÞF‹y>õyõ#ãYYÉ4ÕXªszëXîÉ÷&È'½# Sžâby÷´Žûþ/?éU WÓEøZi6ç¨ó*z92§®¶í*jN¶®£ß¶5e©s ªgÜèÆ0q&À&,,Bÿ?nÏa’-¹Sˆåf6œG$}–™WSÇ’¬?(Êž,¢Vºˆ÷8Ñhqxû·—Âhâç’;ÂË–±azjƒã«vèbaúÈbÓäèÛ(é¡Ý)vß$ÅÅ訳¦¿Â¾ªGœ»š×L\NQpöè®9N„l…£ni+ !ʃ™s5Ý^¯W­ï¯4¦+ŠšÀWyÇr¬›[¦M•ÀBà[±ö›1"ÅuAÈêÿšnö?áe’ɱÄнNÑ ,æ?ÏßêþkáÁ£X“Ìþ&›Ù,Vfø<øJymDó¤?¯¨C££±ÜkS¿W•×" 3æºMÄÐÍPKçnýŠ›š¤H$—€2tr½,ŠD:=>ü …7²­Ó…b›0¹;Ôi—u7j~®tûùIÖØ]ƒ0^9i”cßr¹®KT¤ñÜNSô›0ƒË§—‹æÕã¾–ý -óˆTM“½Ëž›ÅJxº‰c•\]uÁKœ¥ÉÖÜy{vêƯŸ«qìØQ KŽü&ÐÊY¶cY±VëšÚ78¸õæãÇ÷÷úq¦—+jOA_mœhŃsÉáZÝÔE‹0SÈB|hä|Ö|WoLÔ:iMÔn©Z·pò? /0¡>6< Gû9½ÚÆ MwÌÇæ÷îWñêw÷™OXù(Ç,æ¾ßë§ûC²žâú86<÷¥> ×^Õ4ËÛ‚³ß!-Œ¬"/cyâ¢Áª’\/NÆN@^=¿5³$³êÙv:‘|2)ó!ÄÅ+4¿Ìʬ+"%¨æú€{~ 5²­%ø˾ƒBÌ涂B´à¡öÄ7ç;¬ÃR¥+Üê3ƒ*Ž™@WpÜ2{ÊfõžJqÆ9©¾ˆžîCƒ¼…³_Ÿ&…«üþÉñ»'`êÓ—X7¢D -="’ü¿ÇÈ h2ɲKNR⢛P[ó]Yq/+` «‹»>¸p ø@")s¥¬{½–µ#îvPѺQåeºiŽU†8e\$ƒ°š:^Ün„a+«Wª]lYêUýä8v$aZÅ{M~rMë欸¶èžƒ~q! 
­AWì´¯•4{/è÷­ð ' · -¯;±ÏSÜÏ뻶ÍŹI•‡^—&Ô Ç¯NjÛZ×R„›okŠVìê ÒŠb–^"s±dZK>¥W†{°Š×ô²{P"wˆz?p¸N@¸¢ ‡¹MÏ )í|Õa‹4ëÇ&T8÷e‡™å÷úÞ(gcŸÕª>F½S¶“cq]«Ë[¬(?×’{O?+%…¹^9gyÃ4¸õgxB"œlg@e3øìjvŽáa)Vú˜SrÂu·%^>e¯C¿“ùSȱÔªY ¤ØÒ‹‰ºoÍ•+]¥È8IGѺ‡ü§#ŽTUaæzfµ²™,ÊYÇü¼¾ÕwîàªÅ„æX{qÍé‚‘¯´¢øm\&ÃלBa‹®©‰‚š±%=©ZšÊ ¶÷“mðb’Ç)›àë²g>T²àì>:Uw[ÿ¼X춵h÷’ Û–Ùˆ!¯ãe0~^ß=®ãûVï— -Ñ?é -¼µÀ Ï£û:†¦r².Å¡‚Üe]=yè'{ºE&ºf•Jy:Ä‹å¡’JÕéŸv ‰öwM Ðóú!²à¡êô.•ð:*:•’|n«óÉÖ¶Ò;ÝL}qÐ˲^›ùš|‡èi’È”ë$V èªUÌ´õÝ𷾓luwÖÎ^·#sÑND”Ä\—_ÌÌš—£E#>(÷c ëWØðc~˜ÊØËlÊÒ|a׳jnë]eÜ´ØA±Ý_"‡ÄrÆÅkÏm3U\¡F¤@ZzÛ¯))1 ŸbúlC ¤ç“±Ô¼%'sŽ¤8êäßy6Ï1úå¬ÇŒÔÓ|«Î)èV‚ܼ19Æw¥$ü1ÔMÚëzçá½Óa^ Ú¢VÖO“7uÚjNScÌk&‹@D7{Ãqø‹g¨¼Ã’§5ÐŽ4‘¦ë2Ð"NiçFÿ…º%e»ÁZʯ:{_37<ø«»3Ƥ FÂðà“?=SÒÐXøw}I¤Q¹Çl½ Çå£W6-„§)m²ÊÓQc0˜B1 -Ug!vâ©Á±ªœÓ#=ÊÍŠ¿ƒý ét;óQ» ¨ ܳKätøHQ‰l’¼L¹`*»F*¼©ZHMï“Pmð¼~HAd^\M9½zNB£ClxF=H7EÞ¢Ôò-“Q/LóÑÔ_e•Çdz°m]}+Ñѽ”;®’˜hv¶<úYò¡çú=˜„©–Z*´% ã*Þ÷¡çžØçõÿ+!À·µŸ—ZF}ª?é)7A'(É0 Å\ã„I2Éîæ“Â]-æ'G2òÖ›nùß,¶ÿ2´j‚M5q2»3^{‘Zƒéô•â‡ÊR5Lj-Nw×H_)Õ ´žl›ð.3òŒãÜk„ ÙÖ¶š1QÖ÷%ä94]ñ€z°™s ¶\þÍÕà8†Û;N 6”M"M©4Ö- ´øz^?ÚU¸¤#·š2–§œ× -Œ…ÈôÍ•i‰ÛNÎ%ÎΖ¡‹ª|êÊ3q§’ër²µê••ûÌ™‰ˆ= ‚-Ý2µ˜ å¥jà_Vg¬›ò‚ˆ6¤´ñü{†M Û—è^w¸8ÞîFºëÎÏ–bùXZ¤~šÝÃ8švÔæ.;L–\[þãò|^iîÞcÿ8Ê0TEu[êÜúc 'Õp7¡'qQ¥Ûa¿©Ùq.iþÌùðÊ;ÔDÕ¤Tx9pg­b“$R5–´@òe°Ó8ï̪9Û3¿«`üÅø±ÂJ‰Ù½îÑ¡òyý`Y”Œœ²éèÓ^3¥c®³¡8fØIoUÛ|;>œ^e¡Þˬ«•)€@ì;ïÑaû¡}•×ápF¿6ì(Ò²˜Û#õí’û“™O.xAwˬÌ+á(1óG¢Y/8+ Å\aQíÆÈQîlPýH5ßÕöG=ØRCª×„s•¬?Lc0,+ ØÀXs!ÓJ4NR. ‰v ƒÅ)©¼O¨a„÷u´ùc‡SŸØtì+6ãiÛÓÔSqº;S©DÏ õOó¬´á¡í‰fëžwb’ç‡Ô¨räBýdèKaLý-únöJØ ¿!˜'L°Ôi¼Èž©d,X)Í 9€×ìeOx^ßûUËkë8z¼úsM©Å!¹æ'¯™qc]ÓX»³XwQ4@á*;êÊ-Ý»?¦±6 l¦Ú6 2û×bü œ´t˜Î†aåÆÁãUÔu «ÿa5Axc*öóÕþÊ ‘­ÞN÷–;ˆ¿EÎS«w;ÛÌàT7ö[Â7G§¤¦w"Ÿ,ΚP5U­ -£rsjoÅ/Rª}|‚œû­Ö9Ÿ¬Le(>I -¤§.°øcAºÎ iИz¼_ŒÎ 1"5©Õmað³Ä•w‰ë¼õ¼¾cë1êض*ÕAµ§Q¶Z]L\vv‹ÿb‘€t¯¬#ôL{'îÉÒfŒ 4TÒ[åJz&Y}ÚM5KÑAÉŠE÷«k<€%$Ë;¸@“u³Õ¹|!REFb¿5Tˆ±v¯E1Þ¦ð•ÏZl2Uè7¬·}¢¾ªSƒÅù¸&X}Ц=&êc„ˆwpLxfÄbîvï"Ô±õëÉeÜj«?§¡ÿë9°z3Q)W–p§ígu`q›®zw9‚Ò¨_×b›G÷.e^uKÐÞL?e/¤”5oBæ‘êÇé,jËñiFµÖ®žb­žH )Æ€x(õ¶i# 9±RDè Ôb´‘åClòïÛêÝ{ëøâ]p•:¦†¸¿&°¦¡-K †´éXqŒ½§¦¼"¬mÒcšs©ÚÔ\moö¨'¦ÃÍ:Ì´tø¶Š×éé܈“7% Ç’#2k4½>Ž6bÌmw óŽ1Ñ?vW•@zgwEùrå¿»+4§Dé©=%ò‚Z´–îéï{R9Šú÷²³»Ý;þ„ŸÍ¾-²V¼úJµ^ - —$ÄOŠ´ÒÏe9*øwH+I«QƒÍ¢ê&È0ÂRAf“¦Y]²yšI¼¹ÕÆá_!i-bmö]W" [Ôw8dpªÀ­DšQN¨>Ž -“0`ŸÅ°ÎsO„9àquÜ»jVôÊ•2¤ ±ïäSœK®ÈSÚÑÂ^Š®TyBª1‰îtvÖÂZ‡¸¶«+‘¾Fƒê¢jרã·`H¡PÌü"ç&üü}¸D2⻣o1³_ô£ó&7äWq?¢HKDÓ"ÅÎdÍdh¥CVe1mÞ»g8É>¯ï‘Œ84ô±ð¡;+Cív‹ð©ý»–¿Ñ –…¢À¥d‰ÆÄÂ)cœ Þ=Ú›tWæF#gtüCy¹$IŽÜ@t?§è L[|¿óhÛÐíåÏÁª"™c2iÃnfe’Àý97{¬|*D»\£åáñ¸›èH=ÿº~˜¨?Œ(ÑǸç„Ì[åÎ ´÷EPä„i8Ù°ýòL +sBÎO°“îÖqˆk)Qã´ämVäÛ¹`H[)UKä³äô6ÓW!U•ƒwä ËH/ö·Çÿ\ßÇy¶T_‚›^Æ#ýð4A°c5¯Ž¢~cçb6F"~uÇÇ•]pížžÆ4õÝ0D‘q ó -v:Í°Üå&†½‚ãNO}æµÓø¦rL¬Ó£Âφë#U,UQgF›l¼‘ÆÂXþ"$[ÜÓ~ kòºn–bŒ×—nb}ã%²ÕRŽ¥V\÷áb¨š¦N«×ö|Î HÁ¯ÝùP$Æ1Sxó&ÁÂPmÕlyð_ßqáo >©\?k&7›ëu¾e˜¦ÓV‰°¦zìÍJà÷¿äXtx3“W'i÷zoÉ8 3Ÿ~(zþ=§~#ÂÐ>%™w?tL¥Y%ÑÛ9­¨ûuy°ÜÖžp*ÑøÌÄW¹9­eV¨—./«´¡„šâ8sB ³í;Ý®¬9‘uû®¼º{2_2ìßeCÞiœáÿ÷ !Aج]ƒêêçªöÝ‹F“°h˜ŸëGæ<*•fWï.Š¼oç´†•7bë¤ÕQ“ ¢¿: 2‹K†àî ’lp]Þß»›p¹ù<ÍÔª èŽ=œDžC˹6á5ÌùŒd‘_ù¾þ’Ok!0‘ >i¶Ó¼¦mk­§vl¼OfÒ˜KÝõÉÆØfx ç iŒ—a_µÍ×3Ýà)Åɤüû¼R˜Õ¸¦±~r¿`K§ -7Eœ3ßØßm‹Iý<âxî ßwöLÕ bÄ–q¥.»DA§R²UgC¢›5–1dYÚ‚9=¨Ë_HÒ6ý—Òí4ÀñÓ4=ûyÙáÑC´cþÓ¶ë•@ßÇãIæÏ.˜Øýç%m݃ÂÆ }i/y˜”È9p,¶¼4—¤軞 íÚ`' Çå7M.ó÷õ½Úàq*ŠF\kÑ ¿—[ÜÜX`~Íãii1Íû éœ-D¤Ã.»ºwÔ¶?’Im\ºvÍ âKÄ=ßz.òŠÄð0Ìîv͆’åÀ–‹Ëí­Sš^v"|;s¤“*mÉ÷S‹aä[fµH%ˆ<ä7µ<þV‡mH¨—…‘Eâ5(žÂÚµ’žHäN²ÙtÞšþùi8cPK[éEóø“ÆÔÃW>Œ ¥9ª¹b¯ ¦¯¸Õ©£wÖÇ +I ÕeµáéŒÁc ¿Ù =ÌaË×­C›÷fRÇGª]øx†Zr 8E¬‰Çª‘d>ù^!Ï„S‡ÂÈò»Àt¶dÖþÎLyß×ôÊòºÊ‡ºˆUÑ&=µ¬}Ý|S3Q<Æc±à5¼!@@+ñÄiÉV¹#­pŸÆÜÍ”lÝjÒÃÀzÏ¡Œ=—Ó?ÊEv¹s,Çu°óÐþ9ðb殯Ž®w¦GÎ+—– ®$©QÔ‹jEe”Z=EXX'©pl)j:5:J‡5Htýïç2ˆš÷ë»Î^µ‡hÜñ3ÐH*Í'Z8 YyK…±óãæ¥A*)j/$¹›”¹µôcþü¥ š%C¨“ö,e7––*0³Àv=Vj|l&–¡&EíÍš´·C®‰°Ãì„„šè–¡4¥§ïÄupÛ>12˜ð‹Çùµöëçúa‚W…¨K_Xw°Ãa–y€ÿjÔHü<Ùtžµ¼Þ‚+Ïr‰‚ŒçÏ™U'…<ì ÞŒ$ÇaUÔ&˜Ðê½ Š­¯âoÕˆ¦ÓˆžßdÒæê%­ÂqòÌšmËÁ÷õÝ­„IînVêç^£W™gdOg q§uÅŠ¯çjóüÖFŒ¥Äòí o7⨋ÕB² •} (MHutÔ•¸RLû«CÞ 
€ƒÕÙOy¡Ïî×·ìÉ„ÇpCÖ7C@à0ÆTËbÓ™'²Ö+û¨,ŸæHXld|%Ð3,{'…+ƒFÏ S²;Ñ*ºØ§}v¦žË~;ä½=Ûz'Zª»æÆ-ÕÊ‘Wþê#jRèøûü÷ñ3ÿ:¨W!Hôì†3±£=*QKJeM®‰D|ð?¶e¶¬jYuOm£z4h³É’ÖÐhÂé’8«îÐCã€UPii—'W·XÛ4k÷Õ= DÅÍhW¦¦“ʪZ3ÃsÙL/jFMܯ„ª–z·'ÀGãØi®o¢Úлpw÷_}LÂÉ)ïÛ3¹·%n¥ìæNÅ@ëL¿¡#ŽÛ›Q¢¹_E­W¶EÁ"ÓŒ@®b!ö&¶ø/Ç釤S–3Q>ÏО;ìÇpõ}};B!¨]·ÿŒÒA…ó*ð×ôa\ͪ¾\³g$‹ü[›¹6¦£®>¿š§‡g`Ÿc1GJåá¤Ï Ÿ´ˆðZÿΆ˜Ÿ¹„Ùj?£SÊ6Ë‘‘¢ÿâÃ{þN̘´”’ÝCÙ¶s¬{˜Ô鞟#·1­Ê±#ù¡,‡ËÙý¥ÑÝÚ>ŽÄEÅ—³®ó¬ „¡Áúƒ<¬ìÉiYèiO»¼$ÚøösýØÈ` %«Š(2òWg:ƒ•bÈÞµùƇµêñÍW[.Ân¹9'CŒ; »ÄÇMVZ3ò©û>=à„RÀ·ÔÄ'£Käwu(¸õJýV2}9ÔÌKOuÞR©u¿8¿û®oŸÚj^9²±æ ôq>Ý4² »cÂb¥ØvÍTw|Á£f!‡Es@:UÇæð‰Æ+É%ø·{Vr¥iÏìwxŽô´Ùp,7ê÷õMúeó“þkÇÖ¢â úµðÈ9\Tj+¨¶±ŽëtjÁƒa‹Îè4gNŠ­ÊÍm¦yõÙ4zð-¦Tç¹ÔÇÝAÌH‡oH¥è /BƒºiÛÌؘ ÝV£àþ›ž*H×X -üÀiàÜ»gcyKjÝ°4Úžîß%•uëe’ãÌjU2s/'ö}¿¾;¥à:輶¸!–¸Ù'C†pRO¤Š7ð›8G5ÛaËdýÁ¨²YP ϧd-I„ºrÙ“ÆS–Ö·ûIkÀµÂ)ZÝØA•†…~áu™6u:æÕ’½D6{ú×øu¿þï—¤ÔíæmÙÃuÆgG+5ø «ž! -?ˆÐXæ™áì5‡ý>EapÝ3^f@™¶pQpa¦0¾–œjd -¾k¯âLUN‘¦/;jTÏQ>Ù¦󄌛±RäïË3$ÌÄå–#¾[¶Èº– ’T 8}²Ó&m -gBçèjL`Ó»j¸±lm:}hüä–òRLù—ýÓµiŽïw™”Òm¸\˜ÑǺŠNCݶ7E ?קxœüzÏÃØÓ)zÑôç·«~´jÖðÎs(¨D5]žëCyLZáJPò„ ^ Iў鯸r_ˆ\ŸœG_’ÊœÅrF¹yÀß9Ç[ÿ®Lã5fCú"ëÜ.u,*8Z!jÑ`ÔÁX×Ùù¸zR­U‚Š…g‰ü¼ˆgVÔÍN©CkEÿ]qŠ¸ lêÏõo xù@0riï.µw«Tb©RH¶]]´&E߶ÓiËõˆ™OÛµâw/íx/Pu˜žcž{Lž7ÔµÉ^‡su‡›f<Nól[—¡ž „Ë1óÆ ¿¹/>¸(.PEå§| -q2Ò©™ë”U6FÚÅîx¥UК÷&Ê·°©ç'eSÓ‚ëëìœÛë@Þ¯®¬ C·"çÖ×Og—¬¹j"³L~Ñ[®ÅB1‚ªË7 SÀ¥ÈÜ‹Ç×VÝšöýlŠXPÑôÙ‰=Þº"yËä ­bÈ® Á¶mýg•¡Û–}×d"V#j*ÑòîS1ιI}6›,âZv/ÈÆå~£]£ž½TVΑ‡[¶ngþe‹]ù«÷J&ë:5?;%¢ßú~nÕ'´F[ÊÚÈ)û™—G…*‹wìw¹Ò˜©ÏÉ€ª\–‡6õ”/‡¼³¶ða~þ40¦v˜ ø†BEVBYq0niŽ&§7xt¬ýÄp÷"Ý£™l¡]§p·EÁÑëú,ÐÊÖ¶vZú<E!…’•ÖUOó¥ü¥§)“HmA'Kל4ÔaøůnÜ.¾'á-?€D½Ã¸Y¥7®Ø•ÃâWÓÄ7˼®¡Ä!ÿÄ'ˆÖ{m¶üéÎ?¿R8vDÏt×5×fgaëT €Ò­rD70Õ”¨Â8£%£C6ÇSŠ2mB‚mùÙ}//¥;甉™ÅØ8Bg†3¼F'茪W“$+d I!­ ×QØ¡”˜­“»&g"Gü¾ç,8&+&:!"X@eœõ%WÌ8.¿òÊ^c›Ä{“Î))òiÿð§”}¾aéŒ×ŒJ‡ä!'ÛöïUƒÔ’ñå¿^ª^¯>Ú2SøÙ¾·gl€Ö’Ç’ ·{¥ðyYC¥8½¨H"NÇd)ô~|pU½*²¯¸yÉÿåiÍnc0mó½^"Ù¨´¿û—\qê¨LŠªL³Ë64ˆ¹\÷Zµ9,À˜úž£GÛ*ì{ðÄImæ‡ù×Í¥´=Çùôy¨\hE—á_ä’YÄq$eäõ*´…‰Mq,WŸàa:¾ 9v’b,U[îü>NRÆ×õ±â:¾ “9Zý,¿)_Sj˜äs¾)Öƒ5^^6uŠ”¦i-¡¡SújëÎǼêܴЈÄ]ßÜÄ B}_#µчš,HBf…–šr&Ê“Ÿ%ýCYFÞ-ÙëÊG%lÒÀ„jiæÐÉ]§¸2â«šèvÞE~"ö ÌÛiƒƒƒf´â^¾>Ë»UƦô‘_£µèGy¢ªÒb8ä«G•šGÊkºÊ–O.9¬©Fù¥ÙHÙ"i¼_?ü`s8ókg:Êw»ë¨v\T‚ǧ4èf/kqaLêw6s‡Mêõî²fwqƒË=ò-j9Û6ô çgG¬Mi*æ}ò`.F@z‘:ötðÆZs«)ŒÙÑí;âX€iÂÛ¨ßùB±â*÷¸0ÏÞ¸cȤš3kµû rî "©wèRú×õ [îäi{þÒz¿©Û)wu˜3Ë«¥ŸŽ`œ‹mö”ÐS=âDq$<•sbEÈK•Tí5ºì{MñÉÒMÿv0ùJ¬«W±šÆve±xÓ8ï’ð¿1õrÒ>=7û±·ãHo‘‡Lð9±É}¶Ô—EIHö‰Â¤]Šr.鮤‰Üúû·ëS–ûo’*Ïã.rQûUè1KÚ`ÈaT ZÒ/+g3–ÒúÉeœÎÛ×êE‚[ &'œY[N¯ó†ÜÃÔͺ̡̬r½ÖÎ.¸õ’öõº>v×3¶ttdOÂÏ;4È:ιpóSã£ÂoWhhë•ýLéVÒœº5Ö·;š<¯Õº&8Dgƒÿ1dm³¦y -~HÔê'åÜæYëºPès@2t2\iÙ‡„ ·ÊŠ«¸±‘ ®Œ„‚Æ |.O52F¹ä+«LDµ¥ÿ`ãyGÊÌtùÝ g±õ^”3—‰E $PrVí›ÙßebOÄùx]ŸîÑs_™XóÅëMò¬l«ˆÒá“3ÄéoYˆ ©á õÚI!0àqùÑœ‡ý{@° x振P ÝóÏE„PÏïBdâmåÐ`qVÖdÊ¥Ô蔼ð6M9¡Ìé%³ïT‹hȲ†²^¼ãÖ a¢d]çG¦µ<z7#mjÈÜoKN/»÷¯#tú­©ÿ²2j…ËòPoÂ0/XyÀAèÚ«~a1þLlÝØÛ¼ûŽoB…×Iž%/ÍðùêT^y€¥m4}欭eÛ¤­cWtʵ„Ž6e™$ɺ?¤Üvu׃”…[ä_6»;»=§ž²EÅge1~FëÿOÇdôËïÌ#Ú3?O›¼5L3®ch];>§Œv¨Îµ:H†äã‚7ë×ÈiQÊ™©¥%œ°­WÝ–;ƒõ¥¯–5É jQwÅKäE¢ÒT–[£Z´þLø2ËûV¥F«ä²wÊuën²½ŸëÓÛƒ¥®*YoÞÞ¬¬š¦*ìúUê?{¥–ÝÕ¥J­My6—6‹ -vÛe„W†5öqjÅ%RËUd¯ZÎ+à¬~d‚‘¦@G‹R–^ ðzt0#À°-c÷2 Xï×g‡ä\ö•;IŒÍ†Üë†8#oëi’¶[îY´ÑÕ¦GLL@‹ì-cJýîµXÔ÷Qi3[&Óë©ôº/Þ³’UT¾kôþ.4×:ni WêJyüþwOWJhNÊLTiD¹7WŠÕËé*VèaÊ’µÕdN@â¥Dãƒ-!µ<ÿ‹áúŽ ±[å:˜BIêAÆýEÆýã ÞbÏ-žÑ.oç€c+ÚQv“äŒE‹sÉæ¨6S–Rò k1dMœ%¯Ô–yE-zuª!ž’P¾`©ÈæLJ㉥T§o‘Ê]‡`ýŸ=ŽHÇOù,ûmí¸'0£ÁVä…9l’¯¦J*vP,Œ3‚Q³ILԽѴA7D@ú!tBËþùÿ¥¼L®äF’ ªÊ(À~᱇F}Fû1ûŽdHö¡/`%‘ ³oi|Aö‘ú¤uý•“%Ku;yY@UmlµCÈèn‘æ×È©šÒn5Ñ“ºFOvÝd¥$Wvjm>¯œëÑ2°ÔOîTZàEÆY™Ÿë{®Âó­Ÿ•‹C4~wY؈mªz°þšye^ì#õšúÕý·•XßnêAt5>Ymë¿ØeóÀMP_gͱ­¬ÔLUßÇjoà²Òyóú¡åñI,Mw™d¡¯6õŸûõ­á0èhSåK ÿï6ig¦•MZ‚&³%MC-Í <‡µr'i¥°­ äEÔå§]Iveælh‹U¢"…½j3PŠ¶œ•PÀuì™/i_‰讫g6+sÂÒù÷Ç¡‹ë*zõIô”–¡¦ž6^â™Sî^tÀOèàúÙBO”{êW&. 
fŸÉi“â4ÜÍÀVËåDõ9z³ÑxxÛõF~îƒxìÆ@ÞQ£q¨˜XÃw$ëÙŽˆáÚÜŤ'åT†ì—üoñ$p蜖ô˜ ±õ,K$Þ'1¶–»Í²Žu-à ߤ¯'æ® DÓÅŒ"Ð} /ðI2>=íÎ悬7e}•P¸d÷Ò6I½äFu<¹9pûe0aš¯‚Mû^ô–«.ÁÐË:V²—‰áéàŸ”÷¹¾ÃüÐÖѤ<Çsì±ç`y‚DE Ú¼ÿ,l8'J$ .ö\¯§n0ÄeÛ¼Ž]Ã’\‹Ù¤ñÿQS<7*>‹ÏeqäçT0²“zÇWEEjŠ_F€8Ç(ñ´È{ïæåº9d²Â?1ƒ0{C{–â£`¡Jõ&@fýñ1åÕ‹y‡õ1=]îc°ry*8€d½ØB…ZHke¦6ñ´K‚ØÏõÝĶԪåàÛm†Š®à5ò†ÝÐEfÚ9ÔÉEðèÒÒÄ"ɹöž™…`Zyëäùm'KìꈻNâÿÛ§ÅÕü”äÌÒ/ay«¥X¹ÛYµ~λ+üJŸÓˆ¤±,=Ú‚Ž¼“Í|M„j»“ÖVe-˜¸ö¢;Ì÷çúfcbЯ ²fíŽç\y2=Ð?g?\gá-éLÞ†¸†Û¡¿5•vbB¨ê½“ËmÏÐ&=Vmœ"È:9—øXr±ldLù2dÈ3 -wæÉå!‹IãÙ ,kŸç±ˆBMì/·²¢Ws”bí®ªí•˜”S°áz×ÿ%%•–ˆ(%vm›–«<­Và–Ѫó&I«Œü¿zÉ]ev„%cê|-Óûs}û‘ˆB %ìjÃ>í1;e0×gð;a7T1K¾š¸Ô¦ÞÞŒôÛcåô£Ž6»…Í3ˆL¨XzeÂ×7· -/Fñº¯Ñ¯µâžÓ“6KæŠK·{n1)J$êï;u?‹°1å ÷){|áÖ ]¥ZSÚMÿlÀHô‘ÅοV¤oN(¼ËSÔ- <ô¨åø%“µfŽ]Sû=>Òeg^<úOæýÕ6¥Gš¥–¸&ãKÃfOia®êþ´P8)Ä2…Nˆã[†$&ÝÎ8æuªážT×6¥ïz“ù}?ÖwèëdÕáê•¢·ç‚tøzké%¥C’Æ…_ªpŸ°Þì)ž'‘È™ˆAÆF¿{Ýýw‡ó+¿Ëƒµ‡«õ‹å2’XêžÓ>ýáwKîú¾°;½U§ùË%1¼Ct1¡Ç¢k½©•>ìåj—™£=´Ðκ5ƒW -Ὠ‹<ºF#:|‰¾‚ªtJ’'Föc—ù˜/œç.1«¤üšL[’ÞÆEdrŒ$½¬?‹8]€•AH ÇmõÚÏKVÞ=hD€ŠÎEÏfB¬~7>@",D"üG3Äv·î×{ÖÒy[â”Ù­-áïìÑ)Á&\4-áæ×4ßÓã}iu 7V/OÒ_Ûž¿0 ‰5ÙÒò‡°,°(ÃÙlô½½ Ñ ògì¯ÖJš¬?R¹ƒ"ã/OÜÔu±F½$j¸ÖˆóyyÓgîÂo”áœ× 1?ׯAÓ Ô°Öz^ño ì ¬w²fÚò¸Íw‡}ÚzE+ûÜøyö«‹:¨f¤øØŽí“ÊàšrÆ èÎ<—ÏzN1‰0¾N´ðïOõsžêg¶¾DÎßæi7?v9#´–zíº×$ÈÚ‘9ßÉm¨øüHߪ:J‘ºÕkj ÒU©,Ôê·—L‡;·êS’üè¨Hgíhç™ù*ûÝËÈï݃Ý?‰ÓÝ_oy™-£^Ò ~}Ñß3’& -hp“ÆÈWË_jÁž¬\—¾.ÐŒ1+…>¤/¦‚ÁSU0¸ÐAvž «•£6Üçé‡1#r”~‘©üö£µ°>×w‹<#A¥$Hívчá°³†sákm'tu­ç7²îemT"ú2½?cÑåB%UÁžM‘’F³Vµ]Cûy@6ÓBºû? í7+Ñ©ä‰ô@ˆìqtA‡³í­š˜j\±F7Ã8ß–…nE[›ÝQ7çfypûƒÚ -ô4HÞ-µ\^¸ª4"ãÎ&î“àìcŸ´p½½S•z$9®Hƒ§ïàÐПëëiºDr/Æ`ƺ-iŸXó¸lÚíˆzù4ɵB"«dxt`Ó—åÈúž+ë4Yrl I «oŸ¿]ã±P€©ºJ‘'+}h¶ËÑó‡kÍ{”úçòbüH@J«¨_çŸÛä«™>˾YÏ£ -­þfQ[£w ¿&’ã¯êt‹ œÇôÝ©²LíØ“SzÆÌ @hsA':1¶ß;žtD‘k€þ`ÔÃ@^Ô%7ÙOZ7θFR?~Ìæ‚„ZX¥XߧfZ¦Ð‘vGÓ²ýgŒ£ƒ«™Ä -·ºnÜT<J[žWÏ‘¾½^+a| ·ÝÛ†â$ìÔœoÛ¿@”ÿS^&YŽó8¾Š/Põ8‚äzÑ÷_w|¹Ò’_ÿÃFN¥i‰Ó¢ü‚n•ўƹsEKvXãM“¼n˜9f¾ÄgËA° ÜåIBO.lVḴRtÞ"Ù3ÇEŒ‚LÎ-‘˜-Ç$QyŒso­§ÐCø4•°y2ºU­¸þÏãõÁʆ ì4!ÌþÊýt'oïîÅÜ̬]ùØKø\y]Ýg±»ŽFî=È­uÇDÐâÎ(ƒÎu{ŽîyC¨M¨°ŠV”+Þb@Ú]1ë -+ÄÛ< \ï°x2aõ»/=ýrLiÐÁø±>Ynv2I2dt³ä"Ž$•qMrGM6ÍŠÊÔÎâ -W7Y…êöï=µf ºÝºôoØfÉ[†ÌðYĆOKD=¶g¦íÁ ;2íΆ™ýr9¶e ]lz‹iÛÔ×/4ýé‘uã•ÒdøšõAel²zh¢M<*Å=™ã1?NqÚL÷™¾G)¶+æï¶ ¥m]·“páb™jv|W+éɧ ›¡Ú×\æ[£ sX»Çƒ)®J?=^,²•à«ýj6îê‚màcûçÀG&Ž {#.ÔìiÊ ­nFpTaþ!ø•5m«»Yâ¿ü}’&Ö }ÝÞg´ñú¼~Ùf ÊT×Ää~hA7ª΂ÃK5+DµQGà%ó8%;Õ`¯f¸=]íy/å´ëçúe—ÍmçÕÕxd¸;©Ê+ƒØ(™5òn–•‡:ù6ÓîN¡n©ô¶#:È;©–$óÃÊcâ-í÷Ò3<~ÿù‡€;<°@¶ˆÑîwuße¼qNSgÆÏf«á¶—ÓètÿäÆO¦AÍÐ-{xö®OÌÕAKUÌé6>Ý,‡†ÓI–#ÿnSvr!OµÄ—HÃSØM+ÎKÑ ™ž‘˜ë#^.¼+X j>ÊÎ’¿eFš’¬½šï.ª8Pb®_ò0®§cêš -½nÑ¢g;u&UŽYÐ|ƒÿ¿SR›C“³^yä{j°ýùsýr—‡aŸ/…”äË‘í û?)˜ -ý}^Ó3,yÜ™°9h¿!^96¬¤I¿ª´ìNK°ÊŒÈàPï’úwq´ÃùBL‹ -]|ð°ÚºNÆSKÏÔ»Ù'uAV/†©£#VKëe=f3^Ÿ×ÇFvkÈà”ÈœLùCÎÄõ*A76”µ—qòŸaSŽT¥9‰ *˄߶à´ós}2Þ!g4ï@ñe|î`¬ñfk]“J6³&ê¬éwÞJS —jé^MFâÈcáÆG™“j³8a¨Ý‘TðØ´ÉèçúååÄý+4²pî» Og¾™:º‡Ê4}TMûȈ Ì’†û¾‰rÆ+µ9¹éiWgwlÝ[_C!JEµ“ø6ZjX禚úùЗZsñÿK8ð—$H¡U8Üíó¨³ût›'{¤l VFôtgåÒtÊkÜ]åß(Ë) ­Ú·ÚæóŽÝö¸¬â0„2¬°Å;ïPðîqÈt›8jLk[9­£ânZÒ_µJõyþ l–™6­úÈEêmÇ@çT¤Ïi†U=·?æ¾ 3Ó„Ú°g˜^«¿}1ÚWîhÛà³1g·f¯ê¸V"K‘>ÃT‚â±êÁëõõy}jÐØô·:•¨™w—– ­i›ŒÆéŽÌ7%Cé°ùÁÝ?^9m:É|DëòNÃ&X ”4Ügbô´¦Ë ¸ÃãPçf]5S°c…[Ò”a³AM¡Ôx¯´.©æ`!ƒ|äÌ•ò û÷Ÿë—“@›Õ y$µ¥|:6UžÐãXj³ÕmÚ¬5á¨ÜpîçwQå[a¾k×Ý‹m5Ög~Õý›‰ìÛ*ô«†¹A¿=îž¡iÒ·%c£Ìî_ÿXö„ݦ°ábxg¸CÝöž¨adÃß«B:&O?¶Èk•èW[YÃÏÖ0ŸëwKÞýFûÙ2–Ú´'U/`±ë$"–9/8oÿÇ1’ÑëbúI[[®Ø« äY!‹¸Z´#1¸?k²ÃÉ0J¿zóö‹#†›ºëlzüJç\‹Ý©&›2†3Ís'FZ?¼DxÒâ¡qÃÆóçú 5aR°1Ô<2ćÔ7Z†©2B¥4oÃ%Ù>ÅŠfB(°¦³eˆíÌPh{eeµöù–Žs` f8iå×"V™ÝÝãN4j%“!GÑÇ^DÂ`/¯m!Å…îƒ_›ŽDðíöà©w,§K÷67¢€|F“„ª®q<Ò8Ý˹2¥eZ_ÁìÑ{ ’€ZgÁ#ˆ%ÛëÅ„ZÛE¤md¸È€%]j3-Ø"{ápyć{ßÎi¯ÏëSŠ­6@;Ü´)“љȼè4wqz§e’´dÃe\hèÁ*Ò2Ðô“åÙ§Ý1V㊠®I ø{·\JY*G÷⥺v°Št.íQ8ïäjÕ[[¯3S’þŠŸß”Sw°¦ Öî7l|u?=b¦ºe>Ð:¬;Ò”ˆ@ W¸®å…ïGÂÀv1BMlê£k³ùáH”t?¸0»õ–ÒH–QÅEÉÉÈ]xÐʘ³=.&qíÞ¸– -:©¢» -Uþp›áõó -ͦ.„ž'Säö\=IÞ€ûsý"yFJ}Vl…®ï1 /^ÓƒV{ ¥Ç]-Z¾Iï¤sÓ]Ú¢ud1g˜¯N¶\/ºB½T¨êhpÀŸt›h¢tWÍ” ;ôa¨Œª!Fššê¶íA[F!­E)Žß6ýmåÝÅw)Åc•ùÕa‘TÏ©«’U€)u·ì1a†–Þ×ÚEK7ž±ª;¿„I¬â;}»m)LÒ#öU¬óÏõ 
™¸>jLÂÁè#öUGÚíòïfÆ!ÍeE–†ËðVûuöërÍ!­ØÎ_fÁœcTNM!û‰–2Sæ›t £‹ôØŽafKÜì>t¯v¤«l‹òŠvy;€R!IÏë®Û·c;óLÛ…p™Ì&Õ62ðúp‹Ov\¨¯™E8—ÛV¢éÍ«gB”ÐÀj@€Ù¹¸5ÿ»cSûCi5ºYå¼qç¥yŠäŸH¨º”ë÷Xôë©04ù(øN!\T½ÛÒ­2sçÉàºþ~ùNçÎuZ®×1.{©Zp¦b¸ƒsÔÞgK&Ôza‰Ñ iJb(œkšii·¬À‚þj*”1mÑ*\z4À'yV­4 eÝáŒj1y;°» ?×î”—E µ˜3S=Ò0çuŸ½Úp7ѱxõØã vÔ½Ýmk*žÃzP Fl˜g){ÁÁÁ(Mk\Âî¬ã§MÇ=u7Òl ¾Ýž/_tÇÄåxz0®@^ØÚil—IZuÇ…ÀÚ·Þù?ÊË%¹‘ ¢û>….Pf ð;Ïl{=fsûñç‘j‘R™ÍÔ†R’É$ˆpŽ<,SÃGÝüx}ý^¾)É…N´¸Þ†~¶a5éÄÕFË9…<v:Ié„÷uø¸ê®Î$¥A– †ÅáÅ™pƒ[̲MÐV0ô˜÷]Ûk'K¨l?r«b¯ÛšŽ“_¯ß·%ÕŽ‰«sæÛ¾Zi™3G+) ÿ+/ÞRQ-Åb¦£”4 -@ uæ£vž GëR[7àcœR"ÓPÙ†ÎÛzƒG®Å.Aqµñõˆ¡¯Š›rûàˆv¯‡j4úzý¾û%Qe)ßhâûo»gþ´ÇUýª -ÇiÛÃM„ï@]zF~?œ ›FF[] ƒå¸„€&;!OP[Öɺ £þê…Q|†lÜÏ*ÒXsúÆØË.~š“eøFëww&ySïûëõÇÙë›ÈuNvÛ_ÓîÃÍyc•J«o³•DôG±S¶®ùúÛº»L¡š"N=™ÔŒå+¡hÓcKÎíÔzÒ…\Ïšn€üÌv üÅÙ¯·]Ž`îYi­s4»Z[Õ 6Òô¨é»Ã+š6Û&L²[0ßÇæÑù¢tZ©ðO½ÛÞé<—WÖÜwŒ¢—,6]“Öñ7畵c'*[f§R·RR³pž2òð¡„ ö[Ò­úÏë÷57õx¨÷¤ØXùzmö¼>íá o_¸ïòš˜š,¬uZz´ ò,. - l¾T×^)êëã:‚ìëo1jÚ³•æz8¤j]3äø¹2Ë(ÙÂUþ`‘Òë›Ò·Á÷ ½þ`8†Fº’$Îqök-Àª}Ô1ÍMHèÂQ œˆRg†‘€~ÿ¨0áÁLÊ7Qù|›½°äökw·îv¤ðÚfñ^رõÛ&F Ûh»S UÏ^‡vܽÓðûzXáçw±p¤t~$½~¯˜|/®»/ùHíòÚñTÒSFHä®'·ºÿ6†sÚcòØââ68>¦o\Æyý]öu(ENö|ÙÔ)û”°RrbòFÍ!â?2\¤Òho·qTªú·\6Ú.œê:Ó#Zf••ŒšÖÀºÎã=Ty_-pM= êaöú¡û@ÏvúÃfj–æIãJ’JW!¶víl8óÍM/»z¦Ö‹'û™TB˜ ÐÂç^œ|xW§H‡–Æèv;[/ KËQ×@^€ ƒ[a}b£ºOwl¾F uAñuƒf£ƒUTe¤H …B¡c^ =ÄYû±¦Kw-2]`j~ %—Vº¡»vÆGiƽýé–í9ÀU{ú—HE׎}„IÌOÕ^íOÒ‰A¢Ø»rã©Sõ$êí -±÷É-ýùpÖ\2nº—zÕ3—ù°¹rHªB5N·Z‚Рª„§÷$!!‚·`nÍnx…[h]—¼¥>o½âyýzý“©õB0&·[HN×|xr¤Í´‡KÈì”D{ùÜ8š/r–ëf/Ph}F94¦'0)ƒêÒ›J›C¤깕L´€`|]s~Eçs+ŒÓ ¿!…ýEÔZ~ôì·°u8Aî³ -\ý±Æ„ -®Šs*ïôÅd/÷<Ï^”šuc%PÏ6H옟7â›qQ£÷TÐ,ˆ³>¿V¼M¿ ÜiK@ÿUD ²[$u/ƒ5˜Î4©eýß}{¸ëÎ>Å¢ÓëáL4ÓZ•¦Œ¸÷³L']‡õƒTeýÄ É|Ó ÛKhÿ -¼à«ÖëM[¹Æh¸dW{î$6Cܘ*¥¯Ž ·Ùžt=¶¥®çϲs{îº6À]»cmE›Ý×ÐDY»º}ÔPžf7¶¿h~TÈ«mEͤuo0i‰IÁÐÑëok'Óy6„ʸ˲µZzãeº<ò-¹¶jG—ÊÎÄ޻Ų«ó¦•øÞ}»"¦“Á(Kœž™}l_i­Ð›ÝMg c¬¹oy>^_’iÜgC§o¬×ú,«¬\0ËdG|&@ήæ[³P€Oµd”V7¿ud5P@ž‹>ŒÚÉðQv¸BÍD­T׸5}GõP‹”(7u–úg{C¶)Yû”ÅU\»pð-ÝÖž…á8Å=Ý/€×jÁ–ÎÛêHæ¢òÔ{ 𧶺«{Qo¥µ¥`Í¡ÅÿÿMì1AqdkºëÀö=¬ØDN<:”Ø°Ö¢Ù\7‘AV£´iÙ,µOG¯uˆ/ÓI‹j 7çèh$ñEíy 6$36†èõ8ÐÚ%Šy W¯3¬MÓùVÓ¼k6x—ÓçSU|m¸P¸Iî¥è÷Òi ñ´S[´÷ö槿xçô¾†æ8šj¨ì/:Œk[¿%àwç“*A(›È s{Äò…¢QõvüzLB¢àïI9ûmóÕߺ6ÍŠ4²x~†H‹4BoOëžÌüp8cŠÕÌJYk<îÞ¸­U1Grèc8Û°WB©ub›<wÂ’‰ ¶°)F¹›ûy {ÓË7šï¥sÔûD)°´bät¶ñH\«8e'êJn¯‰î×ÞÑ8³Z=XËv`?­–`iÈfM€ú †‚©ýÕgfƒ{ë¾áCë…±ö 2L_+xôζùPmýÿƒm%Ø’š“ä™ûzš·#Ö+ëŒà9O Ó­mÔÿ…9xtýè½Úû•œU‚µ‘W÷. 
¬ñó9¨GÎòe|U·äñwn¨Û‡¦D1Á4›‰n?·³J,%Ža¾yþdÌzô¬£ïµ0÷W™Ûkõhš××ïò55¥¨„:GcKŽy©m:„P-Û‹àö£eÿúKƒ›L®‡_9Ö1ÁþT„ÁдU£•ÀÇ]*gèóuóÜU.X%Ù•rºSê·ƒ4žþtëÓ]ÑQx^sêoÒ?™ã7£ŸlR  â¼™ãºÆ&müÛä+Ù¾¯Z ™n´©Óô^ë»áš|hs¤:* …ßtG }š´’ù´3!ié^YÞ÷ô’H,@©µ’Ëd~RiNd¿ÃÀ1|½~ßðàcñ{ƒHÇ©Æ_Õø{òÃÅâ3HjXù4l_þ¢ÿÿ®Æ¾ßLƒ&ÿ¦öBèZÊêýLüpõŸK½Æs<AX·¬W³QŒ¬x‘!£\£Ûu¡ ›‹ê¥†N¢pxôä ŽRž[¢îéiY/ãÓV'Ù—k®÷~»eöÿ¼~ßó’cpO:aÅ2Ô¡M¶s'v¤[?Ò}Ûx †Së¾`|›”Á°IO’}œy§tVýîŽwè Ç×ëŸã1/6š¢â~¬î:j7Ë;cBJnÛ)ÂQüöŸ× -Ðúý“ž¶¥ŽæzâÇ>/nÐM\Í1 Åq€]99>FòŒ°\,Á}píjªçâ{w–è:–ÝkЃ·™‡òP³èEÁŸvÙ÷£T 5narø²³.ï4 Uìë¨sFEÆåü¦ yÍ¡IìN~w;Tz¯ý—ò2IŽcG‚èþŸB(€LçéM/tÿuûóÈIñ[›i“4°r"<|hUkúóìƒ%)Ó,E“#ÚÂÀÑ͆AË»œ»pc9ÁuœŽJ£³‹O‰Xv¡‰|åà²8Ö¨eo”ܲ°Cö¶IipªX¹m)TY·¼æëÊ=‚³ð^$»d§ð2ÌñC_ŸšBsë9œ~Êé„}qhææv;µÒæµ‘Ç ™&—¶¦™}Qé!zù¹[Øëv9º³+Fá`;#u">¨9IÞpNc¹ƒËñå5p4¼üîŸÃ3º{{¯Sík ñ{ä·ëŸñêºÿ–¿RK‚@©=¯Ç_bUýzáPÊag®ã/µ§—ýW §dx䬪^ZVÐYÚt†QÅ•À´—ð«Õ4+L/Ñ ÂO½(1ê×þ6”IðôBz”­‘³•tŽc#E„[ëåÝ`Ýu+¿âw«Œ@ºˆz'Þƒ¡°_ˆÝ{‰´_3AjÇšÖÍKÖ ‡36@ÈÄ S¾C;çÔŸàÇ£'´Â8 ïaVfzŽˆž€%Ÿ¦¢‰Ž-ÌÃç4ry¯'AWl°åQ÷ôè¿ga¿íÏЇOi-À¢,°M”CÕMS3E>§ŒŸîQ.7ÝЫ¹IÛ¿×XÂn“ -´|1óçMñbg§hˆtÚsË‚^Øá, ÷ö˜Ý¯ÔG¾b}­/Ye 8£r Õ¦ãºúôpR¬Ï:~Vä‰{ê×ÍøO¨ø}ôG™Õ/ØÙëB÷ó…fÞõ 0Ìå¨:Œ^ŸÂʦS–ë ÛæxxKý1Ú|Q꽜ø­„§Y…!{üÒÔK÷dx}àùG•N«[]ð–P—iš¾ŸI‚{Û„˜œéõv¸+9Üö;\Ï5… -¨tx}¤lê¤$ä˜H'zBÿ9«h^•ÙÒ<×:å{’B‹–¬W=··óœ‹< E³l¬ª©Z/fÿú‰TidùÅÌz`ÒBÁÒ„Ð…òåw„ ¶Tì§ÉK”.ÂUT}³Ö§¸]ÜŠ¶½TωƆ6½¾4ó0™¼îV|`\B4+áí+ -¢ñÈâÄ~ò»ž8U§UÆOÜ…FuþsÕoê0›ü^æ§t€*1VbBòTV„8’ =Ó3éÑ&|¹!¢uÖ÷x‘‰µ%0<§t‰驧 "”WDÙ ‘>Ú"åÜ—»Ãð:(¥QHSõ€ñünAàö"ÄÜÓõijCÚ£}ëæîý¼1Z1ÿCß8µƒ] ñ±múÇUct­O@öˆW¯Ài÷ÑcÚŽU°“&¢(x ÛÎû‹Öœµž«N÷§E[‰D -©M8S~ÊúîIgÙ «ããýy¹&• -Š<®N)ÆÛ£JS“~žâ:zëàRw„ù®çV새v—±"M‡R쨪xv]—†5š ÊzM9£àÿT‰ØûÖ]Ê~Q‡…'ª üŒóÔ‚ºêì ™L Ǫ<¹ìÒNT…¡Ì^žZè[œñk¨lÓ#UWuâÁº‚É?Û1uúÄËêe³a®Àê{m¤!3CÛésIÉš4 yë>89-ôø.oÁÒÂb¯Úh|+q›´©5Øt ¼Ä¥¯z»{_Ï7˜ÉUêÁÌ®Ïòq¦u-ÃÕW×EáÃì¨Å4y¢sV|ÌùÏRÚne,—dMÆ–Ž!¾T%DÄ×O%ô-ëuŒ€9Ò­™ÉÐl)·X–Q0F]|œ¦Ð\þw%ß]ûíÝvºœ/Ëò:öÕ„PÕð(«mkÙ×ØʬdÑÛ=üÛU„ÄIƒåÕmrq~_é>g®@4¿ç…»ôà\ ãÊÄ¥Zõ©Ì¸EíºßÇÊØmt¢ÛØC€*îÂJ¸¤šéÀÓ£¼„ trSoê¨ÜrùʯJÜÆÀñ4H¯lÌ07KlF³7Ú§; ¢Rè4‰t{¬ÈãVX{Pt7·Èò¤±<Æ$¬/Eš.R]…ã¿À"ÈX¬SM9]g|ëªÁ´ˆ?¹!© Õœ¥‹Fóeõ PSÉ,ó¯üJ&÷€ßÛRýs~ò]zï°‚4¾Y 3ÙÅb!ãr¬àútQñ…á\_TÓ“À•Aº³K§ÑÚ›æ¼fëÜØÒ6Ïã}÷´…µˆ# Ý̦ïÜáI¶ïC購˯o²61¿’/ÿ¸~Cö9Ã^„VÙ:Ïç¦õÞžÃ)λìRbØò=I<ËlM[)Atù™%¾ŸB ÷57) /`x*ÛpVáÕ³ì,†B‹Fëã l$†=h;5O>m8¤X}¦‰×†eº%Ñ3´·£«µiŠ„ ~!akÖLóÄCJûšP ¾‚icoH¬+üÿu_á!ÃŽ³çozxåÃoUñéŠò©%6F2Ì·íèõel¤åÜFgíHlîO¥qv „ÃÊìUc?yçêN»Êl @oÿóÏßb´ì›ëú]‹–Š!R= ôJ‘Ýš2§O;Ïiiºm—§°‡WmÂÎX­ÆàÕßyB¦‡°GE˜;0½¿ÂI±?)AÎbAf쟙û‹9ù9á#£¹ Ç iñ-v€uX»7Ë&ÖU[ÿW§ô\¿oBÈHTE›Ø{|©p+ÄŽÿ‘].IvãFkµ×AâG`=š:¨Eä¾Vßêþ;¨ i¿Ô›ÔK$è®ï;î¬iÒOÏÍÁ®W…] q8w«Ãqîë£È|kð)¢?1OwaöùŒ«æ Y•Îqç]‘Ýd)q(cÖ¹ âihÙµÛs$Å»‘¾Ißß:‡šhô¬/t«Y×›5åRœ‘t⮕½Ïª„î9´Ì-yÄuF©Ïã°«L‹ØU9Œ‰ãsntÌ!$:¼x'&/ÿ0ó{>sF"ºR×k”¹€ÞNPjJüi×x2ͲõPà{ø¹Ù’ÚÆÈÎÁÊN¡oÀ’^™3òÄS¨C0pH’ÃÀÇ2¥ø<“këþZwbçJƒ~Óbz2=Ä+9ìé5µ Ì‘ËýtˆF{Àr -ª»)÷®í -pp a¹Y=*-‡–Ò§îžz*¿kg6vî(Xj´XJUH8Wƒïõd¾°¨Ïs›â¿Z °^‰èë¬íÓáÃÞ(k5?‘õWçxVî7ƒw·ýR(°½nÌè&›³nOWAƒ í"'›4Õ1òöÛŸ ƒ;fÇz‘.1UsP %¶ÊãrUÑÊÊ“Ÿïû»h‡µa pͧˆa È!€"ÁÙÏç-‚¦ò€QÎ-=›M7×5-°F0%½ðh§\Íç'Î °ÇûáÆú¼{â˜._É•§¶„0ò|»ßCÛk#5$”z%ædÑ @Í+cLº^ï{OI.óÀ… ýKlMêÿÈFN¡k~i¥sæ t²&Jµ+:)D¦©*ûfá%MÏò)‚w:Û…òTdQgIl¨›ÏÔ°d³º±¢£Üìñ\LìçÓjZ«¿S„âoƒÊ=‚!UåÇ©c%pò±÷Gˆ&I ;à‚ÜÇù Æ´*þvÞÕ0ÜÑ©Öœxg’ aX`Lº àäÁëk*+-Mc…„ì_s&çÀæAÔo¾#SÃ>Ï#~a±‰Ní·uHÚ£ ŸS_P¸§yÃ¥`¾×«1\ 6UÆdî#8ZÔ±ßI~ȉ!Uƒ´á_C÷@Ù½×ÎDJ{ùÀ±M<µ 󯣺xP/Ž`Õ™ß}hÊÈ[Çñ­yêLÆÕŒfÞ¼Ï7ÌzÌå€À1Óç–ÇË]±˜ çè…äb¥¨¹´mŸ]íc¹ÊË£h7äű ¬ieHæ¥÷-ä Þ-Ñ_ ¤-rŸå_–¦ý Eò”ëÌzî`¯‚RFWx‹áÉ4‰ÆÃEÖk:œ9lgÎlx6þå´ŠòôcJZW ƒÖíä$SæGe|4œ£¶ÄsœRtbsø]&þ”š3žÌ0A®@‹Ñ4]±‰Ðn$Å|¯N…º™Ü: -TêD]«2a™â8ƒa†K&$HÚ¶~ïŒ5• oLæwÞýlŸ½u[Þ{5SÁüs|¸’™|xG°ÐÇ`ºâ4ãºû,F¶+¨ü„ÔN‚Ì:gž¤¶FàÚ—úéHÅG^ûÃ\æM¬žãàþ¸<8Dü·BfæPÐc(þ®g¦¥l÷ &r8öz»öŽ`zÄÝ z´ãqÕÇù×[ÎÖO¢[Ê1¢ß9õW—Iæ.€^£-k·N/~;*ØÌf'ñPŽâìWU:hf¤OdÏ`Êtƒ vq±GŽî©\D:l=Vw—¸X!´Š‚û`Ù¨ÆS’µºCáª2ôÍŠH¥ìxL¸ÙÅ–IB×Q0àÒÕõ#Qri®‰w;JmÃ)N9Ž.ŸV}LZ+Kigî’Ãm»¶z»‡ÓÄÌÇF¯Pã…?šw5 ë}zœóOrƨأ>¨ò¯RNjQ¦Yb¤4sßtÎ2R£°3Êýœ¨Ï‰VÍ |*9l¥LcS„VTò>¾T•é £$èº;P¦Ði áVÆžû 
·]GgAªÙgtŠëž4â :9í‘ŽÀCY™à”þóǾ’B®\܆‡¸%`Ä3—ÞûŸ?ÎUÑÉ4¾ŽùDJóø̽Φó;“fÒÅÏ××~þû¨²±ÆÛ™‹Ð£`°ßQˆ8Õ"ÓœpÅSÁrT†Ã.#.¦~Ù-t^€7OVO%ÆF;¬ò¼úßTh€^26y€õz_ìLÒf¾õ\Èpd`M»åD%±©Ûï?oQKùZÜÅ&©Ä¶§_IFîlF3šL»½àðÑíɵÉ_fI_…—G;6ͼ ëŸ*Î~'jNÄÂþôWB{Ì‚¬‘©íMw¶{¦ëjã_Ëä#€Œy7´†xãq'’òóÇÝ’}žè‘1 ñÓóå±2E¿Ÿø,{’¨np‚\ôܸ–“éŒ\Ý‚”ŸD‰ŒBæØÄ”´×æll£ÐHݲaKpµ7ŽSüþ‡¢ÿ·CÊ7Ü¿¯©Võ¿'ãd“–?HBÈôøA¥ e®|4s -r¦Fj9bŽ­œäƒ¹óìiû;p®50x±ì'µ=½Õí}I–w—)ß!¡±Z°Mep8ìàΣTþ”½_z£BΧ9•'Sɤ‚ƒGï%øüíbý¬F¼_ÿÒÐ’µ'Á;©9Ë^ç²rÍ­5A!á@!wÐÉ{¥ßmúÊZ«ùÃÓ°†©𣽵ð±H¿ZæŸy‚¼;™šð1_ÑÖsÖ-äŸxÛÊ+¹+6v©ëF#„œÍ•äç>S”¦§ˆc™-ÜÂò9ÆQv(D—Ù¿¿4ÃÇfO=Š=,“½QÅH°F’CI&Œ+Q_ ´öÉSíTZÆMóʽä­g$ÂÐøäµ½Y.Hˆ7¿ººÏ÷ò¿•EO/‚{-Ú¢–fŠp½ìl—Ir[9D÷:/@ -3Σ­£7}ÿˆÎ—Ú¤Ý Ë‚ø‰Teå0õ[ó€ê„%ˆ ·EÏÃÙQ¬ó/kÄ™Yj -*/nÏùD ^?–?1fº7žPêý1¡‡É¶öª‚õxÌ›§Z3ÅäÔØRGÿ˜¾@¸íÓÌΪÍU¤˜ÍÌâ[ªÚÝÁ̗ĸ¤P-l:\Áa7HiîîA”Í€dÕ Ù…£ cNa†ôæÔba)àˆ9õXµ+ -ÎåbÂrÓÓ¡’v}ø^ÌmÙÞ¦;~ÿ“îà.­–ºŠ+v¼ÑðŠµÑ"óK¯&&½Yœ«ËÓZL#ËNaX7§å2ýŠ>N -Ox|ÂÍÒï´ç\y¹›fÔûà€öXé÷¶3–PI£móœ>RŸZ¶°=ˆO468\el‹Êl·žÏ -5œÇëçßþCr¿"ºŠ]+ª]Øwïs;.àéÛ´VM̲ä ÀbÚ;jÖ°ÁÓU^fÀ·M Ń.o Š.öÂ9O£’ˆ$xÂÝ@FÉnýïŸDý¸ÿ⯞‹2ôg%9÷I‡B±W—©œ€GOŠ ãgšÏúbï¨ÐhË 6•„ U$2V.›o^<…ãÚu¸­˜ƒý$#„ꜰ§¨¶àÝ‹:Úk•p3ÏÖ“2RM\µçë“Å«s¢h®¥¯jÍžâøû©Nü¥h ÀàÌJV­ ð¿òî&ü?ÄÍ(ØΫ4Řa ]¶ò„ 2¶é“õJóÐ[Ë‹òÏsXÍÐýR1ŒØ½×9€óVüîVÏ ‚ Ù°XW·S ìKL4ìŠsèÇ1ºÿš?U”¿¦Aó -DTŽîýÞ̸¾`ÖmÝ »Íß©_²Tn:îb0·Å­{JGöÒT5®'Ñ/wãúÃ$Ù-F&l©n:]yRÇ ëó^gSˆß?ÿ¸—‚EÔ½b@½çMTà;„~åXiv³ð5ѱ¾×¹Cà*H` ÇÃÁD}Prh>¯ ¶XŽœ1·¶˜îš€Ôrya(áضcZ;¶W×ümÍþùa{£ ÒâL`FÁ Òm¶¼½™Ê]¾c¨F$˜Úi4ÈS:ÂL ý¯yÖrmk*³ˆ¯žè•ŸâQ\Ê=Ç\/óU¦_ ¥ ‹íó>4¤Ë® +zج÷\J+Ýïø"­F„£§^'Ì8 \K° bT0ü[wÐõõsç -—‡ºŸ4BªNÁï_L\|ÊWûJÓ§¹jIloáh:ÙÕw¨G(¸ÌÉüéØô‚+öDÉê¸ãÂvéþ©È0 …ú-7^Ý°YR]'uöíqXºò°5Ì<5ÅΑ…óÄ‚üÖÏ7¡ÔÖ娷LÂ)õ݇XÁ1Ä”ÿ8•å[¡†ÀÑÔåvûÄ×s²´Ò„×IJ¯¢‚ÅNà硆nÝæ<.ÅÔÖ8ŽFj:18¸6G2í{<:¼ûd\%µh\jºÉE“™R·p°iGOÎU·`ŽD`(kÄ-³ñÑÕÛáíéDÜš!`ÆÖ:àÒ+,¦“õå7 Jp<¯UG~Κ’€{^;ÙMÐFÕÔ4åÆ\9=´ª1 ! -fIZV-þYG„AQ†´ÛuD;Úˆ3른ºÊLŽä/8)׸æ§XGŠ N)·v -ýõhŽ³æ÷sœ"´z0ÝÉ&Õ@GhD!Î|Z±s¸+Æ‘ÔÒÙǧW“O­ZÐ  Äq?­MƒÄªó„t "&AçšPÑ|˜`û­:—ÆØ>Ð(YÛÄÓ ‚¨óBêð9¬k½jXh0 u[2XÖD@$/Y^DïÈàk‚èÍ1¬ÙKÀÛ_ðÎjHyMÕ}Pîžh\™¼*²Ú„¨Tåµ’;é¨>e÷ñ­3¶â‘Áû*™C›Gó’h™. ÔeˆHÏ‘›Oó³U_{¡ºÉ’Ë# ïaÅ°SGrêIvQg—7—)î¡ñrñõl¬²ÕDØ´9öhÇ%÷¾=¿pC(«)¶Ï“ál:™Õé\žD7-Ë£†y'mÒõ¯ªŠ–¼z}FæQbáSèëz·1¨xjn-9UÔ›·ï§Î‚Ê”¤ öÓÑNª®‹E“Œ¶Æ@€J30wZÊì¹àÈWT†Ç™¤åUœ-Ž™QÛ[õÎ$-ÿÏÐÂ{Ã+Š $¿ß2šŽ(Œ ‰ÕLÈ= ZíšÝ–u¸K„‚0Z}Cñ¹tG…«Î:š`fyê7 *¾l$5†™}Ü:Â푃Ær0ì9ÎãgŠ¡}e˜I§§L˜áe âÙÏÍgÛ´2gÏÁ¡ 0æFpë©»IK»©X¤õŽ§í´n‹Vá§×ÑêÈ›¹i…W é"/1I7ƒ#¡IÕˆëhC Ô–E¾Ù® -TÒÓt4H6ù@8Ó8’ßfi˜Æœe†SO ÙíW¸Òúïœ[óÉÇžwF¸¨{ék*Ðbp@}ÂjKrvÐõiéÞj³5ýöW¢ýÓrܧ±v'ÝÈ‚GÆS¸{ÙvWAC_b–4€Ç©xºU¸µÓÕ,ÃÅ£&Ó~X¬õH×´0¨øÑß׬C*àU' {³”Ðö í,½~;§0”Ÿ3Ù“û¨8&GôVíKQ§á›NA5-ÛöŠ“‰SxÕ•€´•žEßßtMm²? 
-àÕɆ&q$¥Ÿk<^0B¡{vFG&¤ŽbÖŽf³üýÈÂî툜D§m^¯îLzM÷UK\›–ÒŸ_søTª–fUvú?êõ¢ì"V]îô÷× -› -î¢úˆN„ròÎð"à cá%, –Ó—À€”]ÔµtGɲ7œl¥¡³M@ºDBCnl†‹ûDØ(äë^—Ù ‡}ÉBÓºpQª…z¦‰:ÝÜ×Í ”0“®ùç­Ã7L·V“KxF ™ï/¸ÛvG%ÖßœÕܘß$YÒÖÊ3GÍõxüûýχ»…ïP£P=;OÿâÎ’ªPLi!Sä˜õNŽ&Ý_ìà 6‘™à]œ=÷€Ü» ‰¾ld‰,,§ö¡d¯®ùížS]®ŽQ$2|©7C]Áåèåiß®¿ÖÌkëürõKxРöë#ëJÏuÍ09j&ÚÈó/Ñ°q˜\Íñp²‹:22ÝâHçAò²R=tô~9!™[1GÍ%{‹z{=>¬m³§fºë|ž~â;š] -ÿGy٤ǑÃ0t?§ð:ŸDQwÊý×ÁËŽídYŒ'²»«$‘d¾ÛÖ³š{Ô—M¢&í¨Á¹¦CþõŽ¾‹´j± gË]™1”’ê'huYáႲ!0äÏÊY(Ï4d€‹×ý¡×ÝÒ£­> ®¦ÚàÖ?}×*›Ö¬¦ÕétAEÁŠUè~¯XbÑ-G‘e@ ÖÇ…õˆå‰~yv®$`ïñä—ÙP[›d;í­îX”… w?¨z.zp YrÁrVš½™%é…€¤Ký°óS‘´÷ÅcWðÕwëÐiÔŠ´%=˜½×§×³5í/‡Ñ&†¾?0~Õ2 Cà]©%hy¦Ë…ëË×Kõ Ï_ãá´ -=@×iŽº¹éšJý+]3 é†:¨ïur˜«Ö*êÄò®õ °¾»&’™ê-ä­Ò9"¦5 nÔŸÆÉr£[þv|•Å[N)tµî‰ùü‰4ã7—ã=«mR常õq-Á¢(¢I›¶yG€jÖzë8Ü€¯­^‡°à—aÔª9ýDíˆÇ6²›Égu'ØD1†³3È'<¸?Y£jlnBûëü;‡ŸŽú“½,|’¿š›ž€r¼=?N_‡¶–{ÅŸô…Ím:!ßEY^FÅûs©Ût~⻤&ùO8½jf½p£OwAkB›•ô¿sL×lŽ]“DèåšÜæŒ Þê—ëÍJm'K¸ÅþÑÍ(;ºJ…« -_çùœYìËB=i`0`пzÌD(hQþJö+ëd5…x4Óªö ) È”À&Ô¨×OK¸~ÖäMµ2ùI-tð”}ñQñyóðñïûÝG×A’¡&!€Þ%úðQnkÐC)ŠgiGî¶I²"¾ºê‘îl1»ÃhئD{±É\tS’ lUíÏ!E=„u¯›'Ó̆ ë­M‚¸5nT³S¥-5JÇ3]ÃÑs¦åCmpÒz«gG9lwÀ ê"K£Ù·npÀÍʹ"õM }Ž.Œ40Lì¢yL‹G¯Í´ù~Ë—ÑŸµ -º°¿,0Á±çopÿ'B•>à©%7¬-Úu¶nóN¹òÙ¾D¼ZÃv¥ IV}±Òã_,õø¥âÓ1f„—d"í0çÜæÑz1üjø Ãv§¢Ë…â5-¥VWF›”Ùwn7’dÁÆA³¶G}¸™’í-ÇW줨W©$š ¯â¶5ý·vºuW[³ahUaÔ盘v}¬ÛWUìå[÷°¹¤áåîÁåøV ]ºð0±¡A½bùFшÅa¦Y Øó0ì[ô7uã ào0‹cj#ñ4yêæ_$MâpKl³¥Æ2QÍzˆ¯FuO„ºup\Ã0Ð#ýFªà»9·—ÈõSþà㩼YDŽâSlIÖ1ý÷#±X¦áZ>0ÓG8Ûapa`tñLðº v׳íêš ½`@¡$µ°nÜm†³é òZæ“á<ñZN0ª‹Ò:JüaIèÑb;¸ÌãX’TFvºZ@Ïu…Ûl%Âq" -za½³kÁ“ªh´mÝ›Hƒ+#䮊'…Y:á!®ñÆʹ*ñÝ‹CõSQ雌‹¥H;–q=)tʘ+cm·¸l±2߬nÜÈ.G<†f£j-˜G‡®G@áØý|, 4Òc™mtØ©“ùºcBÚ[\Õ»^—=ì(Œkãg¿™v²˜Ù¾a¦­ñÍ|¨º[tÑ^zy¤ÒE¹!çf†ëøøUXòa×Wb>æôJ,TÔ‹OFc‹aHvf/^ ´´1#po?<õpUFg[ôƒ@;Ó³H-±æ #xòÓQøKd/‰²ìŽ¦{2>NÖÐ/Æ­§I½XâD³ãŸsž¬&‰ ÄôÉuïËãeH£Pô½·…!@0cU-sÜ®×fFTS?"kE”~oš†eªÁ`nÃå0üÆC ×84¸ƒûÓåZ þC*‹Ô…*äö,§þ÷¹([¨ðŸ¾õ­$2zíˆé´qS»üfEÔ¨ñæe?ͼŒ^ªw×ößÒS«QõúUÐ6Ô•ìo€„lA_0ââ…4N#Lj¼ôi=Y‹avZe‘Ó÷²¡xÙé5?oéÔî–n][²Ò.ó §?YåhVÂ{h³Ù ‹ùxÜ°‹¤teYñqj ßÕ!YÙîº/ZK¨ªª6Ò”÷©¾×ÜuÍ7³ ÃÔÍÎúÚ‹ô"–=Õ¢ùHyÞထå‹+HÉ›kÉ¿×;dJœ—ˆy~*˜ŠåPéD5)X8õ4ÿøõ•=ÓFokuÞåút -n[êçÙÊÉ{ÃMIrH•—¥¨—Dé×S©aÔ¸ÉÖ;þƒ~ú®âú¥{Ò ÎhÈùÑ—[¸çíã.î?µmùÓ þ\–ˆ‘ïõ°Àys#ÝHD3¯±o$E.«ãi%2N?V¯.—¤è †Å‰Ñ¯ñ>EÑÀo+º<Ëuo• aÕU¯Âè´±w³†¾tÂ0Õ¿B“3!Úüødu`±ý_N¼|ÍüU7,élð=ø–©ê­Sú<íõ‹Ù©| b”!Øw—†V•!}ØÁU¶þøŒŠxlßÎl>i»_{:;ôü”þ}¯&¯#*sm>°?W,hÎ :èYþ7›£«ÉgFHùÍðµ‚Ú)€yÆ›;.FCBÎqC§£û¬”tIºÃ…¸“öh>R]…{öóÉ–çtmoýeZ…$vÍm¿zãÇÉÔ<¨‘æòí¸Ï ø÷¦‰ò\oWkö¡FÐöe’횎©Fg\“Õ9Hå‘*Š6ÝŸ‰'¯0«ãŽÊ|T†é´Ò#Õ}5_C."jK/ª(øôrpEÞ¨>¸m*̸*6zÍS¦¡»mÙ,’†-ù婱¾r¼ðÌø’/#ìI\fTy³ãÙ3i§db?¦1µëÜÒŽ6êýÇœ]H åëtôÜŸº²ÏÿèŠ5 Bé ¦$ös'n½ÙÆ“ÏÄ“ â\ªDçÑ`†è=l>ûP§F{é­RÍYùA7Ù *F®‰²«Uë›ÙÜÜÛ㯳Ã[ê_GK <œd<{œ×鈄q±ŠjžZ #I8ÓÉß×p»`kõq -›é6݉™Î>ôÏqzlj£[.›N tª+ -]¸GÒ¤j®b}%Åäc}W|/ÃV6„{ßGåmûóØœ´«Ž­ó¼;‰Võ]@:Ñ–THÀ ­y"ü>kŒ/¬™­Ç?YOã Qu¿î©Ñ“ö.áÏØ…³  -4ÁìjénMAÚs®g©faUZó «r…g›®HH6Éq=øXRÆ×¾Þ™ßêãÏ¢sTÈŸ {HõOŽªÂwú GóÝ~ÁŠí¿EX#ÓpL“2‡Yheo$…ù&ÈNã‘ d µvúsØŸÑëƒ6J†&k:Èx•Å)ÜëŠ}ã!gè²ùH+ªÂY®¿~Q^.I®Ü:{½vð ’;òàŽ½þ—'AÙ*uij=‘D©TE‰ü +vzáó(¤ÓÜ55/Ÿ58ÇÆU;±ÞÙºZ,s ‰Ú8ñW=çÉ—¡ÉÈér·¯±y¿ÒËy,0Ù~«%ï¬Vu)l¬æ+{A: ó4_a®hm§éô´eó¡6™‚{ íLš@BaÕMUÙÖnJ¿RÀ,èï.7Žlß•ùZÅÑQgbuø8d¯ĦM뀓ùüÿÔÛYL®8óSߦº[Ci+:mƒQ胞OVˆ-¦ £º‚¨Þ%k9`Ð9(j@ šU¹"M,|ýnc«å<βºœR[Jb]€2V·éL;€FGø»ÇÉ;?¾^ðÙ~ÁÉvU…ÝoÁ·¥lêÞÚÁ¯ßŒa#j67{ÂAœiµÝ%Ùž“(Ý⢈g)®ä‡…à@çKBk± -&¶pÍ–Jª’Œá­‰MËt. 
ØÃÝPÚaS*Hˆ~~ýÖñnšÔ¢æ­úüÿP³±Å nUto+¼×g:.Ä„Ã:’+†ÕI$¹åTùeıÓð³Åc±.œ£f²cêtÕZ˜³÷XôµjÎD\Ä4NË»l}€™ã%~N’yíü2N  þPäC>GG\WÒ †}»Úqì¥3_¥“Î  Ùªf®cïÓ&@Ü¢™÷:2¸)]ùù³ŽÓaÝT8<ñN`*`waªµ‚Á:¨›|…QMM¡I‘/dZyâñªiQ:;¯ìé -†÷ÎÒ.< eSØÙßçÄ¡²ü ½ Ô=áôáÂ"Ðå¨)Žø¾†½^ª‹Ã—×0Ž‘+ìÏšy2õÒç{°u*|_uõ'ÿ«|Q¼S´‹í9¦•âC[(pwé6ãmÃ;BÃöU¥ -ó§ºK´ìÆ ã›¯¨µ`±½ÃWõÒ=Þ Óe€´²]è¦B:ð~¦c›uJ|åçOHè$e›KC€Æž¾‘îIC‘©æئŠ~R·‚ΧNÆrW¤’$ÆÛ"·X-q¸cýë%W݇ƒÏùS3÷jO7*ŒÜ‹ÁXè„ͱ¨™ó,†ätÙÛ‡¬WswµîÁƒŸ}Uö: gÍXÄjo®þs`hIŽmšŒí›µ:ÔZ“q àÒæ[ -¦æªcÝA¯Á‰Æ‰Y²€›Ñ™2‘ÕƒÃB©A?Ð(ÚЋ&]꣡ÿ{תJ÷oDZÝ3dÇ\ÕæÑ¡¥¦¤ä.[¢8jMl¿¸4©°¥¢hÅt{]5Z…ùs6‹3ÉH6l´†}ö£/ëETzŸ}ibe«„š½Û| ã¼–©˜‰0¶qwÛ.GHuLšÓ¢›Ýã:‰²CJ„ßH[ƒ\¦ï%z‰l€§o Ûöau§M+Ù÷˜O >’=òó¬©WG"è3ÕyÞ΄qØö£šäë°‘C9Y?»K#þš“ƒmº«%÷9WˆjʾTY=;z(‚ÖfcÐÄ\ö×òÏü}™ó=lðx˜/=ÍLƶñÜ£åçÿÀ„°@KÀì cÅ\Ò½Xa–ŒW23•©1Õn#)ÂÁlX[‹ëH’?Z:}kR·¶eš³+Q…3½0G7•~ãÓ-1lJ—{!øjòõôÀ|ýñ/ V!ÙâéªLø)s?&ÍQA_»}5Ï__ý†õ¯7lª¾vŽçöÊÜ3ª÷º¦ YÌšn†QãṲ́sô,ÏJË™ÌÈÒ]Yþ•PˆiÞÕ(èÛ5IØa“f±""±O þ“Aj<k%ÉÐóJ2ó~\I^Îö£áٱ–o£Žªs#1T¤y(eÚTÍžXŽHî–"æ(5G”Ùݶ=ÀéÕ$ŽDïìÈúãDÝgécúõ'ßL8¹›C™­Õž'âÏ#›98Óm,ŠPmcÅ3ÃÞAì{Ì -Ý g›û\ˆ’wòT1Ù^‚õÄöÚ^é* ÿñDw˜ÆržkùDkT¿þ<×R±–­ìRî+ïÚyïyŸyLnò–XÍùƒ‘ ¬mïAjmÕ¦­–kÂ/Å-´]wÆc’zܨR®JÝ÷};)öwàó üø^Eð_1²ÁÓ¦U§£ûªG~·3Ñš‘“(ñ¼O±r ¿}2¶é;mŠ`÷å ˲Õ?ÅÓtàÜÿz=?‰BL]`aíC·ýA³X½"à£ËÝŽlfÝK2EµÕ¥þ%Yqz[G0…F¤ÃºMšžðAÄíbëPÒFó8¯{ÓÛôþ2hºn>Ò? Q'‚Œ•<8ø»:÷í¹¨FöZx€–8ÀZu·Ý,Tív–Yl+[{P§ÍÔ ­:a‘è³´‚6.>˜qˆŒ#8½$¬œf¸¾íÔ4ÌÃz½ƒ£ÍœÌÑ?½Éüúë5~°Ÿ¼ƒ¤¨dÈÄÓOO$à³Ókwz±}uÎ!¸àÇ{ÑÓ›«’çj•U ŒÛ8(9è»/ÛFs -¾ëPä·:Úw=ÿipú4¬êËòù67}»„ÅsÓzâFvCÔ_Œ¯cYá7Ïþ21¼ö]dÔšÌ&–0a§³Ôs~Ý‚ëRͽ5媘t?ŠÝ Ÿ?1)w_È«ÊŸ çñîKF®Š÷»]çØ™…\[;×ïô½é‹õ¯Y‰öñvâÀÊQsθ, (Œ£'»•‘oî9P2U“ª§±¨5¶uáÌ«#}†·5F¤èiêI?4m¡v7ôˆ2¶ç¾OOk¬¡ÁÊX{˜ÍÅbŽ§ìV…v˜5>ÿØøÒ×õúô±fýpƒžÄy®ß.{$¹Bx=®wÛÝÄ|5„4L6<ÕôÃ)ã’WЭH›7]w`ó€zŽ”é†ÿÐÆwu€þúÙŽoéˆäg5Fk[¶õaàüR‹§+fgÑŸhý-»£ðØ©[lšÕºL83Yz–š‘™"UÃ2NÕd[à»b@ì•zò¡T®Æ߯‘­P©ÄL;°ÔûÉedÞbG î÷ßv®Y墮¹cs¹. -â†ÎüþN†Û—¥mþuU˦ØÜôv’­ÓÓèêÄR‹ÓêwÅ2Kü•ƒ›ˆÚ•—ëfˆÝb{Š((Is¨Çr¨Õ1]~78F_hÛŒRU‡å¢Æò¶˜Gž‘3Õ`iÝ-ô/._FfÝ?Û±7â»w°7ùbò»–fom÷Ó‘-1Y;íYUo=;,p -_zæ+Ѳ沑6,š=°_-$l\N4Ðr݇©ìºù¶²ØË—¶ØH?r ° ?¡9ÓûKE¾ •ÁšðÁ*ÄqÚAè[gÓG<WjÄÀܯcIäÀ÷×€O0’¢2dL[v®>0¨¾¦èßöÿš\  Q2éû|urÒ%é ^€«»Ó˜–@“%¼!ªŸ×› -ý:±ÌÞ€jts¤tØô2ï/Þ¦·¢ú,\±ˆÑ?WQÆØ/ð»Zôœ.‹+ 7qf¸a3ÎÛÕ !scëm àr¼ˆ~ïb‰~ô÷ÐThÃvn ‘á…PBÙ(eŽ˜6¤ü›¶Ë;¶{¹ìŒþ¯:œ¼[Q»uÚƒ»éû÷£"é8Ñâîe«at˜iÕ' t&ËætÖsX±xÂ^,~ß‚Ï5[ߢ%ú·ð¢·—7äÕ¿Ÿ² ð½üï0"Uí‘·,%› {©·‰@²Án`wš1‰Õ™¤‚zOº|ÎN¡èéQì)Ú¯:µóm¨ð³K”5oqËàá$ª9miÚ ‘éj ~Ó«ª ÿcºìR,é úÞ«¸(Sú—–á5 øÉŸÁxÿàsB·˜Fº’JÊŒŒˆTe¼úA@šI¾]ΉڪkÎ֩쩹WÑP:±Ö½ÊÈ°®žábÃ>ú/¾–ã#þî6(Ó`ñãnšÒ~—l÷çÈÉ#…^ñͧÞ^Ŷ‡ÃaYû—³(˜ò‚”­¼+§Ž9Ð1(Ÿ6…-NÇó¡§n—žç·#ÀÁóQió·ZÓcüù¡É;’­VxU_“ŸÉ_êá^x$ pÞÑ3!J­“âðÚE¹CßãnáMH28“(ÜQ2ôÝrHmÞ &ý„é¥8 õ¹ ºä³&E8ÜÔ3i9 ßޮŠí3†1TEqéñbdâggH÷~mH:9›gÕàϼ!j -ícgÂÖŠÒm£fó$%Óhd¨3g9aÈï3® 0½^essxb©¶ÄvÅ!Ð^N&†p=¹ -$Ê»à˜1<»„KYÌM«¿sh ƒ¹¬¼J´-%b†å¼M‘”Þ VnIi:Ù•¡ì¸m\Tâùõ¾À_QGü~‘Ö@Î:yàQç½K'ˆG°¼y§+y  -\Ï(7ÀˆÀ! 
ì šj à7`Cà6oÜÖuZï2*80`d)rBËH¦¦Ýtûç(ó±/ù9ŠÙ°ÑBMùi…ÅyÚò“ñ o8¨-†žrãH}utÎøU fZÞMMÊAÜ än`_®iÕ?[Ê=Ö)úGïºblFÉ°]¢÷²Z&>ð¶Ô­nJÑ„Íé—Tê0=oÝÀ/5VG‘5E:nbç—•¢^mĒ᣹`¸ ¸ìõŸ‘„·~~I$ì'žXó+1)}E;çe?LF¯‘Ö“Õ˜.¢½f@E¢”uŸc!&¥™wæ÷a\ÞhÜ!†Æqëßåcá}­£§fͪ82lã€ÔÚU(bMÁ¢õú;Ž=UlÄ„-µ_®)%¦~Ó í ÃZ²ùt0cƒÎÛª‘ŠXB%[1%_¼Z“ž»âêAuµ&k:y’+¥’7²$fø+8BpvÉ¥ËÒ¬èœ4w+ÀEå÷Iî‡døÚ,ð:¹ô"Ã[2¬í‚$œµb;òîÈoÛ±óDŒR»Îc@n»ØCßcøµ™Æ—hX“v?¤‘ç"¦6 (÷Û'È9ö(êktÝ<‹¢~'xñ·St­ñ%7¶¤F^cü‚n 4—Óu‚YÀùz„?R©#vDá#tÀk@}8ÛJ}6­;ç¢T Ÿ[|Ul8á·Ë2’±ºâ}P”•nUHÕ¸¿=#«$YOpí/ãóÔz›Ú8=©5³.'çÒª),È” cµ/ Üe­Sb'*¥Î¨Æ½AFÕ.‰<_ýæzÔµ±(ßv PªÀD ‰¤2sF„Щ"F[-&Ùüyk–×™ ³üív<œãn{;žõÕ¤ˆËyñ¼m•®»@Ð]¡Ñ.ÚÕØ¢ÖŒÕSoÆÓ³«ÕîZÂJ|5u-k¯ìkîvÖ¦ÞðQÑ®«…ü]\nâõ³…íT‡d\_r®ÅIëkÍÝJžÜ ¾ø”±BŽZv‚KÂZè±/o^í-rð.SA+èÔÕ… Û/ØWŤV«¾ÖÛÍ©N|ê%¿Yuãs›5_P=¹^«¾pƒl&òû:yŒXÕ¥%©±êîXIÚ“2vÇN!Ú´Ø{Õ¦c0&PƽTœ—‚ɾjgGüž©!ÞV=AÚ!’¼¯»Ãº~`e‹­¦äØ‘±r+è‹dàÅ:nü}¶«glq=›PEsªÊ'Ž\]smÛ¨ýÛ -9—g1 ã63ÕþTìë7k ó÷ú‘Œ-ÇG3;b¦ –÷hjÄ`¥H× -þ1^3SÉuÍ&í“ç’Í·—d- -’ uð|bâÎïž­VêOkóC&6BNFûÉ!˘éw->Ñ¢'R½õ¼ÅíxªLœÒ3ñæÍU¹ÉžÌŒ¼â¹H_nk£e¬õX„¦nÿÇ'kø-—yÃ’L˜Y Œ,'›F„Ér™ÑäG6h¦èU [ÉX»­Z‘Ô£S–X÷º¾¾oº~û ‹²­ïÄ -  6¯F#»˜„ŽÄÙ±–Öu‹é×zZùû:D˜]TTz¼é\¤†Õr•“–Æ=W ˜‚æškÞtºLP» mè¹—ÑÒâϪßHXËîß'¶’ˆOÖŒ¯„ò[,]Ó±{b˜Ð­ð4¥njÊÕe>My¹)ëaÕ|™œoKù¾7šŠ.©èäÞG£Þý["A>~®uAGB¿{ÚÅXMQKæç|¡+™SÓ‰dËß"â˜oq«ÎlÛ'ØcòpÇ]}†<6,%ñ0T ÕZ‰óÑ‚¨„ÜFäŸr¹,Z†\f¹–¸6÷ج+®–è±±a¸ÒS9Lô@­êà†ÎxpQ£¬˜à3ßܺðþpÉ°á -ÈŽ†§fÅâS .oæã ç+žoŒ7®CZ6ó6;® u½«?kÝ°çùnÐSåˆ]³áw\µ)Ž}7¼öå /!–¸D[>ôÚÁgª‹’<¸w®åæðY_¼kNèoî;Kr¡l:Þñ=7RÕì@Gús±8˱íŽÙl2(Œ¶{ùªÆ'TãNæ’,莘¯þÑ 5í§æjÖ«*Gšîjd<ó»I©_KZdSí™’²UZW´ ÛúYŠ’³.¤(2•‘T¥ ý‚ lV,ÜmÌ4c²Õ¹c•×«}³ñNT~±Bm¼®»·aB:—«å‡«|ÚÄ•Œßy¾¤:µÊ/¢ä]Zé°ä}~U%N™_^•Ð×ÚMùMD­H¹úÙžg#QUäcÀä‹ÂJËYo­rU-¨cs’äé¨äÒªs°‰Á À²Ž‰KZœ>âÞõ¨lør÷Sl²ì@ìŽßfP綗×!èNúÝA®€ìYwT9ÄÚCÙ>ØsÑá£w<ÅÐ>Àmžž!_¿J>v­òmõBâè"]!Rg•Cš“¤YÜ—yÿfçûçG)ìw‚P|ÆŽ5‚ <ý'WÓ…"X=󢇚¢¥¨SÍü8ÓœÊO!ô´±Méáü¨ï¡œùu²½ãêc¯ÿü4ý^Z2ýzJ’‹½Hßµ^CÝÊý>cJóŸ?æÒ×5¿MF»ÐØ×ÖœoCó+w-æò8Ïg¦ôéï:Ïzù÷çç?ÿÈ0ßÿÿÿùÏŸýüûS²ˆûl ÞI¨ -៿~ü鯟GR¨lÛRC½ÄÖB”Ï5ÃÐõ*<fB«æô -B$I4 ô¡'fíÑ稦`÷½d¼L½‘Û ¼ßŠ½ þˆßàsP¼7mÇ¡Ê7 ¢ÄV%/þ_.vÊ焽DJŒõð_ÊËå:’\¢û¶¢P~@‚´g|xö¿¸T·JšÍlRʪ¬$ â“~)B¬{íÄ 3©ñÉÊBˆï•s”T3(bR}HêÃ[3˜µàX–Ò´%?Ùá^>¹Cý{yîI˜«‚ˆk¿•Ø ±?å•BD9Q‘+—.Äz¤AÕ3ÂR¦Ÿˆh_@Ó{À$«Yؼ‚U‰³*q`Ùêæ*è~ó­ÆˆïrÖtÒ"°‹Ê³@¨ïKZ0ÙÅÖºúÿ -öÄ“ë&hW@‰™ˆÙéˆ&®y‹WåÆ/eSzFæò&àWܱZ*FÜ‚œ6ÐëÉ£9çC­aˆÙ,ÿv"Îõd2šéó.濳‹vü>âQÙ˜¶·®ÊŒZ—J«éƒhz¾Þž¢NQúÀqä«×!€¶x䮵Æt65]ìhë¡ÖHO8³gäæX"µ)ëQ”Xwª¤oëCéÚV?)¶…N–éhGtLL‚ÃŽ§»ù’ÈQ,éPXÓ£Ä!Ÿà…¦MÛ»ÑÝ nFÕŸmÒÔ¥Wèø$E²MÍcÑÑ>© Ê!pB™¯‡„I1ë5%¯DžxIpŽiöЯ0¼Í²KV… -Ò ¬ü¢–A-mù¡®Í{¬.6ûzx^~¾q\[<Ñ4Åüÿ ÇiZE-hƒ_Ñxýz´ÇÕx–cƒ%ëÃÜkæ&;ÆïéªUãñѺ ÍV¸[”N¯—XEQ}-' -× ³\ | ¾S’y $xðZ°Wóµ­+x µŸÁßb«áðhˆn=  j›Só «1¼"(Á< {nã(âÖ›gc]Î -GiBä5øªŸYàÈ9Íñè¦È_a=¾‡mΚ¦cq`Á‰„)J}¨2,Œb´¦¾ÜV±À¢©ä863Ñ-OŽyÖ=n%I+ã–Îë.§…hT˜Õ[‘ûwTdûµ‡’B¶Å쾃¨¢–ˆ®I„ÚÁ¶>Q¦iFù€X‰%«ðSXå©oq‹d·¼…S DX -iQ Þ zPÔ¦ -èª.ý€ªö—ÈP¡Ð¬ÎŠ”¸¾)@{µÞ¸i»nÈ\Ûr¨4½_F @qK•v9mAnõGÀë ÃÅ3q‡šeîÔãÛ’,P³¡TË1o¾o'ªð÷úcûÇ›¼ú{ìn>w¯2¦9óH -åí5×€Uщe†€( ZÏ2g¸„)ó†å’—hLaÍcžÂ2âh\É~ Z"ÞøØ΂EgªGô^ÍÏ09‘k?ž€XÒûõ¤g¢‚¦(þýqPäS¿¼â¦MpûsÒfá#}ÐãKvÑ°xô¬X1ÓØN¡Xs‹9õg¨Íµ˜N·w=ã‚ÐÂôCî¹®m8‰<6QÍ‹˜Østƒ+Éè$‡KjY[&ˆé?}›õ[ñ¥}ÁÌåÿÿ¥ÚªJ€¼,¿”à,'°µˆlÖªî Ç©êÉó¿0ò×pkœïúA¤°Á(ZF¯þqxsçÏv¥¢Zg«…ËÀñ¶›•e¬Ùͼàð&?&»6Ë/ý8Þ«N I û;¿°xe…«L«ÒQá£|ÛèÅCPÏ?<»!µûÛ\Vñ ‰âàÒ„á¿ÝS -Ý<€ÇÈ/ËÓ¹–Ü*ã{Ž!A”ZÝ[ƒDaú²Ú ›»òæehZ¦ŸTȶd,yñÁ–¡OÚÄ©~ -ÐT£”Iýr5ò¦  ~^AuRѳ4;{{UàÌ—Oi†œN:¨Ýz|Pò&“´ÚßmŒ¯óÕ}ÛÎ.Š° ²³ÅÃ]ݓߺ#ìÚ²|Z‡†M*PR°Ñf 8dc\æ $ãzéæ”#*Qwå®ñ3SûÆ…ÿ^¿Á'ˆ§yÕ6Ðõ¿bI2e*åxµ–õ»caSvi7ó©7y8ÉõÚ> \Ã¥7•)T·å.ûâãøB¨A¦}Rž›+_å·s”,–"æ£m¦š.½¦¿š  -}ÚŒPÛ°SoJ;¦¡V8Ýz“t Ó=ª±ÜØ'ypw–*BÚ€_ÅÀÚÌݶjúe°ÅÜYX:µóòS£ú›jŽév~‹C5ÞÏõ‡£YÊýè#؈¯$–‡ö¢É|Ì—*5I$§{¢ó\‡·ØeE®è(ל†ÛEŠž*O3v·¯ Ï §¿Z³ûΡLÍÖkíX)u.ƒIo4 Œ0¯sÄï[·ÈÂt¤!ó¬m* Šo;«ØéÊ2¥¼·Öèû”óñ±–×ÄgØ.«i3}=bžìIò3¦`„1\gVˆ±¤7“>÷†³í´w¥ŽÞû[ -6[œÿÐ:@ÁY`ƒÊ!†H_f‹tðpíex‹±PgQâp–Ôö±?ˆU9‡£-îò"6Œ½•ÿÀN0xÓ -µ"ï¿ktp/PA“¯t°Ti/çLs¾xàÒº›e”Q'mðyo9SñWm‚ƒjQì1È㌲t¤WÝFeÌnn 7¯aî¨!:. 
— dŒõ ¦÷W×K~§»ÔâŽc û‚þZ?ÒгÕqBÙJ ð7Ÿ#dƼ€Ü£ -´ÆŽú°ìˆøMT¼Ý}øu"ó´¤u(†.Êlz‹ìØB˜ -cU›/³ö”¤’™ÛîT݆Ǯê®w4’ð¸ÚöpŒËOØ‹*G¿Ø!¨+(ªÑ˜ŸiÔ)T>yö+ G [:A2k‚µˆoz›ÓÔ(Ý7ŒD¹3†¯p¿Nw$KÿVõ¼éHš˜fs¡í¼J›åÄvÌÔóÇŒ©«Gˆ‘ˆá¯÷¤/ßÂìЦ~$-M¦ï8ùýóKun¥+"uREWÂ(~Æa¯O‰®ûã³[q×_ÿÓ·ÍÑOL)êÿLêâLDÇ?“#“Û§¥¹»ë&ÁHØðËÂÙgà G± q2Šö0èÄ,KÒü J£Î}ë܃˜S 3ÿ“ð„ü HæˆJd‚/ì… SÓf‚Oó÷^|WFí7p‰,9Uƒ®ÍSå9âis°HÍå÷àô [4•âÍã¢$ŒÍX.Y:ÄŒ·c…ôy½?i€a'œŒÕá·¾«¹üVóÏ›‹«ê6»v x>yG69® -Y€#IÐz%'‚›öïdÅ):Æns)îêÃg¾ƒrû¡¦¯Ê<#Œþd-#ŽØˆ’òÍY˜ÁÔš6ÿȾ Z~s¥«âŽæ1‹'™¶³M¨ÄSÏl{Üøx²ß±‘ýõçò/L껄¿Õ¿•}ÜæX×mN‹ -žƒ¶mÐö-n ‰KØL㘼5T»3Žsún: ß¢ZM -™€ÙܘÖ_KšBÕ¤'ï;¤“¢“G×ÂßÁ>OFqãbð¿Ðˆâ£ùbî &ÎÀF-ü†ÿ˜ÍðHoã,»¤4c’­FF1Høvl³‹eÜòI/JÁ¯…m‹ÏËçMn8HiitlÓƒ&ÓQ–sÝ÷0þ½~/úÞuÂ>6ñcå[ø?åe’äVŽÑ}Ÿ‚  ˆÀxžÞÔBë>ûó`JÉ̲²ÒBLáÄGL>x&ãbÃR_E(d™Ç ñäiFíAŒ®æ…9ïÅjË$èÂdX”Ît¶Í…fìôWš^Q /ôuZ&]ÏxÈ=Ò:ô‰Hðc{•ÇiÙ1jïL°ß²ü¼ejÙ0ÕçîßlÓLÅ}Ѧtœo4Ê´ÃÛ£× ë¬Kb¬Lº“%ŠuOi…„¨ -Ž( =9H¹·íœRÑd(ÄF¼4ÚǹzÂ*`D4w$™%–a&K˜3 ŒÏõx†»­¥ÏÕ¿d`<>>qÐ+àL!½æ÷' -kH&¬þÞJ5èE°K9iù!1çªé-ªœå±J½/3&\õ™Û†´TŠ0ÂóGwtvG ‡Š ­þ^ÊÉùîÝnøže0Ôoúà=–Õ:ÿÚl¯Òûù›¡ûøüÖPcQSH"R*(€´ÏDëŒtRo˜YÏ^ßÝfôøöÔô^¿¼5ÅÉ^(†U_ªÙÄ ªB:E6ÇÖ ¶³:S˜ÒL}ûZ-ÍÂã’Ó,’e2иàGæ'¦—Y?ÐÙV6+$Æ §<µ¢–+½u—0Ž4¥Ð¤hæ^ÀhÒÀ#…JM³Hÿß[gÚÖ6S¸Á4•Ž—O³KºÜšr¨ªv’0Í -Ø‹äicR—Òw+ \TœïceN2©C’ߤìX(“>o‹üœ©JâáÄõ›±ÐIR‚wubx¶WCNOßa'›Îf×±—ÂD@¹kS**`Ü´åx”bÓ\vÙ„”£®lXªN°ÈÂ"ô&Ùrõ¦Î/ðne [¡‚Þ±­c[’ÌåvhÍsý׿œÜè*l.iØ`‡sár­_åJ«wÍÈ6âxk¹¾¢Öü|™…ÑRz˜pÇß pžDµM‡ÄÛç:">hH]Tò¬9ü˜/‘ö í:C9pT7½±³wÔFFby&Œt2EÃ7ØÔ(`*-i­î™ò ÑŒŸ?¿!ýa4gÉ¢q?[Fš€ºÍF¹»=.@Y±]Œî¹˜ËÉw;Ôi1‡U«™ Ýbx¸³3žQ¶p „å0Û¨¾ìÃËëpN/ûôÏS˜q-.ï­oJ fÉ¥wž‰?>~‰=Ž*â!Šn|6 -ͪÌÒk: #Õj×cY—·Ú^FPxØëôîNdÊÌ­Eæ¶Êé´[ЇàZ[²¬ÈþŒ²@ÇŽg‹PÕ ÝgI3(™YZQBþOÊW\ºáŠleuS¯óQj‘ò^Óžv2ÜZ§o=ÑH‡•Ÿ²®«÷wÞ¯>`ÄÆWS¶†Um2D–›T«d¹Á4_‡]Í~Y·Í“yQ(jJ$¢iÇ"Ž4œvmIXbS:R•A-Nî\]ÓÀqz»”ÂÐÅCãå2@ó¥¥t×õ¦°BQÿ±Ò?ÎFTjýÝÕ½™u¯[†³cG3—EDp}÷…±PT•9æIS5ž½Ä×¥‰•¬P‚"¶0í°_Å6c¥(Š¾˜H¦°ß‘È0p?UÞ½ê Û%y¦Æ^ÃÑÑ.æjšÕÔ]Ìëý%e”¤:g(Ÿ©²©¶O5ÂåÓy}Rëærd]öIµu§ÁL¢@ËÉ¿˜9Ÿæö„ÂfÛ .+J­C3õÜáhú auÂ.yÎ/ökÌΙ˽Lt× Yð‹í(¸EúȦ¨§]\‰S¸\]Jƒ+9šà¹Ôlý]ù:õñxönµ6Vuɯ -s—-U À“ç{‡‹9% }Uñ¶LÖ‚wsBüpe”_áÖs¡E¼ÿŽŸ Gº½¡ää¹uiâ©äxܼ•Ø©óϲZ¯aþŽt¯‡±†› Q<-§éw2¯ ~e/ôº*Ñi?h×KZéu•êq–Õ.Í5l·ôÀô•)£·p?-›àøL[U¥:k7Fe¹›—;š%[ÝQ©¿ˆ$¾•$3èåâP7{SÆAïë‚™;zœ>¬OﶯárTS}øµ -Ÿä¦EyÒ–„:hø§:éÌiZ¨Émø’u+g‚-0*ƒ3ÃÎS˜q™~:‚Vá”~aKÊ §à£¿¤ŽŽ>Ã;öíµ^^Ž^?Xf4î0uÙ:B-$퇗ãŽëT“–PÃÞ‘0áµQLzO /9ÐLÞUBp8l¿aªuŽõFU~fsꎪM5A™Î$Ÿ†ŸyžJé>~é4d))•Ý^Ìý¬iÏ]X‡éÐ QV-ä3–wáãŽMcd¿s£ñXèUÛÀ-4[Í>†/§t -k÷{8kÚ©KãAE£Vá®\;ëNKî»k©žVÿ¬×öôበðVÓLù±;à¯ã¨´,8T«g=耛jG -¹ -À£^%”„]®I0Úëæ¤úþp\鎙‚žY‚¦g³KW”Ui6ÐSˆ²Ü(º±B€ëmDšõuœ2N0„ºv¾œ` JHtO$G~@F‚Ó®§A¼þ ®ôÇb[b ;›Á9šQÖÑò–u»–=õ=ºAAM»Ëá>WH`{Ô½2C^ïܨ[e»…¼,v_/åaÓ>uØ6¬ñ®hÃ&ëõ) üªÕ›£+ðÙȆ7;ªÞÂìá?ê…=ʪn‚÷³ Í‘Ö -”§ðLÏ;ŠÃ#/zn­T(’>GêˆÓ -æà¯ÐBg7Ë™@mÀ«ÒTƒÿÀ:„±æ£åõXiã¿Ûj»Òšé´ð¶„‘Ê9›Ô£…ÔÙ 2ó½|œ"C AfÛF˜"Ýi{4° ç¥ÊÏ£¤ÔO„wò rl¸MèúxüÏ&pù™# -™!^9nYE“ŒIÉëbóÿCy?ÕnnÛƒúT&ÿ vt^¼´>‚køÿÎÑ ¿¸Ýî˜:^å -¥šiñë±_:LVpðvL’¶š­/é™6(a~û)¥ß¡¡˜úA´Äxªc7‹u«µÇ°%hÈu’6N¥u0S9ñGÛZÀ„J9Ѽ|§«>ð¦D€Þ¢Óû -`"JôÐv/ ´ÆZ=ÇbQƒV6}¨÷Ј*üÖU2­.ÏöÖfÛÎ!`êÅ ÇÆ¡Z°yPRn­ç±¯;E†ƒB®C¤çßòKè5èû#3~ö¯]"´›2ËÑõ>ÁÆyÝÍÖðš­ÄJp¸Š; &^“X5 [Åžé‚}ΖÐÝr ðÑФ}&w×(aº=Ã3ЙD‹¿atÖzM¡o¬‰adƒWfêÿúº“aÆ$âPõ†·t´ƒAm»ûó| -QôEÅ÷ Òˆ~§C'º¬ÒÁžÐ»|ˆL”à*Q]o{ð¸6XËäŒI¹V4¾.bG-“´ºö‰e:zc²B[crlRšÛKm°¯ØB,’þhÑáÁ²[j¸ä*EÍ%òÍôªþ*¬íèY!Útš’±ÐÀ/ÿÉìÕL£öÆô¸Gs5e+œ¨MÿØ^À’ëpÝVݱÙc|),ØCûŸB1©ûúïJh¼:‚¾™ê1PÂñìNv,­nk…4À-ÙÚ4œ·¨ä<¿>Uó¯Qm1ƒÊø<4'ÊÐZK„4þuUKÉØ•˜ƒo•fÖD-ÿpÃý\ìPºþÚH1ý`°ñÍíŽ ˆÕ¢Š9m…תoÒÿC‹7V¸æƒ_Ÿ_BSáÕ̪¾(¯y~G†êC -™Ô¶aj=a-õ97#fÞ/ý v«ö­•Vj’`nÂpÉph;¤„БbÁèõBºÐ”%méÕíQrsÿŸòò:Ž$‚¨+t`" ª ì9+ÎûË—ÕÃ¥¸Ÿýá.0Ýh”Já À%ÈELꛌ¿lH˜ 5Ç+]œÓÂyl$úJ«E¸ëX5k3UÏ—ŒN›ö0³»°VëE–XmëÂ…•pÕT%¾­‡{Ÿ„™œ‰­[¢ùàÜ„Üõ­s¬µ²˜ñ…C{ôõW@ýNºõ;ÞKoû0½,°ælzä, tߤåY&-YЮy€¥‡˜¹vªáh™»-]é‚nXÆq5T›ö0zÌI™XUÑŽImú å”aÒÁyèÕzíXS°3ÑOj¿»<ÓFO¯&¶V0liŠxáÕ=K}Y†@Åq­4ì²à¨²D¤£3‰Y—™Ù¬ˆ_Ó`©÷9ø°.#¦µ(€7Òí¤C©¥¯ -¬>Xªê캓À¼u+¡ñ>[’h;-€M˜íVGâã¤ÞvOºÅë~£žáûZ ëí.Év03Â40 -Š¡Á¿Ý§Bô„š¡dX£2Ž_ã˜ûÙê U÷ú--)š7t¼µ2[erq–ʦÎh”úât´[Új°ÖÍÌà 
»WUr˜óYÒËÄžê¬f¨{€°ü äÄkÞ’½Z÷eÜÑïé7ŒGh/”³7¨ƒ€{ŽZÛ‹˜Î›ïÐ¥kÐ+Ú˜Àä|æfÁG¹\MQ]H°p™ºné„&é^-4œA\‹l,Ìç»eUž– ‘j‚ØQÀˆtX¸_U×E®Žê©ëÍRè¯-hPÓhÄðu/ -Aí-®Ëë£d eˆÛ:uJ…†×•RÒò"™‚-§',¦] @O«( ýÛð…ú¶™YD -ŽÚÂü:Ê6ÖA2âcmýªVžš3¼Å`HmFz+­H\Æ:µêÊêUŶ+v@ÀéÑÏë8½é_û±vê~ñ˜.PÊÛ—pW¨E™æQ|ŠLÁK T¥GQÓ^2}À1ÜΩ—¸­ïð³&»ƒ)[–ò§†Š£¬Ó$ævÖ2ŒÕ¤–ºò ðö¥&Í¡µcÊljèJ«Ç3${b¦µëbI/.ôy8áÓ×›ñwYƒUÙ¦·­hô|Œ+ñ Þ2KÈ\ €XûØ#U·së†×áx^!Ä̶mcã´ À^5b„ð{ÙUéŸÀþ-'>®_Û±-Ãøjy%ðÖÓ‚è…4BݵRÀöªº˜àz½Fy¡p4«Ö•‰ζ¾¦²Jé¿¿þê˜8Õî¾ú%œèÁ‡ÅâQçžU¦&qKA%µ„ͶK¡E/-‹Áº @ÄuÆóÜå<‚ï¥Â ©íç.Nh:,·2D„z-š[Ü&R*Ê:Uƒ¬ Ä:ÆŠ § ÎkÀ•QA -ÿÅ^M³*ÈgöbÁ÷âjiaôÉy¤¤N&Hò\¤¸­Hì`û͆6¶ OÓ•¦Ú(;Ø®ÿy[®OkÒQõÏßñËšÈ J©ÈD¿ý‹Å Ú8L;àÇ„C2¦b±G€ǹïI5¶ò©ÙáOú²Ç×Úž qÃpÀóBûjä½ê©ÅÁx9ŒêBÀ ‚M‡åZŽÎ-úá (]¨wù:L@NŸ‹ÝÄ ¦-ßF3“¾Í|k”hÙò·«ì” äÞ¾®Ýhñüý•Ç÷Ž‹@CüqxÓšQÕ¤BËö@‰V;†MyU/æ­¢ÁÏÂYI{0žN Ö1Þ»Q¢3תL0%’ÒQ¨ƒuŒ‘µÈq[–($¡ÆéÚÌòÿiÀÓ×.ü -ã¨è‚Zl×·èÑ&Ÿÿ§‹dpñ€š³7þ‰]ûjOá èB(“þ`GwiÍú_« µi÷šÎ­`kfBœ2"ÊéGÙHu¸àœ©Œæ®ªÇá T}·kvDjŽeèÚ~чñ.ö†7õ!ÿ?¥ur}õú\­®Wø;ÇuyÅlŽšöŠfè±U~º•ä,%©æµñUöt5\ >pºÌ£†c$žä"P;ÿÂYݾg:K•ÔI{p>×5¬Ð¯Chè¹+t9+@óó'‚°pGa %ÅÀ¡žù€–î?Ã=½À&õ×$¦3gœùÚd¹Ý^ qßM†ç3©2€_±‡;Ñásój^oM_gæ{4º`K«1€Çs!v,LE€tºõžå{=Ññl##Â7Ï“XηyP5‡)8êŽóüŸq§Vð‰¹…Ö+žùŒùkpô“¦U¢ŒÃóEM¢cHš+,3UW±©ð5s‚’1Ð7· ƒ£ JÜ懇¸©HGh½èžnøIȬ¸¿ŽGÔ4ü +·aøù»~3E˜2Ñw4½¼Xu´ È5#v¸ « ‘® öVH=‹àeÐý7Kv:7›QãÜés j<°iÄm:¶š]¢ç*Í —o訉¶ìA"Õ¨|–¼>?=,óWìÈð!>kñUý‘ÇêË둼ž ‚+Œ_5/ºçˆšk@»—[¯BÞA¹³æ;šiÛ]¨¥ýìHÃßÞ%ʳy|ÂîŽÐÂÆ‹¿Ú ‘³ ¶ýÇ6¿úd”dQΫ8ê÷âlºomÒ‡ž\뚸ÈÅaÔé;$žQ0oòQâ¾ÞõÞ"µ÷ßwÝ誅¬•9ÄFío—=Õé­ØȈK²+½êOƒÔ²ùa:yhÒ@»Ø… Î8ÛÀ»Èee€ ÚU¾ æZ9‰*d릙ôë5±ûK×ÆÆöúöEä£Á BéŸÐSÓY¥Me‹’£­Ü‰"âXe6í϶ ðpYÓT'2٩Љ¹r6o¬^}o³)M×dŽØ°$Û¸ñZi•C¦VùMI<Ÿþo?Ó -‹¥{k¥Ó41k}…jטּ*§ÜzÖYª1UŽQ«£î¤ùë6ìp1+Ó¤béõ¡J”P3aôŽÛÄo 2ƒË´È”—Kv;DçZ…6 ~@\§÷ú;n õ\²zÐoR]YLÄÇ0¿u{…E-o‘ÄÕÑDˆÝ澡ÉÔ}§Xª³€>ñî>8ùf‘Ãd~}˜ã}oجJ¿ìéï b°£m„>!´ZRTž–%â¬êW6<èQñ‹ °Ì#¿¸<‡¿P-£¬!šªšJµÓÂ'¸pÇ3 iH]z¢ì×hE8ŒÐ†]IÒè[ÆI JÈbF˜`•#•È´¬ƒ¨šC'£ËŒ‡Þ©-Ͻbîà»v -íõÿ8O‘î’% 1@aµÜη(/Ê©ÿXÌœf^ã7»›ªH÷V³&i9i¾m1EÂæ‰Òõ]HÝ÷1Uáo¯xàóMS™ú †ãàdº¡íV÷/ÃÉ@ZÓ¿ì¼J`¸†@WE3Üèî¬UêÏ7ÑÝÖ°èÁáÅe|w`å(Å¥õÆy°ÎÁWÓˆ×Nƒ}DšàaIn$¸œF,ŠÖ¿Û§aãô|þŒ\’ñ½äD££—ܧ"§9ÆÇz¡êb0´QÐÔp3D4|À슀 JÙìTC-ƒ´í~S‚àQ!©{¬Mt×Ü4é±*êvÂã;Ê| WT:³–Ç`ŒÏ·»úÂÝù8TlSFámYß¼¹»8ÊZŽã7;« žú{Å.KJ€?l¾ ~Ü >Lg1 SáïØÈê¨sZHšgôµ!›O²êÿÿ7䃢!DlÃãÐê+àÇ·Jµí¡¨O‘áßµ:(˜~//¬i¿àú~†®_L•ì1Ezðç„^8ÀnZé¶éHÕ{{4J|hß\N œˆ.ßtys™@†¥VØt¼,Åâ‘ìR ç°r\ž\•dˆº„ÅýùF7YrØ­Jl"µì>öu”ù&þÍŒÂHœ™¶ƒ¹?URÌ´`áÓ-®Æ/„o˜\£,2R=ÁÕ«L—0Œ-}˜·»SÇQƒ§ï»+ûÔÔ¾^"ÜäHÀÿ£Õá´‡éŸÍ?—Pé¼0µeIÉ}'ìn­ÚóÄ%^5Þé«öþŤ‰Bç;Ö‰cå)÷mÐwísXÀ*V?ãTâ¼ñ©ßå° ìÄû´[ÒÁü¬´pàùá‹0Ô“ ? 
Tbà'ùÛà|Pq·½Ig˜°f®rzàżó!Ðel;áB»ÝÖßaÂ,sɺöO˜,@®“ªÂªe¶ù &Ëý±q™öÒ¥gðÖ=ÀØoq 7饥¹æ3â,ðå‚þ6²Ó7fõù6^­&£Ö,·QÒÌõ´¬ëYž›mZs/q˯—Üž‚ÝýÿE€—Àªúu×o¤ÿ9ÃomE•ˆ¥“BŒ¹Ý–ëU– Á(3ÂýAn­1!Ë öYý‡o=pòl%Z_½ƒ˜Ý¬V/•V`:)í¶À¢Øðàdº÷ƒÎÑ©üݘ A2¦Û!°O³–i—¼©‡åµ Ãµüúüé«Û¥k -Æðÿ©'\`›6¤IM(Ú8Њ±‹àˆ)ÝçºÂø½‹Cm‚ïW }ˆP£tÀÉɺLP1*"¡”oÏRe^ e³”çAIl?H=߬jتÖçú9 -‚# ÿ0â«_@ÓZIÛ2ZZ¯IlžZ\ -Ú±F¡æ¦ 㹚'Z,=õxÆ,Ì´î² ;ʧ:s—aôî¨É¿pé)¤†›ó¯`tÎûëç_—“·¦,ê‘&J’üz;èÃpÍä£ÄTÎÉšY&ƒ9J§#¢ǵï>Ûê$kˆP`BÞí 18Íá<©÷\?¼<Qò(û3r×518ò9<%É©´ÇÆ‘wê4¼^£œþqÅuŸœµ¯;!¸A±“–<ªy¶è0‡'Ù@´ºëÈ€áûj€Õ]÷ ±xša¡Èb8T¢“0„ Á>Ö0ÚÑZ’Ü -'5”x²4­mÙzøáT•ñEêHqbà8Q`'&ô¤]›ý”Õv4 -·Í‰léñâªdjB¸"y-7Ç -³¡î¡Å¬¦ „ÆxT,˜Þ_Ç_ßmñ#®lebnŠÛ¶n›>–x_ÕkcŒØ#ýëN¨iKý!»–ØPTH…º”¯•Y ·[¤»ËchìõàÄŒÑp­ñŒ³fUëéì6±’½~!Ó¡KMU¼¶T#5‘jxÔ/®uþÓ›Î]T\Áy81añ´Ò Åa-â™é|=¯I Í/€’´¡Fä>GœÀà)é - âA!¦ra+ee<ƒ¶+ƲV>whÂu` AM“Á”rj½Ïs'²dþ÷ëHXäÅ–³Î4õ¤`G¤ª3‰Ræî)l·õÔÿL4•3êU2<ßã¢-ºì7\u#ƒçãqå8õ)ô˜.\lW…gÙ$¹Ïåå·LS„À9„.Õ‚âÁôº˜68iHR<¼¦–0gÐÞó<®fXÃÓ›Ú–.ôœÂ.£íÊ…èékÃÀAzúR Š>ª·EË]¢yðëÿ¸—eG³ ]O QN{Ý4$!m×µÓ2•¢L$Üž/:O[Ú‰ÂCëþÇe¤ô -ôH¨Ïî“è-ðþP"ƒTf¼Ž¦dÅ1ÙLE”6lW?ôRR‹¬ýbUKGÚá u!öŒ\øä:ÔÅ,\G…!ƒÓy Ä$Ml¤ÄX¨¯ðÅœ”~c¬=:3È숼K®SI$Å%øeøzëk¼…ÉxÚÜ:HÇ)o„Þýðé›Ãb¸Qv 5‡P!ÒŽÜêºÄƒ¿Ê\–?ÝD!ÙØ2§rŸÖÎ#s˜§?">\Òf.ÃÐ’¯J–Åøe¥Ž)ÄSê/¾²3E -†Uh_§ÞBkX³Ë¡aùP5C—UñV£(à˜b;Þ•¢æªž£§*ÌÇw>,úõÁ ly&1Å;X;˜çéh‚Ô>(‹Hr, Šw܉ 6•ÜÏËyhM,‡AØ㛩ûéIƒ¶Îò·?‘/°¬’©lªÐ?¥M»UÕ¯YåO¹æŽ èÐÍNmøÈÔ$™;‚}YeyQÞy9ÉÍ_VNÛ®Ó·ìÃ\¡Uæ¬Ð×Â^17¨ÿF_¶=¼¦]æ¹¼»jzv "vê«C_/]€sIZæ£"Z¯4˜Qže^›¿r‚³’çýzÛ®ˆ >ó§ñTa31|²•¨|¼Úkl³Î³p"<6­Ù6îåVçO8J®²ÓMNXQåNŸOY¨¢C¶¬¼¹m¼ù–”ú5 ]kˆÇ%Ü!@:—Íá”i€E`BÁ½ZnÚ/Ö3¢ˆ°ß‚eÅE¥"#ɱbª]2ˆÊALrIýÞ'€í ³Œ2ß}ÿ—ò2¹nÞ‚ðÝQ(éÍÒ³Åã‹óÀõUƒAþû‚GH$0½ÔfÞÏÇ”iž}5‰¤²Üv{z·GÿÊÏï¼!fŒ‚ça©~M8Û­äÒÜœô0CL†£^{¹Ôœåê¼¥æ0j:Âãtrz"»Œåè^ÓŠÞk|l´$zg:j—Mаsp¬ÐéÑü±l-ô¾}ñ˨#ûÄy»ƒ–ÊÝyW/ÕÔƒŽf×K‹ë•lå7î¼Î»24,Y¿e¤âiíæIiX Çëõö9¡­&0è=¥ öv8ç yìr¸¼1 m¼a6&X;ÓmñH«*hšÓ ‡Ðôö•ì‚>©RRA!geɃ‡0éZ Hv-tNÜR‰¯ž•‰S?jŽm fÓoýô.þ¼U5Á÷ðŠñù£,¡ð]2þjÒ¥¸¸p'°âšX8NïQ¯MH3 säÂ:äD¥»¦:D²k¤€‚¡<ž™ “¼ŸË—Äi‰çHÎÀÈkÃ3;8'1`p3èÚs]œiTƦÒSÙ$´çòq]?º Û¥ô^ALÜ…x•fx$yÁÖj -Ü¥à”ðëµemµÁmäæFõ Y‹0{€2Õ0§™-*ýëÓ‘/𘗉ÀJí½y)䵿.¤5-"õa£ÙšÖ¤ÅÀ½y”s™“ôšÿcŠPw˜• W¯8p›,g%1!ì2šƒ?r¼æ ¿-z³E¼Í0Nî«‹‘åÊ¥ýjI`ß -€™åF‹té;eCãhaŽäsGUeK·¨a{£@¼YÐb³-Ë»qSOKªi6hgXË+›Ýú†oi:*.©°’;_Ývÿ·°×Äg+\à@¾ö,¡?…E¨YØ’4ìxö{Ù4`Ô1&¸6ò&ò‡Næ úÃú¯2-ò;Êz{I}ž=Ïc7iRænû`'só´š”î®îµòlÖ`껫QÅ-Ú'ÿ9üô‘Ö¦LŽº3¿Š\X¼™Ø÷mO—ø¤?ô“ï&²f`„²ìÅÇÝ¡£óºöOÐÒ×Sp‘zìÐjþ¶ÊÃ:i‹ÐGr£òÙ„ö-ă.qpCÛ¦蓞ʲ·èÍâ¤ôÕzzB]‡pR“4²lóâ¬,üf¯g3ž 7trœ‹vQë|þrHøV dhýv\C7+Üçû¶2Z¾â÷óØrñÌN½{Ô}¨y¨m’2«Q¹žbz)jö”^‘ç6îth;*ò¡N_ÜIÁŠVÏmŒ•¿Kšm<«ƒ5fW]Å€­„I;ÔùÌj¸Òé ¶—]"–eú´}>áEö8†JhPâ0jÛ²ï6p¦œsos*S{-ä::ÿÁæÛoT½¥…ì8ñò–&H>ÏëþàlU ŒHDþr®Zã›Ì†½&)°ø!ÃHÙѬªûaû§Í]˵ÃKò-'5´õË™ÆV'Õkˆ46¦#(Ccçžhza*rÄíi]‚l0-öG‚HèMüdœK»ÉÛqHyty²LØЭ&µzCÖ„¶ô7ÖCßHµ ÇfjÃ"EöÎ o záibêvŠneÕ7]¾;õ_íãVœTדfOZš°Þ'XCòRíÓ ÕÄ:lhˆI#ÍÖè÷Zv§u0Ž9ªu¾qÓçSG³âòUݲöá±4'Ætxƒá¶.û"vƒŒÕèqýšÔÅ0gk¥l±^6‘#Ȫ«½Öp‰¸»j&KÏ]­á}¥…£®CB§=+è§Ú½'¼ê¦s˜îè– j‹û -¦ýÉp 5œšŠ·Öú {œë…Mtd‰Ç*LÞì’ÝßY:?^°œ–æjB3Çɯ‚-¨¢èc¶¹¼#ˆ²´æ¯Ââ—ÎÐ'óð§]^wbSÛø‡šÜk$:aµ§ÆÖð ¹û͉c¹µ…¯¿Ìµ;x=®çˆa9_gÒ¶¶Þæº3x1W¢I;h EbB0aXcLªÈÜ¥/ÂÖ±®#74tˆ^Ðc§‰’zMA"Rå¯4èLŸ˜A¼Ñ3\gµÇS/ì6´J|9Ö~œíµø6» .âóGñš®å,ý¿ž{ÎivG1ãÌâ±F·:y)¥miT†ÕŽ{~²Üg}©c‰·™6‹ë»»µ+eøA¶éqgÏ•yÍsÿT¯"!Ž“Pœ°ÍÍ%WàN¹-½?ÒÍaÁ|baAgë6:ge ÜãbÔ¦¶Û¡“Tt›Îa•ì|X€ \£&+Ÿ$ÚÙj“v…LàžÕ½Ô=oñ@Lìjë}]Z’$QŸßkŸ”,jÑê UóNC¸/ÉÈÁ¹Á›‘hw& ÕËÕê};™á¨mvO6³ï´8?ŽT§™7Ï@èbÞ¼+!–·[·«m,&K1RB3œ‹m{R„>V‹N‹R6š {›Ó*6@vµ˜–Ÿü<õæ}Å‘jÍÿû¯­o\Ž¡/kËÎ;øg³Â5Ó "ÞF²ðô,”U”ÚÙ»Zä¸úÛì‡mSß—³'j$ßÀPÃü›ÛÝ3G5Óÿš)×'ÍÉ—ú¥IÙÐìÁ:@5î¢4V·ùbÆü؆X"ò¢8›á¿ôV“ôÒ£´ªøØæÜç3€»Khþr¶1‘⹜ôOXiàPÒ)Ì\jHÜÊ<“H³ßŒžIÑQh¥±[îæضex(ëJFí¢J§Ÿû§û‡Õ61y›#d C†„"ÉÉÌ Î1“órŸÒñXÅÌ‹˜>nk±ïîÎHzX³³Zõzx…>EËÇïé–U‘±ê9eå™yƒ¶±$~Hï“«×ÙÑ>i§Äü2š˜¢ýB½ÕˤSÚÁœ’ž© ¨ówi›Q’bS[lÒW¤’,ëBø[{¨õ2ÕÕÈÞ~—ä¨ÜÕ¦Õdb™Õ‚“;8P–ý]ž9 ¥§ÜáøC=Ç®'΀7¬c“ËpRóæÐÆÕ<é 2-êqÔl#ÑszÓN$í™a©;9¨Ïž¨s9·)¤Ù?>¿OAÆéPJ­ƒ7Ù^‚è×n5wí¨­‚p‚'£ê¯µ&6– 
ŒIg$_ù佺Þ4šq™óêG[þïPÂY–ŠýI_æ®ÖL_Þ%Ž^>éȼ9Ê߬Ò,}­²n•UqÕ£ä¸\£\1³'HFU[§<[– -AgEÕf„Û4ÐIÚÂ7K/Ê¥€º³˜«oûÞÌ©ûùÕÛ?»^Ü‚¾.H×\Õ=¼ºQ"”»Ó³ô*y¦­‰ßÎK±K>µzìÖÀ Ô,9#0¢àä`„K;KQ†÷µ»¯TKÛª¿ŠUì¹ø’äæ!m”2÷XgâÞJu÷}Y»‚Àºê¼ÕEJ±óúÕì˹®ÊkTôæNF<—ðš&šÉôêjx~šwüJÖß“ýÌÜ°¡Ý:FÄi¾Ó¾§»Ÿ©ñ™nþj£bícuì|Õá´»ïÔ‡„Ä–æm‚‡Cf[ü¼ Øá.­¦â÷o¹rNÚ‡Ýý~zæü3qésítÇu`jÞ~¨`ñrÒï?0e-SYDe5« -’Us׌=œ“vÁ€]â^—‚™<òÚóºÓ ¥±ðÜnƒº‡ï³|öCÍ’°ãr»g$Få+vÚŒç¸Mçw&^Ž‘ÓcÒT*ºä>nÅüÕG:Dí<^ÜžbH©WИu\­Ù‚HC‡ûÞ[9mÇL°6Ï[l+-›!æ^Z{f˜îÒîÑβµòž\=1r'U²“5‘ÓûœÊáyüýùÁ[ -¯5­¶YÞŠ~ FN{Y/zdeU[¶µ.«wº§³ -ƒÙ@xi2£X«Ù†u¦'Ló ³Gÿ÷¥¦Æü3êDGÄÀ'ÖƒÜäËÇN|ÑÈ¿om†#1M†áÉã\À>V&{d~1Í¢3þêv°‘Fl| ðœZZ¹•õÁ)±‰µh‘çz"?6–¼2’?]Z2Oš‡|Óqì.ìédSëýÕ -¾’ÎÊðéÏþEåÆv‘ïÊ‘p7Ë%^ús­Y•š{ Å§¢ËZNáël¡zÎ9Ū¡ÙÁ¦ì0;qqì(+Ýbl.æÔ6=—‘J†úŸr9ÒË5?Y‚ø•ãs?rî÷êö~â ˵)k@ÇMÁú¥AÍ=ÔzÂcÎ@N»HsÎÍ–Òn•*a{ùQ‹f&Zcçdìš3ÊÚš5W’£GJ TJ€ÕX@‘¸*f -%º¯2gÚü®¬hˉ¦V-&&)¾‹RbàO¦³÷ËÛñ׸L‘.bÎmÖ¼±N^=ªƒ†e…2zqvÓ«7¢ÏáVý1Ù€ÔAÁ²–¹¤Ôõ¢"áŽwÔÓ2öE^ŸŸ[I5-µ¬Ë2^5%í<÷,–ÿ¬.ÐÉœ”õ%õÊ|ů- ¶rÔÈ×°MèÝ1t ‹û”ØMt­8±c¡wL‹´Wjs£êõ’ ä·¥l>F:Ðb£äV7ŒôÌÑÜž[}á\¹:Þ@c™h õàCŽÌ•ûB̉Õh5ÛËV Ø'\CãjÞbŽi'TzõŽ‘»ùt˜h¨è yŽ(ÅG¯¾­´ŒD§$7#ãœÓ‚Iä;EÖÿª%çÞöÊšzßîŽüÜ?€ÉnfIì&‘í.±ÌižÓ;!YÙž‡nC©Nú€(¦<Œ®ÇZk\²']KÆ>Ó¯±gªBðµó†¥ ëDP ½RÖFz7—ø¤¯å÷pà“«œ¶eœ+‰Ÿˆ§×`øjc ·ÂφºEòýf»x­> haûõùîc»t‘–ŒOw©’þØåfùkÛ‡…<‡ -M¾T}A´í2 "q&'þù€¿ÔN>tzY¶e')ÛÁa®»„….ë†Øõo× WGoÇ äÈ4ØÞž¿þüN}i<rå{.’ï²Ò²jǺÔy •.C=v•©õésYðò‰`2ÝMµ§÷òœM;e•>ŠêO¡“»é,ÂÊ~°²½å²ÛÊm[‘^ÜCë+&ÓŠoíD(Õšazâ:V­çõ‚~k6çÐKZóuë-ÌI·?ž›/ù…ËÂíH¨3™åœçXæ°æ®zÚd¦š ïµörO-èÛ96&œ/ Cj’¶Xdå“Ø/¾½Ó ÌsŸ½§¶Öƭݺ¨¥cÌë]M.WÙF¯så¨ê—‹F˜3mÙA,RL>»‡ö;v'½¬–Ç&Mâ#¼c>g'á@Ópo=®*²«²Dµ„;ž"²ÏÆâ)¢Æo“——Gu5Q.¾~˜Ä±v(ŒVÏ(S/uHËÈýÜß®?$\¡†nõwW"2jµN/LµôÓY íªzó¼YèëTÔ?ñ߶l&¿7~¹9K›«IÌŽïwr—_Ðj3+ÝÐ¥ÈÇDN -…;Þ– ò\”È‹r’B ü¿)ÑC þVI—èv‡¤öb†S·ˆT(Ibµå~$€xªcÉÿD:½ÁøB êqY” OaåµÆ#‰Ââ¹Áe‘:^}1!ÿâÃëꛣ;ˆh…£ìº‘Þ7§½¤¾Jƒ.”IçKEšÓ Púà3Ja£Òôç‰vŸ"Ç_SO¨º@O8I'.”®€¶j€uø(vd3[Í餣t³«f"õ(WÑsÒj!Þ䟈ò™«k…PG„ìÂìAðÍ&¾ÃÜ«‚Qì76ÒŠŽ-ÅÜL)'l+QS'†.S#„fn³ -˜–Pè{"I|³<-Nät'D”>8¾‘Pyu¢‰‚ÊPÛØ1ÍxI˜’áLï©?‹¥­w^-W°»:VôQ_…PUÒ¹ìÿ‘G•ÔºÌó¬›_¬c½ZE_ÕMG¥ÙŒÖ¸³¬þ;R ]Ö7G3ƒK‹æ`«ššHuMЩ9RAõÙèß#)çµ—Ó~$]l‘ïpºiQM¸þâªô„cïêšêA³ž8Yô¶-¤@ÖDOv…3,Ü«”žk™Õê÷ŸR*VíÜé^ýrÎKÔú‚^I׎ô·éa9#N&F­¹UÔÐÐ+=ÝaAÛB þÿ˜Þ Ayíû¨™ÞEÝ–åTË0]¾”š ×8¿Ýס@³(Ìóá^Æ@h_^6JË<¶"[4J†º…8¨Œ›Øu Þrœ SrWÖù˜ßï Ë…øÔ¢îÚ™¼é°Ù„A§<¢HñXY‰$<”»Ù˜ ­Y‰yÍŠl\ cg›BŒ3ÝvÙVÓ#@³ò­¡¹‰•ÏOÍÃφ¦n¾™{²*ñêxA3{6ÍKsîÓbAlªÓ)ìÃR:8Š¦#`Ë' ¢`ü™|1„*EA­9W4†´—‘½ˆ·Ç÷à©L×3˜¬ý+žl -(à<óñGG$h 1Ay%g%WÂL|(…S½Iaúd\~óq§aÃ2uó^.ÿ•~sœÓ^³~®ûQÕK×uFgé/cÒË´J>O`ÂHµèÔïØ›S`äÖpÏðýb•DWu÷(¢<Óöý”½k .(Ó$ÝŸ&S¿ž9 “^ÔáÿÆÛ ‰ü¢Mµûßý´Šhê€á‡oU¦ÿŠ¦…¤\&ë,'“µøʼnÊCU~U½K‡N²˜¥©0¯Ç £ôù©k~@|ÒLáénf¹ÿ}ÜVF½9" -Ûæë—°m‡-X•mS†O‰aÞNžZ¸&1Ûlue<—Îp>»%YxV­º/QCÎ[îÁ`ȶÝÙW9rÚ:B”;×}œ¬ûÝHJ¼cÐKÛ«êùÏb]Á°œm÷×ÆÁrp¸/*­ûŒ*Úò¡ÎcìL¦±º¦£Ý¨)žk¯|!ɇ°šŸ¥qéÿ‚ 5Œ°µ:5›Ð]ü®Ú -ád×»´ºp«CÙ‹B/ÍDòBÈ qÐNµþµ &æ0‡AÜ_Ojõ{a3”ÌCi}~`Ȩ+”ÝoÖ­cûñÝüXÚûWX ûÀºÛ°[¸&*ƒuп§Ç2‡óS ¾ËÝh)Å}ásö²˜ü¹ -4Q†å:™ižÇCéE"=͆×âL´È250w9Ÿ_̳‹¨¼Ç–åžÉ ¬¿¯¶<¦\bÚ,UŠ6–:V¯12­ÊL_x!MQ~­ùÞxU®Ô -+3œÍSßä=#­í_ÃÉqŸæ€4UI—¢yhÇ‚Ç0¦šµAľ–°+èìÇE €©¬[=¿UY -i àrváñ¿!⃮¾*•]w¼M}غ\‰ aÅ»b)bÔ±:1Çù|B›Ó³ÜÛ2õÃ(Z5ÓŸÚÛsácVÅióõ`‡Ëÿû{DÝý°-‰ÏV+ Þ. 
qäÅ6/G6¿5lŶYŨrÃÞÓ–lþ6Φê¤Ö¸>Ö]ûq]èüMw~ÉYkQOïša÷<î×^¥–$ ’Ú°Š¿6)mÆX3ˆ~ÞHøPĸõ®ó°;8nâæÑÇjÈ‘êMÂA“<ÐþcµJ>èñŽÆ•*5Ya÷a¥¹Ào: XUz þ…òeÛ©Þç-,ÏêZÉOytP™À ïÎõêw¢(/—³é'¿®²'*Æv|˜lf÷¢›äSÉ :§EvØíà]FY=²­þZÆè8Ö,h6°4Ñß–µ -PUž-Mƒäæ­E©i¸&¼—|ì§ñŸ »ÒÇw:ÙåKÏJïÙ ¥…p -Ó¾²ö,ë¿°Á)­¦+^Âû]ØÓ??û§Á¤{N÷ÇLÃá×ò¹]ߣ>«ÉÜ{8O sWq ÑãºrÂÞxù÷»ÝÁVxé$£uHDÔEÛœn•¾ÎJ7<·• -IŽ£äZWTÕ­ükZ-ãj8½??Ç?ø -Flj  d¿šÏVŽÁ䥯Ðæ6Oí°¥`Ž˜yÆ*Mcª†%“èÙö,ÓÌ­@÷’º¢¦&Gsh³<‘‡ôsŸbVó$‚B{z‚Úº<î™ÄT¹–ÆZM¾žÊÎÊA— ß`•t¥[¥°#2ìò­åõI,@Zúñôs¤d·Im¦²‘Øôèüÿ¸Wù™ ¿Á D9(×5CïLfEO>jA„µßWh ÇJjÉ Œ³î–÷Uí>\;ÚóWvï.NZ\6ý&Ä,ÿÞ3m«Áx¶:Æ™-ãOö³ý}¹ èè·Ÿ/=°fõp3BÖ„Zħ¿4[ÃÍæŽ6[0Q‘«–7g„ýžúRùaL-¬õ˜Áí¨©Œ6ÕtÎÔ‹‹ÓnSm›Û¢I^_zˆ3ÙÚ^k®ø ³ú± –B|Ð[¿ÍVßW<5µ'tD±âÌzÃÍÉh/À1Í8Œ,§Sº¥™Vg6QR7g|²yz;yÈõZ¼!ÊÛ‡ÛG®¦í<Ÿ’¾Á£žÛE1‘amO$~ýœo²ywå4@á»® H{°äI}Ù¦ÍÂÅ`q€]j,ÿ ýø;ç ¨6gwr´w£Øp±že‹¿Ï8&EH1øÎ.xóGfEäØ-Ó_{›R´é`uÂ]Ý7à+‹•ˆ¶Ò?ð—6,ì}ÌFgßkG–9¡dqª°ú²Ü^=¦§È~“ÒàØÆ/õ:Ðébà_oS"‰0I–Ñæð"ëµU{Æû‰wØÄSi›ïQEO&ÁÑ[:k{9–šÈ`kg˜¶‘³t·c›‰b§¦XˆZ›7Ä.¬Bzå8$Ë w :˜{oË0 -Ôq5\ŒqUÔD5ó°è®›o_(·Ë~s%%ŽC“ÊØ©•¾‰ì›6Ü1 séÇU2ë]öðÑ-ë_ È©O ݦÔâU¯†™â8G^ù Û>¾·¶Ä§C’9nÛ©;°;š¢¨®Ûãøû•htX‘„ã2PË–AŽKñ¯áSSbp7Óì§î¶óbúý/º›ù¯ì·ô¯:é‰f«é¥µÍËç!šö`QÛ”«eB,æc;:ŒîÏÛ•æÁSzóÀŒu—ý9|3,=›q¨…¤"ýFSô‚oZ5ÕÿþGºÙH1é¦ .{[ÖG½Ç´5”@žŽa"ýN"éthOd≠G—xë°äáDZ•~“i Äc؈OᩱlZéùQ‹ê»è—¼¬ëÙܾ%©=£‚L|pŒñ.±U·=:4ïqw-w‘®÷¤©Êëòx2ê¹—Ë·3s+?¨}8'úi%ùáæF^vR‘ªÛSŸÈ+ŠE¬›’wx´´Tƒ6®â›YF¾3mN'1mrˆE3dðÍ+´s(~³¦Á²¼¾AhBóX7oœu Õ»çÝt½Ûë×eº0ŸXK·:ßW§›^–Û€…¹®#¿±²ñôF㸙6Õ~IHy£ :]‚‹#æ·–¦~[Zýß¼ˆ•ðcRÙåá¸äh0?wç|9Äyñ9|B†Ó•üÚþ¬ÁG€dMž·¦¥íü0ú Ò3„”áÕÔáGí¸~‡¦}Tja%BetNvºš¸W‘±ÈNX}X“¢Ú õQ(‰ñÇõT³G͇§¼©°Ô-c ƒÑõ8‰™Aó°|v%lˆÍ4ÔÐ'1o\œéfÛYS;-ŒÒÞ'Y[$a'“~K¾ÔJ?þåïímØ®açi^ ú…žß…‡N„Ø>aVÕCûá,­³ ’_Aˆ ë’›:jB{çŽ%-UÑϲ\òÈzLÕ£…s$í¤#Ó®•F åIófFSk —›I-Eꜗ±p~X:’-¤M,Öð‰eÔ‡b¼•@ð‡Y½wø¤â~¬ˆ·2]ÅzDÜ¡Öu?!¥Ô¥:‘ Þò—Úitlybƒè}Õ.r~øUëH@ŽŒÕŠun<ˆFqT†¶]"KZÐ]÷[~·l?;zˇީ€ý?nôéÜE'¡ÚKû–t›gG¿Â¯XÞR¯–yU—•rì“€kªîOZ‰Õ\q+æz+Øœ6»&±Lw|£•³0 -e¸AäÖÅh5ˆ'1Ü+­>5¦ƒ@ó¹1ÃÇ@rlŽ’™$q½TQo:Æéf“ý('ýy¸ø„ÒWˆŠšpö½­ähåªn‰¡E__ÖÂÿð -w¬þ'A0û5‡~Iø¢:<9ˆšwºùv\Y™RµÅ†6dTãÖ’ ǧe¨ïI½R†ñ½Ò#^?ëÏJ/$ï&ŠæUæßÄa¨ÔG%öd›©ÖŽºag^A'Ø:ª‚݆Z<´üøewÊ™ofXDµe$ÉK¢ÜñQñäH ,fAÌ ˜(G纻Š§•/×buø$^’ud®BhoNïT®Isº‰´B“CÀzÅvæZÏé.ËP­ûò÷Ÿ%,Ú5~ëóUÂi'ì†×An2/ÝÉÆ5g.Cš1œ¯º€Rþû¾þÛkÈöIŽDÛ[¯4Ý»­q8yø<´5‹ØêÏY3~}þoëÿz¤zÅœR½Ä܈ê4‘â!¡Rl`r$ýÞ×”—K’#GD÷:E_ dÄÿ<³™…. 
Û?GV7Ù”Él6ÉŠb23pøÇ Æ‘ÕÓJaÓd»=,:žÜqàšµùø.2èòóõÀÅ[t{_⋽¨YJK”IV%toߦÖù¸Ê’ÞE2³\ÿ¹® ¬Ÿ×f/I05¿IhûU¢=±À"AN:-øš%Îiw¤—]ﶇY%|_,Œ3.?º·Í(sÂ_yÔ[`wüW³èc—‰irïé ¬´R^vñq°aò:ôžªìñz°6ìêŽÆfÔFô’ÔÚõ æÛd«hXËöå°a–×Y®G¶WRS%®e¸y1ðDø5t¢*v!dÝ(jÖÍu­xË‚><ál)Mlu[˹ôU¿ÑífõÖÐüóݜ̚# ‚Lo3(·4¯Å]-?Q[Ô¾5ÃË>wìŠiwÛ¹2F˜žcïGÆš¥7÷ûqÃåÐÀ°&À,¯gZžwCûøX¯+/&B¹aÜ~íV¥µû²SeÑMíб¤5=°aÇ'ëŸáÒ¼^v]% Ïöcìס§£†®‰œV9Ñîà+®¶jN‰Ëª7<¿RᜮÕ3+°^ }}˜G¼¿=/”E`ûKœp·“沎mëÆê­JÇÖ¾®ÿàr™µËÓßü7¶1ÌŠzº¬l)A _€(2qs¸íÈ’rHB¿²kó8÷•eÓlv'ZÙ”°hÛ3;:725–Âí·\“eÜ÷CÙplKÎþ‡Îÿ«GŽôhy4ÓÑBøÝ@ò#Sª¥Z)MöË Jš¥8WŸ_?7Ý‚Ü¿†QÉ« -ù¬+¹”\\jù•–†°¿³8ÅŠujë¡Í/7cÖ›'ZY†A·TÍþîÑÊãÕu´Oºkxx¦MugíûFwsxa[^2¾ õ*ñî£?ÇëýG¤•Û •jQbfù:£r7:C­UJ#ÄÎ_“§Lø£[½Cöw}GÎzm|º³ã+lhô¢dPhXý7(“épªž½Ëw^áñ´¶½5o1âòý—½œ˜ ßÑœ éÈúiÃnÉW¯–ÏF^s‘‘]¦Æ‹>ŽÉ‘tÅwfüò7Є;4‹k®Ô:Ã?<š¨Æ›Nï÷¹ÞttG6‹¡sO-á(“b e9<HÛÎ fJƒ {Ow«LF4Y§ŒGÕëȆúã¹te¾{KåòÀGÆS×Þê~W×¼×añh¼ÀÈüŸöU?ק·s?È5ŧ6u–DÉ®\÷…øò_ã>¶¼—Ó«Œ²ý¼÷£9g=¹ê|P›Ô¬ -_/ÕR­y™ÖîóðìaNRóløÇÞðö³"Âôä IÌFºÕO6ã!ÌCS…Ü0q9ÝKg3É >ÕËÎ9ó±¡TXÚ9ÒHl3ÉÓs½ù -%îùÀL…[bY62¦=±Ùôô¥«ÝI//Á „ØÞ&rl/r ­]¥‰åú2€[–ÚëÚkqÒFó -FZA˶^"BÖWHˆ‘CeM;j"(ë‡:ÓæõX1²GþJ VNˆ¥ó“«keC¨ŒŠKÀOè¾^<Ž>˜Üû§ÉD· «nQµºÜªl)’S’‚G´J‡¸,Øq’úv 5W>:ÐÍj…“sèoA{’7'•ì@µÛ/z\]€Ú#ÞÖ˼ÆUwÌÜ..MUíH*iæ >Xà€bØñ‹®oÔ.Ø¡j<(M;ÉÓ½pºLWdœ{w±z%NxY_íÒÖØÏ­1 Ü.Pvøмm÷õ“ÀàY!ÏÆoÍéxÔâ=ȶ 4›ò¶?ÂŽŒ,6Ù*ç›ûY¬”¼Á¿.ì±ñÆù=V@m'óI)D³n±[HžÒê(=wÏo.´™Ö×ë.“"Bê˜å ôF5ÎêdXze‚™sÞ£®-´ö®â¶®ÇhžÕaB„q Áp;|ö^ÓpÓ„Ër»¯ é ÝG£¥‰Š¬Y ˆ› Êa÷¤©LCŒHɈúëÛòKÖÕ§1¡ÍSÇ.‰£,ZÎ^^{óÝ8il¹»×ê ýÄ5±ƒÛúyØ·1ŸËßØ¢³kOòߘù`aN<T |œÌ,à ¢¢j²k’sŒ€¤‹ž>}EúyŒŠ…v´m¨3ÞÏg˜rÞùˆ}Á:ëF%SÈpÙm+DG»›h5Qjv¯{¹ë¤FvZs ^¼¡ÅÕôÐ;àÐám[WON,z÷ßo®ûqÒ´{~ZO¢+ÐhƒÁ./A1äÂÁä/¹½vHì¬ °®±L÷4VƒóN‰º-–#ÃÒmÑë¸L]?>-Г‘a#§ºìâ÷^äaiï°{ <úç2>ORC½¯›–tdAIŽå«;Ó‰žÒZ½,WêÃ6{%⶷ïÓKSäYéضVE°e-5’œ%ñ°ÌîÛšsŒ\„J¼¯UíÅÏhæ´º -¿u!ºÐÄظ·|s!;<“6öÇ%õ½|3¬ÿ®–±hÍrtò¡â[æÅ'‚’S™AßNÙM^ŒÜ,Wsø•Ê•<Ó6uÚ­§ÿOšy"”ªty\?;›¶0¶,Q~æZ"1jpE¨YŽÂþC‹´¥Œ´:í,H'ÿ+.iµP5˜+mw8¨õylD+“Ù†•§fƒìuÇ{" gÄ×ó1yR`Þ^LÚy#-ž¦€JCšm@>Oâq…Ÿï[Ò‰;dÌÑq0º\àmÝ Û#Ú¸œs>^§¹þ hý qͶY,½¹ÉúD/'싳=×ÊÊ¥t@D"ö++O(eu=±ÇÓöš¶oõïû¸ÕÅÊé^7stú;í®õ@Yùë.²pG{³^g‡†É„Œ·qà}=ÒŽ^ Æ0‹ó¤Åîà|¢å<|äo콦›íëúô{ CA„¿óŽw…jǬªmÿõ‡™ܵ2–¶Øc<1º1zy‘]›RýòrÇ’#ר?«Ðzÿ$Ö3Î3dÏú_Ü¥©¬’ÓNwe}2Iˆ¸á -""‘#»Žˆ{æÅ\æ<­=!›ãuÔ>»½xÙ²[õqâ¥<±(F‡âüô#¯fžbSBóð"o‚Z>°yÑiþ`f³ÖO{”Ú¿ õËÙŒ<Â6–C&µèAÏvÔH£¨ü˜¯rƒú¨c„óéêzÊ–_vsÂñcϵþ—K³>Ž¶“E‘-›·‡½G‡5j]¾¤o‘$šTŒAºÜ•“@*釪–íØ›…÷dUÈÎàW MÈZ7ŽpÚ0`L•BTïhãÛ]’jÇû. 
t‡ÒÙéQR•4xÝ›+îïôFÇÆ[­ÓnRR¼Še.£<ÑéGŽj7Žk‘¢žYÝBÛK"ä¤Cÿ÷zÿøWøNIÛNË«Ócºz7Xù ÆçtMìczº˜þ9mgÒÊ€ Òö}©Tóë‘Ïî˜CŸú:ê©™®;/Àê¢L|ÂCµ"ùùhÛ¹[¤~ÈöUæŸz]†Ë£Sì&¥™ZôuÒâû¶Úb+#(åt„ìPs€¶æŒz\†Ê§QÿЪ÷¯8ë;œLVòö‹Y¨l7b‰O–ݨYˤڅ*ävSl/*ݨ_nCâµ›‹ T­¶]vS% ÓcCÓɃóõw ¦Xç ÄɶѬ ÖÛ´æEMä¦ï—Ï´Óˆ2Ó"¨$i¤[f-z25°saJä¤w†ƒZeµ§ðêÛóêt3ß¿û[\@çÚëêÌsašZÿƒ{/»™Nwk—»‘MBYspG#²n†š&z™Óîé Q© -;uSÑ3œÿÂÙùÔÇr0wšn©SÚzNÎc¹Û&‚°.†Xøsï*÷£Õ …fäN‚óCÜ—atÕkê“áÕ©Ïì­nœ6Ð ®ä— ç4ÎLˆ»ù!Ä#x`{i§]WZæ¹>M«#»i=íq¸÷ïùdð@æU]épªC|´k¥)c›R -)¹ŸÇv÷–?¡áVòÍ)2ïºVo" |D+×áH'ëãjT¨æ$`!¼j k¹ÎÞá»ïK¢Ð ;£ÚVuOœæž€¢Z±Ú·,³dƒ–Ø H{©Mi…VŸÑ°ð ®¹æÕ ÕGñfô›áΉºßÎÃpÿ~ʇFmLµ¼ÎCíTÚS>¦©lf,ÛLÁ¤»(ç5äMÈÜÇhS0G¤ž(e•ì+§¨ ÇUÕõ,3¯Ëðyâ•@az‚JWîÛ8ÐNÆÅvƒ"í«k+º‡ï’H4Ç*ÒLYèH÷ÉT÷ÑÌÀÕß„£ZYoˆøèÓ^ØcK<ÕWßn^îSÄ´×ÁÖfݯLzf{Îk™t$†× 9êï“YKRîƒræ@é|Õ—ÿ­Èq¸4Êd¢@î—¹¬@o^ Zà¼ÆË·­É`ÞžèVÑ­×¾$cøÎ^ç^Ñ®5Ki:c²¬Ï}÷¼¶z·ìÒ0纓‹žÝvot8aÑ{æ¿N^¬Å ­Ë0J/IJ_D*1­§³½H>Ãá«nIt»í7 ‚¯*ÇHçaT7sâú’®û8L¼éîü'“Ò|ÛÁ‡•o·]‹t»Ìm_½¦Kj1î@µls¡Îvëé©çM{˜D=å¸AÌ>¼ð¡6Nõ‡ dwWñËLLmdK‹"½H‡<5ãŸÝI¯¾ÅŠu¼ü1Hz A]¼¦)ÒöÂÁ7‹!gÐ5«¬ŽIá¤Á-­k¡¤ŽcrW—¡»`ÍÁù¹2üü2nØk¢Wp$xÓ|µ¸â®¶ÚFUoFW1ˆV -ÖQÆBy·‰“®^6åºHhx½I$¢¶'®ê¶á:´ÐqÑ3Ââ\bé¥Ã•eúT—=·f›¼ºÛË‹3áp?§VD¥>}U‡9[É^\V`RB–wàÐñd`žZÓa‹ 'LÅéâ~f°æQ€¦ÎY´aÆÒ²éU#÷.9+è­yL|… J9E›ØŠ%Í=¿œ`N„rŠ3Íšb:$i³_›šÀ"“—F\96áýÚþðpìƒ×‰¨0àº|jk­)sGx°ÎM=ÍîæÜ7Í‘s½¡iä¯ÆýíûAmÚXm$¯ - „:ðõ `S -ûdß‹ÝçVo4mnÝ¡ã„>p褤“/t¬ITŽãIȳÕ)I¬å1dn–}šaôÕÿÝ­ÌÃ&¦Ñ×ïÖZT´F>©áÆ'_Óƶuß~O?줹g÷'움ìžVpõN7Ô­ž½š~Óò û/X6 ZÆtSŸpxj+ÉP’èúk§8~?ƒžÝ:òT¹9ÌèÌúÔ™o鞣¡co JjðìVõîOjH #²iyê¬þ5‡Ð4ÅC“`Åš¬X Hèv±Nw¼Mzu*`bóõ‡p$¹‚×ñÚ@Kô¾w.M&‹έg\îh¾ŽÈáâü¾pÞͳ ÜuÅwÐ@Âè5»ñßT¿bD ³îíƒ -ÏÛ4¶†-°Eb‚æÚ1-O²kRk8äÌgòøEÜ|Öæ”\Rdm«ƒ• ~¤;ù <>\[1éA+^ñ³¤F3÷š7®vÚÕ_’­ZSõÐÒÆrü‹šYuL¡.Ö/~â>môHüeàøQOyrkóÄ!üžÛZ¡6SïꜦ—á*W¹N;JA"·Â;&dÜá¶Ú¹tfaÞŸÎJBH;÷¤nƒ’Ù¶**TÜ - ëýbÒµÏfd˜ái®)>º]§û{¤SêÓJ`¯oÀ~ÙYD¯¿Óàºóø‘a”æñÕl¬nÄh)v)24EB‘™ØiŠ­Y_Îv*“<[e”éµÞ!Ãða&ÖAw¢ªéoÃåékYÞæ»(Û?)§6;8`΀}«ÅtâDº'Dn1¼Ó[¿ÞL´p½'¬x:•.ú\£‰zÚ‘M†±†¨ÙoÞK4ë-Ô‡y -ÕÊ€rª•j>J4]’b´…L¿¿†ÅÝ_ÓGŽc‡Â~Ùº•…O•—>à<—sd| ¢Æ¾îšoŒ-ïÞ"E¯[w·èͺ=ûd÷+Sž­Ùý‘%³˜®‹ÙäµÂSìlÔùú½ŠAŠNh^ÃÎz¼ªö…¥Lªt«OAõdxádkÕc€Öt65ËÑûŶ÷èj'™­7¯¶÷“¢YvvÇËu𵶟Ò}|’z¾þ Ï1hÍýአùö'mh_œêf)œŠ¦kÖ,ä˜M9bqšiE)\#96zú0b1ýÚµ9¸×œ•êmœ–'j³)'ž·®†5¾Gºúzvne®þOy¹$9Žä@ô*º@¦E þ7šE¯Ûln?þT–(U·Mm(Q¤Èþ›…5N=òO&Wÿð|”}òfÆurr†‰4UXÀ´Ú,ì5b+|…q׌ÝrÌká36Ñ`›}XÅ‘zÌ~-•˜R–m/Ûߘ:MˆÙ¡~:‘ÝÍÙ<[¯…šN˯€cTG"0X ÑÚ4ZÛ9ªz#ì†i«®Þ‘¸ÉýålVMcZx¶í‹¶v_z±®>ëOLƒÞ8l-‡;/E+¿Ñù5NN”áÝl·ºÝ·—Ëè”ùœ¡\g¸Çs_1«Þ§èeˆâÃpKÔ´x¢ž¼)£‡‚Ï|\¥ÂsÑ —Q¯~i_½Ó›^Ú5PqÅ©VPçpCº÷;Œé  _ª‡­Eíyår’¸sÀÓÓ‰C‰›}ŽêRYä˦N¸¥¥»{s%©h­½Éi·Íœwæ¿ŽXm##ÀÌ9ƇïÂHµ=ñ)ýC'ݪK±ÙɯĈ[ß{&E³A±Æ"+7/ñx&B¾ôóˆ^M -} ÚÕm랆vÌnÇvi3ØÞÛ)4¼Vb.6*a{ÄØ>Æ”gk†›‹ê0Œì×ÓökŽýQ¾rØÂZÙ¢Ç ²˜Bsl&Î!_%} ´'ì.@Íôf¤ÂÝ’ceTL0?Ô¡{O[H9`þý²èžØcš)²ç«.´²gX1mW˜\R‰kx¥YçÙ´ÜHu1ArT‡Ђ3=¯eÙŒ„!Åùp¶Ñ¡¹brú©Ðù†Qsœ§¡•ê÷Ÿgf$+¬5ñIÿ¦§ö‡fÃìÍ·Pæ7½PË#÷*G ²Öön±³Y»xË -hz"¬–ª$âßXºaƒM«ªly-í.ŒPÍP#ùñÀ¡ǺËÇD±°ÕDÆ}àþ± ˆë,†YN|N7š¯„¯Ë:Æ›„8w–,Mÿ$®J+‰«"Þ/æË«—…©ˆ©2ˆ#K·ÕhZ%—Æ©2}ðQE21"ÐÏð$VÁcyƒª¶¾óÛüñ‡9t¾ÒO:)gt¶±U'î‡yâ¤&â8¿0ô¦î}u°»ÕOyu «–8ÆÒ<œæ*©·wï ›šm8öš—2×$ëu¯*úô£UŸãš îÕÇ@ÄêkQÉhÂË2¯»§ð4wé@mÔÂÕ‚ [l{ëôY¯™{5pZáCaØ=—)k;C¥¸n,Öðà­n27+M)¦o&W…›ÞaÞýá…ÜŸãÛ¾ÇÆY¹±™î?"L£wÒþNÇ1û_úÜÕˆ6LC[C¹ƒžG5E1°(Ú¢RÜÆ;u}²tJìn åšAýŽ‰æ4ô·€…9ÕKÜáÚ³ãU²¬º—ŒL(k2:*h¶;Í4wÖN¶Ë{vNÎE§ºóIn(îÏq~ŒÆlŒŸ[Y=ˆw¦ˆŒxÅKúNž]æb&“V"Ùm@å0z¿ŠjÒ²´­mF®ž%ºsЙ+ø‘páM*[`‰v[e -ˆÒ´b­¨âO† }8Ùõf="ïŨ»pêëö.69evizÅe4Kf:"†~>%ßtÒÅ…aŒfFº¹ðÜ=óŠ+®J9&õiªA¤TÐÀ–G•ƒ”{Wä0Y^ÇO\ÕáÚ€,õ*1ñ‹ÝŽpI¬%‘¼É^„+"+¤Õ ´[DÅ!ô‘M‘p}²ïm»ÍIÏ“öø×SŒ£5 þ¶t°/YŽ™yüˆc'Ó†® Ö-rÓª,óO‚A­u5ê!&y£ãBj4Û2­ùdLÌŸ ²,›þžåk6·9µÛùðr§Ý/\¢&Ögy­Û%Ÿˆð-¤ôÇϱý¦ÕÂÑÒÚ­º“ÂOš™ß±Í–pmV´·{FÒ‚>f‚/˜U8Q߇-XÕÇtÖ¼ 誴Õ÷…|íROí0K¥ÊÀßf¥‘^jZ·ÖuƤ‡ìóøÐ1[¯¨2ê‹âÃ!‚R 2`ˆw7L€‰"‹ÉmZÙª9!Vÿz¶ÓŠƒeµí'¢N;ƒãì‰OWG b…bÜÑošõZó8±Ý XÍÖçQïzßÔùå•a·þMu‹wÍņ“U)àêX wki+ÇfKAc¸¶ì~d.G+P˜ëTdtSÚ\l®l_)n–vŒ5Ž+5"ø ëÝYúâuü –j fi«Õ^9'íCÀ©Û¸³µÌÞ:*nf¼eGAš)&fñ¾ÈÚa›€*âI«š*bÐ#Kçû, ´óé¯.ô]7ýW=¼x•ÅìïU¼s8q:¦}u/å 
CÆçI±Iä祿š©¥f%í¸ÏŠ§]¾A؃9'R‹|@¸¢VX[œÄª7–kIÄÓ|Å¿Ôxl$¼¼“ææ6O¯'ú*Ë#œ¸§Ýnôkxõd_Ü6aGŠ˜í½/¡0ðÀ˜ÙÎÉ&t[®îúûåÅò1·'•UÕ[ìíg×ñÔI? AG Mí¯:ˆVKglý­ºª:Œô¦«^Îe0ﻤ‘¶|Ö~—ÏêÙºŽzî»7(Z<¶¸ì·@» ã†O£þîNOxXd"¯Ì‚Š@3(yÃŹèb”èfÛvõ®¡1§$›˜¥aÚº3ƒ–ˆ=8µæåß–‹®{à>o†DÆâg ÍAM½ÍTÕf•9#’û§—:¦}=I7<º6Ꮅ$›I3÷²¸Ž§™{ÝU·§¸ŽŸx™;£iŠºn¬Ôn»§ù}¨ÂLßÛÅIü/É’ML‰³"ÛVOÐ>f‹+C5œ{ÏQ^×{ ¼_Ç÷õn—©é >oŽÞ<Ê>)‘Rƒ”´XŽé4]¤1jJµÌîë/D¿a«Êãê5솢(à¸M8MÖîã~©&,ôM¦úÈ"ÿŒW‚dz;„ŒS=&-q7»•ù¢­Ô'œÙ¶9A#IŒ‚°œY1ù‹[·^%ãЇ£UœtDÛ–WSyèL2=¨5uËJ©ƒG.¯ižîôp ¨îû1±BO*#;Ëqh0UÁb“²µ‡ºô¢0Ÿãò·åþ  çQ‹ck)ëÿP^3uÊÈWne¤6e»î´`šäl5õ\Þ;~žèF=«;¤ÀuÚCq&Uaitq_f·¢Åù>6}š¥wÛD˜µcž$épÓ’}š!ZËÝÇ.~ŽóÃÇOÙÁµ[\ê×_կО“U®^lc™]§™a6¿íñ‡!6RR—–;leVN6ʯF®–aÚ9¯´„À¦e]š]?Ïó$Föóû3¯é’5%E}‹yÄ>± LÈ„½m>bÃ(5óª°t^ fv²r›Äå‘ij?Ó½ÂÏÉÂhÅšˆ—è¶ÍbåäEù"ŸòŸˆF'*¶Ÿ_iNG”¬[<¤îå­OÀ‚ -„Ív¼û”|øÍÆæU'«ÂÌÓXÒ—-½3X´mE¹3¦¿Ønæ÷÷2uÍ1ª¬2up7N>Ãí¬ÕD|jªü¶Å›ý™Ã„êyMIíÖ|Lx8["úÉ@,úM¿âòfz…hŒÊ9ž ¤©?u~þ×3×‡Ý -Â%RV§r÷‚ÛÆ—ÿ1^.IŽä8ÝÏ)êYF‚àïF³èuŸü9¢:%å,z¦¨pø‡*ðùã¤êˆÜ Õsö éÁëI»$Í(νÛÇžÝÜm¹dtj„mJyÁF$U34ãòŠÑß,þVKãâÛºÙ«í°Â¨š°ßU¹+~Ûal‹wlŸw–R'MÜå™âX†¿±Üñ°ÿþéû¡ß×Ͼ_Áf«1sn˜ýÅ^µâ77ñ5«|™Ú¦á ¡LÓ’´Ã>g²ÙT/D¬‘ðëÇÔ8§›üÖMçª9íÇkÆÆp˜Ju_!ì¨-Y⣕¢à_)sÈì4ûÝé‡]„•6† wvᧇÎôÉ!¥^ƒéãþúýÂ-KõÊ!QSíØ¿|hâ;'µIgfêÖ8Ù1„£3†ª24“S…  UÉ -dGÔ?M*¬ñW£t"} oŠ]plQ\æMœlàZuŽçšGR[ ìNsñ©½m`âóy%ÆM|šö¥ø|Ãk˜Î¿–Çmy:‚¥ª@¢ãõŽÚ °f§¶“”þ ¦âÙ8Íc$Ÿáæ'¾ Ò25vÏ2ÎÔ”[¸Ö¾æÂçfY“Ixƨ•d+ÓØ-è ŽQðPÕ+µÙ}_? >šž.ÞS.¤Rù2î‚|¸Þ@”Àšêå!Çt¬ŽÚŸv¤Œ¼L¯æS8_ÑÌuO°<šãŽ@´<€|ˆ cm¦‹&L¶|Ëx%êã§w:¼qü¾3“bøΫ—µˆÕà9xÚqSD¤ T -ˆp¤Ç“zÚÀ¨ø³¸ˆS¦ùXí"~€cµ›µÆ  §BÕ| -:i]ô£”¢pOºÇ.üpÔÓŒVatp¨í6¹¸Ö×èàøûúÉU“Œ(Ž"•µ_•¼NªØ>ÞáSÑ_í×{h×z2˜;Læåd=WMì,4Zy¯HÃ6L˜UÀV‰/C}€¯-=ø£ÆAS×µ/,3®/L}à–ß^»K,U©ÍbÉ#[`°D‚âÐ;êÖ¤¦n]O¡Ê-;£Œn1N6eþ³›pâ3—šzÝö¾j/XíÏHo»šõü7i6Š2¨ -›ÆÄu÷¦³ˆx,+[/¼<†¨sÚ¾_› ÂLŽ‚?Î -]ó=³Ä²}_ÿ_Ÿ·€²®&6Z¼õ™ÝšËÿúËp\)­g¯ù«Ld£©Qù]É^LI70Ý|Ît‚Àûªwí©IN Ýb>žlÃàÀ+„žãuÜ2ézü";ÛôÔe ©\Úg™ê©*žõ"ëF'Õµõ -uÓ®>ÿ*×—=ÈT;ߦ~ûsùä8ÄfiKdwvï¯&—12ÅѶ¯E1ÂBWü8ÅWB6©¿âÀ¹ÔÈÎc£‚ìæ.‹%~/ìj÷ ‡ùñXÒ¡‹@h­5$õ¶ƒ2-l€eG…´·™«æ)Öf\¤Vž·jÆŽwÙ§ÖÊ9¦²sÆsÙ:"Í’#¹+N¹ŒÇC$V{+f|ß×Ony ÆOÑ^ÚKb¶:Æut„“’‰Éå0€žùZåɉVIVœi†$S>ŽŠ¬0ØœcE9f1›r ‚ @ŠâߧJä±³Èp%–à•®RÔüõ2ÓkÆÕ\Qy½~žþ°aÂÌÞä­¢…>+‡QFƒ·§Ö ôûšpŠé6urA J†b¿ŽycÚ¿jåKK3àù3Ê¥ ÉpÛÑ®ª©êð¶´$c˜©GP¬Ù Ê×Íé>ýº $Ï; A!¯×5Ð#›v·ŽüGä[ j>K'³&¹ŸR“š}F€YR‘J&oö9°@%îhObHí‰\ÀüõcXhÔ<ýÒ[ ¿–¶ôJ;÷£qÿrý׆(ì9Pºíwl—y£un·Ã^R·À…Åé$ÖNãÀ -À–D¤Àk=¹W2|­-˜nrÆ8®†iEÿw1I¨töT$"Ó~~kù¸Y·<)šòá!¸:Í[%ì%¾¯ÿž6E Oø[€öìR9ÔÅ£5ÇM¯3àZÓ³†Ù¼­ôÚìµê¶JYü&áÙ» -}êP“¯9cYcÌž—ïëg÷²Ò«FïŽóewúŸë¿'³‚rÝP¥^á!Uýn’Â’÷ý”ÛŽÃ> C2ׄÐY»>YW»¯©h¥µ·ÍÝcÅ#k`í,V/¢HÏÍ÷YM{¹ïë -–ÄŒu±ÅðïË°ÞæÖYh„I¿LËu]»‹Uà šFÈÂ$cq-Hc߇†&¼Q -XsÙÏ„ºZ»ÏükžkÚŒNþ¬"ï±lCfM5ZÜ ¡By h‹–¼{¹~b™ pQ9É=^K’hLë–{º‹ó6R )²;cÆ©"¢“ñ°,NÚ^·¦ rõ°¿ø[9Eìð´ïynþ–K@YAïOy (ë}Dï¯×ëx!–Èo½çЦÅgöþ«² ¯ñ/ݼ/ ÅXe›‹žh(¦»ÂçñkB‡&[\¬.ë/ê“8Ñm|›Î¯¬Á#’Ô€'›+z,ZÍf -ÆžîUzº°ÖðJÛÿ3úÃt‰Ùiž7G7¨³Laþ\~‡ ¤ÝÍÙAÞ{"Â*f+;•Hý¾F{_À {XË 5’”'µkd4¼þ² ³S•Ac}8,Ýw͹סëŸëÇN%CÒp!hHtæEõ^vZ´Ãp ’×ÙÐ9ÄC"b‹Yìy-Ž­C*#¾žì±×8ãÆaWIé³—ØgfIw|_¦ -âü• ¿”ÌNâ)U]žܧàâ¢5E|ƒ¶ýc1_¹°(_c{ξR›áe,ðÁHéûÖ˜ôMÀÁCÂi'aƒzPò5 œìZ m—Âøù NÏZs˧2\¸÷Ìçi‚–…„z´C‹O§¾m·rç-÷¤CE{‚)ÙVƒ‡o%K¢à_)R„! “^ÏlW–Dí²™\¾0´Oû:æüV\§Òˆ*lË4jñWmÇ0[ÖLò…=&ŒIO²þýX-ÝÎ^ß6ùkË‘N'ý˦Ë)f”ŽŸßeºâÚ~ -Gm‹kÛ¦SÚQFe9Um.ïTÈcö[’`–]¿¢âèÂõp[qÊ0»|Aú3 -–¡¸Äî[Â`eÇšËYYPÃ&s4ô¢×!#þsÝ?R`^ÍåQËä-U­8¯)Q›GUh`6 IÈí»›Û1õòmðA 8Ò†Ådrz%D¢Ød«‚ª¶Ð»\¨ò‚uwãCÉh€Ñ. 
ksYB/‹XäIí•N%Â*‘ͤúc:4çNMÓ"e2–Ìûá4·ù1°Ú}°C½`°c;&½9‰žá˜º*qÏy §UOˆÂ¿ø28²kžž·,# øÇÍÎw&ä6l:ȲC¤EÚÒœV5ž>¡o¬†£‘rOyÇ&·hB‚“èƹ—#<‡N»H€9ù=ì„!£]ªywû4Á‚ƒ9Ð{£ngí©ž¥g®ªS‹ã‡Ý鸚· ëYuÔ¬Á¶Õ5ã>ö ó‚¿ϹA‰½Â8bÚìI„7æßx€ÿ~ 1V¯ µoÀ#vÊÖËòìvÑÂ">ªú¦vMä»áÏÕgöëó§ – M¿Ê=^ z*à†0šéŒò”ÎèR’;ÚÕtxùøn -¥…ï;Ü»e^R·A{“Cˆ¾nÅ‚¢ÐñS85Êwh Z/'öfëÆv-B É¡îÀ±2:Fùpþâu¿›ÅD¦jײîa(še?XûD†·í qT=ë—iØÅ×€B(ÐøGiEà#& DoÙ 6ýÛôšóœþ~É<~êÚQ ¶ª£ñv;o™‚ñÑàóëõúI+8Æÿ(/s,ɈúsŠ¾@÷ËDîç‘#C÷·?Àš&kFz’ÃÚ‹ ‹vÆ-ˆšÒ%Nl€Þܼåƹ‘G»œûæP¢!‚bÐH„³Ž‰®Ìã2°ú´-WÕ–¯SE΢»”íÍÄä×ÛÙðl>uê¿h¯¯²¼Åä§wg¥}ñnûq~^ÿk+Ø¥žf9 5S@&´÷C—q!ÖèÁÚæÕêÁvRÑe†ÀÀ%Ít…Sj쪢WÝÚ+jX¤¦‚3i6x¤¿óŒ•Ò®ëoV#°Ó²Ðq–®ÝJìæLCqxÙÍS9ËÃQ5)¬3ýf½þçÌÿáªÑ7Ûµ½dN$ä¿w÷Éäú=Ñ<œ¬4,¼dOïö2cX©ùÒô.±hb“.à& æð›Vü£f>jsQ×åiPPH[wàO½³X–šÁRßAí½9Iµã•²Ÿ[Þ—Ä0ìsvú‚^Ò -,6ýdqÉçPí|žË[RôýüßaÖçýÈn±Ú&ôÂ@ÕRÛú™WåNlG¤~ýp㹩ͧzÛèV‰»í¸Û1ŒŽÚ!¨:Ëãăüs¦â3ñ˜±„îѧ’0Îä‹L’»;˸ws_lÓ"‰}æ75‰‰·=µiaÃýrcϳ£ÇŸ}_ß;Ú(H›‹>Õ'ÝÄà1sôÉÿ+Ô;íêÉ5›± À).¼ªyè!僉+ÏßO±$ˆ:ŠvlÙ߃k±ŒMþvCÏ‘Ìmlæݾªd >3Cj»yÐþÎL¥V4ñ NÚ8 ¹³i— „§Ô²RŸ¶}T{“îë/õlð/òmÂKé÷ÕB@Ó£oãdÍf¢fÐå¸-qÂÖ!cz+¦‹ÆhñšûQ®£O Ó×2lZµúò .8Íîå“Sf@M÷I„„·ûõ­¨‹}UÚ›ê÷éõá·ŽE^ÍNs€¨2¶º¸ܘÌò -ê“o¹ŸÑ.>[ÍQ„˜æîÖ¸n¦±j##©Ž…H7ÚÈ¿6ûN;#ý¢’_ô’·š'ÿ3ðu 8ò‹0dóžI#ˆòŸr`‡®L÷ÀFñØ[:^Ý!£MéIÙfù%>áÅHÖ‚šB»ß•¡0ÀÂleJ—\lýŸühI‰$¿‰I±ëÄ¿ˆuñ„ÐW“Ã’µkµçà ÔŽ´ý½ÐdF“IŽ0?Òiçd’gÒI±á,à˜oJ}zÊpÚø¾¾CDëÛ0¼ãÓx†ŸÁ§ñt¥#v܇ã¯Fc ®VÞç0¬È\4-º–ÊÕVDD˜¨ÎŽ‡ìùitªÄ…@Œa´4gNÚ€ÑF•h‘¦?£ÀÎ`rtÓâñ -4ÇÉk9ÆŽ°=âÕ°ý(¶/¼¸·Ç§¨„¼‘Iˆ¨ <‘f…@–wLùâØzÁþŸ.ÃÍt°Šæu]&ã>êS1¾¯ïœ £:áëý,¤X}´N8«°a4ÓnÚt -›à˺U"Ac0€šPÚ¤¿t¡zóA}`+C»(P#¬þ¤Ã@ÚÐà—lb‘sѧI̹{ÎǪ.³7üqÍýÇ‚:™’Ö}9yU8 ¿·ÕÓæUãÞ½Úÿ­L8zÕN¤ý´3Óh*£ -Ö²cãÎkŒíà¡pͧËd&XŸý‘LpÌÄË^|3ëšVlXEšyQ@z®U3-_߇8(U± - ß×#èà|­*¶‹Ñ} X‚T°-lÍ*ðI#SÙà1%Ûæè3Gê%¥Õ0¤CZ¹ÁöK)çq¸;W¡ÚaëNµ#–ó™Ä¢Ö¼ÅØÈi­‚Ýšÿ—»©îêîÆ׫ ñçíõe]Ȩח§è ]{*ÈAÖC ÓãBô’1³Ç`#pwþºgž¼}ŒR{Õù…³Ó½mNy{r°:³BÝLâFeÓý ê‰Qé(rF…+#åÒfmÇ©m·/Hé­ôü$Q±FÔ…ðvYö­³}öÀwp8z}_Ž ¡ ª_šÊ78ª#wl‡[¤Ú]OBf›>œ_º8žfׄ}$H*Öüæ>/p n‚ÐÀ*½æ=Dv²¯ô×Ì«n\á€/,íYOÁ‡™@2<Œs„~ -¾% ½ÞmÉ¢×O~”zY Õ÷}ÑÍ@êW‚Q/=ˆiÆÎÃlçB&Ëœ‹]ŠšÛEÂBNBŠ#N»à^¯óð©=DŽ]45¯8¸^dÎÉ •6ÌDyî"PªH¬(®bÚté6¯*S”Ž÷„¢»ÉR㥉àk#@:°“0\8ß«Ÿ¡Ã±éðÄçaãS’ûáXØüÇæ“ÿ¾þÆ×­Ã͸ˆgÊ(sXʻš™ævX¨§2šIÝ„ºj5QÌÊA#¢A[qÒâ!–³mc‚£oû†Å·lmº÷APÞ¨fØüòéå“ÏÍ"^„j¡­ÓùIŒ„ÃñÔúͶy×áÒ}Ža®4‰æÑ!…$ Ÿ½‚aŒÍÈ*-`+\JíÄ×85›idL; 2¿Ët¿t«7N§îN|ߥâ&°JÚ;ó§ú©ûÖÿ¯¸MŽ™G”‘p#ž¿]ZMÀµ®^ÎС ^2Óu²Þ=%Çú/¾þ7£Ÿtù¬°™*¾¯¿h‰¦Z"Q²÷-›£¦Ãñ Š¥ÄqMâ†é–Ú”“ùå¬ {ÕÛ#× -ØVãî¤öÇm#Ù±èlnnÐ._½Õ«ó pÓLðlou±-”"Ÿiž˜ŠàÖãð+•Ïÿø^¯À31SÐŒ6U «RZöîºX^íTÉ壉 ’ú¾þBU¹f*Qv2ÕÍrǤcbó¼Ö¤sÔù<‹+\5kv¶×¿c(¶!aŠaKìaA;7¯yÐÆŒ1k¥Š´Ü,¨ô÷˜„Aìúûã~ý¥&9Ë©ÓÉ~ T±TÓ–Ÿ°àõÈí+všÃž\°‰j¼Pî'kž®îàÖ̇‹"‡cÝ—m{*»Î¶ldJPËLUí½Uou ƶ–7ä—ùq¿¾ÈV×P«'OÇîön~ͼZø2mÍП5ÒÑ‘oy̓Ð{UƒæË:7#8¿Y°‚ÝQÇgmmÛ·€; xoÓh¦tÇ·…\'ªc—DGgÛb‚S‡mQ·ß>&ÏÝ«?_ÝhŽ?¯ïãVŒ’Ì rS^nÉqì8ý¿«ðä >÷äýOžDI·[š‰ Xîê®b‘@"ƒ¾¯õVþØ¥~jphH¯bÄtËv‰¢¸Û,¥!®Î.?‘XÁñÛ>ê8dâˆ:€â‚¤ŒÓŸÐa{ôä)â0Ñk2 óËy@ñ¿n¡EêùŠc½29F§âÙkܦnÙ -©*€“ƒ’-e|P '>ûWs– •yôýx~»¹Š9­CxªP™˜ãéq¶4£»ý-'j[¯ÿjJW „¼?ž™NšèîÏ?,ª®ÇÇÏÓÍÌ–3‡Óíâ’p°Iš°ÌìÃîx_HÏNuŽ¿ ç&ÐÇ»sÍð¾þþǃ¨k -wc|ãSbk˱F++ݯl§²£ö¶°´¶¥ŽaÓ/+W¾ 3o׃#+õˆÖ°1äаVup8;w<Ä£“©>¯g"ƒ@³ìg­C5e‰o“˜K©‰l¿/ ŸÝËHÀØÒÀðô <œ:ûêÀîpDò­;¦ÿD°D8þ‹ÏH'ªm2n¶½J¯»z½ÍNÎtÊE÷Û{µ›Ð_~Û?…O¶_(–„÷~‘}^8l¬HUV¬æw@pã¼Æ~4:-äpriŒoD¡çuŽÚ) ®YVê8^y²æ óá›Ä'yéÒdÕÀsmEÍi+%•³¨mÇûùO•ˆoU¨Ÿ nÈ0Y‡óOóüKÞjÆ)4¢÷¶*F:†VeÎ,"°Dí(éãXQ{…D‰Z¦ÅÙKЈ«2‰’4 Øöi'Ѥ—’_Âcg~IÞ®*Ñ™uâKÚŒéCC©ä+‰ Ó¨è,IrìMWÇséú(ïu2L>Ì"ãë¾ÜÇìë2íʹAh²E‹Aäƒ,éòfÍZÝwÊHonŒ]qdÞÚè-÷<´ñ^VÊô™v±ôlËQãÔ¥~Ô:i—щÛJ¨ì¦fÉ—™$E÷¬À(9L,œ•6Çl·­0Ú—ÊópÝ¿"ÛÆ63x$ŠIŽ‚ ·å›QÌÃÃsrf­{t†¼{7׌ÓNãQ&zÍ *LÆâµ¢FªY6ÃVðÄ5…´›¾Ûl>&1W-¶À‘¬0Dž«' -B¯Šå¤òåù–3þSLÇñO1½”³5ì½ûí¥Žó»­ë_U½ûö¹OføNHÇ^’F½bùÇL/yï#Äк^k·u¬s¼ëðÜÜ2§[ôÃ`ÌŽÕa(ä”4PjÉœrA-¾ŒâìÚG¹x:á½Ä“@ZA·˜+³®ŒLË<ÕèFÉ.Ã8Ã0ñ©#aÕ[áL=ëH}ì›™è -+oLw³Â(²þ %³_²U¯°,Võ‡èÈ'.d[¨§®´¾¡·á0Òû.%þ£ÏÈ¿N˜ñHƒQïô™/•ÃœtqÇMÓpšgjØÛáÐþK2ù²èß!öŽð˜t7æþXQà2I”|Å0´Þ¯ýqÎlrTÚ15¦ÈÌ.lsîîS2ž(\IåñßÏÕN<ôÓüÛ·sõu$ ’Þd¹.÷ë\š¢4aË8Ši»ú²í’¢Ù­v&õ–" &-o±m ÅÁÚôõé ·š½áxf(®žX¨FÓ„%"ÍÅ 
çÙ‹÷®v!?@¸cp1øöÅñX{‹aÀŒ@qu_ úÐø-+°<’òkk®Óîjxá"n¤Å&ÜáÃî̺X U +T^Š2Y¨>¾«dXÁk[¡GÔ!eÜ•vP+ ¾Æzb•UàL›k}QèÕ Ä‰þÑcÛ°VõgýSuÄ]0wðÕd-Û u£4ƒxÆs˜´(k*¢†Ê]À `7ÎÄðîÇË ÌX¾ýκýÚÜë‹°I0ç9W+š/ÛðpÆ㿦¿çv9köéuƒ"Z­®¼Ò_n˜ T‹qkUL›;E•Qs##ÿBN¯ØZl-Q>ÃÖâujœ†Œ/Њj®Žƒè.è².$1 - 5˜´ÙíµV¿#¹¼;„Ô‹y?Á.ŽJ‚{ùÀˆMêYÚô”g=aþ R›fšéZ®O;x½@®,X»om•íÖg±N2ƒÎ³ªŠ ½–+bTª?Ì®ærÐôóÕ¨·§7Æ$ˆåòâíGÇ­ïÄà˜z©ŽDv6"®fD¨Û?Ä[ð‘o=Ô<µ[;ì¦òP}ðx2Æ{p± ]'¾˜´òñØÔ[GgMˆŽ $¹?”áoü‚ |©(xm1ŽZYjûtгå°ñ9ü ýÔx@#i+¢¶k V@“d&–/Özü¬×5“  |šøu'Š§¹Ç7"Κ•Œ¡»ÄS8‹¸ñI`„PÚŒÕÖA/_ûìyÀ‰Ïù7&¸zÐm¡´^ÒH.•´S ×ï3ν¦ßÑ÷Ó€Åû2ÜQ&w·Ï(Kí€È@T‹´“Iƒ:?q”‚ÈíÎwQqpè>"[ø~Ä&c2˜Ù¶ñLs¾±ZoÖ@š^]­å¥¼úXì ´?ÍfSðG‚™G±Á4Š„ r¯= îÕc™¶»ê™tÜYtÙJ¨ç*„øÚý÷="ßûœ"‡mÓŒ¡ºzÿù¸W¤­;,1rÙ2‚‰h÷WÅNÞÖMSÍêmc£S\Þ´–©¢²3×ÃLðÓ¢6\Ú·íkØí,Æì»a³hÄ ÆêïÝ1=’¯ß¾uG‘1:"þ–ÍýgsÄ Ó®h l)š×í@muu^(GµÈb#&uÁîÚ@{Tè Ô5YÛÊ Ã–Ÿ  f…½Ý´Ò¦yŒù&Þx¦ì·ÿõÉq‰®J¨3á½_¹Sï/ŒãºË‘Ã^Π$xm>Œßš Ñð°ýûp|´éѸÝöÈB÷ï·¢þçRǤ5óñÐôÒsÖ­»ÍƒØpM ¹1RÔ<Ýhðôòž¿«Â¿)‰k'lŽƒßçraoI¡)tþ;ÝVúã1ŒQŽâI!i.ûÀê?fi3â¤/»… Cþ˜Œ&ãv#›ƒ»ŸŽÃº04ËJY‰¬)jzâ²A†á”Ù“òéâÞYÚCÌRJLçsÓnv¤-Bþ‡ñ2IŽ#I‚à¯àÐ{¼g.}èóˆð÷cjj¡4ça²²²2|1SóB8Ûê^fÆ*Þ÷§j]ôý=+Ä©PxÑI,Oµ¢ßÅâ(wÎp°„fr1çò[Ð.U¯¢4=Єv00h¹ -«›†æ»Ãlõ%ªIBÕŽù´ ›rD‚’¡+`%cÎ@Ù0_ec5/&†µK¶Dþ¯wRÝéÁñA:-õ¯ð¨#Ÿ‚Y“{§.XQØ—¥áY¤•´#¡xÕÅ)ÃåÞ4F¤·L3˜°4{šs¦UþdlΉܑqH¥Ó‹ã5ÝgUËŠ‡mÜ Ñ“{’L¥spD`'j"N9iŸá#Ì^Ù•m5Óý'u Qïf -ÁËLzwãÌ$·|*·ðê«]Òh¤áS -ã&C‡ECj8Ü0/#iÖ-iœä`2b|¨Ú8¥YŠKÜkSnØ‚Ö3èäg:š¨IÀ%¹®`…™ ܲjàcYlr]ëí™Ö±,ÚxI¥3~~ý]orŽlÕÓ¼”hÉÓù%`.Þ˜V\€•=´’ooÒI)U;;[di81iv«:ª2æµ^˜C'ÞYúL«³Ä>9NÛaö@º+~f9ÒùÒºûz¤A+¾,já#ß[9Â#³a&ÍÛHWB`3\ήóú‚ïéT·˜.=˜o(I‡1uœJº;¦<4f²bÌ‹zâ?›BˆõÛzöàõÒ æõáaXõ£lxŽÇ3ž /T7òýs†“8š]c›]A´^­€e[H]ŠmÑ`í»I/æ±¼ªÇ#ß—×/Æ›t6[y?ö¶âÞ²F¦³õz´u(kË,»êc6Ó³–w|–,ð²KÐ7‰9ßù+³gwñG®ŽUœ›9^x®þóãïß­ˆfëÿ§8pÚcÑZ³j;Q̾ ^ó¼šAþ#K©5KæÙV³³“všqÔž“:³s«ËËX~Â#_½RšT:ÐxŠØ¶æáq«×°ÑÁ˜Ýð ˆLXaå©™J%>á×®µ™s4™ÓÜßnÕÊÔã±)3Ñ«çhÏyo_ÖUNÖ==†P^-ÜL´LßÜwcÜÚ¨9MÕŠô\âmIì<y" |ó"ò~±å“ÛÀtÓpSêJ¹mžoñË\ŸÁÚ*ó“ºá²3lø;waÊ“<ÌiKmifë.isx1|Ð1D¾x@ILTØÓ”ì qÝÓ€¨ãákÝ5y›+Õ­v·Ç4)ð¸£F L\oö}f¼:Îy Œ<¹Nͦ îç°@CF-0ìl¯öÖ¿u@£4´µê£ù8ÍaÆ'è©—t»90qÈi ¥ÒSzò©FeÄWg2„ßÁÎcšiYGt—ÑÝœY&¡‹àª²ÔžT[Øûe£µVžVw䈵7yˆ¥â®ø<ÚT:yЇU¬ª­¢Ñ“|Ž9ЮÞîòXA>”„ìÄàt fŒÂìÝŒ•š³ý¿@4l@¶§jÑðú&]Áù\”;ÚÝ&U=6mÎG)üÃ"ÍIÖ<ë`à©$íL†³@1í?é®ÃóÉžóß)×Mfz/ofÉÝ3ôܲͩ±ÜþémÈübàÚUNô1û°BÖGzSœòø÷­¦qZ já_=è5—½J”ÿá¨~U|ÞîXí>3׬4öt×k9vëØ4 Âm‘ǤÌù(?˜Œ˜ -º×ñM èÕ¨æÐp¤˜ÎµH÷µAJ·a½xÙ°M|ÿ}­E_Xh¨, -ý øÀS>l16~nûUOcl†Æ|öÄd -]Ü8@_OάåNþ¯ÉÚóÚÑá—ÑÏ`ÖèkÜ°B¦\¶–þÒᶘˆï¿¯§òJ P3%6›ñ¸eÔ¶:ÐÐOm0KÛ–Ž˜y0ÜùTGµ8é6¼m™aš>Spô¢ªáÄáSÏ -£Ž÷mJÍN‡;£©ݬÑCÊÀ³ M r¼E°Ÿg8Q%6¹vÑô~Öë°† è~§’~^%±O>ÂÇ|Áóû²{ÿþ•5$R`¦²–ag{RèmGö]Úša2µ"eäô/s敇\ˆSs/Î3WŸ#ú ¯†Q4T]²ª½Ô£'¯V| -©ÂL«ûo+àU5îr¾âÿÿîXòëw'…b.qO±ó…¬FŠ°Í6Í ‡Íw°–†ÜYìÔÇ^#†UDÆÚ}Øê$6ù8-Gt\7ššËZ[*0CÊ(||BïÊè#P©Ì2Ò K¯šQÓ¶‡¥$甞ê]<];"fJBÙÛQ2â2r“žåçO¦@Z"ÓÂ>žÂ ¡æ TR„[¯Ø­e^ vzä‹‚n4ÙR´7½º$]Æ÷‰³ÕñEê,ýîF9T{¸ZEÏQsDAJv’kBŠ&?¢¶íð£zñ#Ãz ÀÓgÄ¥Aš9F†m¯éR™a9Õ&è"ÚΗôëi3vÎ>ÚrA„=Re}V¸W_)fÙ·³æþAkëÂwôÉ6­Âå=_tÃ@q -›8²7DN¸²?-{I~·R‘cD`T4ü¼ô²²–˜âv£U“‹˜¾`Úºç&óÛà28w’e1Š­j?-žÑ–†šÕX:±Žã‘OžÇˉ.誙üŽE¥}ºÖ¼0ã*Þ«È. ½Æo(Ï}]j¡Žÿ™J3scÃ,öª¶^W§}w&§uv¾õ‘–÷y9*lLëfû‹ÏÎ4@+[cÉntÈhÄl^ÚcCtÝ´b¾iÙ,þñ=ɪº8sxcËÂÜZ6(ê'Åê’B´¼$&ýì è:ÑN‰Ô u{(˜ÜnmîN}qî|€;yÝw^C ¬«Ú+)]j[§©æ©ª!P‘m¢«šoý °î–¬©êz”¶¥Z”—šîÃÛ.§Ì“/I}¥òƒÏ…®eR¨€ÒÈr©;­ã_¤ÐÀ <ßdzCæCn]Û(—ùP9ú÷ýdyú«ÍÁ^D ¡ö9T¦™Ô|F·—„‹/±mn¦ î#¹á@£ PÙ1ÿG>õÔj\àÑ}aÙ–m¤0ì´‡…"-{—§ª<¶—Z,WȦ_ûC5£kfݦÊã¿Fdq¿tåÌ,õ)½¿ÐmöܱJýÐ2þÔDZ][Žé•>(˜D4]˜\öXœú¯¥±Ïè‹ÒWäñ Pßa %;騄[ØŠ@ÝÔ;¤ÄeU™ÉY>hMýaN¸üåå’GrÑýœ -#ÿqm´àZç—?"Ñ HcÒbšSêªÌ ÿž´€ŸQAŠé@Àñh+6¦]Ǹì>Úr>Bi¼¬ãF9,—+lguw£û¨R#žY Â+×ßλ¶ÓÿùüÉŸJÇct(ÜÖ¶_SUðº±Áê¬ $,ÓlúÓö¯íi¸ÑmáØàÔ ô÷¬pœ½Ž€K€xût‡„Åaóq9@c\vBLj. 
>8…FÀOÒ‰s~;4rE¢óøÂ4N`ãâˆ4 L˜.Õ,Rd"½MA#0"bKZ€‰N¶Mw8tö¬c•ÓöÛà}ʆ© 3‰†qœGô<ÉÃpá¼+ìâôVÆj{ìYEá½)`ø>ã{¹ù{À~bá×_n—‚–Ä·£¤BO¨&Ë^¹"Ì:BKI_²rÎ`¨×èÖ}FRüšj²ƒ×.ùDz›¡ñuÕ{SIðå•?_óÂýÌj„ƒ“³{ÝýÉÊ¡‰ûwX]Qz!¤]HÅdîƒ)JO4ÐKD–®]‚º9&°ƒ"[³ ¶&;ß“»¥ÈðˆÎkÉ]jh$‚iïÅÓ¬Ïá˜KÛÂ…á=J·›Êáëÿ1]AÕ¼§löʤº`™x¡kˬDwÊú9œ^9(º³ÒÙ.²€à@Ä9Vs\6vA›ŸÀ®á,£xDmò`ºw¦ÓêV±ôå5I¥YyE9e¹ªž´0÷côü–¤ÉÔçŒïòsVe‚PƼd¥—dÙ§§²Ý7•~q÷éî1œõî㪠Z+hË‹¨@†›Þ)¸'4…y¦]1™1˜ô-†Bòô¶ký$òHìœ5× m•^Ÿnb·Bï)•ÞY6=¿Fê¿ÅA¤ ËÔã„È´¼ö8‡FGþ¹Îý£,`#ý^5ý&l.½W÷|VS“î®»ÃtÑŒA”b;îBXŒ¬–b¸{üqS›§‰9°77–Ž¾Z­üöÏŒ©5~†eç öžÑó¿èEçå—(¸ä¤¹_Cç{ø]å–e)u ðñz[d.¤c0ch þ­ä‰bŒHsw Wy€H[؈á^üjóê‚ w7ìÁzê››º)4þÿë^Ò/sýÞøm’îk7qsÐBpw í* ^gkó]ÕPF…„ù0Îï­_;¦|^ÝqvF±_Õ\Ÿô÷(9_Ï(öwÁ:høöŽ t¿î(¼nûZ(H‹:ãÅì'o?ìtæp—ça`l‹ùº†ãPFKK]C¿dHuT--Ö£ ì«uý‹×× åŒÐÝSø§Økœ"Ò[A²,mûïV„ûæd˜¾¨±ƒd“o$mÏã ýŃ}O€ýëËm‚%ר× ^õ˶|o®;%é%üKÂy`51Þ'6<²çSºó¿kjsl×l9]EÝ“¢/ì*ôˆ -vË -D^N¨À«þ¡ÚÙdzó>ÚöæúôAÌe vOç®þäU¿#«†”·æzNP¯OþûnBæ†l΃¼7~mÂðppsbSœÅï/ÁuöxÌs»\ß•¯öûJò—·)¿Ü;dÐóåm Ê0ÄnlãÏÈN7 I鳊`’Í®Ë áˆunƒuǶ©ÌSˆÙëþk¼ãub®/Ÿ_NGPÔö¥Ï3Í:op B¯…DÖÐH ÃÌtǨî!E¯V€vOÃøwu´Qa(ƒ`Õpd ‹e'9#»J»;Ño›iáƒb„Ü -”­E׳õ»Ñ8+ ¾¾Lù^N’ïê› 6>?¿îÓâÔBR°ã÷=Êë=Á­Sré¾èÈt‡!ÚÌ?ÃØFzi¼R #©ô:ê„·fa [b¡9šaC–Ås#,xvÔë'jå¾WÇ­fßå¾É¢Ïº9{½T¤ÖŒYW§óõƒ³tDS\öêLï‡Øû;òqåÝi\Íj¨MY烈ùÛ=]ç°çvôq,`ã2¼ö®Cs¡ëŸŸ_Çq7ÉFpœ¼‘o:TžÔÌ–^Gó+GåªV±ô‰eÓUO›¥ þ8Oå]¥¨Ñ<. qCØ:ff%5û¬¶Ì¹Äµùÿ¨æÈXY¹-ä B¡/{«iå1òW“,%C€ã¼£ÂÉ,˜ÿOèâW«GÎ3ËÞ̧ㄟŸ_OobÕ¤ƒîä0ßRQNêãçSøêZkƒfµ¦woR!)‡NÕ—IUJ€®? ‚’HªÑw‹.¨ÇW×AþÇ_ÿü³hǽ?ŸßH<×ÑÎÙPÚ7ëYÇ^2ƒÅÜQvÓ‘—¨ ýÚZÎíZe°Nà €ì&0Ù{dÓe -õ]Mȳò)&5ÝÄ—¹Üqz˜¤½*м«èpëÎnžU­Òu°¿»Ô²ù~~~žh™ƒáé,£Ç{º?Ìkn+Ò=ÛóªêjΨB&òÏ`-=CƒB½Âôõµj.»¤Zîás ünû›#;žŽœd¦Û¹"ÝocS´jä¬å¸öXãÀ3ƒ,i%ø¢¾ŽúÏçú–}#ò–š–•çÅ-åƒ]4q[9ÂYMƒ¼n¦Ë` /7–;=´X -‚6 (ÙJßV‹á:Òi¸ŽâÚ,·Ù4½]yÔÈÚúø¥¢ØÃb½ÈÈ»6XÁ0T§CtóF‡Ñø>H„ñÑÿèÔN.S2;|?CF¦†‹çÈÒ÷q~úDAA‹1Oß¿{¥&¬eÖ›ðŒ"A×^Pnèã`Y‰š7Ðô „xi7NãëíúU=~ï©üIç /ôÍŒÚ]ÅÊQ‚Û«}pÍîœôgaß­Þ5ˆ¥»®ŸˆkÖ/t@x£c?Õùªä ×i_#‚Ù×Æ6ï,Œ„Ó¶…ùzš}íÉ!XðÓÌ;¢’hesëhó8‹}¯/ƒÐ©'F»HO¾˜Ðò¦Â#íÞ⑤]†b²9ð#¨ŠÏRsuAéeìù"ú£Ó žû bÛÑ]¸8{HU=U“¹šªö•^NX¼Ö¬…ét›»? júͺ÷˜ÌÁ±N¥éë|¥³dÙÏ×Í+ úp&ÕA.x§Røàœ·^ÇÐ¥9)æ|Œi)Ñ^ÐàÑí]úœ¬vt+4 —~ñš”Á¨'’õˆÝ…f-gd ˆkåZëÆ|†þƶ&nê+h"5.ºQp[v,Pø¾ÕYˆ2xÚ&ámíJY½ƒ@!øBrám# -kzýZ)s´²k)lNKW·èšL0í?™…q›„. 
-Äøxœ‚¨€T>AøâT¦"—B‹Aþ#+ã -"ZÍÞEÉ=üΗ…8ËQ3Q|y¬CÁÁÔýËTgŒ–‰;Ëññg¸­»·mTŽçµHVºÍiÆFÕR'Vá4ª¥9êíö®ú½QmtüïW~YÒ¤7}¡÷œ×†4‚¾p>®'²}Dó´Zı$Ð-à ^›B‘>µ’Ýá2åMw†¼ÉDkà!ðqBRd0ªŸ”‰_[pY•&•æ—lPãwÔ=¢4›>u‘iC5)3¯c!¬&Xן´²kFMÇ¥\´îpžS·xÐ7¥·“Š³W–\Lˆ ˜Mºª‹ ?ˆ[ìPÏŸ6ÄcÃÑ#4¬eB ûÇ(€»`Ó]{¦Çg»ö|ëOqX¨Wb)€ÎmúržHÖ‡ÄÉíER‰UOË\³# žö…Ì ZA*”]`r(.gMÌŽË+´NI·r*‡ÊàÎå¿1|ùãöx•-'R<$ÓþFµ -h[¥ËM’#0/ÿMy™$7–#AtߧÐ: ç©mÞÝþ< %?YVÖµaŠJŠˆðÑCâ4ß-pìå -ksÓ!Ba%žj¾‘¹mbÂŒï“ãЦ@Æ!ŽîרW³è€ -k¿ V|ŠÈB-uYך>2ŸíkªÜ¨e+{ÉýO ž1Œ9Lž‹sUË¢@Sò3ÕÀ:ëô¾öÎøaGO°XãÊæSÓHjfëh–á„'×<[Ê]ƒQšnŒ¤1§ïwÀøÅÎÄumNi3 -eÁøìPzÖ"lèçõsÞÿ€Ñ0ah« ;õb~[<¢­TÏNŠi'¦ì¬ ðì ==ëMletª£›´üDÍ”8o=çÆljÔ) –Œ}\˜!y+ïY| ZëBÁ¤Òu¥µ”óÓH·'GSrA´mG’M‹öqþõ9ľ¶ë!Cìçsˆ -•äe‰Rë¼È6¶m 9 Wÿ©JPŸÇ Ë{™°Ñ}GN’Œ!êùöÛ¯Ùg¦;€«@„)¦6K]éX#Gê±b "K¦ãÔÄ@ƒt®æÆ¢â!F»Øˆv¦l'GØÆ ])@¼1<¹=ã+Í¿V8t£ä6Öe¯$ƒ`ÐÙ¤¾Hw‹ =ñóšÔÀtº9-ž¿lÓy³.n>Ͳ!¸në­#ϪèÛJ¯ˆýƒ-,_w8LÚ8râ{ +˜IíQdk5~@ÊÐä>Jz¥-E™:À|5xR -Vs°aþºò¬àLìŸ'^C<®o]ùž}hµRž^d±ÛØ:˜w YFØlËáÏô%¿3EeMŸîŒ²é;—Ñ ïæZÖ›^Y"j<†ÐÐ|ߦ˜Ùš#ÑÌ`hý!ñKÙéÊÜÁ´á¼‰æIÛ7ŒŒÃ5¥îgç©_«YÛ˜+–îl 8ËE‰Pê4¬ê»øËÕ 'v0 -?¶õ›“¦E™’?ÌrçÇwO\©q;<Ùž-’‹Æ,ðú¯éZóöBƒ¸ŒŒ«Åß…X §ÞÚøßÖ®ìj›æ~¯1ä7A$âufoR~uô8¢aß'£ÿÒAè -OÍ6걡 -û/–ØÜKŽY® §ºÎçIÈ\Ã^l"È…«~µ¼0ÂØÜ |7ŒûT~æÝ)•¿8þÔ ðßtM9g¤ŸH&Áò)*Gq”®Õ‹àŇSo\„ü]ávSîÏm¿Eyç¾JJÞ¤xpÂIjˆŸ‡òóÒ l[#´Fò™IQçú°î’”MW¼ÃäpLÝö1µÙíš d:EÓàlÅáZËZVª ÉÞ$Ç.f¾×ç_YùY›.d1¸Ž¶çßÊïç=ÜÅ'ßR-QuéUúwÏõ–ŠbÖ Ôgºp‰7wi+ kç&>¡á|{Üö~ÿúå•T=Š3›.&õ§Ó`äîk¦ôÉ7œã:Ò;òßzÏ —º{ÜWêj‘_D¹&¼l‡XœÂú8 -ï)Ô¼'± -CûE-ÔË'¯¤ÔÊ/kM<|>ˆUÛJ¼xF‰/L³3GÈÂæôÚ§ñÎw%Pk…kç[”fÃw7Bß2îPÞtæ´ ¤’6‡:™~w9b Z„Îã†gßP>÷qÃAê]Üp/JmÿsCâÓéÛß` [MÁ -½„”¹¸µusV°ªã½¬Ã¦ZIC¶ä¦°³r2éæÉvÇ´¼oêjÛ©ü”%…/O4œàu„fO¡%"ÿ˜OwÓ@C“uÓÖËNrܘ­å×™_½2h¦{Tgn³GúåèÁ—yj&EÇzŸT•žŽò+ÏÈìë«E;×ÈŠÃaqÒÃà¼ÏÒ3(8ví~/2œåd!…®@CŠ$”õÑ uèJ¼ayÕAˆvõ.¢2”ØÆzõ’SdZlt‘Ã.¶éÿ¢ÝOŸ‰9¾~^?{˜¼R+ÜøÜÖ£Ûy‘aB)P.œE„.~„siA¿wBkíÍMïD -W¶Žæñ %:{w4(Ùv9ñC"¢îkûWG¥Ò¦fnN<ûÊ©}œ¤: (½Ó,ú=úÑv‰;ßÞƒ9|F–„¶×LeÊ‚{OÕ(¥‡ÎaþuKMšÅb±§="œ4›âçpM®”¶KïŸXŒ! vŒà}£ð'äʳÁ× da™…} öò¼¡ZolrÝöjÏË–ÄVòÕBʧ©>ÃÉŽc…¥ÍN¾©õ g–*³äop& Ôqtq-O‚ñ¯v>ï0Jñh6é -”Ëšæ×k÷ÌŽwÌt(5ÓеZR¬Ž}6½Ý)·Û~2JX¯÷j_i¶°õ£ÉìQÆ%ÖªbüX†ŽT{-C³+k>ô£´ŸšYmO 3MýB£e³õYåH;Ï\uƒu?7dÈ~u$’êóý£œaÛ*¦õ‰Ôü.ïu Ýi·4FòaµÍ±ýk¬ÞÎ=p¸H¬™ÞhzžöŠ?ÆC±¢²;äùTÏWLþˆÀÏØhq¨+ኂ®6ªÙ'¢Z‚ÑØaÒŠ#~ð+Ê !ú¹!‰v¿;àšÛÉg­œNl‹}m÷75=yŸ¯$èû…¦¾’’X†úG‰·æ«´cÃÍ»¬ä) ¤M¿YŒìêƒÐ=7FSý[ïÅð±Sx:ÔÝ™µc߆ëRºñ"=yrs°b©â«Š­uÛ6“†æp%¿qoñ™;Gw³…‡Y竃ôδqæÎrª€9B…òÞ-B+ @Ù‰’{ÇÊW!}†Ág…Ì­Zæ/¢±+\ÍuÔålü²hùJ7B„ŸÈÛg É/±,÷ÌãZÑžNñ§º|Ñ?¬9.l/†Üîâxæ~N=>Á©I‡èÝjºŽ®žôªßÑkC:¤®:Puëð·ï^1²”Qog.SÚ¬Èj¼%íæbbêNæî™ÍlwšWɽbDñûõm«Ë{HFKºWZü0CjÊgk„†í£KƒmM}¦®7óøH¦ñB`rô® †êòc™vçq’ùjojÓ©ëê˜[š3¹½-Û§¶·›í¯××÷›­T’µÙ}áçb‚–É9`²rmaö,­Ëø÷ZÙÔ8¬FP5äÕôg$)¥@BøínšJ”¿Á½t‡[/×ÞöÒ}§?¯ÿ÷éI5Û=p¸~€Êƒü9<‚› aið]fè@<@îN8æRÔ4©aAÒ»½¯­MÞ¹ -Bä‘¥qðMŠŸ™‚‚¢†L‡W“¬¥´R9³ÂõÐÌN†<âå<|¥K­“ÔÓ†íþÏëû¬&Qv0,íƒ#¾Ï3j ¯V]$)É„“Ì‹šAv©:ÝDZâBî#ã½.•<­š^»\çPÃ2¾“':AûdOª…ÿtnÓ=v÷ -†ÿQ^&I’ÛJÝÿSÔRæóümß-ÈΡÍdÒ†U`‚d Œ-¯ë÷…ÏbÙ­!U ¼þ­b‰U¿"(øhô˜éìÔÚnbI±ʼn4®›N¾Í-2tAM&^ -Ã4L?µ&­úÙTÃP5¼0çuý>YS3 ‰Jªz}ò-)¤ªk¶«Ã¯©¡ÃGؘÒ:D‹ùEÒ·5ÆB©»9éÑÑÌI‘Gè}˜z5Šz¾”cOkaÛÿÔYéWÍÇîêðñ®ŠF#t_fK÷ÖQÏŒ¾ÒO[M›¢^[*ja‘vs¤¶"˜u·BÖ]Žo ˆºFÿp‹ÂféêÑ{í*r;øµ ¥+®@¯×Ò,SQÚáÍ”ãu›äêJŽ«G/ÑRðgä!åúx„ -ó£‹ù-¬Ó£¥‹Òª ÓeKùžëèn&­Ò¯›XžÊƒÇnuÔOgÕøÑš£{4ƒ4_óÀ<ÚV¸¢i½³×‹ ÍÃS¡¿ÒЃw­Â·bí¤UjÊœ5Ò³s¶Œ$µ>B -GK¾t„Z)?䯱©çî°Ý”$ÎÝä Mš¶7À­3Ο´€o„¤eYÃKõiŒT™¬Ø¬šÌ‘‘N%C¹$X§zH¥ -š›u—rØ8>ôRÁ–ê¾0¤o¤1·aY[‹Ž;g:È[bÄ]@ÉL-)ø{²ò64.å Ð ‘)‰`šmËú¶ÍlLª²‘¶ )jÜé8æQGõ¬p –Çc×á*&̯—ú”IÅi qð™òÛúêXrF\Y „£¥…L½„Ò| øÛ$«§ä{wbÑ‚Þ8wXý•T§VŸÅ²5¥ç¸Ú «~ZbT”3Jë1¬êët^ªmìi8ºßY#/ÃBFóm-HBõC…Ñ}žmÚ¢6¦ÐÙ©mÒàÚcŠ6¸×³“¾ `¯Ï¨aÕ#þU-Ñ ”i¦*-E}$›‘<…r,ú@RÍç -ìNœ'n?ésÇ/0( … ¨#c­† ¯Ê¥‘ MJH)¦¦°œ§ ›2mx¼-¹SÑчêÿÒSoÔIt9ß„Ð9Ì’˜±7†œËj»©•Âî3P·ÇcÇ·¯<{¤‘ç¡‘–TÑ£Cƒæܨ»xÐ…¥pl hdýE&hƒe©ŽyZy¡`´ñ]þ™âȲ/ÚÕ ”/ ›R7Ñ`úÆð€—ÙÔzVX:käœÖc(Ûl¹ õúÒH%Ø=+cd¿z&V·Øö–Ó?‰¼[ ¼®ß4(#¥A¸ªÁÉé‹[&*ƒRÈcÙ©È{ý¨*ò-Ž0'z­©Vë³&Çè 
sÕ:K[iK`%“:lë˜:ç°ÏÈ­‹oª1ÔpZ¡qúµ/+a”AîUâé4Z@|¢Î"³”ºV½h¨Z¬£˜Övõ¯S°I¸‹0«©•®mPgÁ—¥%pnhR—­K­… -mÓ¥æjX.ZDOuà!Ñz%.¬­ý«›ýt2ã¾eFw‹ÓøÍ8±5B‹bÔ}/s,Rúºþû2kvèßp倞“NW5'£ìD"ªqrÄÝ‹#•cRŠÈ â1´´Þ-t[ñ-ÎtÚ,3§~s!(ÝiëÃøv@;¡ -ÎTL8Š $š"ô$À+óUÊäšõ¦cšÌL“^/‘u¥òÍ3­öê)ö÷ò8ž´?+)ÂGõð©WŒÛ!¶pÓ«Ë:?•'΃kõoÖ¢£tâ]úÚ?p‡ ,I¾†<˜w¤\W§þ¨Õ± U1˜±X¦‘håÚ'ŽåÅ"Eœ”dYÇ2°ÙÖ=Ü{ Á€”ÞÆLÐ{ãòˆEÎ~òôqÔȦä©0ÀÿGk¶óó~ý¶zm?J–"QóÌø²~uX¨”m Ú­@&ŒÔ ðv‘>ašbím¶#â掩žRÖ6|ÔWyÑOâ w2ˆJßñM h…P±$9 x‘÷wæ@ærÄÆ öj[0ðö†Â5°º§¥Þ2ÇÏ— ÎpÆå„Âf¤$ú^FÒä6õ±ýøAN÷ æß ü˜(*gº¥ëœbIRv ©)@>.Z[«•Ò¡Çn°œ£Ð;1 †ë¹ä]©>r»å”²>~òa ¸IÄzùL:Ay±ü÷,¹Š¬§@> •"¿³(ˆóvÙ«t/Û­,qFéñY2ðƒdŒ°,'ë·GB¼SÙkÒ\”*-A -Qr¡3Å'¬ìÃt¼®ÀJÍ©]K@ -y½ëàfù\·§8-ˆ…¥3P®S$9š9¹¼‰8K3bë–¥áàÅ·ÆÓPÐ`~­" ‹_½¸Ù:l~©6š€¢¾s«aYŒÜ¸šzªN½£(Kýodû>àS¤.q²Ê[ˆcé ¡Zõó²t¡ŽŽÐmX•¥ - ŒˆÞ?­ßæ“çÛ^_©·h¶v™Ó‹Qgö™).m…>‚êK«Ä!a¦ÕY¨-0•ë -ä÷ð/å€Çñ%éèß÷ëw¦õQk2EHÐßáªÛm -Áe´Ùáô§8Ü­ŽirÂÐAøŽ -Ãöò¤ABèÿÿ_*ôb­¦`{ÚñÅJ?¯ËW¤² Bsµ$^ ÎýÙÓŧžttB¡u“ ¾¤¾î”e¹ðéDS›³©´˜†bî‹ãñQ§Ž4×@†Î¹‡=c% J4ÃWœLÉŠ!Äèhõ°Ká”Jt>…{#®÷ñ-dÈw9öB'*3 eH2 핧)†0|eéŒÀÕô¿Œ¾–5M¨®ƒ`o·…T…€ì:s¹:¨ÃÎWŒ1¢{Al_zµmÌ⟿íÚàŽˆBåEè̊݃æ²-Ÿˆ¢”ÔoëîtÌÖ -Pq•Œ£“5ß«B(T ÿGgRN¯•QõJ´ÍÇÆ]™6 à'ãèòUq+‹0í süF胴±Ö^L‰}¸úôPëÁdAÌ!î•›&Xƒ\ó¬Â£Fàð\ äuó/{Š7Si—áÓ÷òdÀÂÁÁ9£À5z‚¡ÚYÆÝs­ -+:³þÑÅ"ƒzHÍ‘cf6lÃç7Àácþësƒ’¤`×·‚®_ÿ~ı6³MxÆ›Ó4¡lÉÊÍx\iÀcqøqu‚eTâ`»j4LQÊ–e·b6#H¡ŸU3Z¾2hl'=¯”å’˜G¿«Ÿ5B\SKUÃ)-<3HôNJÝþüù<¦ºœÙG%sµ©×Sc„4>XéÛôTÔÉSžsäq¨‡ië4¬Åx|Ž¡´ÅQ;ì2%»(òvŸHM1µµS]‹F{á8Ó"VǪ«¶]ûÏ?TtWº‚à«*î´m¿7|_õëõïçÁ…š»Êõ¶²q>H¼R_üåù´±þXÈ¥‚ò ÇA‘q¶£r¯ÙÖYœºË¢~׳‡ÜááÝÓífÃd¤)—oüs Èþþ[ºè˜}Õs¾“¢*-o‹6^oýgœ0-„xÜ lTHŸ–3ðºÙÏ°$Õ‡x#2~—~ë·~¹¸3ð ­ï®°!‹Ç_X<#}ùUŽ2ÅÊà]´gsãËÇ0!Ý8Â9-m»è9à7{™Ëž^wK|ÕÏH-;&+ò¢„‹Ž9ŒEV¦GFµÒ²Ø@ÙgD›öõý¨ áÈïÇÝ᪈ÑÆTüCŸ–àã¤ó–äù´k =™0éê6¼Þz„x`úܹ‘@ -Æ}Þ8ÍJ/Áiì^tòØ9–¨Ÿõi`W-Ù¿ÌZî²¹…k´™5ÝÖuÚ®µÙc¢£s¿Zž§‹JG@ض˄éõÆð8ýi2­áqÎ섘·©Çï³ï«´•‰”ö¾‰Wggƒ—ÕRcEm#žz,­%×V¯}-ÛAý’ ,9A¡obÊùSß ¦ïûfô\y6^]m²m÷´ÁF††uþ/2;……õ)©á,ÜLGÇ ][š7´Ðë:åØs›Î?<@Á=§ÌØÖ|ÆK ý$·›qfëypVºIâŒUB¨šêåµóð¡NêæØ”3…h©ÞkKnÿ -çÿžW‹^Ž0içòâ„ÚBEá[ª²¯¦/‹—,{ô9ôœŠ×g³ò©Ó‰‚¢‚P ‘_~PݯáÎc‡¿¾ØžËø«‹kËœN,¸\ÐWV£Å5Q=ر (3³]Wp\ýx©+Z‘ΨŠ¦Ç€•€gà( I=§ÜÒ1`·ž -vRÆ^Uí`ñ^|»ÕP}1 •;³2¹Þ•Ðñ®×. 
HüRB4§/CñŠÇßaGÜÜ¿ÅFâA«¦GààãA¯>-µTç­ÑD&ñb„Jª12Öeتh®~hné>ú¡C]›I1´©¦ œcq4”×w.ùVÞÙŠ Õ¤„k¨ðê— ±gžzFMM¬e?{1nj‚,q²Kc ó‰2Ó¦ýz„}|jz9¶DÙ¹^ö˜)ʹÖ3hIüsrˆ‰Ëx½P Ê×Ói²ÇöZ͔˙ôxËTk¯97"62ÀDÆwfZ‰ÃtûAƒž¨û'âݽª$f°ëZ¯¹dñ2;èøqWέOL ø†3™g(ø'~NJ-/L6 Ç­·–ÕÍν­=_¹(ÁþºŠþ>ãÅ…µÅ »Mot¨|ñ‰!a½ª,äÓ}×Ýj U:³>(o§"ÄD4E5•ÑxCÑÑåC=õéâV„q«{Š”©i_ ’ÓZiëÞ™š¦Ü·1íIƒŠ–*Unèê.¦‘³©|µ]âÃc÷ˆ bj p÷åOèžOø(é¹Î§”–…—'&æN¼9º*Õ$Änêry÷Λvz¡7?—ñŽjÒr ¤R‘­åÍ5×¾‚Àõøf»6…±-õ³™x™HÍ¥Á©`B§ºÈ–""O8¦¼î¸t#Mk¿(Åž˜è£Aªé¹EçðÕ²Dáç°—7ùj”^åÔüûYAÌ"S’sá¸7˨‡LT_8 +ZÃeDÖFÏ‚&ÞÔ¾q—bZ ôŽº\ʲL,”3 ¥”[tG—¶j¶ƒqT'`X#õ€Ñàýƒã3f 7áÏŸÝûz¡Ó)Z™ÛÜŽuÊ&\Ä$æ²;a·ÚÙ¨¥SW€é|`UB9< 2RmëÔv{†ÙÍËê•b¨`(d&œFtzF‰t>ÇãtÏeU_©ã&ˆ€N+1]/zœ·¤`<Ós‹b¶wPö¹Ö]ÆÇ~>C k¾WÁºÜ\†zF¸_ìpt„¤Pcýæiu Ü»Áž§(›jN \ÊŽ¢5íRZ2¶kCh'a_x,¥ wo7q•cʟʲí*ÇHz:¦¾£ß‡¼ƒ ÅáW¿Áò$œkFGðO­!uA4 ùv§†euç9Š´£ë—T}¯švÄçïú­¸Œj>Í=:m«ïƒ“ÄD™æ÷¤±‹Â¦Þ(HÕ˜§×¾ž*º´§¹¿µGèX2˜ÕN\Χ"p"^¼®ªFÅõiAT‰‚2é9‰õ=ÊC°òŒEo< 0ÿj¥Åº• H,3WT«bdƒu7U}±`+esøêÌÄw3  hØS\s8˜š¶§9ÀWfâûÓ³8¹$ ˆ% lZ¶a|gN²«Öm‰|†Éiß–í·ìµ‰Î™“±Åÿ»Ê)§np@2~VQqK“£z ´Îhó߷ЬT9âÎ5ˆ;ÂCÍÀ)vìl§dXb1kçÙUrÕ;É*såô_ô´b݈¨.ؤÄÍ:; ~óÕéáQIH”«™-HRa¹~:He -×–×nä°8Rć2š’p”ÞRáÄ¥Ý0ºÞ|¯Õ‘‘¨¢qª÷>[Æ ëט|áµåyëÞ‡›ãP¨Œ[Fµfg½ç³»ã»ç»ËÓÛÿ¤ÔζòxYùÁœ«´¥9­`¶4†ŽÄå~¯Yí‰ÅØvðþ,^ÆÌãŽôôÇðíáÂRÙ¾ê-Ôè±*묿T.VoÞî©q¿úò¦F–‡¿Lܨ©ö½Ü!¼†‰R®›z:Ë9ÐbðÆ"±­J¤˜ÚËŠq˜ -:³Ò=5êÒWø´5§«1ñáºëf7ËFŸÇî(]a½]Ö]óŽÉ¼BO5ŸmöT]²ªØmCþ‰híZ=@á¼íºyN'®͈ÝßCÀÀ: -×ÆtÓ]IÍ•¤"½Ã£ÂÚiGª¿äWôA£3¨oðôÕì^ ¼A|ðݪ´#ظê‚ÿQ^nÉä:ÝŠ7à’àsOwÿß7O‚v«dGLôÇhº¬’T™tSP*”{…°J:ñ½´ÚÃMú~ï¯#i"€‘ÝaÓÑ°¡bž®‰Št’~˜yTq\k¦!ÃVºûØh›Õ.Ù.ÇVQËyµ ÌZ¹Û37ÒÉa·tèÆôc¹ÙmÏʆ¯Ÿ¬¶3ß]ÇÎ|üÁ弯Û>1w²ÑAÐåJ·òDηŠ² }¿¶ß˜¶j´¨C/þëó¬ t ÔÈf¥BÝk²Þ0æöÒÉZòŸ ŒÙ >á$#,ϼĮ?¡ùåŽwGõŽ[=ŽŸÌ\»+íñ꺊‹ÓëûI=žÌ?‘ڠ耭Þôr¡\i #·ÿ×%­áôóñý:~ É -ÚJ‰’‹·¥ÀdWØ)ZÖÀ죸%°Åµ ±ÜÝö¦8íÁuÕ˜Ð{·!­oRº [ø¾9ò'zði—î^L ‚›Ÿ5™³rœ¶0õ¥§R™ÆL>ÀŠMOC !°/8¡`ÆY&³Í#òw;Û¨Ïtº== £±ÖS—Û¡³'ÝÞë—´ë¹T”-¥ÄcßHs¤°‡CY‡æ¹2W‡m¿Õ”‡â…JUã¦dcÝüUÂ(Ã@n¶¢µ‡),wLUi‘ëæPÒD:q;¦;9’Ã#çtŸçi»Áðïë/§•aƒýuÄÓgÆÌÇÉôÞrMu8³¦ÏTóµ]äYIÑHC'æ!x¾WFv×LëõôÅN{¦ÙŠŸøñÀ¹Ê}ö4T¾t‡¢É¾LaË´A p“îm]ä\–tÐC9³/§²þjE -ä3ÛÕW}¥(¶Ÿg“ÜëšüÝË™¾ìvk%<›Ã $9ÝSiÖ>t¬ õ£~|}þðJ^fH•MÅæ#áK·$„ánãË1ÓCYm“BÛ i|TmX…<Œn»LO‹z¨VÀ㜙 ^>´Š-¸Éà=¿ùÁI»ü÷ž©ÆSå—UqÈñhZ¸dÍ1±ü<®S]~Üq÷«Ml%ŒHB¸SÀOä@0ªº'xyÍ<#&ðD¬gv°o,¦îàOT†VÎûø¦qœo|;lkÄí Á¡÷:Uö˜/Š%]í28÷aÉÚæäâFá ±â ì#žv"làË Ù3Ž{†©Ø¹g¦çpÄÛSoc–#[–ñó¡õmÌî9Ÿê¯[#mæë1 lü³ÂÁ¨à™öº†@”’4ÞÕµÆÇöÛþ§t&uÑh`Ôñ¤„Êäù(M_zæ(’Âcd=Xšè<‘<çôˆí¯kL~åÈ&sºœ¼ó;'nʃWn ‘Y鎭KJvó¦`è`5nõLÍNXü%4ñ[ÍNÙ'ŽŸ{jŸýÍÎõÁâè—ÉŠŽæK°È´‡†âø“Ô9—D€€ÈÝLû¹¬5õJgàçåhôÈÊòxÑ¡jLz¼ï¢ÖTö«GaBÞi}¯‹C†;ô,ÝÊ›VO«_ø™ö«GÜØz·»7J͇:R/·5_\D*Õï‹l€ ã=u²Â·Îõ× ·™U6œ‡çòËJH«ÆÕ·2¬eMˆ?À^à±e§`0Š¥‘°Ö,= ßÄÌLã9á{WX¯­ö¼·fÖ¬°·x—{=^Ølîë/f£õ ¨¥k¾Î3¦tŠÕPݦPKV^c#ÍøÉ“6Ä„ ö¯‹‰×l¾ŠÉÅ<9fìH3Ÿ‡Ð{a­· ïöâÞq[BWqßs+ãßXÒOÊ-÷ïëSKHò,qey=uNšíDO”4~úü&wý/Öæ•]éó©…¹-ËÛ—Ò®£Í¨¹ lF·%1ÖÊì†çÕÞî÷XæªoÇ[ŽÂåʬø'ÌE¸Ù¢â´°yB°oÐZ¸ºÜ~¼Y¬ð61±Î8 àk¸!ÓåòùÛI·OsÝyu®ËVõcÏm«ìH½ŒÇ«Ïî—öl«Ox,7’{EA­òWûÙ²âº\æÄú,˧£KB?[/ò¸³¯èK@!y٘¥Ý{™dÞ°NmBU\…¹hÔ§û C2¦‰KýRèÕq‡:–Lû<âzƒÝˆ Su>Jsè"¤}ÏébŒ?É®Õ -´ Ì(•¥2ÒÙÙPMTØ~Ø;ëÊí‰ÜW­÷—³³ªèN÷îdœþD’m,¼¯ñ £Ë«M¾ë;Ú|òÜ!vºûzyl9?3±wÃ"z¢EühP1æ%©“µð êp) -4mKf=aÕ…«OôqoG±º.NlÒTÊé«‹/Åø¢_m‰„se°ìöeÙ -hÛŠµDÿ"-Ex1ì/Eù÷骚ӌDk·lYfSÌÚ·Å^ëÔ·æ4˜ÉYÅ÷Ô’ŽZÎ}Òð»5%™E¦ó°o²÷õ§ÇAì³°^Ð6…ê%“¿»uíl.^2n4ãØyÂå¹ë®è,–dœ|ÚㆢՔp÷uk>&‚Šź¬9 -c\9É—X0ºÌ•¤´§‰sSa=ü?Ýå’Þ8’áýœÂèúÈ÷yf[˜ÛOüªZ¶º7’)“ÉL ¯úûÇåY„#è_ù«œßrÀp iéýƒ1ºjòÞ³•ôvÇ íS\*X:fi–«X/ˆH0Ña…Ñ6ü+8ו}U 8FȘˆiK%ËGÅõµŽ¿PÀØ¿6¬¬«(‡À£[åIш_Þl»¼èŽ.ºõüºl¯áŠÅ ˆ®â8Sz‚tëTÃ66U;©R¤ŸË?eÝG?t\/!¯¯=,ݬ ]—Ø À®³; -§BÈÇQ™žfj)r#Szõš^Èõ×D™ûŽøhio ‚ ¥×÷¾åU½¤~÷¯ÊiM¡l©_&g§²ã«{*:õR)M]¢SˆP7mËC“úG†Ñ|¥Â‚£+Ñ5e@ŸÆçPCTxΰi|¯mh/0ð³nÇO™/*£ì4±Ä`E-ÅìlS°È^HdFÔád•}po®n º†Jmî±+iÓ3UÞÂ`Úll,£‡Ô¥u`&]Í -‚³ßº71)0o‚%|Œå²ØÉzÛÎaЯ‡™CŒ)4Œa^ÆŠ0*üo;iBïŽ_€wò_3 %à"ìÎe$\U$s•†œÐßUoiç±CëfýX2¢Ë»\Ÿ¥›×Ûøks3R·ºåX5Ý~(k±Ñ_é½É_´ê'«´ËýÜ0Z/VÅÎÎõƒ½‚ºh†—+(‰ÄšÄƒÈXbá°jºÚ*3ÔprI÷¨PLÞj‹¸äcÁÌ 
-G˜Åa8éfÕ=̨ÎS¤Šò¥e8€»~²`Ÿö™{øk¸qÚíwŸC½›c#Üø²4&´²Ä/¹ç·ÉTSÒ\)ü•¼vW¨ÁÏôؽ®–gS’Ôþ' ½‡Ñ‚¬·óô¬â ïÛÖ×¾‰°áýǵÕdÚ¥ù$½c/‡"ÂÂÚFé·ƒ&³=üy?g^؉ w¶ùNç5h‡Vƒ1,îë؃ï -—=*Göa -›áÎÍV€å·Hs–ëÇJÎ)ù­ô»„ÅE ãÌ<"{QÂeáé(ïg]ö^>ñRþá¬jΨP£-Œ÷³ÓÅlÕJ¥bcšVÚ©Y. sÊòv<s0ë»—Y`Ä˦”ïÌÇJw3oáx_+ÅÉ×)=»îSëg”H<&jÙgN߯!X…ï´6Ýôï)o|Æ<¡÷Ê4E;D£x‡9عåWI“ýºetžRÃÞ¢´ª…ÕÈrv¶mïÚ6M}›Å›ãCÞížvå¶(/5Ëø9̉«ÎsFOŽF%½x–‡sÊ(·§êÖºÒ!STTp²øž2à‚ÛivåíI*#“eÅìîŽ[¾üSƒgŒ ¨\³eø!¹ûPÞ]Ÿ†]T¢À•-;÷›Ú¤_\Ûx2ÙJ²ýñ Ýï'¸í1kst`åZæÞ3 ¡r-;.Ç-›Òªiª]švG+ºv`ÕKŠÌÝQ´Ó«¾k›Ö4 »¹ÏÐýWZ|ÖBö ‰,âÅ£õî^[%F“ËUŒØ–òUÁiÍêI?!¬¦«Û o·pqÊE­þÌɵl;ZÈ‹aéÆü0Xu§æsŒ - Zè·iê0U·za‹s=¿Í&é†Oå’áI,Ñ.Íq"ü,Q\êü½J>yƒo²D -Ž?¥šd<~O’Qª0 nÍnÅM"èl4þÄàám§@¬³綶 h*”ÅwÉÐÌ'Iž˜â¥Ihdv:‹‘»IÌX3ú$…e¢Çšv öÞ6læ-¹CmãЋ-Ç }§pÙLå÷sœPÈHkiG)ü{œ ñË‹oqµæ&I‚Ä×l`蛞ÅWuùNº:œ:&¦3ë6†£<¡ÚŠe)éá¬íå.ª1ä•î–ãîõý<}p”nOŠÓýy #Ætd“M‚ñηŽ/;ù^ÚËÈSOƒÆ‘êpš4fpUMaŽ -;…ÁÒj€3ŸÚŒë“›ËJ‚ƒ‹†´«6Ë=º`#0ZÀLõ•6¹»Ô/,‘ÿýÏ4·”DCžÄ³)?†,« ­ÅyÍ]×x¥o:e¿ü‘YÆpÑŸÏý™1ÄtSgÏZ;Pž?{î/R!¼þÊX€ ûW\ sóôýUÂ|òNð7í%é~*¤-Wý½ÓPàˆ38bÌð7Î ‹âéÀº°f¯¥b°<"Fü_HÁ·£ë\8¥œ_õ÷‡à‘½ÊRjñ±Öû„’KšÃ‘p¨l×çò«¨HÉÝš¦tÀ¥cƒ'6nADÙ±!£š;Ñ4e.«ÒmNÿ¤åû,#¥Œô›`P^£’˜³#Ë`V„UOµnbY|{ì¢{ôvóÀáË¿>Ë0ƒ´uI™R^ð7”Àº¥$þöuøx†Ä1$´‹àèjÚãͲÏêzZ!±éìÔÜâï=ªîpŠÙ?áÙ%0ѶªŽ¡}âa\®>lË-¿sx²nëÔLW/¯;ÔÓg*ÛºhÌp†ô¦ííÅKH¤4.«Ú‚¹ë€m>ëYíá §¡%ù#׸ø¹œn>ŠÒ†€çäšuœïN¿•4Ž"¹4q`#uaƒ®é«œª[|\¢òO³ßŠb<ž0¸6ü¤|¬n ´ïî¨FëùœŸMRTZœÁhç[S(À(æ\\Ãu…ÒcéJS¥UOôY30—c#§Õ bþ?åå’ÝXŽÑy¯ÂKàwTƒÞÿ¸ãCÛzRžÓYƒ|NÙE‚@ÄyæOЄ  #±nBê²z— “ún Žåyx»¹“¨nôýxQ§óx@âh¯aƒ—­9‹F$^Å^:UâìúÝõM]4[Š–Sç˜6“Û]ÍY]œK‹íkQë¹é5Øä‹s²Öd¼sõã=Wkõz%¥£R3æÝ7Ig¤¬ÛJx}Êí×üž[è^=Έ°Ð,$¸¶qŸû•™™R‡Œáé -Aú\âÜ´6Þè…1›å9UVâ°Þ>u}Ks•ßÁ(ˆ…"x\bÍþØ"ìèxö›td|45Ø>7° ï•#îÚúSÅÐÙ¶Ë (€\ -&%ú†mLÁ8‹Ý4¼t¦Rãi'HŠBrT|ÿžN?ìÜÛR4š1ôbi3E4é}â’§'º¼Mæ@Õw/òA‰lÙ’›œ±#ìA¼ñpÎdQyòq§©s†‡X~‰1ªž?{\wôyÑ5¦gZy”³AÙYIw i¡ã}±ž°”¶~ŸïŠëk&¿ñB@SöKm -™ïëPX½Æ¾¤bÙ’Ž×‰mØ×µmêô£«Ä,齓«âõ–q¹Îø •Æ±$L0Ã1ì£Ti†a˜(ºd~Øyù+•ç’¥áÌt ÖxKoËhðó|¯ÇÖÅÀ©;_ôRÓ’ìPJu¢ÓÙ‚g]bRk‚õ ;ò²šv45R˜5þ¼WÝÔÔì$1D"Õ¤§¬$rœe°c)ÇgÍ÷̧Ï®ßþ<ÿþ„\ym‹Š‰Þ?ìtÓ îêñö¨7ëlˆÍ&4ÉAQÐ MzÅ9ìÍRÛÇdS¬tÚÅ @¬Üts"R™ž;Ù_qõq¸¯ßÇÇÉ00w„âÁ'Úyˆ6er‡$› ×'ŸÜF¿2ö"àä9¾» ›šÃZN­dP(Œ7jHª8|Ç<¾š¯6ºí܇mNg½uä×÷¿÷ó$Ì¢`!+ƒHñ8OWø)Kõ(B«“xšJ:…¡o™¥D”9Q2R¨·¯}‹;Éz±Š˜Ãx˼³·K´õ2T¶mô"e·)Œ™u¯ ãÞ‘ÖóÈtzôöïÉ_/.9ÓÐýA@ý5 žTG¶Á3BÚ¢EˆÆšòˆ”°yú¬Çpu¥ †¬¬®Ç`˵òjÛ€Cõ|Zè×ïãÃ?ñ½ BŒ €2d=üä ! 
]7)^àdÑ÷%Ìs;þj:o“ÔæL^”ð<ÕKÇÝtþ£í`FŒwN_íÒÝš K‡S‹h¬›céŠÔ^1ÁþoHkºü©ÌS‡a"r]Ša&uâ½ä§ú¾k+&ü>?:ùAY/Œ>eðUs.ç ÉKM2U›µÛ¸¸þ¹ÚN¸<¬@L•ÕÝÑ|Á8‡¸_º/G0ºlcƒ¯m÷FAX~3› -gn=þr6qñ¸è=¾âˆÍ“X¬Fk<Üaçcu¦Æ%ŽÒ¦d¡{+WÒæ‡hi¬á(Öcîî®H|Ù#Wå|è ù߃l¡<{X×ë¹I%Žê;‹£úsKÝ1”wÄÝ”W÷0»Îb¹˜÷Ú#7¬£¡Ñ„±Þ¶R'ZocÍ‡à ¨}áÍ2r“í!ÖSz©5BÈÍbRpî#àìTS|«®ð9Á™ÇwÏ;ož\í­ô~ŽàP,T9)¥½8ŒqÏ©ûl‡©‹D¹iù8ÇS¥RÝ}þñimrM‰deü²p¬˜Ê4—‡ý]?šlA&Sþ™mzÙ µZjȽœuˆI\»$+½DùÍ’ÝKv®Þ|s$TtÛ”,[³¡}n œ’t‡éÛÐTyµ{`S”ôv+Årì$Îyu(âà„!hÂ(ñ°×Œ˜›³ÐþíýÕÂ`ÔÓÙ¹„ $6Eš$ª»Å B’·Xí €Spw·Bß/ý‹‹¥:;ÃáµÄ„`É¿_8Vχ ¥Ýª: -A?¯-ˆP¾€èx ²º9ý`;A}âËp2þ&†“ÐLIc°5 ßêév5ýúü“K4íUGô8¯¨‰m6àÃË×4µæ¥ëkrɾsi=d“î8tœ@fžm£õþ¥Კǒõdk/$â?‘j)G-ÿÙíÃô#íN³ö§Rëîi@ j9­D£ºý<£¯HdƒZšê~´¬ÍµšázyŒ"#µ6£•ÉdëPf[¾ÞÅ»Ów7“ýg¿ÏbÎÊô "+#lO!g,ÏCë6ZÅÙy"¥¸µaÎú!¿ 70vp2|˜d^íff”Å®jº’„f&¥(Ý‘¸`®ÚYV‡îÀ3nQŽ·Æ8á6½Lɦ$ž–ð$á¹ ;]À®D›.†Ûrftn"œ‚¼®Wñc‡$‹å«æ«VŸKÐ#ÔeÒz5£råhmš`¹±¢Ç¶î×°M…»MÓ‹"ªãjJK6 j…omZ_Ó`‹õ‚žÛôuo-ʈ¤°™—fuÆ0…,9ÃAŠÁ=Ó' -]‘sø_aöW;QÇüi$G‚¥W:]ŒH׈Çò¦Bº ·êÌ€Šˆ# xJ/Ù‹æôfW@1`÷Àl²oŽq ]Íe1R„ôDáFxëÝuS߀³ìˆ(ßX®«‹êÑ}:\Þ?mý]Ãz`‹ßŸÔ\(hÆ} DË«—‡Ÿx¶ÕýLÜ(¬cÃÁ$ÜFIÍ['KÒ!-üÛuȇTªBû*¬ !Yó!û*\¨2¶ð²M£]5oæ{:*<YŠ;‰ €Ë#ÙD¾™™ýµ‰MÄâ=Ý$•Dàulw„7󰬑féxŒfÅÖÿGGä4/õ@²„ :¥†fz»Z£•/£ vm&–Ÿ]£´ßˆC]”ôÒ·V ,ß3¡¡õâa³Ù“î¦Ó:>\&‘<¤×ééFî`‡ã]‰i×S&OlMÛ~‹áó×ëçÿ -ÀA¤m0„rr\ä…ºX@µ`ÏV[WÂí•`O2×rqHfÓT:dY -’º¼Ò3!²6œëCÚ+{û8ÌëF±°pR§àº*ÕÉ>é‹ÔfšêÊ¥›_; w”ÿ4c/Ó^qü+ÄÌ.±S•b gu\“³Y=ý ”ðÿAFÑt­"— U´6dQÞ-ëÔy'ÈŠÕ^ÀÛn¸U7 *OË»tìb£¤žòÃ;Õv½M¨£¿>|Q´];xÄvŠœŽdºÝ -8G ùýã~þ×"âÚ\ûÞ¾hœÖàÉ'; -Nwñ±±6;ölã,ˆôÛràô¤‰4ã¯gÊbç=Ÿ;ÉV®‰Õ¥Ê9:R;Ü4 - QvßÞܫ™{…|?!:\·$3nUà[VPŠÅf®£eï qeê*ÙÃ¥²³ËÍíŒG_кµ`lÖž*8l)]½ Oã߉Tòp*efuBÔq~Ý7ªâÊ¿y§]ŽwÈ&K¾?œyKOms¤Ê½™A*5JGB¶³ç4-®f–ï–¯ÚnèQ•z@KE±ÈωçšÇÊÀX)dÓ]¶³øú¾ø:ë¯×Ï~³½k‹W¿9Äb»0au¶UÈfÐp-ÎNYN‚(ù{ƒJré b<ó¸žAü2œ§µË¶„Ré°*¼Þˬ´»DÏ¡ÑYn&SL*óÒcw}?» b¿??ÏîˆêÕU¿¬þ&7Óþ>m0HÕ-” nog‹VÉCþiÅâØÉ£0ï©œëÀBTï9çê7ÅÎß-¥y9N¸…Û…ã÷ñ¡!‡³øþüØ“NA¯GíºÔŒÈö -È>XÙ‰Dà D³®›êÛ= Ðsí¹D-ªÌ4LBZiQ'ÆÍÚé‚×Ó`ˆÁPÖMæ| ;ËVÒïëªâlwZ#†Á}€ÙßF5híóù£∄2É#¹Ô¤7kFô:O^óy¯"Ï°RžG›´Ð^‚€˜ˆd§;àÿ¡i+c+©µCØ>*\H Ë åªØS§S{5¿¬Â\ˆ5ãÞ"©VF-í§Tk/Ëf×bXTÛ8Ub|v³–"æPâ%¾üÀìõÉü‘µ¾•MoV‡6j²¤!€å³lÝ™†rmc¹lÁÑ£¾ùzÛ7÷f†°Ò߀A8†ŸŽ¨am¬GêQìÕ‹7þò™ì€Úö:šà%ÖÒ~ÏÁì7úš‰BÓü¿¿Ø7 -—m\æ´G¬"Ô=«öÇéÉm!4¼né^sÂçç÷Miž‚4S†zF¼SªØ£—4üŠm8K:"Z9Çê[‹À¦Žc‚"‚IÈ{Ó´éq‡¸91fÆSäŲù"ž¾Lçén‘¬›sÔTÖ¡ô÷×>–±c›¾'Ḏ4N2`À™Öaböµ¨é–%ŒCY)c–^Ì•vzÌ -8>IŒÿ¿ªnè´H@."5чŽˆå±T¸‹D‹ÅsY_ì6M€úéÃS$¡qK¡:„"8òEÝAˆ=ªtM·ÃgÛ£¡?6ÃÅÄb‹XáájpÛ’V¸ -ŒóŒð¤¤’3,Ž·Ü&ÊËÆøîDØíš}ÏÿÜL#æëóp³ÎéÆÍÄA¼ÌÞ¾ž¦a+цQ0ž0A {úr?ËzËç5 F'´ûûm 3t>æä˜ï–°ÝG<ë÷ßö‡¸‡•7êã¼A½5†(i+i…4o -EÃoQ5츘`©àË^ÂÀHr–y‚Àúµª‰š›“²¨K³†²C”ÕDp?Ü“4˜)hÓ-f,‹îiFÔ‘úñ`¡P¥ÅJø\¥:”•)ì— §#ëe{m¸ž+ž7`Ï ˆ¶åI0ÄÕ.'ü(Û -9„CÜüuúØ:ª”ÒíØ» ƒœ‰~)Ᾰ #Oa#Ê P~ú×\…c:;ÉÈÝËUCcÕ5 Œ%·’ßuÕæ7îïúÿ{¿7ýWCüjxXƶ)½Wùz}‡R‰ï†H¡ÏÒ?ÌÐÞ›ÃÆ¿“øÊ1TjÒBù Äx wÉÝIñé•õˆÔ³fq>¬WMK˜ÔʆC¯“ä¹[hšê)ÍR»›@¹ ¶Ò¾ËjæÀŠHîx§PðZ¶éÌðõù½l‹p+*‰…¶¬×Ø'f.*Eú솺mˆ£BUI›áV ®‰=Î}{ q¬òÁJ ˆÕÑ ›o?µé'¦?¸%˜½U8‹ÇÀœ¡= -3ˤª™9å°THošÓ»ïZOHŒcs9l#ÜÚ*NO ›ÇV=¶Óe¼U«ôúùƒƒ-Tò—]Ãsf¾Uµ5+l4vÑÛÎY0 5lŽ‘xÑëô±ºóÏÐé©£zQ"×ÃþÝ÷`b†²w›±ZMOs^yœ­[ö¢¯·C^ƒøëóû!'¼’J5¿Ÿñ:³Òœžƒ½k)¥h™$7‹ó„LCl,—¡_s¢­Sá´v³DFç?örôÛ½ ª´•†£Þ¤‡rÂØKûâºCÒysnf×b‚Ñ·† -‡`Ÿ…ƒs -ÝŒ\Pwh™½Î -\×Y2Âœ§2Øj.‡\=œN‡6‰ˆŽo𥎠'­* TBZ‡ÓBd€ÒÒfÑsaµUNÍ¢“ö*îbL­aê•è†ð9ãCåÖôãT‰Zée5Ö¯°æÊCùDU„gع8NŒ¯×% œçAôT¯5…:ùµV@¯Å’!­H,o镇²E»¹êú„~´–çàÕ´U`g{9»ãÅ.Hé ü›¾è`OrœÙ}DzÀ!6vÜ¡Y¾¼ C¾{Û9ý‘ºéåÓ §œä †äÏ(¢÷rTGËS† ]ÑûªŽò¨Œ#t§B›â?|/îÜõÐî$:fiG’•mRÜûéLÇì/8„ƒ·a?æ¹ÏÏöHMMµ)–Hå -ˆ_Œ¦r½$sîŒûís´^v©²„: Å'¡Ë[çWŸ¸¹ãå/ž”pÌô4é ~ÃWÊì;. 
-îÓ êEˆõi¿`näׇuT1¢ÝœÎÝÕª]èvrÛ9!‹^ð—jÚîuó0³L´k[ø(5KHÓ¾Dxâó8sO/ù›NEƒ²ÚØÎbma[cÒoô–w¿v¬_Ÿ?X%CÊ2 ö—Dz¯…f1ÃH‰£ä°’’õ ¤õ dACi;|Œ¨“E‘P9®;ž&\j_U‹Oúëï.ËŠ˜Ñð(Z`d1ÇB>ØItÅÌçéínfArî0€#¹”ñÃZ½Î>ûyA˜ZÝXÞcR4ÙfÔæ1ã©é/H*<Ó§›£ñTxùYeÁV$Š$ ¬ËkšmB[øú¦gýò -¦OÔç«J6= Z·$¶ß4Žþ\» [pÂö²9ïq/L8[ɳ™ÆGj»†ÚÎaW-0»YO·æRõ¨vM7£œˆ…UuôB@êVƭȶ…<ÕÙF¦É‚‡¾ë;IŒV×ö¯¡f«ûÓ–3 R@õGÒìI>~?l‰÷Õ]bŒsì0!,m× HG›úAF Ü‚%*³w¶ù¯ÁxºÚ"®³×‚mVéˆÄaèL2¢ŠÅƒÓ£;éÂÎqØ[:ä^?¿·7–Ùe‘ípiŸÝ‚”žØËÙXˆ.ìäLêR¼ՋضÇ}ºXýÔ"-¬§zëeÌoο%ÔúÕ=ªb§ÅÝçx.æ ¼"2ꬉÉþ Þ££ TÃ-ky½¿Ñjbï´’‹®KÊÒ(o>óÜmßâY–|¢£{zðW_Þ3P¡WÐ*-ìÚŒµñuâƒnst=tÖò•³°ÉB{]}ë'Îåõó{?[Ò"Ñ馆竟̀Ýòä rÉQ7tÇz…)Ý¡ä5 â‡Ù¼¿-“åØŽjz0ÈrhR$ì¼2‘(ðzmÓÿûKк֌ÑvM\ '"žš9†5*òã2Lˆ† -¡§<Áv1{pîTå§.'?·%¦ÇriÒÊ‘öjzq*£BÀ– Æ™82 PRYyù~ÊøZbz³I'«§YŸÅhç±ÐhÓ­^¨?˜˜b¤cÀk@¯÷·+Dæ~vûÊË%9r¢{Ÿ¢/à …ï¼˜û¯/‹="%Ù1³aˆÝ-(de½œvŸ´ú¶¬Hبö«kEÕvþHoZÄýúÛ=MzñÞ«¤ÛwmÚJÍ‹÷ª»ñG`=Ý]ÀSs˜îžS'‰¸®4„vu©fkežŒ‹b°])úÔ‹‰0¢aú6ãœt”ŒA ì™'–\¿­ªcTíâs:ºÙnVW,ÕhíW>³®òÔ ­sÒà -(ÉT×*3úø#hóqB`=7˜¬ °ÇBzÒ\ä†Ã³)!kûãA¡Çe*ïÔ·°‚ò¦þékGwœåoÂg0/Ö2fŽµØ×È»úÊ®ÔÜè3»¦ümÀkíPðIœrž™æZ§u¨{3õìtzX<ÍèTl踄üýûì ôp)H½>²ie—zþ6è9LÄœÕÃu'6Tê…ßVûj3ª¯Ûåˤ=µÖÎà±=ü<–œlTÌ$|É‚/~ ªÜ™2š]3ƒh‰\‰Nåxxj¡–Óq^3~e0Ó‘-û=ß\8Ï‹—_ùÃsÞ6èéÞoîF¨øÃøõÀ_×ï9mít€+kßœ¿ÙeÝx~ó®_6$©UQÍO°¹‹òËpVß´˜Ý$yÐnXù²wíwº¡œ6<Û¾Qùq£ðŽsõ8Þõú㊙‡òæÙsj×K“,°êÙýlÝm©Vý{©rR—œK²<šŠØ~ɨ;yLè¾c£s9ÙI®ÀüœNÛ’߆1–€¢§N•SjÐ`uì=þmŽÜÁv6¤ü­äÃ`¨ƒÔy¬³f^^›ó×=£Bèˆ4 u»Þ -4[6]q7PÆn}È7‰W‚¬ëbµáäîrÞztVþ`xÖ#-=L[Þ9ì—Ö Eò”¹¹ÒåîÇ·‹Þ9§îŽ›ƒÆÖ››¸›8=_lQl㦚_˜Jx±º~¶´Ô¥.#ë²bã$ÒbmjÄFê&0äAgLg ]¿¶ÛÑÞB› -ŠÊïêLÇ"T1˳ÏòÍ…©³§GM(’¥ºuhïlM§Á< ©ì_ÊË;²¢~¯BP§µQû·;nà©”©*ã'%f¾1Tè@_{±£D Ÿòul„ˆò {hT{ßY3T:NJu -=U]8íÑaÞ'.½æ% –eRÉæš„ô0ºäÑ}…‘Š–zg±‚ÂïkY›Ã!ŽÏ?mÈíÀR¹PÖ<¹ìµ®Ù¢ÈÜ|{¯÷½—7ìñˬ’Øþ°ASCªéòÛl-æxŽY©,̺ݶVPßvLªúpiw–uK›Ñ¥==]8Ó4ѹs{K×â^š c¼™o> àüÛæU*x‡Åð5Ī£’¤“´±[ÙHª9Í]P‡ÉW5¼X>=} q·ÓÆ:Œ’¤˜6§“ˆš­6NÌtÿ† ¥¦é4äDl+²ÝAÍîOv™fÄeãšã1Ú(xå\o•œ•7 ׬¼Ñß ËM›Ž´“È?Q©À0ÉÈ…Œ¼*ÃÏ\*F/Ó½œ}NŒjZGVÕØíèÁ«èä°qÞǦ Ÿ]||>í§Ø-ˆÙ°Bk=tXc¬·n~ܨé5WK Iyüè^yÁV]µ¾èFxdu*ԈϤ…«‚Ú#ž×sç¢z+Ý[2³­=tšGo`Â30é[—÷Àý§4æàþn “Aެ㠑Ã&Œÿÿ6o!¡¨Š™o¹ .Œ¬Ð×Ìœ-N)”'Äan™_@@•FxŠæuu ©rÅüÈM£uéâÎcg‘óúðäÖq«;6µ¢°k×°óËH¼‘€cä÷çŠjLD0‚eêŸ``ß°½Ê¼–ˆб«M)[o» Mñ™‹W\8ºùÂYbùôø†é=Z¡u»brcü § ':KDy»9q¤ K\‹€Ä6 –‚ö¡}Ôî:¤’ ð‚“U?&•íå¥*¬ÈšŒ¨;7Z& ìÖC—Êû]ò-wj‘z¹{<ûéä6´Os*DMP¿tf9Î¥îS€[8ïXb°0Q+VÑ…@HntšcüR~‰[t;àpÅ£õ*½½ýÿ¾&ÇõÿþüÙg)ëVÔQÞýns³=jöbzh˜iØ¡æÙŠG5“nBꥈ†Àæ¢;èü×óÞA÷ö/Ð%¾ØHvå´âг… „ÓÍCÌÈYï®·ðÝ6w¢—Î~æ|×·é„ùýù7h#ýiŒ÷—XFšâ½{Ø%1ïÌ´@rØ‚M²¶ÐM H¬¶è"åzWŽi¾úþü¹¬£—èd"Äv_w€Êu'‡´Õ‹t¥®»|ßÚª-X½Po¶`Šéëg:õäýþü±‡ zV¶ï}p —mo+†;j;Úán§Ä’yìv·=tõj¤9á8[?­ÿNûú‹&Zj¿ÞžlwÁ~îJoa{+ -ˆÛtuÝÕïöåŽ>Ý§RÞ4ÃËègÁ++a£ü¹{¹ë)‰x|+±&¦ØÔ3u£ˆbzì…ZAB],ƒ&X¨$\µ&*® ßä)£0/N¬ÃÄ]õÀß=ÝŸ‡2F·så*.ú»[E¿?ÿ¹ -¹dSó$¸]¨¼nLÌŽ2ÙŽM.ÛZŽ¸™Ä 5ˑŲ ºÛôN‡…¬ ‚âìcÄN;ªƒ`adéß™"ˆå„VuÚWC×=íçp3.›yÝŒ~›žÌâ:üëЙ«a²* ‹‡={¿>ZÀ¯Ý9•SéFÞ¤*¼ºñ‰BݱáöSÁ!;!¢›å×2°lvº i¨©:¬Àòfo\òçãŸWÁlÇóp·læÄco¡»í-ûdïð.3Ä@ÇT;/ ÙÏ[£sÁåTOc¼£ ´‹ù?¥ ’KÓ¡dŒ0wøgïz~ñÀ{ÛY0ǽR*§©ï -û¥§ìƺ¿çGÞÒÒ:Q_½`Ú,'ÛÀM.šBì«ÔÕüA+§Êf}K™¨¥¥¥§'®hœqšžF³%òž]—Æ)cèRé,±â°B‡u¶å­Ýn|Ã.МťGC÷¯¤öس]ù–hR^šÁ:g×f–Ûû–BMZL=LÛžB9%>Žu±=š@vˆ¹[ڊȤtK¼‹É¡û°ekèQšnQ ¬]àí(kêŠax«á€>FÊ´d„¸Žߟ?I¨©2l\ÆgÁe¯€í_@Æú…fö¸Â’½Í@Rsø« Vø5Ÿ§Á6æ^i!Ë×}6‚±m6 §Tÿ¾§€Ð,éÍ(û±÷œµHm£†Äï8£Èg‡èFN6lΧñ³Âcÿ) nOe2ŸLT7˜Y†þ’•¿îÇDM|ÀöXín Ãî›vıŒW³£’øöU. 
«Š¶Áõk1›|ý†¨$mмL?*æ%9u=î嘄*ù<“¾ØÂö( ³œ€‘öå,Çò”‰÷³»m±µdj3X=-g­zqG”-b®ÓO«­ yáîÔ˜åÈÂ|ßQ&cØ…x ¢ró -ç/mÔ[‘ü5ì¾*¼‚ÒoxëñAv#†êjÁqW‚–³¥óV¯‘F, §Žf)ô35[Y+²¨°gùð~åÍ9 -2j +Ÿ¥yÌ?c?É3V÷›;ò‰­À’AýWûŸ½ËŒÇc¥ñ&bO¾Î)ÝQ'G"¹C`x¾‰˜Âs,-Ýs¼œÿ'»L²ÛH¢¸÷)tõËy¸SßÝ Ø&¥MŠEVå€À‹(UÌázè9ݱþ< ææyô;çõLüäž–UÃaëZ£ôv,ˆ3 J³\“ekÔÛšÄ{·áv¦šÔµÚQ ž¨P/m 6Lä©Æï)9Š¼ç oXÊø+(Õkê«ïˆùf>!„ ®Ž˜eÇ{˜´g÷2ÓÕ]ÓùëÉR3¸¨*5ïÀÑ0ݳ½'1G–rCÝ·l¡hµCø”lýH#¶A j\~î“xŠéëäzøîÔ¸ý¡Uàö?üÕF<üÌù?dèÛ—Þh¡<ó­3¥Z'„`7ô¶¾žK"¦«Ïòe 5X ÉMyWMÀW·ÜÜjŸ×!aLÌŽúË &‚©]b û4Žó©™[ðZÑQ¯åÕŠŠèŽTe2‘6E@Mˆ{WW»]_]lYZãÕÈ ÐøÃŒá²ðy„È™äŠ'ÿ5nÖœÈP³¤Zx2$4=wèÇÚç -볦`¹(½FŒ{qÁ×¼(mΕšòïÁEVTs0Þ1.gHó¹—Ajyýk¼S‰\ÏÓë+ŠRo¢Âq¼Ôß·uC|ôŸÚ×SlŸ6m€i^ËýWÀÞ” -–=˜ÕQ²õ–5üå§6ú¨BÄoÓ@QéÃÊž¨*ÛºûãÈ×UÚö{Ð+Öã&ݬhwE³Ôöb-ïöæâÚ«êËÁz”Z-$zòvZ²v¹¾|5¯è,íŒb—Rå«dyµõ=Æ 4;Xšq5›Ý`nÅÐ.i§ùêÕ¥¬ðéž88Å40ï·õgÎvuM}ð:}­›zMâ:µúVÔqA mµš8£¬Ý’µ† œZfÚ©EÔµ«“âÔŠ^ìåÄ­®mý\Kì«êÁ“ÛÈN ÅÖCömF|„v5ëÏ­™šî@Ó²@ Ì_¢:½†mµ<Žˆ–¶ñÃgHÚT=Y£V@5Ð×÷hÇO«SD7ì2!M"‰%Zk¹8[o®“dPkx°p3mºüg&J–$ 9]nGÍá\p-+kuDÓq-ŠL‹Õ‡¾;2œëïÝâï¡óŒTíÒQ+L ‡XÈIo÷FCTæeûßÃMWbÓ³Rt ʵ՗¹+rÇŠ#‚“N2^ßAOF:º¤UY Ý%ôÏèȆØ~·¯|~lÍË­Õ'³“ÏòeÓYãxh1muíº<5yW“_{~bƉ«¨Sí,wƒ2ÉP°Ëδcê-# <¾ë*†ÒÜ}ÀU(=¢+R#;5v#b‰~mã>Ä}$ß4V $-ª^£É<ß鯽è©ïÈŸ˜å0¡Y5Û!Òyýã”Wu%´Å¹ÿΪTd Csù¯ nŠ¿BÑo=ÇàWád`ÿ,çþRŒ©Nlªíúèõ¦ÊÍ F9$NZ’aš ¼rÂ}y’–pl{%´È'S[Œ±*™* ¦¨uögbc+ò$ÝŠ8w¨Œ'‹e¨çÇg¢.½ÐIRT9¬.«ï*9Á㸹¤8ü÷˜bêLlµa  /^tFG¹ÈD>7a4ðÖôSýÑoxPwsl 3Òi‰ îÇcar3J|œOågj‰b–Õ?ç\é¶÷6ð&tR¨°¬h–úG¢Eîê¹ìv»^ÿ¡‰úhñÚO†ÈëŸv‰';ótìxîw»¼Ã”ˆè+'púõ‚Uï|µ(oÍqzw8 ŠI'ô;kv`ùâ:*]6CÓk4»Œl{~ç¬ñëœîžmÛäÐÝéÍ1MôÂ9„% œc‰—”’µòr‰ãäjÖƒ°Üõ™ÀÂf0¯û¯Õ(¤ýÞ:Ò#ß_ cÆ*çexÓgC-}}»íqK†ÎOé š\=xj1‘êî1Y ¼8 \ ¡µXÙÆ göÏaàJ•¡ƒ³/–ÊRRßkžlÓ“[y·Mý¼ÛVß2riíñA„˜×ìÏžòuÓêÄû0…,x@ߧªßrH^KØ„¿ë±'N//ݦ®ÐE?†ƒ@œ×¹ØEÛé±¢õr×9§öCõfô®åõÏÖnµ¥èÁçµZ¼©ìaéÒª(ÛÑ €[9µ»›¦´ÑýŸvIͬÃï2ÞøF>ÅîÛ“dþ¢+è•ÎáøRyÈfµêqïYwìGß빪àxÈ® Ù„FëpÌ©ök^œùf‹x4;R…þ5™Y²á)!1W¢ê}u5«ÕÎô¬w¨ÚǾ ŠZLÌO騛žò¨íVÄWMÍ|Æ[UˆpäÇaÜå±G¼ÉKAC6Ì‹q¹Îh%¾Ù‡åȇ¶ä[o è‘sز‘Ô‰©×•XûÆdœpb6 ¦ÞAÂjøÓmZ‹×z -îñš—μ×e¶#èú²sšNï@ Êñg…+9 \ÇPoy•Œiè×xÛÑç?{kº…¬3'}°8‰ÝÇŒÅäm)š¢;p*t‡QÔ1rËã B:4Ý 7X…÷^Nf¦6G,™Ð/Ϙ™üЭwÏøN!òS|X£…T,QýçT|8b€Úß'íù3¾ ‡®á´`¶\wì_ðRõpÉêŽÇŒßÛV…½ØÛzecÈ1ÆꉃˆêÕ“ÎÆÖbχøŠ¹PÑò£ -Úb–á¶p|}URX -"Úƒmw^F²ûz§Y3‡u~}Ô܈s$£ÀqW7­Âj‰ µ^óßêóçPUgd+zó-°Þ×GÑýËýoX‡$`Ý -°+¯Øã’î¸kxêHwz 9k=«õóë:+§“ª 1ª\öŠý3òÌƶ»ûÂ~ Ô³¢ªTýÝ8zsÃó¨øüyØ%B¬„JWá+‚ÈUD?]ØÙš£9O²™Q?´6²kÚéš=/‘Qc{šDóš·k„®©(ºê·â >²½|rÊ-jf¥§[H´³%ô:ŒY‚›×Y¯£Ì´½ÚщŸ„)€ÔÔ¶þ<¹stcV`¨¼¼s§•ôŒHt‘uÀK$•|¾c$C¸V@è»xÖŸtÑ+;Óè¨Udç©ÒKÔÿA„ŒØQµÚeô¶¯˜gÚç¸w=^wt÷@xˆŠú^iÁ~äÊOÒ˧I¾¥SF«j6È7z?74‹I—~,§h½U¤ûëߟë[šÏF8$@¸} Zobåß4šZçp´Žª†:Ï:,XÓæ(š5Æ*q»®ë\/kÄ£Bœž7ª„³E¾ÓNgw³~ ‡Yð²<‡¦Qzu´# ¼•”n•ÓMâGH缆ÛQÍØ–eoèÌãs`ã²eòù›ÏË8Ĉôo•r\ggy‡TD»Í*±NkL+33œm1öÓœ\q꿸²%Iô—}¢M7†¶€£7ÖUÊE9àîÕ©LºÞézjÜ_iÈúet®²î¥1UÚNÔ\¯þSÜY?[ÑÙ]ÿ´4 ­ü0Q:;;Ŧ ýœÎþëY¾ íH tîØ+ Ài››Ìf=E. U›Q2Îòägõ~GÄË¢0§%ñv‡.õîèç.]&wE7v›[ïÕÔ®ó>÷EŸîe·°Æ!o⯾o;jëι۱P°|ÂSX0ì@½t9Ç¿¤Ÿó›½Í~pK}iŸ<ü…€Zó~¥ÆÚ -³’;@uׄ4ì0œuÝžªúö²¶£¿®×4Ai…D]@’::-Qº‚W¾‹Çôˆ Í$Ws=m¡×Õ­ää{Eõ~rU#Ô8ôÕݺK³ü’°êŠ‰îæ?šÍÑ}˜×ï\ ót•›G>`|ió©îñÏœUîû¢AÆvÛôùs“î T°¡±ëeÍ^G’~• 7ã!Ì+Sº=™¢Ãw¬€ª› ÿqßNraIø‹zt}IY±è(µó \§x. 
¤Dƒf„ßµXN ÔCˆTõ‡û†¨Š´þùž‘R^u©µÈò[îºó‹éõ²ILäA¤z}Kÿû~½t`/GðaxGeõê h®/I üqp׈ÿ2c42ti…F“Mé‘÷ °¸KZNpfA»Fþù ÷g0L7 øu”\N -¾¯oÝGámËûV2Qfs´ÍUGg±é&IOÁÎÑ!m:§ÇÀy†…ûøyR“KŠØ®^x¾BÐGœç2iôà -"0]WÑnø]ù··9—ùþ¾îî ªþªtø­£lʈê#ç¦R½¤‚G:zÚ»À~¬p@1Ù¼DÞ<í«»¬*íûg9rQ²™£Ü;ÃÞ§ã«Ç4w@H¦vÛ4í´¦t±GÆw‹ w3¯ÛÖCÞB¢{ôä’Ÿ¬¯ÝîÔ:=ù5ìÁ˸Ê팩–`-š±Ýâ ŠNçZ|Òb°¦ã: ÷J”í¡9 -¬…jtH¿ç©1Ì›VQýú´Žû¯ûbB•nêóǾÆÕW5ýÿ¦gŒ×‰ÞWÔ+\À³aC4‚ZãIY%š-éå|1‰¤¼¨^× -{¤EnH â­Ø%%ú|J·Û„æ5Ê3_çÍtjÄ¥™k¯ó#'àfûüÔš¡öñz~ŠÂïŸù¦‚éûV[¹_ /ÜxSƒî³:emœtd²çKS -¶Â Y:Œ•ÖkFy@;5Mî~ßÕIt`‰tbŒÜéÏpV2GÒÞÎc~Æ7úi­‘ú¶pÞNƒá§»eya{r^åÜ^üsi¹¦!Aó©>Ør ~Ô’ä~;oÕðôØH™Rþ;%ó‹CõêiÕìœjØ^+¹K~Ü "áÜwÞA»Dè1žÏ!œS;]|_•^òW[ì"D¥*¨sLX½·Å_—gUlhÎ\¤Ý¶}O»Äï\YM¿“FÃpiê¥àºWi•üâTåÂé¹­ÝÊ(ó>*óXÓìl’ìá5Ø©VÒ§ù¤Y†ìltî0FÛOAeŸ'ë}‚þÿ—€kxîÀWuEúÿ÷IkÆ¥T“X÷ä}ÄÄg–ƒGÚ¹j~cÉ•~zŠãh_æÙþ¾O¬ôêqrDð­ˆÀT‚fת6Yyt_‡]M¢3ipÝÝ´|°é.X›¿%õ…Õ’Ò…‹ãünºWAwêBY¢ú©/WwÒw0ža«¥P•lu×ÅFÞMÜ<ÕLÙ¾ 1ÉDc¸Ð”´¯(SÞ£â¨'t\rÚ7¡»Äý(t™mžFB‚þ6Ä*o-™âíŽVí ûãXÜYœY<ÿã¡wŒGHX®V+Íé[꬯қù`#qvü¤ÎmY;‡¨6D! û^¢ÑÝùêN­"/É {Ñô»á­W[øJ…Ί×/D‰¹\88P+w2tW¨“Nd³òuC÷žaÔfº˜Ýþ°oÿã{ -òø%Á&í/×½€¯ÛÎø=Éý)´ó`×6ô»â‰u/bô+ytxøË4¤Of##˜aß'<& ‚Tëd±üúÖ°YÔcÅŠ û×ÜJ%uÊy=œ>%’á¼ånéK¦y£NKgWÔmv…5îÓÙ–öTV7§i’_Ü2TíAdw‰3Ù#ì’齫sÖ³ÿ¶•+ãl:û§p›žºI°÷Ÿ=›øAû>nkZ³¥Ð3ãÅ ÞrŸ”஧>Þµ3·ßëPq-«QZC»@â:•¿òò~œØeú' |ŽÚR}ÒlÀAo^ÍÆ(ž®qtàE-¨ï2¡›ßk¥ -뤉¾ÈŽ[e†ôµ=\ ‹™å}®õ¥}ŽÄÊ~ -eX­fÿ -²(xÀÏøà’irḞÓÑgQ$Öó› e5v-L£i¿6‡€ÊÞíThá¹3Þk;¬Ú¬zÿ;Ý?zÚÞN²÷Í͆sM%ÊDñ[uJçTK1 $N—Ó<¸dC½Ç«Ù]v‡Ž’ %øü%Y%VøX}V×, ÓcÌòëžLמÓó}ò+óúŸôêQë]Œ“+¦ýrCmªÚ_Ø4b8"\bV÷¤0®±Æ©™Æ¯¬:Ï.aíê°v:iɆ)éÛžatú¶EÁ§«Êê§åG—k¾<X¢Ó¡­œ‹ø–G¦±vg½µcUA ßIG)#<ŽÃHw(„‡GZ-ñtÄ ¼`ú£}?¬†ÕÒìþ;{t‹*±'¡LE{ež§i·R<Ê!ŸQ¶ tÈÃ-ÀØûãæðéóºÜ=HIðØ©h7¾Ü¬uï¼Ó¿žy†-?ëªÉh›ÔªÉ}Ç«=ÊÚàsï2BÔÃcƒÎ[eÒ¶ëüGÝ´‹!Òü{Š— ·Ø=ç1PLùºaí‘Øa™èÝ)æØápz9——PÁ¥¶ßï6ô6Ía¸Ô}Òð -{‘±n¼ Ä0Ê—5f î´Wt÷±³â”ѹ»Ú4½X–Ó,±rhp™ÁvÙ4Y*¸æ~t obîáý•ˆ+]£Ðg çc~èšG§ªPÄVQkÐÁÓ /ú½ÐÓ€b˳½ £Àò,ŽßT§ ‡í…iﳸJSmPn+6´)Ö­Å“DˆzB´…•yˆJ´AˆPj·Ø£bÁ´è ‰ô¶Fåºèwº×M»1kÄü¬ûŠfj¾ï„åÌÔ)ÿ´ûÎWx’šÏ6wÓ¾ø6Pz¦É³­Ê¦¯²Yä’ßLƒ˜Õm2äzÙ>¨ÄD·N•Uð׸¡›èèð,d!oå]CÌ'}sK¿æ¿œÓ' „B 7Êœxäxíh·µá—ƒVͱ¾JœtãÕ¨´8’nu³ÖYdX ¬{Ñ”£á¾N¥= +²é¯,ŒՓÚ Œ—‚?“_?.$ç_Š2±ì+Î~ÐÆ“CšèÞTk­þA]’»†7Šw§½ :~‚Ös:*9*véâú´ÏšnY,ÝöWKŒg–1FÉâ2›tª—ÛÁ%¹ŒôžC5júqŠ#d||\"=ÑšèŠ{‡Bèya±ºÖ?ªýͧ Òï@€lX—þzØ^õT2¢Ü{8´ê«·Õa»wÔY©E8ÄÁ‹z#£Ùm|£éû§Ç\läU9égÄ0M÷%:õL€21¦ƒ¨”OZ½J±ntOáa1‘è¸ ô×ÂÜ0#KCºÂÄ÷çÇ!R<ve.u(‰OH<®ÃÚºê,›‹[“È F¹¶ÏÅ­ÇvÝK×ÛÛÐùó®“èB[ -¦R÷i¿¤ÁR¸þ¨,a×3y e‹ôYJ§GØ$ï‹aÒ€„ÿšúO^LßЂÆQs­.»mM3/Ì ¬¹µ<.³¦^–&P!§š“º¦}D¨ž€ÖM—×jª>ØwmÍgºžÓ, úmN¡m7Sj#"´Ž¦VÙDãéÓ^±AàQ§“Só²©tÖÍŽp¦"ÈõPñÄîv ]ªDÌ|-íÓÖ^ƒ@ºÔ—T¦é'¬´×LZßU›Ä5s+^hòºE>¥%å3ÁM¿F:;‡W:: a¡ê ŒúY¨w úIÿ1pîÈgŸÆ~è¨zïY óÙÖgCÛ-4|êžé¾N+ž–-1þÐZ׈j!xJ©»N°+gÀÔIg]íãRÇ*´Wa/ÕÒ†p/|ê=ïQsòúúi"BèÚ"YývÎWºßú¹9‹.g~@ãq:6Ÿ—‚b5>ßž¨ K.¯l ˜hÀ êbŠœfHì+ÌWz²Þ“DB×GÚÎäßï·ÌÐÿ¼~Þt<­$ -WðÏõ$\{VÐAˈ ÖƒÀAGI§¹%Lîìÿ5,(F·Ú§µl캛sàn&C|¸³jT¶u8Œ×=íl1úßÛ”îç-jvv}‹ŒÄìÚlš‰+AfX/1žõÔõXŒêDÕïä{h±__@b`° J*\¾@"ì†UsŽã„Fø±¼ßÇUkvFÂD¬pAýÀØÃÃÊ*q7Ÿ*s⋸)G‘À‘¦hb×â+Ô0 ºì†Â£f0ìµTúë j#ýá¤/W.Vb‡ Së‰ËzŒ‚À‘Ò½…~0Â×ÉpU&ãp‹o®aë¤3X[°(¢b©èÃùŸE¾²äžk¯JLºûp|D^_dDL"U¡ŠÁúZ¨v™åäj÷oÐuÒW ?ãF°Äbj¥rZ^µŽë5D¢Õ 4‹“^@ºúê.{… uôì.UðPÅ´¤ß}?Ä­?Ló[Å8jÙì»E–’ =•ý›!Î>3FxÀ9“Û¦UO[(³\†Ã«ªÞÌ‹êÏò¡‹×η}l¬šªà†Õ@Oåñp.ÚøKï%•èLYý‘çŠÇŸ²ê5¯™>åeÄn£ü•£c³~ÍÐ<¿¾~Žfê6»´ë5ñ•¬5‹'ÕÂÜ5ß÷Àr°)DˆýÛ¶,5@úáðŒ©ÑïrI¨[¾Y› -UäÐ7¼•¬Â^ŽKu;-ŠT›[ÿä€;!ÚfþÂ*|×ùÞ‰³b»Ž¼?ãæÄf…¨éŽâ¾PûnîAV€5,–¥+‘¾`öjÄAoóxL¤bž„ª­ QïWÃ:‚€uÞ_'Îíû1öÒ\WcP^:B¤\º"žÆ6M`5«>Wk4Œ„g’ž–sˆF’®n’a×´ˆ¼çó½æíÊ·® -»;9.žÅÒµl8Nú³éÒG‰ÃGÍ#ðEì”ãGÍ5:]§?Ï)¤¿ÖÜÁ”Ò[ŽNeÎÀp˜…ºãæÚ,„§\û/¬à1UÔ{G˺'ADHâT6íÓL]°ÂΉ Ù|[¢ÞÝ)gÞ2üýÌël h#[Ëã@‚œ•;ô}á¥6höûì&±c„T3;œTV…x½ÿ"ÚÎw›Ò iË$nç³Èpb72LÇO¼hî­!sÁ²œv¥W‚V!CqNëÄòà cË͸ۼùx«L_u‰û­%\©J¯>e7Ú¹¤gÒûN• ¹~7™a/ûÚ -ü - ö•Z#ª}ž×Û1hݵ¯„«+ï©âS^.É­ä:ßUxr¿{êñ[ÿË“ mI¾=)[v©ŠÌ“ÃKl˜Õé å «ŠåŠ>&µÓÜjsß,Êâiu|s'‚©‹0 hB7N÷Ýf{Í-^6—Èc=m:Š÷Í*¢’ùèÌw?ˆIAÈ“jÈäKR#sÔYXgFEŸÄNòn÷Qµ{H£W¼õÅÔ\ÜÖÒÚ{¿–G°l¤£¸8õ ™{Å÷õmCmÞõìqPöó|Z«A)ôæŲÀÌcR 
-\ŒFÓ,QJI#¯P`·G>ö~âÎÙ÷üò©òÍè4`ÜÂ\€Rƒj9šñ% 1ú½ð;¡ËDzãN A¿&ù%•˜ìл@Ø16$D±º «†Þ9ÅÿžXS8Ul{ bËâ -û7ŒÞ hí—IÑß¹=P´o¦0œ O -­Òìã¸îôÐÊ£a:|‡’iÂ6ÿn¢eÉÉZ¨«‚&-[^O%´ìi¨µ£Ò¼QgûGÙ(±:6gutïµÁƒÉ¼p_N²„óU㮧cè Ê -yEŠ2=Î/(úá£f{Üœ°ø©­}aôkÒúe¬eΆ– ¤„qÖÓD?6-ÂOš£U:å\ ¡jÜú2ƒjÿIØJ¿vhŒkëþÒÏT+®^ß|ô0¡jC«`æÎr ÄÈ-@`\uWÓÈiF0æÅFÌŠhe®¢û:©iößF†@ -œ'E‘€]H˜ã:„ -¸L?;¸!)N«lÅ\êv‘ - -kiÿ"­U™ó$àxäŒkOzqÀªöEÛ3“¬ø°p6Ú¿ÿ~X8é IM2&`EÄ»ÑÿíœùêR"ÿ©Þ­G›µÍ8^ˆ‚öº2ÉoB¿]Ì W˜þÎ9£RáuœtÃ$;{~­Â7„º¶9œ½à-Ë<’9lŠƒÚÃìTûPÁÉ¡°)³6æ7X±Z‹˜&P09PÖ$É]¿\<5 -Ç{¤Ã2À„D8qëòpB×1ìáxIˆnßÝ "‘•ÝÞãa‡¶ 8êyü+Æ€nÎ]3¾CŒiE–»ìѤìbäŠjzÈ´T"Häh -o%ÃWE`´ZÍÔÝe¨7® û’ƒËx“)¯ܬ÷RqÝÝž¬E ¹‚ÿ-µôcÁ<UwFܹ|È&f¿øÄhZ°pë/+C@³ªPSjW£Dý³C¸Þ%3ìa­Y‹tPïŒ -ií¨_f¾ß?ƒ‹ö'ÿ+Ú·"‰¿P?mPuü"ì¦O¿ÞÜ)ë>J3µ1r¤è“éPÌ(Wá YšAUÊaòí§N~ Zjã)û<+$¨c¤E7ÒF†Ž°‡œØòª#›¿'$Û -Nûg Âøý÷sò/FPd´ÉêÞ'ëLk. ã'¥~Ó;?²[Ç0d³ÃyN(‰0Qq"¼Ö¬‰èë;òUÉ`sݵ{Ù¬|õa"£F¥áÛs¨g©vŸ Ø9ÍóÅk,«ïñ&B£!‹ªúWi#•Œ‡À7hŸgCí;ÎI4ws,tîóoÜË݉”†V3ù¼W•z×ÿŸõ”óŸ[tb&컞:vÞ‘Xÿ—òrI’7‚è~NQè1þ8Ïlµ–ÙÜ^þ<˜•IÖH¦^4«™Éÿ,*RŒϤÊZ›Û¾üXŠ™¡Z>Yª±©Ÿ×,eKgUÃ5“'½neÊ0$yÇæÂyJETñ^Ò­–Û  ”~Íòp‹T‡³¤"P ›sA…Ò ÃËì¡UçË_" ÁbgçGéJÇùqx•µèVßíO¿¯Ï*+sî­R¬"øÆg•µ$ªQ.þuJº˜F•wÛOêUµ±“"@:ºé`ý”™>‡Âµù`$T"Z¢ÑSPÕÌù&|3éBy¡¾½¼˜*W– ®Wsh+v§IEö#mÜp¦ƒYÀ¦0ªVò›*ÇÍË£Žú•zŸôöéß×5˜@épt‰ØgÍ -ŽOá?A–ÒŒ·ˆ8FÛöÕ¿× v×mÔf/xÒxȇ‰–GVçþu…—_HxMÌ80µæ?%)dYê˜öá+2#Ì6ßt“ýN¢ù)‹‡%¡ÚŒD.;È”ÄÝ“Ï:±"+uß0–}ÂXÞx¯‘Æ%`nÁѦXåÿdO*ÒE`¶ç×=Ž–wqWŸŽÕ~_Ÿêã$°¶§b¸ñ±4ÜøJÒÀž9{ŨÕ._XÁȤwç‚JÍ)jÍi­¹¶aǸšëÓu¯$(Ž‘òù%”¾«·ù¬áïëUiL™t²È -—óá'µÍéNT Í¿8ªíZOKÙü1qÜ‹I,ø¼þ(æ`º4´Ä¤¥Y¹ÉXÏöÂæÈö+éÏz!ÞéNß6¬C z´[&haÛ(D8®· ïÔ-¡À_#‹XfƒãkMWªM!Ûu™¯%_p¨ýŠ–Llüžw)h…<ˆÀ¯RÏÛ0ähŠ³3íÝtþLè]§I;&î -R¾[í|_4{S°骷f»@V û–ùߣÿ[(講ö¾ÂŒƒ_Ê®Z ¤£ZÜC[n¦—꬞Óßš7Íti'3Î:ç¶Mð-ÎàÊȇý ]uIg0rïìa5T£Tô²iw©¨Ã1é¯?Š:¦cÙ>Ö‡#uÕ€+—¦pøá®hÕ®è|¨à02Þ×ß9‘7<ÈîA­Ë{hï=ht—2rå; ?+_ÍôÛah,‡KÔ¨Ä1iˆwÔôxتªSÇ À5Z\Á§§TY‰Q’U„—q¥ ÈÃCèèLŸ9|«ÍJ/¡ê‰ÃvdÀj#-ÑÖIzˆ…xóðŽ63ž§‡‹|?yض¾åf8Âò•‘¥À³›e‰0Kc÷’,á˜à·«ÌgqòÍ2i`­JöÿBvqIp>ý±S› gå4à¦;‘™ûLNÒ'¬6À¯ÑÐÃÒS‹mp±™C”{jOêuÌÀN#/?Iãð~­®p‚ÂÜmÓ¦Cb&«ð Cv¸l»b³§¡@+ é¬MÓJ¨3£ð"(i³ zå¾…§2<Îá–êR?/«ÎgÎT0~Ö2ü‘?%ññz²©˜Û6Ø¿Û¤ÖóÙXqÝ¥ž „Ê⺑"­è~bÚÎ2¯eaÑœðŒÝʯ?äˆä¨î¾‹„:fL`3é1ØLð ïî¡ý¾>;±©Žükgéó LëS›t±4c ¦›žž34 Û¬V'OaÜx™igW1¥ç ÑÐcú/ÎZ°Û³`â «—ƒÉ+_Œ[Ttä¨Èé>JM¿T—aŽ¬„‘‹÷âm–µ‡~ÇÓ÷õ*'ŽXúÑŒýA1¤~7ñ†Ì™7¶ô­¦pÏin¾¼Xu.ÌšÛ r?v?wöLÅu™äº&A_.¯Ù ‹>m.ÃlÏ8pöjøèýOpyk&몀œ#Ä“Ýò5€í”ZÜÿÍWa{>®ÏR-í¡‹#W'3V¿¸Úÿ©ñöÂiçw¸e3‘qyÀí7Oì3Óý]_ÉÃŽuúã2ŒªšuZ¹ªó¤zBC£]ÇÃ…_Y¡XÑœsO(‡G‰ë hsh­§â”›ží„h/ìg¢Å2‘ÍÖ#R£ñܠLj¯²ÅÕ· zÏkz``¬´êQš1ÒôzÍ~-fËI™ãÐÏcâmÞ×çá&¦q0ò´@úÙº9ì2=ç9–C -¼EŽì©Ù’ É^6=^y±XmökëˆÔEm^c³Ñß¾ömÞúHuáÇ(7Óã–®Ùû@·V†?x¼­kÃÐoAí"©)O<*æ„š]ò£¬Á7xÑ•þŸIºÓ& Ÿ Y°ó° Ó›»HÍfœ´){9C˜Œ›5ÉäÄïÕ6¼Œ{< Š(É{¥ï¯‘ÀoþصƒÔš/qÒ[øc—¢'O{&Š6ð=è þ|ö`ŠF¦š¸Aã»bÑ„ÛèÖcf’ߺTOžiºbô·]E2ß‹—UÅ¿Å^¤Îz°Ö{Fr¿½lƽ°ù¹Š1FXÊÎëæßr¨sš¯0Bšîá `2-©âÍXÌ)®sY‰ÓÆôFm‡Š¼¯Ï1dåšÈ3ÛiÃ{>ôÁ€ò{OcYœ½lðk÷6ks_I"—³ÅROìBIƒPÊmïYjæý“TLLF¬XðìP£™Ž —õ¤ߨíÿ:©/eÖ(‹ÆÖÒý¾ùý>m(˨ÆG†#pJ,í¦æ¨Õ=xg@Â_¦¯âÞ¨©Ô¬Vaö±¸SQÞÎö;o´ýõ¾<Û#ÒLôE4 ñoÒþÑ°1(ËÙÌýšYËlzp±R8PÝÓ~Ô ¬­ :~¢º{o˶²ffÝ6g½:Õîr y:çm`¤êœg"üßù©.û-¡7½|WÈÌ*ÕiOÝžÁC<]ûË;räm’¢!ŒÈûn*Ëð‰à n–£ØãJšºÊ0R¬Ê.ÏV‚U÷ðt­þß4ô­ÌPúd‰(úÁæ£MaÕ’˜hÏb) µfß‚ŸÕ­ööH°gµã›Ïyù N´zšIuˆ–”\A ÜDš}AìôÌž¬‘ÂW4æG5•Ÿ6Ì°>€VŒk(箯ºm.éL‚IÙe­Xlðõ±²ÖZzå 2=O´›Üâ:‰–¾\Ú:Ã]òLá­oœíš†mŒà÷¬mQ“ WDBYæ'g"= -®c-É">„v6/¤Þ ¬ÚYE&Ïf'xêÄ#݇ÇZxNC°\ß4**¾çjŸ3r°{z‡·=ÌàÌ]ø›šþ8–ëqœH^Û;ÅÞáí•*zxxy»¤+==`hï¦9ÿDËÓ¦Xpf¹™ú´-e89ØÏPÍ°ÕÁÞ§ÅØu¥àϸ<tn±ÇêSÕÙ|‹ QŒÛ&ÈtsÓBfG¿Ó¶3y N_Nòvú=Ò`XãÄJC¿¦TšùlÖ\œ'áá$’Ê–ÙéŒý*— äUÓ•1nOs”4Éë“?Š×›ö_ÂIû&…ež«x’Â2‡Q´SìæblûÈÈ·doÓÞì߈¬Óêņ§b¨€Ï6Nˆ­lã*b·÷V„ cF’¯9¯6ÿñÛw¥¹J;µ’Âm®DÆe>Ä]Ötl¥1< ÓZØ3³x‹“Ö‘cVçÿ¿Å{›~¥Ì*MK.aì7aÀü—Yô$§%ueä™$;rÍØ2õÔð -N:è}Xˆ+\ûWþÿ¹ãá÷0ÝJ@¢ó)D; ¤b@„z•FYxoé³OK[æºIÉ'tóQ7ˆœuõ½túØRõsôL3H-û·»«æ½Ë~ÇõçXÒõ›–þr™¤gf‰㪪ë‚Ç(/—ä8r$ˆîu -^@2ü8Ïluÿuûó@5+I™z¡³* D„ÿp¬xiE×-wYŸƒGïI;wºVE€rg5ÙðLn\EZ(„J5r¯£€>[³<ÿ7b£b{9ˆ=lö6õb“ö±l5茇."qZ;[ÏIgϬçFº7Ò­r@iïʀj† ~ 
-ñïFjr¿×¾5“=a2h<‘†¦üÀâ'0öò›û9½môNN¥tS›ñà‹8#7Ó:šû§3. !¸73SÂ:ÐCŸ–̉lÃÊÉ4>¡Ûn”¦yçW¥/×4àhm*A£ž×°eŽ§mÙ?>iî{ —FvŒÃQmÇyhk™¦Z„’a°×4'Õ$ˆ‘L[r굓Ì3çºéî“$gœÎ¾mÜ-p=º w³èÙL¢Í^²ÏýLÌëOj´ÿ•o!bÓ–¨0¦ý<ˆ%2f4‹0‹ýv&•.Skç¡â#®ÀßžóÌ:ÛÓy¯ö/Í©¬jP?ü.|kùˆ˜.ÞpÊ6‘E¤›Z—åJn£U“ÎY.\™l‚¬ë‚»Û3KMn<·Ýüéçõ˶‡±è4G:2p)oÕÛi´º½Ò¡‰=ÝâX »å«°š{Utk[  -õ|e–¤k\à-þÑ3iŠÄyõøY’¯îhèÀ‰ûÅ£@'œ:‚°à–£‰òi¶´?‡~`nÌÝRn¶ÙÌx†%îTK¶¦}JQ†71\.£ÙüéuÛâ£RDsüëk»K!˜B*šýd™‰‘úþC[À¬™R„†i~–rm?ÝMš¤¿Ÿ?Õ1qHëf¨p§Nbî?g3ÇÍš²­yIBÃøE¢ÍB#ùg1bë×…³ÔyÞ//i²°O’Ëâ½:÷ð;ÔY¾r<ÛwÉHýÛ¶U#Yû" "NZã–Ç`(Š¥A° }ïk˃١Íi1h¶r­§œ&lj˜û{*îF`Šm7¿ý_©—±Fê19R‡[qifÚQÆ8¯PÂpó¿/#'ä‹Jõ×­!Ѳp¢‡³m”Ì]ÕûL3ï„ -ñV‹,rIOFhƒ¤ž‘t}Çûž«Pô÷žËz&+ñÌLˆ%²p]ãȤ…’eŠžÜÀ0¢ ™-çÊ); 8að îýÌb¡h©$ÅŽnï+|e§[š~í&í‹©ulú¼þ=¥½<9jYìÜ õN<Ò¤Û2ì¸$ËÜ«qi¨)¨ÚØ´ˆeds’OŽêËæ;¯ó[ì”–ÔhàCJ7â}˜¯óÖØÇÉÂ8YöâÎô’D‰Œ€1;ÖVs:Jb'\»sÃgú¾ò±&Úfr¦¿O›³ÞŒ3Ìä;?ÞÓ®ïé=iŸ×o'S¡èB°öÚï±p·¤>L»-M™©¸û‚^êuôýõe±3¸?ûk}kGX•Ä˜ ìÁ™&¶ -É”hÅOd«Ø]c4:.d\IøhͶÉÂaü—„y$®ïÒÀÞ÷æFÊ'ÿ—9n!ÈÈ–v ¯7¨¶tƒÕF á -2óì×9kØH4‡EŒ¡j¦‘î¾)ƒ®AwÚq¨GÖ€¹“9#ñ÷ mÛe,ÚùÆÌSŽ`)5|vŒ¶¼ó°å‚í†Z"UÝÆ¥4ü‘ÞN½„³Ž‡Ó£0A"3Ûò?qÕö½,à‚#1«?›t×ø(ÌIï7€¦Ò†ßU}73Þ8†Û:ÓÌ. -Ñë}î—¡²œDCx\Ü §¡¤™ä ™ØØFخۻ\ù¸ ’ã¨t0É‚#Ô´õ°O_ëJ„zò°ÓÅjw’UkD¹}‰õø'Ý<0 ›yï>­ˆlêÌ“ó?'þ¥{kÕ -B¡Ç¸ŠVþ*wnç]¬u%èþúTD†rê÷i6„仪RënÖ« kù¾ !2!øBµ»[&P_½%jê¶Ke›‘‰ËÍzR“zLíË¥[|Iôé³4Ÿ»P1MŸXot¹öôeùa¿é­Hd2>½Ç•X•Köëj‚  íhÔ™UH_*KšEôkAMT["³Ö’×mWÍ¥0vuö»l+²âW“‰ö¹´ƒ°5åÉ9 òkTÒòÍåO»¦õ!Ú“Ñ;ïŒS(ãmWé~kêHf…ánr(°&æ6:Ãûž’\Õeù„ÝvX'Uψ8j Ò¨ÿF^¡ÌÉr4Ÿ³3Þb€íÓÐnB“6>ÌÖÚÄðü«:Ã)˜ËS‘TâGÔÄš¬(óPs’ôcìV9àÄ#X©¶žp œ¹ÇKº:OÜ‚–\ŸÊu˜ÍùpÔ%VXŠÕ*rÂ+ç#§ 9Ú‡:™ÂtĦ:wl§QôrÕy®‹)]p åÚ©à³V¿×jŠoÂEf6š´ AÖÖ‚jYÕçN€“#eð8tZl¬©oÖ|¬ãv”M6^ítûÙ$`±Иˆ=’èj¤‰N’,ˆH*pßàKÞhàMwä±Mê}5yÏÙ’jqhb ÑÖÄï©Ïqß.ð^ØãOÓÞ4±Ò²Þztªg~›8ýFŠþÊXÑMl"u³yµ|˜¹ü˧Ã3›åå{ý¿ÀÓÑP}È´Í\†ÊÅY5®ÛUfÒ? u¨§köÙ¶þ\’ýÙ·/!U4KË%ëÊè:ødôÄÑÐRÝܘ¿„•ù»Ú^6³—VYvĨº~ò²êü"‹¶F>†+}üÆŽX˜Š„ñ7ñ¨Ûfmù=?jó¨åÚ˯R1µ¡9Ç^¡€L‰|’;O6!;ç%{•h=qÅ´?2݌㳔ÂîNpîÔ8´‹4†2rçeŽÎz‘Kd’«=òH~6n -Y×4Ô2(é“F¸æ  nÔŒ¸Ç&4žãȳÅJÎßT‚Ê|Èt`î3uÊÏ[lùpåÕ8ñ7ÃaCÎVg™0u:a8\t'Ö2Wîx¤)›Ù´HŒ‘( î±^ÆÀ)a:4üÊX7âcâzãs¯ßŽ§Š,éë‘„¨[ñ€~Î=B‹5Р+Z1Î¥?}¦ÚZئaí^Ôúñû¢½x"¾í¡AMÓ³¶ö;ê5l ôn!åøz—Š†%Ð#(‘ÖÙ »—/É2ÚÇûõk0ZtL •xPÆØôš‘å1Å4«d6âÇ©2ÄN·Ø+Œ£½íªYQì‰ô¤ -ç×Ó‡g€ Lj§lVÆÞ·U×%¤ù¿È9ðҊߪè_¶ ¡ó>«û¥å–¥™ÓÍéÆ"ëênïíÈP¼%>ÀMK\qÁM½[Œ6¬p>Á÷ûá9¡Ÿ/—$Éq#ˆîuŠ¾@•áÀy´ÑbÖ2ÓíåσYEfÚ˜´a7ŠLˆÿaÐu_ŠG„èø—æ] [éóãªâ4ØÖÇhT›ŽÖr' ‡ß%嬼ogÇ”5/c¦Å«×ÝÁ!Qb¦ZnÊÌWµÑÒiVß öûÔß÷1ñmËŠVûÿm–Rµ–Hc› -Ömóý8â6¬Àmª÷­?Œýúƒïß—ÿþ‡=ƒÖ‘&z§‰V±ˆžÖsØtÚºäÛV÷š½SéÎò« c¸ä¯@73¶ƒâØ\èü˱·8صuùmûõŸèé%þqÁ0iXùqéˆ%îÁ,ë$ß?±]¸‡‹áUEÕ¶"ÔºEÏ~ÌÏ “Õ½Ñ잆¦ãæjü˜Ð]½DŒužf86žž%ÞÍ´c±*q`(¥aUì–x¤¥Mè×˪;[r¼…®e75zjîXfãÑ êÓsLTÝÿXܶ•{fHZÓ4÷3™•ê¼fù¤¾q >µ~w‚™sfÇ©®ù+e™ÞWúŠ%kŒKä\7[Ó¸š…ùY¶q(X)W¤ü*Kí68¨>Ç[7ÝG«mý yrÒ£˜¯O±¡BÝ],NšŽK¶êMWÏNÉb‰CÔ1Ÿ)ðs.ºŽ¬+š§÷„Ób8¨õââžÖý–¯©›“ÔÇ~ðkµÚ˜.Ç©äA4y“¨l²‹…•–2ðY[[ômóÕ^9]…ÏTm?Ú¨±nGÖâ;Pã?ñSs_áqëy1hUJi NQâÞ£ õá®|døîथ}Á‚3›q™†°‰`ø$?’£š ï^eö”5±)Î.W -”58–é¼…=‡æU ²n!7¸Çy¶¹•Ee±bçƒÍ -#ʯ$ú•†w*°G>—aì4Ÿ«5èœN¢ °WÕ7Ñd‘”¦dÚ-^éì[ü¶jê»ò´ÿÓþ[Ÿ Q;dÅÔ¿; ¾²‰Õ] ˜&Ù0ƒkÇ„Ú•ã6©µžÒ9‚ï ˜òœn°“VÔ‡©†Š;íA…›[¬eïè!Ô­Š4k¨‰8| 9/9êºØ;ÓˮփsN¿Nsš r:æƒ+Î Ó¼AFÈÿ¿£a©8MBC[&iþ”ImÖ®„=Ѻ$Å{By=°²Q®£™ZìÉ&Ž—sMôú\–æ')ý‹j¸þ•ïëÑͦbùð|}ep=šãÅ*Ô 26+í+Hâ(«Ç42wòÙÀXÙ΂iû«ó -]’G!½<^Žôì+îß|±÷{t¸|œ0‡÷ŸÁ=‡ü¸…¯þsÊö§ó«^Ž`ЕÒm†Ðëq~²ÿ@_—÷挙*{ôv÷m{¸%:{› DB¾àní>D ˜n1ãCóWl³E5Âî­ðš˜h*/p¡8ªsgW÷ÂÜBîÇífôÖ=¸ÀþîàÕãàÚêcp=+¿×·³;Æ–aeß×Á'¦ <«Ó!À÷ ¸>©ê ‡ R§Ç¥9kT¬ÏðüÛÒê¬XýX?Td€q4ü‰‘ÔÕG"t<Žä™½.ï½ìp¨HV:X÷^Öµ}’j?ìXÔÕåä -0Pê†\‡Zn*ú:ÅÄ ›Éa®‚˜xª„y¸ï^ RNŽ'—&1p)ïªß•„@K¦fPÙ*;ÿ/ÆÏV«“ ´Œñqp9&€’ït›å¡ú€°ôô(ù¶©g½üPÙ3¦ÿA(žìæñø¹¾u‹–šš³Òæx ÉêLëZž ªøÙ‹ƒ@kcÇ»†f'–†{Œ³ôZs./á!AŽjkkÚwLYÂO%—@l­FVµÙpB–”¶Ú,½Mþô ]×÷äë{¨ý1Ð+‚í滑z¦cåøëæ̪Йtpr:xb½:R…SX1!™$·û‰øê0ÃÙ–NÛa¬'Š^Ë–J5S¿‚¿^,ÖÍ`?×ÿß“1†~ï,6t;%Ã&ú¢Ì™¥u?¾Òõ—ev^tËŠégZ÷ Z,21¤eÙÓÆ€šôUSÕN+ü‡^ÍßäCý3WG›RYNR¿¬TXzÚ0>hn™¡"P"’KO}ÉT9¦ÿr¼”è1 ¦Èßë{á,¤šÀ‚}®ñ(Ü:)üöôpJ!«Œô×TY _,}ÃŽ”½ƒéÑþ*½ñ¼Š&ÿ±LëäÿŸy\Ü rhê _WŒ w"î×÷S 
kŽt­hOðî/l5Ü_VÉ‚…¥$ù⯠-·mÔI6Ì.ƒŽm5ÉTî1ª°rЦ †zçÆ.|n;Ì×Ï(ο×tª#$¨œû¼å±™dsmlؾŒá¹–gÎÜý•ÝÆÈ‘Ú€™áѧô`8IÙG Ì:Ÿ‹.”\ôÃ"æžNÏ&»bX9Ù¨'ŸGÚßë‡áø;O]—Šc V[°å^ çaŒJÒåbVmf -Ò"ì¬ÊÖÜŠÙ^¶ÕÒ3²»“UËÞé-ª]ÙdM;€\BŒ H«WI»Ó™ŒÉ¨ þ'âp÷ëÇln¥©Úôxï/`†‘º.lþ%cêZ¥ZôµíÇÒ—x¿ÃÛïÍË¿pv+¯ù#Îݯ’'_¹q`D.ŒÜ­G«Û.¬ {¶\ÐA3‚μ …›iÍ ]#¼d‰‘E ±O6`§Ñ^Í.¸ìÈa¶‘°²ËG–p—gA-æ[õ÷øó{ý,~po[úªq~#†j=+‰ß>k¸ÿÕrŒ{7 Â,x¦‡aäñ·÷[ê«èZï¬JoŽlÌò2m43æ´;&yR̯13˜¾Ù>#ã÷úÁ -Î =_:Tïw=ï6k§ðRÍÔ"§¤l#@èO}aÃ..<óôIþøÏõ} 1@«HKÓ½w{XŠ>«¼ýʺ´Bþ>œXSúÈN|ãe,¼•áHÙß`½¬ÌTÂõ_¬#,,‘øê%W¥§…¼ò×KÁ› ϼw<ÓÕÞpæ`”Œ«U_ðÒ10è&LÚ¬þÒŽéà7ªKz.šåfînËõ$ÒÕÿܯº­ù…Ž¥–Úߥàí…cr7ëÀÁEàO|à4€Ï4Ð8Y_®{§4Û|2lX3kê-ûfo›–Ê<<÷|7išô÷=k«¸ñPîuÝyeBúØE Õh‡ìêÊ쑘Föa¦Í®¤RÓtœ¸¨¹ª Ùlèx£èÖ9õuýܨHO¦(µUuèšÀ€"Im¨ t-3ˆ½Ø´ -}]^&#/TaGÒÑÙŒ2Å¡ -¿³WzJ«ÓJQ‚Ç´Ú~KÄL“¸æJò/æq-fÆ:¦‚,[°Ö^FlRÚaraÔk9Õö™i)œkÇÌxbß'Å}0kó¨äõ3U¨Æ‚ï™ÎQÔeÀoý-™¾æBŽë4œj–¿eýJËŠ-‘‹-o54ÚLwÐsH™ïÄaöÚé·÷J{]ËÅ9ÝÒ‘94ì¦Ou1b=C_×þ9›ú}}AÛ}0Üü/åå’ÜÈ ÑýœB…?Îã­/àÛ;_VSb“φ"D²»Q¨Ê|9hM$eXhH -Mà×*ÒÏ 34¡ºMiA,Ÿ>Ž -ôã -d l¾BØŒA‘w§ßb"¼þYû>| -áàÇ2(mäkmŒàã_ˆZÕæŸÐ‡zìæ·|©; @«^s1uÜåÕÈÆÙ¨¸H—¨-ñX»§¸6±ôrò| äOZ÷ç8rnß<0½™ú¤žþ ÛlopºÈ’ìÎÐ{/x,É‹­OÎŒ / R.lRZ}X·~·ýHÛûׯºÂZj’uÈž8ÛIE>ËÙ2mt¸ÌM×dužùÝ(;ÅMßUA›ÇB=ÈjŒL¥V›Çkr€v¿›q·cK2\u{OÕbI ¨ÇÁõ8,ß ]:툋ªède„©´Ø Åh²ê|WÉ®žÀ*„UßÕaÑš:˜íü¦ ”Eë«âØÔ:zT÷Z¾ 'Í^‚WS–Px9Ó6›L.aœì×àëþµ{O` ë!Ó}ñ2­ö¡q‡OËí£.ZŽÞÝÕÇ«•¡ü»V¶“ü%^]Mt^uZªxe¾fò»Bý´ruÜy¤˜Ï˜†ïìñ «Ù«OÍÛôŠ·Ú¿|´°Ú¥:L÷Ê´RhûEbížf”ùSýooNfÜ­zy4¾__'q’AT…–H D”…çÞ{wƒ^cåñÄõXub‚»þ³Fš|ñyfÊ+(e ›ÒHŽÃ4ÏŠûŸÎ¥ë$Ö ï¢”,ôJ¢)ܳåâ¾Ãa%Ðâ›HÍKï€(ô\ÇMkŠÑÿ  -¥òžŽÓ{4¹› -5ef•%þ@ÑiU³fp:¬véQ²ˆL]àX8“oa3žùè™Jõ9ª;ÖNô  Ôôª«8¤QÌê’Úªú¶c=áÇ•l©ÆÃk+”!“ÖLçÅÕX{†Üw˜¸„©WŸý_¿–ÚA~ì«ÕÍu„3ðZ-kKeº6ôÆ]-ˆÑYª— V "Ð <dŸ©N¿Š|ÜuÑ(>eL³§=&~óš½OlÑQÀÏ’œ(ÛO®l[µSZ_îNà]Yk Jb|†1Íò8ºÝc%  –ºQ†½­ÿ ¤N‚—S&ôQ.uæ#£¢° aÜ)RÓÚ’ î¨êÙ˜I}/jb•ÕãïƒY xnŸ3Ígú ó Uºæ£Yy#°èµ˜.Zu>£uÃß«Æébª [+\õ‹œ‰Ö¶7ºø—†£ÃÎy¶659=ª‹Õ©œËí¸Ð{¢©lÿzwŽêRã*ÝÞáä:oIl/t¨7£y8‡c«­ <‰t[×ÏéU«,RFßÎ9«îf§°ºêyýA¡mfç}qÿšGÂù–Þ³­v÷‡ü»öx}ÙÒ”áLÌN! YÏ[ª–$5â0Í Ç*=–0¿H_Ž -ª?o©èqMWyKŸÇfoöôsñóBœ«QF #äyXhý÷ÌIb ç\¦Mf»%~ë6KÇ­Ó¶fv×n:$¢cðYº&âÔÛ-û£0V3oth±Ó. 
ÜYN}eôüæžÈ$!fú ßÄ1öÀ`”)!MÒžg˜ÓÛÊd£€@â– -c_U¯HÍÆðÄ:^pSåo¦Cî»ý>¬+¥¹¼š¶e*ÁiËÂwb7f©ôQ¸=» È\ ˆÇâÛ=Cš‡„þ;rŸNP¼¾©À)*Î*Æï}«Òj>£¸î³ jOù*îü–p¯Ù¹ (ÚÃ¥½+tÓ3ßšíŠæò¬ë’–âÛL‡Ê°ù[·‚žþ‘Ú³[Mÿü¢ßçF.ÚI ÃY†UT -W›%Ÿ÷é~dÏPÜybºœ?¯¯kÌ€ŽpÙ7AíY‚ÌFG&ß#Ÿ[ÆCa8—×»ÕrÊð¬¤?så.;mÞ®ÄÒ_ŒÉÌÔMàŽÅImuwLx[Õ¡¯Fê¿¡~ôÔÿ=\ÆeO8Yòs©fŠúy}© h‰ÕϲÚÜFªúX–y\ûösâ=ldδž•ÍÒŽ×è&Ô棒‡æ†ï3/ˆÙ0“žÄ‚ë¸ÀÝ™ª¯Ä«dI‡*¶ÁÄæçýíjjuæãý«½„RJ¹JC‡æ³Évwÿúb; hÒ™÷êy` -m.£ûáŠí8{(jtŽ·ô ›×©Ð<Í·˜6ÔqßØϯÿßU 6¯»Q•Þ¸ $ ¬[kNµ™¬ƒ$­Ó,ô›m©©°qbPµ×¹?E^:“Õ’T΀x†)8µI[&Ë4Îxp±ÆhYœ#ŠñCcïÌvçœËTõ›ÒïppŠI¿Ìëõ¥"hI£éP•QoýÓ¹ &‘ýï?Ñݳeæ¹ô̯=<ËASÖH¨@d·3•'Ù\§#(îÎ/_©L#ÇwÅÉ‘´Í±è7N:IAPš*”qï‹Á+—^6]<!‹Vÿ©<¯h¡^ªfê.5+7m„1õîïïRá?âét-7¬ûr ˜î+¦•ÁÛ¶ï¸>ŒrÀ·³˜ô gݵ‡x¦šÜÓÃñ6kQ¡¡ÏðŸ+$6OÙfÊZ2j¡\º´ßÏog<©9ªgßÓ…¡²š²y}{bå«£ «{:Òò’.V¦T·ËH‰sUx“vZ½²q‰p+›Ãÿ L#›scí™ýxýryUê¥;—‘ÛJâ,wŸ¿r8í$ñº»­‰¨}™ê2·Ù“mµ<‘¹ü¨+}†¥WRßZžžN—q—¾Y?ÚÈS™nO_¶¼)`˹üçP“gŽ‘úg9t2‡“‰„ÍJOVöxøëYf™ŒdÍÞ…³Ð=·=p-ç°ŸŽòø:;ç 2ÇPžã -AÃÓëe81.fˆå¬Û€ËŽñ±tie¥Mý­ž®jÈn¿á#ì˜ wóžtŸÓ›§v!•zÚhýœ¼%e -šR‘]%Úôòd-¥1ÛiŽûc¦ïÀ.ldW©ùž µ‚'ê#úe€‹|Ô¥Cív…ËÂ@z5#ú%አíËò"Ûv&›4+ÎIº^—¹?Lo'Lîå6>ahxq½0®ÆÌ|ö.ôSOÕeV*“¹oêR*ºÝ!£XàÎiFq¶ØwjË:l„ŠŠÄjÊFð…¬zæ¿”—K’Û8D÷>E_  -ßóx3‹¹ÀÜ~òeQmQêp„½`˜-JPU™/›Ï ÒÍG_FÌŒ¹¯¡µ~<.ê‚×ÅÊô6„*]Aæû¹Iáì9é$‹ÕÖ#ñHÜó]e.·­ýxõv¯²ÆJ¤€œ+<t<“„[J{p®žX¾AˆWÖ˜n$ÓeØß»ÉßR—™^„ãMGÔ§Ó9K*©qK•¦´@©;çÈ¿¬ô‡ˆ %·²[ä±­æ?ngËÇO¶,ªËÏ @õu¥«£F:^ãÉÕ,wz'h5[ç[ðeŽ0’»ÓàºVu²mB²gÙ.9ûÍü2KN7ö,"mÙÜ—è‘–)R¥Û6CÓŠ~³\Ú5ÌÊÈ~Åï¥2/ƒàj勒„FŽÒç‹•gBè™WJb}Ï9ì3•re÷ížl×¼…y¤Õ nËk¯çòÅÞ-ËNˆÂE¶Êì©–ÖÅbEÀ¬Ïͪ”ŽzA}`É€ÏjB˜ç쌻òà_W Á›«hª -¸^ -±žÂjØß]‹ìÆ'š‰ò©tÖv)ñT5õ'ç«r×ê}Ë7¦6ÚFoµ'9…ækÎ{`\Î$îjš¨:/#ièÊ`W™û•µÁít¤Ó#¬9M±¿~ °ßø6+"¾(æ¥zÄ©Ñ wEi¦!ø¤ÏðY‡ÃZfÌy?÷ŽaýtÁûùvßscæMEš¹åpûÔfAŸ%M6#G­îžaG7-ö ºN”3I¹º'Z¤>«¯7v` -J?&ˆ¾dɽ?×7OŠ¹súG°}®•V¤“¶ÎKÒ,kK6¸ÐLR+ôv)ÎéfÄÆ,5‚F¸¸-‚° ¨€É xçvÎćC}}.j‡aòªãkÔèZŒ¾®Rž¿9g¡áešÐVæÝfßV*q>‹T,sKÃ|:yi#U¨(¯œ ÒôoUÕŽÏ……ËQ„¨©âj*E˜ªÐÁʪ@ùa5«Ø-®Ácîž’ «H’n\ä†sîñ–I”|U ÑZ­€*â~bÞ«ÛC*›!±¸ ‡ Lû4OÊU…\²N y€XÜtÀɸˆ÷H U9UU7€?ø„››-ª£Ê¯ÚÉìÉÃzk%ÕèÌö=jM‡¬ëZßÜG%ÔªY¤(žÄÑž·ÕfÁm Ìôš,IÔÖØ#åªVb) {³‘¥q×™»…íI‚IO–GTT±Gdï}ÅR<ß·6[ßl Âôtëj½n±®qw\tF׆ìñÿ×=Kñê(왬6ƽ«ýµÈEtFž,E¬ŒCNWkæ»M£m]žœJÕ5•¦žÅ°à˜ëÿA›ÅØ]žÍöìì&.£ÕÊtþef -@"Õ¡ŽQ–ó [Ÿ£Hw ƹŽ7‡¦³Êˆ5´èõ¬õŒ˜µ-œ)š ÆÇ‘ 7¬ˆæ‘_f©±XtÀwJ–dz=ç‹Š›z†·7ôÍ×Éò£×Z³¼Ojûµ â£þ|„ÐfÞTQÙa/–ôWËäÌ s[à÷¶®¯Èõ9%~.Áçr¾’Êîá;zW<Øa-éQ«f—úÏ*l¸#WyÑ3?W€òu‹³åìÖ ‚m=ïð ãDôjA7·OóÜ=³ÆÎÌ°í3%h9˜12ñ„óÏ™'±å>•¸V{õ°AÌ$ÿ:.]¶­×MuqÆ»Uï¬ÕtèÖoj¨è´{z„VømT¨'7FÖ¢x-lçgõGÓÑU‡ÒAÝ«·š©ÐûÞOžt#Í¿æÛd«•¤øƒÉ.‹ð8ŸÕ¬·ÄæÁmáYÚ² -s³FÆ×\Ê šý3iw™Úœe[N÷±ß7û±+4šç€¢B«_p¼ý †æzû¼·MŸå 꼚y»ÈÍDz ÔÛ9Ç¿ß×·sÐ&+’›õ›ª'Žv¬7íýß?›§-{ãëm‘£/}ÛË[ÇwRÔš¶›7‚ Ûχ›ëŒfÇòP¬ñÝtk¿¬÷Mȧ¤*µ±4¹Ô[-™JY¡$OoÑ:?ÁÀX·/ºj~(Ì^-L¦æ¤ …6'žÖnG²ìt˜ÝN¯uß•å}vÏ&Á±ºª™ä¯ã‰ 7lýªíÄ.ôÄ¿ÎvÛ€Ö붗¡þÞï½ëmI¬ñõ|NÇ %¯¹Oß©à;•`½z[=w=‘\eô·É§{§¢.ß­¤Üv®ggŽ.þÎö—jq -˨|Ý]W#ñ{ÍÙrGJ9;'®L°¡…’Ý5zbؽ뇣ép¸oAæÍäºÉÂy¤½Ëò ¾An;ÎS1`´c ½÷ævìg$yé€äÁõˆ’ôíX>S ·%G éæBÝ‹8^ð»?2ç––À<š1í¾¡åÞ8 DÃô}|²«9X¸?s¢›“ŸV¤È¡uÉ *ÛÊô–v¡k`Î=¼ù08«œçø©:©Yõf&1S ô¢Ð6èå¹^ßLô$ØI@µÍg?éodzPÚÈ¡£:#sè1 ÿ¾«þh›±¤Èjá'c_Ï>ïÇ:LäOG)N -ï -Ô=(Úy!=z?%!|Î~²Ìe¡û”çÕná™™ã4'‘¦nK ˜åIKáÙ5×óøY;k´„ ^Š—µo£ìœ%‘{é_î§eÓœe†Ãw´”Í €ô½îÄUòÍZN˜»Û¦'[ebÔÁ÷êÏ\…ŽçhFî3XÂ断ߴìZŽÝ¯ú¶Õn= °âm6÷yr{'G²úqI°VïûR#c … ›ÙÏäÁËšãb^þÿ òkYMåÕ$©üôϳ‘ìê^!"oY?š½aXØê%TdÎEXAUQr83Ž»ÁE:’›åÇUvpe<@º,ãO ­º=ãR?~Ü6Ü=zº»›,~,’FvÊ6”n{­IµŽ—±’V'Ú³ûªl5¤JÊc­¼ÖrR-±õ]\ÿt€Lº³Ñ¾Ÿñm;9ÎÇ$Äk«%]“ú½¢Xü2 ¥ñÏw¥%´·a åCÂÐp Ûæ,=ZÍlj´ÂO’-q”»È¶ô×¼ê ¯;ë_Š:ö]ï¶qÒ¥.8¶ã‰Ý‹ Ö"Y]Ñ>`D³™* µ‰X—ÌªÚ —Ø:#¿I- ì%L sQ®2 Õi—¿ Gõq}ß•zeG½Š¹÷sÓY2ŒºÁŽkøò3ZZ“$ 1‡ºټŌ³~¦JGM+Är™iǬÆêt7ks–µ¼ÄN¿~f¤ºíõ\®g%±åÁò͸rW’%å¹B“gLÅJf®Žª{¯Œ9=i|§NGs-w±ˆÓðʸ1àwËðœ~§ÖãQù}ý«³¯ÕȌɣu/N+3Âå z·bpqPÔÌB­S¾-P‘8ÖÒâ–iŒ“çF$]¸Û·qšzã´X%ô‚¦Ñ’é—ZTéô­ä<…oÃX|6qòé.<÷YPt›ï]¤¨æ9t4SDî_n<ìΰÜsD1Ù ïJÖg®—‰ê“àÕ)ц}åÐE¾åaé¡Ÿ¨¬qÔëpÑ+Ç©ï÷åÐVõ‚¨™Ršß_jJ)-=ýJEð³ZÖÿ”—Ird9D÷y -] e$8Ÿ§·uÿuûsPYñª2«MH&øÔR° smþĶé@4=áó}o ÞõïRpWU ¡LEÄh»"!8à‘qÿÒ\¨yî#Cì%¦£šÅŽ 
ÅÁQ÷ÿõ@2s^‰8ZèB.éD´¦Ú¿v®HÀú¢[ÐBèCDµ„PÅõ­¢UjÂNDC=¥:ìD:>ùfb6 qP”a®Ó°0qzbë4Å“jK⾆`‚@¹*Üðö‹GÄã:ØúHlcºÁF¼Ö§ƒcUê?€JZË|4µW¸‡Ú基ã#Í°:Ýç*6åWCÓ;Ç4NGƒÞèd|tì“ÎGY{Yó´zDzá`È™u{|.SÀ´ìv|•nR|”ßiО_¶ -\÷j`µmwkBõH×+T§Iº‘y¯”°‘ò~ <\È+ÈŒÊDÊm.3z¶©Ô¯£F/·Q­•-úfÍÑ;ö>ÆàÎiÉÝÆ¢ü‘ïÅ(:ýŒ>Zºƒ½³ÙÔ¥žÜÑÝÙ“¨é<±7{áZªm©&ÿa<Ž½d¸óCb OüAW]Ї£ÔpãëwjÉÇöþ†Ì´­SZ.“&bÀÃýÀSò2L¿­Ø|Œè)ScñèΤˆ×\öÎaOµѽP˘k²ÔrlÒ€i­L¸X¼:Žš–Ý.Í|¨Ä–:rM{™ƒ²ù¦¿  u“¶.jr—z<¡Ê–±^)k_Î=-ÏßÀ£Ó:Ž7¼¦™¹œ$”t×IxyGlI,€‚ƒ·1ÌP³ËwèKëÇ,Ör7Ó.µYújÎH>…ÏrrY8´i;ÒVOÍ—²ÝËewNŠŠ ™v¬Ü³r Çù°„³½XÂñnv+šT¼Â¬û¥âÝá–6výÞlœ®jÿ2Jww"cì>cÔó²ÐþYSÅ´0š‘ê…ÏBb펖,?šeQÆ4sa´ÄYD<«îqÃ6 U„T±”êÝ®ýÒF¦É}93áô{ì('þew1Âwµ½>’ÓÀbºž—²­í²q54†oNZœD°U”+hËÓIÏÔËéJx»·Y7t3;i¸®îMEu Ó~nì}í…ÒIËVyÇ×Q«®?pÓ•=Ó-ákf\ «ÿgÿ†Çm%wÞéÈ>‡ç@c´ý:p>³ 'kE¥¸Žà«Y ÊW»›SJµ5ÆÞ1Ú{Ã,õ+VH;†­V±Š‹]¦ÕxùZï=‘Ḇ=ÊÒoüìŸåR£¬ÍÁºÏCl¨lEÒ–‡L¬n ¢ÕÂuuçæ<7r§x ¦Ì¶Ù󵧡š¯-áÌT"’c{v4ÛN\®õǽëOþ8àj =#°Z¶Yååí•Îº'[Œ{ñµ\}xô#~2î¬Kñ­ÏÓêU~qÉ=Î,vØh3G¹–ŽPàË pÕ-âÀJ2³‘ÆJ7²ªFã'O¾.ø0CDI~ß©ªtÀ{1× Ú—¬7±çi²dÓ§ÂF­c¼ö«yC*¦:©wy~»‡`g\K·®fµÀLœ™î¹Úf6ifçÖßWÛÜ´~W ¼L£]9Áx3YÑÖ—¾xʺsr*3Üù`õ”ô $ A±ðš„õøªßfb|üùïj-8Ì×5ÿ/þÀ½ãÏÍœËÙ¼–2OÎ>’#E-Ñ”YlyÝlÑi»~¹öjÚ˜-õ<3„ÅQ8¿i'¶7花Ëw–e_~ñ¬îèh)|¯Z,%2%‰SMî¡ê ™¶zü·½s2Æ*kFÔuîТ멻Æqr´žoZ9¦§xîwàI9ÎЇ«dTƒ&h¾ïônßxŒ´™–{†Ëó<ur 4]˜†]³N™U‰R5E3…E,gÔ@“1Ý£'ÓÀ˜æo rØÊh Â×C*Òzü“uëBg- ë¦ÿF¼ŠEMËŸÀªÄ$.€àÇYUò°M“‡ûFt°†ÅI¤xCjU¤ËŒéË–ªù`Õ<N¸såÑÌ -G­köKŽ^ · ãµÃ/­<Š•Ì‰do“Cžv}ñ]XÝP?ûâzd!ò»7á -µ‹Ö°gwF¶_µl¾veränu\Üíu ä±¢˜º»´ìæ÷1 HP™ÑÿÒØãPòõ;ß9º*'»úƒ´òcÿUálÎlô¯7öK©»ùÐå/}ù ǼÏÇ l_}IbüMCNJ½²Óߺ³ñ›²]‰:¸m|YiR®Ö­€¥škR¬pŒ}ÎåÓ\ Z`-u¨–|DhOkˆ}yÖ²ÚW c·ú& - ›`ÙâÅ6j^+™dØJÄ'Å>Ÿ`Iÿ;³Mw±‹ÔŠµò›2 #uØ'ŸBÖ¦Vf ÍÏnñª£‘xt%æH0–”ÿšœ}Yìù¥T¨Ñè Ð -wSûUÙb¼6§(½ÐlˆÕÈzò‘¯™MÂ_5G š,V2b ×·^h\a?ÐmûKÛÿ…¶ÚÖrÄ]`X_A^†»ÏäŒèin8©iÕñ“f¦O è[™ÅžzÿÀf^3#ªf>Dà>ÝL\%SĮֱæD%—Ünµ²9‘l¦ñ5¯H3vûEÕ4ÝŽ e9a6Æ'<α „1˜š9Ó‰Ù$3iþ‚Gî'[ù/dñ·ÁãÙãøusÖZg…‚n‡¾6X`)BÑ Ñ„ÿRm¸Ò¥d[X·…S´©ìq‹4éÌØÞm®MMl³é®‰ õ¯¯¼±WV¯ªÅa!ColqøpwIºnZ¡Î©“S˜Q­fHMbׇ‡ujŽ§Uè¦Ë¾íZT›ïÅœ2ŸÀSÅ\jÖ7})Å0שÓÄ/ ¼sx!˜§¿æ!‘ÙiÈ®æ‡2Yt—Ÿž8–-hŸÿ*ýšµ¡;Ï—žï€¼¶Š©nÉ ¯Ò1NŠˆ3T«3ú üØúÄñ+–õÌ´µñ>ìŠÀ0'>¡ zCߘ6Aë¿äB·¯î§I“ÜÀVÈ%Ö:7µ¤­_Fnq¢—é–Ž?æ(;Ê«¬㉊iTÝßþæ«{9p ³u´É}ê«9š–šVgµÙƒŸ60>ðsÕÀ(*ÄÔkÜNmE¾zX*dŽ%òÀüi+êƲp.Ö-*ZzzæâÙHéþþý¾¥ë[’TüçuÜKÊU_ õõô{-d]ÉÅv¶TK DØʵ–Þhì´r-ýF]³Þ¯Õ¾wž@¬´ˆÎ'Ó$ˆÎ‡58¦ÍäÜ5ÍqMx¬¶ôŠËéi ×£aªŽ™BðÆö¢_‰ü”q¬Ù§Œ7g²Ó"5ž†;ßi†Ó¤J3­®N…ÝŽJçVÍ—[\ç[íQ·Uq™‡D˜pA7®-®ÕYs§>v†Dþù]o¦÷ßæ¶8gÂOl&¬¦ÐÝl˜æH¿xNÚÒZ;탟­¥çÂWò»ÊÝCíìwyxŽCµh· ÖV#Míño&°¸te¤A?m°8Z[æ+#ó!x—«°\ÒÓŽ¥pZ6ªuƒð—Žv`È£@3q2,è‰Êk±)5Xu:¯Ø¼õùIçÿ)/w,7v$ˆúoÚ@¿ ñ]Ï8chÿöÄDK,RcÈa³šÅ"ÈŒ¸‘òýú±îEŠ¼™k®^=±3Ëêuݱ0Þ´AñoŽ¸è0™ˆªT‡|aú×}Ýx®Ô¨&0ýÑŽêD‡uyd™S³ë«4ëqöøâq*pz'ÉQ/Î2ž»ÊÐØwJ^÷ï™íÝ|ü½Çíýâ‹]ÕSÞOŠÆ>É·‘¦«iºÃù @s8sµ~j4¯F¨'9ÔUm÷QÝèv²ª¥C]~–¤¸ÎÛà¹t÷µ~žï‚¾ôÁ¼ïmLÛ>Í|›Âf.ÜO½›“ 8 ï/¢; «}ûàð«0wê}1˜ûDævŽð]5 icš©|»Ÿ¿9Ý^ô̆ûçžP¿gM=lââW–1­“´¯ù<Èh,0ŠÕ”¤ûX†½8ǪŒÒF‡%Ô˜ -âXâÒki  †`ö'ŠTi¬âöÓM°,tCËýúUþ¯- £Êc[{åÖ ÞX«È¼2W˜®0®bàjðt*”oÚäP´ Î 2^8lKº±Í Ö7Nj*ÎÄ>Üp(-¡¡ª0qUóhf·¹.߯ËîLXà‹Ëq³—’‡’XÜC„>ØÖýXËpæ•—`g®˜TÕPŒ£…á’gG†ŽUºÛì”c™¤óØÐ\:ãàÓtÓnÿŸéF‡ÛÉoujýS2ðzŒ;Я=½ŽBJ&ZŽO£…ÎkMË”HS«Á€´åýÖÉ„ÔóŠLMK `G÷ˆl}sWÆ¥ìjNÜ‹+ä" êt›ú—ðbýºÙÒEeöÛrC×7¶€Üyú14H‘[תÆ8Æ´Ñ—ö#›·ÚF«Ý uš,&ë3ЇÖlF5[6,˜$Vƒ¢óv[·Gzss|çög+îÎâÿvo{¼2òsù§²>M®Îp<ÈñX¡R¸Ú®v< -HÖÜ̆$õi©Á±ªî°ÃpµÌsèÇ*ÝA÷u -ý tíü9еãB0aSƒ»©É¯ R#[æÑQL`„¯“ö¡ÆNk©†)ƒTlQ‹ÖŒpÍ“͹„O³ X'B šD·õîo÷´òÝVjMXzêÊO!QIèö“c7ý¾ŽÉWcxXät„,–UXTOš˜²SC¨¼DÕx’ 3ú‘+,éûeX:XÐ"ÉÀUƒuNòŽÝggîXÃg5ß@-¬×ãâÚßœ©À™¶3€Írýl¢Tä^Báì‚O´~è¢BÄ´‘Í;ªÅÒáêg¸EeªxÌÑ )zJé2Û•N]vÍ]â£Üc£W¿~ìkD˜Þ÷)É¿6uÐ ”Ì£74¯4¼O£ ¬=qcÛCèýjÇ Cѣћý›@ß× üÒšjsš4Aû½ŠêòÙ褶?„%ôŠ%:hZcÜE0E_|›o´ü†8CÙÞìQÿK–šÎÊÅ+]ÿXyØ”šëÐx´¤¥?;¦z¦ÑÞîjJÿõ*}þw´xÈŒên•ä‡ËVI‹ù=©èÝ»9Þ0ÂÙ‘I`À¾‹4æ¾kÜTÌØ;U]ÓæùUE§ª.$‚/‚<Û ¶JzÔÛÃ}1£AjݱR#íþE(7ý³¼jË<œØ¥ oŸI"ÙLB£²jJMïLž-SŠ•L²gÏÈ´šÑx›¡Çð¿ <ªªY "žJÕjV€Ón„¿¨ƒÝèy5H3sfõ{dž‹!oùß[Gˆ ~G\Ú2 5¨»bð±ù£^Õˆ\ k­ÔõÇHr©Ya„egò vÜø§4çóŒ·Y®³7„üº·™’7]½12ÐzáJ±áHž¯Û¢í¡.w²Ü"=oî€3Ù´í%éÔ€h‹º( 
#õµÞpËeƒ¥^ÖtŒå+šÚª<3OõœU×ò$3+šÖ8;©âšñº?ùW`ü ËıàÓ=>$Ý $ÿΗðK7 Ïy -o>åõ£Ö’- óÝšŸUžÑ¶$*ç\á#úsü -öÈžÞ¹Î)ÍÕVDÞähdU¸`|¯f°ºI%ÁfÑœ{:ãê! ¦DéŠûÁé·Fßœ]wª^w$ä—2®l’Y¡ŸÙëµÅÉq%È›¸c¿‹ÒßW•ì½XE}‚µªX#9îW­ðWž{Ž‘›mž &”2ÊwWê’î:Ö™C&\Iöè‘(¯šâµ[„zPº%Õî.FløcìZ[•h·Ó Ñ•Œô^±¡ýçŸÿÞ-öFWöÖäû-v<Þ”“4–úâÓm,Ž¤z® ¼xK!h-ËÜcåIø v6Ì)¾'¦¿·l_É Ri]Úq fÕÒ­†ã onŽ;üHdôÙêÓÐ:ǹö[CAšs/AJÂXÕýºŒÛ秣 Ä´í%÷€€Ô™ÒfQáÀpVˆ_t\¶ÙÝ5Ž„g+)ÈO ÌàŽvjBö0Äw‡B4µ8o>–zu{ŽP1Z}ΦÀ'áo -)qýc¨„Úe&ê ¤žµ‚¡ªsÑϪ]¦HE,ºÐƒc“Iy°yOèTlÛñŽÇ{µšJ1l¢»{ˆÉ€³«8Xº÷ôc#kP¦/Ñ$^Ó힤äÜW5Öû;œmÞ,©ŒW’赻京Qœo_NwHö?û›1Ætx9htóɆ,¹CL±‚§pJtWOPÔX7ˆéXÞÍ'M½ç’Å«té{µ\2FÈŒ¹M_fÌØ'?ÇŠÄÒ™±0…µ\âW…(ÐM-„,nøìŽ`ƒ!¬¾‡ Tç~×#H0V`?6âœL2 -#½=ç†ÀW¤ÿD;y/ªúåðÃ+@ÿ¤že2ð ÃùŒ†ÑötZʇ‡¸ó¢À£² Cª^ä€_·’¢jJ+NP²Ã…$ÝØÀz•p\,8(-pöIàŽc†©g–ŠçXe[Ç×k›ÚÅc¸5'!ý—ãèjíKH[µÓq›`$lhÇ™µe‡OœJ¾í01þìð[' ^û<†øD£Zó౿lˆËŒŠy¸Y§`ãpÄp×4´–a wƒÕ;í=ÂwŽa^âT„‰'ƒBaw-,,MÚá±÷·f¹Þ<жY\nij]'¡£ y>¼šØüfÚÍÖ| ¼ÿ‹*\)ãÙœžÒ“ûçb"É—›’\ ¬7ÁEAÆ¿»¦i\œäöA½·¿È]]õ>ì×óX@vçàÇ|‹ÝÇ>Þñ)`sC¶äÈ)aÒËÛÆhŸ8¶ââ_YäRP5’!:ÚPäSí2}[Ð<ÓqÝañ®;)þèÐsDC†%`ë™%jŠwYÎQúgJ'®ßŸäß½›nÄíý³e—¦Ñž4´§îWü8ÙÜy^ûÛ ø}jX ÍU½xcšÅù$à«>ïHùaªmŸ?Ú·Úv䎴ú¼HöàkÉú=Ãåܶ$|.l]Õiˆp …w¬z¹î¥XË7Ñ_yœ¹d°=9¦Ñ2ª÷>4ÐH‡ã¬€çãØkæ¡™Ð){· ØøB7S‡R?vÖf/6ÓkfE-A -u®jO=t{:¼"Y½/4ÐÛqöÿ(/—ìF’#îû}ÖËÈžg6ZÌý×r·HrP@Kz½ ¬ÊŠ»9O6õs'ê{¶†ÆBZŒAgåu/¤0c'»ìçxiÅL˜¿ÿgÿÊ…!+]ÏIü,çó|®ê‡."ç_†KÑš8cñ~i¾²Z™-Éãøã“ “.À$;²a+‹œUÓÚ:°ñz>bÿ„Ãñ‘1FwªÃÈÇr%_á<=QS4¹ˆáÌâÚAÇÙó$Çö>;DÒÌ2±ŸS•s(_Sj ú²lv?tôç˜AŸC#¯{Yu—{ž¤ ä¼‡æi7üímbmþòJ˜Ò¢S6/PÏ¥(õ’{Çׂ­ò4vŸyeüxK»·‘{ÚoægØ“å¥wùkq5=±$zV­%[)º132ì6’‡N;ïû.‘2ô´æU¾fäÓ–mFÍyo¤eæÜ eh–äE‹¤ï «CГ'̶‹±…Pññ`^rDAÝZ>ú÷ƒY™ÐÌ–uKËl«çî_¡›¿‡ AU¿IRù{Ïòvv ´"¡Íì# 9Ü!»f«_5\»%&£$k™c÷!À¤û’^Ùõ϶§Z¬ÚÚvΣÁã¸÷ qÜÄ=À·Qƒ¼Nždî\Yà²I`ok|•êü¥Úžz Ï «ýk?JúB9ªàu9äÞf;"Aú¬~Òë÷;˜Qœo¿Ô©pDÏݺ¡ øóã Tìûõã Ç)~Ê©¥.ë¼´x¿žVnÐÔfdTÙª6¡~õ§ñ§ŒÎ4 -è?÷ó0×Ú¾_Ã÷mV·¯jSÛê1žåÐG.)mv6œÄüócbZ:ÀÒTúaË“åË t¸â°‰¢ÊÔa#«RXM˜÷þ:9 >ˆp|ªÂú SÀ[èê¾rqë^˽ ˆß¯¿‘Õ Ií5Á’¦7ÔS:"5ÓýuO­† úDÏ—?™Û³dðÊQ–¾vV¡ŒXõ²hõ<[<‹¿~ ö¹3uõÒåôP ×á&7ÿë‡P£¥é 0è Uä÷À»‚µçë'ºL5` ÏìÏYC‘N f ËÝ¥2k3¥&Õ²$Ê!f¥_æ{• ÒfÊNë©»óØx]ˆ/›zÍ^SËfxù·È@@÷ÅK=àR:ï‚Å%wPD°­×±ÝÜ9Z§E<äÜîç žk0ŒÉ¯Õf(6>—;c[ÉO F\ýŒ†ÏUÞYËM=Ãÿ8“H,0t´§û”‹÷6´WÆЈϧ«6w‰úpZ^‘sÑ™[D‡ÀšQ‰„‚õÍñìâ¬?¿^>{Ø­¨›÷hϼªcx^“¼dðèÈ'ÛÒ*;Ýb§thïÉ_Éû¹‘•˜®zÏÍ ?®%…oc#›R„‡—¢š‡=Ë“TºÊ÷‡›áÐÎA:©—‡0£jÖ+z¥|ÕùˆN½ík ŸßR]¯EÚéâZ©kX#iWÈ aõZ7Äîiñh_dþaÎ`Òå`ÐIN.˯rÕP×y˜s!ß‘=?´»W×?ˆ^G…‹ùº“6‰;z^¡½U1&$5gH_›i/“RœœgrÚÑ6Õ?3wÅàÙTܹ17_0Þy…ú÷y°±µºúñòE%&Ø Ç]£N÷z®¶6fc\U²ëgA&ÀaàÊßÿl Á×Ée Ýáz©IÏjÅ??}烦êiC—=à¸j¢yÿ}j±íºÆ‚±ì ±ý/Êårt›û‘ºZÞŠ?ÒéwgÒ­iðÕ4â)‰ˆÌ2|#_ÏC¨ù-upWØÈ—:>œ¦Ho&®T4€^„á#NÄS;ÕI“#E^­ÕYØÔªþ¥½U(™ÁæµM9G« )IÑœ•F_›ÅÆÖ„-n2üëÇ¿¿qkÿçjUa†žN> Û—‡mœÍ.pëp‹·ÎÀ rlUL<Óp<ËEuY¬†-OiÙÏ–{¸g~šBz÷ÑÚ¬w+ç["–F¤¶ŸT~‰ÁÌ›ÖÅd_r!ÄAã€Íà{>„J‡Åº_Ëhú(-Ð߯Ÿ$Þ5{ve£Åys}·Eó,n BR¥QAŽqÝ2o•¨A›½•V úX+lPsñ6¼i4µp"¾#n½ y¶¸Ñ1ˆœ=¡ 2W‰Û»y²“F‚ê’g–ÜNJ7Üø÷÷áY5ƒ€$[³ÙŸ.Ýý¶ßsû±.d/ä0@cM,†7ÞfKGm9U)æHv7ºë–ÓåÃ6œÀ× u^›%'²Å•”‰£Z}pÁñûzÕd8ýèé´ê6ߴ뺼F”ÚúÐñº=ÓÕërÿ’Ø÷‘xzSu=ëºÓÔ×4´¯è6æw`àõ;ù®’êIYªH©Òä-´Ã_=ãIÝý¦¢ŽdëÑ,¹¶ -;²Íæ¶ÈJ„۴¹êAÃ=gñëõý¦EÒéÌ,ºc1“^<>13âKD¾¾;>³•ÃNOºÉæÄÂ/LÓ)q¥–L£.™ ïÚ\?Àc$ïìdTqÆáÊái«ÀÕë#¶îžµF~蟨VõŸ†zuÕÎRºYèM…󢹞âDtR7­l§=D£6ƒª×Ο'hÇYþ›˜Üêlm¨ëîYvöX(@ÁŠv0ó#å¦2¥‡eyl>½­,»qIvM«_CÖßÚc/ã)‹žÇÓ¸ê'5§ƒíD2Žqø<Ì¿&…*zøvK r¢ps[„a)ÐiËÍ×VÆ0o[{¶µ~äÀ¥l=‡(/‘AºsÏ­0ªb>åõ­]`ÿ/þØä­‰ÆGí)’èG†:Uê½ ;wDË­ç|õWºäAépœáц™{%p6Õn£óÙDA…Û©Ñ&äQékÖþþ­^ïåYk8固7Þ#~>û·˜ŸHy?N¥¯8ÓW&¨ö ‹jKÍw(Š%qPà:h íUÿÛsáŒȼý¬â´W1LÓO즿Ա'­´eÃM%vò¢X…thj°½ñÉ“ç éXN•,”ÃnÍ|E€ÓèÏÇaO Á÷õƒª¬èžÖañ<’€Àæe™®9Î-÷²Ô´“ôh#ÆŸ–ŸJ¼#78ÜÐúõE/t:îß怴›†ÁH}˜·óOÅÚš¯ê6­0÷Z¨¯êv¯Œ¤±–OöäpÖ’É+7*q¡5Ém3SúG¼rLBÄ#trœ­Po'Ãbï -bMÙz¿!Ǿ±ÓívеJ²!!g?Ÿc¤(¶¯ê9;’ifÔÿn«¬ïü‹ðâ•bz, ÌRâUK¶híIh~¾´O×pêDDç¡öš‡ôÌVlOÄ€X¼sKR?—Õz»?$üðwhBeƒfàVúÕÒ5àðÿÍßÄØÿƒ#U÷Û‡œú­¼æX?p§zý0ÅÎBε… -×S¿(OõêÈ›?­pv@2³ÝP¹/ÖU/`›O¹*–†Ù9ßYôtµ¤!Ódø"¦µÈðRað¤ëéÍàM#Yy“ S‡­ZÓõŠ›^÷Õ®™‰ˆõ,Æ XºmÏAÖ;{<ÌíJÏMo±þATÈ™Èhêã?”—ÛqìHDÿ× -9 
‰BêeÏúïÃäI´FMénÄÎ¥–ØdHäãao·kñè}ÝŽ#{"<3p'wQƒ›4FC_þžF±¹+Ë…7ŽÁd£1ûù“ÉÆSÆ­ˆ9uâwÐÏ9|Ê.ÔSnØ ì¿ìJ–7‰/…>lÈ2ï`Sýòsbì?[ÒqÑ末ÒoôwxdUÐû 3YÚõåÊ­0!/ج¸ÊÑñà·õ£›ÅLî¹Éòüa¡ ›LŽþË4llêâ¸~{†ÛË[¦D·ÂW˘"á -ý½ &ny9¡©Ï ¯Dý.~Æÿ]¼»<¦G-á•pSFO+çv‡ÒmàqnâÒ«qÎGýþ«"Y3p.J÷±žªÂ&åN-° בoà Kí½¤[Ó3çÄÎ>šj+óuýÃÒ´žó‘²Òyñ˜¸ï`Mv½¢ÁKóåØÙ3e§6ì°´€/Ó.Ka·Å÷Óy‚,ÔL'-áOU*è‘5kv)ÞEý£•nGr»)u^»Á4IäÜ>À*Os‡ÇP¹·=¥ëÿC‚ä‡Øi¤ý (y¿·a^‹6)lf¡º 2ö-×v+pîUÄW¢S¶åаÝLïQÄ®ÂgÅ+Uó‰J™6O[Ø–MM¤YuÛçúõ庩ùg2cöºº@dã_tëWÿÕêTa+‡öñäé²_Â9ò!lDVÉA·Ð•oi•'Ðq· -Å~‰aØlV²êËÞ2ËÃÂÛy‘¾¯?êÓXY-‰¬ŒÂÊ~”·{X¡‰Û…/?Ë›ÉXä) é¨[7Ru˜#& ì°ö@ƒa€÷—¡4RýóªAÄÀ.u-9lºH„³ŸOGDÄ'V€S“Sdφ„k„1Ú§a6–Íû€o´Bh(ýÒMW³IMT=r‰# Áí‘v…y¼IºWQ/Ÿ:¼ökˆ5ÅÐ3Mz 2©˜úõyjÉõ¹ vY/hbÁy\bN[ÅU~!U>¿“O´ª"(•¡;§T~?œ)íT;t¢k71iœb\Å#ŒJLA@ˆXš‹v_‡NÃmM+%ê»ÒIiM¿& @üÖËWÉ9°ÔЫ¦ìp8Q³ r;ìL:bÇ­Á|‚ÅÇf‡q6U H²Æû·êÚ½Ž¼A&ÑrÖÚ -l“Þ©Ê¥;å¾…']?°ô´à^~Í´wy,„¸=½ gïÉXræGÙGÍìê»°FpG IËíMåožØhöc“–êÏÃû˜4Ô‹ªB6øG#÷Æ’õä†Ãz4Š ñøVtõ×´t}«úégªWKÔÃ!‹¼° -ãÇ^ÿ3–¡uFÙ•êDXî?ƒº™§þ xé#8€£^ÓÚ'j%Nq¹÷6[pƒHSÉF*Òwqî¦q=²t("ÐÐùŽh^ö»¡Žˆ"vDøÊŽU*Õýì˜Æ[:4›(´N<îz±™(´›±8‰¶ýe º"£Ž[[&~i¹ÌÄœ/å£0qBÞÜ+^ÝòÈö òúyÖ+sˆ–4‚눠<`“DÈå/ÜèÎ Ö -} ŽK͸ø–žüéNßÖ³ÿ“Nî}õðd†‘Ý+53À¨™N.óGréV¡ïëOب~åPUÖ âý\zå/aè6³FéѳMÕsô²@n©lZ®å©›Û"lÌ‹ÜC>Àöhƒ´¦–¢q…É«3ñ(ß\Në&ªUL” ë©]SUÇÿµGR¥h¡UŒÉ‹ÂbL0Ï•OÓi›Æf꫺´6ŒÎ¸« ]øÆÅêˆÈOºä -À©3=yæhqå’îλ4_”D4¢kú¾mÁ’iSÇö~vkÙ(Ól6|c÷V“HäéKÇz›~<­u©y´íke»º_ë”88eœ[¬òºÈ#‚1¼§S§yãöYlƒBÌ`ÉÏ&ŸeQ8JŽiEbea æ(w€q´¼¡ŽE2šFóæzl7¯‰Á;á“#%/W.Y‚±!þýnV'ö²‘ú!˜®·ë/Æ6á«a–ú› ½g¸~‘„›¥vèèf%5%ë·„*?–_þ9l‡–=”?Êjª'&]Pè…ùÉ‘ ‰|}PïNõ½N€Riö a]NÍm9Yt¡¨(Íl=Ìh9ìuÎÿe|­‘M…ñÐigg† ;œ>r¾Ôh–Åí»üéáPÌýÑ]}çýú‹XØ ÍeÞÒùw©½õÖRô>}¤C“´tdÛÙZhî´ˆÎëfà‰sþ[Qö½:ÿ-‹Ø¬J˜pl`˜9 6|–—ñóÖ[qÇÆÆëe·žØÑF|LÑ[›/ÛQiõ§:Çß×ŸÝ hÂùËÁc>í.fÍ“PçÔnù¥¨®KK½­X–Ø,¦·+e— tLvû–"WV‘ÇœX¼m ã5“(ÖXwgÛ32<‡åߘJ O£ì²ÿǾ{f›å×åÙm««j‚àÝÞ¶jÚ>…rÆÕt1†³X¶a" LgŘ„Øר3Õ;¸|ÖP´nÞ6Ùüá.³ÒdŒºÏ´r.¹fî(BkyÊ“H{ùÿ(‹r4p¦æ¤wM ~z’q,"$=5n ±Ý¢cK§œ¯r•*Ù‰S&„¾ô›òi©Ããí«L §{Z1ù~ïõ;°«é-!ìŽ÷ÆŠ³Œ¬4…¯ÃïwòúÖqñ饶-ÂÊM¿;’@¹‰vÐi‹eµãÕÎj+…šf8ö½‹Ä®­-¼°ÒˆÛØ^§rZÏ,8ôjsÞb8ôš~ȪèÓUã2£¦¢&å÷à)¾úºþ!tfjq×±ééß½öF¦vø;l/ðóÇéCÚ<—Š8pW€=dÔÉVŸv{y½f,ÏF/^œâ¬*:v3ÊæhѽV§[SWÁ¢?ƒÝôj}_ÿÿêxXÙƒYh2ƽ<ˬWü£Z!Æàç16såìÿ¦¼Ü²$Çm º•Ú@Ï!øæŽüá Ìî7ÀìJe}Ü?ªV§D‘âuìñvâN𙎮– f¾þ0Î,J&Aß7ÿ¨wÿ•?~!/y¨t@݇I»ú€VMi =×G…a¬%ø¦ápfR¬¸ùÔø i÷„ä’Šy'¸WTÜAÛ®&ÈÈÅ÷õó\ •V³&±-F×Éhy–zÒiY‡kq&=>CKÈÁú”ãªî[sà&ŽZ/Œ8„Îâ²Ìå¼j3Ý-´Æn×aØö,¤FåØù€ñ¤!ã"sA,‚çcøŽÚĦ>m_²uŽ™2ú2c‘Òeæ¦~û?ó…Žw›‰2‰ÆmªÍ-d-§“¼é/)Ä[:KỊ{ªEö“9Ž ä÷õG—C)Ÿ'÷ïÔóM²£û¥É7ÚĬ7oN»ÇAåñÑÄ’)Ò.©$ O.²¨ B݆ -™tØh -ûHLaDUÍúÛ¦÷L‘Îá°jêª ´‰ -:];zs›¦€?†wbÙa?íÔ¶êõÿž:-&òVHƒzÏWņoë©š`†÷‡5¸œ\­«Úè÷JFÜl¿™®µl¨¢{¯©mŠv2L8¦©ÜB²•òŒdÉDN»[(Š÷K{-EGó·“ÅzFöõ~ýè¯fßv`dn¿³‹0n3í±°'¶_=f±¿êõW;B·ø–ê#öë¾·û¯„ÖÔî¿Ÿt›.Ž-þ/Œ,YìM%p_%3‘Tû™>P÷µ_. ¸x‹–ùÉ5¾Þ¯Ÿ¥›œ¯¾µB™ýaU:%ÙéNkÒ Ø2g3ûô2ii›oÃ¥ÃÙá*<úµä±1FZsZ´gtŽî³šNº"ññaŠ8Îþz¿~m.ü¿¸áà•ú|’:ß,úœmùeõæU÷fÚkYñ5®_­é.=ƒsµûì´¾DŒ¾t@zåO ÒE¦CÒ½6Æcžf±)8-[Jž;²4×'±=ãj«3¨£¨ŠÎä5{b\îúØF8ÖÀ-ä¸ÒOžà8áµÌ+Œ’ðª[DµÞ×Hû¾þÁáÞ?÷onÃFÊß -LЖ-FÙad†ù[$Ɣɪr(=j Ò/DÌ|Ȉ/¸ %‹ÐùǶ­Ñß×XÁûȨpZ;úÓ(öL¯3 Uó–רé)⺋Hô@7¨ËH -Cü” “‡K:‘·ûgÜÎ&;ø7dq4Ñ: Ý×[л¿˜‘lÇÍ}}«Sö¬Ñé*gþ°)c}½_ÿáð…ÁYa«ÿîÂ#« #¿w3y'ÑXFIŒÊDŸ«ý–9Ú’šÖ16¥˜+äXÌÅh3N`H+ç ìIM± -O#¬…a¹ [VÙˆ¶ ï@º±; 0¦[æRYãÊã úkÜbTMó¡Û•øÐÇ—ï­8úH×Xùg‚ÇtÃf×$ !OÅnƒl´x1æ~É}MoÔÝ‘³©ÅÞê°–Ü]Õi*­Œ«ÉõåžI„j]uÂHÜ‚xæ<ω˜vÍé!LyTÕ W³O»»£YÒ ‰MÌ$f²  öê o ÔRR‚©©žÁrËRhuÕ÷t5Íê¯! 
Ÿã çWZêj̱•H& pkÆ:Ý€+ã‰åZûW°8d}lVqÉί©¾e~8ìXÀeßœ¯œT\_Hª§û&:û”=’ï«Á½Æz ˆ­Û.a'°F‚;ÃÑ~IÂ;L %ï2XŽ”0uÛ;cic,äï'§-¹‚l]¶áøPºel0ÚBÕ¶2køD¥ã¦Q ·Õ=1]9U¨åˆ¤Ã3á×5ŠWØäXýÄpKá%õNžé¡«×ßµšt°—ÈÐS Ùw?αr1?z…¥?üùy‹Hv!³\m)jIÎE›ÏímDš¯Xé2Ó:øÔÄtó›>›m—âöÜ©¬îÆ•óHœ«W|ø$¿¯…žx:ÇpSãÒŒ‡ÃÚj°U<¾Ñß÷ÊeÕîÅvÚ!Fn0<‡DNö¯ -âl3¯›§Üu_'Ç°V?Ù)õL²§}GN¸SÅøù‡fh¨Žø¤ž¤ž‡–†\×cßX&Ø6R€ö…‘eĵq­Þ=W©XïúÄoñ4OFÙ?Èf´îÖ¶†°·ç.v3*Ž­_mÙðjíÛѳR3§âlßÏ/q}d««k'ÁÖ†Ÿ=‰Ä¶ŒÝ>ýP;ž½ã<S¨c9~ÕéÞy2iF¨Ùs]ô Ëeêµ¾¹Ûkô$X/^èêúvY=³œÛ3šŠ,O.ws-›m¹…\èKZgrš¥±:`´èH÷F»ÄBéÁƃjµF¬6óÉL3~vP}*¤ÌƒËâ©òRb&R|#dŠ‘tî ºË+ª!Rü±™„-{ Š³W6Mœ¨”>уÝmój<%¬Gѹú‡o‘Tä†T)Õå]•˜ýé ôÄÃ?‚M3éoÈ-ª·ÀÛ[ ÅXŠ‚¾Öµ)p¹À»ÆÕ.v77‹TI¨×ƒyµ ý•‰ ¨Ú%§M6hºþ#S††–:±ž5¼rZY¢ë¤,{ŒTȱˆQNM²°gœnObû~ˆÚ3N&ÚÁÉZ>Geè4bÇ`Š+¬QnŠëa¿µîUµýAÌlx`iŽÚZß„غg¢C¿”cºÔ5ýÅ´–„-1¹" œò=3° «ÎYNœ³ì¸ 0%#Ý=ÿnϹîÍOØ™õúPeµnN/µ<#/ÓK j+£`*—¡ÕrØkܬnWÖŠga¥Ë9MŠ­ì%ÙAœÓô‡ )FS3vô‘v»GÜï¼ßü'_«þÕÓʺ¦ßíÛ#Q2OÐ3~=µá±7i œù®D ŠÉÀao×W9ªþW˜Û>ptx²¥Æck@=$¹Ÿ5ßf ´®2+ÿ¿·»Ëü4õžÍR÷赟>H¼™3«øˆéÊæJiΫqyºIU -ÖH“÷zrx%©´68änÁ?v-zr4kŠŸ$4@GÞ„Fa’l±9’“òê·ÙhG& W®ÕŸCAÛƒÿ¯Wñÿ£xÚÇ»AJhÉ{ñjxbg–â„/×½ñyw:rã$g@¶ Íefä“kb7goit©A‡í#×4ó‰ãVƒé1{SÅ@Þ nì„Äg¹« Êùäâîó_?οÀêÿç’Èv Àïó3ÔŽŽX™.fsL]¸öm5uN=Œ¬ tìŒô캂]¢œùµ¤µBWÁ51GÅw­ê6Ô°„ã&G›¹û8ßè·ïëÇùæAàD CU¨­÷D)ƒ¥¢ÔCI dŒÀQ\æØ#åÆy‘àéƒMT£‰éÜ×¢/ ×Lö©iJ& º-2©;%a{b2bÒ÷ÚÇ_?[Xxmž³Ö½æû­Å&©Ø)âUÒ[—nEÀöÚšN²%c~^òmþ‡ò2É‘ä‚à}^Qè·àò]u ßËÍ#kí4sÉî¬Êb’¾Œ–|¹²=ù“”ÈÝâÑr\ -5Ì¿®[Ôž<ŸaO5,öÒð¹(é\xïÓ^¢xW­ÑÐÕ]20¥«½Ü}®ÉŸËgÓ-PæЛO[o:ºgæ®)îéÐZnb¥„îBw@Y=;®=㎆ïŒÎs#%¶oODŸf û±°ñaÂó¬ë<ÒÛIÒ5S8g)jŽJ‰¢êý¤ç·»pû?œÖÿ@CVV«hU±!^¡Á9¡™bÍéo -T—e·Ö$™šž²‡YðÔ•ê™a°EÆí¤,ô²Òƒ¹¬k™ú9Ù{ûÏÞj>Ê9‡­YžŒÙÛx—y{½þ}a¡qkgØLBÍ WË´Ø;4MBY6+©gV¸1Ý@wÐ"LªžAp¼2ʯ3/2Ä9á‡Bã­ºœåô`— ‰ø……z\¿õLºŠå¡D9¯=‹ðÏÍÂ5G«On¦Ãkq&4oºªc˜hËNË$˜»iîQ\VÆ|¶«Yºý⌦–¬\,2פa©“øk{8©–qÞ_OɪT®ZªÁÓ¶cU£kôÈâ_1§9Û¨Äí¾;gúãú­h´GT:;Ùe½­Ýã`±™ËÿÿQ¯j°\µÄ8Ii*Ã^jb²Ûž˜QÚÜuÚÈ -M©ønYuœÝrÙ¦k¢¾·¿òËçõ¢EGb®Çz•«¯»ìg&Y°Â:.&*"@ì}•tã.mÈòdË»©65­7YSzk†­”°`DKýšÛÞl~È•¸6T3ؽe]Šú¼~ž¨TLKhF=xïgš™ÄÌ–šÕ’-ëŸa¶¦?}ܺ5ï`œM=£eð²ôŠìØ©UäÞj^¿ëG3g>¯ß$«>¤ ¶þ/šš¶EË`f÷>ºéÄŒlàøktÆ´‚…ð­SÓ¯ó$‡ïâ£ÅP7Æ}üÌÔªâ/2“ÐjâF{íÆqº|^RŸ`wTÎB6m¿@úòœf[»i\†÷’Ç’Ž=umþ‹Mam7[èW>f.ͱ…Ñ3cJ‘eFìù<ïWýWbøOj]éǸ÷ç2¥3¿_™öàx­ ˜Þté¶nùƒ–‡Ò÷gø{ uÄ·5Øwg׬ƒzã5Ió²›>øŽ†{ª('Q)Ú9Lè„°eEé†~›U—Ž³¶`cѦœt¦^š´] ÏÿZLó ì˜Ýú°>`·×ë7˜è¬Ø’Ù -­yÚa§L2ÜœÃpÁ«êˆ€^ëT›n1Û¥Çõ¢™›6fQµÀ2y¶Ý8" A÷ÓÂöJ9baݶ•ÑŽGV«"I5œì:Æ’GOUÏ´ªÏÙÓŸ©Ö6Ag -é{çÕ;ùÏU­f Ç ïüs9RèFݯ3FŒiµú½_™ú|€¤†VÎ…O¡êÃdR ÔÐÝ[Ö¯å·;Á¤FMæì.Ä8ìŽ[ÑÓû–v=;Í%g­”"# -›ÐfºÙ±Ç¾R›£Ê†vª΢zï°¸^IB9 ‚\Y×Ù.~/`Ü^¯¿pðÐB…Åx‹¥Õû*½^êPÒG 7VÕ’£QbùJs?ˈ[jyV¹ù$è;?Ö7áëù™ÃŸ>¯ßÚ²ªƒ±Ñ!‰ès·êÃA¨ô¢²z[³…éªNKµLW«Y±‘Äzœ²„eO±’NšcŒ¶ȃ’ž=‹ƒ,-³Ç$–‚HU+ÆÂÂIm"w]Û½_ãÒQ«zå>ƒðxz§7(70’Öf¹Ø:WXšžX‡á¢Öëy1®ý4?E&l) ‡Q½I ªjçeï Óå ·sÖ„(BÚ£§•Ro¹ 9ŠfsÜßmW`ø¼~š”f#-Úlªâa@-BÂDZf"kÃú^Çy„S8º`<ú5AªßÎV¨¤,˜ö7ÎU4¢dV“Þ¦¼SE'ªoþŒË¨Sj¹Ì‹…tú ¦ö³Ïë7#¦}RQeIÙ’ò–4+a"/êð€lç—°¦7ölúé´ïõ>ô-°Ô ùá_-3Jj{¶ñ ‰_+™6Ã×E´¸“¯bpP\s‡~¸ûözýý#±òv¦lÛ¡ ƒëÖãuà(7#ŠlÖ§[ͶngzÉDr¥ -j‚‹zöØzêû‰„½‰‹Ž@ -D[ÿŸRÓÿðÅè—ô5­13-À–ç_¿WÁ½_>Éç4ÎÔn1‘‘È€…Ê4ŒgÞÌ« ¾9¹ Otö+, Å4 ]4Æs§·ÐaåüDñiSáx U¨ˆª/?(¸G©–;ÐϵH{OÌïëõ&d’8A&h¯©®;Ð:Þ‡áqDtë˜Û×®™ÜÃ5›œæ¬eZ‹K.Ê´Å;–L;7Ì‹=Ô §Š}.´˜¢ægûºëÃ# ¢çõÛáb!4ŽxbÄz=\„Ñ"yœˆ[³Ì~Ócñ8ÇF-†½˜¿µåz!{½x¸êÖîHÇ>^+.>“ÖŽëx5‰®U?ÙÂOPžEÑŸ×ß?ßËãö7ÜœãXƒñ{8‡•›?î’æºíK>NºŽôzÍ'5àŠ,ÖÈg¼€—¦^½ ü¦[°kZ…¶ì(­‰Ýyà€% -|x›é$4Òùt7gaŒÇ²/$çÈ÷l o¨n¨eú!\›À„ÛnÉÔ¯ô!õÄ`ÂúgwGP~lM‡#¦èBËȬª’ýaY†ìæ °¥‰wßL™Ïëgw&gÕ´ÏÂ~ÛGw"A»L6U[„‰qÚeÄ{ØŠá$TëË^Fz“^µÇ=×%gP,ËaI+{Ò{n`Þv·²#UÚ„ÙŸY# ©¸±ü®{$Ø9¢p~s"«AU6 -Ð=Ç0j'5Gh®Ôn%!" 
½:™™‰3Lõ+EÊx0ìL”Åáx?âq -pá­«s¼Í†7Pt ú8±OºkÂâ´¹äYΊ¾NªÖ›»´“w´©P§—)*·|ºs!„£ó@÷?Mm”Õ«Ö<\qÒjìÁûJS¹ƒITÔKÐ.€+©PP tßy«¦Ñ9¹%/ ut;ÙÉzÇÌŸëk»ÀfcƒºuAs,’ëèȪ8LCjþöf û§wÚ‡ú¯¹µÉMx£!¡5ÊvŽjKP‚Ûk›^ 9%YàÍÀE"öG!C?Ø„f`ây_•³§‚ežjùÊk1ÑŽ³’BQ³?/V˜&´†k¤§ŒÕò9½Êñ‰if4„¨ õ0_˜[U{W_¼ó2ßì99÷ë7Þêµ°3hÉxžªºtŒgúÿn À{Íõ;†;€èã›4Wa™Ž¬ ð ¤PÒ— Vµ RCP³†öŠ$»m/˜-‡ÇõÁ¦Œ‚8]Õâø×[zÍÅz˜%ÿú¡Ï; ÚRƒô¹Å@(×H3R¶­jý·æ¯ã&‡ Ö°E¿†(NaŠÇa´j¢ÖÓËÇåéÓM¯¨ÖH½òpWtŒèîçWh.Ô¼ž&ékN;œŽ­×·«\FIj3xõ"Ô`QD=² ÔR½ÿ’c›4è/CÔÑaÐృmUv¹ï!ƒ´ÒëÕ¦7ñnî”H¨Q"Ž©JòK¹Â :vnHÄJúëgú„†%æ{->QÌS9˜*¾Íi¤,“ÙÙi"óPg7Íß,ø´#VÛìÈ5”6ý -ZZÓúy’ÌEWs¤Üf¾”6\ô—3§£Øþ4"“)yCÉÁãyIÁ Ë4íœ~ñåeŽå6Q¿OÁ h^eíu9cèþöÄD‹)Cã@ Š*·È|½NŸ0÷ŽtOj«t®l›ó5I‰ýz¦¥Ì…ˆ˜Ÿõ¾qûãõúá05ìÑz {Q›ZQ Lɯ/{ å[ÿ¥™­V²/¥ÖS R„fʧºÊÉâÌæ º6ˆÐH~‹¶´\dloý á׃µž~4-¹²ì$?§R{—×ûñz}X­P¶VÞb>§¦àÅnŽ^¯-‘S72Ìj[$¡_$K©¾7=M/0ÐJ‘ìqM‰¾¥Ö®Ô¿mÖí/± àƒO²Õ–+œ¹}bèElwj·Íz^߃’Ï$+K뻯âZ˜ƒb=¨9mé{ا…îÃ`3œã`J”ý§Z° Ãlà}­Þ‹Ï1c†¸a뎲±0LûQ[—Öæ¤Þ‹M¾^ßã›J1æ`M›¨ùêJf"|)ቿ*Ñå5¼YæJ¤Û(SÐNá[Þ—V§ õ„Û vDUIé|wå(~G· lʦg TfÔ -·®l„ö¼~PH±¡’kÐÏ#j"¹g¸f×»[Z³#jé4]ÅpÚl õWÿxgê<ºšQg?6•³g•»ï -È·ÿ¹lMÏè¢ZÌ»ÙÖI¸_‚{ó˜KºQnKží¦£ŽmÐ`p",dƒU÷ók1t§5Ê€y7ÏÓ °ù¥É)b¢}=“s¡KMHfüû[ê‹×뇘n4£,óv“¿½ÉxO²^“Œ[ÛuQ­O#…ÉL¡[$Qâ‚ ª÷1hWwŠ‘]‰bá6‹M+_iÇßB°d¨co©Êú?Xk[±f”ìèØÝãÒƒ…ÁÖ¶å¦g߀?tí±›n‹Ä,í~ïkœßµ¹²õXØÛÛÑk?ÐŒaKdÀ ¡ìÚ¦uƵÒ1œAŸ3[* -²›-}øß~yq’'M)bÄhkäÞµ'ì,Û†åR±Ò“ 8ÜæåZñ ‰Z)vM=[^–€ lpjÛ¸B 7å€pÅqÓé:É0]M­TÑÚ¤/™¬§Ñ뵆ŘùÕiéUúl5ŽZ(èJ,*€²Ù £|@RØÈPØ.X/L©ÿK­ž$ ï¤]}'\ P¼ˆÖQ2ÆÅK;ub½NLq5¦œ(Ò¡êº Kø0÷d…ŽÒôh—²é7`Ž»Æv!ÁÛÇo‹ÆëƒÐÎÿõE¦*Ù0‰áN =Êa–¨ÐíÎnc»höEt«Y‘·rÑÓm ²:¨¢¹X3í®rÜ«[´+Ofß-«"œ !D40Jû(f¿Ì±ˆtº…y¿TG¼ÚH§ZDs(˜ÕüàØ£ûNßUðœu{‰T„@§iöDÊõ^ãÛ¹»ó§ä6éwEïð£åW«ÍÆ<ä°ænn%j -0;¡$ÒÊÁ˜Í $È÷]Õ†¥s̪XˆD±ê'³rÚcý`B;$¢å¬äÓÐËÂtqº<앪–òZì'›ž{3¯#ø¼þ'¥"ÚÙÚçu¬&înkÚŸ¢aÞÁäb÷g勉ZZ'Q€U²ywLÀReVÆ”–n)ÿÕ®™ìj¯~OeZñ‹xÊpwتUˆŽbå78ñâ}^?äV‡‡õ]¯p¢Vã(ÃÇ$;‘Ôí6…Q4r¹Cª™^g·J–µº ¼ªÌ…ŒÖèGNHéÇëÄ^ÉP³•U²‡FÞŒ#í^ÅzØeÏëßG¶’ªiÚêtMïcœU½‘P»Ã»üõÅZgôß»ÙéÊ{a–:¢[¶šó+ ­PBñTdR¡î:OÞ;èMv1ª“±ÃÏŸN ÛM[ïaïóçõa×!Xò¥cí¸²îp«¦Õšàë&Ñ¥ø`lMóãý“ •;ȨÒLÝÖQу¶4z4EV?ì‘<è’Úžï=¬ýx½~Nõ0/í›ÝOÜÞ"¸aÐX^½*˜qÖÌ»“‘I›éÊzKÛâ@…~tE½¾º -]¿ÙHÍ]r¹ š ó"§áèjKW‚C¬o½:üäçõƒœiCu£"S~Úu ü³2:Ej -«µ…A‹„†s¤ -Š}_Ý/Ò°ZèøûÐ…ôbñjm† ÝhßÜXù¬Ìt\ÊÌ=ªóx½¾Gµ(3±ÀÙè·¢Í>Ã]ºnÒ^çبy…ºmøí5 X$Ñ&A{(²ý®…V“:[š¢¥ ˆujÎŽ±ò‘½µP]pµm¾ÕÈš;‡s±tÔ%Ë]RȈGãu™LFêyýp}¼\Kº/¶i;·Ú†U.h\@zZì|à!±Õ¶KÎÔD–³%ìØNçï’Tm¤°ÅÙê¤4Å@£ m sF_â9Ç~oØÞ€çõ#*D\Ö·Â[çn9FšÙ™c93µ§­®ñžÉªÚ..8¦r”Jº»V¬™±&{nÀƒÉ¤Ong^Ôàyýë!Sìia“Tñ‚¿ Êg¥žtÕ´­1ÝnÆÒWnlSº´ä2W?þpÒæ3þ¾þ}ÏoÌp¦Š— «ò¼8¼F9 Ò±¼"GAkûí•8ìw5š%F‚€ÞÀ]ÚaVÑ• -W楚–Œ}Ò­Æ}7Žñ¼þª -Ä;°¸— ú:¹3¼ÕÆñ9¨+Þ¬æaM»;ZD8±œÝm$´"T;¸Ÿ_b/2 Ä–ò¹yìþ]Ó¢nßâõüí QUn\ƾªé3[2ôˆ_bLÀ÷6šärŒ™X÷ü±Tz {I»g,Sã/œiGÒ¤G9©`‹ÏLÝ¿ogfAÀΘ4ųóéêSÜnã}5=`tßW{D¤Ñ·‘Ö Ë{òֲǽ Q·Ãðm<~À»m–m£LËÎ3ì‘>u–|ÅJÚÚ(!bØ2«ú~÷ËqQÁø~AÞz~â&+ùDî½ÈÕÞhaÉ/"M¤¾è‚a|‡ï ¡ÖÄhŒ”•ˆÎnªÅЙwãÌ€ZsLÛo:d+cFäÈSvº×Ú»Ýc³þhnNVµÛ¾êqÑIJq£8¨ÛJ”Ó{GÚÄž'×ÏVµˆ7!µºa¿MÔð`?¯L¤ŒÉDÛ¼ßWK© ÍÙÔôÃJDšÞ†Ö¯‹fÒßö)ŠW>GeÕ”ád­àÛ UÏä2—”¿n -.çeUNg¹ÞÈzt[²ÑxWuªÿý†¿S¯×?Z«&‡@Ç„Î_~ì¶\Š„לêSQÇýª¬v¾—Ò^ðÀh^T{&MïÆ6ͦþf†ï܃§Å[ÖK‚ߎ[™;ÜAHÏ´~çm9&àJ_èõþ.ÁÑ™F»7Ü|ä\@°Õ4ŠýgÙë¶B–µÀ§þÄ34•p©æF+àïç×4"\z:ìpèŽN!y>¤ÕS\KX…µuWÿÐ#”nû§_ôpr½;CÇÿÕÞì&Û£éÍ.¥2tqœ"UpÕóêÎ8©Zd‚C3»Å¶lãzvCYóžèhèg—¡˜ˆÔÁ¢ÀP衤î„éUç7¨‘a1æKo5¯šâ)=`J:7í„ݪ±þ¤Ljï¥|¤Aà“éÔU{BÙʦ4ðOwY;ì5Œ“û‘¤HãÖÔJ ;ïmÔ–ßà­Š\ß¿‹’ìÚ:üUÊ`AES¤õsfbíuMí!îXt"£/¾ÐTÙFvÝ)dx¾;¤Ý TbYJau -t-p{¹ÓyBpS&Í7-–nb`[3q „ïdá&Æ7ôž)È"€·Æ¯}æk3Aíþa:šç¢ôîúÛ¯1$Cš‘’@KäÀÀŠpƒ@¸²¬kß±ÐIJö2–j|ÊÝüåå-YŽQ¿WQÈ> ÚÑeÏú'n€YâÓFeý§I ¢Ð±ô»9èþØ~EÀk«U½½µf½yd‹o®sz-nS|ͦz,Ç„eO¦ãNèÙv&Zø²Wé^Áy&¥Cð°4wƒ~áåÑ[Qíc¿Lª«Þ—´h5Ž¾T=jÒiÿ-ì–©}æ[ùŠM·¨²™Ê™Iqs]™ Ó}Ó@ÚÐ …rìkÙBKÜΤ AÛiýŽaÓdo~]_.Ã0”Ÿò3Öô˜zX÷7YÍ'!ËcóÚöÒŸa¯Ëb°Ò§^Ó¹ úZ\þü –@Y•Ö`ñ*+Ýyš#q9ѧäÝhXpÂPäÛýXÙ0ƒIòìîMez373ªTÄR“üÔša9{Æ='‚pº;Ì=ÍÜ72|Ö"¨…@µ€Ë;+‘\zç³’bÍc¤í§=(Þg¡Îῳ—ÍS.ÙrîâÄpi‰8éšÊÉckUòÁÖëu¹£»¬Ý†ÍîµlÃÀ‚:QÓîÁ_ƒ1ö¯Ãÿú:¡ïÁs¾ìaƧwŒtòÑ®wŒ°bŒm°w¦Yw˜†V£¥Ù î d=ý™81CÇÿÍC£¦•‚¿À§\¹ñ©àR님B{™2ÁUgE¦|@bQ/ûg -4wˆª*~Îß*|ÙHle0 
Ë¥üö¨nÎI•”všM‡f]3À¿Å/SµÜäcÛ–àŸàG>ã\ªïë¶Ã+¼íør¿m‰Œ-¯ƒÜ«º¾"_,ö°—& GIÓ:мÓ(ŒNü$d Jó‚vwyÏÈ«”ó»-kaÖ$ì.Z÷¦Ã.þŸ(·®lŸé‹m/cØH4¯H]¯¬k7›jO£Zxef^Ùh {Æ‚•¯’;»QSXnF[þï1¢wÐ;"?|RPçë*¤°?Úx/d·‡¦Ï²0¤,ióêçjI»@º™ND vÔÍd­•†)éÒh6*Sú²£ëX„†u&zÑiÝ#ÞUYxlKUú«ÑïÇúÈd1R­tû}l÷?¿²¸_c£6ˆË´ïB†<çݧÏéz¹¤Õ+™™`„7(tõì”È üÙ‡:ÁíÛS÷¶m¬N^ø …`jL€Ã£6į{ 9ìiU×ðiBÀ`•ÎXˆ»îÛPw>d '’ \ÖuäÇXåOõW=·Óßâk³ú÷µ˜~¢H-jª3Þk¡‚rìó„¥0ºkŠ¿™ßÞ„¥Õd ÀwÏÁd6jÍÞO;3µä÷_ØyvsDX[ -Á›Ä×Ú÷±7ꎵ_‹ÞTü¬ä¶ý t¢-{̹m0vÌ£—· Ä~ì™ÃÄ…ÓYfºQßrPÛ®uÿ«Mî‘ÕªÜý§ZT>QEÉêäwŽá0iLQZ>á¹éöNc»ˆ˜ñÖádòK:Vp²LÛ|A[‘:õãǪ7NˆøwþÿݤjÃV½¡ÌIS¿|¡Ù3°Oc³ r!ŒŒ}g@ªv†9®ø«^Årñj7' p…W?јöy»˜Œj˜>Çu«ï¼ŸF¿ž´ûÿ¦¾zåi §“žÿ>ß "0^ÝVBó•ˆ8HVß` [TÜLbxÙy[?\Sµkþù÷Ç¢{žqâ–èMkÿ,ZÝÍW±jëÎWQLÏ+ë5—j§ “33ü㸧 GBd>TÀ¡9ÒØŸB »Ð¬d8Ä4™Ù#©TþtEZÑN`hËþhýoè£g{`þy/ÖK­Hp0DVTXÌÔ:¶yÒ™­üÖ™•~N¡v£ÎQ Y>5 ´Ðv(xÃ$Û …ÞmÃr<>ˆkùO:?o¾ÁÚ–…}@{Ógš8ÂRÃÀò1ÐoÂaÕ÷Óô†$s]õljëJz è+GšFÙàƒ Õ+ºü5»8^õ´k~ôí¡•ØJˆÇrºRóúQé-ïZLé>ÆVñ#i9QÜŽ`¦ƒE_]ܹëxw‡ý9ç€ýȪ/bÐv.ÊQŸŒË™¤Fm˜ -ߺÕnÊึ§çveL¿¯ûæŽbŠ‘¹>5·;xÒ]« D¡%ÆL·îNœLi6õ´TVçd„E·ì4lˆyÑöuÚìhîïÑôTÑqmªäJº,ÊÖ÷fº÷IÓb\œ‚s‡é²6RoÛGrJ‚§s·Žql€ —F±0ñ4Lå󞦕CpÍŽœ¤qSH7 -bYˆž´c¨ãóLãžü‡Z=1o<Æįù`4;HLf‹Ö3À½ë|¬ñëÏïþW,/{À¨NkÝð×Ülç[3SK_}„7õì¬GgLGz8Ô7ÇÜÙñm½8ÇeÖ¸Ê.IÚßÓ>¢µ ûq.P%ñNï)xÍÙÓŽË_4'Œ#éåM¾ÔìN  ÏŸJVÍ—¨ü=L¥<& {“Çñ€GL£˜”¢uUošGVV-2þ¹Žn½o*=D¬?ëɳ:Üm;¼ÙÒ‘\}oà/VµìÈb¸`ak¥zqÕ¾Ê -ð -Iê¢Ý+ø.¤`CQ6ž¼Ø½†3!¸qi—Å -ðZÖa¹V ¢zÑWY¤´€±^ÓÚS#„É‹$FåAz4n†Q gŽXŒû`Áã!.Ë×+ÓaŒëï«p’ÆZ¬z’+…m­ËNò1ÌÁÜê.° ókÏÒ¨ÄrOÎdŸÓ®†j®Mi`Kð!°´ôrÝ/˜Ë\åœËîÞØl™ãFz im#R¬{¿Ê2ì¼f§#_ëÐwÑÀ:ž -Ì^õ é ò:ÓmÝþb–ÑÄO[ë–J7W§V2«©i9œNgUˆÁb=f'ÂukV k eÅk§Ðœ›VÁ²êÝüܼ6Aƒaˆ`H°$àÇûìgCNJ¿œ¡ ÕôÑ9¥½`Lôa'¼+$2 »šÚ(©ªÀ2a_HûÍíW˜ÀšF±|i˜ÊîbUÐà´¥†)Âz@α•nÉ -~!ÜgZ°×'Ž­ë}_éuëòïþž6ØlV€‚5þ >ŽÚ¨¯¦g†yº’؆nÉU.4Ê> f’)½Úémß´Ð. ì?dœX<͆]…äL´ä´”bÛ&¬ÒÙ¾ -Ÿ×<‰Y°“Hòx& ÄdD&W£&´öß”›‹_vÂ3 Ú6äا»5±Ðüi×» +-Ìü`¨uGD½`w÷ÆOÌKEj¬]»EÁñrZê@‡YšåMôŽ5Â0JþóÄ­G2óSý1ÑÌÍ„j6¨ñÚ^h׌Ȗž}ckMé ïA5<®ÈšXЧówÞE-s ó^íŸx@i¬H³d/ "ítå‹ÓØìg‚ÒiûÈDö!­•­oÇ7ösŸ¢/±"É­“8éí?¨Åû«66%?Ð - ->Z»¦Ë)¾!+‹‚Z{:C ù[IÚdžáRVqo…þ˜+<ìeÑaY*„Ád¼j¢2 Ž,o1"¼'»Úß=–ÏÆI¢ú,Á§„>WE÷Úë+EéþéݪmLYî%ó%œR·Ý‚ý*á¨%¿z&NÏ,XFÂúÜIºWSXÛ΃lòÝ]u,K:¨¡uüC¬¦Ùw^Qû¡ÇÄ«üoYïÌP–+Ö­tvgeò¢fyš%;¼¼ÿcåiÂ!γ‰yil"ìª$TF >9#Õ¶¯ïçÀP¬3’â漆™úhÚߨþý–`rÒ -GŠÀÀ¿!7`XšSm)ýµRÙQ3 h²ncð`.–*¶Nš[P඲^QtÓžæ:ÌW%Œ7âá뢙¾û£žÿ߈e1¥mè1B÷•:h ¥Ý š.‡]·P·q^)‘ħ:wÆáá±ñ’Å^2vØ?ÊFÓ—ŠEW`ü:š©£™:Ú7Eö†žüò²É’ÜÆðÞ§è Ø ’87³˜ Ìí?Ê*)³<ïõFêR*Iˆø¢(‘ AXwú“UzçWr¤íIÍ—ˆ¡í°>&Kò×6À¥L%qiƒ°Ü9^ü»Z¯$*(zJÎ)ºBˆ²{õ¡w±×5 Xx>g%—T@-ž? ñ¿ã -ùZn²ÔÑýh»~ë‹/|H{(Ä>–Åå ––»du3V[ºmTZºCXÅ$é£)~m)ÌË·š´¬. ü4®MKtŒEÇÉߺ¬<†h>æ ©KÉc×mMqx\ð&4M?R¹5xÓ<©•l¡DÕU‚bñƒ`,·A!Tò³d+”dÑÄ:—ÿÈy²TÓPRø©y5Ñ},!Àn¼9Þÿ>ä«Ñ®>Ýúo(§óüú‡åÈ,giãQlb¾ñ¡,Šq¾Î¦qâXñn윤|jØûò‚Øõ3µáÜr ñÇHs±€g¶}f7ºÚ!¨kNïæÈß.3朰†§ûΣÌqî½[êUÏ e.ˆçmå>Ú·gC»6‡ž^4ǛҔrÜLõRÉ5Jƒ£Œ€B¦WCáàlc#Õs ~x|±Lè<‡(GvÞN‚en:;çÊ@|ø♵M¸J$QòUº‰Hºì˜{Ûû;;r$e˜êø“ëõåPŠÑY4ëÙšž4ë²–w-o - \2?þ@!{D…ñ¢õœ¿2´ŽacíKŽÉÉ "¿ÇwKwç‘áócЉðvÖ’-Iw29R>ñŸ*"áêw¢×B«Q qè/‚×½V)z5G®îvóXNÉÏÙW„ÑCMæ{M(‡1žµs駗×ÅERm/ÑÙ£'aàĪ·»ìŽS»ÔshôH?´Œ£îPrâI¡eŒåF>Q†Â´éX5ßâ" ãþd‘ Ö -sLå\èTïIf°3ñÇ]R§y<–Oœ%NŸÿ•$e -¢F;;ck]Ú¹xÓ„øͨÊá®{-•ì4Nö¡X“ÅêbibúO%Û“¡&àQýÞ¦K›,@]¿j«n†DÑV¸=$cƒNeL›(ÌZrØô7hJDZæÍÂç’]vEÊ%Õá$]yXHnû¹·,œ‹¶Ç÷•š-‰íÒ[ W‹äèhªˆ·Ý”Ã(Ç/pÌ°2‡R |r©á‡3æyM‚7Vî–ÈIóŸ×ÈÉ+¾¯ï…´Ððú ý¾þ>ºeI`Êå]É0Vé8Ê°,Üoeb+®~rñ/‡µ¡L0C¡\ø -¢îöòÝW0Ø’Ë”Þ8ó- Ìýë~ý”0d«­(‹7Bš~1˜ô~^{ÃrBEå?rñžæéK£…5½_¥æ íá×ömº”ltšs¾ƒB·ü,SG )¦î¢M+˜ºófÀ?ÕÍ ZuíÅÒ”Rå“îg•‘Y÷J-E¹ïë‡enA@£žŽu¸"¥/»¶8EG¶-s_ØÐ „EŸíÚB¢£ê Â3OÝadŸÛº’ -§´m?V‚ÕtIY<>H³Nj ÞNû^ͧWÂMÈŽ3RªÕFÙŸîÈw¿¾—`V’Dò˜ûÞ,mÊfƒÚJý0§ ÄãÚáõŽ¥ã|e¶#B>GIq8´Ù]žù…1¶ï„ŒÃ y•«ñżS½Ñ¸D£ÎÒEýN<‰"º#†È³Ï:ô_/ìýcd²CïVˆ€­»ðF¬­Ã/§ÔêÇwe#2g­0DVâ`žû¾²õKÝ9JïK z4¸z¾ÇE9ìvé|P¨±ÀsG†X¾è úBFÔ2çò÷ê‰t!(‹¦âÄGsŒ™ME”hxuËÇZ†È4¤ÚÖ“5%Ú!‹¡h%}“ï. 
/üCwQTѨԴŸÎjQ‰Öä'%#r:®GN'õ±­µ.K -‚Äh‚ïþÝ .‘\S‡€Tý„â×ô:‡Ò¶C‡C#_ø´:麥Z®nçá\ÏÍlÙKa‰€Ü5Ð]ôEa¿Uû¤)´5è%Vü 6¢KK²¶ƒSZ ¥ä¡²ÑÐBELY²uà”–ÅåÍœ.¯Ê{-í˜D‡¹‚„mvT(.ô¤iŠß:ƒ#<ê(TŒè~?‚Ùe½½—GLÞ³–bz×%±õè1x ‡îrªàu=ªFݧœFzz¦•ÃËpe#äŠNX&>×â™ëRñèûúá¼ð²…Ò΃àfîGJ6¬lªË@çè(|^ÂçÖRõN@%“ð NÑSXцÑU–_NœË‡% í4ä:\Ñ‘óŠä–­;¦|›EÇ0~üý¡N2ÜE½‡?«a|¤™!#üº~ U±1£YbÍNÙ{;<š Iþa–¨”–e™ µÉ€™DI£‹ôpøX$±  ~¡Iã¤â–ÔyÙÃ…åCÔ‚Æ9WsΟ6Ïæ\y?áÔÿ~]ß··ÙÛèž¹¨\§?ÃZ·\¸»š;ÖœŽlϦ™NAàÁ‰t߆¶RFG“ovê7¡C€w¬v$Au‹MÙu7e²z¾õ¯öõu}ß,g - ‘DÙöÓ{Ê2ýš¿:ÛüRžÑ¸¸8L{ì'úvz]>)«Ý®ýQ~µ¿ºñfC+ᡪ¡)§u/”úˆ–ÏÃë*Ü÷õ}{‰J34@v¸¦çá-óo“…ù •ˆÂ1…zjÜ;3üÈä"ƨC&Ù!¯;§\†Õbº 3¸é{û‡ñ2Én4Çð>Oá ¨ Îç©m^ o_ñ¨²$g÷ëlJÿ@@4½èÆÓËVõ°áÕÍwÜcðÚ9ëÄ÷çÿ?vÞœLâwî¡S‹Â턘Ù9¼ öá$lKžUCZ9|ºÞ,’èGr™ž¶ø¿\9ʧ+#-±_{M…›_w¬(ߟlYÀÔ•úr¼Š6æyšë–gœh1þ€#6²U— ã,íTüÙh®ÆKVJ$;퓽§â­¥T¤;eó¢ØÕ?*òñ%«ˆ¿ÕNÄ£Ú]cÚæ?*Ä®’l}Ë–¦Sëß¿¤ Í’‘㔊'¸\mS½s×½vm¦Žb;¬%‰߶4q:E@6Lž¾0ÉEót5TÎòèiMd kǵ±u½|ÛÔòÅd/Í.Ÿ¥žC½<¹õntè¤Â@@ÏùûzîUɺhk=©á¤5ú–gëôÌA`]¬g²ôt Щáö˜XšÄ±_ᘃS.g9jW ²' x칦Ù÷X,ð•ÕÍQ=ؽ?«é¬%a KKLŸÎÈ/ÓÀÉ<Ê - ULìÉVn€v]ð(ú&ÆöƒrÆ„RŽ^Ó1ìU³aQßÄêFÖi#ÔÌ‘eܶ%ÌÅLjÁÞ–hsèvöË”ÇëG@)… °lpŸköQÛ„r™8Ï“¤¤d -[Lò|7;ÝŠúýùƒMµ æW©—p(Ðt¿²¦9›Ú °Fí8FºR)Zƒ1Ú9ΘßÙ‹3f¹¥¾H;8bVÙ8UÙ–&¯UNJÕ°mh;-©9l;EŒô|h ¿=Ã’ÿ_Igƒÿg¿â³í¯|¨¥VV{þwã`SèGüÁ8HÿÿZM”›çôí¥\ĺ“9ÃÖ³¦û¼2‘'ÔÞ*xJŠçïÙèâ_±m%«÷¬†‡MáÂWGUky5[J:õxd¶&t%Ï‘Ëá2®ñº‚­NiðÊ*p¶w&·u:®ŸŸu„ m» ½¦|‹`‹ª«¿È<¿:ÆÀÓוNbÊš;âõ¾Í.àƒÛбtw9Ïnoô€p„Mìn{ͱnåŽè¦^þÇBE:‘Gb‚h·M/±Ý Ð»õV>äoÞ¥ÿÛað+ÁrsáÔK4/’ »Š®Æè0Ý°ÿ×¼“ùºwþû—¢^.mXø…rÙóJJ¢Æ¥+?]Ï(‘X¥qÁøÈ‘¤þˆUºGOdƵÏU€'®ë\Iž•2=²«úѲ¢¿»§{Oâ ÛÕãWt<Ø{·'NÃ_o¯7ý]Çëe:¾b¯D8Ë1Í‚ØÉwÕµí+aO^ª]ÁýÔôÚg±u -=¼•çµ[²A8`Dœ\J}SK+¥"„¶ZÓ[x…ðœLT§-×ÿLSk5\‹ÂôRÝÛr\·»’[Š°ïñ[W×o·Ì T«÷~$RÿüøÑym‚GôÐôöÑÞ:ßdü¢Vp%ωkT"f™"‹ \OÞ%ôvGv¯ü»Lã˜gê™Ì¿&p/ԧ㯶éRBu¯Ôu©cŠiµšbí·#…94y:~xÁÿÁ a¨ÉŽÀ(K­¦”·¡bÆšîÚöoÚ­†9{‹ÕÜcÚÿÃþëHéIƒâ`’é×› Q±ÊÕUéa:Y§‡©9|ö€ ¡?:QMëÅ>™“Ù¡vª6»·¼×õñ¼¯ýË´ ,]Ýjõ•˜5Y >Ýö›å°¢2ޡ㛚‡Vœ`§ËmšY—̈xºmrAEÁ8y¶Ö8ò6s¥að`´}KŒ,˜¨Fs ;kU7–Ò3‡³·×}ü|0™Á3G¶ØNýlô÷çgY¶@†UÕSÀß2cqÕÉEf®l4äWJfÈqµ”ÛãSŠÉ±mX½žžH[6ˉgÛÖç™ -¨¡“çÒ=OÖÄs«†9VäV#³þ’½„÷>ÎÕ÷UL3û$½•iÂj×(6feXõ/sÂwÛ5öït£Ü¬¨Û4•‘‘Ë‘Fì?fG;Ãý¾ö¢g5äÃ…à‹Ãz UÄåäÑ}ó>^ž™~YÈÒ-‚}>ꢖƒi½ M%'Öi‹¾héëËðÏÝÒ#óG$ÚVd–'ô,)µõNÒ ÷â|£ՂŽíŒkܵ&2VShëBxd›:”–kÿ{µÚß1äù;¢e»ËÐ~;Ãpî¶Uè.D¸=:+àEžZíÓ.17^Ô/}±Æ šì…<¹Ý­.Ö¯™e†é·€8í±WßIJÊt«»ÇSŽ«cé< 0T eXÔƒ,Ò!L¦Â2à !ØD)ƒþÚ -K‘¤“dg(ǶzÕD§MG3+—B8P3¶Uq:æM“Èòô†‡Ÿˆv@.M}ÚX¸Š´2QˆR©NÕÞ&v¯áÏÿ 5ÏÏ®]Ö´g9s!ôùœ~í¸ûñ¥@>N É:luÆ© êt‰îOÓ2>W™6§­´9Ãö>]9sÆBóØ8rº•ôJ^uY鈛ªŒN0s_ÜC-Êx6iKêÖn0Ê(ˆjÒÜn¿GÀ + OÐh{t -‘ƒçl.áäwïÑ¿^?(Šö r1sÎ7÷YÓ›Ô*°h…þ¤æZŽÜô¸ù-i‘Ìñ«ÿWÉ›™—ñFŠ£iBÚ1ŒV»gñgs;°\ˆí™u(dØBA±µ˜&&ÅbÃgº¢Ú Vý3S¼/º a–½ã{j¢£Ëâ÷´è¯¢cËþýùY¹£ù©Ñ éúÌŠõB–¨ØLr™¥Ž;cÉsÅU]€SUÅÿù…ÊŠ ….REF«†_çC’šJ -9˜E‰»<ö”âÏ”…ᔡïÃȲ6š"ÒmýUìîûzµtÍî+[Ʋc¶„¨ˆc*­ž/ç +!^ÒÔ‹†ÉboŸV1Úøzýü¨ã,¦T©OU X½ÕÑs ü¥TÕÿί'hºs×À†¨ê¼v7 ²jCÍtCW_ÞKuDÊ° š½™,PøeØkGBñØ™+uU†Xà,û_×±Úb@/,ÒÁtvs}¦¿Œ÷ùÄU½~~Vgi‹ô¾åeWØ- SÃðpðvœ@ýk^×7v»Ò8•ƒék6ÌËÑb&FâåÌ€ƒ”ý>mrË3†%Çán—t ¶Ancoàxœþþ%Tý´+bÝ<l#ä`ËŠ:h%ƒŠŠ½}ÃacržZDôµI`õøÓ$dGÄ ‰®\©!/*–x1ÎyE#'ŸR2åÙµK¨´G~«èeU™ p$4ùÉ;`7Ä”¨µN+[;íèÏó™Ëð)ºÿ8 BXÚ *·¾†)`:-Tƒá¥ÿrñ_¯ŸŸý?r´]S 7*[\ãUÜÆÉœ+­¶×‚¯Ž¾csÄ"±8ðqkYŸSWÎᦱ¿ è Ã"\Ò«hiž•Ôæ²ÖÜÇm}KÇÌus-9·í'6«V“~€«H]‰¨n×?Kò!ë¼³°×/ŸŸõÙØ‚¦êsbì7뿇•qT[TÁè •VMGZðþ¸Š>m3­ókžef‹ë¥5<ÃÄËûðn¾i˜íã·Y1¢=£¹§ôˆyý½ -{Ë&;H,ynE/b¤õ  £]qÌÁc`î Leú ýƒ‹(æÅhW³2,µ"…»›žuùB«}8L‘’âh³)Ca”esH;^I²­RDRî–ÚPÂTƒy–N{?H6Â, K);öl¦G4Õ@<¥h†ý‰Ï3.Xw‡Ž¡ûddkmì1D:€–°Þ£c¿=ôÃU••nåR‚~žUi뜱»yÛ½z(àL—FE¡ãÆË&½‘¢ûœBÐ|$Áßód›û¯S¯@Ù²œE6ži©ÕM‚@Õ+GºgToÀÍpÕðMp$°[Ë#3¹Y¼\&Wc¾rA˜cúI¬›¯DÏ4s¤éÛN›¡œ¨ -ƒ—I€ˆI‡NeøÂlW œ|ñI3o?¥Í~ÿû9TZ-%7â} Aó²/ ·×dÜÓ×fgm¤¿g¤ €¦Åh8ÓÈT37ƒ4'CIUOÓ©†–U˜½ÝWÕO®·¹ýýXÿª/f‡Ù{í~"ÁÞ‹…øjÙ䶾Ԛ£ÝçÚn˜éÙzÉô¥%äºìÐ7I¥ßYí‹÷ã5©¼Ž•!@»©Ÿ‘(–Uqä^É«…«Ó²ǃ Aƒg¹²Ÿ:|/†º=;ùÜc~!y=È4 3³íxÙó5úôéä¥ cSÓ§ø,$ÓjPÈŪ"óÃátRs»MQ…‘D38—ø ðx› 3Š,²©gYÆN¡:ÜÔbë:†¥… 8º°;z êté©@ð¬ØÁ4K¶i¥3}þVL\ú;#ßú%~¾þ~öKGÖr¿(¸½u.ÐôÏè(¶º#íå3yã-ýý\n‘H@jŶöµ·j],×®‰'Mó5qªt¬œ4#@Ë!TÇDš3ßp @H?O†)²Åʶ/ 
ïy˜Å¥©æ-J¶)Ö;¯öóxÿû¹+x® -ð4sÐù1´µg#ËŒT+¹-¯D'æXV.¹ym,²—5mÎwkÝÝ®ÿá¬ÝnhM¥ wâWã¼Á”*l' -ºùgWXÿÞ¶Mº Ôp»Kz½^ýÿÐæ£e¾ˆ†"‰(9°>Úñ¼éõ ¢™®õIΫꢶÒ·™‡Ô X]Ǭ®ä]¿_WÃa'˜Ú% £Mg$s–¶Þ3<­oÅç?ܤngƒ_aWv;pÉæиIwAQ„þâ;äYR9Ù1—ÔÄu†îÅCf©qþþþkØž³Ÿuü¶rËd[{‡oqµf¦l1Öºå[¸*²<:eòÀˆíIwèGOö3³égY“°–ć¢ÞH«–Y7K‘q@)7c·¹©4?êX‚Ñ(Í ûw‡PDQM[^¼ò쎈ð,Ù¢ÍIŠ§¯vXbwMZ2ñöºÙóÂmÇq§±]wÐú+¤”Ùtü[îd]¸k Á„&ΆñÌ800^?P=7½~7KZy#%^àÇ•EÉ[jVv-âÈN© Îrd¨üwºÃS7mˆ$Sóòáevûùt?äTîõóÑ¿”,Ž$¿Ê>ìô:{XïçD?¶õÚ«•ô3’ŽNúnÀWäàin7©s”Ðô6âî´µ5;¾}3á!fl¼3N^«¶‡Ö¬öÑIÇ=´ç}Áç–D -ET'}ou])²ìÃâšÿpæÙHåØÄŒ¨ ZÇwFÑ¢ÑiÒ«aT=–1^ ;ÂS¤¢âÄ2@¡W#¯ÕÉÍÇšWÝ3ûúnYh[óƒvz«™ô(¶…Òo¶jWÍNÛ±ZnB–\zyPçÙf;U{¤ž'«¡4ýnWúšñ]÷ÖYü<‚f΂a‹‰éy<3qËŒsMóss®­õÓ÷U÷¾?ŽM}«µka½1xñŽµÍ–ÊÛk™gY0­A%åu:µ)£ºKj×Ñ=D ,YßÈf}óÙ4ÂM -¤JÆ¥~K¬j-­C¥Û&à6­ÐJ9Õ¼94ãºiR»€”‚ÎHÖ™q«žxüeSí#ÈŸVø5lA϶m!ô8Fë­ÉÑ•ð -t¥7}³%K°A>€óu9Ôƒøªùæý.¹ç¯¡? ¸ *š$²^~ÔÚæ­liSú€ dÝCLíïÃY°M/ì–ê¦éŒë!,AO% ð.ýŠ\©Ë³ç½ýhîjO|‡°Düm¢Öá¢`e^Øï°’KÃ×ü(¿åÍC²sú]¸ÆiVÛW[ý7·Çˆ舖6=×-ŠNµëY`X~ÉmKÖ3ôïbø³&am_ǵ"ãyÇ.îŠgEÚk™Ji)mWºrÊé8ÐEïrdQ;¢Ö Oíi+âÜÇ<1 ÕAñGÆ  Vb©³túK׃¼‡G…*>&!Ð ¨¡pÏ̹%¬Œ”aMºïåʶ†¦ ºéä´ ¢ƒ³3Ïh;6,s4F§kÎM?‹Yk|o¦Äîüh—LÙ®|Twy¡;$´ª€N¦™Q¶”@Ýx<ˆªR@×|©! -ÿ—O·ê2)Y™ƒ¥Ð<‚D–±iz­VGÕ[fÍë,èb×súðSíÞýjf`°Åm%Î]·ÓìùÁÒvè˾Dj¤*Ž¹Åþ'5–æÖºtYI# °6ÔÑ‚ìÙ]óÉœ+× ¦y^¸©uR’ïfZ(4î‡k…º$4í;„OäE&W3li¾t{b%Æ.aäF$à'‡˜òÓ ‡}p8VŒßn8TÎRI“•½ ©`·ã¶5Ób³U”qÜuR 6 iˆÐ4 •æ÷y5ãP05}8$êmO›1±‹øYùAbiðHÕ”×®â’ÝÌ1ô‡-j˜÷\KÖÚ|;kà^ä¼ õtÐÝ :[õÚÌ»™‚–á¤Ö4å‘Ù¶§J¶æXÃT~EÉ 6Ù+§Xò¥øœ•´aS$1=Yº Œ+f=oÞy°ðÔp‡U†Ôé4Ô}Y¯&f®Ò¯\¬7cSG$©=2GÔTEÖÀSj:ë­YÔrÌZ‘q¢j H#ž“e6vBá;ÃLCž Ö®„æiB¥å°.Ü).rðÄë ጰlªø4¹S­Ý;ÔÄN½ñ7èä¹\ÆxÄ@}¡…ž0%õ0Ò6Ù:D'6̼Z‡À#²ïX‡™¢ªSþÑhU¿“CÕGš©™£ºú#ÿ^ÆßLEŽÂã–Ö.g̦X|33Ív4ø "¯ÅNÛœ^Øgè*’àU­t´«qèÌcÀŸ\‰9µ+^^’÷ˆ‹-&m#€H­›,Zýó:l#n¢H¹°ä­ ;•Þ>ókÔŒºÓó|©×  Ó›J`¦Ýˆ/>›Ø öÃí\3†,!vbÛd§ÚƒŽÏ猪ö%™Ñkãø K%êp* Ÿ(‹N®÷¯<Án§³žFù=2ýëqQw¸ïä„<·¤é1<8‰C/[ÞØü“X§ÞÀÛý‹ ìTÉxÐÕJ%•Iø°YÅïµüï_ì§úÞÆ?ÀAmX’è§ùØö˜%àWe×k½ÓpøRݬ¸îHã3Â\ €B¨®0ÿ­˜Ku•v¾.;aß\Ð)àèŒË>ªQ€9hÖ±¯±Röùí;uå,—ù¿Ó·¥¡I…ÏêÒ¶–á¢@…Ú¯CŸy„ûÃg¡EÜ^E®Ua:ýUå,eï¨ÇAy»ÄþÔ­JN9QÁ/¡>!LïÈvâ ‹ô„JæÓ¸8!•4@³_Nìè#4IžÉ xÓOʵMxÔØ3Ú¤>Ï}¿Ä²zqðÚ`bö}.Ê°m‰qäE°Ò÷±€qÔ)ˆž¦“¥p¨˜™ë¼o;ƒ³AÛMƒoµj¥MŽsŽí m~°.WäÄô¼œ‘m{9b‘·Ã¦œ4Ìé±×¨—2ª©22¤nAÇLZôO‡IÒ–k¥}î'=ÖbÀl±xbHâÆuníøó+¨ýzm»[§ 5îB¾ß6£®¥æ.’µnÇ ¦4fZÕ|]ÈŒÇvv¦§\ö¸ÅËpÐvKEê©šöæïT,­|žÐ ,„ª;ÊDh÷ÓÜãâx$¿5Dtôúu·F£ÑªQ±Ó­BQÀí˜ÄcÛEÎ4I/ËbÕ-#ªQ1‘Wý§ZÔu¹kk¢ÂÀïw \o÷¤êÂþ|©³I(!äø”G|÷V$ÊOÖßt#9ÑöDïCj}y -R]LûÚN2.ïêFï^ $'*ÖÕ·®6ÒÊø§®ŽEXÓä EQz¾ÏËôÛðTß9ttÊÖ•å_ ¹gÛ¥§OëÕÒ?1ò_rÄD¸ßî—.ð„HCg~´•KÚe~]™ÆÂâ¸vK›nµ[;y‚ëL0Â8zsoZFÐ0³Ì" 2YŒ9~Æfû.¿KÇ4âÓ–Ëö£›ä.zí¯àó¥e´Ð&§BŒš܈ףÔ‰–UkÕÀe< …PdŒ-!šÑÅ©š«¢Ò™îÛJ PrT¶Ò¥Že>°‰šûªâÚá0pùšƒ”£È*†uA*öÊú¹Î«èÂNv®ƒuØÏÐ 4a‘¬¾xx»ËXn˜ó»mï‡3üÙ¡%Ž‚O` -¶ª düíÜ;Òݨý1-•ylŽD6ó¹¡Èí …ð±ÆÏIO¥ÅЂ€P†)ÁÅ’†¶t¿¦P}x'åËFY”—'0’ö@}·¯œlf0%!²8m¹pÅ…s¡U¬QMÃTF‚½e-›Â/Ï2Îö"¬üû³Žø±U‘c]õ‘)E{ž¥ãÈ÷öt†¤\Üçf¨Æ1²ñ´Û -WÓ¸†K8s;4Ï7yϸ2O4ðÁUÇ¿)"2 -û¥´ºžnXTߌ “hYc44%öD\Qhg›| -FøÀF*¢†z¬MÎí=4ˆ2"Dzí¹ -dzÁãÈÙ’Eu-­f¨O@+ÂSV‡ËŠÁb©A+ÂF¤ÜÕqQåÔ@(„¾H©l1ìqø…„KûB‚t¯àUÀ¹,è#8}AÅÙ´¾k*Ùô4úòDíx 6ÚŸÖ.C›á{ËehXâí×HèÿäzIjÁª«#ij%+WzJ¹ó·K/Ô ŽzÓˆŸ®`hÃôT"«âD«wí²-ç½­æ)¬â­Úîãا—ºòsð­šIŠñâ¬jOÉBýB߃<8‡+Šž¤׃˜E»@Ë^ i7nAߺræW+Gé˜2J˜Ã© @©Nc0¼2̓ÂÉM î`Cl¨¦™Ùºµ++2±–nòh×ó×UÅÌpÛ¼0U¿·”Å–dµÂç83Õ5m=´Ñ­¥û¬dü¾M·&Ö -±"ÏOFèØËÇߌÐ÷!KJ7:¾<\ -×Ì“ÙàÇkŽ‹'¯Ö4)Öºm›¡*ÄF¹ú0û®M-¹á=w^6a±þûÊñ¶!Òïn -Š÷dÖ¾^ÿ~eáöÁI—¯2ž†«e‚ ýf êLIβË]iû–½ NCÜ?1÷Çma˜Kÿǯe;Bý²Òx‚±¶šJ†Ï`”Iºéa g=-ôpä>gpþø…øRL~ ­à„V²aµæì1Òq ŠÂõk˜ðizUPrÙiÃõHÛÖ0=Y`‡ŸˆÑÕǼ¼?Ñ´ 놕=3îóüvJ5‘]Þí°Ñ÷úó2¯é­>:u=[cDI|Ânºy -òó¡*‘ù -¢ÝÍê4eF°³é6´Hn;Î\B“³;É=½RÕî`J&ÐûævË[‚ ï]¡Zûé÷*öÌXüâ–þÌB‹’ A¸Ñg¼‰j¤ŽÖé)_udíL¸3¦Æ¦BÝ„úÔ¸1\à®Sê.“ó×ð—Q¯oŽéß2d¬ny^[±Bj}ê_wŒQ÷:ö¡µ!ûã†ö*åïùÜÝ_ÁŒK(@îgæe÷ebÞ#B"RÅ‘S¯¯‰ -æ5ÁñÜ|[¯#%¡f!ûE1÷Žê«ÈûÞì£;ìV‚ס)aƒJ¯}|wmc†w0¤¿L× ;ÿï~î/m'˜"jSM¡x¥ØüMÄ-°à:îýš —®1ý ïF ™0$ÓdFWóŒÛ%Âo¬í:€ oÂõTµ†¿ßä#y1 
T–ãZ‚V®ZÍ÷¬4¹…”JA/¦}â^y+@ô"(Õn‡KK½/Ó‰m–zŸn|&†S#à©óïMs&.2Ž¨M¡­N«¯g‡X¶’4†f­ŠöÜÊÐתÙ)á°È6x\Ó–í^"Ÿ¾m_Ψw+˜îžáÕ…˜è_[È马Y•-Hõekœáì®' -™j±4¢Ž¸*/ims_uÕ¾ú0#¨]Mêà?õk¨zéêÈI`*pë½Ð„4(s\e®˜-ßÓ¹`ö -ÑÇPaŸ›*F㢪™ÿãæÞy#…ŸæÊn+¸²úÂAØÞEöÿdm ':„Êâá±í² k”¨:·a©@k³âÒ°ÜZI‹%„¼Î\èSÇj—¸Ù_a‚Œë?ºCÞ{.©¥=â݇>LµfvýftB§üM¡Õ†Õ¡hà[ÕJÀjT½Ñ¥í·RŽ^¬›¨Á¨y ¸E*‘Ws¢+{çÕ©Ûk« ²ì#g‘C¹ HœšËR#¿èõZ·™3Å€DÉz— Aû†°K1ýN{1-ȉÚí:鉃áUoQ†Z¿ש·’¯2Y ¯Zýfäm½–Ö¡º‡jˆ“æÙw1 ¦aKÿOy¹$9’ã@t_§è d ð{žÙöý×ãÏY#¥¦ÍfÝ)–B €ÿ¦ ó°úqá[ôäß0ÓB@ãêÿÖ=Mö¾ÙËÕ2OE¢Í”QFDËLg Sj6‡?¸<ÖCÒp*aÈfðÔENÕƃŠE"\Ê ;mmWeœJM1ëHEC+íõæø“Èϥʅ!` -V‰Ò´{Ö@˜“Ùæë8Õ4ˆð<Žî«p©N„Õvë¸A›/$ Ò0—¢wycQ&̺ìØÐŒ¬çúayë¹›ø“ÞJ÷·ÓK?¥áð4llk‰MŠûçlØìtÉ)˨¢äÅ å«ÕoÌí¾%n¶üPÃ4ËÁWΰØFOðŽwçºõ¦ËÜi¬·}˜š·ß°àrd<Õ5Á6ŸËãˆm–R_I@Gíµ qV£rWQ4}}Öu+§”BËDÓµ¼Ä¤ib@€£1ÌDZÃkËÐ]ÏÁ2šõ§ûéõ¨/cgñÒ_½DfÖ»¶¡?Íâ¤_[Oû7Ô À8j÷¡zh¹vm¶¨ö/½½›„ôÜvx}|Ïä]Ñ3|ÐëÙÄ1Ø“í@~G'U¬¡ñ£´(8÷u´RGfœšœKGsί$Þæ$ÙìÌwËËu0Æ̸¼ÀXÃ9ì‘î ËÅš4ÊVRNˆ0ÈezŽ_åÈò:WÞŽ<^]f2Eé¬XËéÁ*gŠ‚b%q«Ë~ÔH™ÃÖ¹‡ÍQ·®ë5vÌÙ½¿eJÏBºaÔ±$¾mY°™õöf;„‰c»÷LÓ,l¥¡HðOÙ -Bû :âÕùëVÇ;S~LB˜ÝeÖ,cÝúžn™¢0è)R/>Teßß®ir»ÌªÝž»^ùl©š±mâÄb8wB›¶cÞk•@ÛöQ%ë*:öyaIw¿ëtžU«mÛt­¡ÌGzÅê‡]¬•ÇþåöÇAà6-dòò¥.&Œ@˜FÀÙÃär40‚¹vzzchkV:rúÁÐ4$Kã…‰”+E Äô6"g4߀Ù!^Þ ‰ºömKºJòaH:/Ôž ‚ðñXöÒýƒç¶9Y׋°|Û‘ jfÏ~`eÖ­½ûÂJ=“8ía›Ý+8çjürj›ÃcðÈ>|–¨ÌGϬŠŠ'Ç)Ä&¬W<Ó’½4,½ÑÄÏ°qt¦’ÎêmY§€uÁì%éE™xOÏ2Ü™7^Þ棲ä#¼!°$ý¤­b÷Rc)ä÷°ãÔHhéªm— šèêÜeå'6gܺ¡æD“h6doíªÛZÚ©b2¶“ ¹ãÔ€wß¹ì;×?øNx¬àÍXî—6÷ -~êâá-×ÉÔî =6GäÝkÈçpß:ÿ–l@_påÕ£X,³ôµ›ã=įIä¨â J%ŒY+v­¸ò¿1‹gç—Ž*,JÃ<>‚ºÐ -a xxU6NsR§yyÃö¤«H©i8Jé·l,öïÓ·;ÄGD7Bú*Ï6ãTS»<Ãdg™ ·E—Í[ì2T+KkôñcÙ±ÖýÉŒg–ü¬]£§õÇÚÇèp}Í_ŒïaÂMjŽŒ­ž¶NªÇ¢šê=”úŒ)óé¡?ž½sítpòZiÙ»X0 4,e8‚¿]Ъ¹Yíö­shа©ú›y¹è9z8ÝJÃ0…aBKÓje¢é>¬ :û3Zõ— ÿ 2©Š.¶º”=Îk²B÷íéÍ°ŸÐôŽRÝø‹ôã„ØÞØUclá2)è @þÞë>vúø§ö Oí‹ò{m¡Oíi¦ºFÞd´+@5ÓβÞfœ5ûÓ&U€‘¶Ã—¶¼°¬f7·m,K£THFº›V] (4Ž>œÃÇá$¤øuðÍ”UÐmßDì®Ãj³Þ #I——Sé9|œg³ãºÜÑ (aL{‡èÿ·kW¾õhx_$lú<‡ŒÒÃ÷r P#e9G5À‰s¼pÞèŒÀÑáôï"eÕÖ²6»È#Ù¨B––߇Š62¨û¨o¨Ê.”Rj+L|4 _<Ô/‚+F¯(‰×2˜}-C„>0S`qŽ²ÕÛNRÇHé5ëû©ƒÂsPƒkYÄ÷ç0ËÁÒ¾ûÚ«hsì)Oí­¡…È!<(®?èÛ(¼{Õ³%*#élÖ ´_ooÝ&”yT®õÜyìŒT*€µ{A°{X",ȉæƒ;ü÷ägXv«ûs6ü§é¿û7›ª–“bz•odD§CG3‰I$’JÞ'v¬²UNÉWøZG­!_j«+Ž,dã¡Š |íãðºW\§“MÁR5?. £ñl¨zn(i3pJÛ'=Óž–Ôè„©Sj›naÜçwvá1†7]anvÚf2ϳŽUºÖÆôƒyÝŒ,¢‚ ¬R,v®Ò(G;Ò¡Uo<\^°[ înw†ÑY1©•mŒOŒšõ…‘gVMdhO™@*þ»ùhÑþ»Ç±•Õç‰hÌezÙ÷»ûôÙ8z–²"6”,•D)ƒ3Û¨Ø5=õíI‡\LÑüÉÝìH -êñ7XNbmš€9+æ ówûè¸ÎYüÃi&â[ÀÞzbcy ©â–ɧףYi³t:,@Q…ÔÜ#xy‚VÔ÷x¡ë|ˆvõ"èsñº„‘û,3? “·† ¤F-*/GöÂ.ò°N¦Î¾«ôxúŒ¨Ø‚ÀLJã¼’õX {òŠ ºÛúhçªÌÓÒö`wC áÐùé¦ü—<”ÍxÎcri:t0ÞÇÌsF‘ðͬÆiü™SDEžÌÏ2Ï„†R  ³¿úý'GM·ñbɧ‰\¥G0 O!²[s¶ çâˆ&MHLÐ’üßJtª½•Õ†G»T[YV8+2™VðQyÉ„?DÉË!ÖÑÞ•¥ôϪs_»÷ ¬rXÍTï™Øæ@CYÞ*Í#îøõJ¢‹€3ÖÅZÇ›rtÍCüöHÓ܉!ò ñ•¥¨v ¯ƒÍNÀ°â©÷mp !a0Ûˆ÷²{ÆôLü‚ÍÚ´Tgµ`”oijîò…ØÆ\Õ§=ì⳺µË5ç¸ÓœÑP×igYñ \ëN¯•šUËÿÿ~ðì4©h‡PÚÞ Œ°QeþBiC…×Ú­ÂÓÍc…2T¬Õë•î;tcA l\×Þhu"-š“ä°Æ¢U©K3mK Žë7!hZ´Ó\üU!N_!y@ë/¾°÷96„Ùk5ÔwŠ3{•O!½°n¿ø3O1þìÇÑfÖôw?zŒëôu~ÀZR/È -Ÿ½’Ï*Θ§Ú¹ÒŽDZ4kÐ*0s6!^S1©îÍiúš…¥>ÿœòAU4¼ãb~Þü‘ö,æ~¦¼‡k<5fÒ¾}ž8ˆÌGÙ¸.„+/ÏÙ;Àc‚IÄ1ï«ìŽ.sÊ~tvHg ÀÚðE‹ƒ÷§´zO½Œ­ðí'®@Âè 3ø÷! CŸÔ3 ûˆåÞ»ãípºiƒý suyÄáø³Ø}¢i;…ƒšÄŸÍÀk{ƒñĆO”L.' -ãí£#eUƒèËÿîYt‚n¬’XÈfæÜŸÊ,e"&Ðô¤ÛÎŽ‡Ê¹“qç2™&LœÑ|¿ô~ŒæÙcU«s¡såý—é—ö…i»—| ŸSV<ëÿæ¼Ü²Ém úïUÔ4‡ H‚Ü“7àÝn Õ­Lõœcûc4¥.UŠÄ#â†W -"%Êz«OèE°bí=÷G¢ÜÛ]ª ™í -¼>Ǹ à&ã¦OÈx~-ŽÎ «8¤!|ëûÚÇ©Fól_É©ü¼õ"¾®Úç­•o#ªh»õº“Æ®ÆPçÐH¨ÞÖ/gr‡ÊÎ%°úÒ £BÍ—I.*ŒÊ­Ã*"P¼•|8¬i­VãHSøÖ%©~u½£T[S 1œRqߥnøñ­ÐÈc…Š‚8O¿qt¡¢ôÊÇWÕæ$¤ž={Ì{ÕÂT ¾%ÝÕ|$ ˆëØU6öB°gBµêd£¥×$Š_ -2´ ì-x8w]Jô"§i7 -Ðû5¸ýèêù. ’—óïï*ôÀ÷æV·öèô{žÀ¾˜ÃŸÆçð,ò‚cSiúËí†Ç0bîç°@LÇb«7|.öÿè[0Ý͆¶xfTºá4Þ­­Q?ôÁ­îo?*€½³”\ËØ9Ûâ…qÝGeTµ¾•ÄùÝkúu½®¯0s‚üHÁ‡Ú>€^Ó”¼ ÜļóMÑ8K‹Ê«èïd/eØ®†å6JaNå‚5U=ÓßïnzÓï î’R—£x|«\f¾3ó·sç#Ö¹±Äž{¢yöM:qƉ££ú¶ª%}Âѵþßq<µhIýǒɺÞd¥ò5‰ÞÈš8-ßMH;õ»Æ×ËwCÀÀ½Œ§TîDþD³BlÅ#|»Õm»pcé cvƒ®Š\ÃñÅcaác©óš†5]&îŠ.xr. 
Þ=߀‹ñmM›;³yKª–[ìÐíÖÖÓ†|3úD° -áRgÁN¥¡È+‘æu™s~Oå³s«òC„¸©ka*Su¼ÅCð«o¶…ðKõ@^»  +R½uá‰#³M±é¶ëzÓGÚn;¸vL‰ó¯[gSë‰(ò3¤'ëmr¹Ñk  ­Î•Éý1)ŽÄyJÅe|ð¶p˳c=ISN9ñüh\ø0Ë3Ý%òQ¿½èU!Ö/}¤=CBNòÜâ}Vå¶ ²Ž­î§”< ¨Ãh|©Ù­ÿmÁ‰­Ê‰É3‡¸õ¶7„ªälª«ç(ð×±¼†¹ó‹ -8K{(°1ÓvüÞbá>¤‚®PÎ8ÚÇÝgÀ©€ªUõâwDívñ×É~Ø‚?îÒôä‹s ÎÐYšÈ‘¨û*°7%]܇¦“ý¥Ÿ¨ÍÐÐëèvÅ¥yžçm ôܹ×h"::y3j+Ä{¿>Ï>¥¥åö²ßx=ìÂk¾‹®u¬!ºH]2U„Èw>äLŠa-m4£Ã…!|dýœÔfšPöãÒÎL¶ˆVþ¯ŠwV'·vãnùå¹P=[Ú[Ƹ£ì­Õ³Ó8ò¼çááÍw¡$‹:o‡6éeMËÇÃR™^NÊ/à{"_Ç£è MŽ")-·Ë»æ—÷#¾ž©‘–Ö¥æ]ò‘߶Ëì¡eÞ*£ê³_²ÌÆN“Ë$Ãt+º«­Øêýòl›'X ª~—S`¹´E$‰ ÐÜ ¹í:„Ä ŸO«r¿`Pc„&ŸêJ -ð‚ê»ó¯þË·„—•¼f{¯:3¢ùñ“ »®§ëq‹½ËM„Ñyx„÷UUÃå¡ÓUáÄVÍõ]zÿ?'ÙÄo%¦©‹ùXüH=ȸ—`EKÆ«ÿrÛÈë. Z½¨]b¸]À­‰ÐŒ‹%óÑÈC(¶¨iw¥r“fß®ËÎ`H¿Ž¼œ2Û)ùõú¼}G¯óÐ3ûœõ×3Œ’³J]òšu4XŠ‘o¬å$©©M×9—¢üÊŸ#*p:Ñ!ïÐÔ7às¡«ÙÅrùÝG­Ï’U÷úÞ˜‚Újô”K{¬"!¢Z>œÓæ%g©ËESn).øÝïׯZ±<ù- ­“:>‰Ñ~¼㮯mXŒ4mI˜üºÑ˜H[(ßuªyñÝèRë½äª!%ï]–;Á2;^Ñôš•S4 ²%‡žá÷~~>_¿î”4C£kÎ?®4DdŠ‚6—±&¬ -WR(Wµ¼&wn -Te˜$]q3pu);Å*¡H6+H®DQöKCÆap5?b­ß¯ÿõ¿Á“d“?»šâJW£|ýŠ¯ÙUèxÁRG²§¼ØæúŠx²kwmÐi×úgËß QrFzxÕ—Ý÷òç÷Ëó2e"Yęόùyí_ÛÅüõ*H¼\ȯu5TCæʱª2ØúÔ’¿ýp±éJ°{¨×ç’Ñ¢ë¿)/—äFr$ˆîûº€Êð üÎÓÛºÀÜ~üy€IU›uo(¥˜ÊîÏ(щ%Å]’p’‡]î±âÆ°4JÌy.¿xÞø1}þcõˆ+¹48ê3XØty‘;eí”} ô*ü\¶ØÅcÁÈX¦!ÿ_ý¨~òn½>êp¼„4Bmß²/°©Ã/£6·«ôÜ®[ŒÁ”®<¹bÓ2kYœ±Ë^'0«æ¡•N.ó:¦ï¥F“;šÚÿų’¤”Qßv,¢Šðû›gýhaÑÒ‹Iä1©5"¼{&g”áD&ÚД¨{É|Õ´…ˆ´z,BÖœZ¼Gdâvã¤ÂŸ%I -}w| ¼}Ça}þu ¨²­£çq/¬»]¤‘¸[¡éJBmIâÛ—#ø™Dä麟d…Gy°1úÇóçÁÐ õûà©Û³b,cj‰ã4kÊ)g ¬À5f6Y­-é9™…@ÇPE÷šéˆywí×&w‰|Sì]N?±¹[u§42«oü¸1ˆ1õó¢Û§¾>ÿ$"[Æ3Ô.¥'·òëvvλL—»üLex H)–°$âHŒ†sV±õæ»%…‹.™;ü-U«în_•êص.Kõš&j9cîcþŸozQ¬_Ÿÿa›RpŠnmünÐ:WÒžVò¼2®Ä‚\i‡ÿÓ·š¾N¾ì£u¢ªö6Œ·{ÙIeտͤWáþÒäA¬a²R®ÌzêB‘.5¸Ÿ)ôÐþ"ìÔÁšùÕcÚÜÇ8ɯERŒ"‘Pkû4#n­‰º'ìÂÄ#R³3‘MÐæÍn±+ì¼Zí^ôûó_#Ããt™tÚ|OdÉvüv=ݯUÜô4#"©Tœ*r—ù¶»É€‚tQ¤B×°]Å4iîäÀžÂ!§ÂTÑðV¡DÀ“ÊÔåfþÔÔæ˜ÇÀ­kg'±¯Ï?À ø· ©v×™¼3¤¥ ©ÚÑ–9'­B}ÏMs,1¯¤vÓrãð—Ñ{î¶ÝC°Œ$šÔµÒÆ_V[Æ0ÑÀÔûj—j¶Y-gÙ{{•ß™"¶2¶T—Æ'Ü™AÑúòT_O·> ¡bD¦„céSû[2—ÿÐuÛoØ¿E&½Q]üA+¢.ÕtÀ>Ý~26¦<ò‡Uó‹ôX­ ®T.ì©F-tHeÇÝ\£mC÷Ñ9Ê®«yv£xöÖ/OOÚÔË$1rƒJ뢈¹gÕžIÌkÀ‚ÂWnßÔ+øz¥ï/+¿Ç]±‡ß†6<ö>~36úCSý~óÿðÍPÏ0&ÛÇÿ9fæ,8‚o·‡8«bÔªºsCTÂUüJH*ãÐå¹J|÷ÉîÇÈy(=¾ìlë$ݶ––01ºÎÊ+}1«ÌÓyŒåAN‡;X9IzZüÔÒKÉbdqÄ̓ügÝý†AV¢@ƒü²ºS'±/~1‹Ÿ!´ºfDe”Ç'Ãpâ -|8Ò\ÿ€ŽK»Å˜õPlõ¼h×4”å$ñ$Ëú®Ð[Ë‘Ôr35ˆÜK)-Ò’þt‡I&¯ -S¿õO’)ý]ö Mãp¤òÏpÊ6¦Xü×ÚNg##X¡ñh;î[=$Š­WÑTâ-2{:óšv”ƒö›©#»­=² ©Îƒ±‚ƒñH¥Û+*Ž!ìº=†¥R!Jæú=C³™*™™½¶f•ŽÈÓ*xÜ Ó8ä=*Z.vdJöÒ׫éÃ%tµa˜Ýúmàh͈¶UjÇk lÝ*àŽýC¯Ž2 -Úb7tÂÓ¹íœí==YÓ~ylÆšdÇ™6y"ŒÜ“±›ì­ÛÓ…{÷ŠýÀp;ùÖö*‡—BoúGàùçB·ä2-£QvnÝ›¨z&íY‰QF2’NÕøc•v˜iêT5[O >çéÎwxr0¶¾›ÓÇ‚‹DSÇ/›ûØþû™Ž,»¬­ûÞƒìêzë‰+ñDæ:<#àeGT9.]B}šáA-¨ËÊçÍÝ+µm‡[ïî9þ6",Ûÿ9¬É†¢æ·®v›™÷V‹§šÖòRš{Ÿ“î´7rÄVg³× ÿ@Eb¥üèZ>‰‘ôa/šÞ¬‰ÞG:LÅŽƒÒÍNavxÛ”Ùø§Ö˛Ω1õ¹µïc°¯è·×v·-2u%Jî«ûGðŽ+Ž/FïæX‘P.„øÃqèG²W`ö ]õå¬+›~´KæZŽ“€);¤f¿Ÿ¾’í[sú†àIÚîÕyf­·ÄŽ“’æW`ñth:‘Œ~úý?ø“d„ÓiNiµTûu}l½}Ùyçà¾r†W4ÑúŠ&ÁYÛ5“¤f#ÜÅôÅhékÍÁ´ùNJ€(´ÄÒ/bÂÅ^ºÁœ@àsüØÚÁô%(ŠzªóØO„•Š”Å+Yñ²Ú ›Û -E3vÃÅCRá64Ž« žˆe[ÄGª/zÊãÊ  AæeõñÙ‘HìHoßɪ.y/(OuBøåûÔ Ãb QkÍËÆtÆëÈ“É÷çÛ!Iî%\êéGûKó»#oIôÅZþÕs86¡ãÍ!Ð -¤¶LJJ¤“#§ªíE`£ç¨`*¹þ)¬ ú?zó¹W-:ߟï«U’›C³;‚ÈÁ>V[ý\/eá8N1]P!ÊÖLJ›XGŠãÔÂ:θ‡ T ĶôÄøä³áyN‘\¥?´Òû?^±ƒJt£å«ù÷i±ƒi?ºMÀƒš8¶}êVg¨ÊŸ?4kWÉÙ‡?‡…Áú0‚®ÙH ÿÞ1-I•Õàˆ R+Éyd´Ú¾­¹–Ü|ü|‰šág»ñv -”¾ó@c®íÞ\ÀL=‘#Ã¥l¿Rëf¢¨‘n$]„§P† ¦é&ÖŽáJt í†Ån•î%M[" ô Sì hé™7¹Mxhwà®:½{R\ô¯Ür1›)àzÕÄñ¼2€‹\¸Ý‹µ_O-éFê­ *0N¤$ëºÓÔ-êí­ ,Ôjì»Ï{ÎMз¯‹ø½(œ\¡|]è‘$OˆáŒž·¤Ý2äÿc-~žN¥Ã£ç2WƒJÆ ìRb’NLâk«_ÿ߬¼nOf²¼1Xbv¶¯~7ÝIieÉŠ£2ÔÒ_#ÏØPz0á’5²æ¥Šv‡4ö?S×jqŽK= 7óÒ×ðµš/Ý“„@RqƒslN§Ö o-•^ßÏ ôÐùDžšéFBmëÇs†Þ0œ*Š eÉÔ7Qê32“g€«Ï`¢–ÝIEªTKùˆ›LˆzÆ°·‡5úóÿ”—YrÛZD·â ÈqçaO½ÿïΓ >“R;¢ßlH xQ•ãàM~¡(¨Œïº'¢÷ºÆ‹‹)ÏU%wÌRŸë«>•¤ù÷P#=PÁ{$¦2PÅÅâÚ«k¾³“Kb—Ç™ïØûMl@’‘¾V‘«#àujäaZ†ú‚ãÎH··r`!{/¾`X`yÎÙÉN*Pø½â£99¢ÚËYü™ÖQ,°°ƒkÝXÖï‘â0m.èù´&ÏžèèfÕÕL!ê*Ỹs|æž«~R6 Ó¬õ¡Ç¶ÙS¸Å‘c}"ÖˆC–¨?-vJ0ÛÇ.}ÿDah[ŧ{`,fv Ô±s:j 3¥2ìÝ^šÏ/”2Ò7×°Én+gó¢ôWŠš®Î¥` »WÿmŸ'hTËù-ð¬Ó>´È18 ýõ +ÓHÅé3ªÁVy§ -±_¦ ø» ÚÁˆÓêIå¤_Œ4âulÎꛞ?'?—š|hS׈¯77’uoêîˆ \çF×…ãÊ/œ¯½b‘ 
[Binary data omitted: Adobe Illustrator(R) 17.0 artwork "requests.ai" (PDF-compatible object streams, ICC profile, and embedded thumbnail; PostScript header dated 11/23/16, creator Kenneth Reitz). The raw bytes are not meaningful diff content and have been stripped.]
ÑHº¸ý(ýáéÈßfKTs&女öÎM~9#ÎMnt?òx,»ß{Wà¼3¯K0Á«ãYKf«4r 2ÆKÜ»å?‚Vo*‚¦LDÃÞ¢ -¾#iOhµè1GÎ&…¾éN&dË¡\ê°¢xÞâ2ù/ŸXV§h÷”\ÒšýA¬²7…-ÛÌ*“Ùª°¦¶L’eßO•PFçVÇ•sX<ññ¾vTcÁ„=yß ’9bÑè¶÷d…•þšþñ/ÿä³×w}îkîíÙ„ž‚Tó;4Ü•4ÚÂP”Wg/}m1šËæÍÖl&Ôá†F9ÈSgZÛSôó)»9ry -ŸWfk,Ux¹Ñ >çöšs ½1 Ž7æ8ﲇ`|ª¹x§Ê󊾶Q (ÎOyÖÑ„=#¸ã¿n¼†Ò3ºø%V§Ã‹k†è G©GyP bl »Üí«œ¡S'€ÈˆâXYt‡á}ÌXßÞi ’GÅ3Y–ÐQó³Ä›)§ŽöØ…ŸÑ Fy)w¼©ÙÝ‚ˆ~JŸtt MÒTMýN¼‰•&ÝW ¡v‡òDD±6©¢dù‘©Ïô ·U—A·Ò¥·õj¼çìAÐ6Y[ܬ4Ô¼*y׸65òp„{jƒøï³NÅn ì±9Ûa •ÞQéì(EÖÅZÀúBÂtYŸçÄ´üŸr;ºH xƒ·ZtÒ‰#ÃÚ4F9þ]R\~ ß(¤[)EÙe{ÔÝ0¯QVŸÕÓ&Ž5I“lMÍ&ìÈ™ööŠ‚¡C8ËW;’ës4JšÖáÀ`3䬑5\Ÿ8cZ8+u÷Ö~Ld`x'÷Œ›ød­gV©x  üßåê®j{ŸÕÉÁI´PÒëëqÅ eµ´hÖD‚êV†¯ç¹““»ˆ"³ÅÛ_j½Þ¨h÷òƒyÚo(né¾`׫PJ1®RQ‹kRŽÀž4—c(V&ÑÕé\!Œ¸X×rÏs6ýÜ'`:ƒ€ñÓZ/¤¾øÞfñ†)X‚μìFn¡Ù‡Iö3"üÍ« ¬§÷¼@k·T°¤&E‹]©õ s‘=(EÌFÔ/(Ü{±áN·ÚšhSæñºìöóÚ’4Lw"ÔÜbÙ¾R8?‘s]nü9Ø–hù“q„æŠ(ÆqÅТ‘qÓT }Ú¢ -] fgãwÂhV·Ø®íÎë7ŠQwùCœšŒÐWOĤi+Á*6Ë\&z5×n>ʵ­ˆe’˜/¯ì“6~nPíl@»'L s‹sÞ9BT]aXT<®#4Ÿõ:zh Àò¢“±m?›ôL¾¢×Qþn´Ë߀J· âØyÝ1µ¦ˆÊ²˜’áñÞÉpÒ†ÛuRÇ«=’NÐ{DÙ@¯ì éݯèÒç;\ÀTs-”—åMUûVžUÿì!]8Re9¦¨rª0Ö+¾hù8؇Ã/&§žÊŠ6îèk½õrh^à¯Ç–ižGtÞ2’ä ?rØŸ±‰Bæä·>Ý‚²Or•r'Ô± LšT1Ñõ\A*…³¦"õÊwx!—7˜‚~X9w¤ÜƧö‹]+6 -¡-<ª¥OMã‹À‡¢ÁM†˜&øàe`Ú¿öâ+šR/`PUÏuíMÔ§¯ÍÁ±ÂqòàRP»kñr ÕÑ…ŽÙ'ˆâz#*zÚˆñ,#Ê(ñ͈÷ÔMB¾F¼¶ï¼ïw_Iv.‚ñ»­©™½ùÕG²+ÌýL=jWôÖšü—îø`/÷II•¿áäZ±ËtR¬y®y±²ï§†˜i8}æ…‡âö¡1i›¼;!ÔŽG þ5µ›XsòüqË"4yvêÂõ‚&¬`â’ë*jÛÐN¢zj!j½ÛÚ*"WÊkŠN*©(òÁ:—ÁgÔBTˆœÍdüÚðVü²5<•\Õ¢îPñ\ ’ ä÷ŒqëÖIéÑ:œ¤Ð y±`É|®=I•‹~msf;½q2½ìŽ!"ìM 浧»Br8Ç]ªjí_ÇUµéÇU€*A©å|¤K“–ÐÚz'w%Q<3‘~ì!kϤa¤eŸ§Z'ýTP¨ž*|iÊÃ$­kˆî¸ÚEœöìuØör–j–`Ú‚-ê2gB$ð«=Ó(Ë'H繉¥gÝQ bú`“9´éËhIž,h®  -¦Þ<û±=ö®êé°Õ¢ŠÖ DÀñ¬A=µ–—€êúä£Ç[“òÔ3?^}4Ôx@('fÈÍ-Nb–ݨí†ò]*_#«ª‡¦N¥U¹¹ z_J+¾ËïðbÞ3êlìì?‡yí°ÊWà=ãûyˆíïfnäÅÓÙö±Êù—nSÏ6Jk”6OuÇvã‰r Šˆö |ŒÐ~ùm¯ûS\N «B2šskF®ð,GNà3­`¾‹B¿5­Ï}—¦Oü¿øÔÄ›¦@^!`U?¦LçržøN´ê ê0ƒcËU®/w1¬1x¬´»ª¨bÕ6öNjDGÌÕ’3ˆpEVÄ0nm½ÒxCùR‡–Òôâ"}̽¢¼ÂÀp”?¸¶6?»ãÜæš7wpKýÉExf’Îä¢ÕØÛ\Aº1u""_õrø,aAò‹;ëyM×1ç¶|›”%ŸÈ–=ÿÿÛ]¤½¸'1›?~lKCµ^²ÐZi¼!ö°»í;^çÜÿÃ2} îlšð…ü fREùÿí¡ÑÒ»Ò~K4æËiÖ")WÕ˜ÿÃà·ùÕ¢ä«ä¯„“¨ 9y^0’=¿Ym=ÀW–¥×BP0iK†–9'–e0@µý‰™_Ö>W©bd•jL›9ª)’ÕºövÖIļù®!¢ -ŸÚÙûÝ zíF4¦&¼çdS‰4£owhš·¯µTV×hKÕÇu~®gÁZr –ÝϽ -µ¥·J]üÌS—óú¼²©dK™-ån ’ŸÈ·ýHÈ+§Ø1¾µ¡Ä£!ºpo~Êpþ*òÒÄ8ï81#wë3ô¨ì™ÜµÃÆmë -~£ÿÝ 5ŽÎ?YÞQû,ÍŒúóùWNã#L=Z qdÑ9{‚u‰·Nc¸,Z-QÎhA]¿‚þûû1FÔæb;unís¾ÁâR1aÅ5 X|Öd†£Ð"®ôp;zkÅì™Ò²“}Fm¼¯(xFj"nι=Þ%Iø Áó}Rº[^v=¶BoϽXŸõýw ‚PÀŒ #—â]¹B/½Œ(ZSHçÃ)ç‘R{( vZQc–LýÚ1 ‘¥ ò|/šù_Ð¥¹*×jdÍ·Ž1Þ“!i6‹#FÓ¤w>vë$Ìá"h½®;ùW[Bƒ¨·U dMéù+)ÚŒ—ÇíeLôjÕà­vÉ^ã píÀ‰÷wàM™'CŸõ -ä>kÄþÛJ"±åøVwóu¨ŸÑ‚!³<Õ”1ÁÕTdë–±4©o7©G¼ òÐÞýÜ:ÃlÂÈ¥Tgãñðl#q}™«‰²ÿûÿPQ9Ë;oÒìÒàT¶æ½*ª {…ÄTáó;—âðy¸ò­¤ÜÔ%d•¥ùG~HhÔÌ \ÿêŽô7Èñ¨Óa®âc{óŠã ðäµ­BùȬp®lmßOš®I«œ,‚ÅÀ‡åð½®Ô%rèK„ªÌ¦6<ñ.•ò}ò+‰JcyÕ])xW©xŒš7+czÃæf§R -þÀ¹wICIË)ƒÞ±—øäm)ð¬‡g,õ•Œûç" ùUY/çÇ2é—dºÇ\þHùó+µºˆF[ ûFfúA’ýé;ÅšvL^+`3„ß¹»Nwä6  -H~%)'ц~¦eÇï °¶CÞØ6Zcɹzßc¤ü8оZ8ðzïâW¥½uÄ`Î+{¾„x*–¯BjzJ‘¦t{^¡Ç%—•,ôi-©À}Fë_1Cö4 q=–e-Ê¥‰µ‡õöˆFìÀÁšU,¡Š¤ïŸØv]I7Vq@$vĹk}_u¡Ë•7~ÙcªËž1OÇ(Þië˜@ÙÿâΓFdÔ×é÷¯&‡}¸æ‚Ì:8Uβ×ÃÏ·¶×é -yñE{}¥×¯ ì(%½Å>;àï×ýÅÅÐ^Õ¨%Æisû~#çdJ ˆ!Ê1õ?jV3jÇàk¿Y›79tžÙ£²LõPò”EÐ4$Ý–äIcV¿C‘WKF¤Öc8uPa·Ä¸EðŽ¡jœ;“^¯YýdÁ¨«Rˆ$( Å›¸'}ŠNìü«º8”…ΪMÛcoAÞÿ„.IETñ]“P¹©»ÔʶÔø1•SÉ©À£B­ñE˜~]A?^'g—LF¢]Ÿ=I!ZB_iî­Ø?bQÎôµgòOµÃ^ º©ùq,ã’OÁÅË•[6hB”MüX·”º µÈ[Í8>…&ØŽäéÔÓHß½_ÝÎFjtmZ3¨ ±Þ¹0h¿ÐÒ^é‘·ý¢‘!iÇ;ÞlßëRT (m©ø·ë ô ™QP?c˸‡("RÆv/ÜÄb÷pGô¬­îO’Æ…Toy“AY/$S4:†þEœ<ÀÙ!-Û®ðbÞ¼pƒª -®„a×dœWÕ›Ïí¥-„y" o`ã÷.cÇuø=±—!€µ&uwp¨Ç`¦êõ ¥yžr\Zùø -ÉþE­6V»¹*ŠyÃÄ‹÷„Ï´ÝÍØçÓ,gä‹+Öºaîgà J7‘>°æzÌÎÖë¤æÏÝ'ðÁ“}¥QáX7n¿þL=t´Ð§ü—ì°™“"Ίw2Ë© ΄…ÏÞ †Øwi–SÔ³>wÌ®z„ "9Z¢koí©ú¨½¯¥ÑóÐå^OK]ö¸G‹Ûpí¡€n-fp_ì'Úi”oÏÚ‡Â鋽ö0hJ4|æ°@äž8Á×ÈãCR?¨ÐQ»˜dCüú™»“K’íLÐ+ˆ=Ü „C;Q¿)ÇDM ÈÑóp÷)ßä:ÃÝõYU9s`Þq¬=è.öÔ‚mê¿T禖ÎÙµU“–œÄöR+V+۲מõ0†c‰öRÜøRhaÞñhŠP÷üP[¥j™óºÙ•ÏÝxËf­ðN/Qµ““iãX•É†K˜i#5•èê,‹oNg;ÀPÁzî3âG  0ªtZ™×’¤ -NÍéo».§žn=u€1!ùñ±NK’†”uÎÚöÀ -ÔšÙîmuŽP ii¿¬nn\›S èÎBeø[Ë[Ä.÷êè å3ªbsÐà)²ÏLzÆþŸ¶:ÐÒ@¥9 ôiÌìJeiÿï-1 ö8Mž9$Sä 
-¯¾í«ñÇóÎ4®[Åá'öç×(7ß8ü(Œªf“Õñ›ÂÊuè—#“‘˜3ßN­HàÐöwÇŽ’‹*T -n¶srõô'MvƒlÍÙðqNíáyNYq#aõö;ì¯ÀÜGHý¤¤è|ºÞ=Q–Ÿìz0K§~¦ #½­ wG¬@œÃ3¾€Å‘Ú ö½C,Õy°ÎÈ6]†Ð÷pbýóyPï;|uÏA·`£ßƒly$³ç¬è16Mâ/j ›#Oîô8ÏéëC~jR÷OöW³‘äKn]mƒ]Y@è óåÀ÷FÆLºÞÀ’¨‰îD÷g<9ç¯p(»Šv#9J`貓e;ohÈ"Ô7®™NÕ\´˜Õ™^#óø^×éh1Gm%óK è4*um-÷Jã ît æ±à|w<£Ò1§¨ÕËða@~kcE²ñ®Ù¡¦7w‚;°¢ÕÌ:žqÛÑK5jöE«ì¡ÆsFDR pêß8&(Πý2ŽuuF4‡¨óŠ²#?©§¤áƒ–Ùt¸#°ÿg8»¶/6r)lŸ",ÖÎl>ö@Y^…–Ñxl‘炶)aÝ"ˆ ¨M…ì0'ûztv.˜ƒdee~²ä¦ÔÂÿÊQrÚ(ªæ§% -ôrX†f #3\áãbê-5ô¼>K•pÊkû¬¦^D©®ß% ÖCçÕñÔÜm‹5ÑYn ¾çg„¸Þ}Ó=e Tqíè\2S—|R«—ŸÜW Š[Òµ7kÂÆ~-èŽå¦w1G u¬îÑZ©¤ÆåßÚv+lAAäD¶õ„«%Ÿ]¥'(¹Xî©ÊĈ:‰©í MœùS»‚ò¨9Lëòˆ}ÝÎSl¼NjÔÅ^…AuÀÛú¦â ÔÜîgõTðöE˜‘Òߢì=÷J¬WõB¢Èvoo‰&ò¡€]ÏÂÑîaptĬ¿þ•\Õw„‡”>W5Îêb>¡h52dÈ#G¢.†ØWkiƯ#Ö7ÕŒíUŒB¦)æ[øÇIÀƒ×=ãÃ\µ«ÒAA¡c¼Qê§y!B—3{Λ/TêÞBªÐ Ð^U”@ínfÓ»bøA„ï7ùMÅð¾i¨ ðjO‹÷üVB¨Tº1qíZä «pÆ=౸A6®„5¼Ë¤B[Ùl-T¼lÓ€*3qœ¯š~X:cÎJRغæƒÇ,'·p™;ömuÑ5Ø°E†ö‡å—ªßÆ´®Ãßý -ùÞ“L ½³*êÎ+îÑJ”à+ø¡CÒ"›0¡‰»*ŽóW‚ÓFŸÚN ‘¦ÀIÍhÞ*XH…©æʽ†hÙ±Sê%Ã/“%¾ïÔZ©{9Ìž‡"qù£_ï’5t9§´×•L®Q¨`è":¶0—–{)¡ŒwD™J·òrš~-î’C†šn¦U_“+‚éOÎ7"ÅzùW¬¨xJ¶¯Qê4¥ñ̼' ¶6ä*Äw%©/d»½ìX8EqÛh9#(€ó…9K úkßë¹{ê¿ŠøV¡Î6ôg²T½œŽ‚]I¼cüEX±0‹ÿBíüáï -ÎRLFÏ©V»ébG,ý",^z¶3lv…Ît«“Å&µç'`k²Ôrmýõ¹ƒ­¬ãd79_3‡¼¿~&Æ-^¿F'ÌÍv=·/^ΨˆM­Á˜˜üì™°ÿx‹÷Èh(sk¨¿wmÞBk‚Èž0•¨û5S£—pGó¡*h‰ v@òw_Q,7·Z@Îw#h^>Ñ$«§žÛêˆ&ôÌÛXÈN+þ-ÜþÂħceÊncmúUT={Îai£}ßIâ1 -2áJ;®‰+ú%¾¹S—qK»jŸ¼lô#qjçuO¬ÛGiJ'áX ÇŠØm8„~ÜüéÀ\Eq2̱y&öOÔ)ÆJ)ž”`N8tL½°Bcw–_ÑD߳ݜ"í#ÝCÚê•4-!´Ô3Óüìij̯S¯T€ ÄS0Ïy•#®ÃL»ïÊf™DŽð°"½¦xÑðà ¿ÒdŠ©›#={„êOø7äé÷BÿUV|ÚHW1L›-*02ÿÛjÒ½•†’­q“GgÉÝ){ê -hÖI,wàIz[¹L€;¢CÐö=µT˜™hrl©Àâ%á×Wý|†wLŽl£õk°óùËbTÎdVI“€Ù•2V9» ÐÎ4ŽäJµ¦]¹Jì ü2B¿RF¶Ô aЫ²›"7<¼éÌK¥ROjiÚÖù½¿`~FÎZsí|žl½%94w¸½Þ…2’‘šÌÌ”¢‡¢ 'ÌQ0×M=xnÐ7Fc+/Ž}ÿ‰$î/?«¢F«÷ƒ®vÞÆ)|S¡[H–|–cL %„SVÎ"öœ®EƒBè jÐ÷C0J‡¹¢™&á«8رðÅ’FMÊeóÛWeÄ8´2fw<)iØ3Ú^q ’Ýt$,B·¦[ª -èdÕnPÒ@¤ŠÑu:$¦mËöøóEAßþòä'kËE¦·}“º†‹ôl=•¬3ÂþÅP‹Öpls³ì7úO[b}e±êC‘uîùÉò+P\‡zmS^ÎЯ…á«WLÕ›^iÁLâ6žDûºâcæíœeßá.À¤Æœ%oS ¾r?™E€ªúÉB=ÍËôG;¢·rX·¢av*é?µR7Ó_5˜Ñ;Àñô²ðSuÌÚYÚë[D’à×U(04øÝuUCx*íDk1Å“'ò¶÷zlr‰ru[“ŠU^¨ ù{}ÌGêÍ;âE”“’ñšƒQ–d€ÒýÃZ²ò\Q2 [IkT …/$…,ZõÓ][ÁÉ…-=€òÝÓØù(ß‘ºÅ/Ž#ûÏ:í¶åï©x6/LŒpÕn9·åŸmO ¶ú=›®Âlæ¥!vi”¶eY ë9i‡ _ÈÀµ zª£E“+B:Ec ŠªX7¢”3„¡ä3öÊ -FWˆ†!V1jiݳ:‰óe1Á0Õ®8’oa*º‘€ÅlŠ«kv[èÓvyh:Ý{}‘yó§®3Ó×Sˆ€£÷R¡éúåñ<ÈY“äI]5·B¯ÚÆR¾ÎWÂ6¿âÜÕ牔³²\vR Qwàü¡kþR £<ÕëÕ̃D‘ÒN³6=TT -p¤u–€jÞì@‡¾¸^)†ÓûºÎœ#üõÁ æKguMÞs߉rj±eµ9pH¢OCÊC¹ìI—9òfl‚ÔËžÒWüY¨ì×™úQÂÆÜ3 «Ñ#2²×2Iæòs”p‰‰ùdŽ|ÖrñÆ"ŽÊ˜ÎDÈçXP8 0»ÔQˆ/UåòS{íñq”òÛ ™‡Ÿ¶7"õª“K'ûQ­ªÀÁðHôVíp»â–?“‘½$q/J¯<ýî±´%þ!q0°»\눼ʺs{®zWœ„O…’`RëV8ŠÞ×Ìŧº£Ô¾ú\nGÌ4V옣n…u"&ìYúÊõÎÁuþ ë±ÒUU¯[LG{’¤ -ƒ´ ¶äþø¢gŠ¦íŒÞVt«ç>".\ì#Ò3ð9×û½¾¥¶(qUµnÎï{挜*2@ڧ͵‰_gxÇÁõ‰˜t{V‘Wï5%ÿû©¾Õ'³o K^É©ò™Êè’§’ºšíŒsSXo”CšÔñØz÷Fýxø—ËôGâ€uc_›ÆHeúå> sGŠËCê­A@?²¢<1ê®~VôÍzRö¯hjŠˆ}¯sÓvèŽ÷lÞÑ°ÛìÉ4Û -ßqh!ʧªuv†¡¶JH0Ñ‹H\m8y‰u¤¬§Ù‹!qŠªuz»¨µQ×™qßͧA!/ DµÅùîæ¡VèpSE­_[åI›Iªc1h¤”uG;DƒßN»WCkn>¨‚[­¢;ÊtZèsÅíË!~Œ…€ñ”ƒJ†zne=Ûw<Ì£‘ÂxrQÎÚ15™ç 'žŠ&ðs ’¿ˆìóÁXã á\õ}ÌxšÿëÅÚso -D˜ØÁJÞiüëc]ñ°¨²ˆlUA*}o ™¶b¾À“¤ï}akps`aŸ(åJ· …F#ÚuÀ)5«k¬æ\Êzñ…_½MǺÒÞ´æÈT·Ð*[€nˆØ‘Ì%ÕŠúL±Î÷¶¯;ª®´E9k~).ÒñI²;¢Ð2¸€7‡—=â~Ø|qØÈùvŽUî` -´E£dþ«ÁÅñ«lñÑÿú™{NV²'KS•ßÕ›ÖÝ «P…óXr8úKíé7¹¥Duå´S ¡xü(ðì¨jQ¤ìý}ܹæ*w§“VšZx#nK’¥'çô¤â µ]@5Ú‹[“Ö!é i Ø :ÁÆ sTHþ¤§•8Újîß©ú~åcñCµt6í)]Ò,ûŠì…Šíª…/ÝÈ”“ýzg°ÐŽs ¤7V-˘\zèU·:’V(&9óf2bf2Ð h0>;@¦zrI™þ’±zw”¼%eÚ/øGÖ/óô—F5DRKSõ|b²9]1Þ×9"?ÕŒ|,»3æ.¤•6!à´)t -üÙÌyGŽG+³èJ@9âk—]KM+ÚÈLý-p°j7?ïÔ™·¨‹ÿ%óù½ÖáÏRÝ5ÁíÐ2dÑCE…8Ì;õ‰t&x$’Áûȹ®1Ì%è©æ,÷ Oí‡ÆˆÌ_%€ÅÄúÌŽÀ±½‡øÌ€£TùáE "^_ƒpHaùøà˜uh‰4Ñ–$hZG´/#J£‹;KQ _™Ñ|Ø#~q§Ë¥¢¿½¥hÍŸÉîm¼'ÔE&c+XžE©›2òkBÐS‡U+Y×áóF…Zõ)QªÆko‘ª!Z¢R“³"Gr¼Û,¯u[Á6«°52-gœÙΗ/…Ôså2‚@òú–V€\Ob¶"æÿzJ¼½ˆL½òÏÝV..¬ÕÞ’Ï ãZŒ´ˆ¨ŠËAÙ³÷ßДÝ~Uÿ5ƒO¾þi‘è{’zHõîÿ?÷UíÖn°[oU$Eþ ŸMo9F,D?SNå Dy‹Tåª|µD0U«òŸH+ëîoó. Ám®hCÙMUáŽ7"bH„¯¯ _{ªüMO¶ d*àZi™G*Ê ÊÐ¥žÊøœ ²¢îéo‘B! 
P¸ž»˾Ÿñµ"Ûÿ€Ôöóä{ñ÷콘®íJúƒF~üµÇ*ª•!÷µd5â¼ìA04ëXìò;qòóãX< °à³Ä §Ês÷ý]κäìì)_²cmQTdšQCFÜ=²ˆúWÎÅ7v©ü¡©0•[ܾãï_Ã2ýò -FŽ]ñ?O»…çÑh·Ðhä¤É°Pg2=b¡Ä â•=ÉçƒâßDÁeRœÑ×z͹ü-äÁä <9ÏÁ3BcüÇ….1Ia…ÆÆDªK›#Bš+â}!ÜyZ©7݈udû¾ -èN¬Lô;Ê~W¬Ÿ«¥Ø-ÞðlYVêñÅS/mÎûNÌ %~~ûêN ÜñǢІÇ­yö½Z;Á;ö’QäÛâ-èý…@\7z z4=皘WIÙ´0ã•]ÑW'¥6Ÿ²ÓbyXҽر\j s–EmCèÅ ?¢ž2û8± ‡ÌñjOÒ[OD–à®Úß•ƒ‹äݼr‹AR´Hà!W 1l¦~ûQ#·²Ü ƒÂ¤\ltvóÉMa[fê tñÛQ³ØÇ&3™‡+½«–ŒWކ߫XïsÄ`98SêÜi‹…Ñê+ò.±æ½bðójiÀ¡‡fØS#êa#dgÑw“kˆ4Àß}Q]càÊöaäÔnUÙ³&|£$T3û¹íÙCÑ>¥qÈ©‚'tE¬"¨D ¤E°FÅ M¨r7Ìðp*cþêVL)2Ùö™Ë t‡°›ÑÀ3 ½@0÷íGîÆQ`¾C.G@¬÷•’η©q'Ì­°˜nOIom5wuÑ£¶®Ýß¼P P[tòº -¾4ýïÃœ£Ç_úÌ´ø'¼jX¿6@x»ó üÛÒÍ<¡]^óÝGh¥ÚÃÅ oFØÕ{¾Þ(=ÎI‰·t*nÕî•nwÝ(Ôƒá:–Xp²07-oáâ'Úl¯€J²sï¢èÆíïÃ'+À¡k¶¨á #Ÿò¿×˜öÄ9š®p›—9B±%Ø‹?kà[¿bšCÒ¸C“ÙÞ8æ‡ÇüÌ;ô܉O—†®øÎu<9 ©üœ`ß0ç‘7e+ÞùÓ{œV1ÿêéo] ®4¿2óœ“þÕ‘ª%›x㢈=s‹UΕ"}Ç• —‘!]{ñži_ð,C±ˆJÝÆØ)7šG©þªîÈ3¡(󀆕˜ç@ÞMdÙ¨pÌ+Ü|d>‹OÑ:–¦,ï—ñ•Ït[^#ù¸¨†Á+•qZ&ðmåDcýMËÛÅ’!¾ŸC+c™ÍT At&ÄÞp¥"ðÖÊÆD±I¬˜AÔb*tsÇÏD­aGm¥ÜEïLÇ9‚Ž][Žõ±Y0J ô&³!Ì“/a1q¼9êøÔüd€}lå -aÀØ A¯€)ÄigÔf¨2çEN0î«$f APvC‘Ž ’ëÌýC¢ ž½®¤“?ϱl°w|¶÷}qv—è½7&çŒlóP[,Žhý]£6½ë­v"ŒÅk;Uœ â›°úbÙÑÐ!Î|‹|nEžÕfY³©öbí’ÏÄÍügt"ÚˆæÖbÜõãƒ_(0-:Có:;SÇpf–¤ñ9Âó - 7öÊUB”ºŽ·´¯£€ž´Ï Í[¼Ž{æÖwõ» æ¼çûüÊÌ×é ‚eQÏj¸¸+Ùë•;·’ì¬!´jøgØÜ -b&”MzIèx=øwÜ&æW˜o8^^Gyõnt¿£ãà>NEe‰ð± ÊNÀ` ã.='æ7„gü´^­*vj5 -‚f¨ ÙŠVÙ^tdß’=n é×Ì+99dÏö^ O`µͱ"pêöçôµÎÎ M’@ä#È9Ý©½§ꊪƕY~ ˆ‚°`¢=ïrÜEü¾—4(| ãb¨³­¯‚œ¥¢Oë“&˜»ÐÁSH+@íÔ%%ß›µ:Ç}??ðw6Ž^Kß-z”nä6•Ó‘ÛÅx3á^‰>íD˜°íx·ævá3N*¤RœÀ‚åxc-:ÓEuàA¯wTºõ*Jg¡“áQc¡>57¤K]ÓÓë9—A®œ‚ìQœ{¤”3—Ô¹/É./sÖwø¾»BóCIqŸôl -`#vM\Sáz^‡’¥w4ïd9v¯'j!sÈ|ÂSMá«ÂÆhëvRVtnEW2R½t,µ¹¹R.s‡ëÙ+µÒ_b;TÛ8WÅQ±ãˆ - OÏc©Û°™Ôs»Õ©@ÎrL¸Yýi…íS®c1ou)|°±Òel›ƒ‰3ƒṁÓæòØWØ}é°GU˜™;•RÔ°s•`§å¬ 3óq»KGŽkÓÞÞ]ù†6zÂîAQÎ!s)Êï1YÍiÝ>½8ÇynD+rZn¿RJY¤¯wàHÀ†¦ -ñn§Ç¥4Gs !äѽ~®ø#,í¸#¹“þ"YóËqžyCpÂÂ$‰sFkœ´fF\µ{—ËÏuÊÄTp°Ï IU.Î[9¿1 fqñ§ “|Fñ™}fp™!aDˆ¢mĆPÚ™9Í—Ðhµ3¢ÒC -M³¸_áWò­)uï¸QÃê¢TÕ >8‘©=×C[H4á:„9²q­yQ|wÈz×ü,®;UñªëâµO_Y†ã¨Apýú±—@Ûèkñ^«¨äÉ>qGÞ[ÇÞW­Ì°dÙõœàL}ä}M -BOw°êk 4ºÝ¢YŽÂ‚ý,/Á?8AÛñvüc+*TZå'Àk!Ÿ%Ô³êÔ|$‰À5dØÿrË:Ãô©9À±Ú¢–’ -y(½ - xgÇÊU.±4#@èÎÓÉÀHtÉEmôéÎ͆òûÑuF>ð´ÇþR)oùË_JUùW?õ“Ž*Ÿ¤Ý׃q¥\K÷^7ð ïÔë­R‹n56]9XIõe 2Và]œ]$}Û*1RçŠ!ù¦ëÜ×UìCw -m­°[ˆ½±‰’ã~‚í>¬ àüyèÑ1(œ‡l²sÈÊô¥Ja+qì)|í 9-7¾âi1¹S-&3g©¥>g^£#`H¤¶£Õ£FDx‚¤åLñ_mò|nÈŽÆ?W¯ Cî¤=ü uq.N¶A´F?iŒ•jü4¶3î[,žpŠÕ舜?ÑŠ>KÝà-=ß²K,=‹Ÿ|Â:Œ™yÕu”†ÙÆš¿üš-Õ2¯ëÐÍÀé8U ™ËFÍÝ;ëuŒíùH[TŠÜŠL ¥ÌÛZX¯/ -‘Æüb}ÀÿG/ÑUÖßÙÈO°Š3wä -ä¨ë×2aœA%à@îtDìH…ÐT·RGVºmì¡{,mô„ `5m´{f°)ïhŒ–£[~9{$…‘Cn:”¯òîPLõV÷¢i{£8º¡º{›½¦ïµàÇs±bgžøß‘EgÇþ o99¶b“áwþ©ò.¹T\hÛ:–9qór«r¡T‡KwDYáãÒ°lÚÄ¥f€5ìÙþ>þåËd/K*š±‰|^`~bô–¢FÐ<‘<¿[° ê9}P3oáîíyòßj”ö[2&†ÀäýY_jž…¾+À×Â’·‘öæ^\©á*Ý©…"a;8ßÅ/lIow;²cñüM ÿI]mþODúôÙ¡¤ ¥ÇŽ3t ê–|ħøàü›¿|„çä4A¼D¯U"°<ÂÍ*|Î\P¸¢;òÏó¼Õ”u4ºB‚$²Ñ‚ÔËNŽ‹½Çé é¬Î·vÀ @Søg9EÙE!¹ÑçŸK5Q§Ú.ftLE¯Ž è¡Ùã p÷±ôy÷ pŸÙ¿R)¿l`Úω’; ‘›%iÝéPS•)ÆôM°2}^†zÔMçÜ.Â!ÔÁæ~Š™˜XþC·²Q_ÄKFå‹žz¢‚ÚÇûkGhh^¤á¾‹¼Ñvê‘{¼ì0JN±¨tæôl†¶ÕW"E±b¸ëÌGmV¸‹oÊRîÚé˜Þx_2w‚(ê⯔zï-.å¸Íóí’GP¢þ”$kxÁüj÷V JF·1–Xu‹Ô.ô3J%3p¼üË:™…£GbŠz}ׄÙ Á‰xL©VóW©@z+‹bè¾X‹tÛÁá\aûÊS‘q›(쪣 ÂTÅÚYúù4_fܧó%zˆº¢œÒ7 ÎâyÅ0ö2ãþs¹ê©ó׆kº©½²è=ô£Ä×á²ö„a:”Q…±y"M­“Pqaf[ó+Þ‹êçàðÊ1õÄx¦´‘n¸WˆjÍÜ -öz|¢Äƒ/‹à{5Q÷ÔX¬åbëL?¶€ÍI(ÔÅz¦¯ð~“¢m©%­–'‰Oþ¬LœNt»Çµaàú[ëŽë!1på.™Øw !‘Ï žÂVn4“Ȇ®ã½Ëí£IK";ÎãâIÖ»¯!>ÆŸ`;*3ÅQú°´D~5êAH—eOTfÔô+:½í²ØÈäýŽ¼š7_t¿n¥*§ Ž|ÂqsØÛ@˜%3‘¶‰F¥gfC£Šwϵ²4ì*t§iÆ/jK¡!â—8vO¸!Wê¿3ø¼ÛzÅó#íÚÝ϶½s"1è•H4•ãÄî3%ÐÓ_À5gàp¬*„/̽Z#ì–(†Pp -ì 8®NØCßëÐi;SsrŒë'Ü¿|àÍŒ¼™¿ e…eWä¿ÇHÍ©jç5ÞöZs¡‡#®s4Wß°iy'*j.*»v]$seŒöÁë$Ó÷•sÜì!½ík%· -©齕ÍRyoꛞ Ÿ}—kü€ ®E‚èDZ"ïçqâÖS¦O²„µÝK\ é‡É`±õ[‘a¶]ö¢Á›.c‹K1Û)ªÊ–ÄÕ„A_(ÜŽ\§ÅŸfût;ß?™nH«T¹ë]΂µ}eR'·e ¨«<$ :“€‘¹\ '^Áßáh8iœÿ4¹“¬Ê)´ñÈLŸò€²i(…)ÎÇ¿°¬¯‚al¶¥Cô>^ |€Œ´1òp4T'™˜CŠF…®èA»úŒ×ß;f÷’?½M¯' /YrÇ3¾RU¼Ø~ÏXf”_¥bq"¢*ÁcA¡ϽóÿœéïTJŠ&x8琉ÂYRÍP·áÕ‹Å0à ’ÑížõçëRÅuüì¿Ê•ÿ÷‘|ê°«†Ö-çÐ]ä|‹Œ* šï5f‹ØF¥UR5HñÅ^C…÷–!xÂE:jmï!"ß93èLeåˆ`Îîªkð!êþ`©|B#?Yö'ßZC.3ÒX7DmÀ[æ†èï -)#·BKñyÎ9ÊVÓ\Q²ˆ]¨x0è¿ó)"†ˆ“@±5ˆ 
ñÙ÷»Ö‘%k:Ë ÔõqVÁ—ŒzÎbÞã5ŸÅ`Æ:•V©V5 vÅ>DÈZ3~(3ÖuFt+œ(=»ƒ:õEþ{ýà¹wÐ;Ù¨\/¶öD±s ó.оÃJ”•e“QÆÔxÀïúšÀ€äbˆ/þÆ.sâУÞi²¬=†” ¾ Þ9†Q[”´²$¼Kñš¢¿§V‹# _ä×ç³oj±ŽöV6¸w•\û–pQ¡Ì_ð9è‡óä¸Ë«Ý«ÔFMÔm³ø5ŠûÍ_~úÓZ#Ÿ~äï{[Ê4ÓBʪÐ4²9C¬Ö–éÛê âm9MNkC e[ö÷ óŸ½´œ½{è_n¶úÖ÷ZJD¨–%Y«pýRÕ“wµŽ¸ž‡Wª3U¸ÕùK -Í[?fÄåD"pÇ>·ÄDë³Þ€{Øaÿ•1õ0lÓm[R´‚‰Ý¬Að±omïF.LŽ4Â(SBWñÜ:÷êŠÌs‘"(¶Ë‘!w”jqiÇjѨè5¨¦Šœ¢‚)À$ÙW«ŠæF«ž[Y‘/D†ÌŒXzRùZW)Þ:sG³,ªR¦å)ž ¢éy{uõÔO#OàV[ûÚö‘@“™§$ÊcQVè(ªjÇ¥¸¦š£3§|~D¢`Wäà/† 0j‘üÌY)êËâ:s¿&6ýù?(·u-³m¾þûzÞõ ÿÉDäà†©trbRa’0åFô#LßR¸XÅ•±Å -éGmEqE˜T]¢¨#rÎíhV$°‘ûtã–¤h8ê»8'ë×»;غ&—R…I»ù¼™» ›RnU0‰ÑK 䎪âvÈ.s ¥åT%ivAˆTð¶Ü îO pÉÂW|"á§$©ÚP¶ÎÒüÅ«= ÷E^‚‰f -Ft‰ø”0=Ð)œ‚@AOÿËõ¢ÿ×@á+p.瘎ëõ®¡â´é4ô°<˜Wôv¦Ê‡ãU -Í×ûn¶žéš:4½¦4>¾V/²).·Ô7VmåáK!骾üaW˜šFdýd&-©R@:d®R¿_µ X\¸\¼2?gŠÙaë¡sö5茙3«Bùçi—UºDqîo*°dÜhÏZ77¡Ã*^#À½bÐÿ¸“îþ©‘]z¤œ.ÜÕä¬ê&(\ nâ«ÂœJÖÞ‘¹*™o©–hg²ùù—9+¯D­ÊmŠG¡5æ#˜D¦Að&¾”šÀÂÓ“6©Ï=µÁŵüóIñ¦ˆ(âSý–z ÈQ:åœÕB)¸ãTÅúá΄öññáÚóÖpÙ¯Ÿª¿—zøE"›_•–Sݽ÷ã7¥åïÙ!+ˆiG ¢O¦–ÉG^½œÅêV»ÒB=óÒðm®Sbà;C¡îhÞß%¿­§ŸÑW\¡°½– .ñ ib®ó$EmÛûÇ(>ß`9²—ÄX]¿¥æ!&0«^ºWãìMYÓŽ³—kAý¹Ô¤ÝðT™t=4Ì–œÌD|rp_[„4a†ã"Q°pa•Âù>DŸ¯¥Õ õ%F Ç»ÌK`A°wýþºÒo»×òo±Ç8=^_øœ5í¬üS@ÂN L¡-;Œ«Øëþ¢}§ñÃe×tJÇ}¿j±Š -)󗟉uþò›=óÓSãûuÎň{;‹7¨ÝÉgœ‚smdV…ÎÉX%À± - Íû$T­³Ûÿèu’K÷fïû½æ…t}ßKùòLîŠùoZ¦ñ`ó)ê¿Û‡ßRÃïóñaØјBuœ*Ëi›U…÷#Õ¶ÐCW`Ñ -!]¥¤UõDÀn¡b\óˆX§N j@x•ZóÓý)U[côŽªõ:¾EB-JæêTVõ¢±çÍu?ÃÈ>DÆíkÇûÇ¡ÛƒHDɇex;´Íc†¨ã-hüž\‚9ô‘ʾªdˆ¢êü`^/@wdâÊWY³L'î|ÿ•"TzÃîÚíK€ EdÑHÒ¢,t(Íoï¶<°*5Ôä× -ÿÉ‹¶ -àß•—¶GqN #×øGϹUp¼>ñ¡­2¨Ûy…m½=ŒÝ9лR¤»ºp•¤ð4`]ïëæ.«%gãù®ZÝèv̳>ÿNÿ3ÎÁÏ—Ô÷Z½t³©C–Å6ö PQhÃˈpþÒ£|ë-^–€aå>×Ú&Nëû¼‹TÛD°)Û¾÷Õ‰Ã'&¿/b_[ßÆeL­bT'Ž5CÓw?° DkUŠì$XÖôXÁ ï¾dñ$²˜%D.“UÊ7œ”:BÇ*–*µ{¾[K#óÈ)™`f=v n¾þŠ|bLYqP‘ÃœG€_P;A’æ!éç5¨£|Ÿw¢<Þ=L©³ÔT_Â_ˆîóµ­/R Úœb`¥±-B(š68ûžÀÖ4j |R ¹C"–z³× Lë/ç×ý{­„Ÿ\)b\W î ñ¢ôì!T§!ЮX'D*/ÊûýCS%~L1¦‹…íEßiïoúKN“™–ÚßÙäuô0F2‚/¦ ¬ŽïÇ‹"n(΀<<@eã§Øßj£ íñ™òÕÃÎΉüôÝ… C8¯²þKKv ›²Å%g|n]Š…ই R%è¹l¹E9'ÅèJg™E¡ºU„¨?åÖcÐ#&ƒJÄtiœ°é<¥Ð=×A´Jå\ãÌkúÕL ÇrxUô±Ÿå‡M€» ÚX·¢?é4i¥HÚL˜}Gî8ê'+;ÚP—´ øሀtÃé8L{r{R¶g>@å(6C¬Ž½âŽ!Æ8£ÏpïÔË?E¥âÇ€7?ËvåkÞÙ¨æ¸aÍ!SºöÿÜ*Ï%iõÒ÷ïÁ]çtSÞáx§þ†*šµ ž°+ÀXø€¾iÑþ©o•7GÆ0ý˜L‰ú½Â’É ü“ŠÏ–x0†cpq‹7qguÑ{^_XUïè6Ι±ÿí+ºÅqT3Ê_ZL«íC!ºÅ“@òÿ=Ý~øËol>÷«·`;ç$»Ûp"ýõW9žFp†aÓ[εG›k®ý¯§FÏ1šªòUIȸ"ük]‡%)ӕѲ6ÄÀˆ+îȨnlÞžðúÎqÛ¯÷­â# Sû¼iUOôÆb&õ²?³†{‡¹ õð‘îÈž!ÊŠ®öù¬ÁÀƒóÑ•:YÞ €-zùóö—Ž -W¹xÎýê±åFx¾ +6Bñ›çÆ ¡Î¸¸ªHæøQë¡f"xD2£ãX´*ÚØq­Ú¥¯k-<®F¤õ›¬³\¶¼b3B¼rØg¾b<[Z¶ÿðêÛèÕ2 ({†|—ø1²?v+–Hˆn4q:. éto€Ûÿ²o?é…Õäýô•˜¼^­«èù½M®Ïµir¶®Éê¤Üôm¹Í°’-x|ÞëC{‡±q¸~|èKÊ]”«8ÉÝGÔÛªšbkGÌ)¿fK@^¼âÑbw£¤ÄØçkß¹^>¿ÏR6ÿ2/ÿAèåU[ºìp¥¡ø²» T(õ‡'mˆ×Wår?lÁ(çÑbàX‡ðHáU,ïþŒÐ&²] ©cá·|:ô² Íê’Åš½" ¤“H}låå‘)¯}ŸåT£c“¿8Å•ÕTtŸ³ß¦ŠÂžÔÞ'<•Ù5MÆ[fè><Ì¥öœ‹§xŸÂÕÙÖ¦}êH -×´ö2HVv©­´ÜÐALV}øWÍ„-Åó;†²ð`6)6”ýþ£oKH_åœk¯³ÙŸ{FÀÃ0C»Ÿ¶ ñÍñ7æŽt¬9wï©å™»ëíD“I+ˆx… :€s¶·×bL]%*U_øš&ÝÛQùQyý0Tžóמñ4ö"óÿÉ|¢ö&z—4ÜÑwöƒHØ'æ6U> À :¥¤Oȹý½K4ÑÈã j†Çœ$€=÷útî…iöÌ'=ë\:ÂRb:’S§-Zœo>ÿ¯Ïê½Þ(Vž¸Ð¾}¥}óKJrN*!¯¸hQߺȓ>r¼Òíì!a\im¢ôßýåCëgh^G€} ¶v=ˆŽ ’2Pò÷9H -{À—©]:•6 ¡Èù¯ÁX.Œd4Òþ=Ëg=_'n„¶Ž+ˆ¿/!¦¹H¡¾”ë½ûA¢H7[I4]å#xK‚ïkÀq:w@Z÷m]Gíˆ+E˜zmêC)zÉ0óîU'ò3D4If÷ZÌp“šhÒ•‹;ß°_‹ ¶NÜ 0Ÿ!Š3̜ӥ–ROÍ í¦÷@ËS¶è°¾Ë/óŽRVâ\´ƒê+ÚUíß¡\‘hNoÄÛwd—¨&·‚⇠”Ê“‡ -õš¾õŶãímÚÍYßL(}ðùN”t¯R½¢ÞOgmþ˜ànz׶–×›šo$ùö§±@ƒl¸KJuTšøû¼Î<‚.s1èYÉÝš‹zSŸ?FrË÷I+/0g| hÓCQ‰ô/x\ŸÛÒZP^‡¿É­Äð¬e÷2`”†•<£2„|‡šçS]ßX†)YçÍÊùªÓ`V¡»ÏmÍ>"’A¡eV¨£'í§\yÐÌiNœJ2¯­^ñ'kjé;÷.Ó«ô>s ( Q õDVWøgs¢”ÆYOø3ô_µ”È÷,‹_eQçÚ¿¹-Þ6â9{Þµ ’ö‚ˆŽÚ[ØÀæ€RäÓ~ ¢££ÞúkÎy4L/ÊÁrõSš‹Ž$kû;’Lì8ÁSK¤êÊç`þ{–É\àäa\Ëñ […qËn}¼fò§dòÞYÔ}iY5tË@ħ¾)µÞKX~ mgSŠ“°>6ÁØ×ÔÄÛ’²ïÓl`ÆVP~gÙˆFè"‡_ע͘;…›QS!µ¸S¾Øa,n&à”¥@ÁÛX"ƒs+õo„góådá ÎA}yˆÉK2;fŠòúüK½œ«Úâœ%êct®.©Ñ¸do‘ÝU\ëxGJ&Ø Ôßý僪—ªH¬:?úêÍðr…f.f>Ñ*š·¢“r¬êN³výy½>áFÚ’Šù×Ü艃Gêg=7Ñ"e`òÕ™”i¾D=»n^Mh„Fž×ÞÞÿˆMc¤r"™[ÝV\ û¨;CŠ®3`ˆ2Žzêµöc²›b¯@¥çB‹¢!uâ§XúŠ¬ó4%ª¡?±–²JyÔXöKYüÀ¬'/ŸÒ5íÜ|¥·œ°,[ôé2`*€TvÚ -M…µfñe__x¨¼ü×öv‡|~Õ¯ûü/)ŠFT/þÁ6–ן‚™_ -6$ï.…,ó&puïNùòëLh’ê×)méÒ]ÄD¡ß÷'~ïñ¥U¨Ø©{_¹½hô. 
‘·$©Çß4Vâ»AÄ©OE빌¶'yTÇTäÆNCŽègxŸÑ¼ÏTb¡çó{Ó9 Ýtzßéf‹œü`¾`œ:ú"˜N”°†Ì%е2d—;|ÕûM¤F³e+ØÊ°¾¡'c«"~Ÿñ¼Ì _F×V‹Â p0G§*³!(ö2½bcÐCqïñ±Rƒ$Oô0 ÑçÖz9£¥ <auQ÷¹£œgœˆ6…i*<² -l29@‹‰Ôü¶ÐÜ‘XÖÐÖÅÅæ·Ô}H<¿Ÿ«H ö¡qðM²¬¥è¤K¿Í—7?€…Z7ŠÅ:=ŠÛ µ¶ÈÙ3›^(üdS êu#ÞÀÎÖ#8úĸ#Tù’_)e+´ÃwÇ·ÐMp{9aï‘!ôLbá ‡\CtÍæ¿ê#Ö†чçˆ5€ ›[ÃzNš {RÄ÷²¤¢ŽÕtŠiHžÌUäæZ‡±¿^2;[Ô5ðžGs£ç[ †€b‘%GD¾jD kžãÉçF9SÚh®‚% -çyÌš« 1y£wc ÐÈ!¶Î 0X²À4Hð5šÛW~®ãVvàšYZ–ðÿ±*#+·3’„Ñ¡L¡(#!jGu×{Ùð_©?•œ’…7RÐ\«yµÅj´ v½ àë€vZñÐ#.MÑáÐbÛ³NðƒN> ‹<”&®î<ñ{æú–r‡ñÏó’'ôrJ†8WÏÏáÔ$n)xcÜ@CHL´RÜJG¼Sh§P§”m‹`á‰)¦¸»ïå:¤HÑ÷”]GIvв9™£ŸË×¾_,È?Óá—‚+ƒåܵ“¬Ó`Ø®8•þ©iY<šéOBùŽ¨ÙzZ€±ŒÚµ—D·LðG&zèÑš“)ÌÃê8J7»[ 2…½`¸{qƒá% ¤}| Õ¾‰ƒ­n|½‡M¨pÆžÀ‰#ŽlG‹#Ù¡µÅ¢"lÜWù‘Y“€T"1ØÒ<ÒŽØ@èwÍ{”¬„*!$F\…±¤àµzÐDuæ|‹dA¬6ËúlK8Eϸô¥‡×Ò|¥s›y1ÈØS •ÀZS}’daîQònÔÁá¡<Ä‚ K´ -‹éê%oAp—9™íæŽèh_Ù [4{A3ñíH`¨a5ô­-Üî>Õ¡ÿ?G8eÿ7 -y¡©1cûÜj Åž´¤>ÆN1óDÞFl!çMK,á}±#ëLQú8¢ù5@=0bÎç"dÆ(‰ qNu9h~*Ž Üí4c.›ä¤ø/œƒ:“άUuY«l6¯ü^•=ÚØ]ºT ®Šb¹AÀò ½Gu¥ƒRu|#zT–HüÝu€ó½çÑ„:geü€½m¼`F†x7žÏМɭxÜøM4_2[ȹOÅ:p`w™GÒE$qÆ -ÖŒ#ûN‡ßÜ £ Jž+áüüV¶½Sçè+ÛÎ lÅm†À¶ÌF -Ór;P*ŒŠ¦P=›0öqRùŒXˆÀ}Dš—ñ„3»_VX`/ê>XnsÐ(5¼ý¡ Êåªã©îù5¸i[ªŒ¡9¨ÔSNõzÑv†lAwLÊ’3Q¨CiÏ7W2s„Ga(Çoüù¿7¢†P6™ÓÌ‘½ç ¿bÐB“h[ïFºÄ¥5Ù®“I¦¯'àñ–ç‚%G˜?(àéΆ Ÿk“Þ'Ýäûõy8+jvVÒç"û vþùDÜ(ˆP‰—ñT÷Z73F8 Ò4—á²ó}諆¨Ë¹äj)ë=@úWó…HâôÇ ÕìÁ?}&QÜ+Ð -÷4Í Ò³‰Ï`Òö¹™(^ÆJ= Bg.^!½!Âf5$ý}Ú†„GIÞíÆ{{H˜ÒY"Êzíu+ït®¥ßó[<`ÈM‚ÊjÞ£fî¢s¡¢ñ8ÜA<õÜ„£°8·T„âÒŸOv‘ÜöÒ‹j!§ -pfH‘ -اl n˜ãÈu4.âžaå>Ä?¼_¼4ÁÜŠ0'šTõæÎ"Þ›ÈÜ‘Fš©‚Oåé™±Uô|žh£ˆ°¸ÛóPð5Ú, gýMÓÈ”Jh€]¨YÎf‡ð€X)íHøìVéRŸéóÉÅßÆxØHÔ¨#‚ióÓ:ÿ“Žµó¹Œ-Ç3B3¢=öÈOûŸÒHêÃÂW‰¬¦«,æˉ†ëßÐy´\¼š®¿ú;¨×vd>ÊsÅ‰å¢ ©» ìÀG~ ɽƒÃš&[]Þ ðaÏz7£ ¢±#ãbL½5ìB{ Ã™TÁŒ9)½®p´ó¼¤=Æ#[,^“Þ]ÒD¥Wv\±O°&@»aXÜ/CôÇð§gntfl‚óª—óÇÀ5KGEÈ.é0Ç>]è=ÿü#ƒ³¨”\g¢sŒô6O•ÍAdî‚!«>2(þê›Øï÷rò åã+%»5+f"y§öuÏà-~­Gà˜š~߸¶€x?¹¿ÚŒ(©f) ×”`®<–3oh” ¢!2¼¦_e+ŸCöÌâŸÒj5ŒØ9È»·ä1@ËA¶šhÕŠzÇv‡%—¹"ÍFYHÿ°½¢:wì8kQUnÙoðîáëUûûs†÷ÈdÏþ§r2ùG®ƒ.‡f½«c8OT´¯š9"fdÍxÉ>,¼l¢QxJÔu…­Bè¡^5jË>µ"Í@Ù®!:®piGíÆÎ8ñÊÜœ¤Ü¥‰vyÿ½†ÑÇŠú}L½/"^çLô(õ• >¯ÃtˆOÁÜ'ï'×9 ŽœÀûB=Ô3+Ü«ñ<ÏÞy¤øÝjþ«K3Û C‘G§’ÖzÔˆç ôºf˜¬ÓÇëÒ;F«‚‡›Ìœs¯‹••+vKO}+ÎÚþԸ܊ò7‡¥FmÎD¤DÁr´¿^”¹Õ\®ñU³OFCÞ.9ßæ j²"碓s‹wÂŒ>æ„€À€…#X‘·£‡N-å:“ß\ÉÚûã=„VRÏŠŒMíõD~DiÄéagÉGœDMñz‚¼ë1âM·×¯® 9™.µä3V±YKö==Ô¶îgÝéf¦<,^µyA⛤Šô3š»fá¥!N6„sæG×ü£7è{.úÔWŒ€IÂ(Ú¹˜Ò=Q]ÉY¿G#õˆKêË-‚.C˜cçq„INjiÉ2G4$ö éyò8­‡Üªië>Š ‘¼0F+»kxÌ5Õôu˜¼ ûGé‰ÑÀÙ£D2c~#´¼¥lгFèöÏÿh©%`ô‰({E*?GÞ|Y[*2IŽ_†À¤Ì/ɼ!ç&Ö÷1Xà¼9DN4Bä~G&§&p‰Ì7&yŨÁxÒ_V£ä½i«bL·|î0ñª‹iÞEn님w|~rÀÌpU -Š™ž;)"²r"QàüõäPiò£”AZê ¼”èºT؆ÿ3OQâáS¦w¥ŽŸšOi'ݨã?ç­Ò)—†ÍÌ‚ê:qçZ¸ö1™&K6¥‡YĽÂ.?ölû0°Wè¾ÉC6ùI±ÆS'”b7Ë3(ó|a€ÁõT‡ ·äÑ“Ï­f¦ßG„q¨sˆÃ)„Àb-b%9"}¯|ª9hî-W=[EØO‚0$~Ыyн¢˜®­Õ{†­|ÃÖîrÇfò -˜,Û„*4¬Öò^;SëTÛUcæbd{›ïB_âgG>8ÞÓÝõû_-þU·>µT¯ÄB‡Þ>özËðïßí® ÍÀakåíö9èü¥üwab*Ú¨„—=ÿQBZoœÔ$ò{“H=øH„Ø¢ïÁ¥ œ{*ërRpøóabÌðP§cEˆñÓ*Ç£%=¬›ë£H4¨h‡¯­ˆèF¦5Õuµ[µ‚Á‡ô®C\áéVDeÕ1²åÏÿ¦¿Uw¢ÿº.>ÐóúB*¹°ïÔ–qÔ tдh8ÓQ/ øŒöÁñÎÄÙ`ì Â€Q¤µ#—v~àR|þ—ÿïÿÿã ñÜ÷?®(½# Ñ“]GîM$EÞ=¯`žæº#eõd‹¦Tª¸„…Îj\K.e™žøÓÖ©È «£Â¹¨T¦6ÝS)ÊVýMW³EpIÉøJYHdC³}µÔÝ•vÒ…:¶vų•™}ëNj³}%[› m”`¯|Bøêf9µ“\Aµ¤AôZÉÄs‡ç7©Ä?’JŒ*¢©ƒ¢Q}ÄT.‰ÂÐ&}âN^Cv±&ˆ"»Š¼¼H‰ßäÖ±†ˆ£dÁ„ -Üjçî>§Å^Þ•‘°€Õ`Ú*Àâ&G3eL ˆ£>w‚2‹Ì2·â:™zMÛ­8xWÊmÆ-¾%ÐÏåÄVĽ+ ÞQW8Z1O4‹)¤Œ‘çN_§¤Õ¯ -=„Èõ+ùÚÜ¡•gÏ þ[m1)^ð†Ë+ž'õ}#â×.™)üûjÚò¿þøBù=Ù +«ß;W$O60ÈOs%½;F|@ÅdB¾˜ƒÆ6ŒlnÆJ‹£Ðup6¹kŽ28ÿ 5?Fð,J%­ð…‡Z˜ »D)ôÚ%¹ §©«¥-)ÉÍ.¡ÙßÈ­~tLûäÙ‡>vß3¨q¬/ZÙþž¾N—Òµ’em.¢¾ôZ rŽÐ§­¥„,£W…˜ß,%-'>!§æ~’ ²XÉò3Ù=D¥5E1÷ZË-‡'?xˆ“ JËåÜóëë€Ä¼K ê¿[ìBr1÷^–?6"íþØ€\ÂÂO‚qÅÂ'C0F ?öe'ñ]ûÌÜòBKã(5jÑxî±”iëÇ™Q»qø¿@Pà~ µŽ®oüTò,íž’)¸'Ë'ÝÎxõ›çÖî>AÁí—vÄN™vµ“U‡¥.ÂÜÃh‡=Á(€Å¥'Í”©’w°}! 
Ô»ªÍÎÃj®YoFPð#x¢ßú‘Œvø¶‚ªŽR@RÁ­~¨}dyS„Xº’â4)•4¿†(Ìzý[QlÝ0¯Ž«z¢!xhc¤^ËQá9d‘ň(¥‰v3D”âÀP¬vgË&@ ôÌ“¼R-D[ƒ¶ßLV;ÊísVÅ›íŒN:zOo7gT†  »ÞÒW ÓÂÛîåyßJisÓ£°µjÌèQ:"”+s¬ì[d5Ò4Àç;äÐ;o°ÐŽ\’Yè—A”\ɃùäOðòCê|Ó%Ú9ã5ý’Æïœs8™@Œ³<ò&È$ú—¯à/ö<äëŽüÎ -Ï©Ù¬+w¥m„Zƒà.Þ3<£ ñÛqÓ -qM`z4C`M*ép‚Š;—2Iƒ£ì9 jU½½=×ÁCæ¢E¸a®ùÁŠlƧ\$ÝI{§õÊ3c¼òÁ/Ÿ5IÒkê¼Ïho)âk°RláÐõ£zM[¶Çilü¬(­ú#©ë”T°ú*Ü2’óÓBÃ]·:Îè¼Ïo~üyî×$¥9ª…‰ê*pÙx@r^ƒ<„bÔ=„»æŸÝpm½CŒ ê4DÒj¦h¤õYw:QSÈ°ùÉâ‘ô(w*As?Á!!Qù2³Ä0¤¤6¸º¹ÏXï[ú<1(­‹€‚"þ‚FÏB‡T)ÎúHiS[õ¨&«œkþàCñîSy3dW<Ö4m1 éè‚»¿”Úç v§2k‚ž/%4Ñ‹šñæ ‹ú'ÍSZë‚n" ÃA!Á?è‰[•ÅKc‹Õ,H7ƒH~a3ž>…^+• -…š²m’‡À^2©zä ›ëˆÜ7qØ'ä¹æ{Û«Ó® 1O¹x)gÇžh>G‰?~X:ã”VyU)“sZLsßüÖŽˆÎç»ê"ÛIq;ûÊ? èÍ=Nuò‚¤Ï -°]›8’XÈïgÊ ]\WæœBŽt©zBO8…=ØD¼KòN,åÓEž/!ÌL|ÀwÕ©Þ÷zê ¿Eîjù5G´B$ƒ Syy{Baº³Wì #ƪ›.;æP¤jõM.•ÐÐH'Sx¨åP×TÕ¼ƒ/h©ßcëù͹“6Ù#¯žd ^DÈÔ×Ð;^FÌëa?Ò=µßë^ƵýÉ|ИјjÎÇ€N±`^í{í°ÛPN½0 ¶° ”?ÈÍm•±—ùzÄI:ݧŽ­ß {A?¸á! ‰HÀdTZmîûšã<0çù´ïáÆÒŒçÀɈ2ñ\Nxñê¶)wd`HtmalÏ!r„˜—ëü›»JÀ|Ðï{ X÷x¾ßFòÜãiêG[Úz§ËA'ï -èÌ(,2Êê/#?»!èEfº,7ÒGw»²qS™‡»¨/€Q>cûVâ›{•=¬_QÀk$? `£ÛE-Ì ¤”Ô†ñ<0g Û>e&¾qä¤øL -VEæˆ~ì ü*ÞI»!“ÄQ÷™EÔhíKåY!·³‹ŒæDXðŸ4œøöí,ðYt*ÂqyZN fÞQxif—jŠ MÓ]Ï$aÆL€`-‚+z |fÒ¼S eJtÝ£iÁ J ^9nfH5_±vF±=a$·€×âÐwöÔ‚z“™Ž«²§ùg ÏHŒŠ[”¹³^Ѿ·¨ÞѶ‘Y„8z}ô’þÊ_f¸,BìúJAªúmQtB!|áèüîö<Ð÷*¨{¥#9Ÿ%.´-ɳŠ±Nn4Aœ4'£É™¤¦©ÍvΩ L Oq%Rïö)çhXø3—áh"m½ÌÓ¢qç´£8H‚Ò§èÖt¥r¬\6ÚB¥‰¤ý–Rq*=3C5WGl_!$6x EÒ±Šñòš¢ó ÀžråéÓ*û£ƒÉÆ Ï^„§\%š™¢yÐJ{ƒxL\ wÇ1)ÜH -ºJ8=Ä\vý…Ï?ÒRY´z±Dδ:£…7G埨GF±³Þ8vgË«²œÈmèGqåW ѯü%sÉ¡&À:Ø_õzVœÍ'Å’¹”¾[±ç¿µ˜ç]©ÃyAÙ\Øð~"äG[ ðÁ¤­h¡ëgUÀi>?¾16„èh}cMË”oàAþ‚•Ž™¬*N•…Mª íyÞ¥¯!Ñ•¬½ò%Ç$ŠþýÉ9É-8_ Êù†×·ZµµöTן ZuT4}ïØ4?!`b;¤ØÄ­Š…`…êyÉ -£lZô ð N0/!U¯µˆ6_û^‘¸~ÊYVq¤ò óɧzk;ˉwÃË·ó0왋 ”ØîD‡ì‰kÕOp-¬Ÿçü =ZÓ¿8Œý†¨>Ô‡#Z-…JE$}O$ÛZv§‘J²ÿ °9¯ãŒºK;Zm>3i£ÌDZôÏQþS;¹ýòǧb°q'ÐÇ•®cS•6çÝ.¾V@eßPКUn“RðèøHD(þÔCñÅï€9‘>…7ÌhêåÅ’2¦{ …uˆ¸§m«„öçˆðþ¼.•¹ù~ë:¦F@žfÂÍíNÃöŽa°_^’Pw½¾& ½ö8†æNà]91™äzpÅÁGê6r§šS\1‡àóŸ?{ƒšöfî˜uBÑÛÅ€Dœ—„ú‘ÍÐg KºÙÆÝ)‘>û<}ÎÉhû§œ”  -&.ÔÃ3¾1¯©õŽôzw<"¨ƒœH2ˆÚ¶¿`+Ÿ+ÍeͼB 9ÜíÈœÑnäŸ_ŸÏ¼7¹9=hD®ÿ$7+|”k–’C$ -鿸QÒR¢,{HÉí®ÒÇ?ü$–×ëڣ괫«…™ª2lˇ;”3ÎcD³„LŒ$WDFç?dbß<äÇ[’ykùŠ[Êü¡_xŠ»ðOæùí¦±…å˜xZyÁýA3´'poAMù5ÉýçZ#J+aj£ž¯"™ðgõ?ck‚¨Ÿsɱ *3f’E}Ña¿N¡¯xO]µÖ‘)F§\¥)1©2ÔÍv ÍäæúC‚™8m{:ì @_­b¤dîTγ¶žo‘uºZ<¦r™H=²ÞS¯Œx1 -Sà¾gvA{›xì·9ä‚—bÜQ:÷Ì„34Ľgت¬ðÄšd€‡A¨z'‘a$+|Zxœ0Úð…ÃhFx}áõ­FgœpõÒ@–ËËæ nûúñQaIäÛuV-N*W’ñ[E¯ÊéÇ/ÎQùÈŸ¾ïÔiø@±¦ö^Û·Ý°ZWÞ+.S7©Ùú:Øk\ÿHŸ¼¿2i~­Äõ‘/ìªxø®oüÉD0h |KÕË £#A°Íý?þæXPv´¤N徿2ã…S@5¯üï9¤ Bƒ¤çÏë¹kÒÄßSb!¸Ó¥Ô‹ÍaÇÈð€OŽ~y‡»(h[<ça ¬„;õ±ä”سJOÛ0@ÝëXÇ!Æiô ïî<ô^§ËWfwåd¹Ìpe”È© pRëÛÒ8ï5›NIí–-Öý4¹Qþ¿zÌ|4¹øE¹0“ŽÝBzÙ - ÇZ3ÞW¶fÒ1БvÛ_43ˆGÖIŸ=Y»xJŒ‘]XCt†yضl>VaªYF} Aä…¤=Yò6+ÀIÂÓì´¢rôÜ*ó)D óLQ„߃ªÈµA¶V%ï(l ¨îP¶#è‘#@œ$/ãã=Æ4,:¿[FÜd½òk¢¹ÉÍ)êEI‚Cdóqò5«!ítjž ”GY·‘/”ÇþØ€"”vè˜ç'îãu½eO­w¬9C “îóOµ¤S×nՑ‹¡ò˜©FŸq7ùð®¿Ì–-zeŸÿE[”SaEÍü$$ÄŒwEpÆ Ì8ºÞ ÀFÉÉ -Õ¬4DÛŽÀÆÊ‚Ú8–k«Ì#ã*_© ƒñ,\´ÞÃD°œêûGröÁƦu Uo':ëmõF±=ÅYD²S>¥øàã¢êf÷=ë÷Ú/-9}pm7kjaI+·v&ûE 2Ì4²„oR†ºÕkP'­äyDj3Kà¦Îh7¡VÕ- -ø—‹œ$GÚ‘©¦â2RµP-ÈÎ çüªÂîöàÍcïç³Hg¥c¤u —I‡!Ü|õò[T¡‰+쉹q±ZÚ¦‘\²Éf“ßêÐçó¦ãŠzÕÖëc䧤•à¡™…˜ô4ÄAhy=nDÿÏå ›¨¯Èh?õ-»žâÃV]‘‹¡ÚøðQ:^W×ÎmÙ`ÅiùRø™TËŸö4n„ü¬^SÛ8Õ>¡7ܧ“ouEZZÔäw#~¯í¦=vÖ¿Êe´6£†K/1Q…tð%1{ºˆ¿£û—ŸÎy©ßɯ=sü18¨Ü¬¼tD9ì¬]õaG¬Öy(ÆlÌšxú¤×\š #[qÓÔcÚŠ”B, N¨Sê/sƱ;¶ -–NÅ‘MzKó]ä[ {X½V—ZvÅ< -v„ôp„Á<‡0¹'ÌלAl\$ÆhæÓfßÖ"§r–ÈË9A,ãÉúd K´‡âiÈÙ#3Ö#íýÑ…™¿ß‡­ 5–²m·V±æ¬7O“r Ü‹ÔGÅÆ{&“ȼ+·bņ t¤’V¼Æמù‘÷‡â9l·¡Sg ÈŽøsÊ!%»"ø5»Åe…ùçJì?™oÒ‹0꘬̀Ó@vT±qЬñZöÆnõG2Uó;f_f›Ý”Êîp<ÛUôÒë½½Gš“ç»–'”l$0C²UíÑOíUz LUÙœŒ­ðÀT¥ý¡þŸãÈÆ9ÿïyoWm…OZšÇ––L•A¼f‰°‰…:væ-èÀÐ絆¹¿†˜+˜?¼ç¹­*¯ÅB<§ÄbæžhkŠr8wuÈðj;¢ˆ¾ iš›ô}ÓÍRð”Aœ[BF@b>w*{†È ¯x¹BÛÕÈuFªš%ƒ6a<8è8ç÷î¤FŽ“|Ja‰£ž:Þ!¡¬¶(=íüh8I(ø1ËÉwéíÍ(\¯ßü×æÖ¿­^ÈášGúñŽgŠ'îÝÛUu鑬 l—XÙV*Œ|ƒ 4¤ò‘Xñaüê±#½Ù£¸¹íQ…I$~¹gH€£ælÖ©¥n ¦Ê†©‰¬¸¨F«)©Ö¯Zම‹—«ü·EIo•ßâ1GF:ù)U?ÿw JSO»uïI&Uuü¶¸ L¶üú+MM AÜåתð@„>«úÆ«ƒr,£•øvEò^‡™•*…pn´çµ(·ü(`±Ô}™?1µ¼5À ·#¼K–”œ&Ý §}Gøz?ydÅlbUÿÊ 
iôÀÖÞÕÉ›‡Jw‚êÁ·Å=f¬EÆѤ}Ô_¯2w­¤8Áìáµ€Óž7I˜Vðv7:êõZ`Mþ~¹Ìà’ ¢t<ÄANDË%WaŽ´u>«Êä$v‰SMŠRBzšÏ„M”¼[=Uî;²›õT^^ŸOÐQÛ„ltl-— @6Gê¼uvRwyAC8ð¨3æUÞ¯?[áÍB¾°S|Ï ô9ÊÉ·Zn|vaH[÷¥¨@Ñ™g່ÝY¿SK -ã4ó5ð¸°ùFl›”D©±ðéå’êôŽ‹6€Ó«DTÏæF¬“c[Œ$`µ’!MHürUaLŠ´ëE¯°ãR;Ð9‚Ø[ -×£Cø5›Y¯!r¬ôÿåö€üm¤{VسVèhauUfÿ½ó½Ïï‘4–4ê–+EöSvÑV¡?úÛ1©”ñ…Oµ&Ñø¹Ó6 R"é+€ˆ¯É!^@H½_$Ý÷X+‰Ã²·Ÿ14þ€>hmïSoá’LžþÚªÔª¥®“”šºOëìÄÒ¬úOÔ”ˆûB ¬s‘­ü›‡F¼ $Œî*5¥…±GA§JMÛ{bKÙ%5?¢ Ïª¦ˆ¼ -½HÊ·©ó=Ùðx]Ý™ßꤎˆKèÞu¾¨§÷àñªÎÇB -m,5Rj»‘ª¬!Kdz?~]ÊóD«{•³¯XÆSŒ¬Â%èðÜè!ŒŸŒP]¸ÅXâ»±çæ2$/­ŒÚ½ýä†SBVÓÿ‘é|Á¼¨ˆÉÂw¥`Û·JÁ}åÀã[ ú½ÞžŠF çÛ»êU2Ú¸Áß×/VB§ußn@g@#S¿¥„nÕ{•»h*|ø ÞU™€ù”Ž‚—Aö<”ó¹{zZ!ÿÔ2rŒêöW”­âžÛ,Á£jHHZѾ¿Ç*b:¤Î¨ÈGEõÐ$¢þ\¹ !o¯M7ìÏC ¤6…½|¨›!ÅÇ#Ë<™Ag[wx°m7“ĸUGIP/`>¾ðÉwvÕAë!ƒ×rÙÙûDÅ0á +ê »¸?–õ*bÊÝž²9Ž€ê"ñ ŒÆ[’w¥¿ñ,üø9,Köi ñ=¢äÜ -À÷çöF[o¤9©•ÃñïûŠ}„ã˜!i´„G„=æ+Ø÷wŒ¥ì8SÕøÐõD·äÀ«ÉÙïàUµ^zj|)· ìon ôR<ð¥ÂG(™A[>ìH!U‹x¥«01íñ}=#,ñ-fòÃ2¸Öï½>OùWÐú¥Ýc{¯²%1ç TÀ§ÅüHǽw¢ÊlÍlñƒË @¸À\DKå<’ÛUk×>*.ztª¡:駎šOÀ£Êk7éý¾•"¹2Ç6êGÚ?jùA³ïŒ½°/©Í]u§XŽŽ=¢ÐñTÍvA¢ácwáýÜxß¡Y¹`„KÞê#´@UÕ˜ŸŠb½ÜÈJ×~§>Ž¯F~ª¾ŸN¾•¨Ëö¼Rò‰Q§ Í@Á¿Ô)ž3¾šÞuŠª'.æin¿OÌ“¼Ð|(PU¦¹OnBìÜw­(n¢¶»êúzŠíÝãÌÞ\3”¾ÿýàÈ%'xÕ¶¶Çk¡³#WíÇ*’ÌWÈËjî‡E±FGxbÞ‚˜z½î€„3$é«þUª›´è#>KŸOør…=Á›±Jg´¢L¹£¨0‰ÔŒwͲú5Wé@‚]•&c?Ë Lb«°JE÷ŽÓ{.Ä‚ú*ƒô+>»9Ž£˜á¸TÛS·Œe‹8)€|* ,‘éü‚s­ý3Ouwÿ)wÖê~ì‹æ~úä©(×¥™pÕ'¯ZCKUîø|$ ô™ãÐà".ºÉþ Fú¯;ö¯Élü¬:ø=c¨Ôƒfã#aY6W•P“Ïiìî×üö ÷WT.ì—îy6ç÷ÌÅf¶§åOˆk´Ú? iH߸ -A ì˃ 4ûÝÙ•o5<Û·ÒñÜ¿ù ‹èL–WOÐhf ^ù$ÄVØŒíFy'Zwò>íoì¼ÎSJ¤°ðH0„Mûiia†x-¬¹õÌ!dZÄÏô€êõB–°QÔOÌÛ£ƒ=7Y6±¹™Iœ2a —›¯& ÕÇiäá‘´´Lµɼ¥vŽ7s]Çx‡LD±»=fg;I -µW¤ ë©fÞÄ¥æŒØù¤ìXLöQÕ`ÊnÍ¥Î3r.)û”EBk¢×"xja§µ - Åž10Ã9Hˆ‹fK„eî‰Íc@s=都¹È¦3;ï¯ ÑáW„C—è5)®Ù°;=ÐÉ’OUMÅBF¦›­_›/Fµqqw—Ñ|~NFÀ‘HŽôžºVÈ ”„®ïô ÚÇ€]°)4ÛmDÇTÉù>H`I‹tƒ–³Û‘xü®!·B¯Nš5±·`cÎøR=æCÏÄwö)ïŠÏ€8ˆôåm -Øë¡›ŒdjX ÅâÀá*¯w~ã’Ø;b³b‡› ª×¢–AOòè[0?6pÑA³õÈì„ æ¡æe>tÌ"¶ž»æÔñÆ”áÔf“fÕüésH‹Ú7B¢!Þ¯>RcHšóôã>ã«ñݵX6o½)[Íhoo!%Ç‹'öVÜÉ›3þj3n´r•AÊ5ú­\Ú[òä9ͯ(ì©ŠsBDz­}ïYŽ=~^GŸA×û „׳Ý1ë¾ÔQ! võÌZÌ©+²KÙpd©k力dÍàœ±œƒ€(´vñüæuÈ™0w+™ô\öPõÊa¿ËF˜„wv‰|ûÐÂá>®‹;LœPL»erµøt¢<ͧ’”b|ðvjȦˆ).âõ´ß‘gˆ Ë~ë*ÊMa¹öz¨ƒ³é–K0¡- -6–-Q.BDƒHÝÜ%¼gˆ^ÓâóS4_* -M&ÙÈÃølö¬šçAbÆ&è¥ÎAI:ÆÎçybúÁÃÆ>–ótÆì$3úÅ»ÂÎaªÌ«8s0)gò=hÆ$Ø%KªL>öÉl-ȬNi¸ïÃqâ{wÐÓ¬9DRÁ’^q ¼qEBdx‚'#ó -, n «-òSQh5Ø+Ís $\Æ<®Ï¸íñ‚Á¬=£æœ­ñ îŽ-Zìrdµv ³$0$×àxAƒšJÄü°ÆM&õ«›¯×–W<œ®‡:Iï×½` 3u§™?kŠQJ¶¨î3p4$Œ£Wˆ$5ºªj²8i}Æ?“ñÅ‘M`."+(Q•ÌEpa~ „rÒ­J[·E[Ÿ`I×oã6'‹ázÞªíú¡B&™ÿQ0Í5^ìw]g`4ßG•Î¤ÛNLSÇ«ñ1­§ åé9«ÒùE‡7ÌŸÖ¢:å«­ˆ‘éÞm8w a -8¿TòØ5%¶zºž2PÚX[H?Åy&'Ç}M¿ÛÐY˜9UP•3Vx1S{ÊF½ÅÜ[@¡øHÊÅn§ÕLt#'›·*]–(oϧ;×¢:‚ªTŒ=_ñbÕeÂCxŽ5D3y1)ï £ý}JiøÃH×ÑVDóùgkȨ$ ÍM`ƒÐÌø›‚ó•#§o* ø´ãeë½YS)kÆ_Ö!iÒÇDVpgˆztr¾+ØAÓÔ,õ)¾/ú63Ò*¥¡_1lüdÂd“3y_[ÿ¼è<*Žµi'æX÷äý ‚”ôÅSƒwà7X¿’̉ u{v¶W>ûàœ)-c·^’ÆÂf;ï·"¸l'öÈüo ·’8î‰Ü¤žHõÄä.ç²£=ÀBAqzpJÐ-‘3Éô£•òÃSs’X÷2eñðŸaí ô©¨ÒCŠèjÒ½â$'(uÚE¯/ä¡oìã(Óì¨K¦&=g8I—í0w¬˜rYH©·Ý0'ƒ†§eÅ:·ŸzQWeËBˇlð™PG7†ø‘HffËPÎ#Tݨ©®C÷†}b<Ó^iXÜ`±œf³à+bâªy8‰ÔëôR(„an‡«‡¿j>Ñd£ŠTáû!õåß ûJ(cnµ¬ƒXqd$êCÿ†ýªYÛëPNK O—ýVi¥z§¨>Þ·ÒF¤Âø|uÛ¤¦× t¼­4‹ñÊ! 
Öƒ÷þŒhÔîÓ½…aß:¢ôd‚)WÊcC°[W¹„µºÁVõ#ýó*hZ6zØĆŸˆ°è²«<>qû©Õ8ãÙ3ʶ•ÂÄ·h¾AåôQ©Ä›6⾕²:™C z Ÿx¸rtótOqÑS‹t`Gî‰-­iÂ)s@÷Õ­àŽIßÀÞÞõÁ²aðû_&R¬ -6ÊŠ±TX¹7Švç‰!LÜõ~n+¿¶å4Á©ü²Î8ØGî#x=kåÓ½ÌN$ÊîH>ÇØ2ÉXIoñe=#À¯9l¥ÈAMâùÛHy>¥9=‚*±Íí-BÚn¥IaÍ@ï¸Îž`T++êõ†Ø•š‘Ö°!s‰3±•¥\õTG6¾j‰‡‡ãäAœ¸%Ø©Á<ÎL'}·Ö¥´ý]Ì…²âØfœÁŠš¬öà-†JÉûFò–ò®$|ø-&óѼ`>Þî((„^!íž0 -ŸõÜ+ð¹Âù\(¨+õ܉QI¬É»b5• æß%6>MËa<ôÈO[ÈŒ žÓ(~J!µ úîYçCiÖî‰ñ’¯uÈ+ðã#mJµ€ˆÈß!%\£®#¥X‹î@'0ñgt˜S RÙ • ªwú†×Bg“Öáá H»üŸnd]‘²M4?,ð -ß„cuŽ§Ç.Æ,Ø£]~¡Y Ñéãù–¼};~9Ò—ú«ø†3îÌ¡Bxîþ$Owîô…>5—ÂcÐáÌ!¶Gƒˆ“`ÎôT|OÙU7Q´72¤\«hø/msQ'=UCà˜šVE+qUa§qîÁ0\5f†  \zͽ‘۟Ɖ”ÔŒH kR/ Ыv™z(ÂJ‡UEœ‰0½Õkµ¼±jWÖJäæ|úW~Ï ¾úˆUTô“OL?X­p0ä•EѨîD -P®%ÜYËZ’[h•ÙÈùX©­½éÙÈU­i&Ð&<½Ñ¼ŸpÎë¨ -¡ † -¶†àYÄC—WLâíIü“ìàŒ°÷-Žßïª -$†ð2ÂÿöPû¸lwÕÉ2˜_ÒÓ÷CI©ôQñNáºhJ_Ì7Z ¹ÐWŒQ‚ϲ¥x–íÒ囧rní¶'Óá2qÐ(6Üôý½IÀ‰ˆ¬Ù3V„äÆÜJ+/Ê€áß®A$¾ k°÷öpÍ-z»®÷uE‡{ŠóŸ¯†êv]é‹^]aO{ôð’à—={Ó+eÖÎä¬V±†÷†ˆÕúO÷ýCëmÿÐÆ9~ U-Ú2rCQ¾ýJ¯bËÞ‰x6w±¹h[¯¨¢”$òe`çꓪðkø-ÈöœÌÕë¼ð-èç¡:Wõe<]¿ÚûÝ?IE°‹¶šP®Uõ*Ÿ•gÇpR”o:{Î#ž Þµ¼6«}Of f a5¢áEäÊš“6é%åΫ„ôSR’F7l«-D€ 1s‡…ÿ²_YŒÛvmÍ9h¬|fA™€FC.‰¢ùc‘×ÚÔÉK–ªß%ó\+ýŒ³§BçKP§iVîL ycÛ³õiÂÉIÙû§Kqc¡j J… ZntX;ê{0 éÛ\|y’ž¥h p¯JGÏzX!ùö(y÷BôrëéX¬çöòD³gÔâJI$*â÷ªrc÷Ò½~rq;!–4Âe2šygCø¦u®½çz;[Œ;e¢süGv 5zÔ{•vmàWÌ’5læ[žÛ!y4ÝÑØVç'‹Eøz¼ =_Mä}×/A¸;9l6ÕP‰ˆ!…q2èº)  ´O6UâiÑ(× CúV@ÿúÂäzëëq¿Ð!âôß‹`Åæ™ãNþ@¥¤§Û3ÔâßëëýU®èÈ^÷ü•FQ{ÝçÁì\„æ"ú×yH®ø䉣ƒæ@OØlú“r<}%SâßÈÕPZ7Ô!!”Á*QÂLÞ9üôÊôƒÁ$‹µÜΖJ»çW4ÊrÃ¥Þߤ2—T?C…Öö.Yõùb€ÛûÇlåæÃT K óûlE/]~ª JN u3ä_Ìx…ßÈÄ7÷÷*±-\`ï„b´hEµ»à?ŽÄnvüô•Õá%æ¡+³ØêÈœs)v'°-9¢-… Á& wõùÏ! ]×Üæþ=¹SÏùTJ¦zD,jWò$oBÉI¢³~Pc/'ùWœÿõÇvšïDÿ<¢÷%m§1ß+<³9vÝs_¬§OlùŸ@4l”öþ_ìŸì¨EeîØzä²Çýå•0çMp ÙP \Xmϵn*lüyÿ=T,•”vVËßsÂÆHKî$W4PlpiöJŽ…î8YgÊC2BubTÌ= â ;õÿ¯žcR}6 -iô3w'¹–äXš Wà{¸pƒBŠŒcšãÚ€5r[@í¾øý‡ÏÂMÕ"ô¬‘ñŒz¥csš¿–@t¢žª;øÇâL#ÇöÀ%m;x[Ó©€"Ú4„O%7š7¦† !ÝÐkÍ÷Né•ŒíYCè˜j;KÒÿãkÝÐZ&@Åo{Ó÷šÓkYº&3q4ˆ´ ª¨¿å˘ZÈòFÊ¥ë)œÏ—3ýè)hŸ»ÐÙˆ‘CÜ\!Ñý“ê -Ñ(_Ë'C€sõ˜ÏÈDð|¡§=Âmô ?›Aw ÝêûóR2{¤ý÷ÿÉŽè -åw>aY³û7ݔǴN÷ µÐ¤?«9ØãL¢qŽá;CàÎëœòüm|ÕYP¢s®«¬€3íT§qn‚ݬˆÖõ* b„!ŠûÐ;¦lnF»P¢Èê!*2'“Én)S -2GE¶êþ”»¬3®›ôÀar„t«Ô´*ökÍ­!-‚Lke‡i !ÌþÄ@–¹„Kiÿ¦eEIµ´‘gV‰2RŸùVÚòñíŒÂ+­‹{FGÛ_º„äµghÓF³IJOÕ”a %¯ÁÇ“çK†™ö^ƒT†]³b09‘÷¼y²±¹K&Ó×·qw‚Æ&Ûw“BÇ¢¥ë²ÒwUø d蜣¸¥ä¹BûÑú™+Öá(á!H®L{ƒ×!¾þNóCaOq?BÍŒgð®.0|)EˆõY^ë÷Š’W^Eí*±˜_Áão¢ÕÞÑÐÀÁGXw‘¶ˆ<ÛU‚V†í¢#þ¸¥á3쬘GÒëèôá›AÝ°ãJÆ=‘K£;VÑ#´Dõëʱ³^¹ÛŒâf0‰1›éÀ -ëÄÓÍ%ì%0Biå?±ƒX[w׸d•&­œ=¶km§kÈÚdîÏd8kˆÕA× j'ùSªB)!KÍêó`³¤T7s)r¹Vã[œ¯¦tGLÿˆ°—û%¤¶–ƤEoHg“AÝL'8ûÄ“ædtÛ â^ tsEÈ®ãKE±‹ÜZd (°’•’ìÊ‘:Ú&H¾=s”Ä™óûÝsjgEM@0ˆÈ•Œ¿%ØŽ…}?±„2%"¸¡ÔVªb19A­Š–I–wâW"Ò-nÕa] xšLëlÔ-× åÄ’¥A)˜ŸÊâk®pùG¾(e Ñ÷/b ½ æ³ ·2båX´[”ÉŸ½Ëbƒë²^1LËÏ\uhÝy}]Ž·ôöãþºÇéž+è…{gÐZ@t†\jå(’·×ž²¶X'Ûz°pÔFÐF¤W먃›‰Å„Ó†¬°nçN ¾vÙ#½YÒ¾­`k/$M˜µì”¶]ê€ëuv”Vƒè·Gì[¿cVs–S6‹y6Ã2„͵‚ºg Yèå—!´àГ¯”‰»Ôþt -Sœ‰¿6|E®Ô,{ø‘Ò“²§ÄM"P?CF¬Ö?R„õÈ´„áD¦ˆÜì%­{‡8çãi@iØáí+à¥æ%Ÿ–l‘­:£j°fdÐŒ±ÖnQOY#b¶|¿Ab˜ÔY+¾š836v©ÄÇÂO%«:BI*MÜQ2ø42‡ëü†:rŽÁÇAÏ€F½³€ g:ÂoEÄÍÀ5Y¡|•¯s¡Ž ÿ^º‚¤ tè]…<› 9²¾Ÿ'Y²­'È&4‚×™¢ÑÃKдÙ1‘rmX"ÛªM€¦,xÔ -šÆS…?$–«øDgm³O)À‘ WÛàîuå1g ¨«ìFlò$ Žy#@æŒêa‚ªøVa, Á´6wL‚êJø°éF†ïÐD¾#ÿKlh(Æí½IO‹-Ro1ËXóû·Í(çH¶‹7zëQæ>¢{„lsCàŠ*æ‹ÀUþgTçês¥7ÉNäˆK¢ýIᙺÀã”á>½ñÎDšu'}¤–Ê4ßÒìA^Χ—jïYË4áÌú pQ4›ñÙU­;Ø> e$º,²{”€ )»¨lªfXà pm”$£­Ö8Byb\’™ît¾™$“¨žp}m¢z’!–U«bÿ—`_µhÙ-Îl»Èâlœ3—²'Q{…¼·OáYHß Üsm[ý7E³)ôºÊíP›c˜6ª¨£6HsÑüsŠ&ÒçÕù¦Ç’⧠{…y”:?*{Fªi™!úM=ÞÐ' Ú¶W%\½É‹Ö&ú«lk+UÿÌ¢Ú9Þ¯¸nË~;ã­ŒKá%±o¤Qx«ÿÇ3}Zˆ#‰Æ—öÁˆ+Ýý1ÄÕ[•7¤3ÔÁ#\1×ØðS/#*‹±s¦Åó+t7ij 6VMrJQÇìã~…0êÞ°yö••!=Û°S-ŽHúxHñ‹×º>™!n“=%gƒ A”Y‹G€æfPâL"Q6è•”øMju§¢6ÊNLÍ„¾’‘õ?'ÓÇ'ìZš˜‰ït_º³þ - wxåñµÎ£ˆ‚Ù•ã|¤…§‘‰(l½£9D½Ð‡Âý@{Qüµ» %™Ï@hÅY¹vwÖ±HSØ%–t6r¡/!PRzŒ›ÄS2éœ`65öŠÕq$ÂYÕ>ð;Lƒ–Š'ÙsÅèðJ-Š^â ::Wšw+è¯ù}0ý†ÝïwÂ˵Ê[}ÌåÒ¤\GÜ -H±hQ_‘åÚ9 -ÅÔ›¤1M…ÜÙ“([ÿm„0IÒ¡kç:ƒüSGÖÓc0ë}ºT Už Ó‘v‰!"¹34®c$Å£èu#I «0uìQÁ›¦º,¸nY"­P÷Ž$ú»xÓ¬Ö8 >p¾•"G/Òžš!'ßJaÄ:½ÔÌN>ðHÓW\a¿Íè"—‚娅Éÿ½;üÑ^È”R*r|í2"ÛÚú¾"ú´Á9&ç'`c.zõþnBÙx2ovƒ –‹ªÉ•ê:æ›íÚ¿£Zñ6÷Zˆ -ós–ÖVâ–~´€}z–T 
-£èëWȈ5Îúv°­­ÍÒÄ×àç·°ï0ªæãùüzëÛJ2ìóÅë³”E•3hžùá±’3]D-Ö‘Züše¿Á¦ŸåyòŠé×Q„Óz8"ºÕvÕÔÕ$ÖùFAT†ci•‡#{HdŒ\}š yõ9Dɼ™¬}¯XÂHù¯ä×^ë ª™&Þ:*pÛF› -t#a¥Õ¤i=îÚ@:m®v'yfPÍѤ‡‹µ;@Ö>ÿÜ͚êûÚÌH%žå(iLéÅE¦ -Ý#ꕦ¿q«ÕI‘È~]ÕµŽø+ïL¾ÓÚžaî'“÷Y¤=%ý.W$:B¶Lºß B#ŽüÈjd)5ÐÞ—[b:ÐÓÜa’|aLÜuP%:iþ×r\‡Ê}˜Š@ü”×p²@N°Œ÷ÍP0v=…Öú›Àyhߘ_}Ruƒ°N«ûý„Ë \]#:Ã5º—¨zz©à~êý[Aº_üÁÄ͔Ƚ­”ÐJ_‹¦E’h©9Õ¸óoÆ(Jj‡Ä õï†àéHòöëw%õO¦Z¼!Ô™+ßÎ\É]Þ«~‡æ ð_wc+‡õé±ÐYC:MDÁý(ýë7R;ð#±cIàÙµëÍY¬ìûo©H¹wfþÓ+.Ë/’Üýu7‰ê U(´·ŸéðöX™e Í'Þ§¬F•™}Fô*¯µ,}KÐî¯>Ó—´'å½dŠÑImé¿Ñкsí"Áù9jcâ=$œE†‰nÛÊßù‹ÉôëâÕšN¨?úÉuJ5u -»½çÄ¢- Ä)‰¥nˆäÖþ~Ü#gÔ®ÈA{*ËüY‘S¸p|í‚b¾}2¬'JÑTëò{í¦2É'Hò7 “1/Š×÷JrÿwÂâx+T0R‘BÒ *Ä8¤ßür|6ǹ÷÷µØ×^iê -ŒÏX½ !F…mÚ¬/y5õÄÅX -ã¬Ês5¬Q“uv䬃r® =˜ûì•ÁÓŸX·S?ò'÷©Ýý Þ'€„é=“ðÒiÁÛû¤Á>HmÜyõ{òˆ­6Êt… î,(Ý:ykÈéei(½_C˜–]`ççÿúG.& -X'dX3y*ïUt7*̪Lÿyó=­")Yk#Ø¢u«öô8’XºÏJ -ð°îÊ_É>£ÇÁqj¦K68$®€„"õ"C.>9ùBŸã^‡DY%åw¤8`½ Ë×ÉÛã6 l4½;Þ¥.sZ2ûPú<‚ÔÌë }N$”é&ôôò‡`æÏ Õ׽ﺅÃ+o1ØN&|&–&˜ÿÔÐŽyD#ä{ééÿykݬ+´ Ú箚C^ˆè×ÚÛ/v‰˜_“N²µ5p'U²ÕêK׎a'·€·Jô§JæQ›´è^þ’´žwiÈ>®(9‰8Ìõ˜¥ñ¤Ó¹+¨ðï ë+l `ëYª8"/Ü(~ nê¯j[½ú©0ÊÈåsø0àŒËã/=V 5MüËXý“óä/?JUοhØe‡XÉoä}„›3ƵÂl¹+µïöâöjÏÀî`7"SõÄ.–!FZ:ë_z…Yz†¼Ðª×©™ÉÏâs!27u3+,Ð é 2² b¤ T¾öô’ºyÅ##|'7úák;ÙSy4HÇ~%Yüê1c—º@Fh»Ü1%åT`¦Iu«© w Ù.«ð€¥$µž ”-Žx}lïkµ†¸ó…qüùÆë«Ïz¦Ç©£ý -g…bÑÁXxŠ£JÄtL-×#®¿­¢)ýƒnÿýƒ)ÕÁ7º¶0 êcsIl†l+ÕA]D"€¸%)ìì„puÄ䨜Ì×I*W‚"+¨Fæ çµûxª¤Øï˜1ñkƒ×Va ‰üIZFÏšXð)TÃ(åkæŽÔ±ÁÇ;¼¡Ê;ãØU¿øƒG×5â[`hP÷á×rDŸ¥E6éÙÀj ›Ì¿ÝåE•s½Œ"æD§‚8Ùµeþ æ©Z­‚\Áyµ¯gÂ^Q-*™"ƒ;w ßónFP{GÄ$6Ìj¬sv—õw"%¸h.`­ùÇ ì;—zÁÉB`9ÿä¤$Ò¸‰6‹t#™ß™Ìd_L¯>Þð¯§ÄÞ€P¥µ–lSm{¢‘Ê•À<´ùOeÐ$ùÓí/®†1Øü1@ùÑ$-ûÏ Õ2ĘÈÓVÃ`×´ºP\í?Hº\³H?¥3OY^Õð@>~ç -8ãiÑÅÕǤ\¬ •¥:E^´èâÓkV×$ö_Äž;–tjû:•£üŽ%|xIaî~î'áVÍ®µ(îˆ µu¤ðBí¥H™F‘¸×¥‚áÕc’v)’Pú kÛ¢9=u✒‘ ‰åQ±×Tr}GÙwŒë¤ÔÊN¡~ŠRÖÈ¥Z Ÿâ¹·z4bK†šæúÚ¿)d GÎIrBB³yÎl‡ÑÙTg8HfŠRT¨ÞÆ\ Cl~=U¬óˆÎÜelòtiû ˆ¹ŸŠê ‘§úäÀkkµ­ìÒ¤µ7ßU]´´ÖF œ”Ò¶ûu&œ&Î[ÁYºKÙC¬óH-VlÞ’½]·u -mžÌ‰óæ -À \c”]á8¿W¦„ÅÖ#k¾6ž­0}µ8ÖgêÝo¢ó–0#øX[ŸoÌòZ|äñ-Q[þÌ~ÏXú"b›©€Îѹ°G¥š‚݈/ÞhYi×Oº›ó—Ÿ-~ý>Öbl¥ Ò’ï¾ç7ìj«;öAyó=íÖ¸¼Ö¡•Î•‹´r†¬Ï@±\38¢þÕ¹¿±·¨V±¶¿¹ç$¸€~+麬W¾ðŒFŽ æºAïˆû®uOe¾# ‡€8Ê£öu¦Sm 6óû? r¯©ãˆ©•¦õ+_¡þÉöa=?hÍm¬²ÍP§1­•äbûêö:I6â•À9>›øøŸºÔÍ8át‰Â±¥Bz¿éìYõzWW*Ä•´¸é¿TAèß¼w™Sµí “ ~°YPªñ©È”ݹkKLÆýYôQP@¯ð‚A>TV·›Ë¸5’µ*£¶N --{£´ªeÐV4’úû­•”Ób>Ú¸œeì-8_ï‚ Y\à{œ¨„ -·S°N—{—?Ʋ? Iô_‚;'U"!gðöev»A}ÃúœGÔt”§;Ίhâ‰Ð£ÑBúÊŽÝž° âÌùݤ¤óÞÑ-nÞ‰æ»ïG ôˆ,zÚx™#àJnÑKoAŽ- ·Fœrh™®d;`òNÏë”ÿ¿­4¶_úTÂü , Âýg|~åwô —îAŸYW 8èOWÔ žT9빉٤zñ”Iø»xãE9¢ ×w‡-mkFùEèZø‰=z„Ím‘ä-ÿ»˜ýÏOµsSüb9³GåV_ï Y¦úKh)Ð>!gÅMxœ9c¨Iül˜ò·ßµÄª)õ_½8ÕcV`mþh"芄Ãz³‘=iy³w$*g}ÍþI»ùß|ç“cÌ:{òò{t ;>ç,( %õ„¥|DŽÅ§–ö`Sz‡Mn{«¡Ý[¿TCˆ«è†öÒ)ð;vó#<£«&&ÙÙK‹pøKX)’ö”c¡Ç›†hMï_½¾/2‹< ~[kR„¨rXKÿƒ}IDÛŸb¥Ìˆ¶?± ò댌ûDñ¿HòSúD±},\%üõ±'†\Ÿ¤xÆïØjöì¿‘²C«u9ò±³•m)Ûž­gZŽ; ßA¤^ÄlÀH—Nö-Õ ð#Ûþn…2\ý£©Ã†ÙGêîù:*#Qý9‚Øe4õ[Už÷þúª× æFx{¯ÊßzÙèMÎA ˜,Pªæ£‚æèM@Ö#HÍ5£‘-Š 9!°‘K›‰®)%ª£TôŽh$ÆÄ[ɦIoÙ—S±ˆ°ç ÿIê -î(CØëX:¡a*Æ*ŠÞ…a¤Êã@Ï"‡MRg¦Ä5× 6ë?.Œ\bØ+4å–VCÖƾTë ¢*¶üÎE²„õÚK3ª@qX¸#¡GêïæP G6‹K’öÞ7Í=- -±\Jc©DŠX°ôOÌ+cj@úÜj}ä¤V\+,¹>e6ôDàE¸°ï÷”ððÕšyFpè‰ÙoÉt -S2ä…”œØR©&*ž7ÜZßØýv²b%¢Â4%+'lQÇ∧¬/C Œ€æ¥*…²¸&Q+Jù›Dåå‡hïúù$V2Ó™fÞO(jO #Ä°b²3Ø`W”“?ÑlôŠÔ^•ùQx _ÉæÂÁâõ-•R¸\oàAo…ü£’è«Î¢ yTST™)ÚÁ-6áù†q"S=àÀÊ7ƒÏ¾BJf[œ0á•sæ. 
HÑ–8$aÛ"uVÞÍ0tVß3ö)ðÍs/³±uê>å -¾îåìAÖÇû†À!ÆÓ0Æ, I*« UŽÔHÓîà §åuoõGOiW•ÿŒ°Cm0OÂÁn 5-1ÊøãLtÇ“"†fû‰bú8§sÂ<'ãW­ã¹¥4f' -UÄësY€qæÅ‹Œ-ípúp1O¨²@‡z¿8‹jgüÄ´Ñ'ì-¨P òšf)—W3òèé*õ糇˜ÿèÊJšëDQÔ#ôƒODñÜ HŠKQgÙ‰³“C*ú„Úª^~G–)¿3b*\b399ÞTU"ïJŸ›”¡nGžjÈ·u”:™{+v¯-‰3Ú0Í‘0\`ý{ý‘mŸ+ÖŸxµWèg9‚:‰;ÓJÀAßYIâ,¹‚'ÛΩ—™oe†ºy‰gž¥Äí8šÖÌà -5È€†?kW½û‰Þ=ï(æ¦i,AßzJDkûWÌïŸ!‚‘(d–È•Nç{GR|Æ4ó×ûÄŸ™ÙM -—‘‹ X…Oj+™ ã ö’H±‚ã2ÎNzÿ.³øÙ®ñçýì×+’éÜo˜“¼«©SK’hŸÂôÿ/`hÆ£+ò>ò¼Öä(8‘¶ïí2k‡k1¹Ð\Ï:L̆å -EOåMŸ‹]õ -oY¬8ãë(ÙM¢:Øê(á AÐb]iFÔDT%GyêWì“4Z>ÑÈ &Ã?ª8úc&«06 á= }#°}xúï:¼¿ÿã†ÄyØìÚ î¿à3 -%òеÜàcQVG:©äW$¡:¿N}ÄqPÖóÏSÿŽÙÌŽ¶¾‚jäЯ°\󡈻¼„ìc¤÷[¤ÑÞj!§…ÄNÏgÍæoíÒ•/ÑÙÏô¥˜¬øÉ$xøL”Ä(Mýu/Ûä*¤àKt/0Å¿Z×}ÏäjPWt¢ :ÿT»\“9Xd^ÖÔµq»°VŽH(rDOmHkúГQrØwÖû `Ž|¹"(›W5f]aö¹}Ö9­üƳƒi -ŽÐ¨tiGÏWl×PÕ±S0£J;ìÙ2·kQ)f¿‘ëºbOê»(ÿÿ)rË·@6ú´WÖ^؈oe ±uH‰½.аŽÎ @Òb¯!dZÖ…åBZs¸aÔÛ‹l*äÊ6„ºsH)o«»mdMeŸ;æk´”ˆïëçœu˜¬[w¿i¸¶O”³.¤ÏY¢>AD0t¥Y<¸y M™5è¤J]W¹ã‡k`¤pKqì‰áu°ˆK“ó&|¥ÄQ#ÞÌÊ¸Ë |bZþ¶’6{ °Á¯ÔºÖ)j¿›¦òú‡#;%}@È »‡WýªÇâÖ¶:2mELä’˜Ûüæ|:ÑìÎíUÁðš¦æ´%þËÖÊ—¸ƒcö*M<4°-’›³hÆ%=§OSÇlˆO+ʤ·+Õº‚ «WLÀ`eÍ›œ¡î/Žk`ÏþÝ.Ž­²6qäÜ#Ìa<#*±>· ;~ë¨ú|cÑ}¡9zr¹3^#©œ$¢ U1¡wGü ‚$c7î¨Ú×Ù÷3Þè'¤Þ¿oë IJL§8~8Ùsyp¡ÌM‚7]êˆÖÐŒMtÈvWÔ 9Ì$ŽŒ‰ BÍ{)ÃI¨œž.f•÷t+‹~:…ñQÂrü MŽ~×J…uP{È~ÿìßP;Þß±÷Tù™¯Ø[BòzÐ8œ"–<Ül¥XCU=ØGÅ #«™0[¤pËöïˆÎ¸Ýjÿ}kßH+üAZ<Ò»"‡j©²úít–ü…„–IÍÝ?ùK™ó¿€ý¥AÿýÕ«¡åóDüèÝy­ž4ïe¯øsÆ?&†)gÒ¹$Ù1ˆãy)¬ÚKêLým¿¤ZRt !ükIF½eßc¨vTß×'*ªØ+ÊnW •4 Ãxí䤊VÈõŸÉADö¿.'M˜ø -øv\šñ^Y¯çKÒÕÉT¢üœ¾lö±}wéÊWõÀh ÖÒ½T*[DS¯@¾4¨b‘E°² #aR%O‰üüáû}ç/”›þKúä:#Žãˆý¢Ð”Ø«Âì`˜šñQÃØU§&‡Çi ŒÏºIuß&…*éZ·7ûÔý¼áƒ’lÀß%µ ®"ÌJ÷؆zt…×þRèŸOõE |F4M)PÙOˆ§–lí§¿àNþ°~ç/ÑŠû•ÎÓ‘pz·±g/U°5ˆèÅËt(§¤NÂ74¾.Ö,yõh÷ˆÈ{9QIm=aRz„Ñ~¥KˆÌ¿þaJœ¤n×Æ«°SÂÜË]>,T> Áð‹({xÇœŒ”O³ÌwÁ¤pmÔÍ_Fˆ]1öùž»ÃDŽîÁ—e1%/ ÿøúI'«Åkä‰KÓ‡±ÃŒäúŸ¤OZP1¨®99®^ë“( NæS:,HgðŠAÀ4|É_½ X„ØÞˆŽh^#§F<Ñ×ÑR:çô:[$D®^=Í;²Zb[Êg“_ß$æB»ÖÚ#‰z’)[g <#IËŒˆÚi{ŠÏ_»A‘ôßð-Ày°s©N¾oJ¹Kz%81E?«}ÊqŽ'€ôî,ÅJ*àΣ úZôñxù@FúŠ¾Ñ‚ko«¯=ð‰Læ•ßAý‡W.]ù¼Õ¡‰ñ©îE9ÁWöLO}› „Ö$:KÃÇ dßsÆ€Ô½Šcv܃€¯>AhÞú€G+R;Âjk*2o$WZS඗‚Ï¥)³Žÿ¨ÃIBÓ,½ÄÂ×::ª×‰Þô°¡+@^i—êGÓ¤: 5 Œ=à)°Ô$¶(U›ƒ’Ò{ïºe±4†Ã&´nš ¯\ð„0„_'¯ Rí¸>/™dÄ‹®†H´Ú+âõ0¿Å¡V0…WÀaÄa¶×âŒÝ1½¶ ¡ÚK^]a†œ'›=X¹à§·¬uxEO –n(Eî+´E^J1,„h|r£æg)i ­OJ;rþ³†`ûÅTàŠz*&.aë2|õ¶ç/®õ6ÚSÖ¡=l9íõ7š†dþÖµÔž='´Îè®ö !´Ó°èT Ÿš~ºh¨˜3ÀVò›ðº!¡ÀêL’ö$EÆÐŽ†y;€ûؾ@ ëñÉ®Ùþ²‹›Ï6nbøtG%wo´“#}õêjV@ÓâoÆ•Ö.Š^ø[ðGD¥SßÁ^¢>¢:j0ÜÆÙÌÑÿ{ö$*Nxü‰)ÅöÓCñVƒ BIª¨ñ߀„l®ùiÅäðMM÷‘õ¬iún b÷Ì%ß#<'ZüüßéÌõ§—S@Ÿwm~°Ý@³ô€>áÎ ½A}{Îç”6~C^_N·jq=È©­5ížcýÎ¥š§(¸s“@§´ ̹áðuœ(]æ\¨ãÐl%L²¶Y2Ê¡Öù -lé´–Ç]Ê:t•©¥Ð’êM¡²>nwD Ÿ!Y’ RAwK×àV®$”`Ü@­æ "ú*ÜÖWxBìÔ»+ÿH²­¥}¤øùÖ§<µŒÁ‘ÕFô„zI.åSG@ŽF ýÌi·ÙKn„Û¼ô©$a›möbH#e˜9zK®T¦bÕ³k‚7ZGû”ˆf’Ù×É@âÇBEYvŒ¥ïŸÄ«‚tb]h–X7kˆàPOÇlT¤¯‡#§Xûwßëi¾»ePḞ6R¡#þv¡¿ïl_©É îÉWÓóÎ/r†<Þ,43䪜'o>%˜âÚëB­ÏVÒ{äëcBQ†NNî ¾¥F$›‰ÃÅ[¢‡d.þäônA¢ã ü&ñ¸Â²°K Çé èÀ*‘=Ç]ΓT$›cË÷9’^Ê™3Á¬—éz%¤³É>Z†íº–.u˜Iü›0kNçµ7ÃÙÀéaä—p,­æ«üH?å„™T>Š¹ÙÒÇQÀÅ $6µï¬lêŠéU>´ŸbpòH9ìœcÈ#jná•‘"´× Ûˆl–oäÔ#mû)MxÀR_3êO€]W9¨Mo­fgõ+ srQ¦D¤T‡>å£Á±e8™&ëÍÊ1ì÷ãÙCìì„€M¦]rrØÈvT².I¤Y$´2‹°qÌy¾î†D(S†£| /†ÙK¶oÅš+ú ÌD7_’é•Œm^óÆ Éå*ÇáãÀnBº;~Kż°ów$ùĨÀÌ-¬‡b–Ñàe´w•b‹'¤<ÇñWÉ©,Yâ$å9É9 (gT¾•ÞÆ™ñºä Võ1²Û•ŸX–¦×ž*ÅSP0½¡•·¡`=NQÆ)O^?¶®:J†ü•g¦¨£ø‚bé ½»v„Œ§µLþUUíæSæ•çü‚SŸ="rÌÍbŸÃådJ à³Mº4¤f=oŒ÷¥*ÖQÂÿg)uÈ™Ç|ªÖRÆüˆOK-µc¥ñ7z­&Iæi+P6¨M‹–€±ü"dnärp)vÑ‚–eè±vœª„WžÍgÜtvgkeÊ¢:£§í³cS[ƒs›=§XÑÃ&D[PP ­CS0<—z¢<Áø -ƒãîA–l¤VHsÿÀñE³ï{¹jucT^1®LÊfíÜ ˆg[µî–²$!°²u9Ž Ž# -ç= —Xp¯‹ò_¿Þ¶Ÿ -| ¾ÿÊS±lwÚ#2¯.u¦QÿÆâ6—”§4„íSƒš‚0‰F;øÔ™¸çÙCtÆig3†ÊuƒÈ•…ßòD­ëˆ¤Å§ì|ÈÿklÛK¡1G„Ä1Ê(è¡È-§Û}æÀpê­I—ÝáÎZ Ûa[À…EöË·É¥$Ì=þ©‡.fÅîRðجØD«8&ÀZúŽX“‚ýZÉ Þ¿Uš\‘ÑÁ蓮ZªÃ¿c­«µÉÄ.xÅ_Ù kK ä)¶^Q¿[s© ;ê±è—ŠºæAŒ}·á“õÖØvéxÈÏÞôc&6ª8çXMA¦çŸmÇGi4îÑíËd¼ÜN¾œ2Yác: ê -Ïž¯b -^®#*þl…$lQfÔ¤ bëýêGýéC¸Î9!çæ۬ϔ sÅQÙT _¢­Ùi“·úV£§q]m•õêlÜÞà>Z‰ªm¿ß;Ò=üÙÒjÒ¿•ÁjÔ¡‰¿w^1¸M?q…¬ãÖ$_›Âý)¨¿ÎÞŸí~“î![žI$¦£7EîS<ÝCÐ)̸ք¹Éÿ°6ô7˜ë7–(g²e‰ÆN{ÕV¸©#œÀyÙ’‹°ŠC•<¯4¶Òc¦tÒkš~þ_W‰¦EÝ¥¤oÚ»u3G°ÒkçS ÆMPÎåêö~ 
Œ:V[%|Q™Ñùëµ™4æÉŸb•KiÉ—ˆp’ˆí:DgµS:¢ä }ú•I õ¬· pm!lMÈ'@õºe”L§Kß4:ØÕß´›Ê“òÇþοÒëÕÑxc~¼o™˜úšBkæ[LvVŠÜRkjA–ÑÓþå'ÿ‚¤(¨ƒ©“–™Év›Û†/\½d4Û[6MJI|‰£t+NüIóâg×CbûK·Ô?b„ºV&hŸmì½ãìq˜‘zÏ~ãHó‹Vo+!zÊGxø϶€Uz`ø2 -zŸ8¦­ð:çJ®Ž÷°­Ê*™S@wD‚G'U 8þüª¾Ì´9TF,>ÙX{¾1=ÞElèâjý–R¹ß‹žQƒ}¾)…J5rÇÃ|eø9ÀÛ<ö/_ßþÊNd,`›»I°)ãO¾l̦!l>ŠÕ£emDzíoø—?Vþ7]ÊØë³\êm‡”ñre -§ÎòGµŸeTê*øÒÔrÎè%ßé©Óâ¨T–K j¡ -ý(_o"´ó?e±ôʶñ€m_g q¢”*W.Έî\ °QzÝ:îJ&€êŸT_éÃ\¼{r‘ÿ"¤D¤ ‡ÓvH‰ùGV¿.’ZhbAY -pI@ŠéF{ÿ|n-Áàÿ*N¦_xån{ˆ^m0¶åå™x[­„ø+2Õ¥äúEíg\¨‚íÞ‘i6ô ËxÍÏ7>UùAýÒWø JaâŠXÆS‡,¤´w X’7ëijeïÏf¾¶'Z†ÏY>NQßÇ]#Áùì#_„{¼„÷òšÏ·ÚVòñ~íå„D%g%Ng vY9# ŽÂåCÿQïÂwðÛ—YõJ5âüZ”ºHkß¼îºü…IɃ˜¸¬ÓigÃ"äROœ ,ª±SGrÇ÷ -Xiš~˘ùƒ!ÔC„ƒ¢rb‰w»E¼áËE"p}'c†ñí»ûvàˆ¢sˆl°Ö ·•.{9çŸÞHÁRïµ;¬¯ñ[äÇ^ÐàckpQ߉•VÒæa¸ÒÍ—[Ü%¹QÿÄ,IieÍËÔ7ò;´¸µ‘R_}´á ëÊzwnÓqÀ@ÊÂË>Riô­É>ä˜uHN=?kœÞ~kÛB¤±Hˆ â¡‚B0qoÛŠ£*ýt‰3WjÜ@^è3î-1zE’Ž¦ÅçiZÿd¶Ø~ •®¸z¬Ï0ŸõQÛw#ÌD™4m\ŠaÄ·“íÞ¯¾Îÿ®uË!ŒÚM®·† ¡¤®¶B&ŠTS õ˜ï}>aDîÄ¥äÔ„J×Ë»JFp¤qÅÑt½¿[͉3/v¨'\F ‹])OÎ;’ò+ÝÁÜ;Ëe¤Åˆ9ø rIÀ$(”´·÷ÇUrë”t>“¨÷L·¦OÜ6Äå+>Ro›šŸØ`ï[†îÿ\§‘Æ\À!kþ‡Ær&,!õS¿ïÔË¢›µ›~fô¯ŸôðÏx`0ÛЕMY¢!䪕£2ûjç„å3U`oWÒT=Š&5€øJ‘T(˜øéJ ~èÖf[™‹p,;•ŽÉ1€I(! BË‘,Þ:åSNM¦}/òð‘C"èIii4¿ñB\88ÑFÿ“+ìZ -œäA_Q*çµ¥HË‘ÝiÀŸø„IëJ^W ÃÔf½Î“Y~ÍØõp<½$—ÉâÄU¤#·Ò ãÄÌ™&¢@û'¥æTéšÌ‹ÖGPÖ‹rs.Á·!rÏPˆs"ŧ¤¥xþsÐœ!n>ø©“l|£ §2³)äÇ£«7²?+á}AÊ:Ñ -›P¢¼ ¤WÎ(9ê™ífr]§4ù´HwGiTÎ9 Sg‚K8Ê“òµ-sÜÛPë˜Ál¥×S‰:àW9k>±"A¯€Å>Ïnœ'°î}l9ËaƒÅi!Î HZè¦W88ô7›È—¼‘mBêi?c¥² *éü;ÊU'ÿõ5pV&A‚[CÈ(Q-e®D¦êŽ³ÅÚœ£ îȔ⌸ÀÚÙÀUlÕÍj*õZÙƒ²õ…ù¦S™®Â0ˆ8å;ñÎé+_G½¼ÈJ ž#8~&ñ¿ )ÛrN1¹¤pË×éÎpëœÿz¤5Þdr.up2¼Ék–PÙ jÁ}¹».Å·ÑæÝRÇ™1ˆ@‹a˜¶^Öúÿü6¢Á$ô+ˆlü:{°…-ä¤ù–Œ¶q+°-fv"ÅŽMõ7µþóOoC -),EXw¯ã°ÇªëuÖ¤Ä#’Ä×.Ð#ìä…ƱµŒDrÑRhK±÷>¶ÓšÈˆ?xbÓ—ýQpi9ȺE“>ÙÚ¬ MlÉôȹ(”×!9fš.»Z;IÚwE³6#®žú%ëwh6âƒH‹çŸ¦y/òïõùu2µË½ä¦Á¥Ž‰ªårëÓ.õ—žàF²E"P^„9Ša -Þ1И½¿þKÊ-’Í7º~%÷õpÛîû¸«[Aš? -œÒµíŸBTUá.ÏwPø™Š”¶õž»”êâuÿ¶­œB'*ŒéçÏA'R ˜²ß&²O¢€ÖÁâø–¸7]‚Ñð2ãä8"u[t â+â,e¼ŠŽÖÖO8ðLñ?u}öo€¾ï¹ `%`S ÅÃwæˆ~¨(Aì›ý«1¢Öؤ¤‘'tòâcþ+/™óœjÜee ÷…L Nù# >z8A0ü¦†+ò²ÔÓË9âYóe…w*Ðú.J¯¦Ô5ÙÜg-+ ¡²õÑô€høä•r…ÄOÇ0#BOFIík?v¨h’åõŽ™¡"(G5/·#%µDÏ4CžVš—µ{öx!Mr;¨Ïð‹IºÓÊ..Ô+q[/ÎÒÑ`Uj]Ö_âsÒ¼-½iä>hf™üUfì[±”~íи–†1#+Œæ¦,iDSíéPkÂR½ªâjˆuS€&à0¿l'ß”`²ÿü­ïä:„æŒâ(ÌPצÇÊÛI¶ö¡%Ę\IëôŒFwûžíd9ˆp•”§ ÿÊA„†ôaIúò¦d7ðÔÕÃQ ØË7F=ób×øõ_Ò·ú¥éé&=Ö  Õ}]µ³c6->µ…ìeæ ¾į̂ݛ]A +¶ÿtUíãû¿¹ªÚÜ¿ü¤×Ϩ)àéñçöµþršR+ԛŒ#A†¸ø$«°Ö4#1ž~~ ¹Èâ«Ûd‰­AIµ„FDgrÇQÓ3ó¡ "g'Ô&¼ï¯çf7@´;M _¾¿ÿ?,±_úºµQ8z€…aˆM6fuJ·åìÚ“ êÎ*"¡H4B! 
-€Ûª6ºƒ#~êqª¥ë£Ö‹\uUØ¢]œÞAJp(‡2”QŠêãÞU/œËFî~Z`ß°ªý0†´]¤Ø*…#$àC]¤tÓHœ¬;è%Tq;Ý×+œß!¤£Ç–zýˆÅ,†Â·‰ö.#Xˆ½¼~’6–]&¸ÈôÉzÄ·ÄW}+’~€iúfƒ-ÛI?‘f×C‰€¦!´Uõ€¨ìG¨°÷`OÀ£¢§ê´»â¦÷‘Z€MÄ/B´Äüe=Ø'¾…^~¥[Ëâ©î«@¯ŠQí:æÆ–ÔfTKR8즠ê0Âí­ûá/ðFL¹­<òË8 A³FŒRߣ÷½&¾,cYCõ2ï@û׉ވ%Pe¸vÌýrt“o9¤FƒSÄ*§ûR'¦Ä¨’¬®Õåfð—ïºÝaZxIgEMgÁCÑpR'Ù"ƒôâ׬ ‰Jbj±ð5JÔŠ’•Ô#s“D ™Bmy¶h‡æEÇ ÇÍréì±6Ì6¦_~U.i@ü—LŠp€¢/¨²ÆW³.^oD[[Õ|Э¢a[ fx¨;ÆXW©z¦Í¸r÷V;_LœEå.cŽý;ò!¢ëA2‘8Çļy¿Ô-ãõ jË87z2|‚Rïµ_àÅ—Öƒ¿ErŽÁy£fôèK±Pöîíi²{k.ŸYF_‚Õ ž54ÔK 9²¶X½Ó m>—:¥uØ6DÅ{ÂdÔ8‹ªJL Ž‹†œÚÉm—Yöi¥m=†Cži}®¸›æJt>¹¿Bíí:å¦ýMÙXk$XyÖf×Lãø†›‘Œ­Å‚®ÎHïȲm€¼™y$Œx¬Ö§ØÇÃçŒûK¯AäAuXõ}p–bû -ÕX‹³ýÆõ[·/¸'à~S@~ÖKçGÈÉ„MÖöõ zØz& ”ï ÀéESgæJä´å ÷“Öòú A±FÄ o YÐk!æWÖ:Äï)¾ýy)`&ãŠðÀ¯Ét ®`kQqQúì½¢€-¯ZN` >  ‡MIý -UhB€!Pœ)•PÿK¯Š²ÊzŽ7Ø?l-ä%ëc릶“¬¾™8ʲƉµi=7àm ÂqÆùÄ 5}.Í®¤ý(âˆ@ -®FÜÁ&.TÐ2›0,uï+CT ¤kýù@®’Ẕˆïa°/9·ÔÑ›&ë Â{åÝd§›tõz¶P5ƒôÓðûg¼­¸ÛïW¼…á†>=ç*•gâP‡ê¿ÁQ§åœ¼pð5‹Ûãy¥¨R”¤”®ø(¢Ïæg´Ä‰Gõ:V߀& 9Yêf¦÷ˆ¡¨a¿a²E~@ -ù_“ËQÜÑÎ"ÒÌSš‰ðGã³"”}¶>€†œêò&fiLãÔÉʹ'VvM—òn‰ÜÂÐìÃ[Ô0þ1+©—¿­ðíu‚¬uDž&¿K){pùÁ¿zk a‹5ÃQW’1) ?­®4ؽ­à•Úâýd3YKÓÕï‚èv×v€*ªópß°¸“àä¬A¥³IÈþªwC@èŠ m騒E´ è -!AÉË6t¿Û±û(DÕÙ{o©¶@µw½½µPèw€åÜáߎ˜ñ4~¿õke›#,^’?ë”U¸íçd ŒWo© -ÚyQ4âÔØrLAÒÇ8cXáNC èsërO–F¦ù«°oý7NU×W,»¢D†[£•;óô-L©S, ¨fA¼Oò*ò(©xcY¾‡”Ñ°–vêÕÒ΀®›•uŒˆch(|JŠã¢Ó šyÕ 3Oì¢;±–Z4Ý“µ¬î° Ñ~†_§#•‚MÄ”°ßY©”ØP=öÐÿ‘âàÊJå¸B!» -ñi=Û·èöÐ?égxãM&~HŒäüQ/ÐgaþÞ¢û«X`kYðdšõTë¡B¢™CtŸ -ÅOŽ„̆dÞD>?BÜWÝÅS#Z´ccv¥é‰Š½nW³*¿¥š@ãMùˆ+„„>9iïïþâ&—Tzmƒft4¶˜•Y–O)NÓÙ)ãE#0,*?©]j¿4ïBSlŸ¨_ÛZ´ó;<ßä¶ß¿Ÿo”u¾Ô‘³”Á^¿Ô‘]û 'nÓZSÑÉ:„²=…WÞÔþ´oZdËþÆô'Šß„ †ÁDÀ=«zTÙH®Äì7Àë` 5-A†…‘‘´…U3UMÞõA†ÍÁõõö[`Éw‘[žðö©¬a®ÅàT -d‡ÌÐïÊañ¿pæ™5è¿‚ÑÊX=ÎT'v|¬)z‹öÊŠB˜ -<µ½½×ö4žZØ:Ù¯–íCIêSFÜc‰yË¡ bD€1W’Ÿx¤¢êã•E MutjÞk)5éqŒí„Ï׸5Ü<ìÅâ9'h:£°SOÅÌñKù4K{½~t×0.J3îºG <†DäÞ±ß,l`sÒ©3{5ˆÔ#_µ÷­AÓrd}ó;‘%rrÆ™*Oå̈Ñx«†ë @}¥O†pöÐ ¼UÉ]Šï™õ%bÇ'ÞâØnŸQ†ŠÊõ¦¿%…Ê-+ÅÑ-²ÏoôÜV"Ç>›©[† ÍXoïʥ脢Õ1慯ط¾i„­Oó£¦AW-ÜzIà­‰iàÌ`mÍÛ¾7aÅú¢¥ÀßD&ï7šÀÌ”ßúu)Õ´Äõ;#° mâ]Ç;ßiâ% 6ŽÑEIÅß¡[W§êÓ4Os‚À;{Ó­Žš¯­Æ&"€lD4烮?j§yKE•ÇiS©c¡¾ë‰×uу֊æïÿÈ .b´r««A2n¶²mÑ;½fmkAðýTÙ¹â«Ó•¡ãcoŽYP5å?ßØQ¾2MçáMX)œ•#´ 5Lh*npöZ<-V‡bðà˜üŒ•¿*m ùwåøÒfüuxðGøfœ€ûŸ0¹”\8°BšL{tÛ/1ç±¥¢c'á¸ïbïêj_Q†J¨-B›­ú2½¨m5fÄQ÷Ž2 CÑ^`V[Q¥G 4»gÅSpÚŠéž0öûƬ›&Í57ßXkºší9Hži¤>¡DNMÑã¾"ðc„" *±‹²'ÊIèÇ­ö<^°V¢ò§„ eP*±£}3 ¾3p¼7l fèZ£WúìYE#J.0ù$ß#M?9]«'"X#x¿öÑÒHßK/,mz¢™Ê((N† K©1]ÇÞ¤Gl‘Þ3—bÓ Nä‰wžÈŽ.]›bÅêE0Xq“â €ã™ß±¤Ö£˜r×ÎÉLPäzƒp©ö:*uu¾6W:Ò_Škt‘Ìš`?Úý;Í<$zÇ(zlݤÚþnqÊ lxÂò„˜½Œ¥jÈú&6Ÿ3±éPZîYÕÃõTéC¢OÛ᢭ !~µ}âš¿JHOØ¢Wt\£ÁÚŸý)q+ü¶¥'‰ZwLÓ(~ÄìàÃ|˜ÛŠ…0gE¯úD'š¦øŽí×ñº¢¢üí+àÌI±¹Vø|cŠ½ãE3ðþ˃gfŽ­£íY´–ÆžÄî[YDqµÒCRà‚踟‰ôÞœWׯyV{ŒMNò~âŠÇ¿c…BÑûK\_µoœk+ˆÃfx‚0@YÝ/62dÌxV¼¤‡xÔ L>;¯’MìL"¿uZÏ"ûÇÕ±Në”cgå\ãÜàÀc¿ùìS_?ä‰ß9ü\l-Õê3†ýÿò7Ó÷:} Z5zŸ­†˜a´tÎÖô¯«6RoÏÞóîhõ¨cêdig#ßöÏ%ÔÐoÅùÌ DõéØ©½E·[;˜2hXDàYaDÛ’Ýs¡é²^PìæßÒ,8¢~vØû"^Ævc¼|±*Þ"qøù7¼ ¼&å­D@»Ï¯}ã´X‹P³‹ÔÝ‹-FYÓg'!ð‰´YÄÄ5úÞ}Ò¥ìdÏL‹d܃ÑüyLX–#ŒhÇõw#À×׋kJ°A«ýÍuFjAÞéQzlmK ¦#æÞÛà"–÷@è9’~úÞÝ™ã`q«É»Kû좶фé¿~Áÿk/w!údl:KÕÿ…#€B}¿”ÿYœ¤g^’Ó–û}DS—O ìß4 Â'ÿ› à‡1 ^~¹©ïàeأ澳XœK›Zá÷Z$Î×=^ókˆTk¶§v¨ó¤t1öÑÐÔ<ßОJ—eæOŒ¤÷á0‚ãN ÿ Ëë»Ö2uªq}$zQ^˜Ü=“÷Žl |þy|ŠëüºÇüÚTîZoU¡Îr{æÚôö^I24aâï¾ïGûã,0û•Nó™/¯;î¯!Dø®¨t}ï6¥¨†ô˜yÏd¹ÌÚl!‰LÂè‰}ïH¤¯#ø*+xÎN²ÁÈ»~:~Â8 -¦ôoÉ—/”ÔÐ+vGÉô¹ÏTÝÛÑõ5£T‘^“ºÞ=DoAg³E¥­«¸ïP,V]°*ôE¾ªeË£° W-S´9¯ëøš™ªNh CÅCû<®vŸ ÀJ—é¿Kb)×ôó+3_ëó‰9v(I5ƾþà„ݯJ4£Pv¤Ó^•‚(ÎÝê/ÊB¨w#k„k!¨Bð¯^ñ>c¥®³'*GÉ_lÓ¶D4(Lg 1¶ ~Gñ>pSI'üÅ-+rë?eFÇîÿupõGEŒC„YÇUïdˆÌ¸m9Ú!2T*‘GdªÞI e¥žÑVÀØCýx¯ö0‚XÀ“%/äÀR…LÂWA{莫9‰ j`g*o¶—ý;ÇŸ—&¿ÎÃ$è<®›ñU4(@î+1Õb¾ØÒÕKh}”):-=”SûßFþî,¢ã¿EÜÑÎòÐôCx¢›¨¨„J h2(y¿$;º|%<ÎHדaJŸÍ °{ÂîÔ:CõDû»‚=÷ï\lôœN4XLŠ0X,²Û£ ‡æM㔂-Œ•44é&EM’ÊŽL~ 9ý£ñLÆ2œ‰5‹Ï;ÂjÃé«Ï®>h­~4#±KIv”lœ>ÿúÇÔª6Õÿò¢Á/™š"ð—3nï™-XµÃÎ=Ò¿Óp $õ3Y®X‡#S²žI%›’L£" -ö0 ¢À>fàíîÆCkÚРþ<øB™ud ™kˆàæVˆ^;Q¨`8^;%µÂ[£‡{¨`ÐÙGUìbœa;¡%»vI$ì5©oëüdìßY;„|ñêah×o)*(²¤k:`Û¥¥±‰(&Õs•“R1´FX?kƒöFuÚ°ÎZ Ä™ÃÍVC¸€C5Q ^CqV€ÑUCÀ™ØïÂkºTŠ­àïØŸ|ÚÖO4ûx¥˜ûŒ1† 
-¬hÏwK¥Ï½Ø5¢÷Æ>ÏI0àC•¾)¦+€eká1«¢Ôû× £¥%+¥ý×}ŠDÎÚ9/pÅža>]Zõë„{*ܘèD‘F­+%R=ÑðF‰.Ã}–ÙhÒÁ$&l£çÆKâ¶âAûįEP-gu=åJEzD’¨•MþfÒƒ i„?„C4nÆ®Ñô¼‹„¾þO’»™a\*¥öGeI÷{=þ9<Á¯VŸ¸Ã‡Š0.æ¡ss8ôQïíDaP‚ -§kŽ·ñ$”àÒêÅu:® FŒÝˆ—å]mê'Ôsâ Ñô™žèɧ6B%k³Ò¸×¨t$.\ƒRð?Á0Ï3¬4û`'<ÓÛÐãHý&Ô¶Hw&w¾ä;"EtÆyª^]4.Cs~SĽpú°‚èMM©Ó÷æ)dHl:¡;•º_‘QÃW‘e\C´¥§ÇLßÿ‹*FÑñŽª¾Çň@ýöÉåÞü:Lî"n¯!¤/hb*A¦<[Š–ÆGP.x ÂëŽÌj¤È+ÑöF âtÇ?FÅE®§0g?”1_¤@XúŠ-Ugæ¶NGý¥™Ú’p¤{RšÃûI ¥­ËL±¸ µßbìþá“9¬WYkÅI»VÑÖ® iɽ†ØFˆ*š¾£ -éB?‚ï@ažè‰N•ôQÆ ƺä­•|Σ”00%õ(?ýóRÃé‘ëŒh¨Ø•\Ÿ°6=4ŠqùG5(‚wºÃ*–$¿ 9霛ñÃÎlÓ·ž-jNì#¥•º×jÚJYqðŒàEËX)ŠšäL— ¤ý–žaãøÚ¸£„¡ª§{ežâE½=J³=#J õGÄcòàëÝ8ÌÞ'<2Z4F´C·§<½6ZsúJwz N+iSÎnˆTÑc)_ÿžëˆ?ïì‚è+L`ÖLTÖYé:8V/„^@Â&½Ã–aL|¶_p#ž7Lù»Øn\#%*´B‹KT¡ØzåRÀÃX¥ö3Ãè^\Ó±˜.âÞ0 qsD’âóÕ"Pé…cü´Mqw 3VbbÐDj å”xÏ`[øö^Áï¢C¨=¸ïuHU8â=Ä~EG—&¾p_ßç -ÂÈ¥¬ädŠA=^Mh¦æ -Šû…vR%j|…Aµ’ vÖ‹b¹ËÝù•LYõUv*‘rx,KJèG3ûÞ°‡ˆŒýä ºw²®´Cjkì%o [#Îhê†':ÀÛD\‡^C5E…W¼7ÁFj¦Ý‹¾&ÝBbTT'èš“24Aºë\[]% -\Ü“Øò ‹\zMc 9X€¤–ZlçÇ[ígœ\"š‘ÎÆ¢iu‚%{Ÿ=ÉWg¼‹‡xGÖLµ·Ží9\ä)h2LÝÞµ«%d*x“Mq7¡ê$€â›Ÿ¹i_ÒàÐ;nÑÜÛGùìÙa'ÇÀ×íNá…¶Šfï³:—Ufh=#9F#„óðË53Âáý€XÛ5’&—p¢R;gîú¸k#&9¡¶àJž@g3F—_qÆÏ¥E½rý ð/êñ$`&º~11o ¤Q^úta½ÓVñJ×ê™_‘^œ`r,…ªHa Ñ" é…¹g²6KmÀ‚$Jg®!hE„åM'g­ jéGÄgß :‹ü ÙCF+ꊃÔÈŒô©I\Ò(/˜Ú«‰]±\ij¹æêAÒÓùe&&ã»Ïh`ö‘0ÑY8zZ‡w¶Tu™?Ö ¶×ñ ¿k¹KŠñD¾epMSî{‚ÝEí³à¥­!Œ*T¯Õ’"éß]œ1¥WÙLn¶¬³0¬-'¹..!ÜŠŒ\á ‚Æ“]¿Ey„|íúhŸ ð–cYœ±mt ]—º1J£®ÙYýŠ™_g$¨}Ö1ƒÏì<ª†Ðwxhêñx[üo%Ö£È;ò)€‚1Hž˜…ºX5È P+ž,c¤ytWºâÎÆÅ2ãMÍpUÁJÖÝpˆÄÓ%üsÖü4Abõ.ÄõÀÌ¸ß -ß#ßOÐXÈt;L66Ì Êkˆb'nŠ×ë»PëSE7YCRÕ>sÛ·ÖE~ 2CUíÎ+Žæä>ååwhŸ+ŽÛ¼²Æ#„§õ´Â_nà“6ç=ÊFA¸†Ì=èwÎr92(ˆ”ý@cî¸ÿ̪æz3ÙÂg*ÑGŒ¾õe$‹•é˜ž˜ABfŽÆ\E3 5¦œR1câoóÆ=驵 OÓÂS{fžHý†¦°v{å>ºy'?CiôsP3"okVífF™¢åR3ë÷õBž¤c¼©µó72ëî$i­aL9S&ŠÜ -¢¦õ®¼—#k|MÅ“ûëÿÒ/ïé ÔËÓ4“ Íô:F¬ÞÉB¥Î0ò;ÉbÖRFÝC¨£9\°½gQÙ& ÌûkÈJÌŒxŸ:5+^Â|‡9"sóó¨sMJfÌŒ8¼ìZwŒ±$KœVÖÖä:úóŒæ÷wŠ‹È# ›¹’ü¨H®­ä ÚЄÐõÈ;—O ¶@ì:XˆWCì*x:ªD"6Š´¢Ï¶[#­#XŒ„ .[é¨ ÌÚõá²<’VyýÄmC­{ˆõ·"®u/ÝS)€H\¯ãÚGƒÔ°€ìI_O1…sX$‡PBÕ†ô(  ©3È:Dg¼ÕiÐõÈ]¨¯Mñ™²OIËô:WõƒA¯QŽN!Ø%€Ë]ùnT©`Ó† )óëpØÞŠ& -`Od—Mˆ7ò¡’b±uÅ6R#4o®k™z2Οb‡Š¢øÂÀY< Æ]5:G¤,­¥Ó“¯„<±1×w%=IÓÀ2Šj¯‰®v¢1–)påÂK«AÅýÚ!(Ãè+ œwÊ¥w”÷ÓR.–Ã|uÕ&›€²…»«(Q£îAì|A*8wì˸ßDx~ß `&jåþu„…(tPù»ÂñÉK€º*àY׈l¬LœŸAy¡…¬}ï¢ x1B»[¬ 9i¤&ï–ˆÃlcùIV\½6Ás†zS -“Â?©ÀöÆäó1ä $Ä›$v½bo Ybgþ{—…ã|F|s9mñÕ﹎ò¼¸êŠ)  sµã¨¶Þ¤£y¢56Þ6@zlŠ 8J.™.`,±F@¸aþ¾w»Ý\;ìÿq—õŽ0Uïûn€ÍiÐ_I–\?SHRÛ3V7ÿjb0LïïZke³‹†‚ZSUÏW%ýè®AêØY}‡!š¾¤F® -/é*Ì€y¥&O†°-*ÕŠßÓŽ‡Ó'1VuæRÞß›#óÜLÙÎ: 標 ¤´~+µF$©lÝ`N ¦RxaNw…µx¢U.¥œHñ%$ -ië®8לzÉúû­0}U_TiB<?ˆõµí„+}¼À!x&ߟDaëÜKŒýŽ_¹D2èë{F!ñÓ¤è¢Â\ß¡Qp£1{ 4¶ -Ô¢uç`c sË\Ó(r¡cŽÒ°ð·‚3Ë[,RG@gbÝ“¯°·§#diÄ|‘­É?oÕ¡©C3)£YCn"=”õsÞ»*ž=ªÇ‘"O¥2‘Ëß±÷XaìþeL|Ánö±ù 0ŸìÂÒ犀Ê!ûàüÿ„§ñ,#‰ÈD™º€wjµƒ§±1Œo´½N<5®uØŽo§¯z¾ÄïH<;svÉtFxŽ†h>À±ª*š—Ä;ŸÊËœ;RÔ3U€:ê×’q$kØSÙÔ¤ 3®˜ÏººJñ-[2ß9~(ͦlà‘Äwr%›¼]m¾‘.»¬N7•}×r›1i`MñÕ*×\ßêJs¨G>ó‰|ákzô:ÃÁ‰ôêŽðÐæ ÈÿŠDŽíD‡¤/k“ú$Åà€¾ou†“º6ן[ì’§Ó{îT˜ˆjãqÖ¥¤–°…i.¾{7¹$ Ù,±ôœ‘ƒÛÑÀ)\‰oÊ4AWŠ%°¼>ŽðÑJV´>óú.Uá¾y™7ÙÛݯ/$Õ¼Jme\ì®F¯bw¬I— ¤SÛï(ü“ó÷eÇ«žêäËê3‡§…¹öCDBÀÐ¥Z9¼i(ô -bfÈÑ£ÂeC@¶°ÍŸXÀØ(Hk³˜½‚½ã;*Ù=ÖžŸ“‡í6"íã[Éb™¿ò˜°oqr‚Ž»9'ÌdBüƒµçÎY{_Ø ëœsdNH(ôbG¯}Ø< µE+=R+qZ¡CÕ ¥M•ƒ³R7;É2טu.葽$¢ h$€¿:£6Øð:Vå/xð¦7°Ò+͘HÊ©l↶¯D~q7E(÷ÿÅœ£@Y%‹=vEÆG»:Ô¿Gím"‹J$>Ÿý¬w\½© IÎ{üíI"tSq!c±µÚ¨¯}÷ Í:zB[ºkëB“‰+D*œÔb%BâJÏ“…ƒi ßø[Î"#D}T›9fN4´Ä¡mM¦à-)T^TQÏbÙ·ðëMç±jƒÆt€ž9´©|¿@µ÷×™¨_}?_ïø§P™ù)ª‹ölÍ[âk¼ÂýVtð‹õeÍÇæį¡gD”eu„bQ¨[¬z{Š.\‘æãëw®jÎÆwçGpõ¯þ¨,­€åLHwqųKšöŒ=#ýÍ*é>w9©ÌXæõ=gÉåŒÈ¨û"#¡î’ªþ<ã~é—ý—sêD|Lk²™P¸Ž¥èŶٸu÷V—¹L$ƒ#ÄœÑV0è†r£öþR>)C¼sdÞíÝè™ yè4\µúNJ>Wzô 9Q OT⺒IŠº»¬‰5@nJ9Àd$œ×Wõ&ÕÕg|…Ç•nê3o®®”‘„Š7™ÆQ¨ÀUÛ»GQtIʽZÖ’y„wU‡4µß Ì†]îÙ¨Ž…@ËTÏIWZÒë€äªÃŽ…j+¿ úýþø®câ -¯}è=s)è (b¢ðçÿ ì "3hCÚýGŠ¼ÄSÞ’YL*ë_ -žP¨ôü,›®üNiœ<¡ä[Àf ¹¡òG» ÆLsÂ3:#ô³¦WtÊÕCs -Î!³1¦àAT­A)~¯- ®ª~‡=bÜ|½ÀÅЃ¸6•“—èS~ V‹¾:- c"ó+ІÛcl‚=»|+KÅZÊLå}QžNÁ¶ÆpÅõı6ÄÆ‘ÓFÄšèÀ€A'ãž«î9x ŽÀ©£ºõ¾/·+ºÔ`©ôÒ,¨~þˆü·á@âc­v€£Z‡¼éõ ÛÛ³®Vø 
‚JlÄ´fÒ¬·}ŽJÌîúæi,¯Èýɺ™¾#¤L4ëÙÓKêLX[ÕSœ4&G@4¤˜‚«xŸˆù§Y¯·Ff/+•t–LÅjíï ÛŸì Y÷“öZ Öéñ./$“8?g…Ƚ¬¯ý‚1A -®L2.öó=ç‹ -€ˆšó/¢mµˆÕ‘¼„’`ŽûM÷ê`ó‹¡ú„ úg«ïý§ê×kà tî]'ÊŒæ@Ödøx³‘[< …óÝ#"¥è ¾ãýîÂÎìÐ`o†OŠ çZK~'$î‘yPùd ™FÅå» §éÔ~3KS¸Ì -€ã‹ìÅ[Oõ¦Î‰óì=äAo|¨hßÙCög~ÓŠÓU²Ñuª4±mI"ÀÛ:ÞQ䈴P’AÚFbRûñ¡ÿD|è¡GÉ´;rÚ­OrDÀíŸ=ø2½¾#2­ôjùXßÔTáÎ]žhƒÒE\?Õ#o°HHÑÒ>¹cgZÆ©ëßô¨þÊÓ`¶ÀÿXØÙ‹z™ÝG—IÌ2JOûÔÄáÅá­£³aᮃdHƒvÅjz¢ÒDnÐ)ßψ&ŒNeú© }¼Õø‡û‰”ƒ¨lÀQ¯ gIƒ.Ô {ÈüÍ |ãå}9ª–i<œ`¨aLß䬺V^vúG½[XUéÊÈ ñBsø;/»¿°‰ù ó!Yß3÷à±Q»“p@Ä•ºRj×qT¸!XB FÔ†ÂÏy=®Rzu‡£Ý÷bÆSn]ÞËGXR:n‘¯Ð•Nµ¼ØÓÞýÜ5;Ä«ØdtpExÅ6B‚ùô„1ÂZÓ-ôÈ;5~ì1…¨5á~ýLÿ[¨(WXåWÈj…ár¶Ì˜€Cz2%$*ÔÓ”~ R*ÀQ.~#Þ%ý®BËÁ&éœ\pî°Äð€ÀCcR›Ï´ò×$jýb›W<=kÐúY`©3‚º˜%ÖÜFȳšÌ>‚Îf²ì{Ævy—bH:¤ã;¶ß,Ù'i&7‹ÊEPÍc#ç|-UÎ+ŠøÑn;©-¹)A°ò9; -ÚæM€ Ûûf¨Ö¨¡žOFðú'%uü>å±ÌÃä.°×tš Xw] -ƒâ†¾^ï/ -B`)°9pûidÿ+[•."Ç´h;¢ð¼‘eÖV%´Uð3Ìó4Ý ½kžÙSÞÔß,É­$€¸¤i®Ü… D¹áj›ù F÷éùC<‚ ¿šÓÓ¿H  NH¹5Ñ¢ryRr¥{óštxVð¥>»—Šf tåzùô2÷ÈIõ¬{½‰ý#œvq±ü?–[X!*Ӫב”Ãäì4î=@C øŠÈ­º t÷ŸzÂd‹.€çGîfMäqEç¦èÑ\Ÿ4#=É.õŸÈJ†©Ñ#rzÁ“®!QÏæ½A|¿=Ç-y˜·—æá:c'‡»˜½4+ ]ÏFÝ![’~û'. >óõ‰Ðh¬W §ÑDóØdwÄ £#:7‰6Ú|Ctá+-²–®2w@áÚÂ~¤%jç ©Þámu’xg—ÏÂN­.}°Yu¡7gbj–s(¿uyZª×Âá¾m_Yñá¹+udh¤×QËéÆíŠÌûŽ¯*ÂŒUžcÖHWZ:›º«©<ÁÆϯÚŸÈÇÍ£Õîw¡ÈÛÌu)éU•k‚Rì•Ù`õT8²ÓK ¥:Gj%oÞ Ë:xÐõQâüx”/´•WÎÓ=eh¤¢ÿ|mOÎÚ|$û”¦(HGíØýø' ]–ÕëÕ¬w~’13êUïæ¯Eäem5ê=›Q -ÚwUÍÑSƒlhIÕ|#¾¥¾Ý¹A®XÝžûˆ2³¬òŠ$¹ß!nÀ`<: 9"ú`»ªW nyñ­Ì4_ç‘`8ë™6]l¥J‘£"{…¿{E¥šzz -\E’ÑÖŒ]¿÷Û -”G΀X4ž01qF‰°ö¶äéFeJÜÖœsòTèè]_ø(Óƒ5¤«P!—`‡Î¨FIÜöx}u™tÖÀå·WCÇ£ÃÜ… ^¹(}d`DU”Ún öì‘-‘*)Ò WfÁwÅŒp,ÿU Ö©ûô­"ªè«4æ…%Óäö¥=ˆ -Üﺔå%k±Ë*-p¤e„6voºD‚fßD¢wòdÿÁ¢Š*yÊD¼6+aŠ¶áœ×.E bmé¸Ó»pàÃ:¤ˆÈpp{ŠQË›•‚“ßdÇD›âŒW®èë­AÄj{Πu°ãቤ§Öcû(ß?½Úwxšr÷w+1a‰<©“|E+˜Fo‚¸C#¥÷ÜypêzѲ1£°g&^¸½>¹â9JÑìkùß>¥aѽw“åÛù{—~—–†°¢Äê‚u¿ò[,p8‡%hŒƒXKöÞ¢Ž›5"mítÕq¡Ô¶Ò8ĈB¾ -qþ*5«:6½ó -tÙ7Ïö„ ²šF¿îͨZÇði"² [Ò"LÄšJm¦ÕuE˜Õ£¬p¬<TĨ:wXÊÓÏ[Êë‹¿­ÿŽ^ý\ýCOUQÛ¡?À„©æ±˜Hm˜H硫”—t³Ï#ª3s=zÚWẫuè -¯ŠÎB˜Z׆NFlâjÚ´PùSÐ$—š^ȧ,u쪜ùJ#?*‡<ŠðùºÁO„ª“’‡2Ïr%·Ä…×Â#Xág£uz;…(I”ïï‰ÁÕ¨¢q—¤Ï JXÅW9ßí[¤õΈÍfY»…~½úû É]þQ`Ðí­½k Ç,±ÜN[¯Nç e(Ñþ:0fŒ*Ç×íü"%ø_»\=ÿ/ ÜOì¿+i'¼û·lþ[I.X®fMÍÍX'Iùa]ÙŸúÑës¹ÇɬÂ?Qhÿ&Rrg¼‹…_ɲüyF³o¸˜ßÀqIêF³·Œä¡BžþL'¾>âŒÎ^à¯8fìÐñ~Gû_ÂÇÐS£›ÞéÖ@÷+L2@·` ±í†Ë$yìK!Û Béà„⎠í4¥Ý2m(x_S÷&õ¨Ú!¡LM µ‹we -VF«#Ie¦á)–„Óšý"^Çõ–ü ˜@)oÄÖï°‰:Óß­h1 a쀹 II‚‚‹¡ ‰ò­>U)´hx\2½¿àGôÀO²`ErÊF0h’ô™‚rTÓ)o‹”^¯/,½p®ãKÒÑ™¨¿ª…Pâð=sîg›±Yž¡M€¬˜wäôbœ‰/sjn˜íh‘ÉŠ¦ÕÚ')á.n -¶ >ê Q×]¿£ˆ2bÿÖ–!#âi6÷ë,á‰õH¥’MsW¦ÕÞê½b´ùÛQ¦½ -yïŽõ®ˆhÊ -;×Év:M ú߯€ñöeÙÀSÿŸQ“>ñÐJoSÜyÒù8ÑùŠpÏ”ë°Zà x4j|ÜKi|&R^Cœß¼¥ßZVGiQú'Þî=ß›Š,ÃИƒLʬ`žp‰\ÚSÁ4TO#µ±$ÄxÞ¸âiÒq“®øŸ©¸’v‰ŠÞ ùû î6*Ï`šáÃuÂm¿§6ÊìãµYX ¢¦Oº™à!IiÀ„¨D=ïÜöhÉAC; £;ókÃ!o" -·~C(1ˆ- Iêù5èÕ猩d.µÞŸ÷»’‘£ò'CfÀ"3½Óol€[mæ ¬àæøš6’KÉ7²ãßý%Ö¤8.¶‘;eØ¿ä¾õ—lÔ¿|%Utõ -KÒdî å8$/ÊnŸ[ÖÅqV~F~”è3S - x£»+¾éGª"½ôfzDÁ-ÿ»,Òªb$S%B/]å©à 8÷™‰~Ò¥côѱæZé  ÿ­›aôsÅ úøš¼ð¾*%Í=3_ÎóGtö“ß®Î' õ‡`ÀÑô‚ à ûV"­VY²”ZUeR¢ CúcÈ€¤Ç½áMsé$b–\ûs·\ç(w–¶5" ²¥“¦ßwä/TØž•X¿±Î³÷ÆGQ‹ŒOå~,ðþú/kB…¢E†a…äŒw>ÿ|‰ÓJ‡—ʈ©$"e^›ËõÎVzÖèPÊVò?#§Ã,nÅxTAö8ëR¤T"ˆnLQëAb3!W¤}ÿãAM“BJmÊ €Ö3îkâF|⎼E5êJÄPLô?ŠnSsïùhxª“LmBÙ -Ýn–rï“ð^±åãÄE€ôú¸›á+ß‘Or©A:Ph¨ã?y*"E‡x}¾N2_˜ÕÉ~¨ó7 -<:óôq¿Æ€Þ­;Ò]2„ÊEbæf ¡6¸§qäÓ¸ç¢U:`5„Œ(Iˆ¤@Ñò~h*¼ ¶%d¢Ã}o_Ž n}=7_qþ]ŠudNÕ½ÕŠ›ò1ÕdºÒFR ƒº`× Ã%w ¢mŠ]N¼¿yª72E ­ ¤2m@_Ñ/,â¯gßÿzã3dÍú3¢Ñ±ˆT5¨Å.o%é;Ÿ?­º“ÑsšÚËl -‹A,­ßùÿ0ïÕòámŸý2) ‡˜¦æênp¸Î€¾Þ€7p¸‰Ìûs†&Ïûžqž.øäÑ/bÏoŒ—öï<È_wTvG~QÈ'ÔÝ5‰þ²Õþ–Í-¦këPQ+¹%—z5 `d/{©µa®×|&þYóŠNÙ“^KQ×úëßùKä=)¿òGT=ìÐè9c"¶\¢y²ð8T\ß÷þ*xª·ª^OQ&èqªiý‰^gXÓ¹‹,&œ[+"‹ÇPJˆ¨¹GàmÚÑ”‘¦“c×­ßi˜ÝpFÓ­”^Øõ*BQ½½F©­:e†ã®ßÑ -’õ<X#` Õ¥7û=ùš²€Ž´(,ˆÜ;±ËJ2HÓ %hx® tR,€¹~¤‘¤ïþÔWûõ_|ç_+­ïLw€óŠÞìSƒžK‰|-UH¦'p´+)iI*pò ¡™ ýó_Ë!Ih -íáJäg„_gaèâ»U,&æŸ'Ä™çäý3e”ë/Pjß“C*€ªªþ‚þ_Âë'\Ö¦ïü-Ý»™€F–•zÂ…Œ ÛÍÞ/«î'ÁÜ_ÿ%áKæ<_qÕyôd£•tô>ÁK5óüæFÊ -F–…ûv–»6IÍú”R áW— 1FÊkÈ(÷ºÞ)J>âø œ{'Hä®"$$‹žÚßa>ˆ`Œzá"ªóT#"Nµ|„©ƒÚákß6Ö„.ÜF&>ŒpzyŠÃ1pÇ!‚Α&Q—od=i¯YC:ÈÊZ¿ñ‘ 
G›Ç`V»‰ÇO…MÝ•¥“ø')¢áË^2îo³õÞŠ‡cêlžu¥—ã½}MÝGÂ˹·*R^W[´!Y+îvŒì³}ñË¥Åÿ‹T›ñ¿¡ûx9|Nû|Iv—ïǯ\ÄÛmpû‚3-67®>Êmü"¼â†°Æ‰þ›ÈŽz Mßx¾ÌÏ#áÇîÜñ½ú„“’O(€žd­¨²6vE³€¼²½¼QX¹hc÷ã•}]4êÖpg*‘qãظ½LZQç .yÇ>ùnµhcG±„н@Æ=Ães»;wïEßíÏ˃›S˜ƒ4øguˆƒ#×¾ )šG- -:sP»¥÷MÊ$%~×ÛØî÷Þ‘¤f Ïóó­•}ïP‰œ ¦;³}øX -ÙÏ’ôûqFRÄ?±y3æÝùg r p½ðŠY„K \ü•‘h,ÈÏŒR¼xˆŒ_½1=„S1(L²]\€a®^œ_öʘwGAc(µ¥™käÏ OWµØ|wˆ7¢L"Í„÷ì˜Ì[ŽòwžŸHð(xo,쑹Pë毨‡†¡ú¹DîÈ:ÙIkÌK O.ñ欬¸²_™'‡jm‚ˆF|U‚xçašÜz)š6©Yóûä¡«à˜à m•Í˜æÒÓL¼2bs!2./µ¿#žNpÕ°h`¯P¥ì®˜;ÛuH/è‹#ÀÜúž% š·¡÷lÔ)T•®˜Bx¸ìêP[G75-=z¡4 ÊvfàUŒë†×r^­"T‡•‹uÎGõŸ3…-PSöç,µa$}xX:’‘£µªÐ4¡‡úóÂ[;9Îß¡û‘ŒA…º_›øÄëֿ̳h“ä¢ ;Ì1dOw1ª>µií tÒ¾xƒð×ôD&Žz÷٬㉞•)§ŽÉiªßžÅ=T#:ʆ¼VŽ{G^}ù…Åú¸g’9ª¨Ò)HVOWQ…§iTÄ´qY$]7ltVHç¾Ô”G*lɱ·µ‡4C¸^<­QˆoL»Ÿu…‡ƒÍµˆ›l×±œÕ×'™ËŠÞûo nÔ–½ïúÆrÈoµî“îô4ª:‚œœO/层½­"1WeÞ}UŠ{ávöåᘕ쎃SmŠè0+v¬«RzÍÞîH¦F4E¯¢jOŠy{QŽŽˆ9æn²—›öe¢Ï2us¢ò=SV€^…jî”qlKÎWÁ)˜®×É_g1ÄסgÛëU'–ù§S¸—¢FŽÁ)@/V1êDÌx\³¾/òLOPÈžøç7ß2U€ÇH©Q‡ßyàžÎ#ìÔ‚¤âAœIË•0æý‹“}ÿbZöùš¨;~Ü#~å"ùsã»ê";Úì< -U»Í$å<#5BÇÍ^Û‘‚ˆÛÖk]Â;áaÏp.•&û^vMj³÷ñºfÓRôœ]eú4o¤Àº|’Nhþ'XH‰ -OÔK ¬sÍb û. çU“„ÑÅÖñèj¯ik!ûAK):züõÀ:m9‚?óQvMÆðã,÷Æ'zd Ï/Á÷ïòÍe~Y‰Vnò‘êæà•8Œ5ð¾ãßèI -c X¨®×o:XœVÐç½t8§¢~¶uÙË#§œbùÉMôÀ{EÕxû5\zÖ+ÞÒ1‰ƒTGoÉr^jP¶ ܤzí²ô¸—Ž ´«4ªÙwŽøT¢ÞÁlq´):oAÝÀì~¶?£+‚Ü4bâçÚùüËg¹ÌýIR¢rtŠå…ÿ¯¬™+7w_f$‘ílUÁ8Ìvá r.áí§'*¦ z7P‘Ôž^Kõ>2??÷ç÷R¥yb|¼X½â%®d¯¡±,™iÃ6¤ÝÌœ…·Ûú6ëž}»ËO}þŸ2íݱO•YFO%èÜ)Ô¨Ú®¹ðàÄÙÚͯâ@ï ¶½.1>yxRËVòm@ÂøºãºD^ÞÍ£aÑ@$è@;%MW§â"ÁHˆ‰€GŠvËœ=^õ`IŠÿ@}1ÍxÒÉÞi©Â7tp¬€02ƒQwaO`ç5þ@m¾ -øÈ”¾Îl~Fèeè KJ{ÞÊ9èδ—õœ|®« aò"^¢Å¬»L†Õ±5—Èï4Y;ê£Zb"Aõì¯l€½#oöÌîýÜI@´*…V¨àסĊðþ&}52‘6:÷ø;Éjy‚q:k~&rû½ -Ú=Y‡W’…êWÝ™cJgÝ÷*À„…‹æ;윹Ùp÷»ãÕ9Þ%ð`ý[–‹ßšØÆÅo® ½2 “Ñ€×úbø˜¿&~œ¯oÍR²ž$d!%X<¡³|¢=§²Új…]ÙZHdGø¼Y„JPúðü7¶ò—ùó\Ìz{–·5åÛÇ^ÔI&XHöׯ8£x‚jÔuFý†À¥U‘,ƺҪdÒs…Aå\ÅUbCVà±lÞ\B•À±PÈï6s¿Ëœ•»¦$!S­ )Ü­D™ -qrôœªÒÕóñØŸšã¦ç£¾š[@¡Ñ¡„ Ho ^l*ÌpÚ@›VVèFrØÖ!¥k8FØxG•æ=Â(ïyn¢™ßu:Üièj1ŸeKËE“h«òäÃCb©1ÿÚÚ}î$r Š½Û© ü¼8–Dz­Êã;Ö9>F›Óqq,¥0ñÿ[Nh¢ú(ÏO¤‘§zfÎ ã¼ó7Å‹SÂÍÔ0Œ8Š0BqP T¶5ú)Is)žQÅ]a‚?B<Ù¿è›÷/Ûgž‰6нsª/BKËÀ€å‰B$1‰h?Y yŽoùl”›q³ùϽ+mOœÅ¬ÏTUOÉÿÑÃ’3]ƒ•JŸ1‚àH~k„ç‘qhú®µ™* E6ÓÖd‡Îƒ ¡Œ‡(νå#ê{l4ùe›C¯~øs‹qËf›¹`+=üí­˜‡Û&Xw®Ý;²ö.ÓUb¾Ìø‡ É~ß5“Ôß'ã°Âc]2Oª%©¶(¿\r·¶½=ÙêëŒ+jxûA>Šl“Kq±EY×1¨ÄˆÏ÷=CyÂûuLÀü¸0nðŸÔ+¡]%yDMѺ_yÊnŽ×X©#Ña,Š!ãrQDhr?¯‰—?…÷c´$îFÁbŸº`½KñŽ†-™×ÜÕG ™_ÂwŒ»ã6ô)àÅòK¢o<,Þ'_pSÀ( )L#: ¾`¢Óc}²ïŽh» S£Íw‰fœÄ±=u €yO•nk„/cç¾Óÿ›}ƒ¹ôȳ®ÑvàD¹x>Ôy£]é ®ZsŠ,¬Ç36ÇïõÍìonýE­Hë3pZ„:UXçP'§`[ßx–Ô'‹˜dÑF«¨ÜØò÷õ»{|/n/ϳêNcHÔG½n2–Df]ôàÉgÝË·0»È8oÈ®–s×Û ‚ÄšrìᕃûT¢ê}¤ÜU„Ïf@ñH4—è!X/74¢–ü9Eèç‰9Üðѯ@¦=Ž×õŒR%} _ô.¤Š ý -øD‰PHÂ~Ã?¶}bÈÿ!¢ö(æ@î‰[XÁË*æ - u€Pb2rHo²„|¶?‹S€®êý5ã€(%‰ïã5ãp|0Pyáo1Vcù÷ú3:1ó0D¶PõâsZØÆ ƒ¸ã˜9õ;è[ùö³5†“f•=ðßôb>Ô3ÿÂ*cDg¦0ºIaœkö'/Îe?ÿËŠúO;”<"ÜJôÓ]Öøèw…匞,­#JÍ\"Á½šlÈR¸ÖD:lç8Cô»e%c„m´ÍrEç-ûóÐw«¶²ÿ”Ó:ÃÛÃAØr–›Ù€ªÚ\sÖèøå¸Ã ó=ý2·Ûúß¡Ï;ÍY9÷–0k>6¹* åàžL§#ávú^ñÐFPä9; -Í~þùüÏ“z ˜HÆzÜH -+gmœaIÞæa–"Ô=øBúEJog%R‰ñ?k§?Êý\ó ]O7ÿ£zâW)B(œQaŠªœ‹´kÎjÌ»C¡0X+jð_'AkzOÍIˆ^ß¹¿‚‘1Ì/ÐõxÉ‘Z¦¶Q]÷*OáNÞ¹> eþ§÷‚dD̵èR Q«˜ áªX¾ Pã_”üÙ¡`ßIXµü”–ݽåÏÀaC`-k‘›r¬Çõ¯d€W(ÁOQ \„΋ûßæKktO†ñ½i%&DFÊö×<]®Z—ÆêGéNÉ”³öR}ÃÜ’¥>ó·÷WñRŸ|²ÕÉk¤×÷ŸÍ¾9hñÛiû }ýOòìòùažÿ+u÷"ÐR“:u½™wö)¥94­°zàpZ J ­/–‚¼eG_ìÚÔÓãe|“TÏõö"!Ie°Ò [YQ¦é®ûb°¹³mÇQv‘H€Þ~.™[Xljx·/Ϥ¿Óá–â±7VÄ8¼‰¯X\±îd³\Ô‹Àx†³xºÏ÷øyÌM}øâ÷ÂÉT!†3|ÌŸÂÉ ©›øÙ`ŒôGp‚¶Ÿ˜ßýË7öº?òÑåšs.0‘1*5h¥>û¾¤,«t@¿ÉúLÁSc?ûù>É^.eï~“z;³žàà3§óÌܤ»Ú3³†+ˆ7ÿœ÷XöËÖ_QõT%jKID„£¦›w£O‰ÒWLŽtó[‚wa´ýøKïþe ³u+SÖ’‚LÄZ½ U.Ÿôùó“׎#o‰{…|\%‡òÍòUG­ygk[ßQ…Ô˜ª¯ùÄîþô“9v¿Vª/ðͶ]¤«Y–šS ‘û/ci¦Æµ“Dô6ßÕÍÞA½³•ÙøŽñ%‹ *Ç › š5W¹nuÉü~ð†ß)_Å`…P©V|ÕGƒéœ*`¾Îâñî°—pXrwÖÐÁÒ·Îoµ¤Þi*åJÅ=ÿ ãz®ýkùš«cø!$Žê•T+AµÈÙó‡#ðûaôQ¶ý§è¹:f…”2žQ|Ç$±B|¹Ð²mCï:©ý%¹¯X.Ô‰ˆ22g5˜’+}WÓ!B4Þª³Ôd‹ sMnôGòÉÕ(ÑÏ™ ßspžû ŠqFP=/z.ßÅÜžk¥và€kî* 8ç¸g°ÇÇêçÕ÷:ˆøñc‘Î/v–_ƼþŠF O=‹Æy„ÙWÄóG¸gøF‹R÷Õå‹·ÕçkyÅÍe³CV•5b¸üìrŸx.Ì«%4ÐDÑöÁ5½ùÇLï L¢l}îræIR$uÄÍbOmä¶%r~ÏqvV}³Ôʲ‘ù&5j -²‘¢n€¥ù˜ïReùéÓˆu¸U'ž 
ÁÀ`ü&+áô#ç£"¨À·ô×ù¾‚­¶y?î`íÞÒ8Ó?üW”gíbmÓéYíÁjrþ6¦²=n«ßü•Ì8Ÿ§b¢Ê„] yÝÞz}c"¾ycÍ; “—ðÿ@>ÍÏneÄeØò2Ž#îÌ•OâIgE¹<s³:8ÂqÍSP÷&>Iý3oŽCqt»ïa–A|òÈ>ÞX‹£3‹LÉÜÊ+ßÒl™N=q¨Yƒ=cN¢|1#ÅÑùi8«;ù#ø GgìñÐAæõØâ}õÄ·â 2ðÂÿš)F±».Rûå÷(í‹Î'¾ºƒqóE¡8û¨"'r È®æ}žKu°B@ÛŽ]RH›jb—#Tk›¬‹Ê„Wò¹¾tY\Þzù;síjÝ閧Õóñâj¦^Ôaÿý•«ÅèBI÷䣄GÌ»6—ùUC&.–³ª'0÷€}þ¦Ãýˆ¬6?N¶ÓÇcÿ2øóÙéL×FYd\I9×Î2ÿÊGq±|¸Ž…_á´Û©g 4™7Ð?̇]P…G0øÖ]…l:Üci]ÍÅÇnÙOZ©{‚öÙ±Y½-ð§”J\6•’HU&”.aÕ'÷ŠàcÙ€¹IåÁ ¦ÀÝí‡}]R„r㘇–¹ :~^yä"B suÉãà2´;*{à¡ÔÐ/#êÎÇùó -|wƒ&d“Zûù%åÕF~·ÃçˆñÄ Ê’œÙ¨´çùÕÜü`rŸøÏŸøyÿ‰ ÇÍÌfgÀ-ûñ<^';`2QÖ[tÿmÆÎ%­gYw_•…ë -“¬D ÕGè«„£,¡TtÛ‹ÿ•l}Ô7„?ÊúzV®G†ÇÅá”ã¿bËø,Ÿ+J/Ã|«ÐÀ+´õú¨ÝùŠÑRt£-7ahÒ,?ÊJýfãÀÂ÷w Àˆ; Wï8°šÍÓd¾ý™ lÑ"âÌqÝ£ÃØëhµ½e4ÓË8­ÿCß¹öŸ:žSÖ^N—3qîk÷ú.éUïxêõ\°ãÖ|O^yw|m¢ZŸ4Ü‚ #¸¢¾¤©Í—Äg±%¾}̯å˾ҊûˆÔ÷}¼±&^R"({fåS;1yb,Æ+rÎûA°¼G‡õ±%ÌËÇ1˯íËÿÍ¿|MŠ½GԱ齼֒0½]©Džy›»HO)¬Ïòu÷fo¬ŽuϲÜÚÅf¿¶A#;ðfsꧥo¢Ú—q,Þ[Ww^b‰})ÈvÞ%üÌ‘tçz*Yxžéå'YËöÅsÏx"o5u~þJú2[ϽWZÃ<É•àú °vÔvÙ;ÎÝh$“NC4êë^ûò@‹"ZÙ; Ûš¿ )§/­P &{:µE…hz•G¡Ãf"žÍ2¼¥Ë[Ú†Ž5åp”<˜ˆ¸øÎâc§¥ãév”ðë\‹o= šæFʯŽÛìòL¦nWø3;›dž¿cƒ·½>0‰èöó=«D® ¦,,=¢Å³Eûœ!1ÝøR6‚¹9ïOíïD€îÍ xýÈC¾W‚ì²w3@º¶gfú?-¾Å ´ó©ÈtiO•Ùóäz)¡(óºóÉ[hÌ‹Aø…/øõ_>aIjž-xá-è~a1Œß<‹q“€Yól¶á‹òXŽH[çzè ú¯OB¿hÊõŸ/5±‡ST¸¢A{‘Ò.FÍÌ!ïJXúóA‰Ao¨j9æÓ:јùoÿ2y‡öPmS:3|sÉ<<›Z¥ß¯ävgîuXªO}ÒàI¥4ÖYœ#ö–‘Üî¼zÞÃçÕm„ÇHwþy ù¨ôàn#‡½Å9çÜ Ú¨Š— ,Ce¿²$Ø>WuÏóïð[:¯8i?ϪâP À\> ¹$YS1U]5%#~GLGÖGNÜêãr©V‘\]¨g=~¼‡qx…¤†F•Å¡2yJ»E…¡vz- äØ Üë%ðÕÀ_Vr!'3•±/FGŽí±.¹£ˆeq~¼±ö~ÛJ6ìÏQgFMu»àëNOIY€ØÁ:3"ù¸ã -uTÃeâö߶ƒW]ó߇ãÕÁþÐîýª‹â¸"Õ¶÷ âÌýbpØTï €„Û©?eÆÑ Fþou°C~îMì…zž2íã…ÙÃìv§Ã¿HCXî†óf_™ˆ‹¸zfÝ£~Ò–ž22Ãc"8"R#˜ÊE*6¶7wââ®Ä±í™7l}Á¹ Èf’¯üÖNHcT:­š ¿Ý&wÇrÄ@K€k¨ c3‚‹æÉ«+y$!zí¹’­ÀLŽ]#¨XÃÔCWq?ÅÜÓÈÕ÷1ƒ™-³ý÷cpÈb -„j¯KlìW²o³CBĬŠ8íÜë"õ ö)æks¿¿HiŽÒ“0*µÞßà ²ÀÁ_XµŒš‹È1–´0óaÜ5HŒÊQ½Å}þÐ -?e-@ð¡ùæ_¾q4ý¹( Û{NøÚº㙠ò£Q;êPŽl#z³wÏÙSú£-NœÊ*x6(¹÷?\|´‰m„„§þXáœñ@ÏAGF wüŠÌ9Q~vA_=)Mï!³ò4°ó¬djžH ×ãîu¤zÀ,¼Ø ¯?ƒÌ°;ÑYÞ+Hã2 ÄXN1½²‹¡­þå$èÈÙ¶ÒcCtÖPÆ5)Ê?×Îþå3Ñzû:šÕÛ ®¸(½òx¢¿ØÈ—ÏÕͲ‰Æ>Û‰™ÏgO`ê'vÀ£"Áy¸Ìgw’«%™€ªá¦bøö»Ni¤v¥už¸ã³š— ö*æM˜ènŽ}”ØýÍÿµÂÎæj¦è~£kÌ3“ë¾j?®épöQÑþ%DÙžqÕZq’¤J°‰¿wžÆNk²2ø ÉÆŸ^ô|ÒP«¶œˆ‘|Ž2ô?~Ç-I¬‰9Û¨ÖZt…Ršn,QOÙ;Ÿ2Ððw,¾4N£ŸŸÔáØøøm9¼Í¡vá8;åG/ué‘*#|ñ‚1ôEwãèÀ.'ä‘þÙàë?|êàRžþXöüJÙ}#\5h[ÀÊ–”}J­CZtqìòUa™|„qø¬bÎð “äì¯bŽ—€“áÞWÝØmÓÉ_oµX|¨GÍ'eãAEhf|­+$^‰T¿«@ÕfdµeÜ)5wSqä’WÊq®É†ð[‚{fñÄsɬÃç½Ä¶PÖßÔ8NcÍÔ î’ƒ¥óMåª}ß &]î¾O\"‘Ûƒ¶ÅRè¥<¤Ø(Qn×à÷…’ñ”‰Ô_è"Õ¢q@²#þ]Ãd°ÁÅ”XÊÝÂãbÕ>_Z•ÀïÞºËG}x`ô7À‰D±ýýµÔ×Ëó×E1ñÃ!X¹ÈvbÃ0X‰œó.õÝEëËÚŠKþ…^ü^Õ<Ž[*®ëìœGÆ…ä9¸ˆ{WžÒê0¢1ÖHc·In…øòçµéÛ——â›ùsêOò±û_¹èÖÚ4Ä#5_ªKêìY]äM.û§Œ }¯¶AQ/÷›‰z ²©*’®zÒ0µ7ù/jYE« -Eò]Éaˆ?úÃÍþ`WDLótS+ô¸Á˜|ÒW݈p›¾LÔ{­ùëwi‰Ø»½#Ó‘-J{÷YT®ù}wÌqÊÈáš aÕã~5Pوφj¾tJù¤§$˜\O†,ÎU‰C<¿©’Œy:h­æ#8É”|®5Îi©Ž¤‰äa&qJCÏ8·¨í‰ËÉ\0¹ÎªŸ÷¦9™Ò;ðJÙât¶[Žy&àéäZš6ãóŸPè;:µpŸä]Ð ñΖ'ˆ³ˆ§äg\ô–7Ô«Wç<;X‹àdÒvQRY1/–ú\…9}zá ¸íBO©ðfß³Ô¿,¨Ï´ų@òzS0À&paÅÇDtŃƒ0 ½8›ü\¿ÿÿ©ÎÁIÚ†‰\Ž®^&ZÚsÔ“Q›ÝŒ)O¸pOtw¡˜Üu˜…V½ÐÐù@·pmþƒ†R¤êZ×G©<ˆÛÐKê"F8gH¾Á´´Çªfáûr槗mñÆ9Ÿe~ßÀ­`'ªEnUvD¬Ä3Î]g[‡×ífØxeÀÔÃËa6þœá|ó¯Aa~ìÛçVAnŒ58ÏÙYĶê¸Düˆ2 3È¿ß­dKo±…IÚo~,0‚K€8Úß`„±Îýùb‹PÞt(5¼RøqÇ9°ŸZ*8²³k¼Þð¼?³k4Ð}“ƒÓ¬>rìíõgLJ÷äžÙÛ®J/•Ÿ3”È‘‹4ÿê‘r?ÛI2>{¥œ¡Š˜…š÷Ü£¾v'jP yto…WüW{XáØŸ³·îO¡¾qÙ®ã†çÌ7†”¡odaQôY™3¯¸Ép|_."3Ðò™¤Ñß]r»” #Y_.)á[hÌø¿ÉÈ’òåÛÙñã9Ã=õu£/ú; -|zµ+rÃ^¶48%1üG0¼pfy^¢œIµTžµµôUäÁ¤‰Ã„ |*æ½ÿ!ÿlý*am?+‰”z'(€1S„üs¥Ô(skKj0¿§më ·f“1Z@¶® -Ô ð=p„b}xzÎ碚*sWŒ;VZóEyE'sDèÀbô¨O¡;6Œ«" Œæ‘Gjü~M‹î„ŽõÅ c|õ´™?8©û¹)óE'4R8ÇQ(içN|qf¹ålܘºf\w¼Ñr-ìëÈg5Ð͇ô|>çW}.;¦Hâ¯ËÃUñ‹º^êƒóÄT‡_ÁVâq¶H ÊÙixd섧&Ø2ƒ{‚¢inë£öíë›mâãïE±Ü!ã ‹¶6u{oŸÒVGr'4–J^õƒ0à‰¡Îsý%õþ3ù`ÿZƺõÞñ¾¶;Ç0{q©3r1:9ý¯tb—P0v<Ô\TD2V±ìñ"¼ŽôpÇhÈ%+Ió•„Ös¶@üý´Gž³„pC˜‚y\tl”Ê9h¹(!d,•ÔSù¨‘¾Ê]‘9o®ÑL]¸6·õ|à¸ÈlóÝȺS(e`«NyÜúC<ðö, .!9³5¬—âÄñnÔ8ùàPn6`xó}Ñæ ’AÌM},‘-fW=Á LÁc¾ƒ ½JÂrdh¼ŸÙ!ÈsFüB+=ÌðæÚLH7âU¹{d z‡­4úΡƒÌwæAC 7ê£F¶f#sq2…»”}û•¿ fÎÕ "83@w -4Œ{A¡|¾þ .m¼KÛs­¾ &ç´øOÛt¦ÛÇ?Õ6!‚$É€½Ü«ýšE”Á»C±”›ó{$/«.‰k†WVCBípÇ’Èvÿ¦/C‚L±Ë†ylþÎ-¯ï£#·‡!”ìgò¾ 
Á{±æC=FÈ; -9Hèu4ªóþ—ï/ƒõiçl_¿ -Zy¦x» ¾™/ÑþÄä–<·o.L{“Ë{lÚÇúECôWÃÀï;Ō¼8/ñž‚ÁÉ!zg1êïÌ›sês¯ÖôŽ½!vˆiÅÛ}'÷Ô§ ^î$arÁܲT ™”·Ì5û½È£ÞInrñ·À\ù¬ëo_h_-“ËÉ{”ÈoÓzìˆ*—‚ß ’óm¬¦éX‡FPúAèÀÁÖv#x>rË —#šI" |¬_Êk¹Â«åðÇFT±è;‰ONÑTÞo[ /X~‚Ê“;Ž·wÜà£Eò…ïļ%¦oÜô¥½¼îâºKàJ6k¤ðF}ž,*C½z¹Ç-†Iñ"¯±ÝýÒ­ -Ý–Õ/í~^£ˆDã&¯ýÈ8j?³²¯©G¸7Dr) 3Nœ£˜ XäÆwä£ziäïºG$Fsq Óœg’-7³'X² ¹g p¥6ÎÉȈ)ÓÑ"yd;[ã)=»t’¸ðyÞݼÖ,j[ п/‰ÿóºŸ÷ŒŠ,EP•ÒqV:ÔßÄ<_A¢ÐIoi™ØÁ̈æâ–jÿ/;!aîX ÉKVø½˜†AfæëÎÙ)§#³ç# -ú,ú-£¬¢¢eá”"$´Ñ8ã³RÌG­Mƒ­lè·4».¯„gÓuHs<‹}·ÝYï¦ÖºcŽ·& -*žÂ€Ð1Mwãæ¸ê±~=ðÚ±–ÊâÄ/÷»Ýg5ô-ô9éÃf,>†M2N³¶‹Dp éÄ!I¯‡÷ÁJå„*ÎÓÞ«?ù hÔ­›€}U³t$= ¤Ø¶8~?) - ľ㽃ÄÚMÂ0:æ-F@j¯H¸Ëï:”_è’øSKUݦf ØÏÒfáüQ$!ôó5q¤ÀH5?Ì…Ö2NaFÏÄ7Žòlã’¾Îè§]©8Ÿªü[x%Ú©^6[yhÿâm_n7â |íûkñÞr»ûË5ç`ãÄ´p\ëïpŽá#R1ÝŠzm£z²¤ÜŠŠã–ìµàmÜvð‚êî3ÆÐ?í•bN<â ‹ -£&¢¦cý(Úk“Fa}ËÔbþç‚Åã€dþ{Û°j#ÛZÍ/ó»h™·XiŸ‹ä60o\„ yxgÂ4¯¢>…›À—œù…Ø߬øý¥xºìÌH­,ý8dœò¿;K—óµÏØ7¾±-‡Ó¸AM«¼Ïyÿâú çÔMw‰ÔG¡F¯øqúµ [€õAr9ó?Wáes•ß_F†ËgOh-7€½¿ÞY†fÞ€›x~1BêÚ6xw›{ß®eè¼T‡c«ð¥æü #>qD´ ÂĶVìJ,+‡pù¡ü°Ðÿs.+’_QyuÄž„M"W÷c…%?ðh¯ -K09Õ).Çó·˜óÿnÎÏo®Y1ç?¯ÅþØ’M‘¦ Ô?‡ #Ïê¹î«<… Sþ<¡ßË}$̶?¯šá„mIO®šá¡—D€•ð1UP…¦N%4JÔà-¤4#AõÃaQ¸!SÇHßÁÈÙ/ãópeºÛóGcì!c"SQÛ7†„ÓÍólݺwS¼£øñ„Ï*-XžÄ]ÔÈö½Ú¤,F‘œ¶D—Ö®‹p|³Vî|Yò$«òhµeCø³»Ú7ÿòÕc´ìLØ~5•sqI„wdç ŒMå°ß\‹“òàäÿ;+Æ˸GnIYŠmv›ù -"Éñ¹xÊßìH òú$k™k O0Ƹ™¢-žÏ²ç‰›¤¦Îûd”¥†‡¸Å7›M,t$Žd̃!›à6£’ÍõÑn̓»ÜžäP²æ6¢^»Ä™yËBZÙP2xHçóÚñ`H~λ˜wD?J#ú¬]ÏäŒ\¥ŒÃ“Ï7æ¡<¨ï*4<|Ñ©QwºÃÏ™y•ãe©e$“vµ×osý}RëÜæLƒOÅòÔéJ¡·á2_Å°å|†_Êš`ÞÌ,âÌ)m˜›ù[j!Ããoü`~‚’åÁzŒmÌÅòÂØĽ¨úÅ2fÉn†ªaÌ%âJˆ¾¯¿áã 2ÉŽ«h®!‘†°ó¬¿s‹ÇÜ3¤ê1ºU-qê¡Å¨+œRÔ“\ЪTGÿ§`¼3;¿m+¶ß¿ -Ç$‹‹¯Y“rÎ ó 3,¿Êð6ãßa¿`ßW9†X9ð÷=N[ Xµ¡P\ µy•d džßÜ€"ì-¯õº£lúììž`6D©'­_d>>‹Ù÷ú !$MÕ°ŠÕÆ.ºQ³—?S&FZªúE €iãÆë¼Âzˆì˜-k?°&ø´nû¨O -‡{îEÇ<7Žå, Wœˆ=áKdy=±Φ1œå#ãé-±#ߎTÈÚ¼„èHôÏxÖ°*è˜ðÇò¯™Õ$4'Šâ ->< ~:w…gµ\w,GCÙ®KMXð…×~*…)†­Û³àáÀ›ö“Ö×cŠë"|ƒÓ#,CE{$—t=íÓƒ–˜3ÈŸWÞïr‰§ 0:}ciј›—U*ÀÜ}Ä\¬{æÓü¦9TO±˜øì–§8ú¼3~šv¤}ùqú´ õ4s7Ñɹ^vcœ8Ž™©CSi·v \¤ÙÙYöŒ{Ô\â7÷×HíŽTð)j£?c!=̆zFjÎ)‰>Gȧ{Üò ¦íCGúj¡±"dØ’Éð|g¤öÂ=¶-˜©¦ëÞŸÚñíÜ‹òÎJÈá–ˆC2‡Vs7Ü9+ñîÝö™Pð™ŸÍéÇ~äWv°ƒÃ°’ò)õ).™Q‰OúÂçdñs¾dš;8õNÈ}¯.‹ v;%+êÜò„qÔ48¹Ç¶lŽA~ð±N†ÐÊÍœ½’æµÌ8#Fó`»=ñ&Bd‹§ç]˜yFPµš\¯†.ÓxÛ^¸,5BëQ!/3J¨£×ääægį&urí,átqԬǜBY|kƒÂ•3©|Ö'±¦à;-Ž­6º¤£`^´RªxOOs%6﵎Å\ptu ð«ÂIgéÀ4$õ½ªLÒÖkQ’ŒÍ¿r$·/™¦(gÌCgâ5lsW1æÚÆœk!—Èê4¯_]Â󽋽¯”4£µÛô¥¾ˆe ‰Pß ¼÷œÆ÷‹R!–q'?ÞX/Š\‹ïâf]æçS!zOÙA˜•´'¿,35Á“÷)y¾¸çm_–òçs( ×”Ãk™¸¿ÚæÕsD‚&kçæF?P f4rÑ“t ¦×³‘ù!§r+ZÄ@[5£%1ÓÆÐÓJV^ áx;³œ¶àƒÿ›åt:/"ö^cO· IǶ[ÞÍ}DCjG]"ÞQ8e\¥t(ã¥ó(Fìüf7ÞûG¶Êï+R±@”5 0™$rØŽüîÒ­)þs ·l¹ÀöóãçÛ÷×ÿiÍ{Æ[\ÿd¯åÅA`Œ <ÖÆcûôÀ“>ß¾4+$ž¤ß|‹Ç?@mô(ìtŸµþöF­ÈMÝM£¼¡4Éû–üT/µAUÞ.¾?ò#z9¦ÎßÈD™Qåáì_V*.rg¹y̯LL†ó(¼Š?Æãµ¥°näÔ‡Oúy¿å¿Óßô;jž4ÑÌÅÒ¬í÷À)Æl0EŽ@Ýì¯)ž¡ÈG)¿Øù}ó/ßäÓÿŒ+üŠ+èܼ4FšâçU•›Ìo¹Çétž -ó}4<‰\.aDÓC5Þ_`ÉAØ›Gÿ´D0€g¡mk/Žyß“¾›l œ¨C0«}5—6úd_²2“¨ÔEÓ^%–Ò}¾¤§–íZ5o nP1ÿšO°åc®t×BË,…³˜+ðHO¼ä·p™åo)G/³—cÉ•¶Å¼À[lQu™ïG[ŸtÁ÷›1Kú“¡í|.q¨( ct\ðæ. 
»–3‰(õGíg÷H€¶‰K˜ ƯEpõk“aõ &{ªí˜U‡‘r+ Ý!—"üý¨`ëùw$iløÜRW††ÙA³¥K¬+6a‰ôÜ_c/Æjü¹¨{}ãÙ†Á-àß:722ݸ¼un9sjYNÞ '~°vl‹Š%–D¯ˆ5 -èñ 4îëöü6>= 2—`ÍîüX&¹Nƨ`öiyÁ/°²ô†`³{¬¥›šß+Tì;šÎÍMHÌë+“ \=žŸ<®ˆ­¯à‰€äŒ!µHïðjç÷8ã¹b®>?ŠlceÏß;JNF%ñu®UÌbo$ó$óÒ¹GˆM1ħ_›½Ð¿ûì!ÏfpKwäN hÅšóÌòsQE)?*ÄÂx@£Lîé -‚‘óê/ãéabgc6?>ÞØÑ_¡YsÛ“úÁ5%u@X~ÕpXb*ƒ;qš>Ákà¥ú4³·í»øí?&wiŒ¿™®ÿø:þÊK¢(çØ”vˆã<9X™>1©ã’ì1BEFaçk‰&£1V©¹~u¼dÚöÖ$aúãkü¡à/:®µû^Ø”«v˜´ó `Qº`L‡æ³Ô‚ûv¬.¡1áø]™ë~¸Þ|öÕå_d,µ BŽ@Ä–ŠUj¦ «u«¢-ÌõÆÜ™©+¯å•ûPž#xÌçUw¸Å[ºA控<æçÞaOš¯È¶BLo?æóÆ®™©¡ °øÙttan>†Åï]K¹óò=RýÏ~M ¿an¾Ãåüé÷×Ì·lÞx -“»VƈåÍ|i ½P·Í)§býºÆ95lˆûÍgªêï1|ù†µcÁöYâé§1gWž»±„?×V󄎼ác ¦ç#Y@k‹¥jµ¼Øc{œL!÷ŒSG=Õ]8¶ÀNùÊ6d -(áDkùðfn–ó«ÅBº<εg¼“X„?¯hËùˆ F°ýP»ùÅÅó8kPZkp¼û|]ÁÐÄ`ûòÆ2­‘³¤Ñ}ü¾l7#f份ñ¹ÌZe™E¾ÿjJõuý%âëVÎ&®z$Èóã΀]*gÄLœûlmX£ñ@¹ìׇ¾ák↠ólúÇïÈ,0BZ—8\"m K¯G…ÍR -™Obu°£Ä|,»Þ'rxã†!×u)L7Þè{Ìx9ùËZM—?IŽÂ Ë%1ˆôÑ[]BFpÄt e¡Êˆ'ØW=÷åôPUÀ:î|åñ€s‰?œKí`, g›yóM·¸­nÜ‚\ÂYJ²âz®nà.AÅ™½ÕÝ9þIát…Øïï Š‹Š&º×ݙ﹦1þ1/ £¢'80Б!ù“y -S>êVU*Ô™oöºÈñ}öNÎ7Ý%Ö.ªÆóÜõ¬fízÒD’k¹ÂeÞNòî§®È’Ó H[õI="›ü‘Ø´¹Û—=ÀHvVÇƳ7bÖßA´ŸÕ1¸ñÈÏëo½¼o,’´‚É>Ù¥#&îy¢NV†¨·\‡çYÒ÷½àøF÷õšå7þà -úU‹5Éíª>¼V"e=7ES}XÌcS×ZÏT= ›×{¡NUܯ÷¢çݯ´2<ª…~[àë“" ùÛ†Q{ì¡bOja±/cã›"¢¥œ¨ì-³Äc­f!sœ&«#€ øD'–/Ãß”[0'ˆóÑq…}5Yx˜[r¯4.Ü£n©w–Çü°]“Ý9ì™Ù½c15_š3¦½.¢{Ÿ›Û¢ ¹ä†ôxR¸$ÑÊ[fƒW.1ÍÙ嫶ª}i|¯8Š†% Ýv®-ŠÀ»>‰³iî ^¢ûK&z¤"uÅÀg†‰é{ÊÿRòoíª÷.‘©8,}«éD ƒåäI3u bO_÷j0»¶×*±V>A¿³²* -©³ñ´¼B=*%Žy$ŸÕtDq‹šýÐÆiÀÑ”.yôû -^«;ïQôÀ3ìa•î«ªåKÖÒ2ÔG•ÇüÍqË0»¤V×ö$© ‡PgQU[`@gâp{þ_0P‹œ`ÁPÇÍf]Å(k¹@’^«P…_‰t¬¸–˜nûE›çú;¨ðû)½mbþ#Á?W™ñqâí»ýF¶þ)è”ólýüD4ˆ¨Š¿3pýÌ3ù#£ÅðçóæWêü}µÏ‹2¨> ‡$-}¬ÓÏ´©wl?$„ZLë«‹ßgèŽÕz•‰CǾDˆòLéu‘/kºIVVÇͬ'íNG -ôlpŒø4 QÇ;ghYƒþ©»%ŽHƒ~¯” -vk lM8ï/xògšŸŠâ³¦eÈ:>K@ÚÊèõ€iì ¼´óÁ#“íà$"”ZáógÌ+vH+‡òù^[…»õµÞi“iB¶+&æƒÿƒØОìªÁooEouÑ|Ñ·°žå©@|—å½ù¨–9$ióÝÂñA16’bgñÊsj"ž\ -šrN­Ø±æØ#\Ú8|$„¯³rȳ½›ÏnÞ -ª3q0aEc!%L2î|/F“4`û2Ç^í(ÜcxŠD?K¶XÝæïѺȘ‘ô¤1T¬Ç°'ò¬•-¦«©äü%ôϪVÁPM¸r†ñèAŽ=Ù½­S"ñ}n0–Z>ãËÀé›ù:‚Š÷ N>Œ#7à®àŽ-ÅìYN `šûÒ¼e×µ"›áø˜°ñŒŸë‘Ö›c{ò17³Çkôä<ÍŸ7´Óå‰s…^_1¹ -DN®ì«-#x®yðo]¢Ù$nÞÚ+ÞÏßk}Ï$GG9G}az5LˆÓ¸S)pbx¹õ•0½ÍxžÖy%G“ùÕY— .l¼Ö -Ï‹[–jA3vVn6Ó ï¢Ð>¬{:<ÎÜsƒŠûíý“‡³ëó¯a]€1¢f¬¿B"‹£bnöQ.aƒÍh ×e¢¨”ýñÝb²ÆŸÄK‚BXO<©#ê+l 7Åk«I7ÿ®E^AX°å]6Õ…—šŽÓ«Wê ®øf¯ûi÷ËbýË-x˜(d—þUžb=‰;[ €œƒÙ]Öd#¾Iæ÷ºo¼Bþ»ï=Ö“7!%§êöÿgîN®%ɱlÁRà<(áKDH3NŠŸ¦PÜö¹PË°§æa¯þäg®d¼€‰Jƒæ6§©òš-rð×yÛ9C+_µv™ÜÎDT ˜¹×©„Ÿ1‹gPÕÊ+•!ŽŽ{?¸cµñƒ|¯²§`Üv¸ißÄΨ„¬U|†Œ(ÖpÂ>WÌ$s­í†ÜÀOžJ_qœ¡=”îN‘úfiGFZ/–› Ù÷'ƒVE¿=؈þÏ“õ¾ÔÚîzNKg,…ÚÀôâQ®86é!ÑïÂáÓÚ¼k»û ¾¿½÷¯„ -»3ØíL—ù~æ©’02ƒ B¡thˆSRAS[{vt'oÙºolvxWRd×Zó– ¾“Jr‰<;åLÏy/‹-ôi‰‹Ü7*íú|@ªíǽðáïEGq‰Í Dâ€{_7¹ûÌ0Í w³S»%wä©O ½:ß5òLjãóŒÒÝ’íãÞ¨~UÂÍnï¦D »Ÿ ¶–õá*N} ±CßUÞEºÇ÷6ðÌ~m‘ÙèèvfOZò3’™g>©5ø[QË:,rç‹Lï‡ïý%gŠõÌoý‡£‚êVº±G êp²¹àm¾.2±¤æi;ÖZ!J|ˆþßÝ”‚0GLê:óû¢ï E[„ÄUG„²s@Í“ˆÀ$ÿf´”nX<û*°£Þqòåª4DÅk/p›¼ Ø5ževÞÄ$ÿŇÕfCF¶´§ªóWÅ4„í-ëâŽúkô¦Ìÿý‡÷Ã{e~Çñ<©¨Sï9:¹tLË rw@q×3gb)çkìeÛl$ïoúmg=<àuÎa^¿µm†°ÁE`ôž—R}²z88á´—ßËu?¾sûTãbÙwG¯Pä\˜> »¶åþßÎÖoŠ.ÝœÐ.ÂJ(ÆCc¼‘U>ÎP½#eâÅêt©Ö³À㺠žòDä‰Úa~ûû³êîZÇ UÜ -¶Þãq}’¤~<Ü7“¥“¥A YzÁ°~ã¡VÈI ©ºŽ”Žþ îÑåC’SJ¹«ˆÑó\;ÓEN~X__•®/b†?„ -aÖqÉî%M Â3tú·ìøtÿ® û“YöuV^ãü®àû –Á‹3V -ÖÞµMî„îºâIü -Å#ÿ‚dÑ(¹ƒ°‚ô®Û±(g,UÜu0n PÛ¹m婤ï#žp•¢rs”bÌ"ÌÇg¾LÚVO*;Z×B€"ÎѺыÎÖJ”î‚ ªºr-š'Llæ`É£§dm H‚:7\Ž¿åPû}Òºy-ñx-JÜ‘ˆkCëåù{K^¦CåeO‚ýûOµ ©ÆåH:­MÙ³ÇÈYJ˜|sDóï%LñÜ6ÎÔí -"ðK;Õ_¾¶\Çó)Bòû—ÿwÍt‹5foU–àöô Ë\1'Êgvˆen_Œ ¸m@uj`fBqü‰Eâ{B©ß°TݺÂ|ÖlëlæÖ/ÑÒêQgÆŠ’Á%3€œ¤°3È(gÔ+È͇Bò›vÿ€Åä,/Ò'¦i:Kµ»F(§üãžèøC¹sÁ={ñ+OÚDÜÈ\~kê.3igK–u½­ÆØŸ‹‡p©ÙõT•2Ðkr´wì~aFòۿ̯Üàt›©£D–¥¡BÒòu=ó•›]p‹š -jäSƒôOpÑ`_ šIåÀÈ|QtŒa<ÿ°cá0vÍlŠ,r›Xä¨D{ÎâÓ± ZœADîG%aÀßñêqÀ!ÀГÕu€€Ú±Út"Xeõ¶ªÛU§öåFÒý›b.&Z š= Û›šÆ«=ž»“vÊÿ Ûo^ãN3~nA¥GµÕEÂc->sè¿þ˜—ø3ñ1s¯8&6Î?Г¶\zò3œÖŸ@ÇXÊECª[¡ó}Î!&ÆÐçÐS†Üñý1`S˜?¥:ÌŽµ“ŠË‡ÚUftÑSæu@óaø¼ù©=¼Dôº1Ï£ Ío?¬‹n²ê‘r¿Öè¼> érgX髱ΠÔuø7¢ì<è4¨ë]åˆãÝÜgºr (ë¦0Ÿ|MÆG=òñTËšBo_ƒLOÒ”Ðøÿý‡›6uÎ$Tƒ°@aÜç–Þ#sáHÖ?Ì£kÕ6¢N6Þ‘—C>F7î»Éˆ0è^å{žqtÔð½"/Ô -Ýáð{î&¡é|§§3;_ 
6™ê­¿5æ$¼`s'ç¨zFÐo$J%Kîy9(å°m´ÜM+OÎ'Fß!z%FLåê¸ê:;Ò;ÎãäϘ\܇Þ9DêàÒ8öÁ$Ì!Aî9“:ÌïCfÙJwØš™Á׬ùŠ1Mµ[ÓöÐê~ðÃX…Îw>'áÂtôÁ‡˜“–ÚxÛb´­ ãñ3‘ïPO©kê'õþÈöpèú«lÚƒƒì´E2I#æ$£ˆÔï*Îyê‘ÏÍ+tÆÇs' s3s‹ìÓœ$ì÷¯µßœ ©.ÌãÙ3Ñž:YŶ»¶ãHÙíÆÂÍ‚ÉŒBàŒ¿H†Ì“ËT¯,£Ÿ¡tÏ©¤9q¬’x Š"’Z¶Æ -;Iÿ»6¿'Ná:®B]ö$¢Y˜B6©5*_w„Qœ„Oö6¦²|ÎøW„yØ(@3çC1£$qÓŸÙ(±ò->áÈö=Ž‹ñYšüÉOͧOÌèç±ue¿¶ËëÕáÿÕ 9K´”y†Ïœfîû¯P4’$5bЭˆ¶{ŽHÎ>O\¨qÔô¡87–éMNªœ.Äå²^æ Æ¸›²dw™¦Ï¹8Of@ïffNt´À®Æ¢¦pmi[ÆÅ=ò‡ö¨‚MÓYe§ÂÔïlüm7÷¢Ã‘Ë *I;mF\ ùˆ6°Líõƒõ`/8©ž–ÖÏ<à.m§EPûÚéViVÉêåôóJ -ìÿIäùµçÃ¥¸…,þ#û<5çnÓéÎ18Ì7;§J#Ë'±F·~°çpf´žç=l¢Ö–5ÐT?ažÓ#zê•Ð€ÐÊËg^J^r.HsAÜca<1N5HâpÇïÈlñŽmn`75¶íŽ6gj,†ðÆ<"/ÙÖ¢müÍo«¯~i×ÔÀ;ß³ŽØ$-”K·,7ÓUXÉX¾g7JžÈ_|êQî'¨—®!‘ëœçbòÁIä5½$9âÐyž#—A7¥¯l[F\äZ!†¯#³;<‘=Q²¡Æ¼'†?™º¤jµ|¢ð½× àrÓóÓ™Ý:GÔ¦‚œoíL3oîµ_ˆÃ†ž•Í1ƒ9µß'¸ÄFvÙ} Qo†ªwâr™sNK‚n¶˜Ž4@1ÌwBqæ\á³{¦Åyã{°=?¥ÜG3Ìp?sPz/›åç§øìÊ=ònÇ Rƒ¼¤ç™bˆâ#c]p=C†n. îÕ‘Ÿò*l¡ùs?ãÒ¾²˜Ü2ó¦3æê3sñ൑wõ¤O~ -œ–~ðx}#£d èõTŠÍØ—¤Ýª–ªôL³ÉDç¿&ཹ-T³!?5?µ]1÷CângòŽ#qÎêÐ|ƒ,¶i¯ÌAÐqT0y<Ú¢-9R€¦’z®m’`•ÀœmâÎäôJ4|É™õL`wEïBF’Жæ*þ˜ÖÌíz¬2áGj- ;;d’XXÙŒ@üyö*5m1Ò»^CÊ ¹áQú(¾’äiG¾bý–\#J-òÿ+{á6qè©{D¿e(»Žu+R H©öÊ,ÏQfÜïA*£ñQìBOéÑ7 -î yÁžz8‚OÖ®ÇçýRâ—.ù@(Þ¬Ü#(p*Ñg]AÿÊž•ó"+3ÉÆ~ÆbÈ@4>_Ö%pjÒ  ±Z€Ø‘Òh‹a¥~6ÎlF—ºÁ™p<É‚Ms_ ~WÁn‘ÅŽÕ Ã<¯C°ŒeÙ®go¹øYßÒ8ÍÊð‡ç £õmÚ@šgŸ½´çÔúæugZŒ˜Å±ÿîÞr®³gcs'„*#Ò E‹înJìjòˆ7¡!`Á’%"§~JûKk_Ÿ>¦ó…,Ç,ÕµºrYbœ4ötF s{"6@kÍÍläcUs2m:Mi•‰©Ö3Q}Â:3´ëo{ì€T6z½›+˜È”§ëx™Ñ$¤‹ú¹ÄÝ´=Ïû:¾ç™ÔS=ˆñA~ª.zðU¯t_œvLåØíÀ½Aaµ>ÕÖÇnŠªáŒZ½½’Dµ¦ò‚ÅÊ,RI¯¦ElÞà0µÔnú -¹|RòŽçeÒžf{eõÔ}™¿s›Ìzi©õœbåüh“«6i ¬óðˆØú<#ãí˜HË+ó¿‘¥)w±…öxÂ'^Såõr‡° -ç÷\QrOÜ7èáXÎuÒR£"¦¯8÷ŒÞñhµÓBhQÊÞ?ßwsµ”ǤÄíõó- 3wDaÞ°,—˜Ä虼CáNn]IvKn -—EÍ]áã—Ÿ"._D9f$<"qG«wH äZ#å:ä«ÀhP;å²Ñ™Oô1 -³PÝa°„Ü ¤ëÌ W\G;ÔûÐüw+¯wÞ’ÏEÈ„á…eâ+…$ÎøêÙcºLä\{ôëñh‘ͽA’”!Î<½ Ü É©¸¥{7¿Nï[:bŠiKcÊ hF(ußÊKÜ®nÁ¨ºexÌçxQè¤xÅúùå~£˜î:ÙÍfbQ£~ƒIøÈy7àqÀ;ê2ÑÈåòãpò‚™{¦VÝÔe.b„1›˜“ÛwJyæ~D¢çI¯0Bà Íó—Hi±øÛÛ4ÒÜ`ŽªÌe›yë§Íªç3ôÇ.N¡³ÈÍ!ÂÄ!‹¤ö¿æòŽ±D‹ŽçÞóSG’ R͇ýbÂÓ¹¶y¹™ˆÁÇeÜæ $Ãa–ºÍ|óC…,Áv~wf=û!¤ÑêNÅNÄú@(Ö (û¢MeÌðJ6G§Ã\DØâÿñ´`5:-‹=ƒGMw£©£Ï&’°5ä¥Í4¹åybÀ–дՂxkB×ÖÏB4 ‰r†ÓÅS-‘ ‘÷4,ŸMÌŸ§:ù*tϧÂI9D\²å1òT‚ -X©–›©í…“¥¤‘ø~þU¦ÚêGÂ'ƒfÄ3ß8g“¼œÑ²ç!³é/æ:êìJpÕ¿ñÅáiâ!—–W|Ñ="nê»àjYÔú©Ð^õÕã’crË›oüãÛ×$ -ŒR½«WÍ!'=UhnPþ½ÞŸB¬d.zЙꢥ‹æiø ~*ìiÞG­ªˆxöyœ™ZmÐK´öö§v§t<€â×=ŸŠaË]› }:ÏŠÊ~Šä -1C©š]±rA}†00D8…l…#Îûend)ÜØ™n$žjˆJ‹HãŽÔj«oêŠR‹cÝ0 XÅHñºDl~…;òL™ÆÎÿ02˜ ÎD¶S³·vY“¤Úàx]ÐVê½sÁË{Ψ»mÙO%è+BØóͧè…Œ8P3CÀTæV>Ï“¨jš‹Œ£Õj¢C¬Kn»¯!¬×¶n²†Ù¶TöÂŒÙr7ÁÁXÌ›\; Ýÿ›Jè^_|c*„!Qœ ðò¨§jKGËF»?¯ßGï ö£9J¤Â倅ñnHЦ V5ƒé£*€!Nl‚ªý:-îó…ñðaÓ€Ê2£œ¦s9“P¡€A¦@U‹§–ÍÕÐ-…/QÎ댯ÓafOQÚŸºœW§ûLUßæî4ôÏP· ‰† -ÕÆ»”ëH+êGÄA•|d‡ÁQ8““ã9Ì R‹†,¢ÒDEˆåf¿ãØDãŽ,Ïðm:aËh:…ær”•ß~²nq8 EB¼ÑST]„O[6Ôxw˼]³$î-tîn¼’>[â`)s¦óË63ls 40ÏFÚ#/™ŸÃ}•8SÔÓX —=˜Å¹¼ÿ”Iþã8Dz‚ý·&"ðNFă[í£Ç¨øTdžo”ŒŽ²–A.ÜCªã6›;f¿:ȅǯG@:/8sÜ÷Ì!JZÕáÂf«!dï^說Þh!=ÊS€}Þß‘O>§±¤½fPšBØuSÆÙ2¾HgL= Ë‹±)&Ï0â”O0 -êÚ ù¡Ëü—–T“ç‚kšœô,صù­$ÙëegøD1ÛgÒ7¡¢ˆM2ÚUŸ‰¤×éwZâ+ƒ€SÐŽ‘†ƒ®X~jðçÜ"à“ŸRV¥Bvkþe¦k¸9ÖnGªr¹2é+†\îàœ‡¼ð|'ó é[§Üx†å¡]?®¸5ÖSµ´–ë-w¤¦9'INo‡­ðdmK¶ ¢ImÊ`’ø¬ñuFRì ʼn) -ï@³0EYd¸óªƒoQ<Óh§·4gu’‹Ey,œþ‚ô(õx}5qzäaKèÒO0ÚR¦ûš€ÜäUûi9Õ Þ£ó;cðèápÜQ - ˜"MháýõÌõèérõT€á]íŒ8;ň—Š’·pý* WN1ìL£7ÚÊZÒ]Cî˜iÌÿ»ÕÂô¾ò¥F½@­®$7µ‚ív -s¤Çžõz*g'ÆK1 U°ë¡N½èEè†l©%£›Gœ¢Ãä÷V3)å)EQ¦ÚŸ÷”( ÏÑ2‹ ÕÊðzƒá”_öàôþ©3ÀÍ)0^¤f]ˆ³¶#.HŒª¼#-r_oçÄQñ¤5¤‰ªÉýšo•v ™`‹æ©Ì õ>µKõºa[Två±gÍQ9{|MmBáˆâRÜ?3ÈN„ŠR0/s¤G¾AÿÞý¨_Ú%Ê©cÎÁù ÄÝpÓ¶££î÷™¸L^¡=?%: I$;‡ÙÎå-ß¹ß$oJý-1襲°¦0ƒÛ¥â&LmaÊ ªÃ7£Û+¤þÁ¨\¨M:7:vƒv4SWļ¡e®ª€ê˜Êõ@u$y«9'Ø€YÓ«Ç´Êiš²ïŒì6º;\ŸqFüð†€ ¬AØÈsCùyÛÖÃC ‘y Mv>¢æü\b - én×ôEz¤ø N"Ø4JpA£<)7J`Â[Ý,|CÖ¸ŠüjêššW´C2iÒæ¾bΘ_‚ä´Œ%¶4Ê-[T‚ -ºà4å1ŒŽtbgˆsÓ¶bSyHõö@< f.+þ%• :Ö™±ævAoÿƒ[]ŠÝˆü”‡ ‘ãP ó9^ßXP‹öÊ/d‘ô& ˆ& ˜Ç|§­4C{Å™VAŽkŠ…ÿ±Î¯ç+ ê+6‡»¤ ¯’DÁ¾*R{R•'‰÷VH, dΧ+aõ{dW» O¡ñi³œµ kwBC)Àe>sÍ'i)JíQÕÕUEÛxÞ›ùÉ/ œ3U -åMâ*"¿†¨#lg€Çrl€’Ì÷( œçòà&ÇiU¦ w¤']Ï^Žø­[ýÁcbÀWÊóFl)Õa¢C–• -™äy~±¹ZçôŸØ•Â1å*sù?Œ9»´ú)¨þÃòÚH“Ô M.¯Z>£å׈v@/e·ô®3d~fDz“ð 
-C‹3ÛY<…üЕOBj&Kta)_h—kЭ%¸I æ"„Ÿ1–â© $›€BnBò=#NUŽGl6F…‘6™A4ý|¿DÚ™FÌ?⼉° `TTDGyã¼P5 #$bW¹§{q܇|°³CYq(ÍÜ7"t~‰E×÷½^Ÿ›cb­˜™Em¶¥Ø®w¬É'O‡˜V»t3t)/¿³faƒ{7ôĵ*úÜDïj-D›×¸*A©RÆŠÅý9V” Ä/gЕö~×ÏŶœ•ÅR‰»è“¥‘ÖØ ääÙ*9SÜ"L‘ïw-•à5@uœY¼¦Ìþ·Þ2çû}©]¦¼hv*¶j™>ë2T¯ÿ¹ùÞäM2h­î—–爜ï|1–AüdåûµÞKv)ò€O/.NsÔ<çWQ/ÙÓˆHò="ÙÃÁë]¯? · ²…XßØŽÞÁQ ãbï"Eö誟QTKlÔ)wß÷ESøuÔ\‹öàè“&“¹c„Ï1ÿü× ÔíÔ·iê÷ì¤UêÌT¢±ÚüÆŠò{¢‘¦i˜\Ó‘È›CöI „­§I¯‰&B:©ð(«Fa—>xæœA¥%;g8l½‹®:Òh †^£¶ñ†B\D"2µÍ#¤ƒ\<ìoŒòôAàWƒöì¡ÈÖÁš¤ŸHòAY»ÕÉüÄrý$ÍY}ÀyA#°ƒÛkHhÈà¶-B€öýFpéY5Xð‰”Âåý R{s££…y¯ë<ب-f¯‘$Ô¹%U÷=é‹„Róúë͆¾Å{mÃÏ­Aó-¡ÑËœ¿¤l¶æ$Æz"MRc´0æ‡Ús¯7£§OCÄ"y5áèt÷ëYW ·y q5–>`ÙŠRaq ‰Þÿ}‡ftœçݳ§§»ÇídÆCší‰ÄœnœîQoŸØÔwÊ.LS(|t«zxU¸ydÜph·gX3yµ¬i1!“G³h$ö70€Ö='¾}ë¨1NZ¾¬rð+6r›?ƒ›Ëe(N<‘=¯ðaž.M WÒSCx»¦óÔŒ€rö>c¬ŸJgäêrÇØ|MáEÉCÅ–¨"Võ•-F‚‰H6b»Ò3Óˆ7}Ù*MFtp†£Ø\½>PT 3/l^gÆÌÖêC^¡õÚì’ЫXÄ7ö‰7Š/€¤Þ+JH…gcСx£Èô*˜ ‘õÜ[xs›Õ_’Š—xØþ…åþKc¼öÓÿe?ƒUÛÁï‘èŽ^ ³¹¢ñælgÆlª¸`p}Áâhà@éÄòA@ -o®!èäQ!kå -±×ÂS#3UuÑÄØ9‰¼1zwÊÅcD„ÚÂW'–ÏÓìYøóŒdÉ6þ¥œÃŠÃ»Þ" -á*ÎzÀ˜7?ÛÿÆM9ï.èÜ< µC6IßKŒ´ìf.:§@iX`wU­¢Í¬Ì½ÕS)¸j+à:?«jÅÐêJ{]Í3ö×x¥ÐªVŠ—IDõ/ ©ø§m(÷Ç‘ªœžÊŒ{ T©4ßžÚdæóµ®ß G5"ãÛsŠ¿ìx¨¡qI¸Þw³³–Šq«¨1†I£½[7¬›â¨®ž"ˆ—fvé;¤{ —®y8Ñ(ñÎ!zG˜#÷Gª ‹y…-퓃¯ê?ðW £(#ÓŸ¾rôx4œ¡J¶î¿[T?Ö>4…Wdçv³e³ÜÊŒlW*Ü´b[éYćâŽ8ÃÊ ?—þW'à_,}{ß^j_­-2:>åv§Ð#¿bW80õÞs†úŠú Ò+òà|9g@…èr6ëÁ<‰˜†Fèù\Ù×â²_jš—˜¶`‚8à:g‚}Î]?uñ–ÑÁÒÌ•¥½¼•âX ¦vÙcSzt•;ÖVòl3]”Jyè,¾Ë<<¸¨ÐK`7Û‘Î阘À£˜!L¡mD¹53¡®(C3×j›ŠÍGAµ\ãhN.-/‡á||Ѩ\Îs´¹@ÜýÀK­G¼?´ûÚ«‡>x9áí’Êá—Üå•&XS¼ÊŠKÛ|!5D3Š_óÊ}~(†¹HþÜÙÎ5%f´³ƒø(¾MK`^æĽ2¤'DÒÀ ,µkÏõhÔª‚Å#õŠNræDh½HmT‘‹ÊG”¯.àif+ó^ž­ z®¼25”é†j -!ø¦>È'TŸ¤èª§‰‰Ý\‘¡Ž ìYNž§# Uïd…OݲÆA¿yÔÆæÇä`¯ˆœÆ㻸Eeþ|µ€CàOôñir žõz/¦_SeüŸÖ Ä…W…i+þ~ùÖ.ƒOâ­Æ”¢Ì m]3ʼn³Ùè -*¨È{©©=ÛÊ/þòi0þG´…¨&]E49ÒQ“’#Hr‹ðƒGaq™MѹF  -*®E‡D(~BŽ5ä‰ Úþ¦mtàã¹U^©:Ó$´ZÂtW4½B¹µØbú2Ÿ“‡>$ñŒGÂu–¿ÈüÔ|¢ÎŒóŒO"^QOÕ:¬Àp³¸ï†aêÅ™ˆÀEÀ'‡ü¦ ‹¢¿Ç«è3Æ…çïaØPIÉvT£Ð!­ÈËF2×Ü lÿæžGJâU[ø^ót»r7Zé€Ï^Nו»{Ó9ÅO:Ó¸Ó—ÔêjõS7¸µÆK^ ½ýn‚_nýð ÕnË–·@<é]?Eµ€Š€2Ì1<à-N“½§Aݲwó:EçĦ¿…{­¾ÓÁ‰ŽKtåPA -Ä÷åΔŸ@è7f6Œ |¤®ZM -Ѳ•Mn½~jg` ÖÂz‚LåLåʱ~‰"Ï.ÚzèRôêŸÚÁ¨òK[Tè5ÏÖ<ÃX1áE@Å-=(h-’÷äQj…¢ó°!®èôÅÝš›×L‚“²=á‘ä•Ík„üM5P†™Iõ¥ ¦¿G¬Eª²¨t¬ù)èÔ{{¶(Û㯠¤ÈzáÈsÍá^6¦$Žá#]¯¬^ê”dÀq?z˜“'ìÂßlïBND¿:€`qùC8Ù®0å¡\°4¯¤y‚‹+‚ ¾åèêHolûÉŸ1|¥§ò_wJ}Ö’´Ûr*dI7œ®ó–ú7@l±çœË.[½l -4g\ž9äŽ×ýÆSm WÚîÅòl¾·9äüÓvÇçôŒCª!ì×Cšé3cÏ 0d£Œ©ïsXæ±ïãýµÅŸñ–±bMƒž\É"çòh˜CêW%Od)Ri?Õ{jÀÝ•ŒÄî¡ÜÎržégŒ˜tØâÇ -ì‡u tïÏqò UQÏÄK¹¾ë|ù]3ˆeÌ”;ĹÎ%EVUœÇêë;C¶°%f#ò0¾ ó –ôñFG€7û sçý)+Ä«UÎÌ&S= ¹:ZM£ž¶þdEü”#=R…É ®'5Õ§Ž¢S©N&ÉIô/eŽ]БL)vŸ/¸Ô­PYJ±õNZ£€’¦= šaT½áã‚KTÞÆÀ›RY| j~&qhë(3]­UÐñTÇÝ°i3ÙuRþ++O݃9éÞÓ¶÷T›,ôxÀ¶æuPÒ>B>XoG3yK½z¦fà퉑–Ì,WÙK¶cšVáˆBŸ¦™‚|O¬aìÈ+¿4÷·²žCø\‡ÊD™³oìoC@{G°”’‹bº3âÐ¥bx¦çµKH®T1ü‰Œp}ê¦F)̹ÒÃéQñ°#¡W4–ZÚŵlÕ¼.„uBÁ"Eºùfït©“ôî÷ŠŠÇq¥H§}*AA2ïï!-úÿºÙÀ®0壟";y,©=m5EÙ'Õ™*¨TL%‡¢ñuGD󉨤Xxnœ`#G •tk¸Åv–/OÜ2äYºÓQ¿Epƒ!b« 冋¤Ô!Êã–©ªH/@#ùeØþ¡+7¢&ú×™òrx?䙎p¸˜Š£MÝY]¬ä¨pƺ6¼ŸbèaD«![[[”9®e2¼Ik>ë'ŸºD{×áá] v$;OW.FfQ=Ć Ú­ÕéáÐ5"Éç;‹¿ W­ª–›¢.A>2[±^ÃÅß"–]Á€é)¢ (×uœñŠ¡Zs5HKiK9u;óS •¹{äd8ƒÚ@ڑίI7ñÜ¥ü‘†?|hå;v™×Ç)‹ìÅ¢X8”h7¿³çÆÿw–¤¬Ç€õáÝÙÑÕfT£ÅzÔØ̈C¸eÚ¹çUF€>#Íåyp‰šÊÌ)w£©Çãe¤EëY‘ù›ˆl&^Á¸¡AõÕؤ4z†Ô£„ª·BÛÎä‰JZö¼ÔçÙŸnë©ænqò[ca“!sö]ImÎc‰qÇÔ?Žjß«,÷p f’²>Gª¹A²Úµ`Q‘ú:כмLuZc‡zjôgë`…¨™K áXèùí9"¦8뛋¯-}È!-s‰öూ ä1üfåjh~R‚Úf-MwžÖʈ½a -Ä4„:Ž…®,†3„±'€¤lÒµ›¡kžGYBø€xö%õ6FºGá GR´“v™ÞY¡ì’GÖx fÖ†cU߯eÀ>Í’¾PC<ù‡Xá| Ûâ ä×Á…Ðãzñ¢ïgƒ)gµø‹‚\«âÕ€A#â?x8ŠíŒU«àšpe¤ªêf Õ¶gý§U¢»È»*¡€ wÓ}fÜ~ÃöUa:@[¶hGÎ{éCí„;Ž¾ú¾êTÀZê©grr,ìxÂ'ñ¾×OÙ(´7Ð #ÔçX›Ó–gÔOᘠ¦Î¨¤üzºÑ©\7ãX¾c_ܗΊ -ÒýØ×?zðY©¥*+ î”#w<—)8çáSíӂcÈ—ò -‰-ôŽÂûµòÐ[dö»Yà Km iO$PA[…>síQëyƒ)•ŸÅj8= ¯WqiÂ9ãNjO®òåÞÑ[òÔžlî{t¨;Ôsڬ׳ÐxðQb<¯…áãŽðHÑ®¾àYÏiPBpOô÷G²‡ ѨSBTZÝcºzjï°:µžI “w.¦<ÔK>Y–¥Ý›`D‡^غîºaЩþ^ÁÉžTyVµÕ§$èó`à|cE½1òØô{Ï1–ÁyF’È—àñ*#œŒ¶;˜hú*A¦$’‰f²ØW¯šýékÿ~N£ŽëwìYb¯ÊÅÈ náÃâ0'MÅikÔ4DÏdóG>rÎz–!!œ‘ð*o -Œ­‘––DƒTY#È#Ò 4¢‘èV!W¦ä˜lºƒU®˜F -¬¬®J]×TncÒ·r oBz„?È(¸bÒKص—)¶rª¨ß%ÂQ›Y´w,n¥ê4Ø9f§Ø³¥˜ró* (´F ‹Ì''ŸÔU`<‹bý¥Mñ£öÀ:jÆî 
©E#)ذ߶c~û¥–’¸*BãŒ$XÂáOÕ[4L(øñ‘öŠ/K)p¬#ª W¤vÎóttþäؘºÔüßÒ=<µVŸà7BXÓ¹íAzòI̵‚ŸG˜Û.±,1´,4”ØÍ[²Ã’9ÕɈùÖ»Š¼€}a tŠï»Û¥WýW~)ý«9}‹# Ù-Å~¢^“_¢m‚¼Òˆ’GþDÃ=.Aг†(>‚®ò7ÎýÆd† :i³/â7.8µ£ãUÊ/sÃ$Ž°¥E@É<ê@‰¨Þ)µ8ÄZÌk TðïWRž ÷Ø-ö-z“7ê‘¡y,R%Èyá“…º3€g¦ Îù0Ç{u3`¾¢į’QÁæ2W€?äeYÒ¢Q  œ1OµÍEù2bÄû"ÖÐy&bg#Òž$óhöˆp Ý¡hAŸ’"‰}m…9;C2§Žp­¯3æhÏQòFd GtnÜ!C3¸û+ŽN,*ö;Á´¿ñk ½‰lÜ3|Ütâ^×AÚc—±ÍLOô$Çô#<’0ë‰NuàÖ@ƒwì•öõòî­úœGŽ¬Ìgr•,]_¿x¿Nb¹91bÛßXQÿ;‚ÈoQ/Ö"894ÊÈ.F>»­ -”ûi üt7âµv -ªW¼¯l³[Ÿ,Ë«D|sH†^šàïçQ_Xó§±Ã/ÌûBÀâ[ù¼ÇB¹ôu±¨áú‹Š¡àó))Ìò zÆ(N„ƘšÂVOauGGI¨‡ò°n“¬B:Æß8"Gí,.Èy‚Fë¹9)NƒÚÙP}ûûýéSGÄX^ǘTr‡áX ~FO[Z\K¤ -Ie„ò—â4dt†ú$ò_|Wùßà*4ºIs‹d@_”þhÔLç‘}§þ˜o-8Ðu‡¿–Õ·I7ö(yžö‰ÀãŽz¤¼\ûDOð¢u”/Ûóá%ÿ|Dž_ðÉLàÂoñ\ü,`%¥à6ÿN>ÚBTÜõÿ–|ÆÎ#Dõ™ÅPRÖÔ‡bÛ³Xaä!¬øyžó kÁ¦çwÆ Ûß(ï‘x¼ó\$®p]æe8Nå§"¨ -[tí±“ß¨¾~M0áLKˆ¹ïAägš) 1®â˜¶—<Zh'P¼g.*&œº‡ñ -Î# ±jñ ?Yå—ÖS™×d}z—z–ÿIӧ鵺å¨yXxx£òjHÁ·÷±®Ãë–®ƒ¼Éi|4îÄ…orš¹¸Wë#É÷ºÚz;4߬ -æ맼èÝbÓéí>AŠ¦|ð¤wŠ:η¹Çµ£ðëgÌÀÕC•¦ £ªç)U &mü7ŸÀ‡GðëºFÊÇ:võSØ︑í>ßHù=.ÞûêF`rÛËgÕÀƒæÇ·’!3$n¬öIã7S}E—·¥ múDQ9tJÖç,$òó®ãZl¿œÐw‹¾šJïø¨Z~ýK–¤WäµaTÅ#F[’¦‘%I’/öÇ W‹*ôöi†œî¥§árAü§ó%¢ÞÊH7ˆ¾r+J -3TÈPPëí‡]t¦¼VªÊÄ „´‡ù³ºÎ†n­kñ„”B‹TüŒTŽ”Ø¢ôë+>UeÖKµ6áÌçšÏ¤á$¤‹ú³CGæûD7×ÿNg}烱•HÅn ´koà)vÇì+§­X²ÂÂ-ø5Œöü^Z%WPÑò삉ºã¢Ùl±ŒŒ‹Bn…¦?Ý?ɬ–SÜȓޱ˲Ԏ·»ª»(+Ü0nL§ED-QËq+ÎE„èŒqÅY÷2gôÙ$mܧ¾-·±–5DYD¤ìsõ[ÄT7ƹ»‡ë9U4œ<“6_#ÙþŒó}2²·A´>)|lºz$è¶u°}'¬¤UÞÅÑÀQKÈ-ßÑ(Ñõ凳hààÍ9åÉðú™C¿yºŒÀ¿ !2ŽG l椽Þ]¹}ú"Éæ¬ii+©­ëÔ‰2CÂ';«ÉI»ã&ä¾U-ô ºGA-œ*BTÜN¯»*¡·–)­®§– -|âA_Ç·¼t´°!ïí7û˨uŸ¯o,ÜÄÂÅñäH™l‹dÔŒ’p`÷ƒ*›Vš?胢}9,c…^ÑÏzÈ—þsL29²øÅ^¹v4!ƒåêÉá¯t%aëðÎKiKÒ˜SÉ»ÂßÐ ‡‰;)o,¹R¶Vþ0åœ{f[aÉLG´ä·=(êÅôuÎø<Æ]ØaJV]äfÄÍeP"ZØF\Ùx€ìõK'©ˆ­–¤ft¥÷rí~óëãÚ²¥IjŒ¤r'h÷NK Íλvy5ëSmÃjƺ -ÜyJgebS*ª>1}*—6ã"E€È)lD)˜!ïõÜàãJ ŽwxŽ4‰Yº'^Ê'ˆ~!Yõâdöl›O”0+Å´‰X5 ¬í7CÎœ:n¹fÄÜ|ûÕµ~ -X$\ó*½NŠG±½ÎCÐHP4sVfïÀŸODv›írTÄs›n?k~Råô~óúÆ<7%õx# M !ë1F‡:©vÄò²náD¹H*¢å½C„ôaþ÷ZÑþk柇¤>ùO’0={OPÞƒ‡ž á¢gIÂ:½  }ßä *-–*õ™‡m$–Õˆé‰;Âýû‰ªÒ1ñ`}2_ýX:7øÕ ¿%@ù„Ÿe6RJ>#2\–êQÚ= "†ý/Éà»…9qƒôyj+EdÈVñw<šæÜŒÁÔ@I¬À¥ÐˆC©(• ËÏW¨Ú#,#zä>X4‹¢ºÓ¹±ž‰^+ÑãúA,î#h½Ö¯¥r£½Ð¤0-lý¹ôù—xY÷±~j¤ðÕ˜;„~žYóµ…­Jh7VðSL¼(ìd` )݇?BNÂPÈ«aè# בµcf&VO"培Ÿ)`LŒŸìò*j¨Ë0Ñ;°_6­,ªŸ…SÝlfêÅîÿAèÇÔ[Ϋǥ-*˜kyÁþ¢X@]íN½^,t#×ÊŒiê‘™D…Þ" -þ3ŒægÀrOáögjDBb¶—(áí©šâÍÈBÇåVÁûwÞÓ†[¼E­tõ%]¸=¤ç¦$€É-5ªÄ9ÿ‘jLyE@·CÙۖóÃv£`R -Ù7/15.ò4Qeeà%ʙ¬ѳrJª€)3fHäÕTÞÄè=® w™7ªÚ -²ÙÐÜ×öþ%`Æóè5 ùQyÑ9/¨ˆs®;ªfxÈjðü¶Ä.ýÏjüªVlW bƒ—…ÊOÍ ïU‹æº\ƒ¶Âa_=¹NÈ® ~z($hÿÆ`F Ô3¿Ý¥1^Ct*‚Èé+óžo]HÑ€g¬O¤‘™ö Zª¾&Ž"ét¹ú D ^ ù)NæH¤!–IH¼‡wN¡È¹ƒ»DÒ"ôyv¸Š*pŠ % p·Ôˆýee{Âb®ì2ÈgIQ$©ç_Ôî@Ç{5&°BgØ*g€×ÝO€wV !’Ú¸¸)5„fלˆzwÍQ¹Ìyç_o>÷©ˆ}…ÜþŠTùńǦ!eï¡W•Äñ  ¸s†’Ù«;(Cnib‡YÇu©!;Ï2¾­ûY¯ø?¯ßÚff@Dw&²»*“l˜ÙEŽHØÖî0÷;:åö-þ¢8K -L¯ÛSäò¯2Áçùü…4elPiðî »íœ™9¬õ“â.º–•çKüyç–(X#ïô„ÍžèËÕŽ°êæÌ¿SŽ¿¹t†ˆë§aQy¹C>¥=rœó-}è˯ Ed /Ó0Ž¿–ruÂ×{BUjV’EÕ6Ã=Ý•·5¤°¾XëõSseÓd`ïAÈJ|ºE^;…. 
#\òÈ»}m¡ät@… - ´ -o¨Þm)yc'_bî–ƒ -|h Å­GO“¶ùÈÁE2µ…µÜ·{]G:I¶Í*‘ Ð]Wañv­B`]ÕºW9j ¨~R›|¿ä¹^QJ™, -so¾zõû½Áˆ‘a–Dio?ž«Ç7qöx2x°4€èL½=Ƽ™þü˜sJùT#Â󀌘ñ¨!"Oj­Ž$¾Uf õ¶Ü`EÑÞïOEÍWÏ™ÝC¸}âÅË—ì;3™›™)×ÁÎVà§3Ñzž…$Y`}Içw ¹„Ô~Êy™ 7îk›‡Î6‹cS®À¦²Û±æºå2Szøͪz»»÷ˆí™s•GAš'S‘ªmô9sØÙ(ÿPeÛ˜+ýƒ¹ûGXþuLBŒß³þ®ÍP‘«ý - Î³ì­öÔ[ó‹/¢•ÂÐró†iº'³°Þó-ŠÜ?NþѪc¥Ë,ÁMI.çTKmµ«Tó>_`3Œ€í -fèZb<ì …þÂV§[«h¸ì-«ŒËNïÂAÛžöF¹ ZA´AèÂÊç“€áq¿áf.m‡¿’é;KÈìåý,Äý|jÛ.%ëkaM#€r™•¨ß£mW6ÿꎄRl,š,Ø Ìþžȃ‡úI|dÛrjÎÔJV\.‡fJ ¨ ãÀáñ5{R½8ØS?êcð<òËw‘8 éÔ$%¢É«êÝ’z¨(–Î _AËÛ!ÕI €3W¡©g/ïdXÍÔºSo§ØO'žòúû‚r§w1ÇÁ>•” - ó7ÓïFå/ ¸ô’ðjzJËÏz5wºµÏÈY-úRç±È2 µ£‘wôȈó&þ’otð躪Îj?¤œHV†ÌÌŸæëÜÝ#¾ô ¤c9ðø«ìeQ˱w-q„ƒä Qp¢êšGN}åÐ-ßëJwëü©Ô•âúøOÀ6¤B©û—¿.¯@>stream -wÚǤ -0 H<4çŸ7#‚x ÿØ+°QJÏŸ’oÇ’6@ø­Õ¹ÏÆ9·\©9QKÈ©Xš‚-8´'¶Ì=é)#ƒk $.(´ýŽŠ³Òu«Ÿr2à¡#²Ȓ^—’W]¥Ç¹ -‘µô÷ê°Ç–ÐÌ Aèñ[AÆGŒxã1%÷<¢šÎ¥?Ú"Ñ@;8ÖõlGœ²û^ˆO2 -‚_Y[JÉô©¢ƒ†Vµì¦oÀS}À–¬è aç×ÅAtÏ;Äç ‚…EQ¿K÷ŸÃÁ‚æŠ*{ÄÝ“PkÖžâ8œ—‰¥^ÓæסªÝèæõIñ¿Ab)ï¢W®•ºuòhbqnÁFbåqxaÊ^&g’y¶.¬<¢jsf”x@1,‰3¤ÿ;OšyüÃÊ¡_ðŠ -Xúþ¼mHµÛz>áFš¥ñÓ8î0 «A‡V~¬¢¬£p﮾ä=ÊK´úû"#ªçØ8(C¿â¦ ¨·pŒm}þå‹n“j…!Jj£§g0'¡žé»©¸åB}EŸ·¦¦03TKÓ8æ׸œÈÚ¦{‹™àß òû«-4 §Lù J+•èkÏt•fUzê©ZE Òiôdú)tùG;g,õó®ö­ö7ï7Rúuy íƒ ›VKIü{x‰[¹JD>\b¾œŸš ›YzOuæí`jQb/_T<*ckÁ{L0 °ŠÀ-K„ïj åm B4—¹qrdkñèå:›d–©2úƒ:Ü]=|óÍg«§A¨â07„¦½sEÌù4‘ç¨P -ºŠv­Wƒ«¬#5s ™Aìfú¨ü„&+Ê«ð‚Rt …‡êÉ~Š¾ÁK«º›G’Ê¥ó©!¿™«ìãìÖ৯˜Ø€Y»|âÏhJŒ S—?‰'F¼ wÄlþIË°Rq ù©ýWe7ªS` -ûJeµ) ;¼lúê)»!7˜ pšX*ÿ²EÍñªãËÁ£7Qסu3ç(p‡Óun`>…3qµtNX.ÅL©‡jÿQí—H|*Ê$žËö€²Üq•º» ÌĤìoKÞÏ ÇÏMãˆV=4ôB£¤%|¦MòÔu‚»Þ,9L¶ãRi ÄÕ°Ô‰ô®÷ˆjF²ý@àlU‡§ÁU•/TcÜr`e©ª=¿µq §4còô©„+Ÿ£ÞXrš”=Efª|ñùw™Qko0žó˜Gˆl+øii†Çÿ).9p$·Î±… µ÷MÜ?¸‹Ò[×鉭#ÅåRŤØÅ!{ñ‘õÈòyÉ©¹RáH(GDF\‘ùÝok(‰#¨ûûåô°©¡ûJ7Á‰Ä!ê<tˆáD™0êõ¹¾–$o%¨ -4ôTôÓøä<‹/G9#„ì[ bŒ¿œü‡us üøÐK>¾ˆÌý“ØÚ>Eš+a\xˆ¶rZq§Ø:8y®bȪ2„%4Lzîeˆ>1žO9¶0¸5ê€#æfIà8Jö°PH)í~\úL€â:kRØU$ÍÌUy£ü™Ÿz´k{DÄ,ä{¾³d°/Û1wZrœ'døUC6¦ÕBœéñÐ;ΪmÅú;/É6 É߸¡îkïu˪ÄáËs¢¦˜ÔòŒ"XU \s¡!¥¹²ó“š§ù9÷½ÞN¼Íˆ$À ä:,Ñžx“îõ! È1íµu_Ô…!-Ï++gi2¢Ýq«!º[fáÆPWùŠšâÇÑT0çudç°åz)Š‘Jt·`bÈHB·PÕöÐÁ–š"®#¸eéæ¢Ðv^Qeäï±×Jí ß‘Ù £¶‘&òù<ç›,¦g*h¹ƒhM†X‘ŠV*ÌkZœ±êéÞÕýÈ%d‰B(ÑPbQ®Žä4ÓÔ™ý~²/åúhôÜöûI':ÝZÒ˼[ò];‹VËH2_Â1,Œë£Rs} -U|Ï&±G˜›á!ý“«)>·H½ÑydI« -8WNl’ºŠBØ„õÌò³há½6‚U;þí¾øÂqîr‘¶/Pk̳ÛïT¶9FW…·\·ÊK!CHÔÀï¼æš‹¨‡YŽxÒe›Àµx¼‘·Ð“tC\ÄÝ õF°­Pa)ÄÏExgú@,®Û³~IÔu„з¼ Ãô!…‚¡×Þo"[*¶UmÝè]Ë‚.$húØÊÖÑ•†/'Ÿ}¼ÝÆ`KZ¨®èù*î)és èÖíßr¼§°co¢Ì—P¢ús?à¶;»Gȱ4²¨xmq/Ö‡ -ëõ¬(æ*TíßFLõýnN`O8­iœ)è÷ÆȸëV¢cž8ðÛJò ¢>p”¬ÝË~ÆL(´š¹¯#õÙ[ÙÅ¢£·ä­Ë²÷Œ~>ÙY¸H4›Å¥1t× ¢ªdå±-ËlS ÙÀ™J'¿,š+±¼í÷¢. 
®½‹+ÚÅ;¿Òl~•û™"úa>{®#lÔE¬£¥‡¥lʧ†(ÄýZ 9´^z#æ9Ï›†›ÞF\µñüAuÞS 6/ZÜ«§¸K ï¨tÏùYÜb¢£,¢bƒEC]ëñøaìK!œÎf”Y ±·E›+CÔ™w%|ªÔ!8φ^U6Èê1A…r‡¢äÒgµ‡£ì)#P£\àXX»5¬Öý !s¿¯uâ1ò ØOiðnå€;fл¡2d{ÊBøñ&,Û£>Â~g+OÌEŽEBAq¹îö¶ Û9"@¼–Â\P?ÂƈiS]a£,s³{4Ò#v‚¡iNÁ•°jW¿ð%ÎGÇWÐòñ“ÖY6‹ŸÊgÇ›Ñg¤{[/Ô Bt¸jÝ“Jé1=  1ØÙCßï¸ÊAf|ô5)iWæÆ÷›³ öWŽ'fœ¸ýÔŽÜÞbEŸÙ4T78‡Aõ ¸ªÏÆÿ¼XóÐö$E±èQ©yÙÛ^ þ¼ãPê~¤µåµÜçT®gÊ{Œ9¿—X,`œqXì—ñ}O>L¬ê¡a»FœÉ‘ù=kª'#¥_ŽDÉúJiss¶ìé¹*·fwÂ]¥è8W\ ÏWÇÔ×&”½r;ãË·UÛ‰ª5/xÁO$h‰@Wàc%íÙB>sºðlÇI©´…ƒ¢È5ï$'g“ÜîC쮲‹T÷üÛ»ò»…Ÿ;ÆÛÑ3œ#Á!~ɹ$¬ 1¶+S®ÅC‹<¶Š­9HÐç_æ¢-%:²wÑã¤ÕŽé‹þµ½êiLÌÞóbð¨„mU2trLj‘ŽÒ°óõÙÑÃ~“zù†Ü¦×U}=ÄP¼Wü‡eq.·¤h˜ëØë8œ\Y$&ª0猉Uí0á£Î]»†0ñ )ëÚa4à òϪ°ü ƒõ%Ô}%n0ý£× cŒLi«\sˆÆ7&ß»Nwu¨Wždàââ<™ wFˆõêŽDò—Î/ØruP ¡Ç{È×Ü7›#ßHÈþÎ ü ±æVšVkϦ2ö(L¾z„Yec‰öÇyFvAq+䜣I“b+šLT™Ráé:Þ-àòÃ]ÚÙrÑ')Rë «èŽñÍ–Bë7%˜öØá‰Ì©{Ç ÒKcdW54$P] û\©4ÚyÄ£ceÎÒ|n¨Ï;qÞ9u0|V1@Ô+zF”t‰ÁÅP².sqYn)­¯!ýÊá…uTÁ ±ŽQå^Ct~coqeãëéÕ”ØäÃœu~#q^7;àaœ2¸·M¼Îe(ˆ[ƒ&ÉÛ´Ú[=7´å°ÙŠ#6@nψ7 à¼?$üa Ü|olñÇ~¬Âg•ÒŸ|‹×}ÇtýáÕã ¢ÇżUƒæ&Šö31´wY!ð@€vŽþ}µ¬ý) ¥î‘ƺn:cȇqÑù\ú!ú˜×Z½Ç_¸–C3Ôpåçääenc1N‰¥„AO¯Ö}¿¾1ýV…XàSF¼vúë VCJ„’p³˜°€4?*Òl»SÄùe«óS|øKXåÄø-øpl8¢Íví5ˆœÁÙË}8B„Z¼H½ž¤†ÐwBD&ò]D%-7V,zðÏ(–È¡7i¸/MŸen7gü£jHˆz -2PÎß㬌"íb¶= )1(ˆz -“¸sãž[:õE¡[ÿdí–\§l3{ª^†ÌéÇ¿õÜÛú)d˜RGš{/ºNTjR’#ŽçÁ÷ÐyÔ|ði}‘~ÔÜT´÷Å ÒCš¹zRWOE>Sô -×#ü"¼^ºŸÇ]âÂò:—¸}FŽLK«³iƘÑÈÙß#ÚIúfîOÛö•\ ~Ϲ"hbµxMË- ê) ÷gÓÒЉZ‰÷(‘×¹÷¥…?J±„Ü?“ -\‘Àè<]åŒg”3(_WRHÃÒ4Ö~Ž$ðHßõ`Aï1:wc/•‹¦`v¿‡Dql„APa„>•ÓàÚßÄ*ˆ9Ò@OÀÃ3`¹T_"¢±¤ÝçÆép튌:™֎麆T5B™/úøѼ˜ íÀž ,à -¦—‚ Œž s 4b3ÑO÷dÔxÛ߀žW¸M›Œ@NÄB‹O+w{ÛEªñÉ™Þåfƥꠀ±¥|=|¿Š ¯=æ7ê¯ç#Û¤iƒFÏòíמ‡_Š>#þ]#Æíèþ¼mƒ¶Útö·¢-ŠøS8÷ŠÕ÷ÒUlâ±ú|<ÿîpiã˜Ý@r%" áûðÞ#'ªymT¹*ÙK0©^†=nQÉd/Ój8Îb±pÚLè¼…ÅÒ¾~Òöa Ú>\%ä^‘!‚õõ6Ôþg~é¨Ï B…Aûý±E:¿e›$KÖ’ŠGøŸRÕØVCØ«o D羨}JŠ%øSjØ0ìz îo_Ižcä(ÿìh+Ì –-šîGrñ‡CâžQ«UîyuléõÛgzAzìY‡㾸¾{\N7à³jvõh"–̨!Ȉ´_Z0²…Œø"êúKdÄOBËÏCêV׶ŒÊiŒ8Uo õ]Ê9t ´)/^ ˜êëÖ†¨§9¾_w°’r§t¡Xì4h½8aô¾8XÆ9vmafŸq¢ÓôQ=Š^÷Èh‹%£€¢·¦0ÒÕGƒÞêÚÆ^6¸T´Íòª<û!¼.ààë(ƒTI | ê¬àýW„Òw=sÒ -L:='ÐÛ€e8c'°gÈ’-¨Ñ‚§c¤Ò²¹ªÑY—™ËÙôj7A!ÏØÆAmr´+)}ŠÛ7¿ñ6Ê’j`#RKáÚÑ ;¯â …~šGʬy[N±óYøWVªÄÿºêCÉËÕóB÷Ë ¨¯úU€'E±ÒÅ|À›Ü¢†±Peƒú2â72Ÿ*fä ÈUžð…Å´*ŸºÌÝGib,¤×o&ßB£Åìæ_ÖS*WˆbY“„›î ‰Ä`-ý:VŒÍ®WŽØ¥_¥9ú—¶×þoâã|ËKýæåÿÙ"å<•¿§àŸ{t±!Uqò “¡Ó49E)á¸FÄ!>3*>Ûc˜‰µÈ°Ð3ùo!pćä:Þ|8du²L(pU‰Ù¢{yúy)V,d>&2nÚ¢V8zTëE/$üo‹ço±WÔ;/Ç@/õ£G›4æÛcpé6Pñé!íÂÔ20ÿ€°%r³Øo´®óHQZ*Oö&¦"·­žj\§û´·gÿÞ*I¦ëV¾)ñáz<įN ­Hî¢ÊÀ¯DP±sûY¡ôÏñ‘3¯Õ…¯CTÎØ`|uôŠé âú_Éh!d…¦AÔ—a‹HØŒ1Ø]ô›Ž¶#TðpÑ›G?fç6^|1!,oH¡sLì—é É7Ö»ŽwQ8n„m„V[~Œ¤=ˆ÷”¢…}щzwnfžYBŽsyäÎMYúTZåGy6öX]{ó4»êŽi)‚M `+öÒnôÿ¤îAˆ9B´ù©0öĨO½¾Tx ¼ï„ãÙ˜|RC&ºQ=Ê4»Ýa1ÅñÙû®5„¢ÃKá_êÍÍ”¿ÍÝQ+9è”8J€IúþiÛNƒ'Ð\œÑEÎüÚ„þuè˘…F%9ºmQ)ݸŠ¼­ }IµS„ÇÆœ±Ê^cì‚j=X“ªáà1@Õ¼Õ› '³n -íTaÈ<2}Ó0(–S›I\í—bƒ6°•òdäX¿á;¤¯J^ÇR?#Ÿ.dÞ¯†ÏJEÒPÆê5;®R”xPÒT;?ª|E%èá®ë°<ÈÛ255”…U-¾d©ïhÚ@Ð]|†”:ï˜ò^ïK5RAQ‘rCF<ê×윓ÈAl=j6sŸVmóKL_7VT•?B iõ—LÕ4¯¡v@×ÜM!¦a;@°Ò–>‘h")ˆ‡è•d*ÉWð>¡s§!–:¢ØFŽBºÔ ˆ"ïXqÈVÆL#ÎKz£ÁˆÌË s{ÖÌ"Šê ÐßO¾U¡ÅF1°ÜÁ§GÒY«âä”yÁC!Q— qgdIÜ àþàÎ}ó}W:´‰»´p·Èií²Æ«Tü9lb|زžÿß•àþ§!Æ7ÔþΠùN:pê½E‹ýÓÜJ ÁzÉ -Þ .SmŒ ‹úã[C¡[Ç9Þ -ºA7b^ȘèºgHyѪ׿Ô‰{…ÂÉQ+jŸ?¬æ†Oð:Í{ýWX*Ê.R˜*^ðžP¨8Ïâ}ÕÒ„¹*s`¾˜6f - ŒOQÍÝ‘-‰r>Ž÷m‰ƒ ~K­!EP»$šmŠBjMàÒÀ4wŽô•(ÈÊ×Nñ•WvIn?ÿ’eLaš[a^FÅZÊ…×F¡”ÛÎv 7ë•ää˜uÂq*¯ZFñÃâÔ_”Ï«°ÙŠ‰Xƒ»Ç¾fvR„.tðà4ãgãvˆJVÒ/Å–ìõg6êÂ׸5?Z…Yß‘æ·\Ïø'óñ¸`jÎËwÆrT/¯LDƒè¥Î2Oü§<9 ·H%Ïåá!7Fé±üæý½k†#͹›sõ[@GñÃ;Ë®3ÐÜåF5¥Y7‰Lw;l*„?y’žulüÛþÕ˜ÿļxÛçŸß¹§ù § -Sò¬A Ï/.VWèmáþ_¼¿Õ÷ÐNFÊ““è Õ"·Ék2- ûZ}”ÝŸ»Þ[øDÂ]=× ¿92»áÌ7û+èêwý…G§xÇÎëXX+õ¤#oWæ”8v‘iãõµ†›¤™vӡ§}Ç5䊑’Sä q²n(Ñn‰€^Å„b¥‡CL7—êÙûš 0ꢙÎ,… ŸæŠŠr_¥/n„g0IA¢å©Ë -ø v5JOœ±uô‚ŽzÉ2ÌÂ*B½s¹]ñ>M•§V䶩œS?ptÿ~R,¿ -<ÛØÆ.üN)aS.Â/gAõ4Ð4ˆš´<†ZøÏ¿ƒ÷’4>_äBÿÿAGhs‘ð ÁôpÙ…¦ÙK>k¿C ä§s<"T×ñƒ6Ÿ°íúGj˜l¶çö¶)òjá%JSx=6ótç^ªÈ Rg„²/©Âžl‡¬®B»*ýˆkiYª„á®lRŠÅBj&án‚†…f~£Ž7±ÐTöGDPÔ´.;-uÂÀ@Úz7»v!êØ<õÿZ‚«ç|8IG|§6ª¥ã+Þª!Bú¶¦)•!¾50ÞŒMþŸòÀMñ”8uÊe$Fƒw(Þ\—MGxò©ïYAö\M-Þ"ßäÍ!Â?¡{—nø–l˜*QÊ™£RK‹ÀN—UfùÕì;‰å7Œ?þŽŽýx•;*0#²À¾”e¨ÿ1ŽÈ„õ몯 ¸ 
xæ“ÄU2’·#Õ¦õ*™ÓÒ~ -siýÎ ‡«!_9òC[¤¶¯Â4‚eWCÔ0ÏH,Ô#͇Óö¥$uþ0㻨z^)¿¶8uµtŽ^5âèñC Ž8# -ÍÜ0¯¸†^é8q-™<» UJï#’僺ÿIƒ|ž É… -ëf7ÒŒ79qÜ;< àï(ªºÎˆ^’%”§Š>P+²ôü*d‚¬‚Z1 -ñmæP$Tó®ðþæm†÷ëDF?ªæ ô¥&ˆ?Õ¿gSE†,ø°<a& -¨P Î;Óé¶2÷ó‹ FV_U¬Ÿÿ¶ä$>‡‚Å5ææ(ÞÀ˜¶µC¡§ºu;$$Îà¨!rÿº;ª«Žæ“ÈÝ L-@|*šUr¢Œó¥õSmîHóhqâ÷Dßñߣ¯Z[S<’²ë§=/ J8ß-Åñ¬Ÿêen?Ãÿ”ì.æñ{úLÑ쩧2é¢"47…–ÍWNÍS"&ÙK:]úyÈÂ?Þ -úJ[”´ 9|Íí›>y~êHíAñQcÝòHïü¦§†¾ý¨áÙ”¢“R[Ž#yîïTK.EŒpEöå))”“B÷šý‚ŒŸrŽèâí壘Ç'iá¯xûˆàÏÊì7´…ù}W£Œ{Ö8g°Žó°yÍ‹þ[ÔN|$ðñhTÞ1€cþ®™ñugAscÕ ÒbÀ„z¹››¯™?a5Œ°¸Ò=TY¹dÏéo7±¬F×72‰tusà*5EO%§TuÌ— «»CSú!zzJLÔªñ†©+eµãoû&”ÓiŸù9{/^7`ë|>ëADE¶¥ÃZð½Vòõ`àùÒÕ^q–¢gƒ›§Õ<ŒÎ¥,Ê^aOõ1€_î¬ò¦a»Óöl0¸YÀÒ´+¼“ÀÞ:ü²Ô@1ªfÜWpKpÅUœ@Da³ª={ã¹ ½ó.¹è.©"Kæ$óO˜)`ÇÐÅq.˜k¿ª*Dà×à|i²EêBËàzwö¯ˆc)µÊvª¬ðèãò“{ÿ‘rÿÝÖ²ºyè©lHŽ¥{.À#ÍDwa-« (wžEûóV>çpIƒ"@æú‰CÂ( (#P`b«é'òrJ=®5I‚¼@ðÉ6üDI§Œ$I£èéR"˜;ÈÇG|ö”îU>q@Œ 5UMö.äj”sŠ©s*sÂŽÚ¬ ¨ú"¶‚õ"…Î×ÓÞ«¸dì{:{qà+; To@½ÆG¤†çŠ…3ž·w%ÄÆ#"¥¦×ûÎ7§Þß ck®sù pßéŸRþÆ$]aŽƒx#…ÞVÍ­©ÔÛë/ÄŸJ–ùäö­ F[âÃIå˜Ó¿¦Üè8 -ý„—~Òû?QuKªíäSv -ۣѻ5N¿¤\@Ú¦F3¶áKq1ô¼Œ–VW¦HÐö:b=ùã¡•½…ÚvùÊ –RÛÅÚb-¦˜s5:óîZl¿œg)ËétJÔ[|p—·;ö®\¯·øƒâdçk‰Ïµ DáÓòK71®aÊ÷*wêµ’ÇÚ®H*•ø܉~}ŸÕ¶ÂNêtÀk| \ù©0Ô×ùSµ‡zsص£öÐ'&æCÀ}ÿC>÷[8¿”Å¢„©’Û$öº|¥‚ÆÛ>‚ôYé¿TX®Žcç !{ŠÚΨ»ñ¸°…Eð@ɨ+ìŸàÎîbsÐ#h•Qe²ÆÌ¿Í -sLsî·Ò^íÑW›7=wìù~3dP0[Ɉ»_&Ï|×bÚøWIavâ#IO±ŠVƒ­®íø“DäÐ×¹²©ÒÝêQ=Y«6mæøß¡ª~±m)lÊ©ÕI;–"ù•W<{(S'2{‰—!šéÄãçûl5$šì‚õS3 o=ñDCN)û¼>ÁUòž#Ú@²ÒPñ£ØJ„Þ FÊ/,ß—xË +˜a7•Æ^ªw]–ô-Òµ!Ðè´ÎhÚ[J:”¿ˆë×+¦Ý Œ®ÐåÓ[YøŒ!pˆ ²ïéßνUjÛÕ3°8mèiá=Upž1ÿÕÍ>$?¨ÏýÓþY -ˆ¿åŒÎ%€Ór.ÁSÌÔ¿´Má ¨©3¾cø”W”på_<QCpª*$w árFß÷®²¸A—àœN£*¡‡zÒ™3øÎe¤”¨ËÅÏp3s‹³‚äx¾K·½ÀŠ"’H\ªÍdé -´„õPAªm…”é{T­¨c"áF*¢¥¯ Ï­!¬´8`E_%+˜gšþgÕeá9bŠ¥iéõ2Õå=üµ£™r—ä|PÆßböµ£UB$·AÀ$ô‰ñ– Gƒœ÷䩸z³1 RÊ ÀòSD}|Ñü0æ¤úZÒß5%ßyH-©IÑ!!8W’òsî¡#U|•->‹hjû»yÈÛç>Òß²ØËëè«BæƒÔMîT(^߬xf½ò0ÞïŒÙWsCɇ‘xì.—‡1ñÄ…Œ&=¼êŠQ51!ä~‘1Sor]0 w¹~Î5’«ôÄF´µç3‘ïM6#ÉÇá “Uôm¤„ŦãUþ¢Þ•ós®ç>‚„ÐVK]ÙN~²ÿ%«±¸Î;‰jD9ïE¿²^߈nhTpÜH“*B^ zBu‹9;˜Í½\Á®È4bõ¤ƒÒ€ú©¥lÒ}œÏ¿jR}{ë´`ø~m+çx÷%A¨aæžÕòQ¸ë¬Só^’)kcÃ5A¤ô\ädFô˜¬’7>qãQÀ©B?c’?>ËÅ(/š#ôkÏã»ðÑè ÌuwÅB®.)Ñ.mOš¼ì¨ÞÒÏ`{[ŒÒŒüŽõç¹*ÝfßÛ Dò)'m÷b/ߣì`Û‹‹òD}ýû@U°cÍwV$GU´¢Çã š<É…¿ ’sâ`Ê,ŶÀ»¿šÊÒÇøU¡#2|“lˆñl¿"à"™tæ¨s኎È?ÃÛ–žõI|ìHãQâˆévRªí*‹„9›@^jüÒæ©Þ+à¾1Ë`bXA¼ÖÝÔ¾ç2l#ë¼Ë"˜Aülf?{á5Fé^‡KÖJ„ûǬ>b²¶zâÀ6&Ö¹¬-FÛÄé7:ûæÆ ªe´:ÌÅUDמÓ\e«â±ó©¯÷@Åi¾H¿”¶ÞòÔwÔ"ˆ9Ÿ}³Ûi -_¨½tÑ} ¸½É5˜6GàG‚ ö(+™IК£Ö߶ ÑúÇ@ó2æÍÀÓsxéfþ€éyî¸}Y©h~ó£ž)TPS ùÛ>¹”¸.ŠlΞhþÎõ‰T£4q–¦õuDçz^w¥‹Gþ±V¯bù§2°<[D—<Ô™ ƒéL±GNŠ¹* '\å§ Ê®ÀiÉ‚„WfJp{±þ¬ÌÔ6Õ9+Ì=ç¼ÒAˆÔÆJ^#×¹=?«èSÙ›Àz¬ñiaw"Ÿê1Ó=ãgqW]zi‚2VÁ\ò5®÷óXÌdéqŽ Ýs0žUb¨!ó¼êiÜw[ô­ö'Ò3Ütlÿ‚½í±»¶oÕÑÓ9ÏÁ«•·a  ?Ñó¡@ä½ÏHO3W¹NÈÒ™åݱs:©óŒ^È<ÔÕô=„Qâí´mà‹Žc¬B³jãŒbá„®¥Íx8ã$ÑJÊÈ°÷—Nzp{&M-Œ'©Sd*æA.¹‹ gÞžc,,Vþ+¯êüp¬¾UE¯Êçü{H =Éõò„:*,>Õo¿ÓŠ‡©X—&'ÉÅšÄ+÷Ø ¦(–ɬA–mfÆpÑ}½Hâ…€õѵÿÅ_þMNv -Áó,¼²ì(YYh¿X±Î i„-³Õf­«Ò©Cy«$Ô˜9‘ñ–‹ -ÔUNÖUäC3øࢄÆPLÃMÆwU‘ÏÙÉeëLC>Ñ hˆ°¨4Î!»* ͉Þß¼›Ê“ŽYéÂÍ_ba0O•³ z´èK…8&rQ³î±Öë5„\€ºFI†°zBø: óL¢–Ȭù:K쎾µä!ò_û{Ð-ÄŠäO½¿¹6Á[èï½Jß~³ËÌ4¾o=RFðqç”êôãÁ•ý“ôQD#Uy -#Ž>?¨­`Þú—˜þsLèUàøÖ:ǽô)9\¤ÎùC;±ÇSó9dU¨=cÂ7Ž°§Ø)ZI=¨ŒP÷N1çz"ʧU%T‚Q̼¿A#°TÕU|É©ƺ+ÜAãî-~ãùˆ”Lž+‚îæ€ã<´!ò%Äp|AÖª`„í‡Jãž †t²Q‚¯Ø[/”ÂÑ›3áÿ­;•×w+ã¢Ýú ήhkx<Ë­÷/Îí3f }ã;¬ H?(›wXŠ7t œ¿3?)¾|à»Î.¹m¶ -ãŒí -ù`-¿ÈS´óí똴hO©´Ã;6“%÷À鬫T= ë_‰ôÆð¢Â†.׃÷2Bð^Z2Áíñ©ãÇÉñ]`{„Ƀv!a ­BpÉóÛBŸïK|ž ßdgëyF©z(ËykÁ§?HG†Ðzm±´Ú—ô°þ%\¯µê—HTQÞ›Ô]ƒ`÷øp¹agŽ1Ñ“â› 3=”ñ‚TûýÛ{K~ Þ¢a1lÅðg6pæˆÚáÉÏi«õ-ùýZöë翈r¡g -mcôhˆ½J[c9déiMâÝ ½+JÔåÎsÅÕfãamÁño°ÿçµÅ¥v~ó”$Wf@𼫡c@ïªÕù2JŠ‡rÜ™D'ŒÍ éõœü{ Íìy0À!S씆@¶ ¶Õ DÓ§â{©(øÔÎêgú²GíÌñaI¡þI•¨1±PÝÜV‡rVB&52@êsmG2Â@…9½ßð×– /*è½v†˜²c“BZ‘‰hŠÐOpõØê+DC½I—) ÿþK­æ¢ö$…sYŒð¯¡uCp§aÌ‹ºªÏ«Ž™^ãW “_üáÅOUó¼#ÏèQ¼D²”G,i`«R™Ã¼3­ifÁ}ÄéÂ9¤ž´ó@K€Á<×éQ9Ú°ß %UÔ`!ÜŒ -Þ+DƈÉhñ=Áå_yÐ#rkÌ ÷ç€MaŽí4¹¾‘ì咢È®‰–äëõ.Ì)ÄIb³?²ÀˆV#Šårà-‡‰ú<ôžK2–|¯_K}¸€~*WÚ3OlÛ0yÌùG¸Œ˜=ø¾Kßàf[^¿ãr®Œ-&//•É¨ða%¥*:/ƒ?K_FÜþÂÍÛ1]Û`×ú%ÕN’¥OtL\榒sD±ÍÎHù‹út°‚Ò—¹swBCÊ- -o‘*K›ÉÎ8·ÆGqc0¡Œ8ÙR¼Ñn ¿<*¡Ñ9 :•J*Fý¦Ž 
-èõÀþÀ6Ý„Ù𶫆\â+²ppÑßx«^öØ°6¡>A¾ò» -¾öO Šé Ž„‘Ðå»c7?`š?µÃ®_ü%…‡ßâÜÿΠ'ìËmi>CŒœA‘´+VZó0aKªÖ») =jäBï3èdèý° P‘zŸ÷ALÏH1+c1¡!Q0ŽÕ“ºd¶ékñ¦. ‘š¸…NɧÛRߌ(¸ÝFP»ÿy¥O´×°ZâDÉc>G¥ðÍÙöÔ‚JLdáßÄ@Õ»‡Ñêb ¬I¼žë—¶X6éÉ@ \¶M;n+^àxÂɇZ*à0Jl¶I^‘ð£žoÛŽÔWÑ[ú4œYRÉ’;ž!Šå…]7>3pXêãÁØýlZý+Iÿö©äò Åí¿¿žqOÕ‹ó u6?©4ÚÜ~Ÿ@$MHˆpo,ž§¿UÄ™@)Ç߯*âO©íĵhRÜ‹åÒù°†i1‡T÷Ñ1³Y—r7Mg‘˜WôüíÅIv¶ÈzËöâiá]—q[áÓ€7Yö´Ñï'Ñs_:âàÜO I桨$K²<; ýMh¸^ i ?« ³ßÉ¥ŸÖSí_Äì •Àÿ:æûª*Mˆœ Ÿ}®AU͘߹»¥ëEêxAd™×:‹gh 3®ÿP ¡0j£0~ºõ8þ?æî%I’»ìr> Q@Ð6»5j§Hµx»%R³/¬}`Á¼néþz$)|žS3UèÁùìO@…Ç|fq!V¾}éÈè(H‚?@ҀⅷXÌQ Àû‰ë•dú)×g䊖ö lÔc™6^xŠw´þÂÖ~B‹mR__F:¥q*>~*uÂíWn¾—l”Ͳǽ×_ö˜ŽÄ68‡ºA¤Ä•Í–øgÂý—¤üO C#Æ—Ø^tÔç«9‡ßŒÒ᧙#¸¡ Öw¤;Èxê²€Ù¤ª –S]c+ùã0ØHUoeÑ5ÏݵBwŸÑ@Û,Ù“7?‘>y¢Œ!õy é_†D¦#¨: Ùž"6ôÿ°©,A›Žžvªa®ïdÛ’¾ñ lšÛ=JÍqÒðG•ÌçT²+ãZ³QpXé¶ µìÛû÷q»OIˆ9i 49+ž›­\®ñ2nfO,¤cû¾ꔺlo¯îÙÜ{Eý9Š¿Ê²ÅX‡wRS:Ú#…Y|=´âç±mþ»²mˆáHÕýPb#ÀwÑé“Eöçë-°ûý1¯ i"…‘ÏÙïnZÂ3ÄQÀÚ€àW¶Èÿ¡=äRiæzSJ¿ßž“¬(àžß­V£¶*Ýw¥fg´Ñcü™œGÙë8ó™7è>ÖS%¥zp6ˆ½«Ö­Rœ¦Ÿ;–>I©ª•nNÞO¸õ/OçOÊkó6¸@‘(Ý -´±ÃRõc`såm,î] LöZƒ*Quý —CÎ8ƒèJ3ðìˆMZj¦rCiAmÕ‹}>1+ðYž†.¹ -ªÀ5òŸ×ˆÒŒ»m‘þðed.²½«@/s V/D-þÇ÷¿é·¤ê]U¸V`ê¥øÐzâ\õÒñ8Þ^i‘ëÂ٪爚·ç ôŽEû¼F± -СX¥Õ€°‰Ìfƒ ·ón‹\xDlöAr:´æÍ©ŒU£RM -sÇ“ÚïÌ -ÃÇ¡ˆŒ^ÎÈjá‘„:3ãòÒVqfõ({tÜg]<âQK¾müv©‡Á(Ë`BNó#4·¾º8R-ãök.¥m>?“–iiÔþÂÂaBÓê#5` }—b0ÅB°™¢ÜÀ—@÷%,ž…<JùÓ¦o¾ïç]B«fqÕn%Ð}0FÈÖëmä>“¯Þ?ÿ>ÝõŠÉ8öC2¿€ðn´¶ôÉ•o¾cô¹ÀñH}Ëñ»#m~Hapô -õfë?'dòШÄõ¤Ý½Çb:[…ø‚ ¶¤,˜KSg¦¨Ó8êË€VR({¢¿ÔCp$hOüzA€´n5ïü6ÁH—>q?kª>Ò©çÏ}ôu)úÈ,ÊcǺ®§G‹ü°ô†Ó7›mÏësˆKw»­º^ßܽ×Û<7¶^.ÖÆKì$àpLÍVŒ ¬g*6BtšH¸Â7³%ö¤huü]ùëãÌ€µì\y!%ÃÑضiþ¦Â[Z/Ol b"¢GÀboqxãÝl/]IƒŒlÉ1Ìt8ýPŸíKWÒdœr_Çeœ<†eä[*¤˜¼!ƒÉ™ƒÓ~3•^:¤àîXþmEÐøæW½œêQ½ÎÚ?–Û ²ɧ3nÍÉŸÈyW¢×LÀ‚íuT‰ßšÄû'BÁökG…GrNbtÆÐ3Rq=Ô—FS±ï¹ûðÇÐx²íRÀëqhœ[ËùsFÏlc–£N(ס¹lQWþL$oVcD‰!mSÀœå+ ¿Kùo–wd.È!:TeõOð;—âäælWf ¬®|¡CKÝÚ·ð– -f×,*fÏ]–jˆ%Ôo×µ¤e|§ËUV€=o•ŒÐÒP(=Kº®“I&¾2^\K:Ùú» ;#Çg aï^_ÆÏäç˜DHo·À0èlWºnE9|¦.º~GÄ—ö`uÿzDlçˆåå^ð tÚ³f`uN§³lŠµy[ʶè¤ôD……î@R1&jj¹©ŒD*]SPñ“aÇ´Q²äo¨‹ü£w¥Û/2°ÕSy¡ql!¶^1‰‘MÒ‰Å=¨)_…­¨/-Û¹½<JÄÊì…¤qˆº?9 ј,¡J6!{ -½Åk>ogÜl0è -F‡Õ‚ 4Úd£÷R–kNXyÔõ•êan^¶npúáwG+@ã¿­ ]¬Ñ¹o«4¿ä«Õ·õs®-Xët碨žU†uÀUTÁóñ•÷Òývâ~1¨C -û&~ŸåbTFLƒŽÁ_¡ìX_ ?{aRØ-ƒòˆ;ÛË·òK"ÓAB‹©Ô}†˜yÆCéeÃsî­çKŽÅFæ.áïx;×ó[â Ú®<2ÙÐÉ•Få.UêRÇ£†S¥ùžã*å—;›Ì°w«69éû¡%‰«|c"šàQS;ëÞÜ”öäp!]|û^ ÁC‡¹ƒä_ép?A ÿ0'`ÑLé ÆbC(<!rû‘M—zë‰áòd²Ã®NwdYÍ\vþÜ *r-rf3 7½‹@W¡Z‡‰t‘© D3pä¹3L> -’?¤;óu¢„EŽåæü¡ûØóšÔÝYN&/Y4òGâËXKÄj茒…„dq*D¿<:´v\󴹋Dx‚ª‚øD_:†ym—)¡ÿþàþnFPd3kjظ`še£7ðH›g1Ô_5êãM†xbòûõAò®ÄS‡ÉÌ»€#6_cð1â› ÷ÕZÍõü ƒÄœ¿Ûæ_4dœIý(K¼Œ¡üáVXÊTÚ_º«`½¿%8fó¸À|ÝKýÿ™ñ]ã•{?ø'ɲ#R¤,I‚Ìûf,2ãÐF†Œ4CL•%¿çhǤ Dðæyv†ú{t²kщ ÙæLøï œ1ìüš5–ôÝgÉ5œšf&{ÄNÀÝ‘dé•r+ˆó<Œ4¢¯Â÷KB{Ôàϸ˜çS̈¶ÂÿÖí1꾑¯»Ä<ˆÕ¹1!y•IT—ž|ŽÙ15Ô£èU²féÉÔ²›*pzEKR% -Ž´ÓUp®ür"¶´ÕÌ›wU_u>_ˆéQOB\¥Ó«$š=¸MóÇv×owq³0ÅÇK]J -,z< 1‹孵ªeãEa, -‹„òuoæÝzå“ qαY$HOhùÕ¬‰¤…6|«Zó}L¸³©^©t7E„’àqÚ@XA×%䇙ƒïVðíF"š¡¶ø9zc]ièp×1fR¡¡Né;ÝWÂñØFÔN[P4ƹšŸIb@ËÐQKãš[ã¿és†/oþ”6–ÉI$£< ˨ -ê«Dãw3R–ÆëÿF‡ó­÷¼}ÖÊø¼Dÿ³ül{lQŠ¶¦ÄñB{úŸ7Íe¡%‘¯­E¶°F­ø™¢à1¸Pü\ι¬éoÑgÌN÷)•Ä¼[IÍ"àCþê_ÿHý“^ÃØh@7"¿Z*óHºàÝÕÄž é|Œ;¶x…˜öÐïTmîˆØC܆ÞBSå%ÓÿÄÿŒeð“ùŽE«÷«„¹êRûˆþöVˆ÷†`uPFŒ÷ÿ½ ‚5xéâOìc¯ÇO§wcKŽE^ᶰ`{oíxÓý}Þ#N£8WÝ×"Àª;»açó;ƒÞxð6L^-é $º2(–ÒÑGyˆ!¶ëϩᔌM–|_—‹ëªEw\—7¦Œ=©áu')¦‰p—`˜ü»¥BayA‰HdÊð|­èA2± ¸_’€ˆóè\bÑ ð¥–úð6?¥5ŽˆØÈvq;7d®;zb~à ÞÞ"fœ”—Áí~/AÀg„¯¯¿T·7¬î'<Ù;׉Ù_T²¿ªï‚y@hÿ¹îzH/G2nzÐsWÞ8ˆ[‚Ü‘²K«¯ ïéñ¤E‡p1¨÷Dòˆ÷sWõÝ¢ìÝuîgªeN°‡a^@PÌŠ'0¥‰í©Kà òXaÂf<‚dœ’8 †h…¶ oNÛqš³x‘ŽÏg‡‰¾ú[( f1WÂàe¸Ó‘lË·<±ç+9£r‰w ׸ f—QÂ"¶¢,DÈx‰w!âç5ý|Nÿ}C´cé:ê€ÞFÎïö+ ØKë{+}/Æ!Ù³{´|›V,ˆz×ʼä£3Λ/œõ9\¹øåj¹'Åc{K…:Š˜¾ñ–Y´á}òœjwe„ʸ[ïšmÛÊídzreoÌ´‚öºwÔ2˜Öx‰ÿÐmû›¾ê¶ý `Ü<2æ*×]Àxú.n"]àøj)×dkÙgJÛù¡d(GîZq!É¥úÿƽü‹¥œ.¦ç®ñZThÇY·Îè\ -qo#vFd¸†eà™KAžõŸƒqO†ÓƸ'L¹q^ôÖ˜;Š…Öa@2Ì$¡6bÈQâÇçÓýx£rïj±Kb[ꮼ$ÆñÂä’¹õ:!.j)i!QÂÕrM—‡„È=õZ²%C8¥·y$«Õ]׊yÁòÀ@¢HׄX97Fló„ß_(A:) #g®¥Oê\ßx'£j†Ð s%ÁÐNoKOH5P=¨Õqª×¢\Á ùxC¤]*Þ^¿ –㎢ê³t‰ 
Û.ÊúÕŽqu‚ìZ¡HüÆǵNÊnà`=ZÔ2פ=š6ZJsäŒ+ºùM„(“°{?rƒ%¥t—D;6quœUçË*Œ¿ó¿ò’›‰ãBôBÛ‚ýÜšé-,m¾–¼!"A  !øÚY¤UßuÃü½R'¶j†ª<5Œ®g« 3ž°‹µ_à#žÇÜÿã-ÞÿJx'È;ÃV+-™ˆ|…!5ßo“Ísß|¼Ž}œËCcÆ~¢?†ÉÏöf¤ej<$À5ZÍêqàQò¤@F¶Õ±üÀî7·€¯oßÁ¥Têü$'^?°†êq”ñا²‹OÜVÄßµ(ž‰dgæãþ°D[y„ÃN-iÅ=%¸9Ó~Ú+TÜ@9·bpõ¤¿¡ìØ4Ê,ÂúP“«*„’_w«ÇžKõUjû½–€ŠF‰>u\¶ð¥zù½f‰œC®œ!€_pD ú^#ûŽÖÏ9¿™àÃù8¶´%ƒ¶7ÕxKŽ2É…ú÷£æ·»wEŒÅ~6Tºë©¢:‹8*ÁÂK°­Ø#†õÐP^\ 9Yð0ð¤~WÇñiC k¥ -U¬j¡Œš)·þ€¬ð–5€ þø–ÜÇW2TÛ›êèç5«Ã.Uhñ/Œô ½¦ÝýQ†L¡9 [²ÝèWk BÙ¼ó‰…KÁƒéöø‰îñò$'¦”%ßð²bF…ÙØð™K s¦µR”msfÜ­ú*·¡[1øn3ÃT ‡ïLÙDŸ%žë·?É×_7@¡)s`ªÜœÁu€Žj°@sé÷\‚E@!ùzW~y£€`Ô(h¦a3Œè‘©Vª On–lãH+hT¨üÉØ;²F™[ò¯K߆”z'\KÇo”®Ñ<äôÑSZ]ꀂ$W1³ÛïÔ‹Ç=tƒH Œý.…¶„ã~•ëjT¸'·ŽÙ0ò£àÀü;¨‰Ï]àïÿòRºu¢›í5–(iÁè6õ¥tË,ªéF$Ä¢›&7t‡ÚRWrïfǪPõÀ7Ø1'–(Ÿ¼ˆ1¢”±’ÿŠÒa8qYt‡@¸ [ƒQ1iíÊ|°‰Y(€àÖ’S©GnƒÄ] Ûî -N½Ç¥ôM³/NXçx±õuSjPùœ-ôÁ ÁøŠªnF´DâVœÚ¼ø]ó0ºZ–°f{’{XÎ÷¼8JŸk]ê¬){¸nò@¡7%ù’oCü¹¡Ù¾–mpfšY¾ß<ªÿM¾ Žô4 Y²óÀ´žQ‡‘wGâCu¦¼i«QÄã HiÁW¯‹Õƒ6z S;UüäŠýµD/üÜ"žv 8²=-Šƒ)mA"hÓéŒ÷:›ÌWñ¹”È4F×o ï–ò§v àkïû_µn>ÇxuÂOQ’¶ÇÆ_LžÓçOBåæ“O—¢—×@ï­»½½É¾oŸrÛd%ãÔž”pKK³±ÌìwpWi 8‹ç‹|…À´É[;„ÝüÔÍ[Udǯ¾rй„ Ðɵ{;C™d:B/ªÔ}ÙÛ.ƒÌ->ä%¬yª>!g§MŽ”*ú£yDÎ -Ò¹³ :1¡^ÒŸvÌl£–hà!=RZçR0Ûô”YÖŠ=Êüä±£ç•àç;fÔÜBû醄[%•iëA>s'¨¥uåûЄuãH÷L³~¡“†”7N‹x`õè$AqÞheîõ³‰ðÝ-Šléåvb«=†žÆfµä6ny"×sçJa›Ae¦‚_‹p­ÙÂåRæôèÁq÷õeæ_d½3)K曤R¸KuÇïΠø¦¶ØêGü9ª£ZÑëö¥Kú?Ÿc^Û~דqü½¨^õ,éÌvòÉ·¹åf—Æqí¼5oµÈt·E{vcàþ¸ Q”ºë§ëûõ°$TבӣŒ‚ÜxDü¬³Ô0wÎê[yTUŸÌìc†oþ+ÿ€«äô¾]"c_ð|Qì†Óö±à¸Å×K—b~o¸ÈG·E±wÀ7Òö¢)þÏ×nZ`¨‰az?o -1Ç‹tn5[Ä{“®F‰|VzF–\¿¢>(¹Fs"©"åÝ4pyµƒ]HSn>ñç^‹Î Èu¹fÐì"|CBæY;ˆ-H’™*>éÊ HùÙg²@¬tÊ¡ ºŠáZ5„‹>¼ÿÁ©=¢t¡wÄõ¦Ç‡}±ÊH»ªËÁJI(gv§¯:üQû\É\ÆÕI»úå<&vÈ“$p”KUÈÚ<<)ïQ#4Ã'•H»{–96züÙ(23I•þžæ}´úPîZ¤ -•ó©§ÃIž5 ßX4§½–\wÝx‚wÑ–G ¯O܉¬q5,*ÙoDƒmK¸žà"ô¦k1¨ðßÜïy–—X‡›ï€\ð’M<ƒ—*‡š ]Ó¶V3û¸òF1«bÅK+&<르5ÎTyÞw8§°sð‘e€b?òþ3¿ÇüÐ -®¿&#lÀãw×íÓ¯p/ž ;*¾ƒs»nqO°+9ÄýÈÏ6‚ˆ\(÷õ¹ù¢Á‹€…ƒõ¼n¥ Äߣ^†4öññ¨{žÂîýߘ·§^¨8g?æLüFqxE‚zXrÄ‘]Î&v˜ì$P`Fà½Ì„€ìv¬§%‹Ó´ŸÃY!°û|Ñõ*°q‹9ÞiB¹'Ñ Í…†¾eÏ9ß鳂LZÃá–sØë¶B–! „Ñ,U…‰-­·Lš"GÞÆl@G %m… Ü -}‘^éÑù=ÞãÀœ‰6Îñ—vF-áCÕ¸ lŽ2"´-WÖü¹+ÈŽ´y´ïBËÐYÐcˆ–Éy¬ÌD -@ü5-¾TûBߢùœÎ%_k¤òm~óÊÃP è -dò0ídäâë^y˜&üFˆçú­YzÇb¯Çìþk…ÒH–Ê"¨ªSûy~¯ù¾Þ°y&vÏ’‹…}uœÍ½yÖ"ŒÎÍÈ–íkÎ%ßpè¬õõ9LžbÕ -›+éaT…I(ÜÎz²èxeFv1ÑdƒJSXü“drqéùp -a0k…_0ú¦a†d9ÜίöX’D¿¦—Šx5¥—LG 糧uvb„9F;å”Ä—;W2ä) Q…6ÖhçÍÅ9jÀä1]鲞•«œÈÏ”Z©_Ø(º‰|½ýi;â‘uÄÀ²“H÷ï Ýá#ŽÓaþÖaÉ«ûmÁsåN™½Å8QJ†£YÂNŽÃ‚CTG³:4kÇ4À&o¢e§ ,qJ8g±GÈF˜—Òâ¥es=ñÓH%eó ƒÿA=-ƒa¦ @‰Cæ§Áõ/þ3àùTé€vZ¾æªíp˜èc‚ÍEüOöüȃfd8e^‡OâÊ:o+e³¹! 
Ó²µMÆEÔö¤yõ4&/OH¾–ÌÄ~F,¸!ú©¼dÈ£Æzã÷ÈœGŠ3ôñYêÝ$9òž°ÚYŠ—k«z2=Œ²kÎH…)½Na)_ø–¥Î°gI‚d WH4ê%]Ó÷;VBdXf‡RÔu©“¼6&øÎòû®lçt£â¬wz -$^ê' X‘ö™”Kon‰™ÒéâU…ОÛs Øc¦È”¿^»O7˧¶9â±,ïÑä‚‘»CÚæ@ŽôÎIµžh‘ûITËK¹ÿŠÈ£‘ô¡Xe«x݈T¤Yd:5c!0æH¹’Úï{í+ˆ˜$¶ØÊWA³Å_?ïÚkE±çÈ.Çj›[õ‰ C]‰2úŽŸ†kRöZB[&RDݨþ2”u¡ýÄ$\†5~´ rã~ý?»­§ûDÊT¥¨#×HÏ‘Ã9br<ƒµ<ƪGyK«76D^¶¼¸R¶ü×k p”Ò—í³×kmµGÂL 4¯ˆ’&ã“ž| …j>Gšó4Ëו†T‹PD×{ÒþÁ±Òè+Ñ7–ö3»ãŠÖ{2w®VWØP.¥†¼4ù¯”]·2gÌyiÍCËW6€óù¥ ûZ:é$g䌋ü¦ÌQ+x©°„¸ô¨jÈÛ8ü©í½œYT‚A•n!å±´ç=Õax†ãø†}Xñ t -¶-™|œA舰"ƒÎrxjIx¦Ç¤ ‰*!ISæ«EóµÞû/öœ({·¶L3} ´ rUñeéꇖOÙËÑFé›uââí]%jkѯ•hÎùœêË•J6_ -szmy{–7!åR(zKÔS:ž}Ý?çÛÁ;(“çÜÀAmdÚ¢GD]'7ÙŒ†Í…t'ÝÔ+ -®zy[‰ÊÜWØX2£ò›Äg¶›|Å]{mê’ÿ01côÍNþŸpœ·…ÚÜv—6ñ‡‡óî¯R8rw†ÚégØëÎþùP¸N†ö^ïî¼K<.æ¢rÆ:ý-ø¢„ "–—¡ä›È¸Ñ-<ÛY±¦‰+H”¥ ÄT±?ëÀíP‘d8°éÃDFB@J‰a~,•‚ÑbßË F'„Øß[ +XE= -‰ÑÔ¹Ÿ«„£=¨ï,ï»Ì1úï ˆŸÇh¶j{·;ÉäaàL^w‹éP’É= ¢+œÏZ>e˜Íøó#bð´¨aƒžj3Ñ2œ¥¢Ú¯Ðf„$a œ±2"&‡Ã<˜¾äûç›' ˆB€¯¬=Óq3=#8åD -‡¸Ž%ªè~þÊdzИœßž½q®§åÈÛn5ù»ÒIFÏüø›ÙàGB{®œVò);ÚdÊ«ž'•ôSÂîùÂ7mPb ûjÁSí¯z 5v?¡hÉ,á^<©f™£Ö§ì¨=ö'o$s¦ú\2¶+ål < · ˆÍÅNW©Õö~ ~½‘"{²É0Æ7ÀMwožVú# úy®¹‚xßÙñÊnY*È$Õó£xpѾ£¿ß#Ÿ=3ë;Â}íL[ˆ\(çºͽàh¹f#Tœd?É'‹@›CÔ¶'ôÓè‹Ï× ³ëÕyˆýêQèÕ',Z#Lu§”öÉœy+nfvbg”eë©/L…BçìNMŠ ÄýŽ z…úŒÐê Y‡\d6QȽ¡³NÿôTÈ;qMNTë¯WóѱÁAlŽC‰#JL}5\<„ LQÇJ{‚4€Ò{µ‰µ¹H‹Qã•ô‰¦MðiÈç´hÅ9²ö,1ŸÑÇ>õb>W3ݨ+ͼ‘¸ÅëzÖ\Iä0 åÄÅ”HÔëè9Ø"ù -ê8~:íât!Ï•§rç‰c稸EqZqÈŠ¤Ä¾Æ›BóxCóOR21Áù®0ÿ+ãÆhR-¶”Uß_øD´ÑVDq`~ÈBo™K¼nˆ˜róçÍ>Î ¤*ÊZ“f¼V¹Ac®+t ü¹¼¡ãŒq,‰¶ØBe6„³hj¬rDåÁ•BE.óº‡ò2 |êÚÙâ-U½+Jò‚X^•,pß>µ…¾MÏgvø]™ÖôK…•ä!¿tÜÝÏëJö¨yòÈ…XGõ¢å ²¢Ãð„+~ä'IZ;˜Ê•´Ysó@=¸X_'¢lqðxQ˜¯V¥Õͯw»C;!.ö1ù­ÎR¤»Àx´jtMÃ9tèDô+-,Žh‚Ϋ— ¡uúØ©Wó î¼k˜H¢jMátí,³YÖò_ûQSèEÁ÷]ŒK0…0ïÔpžµÞ­ØF•(Ì3EÀ¾~õS÷P`кè/ý,ðqŒã1U]‘áŸØÚÂ{Šm ‰è§%¯ÔÍ8ƒzN¢›V·†‰ÍË£Óõ”C€ÿW$vÊó}¬¡ŠDˆ"´pFÊLéW–_²–‡ DPð^êÌpK«G“#Ô`z·\*î-3}ˆr«ED¸x†D Æ‹4_=Ú»qÃGƈéxΟÒÓHž!ú)îO}Š †®‡øÙ© -"‰^íúUL -7˜Àl$¬ŒfH  $;`Ô|V×N³Ðìµd£¨u÷þjÑÆ›ù– ¤M£ŠÁ³×RŠ£WDbœfóÔùMOa$Iõƒ3ª8wHѧ9ùÖ -IÛ£Y¾:ùøÄ£c¾¡Q2–Ì<_2:Þÿð¦ØéÜèf@æG,0ЗpUæ×ù«hq¡òÐÊ3¾ôÂÍulÄ•›jäPf1ŠGÄ ÿÞLHü'ÉÖVÑàSz¡›nE¿ qîôj•tBF r—0çG¦(æ)HÌGð²®4¿æ 1ódíCdœ3·Àñìõ‹¢wÂýt{ÚŠƒR|¹"óG“·û²3¹2¤kô†4RpT\Áœ#s5XÌFDÎ7|M.áÐ×Û]Ó9H°ÝW‹ŠþðQ4ÉåwZÁäeшIúLç/ÿﮑ½žé³n±¹3¢: -ñ‡òšyzVÖpÅ »Ÿ1¥†íû~®ìë‚f æF–¨"MY…üÞû{ A†øÙ“~½in~ñ—wÞê†ópQ†ÏÒãv8m5ÔØÒúÈ“‹†Ãa¦'ªˆO†´‡ˆÏí F´0¨“)®ñ-ýB0b<«:¢„&B)•áÏ7G‹VÆ°UÃÌ ™4¶ˆí÷˜Ìé%Q¨CÌ÷%­‚mk£Yðe¡§¢F0DÚõÎ ¿*Œ}.ˆë8¢ò,9ŒZ‚‘éw²‘îÕî¬ñDï»Ð¤UF"†O>è’uö¤3:<Øø#R‡ûϦ=å z¸µ‚ÐF’Ή« é]ËACma»½$gaØl~A£ûò/_‰·cŽÂÏ_ð¼~ɼËêñ£s½.üzW;³ôÚë ÐÌ3ÂÚ,ÎH&àîþ‘ÌVY¹ ²ø'?FÆ´tuÈ¢ŒÂ8Ì{/Öl´P[\Iåð~…P¡F}Ò÷|ߧ¬ÎZAx^šº‘Õ{%¿P=A’T'wnLnZ½1 õ5Ò}4jp-Ùœ% ߸ýþàþÕc†ˆ5‘çôRœññHôz1ܬyô„“ÁìüBˈ±ÎõV¤}ñ—•·ÊI玥VIsG²B{Q‰ŸÃ’•Û_yÉ8:0Ï£`oú&- Vò$õ:ÛBñÂÜøK„ØUfÕàúácNéë1Ð÷™Ë€§Ì;´‹ü“² -uÏMÐåÊx÷ -K|Ķ>幸‹|.åAC ŽóªA§Kñ?îe¬|·‡Œ¡ï°f¼À¸T]žMH~g+ SzHXí8gìÁqÇzqFbÏÙ©w ãnPlŠ (·‘‡ª ‡%¤6á3ôUÏê öX#¢;Ô”g.Ñ!$?¢^f$H€â‚ì‰{„E#BË:æpóYnú/wä²ÇZ‚>L: ÇÄ,¿‘3®=g®ùÌ|É/ÇDõ!Íœ´¿%$ã+T'™¢ÞR•»êþ¾¯Óõô";>~°÷^¡0¶Ú»ìè)'xwÊ2~ …°vXvGÇÏ~èÁ®}‹˜þ+‹B˜Ê\.!AëòˆD¹Éj°ÙóuŸí…DŸñ+ÖfsÛk†,ø©xµšFnP‹òBÀËí4%Ï8S»”Ô2¦ iPÍKi÷=¡ËLçb¾Vvtœ~ÿ¢Ž°dÈ«< ”00Q±´Ž½|ÞuÓ.p†ö±€ûP WhœzYÀWN<£ª6 ©ƒÄßѹÇ`$ÉìWљà^ -CM׀ꄒt&ŸJ©ÎL¯û %Áьս¿0ùÔ“ù¬)^ú¶ré£?h—lFäûY,Œ[Ôðýv9WmÇIOê*ÈÆYɹía„€Cº)ã^rß}M»¡{"¾ݾ8 q•[,uÃpÝŽ—;Ù‚£ñßäN\ -óZèÏQΫa3ü0íO{š(ë“Nê\„æ·¿ê¥ôµyù‰• ü¾¨°#¯tm©°GFúˆÁö^â2²jÔ.Εç½sø¿Paãðÿ …›ƒ%z•£2aÐ×xÈ[Vì`*sgz¯ùO_0 -LÛñPXqm’ØkÍ8¶'~+¤>ŠdrSBGú1ëcÌ^·Ø)öBÐÁD €Ñ¦‚©i ¬Â¥æÍ &th¦oÐÇs¥ ‹Ôëíë #¹ÌvTÀCV+êÌ©«Ïø Õ¯èHWðÛ*JìK÷÷Ta´+9±™/ž/Ý’tÃR¹Éäݘǿ(8q"ì¼ 9¨w‡"wÅóÇx>T÷DÜÕ:ÀZØÉ2 Ĥ®O¡rkƒKy>pÊe:Þ·­šÙva-7±5+ ÇosÖ«¸:'{xr•3<ŽrM½Ùñ5Ø8I˜Eþ'”ÂUÀOBQ3Ÿšó¨%Š_S˜nîÆ€ ÄX¤×§¨ÑI_ë%ÅêA0ã:ò•*]¤žº•úX<utSÍÿ$ïè8"][÷²°·ÛJ‘•Ï{¿ÚâØ쨊Àz…žJªíToÁt†ù*Á­æ’›E/&=Ù¢ó"âb>?¨×&gª§­ô´‹ä¼”}ñcä—O€vw4¸À3– ¯G}c•)ÏM/î›üw»G–†ò¼×I09ØÌúéö_1ÙhÚ’´.Òîÿs»û³Lãç5yù¾ ü•E{¸~æd -Vå-—Ù3^ ì?kd¦[@ð ¤áe(òšu Aµþ´E‹2ëèÚtå÷8Š/¹Ç¶¡ØJ”ïç;Òâqå‘͇H­¢¹r]CÍzk¥<8Ax¯ ¨œ‡VyœÐþ• %µ²–u%;åLKÙOÒ9Bõ:#á°~S¼Mçž 
-}ÑÁ`*îÂ,|Aš_Z&8Ì‹vDmL+âhµ9ÎÒ?÷X÷Nê!j€üƒç´öÔíªÊ;œL””ˆœ­ou«—t«jÿ¼þãý9Àǃ…1 “g¢ôÞgÞ1çQ®9$âÁÂ9P¤ Å6VBŸ7%ýnM;…KMåFìXÄ™\ñ4‰mLꔑn5÷¸Ÿ1 -ïÄÒOëožˆF¸{¢| 6Ð&¯§fln¥€z}¥—ûYÁýý/ק°m¼ùÉJÀæ½ÀA¨ÆÑå‰åd+*r8h{Ù L3Ö¨éؼ%jpCó9‚‚¸"¤Üxd‰©´›ºIhþ)3ö„T2ÉËÝG·¡>D:ÅÈ=]¼tÜò1ÚRü[VÜ´—ç)íÞ=ÓÎD $!KÙ8ï ¼ÿÇrùÞºï Eÿ‰rK1‚¤#¬Z6”üþˆ­ip½k…±ÊtN¯cq3ÃÿH¸J{m¨' ªMÖ>ê7ñvÓèºf–KÀ+ÇÔñBüÒmšA‡2oI`¤òŠ±çè6˜ÊµÌ!]IYP" ¥¿ØgésÒŠÕ†ÈÊ õïŘ³E@é?Øk¬7O]ÕÑ#ã¿»Üîè~lj§wy¥/¶Ä¶Þ ˆÿn×gÐH$¿ÀÅ칧×C=ÃÌC(#¡lT\(±‰µ’èhQ· :£*Jœ{z» PªÍQ2HgˆbÞ?BöUÍǼD²­öŠZÞl/bU®´ÅZéÌ /‰´ þÄâÇiöì9ñ{Ñ·h£D\ý,;<2úòË‘KQî SÐÈ{ÉnéO"è||ž1=«í\[Õ׳+Ûñ;žëgTAË),2†Q)Ü#ñi ÝZâ}î|-K¾bÀ8êÝ溒“½µW” hÉéa.D8ÊNˆº#û–ƒAUbð}ÕÔŠpb5S˜ßlŠlÕš¦¦¡Aºã#ã·OÞSÏ»çü»÷ùÌ(awÎØJ êÂ`Þ-i%psm^¯s´J(£Hä˜Ú,àŒ\¯Cߣ„Ú´£ƒ‹ˆ÷,ÚC_'d‘̺'ò] -‰ñ•ßž%¬¬½LP.43ç˜+¢]G/w='*ÁzKPü»æW—MÄî Õz‹ëPò.ÀI›a¢=0î_dS™$Õèö†R¹’çQ^kžHõgßë ßþ3\ÜÃKËÇDªZ=lNêð¥Þ¿¥¢õÌLt‘ ‹®y°%úË¢'ú5~Wk!AlP7ðãWìüÂî4€dÃgè7c«T€ç‡ C>ŠÚ0ÉT4àÕ¨:Ñã¾T/zúEŸƒ ¥+“!Š^_&(µù˜okCùNîŒÔ˜Q°ñîü±ë7 .mªþš¬E|$‘ƒaò1ïxÀ#êèt ‚qŽ U½tÚ¨ñ¹œÆñKß4ÆAUîk‘Hè °s -‰ëŸ˜‰7uú °µl€[øfWÚ†¢ÞˆrÊúbjoÿùY½XÖé'c¤…—öYY·A|j`„ÓBêQ*¿•T SŒAÿpÆácT>£Â0:3„g9­^ñT{F¾pŠ¸ØEÛ%¯Ää@&P4nq2Þƒ0Üç‡Ü‚ f -¨Ã|£B)܃/îD€Êô55 ™ñ€¨ÒoóÊŒo)t6Lº=¶l½X†ˆ3DÝŠxñvþŠ„™‰Å³6çØ’2"cÏ%Š^X¼t˜„˘ÿŠ´áß.7 UæT§‚bæ3›l¦ÅAT -œYmOA$VÄ^óu9Iþ£A -çÔ3Z€Ê7ÅñÏ•Œª ÿr gGgpÊÞ1¦((ÚOs7q‹3)«o,È!¥ìa^1mˆâܯ™ÌÍw½Š·ó( ÷PÿàHsåÛÒÌNÝ£˜µ²&ÙÓÞ"¢]šWJ’ó£múíÌvD.¥ÏÓy,°?×Îh=“¶*àZ<.Ú`ÇÉ,}HSxe¶aðÇIÇYƒ´ƒDháVã¾ @îO…$r1qñ},ª#M?â$Èœv^,‹gg ûgÍÕ×pxë³.… OÛÖ9Z=ùÒüXp’|å(DÓ4æ+tõB*½ƒùaÉ5”ªl¿SóŸÑÇ̈š׫ ¿ l÷¨4ŧëWÁ$@õŒ-fsÉð\f€@?V¯žÑÁAÑjèª{–8j†¦ßŒ.eg {%hE\öÌ"J#ª¥3bxͤ©f$¬ÐW¥ú&Ê"@óêq{©†MóÜ[­ÕX|Fó¦1_KnƒY4S»]+’j/_æ-¬è¸õeÉE«¸HÆOÕ7^FO–¤ÐÖÂÌ•hï ^}é\Iøw0¹‡|’ãpè‹Cmq2u‡Ó3ã®tj1}Ÿ4/8ÍŒÝ 9Œ­ÈþCWé!ÐŽñ9fâN ñwagÈ·éÜ0–¥Fÿ6àùâ/ï]ÃäŒ%g¤ˆÀ'ò$m*÷¼# -BD†…ž®–—iÖHÝ ùç;ŽÞ§”¢Ø>çA²`N‡,'þãJN›dt2ÕÌ•¢÷c~~)éÂ$Ù€ è:H"‚K$+#”Õ^hÒ+'á-?¸4Å3CpG IXîøðmKkØ\eTu°Ä}f5Åž¹Ó¶¾1‘:ýÀyö„M¼×HžìÉ’ÛG¼ÍF߉æüQ—‹æ †Þ“þZôÚˆèÒ«é|¿÷Ãûì©v¿«é~ìr—»t„5sKƒÅD@‚Ñꀗ̽Òs´àkç¤M¤Š"j~Ɖôª­1C%B$îsûÓ¹˜ÿ*}QÀ†Ò9 È–ä Ì&NC^u(<6©þb«çN¾1©vx4 -ΑÙÝç¨ÿ†2°žY¨éâÅ®‚æÎÇÿ•Ek̼´«3ÀϘ7BT¶!lßÜãôvò«†¶ÚNé}GhÙ0'ÐÕ«º‘~•¸QÄ"(C/EÉ6ͽŠ ^£'ræs”;„{·½Ú\„©èδÒýd7ÉÂun/F!Ú3±ØSd˜w<ÏY¸oyJÿ±? 
Òð§ê„E>šÄîñ¬wˆS¬ºZQ¦f¥$~tÊ™¿TõWèˆoœôêS~<§z®OƒÕcavTnŸáæŠÛÛj>þ¯X -9¤¥/”Zo4Ù>è6€’z'æ”°F÷¤¦ Mt¦]:²s«Þôj!ûûúMF·Ä„²„jÁgž{gÉ®Áè>DE|Çí -k5x–u)@šYRÖb—Iw8¿BåsrYÌÕ;> -7Óa+’£g %J’h1õÇT¢ÁîaèüÏ¥¶qÅd›µ;»õù2 äÔçÐÛS5Œð;5‡/p\–h*”^n«dÇܢ$―´\ô~V#ÿD&=mž« - ¼²"{Ôª­=Ak+¢0j·VÂ!ô\™MÐÓaÑ°®šáŽ.avè¹f=_mê;>¿d)jŠ Á#ŸCw6G&[{‚¶ ½ -³¶?·×ÿçõ]yÐ&—WúŠ9Oâ%yûrÿQF=$YŒœƒA Pu̼ðøÚâ5ŠæÿÞa÷—·çyÄqbWä!L¡#{'ý óà>ãÜ£ƒ0Zˆ=e‰?Ü{bÌ 9rb¼òXšºùlW[îýÓ’![Ò@˜ó¥â=¯H:(³„ÿIè D?×¾DãL§³}üOÔ 0(>(Ö<|øxAˆë¤Íe*;·OK% ¬r‚ q?±Ívm”][êL_v ʉb²Í@@"h¥Uù¼’n ñŒ'ŸC¶Ï¯ÌØ$Yâ¸R³Äü௭@{sä;¹,ƒ”‰ãrÄ‘¤±¡ãG¹ÕG—Á[ùg½»e½ÿåï,÷l…oïÇ_¹õp¿kÌH^9§éu4«NvÀ9q1Éèæ5c#Ûõ‰*{ùîßùŠvë¨óf.Â:1[BÖÇD(ßXù¬ 5Ÿ7]Kǹ^TÆ3&Â-Kä2Ý{š²-{ù·qˆ=gŠ²º|ÛUg4 -m²‘Gh悆<£R,låùJÑH¨ýl7eœ6¼ÆˆÞÄšâè°ñ¡|œ’À:þÒ⤄•º‰¦Šìê;¾Ê,úzG\ŸªŸþ°Ä·ý¢¿2íÇQiqÖU„vùÄ 2Ф4<`¼]”¨ÕáÜ:]QŠ~ ä@é] Š³œ‚8Xå‘¡[߉Ðò ¬ÇUÀµàK­±¥5‡+ÃO‚Ä{Š0ÐÑÓ’#aˆI°¥åi–6oèE }[­©öT:ÞžY”“ï´Éçô´Ãâ}ÒW¼Za76Í?âóhÃ¥ù‡cÆYP•Rý¶"jaìÉzD÷»§ÁÀËý£RàÕŸ/b5³ð/¾Vó©•ûQ 4}µÏï´rs%Òé³èÃŒH“Û˜5ƹb¥é$€µ2GX”dF“²ìOB\ª¦ho2_ÛTS+×¹¡Fd5G:‘Þz„:þîãH«íL/Dsõ©|Ü,¹@?ÊÜþÔ¬ÙÏ rø¶ùÁ,½’{µ*g`#Ù{[ #Üû‡#»x2£ ez¢dD7j8¨?뉖_a$o!6ùÇŠŠæôÉE”ƒ!çYØß¿jobÙ•1&#ãóg ‘/þòé,N‚_ Ö|<çóE‚U~…ˆrˆå[Å&ƒKì‘W‹Mœ½{#Èo&§áç"~Ä *¸ƒ6¢&MÒ‘«çvšÎ¬ütF©+Ýò½¬ìÒ¡ýJ3¤1Á‘c‰Ô¼%O0ÔtÀFµn¢ÀTƒêfÉÜÑ°Z•˜ñ¼kv¶ë,Éæ ¾°ïUI„«v´FÈbïAÆize»]‘‚ù‰±e©–Ê¿Žˆ†ýë«c "A.äwÇšÁ˜×p;^Mm²¯í(¯Ç,iqÞÊÿ›N3§3 rÆIPÍš6'$F~6(®`ÑCœñÕ…,ëã{mË;ƒ­æO© ž:ª5ÕâžƘ:øÜ—"Aädƒs©ÆØçÓàýŸÊÐu>|ó’þµ߈ËÊy&ˆURì ë -*¥,,¬h7õ”'`NÕ6Ïžã!^xÉëi´pg­à/Ê)™z£R'™fÁy"ê -•cÅc–TÞäÊ»öhE ·&1ð6êäÞÓ*çG®¡œP ýv€SÊ8 »5s3a¦Dh¢–4ØAÏp¯%:7á<Ä¢,ì`T!d£Õ¥PÓf¦`zz\pÁ} „̟즙ëuFP+7Øö™É}½Š»V£Ä¾T´`Lˆžû^#ùÁq§’¸j‘ð2Ý-Ï·ñÜð…ã½c´z¡Ú™efîóƒ¸]û@IN¦³rLÜd½ßíI†ÂB†¢NÜú‡wÖÑ…ƒvÞ‹úø–×o~8Ç~8F–óŽR‹k¥V‹k¤UýÊbt=)÷½¡G¢•ÊnõÈ®d_ó¡ß®úâA:cgH¥¿à®èibbýi -«‹é`JKîÑv¹Züõ}f ‡ôPÜÏ0sNŠÀT ²"¨"7–(M:ûRéI9zf‰G=ÅÒS˜WÂÈ»í ÀöJ¾³S`‡Ð3i¸ž`jŽh~ý“´«ÄôV0)Ë 4hïAW—ô ן-°ùR®\£œ=Ø¢Á¦áMÿÉðîR±‹ÌÛ­•- op.¹B&m=þ$ …cóþè§]ÒÂ`íÙwäR8 -åi9É,– -žýÛ“©0SñÂô®K>Ê= p³fX¨æwÔqüp{Y6×ÕP¹”ŠÙHÀ˜5íF‡y—f­ U´Ö2D,lYñÅ_ˆÂÁ™_%~`Gµb·ÒgØ’ÙçÙ½I¥Ìg’ã¬=¤Làt¡'†þÆϨK](#'ÜâÑëq~³××+©C°knÑ"OÌ¿¶L¦GNÄü¥g†¥zÕ;©*c$1¢!÷nøûå_>U˜9KäšÎ0ý’ZÄÞjñIŽÔâ_Y„Ã(Êöb%àOržÂh£¡ræä"xžÒPjPK¡J[Ók2\Ž¶Pê…ÔþUõ{…x=Mnè# ºdªœN†ç¨ÓÄ‹½_ɱýâ"N:˜(Γ%ƒ—¾þY4C³'jèÏšC:ÛbúµGá)Hƒ®IŠŒìN³Ï—ÜvHb~÷æàrúׯò”¡·³$M 4A÷ÌöV­ñ e©[ †Úƒ_"GÑÀ0*eU’…Ü¿;@U-×~¼ê§CËëŽö}©Â.ÃqfÕO'9šyÏ%9ö :—|KÏlüt#dx—ü믘„ÍJ :õ?Ø/¿ƒñ<ľ™`Q -JÅi -W}‹vÆJEc3©8í;YÄL™‚:ý‚ý6àiùú³ìØK¼Òð›ÿ·öìþdóîÉà.hŸm«û¦Å,óÅ[ÒÚ‡!k„=¨Sð ¢éZUÙATŽ’N­E‘f/,f¥'»`¤ ß‹+b†>´¦£  „+!—ø…¨;º¦˜XUiØ1¼^{wDPtwœ«Û€¡iØœ%ãh2°}Þè~V æ½L˜Jÿßx‡¢œ¼,E¥œKû–Ø´w-9Wà'©¤¯©€Ø`‹¢ÐJîÉ6‚ó±%qš2ïÉÿŒÐNº» Ö×õ;h£ñÊ#Q4DŸ¯’Yô™Ë•®ºÁ¨Ñ‘Üí @qÅ€¢UÙ0’Šeè+KRrDó!K¨:SròŸv/Èì®'—Ã-BŠûhjJõØehÜö¢ÕþÊ‘6÷—ùÝõñƒí÷¥kŸÛbcKQ¬ÚØ™>‘ŸÎÜóŸËèÝ©Wi\Ȉ_KP|ñ—7òÖcüD—ä¬"‹îx2=1‡­ u|©"RguNÍSézj…$ž‚Ž6|V@Âl”ûŠe¯2·Ç /˽€7'¾^Gù³÷ëë^rn„C!¼,©W¯ú ¹T;Ž¦-üÌ)!ÖÜK]šxÄ|:—žTÀŠS³ÄG4‚¯ÌÑÔ?|ʬl•m2{ºá÷¸¶¼ :PXÞ¸RŸo‘Mêñ\˜ÿÊ7ÞÏøŽœ[YúAü(ZÍqŠ6q'NÛôËâ ¿ªœ¼ã…~‹rP¦5•Ãœ :þi«³ZôÒÙJ~嵿$x»æV‹—ŠQ‰œµDe?°OŸö:Nx»ÈïúJ„`0çMŽhP!ú¥þvm¥ƒm%B:ÿNåäñ|\ÞSÅo¦Ç4òTTàáÜ!ñ-#üjTw†5>Â_üQ*ôwïToÉŸî= $xº®­&\)&­ñ”±KºäçÞê1Ïs[ -„_aýG|'ÃÛ~¦¢ô­µzn£Óè;]6ÆÙkbQöMšA\DI®J­Éœa±×•Æ`ýçÕ?äçAÙ¡JË."Ö²-ñ=†rb„¹û&ŽÞj'`Œ‚c)HŽ¤Íôãw’1O°Z¡#ûªô&1 žÒ¨Ü"«J™Ëðø\õçµ³pøOª 3šàR5? -$ ýÐy.Ùøq å'm­zA•Ù¯¯za âÈØߥHÆǤÝÏ Èã>ÐSŸõfÝÌ¢Ë^/æì#ÔKyK¾Ý/þ ^r ÌS1p$7'\xáˆ"Ð;$ѶG$îàz2~ÿËg–N -ßÂ@ -Û~2ã7Ð`±L 8}¶ì´ÐHøûW<‹‹€sû­g$¦øÿ´Eë<¶o Ú˜l§¹<¬tþî¶áuÖ Šez6ŠÆ&‹{@¼Ò „Ågâ©1Ãa çtÖ#'uË(é,mÀ3¨gµdð -ÿËx¶PX<³›¥GÄVÂ?ßÿį0æŸYýQmDƒ'ýõg Êøæ>Æ\I€ÚÖšÿyüV«tßá•l^Ö2æ:tÙÓ†Zâ5CEÅ*.Öù+1_5ÈÔ;(ã-2PÁýœÉ»B‚ ´ƒ`‰h‡2hI™ÿO_âŸq}Ô€p<ʸä -­~Gñ}âwxËŽç¹{àn>–ŠŠ¢©–T_Z1šQßï¾õVÜ>%±³ƒH¸)ŒÁ|õ¨/²2îùÒHˆŸIR䦑Gòžë“–K$“?Á¯÷>éîÈ_ÁÚAt‡Ua(mÑéÞPÿ¿[ÀI‚@WKßgìÖ »€‡—ýšÑ( Mù\=e¦ZdÙtºÿ+¯#t5 ž{Ùul02FÃÀyaízî1CåÇw†\ •õ˜Å]é‡;?‚{y¢õáïèC. 
™ÿ$fé)ʧ„á;:œÀ¼;íÖÁ 鉓håè,ìi9µÈ¶uúÌ«#È&ÅçSºe!Í#'ÎÏÉõéÀј¦ñZ:jsB(a6M CÄ$[4íBgª;Cfæxêˆh…pz˜{¢Ö‘i´Ï…½Qa Îà µb’%¥(¯2ƒM ÿüE -ÍÅh¢Í!Ûf©¸&ñœŸÜs¸¡yÅûµ Ž ãŽã'” žÐ–šä²èà0Ì¡æ\ŠóõJ»tPöñ-Ž10h)æ9„àïˆk‹îK#.äó9ŽHó…ÂÛÊò»/‘¹›Öa£â¬œ_ΣTBß59+=çV·ñø†Ðøù/vNfÔx‚ZC$icâaXÉ-)Ûš‹Š £É3Ïû´ôUŽ=s¬vNù”žxóØš€tK=Š‹ Í@Ê@óu¹CkyJ]=ç¡0|åg¼±¯|CS2¦SÞ=`EˆÎvk,WùãTIý…ù ÷VêÒ0͆¦‰Õ,~®©Dƒµ÷sïÌÿ΂j^ÇMä·Ys#æ@ó Ý<¶+æÞ3¢Cq!t`Ú£žj>(Už±Å ë(["j,' Ô¹¥e·GýPfk93«}ÁbRoöÎudä´#ˆÇ@67è”@×ü«Þâvw—iwê¬Üw°ç>F9´Õ‰ˆF®ø€xngl¡3äŽÿÎm™C=£eåôÒÆw+ ï” ·g™†E*:2<Œqy˜vÝqãXCf&-öBÃy*vÆÞÕHã•!ØyÆb­­#¡Çh´ä –^¿ûGäpÝuˆ%4&W£ Q8WzŒ ü¦$•mJÀl…ë©Ð†æ¢Ho¥BEÓcÇ.xJRJ £ÖÂQw^GkI+ŸØÒ¹|±N<—gÄýìãeõr«àzæ…QÿM‹’ºãe§­‘?P€1=³Cä€êé:æýå˜æç¿Xý{tÙùZÇ5¿ãÅP¹EâÄ’œ?@‘u+*«õ6Cô]â$gÖ[¬yI\ÏZµþ1™ÖvX«ßKÓ¬¯Õ?ç{T2£œ\­é¹Î}Êirè!Ôó½öQGÅ¥hÓïw1íç¼Æ‡ìq­!¯pgNÞPA}iZðà:¢tçF-È~=¬’^¢ aàìW]†"¢O¤`UîHÚÈî 4β`bFeâ௔5bÄ2€µ˜±Eæ¿h©€¯Äó™æÏ‹ÛϦò<¥£;§úZꜱ{$ÛÓL’”££›âº>Û}¬#RТ‹ÕFI £²Ò¢icÉv–îã™6ÓEÐî;Òøç‘7|Güh%>°mÔgžÑþ™[)pi%¥‹<¤ I;©î©Á¿²é¿bÁ¶Šƒ ¤%RL>}-]/¾FÊO°§¦D‰3ÍŸQvJYl¤5(¡ˆþˆÛFN²u0–Ž£™!$"Ú©9i‡–×Â’Àßû Ôr]!¸Ê¥CLR†°¸¢P4¾8ªµ±‹ -H2*† 7áâOÂJ(ÓföH -†ð±Þ¨™*éž#&'΢ÈÞa&͸® ƒì5vE¹'þõ K?¯ÝµÃ¨bô£n -)ù`H¡xî$É"¨´ű ì@î¡zšêÒÐÑ;±ÅœŠYÔ1…ËíeKÐ ±=†2ƒšt¬¯žVƒl|€µ¦ˆ;FÇeˆ’ÔK+KŽ j÷Q6Œ2fÎr–ØúyWNïgy•ÎØ`~² ô¾Ü 6Q0Ó—ÖÎUõ A­'ÐöºÌ¥À›L~?˪tnÂx)ñ”q">¥æÒð¤Q A¢e -Cú!p‰‚›Æ$V¥ÜË•VC Ï¢kç㢦YðLäȤx"ÁenuüJA¢Õ—º|ö.ÙE_ü¼¯¨¥´Z²44vgóŒiÃ5(ðã&¢ÐÃæš»–Ç„‘<ÅóÙ9±®6ìêƒD0WŸ*b¹»ñ©}Q1ÜF_2¶C¸ì×q­?7'ˆWÁ7'¿fŒ_1%Â[º¢ºbÐ4Ýü:g 8†ÖzbwÂRµ×SÈ#¶ŒcÈ\&qBMÓ|ÿ(µ-®HG¨$†ìqÔŒVï:6~i^PìȆ4P¾ʘÓÀ)ß.V‡G ‘kh¥ Æüwþ[‘2ùkÈa¢ÇË¢^_ -þ MÓ5è"ßA”QhüŠùh3^ÅÏc?‡n=ý@z?{Ê@Ä"*=ß°þˆz澦ÄY¢³d£÷úP -žbŽT³ŽŸr´zÿŽh.矟ñ=A[>Î5%À†õ¢ -€g½\é9Àî— Jè¥j‡Œ¯ÇGYÛ8Žï‚1çNRÝãíXC˜~¨m@RB™¯Sll+Ý<Óò­Ÿñ¼$êâ’I{‘Ç‚#dþ¿ ¦xy.)<èF¨sÓ~cCzefØD °Çu,Ûõ(΋ôž²]!ºðz%f‚­WÞ ){µoÈ—Ÿÿ²öÍôí¥y¹”Ý_xfßì¿’ÓÐiŸ®G{úX Jó#30òŸçÂŽ7QÜ“xÐoÔ“òq×3$ ~ת487Uo^M‹ý©I"#Í¡!¯É¤¤©ˆ¤’“zNíÀbZÐ3¹z"ø”S–¥¨ á„p¶D@Öe:ÒÖ›©Åx+ÿ¸šHltQé¹Vîl›`íóÀhÌo/F—Ûž!Äñ@®0‘ýš¨ Ï‰ÚϵmÒížtæ]1ÈdÈ¢©Ì” :f¢i;1àÜŇ ¶ñ:5`/zýÝî—™×5»Z~+š€Û¤ç¿²ƒRù¦•AÒ°,êËõ¬Å»*æîg†hßãQ=ii^Ù/3f>×ó¬ìFÓ×_?·^BJÄõ2úLl 9ÃÀ¶ÖÄŠBeËšµݸoõ±å,ZÅîkŒû\çè« "{•¢ªÒ;y¼¯Îce\}¥‡Gzx -µæ-[YKµ{ÿ7]VuÝGä3öµ•QÝê{¶‚Tr0—¹vgš.Ù>>Mì´´´>å:%r·ö¶÷%+xÑBo#ðt{Ù•6ås¬à÷Š8Ûe«<²ráþ¦Šwn¥"ôŠLÈ®}Ч žZ]´R‘”‚ËE1¨VÔ|뚆&vÛË?è[ZÉ'rÞËPÅ÷UÏMMIÖ{¤·ÿñÆ~ô¿ÂÕâïüûoåFtÕÔ¸_ß0s“•ïž™9'ùKJÊKQ8¶N{¤Z2t%ìhÉæi–XZt3Ö:ÑËíikÖ7Üp˜†Ç‹¾‰%÷’åá œŒç~ì‘&IôqFe!eœ»Ò¿=AêMè^ALP iÛ÷eÇ’*ö„ï^!Jò¾bÕrB>ã®ÁLå^?†´zÚµdÉWã1µ»Êæj¤·´&Ušñ6¼g¨-ºG™ÔÆK3K+Zç‘™W~kA–z1œÄÃôl%V¬;Öoö"–ØäÊøJÞ[_QMæгVÆðßûFû[y»@-r[™!½ŠF:™Ý³¤â×aþŒñÔ•´]š®áš#­šØA”·ü*ÌÙÜ=ÉQ½JƒëGZ„e¦®8À“aîë -ÆiÄ´ø¾‘ƒ}J¬\íGWDÿZ=ä:ø¡Òz·u - :r+P‘P>U&_‚𠪘ڌ2¨LÞœ³#J'fËÕr«4­NgÿSšç×üÁM ¦+j AÑ+AH]¥F.Ké8óP[ÜÔÎÔ¿LQpceª+’¨Æ´UtdIÿ¢¾›}WåØ"ÛV2íÏ|_öHĘù©ÊÝYÿìªîH¼™vøT~Çÿ ¾Wpé]ŽðT®ÖÇÿËÜ$Y’cY‚]ïáoÀ”à<öis> ]@í¾pîÃW3Q ‘dQddf8‰C?wh^s:jˆÁ[ÐÐÕeV­jÞüÊ›2ÆÀþòI]ö뿬¾hÚ­¶ -AZ¬yø¶B#o¬y~Ö ^éö²rµ endstream endobj 32 0 obj <>stream -0瘷¡iôT7C‡1ÎñûU}'e滌ø -2(¸¢¦_D×£G{>5–K¿;[|Öõæ¼)þ•€9CÙµtÅÖ;ÒUêëRs Obþeûi³¹aŸ…tËs^Þt¨ý¿4·Ë'°0˜¥ÌHÊ™ #2Þ"ziåW•îÕrîÖ– PQìÆ\döW¢zsâ‰l¬àÒ{‚ Iº£üÄnìZ>;C½3²Ð6ç„ÏÄœÊw1¾ÙK û¯8RŸYb[¶ØwBj ^ùÍNãu#Fó¨Q¬[†S‚n8|N»|Ëþ@aõö:a…÷ÑV¦/˜;Ío;¦Á|w]i¶ÀŸˆ°ÿsÌ|l«Æ—Ù…Û§®[Ý´45ö”hú<]E -W aa‹ -z’Ù iÔA žº—j­VÂ^}ùNMá„Ò8ñe«E¨%°Q„Ü®úâ —Ç ¨~¥4a´’‘:êR_¬ª_0›.Æ!iFa |¢§ã¨Ú -¢õù/[pßw¼ÊFÿ„æ;I(wÄóŒ¥p U;-K^Ø[…BGä#÷LñqV ªá[²ô›1P¢kÂJ <”^jÈ[Éþ®‡A€ªjîïzØ¥-V¿4¸ÇYÝ XËp¨â¼øR³p€Óê=~ OWï‡`”VšÝ#ZÉPà[„ÝØ Ý!ϵ¬,H•å†‹Ûßô>™ªÓCÓ~®¸òB[ØØ|¹<5„J×5DIY H#±<†Òs4Çž¾¤žÏÃ3Ú—¯‡J&Ë…Ü1«´nI²€¾„¼»ÞëêÝ-š'8´B¯ÎðÜ¥{<æÏ»ŒGuA -ˆ+¥$qœ²Œ¶ýT#ècýõ_R‘°cõÈ»ÃåWÊK& {ÚhU‘Pð$`Jå5nåœö¤Þ°éu—48 g¦î•Š[ºúgWqlwŠzFŽ!âqÛz ™ë0ý„³*F†1ãÉ´[4bæ9èüß)¹Ü¡ƒÁD Š”ž}=]Û'À†}½bÇߣ ‰sž‡rb‘: ³ûyˆ§ -Àî¤9 SyªÅåQs_†b½JS{ ¸o¡énsÍF§0";÷L)@ðôž‹£{_‘CŽ7ý]"ògÐè° ¨YÙ®2~¬ùGü݇ÝëS}-H²AÄAÝÃ+9¾— lå p°stÖ\ÁŸôåSþžƒðå_’œD£ª¬™#}ruÆJšyûµ'9é°ï-ùâvÞ…XƒÖ“îé¦ÑóèÒƒœ‹¨ -Ô6“I;-E—oâÞ’ p„“gA·dˆ«¸!=»#HjÐIÆ 
^Ý=×Hp½[qJYÈ·‘oAbâLÐ@"JI\6úEÇ)ùð‡ÌMŸ¾Å4W%÷¨!@bj7±HL¨ß"®‡{õÔM¸ŽT3ª»‚³aÝ\±&[ƒµâw¯ê¼ZH„M&ý,ñDç@}ûëOõ®ŸÀø¶¸ ÷RÜ ¯ËÔgÚðÛ¿a3Fô-Žï*nüÙÏäofDvÍe¥uó‡Ç_êÎ[à±N´ùÍ!9ÙI{úÖª#Oî!yî¡Ç2ö€ˆŽI1úVòÝ3µÊ¨¤¿bÖì_‡æÀº°òÝ-5 ëõ^ƒi¼¤4|¯HZPk’g°EcgÚÔ«uÚ2ñŒòVY£<ªk·Øû9À\ïxî33xãäT/Ýl‘’K,0+ØÁ^ƒ’×#«×‚ ÊáçÙà Bn‰áêƶï§- _÷‘ši«§"ò27Ò sËæÎVÒ¹bCNó—Uéúÿ ð+wD‚[QüJ}ˆ°>øõµN¹1˜:ó¨Æ«Ý.µ túÔ_>Úý—Ä}_žò?ë”Çgi°}âÂðÜÕRzFŽp °4Mb±p•¸Ï {¾»C2–Íõ:4ÎÏÙW—ÉNOv¢åh”Jhs€6ÏŒ±Jð>q.éſt<ïNu YË|§ó §…[©DT ÿµ’t0ݨú|kÂNŒì2¹È=wK™yæ,ó§eÃ¥o§GÑƹ÷ÎX |ÅžRÈøòÊl¸+d»éª= þo…c¦“Îå-û$xQJÀB„¡¢­¿¶Í]æ<ô€Pÿþ*ÂÒ"Ž÷·Ùq¼"µóªU-å%.œBŽ, ̳$Éhùig \Â|Ò»'òo’¶—0F"uäÏåœ#’ŒåvEè{ćx¾ ¶d'[«îUŠ‚|=4%ҠĀ¨èˆçPƒU‰§BwsHÛ"t€_G²éç5æe×½·º_xíl/Ú¬%Ÿg¦(#ÝÑÍC¥Äì÷,™5o¦Ûíâ]Öìbs6‹Þ ÑæáÀŽ‰åÉF·¼^l–"MN°È$ÎG¼°cÏÒØÐ_@ßÉË‚Vú™±ðXCEƒ“ßøR5ƒ°¼ÑínšÄ&T`UÄpÑ'"AE©þH“`þE‹düˆa½Œ£^ Ä&Ã4—RÄ Ð׃+ÀîÑ+yÖÝ|1+–#/Œù7ª½Gtª £ïð2ÄTå6 ÖUŽ`üDYþ[p±¡ñ·ùÜKÿËû36ê#'^úXS‘D4?­|XhÓf$80$záÌDKÄlEÔ¸éçìïé:?êÖ£¤„6{ØSÚ½¦ë- w fƒš-´ `%2¤“c¦ rXCD‘42£]5Ø»Ì[…£<°È³ñ-€C¦;ó¤|¹{_´*%A†›GüΡüŠD4 -d*£¿=€æA®’L•+CÐQaîìwˆ>OèMñ74ÓüÝK—NÑÖ -â¢ûÄÃi‘%Qã`¼)³k*tVQå…#5îÖÿµÔ‚Õ Âf½Aˆ¨Éü‹j$û&΀ɡ%ôG#·v÷r$ŒC.ïÕplƒHϪEf¨t%i&ÞR°+Ýa CéCÂBÚ•ß lþ µ¬ ðp¶j,P3$ýX(Ò`a^Š!óÔlT7û*—TœÉ¢$Ï~9J+³Û€íásDxRGz ÇUN%RxŽôWM›ƒmÐNJ²#—Ñ°jrëñÍ1©æã¬ðL¯_9]ÛÙ#öš|ó§·sëuradqD¶x¤©D˜¬=ѾäèXïNºÓ€uá7³cÀ†cËÛÂï€l¢+Z‚#[¬I=Ä —ïØØ@¸³5]‘˜a4\–¤p À{·zêR»¦§ùw:\ÖmÞy0ÉO˜J¼±ž””ÙBíúé¢f­Ðß—,×Ttzg'…þ#—º¢WýU΢, _{`3)®túÅei /ЧÞý‘Ž½ò‡Ža?êX{~ldX÷4ýÎ|–QÈø\ -¯8q@w¦5Úêt$½s<\.`[€”¾Ë¨Ÿ *K2IÒXû‘Ô‰ÐÎ ÄÙê:{ŒÃ,Ö³Ô˜`ËGº¬[Ý/m`¡íLüÖé=gV†Q† ÞÒü½ðÒÚýÌ#7$ý™=Ö=ýöaŸ§ÉóÁ$å7>yߘM¥{n…)ÌuJq<ظö–ðŸÐôWžÖÙ·£±(f‰0r\pæf__ÝN‰(fóÌè¹á› ‡êÛÓÊËÞ@À…"Ä•›¹‰ûÌÄ!ÁŒrË€z7&œ¾5e!/²åÑJF5î–qR~âÖ( `m솅¿±s$DF|WE—éчå’)ô=Àòsî"É?3qcÒFÌX¯€ºßˆô?áöBC¿¿S‡D¤ ꩧºòEeˆhù満A­)ŽžL˜r}g‹ùÿ=ÆŽ2<ñí~Z ‰¦®:ÑU·¶«íwb»-ƒ G#±SbšÆxþ,÷H!?w[á(€ï*ðókã›;b Ââ=?Óšæ 6©ì×Òbl$kYQs†ÍB›ó©›í²çT§|/(íú-© ¤)Áäš%h~†®öçý-Cö¨¤Ð±<[V7ÀñT¾u)Üâ¹ùµ3 9ƒ8óè>{½œ¹Asˆ9ˆªÙ}m‡ßØ׊Tza#û‘ÈXPvÇ£2RA瓇Æ0úI’0îõ©žhd°,xT¦Ú“2)+†§:‡1!‚ò>Ï“¶=ÑàUÑ{F]C‹ŸìŒ};Õ8Û£M­~O•,ƒøhk1ŠjšÊŒ®K¢{ÿŽ‚Sgº²² jñöL -,Óë,¡•3b&ŸÞ€ôÊ,L-]á ¢ÒFl»[üïzŠâD²ÈŽÌ=Ë;Ò¡Y7̽ÅþªÈÜöº”"K´"³½^²Š*G‚¥ ÑÞJz—åícR²;™O>‡q ê@  Z’XOøœu©¤> ÑÒûztvõã[pƒÚc’Z§]ç?!àk-çX„T Z6PNäs/ç(BÔ/WÂ}Lb‰ãå0Ój)Éê²ÍEåUwWÅ äµ…mHÊ/ïa‹ç8FMÒ‹A(ûÞ†“ªÉ¯aÆ•d¯cuPb•®okWâ´ú„"1âè…>Ô9»å¼óX ³A“,ßÀ\k8{ñ¦ÊšÒ¿éе̉'}³°{âÓèyˆ"èš+éõ]ôh©¦ÍÑA 81î8Ôüá8ç«Áá!’¶`êsŠâZ—&˜³9x/í–èGâó\C. ìkOÝË àtj¡àn­†W¨¨4rpÌÿRh -€ˆ€îs?92E–O–Ë™MUßkœ™ÂÒîËþ:gþÜ¡Z4w­´§{Ý-‹n'eç\§&‘²Ê}f>(Le¢ 碛;—tž¦÷îk½^ ‹&dšQ!¨°73‘Ðz|&Ç4&|C.µÅŸª“†i5É/JÆØòšù¹O¹Î{^ˆ£æÕ •²÷ ×.s±åÇ7Í㊖á)Í•ª‚ÚuckÔ &èsösP厲:·ºZt»8΀qdÈ—ñå/ÆE#×À'™}ÝôF Äöøu¡Ñ•Vµ»#’w(€¤bú±òùùŸK¡ìè @TW£1§g´…L4#XA0w‚ûbLÜŸ(b÷UðžKbDKȵ?ÓTNž‰ð>³¿ A’‚øÜ·8 ÍÌúªM“\V‘0y“=5H™%&P²MÑ’SSöQŸ#8’I¡¬E­:† ­åW„Î[8£zoùÊ{|1࢞ˆé‹â\&c´@2¾ñ+ !èҴƦyͽôGj¹än+Œ‹ÃM´¯æšèªW¤ 7×t&êÁØÛ´xæ"ž/;n¼OÙ@\•EôÔÔJD=8½ðîmÅ]2î|á4Lµä¥øwôD•BZ²oÒ;BÉNüSú>J¢ÿÓbÍ݃ò¡bÚ8 ]%T¥Á´qNIúRœÊ^¶’ÂMQË…§¾ AÜ>y<üÆAô7³×îãû•üÛUû†¢'ãDZf/Õš(Ñ›Êðò…e£ÁÛ²ÕÉ %2½mîQîÛÊbÜ€ôÿ¯3ÙtmsìSi;ìçšNÛNÁmxÛk[>ÕT4å´ƒ3ãί£\®!ŽW]@ë ÙbÛë…þ!Æ"ºt¤²}æv7ü† §Ò§l+î¹=$ì2bK¯+h° ²1÷­†P,? 
*:Fz„1F¬ú¢øãÝèõøD×ö~$MÇŠT²ÍÊßN¬é±­;õ» g°€Ú‘Ψ#kôHÞ%“€`1Ò¥æRGânõpƒÆñ±ð)K¢‘À7<Ášî£†P¸s®ÐœCʧ³£ö#É@™ÄЪ V;_Rw*Œ;Á•§mäÝXÝÃü׿,}‡ù°²¾6(øÉ=Ôαâû™f´\–r¥Úˆ|¨íɇ”àQ®ºá™æ^*䜤ל˜¸Õ (Ynú¾Õyaô}’Ìyó|÷ÐÚwš=~…,5gì-hŠw‡’Ð<Ðç“ïa6ŠR­×½±ZhœJ¼‹”ùŒŽ°èq7<a½7ŽKŽƒ-•‚›ÌôQ¿"ü“€Óç©h2v«A„—ÀÉfäí;&Æž´ ÷‹;]|pjw Eÿ09kæ„èo«Ct†¤…¿5h¾`"ÔÄúÁ'Ú#9öQ°"„­hŽ+Ñïèkªoá Fk6W:mÉGd*K¶S8ô[ù—äSTJé[¬ï ª`ÝF¦q|StÕ¦ÿhU~ŒÂRr–y8L‡Jx|*¶4H—åðD}ã„%K‡ÆSÇŠÙëäBÉòª=¢ÅŽ>Ž ¥Ô©Â‚Ž|“?^tq¾ÛBv–Ø}”'º>Ë>Žºfu>ç$I¡+TI„H]ÄuÓJѽ”k%K^ÑLHÄŸÀõ>Ï ¼SZ¹³àb&\‰I Õ47úš7± ýñºb¿(©‰¸ÜÊŸç}µ¥e1^ߘ}oǘ9F2)ß*O«vÐÈ‹¸•xŒZPìe‹]¯,h®8ÙÁÍ?A+û'×ÆcR°þ2ªý™Asaí;’èØ+ -­nëTRd“6½º»èùj!Ð0+¾«#C!€\ìv͆³Hs?#9+ ¸b¢E’[&“q@9¬8¸Ng¤ÊÜ{*csëk jçYfâX©³:¾kЂ;1i[]&'GØg/Ûlw„|ILà:Å÷É9Äu©+ÍAä] @Ó#V¬¤‘xæÀáí¥RG/¥…‚U‡1*% -m§Ÿ®åžµ.QΪÍ!d@PV„]¹’õÆù†jÑúùŸà« —æs²JëgúkÇBØþÆ+Ò9Š s}å =²QT%r©+•ÈbÔJqË­GVÙ+_=ÔäuÐ+Ù9RZ„Lnqïjš@póîš9~…H£ »„¥6ÜÔêíÐACº?"Sô’L0]3¿h5dgÕ±Õ¨Ÿ¬ÁJŒ3Ó˜¡Ž*,zâªÞÁ¢ßj óß>kêD @—zÕž¤$‰j†ÀÂ{çì¸/Ýò•ÁÍ9 -»Ú u1u¯˜•ƒ¦Pê>jÐÃC$Äi<"J„Š/ H±7¯Å‡©!‘9)p_kÈñ„‹×˜CŠÑË@šuyÇûwô›{…÷=¿]”ýŒ›¨!PEÔè$ng=Š«¢Ö–ÀýãïoNŠ£õùëÉè;ppWÒUôÅu¾Êù+õoÚ.V=ê;Év¶Oï5w\N:Q<¼P¦ÍÅ`¨d£dÊÛJ"Eà§jÅò­,Oüå£Ôo´€>s¿×ï»Êaäé8±7¹GU˜Ò¾êå?ä掜…«µÅ«–zÿžÚZŸgŒwRTCRPš™¥œ•0ªÙIÜïê÷éÇR0ÆE¤KcÓU±o! -¬3æ=aå¥Ïˆåóº—ð0svÍŒ>¡ä©( -FØÈ›özp…ßÛ,KA[¬Ù6࣎@ô¤X®€À¤Õ¸E¸\sEªq´ÀÖÚg9rF¡bž;^s;=ÒÃÔN\p!ŒÝÍ•¨ïfù0ŠP„AòuŽZ®Ëx°fΦF}íF!¸Ê\Åï—~clh ÁBg­yDy>Cbð£Ú`"Þ7'Û£€yôõ+úeßžץèÕƒ2µØ.Ô3D½oº(Žt‹q - Jüþ–|Š -î&ê9æXëÌsϽ¨‡Þ«8ŸœÚ¿*¶`Šʸ3Kë|•›tlùŸŒõÞïï ø¼ÃY—ÒÿMØDØ°Šs’Ïû<U®èsãOìiÙfŠÆ}cëW}‡äè·~â*bÈ%Hr=Î}²—*Þ¸3Ñ»˜­ŸáéÕYBEñ{¡ Õ éLY$nÐ -‰(+B±YC¬CLtx5k*ÓGOþZ7ü^ÜÅøq=1hlÞWq–¹¸õ‚óËAsMÙ ìó]™áyÒ¸µ×׫÷]àÔÝ Aæ))7Oø\vÖZŽ1Ê4›ù¿|¾É< -p=k“ù$¨ñ› -çß ösQd”`£9­R|¶[Û)%ÿ´n­ÊPD=ƒ³n¼gX;'k‰-Þ;‘ȾL׈’á.b@L4ÕâÏ*‘JÉyù^ºpH ŒSÆñîËÚTFµA:TdRFíZ¹Ô‰¨LÚ-¡1 ò“¶íU*KÕ‚e›zx®„Æpw•N¨¨2Mxl8 ÀyÔÁÌ™BQ0=É3Ó³Õ:¥¢wï¼Jm5æÅqÎGAO Åš-\èøcÆáç»_yÈhñÉ!¡ÎP"¥- €UGõÔý¼Ò×j!•›Ý"Þ¡Çœ›b@å\¾{©â¦ù™TûæËí9wEK9×þ -LÎ,üÍ_ÕVlØ?˜ÇÏÑנЊ…1{ ápÍK±#¬eøŽ)ŒV!âáªÔSB¿Qm’ÍÇߢ/mzTÑjD ! -°/ui%Šè Õ—•UÊÀ@UœÕè¨y¬š#5•è£Ì‰0†bL²Â+Pj-GVw¯È⓲R£ß꩹ιKõl.Ü£(±­:øÙšA':WúÕå7ÿŠJ³ÄÅé—04€%Neå+ŒõHÓ? 
ÕÀ\Q¡-Wå cWRJ›?RÆ)ü-vu«™1¢ˆ´BEÿ—aÁAaÀQpüêã?juñùɇújN¬ÝOÆ á~Å+­›# §÷]˜ùyñrÐFhÜ \iþ •wÊ'kÿëNWîMŸ-r‹úÍG!:Ô%ÔNÁ°æsÄü"`g "‹¨áºº£|Ph?·ª}‰wuÄ^0ÆßÎñ2HWû‘ówÚâA† –#MõQ·¬xÙ|O -°”z8°Ò%ífTÄ-ÆÃo@‡Ö*˜*s.¸œ¡î†ŒÜÌçŠã¬°§K…þz£wòÌÝp }VŠ\´ÅÈMmÀ'ª1¸¨ˆU§t.Àá+e¸J¡ ç=©Ò›3…KD̃îÌ^#"XÂ@'º¡ñd<Ê ­ÁI9Τ;â ‹Uµ'úÔDnñ¨8Ò‘M;GåÜRr¯÷;·‹ aö(Édƒö;ž¤£, Sù™#(0EùÎ:'R}Íš o̾·R]|É[¹½ŸÕß䎄fPýÍ‹ñJ¤¬£ê„üœçæ@ u˲é¿éqþ¾ëùõ*ó¸~DNx‹¡Çª–ï逈F`©¿’R¸Ž•ô¡ÛZ¯l^ÞA tÞŸQA2¾s«ÉºÝÇìøoϾúɬT]7¡¿]_{ý|cñÅÖ*5-®~÷ÈɽºÛ©ÖiÝž›ÓØG²Cên!Ùö­àˆZ*»s{¹Š·jœß)Éîû‘$g -}]úYƒ\IªT%xPž¡ Í3Ó¶™jƒó=‡B‘>úz^oª‰É¸Škóå#L`Ë«¸fÉŸ¤Zbp§|ÇjÙ³i‚ÕŒ` ÓoÑ/– ]kC¤«&O«Rí¬ £ð“Àõn¨a‚ÆãNšBK™?hvM Ü -?·*$ßQPàê ¨*Á<«-éRýNó{ìkkÝJÁê^·,4 FìNMQýª!$oà£éb ͈/Ly§^ âAV[ÙÛ€¹8D-¬šD -‹ìÙýMŒ¤³¶QŒì ,Å(Úø=ýœÏ æLç|MdUN’ï¿ÎÀùÒÑ¥ªjß™… «™[Œ†œMˆ!5ä–¡ÏÝ€F]ŽIe=â”GGS3Mèˆce  3„v\¶õ;IÐf(©š|óSRp¸^«!#Y=ϸ2s´þ¥?A×¥öØŠ´~¯ù§\Ùîäž×ºÐ.UfÉ]KÃ33tÜ+fM -fb£à“ÉRQÿdz/ø<ê– þw–ÔhI —dÀpÎÇxŸµT£/È=v¡O…Zç‹ztýZ&ßüJQfÝÖë_r®¯–[Þö5ÏgG!ÌŽ¿.›q6˜ê#‘Ò!!öSCötsƒ\oß«"WœxÞšo‡¶rênä›0H¤0Çr§vD’Û4ÜJ]ih@çþ­1ÛÇ^YPló+7ÁT=E+•@YÞÏìçf2ù3ýâ°p€'BZ”°ƒ4;|ú-m Ùéü΢Êw—dGÈ0ŸúsV~ž.TlöECæMvá›qa8æ7&÷8s¼“Üä1Vo+:ÀsY’úv7I‰u;b±º•Vò•ÒF¥9g¤³#êzcwîuz\À˜2ë¾ÛâcÑè#àá3GãÛ¸]¦N{K¿vQ.ãøÞ -ýGª°ÄõŸ"Æ ¸ 6j{¬ß8ãVÚ2V“3xmЯ'Õì¹e*XݱVZ½#Ý$úù1/z^)£Ëƒq¹¶ÂÔˆ`÷0…'÷geâzW9`[‚–ÒU+Ýü¸h¤Wœ ë íV¢~UP£÷Åʀǥš_Gé$é’˜°z7ƒ·´ÞâÈ¥e¦6¡ªË]ƒ¨$Î}b®i(e ³I.ÀëRø‚–eøÚBèÔ¤×@Xk‡zÁØi´ \ƒºEÚô‚®*žÂØÒ ›§Ìµ‹Q¢)ùæÄäÈ‹Qû> ç_p0"DP -dg<ªtÎþµÇ²ª•Ä—CÃ|=¯VCxÇð£¿BŽÏؘ)Wæ¹=Ÿ1ígáí+iÛóÏ¥økA¢oÑS\$g.ƒØp¯[¾±PxEô„s\gh1·ø}Ý‹Þ¸T¤/4­eK_;8í5(ê~O((©ï)tñ›ZO½·Ò“¤oX5й3oAhõ#ÚÁ45Ð5žÆ¯·—¼¢Ôlàæ€X]¦zé3ÈGö¾ ²»¼~%o=OÍ'Â6×ÎUfªæúUšÝ#«t1¢²À€S­è<¢åœ÷¶ßTuY;íF½¤–Pñ%ž"…åpÅJeí–¶öØLwÀ£Y0F^ëL~| -A?êg ìmaõh”@Ê‹ íîg’œŠ§ÅXmÍödwxÍ°;jX×çˆ dæ ì‘L •ñ÷4mÀÅ%ó»i*ñ²qƒ$U*ž©Dm&~OÁ¾‰3ÄF¨¾ºÒì6÷öκ' TM9÷©?pÍž´7H€/!s=fL{B£Ýz?o²FI9vée]iSF&.”¹ãÄŒ 6„k«»ñ)î-2«­2ó=ÒjóDb”UJiD»žjÂê¼k‹ž¡¢Ö__§7&+”oêsŸ,ÒiÐɼÜZiûqž‰—‰õ€KC]P½‹“Íö‰›´}š`¤OͦÜõT…ÓÑ ¢§˜4UÜõ(;膾áË:¹¹q+¨ýõˆtñC!êÌ3”~MJ°ëì‚[‘úî2+<†œ«AH} XP‘Ž¶‹°:Fîá4ŒQíÃTaêÔ 1A2q¼O ¸ø7ð亮=÷£±=«Ã60±5¶Ü±DhOÎH ¹†Ì¯°ÅA\ÝŠ( ¶Ùf>E˜ÕÍ<ì`’,Æ_ÉÔÅæã:³—U5WPpŽpr…AŒg¯çn˜:n{dt>ñ*»ÎBíkõøRp¯k=k\轧ú—Hý+’O€ Œæ=óÎC¬¯!Èî›ÞÿÍQøkRüïyþñ€÷’M>CÚ -Š‹æ¡Fؤ>âI/Ìõ&‹`±¢’x^ßÙrG“x~}çy -bcTï2z˜ŽÇÚ0ªs/$qEÃÅ„q;𶚭ÈwRn¤'À@´v.ÊÉ}(úç(ï]˜ÅI&Æ,zÖSñìÑ +*M·-ó\ÛÛ™‰çcº@­”òAS}'TöÞ£zš–G¼Àê© º7nJǶ¢ŒF…?ï>î|qæcy°fÝ +O€uD¨o|+s -*™.ôx“i´oŒ­ñ|Rò]R#6C}½¡‹6ÆD<ƒÖÿúWñ£(gÌõpÝ¿øQOÔ£N^C‚Ñ ºr¿‡ ù/mŽýõÞl--Á”¼Þ4ÓF #…ß—ê–=âÖ~u`›f©·&8’ì)&¿Þ;Ò1æA}V,;ÍßQWúrÛòÜÚ¡þÙ{½c²9Ÿ™P÷ÿûoõÿ”xgý3„ܶ(j³Ha§,掔g8º‰ÛY©ùÐß êÌä9>Mž¯ÿ2§S§qÀZw'† ™g¸º+ª?™v§r€6Ì å‘Í) b)S¾_=‘ÍmÞ5äNá0tRð“oP‘@‰Þà¸ÉÏ3Fmÿž—zèâ$íçwž˜®žk^ù‰ åy¹!Ã%ïyа ÝC\÷¶;éGè;m=|ÒÞ9å€Ú±šÉ  R[[Šh´ã¯!ðÞ7;9„‚ 0u´È‘f)_&-„šÓ²çBëBéÝÒÝšÇT?±Çî+Š÷;£•‘Å8÷Ö—™óò„L¼"T7ø-ΙýÈS -Ó¾Ô¡«Ñ€ú½·xÄJåÕ÷'{ªßr–“Ög ¿Ì‰=%¡ø‹­Øfá“Çd¥³E;FŠ·5ˆMàÆTŽ€FßŶšl—51Þ³¦Š5×e P§7‚œÉÎ6ŠJ›D^r)ú]š(ı étr’Tà÷ù;á±nñ$‹Å|xÝyJ€P%G=”M Jãiï_ÁÇa¶Zæ¡Q8wå=â©—pbˆEm ·¡Ísõµ`6J›ŒjpJ¾±¦–˜µÄ>ýŽa‚L:Ùôóü —ý­¿d-s£J¡s›iÆw•2¿¸[•êgÞ¦Hù@ÝI½kRA—5ëÒwò6áagü9Z&¯7Þíñ|sŠhùóÿè±ý© ;€¶ #A-têw:1¤wýf.õßl 3óø( ¸\*Ð3Ê@£mä–eZ½Ç›Ü®× ”Ãà@3Cˆê™Œë¡Àk@8£=2I¸EXó\_Ÿ¼eòë¹-É«…5äYæŠEó¾~(÷ùòp×kÄYšÜG–òn¿‚å&ë› CPåHão)Ftž©AžÉpוN:µW| >ƒ §¹áEç-ÅÝzšÇÍ“£6û©!;_1EèV—"(ç;•\VMX1OˆóÛeniÀûQº³†ÌwælH¦ß˜~oOv]vKú1ñ„¼cé˜ÃvóÝM6Vyà¼ÁuŒÈá 7|´˜øú/VÉ?œ @^G¢?3æÒ/?JÝ©Ñç(”ØÃ)¤l^®W„~äÚš]@ëÀ›yÇAk¼¼¨&+˜ŠŸ:&1%Ÿý¾G]J¿|q.»Ä†"¹=AÞ1ÒÍz¿Î!ªN•aŸ© U±)7÷\ -;Hßyýg§6cÈé¹Ä‚ “ð+»²ƒEßZˆsˆ„‰A-ìµò£Öå•œC¤½¸öï?ó¥À@É_ÑÆA&BÎcz¼ ¹Ê:ìH¡?3ÊHÙvSSêheµÑÀ¾¯!ƒÜÓÐከßÑzŸG®lW ñÄÐmœÓùùlS:‚ùÌq`U©Uï¹òP &ÊU§‰*¸Aå?p%ï†NcAe®®tG=L¦%°ñ†ãÄþDߣî˜<1ܨ!_~©ÿ;'óŠÝ„`[Mä¡›=§§ÛKyã>öw‹~sûìòcu­”ä®Zþëõa$¿ÂÑkϼþ!åÐÅÛ—$¤©Ø)ЙŸ&á葺 ˆåZ³U93úŠž‰Ð{Šë®]‰Ø±EÎ_ÎÁ|HbçÖÖƒê^4$ýª+1àÙa¨‚6ë¹C†Mc§¿¾~{ï6G*O·;(*~Ï»üãñ¹ÿñ#~ò@ÿÍG¼øiÎ>îq| -lB½U(þ™Aº ¤*Ÿ¤J5¬öÄ4ÔœÊ!M’=¾xÔÂj’##æ׈zªBñ„öÃ󈚻ØCc‚ç¹ê›²5¹¿[%çË’q€j8YófmqGêñßx¨ÿ’„D=k‹ý 
RøÉX•apÙ~þ«8Q$Z(.–`C'ý’°ÐúG“VIb0¨àUÜü‡.4Z±@£èç#²Ó;ÅÆE??£:ܶ×ý’£n„F;VZTù3¢Ÿüñ¿Õ°áu²lॉ¨x=¶g0ahJ ¯Ñ½ßKñ¦ÜC“Ørp±4äD׃£ ÿç;ï¯*¡ ŸÀpW¡ˆ>ù~½‹Ã {tÑ|B°ØV ìϲwjå,˜ùTõþ¢ã'˜ß5;šKv„r\ \’0ø’Þãw$Õ\ˆŽ§ä7éeáTˆLaÁŸÏ/˜Š:µ¶\d˽=1ðn[] ¢4ìFNMl¦u°X×Qcz':ã>§ÝEâsÇ»5O–ˆÚ¼Ñá ½Î/r¥-þIÿ¡dß³ŸüZªów$`!Tg´¾PŠIò´È‡<‹/L˜‡§ÌõzÓ…7¼K ¸°7ô?¹šŒ vjP8^wG† †<»¾úµ¿eq¸*EèRä{$h2Ú[džAn—ÂÜöD¿Åù¢ï -ø÷"_ñ^…¡…‚ æó¿¡eCP[Å Öµm ÂF`7©î­Ú×”`þ½&oÔ!Ó>£'¾—>~Xyáëd ø/Ƽ{õhO€ä«Ò†Œ‘€ôãÝëÆm+˜´øLqóç) -B•¾ÀsÞ«K@oKñõµ(ëjâÎã¶z Úž°òs¯ºÞ„k†H‡NrU}Ã5e”ŒwÝBZ›ÿÀ¡ÌÌs¬`ØšaÙ¦0Y»„íϪú"dÎ -¶âUnh¿ÜþZ™Wh¶˜¾ŽBІð)KîlÆWSž.]Š1*Ù â*O« ýÏ—L0ädÖ1cRx^?Ã"IðÅ$ ˆêô^®/qPá/gã]‘ v©ãô§¬[µÇMQ~,}´ì_êíO]Š~Ÿ=ål +„( #”s¢Ð±ÑŒš6b¯X©dJØåïôýŽ} )œb,7JO{ŒU„LrŠ%˜‰…Àþ¥Ur9„°j`owªá8vf ¿2D‰a< dÚ^|õ#þmâYÇ|5—‘ ÎOê¿1×÷®q'æ,Ö°Š:¿ -÷œ~ÓÜh™eìl“çì3ÚúQ½Ï ×ã?ê;w§êÀ,™©’ßdÓÝË%«èoö¸¼šR¨Å€ÑPˆÁí¶x»ízV R#t~€ÿ<\´›ž'FJ„sï +·È±,P‚>sÌjâ`Ö%ÃëAJ«õ—>cÏ1ÓÄ…Ô!Ä·¡ô³Ýiˆó ¢0Á“+q¶'Y³¡ÚU—šM5¹šX‹åg¢SÍ÷½ªäÌf=#¬š;«7c‚3!ÍSCØyC/%¦×„=øèïÜ%ô£9`鯈íÏZwO)½¤nSÝΞüç°<ëw¶X¨>-Ö›3›‰©tlo%*í^ÜÎliãô8ÍábD¯¾-u‘`Çâ¶ñŸý­¼àŽé’Ä”‚Kä×ûÄ;´‚$ܶHy.Ò¿4EqUä¢?FçsîÁiPegšgÔ<¯AqFêãoتüåc÷7ío&«cÎóø.X’ÂG-p÷²ÓÇ鶗m¦5gàè4~£ëÞØÐó„l‰Ðb:³%ÆnÒœ‹0ÕÕªÁCâž!Égºð2ßä´*{’.·ðF;3Yb¶ô¯[v !¢šë, adÇÓI45ˆ:uò|óžJe‘Bj¡‘ÿ7Á(`C_áÇx•ŸÚÈô¯GÎƧ‚ш´R¯/%Š&¥EÆ2UMT¼?#‘e§¨ä9d"£ªTiŸÇ"+hô½ºæ~87»4óãÕxÖ¾Ë!%95÷^}¦ˆÙ[_›ª0ô±1g–"?:ô˜©©Õ´;S0º#È7Chªß,sؘ”áø¦·ß2äJ ù?ü„†Ç T\d¿\êŒ9„r>Td ²TƒeâÖyƒ -^éS?ù”-r FØž á\ÓŸ„W÷¯£O’«¦\êŠà³â]_q:£ Ž×!9ÄãéfÙܨjvîDŒÎ8ýr=•¼•œ&Àš¡ë[Wë@§_Ç΋ÞÇXIÃüˆs™«áõ$äšÑižûè§+ -öàŽp\sÆök•YåµDQy¡h}ÃhµDú¢ó©'Ò„¨îŽ ùÍI§< ¸ö.&"‡¤øë•û>ý¨nò¾}êºÍù¹+¥¬ù$ì?R ùT×øè>2ÿòY#Ö3tÇ =a7É#ÌÜ¢ì8gÔ›:ïùòCiîançf!)së=ÌD´åþÞ )°WMà×nè»^GÕXeÝ‘OS8*ÐCh`ºN!-¡X“ËLU_® ]åÍ -nwG’nΫ›£Z~DÅ[Ê—NSÕ j•EÈž8>$G<×=3ªA¦qÒ¢‘Ñt‹D rÃOº¶ø”1It°ªpìlV"rv¬ì†x=‰'K¥¬i`0¿Î¤ƒ¦ÙKªt¢e­([Vòóý—Ga¿€ƒò<¨Ì)¡³ Q{æ÷Úy; -ɼŕCë„<"jf½u§,pÜceÝ ½!ÑQ Ýù¼><Ž½Ú=ü`l™N?Ö¡ÄA#&ÓZÙ¡–ðûªùÅüû•/GÄgDZêŽËq\,aM¬œXæAQ‡z2æDƒVF_Ò$¿±@ø»*Þë³)Â[ÿã9% -°N¨Åíx $’ņ؀‹ŽÞRˆP;tãü¸\„°G'Ýîƒ+À²UÔ,»î·ø”As‘aºÒ>Iû–¼ -®Ï!Ñu/‡¦ZmQ‡äaß&ñ gÜwQ‰À 7{ €¡EÎ9¶7«Ó~SthÌkˆGî+:Ų͛ò©Rÿ±Ô -Ø@Ç#«T§ø„:¯xQ„«¤¸ ð·œ.ÒÀï*–Îm¼µ{Á>(Àl)ÀÏ0z(›ÉbÇXý¼ßu¿µ—‰KtŸëÝlÎ,T‰7¥N€ªüÕ¬S2"K(ýSbmñ¼×¤ìGÖ=ÝÞ¼ÔŽëZ- *)Win-¼ “y£¦àýQôèó_ŽÏÞ :*j¾v¦r…md¸G?udêx‚x¤Ò"Y•TCHo Ò™S~R›F®B0ý¹(;ù† «ZʘÏlÑø=xŠÏØø]%[,í¦DÝÑF‚“ ž!;[«m•8 Ѻë^k‹(e»$P;Á³[5œóÆ»4…’ÞS4•i~¡¿ïv씓5fææzè.ŠÐ×råAqªôâ¶G+ ìÂÆήûhä‘QR[h¶šÃþ -l{¥Ã¨.¯.M[£†Xù d·ô¬ÔÌ; j˜ëG¢>¦|v?cµÇ@çOm×’WÛ¨¼Øƒ#²§=v3j’w·ë}Ç*Ag\Ä÷×7¦ÄÿŠ]÷ ™Ÿi½׈7Ý‹©ôSb·ó”ɃVðÏ[Û{¯‰`…•\´ö–¸¡3°P»ƒê€õ•˜vê>ÈyU» `­R~EêpZy{é?*ïGG«›¹{  ^<ñ¶ì(³­1Ñ—)ï…5¶£×[¸a×}Vëé¥qú*G‚%<0r‹›pòþo”,@®2JF3zËs¯-UÝ>*Ž¹_Ås/ >Þ1Qá|{:Iß•çxD{ÖÌñÆI·)Z¾÷`@Æž«N€ö#Èþðíñ=Uv]ˆ¢s>§ªRùÒP÷U´“»î¯²'aõðêJ\­¶pÅ3îÂû§²Ïþ Õû»°úëDãgrš'Û´ùô®4ë - ï±ÊKNÓIéÏ–4c´~b)ÁiUöt©i%_ø•>!žUcð3AöÐ÷ÛW€7–ÀDMÐèFc>zíIÑ!êòÙŸV-Z[ˆQüëW-š:%úÅäÙicVª+ÄÚ‰Ž‘†¾¼ŽPO½æ^IÄÊy†³¥¿ -ù¾h« hò-×¼ö*äkv:†«ÑÇ%1ŽSWªÀ*ìëŸy}Úå5„ælæ{‘}#%L)‘ MøD´oâŽKXµvFÖ÷SZ”范¯Râ'ìËý’ÿqˆ [£¨°?áÇðlŸabE2Ë&©[ìû ÈaCzrZÍãKÕ{Œ8ê–=yBô»¨øô¨ØJÑXweëÛ²?C4禺nZ/Cðþ>qmú1Öd£>”Æ8ï÷ñÞ(Gq& eGPQh÷ä(½cˆ-â\AÄÌ["×Ô1¸´üŠ3´s÷Kd‹óõe˜™s·§Š”§j -v@{#ÈÄ¿1ÂRxØB'ºôUG8b_†·­è™´ËÊÌRŠúâÌ eÿ…'q„—FD°SÌÏ©æàiEé§}€|V<¤Zõ™è÷5´Æt±ãC}’¥X¨™*ëc2ç€>öãOÃ77d;"úxáD»L7Bd(Â3ã¨îA"ùù}ˆ¯Èók²=‚(ÆŸ‘´É'¼•ªÏ0³žÒÈ"‰CSÓä_Å¡k´Ø1dÈ“úáQpÑMjìÒ²‰åá—öz(½]51·ù2O"‰†FZž‘òþÄ#8ßÙ!®Ä<'<±œ“å?Y’ºnb-¹×¤›~­ú®.N'nܬ8LA&‘C#1÷_ÅgÈwz*ÍVüè—îq»8Þêåg&©;V@îÁû˜XËܱ»þ£ µ“å*ܾ · -´f‡~g¿¡Þ¹rôˆmp>\a…ÐôòÁ*"ð—8ðc2r}*i^ŸuѾ¬Æ«?µHö3G~î„CÄÒIo“¤±÷\[±ÅfÊÖ -àL¹å ‡y¤½ E'0í¥XÛ1fT±K£·žžÉSv'¨­rÄœL ⲂrF}^';Ù:)ÕRhÏö«“±ï$¾ -[­ æ¸5iöæû©dÂåÇf8¡83F*Ü]a®>Eí¿ùËßJA#±$ˬ½jCŠ×{ R¤#®[ðSme?LÑOGŠÒš!Oñc7ª!}Õ¯;dQb&"$üÓ«æ4S; Õz²¨< Š²LU{¸È¥Òè± @· ¯ªÀ¬k8V™‘eéœöÌU ìô{»‡Ÿ2dgo•³«ÊŒÔ EDÜÈU4®$lÿ¬hÞ´4WÚý#ˆ^²5+Œ”ÊÜm*案„ -ZAä7s÷$3ÃN´}?5lq~Lô³€'É ç{%#MðMGûÎt=—|âGuî‘¿~ª[Pž:«©žWÐtû3û÷8ªÀ -e(ñ•["Y¥ùMgóŽnµ›®Wv]=õNŸ*ÅDÿ(ûÖÞ€}­@ @Pb¥3vIþ¨ÎŸÌN7þL±Oc§DO·43KâºîRa¾½¾ñP‘ÓÒç„>˜¦´ÕWI¿ˆ+œ%ïÅ?÷ª¸Mžo0Ü 
$@…°ü²1îa”íEÕ’×A=‚FR·-‘ 2¥³c­×C¡ë®RÏ[$( oݹ2‚‰x />¤¹½t§0åpõï'©Ê.UÁñ=”WJâ -bÙøY¿2‡`ò]Bȹ:þó/´ƒ¸äô?A”U2åÕ'º“#Óù÷7.‚½ÜðNCíë÷W³ø„ßÙÐâ@ë’ò `xwüîºjlC…-àñ0õ¿Ð ï8Î(ˆÅOPçÑѽS«@+ý9ésé¢#qVÈÕá‹ÓD&Æ®dN©A¢ÇqqQé‘(¸)Þ1D™áÁNÔAfæ0?RXN,P©A|†´2FÀës ™IÕÌ\i«–ÉyƼÛ“ë -hÝL¤|pì‚ù5Øâw/ÈL$ ôüÝ3¢øó~à…Eo¾ž~§IÀÙDæDK™ÔÍLeç²8W3ÌÙs3 ¯H`g(.Ã5§¢“1±©ÐOã22×*$‹Q5@ú D(¶w@?sD|›Á‰ñPP=Ö‘' …ú«ì~\+Cä-¯_—ÂŽU&P³ÞŽà -ïNKÆÅŠqÞ¥Tç§ñ$4Ü×"W%g®fh-¢ãÿÉ•XÁrRKÁ." -G#±÷0F9S%i%o¨“"ÄÍÝ=÷«—Eýá,ÙCØ-Ï{¼¶z¨;U¡mžG`—¦;œy¢ÚúL.Ø*}ž‡QåšÆ» @Xe†vBÔ7¯=eŠÅV5Çy ´Ì¿+lñ••5Fí’fÄÕ2·+Î7{h*F@ôy¦¯—Ô:ƒÎÖ°îqB^©Ï0Ÿþ1ò½×a×RNI=}ér/LÉqþNy~ë/–~9RëÁž‘Ê+a2œ2+¦÷\úBÍÊC„FjgÕdÓãèÔ¹z{sFËNZI.ò`ǃ>”‚ù¡:]4ŸçO]/Íäz—ï4cÂ[ä‹T·Ä`#fTQóáD ÜžöûR캲œn7.óåÔ[%Šç©ÍÂ$تÑyJ›“±³¨¿ ÏõHÅ÷ˆŒžé"*víÕ½:?Ñ¿þ‹%òu0—HdþU¾¥<Û¹Ž‰ù!è}bêŽOVAûuH¨ýÎ}Æ`w÷.¡U¤ûjˆ“æŠ}HO;Ö­Êwߪǔsmã9ßÁ¼D,ѹÓrN›yÙúת·½3®¾¢ÛHÖåÝ£ç(s©´ÉÈQ8}Ö9BrÅ»ö0C ¾#öëÈš' «¹¹úXñÁjj5KRÓ;b˜÷€2åJ—rò…ƒ-“›>ל€±ºgówf~2‹Áº~ŸŸ¸Ð$÷.þÒߊÔjB î$ êžÑÅp%ÎÇK. GrÏèWùj;öÝ[»kB}ì}ý—„[舎 €õ±¤?çQhÖßá~ÿÌ tªìùÎÔHª´¸~ÌWÂAƒ ü¶°#]æßqgNºöAZèÀsý÷;ᥠ-T;'†RB#±Û£tEæüKEû -€)Arôº®,øG•ðÏ;¨¢@ÖÁ: ÕIo•FŸA\l(Q1äÖíŒÐp ¾®ƒ¯!›8A=7‚¨»] ¡¼#;Ǽz·¼´¡sVoìl‰ì7à¢_õT¬KhÊØtgD¡U¥žt/µ9@^y»Ä3$Ðõ”$ã¡¢«ðô˜5W脃çª}îó_ðqúÇ#ãSåîë¿d/ür5Û Á(ˆ#¡™öQƒZ¤FI?mWöÒd'hwi³¢Ã³©žWb•!M€r=ªÙ}Î=Ô-…ʵEÅ‹NÛôIxt4~$};ÃR?#q^Â\Ò–Å<Æ<gäA­³Ù gØuÖå9G}« -Ð}Æå}§sdnÖ•ø<Â?@TpÞ³ åZ Ñ'J…y[Qµrà]••ÿ„wn½Æ;~ØjI¶ö¬øýП£ ¿Ÿëw`„ €%Ï>1}¯òqÞÎïf -;ð‘„ÿJ’÷ÆN•yõÍøà’G-ìSã~@PK‘ŸïmñI[½‡£2òþE—/W^‹Y°¿„0Ãa5†Êj?¡[ó—ÏÖ`%|Qª–(¡gÙQa=ݺ‰7A$ÙAæ¨j6À§+,DˆªQù¦BbH -Ý“„å+„[”›ãߦlz9$Á^*õÿM©_'¼CŸûCaí"£»PïèÌm}>½§rÀíGc'Ã4˜¡˜”#GK‰ÛIÁCW»£ŸK%KŽíŸw¬9ð~íˆw·ùؽž[×ûýÅw¬Ä΂¸Mo²^-9ZZ—3IÇÞ)BÆ…BuxnS$¡þë_Ž:SeÈ›H•?ž0wÉV xyÐϽT­"ººº1åU"'jgN¸e|vpÒt9Î*ÓÂO²_T¶Kùç²87™Y_‘´9.º¯²‘&P?Ž^®0l -%ðò„ Þvò½’h‚1ápœ ‹,#3L•ƒ+[ f\<¢•CzÏܯHŸ¨QvùºßKåàᶎXü*ƒ-v¤°ÒåƒÁ• ,+ˆ›çHzzŒ~,m{«FѦ®(ªás”uD«!dXV[s¬Joª^b¨3@ÀGÄwÒ¸J)«ìU.B\Ááš»IÈôÿ -šH ûoyÞñ–åíxœe.ÆkT"Ç„ãçRNÇFú~ïÔ™[Þ÷’¸G“!Ÿ×Ï Œç÷Pѱ5…­"l!©0tª2Ÿ¨ò< ¨k‘ÿ;TÁkO¼/V"gú#®$Åá£v ÎÒè6nú‰\å‹+È E¦¬{S àJdùŒ±dȺŸ=Pïå<Êðï@8ìã" šxÑK;¯‰ºA6ŠS¨hŠ±J½×:™èPªÎÁ9”È–Úo-HŠ.ME[§ûµô›aVô Ô©ÜÀ_ëH_£ºìX¢Ü½JÀ¾´þHàcqí%M¸Mq0¥˜òJ%‚ܯ%N}`”~UûʳˆÖlb²  jØ‘$½×c¶‡ð»… hïÐí¦ëJWœPœÚǾÈ-¬c†"ðÖ׃ˆó›§x4¾¯ -Â2]Cèc#Â!ëÛÐvk ¸RéÆô2ëDÍPÊñÆÃw²=³7é‹SÈIº`ÙÍqö0œzZ©®äü=[•âkû%žJš†ZDtÎéÍÿ°-÷µMPUÁ¼q¨ l*ÒÙïÃ]XOšå^Ö5”€!ž¥çòDˆëÎÖ×UÖž“L-ŠÚðg1ñs”§…¯ûûLu¾+\EVšš=ä -ÃÈ>ØÔÌØAFåþaƒ 'y¥êÓó¿oªÚϵH¯bàžZ}`¼b„;2EÊ.wx¼3iиS¼oh#æÁwÚf² ÌkÑÅ2o©´îYw=âÿŽZ é|öë 5Lu!+‰°Qm87ãZÆ£•¾êé¼¢pÆ c —Ô¸~…·0äÄk@ºBfº é>BLÊ EèC!ë‰e—yœ@©wé«Þ -R¢MMÒ+øÍ›!Ê|É-6{†< }U ~S×Åf?d ™f -ÚÁÑvDè;fŸá°þAMpF/̪±]ƒ[eÊqHËn‘e«ïv-Ó½x¯ 2ð™¬×ˆÓ÷èÆye#Ø‹<8„Eóý0{¨!òÉà•ÛV® -ž]p|ÁI"¾HØmÄÚ»˜9v&Ûá‡ÿH̯·ýh€>{\J´ÓèjÈ8¥JˆÛ⪠”¿†ô­¸A$™ÔYŒ-•^ƒXÞnêî¸iñ^±[½cï¡@Ó@Ö.>ñ¡÷lZ/ºç¾>& Âò{]‰¿‡¬¬;KH±óœn‘ðèéR „ÐÚ¨)z%.R¸÷ä¼}%²½D‚qP¹ ŒaÁB¥¨zÆjq­¡´%»ëù+Päñt êñÔšr0ô8µeö}yˆ/Tˇ°ãÙTG"¨ ڙѭh©¦¨‰¦š7ø3§}*™ú-s÷3Šóã˜`Ä¿Ê®~F”†MEÝ*ƒ)›m«¸.o•RÏ©®VeÓŠ~Ék6qõOú³ þP(:öÊ £d-š&5¹CyæªA#²³'èÒwØ€­U´îº”8â™A˜$:ô[Y°5lwë‘e‰¦eªwròݲ`tŒ' ­^QŽœž²Æ|@vʵRÐŒd!˜ÿ¥ó”÷(×Ìâ£übŒÝ??»%TUbÿÄ·‚~½Iœƒ-ö¤Äí¢ö²ëD"ΣöY’̺yøkýÎÉ)G£(”¼3SŽ í–:T’¶®k¬‹æ䡺Õ|F¬ Ï 5wÛa.Œ½lV·ƒHĪ"pú¤†_µqŽ`ª* [ß6¡ÂÍɳ;­xœeË”BÍ8Àæ.¡-i™ÔtN=d`B‹yBP븕|þŽûÒb¦£Þ0B/½âô°áf½J§£"ÿ4KG;.XϵôþcF¦Ž¨æXkáƒÐ®w•U{ŸØ$$=mËTpkúí‡ßid+Á<- u˜·øÑ°dÀ¿¶÷-cþ_e]lsÇñV5ÛÊhfÀïx*/« -Ãùƒ–5úŠÝ¹Ê{$fAT²~K»xá¬m€· ÒdÅ7Ê0+wj13Ü"œ ç’ =I§[Q³ÐÂæfëšÑJ¿s®ÂKt/³åýƒþɤ}†$Å݃7ÐWÔEJ›DšdÑÚNiz~éòóžÞ—wK? 
» ¼CZ{{ˆƒ·Åí¨½õЗ@lEýHÓ˜ Ìäy±<í^ „~ J¡.eöÎW½xÅ›¨ŠrdjFjÖXÛXûŽäͨ„¯Œ|¯Ï¬’M¼ûׯÐ~êaéÔ¡OX…Ö{àI²³žtŽFx€3'bÞåçRdÙ^1ký-…8~„ÌâÖ›<#ž^5Õ"}²“žŠÙ n ­ö§ä³ «ô0/³;¥ŸX4툻¯ÜÝÕR}Ky+›vêëï - ÑcðÊ#´ÑÇ®ñîfVùŠ£ž$M•ŒK§faq_O¿uê#&Äjè>KàŽË ë®1ÎZ&å+;¶*qlY€H6±¥8ïß,؉ÿöY-Êkºï̈©_ïéžE!C®û3ò)YmÁ³WvfçXù×rJJV?N+:V†.£³•K bw@%ž!˜ñë|‹“îpMúÅÇè˜s’KÚŽ†î¥m0¯³Ë@­yÔsC¶ÉŸØÈ}‹8ýª14,4Xú· ¡’DW+¼5ˆ—™,šÙh$òg§®×þÖÃ×P¡£v®s@¥ëŠÞyÕëŠÏD5z“E[ß};ã«lÅ?æNÙךL’rõㅤ窙+û“!>€‹û¯ÊÖ~ÑÂÔi|ý{9FªÅ?év­“KlžˆèÈŒ„DfÛ×Ò>ñ€Oj@¾•Þd¬¹7x×H7¸†U@Ð+~‘®à—´Ä6òMä1ò—H#›RJ‚Sî†kVõ³{ƒ{ÔP+Â×7„¾€ßV'4¨Îra ó)GWã]‹ö¢dŸ`Û{=wì…Ÿœ/GI~Dç˜çá¶^qG;ygRvíÔ‘äºß}ùP$•¸“Ü–Ž’ó:)ðHÍ"UjÝŠ«²ú/VÕ:$wþµHҚᤨ;ÌÔŒ^}Q·p×WÛ"QŸŽ“m -ªšý†„õÛ¿|^ýß;$1w¨ßîÔWµ¼’ˆý;ᜊX³—°ùC)áP&÷Ïžon¼1À±]É<­\d0 -ŸøÜg¹ÕFG™3gyÄìthFt¥«…Á'IUÖËI -IµQ ÍûªûÔ©™ÿ, gU( ¼ò|Ñf¿¤mXÅž:ùJÏ),ÌÕˆy”³aˆÔJê`³NWvªë dÒä¾ê `¥O -jÑ'‰ÿùAhi†A쉚6õ`¶q}N¿M1:È°¸ëàpyWw¤ùcéŒMZô½ v˜oíîúZ¦áÛ-1ÕW¸jší÷HŒçouš0; 36 F8ªé!³Å·o¤å²™€Ç¾€/iÿÌéW0mÿþI@q”’x‹›Íž+ÁCJŒ ¨pé&Ì+èCøk£“AYèø»”cèÖ)˹ð7dLÑ>p•Ó·8¹‚ìeߣ2;Rºâqt©{÷›…Þ¸ªvtÆù‰†µÈ#úCpÔPîí©âÇ|q5ïþ¡éQ¡ÀžÍ.EØü„nç3{ù `š±#aѪХÌì˜8¶pÒIõßTàdÁ—!IÑçDŒ·5(â¾XÇk0™9£/ kèJÑØq+sÕ¯¹ ˆj ðù;Re aRÉ<#å`ÕÓ#ݘÎTÉ”q4Ë猚¥¨ñæ˜KQ´2åXB´Gf,jÕ˜¹Ìï°}•=Qtzœ©R -¬º  ö&JÉgçÍØ!÷è˜_õPs/£ˆ°Ó¯oï(9‹r®ö3E|†ïwqTJ©‡•…h~e„Ù‡½XúåeEΔOs´IÝ ¯Aº8ļ¼c•¶ÝÓß~‡1* 7Œu«•‡ÑO´HùëÙ÷ÖÓTØìMT “lå¨ä×¾¤V¼>ü1Ÿ´®µÀ#’‡ý÷!ãGeû›tÆþß®‘¢Ï2ËÁa½¢Éa#ò‰È™Wë•iÝÁL‰2ËCkQZ‹²CÚ†2J^­½ëzï­g'„$ 8t,1+âN±w½ùNâ% -EËDLcv†>“©EÍ‚€ôù6#£JîL„èã€À=~Ê>è¹Óx™é:Jö+š×´Èø 'ŒÐOA…Q9¢ä36B“<žÆ:ey¤™F }[¬);°[¯ù¶¥&Fù\f -îD"b!q/½€8€˜ÍgVøp–b0F]&¿u»•²P.D‹èRÇdQë± ?V·Öþ“H_PÇdzÁ~tƒ+ÿ" ÷ó_UÍ¥q aY!‰„!Ý RpgëDGj“«ösÅ`ð§¢ã%FMȹÐCÄ,8 3è¾y"'—Ö,: ØUœÙÏM’˜2P£¨¢ñ)ûõ+[éèDå¿Jì Ç•ïÖ…ôÐ ŒÛ[9Ä*²ß·ÔDãZƒ3˜Ô‘bœÒªÞDüæú:¬úàÓÑ3Î<‚(A^B õ#€ vn×Û׉NûK‹õ“Âh `p™3€׺e}"â%»ÿsbeíšOT^Eâ±6S—èL¹ýÿ€ïÓSjTiyEåø‰Ÿ®]I ~"ê ÚSû¨ù’°=†Dcþð Ê3o“ ‰¿(뺮°*ñ*ô )‹«S©ï( Á LÈç‘sQàoQ›ǬBǼ -\Ê|àÌšql‰O¯r„˜0ŽÇüªAøÛÎô›b@²5¢[;§a׳áVªÒc\E*š<×™/8Œ]íÕ·­KÁÐ÷Þ²ƒTä(10ÐMíD份‡ÄGÁéí‘ [q¯•¾k:ѯC'Õ9 -ÒrAšÏy¤ÂWc¬òtì ÔÎü ™æcåzy,¶¼:ªÚŠÜô,0µöãªOp&*hjóëBî ™`‹’¨1LAõMM!\IJƒàegÐÞÒ\×ó¢pœ!âó0/ÁCÎØ*Í>g2„4SÁeí£ɇ!s™AçŽçA*K’±·ª{n)Œ‚¸ö5„ÉëÆã¥`:AB‡ÍC¬ö'NWÌÎ’i\ª:¦½üÌÆCK_;^ç2÷\%·ˆÇäG×>Îz}Á Ì€Ž`¶0 ªl¨œë~•ÓlŠòJú‡Ö[ 8’ô(¤m½!A§—éËUz­‰w…˧tdçÎzÅ@Tí~>l‹àûs^­!]´¹·Y>¯o,Ü·Fƒ7úD;JÖÉɲvG"IòPZŒm¨*n{È@[özµƒOgã'‡†ë³Èý77˜+‡IÓ×.>äA±ð ºjêO¤êÂîi­1¿}ZΚñ~û<‰þ¾*ú´€* } Rpei§2ðs9õQ‘S†Ø@œtwúÐœÀ&º?ÈWÕ"„h%y6 kaë˜[K2ÿo¯“[Ý“›R¤*’÷¨¼n2²¸©Ÿi¬ÍUÈ:êŒF6im™wP£Ä³þª hô¿“fÌg€^L4tð’ÑN‹\„`y"È<þa?%ùy¾g !s¹Fˆî$—;ÁŠ|âÖ3«øÿ˜»³$Íqìj´#Ð| -#$Ÿk(õúçîì/Ö>`(#^©¸,X„U`nmu6P—¾Cð‘Sƒ+;5oòÉ]qD…Ι×Ì]Œ¤Æ”÷â 9¦)}á¡q¬Á¨÷XéxÜèf Äó3°*®9‹ûYpCЩâÝR=£Î/öŒ…ØŸwVä ¤s|D¢>Ù·çMÚ!¤áÀ^Jó)=ÂßEtÌ?©˜~Þ ¤í#Ï1/+'9@©®òb›5/×"-ëF8îßß{€d‰)®{ž«Ï«o -)§Oðæû¨r6| Ú.Ù¯•9ÒйsN¼ƒtiPDƒ• €&‡ ç<‚Ø£„Ë·J ÿóïE_ÓùL+P‹>›»im×Ѷ®VHŒO-ÿ`ýb2=CÆ;rŸ[ÖÍŸ,¥÷Lÿ“Éôž†RŒ¡žÖW±‰}ËüÂÁ}f{?¥ÊÑ\ëGUU«ÅÕ9‰Ï|Pj²bþ{}e{Ù–çÊÒIX•ßóLûzÿÇʯ2‘˜#„¼àQvJ6w€q-æþµ†Díb¾¯D¯,„VeoëÝ÷,XŽÈá_ÆV«Å@xò<çNðDÍEÌ/Ñu÷ýXÕMä§jËÇ^– ¬víŸm•’ž´q™$m:EönæL ÎÃC,m¢[7UgìýX•­;.ìó¼¼zj_|¨·Û¦ÒIŽD?ŠXI4ÑmuÖ£V¦»×"°s¸g¸æŸ'ËŸ&Ô¾ý¢m Ž†ÔÕ D\¼™k#Kä‹ýUƒ¸©„§ Ì$õ!Hp}'U÷ \íZ&gÏþ#Ð ¥zC5Ô N¢(¬a¨|àQœ¤{"P:kŒ²¼†P;'™¹Êì{ ÷°‚µ‡ès?L>'˜¢;Ö…Q£WµBõZvÞÒçO°ô¼Å^fW– 䵤UÕ½UÐ=ãm±D”b#ºc¾ð>0DÅÎJÓbôöÄ`/¨Œ²"¤'$ -9b5l±OšŠ‘ɸ2j]ûXY±(k20{šêõè;QVœðìC¡9L{bqŠa¿DÙ |cþÕºˆðÀ3:Ö-øé·dŠþÂåH… -Ëšm@›ñÁ9ߊ’Áuÿü„ÛsÒÀ*Ÿô±ˆL>ÂCH/Um„!Tñ¹¯jDO-Ī#Õ¾IÇoáC¸rªeªñ‚‚/É4o5džw›·Db«¼3T¯»Ø½@AqZržÅÓŽÓ6¤Ú¹€8¯ aÅc•à¤Õéå‡&À¦¿ºüC=™9UÅHù”œæEÜ[£Hv¯Ô´ ->[‚È´ë›®xM´àÍìôw½¿›Eg¨gþq°˜¯¹·…»‚®€'PéõŠ‡ô˜Õ!MùʽÕ-0¦–it ñÙ_³®^±\ÍŸÀO,ÆÕd‚~;Wò=˜kjZœ­¬½¾˜oýD§¹%ãtžBˆƒÀtùH«ïi7ìD¶*ó# ß±ìï"T?c‹R4†™ëaF¾Y_ZÄÊ¡=ÅÊù‘OrsGÙ¢/ë–Ý/!W‡‹#ŠÛ¢_¿ÜMnÚ Øë‡þÅ'á㉺´Cãay,nyhƒä+c`Ài1¼Y³°Øl‹e=üBÄ8Ÿâ0ºhÝéë“–ûëîÏ‹¨å-;"Áþs:EUlM\Ôû•ü21{‘çQÓÏžµF-Û¹À¦\_¼¾· MY@t´ò9³ЦDÈ:*viË0ÿˆÜ:bb²š=!NüÚoüü‡?Ø‹|ï#o‘«¥¹ÇÖgF5ˆî¤‡-;By¹x½çÚ4R¸àRøÍw¯äÂyQðr/M9³Y’”K1"Ϧ;†(n° 
1mËþdy9MÏçö'MW÷<:¸ßI,¡sô+”•/É¡§BCel¢RpvZ¡/”ÁÕþîºYêW+ÂzvÝÐjö’Ë®ác瓆B7Ý!gz˜¢!Æ7)¾0”øÀÈäÞ:;ííœ_ÿXg—RDÅxüD{ãêð§Ì®5ŽðÏh´˜WgTðí¸Gˆßž»45ü §ï¨3P Zôͧ4Ÿ”>"À`Y© -Îc˜Jjv±ßn?CæcØý©Ÿò5Ù…FyÔa ‚/Í!êKTÁ¼úÓg7¾8ú„œ €Š4'ÅÿhšYý' ‡ Þ9".ÿ¯Øl;}Žˆ›5†ô8äÅ5ƒ&AÁ§®ÔôàN•‡3eÕß`!£üï¯øD?½¶guN2Ëak‡÷r3zòzÌq €¤. -pTùÑYR'ËVôB¾AÔù¿¸ªÓ9òþWI Å}eÐüÓ®”ÙÄ;) ‹géAí¨³±@2äf´&ñ8ï9OÏ„+Ð6¾[¼cD‰\Kž¦V2á23ÅÐVpd0ñ­ì| zc¯Çñ ´ñB¢ÔÐ_z;e*5ú«üÀ -ã‰:Îj«ÍBÇV]§ÍßI|mÓË2\ˆ%û–<3´á:§Â!9ÔX/ÌõÌ©¨‡åR*¨8n›3hÔz‡âV©/õáÁ#Ú ³v 0²¹£¥+¾¯¿ÕÛ]‚ppæT3@“ðóð#O^â2ïæ_nÖô KŸvû„ØÛÆßÿS¨_K”Ù5¾”Áù+ª$nõ‡â¸ã§j`&‰¦CDü" êˆ3DŽi;SqYª_‹û¤öË;1œñ»¢–ïêÿìŒ? ‹7„þ÷Š¡ö;ªßÑöY[¶-÷!mØó% þN÷„üFÔsjÕ¿Î óûs/w—ÏýîÏÎÓŸ”ßÈ©ÿ - <¾@ð©÷ñBx|ÎÔ9“Å’ )oeØa‚=wY¹§P¦}è‹o¡@kíÊÎU|s`Qþ–ŽŽ½¿%ÍUiE7ï7¸ü˜‡êbþêi€YS†ýIŒ=ÃñÒúЩ‚Iˆ…~Þ¼ŽÊE-p•$äåc<Ú¨º’M6óKú*Çý\à-Ïô’DIZtÆó7üí~—ǘÓnb ½M}™¿8‘ÓcÞVzHç ÃŸÈ«þš¦Z #ßB lã’Š„%‚N)4 ðà…ÿØhšœÜdŸj/± -«êJ»„2^1šVï2‹Ñ:;×í2M㑽™‚¿xOO¸õ[œ··)îÅû˜x«øV…ús âüˆõû⹚qc^Ø:|D·Vm5ÚKØ\ o ¥j_»"zM˜0¿#ËÅ<Ũ[îü¡Ð6çþ›!о˜Þgˆé@«ö æá•Ka8n󨸶ø16hNMð…*UKÑÒq –~Þ)˜ÂÏþúK½6¯²G°U@é$œQ`="«pƘ®Y}äaxtùŠšçfÏ_Fõ DVBù-æ‹ÓOªÎ„Ÿ7–%õBhææÙÃYýëÈí²~ÀpwÚ`ó]ßP‡–Üp!°iØÎÝÐêÖ;ªÏL÷¥1‹ÿçÏ|¦HðáÂw¼Õtç•UN4Ý£žIÇ÷ˆä‘‚…¦Õ´ÔLQwpÓ{ÇÂíFê\>µm âçJphøNAXï¥(kq/<íHw ?„¨ ÊY”Úw\üõnŸ$å‡Ò¹EìQ!†/µ˜ jÏ3¾Î†"–mÑC¸×z³µéàz-p/&Ù i: (É‘ÎTs!„±àÝØÖÎUo‡"à´½ j¬“)c0ý~¨ÿuN¼¹Èã(æ:µQ1Ä\ñøIÖ¥¢²«G¡SìˆôÚ=Ø·Õ•ú[¦‘ÝîŠåw³³ì†_õ®m†g<[ía`­!íñàjº ­8´Á]A‰³È„D•³0åš™EVó^¬`, zò{(Q«ïû?µµm±@UùHCöêúÃ|^y²ýMñºÞV;öÇ؇çI²mï4ú-Ø«?xI4KÞº_¸«^1¨}Œœ¢—NñZÿ¦$i—²ˆÎ1\F7^ïÅé%¶ÝdSõH;íHûëå:GcSIo ~à¦AwÚ-éÛÖ7èä½t»tô–Ì*ö?.zg'yõ;+I61¼ÒsU€ÆH¯%¶õ)ÑÀv{ûp“ zâ¬y~ Ë-÷Œ>ˆ×A›6-˜yúœ#}ØúËY¢tÑo G×pÅ }óÙGûȘÏ on¹4ù¹ Ç}1H³êŠÉ Ã8õƳ‹´÷w PYeW¶eÌÆ€€Ÿ[†:à<‹ -–4’ŒÇ&*&DŸÊÛ¥0Rü¡!d{!¥;ù¸Ô³ß/ÿãiÙR .þG4nˆÃ´u²ÎX2~«í¯vÛ·È]鑳óÉ2B -¸ì`—œÙ‡=NB¨¹Jƒ=ì¸à ÑîÉ|R$k÷»‚ö·LK.Ôø‘œÞ…8¹+ãN=àYVÿ–N΀göFZ<²ÁÀºl¨åÓþû¿ b¢¥« ²S …gÔËü¦˜sl¹îµkªÝN÷-êfî‘ôÉöY; C›øÊß©ÁƒŠ ýmõ£¶j<,V†üÈ…ö´”óK1…\ˆÎ _,¨ý9d‹‰Pôu%–½F °YW¹¾Z7çÏ}mâcÞê•Ìë¾ò;W£13ê¡5-re¢ù=< -ÔÂ$–i£GI]éh_Ÿ Eø†FÙß­×AB!Ð'ÍïÐ<÷¯4“±^^”¥ã_8Ÿ©ß?¢Š˜3§¯í¹1}ä”v†‘nÐ,.ìmïé¦?M4h.°AÑ׶ £}€ÊwÔ‚·¼¾NÕy;‹õâz<ßÍ”\)æP*¨®ÛÀÅc–‘ZqL1‰ûR=Â0X¤Aƒò!> UÙ¦'\¿Biµì:³ÇuŽ^Óœ¹i@å7{ ±«ù'LÙ»ÝüÕ½ÆÄuÙ#^qzå¿ÂG¢ÒŒÑD—)Üãå] sä4KtÝÏ`üëWÀÃÊb0ê~Ž*a -Ïqô ÚaDeófÆA"ꤘÐêR;±µ3vuž*6U…§¯pHÒsƈ4"ªy¤BñDJæ !ÄJäL„xåWâC×›‘íVLñê®!ôê•SÏJ9ÕÙî^¤ÑëRálj‡_y%:‡{DófÜ9÷Ò´Èü¹ŽJ9¯ØC\ÑYüð+¶Ÿå‚ØNªMA¼ÜàÖ qä6O%gúhgQUò‚Ñ'všÀ©ÌàhaƒÓq31ä*Sðq=«Ú¤Üň%± Û23&ŽŒ©Z ø̹²¬àmëÔe· -2»oMɪ4#ßÌîÂ'Ã4ãn–@ÑÎÊç¦ -ª4”žÂ#%„]sKœä†iÞoÖõUWþPIJ.®$†Pܼhý©p6¢ÁðŒŠo«îx¢$bPMõrwØ2ªF`ƒ4HçûÌÚc¡Š©wµ¬„(tÝéô¨‚e&H”½d»×•:³û½kPûeîKc麳2f–å‚JZµ-ì@?úü4aõœÔV@tqé<ü;o¯qAšDãþD…ž;ï¶GE°¢ÌAUd;„ß!¨Ák5d9¡\ƒá̵ZÆÉà 3Zž;…\›ûHí\âlI¼©j‹3*¾â1+iéážA|éÛ±¨J|£ù5çoœ4 Ùâ[½ƒtíããº&Œ@jKÙ2<;(/¥4c…@y9g`µX¾‘ˆÎ`Г:°…ë%‡äRLÍwž½”CF}p+ðÚ’{´¤ ` -)ÎÉ–%óUÎ(ßÊ2KWyIdöõ`ñÛDÜÙ „¡I”úšè¾ ÍMêJ=‰µpÆ}Vñ| -ÔÑÖ&ºzË» ëwP|bðõ¯:ÊžQ‹+öÀ"1 dD½<¸ÉwÇí~Tùx q7ìå ¸Õ¶Ð*3¤C‹2è1­Y/PBíCDóÞ&š–;¯$¤ˆ*ÑÓ?VÓ—•HÛ<‡žoœ½¯þƒYì¬è1é&;HÎ ²‚È_f¬9öÊ{I÷Ð0’‹ ýYþáË¿(€Hdw¸ªIg¼…N8@€K'Ìd_Z¦ÒÝ0¬†Vôüg±ÌêÛj2DJuh} –°WTHß&â -®Xüßÿu úÆn©× v»÷ÇEêÚ–í³ºãkäìi-p‡Žžë¼Äm^¶¨Ý…bœêlä}¶BO×ìJöI‡†àzþN¾ÉÝ&\Z^Û¹D#æf„ʧ]'8çlÀüÒè1Ñ%È•xx>1`ëAìXn±ÏÛƒÂÜ?ÜÌC;6jÒYØŠé’*’ÒW‰2^¨ºÙd¬ü=´²mÖåÝíìf綶è›ìMw›ÎÀ_óþ©Ã -Mò;3Ü?ñY8ÙÌ/yg+è^ÔÝ ±¦"_,Ñš[g£Dqbnu)ÙQÚ—s Îßécšng”as)-Qõ\#7ãòÅm¤“ È%® õQAã¹:B«S…=51ðǸB‘B¾%@nH4"'udÄÞb&FRxKØÉKˆr~ôtÁE= vcœ~ƒí›ZÑ”‚D:²ó~/‡¸‡’ۭǪ¹¼Îø?’ÒŒ¨ë±.…a©ƒxìdBŒ-FÏs=]Q¡÷ÔöØÉ׌¸ -4iRÏ £YÈ^EG*«U‡€ö€5\õ™À¡èõóÌøøÆñ¢>i^PxW!ÖqaZ(Þ{°Û2£äï-/X‰59£2uº=‡ß…k¾þË·÷2¨óy;sOgQÚjÅÚ`cÚŽ3ðÑãx„eÂçó˜§á²Ÿ¾‡£ÅĬ.`uðÊð“µgu†y(Øàêå dOªéÇ!²’ñ4ñ ï„PöëQÛa?"i{µ7¤…Žâ¥>ÖŒJ5|ƒîOã}N»~çœ]¡«! 
-Zf×¾[êpx¡N± áïᱸ’Ç»+[“÷‰%Ñúw-¡wá¬EÀÔìRºŽµô7J¦†Àî¼V¬(†„š®øBSA¥x<Æ -‰iR!±ùçClœAKm¤†Ì}WaynÊarù3e«xO²¬˜ˆÿd¡œ ðÛ)¬¥½GÙtêÌ2 vD#ã.°@v«=s®bÁÀO" »;ÞåF_õØ#óíRghÚ -㨎û°Øæ·ù0D°ª{(z¨ì ÅÌ4P -9ÒönaÙbgÔdïàW1 ÍC!'Î-r~—ýˆæ^ÊßCgÓDqn‘΃Ï\C˜Ïïñ¨ýwù_J7À³s+&ÙÖ£“ó¾V¿Djº$9Á…CÍ*w+`¦M³Ò8Ù?!~'Í[ý_eÐ%µ€¯¡üÑs@&læí‚–+IÂœ]ÿ+ÅÞÐ4æ&Ek¸*Ü» Øt›C†  ¨Ré+­¬Vo¬!uŒ•ðƒû)‰Ò[© …b1¤L_ŠA­á;ߕȈµmÈhe„ „PXÕ\àé¡!\iжµ÷ôRS±3hU@´ÚRÚìs'.n¥ÆF¼þm<©KóÉlêôpHcÄŸÕ‹è‘wDû½»ÒU‹iA Íá6mô6·;p–i·T*Ï<P?Ε«ÓÅó:Å×)2òuŠÆ´xj3€W¼¸P. ‘×>‘ ™ ÔýšJYÆvT…“EH´–ÃzÓz,%õ©*ªGx;&I0ÊV°«N¤ß ‡¿úƒn¶PÆ-ã¹SÝK®òÅC’B?Û6#å›QY²–=º.AíÃz¹pö( «^3'‡º^ØV—l)¡¥¶W½F†µµÄmGÄ{SpÇÒ»V5æ‰M»¤¥çwØåÝÖrN–ÕøÃ`¤Þ-Cf¸Á S‡l!r6þÍd†#`•1r]­€ø”l®ˆ `¾g '²“×Âþ°Cî{¼À¬æ:U†¿HèRRY5`ó „h«K­ÞÈس®KU‡0²¤kK{F×ZðQ…ä•ÕjÈ™hœ—«–÷Æ É‚ëBínnwÚ·WŒ–‰ “ -LÇÔ•¾V¶Œ# }B¥ª^Ý:Låö'W•S\dFirTr‰Aú½+–g1fÐ\Ý:&ò §€äÊ/ìI\C&¨è}a§ÎøÆä†eš*ïgš3ô&7­^5‘ºÔL{¡(µ®I‘VL4•€Dór„olFߢâÅXDf¾¶úš¢U汪¤©‡Î d{b$dR ÊÑ=¸Ï·ˆìš÷ƒ—õ9U‹nΔ»àÖº5G¬)0\åœÂù.d¥Ä[KêŠu&ù¯!¿¹ŒÚ[®VW†I›çÁÇÿ›ƒÎpº¡gÕ/åè hýûð¯¹yH>î: fBëh'êt}|c‰×>´o1únZë"~œê°ÅÏ'Bä­þ‚;У½‚e ñ»£Ûý)ñ¿#M%Xþ*~˜çå§ÈG!¢Ç¬âá¹]…¶“Ú}¶­íÁy$>çßZÚ6‚"F¥R>¼ý© è*»á@Ùª´) ÅüúYÙbRæ†~£¥ oÛ¢uß+Ž5¹¾Ø7ÊóôTev xT2 âÔ¯UÈœó5@7²'ºßC›|"¨GÄÄî|G”fÇ -Ü-m-ǵ![H8<ªü€›òïw©Päw^Â1y8W-*tüÐîU"/"“ªÇûúf‚F\àJûù)nßôÞ^¤ëõö‹Öë+¥ô=°*f€WÞè üâîÄ”ä€]®š/„cæãÍmòõÏF#ådtWŸ‡»0ÍBOœU ¢PMÖ³ícUì¡¥é0²®Ïïhck@ !ðxör½y7´Î£á¯ÀÕaЗ^4E)~KQ+Tÿà,&ΧÔT]ç’ -Ì«àéJí”­f¬ÎH,CZRL’¾ ã®ÄºÏÛTÏ0¨ ›³÷Uƒ„Äð[É2Ÿâx=+qì,üFlØlŒß‰ÉKŃjùp˾T<ôËaºùÙDÅ#=ë+{nÌeJ‘?nÉMÁ¿ -0ç/¿³|üQÐû˜®`D<ÎÜ) no²Þ\L5zfÀœ~”$V<¨s ½à˾qeK1³j'q¥JÓ`•[¿ˆ=3dĽ+j(kˆl—§(äÿËc5üef ½e‘=úq#á©„ÞîD™½ÂÓ' J‹rðA´¯Ð5ÖCÝXÕØSèÿF\Òù½ñÖØŠA™f¶4T P.uîËÑ3æŠpAÂàØ?Y–ìŸÌ÷Ïfï©9_" ÀóùÔü+cZzm}Ž½¹2©ÖC&Áö&ÁUí”óLT/Ÿ”²<4AYE2eùÙ$9?ÃTaÄËé¨1d¹ù|êVåg"7´%åÉÔÞ*'WÚžQb¾ó0z¡5¤Ùg6Çöq®˜£´L7ï£Q%º4pø\ “B+ -÷Š]æì—˜ ¶ó%Us¯ÔU=?B†gUÌáí’‡@ãYAÇóœ´p ]£W᥊(uª¨hdˆF -Ý‹Š ݹ±!Y¿AÍ©û@r¾+%'^ÝBZxa!1Éúúà‡^Ú–îý?:½à)+è}´¶`Ì©_,Z-,ÕhFŒHn=JÌV•Aů·éãëé÷¿ã~™üU=ÅWHû= -AÁ5niŒ)RÝI3Õ^±•ùòÖ¹,Mîø{ -íp@áðñf«4¦c(´Z\BF cP¬z]P{%™[´`ô˜ - Û¢i³lƒ;§É™ƒTƈá¸"­Ï¬™-Ì“_áYt²ÜšOöà(0¹-œ‡/_Þ5øKÖÞ_!Xäâöˆo/àã”7ˆ”ð‡ðVè_lEdá#ÒGæp¸ˆ -bjfgÔa‹;iµÅZ+¤Çö#¾ÀÉ~Æ¢4ªÏâ‘>aitÞaioÏêÿê+FE™xü eùº ¦woGPI…FÕŽõx-Î#ÓRnÇ:.›FU³þ<5ãêN”£jÈïãþÞçQÓ„ºö¢3>tªçÇd0v½2à£~Çý¼™ëœOÙ»A¥ÃhTàëȲZ`V v š2Ž+)Z5Òž˜%ò!ïxÎýH -œ¦/•DÈ&Àr×Ò¡°…l{b5½(П -ØóŽŠÐõhÈŒÕƤ]sÓÇXÄê@†ÊO¸ -Mï =ø<ùæz–øêÇõ3Ê"rÐ\0ÅlŽÉ·ì×êGƒ~<ä -@†dŒ»æ[­Ê ¼!:Z¬ë®ÈY`ó褭²_ãNr‘#Œ¬ »•laî‚+棂ÎÜuÚnXdJ„çµ0U{$ã¤Hó3Èü!²/à}õÚD(Û¦¦qg„Òh×k¸ÒδÈe1ò¶š´éùÝgñÒˆ$ض¥OÚó;šÀ7/­ó…i†D©÷ €Ð—L¡+–dÞ™MfŽýýxxûk•q¸;k#oèwó»iP¥7w­Ï¤"vÙmÁ|ñvý?4ÕcõÏM*DáÉü«¸dpG(ýílu[ × r [R'¶ÊÕª—1È&Nu3úú~Ǿj89KÔ!ZmP¾3dÐTç{[ƒ˜3µ8A䎿1äø‘^q&g€8Âõõ¸•½Ô¼Yñø­¶‘D®>Óœ BvîóHþø YÌ ðÇè¸ ?§›­iΊò©/Õ^´ìuê€f´=ÙòÑ×¥tSÆžkOe˜øÖ ®õpܵcįÚñL$ò˜Ñ÷¨šÝ–)i{%.4û 6ŽZº*réYî§kåHK‘Q_êŽ=Ý\J –}õëcÚ6Îàñ¹W¼9R|ÙæÄh÷ª·Qjkku7@û(†\ê<6Geª@y*ñ4 -Ñbåvóƒ,›Ï¥B¹9UPÞÖuÆì÷¤ÉYŽòh[~1Ž°¶ÚiÚ€f"Êü쳕ÎÙž·²±ÙVx„ªèwž1¥pø{8Âã| -„hèÀ§¸X¦7Š]äߤí–IhÛW)-Ê+8»;1Þ{Îß,¡ûÌ@– Î`ÎÕPžÇM[ßy2qOš [îmK§E\rMŠ1Ì-ŒTq¢Rã#gá¬úñ…½–˽çب»Òƪ¯énUó+pøË?701U‹þ"=/‹cžœ•wCŠMe\UÊfW+ðp4C¿7BNÕ3¥…1\O6ΞgØ…PÀÿŠ›îÿ €ÑÜåZH—È,D£ki3ð\%³„nY-b‡ºÜ/Ñ£›ÙåoÚ•±ï¬Ù¹’>ÖÿñƒüMýë¿X Od÷ dµm¬XåUΘœùL*kDÛÅWr…ª´à1áçý*¬A\°ÖˆÎÀ|kˆSŽØÉ9Gúnê’k.ÅË «3 NÌ’7Œ@äÅ\Š$Uµd'ïü¸6€¡øy$ŸÈXtZ(öDH -l,0Å$IJ:- v@ãæˆ!óº­'/3Ôw±xÐܲ,4 &yäJ_¾¿U‘ØkáÏûž«÷ŽÖÝI9†6ðeǜ󄳊²Ý§–Úי߹o)ÓÍÕ㲂ž:7hgè³ "m ®ÝWðÉÑàŒDÈs}t(ð8Ét8Þ”z“˜ûÎéž!xðe˱RêE-ùÂýWc+ªÀ ÇÆý4‰ù²E iøB’§©[€ß‡œtì(* ²úó‡!üžT¢ïû‚ ÛϨ_Üw~…6ç¸Ã÷í+—» ÏXç5‚83Áëhôq•]HËi0¯3¤,ÈŸðK/‚(ïL¶9ÿFw¯6Suþ31o$Õµ‘?æÓ ù }Ti9)$bå>—R©Å6Ø#V„d®P„@z3-þON­f.o¯9«‡†ß&c³wÝ)¼\O唑Õ9D×g:anÙžUiD1v4¼+å™y)Ôx¯ú Eˆ˜¿~5aã8ö l‚žÐ4Þ–¹áÊÌðFƒtkÔ(õbn8Æ„ÂÊ ”a¢p‡ÈìÏKœ-}ú¤#°´3ü'¨C­(Ö^ñŒUdŒ±¬jÃÜDf[ªK…iž ;’§aR“¦#›9ø )îÛk%ι_¾òÑ”';¹àªˆÅ½þLœžç_°÷3M9áú/º1 žì)Åx¦U>×6Ö•ò«iÝ0 Ç»?²íÀÄ¿a4†h>¥?“É!ýˆsæZx›D©2FDDåS‡ŸîV †%s"t‘DYê).„2ÿ²ÇnL(!û¶¾ Ó/¦D™5 ³åºçB5áÏúþO1ác¸Gb¾ÔB_`ιçÀ=ÖŒÐÐ&°0#¼æ X¿üñ˜•ŠŠÞ²EÐ…Ü÷QóÌ‹n¶æ©ì 
¹o~Iú†åE€%}ìôîE]}øÆàªÅlRóÆ<¨!7··'n{÷z}š­w¸wãÕ©;P¯;•½ß¡&[UͶX j£õØßÿéÙ ·‚¸«'íå¬U„¿Š¡ëWuŸ æåŸhX¶èH¶f`9Ö«éðŸGž©gOp®V”Aâ/•’H7=YOG–ÆÀà¬Mß! -óîäj™|Ã÷ßcé<Þš2ÈiÑåÍdk ߦï/î¸CµiËm\¡u]ƒ<Ç[Ø O£Ï|ôœk ca²ß’c²bÛûÇ7‚‰ÿºÍ7fî_qäŒR( æc{›ø¼P‘Ž»És¤!=_Þ[xQ#篸ÖÑÿÞ¹ŠJéL¡ Z’ë0p®2ÉšûÑŠžê;GWndûå3óýgïm1úÒ˲Á]m}h•>Ë{=8ÒÈ!í!r)ÎcJ‚úÖÛOG§˜1ÜÙ-sµï}1%°=q™¾ Ñøêmçõ9`y‚ÒxÖ}H˜*Q‰`Í2öÆœÿÚJÏó¸oû¹  –&ÕÜkÒ¤y³©WˆÕF’àÆ—›ƒk†Â-®„³H¦‘Û[ºÚ{ô•z€œAzïA…Y¦YH¾-Ö< Cª‚×fkýwv³'ôÍñ¬FB*µöΪ僃dÁ‘Žq¾¿üAËm±j…û¾ÈA`œ9ÙŸ -°Žì“T¬Â ­Ó»£OÞX³sDä‹ÕÖ¸«RËÈh‹säÃ4íÇkÅr]kŽi{cùÿYRï¸Ù­D&½EϧìiŠY$yªFŸjžh-Zkõà€üÎ-¶²¢ä¤YñU9æ 6«Œ_W’­G³æ,óÅcÇ6t®€aò&?Äy‚·Ÿ ãqjfÂ"G[$¿ÊäQV]¤^ßWiVEø«ìÿŽX*ǽêå¬÷xkÎËl=Ž“` ÑË]¼´ðÛKo)ÏÓîÐZ:ezfh¡A’\Ž÷—Üô)YeÝ÷½©éq(.äm1P.Ÿ)hÁTžDT°S-d¾ç åñ+|û…¾a5+­mȼÏ-Ž ûú¤èù<1yýŔޛ%ßž÷G€ĹêJ|Îiäk?o†®Fê\Ü”V:9§g•ˆà\n.°Þæ ‰…|Ä[ô£:¢³-Ö×Ê– Ìçáð²^Œ56wbÇÁ÷¿;öwM]çÇÊŠBâúh©”\[Üäú{ôY%”sÉÍÕTJ®`àìRy¨kP* Ä²-lÆßJUç·þbæ~ýJæÌìcKàãÞ*€´ ‰õ¨|ø>ÄBSýÞŠû£¬r§¿÷ðk]0^RHXß'+'½ýÍhiÅÒAêU½¿i#ž&@®¥h×ÂA¼ž2#öP’ô[•b{‡âx åë ìæ1½ ¦öªRÞVi#ܶH²ë£D1± QðÓP‘œ“Éo°³c’Ì<ªÒ¼MGÚ˜'Ý&$÷sÝ…O¡Ò”Âìœ.svDÏê&·%fÑ´fƒùP>ÔÚ¡‹ Ö*)ôÞUyØcÓÌdå\f‘%%„¡ÔÑRV€wŠOS?¾–‹ŽGÁì[‹¤Œîvm3®W|k¥=ýpKxO]-%erꃠs@TG#nÊý÷’ëW0i¿|e >¾tμÞ)X‹j¨û0ƒ K˜ã}qðæ©¡:Fu뛟WA;ÃTÁFU÷Ù䱅š?¸9ƒÖ} |Þº|øºü}7µ»Jhï("¯r;Ä‚cÛWi é SQ}úb&‰75 -ûb£¬ ¥‘íikÌ¿PdkBieÁU&ë{ŠY‚Rö .Ž‘o‹wÒ \H“±{y›BÂÁ9ý%ˆð‡ô: GÇ·fmú°`Bg È%}„ŸKë­3ø’Íýá/Êó¿Ò¢¾õ—wÓûß÷‡Úôž­E£­ìÕk© à>LÝF¶"}Cµ RtëäšQ‘}ÿý -’“¹ˆ…s…îï1$ʘÏIJ °jœáp¶ûüy^“¼~ŽâôÕÍþ}+¢ºÓMyP:(k:åÌð¢ša׋;' ÏË $¡’¬K!t”!œº÷R£œ; ¶òMЋíß9²U„‘ ÞÖËÈê"³sž9³Ï -Ñö°wktÕœŠÑ¡ÿÖ^^0m£nùU}†!Wb]27±h‚¨Ý=zÇ’ •þmYœ~êäÿÁçé7NÎœS'K瀺ð´c Ù‡Ó¹ÖtoþÊ ã(møPƒH¿>O¬•` !z×a¢1Ä0îžXOøPs½ëõÆ«h¿Ê½ó‰¬â ReTƒºV!Lž§0j‹xê™ß‘Щ€>£ŒãÓ"–}vÍ!WbKrsJm5ââå9"ÈJak#ÄlüÇBPƧÒyK2£¼ã®ÇkÞ|)Ä´(&œ<¼;L›^y,¿î¸’쀜i眚‰#Æn‡à‡D-¿WÚ_:0>ᇻ9bJwô¿ömóG¢ b%K¿ˆÑö½—»üxÒ /qÃè–‰i•êéèËUíŠ7Œ_»?ÎØúñlÕDëmÙÍQÜÝâ­é[YíGIïÄìÛDˤÔú]jP9•ú<ë–狉*&Ü€D‚ŒÛâu½>·$†SbŒ†Éü 3ÙƒÀ-â;bºR!½©\‹>u¼ºŸ5ÿòL;7õ¤ .5_.ãfu‚LuGç božZÛÈ»Á9Mßš;÷sƲƒÓ 6ÙÝ ë  ±|âÒĨË1ç¤ -z~¯‹Þxì©©%'×?1º¼¢uÉ^ï`@þ;túN²Hί©4è¥Ùpt.0‘«ëR*6ß~dÈ>*0Æv6CÛ(·ó´Å‹µ'© -}iÕÕ ³Gïãÿe7¡Ê£Ñf©ÔÃ8Iøó—m -èº2Á ¹Ê<%³b¾Ô3>qfç<*+0dã K„¤ð1g|V ™o-ecFNË–d½6¥‘âô]H<~~*ÏT«¡ 3Z2'úž]t/vqñU<Æltí÷ˆÈîµÄ»ÓKí´*Óâ¦ÀCÊðº–'3oò+d‰ž!veÑq« ÂOßú®|Ã¥:x&¼½’Ó5¨eš4tïà[uæ!ª±/ÀæÕÑêÒ~ã•-‚2wŽ1×3®z{$‚ýÜòú€”š€‰èÏõäP”¢ë—ž›ñE5ƒ×7¢ŠZÔÝîKÚ'*A‘-¢³ÈóKûK#š"^¦Ê&î½~m]Ÿ8×'\Þ" -YH#d׎ƞÚþ{¬-~=2žN©ê‡t•ä8cŠŸºÌJ•±YdFý9åt(Ø·¬ u®< ñ׋æ—KqÞnqv¼´”5ªÈd\#—ŠÏðNmMESëXzÈõšÝ.5_kS‹Àú2dnzàÔœ1[ÔéSžÁº˜“>Û`ð r ÐqšFD60PHÖ¿\Š°PõÜüÓ3›‹LM7 .#ÄhôÂs9yÇBݱHñÜ—XCƒ;A=QΙaJ)3åû«`ýâ6O 7š˜þÇš™Çihü:ÐKõöàI½ZŽ±âöΈo7¶‰¹’ÖËM­À†<ÿI¼‹õ:oµwÛ|"9ÞåÜìé B+ô]ëB 36šï­”¶ºxï1ÍT]ÿ­~¾Hh-sÎ@{Ϲ²Ç¡ÐíÎ }åƒnZ ›³_éIË1NÑæ˜ç,R§tæ驹GŽA³5L¾ÞbB‡ ŽT\soyLsåòT!S÷¸/_•´ÇŽ<"E3aíÜ”ð Âú<óL&ÑCñbýäÚf|AH}c=½¦ß·vІx²[¬bBF"Y1"r„ð .ÕåÓÊÇ’/}üÉâû³ xÛ>ée×k쥹£ÙÀ- ë¦÷þ×Õa¨‹†¦Ú«Ð\q’^‡á³E‹un±#k„¼­r: U§w#œlž0P£ªäŠÉÖýžOYÞí¹*éK’Üç~È¢óhìé ¹Û9Á]Âw'ˆšFlÀÇ5(^ñôÎÛ›G£4JÙoëëÕˆ²›®Ô¶×ÛCu@ßã`”·×ÓS»{™%¤»,ÈÑØb†œèŽs„a~)4Šg>ÿ iÕs!Ìç{åcäÌDÚ¯óôè*XQM\%IàP‘ý›Ïœ„k[qÄE-‘:üãTVéÒÄOqü-Áîœ Ó4â”Ô¨Žˆ?ÄS+¦àä¸×ýâiXx(ëò:»L«‘§æhÔ:w"Íû„æk ŽöúÔ|_ÅÉÿßð‹a÷Î žÑPÑô­èÒ6`ÿ;‚±Ñ)!êgfÙ~†ìs¥p½Þ[|t`*}„Ä¢Ä0´˜³Ñ£l J~=±î•KrÛãß•3ìÊž¤DõûPÇD¿s@k©~«#Ü{®>»Ôk»¡m9å“=÷§¾ÂC+¾“Ùî@‰H¿’4HõŒÔøÓzÒŸËžA’¸€èIꇓ‘ÇÛ¸“ÞÜ"ü£¿”0L\‡»N&C42b1Ž!A¡*ÄÜ;—¢¨¬†Ú¯{Ís&Rü˜÷ !¦sžÙ¯wžÛâý«å£ÜêU -õîšçö”-þgí}#bz¨jûÊôÇE>õÖµAò´PC3ç ò{Þß±©lI㿱ͮoîú´Ü¥-“R<7ÀÕôwn¸t…‡#¦L ú>)&^í—ZUÆ|ÒX¨¬¡Ï·…µÔµ¬EõøROmÿ{„pÈáeÏAq ›_€R ´Lx• êøQçÌ„3ÕòÑ[ÍQù‚®¦!ð}›¼,+œ £l8h(¢8ôñ´ðÍÖ®FI½âsBX‘4K,Ï$늊»aKPR®€´¨ ͯu< -ÐÌþuQ틧€#¾Øˆä -úÔx30 -\BÖj阗½btˆ 9qRçüdçG´ï¥Õvµ*}Á@âÌ>¹‘ê¹þ–žÐã‘9~2jL³ž·¾S‹±å?§N -ú=ÚŸ>W¹G`èïñ·7DBÍeôË;—B ƒ82¯ ýî’cÇáßÙŒŽJ9Ô;Þ4ȽAÊ·±G 2_J`× E°7v^×]Cpï²Ã‚»Ò[á.·wP„ÚÉ~2OqÐqÅß5) íÎyšC‰~|c‹]™ (|LôÙmœ.èNPP0Z* ŠÑµ©ÏÝÁÁÌ<åù³®ÌgÙ°ßÆ8 À±M‘ìZVQBJyç¯ -Æ$r°½3ˆ’ËM–…¥JàtR5#³.Þ‰zäéNÖ¦¹A¤Â™+å,0oýÉ=%N£0Å»öCgÔÍSŽ%`[EmÀ~4òíÊÜÕ’§öK]æ¹ÖÌ…Ì+©œZÖYþíxboÊ¡H†ÆP;ˆ 9ÊñØìÏMÇ0h#rán 
Ð!ˆûêPèp+D}`.ê 7m½”›j`: š96¡AãGê­<Älšsvj[`.užÑ˜¾œµp4éÞˆ6oˆ¥<•z„JÇÖ âì>|ßißsÜθy“:“Ïæ äw,5x©“Qð| -£#§}¢ùÐÓ6´Œ‰ j† Åñ'‡´/óß󀔢iò\Ég ±5Ä B-Š³„{EŒ©p’Žó¹8¬"ëPjS¾ê[ÑëØ”s¸”{*öÐZÊ›òóýä[=å*w\‘´5DŸ¿C.>Õá٪±hkÄNÚ½¥ãÿfm9Ò`[”Úr;¥0"¾Qéá…‚!zìšÚ‘p`e³b Ïõúæ¾®[W§UTÄrét7è—ÏA¨Ï툯RmÓ©¡L¤ŸÔ¨º>½Ð·)㌪ÂÏk 칞EW)5¥Êî>Þ¢ì¿É2úËï­ä_ ƒ×UeÙ;ïƒSé¾鵫ä{ʲöƒ4Át~ÏÚ  ±?Ï»BqYosØ¢kÀKòU4K3$ºRØ^#vŒ¨Ðþ¬R)À1ó — -l s#Ônê«œ,qΰd‰]¢)7RÄóÕ5Ü3™¹Ýwpê­jpŸ‹¥B—ø=»\áØÉ2"'&ÕÚOH|ʺÊÑD½¤X¨*‰1)¡C­!>ÏÏj© D—«û:$X;Ù±ö½ -³ #æi—ÓH±’³Ï‰.ÃòU¡åº¦!Jú¼aÛO.åÈâyÄ dË~PÍ XnÔ;Z7=õl”@±‘ã-¥'‚|q°¤€ÝžÚTؘŒ´b°‰m‰ó}Y#õ ½J…”d}½Mò¨=E£c>–rÕÇ7&_- -8/=DÕﲎïacYì$Ôò—yLwMw"W%ÂöžV¡.6Ãß½ä#3õ‡¿üò‡V¡Ë—-ç¿2H±j®p܈kµ®ÁÚ1÷tì³éÓr.>ª~pââ®+&K†Õ³j"nŸ>ú3Þ U1º8Þ•Wr‰v¼z=-Î=U|•Ä.ž+£ck€³·‘̇¶·41dLØ.ÁC[éºÛ›KÐîÍKQq1—3ä¾ðATïµ@øDÕO‘JõÆ¢× B|æ&U¿Bœ ¯²¶óÆNƒŠÔžªŒÜ‚p˜vÖ4/=BMÚØ„‚=€ß¨æ×Õ3ÙUtüd:$ÎÙúŒŸJª*)Œrç gAõ2þuI—Uµ•Ò$oLÚæfE7ç~ŽsÆïäâ¢ËsSá÷õÑ°f(§0ÈSÊÏ2ת-ÞlèGÆ\ŽîXS ÝhÆu{ aèêØݶ³$õ)$ëÃê¡nÓ»T¥rp¹{¡€•×GˆŒ.h«g:Pkž8UMøN¾I”ʶs!澯6rU¸mq#ÚHiPåȯ\¡÷±_ü‰'ÀªJÏFÅ6¯õrº‡Ë½ Yn5Ò\Ší -u+È“ n<|¸´^iÈ¡O®Õ&¡èÌÉ,jþZ¤.€q›n§\ßùy*=Ô €«C†òHv€èÒ›Ãf—FRç£|þç$N}íºýÂ}úþ9\{7™È¤h+Js2ν]¾­þÓäH‡ª×¾,»n©·ŠÀ¾·½˜‹'—»]Á2Ë#^9ÙÞQÂnyƒÍÁ•S€.aâ>Wäz 9ÈÔE‡‰JIbñ5ä Þ5•ƒ(¨Ì¸LeßwjÍLjÏêuÕ½,—hm ÙBõ¿ãJ:‡œ©Ú!†ï-¼½&[ÙÔ¸öDMsÞm -Kù-LÚùžJpä²·¼f¯ƒxDÔH—æù9D†î½¿½X -xÒRƒâ€â#ÓñÕk¹/®ð™*ìCÁ Ôþ ‡4Õ¡× TSèªZDqÂÑÕî¹Ïó(Û¥œf!AèÞbÁ¹ yÚ˜>ÛGäß¡ü6êWÒ'¦öx&“ˆ˜ E¯ËÖžª›GÇß±Jó;êÇ{‹íúàÝç,®;†Ôñ}‘-r÷\9pT·ø3-Ca-lãjK5©%¡eÒª@•žãŽ¥Pó  ÏŒ Å²x± ¸4|ž´e"üJúšÂÇÕŽÊ.ÙjÆšÑ5ö°PbjÓŒ2UˆQËs;GÜʘ_!L$¹Ã$±‘ìÔ`³šûóýØÖ?T[ ÿ(]Õá™Þ'šû¤Æ¨)Åñ 2¿´ZÚˆæ×ï)àóKaî®XÏJ•Dºv¬Òï½Å«œ^O,‹¾£ô¹Á=U‘•ÈéƒWm,è„#*ë«®»ã‡^±Œ7ƒß¤ÀWU9‚,ô’S/'^;œ©š„åÄ#¤«&¹B1ÎÇDú*1Ã_ÚÔw*¾Œ-D4—[e–W”54 ~vôÔTÂÑBœwð#q‘ÖæwÔ: !E›ÔZO%„ëNäJŸeÏÃI#z¥¹d1£Á2ö·ÃÂ% ©½ º£&t„\ñ¹÷=ƒ’=Ô5y# š ƒÏd^kȯ£ŠÌRK*ˆLÊîÕgP/R)Ù"ujРL­ùÜ+Fj)ë`?+EJö³~%bÓÊÔ±9*k”¼Ÿ\p·(®çRa³×^—zb@ŒQqVˆ9(\ضŽ*tBŠÇpX霚Vïá±o.Luµ]‹Ô²$>—’^UZ„^<ìÐÒr7áÝ6Q¿ÃïŠ^',I†(P:M÷ˆ2BÜ{é{¬b²Øây´Uiqœ±Û™»Õú†U·Q+KIz¤(#XÔ{åå¢Óvò¶¸ª¤Jæy¨±m«ÓDƒâ †Ò½Êø²šS(y¹áI»¦*¼¨o<7²m=Cd@…·û]S}“‹bÓñ³Wjuf‘ÔÚL¡ÿ¸¢Y‘/µgŠŒµ6È ^/„ôÇ7¶‰ÕOfɨãMu6ºó¤(•{e¨RXѨ¾ª2gòöçîñ'Xáç&Â7Q„TÙç>9§Ð}o«ñVÖ ÿðCRÑRhcä6RvnÔulßwÔÔ^$VßPîùXH®ƒœ·£Â¼-RIs•Îiµ-¸r[¼ 9C+ŽÛäI9`ýN< õ³pæݱ{"$ÖSÿºAu¸Ä Z¿0a^'~ˆÊ§Ixž‹‹‹sæ¤ðà¶É1ÖtJ«4ÖÊo<'ÝLEp!| §¿q2`;¸"[ê¬/¨qa]_!ù­UqÇÈШÍkFÈ­KÁ¨!ÁC½ëŠŠþ',Ë¿óA•21gø-|¤y…‘L'Ô•KáÐÞ±‚®Æ)ïsÚ4¾X¯8Ù#«F®ŠŸH Ž7¥–Š“ˆ]¦Üó;ƒØ#/PÝÁJF4V'þ\()ü«^qÇhrVÏ$+zJ ‰«»!Ü„ïÛ»y+Ó5jâ¼yz -kcy¹>=Á(%ÝÁž~Þ7O§ˆÌ0x%N·È !y™nìPÄb¢Ñ_àýþò‰±û5Îé¯T=¢Ð!‚‰óiØÑ,ŸÔŸÎÅ {Ößâ t¤émÈóí øš«@/b؈oÕ©~ÆHu&8Ei -'î\ pÄTš¹‚VkŠs¸T}%²Vg„åBź—'¬Äµ}à=Ö„A°-ÊîÅÉvÌÀõPD2Dõîð3XèÞW1q/—æŒ^Bž ä{Š¸xHÄxì\j.#¥—^x™+¦Ý g½ÑM‹ÓaUXš* -!±ð|ʪ kˆƒ¶îCÑŠx–ÄÕ¯ˆƒÚ Õ,£¢Àܾôë¡Ôa6gÔ^Jó!úí1ë:¢ëPñ"„ô)éë = -’† -õ-”]êÅvÄÀâ”n »¢Í<ÉUå·Úpi2T ½Ùƒ÷”nøù éù­‚¼Š9Ð ;ÕèB‡ö¨œ]äGtÊêâr2Ž¸aœ5ƼØ͘m_)DgÂñ¤ÿ^˜®G -[2·¶ ™agº§Ï^x7y¼ mK¥lÕ"¨7`ô£Ã¡g i'ïçÂb?jfð g}BüùƒÖ€õrü¢¹]NïëÕWóFìñî°ä4æ$„7~ -àxDHN ¶–;P¤nõ`·QèE4³ùqö­Ç†Y†?«ÇNÞÐ*ÿªzq’S‘eÏJ½þï®3û±D~“F¶¸ÔFŠ -þbN|&Ùe)uÌ-OnïCtfB+ó©1hm½¹šZ”v ÅdlÊçs›Ë-G3 ´—0úo3š¯½ÚV_͉®MJV8Æ[ŽŸ±Ý@zep,¸6˜–¥}÷¢–‚*!‹V²Êv·OTîG .1qÐç1®q¬±§„‚Ù™„û7ê9¤«€µb¢À¶üØzdѱŠ'/*n••û‘yÓêÁ„’¶µã+W7Ã!j‹•zÐûLŠD¹ü(½Ù$´£b^­Ù¼ˆFÎÇ,¦ÕŠ”uL=s')Þ£NR#¨1vwZœFÌ8Äâõ³úÌåö,ü'â"s† ~ŸE´zÞRÇÊQfñ*íƒaµžÄë‹ü¢nLË°À »u WÄc]Ö‚ÎÀ/à»Ã!LŒžBæëŒsîQT_HAlú"Õ§µßÎãþ‰Ì…gq¼$-Vó%F{Ó)xþ™à¶Ž!=U3ÊÓm<ÅM„‡Ó}Á› !`ÏåÒøRcŸGô½·7CB`&³FÑ„¢ÙØi/E*¡^•J¾\»+gk²úXëò‘ÿ(²ÝÃ'Òh‰ã<Ÿ–þ)áùëšW¿æh³ŽÃŸì3ï~-p7ã!õÎ89Á’¶HS ÓÀÂ)&c’NOÔL“àÐ6O·§0yóãʆ¸ -sOZôå÷Û(ŽoÍ­‰3ºÒ£D.„¦GÜc¦.èÖ½  -œbF ~F@ÙpDå¨Ð ÙÛæ±zßÛH¤aCb©n?¯të"©mFeΠ¡­¯ª¯ÒítAa¼-ãpÜ]Šø´:¯àbÍ8f&‹2„^CæÃ< Ô}ñÙ¼©+ì’M ª•¦º2¥ö³„,:î'ÜUïoãÍÜ(†Ó©e¢Ï:rfZÞh'×¹½9Ûå*Ñ¥KLØó—6kÈeù£”´‚oòÉÔ‚|§ÒðñIñ*+ØÑ€ ´!JX:p>ùF‡$§ßaŒMîž“ Émîyó ÁÝo†Åþò»©ñïâ ¡/}µûýUÄb¥_P¸+™ÊÓ‰î¶ðÔÁ`d¸W¸‚…nÃ…ó~¾¹¡¯Ô 
³Ù­(Âq­“Á!\dé]“ψ®ÌDˆÜ»}7Ý!t¨jÑâÅ°’&éÇGÌŠÅ•ÙK%½¯Ô‹­ãž¾é7S¸=Å3Š2ãŒö\‘tØÌÔëBv(L[ÂÓ«$y¤¡ôRŠžØÁˆ¹(E`8„ç߳㦛©îµŸ+)¨4Ä‹.¢”ü$A±¯4/¨C³½ï@ü?@r”|‡r9ßeB¥„Í6õ'R-‰Ü¤…}]²U…$øq¤}WeÕ š¦ì8 yâÒ>پꭔ‚+XJ½“Ÿ@ÒXa¬-¦ŠŒþ!†ïB¡Wì“£Ê-'3{Úg›uû)I˜=àæL ]ÐUZMaHTNöhtë„ - mq!Gê^†Èm˜…™‰ÿHpHá•ŽrlÞèë÷B¼ïÑó/‡ƒŽ˜Š’Á¼û‡‡0L0!mOŠl™+9ÿâS­½6ÊxË~mïcè\Žc¡fhÉÌâÒl!¿Ž˜íÿ ~ce¦Žø·ò+ÔÓö?†ï;ÐPw@^hOQ¼úÜŽcí7=ô@ZL~‰³áµ¯æ3Lꦔ¶ -ë4Ÿ3:)ÿ#Í©#€ñP–oQÿ' ˜þá½ösu–ÝÞÓ£Ùö¤ÖÊBCô}Ñ iÑssßazÚ9:‘r„ÅÂÈnqom«+@·„|÷=öõTT€‡9oô(âsò½Yw1ìøHŒªˆSÙI6D]'6Xoè|3P"]Ÿë~NÜ3?ƒê -Ýå‘zv.áÎ}Fú\Y«-²T“’hßЦÀ<—Ò¯¡–Î_y_ƒæ‚jË!‘+3#æ¾ýÈöpï¥[ƒß ÓñŬXÒ‚ª¤OÖÛy¦‹ïm`$´塚j:ôÄGlàIl‹ÒxÔû»³íýùÄ:nmC€H½¤ÔIž€¸µôÇõ’ XŽb)”]&?î¼Çs~°ÒA)™ŸÏYQðMá–;*Áx**É¥„22 †ËUDo¢ý¦ÒàŽµX[^ -*¦R’üX -í1ÚçÖçYÊ7ö%5$lýc ’x£ö^AÙ:é2„P¯â3—“V)'¨d"Bš•‰¶J¢Ìå⊲G£È†©uÐqÚ‹ú r~jsŠáç¥7R+"Qþy®‡7VC.ðZ%%ºó³õè=Íÿ+ʪï§êBkìÆò„3OBaå Pà {‰“ OmGñ^†ó¡Å­*ÈÞÜ,øÌÐH0›ß>1ŸõyGº}FÀ¡Ž<®º—ùʉ…BTeLd'Éíë æ¤--HÎ'wsº’g²ºW•¦aôÌQ“gì>•‰Z•·™Íñpâ¿nJ…°ìý'øà÷^b=ºËÚÑx¨?™-6pa—ìG”Ù¸xEf¥JX&r?k"Œ©tñÖãóþÃnµîpK¯ Ãñj»´ˆð™ -¾­1ã|O=õ‹wõ>v`*ÀýÆÜ üœ²1Ã(øBNq›Ú»Õ{tTýÏ®üåï¼€)þ¦ú¸µ9AQ'‘÷ˆåOðßWdv#`åÚ“,ÖêÌ´ÎDƒÆ"ò½ºÉÑ.ÝCd½+®Ž!¶ò(8ô@ª{Ðó?†æJ0­“ T§=ê¹¼¡Þr:bâØ-'+оMÓV~ŽäÇPv WNÔ8I´ 6gŒ¤ðJ$|™Üqmï#áú–u?Ê°{+}–¿§w­[~‰ÎãÔðË–Ϭ+>óŠìÂDNÃöaø’ã\ûÓÜäË·m‹×ežì­µÞ­§Û9¸àSYöÇϨU.N{IåÜÎg7WŠ+)‹©ðÜåZŒà èt×°©zT‘æ-Q®EL;²‘ì:…“Ró$Qr?rE››ùçA:ƒ‰™[_Éäò3!`ºÇã. -ÂE 2|=Ð@Ý6 õŽÁÙ*ï÷øpàÄõÿÏM«X´Aväÿç¦Ñ+øBVPU`Àlõ+Z¡Içž]•žfY›óƆO°èŒG8Çͱ~7Ѽ1o¥I}¥óäÀƒAu¾áí)]¦ú… dÉäU_O‰wæF#‡êi+GÜ`)Í¿ŽCÔ›2—Í\ŽÉè±/Ÿ°ï˜Ê¿´/ö]1Ãn}9¾ Gÿ îë©·> qЦ ¡íü®tWZÝÛÅÉá)ñ¾'b ,)¡mêw‹cïPؤ“ˆÆï}”6بNÊYѦ2[¤^Ø¢ÞçâkáÛ3À&J[ƒ%mƒß«÷÷¤ó»4ì¶DÇwÇo|ªE§¥C*¡ Åø˜:ñSD+5¸ƒBž³+’¹ØN Ñëþ4~çN|“­Qël~—¸Yù‚® ÍÈ{¥ìe¹C]‹ÂöDn–!ú¹hTt’çgïiØYlèô¬Ï¥Èðç Ê¢r Ú6]gDu³˜ï]ïÓ™±¨P|”ÊŽ*×<ð '̪Dí{ Ò<8œóªhñYÄ%(¢Kv¬!rî0a7D׆;ã5ÿOŽµ×Ïæbb¥C+…”»Ì»)Ißc‰Ò ÒêN]õÝâËp‡æ:~S2°È•÷|·]´¤¶•×쪑ðl"yx‡”ú}0¡#òÒ±B¾à{·¯!ÒÛ,ËoLŠÿp-àkö‡ovŽ8Ù1y#m}M^Ø%#‡K…?N¸3WÍÓd<]ú=’94ñA6{äGÔB ?+²Q5—קú¼* -å(PÞ}EH;{+ì’QÄ1 -´ì“CáI˜ /ÞžÊTCú6sÕ­":µ‹‡Ne§×™—üNd~…B©9°á)¨ -]+ôˆL³jdHÅŠeq¿j_¼¿•>n[„V8Š+-¡‹hpcüÁJ°°øg€ ´I¨ÞI ?µ’îO­¤û7Õî|WR–Â̺«Ã#™+_/ò~ji÷EBçÿ¡ž€èW\ýxþ²²Ò¬Îѯ1½Þöüã¼åë-ÍvFNK&cÅá¤uÓö¸t·˜Âži¸µB;j<™d©@o&U4â)õo<ÕƉðeª[Ažæ Œ¶Ræî°Âj[[´0œ·EõŸÇúâ¹=²RüfO ÀÁjZ _HÓ£ -‹Wv÷žw ”¾ÇâŽö->WtÕ -¶?WÌö³üç,‘ðÜ ¾½ïlEvíIt#¥PDƒx‘6WEF5Ñi”¶ð\ì?¤uò¸{¼›ôÃo¦ê•š9=í´Ž”ãWtVͦ$CÐI¶Yïð“ÜäüK3³²wK÷¢)üòìätvw}¦9½À¨$žu@K@ í-ÉÇ e¸¸Î„lÈ·Ubtø#iNÔ -ÑW^è¢yÞ6)ÏÉg¶_‘½¾YgIŽëºo¥ušÖÁ’x ß°ô™îÊðáùŽhû¦eø÷n`ðŽ¿ç‡Y¿ûí}­ïWº³Ö#J®€vjªìÛ³(à„é#±íÑy¦BúªC´Ð fj‚ªr†O… >£„60ã uu3¨?[ô›×)ÿ•fá?@DY™"¥W"X)(è²¹ôÍpb?ˆNýôfߢßÈþ -ðða!Aø §T:h?ŸßôŽHkP±½µ‘„ÕÓm•ð((cÅ÷ Ž+ãé\bõ¹°#óëÎEF|[ÂÞÔ¬VJùâ<…[©{i𠲓^ öûJgRO§×3©ßØö‚&©KmеV÷žrËÜ%ÐèÍX¸äB²0»ZòØzÔ4>O@¯{Ée âd'šÕBF;vz×BáÓ‚ã yõROOýA®»E¬Ù‚šçGà9¹™ã.›Þ¾™ÆeéÚÄDêó¤í‘ÐÅEâ0‡ù<F¶ˆ2»íôµt?/Ô?,ÝOžo9v•'Á‹Úµ4EÕ'Á±ÞߚރɟÎÉ»QBX<™¢GÔú ôté{•\1SîôMŽã-ÝB_ž¾|K·ê¿3FÙÞbõšü¼1jP‡ïžëën+¸po€2 áv¡ £Ñ S"9‡°e®D—¢ÉE}jFk“€²ŠBÂs¬A6ç‚Q|˜ßp¯Š¸:àÄæ–¾a:û9p2$ž•zc†gµúç›à¬‰ÚÌ„gؤhñ|J*ræ¼Ð‰vȶ†üã¾^ ‚/¿ÓJí%%>«ººóÓ3Øèm¼àØP?VÁ©„’ù¤nKˆžfÙ¹‰|.f°F}ˆÀ†þ©ÿ ˆR{¬þ{HŒÒƹ®¤þÔC.\ó‰ò r6•¶}Åë•Æ¦úuúo”&€ê×E{°Ò­Ê9æîÀô -þ¥~Ç">v!+à~’zò ÚÞºQd½‹&!D$_}½-‰¢t•â—ØþØ¢­Áv¡¨íÒm^ÕèǤ¿õ3Ðÿh¶`U¯f¯¥(ïá~õ|¿)ûRéÀÚ úƒ_öJð¨\…ƒ©„,¥W6½ÿá‹cR¬Ð~.)Ò\=hµàÑ•ËXoâÆ%‘:ù˃¡¨”¯à ’ä\|z%Èí?ÿå—÷xc{§…=uNªH¨@¯Áwž1z^‰Õ–“w+ŠÈMgR¥¦ÄÉV6léî‚Ï^ E"o[šh[µPÎ)ÈѸ«!3¸=£y”%‡Ö„ç‚#Îþðã\®Ø·@hàè\sþìu72Z¥J*…kóÚ¼BviwPáän?×¥Äð+èn4N9Œ–Ö¡xyÆ@™ZSm\¯úl­Ò÷øà€ÿÅ­º%Ãdÿ€WŽ{”éI;&Ð ¥ç*Ð?äíœàÊ0£îf®á#Uþm«Ú÷ßj%ç‘4Ýâ¦I^6d^Á~µwçŠBLŠY؈/³YôŒÊŸýòBác.,ëŸbø?üå1ü×BâÅ…ŒLÝ)åý43“µóÉçkñóåLz¬ùŒ¬’íº¨.£”ágdqAÃGhÃÒ -Âßaöbm¾¡$…hšÓš¬Õ æÑ’Žú%Úùšeíç"m¦x¹­KA7¨‡$S}ì¤ÅYå=oÍ@[ña]hÛt¶Ür†êÜÍÜ0]4×-«~ñÃ9¯…’‰âñLÎK‰½‘ŽšÇŒZôU6qùZ—ëÌv¬í;L9{¤~çLYw~¯úVpQ6ñëî ¥ý¥x|©zP.J2?ÓÅ€0ÁN#ÜÍ´ö©ÚbœEf}ÂÚáèÄsÚäÁì.ŽÎßÁ‘1nù+çõËþÞƒ¥c²ECåz°)jÒKq®z‘»È·Ÿû⬒‹#H®X&WçˆGÂÙ‹\£]/tÔ¢ñ³]9wcP³ãV€CÑÊfÜPV[¼NU-ÊÔG[&apœ2åºtÔî–H]i¦{hÇÛþú`qœìÕñ)=Q-C u­Aº9Ø3çU¢j{,dœØq,㩬 
¨Ä^u¦.Ñd"»’!tnùÑÍ=ûxë,4ÊfjÔÃ_öäê˜"/8šâ8H5³,hëÚìÉõúÇ7>Õ‹gôyÆR8A9¬½ît™’úKê(ádjY‰‹û­òýù,îý¹ïßí¾âÅ£}¦´3ꇜ'$d qæä3 jI÷)'=$‡!Á°§ˆz\dûŸsÚŒ‰sŒ-–MÞI•ÙÂc’Ý;£Y.›<ñ#²@! Ÿ˜†?l7öräÛl8f^žÇ¿ ¶Lû8FΗ=—íXFŽ¼µSî[}ÑÀE£4²Õ<¯ø Žø{Œ"«ŒSHp¦¼ž&>,¶l0#6y,å§êúcRkJÒ®[ÅéeB¤ž2§Oü­5ÙZuÂO¤#øB~\Ï[R_uÒ¯fç¾/—FÇÔ}‹¶Î}] -€ø ç}ZXÕÝ\«Â¼ÊƒÙ:·…&Üáø÷"Kòv£ÈvnyÁ fêÙ^ÌKºŒv°{y\v‡¦õâRÍ,Úü÷Êe¦º¼Cã³÷*¿”3-¢F )@¼’Ðs]ˆ¬”e½ÄXç_v *´£×wÒç¼çèa.ì±tºâØ-®üøÆ<_õf›>yr -¬Uoº;4ÌYTÆü˜õâþéG*±…q"'z=m5æ/¿x¬êŸ¾[é8èXg[ûýü•¹zÆy’+î‡6¹„z>š9ð_,eú?o<»eÉgI£*f1µó–HÁt³‰­Åñsq­ -3#©JW)Jvòj¡’hdîcýŽûW¦¸ -Ó -*ŒÐ|,%Y3¨Rø³½jñÑ2§—xSÈ šÉËùpW#ÊÂ׶qbšÙ)sh -*‰ÕG vöqïÁË0,FSòû+Ä 84'éÝ«ò&…ãM¤yì+z%S#—‰G/÷_á}êÚGÍ#ܯ?^c#%™LŠ0n¨@‹Sì¢t†½àÜÈUõùC.žzOÛÇb îÑil)t€Ëk§ÙöÆШäÍ# ‘œJBW›7Ïß7ž³ý©g2iš,C+QôMâõΣñ|˜ïÁ¢q{(ænžùéÍAˆ‘º-Ìžˆପ¹¦PÐb ñ\aF-å†HÎk¨h”ãôË;àØûNsûVÚ¾ó8[L÷Ψæk/)¼f{nj]Î -ÈÆ%À|t`"Þõ[œ™\påFäíõXòfË¢tm³­*RИóÍ~ö°jNœÃȽeû-˜ A¿H_æÍœh¸è¦C^+ýÆœöO»ÙõH[b–ë:„âs–&6˜C”õGYTÚ hxûþ¬µpûŒëÝ°íLãìyÁQñØUzþbÐJH`l­:a¡&½‰#ßm”%7‚zSU í#Í&ìù{û‹ —¹£œ0ä3¡mgJ;ç¦0_ÿò5íøþ¬´zçœ2gñdæ‹eÂyG~uÁ„ÕÍ`lãŽYŸšÿÄ}W“úÎD+ä¬ÏíßìÄP÷š˜Äl[ÇYÁ¦æ8;§æÂ, ðQä®x£EEÛñ¢tù6¸š#6jŽÛ-ÍyƒVÚγz‘"¥Ž­BV¢kaY<¦GUcì™,ñ'MËöÞ€ßHâ+Ø,¢–áNy*Ò'ÑÏ÷‚'—QY¯Xôµ33£öÑÜMá#¢k¾¼c Tg$†w@*„ùïyÞ]%-ñü -„ó—óm‰?Œ™óóúŽò"8<“ é¡)50$éÚŸú#irf]ž6šHòH.ŽÛÀq2‡-¶%s* ëÓ½RN«!·t÷´q.9ò\±Ž³ìžH‹_±W 3P­€ÉŒEÓr>J_[pÍZ–ˆŒ!rM;Ìiç ºƒÉ;ô|„ã J3 çÏ#›‚npz&Á¥šx†ÆòWioq¸^*üfݹVŽ{Ùݨ˜Î;©Ê(Ï·`zEaäÇK UòRÚÁÒ¯‚ÚûAQ<¹¬Ÿ Ô‚P¾#ž.#Ò•¹Eˆ3ˆ˜eú}G½°cHJ—^œø@5{8ÛÇÒ{§3’¢"è™%A;/üÇ;ãw´Š2On¥sp.%Ø~{qµTpyâ }zøTôZˆÁñ,ê]}¦ÍN`ÎÜa&œ%q4W+Â}N(´!ŠÖ% Éò4é/€ˆÄLGc÷H™U˜âûYe ˜À¹‰SËãmž[Éôæ…kV,J­¹øaÊ‹ãE_i&׬Ÿø‰^5;açñé;u êJä°HÎÄW2Öš@®á -ßëÌ54ëkbZgôÃ;öŒFº®ÕÙ$™Öh›.ÈKe Ä@åk¾Ò¬\ö™ó ®éIœèÿ0KNÛ+$™z"–&*ØGi~säRE³õc - 쓧Qü`“øyÎãú’£:Ö`öxËôŸZ#[–ϛ膣ÅÄ]¥®ú˜û/·P%¾q/¢ÌÍl6!/€ÒSŸðŒ•s[×P°J¸Ü=©µ×dpø!áV¬ö¢ús!!^Õ -¨ùÒƒ!êad!Í,ÊUÝv9‘Ä·¹„ÑÍ»†Æ1¥ç®A# ZªfãÊì Ö–H_Uoq‚¬šà Í«~ïy¬VÓ{45.¥úÀUêɇ.òAÒiÔev[¢Ü:”|“—’aôh†Y'{xL˜¡çš—3v¼Â®íZë$>}#¨ìôþÕ‘£`žýO^ªÙk°»&æ<`Õ6p¼­‰=jÇ©=kZñÒÃ;­ A¦ÙJŽî¤AØÝÊæ§2®•®¾õ<Ç:væ9µGé‡Ü»« ZúêÝw¢i*ÕCÆ[›Î¬<ƒ”MQ3ŠŒ!­èMÞ.ß{¶iOd«;d{O Ä¼;yjNž=ÕTþ PeÐœ•D”Ù'„ Õ´”¶`%ØfÏt%ãs‘/;Mʵðçþ£Ð¦:‡$ßC½),º -þMw -öºÏ2öc綈„ðL7g\ï 4€<Ä!†èçúÁg2ë`¿3¤·ˆ[&ª‡f‚p¦8”þŒF,2~Ùˆ`´6Ç< È/ W;(ýƒHò\›{í©ƒ'±"§Æç±ÅyN4º@~ÏÀH(Âë´FGp³ãÝ|«Z£Îh‡ˆ÷žzWsHKØ$ò׫dôâÖ‹k¡efÛ Ô±£î:´µï>¶àp º5{çùEk7× @[ªá¨ÏäW¤w¤e†@} âYOßky?(€EóË©í(x”uzýâ¹yrà¼@=r¦ÌÄ´}Æè×z¨=swÜYnÉCf/îæ ¯{,ü±Ng¨¨uTïr‹ %oÃD§A-%ëÀˆ›°¿æ*’ñëŠòúõ¬Ÿ;_ÀAOp•ÃË6"njÌÜ´•( Oûóê¦âç¼O>‰¬„íˆ$‡RÿÜÝuD;|ƒ63’hÁÑל`àb -¨{U”–Ϊº}´õ¡¿A‡ý¡­§ê]Óoms¤Ä]ï/°œöDü¹å;µ!>qøçÞrÐ"søöûšÆ´lœ›—#)ae²KrƒºŠÔp®Ñ;ž’cû¢~lrŸO9ÂD¿"¾CúÏòWº -¥üˆÑÉÜZ€bæno=ƒ¨ŒµT¢·x¢ú5ê6QÝ"¥R[#þÈ%1›Ü/?DQ]ÚŸeq5Ó‡ð£‡ÿu.Ñ5BOXÃè0÷Ñ°š‰Î)Ëu˜ä«¤·*”æ]ŒY4ßߺ¹ñ4`uÓk¤0ÞÊoKgg'r6T‰ÆMU'²Àr„i™EºK¯3-µdtý†èK^_›~H¼jŒtáÍ•3›úF\À$à 4C›ø¹‚²ˆíÚܵX ¶rZû*¥öEPá+ '*vš£GœŸÞb}C)¢‡~ yK+™Â¬·ø¼'t´“Å(ìÞø8—­Ë\©ÅÜP¡w† [IÔ‘ÛëÛ¨ÓØÿ²‘­1wáÞZ}z³‹Î…ŸËXóG„yj)PØ+f -óÍÿïyG bÊÀµög Þ3ÿ¸^†C•ƒðª!Ñ”b7vôŒ˜¡kÄ“ïµ é–Ûž¼&?xÄ]þ™gî5²M ü„® ¡±¸Ÿ]ŠÚç¥#ÿ…žxåEßÜj¼ÎâpÔ!4§ÎÎ켓Û3 Å3¼B]és á^A. 
\|ÁÂrÎgÓ¥ÍË›q_*æG}a {F•6Ï´Ù"m.RÌ =Dc„·™¨ÍMÓÉ2å‰BG@5¹”£PfÞè úz‰×éÛQŽÓ->Òö -PsàœA4Π„» ã²ÃÄQª•-Ž±tûƒ›uÞa^æìHmŠæEhñÌ9›ÃWm´©L×CÁ~²ìÞëƒÍæÝ5¢\ PľmU_d4ÎQp2aˆÜ5æ7VïÎxó¾>œÜñ𩊱’N£PÁg¯Iw·lÂ![âºo±”†¶+‘N -POlœk)Wœ¦r’ú:>‹þž­2S¦ús†bW/µó9Ú*Ž¯–,lL´äòÒWO:N눥‰Êå(mô¼zû<ãÀ„døz|Ÿ¯÷ýùîFVxÎÂîf)d´Å—ñ½"*œ'˜kýš8¼óŒŠ:°­W v¼­&ç›(yc_‡¬_‚aM…ôÍ{ô"çü]‡5cÉÄ™rðÒ= ıžì‰Á0éÔŽGøXA[9/0ª½ÙR®^·X2ht¾ŒBÝÌÍ8fòÏÚtiâ%:¼@NqK<®!›³®y çNJc;ÎìÚ]‹V×xÇ¥‰º"ý¶¾fºB¥×~Š»Xs©m?™½k—x8¡XúUUN&íüva î·æKŽ4z^J®wfRçÚ‚ÓzÛâ¤á!LQ>nóèÜRQ«½s1 )Ü·s+8éN³özÊdS-Wƒ¢¼>^<ì[wÔ(ú[7Xõrç:c1 (Ãø„“ðµ¦N®Ê„Tf7z›çin[ÀäTƒø -‚Œ0G¸AÍkÝíR ;‘ùkPc´-Q2Ê£ôl¦ ÚñHÿ|¨WÍtlž3Rð'$/\9pçw -ð8ßÝqgRÄ^‰¯ÅŒÌlL°vì´³~šõAx Ç6³âj)tW»&»4ø̃L¯>?Ãu¤üÐVw²I;>IQΠሓ³_Íö~n,—=EÉÃûsù.ø|½A;½í£>Õæç¤ìîlÙ Z -¢ üÐYŸ“”cçHg¯Õ»ã÷ݪÔ÷ëÛ-ò-<¿ÔÜn±hÃð-ÅŒ»pžÄ{@WBbIe¡ôœí©^èhÛ´ôp¨AÇ-ì‰e?·MD?äTƒèC᥌ÝJªÉ68Ä÷nÅ ”Î>ŠWó„Ù8Ô/q\…´¦–Áº -‡»ycÐÎœnÀ…âÿ´aR0  8—@?Ñ@©?Êx¡“cT¦¬oÈ 4øVë#°KòºtðóTX#ÄÝ·J[Û‚í铳˜‡?¨|¶óíHÝÑO-åé3ïPFhµïä]Þ1L•ãðÚ²öAãU)}‡8A‹—]æü+Bó®¶õÅOéÿî`‘¡´¸€Û¶3&½þšÁìêiƒiÞ÷ò}Öëã-'[3Ïe€@ÌÕ°k«’ >u 3G€2Ž:Pö­×#­¤ÎžÜ(•R _¾«wúXxð"gìí`ÔGè^' ä¨^Ì(j3mðl•>h’o2„ŤPÛÂ4·fô±ÍCÞôÉ 3I'ù n>œ„†mÅo‰òØ¡í–ÁA÷þ$Y['kb´-"ùá¿Ú€aaä}±ñNµ¯åQªE‘:#Žœª×/G|@ Çíž?Øx{kº¯àÓ[ì£SÊz‰¨€çÖ9¿PsPܳWø¬*Ë ‘óåU§äz¹ßWÜ»c+šàïGÙÁß·µU¡õméöî4£R¸AG–G‘¨0üŠh\âS†¹8>ì+8îaj3 -ûÉ­<ëºÕxÞ¿¦#ûo²·²ûxi¡V}B麜bvÀ4·Òb­ fÉ9ÆÈJ³qÖó­¬ëL3ô꩹‹uHNœ½r}çu¼wõ?ç©6 ”½­ìM…ØQÖߨE2›¯øäp )¼h^é‚d¾¦R@¹OhzóÇfB•Ng®Þ¢úƒJÍëã -óoã ž}Ÿ×hÅé]ÎØölýÝ`Q­›«+˜£øÖ­bEnçæP³Üí"#HÃ; -4±NѶٳ'ÍWí`‹£ÜA³û ’Dò|N¹Ù¬¦ 6’ùTb‡n%2«½,´î)€Ýµõú dkùÌ&Á¦ûuÔ­à˜No">ôµÁ£+ïöÝ4–Ð!Cž˜ÿ¼…!É^,Û7½¼{•¼á;5õö¼–Ï\„=}~c Dœj8J¾ÿR‹èOlz.ü›ÍaDŽÎø(§" WÔÿè uÓ?ŠKtM¯r‰¾~õ¿¾Ì°ë7™ÿ5¡Lð+DôÀ%nÖ]JvŽÀÂó,ÂH)þÏAèS‘°d‚aÎÍ£VçeHê­Ÿbt×Ý=s®Çžã¢v?jH -µOr}%¨’.äŪÒÄ¡&¿>]FäZl žwîy$ÛG¬$ô”±½Ë´ãæ_ô:.ìãì…,¬™iòs.B:&­@„Ì›"&ƒ¯&â -PìˆBHÙó$Â2-ôþ‡ú~«vñ„ìQ×~'óásË©òc„Šì:Z‰§¨7Ê"ž»ž;ýáí¢U_U€}uÛµ(´*‰.äý¡QÂ. RIã‰5ÎŒõBavvnŒá‰±–„É£\¸Å:Àõ^ÌŠŸ9u”˜\ICžJ9|Œ§XFv¡‡Z QÉÔFQâæÑvÇãþ óköR6±hLœ«…†{?ãV©È€¶X\Áu¿[z[ÎoûGU UIðJÙ×,ÀÄýãKý]ÉRä¨ËuD†­êæŠÇ§E}Á iƒÝö¶/H7)ÍÐNž÷÷S\ -Ùïüœ Á17—i¶Q¡ÞW§sDð– R"Ã-‘ÍÜ0¯8¥¤ŒÈ™›ôu?U¸uU{7µ­ýºWq49¦ÁüÏDÕìp¢¹žº_”°Á²F*ŸiìÔ«X«MZLlŸ%ágiOÎèéÆè‰Tu¤GÜ]äøU²ìacÏ{øú5Îpê÷ùº JŒ "… -‚ûë;ì97n’%)d ­AN'ă„W¹Iÿ¬§b:<úñ¼;¸—¾Ê¯€*.Ó¹‰l®­¥× M]í.È‹#m® s#öø”ë•ö*?3ÑKTÄÆCŸíß6¨š®¡,I ª{:Q4ãñ…F4~ó‚ü:f…·ßì~?²Ñ«Gå¨>\Á ë<™59 Κ¬ÀU§*÷¼ÚÏ-=€ª£öâlér Ô³Nî- -Î4ÛËÊ D)âp_-¶vö0`[aÍûÌ»­ gÐÜ‘òY%+"6w¼?úµR&§³`• S…9Úç!Åp/É×¼ø³À‡b{¬è'> µ0쓪¶n*s'jL4€JGuV•¨æ±¢™äЀÑ/}©nhÀâ=í ð!v:¤G5YŸ—àƒÕóîeˆÚÂ’wÆ™¥~^GøI¢–¹J`·ùTH -Ū_Ù^n!ÊQuéÏÅ:>pdÉ%9ù -`-]ÎÅw\†0"ZM(F…›Ü”üc>ÍÎ~£6}¿Od~Ô ¦P³&Œ„œÎÚgw•ÜËê -*°zWªèX`Ÿ§gŠ5±ò«TFc"„<­s¥Îõº¬~Í›Áü#¾P =›CÛ)¡·¨õææGÝÀçZeüpì—jÃ[“f#Ù9Rå™+3M`—ÓJ áà€§ºõ’¨fjã§hÅ$¬à®ÄzÓSº!½Ãßz*›Ç§$PãÒoŽ<ôg’°œ˜ í3×—0ÝIùrnãg…âÿá¹BÒÛ³“{Ö ;Èœø:X/éÉü¿l“GA$΋Þ÷Y)ƒeOg×=R]èõL'&™›¶ 4ñɬÓÎ=Wߌëêf0æÕ@c)h?aÚ¨›gÏØè“<ºrN‹ƒôc:]Ñ%èòOe"Lá[_ë*•A·¿UœI¾®’_ºÍ­VÒ‹wq;Ú,¢ô¿\qûŒJù‚wRì BõÚç÷$>$¡TCPU> ø}ÏtDÝ+EˆZ=‡U{÷cŽÊ´«& ÷+¸?Î -Nçla¬ñ¤‰c È6#¤7]!ùlQïS}%cøôû¹ßD†V~W‰èúj•£ÑûeßÒÀÕLìQ¬ôla½Î ù ÃåJó“55ÎgÅ?ô—/¦Í‘Î&w'r%Ô.¢ª|«"*@é·lêˆé}/Ò„d×ÂéÁ*&ö¡¢ /v„÷„„aǹË쟸ð]ïÁ4÷C†TšUº$”8ØÀLõ¥Çúц+}ìTÑ…ÖE.Qw*nÕBykÉ÷E‘%Ò¯à —¾£“ä@Ìf×–ãëÑïÕ4øfö½2š9ïU>‘55ÚŸ%ìý’ÿF ãúM«ù«£Uñ=| X õ P\äÝVU%ìÔP9RÉÔÜÇz拹§LæÉ^9¨*«‘$klŽT}¤ Y^tZ©6tÁ)¥õ(<5h~7J–ðsÊÛóъΠT^‚ÚÉá+t¤#HÍ« -X Á£…Â]Ñr•öR¡ååùw±C1Qy9^…ŒjgÞXž -Ý€Í_O¿šÜŽÒv¨†÷» Ãè ^Uå2 uô÷½ --dCÛ> 쉷Üå õUh„8-» fKAj)¤…ßØFþé@Þ¬€3íίG´´×<O`;JdvÚ?èE}i½ýAAêƒåû£6Šø?XUµ'ÉIѨŒnG6a]׳LÿÆâ¡ÑçogÐH@g¸Å‹Šçó endstream endobj 33 0 obj <>stream -ó,¸Æ2݃²Ú×ÞFÚÛëЋI¶JI‹’àµ/ÎXüçv¹iß“ÎW¬|ð ¬!§¾Èz+ÆŒ!µ`Ã=ju8ìaxC<Ì)[¢-¼¤bäÙ‰$Œì=ÿ}þÁ-è7Ãm¦âÂfÂ+"åFvæ`‹M§~KiþÄÊÑYŸë ºÔu¢8cã'únÜöpÄÂ>ÿÒ¸ ‡€Ð(¡½÷îwÇü1,Ù¬,‘ôuŸ‹3öÄWÇ„ö~?êF3ƒ§opÔ±¶t°Ïx2£ƒhI¸º­çN¬½]hmÏg‡Çÿkª#qɇÚz´îç $ÅeGÕyöBžØæ<#Ìz¿ƒŸòÚûG§¶öJ~ÇÊ=ò°ápý¯˜vð¯èUã}þ -Óò†ƒ¥È<ÙÖ{«!C-‹ëÌÜ/¶óbÀ?ÿ£d·ÐåDÌo¶mH¢‘QýÏÿˆ‘<Éö†´ìé±Øÿèa¦æ_þä7åå¨ØÇ"X\Ÿô»W¼Î[)r)B¦¡±ßq³Þ˜J÷^¥|ÔEÔâuÚþ*Hûç¿üf"¿2µo‹Éiæ 
A ÷³¾»% zK›æÜßæ-ÑÃ×WÕ_'ìµÕyœwt´jþuŠ°bX‰§&Â0‘—Î[$WíÔ9 -GÍdnæ呼†D‹ÑÍ‚4‘3pyÞ½¾E,—æaÌ s¶#qŒâȱZ,àÚ+˜|J“&Ê •aÄï@%g²³Wz¬-»dªkb4Ž^ìܶ*¿ˆoOT·ïRäK:Å)ž;ݺ²ŠÎIjHos[!6BžX®…nqn•U®ðA3î`+t®ïD\}ŒÞL|Ñû¶wG¨Çã(êg‘Ù‰Eãü÷,Ùðiäܨê-éð g$Õb;Ke‰^He ìWTLš|_ô ~“Œª¬NܱJ73dï‘-Vó¨þïÎæÿàZE«š8' -—Ã˘ tTQ+í±b5çU¬72ê€WÅ*ÖÇ[4+ omÊ…ñ³ÎtÛ+Ò¦ ÿ -\„hØF¥âZóDÛY£”B@*M>ŸäØ÷ÚPÛÔ®šHóç÷è (Î=k™‡ÄZÍf"I»¬8+Õ* »¶Pû’¦àQTtMÚú)æ°–ùغ=,‚ zFõH§÷¨ƒ“‚‰;uD²¡•¸/•=ª w ‘:êM©ç×ù‰=Á¥Ü*­ó˜¢Í§zá o„FDŸ½¼Ì¾K‚üÈ.^5ÖÝóau,5G’Ô§Ô¾µ3XÜñ¶?"Í…Oôùø/(óëWžÍ×ôÆ4ý¹ñ£° ê`]·4±ø!®Áøß{öÝبªæ%Q"QtWPPø[8 -/ödÕÓG§#D ´ŸÊ)‰âÝØ©çQ°–±CÙ²ãÉæé3œ 2BCxs!­bæY•;êäG°ß~ñ Z W²—€óIaûÈé ¥VÈ—A*aÓuXC…½9>C¾Ôvt?ŒÁsF &ä1j½°¶š½OÄì© 8ÛŽ0W·ÂùµÕ|Ý|~¡ö?€P‚#4@Õ:õ±.LÓÕ´÷•Ïl¸ºgßàüy* Í> -ãÙÒŸ8»BUmD¥v ©g)?g—ˆ‘”Ún6$¢¡|¯ž±?O“AMH Ä‹ld#u<ï,ò‘ÇT~ù»ãF¯®ã»Od#Ú¿g|Œ€ ¡YæËsüöÞ²<Åí-Œ¦ÓÄalº+ý aGájý`f;µ­ ©mn:¯À©]ÂAÑ×fãèÜ£´qÕÖ÷_¦•­^é{€I %G¯%À‰z6üÌþø‘Óîˆ0<Áå­Œß2Ž/ŽÇ×=cžOJB¶µ­Å<P•òѤá‚ø£i0:O - ~¨ýÁ"€au†0+9mëv€1õâ?$™Ê+èš^*†œ¿xï…`›Á+àhuöuÒUhÚfû¢˜ˆÒvʬî@ ѤŒ‰·{‚)è©«Ö3Q¥ãAsÿç:DHÈœ!òÑPœÏ#®SiñkP®n¼Ù–ì’Šˆåƒ×÷ædΛÑ{‚G‰ÀÄE>Âoë)Ø: õ÷ê±chÃHÍ-‰Û‚h¼ýÒùó_þþ]ˆè“ë½Âþ 1SGÃ6ÖYjg8r?RŽ–¢ÅIÏ|¬!7¦†úØéG8 V›ú¤­’Ú|É;H[Âh¿æ¡úƺ|‘u®è6“ÏÐ^àDTw³Ð"AÔ݆Ûd:áåÚ—Çöf9úVdµ1ÃÿæAn[_§þ댳²²ùræÑF¾ç±iUˆ—-ã>ëýa>Γ ËçõÁ§ZS -:a„”ò6QÛ0J‹Åa,¹@DrÞÆ!l¶‘ÅÆ*ÕÔßDŽßFQWÿÝeêƒKÑ|qŒÐ3¶àaì›éÁmÔúT¹A6äèÉÆNÆËî﮹Hÿòü›®ÑùמÓ[ñïYM!ÿ>†CZÝ•'èK^´ï¸Hû x¥à|q‡Ô ¯uv×*ª3êÈ&j¡¢L÷g,æ;žÝq¬¹.öˆáfWÖ°&².tÝ3dnéó”éÐylŒJÇXwÕ­BCLzîg¼™ïBÉ{Õr«¬#x¯ëÍ{Çg ¯SóJG v8_é‘ -‘Óôr2äÁôÔ Jå gšª$²ÜžšÞkÜßöû=⸓êêäõ×o±èu#>áú«Ç“B•¯¤I¾©ú{qçãOveÝu'–C±{ÇVFwÏÁ\âÎ~?¿:¦m@ÛG\ð"/8Ï0}±·tCŪ @sF%/¤Ã -ב—Ÿõnxp(öÎècDŸ€å)Y¸mßßW±Æïtîóú"¤u¦Ðš¯Rôâ+j=†JAbÆ7µþ|ÿd²0ŽD2ˬ¨%‡¬è¢ÒÒ`¤o4áë¨9WŒ{~ñçÕ¢AYjÈ¿-â?ÿå«UæLè8>¶¯êWa -`™—Wf'™åzÎÔø ¢(î)zñ+>^ó«²[O7oÄÐNŸkOÊdÈâ4b§¾MáuP5RÙ•Ik w”Ç#{ùÿçu.Íî-pá#ÒÁ\•w}wû™™ {ŸÊ¡g)è¢ßÎ'$ðÓžX ôס>é¶/—Ä'N):ôœ:.ÊîR Éìu…/ÔÚ.1î±™f¨_ËROSj±ÉùBЦšª\:7ùÉÚ–3Z8cjˆ„‚‡ð^î@ˆÐgÀá[Ðg/¼ü-j†\Ì/æ9f%çVÙø°i¢îõ–Ù®õØýrÅSU“& 뎖=î`y[Äó=•îcŒ]ï5„È-‚énê1^Ò~|~szaðsŠÑóCàÝËÝm;kh¯ ‚Hsãö­Ì¶Ñœ\5sZD“ú®ð’Æ7g{#—œõ{œ+Ùjuý3ÓbÂœw{_CtÅ-¯¤ÛLö7žo~ã'mwü¹Âóó[œ…çƒ)G’ ¸§¸íwƒ%ø8þLKýRÿ={µ(G,>{‹XH°¶Wú sO¨ kQŽ¼7íÊ£x”&Þ¥àrX©@iW%¼šKéÿºê¼¿˜§T¢ºïó‘”e"–Œzzó*W$X÷’W¤Œý–›Py1ÂAwQ9ëk™”×ŃDÝ×­°@Èv­©?€ØžŒ©·‡sz/‹TJ7d¢HùÁ‡z#ߨ7„^fàæyVÜ€•½Ûªj“̆Î4¤iüñª®$üïØV_RÕ¯š÷ß¿‘4xVo‚µ{m±rˆ‚âxiÀúx/àFkg”F̶&‚ž-“ÝÒ‘ð˜q²/û£šPqžaœíµqXaPw¬qr«Á¡—Bw°}nJÌÖÝ  ùÓÁ¶!W¬ ævrì5¥®¿ª7£Lr®£k€ÝŸ9a=Uv}uÕ¶†pyæqC(ðbp£àž;©…âû—4ƒWËW¸?í}Fžbx°ÿ‘}—¹*ÜÂ!»Îæ&y˾KB2ú-IC¼™3Õ5—=jîFÄ…Šžì`-glD<ÆQÛ7BÕŵî¾×j#ü»w·~L4…hÊ?çU*~ÆÀÏéò>¥VgJÐÃc…ØòòNr·1öØ~N  ¥y§3 [˜}»sô.0ÐÅ“i¦É7îU«•¿çU‚Œ5ÄÁÌmŽÿ'+¿è,,ìDZ*sŒùW‚.kÀÝ—˜÷x~oN|ýK¹}XprÐ -¨Ë#Á#k 0Ýt'Bٷ̳4qmÌuÐÚácW¤Óc¾pÝÿŠtz’£=×iBQBé…4„ÁÅ*ÒÊ/gy'5ã.ãi˜Ì¹q§‹ñÃuDK î)XMòaªË¢Ìž©þ¤åËtJê½—ž,)„ --ˆtR—ù 'ù98ÀØP|SUÑMs¸ävÚÖàoM+€¿ÅðØ o«ÛçIËÅŠñõGMÿ/L®¯öôS]|Φ³ -ñhs"ÒPùdžI<_SðüŽ"X¯"+WìðU¢W£€xº†l^šä%#F’KÄ”Òæˆ/^•µü…Üç¨"Yz 7lBEçþâå!}äŠñW0DU4£aÆÊ#©:OÌ}@¬=mgh)nìxdG]f¤’ØanäN6áÆÉàîë±ÓÔ,)%í4kútÖ»£^ŠÙ±W†Ü#홧UÁÝÀµgD屑ð‘JucÖ'Òã[ª||¦·ÝïHËuÎúŽQ„(~ÎA›äâÕW‚%]|œÖÔaëÉ!ý{™ñû¿Ìé4î´Ëä(ÿǦ J_þÄpäƒ}éA†~7h‹ß—œVÏá5 ƒÝzŠ»˜!óãœÄx õ °mÙý r²¯‹X³¤ÖÆÀŽA0ŒˆwZ^¹Èü`»Ù9¯Ss{Ó i÷º=õâ&{5„›ýv"±×C)»¶¨\{ƒë_©Àï :nÀ™ `tQèMžf÷‹–†ç +F•àz äÈisŽÕ -¸”©wúM翃ȜaW ò º`:æ\[L7,÷ŒÖ4Ô¬ÙÒCJú‹yK.›‘F"ú?LÒNÍT–vÄßxôXr1(·Ëyõƒ^ pç覇 -Öhn[êô&ó)½å_z#dÖ8 Þ¯ -@bÐ@3f½;ohסöÁŒ0Å6LWmägT‚F9‡FIÕ‘Khÿ¾` Â<»b£z×îŸD®ð»2Œ½y@ecˆˆÿÓS«!"È‹Z÷Š>!Uw¥ŸÐ2êÐÝ[Ì–³-Þ"ñ_5ä›ï:£™®‘2UD  Û=8¡S1ÂîQÅÌ¿:""K—üX×$ïñrzùÏØñapÖëÐ6#OðÁû{·” âÀröŠ¡¥¾eù*nÓá Á€ïcY¿ÔËK@òwÄÜ÷IqëÛ<ÜÁ‹ó²ž€kZ b’§\zæ Ìè1ÊýR2Aº‚²‡^%Çf¡ ’œgê<«¸pj¦uÊ»ïÅDÂJàž.¹Apí\·D8¹ ÕSõG/æ}'‚’ƒªNFðI˜{ÊÜ“ÏõsŸHžÜ5{êNôø÷Ô«Öqúª:ÉÁT5DÄ!²µü ¨9k•¨TjS›‰ä•tVhîò¨bÐû¼Æª˜°ÜÜâÜQ×±*V|b¿©Qœæ’O" Öž'_£\4Tžƒ„—ò8(x’ÑÕžCB´»"ü7–;Ò†b -T¨V·ŠánŽïÈ­ˆÄ€ Ÿ„õsÑã ¿£!<S3îðÆ”X²ÔC±VŽ¹Ò³t#ÔR¨âÈûäšÿH†æ2ñX8G$'žÈÂñ€,×/[~yÏ÷&(Âm| ‡¡ˆš,À­fnˆÔ§PE¨¾ÐÏS#X¥·(µm÷ëƒé·T´PíV%s_¿ l1Ì_ @øãnð$…¨øM"Á‚û…Æ÷‡¿Xç{Em7Î ûàŒÍ àɲxTµ¡½;õ ƒ0nä°[ 
‰#„IZç£-šä<úÝÈ·üÝì«)'J|FG1H`¹Áà×6\ýŸõåÈqë÷„n÷#Ôgþ°ü姎Ï÷,œºZÎ}ï1WÒ±uÞõ";h'@Q‚Äz°ò3¬¼ku ²BÇóy×32%%}¦ÒçX«¶ø–Ç@Ëv´ æw¯VœÔž«tLžl?“Ù{×*-Tâ_GP†Ì´… ëØ;;1Î,Õz½¤{¾ÞðoEDß SéÉ„¥vPpHaÃõ×JPö³ÐïÒú‘œ{'&>÷ -‚3SÏ(Ý!þ>)Áz}©xEAñ›óçÿ[ψ\ùËz1û9£âË—þ)ÙAÞ>¾ÆZíó¦i;¼(¹tf#ÉùjmiW™y£¯^qúì§Ü'z;D,@iKLÑ"$ä ¦ÿæ¤×“Ž–Á™ÚAý ´0‚JTè„*8ÃWÓÎä:Ah5úž7’'ÐR/|w9‰3˜rB×üŸÙ’Þù™'´Õ»;u…HX±ÚÛ5ŽÈ´ä'Hq!Ð&~Ÿ8ä&û¤c·XWm“™ñ+1W/öÕ«OLäIðQ˜²‘=.oе"ÖÆGžZR.ò:8'’ÀïX'6Í Í’ù ë¿¢Ì-Ùt†×˜A#/]©%G•”˲Ô,ÆéÊ@£"©Õ£@«­Ä4gEwyò#ij ê’ÔÆEiÌ_f<Õ Z±Ý7èzŸÊÖ<å(ÚjjM-¢®GD$í(à5#NÍ_!xz9 - -@\_¥í(E9 _q[©vs s2ÿ$ ŒFGíüw´„Fc«?®·y_ñàÙÙ' >E£bÛéƒ'×=ˆüÝU³º4@R¡Ü -¦ÍÂï>gž7É‚w Xeˆ¢wìÌPEÕ„|Î’€DfïXþ²z„!ñW³RLã!}’6ÖðràV N -ïLÛû· ü#üµ„ÊX¬'åQâ(˜t$(ÅQ ã¿Ë– Äw0u¶{_¿”hüY[ã‡1þúí»/ýÓ©*Áü°²]Ðt¾àÂß’ÚÒ_Äk[ëfÔS.çlRŽÌ ùÎK7äÜA -|ìQÂ_Í+qǬãö¬óž}ôtEƒ|!¦ü$ÿ¹jî>ÒÀà îÖˆo9ÙF‘˜lw -0ÜDÜ™˜°/´õ» ]Ö¾Ò)ãÔºTD'"hþî…êrB¹îL^Jr'ãÀà\B½ä'Ìw|‘G!þS%zDB WJ¡J‹MÜjÌM§¶ÙâÒò­eRÚÈ·/£´Í‘†ÃE“Œë¢]£ð}X±#éÕÆÂ;Ç[xáß`a~äjý"[øý»ÿ3Ÿ¨Rű´i|Ãx¾ÆM«G57ø”ç34«#‘fá÷f”<$o~Dd÷%ì·O„´ 航Îò»é÷gì´m(üµñ4àdñë· Ê ‘H½ïA°U‘¿³Á% nó³7¯ÙÖ­ïoÎÝÝ˼I,U³$3 =µ8ÒŽÞ˜¹R ¸€‚8øÑb÷¢Ò’ø±£ükNX¼•ô&§±Ím™•Z«ÄgíaÅÃTËù3ƒ¨º®#ùŒh%?p§ÏÊYñYx™10™»,²îoÄWù++츳µ·TVH¥ou8ìµxyvß^™={næDqó3:O$d"¹£VáˆÃq'[aøÁI ¶£b&0W‰FGˆÞ²§ÿº jO*-ÇCŒÍ]ü¸ö #£ÐL#ýô©*AZ¾Ê®¢1˜‹[ÅÜÑHCÿ î3~m¤8WB-Hâ£}b[)7=J4–(KOŽMïŸ!D›¤ñ¶¨#._¨'­iDœ†IÄõ†gº+ƪ$ßÒ(mV´Ts@Š³—ªø*¿@èüó? -úO§1ÄßoŸSëQê^;M5ÎcBÑã&¢¸˜S‹˜¦ó `ly±w·3¾¥éþЀø%Xã‡vû–’ûÍŒú³Ö áºK¡´Õkãu)µò…‘¹ÛÙ<«7îê쌑½¦*ÎgEFŽæ Dk¯HhgÕŸŽFËZqG?v±‡ž -´-º¨5Ò$¿a -ªÑíJÖî÷1h-ÚÈ]­ 6R% -DÔxçÍz¤Pd8?•îWº¯º\côï;a5$8 \öGzgŒ°Á ŽÌÂQÙl;é?¾wœ(â„Wà=– 2ÌÆ‘xTð7•ðŸhUmÚ×Öá‘ÒdÖúŠèdG˜•/РÊn¼,§'ÛxÐíoí1;Þ²ßâîᙕcØÒ樶Øà Y?ór9Òë² dVþØL;ÿ‚ý’™K7²MìÙ;$ËHÉåþôB¬ý¸ÈÇp6ƒh£©v_Õ Q§b`Pçš!¬§y4yöfHÏ'$‰¿6Ã#æwejÙT•U3¶Ò'kØ_>YÊ„(4ІUK˜-€<Û -hc½|®”‚ª{<Ï?RèqØAý3:·dÈÙQjŒv±ÒÞ¸¢”ØeÛ–”˜ÌT@GÔïjQv$_­Öñ -Ž[ 9ãQr‚˜µ\ -ù\\ Zt|rËwDŒö-Ç îå'¡µ4êwT¼UÁÁªóŠ¡R©®ÜqéÎÝ<1¢YÁÕö¯Tµf…ópî§ò“kܽvxÝ7d ¯5y9Î\öƒ^P[ÆRÆ;¾FÞ0ñ -½{k÷&®R0¥(ÔëC­½?›:’ȹRêOy›=Φ•Óý~î}à¦Ô8:Oä£QÔ,hÁuli`\•Ó¶+–nÿmú“òÙü[ ±6sÁ«…D^Òuëé{š%Bˆ»BSôÒŠk)¹ÛÝy[ŸŒšâ9åíGIÿ W4 ðZ -vº5DùÙ$Vˆ£ñ:Š›”Aòj»90ùÓØVœ Ü•!D'SÕ¯EÒœŸ±_¯W -¯’ϳŽuxðõî£&²?éÚvØ"àêîJ­ Ð)“’„›(—¯hê1>¿°Xâ­+„©ç^SI½?ò ö½/ö®—|fò¬‰Ûv$B—å øîØ›,!(öe0)J{¨u0 »«}gX…ž×§¸}k˜“¾AÀ'±nŸª£¿3dœ-ýœ›ÑfÿrTÿäž5S©;@٪ЗåUgsù¤ t½AAé²a¢IW0Ò#Œ~Y5i -ó»ÿ1,Ÿî?Ñúǯ¦î¿{!Ü{µÍj,Lìöøn~ÞýZ‰úÿdäwÛUÑU¬NíÀŒ×ç`>«ù2²3õzÔúÌvmûû“n³ åƒ>XÇ^%wr$ŽãJóÄhÆ^¢iºw’òÄ*Z»@ ‚ž|{Eƒõ`7‡‹ó©³"à5ÁàU`ØkÓ¿bWÀÐò8?;è­‰qDeà—;((ŒÜ õ±ÏEK”‹g=p Öúr[LïIeÕ!4Ïÿajþõ%>%<â -rÕÕé 0䳸•&!Ùù–ÓhX´b˜Nm73èG•_ðþ‚v—ÿºM w2"ÞYñ001iõ—¯¨OäJæXc`É¡1¥û2âÿ¦îO²lɱ3Qs1€ÛR@Úì¾YD7mþÝÄ÷o£ÛÕëaÊN¾Hr-’®Ä=R¡ØÅ_ @ Ž¸ã÷÷Ó®}è\qK™á«Fý{ÔüÇ[”ˆAl˜­8Õœ%{Åd}Sb„Mo•iÁS¶p€ùç§Á“½÷ÚCàfã_^*¢ÅCŠJXŽö5ˆ:¹)ýÆóœÏ@[ȱ9äL;ŽÏHá#MÙÀ¡#U£Ýzž±”À«m¥'0òŠ×d1¿Eß_ÿO™ÀˆD®#0æfqjžü8ê©4ÖHJ1ñ5$È¿K.L>ëë_ó/áíÒêoZß@v'¨ÏYˆ€dÐG«o‰õ!K¨À`××o&ÔoímçÑÞÿlœ^g¦j¾!ŸÃƤ]Í#ÔT¸ØýÚÒäãx…ŽðDI•:ÿ¨¢>àSÞ+ƒ3ðr½uo¿ Ÿ¹R0'Ú•FsÆè_)šLo:"Z”3"bE;–¬lÛ¿sDûQƒë¼÷翼œÁðR!œÛ¾Ô¤C{X=®¡_–4ñÙ¡àWÄZW\O]jÄCDù·3§ÿÇ÷÷—Èý’k–÷d⊼LØÒq­¤]Á „0b‚è'²qµÚ7Úߪ´É5~ñù>&¥\ê1#É Ú›(q§Ísí.%”Ú÷æ#*Gc9¾„ÔPH—”'‘!a]D°jÔÔqW´ÐãÍîÓ¼ãÖÔƒþW,õTnÞ¯Yóon{3êc8EÁ—¾ñ1·Ü†øT/p·‰füœñÇpÚ©ŸxÆœ»^Ÿ€WS|ùøš˜ÿ¼ÚÜ™r쮯'G^:oþNwû½Ý1†B¬Û„\pÐõ°Àª­cÍ*ôì1sÓE=ÅθKä~ET gK Ñ>:$ýYIñ?|§ÏI³û¸0ZBX8Žù–§´9ÊœÚr,×ÙÓºÔ¹×÷7;ÂwT¶ý -¾F‚´Ø¯¦ ´–yß²JžT¨øÎ’Èc ùKiÂëcÔtêm‚¡»†h§Øû"¥ŒJ§+#—±i´³ZÑø¸›N캷1ä2£ÁœÎ@”ž¸_„R•FJÏ¢ï: 3‚qä®25,Âqߟ„]¥dÆŸýPæ$ù²3p9t°êfÞb,r™ÿÁûÛ0xÚ¨¼ºÜÅIilBj“å.-^.³¿ à¿ÉóÏßüõç¸w’÷ÒI‰Ð»Ì+»JS^½0ccf™—ne ¿b“u_8õ<°J1pê3zmcž›jΞêJâ?¶²4c åþÉóV:œÄ»¥¥Ic‘ý†öŒ’º«dþ·!=HŠ,ç(þ|R¥æ8˜‰'JŠsEŸ~ç,±`˜·ïQ+Ñ.… {Þ¼Ì,/øñéUR\ -“&Z zšcN4ý;í bE_[5…1SþãèN›Ôõstx8ýh¤RBÅ4àXŸShqù®œ·XÖº¥鶿k”}}7ìøî®jS ®(l IÀœ¦`ujðè -Û†ÊÊ-nw ”§O'ö-iÕtÌ ìB°å! 
GFú,OdbcO] OÍÊ™<]²P‡ò-pŠQt;ˆÌŠ:­c{rß?°Í3Œê‘2ëK  5&ÎÞÏLŠäAmPdÍ”ÄÉÈ´òR= 1{`b?ó;¼Š=9$^‘Ò…LfÆ#t¯ßy4Õ½í´@OÅäë.‹­ðß"©þ‹æÜ\Sr€6ŠA RôŸÖg.SsùkVOQoñáòR‰ê¬BßÅÁüJÐɪøÂùRöÜ?a!r›Aæ01MÓ¼[5-¹¢T>‰"_ˆiþyNüQfí*=cúÈñN½Hn‚…ªò“kÎÏ¢—3ïë÷†—ßγ¿e‹ÔL¿BQ÷óœÑª[ßÔ6Ô£n:gl`/½­–^ÖJÆyˆ@ð¿WÜyÄóxý¿Ù£^5»SûI“s­7?B-^¼^É,á<àôV.&Cg¬N}1 ˜X-*!E~¤JòÅrb½k¢§SC°ÙŸÀ†×´œÈšFs©®´® ÃNÈ-™߇Ó0®­LÙ[¸¦žÂC=ˆFQ§pQÏÍçÉ›Æ×^ß>Î^ú‹+—‹æß]°ƒõ/i fgK¡|¬w¶¬ûCê -¸ó×MiþâM%-¾"ÚOK•*Êë-…ù{ËÁ”nå¥ Åkó+®Ik^Á›>]Fö`œEê"î“ñj†Â£¢b`(6A>”Çaì:Î;hô¸Ú­°ÖÈ¿÷Ù–Ï>3Pô¡äbHD@ûë¿n˜V;øÙ#B¶&Ô‘¾Mv…&}"QUt×y‚ ¦ÑŽº]b®ë8ºæ&žD)‚¡‚±uª  (⬣²mN 9ÞÛV,^zS·®X0ݘЦxñÄÆTí Þ¹´×‹ÂÖQ¿sr²¢õ`È“@õ‹mÝ hª§x71Í· ³¢F½à¡ÖLDádŽ -†¶°›®—s¦‰çs•\Ü?̈=5YD¹œ9X–ÚÙ¬ç2`œ©B©^ÒòÉÜü†Öšßö ù -#LªÔºNB*-šÔ^"r"VÚfR ê[wj=ÛùºÅÏv(åe•¿ƒÊÉÇzÛÈå”°H`¾+/Ì¥ë¯Km„ǽ9Gë ÎO®ñ«Ì¼°’îXU¢ pô»Ç›Ä°ð4‹¿fl+«É¾V44Öæg®¨8(8o§*µOÄfEßbI5›EÏŒ8c¸R2ßw €“€<®ÙÌÉhx¦ùy¤F…I¦|“Øœ¨?Eüjqƒ³jMqmà8_Ìb6!Ñ)¨ÿ«n¨Šàà>Ÿ™ºØ žÊK“Ñj˜lÐé›{@Rš›ûyž$»ÁÎâ83-7ªd÷ ÌݵÌŠt²­¢âì#„}@óÇŒ€¢2’'òùÖÌç^ÕØ–àgÞ†Q¿úPoÄlž0JèaMújc§Q_XQ˜BPŽ_q¨û¥Ñóý/ï÷jyˆàwUˆ}z@\Ñ×x7^CºþFy_Ñõƒ"…¢¾Ñ}ôØŸC²ø–JZ½át•7áwáDbwt¼X†·’~à¢ö"áݧº÷Ï6–|Ù‡s’Xà&–š@Gd¤Ëõd7¶hˆ v¾iŠÂå£$½Òâÿ5‹êÿ^Tæ×1JÿœÓþ™ô˜Ö¶^™z؆·àÆG½Ã™¬y«+és¤‡crŠIWפ¾=ó†½Z/VÅÇm¿ÿ€rI¿kޑϦ•¬ëÜr?9æf´h¤Çì´ÁNc裮9éýÔ±¬†  –K)yŸé‘²‹¢Árgm@&»«‰2ISp-¦#oO­Fx͸VªR¶s)`yr¥ä0œŸ!Y²âûÜ9b¬ÔïÿŠˆ¬6_6^ê÷~»ÌÍÕ|±5¸ÏžmšAÚ=ËG 6NWç\J¨wn'õwÔª³;ƒnCéŒÒË>øª°`ZE:…Ò¹.Ndê¾Î]§ÅIw(¿Ã:ýµ^1[4똺Ÿe‘‹/0jˆÅÚGüfêJ:¬ø&3˜„ G•Þ 5~ø®1xZîÉE/šã½Æ×æߧÛsZ ådüŠø%hÃã#H$´S‚iczõS=e,0¤œšÇý8éw±Ýw¾á?ïP%¡{Ä&…çMOÈ©ñì¢yCZ7od¼ÂÀ„ Z›‰*^©!DˆîŒ¿^ÿl»R[Uôr96¨„cñÌ÷ ˆ ý#Å;½Ý±ñÁr³ ¾?(F½NHdýSä”8íŒeªÛš•{ÎÈTˆëóg|ŠDŒCò´2ð.k}Djà ¼ÁQày3„á–£Vêî?;M"½7m ’Êdsk÷~¹¯Ê÷i FHcíöC~ýVhá›/èó;?Ⱥ\¸¸TŸ5éú9#\<{æÐ ŽAÙG ƒìEÿmm¤ã=ƒ½ö]÷çw"¿‡gàéÐd$Ý¿¢¶?GÄ;ø¿kª’Îíó;¤X4#7_†™^ N͹gÙ·+BÖ -€.E§I¿ i ¹¶çB jûR5cM}FË­¹™yŒìq0¤Âä{<ùgs¢‘X6’² À„û¬ &–~k: HHÇNò;ºé=tÐT!&}åwÊ飆 -3ä†ô×ö™ˆ×ˆ€J ÊôýÞº™ž$ñ蛡ÕÑõ39Ré\_û†¨¦`n•™ÜpCØÌSÛßMÒ¤_‘]_v-ÓþæÜì„wƒp V·.ô†R™ÉíР†"µ‡|¸žéméʆUùì@ú/Ú’DH0û68…ë_ª«7C±†ö*%æbv{çOÉŠÑ×Kq%9 -ñGAE¯_•e¯o¬ë;v+·ÙÀã*Ç›Žzªe„T´Î…½H’ÈÎXêäCá’y­o”Ê£h¼‚¾§Œ°Ñ×tŸ™·ôfž4è q8ÁN’ɈB‚× -}ÛÏ Å èÈ‘ú1œÕú‚¤,p¿[óS†ÌT®cú¾&G&œF "ÈU[CV ÁDˆšë³gƒNé -o2ƒN"”‘­OùÒ<`ùvÎH%šOÀ[ºÐ羿~ðúöWVÆšÖHÛê®— µàµ¢X¤²LJ5pÌö$îd,ɼÄfŒË¢_ðëg߈lã» ÍIBQ„g±.Y} Ÿîõ ÛkGÕ{Ä@ƒQ€ù#ý»ÎÈH(Í(g£¸}4Ä7`HˇnTW¯žK¡rEã Tßï(dòôy"«ÔEEÞÄD5nÌ@÷»rò]wƒWÈ*׎ôå©V.$_êE¤ö+Á²9b[ŽnÆ|“<Ôí8s©•ìÎgÑoþZœæS ÄG{j›:£8.+ûÉ2zÙ7²qÄ`S…ìµEÄ»Ëä¶èÆÕÛaU¢Ñån˜âòôBJ¬!õ—)¶|¿~ð­öyZ‘p5_€·Åá¥Î:a5£ß:Sª«ø|#=ßT„ŸïúŒŸ“„“-öoNž?kP6à[nv× v’i˜9$Þ:,×7Œ×'á«:ß;¸‰ù9O/z€, >çé|ƒDë!X;,ó}PV"ƒ×¹ƒ¶hÀ¯Iñöœ§ÉSAj¹”<üŒëJ¶ûµa„o€uÇU­ŸÞöÛï Çž:O‰&jê#ßÏÔn¹lL ¿NÅcÌ*žÆóî5ä,a”'̯“!P…_rœ»†ðb˶s…²ïw¦ŽÌ×_QÂV…ŽÂãí0êa²²ŒÙëUX@Jy¿~ð©v·Émž4E -œàŠýuïg#çi’é‘+ø -\ûL^s?ÞãwvüÒ… ·ëo)sÌ)ÿqUýY«ê%°DõU«œp÷¤Ru°!l*ìPŸÜ ûrßH D3ö^O4Æ<×g¯°àåú}¯_xNzDµƒ"º^ôÄ ô»åCñddä6ôÿ9r¼†°ªÇʹÞú•¨ÞHÓãÏ´Þáɳ6·fý“H3W‚!\Ó¿ðÙµëÑLJmë\ûѦüT⊕ʃõ£6òrŠægÔ†²³åÖxümð Ó;ÿîë`£øçhîÏÿU¦¼tDx¾y->†™Nèµ2ž¤ÀuÖ\ñøÄŠV…š? 
QÄNUå8Ü4‘u¡84ôD ƒö¾Í]C´fÆD:´ªÇŠÇUÒMÅýç 1,Ʊ¤ùÐî¾ÅCÉ‹ccËVˆu¨ðrbc4ÒF†À‹€‚´èêB›´¿(¹ÅCb_·Nj=ºþŒï@DvËøPÝÂ=s\Ú|k!ëè xGZrP–¥¤Œ%“=€{° Å‘=€—ô•AWÒŽ>œqCÅ¿ù[”ú­¡÷ë˜8"ø΂ß8Ô­ÛDÎt–¾Â^Zƒ0¸ÆÍ6( -¬ŒZ_võ1ýpªâ=W‚PZä7­DHm­©“ýe;jö€h§ÇêòŽÄòÅ|Ü!î˜ÍŠS0,јØé²âì¦ -øˆÊŠw´o¹ -Ó#’{P„Í'lwÌýZ<+ŠI¢_ƒ/$õ© ô x# -êOŒÂ$i1­}ë–Ã+[‰\š?x;¦°i>SELf-£)æ«Ï¯¨û\3(€™*SBŠoßõùö]ý‹ïÊy£CqöOÀ¤z2• 7oRcÄòH8»òziè«|¼{ÄФ‡–]™;£|<Sê8kôz:Hÿ¼ìf;]Z.ÕÿØ0²'$ -zK¥JT‘SQpÝìúˆZY÷T¾Ì¥îÒB@qæ~¬e‚¡l$òKì—RX½÷Q£3¨™»²‹¯3œrëí¬ŠiO{Nóö$Ó7.ŽqÌ·× ;ñ -uG]ªSí\{ÒÚbä趎·µ·EµN£óvÐA¨P›¬Ò ê{­{®#K­xýÑÓ—M6q¸!ñºxÎÊ1¸ Ѐ7Ÿg¾ÔQ6ʉ•jÙtIè\Jwƒé 7Ô (["5]¡6™%‹gˆ5†4˜UøA2Ä89G$ïÒAÎô¼ZÀÞÑßSŽa>[•ðo,ZGíËgü¬BVÙŸRè#ehÖåôSýÓS¥$ÞÊÜŠÙ!›¯À^ç-KðÒë\×:ø¸%;Éžñ£òSUýõ¤î¿`“ÙwÿLÎ^t È+TY'V;\ ù£?ó¡ÓX§(K“Z'"KššÙÜdB®VEø°Š-´Ž€¡®HÁiØö¦i‘µ¤/BJ#–T>bœ\Ài3ébxûÄŠ<þoF¨­c”(t±B¡µmÎÍ%ûxñZ]{¢Wø¯.W“PzRR -[çUrBÆ5dÆc¬Â©YóÒFøNߥ†¼ì-Ò¹× ;•"|·ºTdÇúðæó8[PY-ò’ß™'s¾zì“ìŽY»öëËyr!NßñÓ2ƲQ?ãºw%h…d×áeå¬c½;àß•Ó=3yaL‰ùB9š÷e  ²Ä`²ÙÓiÜ¡H9v³×˹g}íl H -à6hÌNoêÛwCˆ°Ç¤—)¹Ð`ªwip¤*Àó"Nñ¨ÿâ‰;n¨OYr=59Qºì£M æà²S¿ÏÚÄ‘cKyÏRk²ýòbŸÚ¢«eÐZr ñÎã¢(G9ìŒ?á¹—Óz¨3¬³ìÐÿ¸rÿJú´MšjÃh§¸ôòøk?¿Š»ø¾ yöŒ½ÕïªLÀ_UÂÇ÷ªü9ÖŒ$GÉakf¢×ûÀŠ‹Ó²¿ -yö“ÓšzZÿF.öˆ% ÁÂê -=[nç@ ¡sÔâ;fj,¶Û¹F?±:;gø>CɈr}Â'`aÉ´Éb+?È×Nñ´½ñjjQüèO®4„_rýYAÁ(ÁÀ¶3:Z½iJZWç}?wFúÙÁżÊä¡“ï¹5»¨gáž•‚T¸8ÑÀhLÈôvB?i€å¬yyè_)ÑÞ‘÷ÈÏP|G‘QÆ|#µØ¢Ó?›"ÜÓížPºrCR˜¤žŸ7E¹ +pqpûle#Fè`RZ°ùØŒ%™!ˆ LòÈù¹kˆoO#‡è?ïmKvlNEÅì’T7`œí™Æ;’ÿ_ÈRÖšÑJß3"Xú‹ùóoÖ¾g/XåS°rá8ö}¤éÛHáæ(M ˜ÖÓgxò´M‰b¤ªOË'§ ÐÝ`—Ç>ëInE£¿;oiÊqæ8kcR’oµ4œ¶ñô\V7Jçšpª“6e^v³›Ä’#Í„¢ç[Þ¦t×ÃVu¡½·ó$:@ÞšnW~ô`…4•yër½®¸›× o–­œdý Èì–EJ¢|ïü7(Ý#úåûÀ_Os§h~ÄЫh ù$âÎrÄâöNlAŧ¥ÈæòTZBWê†ÍÀYÀ‹9_*©p„}ú‡:@×á¢p,œz‰[{8ÝDµ©Òƒ'jhäw¡z+ÈzÇ ðmy¦,=Éƽ=9ýnú…ŽcBD;¸Ré æƒÄj®+æPPY¿¼XìÏz@©¦¼Ã4¶˜Æ@ª³öu”&º+~nÚVŸÈí%¤£p}ý`žü~›ÍKKû¤œh‚g©LR¡|úf9ý¦›ÿ-’øM_FEÖeeSfžïñÉ×a O@ì5Å«’‹¦‡»·b x"©®Þyn1¼-ñ í-¯¸aS@4U kbòN€óeoòÒµ{@²ÞY—æÀZúôŒëR]$?ãu­UõóÉÛ×çfu´6“yQFµÀD‹Mjvg&ìi1É…k3þ'x¾…B¿iÿà ˜¬±gS玅Rƒè‘*¶ UPv¸ÄÌ îæ݇=½E„¿ì$´HyÏ­|¡íZ¿3îsiªX¬Íó{bNå(' -¼ë' `˜&'ÎV -‘AƒžGiP»Ôãˆ朣|E,|ÏÅ&ó3†p× v?PpaxÅ7{ßKà>aY+þÁ:ÓöÞwÄs íîQC>„CïóØ»,%hÇ9´UÅêS-€üQCÐí¢ž•Ö•EÞÓ3¤ŠnŠÁm†~¹+KWÂ?+Wôz£z§µ}¬´Wäü‹ïùŠ¹T?4öJŸ¬Û»*P`8…Z'ýª½úÄ®"ök¥Ù÷óªýlõe?u A"YdFêŒìZøtGvx¨ju¶·­T±”¯‰÷Ó©Jqä›ìù‹ë«Ø÷{iõgÝŽ¤¾+t2®TG*~òîúI·#DÝCþ{l€H´VŸà?òu¢ÇVs F@'Fß©ûúÊŽR¹'[ǯ¿S|¾#Ó[eg¸öØë 1•]ˆâb:÷­ÍÆGEÃð%kœ!븩˜‘pÛ_ù­ñ˜á\­Aé\LïÙ\Ýßù‰¥ê|÷ÍHÞñÇFšìZ©wÍx ýUd§gFU,riÓÒAZQfµW× u(¨*H½²ƒÆRƒóq¸¬†$Ûä̘õÿ_Šõÿ,VW´•Âd4ÆvõåÄåK4_Q©]…-Q?æŽn¡Oî )5Jv|ð}÷_ñïÚGÙVlF~G©“Ü.¥Å‚ë±ÎV€1i -ÕŠRÆ©øÉûÚ5ªÝ#ņO‘&í¡ë)„ÿîUÝ”«v%çI{F(ÔªêÄÌÖGæÊ]C"GMáÔN[x œ•†©2r*æ6ø¶¶«4­ÈÉk/ü«òt¨ïÆAúI™†.{Ã|d¢z&â$Äæø§õƒðÚ–òŸ¤…³v÷•û\µ’GXØЪou[$ø˜~3)CJ¬Ç‡Bë½=áâ_ÿ½=­#€Œñ§óÃLG‡,å3H¾»%âM†xG|G??3„±DóêúH¬°è.^[ ±[cAôR…ÂzY‡à‰Þ{cCceÕÔ_NqÙ•m—ê@Õq¼ç ,8´žÝBeÒtÀzµÊÆŸ;:.wßE¶ÃÖÓ9Ÿ©e<yͯ×~?«È¹ª‚á\¥:09ffèÂ^Ñü\éÊQÿ¨ (¡…ª × úžcŠ]]mrZ -w\p•ðjˆ"<™)6]5³Ùwê‡ë]ïÎ×¢µú®IG×d u²çwÂ6ûüÔµŸ»Ì²2k¢û£Q«fØê ‰æšÅ&×óˆ°Ë³~~W­Cul± ìé"ÁFÓQÛH€¦À¿Ö¥važY›þPßiïŒ~ˆ°0hÐÐ4; -4äv&½Ì(WèW½¢…µO×>zÎ#5év©yaÞ‘ë±#`¼ ™n)‚Ò -åÉ&—œÁ›ý>ÑZùQ\$¬á{œŸ# `kM¤àMpíx1±ÚGõ+Œ9©qEŒ;ÊB(kê”Ôê,DDßw  r…2êÈ?e‹=T§¢œÈhÏÁÜ -ß z<ãŧ…Z—ê™3úª™]gªÅªêàUÀZïcÆ…ê >im“äI0KWU#§È¡ô ò+ºÈ]1í± ¢âÕ¦§É >þLá%ê‹ aíŒv–›áŒ}ªT:äë˜j‘q~ë™èÎ!¨ÝŠÀ/‰u6ÝñÀÊÝôbóZY=²nï -sUþ]C.2§>”&ò -tÄQ´ŽøÒЬäL¿ÖU®ZÅI~OO R¼ -R•“)…Ý£èšêé ¾ˆA¦/ø ÿ)6ûÿ½©%„øZ¬O5((jô±3¡õ»äŠÇ•;öë­Ê’¿"†`Amm¿²6°u9ß0œ¾ ù¯H¢uû< ‹:±È~õîËã¢E„ŒRK‘»¦*'œ„U Á‚AµÝ¯è¡…~n³…'ŒÃ‡:ãÿΓ#©RLÆ]ƒNàs%8!øüz¦GŠY9ã:Jö  ie„…øÒê¡FÖÌšwë5ïKýÃK.Óè´Pq‰ˆqä“õÐj^Ó­ö]ñ8ì‘{«BX—@*€ªï%)ú á[WeüRéM?¤ÐQþ›ºAªÅÆÕÔĦ®ÑRÀ©#·ü€D‚Î-«¬€}þ;ª$×ÞO¯,Á\šØŸ^YÌ·ÏO¼—˜‚r¹³ôù öfÍôêÒríä]þÔ;C°ž4¨Å ½˜{Ä÷mQ§÷t·w·ž¯4ë¥<ÕÞk—_o9%ØüŽ}K”Ëš®†¬ÍrzwšÕå¦[DÅÿí;бG¤‹>½çì?€t¨,´½z4ô5ýGJ½Ä$R܉tˆ³êŠ@ HUÊàÿø­>ìQ¡Ó´¨lr¶¾ /7Ò£“UͲ 3¼Ü\ -VÞ¾u~cÛôÝ[0¯$^Œè$ç~kZÀëgÔ÷gÞ>“$Õ˳ÿ5apiGU+ë+^apæÛsÁ<v#ížúÐÔ$¢üëCS!ŒGIÞ~p¼È±Pç„ðAPI–nÇ)ð£=§*)%š/E¾‚˜ûƒŠž(k‘VtZ iñ<`÷î3žHê%T#J\–×î²g ž/G¶PËÈIƒ}âøW —´¡èŠdÉõ -’äý¼â8J:‹÷#¨˜B*X{Á 
ùãÜiy1›_ã¾?QªÙB–~®½Lt.À„»ûDSâ®A=ê+€Îˆ‚ÇâÈíç<#@óõƒ)ññýìÉ;?Kæ–n]ywÊëÍàá­”aC–Ÿ«&âJrõ¯ï*¶*°ß°òW„쑆Բ•{ éDuøõbV‰v¸N5õ8òå0 -wÌsP‚ ápËSiÍ'â0Ú¡é"ò íÓl~RJ–[¬û¼J3cµ·;•S)@þ“ÝM[+2ªµŒ b&Í¥FÑFµžÖÖÞ¾-“MÏL)€«¤O1¢Ky%÷”NÝ¿f|'³ðt¬ß‘›\ÓRü›1’mPyykbË è€gOµnÝI¾"¥òxÆÈ8ÓW¼å‡r ‚3Š@-ócºÀ‚FùóQV„'¡4 œÀ±‡öiIån|Í\J6Lä3@ŠôøK•”E"aÀ›Ø£7hã?¨o4Òˆƒû -M=»9×¥x F”y·@D¥\•ãRÁŽÕÔú(kµ°žOÞÈóÑÑ°£åw€ßwt[¾~0ÿRÑ•‘rwÿ`- ’¢&?ïâ ®èDq™Žf5d—OüA²*¾{ |“ÒøÇ@l>¥tîÞw¾ó•â~"¬bß™·2hj`O3Š¼©ý´1…¢÷†a´Plsع‘Ȋɲù1£v(]ÛA 80ñ’÷íq—ËÑ£´‚EpÀº”Žlœ.Ê¥7ØÔhê«à¬£‘)èù¾ñ%üöD9w~fH<ž0êx†}ŽÐÐK÷˜Û¯‰…EµW²f{ù¤¾JwúO© ˜2$úV3Lð¼âhCÁj‘9̾(…»ÅžùŒô…¶Ê–!*ÍH߈’3úeÑìy(å¯I‹60«[l¤es„›ú?Øà‘¸Uæã´+BR˜Œ×¯Ý§)C<'c8PÈãÞþ”߃öïù!xJ-Ri`5ë>+úž¥2z±-HêÉcÞü­³3(&¦Øú)åøQ©#)5ËK2® -GLÛ¾‚€&Lf’:²]64¯ì†:°)@Ÿ%jôP£S¦ES¶Ú xcqyßµ×Ù1¯‘î6¥ž/ŠÜIsb㘠Å:~0Éyš !¼a¬tð¬oQŦ¯×ù!#¸kúó$ V T/».#Æ3¾8Â_­W½ëö½õ±wÝI<Øb®J °NŸ|ß+ý§»ÖpLJ ±yÄÙš¡æ‡Âäz FõºÔÚ]Emñ‰ZŽŽ ÷idmòOÿÒ-›|õs‘'×ZåˆÌ[³+Ú3gäD2äVc¦Ãr?õà€e§ƒà.xôª˜“µO–lב‡ÃŽµOù&ÄÛà,XÉaöëësoÊ`ÿ´e#Ú,†-ú9l·kaGtÉ™˜JÙßBœó÷ùgk‹:÷W§šÄñà…ûDl½‡È…qP!Æ}i΀>þLûD#nNñßqñì -ú2)´Þ…ôíÕH*P.-ƒ²¢Ò>;dÆ#¿‘_lÅþcÝ]<ƨtÌ,Š °U䣖+Ä¡¨Áë !'‹Uú8S‰3’”á:’EXO…ì¨,0ŒoèS5ȶ8ðd»š!q5#QÞƒïq{±Þ G2‚úîZp×árþ!G [˜fö~(~uèå(”)¯}*ÿÖòúbå‘*ÙÁ«Y}—Ú &ºùš -ìs8c!¬?kG‰xŸž%4ùº’¾ÍñÄPãáÈöñô¨`Ÿ­UÅÕÐOÈW á‹tºf&Pkû±_8:0¸S.µaŸÑL>  »’h†r)^(=FÞ›á^„ks)Ki-¿ˆñ9ì Äà†Æ.A´uÎ1åóŠÿ÷ÿ"ú{kè>ǃþ;sH•w§µC¨¶3˜ŽêœÜê˜ÿúPy*H(®›…ÃZÁ¦uNÚemøûRõž½azÕV/ä¡m~ÕCQmåJ½BãžsÆx›µ ãúkj÷öWÐ’¨*N¯ô&¢œ"¬¨GqúeÑ8¥E€+´é:Ÿ’»3ü¯·ßÿå7ê¶ÿ|tü™AÚä°ä¡±9‚B(†Ô²JâÉÂFàŠ…ZTVkNÑ:ï ̶hªUËÛŒ`M¨¦Ìì/;ªƒ39^#¶]y5rýÎùF­ª¾r¸ªý…Wô¯ØÌ>Œó NãžåÓ?Kå3 õ­…u÷6˜!6îªms©ôþ×:˜}ù•#²/è°N†'JèkW'˜²h–Ž×yç$Xc&=6æ°¨·f04èÅ¥x™d¥õ:-<2é~rÊG†wl¥¬?žkç¾ú&Úy”hd|â_EZÉ0|3Üúö‡_¦ÔOÏÒÏÂj[õã¼KõvýÖ ÔM†t÷>LìõŒ¼ÎìÞR;ªÖ iBPAžg??@/_9æ×=›¦vØúOu¨LÀhe`×ÅùÕq -‹ðËí>v„xLEPµ›0sËLP9ªb7w׃cSl±æIز¦ÏTu¤„y:'K©ä©hÝ–íLyñ(Ä·ô'÷6»ÖYK=º­Aà0k↺÷d»YIœŽ¾ ².uBX²©ïþv¤G³E b´QkÁ‚±•ãfÅÚ€*¥ÛHo !Š×™ ¾SõøÍ_¾»ŠDŒ4›'Ùˆ(sÑʹ·ªzBHWË*ÎÒd<_|°*êUH9#ž`ˆ}B!!_ì‡íÉëLmûŒ¿@ÈÖãbGõt¼ýÎkÇrÜ–Œs|"µ~”`tT±×®}Gh% -‘xÇ+S‹uÇØUÑjäîÊ‹Mb &¡–ªtšÂß]—µ•E¼”8‚àhÑ{ˆ -iøÑ¢ûˆi‹x‚Zïý¨ª(Ǧ£m²~ñHzÞ‚†O!’‰Ü£téõpv€³jXÿð­þï(Žüã&ýgÆ\qT‚L´¿Ìø›ÂUÈt¦’FŽO"˜KpÿE -¢!wœ_å6„êR-èc¦¸€æ~`sôžP;³bF¢_×U}†]À3^Yæ'Gœ N}jL1 „…±`ÆÊÇË•¥@YcÝ¿ ýå×âþÍFñÏÁpe-T¹ÖKºÊ»/AS =±lwP]‘UpNw,€÷  -h#I7b¨ÕX‡¦<1h¡ÌY¢Ù+LŒ“ –BA ¨'ã($rºª>:0XÕ)‹ÂÂ=ä%IÌ»RWàÕ4±ÓDP›V.›äY¡Ki‹ç{¯1x–"ìµ ;îêghÌÃàÛ8þ•úŒVš€þÊ‘aöAÚ%† LÿFOç†;ÜUƒ±x“C.¦’ÆævKÑ­r1½[V$—5wÔìd‡QÌýQÞ²µæÚ:ú é˜ö¨––6û¹tbI@^lñ“ÿ¨5÷í/ß鶸M§3ž.üEΛ@¿Åù*A‡’Ú+ãÚr…p_µ)Çq+ÀËX¡ñõŽÑéJïÏþ¯Œ(ilŸýŸRK«ª.J Á~ìœO<ÍŸHò&aŽ\©™>ð7ÝØ#61ð¹Ó!6R#ôw´îXÀ"æÄ]™ÐQ(¢Î,ôÞ˜vÔ•Öž ÄòDgåJ£õˆ:1‰ùóÜR(:¶xV#Ñ7#”>If9æ(æNÎÄ÷4Ó`D„~?A3‚¸ŽÙ;ö•ìª§g¨,k±€X’qÐw«à»ÇÆõŽ>rÖ¥¨`™4:¾•_Š3Ï6ŠáV¦ßϤñ¨…³J+ÎN« ®øU2¾_ëßÿðÝ·6'Ä:Þ'Ïél@ÔǼ0×[}KûØ©H´6òûÄ úŒÍÐQmËYrZ W%=gŠ|zŽùÙÇÒÊœ±ùÝ>`EK£á}Gícè[ÞÓΰÔSVÊç¶0!ñC{ŒT?çâ»eNÚç\T{ºª9 Zø MÎÀOkÐ\QÕæÍ™¿25§ž3*ökVh†¥3 W¡Ža“|µCs7°¬P ?[Y’)²Ãçž„_–)ùÔŽH²}À¼—›;™BU8 {ÏÔô¥›·ñ+ÚcvÈ]Ú­ÝyŠš€3îJ65{×õ¥ˆ@øÈG¼ÌÄá+n²…[ù?χÿ;RÊÿp›Ú6JQÚí&%!-2H³Þ†æTcº°ÏŸ˜Ièö¨vYJC“.æ9ßÿüâUI ¤PŸø]C&hñÿH)ƒRaÃS®©²–aOg}Ä,Š“ÌÖÄUØç"ã|¦þ9ãªÖÎù‰èÖñ6D4Yj d¦Z€s@vcLøTc.m_@'¸Îÿø•ÿûõå+®ò±‰5vT§ë›îãû ÷ë_’3þ>»Šä¾?«SÚÓº!œöT¶ä‡'Ñ?WL‰G¡¾DàúýÕKWãþäŒTr™ÂV¥±³‘®§ï®¼/j¿ík7PšŽ!æS2¹'ª:7z_ ð—B„°NÊÿØêr>¦úuîÊzöLžÅ*X‡ÐŠ¹¯Tèu#[úÀW5b›â'õÄɪÅîMôؽšyxáÜ>kŠ ¦6K– eeÔw5´Ìs6­ P…z;„ŒÙšFTUMvê˜ÁÜÍ;Vš±f;Æä!=K_Ø—êàxtn ²R‘Ú G¯äT(&?k+LÞwb©hæ€[ÙâmÏLDªÕÙJÍ'Éò½F=GúR•¡:Ãܱ'+oñËò¹à -0ý> à†ò¼"͵³Æ¨-á«ü H4ì;eóNÀ²ß^àñ›Þîïÿò÷Ę’òCšDÓ,-úu熭áv\*)±C'„ X‹µµ=1b¤,úâ©`Pº]XéIó›D¾'BVk;YF9âiH¶·xÕ]Q\sv­¥[{ icÎ?˜‘¬Fv\?ƒ©´VþŠ—žà›à(3×uÕg Ìš¤Bð0„¡ZVšžE1(s°9ã²·®Qß#â11½ÉR†¹ÎJe 8Ûè:ï×úvÞã!±HöPä[_’°ãدã,{NžW앤 †¹²ÁÁoñˆÄZ|ßCøƾô××Eò·`«ì*ëß’¬H ‚q¹Ô°–”®®r²Í `Æñ×àhÈœ]’„ËšUˆ>ád#‰­! 
v"Eƒ+Ò.+ºQN ŒözÒoª mÆ‹~wÔu'E0+ ]s|¤OÓýŽ"z¯!Äð$çÏ×R3Rt"A~{‚I¢©õ2÷É¥Pd±ZÜl J@›ì‚[^¿ßUà¡QCdÑæxÛ!”}+BŽËNCa$µ$ð©­rh+Ð+ŒbòZC–?¸)%êõŽéÃ\šzŠ÷ù3šÌ â‡ÖæƒO=62ÝÓ£XVŽ~ëÓÿd{‚ Â1\÷À±~~­iøæ—ÙjQ=w’YﱿQÿ^{Îc_jMjzš"Û•Õ`I3,ºsÊЯºÕóei7Ló¬^Õj‡xz QÐŹèŸ7|è¥éGÌLõ Šš±<íéõ9¥?k‰A“÷ü±pðÙ^M -`ø+ŠóÉS­y¨ž’ ¼†¬À]‡=6y”êø€3OcOuRYŸ¹ó©"‰LÈ:¤ªLue}ŪðƒZä"ëŽ!`i‘*¦tòÁ“YÁÑýº"¸Çvƒ2<¬ÛŽ=2EíY¡Ÿ³°(4=·iÔx! •FsÛ{ÅÚ}£â!y5aØÜ.L¼2¢2} -iò ŠŒêè mÔiãõ)ªN&£¨uÛÙˆk‡ZVðâ~úßä0õÔ6ÚãÎùä@#úâ WºÞÀÚh©&bˆ¯)±Ò^[zÃÊdd•ßh¡²«©µ{Æ”þ€¸*LŠ»é‘¿ÓÁóLä0DHË϶ïE3˜5N—-”™.'¶;P]É`\‡‡ºÓºãG…4Â5;Ó£ÛÁþl=÷k6Dl¸Îz5›5e]ñô ûL<à=®ØXÕA†D•³þÍÖç–n*säÔ0¦7ùÔ¬gºÃÇgÓ Òavž Ë½à—”ïüŸ4ôNg­{82Á>>aL„n=k²ÆÊUÎgI^üm 8°ý¶ lXt ì]+à{ù ËK–»Óg}’p§É,Ú°þ«Žxš‰\tslPt¼#à:£ý6žˆT£å´jDS,éžìB ¶(|6/ø.?ÅD¹.<— A9x´š½'X-9ûåz)FxD¡•{‚Ão€É.íŸc–† ®¥M!æ,ÑÅ„n‹šF¸g”†áIÎj)Ì¿N²Àù ¯æû˜ÄR”²Tåÿbßf2Aû2k -À½Ï2m}F²ds4®„ýŽtRÄøƒ¬ýBúÚ3G™§‹ÑUTö×>†Xcey.•Ê|Ï8n›³Œ¨ëk UC’²¸SYÁÁ˜1»k^Üå·ïHK-²Æ/žúuÕ DF’¡…Qº<°Hã“l8ÂE£íõÈ_„yævL—´¬À×RžÁ~É$ú©ÛA)e}q]@ëR[´¡‡K„SÐPƒ…yqÐf%Qdi´êúZf½¢XñŒ61LbÒí‘;lÖ,ŽJZ.Å[ tÖ¥là™‡ÈíGyM-ZÖ 7°×:K(ÎVdF'*‹•7ÏÖl€¦kži«Â­Aôµo¾µí×fuû…¨iÍŠŽzüfÌAë@åDd÷Ï´Ò¾*(VB¼•±ƒ¬+%çõ¼ÇUñ“Xhý\×úÄagÔû8?qX4ÂÃÝm9T„@ ßj®—!Zú+(t¯ëìÓU8¬Déü³ÜÑÑ?‰"¾Õ ¢)9[û 鱜v>Î,-кº‘diiŸùmÇ·Lìß¿‚ÂÇ~~§Z¼†„i]aØ™Þϳù©¬"’¥+~$ûý꽋GݱyCb¯ÐG~ˆˆ%¢n¥KÀ·¦×:v4gÑS„$<¾_ „ÎnëYm¼Ö4óvòØ~(ì³þtg|eUí¾~0'²Cüs _Sç ÂÆý~wÅ}íN/»M¢]k«GÝ,_«„´AãqæNLè½£Õ];®>­8âÙ]Y‡2‡îzy;pà¦ùìžoKFÕS ú£ÖñŠ´5["ïY…ûšÚìÒ‚LÁ¤+ *÷B qE‹„V¯Ÿe@iÓ¨+ÚrÇâŒÅ÷º½5]‚â—\æK‚[£úôß$ë~÷—ïÖèë ¼04lÝ¢dJGöª§Ä›SœÂ  Õª³ÑRhÑAçìÍ (TÂi5D/ [9ñ\§Š¦ïÙ÷2mV- -ñîŒìjÛ -°Ö~tÓ*lªÒÐRçTüK‰öc8É]µ„WFøz*$6ïf”hÔ—«úI2Gï˜{ï¡mNêW=«Z £ÅJ²Ï±³Í¸Û_G耹RGF¶§#Sœ;3sØI:Zm,o¤xØf•-¯¬Ðt®ð¾ÔžA4eâà \8j¾ð;?µ®¹1ãƒ1²H麮šAßàOßÿðwsóG sNØZ«x,¢o}â`ÆǸK+ؾ³ù^‰$B£Mû‘C½J×Ä ®ËÚ† í€F:>™ÒÝ~\K(re"ù´)qa„2öûdzñ%_N³£°Á£!DÔŽØE¼¥Æ{)Ø4pšµçsíù~…°×¡_30ÁÙmž4øú®ž P&ÃäIï–Ê’—üÜï.3¬p튟íaîUN×68*:³…ª,áAŸõP|£zÖÿº¥: ²ÏÞQ_bž¦“œŠýˆãS®(3R„ H|êv„LÛë“€Ü[8«µðH–KF˜î}ê=ê,C.馷ó">¿!gÕn—Ž…TùÍ;~㩺bÏëïîËe™xŧŠ³ŽÞ1Øú®ëaÄ 3È×'Á½Þ'-¯3׫-¤=Ùp™ÑÈ—£VÒª¯·r¥éŒrÐ8ËŠ+qÃnšêé,Œt¯¨Jp‡C¶ÛÅbâ ìèªçåv}(ij«ô±’H?<•FùÖʾKö-g´êÀ³g€¨bG€ø³ò”TºÙ™+§äTŠÍ˃6ºÓ†-7®ª‘Έ2<ä›·G˜§ ;^ìõž+¦Ë'©ãÂì÷9¢œ5´V¯ˆü³ÊèRHu› bUHh:IJnçˆÊÎùWŒ6Í'(mˆú‘v}\µò~ïƒuŠ]µS¢_ñü»HÑS5}Û'J»É7iÁ­$iT៌²}ì-úcŽãõÍ’é^m¯/¸xùÍ7H.Pч”]Å<Œ²Ê¤QH–§ºƒ’ñ{Œ"?…*êi `oÏ3sÆ&ýÜqûi©=™>5)Ò¶«×taQ¸Ñëöt¥»n3œ”9¯®¨ü¥9Ãõônaz‰Qì…œ¹—TÝZ}‡X˪L«ï.@u†LûíÈ#)ýõƒ³ãc.ðÄpKM$šŠ9Pê9I™h+1°oIäRçÂ…•Û¨s|¯Èÿ»¿Õõ ‘þa9þ™pTà²ïÑv¡£Éù¸˜o+¿^ ‹äÛÔDs!5>4¡Þì1´Óˆó¢e|í~Ô}Æò93sã³rÅ3¨ÜçLÛåEúÓ JÅsFq}ƹ¡~‡Á¸ ±ZÍZƒ‰JÝÍõäA¹9ö]… ó–¯*Û¬xšR:žŠäÊ[m2Ñ€×VMt“õÈ€ØU§ -&t¾eœe]÷ÈñêÒd_ìÚ_·”’¶^-ýÎV‹x8MbV4P¦³.…tvÀ¤¿©ÍDõAE™lËLGdÊey½éV0NÄþ ‰'"ó!îèàIB!zªN©ƒ¼~eCCŠÊ>†È¹ßÐf!ÛƒTå™:ckÚýg\yµÀò;¾°HÊî£ñË^iþ :ì3ÊÕ¦•³(Œ C¦ÚøÔÉóž¯/¿Ã¼eþS’/S“¹Û0dÙiŸIMó;4GOîw²|KÊwÝp É„`ë;Ž¯Úzª@Ð㶷ýD¼»¥Ç]nÍý -/°‡rñ×gGéC]KFH¥Òwè¿bç˜æ䜒æE+ &á Š()þy¯’tW3¸®=…ëªã©\NHț눃i#¡U! 
ܸ^ÎzmŸNï[MqÕÈÏ4Þ-éÍh^¡LyŠ¡_§\¸÷½Ñoh±€@̼D‰Ï¡Vôˆ•D,/½ÔC¬÷óP«gðãÊÊa6‡ãºÆÏ‚„J‘ K†>Yè)';±—£ Éq9êO,æÏhÿ¬-YAT€ßTñkyéwV€³¿¬Í“¾1!ûˆPÌšOp,!€¬ó6›çš—&Õœ³"~[¬M}Vƒëhtp«‡ÓjIéТ+°EL © :˜æ¸œsÆó~vê;Âô+ »¥Ù¦jîH¾ó;ofáCa1CÖ!ù\e}8‹0¾²Ä¾qüÕˆ`ë×̽ã4Ò[;Òm£ - O)`»’Òq8J5ü¹ÕÏtí°.íª!h3Dë7,s YgÕ‰£äŽvÈA«K°[5 J•¯FÊq -Ÿ°Ã—êò³ŸOƒŠ!‡|*î°èbG y˜nH¾9Š»I;`¹Bù8÷ µç‘9fU@Nµ•`µøê°‘ŒL,pJAríyé_ã>D'f_*OýÂÒd·¢u gE¿~íZ¹TÁúWºj]‹›ïˆñ4ô¯LÀÍX$LïÔ˜ Mç[Æ@ü&L–€}½I-B’9þH!ðLk†~ö7u¼ßüåw|‰µÅkn endstream endobj 35 0 obj <>stream -¨·í¾Ðš©Tß‘p(½ÀAG¹ÚœÞ‘8Nå´Ý̽ÕÔóîöa1œñE@sS y?A gá­+èfSÕ‘tùñqý±ú`, -/ ´7 dmr*éá¡/¬‰ôÅÿ¡AG4ýÄhù¯t8#aVšâ©¸§º|íñk¾\è|Dÿûî)Zý!7Ó ÙÎ Ê‘óõépÆÊE_e?Èk£W;ø §½3ã=SxÕXÿõZI,²Þ`ypjiºÃ¸¨¨UÈü§£)»s&1-…rlõ;“ÓTŒãsT4<-åðø'UÓ–·BÕZ™_?˜›·X[.ˆÙc-@á¥@öUâlÌy+Ùàe$ÑpIêûæýÝÙÕø&oýƒïÏ$«v¼Ýõ24užiIN~r˜®¡6ÜÁ©×âÄ¥Úlõ?ãNBao^¯ôÆ'Õ4êæÀ.‹‹¨!*¾qM:ç¾›2qW]»+u–ÍDÜÿª¦1ÊŒ8ô -äÞ4±)ƒ¡8s=ù¤àjÔ £¿ÖˆxôÑÞãÖ>êÒʆ¤~2Ôž:Þ}<[Á¦g[ÉÊ%Ì3¬ãhê4]ƒ8™·¤¾€ažþ*õ{èã¿çóp’²ö¤ñS{¥C60tóÜÇà ,¾÷ï*l‘¾>ÇCìl&×çxp¸ VQTö¼µË)⦅mìe’Ý°Áཊo±›„z_ìE”ÿ1%fD›¨ß»,X¹´úg§#vŒ¹eÚ­8i1b[oÿ’¹¿ýð ’®“ú­’jîúF!jëÏò¨Ó{#<Ã:ïà²ñðßÄÿî®÷]i° ŒZ·Æ¤Ns…Äì©H®ïÌëpmé+òý^5ˆÌul² ƒÀ#:|Æ1,C&ŸÀg”rÒ]ʵ R¬\2ëjòÝÁ²T¶§ý)1ô3a (ª—^ ¨z†ü‰R‰…Š%Ó±‚k²Ywn†jÒu’Õ²uèB3ãÅo%QKqi±‡7èv¦5æ-6änr?ÛÕÆKžº#Ô% ÜrÀ-sõÕîS?Áû™õѬ,/px´'øsiä Ç×_WRId[ÝÃxVBE¬¦-p¥«I‰PµïXÕé+åùG#@Kº~‡±¡JB_X=°îw4l“zÑ«ázàñlêÏkõÀA-‚(ŠÑЫÖLÊük12»ˆzѸ>ß-A‰FØÓÅ9uõ5Déëg­"^îB(,uwho¨_q’ø7þÓw°\3ÏÍ GÙÿz}--¹ž¶Uâü#6kã -n×+~ਠþªFMë1•Ž+2Eùz*tHíF†¬8?g@˜­26÷¡×">³+@€äèl†¥8[$” µŠg»ÂÀ“W¾£æäû&fdŸÉU5•,¥Ò5ñFñ¸‘úk˜·}è•/n§›rïA_=\Èžò½(„±p --¦GlÄ;I—¯u*^PD©4æ^ž¼z‘+ƒôI2¶›¶‰I£]È*ä›z\Ÿ‘t6´$;‡uW7&åSxšú•èú²?éUû@Ò Ëpm~z¤YIyGþ'ª¯"ƹ~CdÄ0”wX))b°À½b‡SCÎN¿âRBÉSáÎËl°ÎFê£~gíG3¹ÍæOJ³:FÜ1Û1ê ”y׺Õô‘ ‰Ö½ €ølʨ·KÐtVœÛÝ¢ cÕ\*øN&‘gݱйiQhïã!*-¬¸#¹æDÍòµîØÅ·æÄSª—,Nå†9°¾ÃñùÏ] šŠBD¹×› JÕ;Wv<ÒØëŸ HÀ;ª¤xÐ0 …½r’+¦ã(, áñ”Žët -à[óàHG°öb³;ˆ=KLaówé_‚ÏoÁ¶Û¨Pp ²aqþ¦^¶Q¯qˆœdQ­!(ËJKg(!/'#ðŠ²»¹8Ÿ2û\ê -œL@Xqí§ -© -,ö”ë”ÒQqþ4c¼ÐÚHîÉ@œdëgl¡}ªC¨Dî -ò7?6í%sµûS­-à‰dœÌ€²æ%\ö|ò[-Ñ -hµóKùˆ‡žê/õô"—ÿÆ`ÑLçÞ%?šè'ß»žHÚ48ãAÔŽ­ªÆ÷Hƒ §÷ëw>Y¿ºCüÖ9ëŸßÈŸÿ«^,d{ “Ϭ-zð#¶Fëó°ò!prÖL`&écŸÏ¡›#õùÌ -óþïÏ,Á5Ljà )%ß0tën&—Qv{º¤] -~zÿÑg.ÃÎ3}»;Ed¢ððÑf˜3÷؃_ôê~ò—¼ën/Áî97Яjmû·t%o^vÓ™™ôúVfäZ¾Ð¥3É˾ƒÇ7*ÝæØ%Z˜ O^ö1EÎ+¦žkt¦J©—¼—ïçè<©Ï`¶kÿZ39\oyÍøçÙÔí""—~®so&p*j명-†Ò5ɨäRG$§Y›‘έ‡ZÑÍ‘,&Û>óVâ(ÅD¤¡ ‹¨Z+›ØÙ|¬ ч™ÍßÞÏ™øD=Åí ]F Xý¿ß9Æ=$±Sð[Cœý”!&øß`FGÍð)ñN¦£‡^.fíÜLv0iêÁG‘Mž—Ö*PüCÍÄ3ñOC)ä®} Û±àZ2o–©ð¶¶µhCÌ8.Ì O@î ÕöVq–6„è\’p9Ƈúru4s%ófÝžt!JÄÞ±<>J•µÚ8Úí(ó[‰©zíAd()™SW.”>“Ÿ´ žd4‰ ¹j­Ý7xð7¸ú‘4Ðb_.OÜRÌ9ïµïxÝ»ŠX‹!ØÚŒ!ãç±ÏtÛËá*^{"»»ÞãŽy%·}_IŸz–A+öKàªì£ºu^ÔçÈxµECø+E/ùT><£ñªËt^Õ|¶y•ë®^4làu¯~ÖŒÀœ}Ðw4Ý¢“¢ÍsKîò%)b!Èz%´ä_”8FVÏG4C‹©AŒg7^!QÉŽÄNµ~Ç6¥ï’]Öl´Boû¡ˆ3ªldûz8Pï{ïMsHƒu°­˜h[i‰°›#ymñâfçòl§§ûsŸëN½T§–‹J¹(¸" ®Mµ’ü•’½Xî+‰¡õ ¤$½šþPƘ©ŽZ@i¬ƒq=z/‘–Bj¾¥Üî.üŸ¹Gyü:÷ŠÂIÄ»åü·©ÑÉ Â¨D<°Gnš¶üV¡Õgîô°Çû2v½[ù|‘Ls¢+ɾ·Ç§º‰¬”eó:O’ëÊ©÷¹éoñ§ÁÓ”ψì <¦B¥S!pG­g:µ}æ näŒ0ÀÀðCÎ`ôOXwܺî2ÅΙӓÏÒùÓ6’Wd¿¯t뾂ÍÒ´âÙ~‰œÔB˜„šU§¿‡É«¦Œ‘×Ö"»ÇÍ$`àuN-¡3nÏ'ïLòp l¯}îM]°‡gŠKÁv¾µO„Ð ¬²w´ýþ>8Óga‚ŸXMÇŠ(pÀõ±m JŸt¤÷§téÕpøÂE=àT×ù3 ŒÇ³ðµ©‘é)/áªY¸ØÅSóxƒ¹Ñ ­Î”,™”öL‘¯(žæìø}¬:Nj)…àŒh—Çž²·Í`¾Óöyrö¢{\1:–kŒ°QuÑÖí¬÷”}kF®æÕ~ ïàÅÃÐ;S˜! 
¼2-×l³¯šÀ -ðʘê@4¡UÛ(|Dº qA1%f÷¿+tãg»ÚÚt¶§O!g$V|ý§p³OàîëÍEê½|k<Ó•­l»ð£÷®µ¼’‹à…™žÆ;œNß>cÒG8”$»‚c£žˆm¼±­Œ¦á?†»8 ‡ª‘ BóÿåT3ãÐ\ê3Jz|žŽÎÓüOdLåyC*>¾ ¿@:4ÿÿ.R}«)¬f·Æk÷¬›d\Ë… è/4pʯ/©1ñYç¨-õ¦ü’w¾ƒ l{æ›"Çä=J@ü,"šÄ¼t[Õ™ÞÀi´ß…âB“xYâ®_ðmèÄß3ùRb!Ùm•(N)‰_)±ìµJBÛ+¬ê™îDDì?:P讕R;™^åp­)o>ÿH:¡ÒŸ}©u:ÐY:"sc)Âk1r»ö,x|@Õ èŒº”&Ê ûtc×RlkSŽ$°+LÊ‘òß¹K)hS~ëɈWÌ:¼ý¬þÔ#רj¼|r!£‚¨®A³ ˜ÄÛ_c|»‹d„ž£®@­!È~b?_ŠªYëÃ<ŸCŸ §ÔࣶLF¾k8#_víÚ#õ:ÐWÆØG*²ÛÉã(¦÷Ö´®ž¢zÿ²Z?tfŽk«¯%s‡¦ø&7LQÆ¡@`öª[nÁa>‰ÛŽõvÀ~E!’Ì’z´W\qNJeîKÕ†âTGK~ÇÖ*V~gå»Ô -ì”å¶%`EÉ‘ïz8u±Õ;ú½y›ÌÈTi¹^í»1Ïn›ÄøIt -ž}î*R ëe$ZDxúô$9{ð'©…üðÞlÔNâ“‹ãWJäÕ×<>â‚ñø’;‡ wÆ•q \IÑî4°‘ÁÅ3¿CVÈÄVñ5)²£_«Mxª‹ŸåŠDñE¤¡§=¯úR$ûõZ\+vq'¸#æ}öÈîÇŠ¼:Œ UüAeCOaR ùñ¬§|þö´°Ö9-Ý® òµüa{Š ˆª¿v‰€„ˆWlªº/ë Ò×@T'Öb‹ƒ…Þë#Dæ CÄxÒ9ÀäC¦Ù!#Më> aÃÆq8ÙªEÌŒ±ûE3Ž~‰^Í¡ÏR\è¨^«Ùô¿]ʬíÚŽ|¡žšêž‚ýñèt7/MSE…âb>g¼l°£øf¯§Z»-þ§êzˆMµVcF%°ØQuÃ;ÖsW£^ßjmnr#Çàq}ħzÞjªLµ•ôiÆŸRrÜ)„gíéµ+Ÿ+>ìå žNÛEmîoZ ãØÕÇȘöÜYx0DXÌýjŸ,lmtçŒH-_9\ÔÀûÙÓ_s¤¦~e¢;¿õZ?v7)€êG+Ü{ æ­E’‚“Ý`ÉŸÉWlnš† ëØŒà÷G8&û¨™õ»|+–ÏLÓ*­AŠB9cd†ïv~”#XnfÕ@Ábx•râD{RÄ|÷UC¹Z5y^ÈéR¥ÿ¯*P΃r.€_%Ã8t°ÍçœBØG‡Œå›b¼žëÑz*j”œÖ -Mçt 9Sž²'× #'GŽøÇìð^ÿ $Á¶Rü.H*pnaâÉW´‘®z—Ò÷º=À™¯/;Ä:‚›Dz[Ê w,^Ó#êq -8‡|ªô ÇWyçcOÛ¾ñã¿ã~×ýÿaQ„ ‚•Šg9Yÿõÿ]jf7´)å½]ÌfÞxFÆ?åÄ8HÄÇñéþLt[ø³¸pØ©ŠæÜŽhL¡¿|73yBðƒy$”Ø?‹‘ˆCñJÓ[Ž~>tN̼«?pÞ£z²K­w ié[N;®¶B²Ò–&âˆ)S·Ú¼ -²òÔ2¥ 3`EãWiÓQ¢hæS È -hÁ(ÉlÛVØÁü0H½æϺL’ˆ¿€ TË5Š"ö%,Îœd7s}øF‘Ôu/1˜k— ]‘uÚ ™‹ •àG½†å„†‰r½`°æø-\Êâ,k*ÕG6/Å£Âøà äÿ]]±$´þ/.’¦”Xó’J<€Ž'f ©€]CõžU’­’1]Ý)eüa’ÎGýMÒ1fÕ.õÌÉÒÞÝS»ãUx¶£m5õµg+q7€U©Ä’:Âo~¶ª›ÜÙ#FÓvÈ>g¬0úY€‚³Ã£9*OBÉ_ µÍ¯8'æ¤>e={ÄÔžä®cn[žÐ'@ö>±àµA(á:Ã¥ ÝºÞP˜\Õþ\¯x½ˆlhYÔWÿùbÌ„ƒg…¤7.&ÖM…oW.½ö"’³Oº–ƒ_‹ÍuôÁ DzDíÃQ—¦å{„÷›•N–P/žz:‰?(r„í(Ìee­$E“k¡ÿ¶Î¦´êÔü0 ‘[“ÇãÁß2{„¤µƒW31'Áœro>ÙhßãÏn´$Zlq ³¯ÚåpbI3«B2é.pa«¢n P©.:´=—R^!Qø|ôIXM˜hOé™Îˆ úTQ™² d2z Iäa¥îÈãì1*e˜SÒ(v8äŠÀ…‹wᤱV_€ÿëYœ¢<ÒM¤§’(ZÒGH£ym½pÙP¼ÃµõNnr,uŸ0wô ösx++Îz› 锚æ'Àxí(Ïuî*ˆë)ûÞn7i²ž~ݶWµ2ÿ׌D| q¤·ôV¢ŠAàmH[½|(¸ŽÌÎÎ8#{¶Ž‡ýžWƒY”N²bŠ+=¼ï^ ÄLብv‹Zu†€Ÿ‡u»'¡ÄIõ®g"tÁ|½DÑ-"ˤº‚I×f^ß -Æ/€õu¶ ˆ|×òíëëéCˆS»åfË£$3¿¹ËrDBµÓ}Š-§2? ˜ê €û7Ñç÷*ù÷šø?O(Ë>ÅÉ| ¾Åã•[ÞÃkúŸDêØÇö÷¼ö¢¨¹vȘfÑOLV9fß# -ßz¢è¥¨\¶Ç¿Ì½#yK-é(z¿<8k‰¬#óˆ`SÜ&3ÄšñÇã­“ ©aFð¨ïÓ¯+²=GI:Í0øä’*E»ïý;š2 ‚çî‘o6 ®TŸb>}æn˼;©\QÕ'%&\„õÞOù./OP¤îwZFJ¤v€Hv³‰Ï騅ŸôòçÖU£cÄÑôèמº`0+RkR«\oGI¬–5À1Îð{o%aX=»~ÜÃpûãc÷ôDŸH÷þTïúwªbŸFMk/˜Ü‘[6¥Ã‡,¯¬$É`YçÚ~´™UÓUçŒ9?+„ìªDÿ¾ÎêYßìs0Yk…˜k£j€½ÿ¡kôïIíþ+dïO©KÜß÷Œ?k¼hÄ åÐË¢—°òŠ2wüÉ».q×[Ü¥ïqÄAîúé–ûè°Ø+q³÷‹ïA²=x­ñ d¯÷ú6~çF›?DÝ-1NÌ‘"Ï‘–½R)󅉼¯ŒB‘{ÇGÅ8`غc¶iÙbÕíIÆh6}Ì6ú!º¡ißr3lôžø2÷g#ŒA¼Ø½ã¶¤,ë·HÖÖ sç{M¸§jò'9VK¥QÎA„»û͸Â!ü_jsÍ”)ÍJZŠIˆpF`xŸ’8B¥ÓA¾G1ËÍ]§#H€5~-‘ýF ã;nèËëϽ¼ÈÆ^»ãzÅŸ û‡ÍʬÕEÿ[·UTEâQµF÷‹[Á€·ßì[­vÝÍ¡ä?¥>ÿ•­=Ók*”ä²uRT`Ž«èwÖ„…K¸!…ö&gåÝi œu>ê¦ý£uÈüâBâà³7Á˜MíÄrêÜ!5B.ŠR•ùÏ9:Ȧ²ZºÂ¸?‰‚¤[X³æÞõê´fÞ";¼ýü«³EäBQoî>}¥N°üJ‡òœ Ã*áNwýdD'ýÙ&X›y÷#hf‘$àÀAºÎtaSOâ°pô(á¬MßîDR¥DK1¶Nßõ㛉Óqþ»”AûH^Ã5½„?Fßr>\Øãª|mù¶wdhnY õÃ`¤e¥bFøIdú-H/bJæÅù—QD™ÆùßDoŒ‚ðî -D•”m…K[fJå¦Ðª×ëwÈ¥) W,M‚k»,)SÊr»íŒYI(¤ÒzßÉ“KzlÆð‡úù»µqr¸3j jêë$l.|H -Áp›îÕåiUým‰RÈŠæ%Ae³™ËS3ñ¸j‹Ë‚ fÒAÏJµ(3’©qr?&fÓ ü09€ç@ê5$>º(,xÙ -<Æô±X¨ñÇÄÇ¥ÅßKT º3J£î&XM+ @@§2I€Úµ;0û¤¯<0>l -—¾SÙÏ­—“¶|µd -!{Ç)Ô~ï!xsoðlPȲ:q]UÑ4·HÏ®Ë\ÿŽá#.¦Õ–CŠ,rSÒèuËëUA±•°8äjg¶ƒ·ƒÍsgÞˆ…)ŒÐKñqëê_ê‘ýw-š*#Üx%váú _çÈ‹Ð|ðOIHvgHO±üüðÌOr¤ëÇg©)$ùP3ÁæˆsxŠÆ€qIn®œW„÷0ñÀÝz ôÌ°§†èážT¿úfv—ª!׽⃂­—ŽùBµÌ µ6ÌBĘ]’·;¶ä¦ÿfšˆ¾ÐC%X1àõϹ21&$£kÃ:ÑÆ’ži󻤑w@Ž.@뢯cäÞ×B<®7Ä–L Y¦Ûé% „M[o°Z/ÅÚ+»pÉ[ƒ(o¡ìe­`M=Ù6Jx@µïh%˜wWëF'ÿEåžø!!M d|ö‰ð²â›ysØnp/í?½ÐMk›˜o.•}Â÷æü<)ª-~±þÃQê ŠEgÐâÜ{_x”:jFŒ2}¹£Çôd“µ²}6¸›¯ˆ¦ Ë¢Ý×Flýã~¾c˜¸ö%Zqôr0ØÁ -Y,;Rîh*ÒGP¶Ü‰$N -6ø’¢Ñß+Dþò­ñ÷7餒ˉGY\fmÛI+öX¼ïªÆþDWª‡š¸ir䱂%së$»¢âÀÜ ¾Úƒ¹e… ìA@=éú±«V€¹²ÝÍKØy&˜(ŽÊÕØì¥^Ì. 
„ö\é­i^"¼íÌ\L‹«Àüt,×T{z9š{-ÁO˜uÈQ -³AfU[%¨{5Lj ñ0RRÈðPÈDCÅCÁ ö˜u cn»Õ¨!âÝùâ `ºüï(¶Ä<²ql¼JOäq¢œZ ç×ÿ'/ùQ"ûÿ2w'I¶äØ™ WÀ=Øè¢P(´Ç4Ç%Rã˜ò- v_øþƒûÂÝÌ<ìr™d -™xW;4§ùtê£-qV'OÙ<ùX¨³§ð:N˜:Ûs÷ºFœ åxüÖ(7…l È5sÆ3{a±ÕK¸,"Iê–®uw°5%¹Ø^Éìu”µñÆçùø‰ÅUmE3 ò - RiO¥±.„$ †ÒJ{|#–ÚÊE¬(;í.ÓïOi#¾…rGbæçÕð[…“!pŸRá;Á¯ŽìSYô>—ÏCÒŒ'J*ñfº#xÖ¿ˆS~#{õ§Ñ¯þ×ÔTjŠšÙ‘A÷•Î÷A«~„‹%v§Ô’ˉ޸Pñ¨Ó&óI“ï¤\¸.…¡… ÛEªxð¯A8 ÙÌ”ÄDWXï9 ¯N¨%z.‚41χ–ø‘™;wkác‰±sÀ¦ÚKOœqîMY©té+èáðÌÉÜ‹ú…=…)²fì>çòûyêÖg¾·2˜ ó8 ­Ú8q?!óKvg­ÉÔ4Žˆ¼Ï£sO‚û7RfŸr„o•Ìá&ƒŒìf©•«¹Ïý1_ÙÙÉåŠêÑQÚ™LŒÐÆ(AgH®Ó0Úu,"—D1¹„˜}Tm=ôóukÉ€üÌ&¶5€»•"¼mwÄTûNô,õð ÔˆKyÑ1µ¬]:¡ô3ヹbŠÆ´ŽÃÞG‰„î‘»øÕŽÁ¤/}†8mFŽ”V…æÞÖQc»4? e0ªÉNžå^[VôžzÁ¾Åκþ½²êfѪCì6¸Ê¥æ­JÝß^‘€¯»CcA‡¼ñ‘–iaI`E·0DÚ–ÿ0ïiÙæÖ—è³Zƒ0=å"üYíü<—ÎOÅ°ÿÎ1'™ê[Ù“ŠÕ´5T[BáGÇ[ëVF±ÂOÚç™\âH$æ<ŸÞýµÃêSƒ£F<]ÅCcŽ‹Â¿4xF9-c…[£JÑd£ª„¼éÔ6ˆBëmüí¹ TDë+BÝ^jø¹y½Ú3(ð!01ÏW\Öng!Xt¿¬¡¿ †QB¯3(ÚnÎç”â¶3:bªS©»%4O* w¯pi -îWì[þgt̨‡ökÌ#m:?ùàÃøVQ׎6&ÿ r» -Ò9•¾‘Uüb¿ÿEVzóâ2Êj -ÏK÷),Kæèi¶Â,ÈÑz)Hû„;¯wêqL"ôñʨÕÑô„ŒÅ¾¸t‚.õÍ;žK"­KõºK…ʼÏ,ùó%ÇvíL10jVÌQ M,Å,Õ:«Rw/1!CâD?Glµà7uà8šœ@’§p@qírl%N\FPõP8¿·öo¥*ÒëI8·QF9i6({ëk;¢œ_/Ô—†â´1·Ch×Vƒžtþ¡ãÔ`hÎ ]6Îu‚«R_ë쯚TÑdXu©Zz˜¶-©éŽ·Ä„³ûŠlüÒ&êFGaróv¶Rìu½K5ë¾ÒÏ$5‡¨q£,ˆÚ¢¨FDÇ>T~>;GqÃÔÝ&Rððý¢”gR`“[kÑ)öýƒoú¥ _¬˜S«.NxGýg8õGp|Mã,7øùªTÔSþì>/…$BŽ„ÅJàPY°Gƒ¥Q$9­²!·ðo­ÔÖ(—ß=Ö«ÞÆ{DÅsþijúGçÚÐNi€O|ü¸#-á§+–­PTpdï¨%Sæn•\V'× íõHû Ã$¼@Tñ#ûkgRϾLå¹×ÎÒ¢˜QL0Í­0ôVvìùл®5AWObÓçkȈüïÇ,Q»J×û¬SÕÅcØ‚hÛÖ¢˜ßtæè¥Ú2”l€èžýcN¢?Э¡”ðÞ¨Á¨KjçN¾ß;È+¶Í<Š·=¡'š °Q.uo‘ç˜YëéEqƒÌø잢Ök© ²jÑjq¸ÅÐml_ì֜Ѧe¬s®àRLjzÑô6"Ù`£Ëœ§N6¢6s®A0ÀsH`»y9ö˜s#khjõT÷qß9ƒo2ƒt‚óÅlu`ÏßQ›9R|¿Jg™» ¬=¾¢=åÇ/õBÚœjµZü[F<‚Jt -îÉËOn1’Û,ñħÔrÄËûþ7Èï/™úgï¥÷wt}-z¦™R+>¡zÓ®GÖæARè0‰‚nÙRØ­Ž¶šñkùvpºq¿—¯µâµ¾6ô“›­ãì -9„õ.ô‡Z›GhEtÜT6“¸{…9@y×ÄZ‘ܡ• ͯ EÃË­8¡`&³"Œ8kð"´œxQ>ÑÐÌ;úcŸÐuãO®˜Æ=zMzz?-îYÑ1Ù‚(ŠOrˆ¸W¼Ó¸¾—¿üdùuÌÛå~Z0ÜjXerÕ’¶úˆúËe+`ÊIY‡é|8€y Ñ\1áE3i^²Êe´…‚#Z…È»v(l¼NS&ˆDí¨ßQYá¿…•£P‚¾¿ ^©7_qAªz‘†$Lꈣ‰ë/Ý9XcýŽ=!sa.´2`‚t?"+ ±,aæ/p*Šà],a殆¸|“؇²‘~žòˈ…ÍM -5¢\‡½3>é|«(ÌcèÙîÕi¡q õR-oG;nâOuõtPiï«­Kµ-r{‘S+‡+½náá^MGQY˜—ó:ÆûÕž¹!¸Í ‹ñ±ÅG&//¸àßñçÏâÇ_Š”ŠC153õóÔý3¶H¡ýõO•šÓmÖƸΒßPß<‚cK96n¹[|Gl¤ÛWõÖ êyŒRˆGˆƒU|®µàé×ô3³³½2 ¬, ±•HD~¢°T5D@Ú&¶³ -»LNé>qð¼“H(=1V9®Ê|˜^R–x~ëÏyse?ZúÆÄoê«Cm^hDâR­>ƒ„w]Ógé$¸öÕÖp÷µŽ£°#1"ÎO1Û»ü*†*ñø±Ê wš[ʧñFIâs§ÑþPß@ò4oÏ7>RÍ%˜~RäÁWnOª1'û¿'‘RI†³ÅïN¾ò×ÆÁ[ ªé$ö¡Þ/Ã\ K–÷Jæ{7M¢š&ë¾ÆDº¡3N Žš•>ÿŒ™ŽcùQ•$«C_ï#IQ ezã|¹N*êptŸ·µðãt5©÷Ÿû ÖJ…Rž¢Z?Nš[ØÝþ…'†A@r,$é–†pôY`¢P ñ×Â]˜CìÒk¯‡êÉEâÿr6ë9w #{½™à=éQäì=~±d¸ÎVyþ–”„œšìVŽ(nmBteÒå?>0UÁ¶ cíõ%̃¼ze– ƒ²í780†–a­à2;ÜÌiæÕ‚°Ußú>|éù•îûS"Sõcý?qñgOæÿ2§Þ©¼uhýqì°Æû-߬±#ƒa>õõŽÀ¥©~^™XAw -T§œ,HËÕ€‡¢\df¤~n!¨Óˆç!`ÿº€wé\ýã?ÁÇ—FojÔĆxæªIi?Æ]|tt wÏS±(ÉÜ”2+ׯ'ZB}ýJÛ!Ì•¼;ƒè¶)v³í0èÎzò-ã34ÇUi•†Ršß9Òê!3èÑ$ä<¥.mÝ | Ý'¥ªAÆCeU¶pO Šy±þagV][#Ø@àuPIÞZ†pà´\êýñ†”0Ñ÷p%êgg$ B6÷Q¸B)e^{¤TÜzÂð›+ŽŒU·’5 æ£fÈ¥ÑJ%F¢%§Â¥6}Ä67>)BÀ“A=&)ó“ªö|ªƒ&­ntæpÄpZ~j(»»¹ \”GÚ`Ë5tk=c¯Â3rÁÅéwðþn©;Îkû™¡\ÿÄÎû;¸jŒs= Y† -À¥äØ™7œˆ~LB‰ëfCˆ>ƒboí©V6jHãc ÁÎe¿‡0 -ݺ>þŸ¹`T•Cç™c¬øùÏñ¤h|œ±Ê†eJ ÈíìIÎRÅíó;ÌMôð…³Rˆ½Š¤bÛ¢c·™ z¬F úGï.‚È='Wµ'ü-–·oð觩ý@ŸïóVÈ0ÇðOtÅÊÏeÌ5ܺÓÝ1úªDÙ', q¬áN^~ô­±Hzf¹:ƒ2i»¹¹ÏW†‘ OTæf¢ÅJ6nd{ñD8ªÄbkÍÑb/¹á€p"7¿‘óa#Ù}ýŒ‡¤‚›ýèÇmí•×ΰ2>P”ŠA"åψ½ðñQÎ×Q!”ê E -ÿÄM¥í#¥¤Oií¯žJóW×´÷·ß›ï xÏiáqÅÑH6ÿÕ Q%«Ä} —"oªL³ ñþÍw_WÈ&æ»6i6¶(=Ì,P'(õ·ê¸cʨÝìù8 ž+㽋£˜#¯þ¬â&“¡»ÎÚèÌ -0¼G:çvFæFI¤nˆJí<Ôψ¿½ý•YØHO¨]CSó™ÒÀ-þþ/©„Xìà‚<øjlúã_|eôÙ(!…S¢ŸNkSöÄfìhç(çbgMaJE™-ÇI½ƒWÒ É6¢¯.Fªêcî%T¯v-ó¾lsg2­);ÆCˆü>ãw´5†Ç,¬¬ÒÈü¢j9Ãr+‡ÐíÖGœi„}e.ö3<ÿ°2jH´á±1{6nƒ°Éçá³ýg¼¥‘~äŽþY^Vc1rð†D†ô&S¸Õ¥~x{KÔ"ÑW_Fg’B+äkoþ (ú„Ð{ç/ó›:ùiKœ˜mŒÄþ³tA@âÅ«CÑpI˜á¤É’Aáá5Q Ä>}Áèi'8Ø⼜q† ÃlþûóZ¿Â\I®£ïý£îG52N³°4(^©Ðâ¢bvÀÏpÜ®‡“|H´h ‚!®††¸]q|ÂUà~vCÌÿ‡ŸTG¼[ 9‚êÑ3§æù³„ 8Ý:)ÑWf,4·œ+„izR’ª»å>é:¤6 êžç¹îÇý‰ÕÐÜFð½ ùܶ³I°×‰FçFe}F!Ôw¹pÓe;ëRs…GwŒÏ¥ØÖ“x#ÕŽMZÏýÄ#èLÕxq¹)žº†@ã[}Œ&ΑþcĦ ÑV~®t©ŽCMTö½¤îç¡‘Ö_ÏX¯“£Õ€÷BSÊ·TIÁÃÄž»<Í%eÙó A¬>wœˆ0ÝòTÌ+±å降¯‰£Œ£k«0Gß\,;¿ÝŠ<;rΩNk]‰æù&"¹í\çƒÄl 
*9óJÉhh®²jS3¢]BðM~|hÊBÅõqvôlù$Qµºt¯ÝB‘[Sr(¥>tÞ«ŠŽÊš1Ðñr»Ð £Ñ•=ϯ0S§ ÁžÖP´‹cÀé®sNÙ‘<}áb£vƒä†>']Þ•ž=-òÂõ;JN€9äí‰ãÏÖ$  l¾Ó†û¨o@†—{HVT¤TçF¿¦_Ÿ'dŒ·và)—Ra£­XCaÓô#qÉC§(ü‚ò+zW„K2ÓgxjSŸIÿyÔƨTJÑÀLFQÚ]ñÇ—Jµ=ѽœ[ð€ -¼YbæKQ¦¡œ!ÚsΊ± &O 9bÄKp 39ëw”di„þòèP!ŠFff޲㩠º3íùÅ"ˆ7uç•h˜è°ŽÞ QÝ~À…;¢r©³L™‰_›@ÃÒª9ShÚlJΕ'¹&¶BE°°âj JsÈ 8èÅà5ÕåJ1ÓhÑ^›ãÿˆ£$®OÏ[&¯4×æ²ò\y6Ú "$Þõ;d©1z•ÓžþñÆñ±B}ÍûvD ^0»©½H‚*”)ƒlóJà‘(ÞyæOŒÜ?µÉ©:ÿUʘ/£×JAÖ•üºP~Õ’TÊv€¥SÙãb±'éØkÂÊ«¶¨¿VÖ©ªé kÀ®t‡Þ RT5„æ üáŒ#ÄFM¾Tø#±œ.Ã(õÆ'êªÏ¿f裲c½Õïôøذخ$gƒ%‡IÙ3„·<è9ª‘^ŠžÃÝKàÑ ´@§b¡’˜÷Q$˸öz*œ ˜£”NÎFš#j»ò¡Ä~s8Ù…V8½¢Ee•Œv ¸éw¬‡j[dµ©š•Éè„ËÜ¢ -/>TKÂT4-…T·stÃKî™Òg/ÛüέbZŽz¨Ч\ÿY§%µŠ‰êtÎÐÍÏw‰ßöz”Þ¨–Boî¸S‘^L¦±°Ä5„±KÅýá$ò¨MÔðçÿI×æ¯þãÆü•Ol3sÚ0*Böü\‹::WÕænv¦ .$ùBÕâ`IÑ'ÁææÝþˆ¢…#_Gó]a -€ÌŠÙ}a ¿ÙÕøh 8Ÿ_XÔcSMœÓééQ«ÐÑî[ÕTFZ~$ô¨ƒ¾YVe›ÃqѺ]?’_š›ÕÇ 0½ƒz3Röätçâ˹—9:ï;Û]ð2˜°)瑈ïÓì†S“A¤#‡VÂÜ#ºSQgÛ×Ýdj“œuñPä00ØÇÊèS`&ºx¨“ÖØ+ BCN}þ%I›Ð–Öਭ -SM»JAôãñBÐÄ£g¶‰¾ÌSBÍò_}åꌋkéÉï DÉx@¸Cþÿç{^Âõíý+˜|NjÑDöæh+,JóÚ3˜Þ‚G™3—Oð*Rd5 êÌÖBÛJC ¬ (Œ‚ßi/".†«šxðCË©PWJÎpQÎ>úkÏ8XŒÌ µ{ýLá‹wÊDWvÂHTñ,¯ý]7Îi.±à„Z¡ÃÎÈ„´¿¶"Ž÷üùOßy{Ó%œ/ð½ZdKÍé¥Tóa½Órk8îm•  Ü^¯mÈNÛäµm˜aº«o¡–zç=¢ˆ’Ag P¢†Û¾A! Ø`+ÃÊrsQÏ`oÇ»E'½66aNú§E?°î0/¤ :-.9WÀ‹Y-c‡{ƒzâÈGAæXßT§Cä3[±×ü.#ý*@DGƒqÔE -§õ³wêWçÚö§¼aþ¹ò„ÃÂ_  ä7üÝÑ+F¥ÕÎÚ˜Lö„V¶+sÆÖÑ -¼d¸Ï–L¸"]4±³ÎÄ -tÁF÷H¿Ö®Á¦%Î7­–amyj\çÚ¨8@½ˆç+Ÿ¡˜ZP×ï]¹#×þ½k„ - |òÏ\‰´bJÕ£¯1Ã4”úØçþ«2¤'’w³Wí Õ3;äSckiî?1Ͼq©¤û<²S1j$&¨¾µµ˜Õóè¥Ìì;h¿QZß#ò‘A‡~©p3—‘ñ½@T[J>å©¥kdõcˆòêϵîWä‹:}V— ™³íN¿jm»g Êxzçk í§|¿»Þqoð®½KÑõï¾Ì±¯¼EtœZ¦ƒÍ’`+9ƒöHé?’­#¿ƒ¥»7S—µØŸù@7½Å·R—j -I:`†t£-IgüzÎiTÓxÌA…èj6P¢/VXí‹xhû*r#.|Iž®;÷ ”gE'寴zÊlŽ[e±óÕ^¡c?j ó¹ÃoZ•!¼ý çkžÃT“tÁ)æÉQÁ£ëÅ*huÌš+Tj¿£4Ôqv£T\‘Ÿf§ª›æjgÇ» Øæ—îuÇé&²çt½ñPÿWl õ:fήó¤`<§è§N&JŠþ+ƒb£ÙaYOsGΡٞPò52.ÖJbÁ³¸s;]+üObÑó€™C¬ª„¾åoeSÊ+rÙáÏÌ.üRFI‡d]‰&Ó±•—ˆV¬tˆ‡äHâgÈFrKBbóд05ÝF~%ª=œÔ ¤uÜÂԸ˼A®ŸA„w·å£ÙwÇÑýH‹f Qõ=ìFõá˜aŒÎó÷¯Àߥšžv_üíÎ3:þº¬éÆ•¤Gù+‰ ß¼ÞÖˆ–}CÓ\!?Sukõ¥R™ŽËüVXäÖe¨@×=ƒ@vÎpò>Fg¨AÀ.š•†l8pZNÃF–ºÉã»äýÍÅ¥ÀêaUR©ôS6ÿJÃn‡LĆÑÕ)ž‡JœE´®æ*£¿ÈÐS€K¯Ä?·Ñð>Ê&ñ†Ö8#²´r >OaŠœ6kÐÎH"¨+Š_BîrˆTB¾ãC¸š.ª3Fåö¹ º¶Â¡ægD[Ðß'á“j;'ôêîtX bÚÔU@—œK_ 7‡Œ$È3ÄuþF¥X%ç8ríJ¥ ò°­ÐÁ½_ñŒDBK=6hãb ?ÊsÕ‘ÍBÏ´Ñ‹â§Ú@ a­„&­S½1âÁ‡¨þÞ^–C6kî'U­k  SɃظêfdQ$ËÓîI£Ž .÷éà N”™`ƒg“G+1Ñ’%髹s1‘%öÓÏ야ŒP4ïò"à¾^êìáûFG+´ô…&qâ\Ç“Dè…Íça³63¬åÓ)Ín#¼*3–Fræ½jõî[â;¦f'8ГwÞA´Û㛄âïÓôëWzWÍsi%!ºëk.Þ i9<£ú¬y~ªýk<͉ýQ2dØl¡8œ1w=ˆÔ,€²Ç·?΀í„?¯*.ϸÃͨaÎpá¼'5ÄA@")¾‹ž©‡&Ô>ÛC`ÉjCµ¸ÏòXIwßÛ“OÍð± ^4üª­©ôÉ•¤­•‰ wh͸þPD:‹À„Ž9Wà…OpT„©Yµ÷H{Œõ½o'.›8HK¤ÕY÷Üy•uPµj?ÂK‘B_s7H»•¯Í$,cûíƒÑÊug£äúQCd麹!ÅQùÃkªºÕ ‡Ô¹Ïœ4¿)?7Âj«fù|Ƀ5ÝQo&ž0™©ù9î¥6¼è±¤FuŸQ-T#`¿O5Ó¯]þQ3\è˜ká®ð¼ƒa¼œ­OµÉ+òGwaÈô¯Øxõ}ùGé!Ò9Ýp°!ÈRqjíZwã?Îx.° fFç7Ìãk_zÒ6 õÜ ãÒt"ï×ׇT=6'¤!q¡¥ýÕÆr–‹Ïìˆ|OáÉï& :^ö×%£'fŒ™ÄC9yj&埧ø3¢¥]¢1d÷µ÷1Š§á•ën·§b{”ø×ÎëÝÓz—à;þfÀ‡Ÿ\C´…IHiSmRšÆ5$6³{úpmò2' -¯wYǤÎèæŽ|´m1‡¢å -ºx­©îó¥³Ñ{TÃ÷e½‡_½•ÙFLçŒQÀZwÅ -â–VàNs6b ó].¼’ Y=š9äŠF›¨ÝÏòBÞµ|fT(”+!à$‚w®A×ûq6ógȺ+U í=Ëçx‡³¤B]ŸT‘oÖU#!mFM–µixŽçuЋ #”Dm¡ó {8Óž•ÊG1øŒ‰!ø»-t„šd1Áðü•]_šXäS ´º`ÉæŠpÖí[ö5ÅBÄ ûœâ÷V[,s=’iƒ™‹ -e±Ýï˜ÍfH‹—3ר¹éG›Èq&À¢žûåø`9á–j׉šE¥$Û®®ñ‰½¥5ÏÇñùo-h^a4~‹rôëü+1ÿ™´…éŽ.»lìÜ_4»æÅNLÛ즿,{ç׎.+{+Ë#µ[P´x÷Ö­CÖmÖnûµ@}²A#ïv>K6„ª -%†þGb+ Zv.*¿éˆ7 ƒAçÏÔ ò®<ϵ¿è`D·yez <„ç2NhònW_þ¡ÚáÁæy,q6dg›D×2 ¹™Ñö»6h%ÁmôÚ57Ê0äæÈ‚Û;™ƒH Ë^™ßÃCFážï¢Ë3$¤RQg×ÐÊMß½Ä[*L´û^Ô>­z¦lÏ‹ZßþŒåœÌŽžÉ«’_ÑŒsŒ÷mYļ_]®|…Ì™ƒküËòô h-YÓx‰c÷ü3JÄs/sŒÍä§ËÇ¡ÕD]×Ù^Þª}V[¯zÍÃJDêBAT°¹„ͬÏ'Ô`lï¾\6TЮy|¼1Ï_뇄Ií.E”'´‘´´]!¾bFbvŠ([=ÄoUhù\ ùv…~á5(­âÇã¾×ôÞ"A®T€ Q¡AžÃ¬ªƒ/µ`÷ ðt}Bt -#màLÖ ê¶™É‡8&$I/?ä+g÷|¬ã‰Bì?ÒU$®F£îXƵ#GêœÍ8=GcX"Q×ïÇkÖ%¨A—K…[iñEü¹ûš¼ø3Œ>Ø·çR ?Ñ OGÐr÷š/uK™¨{à2p´¯ - -¢´ %ÞVžŠ#IÕžbY«-h¤ø(çR•­¼:*—•6ñ qà¸S/Ìžeµ? 
e£HèJ¾†à¶Ùà$ïÒ8®$`•˜Êþ/ùG7¥î‡×Ç<¢|LÁ9Ù&'üÊ9w2“q©2ÓÁ6…¦¸-»ø‹˜c»=þGÍO??¶æõµ2Ê™:ìšÝVSêösÚ66aNÑ×»±½áz§‡ðãü[u{¦§ñÑŽùx‰KmjÔ%Â’µ“Ä̺;ÞÂÍï’t_‚ÑÎ74|”µÏˆŸo¨úoÄû¿*³¸UÓÕjöµ9o¦ÞÍ:p&zAﱶf™:¾Üh… <áÄ’I^+ÈŠÛé³*܈WWª"ù†ÀÄ'þE©X‚É€´2‹»#¶vG_Z~ê™Ä†ØÙ#xÙ–NѦ—†±ž9VP»¸è•˜­” -9myÑÉ–ö¤ÉgŒ×¡€”fpšÙï”N( ÷ÜÍà—å±°G—ûæ¸(2FÚÿ*Š°| :ëÈvA€oÆÉÇöòá´ë˜ô{ûHB½ŒŸö²Û£*"©iP^†¡9õ¥Ú½ ã@u ‚„écâªi¸oõj® Äd„ oŒÕß@Ú¾i!äPDøqAÞ#âÛ~{~ž””h„÷ôF¶™UÙ¸,Àȳ>ä¸ïÞ£ô+æTD¢jΪò÷ŠÖÎ]êâû'eùìü±jj&¶ûñ$ÿ•A=ŽÎ¨ˆý•ñrãH~=î$Zs¶'œÛïoèòæâb¥¢}È—½ãኩœ¸zK+O‘ÿ,ªÄJ•œdKga -„ô5ätN@R#ÜØKµ}[Eeþr× ;tE'ŽûLÛk+CyⱨB=8MÅgÍ7g6¥¥GQA˧¼rG^ëX&ÜW”p}OØÖ^gèå¨ÄÛ)fAO·¼ô^nµ]#7&àO*6ùBzzý冽‚9§¶›=æù#rû‡¥¾ÑuÍ·ð«ò3Fá½<+ âóI›å°@¸fžZV -ןDáˆF”K–ÿŽZA؆Îì7&ÅK`Ù‰¬ÀpNŽSë~VÅVŽŽó‹«×¡Û„ˆNL…ÆUˆ³L«£ûª•¿³ïfBáõh,­$vž»œLáó)ô=„ÍÅìú`Ó(ÌÀëûÅ5²Â•/–8_dtŽ_jº÷#ÆL¡A”hø6«$€uß~¯£•Éb¢C¸uPM£© - jSN¨×¯þ8ãE“ÃGUËp>Ñ(W{õm^6Ï=©Åç%‘7;/¼5D\«4ü4C⛦‚òÔc‡´ Ó‹Â]Õ&AÿL#…\ƒ¢ŽHz‚ZQZÖzm¼–$úÌb§ÜA¾øù7u™ß“â…d…Á=CEÏn‡,«.ô™ú&·3RJ»²jz©o„š“ºÿ…Þ\ù|vcîþ\Áø•sî€i’ì·ßƒb€JÊGŸ*ž;Б ®°“¡“ð¦êS¥HºñMÿU—K¸v¢îJŠ¡ËªÜÕ>°¬I‹Ì8³–*èxV0xEþù»U_jÆ&oß–kïüÀ1Îm%²í†[p™}|¯‡ú´3þ*„õ8&#_qíIdi˜Ù‚h$H`éñä£V‹ÊÌ #è¦k´ë¼¾ÄÉ[{í×;ð«–³Æú¡A=õÚŠ»è ‚ t=™í“"îþ¯Rõ–Sïð¾Û øhÃÝêë™ZüÊ7ŽdŠc¥º«ðËì¢DÅÀ¦¶ôÞ¥ÇÎNùD^¡ô(MÛqØ⼂œ¤‘9¿ßJ"ð¤‰wÓ|ÍC¸rê]?ë[j}*»«¨õFý,î'Øž4ë~B?fØJQâæÊXî·9Ü9± ¡/`1^:§ÂbÑÈUf£óN£–pm¼œ'”à6ØÀAèÁE÷œïOʳDY:Ù`‰ÍÏIõ“KæQU9ÒÖwÙ÷ÍTüøøé‹ÿžf÷-¤û´ÿæ4ûªköóGûUŸ?:qUKJtãì&@ÓùhÑjÔÊ8ÓoÁ’u`_ª«ÿf¦I÷¢®šO® -ó©^wνÿ×ô˜Ñ_ä!ÙJ”€nä\¤ƒÂRïÜ (£DIt„z>Ï$ýïIºb.Z̾´9öló…{¶ù;óðKè:!B2-¨‹’Ôk&Bç爕×U~l¯º\ ðà}A•£ì‡Ât–±ÛþÇ‘æG¼öÜ°Çct³µ× Öìy¶T”ï7>Ôÿ¦}èÇGü•Gl|¢ºWŽr¾R¤ 49¯þíg#jÑ[?þõ6̯K·$}›”ocåá–ñœ›¨*µ7 •ÆW]ùzdÊ 8Ëüš{ ¹ã2 }}Ô\Lkn-zrkТs ‰FO|Nmé -¸”©¶óE;ë@\Kt½]µ qÐÕÔ²UÅýÀ¡Ç%;vm¡sMEï.°%KSlCl;ߌ }T–q ¿$ ·$LÚ:œŠ©F:†Å¶Œ&܉xöñ1ñÈdR°¯ÒÄÑ{ÛwK[wƒ_4™ö'Ü®'$ -‡2./àíü ‘lú0ätFkBm• £KqBdí•*iãj6…ÿÔ)ÊB=jY_Ø”ˆzÛ¡9wsYéóä90«×@êùfäц”ÇGÐ>š0G† €Ĉ6ÀyGptÄ7åZ—Bvrfˆ¿™ îúxãíü¿KÀIS´íŒú£¿ùMµMVêét€)Âý‹ùñÙ†ñ§OcB³©R>9’Ü¿™ ˆ£èaÙ õOØH ag× úÁ„,})ml»«QòÀ9„8 -ÞMdúŽÔ¶ ƒpÁ[Zƒ;šÖ†H1w»­JÉ°_<ò"÷ ^üD®Fb1U4º„ÉpC£Ø1Ht‰ï¦¥® FÉ m‹Xâ)›Ìwt»S ZÕ\Ÿñ®KAÆùšŒ1ˆÉÛÈ~ßõ;èk&›s DÎ 7õ<ôjC€.÷-ši¯ì‡dåezpX—)²^ŠÅw%hèWjˆW AŠºl[øYŽɬs7šÍ¨—¨ysú–,ÕVÊ \ û4”±6l?&ÞɨUž;ò -¥Àáhn0ž%8táÞ#àß9) »ØY@’ðyjû¾¯³ ÅÀR_ýVß…k5"šnÙ\†Å4MÏA4mOCâX(R‹Þ$¹¶¼›àÊÖô"YJL㺊õÂZ>gꛢš¦ON VÐJëñÜSüïk&ƒãH@ŽëÄÉ6÷(:$D1¶‚³÷˜ƒ»Q~ÛkÐ|!`žqú3Âeì}E÷Ïɇ»íòi{ -a៲TAŠ,… -oÖ‚Àµ”!ºªûU” Ux¹ršŠ7¼8‚azê6™êdï«2=i‰ÕR¸IÕ>E2oÑ)êɨ‚½}ÄÒ…\.ÕŽ'±zñ(*EÖΆԊ9 p—B? 
æ‹O0ÜÏ~ÕCQ¢m16Jí0xbƒC&xïOd¥¼Çž;d -Í™í:Cõö1 €^á5…tÅÎGüO¾þî¯I|Çšós=¹Æ–Z9ö~&—*_Ýc/*Tð»2Ûwa¦6Rcìç³€™>S6½ø€¿8Õ[ð”û¡ÏÐ¥º'"ŒuÇa*ô½ø{ –ˆôãH_oV?&%¶ ád°É‰úZT§Ì/0/|¶ºÔ¯š{+&è8bè–Ï­†p*›[2r9{Xè,¼ ˆ¹L{Q·4µ©dlKå(ÅX(¥ãÌŒNã”]´´°” \öuÝ\âtËSZ‰&å„#•Œ`©OeÁaWbÓéBí9OçŒZfQc6&ÊšçÝ!ÓõÒ³D6iÅzÙ"‚í­g"À&²*éxPu LG -"rëyT5-•Ì‰PÀÉž¿xçŽ÷?°¡ýS¤+-¸6©½}"3B¾(L¬Ðc:ôôšñ“¾i«òY^d<{Q2·Ž$ö/Õ½z![¬Î|LÕ–Å[¤epyL<¶DjRð<ô[vÚÀÞxq@öRÿ"ê00æFf:R8³N˜°UÚ>¡*.^‹a‰•+®¬óŒ’fÊ€Z3™9:‹L™Ørll ¯‹•î5ÿºªÜÜ R‘WQºh‰è­EuÝ[¢º§ÂôÈFÇ®5Ý‚Ïã\Üd÷^¥ =á,ŸöZ =¶(8N;X‘㸊Ây@pôØ@Ë!´5aï¼ìòA¢+éÀ E+—ãôªàÀ70ô?‡S ¬©ÍöŽµÂ;î"Ÿ¥¸»»’Z,Ä'ØZV?{¬¢¾ºÄl|Ñ3û¬O±„TæÆ&ég4Öë.ƒÆGؘw&ê;¢/M,žñ^ƒRA>"6Sr|ÕÇ'î{`v4›ô.>,ëqoÖVEâÌìa¡Xj|QŠ‹JÜU3ב;S3Ói’M&V¨zt845äf姊ŸmœuOÚ#‘ÏnñÝ=…­ûZë‘:]”' ¦»ÀVñá»ïþ#á$•hOh|;m_f‚DdŽ‚¾rÊAØiÁò GÂû@ý,]zëñ稻¶¶"¨Ýy‘¹+òaѲë ZýT䃥â 9^€v '-·¶"µŽ“¤¿rõWW-òàT±®ƒªÆ\J/×U Q=Ù-ÜmÏÔãy4‚ÿºÓ€Ú£Ý ó’Ríªy¢ü„»ôâO•¡.Wã/qÔïÌ滑õÞ ê줌íªä{ïÙ«.V U¶bK -xG -?5¤b]MÇ‘@v¨77Ç -ÀöYƒøÇÌsÒá ضõŒ~Vlê:¤s[ jì¨ x¹áäöDgmÜV6ý¹3TÉ>"¦sŽLŒú>®ø`…2(ZâfgÇIÙiý+qœ5$Tê+ø<¢p@“,¢Õ¤»ÇF&`ûÀHHSÌy¯!3¦TTýŒ.hNŒ2D­>Ö½Ô?Fô±1×æ)_ßÊ<Æãkag^-1ÓˇâHngâðæ…ÆÛpWA^¸AKCÛ2([W¶í•…êâ¾²6ù«wQÏ$g£uÔÈèê0®¢áAéØ6µ•§nҀ眛/ÂÕ¾ò1¥ ú–´J —ŸÇ߄ݪ "_­×•öù*¥Ï<“ÂáÝuÂùFEÜsŸó¨7Œ¯Z¼GØŸ®3ü&¾L¨¼ª8Ôš XßAu‹šâ_ódºØ[£½¤HgŽ¶%Àïj[š£Ânª -=½²çª=T,^¥"›­Aš6sžd ¡}cžgcüÞfiz&^îk¹\¤A(T êaè‘ÝÝzÍáBvnW¬n-G¢…\êyÔë#§¬ð«[œK‘Šð³»ýmåÖÈ ìà_òôøžè3²vº{Ðl=ò©3ÜÇÙje'®EèM0Çp1–ØGí¡­›äŸì–ÒótPCQm×a;0úkO)VTØ!_§BÜ×õj?J–>Ø–H(ßi/Z†×<ïˆVÖ( ó@=Ov)ðl {{vŸâû_ hªŽ·ZÐñz¢- í:¬%‘ÁVDQÎÖ›©Ðk“öÊ -Ém¥:¨cýÆÿ²Œ Lí«è€ñãt?ÎKY}ƒm¶,³£Â¾X݇ëŠt_éGßr–¾‘y‰*ÁÖâï²o«4oLl¿^P\­YÇšLs×èD±õ~‚˜Ã€<­xT¦ªÄ‡9œ.5¨2Ì£&mâ2‘ãÜÂð ‰êÛ“9qÇø"ƒÊµÉL.=‡¿röÍínOQyŸýü]ç¢ù?ö7u.A,˜Ë«jÔÏ9Zø#p¡šœÐ•xõ0ãã'”!ÐÆqE¾íˆÞtv²'˜‘3P‹*sí=‡˜ÁkG¼³Õ³8Š*"ÅëÁ+u \a[|/Itìr‡üÙWˆ¡Â¹EJý¥01³“Áq‘ymϱ<„BùqÛëÜ×J«O ó?rsØPÖ~Ö>ÉØÚ™ÎÒ› -„cWé…¯_Õ²hâžöÑ4‘#L‹drÿfb½1÷j‰([É-¹”‘O…‚Ã{@‰ÙöÖz ¥’¶ÚvÄ^3Â韠±ß c¾˾U¢¥4පWV™hÎà"ç‚zD÷K‰ïÚ~W¶D¬÷+MòÊæÓ ÈîJp(œ„PÇKbN¿ùt¬ÚÆ&/¡>"ë,o:U‘x1_•†*8áåJ[Tܯ‹Ve­xí¹VFБ\’÷R´hUYpºg¿l!x71X³tw“«ÿ4ÛòñQ-&¦Îzúcñ¯‰÷DÃþX QÕ¤ƒ²µßÕTb™2ÁÂàŠÄ=F Ýôä‹eAòÔ¨1fB½§-½³—Ò}†(HÑößÂ;ñqhé}Íß°ÇGx¶ã¹_y&>ôÆU¢†(Ž)`”ͧi–Ηż'ÑúÍäY‘>m‚Í9)ö˜ùò&lÆ+icÂÀ¿n•ãÁôÓùßÈhÅŽØáYÂbe^Ô¨R¿2ÚhÉ|…Ùaž£"ÜU•ÚþìóÕhð«Îí‚Tþ(ý -S“*(²Ì¸ê€¦”t‰ ÝS•è†¶` -Ý•K) )þÝÁм›ý]j8‰ªáïPÙ8ÅVú'!Ô¢SÆAoÕÏ!ù)BÝÁ;ý]18xÄzàÏñÜ<ëU·»E/¶,$äEp\½µŽ,ÑC\ƒZÒ‡·²Ìÿe‹÷b'´Væ³êåŽ.“˜¬geóLìßÏu"î¥;˜$àuÜ©ZŒÒÎqÚ )ùX0=tzhѱÂ'©‚ ‹šÓ¢½K.3øºœ½Ú—Ûií¦Î?¢¶¯­ãrÕÙ¸"®OGF/ «u œãÎtž·ø’Õ‚ü¢ô÷|’ˆø:&âs)ñív†$›ãSí ‰'âsOl¿Ö¶­ÂõQ±€²3s®Ë(,´‘¤ŒÉ¦Ôî #@ä;·±`Û«n q§Ó:·AÙòçómÏ«rùÄ>öTð;=Ô=(¡ý®¢–­vîó„i6÷AÃ= ¢ƒb+&¤÷_…¯ù¹¨¹Rþt`¬ó–šóÞ3S«úqêÍ#NÜ«r9â«xÏîȉLt1½œuÂ!Å^„|çs©ÁO‡ãY”F[ ¢üCaDæóá;ÀÜÙËG<'%\Z¥÷õƧzÉí^„û©ˆvÒMq¦=L;Õýå”M¡ ‘ø‰+(ZeÛ˜âü£ØgÞöÖò³0˯(ÀX/wT ’]Îèâ0é{úUg4`d]G ®m%øÃíX×$b3jàÑ•Æ÷–¾-­@ÝÊHÀèá±>žcIÖ`µ1†èðôg°j°ó¼ ZØ#Õ ‹Ù‡ ÷,nþBžæi¹$TQ6­å#f`µ“vŽ¯ºTòÖˆvhey7übOaxµ©6pui-3¨ÁCáooéòUïw;14íLÆÐñ;·}©½î8¢‰GlêJ­ÒáR*ñj¢_ÝÙÍBê¼£ “ù$´êèw•Š ±¹y -+œŒsI(\épž6©ƒørÚíݾÈlŸì>6¨ÏˆÞð˜b ¤ÏÈsç{Ê"w„Ò~eÐZÙýÔöš1GD!àE榨`»’ä½~…Rú2``,Ç-ì,üÃK™>®QóÕCþQ17Æ”;ìŒâ­»~FUêŽNR«!4ÅѳTæBK`¥«^Æ ;c5ý˜“eDXÂ'uÂn‡Í¦³¬L§Wªæu¹P6çÉš<«F ÊĘƒ!vÂyxŽ'ráî£AÁ8imº×Et<…<Óø#áøY|îy¦pFK;ï8óK¾¿eÈŸiM'ïR|½ -VI²´äÖ[¦Ê\ ö/¢Gö+$9XúpàÁcÌ£#C"Á"SnûK.”Z±¶às×ƯΦë–r‡¾›tà_Z öî¨æÞ_ÙS`¬~g?Ø´Ï•ýü"e®âÇvüþÔl£¶ññÿÕï P2+YÕ->9¡jPyn!uÒg»Ödy» -De|¿æ Üd]ñÌ+­¢QÖæ§I‘³j M¶±…•—NÕ%¤ék[''Äìf Ùè¹R5 eÖQŸCZy¸ð%¹¤;D‚æèûkLr5*ùŒµÎoÔî¤É©w¶+RvÒI|§˜FÎwÛñ/öB§´ê<¡:1òŒ6.Ùa¢ ÌÊê6ž©²¼! 
û_5¨wŸw™M¨äwr±Âé‹BxMÂDscºÔÇK¢Vnü“接ܶ(ó –“‚: -Ëx¥ÀIõŽ˜Yª•Ds||Î7ÎOÖé ¥uÍ=Z(’ ?lF¤”gT'®3EõÒ%ÀÖ`Q_ØD!•ó³$“‡žŽ”óÉð,Ú¶)Ò%~túSHvòšÊŸ„™ósP…R¤¼ÿãdµ˜WêÀ]7‡Û²ùáI‹¤/ÃÐ!«ôlAÏq*¬ωDHÒ;‹ºaÊdlGIšCE†›B]§-y'C7W†VNHs¥}H]Âfc„gî&¸·Œ]\i0Y S~®Ž5ÈL&vô<QCz´·²ŒµÙDyr×üŠH|Fm$g§“dsHb0ùÏ\jîŒ EXÿk,ó·YW®ã}1ê^$Ûg‡jy$šÆ[„`Û7C<íùšä)ÊÎGê078T4ÏXö˜lÏßÚëåÝ8éó^~œz¿²Ã.x8Å×Av =¡VôÎ>¢h¥¡w #Þ9¥ÄàÌæ¨\Ñ„Ù¾ø¯lÉÜrÚ}udy#Š¨¸(4pÎS½¢,Ê…\)`ú1&ù1@„Z )â9²m)6”qï•Úß@̼ªžŸ<õ)èÏõp¤ºLmd¯!ôb÷Tõ¯º¢ˆxM‚’wļ@ïWvÒ³Ý_™}B¹`å²¹ìdcL0òþ+¤¡Ý)ß;…'æÓE¿éH'ê®ÇV]™«Dh´–ˆrÿÜãø¯W“Ê. ²Ä>?³Ùì5Œ®õ3s«™7kº?kˆ@QµÊ 3sŒØ†¯’½>#Œ«®´=ã|V¸w3[9£PhÝw– £hW;K.õZŒxçûGæÄ÷íµHâ“8$|Ui3†y¨¾\‹Bù¹¥//|ö5/:G4Žú[ו¿,ü­x)¾Q»ÐÈ9ŸÕŸ*ˆi4úçξ6"ç†jCö¾7’ƒù{Öš f “¡Áý^¼œjÏ\Õ¯P·Éì%š‘0¹.r©£N¨/LoPó”!˜n)ˆyÕ̆ÐcʳÃúV9Qj–(lXfLjrÎHؽ¾ê€ûlÕFª¯š­„7a¾ªÒ„ò³—Vƒ†Ødn–l[«íˆ&M‡t†yµÏª­Ýÿî.;"Î{¡ýèËž/kÜâgßëw2Õs·$Ì¤’Ã~¿Î¨ù ÄéÓ©¨CêDHNQOWlárð#Ö¿8>C±¶Œ\¸ êãf(ÐXîÉ‚PËç%Q ²Ã×ñÓcׄ«w¬ ŒBÃ\O"Ò§Þß|7#þÉ­Ufç[«›¯tmLÝ]`Àû -p$Ú)(€W]©€ŠV`L¿öÄíÏ¥÷°ÿ6ÊÝc>g÷‹Ó¦¢inw2H­ÜE¥-¿cŠ±Ó9t„ÝÌ1Öû»âÈØ·$ÈpØe©•œmy¢Ñ=&=8"B·3ˆBÂõeþ+ƒtOfÌ4¤ -5hW´"æ¡çû_ùè)Ó)Ö=Lž;1¼Êƒ½åø ‘™j<¸Ç)2ÚH#̸٠»µåx7}ƒâ¹BŠ¤àÿðO®Æãjjo£§8Gk°Œné}±¢Çó¤»?âÉ5ñ¡g\8ùH£q¸Ý÷HÉü#ÅŠžN˵¥âCªŽ -.h.2{×™M%vÏ[o¦º†+ õÃIOåØD T`~‚êE•m«D+~žñ‚iYÏ™:¼·wÇ3R*a˜D rMlÐêûjDawçͱ–B…–Úµ.%EŸÇLˆÕiú·¹õDOâÊGÀ+SsÜ##7ÃÈDÚ¬Ffïù—ìž–Œã4åèQÚÿaÏXþ#f¬÷·Ñ–‚Oaúföˆló7â¯?þ%¨$z¾¦í¦ƒÇÕî,¬õÉØNoìW$èxYQ·y¢?9ïìy†H³…þ’¯>73¡C*Ë1­·Ì«w³ öm´÷ò; a0k -Àƒz•äÜó<åWñmW”Åç ÔXÐø®É’™+›ÒýíWðO’¼Aæ²¥Ö+ldUË ·ìDý€4ïk™ c&ðàzlEmŸr†eVC`ë8¿ -nJX_‡ãÇrDvŽ«Î1^j*x|ᶞõ€qºU{[BÆONÑ­lÆ2„¯¤©Ž°Wv¨z_ã›!ÊÌEãÚJ§x•÷ä°¬Ç,6•ÎP6ÿQÀa=éžÃš´Áè/fÍôìB]Qµ‹Ós.DJ‹¤Œg:þÔ ÅPB2ü+#y…! Gèˆg¦M#,xÅæg/çtñ3"‰ uÎG·Ã¡›f p“š$¨œ÷'ªfƒ pÇqRÏ‹_Σߨ¬Õ ÊÞÔ _=âÎ3w±s%pŸ#Â{”Oæõü¬û\=“HVÌAƒû¹öUÚ`o¬©×Ò?ycÔÄËÚ—p‹ZÙVd7˜k -¸#õ§ £tÀ@ zë¨4ìs‚õó_Þ_û7$IW ^ÞŒ]˜¢A$ŸqŽrÎ9£¸´†€ÉbhÞe.~‹/}ÿ=„†Ìµ€nè›·NÎyŽzû$åÈ)hÌäCÄÚuIhÎ!ôÜmÏ kJ)±éè5ëâ ‰g{­)Åbµ bqtñ/„ð‡Ï©KÉt×KÈc?)Å]FiO A’Ådº(¸ìð¢e )(€·[Ø+ŠÂS@ùæ$CÂÖ}EW d4Ecî-%„V¥Ï}KÁöêí9^¢¤ÊøîD—¥÷˜ò† {‚GàÒ –\f÷Œ ´Þ”/L"|9²Ñ•gýŠêFàí×jbN³é»ëÓ9H¿WÌ‘ø£d°[lÖŽÈAwÀz”)AÚó’ƒ&.SÊôfÀ…kÊ»$u™'óôä KŠŽA™ÿöZîf\V5ÒÇúÍZBmQ4;ëRg¡ŽÄz>8yu8‘}šŸ¨6ªŽ 2Fì Z¹ƒóz¬¿ þc‘Ðé!JÅÓxžÎg5,ošý4š5欀8/gX)ÕRž°½³a­O¥ü­lê[tZφhâÙï°ŠßßgЂ¡’Ø*~@5{æ:bø>?®¬-¢[µ¼½âé6·@8ÉiüéÞú‹}Æ÷Údv=“בƒiDe®¸ôÉQM6¥Â‡hÌvÕ&ÿ _)ŠÝæ1pžˆ®öŒ8$†™;ç©›Ÿ/ôÜëH·9ÕV|ºb³aTWˆc°öE4 …x.€nñZ:ÃùÏâ³]I^”Zα?£Âú[É©õ Àc˜Š&Gí„,PéøÇ.yE¬ì ÷Ú|˜w ìµznÝQþñ´…ñš„»æPúþVH`õâ¹úÄ ¸#P;HžÐ·î ×!õÚÚuã‹P|¶¬všŽô%´ßbý³>t/þϲš[…Õ+xŠQ—ºyÏã¬ÉæñWb¼˜­v(–^Œ,ZYg/tg1wÌ gÞL„²ŠUDçO`ÈIIÉ.qDZ'Zê¾´ëA~X²¾ìVéæû¦7 ðI§L0З‹gjÌÖÚ05ˆd#†Oe - ŒM²ÒdUÁ³¾î÷&oñŠn}†¹¤R8îÃ<öú­è8ªvmžZêÝCš¦-éñ¯´¢/¥Ìí/üÊê¼ü\¯þ•A=ÔûBÎl&þ¢}KxtS/xVi\þŒ‚îòF½_iz'ùQ%øý¸°kÄä¡¥Õ‘•³Fj_‘^#›<'êíÞ+°f :óÌÖãd7Þ+–ìœÿ)Q‰ÇJ -–þm¬y¥Œ©º(_¨¬A¢¦DDÌâbâÎþ…×÷ó_^eÿþ ù•íÑaâbª*4äçט?ÖŸ—ï[€=–«FDö@«þŒëiÈì«äUg'pû\‚í¥/¼ËtD_õ; -8ŒeKÑr„MÞ Ô×ùŠ ªJÐÎwórm|! ºA‚ïà?XzBa+õ­†ô8w LñfrtK¦«]Õ¾^ØÄ5JkéA“YåíB…üâý^@ð!é„TIÑî¬ ØÅc〲°Ü?H¼‚QC²÷;ø² -°ã>ƒ’÷ÍÝž`9ͬ!»‘ü„î¥Ü´hëåBú[_K™#²a½ÆÀÿaKžéC/ÏÁKHù&hÉNhx—'B8íÊ=uX\i¶_çc}OV‡³¼Xšy¤­FÇÇ´àee#e®ß¡£¢0¢¯•/Ìšç(Ö–=eà°Ïtiií‡óäŨc’Ó˜!´#ŽÄ„¨ì»+†ODÕRR>„B0lGÙd;Y~Ñþ)É8‚weÑÂ=jïBÂJ£‡Dùq&+ßšºULDâpR© Xå0ÑÉÍïùñk! 
-@;;釠z‚gîˆMqïL¨Bœ~ `(÷ËúåB×Û -R Ø¿Ïð#æ‡#­9¾7÷êókd±l“¦B÷Ñâ<¢]Q¹³×Î?.ð$BÙP8Õ ÙõáìΊ6›1šb eâct³£PR£Ý‘­¡¼ç´T |¢ƒ™šúCH@½î¦;+gÞÜí9…dT!™!ÑJ½d1ÕØZH@šo¡}l‘"Á¿œV¶eÈ;‡ðeGŒ/u-èk%5\ãzÅqóÖ åÅß(ê^gr`­È¸ŸQ,(y³åå@ЂÐQ‚ ŠidY’§ë[ñ -bVƒÃFдÛ#1³žöe.űs³ž@¨RËž=jqaë0å„¢ªÑH7ˆ™A)`h#GϤåh]âZ'1oBë…ÑÑB`q‚þBs2ŠLôÁœ0 5µPÌÁ¢òôñH´âmŽ mžé…¥x¥ ½åk—Ï=øïW*sg«°Ó®B%bŸˆ[ eøLØW^¥®½/^~šj,+êŹå\‹uö0§çµ§keMé†ÛJYPôÏOlÿÎ4ß3¡wž ù&…Ã=ÉÉ?kš“ ·,s¸tµSþŸk¤Gl’*âÃøn/é<«tëÃU]WäÞÓºÒG¨¼ÓQ@òLÿÌ—O³méçãeP(š; WšRS»âR½Åm¥ÞÍ‘ZÆ qfs@êUZÍ Ö j±9_­òxŠƒ;WŠP=dòS2oÚÓZs)ìm;ƒ´%ÚÖ£,Ø÷¨&µÄdwˆQÅccôÒ ¢wyãúC¤çgN*ø^)xöèv)Ì·÷›+Q’…þìàPQÒƒ£´US¾ÙRï08•|Â+vlÞxïõú =8#5#õX°¥¾½?bÇÊU-¯xË´ýðcy[¼N‚©BS ˜V;Pš=%,ÔŒ§(@©e}ýèÀ'aìT>b+2ǽVíÚ|~†žœÐðÁ“›ºdA9ͳÆGsW©<Êî#SxnY#ªšF‰ÜÄÄþú¬¹7!Ú¹«RÖbUi5ùTÿZ¤V†²·sËÑÄð Äõ”^£ämˆÆŒ=Kép] -ô&aûLŒPÕ’‘“g#3_p‡ÄÌþaCwÎçõÍUÍIÎéŸ! ]ûà+ -ÞãýºËé?ƒ¢§ñâX œZ†”ºE;n$n·ž! "[ üʈq±Ï[%å•‘ù‘ûa«{íšg„œ³³ ¦ï€v²#m"ì朓ÖÙŸZMe,ÏÜTOx§9oEû¯K!ÿðÚkÌ@S¯ì}ûy1ó0¦[ÈùGßÖaVÛ윬IðBr¸þêIÌïµQ¤ÁÑd/ÌšA¡ºa¤*D掵Œé–@ù -ØÁîþYÏ­<ÑGú“+üY_^L¥asE“gÔÁw(4`@¶€2xnä–m°ð¼±ÏÞïNÍ­0pÅk¦mºuAi¥œ£™ÑÃÚ×)¯¨)Ñ*}Gd<õ«Qݼ”/®}¬ZMùÔ¡`<ï0I^á镽l„&™ðTv1ÊŠÍ.{ðGô~>QnD8‹€Ì­Lu= š÷'åþý_þìW}Žß†Õó„PO09WUä Õã@¶Z®1ĵ1¦q®~À%>m—j¹ÞÒŸ-–¹9ÛÉÐ’£ù|¤z–ªôuÛ"„Áøã£äðL6Ãå…&#‚ÉkÌ;"|f¾[žPK+ãˆÀ7稕Ðù݈°\™•'¾–@\³¯ØD¨áA¿Ê!鎰ŠW°¥A~`·U>”9Á¼#Ô¤ðžѤ+¯ïŠH¡N©¹Ò¨¥%‘.ÚÑÇ®ÇÒ‚¤5Àtž¶ -BWåC*VÀ{ùO´Øº‘йWíÀö¨ª‰¸-#´“£QP-¦ÐuÇsæ?‰ -žâ‘îU2¼¢å«+yjöØu¯!Ò·gš—}Gƒz3‹,„{ë/0ñ*ÞËÍ0í t¼ª¬WåQ£‘¬¿ÒÝ–‡ºbÐc/ۉܭ´tîd|Ê8ŽŸ•»ªõݪž¡m™°Ž±ŸˆtŽ tsN¤”J%ɾ ‹»TÍ"ŸIè¡ ‘lC|£øãm§š&'¶êåTÞCñžphUƒÓ-L47gP;wœ# k¢s4¤¯m¹ÚðMÍPõØ—t v¦NF¡˜FzZŪª -\;ö#LW=÷…;§Å¸»ØvvFwc½¿ð,ïCc±åY<ÙdÒñwžîá«ïG>•š‚ï`v¿¼±O¬VƒþèßD»°cÕÔ+ì™á+N"¾ºfÂýIísÏÿþ®ÃiA°A$ÔRïƘ¸Ã­Œ §h µâåa·ÅerS®)$±wÈ‹ø¥†ðƒcÀ<7Ë’fÇP»g0ý”X¿dÏÙ<ÂB-#uµ«oE‡XB´Ç칟½tö÷¦»•h~꫆8ðÿ x„ͯåÍ •Ñì¥q»êï‚'ÿVK„t#Ý}©Tu¹p?p=UófA º¨³ -¾ a´/_zqXM¤@ ÐáÙ„¦æOñ«´öòUêJ3P~j?9CÙæÒ{ô¢Ïí%Ýy^%Z°ç¿Dÿ{¨Tíyy[¤õ{h¸„Óº§×:ä=Å#(oš jôQ ›2êS[®,9#\}VK†6l|KÊ>t}ÏÍyÍ>E3oW™ †nfÅžÑNu:jä?OÀÿ)H"ðÛ(•Ui™Ñ[-³U‡:dãåŽõT‘z¾F@O“æ f«÷cÓ[šRH‘'›Rh™¾øk¯6“Æ*œˆ’X5Zâ43§ûX‚V4CíWì Ñ%ÛŽ+Í:¡ ,#zþnˆ^B ë‹*ɳªêdÞPfß:ž2–=©ÂXÚv5É#°¿½œîné])Rë+Ì5­ò?*xP‘Ò¾{™c€ól™àÙÔxÊ$=®§#k ˆG -ô ˆÛ&ć=R‰á3ƒß;Öƒƒ×Ä• °b¤’Т¡¹G+è£<Õ8ï*/Žzª½Gàâœ~ìG™Œ”ÒîÜQÍ—'<· lñëUµ•]gL±× -é×µö˜èK]iËì¥áËë'›™Ý²øUܺki -ï €$&Ú{½Ô Ä!Øß%N®™+Ù=<‘ÚfæêàªÑÝ+ü²+ÑÃ/Yg:׺Sp¯°é,ö£ÔÙ¤Å:7šNuê£÷ΩÏâæÁäjFö.YÇœzö²›®uõ%ô*ŸQIHÍ-QÕžz@¥ÿó @ºEx9ÉÑbekÛV[B+`n{çHæº"ÊôjcÓj÷á¨æ2³üo´2Ž€9.—& ";µL8кúÌ‘›ÛÕ, äÝ/]Áºa,7\&­ú½Êǃ×<¶k…®-´¹íY³ü‡µ»˜ƒ‡æi8¦O©…–µóZ¨e!øgøùOíGðÒ( bí V蛿|ckýsvõ+yœzÒ@~Ž¶¨nî¹Ä…+(äÉT=çˆEìèÊrþ#dê­ÌNΘÇé žcåqüeš§Ð\É]u̬žßÉI‡ø¼ÖïèîÅa }Cö  ¸Ì¶JNgúÛ"Æ#ªúg.•=‘²Ç+ƒµMFÂ=Æ+âô×]9Ck„ÚN»bn8Ú/÷¶tkˆÝ,\ÓÕtŽâ‘m0#.¥©­^|¡ÏîA–šÃϨL3eÒÐtÄy‚É"Bã2D‚{žaÐݨëræ¤z{W^—2ø&æ‘S2³9óÏ‚ðh¦|òVË~ RQÓ†¨0ec(€/E¸MaéV«ß–Ê õèqþ:ÞKîË| o/&iè‰î½0¸ŒØsä/ÒÐævw’UšË0ÁdíË¢øù/oÇû3òP.Ÿ8¥ÐΟš«†„¾}>ÁVÕ„~ä‚@ÚÃp‚à - ºÂ­ùöIü.‡5UkjL›Î5HkXV±-¤´Éž&U‰n2¤žû¥ã½Ýï¢Z„¼ª#0_ÏR$?çWļÐô*ÀÛ'½Šs…¢ÔV˜>î[%ú}ë(oûRƺRåëg£Äjx£G\Àå¼R‚:=hcvÄP·˜Y]Ïïé”5?Ší4ºÂË|ã%-?Fô_Έ·—AÕsŽ©w™[«ž°ó°î[Á‚:Å ž$á¬[æú¤' ›·cH1. 
&Lµ÷`ÏÑfÁDöž3Ûe•FÒä -ô¡±é1[|J¢æø"xv| "ÿg¶ÿ|ŠÿZ”íF ç*z|md~Ѭ:* å»#º—bÞž è k¥“T¦¹ÍÑ%TU­Ðdl d=V‰C-ååðç˜åÐWÞŠëÒä¤DuËM÷JŠED¶ï”sþ™)!×Q¸ûöÄàíÉQ¥ÿóÜú_ÿÑ?'â¿2(ú‹[´€[èªÛ…ÇëØyÊ7$¥Zåü¬Ä€Ù¤"õWñçIv_UŒ¨¼Ë@úñ"Úœ¬@ªŒ±;¾Î;^Ý{¹±oŠx c^Wœ°Öá}RųÃé<Õõ€ýÝz$* ˜&‹;U ùäGöy¨Ä½ -&ûâä½Î(ŒQ€gþöqKŒÅ -ˬêjÁ¤Òßc†’G2Ä ÝŸ«Bƒr!ÏyDÍ–~”×ý<‹Ê´¥½ˆáò”QýPù"f©ëÍsŽ¨+3”z3-®ºç(áYd'Ú£ -£Çþ/#Ðø¬BX®{V–Ùʃ¾¨L-ö·‚‹j>] ` 6*‹÷{œÿ ¥Çßȯ :´2æº#³´—–cÓ/¿—i,V±Êræ‘Âœf×Þ>þö3ûég[ÊÉ'¬'©(ì¡Éûð}”D˨´¹R¶Åíš±¬rº#•uû@ÌèÕ±-•CAa&zDã -=eô«Aµ+V¥WMÝØ—ñßÚR0®Ìñ§~g>åØææNT&9¤ÁÌN@6ºùË–U‘ûÌ_¤$&l‡ýŸ“®ñ½ŒÁ'+¿äe?úq'(æºÛ¶œ´™ËzüzG`nÒþ{YÆ;®¢|«}uUƒ 0Ö§UN¥u€W—Ml€éëW=O¡oA¡@k0¤FÄwdW¢’»FšC„dè>õ8cCý`²² dëüê)[’)e9Ieæékg¦Òƒe¥SZr±ÏâmtʹDw‰X/¨úCÌ!—0šðœ/ºØmµ\Gl¨™[nqäÊ«ÑÒ"äÆWrI3àƒ°+vÐÌ -JáIËÎ]™¨oÁ”¬ŒõÂz{y´ÿû¯YeOÜq )GQðQD"'ÃT Ü=TØ’Ä%&Ï¥U8õc%GFÎ’ûI@÷üOfkù©£D;²øöÛ½½¼ä±ôàÔJBìÄ+ù¡ÃY´ÿÂÚžþN›ìŸÝyb®øU«AC½*RÛ-tBhÅ#­PV/ùD-ÙvTSãç7hðÃ+ Éú݉ör¤ÑâQÆ ö¦ž\†ìuý ¥ŽQ‘òùVí?v¨HÉQÊZNæœaˆQ5`ÖÉߘ?»c\¯$BêOEvQ$(DÉö[yt%ÝîgÓÉ«¦gg¸Öˆ[®[™ßÅÔõÙßk¦ÅµÜ‰ÀV;`íù"QpOêÒ-^£‚KUå«3ê,ñÚ¹-àég[»ÿrz™aþ÷~ìÏïàWÜÞÕ.ŸQ«Ê²ÚÃíOª<’^(á3„§5„ԦשVW†#$Uxé 5¨îÑ_-t§í¯RŸ¿?qÉÁ@°»ðèË4ž³/á #µR¹Æ1»ï§¯>+6ÝœñfË‹bCÔñè÷º”B8î¡êìÝr/)~’n(°@öO\U™ÙQ5Ó‚ß×ê‚M›k Š·-få¥ÃvW‡™Á@,mUŠÃ×ìc³^Zˆ¦8¹Ð7ö¶ºá¸¢Æ¶öÚ#…nÎ4‘l˜Ë¶oc£½[³ Kˆ{ºåÚ°[ì­ûxíØÄ/GôvjªšÍ €êÐ"¿  4U Aæ²£ÛÛöק¶êCP¨o­Á¬ ’È$síEð}@^%š-fé[Ô°âf±ÕêÙÄè5Lᬌ¿’ÄßúËZ'Ò3º–âŠábØJ›£-6ö¹sÅÕGµK«0(¼j:_ØMƒéjb«ðØ YÜ|è¹uÜ^ÿ]N·ÌfÐå°Zh›3‘ßtÉ;ÕÎàBå­­” o\T¡ž—¾C\îÈ¿oQ$Œœ±$ -â)W2ã¤rm[÷cRéôò¡Y¿ò=¹Çz7÷ù4µC0ãO¼áú’z†t&ìeA‘ѳCW -F·¿{e¾„w"Ö¶ô€'ZdÑMðŸ?Õbšóvˆèz¤V”“*zü?¹B_é•c³ù ô ™¤&ðNzÑ?I~ÿ—?Ç”Çû3jÒ÷CÚSþZo„â;A“ íÈtQ|!ì¶2»Ä;‹¯íZ/ž§Ünîš,yùl!ê@Ýãs«¥sDý-b¼`Ù…DÖgtD·¨•1‚©¼WIÕl×ç{å«Ê‘Îù±^Q Ü rÓ)9"JP (d!w 0_ˆŒ‘Ÿ‘¤ AÂ]É3+6”ñÑÏ÷“ðãJBù÷+™ß5ŸŸ=†!žÙÀùF÷"„®£.÷&>í¡ƒ‰h•Ö¯¸_ ¾V(‹yâKÍPÔW8³ÞóJ¦Ôßž¹{íx×Å\€gð•=éøà10àG#a†Èc6‰ØÒöã/È´ïÿòFÂÛƯ:õPL¯21阆ÜYÚïwF•¬Xô¨ï˜ ¯­~ØŒ#Æù¯­ppæy‰ liøÌ­¥j wê=­HVfªí3´‡Ø­{IßæêJÏožûâ¹(yó^C6º¶”ô nÂÎVÎ{Zeˆ TØ‹ÇExÒ²üû‰ºØ9 -sª£Jís{/+Û;æ\_ù YŸK®sN¯p0ï4na Ž°ásð¶ý‰HYT8®þI‡ÞØÞ«,Þãì:r1·„ÜöFòAY§F׫*®êÉe$´³Œù­ø®Nþ×1ïŠ&prÚ‹}J—,’4-è[lã[þ€þa/'°¹£Lé-r`ʈCS³ãYgÓcu^vizÂÚ ¢E³Š5¸¦Tp[a°YÃ9RO¢ÝçZ¦Í;µó¥ù`ÒÅq¼vAÊ„ÊoGÂY\ » Ú‹LzùU¯}D(=¡ÎÊe/,ÏÒôÜ2ÈBƒUéHUÔ({†ÓY5@Ø=‚Ñó;ó“ly¹Œ è?õä =VìäììOIJxY„„.2ûÛœÔ(G J»bâ˜Êpƒ>â ”­hÆgœÅ@£mòÙ®öðàN=àÚ®¾xó—O^¶™M?½ú_C@_;Müª†Îgpü@;çi•“íþ}¬Âa’çûÙ¤)|ÅU=³éI¥&§÷=jÏïÙ¬Ø4( -3Ã{L|Û8+iÃo‘~:‹*Cf´WÎ -8¢g+ܬK ÎÞ¨óšÃ# )ÿc«ù¨!Ö=q¿¤çW²qJå -uW]ê„‹Fï»—²Àت1¢ZºP×2ƒ#n©F‰Æ0æ׈ÎÚu‹ÆóSǺ›Àõ5§'°kþL\>`?ª/¥@ó[@Ö3ØeZȘO½‡f²à³×wŸ1Ïx¶EÛ&èwáL$!|c5š¹;Ñ¥X^ÐÖ¿¢C!:·¥”©Ô;ƒ} -\9ù/m˜Ï?+~3dÑS~xþ_yÕ0,tÎ -¾m$C;o‰…“×,´ÇŸmQÎ¥î%ÔØãp´Eƒà®oŠÚ6–ŽÆ1p„PÈŸQß” a_µû¢ªEùãYB$‘“Pü%Ó‡uHæKö4¸VÖYÜváJöÀ;ç%Zj§à.’-êyöˆTâÞ”Ü9b-ônþË“ºuÙY\‘ -Ƚ.5§Mjè‚ ¬Pªã¬§ÏáAFÄ"zû-"ÇIÅQsn¡lɉÄ*êßXálª…)k1=Bˆ'xsITo÷¤à™ÊÞ©!£ß¤:=}÷—0sþ:zç/sªâ2 I<Úÿ>Êñ^ò÷¬›ŠõñHVœ{-²í&™¦) òÑ#n)¿OHë !Q¢C‡Í›r ÕÚVÙ1ãheë×£EÏç§]{E&QówæñôGˆ™“™7Óäëc§Ì[?sŒ¨³õˆ5ÿÿÌÝKŽæHv-êÔ|• i4#ÙV÷¶ï¸-åü»Ç¾µQá‘r/àGI^üù°Ç~¬-„Þ|Ôn¹ÈÉ’Ù¢ÙÓÇj¥Pª(ê癚zBþ< -ÆG‹‰6'2%ñÇò={°'¿Š¹ÿÏ A£4óŸû€á݆oúÌ Š§D‚ˆ'F«AG²‹´Íy+*g~Jçw'[GWǺž¹ 3JnòÔ-ÆžŠ²éÜÌãm~Îáx<I:3—6Äɹ Ô4NÜ»À¼QY=‡¨µÎÓWŠ`ÎŒµáFµØƒÍ/Q ¿r=ÓGÚ¹±œÀCó¨Õæ;ÕSð‡z¹› 1 Ñ?6ÍsXë\ q°·ëìIŠæ¯ò€Ú³¥ÌOÖ(x™ðú.øI’ßìXævvF%óQ«Eº¸Ã¯—O¹õ6GÀi^´3Ž#ˆÚyÓ¤kæ™e?™Û©3Ý/„äQc0Ù"{ó ~œ ùÜ—.í碄ȵ}®Ÿ9ÄžÇÓâR÷rò3)ʬðÔaXã$ܯMÒ»=ÁÁæŒ80›(‡_Æxjé¦hnXù!=:Çp±ngj‹8X°7ž¹Æ‰–rÄH€v˨o@$µLà—Z\ïvò‚ó£Üõ%Ýî ê²tïAÝze·ö¨žlI]9ç‘”ºœ܃϶æ§KªÝ›yC®†Ó~‡¦â“¨Ü3eÄè39ím´8 ë¼¼ý–ªÇÀ÷^µr)!¡ð1ÕË,ï4e$Cמ!x`2æˆoÍIry0‘Ù#²ÒbPqÎ@h¬έ1›X#åôDÕïA·ÁÈñ@ødzÎ'ÂÞCÍœ¡EìðˆžÍ^Ób‘/r…¶|׺eY¡iœ™÷<±žÃ\3eäÜíätÊanQÁéàÓ"““]hƒ •Ye/Òþ5ï4j£ƒcÒ¨G€ÝhãÍË%ˆ#ŠJÝQSFöÐn¬¿ONÍwaGBƒl"Ž6üw©p~JÆJ^5?ësS^ ]“>¯I>¿iÓW=¡ÉËÁ´%!Cöl¿«gu†|líÿ£Ä@2ÉOǾ}¹‡g<Çp¡‰%$‰ w“ñëêõ)!‹*¤ˆ³‰^7•ðT“¶ðÝ,nªGy&Í‘³3C”´1êè -`'À²9.òÔøÅ\›#X0§ù—veÏF¼­ÊVð–,O£)Äó#þT;öä`ÆPãÞ…m·Ý™XÑa™’.ðƒd„Å*+œÐÕdÞبìu&ûˆO¼îkÍñ9Xè®0šë¤ ðÈœãЖðÿÔœê¡v™'ÿ|WZ.Î} 
¥3b,kTPÖœƒË=°½8y&Šà*â褠4O\Àà=“¢·ë±Lž)JýóÑL"€Úxº&¥ÚÑY"¥iíhG’|Z léLà†@ü Æ) ú¢Ð¡ówHw"ö§þÃî4[è÷Wp¾³€ÄH&9_AĆÉ%¿Îg BdÁh²X>bð…A©"½´;Ï\ôÐçW(«›ýùz*ƒß3j¯hsj¤ä(™ë+‘*»?¨}A K$iµQ”µš5äVƒ÷cc㶟±Œ9jˆmžÆ’>‹ºÍÃG6Gã4€Q Î#ÌáÖhŠÞ…TÎÐ^˜¿ß+Wó'oÌ.U¿Q%vÓHvŽÈ¿¨^ÙgÛö -€söQØSŽ?¢Z1Ýù7ù¹È0Ãß,X.¾f[ ->sÝÛ¸vH¤ùŒ3üu-ªÏU%US”ù,s!g+;PŠwÁlŽ¿‡]I4òJ,ßÝ5¥ÀÏýŸŒ±+‰‡ -̧ù#NGƒHQŽ?*=.âs -KId*N”—:+N/1Ùvjag|uÕ´Fôx놷@–ˆ!‹vNú´ˆwP/mmÏaŸV{¢?ËâN=:›ü|3ú–Ç$VV´S"˜2t·óˆ0™—:‰F<ÌØïmeÞ!bW&;bΑP‚é ÓžZ°õ¡ß?à^+„»›¥û\ÀÖÒàšŸ âãg0¯çQ‘Ñ×ÖKÙ="mg‹;Lä™”]¹­+ÇÍÎ,:´c3M jckÿm#V‰0Øg+ò»ôU7FÅäÒ¾®ˆè‹ÉW‹„óæÌŠ½«pêp7 ð\-=Y"ÁÅX2¥×RìôOæy_ÿÅ¢}î3ª<ÏxH¯°FßgMЗÔM^õéÅT3 ÒÍ•d2Ó–é0×Éæ8¨C«"²÷½©“´ùÊÁ›*pœ;&­ä“OìÙ#GÍüªªQ›'á4¶¢Ùr’Æóor3”p%½$¼ýT$´©ŠU´6“fdJ­ý}7Lnf³ ©+De9÷vä±/¥b`þà%¼=Â'''nÎÂ#úùNؽBTå_ñ=ò̼Ž§$Lk%ÔŽ' XÉVi–‡1û¿®’ÍCf•_¬C)N´ k˪ jÏeÈÛyBÒ kƒÛ{j'"䯧ÄÂlœ7Ÿžôâ†#„Ôµ c -ˆÏ"•´Ê!‚alO/aÿ¹½¿üÚËß?·¯VŒlÛÂ5ú5þ+C¸P·é•Pr<€ÚmR×™tRNš’k§Ö  ¸q¾˜/SñÜŽŸîýB2*0èÝìYj¶¸‘ßê_EîpþPÆ>Âæ·ƒC³âÉŒ»½¨=Ir„žŽ ’jÂÝ®‚‡5°lGèã-ñ½ñq8Ï3…bO48·Ú9ùnûÏU—9ãpC¡Üd"µ„Šzê^ˆÌÜÜr[À|¢WŠV -¤2ŸÊófÌÖ‚+§Ê‘dþn;– ´ÇÖÕWcüÛŒ2žKáÏ¡ A¯gð¬Û'{Èƒ9a‘žˆ¾]õ)6¤C¾K ¾š«±A hæ)ãXÓVnÈáó9ŠHnKt­2¸Æ4Ž†Xû¢ÜО>³öOäßОΈ8>ø©qô˜·©Ø"SG¹GgÃBCgý Lo/(¨Vð†û!“¼b&¾;Sz ¡¡† ²ú:‚À³#{/ý³-ÄÌ0-Î0ha!x»JÊÃ3&µ½]uÓ£‚´¯´V…p&K6èÁÚH]a {‚(ðÑþÔÓvÌŠëýÆ3r`]›Z©AiÖŒ£ŠðOêó­¼îÀšRÖ¹:A…a‘$€âŸ½™=e™8€7·hY秚é~Љ£¶ËÜ̉ŽÿU™ÃF²ï)XÍø~nÛOÔìîÌ[[`‘Pk{Ý/‚–¼V øþÞuô"Ÿzp@˜9I…3G¾XCØ>@F^…Tð%1éW…'¾¨µÔqöàÜL ÓÙ'à³¹–ÿ Ó¶\Æî‘[/÷Ž¿ƒOÙ"Å `½_@> ñ»R“ÂùÓÏõS[„Õ[d4jÚ„S !ÎOEî—JyäL÷h•žºc«hç‹I¾ãœüÔ6€ï-¦wÊŒO9®…b«–Æ©ežíXÕ¥ëÅï‚ßE²vü{DïÏøoC~Âëf-~ùÿʘH˜ÙÁŽ}MÔDXg¦J“ž+v%aÂÍÝãìì‘ôìjÞ² <¼ÕþcÞÂ|¢>Ua­§!é»c—»sÎBI 9×éf) u÷”%;ž4év~·*)Õ˜÷R–½A§pcðÜ=Ãÿ±‡=*jÉZ»…4jâ‚[l‘Á_ÅXåò‡?HµTRñ"1¸‘ ¨:TŸo]5ÎóʬT‹%¯YÊ¢©Ø’ªRÃËEâš]$ýõØóØQ‡dXé &Ö«è,&¹Ï·ªKIuåÞëŽK^êXU]ˆz zŽõP„ÕEÜm~3d•@S6§ÃÖ‡â¦×Qg™ É,˜p¸ÊSõîq‡uM›ó®²î×;LÁ„Zš¸*Â}¿å颤”¤øœøå·=C2N€è ÎŽO¬‰ãßÉ2¹ðñÉðìŒHn„à¶l*"OÒ"Šé3æéÒ_@ì[•,Eÿ¶öñç´iXê²ïµÛg(µ5·AÒa|O/ŦL8þ‰ž eÏ8éô<¼Væ»2t,†ü˜Ù8‹'q ʈù8wX•\pÑÌ ùò¡VƒŽ®-ìš¿¤}ìa‚«é©XðÂ:E´b%:Üšo¿6ŽŸ¯O´ßýÅ»î±>ïš“:‚–z8›îämŸwŽîןÎÕì½ -Üž‡9º¢Û¯=–| -Q»ä‘@¶ -…VI%ŠCUÎöhõø±ÊÆNåË-xç{_ÕµFÞ° ¬ EÆZ„æ‹¿ßVFhQº -‰ÊZ:1c)š*tjÛaAWÞŒ&³<¶àá„nÁm`ùö4~‚ï›ù7¢}A0(‘Då!õt'D< Dè” {)Ö6*—ÛSá³úCáš¼}1KUæk&¿ -ðÑØÕKÛ2ç5ƒÎ³¶a3%º£üõ³µÔ&;7¶£P¼oGOÄ4K r“ÅÚKp ÕTÂþt÷È3g#Öô…CÝJ£(G ÚOÁ¨ªGGjfÛÃ2:W•óˆ:èÈîã–çÛv†]€]5EŒ„ü¯:ÙW³ xʼn2ç%„ßêÊÀû+sÈü)rî‘nÝÂ:µ\÷T‡¸KìÇý•^µåʉÙ#Ë62 ŒÁûŠ§#’xÛªW¶¿’M§åÒô[!VÂYá,¥ =¡Õ+™“ì ¡¸Wžç¤²ý|gµ¦HÕ‚Uù|«š‘∞)*s1¨:­¢”¤pÎ\D’ IwMU´ŠjR»_¹¢s°ê~¡åmtïñ¢ÔÚR9š;Å“ÖˆÒ×äÚt±ðÑD’ÂJN'p)õÕï—C+ÇBÄWxŠ@TàijuŸE”#¦Z@†£c•ÚbqÛɤ4 ¤!ˆ<Ñ­H àÞ¬ÕÚ+¨ƒ˜yd5ƒÐÝØNêäð"#»»Š³=+|#oB•¼íQË=Gºò6©ÈT’ -o‹ÄÅ9×LQ¦IÈöè9šAÜ„—º$>ÿÿª6°Šäf~§òÞ‰i#WAæ*ˆû¦\Ÿ¿Q ÒÞWIM‚*¬Á<ÙÿHU3úÍÏ*ývŒ£.øßm+˜›!°XçÃrh¹¨€›Îðe„´%ækA^¯f´®<ø%PäÒæ À¼ŽÓ|ª¨ÇªèCÝ«†ö¥´g=Í!ª5È4B¥wþ^šêÛÏš©ÏïH½Ð«P8qr¹ÆŽLá;zP]?ùZM‚e|P’ÁrBi¾‰ã­ú2ä4ª·"§Êrö½E©ëZ›l(Ülº/o^µãŠ[çÒÙlÍ&6ïðèBXš-Ë^ÜÃØf”‹ôeÿ+Q"÷™+æ—Õ:<0”tŸÓTM”ÈÎ"(¯–Øú¤7ãN1²ÂDÕºÙõ£¯%òd[È~ð¬Úüw™úc{^Y©¥Hü¤¢¯–6Vcðb0§/ðÜÇ7â±—½®k}¦ñq†Ÿ™ôž¼`(¦c¾ ŽÀ.¨¸ôb„œG¿N(ù‹ÖÈg=lc>×ð£s1=R$_ZÚ¡èQE|EÇ…´á¨.§£¸ò_Ïi*œZâcûzk€7Ôg(V$ðÙ£Ó¶•7¯![‘#ö«§D5L¹Éþ(ñç ‡5¸ô$ªV%uþãUfžÝ(‚•ˆQ禎̨4°<®èC¯b˜>cÜu?ÊSL Eʘƒ´|t{r,ëXCžÄ^uTŸÞGïw®5í  4c®ÑW94hµ™Á)ŽÔl¸£a¶¶–ë4{ëØô½÷(kûŠ=Òí~ÜEfõGù¶îV*œ©¯×SÅZE¯œ|˨°W\ú ¾õqúW˜‡Úqí\a!ÅHâìó÷tó7A#"ãSª€ÂÖù,?W«~?"qó—’ž„¨ÒŒš½–?ræåVÂʆPa")  -¡.ÝX͈-îYþ@öôD”)"láM¼«±£=ír3@C­Ð=²¾7º}Âa»å¨j/É®­AOÉ•fã«xh:F½ruä£`ž±ó2” ¥Šm…º`:2 * s§ŸìCûJ’”ðÔ€W ÞÖ C+ÅÉמ•¹<Öñ—PwL‹‡Û0žq[iÉÜ«¢.yô€wëš”]/L¾èýQ£SëŒ flŒU¦æäù¦P6žlþeþü s¢rZ€¢·ÃÓ)4ÔôÆ<ɦŽ÷™ˆ8£éðwRòåm¹¯G²ÀXû`s~²ƒ›ßñ^»W¢€ßpÃ& 2Я¹ãåû—RK³yo‚¡ü:ýÁ¦ç·²G‡xÚLërøiŒ÷c©@büÁ…ÃfHpÔ=oke»Ž#]¸jîX³ªEAëzº1üÀ‰_z7LRm>WqÞ²ûE– Ç›°D|¹J¯ `& ž»°/ -3¨ -“ù= é¥Í›‘ -db=1Q9„ëÈÍnmž&·ˆ£6ŠÑµvtJÞª¾â+|é+ö½ÙØFZø+¤]7@FôãlGQR)×#±rçOT‚ê=±ÓÃúÅšÒ Ih\áÑ^rŽáäö ‡ôJU3V-Ó?Ë]EÛƒ3¤&^[Œ£KªB&6`µ.œ ¾¶#¥t2'G)goxÀ±Ñˆ³Ù406enÁç -ŽRËÄdF±ð,'C@=©°µùØšãŲk«HÌCƒh“}œ™Ž;OLΣ£†Pƒí&Ji«¾#XLø%«eî>ðGâ'h‰ìçí}ð¹ŽÊ 
[Binary stream data omitted: this span contained raw compressed PDF stream bytes (objects 36 and 37; object 37 begins with an %AI12_CompressedData header, i.e. Adobe Illustrator compressed data). The bytes are not recoverable as text.]
-# VÂd7A/&/„³¼=®r”QxÉsá"ÂdÐO0d?Áì"•ÅWµ_¬úÀ›E‰jÉÊõ+€âVpµò© §\m·~j…á¶+uÈ:=q»QÇq½™œo®zfuÁQ'uÃÉŽ8¢i>d_œË ^{MÎEOt-Zšö'>zÉCß“Ùûä’Ìrf”‹JaŽÇâ…r¥ó´pDUᇂÏì„JÔAP)ÞP¸¡ç)%šK,·^ûkXuË.qWÙ ®¸¿ÍÖ”½ÜÔ»­žú°rÂXøOžÆjÞTw -7 ¥AÓ(fƒ!§Q„TS™®:Ùæ×"¯Î«:sÞTlUÁµ]Uxµ2›_)ÀÆNdØAdØ"Å^©$+j{S˲âp<ŠËqq:q¶´]%ÑrMŒ"Ó^fÅÕaòF¶Yž ™Ø "Òž‹8{YÄÙ.˳.û(‡,ÍŽ™]-$Y#Vc½ˆ±iò]ž„ØN˜R¬YH±”cÃ,Ç.ÄØ eEU:tX8y\/½\³ûGýûaú}ý4¿ÛÍ$Ù”}˜Úå·åßü}òL±“aÜOýná²ï¦ù_·ú×/¼øË¿Kq°‰“Aivì/ÿ/ÎýÅÅœÜü—Îþê'ŸÝ€ºìð¯îþ‡…«ÿìè?‡i„ÉÑ_£2†É»lŠÆèÄ¢vî·â?å¦Í§¦[MVŒñ°û'Úµr½ØpMÙóuöz-Þ®ÙÏUéA±±v²‚Îå8Ð!‰Æ$Ȫsù`Уamw=_Ø]ãtP¸NΊÙ{%Sâ¨ä±ȉDND²IqLR bÙej¹¤—KŠ¹tP]»¨®T'7ÕNGQ^´Žª§\UpVíþqä!gtïn9¥×®×x¥Ïêõ¬pï&gìÚû´KöÂ);§…Ov÷ˆ[öéU9d#Eå‘Ý-\²—Ùý´P¯³òê*»\ -óRÖ¬®ÚQh¢¬Ý./ß(s^Ä.ûêŽWš² y¸.º¸‘º¼ÂS^åQXJ9ÈtƒÊ>W¢¢Æg]öYú²øÕÑbËXÇ]ÔiÛÜhk B™±¡±h¢5º6d£Š)؎ܨ£7¦Ž6ê  âXF!<ÉẓÁ›áììœnsó\,,VóþYî¡ÅNZ©÷cŒÔ\`W3ú$×ÐÇÀÓD=PÙa@:¶a`ôTùô& $#p?ª*r •Ä5*ýÆ -2ÈöôqVNü¬­Å Èíh>Z¨äðc²nRŽ†d-ž· -’JI6«~â§þ0÷óGýŠô#yp}zð™Hë„oœà> dÔ~ÖæÈ78Þ«GŽŠ IßE˜ª…f&Ľ苦^ý„ßT%Ø^O^\-AÜ€“g”ãA™¿(Ú¼…Þ®§&z¬Hþĸ¡_j-~ܯ|'µÅûW¯Žo¨¹Ð_DK–ÕZ )¾ÈæKÄí³š`˜¢ZÕ%ó2»q -‹¬¾Ús ra[•K-épªGíšåœ£ƒE¬ê¦àÒ´,ã~—‘¤Ëv™á짊vãÄyŠp'éP$<ÈxäÁû짫5…œDëÄ/-yað|>)U†)F¦îŤÎWÙxò}+µ†N¤Â”ÓÓ˜S>ç'oº‰™Š¯×äêÛM1nJaJóOšÒ8¥ó)]Ì©›üþ˜f§ˆµelém»p‹$c×%?¥°JíÏP¥q#wé¼I—'ÒÕi²îuÛ®YW÷Ý’,än°?DZ‡J›¼K°´†Kk˜ùa‚˜ã‡¢4h9¤[±+š¾eèHt×YŽã¶š-ø³ _5"Ü)¾ìêAº¼+.ňۋ:«¨@b^ä³ÞƒZ]ºN˜Wå «ÿ]qÎÖ{®#ê9†l·?ý—ÌŸ¸´-G˜ªªã˜Mö×Xìm¶Õ‡¬]svÑo\gK½6¸h7¢0Ìäž/D¿!&ú®²Ò«†c¶ÒÏvú¥nƒÿÒ@¿0Ñw•>®,õ³­~ö~š¼~N6}‹_ÓÚ£ii­\à–?ÓÒir]Ry®p«JC “[˜ßÚ ²u>Zx-YôÖñè”»Kã|4ÿÕ -OqG:í–´áðxÊÙñÁ$Ú³ ­.AÕ³kú¸¢.ÔEžš£¦û)€ND. 7³3œ§)ª®È!s<ô:z!LtUüó,(,…ìYÀ^ŠÖ“`½”¨» z[ÉÓÓ•$eénC˜^‹Ókz-R«B(MR5äên¬gẈ×v -”ëBöõBÔ>LÛg -ÞîʇÏçŸqJÔæ³k>†Ã”üœº,™Ïò¹]Héf)­/dö9-eY™Ïn!^®ÒE“Ϋ´õ3tBN×ióäeÙé4±AÝük•üwNr‚u£û!Òwª8¬^"Y»*”µ ­_´®Ãë—öÙÖM*Ï¥ºó—é¬ê\):Õ&6YÅŠ]ì¼ÌoÊó£j«“Ë)¬OܬíÌúÎCºêdÇ_Èò³¡H£PŠ‰PUŸ³‘pm&,†BÖ1t9*E­…³½°X ³ÍPT£ç²’QiÈì¹U© ¦£¾T4¦×™¿/1+Y{z.ËkXÄ®DÑ¥j‹Æ°¨X!jþNb0ô¬9LA-WY.¹}ëyÖ¹–ø–e„KÌ’DRœëDö)‘Ã*­CO1^Šò¶\»)jyf@OÅ.ÏɯÒùÉu¶ýià OE¹mæ]óèð!é!G8ñVV -ÛùÙE"ôXîÎp©ŒÆ9õêɧžÆÀ!{/„•‚Ãoè<&}Fåâñ#}á»hþtwwóêøb÷u~´3¿î¶ÎN5OA4 EËÀ€Ð5L¯` -.³[B+è²×±*&¬‚ -³aík천¢OK;-Ä“.ûƒ¬]Ln´Ëj’85ü<7\ ‹× q«\s.:éAQ~<Õq:žvœîæþ|HƒËXÛEcWãÝ)Â[ÖÓ4àY³…•±íçíº¬FÁðÏ8Q?͆Fs~ÝÍh4?"€Î/ . ^XIUúÞ!’ÎÙ4ôft8åñD´Âvðã}ˆXð.©Sb\ý¤'=Ù¤µ?_ @‹ÕËMžL-Z=û~ôºï7)6£ê°¢ÙVhHêq)ΤIà÷.…";Þ#QI9PeÑ @œgï¼Sÿ&¡·$¶ä¹.ÂÕ„¬'ÿ¦­a²—½Z4FC"4Š–§›œYTÍsžYÄEâ2樌°ðc)Q—q5ù³º¬ôQª8¶¸,z…)N#.ĵa³1ÿd¡¯Ë" äX¦"0ÎÑF+<„™¥¤n’Eçä6“?‘Â:uõƒEŠ'S:º“YÇ¥î;ÞYÛÔ=­ØÓÓ/n˜x’ÂóʼnŠ2¶•§%§ߣ$ÔDUÐ&ãw’¢œw|gñšƒK6¯(·›\êŠ;Ý".¬›<éJXXq¡² ÝE6Dÿ¿ ShϘUÌâH×å•x™ é‡!Ôç ±âVç&åsÈÚŽ”ISIúsÞM”JÓ¬¹ZR.õÇ›BfõÏR)$Ür·§Zé’êä«š$´°[h¬ê´a™9‘fMØÐ-ÿxRjôRëÔ=V໦§VøäŸîéEÿÓTø YÐÚ´Ä¢5ÙÅ,¢¡¢I;2%bçµ"ŽP$Wr=y]ÏпS€ivÈuÙ€ú)4dIEÖ¡¥up©›¨ˆ«R·ðÉ]†™*Z¢ÔÄLàrnRqÎêQ¡+]VµÎDe\¬ÑmšRS–…é"ÓWÚçÁÙH¥é¾:„©Û U§Ó$ìQ÷Aé— -‚ -¿KìÊ“ª°Å_+aÒJ “5FݤƒYÁrNÈž˜SuD…+*Fù˜]âÆ¢ŒQQ´ÌÀæ×+`s+9Ïøël|wÙôN¢Q<=®Õƒ£“»úg”P[¯ñNt;pblÙEƒdFíxŒ†RB¥ ’$© Ëdè241hªÏÆvMeS—¹,ôªÓfAJ„§N$¨YNZ -BK‰f-ÔLëâ§Ëú6µP=jÉîL¶»´ONþ)©{Z±…ûÀ#©{zÑÿ4%ÚUF6×îÚ®W—јP¿W. -\/8p¢‹¢ 3¢ £lTƒœÏhoÔ~Ñ7ã»ipÁ•`»iHA( nb_›°­±Ùr¬U7<׺F¶ž°­ç˜.ÛÆŠ]¬ØÄ\0“)ìj@pž `Åür(£²Úf׺~2yͯbîšM]³¡kiær³«[š¸Væ­Ú¸U›¶NXµºÊ¬uʨµ2i=dÏêN™³¶LYO°^]uæêÉ©Æ ÝLÝÓŠÐDm¤îéEÿ“T˜ár~¡ ¿P_(ÈG¸??þ -¡ ¿P_(ÈÇ»??þ -—P_›×">ý! 
ó ›oðcJ=öÖŘ¼ëñÿž^$t5ŠÁÄ1áÞç%$ ˜[e }¦§1Ýô«¸µ'=a¼”‰¶ú©QÙ|OüãvÁì{PÐ¥¿Ê)åÑ@dD³'¦ÀÎù}43Ù%Öï'Ô,ZÕïÓÆûu‰éý”6<–¿§ÓÝv:°t:pÅé råžÝ¹×éòt’órek¿ü¤Õ´ÎÛçZ|۔ƾ§šâR3ûûÖžõšbŠ*syCbZ8ÐO·*,o[8Ô飫fØ)•k-›¬Šz†3¬† Ë[ñÃÌ™8ßU.`*N^dÄP8çûX’Q“E‘Þ£„÷Æä\ ­KcT‚Eæ˃jõ=˜ z5TãFÛÇa”å>À*èœÒ“mw§¥A96yLÖø(‘–ÓÌpèу»þNžôÈ‹Þ†a$ÞW¿G¥>$0©Á¸hLöÕÍ^º‡Ä0ð+‹häñ+Ú`P?²9þžÇÈæ!b¿«¯ñ•X\'Û\ˆ&n¯}ÆÄ!ZqÓ}ÌÏ6M~¶ù+%fúºòm^|S¹7^¬ü@íë^Rñâ]×»êOWºô!.·ºƒ^¥®Ä©üб0ò…®DÂF ˜{Ì…šˆ¡1ä ö1ߤ|‘ƒØÙ©bÆ:s‹øtš ‡.j:0W9Â\£Émv°P÷ŠóŒ7#àq•O¸L52ӛɊûD/NŸi‰Ð´\êNE.MWç”À¥r}΃+ö»¸_oﶧ»_WkûÇ@Yo’ë ržâTòÖsâ,Ž%ÖîŒÁ Û0£Såq äÒû%\좱?Lußo››|XNèz&~‰±W˜ÜÌËÂ[€Ò× ~Å hrþÑ“Ïôì¨hWNŠ)cê1XaTª“Í{‘=œìÒ1éN\‘»9öS¼0‹‡e<ðŒ°VÀMð6®È™Ùé»HŠk|­Ê«§å$JU±EÁU˜¯YÄdN\™ÊÎuù&°a"uM|¦~7.¾²†ûZ~ÉLÁÇê{çkªÚ-‚q„5G„JL¨^þB=´Y¢H/ZhŒ¯»0C* G´ÈãÛ½\l6ŠÒÙ¡ŠzD5³Ù¥vyÄ¢ -X\¦“[É.ÐUÞCæxÿ˜DvžËmc\ z¿nÅÕ=ýâß“LsÙXE’e¨‹ÿ„߶ÓâÊ‹Ù‰°DU¬ÁB×ä¬%h*Üo@kžÇ®‚–q ˃⎾p¶L+/ËËÙW{“nnQNbßÍH¨åVÉéqé¿X‡,–ÕÍuyÓHq­Ú·M ]C¢'¯ñ‹…Gç¡iåV;×!.ª™ÆÒó¾x—‰oÙ‰“ 4gÇxŒñ»1'^at;ˆ˜w/ºxѣˉ×Ö(ÎXt²¢w”¶ly-û÷»•]oî¾Ï5Ðå²ô_ZöA-[°¨1ñê k©s‰}¬(zv¹Š!RƒãhñP¯b€sUÕ*]M‚!²ÌˆV¹¹ƒóƒIÂbÆ–—¯ˆaΦäzxƒÄöq°}JR–,g¥¹ÿäñ'›’ÙÇÑžï%Úù-Éί¬]8 ­à”1xõRÐ×Mæ;R†[¿Îd<–‡ì})‡£Þæò=`dvÆnÒø\eOA‹+}ÏtÓ×ò¢¯õ-‰ÝúzÄêfÄ%|Þ°ŠiQÄòM`ÝÕéŸ °GÓu÷ÝîØ{<ýüª&ØåK;4âIÃ4özP?óö3_¿ÁP(†sååɶußF.þBáù28ŸÍ·¤Œ:Y°&¯st€•èI?GJŒ@7)èôŠ6TOEw,“-zãì„¿XAÝ£`‹§ÃfòÝà7SüÐÔ}ø«?P…º4ìqÈWv¨Ù!å“%„kÊ×Cò›>ùbh½Ú—ëŸÝdmX^:?ÿß,®…îD/ƒ_ž#óA?¿Tø„òµ:=vãeäíQ.fïÁæ“Éç5ìdï­°öWrßú(L½—ûÕ©S¨ÿ%7/^n§˜ù;?1ôWÙ[ë<ßNQ.C/иn¾}ºlc¾£#úeÔá|áùê¶ó‚3;aßv'¯9wM:‰/µLÝæõçtúÎå^¡*à.ÅNÄF`W¡CfQX– “åák’ºéJ ùR ÛØT.PpŦRàà–f3ô]¶­\çPÙ«la¹ÌV–‹)^¼$34ðü³À6î*„aß$w"ÙíÔÈ0šºõªP=”f+ÔÒU,Q­ßRñZ&$Ð% -(øF»/Ôx -öçýS0K¸››Ò2bní-ZßmÞÞ|èNEòð8{,]v9¥}Húù+\ªuv®øǨðAÙ¢;‘qúç²Y«›6ºÉÏoÉö®¯ªY_Tc'Il¹‘Ê6ºè.ËiC¶þÆéZ¤ù‚´|Ò×â]å«aäB’{'v˜ë0¹ÿgç^õA»í çÊÏΟ -·EB¦(HìÒ+vürÊgég’ˆºé+g—Ozº(¿£¨Ú—± ñbùRÈS/¬žF‹>1WÝr:fÎOy? .L1L©ØÕ` ªu•<xžù?Õå*ówÞM|Ÿp|™Ï#G®Žl™·s‘5ȇ¹Ÿ<úþ— -›Ÿµ|–Á ºéå|kQÏ-Áµ`8;¢…Ì‚“»‰3/üùR-OÒ,¶fQå{Lóó“Tø°hpêÔœº4„[ ÷œ–âÌ$åLrÏ0 õ#‡~ÌbÒEås].:,¸¤fq[‹_ÝÖ2ˆ¤¦wµ\j„ÏaÂZ-Öã‡ìÇ-äê ¸*W¦Œ“­°`ÿ¥¬g:ÈݦÔ,ñ¹ÙTïÅý¹¹âïSá ÙáDÊG׈ ­°²frÎÖ•)Ý|CJN‰h¾%# &Q?á-ñÍfƒôЉEzÛf¾Z,öòqò˜¯ -uÓ%¡! T•}ȪlUdbí½ýµ{o@ƒ©¯VG¢Ÿ+þ1*|P¶èNdœ`[2Ï,u‹ûFæ´¨f1«àˆÌ²XÌf„4_HÒe!•d¶,¬m VœJÝdR(Æ„¡ºâ]ªJ~ ÕÐ;·ýtãöò¶í«ÊÏ1ù$$Ëy0‹lŒ$­Ñ…Á¹4{!6æ×4ÿI³P6¥aO+Ú.ù½ iaòüñ?%:0TØztÂ8„Þ8GTó;÷{jŒíj(ëõ£nq©¿É‚ã¦íÔ¸O{sç~Ò¯êíz?øpigý>­úõ£~å{ ÖÑ0¬|¾7½ºVçòwåö~Ì=Ýb½¼Ãzyƒ5±ñ6ðÆK¼Bù¿ÍÿŸ[2˜µbVTµÝŠòY“VìPÌœ§ðÓ­ÁÅ;ú»;Gg±¸Š»‹åeñç^¯¾ÞËy®—¶WòŒ‹‹C×—òøéRÛ¼—çr:XÔpdªÍAÛŽ}Ÿ°NÌ0Šsso,‚p\[ú§¨ksßìÕåÅÆ.+€ìçzs£…,·­÷T)³rüŒR,YXÞó©w*6‘GcåE¾íúý#ÕÿýöþfÀGø%âã—ˆ_">þñÇ­]Ž§Ãh7.™<À`5Ütê>i¥?éb‰²qæËðæëð.2:~¹o˜®Ä›/Å+׌—ËësðG¡ô«ð£}AojnZ-éSùßk¦M64…‡ÖèÉ[}±¯Íµ8ËH…íØõÅÍX]e³^{2ªöá;ˆlív0íiršà¯¶Âéä©gÈ»ëiÇÒ“n©o†7–«Û!Yˆr`ñ†æ Çå.®ª!ãiÃŒ£\ý-û¡>ÚròžïjþL-ø¡c€ÿù›ÛwÇÿ¶»xyóü¯Ø‚«?Šû´ÊuÛßûF­N8Â|£–€©—«Èýù -FýjqÁïú†é†n‚NWeQN>íê—õÅ/¢•ì¾gpRA ™¢“ºOÊi‚›) ĵr°Òïý˜7•5€¾«¬S5Ãj¥\eïÓ>ßÚQt óÍ{¦û;º|Wy“¸ðèÕ Y»Ý}ïh°åÅÑÈÿÐx°5pÍ|‘ÐHQ ó—“ï€ë¨[àÔ ¹^׫;Âë…pµX*9úù"†‡–Âã—1l,‡nø~áwUˆ`¹^÷ƒðÎÛ T¦ëQ¶.Y‚ýpæûº -·,¨KUaèg<ýÅ’î¦5}ŠÛ<ª¿ÆÔÏáŽÝ÷ŒwLñ8Ù/Ká—¥ðËRøe)œ%üÁñ”Áë ?–ª žýñþî³7·wïnï¾þÍorû2£ûãkæ8ÍùìæÝ»ã›;ôç/¿}ûö¢|þe7Žû1:O+?¤`·³ÞïÁXÝ8,Ï5qϘ¶Ñ°)~”–ŸËÿÿåïúÇÿ¼—_ËXüË·òç?â×ÃÿcLvØýù_ûÝ }ósüS>]a÷ -™¶c÷û­R¥¿_Ô¿õl³¶;üwvþæÝÕíów·÷w7o¾ÝýV´'g÷÷/1xŸä|vxqûîþͳ‹›çÅ`?ûòöåñÙçÇçï~½ûoxáÿÆõÈȯÿå½Tw%O>•h!ôàõÈ’gUY;`ÏBµ±7íã8&¬ÑÚ·¾P}h®õõ ]®Ma÷/7òùÕ$9cö=ØEà -a†½ƒÀ¢“€á²kFH̶]i±w gûÈMcIÆ]HVÂ*x{°  É@~Ù…°7˜›ÝsV6„=(UBÞ†/Ù} ¬mp{ÁÕ½M;?î!ïð*û¸74ëÚBâwÞíAol®mÜ÷ OhÂùÎïÊ&iBÊmKXý€{Œÿí÷‘ýh½ñè¡M{ŒÑê’ßC<±î!p™´³‘àh\DczFB`$vÎj—˜á0þȃG«V‡Lƒ°4{Ö‚e:l®˜ö`Õñª CW¼o‘á°A]ÈòC$`†uC¸Øï98#†Å8Ä=¯ -ÜẎA± @ÀvÞìÇ¢yÃ…½/•™=dô‡øùX8el  UÌОMŠ{ÏèogyØìLÀZ1W÷#- Î’†Ûa†Qúáƒ÷Äà°. 
žc 0D‚&%+Á ÈD9 -DiÜýÆìqXpÞÝ°ÇÒ¦d2`UaA`Ñd ±XÜ#Ö (¯YÙcû­+ rµøžáagR ’=²Ë˜ø­ÄêõAžû$ Äa‰çJH zEÝÇÚÄÀaòË‘DD*4¡ ‰ÚWçuQy,Æ€áóa—•3û«(ë&w!î©Æ[ë¡ç[h¯‹»è±Èzî-ÚÂÏßð ¢mX—Zv™±\=81=¦;EBz 2uÔ›bN!®be£ºHo( Òç‚á’Ç, ¶ˆ…Å]6‰7ö¹á§PÔí\áÁÀíÃ4ø=qVI(tkŒ*æ›Ǿ®*tj{Œ­-±q®Fl¶ ƒf¨½fU¨9 áXRºã h^ÑIVw›Ô…uM`°@uˆ ‘0ÖŽó6$›ž¤ò$éb-_ ½’Å:4Ör/ ‰|_ÈË«­LìÆ&ËòÁ•q‰c³ž‚£`eh{<æUÀèR~®»½ß†¼dÀÍï€ð`¦±)û»Å7ªŒÒ²²ë×™ØÃàCvÍg°É0^¶m[ɘ¶ì¢?óKõèLŸ91letA1Übb `…°(0öÑ«ÍL¯ÉaW‚cäJûë#3Ðæ„ P5*­,Ï,ù’¡ë Û -|2±\Aæ·¼¶ÒöŒ^|¦zžÛ¦•Uyvë«ùˆÙãDØjZÎÐÚêMoÕC0}çÄÀ•‘ý/Âÿþôdfä“·Ïþpöïòþõ·Ïî¿æäwoîß¿Vndûϯ7ïŽ/žá+®eÜýê×»ùçGù,aðöV®!{²)=8èÄT< m\Ø84{*ÆÀa\F³rXÜd\’Yq7™qñÃ>Œ Ä.‘ÌxÝÇm&!yhÂÆBsh © 'Tèå…#6¿æ“Ê‹ FÉ,V2qJ,,~­„o T¯ºÔ›] -Úˆ&cÌoHuu&H2s=SИ´3–L*ÎdH|8ÚÑ<s‰>r©î0lûHoÄYȳ¶)!_òÂà²W†þMõ`ƒk=àtÈ™~`ÇÙH”ýЖé\ºRgâzÑÌÀÀ“plŸ—i[Siœ˜' ˜‹„£t7X, œ¯63Áöó¬áC"úÞÁȇ•!'þ ÿ©#Ã{1"‹b×q*Ê©v‘xãàÀ7×ÝdäM d•i°j¶¨þL¯dsjR?“ѶùSáºÃSõ'†iE;¶¶+x$´'aÙ±_ã0€ì` ª+ø$ GžˆE„Çóù’Ž¥KwõPù"áÉÊ“Úë~µà$1ѱα€=ä)Û=wIkÁ¸‘ðqóG,.%7#+„à Ïy#3}<é ®¤ ‚ÒNì°ŠDEN N-È~fàuN…GŠÉáyÐ;Ÿ«C¦\!Ç“?¯G£Ž4*0—ع‘Ì%ÎiVwbÜ+¶ˆ4 # ô›z¬°W[yEò­5Æfâ9d8t{…[Åâ ÄÞFh8Cyþ¼Ì4¸NÎ4Øûù0Î,¶´z-¿Qgä†ieu&m -˜¢ê+‰ÓëcÛ²’¡µUݙߪú?}e{ÌV›ÏìÎ_ÿ<¤¡DÚÓô‡…D<»­´Î {‚•áxñþ9­áÚyÑs#‘0|ËtÊÐó ¢¼A´ kn~ ³ )xL(¢²øN‘§ÕÕ™iOõé®ý„þ€ÛÖå ­®îÓôV5Óg¶nÞ039íuv6ªj°B²b*qøíF ˜çVkÁÓ #ƒÅ0öªœ*fœº ‰IÞȦ5lNâôôtÐSrÓ–¢œŒyèR…ÁƒÔ%§ 8šÈ1]†C'¬hð†w•Ú`y{*šŠ} ;gOÌøQÆ,¥I‘ˆS^öNy†®¯ d†zb¬b#kK9‚¦ãÀ‘-¦1ˆiºyÁae#W‡ó‚ŠéHUI1ŒÈu! Ãà±Ùßb ;IS*nO@xðÞKY“YŒW\‰·q$A&K˜±‚²ïéüä]\ÈÈ¿½Ö›GòôŠ¡©K3 -–|6Â5KT›™-WÍgŠ­«nÚÒ>Ötg~©ò™Sö<¹!”a*#¸2µ™ÙrÅSÎa²pA`'~}<öª]ÙÅx^záóy Rƒ5½Õkã¸RÁ`,ª¯3Ƨ63›¬Úïd#WÛº…ilêÊT¸îóTý‰‘ú?Ë"öÓ«ØA1±Àé#óúœM›ÉÉ…Ù‹§*š)nµ xRF¥á¹Q %Ô”ÌÏÄ'$qÛ+ú;d‡›:cÙŒ¢Â¯2I8z,M8¤‡èæˆ6x‹žb¿•}'SÎV¬ôv+CÇpØB¶H²]Üx½™Y“]&dÐ?çѹ¯ôý©W#¶ÃTN9`d†É¢)…3AMx²_}pœíuB¤œâ^C?1*!èj¸¦ï¦!R—Љ§û}ˆ]OÃV=6ûâ8nq³-u‰ºOÅ/£*•Ôk0õÔù]Û*û ¶RÙz.î¦Ou ýR=6M=í,Ômytž>>á¥ñÄ¡ÔÏa€xëpv¸MoBª¶À§ o#ÍÞ¡u+d5»qôNxå_HA;И›ÆmGC°ÈŠb)át[CPmp¾ÔRÐÂ(NƒkÒH~-ÒQ4 §ò¡ÊQäMr´!D|ëŒH–LˆY²‘Æ¿Ú+‘%tŽzkãxKÓ¦{"µÛ1Q·€jÆÞ·~Š<Ëuh qQ iÉ6R«E„GéxËs‘<§!-—j¹Üø0¢0v%µ½â†Ûú2Ò,Qç©Þ¢øoÖ.ŠÔLÒ`ŽRvëBíÝŽ]ìɼ¡!tÜkê  -]#1LnsõAÒŒpq}} WqËÁG7| º¹E#:×mgH‘Îe4Hç^ÎõŠ¤o¡ŸI­{¤AÈo»GŠ¶žê,jëé…Ô¸IR\ sÀˆrfÃ_Òsv±/¢¥rÑŸpœ‹+{nû±õ 4äÐ{*ĸySö¥s ŽyŠ ¸rªä4ÑþiqàY_{WÒ>0ZÇãÑa*lëgÉå‚åN9)ô“Spíp)j6š•©fãLç¥í%y® Ëɵ.˜àˆ .baBÖ=ŽÛ¾˜VœÊÄ2IßðÉ”q¶ø!G‡ÛÊ9“¦pEµs&1$ú+$ʆcã¥I«¿hÎÀjTº4þšàB!:ãóŽšç⽶tÜ´–ªX¢ÏÐß`[NKNˆ*=OàNZD„2a¥§!ÛÛŸNÚ#=ÍÝXòŹ“Ú`oÈB÷Ô+­—gs¶<âîÉòJÛ{ª_̶ß'#ï1jLãJ‘0>-ܺ[6%6B7ê©=7šÒ”¨º´é0Êш T8ãÄ˼j Ù` (¯È~lûÔ”Øò%ݨ¦ݦ)ÍÒI7S(ô“›<‚Ì›þ¦4¹yµb;:´Ž§Ø`GLëÝY26Oç·ÖN¤ß« TÍÞrEåæ bǦko]R-õyÔµ6Λ.©ó[Í(Õß{l°‘ÈvÏp&{;Fª'yèÓ -O÷ºŽ™½qRÖÊöŒ«ãÙɦyë†-E=rO³5õ¶‚Q“n©¼ÇS1!RXgH†ƒSGW-`h “ÎYOñ!ºÉÁ ÁdàeÖÑ‹bÜÕ•Çb$÷‚“MHê~Ùu½ˆŒôKm U—ºì1‹Ã³gƒ›zèšÊ)TÇ$|˜ì¢×|¦GÓó^4< ÇD¢ÀT¼…¿„ƒÕÅkŽ}Âk}$sŒ/£å—äxVŸ :é>:;•fÔQûJUo ´ËáS½ž=`²ÀƒeëL[Ê(+Kˆ2ü.sè ‚#ƒ ¡_<=Ëz¼*÷„¶„ê&A,•û‰±ÿ¥@ÍÁs“¯£WõMhKT]Ñ/Õ¥0”r¼ƒÅǺ¶-§•¥Ôñ!iºTxž·‰ Hór=”Í÷’GÅãŸÝqΊ² 9‚ØÛìt9‘ D·Ï^f¬re§W/þ‹²C6|Ú±«hÁ= “s»øÈŒröRk[gvžz¹Å¶ -¸\…§ š}WÛR>Ó´}jÝÞ1À¹ ¸$µÛ{SbÓí½­§ö[·BPÛBÿ€ ÇxJOFûÔSK²íÿN]am¼î³f-š…1wÔ˜n”¨—Ã#žñ–j0QCá}JK›.òÖÒñŒ¸|¬õ•oD†â¢ÞdlúÏ·¯×>ñmÚUW6}ëEèzq»o['{:æ -÷豚’øÉç®Ô›n÷¯7£Ù4á±Yùø<òÉCతžw±MsASª˜ o€b/à»d$„ߥ½€G,F™FV7¢Ìd/ ÎÛ¶*e½-öÖÔ`À.1qe/hìõÅ^И׋ €.<(Šòt·c´4PífdzF£6ÖÔd'(/ÍåYc OÇc¶€úÀZ­/®É‚·ÓŽÓm›h—Nžú—Àó{«žf&kóÀãs]£òè<*º×-ó@[ªVÉ‹UT6/88^ƒÝ¨õùÜËx”Úmó€Øj•hTOnX8”ƒ3}¬yžuMSš[šôR•F¾iJ­Ôo{´ehG¦­§™ƒ¦)ÍÒv(ÂOïÎäC>Øxjêé$jT*“ÇÌ9·¥’ê|#ø¯$\>»OSCdT9€Ä[¥ÔDÅ¢Ü m‰âú$‘ ¸Tq¶õPà2,áéŸÑ6¤Ê¯ºSŽÈU™Q­|‘ê±Q¥u+À+¤% nÍhÓ›ºDñ¼Z J[M3®MS›Ÿ1øÁòȈd( Bt˜T9éné³É†=Åbª0AéôàRÑ÷G8;uƒÂ7·Å3÷†Ð3 ºruL#WG¿qáx¸ŒTˆD~ÉÑuÕ;PkÚBèÓ…S‰ËDÈW ¢¨WÄSYxwsF#”0b%úd®,‰±îBÈ®ðT/[qè·1HOüÕóCeDF‰{ -5Ýl,¡à’D $«ÓÓNDá‹öO‚‰SAQ`üÚÃø;¯Ö Þ»B%5&ÇSü§•ÎÑÒaè[™äK€»¢÷„eп6‚SÂK\>I\#°yiØF”‚ÂLaÒ²íBÜîJALò"o?“ø G®IŒ¢“`¯Æ†ðàÞ=¾+­¼¥‘Œ¢!ƒŒ©ÅGøzÖ˜¸^m—ʦ:Z¢œub¡€H¹·±ç4jòer4–êtš9(@õâY—ÐërX%öXJ¥(¼é—šRt¦4¢¨{Ú¤Ú¶X%°Z T»Õ§ªD¡FÕØ4õ4ãÛ´å±yúèÔ+Ø`KÄGQØð¤Ë 
?>™—V]jì3–‘7*Ð;*QìÀ@TòrºÄSÕâ$Âæ¾·Ù“D`z‡öŒƒ„[ãXJ”+D]…ï·bò'µDFc%a8êx6÷¯£özAÄ£rzðAK÷hÕ ( ÷Øpb”¥¢V•™V£˜H?%6ÎdórU Y)mí¹úšjÒ¨¼B Œ–„ÂYk:•VÃ4È -+¥%TuRT9=rãF=ÞëóÚ+yB¤”‰æuš}› ÌY¡M§…41HëRÉ©²'Pž2jÉ9'‰‘5òî +S4I¶Þ¤‡™¾ª@­Ù³1‡#’_nµ!+ëPÔ OË 1¢M¼ÆÜP,Q}žϵÚå”ÔøFÛC:Þ(¥¾+ %Á1J¥9]W@Îè1ÐÝ‚UhS,öµwcÖzº1WRë’GÔ¾D»¨ê/H\2öc^ÚàŒø©38FW5®Tâè¹LdA'k.1ÐZ=SÄ2 )bà:ºZ+‘àÇ‚¶”‡Se¹å¸’µÓ<÷­R2tÎâùaÕOE}|ÀááSÓ—òAœèØ.~¬!ªæTfÙ3žÒh—ÎäêwØ&mÂ`ı•Ä4ÛWBRú0 ‹tK— G—rçz˜l¬ðžÊ ¯ìmsd§zñl0¥¯˜3r«.±]Ö¾‘ô—‰bS†øhÄ$ím¨ rEQ¬'Ãw¥“Õ‘M²$!õX FGXü Hظ^_m›<`zÀ(X¬¡O.{Ú¸ìéÚ%œ«,OðkÖäpâU %¸X|Ä0öA Ý·êI N`4?9À­¦ÔªéwêBaOìL:/QÓ¾Ûh­áû/EçÍV‡ªú¡f`šzê±mšòÈ Mžè jê¾Í *( ,;¨æÀØ’WÛ¥ÆìÌÃ!§ÈêÉqSD¹ªÔ×ób]q¡JØa(Š][@ã cXé=dÔ¥£­‡’nÒi˜¼¶)M‰ªKåKU©¤Z0³ƒ„Ömaô‚"½tœi»TÐÕ#ÓVÓŒnÓ”ÇfišO,mBzÂAxÈv´>Q%ÕŒùД¢€•Ä˜FŸš,Ad2(µ”Ì !nèΕ ‘©¦Ã U~¨ÐÓë ­Ïm ¥¦8g=ý’{^y¶QëÅ^@ä7Д¨;Röu)zj ®OÔGlÕOV¹ÝpØèQ]BU!:ÍÛÍH6 xtF>F£gÙDrÔ£ŒàTf…ý&ÊS¢|ƒ©Â€#é QâÕfF8à - ŽªgA:óIBÙ&uÅ -N\úõÄë­`ÿ×ðp­y½¹ñÁ -¶A…×”PQZãÚ×k 8jóTii·r%FS4‰ÎÂY]•É -ƒÄR¦•eèBÅ‘h]Žüj:MïÇbâ¬aæÔ%íq¯D[9®QÑáÐzÐ÷i£D¤-22úb²Ú¶… 1‹m‘K>´ sŒñ– 5âÇ0@ 2×”PI| 2×VS£Ã4jôŸë±\Ù(Q̘T”XW4ìM)³—Ú\Ί345^t§†¼—c´v”‹ô*æç -\ŽÎ©â{A×3Á¬ÁåèTÊ`%Ì v‡,¯ -\Ž:VY.Ì×7TØr “°KG÷F 0«°åDS‹ÙMm¾E™c[EuË» ‰Ù5¥š±iÚRï#“4Íf žÆ]žäãŠã6X)èƒp‰ꮆm3ÛQ §v úV(\Á -2®©¥›kÒ–¨:ô¼Û­/MÅ ®4}jÑëÄÅIžžÑ[ZЮ–º–fdë†<:C*Ç~~œ»M¨ˆx×@D4Èw DD·‰Ñ`á5 (^ƒÑøÃo‚D40y HDƒ—×€D4Ày›  ‚^Ñ@é5 So¢×kÀ!˜½¢qß߇hPôpˆ€¯‡hë؇h¢š‘mÐù”ˆ ¦o¢Æëk°!à¾$¢Að[‚D4~ HDAòk°!&H¿-Hˆ Û¯F‚(  DûÛ€(¸ ðClð -à&ÌCÁlÐ -8`êQ7¡¸ÀÊ¡À6 ~à¡Ál  Á6¢ª†ÜÄp(Ø‚ zC2Ø 7Ôhƒ›à 5ì`ƒÝPã6ý:¥‚ÖÛÄpØ(U'4 ~ øB ¸‰áÐ -¶ÕÔX„mSÚ€¥*à„¦- øBÝ¥M‡fd6ªiF·nÊ£³´d%Wø~›¥*L…(X@ôÁ%tCƒY8¿U¡n|§)±ƒ°Qj¦Ð¶ ã0´í^ 7Lœ -7ÃRæ±ÑÝ2f´1t²ˆI(<Ø”ÚR´û‘§À T¿ØqO$§§ %©O ÑÞ@”Ô°Pº7¶%Ô,WX=jNF»UÎjÐR1Üg¶ÚÒ”¨ú4×¥@ª…oÏ?j‹¯Ú’…10K XÃVŸªú¥flêzÚñ­Ûòè.˜¶å¾¶ê1_Œ3j˜v÷²M~ÕçÙÓ£*åôb /»Ûh ˜VZV*$%:ÎS)£¶k¥wC(Ž m!BÐ;ÊÓ¶¥ñ©5|ú=½ù8!Þ™#¸zòôchò'B.î?4[Ú¶OàD1ÏózÙæM;šUwŠ…¼)¥Wn7-ðÜ6_Ÿ—s®ê{y©ZýÊ#C>/ŽŸ\+X1ª˜§¬W•‡ø"2.ƒšì9Ñ"êCšÅ{ ‰zÀQWÄ1ÈvH7Z‰S¥=‹ª¹`y«¦€ªÜ{ ­_2ÈÃWƒLõ÷ÛU7J€OU*AH§M|š”úk èíD™ '¸xA·í¯J<ÏV™jšzê‘lZðÈ||Rgä¦cÕ:ç†\(Õ|&é6mÛÒâæ¦C9³‚ü“7ÇØlo©ZsT¶^Šùj3ÓÕK´o$có±kè99’–8/ -DµvëîÝ”¡ŒË˜ Ô-@nšÞb´!°í§Aœú™úyn›VVåqÓðBú8ŽMË%¢dÝ¡é­zÊwN ÜÇCoEL³‡l‘/ÛUçäÈnÜÖ¥¨xˆ‚`;—(½VÃÚÑQU¦½•8;S!ÅQ2Š$Ü”(G2ïXä¹7ˆÃp[çho³Aî“mÛÒ”¨úTÎäªTRø:O7Ò ñyU[†,Ö%7Š+CÛ§ªD‘⪱iêiÆ·iËcóôsŠsÕš²!»•é=ŠªTÖ›ÝÖÚR4®J„ áÙUã u„Oëù‚WýŒê‚-úuïxÚº)C/t<'qûÝò­>;—{LlŠß« ÔÍÖú›RÄãeh1a~›ˆ½—‡5øúD]{Óº„~©î[O3ÐM[›ŠF…DÖ#æÛ[„2X)Yá_2m¶1 XàAAUü$b}öN£úó=@Æ_.FÃÆ U†»=Dd¾NÛ‡ÝzÄ\äGÂþ…­&4%ªµ{UŠ^¬I⎃^7Ô´%îBÊ0ä!ÙeWª ý@3Íëe›/Ÿú‡¦ˆ5µg<ÞÈÓ@5ì`P¹ƒbó2iJÑÊÔÊí³n’¿ƒÀ¯ðd"ñË!Š%FSü(·ø&_‡š¶2A/Fî°m*éÅn¸ñù’Qµ¾T«™V<ìb?­ný=” Vª—ÛV×Ê\÷½­¦¿¦)ÍÃÏ(0Œš:3ÖÿhÄFóyiL™ïa£­>‡h‹?ˆ83Ä|¿/‰· '‹V‘·Šî‚à;“Ú*Íy ¢ h3š\UÄq^©x‹[miJT])VÙu)¬ŒÕAцA}.×mA SØ’7úT—xžm=6m=yXÛ&œ˜Œ²>~0ñ§"ƒÿÜÞü‘‘Û÷ˈX ¶5`†±÷§ŒÿÒà<Ù@DÒŒª·B%KbŽw-YâšrÛd b‚ÖÑ@!ÃIq8ySH¶€ÆííØâ‹ÑCFôY5¬"S:&F/«ÞºDÌ :Šk±Ã‚@ím"†¡mý˜#V@ar‚-PXdXs  °$áü¾ÅK#¹`T°`¢èÉöç Œ¾>tëÙ CסÌ ]ÞÁ*örd&ѵqÔèÚÍ6@cl…ë©qÁ(†K…vrQ¯Ý:úB»ý5gV0[XÅIeÐÅ›“¸6¾½÷“ ÊLèÛÀ+èëï̈Wsf˜5§ÂÚš[·Õ5÷©~k…ê;'ÇîQÏŸ>fÑnCp1£ nQ«5!m kl­$8©n‚Ô÷Ák -¹óÕnÃgÑóÑp¾jÔ,*}Zˆ+ ÁLÜÈÉ¥J·Í}.«F±"|ñ ,·•Ó·0Wý6®UßYJÀd3~•¥ïj--R•Íݨq¥xsŒÛ¨T¼¡w+Ï3êH>_aPùü‘-ä)¹\§Âš -:ç ª$~WŠ{4ßÂI ¹'5ˆ÷Ì7±£P›é Åùp¾ŠŠF+ß‚‡ÂqA7üŠ;1¶XPCÊËpÊÈ…OÏDÄ ’î -Jö] ú´¹×T™‡^mßiάÀ”ä6ä1´0LäœßqâN]Hó¡ìŒi?3gl@Í™0Òü -RijÜ ÓÜ£*sƒê3'G® 0ÊŠ)´·4gÖÐFâ4´ŠD½tR ²{2ã’Òžëh @©dlâ&•Ì£¨|§7ÊÛGʪó樾rrÜÊÀ:¹¦äîÑ”Yƒ ‘¢É¡Pãüö¢ÅÛ‚9¢ Vë—È£‰ßwý™’±… 4gVÐBå3‹hjÒ·hj)3B]שá)ãÇ«ªx»o€Í™þçnÖ tP¬y·<¤çÇ·è½ÒÆw¦Œ (ž9³Bò™¿SÍ­ÛÀšûT¿5BýScW—¶¶àNM™5*ˆËèÜ í‡i<„#ÌÛ´‘I×ï^âë- 9sÂ3}&£öLmZ ûL(e¦a¨ê:5<[^ªŒ~°¼î'QíÍ6ˆB[ªÆ0àíFE#b¤oÀ)€#ÅüŽÛ( - ,0ɶ™8+xGEûÁ)c b`ʬ -¦ïÔàSë¶ÚÎÕ¥Úª¿üø@ÿà*›¾ôíD B¿;ûüx³ªŸ¼óìêöճώožïÞ=ûŸÇoµ^¿;ûäîÝFiþsó——GmøïïŸÏï„Sï|öæø·Ûãߟ}~ÿ÷·¹{ŽþƒÔõ‡ÞƒE;Ù¸/oñ©ÿq¼ýú›Üá _2|íÃ/þóí‹wßœÐ\ò÷ǯÞ=ÃL\¿¹¿{÷Äž\Þ¿|ûX½_Þ¿®ª=]–s€½=¾‡³šÚC÷矸g‡»ù]ýûâøõí]~òÛݯþñöoÇ_wúh‹¢._á’u½8ùSwA â=xj½ È*4V$צҲüÿ_þÞè 
_wiªßý#~ý7<ü;>·ûÃîÏÿÚï^hŸ‹#-©¿3Å3>ØšÝï·J5½úýü‡ò6k¿{ò¾Ívqóü¯·w_ë2ûüø|½¾yq÷©üNä!1xY -ѽ„/Þ-*YÑG ă Œ ²TRû`'§ªPƒø1l^·HÍ=y:SÃ`˃H™ÉàFfòÒ‹DÃnÈà-ÌÀgGfÕ¢Òé…• -ž"ϧ+D:ˆÏì ÷t÷úé qÀ¢?ò‚ö -¡Øjµ.‰GÕ%—zX+!ìjàiL<ñuFœñzHí¢#„üì$ /Ù@í“j|eJUŸôzP^ŽÄm©§4,2iÏñ*è?2¬2ÈåDÑI†+Ò™ÔƸ~Ö&g˜Û{9ØÞÌ6ËzpY·EF“A«„ -]Îpv0ÿâ•P”‚ÙA}áÝɆzâÞõ¤o<ò@-•TXu A®Œx8†ñ¹Ñq$ÔŨ£â°ŒcsäUI„»‘@SfPKŒ!Q Mõ˜QžÜ%¹QÊc}²»¥'ΠX|ª„å…ݦòŒùru5òy§‚–}éÁO–2”¢Â€¥z«F)ö’¥’|8iÞ¨ò\êá…pü’Õ†$ºg‘4Hě꣕¦ÒR¡²ù¢?2#N¯7#èA d(†¬Áé³þ—5céðÂÀXZ ß&šúˆ±M¼bXTpaɧ@Ö×щAØ1“ý'Pu¯ñ ‘X&Øg}`´R÷´RIMô’ü%ªh8à¼é°Wæ1­øH°ô0T«4ƒî鈨¨–Æ ÷á+*–PDïcØ)!Fp¾ÄŠû–ÄßÒ²/÷ºÑÓòC=n¤®nQ#jvrЄD ª›r ¯%äž#®XRÂL×”J)ZƒÅSX¿4òÖqˆŠŠËζD'¦_5™¼JCš8k쓈L#U¸QKXŒ7K¸X°‡!¡DAe ZŠV…ò -?Dì'ÍH¢*µ1©ÀÖë ccþÃíù?E¤enÇzí -/‹çàÙ)ª& cÛóþEÎ2/‹%,U&á+Ã* D\bìâ>±<d›3€>ÑUÏCŠðYç;P¬ñF/t‡”A/¬‘q†*‚žæ›Aà o˜çHâ8Éë3e‡Y‰†Ì¦LúAR¿d ¥Ð§8@¹HxÅ—‘züH¿I·¯”dš%)!D75ä4Qq­2̸|‰Ì‹ *JG™ÉË =ýrÀиhÂ"ŠHæ¹?°¬h¸VC÷-±CœT‡ÎeG^/êÅõ›ã ÕI08ž§ 0õb•ÏG“ O¬,drK€JƒÀÒ"@À±lÁ~UG+^?ï-Y:‚Dˆ0z0´òŽ;zfj<Бs³@j‚T"e±”ä¶WjᇠŠK®™¢+ùŒP žv++™}?êMWâm੨Á¡™bô’tá¼ØndÄ2ì=ï{aX;xnÌt¹º¾—yË0¾¹¶A¯]ÃsCƒUùþó¼& ™KÏ;ÚÑ]‚’qõqY‚¶“^PübÊPCâ(‚IT4ªO‡Éä¦éé©/'¨ŸÍ7g¨Ý•ã˜¤ŽÍÑ@Å1PåJÃ‰ï ´33I™I¿áÈÀè1_×A.Ô’+Ê[&öjoÑï€Ë¢eÏ•õT<ðB#uõƒaD»Ã#šØY|ÅeŒ„ü¸§áÊJ†õeÐè*mÅ 鹘ú 1:2 {ÎSw'pïV4ï^.w¢=ÛŒ…u Šš b­Çlì|Ôu f|¶ (j’ŒžÖvÁq%ÃqŽ=a'œ¡H¦A2ÁxÉsyA4¨¸•êRF%ŒÚº‘VvfŒcL‰Â·d 4þŒ¢¡£+H¼ßÒÊ~•yâÐáäÐ%†p[Ò´&Ç+a§pœ ÜŸ âF ~N€ ^&æï Ž)1æÀÁa–áH”rï)ßOHîc\|È6sZ¿yf”#[ã(A ):àŒ%tÊÕ«\1ÒˆCìxKà+]Yrëá¨(ÍÑŠÿƒ,Å$Ñ~ôç 8„ã’EÚY®#HŸ•ûÌGÞNùE/k݆¤ñwr53š‡ 2¹S£ÖŸÄoSx™$©×.$¶:ÚHKÚ’dœ0¡²ÀÌÂè1Ó‰²üÖ`ÄPhh= Ë5ÚdI%y=XAë¥ÔKßUlHÎbläÑ#ÏÅ3”[$‘3ôð” ½6Š0 w%Óÿ`qcˆ•[/|äÕ½V€„Y· “ -ƒL“<·ùŠ å_ ,ð2 ÖÆP¸"nˆ™¾,Sа”yÁxWO¨,ä1#¨«ªUgžêûŽzhÐ3äíi¢rN8|ò2Äu$Z­„Ûí˜KhË%‚OÀÉÎϳrÆ‘nÅž¬”Ÿß -V·%˜BÞ -Aƒz?æ;¦ÍÊ‘ÃîhÒá2—¹2¹h3Ĉé£ZÁ„!Ö9ñBékC­hô" ö$Î¥#Ü*u„6$æ @Ã(ç93ÅQ‡žGâÚÉwãò¹¡ö—žQßÌÊL¹Å”: -!L#9O'™cÒ·œFàò¥¬¢—Ä®ÆÒ6_ì âCÅ[néCeéWÀQ êC¥’uŸ„—G0»ñ|à “"€ žb¸U aPß*·ۣs…¸MãdH„qèy9ús¨72iÓ§‘-ØlheåáÌ´2U’$”.šö´–¾`G1µç¡DÖo”¼”ÍVsþíè &•på°-HÎ+?§Ã3dïÛ‘ð¤BÔÕ ¡ÉG#É=¬z_Êí›Qü`âÂ| |¤ÞBüSÊb§2C »‘z‚AŽ ¢3C¼\J%¹× f$ñÊÃaÉŽÉß!ª#>=VPS+¬ÑÄq)ç ‘ŒAÌDŸ«†Êmliz#`7Ró§”R¬À–Òû+Öýˬ?1œ hè[D‹¦að=…LÎÀ-Gý§G>ã I@zàAgÀ¡•õ -KöûU4<.X …?BdY"l’FS“¯äR°T£ÆÙçEÍOX'ªùy°23‰IhP”ôHŽc†\Bæè’Hd(ºŠzáéS0U'»<ziGaÄ\ÆŒAÜ! 
y½¢ÏŒZ…fVbÊŒ8QoòÑ ¹€j$a.Ô¬Ìk`qÔxã¦;¦`ÐõT¦GPº-=qU›¨ Én™Q™=+þKI»J—À®D¢‘«Ü±©å*wªÓJ†´ ”QÄÞâb˜ß"õ(^Š‘wÌîä8¡úRL†bJ$[mhÚäC*ĉa…e¤Ê¯L.‰š>dnM®o¥°ïi^¡Ï’+23ņ̞{™™NìeCŽ yO¼¾%»±\ŒAˆ›Ì€<’¦êÆ$1üØ0S×'3Ä64ò2ùNê3¼“àJ³ºA=ªÒPŒJKX¿è¨8÷¨^F=“ÝN(÷˜Ab3¡8èy¨Œ:‡"ÉaÂÑý"•wü"ýq¸‡’‘‹´¹L¹êS´”ɇN§¡¤)|&4Žw¾žàT‹Œc†\Em©lÌãaÍ´2SVg¢ *ßÖëpå-#dçñÜÊEü Nþ=”9ãÝ!âÄ<…móB"¥–¦QÊÁÛ]„ƒ6ƒPæ…l2‡¬ìP ²x`«zðÿTòXt}VÝ':%—Ǻ™³ØBîLü‡Ë+YIÑmªgE#NN§É _\‰ ©3i3 †ž`ÚúUP2¡¾ŽœN½ý<ã¾Êõ]rñ47X ˆÌTç=cö‰ƒ™Äôâ)Xd••œ -T„q`8 \(Åe˜œ=ž¡¿ª5Š âàd†¸ôôƒBÞdºQjq½ŸÅߟ+_NuyâöŽçœ -.;äç”OrŸ‹¬“ùüá/“Ñ;#è¯)êZF¯{QÊò–uqØað­š”ŒádíÅ&ààÓ}cyÉêrM*ï1ÊÛ‡‹6+H…äÁ6gt¶ÔQ”'B¢È/SåÈP`Z\"•rTL QŒy# 5ÌHÙpõ#~GRHÃT]Ï㙼HZ2mR»!Å&ü-ì ÔB‹KR«A"g¢bxþF™mJÉŒ$PW*‘F‰K)%3Š0 -aHÙ¥š22¹þ8ë¸H%³žxu´ME|2YØó -²æ¨ Â<âÀ‰¬QUé|¯9+¨•–b™G=à{E$ÀI(L—‹—ºðzqç -q -‘`ô‹ý¨øq„å—²²B"/‘ÛböÉsRY …ivGi)ˆPfŠ·UT@Pn¸oäJ¤Q2µâÖ¨BMÈ€ØÏ°ª{­.ÐVáziâ­ Ž„‘tg1Á#ËÉ›™ÅÇSÝÅ®fÈä‘Y¨{i©|ô^z@:ÌÃ]§+LpG½+wO÷‰}Э‚uÃàgÂ¥F‘µMF £/>i)”Ì öB¼ Eñ¯©R(#ˆº+7ƒÉ-?NÞ’AxÝ2”Ã$ãÆ»4#ƒ§$ÒlßOÛÉ‹Dåå&W±'ïä5!ækNy2ˆKeÑP²¢PÚ'Þ*`UûVøBs“ ÂǽMt[ Íœñ ?4¢æûæ #ê©.ÊÏ‹~GÍÀ šf‘)¨ÍÌ -¾Í ê»Ye–0{B“I;tP˜HØ‚¬´ýˆ«öÉ)•WUMÔ9ÞÀnÒlL[ ïÄÉqÙ1c :OhAE¸”ËA%øAî@x×A·¾–¨ù“3€2 ï Ü-s¦š%¹¸€êc:æÙE!àB¢ó˜•4¾û”}šd‡ñ?½¢ÛQù,a9 1¨‘ŽéA^æ½uzh#Ÿ¡O¼Ø!J-A×i3‘X¬EYJßdÞ…åœVÁK…X#§YR%”bÒö‡,7=J€¹Ï°+QY$Š+ZÂöFJ2¹OgîD$Õaú’âq¥|÷¥¸nÑR Ö’|™% ¡Á4’Já Áâ _œ\NbÄ%O'5vâ¼Ë*x7(ä«“«“’0ÎÔÕ°Zñ¤£4)r3Œ„èƒNü9Pƒ^À¥z+r4FŒí PÖå:O÷àó€ZÐ$×ÃI¤‡c‰ÿ½·×™¥ÉÎ쮀÷pLŽÁo2þ3ÍaKÆ Œ É#%P³ ªÇÐÝ+ÖÚYYUï§vZ€ Ç8oETVUþÄÏÞÏ~wªHjsMäJ×…q|4ð¼ó<·…C.bF±é ·vx¢ç£‡Y„Cg¢GÕöšKQˆÌƒ«Ç¼ºöp!ËÖ×@˜À L+­Ž1o¤+ûöÂ3(H²÷uª´Fã‘ ¥•‘úpqß›ëWÐ â“Rl¾`A§µO`keeÁ½yž@0 ±ì(>u*©™›³7«³]Vb¦k3å[8ÏenÖSÔñf«¼¯0sÃm@ÔwîV€ñ¹‘[ߊåÛ\瘀&Éú‚RÎÂEØ~þÄbH‘B1ƒ­|Ò$TǬ9—ÀгÇ\uPD¨TA»óô AaQ›î¢¶Õ› xü¶2 c~Ÿ+zÕèe•&[$÷„»8nÎå‹6N“@³‡n[ &qVéªàu’[ZriÄqš_—§óŒolÜoöp#™Äž§„ -„Õp ™ Çf×V+aì0Xâ ²Ïe “ŒˆºjV³ž}¹;)øpxÉ;t¶$3]Â~ýÌ®÷‘ûö;cçˆ]ÆTQŒKx™{é•\Ã-h 1b3âÖj½Âúzè{5ï–ÑÃ]òqÜÕ¹«Wú¦Dúž"ñeÁU<„ÛH+m¨‘1‘É^fNRì‘BÞz‘C;èãØÀ¯£èG“â±PÐø¡M§Ñ_Ûv¬³Ý…¬_Ǭf‹iÇNcôÍíëÉ!`Æd¯‡&ÊF0ž ÎÎe…È°§xö(ã ØKÎJ4W•Fåƒé0G¼Â1šÎ;¯¤µ²×™oÎçg–u@:“{‰« ÃOWísæÕ1ÅìÖ´ oxF=Æs>p¥¶SØ{µ¹8ÕËßË;ª»¦&zTÝÖç!ט(õ8•øJËc6uK7alCæ#±ÅS¤˜Óà W6ôš·®›‚Š†)=âcæ)´½zÉh?E3  [J1Çï;†¦+2£ ×¥P>e‹±çâàˆØ:‡Šïˆ܆ôÀÈßÔ9žº$6(ˆT•î²kÌEº|Æ›b)[dïOòq¥„‰Ï;ÁyRŒË}[£1-ÕÊM3þ‹ˆkZ g9ã·4u[מ—ŽH£mÞU´¸ÍOQ€”s~Kþj”ÙU}ñ/À46´cÃ4SìW‹-¨Ì'1(š—Ï£´ õeµèÚý݈©Å¹Ì9À•¢:沃$Ö‰^ »æìð…KåEbåÃߨài¿¬×+íþvƒ’hœéÉÂÓ8è°¬g‹̆%ýµ–6ˆ°’4ÊëmlX!m$@8œ´†M€.ó’VàCŽ\WU¨·†_SÖh¸%Ôo¦E 8üæÖ˜sáÇ]K©ç#déÆN.Š&VRˆ¿Û©À(ÃD-²üùXãseM‘©-v…Æ…àI¯d«å¶¤ŽF㜳ñ )Gåèf@Cé Æÿs½Îëè,Å ­BÐ=¹îû`†jÙž$ '½ îµtr6¼€ƒ@«#0}AÃ|hxI-æÜÇúƒr“Ái"Hàô2G`jkΕÅÑ|+Ø,¤ŒëZ„C*ZNE5ç¸Â-Óu°·®"×r<שŽàGØpû]ÆkyjTˆs'ˆFÂD9‚3lS‡/s_,«çË0ÙìŸËë`nÆfcÂÓ‰ÆÀ~ߨã¦APØ(¯ïv%ö —Á¿÷½´½"[L˜6¹"*j€×.mÑ榌Ç2â¥öts|kv TÈÔŠÿÀŒ&^‡õÁë˜Ó\¤¾¶Í•9g#sáEájŽw¦¡A¾¸êª½å›àœ>Lþ -ÝØþNôÊ -Ø_wîþ9–¶ ’1ÝpãT}(Ø J¡CÜƽz•xCl%ÏwÌ],±TWY&!AÓÃáP_Šl¥?ƒñ¼jó>A⺳7´èv™çÂÉF¼ti˜«#8Œ$ê3 ªÊÔè‡0nG } ¹=}Wã\Pçød„4³áT]xhdÈX> -™4zt0jм4h@´c²[Ûç(ŒÀµF4ŠNeN±Œ¤ •çi°»°~9~Åqô ªÎ¿Ùbâ -™Ûû±ç©u£~±ÿÄeÏe™H©*éÍœÇüYmyh&÷4?G „*`ýòZ)à²æe±Û³„t£ÚXÎFF3ßn1jÞd¸8±J<“ - -€¥ÓÍD¨X÷ßB&z1;ÑKz ÄFøÁ¸»!r¿Ì¯–‰r¸«ŒøZmC<=òê?ฎmHHÊÅÑݘFŒüd±IÔ”_R×þ…Mx]Fù°èíláj/µsn‘pwšÁŽ=x‹1…œ[Qüm5nxxdf—qОVH\Uò|ž)¡'ö-˼šÜ)!ö=“#bbvˆ2§Mr”ÁŸAÖ~`Í’ã ž¿9?œžÅpÑpÂçW6µÞæù+µF!S.ËÐÔÊýd‘cDZ0¬\+*AÖp öAÁ`Ö ÈXl_g¯SÔH‰¿ »ƒf¬MÜáÏ vž#í©-nûª¶›»Ô¹êi$ Žc„R£¤·§…¨nªç…Áè¦wœ]÷‚$­ 2—bö%O«˜d­/j±¾ÞK8ö‰Í„®nû(²ífþlLki…$${àßî5æÃrÄÇÝABq¨„]‘c†àQÎìJ–ädÐBýIJê[«ÒÀdœBuÚÌÁó©!ŒÝÉ›ŒØÔ„J/5¹w»Á¥?|"+|"V÷»€`pjIp^:ñŽ€†„iÿœü:™R¶Ð Mc‹3‡ÑvDY:R5Ùbö{3½ÚÂF1JéñGupÜ!!Ú\t¶¹¡žçƒ¶HÙÔÙÐ@­7©ET8‡ÄìȇsÅÑÚâ´5 ÿ“¥ÌJ"Î#r&mîû ´î¢‚}ÕÑc®×c·Tb‡‚D§¿ÚôpÆáSÙÓdhx@}¼NôvŸýÏ&””×Âs¤eKÝ -"öˆ‡Ïi~®Æ–EñÈ«3{!N Ú¿âÝsš"ül¨è#®û†xocå=¢0 ê·çÌÈf”r­v'¦Â^Ë«×-]¥Í1›z'nÚÚR{wuesØF—Œ²Pë)¼î³Á—_‰/š”‡âÇ:§Zty½ü4 sÙ£@!¼iLˆùO™<ÜK¸qÖ%8'ñ‚áâ à ÖΤéñÿ™ÛæùJ8*KŒ¬ ÁXS¥šÀȇï´sH¼®“ ¯“Q% -~F¯ªiª,¢êhlÎé0Àk§.Xó都`±7Òò2-PÔÌ£ƒ{.ÍhØ{/C}[Ù„׋úÍq­Š:¢¾HXÇ¥ø‰Q;R•Fs´cß#v•z*æ/~NÝ%ç!MÞ 
X^õFF´@P©DžOËF%½?šyÓ²¡KËËÑò€9Àiàlïš„7Êg[톒oߤqÞñÙ›6÷å-ËÔÈß×ë !Ëh[I=<Á”³ÕZÎe¼œ_Î=>ó¥€ìŒiè’Q¨•• Dm­ÔÀ:ž4 -µWÔ[r^QBŒÕøp¿UÁMæŸå9é—²¶eDÕ?xcÌÛLý[/—»N´bÐæá.uiû—º¹áïBŸZzH–®BÄx×2ñ}7ØäÑ¿Ôø DÜ›w:n'×rödïÌëHgêþô)ü¬ýݶµî¸ÑQ¢lä)Rk@xæóq|ÆÇ*¥9"oÓ$\Ïéæ·Úâ"Î!JF@§-XáÆʪS©]4Ä8Ì܉¾ò˜kÝëñ®k¬@{†9ó‚9l\O6J†Ìµ e^rõ\1™)fÓvè} JÌ™¬ÂÏe&¼Bw´Ö¤÷A¦42¦ó'•®Ïó’C”Ê™£—뱉·Ü äø¼ÅÊžµPr|¸LÞy3Pÿ‘a Ïæ·¼"îüÝ€Þkì0ÛgÛµjsa1c³G©>ðø:º -1¼„4õ2ºŽ±–a48‰Ì"¢„VD{+ -qv½DÂ¥ÄJDü§‰îÑ^%4çi˜2—_+?ÊëºÝPZ3|çHÎÒ¡ƒòlHJqÆK…T¨+K… -ÞòÚ£‹H5cîb¿^±>góY?÷Q–nôOf-R:Éꪔ¦Ø‘1V¡÷7×N¸ã™+÷;G—&ÐXÚcpfvW¶&×¹Qù¦í÷¥RR2 ²¬j=_ó,²76ŠË9¢® ‘Xó¼ø²àÊâ<êËÙÝìð™p\¸fcOhFâ©ð]5ÜLQ"‘D±H+¯Ò^v9ŒÏ¨§¹õâ`éñCæS6·xèœÕóÒ×Í-*´ßÛ9üáoþ‚oŸkÕQ©P©ˆýÇrlüæ® -§Ý0' bâÓxôþ ƒRJì¯PÀ²ÞžkÓw À7Bò°‰•pAæwã!X,jÙfc”f™œ%øI4xî i`Ú6©zÞîÀ_ ‘µŠ¾aœ÷ŸMÁ«Ö¢SWÿswŸ$³5e$^Ó´âÜÐŽæa¯IÀÏp%ælÄ25áÇ ~´õŒ"µˆ¶Uw1•Å_ šÁˆ_y‰f ʲ ¢’!RÖGÔ³ÕyY™Xâ_þùÀehœ7x³Ñê0ᥤ -¡f±|»¢øÀ‡tÂÓ W¨ùþiEέ1p©ì â b%£ÉàaI qSŠ¸ˆåy¶ÕÒÖГª‹Äë ¯ª".‹ìa}×|Ž°#41RL4F•&|iŒT'Õ—DTŒR¯à¡4LJ=nVƒê©“#º¬úÊÍ ¦fòæ6kÀ e’3ábæ5 -û'´nÞúëpÆÛU«T]#ëŠ D~}~N仢þ×o1jéOÊš!µ¬r.¦«ŸåBß²¢“Ö+þFÌ‹3œã wFq´ý[©XÆ®lÎœSË€Xƒ²5Ê9·£øJê\iý $ë†kï %«®Ã0Õ%å†K.nÕ] o¤ç¹™å\RO\lܪ¦yt³ßå•Á£Ñ™ †‡ÈÆÝ¥ø®¨ÂŸCÂÏœ»”Kž1¬ð÷y›w×P!5@Ê=nŠŽÝPƒ[±.š—kyÝElA?bCÞ!Ù-jÐÞxW2¦ŠVת²àÎY¸£Üåd“‡žhÞ¥äCPö½'âpÉRZIðäDöÌ—>uÖ•Kl$„‰ž5¼;>r˜ƒîF¾mÔÎd¬|&ª` -ác]€é1ßÂc~ÄáÜ\܇ËÚËÏ5:q…ç‹ïÀMÂw Ã\Öì@CT$u&Žx} óÆØ;Dø¯¡š?IbB1GÜtè€:×*¹G%TÁ46‘#QPI®‹ôBÕ¡Û"/ê‘ÂfÞÔ½§G „FêŽÓr*àïË¿£n¦osW‹Ž}ýÜŽ¥£”¦£x¦j6EV,YôÙü*íœ×óhsÚѼ–òüøØ»·kþÝ#ÎõkÇ&Z^õcÄ‚2<â¬cšãÜÕ¤õ]:¾åUD(/ÖѤmÏ„:]†p b¥ž¿íÀ$¯9æçuEnT«2úTŸýÎKB×G¬hk…(¶š×ˆ¡-vWQº{Œå^•Ïžkd]ÃKEPt(IÇ/ªaï@\ŒtoL‚_“Îdì1|@¦“LU~½Ž!óÓÞ»W髌괬)6µ–¨’jYY[Ëá¿3N_,¹Ó×i‡WiŒ¸'çæˆzS¨„ço÷“Äâþ$)oǾ©nç(ÿE¼‚œE6i£„B~nAÙ‚—¦ÚoÓù<Ø®U&'Œ¨x.,¨#ÎX‰´Aí‰Dí0¯¿|}XÌ!;§XÛ¬NoÿyÈÄ’-GE»ïò›ï¢žbáTÂlˆ-³t—;;áž6NÄÀØƈ´PKäo“À§#r²Ÿ®<‚Ì2ùµ44îÏ+则ÚrçMŠ0#&)©‰Y&¾5³ŒöÂzC%™EŠØÑžƒ1£˜7>¡aû3‡T3-©æ)Á¥ ûØG‘3Ç΃áTW~6ýF˜CIæ]‚:Àx7…lÎUˆ_›3ÜK|P–ÍÁlLÔ‹ôcÍÙl/sH_Ë<Þåͧ&Æ¿O$;~fjjvF*ãbQ-ˆö -,WfåvRuR]I-x—ææÀG;Öµ‡x6,éP^™ü È8ƒšW†Mb^²DTÓãu°kePàˆg]zÆ£_1ëª@`FÝåô!Ô‘W(ƒ÷\wCˆqEÒ«3KZŒ,I¬°×K”ʦ##Ãð°éÐrïa²wm‰ò©T B,xê¤Qˆñ…¬¬n+‘išk¤ŒXK¼[çT">JµpšãÄ9–õ{>«Ùp” - ðeƒnâ²µåܺX(6ŒIóáÇ‚y€+L^G9ÎëÈÞN2G|Œ$GÑÑ#ˆ²Õˆ×¥„± ¢Øð­ŒßÃ6P& ûÐPÁMºÂŸ£Œƒ3ŠÇk‰ò®)ð«Ì·Œ2TÇOÅ¢^¬Íž3©UÜaæU}ž5´»~·ZbSK8‡'Ãmƒ6-%ÍfÃuľ!Šžh B&eìÚ^ÞUE¨4VäH4*ÔŽwqº©%lî5Tø˜0 -†¬X¾Öˆ¬ÇáZLpÎOZ|kn -%³ •ÀC@ÈŽ>æÖÕ¤Hî³±¬Xv]ö>óK‚NRX$em(¾Ý‰OáÝéÚ¼ ¡íw•ÀªFá½ï;õËß ¸–îhägãÙB¥‘Õ‘ð%.*j—¾š!äó!EßrÕ-îÄ‚©‘–—-mä“IíR•–þbÓª_1š9õªC n°oÏ+È⊊Djª®_Ú롆ƒÓ@1"i”Ù`zNšš>ü(½oË$GŒ“â==ZóL4¼êuáîÉ1¾X/Ëë)¸åîŠQøÒU±î‘ËøÜ 6ÂhÔhÖ9¼ž!^ãº·6—Aæà6·8Á¯ùfÂü‡)+(ÍæÓq¥”Nmr -=fè‘ÝvÕïkÁÔ]Õ’#P¬Ùs}(Ý°ò²!À”6²¬ÝâQ6xjÿó+:Öd†˂Ԧþ ³ =ÂìXïA‰»`¨¸µH­ŠÂö¸ÆªÛ»¹>«b½"檄Fø†U3/¿îa‹`«þîÚºZ•T¨_Ž’ÎØpϳs„æü¾QtKUtã……c'%ó×µê[2ec–tÄ€L œ•œh35$Àô‰âÈÒÇi­§&§ê{ÏØaåžîçWîÞ4#$Àó)gNùB÷½ç>¹‡‚„¹Ü#,k¡ªóÊÖ³ûU¼ªÆýhj2¨èŒ -XJJtS+fêΈÜR8xƒcõ¬œ=̾%µ!óͯªËH8  -íQ…¹-Â]566› îY¨ nÎûêÈ=–Ó»<ᣠ-ï©&™Šf¶-Tf0ÁÏ×µ´Íó~õW+¼ðâ’ç§t/¡Vžÿ»íª0u4«_·lih¢rºØÛ6®)b *S›&9Â’Tºg‹²Óªs‡Se˜ÖÅtŒ"Ë‹ÔÉìÑÛ¹zx»Ìõ#È–O2ój&[CEÜD'n½TKõ‘Ñ$„@ËÔÈ'hGÆN5sÌiÆâ Sw¢µÎȽ{oæó,XÌV—q‘º=Pk„K¨Å®Qh“¡ j "͹4¹pJ Ÿiö[WRª¾îäžÙi–e}¼Þuä)$c5tbÑ®ºÏE*úžïD.jfç2*oáüw/æ(èÙ|¬œd¥~„$Ÿ÷R^EŠRš¿Œùk€ô}IÊô‰âw¹P ?Láz7·O§ôk–<-¬FÔ®Ïm¿*6€á¥NyŽÓ%\1‚?ed3læðq™^MÃÃÉ Ç€ð 9Ãœ‚òŒ¨ ïËÿ<€ÑüŠeÐ##T`;Rgø+Ì#’ª‚ŠDÇb¦3Ä‹|ÞaÇ_îCàmA¡¶Ç1Æ" â‚Žv}p4ÆÁ9\¹·´~£uæê ó¦À¦¨l£ž˜ˆ ½ -äô‰Ô˃É0koÁ¶ÞÓ…=‰ÞÿSõ?u•P ²°û\0BÌ7-Š@Ù•^±|M é]Iü2N»h­‡Æ…’ñc¨yÁFÀ¸„EgmiÔ–²,zÊT”O0W‰øॠdIqÒb·ç&ߥ +&Véc)Uçøs˜zý¢sQñ¼5à#±äQXÊæÒlé<'Ò‚[æüjQûò-ÄÚª(HŸ—Œ[<æ6ŽF &Ï[Ærv®8–g5¡Ì7ÌÝDE 9´Ú"…Ù)A`mk’b­.bÚœõMMëD€`\cIðÏ}(â +4|5…°•òÜwü -Ãþ-è®KÙß™*Xv„ÄŸùX4Àö¤ÐdÙö„‰eÍú’3xÜVÕ¶fAÅ“fìø:¦Ì×î²’–Á¤ÐÕ‹(kjõv$Xô$|ýú»ÕdË¡ý˜óF÷C‚Udnæ3‡.iSnæÂT›†yØœvûXéD½jÏî Ò—`8`£/ÅF›$+ù»í¬x.@D€w–³EÈ m”0Tuc6ÃþTä|Yjpäº÷áÆ©n—….&3/mn¨ˆ¥qIûJÄ›˜“¤`[ZLè;ígyÁóEœŒ5²ÊHÁÀm·™m­ç†2ùHÌV‚œÍ—¸›Ù¾ù”0=C9qÄH‰´•-çGò¤P—“X‹Ø#0#\g#Õ %÷ïºVƒ6É”ªž~—ï ²mõûs£É 
Ÿ‰wòЄ†aYòo±†àpRæá†5ª3¶’€Üœ)Ø(•¾¶…éf)ŸÃ]¸+å+(ÁBößa†ZöˆŠ%§¦ø X‰xsrSØnFáļêÒÈU¢9nŸ#î%¿b´ê¡…ÌYpîýmÔåLuÑ™n¾®¦1¤â}IÅçXrÄïï¤ëɆ‚´[Uá¤Éä—!tÙYö±œüQ’;G•H_Å –V -þšK§y»–¾pŸ&·%ÂeáÀ9j8p²f^zO\½¢êqž•9Ì4V3ôFˆqkRQ–ó¬3=Þß*²Î -Éél¶e;\#PÄ=gVee…ò|‡„‹ pcÒ4–sŽ{€Ã,*„±0}îïÎ컊¢y|q‘×\aîk&X]v,õ.Bì=0äBÒñ½*KQ¨}ôyj]]B¯baUÜe9„¶®öçç:"#  #˾éÞvmÙjS ʈăÕ9áyì»Gøg ×6ܧj_Û¹ -m¾ž…ãÖ%“êenó‹~ˆÉ죽a‹h C¯å>æ|ÒÈOºÕáuBûýªø±ý8iTóWá6Ó¬Ò·Rs’”›]óÀ×e¦ß)b½pFÉ|ÜeŒ_~9"E ÐÞq±|ª°p®È£³|*5Ë®p¨Ç—ÛZ$Lb›!°g¿¹Q[Œ±Ï†‹rœVh`ÚfœŽhÈû6+kÝ>‰§øY:ób,ü·ÇÔLÃþvj‚;8öÃ|°óÚ?­ÅÁÉùÃØà"ªYfHy=Q×!¸‚Íô*ttrTT½5Ç÷hlÃô œ?‹;‡Æb}mø´È}ºÎõ7Ž©àØñ«¬<Ò>Jc³Gc;B­m ·8æ àá—¡)o¶n‰˜lÃëK°NtÒ¿lÍábdžqþDv%vë=Šö’¥úÉ£3~éã{¬ÙŽ}zQJÖ„øæc­SY=3ü–íðÚÃea$:£h'=Ü!¢P]ÝÔ‘j?Íš¢©–AóØœé•â]ý>O–(é¿…Œ®G8¸³Ç¨ñ—åE³ÛØx}¬ û¤  «ÇÀàÁ½P0¦Éu' 8trÒPÇ,µ· no­<Ïò.dçZÓ$ÅF| 3E%t›V}&Lé¨O}-u¨’8s„ŒTPl© ¬œ(gpÔf1/oä®4´3,o_OsX¬ÏÆÅ2$âN<ï#VžŠdÏå;c Ѐ-_ÀÄÁ>s±Ý™s u:s3…†)vY.¥; m]ÍC1¥s.;àÙ#v»¬¾ú¦KgN«ZqÝLÈ$sø<w€-ì™éÈY™Øë:„Í]w¼³vKe˜J¤bä‘`漸Ya$ÚHyñk^[²À§þ<óŠŸzH3 \ýÛa•¿5ž'Ò¢ø3†´}”ªL6 =ü»*Ûè‡ÎËçºÃ~/ðW 80+3Ž6 -õ@¨ÖÈ¢vq>2>sŒû(òñÕ>yr£…C²6ûV#_ä­ÍÓê/Kƒ#v]Íòùf6QCa’”–Sr›5 «Ã‹±/›L‹ÀíáŽ]³+}؈ˆ>ÛíÎ_b­Å˜zùÚnpA~‘Èöh–¥î/·XaÕ="º]Ý$X³ŠÓ{MkÞJy¢ «90á&²k+:âÊž‡ÕèV' GÌî(†ª’ðM[?ÒÇåõwhõŽ…F! à˜è|¸uΙŒS#,Q÷?G¯†P‹˜vlg«+6øbðæ=ÓZ»ð.ýw®¦†Ì š\:4qc—pùz¹WÈ8zGÅSóˆFE; ™hB\[ïwÙ bÃùKÈrÞ"xtv†W˜ò¾Ü_¯‘zÌ@³~;†0ä̻ۜ‹xì~Æ“'2εƒ˜Ï)…ï÷»8¬åÝnoªœ £Ÿ g}î£Q¼ûÄ3Ö®Hë³p³áÎVæ’ Î×jˆ½ØÒê¥Ñ3ÞﺎÈÍS†=æLº!5¯ò®¯ÆsSçÉ[ÙËaº2vBçÆ1°BâÊä‡Võ,1^cuÍ:ïNpú.d¸@¬e8z¸9×BÐ3q÷¶}- ²S:•˜¦Óóz—¥8x¼äêg‡k4_jx¸°Íü ðSt€oØçx@6j¬çëßÆ\~ê'ÐÍub¤sIu|*á uÓ|j¸Êo -jlJË,væ:Ü{wÜU6†Óù»¸uÜ€Ó h8Ûáõ˜Õ¡l*ÖW8cšµßƒáU"—0óïê«þSa3I!ÈÊÔµ£^¼êg>æ?½C±P;¡êd˜X˜6–U)²e¤3x¬Ó±œê,çz•µ ‚ƒŒ¢‰“q‹`ÑÜ3%m¿µ0Á‚¤4tìѵŸ3I³ êb½ÇwL+Ñ@Sˆ\Îz0Š 8Øܾ(š Iƒ{½Ê@C9Ò%Z³ãªÜ'ßl–œ9<ÄÜE´¸š.ÞªÞ#ÇúÿØtúu}kɺï„"ƒŽpŽË§.]ße-»Á§æZ„_[hz]&æ¨ íñTªåšCÀ…w¼kÁU‚ô6ì†8÷)¢ÿ¬#´iÊÊ»7péœO….˜fZ]:ëáþC˜ F0oÁÑcüpôE%L æ4v¶ÕZ sEÏjÔKpt{êOÝ>W$;f/ÖAN§æùÜ° WTIhöXh4 t €¾ãtäÏçÃ"á Õ8vˆj¥B@ …áΛÙÓ* jfÆâI ŽÅ:U5+`AWKx’®C ]å›àŠG›bÿX\{WöuWŽáuîa)/çe ¼“¾z¤‡¿Êwcþ-L΃O„Ìt™i‘%/ÛœÆçŽTuXåŽêö.% Zb"TG _X|i… â‹å$ÃÚ(­¿ê^2±UUdô´*áijÂT­dÚ!ö±BSÅN+É{i|Zt_ fBl®Y1G¢ø2ᜅ§¢)3ÿVM®+Òyˆ³¼¢iz©ÏN"ßRÔ×›ÙnÂJ hp]f¨øOrXag°‚–þ©D9ûW 7Ó>Zu ”²ôN\züÌá(dÔý -³cT1,±ðtunùX3îܸ<×glØ÷µ Ï”¹ ¬]öi€’9UÆÙra,ÊçI¸&a‚07Ū<‘‹äeB3Ø —Æ÷Zק(ă¹®%ëa0½°VÆÖ«]fQ“«˜è ?ÇØìÃ&É]k|¼Y¸3±Z¡ŒŽ‰†‹ˆ¶ê€Ñõõý÷¼OlÓ"‹ùØ·KêmÚvªDl’øˆð”~ÉVŒàïmüþðWÜ]Zœÿ×8Ðï'ýúÿ§?ý¿þö?ýçÿéÿüçþ÷ý‡ÿüþÃÿøÿò¯ÿð‡?ýÛÿõúßþá¿ÿ¯ÿòçÿáßÿôßþ-Ž÷ó;þË?ÿÛ?ÿãŸÿù¿þÃüˆ·Ï½~ýíøõ¿þ/s[ ü·¿ùÉm ýòúÎJ̶EïQîtÙ¾Ü8 ]A—€¯×~ ß®I\Œ¢8þ» n³ôf5€½Y/7k0˜¿! 
`/n‹ù[å3Ûß#Xîú÷šÌŠâÄk†zNÛó©ÂßÃ6æØDf#¼7ˆöb Áæ9±Í‹Ûp¹"œtFf?– úÉ·Ÿjt"ž¾®pe›ëp’¼¯÷,ÏN²Yw.­ø[žPÖV:þ\ö¡˜Ž+*‘L¦¦s¥:ŽÑD¯Ú+F¬ -ÐÏ^Â?/Y=Ç¥jßÚíG`íç®NÇdTíñiÆ7æ}5wŸ¿a÷ìëYfÜW¾4•(ŸhûJ$!í«’âþ3È^™… -Á¯¯茭o.)Ç­žJÙQÊ7¤>@zã›MOŠãª¿ƒ¤'ìžËø&Ñcö¹ùòoz•Ð¥ü žoJdË7o¾qNsÿÆÌÛÐÛÏtù&ó´~Cåù >PòÍ -¸ëg‚|C'&Ë#Àñ°–¾Aò^<ñ4ÏñO˜xõ”¥Óáu›ßLx«ýrÿO¼£´úM€ì‹dø¥$ºêyï®D7Äý‰y׃,õoº{8—]?CÝÃ+mª;vhç7ÊÝ—¯ñ3Á=·Óî'¸}P.ÄböØ>ª)—'¦}m?o:ûÙÈ&¼ ìH”G”áÜ,vl±‡Ù öù7â™^çOŒ8žÄuÞÜôeK”Šl¼:»Œ¶žTõ¾ñYn˜:ãõ#›¢Î÷®ýžN™ÔqŒ›™®'áAi -÷ÅLweT@mþߘé±ìP–Š±Ëy3Ó]m°ƒ ­™¾JÅr”Šë ™n…˜i­ùÄ#}ZÈtëÂX£QVƒŒ1>¨ ¥aÉøAýêO¤Þz³Lçò¡ݤôóBTŸ€ô˃|Òcp#ù1‡$’óîÂÉìÐ6œ"ó'ËW êX’ßøßU#¢~c¿!Á1klÚw0ß!ßUFÀ ò]u³¹n¶7ƒá|"½ïã,’·² êý‚ä]Ù§2ÚTCqZ¯ÕwÝæe£„$üüàdcF«K÷ÂcãÃJQà¦b#Ážó ÃX›BpD6/ÒR{üy -Ÿx¯a²[«©¦qÏ oÍßDížTëzƹaÖ>ŸéfXß>ÐÕU'Žt£«ç¾8³:ÜÄê¦#E~U£oÌaŸ£sOÓƒâ¼ùÔl 8ëO,5‹·^o5# ›FM3Wá ¡æ«pŽ6{Ý ±9ݪ ”'hš—PÚšYþN£6‡ÃðAíÓ“+Í2“o»qÒTV2Þ8iðÌøOŠ4Â(Ù GSÈF¼jC£%–ú‹V‹ÜfQ¢«Ñ°ë†Clfp~2¡Pât±PÐnÄJ¹ ÐB}àgêl -£úâ=KmíÆ<ó7Š¸'Ý9³`›÷ņ:çHl”3oaNœ‹Jœ¼¹ÍEI¾qÍŲÃwJ3Š–|´ g¦ˆe}¯Á÷¹I׊™4y.÷K¨uÝboð2zOI¬oÀåߨ­i¸ÌWhMùøØÖw¼2¾ íAUf£tŒLùˆzò7†2”­ 1ù““S_ÀäÃaî‰IfbÂnc’) c¢¸éÈÕMΊ ̘yá†"“Ê®ýÅB¾‚¢ñ†@FTrŒŒJ½¹ÇùX³ÑcJEaĵܔcû”ü¢“¬‹O}Ñ“µÔÛ§v½XÆûï'Âx¿¶ÉÅWV}"êë–mŽ“QÞÝýH½±Ä÷ßñ~mCˆ±rá¨7{xÝfOäð¥×g½IÃ%‚ò7b6ƒJš'Zø–¢o¶0jXݤ7Sø°ú¡œo,ažNµµ"üwJPl|ð¡_à¤ï;·º° ³ª*㆛Ují l—¨Åž7‹càÍf09Ë; -˜Y:”uVãÇ¢ûßä_FãQOâ/çÒÅÈ&ýàÊ­4Y¨_b ŒOÄo@cÒ‹ì{,¸Ôúñ*çÇ—ÄN%è¶ð½È‡õ j/ã[Ú'¬—ºÛfôâËl²Ñ¼`—[)oD^æ~ÛÒ*ŸüMë âåïSÞ+ÏÏaôñ[Ø]ôSAFˆ½÷òÐãñ†ì²ž¼Ùºœã¨ oÊfˆ„=Iºûµ ÐEYz†ÚÌëã ÑGúUÜ‹›{eSrÑÚ1n8.í9`¹7—×8ù…[–?ä&à".Ëí{‹ÄLšíÂÝbúy…|ËסìöUý³}›ª2Ðq3mÙÜD,*m#nô°eÁ®Çþz`*kþ ¬Ý?9µ÷k OÛ˜üÄ}ô¹dÝ ZâüK0Í•O?RiI„ª8XÉ7•6Õå½ÒQ4^×7•ÑÞµüv6Œ6•%ôéuH„ø‚Ñò.+›A›xÌToc@™7ÔõBk§T¿‘°4˜:ê-|¤¿à³IϧÆñçÀ=ÆÏðY4†Z2v@* bûñIììГ7>(}Ãgí/=Žž†Ïò}¢À¦•AÏøl`+/TÉ~]ßðÙÚ"ë0ã~þH¡%U^$Ïy%çTp•¼ÒƒØG.Äå„V")¢$„e¡[¿ ´ö:âL§Íô} hyA Ì|¡Z'óÉ õ…W"°Ä?°h=[âBP€÷Å¢õBœþP$î×7‹Ö·(Z$’µÞrƒGò’~‡‰ìO­Év¨ó¿è‹Ö›ùÒ—AXþ™EK*ÛülÉZ˜/-·ÀA^}N[;?Y´ñ„á(uÌ{ÛO,ZEöÎûÀ4ç'ŠVÜ'>šà>˸‘´|Zä/Pjég$-_ƒ‹/-¿UéA‡—o­ßDP=nþ «?‘h+Ó¿È´o-=T "KÏKõñF¢µ‡c4•Å˜ý"ц¼½hm@PÅ)ÓJà@KÍt 3ÕZ~&Ñ -N½¸‡UðŠ–p¿¹¡AÕêz:ÞP´Á»‘ÑV­Šû E«azPÍmÝøŠÖ€äeòó§oíÕ  ôŸ´ø›4XàYør9çoÞìlHç:üo–Æê ôœõ=ã3ˇ\KŽóI—%æxZAÿ•5"‹Î ¯ÌT–S¥È|Ü óPÙëú-Èß,Ù‹x¯YŒ„, ÊTÇ­?jÿæÅžê‚ë7&–†M˜ýÂÞ„QŽúM…¥Àë62®åg -,—âÈ>á¯F£Yñ¡Á#ñóUõ‡†`dà¡ãgø«g<{sÆùd¿zožºÔÌßÇ7ûØ"ÊÀ'òõÒK«mÒ+b™ ðÊßµ?±®¼‚©Å¦¹r€#bÚB\%:éÝš”ÏýŸcäñ‚¸F.Î;#†Õ qe”Öáv°&®Ÿ!®NÃ]ˆðSü‚¸zOÊÚ[Ü4W¯“Yjšý s5©GmI=B×æjÃ!ÙPLÊ sµ° ìŸ4×Èn;h$Q ›æ™åH²ðºi®‘Ù^ ì³_o4WÎDÔ-óÔæªMîØtÀßTWÎ’Õ·§÷MßTWÆNÅ…sìTQ´©®,”b¨CK(=¬T½2„R¨%eü¤ºÚ¨‘™ê±©®ÊÊLŽ“Ê×Mu¥AËgÀ(AŸXWeÄTS-ÝXW\¼òuµ¡éò;§´ÞÞ°®Ö+q`NÁÞeó]™³qa˜Guó]™%Ô"Îi«[.ÿ½FMS^5MXÙ,ЫïrÍ9ße6i^“qbœ<"áúz¥1£Š£œ§„k• ×¯J¨½&ñ×öµOÐ+QæCŠ07èÕKìuñ7çÕ×Qäòz«õóÊD9û»R$ïCÕ1Íis^ÑRÅ|>×#i›®J£^w0äÀúlôk^É¢4G”y„ɵl:ÎÖ|¾Ûý,Vf#1Á~õcŠ@ÎýJC¦. m=Â…'ú• Z¨Ìæ ‡Bf£_ÃA ‡ƒÃ…~eç! ˜FŒ›7ù5l.Øî̇þ(7øu~øZ¢—¹Â:Ï›Ka©vT-n{"`­:Õú ã¦1n, ³!ëX_×E$3¥7¬ -îxpz¹Y°QÜj}í|°F ¥o,X¿%RöÙã,%Ò©_0بjýË^µ¸¬ŸäM»دÕ'Ö}±•P *jN? °\:«>H—ïê°7,UÛGŠeÀû•ÆTõÃ?´?ú‚ÀÒÊtné#Ã`í€ç)R¿~†ÁÚËB:VýÁ`ý&Ž'éµõ' –âlë¸f8T?Ã`¹±ãdfã™ÿ„Áº€v Ñ)Õ -áÀ–=2“'V~ 8óï±y£Àr\«s:VJG\Š7 -,—ÓâHª9Æù3–rj!+˜é…’ü˳ìhöÉ~Mf©¸÷ËldëúU[%«uŸíýJ¥ú-tLèWãAšw¸g´7ôkjËp½Ù©? 
_£Ç7ðõ/†™îx+x–/Ä·ŸÄ×°ÿÍ7蕃å²Ì³ãh›ôÏw2bx"hä«ìD‡îgùF¾òÑQ®Zp¡{±_iˆ× - ÷|c¿*»;Ç|õïX"à8oök4¨ÃR4¿±_ùRèÇ6òÕ¡mΰ›ôÊ:i®#Ÿ¤W%)˜Z ·cÛÈW•AéÅlev;#¹^™6‰×í× Œ#oœkgF"•÷à¸òÅ…oåOQAm¥Øñ…]]ý˸W“LÝ4nÚ]ø•ÂÈÚ‰×^õæ°-9ÚR=rêêªôÑ«î—(æA£°!«ûï\õ~iAUOW¤çGëI@ö‰P-¦wöãENÝ?€©÷K‹“:¿;âœG=ñÛ ’éV½svÜ0Ô“y&7õ<â|>᧼ÇhòbžžKȺQ§§.?ùp:Œb° 6UÙÁ¦˜¸â7ñ䙊Œ$ѳ÷¼n|)¹×^ß ¥¼t]醖ò72 Í*}JDZD8 LJ>–Øæ‘šŸÍýCÚ]Ȧ›CÊcxj&Û¡‘# ÿÄØ㼩£hMK7l´çÏ=£¾Tòå-â.QâŸQ¦I7GO -vsÖ,+¸7l¨¥¥ß´P4IêZ$Ô7óÁEâ y#A{K6´ :ÉoüO¿H½©ŸÝäÍúìf|ÞŸ:³p'.¶§6.¨„Òó>Ä äÙ—´eó;ÇÒ ÷C´NÌûédLJ;…É%²l¸ŸHNôäúñí>W„H6€S:âŽwSç_¬BEÁŸáø”ÍsIýž”ͳ†ÖiÃ5©ÎEe±™š Gñôx4/³šåh¶BÊ°¹™ûï.ó~iQ21 G@±á˜4Rz01/ǯ…IpDŒÝ¨ÅäFaj€ª³¬hÌ…IclŽ P—k£0•L’ᘯ]/ªíLÐcaò ` -6!ߎâòEÀT×5Mëì‘o¦ïˆúÃ#ÂŒ&SॖCÉpu’ÔÁÎäÅ¿Þs2¿i—.wtbÐÀ´¼a/]éxÆØ°§›z‹£ær¤›zIC8£$  çõr-pr,pr¹©—4šT²+3*‘R KºAkâ÷Òµ.!ÖºL›{©i*›¯ATŸ¡`0w%õ‡¿ùÿÓ4ö¤@?˜¶™ûCEÛÊ ÀŒÌP‰Tª…M¼4“SÍ2Í] -ò­ -Ó8r ^RÏñ.]ô‹å;pÿñ/æ¥õå;“s*`Ê{¾ ˜_‘MÀ$’ AxûµòFÀ4™Ä¾Ÿ(ò®…iƒ¿zÔqÝ(LÌÛÍó´°ë¯\UØçnfdpȱ!x5ˆ*}zð¢¦_úiÀÇqîíˆ'o¦ËCWÞ#1Om¦aÙXùÎý 2“ók©¾¨˜>‘áMJ¹©˜±w!xÌÞ%7*æO&øÓr])Š6S{VœØgì³'ÓêOÜá{ùfb&Ù¨n/;>g7syáæð ÄæÍÄ´DCOUYMí†bÜÄz¢g× ÅäÆ3NÀRŠeÔŠI<Å:›…‰ÛÅ•n&G$AžèKäÓ]×áØ%±—À3z/ÓkAR47ñÒ˜n_uÁò +Ó¨VÊ%ª•Ä&Çlƒs_TØÃûÀÆjÄ9©YÄK?§¹JUÔ˱͉²sëtiÖ¦©Š§D5Ý K#5ÖK_tiX_¤Ö‹oIp©ÀÆZBR aÓ,­ Ïõ b‰ I„Í®(?F¿‘•Cï‘öFªäµLj•Z€„O¢H…Ñc+÷äR"Á¿òµy”#xvC9Ì¢å7úäÙv–:9Fø^-Ô$`"AOÂä OÆBg&;›‚À¬J–ì+ÁñJâOnjs${¤M6=Rï|¬jÐH^c…²X‘áZœÆ3¤ßªzŸhHŬhn–Ö éPÆ ‚ä›3-÷5 -´*^&sÕÉwØÂ%O;Cž–/Œ.Ë᪠+í~•(™Vš×R÷&þÞâz‡\ÿzÂKuÃõmuÿÇhlŒ~âIïìšÚ'euKZé‚/¸à%#’oLA›}¡9œ[ÚŸ‚:ÈÄÞìx]«Töó1áß¡Fªß¸ñ>ßžRšæ/<•µ}R¿J ›>™&5K|7J ;|„usÀÄ“â×'%Ð0øò:ùb÷ -‡|î ŽÅ\ÑmýÑ…ç%éxƒº5*®ÕŽj¨æ÷> ×(}cF»ëϹ?+õ×и”!;K \ø ô¸:Yëý› -è¥q‡µÛ› -øõú“ -h i ø«´òèQýÙ<0ªKçMT¯Í^}ïL@ˆm!QRܽ™€4äÌÍWY§õèÆ*+ÛÀYå è -8¿€¤=5+X$@Û½ј;ð&úõ½šsþ8`a- EÞð/  -ækl}ê©gA=eÊ=P€6:úÎñˆÀíFÚàí„–,÷èäeµX<×7 «þøÙ³ñHƒ®P³! Ã](@pF»è®ë hPÈ3#ë¨,Ö@ uõ&:ÚŸVìƒ)ºÞH€ÆŠ¼VÔë´›ÈB5ë†Ü§„Üç*o@½DS éÙ‹Ò;R‡HïÚySÉ]®ù¢}£ê7ÛÔýÍ;¾”›HÑÙ ÿ‹ ¨Ò Há¨DU¯c(0 mZÕ§–Ê èÁt…¢"ÆÕ6«äf¯ïˆ>2VJ,2_ªâÈw†Þ‡o¾fbn³óµ’yÚ>B¨¢áà=¢uýù–!AiSö¬öKïp=¸ZóÆ”ñÆiYà3^-jÇjyÇgã+õEÍúÍóÍÊB&“ú;"‹âu¾ÈX¿…0íb͉¿†ЋƒUbñ²ñWyÍÌ›z¥wÑv…2²qÅîª,ÐV˜PéƒhŠ·Þ +TG,©:ÝÿiÅñàVzróªÈO(³ÝÀªC‹§XPÞ *rŸÁ{\¤*^èî"T}­sÿš‹h†M–ðO ¶•Î~íß¼ñS,‘xP§åP±aSX}ÚŒ)öêõ†–bÒÔ´mÓ;“1$ÅßãÕrøojRòš¥k&9Ÿ#ªµpÙÜ)*Œ– ¿/ÈF—K¬p¡ÎtËa|8†g Üzp†E[þÄ>ñ'rÓž.‰ÄyCžÀDëv"—Ƭ·‰N”Ãëµ½ú ¢@_ðä7Œí”,lSЄò ^gd‡Ÿ”&¢Í¬/ökýŒ*ÍdÚ?QLûµM`kvÝ,¥¡mç;x‰òŠÚíMª:IÕlÌÒý÷ƒ®´_ÛP%RW½YJcY>JÔ—èà¹ÈI¸ª£¯Ùä$]ÖQu=Wƒ-\7'i(×|Q¸µK¼çÎÉ€ÂêpÃÎóïf !sï!»ÑG|uÂ]Θþ]^è#ãé xt.sÕÍ92d{ö›oD$µö7ª‘AùV7ÌHÁK;×… æ ]´Ýj7²è<Ãù‹Tt®‡ðG@Ñ©%TÔË&@Èóæñ·5â ‘¯UÈ pÔñÀQKÍšìIÒn`\7mHžÔH›6dÉp~c a“‹\q£…¸)zý" -yvógЩqrûæ]{Wó‰ Ò(¡?ã‚öê Ä÷×rèDɳ®!?10'µÌžçËÍÜ'è>òÜŸsk@ïç4aÛnšÏ:žtj*òè7ÔçLq!¾`<£.CòŸX>ý\þŸäj/µéAî±dé¬?{°ŸÄ‰ô‹Ó£/%…¨Ÿ|ž±D‚O,Ï ïªß4FI”|_žÙžô?ÀwºÙþÍÜéÇ¢|¢vî†açõZ€uúòSý— £<ácóÅÓQ†Y}Qt¨'Þ&òOxNÕO L¬¾˜9õŠ]Ð+Ǽþ"§êoôëG2 å´ñq5•ˆDß—™†R~ ÔÔãO8MÅïá'&M$ÏEó¹ÒûÃ_qùÿhþ_"ÐÕT„oΈL`'=Ĺk0iì¥×C;Ýu&ÍúÁâÛW"¯oMÀì¡e=®P ds5s§'ìMg ?"D Vß\#9‹ö¥—¢j'Ð_ ÎsÛò¼·ÙÔõ+ XsoUÑ9,,ÎçJå8·@¨§(i Óÿa"\÷7t{rå2§ÍÆšXß3‚ -(tZ6Ê’tã9äØå\vvpt­ÁBPR-YÂîaÂ.c.ø}îEz6Æ#z(kü’èq‘ÈΙvàz˜ðÐAðœ)ˆ,Yç”ÕÝL¯ŒöW/–¡ó¤“\tµÒ¶éL:²æp½ÃÓ5+8@R£ j¯+zP·9{œí|}NC­6{i›f¯Æz«Ï­«$ª³M1¦øMصóIUæWZj®Db²Ùƒô=ßÓ;GžŸiNRžt¬ôu̬RžÃÙeÎ MF$!±að#ŒÀªcýÓÔŽu‘·­×ÐÞÌßVÃ)Ø2Üɲ,äxû ¬ª¥±¹Ça‰{ŒnØÆûÁ]bõ{ Q³b½nuÚØêyAk|@gGJ¶ÝˆzŽËÆÚ¥E¯Æ¢Ö%ÄàyCÖ9Ÿ74«¨èŠ°Žs¹ôjs¨™ãÕþ¤ŸÔ·±“.)EGü¦£‚þœóP&fIaÕ`)Ïç£{Içò™B÷¹1ÊÁhVã¨{ÙÑž”6V®ò2†Sð“Ž 4Ѻ ûš*‹Y…ýÜVá:{ á^E%Æ8yŽ¨¾lóËË(6Ýb7y,äàÜŸ#]ò6cÆm†/rÒŒk®Œ˜/zÔìýÁŠwØ#Тyþ ¶z$OŽƒùþ$ʦl›mäœ"^˜ZZçÊ÷D‰P¡Ê2kЃZzT¶¥Ó4=TgÆñÏ°F›ç¿á\Êóß³a™* Ϧ¹ó펣ÏÙÅ+H~åçÑ9í¨¶Qzà¼"ÎÆ‚ f\(›£Baü¯¸Š±è o+¸ÄWd‰‘\‰£ ë ð³!n@ÂČۃê™9n#mŠP=Rk4¨³ú;¾ƒ]†‡Á³ŠDUè`q ¯£æp©#јRóùpWÌLëõ:<–gô"YA/dôêh‰sXæxNJ§€¿œ=äfM¨OùõW]=<wNðâ4hvÒØs>Ù¸èÉinXá4kO™ó\ ñ%çüô«sz ªrfÅj§.K8eqˆÇµ3N˜Oü = -ˆy3+Tb!,¤.Öy^ªQ°,çqͲŸÛˆ™@ǺհèÙ {êççåÄXŸòyâAåSOµ,OýŽÐ)óx ‡âÔ¢ÔxöÀù2q€m®Öñ”+Ïâü®JÜäͽ¦ÇÑ  -ÊLñ…“ãËA‘FôÀoŠ'SÐÄtÂPj}R KµÚô=ð¡ÍC—“°iv_W 
Û÷ye‰u°e»rk›±6ü™RÛt[—©ˆÓ^G°ÀM@g¼½ïK/{AE>/»ËG$ ˜é O#8¨ pE5C%Α£—ÓÉ@S<Wªó¤R«J¯»ŒªCµp¼ž¼è¬Ïûø:.qs)Ë-ŽYjb9»ÎzZßþµz9žd­51~ûÊÛÊ‚‰ùùÔ+I¨°\ÏSíñ—QùaVýœ¶-¬i9ÓzðÎÃF Ñi8PBRðVã ŸwUñk²’°3=tXb°™K›+m³àﯟŸK…÷o]fïð9›ç²¯-ë2Æ™[DƒdÎpù=SõnÍMIÉÜËiÚÁ+ßÏ -ìSŸQ ¾¸¤'cÔjzaÄ,Ærgº°SÜbfêfØ}ÎNo:*l5¡?D܃Ië/«,â˜ß—ÙÉß_RíŒu¾ºeÍa$O4ƒeÑW£êçyÒº™ô:‰>Ì›I’÷Ä-rzßDæZz(P ‡+ð¹Ý=—£ü\P1§šþÚ9ãO/lÆ£«ÓŽ…u‰OÒ£…‚ʱÀå†:ß™úèA\­ï¥}¾l›¬~Þ&Š|á_=wŠÙU˜ ŽøƒËm —|~SÍ‘¹˜;çO:ë{_é˜íÂzr|÷ˆpÆÖT/èÕŽCfž± ÐCýºð튂–VÂYºsÕ‹ÅPÜÄaĹýe?ºsìr­Ùpêìó1nÌMø,¡hàð"°ø,Ò’#:ñzªæOê¾}†Îä¯swÄXÔ.—QµÃí=;e/n_Žçœ|F~ì‘N7 ™íú #>èÚÎ6¢·¸k!Fðâ-WÄz°; š»ÆßôP™az'ãT#ÒÁ®l.gW3¢ÙWÕu•ØàN„—^`0«©¤Wf{QOa®c¸‡ÔTýŽuz«?õ8!6íJfc‡îôáe¼>ª¥"ÍñìV5dâîÆaý8ØE*žX…>Ùs`8²Õj% dyǶ$@"AACÅÈ„ÄÙ|.|t6ñéÂ…¿D¡,e(Ú×Â[/æuüí.¦ñ ë¶Q—m'¦/ §S®}6\RgC¢Ü—1ï$Åíë–hö8¶|ÆŸ÷ ò*`jë~¶*%€Ü„¶™’ W ¤ w<ó½+)–¯Dv‡òHVRW»}xéuÚVsÌ„yÙÛz¾YŽÃ@”ÈÌ$à´‡tœÙƒJ\{t7T¤žv ½â,žº}Ø+ö‰g×u–zßs5°Àk¸R“èí9*H•î5¾ãÚœ˜–¯35öˆ’Å» -q)ÀJÐÕR2âùÝ’PšÁN°Œçš² f>,£:6¸±tÌ=ÎäU–i&áÔ¨ìL§_Ž±p•¬• -i "· |1×6ó‹¢*ë8ÍÀ/-I’ø9âèóáÛéù¤•j„¬šDŽßñÅŒ…quË_çú°ÇïáMK$…Iës&"VfsI”6šF©(ó1sF·šWf^Î5dZ™ÝÙ0ñ“¢Ð‘ 0WqGô°f‡˜¸#¥ÓDvg±"I)W v°±-`7̶ Äa(Ô²ƒ%]Ø£’VeÐ1™"ÎÇ隧Ç΢§à¬i­ ±»’÷Õ×¾ºKn>l¤D™+¨CwIJ +ù!¥ÅøRÇíR‰ÁdoòÝ?xÌu\.–ñ­­T‰­TÍVgÌV¼p"Ì=$¼÷$¯¦îÚîÆ¿{„Â9(Þ¸?‡0|¼ç Iö —|@Û”yš§a?]usX`¬¾Â[sŸÀpŸõ>ón¼bÈP¤ÐÝŸŽ”Žå-æܦx¡ ãò#ü¾Ë^R ´¤þ8ÂAº‚è]ê"!9AÌñ¸§}œ¼Ž#W†•mxÐ5 ïñÞãÛ‚^ÃúÇyg¤´ŒÎ¹0^r`Ìœ6è Ø!¡†‡ydפ2]öˆÝšžöËŒ­º\ŠæšáŠèR# wcÀæñðÃ<é«…1Š¯pèƒ1j$Aؤ÷Û† KˆåžÄfœœä=ÉɬL®‹Þ±=½±xUÚPvÖ'ií"]W͵XÙ5ÃMø4:)ÍFÝÐŽŒ›Í¿£ÇÜ“¹Um×r±™7ž<0Ö¿³Å´f ªA©Æ¬›NÉ~Ò‘WalT³«‘]t‡ï"TȯÂÒ¥YÀcr°µûø=8X+¢ÊJjØOª®§û\Q#ÊM:$dc°G‘<öÕc® öa¦6÷úêu®ä§‡8‘ËÆw!Ç®9"$Ìb…)áqXáÿÑ#â -ƒ@°Ö®òë + êZ[üÑ|t™GÏŸzXƒ±ŽÿÙØDþršG£å·Äb°®|†õÈÀÖÆ)¼¿¯³hw "ʶ*ãäKÆdÚà ÅR¶3z”„·æ†œXÞåà´ -×™æNˆ{ºŒº®‡Fcq7lPƒÖ-ÐäuÇȨŠ¤Ã\("º ‚Û!¢2±yÝG-CS®8Rîö2¯F_D€æØêÈm<Žž [áT¥Ø0›¯ˆ‹§oŽ9[~%µ¦‡/Pª?ü Άn$­Mç2¾×pCð¥£KþÓÉ\"°î»j éµK—©Ü;|¨7y”«ö³K2E\äb U“–³uÅÍ0Ò=zôÛbåŠ.œ¤%;I*‡`5¡T§~?åR€aV¢‡íétŠª¢Cýy€°€Æk -oÝLº!zJð7æ#Ê© Ñ`¹Wê3‰ÙRA0–äAKs†í98ìÜ@€%´ ÖXÐR¸šÐR®_]I§ƒ8íéüxZPdžxH× R؃ iD·Í÷c—ÙG÷QRzk:³v i",U ÃøuÄÞµ,\JŽ .hm @í˜ÆŸ«ÔµaUíV‡^l^bÎ+«&»¢5ãÖšs¢ó¯ØLn½FEBì1«hÂy±X}ŠÆ;ÝNÂn÷HE-‹F™:\¶x»÷¶iæºÌ‚å7 ƒp; ü¦ -¸±³Ùc®Ïûp ¢×)d“hI}´Ï:uÊÇQV8æü7‚#Z–ReÞ²Çú€Ø„ÎÇ^ßH“ç®ós¯eÄô:|¶0MY6+sðæœÇð™cød°Âß·{ãÕ„£Ql‰ïXž³êR¾ziÒìº6[HêggÎȵ~S<0ûµÆR®¸C®sÕQ…À'ȽÐcÞöÖÒÒëÈROô;µ‡Ž`®PùÆú JafòÙ#¬hõ’Æ`˜ö’¡.åÓì5ßÖÇC¬á0A¼0KLˆ`–TËö…GóÖ/uÙÈ”°‘ñ娆—®s'Ö]aâ²ñ›\PñÌõ "ÁèÂ"@Û ¾š0ªî^ú‚±nP÷0úJqÒ&aÍù+,õ`ˆiÙ0J{³:\0ÃÕ {’œª¾È”`à'‚ -à6"põ»Ã釦lNpé§^¬ð–äeÎ?y;KYºû¦ï”–µC£²žï^(à¸[PÛœ‘¦ÐŠA†M¹þI˜Eò–ß‹vî hÆôµžÄô Y¿2¦ IóÛí…ryÞëL‚q¨³È£v1Ô -ÄlbžŽE+{/,vç˜>ΫÌ;ŒÚeŽáöŠ"Õ]>^ÓŠ”Ùëb56„§æq™Øÿâ1ôNn1xÕUõ¡-¶sýšeGœ·`‰6pY´aêacI~˜›Xß=·¿Cà hîv“¼>7ž}àá¹ÞYÝó”o÷ñu±€¥…&ãÇãKjš=z^¿^|kæC_Ï)>ø`Ä.Dv*œoÇ‚–”å  ÎÚ_ÒÅss/5šwK$i‡5ÈN:sg>Ã[:§Rj)œ¨ ‹óµføîX«fB¸$U#òÝã;ÎʾO'û yÒ‘qPÎxè´rä–ã—ø æ/IuÓÓA@—™½*aÈ8ŸX¯¡°Ôay-möÀÇĬ‹ -f>(™õÈ_ÒŒsœŽ}üp^¢ëÇo+¢Ùf¢$¶ -ç×S´lßÙ¦ÎßTsnÃé›båJTÔ}ZñC3—Ê -°ÄÉrÚóÄœaþ„€ BÖ»â=<†ÜÆHäæmÜ ²eáò^Ê ¸ŽPMÎ+ÒŽ7ú9w¸Üˆ­}ÎŽßV*?œ+zÑ«²u¡‡:9F²êCâ%EƒúÒ9ÜÇβ?å8wnÚ^¬ÙQ ¡tÏÎég¦W;E‚‘ã˜=BäÂZã‚ÜŸ’vÃ5lس¶ZÿàbXëM'ëqÈä­ÒÏŠ‚­§5PŒ5P ?r˜°Hç!:ƒ La.#r{i]±ë\v¤ŸxÙªŒ›K‚¤ê -,9ú”±YÐ&‡NT"¿P±ÈEž÷Ä\Çœö×–¢”ïÍFý0&TZJV:ûfk„„UËòæ‚q§n¦=z¤hx)D¨‰÷%Å)óÎ.¬‰ìËŠŸz6JÆéQNÇyø(q´®øcî/.}KÚ;çxn}K_O æ–ÈÉD¡7A~\ÖY¹Ôzñc†ìEæ¿=õÓZzW¢ùu=<ì/k‰õ\(QF £óŽ+0µ¹¬…4 ¹>H†‰Â6uì¸A!é`·X›üYcNf-ñ–î1Ž÷FMe@JVeL‘ **iˆÝ·Ë£!!FÉ=ØÍÝr$ -GÍ‘¥SZ® âSÆjivƒÖ^'¸ùåñ‹ÃŽ G(-ìÃH”œ‹í c| B#ztbd•Ö^B -PPàƒÓI 5(‰ä™ÅNZ²Ê¦³,Q霳pw—-˜–ãž‚ÖÒŒô5RZ¯ WÐ\(é­%®fì"ÖpXYîîYMCVˆ§ÜÛWtÛ-´ì!Ž;‘eõOªn:îòK55€à½oëêÕC%ÕÙT¢XºhW½¬“= ªÒ3åèK£©› -2S¦Õeø~SøjÒk´ø¤*8|¶gqíé'uõ¥„A˜D峆F–ïŠåˆ™4F‘C1"÷¨ÐIâÌžk8­¨ŽÒ::M2G<„Èhí«ÓˆýruX{1ò‚ôÍg¯GUÿÚîÁ®Zqˆõã+ÙÁˆDG·g¨ù”þ6‰©‰:wwm¸‰>\È× à‚0Š½ cÒCHMe›Vââþ·Ù‰Æ).:›.ú-ÐJGØ–qü~­Í·ç¢÷¢ùA¤¥ª§ŽãËdäR\¬®q-?†¿$K(²Çc][›‡z±qª¨wá‹sÑs²×uä%k¾¢†Âú’¹€‹E(X ÇêÒsÈ©.dš<ÉI.3ƒtÂ~ŽQ‡A#LÿcçYmÎãÇš2jÜ{ˆ€\?¶¯:ã’¨%" ×Éãçkl‘‡˜½ @†JÉ\LÁS«ìÉbÄdÁfèÔvãèó 
âpTx=>(3½pˆcUÒÉT û[59§žHɯsº!ÂP¤x˜Cò3_Xz(¦®)¦E³µô¸TP—¥œ,$«îy]rŠ‹’ƒö™]%R/ëšÃ¿l.A#ŽOÑéÒ'™:`›'Š ľžÌ~üdlBÍ;7\­r}»;‚¹š»T^¢üB@•Ð\ÇŠÝ)í`É:½È%ñöíUì—ÄT%ȤšïôT!úò8nˆæ\=NÕ¼¸j]<=´Ø#Ö,Äøtíé'µÍt£—ûÚÙ+Y zFnÖ©Æ= qµâ§Ü“â§\b‹”Úø)oèqlG#ÖŸfTŠ*…RŠ†×TrD[ç`p°²É$ïcñR1Ÿª¨ÄWÖ•¶®´› Äq¦y½IËÏ‚¬h-V#Ófvv˜ë ;Ôµ²ÒìÅS_Õ$Ñ‹µfEKšUë/bÛ~ƒÑí!¥=ˆ‘øµ=_«!~@›á.žÉenNzsÍDÜƱ0#à´¨š)Xñ15âÏéþpžïJÅûd|æÎkÖ!†Æ®±Ï?CâY@«yVUÉóîb­)J2æ©àcβQø“j³þ_E2ùo6séÚ?löá¨óêu¹®K,¢<Ì…NÁ£Ve÷µeýÀ,š=Ϋ­e0OؼÏÊ7ÅÌyˆ·ï^͵1nŒLŸ|O„½³ÇY¹ Ü7Ž%À€ O•yªæWcý2G×l‡¾$H¹¥°\žÏqV:áhlžº$N¸”4æG÷ͬªmÆ#zä°ˆ—‘ÌíhéA_¥ËJ‹Ð #œñoì{\Žw$IóJ’åec?qÓµ†™×ò b°|Òé(YçÞ%{–“TæƒF]Uü¤ŽYF¬H!qÛhyZ·´/¶8ÝÊ–ù;FɧÓIL<,ŽIãoÍ5½XmÐKT^mžT®ܸ$è„RT@™ -1R|ƒ’áiÎ}ú‘Wøs:Û.Umøù‡9…•h­G·:¨®Ý*Q˜XÙÄsJå(?¨ îc1tèBk*4R„ËËݲÝv´D­ )RZÑmÒl–€T5ØF{‚„HÊ®lm6/æi؆phÖ…ŸØ}oi‚C‡”²†tE¯$6;C7:„H”Â{,¢f¬P?0ï•Zwv2[ar·ÇYÒ°§YI—šŒº(jA|ÀJ¥ÖœÆç[D¨ÆÛ£GñÜuår™¦ïé1'BRç±·mÖ’fÓ|úðÑsؘ÷÷”̬3z€õtaÉþ— ¶e9zMYXؙ۲洊©«L±1Sœ yp+°+µJXÕ<’6Uô%X útEìFWX ‘°(Ž¼Ñ蘀"j¢m¯[Ó -™£ãí£¯UiÒ¿Að.To6ÄBèz[ -ÂE )ÎŽp„ÿ”½®ÐNkÊVíuyدÅýܱ4Rœ…×Wr ĭë òV<²-W‡b;…;űu -ÅF×ãÿÒúÇ*eg ÚtÞ\È>_}‰Sˆ[ŠÉJOóJ|Ž¼Ùвå@ê;º[Ñ(=˜iuêg·¢8ùEÈ\òz!ñÒ²ÓLŽ:ž²²9pžiÕXHïÃâ¨G™mµ˜ú®´Ì@Dį厖®‚©J!)w$jøwöP”…BN\ˆ˜æLŽZ¥¦¾Äm3y oÏxì&Ùâ4=-V/‡ÔF”kkãCÙnÓçq¯[@\b˜Ž©çÞr1q˜¡ŸéÙI—‚= [üEµ†íã’O`ŠUŸ¡Áž÷¬…1Ÿ="^PbŽX¶ÿÔ‹•²„œ¹wÉg(u‰TYŠ«°â³'°ê¦Ç~uÒ¦áPµ‡ -+~7o 5«b’#dõóv,V™Eâ‰8Æ9¾{Ä]WBKùýöpP«!J<5ÀZsÉdìg…ˆV†sϾnë^¸ƒ^žð÷¹ù¨aõfΑªÇ?C5§7øyô¢T ¿Š»ÊöŒI‘}‡Ò@z]&Fæ.2Ó´ÏQÔŠ¦Ÿ¤îülj5ÎÅ‹•Ú ¬-J¯.6Såþ˜nQÏìcá3â7·%æ-6@)6@ƒ8=(¡G÷šg ®Z¡ÔV=î’³®ŠL{êàÄ@XÿÒ¥utׇíì1²–U}8zRKuª -º0'ÂÆK‘ˆn—Pb«ús*Ü2u}T=EíÀ>Ç#Îh±Îñ wÒÐ  ¹áPÕ ¤£±Äk­Dñû¥‘8e"ÖC³[«ˆ^‘éü:©F!IèÕYœ!O7¡Khõ6fתÓB§V£ *l.¬Îâ“ÂÄcN!ÖŸH3,~Rà+ƹJŃ°æWaÞæ«ìâ4jÒU×!5GÄL­à°fŽJŠÞªÎ(0J„¶Ãƹf3Á´ë¼ãÐÆ‹`.‡¨‰JÁúI—4.‡2DÔ©–Ê°”wâ8V5/‰ÍŽy|É?ëÁ²¿÷Ñ-z)Æo%êg¯jàƒãP\Ì'~ˆU8óþñ&8Â|‰†Ê‹QöeuW'¿zµ¥YiÉJ¾yêðï2ƒ>÷?#>Y”Öì‘{¼3ªK`â•—AÃqD:¼×%–YB¯5ì&ýæF·¨ÔMíY2ÓxÔ`i »™}Ë´$.Éâ„7Í’½<Ê 6ã´À)O =ˆ¨ „Q…§Y°Ü¢PzR±~ó‡<-ÿ‡Qw{µ0ï/ÁUãeMßA|3zàãoåûyJ­`(ÚæxÑæ„ ayL±.@‡«˜”DÐè=À± Ó·&èý‡/`‘Òì1ó}áwa+`zƒ:7"ˆãŒÂ\•ñ $º$46¨IÅ{Ÿïyö£ƒFQxK! -?—Ñš¨ý«$Ì;ë ØFÚCE’fôWóƒã:–žå­GL´¬Þ(o9æ‚äúé8×X –.ð :ò›çRÃjÊ­M"ˆÞ( #Øy”ìW Ì»5`¿¬F¦ä ¼p,OB5|²ܶÙC/ºµSƒ?^+nä”ÿ@^8zPéUº²f±0ƒbº%>¶‚jöèùŠã‡]÷KªëÙèÖ'‘K'ôy)uXwñHq§ZÃÉ»ØíÓÌÝwlûÄ.õ„n2î,ÜG¯+¼—,yà.½Î(Á©Ø‹Õk)ÆŠ’/®‡kmïxé §Ðœé ©Ç¹ý Z…ØXVÉFdºÉ{jhÝ̽ô0–ÐàQTƒQ•ˆP ¬ßÈœ! 
-ÇÄ¡ ÛŠíæˆÏQ ¤»”Ls³ïqÄ]`à:M5TXï]ÅxGÒ¡Ù#)Õâõåc É”çÍr¸¼Â|ÞÃA@z.Åè,zì%+©æ‹ÇÞ®jXáy¹I™ÇÅ¿$2/%`¨õ„3[B‡ˆ‡:D•TZOðscªš3®ÂP‹ý})N@•Ö•UœgQ7u¸¯DĽ i¸!0 ZƉ–@‰ln–ûÜ ‘l›W‰)´‡–Ï=Q§@/ÞV ˆÌYE•²vEÉ Î&|æÙÒKuŒ9tŒ;ÈI° Áš4¶–±E‡ÕÔl¨ñ2V,|ÇÞ×—×Þ‰@‡HjýÔëZÀ»Æ³öB—ðˆzÄÎFO‹‘] -€äçˆüÏÿc8á+î ÓÝ·1Ü›ÄõÞ|ê«ø†9'Cx™k ÂÖ ËÒùgEûDô´6XÍû!áwÝ®pÆmg¸ã·úô†Sœ“ÉÏ;JE'~š–°èfÎö:Ôf-«‘Þ—™ÞL2EFT¹Vr€  ‰ëctE•O·†íóõT óÔë݉¹˜Jå)¯ï9Ÿ„Ô¾QæÀ/DŒÁÃáC9›ÛªqžÔŸj­Ä Š˜E@—¢|tîüÝÛêÒ¾I=¯tÓXèdɤùŽ9|±"iKA6׉–äGj",w-~ :nnÓ0òí#Îe(k o Ù†G5gÇ[‡Ê&~JlŸB€|ÄZâþV8³WÔ˜k¶—:ÖY9;€ë¦}i÷çˆÖã‡8Í⣂–ýX=îǹ,Ç•y »LA ŠžÊ5 EÁVZšíQ#üc4IÍYæÓÕJYWâ#hÆ^wX›:‡%5Piåë*‚„á(µê[ÄMc¡.$PO[m%ûÁòásÜú˜ï^çZ_ÏÍV^`jN‘}†{í¹¶þ•–å°År‡-×Ð*«‹»ýI"ÍpÙr X–V@@Š@vHejJ1DZ:è«‚ÌcMÈäaÏUýuì)îŠÚ_·«K¼ë0<)¬ ´ÉQÕV®wÑ, pH*È% ©py>)~3;ê­kK£4 õP¬s«³c»Ìo-l-‰¥£Å¬¥¢´7fŠdFhÉuVau eÙ|‚Îôð‹9Øe™Ãbí ¤Ä +¿û -ߧT!V]“\ u€ÛÕ¹í0Úõõ[¥áo5ioYmM—›ƒö¡°oTo³²§LêúB‹…Ùá,’£þ†Ú¡c¯Ô¢Wöƒj`&°ÉgÐ2mIÛü*õˆï©ÕV‡àÓÇŠ§ÎS½ŒTdÒá¾É”-© -ž_GY{¢#ÒF!&$›”&Î +y‡J -<¼ª½Öõ“Ë>c¦™Œ£—Íƹ(Jœ%|LTÍ-J#‰ÃöfRw9%QO|té"–´XXT‡wø“Kœ·Vs?ÁãkÍ ´+b @ÓuXΌӠ n½DÕ -äSç²#ô kèæóä{¬z«3´]hIø_ÐC[…Ýcjû--ÂZ„Íš¸¾„…˜¦u•€óº¯×#Z!ý¡ˆ¦KˆjÃ.«Ç%üT ‹SÂgXu_$AŸHqOUÊTuNÈõ5’ÑÚìƒÿM¾.egȹ+ ‹›ŠÅÈ›5×Zä‰ òÁÊ%Ï—›~¢1MÕ3/¿m›Žì¥ÊÊ[;ö7˜;̦ÑÂr:ŒóÄ:Ýâoûà´+RsAlïÕ¬¬×Càr±®ù¯»¾÷=­”bMT€AÅ*#ˆŸØˆs B$ŽöEvV«Ø &Øãs–¦§ÜAªAOPMθzp¹æ!çšFÝ̸Õ™EÑ//–·‹Á¦Rú^³q1¤{vHB¾Ý3åj.«Z[ð¾ÖôÒõô•þ![¹.âú¨¿0—HÊã„"½ 4ƒl‰ÔtÁ/ †5®A¢|MÛj7’’éVhgµ;)’?‚F'7ž¸|Å0]›¹dÞr¤`L(ö)¹í·?žˆ:¨?‰í¯nÚÞšYºGd÷ƒUšlé~úN~%Y -¹(µÖˆ¦Ï_HóéBÑî1J â$<Ô€4¯ô£Ôù¥¦$ûh€.3†w¯}¡È],‚žÇAÔÈý|òÎœ/ÈløÞÛ?'â&,÷05¢iÓö¢ÇÀ¾D“‡?"R®U›€¸§¤»¦ýh(ˆ"V ËÏ.±C2 ëCoŒñ—ºðþÐàÝ|>ÏS'ØÂa•ñJÞŸp)˜¯pÆ1jtù+ÒÚÐÄœêBþ…j Õþy1‘SÔ[h.˜7èüf,–8 $óJÄ!)s œôÒÜ7ˆ†zI‘OPø»,·ªG#¢œzðÐ#›Ò"ÉDü£fÍ?÷K)8!C5õ’@@C\/£ ;Á!ûL‘Æ,ÑžÙfú—¤jMòÛô£§¦A)¸›TÇP·ãHd+H;5Æ”-¼t2þ‰ÚA -6ÓOzÈAb¶›ä“Ë};Z `âE|ÈÇ4!!%ÐÈZÎ`©ê¿xP%7Òé•žˆÓÄ)¤/jÊ÷Óq™ -äh0ÓòAˆ§`Ó É›ýîÓYJtgþK¥„w©pžóËëÅ(à‘iÀ»zˆGºÏÊ6f„†š†õÏÂyª…x‘+¼†ÌÞ»]’Dd/ûÎTU¾ÛÈ‘­€Fãr¾õÁ­g -ùœk(§—à³W7 -íŠçK(Áñ0ùìa½ÿ¼…Îi dvµ”5îNÞ[‡Ž4àÁˆ’˜òsLËù@ÄÚt -@dhõ4´ºËe…|¿àâj7K*º §kl\cKÁüqêoíL)h¶‹«)ø ÷Wò¸2ô4©âÌ÷J¢V*Ù fù’½ñ«»øöÍ)dqú'ÉV…5O–%Ù-ד—Tz!¤Œœ¨œ(v¸X‚©9‹)ëþÀþT7NªY<É ìýKË @le¼Ý e£ƒ‹ì5[wlo0è(_¬Ð!h¸{å¶l¢ioOp"¡ 4ÐÆaØŒôQ†r>uöô9À~«B‡ h [ qR0g""¯Æ]?&‡ì…~Y乄¾ŽÿÔïbïqn&—¥F'©×éºöå½ôº„û¡QøC-IQq°¦5.ðumÖdŸ”ýÁÁ 'é·² µjìtVx2kOÕe&kÉh'Ï#f䎬¬U;së¡M×ÄÏà9ù©‡qÛÒêùÊŸrƘò.ÓT³½Ò‘Ø{øUûY«åj—¯I,_ ½DujÅäÐLSaƒ:$ÐUÄ3GbÉ•UšÑÞ` yX¤cOùE¶eÏ4c‡´”ãFªKb@Zm­«oQŠpg™¨@üÖ9vPÂ{«‡´H+˜7BÍgŠµ{ôãÛYdóÝ€ìƒVR'ïZÃÄ‹ÆJ‡^Q¯­½·Ï˜ꇘÎø¡¤§2œìí¸E÷ÃZÂ^ž*¼ÄBE\i„Q®é½Ýkrè‘“:˜zR˜žeÆ”ðK"ß?Ô,ç‹¢† -ÙlK§žUš£•Q«"K­‚^,L%êAì{Ogs,°0¤ZѪôPÒF»ZÂÿDòõ‹OShõ.\t)ý9¯kíbefõD“¼\Mž…_*Q/1Å8uB”·ñÖ’^.È$ès“d‚aäxYÂÐ:€ý§<2 -PósŸz—Je"k胮X–MËk[¿›Š¼oÉZ&îdoÀrþÜóXäY°N’„ë§óÔHæ7Õˆýe—©/{Ojk~­NN¹‚ ùc¯§Æv‡Y¤²áœøŠ¡uÍÃeVWÚ ~žÅׯê±ç‚|(T ¯aÐoS.÷ÐU­$ -È82ªëÈÎF4²Á‚©ù-j9›À?Ù£ÈyÚY#JçÐóAÒéê÷`¡&»¼a/ÑP§— oÔíŸdh¼µÕ‰œ‘v¼î€ò6Xò¦¾ÊØ#°ËWÇ2xè{ãç é+ ödF:BØÀ»[tGE¦£{Øo»ÈÆhÌņg -ã"Ú(˜ŽQ< ²ÄÇ%óñr–ò«J®1A iµ÷¨)d\S&9èf" -Å$­ùÍz¼‚ñfÑ€Åóþ3äÔ!xÑ0Þ0g£sçÞûÉË]^»£  dSåŽ5úlÖŠ¢ä×À°¥—¾«ðzÉ&'z51s †•)S^„§Ž 3Ù§%«²Ú$G¬i©äçú$€< ·b>€njÿõ(Ó­º NÁ.ôÀÇ͆þX_v²æŽ|@ò«¼ z¬Ø“ʘ»ÊYƒ¢ž£Íü)M^¿Ò”:àĵúyB ]º<$#³F¡2¨v¬Ã’ë1YƒcƒQÑ0ù:9‹­á¹ºÛXJYm&Ûˆw:õ¬iYâõ;½¢iجZˆ®ƒ„å}B/S%—­™÷E®¢ñÜ­h˜äЕõðàˆÈ<¤ô€¥-4ÍžûK¸ ÒkÈ̯xsÀÒe½.I] ívìAb•áœ,%:x&]W´0Š8Ùk$ÍŠÃ^Ñ&ÆÛ‰º¿Gɉ£/-ÚÉN°—×Â&ýN&€€Ó 0ðÚ!0:†8+aP€ÀI ¹YÝk âþZÐ`ŽiBv҈ѡm³QŒè%­bã’éY_F¨µ¤0fÃN¹ q@Ö|ˆG‘C¯G]Wè yÏ+߈ÎXå6+Éù¢ÕÄ=p§‡ -÷»G•ÐÞÇ q¡_’£2 -‚ÅPNqC†l¦Ç˜«pIÆ’ÌV|м)c—¹$@aÏBî^±¤T %“3yÂÏ%ı§V¾=õ\§ôeÝp±©Žp£œW$ØÄg"Ú2-¼p =jûë$CûúÔ²b¦ø´›ÔãϘ,&<Ôˆ^‚7ÕƒkYe6¸fø®ˆ‡ƒë׌¢CKÕœW\™eÌñŽ¼ºˆ¶X%uÁÅ4°-g¤üêjØ-§âÔsX¬Å,V…-ôЇ/[èø%Xò4ˆ¥Z6\•Ü:Kk_1&+.8ç@"+HŸËã]XSìM³X -¼Ú}È%Z@E oÚç‹ Ñ¥GšÒP©àÁW‡©ØŠ8±Ø4¶~}é÷{ÏöuèŠÀã`h ÑAàŠËªçº~R9HàÈ„ÑChKi4i~'ø24G²J?P’‹¤¸¿%¦!_Ÿôö€¶ -–ûص{÷¼£œÌz¸'Š`Un½ÔDy>Ž{ßÎ…{ô꯫œºH¯ñ 
KáŒðm>'(pºk÷P·}´ÜY—¨•iÌ%Ê”7þbßd/(#Ã!,á€']}+Ýa‹æ%p§=ĸYæÙ°•«ÙÀ¨þx‘D™%»ÙDô)yPGLòQ$7gMï&O’g‰à`ö›å¢ùb†jŽÇ?®…›,\½xzuY˜"ÀÔu¹oî;ªèê!ÆZ_iØ`A…âÝ£=a¢êJ1Õ#KÞöjöóÉÝ’# PŠûÚlkBY#2+ßDÝLõÑ”8™øŽqºßd;Ù¦ç÷F¾'jHžíS@fBo79æl`oÃFM,jCˆìw2âNŽrŒRݤ6Í›§ž¶Ïú˜s¢r쀢ŴÀ3Âorê«š(ˆûU$÷0ètŸcžaPc=5Jô’áÚ¾ÑewĆ ×F¡ÆˆÝ·Û‡õ¦%š¨„?ÇIE®rw¨S{™¨&ELûÇåšVž$óp)ç¨BTº´Ÿ +n™p§› 9äº;À!_K˜úeÈ” Îd?±ìé%Zy¶h¯Æ‰l&§¤2Á_-²Ùh©À£¢…¬‘z‘‡¤×_ŸØ—˜×ßÕ#9\ÎÕåV͉žHï‹œZøÃò­O<.ÓÖ§‹t‚¾b4Ïx¯X ö(S(ľ—ð²iòd*ò&Àf3Æ/Ù¼j5—̆ñ»„¥Ø>N.A«°;$EÙs «dN€Ë$ -){ÿ?ºÚôS|M»>ØFQ|@{žÍÁVh"³úÀO %ïD AYüÌ(t‚ -ÎçY±-QL݈?}þˆ"$ÁÜÄÅaQ ¼"ÅsnÇ=¤‰Ey€ly tÖ[œ_¯!„ã!q[‰§ƒè E—TÂ"iYM~`TœÞñXNqÊúN!ûžö¤æ,j×çî'gWûeJsÉ&*AaßÖ¡ó°{Â߃`_éœ{z=!› =»'ɶÈ+¥U{¥Ô³kÞ€­°kP¨O>5ÔÚÔ ½jôJŽ0 ºYÀAæôöÎ,Yæ —™tK0‡v²ºymO~½'@vd!)‡ýx%=¨)åHÍhƒ Ó yúTnOOû=aXõleŸšbÎÈÍŒ˜,»$«ÅH²+Éô“ÁM°YK=¦U–|¥[´œýØ3LA³7£3G!¥Í×¾ÇÈÐä7ƒ ³ç „ħÀŠž•Vïm˜6º< ¦ÝàÙÌå´ìú?D6r€^Ù½š&zAúe/$öƒàcMB)Yá岕ۯ*…w‡––1¡EÍûâPIúŸµ7"Ê H¾¹ÈÄ*‰ý@TßÆì¤×Þþx¦ôOš—µ¬ª<ˆÝYt8ŸÒ±]0*ÛÉi ],Uæqòê2ga\YãËz`㚣 +³=€â Þ’+ëž ªDxšœ£éÒ4ìFû¨Æ½gM=Yn>êQÅ®›Í^w]Vª`½cx`]E‰±C«¯@X扟F¯e0™ˆ° -s½§`¹³(”z€­ví‘‘–Ìþ%ë]‘â›/b@mqX©Á5Ã~Y ¿<ãÀ8j£Š!8¶§äøF³¶µ–‚¶ö輚9Cz¢³j?PLb¢ÜÀ@Y< ‘-ðø¬¯›Ë‘ÌCuBzk§ó3ýn»—Çø*›Q²oêâqŬGŸØ›L†ùEQƲ©|ù¨—ci--Ãç‘öÓ…öF?$—–{ؾ¢‡ô«3*åþ’6KòÞa“ó„ Íî%!NW–`y0ØÚçó¨‡xXêsµT%Ù]ŸVÞ˜”Õ€FSWU~!+aÀý.TäÅs¨XæÚÀˆžÙaµ¡½¡¶ Õ‡ÞJ¯Øú7ˆ9ã+çA.RzyÊf4“?!R)ÖîûSÐ:GznLò±ØBv‘3>6¼~ƒ6¡Ø”bŠM¶ÞjclÔ@îêáÄÍ29I}~:p—¢¹Ã£>P’Ú“éïpÝû©³|ì¥w¤X¹)Ôú$a¸o³§P"ýÁ¼­UlwiBzŽô-Ô€N!dU/¦80 -D/%ë2îMV·ÕmÀ\—ŸÓ<÷ž2©3FÉ:FbôªÖçÅ[0>íTo0ŸT ˆ+Ë& ”¹‡à^5Ò{÷èJŒ|ìá_š¶iíÖ•8Oèi¶&躸ÓX4Yò¢×Bòèч³Ÿž°hÀfuD'HSÔSƒ|¢®NQ\€†*/#žôøîõÁ{ùv™Ùz0‹|ÄÔ:ªqƒIí³S¯žÎ(§¢ÌßnWDø”  él•R‚X6¯‰‰ËÛš-Aü¹Ñ>;ü—î#¦(ÃÃ4³Ü#äÌ€ÏÁaä–ô>)¿¸öÚ>ïq«•æY6GßÑÄ"KÇ3$Y¤- E ±Áab|ÓYœÈ'‹ßH¾GðAßÄôœƒm)@*‚‹J9Ô÷¿˽·MÐñ¶¥æb÷Ð4DÞjëGE*GÕ>ÍK…öz-4!q¸ª®³ #â;+> ,PC^3r˜äu²ËóëŲ!þHâ¼´›M,SþZKZ^µ…éÅcÓ )wpi"A¥ýfæK~¢2 Ò¢n[\E‡i¶Ç†È¨;¢Ø 皉@éõ'ÊJŒµÜ)£¦æaŒxB‡"šÄù3–Xf ææA S©GÄ'«úw3£21{‰H-žR“@G -Òbé~? ˜ BÇÞ`H¼&{•bmaÏ€ªè©LåÐÃ^@²¾UÍ<»9›û;Új5`åË!M¹–”WU‡"ÓE>\l­‚ -Ù_¿±Ï¨0ŽS0|'êytiÉná)(;þ”Ÿ}V®1;#¨UÛé+ë§~3Ùõl¶4$d’%ªK&bØœŒ^±±CÅ™­ .êð¬.‡?÷ðŠJ&(á~BòdûxBa•Bv,QªÁýYŠæŠ%VˆhʪӼmY5°@IïU©x\âïøjõ°kH– -ĆC¦ÿZ’&óÐÙÙí,;@Nú¥^™Ñe*& -žøcȱŠŒ0~jKrÉËH`ÉLÛˆ)þwecéc™pN¡÷·Žù—<Ò%ûA¤ôÅ3F1Ó«js -ë s¼½lIêáSš¿8ÿ¦kÍø©W¶ª”Tâ%rQMÊñœ¡¨·@}ëEvð3C&ApÄ'u4³=¤;Àe`Y9òØ Ú´A%ÖM‚Ùl²›Ykࢇ¥ŒºK@_ÈJh¥Z!jÞ„»sá élˆ¤ V-›,w8øE#"%©á·gÆÙaù²aù -¤~EÀdÞß‘oæî„Ò{ézaì,‹ - ¤²{HSP¸2­)pXzp=éeYè5A}Ùï= be¶‹U󋬗F.ZìGVVàvûz’»¡žÚ¤>ê<Õ¸­«›C¹…‘ÃØp>ñœxdJÙ§"49y9ŸQ¥Êöý-*°ÃÕÈ^`E/g˜<Ǫ„^ ûº ¸³;%m£wi¤Àš—=GŽÄK{¡ž’üxOÊŠâ|Ž)Hq’H’ÕÝ’Õ¥}eÔöž‘3V/ò8… …«ýñBý0c‘€¥!Îx¶°„ùÉ¥úqzR'¡ñX-Ħ…£zéJ5ôÞÊÛÏ&¾ùf1$°÷§8ÑÏœ'¿ýù_þøã?½ýÙ_üõßþêøþ÷¿ýî¯ÿù»¿ùÕo~ûÝ7?þî¿÷ã?~÷W¿þÍþýïü—ßù|_ÿ‹¿ûþwßÿêßÿú»ý?ùÝçíÏþÝÛßÿç_\™ÀùÅ×ó›^øR¹›üÅzCt½0ŽÈ/gòEû+B¹³cH[1œ”„`¹Ê€’1~BŸÿ¢W00ÿD.P:™V.r…¥ -Åî²_¦ê‚¨)±SìBᛧ鿒6)¾œ -Š›qÞ»#tY±Šší“CÛ‰x - endstream endobj 38 0 obj <>stream -öÆi5hû;}Óuä›Ì@ä*èÿÚ£›¦ Q­¦¨…E|‘÷ .ýÑÊ?ÕFX*ÌBRcG@º~æÑûU€Õ¾·¨×+Ô“äÐ]CÆÄ=zw qÚ¼ÕDžE @øÐÂÁÝ—º#µB†_«`FEl€Gòኵfïé¢<è0éçõ¥Oªýý‡2Ÿ~¸»‚°­Ón»áx3 §lG¦ÞBq:§8—ýÌü¦€Ý©\à|磻¿W*áÿa)‰YS|E Î!¾å$Ûâ›dU—ižo¥„éDÖz½eIËþ¥ýYÀUÂBH<ÀÔŠàÈÿn¾ÂJJÎ.óá/C ¢Óôc€dÃã‘s'„Œ~ÜÉú÷= -ÕÛV¬— ‡èsX‡òÆ´§I®i"âöÐ$q—8zÁ‡W¨ÇqaÈ…¾;(“C» F[×öÝ(­V$ -“1 Qýü -}Ÿ~4mÐvO‹¿ÿKBÄz!ñ1Ïß_ÈŸƒ˜e½_HzÑȵOI ô+ÏB>ï‚95„8‡lcc6WJðû#+ÒtoŸÜgôš´0Škózòa/t@ŽKpþ {ÔÚ×F¿êì¿ÖìQ lÿqÍÚ™£^€µ/ì~"+íŠúaaQ®/2$¦,SÔöÜŸÈiäë½ÿa'Ò¶¼©=…êõP!U@ñe#â»<`þ£y_Ü ˜Á¯‹úþC}äRNî\fE/Ö‘–Yž㽫çÆ>"²Í[é§_žþÇHÅ?¨¥|ûÓiŸôQ½‹ào'ýŸ‰)´”„ÀÕ|Z eœYÈ=vJ}w -\PôZ$ú,F\ “`”Ž÷Ã2y͸ -ÚœÖ1"'²æÒ±Y¸¡š¨Æ+ŠÙuÃPßôrˆZutyÝ™óHDPCÖÍÀU¥¾žèyaHáåÿ9’@Â4‹^®ð-:`H;+„,î'L2r*¸ïmºé)Éô²³w?%®GN—Ž.å1÷£'þ -:ï–[†LÖ¹*BCÎÈWÉæë_꾨l±–@ª !œ¢ „íüÇ¿`RßQcÞUñËߥñ¿ÿKfÔ·oÿÏÄŽƒ@wvŠöê$¦s3®`—tõ†”Ê€xøa÷¢Ç€cO©Ø}:4ÿÔºmó/Ê˹_þH£zð%6'§Ù$„¾¡RA4¼™ÉBD]û -“;# ®(Õó¸Ø¬œ’÷-|9½ ÄÆ3 ú™6¤íbZ=aŸ^”8Ƚâ7 ö•Ã‰7ƒ²=¢½è¾cZÅçi¢‚Ï’ÞI.…`õµ8‚Ô*˜=ÇÞØÁùœèó˲²rÚ¢Úr;à/ž>cP"U‘ãr¶Æ×»Eºå¬y 
æ¦me†çPê²Í͹w‡GÁhõ}j{€Iè&ÒU?C\=R×0éôPR„Š¾>‹ä¡þ¬™Ò”Œ{rôµw›¼îƒ86ó.\r¿ÚîÉ? §#•“-„ÓpÀžò™zb…þƵЪŽGÌÊ‚"“"ˆTwÊd#îÑã%Ç=‡­í†­ªº­Í‚`à•ÀOuƺõØ`\Q{Vöª - -ð4—«Â!¾s?h6OŠ ö’C­¶@†§ÚÿE¶©Å§,f‹—¯\C34ÕuXjÙ½á‡\åø^Ðà¨Í2Ðæ³ë­ˆ2æ/íòŸ2h{ >9òÞl?÷ͬ»7ÑU¶KÚ -Á`+••”½ÆdZ§ÿQ'T^½IŽ·Œü_JµãGI5äÛ—]ñz·²Ùvè†Ä¬)p¢0Q´fhú쓃 -R½4 }M!LÛ@nþy - uûuüzO$6hô:Pò; âQM²üôƒ+ñj:W0§Î·ÖÒùÀÜðŠE³Àž:ÔÞäh‹;±?Z2TÎŒ)Ötnu¤;¸Æž«*¯'Ã<´ŒœËÇ™"v¨„jÖ3ð+Ø)ÈŒ"Ö˜e|¹a¬"ÔF $©ŒðË̼Ÿg“uÖ¬¦íþŽö³%QÚ “›r÷G5²hĺ ™hˆ½Q館×.5¼¨¿ÿdó[6ÿN1Êòïž|̉b„šÑc {ÅŒÜ]•DãXcQïêc;ßx^XZ[•¶ÎI19ËíÁë¶âd[Û ¾sÞ+ ŽKpGhéŒÙjÉÜiÝRŽ_“ÒÍLªxêd¡!DvÊ?Öµk3̯¨ñsŠ9'Ñ™Æ2LˆÿX— -ÇÜjå( *@Qy”ÜÁÚµ)hŽ ÜJ{áa9Ç#gÆäw ¡àÿ¦h:Ë@ƒb ÆSB³‘+qC#à‹øn;9g®`·!V¦(M'ñ dûÄ ÷É -æüZ¿õÇ[µM(î^V—ª1>iOb0z×Ñ(¼ç³MtMNǺO*äüW®€Î6u'\q¶ç L^ãë,n–Sí=ì{zz±.0z²±n?† ÊÆd/ýƒGT+m¸+P\¥Ú·M“z³™e{mþNñ qITq‹’ ~mCš×ZƒÇöœõPФ:#OIßß„5°ì¸^– ëÀ£¤[HJ®­ñzËñ˜]w æÑÎHY]u),Û›WKÜ5;¿IÓ -úiu)&rwÊ|ª•YAk[›íÛæÀÉ3Ùe›MVK@"{¿Ÿmtà?ªG£„¬ß!a«ºüÛQaåH7'Rà á­À&)·Wî“÷«Jt&Åê°lÅeµ^T!õûn‚#å& ¯µ9•ß ™øc¡ô3qÆvbN‹Û ¾ð©(ýüÈØ™-š#ܱî–|ØÀèß’qûbœÍ£®ÙR°ŒéÅÊ`av³-²,îy¯XO‹T»æõû‘GF:î±aò;}Í0í»1Ï­ç<ãjèhß "ìZEî¹Ä‘ñ6ОÀÕ?O5…íÇ•nµÈ ¼rô+â*@E+˜{ƒ™©!çnõ/uZ¬ìÐFK!|:ïQ@5_3&ßwlÞÚŒM,J†L:3·.(ó’­}ÊãicÂ,õ/+óVÞ?>Ù’èpðkäºzÍôþ‡¬#’ʽ>yW§Ä4g6ZI…ï˼»,4üΠ­l}ª®å¼Žð‰2æçW˜K¡\ `Ú#}pDÈJñIн^èŸJ5ŒˆO^Í…6Séõ++¾š™ÇW½@J,ÔiIV¾™š H[©~GðôÀæ°WÓždñ/+#0£ä”Iéªl‰¢è±þ¬f¶6&#aOm’\ƒÝ‘<òœÜ°[JvÞqK;îŠ g‰??¡`µqç³('}Ȫ†¨Šwø~¬GÇzÜI=ÜÏA[6`rW^ ×›ÖçÁÛLŸóU¼Ìa‚)a‹®¸¶!Èr¢AÏ>ÏTUô‰ç³ H›Å–"ÇïÀxålÚÆÃÃ'_糌á1Å¢7{FG+-‚RÒZÒ¶ô-Ö®µÉÔjVÜüüÖ©~ÿæV€ ÷›‰ÍÐé¬ D[•ÖK;kW^É" Âò=÷@oVŠÒ˜§¿Û#U–Œ*añÿCPàÜ{Õ×%SŒw ±äRkËU,RiK„}s Y]+nxÄG#ûÌ•ØÍi؆:Rø¨s.®˜j°ÐT%Š™å - qãqds¢¢8á8uÅK¬8Yž“‚]­¿N­ž -äÓ÷Ü€‡NÆlÏ^H­%l”µÙ‹§Jü_^0zŒ6ú±…³èkˆ¨à(ù³H' 0 +5†8)bøaüÒ“âì¸Ö¹ðŠ’»Øù·žóü‹Ü{û‡1‰”¿}"åi=3þjÛNVëŸFaZ¯èËç  !§ÒZCnIq²ºÇþªd15ïü|Ö5ãøÒL2² #¥×·îçÆ2°ôÏ*ðäµ’Œíã©:ñj‡¥¶U¢4Ü*ò×óÙŽjkP L/¬Æ¾ tMwºÈbžëIk‰2¡ÞÈþ®Ti:ŽÓùül•¾˜w!™õË)þëwøŸüŇ³hØÃRÓb딤“׌׌ïæyH©¨Äñ!©¶wGFM_EêJÄGVºSÌg"ÌTt0d>APTOÚ¦¨Ä óZÈ<Ó{LŠW°+A³"G¢‰GE˜"…›Š 7Î*3±QÈ·¬¿kÀL½2ÃsÇ/ ’-J dÀI5~ær žƒä ¡Ô¼á%2ø2_xõÐúPÝSÕ G!xˆ=uPŽ¢ãr3¾*°9ÈãЊÊÏà,SyÍN@ ò -B¾0r„!Qü+ɢ͊~Ù4Z—ñ¸|Ðô„n˜=Zi²ÈÃ~¦ó蹡Ð|’ª…sú8…·Æ;÷CTÜl¢­½UÃ4hå¥=ÉxðÝvÙ5>Z~†ÜÉðSS#¢¬™è6­µd ´»3gÈùá§z¬3â`ùRO¢±Ž;l¬Çgú_Þ0·åC^‰¶G9’ É…în­k=ßö¸Ú»I.B¨TÑ£¦É¶uíè4ÁjW &ªgA¦åŽ5Og-–.©oªŠžÑóùÊsDESÞdÈýGÄwQ÷^C@ÃÀœÏšÃÂ$R¼ë¡×q“×÷Ä–9@”G×,[ de÷eb­NJÉd$ Aˆâ=Ï^Îöív¾O‘ Å3œ©èq+`¥º¢YÖÿð'Èßc¾ûCŽÕ†7‰¯RþcÅóÈÞZ±¥Ao¤m©1îæ!à«ËîÜÛœnI•õ!l)Ù=Jù¨jD8³$Î4Æ8$¤$\Fúªª¿ëIzF\bˆô¿Ç7½tD± 5‘lÞQ7C—g>×æëµÅ–ëŠÈSŽ3ƒ - 8?týfÏÈŠ®ýõ¿Ö 3>!·ZÛsmõ(ÜÒI„•t°ÀÕ,nüèîºYm¯ d”¨¯Ž7•ÇZ ±Ä%v|ÇwÛ*&Öd#s/âµÈ{å`É:¿¤g0µa¸¹>vϽ=Nñ©R¾võsí{ñ&N˜ƒ ÚÞu”«c 8ŸókK‹ÀÔÙcdš%'ÁØóSŸ - —E¯"CØŽÒâ¼ã×1BÞ˜Ú8‘–wœˆN|»1Íù›WbNlÔxU%ŠI•ûƒZÖÒ)ò ü‘'fì$ó1gºSŒ‰Î’ -”DB:ô¬ÖGÀ©V ÑÑF5…º ßrd8ëmôÄ#QÑÈ*éŒûî^»ÎE‰J|àJ ûzK2ë<×eÕò†´f®ô ²ëŒÌ€æïmÃ÷¨ã7¤Pü+Ø+òW`€GÙ“Y[²ÛÁ#ü,Œß@OãýúÁ -ßÛ–͈Û,¬µ„„¬3ˈ÷þÇ¿ØT~óœúþ/ÙyäiÄvJ -ÿÙ•s±Ö -ëWvG¸ˆt…{WŽHTá3DT)Ô×H™³¥jBQ¤&Æò–•´ŒÅjz‚tÚ?Ò¢ -3„±ß5Åv„Îvn%ˆõÌÂãªMJ€0+‰RK…íŒm̈)çPX”1¬(¯ß]$ôÛ2"€±ŽÓ?"Þ± Õª|0 ¡n^_7{ö0­U;«›Í^/|QÅÛs Qº]'®T ßÑ“ºùƒ*B'À¸ÄQTí³7é.##Ä&ó>R“át¦”øVúsГr©pÛÈ[„ÙÞÒÑšvÄʉšXV1J£³p¥É»Œ+¹×Juó3„g\Yúî ¯‡–qÜžüëSbOUP’§a³mÑÃp…ÄÍ´¥¯ûŸþbþ&røý_LUŸl} jG)ˆ -Ô¢ï:’@Û+bØØž2º7ˆD>y€+•|Å$«ò‰fÓùyÙÌÇODCÈ |1¨æ8žÁ««®K~“Ç3Ýx,=Wê5eN-‚u*üAsj”¨ç»CÔ¦51еuÄm‹…‘o”]êŠDPS>«*É¿N©…tk«ø‰jÉJ a.›}N1C™'9_o:V\ýN>9+˜…À™q³UØH©õJû¤=m+c¨1CþÆÙêv¾ KÖ’›‘hFœ=C2uéæ<¥ä8\x®\j%Q7Q¶Ãiö ߃…È ¿jʧi©xôzÁ¡¬WßÄ Kd,kô:Sà% [ký\/çˆ?jQ Ò¥–î>4 Ñšìªg‘Z÷ÁEÏN¯— P†(3=­ gKŠS@¯|V×—æ¸ÐRiF ª(µ!vƒy;Cça¥%XKùšíI›Pr-Éëë3ý…’œÝº¬¾‚›SºãO—Ø“²¬ù¥ªVŽÝÑ€Õj½|gþnFõý_~¾‡¬2 -xzŸå«O뛎LÜYÚ>–ãCší {øÓ7ï¼y©¦‚¹êJ3³PÅè¨/ÈçRùùý%zÊÆÈPΧ†Ð ámi»¨3„€aõÊ‚_‰ñÙbg¶Eêl)¼\ ò;ì1¢–¬×ÜXÿÿ ég$Ñž½G#ýÓa¬Øט஡ÖqXv•sÄPïNÓ¥Y²ÞÓV»Õ¶#%1Ÿ¼¾ò`‹ì>¿ñTptzª©wí¦Ê+-Pƒ<p¬}Ò‡Šmψ¡é¹¿%¦–æúõdø9H-\îgó¨V5ô\èôœ¨š]£=Y÷j!Ù7_Ìê3”«âeÖœ1Vì?ÞaVkNƒÍlTJÍ-nk^DëfDWŸ#Þ¢ô¤ã?·\ñþ"=ù£¿Ød¢—§Ï§zn±Ç£DR”!S{Å<œžD:ö÷¡K„”ÀuwºÏ0 ƒt¿ÙÉÄ«wT€þFÑxñèÿˆV| 
Á¢Y& ÖUîæÎ \›"ep^·º?ñ{>©jÏtDB!ƒÊv:ŒEGÕô š -9|³Wt›ð½Eã§r9bmTƒ¬¬µ -A%ïá=fOõáÎÙÈH“Ó²ò½!ŸF9xBéüï>Õÿ]áBöÿ(k™‰ªC±q=ÑÙbœ¤‘…w„1¶#pü#øפX¿9~ó‡Ï{<|5‡‘{·uÖ…b‡SÂqñŒ†,ìj÷kÈœ²sø–â@êÄ´ô¤_¤Úëp¡m5Sfz`cåýÊ™,ZFa“ç ÐC@†`ˆ³™ÃåS\¤ÈÕ¤µ•¾R>ƒÉ£NUñߧjß«"m·¥=&Ô7È–À×äáâRÙø(à9Ç¿ô@âl¦³©§Â€†êwrƧ…×?¨ò1ª¼xKòÂr¸•ÀM§æ)ÒôÙ6´3 Íú©==nÈ%œ¹H¨*%ùÓäÁÄŽ,E©š¹Eé6á WaÊöÄO,?EüäNA4‹ 0F JJ‚$ˆMµ_£Þ_PÜ#’?ù¥së-·…C2—ñd¸d:V¬ž;ÖTÒ„é@HRƒ>«è'ïçõú¨;”Ý‚µ’®N:):lóß Õ @¬0nÝÎK·ó*Óß$¦>ùK ‘(t-ä¶÷;ñ‡…1ø‘AØêB#`R‘]LoR ñµë”±Ç-@>nÅœqôÖqái• $®)CįCF -p˜RR_E6ò{[!ó;fàpFñÊ|¨6é µPŠj»ä_ù°—å¸ZGÜW[šp£J5 ŒrÔ/q‘¾…¥ü0i3^ú%°N¤vGƒ--ÈUõž‹o½ -Õ9RôÉ¥=ç*äi!Èñ”»9ºú׌7ë呧GÎâÐѧu⼄m*äÀ¡&Ù+üÈןjeis&)™&²ON¶CYÏ9Kädÿã_¢H4ïÚqF!D¶÷»ûÌ÷1¥¾MhæŒÐ[Lá%ýœ™Ðhû"4œi)JÁ8RxWvña^ÍÎî㥠+zéO&K‹ž4ºÌšQOJ†9³-g'㟛6b¨8•jÀûÅrEë—Ø´6‹i^îóVŸB˨®c”.åD2òl‡1ç `l ì«ÙCî“vÍ~Càœ‹%WC.8·¨øÊr*åÁókÁ=5D½‹{zÖC±ü°ëCÖ;ç4¶…y¥…wÄ{q£,r­$¢¢ÏTà.‰´ORÏòÊÚÍ(IZÇ©VR²Ÿ§Á#óª¿hízGºá±ø±ÑílOH–Ç¿cͶñÑ_~ûF*4ìwÄj”žõò•·d[ö(Ñ£ú>ÁÈ^¥³8²™ßQ[ß9DyÕÏïÌé§a«¯(JÄqǧ ªtH;¸¾D#d]ÊÆòðOApžå¸Tó#`ósçˆËèBQdçH»€[ðÊ&“ îð«Ø§—1—bJe“X5³Ž{[C¤÷¶$ö4µ²N§Ñû{3 @sNÆãgdèôêößÛÍ&ž+´”¼„D2gDFO| f‚“­Mݪ«[ÕóZüÊ!·Þšä®ž4õë_¶Ì§t.ûÞª°þï=À㣿¤°þÝL‘UX«Â:mqËNAó©AmðT¢Rîö•“Zs ± "ß›`~Ÿ£éAò~FQ ñÊóœ }‹žðš sK“/îAÑk$Îaû¶:½¼’ÃHi1Äþ&óÙÎT燿бz®ø¨×OÑXïä!#g•Ù©Ú60øXó<¡µ¬Ur$1ÎûKã¿•ÅuÉt±“]Vt¼‚Ó…[žYÏL ŸûÿAUt.‘zÔ…ª`¹¦‹xH46_NÏF*^)‚2•99ð™áB  㻵-!(,5d#VêôÜåÉŒÀµœ,ˆ_Ë;H”~0)~ú{œ¢d®¼Çò÷X®óï#{_¼7PÔ=~µ`š£äŽ²rÿ³ÐwÉñú] û£Æ8 J9ª¯PXÌj¶ÒJËæÆÞiEË#6Óü²¤”Ç^ý¼h„y*0ßù¡)÷š¼‡ÐæË»âªAxkwß«§^¸+O‡\@9%'glv˜0gIìJ³€Éf,r.É&'Úܦq”iÏU¢Ëv}³ôîÉí÷èǃK^ Ö³#ÿâCཫ HÀ´Ïþ¡ -J†Gß«£DûI5õÖ¾Ö:%å£Ì¬|@=>ʆæþQ²QóI]<Îœ­@Ùs‰RY§^[”Ÿ¸iчŒªRénΩw¡#4#¯?@z?ùKªxß.±ôD´"µ\WÚËjÀ7žU8§ýé;¶hYðx×ê~†Ù7v ƒFßq­3:ŒÐï 21WÞ’Œî ‚…ÜH4´FàKÄ„Á{¡!E6dÁ4pÕ uƒÕðã Ô°uÄÓ$ƒøã©œ#hÔ3±¹¦VKWðØ¥Þç‘A‚ŠÞB¹Y`Hr`g/AÒ€!ï Û2®WœÄ q -÷wØ7#ÒGQ]-Ez]™l­>ÃC¶hŽåTúÙfXþ<æ¯Ð…ÎqÎhó‹(Gß-f—ºÝõnmS`Adÿ<ýs‹/Tœª^?²ÂZØzÒê"ªù¶çèÐYÌ´§t6­5Dâ§~žnè?ÖòžDl…™ƒÎçvÛ}#!ˆßê—W]GÝÕ“ôÑÞ0|n¬cuOÚPrí¥ÒTHˆ3,‹yƽË¡æ!&EÊä”X0M€OÐè;8ŠÚB“1 -òH”]ø>?†wÜ!©²<ÖË9£ÙqCŒzóTî¢ý­ïÀ§ u·ÐªŸ(S¦H¬¡ÿD¶¿œíôÔ¬ös¹¦q˜Oé³° U3àyZGå¥Àïÿ`×·Øš9¥F\"gt¸_p!n’o’ÊP>[KÿIT˜M"5gxØ®´ÐÏœœý*´P˜¨IIZ(Ô-!ÆXŸyž«Eû¦@äf8Q˜ºG³åÀ:‚¸ÏÏðBÕåéd¯y3¢M,FQRQhõ}L¨ÌÉ‘Â<):Ú£êºg9þ‚1ùè/Þý·H-˜:í”Ï}(=aàɾ@ÆÒ†ÙJ©%ø¡¥âÏŽX”#S¼î,- gè¾pgáƲÑÚ„hDÒ1Þn{Áöš^Û37ÉÀ‡Aöy )‡—~;ûO(Ë}Öéö,ó+E0GòGK÷ðl¥¡ó4žÿ58KSì¼#D9| aŸbëϦ0\>Þ{EC]É›ˆªò]C’]Ê邉UŸÑå™kÄœç§Ï‘®gKq µÌò;$É#-PU^zWNÀQöºßƒëÊÕyZ~óîP*V­fh1¸VNŸÝæQxÂßúxÃëw„!Þ#XUƒP¨­Ÿ­übÉ;¾—ÌoçuâØ¢ËAiñ -èž"?m3t æì XÁÊ…~!±Ò·ÝAžÌí‚BõÂàïXçì"ç2y)ªÖÅ ñDñ)» ™ôöÒNá&hê´uÌS‘Gœ“fB:*cáãž;*\ ˆÞÓK|zµñŽ/Ñßÿ%åo_‰‚2zŒºNG=ÌüK«!@£+ÐXÖHĆZ ÁjHw;ÛúÐm !äÏßyÏ|ß²îé^±ê¡8'ÕÜYf>Š»¿ð]T߇*ù¡‹*òŠ„5E á4ÎFüÅ[1ñÏú)& -Ú];8× -€Éî—ÇCÁ¬ãÓC´¸à‹)/ñ¸iуølJå;SG›oj-›ù˜\„ È__~nVJØ£#;”¶¹AàRÔ]ûJ¨ô\î :åÝW¥Ü´¨+Ðlõ•IÙÔÏ÷X|$éæ,y²5¶Íø¿$*Z’•c ¼ˆŠ‰Ç™TŸ¶,ŒäíÒ:ªÕúß› ÏË Õ ¦y^ñÈšT[D±û;t†87%¥3•P¤G»“ž¬‚nì4»ù)Ÿõé„V9ü`XXîÇ#ݯ9×ïýëÃtÏ(4;Ë÷ooò_øó(/":æ}Y”즣 *Žè×in,¥n÷KMõ|þü—?È&HšxîØåvÀ6bùMÕÖ‡ftœPWÜgèSšŽ4W h]¤ih*óê2_¶ýY³©6ýQ ]ã0CbÙx'M‰Ê6ï…Áœ·~êäsÄ·ä‰ÿ«!\!¡5f´‘!Üæ±ÇhGHÒ„4¾##Ý5$Á+$ ÝÏO‘(%º¸%IYƒ~’¼¢+Ư–.µÔ£å:qY Ww{"Œ€‡ÝJ)C¤õ:\0ÿ±$‚Û¾dÁòàÑ`¾KÑæVô‚£"¢7ÌÆð‘Äš4\æ OWvœóCapœt7HÞþWDon¿­‡‘_ÚÎQ‹ói´(ÚÌY£ˆÝJwÍŠÎv’yÁ -0¢~Hß9$nÇ-$R»lH®‘ÿ<ÂïAx“ªI„2'”Ÿ"7]­~êp;Ò çëƒé÷¶If8©¼¶–iÒB¦if•à]·aÌÓTÇ’nîIi¾[ÿð—¯È*EBö;¾TŠNólóÅføË!fŒ¥¯7×ì ³Ã9Èñ{Kð7¯„5·Z³b”Í® ‹‰KÚèc+«ƒ¶I0Ú†iDÆÿìh¯zù·j®È5¬ «¸A€Îù£HÑúRZœ©ð«m³CÕeÙúkJq°˜ÛãߠƉU~®T}E[€ÌÝcüQ×Ù~Cöé#*^dÇ`áÎ-ñ†Käw ÓŠó?E ð ·ÌYìå²ûþþ©Ã¦ªÛë~p®=ð€æ©¨ ·+¢²1yÕ±f¢‹p¾>øTKjù¢±ÅlƧ¦‹S´sãd¥â[$!á®öQJ£×/s£ýù/¿™Â¼÷Ï2“s6Il»rgšÊ»H’r£W麩®1Ö à1×l»eŸî±k60ÚÞKìBÅTkEÑÖ„UuÎÁ%Ãu“¯†RÇŒÙ%ÒNÏ4ÅçíÖI Úê–élžp%ñu6@£|ÉìI!Ž£«ž÷±8þQóŸ‹X8?tÑHä½"‰Êù ê9•ý¡‚ω¯ÞÞÛð‹H"8wp;T¸®¡)ÏìÈkc k¹_%ADÕtá¶V9Þ¿WÀöøËÿ׳»yÎà‘PmŠºt lps¤„<<ƒ£= ˆŒhIø@c(ú|¾CYÍ]òQoQØÜ€¾Îœ£ üsGCÀZ9“”°•yœì¯2OQ_&.WÁ€GÔ<ËŽ5ƒØ³Rʾê~Ä?C3)´  Æùë®–.ûZÛˆð„Ž!'Èþ>âKZCºÓfc”³Voˆ`±ÚîõKM9–Ʊ!ó씈z+Nâa’Å›JZÏR2ÏÅ!5Ã=þϨ±=<×róvÐ.à XÓ¯ÇÑnɶ ¦³ÿh}Y 
žbTÝ­É2üŽ¢-°µ£Ç_ -«¡—ss£é ºÞ£˜U4ݵ9+Ž(¯ÆTA¦”^W™¸–zô¼¯ïŠUzì›#å”+"dâ¤`B©[Á®U(Ýéu‹{ž;-L¬ï­ -Ó¿Û~÷AÏ÷»ÙŒzÂÛô£s<þ0Çâæl?‹íþÓÆZY†~Ú6Î - -î¨Â"‘Ô œsq kÄžiOêò#=é®·÷¹£Fõˆ€HÏ®#eæð0w+Ég‡£ØÉU›í÷A¥¿Â ¹µ? q–Îsõ¢XZ¢UrÑ„vËçŠD®H>#ǽ¯CC¥ûg㬰’ýÝ×z9óyæ68C˜sµ;}‚ö~„ÂÆŽT‘Ÿê±þ¡vqT;ƒïTÜ žhœ“£÷÷L/ÚfÛùÙW}=›W‹‹À®& ö`Õù‹G Ü²þÀîÆÔ"Ä·áù÷´þOm½/‰þÛîEÁˆè®Ý犵×ðzFÐOíóÎc“—ÈXÊW;ž>´v¨¡k+öÞRŒ¼ ^ïC ä™>i úÚ -ÝɨÎç½1ÄoéÏÇ„ª5Ã#?ú*÷ªpU/ȧ!qëËá¥äTäÇbñ~¥öEjÁW~Å&f-*XEõC 3=®¾Áv!ÞÁóôå ÊA<7wV+úˆÍÌÕD ó ÑkãCÕ$ªcn]rëQÿìp,\´Péÿ¡Â°iZØ\z“—øKœP̨{ Îf§_Ay”Ña:0¿èØ ûEæø÷¿˜Lß¾9—ú°ÓP·rûzÚ¶Ôó:θ%Ç/y¹“ Šñ¬Ù¨.~ô}Ÿ¸ÜϤU´ËŽŽšj£JœØñIõ¼÷¾Gog°Šw±¡“ŸTF–Ü}‹¢·ò~’ ²˜ÈO R n´¨+;¶7h‚W¸¢'g±‘ìþÑLŠËn` - ôž3Œ“À|¡Wp¡ÏŸþ2ÿö|ÁÝ}ÿ—”ÙðÞî{Þy´%š§–2 0— ¼ˆ@‚×Óuà2|£ûU6•pçw©{5¿…“e1Ãóð®Ç u‹ÎU¹8€Ù‘Y}~¬Zƒ„=ú˜°œ÷89£nºñ}ÄýÍ}̵ÑË_E$Ú[Ô0ž²Ë-¹‘,6,G”]F êÂŽ±8£QçÞôy7妼œNûàŠÜ¡zƒ!:f´=(’/€Ø*¼ß>-=ýÙ⇟R/GeŠ¦)ÉËØ´h¯Ä=CF¬Ý4´ñK ÛØq¢÷þ¯ùÿý…—8ó|?ÔË•(¹xJ Ñ!\2¨ÞT¤:üÐHp6¡.¦pOlAò Y<Ì¥FBÛ¤1I ôâ½·{a`±¹JS€D3#.‚%ïÚµ/ygŸŸŸþŽÜGšb­¨ÚÄÎjÄNKoñ)Ó‰ÎÃ,ºš^õKú<¼Q8·¥–OxN’Åjˆ¢!7,Ù¨VßÉ$aÈ|1F ÁñÇ㛧Lä³IµˆÿQðÎíXב§AáBÉÅùæ¢JÀ-gï5„ü iÖµ!L«8Ä@k¿ß#jh- -i4´Ô»Ó¬òdá¤höÖuî`EØšÄо¨íŽºY† îb˜sñW¢Ct‹[ýÔ°žgºï\~žš6smQÕÃ-ç/hA+6«7¢sÖïU«.סHùAz7ì6áõT¯Õ/q€,‘4HY%šbquû€þ¼­*ÌΗäîÌ’õ”ˆR¾!ÅØ9D“ªI™{/s|¡æ'¡M¹mõS»NõS&ògCQ -Ü#vÅûÌW,“œSC~ðÁæ÷®âÇ ¡3äÛ–„5I³3¡iÉÎëží¹4nqÊøbÕ`€Uàq¿¡¶/ñßëú àÐd¼Îã*øÎtˆ!¿Öw§™Oõ -ÕIÖ‰4Y|Tw{ò¾3¶””úà!jÈ‘šÞ=CÏd¡g^ÙüÐs±‰à ÁßÕ9Ë?(G›v-k3û“ÃŽ¤æ–ºgȹ‡P Ü뿘ŸÊŽ¬VÕ‚À”—!€¤  @¬UÞ ?ùF׫ž›ú½]Pa.qÕÉ•Ú jµs‚h‘!¢k£µ›”)Î[ÂÁ%f¿×O een¡Q°·Æê+OM¾p¤†y­!àÝê - ú·êíµÖ¸s!ìŽ_åa7Ï%_ðÁŠûGïµûíô?Œ1aÎ}´”tUc†‚ÜcÔ™ÖžGuÑV¼¯ÔüD Ff{’éGlQxV/u@nlˆJzÃXJÌJŠ‹ÝBâ©r®ý-¾ÊÆΠŽS©)¡I'€&8`ºê[Hç*ÒÓóÃf@C ¥m‰™«l>Ow¥ -w¥× -Õšò[¨õÑÓhÉbè0ÞpP€Ð1áyþŠ˜“ëéï21|Ø0õ¸²áhKîwv’#iF?ty0êzƽ{Wùîl{Ž·sùÉ)ˆÆtöïÒå3Rüoeâdˆ¶¶£žêÆÜB™R?·I~ñðb8¾¦mþÑ!eóÆ¡cÓö¸[åfìlº^ŠmY×q:н‡-å©-PÓ€œb9÷D!h®Z¸Û“OýM'ø}¿­öt¶ºäS4D:ß]`µ z0$(Ôéª'£l•ø£#òÉlæ_gnîµè Ežà.˹[YSìâªóY– ïÛßIˆÑ´¬‰ra4ûÖâÉ=¤»ˆªoGì{I*¿ŠxSŠž?ø{Ïd¤?ÄboãËø|<õ†Éydï<ËúêIÄ“tƒ’?aä9-ÅóõÁÚýéï^@÷@ضÿˬwÆý¤wL·miaŒXü^6éqÄê«ñÉ—šÂþ‡.`wh/–÷k¢@®Ì±øÖ%^%¿9ÞN™4qE¢@cHÀ¦g9.͹4¹"§šç•ÿx]W÷ZP%ê·kU!š6›ƒïå{bo$i;ìW -žg@`ˆ¹‘xÉ(³ibX -ž¬ è{Íë=,â»rž*íS¯U®´Ç½bÛHðr¿Ç~¹ÅØ}C¯éǵ"ñ -­4\±ÜQ™÷ò‡4»ÔÊPZí/Ç_švHBÄL­êutþ+…ÓŒ ë^Ë3®Œau¦¨€÷©!E«!sÄlh_å‹}.¼Rf:'ä~7³EêåÏ9E1dP´¬‡*ÿU…»½>˜ïH -û`ƒòƒ4¦L\w‰[ß%ø£O› =º!?Qáö¥Ïbyünñº}©hl¿™¾&–B€áÏH[Iñ_å§]š²‰¥:…=Âi‰æÝ“ jäÝÇó~m½3·H‚ã?~æ„MÏÚÛkÐr&Š'œ37”ÜTWHFL˜bËö&˜v Ï™6ÞC"4æ:±® DvÎõSb ¤u;paPq$¶¹Ú|T»·ø -ËR]€Ë|5ç<ÞC:Š°±ÃJý먽½ -ÒÊüùX·£ô*k˜dÅ2½†\Ü‚[t^fÈ…u|._ahò¢hX•ønr=9)ÈŒW˜„k‚ß™¼NmOu–‘yô\ Ø¡Uˆs¾Ÿo‘ *1qÑ«Zé öš¯h ôYÒÞKÞÕÐ*¦îoaÝ×?üê©—©{¤ÛwÒ÷ÕÎG÷TÞ^ÆRè£)pÄ_*vcÌ@{:ŽOn%|6ÎTTëaÅ<–.“ûÓœeð °ý=ˆë¤ -ú -ä |g¸%ƒ3Q;tö( šß{PÇg†©¢1¡{€åÕ2Š+¢S!#å{ gňœ=Þ•{ýÒÍw‘„lt¼–ùÊBÉ‚ÖH¨°- Ýò7  ,Ñ£‹˜æÿÿ·—CÖ"ë,¹ îÿ¬âä<£üÖ|ïèKÖ"Uú3 ¤W¨ýPýàqh1½v•sooiS¼XñÚ¤½v¢‘‘VŒŠÕêúŒ¡ÀHJ5XÄ–"q±Ý(à‹ÎðzÄL¦))ÚûéßoåÌ}•F‹,¦dÑ¢ÌÒ×Uî¸ìš¥Ïb© ‚H›ÏOq}º/2Ðxq‰¡CÒÞ«^eD Þé¶òÔèÅàkƒ™I¸ÌÈýµ¡Ï:ç›y[·Lo¦”‰îÈFéê Cv ¡z©~ n!ïŽx9„=Ì]ê±ý­ñåhŠBf­únƒµ6dé¾²v‹s)hªíã÷Ï%Íú'•I<æµ¾BLœÝ!èåýirÔx®ýù%8‹S^îcÀû§âÀ-T,gĦß:±T~ hG|sbí´¼²ÂÅFµTùØmä—–Yjôy÷_£4©¿úÏß„¿þ%ü¯o·'ü/F"€á%t+‘'ÉÚzvS$€hmõœÏÔwü› -x(æª(¤ô¶³'Ïs™NË“FËŒíþ - x^§°8÷–‚!5k˜£×ùD¨P³\Ut1eOíõ3nv3,á̦‡:ų´}ÕɆÁžŠJžpÛ”IA'kP‹Ñ¸“jëcÓˆV-ÝÖC?lF0r†¸Œ]NÑ})Ÿq¾î8€ù)ƒ8ƒ:u¯û=ˆ".ѵ®Óõ©m¯Ûz(E·€m3ÿ{^‡O˜mù 1rF d ™éSLu5´uê»7Sv®OFðA¡…†Zñ*µ¶ÁÀ=m«;Î ŒCì[¸C¨Š‚C¥ §³>¢qg÷RסŸ¦P¨‰ûÙ¹Ì'¦»|«X-˜îEöÛ‘X¯ÄcÔþ*äÚóiµ«p?yÕí.Z÷ø-1ýä/K­6&y -´…û±Ás~8¢r½ Ú”iˆËµë-M=³¥¨kDU«¥­bë“ù’íûºSpåWÿ{êÑp_­ဒï˜#.ýüÙ‹ìøS¯È&Úó¢ñ“å(Ø’™(CìºáhŸña<ûÿËÜœW’cÙ¢–àê@Ê? 
ÖkzµÈéuJû‡mF8ÉòͪšA2áÇ:4»YÍ‹ ®MC’ñˆK{6LëËY§ ®¾rÄ)tq Ëœïç~\¯vP ƒ%Æà€¶ÖX„rÙU+÷Â’-³"šé¾ÞÌü×Ñixp¸¨éÍ)ÑTXŒ¬§î( §C¬ôÉ™÷Gßõåá¥á¾Í}6WJqL §Àµ?Õ›IÜÜÈ•æ&L9à ú¬ÓFSõŠ:ø·3â¥ÎÂæ–J¬^ê,¦Zö«3Éyóù!Ü¢ 2ô#ÕíÉó£˜ûßÿ%ηÑÀï ²¾`…Þ+Û‘ ~`±â@y·Ô½ûAeK±éeó3^!fa’ñWŒ#|¯ê=ž7⣠ÕõÚ´©íÛRg\£\®‡#¡–“ÖYv„–óì½u­úx-¹Xß«¹Ò†àB¯qÅ4IK¼ÇtШµ©>@LíuÀ¬9רèœ'J´<ŸgðX¿rp¬MbÙ*nsò!yì ³Ï+?R†hpøÍ7Loþ:âÙ´æ.-b¹Æ|ÏÇÏâ63Šçüð¬ÉŒÑ~ÿ(4dõ^[$Qoj|EWûcnD}ñþ<ú8Fõãûãàwó÷¯ï%Fƒ³¼Aþö-²ò'³²Ü6§­0êÛFüLñ ¹>¹¦šLæï_9ò4ŸÈÿ’Ïüíû]ƒÔð‡…_[ü ¶0l‡{²VÕCšd ‘\97nìv»ëùñÆÚò¤9\»L­É$½»ÖN—ÎÊ@bmQŒÝeŒTGò}9ÍI ]ÖëW(ª‚Ï„­ôa$³Îèò.ïÉ €žþþ•_¹bpEÎ6ÄóoVOýµë¢¬’–ŠŽB ï\pON{9µNñ­8ÞÏx BY½©¦×¶;·ôReÌëõà®cŒÓ~¶f߈½x‹l¶‚ WVh°)½ "²ô9’¢hé—>X÷÷×hƒíc¥ÿ#F!pû+Ø7~éPƒó>u¾ÀÞys·Zª1ç]!›vSÄbÿ Æ>ÏÒŸˆØõm³i·~¾§¨žÊ"SjœÇŒoØbíBªS53Ç« -·JðnßTf6èVoT·ÅáËÖÂ_¾ã?Ã9GH  jænfdj¦øg†ð`ÁÕ㺕KEIêŠB_L¹:tÀ®ìË‹†*¼×Gy®#<[ˆ8wøu@Ä æ¹"˜_¹ÑçË©°Ý ã“J ïzÀƒ¤ƒ_CŽìA é§÷‡Ý /$7É«ÙÌ - ÝQALäŒÃ3 ƒ§+›hƒ°™ƒ”0 >qÏÑ+LäZ‚?mAUKgƒlBT©ƒ]ÑCÆFØÙbÈô¼/« ]ÁÞAÛÙ Äp.àjØä9⬽¿~ãû“º^¼ˆ5ç1­§äË^bÆÂí,´ÁxÇôD\Z)qáõ@Õü`í®ÒÈ<ñÃ`Å‹ ÒÕ ÞçˆÜíCê-/hø×eO{ÌýWÞñ¯Kàw[\Caï"€¢…óú;3A,Šûé›ÞEr†6£Tn´îŽqI©u=?¸æ)ÝÈgm!s‰R@!ú»®tà“K4k·J >ÒFùqÏF'”2ùþáqðýª%ððD‚o´Id¶\ˆ(¼§ý -1‰U0v§Í‘-Ä—C¶®€w®ñÇžUrGYC“ècØ ˜ )´¢gêE«JÉw,>£+•2Ï\zƒ‰Tg4EóhÇÏ‚ÙìU3¾A‰`‘[7 ,_(€Úªà:üìÀ“}+–«œíþ—úø6zem°äm~÷µ»Ò§Ö)uŒàÍÕŒf]ÚÏdãÒ~†¥a[Ø?ò±÷IC]üØlŸ.Ç$tSÊ—Žò©­ þ‰žü”‘“–6óãeÜüãõ¼Á²ÀOŸ‰›jÙD¶Úó>)…ôx–'>‰Lx‡g[¯dï‘:Ùʉ9d§?Z*àu%YCˆ»G~f™ó⳦îΕŠë‘ kF‚®xš>ë;§Ö½Ÿe#øŸŸÛñ›‰+_Œ¹rê°YîéØHƒÌ[E‹îtåëqÄnž4µË `E¿;žuÇÚ®7òtÄ<TWÛáˆûëPìéðš„j4ý-QÑ]©²%3ˆ(¬ž4Dˆ¼t?Êâ~7ý‹vL²È1™èbVí©ÁqKñx£ƒÔãk˜€~þ—äXGešÒyôvÛµj ä)_éÌ×ïœ%ü@TcO‚]ç™Ü¥kšÄw“׬¼3äÛE·6ƒy#˜7ÎѽtY ©‡r•‚ãUªèùø¨½àS ôúÔgº>oÑLQg§}ºÊhØð#ìtÀœ)í× K 6¢Òu­…s;|y&5ãôŸ¤þR Gmõ핶Ä<ódhçZ9àd–O… ”êy¬AÉÏd>„ü!ߢq>¯!™ºŒâGQΉ‘+Íßjk!Ù#T–”<Ž ª4~Þ÷Úx®xàPFoØíóMSq!°¹Š+ÝËaês„Ý~éÆ¢ô|*©’ƒ4ª©ÏbÉ_ÀQ¯®ä¾ #º~Ï™:Cø;=ýML ¢PAi@§×«Ù¨å¢ õ -òá+`£ç!°JõÊ`óƒ[quþÍ=cŽ?i‘†oä3(»›½šŸx|55kü÷Iñâ[œÙ弘6 ½.v&Û‹¹N}dÄ‘í =E ŒoÕ`b°Âõ¿+R¯ÿòÙ—áÍÄ:ÅbŒuWÈÀyÜ>{ÇɧDªïU½§tXUw5?ðæžœt?êQag´½Ç®PÍZ¢ñ¨ºí†š[4^s%C‰Ä& š¡'ùˆÂ³—ÈXÄžÃËÚjb:Óíÿ'ükî^s²\"ûš»!Ó*µû¹æ®ºÎ.Š¤ÀèRWøÑ´¥Ó)|’ÍÛ‡Y+ØÝÊhG3.0}"°: kFíáËken× ¨8ûøˆ&¦¾®yª–îíˆÆšÞdB!òÛø‚éÇàè{½¢Dâ?fÇB ?ƒRx»dö”&«Ÿµ]Q$R!o_äµ_ý%»üI–Ðj -ÚúM/“%û®”ÌNbr¾ÝHžêfþW»bd{¨9ÅŒo²ùœƒ4 ÚìÛ–^‰6_§MLÞo{褆h—«}’œ,¹?g¬YÈ‚$…h¥ÐÝUSÙIÏŸID©Ó¨!'ÞÄ#EÈ4§ÃQ1@b ™_$ -¼¼Awö§| Ô¢E5 t®tYÁxë0~ÌxœȈ;ÆDXem˜!V¯°3Ú^.]ßVÒ:îG] Ga ´Y0žN«çF¾¦]u¦’ÜÒvn#\×2OÖaI:Ñ£~…O-Ûy©¹³i¹œŒíì5h†vJ%QÉŸŸÊiûšQ1÷àv@|„@ef0½G’Òª”½å3Б¾Ø|—„¼¿'¯àNôÄB¼I ±¹üÃ7é.sàtÆ4EÐ¥k$‡Ÿ±ÓòÕ±°éEá(C¨´$»‹ÉŽ·Š1΢c»ñÜSÌ+‰Ã°š!óX7>}¬‚/Wúvª¯5xÆëuF¼Œ»Ê¨_ÅüͬÊËfÄÝÌ<Èf‹öž}¼E“4îzÓ×ùÌîüÙ’l´¢4êÔGd¼`ÿI/Mµ9GñXŸÙ¡½få݃—5Ztõu¤LзŠ½yñº]´4qVöÜ ióôJÓ+oþ0g&"cÏuD³JvLšfö²KòibbŒnõýˆx½OÇjÏ¥nÈ“¸ÁˆÓ}+{F?äpž¿Z¹dðyš+ʦxøÝž -¢ïÊú`,¸™ÿçÿì}¨ð·+ŽƒÛºÔ£4”¹ú9síÝ3Zå+c¬GÙ!xÜ{ÚžDÄ2Ù9¶—[7dž `úÍKù¬·Cžd†P7—GOñW fý\_ ->þ•BÛÞâ—½Çl”´TæÞyp±Ãϧ·h–•4Êÿ‰ü졶ªZ~‘s§jšfb™…Ý+Pùü‰ ÿYÑt'£›ö*¬šgÃ'ú)­ØÉþøßÎD‰Ã 0\ºö'·¸8<¢&Äsæw ”S)dÌ=~·3eŠ=òL†;‰ÉûÑ|÷#°¶R¯HÍÔNÓF­RëRW*ÐÌŸg:öÔº‰iµ6ÉÜalóÍóF­K‘ÍM ú®5†c…m¥ÅÕÚípêi£µú™cÄ«WitÝ1“…À†Ô8 ѺÓ¥ZÏÌÃÀðNÄ8ÿÃ/ -€¥-”ƒF¼† ;´Áë#_š–qïkÈ!6ÝÒ²š ûû/µöK†Ä¾J÷Øò¶±E¼$¿QÚ_¹ä&Æ÷š2{dAÎO œ/þò¡­nFÑ•a —é‹F7èáÔBS­iα4乤ne0g=ÑÚ4ßÜÎ3ç°ŽR™#,äÃ97<ØNj¯—ëA´ífBC™†-_ù€yˆãhˆkÎá1šéuס«x -¦ YÖlJ­þSuM'28ÚHÏš¸ÿ>匸ÃÈ™ÓçJ¼ nCEÉCîæÏÜQ„¦H¸rîŠ »ðVÃÚ˜×rpŠW ‰ f,@òPß~íߣ¯¥Vn‡jWþ7|WùAt¢»è±è~gY ¶t˾$÷}"Á|A÷ûÁ¾ÿ;G jµ’}¯E(ŒŽk¯Œgð=[ÔÀÖÉy+ªZ8&K3:¹§nâþ~˜Íˆy!wz½~çqäTûÓêGà‡œ^æï”çÏ“$ýìï'çÜ*g+Ý¥ÎU59È„Ä:´ g|ÌæÖ¥vÞ«GêaìG "·Ëiþμe"üÌQ»eTJknvÞ.[ãÅ!’ÝsÔ·œ'Yn3sŽëJGîDÍëí¤ -7kïF>tûÄd׃gº8ᶈô¸aŠßƒèñU¿Ž…› Q®$ÔÆ°^‡£M…ÚÛ‹d ,¼F\ѤÔûÛÏ"3“-@UûVe?y¼†ðGmƒ]áoÞÎU^Ú'Øw(Y9wþÑ„’éÏãXl÷ÉœNµ1â.M½äEÃ)•^Q¬Ññª9ÓÜÉé{qƒRt‚.žY±ô n¢¯R ¼äi39±GµDä3YᎠ§ì {1ræÝ÷•eŒ4¦®…„LšRv¶%ò êù¡D3κÒ<^°ï©o?x¨w@ÄÓàhß–õ­zy‡z=VF>G²ùŠÙtʧ—™‹D}Äôø£9làQåøó{äå_ y…ñº‚^Ú<¨“áz›ýA¡X@ƒåú©È #jAoÑ\ uÙ¶˜ ; é%)Ï7kyý[¬N×{å -zI‘ Ï¥[6‘1ÇüÊO@ðzõ|h[òLûžrMå,B‚ÑiÉQÊÇr 
‰¨§ÒÕI£uj¦dãY‡¢”êi>|¸ôæB&Ï8„{yÄÙvûÞ\bL¸îh}»&½J†E\GÓjv²:F=R¬]@N…=ç‡ÇJø¨wç³ÝP H+?øJ¯jŽ}™x þn!ÉUð?üó—!¼H)ï¥=‹p¨{ècÜ\x>*ý}þËùA Ölú§}>6›è.bu¬yîú:ó{E1z0BˆŠ·vÔÆ3ûYß/ Þm·¾0YqÍy½aL¶32Tõ…áƒG—Ï_¥c¾ájºß;€B’ý®ItzF°VÜú³ÐÈ©¤ß‚²—==‚€ -t3ž»¥ÇÀ§`nÔÝ ›•^LŸh óçãîa3(ÚD^Þ•IT¥‰ô%ú©æE~&Ý=i6ë‰Äô㊙¦·CúŸ'î_ßéýh˜ï™óŒS:¡GºA´XÏq† -!¿äíeø+x÷ã+TížöaçùŠJóqLr·ù5:ëüa¢sU0àÄ8ã9¦ÕæÓ~a(4Ïâs];_Äü¯ªË:ÍtP/­ÑU—I,Åš”0÷Ï€¶{…l×^š–’k,vC†´'Á_ýŽ'uÑ]‡<#æW–ÖýuPÏ8í„g;K ËG/eDF®Õ Î0a™)å¹#Äyš¬xž³â-X°÷×é´\T>’´ÌØ•F‰‰÷í*Ûú™AÑt­A$ïæl=S -QßuÞYWœZ¨rwä{’3„ ›c²4ò܂lj­šºöý”Ê8ŽÖv­ï vDóìiu¥¨Ú% B=Eö†9ÏȾêÚßLŠ—Fµªœþ¹ú»›sÈþƳùê/oª?â¾ÿKæj¤h Ü<8WÑž† PöB™«wì™fX 4š¸!*çÜs:º3ÿ<´jÈI‘'˜´ç®DHô"Ð×äÎÒ †(Ysõ›ifÈžÊÃxÚõ -=½ÈwpUÿgþv’OÍ!ÅÆ-‘f¢’êð`ðñkˆä¨þ‰ô _) -‚¦ìsa7õ`÷:±45ÌJHƒ8Q˜Ñêߪ²Ô|üØ/Üt€«ïqWý¸ í.õí‡xW -p”¶¥€ƒ–:G å*œ‘'a=š -·í`€f¤hòu ðÓÉr|Å0Lj·NÁNVJ¦/6µÛ‹q™w|ŽV7×Ç@(š6ñËû¤CáK²ýSøÚhF7!+‘;ü˜­¿ª¦4oã'U®âµŒFÑ+:Þ©¤³ê ãRj|ýs”+d"C€´äftÐ#©ÜÍŽÿ¨ö“±Ñ@Š¨àsªr*?Â^%¨j{àèùÈ-hïd‰9ˤs)­Û›˜éi„LjÖJ⪕!³ ö.͸ÒG®|t†;4]£–•÷GŸXÙüõ¤år¶ë¨4bN5±üÏ”dIã¹÷‡Œe§‹·]_ƒr4ÞÙž¥#~x ¤Áµ’y_j¤=öèbðáŒÑÑYCÈù_>`ñsLœ;$ÅF¼ûU´ÜàAY\.…»†}ì^¿Óã -b‡PÅûvú­EòÜá Ïëà¶l|?*OU΢- ìƒ#åÿ܉Ѥùûö¥7Ó§ -Òøì1(¸Cš7ŠÄR±Â(>£Ìe¢ÓÉ.x‹w豪aÓÈHÓÏ!ó:­îý½zg©3Bœ·È“…uXy»Ø¶˜Nx2ïþÖOgVIÙúzÓÝp‚Ö…#þ®êǶª€š7Æ["¿²…üpLKœþVºžÏ̈þ“KÑ¡zâž>F ÒráuËÀ+G|yË:؆µ{^óس̢æÊÔ¾â©h†ôðrWY˜ÂNœgîàýQØ!x5¢,˜d"–é3Ç0ãç~û÷è5Ü¡™TÞó9àü,I&ATÞú«¾LEŸk=nÅÛ}›Ó”ç^qÔêhMüÑ·ÌŠˆžb3ÚÆœÅI"ó¹é(7%UØ#d À°»ôþæfÉ|ÈQ•9ÿ ñüŠþÜóç{üœ/LJâ~Õæ¶ÈOƒZ%ÓFh¥ç<»ŸµsÌ-ˆúSÜGjç(^[‹fZv¼\å«z/¬løˆãºîUqÍ*)iÎßy~ùOù¼WQ2êƒly:Î#úg‚?ø\m©`ó ´•Õ |CuŒx–1°¥Ç@+o{·:âݪý8‡¸j)=†¨qiûã‰å8@iI~¡ågBÅÆ3:βûªÆE²ÑòL[º$cžõú±'Ÿ´=~¸^* [ró³v1ò—¥ÑO÷ªžÓõò¯zpÿbšÉ ¢Aúa×ðÛ÷ñ;ož;éf½VÏý+%¯íˆz> ¢ù¡4½U ŽÏІC7#¿ýýGuSróúÆd -;OÀzõ;Ë_f&* -ÙTgžå‚h6º(¶ë  ôÚ m‹µ …¦µA¡™~xmP#°ü«G¹<ƒ¸ ß,àkOåbu…±í°¬ßùR¨¬ÒYš†°äF·çwx7xÆdqíð•°-[íð !«~1 {ãsEB›ßqÖ”»]´D¬ÐÍܤÓW ] -õ˜‘ÒéX«ÈÂý`$ìúˆü¢_“túí¨}ÌþrþSxFe¢‡3óûU,õ€«/<ÄAÌ6ø‡?ðØQÈxoÕQ¾¸ýÛ÷˜ásª#æ„ œKžŽõ<á2é§÷^)Ë\6F‚²"'ÛúVîó+ ê¾g¢EºŽ5:4ÖsUR`‘#é{¡üD£|£dTˆªd܉Üu¯¦¥žÉ‘HõxjÐc›ˆaTÃ"§žîubÁøíÑ™ÀKÁlÊDoﻯ!Z1QÂJ›µÝÕ¥ìµéÏAšÌbÅMµA¯Šf' »n¹«2Ã¥¦0‡ô_¼z"Zv]ë†K– Ü/gûï]Q¶­ô ˜6”Õ¢ýÂmS_ìˆryU¢(„R½>i@jp¤mÛæ¿“#Lëz(.B¾-ÒL¾§f#±! ¹¼?mr EáUÑr>5›„Uª&©59£‘£Bà¹Hfè³[KsæcëŠ\Ö^cx6ߘ֎ ÿ—1ü¨Ük’ÑX#b¶%øgØsǘ¯î+9³ðõñŸ !³±=ÝûkÉ̳ù ù»¶§*Ç:0×¥(·ÙøÏ‘9 -§1¾+•£^FQf^áI~Å|Óì[¡ŒŽ_Ñ™ïC‘=—«§ñ±§Z¿XÏñ¨Bµ@¸‘+¯åànÈ.£‰‘ -øÁFð*µªŸ[¬ ðÒ Yìçx"n‘Ž»{(µ\0kw‚m›)@hõÜ>Yò|<ï>¶å‚iøöåÿΠåaS-±êLjv ùŽûŒ±üš/¾òÁž5xú˜ GÅ¡¦®yÇtÊüž“× -E˜÷u½5øŽ“ªÊ•Ÿ‡M¸?úºR"×]~ÔÍhœ)6< )†kHCT¡,ø Î,⓶¿2ø~Œ¦^?ƒ°“vq•£þuFZµÓýÑÜ-ž;1ÀTßyî:}¼Âà'ÓXÁ{ÅUœÇÐì5 ó»ùnç?°Ú?•}Û§"ï²Þ‚1mçBDàÍ]ßâ=HÅ€ÁŒZźFdS5eÞö®ƒ}³­›)õ¹"Ç66Yƒ7èºLé ow¯<ÞÄÜTÊt&JÑ$¹ãDµéÁߨ@Á¡A¼qƒÐ\=b!V -6)8øù_͈»ݽ†„ZvðO{@µºþýD8¨•÷÷‡Ç¼a_Õ§³ßKYóœ?£f® ~“ ]hRîì3|£ã€:8SRÏ£ÁòlkÈuƒœ…]u%ˆýX#.4S!Üu©ù;N´ú³x•CK1Ê,3¤‚?QSÈ}ÿêŠB%K¡µEª±kÊœÚÇÂâb‰¥ãêôøÒ•’Ú"ƒÅˆ ㈟7‡øþwœ¤Šî™d2àDñçq¥]›’ǽ -,XÏ·DNwzF~§Å`KÝ¡Jaš¦c°b˽ðÚ¾bñ˜èl‹¹\Ы…ò`Ç·‡Ï© XÓJ5y~#Îâg^ŒT• Û›U(PG8ÝíiB0°"{Tk½ibÒgPÝÉg¿.% …–ëÕ§:ÌWì}þN "QðX1?å˜Èa̳H|œgò¤â[qr0 -ÀkŸEU •ÌH.ý‰;ž˜W+eA’žÓ–ÈTå%‘Æ¥ü¬N\ÿ¨³aÆyêoˆò9_amhRìTmâ]h ĸ<Í•ŒÌœ`1ÞÜe†X»¦øüÏâ*”ÔÈLÝóeFxCm"Cs -å“;?ó¯>C¯Aœùv•tPx@¤Nĺmxš+ª@û³öþÎ D9 q"5â­ZbÕµ"EƒÕ,ôàÓ•}³aÅÜŠD›%]3›#LûˆƒÞ:ËTN÷àëcUJH*ÃV<åd¥"l«Á¸ó“í™’0VÖ<ö†ôñ@5ŒFÝý„*4#i #tn‚¨É7ÙÇèr)ùA‡yl$ÀzPŽðóψòñ^Á?´QŒÊç! 
-“I=Ý ¥×Uš8æèᅢî„:ÌBi„„1ô3Tƒ½2»:l…w:p]fÄ´È¡±(Œ!ûÜÍà¼è•¥–¨c„¦QŠ'™çsÐœà7¯Ì½'=`¢½•^Mb6£E–!ǧü€ëÂ-¹Û²ídªÀE®Ä¬+–Ò®e<”òá,íÍÝw¬ÕÃo¢픈pf#îpwÕÁmîþ#ëò-ò ÙÕHP¡»ñ’û±~_åÊK¦~zÅbøL¿AáâVK”8¥+üð`r+„õÌ’5dÄůaõšôŸ½ö ®ŽœÂ™AóUك΀Šm"üئ͔&ÍõƒSàœÿlj«”!¾WMŽY`!‰t¯£?ó©JARm„©g϶@éGד yÑr{û•‰FÐÕ«µayíÐÊϹ†€ýÁquHA¢û(êÎ -Z\Šª -fOñ>4ŠˆÅ÷Á æ§…³Z [a(ybì5'ôº@¤_|Û%ãC¤Ue”xƒšÓ.>¯<é œ¼-Ëw?~Eƒ"(°ë‡)7f® ³¿bþúR襎²Ì> ±($ÚêRjh‘(]ÅëG…„¹¢A|/?¾çTÛËåyì cÁ=¶mù–H‘“ÔRi?­1 'útëÏê\“ñ‡) }шø,PøƒER¼Yª>ýüw¯ˆ Hd"äÛB-J&…g43Bq‘'´ý_>< „׫çÃï•Bzò+B¾ÑŸV Ež;ã¨öÆU}‡T=¬?Âv}_}Æ3š+=:™†_dVœàÖÃq˜ÿÙœGÝŽ‰ÂÚ“€x~GGV:’‹„ÿ 3s}mlqÏÙ‹îB1Õk—Ž“?ûO˜=^rl•R׋Ä% ¹ç„Á‘æ€QµxÖ&j˜VÜê ŒÄbJmÑT7Í^e.‹K¾~÷m¹ñS˜÷ë€ÌÎyÇŽºAnÙgÄ~,þtç,÷–XÛðÃ<–{Ë…bϱú<éÐá_¶¤xµ«l #Je_tF?ÿå‹ò›¢¸‰OÔ„,3({ÖI¿©(<‰XLÛXRS1¾Š !Zš»;rÐSuåVÞØs#Š -ÒœºtOZŸ±¡÷fukHÍ›^2=ÖÌò‘ß 0{ëŽ^áWg ³DƒŸB[¯A£ÊuUVGÀ•¦Ù©æ;mäˆÈš"ñ£T¡ÐYCfJäÕª…—؞éÂ×Ï®19b½ú”PF¤úo1a4¿˜Â?˜åk5¢O(±âü¶B‹Üz¸AZŽêdz7 &¡”kM?3má-ðÈ?…Oú'Î@ÿ‚3žd$AÅÔó.Ÿ*Ý 1cád÷SMGd… ™hKµWÂPJjèNAaãh2çIª£FHŽ•ö×™mŠŽî§Uí"NÜ3¿jñóf^]å¡(ÙæÚ¹ÕÆF‚c¤JB¤…‰ÙÓ;[©\Ël.‘ŒLêI$ºuØ´Üîä>¼šÙ©‚x&¬}âà ß#aßsîÁGæíëÃófÛ*VÝXºE ?¶ÙH.®ä’K=äœêŸÑ,ù/D2wËHr!ýàC­`Ëæq褀’í°”{—ÁÄ&›Çèv¾þÂFŒ]ü¤Zû¤þ×>1ûúK·ä›ó Úâúç*Ï-¨|ƒl»4C½R˜Çn|¤R¤%K5äU!›çQ©Ök«¡3œ6býu/i,â‹'ø|OÊsMh~‘H¿uŠ(š˜G´Èk^ŸgŸ´B‘…’ýCÊ.›\3o -'mÅðÆñÐ9QÛÁ§kýHfº~é½ð^C´9-(ÿ,>þ™]=”è(dÉGrª\-aÈÎC˜nШ^Øõ)F¾>©z|T©ÏwýöüN4ÆsRþª5nì#R• J&ŠÄü _C ‹™úuäÓïQ怅­ƒ‚žÛM£ëŽP‹Æ Ë­¯ýTéÇšuV.uGÔtP¬^»òڛçïk -ñ;¼eàÏûêQŸëÕJ@žšuwuÙ3 Ù“{M €&Ͻ¢š5AµÎyˆôñÌr.…s]Ál<Å»Uaº­®ªÐôwضx˜×IDÎp­sÜœ¢\ÀË'+§pyšm{Î/Oƒ0SÕ]ã2^s~D¨DÉçX‡ÚƒZ²–E­äÎ…GsPKHþ:ƒ·¤‰Úü×µ.e«9k¶× ¦§²¥òUog™÷1€|?ð££V|ül †uGL$Y§IP2)Ü5Õ®ÉðG’å‘ÁëªS‹.üå±Ç¤rÞó'éOñ›"´~´‘üø—WÏø_c¤¿Ö+2÷R¶1ˆÙ3Õ¼'ÊŠ3¡ ¿–kãq®!W><¼M[q_ñA“|]êCoq¬p X^¿6QOC”k¡6¥pÅåç¡Çø×Q}†¨WCB:•êX{â„Ëî¢J¯矰Ӕúê<º"ï¡{j/Ðòèpºì`èyq^Ÿû¦üƒ³Dv-⽇¸oZÁœ;¥N½% £ÈÔñrj3ÜaàT*E–Pñ'1ã?4ù3UþžÁœ•°Ì=[L…6OÛzÖ“õ‚(%‰àï B}Üc¸%Ì}ÈÂЄ á2Œ¤!zÌ>¡î ~ÿNŽ+ËÍ]/åIð ‰ç ë¸öjZ]!F‹ò¹WD;ÑCD¥Ç*G>p´¢;ïnN^M1åñ•mŸ -„ˆ é½ gÒŽ4ËVn$>šŸe+Ðû`†OçÆ‘&7¬Ic;¶£†¨áªEÜø¯oEÂ4u¸-ÙOÎ’&ÑÐ’"ÌÚ‹Qû`Ï­ð܃¤V °Í3ð ,Å%Íýãgÿ u•:Ñ·ÑÜíéìiN¯X˜ËÒ’;¡J¯ho®¼ -Ü2ÈÇÙ -Q*W6’¢øm‰uWˆJÇa±c³%ðÁØŀ㠌HÍ\¯×Ç¿í…ëÒn=– +ÐUñ· Xy9\ÍŽ(¥1‰?jÅëeaꩦ¸€Þà…ç|”ö@[þ³p8ËùU³qkc-º~D5ÕŽW<é9WR8ܶQ4Š7°ágÌGW‚/þòÙ§@F¢a²-°ìT8ßÌØ’ó‰]Þõc+ÅþÎÞ!3dwˆ¥=l/-“³†hÖ6 é9X ÒEê\ µÈzÆ‹"2Zà-ÌŸXòÍ *W‚½ˆê–®ru|ðèP('ÔØ}¸ÑÆû3MsÁ‘›á’Â’K~ üþŸ v;D¬×¥@W$<…·d³´T€E“4.*Ÿ¬ü¨©ÕÜ…¾ ŒiçQB{ŠrWKÚZáÚ(ùqýäD#g®†*5Éz{-á`øêb¤‰i“͉àtËVh}0Q½OË tÙÌ ™ªsEHo†"Pï÷óz}n5 -VPmè[¿½Äaá]¸7 ™mï -þ!9JÖ#͉üA‘,¯ïMKz ’`Œøœ„HEÏ'Ž÷Š /ô <|¬&+>éàçÝí%i£•×S!-&šèÝé)ýoÔgÅXÂó,ZÜ7})_¢#AMwÖƒIí®ðPS†Â–›Q?uü“·ÛÊâæü$¶úé8=ÿH#ßk3ÛUÆzÎ/Ý9?ÊŽÜìÆe:âC{Æ™‡ ¦ØS8Róü /Æ]çÍ SM˜GNnü VgæÕBbýYê“ò¯Á:û³A,J@ª=œõ´ïº@&B\n)½Z'¡šØÌ2O½UEoj/î›"‰OxQ;Ê}F°í唬Ã>Òž’OãJôòÐÙ*l7"u¡/3¨àûŒkd[†ãÀ‹‰¶ÂS>¿ÌEáãÜžº xQÎ÷lÒikúÁwz•x€ùv¯7%Üc9}¶46è8¤¿ÃBVs°¿}­?ùƒþÎ|1Ä - Šµćs÷‰±ÛEÞ.´›&á0h¦B£b¨Ž FˆÝ%»³é»ÌE@=d(§ƒ # Wó8Ê<°§…Ê!W`Æì ¯¥EóǦ|ßÈ¥À—›Ò`+jìý+tóÀ™<ÄUj{±Y<_CN~&¾@Xvž|ž?ÄL5 ×V,’÷Æ´˜¿±>îÒGhÛÙû€ô`«ØEÍ!Ä(®Ÿ–= (λ=â°!1gRˆ£ûækI-T¨öü-!Ý›9B‚‘EÆ=2$¡ŠG*ï#î”å¢ýXWÚK¥”Ð罞|F¨O©ÄíuÇÊ£3§9JhÏéBÌ/:§oßO‰ ¸MçW€sNª[s#9ÚLS®0~£Oü¤Ÿd*×èƒÖº!c¤OE&ÝíÜÉ6òæåR½EŒ/z sˆÉEÁ*º¶¢ù;R­½Tcï JBÏl Æé9–;µãþÒpð&D¤v‘I{U7ÄvÄ ™r¼¤EH ŒVš*7Ñ7´g¡ -’À3†~ÒU÷{_¡ $î ™on¼6•£Fè ÷-2xÄGZà܄ׂo¸×2aÕ‚z¢›0k!¹êœêæ`ÆÑRúþ+½4̸—Õl|Úê=Ÿ0_ýå3×’hË\ L.jõP%ca³—T÷¦ µmÛó:¨açR—8²a«û¨­UiŽ¸Ép]¹@תÇîh¸^ÑGrô-H«Þ¢Ø÷mDƒ™¾†8£¨'÷׃ s¼ñ×¥”î)§¾.… º©¶>¯H甥HtVÀ¤¥šsžKœíÜn¼ÈðÕ‰‡(¬z$W<l`õ$m9üèh¿ÈýAƒrÿ”U„-×õ+±ã–§Ê¼(¨¹h²×‡Ñ¹¸sÉÜq;:©QŸ‹7uÿJ}L¤ûåFa›/lžrw‰›ÏWLy(~Ô2‡ŒÏh?#uÄv/ôÕ»†@‡èºçƒWø†Æ9ÿ,®^bs/k<Ïb4-|£!¤|x–l×¼e.aì»íÚ0.ïꎫ¢¯.Ú‡"þ}Ïö²¿IÙh‡b}*P$úÞuQJ{–ržz÷®ZðýTÿ'b'½kzi7?У÷ƒ'”„³¥~‹ ¥.©²m«~Vû¤éÒ>É·Ï($Bá T¸û\kÛ— +>GÝàwÚ-3p{p¹‘×tÁ$ ˜NeB°Í¿ƒ£ì/Õ».n¼à ûÛŠ>Ewk!‰¶µûS4*]Ò¹ÅF©ÞÞaP¤\çÍ@b?!&½t'«µkroU-øæ¡–žŽ{ƒ‰nC{8ÕH¼MÕ!$ÐÔ9Î ݺÕqáÈêkêŸ÷*(Ÿ¢¬ò9£|FÑ|áþΠs{ÎW•€W¤ì˜Y%KÐË„(‡\1º] ÏýŠÛéÃt¾¼¯¸]„‚gT?; ¢-ªga¡ ‚k‰èös ¦²E¶…'Þí¢W‚õ¡¨Já=miQU"®cãZý» ¶vCAÉgLwqðýWÉÓÇ3s·H 
Ñüd…sçXÞ¥Štª®Àgë±ózóçE&š­«®Õë³3Ç£:àCTÁíY•?pã'‰_zJ-—ürT¥ÆJ3˜Ë€i"­§bÊʾÌìéSÖ뫽¯•‹ ÆöŸe=ÿ 2C‚ǃþá^Uç´äü]âc©A¡ì·3Ĉ<ïü‹É‹ëï³RÇr:V¸c¿mHv’ÍV¯eñBèuž¹”_¹Ô®fHû”åÇõT¡,2é1üj‰‚s¶DU -Œç‰3`4+8Õýúæß?UE¹‚åsDX¼6’ÖJRòâ*Ò¬;{¼ÖG™Ú ¨Ä߈`¡8ìV¬^6‘ÒÈó*XHÒ„èùQ<à´ZtÆrb.ëªû+=M ;°ùŠrm¾ K€p¤P­ÃVW:iõ „±_½ÿ9øôR9¼'øTŸó3©4÷DÅv‹ÒS˜Ý^ߢÀs„nxT< ä«ãï¶"aøªM P±Ù䌽“q$õt?t;ûZBnUÑ¥5$†8b¨Ó?øRï…roÏy1CŸHìµø¾AGgõ:`Á{¸ô[1“¨\ÿ¦B˜úéø-ˆö?+:_u¼þ1ZtPŽIýtFEƒyÂó²âÌ •‹‚NŸ-Ú)ù{·î«° GÕ;%VþÑÚ¦Ç -nX±à± tvZOÀÑκ¥”<ð(ÒÂzÊØÆñU‹]Q* `!¡`܆‘¥Ju÷ -ÈXŽu¦…Ó‹xYâøùR+Po„/)yexïÑ_­^ ™Gµ é¾Ju{ñn³ÇýÒ™š{Ã#HdXBI²8Égé6^\€èw¼õ’¶Xa:í;®ˆvŠkØHoõ+ìVÎhë©"wFµì ˜Èœ=†ëÑQãQ'¨åw¬œ“òß»õ\èâyº6×Ú÷ÙüÇþ×3†kc¬xPRa{¼âUûókÑêmá¡ø -Þ踗ҲM\å¤ -›'õëz®÷ÂfËá1 -“zdtzHŒXª£K½)q/–rð£*ºŒç0щ¿Ÿì«§¡åŠ?å?ª¡fúÿôöß?Ðït²uï%íšÙPɶ衾Ⅼ'_}Öœ¢–% ¹J«¦ïÆ,¨(™¾mã3µÿŽ“½C-¼·pkÏ0Û‰â›1  ©Œáx/‘d½%Ƶgd¾þAõ.Šß*o%ÇÌ}Ë+‰Æ‰rlU¬la&P]&ÊX -tõˆ†ìù/³÷ýýý¯eT¶‚¢Ãˆ(´*¹ÞÛ»’æªvj4SÖ#(ú¬Ó|O ÜbÎëümn‚ gž7•$ƪñpSíŒ ií=%âù+WÖ q-¥Tôöè'Zå+*àghêƒr¥€s‡üÎÂ$ÛËX’WÑîHŠ ÝÞ-ïÝ:…–ÊwTü^å1‘1ÖzmS·ª6:’ -gâNš*¸÷XwL.{hÉÄã2Õ¤ƒÜ§bѪ a’’?ö¾*QlO]2åjq‚IR©ž¬O;}MÝâ&æ27VèlòüêÀ& òH+^žÂ¡šÎ# sÏøžôCx—«tHkýØzç­“áÜÉÔyô(ñÍ­åŠÈV.µ¥H`q\õúІçSš;òTÈfó«Ýá öÄÓÊ5â¦õTÐu‰1³*•ä -Iííóoõ”î@¡o˜à­Úòe“6÷èD: TB´»ÙÎr²égiÖC>–ÐÏŸò/ÿòØúƒWò;o˜³-8d‚ùjÁ=Ó0óüŽé½¡áîq4ºm[W\ØúY=¢qºRWh!_æ+­(|ŸkÕ0 Êñåhƒ¾™\ä–“}Üâ}uýQª+Íÿg‡|Å=tmQÀÎÓnnwhcõ¼®¸½£×T÷;”fè}Ưö|]ꌱ–¦V†ÌÓÿ`ÐKg€ÊíuÇ5+…ú†jäsâõàÁ¬¢xp?êCað)ìqÌ9#ŽRz7¾_áù.ŽŠ…Õ”ƒ“›Û–¶ìƒ†ÜúN-|½-ÅðµJN•M!ʽv¬¹˜&?Z$fn©¸±åjO•¢ea†r£´œÛšÐW‹¶÷7ô¿w?íñ÷'8Â}þýjG‡Ô‡A{€Z÷¥B¾¥ÀÙGfªœC*0ô[–†‡E€y‰”ñæË‹ayŽúò¸k*nÑrÔwjDþéÌöhé½i‘n(i)íÍ ’ß#‚9Î%Ùø·@æ¥:¶áQ¦8×½'«ªùMßLŽ¸4N]R“BÏEÜ5ßõ_QÔAQ¼ëè&àÇ)’ÙFžêŽT(pľ‚ Ü؇¾ñ Tj]ȼ¤D…¢¼¶0ÞŠ-cM¡cþ)‡¾¿~G ÝJµ¶– dÜíím©Lx›OiÂzªo¿Õ»_ñ6Uã%=Æš^«^r§÷ù/(¾‘áhØÿÁ2÷Û¿˜Sx^¬¿i –ÿ5‡™ƒ|XÍ)ƒ’ïÄä~9-ÏÚH#£;·¸9×æ2€>U‘,-žWO”Õt–ÒáÁUð-ä;yµ_ñ5m÷Ã6ƒNë·Õô¥èòÇ^æÜó;ÄM¹½ó·­$„úîk¹ÿõ®·VÄ®˜Þ$Tæé½`.Îœù;þÁ ÂɇµË>òþÄz83½ä#|M€ØxÕèÌŠU_8‰™’ÚDp)IÓ¡fRŸªÖËù·±'=”¬¤!ÌýRh_rR>lsŸð!kxNý^!)iÁ;u—£&oè7E‰ì¥eV3CAro†èba3},E5Œ\Õuh›z#cKØ+1¬ ‘$ ”¤Àz÷åÚ¬÷òÙC^?â[ŸPR£}ÖÜ%+ê>: cd"ì!Ÿ<ÐUKbeÞ½²5÷šPèàtÏñ² ˜{þð 85wƒÖñÏÅ_‡$3ÅB‚6 -Îs̄ζÿŠvG[…ô|w>çËuw@ õ€ÜÔ+\J_Nü=î9êó©y¯fö(qFKöÜ05žQXÜFZê z‰ÈK‘>š~þÃg¯ž¨ƒß>Ä£¼=c$þböšƒeæ g«Sšclm¤@rw¹A+æ|àÂçKMo@…cÜJÕÏWË*® uÂVG°@ÏX‹¬ZmKÀŒz&Lš·òÐ endstream endobj 40 0 obj <>stream -/R‘­!8+±®´oˆUKo²­ÊÏ»e¶îï —*þîÈwDhpÆžj¿.e×-ßJàˆ®.¼#¦w??Mçc÷È6«`Á¶3‘ Ü^¦Ë¶‘_°yÎRšÞ8Àˆ0wv£íÓæógÏ1ùÇgô÷÷o~M¦¦c>·¢êî(0X¥q:HxF{f’€¤,™+‚¤?C¢{M¦=öÖ÷ý>™(ô)$õ5™¨÷*¯% O[LUë>ó^Ó;âPó—]íÎÒ£LuPpAþŽÈs¯ s;lu© yңؑ·rö]ºNõ½E­Eקê…´Á‘õþQQ³¢0ôóÈ¡‹}©w×E Vð(ÖÔ£p]·ÞŽ„}äÓ|õ—ø9‹òVN§Ê¸ZFOzwV²K¶=É÷Èáݪ[6£0Îô,FÊU¥»²ÇD°lNé Æ8"²ø?ÜŽZ6K^´¶Øµ­±7;1Ëö:Ì0nSÞãåËzÍW?ÄÅ•¬ñIšqóñž«•)ýé…ŒÁVS*î/tjD¼k$CH%Y7OñfeŽ#ò÷ª§†À°ÎM7~«ƒEÉýÊ@•˜…àDz€}4 |¢’xPµ¢ÂÕzÎ’Xå|ûÁ—úß°ñý§¬õÁxaSáȼwU1µóƒ°ª!*é0j1ÀÉ÷!å@~ü#øºú¨=ƒæ3meŠ8kí<ü사9ÆTmêW¨Á ¥ì¦ÎûÃ÷B øˆµ­ßÙô$FÒ”(?Á’ÆÅg/'H(ú ìr3=²¦*UÒ…´©Í8øzª›ÙsmË”ã)N”J‹#Z†Œ8ôEõkDÃ’×>[r{£FìœeüO+ñ—ATí$8Ñ[I‰ñ¨ñòS ¸#‹ñx;—”c$Äãç´Œ Ì°Z8o‘>£TË­úáVQÏ£¡Ö–òSÜøŸcÉŸpûVvAÙÑ•³QÑJ ¼Ç~bn‘•qfDx†Ïõ²ˆOû†ã•Ô–b5Ü+è˧@jmR•Ð÷Ó½;bÉîDѪԛž NB‹ß憥tRR£d…œÏSÄ"u¾ÐFDÙÑzôÝ ÷h¦‰É°Õâþ¬!ÎyЂ{ISú jÒ~=Eó¦Á|(¤ßÆ»GSü|÷.bÞ·E¤$çD%vDF¾¨Ô}—Þ3щA…YÂ:ž×·'j&š²ÔH‰¤À÷Ïmàˆ &X„Àb¾U+oÜVâm{ÐãwÔ ^Âdú¯á|´eÁðïŪ1êêæ 'SRæXÄ‘çäŸûÙ«Æè¬õsGl"ÔO`ÈxŽ¡…~QRüI‘1‚²ÔHvéë’œ'B|€IÍd?ÛS¤ïôK9¹Å·¤ç©«ï!Ð8K3꣪¶Ð켋H#©ã,Ð…NÔH*®ØXe^¡éÏ-BdZ‡Ùž^H”•)Ó]u7s -Ýñzݯ‚9IÏEã/ênœw˜ÞÏËÔãd¼47‚»\äžLÈ6¢ÇwýN¼QA—·DÉÚt¾„œ±µZm‚üÔ)ËOOÕÁôž¯#ô»ÿΌ✖rX%]Q°ã§ n2÷†t=fÌu”s¯íª6¡í]3*í¶À¹J_^<ÁÂ~-{¨‘]šqñ]ßSËhîoó´ÝÊÝàH…ƒøïS#Ä&0‡ÖûÛ÷sâåôvÁJµ‡˜ÝYNoΊxÔì¥^hI³ Qû™0h”Tg_1ÎßÑû×ч1Á߉/!U“¬ø®(/“6ÿ]~*nÐ>Û“èÝc½1JVo+œ­ìbÛ“ºˆ:ÌšP´9·èß¿O¨ƒpÓ±Ø"g|·[¯y‰[clô²J+nîWk²<Ô±¢=¶_íŸwf:=°ëÏ:¢—y„¿ºvxâ¢Uf{–ÍcTŽwô¯5Ÿ0*ÉᶸŸI‚«Ý‰¯•0ü–ZÁ—6³p¿wjþºÝˆG̺‹ "¦TPЫ©7쉎xõ¶B8~ó¥ÊMäÛ-¼>(–ÀýÍ:Þl›ËÉ-Æyl {ì1b½ÊtIÖ±ÄçÂS~K%ûLA%,)ÜašPÀÜ€ý¬KéW¹>âG áV|âýü±èHµF—ø.PX¿‘"'àFZy\+×¼¸ÈÿŸ}ž‰ëìÊÊç4Ç·÷/ðÞrnÜ„}¬Ã*%CÐvo+Q€°\8 wÀ[ƒ€H|·ŸR"ý ÞúÌhù 
ÞŠj§"ƒŒ½”Ѐ‚Ö'e}m∋3š"*æd1"-zäHia”â+:–o,H*èüß[Veê°²‹2bšì$ ¡…z:û Д粋„Y>q—ngwðŠ`çgFôèt&¤O-•[(ïjGõ4B2‡“ƺc*ŪûoìæbÇa^²žûò~Ïxý,o D"üÁ³v¬ ¥sG ÕÓùäP‚”»|æKNÈ%ƒšë.&ñŽ80¿ë·˜¢â™ie:°*Ù:õ½u¯¨‡J=­B‰ª“>4ýEŽ!w¬Õú°7|RÚj[iä*WZ¤yvàvôR5»¼þªÉ¸P”Äç²½/0ê´;E]‘+fûWá µž#1«òR”\ Ãуë}‹XHèÍtª/Hžöád!• A3ðÒI1ðvîDä?–þ¤wIåC%Üy!’ëÖÖSCžø·`•…À.†ohAZ¨Í€Ïõ¤){¬+Ž¨’°x–Lë™iy„ÒÊýfI½íg§ -K¬àïgø‹óÿˆ”1ÁÆ#NEo)í*F" (VKè£èéö åó±¯ôã¥o]{óÄó‹©Ÿ#Ðß(/Ÿ:ÀÜÂö>õ֘眳ù¯—Ÿ$Sïz½|VYŠœEŸ"úÄÉtã–°/½R~ʽ’2¢ÛÉ,rïënzdXˆŠÅR£TOÉßAN$dZª§|ËQ^r)'gË)´×.³…Xïé÷à|éŒéqh %ªãð ¤v ]R®\ÙAÌÞÞ¥\QŸG æ~ó‹.N“ŸciœW $nR%2Áù»Ð®ÞzGÇ»ÇñóÅ¡›R;uä+GÇÛŽˆ¯¾{!Šÿ%Ýq!O:ÓêŒþ5ï“©Ü|¼¤FfN…­¹‡í°šQßîÆXG[:§—†e¸5B3-GÅ¡ú2×Ài?ÏWÆ+¢}‘VäÜAe -÷a·$|ÃN²c×°)ŸoÄï òÞ1j\?3ÿó&S"^1¯t` /!Ý토‰#Ž,·ß>ÓëB+A;-æãÓwæp„ÂvŽPÕ8÷ráeup³Î€çrh´†oÿýúbŒWOͲC‰<¦Ö\`á,ˆ£TáíΫ? J‰õÉn¶NÄ ¼dþÅ`§'"E¶õX2`AÝQê;Þ\ŠnåøÒ ¬d2vãF7>CâwAÃSbC¿„Î#ÑX?¦ÒP¸¯t¥ôÀ°Ì!7•F AkÎ#Ź/êØ) Ù a€²pŒQ"cEuXrϳ<#¸¤TY½yèغA ¢±œ2ÀÒ¥öòâ80vHó¯+Á†<)?å™fø{±¢¸`ZÌV)nìÙC?4¢C÷gžÈ#—úö;½k¦g`s¾k¦B¬k@÷UœàÄs*â«Že" }B"9™N±)'0o{ ù³qb6q*Ð¥~À_B6ºý„aÇ̦ðx¤©˜-¢ê#¶E²‘Œ#H'yÖ´ tøT«ÅÉi#.ìì$½Ç¢ ¢ŽcÖì˜yð2$>´Ò¢ÕŠy]”7ßâyp}T†¨Ki 2æ6ƒÍCã}ù®úùÝÑq •‡:pаšA-ÿsv1¦`¼§6ßÔ¼öÀ𷸹Mç¹c27ç#¤ lz8.Ôvj;›½ÂÜãÙƒÄ*TÑzGFcØ—'hôrÍÙÅi.”Ì-ùðŒÓŽ»^ L­!ð+±„†ÚιÉ8 -¸Ž  ê~Z†šf¥½“掎z¤: m:Ò`‹bŠBþÒ•ÖaŒúÙ2ãÈ#7WÒ¼tzÉÆöŠ=Q<ºE2eüŠ.'³X4ÿâx¼”âæGðˆ–(B ö-ÊXÈÏÐcÙÙO^P%§>éQp%fgœó&“o)C>7LÑÑË›n†@?˜åëTÕ›3˜yZ„÷ÈRP›QH‹v_! u¤RŦV§W|ç¼ü$ðñGDå‚c,Å™z5Lý‰êÅè¿Ì—̾Nm‰‰õQ¬Ò 1áþ âŽš“÷œå6 -7íª-C,^!ìD›9SgRY\Xm£V’^Ç%E =rgo‹ZP"Xιù;pª„üµ£9½NPÁÒdÄœðÈ°IÁ(¨Oeœ^W¢ 2vÖïx䟰ëÏù3Ì‹ð]Fê$oE’sß6÷‚LKòLçù-ìâó©JÓZ¥J'«ÏBáÓ"CÚׯ'MÝP¾-âÌ}ë`¡‹Ùü¿ýP/Ûœ#)Þ~§Åœº-NÉj(5iXÊàzHï[2÷ëÌ€vϵšâ·öoÿ’ùTì}ìT]ÑÚ)a!BƸ¢¡| GÍ=ÄNy¤K5£Eh¨àq¬ÔW–ɹ±(+M EÁ[[ -Ó»D½õú‘Vا1gÔ-P8éçrE"¹]ËØžÜîˆÆ1œBjêBϵEämÙýãˆ6ì¹HÂ:2n‹†Ì,=’ÅóXW:!¹cϧog—5 áJnøî+´0óɵ³#¬å}dÊA¬F×tœkß!-Àf‚_.ÄÐRÔ8÷£n§sÀ2F/Ï ñ„Fv㪇 -eL’Íeͤ$p:ßÁS–e^Mìsãvºf®àT5‚=}WfØün² 7ï;Sì)Ê>L’Zæí·bM\Ô‹ €×öEW¶Ó$ÁŸÍ–&hÞÁ  ¾.Æ,Ð< 7Z#îï!HMå[cÿÐ|ÿ·‰;%Òòú¥<ï2гrï± 5OŒwûy±vóç¼ -ueZjÙ©m¥Ã¹Þ=Æ–ûýï>m„6gÜÕýÎC¹9]-íã%KÝ2.Ã`Ž7ÏÌC%‚þ-*Do‘Î÷ V’ÐÆ~€æR¤¢›˜HKöÇí—³çB<{µ¢©•¡ñŒˆØæGýPØ¿¶•¬~÷BælºK³kŽý-‘-€>6vwO&Ö™;~Õ‹ ËžÄöÏñÓ/P(b„bÃS¿£yXÀ‰²­,‘ÒaéFü{ž@ ŸDh¤¦7zÖ4Á^ÜœÔt KÓqÅ)6[i²>œ4¢a£™KEdó9ú¢ÏΊDó¬4j¾ÒX’]©“.Ú ·Þ_«g¢ç¶ŸXn¥ÕR -J¹(×çåÅ^ úØ×ÄæçÊv$Š?˜¶%-Óp@[êPXx0˜ûJ—"6q*X¦&O>%ÍŬdGÌù$DôE}¾JNM\Í 8$Ħˊ‘>çvž‰ý8šù[Ù— wÓ¼›SãHV½äVù¨ßAG§[G‡²¿k»Wváà˜VÑYʈÄÊ@¥¬#¤iG*¶çÐlùÆ=*°5DÒ ÉŸodvß?ÕË.¦… Ïåi1qh wò“q #쎷sOk6{<˜Å„¾°ÿHMh0Èõ‘[q†¯É“bÕ+?dö†®§¼ëíÇÿ\ëOþÒCÒFàç Ýð–! 
-3ÍIÑ+€ðÐ׈Áþ9KÝF½¸EE#NrÞ}KÇV;{Tkx^!‰t$ëß¼×x9X«­­!º Dï“8ƒ f‡£z`ÝkCس‰Øò•ìÉzCîk× XŽÁ+ÉïðÆ&çž_ RÆ©¥½4ïf³ävȳ…(°É« -Ä51uq·|Òöòñö@|žˆ•Ì‹ |f²?×°b£}0°ZÌô ¥UsÈC!‰m]ŠüÜ¿t³ ºZŠ5ĸ֚ oŒBgú|ÂÝCsUܯ!…;ƒ³Í·„£œYKkq½¿Û‹£p&ÃÐø¤ø1w¶&öÚšù;9Ü ÕóÁ}^¾ÕO=ö©ÿ.öi·üÁä{-’=jæ´Pî}UY™ñÂ/—Œu)¸nVX€ægܪu¼Þ¾T’ÿü—ÏÞ)?˜Q¶(õ§ÊJ´âÛ¨2²ùœQ<áÕ®¨ÜkÏn8Ñb}®ÄÑ3×f“Ê9q¯3æ\P9ˆDÒš¬ô€Ï"rÜ1'Ðg=×l³ qàÞ×1CÖ»Ñé]——ioWhê ÒðÄ×úÎ¥åv¢܇נy¿ -¿åÌà©¢ä®T›rdtŒp³-%]¨u_,·HO`iK®Úxc»=#”¹e”Sèð+{êXÍ Óq4ûçü‚s‘ÀDž¨YØ8¾º“õn -°G©Éz$ËøöƒþßâýÒÃ"[¥z™­ØƒOtOâÈ9PpÒ's3ú´Ä(PÓøä£7>¹y}“y6¾Ód§F}Ä+è3z@æþA¼Î;Á½&Œ&š™¼¼>ôœOìmÇâJ<êýX2¡Ï:ôË`– p}«zJ¥‰»fú_Óï/#W%ߢ§DõWUa'cZ®UU°v[dbjÖ̓œi»ÚɱfŽ©ó_uÉYNtT‰o”f°8 -Äé åä$Ïi3"ÈÓªTBÈñ±g\)&ºe2® § %DUŒÁ';Ï{î´&øF8éŠVɪ¦ð¶j# ·^à7_jq•"~£ŠmçJJÙ† +ýÞ6‘ô!nmvÐi)®ÒÇ6åù‰ìöWé‡3ˆ,ŒùÍ,u-`·©¬tWzUrúÒKYŠ =]SÞ_o[.Ã-ê~½m‹ùÈjXÁ %Í‹¾½$à†ˆL¦þËÛ>žJ*€w ØQ<®°ËžW­MâKEz¯µÝÖñÚ·¶òŠßÖ Q­úutÕÏø䈆ULÏÅ2¢6£Ë­Û­ZnH•ãíï3n¶ïˢŔæ¹W-_yk>É´³š@í –ÅG{‹(îzŤ,Ï4H³ö¾tëè8îæs W&êœonžì󣿦sêë";[z:C0¦Õš©õ„jUð¡ ·K8Î%?ˆG$Ksëu©hŠÕƒ®ýš9l²šŽŸÙIíôz:,yêÁù…UA‰•gB±³Dž_'WŠÚåfIòÔX” -¨ÒùY»×|µó³’¼½mI³‹B0€Ÿ¯>És熎šçdá…žÐlöø!”GÍ[)Vß}Òïi6z!ý>‰z}$š~‰ô;àÇ”âä½-n 'ð00‰³A‰piôE!BϤ‹:ðVC¨boee¤™tþÙ'äá±B»=‰çœßa~q#O­4Û¶¾~¸ò‡öZ³¬xïŽ%÷­±G1ÊÖ¶z8ZÙÃjÜl÷Ö¥øÏÿ™·æO;ÙCýªZ!à±³i8èÈQmÃZÙ× S)Ã.V!̈–‰[…k{9`ÍÔ;² -î&N{ºûKqQO€²æühÛ}}Xܪ#è3bJ÷U&+™¾ž—ìEK&)ªóú"«Pt/½-wlÑÎCÖGà _·ÑÎX+ ÈGkGùÁ„X]”ÉÂÓÒuÆ@ò-]ód€Ò -×ã#†¢½ôX5*&o_J!ü'\ÁÄjdr“QÔK;öŽ];†cdþÊ/JE±¥*•²'ÝŒ'¢øèU”Á@”¼Ê,ðX0Ä1Þ¼WÑ=þnce)¦º£æn Ø%OŒàVöqΈ4ÑîÌ—¹Ã¶#ÑÁX9Ímw²óAaBNó8EëRlxžL¢«[¦v;ØæûN®/X -‡5ÐiØ¡“HAòâªÂöÜ1¶M-€´“+|Ù_&‚†pæy$€«ékjIJt§þ¬ y]hXì -òa±ê#’.ñ"i僼š|P㽚‘4W¯¤ºt†ŠÁ†œ²Ú‘ª8¾"brDóF«âôتœobx¢®fwŒ˜UÞŽªózjDón±„•OT.îç^µzŠ+4Ý¢êoJl‘Ô²ÂU«¿HŠž¶‚vgˆ®OÌÏÚ¾ -Ós÷W(Ÿ™Ì“+T8„,i«NqÖ¡Æ#&?3óŠ¡–Óš›”ž[oáy8ÇÞËœ-ó -KÎdÜZPðQºÑ Ý—sÄ*¦…!BBÐ)B cö¹‡˜•nÈIèsþ£§ø€Zi !Îpyª¡†Œ5ïn\k1ÀÊá`íÊäø:”Iç‹•A~±2°x_Ý5Ú7’Èwìl&ùÞâ,%=ë‡WƒN\ä,aÁæ·OTÿäô…³øg> <ÑÖç'Ùù™‰¬lQìqð)ER´a£ïTcÿF¬HüÙƒz`Ñqæ#¿"ãðo:ŽsÊíQÄз’y÷r<) àŸÂ.ÞÇ´E«RƒF:g#Â9ü=lžù«¸R°Œ’ s¤yK¾÷u»;›ŽèYÉ|t0}C¶̤n~¶|¼ éÔ{pŸÛZ²„NÕ!{Î|ŸMÃä‰çdm1ð?Z3{é$›GíÅÿ¼ÖõAÅ/ê¾wÀašû»ÖgAb-?üVÛÁ-€ú˜ó‘1{èší›Ù÷jåqî¦2Oñ&0SÝVË«>oÀÕŒ -å>‹$êD4@¯¯—ħ„î#ói…ô­/`ª¾-ˆ>,ËI -0"Èùl·hÐ5YPöFró’òÏžé¥åüÔ9ñDʯ<—ƒ/§„z¹ÕâÅJ ¹B;'<Êìˆ5z}Ãâ5QðZ­À-‘û±jå`Dµˆš;ŽL'h]¸úÈò¥PÀºêB‘äX6œ½>r@Ž30Ž XŽsñ=íÍ~¶ÂÉ‘§Òu$þ¸S -¢ÁQ¢d†Ð(=zh1=G «ÅÆ$»C£€‚Jîg9ûÀ=MBÐÙ ÁKQ¹µ%²SÞØP3îzÁãèpIÎ -!q…é'=Am­5·àBê -9³”\l{-€ùG›bíSWú÷ñƒŒÈV-nʼÝ#ƒÌ‡ŒÆ_ÎÇ혢)y¼Tš˜ŸëÚª:ñ1Óù‚ ÷Yvçg·û‚ ÚX½ž6YÞâÓð0Öèì{Å.¨°É¢·q¯äçëWLA¹:‡ø]XÍ=XËg]Jsä•VéÌb‚ÎÖ”,0•%Çq ‘Þ#øš¹½¶S±+=×t]j§Š²¥¸¥›à¨Öõiò€0­ D…ÃþÔs…-£v²ó”±\Œ8XDßô˜bh˜‡Rœô»î…ž 0sFØb¯<ð+µ‡…»Ùu0’ñüÿ˜P¤2cQ‰$UªMÀþ{úCÔ¯1†ïTãàÛËt¦Ô Ô‹K¬çƒÚ+]‹ öþ™Áþ -t©Xز}Št¯AÜ®·ÈŠÔWŽ—ÚÕ£o¸f…@`QÈœk[òxXÆ\J—päQÕ}¸lÊÁ¹tÃñ¸)¾\õvRG/åàt'›©Öæ:}³Aáä:=Ôtè“»`?Q´BYA2’Mϧ†¡=êØ^[ØáÞ…ô~ð¡–Æfµ”ØE©-ö„ØÄ_Ní.Rå7¨EÊ2{ŠÆziùWŸL>é š ¦Ów¯ãwš‡Æ+`LZ„L6“4Ý@4‘±õœ(+A5Á H|¾Ô ÔZaÅW óqžûYožÛ­L͘t(£ùÓL!ò; ýz²zÚhÔ ¥uù•`ŽÉ³"˜X<”Š÷¬ñVˆz,ŒjÐdÇþ©õUÖ?Û„¼Â~¨º-öeŸÂþß û}¾8ÆŸµ F¹Óô,ˆLØ'2eÚaç:3ŸˆŒìæÍ3ôÂÎvê¯90ÕS8yå_‘Äõ× *˜,à"‹°í!dt¬»¹MƱ½ ’›j·à÷)†ÍÓ½FcI8݃ÿÄu­dFíqþ#YÇB7€Åà—‘IrÁ ÉÇŽ»hk©%$íÜ\˜öå⚎?6ÑÙ_ig ÚfôSŽ[7Òý`%u¤0s`3õö*[á.x_Y²Ú¯ˆÚ9±À(‡»–_ð<Õ%*V}áýFhÓóü¾*ÃUE†Ÿ“aÕo¢7…ÊHòÓâfTÆDe½zZ¨CãZ2GÁŒ‚$O˜†aeþ[àâs 1¨}¹|¦i|òóæ•&ŽGÉ)ÉhH—fOÕæŽm;—€ò’F„"£ýü‡5$í懒R'Ys´Å“<é±0^¢¡X·–á‚Ð\âÎI†Š´·/~á´ÜL*;sc ÿÍ †vèærî…%=f°ÄèÝ, ëx´Ïˆù17Qž|Xa­ˆi yŽ¤\•L®;¨bt^3ÂÝxÌXäÜL²ç:Ì^íyôÙíiÿy½Š}Û û#hwõ²î€Ë›úŒÒæ‘‘ŤCvÚb–hÛéY,[ŽÏÖŸ€kÀm™Sv)Z*ºPÆc…FG '¾×)ó9çèð¥ªÖ5éÛ $B_}Ç›{Š0¬ò ×zÏÙ iL½c[£ü¶R[Å'™cq¬ÍÒù“AÏ+Cæc¶ƒë‹R½YžÔ*B£‡W9üL?âûGz½z:Ã{´øJØ"CvPÏȧ¤‘¶SEÉ -ߌ€g›û>–ná?óõ>©¡$âçÅÛË®|¶4³þkn±¤SI ØôwQõ¥Ö|Ǻ°‡©†}z%šÉ\D%“ó%%ŸCðK½õßì¼1 |¦b3©K©Pì`ð}ÚÈ´‡Ñµ%_[¤Äç ¨ÓCM;ã䤂::ú9‰Tôæéý ÷œ£ -áÁv‚˜¤ÖÉõØF#çsƒšÚI9,\5è’ÜHU÷*¨‹G-ª¸çíÌå=™“&wÎ0oùV—©!8M—ÿcƒêR‘k’±¢—”ßJ…b÷à<”öG|2oè,{··|«¢9FÜ1Ô@i¡A–ÓåÆŒ`F¡pM­Ä‚èd‰¤[jÑû§•¼œ~J¦61€ -eisæŠÚdßJ8}¯ð?&UÂ4,Ð|Â]ƒOçªÌyƒ -ÁáXC:žšÃ“ºPÕžYÂ9 
¡Ü¥÷ó˜N¿WÙ#OÀ½,CÂkÔSÔ†Z¯OCµa¿RíU°V+åÈ„ÛîR»¨þÄ&Mô•µ:.J2w[Jï¶dQ÷÷ÙB§)‚më–‘wøöV7g”yªÃ¶æ.Í;ñ~¾ænŒByš^«cJÀÔñ0³Ês]ê$°‰Q”¶„2“m…LGUø¹Uf>}ÿ©VÇ4u;Œ¢ÂUŠ¨:.>“ -H²“Wê<Ê{ðúOx Æý`âô›û-43êÛw_å.m覤‚ëê½à»^Û*ye) íáNÁ9C¶Ô«W0çRc¾5¡v`ýôׄbÉ}$<× 'cçug>©=‘j½V»½ðÃÐ=PÊ8þì¯+½NªW J£3¤ñ@ê†ç<ºÓbUuĸÙÞóP¸[³›6(«î}ÅPŒ$HÔD°ÊbTÁçîE¹¸ß-”X^ÇXÍ “Î¥BÔ§Vÿ¼6¨{Ù=Ó½~%j‹7\o€*.™D0 - ùó6·•«S2KŠf{½žêaBÿWIµ¿ýàý½§¢°z”TÙžC7Q£â3³­ÏLì"'•®^ÒU:]ÐéÏäÿŸü%Ÿ™<Ù™ÆèVflÅip‹£Zôã3‡2o…© -ÍUÎiGdÄc9ÌkOË£uîJ$„3䄬›éõ¸"HqäX:%g*ùG€}[p½C¯Ÿ¹ -·ÈnH¶^¸jªu/Ì9/¦jä÷ÏôÞ4€h‰³nY)r½•MϯĖ.ö(ú5ä`PÈ8WtŸ ù¿(b}û—…1 ¯ëÚ«^} -ç ŒÚô;óIª9=÷4@æ ‚së8”1ÃEî´lþ:‰Åò‹Òouœ‰TìÒ ì‡®ÛvEÝJÍÇÿÇܽ$KŽ[é¢AÍÁ' 0’ ²]ÝÓ¾P·rgöß¿à!eìHm¯ÖIÉʬ´étñXÿ½ƒéª80ÓkV]߇l0ÇÆ­Ò˜¹Sé¼b~œ©½ºº)Ö…ë”{Ðæ!¼2Xw„¯‘h^ÕR-6u®ƒ9çLb€®Ô§Nz¶ƒŸñSlŒylë‰Ëñ ßW7£Ô(¯äQú§`åPC¢É·B¯ÞÔµò„’gkܱâÔ” \@i(E61=®àkÕQÊR‡ÜkÕí÷ŸúPýe%£ßLI%£iS“_¹ò¢¯;¥IÚth+õ>T.‰êÖ*J´ì¯-°É_tìÃØm5æ†ò·íê F`€ùA`?XZÿ”öSã÷žJÃ5’–+ôÈ=Ò!oðŒ=H*¿Y -*]½ÒC”¥Uµ{p~oô7‘aøÔß÷h$&Ô<Ïžºç?ô³iÒf/åŒq2~PJ Ž>hMH;xk]FC:ë“3¼¢çUpˆ'2 áæÕ€Íq²Ç’gáÇ:êˆÆ™6ˆ¹´Dl(°õ† 0;oÔ$èz0™T0ÜIíôºÂaž‡Ü#¡Ð—Š—?ðaùŠÌ‰baòvüVèÓå˜Qýjv}5øþÄò»Ç8K7‡Iš(¬yÆÌqTêe¡ ‰Jº× n:ü*´Qi;Þb$¼,j`Ÿ´Ž iÑyp'îµ -XvAâ3~ÿï<¦³GÆ1Ám= -»½ª;î~¯!äˆ÷;-¦§†Ùl{>e~.”NDâ'<øbh«´õ ô]¯oþs¿óí.ÈËET½~pÔnYL ø‰!F–¾R)ÝÑBåÔBá Ï -`¹×™­±ñb¿«"¿™k„cÒ¹¶² „Ù0ͽÞÑŠf"Ôƒ¦Ôƒãv$‚eª'Ž• èà8\÷[ƒ® DO£´Í~aÿªâÿïlñZJˆ€:ä ?gÞüÚdÌ6/PçRâ0“ÂÃ3›è^߆ߴ^™vÛ¿êí m5z­gèàì•`ïJ$–|ó!§“x¶è#ÝsµÐ¼ ˜Iº4çOØAê¼ç|•sR”¤ŠñÅÊ*ô¢?õßZYL#‘Ñ]`¾êã™ó²vŒˆQþÈ ‘ŸBŒãZ:,B~{™ã[¹1ó­ÛàeJâˆoXY ,u$ƒþ1y5‚lñäV¾ÏÏsGâ1þDzáçbœš›2ÿ<£Ê?÷ؽ® ¥Ów† í• _Ö°g!<7ùw¦…T\–ž ÷¦ô”ïºYù'bœG–²]‹Ój¯²íyŠ¨fï9âUì00–Ò´×åºçIãGÙÛo«2äGÍ<·Å€òÖ onj&!…Ó–wÛI4t|_L„ÜJÏ›¦Ç_ê -ƒï1œS`Ϲ@VŠ«T:±„ãí‰Mè­è~°GB¯dÎ÷ñú`Mü´M5&j2­$`T@¶8 *Gзx8GÏÕ0þÊ>öI†o@•Ð!úhotï]^;ß‘åAå;ôXPq%TX šqëdPÏñ¿±ÆW®ö´ -ïu2°¼é&’ä¹Kíg.×,V˜× Ä”?_b¨|¹Ï¿Üa•¶ 9¸ss:/{CЧ¹¶ˆµW34iè·YÙ^xK#àm}Ž€?FwÜÙõ·¢u†ú¼bÀÎõg¾ÌjƒT7/zmžíÅBã)³s -Ý…‡ ¸F0Byå1óbDAõ ÷ÀM¢â¨Ý.Û”0 × aðŒ©Íi6ÚûMÔ¼-IÄÈÊìlàÛËíZ|[ÇÊMºT6Óg -‚nULÓò¦æl?!BÍ4ØÅ)Jºê¡+ȾÓÔÌåÛ*~ÔÕßXƒyÞlòwÿ“qéUƒ¢ý*û±¥l)ýª—© b汓§°¥;YrÏ ºŒ,7M^‹¢C `.îç"­è¿P c©x=YmÐyêEŒLVE0b‰ÝŽVO·ùc ¹bo5ˆ*¤”-uJ±ê„ëR†HåCGO!ŽËºjò…·*èÃs]>UÞºçaƒ¾­¯!¬¦ótpý~²ËÿÏ»!vªù[•«“Ã\é=²xKm˜ê3O‚3åõDÖNÙ®4¸üÑ÷/‚ÈU[þ:$²»Àzc?CØN¶þ^¼ó#¹ºÞž=):¹@úXÚßF@è—NÕS®Àº4œ>ú{È€hÉ®OŠÏÖWém³ü½:Þó`ñq,<Ž¶-z×Cò&éd@|S’š¨g[ŸBþöƒŒ9¤¸OÔK_¡ù©Ä=ÑÃàøËùaÖj±-ÑÁMíuYx†\g$‘Ëê8eöØu­BWþÈb5ÑÐs½ ÎÙeà!¶ò>Þ¼Û -ýï0ûù÷¥ Ë\኿ì¼ëë ÙÃn‚“Õt¨GqÔqð´"¿C÷ÕhŸÃJW6¬Ì¼+Nn4ócÓ0ÓáFFœæ~„z[CÄ H$9Åý[ ¢a¸•_N5(·]£:?Âhc¿¾·´(ªží?„ä?×ߺ¥SCéræPÊaOYjŽ]už'J9±A*QÉZ£‰«ê­à¿d¤¿ýË׶ñgÞÇÜÏ·Öã§ó^ðô9·ü±RWd]÷ò¡½@“×çÈ´±öˆžJ7^o›`…$â:2[¢`ü®3œ’òY§Å¹ÅÛ&G¦}”âKQpÐüŠîñT?•lÔG·DtËHx“`¡?½ sÓ›©–RÔ›4!É*äæߢ×(ÿY:2è/#†»WEk;óÅ%î‚~‡fš"HÌÚIÀ0Ê€,ºO7çºèeĸ7ˆQ*2$Ÿb’õ¼ýÞûˆLb[ºP CçRQ^7»Ø.'uÁpÌSÀŸ¸'¼˜ü¥|«øBqÚ{ †˜ü~j°¼G¿Ù&WÒ/«dG=Õlnª¥tFz$ÎOdyP¸¿ôf~ã,·mSÿîgëŒS×]r d:N„§÷±ÈŒ·2TóÌlæö—AÜÙZêw+F¢F63þ+’ÛöN'q;Ý -[TöJ¤„ v¿Û;çŒñÚÿ7mnâ›}¦›­z—*°!Néù^}þ[× ®|ÔùiA`ìÖå]ùè\A´ÈÀX z¤b‚¡’T·,ŸœøE¾pÓ¦Gõ ,a‹O&é<Wý*Á¦ìŠlÜÛæ‚ CTÑ ¹ æG|§ ŠíÍ•ªóõÁ«Zb)O~ûIr¿ÚÂJÔîı°"µáq¾h?.#œÁêA=Mðü’šüXcþl|•Äú¯bí[@ó?9øt@UÊÏV°ÈaäteÎßÊè:ß NøaGNùW1'»!-²ú-ßs³7›NÐä_Id‘%mûR5PØ!´‰ }B»í¡N_‹>»q9Ò7궯³GXàÂKYÜcuMN,Q¤ùXm‡ëØ ªG{ßi›šÑ<ºæÅ‘*¬b•ßþNé¸eRŒeŸ›oÒ¥y%ˆtEöþ&è²±}ÙÞ[€É åã+¨1Ý•”Cy³¡2öúàE-ôý<û?áCÑfœÃBû¢]8 Êß~.QJ¨3DZœÓ刜\ûÂjh_˜¿ŽÉý°,“À¨Jiæú4ÆŽFzîëzàtÊ­ónûÒˆ Úé[¶˜ĵ÷FðÖšiIÌÓï!ðâ©p¹z™D’\Ãz3ïZ‚¢Ô~ã- µ'¾¿$¿ÎªI «F)º^³„ã ý7öV1¸7(ÙŽ%üÒ!¯‡ 3¥„VL†„ÈPc"nyйߜ©WÐQTšÅB?ÃîÜ9Kô-ñÄÑ–l}É'à=ЋŒG¹LUÉöè±­L•©æ3ßw0ŸçpȈªn© à­Šb*W}¥wªC¥,Šª™õ¨ý¸+Í'©ÃüJù³UEŒ¤;o•£úÛ[²*ŽþA.Wúd3ôè9Öô.wWôs­F² ­Ðwûñ]D ]ÈÞ¬ÚÑÉoí½2˜ÿ¼øVê¸/ë×·ÂC Åï>J? 
-qz>AS‰Âÿ`G‡ÿúâ1¾xN¹ÞÈI(¶±~þÆЖ}w¬1 ¹øGóvOA^t¤7“Zå«m~Dºkø!Áµ“j¹Úìsåð'Z -ŸÁÆ TøçÊé~%AûÖ{ Î8÷¥ÐwèéÎT1Ëj¦0ÉNÕ[i¸SÖíaä/ÀÇÈËg­ÌèLçû$ >ð’'!3‚UÏóï$wÏ”øÕÌTÔG÷·Ï‹eÇྪ)q™¹³â¶£~º™#9j½ð¸H7óò:óðAHÆƽ†›åôŽ ¶˜íÙ€yTÃظµ^%u¦¬>pûæþ¼Üü÷Á5áŽÑW»óµìÓ‰È̵ûý»ZÁ¬àFK¶RIŽ†„Ðã>J¡I”IÏ rHKDž:4éC÷þ—%õk‘¯î{爼ٖÚîXIð¦ºÅÿ§gEí± Ql?_XX\r5Ç®@’C7®Éó¼Í[t§ô~Îý%žèv3`¨k_*6<(NZWNÌùÒÎd4«ŽÀ<órÊÎCà¼")AS©h}e÷å òáTç¢ûöG­¼}‡\Ú­Vøãd×GÌY9«×Dç¡a¿EŽ*€ÛÏoù³çùß7k^Ø/ºÒÆü9™L Ï&…®ú<>Ž - Nâóœ.Õ 3ùWÚÉ\º >ÞdVVÏ t¥à ¨ˆ!ä´Aï…”xËõAlWÏ|>TÔ=¾vþ"fi90êý…"E·Øº¶zõ¿ -Ö<_|…ýKR­o'é4µÒ9Î…g²ç»Pñ˜?ç\MÙ[Äþ8|Ö+›·A5š÷«QH¯-]¯;f&"ý^¹–n¹Ÿ·@oG•(e£³ -m U˜ËæN$ŒA -žüóuö¿.Ùh—âÇ?ó(ÜJ[A<œÀ½J®–ÕÿT:çÍ<¯VÜ'Kµ{dÙ »}»阫w#úqºoö¬3ÎÇ›Òd ‰sM»ïœx€&¦¹-Æñ¾¥ë®µ ]ŽPyÆê+ôçž‚:F‡kƃ…†Óä|ÇÃtég0Û‚`<"Øym¶âat¸™ŸôüΨæ¦"¾"L°0 -P¯4¾¹IÚ. h–Q³ÂÑ»>J¦Œq7 i íGäjžmêõU~=QHºo-ûbS6ÄèŸìö¿Tõ·}~öŽÎuz%fšVodS#ñC¬–Ó›wèºV´1êx‹åˆÜ-–Í;½J¯NTA$jܳ&_;«vì%Žª>%áb -Ž…›ÏéZ3ûêíð sÙ$6xÛIyk÷{HÍ[’SM²TÚpN®1ƒŒ‡86ÛÏF3Ž^ÜöÖ©¤%G@íĦïïQeÿ¶ÚóGÆ€ÜZÇj P¤ÚŽ3ÖA¦JBh<«‘½G–\8b¦„å!ûíí]ÂÚJõ«/ä Y-LÅ+žìësŽPEgµ-8Ì&šdˆSÅœLlüÒ=CÆ\}ó ¯…¦[>O²ÞÖ© -¶pg»nkµ( 8‘¢–‹ o¥º·Še?“Î -ŸŠeKÕbF¶):xÁ¹kÅ°j/•ol"„&åÞ¼e%®Ç¹A  (h¿†¿_Yj¿¸³h>)0rƒç‚óuh2ÿáÜEãFÜÛÛù6Ÿ§ÌJû\®Zíêråß­¯°pKÛ„@ĽÿˆöA1÷Šó£Ï9EUêØqñŽƒdÛƒCwMÌϹ¢:«^(1­y Zq&zsƒÅÝoÎò7m…½†øW¢C½o<3+–‡·VUfr"ñ>Ô] ͉°ed]@*Ì·£g àƒÁØ£Ôv¬#U©®þÏ`´aô©j2èYßG§c Pß= LÑ2CsDž…ßi-&\dß|Ÿwä'\oAüÛ=àPbQõÁ ý›œ.uÞ6Z‘GðUA'õTV·ÕšÆ§{$îä8íGüE—ÛºhTòp÷iY¬[då›´ø}‹´ÿôWåÕ¿q3jÇz”b_ >ÇŒ¡óS+¨tiP¼nÑ/þاъ´»Ä@·‚Õ»qß  ÔVhO|Î]—ãºÖìè»a±ôö};‡æ -ÈPÇܧ[|5hÿ­rÆ ü,0¿féIHw_‚ÕR -Ngû¯y-´væU\¥K©ó.Ø| ßN·q 9ŽÈùçËø×¢ãó¬Û¨°4 š]áŸðZ–ƒ‚¦ Š,›LøéÄÛ²=eòzP^D‡7ö†þ‡Ǭãõ©…ð—¶æ‡“Ó=¦YàW}¼JNW⣞#…‡¼òWA¦2„ç®ô^t»\µÄ«¸Å¹²sà¦èŠÇm{ÃçeI=JVŸÓ#r2b“3ùæ1Xx=ël?å;M7oe¡Ãõ|¨‡ká œƒt…Ò„¾ÄùóiwÎö=3ýô£žD˜3'â³÷.Q«µ>QOßc'…霃ªz/O0òio£èã ¯€Þq½Ó/š|û—îão„©ææþ¡cºiÝ.¦Z+IÜl[L1wäp“¶\«Ê×£˜Èêl®ê›©ŽN5ÉÜ}UX÷qÔr›~æùlQ)wž‰C]tõV˜k_ჼ -ÝvÀaéc“ðÐô¯\ÐÐ}Ñ{¶€yè•È´ï8TŠT1x»­ôø,G¦h=j8íîÏ÷ÁÑ€'P`Ö;TÔvTИˆ}F rµ Ç#˜Ø[AØ×^ˆ­tè¡âïй ò’àmŸø‘…mÄª©IÒÏqt剤#Ýœd˜SÁþö=½E0ÀÀž°o¾Â¯—Ýγü¨úg³ù³7Úâ1ÌŸ‘úi«ðKA¥}Cÿ:&ë)6ñªg†´˜žŽÔâ®Tëé>cë”“Ì 11y[£¦¥™ u8`¯ó.âMIÄudîIǺö¨'‘i÷n{äOÁÓ|g Ô¡ðónÕM:ÐÆzÍJˆªÏÁß%rúÁoz3´ÜÊ#žâÛ^X ·0bÙäþ ¶Lè-બÄô"ùéüî/_lÇOÈLësæ8mlïÓ­—_×öD5Ç J±Ü 딜 oÛ¢Oû¤[ÂêŠôüŠmñÚçA û̈Kq$WfèƦŸ˜þÔº?Ñä%bYç:úXM]ê#7_)‹øÂã_"íwVn,tŽ4¤RHgÊ°°R[ û·@ phU9Ã3(åí£úxk“ŠT`㮕øÕÄͽÚZ#‚Û 7}ÅÙê‹G$m¶¶ÚZ1›8¨n÷u¬CšOzwNB’½t=Ïý]ωǼò® o3›<ŽëYmxÎÒ» -L1[c›S¤n)S«°pœ@ùÆ;Äkt-Ž€ïà“Â2#¿{Oé ŽàM·Ö(­”ÜI^B߶:sÏÕ×Ý£”£!}Û#0ÖP„#P£õ9 ©ÒªžÓòÛÅ· -PG Ä+fS€â2æ -æ‹—V¾ä®æÄ ÑáF•ãal¾4ø¯/<Æëão P­É>GÖF ÍYÎ!·íüÖ´3ÄIR­Ñù(ŸC´†'0ò%¶ŸFò(ò¸ú®RÅ\šˆ†p[¦9[C°Æ)ÕC¦ 6Zª¹›AŒE1ì^»å©GÁÌÃfÐ{a¾(ˆm˜÷HþF0‰t³Bý¿ -{ylÑ]DïÉ“öZ¶í­pOr×ÃG•&¶‚ü2—?†ßæyÖ9‡õ"’Â\EFÔâ)jb> eöÜâ>Fs…>yýköèF˜ÆJ'xãàÚò±¿Ÿæ*ž!Ö÷ïé­í¬J.&P‚£ØU“üûU–{Nƒ9Ãs©üª¶ÀãuJÈ£Ú ¿š‡o_ÖÓ¯rŸAØ´± uð¼™á#_n¼žòp“‘Ç’ÜÀ™l[9øæU ФeÆõ³@ÂàáÒ•è5š³§‹PCõ=H @ñlþV·Wãá+ãùÞw˜ZÂçÓ¼<©ãµ-mÊx÷@ôŒéy_@¶‘-ô¤ Š¹Î׃ðIøóáÎÄ—tÙs†@;n‰uæ1!êËqüÑØñ‡Ø¨Z!ˆ6Ëo'¬‹.Ùd¼–ضÛPZ-[êBÕšCÀ©æOö¯=¦Ë#öˆžX¦Ø ÚùáI$‹¨7NÐ~v".롉º‰+”›Gú ñ8Òç2C³H„žB¯ù³•ŽO‚Wo†è£qh´–Žult -ùÇÐÛŸƒüJ¦7=ÑáË(L¨œ wìéÅzJ\-æ†öÔ;™På7m¡éTR¡ç¶ÚZSõã_ƒvòŒ´õ™‰þÏt†DCärl}y êÌc¡5Œ•+’¥]³t!NÅòq~ã ÃîÜÓ•íµø?ÄW6Qic> ke•$è*Û« ÀÝFp ÞCU1h s~“´°ÅiÑþ¶ÎzÌ™ é᫨>;ñ<ŠÏ âb@ç-ËBÎÖc ùx340íZ“'ÛCšÐuÓ]=«$6z^`–Å1òðK'€ŽÀ|Ô%»çv¡E˜_=OQlªL9·ߢ¹ø~dv0¡#V¿‹”ÇUŒ žÔsf‚$c99†»Œó¨jF6/s6ÍÁ»KX3’ñu}ìÑ)ª­˜åÿöÈb˜óÕ<‘!šCÓ0Tåä"½£í¹¯Ã­½mì -Ãu²…Øõ,eo£Ü%”ëß‹–s–šF½«œ*œÎrÅ,W™*ß(PG^DùÁûðÀÕç‹q¸h†æ攌;üÆtb®í9'DMîhÊu7:V1¾Œ˜h‹9³8­§û]ƒ¢s½Z<ßùü‚Rý¦¬›ÀXßož[)É>$†¤–‚„…-ÏboeÕFáhSWHƒûÕH»cn=¨A·|p†8¾·(ö̃I(  ù¨; +CF¨à¥[²°åó¿G¤cýô‹_’þÙ<‰ˆ­óÂÔ03l5Äf’±ƒB× 0þ˜Ì8ÂAÀ°7º0yÔ¥cq€3íðÉV‡,ƒR ´£çAƒwˆ$ÁýNÈLîsWià™C¬¤·dÅK9臖T Íñõü /V.†GK »Án¢¹ÏÆ ©šÒr"#0 Ž)æw“‡V¼Ó œGêFãÝËô¤!f8"O÷ð„Šöà©4ˆûšx`q»>Èà -‚’ö‚KÜóÕé…Ž%Ú WLðr›)n<ò–¹ó×\]ÁœÌ[ñ¦‰ÿ×IÖqGÄFÉßíâÉhÉ Ýá‚ÏY¢°¸£hs!Ì­jzjPÚA¾pÀýÇ›Üù…4GTËÝûHÍOlÒù(£,´y¤çŽ¾BœÏ:«^ìQ¨óÔ… -ºHÈE‹Su?0ú‚Oœ3ëkñ±´Ôð¡!Øñbé 
*4n€’÷«í‰+¶œó‰qéZï"µÐ­Õ$Ca†XõHܘÚR('ú´Ùò¨óGym z ½ -®!,ÔžççÐcŒ›‰Û) &Ô =ì‡]Bü¸VŒÈÛ5}†õø—‰ÜÄ~´éŸŸ±¦¯|qwŸÙûñ®KQ† ãUC"èMjHÕâƒÈw)¥q°J)y€Ùy WŠQŸ"N’tÚ#tsU·AÀw†«\(¿pz¿ÿ‹œ "þ„ðoñ¼­`,0Å3_<¡¹«çFÓ„Þö½íÂå£|„m7%;×ì:‡šŽ*KW7Gû0À¤…†s@}äê. ‚§’q*´ØFϽ×s®qÏ„,¥àÒ#ú¹*H¥©Â™Ž®šõ=S%Œ;"Rõ“bÇØÁÛnÑcJ-MÐ,fˆŸ[ê9 T64CJÅdDÚ¨UH @hR¨à戚G?-A02ÝCÓ¨Ž(®¯ëjööqœ|ò>; ‡åFë£k«8ù‰Ç…¸}×ä¨,èÞ¸_ß¿©E­ôS>oU|ô—e&72Ï¿û‹ÇàÇ÷±€TºÔÁ¥lÄ2îd+Óè=¤3ÆýD/8¯(¯u¢mѸ¤Ý´éXö̹;”ÓÓÄv£Ë ±b¤Ø¯e‡•$ÖÌX¦ŠOI…Ä°Íj¸ur=9ëéH3æd_©ˆÊ†ÊgÌý,Y…ixÛJ¤‘ž'3-ÈQWã&fY«|—°~¢´¥DŠÙzZòÐks­vÍ|2b©ª$ZË«Èå™þqµ¨ø¸ãå3B Ó ÒêÎoâ¡€àT)¨õŒ¶7ƒ§$)|íªúã\+Œt€Ä~K]š„˜4²Rjý`zlÔoú~­²ùÅ –©r5\€¡ ¢me*NôùÚÂÎl•ìnä¤ç)<] ¿\7)>÷I¸ó28EtŠcõ›„²zH±¯¿ŽísÔhñ…6Ý¡¦EªÄÌ 7Ñ‹n óu÷)殫`‹õšHÏì±BˆXß%|H 0Û>Õó”¡æÂðIôÆêl(zµ¼ƒò_–D„Äì’ÄXjLIK B6!µ¡¾æ=¸… nˆ…-÷ ¼¹¾TxÊõr°È¼.=Ô+¼ZRŠçQ?¼EÖ¡9¢ù)$7IÏœÑ{­ @Um“Ô¸ÁúžàµúBNà‰mõÞékn4%c£üî-—Ð/•†Áx¢)½†ÌU¿KÇ¥gˆúÌ×ÕÆšÁÄ ([òŸ°®ÔMtéï´>3{°J!{Í -®¥Vœ¢Îšš‘pnnÉѳ‹Ú_ÍE“Û"¯›w»Žòß N–Ðct/æë – êHl"茊7wÝ©ù¥¸Ž - Äc ¹#§ðÒþïÞ£ä»o# ÛTŒ¤G$GàZXÇBÄϵÙÔ`öl„pÐܸ@"=’È=Cpϳp°î¯ä -¡Ž‘ÿ©[g®ÂPÙCn†ñ Šìˆ«e«!óëži·¼)m[R›Í£æ£<îk ÿ*6\ÐH#¦BMr5#Ëo ŸI¡hA¢Æ­0þsr$n醞÷@ÜGÔýAvŸµJQÀ(·r§]ÞµLïz”Rðá(k¯®Ã•™ß~ÁÛ-,¨K[B(9725#1‡Å†ÍVfÈhÅ!;˜à_Y8ßÿ%iÕ_L‡ã…8JOZ5·ùûÖÓG-KõýL?ôÈIŸêÁãHÿúµ6;>¢ÔYñ…yRé„Õ뻜`Uçû.@áßR„­¯Óä§4‚+ð’ëHI pÕbgìÏ®m;Ö§ eÅ@jö±¤o¥¦R…ó¬8ƒÂˆ@UY±4³Æ}@°to9SkuUÕ¥hà}RÀ“ Z%nÙ÷Új3ãpžÌ¯¤ñi¦í‘ä¸ÖWRº‹T—l’Æ1°Î¨]Äô9½Î€œÙÐ@œ°¥ \‚\X#ùÆJT,u†š¥b"|ò9 ]C¹ kò‰*4û¥e%¤*™Õ­”N•u`X͹ÌrôÁe§iUde)%= ÞAì6ÃÏÃÈÖY±_¾ ç;ÇÙ-ý}Ö-âr’‘è1ô¥C±»ËÇ(t©¿PXžCz±•åT~Cšæn›R*€nììN‹ªº) M×ûŽ˜+~¾× _Ž4èœíùé@&Nòbhö„Ý¡ä (£›ÇîŠø•Ò<ƒ!|ûÕÃ먷Ð'©ù$ÂÙƒLîÝbyéÁ"ÛEªjp+‹ÆÙ.P7¸ä1‡ 8S=çZ°‰àýAÑéB>)xajš²Dªvù‰üˆ;lâ'•Yμbh¤õÂ@}³ÎmÇ~yâs1æ’’ŽÕ›˜ËmDzQ\ª‹8]Šƒç®!T“OÊO­-ŒûS·°7>ÿR,1U«û3™ì)×ëIj³œ&¯ªôÉ£eS-h|¯gç ]#î ãÉȃ \®nÍüæXÃ86^ìóª%ÈÕ!ðΑ,ZDË}¤†¨ð^ý¨ò›sÝ)gˆýrq×#ˆ{¬ðƒ®w»sY¯Ï™a˜¾ÑŽó˜õÇ"…0Ѽ[2$…=åÈ¥:Ècà(m¨RÀ xA¦œbý8ß”À“ÓŒföï‰ vGL¦ãýä¤,G1ÂË>+yUr}u´ÉtyàU_øÂpÜ‹t6öÔì+ÏF ~’#iñ.û˜ž1xô®x˜Xs_ý³÷7@êŲ6úŠváë¤5Ûœ6^Ë.aéé†ÄÎw±ÍAwdè}w@…†ÁFï©h%ó×RÈíàå7G̹8¥ÅEÖЂDÎãHlzúbÊK)Á;¤I¾ZñÜæ笚œìžsq‹Ä‘2’˜µ ßBÁ[0pgîae„¼sÞÏÕùܬç3ݳýªïC>3À‰sÁ¦.?;Ú&Üp -bˆ¸ì´¶x\ÑÏÑÕÎÔÀÜð˜q*ÿ³ž)—@_ùéóMduíÜHx²Ì„…nb²Yoa&ÅóÊc.€ûþE­¼ÏHàWÁé­˜;/‰Ò=âÙn6‰{'ä=TÙh侺‘ÕEÓ_´3¾ÿËÇ Šm·pá ’LH(Òå¸þdF ²èâãb+‚¹;/í5÷Z*š×÷_Ì}«òL'Å'E^•ûÓá'¨@u^ï¦p^3nêŠK>¨@f¡óð(kuÊ=u± õâ­ÎÑ -”Â/¿9“›Ú¥Pó5Jó[D†ïBe0]çàY%”doyÇvGËLzã/Kr¹ßŠ'ù‹ÉÜ|Ñ¯¿wæG…´ã>iö¸¤þQŵÄ5YVAnéâì%¤ÞeÈNž×«{÷…¡ÊH­?G¦˜šLÛçØœ†ØX„ -¶$AÊÀD”C²ÜyÒ?LVg䶯!Î`t+/¿),ê9U'¤éú6%‚Šøm—ívùé íý1†}>ŽÌ¢#"ƒŸ£UmŠÏ_]£ë·ÙÒ4{¬Â»G¥ÿ"[üÉ_Aü6 ø£°¨.Ó¡Ï'¼b›òĬz•ËÏà^%7ÊBôx›7´IH¯ïà†«¶`½åf;v%3L–z>RK‘ê* ÝIfúRIBL„-¸ÇK œ¤¡½Â„7äÀ?œ[ò<¶¾2ï¹½µ¶¨Ü%óÞ~”Ž­* pZ‹%ºFá1÷ ÙgpVŽOú¥D}a¾&õª5…anZ©ÖOgDÁ€ 9_ù^1ØûšãXKtÐp> -Ùf´_«<³^ßè×UþGá Qõ™µŸä§8¤—Ô¶¼UVH…'Ìd5y\)ž÷êÃÛtÞ´ƒçõ 7Ž -e׊©è6ܹ}áÔ×Õ.­0ætáÝÇX‘Ž8ÊÈ[ÁW#£ærH;nÃD™>߬ÈË BÐ1)v/¼ô|#Ýà -άǦ -ù38Û¯´oªÃ-òRYÔÞêûÌ÷ÇÈt‹õ“Éa±G¡õ(ÌSÊgx¶¯ÞÔBæQ´Áâ˜)èõ”Ë&£‡3H×h€ýú˜»/ßÿ%G·›g N^`Ý3)n'ÙÑ9S¬d†ÂF+FXV4ãÍvž„-¤¸u$êQÀ\¿?¤=´€-ŽçÉ¿B‚á`-wu¦2ÑÙãœ,ÃÀåT"‹ }Ud/È‘B[YÛŒTsÓ‰ÜRoi^¤|>O±=tÎLмÊjØʽ²8@·+,V‡!m_ôÚçë·yj‚c½¹ÚÔÐ{Aðªö[¨n0OקÉ„:£úõ$*ŸüÄѤï|l”¶ø]<¼‚³€ØŽ@(Âv×÷«ÉéÌcæÐ/B6ªé ?Ï*ëSÞ6Aë-{ô¶Zš· 6Š-ò lH±¤5BT(?Àò -õLàü Ø¶èh2õTV#} <ö÷*FûhÔ Û¡§$¤S+JiΖRŠ%çË|ù ¥H*UbQþ½¸†Ö‹„AqKáfP¡º˜Þ²SÞZÆ*W”¢šUõZ8àô¨j…[T|m:Õ}At4Ç!šžà,?©œÖmïI3¸>§AIÜ|\{Á—v8bP¶½Í $·ã]VC`­uý,HŽœŒUázyJ@Õ/‹tìðž -7„KŒJœHk¶Ø>Æè³{¶:pæ¤D‹9QŽFŒ)ÝÒÜ|èñ[õăw¿Ê^<ж/þhßÿ…ä -°øº«^!>ïуš³?ñ˜´Î”tøú È|4ÐÕ}±Åý:xèÌ׬òñ[×ð‚f¼Ž z¥ô#Üö4eÕ¹FOùË ò«ªx¿µyî#Wüƒð!T­ Щ>†9"ƒ¥™_gÈÑŠ(s?}°I9«çãI±‚Ö>‹´¤¹o0M zöü$ÂX#GÅF¹ˆRÏöþÌ -7wšÛÔ‹?£aÈiZ%Î͹ØHÐ…@Ó~Ö ûª…­|½ áPAiO¡†P’R£úfˆônî5ä&í‚d–4ü,eV4%²°k;Åóz~ã[ïÆÄ K#à¡ŸH°™#Éþú`=¼Ó=mmW é(Ú±MC—OåÛošGðÃÛ9À¹Îã”ÖJG1ë55{1ƒÐ#|B²=$‡–Å@DtP’zÒÅ8rÉhØC`¬A`}Iʼn×i ­¸«o¡ëë,ʆMœûàõÁ‹z¯'l8V™øW9ë€é¸m±·ãAšìòH½0åd$Í?&»ElÿEù»?XMgMº-s‘EæxÀha/ü‘A§|‡¶fîÕ(ü0©¬ˆL_­ø=„ÖüXŒO°¡3Ê5sHz3„¢ø‘sð‰×¼ÒuT§êH×µ<­½¸‘Çë.çÃÕ׃µUój¯d8EôNdfJq²b@Ÿ} 
çUªØ9K3‰¶oþî'žµ˜f€r~Ѧøˆƒ”`,3ãdÇ´€Þù`òÞúª´ µ/aÀ’uçnàAÏ0¹úÆ#ätÙr6Jç$T2¿íº ¿¼Ñoÿâ%/¸é¼/àC½uiñ¨fÌø)÷Ÿ/úÒKáƒîpªœþuÓw´ÞŸ¿¸éæ6Ž[»f@x‰ìã¸RŽÜ WÝcóg(^QÃwÄ -aãã:§žDŽËÈ‚Ë:¸/Ög ï´ ÒAÄ×­þŸG=ùÂóž)ˆ(^(OÒΣe(öÝöìÓƒé‡Æ6OÐhwõ¼Pø³=‘UŸ&øTxùïº é Ð3©hF•yÆÖó•Iæ q§“ÀÃŽ­ÃvM¸¥ûú饠 -9èôdP”œzHâÝRäÀUxÒÁëpz¢ï?Næ´¼¥¿EqrâE2 .äV„¸hj`t\  g©ë€»?Æ<Á.›=wÜkþßcž²nEœ;墸ž\9Cpð¸ðâb¢\¢æœè€`;×Y`(ãúþÞÆ:Àô8À¥EŠyw×ç¸lú”G]Q¨A,Àv~ÔZ•RO¶oÐ{)ªþV·AaŠô;ðªù9¾¸x‚ò ­!rOô¦ZýpŠßŠ´¯€ ŸMàH˜Ga¹Î0K"yc:«íKùñû¿$¦¹³ Uʸ¡¼þÑG4 {ˆ—I‚†û„£„·Ñk–3?ؤ΢SØAËÅJë¨3ÍÏ ¹‹M{GBÒ²{êGùuƒt<ñ˜AG–- 0z%‰ÂÅá:â@˜‹z]†¦d,š|è@‰€3¤¤©•îä»W3ˆÓô¼†Îþ¤Š7®h¢dˆèf$êZ_& ÐP‡ç{ø¿ G°6G¼æ Ü žïW&¨ -P¯s5Ó¥Òþœñî­E=Z ×px3ð&Bå\ΧɄʶ¾;¾éð4œvƒ†uóŸíZïa«[{¿LP‰y\Û¸U”øï{ÅWä;Dñ?Ù˜\3i‚WIÇëË_^Ÿ?þÖ -ò=`L‚ÚßLÀYMåsNwUK|õŽLMâ^Ÿ[Pܽ‹œ-Òh«¬“êžç„†æÏת Ï ê7¯µ"ÞòFãB«‰Ô”?Ì=I¶ëN##W0¬×z‡Sw¤‰z¨Ïn1¬ðòÍ5_ßÁ -EÏa£(JW^‘!Ä?ºœøqÊ©W¯$™n v_¥j†ŒÊD°çìî”!XÁ AÝóïRÝؤ?s®‘õ¾#&ß·LàpÊ|Ão­Ž­GߌFa³=†R;W%ËõO-øˆ¼¤´·£Ù øïŠ4B¹›3æNÆ|-‚°l¨$ÇeõÎyX\%K”¨Gì¦o0 !ÿB4†íŸËelI Ï} Aªé¬ˆoNÚ{ÜÞæq½醠0Wx•Mº+ße{Ö/ª?Í¢Hºzx(Ðœúù·øw “Dü{qF'ð _H$’]y÷=W‘‚uÞÓNÓG›@p_Aÿâ[0î¥)C=J\Ÿ¢ˆUCÅk3ªò†t’Æ#äxÞ*%XžO>%bd;œÛ=êÒ1˜ß udI<€PÊxD5mBv¤²_çê£Z‰º-ûÛî ‰C¦úm¸­½ëaS÷Þã¢ô>r5èÄñ—é=øÀ(n„ZÕÛK¢·Ç;ÛV؈·Ïoµ§45Ï€ {!M㎌+|… ˜¯– â dÚJç4i\D—¢éž)‡Îya¾ÈîùJWó°L2iÈL>¯;©´Xv±M©ÄÎ]âÜ&¢Ð*-Ù]Áñ“¥»Æm•sž$1†=raÁúj¨SFB'óðž|çÊE{bô¨°!kP€†^^Ñ¡;7bãaŒ÷%ÃöѯühÀðZ©ù¾eJ›u^j>%qeF Ù“ ÞåWOšk„*xL¯©ÁÒÇs«ÎÏ9çç`?¤×ì©Âz ‘j4d#U)}Ôsø™w#³ÃÞjöÔyÈõÞ4=|P¾EðüòÓNG¼‡–Hª-Þ÷}u·Í"ª8£ VÒÂó9îõ¢Žàq{jñ ˆóºì“ÓŸ>çpè"8?C4y²LGÂQC’ñ^‘0ϧ?üâkudC$M ]p¥ßû„‰˜æ)Aˆ–¯ŒÒmïÅEg±÷¯#r3²EsCÞƒÿú´G5BµáõÏšÀ'v;J&¡:ë#d_ïyHÊ®±Ï!!%Tž/>+g Åõ³é5DßÄWY:+=Ø8o*·öQ?} 1Š«V:ç÷ˆdFÍ1mœ)‚gœ¯¶ïOaDÅ(…~}CGù§Ð;ÅQ‹™ß)1Ø ÄǪ kËuó%Þ? äEyïß!µÇU§L”‘)¥òÞ½kI¥Ì1£`ÅÝuÊ€Ìèf‡CR¿er×H…&_d§–Уún?±#&u_?7 -I°ïÂÄGA/DŽ¬—ÉÃÓhòJ‡¦SV&õþ æ€æ[†8 æÖi4?º:W°èèx‹ ‡"è£ö 9å]¯òC6èhåwž‰7Dùd´Îíõ£Ô~¡õ>_úêTÝRo¯-yAž‘‡ -B=êz*ªˆ*Eö›$󸲛¯üª^Øb®ÏÚµu^)£ÔAóÍ«z»ˆÆ]× 0ÏBÎ(·©kþ Wå'.™¯ ù¸14¤Ÿ¸,¢ä½~g»ý{i*ݵßØÅ~pöÖ’Ñ+LHpr†‹ÑîÛû®˜W?q¢* -°3÷ÁºMb^=«ªûd.ÒãnùpØ£Uö`ÃóŸÝ"s¸çTxz°;€€ù¨WÉÎÐÝëÜ°®²ÉsÕù-­Ût¬õôÄM€ø•¶G®¢îuRá -¨ß·á¬<è þõÁ¯zOþé-NÖã*#Øù¦ŸxÈîk‹µ$6 ØbÄÌW™°˜îVjCýzFo_Îñß8õö)OдHN½š«e¥ñ®™³†È`rT9Ý ™P³dæwxIzÒBxzîħH}Ḑt½Ôç){ãGîÌíLÐûĤ$É3p+†Þ£ÂbÐÍšˆ5(­\ž¡[w‡¼8·Bf££îòš?™\´ür¯!1ˆg;SMÐ Ú#"Ç:ëªA{ÄÛƒzÜó9Ã~§t~«o3z¦Ùd–ÎÂÑða¼‘•© MAù¤ÒmÐ1Ùà^“ÓÈ©y>9ŸŽ›ºE1©—!”)ž§LÊ’—¯9me¯yó\_¶à4ó(NÓè&lTxIÎÿG9gù)Šr†ÐûßÉ’gGÎ+Dºê8/Ô4¡›kl(ôµ˜‰²gýª+Ò_DîêQÔ„åžœ.æ…mŸè]Ê/÷0ã ÙØLŸ¸Í‚OöI5‚€9 Î¨Fðüö)HµGøNk°vyVÐE±ï,?eÕ—8úø÷;>*$Rƒý' ÈfÿLÉH¼ƒ´Ú:híݽŽÅ\q`O -5_4sY·1Íùµ¦$,8¨÷{-Ĩ¡\ïóP¿,r ÉÖx‘Šæ:Jã¨#èqùÈGMؤ‚ˆYª¶eö©¶çL¥;+“;;kéQe§¶\;«:BšäˆSS{Æ]ÑÈs_Û€Ÿ$+°Ê³Ž)iÑíÂL¤°NÈÏì_§¼8‰Íp}Ÿ-–ž"•¸‡šã¢.â.½u…Å߃ªÉøatäCv“Užq@KY½…$U³˜§¾Îüàò;´¯²'õ`ö ËsÜÍÉ‘9Þ$µD>Ú“ Z£hs¶_Äœvq. M÷ÜéÔY†ºÑæOVK)°‚@í“þ)øM¸ÅGéÙ¦ÎÊöêÑî¤K"œ"[?üÜ\ܤ¡6ÂøzsÅ5Cs•Ž¦€ê$§mŠ¶ -AW:'“¯9ÂÁ†Ç°»Yª*V>jï&âj?BœA`8®ªñ<ªàaÂ:xçʸ# ¥uTB>OFb œáB›YÈqˆQ¯•+¾’Æ›Çñª&©…_ûS%Z? 
ÌËŒ¤±|<Å™uF†'EwR‡•¢¶x¿Ì –óLÊhNÿérç1ˆâ&dZ@¦Õyn*]N‘º•˜mn¶2½;"ãJ¶ùQâE+yÏĤéÌ—jî ˆ(ÑíM9$š#¦§/còNªÉº“Y’¶Y•ù -Z÷åMo©¡>*XkÈŽ­Ew‡lPdRÜ -#§'‘n¥ÜÑò9z™šÄ}{/+ - -Úó¸_¬¼÷ñþĪ]Q¯2¶Ne\ÍôFb yt1ªç¢Ç°éÁ–šÅNÆ$~Ú_P‡¿ùËW÷¬ƒt•˜ÆU/yRaæ9£«äÔl±0*Yö&¹<“< †YOZhHr[ÄWy· FUYK÷Žoé$â{é2[»¯`U|{_üû±j[ng“¡-SRlô$ŸUåì¤Iô1«æ¡Êc~¡=EšB»2àƒ×²vɆ–‰ßúÚ%·“øÍ=RÙG*µèQ«, Ê‘äQCˆy±ÃÕb\D û°?û»¿€¶;îת6gRb½;ûH€±RiÖž:[`ÿ«6õŸ_ÕÛ݈›çHÛªÌià¤Fr†ƒ³Z1Ów©[‘($xúØ#ù?Ù”w­Éœ_ºJçWÅiù•æÉ)8ê -–5‚Qç"ÿG’Ï(¥Îƒ²’XÀ8§_[¥Ë· ÄOÆQI*L.qÞw‚ìceÔ¦l®¨=iŒšgÕ-Î:ðŠªI«OßYƒ€ -©ÒÑ0ÝY~´8mÖ${Wªç¹vAàCºæXÛ™>5iœ×ù>·?y -{cMÊ0ßê“ÀLFåçºj$äQuÓÝáup€›Ì+.w@Ž0"|H[gÔ7§K†<²\j¶@ÀÑO8¤M¯ÿû_ŽL…ÎÇI—TaÃ+ƒýYƒ Jg Õ(Ñ:3Ã;Γ0¬oˆ ‘wÞ%0Ç ÞÜŸ¥ºnГ²XrЧÅ’4ð +”ðfѳõyCñ'=c$múæF¤Ôý;“©ñX‹ÉˆòM½> [Œ`(a®tŽÁ’÷/¡ï1PÝÌ¡rV]£)Ë8/t²@5¸[öy>…)Ômźéþù_)ZlyµódÊëĘߞxfâO§Ú@ôæzÖÜÄ$€¦æƒýRÛZè·1yÄt,üð~Åi¦[­6±2gçô5$ñÅ!¾¿„ù÷Ÿ7þ½û«{Õ‡] G¹ò¨­ƒ–óÒ %ÀUßœ¡7K¯OÑœ»iÈl\팇šêjg8ó‹wcˆSíN|[ç¹íwblvõ wèŽîW\jÈãˆ-WoÙÂÛá×û¬[as‘Ô>«Oq”ϯmü!v¦¿Qž©€’Ì< FùxfÈæ€r™.Îféw«ucžC{–LaîŸMsÒ“gý:™L½wÞ9˜ ò¡µ¶ëȧP5ƒìUÌö¤P˜fÌÖÞQZ˜™4îjKl*gTBtÄ+ÀR)¹IiBÖ“L¿N°†ZEigôsÔ8Üt´gÈ?™›^>sÆd¹n:€çrfOx× uÉ9¶{uÊB!êú¶¾Mc øP}©+-ahœôÚ(#9ÒVñ=Y;~v p8uJ×]­ë¹òŸ'܇¶ú[AÅPйù¸¿Eð桤?ƒì½3Æք΢º>h -4½öÞŸ¸ÏöÞ}?v@¼áÎwÌŒ}¤øU…œùr ùK¨Í  èx hÏâÐ-NØßVjýjO¹ØA·ÖRAÜ¢ßë'H€µT÷#§›¼³>'‰a¡ÄÄ5Ô£ú׳s¯BÛH'œˆV³ØÁvîµ6øŠ¢=”–ŽæŠAü¹iÆc¢Ï€¿ÔÙ³,Éhµ‚:Åš—«!›ý2o³ýŒ@É*Êù!›%è1è"ò;Å"˨Ø"TŠ±ÇÈ7F×ûºz ³-ª’!yéߌ‹Æš½ÝïS·W ’ŠæS®{}NK8©zËçÌ›$†D5d®IVÔÔö×/êͯUú㩲ñpL~ªãålÂb¬¿tñø _]šZsÞn,"´¥²õ4ðý_œOÉ3ü,˜ÉÛÍI$j<`˘ Œ3IA¤í¬ìaÛëUŸ4HÖ^#”ÙìÌkìF¤S·`G¼yÁ£‚˹—d]·çˆòwa  z¨*¼ø½)­Ý>ÇÑÇ+Ócš3Ç™o÷Gº`”öLçBÕH¸ÊЖ«`‹/û<)"fÝ-ù!Þ ^*Cx‰b™ÏûQ„ÔšG3ƒYŸÂ2ø§%Ìj˛ΣÒýLYòê¡-øér'â%saÆ-|—µÇâjŽØ*…½ÃBð»c…Æàv‹pÏ?:|Åî|G¼®³È]”8©æGÝÚG4X Ü^üÍY“ô'õ¹LNô® BYíäUY¹'»£{­ °¤È"DúûuóþßûÑ^¸9}vôiGDyQK§à´¤À  FfÖ©/“ -»æéðb]dzå {>ÊÅ'ÁØ­e!ù÷ï¡d”B‰â-yÝuˆ¹ÕÙ1èLÕ£b49¬‹ -ZMWÊÌñÐ8õ~g pÆëϸp¥¦§VŸ"Í~â¡™«ÊOŸÓ/¨²—8/·’â¼¹øX?!%ʻկºæÚ>— ú+˜OÄ‚—^.F9l¨ -eÐÝB[bàÈÄr)‰ÎÜn†¨@Ðð†¬!Ðv[êô3ä1Ä­QYd¬Lšä3ݦ;•|_¸)˜À}%2d­ãª}o" ìiαÔ{i€@1Ny}°(þ'³OãéAïÖ­ÈUæU®7†Ðu'¹½%74fÏÚôæç¬ÞjOò¬}Dø\ƒÛ@+~.û÷M<N¿ozÑîÍó¬ë—dOC$+—äæN0ËI]óÑ1ñJË£hnEå8Ê™ ¬æîDÁí•l&"3 öÓz}NÊ(¢2µWC¼6‰óÞ³þ•>ïÃxDì¶Õ£›AÈ"„dÌé[8½]uÙúAµ–êEM1g¿öäã+Äù/j-§=M+ŠhÐ^©sQzΘ¯æœûú— áçæ0 ¬r×’û÷,㣿üÇDdÊüI"òä|î=xgw"ÀÚ“^&Ü—HøŒÒ^Ú½óçßb}Ê I ôx¥3tTŸ¥›”89™ -¥å3#U,iWËç3ÓŠ"ÐÜ*úçÌ->/Ç¡¹"¤Õ|‹.¶˜Ã»œwBi!m=Poʵàî{._¶ª~|._þnèF{µ^ì_Ó™<•ÛÃõ›bâ=yÙü\>ã,¡á½¬µ4úŸõ }¨D%+ó:<¡—CjÎ÷¥àV°ÿ•‡0’R)ç4ðòŸ¯éÝ×Ñ-]Y;w–lZ‚æ¢RT *½€3W¸úè[aÔ¿œDßü!'Óáâ@ ›õY(ìLò[¿?rÕmu~Î#"z€zân -·-÷˜+3m;ýµ‹miú8ÐÀŸÍŽ¨®”U=;aš6À™Thä”Ëåžç2räZ°_¨%.Q÷ySòìn×}þ¼ž9ÐJk[ø}]0ucª‹? 
¿”›²%‚0eÖPó×=àü¥AßЩل<[H—3 -XGiøQ_øwÉb¹ÄÆëÒgÆ8=öbµv ì9C6ûK4—À“3ÆMŽïµ¿¥_}€¨¦ûÄ!Âi‹i\R®ïA\lÑØkˆÐèÅ{Ãö3ý‘÷©ÝâLJs:¢º¡•Pìzã-Þ7àš9L^Ñx»ç•Ö×âêSmóõýò{7“© ]~ºáÔ2ûÚåhÁ -iÝëÏ•z=tNlÁ¾ªqü‹©ÐwÈü%ŠÓ…á+Bc‘ªØÏÈóÕò/?Èg‰‰¢Ã>Oa–×þ¸b9"s™ˆñH¼8ïåÿï¿\º¸§Þæq½#J¶d§Ÿ=×jݢj_N\ÒÒߣ -Â<ŠWü FåÄ÷n7âRg›…ôŸVܹ•PcyE3PCÉ“ Én…Í(ü÷‹ÞO”tpfó›mø[¡ïZ"ò|&„ýç­ì|<ðÒj)‘…!$ðH9Ã*¡¡»®2£ëcjž¼}ðÖAz׉ñ²0ú40HÆŸ*ûÅ)¤ºéÙleȳò‰Ó¦Üæ žý%÷ò_Ý-”ç&èÕ 5`¦l§§nK<Ù}¦‚(/×O¤/î0~î¢öаŽQïˆÚ ?ëž{‚5bÔ €AකáOÓ$ÏÐØVì‚ó9XH—&~ì±HÝÏ-îaa±½ž‡:YÒXKT?½iX€36 £9¨sOÄç“_{g8éι¤òó.í܃կëÛ„3³CðÎXâÃloGV£æµ|,Ü /hFõIå)Ûaø ›@Ƨ'`'8æïj=z,¡-cÕCZEùħÉ*‹Õ0w&c[‘‘]èI+.omú3;áû¿XSßV‚ÿH…ö‚/ˆŒõ±7 |ÿ+…ÞÓ¡ VT’´ÚL¶ §\¿ÿ¯ª\î}î!é‚¥9¢.üuÕå0¯vü¹~þ翸Ò=ylI6ÖWÜgö·ò(i•,í«þM——ø¦”/õï°Ë´ÑÛµjÛE2¨ùç3³Ê=ÈAèÙ™dþË{°0{ü/ú€ññ€18ª´ÚØØ„ò&Ï·IÝ¢§Æ%÷m%Û ›ÊÔF^`nÜ]sþ›!v´EƒOIóÚÓ쌰øE^÷r/šÁÌEù`;}ö’¹Á ÔÃG "e¸Ã{Œ¶’jfFÒù´'’wæ‹>Ö—ù6ïNþÞ„Wi].§¤žN-·±^)>%Ã#" ¹‡Ñ -Y˵ áJ´Ÿ~ƒ-4bD~3±S_a„ÔÜ퀭~SÚYMÙFò,¢[¢xü´‡âo :s¿>xOKÚEaë⎖QxmT‡ZáïþòÚ¢ògÔÚ÷ù<þüj›Ì^%ZÇ©Þ…óÊD‘T¸W]ùRTªŠ¡æ~|¡ÏÀ‚­8ó·W|øørøW4ôSÓ:6µùüg áG†Û<ø÷ -åÓaqœ f»{fXƒŸë@* Ð$ WÉͳ!ˆj—릋ú±Ì£ ’BI7D7ô‰øK®WÙ2Ú=º?ùUØ÷GDÒæ÷)ÿyòP¿M‘òâà¼æµþóŠð7:;»Ý©÷ñĵÅ£´Ý¤ÓÏÁ눯ƜƒRòÞ+ܱú]`ºùœÃ'Ì÷íÏësæZ›o|‹óëˈ†Þ£¶Y܉¼>ùUzº™qýǹäÌá+¨«ÍÅ2ŒH•o¬¯V†rF$&l<“ÞU¥qE·c…pd ù(ÞçWmá…žG|N× ¾ñÈ\óÇúÊ;¿ŠTÐ5Nõe.ÀS–.¥¤ˆQkô± NâÐFÑž/ÉPÙ©ùKø¤Hÿ$R-Û‹]1q‡f™§ÊœâK “g‰¢s´ˆCKØ‘£Þ&+ª]{ù훆„ ;b­UÁs÷RºNT¯š•j[$[_¥çõ°Á¾hŒU¹cØ®Ÿ§t­>Ž - -µXD)ºBÎi9UUÃÑÄrˆ~ ¨Ù˜è°©A\)¨Wa&=¼–U–wÚ‹óGkDß×eïýZå]‰h„ß+i›ÏÑÏÃBüé7ÿB*Ç“é™.*ÕçMм5ºQãú<·ÐžèhV·§önÅ…{áüÝ_rþ IòÑ_rPöt(Ðr–Ë9‚yÏáli§ZÌ0$Ãæ[n¤œ{5GaÿRCMxðØÕÛ¾¶j«l&EsˆÂ¾Ö÷L¾¯÷­¬y ‡&T:`bò’ .^˪>ýU–õ(Ld¶OIÔ‚Ù‰Q÷¹ö$È Á®xw[@Üí½Ÿ±WE— Ð\÷=Ñ!/À©ºlï¼×CåO&øÓþf²/øcÏe܉ý‡¶ŸK„– `Œq¶w1CkÆé*ó8ai¹Ã¹µ“²#Ò*©0ì!ž%¡[ÙžG<3—ê¨9\½Ûf­ÜÒþòÐZæq%ÌrÏÀÿÛûØæ@àSY«ÕÅ»Z~ºó ¥­3£™ÕúvT1˜ùÕUƒð@çÉH3äLñ*µÈXÜe¢¾ÂøÛÏëƒõ÷÷¨eü§Éø#GÄÉ`ên%¬‘ûÒÉN±N£-® ¡«vÈ<'톮¿¼q‚sðêY&vHxăý}ëÆ/¼e‡Ä‘ãb¢ÿZ¶2EÕ½@ÄÊC><Ñ‚xz=IØN b+²ij4Xäj9t÷*Îb:n)_)Mí”äXÑ_# ÀЋ÷¶béÑ"èa…”¹Ø|™#æ7çï²5ôVÁ¢¸Ñ¾Üßÿe5š¿™?VŸýƒbF_GÆÝtÚ¶žŒÃ53#?‚Ë·†ÊD‹Ô_ïõ–Á_秀¼ß2Jò™X/Y·u’ y½Bpú'ä’Dmü´ß¦+Ýá XÚþC¸-4/Žû]Ð=·ˆâëÞŸësn;âøº8#z1[¹ÄÆ&åçx\|¶œRª s©áy¥'Kâ 7ó½HÈ '…zaÀ„§”¬ª*,™ù/ê_ßÿ%'áþ#!8yYà=çè8ö”³ÇzɺOŒ70&^ÿåÛ6Õ ¸x÷î'¬óFñãšaGëyð¸/ÔT ¤MZZG{ä-Hßè«Q»6²ýa÷ëúC8i‡k£*©zEGV%ñ€Ù€ø}RefšîVâ»nÈK9,çŠ<ŸÔ…áJ–ŽÖüºZóY» -Èt‰ÇSsL;4~Æ@¦nFœóÊ2¡w 9îHwÆP5CæE̦qFùDЉíÜ#2eŒìm¸èo‘øš§3wFÆhOÌ¿všãýIçê#2XëI‚íˆÃ^–Ÿº‰ÔG¿~=ÊÏ™‘—’Š;ÒÏ ­…gˆêEÀ‰•S‘q'æÉ n¬”ÓjG<ÍÍC )–Ôóó"y\GjàÇ]ƒœÚwÕ^À$8œ‘sॢ:/¸uºÔ^÷]&ÅŸBLMÖç¬PU“«_‘]ߘÈ*ø™þHB^…CC\}ëQŠÑ4uÙÚÛw!<›}±ÿ(•HXå+*šP'ìÐÞ¯U¾Qà¹ã9SÑVÿ·²Þ …ïÿDÞIÁÝ5øÑ#/pE£…0HNJÇó—pŒˆÂ(0ãÉ´+˜²È®)ðÇûW‰‚mã -¦piúÃÊòvÖ?)Z P½èãÞW?@# y^_Ú f† :ïpÔ¾ËÌÒ»A¨c"ýè½x™}ôëG$#†”ü½&Ä-M¿ñž|bw'ŠÓ¾Þ³ØÅ™Nø¢âˆ9Ñ>ÿõ?ù>~Ë1«ĘAJˆ'!5%'ä {<`’²ÂU·7Ž§¢hÁÒ3NæÒ €Kžcü¨7¼9ò-1€ÖY@oto´ÆôKŠI¹Áй'Ýñü¨ T øŽ.øsü·°Óùh-äêá!Îä&>ƒâF«WäF0Dáq¤T·¯YÃ˺m^¼“ÌšÌü"¿Z 9©pÃoÜ•Î1þÖЙ³í­Ö²£9;/Ï”Ù^ZC²óÆ‚úPEa[··T†æîrènͨï;¸>…/ÒÅóÆ8âf^o¾pãþ:oØýŽKÊÊ^ÒÝÛœ×4 ױ癶§0æåÉ6\÷TDçi`Å0}ó›öÌ•j·ôµã8˜°e{]M*Ù­FHGù¦\D v¼[ˆ‚ù®ØÌŒ€e6½¶ rª^òƒûü¥‚Œ€NcVA¬9ta{ ãþ/–.?¯ æ|©àjfŠÏ{–ïoÿ‚Á§6@Òý> -Àö+t÷û¿„úðÝŒü‘1[* 'y°û=È€£å¥šû™Í0 ¦8m®œ•¢><ˆäÃ"¸VCp:ˆÅFà( ¿™ i?@ÊÄòGÏàIk*Ý~¾ê/O¶¢–xäšÉˆ»§Ô -írWÌ h9Eµ0OócBùáe…Ísð­+]HßøžW˜üx$Ñ®<ùëŒb#yµU õhBß9ä÷»=jOëä¡Ój]:žæÄóh]_Y‹Ø'aßK÷o`ê"§¡?Gª±E9^ -sh˜È´1þ¾&¬67-UÒ‚îs•jÉ0 qE7ÐüBì2÷ÜÅ~ 74+kKRØÕ2dL“ôÂaÝÐÉf¨ž¤Ýò8Aû5QÎ*Ú©ŠV4" ™ËÖ¹@ bœ–¦j`Ð{ Î4)«!§i7/fÍÌøD&eéRŒPóåÐ]Qù÷9m[Eô£Š7r;Åãb;מ@Ú㵞$ñ ðaÄ–[Éý­j5Yy–ÄEÄ(Ò7$í¢ŒýúàU½¨8ð€QªJ0EJ{üî0FÓFhâ¹7¿`,¾ÿË‚EöHȑ̸ßó~·•Žñ‚E^aƒ„¿õ`WÔ9WGõ’DiW¶ÔTÌ<ú·HS¡ô³w|¦ŒÇ6´|4|Œ*6J«ca¤±Õ)µª&UÒäFºZPïœIÕ±—ýq¥Lšä3T»ßu¾`î†ç¨QšŒ³^pa1`Ø k™çt€æ½2:…¤‘šÑN,ä¨oPŽYÕÜGŒ!íÛµÕ ©ÍUEæw“øŽ@D‹µ^xcHݼFæ|½¾Kÿ,Ê?[:±ÞÂ$/<à#{“»g¢VD ,Ýx±ˆê4‹ ,Þ5ÑCÚS‚5ÑWîésœ‹Ñ9À0ûMŒ¬_Œ -l™:&f±Ÿ'Ï^]ô=mŸpÃÏmåö6­¨C¾ -=fpUïjÄL…Û²‘¾JèVqæQo]Ý–æ“ânšß#/=¢“ûû°²ÞvZ Ud -ÃÈõÚê({Ôj¥6q©ÈH£•¸lÌÍpŸQ®c}&Ì%¡ª®¨‡Ö c¤‹­´÷Ôž«‡·+öÈÏ™a%ƒ«2ɤÌŸŒkt`‘§”Ræu -Ì܆ñVÏÖ8â¿XöäsÝl®p2z¢çkGò‡Ü»È@ÖØ 
Ó¹†ûˆï9jS$]·¾zop¬YÚö.,ÅÖÃþ¨Îdà©ÔÇVè~Y`éw¥ÍÑ™Ü -桉ºÇÿ&-Š^L0NÚu¬“ /¯‹ FX¿‰,6Ùæ–ûMž=Î\³­^ÔðŽ[-€’\¶Ó¾?û¿® 'y±{ñ±ž8¦<é]Õ ÄfUCç^Nc(¾˜v¦ï¾W¢¸´ÖÏÏO(µC9Q¥<Ìá+UÏ(—î¨_¦4%ˆ±`‘ã.RÑúºøû’è}ÿgÌ÷¯y2z vÓ»ÍNR |¶ðÜb`”6M‹’ ^Ý\]›y¢áïY¼3sÀÊ ,¶/ÎËg5)z‹ž®\\)Û’Ëî½Jv(ã¡ý髾µhÜPE8³•t’êGµÕ-ÃØòB•ÔyÖ8OodèÛꙬ - Æ7³GŸ9§¿F4AAL$„óAGiåFúó®!##±q&AÖ#ír¸ýÿÌÝKŽåH–àÔ| ÍH9îiµ4ªX€v/ûþc7*#<²ü -ÐÙÕhH^$/ióøQ 4hYï >oÅãïŽú’ÒÜ^?*D“óY=~²8¨ŸAê p_¶'ÐŽÕžTŽ:z‹ Ø¨ï°3’"3£jà“H/îðë;3ÀZ÷´gˆÏ -5´N#{ŸYqv.†=  ·±þ 'â´Ëò5ÕÝu¨R†àGÄñ¡Y¬/ƒJžúó†|#…'¤¾ãIÈ(Ò«÷$Ô¼*rnÇb‡J’ þYà7:¿-˜˜VÒ\ûxËáHUïîçšæŒ”Ù{ÊÐ[M¿ø–Œjôö:ËtSéš? |®o•¦´Ø¼66ìÕy§ì}ï> tÁ—¯ä{mš+Tñ¶läéŠ:6ŠÞg¦T¹Îå4¸kóq•Þ¶šŒƒ€¯(~?÷õÌì}uN•%ž„Dcu®r‡Š¢dÁU¨*‘Äxay|è¾Ýu½à>4‘8õ-P+¸Où¶,€_s1ë+Ø@.W‰49ïñ»¡6ïBü>ìeŸµR› "»~wÔµ¡Í«è“ƒ$¶)ÒÈ%ÓÜ“4îr÷µ‡9t×ß›¼sF}¬ÿ0uüõõ_’/~Y8ü^…C ×έ:ò¬è%ظ¾AäóÉõà«Ë²£‚‘7&¶S U¤,'YÕÐãÂ!xÎ[DŠ¿Ü[u•sm]Ð-`´ôªç1yÇÔ%å÷£ ÷¡ pýè oç«]ðlQŠ 5¡d…æÎwÄedµ ¬Ä{p¤œ}(ëG¿r @líS¬¡o¡Â²PÖ€2çþj@ÍIÔEgõ=7‡Uò€äÉ#nŠ#¬áÕ6–ž÷¸"4Œ%F -tõWY‘DM£+ýULñk$wŸ¹ùF¤Sä$¿¨‘—'Hš>£ -Ç<ÏžeØâ– }FMØÛ”±“®n9nU/9Y/t.b”+„B¯&êfàV‡Fæ•Ǭõu{sÒ˜êMrÓ³nÕöPfö(õ©kÑrùÐR÷OñûÊÍïHžN2DS"$@Ñ ‰/&!"í» ±ÉÓFÜkÈŒö8úpŸµk>róø%œ¼~ -bt=ÌOEó G¾ÐÍ#ß¡[ Óu=®ÖŽ'Rp `}†8žéPûµN½¥hdÐ…?€ -`ñ¢jÅ›[ÆvÝYQêc -%’¡ê3Á^ÝOÑÁÄó¼Ö‚qžÝeE‘’”h³s†Ó, ã0@‰„Œd£¯õ»#åÇfñJá‰-ÞÌű!WѱmÈrN®ì|ö’/1m¹\G™T:`S¸ -±4óql˜TPè¡´Ò‹ZœÒcj ‚_}MMÌRÍçbg\„{Ð(OMùfvŠ¡ü†RWº™E)P¸›C°‡–s„³µêJsË♆HLï4t…‰UºqÜS^ ÌwND +Ä´®•dpôt”êó»Ï³W ß0yì+;ÊŠªÿ3UÿfC #Æw-aã¼É>‚¶×B˜n •-,.)ù>ºà£0P)8×Q¸r•ˆGACD³k„\p–»æ]*gÞ(3òim\Ô|C ÎG›|eÜ"Êa!zÐ0ùás• æ{Y|è¹îv‰ƒdõ]à×bœÁ|$„kÃÞÈèh‘<0¤l¬¡îÕŽ_^àV8C”rª_ý¤À7ÍÉ&¼Ì¤HQPË4 B 9UÛbDêËó%_ÑMˆ6„–”è¨iù’y G'<°1e²À \ö”5¾Ê}-²] X˜+8š^uè>0¢uÎ)ç ³>7ŽDç;D>vÛ³nu`Ìvú¥I0O}}›k‹þúç0§€97öÚÊÌ‘öÅ·ÚƒÿólÅL µâÈëe ÷!ž¸4…nJ¾jîXÄó•>ë@ظš晧ڿþñÆÑò¯(Oµ+‚“„¥rDIä½Ø °Ù)¦0ÍW§z¯Mä–š<äbgîC Hp~æLÍA‡´«“ú A‡BôÁ÷±®ƒî6·¯[Ê•!Ü–.Y>tdUçžFL©ÆP§Þ‚X_»ž¦ Ê"'·š³D(D:[l•0bdX¹`0¢êSÇ÷ó-S™§Ÿw3'Æ-íxW· Õè'^H´G¦àĵ#gõµxœ©TxBj4ΓÃ;“¥”V"¿M{Ä㦆ˆ0žHÄ‚=˜=ôL(^ô{ö ^Xyb½»¹’UZ*4È5ªr{îÔU³ª„È…ícʇt¥Î\Ôöz‰P`Jô»BX‡ƒv=ú9ó#r[ÊP>÷”V{‹²P×¹¦¡–ãóñÆ—ú[ éΔXæ†ÕJF*.ðøzLuJà€™‡Yw´s ÒS=ïtÕç7ä/7…´ú¾†z=Q‚ýÁoií'J^~ópj›c«@û²Žë%fæ ’Âø«B8r+þ„^%¥w*Œ–å™ùyuaöä½|âíñÖxJÆP~ã Ž‰›!yî@žÚ³{”ó“üHð)"sÚÅÖ~›;ü3Zý\i¯ôs#ÌËÞÈlõöâd ’7ß]R+hš˜„äxOJzìÙ42HÞ×jÍ' ÔyÄè{)ë=¢+E¸C¸ -!uÑ)ßÕ€EñãD´ùW?ûE½óê¡ IŽùý“¥Çüx"—ˆ šO)¤±Û–úÖÞb–“]P<ŸƒlÐ)Ê,Õ3CÒF½læ½jm@ôJö[„‰iÒA7UŽXJåÄ¥‚à|!‡AÊ-–.7æxÞÜÊ?zœs2H±l[:T.T™9 Vu@¡+\cœ­(74•]vº¡…`ýf àþb{iÅ×ô¥Cˆ´5ÓKÇõfú}/‡à|Å<¹Sê—çªÐjƒŸaeøåámCÎGFDq:æB2»~Ö+žQ)Ŷyæ}ù¥€üó†"™©‚™ £££}¦|H’aÞù)“»Mü(Ð8D+æüÉÒ齿dúêu|ÏAW/㶣Þê–ê»#Â+ÑÄ·ÚÀ˜Î§†èfªÀvüå7F)J˜éD¸{þ·÷‰™Uà 3¡@ŒÙBíщÜUf@g#`£Üé÷ç -Èújî´…Ïy抸3‰¨lQq}²î q©ÄK¢ Ä$®ðuˆ«IÒ¡ŒÂKñƒt$ɵ„…üN9yÍE¾~¸Ù3÷FLÅü&‚yÂÖþÔ¼Õ¨­]–üÖ¼ý߉vÙùüDìb‘ð -ã˜{f‚ë+.Ž®%51ˆýŽ¶|FdLŠü@ÆøŒ¨Rlù¨ÄŸ›ç¯ `U‡>“¥5\¡o«—ÜCý.JJÄÂ2HºDQY/ßDÍáÃó„lKÎÝLwÁ€¸cæîXkñ‰Ï³°¿ÞiR¦›;éñé®æ2BNZ  {hÁ|i^Ñ:¨Ø;Q‹ -óa‰8(å˜Ø’‹o窋U %í¬A›LvfP„CX‹b‡Âœ[ pbñ_êo{|ãíÏÕÕ5(Ø5£¶˜j›»Ó$àj0k¾u#Š˜â9¥íú²ƒr«ùoÎË’•ÇíTEAZ s×ÏA’^òa1Ñã ÆuVeüÿ=’šÙ×±U»ì Õ÷$†ÑÙîùðaW^Š·H@Wí{®34‘àû11FQ -øúUýŽ¿…)ë^Üþ¨þ=Øì²…¼Za~²–ÞX)ÿú _K'}MBɼ]Q¿¯1ƒM™ðIe3à$ ¦B)µG•è£âÝ_ˆA *v3Ax© ˜´Y¢»Zá)øâ(•õQ] ÅôêvE¤=@"a¾zÞ¹dYçÖzHØúRnõœjÒËŒ"’u -bÚ^2•=e™y©¹e‹ØÈÄäªþßG¤oQ¾8–¶‚„dËB–"áLŽ™“í/Û|Ž@’”Ú¥ú0V?Joa¨”iAïéGöPÿ™¨|RÆVêaÖŽèþé]΄ofýÛöÒ’¾îJ¬±Pd:ÐÓÆÈyÛÓáÚ®Ó¿%vg‰r±x -;Ê÷e©ês£˜I,£ô}5¡­Kø ’ -o;À ÅIÄáœJÛ èœm(uãC±Å+Å‹)š2Q×(†þl{¿b½‡5H ¢1ƒåÿlqðݵÅØÇ2ˆQÇà=‹(à{Dë)Z§R¡ - 7ƒ;’<0]ª}Öé)by]fÖ‚bê”ÆŒÏÔ+¾jgSLmEŽc˜›9ݲ5Ï!©tƒ‘fzz¿*Í"ì;‚F'JH$G̾WÇK¥jþÃØÔUˆf|ÝÆb‚‘+<ŽŒ~%™‘FO!$ÜçΈó2ßÇ]Ct•á¢ÿU&8±²ñ¶º±@n6ìŸO…vOösEÀj.&E ¿z§‘PJͶzìŒG•2˜{¥çpõ{ÖÁ¾¬3ø°^,V»Ö݈¨$_•î¯ÔŠätÐ"ö±ÉÆfÐ|Dp“RÖP -¦ÛtUk6µ8ŒÕ1èáI ÉÈ™ñL4’0gP»ù':œ£³³º²v:*åKÅ:È9á¨n/›‰#âÑ6Ê,="r0XožUª&Üí_P˜ø‹¢Äc™lU@n¢·3¨ÖæxÌïîñúE„_€øö¾/“‰#½‡9K™û‰ØNF´2Ÿ,2ú飧*õ—R¤|Ï9Væ;9;`ÒÕÎ÷%EJt6Bk>ý~‡edß×NL¥ÀY -®ðW«á—ÄS²€pTàw{ôUŸð…¤Òi%}©Oypý-Ys§ÀŒHÄÌ$Kc=Ñg«ci¬¡ðn{µ«/>‰Ûõ»0ÛÎ ¨`I9L¤És3g]O› -† ðªš/ÉÓyõ"§6¡p+ͦø:mrFš;ÿV< 
~Š¢è:?Ô‘ù¨”Õº.K+îa0ÖuTSÃ#»& Á܃`̇Zã~»Ç›vòšVÞ´—Šò9KgKEŸÇ¼¿¡E)’í¬çÝTT(yŸiúÙÙ¤s-GC8x•‘Á,=±hFG;'‡@ ½Ñ3»ª×>–~P?p“KUn -hQ” Žž_Q¨uÝ£l9ᶨv{ ¬Âåe3Äšµ `M^ÒS¸²Qûv©SC90ÖT=“­–·Î4c–œŒ3äg´g²ðw¿á{Ÿ!Pž )ýó|6éÔTÉ<Ú®ãÞ£˜«õ~JE²I#Îñ¾c¡zÆ¡GҊڥ˦º•ùÆ)aÎñ7öÙ]»R»Z"@? 5ÚnxÑhM_æÀ̽ܔŽ—èDÜé¿Eu„$Q¯7CáŠ\\2Î#¸7}õùøžÑÑ7+¶ê㉨x¨ü4-ØCìT S|SUÜ÷‡íq„Ë ïœKÑeáœré­èrzˆË^”i#Äèw¬ —9·T zEA²ï† ž»PtÓž$ ÿ™Ë§éÒ¯‡rjó åƒÜWEÇ’9×Ô!²|êPó¹t&A8ø\S+ãȱM‰úq.åÛZNíZ'j¼¼›Ê øŒÜ-'û16å ²¿è™8¶u0.‡ÞE“ a1Þ÷œæÑúô=U+}§íW¼ ðf¸‰ œè>ÄÐI,¥‚s£5\™{oîfæ8¹ÎlžÚúþ/-£5ÒFLÁ!Ö[A›¸«”ôØü.ü0ÿ={½©_øF¼ú;G1 —__¯AK¨Š.Ì™•ƒMÙË·Ó5µ@›Ï™ ¼»iÓ•K±ä–Ý¢¶ó öÕmA˦מ" üñ«ü2¤©aït"jÈ8Ëïˆ7ArœÉu„Dwê#&–œïkH5àÓvžiûáBE*"¹PGû8#ìYCÔ×èr§ÑmȾ¤e…|šxt@áÁ©;¦µ”—ˆ¦ù™wÓl®JŠ4WÞM´2"<{àýŒ8çÂËćñ†WZ¡Û8v£é¿+Ø:ùöµkF2[ w@ŸYJ»þ:7°ËѦHŽõ—Fh•+]ð‘€É^÷ŠŠJ÷nFSœŽ?%ƒ^Œåq“üP ™O~DsuK$¹øž.ü6¿Â»Ú-ºo¥KÅ6šóG.R!®À¤ç¦B™ZJ-ê,\×^þhªý@à:²pö?êÉ’¿Ð]ö? üÏ2V‹æIù#(óoa¯É‰ÙOCeÐ,|M'âÓ’ é9ó£HCDc-ãÚ6åŸ(œ@&7I-ºl Ôßp¢1 o:3T?â·5¡f,7J– ÷ÓÃ{ðË ÷ê„Ý{5N;h5¦–gÈ0 Úó²á$Ô\òQ'¯†(bÆ.?ã<‚£Ø‡1ätÔÀ|ô_In)|MSäOCê™´ünÒå½ÙVï‘s$ÚÖ¼=¨G|’ø„¸‚IÚéÑ^¯i+OJ ÕþB/:ë>—É< ÁÅgµ¿Ï¬•ž¼aÜÈݸXæ|Üy€öEŠ¦Öì€Mœ·ºáM@À¦Ïž<í®øn„&RƒDí`£G튔j ‚†ÎwsÄ„Ýb­!t 7Ï'åŠiŒ†õ½ÞóB™D#`Â@®)Á„'U´úìѼ´3ÎÊ6S¼…ˆ!úÓüaŸi|r˺rdIÂ÷˜œló•gì´±šSòâ¨ÕÓA>¾x¿üü—à䙿¦½=+|{ !p$' §æ¿^¢£«Šy˜|æ -XÍ$ݨª>‘d|¥ôš!T€„’×´²Já5›æÁ?’Ø–·X¬Óóˆ(‚T¢CZé¦ÙyE»„4ÿv×^añõ!wŒ: @ØR—9Ö# Óµ½c?F0&Ïo×ÖÈ ÇÆH€ÆG‰!Ù»^˜¾_¶ÆõÐÓ²´”{®R -×Z—þ®Ý’A•ˆòV{„FëL§õt"îTE] CF•Ü²~‡>ßµ#ÙG¾c‰« -ÐQáÌygà ­YÈ9ï@0¯Yö`ä¼k%…³#Al±C᫳¬§0[ÌèÈ´Ä&|Fß[áÞ às/²hîª -Ðs!‡/¸Š˜gj.Ë]öu¢U“Žl¹;‘:µVÊ[]é‡ï4§,~4Ô©=+ü¹à´Ü?µæÛ¦L‰I’ìz\oì!4ä}ž)ߪÓ~s¬þ\F«Ú Í¼s1\’\ ÄÉ ˆ·ìq\›f©–ï'î}€‚¢¢ªPnƒý8ÞXc­ÿ3¹4gó\G’ ¤^¾$$†’‘“˜¶%4#jï^ö8JÍ"$>îë©A¦êÍ÷Ù1ðYËj^k˜¤ÌzF<žŽV“ô4ÕglhoÕ ó‰½Iè]õBßA2vöP•Žì‚T ŽŠ"¢nŸבÖy~þˆßoó¼'zwZíóyÿ»­’a–„÷ÛíA·‰ñ¹“øŽ÷=k–üžâÇuôp)zX×r~„K`7à{ãŒÛ8²V×’ð!ÏZ˜m‘/àºîçú8¤éónÐ@ïVbi}Ûµ œç1ÌDšèùpñ© g]ÒÐ>êí„ -7\ÙÒëø0Õ¼(˜o4€2$¸ -^Lõ«Oÿy<‰\á,|×Û’¯äµO€¯lþÉÊ5þŠ÷lØÛb#Ú*À(¶M®}îM8\ÊV~ë¬À)xþe½Ï ¿‰qã»·~䊈X=¢Ý3U”ÊwÿX pv\¯$Ì{ •cAA)^Î`æ¹|FÙÖæø r²…ËüÚ§õbF€r«… ¦_ !ŠV¼8õÖ™>”bƒAšS¡ƒTZÕc!o5$ÄC쨩† ä†,‹çýÆþ÷´ è¨Gý‰#J†9) Q¼æ/ÎêiÑ_˜ä»m‰SÔUZêŸivïül‰¨+f7AoÿŒ¿ñˆ×ˆ…AƒIb”Âi%ÐcBxFFxKñµ£ÐRÜ©Ö­Ó`L¬ÈÊ| ‘aÝdöÉc=fûGºÐyû|ˆ)zŽ{>,Ú_›Ù—!jȼ,€PëwغQ¡&Ð5†¬ •G„Ó>-S¥H9ÉÛ9uÚ èKn€„Lc~üÎõ é„.¿š·³E_g"óçø°,OTâ5ªÎýYU•‹èÉ2Ý7•ï1¹I/Ds¦ƒöåøýù/ä+.‘T’¡¨sŸ<¯„¶02dƒ|±è8eC÷®P¸»ˆ †ÀßúgK™«ÑJ®EŒÊ•~œå&OœÁ`ÐÎ" Ä`&=ôDMëÅ_în“#Ù'Ò;¶éñó3Ýà[ÂÏøwÆÐv:¯9†´¦q—»Šì^A¢À²Lyûœµ%+]Ñþf-Ú0ÅYçËnRПˬ½#lµgU¢LÕ$2AÙ•™Œ“ZŠeõßeüëÚÝ…£Žˆ®,ëÒ z¨5•€Ÿ¶Adc®ÔEkˆŒ&–íËÆÿ]*­K ! 
øOäR)Ñò× $ÂUª}+ªã’Ý]y[wó’Ÿ«õ -½w6†Ÿ—b¦“ÖÑ.šÛ¯P½ÆqM¬ñ$×Îro–£ã‚(e>-@øÞú])Úg‚ØÏÉ|Š1ø­\›ly[ܹ‚«›[»6EzöÛG˜EÝê§e)¨Ã’¿H4¹hŠ‘Áïëû¹– `þmR²£tÀÐuŠ¦5ªœešââ㸞³—dÓè¼[ÆÒéÁâBhY55§“K5huÌK%šB*¡I¶AT‡ñŸáR&cž¬e*¤u78 êàn7 -•)<ð%›l.•Z~áØÌïyË@¡a¬Iú÷¸‹p±HÛš'#y”­ý-Å#\ô°08²×z$kž¶|ÈÀˆ.iÝõP{D…ز1^jªÝ¸¬1ÓùyRâÐgê3h9廈#q ŸÝ^þÙžÐG7eíeŽXц ë70ÄþP F+Çl™j¯!~e2Œ4#¥Ü¢Ê÷+%¹•;Ä+M*y!ÕQI2'© -õÁv³˜±:•6ûO2NÆë{ʱ«(²“#bÍQïWZtótÃD;ªŠÃÅzIr™÷"îi€FA>ƒÆ!Ð %ž“9aÇh†eK•E¯t7uâ%ÔWá=J­óõÆ”˜Sw· -Cšk=ŠÞ'XCÜføî/Xi¦á(ÑÜoäüKN×O‡ß)ú黉ó·yZá<6Êm jÕYÅV‡¤Í#Ù£vß8ÕsžyöÏ+’PyæÁ^z i)FG‹(_Ñ-ï Œå®W YL„¥•ãL‘µ™Ê!§dhk6eçÌÅÓ¯Ösß+?•ë:Ù!bÑÉéyPqX½K5EMM+FD·AgÚ¯aúô¤ýÄÒ‚PióRĦ×tž*<šˆwtj+oB€+˜ê’ýŽÌ~ð5oã% ~þ\7ÉûH×ÑëY[éØžd#GÌrs}KÛ;€Û¢ ràõ­€úgÆû†ßÿóáð;‡ÃxéD1Ó«AJ@¬2Hdä<ƒod%¨«^çÐA’Ä8~Jïy -icGHEµÍAÒÃñ&ô¨³ô‹b‘Ûj·$‘f5cÂÌA©³·-¹ÙÝkìVKÑ–r) XF7©¯™â±ò–äL¿ ªÀ@9*Y—âmÀ‰ÉFxW|Î"§†„ð*‘ímŠxûô¦ ßJÉ H›WWdé6.5æœæ9[”À†¼c~¨ –Ÿcã[y1ÃÎnq‰-éêA, âV}ýË¢ªˆ Ô_º[ŸÄ³~úƒíéçlèw²¡á)ÈØAk\8«<}ž±ûaß%;”ÞX³=9ž…Ìñø"_^êpUÙ%#ê‘Kl'\{hhUÞEžJÛáˆèb³¥í×=fØîw‹‘22xª$-]û;Âñf/nÚÁ#E@ªQ\Éš[’v³ù×&ÆR®Gýo¼óÃù Ò5 ˜ƒö®¥tµ:©$Ž-  ×õBùW¥°Øk”ä놷<²*,¹ãæeÏîsŽP‚g˜4eÔ((€Í^YŒQ=}?ƒíÈ#1Rå]u”œb—ŒÛSÈf6ñV‚\B <¸Ö(nËÔ!ÆY£áÝ×ú ˆ(ï=EÂÆîŽRP“95?©ú¿ó—Ÿ\É¿³iD#ܧ4ÑNDƒ=†!µ¹ÞÒÔÔY¹æ¶’£C]ș۸&ešmO—’úz• -†=] ±Í²>¥\êæ”íi­Kw œ§ ¡{?·ñœ¼ªlÂd²Öº¯-’DÀ.¹Ò»ú5*íY)S±¦b](ÍXâxjà¶s­¶ðWÜ°8I¼³æ+WÎÚ«l`ð•ÔóÍ—¥Túè‚G•\üú -+}/p!÷ì=_èå?ÿ%¡¬-â± -)ãLÛQHü;ƒ×i§×:ãŽ[ã Y—à5¯«Xü¸×Yö‡‚Ôqrû 3?q2"$ÑÃÙßWëa+Çh4¤ÄJvΑ†îp/mªÄà˜òHœsv½õ'RãTZÞ¿âä”FɧVƒ a )š‚ëZ]«ËeDa%^GoóŒŠxCÑ0€þð‚<Ö¦èÜø—-Ím»fËôŒ¶.û6•ù‡B0WÇ”§ñØ ZgŒ4£ÓSwS;3B€8R&O¹ÏgF®eýqUQÍ…‚±etÏw·OòAœv¿ª8ôÎù„{ã˘r<æꙉ5ïPÚgöíüî4ÕÖ5§-•h 3bO+‚¶€W_¹Šx–‚íý Ñ„[˜ªÞOÇï?Ͻÿ -÷72ÉßÉ$‰²™¨Š:•Ør+{GC^ëTŒöU›å…"ãÏ'=f6 -ûy|¤ÇÜ„Ì´š¹·WB3äcšûe9U¥¹žU-Ü 6ÔØëÙ¡àPO﮽W—ó´è”œúÑk—[ð-k¸Þé0Q‚²Wù4ƒ‘B†ìO#sô,=ÁoZ´xM0²ö¤ü;²)W‹­ zÇDµžúú}ä+í›)¿ …L§m¯€pÆA‡dÖ½ÇÒt¢„ bõQ³Ó]§,$¤ò,L‘øü~»¿õ—lº"WÎèòÍ)Hê<¹$¿–=W%+&[^åÜ[Fø¦»7—Ø -3k­ù;´ôâ¡5wBñû¬F÷7!#NuRÉ6qÌ#ìKeCìÓ©gt<•"ã)ñVO=U…KÓ†GÖ5> 8×H*nÕ¾‡ò]ã4¼Î??j[u’@ÙÙnÖs¹oÆ¡'ýÙ)€b@ µJ¬Ê(Qà¥Ùê-¯ ™Þh×?F{' †® Ò—7ŒñD– o½?r8dÙTo -¸0ž)„VAšìß­PRûõ|qŸ¶PF¹ÒOsâ¿ÃÐ4Y™>ÍòPƒƒº£6ÝÍwDþñÐWaBWŽà¨H0qLÝá`JNd q^NzŠâz{Å1Vg+ËÕ´èð‰¯²‘ñÇ<€úù;#\’{iÜå#GPx+£‚Ü AŒ…íà5_>)´KNÕkmk& î#b\”xZˆsš7¡ÄËG£Üæ¤Üÿ-xû—•ÛòEoùT­È«ïÑödÝ–6 äoµrZdõã‘*ïï%×Y´茞’ÊÑÊ|s¼bU¢5ÇZPhÝ5ëy¢ú°Ç2û®Ö+&Xà1BLƳ@>Ýñ9ñäúµ0PÁX~Õ-·D†½oéc6 -'Nï2 ´t)ÿô÷ãW˜Îôäïö˜@¶@¹Û•Šj~ÇJƒ7tšjÞÝ9祜7áäná——6‰›ŠFBör\Ë|.lÛdnŽC󠊇ù‡[áøò—e¾œñ"!Ø×®¯7>Õ£²kàã@¬a œ- -Eþ;@P‰U'Ö£1üŸ¨©¹$úY¾‡.ÿnËP¯R¢oN)-Þ5M³‰þÇÿaŽ£ö~Ÿól@ØóŦbµœEž™ï^Å•ˆØúñîÕÔùVg1oå-Eï²<_]¨ñLÚÓÑÉZæ8È£ê.ƒJäD†°#$±ó×|ÚÂ^·àÑéÞ¤ét}ØûÊã9©Ëøk Œ/qRQ3?³ò?òRjw¡èFŽ9pls¼õ‘éÕe‹Øö³>ŽZ<³¾(mUµ 84åÊ8ÿ¿‹bf+ù¢,õyŒoêTsi`G3ý¼bªM5…¨h5µ×-Ý«(V´GóÒã…å`Æ©žäžŽœ£×J]w¢jºËh«Éƒ3„5!`Ö’†)Lë¦ÅP¤áECS„`|ŵÓNÏ8w©"é#ÜÍXQ³QÜ ÑÖ0³µP!ïJ2ÛÄøÓÂðÚVÅsƒXÃzÇwK£0…$7þ 6¿[tªÌwÒòSU *ÎæåÕóί ­Œ,=zñJö[(²4’˸ï îðä[¯Û%Šèfv¶ñ;îwIŽ<"™|+~z9b'ùE”%ÎDŽŠ«–ê·s‡dÙ9Q£þeD\?.‡»F0áºàS¦¯!,N’ -õÜá³ ÉMƒm•2oBŒGh%ª¾ÛY¾½¯j~Édµ&à¼Túw©&Ü™KíxCä8[îFCÜÑɾ³@w ó6ø¨$½Ùª÷ÀM÷ºeʨD%–Zë0£±8b÷özc®?€{®'ƒ}ኚÈ8qx,a±ª²qäF²S:X‚±è-ÊÂ_å¹¾‹š>WßøÌcIv•Xv%í­AÚOiêðÕM:/o§ª}¤1¡N‹¨‡£?v?vo`¯´óKÖóàny¯”©¿=¸õ€u °§yn+é.ªôG¯µé Ñuú8¶Ç)~äh<û{Çv‘ÙeÓ)ŒÃ(,æÅòe’–Uº1ãÿžFlÛs1O}Å•ÿ^V?¬ýÏ_ˆ)}ãÝ3…Ö6‡ç82'r„,D•“cd/RÀ2w —€×%Zvê›HmMðJ¶Ñºªà"b̧«%¡˜ñml¡µ¶'}Eê>ëŒû1Nð?‰2&#à >ì¯ÿó7|,ÖéðÆVÊ­-ßp‹öVl¯v*ZiëŒø})ŸÑÞÒ6O\Ÿ"s½œ;ßùàj¬Ç®z(á`a –i܆Α!““6g½²Ðü¯7¶¶Ú‚)$´(R°,ÉŽƒKN*Si·“öbn´¤nô±òõ/•°8…v.C‘|z’í[wtÄFKŠ¼6¬#~ÊIÉÔ%ñ«Ùæ+ñiK1§_j’ßÊCVóª=íL›V¡g«SgÜó™(Ö’ÿÌ«ö7bŸ'ç<Ô ->q¡ËŒïŠ };K‰K»( -¨¾wíÁeítñ);ã ˆ:fÌ?37ϸbC…?{Ú |vp?­‚(|Ã!áŠöy²q;Ç}¶{Þ«¬ÒÖíÙõᓺ߹ FTI^ˆDQ¯ARv4Ðp¬r7Üt#G ¥‰Ë¡ˆØêí9hñ<—ø^iÃÄÛû„£ËÜô¡Æ\Ѓ©žz~F«"T{‘»…È""ÏŒûÄ¥Ê% -!ØçU€IÕ¸‰1£æRÜ'G¦Ò6£{¾äÉ'kðP7p¸»C ­µ ÍõkO/ýçÙ7Wué±A„ÞŠïT1—Ô uµˆ[‚þhÚñÕ3é¿$ñý‹<Ýç¿<¶yQéœó¤'ŽªÞÚÓúˆa¡bTfYçQßÉç÷ì»$e4uôŽR*¼‚‡Ûøîò˜ˆD©@=J$IŠÏ0'Î|[•Ñ´0tÆ[:ngê_ë’­ngìµ2Îw¾@ýŽ<[=aONúñß`zføÒ¢ ‚Îu< D®(…Ï4lÝ-qÚE¹$¥g9ÁHïµa­mÞíY‹cj 
^íá+UÜ=‚‰#E>fR"Nš˜§s¶F0¢-Í®í©¸ëÇ>?ö=(ÇOåÌÙø(WÆ%9—bf­(4vaDO³2[F¶Ø¸!3Íg’ðCÑßœ›ùØS.Ç_læZ‰Ë€nmž-»Íøév”ªð4zˆ2{­ÄFà8 ÆVòRo,úâÚ«LàwF©S {1æîɵ¼`Šc~”¦iÈv ‘û«Œðõ |nù÷BÌë[9Ò? Ú³»c}D£uO¬Â~iG}«ëØûÔ;ôñ¶[d jsŠÐÀz—ž*(\TmGµe¤;X®Ò×ØœV©Õé ûé v=>4LŒA—¦ÖÉÊñ;GÌ~à=“V‘¿ÕÕ¹#«ý½87<9¥¶mî¸J4t[®Ò£„ÐL g3º¦ƒ(ÏÇ?Ö)'~‘5»£«WÏ­˜pÄ£¼5d©ûÕÒ©a¿qtYBZèi0P.¿Õ Ñ6÷>5ìmX¾á6õϽà=`ŠuêŸ÷¸§[ÉWý -T•ó_¾ú*0ÚÍø2"ùz*êM¤†{‰›Ú˜šYG+ êuTY™ü8»{¼z¥á'ĘI›¡ÃL=!z•Æ£CÓ›,gÏÏxl>*áeH¿³!{Õƒü’RíVs¢E¥á>WB§ÏAêl|2=²ÕûRç‘m\ais‚v4_ÖFQÎûqžM¢ÐÅSm‹þU(>:¬ÌïÄMgD™*/r·ÄMR¯³´ó/€&ú—| 4Ù¹¼š:Ty88Á’C½"}Ð)wøFô]ØoÐøÄ؆¶·d7/[ß31{žÄβ(ψ-ðÆð -k„n -øè>Y³º(©ÞŒyºå¶áPµxJêZ½èp@ñ¦ŒôÑ¢£’'Ë“^ðcËŽ¿˜›ÙP5”\÷V–s±ÅV9 *iƒo…¢ü¤³GX÷¨MG„ÝqÂÜq~_W0þ_߇¸eœ#ÊÂ%n×1Ñ{zÔ\²—A7 -_%n÷‡Dáú}å´U¾§?´WŸ&_ì¦ü"ºcpCJ}‘œxm _cšØ OΊãPV­:zG·±¤¸+Xâ°‰£€gäÍ|.¯Z•zËL¹3ýF;¢„œöÔ„)Ûx5~gWÝ;ØîQ†7„R&“* ûñzã‘þ;ýÞ! –ÓvOÝ’š[z !ózÑÄ®VןƒhHaåÌË#[…mr‚ˆ5OªÈ<Š2¤Ç)ô.›§`½c(B• ¸Ã1£Ç«µw,nrRåê¼Ô*UWQ‹õ£Æéá,Hsz ê bÊ®Ç^‰·xŸþ­àå[±Mî·hXԲū“픓´“,vrÇFï­“ËNK6ìñÙÉ’– ÒÇòõõ¯q’Opb(äØ@>Y¾·äúŽdS ‘ìrh…ΦEuõ /bØG<Ð/®R®Äf¶kÕã$óNæ¿aw‹dp.:œ›&è#ØÓ—ªyâ½1+ž¼‚¹ÜÆûeI[,ób/_*Ñèh>DxgK´páÄ9K³oxyÿù9 ~|tzp,<ò„ë«*‰ƒ ¯¼|›æÇU -®ŸdÓB…R³³ò¥[ö4ÑÖs*Ú]Ž*Gâo# €÷hØDK©¾/‰òñÑ—+Û#L3|8-:C¤Öª©T_1$¤ž`gÁ5CÖ_±h!b7UÄUFÊ|¶ÝÎî ¢‰½”S‡7y¥yÑ?é¹ÆI¾Í!jŒÔˆr¾äzødŸL¡(—Û5óŒ5œ¤ÍZýŒ(óÑ)œ|ä-V2[ÊL*á¬%ä{žd#Rs…[¯ÇfÃLj8WUÙ<çxÁÿ•@Õ‘é'0·”ÚhÁ¢¤Êˆ˜tóñ…þPöÇ-ÅC<¡BŽ[”4eEÆ ý»?,‰*U²±sö18ýù/vÂU¨†b‹…ök”#}tGùc›%®š;ˆHOÿMÛqª/a³c¢XÇåR2‡ð¹SK=¶ll²µWÉø¤Ysr˜Í·5¡ä¡F²Ç¡‡æÂÂAx]k_r{cJg¦6é+ÇB£‹—’±¦æþÈÚ Õ¯Äæö8iPÆmÔ¢ÊÙ´¯ro{rÄç ’]{Qåî”-®k.td³ a|=lj`.û)b!mÔz èø͇DC Y¬ð0¦Ë¯îƒjÆ´9Ït„…­==ÖŸ¿ÓGã(-Øc¯|‚ÊÕ˜Þö¹ÙÊ €ÎȺ•†yè–­0Ž¯¯ªïüÅtúùüΠ=%Ðñ"·RÅ¡¡¸Ú™âûøçΓ˵ŸóóÐ k!ljÈßûÊü*‰ T—²1¢H.aåµ-ÞÙä¯sÞ²ÝÝrG ·A[ªÒ.ß‚5šÿ¤¢[á¬ÆâçŸYê"êŽÃ‡É…j?¦ó¯¨;ï(lmJÌ‚Jƒ}Aöä^º} -ÊëšTÞ% ž;²;¹# Àt¯»¹Á TetlÆ_EÇso}ä˜X#'Ý¡wöÖÌýÿ/"y¿ÂœÐ˜Hõ9rB¤0½%ñ˜MGBN"æ=ˆ÷#‚T¨+Ù×k?6fwŠÉû#Z·&Œ×ㆾDn,µÊWyó.áÑŒW?~§97šÄ}rÃ7ôÒÔ+⸛žâ`DÚ@ ný–5¥§—‡Òê±ÊLM /”}Ò•VÿÁúwìÔÈ×öH…ôÃóEuãt.hPª'´É¼—‹*]À"î6q5‡¥X6I®¨°ÄØ*Á9c=ë±CíÐY"×_@'+Ööó‹Æ™‹ ðî'>Hz) àÿeîÎr%G²4A¯ ÷ I¡”ç|­]8ÐOå èÝ·|ÿµp÷kW ( ¼•¸!¦œd8Ã?ÔïÐX×2¬¦—æÃv˜¿t½yáR0Øšö·Œ}}UŒ-þƽ$ a/ÂÛi‰62DÑ/Ž:Uå'ú!•Óv xæ/{êkMÓP„LQl¥Ï»‚f?Þ -m;!y0Þ˨Zù4­`Cô~RU½jvJ–ß@:u)€¦Î¹¯v€«¶ø×@¸õõT©Ï³Ã¸®¥V)Ñ -x"*ð!çsåØœêá¨âeH#ÇL¯×ƒ«=î&j¤‹Ho> › -ËYc¢+æ*¢C jpìõØý¥ ®¯ DS’U›‘òÜ—'át#{M_PSµÔ0½æ ¡p&7¿C¼-õûRG“ð_ò‘È+*ê${Z‘µbɆHüžÈ_A´òhèçâG -&,?êõ¿³’ͯwTÊ’Z‘·°±®xÿ$[½Þ¿ã¤š4Ò…ú~é½Õ¬"0dl Z½SèB -É¢øå_Hct¾8„ûCiè_ºpßÿelßu²ÍÔþˆ’[‹àxÑßÌðnC ÉßðfÞ’&±Ç&à`fÖÉvÉúú’;üÅÉMT[j9« -~ Ý\iÕÍu{r´ä.â”:Ý£¾ f$S›#Á÷€«¯ Šz\Цzᾨ¬^tF‚'}US£dÊ®„—ÇŽuWÿý܈ þæGUÈ+»+½ítª«ØÎû›7 Ö:«ÌÆ'âXùVñ̇—G¶Æçµú+7iÞùYX´|vX—éëÈJöhgâ"ÓF9° |æ´æÒTËà{S"E_e¿ÿ‹ùôý+ù=ƒŽTýz ‰õ¼Úžî!C"¿é’ÅWçµ6é€íTÚ'ù·|ä=­º¯Â uÀëfÅŸœ¿ïp[5#w¨ø‹7™û9ã‰uwôkÝ1êWˆ¨(·-†dó`"G}V¤¤wDÞ@,÷ó]\ívwo¢®;Ƅ犳 q]XcÈ zr„‚ÑÞ±7§l]×;U˜§(±‹Ôû*Y`ÊÍ&;¡Ð‰´ ¹#¾÷ Ð ki# 7‡À„pç³áÖ-ßü·Ê‡-ƒìuÛ×ä¾ß²ûÁÐHÞ>%®É!7"Kr‡ɼ‰AS]Õ¿úp~ÿ—t -l@RˆË͘‰<¿xÅ¿µ»Ó>Û—èÒÌaˆ PWïWoK`žó¶‰;±áÒ(ûö˜y_U|¾fY×¢d>æÑ -TÎEW°œ[Ð>ùêèÄUÔEÞp [4N'š„a”à'xHL©É·!Úb]×Ëþah ^1Ãwô…°%û‰'Ãã¸Ï»HB¾jÃA½¯šûEü>Ì»Œèa²¦QþúŸÕ‰ÑOd~œ€€ gÓ÷›ñˆ=6v¶vûNØ–†X1óJA*¼>øJ‹$%Ì芖Ë„ƒ6óžªå—¿¼ø^ü©ZðÑ_2yBÛ† ê.¯¯öÕg²k`œo£ˆ®]F=%[½TcþG¹Ç+*— /ðÍÏ=H‡ã ð\µ'öÃó_vƒR×'\{'A”ÆÔüGìyËjf@àýónÚ’úNý¡Õ5ø-êT@ió‘‘}-éÓÓI}—ò CR•?,hò ˜ˆÈ‡1‘»æ¥‡³†¤ì-ipP-WŠÀ%Gt WÍ­‹ñlÏKžjó϶Ø/!ïÆ¡Õ2Ø[{}ð¡þûpy…uMá®ßïŒu’Ò=S¹É=Þ™¯¼-Rߣìœ@ áRŸ·2½D`‹T¡&n%K;ò×~¿MÔ0‡°¯ö~¯FÃÙ@[ªÒ²4î"69£¸m*.èì¾=b±H}!jáŠÚÞ$ÔX-TøM0íŒËrh¤NT— -Š'¥¦œ£%®”Ö{œ¥ÁWÎÐúæu¥îäÀ’cY³ÒæsJ­¹éDaï±ÕŸáá×sÓÑ É$«(Z~{dû¶Õ›7ÔfjpK1ª#dìWë%c.4û$×,ý[ˆÊ=VºéAñ¨&<å×÷sâ­0É?ÚÕÔ(=IŠrZFW ʨiF:½EÍF`l;µU¢è‹×ð÷I]UÞªõJi_¼ªz\éE*«ŠžÂÅal2jŒÆ#|‹§‚8Hj§Ç¼ã‰ç¤>™ÒÀÜPä+T>"Àñ - + -µ¢žR,÷3¬òˆþÔiv=º y ãÚt Cå©æß‚¸ÁꈠåëûGz›7i$Éï£pÇz, -_ê;1A¨gE¥^x/CÈ‘9í˜ô?U?ûK5TÁ¹±9,+høùáçùÜ ^:m5€§FîmÔ aaë)m9Œž0Ë ÁF›3äIŸ:ÐXøP3Ү׊\KÍ´¤5aRÂh=­Ìú”Ê#úÌïPA»·Ðüf’Î|!a}{þc4Ÿªv~óTÿ•Ãþ3U@ádçÊQÎ7|@…ëx;b¿9ï8Žµ~¼méŒꟕ 
(‡àqIÖƒ^×·ìÑÙZwÐ0½þù9Ø¡fÏJè|*_QË¥>AO{ð3€Hš¼¢æh§ÅKà sÛÒ‹x ¯Š¡}Y¹éwñS&„ú¹(ý\•ˆJ¯€8íEoíÄ'ÒÇPPbµe3ϱýWèê÷É‚JH"_FÒ[Ýç^!áô‚öý½žf003~Š¶±1F¬5W¢(vf±ˆå‹ÐsÈVlFàt`†ÔFYÎt*Êõ“œðLë4ö -žÁ⡯“ÖhØÉdõ¾TO5™1xYȸ’¢Š_¼iz†0ÃÞ«¦”›±œ.Š}¡†"n?ÓÔ¹žÃnŽ_rã¼âºgà?y³C¡"»ßØŸêäûó«Š°¨Ãé^{ºª&Žd2[Ä.§cÚ4Π¼$„“¼½Ôºwʼn¶‡Áÿ÷¤ÑBìÛ3}ûÞÝëMC²Àfï£SL•¡\1^ùå_æf±k£Y÷:ÁèÓÞ®Øñþ_¡ôêÅ^ør&Û¹^ˆeòŒV3dæzzà‡­^k™¹£T†Ðãg-Üß"—£on,ŽàûNÄ@` pï]ª%ŠÚpI"t“²â™ßÀ¨f*󃊴ÑSVÕi'šÜ‘uÞò;Y6|OŽà» ™›Å½méÞß5d‹²ÝJ©Xù„Õ3æ hÙ1³¹Z3ÄüHþ0‚|ÅZ´0D½Gõ„v.jÓÿA—Ø%çß~¨Ì§×úŸ㋵ç÷I!ìÛrÐïõ< - ©s[ƒ¨É¶¡ŽÏ'"¯® ×|-XuãË1H©ì¯j\UíÜZœÓö¸‹%ª&pHÒŸ [*=××¹‹ï«9IV,|1¯M½TIXðl+ˆÁTÙ„9ÁêsçÂÜr}¦Óè¿tŽñ…"KI²×O òŒ!ï¯ï•õTV¡w—–Z>Uš¹wµ±8dTÅ‚ -¥ý{0¥HÉΩܸ¿Zj}÷ŸùŠj¦ÊÄms®—}W/§Òj±ÉZµœùÏÚ¹/ùBCÑdH| žŽü›ªAâqy\8Îè~xŒ¬ÉR·7à@h¿"+=¤ô[æòž”ihæQr?ˆñ9Fà?¡aæôɼïø¸)öÖ9"üa+í -Ä3}ì{I•ŽcG-Ñv£ÂW-Rum6m=¢˜EÈxÒ²¨Ã2_Í(³[¤WMÄ #øÂ-Ò¤éW ñµÒÊåž²©žçÿÔwÛ+ÄÐã~Ú϶·´Z”äAÃ.ò_Ħ:°cåÇ'V©óJ'"«ÆV™ŠVŸâ¾¹)&DºÝ^/Àc0Ð^ŒnÈó•5ª&´0 ΠΤl£(¯%¿o‚]‡¾äb<…¦*àNƒ‡U3$V» ;3á³üVÍГì±Æs]ª“T@vDÔ¥æw€zç/Glà¢|c:i>^±3%|qþ’RŒfú»o8?^4¸ªO1žiÚQ'è«ýHçuæç‘rQ)ÔMl¾߯ªÿ;›ô÷³ï÷LtÁAº,W[³xPctr©€™IZZ૵’ž“j5µ¦æúÙÅÄí½¦tÕgÌô”ùÉ…n3©c/K’̾V¯+ž>?E¦ã^“˜hŠÝ,²wÛ -Ý¥Òñôs¥†´­5~£žUAÂÊ„# -ž;‹×GHjƹ”|.¢Õôÿ`ñþ¿ŸÃæ>ï•Ù‘—!&©œ6Ò2,üÇ<Ô¶+Êëð™Ç¢Ó‰¬ÎóZ ^-u¾ío܆OòÄFþÿ¸ø¥v´-wNÒâ’`qá?Ôàž¶€¼Ö®¥m}Á?öq=m{Œí©¶Fa5WB²xœ5ô„ÖQØ8ÑÆ]¡6S6Þ{[µ·[Kï f÷ø?h%ë?ðã²»UåQ2!™kôß«’ƒL§ä–Ojµ·Ì!)×Wu¼oÿ2Ó!—Gy [4F¹üERæ‰÷ôüÌ”MØ”¿S+3hnîó áãý2‚¸½ªéÆ°Œ ‹Gžûc‚vÖ­Ìd éä­õˆó™çý4¿Ì[±3[fô‡²yé#צS•óV feæ%ö9d;‰àˆ¸67"ò-Ì­¤Ä ò}P<Û²,‰·Ã9®ú $B`?’žëòÙÐæ:¡2 -®Šb'\^ Q¸thšÖ3(+¨{1‰?6 ¶Ë¥ònbÆç,Ë 2ÞÊ+#9½¾o?Ô?=?œ¡àóómGÙ„«!€víÁ™Mö]Í®¹59^qSÞ9^<=¢?óÍCbSep”õgÜ à æç9_L–ÎÜ2´8©¹Í`”o¬p P‰¢»/«-´ä”oYšÙá5ž.½†8ÞÝ•9䢿1 ìQŽë}UGÎ0Wzo:ÔÖ-Õ˜ÇwB{¼á”væ]Ü%DðŠß2@‹ÃÌ>¿qè怖syŽÄÏýLwœw°Kùuìl‡ñ€aäM{ÅŽîÎ¥¢ò½ïÑtÌÎÎFæ§y}ðÞù¹ö#|æE“¾ÞÏ|¬ˆøýú/8xê‰DŸ æÙ¿èhÿ—̧ï^½éj)›œ3Úˆ| DL:óàÊñ#²K(j²l|3á¸Æ¹¦GdŽOãçtêP%eZ’üìLˆÐ"úÑ‹¤vïµc i!®'h}@³v ©Z!ÑÈ_;†Z")—½!Ö+ÌêTâ/Lršh˜]}Aà}çïÿbö ¨¿ßèØËb²÷1ƒ“(qPQdÂöKä…pð‹/«¥Xh`û;­Ýâ·iž_‘åŒô„·å„ômû#ÊfíŒÁÉ$ì…OŽt!m™!Ûi˜3r-'ŸKlÞx”\b CPDãÞŸ·×$W§réÊÎs6Šüâ35ˆIñ¼_ÇÝ\´`·ˆEÁÍïUYf‡âÿU/kz?f+ª)hßÈú#õíKþ_ ã¤ëB/j ~Ý_ð2éYë‹U»°ÀŸEìƒNñôjµ{¯>Ù íŸ ¦A•éÞžÂ^-Ûú¿æÅ~ ¡x±üÝñ— êD¯€gTkç概°!éRó9¥à^Ì×ÚËvËy~hÌYÌKÏâñ´óÂ8ïâo-É3íyÇYÌ…·åi¡«¹Þ!ñ¹oçR@7¥VØë–Y¼¶±Äk?xª¥ÓÆ÷Ëÿ—YigS/håy÷·{f¢tˆ£ÓöåÝþâ/.±üTòÁhÛˆžÛ+wꀀ¯ü)Â^`6¥ @ ò·GJ%?L—-‚†ÌW« *s+Ý®“|ï\EÏQ¿"쇾E_%­9×yÖ°Mœ–ß™K¦9‘ïˆ{=Q´!¹Ê\CкGT<5Š¬ðc£žêL¸ÁÑ$ò:$Ä»¨\€:ò3Ô^QL籿æJ ªŸot¦…½‡N¨YGë}'^z¦:±­!ƒê/¾;®\ªÿ(öº„[oh§!ÞvÑ6~‡5sM…j7R…ÌPrÏ+¿Áæ›=CfÐa—&ElõT-ø–èAÆÄj@ö1Žò€:#¦jjòQëQTr©v‡Y_'UßàxÖ¯`™&]L㚪öÅb `rξPsF[=Ÿ¼‡ÞÀ¯¼¯È•¾ô’€úv.èeSàxNl¡ZÝÝ'"ã3™¿´€b4hÒ¶%Ûo»IyA&¿¾r-ý—?/ eÇüAÁ±Qû†Š…'<²s¡ì3QR•RpkõJŽ@` -ZÏÛ?2WE¡£ÞHctDè|OÞXÊÂzýœ¼{líÎ’ðWR(ž»”’Á{†§‡ÉTsŒLM€ø;Š·6À ‘óÏGódáõ­ †cÔÖ¥B/Š cš–Áñ²´™¯ó©A£lã¶XKeÈ|Ë'KöÍSÑjâYÍé'× tß5ëœñT^¨…BŸD9y»ãƒ™©9—£Â.g»+«àŽCÂüX‰¸2{‡Pd¾ÿTo°µ°–ZbÉl““n©ë\ÄÑSÝõež¹)o¿Ê‘J£‡6b/Ä/;í/þò‹½÷³)ÕÃz†H¥ ò ‘s¨µÛ£h ã?m½ó憹ëBþã^Ÿy¦fåèöÁݵÌLµÄäÇ?ç÷&ÍÜSÉïŸOÞ®¶=“è½ER8àÞˆ¤ÇSWZ˜Fœð¼Ÿr”Ú0æÎ0õSjúãTý5Ë?þajîþÍ«¿í•™»1¨ÕƒT2h5HR¥ƒÿZ³eC­cvw¬¯sEÖ(òç{BñŒ)Ÿ¼'=søšPdNcmw-âݳQk,=¹¤…~E\¶É(ëÞÑ@.}ê7¼ùoõ¬Œ³næ‰á³E‰|MÝQÇ'v¹fn˜æYØûZ#²Æ(ÄŒš¸„\‰šPí»[ÎË<4?‚sÉ,Q£å½UÈ›S¾×ïä ÏYsåÊÑ>ð{ÒI2b L/3{÷ßi)ØÅhN\Ÿ!ß{õVtáìa¼–T:Ýa_ÅÝ‚ÃK®ð ÓŸ/rû—VÁŒÃÔ\°têÙ겜 hLIþ^ƒPçH…íŠØ3‰û.ñ,ÃÜÉð¬¦Ñ—0»J -gáùo2BÞúh:aodõD²´Dÿýý'ûŒ*½A± aÕŠv÷Ìß¡Þ®4ÓΈÿ+Y¯S -hdX{æv‹—Pdµ3„𶸠-Ä¥öj\ÒéÕϘU0‹sѾ³U˜kanåõ³>†¸AhNOdN½˜ç‰ž¿­/Ï4Å|0€$µ• ß‹X ÁÇM)ªé–aûÌU˜Õ¦™dD â”lO2&ì~œˆ->ËðQ"ºS,‹SèLIÖé#EÑüÌœ(Zå‹yÂ#®ŠqŠ˜|Ë•W¥V^“ï逮®0àò;DÊfºÈ›!€mF{\§¾AXƒáóú`ò½ }.†õJIY#\q·rgK±uÆ!s²•nÛ8‹nÁL8GjmgúRïÿ…Éå×@žä„[o™Ô²…v¶^æ™=uC>MŽMO%7‚!œ‘I}ðþ|æyÂG±‰^e\gLÒÝ}8…å5·-Øv̺û=Èt}JΤ"?ÊÀwÎú÷ˆSiŽˆ 'ÕLÍ=VXX s-džÔ«‘Jt[ bXO RÄÔI¼Û²_ä`&R:¦óWÌMü&“K V“U0®(u¸›'º -›¤§Bd¿Ò“n:aq¦â:ÛNõæjí›,nBÄ»¬SL¨Á"8ž³$²§ñû/u²‘Cx®ãzîÖVhOi-Çõ!ÉÖÜïoL¡7>ïý¨ GÄs Í[Ýx¥Rë“ûu”GÄáZ“é–5±‚É^•oøö”O×t?¥Ø#æ©8øPZ%¬î©=.mígbsGFdŒóÛ·¯.Ûз'm½`¯7çÑï:¸:€ 
—G@ûâ]ghÉ¡/3sN)s®à[Pb½<Áƒ‹ó/[Í™•p¹~ÆvÔï°"ávÔï˜÷Zt}ÏA›-<{ÑGÑCç£ú<°æ¾ œ6(âmÏ?T'½e :º€‘A¨c¼ -ß9üãü_ÙC××ÔFDÆZc ý n¸Ö,ªÈ…Çå·ÿˆÎ‘öžöQœ™T$ 0có+æ¹Ç+§SËá(kõ¡gœÀ(ü¢ Öð¢kŠ>UÁø'¤;çñúÆÁäß™KχÔO}‘hjÝÌ“KWË™ÿÍ#-yã)nç&-¬±òªîÈ൳2HˆÄE#®*‘7þ¢ô| hž¯†{ô`SçnÅ}UFby¬òMŒâ÷ :Ùª5…ãðÍâÈ-Tçy{šx-bôž#)>OÎÂÈ\ˆuëñ´fãôã E‡ê¬:V~2%yù±/8A`e–¦£í—cø]]OIDìÇ <æ¦ -¿·üg>/© ŠÖ/7g¤—ˆOÔ“ë>¸ñO=9mî('žùí“×ó¿þ«åÛg)£õ«¡nªÚõNž±Má²0ÿ@…äáóu§1§Ä´r‹Ÿïk}¡Nñˆ?ÐÏ/¤0€ª¿¯4gþÎ}èº?µÌu¬èïÄìT'8ìŸ=Çaç}N'Âæ|φ 9ZîçþžÍ\ĈSµúÁÛ{ãfpe9]ü„·æíl#¼ù:Wà=.°¥$üWÐïÿ¢b(M¿R@z­þ}îy?á4ù9ùD“.±$.J×Ûd»œžÒ=sSü´jªšÇ<½N’6¯òíè /<üWñ`Ëkàjeq)‡X+×¹—h1;èl'ý·¹œãá5éê/•/kX”{ÏudÊmŒNêV¶Ø@?,8êBð<'9¹_ö†Ê‡j V†â*Ä+¤/–î-.ŽŒ j8ÌG|7߃†:ž+LbV9FÑÝN¸<’Ä°Ùó¹ÐÝ®G0@hù.P1² þðÁÇüGÈøð…~Ï =˜~¾gQ<‚ܘ[\ͦäÛG¹>”hjƒ¾Õ¡Êš wúŸ¾àñ™oé£àodÄŸÒæÁÒÞjé3I™g Öç¯x…ibCC¿”ˆÒÒ'Wñ¼¨mλ$:s-}ò }ëc«†GW\fʵŸK¤a:zJhk -ݽ¶ý3¼·’mMÛŽ~` a“zH=ã&Ë9„XÑ9î‹T“öã×g *IÛO|hwÑVrb™URF7èHÈI°.WâúJLvþýê‹0cù:‰‡¶‹øEkÂv°7'äñã‰ù¶!¬®¹îm?åü)¯Uã´€h'Ûbä““µý2š@¨‰¯T -~Yk¡g3^|˜ùµ›ú¯’ù‹zƒ¬FŽs‰WJ‡àà†/%uÊb(›”së‹oi¾hœðÈR¨ü³°éÀm=ÆsHÊ '°þþ²Ò„B¡k:p]F­ÅŠkàÊìÄvÀ›—!¹MðúAˆn]ð&óý‘Ù•z «Ç .¾Úó©áó碨+á‹‘(œaÉ"e2ÏÀ³ÅíÊ•äm ž5DxpD£4ñNå†öäy5#ŠÞa - ™åR¸ðºRêè'Í~6ì ,ßFŸ2 H1"¡¤è=tó¥øœ>×yk,£‡|©›ß‡ I½ð©A[Œš -´žI![ᘰ]Yá1Ÿ!ìíL­ßlIoû:Ê*¾CøˆiìÏ5Ïü¬‡l’­óàäÅ#쪧BþÏî” ¿ é~ÑLøŠ­ÓëÑU –`ùÔÈÉ@I Ûón¹ ”!ÿ·[{ú–d0om¤ôŒ¶£XmOÕù ÁÁÞC¡èé'\Hðú•öñ*QJœ&zTîls´ªèØBEºnÕLd8#':G_µ¨ýŠöH‹˜Q: -4ˆ)Ö\•Þ§@ÁfFq§ZöÀ(ýÖV?ãκXøLÛKç ‰l_7sŸq †8ùàý-Xÿ¹Ç˜xJCE3¢xào€î¸ÛÐÏðeGè:‰·¿8>ÿê/_>k‹±„^ ï Á®BÎéAQ¾Âw0LúÈP.;!TúŽKw¾+1>8_ÈÃ÷+¬hºávÎgˆ¬…-é*‘œ¢‚àžÁÏ\Hð;爠pµf nù­SRq31¯20ßëwX;S†Áø}}ðPo9F$|âƒ;wÈð°ѱPfm`•ÿü„²Ð48㎡N—|ù·ÖŸ_Šëç¢[RÿoN` '™=hë™É@ï^À2ODÕQ¢NÍ™)þ=Š™ƒŽ/ù'„‰NŒÉéYC2§Ÿx'Å©˜ËÌ~ÄX2Cl“`w•¿áfÀm8ßç!|®û=Ê·ˆ´G´ÓöØw)zÔˆ ¢ù™çWÊ`;:ƒ\Hõ}ˆ"+>ëBS%À=Z óýÿ`•d ±7Y¼Ñç¬!Âß+}»ùGÙÉwÐÔó®É’” ÑZ|K‚•l#@øSƒ.kžêÄ›!*§óW¥1­nyO[IyCSÕKÅ®×;U\F# ÚÑv  ¬±^_…bæ †Ø®x;Ðé4l Þñ¼>˜k£`¯­ -q]û•^À&'ÕC8c à/»¦òò´ ;ÅÛñÇ™ù<¿³ä]ž˜’:;¯õ–N4Ð f(¥ï¢6 [ºdÐL€£:Wå Óš÷¹¾= ‰ègÆo~ž õ–peàAéÙ•úV‹ÜëL?P‡ßóc>eá÷3 e0B»Îi ‰6£fâ…hμ&žhæ?{Ý/úi»è¥óefhE zsõí5 åèÜ •™Ç<…¿°½Sö.½ –"¬[æ—CJõYòðÞMWäww­A-hì-Þ¥!}‡›Û¯ÈC»Ô ƒÿþC­dlnçó« ÍžeÁà±Ç<óS[hŽÅÝK2Ã,¹Ù­§"ÑúC g+@ÿ_S¯µæï÷‰ß3æIiw¨å­÷¡Ê­´Ÿ€ÞVB,‰~Åœ1kE™8Œ#1&~î}aaš¬½o>•/»¯-‰b@íƒ.J›„¸ecöPU’ùV!® -ʉŠVÕZ¼GÐyâØ=LZC!RÏ?æR;fÞ©r;×g¯AxE_›+ð`|yÀ Û—j÷ãxfT5˜‡œ!MÔ®iy„[JNãZ—º€-áÈÂô7(hçÛ’8ñÒpP+Ÿgmm ÌJ#b°¶ôB»ÜvU©”^*DU'¦$Hȳú•Dî{‚ ?mv3ûe-H'믮‡Û—õ×1R‰¥EÀD¥sG8›‡JÙ¦ÍIMè†à€Ù^¼(Ã0ïYjÎ#Î¥s®jA23FŒ¨=‹Î‚'Œ€íÑSõ3äkð"Ž8ôÔYÍïU³¬‡}xJ­‰+A¤í"±î–Y•âÄŸ±3øþ©ÞµÂ@è^©­<„JtöH{]b„ÚYþ"¸kʶXí_Ä÷/çÃ/ÐF-RlX“æ–‡UßGŽCÐÝ’GÊci…³©}AäÍ<ŸXLÉybQQÁ4c;9@Z߃AbE’Þ¨ w°†Ì|“ÕÁñ3’TVa½†NW)œ‹'üaò_3’¼u©!Ê‚Øi̳LÔ[ŠÒØ·µf¾v6CÒØG”·¸6ËW˜'® &ðà­lAnp°8€ôæ§Ûž¿yÓ_¢ø_$g&YÜg€lMXúRŠ‰B»«äEZ£ƒé#§Xl·ˆD~û.'¢äJX_C«c¡È§8'!IàÖ¢€â‡ QV¡!¡ÎHð°yzX¢Ó´À´\Šõn_¯¯ÓfêLVJPûý3ý#¦ýtǤLÆRçqHàHŠÚtŽ©üÍkõja¯8 ñðfdˆ?æ‚jb99Ä&—•r«"]5õ•º`f£æ'‘Šž†õ¡NX%0ºå‚)@Ž¬¹7ÑH¬«$\uËPƒš Ï냧ZTØ”>Ó‚NÄ%ï'{0ïq¤åÙº3!/*ìâñý¥%ðl,²Å_RŸ¡M>¢ž¼¢Ÿ$ HÄ¥)G6‡+ú¼¢cÍAáAÒl0äÐ@¿r‰‘nËG¦gt£ ÁÂç¦Z„Út×㪳BÃó¦ ‡f}š5æÀlŠ™e&s«pÀùÞ¤ÒÀ”7—E_¡!¼ÙÜgàôÔ0Èé}¾CC:,*9wx/;:p›âÛ5ÙØQs®ß0ü3ê¥\~$6¼;ðWÝp¤Š=ÚR­ä …R’¢©—#tÌÑMä΃Cë`.±­7 -¢éõÂo¿Ô?ÃþåŒMõÌšfl¯ÖùÈÞôM™ÉÀšQZôuæÕ(†²†æ×´`9·Lº»;4mCóvxߎeßp~t -hño5ä&PÀÓ ®tÍdw†Œú¯99Ñç`“Ò€Y3*l»™Fµ»ß_TGŸç,3êRÙ(•õÒq•dÇSb’<¿Èù²aª!3"eè„Œª×©M`ófé¡RcáÒ¸ÇLµ—ƒŸ/L:o>U¤~Øšñ£6¥b‚ -lkõnáK¶V²Ÿgäic°Å“éìÓÅ$ o+ÖåCKðyjÜxæéLý`V¬Ù{âÈtÑ$à•¹ŠFqy׶¬¿;`6ç?+ë¾xjßsœYÌ^øŠ/ ¬ïÿ²’òoÞþïtþ®‘+ñ0 Âè sus}^’ ·ðê®,Ê™Jœ©’¬9Å^ù -]©æÔÜÑÓj]3눋É7ÌAÊ€þ=`ÖJ¹0¿ê†Ç–“CÖÄÌ“üv|8ZÌæ éš’ÁÉ¿…ôñžô·ô°¶ç qÇݸzœç’;0ÔP¹½‰¥6jEϵÓìá•m‘ -‰Þˆ~E5èÙ²J#ùãk6ÍE*øR„É‚ žê¶ßBFå¢B|Ÿ-”€™ü÷oä÷ȺQöD#>ZïÞ÷Š"\\Rç*ZœÕ a…¦LÁFáú÷g¾ ‚›^ŸU‰«ä(wGDöu?‚1¶ƒë(ùW )«¸©ˆPźáNíR†z¨ó,œIx†\‘‡¯ßG;—öà|yAåGô}îÂsþÐÅß Ä6rPéOÇ Ô ÎÏïbý͆„_ \Êý -~{„òä(}ÍÝp<ÄVãóŽ (š(Fq½›Gt|¥¡oê¶=”H¡<÷ølî®*7¶â/k¨‚þÝ(PZ‘£ÎÜ!bæ%H¼+Ö…*$nÁ˜.Uî?–°¯þ)ye EÛKY.ï^hÏ -¡¥ÅÀèU1[yö¨Þ‘, "®ÆVOÝBGá_÷)Á.î¦ 4²£íXv®K0°›?B$: 
ø[¹*ó+Koã#î@=ÄU3¶Qž!,3ö÷a¾¡½R­«Ka• rHPCèшÛï’KYàÈ\ÔœE=4€x‚âà¥PDºZHÌ'ìÍÖBt”Z.u&t¥Š*Åáèz¦‡a×L‰¹ß‰ &ûUöŠWìP)òmkH6àÜ„z LÒ_…²n=Ô­{ï´vòÃ2QžŠ’µ'`?z®­'I£¡^âdá(µ£­Æ1dÓá¼zìEˆúÁübN˜WY¥ 6ŽØR›à=ž±·ÀvÅä¡·[@Ú\ «ˆRÿÝK­6CϦƒnDdå}îs]é9¢›ïZß›ÖË3ÊF l -šTT`…0ã °¨qLUS̱/“QžË9ØÁáZdµ‹Á°µï¡¯ïµ¬-NÝ­42ÄJUÊÓÖÍn ʈÔå¥òÍŠz¯|r‡WI×µý¡16µÇÖÊJê \·ˆÉ„Ûm£ï¿âï? õúG‰¸ì|ÓóÐ>‰Æ½Ô{5‡uÉà|ÖÊ—Ó–CHõýÉi¼‹nWjÂG¾ùá(µ9aÈ“Kpx."äs iQÄ& ½õ5uµh×Öq/©`¢Ý<“•2ÜIØ&º»sÙÑ⇷-ÊÊx¹.¡ê ±H±æ¨òSðÃLgQ²ô7©ù XƒŽV´˜(áô–UÚ>¶Í¦Q†Èìñ2r7PQØÂœÖ×¥â\{©9ï¹eëïÁFd—1rËv¥Îôà¨ÃF‰S€^gÚ/Nf[9Ï™åEŒ¨/ß%æjfò¸°!MÉèÁÌË ‡2¿s<åyˆ£Ôç7fw|à²Òªr— 8‘û¸@žÑÙ}/Ç(Ë2Â_Ôø.6K@_µ—sQWšCfxÚïÈ>oäñkÙH/¸6}ØÝÝα±z¬GŠ…à}—‚³«®ZhîùQ²i¬q†\ª¯‘ÿ¹¬GFðyáÁ¢'!¾‡˜~•ÇÔ‰´EÛVÅŽŒÜYÙê‰*“‚ų­!ƒD‹Bjz^L"ã\q!Xx„8a:ólHr:"½íGú{üšîV¹7‡m\Ìý=d?Â÷–xî%N;ܦ~ÖÍ™}¡ìÓl|•üyVEW$ˆ-ÅLóª ô#Ö—†²=¢Þ\fú®;!Eâ&†è:væ!íì„b÷=R­ ˜¿E¤TRgæi—"ÕØé6@_úÓŒ;“èݵ˜àaîRf{K’ÿçÍèM¬’xžÇwµ¼øÜJC$ÝÒVUg¦À§Â¬‰ñkÍ·‰«^“ÏIMÊ´¦EÙˆQ¯{ÄÝ ÚaÑWRÔËĸ¢V™É4óY„¨O¶õC”gÛß5FUMÝ|+¨ÍÓ«Áâ ›Òólj5ß®ÃB-{¾&DEÞ6;.Û –MÉ…ðæ¨Z03kCÙ„ w”9¯hÞ`X±bÌ¥¶x‡?¾‚…Õ©jÜõž#ø猪_Ó€ˆ’è%è Ä:w„ O4ÐéÂÇŸcÈu/3am:åÕÍž˜’b~F ßÂKßöeX)S$xù—l(Jp÷¶ü#Ø•m1îeo”é>5Šå6+&’€@wm9'î¬ì!:.O-Ìøgí)¬^1zÅG"Š8*OKå‰3¨É—Œ¤²U‹AŽC2¥€:·¼ë’øÔo1bÂ5k¼øy éC°¹¢xNÓÿ‚·-ÎÈ täÞ8¶×Ä’¦ZŸ0yî'ÅʧÌ3¯p35¿>˜Ã«VÏý*å8 -+Ni±QH²›û  •FäúJV§ÆW‘·ÿMùÛ¿|¾ÔpŽX©Ì éW½‘+x'—85nöÇ5òX:Ìr´1nä8Ûñž¼Ê–¢s°d×g&”0S>Öõf€ŠÕ4Ä·Á“ ÍpýÆYkÄÒ«˜«!¬+"ýÊ[#HXtOŠp9¡6ne, ±à®5D»;Ž÷×2à}LÉ'¨Œ=¥ï€+NØMÙF« ^ -þoÌTò;[ºv”÷ÌôyìZBór•Vöd÷zÍ+zM-שAÚ8d©HÌßÁ·˜/÷f·•|R¤E4H¿%›'%É”•¹ã àÿr)Ë¥öt ±HÀä+3¢ÙÏѵö`žÙÀ<çUiîñcO–2oï^Cx3mqv¹à˜æ—‹c“RH]IsS÷4œˆ]6ñFqÜy&Oò¡!àõ »¥ˆÊf$fÆ{Ö4€!ÕЃ nÈ×Ê×¼,¬Â³§Y‡ø²ÆNíZØVÄC÷bäJóCJ¨XV˜!…rüGKu=WhÙšgí¢¼üpH¥×ÑC›¹~Ü+Ò`Ý Ë­6üùÑÓÍ5¿³Ÿ¡¥,y2²œ4ÚÓšÇÄå‚´(ËÉ,®2”øŸþþ/+~¢I©[}®¢áµ·-þ¾êEph0W™ )m -€¶÷ŒâªzkˆÐxaÄ6WÁ¨7{}m¯I{çÁ{W;ñŠ<xÄ– -»¨%µ°7•Q=ݨúˆL wC©Œ}<Öø;žëf¶´hz! -¨‡ðž«×¥vÂÄùWe_µNŽé·ô¶nXk"x±À•à,Â3e¯g"‚ÁHJëFˆMì:Ù¿J²DZvyv.W“k”f!xn‹—òÆNï÷J-AücèZ -•vvçÝ–#lC;S?ÞF~g.l½Y2­÷;^ Ç`£—»á8b‹ØV¼8t]\Œ‹àµ¾;ó°­®:˜é(2Î}Xá–YÐ+¡Ay­Ÿ›‡8U |½>˜1ªù¾ø{’æ-ŽCCmíjè µØ«$#n¢tª<£ê‘€±A«^– uá·^ÉÐpŽ‘t¶Òî±’ýxuºtÞS#qì…Q×ÞC<çÎ+ìŽws¼çç,M µF /oQx¨,m|®4©­‚¤ÈRo -#¾Úsãë‘4‰=œSŒ(iÔl2 Q^Ü&cï<‡@8á;-ŒõPÄ{¤È2O! ÉkØö*£°È¡ú1¢Lsåöh,™{Pðö0£îWÑÙÎ’¤ˆéò|Òû¤S&{ú¨•åÓE8{¬P!]Õ—±V®NþMw‰`‰¶mºq# ˆ5JŒïë_ -|zÑ1yÒ¥ÿ«Á÷Ix/dSÍ´Ö¶† Ž/ÞÁOñ8i§^²öFî¥æF\âïòr×Z#%ßV8×­;Š]w²RR9÷ ·ŸµCúGš§[ó[æ¶÷Gúšteâ°e ÁwÈ_Ì -tÄÞñ?Üþ– áHŽì,Ô^Úã[f‡½%Ãô{‘&æýìq>ŒÔÿXÇm“¨‚JiF|/ŽúPô¹=Àxþmù€®=ìðX‹ä|: ÛH¯eÍ‚W%ꊙº€M=MŽuf_q‚m¥Så2{ÕIÇ®ÿ,—iß)Ž·5uÓNœëô© ÍF¨‚ÅD7LSW)¦bÛ&YÒÖ9µks°úÀ ûõ®UxN®¤ö¶ÊI Î+¸¿šC-.¤eÉÞïr9YY–`ôÔ— ^™7ó©žÀšÎ†äºC _&:¾ÃAI„Ö\~‡ÛåçæClÒsNèÑcwDÑ‹f«¼>˜éoñì"›É^n2q@Ç eÇVÞé-¬ -»ëþ£Ò¥úΕzWÿ‚„ûþ/Vä÷qà\‘LàO¯Lß5ˆ³°Øét%*•ŒI¸÷žã0£¶mEÞ•>r eÞ*n•¨lÅãz,ÈQ²%„àôK ¹ ö<ú Ž¶iòat*mE¿á$—¸ê*óTõñök¬“dÎŒ9Qú‘ Á«RÿÕ$«“d7iUÙLM”-ížv<½îÏBõ¨·åca§ü*T'ü%x¡úãÐWñ’çÔÆà$$ÿΆoÿ0Tÿ”n²UÞ(Oôdã™ù7"ª bÙ‘Fk4a×F? aôîµ;Í,õYK]`Dòá,Eæw\BRƒáÚþŽKìöZ@gu’h­ÀvÜq_3•žˆÐº|e‰²‡>â7U¡üR?½HU3‡/:A¯Ô¯æ‰‚kl«ìëR³%øÓŠ—ô)5ÒÖöT -$Üšæ©M&ñß[ -Ñ;ÒÃý¬“(3Åñá„\i‹Š×òýé+›%"rË%:§‘d×.gfB~ËÕG{}ðþ;“ÓÎq7R}*íº´ìhzdýS6–ŽAè>m¥æŽ¶MMÚñò K§Î!k^K}>T„&/AÐß}åýÐ@eª¶ Ñ·0Ú¶®ÄâSÃ{HÑql¢«µ¬‰€ -h¾ïg)¬eSp•zí½«’ÇÄåç=æ—Þ}kïq@ e¬šõ•‰9Ì×±6 ®¤É2ïw O¸›Ûe`ЩhŒê»¢e"ÔTœâfîHD4§¡zBúfáiB'€ŒT«zyu!CŒ±*òúàt~q¬{-0(¦Ê^Â]uH Ÿç]É:éÝaK '¾Ÿ‹x'£´ß!üAò毘æïÿòyià$D ;5ú»c{–ž -ZOÜS{³ÝäË>@¼ÈÿÃ-Ïz(„e0«ZS×?œYÈœ÷j>¤CÄ؃"VC=c€M°ª2dV²f-²äÙàzϨd%¿—&Þ•Äì i&"}ÒuJȼ\FMÓ„hø¥s±I4~ö¨çƒŸ{L»æWr&K·XäÝ/¦èU<Ók*êQ䃉G¨Q_MQ_¿øìê?auòØT*ù!KºB…º¬7F-ARŒÞbR™ZS ÂNáG÷ÜkÐêNb†ÂŸÆ"Íí9YC¾ßøô¹eÍO5_Yå\¿²¿QËtå€Pr?¤&ù¢ m:œÅ®GÅ9äÀs»¸´žÐ–Û(‹ué-¸'Dì÷RÈ®æux’íÛSýGPaßHmìJ{UÚdSk­‡ª(_=ñöv4VX§Ê¹z†$˜ÎÕ6ä4¬¯§nÖRtÚöË9*ˆmâÊ+šk&x9Š›ôMFÿûVïÜMGâ´Õ'¿k‚Ú:òLô­„;»Õ2~íe½3óƒ˜ Ï|XV vûœùYλ®óH#åXìá‡zH"¬­¨txÀϯÿÅ æ“¿ãþí)ê.X¶Ë¢4™›õüêw Y¡¹¦¡­èñ‰{jáKa«r·¾bÁ<=ž…¸žÈ”ãïˆÄ5³ò­úž<ã¾—]O^ ù>p·`w_3fÈzëÝë[)‚mW´ÁÖ Ú3>Qÿ¥Øf¥ÏO"ž!œ£g¾"·4·ØÑ¢ŽlñœtsÑ[ïož@±kÕ|îºcnƒ¸¼{>úiÎí­¼ÔlÉ.' 
‚1¬¾L ô‚„ÒÁ~•ËIÜbí}íÌË&ò ΄èùTwç:ÑÜÚ aæ…[™jéR³¬Þ|S!© úÜJ=Úþä\Év@ßrØŸ’[œ_dÄNìä´"ÑÖáú¢_iwfG;½xܾ¾fk¶¹µ¶òÎwMù_çuëõžlÚŸ¢¦šõzÂ(êµÜÓFŸ‡-$sL²é/èÆÿ\^¾ÖAPp¼¥îÉ(xoRZƒpßà) ïwÇs–A°ÇÙy0"j4VSx92¡’wÿ묄ýÆ´Üñ^íôaà¸#àäv4¤(û,AàHX$}ï/oÊ„¶6Wš’P³Y;5Yãjxî²)6¨A^!ÑcÓP·ÓÛ`ã2¤**­¯IùÛðO…Þ QÓ"~¡£û½Ü6ŽàpÊšç^ƒˆKªäsF¼H!Ó~=R±në‹Ï7ƒÿD†èÊ@&âR=~^#¾×&»¶\PÈC"v>¡L×þän¨¡²Éûs$Ü‹'8­m)J½HD˜'ë®XÖ¨ìƒ*ÇXBßö’²x"ö”ßé$¡Å|qiŠ”`Êé&A é±&¿U•ï×}õ:ð%Àë×ît-„<}©<åéžC -z»I‹mô¼ü‚íiBoÏh_dRÿ:&j\Ïܶ6ý}G±u•d槂 -º¢Æ%Ø(¾]™hQDñI±Vm8ÇXI0qÊíúqOzÄ;CˆC>H(¥WŦ(~¾Ê[1”¨KTô?g>Ét랧FËïxÕúÐNCì½ÖDƒ5œG}ÛA‰eµ5„rÇIš³GßÍS ¼iµïøwΔ¸ž‚Ûœ¼íà@`>e9BüF±ë¨v‘-¹Å'–ÚɯVƒç×/1%×Ïœ-…t ÖÀ‘Üc#=(qÖdmu0aÁJq†\Pè¬r#ãN)úÌeÂÎÖÏ ÓOÀŒivôÒæUKžª¡°Ôˆˆ[Qwš¡ä¢²ížn‘³nufC1ÝîÕßìG‡$Óù˜‘ý®ô¨uÌ©ßyTphñ­,o38w‘—yh”á~wwHµz ¶.Q—®£ ‘Áù®á$}…+š ­´Ðà½gD ÕrÕH+Í!:?ƒBsÎJ±ôÁ¸Ë7hçºÐaqúXÓ…P8MAþeÔ£ÁÅ 8ЂAîñ‰k;Ëîç‰&êÕ¯šß83º­¯—GÍMa–‰ØZ&Úñsj}«€—¢Y½y‹-7ÛúëƒEû®µƒöal‡²5  ÝqÚVj{ü¯Ù -îå÷]u@¼Þ*¶ß¿Pê¿ ýÂ6ˆ¼Èž0”±æU/äJ™N¹&–-"±1^r«Æ(T“³¤]æÓÎg£‚¼‡Ã™àqÏaæ#LäW0S¢Szæ\Œ÷Rdûg´Rñ6ÍéÀ!HR%TÙ8>{ Qù™P#=µÏqz‰iKÞµ#سcé“ÕLöã´ø¼3uŠQÕÁ$Ü´G+gG+yòP:Ch[œó;\x$†˜+ù C”õ -¢Òœ°³P¶Ol.ê½~çäL¥ôÙò;T‹ønœ!ÔË!$tÓ‡ˆz¡!ŸŠÄÍù•NÝæJϸ‡à–™ÊeWaæ0U1{œyCT=ãWË}cŽ8¤B˧¼,èHWr„·¡àJ]ØàB¶{läT2½Ïø:´˜b¯9Ñ_,¬#Ž¸wÜ£lfßξ·ÏÀéÌ É`EX¨8zM>«¿ð•cŒ«ˆ’Ô6JÞúÄò‚#ؾ0·í¯Ž*“ó—8D^ߨ«ö'ôz¸]á}Ô0Õù¶'罶\BåÑ’ô&˳¬Å‹Šzõq®ÍRø;¦+Ù®Tx ÛsçÊgûQJ³Ë]‹ipQ„j²wGlàÙWƒK´?ôjçKY§é¨iŽ»T_ž‚Pbu).VðŽ"ø¬K©P4ºà‘¦’yù'œ–”³yãÙFS*JIJ›²’Ýa+ë½PIv5ŸõâTz”¢®QÛ®ˆÝAn–]FæEþZ`ú,s2¢7W‚ç\ñî]D›+cD¤ÈsýLßâ9N9 1-dÜ9MCtÇT¢"ÊšøEu0Pg¨ºÊ´u¿ÄvÞüUië'fSñ~Þò¥µ{àRs‚oÜ‹4ª×a½i[ž¡¥ù™¸¢q­h)ãN‹Šï^F|PšÛ¶Ž>ÚR…ù¢ë|äjFúà*1ü 9j–ˆ«œVçCîOe¢¶Ô™òò\ûf¦¿SÒ0 j•™fV$PHq”C^ñ‡OÚ¡…¥~Køv²XIH·? ­¿¿†È¿ÐþZS(ÍBº1u_ƒN§=PƳb ËÆ<±¼iSô“›ÝIæN•›+rÛ±ïõ¨ùw­ÇÔ²»Znwð"q×!®õ8GÌДFYªHt ÐOîk„öw`Ç åâèöüM`âNžB¢>ÐMÒÅžêíYלE°»¢©/×ÅÌk¯Þß?Bð⎇BÌðª[q2ߌöÝh5½ð~ÈŸ«Îš;§æ‰Þ^…+Ñ]bÝ`¬ éK”£›3âdƒî¸T)Årù¢4_Izäš ,Å#IíwU ’ý+Š’ ‘hÒ˜K :/";fÍm®¼ô³šO\2Éé˜S®Ké—]wµx®RuQ³£ážŽ½-ßËiËƆ&µîéA2%Z±cµ§¸W4îïWÙg#gSøÒ-ôû<¸çaD…úÈï”Î)ï‡}5¨ö°é4ðØŽI¬2}D`ï …û^—`k$ß¿³iÁEÛ§çS'¯IRÖéî¶Ø®1ƒGïÜ3³vBŠ P†Œm°ã¹’]_ø7ŒÜÈÙ‡zµ%É•°”èjr5ö\ǯp]ËÌ?ë7‘е¾ÔR/ücÍ ›JJû]ÈÖ–žjÑ£~F™†’WáúZ|n¡ŸíõÁL·"õ¨U‚£˜ùuhéù¦C~eAèÓS…ØL;ñ¸Ë÷Ž¨uPŸ -Zy¸à"8qªF…© ‚ªÀ ²±mõ’4ì ó$:÷ÍR7t¤EaQõœ¤?æ1Ö÷$\nÛMÅ'`ä–\7Cˆ)ÁˆûÆïOçÚU²÷Ä<̯‚²ž)…åɼ sæ—æ—øÿb‡ÂŠ% ¯nù%V»ç‰­Š}šÊÚ(ÑÌJȲoa‚“!Hˆ-œ‘4³Ó@x Õ-/h±';ÌX\º£ˆÌr@¥÷˜¼¢¦FÝÒ\¯E·â~}ðÉWÓ±ÇÞhLzöa@~ ©Ú{ìàŠfE…ÍqÎb˜ƒg•­/‚Áßÿ%V>=ú¶§äú¾ÞlæŽ3Ø9 ±òa;yG ¹í…ýG¯;Á½BSÂW®É{é´œ/à~eˆ Î_~¾)ŠH"§á²À€š¿£©™ž@ ‚·“ÒP7¢Íy…øä+T…«ê§,;MH0P@é3ygM0.­6jMšŽz¯œÛC…›lï¦8✉WzŽóÙ¼®c+cb—î¦ßæ#›ð|JL;b‡I_L¨C0"ŒÐøÌï@ -]7ê¢`žWÁ$ö m‡Ø(À™) £ƒ}‘„Í»P©Ð%áx»Ö¥¨Hº -¬_XY[Ôå+@éô)ùl½ÉŠ±IQû† $e ’ù’…œ;bŽ”€¦×þ©/§Ztœª^oËÈ~ ÝÞrÇ8RºgmÃWÎæ„v4Å_L¿„HÆ/ÿ÷¼ü3} E¶"B‹ÂupÀ÷;óå‚ -Oÿi¬!ˆ—çɇ,{T|`ÅÛVì®H’X¹¢ˆ¶Þ>ÐÆœh¤½‰Yž©zj¯}ù,GxkÙyAG­gÔ6Fš ¿%ÇúÌœ-tÌ©Îe¯r/½öQÌíù° />W½#±ä\H°u%bâzjxÙ CREé9+Î8ÙÐc+Ñ×Æ«yƳ>øwˆ µ‹ ~9 ž(²Eâæ‘í@÷~NŸ8‰|´N2¥®è÷Ä|>:ìBë–¢/&Ff­ýJäyÛ -œ!—ö´þ‹Gö×JÄwH€üíëø=ƒ65ºHGÁªx -ÌhcÆ]ìòÑòüö3Gžâl=æÖ½ÜY £V&’<9ÌPVjFœï÷ÞáíUÉû -³ ËNj$û­%ÔçˆÕ{"*ÿ^·!ùKÂZÁò3ÜWÐ%J!t‘c«‚Ö#¼/€ÑÚHá”n#™Û­úÞEÑ­ÇÖpÑXŒ`û4è×b—4² ÈT9%û -ÇD·Œ;ùµ0^½¾;û×ÍU泉[xÃS8ã´Óöýø+Lbìäþ\þýþ/fPÀ|G¬yçT²'¤›9TŸ³hnTàÎÑßxJ°+>¬1ëÑËâ!7by]ÜaMUçŽ j ¸e/¾dnsßî#Ëü*ÿ¡‡K’ø)ðòóª7“Ô8[ßD;ï¥ÌUjîi°¦ìòÄîõüÆê(…§xá h¢«ðô´H÷Ø_¸ã¸¿ž:3"{jÒ÷‹*\Ñ#FKtG¶xˆ, >ò €æ¹RÄfi(Àß+ECXÞI·g@¨ÓEå'Â"5ƒ*Iq–ÆÞFôè¸SJ~³+ŽÈ1TÆ£uÉîÃ’=žZ‚Œ–朸0ïQ’Œ D, Óâ‹Âu¿Å:+Z}øP ]®ÄåIäêˆ)Ÿ¥n‚Îkƒô]>•¤õ(reB–ˆÖéUoÅ$MpŸ4¾æ* ‘t•žŠlô©0â*óX^§g.ÕÈÁŠ«eŘ1~R7•Ñ甩6+â©ñ“¢epAÄžÈÅ©«™÷V.OÀ„Ú½êmû(e:í¥´)3Oß.ÄE¹ÉYÅz”vô 2äµ½E×U^$ð¡@âMü–Kq;,*‘OþVCÈžÏ;ÉWY[šPcã+U:²Ñ””f^ýìq0½BÇ#âµE†„¼©¾*I‡‹J\o-”ôεúGK³ÿØK¹Bà¨a¹…—œ[6ˆ¨Ê²Ÿ]$dÇ;ßVCfÈHœ×ßê­û.†‡ Uì*k‰œ7ñ¢'EFÉÀ\c7_¨ž&iW¿B¿c—Ðþøùÿ=!RŒ|~”2ðÜ -dx©­êb°¬-r/Vû¶G¢94I•CÐ5›*QE¶ã ÐBïá7NhŸ5$ÿyÞÅSXV²§ÄûGlD2z~^/g¬7k'Z:,Ÿ£N’ N¶é{¯Â I`šÊˈß|­Ì‰¹Õ•æ¶ûÇ6;Ï>²Ñ Yr!8ªa[j†<°SêÑÿíÛ[ÛéêË*ãà–H÷ é,ì¼~ù$5BÞ Odýÿ„\=îþòöþÏ'ty +éÃÎô8¹ÆÛuKCYÛ¸¬Å©t´÷¸E 
ZÛÄÃF%Z̪n‹êŠƒÉ%ê8—ûøManÎbaGèœêßb0¯,ójªÿNé£T‰ž­×#nåC×¥‹»EÂh -~ÇǵµñÌM±5üîDUõKž]…ŠiHð{ʤDLZì|!z†¨2Çi©%C¤¨©TàÊŠl`0TÙÄïõþp Ñí–‘u\±k©¬ -D+] )×gÁÔ?@Þヷ_SêÔ™'‰îwbЙÃp’Dæ+Ælþ¬*øÞ¾¼CÛŸÆí{JqÛ¿uüÍ” -ß›ôÝ|k eq¤®˜Imj£ÕÃ.]Q_'ø|s‡Wï8V|NíTŒ~??ãó=Ùø^Õ™° ç+žWCWF_Kœ°kƤô^2BÀp‘½Ýè¤Èç{Íì`n«É'|ú‘ŒIŒTÖžg9bØèž -IÚ\¹vËg‚ÇHXí}]Eaˆ&‚âŽãvÞßÞÂeÔ<Š?7Ä󋺢—lH<Öƒ}:q™Yƒ2gY†´´>ÊÍUµ„Hçõñ¾R‹°‰Öf´OçöЬnéZõ3‰˜:Wiæà- CTå#*] ¾ž‚Ú^õþè2E €ƒÁ]UÉØp¡ç¤€t'Ê×ÇK©UŠEATM³ŒÀžGf°tT©i˜jgä2+vÈÔ¸Y%Û;xt‘pVŒÚü€¿™QÍUå(úÊs}sg|áÇmgDïV5]pâ.öyò~kEùÖêÁƒ†oõ†’JGýŒ®'$¨˜¬‡qîéÎjys—¸D"÷4¢ÆÉbÐKPì°ÌƒàÍZ0~ ç¿Bv$¡O¦õ9—&ßPàƒ^¦ÝH‘–¹©¼ïØJxÓx=¨öìÅîHóD~÷ÜÕn Ñø-dâ¹{RÜå qWgƒ<_Ÿ9i™JÑŒ¶ã[ÃÊíî@P­ª]§ i~’+‚Kþo¾oûªÿ¹ã>O›?GÅæO^êð#t•/—Ùˆ%ìeºj(ÿ<ƒ·«.å9g”• íy«]莪QÓk¨ö÷µGkºKè`ÃHŠæŸü#ÊãÄØùüEöd^JÙ5ÍQSe ð%h?—F$µv®ª>‡Gò#-×Ò›3|NÑgË×¼£s3·0ȶ%òA*Ÿ Ã\׉|šqauP{Z ¤8"1s›ÐNþ"aèûýØêõ ¨nVS–00íä\ˆÙ/Ê_30›‰ ¾Çs¾yò --öUÒ2šÄ @S¥ºö%—#JSZcŒôÒúÖsëɼŠÖÏRv®ix¡¼>½~»Ú ?DÜ\@ÔT õ£ÙÈ™¶±¢°!ù½™nåÑ¢“'nà£~E ²ù~{sòŽ@­XLú°¾ÚüÒ®^Îö˜"LUë¡.V½Ý~Çp¯–ïߧ³- ÆKÑáT§º˜KÙyåœé¾£‘Â.Ö”ˆÈén±.­Œö3*GŽ³9B³eW{‰tQ ¹á„ƒ Œ0C'õŽ@Ê~!‹eC?#ó2'¿bêQCÎ ®³>Ÿ×AÒÊDZ4ÎOîű óC r'‘ƒ)õ¨¦¶‚´WŠÈók¦/~‘Eùþ/«x,†UAvhmgœØæþ7¶Š÷}™öHTk;ëgàÚ##!ûÉ )²÷ûjÔ’}&Æ¡ÔønºCŸ±:{wÝ·ÀWÌ6ìþ¤–ž½B‚µ¬Õ¢p´­3:_ –F÷e"ìd߸bÎÙͳóTÁ -«ìPä}ÕV…ZsGcÙÞÒÕ-œÇ.¢,sò˜¸ƒÒ£­¼öè‡Q  W:zŽDصÓ#;C@ŽòØ›/;B<z©ÛujZé<‰ìÀ×8Åu¸·¶˹9«ØPG†Q—†7+¨Ì£+E‹ + âDîà‘Š¼+g[seòæR1S¼ÎÐ?Lçì90´§.DÕû¶Ðê[òÜá–œ©AJ b—AÒPðÐhX¦“ªLÉÆ[{(aÆ¢'Ÿò˜:±ã(†§ne>Õ^ý‡CK—‹ÉgŒ®qšôb:r¸¡#ÓhŽ¸1á¢ðÕ{jîaÿ3CL?×GƒÜyXüÌL# °ØÖ xšhwå‚sêX ¦›H®Gªv‡Ï†Ñ©$˜øT°zÙÄçîÄG ƒfQßXé¥ÖmOåÞÛ;š3Aò™zÔM/JÝw} »¸zWòš ñ‰ZX¸ó¡ØAî)]op ð%é@¼B5ùˆ%tÃTÊÏ(ïÌÀhÙÎÄ]ˆ™Ïx"É›_!Å'àoû -O»:ýƒ÷C]qkMmTÌm!ÀtÌýµÕ¥œ7JàåaÑ"=2 UÂýõÁŽôOð½ª^FìŸ÷…°øu×W¹@íŪÆJ"yüÔã6iº`²Umß®s2!+¥k‘㘽v‘ó¡Uq`Ÿå#E.˜pØ£*«¤Ó†Öqœ«}ÕиMg¯ëÍÍϲV‹:öu;Úû;T}:¬VO¾6é“ß9Ù"Ä}òŽBEËöšUË]%¦tÜ2$»íCŸÙKkAõñù›&=>A èêÌÐ˱¬| 8{ªdIðQÕŸçüÑßØgqjjiìk;ÓÙYÉ”×no?Ë¡ ç ÇÊ%Š6ËYÕE8ŸùS -OG%1i ›(<ßÀÖyÐ4}”ãW¤)7 ÚÚ†ŒH/bzlK¤kHCA!S±ˆ4C¹èB­!g2fø_Ž£QÓˆrS9BÕ¥æÏ:Oh{>1MBè£ÔɈsÀ=Ï냹¾´ZØ 4½¬·k€FŸå*ø «ì“¿XƒGL¶Ïl¬þê „RÞ¢“5æ÷½)ä^jƒ¶Q´2Í!ÛÍÈ™é]£¢ØEmS†Ýº8ôD/Û*[Òæô²­ŽJHD#¹­L!®•î-go1AiÁqgH*›ƒÊ¦ü¼¯ØeÇ*p:ã­—{3Ù ½k®žÅ¦×[¸ëàgÓ@í3Rk"ï½Ä’¸í8êfDEã'=f­3ÌYœÍ¾¤eCÓ €JúÁC%RW‰‹ÚEÍŽ&ÉiFÌ;–õ=é¶ÏŸ¨/õÌ9Fåßî·¬ù ¾ª)Ë¥†z}¡‡õxåS؃æ:k®‰X¹Qh•"˜ö"©Œõ+Qkä-$m+fc™GUd¤eT¯ÉÅ´L€Ãt¤nYeÑ{î•`F;†Äõž”„¼xÌôÒÍ¿ß &–YŒËrŽ?$ iª‡¬*ÅTÞ$k}¬ -st^Q}íû©¾0¼O¨ý¬&ƒàÅ›¿´È÷¤/¯jQ5‹-ZŽÉ”"¦s`›(ÿI-÷³¿X-Ÿò¨¥§^ˆj?2ƒR†Ù˜@„ý¸é -Õ A­F¿¾‚­k…çQ-ñX€Ùi+kxöÔF¶8zmD¢–à~OZÿ„¼Iƒ<í§,f µÅÙ°VÔ¿)¸Œg©íQ1Aü¤¦ôéz<Ó‡‘€ìû¾DîÃ<ó½Îrv–¯Üù£K¶'kybô|%/ÎdCO¹‚Aê±ND´†Æ‚~{oèΆqTK éºÉļKîÕ»áüL\Ø!!ØÝ[_¦|{¬ÚU‚Ǿ*§ÉA‰«–¾éG‚&cŸ¢`Wy»s"Z$|¶@½£Ñ<ëaÅ$m­G¼g¥@'¿¥A@ìnî2{-6 žH&ð qí]€oìˆQh›—{}0ýÞxK"’®¹àZÁ+AÚ“²ä/¡Î*‰oiüÇözÁ†^ñæ¿ifÎþb• õDZo€¸ÒJ­í¹õB—s¯r¸¿…œ 㸋PͲÃA”›ÄJ•Ù'émí V̪]µ¼½ó`òyîêh?óà;ö¯Awº¬îÝ^hèköê÷U% }Ï4ip -ˆwŒ4K 1Ä”ù-êUªì ’í°é2&'Þ”cÖ‰tˆ§Èª~H]v<Ý*ŠÎ Ná%1Òø€<•Wx9}ú’Á¦Â=•ç Ø =Æn3h]){‹ü¶ˆÔlšé£æsÈWŠÿ #Ù°ê-Z‡äôß^å5—xòÞ€;ë©Ä‘{-Ã$Ñ3IÌ1Öxk3É ÕSÛ*”Ó£UísV¼›‚ÊCM´à^g`ZY–TXôi”F¬ý±¶¡ÿ<÷Ö -¹·Ä´¾shpcØl‹3{•!ªˆº*R{sM‡›äë:ªã£¿$¨û¶˜ú{jù;€yÄÇŽ÷Ö@»–ßÁµåV¡š»O•½‡†ÏÆD¬8LÏSZtGJšUI:'$»ÑÔ "›p9žm¬ÁèЃ̥Ä)ù€:8Kàò²{±'GIS£¨Zô®Z¸È$)E;  -U!†l¾I“Ûu;¥b-ô”‡ˆ! -» -X«‡ÚX,ÅI!ÅÔŠÑÕD¾°°4=©2¡3˜ž˜ÓCÖÖ`‘x­ò\Ü\ØaiÿQìíüè/ÙÓ»<Âh)ÔV2â“Œ¤T{ _$YÔ”kevƒXØŽîWîB‰ÀnOî"_i¡mW5Õ%Æs—áêaë~g«skÐhi‘mJJ»Ç¯]Ãjè(_rÜ|¥–!_eÚ›í‹W0&‹²lOGþ!¶E¸PÉ‹‰p:¡úYEŒdé™Ä6ë> _'yÇ3«Ü™¨³¾Û}¤yäJV±žhÔšqïüô”[ßïø»¹Ô‡»¸ÿéqiƒùÐàc¾÷àˆn¤"òf9ºS8cX¡Ew¥É|&Û˜¥ì«ô.i¨´Õ ¡…:^§,œšm’mŒtï®G€/}}=ùlL@aü¥u¹ ¹¨ jYÄ+èÝíp¡ŒQƒÈʦ{˜ˆd~·žj‘2‹/‰&þ=µ£Ït¬Úžõ‚8öx$@ÌnÇQÅa‰ÚÃvEA4 Ǩ(þ%š¨NVüÿaÓ¿SÕÃ1»HÚ 5JAjY>Á•fžÈK¿¥Ù%¹JdrƒöøêÜNÓ^Í®r±’’Ï­P8¶]6b.‘à— OÖýÚéX80_ÌV6à ÑÕ-®w‘¶±ß-­þ¨§ át"¦OÍÏù¨: ¡ ‰lŠ]„M #æl3bF2Ù™ÇWç´V·Ô. 
«:ÀªÚõ £v¯t‹mßö|(j¹¼5@ é?†êâ½›ku%}ž*¬5h.æ¨þLÅøÒÕ“®EjH"žAZçÈ´:­v§Ž£×Êrh*— a‰älh0™Xç¹:æ}%¦pyãó;%³Q­r¼@‚÷là8¹¤Võžý®øä––Ìz&[‘|a ìN½J0BcdÂ#,+“™'Hö À4ÏBúÒçpLªlˆ%oû7{˜O0„3åFMQ5-¼Äd-åï=ÞÕGÚÇ\3÷ -{Ö;¨²º‹)ï)«Ô -3¯ÐUu;$¢LIéõÑò Ã!ˆc{cL¢ö$o«æÅ8Øi»®_Q„¦¿ïÆDúNzƒ÷z{ % |ÏÔ µ(O{ÔÞë+4}çÊuV—é›êUÑ -Œv鈶×&†*Üà׿lKò }™`Sÿ*«øÝRƒfåÕ£U«?·6ðùN~4WÜ—²…c9ëvXâÙ^æÆg}€Q_9ðØž·´{î´B!®$ï‡á6k¥ý×'{™٫ÔFØ"Û¼zÎV¢vw€©4ù,Â]û+Æ’ÙV{‹L&$hÀ7¤8Ü £ÏÕM—ôFo·É¦Ý2#fhžC|© x:šg†´›9ØbQ:'#ˆKˆ5DY=z#ûbùP7x˜e·Êm\Êy8_Vô MÕžÚ8çp(øyÍÝ­:ß|«Ø9dŸ3Ç{oÙ¸Ÿ(9=vr µ9¥ftù#îGΨ“ÎÈ\÷ ˆ#®q3¼Ml€>ÀSúgÈÒ(‰o!ÀÎ ¸GþEFâ_w ¤/×Vn^÷VÞ¥NŒˆ‡Ì_á˜ËL©÷½fH©µ¹Ñ£À™;sÐRŸÝ+¸t˜r^ê ‚yô͇“ƒG¶ö»éµõüJÕ…þg 6ïÑ‘q’n/ÑyãÚŸ´Ô#çóÒÚoI‰aˆæøÀªx@V¡Hÿçt²øȉû#_ Z/¦e7ž¾KÚoR"+pò1÷8¨~D -š¦I“ îˆu@|xy{„òçºü!˜Ë3/ùÎzO«gÚPí²›Î#CdK!u~ÄžC7È|ÿÞ“ºë<ìM‡ ê}PYîë‹0oÚj>5Îl7QéÉ´‘‡_ ËôýÄ{#·G”Ú7N­¤azŠD¯Gæþ‚;ztÏ#(ƒ„Z̼þÕº ¥­ùí_²çVŒO}îPýkÿ{Æ4`à ÑŽ»R‡#žÖ -.¡ŸwTÚ‰‚‰Ê$x\ÏÊ[‚êN÷Î[¤?¦N%¤¶Z‰ AF;ç«~´-Oœ°R¤²s›÷’õ7M¿9ƒ8îû³°Ô 7Z$“ËnCZJ‘ηsW?:çZNÕ]æŽhnûÉÖ9‚ÁÉTùÊÖQ«ÙR¦‰»½áƲKd­W˜»4º7‘A yµ±¯ýSR"cCŠüw:Q9&+XqËžÖâF½b@Ä°áû9ñ.ª ®‹è¯cUUª Ü™žÜ¥KHZ €PDÉ; Ž1T»«ÍýG3»ç£¿,¦÷ŒÑH¨í=še;HVâœÓ[R†´yFYà±’«"d°ʆè>þ.`y’—”{Æ}ˆ¨c¯©€jÿ¹Žºû¡Â“8#n+ÉɺeˆŽÌ³—"aE}„ãa>Ì \I{~SaìP“Sy‡€ç(7-±çãÝ°×……u,°9|¥7L롃Å×XZbQ§Ð–à1ë»*yƒ3\¼Í:ThQé+äê#±ã£¿$X»SaçYÖ°xÂTE‡ù×kC‚@¼eáìæ—¼9.…º¤E²¥F±ía"Î!¾Ì¥—ó­\| Õd£”ƒÆ /;rd<+‘ÜU¥õSG9!`Ï1ðþlšå‚CvEª !FBÛî ó×(1îÙCTÚÝÞæœë´UûÅؾ8®4-t5·Ë>¥’¥ªBPÎ[êAVU D•Ê™§Å¾~ü¼­ós……3®ˆ›¥y¸ióé.Ò®ßéh ±ï…u|H†B î«Ì|ªµûYd1i›&– b ¹‹òÇdRÒH†'•ñ\¡Ù.é¯h@Y}}oæœò 3%úïfĽÿa*wxUM'xÛ -¼Ì¦£+4°î¹ó;ÀÌݼyÒø£ ŒôL}‚'ŸWM3¿BuÉšºÔ0¯µ›ËiH`qû(wPt)7ŒØœˆW=EÈ9'Y6UŠñͧZ­¶¹ÎEÓ•{2Ë.e–uE<'È¡=c‰¯é°!ΰÁæÆòë/ÎJŸü%S*.›ð´M¿^nï-ÀÙjF!ýy­¸—‘ß9œ–ÔÎœˆE6æ2ÏñcdFyçu.NK–×gŸuî -üëí ÷°æ""¶/5)®Ñ -AÜÅꂺµÇØæ­j؈öìÏÒFì¡M`ÕoÉ°y˜lÇS~¯‚Ã_(^ô]£¼$ -á˜=·ï#÷9DhEzwÞõ"•£Ä(àUb˜ƒü",q=’½3Öo¸5Ç‹pöI™¢xØ›uMëë~1ÛSKªùíwz—ç[ „ &/³)åÝ´i~ý†kls - Öÿ$|}ô—ÿtªj˜`9V—š¢-½Ú¬­Çn3ŠºÜâ~òZÛËlž.ÆÆäàÓµ”äƒÈ®Ýc‘ |¸9–™g@$n4À³Îb4ÖÎⓃä'œOºŽjk@üc,‚ÀžRrk -Ø/i![÷”w5:wZ)|+‚-¼–@îzc¬csZÒkÈo-4’ö¨#º%ž¨‚dÙݺ¾g+ÓÂΨ*&*©¶4Í<¨]ÌØåk¨ ÀÝoŽÕ–Ñv¿·ò5ÄáûŠ©p… -ÕÄÄ ÎüNT?¨RaÜ0ïŽÎ¢`hÄïUÍ™#Ý3äùQÉ3émU±ÙÕhAc›§ÊÊüfS».íŒçŠÂ“¨ùybõ±±=Έ£ª—ßJµÒ%ÞOªî<œø zÍ1ü¼í‚9kR˜]G£{R›$øAñôLDbJ\`†êÚrÄ“KýWzKåÖµL¬GH®½ƒÕáõñuÃùÔiˆ/~Ê2o‚T/aYR—ŽăS€Ò§¯JßxÂaS¦QÀŸ¯ Rȹ‹ƒRW’FR05El¤™X†šº¡G`@qÎEÈÕ™»‘ˆv@çíX<“k~ï¤<èÔÿšy×pîfÇ®Ü4ðŽ6aÑùÜ„çp °9ä²(Ï]C½bb®d«°åÐkÝë–Òï ñ¢>óÈËùfx¯BdQ@$å.kVyÙLD#yð¸Š!qŒâ_Fä;’½6ƒöH£Æ¡©D¨šS3Qq}ùi@¨î9KÓxÔD2Ê·RÐÁÖÖËizfA6¨à¾>hã̈/1•?„ƒ}ö¡$ÕÇ-¾ -¯3ç(iíæ«Fhñ€ç(Ã/ÜOâV`ì€{Ÿ˜Ì"²àÂŽÒ -î)ÀªÚùç U P•b²Ð”½Ì„穲k!>/Έ®ÕÒl1Õ#y( ¤ý-þ¶ð°î0Ç£ÒT@^º VG$Ñî£tàòP PfüŽÑ³¥RæRÛêJ'™ìÊ5\^íX V}`)œ®U´cÐ{ˆ (Hk¯H~‡sF§u0â­M—bšÜXœÈ(4KÛ º×ú¥BŠH\/!¨¡è<RM`SxÄÚN„ˆûÚþæêâ - -‡‚äÛŸój£Eh¤êѶυ´Þœ0>CT˜áë0œz ¿G»ž½Ä×¹×ñqÓ>Œ¯Ÿ¢3Ìlˆþjщ¼c‘yõb*<êžSë¾")7Ì©“µq–À@~ŒT]4ŽÍÚDf­a¿ÐQ(M•Ýww}‰†£niï™®ÿŸ¹;É–\Dz;›ƒLÀÿ" ‚ ÛÙvNÀÖŠ–ëbö‰}.Dí«>5’Lóð"ü9TX¡¸Å)Ž’ÿoû'þêßa© ¼Ž `fæ“ì®ç?«®<_”£µB$VœôæwtÂPË£q¿ìºÆ›ð)5ôÿÚ£›Û¦D™!=&Bú==½ÿ] -ä7¢iîõŠT=ßòXÕ Ëû ªGŽï²’Ê™µÏ}®)UA"êl™ÄÞŽ|Ö[Â…ÉØ¢e:C!ú•¶ô=9ü'·‰£Ì çï\q»àl®øjHÜJ´êÍ§Ú `÷”‰v­_qü8r{iÁÎkRH‹Ãã]?íL5’[ÅŒëdliŠÅ.3õØ:ó¨±^ÞŒ¹uy/Øãù6Z¼­vBïx1öÐUƒÈÒ!¿çï üÖðÅÛò=$ ðd™ù˜›cc;έÕÁÏúnW]w©¸„ -‘P9z&N •‚èÇkþƒˆïù({îgTAÞ¼Ó_ßO¿·jç°U X?ι̕}~pz-*eþ²ñ²Ï+ߨ.GÖ%>¡mŽ¿pÕ"Ÿþk;óõGoÃyÿÀV;>ïñL¨ˆ£’©ˆ‘ña['X:gk)q™-3Þ'>…ä6gË“J -U:„k& þ¿Ô`æ'™C´iX4¹–‰tò*hã¼$÷ÏgËÀgPˆ«w°Ëó;ËÞ#«F3Z­HûEœ<çæ6‡Ü©ò^‘–jkˆ×Ù”%ÊÉØ ê,]O/©Ž[ž7‡ ªÒì…àÜ3Z¸Ò%õØÄÊ<‰!e°€=zé­^¸À`&¶ž){¤ÍëÉ;&YÿçÈ=Ý°Ž›’š”ti¿BÈâ8 N?çî†8ó¿ý¡¸#:R¾–+Aub;âð÷•pÖí"*›óg¼=:šÏ¨íÛ£E‡Néƒ9ñ¶D] leÙŽL3°'·¾ë`7á. 
ê&Áäñ˜‚ØŸïo¬Îê?³/@¥í«²K#g–È9ïµNȪQTß(»´À «ÛÚ\ùœ5wÛØ©¤²—˜¾!‚Z" µ-Õ”kF]|£ÖîsÓ "ú„_oÐÌK<û^?Ã?>êäu%£¹ð²‹)pv¨ÐT¤©+ñ§aÒì§k‡ÒÍxèä—PÕœöaòîá;ž9¶|ÕÒчâºÕRx˜÷çn¢+—3ÒÀ;¢*òÁûû€`Àá6ž^°˜zÞš&P– -ßó«ŠÖ2hÄFz¯híP`»«(F¤®†¤†=sãÔ"Nͯn8‹išÇ^\±ñ™A~÷^•‹¶.4w§¤(ŽEÒ -Ï\Ž§Ê#RŠGÌ°÷f¡âYs‘–dÙi'¸a…µ¨{†×¡$6Ü9Ö 8]¸ùÜÌμ›xÆy¬!Ò/à -ìç^ß±~0ÂqC}&%ªx1lR{ÚéÅŽ˜SÄõ)´!C’¦Î9Ïc\]g*¡õæ›4¹Á̈ qgUCõ2‡|¥tR'ê5#¦ÀsWÚ03ˆûË~•´±{™'u›ñÿ­9äŠ ñ=ÊyÀðLëÍ×d߸eBuGïPôHT uä3=32ï~m™˜W¢‚þåÄÿþ/Þ½¨è t¥wæ>á)£‘ÍÞm¤C«´“šÕxjй§ìO>`¦C\¬5ÏšwÍû‹~ÀÉ´S¹h¡Ì{þ|zÊ)3ß›k¼ÀF-ù·Ô8ç{÷÷¢hZ%–æ×I£ågHóD+jø±×µ±#A™R 4k1pÉsttš8Isy€ªÍ 5€¹Žè ± {ýѦó‹>Ùö5Mé™$ôfJx¤Î7ƒ ;¹lIE“O'úS¾—ñ5ü¯;¢QlÝ ÿ™ºR¯d‚WM ¯"ç ŠªÍ!°©™Ql Œhåå‹$XÔØÎÚˆ@í–*2-—=Öñb9?³»ÜsšîXO¨/‚ytQ5}Þû ìfà5"ˆ¨U¹Ç‡Y<¼G“qŒ/D™Øq‹'î‘ß!æ‹Ö²ŸÙáb}ÃÁRk®Ãž³Ú6«#’Ù‚¤áÚFÈ]Îd 0œ]àe¤¥ß1´^FP&ä *kHÂÕG:´RÑÃΕ\–ûUƵܹ‹p‹­‡Š®€"æv€ÙgƒŸa1Û©€ÜÜ™±€!L*Ÿ(ez&ߌ-8úËÌ°Q úº’ Ÿ"݈­üMo!ÙüÚÏy僷ÔNœÎºa%rº*œs„Â$Ã;Ì÷Ñ~(ë6¶\‰ŽCÌv:È·AÒ>/*Òáwæ„Ü.1ÄÎ¥èÞ‘ËBòžC¬¬'Î~xiõ-Ŷì¨YÞäRÝrŠh±˜GôËxí~¦«ýýn³!‚Þ¤ÖÎÿò•%Õµ´ö²­%ž­v7÷÷ËîÕö lïÐH¢ñP¸ FHƒ/ÇÊ+|8¸§”LËi¼ -<îõ‡ÜK~öËŸ’ÃýbXÛ÷ÓûG¦"€Ë‰Ok~3¿Ï’ -·À²hŽÔ¡Lï«ÂÙ*‡gÍòöÈhaú§jµyk¥}©:AÚ­.ÄEŒÓ=Œ— ¢?ªdac¹áPSÌVuAùÅhFW3Á†Ÿ¯Î?³I58Uìq¹7P ˜öíR‰ìów´M¶@¹M…›; *Ó>~îCê+íYûPꎌ»Þ )þ’t/j³J\á›^^j$)—èd÷óÌò¨q(º>Û‡þwÖÒ< 7Ízÿ°ž}Ær°Ûäì÷|Ϲõд¢F§ßBÇU¯ r>¾Ä þž×"¨`Ÿ±Ð¤ƒ8km#6bÅõ­mdbÂ÷õðóûAdî””ßo:Kl`ôzR.¨LAÿl4ÏCH%þäµâ,n9‚ü -¾9H’Q«1H‡²EKkÖ °[ èt¶‘+ž7¬„4~¥y=uúÌï¦hNƒuÔÞŠ*:E‰×6B4ZDuåít^=fŠC]ƒ¨: -‚dù¤0 óDˆè’ÖD̬¶ØÖÞxÔk‡ µgÃoqhhXpWµoÓw'x¨ð®K…(4'"kÔ ™Øé7ØGÎ!Ä™œÍ0eÙ‡³ïwò©ZŸaÅ·™„òµs~òV¾[]›Z ÑÚËT&媇ú~®ÂTè h91ÏN·W·,B¶¹`X`ë}Å08”h—G¦ÿþ”ö_w¾l|‘9¸ãÙ‚í=[òœˆ;JÑ|gš˜Šª‘_ü¯¹Ý"IxÃÊyÞô9æä9ãvC‘Ô™¡î:/âSå\‚q˜"l¾pP›Ì”¢öÈ û[™íÈæy×i×!8߀HÜAæ»Èû±:D³!mñµ–ff*D–Kåßå·Z(ƒpô…®ÊûîÖ;€‡¾q¶Ð¹Fü©UáúÅ/ݘ‹óÔ>úSVÜ]÷Kÿ"=‚=:}=Ò‚âZ†ÉPæ“óµ·žÔHr%XQòž)Ly}ˆVø2Y£NäÎib/÷´|8(Ôm¹’ãÅO-Ä—Êçܲ‡ŽXçKíV0l5Œ'’¡ƒÞ?ŒúRW”qœ.üß7tG*±Çõ^üÖçUZÅÁûÌÍ Ï­yCÖÆš4)ü‡¥ Z×RbjãÁÊ`F®E~“—󱿗dx¤¢{oT˜˜5mÖüN°¹ó·`Rê©ôâHÄ郩¾òDÍYöâHHÑö´sò¬ÖjhIUZÔ–ïF÷ˆÆIä`È%JiO=ù÷‚Û•Åü÷UØ*úøv³ý‘A’¬›kÏÁÐX´%:»}·]9]Ö²½EÓ‹ì÷“ÚÜûáž Î×ßGÈ`ùÝ -¡Ñ£bpÊžLòŒ‰jyg,zÔau ó¥o϶~¦¼tåîZî7)£à Î a¡czé}­vÍ¿¹sÌpéɘJ˜00ðWþW‰§Õ"ŧ¡Ù‰ÎÒ!ÚÛ`õ\ŠævÖ•[ÐÀ½VEt±PÀäçCݾ-Δ㾠-éq½âÇ€r쬀aÞ­¢à|ð šØe’fQ$#•ýQLñ‘ ô€'nd}rØyÚyóAq·Ï¬F±Éâ;×k^¤xiŒ=µFHÚ)׫¨7ÂRsÚõ{@ËHTïŸkä(%¼•ÀW‡yrA°¶˜)IoMÜ Òü¼Éÿ³bG`O§}P«dN8[oa%üÊ­Åh«°}T6Ê,nDÀ¡n9º¸óí?õ;/ ö:ðˆtî ôp‡ /õÞð‰m•%î3Òj‰÷ï -¯²¡ÿä©îýêâ×å©à¥·˜Óe;ctÔcú( ¿¾ÿT5£`JÕ ÔDž¨`lá_2Z¾«Žgm+Ê^z©-Ý_l¹î¿õPðz?žAìF-–­½G€ç£´»¶5€ÐÄ°óô¨@j9¶ÕÏ_ô#5Ûnoc½h\A{Kª2ÒéÏ®8Øè¾ß<ïþi@%NuØʾvãïÄ6¸ñÖn'[~MÔX‚þ³°&1WÂÄCc8ª¦õožËzÔ+oô:ð|½ãžå§êÖãÛA̦ö>x#+gTÊj<ó]DkC·ËPg~§n1$â6va"ºbβ㮓Ãn:¿ôöz!im×ññDn4md¸ôú.S§‰¿¦cpI~уž7 dfZt>(&éïüéÿñ/¿t²Óëëˆ<$wR"‚K¾½Ó¥oÀÆ8aAxt—KàÀóu¨úÍmŽ[F¤Óæˆ+JõÞ þ~FM‘ - nuÞÕ -äjç³$A²1‡ ¶Ó$9€?7€ßü ŸÌæïlà -ão G'rPpwÝÍ®¥6_r«SÓ‡ù®Ñº.¥–¥Œ£úº?5|¸ ñnþÙÉSž†#pÕ¥0ž´ØöÔ)AM•@SÔ÷Ûd‘Åí0I ŠFÀB„›úAÛ"èa§](â¸~kIB(î‘° -ŠEöX½ãKÓ -Í>ê臛{™¦ò]5æhÊ4*¯yãÑ|fG'Z5Ü©×üÁ9d^{)³–mß<Óq69ŒÄÚ¥Ö¿¯ÝX¦ˆ3o¥Z£ÙdYtîDëÅÞQõ³­!é½{Ì4r¨œ°ç¥DY$Uqã{ÅÓÿJ]>:c£0þ…íZd/䌒 ›çjÊù·LŽfJw}¼Ð8PiØ_1 k¿#ù,G-?Ñz¼B#G\çT\!¤ÒùWYÂ\ÈO A–Áttй‘`«z}-6 j´%ÑÃ3onìÌ'iu¤ÿpQ±7/$ÞKß…fwà¹]C‹ëÚvß%IÜci§Æ>ó`7 ¨%Cs Üù6W ™zK)¢k ·Ò ÒE;¢]¥y–VÙF;¯ûª!¡¢0{Õoëji$\u7ô©0Ø#™eéÄÑ·ÆÐØ×½îé -ðˆØÉ·³í+¾fz>õÅúÎ˳; ¯=p–¼FôÐþJýlîf„-.½jg š»*y۸ؼþ{BÖÛ©å¼¢Ö‚Ej>œ†°n¡íå‚-µtÂyÃ7E÷¨¾lV ZžK5åæÿ•ÒEjaQjÉ »íÜi)¢ÙÆÑ’±€p=ë©@‘r¨íW†°ª¿*¡÷ù˜!)ð™‰_ý̃št—ÍŠæMl,{랟‰Beø çÏÏMØ)5Õž!´Â’­Î© ÿóÈÒÉ¥ÒŠÁuÆï«ùùȼ‚ƒ¹ójöò´G~jž ‡må<ßïCvãE²ÕY*=Azšïá,¼”@inBÑÇz•)<ÜÀtšsÀ÷îÀ óffœAÏ'f?›:v›ÏÄZ§(eÅh³r4FÀß·ŸÔ+îP²‘y_ì2:@h;²œ0!›+/Y+q¢+ %u:ÍÉgq+¸œjÆUãA­. …îÌ=¨Â½¨…é«YQ(íØVOKÜVŠ‡ /¬oÁ"•€ Ne‡EŒFdêŠê|ªbqÜi½E>4Õ"$#å™g¿aD‚õ¬EL‚ÄŸ Õþ®ÔF£ØTá.1Rw[Œc ™±n¢øÒŽ…l*¸3ýjœŠú†>‡ÀÑsjZsGÝ - ïÌ6÷¼˜ºÐž ƒÍ{éõ‚—Ñ=‚»ë$Yÿ̪=;hø°´7ÖÎyëA‹;þ|G¶TöÜW\ý¤Rw>kéB‰ì"•ù ¤*’ P:hpߥm{aɤoÌÛ;þÿ/‘uvÀ³TŸ±Ü¹ªQCwDÅ+ÌØ¢ßÀ9›g/Yô³*¿Ë–°ü. 
-q ¾J4)‹÷¹þC&3ñ¿¹³Û0FÑn³ ø¶Òp³¶Xsw ôb6Í£I©2>Ûm­ó#”…ñ0¤à” ÌQo²½íá‚^>.1Ê•Ö0 mÈðÑUD -kŒÞx‹¢ðÒAAöÚK8ŠnhO<ìüÈú‡^+M”QD¬¹'†ö£v~L1¼S"VKŠŒ5³‚Ñ‘ù+Ú鸚y»½8_#½á©VבÍR‘N/׈pè}§ûg¦p’¡½¾‰Dv¨ÀŸÛ¥ÙC̯³‹¾ìX’ºÏ­AÞÒân¨<{.†³8[í´EpÃAämn{:é c¶Ç»òÊ«™ïjc4b;H1`ǘû|0õÞºO$t<Îr­l'÷ wiýþ‡-•Åä=jýNfùä/I=-\«Û»NŒõÄòDä\ Ø;¡úÎÐùY™+˜£EÄù=ç3÷˜'‚ù~Šã{2Ïæa÷ÀÃf„Û¡)£\þÏ̦'‹Å¦ºxØ{”Ù{¬”j®0Š%7r‚T/z$ú793*ñ<Ÿ˜yl׿I<a2OÚ‰wÝp]‚¨·‘Ÿüg±ôÔ„#ÕM$m¡=#NnÀzöO[É+ûÞŽÝ;WD‰[J`À¤‹¡9÷€òaN½û®]ažVí®lpì;í-ýïÄ"  ¼c[\x³(áª\G"Þ&öØZÍIíÓèFˆ…3mq U‡÷sÍlMÞRyJ/Fz3ƒPp÷{Œ°õO"Rè+ˆ -4“ÀÇ/ÏEŸ’/µ%Ÿòˆ¬$øþ–†½Iò´Q|§HpauŸ¼×¥ŠJ´…Q/kW¸BÂn9Ý_ÑPñàËß*“ï" îžµìiÝð{OÿÆä»䘹*ÊQC”.V4”SöpÞïÖìˆBéˆiÿ°f Axó†Û™ÖE¤.¢~ºà¤htŒpæR·»“ôŠÝ¨&oŽK~B} $0èЙ'«ŸÁ¦ˆÚk (̤Wn±IL7x쫈”¥Æ§•ÈSµ©oUɹs¿æYÈ鸿[z(=1J^ž}žÇ,k#&Ì+}k7­¯ª®ˆµ¶ôRFÍ`þ:¬‹[ySSÇ RWÂRÂV"â…©WwÑiÃì"÷%Õð¤=Êd*’«›Ra‹Wñõ®ôÌEŽ³„¼T¯7%paÚ¨ €ó2JÞaŽ™HÉ>å³wXÆ©=°µH]W !õu†ç˜ Ù>"Eíªœ}UÖõµGmò»;Ís&U)ääe±ŽèÛ<“$‡Bú;ìêêÄ! -˜vYM"¬ ={jêþr…ß9.Õ­jê|má|!Âüþ‘`œo:çÁ¹ŸKWïÉA“*:¸¹fpX_ü0êDݼ4‹vn‚Bv*þÓe>}µëK“Å0Š«l” Vô4C§ˆ6,d*@&jè<–º$æíº‘®¤º‡Û+;Š@bš˜§ðXsȨÕnVv Ö5güýFMÕ+æ+P{6à§]©&’Ù¼Z -ñ4R€¹¯ë^Z¤%ÙK't¤$ÆaÚµ’ŽüvD*:—bN„¼¹Ç_p=Ô*aWÔOÑZÕaäy–ØbìÒ°vQÕ¢Ÿq~UÍ_Ñç„ €x÷ÏÔsVæ›êÛè‘ |XÃ=”'…‚B…£Ä9¥¥"l'ß걓çjq¯ «@~×;µ)BõÚV¢Ö§“ -¸çwìÎéŽuuò=*îlç%0šr×!ˆZ€NT(rÀ´­ÔîŠ*åƒÖÖ¶2爘O0OåGhÜ¥èL´>è­êHlœ”7Ï'ïF aòQ,ëWåv\…¸hû×Ú5T–HŸ‹“#b,K6ï*ºøöY=ÛÏ™!bO.j-š}†pC(k³4o[X½0Ô½ý‰d‡Ò€ûu¥°e—«»ñœ±OrMÅ}õXĽDK·<î$îO$¸Ì6«<(·)gQ²º O8›ˆI -šþð±8KgGÙ÷—ÏVë -¾’¸ÏWçÑػ㣓œ#*ÖLøèºÕ ;"t'%4¡­!μ†Ìzìë4XÒZà }°/ý'8Ä‹Ù˜u\ì\'t¤<ªÚ'BŠýµôógxTUÇ*UÑ:A=¶2h¤æ#'ð;¢¸PKâ> KWåMkäŒóð“VžO<æqiŠ§\9é<"diïëxç¨v}¿MW<“𱌬|ÁÀWòׄJn£ÚCåiÔóÀƒPÎ7ë®–2jë<¢ÀFn©Xâþ§–·®áY›4ÏVæàš ÷¢ö5@{¤ü®džNXž®é¨+þI+…âÙÒeë~©)·fGe°>’ Åÿ¨)©ÒÕXçîõú`:¬Cì€ÓÅ…D -»B~’sŸüÍœíê -ELiþ×GIý#–½ÖOεóPÑ'Hõ£AÚuW2®µcÍ…˜¾ÜYõ>kfbO«VÞ‚3g“NÍ•ªaÜ[{kߓϪ8œ%V£D µ£à<ÕßÅt †¹CPzùviâl«ÏÔW21OÓÙͬ˞ krq æ€÷ *ùë¤ùS ÕC\;¥¼Uw|† Gj«Z¡P’P¨=ECÝ5¬O1F5Kã•Äùä ±i.­;ÁZéyÀwJŸb<üs/T­ç¬gºcm{Ç‘0åIÝ6¼ê<úBß`X¯d’W†ç uÑÊ^ßRV6˜ùAHþwÌ;ºQJFw{ŸÖzæ¬3^²$&\’á&¸âÖÔžZò7ÓfmÁ¤;‘[ôõì”`Ä ÿeªÑ@@4õ-³[Oìa3Ÿþáß)nÙ”ÿb[¦¢¿þ%[01œ;ukÒUžÒ=í ª?RÜ©ûPçrÄQ§6ß먽sgÃòSÀ> -nÜá‹UqoŒÔ+ŽŸµ=„õÛ&›‰&tæé8¶·l`ÙÜVf¼z¯¨ˆ~;øYJ -Ñh‹ëM½ Þúr#¼úUð.û£JaúY¬Êg¨ªjùJÝ_þ -DÇ):Èܪ§áž=ËÂØ”?c†$dŠVûÛnÚV(aøõPŠñxÿg\:Ã/F:#3sÂAF¼ú°Z@j£â};ÊÈÚ[K6OÎáý}7[Ĺ›Ïª–dÉ6–!Y´ -„ÂΊ¼œ4·!hî¬ÇÔù½9'G¶ê¡c*Ú¦—\“’¸êµ°µ EÄ~.+%xÀs?l0¥î  óª ËãzhΓêz}0ûþZe -=Z¯1ú5á©ÄíæÖìx¥ -ò$àJ3¤OšÁQWÖ` -÷ïµ»³ç/¿ìøùË/ÂEuì­á÷aÚTú™ŠÛ#žê Tæ&F‘KŠFo±6fžªsú´Â&JüP›ˆŸ‡£üD¨ûsV?çßœs¶û†ûxܤCGh¹•¿ûˆCN¢—³ïY[Ǿ2f¬ò¡g׳ ‡™PŽ½Uê%*ÁD$º[ßù¦‹»ÑJ.­6]ÓAÇ^?Ò4£€pK³Ÿ:nbI*ÝÂð¯ÙÄt‚ 8B¶…q0îwaß!.H×¹t™©ÙÛ6k½A¸ª –Õáú”]7Œêv}ó­͆¼ 1Èlè1Jj¸©±oJÐ -=Ì]Lͨ?ÙŠ¯°PÙG_O ÅÇ—÷øbÕü+®î|÷÷ˆ66€WÍ}%.:âéïŸ8—6M²kçi$ýk=‡$0…{·Û¥?ãÂœÊKt¥mVK»ñJ?û]ZêWм[(‰¿ƒ‡Ç/|ÃüåÕ……júvéþ¨A,!qžœ>ÐÍSiCµÓa=£Ú¸×:hAðÞѵÊF2ª)ŒV×~5N8Vª’=­KÌÇÚJÍÙŽÉÎSóò¨ª´”ìýyV´§EÕÖ®'vì~o{dÆ}m{w:±ÄÊ~wû6;9‚§Uƒ§%RFl¬j˜AIȲ·Ô?Ø÷þ#Ž¶ok­u´ùwa¾Í¬\ŸXæÃ)¦WL¶]´ª±s#%q¶Ë7ÆŠèMoç:sÂe©k±‡U›SDŸ‰àœäc}Ãfp+¿êB=;ÙföÙ á‹‘1Px»×ŒÛ¹·™9{ýÊo'Á?ÿñÁqñß5ˆ&ÑòU¥¿@?è-3‚ÏË[=Ñ{ªDEž0¢~˽VÐŒQŸ[ôQ•=œ3¢v×Ϋ7Ô—h“¨jë3Qw’FUô…~'Øçwýüø{¨ÎÂ1%³Ç]ÈY—tPõ ¹ Ï8p$›xì^OÜt"ü(:ìw?–Wêˆ^Å\×ZŠ×Ü€ù6)O[ƒDåM±ï¨K¡w¨~W9tƒ¸‹@nÿ ѽF²ÐÒe˪ò^šò‚@Ýíe´~%ä®÷¨ßÑ0“Ö+E¢ð9C”Èà|uJUŒâ—2Ÿ˜§‡ì†V‰=˨/¶£*䊭Rû²Èï¿çHÁ´þ¶ê?O‰Z„³Úy-HÃA<¬áZyŠøF^;BÛQ׉5ªâm_ùèÐ〼] ‚$xLjåóöQ'¸}·*QúNDpcó´âã>¢XRc`ºgD3³Nç–âéËô‚ƒ¸j–÷r ò;wêö{ì¾Ö Yfü@?¥…@)c½–fST?àa•Z–øè°éýYHå+ :ñþý—ïQÊ÷ ñ¯(2%”a,u¬ -vöaø7A&U˜âUƒ LfNØ4Cß»«Iä*2í§Ø°¾‚|ž“¨¤²uä©ûR¨ÕóŠ„aqCT…´c+éRÕ>Í)$†¶à§L -ga¢zÊS½'{¾­Lƒñ]Ï)dä!Å7ã9M¸Ô]>Lò et;Ï)%8ýEÓ³8¡¥Í"‰9GAåÆˈñ·5Q®Ù¿ÿ%›¸Ÿ\!¡m!ÔSúØŠæGtˆW4Á—Ø‚ùdøs/»¢Â’÷Ya¿mh~v^Y"žù6÷;}Žh­J XªwF”b2Nê EÿäU»­ß¡ºÝBµÚ*Žšç±5ºW©“˜!¼qÂc¬àx~o¡Ø|âšcwLØh!fqGœ³–i[éÊìDãÈ -ÀÄ\5Ñsßá´ÛR dŸS·$2j:ë\„²&ˆz±ÊL,‘=~'ú®ÌU·|úqs -]ýû/µR !õ¹€[­²kB×Õ YåÙÎü3&ï‘ÕhPÅ-ΑþQ5öõÙYMN'¢Ðl¢RM·íÐ]¦ÂrjO³ŠÌ¶™7’Ø-Ú( ²ã½Õ@·ö3]õr… (L®e endstream endobj 43 0 obj <>stream -ìmQXNZ“%éÕjÐÄonl42B别Îqä4-&Çõ8laÿ=Îx÷ÈBÔ^M‡ 
-Ðp¾×•pœ0!¦Òçú@[öY®´ÿ•î&ûÞÏ ÍïEòP¬Ñ â>! †¯r'Ö@&_¼nÛJg÷k×gÜçòbÖ⮨Ýï4%õtLY•‘ýŽ=œtøZÌ“4…Ø”©Kx/<·(,ƒåTñÎ^ØT傆iPgšÿï Ìc8Ðç ][:ûîjÝB§S"m°•Ðo‘q‰YØYd”ÐOݤÒjŠ¤_¬À¾ÿKl¹f‘üdÈ7Zqéôn5¡Èb ¬ "v‹ÄMWáfÃ'gE"Èb[Ch¯†ÄîZã5îÚ”]ÅcþÃôx©°kkÊ"Ÿ3ÝÖ“y!ã•ÉÛâ/x†ÌÛNâÓ‰ûJrþÎáK~§.õ$Œ¸#óº&–¨öÜ夕!·ö†ÙkˆÖ§^©YýŠcƒ–ì:”ƒ¾@œÂï'ºD=4ã=ú¡éL)wæRC‹t^HIá^sœÖbõÄî… -ƒZAêg¹Œ ­ ¨8Æv‚úgý¾ßC¢Í)m/O'„M#“¾8ZD1ÉÍô|)%Ü#–ã*ö¤Ôœ’W\ZR -¹ÆRˆ³lˆŸZµO Ð(+¥‘nÞÌ8½ }–1 - -)¨,ë;ÍE!;˜}k•\Ò]å•ÈcZ%·P-(²L[¦cº:dÆãxF|ð¾Â©òGÿ¢#ñý_b;õÍlú‘!ä^7Þö¶Ù ¿3ÍÎqÅT5ÂŦu?׶ Ú|Pìh™MÞ3Š‚ý*'}­H§^å<Gsÿ>Wˆ"™½ì­è-•^Ú æLçŸñ?8ÁêÀÜ—QìI4͆ép]ÓãˆÅ\„7ÞcwÕ=cµN ¨5Fˆ¤ß¯Äÿý¦¶W̃VÓš—ÇXѯš©²pN^±6v<ºjæî²v -G|çß €Fcö_ e´åÖ“½ÁF6¿S[Çš4…nÒ¾Åw|ÁT©íÖƒïZÝ"Š)m÷’͉.V¼Ž‘¹Â"ùç]ß2Õ¾Úž€{cÝÍ¿©(°œÙÀ³÷ <êÝð¡#QÞòé>ÚRrRºI¯ï–på@P‚N®ãÆ_Z0Y ¹¨7Ç·!Zy"}½ßÍ÷ÉQCmn‚AôcIk[i«é@æ+ï‘Â@¸ºŸÊʆxíüjq'Ž6ð¥•Ø²ÓÁôñ¤r17=Ö'ØÙÞ[´ÐØ‘Õ¯úd5Òî¨Û™‡÷Zµ“7¶ÚónzÌœŠ©*Ü:Ò݉rßõ6ülÎͳ”Bvˆ`ÖYRêëQ†”Tì%’+`cùêàÁÜ=¾û‹wJEøb`z¤ Î9êrbo¹@LQ(è]°°Ëí ?rž” ²8R§„^ÚÖ†½sÔüšSp;⼕üðÞÖeTó[•Ãáõ½³¹°®çé‹ŸÈ—Âyþè" Ô´¯÷ú¹@ÇâäV¬l :œJ¢i:ñ@—ªÜ±6Ê_ ›ìéÛÒUßR{øÙ£œåÛ/ÿñÑ_bnúígþÔà,þ¬ùíÍÍN7]–cFQÇ ×.MðY¶”3if²Áâðƒ»¾gïxÅÚ%Ön -!6øÀ{y&íÇZ´4à‡ºC‰–HŒòƒy  ]sü\’èG:$kIŠª6t°W9wÕ!-¬{O©H÷Ùh‘qâÜ N˵„8ÃdxÒÆŽ‘AÜrOpKÅòëýÉÇbM^å@?¢Ž©=€?<–€Ì'W¿ó;чՃ»¶åÆÄŽ‡N܆Y/Ý‘ê¹±j…# Æåö¾ŸõI_8h%ð¡‹E¦O¶¯>0ƒ(¹Ûˆ„ð6Ö$Ù¦Ü@79/ 7´ëŒ#Þ^¹)~? lÉÌ1‚ÚÖˆ$@À;‡ÀÒÊìÖ»…Ê1îÈëÿy˜¤}ö<ýça>Â>ÁÕÙê#ó¯ÉÛa$Έ¾ôìðƒ•ï‘ââÒ•’˜>Zåf?š¼Õ{1‘ÐÀÀ?«÷B‡,á_ØxëBÛŒ@Ì+Þm¬·w ¤±ú*¿ƒV¾ÿ‹ µb@ -æûêÿÎ ¿H·ºc¿Ük¦Lú¦1”nŠMp5¯lqÓG:ô‘>lwªGÑR±UÕ ûç ¢vª,*ýS+]]ÁTNYrÌO–ÿIYT×E¥n«¼ª=; wBÄUô|ª’‹‘7xÿh‡Ïj½W©¿Dhõ>ßc4'$wWK(Î}‚*GÊ=”ÈŒfæ‘÷ëƒ/µªT[×ó¡Í8‡äi|É9!²›ÍÑ.EÒ ~ÀªRì%X~†Ð¥~­íWÔø÷1£þ]hxªÚ[̨³päCÔÔÇÔ‘ö2]h‚rs~ªÂºEBD»ì¬´9"e²¯UˆÕ¥•g6¬(þa)iÚ!Ù7øiL qŒÃÁ‹hÁ—˱(Þ~Fʦå-.‹¢Î Ó\C"ðqÝq¾/j¥±|&¨"ŒgE‡‹¿òУ¸–Öœ®׳û_¥ Öm{µÎªrA O"(m³(ñ;fîW—$Ê ¹*¢ÕH“ö:–¥zÚS„`6ÿýgz+ÄâYBæ—Ó%)L¦#ÓI¶Ç‰ø¦•ŠY{à<xm¾¾Ê¿‘ˆûU§¬ý÷ÿåÔ°BW²N6„‡Â -ΰ÷Ç$bÊ©s—€Ñ¼E³‡wèàó2dOaæÑ÷xj«6Ê][ä‹@*ôY¢üiîæ€1s—‹ïUà‡ªZZh±õû¯G -…¢$²pÕ´Éù9¬×lâ”R9¾Û½t þù“nüpè¬ÚJÔï4ÖPÐ/ýþ)|­j¦ûOÞu§v–Ïý¨&3éºHäŒõT H6Ú!gÐQÙ¢.—¡»A‰Œa™^Î÷øf7ìÖÁÎ#ÌÅx–ÕW˜ÇžÀ²6 -ÕýôòÉëî0£ˆÎ‘n£PrÎÛ¹4Ê÷xoó€Ï·ÊNŽÖΊ/n蔹møäGm-K#k€¥²­ü ñômÕ0|¢¨W7ƒ‰Á’€ ‹5ÕJ‰! 
—»­1÷Oí滪Ë({Nœ1– ºÑo2+XS÷Ô9ÚK’,fàŠPRç Éy¦½r¼W&9‚ ¤´Yilñ[ßÐD…UKÁˆœ:Í ¾ç /äê+ÊdMß‘\øÕCNúCšO• º{d:±¬\óähd„Y€eÀåòrv<"O3÷éQ6j’ºË'« -{¨+Þ5„‰gÚ{°1>çIŒ(žgûQf,‘±‹â‘Ĭa?Ã~¶&¡õJQ’Ÿòs¢Ô)KȆ£šœýçúT< -gæ× BÇ®OÍŠ4õ ­%û¾·Õ -…!âÜâgã2$]ù 㹑ZÒ¡È3²s¯ShO©ÈvL$Ë'犪©ǦȆµUù2Ä"Y¯Ú•\Òñ«•JÏîØ"ˆ”o¥´;ÿ«ÈØ=¬J†WMä‹ÝCcЕTLí6Ä7®ü‹YçžÚúUMäbWÜ®¨=6q^¡V=ÝO¡È³ÛDóWìåŸ9¾ÿÎ?òòµðpYËöÙ|éq+õJP26Ò¤ŸQÂf"³ ã,¬q²3T(¡é\ -fæœv’ÌÞ¡SôÈkTtŸªèZ#ÚX.…P@ÔäAÎáÝzÊ3¬þj/r¤¨ABç×rŸdªk/\ß·q/ÅÏ®Ã#dg¾èªØ}¢ÎÃê¾yžèÅ7ÆâgN{{{G=Ñ} Ž] -šsМٌI¶2œðP©ŒˆÎÌ(ô³àöx·%ø‹‰ÂÇ.K1Pã-1¿ñ<<ÞïoÎö[AS7Óš=@í¨!O’j*c{G¥êÚÒ¯ZÞQ¨ÂÞÚ=icùæ£"º?OL¢ -¹ã+jY'CÃúÀZJ…¸Ïö™ÿýîÄñg,éãû÷TPì<}è…¹£Ã9 †æ¿*RU$º¹—öq­í >kïíhœE&(ìµAè3ÏxoGóRòHv_kç;’¿ŽúäC„ÃL*ϼý†¤€W« +6PÁó¾3‰Vú˜|'©à˜2)­Nº#©3&&Þñö!¿8ˆå‹°íUž³ÆHy”"ÈÉÂ8Ÿ}áNª¼õTD,G5+ƒÈ¬Èý•æÎ_þn™—¿üê«÷s3ê°|:iy”#èhõéæõêi¨<±=ô AÏÀÖãPVÖˆÊ.`ÓJôOì-sS—®•Í×J(‡4kkÄ!l2{j1í8ircRÁ{ ÚµU×?’ò!˜GÏ|ÔÃ^Ô>(!ùŠÚœ ^>.“÷wyÇtyØö\Ú~t„ÌI†±GâŒÄ"ro•€ŽÄcÁ`6gˆéÇ’m´!Y· ¹·ß#7ƒRñý³ße»”ôHÿÔ[D4ênhÉð¸ãAöý§ZÇÛtû¥”Å"‹D¨écíw±¦Âø1ÜQ©}ËO¿÷²¢þšÈÁ}oP–Óx ô-Ý+¤Õ™´0ü vΨ¹o!q`G tèæ!/Á£Ìm\Š•¿·½x¢L| çᱤŽ¥n¥*}3§Ù‚£“0b®àYÝá,øp„} ŽÄ3¢ñ»®’iýN×±cï(mËL³ýIéͳ–é´´s’‚ZëQÄãìïA;èå\ÄL&`ÎçÙŠN=Öƒßé (ö:Ì“û]Œš5}’#ý^'éFa.¤plJW|vÙg’`ؘ ‡aY±“2)œLyÃP”Mej»JýOcPùX•@´ß±ªÁYǛѠ Wtá_Ö[x„Á·zG¤£C¿¼nw¾uí¢ØÒ–Ü8 µ÷¥æ143`ÖM­=¡&5QzýÏÍN:}Àæg¼/Wâu/åÝ Hµ°Os®g髆 ‰Æd³Gð¢úBQylà#^ ”vøV2¥Ecµ9Rú"ª¥°vh§v³Tômì»l°ãÚ¢/a¤s¹Ë˜MÍè$”¡MJŽ>ûpÄ‹ýÍŽrBæLw‚†cPåC~>ªY¸Vo¹-±3„Í™N-¿ž¨)ê~ÊûÛÜ°þ Ýz¨N¬zldŽðFÏ™ˆMÊÑšðjC[Š´vÎSßܦyÛ–iR¿ãí„ ´¿Î' …“.Ìvçý©ÅfTCtÎê3þ+Âi¨õà<•ã‘54ÄS¿GGüÔ£­×'Pàšøæ$’ö)¢öæ«Í5©NOCñåWî#…z7·Xd`BÈ{s¦_ëR‹Ø (_”*Kfº!¹”¡®ñ,& -^r°ý6Ž3Ž„0ã¡ö½†ìݲÞÈ•ž2Kf¡~rþH«ú%çrè]y7nd ôö^#hžöÜKtfçm ~«o ‘3ˆ(r¥Øˆ)<µròJÎo þ¶.ÔˆðzFÊXKN°½AAK OÀÈ lÌ1V=´íCÕ*tÁ¬ï¡¥ónˆI> <ÃT !2æ*l%ß©§Hd‰û6Ïù„*OPó’‡5NS‹³Ê^ÏDÊ7#¡ŽRùç}*Ò2—5³ÊQìΚ£äH&Ð6<çð‘+ñצ„UljÐÄ—=¸»SGaˆ¢ö%¹lHg$÷D½o¥P - ¹“V…Zƒz¯=⛳pEâ?Q© -U¨}|jÀÎ:M°ó¶ô+v[‘†¥¹ñÏ/Ú¯èþöþ}é¦b}¬,Œä»,؉ç‘{ð‰[¯z€ÅM>úË:Nó—ÚNˆÊ Ê"QîËÆí¼ÿµ{±õ|SÏ;h7¤hEDPÓlNé0*Ú½Å4‹(òת2g¬¡2$Üz^]Š륳Ì;$Ê»’ßhÕ·hig -µ@vItpd[÷@€ñµì@¥{Ť”©´Â*/“7{•õåÜ&òEÞTÁBg†ræS÷#Ž¿m‚Ñl#”]‡{ÄÎPöxâE‡äùY•­:<à}ê€DªÃ37ñh0«¨0„žc|U…ˆ™4fì>ùËöEó=Ÿ¯ -™çjlæ4ÿ €9Š”;ZñCNš‡Å½÷@Ë ÕbùªÙ27o”SŒ` Ý­P ìI ¦ pb‚Êüã¥ÔßaU!ÆéÆ;£¼nŸ¢ØŠ8£Ì{dÏÍ›¢âĉôV¶\Ø¡;öìׯ³T(o’‘c¼ÅKfˆ0ïN¦¶ÝHt2Ü*Qw ,€`hò3to¢†[ôÅb^@ómÀ´ Á*‹Yo@ö”TÔ3ÅU°<*Š^ª¶º]u’Ŷ¤!Zƒ˜c`¯HS’ÒðÕI‚æóq•ÿŽÒ¥‹00[b}´9Oüò.ëN¤ÏÏ£t–œªF<¸=lTzüQ¨ƒîØäpg|Ò¥S¤÷=µFØ\…8ÄÞó¥`É\ÓÍWpZ”¹·(½!l×´(<¶Ô˜âüÕó-{Z®7é(/[H;•Ãq'¨'ʧMÍá\Z‰›,.8ijî÷¼X¦@4µå%Й%vè}0Ï×O%y²Ò â…³)ÿÜ{ù¸BåÄñ% Ú¬µK2Ç8£{ú$ñ¿Ë×ô/bHôR@ßË”—½--0æD9:®99aŸ›–s‘<©HÏï#ó õ/¦xJ´ ~ñC"of‡ ýYd§ ¦¬LŠ-1-¬[ŒZL*g؆:ý¸§êjÈDCÿ¡i!í"»ÒTü¶VBÀnd¾1<è»"†x”°æ6p-†‘uYZŽRCžbaÌ­gáÄh2òRòM ‰‚Ü£Zþ“o„êd^Z†‰!eAP˜O…«6¼´Jã™ÚªGOë<ß—²s£0ßýõÁ—z;K‡”a~ï±Î‘i„È€#¿ÕoS²ÅêÅ™QØÎÚ3©¢ißKGÛ/Šñ%œüÝ)ˆ‚½¾)úYQ§"<üvŸÕþŠzžÀî.ˆ`ù\›V u3að ðÄR_c9e†-yÔ‡á“Jnìóõ-‡Ç¦À“Ãñ©7ËQ~&}+!œ;0È0Yç˾k&@ó¡m”m¿ÞñÉH½³’èÒãT*;á!z³'³ÎW†mÞlÊë@$ØI>´;S7¶ -!/ ë—]Çèl¸/Ìäía2¸×¥ª“äÀ.E„zx7ÀZÀ&½<)Òm<³Ž4¾>øPoÍ49>P„ ="—‚›œÙÒq3î`Û.öCcþÌ/X`.¦óËôùëë„ò1Ð…{Ä;ž÷›qGwm´Ù8â2AàʨÃÆ›w ’7ε+Pï#Ù8ÞrE- Ìíü¹qˆú¶Ÿà-Çf·ï¡Â¹Lvô’ÉÞlF -¤æhAœÙN/µÏÅƲâéU9U¶Ês¹s ^Ž½pªL˜uq·ßW¦f['Òý”Û®Ú'Ò9èAUª–‚É ¹Ðo> ðÏŽ^“½9º[s´7$–Ô%T&ÑÛu) Ml»ï%| –¹D§b[V@'a\^»[<Á”ĵ’„øõº{[üÕ8†—êX‰.n:`C5"Sf¸j2ÉVÛáüÝ+¹|¹+W MÔ,nî¯fÅš½±º½÷À j»GRUøŒtMZ$Õx³ùA©4mbY9ƒÿÎÑ ñïuôRü -¹\ïþŒžßþüáÕÿ¨W¿ÓTØ®êm©M>&Ç}Ž÷Ñ©Bo±Ùð+àà -¢¡ËK²¶43úËÕþLNÅ 0L9Ôœ‹–uѸà¬Ô<¿ˆý]hç.®%õë©Q kÍ÷úàå-4ÁqŒƒDÕGöQÍ Í‡qT¼»Vá¹y¤$6nÊ ü"›?Iýý±ÆñwŽÆq¾+Šé|c 4x¤6'–o|A'öâ¸?™µ -ÖªŒ-Æ3 c@˜²¾˜üìM³;ÕI`„4ûõa2õ —µ§òqFø4VÙýý;ñ&mø¢Éë7ûµq"¸ß7C`(>•Ïëûgòêeʸ{ZmÕûÉ—>þema¦^‘QßÒ'2£ˆt?ø­.v¢¶Vøý9B;ÓXnë~zâÒ”ó¨ŠIþöHK*—boÀ8»·ç ù[ŒïgJBÚã -,{aù@]X«¤¯?)W@›Î©j1k9¦ÀB µ§ÀBëÀ\:>ò=çïDëó›ÄÊ× ”ŽOKc¬Ö‹ÒåƒâÑŽºØŸ¬·¹ÞÎe™“>?¶u?ÿãKþÿìTµ¹³Bï¸_©ó^!L’dy -›Ó'è­4®îˆîÑsn+2G4”ôòs+¬©(Ö:Mßâ±Ð7Ÿji"±/|MÇè€WÛ¨ÇM2·þŠ\{ö†v×aƒÊtF¶›ÒôOŠâ)I"Ý‘Ø_5iÚ]ÇëÐù2ÒÒ{b‹Núš ]]‰á±˜³×swõÚSµï,ÐÐ7ïï CSù¢)W]]áƒPÌb:­{duÂz8 
Ù½ÂMò:RAHà·Ïø{²nËûŠüø¾fý£)mÒÚ*⎃£ih¨¤m«öM×LfÀTÐgÄŸ½`h-Ò§<ÖïUùÞӵ˔ù‘Ú·”DÛõªA -ŠUP©·Šþ*¦œ“XhÅÂö‡ëÿa ~“M;Ö•T>™ûÉs¥®œ".sßëJ¦Oà¬GaÁfÒñìµKë¨@2eP 5Ä7iαÑxîè?)W{A6(¿gž~Õ˜Ì6ÜÜ–ÖA¼…ç¨&Ÿþ¥åg¼k…1`z7Sj¬/5£eAØSÈ07ÌͦýzËÝtšÿ¿~Ô›`ÚI…Ä9Ý«]Rñ#¯W˜ÎÆm%cbfg'eÚ‚ÍøZ1ع%—˜ßÑû¿j/™IªOÑÚûf„å£|Ø/ 1ÓÊÛ$ÓK2&@;“þõ'”&»Œy5è{ÊW ,Þ̆"ï_õ_P˜Qôùµ$‘1ÿ&¬‹sç³N£?„uÄÛ ;=¾86Æ]Íê¾J±ÒÒâ ¹>Pøt†Õhm‘üŸaêñnߣž:™©å̇ö£“Îy*BXæ×Òÿ5ÞvÈ_¢|«ßü º’㵎4ÐyèŸë}¤Ù-Rõn«6þ͹·ÎÏ™›Îýÿ(€ósS˜jënjHT2[b” Ùç›Ê¨z¨ð¼S2o+ŽÑX`‘iº{76¢åÁçg€Ñ”±9_|©w y“!¿ýg±œ]z íÆÚs]ˆ~ïÅ@»ºb”ÖoL: »?×L?éŠaŒ<¢¨NŸ QFHiçbT07Ýý -Þ]#劥¡Añ”ÂqNÜñëɺç­=yk6'ðVPG3 |‡ðê éáØ!noæÉüìàúoØh0µ ‚µ´fbo~l1ƒÈS™n‘âpE‹êJú&}ì…¼ù&¢f… ²_±ö}"Ñ3¤‡rCá >{hˆ7BÔAþIcbí£‘[ ‚9ƨ‘SõøO’Lâb«À§>8çÉ_dDn;ȳŸ±yžµb`Íd“}ìs‚»—4 ù èS=·ª¬ ô­¬¿èk9s#n»iQ½ÒÕxÖ wØ# WäÏþk>ÿ_1E9c†ÅÛ¦ü-K|ÓLÀ—z G}¤$qѬÕu©›ó¤K5nžú'@S´'æy=§òÐøˆ@R†pKoJ {¾äŒwæËï#vœ&ÙÉèÛ…râÏG7m¢gÕßüÿï’,,7• DåÀµñ«öøȳ€É܃  +éš7Z®kÚjÔËI3þþœz!AÝ!xœ¼‰ú±f Ahl§B†Àu¢Úô%¦'cLÚ·L_×¥Ît£XL{A'ï@A¦ŸKP‘ ó𺇙èC/ac}µ`|¬øu<2RÓK‡"úm·w:¤WyH©¡9ÇRÓžu¸¤ý×~¿¼ãÎk¬‡Úwþ5s£ÚßbA„ÀlMûA½Nsgea#l*n&9 ´ÆñFKûïh¬É,˜ãþ IC,QuÈqÏâ21>ОŸ¯bÈy¬Ÿ!-±ç·ëT×@¸¬oe5œÇü;œ9óKý3åbRÕ{¤õ #µAÑòu)pó$Šßx̺^ÁÐð†õ,x¡‚êaIÇòۜɜˆà\sˆŸ‚¥Üðá&ÍH÷!œzâ çy®„vX¶ãõý,_Kñ‰RÏü?ÖŒ@øv£{•É;i,TïQ™ÏKØÅ-^>2¹_ÊN‰3~i>ü›T"ö–Ú(wÙ®>N'Ê3:ÜþD( -Z$Œ³R´fÚ"¥CFŸ%8 :OÜcí“kGV[» °4¸Î}µxe/5áÙð€AàÂþaž’à® 9h7™÷u€y±Ž$a[t^ÐÁ¨Õ #€Ki{îwšîU¨×tù DÐÒYd¡…-0ÖVл¿'bOºq… Žì^õÐ4—Rd¶‡b”ÀØÎÜ8ßw<—ÈÌÄ—î ¡/¥ºêQ—ŠÚ(ÕVu¥Ë‚btÞuêIs®‚„ÞÁ鲨Ôì[yæ‹Ÿ¸ зBwãu?Nñ¾×ç–”ÑÅ^‡}ÏÜ-\Ú ‚Œ<èMˆòn((r†Æ6 á 3³ùÎǬn3Ý—0)bÀp÷¥³NÆØצm…¿gç:å{wEB]M -'ài­ÎÆ^®ñß÷ÿg£–c»ÂJlåC8V+â㊱РÀÂàäì™®@Œ(ÎXV`xYl_´1ö_\³DsfÌ‚¤0е:o^×Ujñ”ßp,s:ÆÓM`#yW’¨8_3wDÏh_{š¬¡5+HŠ@k.Ê|ÀrG¹V„2 Höç~3 °r†-4êOw¤Ã¡¡xžy¹A:Í­°UU¾Haëé«_//ë!þŽÛb sóîjÇVS—° :ߺͫèî#æ7šØ õ㦯[B =¨î›†øÙ ‘F}Ó‘w\§GǪæ¢)µ†2#ª1ðÇsÿ«£‡ÃøôòPw!¹§Ø:¥°1c‡«ŠÒ ©ä¼½Þ‹$Þ" X×›¬m ½©¶‹‘{‰Økã‰á®º|¤-nj,\¾ï™KÒ)XúMÛ4Ų×Áö€ðºÇ¸²’Xgc˜Ÿ×Û,ä7&ê׳åWçÝ* -îa* ÁG,NìË|Èn˜‚y¾¦(xÄ•kdõfK°yë©5–GG±í7ç¹g¥Œ`^Bk@¾UÓBK%PÑÖFXðF‹¸$ìÂÔ7æ¡„=µ’F¯ëÑÈ’aEMÏqÏu·J"Ž3’_{¼[SdÈã”ZC"¥Õãè¹ÿ¬ ¢½Ä)g³¯7,k”uiù -|Àœj¹Áj^'̾U9'Ù§§‡é,·Íü"¸›ê‹{¦ò0°Š©îmûÓ´=¥…ÿ»‰úöëñ[ÛäÃܧ k;= -¯ïAä%Õ‚}B؈½€jaŹju'–‘öúŠsñwv¶†?ãÜëd},àÐë>aº½s§·R²Øö -©oé-á—cŤµ6_ß!¿bÞã“yT~ºiµ2°™RÌže—[)ˆb<°˜ÏS^jd¨þT*AÞ‰…`Û¡$-+ )xc>Í|‘ù\,‘ÚKÐ׫Òs9W1×$§ˆŽG¤[·«%Fõ#›ò§Á;³G¨9iÂðK™MKVžRÓ‹%zÂ&ÐI£Y³yÈ)Eæß ÒÿÛ¿Ô˜ßs~žÖ÷óçWQN4ŠCHƒˆó„ÃW­drX±´2W +·Ñ\Û±†Ìý BÒæ¢CSQÔ(seCåv„ä^;B÷=ϨˆTñqN½ç ê°~‡0^<º™PX snfÎßóÚֶܪÚ-÷{—št§£4s+ìn¦ÆÎKx€¶¥F­¤g[¡p$(ôrÔSR猂gjûï:çû…¶Š„VT„ð ³wï—ÅWKb™•ÍÞƒr‘†ùQ…6 þþ[½Uí¹raSä.àÙÌL9uA./¹î/î539ƒðcmýg -÷WÝÕßñàÁ¢}›óRÆud°‹i+?f² È?¢°_`X’9=–”U€èÒtúæ2\…Œž8(d”ªc蜫j‡¥¾‚i.8x <\n.i{à_ŒtÎ+€ì 9žH埕1²šal‘Î-j_Yß 4Lu-S'Ó$ž{Q†Õû¡ê…‰{½yØþGõþÅWíøûÿ¾µšYÙ@Z§.ß|Øç$¹Á+¦Gqv¿jqt„/F;oÙ£%䇌ᵖjC#E¡>Ž ÈÐöèð¨L¼ÿ•ðwÑÅgÓ÷DB{Çâ†5V„FDT[ÄvTô«^®>®k³ê ª?*&øýtÉÃ7€Ì£­æ¬*X˜ÁkÈ\¢ Ù3bmÒO;DZ¬d ±Lááî¾½Ô8jÉ„cùDY9¾Q þˆÈ/—öû¬8 ÕþˆüHÚÖß~©5¡(س\6««u y‚?8Ú§ò Ç_2”¼… ’-ÌTˆ l¬áŒg„ ›¿¢[oˆ©‚—aÈõ¯:ÎΨ\„º÷\…|ô©̽(ªú~lTdiZ”ÌÔŠ·•dìVW™å©M@R-âyô$ÿ(!ëü3Ѓ(<´ØÖžï}]Ybä)2!7Bî2g0ô>ñ$ôõÐxýÊ `´¥ ×si¬éy=¡¡¹efÐB±ÀRb3‡æÊȲ¥@›Ö%·çp~&pQ‘¹ø$Ï¿tÇ|Îón€ö0/8§B¥’¸G4z/ˆÑoýóýËäý½2’íð[ñÀÉò¬RiÕ3ª6".z5ÄD„qä½óˆ–—R85â@œJKN)5P…£PÝ »Šß~j/ö¤ªˆ¬,ïŠ>‚¨g_[]ÎM#±×ÃÚ¸ÙïG ì„Ì“Úð‡ê‹õ¡Å®óI[çŠäµ N¨¬R\múˆö2"õòøRÿ|ýQôãËIø‡Œ(‡S!æ­­Žê¡žiÑÜëØC‹¼¸çžõ…jo "6Ýû0j)#¢§¬“­d‹ptF%x%c­Ã(Ò8üΖ¨¡Žhx®Aó21aïR˨ôéaÞÔüŸu¨YTWD~’Žb‰C÷„MvÕ÷YmooåPpø¸OWLˆç |ª7–rh¦âUGlddœ)ŽâóU³9îm&g5œÕ¤Êh½6_ÕäNi¦"ÞÕ¥PØR4O‰|s8á©·²Å`m¾3º×|«ÿ•´ïµT žWÓ7q-M(f‡{Ô¤®RIãU±\šG·`÷·=F²%kɽåk ÄÞ8õ[^k}fÅw÷GàèOhG -{4jòú]¤âÂRT—CqÔ Ÿ Ää©q2ø¶'¼Ïç•Ô¹rÁÊ×™§úˆö^@ž_øÿ÷—}ÿAŽ÷“zµ²#żK»Jê qE+YÕù-Þ»­n.´/®hJÄ«âšÃÚÉ+®²é1(¦þePÐé”Íîu©} jy”7ˆâIêBÐËi´kBhîn¡ÎÜ«¾áÛÀÿF{ƒB¯Ðó Pêp†|ÐæϽ”töez¯ß9'°E?Û‹ws§²Õm­\ŠÚ'®3C›¦ÿ6g¥¥—Ù×¢©ü(Èüw0% ?èU„Ã{ïiËe‹Ô¢œÛvÇ0Í;¦œž -Ì·'îUïŒvç»fD=¬ëÚ´…]Ñ× á¯×û‘1ÙÈ =¤:`#z nêQDßz¬0¬ÌR~gÔÈ·U­©Ë=þâô0ü»}ÉŒ¤mÚît/Ñeü¡sÕâ¼ó1‰2õk碠2µ÷S„ØpÉ抑Èþ¤$‚vü« ‹xO«›üù öÃ.ì¥ ƒm}¼ù 3ÈL ûDD£o}ÖàåÊæ>"ä-åR‚ 
–ߟ¥ÈÍ2ãÀˆXPOÞÁ¬ÂâuÈA'ž(=µs6£ú¡l% °` šG•Œ.Ð1dñ«;&ѵËæ] V¡yj ˆ™= ²k‚gªÜT^ÿÆÀG– ¦b{|LŠ”´l -ü/ek]*#¥Ä¼5Î+“A‘$#)3åùelúøŒ­ üå¦÷ô»OAì+AðmÛmÒ$ÒÐ!ÊF -OZŒ#0ÚÒÕ›{tJTs 5zÉrNì{«Î#«‰ß¿d¬1,B×2B‹q\3m²Ï5õÒ}P;!)Õ‹ÿÆü¡¬<}^Å%ÈûÇÒê¾;¨d”]>b¬á+$Möé:O˜`O“i ˜[ø‹è¬ó^Hê³€vÁÖ~4¥zÎÓîRùe£$°<€×îT&RµE«Æ+¿BÐ s VîIP³¯™èŠ×œòk Æ_–ˆ½ôP‰ ©Ó†‹ÃZá;blA=ð2ÏJŒg9š¹ƒôßUõ|)ýJ&‹ ¡dî«¿@}CÀ_&BÈÝK`c_äлÎÕN|eŽå 3…Úg캦\€ÆŒ‡©Yz¡€ìn‡Laª8— <åÝ^}1IF|VXHVmž¨W¥åùl÷îÿ] 0fT€K°ÐHFD/Ö{ý /X´çÊ+HyáH,ìÈæ½È‚°K8 ¦×2F@ˆÅû-Înlt©rŒ +B ç5@P³ -8ØYp@ ˆ'@paýÜãVk«²Õ7|{tbx¦pæë|Ï–òñ #»?Žýžœ±‚1‹CÄn¼#Ëê6è½ýî ”/´|̹y(3oi;kð>> -žF 9©u$~‹"H‡#è먰 ³p¨×:Ñ5KDoû¤K<Ù0HË’EFÇQ´Q}IJG¯_Ò Oc¼ÙâÆù -Á\猀>0¦Y¸IR‡ãI[¡Û£Ú‹zÜO»ƒ£W!W`aSÒÆÒÇ{ -q¥ãŠÄž;Á´*kFŽœd3ñDôÛýéõ#õû)Á ‹Ø.èX†=’=ºP4)$ˆ2ˆòÄd¥ò(¶ƒÅPÔd±O|¼Ç­>ôZ#¶'•X^¥}N™Wæ¿°´wÛ—bŸjr`h¿¯GÎö*lBw¢°Ù—™r^-Ñ6wÊ1÷) g² 䎄҉¨Ü«©D›9MÖ Ø½ª(I) 3ÔXKóuZã{‡…Ùë…"6Ú§fF$ÈNÞ\ìlL}öK™ÖåLÉH¥I]þ¨Z[ŽÚÎÃŽ;8¼=À:9D¾m›c_ª%‹»˜¯Ö%cþ&=;²˜`°]ß»Åãd·xÚŠR˜iç/öœæ¢rƒÞ ªï¦A癈îÔ戅±`*dÃRöMò°a¦#óì}›ðEÁYì˜wÉ× ôV£FÆ wѦâ—5Á.:IÒv† Ó#¥!o{2{ƒ:å eÎPÉO‘€}?´ÏH¢ÉRþ1cÂÇÎa¡¥Y“´õþBÅJQ’jööØ;äÀÜ&Í\S¾z$S3*U*AÚ{¢a|íÁq®Ín—àZ;Ö2€§ÃÉA×mZ<,*a®Á„úvÑ„ð‘ôE' Ô0t!äO5Æ'E}‘¯«Í¤ŒÙÉÞóht´'¦NK…2#‚6—&üý¾4r3ÀˆÅ‘„R ³DŒ¦%Ž¤éæP”Y}ª,—“:5ÏbuÇô‚ª„¹‡@·ZsM -ÔÔ¡Ù -¢æ¬‡õ\Ìz-ÜÎ.^ÚC­uyÎT¨þÒ²ÄI ‰“ÜÎÕyF“*‰“®û¹þʃŒ#i:¿³Bý™<Œ”ˆÎ´̨"@^Ý‹¾ôæW„º/M\íF̆Àp‘$ry€Â»Aðn;¾ã5+ þ ú - MÙ×&_ƒñ|9‰´ç$2g :‚pûÖ$>Û-Ha‚=W Z8î’J¤;Ðé3üiÎ\ï`\Â#®ñ¡î+õ…!A…Ðz> ¶%÷ã*r¯Ûx'‘t˜ŠD,{ÓL¡ë(¸í€•ç’ÈÍp`]âa6ºšº}39WŽyÒÔM/šÔ5»¾*>ÇÆã’g®Ø’<à‘ˆÄ¼Ù îtÂœò}WÈÍœÀþ%YH?»Ì){1hf|Vc]N8?ÉÍRé“3ê8˜¦ ÎèV)†C¬¯ï£Üˆ’~VJCn„s9¨f´\¹nz 0ˆgƺfÇr§%ÄÁÄa*hÔFOI1ÞéE9dä*Á€–¿³aŒ§Œ V’Á•{I<§IGºÔ!M„—)p@–±|\€Š1’ :D±ÏÑm§Æt$'ÝpõÕ’'¸)èž‘p RS(2i‚íŒÇìÊÎß\¸àÑRÉÂ}ÐKŸ'™ÑeH&¢è§÷«•*1R«Tì bHü|’÷ÃZw±ÏLg‡Æþ8ß0ÅS½|Å°ˆdá¨`°‹¼ÒrÐ~ÉÈí}]Éaáøo&NùO!uiÁ ¶bñóç|°[ùìñjŽí{ÚOŸÕÙ9@~¨)öÊÚ1äeîÇ~òÀŽŸ³mY”›„Ÿ(–µËÃ%[ébA®œK@ ƒÓɺ¼¤À×6Yw4yH°I0¨ªÐNg¹»i-âVF=¸Ï%/ö<Å + =-p0õ+ž‘>i2–]bh åGJ” Hú+ŸA®Ãîˆhî`õMü’ÈF›Õº‡5t«S¢²Èeì7¤I§ -U9“]ä•bxÉëµ-Šù¶µ{Ià…)Ë°~’”ðK <´Ê¨Ÿq*6ŸXÎùó¿4B¾VÌÄÌ4ûdÄA Ÿ 4=B¢/ó»f »™4=Ì瘢9¦Ù°ÈZ‚t¡@ H-ìy”Ààè±%„Å»Ÿ"yO=Ž9Ê7ñò°"nËI)ªÊ3 gÄœ~ӄͽŪÐl -„Œ°XéºDÕév †ÊE܈²òêÓÆ´éºòðÀ’…»ö"HR† ŠïTÞjÄ{Šn“Blr§€÷üŽDšçx¹/²öpÐx‰õŠ…?¡ÌTïX5ÊAsÉm>(m’ôí—E!Çj”Îä©BÕÔ4bßl?¿çȧÑP™k='Ý´‡„DÑÖ³Bë(½ͼ¼ x<,Ìeñùvÿ¹1,‘ÐP’(¡€EÛ€69Ê·PÈ°Cðݺû©5l`->å ‰ªæ:Ôèw">=øH‘§ÒÆ2f~tìÜÃÁo#ÍóWë‰îÐj&K€xõïÆ·Hi”Üìö&4‹oµ£Žmòäž-šÖ:ô y–¶ê€ y9rû(”>{x¢ØµGRؤ֥>_*[@˜G \}5®^~6 þdØ鬓ՎFÌ츢„®Kæ€=ø9 ?£v#¡LF¸%{å;²€aA Q tûœb '+^z)>јÌvK^Žˆm'2tµl¶¹¹¦ƒ€OÁÜ_0†Ž|WVH‘u³ß³´ö“H–7P›B‡¬û8¿,ŠV‰ óKzR[ˆ ûW/Ñ屆Җ±°7/²},Ä)=$m•ý¾H9•ìñ°W~Ä»­LLM’š6’=†ê%ã¿eíºlÁ^/Çÿáy¹ßÛtþG„âÝÜËçÆèÁRµwë°ÃæÇó) |xW%^Â^™‡pèì—U«iì!Ê¥M&xª+NÞ¾H·Ñ*·“ìº~ÏÝF;3[)õ¿yã1²Ðâ­sÔ=×t›…ØωòE;·Ô£Påšæ^RÂÜs¢’úñE3©š2}{Ž}táy²ÿHAY¸ˆŽBK€t,´ ê&œ] P{{@]Ì>ÙŽ‘óyììeC¬„ °yÄš¾¸ -Òt †‘TéGÛ?Ë3Cö×þ¨6ÆÈþWö8)6µrxq†«Y?GV}§Ævt Ž›1ðÂ' ÂË‚6Lô¶˜'Ù¨(}“¬\=¦èíi©;ø´ã]F*úð’{Ü@Ÿ’AQ¯å¿’ä&uæåí¡¥¡ö^S¤ :$1ƒ§D½”µ dÍ×ùå -f@ÞYO3 VôiÈ’·ðÞÔè%w²z˜‹W|ü©«[Ifd>ÿtüBªQb;r’vhYòЬ-®îU1e -@ùéÀš.‚òõôãB÷¹Wq]m+×ñ{Ôû:qÒÒ;²¸Å„ê⬪d{–Gàøsÿ’2 2ZÌJi~:O‰Dꂨ¯x¶k¾;€ÈÂ2;X¬ýL´qùZaÇD=Núú²š\CLêreK2³ü8SÉ©ª‡™8ç³Fò\Ú®@[23›‰TQÊ1¯ ¡^ÅIµ‘Pö­; é¼¥f݆ÃXg¦êA–•Æc΋Áæó`ÐB!‰+z¬¸zx;g˜œô9§•/ -)aÒøñ&YvDÃ<¤+µië2Œ ¤ðýäSÅÆñX(¾E ’ì\ä_*¯"i“ºùb©€È—®¬k^Ó5¯dév•tà€Ø{²îŠ "åéåÈî›øÞ*>?£Sõ5%ÒúÔ0 O±b^ Â-½í$À¾ÓêR°É)lùkÚIÚIíwÑ»æšÚ\)¶’èQàAfÓHñîûI(·EÀ_ï×÷²+09=‘»À3ç—ìsnÆõ¬¾>Æb®!÷KrÎÓlî•É`õýˆ†ø+Æû/Ð2ë¾”à'`c†¥ œgO²]ñjytÌbLQÇŠ°ªrS’lVûŸYû@%8ϸö¯ÓÒ`¾D&ÍZ¬±a×W&Îa†Ìƒ(wÓôgŠºva˜I(ñ ñW[Z¦¾\_|1ˆ¬2ª±ú‚fiJ34ß.bÁ»³?K’Í”æªKs]ºo;PjÎ\¡w:ud’Á;™›ûX¨‡(^±r ˜6Ä:©aˆôQušœ¢F×-Ä°c)ím PŠõq-Ä“©•¤±ÑY.d2zXðú“ßok‹Ù/‡´E-]ۇ¿äOgŸ#0ˆü’–ìe91*Ä<`»ÍڒÄO^Ç¡@/ñS‹6;ú6ßxO2k–ØFʽ€éè°/?öÊAÉk”Ͳ1tŸšó7þv¸éâ€3¢tµ‡Ì ¦†Ö¥Vy^çžJÌ&{gæÕ²%d5–‡ž-à¸{9PWÓ9ºp0bì>¤¬~ƒ|‰X®§Y]Žy29êáΉ"ZðŠwà"ðLEDèƒê ÊÌmF~Y'…­IR¢Z¬¸!ÌK’±1VlˆX€„!第;’X–Aš¯XhŸ‚$”=eª:;­d7u’-ÈN¤pwÚ °âm¾.T 
€I´M˺íµkY–|ê=bGËÈûØ#2ðá|:¨t}¶Ô‰í%PUÉùu€L%(¤]Z^Ãé´IÐcª{ìom~‘Ù§ûz¨XÛÆnäÌø¾?ôÝä¦ÌT&¼/ZÿåÍ6Œ&c_…³â´UÌ¿š,±I@£GV3}1öˆÉG8„“8”°bæÀžÃªÑëT±8°b[„8¢Œ òÚŽãMF;ÐØ> -óãET^ W¿à©äÔÄéÑŽs]Ôgÿ»¿„'P~³Â˜H6ÜÉþDX(ë8Â)à![¤¤TPç û (aÒUà Ÿ} Q.¢†û€¡' ½“çœNd%ßD‘¤¹G4§{"µfXÌ©ØepŽëz|üëˆö °2cJEëyH‡Ù|ô¡¤ì¶ã䊺"?ã¯Íeš ~ûûÀÐ3Õ×T¢l¦Ù ²±PMØŽEÓRÍ<5´—Rˆülò\D-Á)c㨠Ìbσz´çä¬9Ïß1«˜Ðä¯á­ -8#{,)…e+’!œ®³Õa¨T#y2¤}Ì ükVµP`¹%ÀÆR°PŸ˜EïÔ%´ cQ"ÙÒæ³%<"å@¬ Z°ÚZ€F2FI,cmÚ¡A!®ÏE-ÑAáÙšªC&´tÃÚûÞ£ª›o"ð~+®gñfÇxÑx{å dŸ Lvh÷òwhuQnÿ BO ô£?Û(>ô8˜ÖÈÆø%©—@}q¾Ô9ibK„±tÕH{ésª«åà¾aÑ v²Ó •žÝKx(Ñ I30Ýö8 ¶¾‡YkCfIcuS.ýÛ—RàKÊ Uckuñ…Ú5ÙbõÒA í@š­¹ªä{RíráZrÏqÀYÛSóiO0>ÎM„©Ïi?_œ¡Ì£ÈtB³J KÒìHi¶à-Hc—ì4»{fÇMM3Ø8N{ùÜ]õصóá]‰iç¡GVc†.¨Î]VF-ˆÝ:a)Ž7Ç -D4ƒ¼¸·6)þ¯‰» @Ñèr÷K2ˆ Àk§D0$(º×Tÿ…ëRd}æü÷ÒhRlÅE„²!à<Ëc`³2åõæoßJ´´œß)·ZŸ8´ÉœƒÙ¨%B%P) -$¬D3B®T×T®%F•Ü ßB“Ðl⯌¤™ )DBâ„¡Ú0ãùÅä~šVËß?¥J:£}ùùû’üSLýN6 ð¯;6h0¸ýZjq€íþDKÛ#,SXîZqCCóT!ØŒˆ•ñ„DÇA²%]îV¡m!ç *Í ´Tâ²x†Š¢û•Ô8híöY‰¸ ¨2Mý`û%À–7_¤kÑ`_ÝÞqQ †›Bì=øS—n»þ=$K¶Y²äHYÇç…„rùøÿî(ð9ùóÇ…€¢dAŽ˜T.ˆLÔ{ûAÙI2 &bJãžÝ‘DãMÛÊ"B”ñ¡‡áå†6Ql8Pü„s eÑTŒoC¥Ü¢¦P¤±ØsºÝä%D`*¼ŒyA‹f´ ¼sç䜠dú’ÀD¯½HpÙ½‚ Q¸WŠ‚bKع|¥ƾjÔÐqX³¿a­Câ¥Û(øt¦Õ¬x$·•Êa{PnªGظ ­žäŽõE[zë·j]ãªæ•ÃõEÑi‡£eZêæ.^T€ÄïKùäÐÛ˜‘ªý"A58©¹Ûâ©é àM”âåfʃ\"‰UTéci^ÁÉ@ivXÿÙbð(|Jºõ‹Ð”böÙƤˆbÀÕ½ /‰ -h@6¤ÃÌW³ã«Øª„0^F±¡Ý͆OËøþ‹qŽ‹´3ØGPtˆâ(&)¯È,›Ï‡t–µ0²ÎÉ@C² ³õ â¨D²@Ü¡“ÌÐô•xp!”Õá^ý\àO™k`5V¢#ŒÉ04ADmÔªK"0C&⸪½´[¹!»Ò„\D³û -æï#æmù ®¬’Ë~Ü!–9ª!Ìfƒ}€uÈ&@-ˆ«Ž?gìr°Í±³_³ŸY—qÀ1ظÙ@»“åZú£X[H°õž×¶GžŸªh5YËÏE 3J€ØyWá~g¨Ñi‚j ÷CæÞÃö-ð8ˆ¸à^5ÌgyYù·æ®|`ån[Ïy¸!2Þ §¼hÖ%¥¨»æ¥KÀd)é ƒÍj2ì*ÎRftÎûÒ ÿnï§èò䈟rÞ³™öîýðWt\Q*T®™ïØ­,‰ -ÙçÙ<‘i³K,s´¾Ž¹̈,~‡ ãh;“bcðñ j’ÕXy¤ù÷Zö:@-KÁJV© Ùj3…u/ñëÙ[*•¬kØ1`X¸~óá€î51é=Ÿ.6&ÓŠŒ{пþ™Ì£j˜êDöìÔèSˆ³·*IeI†»G67L-L—ó8!=%`ÝP½#Ý…ž‡ýåÀâÈMBA³†ª4 ¤®|ÀÐÅÜþÄN­ÐãáÆ錥Î.*u¥ D·hÒB±=*¿Ó¥Å1»‰?ÐéÀ[EpÐ’3`©ÊËç&C’73]"5OàÌ(¦ÁçË[=6âÏ1WCE¹¾©½úÅLßã„™PY#EÜ+|PVÈS¬ãZ¸„U_¡ì×£•z5Saø§&¹3¼œ¿ TúÕÎ"f€¥/“óõÆ8‹Lá¦äK£²®õ³l“µW+¼~ßdô- jw¨E±L\ÿ˜ôÈÜ‹mÖª²ƒ~8,6Œdâ;©©¨ØŸ»ßɱ†ëW«©šQóx’gÝ" ƒÙˆ;¢ÛÛŸ¬Ôô_ø{jìÖž0èä9æK”Uèu+(è‚h©–x5åB0{y% -‹guVC`ÌBH;X™—ç'pg‘¥«¤ç°ˆD&©Vè<|¤nôVáÊŒgʉ«ž˜l_»Z§5íÿÙ¿õDZ‰ûÙ#ä¾Y˜–lèÞgîajHjBx¦"n„µ¶†%ÑhÓ.MŠâô('/l¹N"e©¢ V Æõèq=î#×)+ -s¼ÿa¾wû‘I]®Ü’]¦`¬Á YÂë…6ÛŸú$å&ÏŒüé€#bËŒ ©*×úÁ,gmîÒBͳ"ðØðÃô2úñ@LɶYÿxÌ8I/ª;öz×r²,7C$¸…KEÈ â8Ðn¸Ñq¤:ÿ–BJŒUÊ7â3•ÕËJÕîé$h ¶!PþEyDûY…  éÛԼъÜ'ûpý…ä(×Ñ;‚<Œ3†ÄžF -c÷z¬ö»Sî(!ü„INz™JÄÁlŽº÷àø…•žd¥Å·¿®–”Ø+B–ÛiiJ¨|ÆoTY¼2¹X}èù¾r4ß·ÁÜ*/g+ä6•.Uç«jÁIâ(í¢¢$0?Š’h)Ô±Ö–Zz½µð„Hû”…P†ÿîŸbÒ!¯”«‹­¾†ýS@<ÉFÙf!\½@=<@—g\pŠ!‚;V²Ù‘udy~¢’AÜŽ]]ái‚GدչYñœ®Q tÁd‹|°Ùè>ô–Ó’êãÁOn¡Æɳƒãáµ<á}¥]‘ Ó°}žÈ›SŒ†¢+°І’’Ó xZ!TäXWIgR`ø48 ë)i7ý¶:`dÂô’„qs‰?ÅsÿžfغìXÖx»e‰RØÜm©œýqGñÍŸp·"‹?ʼn~æ<ùíÏÿòÇÿéíÏþâ¯ÿöWøÃ÷¿ÿíwýÏßýͯ~óÛï¾ùñwÿý»ÿñ»¿úõoþðïÿã¿üî'ç{Þþìß½ýýþù3÷ôöç÷ý¯~rjNõãï¿ûŸ~óÃwûýïÿáûßþá»ÿíûÿîó¶·?ÿëßþá+½ù_ýßÿô½/èÛÿáõ7ýçþæoÿýÿû›ïÿõ»¿ûñ_ÿÙ]?ªýܵýÇßì_ú_¾ÿÍýo8ˆÄ9ËÛùËÿü›_ÿá¿Å~~´ÑóÛïÿñßí'ü?ÿþÇßþáÿç|óã?ýó;ïüñwNûó}yûþæû~=ίö¯~ñ?üÅ_×ïþê·¿Ž¿åß÷~üíßî1÷‡=ì~ùK7ÿå÷ÿuÿö»¿ø¿ãÈò‘¿üý¿üó;çù³ÿã·¿ýÕßÿúmþ»_¤·¿Øÿÿ÷ÿú ɸüËë?òK]%½ý¯û¿ü?»é_ßÚÛß¼ýŸÿWzû5õw¿ø%HË/(¾Õ!6ç~¹?ìæ)µ­ù¾ùÛÍ;²ÄøÛ'ù™æ{’ß¾®ñÿ;Ô…\~‘…åBjâר(JtM›à"‰&ÿ”1·ÁàŒy¤SC‘Ü•ö(A‹ú„‘÷â4jx×aÏkÿ«7Äva¹÷ö1÷P3yê·hDÆW‰ »}7'âdŸáfŒêÜÜظ||~ñsßþâu »Ü¸À¬+®'Ž\ÜLÁ÷͈ÅO²<¹€øÁUÏuñíñƒC…hd‡â»#Nuc†h¥Ÿzòó:H¡¸‚ýJ㦬”ü$ÀlGcœ!½ûÆñÌJž(7Çï—h"UýryýùêªJðX€ŠÅ%ÍÛH‚ î)8,*eZ¢¹×~.¿Ýû'`ð ^÷ÿá |ãWSdúP}@\±¢éîh~¿4Îó¾sz‹œ¿·å§§ŒßÒ®à¼Ôÿý[„1‡mú¢qÇþèžg½SŽFÏoÎ æ¼}£g#öó9QÿêïŸ ƒÃï׋Úç~Ž?œæÓˆäN4!œûÓˆÆä‡Õ-ùÍùst:Ü<üphœÉ]‰·¯þþ};r¾š¥TwÞ –Ñ»x-F³”÷Ö¿ùŠ²*'Õ#µ£JûÃi†÷áfv\Ñx'ù8Ò˜¦GÏÖâQäöneËNºNW#ѸÎ'ýîK—#T4“8mgëX@}õΣ¤Ø•Ï éE3EÕhÎO‚‡×ñÕ[xT#| -«gð „#ÝÚg‹Õb:÷áæñœ×¯šÏ„üñ´ç•"8ã:ÀÅ£–wpsó„ iÁ<Ïkª§­—xGóÉ1ü¤y¡uþ¾´h쯓þô|e WŽáკZõÂëæ#^©ªÓx†6j¥·qÄ·…³.MÍËWÁ Êí»æý±òîï¯à\Ú+l§˜W\c)Ũ¢8tƒœ²z¼|ÀßNcŠ÷Yãscö+?ßIœõì=ÿpšA@FóÔÃw‚&¥±ôÓøÄ0FJ˜‡š ľhœçÇôÍ|õ -b\ ~Ç '%éKr ‰3OĈÝxB[‹¶ 
_]qEs?k°ÒXnÌ}µŸ®2Ÿ/àõÉ_Õ³#«âùÂ^ScO¦ÜÏ„Óxçf&Ñ3µ–ÒîÜžÎܵ÷t}ÎI?üúë}¸á¾ÒìIôCãpÿËyæsÄðÉw–ø{=XK÷ù¦³Î‰Hø3ÝžÅU+ʧ™Â—ŸqõÛ+ô´S}úYÐ8ÓYÇ¡åó‹Ó\êéƒRbç¬Íáßç+8—&˜÷™©V€ÍæÔÎZœùÛh^#Çi6>ÑXΧ ÑÛWÏ{~P˜žò®÷Ñ|b&·å[ÙýNn|_nìõž¹ŸÕŽæòô{i·o¹óèz5æræaO™ñċqÞZÏ´ó,Öò¯ÞBÜ[Gô8Íw?÷ÃiN¯Åªê©Á¿|Ρí@4Îó$LûMœ@˜„h.ãô-7äÄ"&¤FcÍ÷.,¦@€õ=E¬¢xâ‡hÎ)ÇfCqa4ÖôLŽ hŒÉ>ã™O¬õ>oö¾b7¶X€8y³õù -ÞÇçç\Ÿ‰F»ÀÝE[™ï#¶Ÿ™*¢¹žM&ºÓ7"Np皟‹ç¹ã£å4ŽEIøê¼¾›^ŸÙî7°Ø‘»™ÍØ·Ñ|Ö}j¤w`Žtò:ÅWÏ?8ÉŸàw•s>ïö©‹Íã·§9µùî•Ä)âÜo_=ï¹Ã8ק;ì€mFìVÏ–ŠÆX?pMÜ(;7¦wYM7—~N -À8Áý¾J:¯ÿAõ›¯_ØëKR<òY@>Œo/ Ÿýù·ûþä¼ÿiÿ r4ÿã¿è¯x“žš—ã—¢y¯Lù4/ tȧíÞTL»±ŽùœïòìÌ)ŵÏZø_÷E"à<•'ÚRYñKxæÝ¿#O*¢«Ïé˜h;íOªÜ9xŒ¸Ò?5ö^îiÏ&f:Ævc5îT‘“±nˆ“öûçÀîÎÃ…E)‹øs¤sÎ3¨”ºxýþs7×cÜÛj÷kQLjÕ;Õ?§Q’ʯžç¼%ÍJžž„d§{ÆåzâfœLÚs×iÍÝq™âÎH\gÉÌ÷}œéU\Ÿ ÞÙ©£ø´DŽËª‘^£¾{*¥ù¹'Øûœ»>;mB…B²ßÂŒ…˜úW…æåž Õzæ}¦d?[€2ç1ž9÷‘|ô­l9æ©ç•£ys§Œæý 0r®@¡IüP¿=Çh÷%û!®/rW³VøÇÚ:i¤ü®ñœ ¥ûj¨Øù¾©¶8ʼníT¥»ë84½»…NùÍçMï†=ª¡ÑWÙ¥˜!_Ðó58œöÛÍ[ž9Ï÷ð[rß—Ùµî§HpÆ,ڂϼQÚZ¾¯õJÎ/‘½^¯¤F 4¢ãý–@¶ÜÓ¶y¿¥x?wÒt#ßæiîN:Zxø|Jl(¢ç3Îj„ÜÉ9Á+Uw6¤ëµYÒ©|vCwéËçK@Š2+˜ ý¾æšÏ„üdÏæøiôóò9occpó‚9>…^ËLdµ›8ㆽ$ŸjçÒjl=ÅàSDXE^n®ã| 'zX}ÃëîÕwc_'®'@óÜ";Ñ·Ô|ƒ3'œVDu’2ù elšz=Í”íÝXîd}Å80ÝI½¼{‚DLá]N$9ó]€YíNs¿a£Há7îüz8ÚÊ z"¿Áòyk³8¹@c:ëˆôµ¾‰¿Ïó¾ÌÇóâ\ïÖÁ“¡Sc:kCš1 =žQFZòÛh~Í¢g3ÌÆóFJ¸1·ó¡<¦cøç‹Ø?XΟß/ý¤™|üL€åÌš_ÒMÐ=§k?™ó™éí½vsgºŸÚ?·0žÓ7¥uD¯BäÍnÎ=Š8 É‹tî·Ñš*wk÷TA*"µûd=˜A'·6ïã>÷Õ}Šhp¤;ƒv°Fq‚»#í “|Ê_ãζéŒ#`Ígœér?ïK“Fœà†“m¤yÏ{‚ŒØQº±×~W‘íîã™ØNH>)pÞÉÀû¹°z—­39ócõ¼É'R}4¶už˜-¥â¥Ÿó>ŽfµíújªpãY¶ßG?4WïË$l}.¬Þœ Èó{sÆ~%õxgågö8+?åTg`SEÏt¿\M4Bì>ÁjNN‰è›¬+^XèíÞáÛÝk¬AOØ wÕŠØÄØ «ñk:ƒ®Y¿¿;³˜ã¦x»ÎœÏß¿*Iø:GÂõ¹9EL9”¼¦±æu?R'×iÆÜ_ØnU-.F½§pä×ï´¬$m4Þõþ<Ê=A_gdhåÆ–×ýlî 0}÷uœ´yV?ÛˆbãÁÅ…š!žzÄ gòŸ;À õy2ÑâÕ©˜­˜j, X%®óž §Ë–ñ\@:a}'ExN°_DDåp7žç~²‘ ‚/PËYS<sÕçuñ$š/"ã$z}—Jx"Á'ö•LZ'þ…Ÿ2ÎlVJ$©Ä5{î£Ö¿ï¸N÷Ç™òæÝœó ï ñD‰ÆÒÏô¬±uNê™]Kq‚¡Ë ò<“(SŒmć~/¯‘鹜®ãv}¼FqÒ”Î:NñŠ¿¯±ZÏo¥ØxWÏ9k^7þg-Áüôî¶$ÝäÆv#gA~ù³9>Rt7q«}¤{>W·Õw:{µqÓ&í5ç.ƒ¥~é$êw9}¢­_HÁk¿Ijõf“OºžœÿM§œUGE‹›KxÞ•"Î}½w›wcÁN4ö»Q™g?I5&Íåmî/UÂy}Fº7ŽxVžåœ€„ÒIóü=|Ñ›Ôg¡ˆätTDí¿FµîÓJåüýMfôÑnc»ÓU=û‰ö>TZ=¯ø1eµ#¬ròªaÿv¦Œ>Oì°›ós×ê¤Dñ™n@ñœÆ0kµ[+ïò_gÍhp®ë œQ×å&ô(û|HwW·?Ázmãæ-·ŽIX—âj¾ °H)ÑxóAJ+ø¬¯hsh:'Ðìþ“À ù›rÒ…µzéF…ó9ûe™2œÊ¿éåôŒ”,o ØÏf²›È5ÈÙžtQ~Qû­±f8V+Ï…†=¯Œw|õ}z¿Æ––Æ»µ…D5²”ñÂjìŠws½O¦Ý*únN7…ÖúÓÍû=ç5NcE²é$ñª@ƒçMŽ9ƒÝü -y¤huš_™¥Ò_}Ÿ‹ªìñÈƽ»áMô¬àôŠ™ÐJŒuXNãü(§|ú:ÄFvÉúUPó|ßü¹qOÿ)úžHt8:qcÄÔþ±QOcžå|ç0Îå|á´³Ÿ+7ö¯QÛ£çÍ^Ä\í7QÍT=Û»Vù4Ö›3xå'hwÊõý‚Ómwµ’õèùûWþ*ÝœRm 1o¦žÆy‹ ù4=ãÌ“ã¾L§o®-—s'lNp¸çOÎw›m‰çÜÇ’ã¶ØWß,b÷rUëûÌsv²:ÏÅáPŸ‚äÏ­´d¦Fÿu¹è$fgR8A=³¯tÜ8î§Ñoxƒ2ÑMà,zõ¬54BЊs¦;ó¥‹öˆû·Ñ<W4ƒºå]¿ZÔ *ûä3(ú>gÍùô3*ï4ß"AUÓ;d°´FÌ=NñJòáRÞΧØi¿™Ò| }¨Ü¡೶›¿<óa‰ –•RœÆ;c”Âpš|íÆvgΊ"M–¸VU9.ðŽ‚“ÐCékžyhtšò:+Õ¹-2Þg\Ôf¡ßUÏ¥¶(JÑó‘%âq1¤7…ƒOÉŦ΋9?ÀÔ•ÏC‘3¯Ÿv>+<îIW ‡ Ö¿ü ÜuÜÊriï'çzòðú±~PûëþX¿÷Ÿ£Þ]¤ì}J£ïOÐîæ7Çâ nžíˆª,Ñøœ4œPÜçè -¿[Õ£o=¸ùÞŒÚGÜ4¶Ï(¹Ÿ]d¸¨×׃ogÎ9Sù,íF$Œâ)”x4Ëy4„y缯º˜öÌç/~ºg½ð–wÜú®T•¢<.íÝs2 û¥0¸«÷ï@¼§j¢sfè ÈÆ3H^øîÙ²ÿ>ÀåãÇžz‚Ι<”[ï–¨<í…CÆÙæ|ö'__D¼ïì$9Ñn½Ÿŧõ,žð|=ËCê~SsÙ%`xx föFßñª+y#_Д9|84FR—ùä<…ü®RŸ{p­Ï,ßsöµxžŸgÚÍ)çÆpF¢~ØA¸µwž½U4šç™*³¼¯yÞ/O¢›Ÿ;µz¬–ÚÂÇ.”¾³Ü‰zÜÆ[¶¨³½Nð*¦hÙ؈:¯›Jˆ$WÉô¬©åËÌþ\À‹Œ'èœ÷B÷û»•)"NÙŸ0Qçø{à™gF{^¤2‰ÐÄ 8ˆü>Å’Ç%‰=È\òz]Wî'í ™'.ö¦H²Ó1´W$}*b^‡:FâäÛs»³ÄuÕçë¯6†K9SkÂk¸œ•I`ÂCŠ¨åNÕ½¼¸-:ÓºL™“å d}ŒØuæCeÎèîõlgêýûŠzDíªE®ñûm¼Ãmžàþriå '˜ÿ -W#>Úv®¿¿îu*á''}üb”ÑóÒôtg‚)ãܬè”n>·çæᇑˆ'ŸupIÊ“f{‘è÷€¹yº×ÉÐvC£z†?Z£íPó~uÒuVj]C4Ötæ[#SøÜ€% †h¥^®;"BÑ8Ÿ“¥Ù»ÞuOðâo=±‡­¡ãË\>—u*{ã]m‘¾ã5É{ Ô|÷diÛI‹–ç­Ùž™îêª÷­ª®&*CM| ö0ÕzÍŸžÀ p’nA5¾jéä"©™¤—ÉÚ®{èd¯1.Ü ("K$EV£ä•@£­"Ìò‘ÿ\#‚„^4 -´×*›Ì`/¡š4ÚÉþM— 5Ь!…ðPb]6Z ¹Úž<‰=h”Â8†F¶â@é—Ðy¹öÃÿÑI7èZi‚{²©„ŠR’‚¸3àZ;¥öÄ_שЪ‰ñ ™‚ö¸†7/yvu -üÃäèH­QbÒâ´%ª -ïó•®´WJàÝA¤Öb¾´$œJü4]t06e÷§bC”]Ø8•Cj$ŒM#™G¹[;ùY1nQ:0ÙÕ‘®mI¶Ë{ì RY {e+¹R–3Q5’\&7’ÚCéW£±STŠq‚sÎì”+í,ýj”Z 8_€ôK˜«†lÄ„#Od¾¤‘6F² Ã%„®#Fö‡ãN5NdÙ kYá“ø&„æ5ÊÒ!{sm”h¹Vq_Cn©š¡ÓG3ü-{Žu:åZ=©² ÖØ+YZ²ÅYmÉT²Q’_ðÉÄ:ùZ;Ù ¦Q,¬ÚF*.H~ZòºörõhT½J·U)@jÖéÉ"#‘CÈšP¶Bkeˆ 
É Š^Å‹˜t`¡l$™Tm«¤æi$`,5Ú)KoÍ"Ù$d³†B’;°%ê^I¾±U°‹Fò¥’´6Šò54*†Œì¼ú«=åòµ2Õ‹¡WÒd°üÃâÇ[Õ © 1"µ]†½Ô­bÞ$‘ЫÉ×e3 -;åí9ÅX€|›xêeþ#5ß·Fª{(5êmðÀeWHp.+Q‰ò¨z;‚4²RÆW’º"j%ʉUncɶґ±²“£lj¹Ò«ÔâCRÛ+pF#­J¹Ñ^i”•²W  fAq§¢f;Ìhekèô]¤™xõ-0 w ˜9Û2ÃÔDïà2ŽÊ몉ïÂFvºA£a*‘A¯¿Ú^ ÷7JIz25öÊX­T`†”Ŧ#@ÂÎFÉöRwACxé2¹Yc¯¬[9MV­VÜŽ@‹dA²l‘€ 'N}8ŸÚ":BÔ]AÀ$µáÝRddlåF ¦´lWTË*u ‡K¥äê ûÿ«ô„ÖF1¸Jí --15øÀ1¹QéÀ y5x‘Éséì…S딲PàŽÒë”(®…ôÃ1)Šs;TæÉèìm•õ(×!ÏÒRÆ() § -â·Uwé@OVƒÎV¢jù¬yåÊ3¦SvXH}¤ïëo—¥>—Ú²G[>6Ejü…(ðoÒ€É.¼ßŸ¸hmõÒIEÑJê†t@ô$αV`uZå ÐÓ²ÿ² h4 ËJœ´v²i†êBTÕ–ñ6(‰Ðe—7l¾Wj­Ž¤ûjÿª¸…m¯%./,5ª•,yÂ4¸¦¶bÛ•„cÛ.¾òÆ×*Õ³¤:G8Z¯\©ì‚“x”:,pª|­%Œ ç [KWª¯%§NN0(.¹nôªÔ‘Âþ#©Ñ^‰ÆÙèì,Yßj9Q#¹§¤F"s05¶$‘ÝNC¬0>CêÀ^ÉÍŸIöpÂAWöDݯ¸Æ^ñ¾Zu5døÍ¡&¼ -+ -…¤¶ák•2Tö)ùZµ5ÁÎÇR2¿-ñC⨉Ԩ¶%4vOÊ7ÓHK[–Z‰…@£’±¸ÔHòÍ4’{t`ñÚÈ[Û¡Å×®¶±!O Uœ“jÉšÖ€"R܈ö’ÓF#ûÈì»z,5ZeëFJg ýƒ)C¶4è\­‘¹«ÆâaÕH{_¥F;½EÙ+{5À÷eQIâh´JÈB#å¯ËJøl¿Óèð)¥r#lR“úÅYQòØd‹¥Ñu á#äF{;ÅJQT ÄĈ)µ”¿‚£œ”"U;È4ú.•ä$<¥&D\LöDwè¥T!©QO‚½TðJ^ü²FÐ+…#d•"5’h) ‹²Êõ’+]n–üNЫede?jÔkÉ,tÙjU1dË+È -#¼4]žAÙ˜ý*µ2IqDewª\CRj´”QÄ€MêÀ ”¢[,íC²QÒx,E÷ z¶ò½RþËVéÌ ¼ÌNJL“tüƒýNr'²Vk§  {RNË"´yG4!T¶AÙ+4L+åþI$¥µ´(J#wéÛj¥LŒFʳY&7ÛÙuD(6”D³!„O+ƒh´S”¬2­Zµ¢’5J=¸™"Cv‚ u¤Ì"ÞW¢|ßVaãz ù¾Æ–p{¹&j$ûí·ÇP*~!¼¼˜A¡KçʶĬ!¼)Í iHÁ:=!—°™Î–Ì6œ¸¡l¦ÃµT¤«É¾9>·%×Ê»Ùàð[âËÀ¥~!üDnG¢þZ­’£¨&½’IùJœ#3I¼ÅZýLB+ ²ÔÂ1+ö]± ôu}µ‡7ç/“›-$D#ûO´%RªQ‚fZƒ“9ŒÔ¯AɪÑX6 ¤JnÒÍdg34*vQ­˜K­AA=¥´”¼'D¼HU­AÉqÐH*t`YŸ8b.mõ‘¼¾’Üh”]¬Z…£“j:[)ò, £–Ì4+þDRôEg«ì’ÖHõÌfH»sí”Ú‡6Ê>X˦]¤-–ÉÍzÙ—f‘NLTÄÆìf¶µ }Ëtuîoo#­lãȃiäzW:K4OÎi•:°îÕHq ùZ=!ÄÄ£ÆLqIàÄÒZ¶WhÛ i¼%¶,sœDo«!ò}²3SCŠKJͶj‚H”§ÑÙ*roKîEüry -Ò¯Þ2½öD)ÀîS%@Š2A•EaÊ5ClºTuÄ©-¸_(ô 8 ¢K#)a)aªUBЬ¨[œ+(•À°•²¶¥·°—k†Àöm2·’DN½"tdé¡+IrŽ¦ËþstµBÅlåË°”Z–=Vz@àÄÛcñ f\EV­d ¤FVºŒkõPjÇÎ2eÒÍ×“È Nå‘úEÍ:¥H£½ä­€F qO©å°?bDÆ@ñÃéDJ%X{rM—”­\íJ¯Qj©h¤ü!©M—<{[ uèµJ¥]T/Pj$ûVen#u íbôp‘Èer³Å•¦—ÓŽp£ÂGå ‰^ñ´oTmQÊ·Úh$€¦×Il—Z[¹CƒâA7¨ÉRÔëwÉ´Ô뺤,ØÙ(¥d4Jh߈|]­H&®Êh©†£”╳õ:¥@ŒFªm+5êuº®|KêW¯Ù´„\Q£Ú’#oñÄç;)ñK•½RíD#Uÿáˆrùfd¤õ]âYäÍ -$à[¯—öÜJ½Ê…: QÑ]FÖ ¤([œ5¸øßv2{кfð3‚fÅÅhY”Þ²_×/#õ“lE€éÀR—u©ÑN§Tç”ó‚àYm,UoyK3Ы=Áy§/>Œà;KM ƒRNG#‘mùi Š›YvtC'[hÚ(åìl”;i;¦Ü¨V€¦¬:q (ò¤&B£‰Òá}¡Ëäf‹MÐÈhT´$ÑOpÜ =y/­ñ@O«ü«#×¥';‰¡E‰¡Xª¹Ùt‰€µO$vCâP*ËÂ숃femḞÔHR¦å@ÿw`)a°UªKÀšUîe«8”ä-fpJ¢Z²=U‚zµrí¿‡µ¡Éžà-‹ÄÍ*Åm•øñ;P‚:ØéA: %$º\kÓ¸“Š00þ -–ÕÛu‘ -ƒ.'5x øl¢\ô]f…Ä"-µ² ¶Ê&;­T:PB-¡_¤E£ì\Ķ|ÛbˆHÒœBhñ%ÉÕTñ™…d¤5j[Kq72.ZR!¯«×‰„ƒá6å8K ->lc±SÛ’k J¢ñä@£%g‚T÷ÆXüd­ ›é-⦔ÍÓY&VÝø,JÅ›$Û¨Ñg Ú•Ø-¨æ§UÒdÈ×5]’mt’)†Ó•ÂÅD_ Ôâ4Rª -ÔJu)-E1`G5¡•r2e|ŒíÉ>Y½¢?-Y¨:‚Å Q«ì+Gl4ªõJ JÚÒZY»ØÝ(5Zvû“²‚H¯8÷Q2YÅí®“Ã×øû:K!zÉç‚Í©ðd«l7„fƒ¥X›D' †®Ub52Ä3t)ÉŸŠt`Ù©·S:°WjÎèd‹í”zb6ʦOC× ëµò´]jW©I!p½RƒE«œfÐw)eƒ+ÝÊ×Ú’š¤èT×*Uô”tù(-½¥ÞôŸÊ“³=ÚZÏ—á«E+«ÁRúÿE³ÂŠº6êíþ¢Ñ`á%–mÛ$ñâ•uñ©mäOòf´U¤åõº\kÙ}Û¥×{¹„=&ŒDòð¦=¹Y§lZV“"²%/G'ÝCæÇöÊílI¼si²Ý—”e…kI‘;y£ôªµ4’(#\kC„KÙ*k/ïWþó+Èï†á-x ™7ƒx^I(o Ù° ŠÃWVŸ÷§H8“^‚­Â²pÍ7{…[Iˆ#[çði÷„:2n$¨S«ì«—:°ûóÍ”4_­ÂSÿm `p¦›ÚȲý'ÊéOš•ä^½\Q7*ÞZ9]ÕÈÜQ›å¸ÙeoF²‡HFЦ|v -F#!Lz*«Sxà`­vZK³ÎN¹™B™Ô–WøwÚMN…û·}ÿ÷™}ö$O6×@q;²?ÂNo§ìÔøËMÞ\a©ÖB² -þØH6 þÅöOÙm)'‹i¥¿Òü§*ÆЪl.ÇðrÙ_÷±L9çíߊNsÞàXM¹YöËü¡è®Ô.7*…Ðí»Ôœ G-Â¥jò}¥ò‹¼u㯪^“ßþ­`9ùM£À {¥'j¶”ï"õ¬ QÙÐnOJüE!é†ààQv>+š•b_¤¬=n$ ÎÞRrᯋ#X*ÿÙ˹åøûJ®JÛ®üñ¹–YÎCúc-7rÙh Í’ÐjÁÓJŠ&„Õ”‚Â:­V2H:ª^ N!µe‡ŒåÌ€¿ª&'ù§:Qþr³NyC²! ŠRÙäig£²ìå'<¾®#å€ìä´¸•Rõ§žüåý»g#¯2½”Uk9Ò‰TG$Þ†?\KÅøÏuIðCoIéÔËÎÕÿûØ›¿|¬eÊ! 
–º«vP¡„j£i@ì*l1QJ“mªÿi ZR˜‰˜p{møIª€üùþäÈ#`¯J¹tØŸA©ÕH!/Ku[’Ej9Ý7ÁÖz š.Õ1,çüùv–Ãrÿízå9”꼘Ü)Ï¡TBPË?m5JÙK™¿zrC­R/תþÿ¹¡N!ÍÊN¥X•rØÜŸzµ-¥U[x¬­åh)­Â pi-å”H © …õ$é‚(sÙZþ¹[r?½TIúƒ–œ¡j‹Ýä=ôÊ yz…t’²ðø`y¥8¢’PôŸØR3ŠœŸ`Ûµ0=9OèÏFô¶´$Ãi£<±A :Éf›ÔÀEÎíßî ¹x‹â¹’ BjŵmckÙþooÙø&'1A©;eg$)ùó§³X­%½LMÎ؃Íú[%x)…¾ðµŸ±†”ѱ$máB(ÄR蔄K’™J"–LÉþüDoÛJ‹Ü^ñ7ú“f[%x(f™ÜloI”S6ñµJüB~à?÷Knh/¥–J6O«œ% Uæ Êê³·ìZÔ+Zì”SDþÜ Ñžê.‰Ë먕 -ÙranR­Ï­Ç€~Ù_w²L9BÏ‚8qUIrp˜e뛲Ç -Û[ÂÕ¶ä\<[’…Soþ²[rdŸN)y-{ÿýåf{ Éõ#ÑhH‚T¨ZYïhpõì?E¦4º®±`ù8BÜ«²“”FþÓCÛô]â0xw‘¿Ül9úo #™!–³Hq -ñ²¿îÚ4Ð`=uš5Cÿá·Ésë]¬]=78t·£{HˆGðZ©q®‡—ÏÚ?4O5­]ëîï±Ú·Z£æip¨{NN ·µžˆ~·µ¶Áÿ˜HôŸ%è_Ônmkc½Üšm¬WלÎ@XÔz!Fc/«“ÔV¸eÙ[`«©Ôhùâ_6âï®…Ç -„÷Ѹ.ôñóøDú¿š³û:ôVÑ@ØZ3‹Κïæ³ÊcÞrv©5³b o [ºð;ÌÀ¿ÍÀÛ3ÈÍDkÈVDö½†÷ÿÿÅðù ò½µÝ×VGJm¿0¾?[:ZLjž(Ä–ááúˆ¢‘‚wDÎs]/óÚ´Ab0ºhî0.${ן ÊD{Dõr¡Vw3Ræn&~•³rMÞ#´7½:¨'ãÔ‹ó íÍz÷2™W[9›X•Éݯ;\4”[—5„ò íEy÷¤=CzRhlÙ5±}…À´AFWO+£[7ne7fuX/f zÿ€¨~Bláh!gë41qýDÆ7²“àÓ^Ú“÷O€ß=sód1º| çúAãÀÃxgåCs†Šñø”Íï±ñuïСƒ˜BÔ^2œ,A{%öq×vwvèÎ& `£+FqqÕc…„Šw¸ÈÜá|xúP<&ñ%c¸Äêw˜Ü!ÌêÐ^BPú`>0qÌ度;źY9òžVŽ¢—•ãdQP98yvstvï¶ÔQPQº.hÞM¬Ÿ•£ Éé2^µ|‰ Zú¥QåÈz[¹ðþÝè}¨UhLÑÿ—9²ªó—©]Vv3ù$ö¡}Òû™|2ûWFôtæü¬ŒžÝÜUF³owgó«%+\T.‚¯³®p¨Óª îK–sª¹‹TËÑ8ºGöâÖ fCŠ†2þiŒL€•ïgådé¾]·xžƒÊÑä†ä0¦<‡‰ó±rq ëA‹=hψ^‚obÑ?aš1Ä5¾dn"S2Š]›<ÀˆÞ=ƒQ\HÖ}ÊkmÆ#¨§6ˆ@²–1TŒ*-æìøˆ/Ø;S ¯ÅùÇ÷gâúœ É ï -‰ïòY­“…Œö)Bdù(~]ÆÎ/uȦØ2‘Oßü>[ù½ž«½ø“·ýC6¾b š?ŒJà"¢¹@òÀxÇ¡g@òœÔü.Ÿ¾a’˜X3 K®íÇøGöå£Ò†‰IÙÐÌ!¬ox>¾×Úè~|PÂ.±n¼˜Šú@z•ÏØ0 ýü€OÝð_>–‹.ɦ¤ƒR0~ñý˜4 µãù¤ú |Léh.®d4š<Ö7‹Æô*ž3 ž•2£¹yDï :ä“ó -ëÍxù÷äâêßa‚Ó‚\-Yè Z²ÌYE»ô02H¿¤TË—™UF}é$jUpO£èiú­í¬WDoÐÍFwß£›÷¥F•³ÚŠöˆîEû$ôeBrýfdË‘ŒQn=YÿÔ°¶¸ðÒ|Dõ(. =sÞ`Þ/ m–?œóMêG{Dô₲ó!ÃŒ‚jñüE*“àgÅd¼’ú™Ü‚z˜¤Çy+°½`§A‚ŽÖDõCö°œ:t#çÓ—ñíËGä2›ß²vMý²Å­Aò‰æÙ›q|BÍ8!©fÒÃX¯˜>¬oD!¼x_3Îœ¾yŠ¾} -Ì›S5VÂM…Œ |0A@:BÈÚ8YÈÛó1è&!kûTÐEbxÖ0¤ û1ž=AÇÂ:lÁù¬ëÍ' ¢sGòër‡r‘%#…ˆòQ ÷À6ƒ>Akb¬Õ6…ÏØ2YLA²_óQˆ×’6˜ -ÏòU6Š ËïkIHÛ8‰ÏÛû1—¿{:[vž.ÿʆO]ú(x~&@ßLáÁ\rã&(iÈŸ€ä.³í,‹Y­°E‡g‰í“Åئ æäõï¡{†µD{…÷âÓ±…Ã9ôa×ef×¥Â6=²b4¬z-û€è~Œ_l_¬S‘®e“˜ÜÑüyw7¹úc]‡Ÿæ}‡ó í ãA{GövaÌÝœi÷nŒ² ¾ñý($£´kHveZïQ}8ŸÈ>Ò‹&ÁÇ -ð ’ÿþ,šoÖƒwHof º?ú`¬cFò˜1€Gx‹«Äç eQ¿&„L=éµ™ø¤Ùì=ò{§òáe#éU!=Y¤Ka½Àšt\‡ÆËohõ(ɇ³€úf<¬;‚ždÜ‚zR¢GwXOð.`—…¬ “A_ -~ -þ ý…ðœáÒ{—Âtd²åñ&ÖŒÇòŒdþsz_^1 -°˜4먨ê±|DѰ߀í`Ř²1BÂq‘%£„ÈìábhÚ!,g8Ø` ÐQØþ#™Aº ëXXCq•cÁ–Â<ð‘™ÃÄ”æ÷„”&kaÐß°^a]rá¹Ãô¬°n„4¤«Ò׿'$׿ Ï cËGWŽ9“6Mâ2wLá“jÆÃ3‚lÀ8"1•ÍÙ5•ÍÛ7)ùj“»w*[7–­B˜¸vü ×ñY[&³Ië'pëò‡‚.Y’[&²H¶éòoì„ÌmSø¤ËÑX ùºŸñEú'8u [>šÍØò>—Ùþ]9°è=.0y –5Ë@0’Ù°üa 7q[HÊ@“o\zÒ}/å’ÑCútƒ]4ŠHHEú^³®7Œ |@— 1j^‹ì²¯§m™Äf"ûŽôí××äÑËä„9ŸÒ4‘-<ø1ß0ž Fóçßæž ÎÄdbbJGrhiíï³9ƒ@§Ò¾È¾¢{€ìr‰0¶e£YЛˆ€Ü²h|hŸÄ¾.nÝžá=AW‚­àÐúÇò™±ñ}'Ƴ›3²åú‰¸ -üîÄú[9#þeÐÇaÌ•Á=±=û‘up:SvVCןÿ„ËÚ3•]—5˜…u^:‚ Gøÿ^4œËÜ>™)8ü›ÚþW3†‰«ÍD—Žd"ËF0 ïÀ‡ŠkKä 4®ïéÈx#<êgEù¦õcÃK†3A™MÞ‘½œÍV€Iñ‡BœÌŒô·gtoÀº‚a|Hñp,ÈV€}ßÀú24}¿6¡?]8ÂÔpù¦©sæß>Q}±MDókj¸þ‰©¡óSªøÈÇ|RÓO¬ÒZ¬A‡®FcÙ°âï‚®Avk0È'àD<ÿèýží ˜pƒ_;^@óÀÇHvÙÙÐÃs Y†ç`âJG‡ånÁva°ÿœâ k¢ûa,‚l'+·#Œ‚t²‹hý‚L‚bÑsïÀz5ú™KoyOE²‹þ†Ÿ-£u’©þ¦ÁØtóSª`ׇ`¿ÏÐ^°Þá>°v¸ˆ’\Ú¦÷$kØι‡ô W‡ö„uã÷¤ƒÓÑ\!Îìê×8#ƒì…æˆFÜ_Ò¡ucËš\ƒ0׃µëÆÌú”E6–õAœÙ$z Z‘½(d™„÷|²ëò‡Ðh>Ñ8ó¡¹Ãˆ«PnÝÀ—¸žvÞîgåÌz!â‹l:âbAèþà'|óŒtØd„…ûò9{§ÃúÛÌF 9 ÎEs˜1t?àCÚ}M“g`ãê Ø¬Ëlò‰ìeôéé( µ€x¹ „°ihðMÐkSûSn¡=¯ók»;Ы­€Ÿ‚q3ºvg½ÑÚD÷žÎúÄöá<#{ƒßáºÁ|⸩hŽ§elL7ÜüÜÔxç3¼¾@Å15_ûŒi{¹Ä¸õ \˜ô6t}õ;X£ÊG=‚{8±B7½ ]ñš->idž 3­î >. ®?¬.(~èṀãKÁ‡‚¸t5â•ÌáÆ"úþ¢³†ŸáÂR3þ ý`Ma®€¸6ÂH½h„M¹€„b8–Qù#^Dý†xOTÂée#±®E6 Ù·€Ü#lñ´æ>`‹Îâ·†5È¢g}TzBÍ´>ù’Ýz߉Úto—³{c˜1ð5ãƒ0五Lxá0<öHö@®wî{M­‰îÜ‚öC׃®¿PPÖ Ê;º7Æ!€ BK‡±ÁyC0.ˆkDX¼Õ0—˜éh¸áj6c’±LHú`zmR?Xk ›XÇ¢µ ߇ŸÈ.,^¸Tónt‹ì üq!+iý„¡ÿû#¼·ÒÊÈzZ±kûÞf‘Nÿ‡‘÷¶Â˜é.¡n—²a"Y5 -­¥Þ´G\oÚ;® ñj'Äi]\UNܪn&qzÔºñF²ëÓ É[OàëÎ"’=>¸»‰GkdÀ3ª¬C£Ûºx! 
~KЈïõÀöÛù´BHÞ0ðFÞlõ»€Ñ°®Jiz—-?¡aËkãcΈ8.]õ=½íþrf÷+Óο/£Ê.©Ùø²1°ŽÇ9¸€/ÂËŠMªoj¹þ7ºèðÇLHæ`°EÔjto„Ÿ0nyLj±Áƒì-øžÌ‰õ‹‚þf½1_ï-!<°.m0ÆýH¦@‚-Ç:ÙXÐe<æëÄĆwùœÓøœmS1ÿM¨Ÿ ¤4L >C!ºp”…ø ZB\ÉXŒ—³6¼O5]þœª;§gÃs‡‚?dY̨}ÏÙü!]qFCm¼?j}4׸éɪö=[|ÂéÌIÜšŒ`k)ŸèÞ&ŸØÞ|TÕhÀ«tõ:¶ä{5[3ð!íÓ‡OÙdM×\0P-wÿx–KÞð.ß4Ž‰k”0Bö¡éLÕEÛpë ®éÆ<®áò4öjÐ Ø®$Õ^ÊTµGØj¶K uïp©­1>F>÷ÀGláÑ™LåYí’Fø2@.–/5J|ɬ)àåâýðwðOÞàs"þÄþ‚µ‰äü<àO`³qˆ·Ð`ý£úî5­DúsUt/àt\ Âv‘•£ØÐ2Œ­¹Àü! —ï€Oظ2´È$ã›Ò *‚ÖGwf•ëß— ίŽï²I¹ú÷*®KǾ:1¶üðíbglé!g×t>{ÇTx ?Âpa©Cëññuãø”ÁB`Ü.­ñ]SýYiû›%Ô®W7¿ü‚Ž- òç(¬±¿,åÓpŒŒ=^çHO`|ü·b<öéƒ){Û4ìOÂþŸÊwÄÔ!à—a|Bzqàë _ -ˆ¯së’‚o\ E6ðctÈè >áW¤'1‡B|ËÚ‰øãÌ9€§`¾ПGºQLÝô>[¼&¼'Ÿ±~‡~§š.}ÆæíšÊD`# Ï -þU&<}ŒÆé%6wï46µe"æOè»lÑ¡™LJ›5Ýò`A˜/°lÞÁèæG_0•WõLJë»`¹˜ú±LùaSËíÙƺ³Z6cã{LXá0ÊÙ¼P„sÒ7XÃ:àë®Ì¡n|F5Ýøœ­8¥ƒ÷œœ‹O(ÕìžÉ”œÉìùˆK^ÿ.`{É-æpÇfÂx«.¨-Ÿ¯æ×Ä÷£Wô¾Î_;‰pøð;ÁºÆX*²t$^hnk X˜ ÊÌG×J|=,gö1" ö‚ «@xt÷‡tñ±™\Îé\xÕ(Æ3¢7ü‹®kÊ] >½Ñ06BXõ(Ý˸2 øX9$ïH߂̈8<¬Þ+º/ĄĬ2_íþì+DºŒÍh{û»y…öæClràÇCœ˜M(CUµ¡·?\Æï¿c¦Úž/b“ÆÃZ1z&õf=úÐZ×8·®hØ-1ñèè¢ÑÀÏ!6% 9²‡‰qåï_SZÞ×å þ¯ä†‰À•±¿<(e…x oÁ7(óõÀ±àÙÀ·€Æd—Õ>qçi˜7Ç£ùƒ¿!ÞŒ} Àù@×ä혜žË?ð1èDªö¬Ž)þv“Ød:u…øãÕ|Ÿà|Âz…õvMišDUŸÖšê/˜œ½ÒƒÀ¦ïš_‹¸wòÖ÷¸Ä­Ö`§1—D6]ÈÜü]wJO!^‰l6ãþç dSÂz"]:Q¨»:—o¼5ÏÜ7 0è#ZóHoõÃëÙ5¦ Ù®šï´LåI-ø½0¾® sFUÒ˜6<œmÜôf®sëÃÏ_ÌcFc -||Fü´æý£ûþá¾åJÙð©MÖBróDăßç’«Æe×ÆôÃþšÔ¶÷éòöLÁWÓeßÙ1?¼ q%6iÓ»lá×3éÆ[C|]à Æþ}>@áëlñ¾™|ÞÞÀÿ8Ö*` ‰¯ÃzI@|=m(Ž¯¡y9qMÚ8IH¬ùº;âëȾGÆ~猶0_†¶¸A\î–©\æ¦÷±Ÿ ühî¹–+ øö›ŽÜ®³ùÉbcã·c -ëƒÆ|5ÂâžçðÝ)!¸;àÌý‘ƒµ Ï $c¾˜ -ø#Øa><óuìOBòÀžÁçnŸ&$Wç1_/Ä|ËÚ2žóuð!Ÿž:éa¦äÐ,ºê„š.>2|ëà§Ä\ áRÌÕjÆALŒK®Ïäíĺj¼ü »¾cÕÜ9a’ ÷€çÁ:Å>[¿Äþ&³‡ð.bŒI5ãMõgt.[ÞÌc7½ëdB\ÙÁ¤¢Ýƒz`|ï—1Ez8¦x4Zãè}£ûÃÚø—¸á]ÀĔ走žbBËD¾þÆ®©c¼ã—Òl(ã‰îš7 q¶‰\áÞA¯2-×æRuˆcìÇ>Wàg|öÖ)¦ÆkŸ™Ú^Î7nùy±ö²VLc–¶}Š9¥ÑZ L² ؆/>`ÃTÑ1M—g3u?| -:¿'²©lùa{ĉfðéMÖ  ¸æ›ó…¶ÛN06tÕ·j,Ÿ9;>„ ›½ }¶Oa -¾™Á&µ[Séœ >jò±ý*$o°†y›~ Ú7¶ã—ÜßäÕb ¬â>hp¼»à°-~6ð?„ Žt—€t;èEðσü±¥Gl±ß(4àQ¶hßÇìúÎ…ÆæÓz6«ÉšO­{—ÏG<"wëT3Ò½˜3ýýSùR9‚E\°¹Ó -3Æ@`g`=‚m`÷Ö";d ëÉ°‚á8NÜÞ7¯'ìCO¬Ïflÿ€ÍÛ?]Ò_èþh1fŒ)ҧ øæЈ&À?A§ ûkëRÐ`ÿ# ^nFØüFð7àçøzK¬ŸÀT±c ¿™…ýþ ý󂲆˜ýÐð^Û󶈌a\Fù¦ýö2l?"F-sUŽŽ‚ -ËÄsÐ{0þ1}§ð~}Áæ5§7ïf^W<‚Fü€óŒí1V¾økµ˜¶ýCa6 -|¤ˆÏ3µmĪøü­Óéšózºá§LéÁY|lí;p­€phî¡é¦šk:ªî².<ô“Ö4üU8Ƹáuˆ™ŽÓÑúF:|³ ?á]‘½Ã17 -éw®õÎbjý£9 ⶀw‰Ã@:…õöï ¹à'ÎÏdÄþøña£GDOÏnKÐà󴢂ÓR>1½?œà‚Ö.Ö<…l;ø“ gp”9¥ÞÚŒì(p"ð1 ö^ºÌ!ÀpŒü—S1uW>ãóŽÌ¸]áSSkçSý%ƒË†ŽÏ§ïÒáí‹«—8¨Œˆ«»PnÝØU=ÅÔkÐ »Zá ªœ–ñX>á^b0ZÏà—ô þˆƒlb^˜6ñë >,ø=Ò7àGá"‹Àw3‰Ïß‚0jû‡BâO+÷—|]bfë1cý—Åò ñ%Àh ièY"Î@ò‹äR È„ýg`[ ÷}ÄT×`¿}°”[Á‚žÊØ:l=ø7ÓùÃL«{@nègˆ«òÁH>‘œ!=6ßp}1S3|)+Œ«»Í!ݱ̀2¦`$à{14i0ÌX:Î-¹þ=·¸Â± Ë„€ø ²q”ÛÚ 7æHļ¢û0Á½°!©y"pGðíÂ:ÿ;Æ¿i;>Àr3ôW|ÔÖÔòô ºåñºô[[ìïÍLmz2Ö)š4âO°~¬‡ÊFƒ=Ûʬ ë9<À©ÖŒô1p =µáú\Nûð?rÀ…(o+GÕŠeŽ*Äû»Ïøðaà~TPÞ Ð‹N¼Õ—‹hÕüO–ªæ~þ¥jérF¾VÄâjF1¹HßF ‡ôæóWd3Á_èÂy[þ@àåˆGð‰uãÁf&»œ—OBü SzÄÆÔzožqçËÅhMi(Á¿û—ŸÍS9,vPÑŒk7ø¸Ð¬ŠZ…Æña>ˆÝ.[B«–-¥T¬wlsDé(Ð×Ø?éÑæÇ5V­í‰zö„±âýýA: ˆíOy…a=„±EÙW¾öÛÏ°^Ì ¾N„‘Ù²£vLÕ -àL6°'²ƒH7ÐU_ÙaŸ}\ÑGM­·†8*èa¶ì -»ùöâÑ ·!†bŽDº<«å~Õ/ÅÍw\„MèÖÇ MõWõtjÛDX'lÎö)¦º“û}£¦k/˜ú›þ ùKW¸ªVðVLbë¦øëYà‡³·LÓëßCp¬!srâ£ù£ךã -Æ?t LÌx¡¹ð íE»ùt§x_+Œw!&Œì<]{ñS¶ú¬E¸û r·O=‚9gtåh!}ׇLËÃùì¦gK¨õ¯ç€_šOÙö>×0Žn{¼Zç ÌyC;ùaèqK¤/†à¼ˆ¨Ò‘ÀËÁ\ø/[zÐðøÐψ>b@Âù‘ý öÉ¥ošD—Ÿ×°ñ›Æ›ü3úÓá¥Ã¨â! ŸÎ‚Õ¢ª9ó©/rR9°¾8 -¸“n¾>›jD<4¢b$ø°\(dk¼‚za>QxÈr¾@FiïÐ^,âÒðîBËÅELÙ÷Ù6ì{*ãË‘X=x¥©ùÚçìƇ‹é¶'‹˜âƒÌ˜¸•VÜêÀž‚d?Á+°·èÒGŒ«Ÿ ïœÉ•~e'„d ßš ZOäž ¬)ÆÕŒ! 
-p£WDoðµ9›D³Ò¯‡^Æ:*oß4¦ä[&wÛÀt›Cð>’ÈUak.~Æ6\ý8]5 -xÄë÷àõXòöLpUßèùæ_2í–Š{¯»‹;:9ºàèGtTÃh:´d˜)8oÄ \Vöpbܺ9!9 €ÖË$ÐkZßà‹‚x‘ÃJ×n\(æìŸiNß4ã™u¹C!_ƒn@¶¼ñÒ<®ø¤=Îg€˜Ò¯$ÖË%Öcw€ù1Õ|¯aÊ¿²²vN¢ª±|sdN«yOÌÜ0YÌ?0 r:$LÓ<ûÞÁ–5ûØãªÞ>Ž}¡9CÑšŒc<‘YÃÀ¾±îÚ¾æÜçgGö;ò;vO36NŸ=Û_ŠCç •1”-Ü:Ùvc9÷͵Õæ#×°9û§-]´Låè,¨p.Ûú óÅÖ+¸M×—˜šNêA§0›;Ñgµ\rëD.½øÜc+ÆFÿ?ŽÇ¢9EœjÄEM5ˆ›".l¿C™6=˜cl¹ð ]´}*“¶a¢ŸØ8ZW4„Ž¨Áä|=Ëüj:ZëÃ1Êß<|^BjýD°™S€¶ê°–©ú^6 üyb(âÉ-ÖÀ…€§r%‡máïlóÕ9ìú 0?ié˜1¦äk&k÷ªìàÇTÝ “sd:ß2ô·©ú¬šÙðdäý¬p¦U`+¥<ƒÆ‰<ä5Àý Ìäk/Á¯¿»ˆ.9e 㲃cB¾ˆSC,8:øÒÀ"ŽÄï¼A¹ù6@<|mµóÖçs`-ÐEÇg2‘M£™u©k_ÂqàuC°.Nlœ`j8©§×ß›ò¾áÁ%ó—©Œ¢Ê™GöÙyam\3Ø„Ó™5¡½A¿‹Áù8¯˜O-¶ ÇŽ · r~]äò÷ÄçüHLj}bB€Óp£„ø -šKÈøÀìcPê`ìë÷Žïˇ–#½í¾øˆ=è?!ÍShúÀµ˜¿æî2d—ÊÅ~uð›˜nÉ ùàâÆ'_äû¸Fv J,Ääâ³7OA6Út-Ì­©­c¾ùзïOG1{ï!Nµ|‰Q¶ ó5ðí¤µ¾Ï웎8½=½áÁfãõtóÙlù÷Z>{Ï4>½í}ÐÇ8æ]6|tL@Ú@ƒÍÛ>•ª=­£›ïΦÅq¡´FħÌ>a¬» ã’·XSþ©ýL~™ý‘ŒŒgÒwOæ -ö~ „Mm@\¥ú]ð³òEhƒ€|¼â]3èêo4`³ÅôöÉ€ýˆsÚœÍS˜†ó³…Æ« ù¦«óé7qü -ü+ CM-¿à¶v8Ñ›/„ûƒ/Ö -Ó|k¿ë.y˜xÿÉ"ÂmsÛ¦ï†Ü]È;dKÚ2{§³éÛ߇.¬h8[1ËÁ±™\\ó8 ÐÀ͘ÖÎElÞ¶¹„ÆñLÚöIL2Z—‘•#¨¸²‘àk\:ºr¤‹GhOˆ™qÈ–Žâ7Þ]Ê!¬¾'çeù°€û@>Ydç(÷µˆö&»aðǃL†J¹jØS>†-:4û)¾àÒwNæâˤœà)€ÙÏÀ9ºË˜`›¿k΂xÄQV†÷„8Ä÷ 6> -ÈUR›­q^9â8ç6g'ŒÓTð)Ñþ!½qŽ ²‡Àk@o`œ>quãD„_ÁW+&WLr·M‡ü^¦ò˜cfÕ×–p»îRÆÖ‡³qŽÂÚÔc9*ØïXåð™pi­ÖX·'†õ•¹ýCœózø,’)Ì= w%±f<^«ó‚Oœ)>8ƒOY/½SòÆ÷˜Ò½3M®ýÍTAGgï˜LGUŽ„œ6¼|›‰þ_sZã?ÉõïB§²ñ=Ü7Ò #¢jNhLUÇm±ß$³ýÈ?2È —·ÿc¦þûOÅœíÓqNäCî"¬¬]“©Ïæs›_.çÒ÷ˆókÐ㲬öɈ3Øòé-Öp¬ƒ¯@kQø¯ ˆ£§ÞS=>8§¿áÀºH?q ã î 1?Æ+Û}˜cÚ/¢Îmo1Zà>THÚ@ Êè춶;ð$gW/+“/ÂÒImÖ\Ñ95à.nÃxl›Râx¼è×ã°µ1ýdA_Iù©Øß‹ï¼|©Y;?Ä|£àÀG÷ëtĦq%ðd¯@>„ƒ¶|Þ0·³†X=ƒÖø1!~ þBÈñ?êæIØ/1S¤{q,|Xð=„3؈̡XF ç€X3Èhñ!d?Äþtˆs¥­G2^5ëë’#j¾t¿-ð8ÉŸX7‘Úpc.Óþh‰iCÇl6sÏ.¬pßžá˜Ï¹Ö»‹9°ia%#ð»Âû n² ú|ÛÒ§¦–«³Á¯‚ß)=[Þ®©l’Ÿ²oÔp [¸ÿcÔ6 8ä‡ÑÕßiéÖ{óMm÷æ/”IÛ4‘MÝh yVàë15\2p'l±_­ È×¾qnþŽ©+65_ý”jøÁºdö{@¾êãc.kΗ…øÄyÁ§ù8tå)5Ýþl±±ñ† )J­òÁ¹’àS„Ø5¬w.k÷T>¶q’7Lˆ.¾"´&ã8P\)Þ7‚cÿ¥Ó\XÈÙ2 Û~Ì?êÇ㿯IíÏú¦ãÎQ÷Iê¹(àG3ºúJ9õ«z€oÉÅ5 ;øŒMkãû™¼¢z™ —& y›Ô2)9iGW_ÒÁZ0¹†ö@¼ÞÊéXÀÅ› ¸ÇËb‹F±ñµc!/…->n+AåX°©ø™!)ÿèLºâ²Žª»‚ýÀÒ­ ×'êšÚü>楀ÿ°ï“Ï>øÄ%À¦øœ-S…܃3ØòSZŒY‡E•Æœ'·ÿ#ŒƒÀ/Œì%Ö IM°%ÇíéÚÓì¿DrÆu*Ÿ¿ïc¤ttËÍ9TãùOéò#v8†ã¿q2ö BLâŽà3No™ˆñ²gTË¥¿Ñ5ßhpÜ* cY2 -âü|ÑÞYàOcJÛSµ?èÀöSµ§´tÍ)S~Àô2U‡ô4¶uÈf·!›½}Æ´ÈfCÎ'—»{¶ý›il½;›ÉÜü\ þCcSçg“¢‹OÙ1Õ_3p›ï­ 7?û’j¼ó–û¢C3éê3Zªñò§ÈfiACñc5ø¼ùô-€þ›#äìûˆ-=4 û@×ߟÇn»ãH5\þ„ÎÞ< òî Cù®í‰íâØÔ†'s©õϾ`J/h`A?B+SzÆžŽ¨ÁÄ4Ža3¶½O¯0—oíXN5^ûûa_pˆyAÊ&kˆ2Þ©ýð^•¤í“„„!‡ âîȶÏÄ9É ƒ  qÌcŽ”íÖlêŽILþi¦*t_À¬Qµ£L¡¥C©¸¦1tòÖ‰¦Š‹ö¦m?}Iïx¹ÂeÛ¯ Œþµ‚úö7Wê䛕ôÁW,½å§¥¦¶ŸæS›_/`w=uŽvx‰ßÞ -¾»»–9üX`ZŸ-bk~øÔ5³i²Ù?¦?ø%pî*3:o7’Iô)çÈ È1o¸åÈox¶|PLæ¾)ˆ?¡“Ú&Ð)[&šÒNr¬»eë¸ïí㱩¿‰¦ÓÿåNÿ›§ñüÿ¬t¹üv}ÿ—XîÙ£,æÞOñô•çAì©—~0væËS„+7â¹o^xÒG^óô±W‚ðÍm?óÑ›þ®{¯¯2·Þpë¯-›n}É´tÌGºçS˜WªøèÇLÓý/„MNn›nQ|Óƒ…|ÞÑ™ÀÝR&‰qEcÝÖe sMÝ<™+ƒØñq `3$oƒ8ØGfûCfÇ+'öÀ–=pc=\ɽåÅàÉo{HQm¯R·?5¶>ù‚ÝþЙÝû€aКö¾v0íùu³ãµçµ»ç!Mï}ê¾Çóû:ñàÕnÇÏ­ã]uc6>\hl¹ù™iãÓ9èóÈSü È%ȽéÉB®õþˆ×ríA6W˜·Ü Ì;®pLë…ÔúŽ/@÷±þkê˜ 1 Ó†G³M›ŸÏuÙúã|zËËÅôŽ§ËM»_/£v¾YNí~ãÀ|õÊÌ{ôÀ3–úæÈ~ûÜ›?ød%¿ ½ãžwð¶Y8Ô¹’=tO ÷>q³Ëþ_—üºÂtägš=ùÆËxúÝLßüÊqï‡ -¯Æº}ûC˜ÛÑÖŠ{®š™-Ë™¶ç‹áÙز“j°Ø® |D#rž‰ìúW Y¤ Vn;çÆ6=˜ç¼Ò¿»ÓªÐ¦ÀüA uýþt¤ùëËüÞg®ÜfaçC^l¿O‰[nÓ܆Î%|Û³ìægKØܯ>b¢«F±Ù{?¤+ÎiÁÿƒî©üÂg¡u^rNÃo|¸„ßðh1½þÙ<—ö_æë_œ7þö¹sû¿f;ž~krîx»Æøü¿#„W7 ¸WO²M7~  :‰d^¾I¥_þ–,þx®hõó}¾vÔ¯oòÏå™ß.r{r©ÂûÁññÖÕt4Žýݯ+¹“Ï|…s·£Í'îó{î‰æÝ7ÜÜ^ò_yâd¤ûásA¾ëfSÛ/ Œ÷?á¶=q1¼áÍî|a¢j¯Àÿ>c®íÞ -¤‡Í›®Ýw]õpÝqÝ |“Þðp ²Ðúx½ûž³p Ã?~ÓW<~Ç߸çŸËL‡1ÒG_™¹Ó‚¸“O}™c/Ìôá—,÷õÃUì÷|™“?yЧ_y1ž1ç_2g~ö¡NýÓƒúî7Wã‰_8êÔOîÌ¥§âƒ‹ÙâÃó9üÕK±Ì×Üé=Ïtû‹MÍ>7µþ4Ö³ï1Íz(;ÜÙ}wX¦ýþRÓæÇó¸¶ûæÃW¼@¾ÍG/­ávßcè]Oèm—s»ÐÂþ»îܱîÔŽ_—˜ö¼YÎî|`¢eÀ6Qß¼à©}Œ¦Ã/ܾNŽþ¾c5sû^(÷¤#ÃõÕù2áÇ;…ÆηÎ7ÞzRw~cn?Žf¾{áAmýi1]ŠlhLõàeÓĺáR°à¯r.½>ÃyÛÿÎã<ÅÓ—Ã=¿:.Öß\ÈþØeçï‹L»~[j<ñ3c:òšviÿÇcÍ ­KãoŸðÍ?.6·Ý3¹ï½æk>}6ÊõâÙä•çO§º¼#|sÃOÜÕÁóí0VÙôbð4Ù9vã«Åj…ØMŒu7tƃw4ûmýâçDá§ó -¸Ÿå²šÃýt'—ýõeóúešðúb‘ïýöŠ€Î U— êÊ.eÖx?ÞYE½þ¯$áeG‘×£ýµnÏÎWÏïˆ;òè›ÃÑx2ßýèÉžøÑÃØöû\Ó†×_ˆÇﺺ”`þêž/÷ís/zÿsô© 
œ¿évï\¡p±3–û®ÓŸ?wxöj”xáZ¼Ûµï3ùó¡â‰›ü™›ÂÉ»Áì‰ÇžôwOV -Ço#y¼â/~!H8u5éYÌüø<Íxãï~·þ¹ü?º’á|âæ‰TË£Ùô–×KØM¯–€nÛeLúz¢cË-sêGo·‡gŠW=8Yá~óTžÇãcÕ«[Í]ëˆ1žýÝ}Åþ·s\Ò¿žä¼:¦×ò%®*šö´âv¼'¸µjÕão«üìj -½»q£Ï½C®·Ïå€]cwuRü–»Nü4Nßs»u2×üí•uì¶Gq×=ô ûðnŠßƒö$s5 çÓ«r/äUƒ z<Û[éúã÷¥âO§‹W=?T{«Ëdû…¤Ê½+7žO­ -ìl©‘Üz<=Péó`gøêj‘ðª3õ“UÌ×QÎß.£Ú_dl|ú)“vd -½é—/ÝŽÞ<ö1mþïù¦¢ó3˯Í4~ýc¾-×óñ×uÞÔ»¾¸VÆÝ»“"<¹½òÙ•â“kù̹ÇþÆS?¹º|õOGã±góÃ}?æÊýPãß¼™§·’`NV?9RÅ_¿ëtà‹Z¯Û;¶½6¬Ø÷¯Ù+N¾]êÜñO_ÿ{›k×_N©i¸’Z—#»!õFQcHgK£çÓý5â/WJ…Ÿïr?=È统¿úÙ¡Êuw[ê}ïo­rùU™Ëõy;îþçkØ•i¶ÛÜ}÷Dn;ÂÛî™\7ß2ŠÍ–° WþFUÿ ¥¶¼Y$œ¸ïïzåbŠxñJwä¹»±ísåçf™ŠNÏp®8?ËtôG“xóbª÷£c ^O¿®7?¾XÀ?º‘áúãÅò°»é7 šÒn46_K®Í¸Y°^¼w.‹¾õ ÄýÕÉwJZ²ngW—ÝI©Þ|-±¢ýF|ÙŽ+ åè}+³.çVåŸÏ­Ì¿’]q»¦zåóCåìóçiô…WÌׯÝÅcwüÄwÅÓ×Ã…cw|ؽ4»½Ó‰s7×åÇ·Qì¯O²ü;·ÖwnjìÜÒäß¹³Y||=—>÷óç ÿ#oüËÇtõ}μu\~üíB‡ O5Ž›Þ|Âÿp?låósU«Ÿ¯bî=eÎ=YË\x„ð³ÐÚáÈl}´ŒßÃÌýp9ØíÎw¹>v×­~x¤Ò÷ÁþÆÐŽ cîVÔf\Ï«Œ»]T^x5£|ó夊Ý7ãÊOÞŒ,¹x7¼èìµèÒSWbJ/܈,¾z=²ø"úÿå«Q¥.Ä–¼_¾ùbreáÅœÚÀ;j\_}[Ì¿¾‘·êé‘ -Ó­ÿr9òÖ™Úÿÿ\˜Ý¿¹0‡^ðüáÇ+¹-o™–_[žÐ̾ßhîâã0ñEG!÷ä^ûäQšøêf‰ðÓåúõóî燹æWJÝ^\­`î=‰sùú¿\LÇ™øïoø™;Îg‰¯®»½:S‘r£¨9ÿvöúζM«Ÿ|]%¾8›ãñìëï§{꼞ºW^S|=­jﵸŠ³7£JO݉,ùöndÉiôóä­¨’“7£JŽ]‹-?€ÖÝú«ÉU/%W5_N©Ê¸š_cþéû"‡‹o9ÇÒ Ó\¶¼Ãí{Í _­÷=]%œî v»p.ÕíÎÙ÷‡çÊù3wƒŒí¯ç™Z_ÎÚ;ŒÂáâé»aÂ)d“Oü²J¼q3=ÿJÞúÄëëÝàNÞ_cÚþ_K\oëŒ[~YÈyÁswSBï6·øÝßÙäöâ,Ò…'«Ý^ž+~½]¼ú٪莪Ɔ›Iµ57Sïµ5‹¯Î³o%û<ÞSŸÚQÐv;¯¶îfRÅŽkqeðÙs5®ü+4O'.ÅV?—X{ì\BÕž µÓ*}n­äßÜËçvd˜¯_Ëä¾à/\¸ÉŸ¿)\¾ k£â\&’뼪Ü˹թ—Kj“®•Ô6K«É¿œ×H?yžäôÝ[ç»o}½‘~*¼”S_{6£zãÙ´ª¸[•H¾ššJ®d7ù¢g£^ÿ˜¸üÌÛåß½]nìü=úö¿»{}ñµ‚­× ·ÞßÙêùä@5{çr4wùRHêÍ’Í0Oð9q5¦|Çõøò—“Ê÷Þˆ-?{'²týøj¯g{j¨¿?Kv|þÖÇéåÿówþým¸é÷ŸÝ^)Š¿V^›5»¦þBjåæ³i•±W+«ƒn7Wßn©öíl¯0??]´êÙ± -ñùÝbñÑݱó~.wøï«©ÿ»”ûúï«:.Vw´­èloðz´§få‹o*W??Vͽy˜mzþ(æųþ·k…^wU­zy¸ÂéáÛ5N?zÑ®Çx=ÞW›|»|‹ë­“™ß¾]º¢í7ÍŠ¼#WD5 sˆo±¢¹ÃÖéò?yêÅ“ø5O¶Ô–ÜM­­»žRåùtg…Ó?Þ†,òÖuÙó·ÂÒWoÅ¥/ßrK^½e–ÿøv•Ó¯oƒþõ6”ýíZ:ûûµtÓ/¿$,¿÷VXVÙñ¡ãþ·‹¨3ÿZ-œëŒv»~-Ç÷ÞÞÆøë•-•—³7_J©¾[»^|v5Ÿ»}7‰½õ0†¿u5Y|q§(øNkSâµòÆ諵 ;N¥V>“Xy»éì¯kͯ.—­zvªÚó隰Φõy·r×'Ý(kô{ÐVÃü×ÓLúñãxîÙ]¤;/—y>;Xãÿ ½.¬³¶¶ævJmÆ­¼î§Î\êéϱÔãßbœo¾]µâìÛK÷ýhXVR;fiÑÖñ‹Ï½ãüä÷µü߯f]É«Ùp.¥Ù½ŠgS*2.æ—ûw´–{>Þ^μzÆÞ~Í|û“²ã^ìÝÇ >÷Ô§]/lÜ{:©âÄù¸ò§“«vŸIFßM®8z&±bãiÕO}y¶ ìkڣ?$T|.®´é¬ W£Šì/|ƒ>g‘®K»•Wcüç›ÄÅ÷Þ:;ýþ6ÔëÉΪMwjÎvD”¹SqàNLeQGF3ÿû¥þ÷«ù`Ý<]áòêaK¿~ûÅŠ¸¢Á‹¼»Ïw ±úÜEPéç.Q©?ù›Jk˜«²ÓÎQÙ¨?WÍÐ~¡²Ñ:¨f›â»-ˆ=2tAÛÿL[xëíBê—‡‰ KË7žÈªÚñ]Ze٩ܪړ9UN¥U”|Ÿ_‰°CÀݶZîÅóקWʼì©E¸¨¾äbNÃú3µϧTmE¸¡úrz-à<—»ÿ»ÆõÍÉÒW’ªN ývìndéîû‘%»F–±ÿº“½|ëO‡ÈŠ!ó–òªÆ[«&õ¥š¨¥¯®z}¬ÑïÓû¼£šõÎdÕܹ¼j±kµÈœÒý“ÏhÕ”¡ãUãTcÑUcU»R ±zG5¦‡µj|Ÿ)*ëá3U“Æ«UÓ¦ÍViUs³¯Œüò›·Ÿ.¿ö–3>ùïPêÌÛÕÜ™ŸÝÿPÑ~&­æÔ™ø²3çâ˾¿Szô‡øò¦Ë)ÕÕ3j³®æ7¦\-nh:“Q³ ÏiJEÝ™ÿ{ïU²î}Q‰JDDQ 0¢(&r¦»éî»› ŠDI’3Š%GA‚ ÌŽaÔ1ç€bÎŽŽŽifœ°÷žÙû¬§îbfŸ9á=Ï>×õ¼ß\\mÓMwÛµªêµêþÿšk¯¯k -}{¢žzõK*óîm~æãšæ{OR7=|–²Ï¡6ê×w>±Wß/3¼²wû¤oîŸsÐÌ·òªµOߧYÞ‡ÿ1Ïç¨àèÓü`’“4 -M°ž‰Æ´AÖfpF ¤‡ô‘ÒÅ·aø‘92EÖ#Ðk4Ó!- ®ÑpJ=©ïÜöÑÊã‰à)ù$ÄIÒƒÞŸÚ¤xûºœü±PñäM‰òÍ×Q/÷6—Þ©lm»\ÒÐ}¡¨~×¥Âzl¶_,n}š\)¼N¨þô"½ã—w«+b^uÖŠþ*D»]æ¹3Y¢LW›íèŠ&O²FÓíl‘{È*uïâ^3ŸÂí¦îQk5&ŽµFÑ!Š† -¤I~´p»4ðRÿý±&~F·Z¿J?Ò ÏéãŸ1SЬ9 -´ˆ.WwÛ/Lãß])¬?]Qßr²¼¡þBY}Ó…²†¦«¥ -×5ö\,nØv¡¨áàù‚†Sòê\È«;v6¿îž›û¯åÕº’Ûp¹?³výíŠ6îÇWeü˨_¾ÏOyÖÔ|ö)öÁÏSª›žä6y½DK‚V"[ [<ñ÷‡ï6+uÒ -è=|ƒoª†þý€¿ÿçC´^ gÀ Q3Â÷†H[Ý?FMG3æ…#·’;fÒØo|r°ŠP¾{ZýlWËÎóE /äÖn¿\Ø°ójA¶1µ§¯d× Tµ„|{¼ºøæÆV›_^Ê©;p5·¾åZqCô‹ ü/Ï*¸¿>Þóxcë—k6Ö>Í­ÿUˆuk<7zî?d­?·a(ùþÚøÂï¸M¦xÄãßàwµÿÒšÿ|¨‘Öý¹ÝjøúNÞ<§-ANžpß#L}Vp¯¾- üúvUس“uQÏúSïµ´¤Ýol>{DzxLB¶\Z×ýxølQ㙋¹¤ßÀ–»×ðèZNÓ«›YuÊﮕãÏ‹ô{%p’wBŒìg!Ýûºàá(ŠDF¸‡þ_ÐFõßÏü«õû932/B3sÆ|4R}ƒ:ä[üÇ1§Nο§pÿŸÛ08µñ߇¹78ÕÈïðÜPü£‡­ª‰Œ´& Ks7d»(ÍéV÷x(xP?|“òæËʾ+ùuU×64Æ<ÛÞùp£®FˆË ¯Ä±WC¾_{¯¡)êE_=<ßw=¯îŽÙÞK«ŒuÓªÛïç·„½;\ã ¥sjõ+‹‘ÿŸç^ßþÜÖ?žƒöjüþ·Ávk“3£‹Ï.þFfï E´A¿·U‡Ì?³ásÐÔ…ÑÈqå>-=‚üñì‹êÄÛ=Mõ—דqÿ¨«áÞù‚Í×/æ5=À}øðJÞæ'— -¶<¹ž»ùÞÍ즳—óšðøl<~1¿qó•’Fß‚jö2a`BæÖgþ·Çíüïh“Îï}ª†’öÃ?Ø'Û£q“åhŠS -š«lW[²]°òÿAˆŒx´w]Ç©²Æ}§‹›NŸ+h¹|!óÍ ý×r;N^Ìo9r1¯¡óbq}ö­º¸ 
T5`ÿ^ßt»°>üõ¾M>ý‚ï´Ésþ×í´†ä;kü©OÕ~ÿ›.þ«þ1R‰FhX¡ajf¸ŸL°M²Ä>~,2Ñšˆ†iN@†6ÈHg:iæ…¦.ËCŽ+Îi9_fro/äลûð y·ª›”ßõW}8]ódk=¶1 '°­ìÄnl ÄäÛ°m½x-«þé“Ôº=8çL{Z×èÿ³áÑýrÒ"©aøéô_l%<þÃ_À<5ÀV Úd>t"i82uD& ùdf<™ÌB¦Cf"c½È~×Ìôñë̜ф9áÈ!h§†[Ÿ`+~%DE>ÞW{g[5ÄfO7lƾïù¥Âž§Š·~}5¿ëíÕ¢mßÞÌßúþv^ÏÛ{9OogmîïÏi‡¼Ëå–0˜Æÿûø‡„öAœýe®eŒ4Íñ£a¸Áócÿ©n‰mÉX4BÓ™ ™ŽLtì©þ4r´g‹c?\4‡Û¢6ïR_TñÆÈÿ“Æ¿9—ÓrfCÃ_•¶^=Ÿßrób^Ë­Kù›oÝÈi¹z1·åÒ¥¼–ƒWó.`{zæjN#<ßx½¸Þõ…àæÞàß ð_™¶²Ô›ˆ,ô§"sãØ6OAfFvxþ-DÆ‹‘¹Ébdj²”´m´m0m£DãgÆ¢)>eÈ>ü Æ¯œ ¶.ׄù¢ï„åËô–A zòtQËÝ ùÍq?ÝíÏjxÑŸÓsîëûÙÝ/îät¾x˜¹årÎ%Êj\ï K&Nóþ_ÍÁ¶"ÒàÏŒÕ-±Îð¼2ÁsÎTc,~n,2Ä}8 ßL†Ø úÓqÛæ 3‹ùx|:ã1)B£æ#«ù hœK6š(*GS¸64…î@³ÃN¨/Þô—Ëú…ÙÌÛk©)·l*ºT[»áReÝu{ÝÁm;z=§ñöqýײ_ôg7¿¾•Ýòb «åü¥Ü¦¤»muî—…EÃFýËýöÇ|{žÊTÙkÃm…Ç£)~Þ{lÃÁ¿©Fæ:¶Øâ¾Ó›‰í¦=²4[ŠÆŽ§ÍÜh4iišà‘&ºæ¡±ËÒ‘•ûZ4Æ=ÙJëЬ ­ê Ëžs:.L¿Vð¯/嬾¶¥²ð|]ÍWG×·ß9Y¼åÁ¹âžóçó!gØx³¬eÍ㶖‹8Oxx3“¬C*¾;Sê´_˜ha1ó_öåš$Ö‚{/MìËô¬±í·Aæšã±‹Gä26Mð©ºnß84RÏ™èö߆OEf¦ØþÛø k»d=#›ƒ&,ÉEDuhœO²‹Ü§îPr_oé>a¼ë#Á ÇÁ±ª¯OÇßíÞX}¶²aß©¢ú³ùqÛnœÏ­=q#«îåôêïïg´½š±åæݬöŽ…u¿¾I¶S¤ý_ûl0þ÷ÇØ’ÁXX÷–žk¦¸-ÈÍTc 26“ô•¹•+ƒ¾h‡cæ¨Ð˜Y,¶)"d9Á™rFæã<ù¬`4ÖϹ€J4'ú€Æ¼M {‹eÂçÁAòão1‘{×—])¯m»PÚˆmgóYKã1ÚôôNöæoîeu¼}µå‡™onçv¾Ȇµ¤zïg‚ØÆzÁ¿Ôo²ƒö÷GúSõg4ÎÚ·Ã YŒuŶb2‰íÇHl3ñ<Ãsm´Åbd9j)²2wB£Ç‹‘ÕdŸ…lÝóÑ y'š¹âˆúœ‚›CgW?Õu<&ŒYÜýë(—‹ÂÏׂXþÃÓŒ GKŠÏÕÖn<]Uë|qÏ•‹¹MÇÏ4ÊúŸ|áüââÏ¡Ü×Ï ^ ±7„¥N뮘˜ŒÿÛ5èÛtHtVR—DYà¡G!#ÜOfæ#Kì'ÎŽDS=ÒÐD‡04qŠ»Ya{iil‡oàóæ¢Ñ£à˜RŒÆMgиY -4É9ÍU¢L ²_ñ•ÆœêWú ¾F9¿–¹¾\D…Hñ[!šûö]ñò§{kbìh -ÿæP]ìã-]g×µtœ[ß÷¤wsÈ·×òî×uݼ¹vÓ»ôê¶kE ^ïz¶ßªÿ±mÚ¤=Ä7GæÄ~葘_ÜC öÒTË -Ç £ñ|4GÆZp³F¦†vÈbœ/²Y†ããU§4—T¿±d—0vÙW‚-Ä“ËŽ “où»…cý[“›^-Ì»©¿0õØÐ%n9¦¸Ýœ¾%ß ñü§+%!ï¾Ü˜ø¸­ò9ÈÉ!Ö<¹ëżƓW³ëúodÖ>¹žÕða ·ãÞõœVêÓ“,×Üë&`#À¾ÿ+cSçw_ã"có!–h´Ñ 4~š/²ó\…lét49²MÏ:¢iWvY{Άë:s‹.™z\sföi­y%7uç•?ÕŸWöP^öÕ¡óÓÎq,0p9&عÜ/Û#Ø8U=5sí&{Þ<} -r¿‚Âï‰ÀË> -©)w›ë fvíısîíšf1#AÞþ.ô4T»ŸðôáOrqѾQËø5›ÉKþ‡q©Nú ìústBfcÝ‘¹­MvY…fP%h:UŠf*ªÑÜè]U÷ ÷ -VËNãþ¹-Ìs¹-,û·¨âÑü¨3åÈ>°Amaì.­%yý†KËž™87ÿ8Úí˜0Ç'^¾¿ÁÒ_?&sŸ®ú¾x¯ŠV~ Ol%›®ØÒ“ð7Þ$)Nþ´‚Þ÷›HÞówg®ïßGáNüÂ~« -æSj ’Ž Y¸öŠ®K÷?Æ{Ü\}þo…êÓÏ9‘_ï©QüpµTöÓOiò~Íð}&p^;þ2ÝgËëé¾»{ÿ~A"¾!0Ò!DþèC<÷ø~÷òn~øÛ¯šÂ^ŸiT|÷usÿCšôº ’ßþu%÷Íó¢Â;›Úú×n*¨jríø8n¼õüa\ªCÖ†c<Ïp;Mô& KËùhâÜ4Ç39PEh!…Å´k.¬½j´ì’0ÕãWA*>®V|"úË@FÀ¿ý¸Zþ·»éÒßž¤‰þ&¬ôøY{þ H}T~? -áì÷¯‹ü_ Á^•FûÆÖéxFViû]|d7…pQßo $¹»FÊŽ -Ì«÷yÑ/v·.µ¿%éq÷–è¯÷·‹$þˆ ÖbkNÍVö=¤Ùo•’îïçy¬i×›0eÉÓþ»Ã÷Ûs4ÉN†ãÜuÈ!ö¬Ö‚ _:v -æxì-ðúA ýþ"„ûý*Døü,(]žŽÎ7…¹îowÿ¿ +Òï×Õ÷ÝÌ­k(¨Ï»[U§øx¦ÖP|ðØó8#8xì½n -â'Bˆôµ°Jþá·LåOw«¸O/×ùœDAÑêÎs wu”°‘ê~ìÄÔž­ÈØ:ZWcÄç±avþÍ—?ò!ˆÞñƒ'S¸wœ,c‡¹odåés$ÈÒ`ÂY ú÷±©ã,c ³EæñÈ>¨C}qÕ °n?ù*ùYˆ•ü"$ˆ¾"|a[p_ D7Êïºà/y(„É?ß1â›':%xIê_ØIŠX‰»ÞØËþÁ¼û6Oö&}!Dûõý6_RvØZºùþ<º÷/æËœ¬ùÆlªúÔtªýô|æà½Å‘«JÕÍ+™!/TðÇ¿ á:>º±µl奙ì‘'lÈ“3ª77+Åý;{ý߶KÛÅa£Ñð¡VÈØpΣç¡qS}ÑLÿtä³WÓaÝ-ýÅ{ÿ1Æõ¶°Ï'±×a‰Wñ)s¯Ø.]¿¼Cæ~göÉ­Œè—»Û?^ØÈ~x\(÷v­è¢ oh0ǯЯ^£-Î-6ôk²=q—Wš$Ï*7¦ª·O¢÷?“=š§¾ Øÿ›»¬üÌ$zmÏHyX¶¶,,YK•¨Eê^v¾¿µ*èüƒêÔVÚx}–´âØDq÷Çù¢öw³|òŽ˜¹(sÕg9¢IØ7š¶ wAö¡5jÎï™{ì<Ï K=ÏãÛ!l÷»¾äQ¸×Ä3©AW”¿Í,`S︀Š.+Iݾ âîûöâÞoeU§§P]#éäf.½c$“½ËŠÉØ=š‰+7ðrõFî‹—"©öWR RªBÔå!êLA«¥¬ùâlYË…9²Íìéæ/çÊ7Ÿ›GmëÊzÁÑ'¿æɾÊõmãØü-VtïOÕ‰»+ƒúòÃn«ºq-WuþîÙ®îPWAeÖ™Š“×éùîá±þ´ÅÒ°é‹(4ÂÀ™ ÅùÎCmg‰‘sîÅáîO7Ÿo–þô]ýþ/¹Òs°[p–ä´š¬Àç2¹Ø@^sb*]Ôi…í€&W¬š‚þNîÈcž-sG<Mj…ˆv\ÁN›à´Ú‘¼X†¼ðß°Ý` -ÛFS-ýó¨ª#¶Ì†“Ùu'+zžú„í¸£âšú¹Ì# ƒ©Ø;™î~å´Ý[LWŸ˜!ßþ«ªïkFùíÝŠÀ'7ËöüÃY^Ði)këŸKøÈ=¹Vüár=÷ó£ ²_…té_„dìÏâ¼_ ”÷aawjßp'q(òT®Póo{4}üf-óU -sü'NžÔdä¶ÌI¤!ˆÔh”}eK•·õ‘©Ð‚‰ÓÐÒ©s‘ÈÅ+Ã4“sŒ#“rLBWą̊Iw\Zû|ùcƒ©#_K©¾<¥;_/•—v[Qå}6ÌÞWbØW+ßý“‡´ùÅ\yëwôîøI›fÓk;Ì©¸L&£n„¬ëÑ"f÷?ªïƒ‡´c`ž´æÜq÷›ù’#‚‡ä à&êûu¡ßîÌ÷Û%Ø{\=ß "Ÿ_¥çGÁßç¡àçwUð|® œø´àë·ñ„µ|®¶(,A# 4^ÃÇÛ-œ:¹88 ±Ñá)Úlb®>hhGø"ÒÐ$M°lV—%…Ï-]¶uÑ)랤Ê.7gã ôÙ´¦òÎû‹¨-Ï–0%Û¬™ÜV &¯ÑBŽm¡êô­˜Ð§JèSß(å¥ûÇË26ÓŇ&(? WݾUû ¯^Í`ö¿—É:Ÿ.dJvgÖÔË+NÚJ. 
-Rêã§lˆ‹¼O‹=²Ž¹'ìÔõL9<Ì#i§¾÷!ažô;!öòIo a¢¼f^¾AÈÝɷɹ.\†\.Bò„ v× ß÷HB÷U^ûå4ÙáŸü.þFùßÆ1Õ×B¨è•‚ûÍß{Ÿ0×ãà ~aZË–‰‘§D…DªT irÃpw_ZâàŠ|½TÈÓÉ9M·GžKÜY4|¼º‡§9/vAÞî"$ÓH¦’˜a\Ð5!¤¨w -Ô© -jFC#×ûÂ/äÀ튃/•²Þ\åÙMftѶ1òžïœäõ×fS)M&²ø -}ù¶ßœTçŸ&±Þ2¤Öeífs¨g¦ÖïOlEçwŽ–—°‘t>°—øÕ5`×ËZûg‰·~˜/êýq¡è0Že¯‿«®üõ;ô·EââmâÜFÉÖŸÒGÞÓ-ÏçJ2›Mäk7§TIø5§ù ‘ÃD[´dÖ|ä‡ç•3jÙ@KO•RfZìTí©™tßkoÅ÷ü¡ûtdÛž:n6è=¡Ohùî÷PÛÅ´ ,¢¶=v¦vJRqÚF–ÔbäêŽóûEhñ|Wäìè…ü¥ájPg))ѱŸd‡ltMÑ4}øh´Ç_åm§Øtp&Ôä©ÖµãZn/ *é›ÂÇkJåK‡)?8Y¾ãƒ3µí£ èfRû'Qe_L”×_%í~ã(Ùýó2YJ‹±§(Í< Ù™ŽGf¢E³æ 777äí'Bn^Ø·áøØ'€C>"yyˆ‘—IÂâ5äiµÆ =öj´Ak´þX:L’bÛCE¨~\Ó¹ù\×7ºû©3hvS yzLùÎ ì®'"ÕÞŒ]¶l‡ —Û2 -4±øÝ÷åŠ#TüágJfï·bjÇwîÔÎOžPF·Þ]çϯ¶ Ë·ÙÈ6_µgö½ àO¼—íý«§´òŒ­¼²º¤ë­C@×;GYÉî±’ð,- ¿JÝG®& MÕ¤6JWfj/œí€fŽ™„ÍsAžËü°Ý”!`D‚6¼rÝ.[UÇUEóÍ¥À]:Ð+”ÊQ€˜A„‡™[kµþ ûÂoØ: êH±Ãd×›3…íVòŽ'‹è=}©¾ï<èê¯ìص­æ ¾^<›RaLjò²[F‘ø¦îÒl¢+X~x -Õpj]{bSÒ7Ž.Û7A¶íÕRêÀ·"æÀ µõ“+µî  èeH5ÄÌ -uipº&]¦ÜÛç„B=ÐÔ“§kÎè?Ò+ò‡²Ù[,ٲöTÓýù0¾@óÇc˜ÄŠaôª2C:6W—ŠÉ -Ú‘lÑ^ùæG I rõ—vò-7Bm%öAª#·ÃùÃT§âùsVо 7_ŸËbß/ßøå¨M„ú5æÜ×aÒ¾eÙ沤Úáò‚žÑ²?9Ézß»@¼ƒÇ¦‹lç'Г/OÔ‚5 ±2\è´´ôÏ•vÞw sºG§iº,öE‹§/Dn‹¼ð¸”"‘ŒF2e˜º<ç– ùútB†®ty¢&0(‰ÆKxŠVgÊ•Ÿš> ë˜Rk¦\ž64hyºŽ‚ WW-OÓ Ìï϶ö/­'ÐÆ ³›Íé²ã©îGK™Î{NìÎg>PçÊUì²=Aºû±³b÷=³ÿ‰T¶ó­³tÏ.ô±wŒââã8Å©'+ØݯŲœØu;lØܺ‘TõÁ©ò½½Iíò±7JùßpÞqoŽ,¥ÖˆJ©3¦ºÞ/•÷¼[F7\µ—o88‘Îi·•/Om6qv!‡öÈc‰Ž'°¾d2|},0Bh~o'LLÊÐL%ãÛM¶ö€³ãkOª» ŸR4œKÈÐã‹š­èº3ÙMÇgÐõøÿëýÎ]¶ý›e\^‹%³¶Ú”+Þ7XtF)›Ñh~’Ím¶ š:kq>˜ŠcñÂmcÙœf ¢%™^7‚ŽÏСã õˆÞΆ/mÉ}ò&#jyæК‡÷@>hTÉVdhC4èíqŽLá+¾šA'êPáj"Y‚ñLÅèq9–0_d}ß¹Êö¾÷¤¶¿q¡v|p—w>]­Û7 ´ÝA§€éyá -k =D7jXS½¯<¹ÏýÙÏäì±ç*æËgœlû{gfã—Ó@wü†¼ùÒ\jÿ;jï÷Þò†ks¨ì. XW“§5šÐØfv=]1Õvw!Ôƒž?hGÊ”x®…ÆjPk7™ÀÚ€¼ûźþÊ\jyáPOçAÈÏ—G\Ò†áܺžñ MÉöŒÅçÒŒK­3…s,‹É* Z£) ‹ÓfÝ6à¬ÐRTôN媿šÚ: ÷I4®óºÇ‚ö(ÑÒƾBÖzq®¬ûñb<™Æc³Ù– èöþEüÖ»>ªÞû2®ç¦'³õ¶;³ó[/zß7"îø×Aü»Ï®•Þ¿\õ÷!çO§qG)Àæ2Ù#ñܵgv¿õƒZ\zϯ¾²ì^ wO-uX†ä±åúLç7.ìŽo¼A_ŒÊë%/;A­ç݈vZt¡hE·4ôÙ´jSzeòðÛª/n†ºÍÕ›ºt ©DµœËø e~’I¶}rdŠØ€†=ScÊÚ¯f±ÉÕ&Ê5ˆõØ’=6àg!¯–…Æi‚¶?µzhRkª†Ëâr°Í,Ðcc±­KÙ`̃ÞdzËHùò-ìådÂXÃã 8V°öH–öCƒŒÓÖÑÀ1àR -†ñkËL@—ÚTÐâæËvO½:ìÃœ5²¶­;àͨäJSÐ1­y÷Ã¥À ,Xà©Ö™ qwðé:1__¥AÇ rAcûJùælJ“©X«ÀbÛÈD¨±‘éCÀ‡°Ù-trÙ0IP,aHËÂcÉyæY@ðJu¢³QÐe>b‘„Gtè*-àá<Ōϭ±&øsà°Ò±™:°N&á£ÕqŒ &ÏÐfÖT1Ë3†È¸h¢SÄV~v­9W¶mÎÛ—n"»õ¾Óyˉ«=:›hÞmü˜ÀîSîº'=ynMÄ©¯Ò‚ö mȺ³saŽÊ[ÎÙƒ/çO> å¯<‹‡:=ÈáAÇ• Á¶l¥“R‹ýã–ÑLbå0X«¢3šÍ G𓇩9-öC '/@îËü‘4­Ê”)ß;l è6Ñ ×æ©öÜçU»ž2|åÓ‰vaQÛXz×+_þØó úÄ/\À~Áƒ^wh"ñ;À'ËhI­HÒò“Hpg¡®pÒåÚžÞRv‘pÊpÀfVcŸž£ lN:"Ft1 ¯/ЄËxÄ„Fkò‰xŒ% gñ\Êiµh)6°#ºÌ8†fC´ˆ^UÕá™ÊÊ}v ËÌ*6*^têØ–Ë ùÊ=Ó@ßpo˜£¯L\7xÝÀ× Û¯9²Ý·]!6ÍPÂÍÎm²LÅþGŠÀãwV*ö=`éÖ« `›€Â縩 ™Ê¤×š2±%zòð5ZÀÊà’k cÆÌK ªF­LÓ&,¡¢Nkfu‰¡4Ç'ØÞö/ƒs=âËSªM@ë]YÐkCx3…-£ûD˜®Ð~œ·€V:°ºdØGo‹Y½~“ZkByVÊÒ½¶DO -Ö9ëŽÌä»<”ÛïI[îz°õ'æÀ¸äñ ®™ƒ?gšÏ/`·Üp†|P±ã¹ˆê~¼ŒËjÅç©Ö˜2òöG ©?x€æ„¬ãéBz]ï8>§Í¾›·›žO,b¢‹õ¨•y:Š8u*2M›Šs‘¢å-æpÎãŠ&ÍC íæ#wgÐå U“„ÄhÐ)5ÆÌú=˜äJ#Љ-UÂN)3VfU»Ïxæb‘îá³>¶¯°|h qw\A‹™Ø¦Ä,}yÇMG¢ÿsêmsì{Õö˜cLB¹!žê¾8÷ò ŒAºç™«¼îÌ °“žžräå%FÀ»"}Re -Ü)Âq&s`ˆ:èPJ$ãCÕAË›hDÇgês ©:TÈ-¢)í!×vq±ªý¬ ¿éË™0®™È$-Ðc›®,d;®-Ý/.»ÊŒÄZÍÐׇA•Wc©,Ú28ªòÓAën½¸P¹ó®”Þñ'³ºÔP£I8ç%›Ç+6öNî »ã­ÓûÖ—øúž'ÎD‹©d×6ªD—ÎÑ¥ðü“âþ]i®ò¸Ýöh1_ÓïZqÀQƒ>ƒµ4Ð&b·¿ô$ÌSloجöQlN§%‰×«NÎà6?sâ;_xª:Ÿø©:ø1[n-¢"­Ò”+ì Z† £û@ߘÉë±"1dž#SÙ†kóùŽ—n|÷ U÷]qpÏm©bë_ÐOÄsy†¢ uŒ*·z”¢æð¶óš ßÞïŠÇ¦‰ÉplÔl‡}Ý¢§WfLLºãÑvÇ{_®÷­?¬[À:)èA3%]c‰ßOÄvzÝ®‰°>Ãäô‘Ç­Ó£“k©„ -jEº¶˜ Qs^â†/Z†àšˆZ®,wÐc¶ƒ?1¦¨o»¶{”äº8[¡:ˆÜšz>Ñ‘f wRv¹ºŸ—Œè[ô U‡go›Ä­)7R$l®Èl±`Ûï-SvÞñbk.Û+36™-dð¯ w½:ßgÜïì,ª Çeû^H”G„Mµü-V°³ Ž¢•«±=ŒÕà@S´ÕÚ«Êî£J­1 ÔÀì&+Ѓ_K˜Øß‚.>aᥗñµ—”›ïyµÝñQ´ô;–÷ªÃÀŒ†Qª ‡gã+( ©:„ƒ¿—*mP‹›ÝÔ;…Û{_¢8;xúZš°³°Ï&œ¡Ì–QÀÎ’«Â ;Kù'v–¼ï;WÈáú–È"±Ò ;«˜°Û&)ÇØ„ Oti{Æ€þ3Œ½?ØYʶ‡nü–§ndã9.ecÕåxlkÞ£VÃÚªO+ -:ÇÂwâ–çèp™C€/¯j½çÆw>÷î½Þ£.cÃÕ¿˜áe;'!:ˆ•}Ó¸ºöŠŽ;î «Ëu?÷G¦ýñ2ìóˆîhzÝëÆÓóˆ7p¶°Ÿ†[P|épàÕeVR®ïµåëÎ/à@‹{Cïdà_)·ßCŒ ײ˜L3ÅéñQƒZÜ„?Ô:àHtfù :×ð½«öÛf-ìG#üÞص:ƒëÀé„ñ«ì(e|¦Þ ;«ÖŠÝùÀÖ¹`g"‰x&)C€Ó×Ý€~H›ªû;;k$°³ˆ^é쬈̡Y[Æ(ªOÙôMRbFtøWæënWFë(ÂPÌØ` 
-KU^½hé«2±-Äýªˆ*Ô#ã»úô|x?ö‡Ãߟ7øþ¬.+Ь†ñÍÆ㱚Zj¤Zß3)¨£ß'dóÉ›ºïüSwžþCw~×éÕèºóÇ`ïÅsø4`h*³6VäõŒ•/OÕ†5 ùʵC¸5UÆàã›%¢BÕÄt°š8j…»løš› Îå3:Gÿ¶¾V[Æ«I¥ -„lj&á°®.FXì)ÅFÊÜf+UJõ ¯ æüúƒS™îÇnÀ%‚õEŽ{€-œs±¯ù¹{v–ŒSªÉ™²³ÆÁøD±(‰Ü¥Ø¿DªƒRE¤U-ÏÐQE¥é("Ö á#Ó‡‚7áSçÔRæ5[ýdãÓuÀßÞ>·\Ó`ü}ú5Œ!nUâXÛ©ÊúãŠûìå{§f[qëXfÛØ{k¥ ÅÍ%W3ø3oƵßq†˜ÖQ¹ºÊ˜=Ø7BØXEÝã`½…°sº­˜5ë‡Qañš„å„}&ñ‘°³¶Üqá»ïzÁZÊì,ð·Š5›L`<ó¦À+vð’!7ûwvVÁ ;+çûÁ1šà_“«FðáÉÚ0wAÃ_¹ºÒb °¹ª‚æ1„g¶¶Þ"0k“¶#ÁwðaéC{ ¿?(e£¼ôÖÿü~Èã•Å[ƃæ6aöA¬Qn¡ªãœkP鮩ŠÌJ3Ð}Þ3aÁ®Û7‰Ùö΃ßñ­td l<ò~ì;a €iùÊ^±~Û¤Aîz¢&°?ëpÍ˟⑘P÷—)„‹TƒÜÖ³E2%&6‡m[}~6Ä’||©A€$ùû1ÈO$ELàr UF¹Y`ÑæqÀA‡¼œ°MÁï”îµ…µ‚-Ý34ìaOä¬Ôîo½¹Ú3s7ì,ì•ä‚Ô%g8Ä; q°O„åÿ?|ö»jM¹‰2¹`xPLš^pT¡0ùðm6<^K‡ý'°»°MæËz&jdí¸fmK¹¶kK€ïªH­3 Ìí²FßyÓâ|¸Î1hs>áúöñLç€3øÂH(ÅqGÞæÑ\*n#p9ºoáx«ß%0»~´*:W/8­Î"(çõvتvÜ’Pl&¡A;¹õ¶ag~û®ITëÂÎb[n8vVv °³4|a.*ãÕ¹ÜmV ÕMØÔëwv– -øÉÀÎ*dg©þÌΊÿƒ•<„ ŠÒ Xœ[*qÜ1/°ãs7[¾7ð7 &˜*¥ÏÏ:K`+÷㼃_Žß¼Rƒ¢ÃÕ€K1áþÀû+O#7`jà¸dPçxû%>—ŠØ\=Â'‹Z3˜ßŠÌf ºáÂ\¶ïµ¯|ËãEÀ?¤‚—kÀþrß W#yBìš¡tH˜†Ÿ·Iiœ“Gd ׈$’†ªók+Maüäoð¹°†Üi*Ç?YM\åSWln_9òõ–#ˆ·Ù¨lk,”vÚË—Ä•Øoó`OÖa›‹s>­š0ÿ`¿µõ‰3¿í©agFzLšŽìŸì¬ÔßÙYkt ;kÏ Eõ {eRép™†øHì×gÖ@¬ ûN€ ã j,€ çu‡°õ‡gñ B¬×U€\àü°5Gg’± l™Lâ‹&ñm7`|rݯ½àz²ûžoÈŽÛ<‡í'péÄþÀOŒT'è¢-cùÕ¥Ãàš>ô Ÿ¥ ×¥˜Ú“³éækó¥ê{ÄÈ·>wv»qϨùàŠwŽçrºFƒ–5CÖœOM!ì,`…ÀzV9Ÿ¥Ý¸M¸½ÀÎ*dgñ _Îc[Î-AÖ[±ÏWå´X‘XÛ}`¶@<Ì4°ç«q»K;ÇñÅ]ãk¾xÇDeÝñy\ÓÙ„k ×\’JŒsp~ ¿²^[®£ß™ï¼íNwö/¡7ãϪ:< xvÀùÞ“rÃѪôf &2V‹¯Ú3 Ö4”»ïÒ‘µž'NTÛ…ù„ï„s,’kl84o½º”o¼èq’X‚èÀdM.$UÖ|”«ò ƒªŒNÓU¥l0å¶Üp êº%!,™µÕf Ð8 -à|]ŽÇ6£BÀ0‚ucàÑÀº‰Áç4SW•Ý2Ø]óñ¹VÀÜà‹·‡²ÎTÐkÃÂx…õËøl]~U®>Äa„ƒÇ’¢bß4¸žAïqùlx*‰÷!S”˜Ì6]u„5Eq÷x`~ƒÿf±¶œWÈG‰],h ÌjRçRsqá¥Wª*­|a0ã¾8”¯Á~çhïŽ ÄEÅÖ,>÷$7n:n'§¿3Ý ¸§Ãš†òÀC¥êÀ@ ÛuÛ… ŠÓ”Kyׯ£&·va|ç â°ØB}²žð'v[wz.ÉÛò[FóÕGgC<.o¿³òwydº¶Çølj«°Ò¥¿³8qÞ‡çñXîvVQÇ8`q0§šOƺ²p›Íàçvñ@rAàPWìœÌU´c[Î8à1¶«;Mb²fUÑ;>ƒ°áJð¸ÇÿÄĶw -ÏQ¶çò]w–)öÞ¡é½/}©Ž~GØc×Ü•åGg}“i¹Ãø½we!G¯¬ŒšÊÖ¡T«4£ ×Ð%û&> -¹Ö Lª¶±ÀYƒµm¨%ã«OÛÃ5+Â<„5L¥G§*‹vMµ*o‹5ì¿áêÏ Æ–d혤*ÿÂÆ=ékXoWAëX»æ»ïù`û=ƒKÌ3à£2tØÐD-EJÍØ×Ãî~!Rí{¨Pl{(f[f¬!îôáYlûõ%\Ûõ¥Êu;'¦90oqÌGbÖŠ}S`!ØE^“%0°Ø†sémO\©–Ëóàú.\o&µŽ1EzÄ'•ï™ -vb)EZ‘‰S²F© -Û­É5!lßé–3ó¨†c3áügòy™"Vø„©‚ý -±y%í°‡e2áA»qüH|ž÷lå[Çç8o™ ñ„”Ãö Ç¡ÌòÌ!ð½aï»ý©0iX·Þ|k1[v.Ìð_ì†vË#Øë©ÜóakÎÙ[¹É bVø\bcp_ÂÞKà=Á¾â÷`í -òþö+Î`Sɵé È{ÁÇÂç3ñZ°'ÖÂSį֒Iƒ™ˆ#©m‚kcLäJMoÓ*¿ ¸ÆœºÑdgþå Eùî)°† -× !fT$&öÑ#ÁwS•ôNVÔšë3Š<þð÷S–œÂ¥ÕŽ€=ll¶®" 8–øóÁ~eµ?©–}|–¬_òµ'çóˆðƒ€µl¸Œ¦‘g“ëÈÙ¸A¼¼êývn50Û‹·Úà…ð9àÚ9ØrÂâi¿æì"xÉ÷ WÅãb `@ÐO–Ò›ï,›§(DZp•À×n<`Gw=vVî¼#N \«'¬¯ä2c²÷ÖaPÙ[®ǵÀ7†Ç`Ÿ€»Üug‰¼ýúºþÌrM'û;xŽ ò[-Éu@X€½Ã™ ƒ1rëØG@µ_&±_ºc–¦%W­Ö€\¸UsŽÍØ„ýL½9ãOö[•íœ¨Ì¯·$×ÊêÌf;®-åêÏ:Àuz&,K[Ì­T£V`_V´Õšj½ìŸO®×dáñÛn sŒ+7í±¥:®:R-çSMçí¡•ÉÔ>&›¦£X·u"áÃÁ’üãù•Ø‡Ææë)ÓÍU¨–6ŽG¹Š­Ø†îcç\úpí ì ð¦ ézìêRCEJ… _ÐmTûh²Î‡s&’oà÷Á9¾´ØWŠd‚8rÈwá¼Cœ×[9u£)öc¦ÄÏïÛ.½Î ˜Î„' û³[ ³[ ±pnavm™ a5üålÂy…5‡Âç6´GÀ>Å6Žp®€ ëìxŽ)Rð¹+Àã ç'ÁÄ9Ð\¦îälèGˆ ™Ö“óù*ü¸„'üPàÃ:¬“§–‘k¥8&>›b먣cRq¼ {iKðy[ß7 Xt„W×é26˜Bžì:˜Ûä<€}ØxhÙ¼a÷Øï -ÜD`Â~A˜_$FÀçì•*·} ñKì·™[¿e<§eÛmèǧÑG§°©u¦Ï‘¶Á¾—¶ëŽ0Ÿcky‚pÂ¥½_/•ñRÄõ¾ôçv<ó¥«öLf–§hC}ĸï&§û)d>{0¾Ô|o2¯Ì>ŸÏÇçãóñùø||>>ŸÏÇçãóñùø||>>ŸÏÇçãóñùø||>>ŸÏÇçãóñùø||>>ŸÏÇçãóñùø||>þ>&Nt w I -1`½ &:{Ï Â¥!«“" Dƒf:'&¹E…%EÅÇ…$¦Y;â§f͘co=Scm;øJkü"kqbÔò¨8ü¤,,$&bŠõtx7ëïGy»Y;ZÛú…¤E$Í -²šb½ÈÚÖÙ{ö¬ ü6ü×Á—.Â7 -¾Âø -.‰kV¯„$áO|Ò%üxÚ–Š‹ ‰·&ÏZ㧭çM1˜eíŒol -üa°ßͶžE~Ø4üÀÿ²?•b={–µ¿µB5Ë:^/5°s˜ï°ÐÚaÞ¼ÙÖ±ð`î‚ó >öûóãÙ³àÞÿÏÉëãþý¿Çw«O­GTL„ã௤5¿ŸgÛ)øû°ž3Ý"’£Â"\ý9_kVd °¶#§š|yøåOßßÏåà`­²žÏŸ­[ñ/¾þúû»~¿[`m¿ÁìäLÙ[“0˜8‘|GŽÖö³çÏw0`½u½¼äîê¼ýä/ W“0ËÕáæχª‹©5Q@˜š¿8RÍË—GnRäã«@Rù -5YDŽ6ÔMÊ#³´å‘™ÚÒðd-‰r•º»³-[ì…¼=8$¢£ÕD«ÔEòhu¹9 / M5‰j…º4bµV@H‚›¯+_Y¬+ŽH×ò•!q()YX’¦<¢pˆ48UÓ7 ˆüß>*ü2ä¾ÌßKQ·\ê(`Ï»Ÿ_0yM¥FG¦iƒ6Š*½bh 涎Ú|¨%º¿1yzPCMjeV—A½*ÔÃ^f¨ïT¶Œ…Ú¨¯¤CWi* Þ4Tb2tAk–ÔlVœužP“ÆÇféM |¯X™®£HÍ“sЧ"Z'ðz¨iZ‘:”\­Wd¨JÄÿÿš Æ qÂ$dé­…ˆtmÔ(Ój”"Lô@«€ _¥5ŠP‡ÌE$jƒö¶”â£Á%n4‚ú:j6½<‘hÒ€¦©õ— É*“«ÉøP5êû£qûãÓu•™Uæʲ¾)ªÜ®±PÛ ŒRcÂÖhVi{éö‰*¨Û]?êLá|&®7"šHÙíVŠ‚íã¸ìÖQÌšC6© 
-?_mÂ¥U›2Ës‡¨â4@–KÈÑ'YMd_9ÑÀ)6"çjr›FAmìÇ'ûßAûÿ?tP´Í«ƒÞ¢Dµ\t¹A×Y©&‘†¨ùJ”4œdPëÀÅ}H?ò>‚—A=ªL«!Çí¡Ãð9ÅA[ÓÝÍA½*èÄ2QźTT©®<4U ´ÀÅ ‡*AòÀ• ¥é#’!¨ÇfWW„­Òðñç‘‹§ùC½lHš6W9ŒKÚhÄÆéËÙxu`>&ixã×y»‚Öo°hÁ÷#YP²&£JÐd"Sµ•+sõT±9úŠ5%ò«aÌUdT›Áþu9n/þêŠÔM¦Ê¬Í–ª¼NkÐFP¥T˜‚Þ#ÑÇJ+3…ñÃŤèÐQ)Úʵ­£ðùµT€ÎEvÓheý™…ŠêãöªìJ x ‡Ç—ª5 -D뤠ÔÝj´Ž…1”^c¸*Ó@•˜ª§JÝ` -Dãau¾!hP0!«µ@Ïteè ( &,N‹èLFç貉ÙúPŸ˜Õ8ê†ù¤CEd–ÑÊm°„.ej¥)“¥Ë¬X­ :_¤¶}zyœ&±J 4$©xl$—A ¾ªlÏ4EåÁª”3>6[ÏÒ…qu oµ%¤î/­ÎŒÔMÄêÃØTævŽ%:Z ð-ý‹¡.’ËÔž`VèËT¸/ðx µÌIxBÝÃÉÛH$eð|S%kJù 9û2I“Â7)¿RÝ_Ä#‘/;ÈÄ ŒÓ 4)x·©I¨@ú­ðý@ÿ• ÏõhúÁçH¨pµf¹ÑeÆm€h.¹Ö„[ž¯#S%jÀÿ:PÓ¬ˆÎÓ Ò?+¡öo°Vô+ŸÄF§ U¬,Ò“+4dJlGƒâ5+óõ”©›F¨Ë•±™zÊè4¨m L¯4ã v"]êf¹Ì¤ö 48 ö÷.amà~ z?©å¦Ê¤2c¨e$Ú|Éu#H?eÖä+Í„ÚI¨í†ÚVE>¯ im/ÔŸ¨JvÙª6£Z¿kŠ’hCl¤Ê„û\}b±ý¬Æ6ÛL¨½qEê\q_±ÑéC¡ŽÆŒ*¥ÐjaI=3ÔªlµB¤ÞuM‰oØ.«’ËM”I¥FŠÄ\°sðYDg ú<£’Ô¢’ºñõÛ&ªŠz'Â\û?¬½XT˶-¼sŽ`ÜbÎ9gPÉ¡»Wêb@TTr’3M“³b˜• ’sÁ¼ƒûœ³þš…ûÜóî½ÿ}ç~߃¯7ÐÍ–^«f9欪1DÞ—ƒfÖÐyÁ¬Eø¼™WÚ|‘KÂ\Ðg. t¬]Ƴ®ñsáì>?퓾PgᜉsÔ,Ö>|Ö=ð“Îc± Ö.N˜gßX§pE¡û$˜ß  ¸ -Zbð^I KˆG8ÿƒ0âô.裖#àìèL@\iªéšÚ†hBñi‹‡HBG[BðYôÿ#L"MÏŒà‹Íå°ŽÁÑ‹£ ~›ù&'äÉ#–Ãá,µ–Ÿ0 Íä¨#ö#©ã.c@'þ6èÕCŒ‘ÆGÀÙa˜[pæ -Ÿ“:íÞsà$®ï´×xáù i _:= ¡!<<t5ªÑI9!œÃ9ê6V`l=ô>°®*ʽXWa!`„ÈÂn,h.ÏxNlíRú„ã8ã(º”²ÎA>@l±(>ÑxâsT.±s@çÎç2GFc=² aøÌ Ä;s)>{ÆMä ›5Ä ’ç‹|2#~04ŠD¾KDwV6‰|¯/,_ð -ÚZ´¹Õ| Íàìñ³£à¬«È>`†ðlÀÖ6|èŠ`]3ÐF€|è(…slKá,¤ØÅœ³…sŠhþ`-9¬Û†Þ ă]¤hDÁ5Á\‚s®ÂÀ»«ñÞÈ—›à|œGƒsSÀg°öŽKâ<ø·AΰÒÖp–Ðu¼ÐÅœƒXô½¼´žÅ>ÙKÄŽÉóàü>ú[Š0—àÌkå=ôèñzÐò=ë=çt[©"Ìêº÷§íÇÂÙ?Œ©kAB`b…ÆïÖ¬ÃïÆý?pîîè©óhÉ0ðl £¼pÂy,èÕRFç†3‡íF‚Nhì‘¢ãrÀ_à,$èl€®hØ‚ž<0ב øµòuœ¤3é þSà,µqÁ‘‹#¨S—ƃ–ãwg™Ðçîr¬#gznƒ°Ÿ5EsÒxh:BüÚÄ(ÀyxCÐU£Èa­„“´±õR|dHç] äe8ŽµN^Äã9{lø\]š.p¾¸àkå>Î+â8ƒbö|ÐT¬ÁÏó˜ 8á4š2EظpFßÍ1„Çøì>höœšz$Â?Êâì(¸7ð,A÷tŠäÊ(¿ -AWÏâ☡3¥è=?]‡çªñ|C¯·a¯¥bKwÐaÏÖa­â +ñyÈÓ¾ûø™Œß£•t|Ã&üífÆ2x¢ž!à´…¼Ð.BArg ž»Κã{çuyœç]:¬~––ˆ_Ú qµAù ÍG‘CÌl‰Ç•Å€U€;´ñ1yÐTÂ|éèFäm"ŒƒØNìE¿i £ˆÏr¢q„Ï´¢ñ†Í -|>ÎáÂ=Aóë° .ÄžrçY±¶`Œ×ÿÉp/…€yX¿üÒD˜GÌÅŸ9x?œ7…˜­EÐsBqú×9x/ø,4â Êÿ/œó!÷_ôŸ -uh`=ÊÐû«AgŸE1*² ÃX‰§2ÓÀ#p4T°w@Àíeà@»'ÏÅzg¼&âë³ö™„ó³×Õ… Wx:²p6Z`nk,¡Gò|&äÁj8ÏLŸAãwÔy4Œ¼?Ðɳ®à_Z£"¯ìEŒ•ÿDÀTÐÕƒ¿Ï°»Â½Td7Q q ÚÝÔq×1<ðª0¿0°r‹æ?ŽOŸŒE´ù0C”ËõDè+ªUà{ÆRÎÕ_|ÑÖXøë\«ïƒ• õ‹ÏËúÞY> 1‡ÏF_@ü:½t} ühh`ÓN±3Á' |mhÛÈé´KâlxNɳÀ[ tÒÁ«€‡ê0ð xÚúÒð1”œ–NŠ$ªÉ$¿ÍíGíá¹°i8P®€ü(BõÆKÐ<å2Žµ™Žµ!’[Upý}Ün Ήh|‰µ;A›Ÿ {²tã!>1x¥*†¯í8àV¨&ø°å­IŸÀñøƒ¿ÊYÀù7€ŒhÒá<‹ò ‹ò à0æ(–±ç‰S„>Žx ÎCˆC`mÍ“¨n°°‹¹ÊÌÏç±N h/¡ù 1 8„Ï ÿü`¾Àœ|f½S`ý+ô~o>—‚¯褓Á·–AþM3˜ïðw`î€æèîÐ(Öpžým3›0Ï°6ú› óÏ7C5³ÑIy¨i”ïHði³úCãgA-+0²Æµ>ÓŽæ'Ü?¡­ïTì{€r,sÕ('Qh±I¢| >]ŸÌÙ ÉàE÷ÎÂëóQ­Bƒ^ðBxo”Ôí'å ™£¨9!zyà„ûÀÏ`œfANF\xŒÐÿîJ˜_›AS–90 ë"ì~H™X ˜[ ç›YÇùà¬ÿ$ÁqÛ‘ ¡ ºˆ†¨.Ò ´½ ê”ç8ÒØf¸>ÔëÂSòz”™ÔcPOÁ}ãYɃ&$Ö Eu:sÜq4hôa!ЕqC5®'#ài>9KÀcAÔ´Ï/À1çЙ‚”šÝtV¿&?ç7U^RçÐÖÄúÿƒvQ3øGÎ 7`DÃÀŽ’¾ÞÌ„åodl‚§ -ÌÎŒ€è\À`­ÇnâzØ9z(¨–Ž™ º„’ ˆX9Ž¾8ø§ÈÞwÔ3àÁzé0§p­€jmÄ‘F‚>èX‹/ n ÚB¶a - K>'Xÿòbä Œµ(gç æ÷³‡ôý²3aÖ±XÛÙkÖ<Šx¹™¾Ü}ˆÉi7 ¯´aQ¬cå3 ø5Ö¥=ë1<ð½G±q ¼ °ò5x]@mAD¿X }!k߉ä1ûQ˜‡'°‰˜ -~˜8%ÍÃÚÕˆã€ö -Æhø=Ä«Á+뻞óžDr s bc,š "¨÷á+Ê jZŒ;ßØvÔÏàS;4Î_Шæ3ærŒ…ëXÀma -ô?@CsF„,èB€.…­LÍ¥Q õGs-@uµªiôyF„k: ëT¢çûèc(v8Œ-J¨×A;^ <#Z  HšÛ„yÈ7>;Ï1„±Ð·ìDõÞpœÿpž÷š|ÐŽ uÊÕ¿GÃXÚQ/·`Ý$Ð šÕ¸ é@]kסoà nþªMFVlŸ˜ÇPÇéñ$Øü]@GŽ -}´š>wiä"Ò ýmÐ\Þ -ñè–ª^V"”o¡÷$qM˜\ð´‡Q½>Jdøè·ïÍj„‡Ë1†‚V hêâzÝg*hßboÿkËqýë’0Oä‘8x,ô Eö! -b»ˆ!F§ðY˜/û¦/"“+÷`/¥ S Ÿ‰õ$|âý3—QÒ7[ÈŒöäåwû±_dÜÛíLØË ‘ÈZøŒ‡\ ž9‚㎣Àçø*óv^°™qŒ ü:î0t̩زdjó^¬Ñ -ž]ÎÉsh§¤!Žà÷p%-+ßÁ$6(³ÉuØÄJÐ?Ò@DyÅ-~.Ô¥X 40!/§Šg†æÇè! È]Å„<]KGoÕÔåÐË€¸ÐÑâÕë(®`Na_T÷cMúFßB͉ê'ÐRÃsÅ=ôy ŸÀXùNdQÝ:çзÞ+8ŒðÓÔ~$Ôt šFŒM$æÖ¬UÐdÀeà;ÐxˆIÐ…g­ƒ'ƒ—:6•cŽ;aÏøMš9†Ø½Hà¡â³Þ¸WÚÏÐÛÅýNLj™"ÿ[Cú PZ"wÞs2p=¬…rÎcÖöJúEP¼Mpý£&ykP—ŸÙ¯L9†Ï€øÓYÈA_4¾ÇÁ½Âšð0ÏN`~ºSnÒ¹¸§=$¿k+p? 
÷¢gƒÆ(ôe@»™…^Ÿ ôRß@õ:èAo\lƒr*ðG{ÙÌ!ý"Ä_NâÊto.ÿàûX3êõÓ.ã„ éyeÖ;ÃGi AÿôØA™¶›ÎØ¢úå‚ßè¯Ò¼'ƒ®8öË ¸»ü‘qýúÕ¡ׂ‡kŸ:¸ Œä &ðÁ**å2]½ö¸ü äEÖ!aõh½ µqø1> èó!SÉ(çÙ žã®ó@_¥B%Öí&“ëö0ÒÂmX¿åć÷¡KØL6üöZðÏ"¬ÇZp Ë5\ð³µp¿ù²²ÍüÔ–]X£ÌÂy,uøô¨×i¨×!O"Þ=ü¿ôò1—²˜çàZ€!À ÓÖ endstream endobj 46 0 obj <>stream -“„öqCõúyÿɸLj0òhS³—n/£Âž­eýsW²d -´ùÅQð:ö©@ïMb7¤É‹îè|ŒhãòƒçÍ)yÐbÞBŒjx˜/£öc`MHì›ñ³^wýÜ+DXÆød-Â=B¨ÝŽÚŒmoˆMúx?=&HéÓõÔõNmáý& ™õ^4tÿò˜`Ì]FS&ˆƒ˜ž5{6t*ä-±c èÜ(B}kS"‡X tš?=R€¦+Öö„^Ü/÷D¬×ùSËo¢ÐÕuˆÞb]¬¡z}ÔXðÞ°&¤[ü<Ö7{ ªWàºÙߦÕ4Üc€š°&ðÆJ¨éÙ ÜÕ€‰d\ñ6:ìÕ::¸?Ä´çdÕô1»QXÒ+}ðæèùQFÉ É˜¢­àSCûß]FYO„œ½ká)T{»ç,`]s” OãZåtÐEÿ4*Õ•(gƒ_d-žSàK%Š¯Þ/Lj8Þ#À1GÀ´Òñ¼FyŽD¹+öõV::kb~k§c¾"àmÉ¿òq¿áåÎÝÕëÀy$èžB½Ž½*,Ðœ·´‹u9Þ²á× =“•Dî)óQ¼´ƒ‚¾;î×€ÏQÔËMtðãÕà³>ѸnCµh‚·•Ô°Õë[˜Ó¡“p_xúŸõ:vo-x&Aÿ x:ÌUà@Cõ:Ì—q w××Ð8Bœ¹e,¹ÆÌÅõº ª×Ág-î;ûd-Æõ2ö˜È^¦V© -³ëõÙ[-:³[ƒŸôfö˜8Žî¹ââæˆç­äIÑù!-UTû# ƒ¹ ï< ^Nõ#䬹úWÐÏ€~Š6øÑaÀõ"wÙ\!®×Cp½²ðq½=D¨' Î E8 ¾Š”ìåfðCƒÞ:ô)q­†x)®ÕA£z îñsiðPBXH&Uî¯:2¥uâ$Cˆ“Á<Å=Û“®ã’#ØcBø/à±Î¸^ùÅ@€je=A™Xÿô˜ð™ ´A8ì¦B+ûq0w°‰kú/À‰IcÀµ! aB -›Ür®4Ç!‡ÒæèïÛN3ÐÔ\¿2ÕØÁ÷qÏkÒùå,$Õìdõß_ð™'‚{æu}©Ä#IIlå9b¸ Ö<”½ÙF'Wî£ãßp ‹õhƒý%“•°6hJýAQV£ÜJöj3ŽOÿË`†ñ»µ k·¿Xøe+‘§½ÇÀú¨à¸à«È=] Ærô5¨Ž£A£V`b7Ö†|?Âðzwð£ ø½Aÿáâá»DÛ±–Š?ì¡}#ÐÏC1¾` m‹=&|“•þé1ÁCzÿÿ_pý ªE›èJ0‚<órjOÐ[…XÃ8y>x^§ƒÚþ„=žO¸‡î7—ñ¹¾˜ ¼¿r¿Ðßm>àŒÁ - •ë¸†FµpüëÞYsc)`'äà¨.— nƒµþÑkPŸãßÇk‰ ó@ƒ• y±÷#,]ÆŸÇ> Ö¾“%'Ñ8vj®Û°ÇDÔ<:»Qç‹à1!&ôõEŽ)Xϱ-q‡1ÀSÀ¿é§ÇÄ,ð˜€k“üå1aî8zH'óùf±×õeBÄ];è‘¢šzžžY ±¿KPÎJð6£ËvÑÖ -ü.è)3W -bk¶Õ~vò<èWá5à݈¯Ãš`”ØÍo„™Ð›ÅÚvà«lb‹×ÜÀS„½Ü¤A¦½S¡Qm‹õhPÀðØ€½Ð'‚šŸ>í7÷à{Tó\¡Ç3¦©JAÏS<¶Éã£ÀgöðÐܥМ/Hè'ÁžàQà ~PA=?´‚¡ÂkпDœŠŽ¯Ú- |²û {F%”í\nU$Tìॷìž½K}Ä·uÕ5°ÇÕê<òŸJ€ °v¥«'& ´…8>áoa¯(èKšŸÇ½X;‡ØÄu5èk;? -zXB¨ïÞ@´÷ =[±_ö2ÐaÅkå–C½.ñ¥ËK±þõÅ°CºÅçGc>€æxAÿSxÅ/ŠKñiŸ‰¸¹5äöVÁ}û3C{+À)ðGA¹úÎØGýbÐT™ÕpØ‚=(Îzü‡ÇDjÍaz­å«ø—Ç_rNÇ ô!‚g¿ ‘ë¿zLxyL ‡="PÜHlQ tÔ~4}äÌHÜ_pK™µ#ö1@óúï˜ÿzÝXŒãî´ÿDÀð^¤ö(S©]*TÄ« ¸ßoã3‰¼Ò}æ)cã6ÖŸ`þŠ0E*b¿”[i‹ó£aÔÔàíÞY 7N¦×*cMuÐH…Zˆ<&§§¯Gèjë¨îu¬@= µi8püH©SÄÁZÄþ=‡ðd…^+ï8xƒÅ*€0ôÍ` þàÀX«êW”3¡_>xØÓzP—ƒ(x< œ œòÔ¼  ÷ë„^n;À¿Ù¯æÔðØ = Ð7Åàéõ?hZc MŠÐÖ" æ˜ãh xy ¼ÆýÉcGÃxãu ÓS#ðWs«p¯„'Qþ_ÜÓŽãÈ£ç1anùx‹0îÕnŒ‹À9¡× ž_‘O7Ò²—[g‚î"Œ)`%{¼qH¿4t&^GõLP‚uT¬÷ù`“Ù¨ ºþ€Û°†"±EXX˜^uHœÙÄ]éÔ£.w© ª·c 4OÿëKñùØc‚Š+Û=&Pý û-´t]ái9ðYßAèCaŸmï„àÝ‚=&܇<&Dÿê1aõ—Ç„ÍHÊø¸<)9\t!lÖ¶‡=^—VA¼b=Õ˜¼ldþ6‰[ªŒ'ôßÁ‹0E䓼4’ù±OÖÓ×[õØ+-Z0¾àU…±6¾|Ÿ8»ÒM©:ÈÈò¶6ÐÙ š é ½K¼ôQrV@õ7¬Ùýå1Ac‰ú½ìåMaV»™ýA]p¥Y™¹”µÖxÈcΣøÇlG -NxŽùËc‚JhÞ-Buà”ü Bn®{„ÏÅùå26ìÆj6âÙfQÀ­U˜@¹ïíAxcE%ïf“ª0©å¨Ô -e_è=S‘aíF~ÙÞ7— -w‘…ëéK¹K©g«1FÞ[–tÀµ¥Àé07‡uàûà¿à™¹˜‰-ßÍ$V+CÍCÙË n€õz¨{ðü.ûöPL°²Û…)u‡èì-ñÝZñV– -~ºŠ²KT¤l§ -ÎN„5ž©ÕpÚx˜ŠS¨AhÄÐ|Y¸Æ¢ù ½(X/ÒÓ$ ø=ìÓ‚x¡ØÿþZ‰÷•%˜Ïœ ˜û5À—B˜Tq€ Ëß„÷3Àšø–þ\ë¯Nð_…ñÄl¡£o ‘] îA@öI¼bˆ/¥/它=Cœ&eî½Cè¼ïÜcw’͆z÷0@s÷œ÷$¼Æcë;úàÐ[Çר -cKö Õì(ÛÃþŽÛ+Å>K gý*ñ:tàTè€'$’³‚¾V§Ã¾¨1“<)·OH-um<…ñ^¶´²ƒâËuºì•ZMArþvì ÜÒâ­ µÚ×x=üªG€þÿ/ªÙìÃ`]T‹jSÄÃE×›HÁ•ð˜_sÚ+}þÐúDÆ<òlèdð^§ýŸ¯ÄvÁÓp”¹z^"Ï„ùØcÂkÈcB({´|$°?âþbT¸§*a?7T§bbô:“R­Â¤Õ©âú$µEÖLÀ|òÈÈÿÃ'oî?}òÒ»Uaß®!5¤EŽ÷$;øû µW©,LkV§Â 7À}€ØÁkBàÕk©P£C/ ò!ª‘„7ëH“'¯N‹Õ˜æ¼W¹@…æ­¥m“AKjM¨—ð:ðÙàÉ‹]“æ6;•Övâ]ñA̓ÚدÔPˆò;Êó¢SNã$GÀ“ÔÂfà»øLÞW,ôŒœ‹õ•aíö–Áž”YÐfx°Jìvy¬ ÿžn°F ë+h,a¯`ððãY{N½þcÎc„6Q¨Ž¾¶Xödàöns'ð CõkP.Ö~†Øeìp_ú&’ÄÆCàwÏ:'ΚzQ°ßÇèb˜¢±µÛ$‘C ÖÔÿé c+Èj9(yXvĸ ÈŽ¾ÛƇu*M>¹ ×kÐÛOÈà{+Áç–JïPßN*¥iU°Uèwg…Ð;köè…5ûHEèÑѧ½&à5Øñ˜ QýŠ×…¼’æƒ:Ôüø²màSGZzŽœ¼4ÅÈ\Úûöð—Æ^õž‰¨V‰ù…ýËcòìÇ »µ†Šy±r¶Ø;{ p¿¿ú«¸¦õÏ\J'–î%U« “«RõÿôÉ£þòÉËi1ø¯>y *à“û0ñƒþŠE¬7|mö’Bùö‚§ö]ò¾¾ö€!ã(UÄñül-ë”2âðj3úr«:xmö„ôº¾ü§IÛèé¤Sä èµÀ^Ê>zïˆÍX3cQ.%ÌhÖbW„Þ“€=*ûa÷aH”çÀ ¼’¡ŸÉX îýxˆI›¡½j¸^¢¡×à> ðXe#‡ö|@œÕx.ìe Ê]x¬7À:Êá #` -Ö÷`mz°WA䙢„÷•ƒ¬Á÷˜?âQC¦Q–çFá=2(B]ƒuÏC f„‡Süð’ƒ^­Ø]:Opm%ìï¥Á83ÄjF&ø©ò/wîÃ{NyŽ‡=†°G÷ªvBÏ4í1¶CM óëÒõexÏà>Ô³Áà…jØ»â;ÏUð© °|—…iC×äž±€Ž¸»V^³WP¶ò»±„²‹ž{"†üÐÏØÿ)o3ö"r -W-~üoƒ?ôgî¬ c_nÈò6ྠxRö›}dì[x5ø‹‹ý¯¯Ä{ú`8ì]„¹ã{k ™Ñ{Íì×a½ï/ÃûkÀ x™oöT3l_¬¿Œê 
ÊÇìsJa^mŽ9™Ð™‹­EIü<Ä>v^»{ƒj$ðuPy§$'”A•Ôûh»Á°E–?${ ÇX¨[BìÄ|o É$Ï{·<¨FÞsõO€ÑH¬ŠˆçëˆÂ¨{gÄÔ˜+-ÆìÍ^!uý#:¼t-8jÀ>g*”SpbÅ7&øþRöä¹1‹èÝøoƒ: -àVXn)jà:%ÅùjOr×ø™€E!~ƒJ0æëC¤Ç1¥Àý .ã!ärCAÕÔo _÷*µÄEÀ÷ÊBÂ]âgH=fŽ…š¡Ô>h²Ì.¬_QÑ)t*ÁËçÓæÐIe[‰ëÑ¿qPÏ$Ê>±³$¾ ˜È×ëèômúâ»íÄÙ1öÍ.äùjP3ô ¹Üm¨ýŽCÀ‘ð*ýf=š¿–sŒ™øÙï0Ç™˜âtJÃ6¢¦ -îZÎIÓY§Ä~ŒpáÞbVQ²‘K¨Õ'Uk‹Ê´@©¤_­ç·8Uà¥DµÔ*!/û‰gºÁÇø!ñËYÂ=ZÎFª˜ˆÔ2`]Šúù:^W°§ˆæýD=þÀ™¡€7$À91Õ3²7ñº‡:Ô¸cçG‹1oEr¨Ûî¥váøim?8¨9úw*œ`kñ±€±—ï@MÛaM‚‚»øxàXp=²ÖJÜ~çaâÆJv;…µ ÊŽ€Ce?{“Z¨4Cm—Ô;æH}oô+y<Œ1Üiϱ€õˆjÉI1DÁ×+ñ'*¾p=uõ£}ド(£G“q ëÏLzP ê² Æ 8®Qo‡}ŽãÁç å©JjúPCºpe©'‘úOÔ4P…º ¨,‹¡Öw -j)o`¾ -DP—Â9ð£½bJ¿ÒƯ8Nuÿ®/§ÀûDÝøúQ—Pwô¼4‡(“5¢ÔÙ ÔÊé ^ÌÚ‡Läl19saÔWÙ3ÞcAœ8ÛøÝ^NÆ„?Òtð½åà¶'¶O™Xæ ò矻„I~§ÉFUl`=.þyQì?•¸¿’J©Óç.Î'}{:h<}ç¼Sçx§©Á>Ä•k1 Õ[è¤ê­\dÁz¢´rÖ‡÷(‰KÈqèÍåàtŠBD9 TÛ@A8\àãåp½EŠâµ¢”ÆÍDMì ópf×ÑAÀ×Yàë'1n‚þ_ÊöKÙ†M"ûÏ `-ˆ!€ ³ÇýÆHìcûùúiß±¤Æˆc.ä P‘Ÿ»¹€ y¼\웳X|F1™µ9;^'Žø½ÉícûÕsñµ‘žŽž *¶¢]àNsD”Ä8ÞBL„9<ìÉ^ûaÐ’Oÿ“¯;Ž€z ©âXÆùdÎ!5Bàn{O nX›b¨ãýéAG>ZÉ\m3’Ü­—Ó™ïõ@íö/7ÎÆe(c…1ˆõÏÓ‰yKæ Š4*ÀÏ¡7%Åë¨W¢2ÄO”Y ¾JT8¡V×Ë=(kþ©º7Zb‡yÆo‰‚U?_ ÞQot‹›!>Ÿ5sçE„7;ãùëWŸš@j Àù Öø_[ œ^³b"[¸ž y±‚ ìkÚs,ù»ÏnQlôJ›ø„Û{zˆ…GÒl:ú•:8Ê°¾·0ÇGCNƒÚµäæÞîٳĮÙj§ —Ä9ÌÀéŒ Æ¼çlPMI+ƒ;8HIã*¶KkµÁ%0Äp -Us²¯q^cÃqîŠy©ÎFå•?‚oícU`ÎÀ\(E—>n\lÛò_Ì#Ç×ø:q•8ˆ÷üaûáDAÇ[q轕Ï$5©{òL̃瀪"`PPb'õp$Šx¾† |°ÁÑ™ð6Ì•@¥\€˜ÄÚm˜¯¯ãŽ!õ}ÉÑòu.äÎrp7‚úàtØ«€úù:ì— LOúkxaX¸¥Ï–ºF«¾n…ù:8Bà¹%ugŸÌ¹„/Ãs'F‹ý./$ÊFPg"nYóÄ)å:’¬3ñFŠÍèÐ%¾ÞHÜ öãk¾cqŒs$Ç”ié å~ÕSÌýq ƒ½ ïÔý¯¦þ9€¨#RÔ3 ž„׃8ðþ2‰ßÕERw…ª„ðõ Â×AíÞ#áëPC><b(ŽÃà€È(ž¯ç2¨­C’p5ŒK W5=¨5¸Ç©²àv„c!X¶ \åèä& ŒIúÝ 0&ƒ}Jj¶‡\GPò=Ä Bò/nà†Î¹^úÉœÂ\Ù”BŒÕñ?Ý |FINá8ì¢n’cö#`ï§×´ŸÓ–×úÝ $ñÕZâ¤Fm¨ƒCemðß?å?Ç@ý -â*8‹Ðq˜cÞ%5W¢w!{>•X¹…ÊìÙ½à-L×Ìëê|¹G¢šì˜çhX›€mˆ:¡âõz6©Lƒ{³™(PÂ8AÁ2âþŽ8A&©ÏäšÒÌ:s¸6ŒâÅZ²>}¯-€ wáÆ¢²øtç–¥FõiýQj¿ÄW©{šÌ#ä$¨k0‡‚š,ee7z ý¡“I¿;ðþ*òÞ þpãp»¤8¶C\$ªkxý·C¨Ò^Sàà*´Ä â|’Ú?Ý üÀ "°_™ò?ćxÂÿÀ ‚Ã\°¹¹‰œ` È3°!7°˜{‚2*¬5'ON }:àöìÉ~"5t×XUÎçê\Îÿîâþø…ÿ>¨èftœ ªfÐ? s#Àä+Q¼µö,‰¥;!ÿFÀ¼\Ž± Qåǯ?'¿Oz‰ñ3@-• zº‚Ô#»Œ¹ïìP˜oÒ×°>2ˆ|µ96®•äÎ?à`{Ôq½÷4‰C[„?X'‰}±…ÄEÀœPëw®ðG«YÅsuÀ™ s -±Q—GÜ ˜ØâÄ óg8oahbL$G•À ¡E±½ãgË -qƒpïwƒþ«ı¿Ü N f,÷+Ó’JïBOçy&¶d3]¸‘ø“Ô ü®.„8B8§}”ŠÔûÆ6¥mw©Ë€NíÓ‚º´ÄãʱSÂt&³]œÖçÕkàKp> ?ˆKæ ¯±ä\„]Ø$àåP?®üÜ;ÿA Mjsv¨ìèù1àV½OP–e"Þ®ãœ/©R‡}F€‚;}2d,¬Ot¿’žŽ)ÒÒÖCúzæÈ”;@ú¡€;™ä* Pšår¨a ip:>˜ð‰ {«á̬Qfß©ÁæÒ0viJ‰ž¿NŠs©½ÛN$ý?8Ëá -Š­ijTråV.½MŸ¸A„ä,…5CýÓ ÂöO7ˆ“CeÄ âúrPÓ—ž¡þ®S¤>I³AÍXóp%{µÉT|©Ñæ\¥H¬+Ñe• ÄÉå;8Å3uˆ lV­¨/Cí’œ%S¿ìEÀ€CÏî/7–¸AÔl_ì4d¶˜ÒY½zÔ¥Mî\æ\èñÐûœ‡ˆöÙ¦xûË ‚‰oØ"ż â” œ‚®/—y„ª’üŽs™8äÚRqØãµR¿K<€s9Ûƒã=Ì“P¸EœX®Í¥”h3)¥š0¿P{fÂóWCïFÎÖÞ×çS u›é°‚•ì¹œùLÐã¥$FùßYn“¬ß•ù€é6‡>à}pJð̘ËÅ”lá*4ó0öŠÉÀ _¼‡ìÀòpn¯ ±âéIrõN6«ÕPv»ÊJv­IÌ>ZÂØ%¨0§BÇS'üGCBh}l 9k9À¯Sà ,Æx¿Ì†¸&ÆûjQÐ/25 üqTÁ¸Pæ{w¹ÜûÒ<‚g~öç5ÀAB’Xª-É[CÎ3@ÏFÿìõ‚«&8¥ÂüP1ùë؈«À BjMjP€3Gr¯˜Y²siód9+àLG?¦IžAjïP:}~©±;)¦'5 PÇ=é=†ôxlχ:8ÔVdq•:’˜¢­ýœço{8ßqs±Ì'}Ô쉳$éCû‡¸7rAÙ‹Ø+ÕÆ⧕»åK‚{£¡ž÷_r–-µx‡ìbµ‰øR••”·¸7‚[vd¡:¨âƒJ5éG€³ÆPÿïw°ÀœÍ>t2ôE©ÌM1—^­§©K­Zàä¬WÚÌþþDú úçà±à’Îú>YLÜ ì'±j^RÏø™Ä Â«ß B¢¸¯ŽD-cÙ)ÌÜSÔˆóæ©DM¿Î%Whq©Õ:„Ÿ¤4jAÏ{ÀÑŽÿ?íTÿéh—Ö¡ç~LL¿j89g8Sçàïƒ*zl™¦$µA -X×Öé «ôR£C- ò!æH’ëÕ´ÕÃGe÷+w ²ßkÁ^`‚Ÿ-gm“T@õ¸&ð%Òþ9p,‰Å®‰3@EImÖ†õn€ñ Á#â,*àüŽó¼ôˆÓ9äp=xjÄwÙ‰r®Xâ®J”¡wgËàÌ΋bPQöË]"s»8 zB€‰ô(¡¿‚çΊAÌ|ÎyÒãžcH­Ÿó0É©Ì£¯Ì•„<\ñ¸,€¹¸…aþCTšaírv~¤®uyBÝNp¦;'LNµ(8ïcq6DÅò¸Û©ƒÿd¢~ÿ§{#Ì-•Ù¸C~¯xeþ+;öv³úTÆ"¹‰ð5¨í€{càÅàHˤµê€Ã&“\¯ÁEä«K.ÜZ$ñÎœCÜt¡ço®5:ö¨×(Òƒý7óWÒòJœ ÎåÀ'DqÅëÁQŽ>ì9œ:tn^#ª¬÷ÍyàM\å=0W‰þIü—ä 8rcýtäl™wÖ<À~ÕW §õ͘Ï&¼Õ&VèJ’*v0é5ÿt´cþr´Ën4ÿ¯ŽvµZàhç0Ƀú^‹D>ðÊ"âú„ó?œ;÷!âä}uœç@Î1R…¬ÿÀÇËÅNÉÓa]@<nÆ^lÒãü¯, î^WgƒS4m5‘v -Ÿµ8 ÃØGMî95zfbœËGIÒ Å+Bí‰ïU‚ó°€ûˆ[#Îsàd®ÆPÏäbìõxX“§úϪ‘8¸~ß[Fê8^€ªØ9¼ÿÌðÀì˜g3ºp–1 g¸ óAÐo€>Ê®3ƒ Oý=è AÎ*H=“Õȹrpn€¸„ùbu6hsøärFçCà5D¡bpFx8ÅM×7¨ÕÊÜ#gHý®,†ó½,¸7f†µš^iΧ¢‹mäŒÂÏ‘pÆΨºc|ù&¨™€ú<‰íÀ‰a»º€œùƒ¸|6\+0÷€³+®1ªd¯‚£dhîrpH–x¤öÉ=}v{9•V¹Š/^Ï\¸6±‹šg"úˆðÏÄ©éÙZâ䪪ùäß'¨ÏøÝZDÇ<_G)ž­"up:za4Ô‘‰Ã¤ÿÝ¥à.󽺘œéƒsàpvöÎùóèô®âŒc±÷Ýä| 
8g.;Ÿ5s†UààB”ò!c^÷â:À/do@8%ðn‡hx3ýaßÖ÷—„éÐ÷„ž»×žä}˜cæÐÙ¡äl«s²*ôhûÐ'ÁõX1™9~n¸vOwQêÆÒn™jâࢵ€;ÄNiª$7õEúñ²Cʇq!Åù(Á“óž¤ÞKþðV¨¥ž¿¾€ðÀœ%ĵÄÎãù¿g­pž~.:x}HƒrWIüs–ÁÜBÏzõÄ5ê˜Ð¿„z!œñ…:¿gÆlR‚ž)Ž½¤—5,øÿ0ÎàΞGâ(¸#A¯ÖhȽ•Ä)êéÐçòJCœÉ ^‡>\+ »» -x\=1n&8À³Yï ¨´F poŸÏuï㘭àø*†œv:t"+ŒsX»_ ¶MãxJ¥Th@]…ŒÉ¿7p~Æë'üéZâ€tw)~np68Î4ÌÅæTf³6ÔBY¯K39Ït58gµ*¡t£8ðù*R—Æ{Îëß&87àÚBègö„7!6ÀÚ†û=à¼þ7–ŠÏ_#çe¡¿}^¨iÀy&ª`-“Õ¥/J¬Þ(>8Ž¶ÞOÎJBMz×°ßÅço.”8&N'®,öS V„÷À<Òr -#÷ÞÿÙ°‰ÄÅÇ7©ïåE$÷þ¯J^?è9‚;àMäŒú~·ápêh"‹ýgê­„Ú’Ðâ¨2ÔŒ©#Îé½vƒÁAJ8·”lhÞj&ºt=ìpü^Iˆc,àb8›KÜç _æ<\à\ -òlUÿ5ˆš -9•¼g8ƒðh9Y¶\c ŸÇ±Ö5œµðLžCx)àpüÀœXr!w ôûë—æ‚–Ô/wQ N0+à0»pò çäî.!8êÂ8_’¸à–ô¬.ôÙpá"õK¼îñu]Πà¨n$à.ÆD<\Mz(p½Òç‘š ô„ ï5cï”™?á|F§”ncbž®#}«£>£Áå úü’àÛ+ žÆ†=[nªÄ>¶@8÷E䬄¸LÇá8MrÎ2qξºˆ`Zp˜§ ¿›‹HîÏhß)ºØ ÁžË˜¿ õCQRÓèI1!«€¿Pñ•ÅÍ&LF×N:±~ Y÷Á÷ÀíMN,ÛŒs–:Äaè£b~¼jÞïËs!þ@ΑúÞYÂ…Ý[Aj à0}¥ÞŒN(ÛÄ\ȘM\ VuàÈ ’ïÀ½%­c;ڥɆ¯ƒëñΰ²a¯×0g£&²‰S8Ÿ+s˜ÔÖí’‹ÆàŒNêp_pèy†zÇ%5è²û<‡“{UܮΖº¤Ï„3dÄõÜ9àL2Äà Œ#¯$˜Ããªçym6³ˆRà¿ ˜Õ.v2u*lí”4…qÏžIE–¬¡®|ÚÉ\ë1^ùª#ÊùaB¿øfAç}ÜÅäörÌåO†Tæ§tFŸw£S(}Ô¸Wö¢ö¸ôeÃö~»”½Ø¥ÇżÙlq.ižü°Ã¨K³«øšÛ#Þ§ ÄAWr) Úò˵´UV™dWZe•Z"ÄjgâSÁåΠ䯤«¶@}Füt œË “ª¶€ëáœÛs™ïŒØ¬÷†lZ›Üºú&ð§"çÒš4pMmÜFÅ”º4ü`ÄO:£Mb ›Ô¤ àëLê{}1©¥’ó¢·Aƒ¯œÏÕyLØ£Œâ­:uñÝv:­q;“ѸS’Õh*Ψ7‡G*¹n+¼G¨ßÀ{§*6Á:uM§¶iÂÙæò{öz“\¢ÙÛï(aZ÷6Q|Ë&*ë».û¸ËBRøîgîUûaé㦽ìÝV|?ðïÊnU[p7E’›øÿk¡$9 –ÜÝ–-^E‡¾YIÅÔod}Ö`r;8ÙãꃲÜ*kÀ2²ìjŠÉx¯/ÊlÑ®.HpÖ€Un”&”ïßms7»DllŦ~‡ÑÌùlróvQäë•ôñ £€w3ÉMw*v[ܪÚÃ\üªË„¼Z ¸¿'ˉûZB­éi]x°ò1©wî± | +T‡Jpˆ¢xƒÄ÷á28ÿ"O«5“¤uéC Š=wg>æOS·ÌŒÇå™”wîl³¸ÚUfwxÑã"úù7õê7+&ÿ›èíßv Ëxk¦å‹£¸ëÝy¶ù“3Sþþ8WÐs®¼¬ÄCZ^í,~ÚmÃ<ì“0{¥Ò§u‡äj[Ü®²–_¬Èâ+udIµ;Ù”Æ8öl†y¥C-e“Z4¥—šÌ-/ÕÒ’¤V]‰ÿ£åÀ-=fËœ‚§Zþì3ÞÂ3cž8zÇψS8xBŸò#{µÍ”½ÖkÎå´r\N³˜{ܶKò¨v¯øY«äJMgöéÒ‰u›E;4¹«mîv+Ëâ9¤n÷™R·¾š°×>šÓ·ú̹[m s»SÈÝo–Hî4Ie¹Õ»-Ÿý,¹Waɦ·éŠRj¶PéZø¡ kŽ y¹Ö%¬1æR‡®øb‹ôkÅYm°6Mä—«iùµr1{±Z—NmÔ„ØÇ¥áø—Ô¨= *í•ñ~»0ûÃær>s­Ó˜ºÙgD_ÿhLßühÊ>è•s»qèâè§eÜ‹÷û$¹»$7ðoµÒâÜ:¹ô^Ó.î^³”¹Ý!€³ðîWcQÎWêág†Ëû¸Wôêï–ÔÓ¯bqIË)iI…£å‹7§-½9"»U!g/7³™ïõá½qáyk!¼‚ñýì£LœÓ%ãR{u9 v])²ä’Zµ»+›[Ÿî“Ð µÈe+RvLr»ËBœÓ-—^o“ȲZhÙå:FœÖd Éì2á2º 8¿KÀÍŠ»p{Y¤NDÃóÖ~‘œÇû<´h$½Í@’öNŸIíÒf}ÑÅ÷n¤Û*Èú¡aöŠ§üAÑû?ÎJ{kŽ¨ê/Gé¦/¶lÏGO¦ç›»ìCQðî÷w"´^‹‘öÕIºÞùËÛë‚-;J#÷µ>KÕVxãë(e^~Ý%Îë: -ª³—?¯?!¹Õ,“߬¶´Ê-=¼ëyž­Õý¢ãÒ;Ur*ó‹Ž(±e“øJ‡Pž[½»ÞMѱU¡þ5cqf³ ŽCfòKU"«{,®UÉo2im:¬…‹ízÌÍf4§ÑRò¬æ€ìYýaѭߨû_DÌ£^¹øUëqq^çöq·œ¹ßÉŸ´Ysù­ؼO{˜W½{Ùâ®ãìÛîcìëÏûé‚ß÷Ð/¿YˆžÓŸ¬ØÒÎc²Ö’ ²¶·¾’ŠRGöÉ;+æÖ{“õAŸJnÝJ]ü´ö{§áîµI¥¹VÜzŽÍj1¤2ڵř-¦òûå{a}Ë•ßlf™æÌ•vcñÍVFz·ÁJü¸ÕŠ¾öÕ€ºõј»ÞJÑ9mBê~'M?ë‘3y_÷poºŽq¥í?³o;²åïN²í'è§}R6·“…ÜD?í–ÐwÞ‰¨û="ñ&1“߸›­k>%îhô±è}.ýP$jâ ªyºþói¶®Ýž}Ù½‡Îþ¤Ï„áê=xô4I,ǸòÔ«aUËWþ®-Îi—É^•±yðìŒ,¾F— ¾¿Txý»u㛡èùg–zØdz~ÕÅt« ¿m’$Зg6SV·+È_ÚY”ºïzûÊÓ2¯ØAú´úìF£D’ÕfN°Ê¥n=ài Îs\z¯>Æ„êRÇK3DqÕëE¹¿˜QE߬™îÏ®ÒOo¥ŠÅŸÞùq¿túŠ?Õûq_{γ}=^Ò¾’à-Y‘G›Ò>eqá¥çböµ_WÐ}¿¹I{ƒ÷¾»kÙõ6Rú¾9PÖÖèÏÔ´ŸÁ×óûòƒ ÷üÃQæ÷íTZŸ¦ìYÃ1Ë‚Rùƒæâï÷2w?Ðì£N©ômƒ­esQ´¤ÉQü²é°¤¨ågYa…¬¸ÒÙ²2ÿœämÓ)Ùóšc’×5Ǥy '¸çí6ÌËŽ]Ògux=––å—TÇ1ÏÂübç&¡ÿÝy‚ØêÕ‚›ÿÐ<âÍéž½’Ö:ygiˆ¼§2DÒÙpAÜÓtAÚÕÀ´¼sÿÍJXü‡%Uü‹¨üË~æÝGGñ‡z_Ù§ÊP›Î{1ÒOALù»Ìó.9{µÛ” -{½’ y±‚ŽªQ§¯|4„ç$Oêm,ž—–?+=&Ï)ß#»Uo!¿[imq«ÒŠ½ß*従S;9îy³$¿þ¨ôYíéÖý’mÖÔ­ïÆÔ½>‹ó/÷¼ÅFZ^î(©©r“7V\°ê,Œ8ðîVâá–+‰û:îÆíéy#é«ò•6—ûHJªm™=–ÜÛ–ã\S•›e×ëˆÝÝÏ¢¶^O8Ö”wª.1îtMœâDCrìÁ–,…E_^ûá½—¨ú—C‚{¿CýGúsè‰kÆL:ås¹Ï€»Ôk± r—ÈíÉL³”êlÁ‡}–m¯C¬[ó"­j -ü÷´?ŽÞÝþ"Z\Ùè *üner—×z?™-Øí0ØØÀ1Œ’ÄåÚ,iN­µuû ÅáÖI§ÒÓ÷7ßK´¨+ò…¼ÆÝh¢%—Ì%9ø:½|{Ú²6ÏOþ¢ügîJ£™ìF³â ×Öàq¨5+¯¹˜„·Þ -¿bÿhXƒ{ºnGY|È“}zbýþ^¤cm$Y“YÅnQ·‹]£Òßz*Ž5¥DËðºÝÓ™µ¿õzŒ¬·"XÚÛ°»ã¹‚­ï³äòFtÖw=QbçfÖëá|æÒ—–ªOʶï§2þØA¿].ˆ¨\.zò•·TúÙ´?‰Û×ö0Þ¢»2\Ü\ï!í¨»°«ëM”¬£2€-j?,*ød!|ð»™èq;i9Ä–·œUÛÇvÖºÁœìîx¨T•8šçüªgz±jYfßF“;?4LòxCAãï7gĦ–yÄ$”{ÆT_Hð¬N<Ù”’hÓy7Fö¥=±ï½%\HáÀ®lV—!ɹwšeâ«ÿ]i¦,2jE²äV.¡|ýF¾üQOú¼å°Ey‰‡¬¤ÜEüð½•(ó×í¢ˆ¢Tð«e‚È·+¨G(YM‰ç¾wöv>‰—·—JÞUûX|(‰8ݘè]˜äU˜˜\éëS˜*k.:ÏÔ¶ž´ê}é^šr¾îBtx½GtF¥kdVµsøµr—<Þ¨óe~Š€·~Qågëb¢w½¿Á½ïÅ÷eŸôYÉ×’=¯?&{UuFú¸~?w»‰á®6™K>6ø ?ðvÜ׎󇛲ãO4]J>Öt9épÓõdY{•Sôù  
øo2QõýTÅߘ¾æÍŒŸñº¦iëÌ.}Ü$yÓrz×û"Åî®g -¶ù½#[Ôq„-n;Žñó~éÅF36û‘änµ\ü¦ì„eýK¿ý­7ãv·=Œ:Ðz7ñTcZºCCd¬O•”S]pDP…ODF™[äͧˆ¼ÛÐ’†3Á…•öaåaÅÕ¶!U¶!%øç² -»°âbÇðÜ爌÷¨ ßØcõi1½/B$}ÕþÖ#©Ú?Ž òúî?„ìÍoBö^·Dr¿}—øòG36å«ŽôrÃÞùƈKÚO˺ƒÄÍ>\Ç;/YoM¨ôSY(Ó÷ÞCü¹ÍOÞ[fÙ]É6w8 Ÿü&¤¿£$ùÕ‡äoÏËz+C,{_GzT'Ô]H=Ù”yiwÇ…¬»ÐwOד˜}·âöv^¶kŽˆ ©òRÜ®tŠ,¬± +¨· }Ñ`ú -Í«µ Í«± }\鑃÷]j…»"½Ô]‘\æ¡ð©ˆ‘Ê6-áÅfaÅ‹„—y-ñ>‰4·w—ìN§µôUÓ Ëâ"OËúÂ@«¶¢Éë†ã¢¬>mêb¶4«Q$½ßºGöªá´´çäç_¬eÕ5Þåþ©®U‘©V-…‘â¼–ƒÔÕß „‰uëE—¿è²»%âÆS É)‡Z®'YvâX˜mÙS!ýZ²»+GaߨHL¨q‹©ñL<Öœ™,ë}µ׺ïo¿ïÙ˜àUçWãy­Ò)·*œ"àyz^êù¬È5öq‘‹âV±Kdl‰WÔ¶ì(ÉÇæq[£¼ªòœ8¿õ°´¸ÆVò¶ÞVZVå{#²è^×þ -¿2¿hϲÐX·ÊÐؤ"¯˜€2ÿD¦ã½›ùK^(hàìÃñ)¨Ô7>¶Ð':½ÐKáT…×WRRhù…¤ø½Ñ}\_óƦ/ycQÓ÷ãðon¸™R˜X”}¬åúE›Žœh®¾Ì^\VzÒ³&4æ Ï+"®U9G¤—¹EÜ®vŒ(¬· K­wŽÞÛu+†þ¥ËÝì=¿ß¼ç‡ßù3Ô÷O®–½ƒ+#b*.ÄÄ{FezE9VDE¯KŽ:Q—} )+RþþU°u×ãHÙû†Ù»†@YS‹Ÿøþ/»ék7?ùeucIÔ‰ÆÌÔ£MY {ßÝŠÙÕý4j÷ûÇÑâm¨÷¿Ú±Ý]’o•A{Ûo(¬{îGš·ñÍ›>ìeÞU9ìm¿ë^qÙ¢6ïœé ÞÐ$óÛ:ÿ‡3Mì’Æ›:'M4In\e^ö»„îîp>Øq96´Á36®ÊCaÓy=ÒüWþ¤qoaôž—öò2Ã^lÐ˳Æxkó¯ü óü)î[¥7÷½Ò›úòÅŸ™—E5.0»Ëëѯì–5Ù[VUúh¾è\•Uv>1£Ô#Ö¾!6UÖU ®kpãjÛ$µî²îúàõ“\+#í+b®x*î¿v°­‹Ã1ûI¬¼·,ܺ« Ú¦3'ætSRª­_ª[uxâ¡ÖÌö·ÎsL{»³¸«Çβp›®Ü˜Ã­Yq§›bccêíÁy|/×ÐíV¼WUPâíWn‘Ïß:Eä¼rWÜ|íŽÿ_÷ÈG¯]#ÓßxEïéÀ9¶§0ò«_¹ì£7.‘ùENaY8fWØ…|h<ô? -q¬óªõýþÑU¿™˜çOíí¸®¸TïSØx6ôa½CdN½CTp£O²ä{i ä{EäC«¯"…½¿ž6|Âkš8ÑÛwLy‡åI¥­B)Ú°Ý­Ý´ ©oÜŽV«k¡•k·¢eêšh¥º)Ò œè8>§“ù·Eºµ¼.ý¥ÍÕ¥$,"ýùyŵ—^Qá~ŠØ<_EZWdh~@ô9ŒŽ6dÆŠ»ßûZt–‡ïk½‹qQ|h‰oBêkŸØô·ŠlŒ¢Ë¼cç þ~Ðâc^Xz¹›â9ŽolÃn¶Ø†Þl³ ç~Ô_0Îþ´ÎÔ6r¬¶¡-QUC³NF3Ñd¤Š& iø¡†¿_fÙþ&2ëµWLÁkçð×EÎáù%aÞ8G$•yDG—øÄž¯Hô¨IHzísƒÌ©GdÜ›s1áoÏGïê~Iµÿr–éévwl‹©n<R×|&ï¡xêG‡A=oº3û—å;¯Ž7°»8ÖØåödÃÀ7jÙ_Vêçü}Á=~“ALí|MÁ~4Wmš=eR›<a…F ‘hŽcðO*h"RSž„æNPC+Ö›£m–aÊšgŸŒÔŠÿ0C·‘×3ûÂüÂÛYô> ‘twú‹>xJ»|¤]mûß]9WÿÚ'*-ß+òÊ+ÏH£2 -¼Š\"_¾u -¿Tä…ãqä“W®‘/ \à Þ:‡§–z(¢Ê}âššNòÇB¿´Ú%ýÒósÀ¡ö”p“ßøƒ;^ñë´½^NØ*µ°j“6Z0_ -]¶éXWÒ÷¾<ÙÀ3c¢Î~{åy3ÕÐX4 ECÐ 4|ÂãRÆŸÒŸ?ÄόģŽk0þI™<7þ4jZ¹Z‚6ÓþJ;nòKÄ=…ž‘Ï"cŸøGEæûFFçûFE¿9[p^‘^àu)ß+êvžGÔÓ|·ÈÜ|·ˆ/Ü#ná½y³È-òn¡kÔëÇð ñÜ×v_ñç:_ê—Oîgš£c^4áÜr&4ºÑ5zg'o²ÕâZ8u!^‡ãñû‡÷6¿+%2 -˜›øïtúxý?(‘ÑÀoÂ…† ‡¿ŽFƒ•FãŸ&¡)ã–¢åëv£>•“}8o|à]pŒÚ#íi -8Ø|%6+Ï+ª ß5<ãµgTÖ(cŸ:‡y•ÅZ½ê]kóþ+—ˆ[o\#c‹¼£¶fF‰ià~kðsiŽ«|w*8¼É5Âô7þðÅKÕ5[ÚÈIx CÉûŒß!|? 
-i"^qãñwðý€ÿ2šÿü1€Œî_Ç=ÂÜ ÇÿÞ$¼—l;„4ýZ&é\ã›ôñû¸ö÷>ò¶Š ëæ'û›³g«ccmk1/^`,‹×$Ìaì«ó -˜Çœ^Šç®dÞ –>Èw‹ª/r‰n/uŠ~,òÇÿžQ;Ï™õð‡„ßy;ý·¼î&4ÏÐÿë£ÒŸWþ;èÏk6yÈ<4{¶ Z®{ms¯£ÓÉï¤?•:œ*N -NxùÒ+úQ¾›âažGÌÃB׸»o\£Ÿå»G¿zîý"Ï=*äo¤{yhLp‰otPÙyEF9æ]eΑ×J\#’J¼‚¯ü)ݼºŽÏ‰Z6öJ+–«£)JcÈFÞÅÿ¹æ”ÈõW&ë¾þç1ô¯ÄÁøõ!dïõïÃä{xn(þ£ê(44MWÙn>ˆÔ­Ò”tëx]ês‡³U×ýÀìB÷ˆ "?Å¡æŒ(àÃÇêS€Ë€Wbìå‹¿ÚWGEïoÍŽ„ç³ßºE”aÌ^WmÖumCkÜc­{r"1¾j 2cê”ÿßk??þu¬=ãUþóµþq&Wf8¾BÃñç²{û#j Rþs¬ÃÈþ›O͘c†¦Î¢©sÍÊt4i–>R]À¢e†¡h³só(Í×ü -黧.áOƒ¢nyu!êA[$ÆfŠ˜"¯HŒ3£{ÜÌÇøc±·o£j^¹F½*t‰ÂcŒ4úÆïÚx0XiΚM8·Žý·Çqr(‰ É÷ý1°'hd š¤4M¾M·MŸ² Í[&C 6A³×ÚàÇA4}±MŸM£)ó)4e¦)š¤¢ƒ¦MÛI^[!ˆF[\jFé¼ãõLzø=Ö ·Ï+K LxìUüÄ+®î¥gBQ¾GÎñ±/ #ß»Äô•;ÅôV;ÄöÕ8%V»$&5øÁËæ¬3ÿ_ͼÿ1£ù=öç<Ø{ý¯Â¯ŽC“OEÓGÌCSG.F*ã—ãؼM· ï¿hêø-HeÂ4qÂ626Õ…–HuŽÍ^q-2ðEkwßVÞÜ9N3‡_¸½ˆW7ùÈïÝ[{Ù0è“g^±Uùî1uxžªJœ¢ZK\âaϵÕ8§µVº¤´Ö9&¿.qÁ\Â7L»†ß:o‰þÿzmö‘ùƒ|6^i*¯ŒÞW𞛨<?7Æs8?& ™ƒ&\ŠÇ¶MžªŽ×§^“&hÚjK4CýšµÝÍ3ñG‹¸x´ˆNB«¬)m ùu’F ¿Šé.:{¦ 9ÄëUx¸ß«Àˆ·{Uâ±Ý{ë¢x€s\I‘³¢µÄ9¦³Ì9¶µÜ)6ï•kôɪø×üæ©c¦ýÛóö×~ƒx™j¢² -R< i^ñó£pÆÝÿš’*R¶ÇCñN®}éž—ç¦Î\ê{ª!>¶ó„ºRGR‡”||~Nó&?oêÔÿv.H°`Dœ½â\6B Çþ9Heàlçgâ9‰¬Í øs¢Òd<¾Yhʈ9hˆY86âÇØÅhòDÿç µeVHm¹ šµêš»ÕÍ5‰@³ Ð2›Jë}jFl»ÁÏÖ®ç51>,k{ê}´*-8ôE`Ô§^‘å/ÜuxlÅy®áŠ"ÞUÚ…~ªqˆïmrH.­rJL*öŒ0ÿÑuz™Äöÿ:gýø?~þ+–ôcáax¶Fà½6ÏáTò˜¨üR³‚Ì•Ê m¼ Ñ4¼Z-C?­dqL1AÓç •iZHe–.RYi‰fnÂ{Î<­>xKy]Hý¨M—ù©åüj­r~½Ù×?Ù4\¾à[èŸNcgÌ Œ¥ñnªtNè¨vJê®uJþ\ë˜ÔUášÒ^î µ¤HýfÞtŽÚ†kÞ”ÿ%>öÇÑx,}Œ\„Ô~ÒB³Ôôñ84ÑÔ™Ú8Vh ÉSpü˜²ÇL¼Ïð^SºMŸ¶ ÍPÑDª³MÑŒ š½z?Z¨ãŽ–‹RЊ}¹J«=J‡® -m¾éÿÓ–´Ó¶ð«õ:ySÑç&‹Ö{>Þ/ÃßE–åy§¸F?|é¡–|ßeú„70¿‹ùEÁ÷]\[‹‡y;X·˜ß¦y¾p‚ʨÙÿã¸úsÛ0‚!J'( 2ô44ÏÓä1KÑtœç­²A‹umѼõÖhÞ"š=s+šãåôñËðrÞ¤:mÆ”¦hÖRÍZ)AóµN å&h9‹Öî{¬¼:´}ä†ûü4­^^Cû¿ÝäocÚÍäÞ÷xïmºv¸63zwÇ݈à Y±©/ÎÇ&½¼{¤ñr‚Õû¢(·šˆÔÒRûžr»Ðø"¯¨=<½Êèøÿ8¶Ád<£Hn‹THüA0ÿò0Äˉƒf` ¢Š÷£ -?jhâèehê,C4GãããOn í™´õ -?Sã1¿ð¤Æ~Á–ä¿MÝÙ=aCHç¸n¥#7ž}0t«_Ù8­\~ÑŽ^ËüÂìTü¥ÐǪç~ð‰†øHàsÀÉk>îZà¦xòÆ9¢¤Ø1¼ñ­ST_¹kRõ[—8êK£“¶ëÛ # ¾ÿ;ksØŸ¹Ö' c•!Ó‘ê¸åhöC´Lï8ZHÛ¡6ah©SîÀe¾¯¯ö{;l×ë¡+Î>¸ÂùÙ u>¥Ã×ù7\ç[7ró›¡ê¶yC6y•Úþ€_¶½†ß¢qŸ£Ô4Y;›_ WÃëÖñ"£V^bÔÈ‹…ø³gªb"³À¸²0vv­‹1e̾ñvz²«b:ç‹ÈÔëÆ4 ñ™slýÖ¥™3ˆë“T0ž¢‰&ÏÔA* ÍЂíÇÑrÊ-¥Î¡’P´æàåõAÕ£·\ægh<ÃóSÁ¯Û^Áo„ø·9 vœúþLå´V5`ãá+ƒ¶º•ŒÞæÛúÆ&¢ô¿iqÙÿ0—ÜûnÅ=úl%Îé–ÉTí—æ7Ÿ€óN&…¼¹¦4@y¢òÿœ÷ ;müÐYë¯F?© Ь%–hÎúCh¾®Z&ðFKwžBË·îGKÖÒhÁR´p¡.Zª.FêTÀ€ ' Ùh_8|{ÚßgëVòÚµ¼¹q7oE}ùîbÓv-LòùÍ9á·o¶¢Ï? 
›yngæ¯K ’;—^å×—ðf¦Å<#(ç­Dõ}G¹†'î]•ûîîÇÑÖÏ’mLMŸ­à-/Uü8Àu´xyV†$–—؇x•Ek'}˜5[MýßX—JÀÚ0öÀû sˆ¹hútu4o9Z­w­§¼ÐFÚ m>”8pcø›q¯øź?xÿágɧGnÔ¯åæÿøú³è÷*;Á¶&¿ót¿ó¦zŸyáw^fô•ßÍ~êô2îä-wæ«Ž¦g4ب€7–ò»M²ÿØ`æzeŠðoδ÷ºl½··ýfìɆ´äƒm7MÌŒ»ûà 6ìé*ivÍ>ê–š¥}Z§{*qÄÜE› Oûï>FãyûIe=š¿Lˆqîy´þð‹AüÚFoJáUðÚÛ°ó3OýÊï6úÁï1øÎK·7ò›´Jù5:]¼Žñïü>»šˆÈìR׈ØrH·ª ɇçPC1ÀkO÷9¿^ï¿vg)¯kÚÈ[ :ù㢾?¥ßª‚¸/ïÎßã5M,*i­Ù€ tuÜG ç©´M&üÅ*‰ÃEUé‘°qb×Ü9LÖï†âÜ> :ó³ãy}–Ð!SÅÐ&pÈÒÕfhú¨¹ÿ¥ôkSã,Œ±Æ,Ds׉ÑZ‹$¥-A­ þíøLöË.³ïüa³_øc&Ÿø=†õ8Ôð”I)O½åÍêxkQï`ÞÁ2-å9“§üN³ÈÖef^·f˜¦v­–ÿ¾‡éyï&üÈÛ -ZùƒFÙ¨›ùæ¨ jÖÑ—?ïdîà„1Å«¨Ð§K©ÄgêÌíjsI´ÐѪ.?@ü°ÃŠKú°ƒ /_ϾZÁæ6²VÏd]¥¦%<»j3ýߎk8Ž‹c”UÑØ¡3Ðøѳ0^‡f-6D+ŒíÐúC×®?_6rËõ¿ÿ¤]ÁoÆûÉtçs~ëNï§*;§7r»«bô‚×e˾»/ÿÌö5xŠzºíM -xSS¿(Ó£ûšþ|j°©«÷h#Eî£BÞ@ÜÙàÍ54;™»õ“¾@‚LvÙ4»ò·mÔ•ú¢cA£E»±áµÄ·;$Ò‡¹g­Ö\aÓ!éÛ’3æY}ÛDW¾éP™íÚô¥:â‡ïvÑy_v>å5wüœ5bÖ‚m„gÿëÇxœ3–®¡œß-çgcvTðš†y¹ùgþ„Q/oãµÔ ç]úË7úÛO#ôí/Û ©‡Ô7®B&ÇN2¿Û½SÒðÆõxsfªmclòîî\ý±ÏÝ´œd|\nèûr†ñ}^Óôob’Ú¾Ú,üÑ|³û?vÒÛXêN¹èæoúÔ™˜ B‹ʆæ"DÉ)$ÐßwK™ÁÄ×of¯|3‘¿l8#/¨>#Lù°YtôüsÛ¸ñF'¢Fn1>fM]Kr´2©§øs]Bc•q^›²-X/Bä4]Ó´žòKµëøm¿òrÑwŽô­žÜou~V}Ï" ÷Çõ6{15=gè×ßöP/~“²…ŽHk[²Å"³–‘¿*¶³,)ô–?­9"Îi“Š³[TfÇ*¶xÐÿÚlÓQ#¡ß6qà(R;øטËÉc¡Ÿk£¥†ÇЖcׇì¸Ã/ÑÏ3˜_îƒAÏuòb³jÞÒøÆß6íq¤od‰vj ‘¹Àq”¥Ò®ƒ#­ÝSæ‹£‹·r÷–™ ÐúYÓЦ¹ÓÑx,š†t·¯GTlÞZÉ‹ÚÌ£vN¿Š>é?†œ?Ïî1"g¢:c¬mƒ–¬8¹]GœÒ©Ë†=[ͤvjK¯5rleç)Iaã *ã·TØ‹eÂÛ_ è‚{˜ºîÓ¦ünÝr^KãBñø{Ó•7ì˸e_Ö ­ƒÙCtÏÞ£ãþpÂά¯Ë˜}pǵ*,êþêh|—×0‹é\i–þeƒé]^ÇüòmægâÆmÙ¢‰V¨MC«§OG¢zhבãÃmì'ív:7 î3¿ýE‡yݺ_Z^î$¼ÿ‹@xûáµ_uDwç‹œüÇS¡óé› ¦äŒæÓ÷Öæ7ÿÐú?ŸOÛ§OY;ZŸ$Úb¹ï%«ÓÔòaÙq‹¼Ú3ÔÓ>V x»Rð`žiÚu“Äž•n¹“·K]•VjÊÑ|œ›§-Ñ@³µ¬ÐÚ]a´‚«UtoñËô^òÛôòðã.Žû©ïçëz^Ÿ w2j¸‰û¥Éæ!—g™¤Î0‹¸1×4­f­éå÷›„AÏQ©SèÓ18»¤)Œó•ŒÃUUæˆÿ¨ÚúHgË6$0ÀùJ`†¤2+%‘ÜJ‰ñˆ›.Œ)X%ŒÍ_-LÈ_KÇÜ_#Jx¹ŽÊèÖfï¶rô“619Wy!~ëž<ƒ¾Ü§'{TuÀ¢¤Üݺìe€Eq‘«,¯ê”ðʸ¯‚rŒ˜hzúüÏÌIºžMÝfí£¼t3…&RC†b>€y蕦H˵`¬N#¿Ãà=ÏÒ_>zн¿º -^ò¬ùU^ËÌ%n¢ù>|-O{…=ZL{¥ÌÀq` sÄ{$h -kê Ýu둉†³ɽBD;Î#kŽ¥mø±©íį›ã¸ÁxÆ«R±%먠܅Œß­ìùÛ $éMÖ™•2.ºdç< îÁ`®/ ÓÚ·3ñÕ[èÐGËEµeÙmŒô}U€¼±ÔßüÚßµD)Ó…ñ%k¨GX‹Æ"_˾בÜ÷z?áÞNð+ç³#úí<¥ŸÃoÐ?›=VÓtÒ“î`_¿”mè²—¶Ÿa~ãD'£ÇíÐÐGf+DîÑð}¼ò¸Ð@(Cæ-AÛ¯A&Ûµ¥ÔZy÷i—ñ6']&ì:î:Vî4…NzµÎùŠÔYR¹m*û³ž «s›è\Ú Ê?{s½ÝÎÕŠ®~ÓÄ´®Å}\O_ý»‘ º|mŸ¤BqÆ8DL¦Öof®öQÙ}º‚¤òu‚°—‹LÓºÔÍry]³Ûü“ì®þ]Ýè -¿V7Ÿß¤×Ë›üÂKõ>ðÆu¼‘ÑÞÐä9oh^Äs¦ÏxC£àGjÆG]›XS6ßuTÙ@_m\<m_¿˜š z÷™Áì ב ¡ >"à/"Øur Ä Ö)u:…¯-í{qÑñM›/söWazŒdm£'‰Rj6SÉÍ[ŸKjŒkÜTÆM1U„c¡ìYÙ¡]ÅO}è§Rѹ›³…!ãiï»s¥›wË*Ê<à¤üÍæf¯P˜Ò´‘ñ¹:›9>^ðd¡Y/ >|q\¤ÿˆß¢ëôpœÎ±¬ázgrÆèžÌ©—_'øÈŸ…³|‚ -ÞÚÄ-sòNC ¤£iŒÇ¤ƒ´7j í7#Ñ1‡aì•Vqv½Þ¨):î:ÜL(GpOº€#ùQ§Qp/{©VßâZ‘”܇뙪¢‹°ç¨€+óè“FSvÁã™ œÅpÿ‰,µÞî=à²LeK÷Jrd²œÚ]\F¯ߺU”Ô¶…Mû g>%ojN -nü¦+HkÝD…<\, -¿¿D˜óÍȼàʸcª6~—I;o…çÍXÿ¿Æ8¸n®‘µÇ S¤g&C&²³Ê‚ÓQcu %hëzmd¸S†ô4õ‘æÒµHoëŽ~/ñQ%]=¤µe;Ò×1ABSIäÖÊV'F[z¤Îµòº¼î ’y„©Â=ŽÜåV#«[û$·ßI…—?k‹œ£'Ó^—~¥ÔE­¢ÎDO )ºô‡¦,¯é${«›!÷ºØ'¨ÀýÌÔ…«³)„i´{ŠªÈ÷Ö³”Úµf·~h›_ùªaW²ÒôbŸºÉå¯Mr0–-äMÍËÿ&3/ükt÷÷ͦޗ¦šº*&˜]ü¶‘Îí¥Íc[Ö˜9ÆLÙ'L g&Þ3@S}#Z?o!ÚºRá}Gïw -÷²–žìŒïÐb§ÂŸ® ³;õ%wª%â»5bîV#¾Þ d²zô©ð{Ë·øéœ]ø$Î9e:ããmüTê|úL؇‚+½ÚT~»¥äUõQêÞgàÊ7maæG áå/Û™{ï®°å¸8¯åðÊ÷í¢“~£™SÞ£)·‹ª‚ĺõÌÓ÷rYSå¹ý÷¸ŽZw³g¼©Éc^ÇDѳÔØ%We»É~dÀW29›4Ö<‡×>šo"=®$°v$²<;ÐŒ> ¤µI ï¿Hãú´ßXQf‡ìÎbhf€n1»ëØ@ù选ҠÇkÄiå;éÔMQZå6öbóNЀߨ” ³»´ésYjŒKôÖ+}&Üë÷Çs.Î}ëWOŽÔ_Ž?Þ‘´«(ÏC’ÑcÂ{.hR÷zÍMsÑ1¿õUÇüö÷æÙ_¶ ýoÌx]›!tŒŸ$:ê3æÅüÇÌ$îÍÇk}2í›;Ÿ:å?Ö=åss6ÜW*Üg?Øã0SIÎ(3niªt\Ý&6¾It³Aψè Ýj¤EW{uáÞ.&¾|3u©A‹ÊlÒ†ûÅiS·$UölÈúlÐ8Ú>r¢( -çù¬>MaRÕó+½š¢»½fÔ‹N9ó¤SF?ï¶äòÚöR¹½BÁºô£^‰àê¯Û©ÃÞ#Œ9«f–Ç•áþ-:ë³¾ìQ틲×îò·%Î̳n™YÎ?t…÷yÓÀ×sõ„{ÑR•¹hɨ™ø«*Ò5b‘`¿ý`áûÁæÖÇR{ÎÊO ÔÜ®ƒ¶áø¹M}ñc£NúfŽ¹ŽXÙ(A®=3\zúÜxâµßng}t øð™a • ¾\o&½Y/enu˜S>qª´màxazí&æR§è2gƉBT˜ôZmÙÃ76Ö…Ü÷WÞ ·|Zr’¹Ô«÷ q7f3‘Õë™À»‹Øs©jtØó•LÐÍE',\þ¢)P­0wˆ/pϘfðlŽðdì8mÌÖnF[Ôµ‘Ö¦ÈX°{Üg)8ä3líüehÎð‰hî¨ héXU´ãÀWnɳ$!·WÀ=y² ÷–q±[-|²‰-ˆÄˆ•H´Á ã¤÷X‰Gøt.êÞ&åõ6QJùfŒ¥—Áü ã*Ö -o}Öçn¾§…—šÿ?öÞ<>ª*Û (big´QPSœ±Î)pJ HHÍC‹¤•ªX©‚ ƒÌ‚ Œ 
Ì*â<´Új·­ÝííÖn»mç±owßžnßá÷{ï÷>ïýqßZ댕JÈ©IU¬êÆÔYuö>{­½÷öÞg}oD_jóYˆ»àÝñ¦Ýóèg“=û?›ììo·Ò\¿ï…ñ¾]áyèÑÿÜ·s|/ëƾõ½ó§ˆÿßF½¯ÿ›lâ$Ïš'ǸâkFV{>ÍW2í–ê’jÌs[uš7µílï]G.ƼQÞæ§Vûb ;C½m;ϱp™o ü»ïv|oÅwìßgú7½x'¾b¤'µö ߪC`Ï_ðÁó}Kv+:ò¾W®óìÿídßÁ¯+|Ïüi¦ÿÙ?V{ŸüËTÊ·ýü¿Íö½ü¾§á>øÕ î]o•Õ>õ[ð= ŒeñÌ·ï'ÿ¨w=öÏkwý²¬öðÿšŒí÷v®;˼k¤ïÞ—®qý÷rÏãÿ¬À¼™ž-/\íÙôò8÷î÷˜ÚÙTóÌÿu³«cϹÓgGJøk˜’²ÑW–HWM,™Ìp%S¦L)™Q5»dÊ­`ÛÀ?¾mN ä¶Ù¾’[§U—Üz[MIMcòwçÎs1÷ ê|Gs-b®?¿·q¨§t':”ðãú™#pè«)ÞÃ_—cÎnOëªQ¾{Ÿ¼ÊÿôW³ëžû0„c׿éèØÀÊ=cN¬à3ŸºC¯~V|困ﹿV{ŽþÇTÏ“ÿ=ß óîý؉2 Þ½ý"ï½u=úžà{þos‚oþ¡ÉõÜÿ3½vëÛãÝ[?¸¾æÐߤ9‡þ>ɵþ™Ëkšî^¼cèmî¦!5só´n;³vÁ²NV*™xÙÕ%“ÅŠ’é7WÞt• F$æ†o|z|Ýþ÷¦…þíMˆ»†yè0_a­»¾dNµ¯„ð0Wî¼ßõǼ/ÁÍ]ï‘‚¸Ì·|÷¾5ûƸ÷5Ùûì?gzžúiÞí?.óß¹÷üÃ|ñþŽ-çÒ;yË÷\LþÍ®a)¯à½¯Lð<ðãÝù¦Ý·þ©+¼›ž¿ÊõøŸnò¼ø×Ù¾ÿRãyì¿+=_‹ù2jëS§Tûæ­½}É0ÏÂM£æ nCôsëšQ˜SÏuû’á˜gó?zçß}šùKü›^ïyèSŽ/Ìù ~Åe¾Ô–³¼wl:ÓÛ²òtO|Ùi˜;Ò¿ö¹±îG¿pÒ;ÈÛTæ>ð'¾[ ö¯¡îÕß7_ù¢®þ§&ƒ?ûl¾÷Å¿Îq?ükÞ¶ß}ÿ&້øþšïgÿÚXûÔÿ*w-ß+½ól÷ê#—ºŽþï[\ÇþQþŒÍ -דÿ]¹ÀÜóRÃqM£:Ü4”ò´ìù€¯=ø©ä]qøâ9·w«¸afÉ ×;K¦L¾ÆemÉl—·ÄnêŽBlÙz÷ÞÖ¥§×ÎK C JÊñÒÔ1ÜÛ¹ktàÞ7®Çœ>˜×µ¾cçùáy§5Ì[22hZ7¯sdý݇¯ôïý`2æzÂÜÞå_àÝôä•žÃ_Üä;øÉ-þ'¿¹ ßs lyz<æôþ²<ôÌ'.ß _Õºžü[yí³ÿUá}ýï¾Ð»_&Bo}5ßÿÌ¿U»žüâÿÆ£cý+w]èÙþÒµîçþ9ƒÞ]~ý/a÷‹ÿÄŸp®Žçx:vë9ô›ÜGþ~³÷÷÷æ—ÆyWì»Èµá‡Wº?|^yùìÉ.”L»±ªq<ëËå‚¿`{\õÑ¡”ócÝ”£Óï8s¦Ò¸½éßùb™ïè¿N÷þàÆ`ÇÚ³­KG×><Æ»ë͉þmoؽ»áyÇþcªë‰?ßXµçßÛGÖ=bmx—îí_úàh'ý+¾ˆrêÜ ñàbðÅ×<~¹ÅÃQ.É%»~àM.éM®Eùv6ÿh<ý]´íϼe§b®y,ƒïácŽ*×ü¥#ð=iÌ·Øüê„à–Û½­kΘãi2ÛÕP‚ãÙ“X=*°bÿ%8_\OýG¥ë¹L÷<ñ— -ÏÑŸê>øõÏ_¹Ý1OïÈ*1Çæò>¬ä°öûÓôÀÑogù_üÆíýÛ:ß¾ ¸žøG¹ïþ]‡yçÐn¸þÞóÂßgyžûÏîÞç<Ë]„ëjîÎÏónyèëчð<ò±ßÇ|þ˜;Ò†¹6·åÏÛÎõ÷á?ÜèÝý+Þ3oÍi·MB4­¤jf°$Þ|v`ã‘+17e`Í‘ËA–çï2vÅï:ÍÕÐ>¬¶1q -bvxùpbõ`N Ð–c׶ÿ˜ÃÜ:˜ï“r\¯:|9æ¥\Ú`+\{ßå]‡¿¼æã$߃¯³þ=¿½û>˜|ìãÛêŽ}ê -ùítßc¿Ÿê{ò¯·zŸÿóìÀÿÚüèã¥õß¼oý§¿\‹ïßG~þÓÎÀ«_„Pçú–?x!Ì]Á÷Ìߪð]\ï³ÿïL×òcMî-¹Iº¹ÄÝrテ®ðýó Ì/æYuèb÷¦W®r/zè\ôE§U fkC æ·¡¼‘럼šð£w¼cËÙ„õpçîó1§<â*!ÚºúWß›|îæ…ñÍôõgcNtïÓ¿ÍõÄßÊ1·™çáÏ$ì_ÌE‡˜¾8Ä•wl8Ó·êÀ%¾Õ`g!¾ðƒ®ð-Û}~`Éý£}Ë8ß³ -â£E÷Ÿã[´í\_ûös‚‰ÕgxïXWꌸ¸ˆB8 áÄPçŽÑø~«çÈ?Ê=Gþp æñ¬>paÝqΞ÷$ÄC|<ÄåEl&Ì1ä_´ö,Ìáâ{ö_«‚¯}¿ôy]àÙO]#`Îh\;¡B÷>3޿ဠs=yw¿ÃaîÄñBœä}êßgxžúçtסo&ãœÁfÄŸI­<#°ú‘1è·xÿËÿºçÆ"îîˆe0&C ÿ¦'®Ä1Š94<‰å#g×Ö¡NŠ9ßÝ :F¸®8Í{÷S—Áܾ%°ñ¥kf×4–L½izIÕœ`‰§qÑðàšG/Ã<â˜È—X5 -1°0?¡¯uC©«yÑðé3%³ü±! ‹1˜{sºz#©a˜KÆOù³ŸK¹TðïÆcW7ÜòÂu”+eó ×xø)K˜"ýizø™O=/ÿ"ÖüÚ[éÈK¿ŽŽ|1ƒòQ@_~ø•/üîç­uøÍÆÈ~¾ß•Ã\%áç?öã{ùˆÇàÞýc»b÷£¿–\‡ÿz£{ËO®©]°î´Êòš’®“Kf@L€9ÈnÞ±öŒ*Ä#®K­™›6³ºôfc‰¿±sæžjÚó³Ûöþ| -åN[¸fæŠBÜÌ¡ïïÜ>Ú»`Ñ©h·ë^þ]ýÜþza`Çë æ¥ÃœJž=ïð¾ÿ½Ö÷Ò»jÿïI¾u/ŽÅö¾-àSîü1ã_´ý¼9áèôõü럋vãj×ÜÄ0ÌíïiÛXŠ9=í÷íJ¬¹z”¿t]Çæsƒ˜orÉž Ýó:†Ïš6Ð(Á<É„±ã q¬píу¹„À)§{/Eƒ@Ç곂wn:ó2"_ˆƒŠ¹¸ƒ›ž¹óÕ +Wrd=~%æºCÜ"ʵhëhÌcˆ¹>܇?¿ ñ ñTw½Í£ßxéêoêî3Ü·ÇNñÜWp1Ç2ØJ÷£ÊþŽ‡FW‡Z†ÎñƒnôE‡ø›—œŠ6Ä¿|ÏEÞE›Îªih! 
iWS É1Ïæܾ`(åÙX}h ÚôQf×K¼sïŽøE§œ\¹ã"ÄÄB{Ž8¬Þ–e#q¬&¸p(øCÜMKGøÚ·žã›·ôTW`!å) -`®üå;/lzü*ˆÛoƼ‰þÇ>½Õwðw·v¾ÆRλû_.CÌNÄî ?ý‰{îO~Ö}ëÇ Ï~XO¹!w½Ããuïù™€¶<ø“¯çõMßÓÃó¸ún€.[pŠ¯c'ØÇ—úR[ϵ*ïÒ‡ÏÇ¡ÊÝ8ä–ªJœ×È%SožURÛ´x¸«®õ”»VÞZ¾4ø¡áo׸#C0'7æŸÆü¥˜ûßÛ˜îòÎêÛ6<Øyßhß½Ï]ƒ:ó6yx_¬{öÓ`ÝÓ_û‚[_¾žr®}ärïÓš|ýÛï›ÿw`Î ÿ3Í»ñ‡ãÈî >ÙÒ=z槇WÕÔ– î,¾Wpµó–Ž˜>£¶õ"á”à_¶lúŠÓ›Óü˜„W ã sBU»‚%¾¹ ‡S0Æ­=ÛŸ‚¹´bï%ˆG€¹‹B÷¿XFy™Á‡öÏmNùªî{ebxëóe˜— 1«ü±äÌSçßóKgpë³×a~CßpÅáÔƳ¯ñ5¼ûÞŸä?üûJôÍ0g(áf¯|è̃zá‹Pý-=ÿ™ß»÷=ç0bx@–ˆ›Š9S}KvŽöµ¬ånjŽXE; cÇÎËšÀÜ!ž#KhíA›¯mý™µ·ƒúsÿú Ö#[Þ±ý<Ìõ^}l,áͬÙs)â>¦+òq æJG¬.ØÄÛòµÝs–oñÎóÃ`Õþ1á ϧ|R¸Î¹ëÕ‰ÁÃN ?ñIMèÀÇÓü»ßäp\á=÷=üsÙà7冎~;ÛsøË›wí9í˜2î}_8=Oþ×4Ì9áÚÿµÓ»ñØÁ\‚m›1eÌ'‰oáºQž«FÎ %†zš;Gxâ(‹Žá3ªóT–HW‹%Î2GÉÔrÌË9wHM$~Š·cǹ¾{ž½Ê·hë9˜'s©ŽpǦsÃwmGì¾sϼzv¨ó>>æ§ýŠçQÐæan¡ÀÁ*13é¦Ô]g¸÷ÿvåÿyëoßëÿò<ò‡Iˆ9æk½÷L˜CgBì5kŽ·Ç ÷È7•î]oÛQONŸî.¹õÖêÄ»¢>è¸o4âNŽb2×G†b¾ã9žp‰+8w(æò¦ÑÉegZôDÚ‡SNyЇGÞ½¡nß;Ám?šˆãÚלŽùÁüýÊéßÿþ͘÷+°ü¾óÉ×zø]Ù{ð×7 Cݪ—„׸qêî}ñzÌuèÝû®3üäǵޣžîkÛp¦{n|ᜯôÊÐýÇ®CÜAÿÑ¿Ýæ;ö·™dë|UN¹˜Ö?}•?¶þtÿ§{`þÕB`^éÀÖ7ʼ|qCpÇæŠC5ì3\KÃÜDþ'þ80OAßøïÚw±ÅÁKÈ_¿ï'öÀ£ßÜ<ø‡éu¿ªª;øY•ïÀïnB,ÄPçÖÑ5‡.Ç\†˜GÏ3`~cߪ#cÈçØüêµþÞw÷ÿqJððn«;üqõíG~_z죙˜?æ²=´zïeu+·_Úñ -ç?ø~Epß•06%òÉÀ·ñ­ü -°u(Ÿ^bÓ™ˆ‰éÝÿÅþ£ÿ˜8ö·Y¸n뤘Ú·þÐåd÷S §7>=×g|+žºÌØ8Ê»h繞Ö-¥žùKFTû#CÊoœRrÃä›KpOh¶gÞÄrÇ\`¾_Þ‚y|IF×ÖÔúK¼Ñ–áˆQÞñS‰0ÐÚ·œKØ´0ÏQá#Vùü¥\ùàŘG>^{&­w®yü®Ug†’KN‡ï—Q¾¾þ^íûáß¼µOÿW¥oé¶Ñ˜“±:´l›‚sèkßr6æI÷o|ñÔ ˆˆyé͉á ˸´~å~[ýÊÇ` æ‹z¼aÈ̪Y%îÀíC0#ÚMÌ·…¹¸1ß°7zÇpÊ?¹ñظÐC?›Øóþ ˆËŠ9G s¿‚?Øtp,æÒtí{G ¼ð¥'|ô“š æâ®_x -aÚ@¿Ü~ì7þБßßzu’ïÈgSÏ|Zã¿ÿÕ뼋֔bzÍ™ˆuŒýÉ£Ð7ǵ‡ÀšÇl˜óí æó­}ê -ÿ‡/v7ß91öÀfœyù¼‡þZîð'Œ!Æ~Á¥û/¡ÜuG>«tøý$Œy3 ×ÅhnÝÑ+pÌ×íûtæzóúºÂÿÈïnBìƒàüe§!®bÍøüöf÷‘OoÄ<¡¸&Búå¼áé±¾}_ßTwàó¾Cœå+1ïCý[/4¶ ÇykNám¯°¾C–‡~TÓpôc_hÿï§z÷þÚ‰˜­/•a®»ÀÚgÆîÕ†—'„ŽüyfðØk|ýçTÏ¡?ÜXÿÂ8Z?Þpl,å·{êÛêúW~ ¿üeƒwÛÏí´‘Ú~¶§uãëÎq׫ó‡`Ä@ûîó‚P?å‘DÌwµþyC«nuQ¾cÄVÁü…u[^aê×=~u ýÞsB­›Ï-Ûs‘ß'7‡~t«Ç/…ðÒmP.d´¯˜ïºíî3ã, bgy¿ìù?Ô„_û¬‰rªÝ}`Œ†…~”7Üú°å”@+æÝ{ò[·|ÿeu‹w\€9Pë—?4óÁ¢­%Ì °·˜Ÿ°ð–¬?'¸ó—RøÑO¦5<òÑm¡=ÜBXÞw¬?³~é×m~ÅŽ_ iÄØX<’p°¡]uJ.nÿ¶cÏ}ZzçÃXýOßOb.îå·•Ì®ò*¹¸0­î‰ßÍ =þÉ,Ï¡_M¦\ÜSüêfˆ¡9Ì9ˆñH¸mÕ™ž¹à[ÿKØ‹gxÁÞ`núºµ‡¯B[ù}÷¼tµ·eó®ÆŽá÷ ~IhõSW„V=}a\nxúô0gù÷¿t-ÊÇ–+Š¸|8þÃ~<µ~ß—3›^½ý_w :dŽ72ćv1¯«ì²oÇ+÷Ó÷Ÿ‹9.AÞ#×0¼äõè—3ÛްcuÜCB;æ Ɔú#w §×œE98|S¬ÛÿÉŒ†ƒÎÆœ»˜çñŽóÏJïö·ì¾ûßè¿séXÄœÃ}:ÿÝûÇàºâ#`þ;Äîðþv -âžÌ˜z[Éô© Wkà‡Þ>DÛ«k]}¦¯99í"áfݱåÄJÁõwÂ0ˆ-‰ó.œ^uVp+ØA̱Šy¹ ; ôס¦‡Ž}R‹9øüGÿx›û ø1ˆ5Íé®ÆEÃ[ÖˆŸâö/Šù¸ë’kϬ‹­…'”SvnLjå´]uåT¾çèUAÌÏëÏFnÄ™©?ðé,ÄX!lãÖõgnáš'®jX´å·'Vœ‰˜*¡{_¥åâmzâjϱo§×¿þÞ¼Ûßgyà‡ßúpÌÜ6¥¦DÁizøBÄ¢\Ü›Ÿ»†ð°Q<öõôàS_Ö4<þ¹+pèSý`£ ; l6á -Ûs1bg¹ëš;+lÂÎr?õ•âþÖì*ùJ -vÖ: - l›ôŠRĦ lxÊK{ä2ÌÿŒcOÃÎ -?òù”௧Ð8†9^ëoꆱ…XóX&ŒX wÞ÷ô§C«^Žm -Ì[12]v*âË×íýdJðà·Ó÷À;ËLJºüMC¿1Ã7=y5êʃ¸õ©ë»ÞBû?šŠyu‡¿Ž9}û¾¼lå÷ìÈÈ;‰žwþÈŸ§Õ½òucã»o¯ ?ñYµëshÝqÖ>q¹wÑŽsi]që+×û|GĵðÚhû0Ìu¿ízÝáϪ±Ïñ¼úœ„WŒ8¸ˆ‰nê -7‘¯G¹¹çß1q‘¯ñ ›Ò#B-0nß7sŸ‡žøzag­ÙjØY÷"vøw­«JÃw?t)挭[ -vOÅÎòÇת?xÖw‰7Ð4Ô_Ïj˜{J ±e8æZö×ÃØ\€ãu˹„vŒÖDÒëÏòÄ C|&ÌéMy¯ü©H¹¸g ì4þkHn8ñê–m¿8|ϱñÁ]?—˜‹{ó±kÿ*üÄÇÕè#ã^–oøL‰µ£‚1%7áíýpå™EÌgÌsí¾ï…2ÌY‹çÑ¿·åΑÊ:ðÂø¨_¸ò ÄŽ -'—R°³vŽñ?ùÙL\ t vV}IM5â™tœŠ8]¸ï†xah‡Â-‹OW±³.Dì,ÊWªagE—V×ËBÛßêW?uuìåá_p÷é„ÛµtïÅ„¡¸tóhı¬[µ{ æÒ¯[ºú5[3ŠÆ÷öŸ:°<Øó”ò«”òwƒ9«q|û“0Vo8§îž#W7ìÿà¶È£ͦ¸éðGzÞy¯–wþé¯j³óÎ^yçë_dz߆Ц!†fø®G/ ­:r¹{Þâ¸&á^pç©öûÎE¸Y³=s‡T{oâFµ5O îø­Œ~npéÁ‹Ñþ!¶Ÿ·éÎ.ßíCjkC%0N†kÛÚ³‹½cÝ9á•©ëØ®àµáœ¿ç¥k}‡¿œ‚¸D¸¾è¿±eç¼z¦»¤jê­„å -„‡¸}:vÖ¸F„6±zv}É쩵`_š‡¢}ª‹.9­nÞÒ‘u±Î‘¡hû©Áæ%§¡ŽF,n§^±ëâðª‡Ç þô'—ŒD{Ox ÛÀCoKˆñCùéÛ7Ÿ‹~k|}Ð׆w¿!…î¾,tïs×fÛº½—ûÿhž½ÁµRÌÅXtß¹>¨ñÍû>*GŸ×C±•§‡ã+Fá¹ÂÆZ{ø -\o!ì߇ÇøÚï9ËÓ˜FXN`3ÉFjØY>ªþøV\KÑ°³ÐÞ†Ú·‡ã9¸tËhÄ+Cì,ÄKÆØÌÀÎZ­`gÕC¼{|Ú—úE÷ý Ø´hÎ]ÌánÛz.ú@¨sëV?|á™Ý¹û¢ú»¶]:ðB´ÁÆ%§ö”oè¸ÿ|,ùÖÍå1Ž¯;p%æÜ&Ì>ô5vÿÌY·ÿg• ž¾6´lëù˜÷ñž vãóWûÿû´àÑ¿VcYÄŽEl<*¶×|{~,„îyüjw=5 ±×qÏk–'XRŒå -•Ôš‡`ìˆëÙ³]áÄÄ€Îðoÿ9‹¾d0¹¡tNM¤dV•¯¤jvm‰¯~Þ)uKï=¿~í£W :Æå„mŠvgÃsãq퀰`7<;sØ㙌Y=ÏüuF`çÛ<âÍ#vÖl<+hŠØYn_ Ä‡ØY0ÆQ?vÖ xüCì÷ºö{Ï 
/Z}vC¼sÔí±5¥ˆÑljáoJ%À~"vèäà¦#W!"b¨ÑÚ9âš=òáMGÞ¿ñ]C‹w_¿ò 1*‚;ý|Üç@Ÿ -ss>á=û®ôü°m a$l¿cÕ£—ˆËqøwào}PQ¿|÷¥u WŽº½s×E Ë ®ß|t|ÝÑß¹ëŸþ$zâ›/æNÞû{‰°³¿ýé«={AØYþ=¿™DØYË÷ vÖ)3q.†“C+ƒ¹º ›úž' ;«ñ“;kµ‚UgÆÎJjØY‹Nõ5ÄNñø!¶ ƒßŽ>/bǯ|t æ÷Fü ôI ¬®cÌÏ]— ¶•‡¸#8Êß¾à·iâR¡Ï@¸?X~Ë+×Ñ?ÄÔ¿DÉsüÄUae¨eå(Â'‹µŸŠ˜ß¡e_ä}à¼ÿ©›é>ðådÄ?ôÜ>ï<_î…«QœÐÒ~š7ÒxJÕŒPI­bòè]§â=³k<%®ú¹Cƒwnëˆh0~Ãzq q§=MàÿÜõÐE­/_‹xˆÍ=sš»dæ w úÛþXèã…7?9±|ɯ»D}²t.Ä2ÁÎí„ù‡ç­<}U|üë*ÂÎÂéñΑ.;k±ŠÕ>²ž°³žµ‡¶¿)„ÓÎv¹#C°íˆ·{¥uKw^¾ïéëÖ¼ª!½šð­)_úÀ…”}ï¯&^Ø!Ä¥¡\öˆW@Øä». Þ¹ |¯§®B;èßûÞ ÁNõí{ï&ïžwˆOkax>‡b™6ä+°óÇæ’ÇsáM?¼qIÃ2;¸ûÇbCǺóüñaõ –žñÚ¥‡ß¿}°Ä² -¾¤‚å#ì¬On ->ö§™¡c_ÏÁ3ÈFëª3¼ÍËO­·x¸'¶ú4 ; ×—s€ð•6Þú\Yøîm—}¿{¯-°í™ëB;"ÔmzñzÄ® µ.?ÏŠáÇu“ºUûmþ^ã|ŽŸ„³þÞ‹¶#„x\[_aêîyáÚðŠm¡.aù¹±aˆ¿…8÷=oJþýŸÞ‚ëk´çŽX‚«¶ƒîÝ}a8½}4a8Çï&¬ûú»÷Ù‚›Åý Æ.ÏîA™úײù.éòG‡ºš†¢í$ÿsú/Ù:zæ4ɜڹCüwŽ¨kÙp&Æ·uK·]ˆ˜'¨“÷¿p]øð§³(2îUÌ¿ë4ÔùÔ†3Ñzê’§Ô¸æ©Á3?`{ЗB;V·|ßed#Á®Î.®yB܆çÓp½TÁ/9:Ö·íuÌ­/aÌåÛõ:ƒ8uK¸ðÁVìƒv§aý^Òá-?œ‚X1pp}SÁÞz®‚C6ýÔoˆý†~a¢ ø¨Ð×Äs'ˆŒã ß±@Ì`”7bÔ¡âßý -|à]'úŠ¸¯‚Øñˆ‹‚8?þ¯M¤±‰Ø2ËÈ]|ä··àø þ·[q¿.|ø“™‘£¿@".]õ,ÄOlJ8Ðk\lÛpîécŸø’wŽûR¾?a½¿ï@쬺§¾ð¹ûöÄÎòßÿì|ç#°îÉ++]Š¹¬}´æüÖÂÎB¬\ϺÆç†ÃÀð‹ØYkì¬à?ý{~&£ õV°ùu+öŒ!_ô>b¶ ?ì{èM!¸øÞpðŠàºCWÖüº£ã»Þ½#®-ןS¿âÄðÛtl|`ÿåÁƒ¿Ÿê=øÁÞG¡®û^¹ñìçñžÂ›_³×-yø"_sËðà}Ï^‡kág>ö¢_DkG¾ºÅóÈ/„ï1Å›x}pï{7|wúIÕµ‘oý¢aÈâ¸æ¾ãî35¼°óôºŽÍ£~SÙpèw5„%sçöóÑðB<4§âu7Œm_] báº1âÑàºù`„sºìôºå{.Eì.Œù‚+÷AÌàº'®ÄwLhiõ±±~¯¸~™\~zðŽ•g F00–B[ž¿÷3ã=qw©¿i1ùû胅ֿxÿ¡÷&ášHhÝá+óí·tb ¡\1%½¸ú¡K³šÞsÙñ®HxAK¶j]ç½? fèôCƒ;ÀnCŒ†þ>áØ _´î Í²§Øñp—Ÿ0NUL÷ÒÀý/]ká?×½øa½ÿÐï+ ‰aîÚ` î_FÍÊÆ⼡Ö²æ ZO0agùwý”§¸íî=—·¿Æ¢?îÞ÷‘ãwwó’nðñý‹÷žXi¡ *'Ä}0/hØYk÷_ØFœS¿Eëá5Uê=tŽŠ‡z˓׶¿Tæßó¶cÌØõSòYhÍj˱ Xaí‡qÏC_€tâNÁõo…XþÐG7‡žûÈë}î3=û?˜„gìpÏ=|ïkv:7Ù¹ò¬às»"¯ýjAó«o´6>ÿËæú'>œã;ò» -ˆ‡Ê±Í¨‹ÂË÷¾@öóny+N ­îjŠ™÷¼=)´dçù®úC}àS†õÝGlˆ»å‡ñƒcÜí u»À7t•¸jëK|ÍKNEÿùò=ô/"®K@œL˜=¸†@¸Ó¯0þ}¿¾1ðȯo -o|òÂ4GÌ[ðùÈgÝòü<_ˆ: ´ê¡KËÿÀ›¼÷ñ¯*={~)âþ.î7Ó»Žñµ£È&Ýû쵨çÑ— -u®=‡ü”»¸¸nÍ>í ~÷îy[ô<ðúD”â¬c<ï -µ Eü -ÂT»B:oý><Ãr á!ßà?’€yïßúìx?øç·ðèOÔ@?ê›·ìTl7ž=ó?ñõ­ˆèÅuëGwƒ÷;<δ_þÍGÆú|Qg=ÃÏ~îóïø™@ºzÙ¶óÑgÅzIÇ@_âÙKÄ{Âsd÷pí -ãþ}¿*GJ{Ó‘è)÷¢Åú}Ñäp<“káˆS俽m¸«¶AÁD\úà…ˆ©‚<áÞ˜¯yÁ°Y·ÍŸ6Tlƒq{Ì‹ï?OÁ3ÿ‘=tï3p ÷ Ñg ¥VŸI˜X`£ÑG¶խ?vMh×[\Ÿ ­‡ñí oxiB sçðL„¿eùé¡Nı„úQÝõÈÅh'CˆeŸ¼k®_wþDDÌ#ÂB¬-Ć[úÐ…gÓ>òrè_Ä C¼¼í/”¡?B¸ÕˆÙ¾î±±£>î£.',ž}ïOBì",Cñƪ0nÑB ïþ¯nò>úÑdÔy¡{Á—@\%´µ÷¿Xæ=ôeyøÉ\ˆƒ{õ„õµhÓ¹tö×ñ ЦgÇÖƒ_‹øÆxú q—}t£{߯eïî·9ÚÓY ¶ÇÌ1ßxï%´ˆëxvxÙ)>òÞËðgß/Éw -n8z•§±s¸»®íŒ%· -}Ä„ó/Ývf÷Að?ƒxÞjÓ“ãÂwトöÊv¿Êú÷¿S`÷;îÓûïQX0Ä3lÙÚÇlž½¿”°~Ú¯¹ ÆÃÊ}—à ¬;bón{v¼gÿ{“<{Þuxú¹€ï¢úãËF">¦¿¥sdhãcãÏÜýä•Á`C[îî|ðÄ* !–6ø£-}î:“s{o¨_o*Ôºd”¿mÙ¡Ž-çWµïRZ烘‰â (‡2B|éꙵ%s\žô£16ÁxåŽ~î·`ÌKFÒØFŒiYhõþËð,O0±ìô0Úó]¯‹è¯ žƃxf+¼îÈX<»ó -Ûûšű3êoô# 7½}ó¹tæ±àß?ìØh²óˆ÷ -:%°d×ùˆéLxÂx^qù^Âì£ïƒ8·¸F{ç¦ó«‰tðXÂyÅ5ðCqŽ¶½¦è#Ä>G8Wˆ‹ëì0ÇB »Õ0Î >A¿Ç Ä@¼o×OXìGô }{âÞuŸ„'†ø¡ˆ?Œë<¸N¾xÃ9´W ->â³…ûl¾Gç[ þ6ž¥]r»ç©«‹ŽðÊpŸnéæÑ'!vÎm’ê‡ûxÞüÌUxÞqƒÏ âü"d‹úªnå¾ËÈ.my±Œ0ÁçÀ=®¤qºé‰±Þûß¸Î»åµ þÅ»F£?G¼á¹—G~= çalÍkŽ8áµÇþõ&÷Ëœ8öÇY£ßÌôÞ÷ì5¾y#ðýôÿ ïÆ!NØ 4{0¹áLj7ÍëW'þOñSü?ÅOñSü?ÅOñSü?ÅOñSü?ÅOñSü?ÅOñSü?ÅOñSü?ÅOñSü?ÅOñSü?ÅOñSü?ÅÏIþŒ75Ñ4%’Ž”úo-W>Cl€ëÚH[:š*]:®aby*=%Ö˜Ž%‘T§m;'Ø&ÖF#qÛxåNÜd«NÅæÅ@t5FâÑ ¶ë±´V•gÆÛ$ÛøªHg4ÕÀ4ˆ l“mãËg°Lƒ_•['ÿÒ2ÉÁ;í¬( -6Iäe;'r¬­É"kç%N4‘ãHv8ðÉ7¨d£’ÈF%óK}¥‰Ò9¥NÛø 6¿¯ÔcúòQ©ö¶ù5‘4°šÐ…Ô0;™¨IÅéXb^Y™B®ˆ‚Ì?”ÎnÅ_€WúÉå>-GÙNÔ¿‚LQL³“MQüš!ì .n‰'à§2hV*6·=mCAB¥"]îhœ‹7¥¢ ü³Mœ‘H¿áÒ­Ô_Ð) åñÖù‘v‚m¢'k„]P{b^fE‘x»R"ÖwvwK"ÒBw@3ðžëû›•äÜÑÆtE²=Ñm«H.¶ÊQ3u -Üšn;~‡eúUfœUÞz‚‰£ÅyÍG§e>:€–±ÎIG¬)=ß27êÝyÍÑühlÞü´e–´Û¯·Ðô|Q“‰¤+K7öÒo†HÚènw,µ®R2Êô{‡sVYK´·T7¦#‹ràÌ\¤ßK·§æZå-mk[ÉÚíÂS{<šhŒZåL)j‘/í9ým ìŒh•Ÿ¹‘¶è´TôŽv‚uÛÐ¥” Ô#ìqéÞ=2ùQ·Åþ3š~œÎèç^Š%¬öQ²5šŠ¤“)ËÝcÈk-Kô¢,Íž0ÞÛïܸ’í©ÆèôT¤u~¬Ñ:[9p5€“§2ÙÒšl‹¥s˜;'£XmÂÄ)ÑfÛä<…bX ó—b˜¿CÀAZÖÿÅpàCÀæT䟌µ ² ÐòJt1,ƀŰcÀVDEã®ù‘¦dÇ Ø ,ãK€” 'y" ƒ.DÊ£ïAˆ47ÞÞ‹6,:ØÉææ¶hº÷¶âùÔdݘ5 „5³Îˆuÿ¢i 
Œ\´v¡Ìþjš?…7ïÛZ£Õí½4¼ð&}}<4Ù˜Œ'S“:æ÷¹™íjgÜúª•zw¿shùàB[{ª9ÒUOÌZå*£ÐàÒýí²[î(˜ŠíñHª2™hKGÖgYvÁغɕ˩‹[“‰h¸4 -R¤A5|¬ -i‰e©,ˆ1Vò|­ƒÍ¥[r\í W¤&K¤«r‰ýOÎZeÔ¥ÎØ*Õ½£<ñŽçnJŸÖò}S%ÇX(áR¶SòE'Xöçò–ûo€)]‰õæžv³lÎö²¢bî !Ÿ#‡…ÖµØÂPb‘T,=¿%š¶¾-\Hv'gï6§6òóV+ô2íÌ“‰$&´pâèÜ÷) -Å%šMÍ‹¢$ Ï%ÊÕ“Ä]ròÚQ_ey!­PÖ¬¯ 6YwQBñ7Åâë[Ú…N³vË™!R‘¦X»uû¥Ý^+Óâ"'S­ó“ñä<Ë–*–A÷Š–õ—IŠÚ­¨ÝúߦuÚÉW–7 -E©• š¼Öû&ßõso-Æ”/Ô㻃?G€ås®…–#`®uG @4õ`Í`½§ò]o3ֽϔë MlX¦BÎÚPny™»r~$‘ˆÆ]Ñx´1—H.»`¿¹ZË»}e2»`ÿÏ«^βž±²ÌP[n/Î Ô3–ùÉw•ƒµÍû“TƒÏM‰µµÆ#Ñ–h"=+ÒZx6‰±iÿ³e}e3¾Zí:úf=èÐnïÿÁhy!¥PtxKª²|ä¡ BÜR¢æ³·n -DõUâ«Ä³, ¹|T{s-¯£Êä·þ^mæI±Þ]y¯¬k´¯òeöÅc‰hÄò‰e!g%­Ÿ:5èÿ—,O.ƒ?ýñ³‰‹Lv)•¿ú¦9•l±~¦’nîwfÉ„å%ÚHcc{K{ïòæ¾2éÿýèxG¤³<ÃÜ¥#©œìrÿ,¯çrd¹9çr²7>Y6ré¤uÇ=9Œ¤¢[žQMM±tlQóI/Ðÿ[–gÒ\D´¾S ÜÝÿcÎ2?MÖó¸)÷PdIÄZrPÎ'é•ÂN×8øŽÍȃåØŒõ¾ÉûXh°-‰Íäë±ë ¹ -íÜLcñÜL,f[ï©|WÜ™¿F¶@lSñàLñàÌÀ3Ù8èÎXŽ+ -ãàŒõÊw#•ƒ¹Íûƒ3ƒÏ ¾ƒ3ì÷åàLcñàLþǃæàL©@T_Aœi,œ)¤ƒ3Ö»+ïõÀ@œ)„-¢Â9þ“à,®,àd…9­+vF1wÄÉ׎mÃ`J9…Ò4ä¶VdÉ«(+6x“òX_£-”<ƒŠ»/éyŠúÍ’~sõÛ`Ôo–»µ¨ßŠúmPê·©) |¿Ý·(Š`0j·¢÷VÔnEíö=wÞ­v+únEíöýÖnæm£†Ü¶ù™’³Ìü Ù¹-¨]Ââ$ÊaIßçId™ùâ$*N¢ÌIT[×Ä# ¹½’˜ÇKDÆ&Z>©©ôbm‡5M%úµE+=ÈÚØÉccEø/cƒ“á;ü ?ØÛÞ¶VLÁc•«BJÀcqjzöbšÂKX“Š¶${KrRX kb‰¦hs,Ñ+è²¹·Z£‘ô”R‰˜J @óØHÖ:Cƒ6‘M¾¸Rss€#/”·ú7LÎãË÷÷1¸dç”J¶´&ÛÀW¨nïe¾žvèmæ^[·¦òýeꜴ]¨†Jmrž^°¼v¸°—“-¦¾À[ûÿ 5“‚tùó,¯.ìeWÎÜKÂ@ -–±®Ç„‹¤béù-Ñ´uÕ\x¶'G7.ß-O_\ œ-PÞ*†^fžy>±ƒÄ”ÎöGŸŒW¡xGÅ·ùþsn™óqÙ¢¸ÿܛߓýg¸Í1“s …²çrRh;i°qƒm#m1†(¬ ÆÎô÷Âÿ¹²Å°æ™ê+F9µcF–õo1 -,FÅ(°£ÀAZλPŒ> -¬'Ù»åaX ‹a`1 ,†Å0𻇾d²i^*b}bcÀËØÁæ endstream endobj 48 0 obj <>stream -ÂIžÇâ ‹sàè{Æœ–]îBÌI‘ÓkèyþvÌ ~Ý·˜ach‰|Ï°1hŒ&-d£(åVÌ’?Ú¬1ïmѦðXs¢Ü2bmÒÜx¤qád›BJ¶FcéÎI9,>¶¥;ãÖSÕ»ûÿ x.ݘ³~ 95 û¬ð¦ÔàwÚ(¯iåIsÅ•îNé”ï Þ¹©¼q# -ß0GDÐBÑu–³8¨üW&„Io= èZ®ßYÌË5ÏUyŽ¸”®ÆH^F™~g­c~/YÇqÍ~/³ )Mv-Öÿï$[vÍÛSÍ‘Æhn˜Yèûû拱 A6hr±¼å1YkiÏa3H¿¿ß™rXNwÃEN;¦æjM‰‘½­Êe#ë$e ˜¢à*Uóžï×$ÛBñÿŠ&·hrcróÕ·ÕfdÎáHvÁ8ž+—S·&Ñ>pi,8ï©è<§¢óÔu©S¹è=å¡÷48—Õåò>ŒÂâªúÉMÆj_,ì%·“©/ðÖ~^ñXº&ëÍE+< g½ƒz9r`î !¯±®É„"‹¤béù-Ñ ' -ÉöäºY“÷v§Žºækœ¾°—;Í“‰-šÐ“ÛEZ4¡ý3ÒŠ&tMhŸ"|7£¹*‡|· Ö•Â÷ЂÎRuT2=£“®3 -§KN^; -oX þ HƒÊÏj¶™Â‹k"¤þχ[»%…ah -9·kcÔÿu÷M§Xí2úf]Ij·÷ûhœEÛz?¾Txú±%UYN‚Sš1w[V(zƒ^›e¡ÃòQq æ7u‚”Alˆ ;¥Hõ c¸Ì²›ï9EÊMR‘Á¼(W¨é8p‘ûÅ=3γ€}×Ýù¿§”s¬T(ê¡€O3˜5„å( EE°Ö‘æ Ñï‹v/”)UØ8öŒ¿9ššK º…¡|ÐâùÒÏéÈ\ër(„52Îfy+‹x÷æ¶L›Qfàµ'k O© ºÁf—lŒíû1ܦ‡ÛÀ7öû¢Ü*ø  -ÅCø´;I´5[G»ÈŸÑ,( ܃чœk'}|õ#ß—MúLJ˜WÀ+'t¯<?麡p¡¾)ÍB«z ôd´¡/VãÊg°LÃÔD“e…$) ³“‰¨‚’•)äŠè¼XÂüCéìVªCP~ru¶ÌMÆKÇ—75Ù¦E%S¨DJ[9üówà—hi»ñ§º”±;œët:Æ!³‚(Û;+H²èt²ÆÉ8¢rHÉ—’,ð,/q¢à‚CbDYæx‘s -,‹eÙÁI‚È8e†wrœÍ)5Ž;ù;áâ6ø²H6–±Í²…ê[6­¶´Œ± ‚ƒ“6ŽµË¢ ÚZJËX‡u2ÜlgEV°•q‚]tò² ïæXY$bAÞVæ´;X™uØ*K ËÙ2#ÙªJ±¨$:¡(ÇØ8À/œ(âÞÎȨž•ì¼ƒã‰Â;œ¬­‘Ú J*M­T´ËœƒS(ÐT¢°‚ ,ÇØ2'˜îrÚ9d -Îα¶2¨†‘ñù¬lgYÇ–ëR(cY;<ÖMìТq[U©lç±B¥xto<ˆ€/ó“Kääñ™ú="ôŸÌ\›‚ÃÉ™Ygð …­ÉþÊI2DˆÝ!òœ!çÊR¤uG‘'õè`EŽŠñ¢ƒŒ ËDà°oœÐÕãdd|–gDÕã´Ã8u˜I<:ªÆΕ%§gí¼äÞ%ŠÃ!#AkÏq6Q'A«2/(õÀpVDF$ G§Ò 04X;nåyè5ŒNÖé ’ÀCá0’BÈè ÐX -íb¼hx+á“$ìUÑfÐÁ–§!@{Y» ùs20ËŒzŒAh< / Øt4€e%£Ñ8¬U&Æp®ð,'Ü—¡Ìœh¨ŒçŒ9Fh¤NÓ„ óJ:@¬cŒRú -eÃ:Q6:»Z¡óáfQ”lœ e6Áîd@œf ÜBELs†8( S€†ÇçB{aRð& -ÌGPt –€$³PN«…1Û¢i ÊÒæRO))¯¦R‡mü›ßW:®¡;UÔ>*+(ÙºBjß–Ò’¾¨,,Ù¥e–ˆImkÈV\ãrV]ãú¤¼ X_Ô׸†n˜BÌY…Q±®J ˆÙjl\CŸËVeTWîÊ äÕu6®¡ - MŸTš2¼û Ô”Õµ%»QlHíƒjƒþÏU¹kÈVoÐ]Y -n\C_Tܸ†¸½ãËÉ„ÍÉ9HÇ™D¨Ã ÓOv0,ï``²äÒ)nxxT5ˆFCv¢.Ô(ÐÁN']ZæÛI+WE¾( ¾;Ny'©­\6…$(‘§ ¶ ›b”¢.ÅÎÊ¢ïÊ[U÷![ÍÄ1ôY¶ ¢ñìÈ,Q˜ˆF“ÒÝÑ öötG3—Í‹A37&[4¦DrñbÏtFˆ=®ATC,ZøˆT£©D-„.mép«Ñé#˜jN›óÄJTaÒRÃ8“høM¬FâÊo(Q§Ó)éP!JšžŠ5ÍŒv*õrÙ À •©´ñh¨¹"™ŒgÞ“À•ìéí±&%¬×Ñ[C©Sz §„`Ýeíét2Ñ\Qb÷1Ùµí Á)ê¼ë!p2ã&Æ6¯4 ðh{qöÉô#ý¨Nù½L± #£­Â IqÜlŠ³ÆÙèG¬M)„57HžE׌þViϤïx'Xø®•mT#øÚô„ªRí±NA²©·csL-Gg‹æy -¶àèw¬<(k‡ˆFžÑn‡oJC•Mõ Þ(a  Å ‡Úêp 1¢j‘SVi}Á_EN²éEÁ²)rSQFUëÍd”0„ÚƒWZ%ø¥æJ…ªÒŠ¹8< ù›lóR‘¦X†?:µÌ‰Ìš^bmóJÑ11ÐéV9ȉÌü ¦4¸ÌæoÉ*ŠÞØ[Q˜V}~jEŸ*0øTìÒk=¥¸š ‹Æp7‰»ÅÜ ê—²n¿™{0£oµ~)S;&cXde4UÃ*û¯6㥦¡iŒWu¤iÍüÂ)\r -“ŒÂ›ú‡;>‡e8>O“|&¥\™äϤ¨0)*Lª¨þ9Îø&&ùâ#óF‰É•EÇñXäÕáʪÕU‡)oü=AÃU’ 6Q¹fÐlWÚýÝÝß• NáSX`”–«NÜ`4³pÜ 
£ð”=ãÌ<õ^AW&•Íje0²6µ{Ô?'j0öÂ"ß…ÅìùÖ ‹üqXd”±È)CŽSF £ÿ9QãP4C¡ ]ÎfH4Ã.÷wi?GͧP€F£þ÷A±‡Y$ö0‹ÄfQ7­§UuIœM‘¹òß6¶D«ÓGìaúˆV§Ow¤ÏZJUu&êhE^ùs"Æ×Cïp=ô×CïpÙíDZ%ÒHiTñêOàØâz˜\3ƒëaftÓz‘Z¯8ÊŠÌ•ÿž°±uÒ$ïÑ£³ÌÕÆîäXA’ ®d‡Ì‹Ú†îʱ¢C_Í°ÑKÇÝ;4O0ÝSd•pê'cÖ¢™/͘±ÚN †Y[y+5F°é †Xêq'I‹mÌAêô v©—X¥{¥ŽÛ¢Ý{ʬ2xzònÈÍ>naIPÝ÷>>¹'·¨÷' â \äáy,£ý‡·++c¶øJ¶6%;Ž»»d¬286sØibµ&Z&q¯R¤-Y”*Ræ° ôÅD’éD@Kå¢DÇ'µ¸z‡F¨,Å“ʹ§^‘¾’®ÓðÈ -Ü&u‡Q—F3šy<’^•ö8§zÞ&@Gè!N½9¢q¯ÕÚUd¸vØí&Bb$§c‚qø´¢¢¼±±½¥6™Öß°†Âæ’¶‰³“éÚhc2ÕêCsEÀ/³vïÀ%$ÞÉ8²†yeùŒéªÂq7'S-Z-”0h›’s£ å3œ Ð\Wº3m0mºøDj2®« 0©ºjŸbŒYvØÛŒètè$Ê=’ÝA?èCÌ:É44 ¤.W*†/êÓÇ¡¬'‡1 µŠ°§ŒCAín§V+–»N‹ÊR9kîh ­RK‡>”tžºÊÆÚÒÎqFõrF ->NúHÎnµq6 ÷s$‡Cï ‘×&ª×¥y|Ë*•ÊÝìû§RA´;X<ébª´¢ñ$´´â»¶Tv*j$³¡ªµ*Îôfúµ=Øñ¢l|Y;ä]5GKi–z1”‰®€º(¨,%f¶¹†bë––­%»êÑ,]Û‹þ5™áãt힥ÿ»êÁnUcWýi¨m’­»k+{õ½ªWV²;žeµ¦_˜Ñ•–À˜Õ+KJ‹ÉUiQ¥xÌ -Ô«l®Õë oiÅwm©,wSé÷T¿örBĬCLá2íãYŽ“%žg¸Ãèà9^p2/CÈ,a°ìà`fÀw^”e<íÁÄÒ"#‰,‡Û¨“8ý_ö™Šªµ ϪýÔRÊkZ‡ÕuƒAÝï¦HUÏî±6BzëTäw>;b—% Ïöª|õOá&™‹·?Nì&jhFô@º´T9­T;äh¨ßÑÄY‘¶…ôM_"ªŒÇZ[c·¡ªb‰…Ѧ.ËI3‹¢©´FößH¿ÁÄ×ÐÿèíQ˜óÑIÚiƒÚH½rÚ-ƒ!6 `tQŒtÚü³ÒÇ;°ì•Í¥:<<Åã^Š~Ï÷ I¹Où¿lš@‚F±9ã¨-S"éÈ$Ð#œTêŸqzÉwþüŸÿéñóð÷žþŸÿéŸß{hxñ÷âï%ù0>ÿ{oóë»}P-€ƒJÕÙ­šÚš®´fw¯HM“Ó6­}É’NªV“Ÿ4®u°î-5ø¯S<¶Tº‹RkCG«ÉðÚÁ_ïR™¢Î8ÔŸ½¹Dï yº:S]Þ*2ioõëµíÄ]y+‘ªé¿ó”ågá[rYž–‰hr¬ŒâÝÍÅ›©Š,ËD4ü+å-¤LŸË ^׸ð»«\=/ðûÓ÷ÿ¿?½/|ç¯ý/|³ÿ<°(K+>ظ†i4Uhq€V{òÀ´L)à•Ï°•·§“6EÙÄ–D3+€¯žÛM-Š65@X× ÜdŽÊŒ·+™oWfïÉ1úñ}†Çå -ÅüŽ3¾¹ |9yY»!ྷq˜–§°Ô%§$,.à ÍðúW9÷d¢1x²þÊŒ¢!dA¦×]XåP©Äh /´*¢q¬ ˜ rv²©n T¥Ò8; ƒA­J§ñí -j•ÃQ”º”&h¬‹ÕŸC$eSPaÇ(¨s¬Wn¢P*»‘•²ª;¯e’äïŒ"-™4¥bœ;¬h~¸d—éfS£A:²œÑjT`Ù̮ӺEâM•›)Z« šÞê šÚjåzGéM0u±ÖRSëìè –ÕºÍ­‡³dEï<ð»à`3$h¦)L -`¿‘7 B@}$H"í¢ì4‹«ª”RÀ41•íN˜•¦ÊMM€:ÉŸ‰D,êëRÐoŸÞL“ütfô’Ãjåf‚&¿,I‘üp¨£ý1ËÏLS‡¶l`–®ÝÊBæ¼ÁéÆÉl†ü8L!™†*P$Z15j7S4 4C„f±©W®KBo‚YËhM5k!CËèLkZÆDеLWi¡ 9“pTšIÄ#h´.VO¡¡ËT ¿LÅÃ`ª„ ™â;/›¦pE•Ÿ‰¦Ë/ƒ†,•kR0š`ÈOo©!>dð«Ôl¾Ve—%%Ø{§#Stf±'B ¢YR"•zÍתȲ¤ƒ"W¼¡ “a&g²IUï2TÉ:d³È$;ÇK¼Ybx’Ù:첈/ëêõš ªÄ ’.13 9ÓëÕX×mLoŸ!0dp©Tk¾V–%²¶¬’SÃ$03‰cYÃt+Ó“ÿUÎ0,Î!CbødÞ¬±i”E¯ØDP%ft‰™IÈš^¯Æ»þlÓ´ÔZhˆLçB/¦ó©ÖkºVE–%Å*¨n ¥1’%³‡ççQVýr‰œvð dÃÁÓ®MþF2¹wF)ÕI3êU Ú³MÎE˜ÉìÛ±œ·ªÃ_AR =\#ŽF1ùuF)s½^ƒ@Ϯ̖‰LsOL"ÓH:kšç£3¯zG&‘i>”Idš£¥—Ò\1½^`™N2Df")¬iëÌk7‰Lm¡Idz)s­^AYWédøn&‘i$5ÍÙÑ™×<"“Ì4ÇÉ$3ͻҋ©î—^¯zm’˜F1fP¾´Juε›ä¥5Ï$0½˜Î·V±‰  ¬«l2œ5ó´TIÆôQí¬1ÁT÷Ç$0ÍK2 Ls¥ôbš³¥W¬L"ÓI†ÌL$…7­b{íáæy©6Ñ<1U6Œ‰©ñ®OLƒ OÌ.ò1{g†Ì8]B -gš7£±®9<†Àt·È˜î;i$Ý»ÒªÕ †À ’.03‰øÒ+Ö8×nLk¡!/Ââ£ZMת°ºÊÅ쎲Ò(Sš£q­y:†¬4wÈ•æ1iÍ£ÒêÔ® 9é]L& -ñ£Õ©ñ«=Õ‘Ö2CFZÛ5ŠÎ­Z§éZ•QWy˜ý/CFEãGÖվ¯æÜ2R CDš‹¤Q4J«R»6D¤St™(ÄŽV§Æ®úPCBZ» Ɇâ&ŠÎ«Z¥éZ•PWi˜.CBEcGsTô§ú2†„4‡Ç‘æéeTŸI¯S½6D¤St™(ʺ¶Z§Æ¯öTÓLS[fÈHk»^FãV«Ó¸VeÔUÇO”pRw€eßà„mã2¹Ãa«Ãô<ÓýF°~’”û”ÿZ؆Žý®ûÀê^•öÿÿ1®¿ë%™ÿ×¾?Åþéf •œ˜Ñ×ó}ýœ¬ÝVV9g¯Ôbƒâ¶êT &9] -^ZAîÊÒVlwÛ!]¨–7DÔMÔ.[" -µ›"JÁ®Û"]¨9lŒ`ɬ­…×¾lŽt';%ƒ$JXÛ.“U™«[ÞÝm™t¡Zß4‚Ýl›µ'T2kë¤ Õúæ ñÕuû„ˆ}Ù@éNv=‹»»•.Ô¶V d7›+DíÃö -•ëºÁ’I´¾ÅBLem²µ/Û,ÝIî8Rîfû¥ 5‡ Ìnœ½CÔ>mÂPɬm˜.Të1ÄXÖV Qû²Óôz–t7›4™DËÛ4˜ƒ7{£†2óöi«KfoÖt¡Zß®Á¤ÐY6Èjî[6ÝH­gw³•“I´¼™ƒ§ì³¶sð í>lè`±¬-L¢ÕMä&k[‰¹oìt#­žÛ͆O&Ñò–ðéºéC¹ÛsßöÁ£IY?™D‹[?ÈKÖæsßþéFVÇñ'²·…2‰–7† XöÖsߢb]·‡2‰V7ˆˆ›®[DDÌy“¨iϦem™ˆ9l™¼acÈä ç²…drzå L¢åm$à 6V…L^0›ÃVR7Ò:Ž`³·˜LÄ6™Lno†`û²Ñdrn3Û—Í&ÃßÍl_6œº‘–_7C°}ÙŠ2ù¹’Í};Êpg3ÄÚ‡-)“ƒ›!Ö¾lKu#+ Îm¦"èƕɱÍk_6­Lîk†dû²qeòh3UA6¯º‘WïÞ¬Y²¹ok™½û«fIæ¾E¦ûªfAæ¾Mf¸¤fAæ¼Uf8©f9æ¾]–-ÞT³sßH3œS³ sßL3|P³ sßP3¼R³$sßTË–OÏ’ì)©ÈI?ê/ïUÏ\_Ö¾5ÙÒŒgö›y’øŸì†Õ =X'¥¥ç@%óˆqF€n"]ÐHˆ&ð˜Ôžå{&!jã ¼{ôê¿SRß -áy:9¢<“yŽå”SÈN;ŒÙè.>€O‰SÜ =3jוˆà”³ TŠ¡ÄT·©æ®<Ó!F)†ÀK$-•‰J¨*5r›è-”U8’nHz±2ªY满 §Æ”ZA AŸä¤<ÿj‹Œ»º´ñD¥óGpBÐw"Í\Ö&HvIÄ ("â: 0Ë^M$7TÆ“mQÌ©º×l{}BvÒóí9¨ûp}®úÛ”DåNõ4žóè¤ÿã%aÙa~:¨äͧ¤9Ê/Z ºƒ’×Ù”¬8ÊS”“ÀʽàêØŒïXŸSÉ£oz’鉦†jP2e¸”á.–"ü+`† ËØ>üClhô2ãæ2™ž©ä­¢¿ø§à*”Ëdâ£L¦Æ뿘 -ÈŠ<äãåÖÌ–‰UáKºÍQé ÌLOH;loEe¶§´šVžÚÒN¯O¸lX&ɪ$cV £ßʨãŒîQÅ]¦ÈÛø£w­©Ã3F‚ù¿Æ=FAeH(õ©ãÂô\SsÔAq¢@•0O;Bu/9LCuYT½•äžºÚÊS{È 
ÚëSÁ/ë.®’³¶¿²qv™^F3„δzXàp=XûŽ•Êøf§òÕhßà†̬ê¸9ƒÀ›j)gP™V¡ö€ýQÝ´§›V7eUuœ6rø:0ë¾K^#Öz^#ßòVÿ#ä_úÛ)Ï´ây$[=­–$sÍí«gkâ)Å®XQš.% 3y‡¼ñ•Ó¿–™î(ãe;¾m“&Q.ÓQ$•Kå\;~¥“âôÍ@¡¤ÂN‰U¤ö­Jo–r·"º$\dÔ¡V]Yª=®ªÔÔôâÕ‚ØD3£š³‚(ãTK‹úMʼn¤¯š fñÕqV$§~U…¤ *l0ªˆ¨ •Ã:eÍ{R‹MÑ[ ß€‚”8XAÔ„Áè2b´2¾)ÍÐWeêZF"«XêÜaS4¶+µ†œXK¹Ð|žƒšÚ eo… eiáÉ=ƒYöV¸;8K“ßd~‹¹WL}et Ñ©¦¾6úLë2mhhCÇ°]zÔÜצ†œ8ï ^²‹=‚N"Ž9õVÜ)ö˜gÞâÓ»ï\kOÏ 4¦|Ki]ÐEOèS©LŸK]ç½Y#¨Ý^iÒ&•Rf6=t{B±BE¡—ŽCðŠ^‹;¹ãvœ…§÷Ðq–žžÑqýáÕr2'œåÊÈÂéÈ*“ŽC¸XY£6ëðjáÿTx>ÎT¡ ¶n•ýn|S~n6a±ëÅØŒÕuˆ5Ÿ§$8l 6S…̉Y®vÚªg•7ë¬8쌈/Z´ÐæIgyÄí´ó K!²Ä n<"}jß+qÈ8’A©Ò)¸ôŽût& -/,gô蘂½Œí2®å©ãË¡ìšTjã ¼-É ‰²qª©{œ,UE¹ÅnZUJòõŒ§”umGYצâ‰Yü”™V–‚3RfW™Y’ -æ,Äj"­Y T¬hç%ÁPPgA‹ÓñC­NÌhLg)Ú×J•3õRcÔ`¼»Kýf£Zf6?Ãôx£Y -Š­Ö^3+‹T *U¦•`Žwðôú#Љ<•R¥S8IÙa©Ê.¦¼-¨ßF­oÑ)ÔÊ+é’Þ­¦kSwô@Ðê¬Êz -éM¼ÇxlYƵV‹ÙžYý¡].ÍÕ!¯ƒ¨êJõJí ªûÝ]ëw—Ñ…]x§F)ÃM\ô–Úwm€àîÈ `ªÀ@¯D#hj,ÕIP¿ (ï!P%Ø™eçh„ÆR½-ú=ZkõJºrÔØÓ2×Dÿ¬*ÏŒ)¶I¶ñ±Tc<ÚÀ7L°ÑÛ<лp?ü˜ñBtè4™¥ÃIà ‡‹±ó2+óF›ƒGúQ‚ð_dhÏN`Ð/ì2¸q¸èŽ‡ð/âiDÁËNt ñ^èJ…žG$eç‘Î\:Dü‰Á9>¡’nA¡¡ÂÅìwÊêŸCbeZÇtâþ•$\²tEëÊHek¨ ‡XL¹o° “:T4ÝúÕádð|¹ì Þ*C¨åH>ü†a µ[À×2T¬(«íVh88JÆÈ‚Âuð4|$í¸‹boážÇ†d6¨2«‰'‘d1GØtUŽn°¬¨è'V<§a”$a¢ ¦¡Á¹‚ ò06À”¥:4u œ¢`½IÍÁW¦6B—àYG¬ŠC+P± œ:å$žÁí"õ‘Xȉ§N ú‚ÁM¾´TrâÚŠ„ùP©9˜d†$‰a•l¨N Ø‚ÃA¯)à±IŽ ¥ž.Ò‰ÓèÏz¥Ðiz¹°×,¯Ê×jå4ÞIxî0«{‘˜{Ó¹â®]¬à͵“¡Tv7Ó)ÍÜ;Z9æÝ¥«Õ<¸¹vv7Òºкy¨Ë¨gÇ5˜û¶¿z8ŒoH‚~8c(Ö†’SÔ߃ºh³T9;¬%vðŒƒ -𷕘YeO´f -Æ5†=¦PRV®ÓkóL·‰v–NÄ`·ÂˆâɼŠ"ÙHàógÄ"tÁüqN2‘S ¾:&yÀ(UP‚SD.…@ W˜’ȉ_ 2<µ†Y8AVŠpp dÕÞÁ+ÑÁ|o¸éº’© o¡Q8;D˜_„I'êÀ÷À^†fs«&5Âót<'1Á<9E%ž‚!ͱ4ùoä‹P’}dJ‹š0%D‡²öÍ‚‡ÁRÉ’\eé4Z1~à•Óü-àS@ûžµ99zëT®Ø<€ÊBp¨ «é5NžÎ?úÛA¥Õ÷AÍø Œ -AÆ™JI€`FAøB˃TŒõB -ŒSR™á´Ñ£Lqç˜ÀÅ¿,)*pcY–ãõ®Ð®ÑãadÌ,¬Qp:à®:GI•YT0hét™ Ý- ¯…\y*ð1‚Ýr‡]y%K7(<ŧ#\Pˆv•·eDå@¦RÈÐkQ z–ÀV•£’˜“……@Íï­œ”Ãb…äP”**4Ö葬N«ÌîÇã Ã]uW–v¬BÁíb#¿£Ëñº¡¹òN¾-!*«#œÈ¢Ï8?I™_l•‘Ôq%óm=’µ+§q™_] aXô”á´%†5­¥p’rl“ÅÕv†öy±ñN¤(Âdq%…A·_rAŠL+‹´‰‚F'¡!SÒOE¢s4ð, œS½F Á -<˜§HPó*Ó(U&ŠCAuTI˜JWÿ bRVÐœ"±‚GªoAó$2m7‚H4:·u`NʸJ©Ñ0:GÙÏ¡Bž*Ô„ÃFδN‚¡æ1ªÁm¥åQÚµ²<ª°!èOWI¸<¦^*o -Ú›â ÒÈ:90ã‰Kä‰l¡S‘¸$j— nµ*Am&LA ú%Om,Õ®áéør˜^X9èRIææäxRøŒâ¼àY4ÕñŒ¹F°¨ŽT(ÔE2GGƒÔrÙ½&Ôn5Ø×îkA­-ÉNÖTÒ €ÿ+3JÆSµCðy"g¦éq¡Ñ®¼N ŠY@§ê—ÕÚÀK»ÕIFcµrÙ­&-m'Ù/£ù: §ÔjË6OF£ú¬šTÕ_&«³“\d gKy€¤.Æ‹ÙfƒÙ&9ÈP©×M% —T‚¬=A­€F?y(Ê#ÔkZ“‘'B»EïAë5hãEk…Z…ÖÈ.làò–DcPÔÉ3zôú¡w´ -µKí‰ÚµÚ$­°Údµn#Üĵh•À³ô­°v©Õ­]«Ö -« Ëlwã‰Xyª8pÉDp³$\æïAdh!XǸͨ;2·"’*KGæöxnÊú1):‚;ñ-ÊW:\A>-O'«qáLTNœàÚ#¥M—´ÝJ&D»ÄR¢¬\r‚z«V©Np(U©—0=Uý͉=Q¨ˆéRV¬&VÉ)§ ðp”Þº25–®Ñ p¦ïP”U/i_M«“.”&–)ê[7/b—+µY=`s?SĪ ˆ£]o"8f‘Õà˜héz8”M¸“R•É"ò&õxÀ¡OëáìvÎSQM ºœJÊ]-¥Ù£/clšÇmÖ¨ÖÇ»²ukžsD4ÆIÆ ÍØBÆ0>q^´á+DNï¦çáÂë=b±2™?þpéSãz.97.{¸¨GÁ%Á)q¬a¬S ¢'øäèŠA%~€ß ¶>Ç™“¢Fd!ÒÔu«r¾…qfB­ï[ºÑN«Y»;›b”hVžÁg?C+ÕÒ¥V¼’³j̦%NÄQˆCȼÒ_\ B,®PÑdÝÑÅéòT4ÒÐœlloë6ZÅ×Z Se‰Ã×çx™cpR€¸=â…ÐU Sñˆ•Ã†²ŠLµ8"‰µùË»7Þ0,ú+ˆKx@#n\T©eêUö…ú½˜Ãx$ê\Ù!Ø:J%<ß%¨þqQ¥^”©WÙê÷¶ïî¿á)ðÖ贀͉1C™¬dæäî⟘aE 'f't šQVVjä¨5ÿP:»‘•_jâíðßê¹ ¢éRåm][Eª½m¾mV$™MÙªSMèü÷7›òce$•ikTïtkm¼­5m·Õ&ÛM³ï`++ŸY€eŽ["ófÑÖJU@iñHº×ûÝÉöÆùY?SmóµÒ0”ªÐó3ŠVΤ“‘¸­ÌVM4ÆâÚíMxUÑ¥@$}M|OÌkj÷ªÜMõUQªÈ \W4Ò2q‚Í®ôtoF_à>oRša4zÕÜv¸Ä|Ãh9•<Åu<‰e+ŸÑP‘‚YR U±¹0*]p§£;¥«Iâ1Á~b©»D®ºv!Ð`šÚ4/IšhÓþÓ~"yEmêASCc²ú7†XC§ªøêiøUQcètaK¹¯§GÉ&vG$¶Š$ ¾6I‚éIý'„&70mVx‰º„þ¯ü3œ]æxmíÓ£ù.O©e<—9IÏé¹8Íè±¢MY°vÒÿOÖC»×¢mó© ̬s½·"çááT~yîJwÆ£m¥g&’ ºã?¾<ÞÙÖi˜îš`›87Ò‰åà,Šj·L¬L¶´bwM‹Å1Éò$R±„M¹A¡*ÁÒDõ–ëÁ¿ŸèµÅÀ@c…Ù5¸Ò‘Æ…9ÔPi‹5š‹§’ £ÖËsôC¼:¥„Í>°žL×F“`ª›ðGå6U6Ë›ê®ÓÄ)ÑfÛd[©m¼©tQ&Û豶ɥ¶‰5‘TºÎ*“‰¦öXÚ -SÇ©™ÍAºÇ•ÖeH¢Z2²ôc ±¸¨ü@ßp9‘ÒO@¬ÈÈ¢$¡ÎÖj}»ǫÞ”§¾¦çi‹N]MT75YóÉÚ}F`eYœ"+Ó[ŒC–%'Ç ¼ètÊD!Ô "]É)‘Ð𤟌ˆ5 +}ÓÂ:†§E'´«&ä´yv¢¤yÁVÄ£‰¦%Yª,çYh°g”ï‘‘Ò‰SGÛ± ô•ÍR’ Ü`ÎgýøÝÛÐ¥˦ªËêææ¶hzu@åUU§ÜiÄ Ú˜¨/DQ%7>£>[HÑ6µ¹lÜ펥óIW¯nO·Ali:r0%ÖÖt*—3ÆÌüŒ„RÌ’*´ér¢íÖ ·'U±ÓºÏɆå B¬›œüPăf8 "ž,[›ÀI¼É¬¸b-­qݬ¨§nDÞ‰ô™“8ZZ1§'QV p8†a%‘ÚZvpà[ÂÃãA2zÍ”7c²sK”¾Œç˜»é^¸vrŽ¢--ÚÒ¢-ý~ÛÒ|9PX4ÅES¬¾PpòŒR·Ö@m•ÉT"šj;¾A [{´]*ÒµÿqåÏÚYžÁ¿íßÊÕÿOÉ°0®1 cÉ_Âï WtZ%(ßÎ"¤zÍÅûl˜®€ž6Õ…[ý¤>œu4—â#!:†ŸµºëÉþØ®ô(3ÑbúÎxå 
Š.ð~÷b¸Á16m?ì7ëÊuk€ 0“yÌm_ÓòZ¤óàÝbÒ_†º‰QÁ|µ÷Q‡®"çŠOOP^;„>—ý°L¾Õk¨Â²b±¤w÷Oµ_–ºV¼û—ëÇPøß -ÿ£üg näBó!Z8Hl'pîxžAgO‡ù°ð 0:ž¤\œB2à`£°"Áñ`T¡&G±°Ó¿èó :êë\òâs <$æP¢ZQc¡+4Dít%­¢/sÛRüÇb»B3g$.šÏ&sô:ÿ/.þ¼m¶&ùé -FÿË -F¤rñb]§Y*ÌþOAü=–{ü¿UAü½2t¿˜+’ê£iœ£DÆ')¡ã€þFÙ·”ò «ÿQÉ/Q‰x3à‘„þOkþkL‡ÿýKüÿ·(Lf½\7ÆÝÁò«ÌÿÒ¶þ*vþÏ ‹ -æ©‚‚üÔPh§ë„(¹Á”ïµ>õf;ìì[u'ø‡½Û5^®ÿÄ“GG:I®ØªÛßÂVo³ÆßC{CwÐ]OÝšRõ÷O?ünrï¦?[ËgÑ›- -\ì6ÃZ£œQ´ÇÈÃ5vDIVòËð’ -ÉuçÙ®º qSÃÑr=ÿg«šÕz~¶ü:òïµaø?1ø?{UÜ+ØSñÍUyWß´VµlÑ¿ƒ¥I™³4ÿgXš’¬ðkw7Û>ÿ­dêß÷X”ÿ«-ê9¹w$™¢Ô -ùã£VþÓ•FU½E÷?óî÷…>]6»Ζ=y@m­;n·C¼:µÞ/s‘ëQ~jý³#ãþÏ3õïgµþ1y¸ÚãÉv(<ûmu (A)Á5>BÎUϧÎëÃ8‚%à -…8Žgy|-@FC ¡èFBô¥·;Ã2)„X -•ž<ÎKP燘ûá…üz8\ˆíCd˜€ ÁCÉá4÷ >GS討Ó{ÄIï§ÿÕ•^‡é0FÙÝà›œPÆBy&4ª¯ -Kû«„ìuT-(¾†S Ãæp É¢kf9”Á -†~x»Ò}.¶]±’ ó0ì'VÉñ€6’æ ‚ 3,|?žâ¡MRèâtF8›•£Ào è¸V9¤(o@(8ª¶#Uõx<1’§Ïó1x%cBÒ¨¡?Ñ'¥8Ç…ˆ°0LŠàG0v§P³F„Ãð -V­ \Q²†† ¯€Á=“ªhaPÍÁ ¨0 Šf÷dÈ¡+,  šð¢it%ºø“ç´J?¼²îׇ¡F{ £à Ù(Pÿª~diTn‚Ï·ƒys@ehÕ \F¯„iX `3†"E8]~B*Wh? -„ @=±GÍ„aíaîÞå¡r-ŽeI^æ(lX‘󄤀âˆg¦ËZa lMv¥b!^š=b82ŒÞEG< htût1 ¢F«®ãiŘG•Ô!¥ÐÊÓxéi‰GÂat%jJq–¼03h ‚$' %||±¬žÓ¤TÓÅȡ‚P!ÐÍÙ ½¡,0Ì„XšB‚EŒèàd`G.L 4::ׂ;ZjJþ”†A2Â9&ÏÒ{ÑÎÂ@’ºùSÇ £X|ÞâðO†B qŠ¥ª@A…Ê™‘=¥¢a!LoaÁ1’”dà)–æ‡ÿÑ‚4u¤˜0 ’]ÙƒFAÃŒxTˆc  - )JÜ0½©NC â™3ˆÚX‰Úx„ÍèfIpB©âd4O“!‚(-³a@Õp4  ÁÁYP82Àj: TÇTËA *I²ú€qò €…'þQî!A±,ŠÊ……ŸÈ#ú>F·ºP€ k*¡›$÷P‘ºd%@‰„q àh}a’¢ !OP ²4I‘Â Ý œ¹~Ð9Ýx`ÇuجþÔY¤È'©âëw”d š;‰Ö8؇ö4#VT®AÎß…Â4-å²h¥xUTæFÒhná°D\ÀR(ï4L"æÂz–eB4ŒŒ"¯˜HxÀÑíäà è  -R½XŒ“Ê¢Aô ¯ . 'Ï°ŒqF˜;¶ŠHŠSð2®ÓƲ‚aÀFzà!˜÷ŽJoè›V B”sXÌ!…†’œ$ùb¦ -V;$ˆÖ.æ‡i’Â?ÐDÿªV8`ï‡ÒXÙ Ÿõª¬ ¤<` Í]¢y`20Ä€iàflÀÙ -3 UAÙ¢ûcQ´ *­eÐѱÂÐHDž h¤;4õꑾW•~KÒ—“,!! -Ô,(ZÔ…´ ®7åH”³ Óg8Á ˜Fw6ƒxÂuÉO¦ I¨ -¡/A¾ÊÇÐ’ð{u´SyÎ -@Aâ„XTQL"ÒH]b pXK!¡ú_©ÈµÈB… -IýQ]ð ƒçÀbGê¦Icè€^]–æ#\)§eä©Ìœ¿­¨i“y4¦FŠ”ìi -n@¡[àÀNG@5˜ ç½4vdà;w4(ŠÄwƬÂÔÄ¿ìM^‹0vû•æA²±èî5–,4è@3¬ È=,ˆBHµ1èž{ŸS ´íÔùâÒOVh'7°°–slh¿øÈø‚…?·7<€á9þ‚>EýÑ }a€AB–/b…ð~TÌ„@ ÓÆJ2 rÂX=°4~ jôŠmmTo£ †Á<ⱘ¥pC)guÅ#Tá -ë -6´§Ö • pLà`xÃúUÂÕƒ×øŒy’ ‘‚u V*ÆSNÓ@;.éeäz4Äíi ±/ ËTÉri„ÖhƒG~0æ_’A× _²ØŒ¹ ¤…k8r2ÔƧܼÅÖíÞÄåËÂ@£2`¸áõB7rHå‚ÈÇJl`°{ñ„@+Íxið2nÕü¯íå‘ØöÇ~ÎÞ˺þ£`y€flX ÛLP%øÄ01–.zÀÇkLþjÅ2ZK8òhª’ˆ -œoäéAôјþë䫨Íáêº?ž N¾$6ß“B9©1º›Ï¥@¦\¡™fùål0\œ×qœúD?ò¶¨3ýd Y()Bhˆ_1ºÉHõ’ØT°+]ùu÷_øÈÞÚû¿!\(˜ØH“ï­Jàf”뀎ú8%.„ÑCÐÅc/„2|â9ð¦ðàCƒfAîV¹xزRð‹©_Dë+…hïX°‚ƒ®wÒ/Ä ‡S @«£P£G`öÿäÆ<ú‹T ¤n@Õ‚ƒr\°×Úä -Xö nÂœö/€g 2ŒpM Ç ©'|úq\VÍ8Š%) - - -jO+ÃW|!c ú@w`aõBƒ—ƒb ¡0+ÆWÝE£((‡mvÿOaÉ…h¥M <BñIñaA+!,c!ñŽwÜ-ž`C¢5{4ÅÓ¿ˆãàÛ{1œ¤:€µÑ¹ ÊyB°RIäÚÖ!(à|b@«R¼Ê -ïÿ‰LE0ØQ@†ü•¢‚a”Єâ­OŠj’sÜx`H¬{AM¡VCàð„ŘèÑœNÿ"¹Ž"‰CIAÑÿÓsiq`{M ¢{ø+IÚö°wž^Ζëóüz¹[íe.’5 !{”lÐÌ ¾€Üø°xÆQhP+„£>¨LPÏô>„Cíê2O‘tHQûíp~€Úx-JQÛàÜ‘«@©†!q™à;b»œÛ;!;l8PBâxŒ…ÎAˆˆ]e> - -iEÁe‘~C ¤6Ë°¶ˆqܬ I*CØTn ÈÁP‡ôa2`†ÂT€ýÁøå ¢‹FÑg…E(Œê2îãÓ\“¡°°à$‰‰¼}ðµÁy¬Ñ‚aÆay TKã“ì`¡0¢€ž)F#ÕgÈ`–ÕŒê£8ážIr-µß•v;¿rAgy²ÙÊ:”IíÇItòÃ65å8$˯ú<ÚºÅP¿s½(IÈojGÝü?0¢Ýv»\¼.ÿÀ÷^7»½Twíßv{û_T×¹»jËÕÝJùš5Ìxÿ½Ö] g¯éÙr3VÅvÝí07ÎRÃÜd†®õ­7ДÃ.X¬1ëá`²=Ow×èÖÙÆx¹Z¡ÜøŽpt H÷wÎsÝ?–k¼ìÂSu«/æ¨+q=30µùp±…ñuÑ­ëÒ÷óþ&ÛÐGßï+å›å`¨ù0zîúç|¶€Çþîv»žô€Ä σÉ5¢ÿ']üBÿ²VàEÌëáBh#%ƒIOÑŸíþîw—c±yý£»ÞDe9-ò¦tѲÐý¾Ñi‡ÊÑ„fâH6ŠoSì,–‹¡ ÄÌ–ýép`3RKß_;¯Þd1€’&æÄÑnoð$NÏOÞú—–ÿ‡˜ 1ajþ“no64Cø'WõïÄè‘?L³:júS4šè»írþ×J²F6]t…!ÒyÀbfÉñßÎ ”Xú3”ÿ.ÝŒþñ¬ÿb6ØÌ&ý¿»,¦È)åÃà˜¡Í}îJ›Zå¿ZûI´ÿub*ÿ23•ýÕSw5`PÔ`Â!.|jVÿ˜ ðT'g&6ü‹gG\|t|mðÙ©S³Å#(ONOjùÏOÊ"×›Po >î¼<m«ë ¸“f¦vü΀M€…ac¹[÷‡)T=ò— ºþê!̇Ûîl¤ŸŽƒÿá8l1(a†ºdÑ/ûó R³ápPGLÕ:t­|Ú\®tŸ!r•=Ü+šülÙëÎêÃÕn&Uwiòè>ÎO‚Ù®–ë-²³’› XZ¥á¿ö01¦jÃõf5Ä•QùõdðŠ ×fÝÅ[Lf“í¿ÛîvhüVn¶\®õ_ ³yFYpx.µåd±EëqÔˆµÅd3ÜãvÆ€ë»Ùp-0wa1þ37YoÄW0¼0*7>…å… ,LJ ”÷y¢=ÚÒÃÙ,ûÏíž*LbßÃÑ Ìν1ì/X´Wa~ò@“ò™s¤¹%ÈõxÅL`?qåHbÏÓd¡Ö] §ãm»rn¹Ønê¨|eOÑ{ ‚¢“ç£Èöáb¸>_­‡›áúá9 -‡®PdRö8;ÈÌâp¤^]d -Õ˜J˜ëîÆêÿ:Y®Ïm»‹¾ZÑ©^éÏ&«óþy‹ÿ<_ß©7ÒúûC2 )žýˆYñK,´ŸËKãåÏWÝ u3™ïfû# -M€h$©x>],ûÓån{þ.ìSai†rïö8ãÏ»ëmoÙ]`æhG‹‚¶Ã¡Žlc¤e–BŬ ðfµgI³Õ¬º‘8Éô¬&Õ›ÝÙd£úiŽ/L…|²pžÜm—çu¼¢“?«¨3¥±ƒžo×ÝÅfÕ%ÕÿÌo2þyŒ]9ê&›% xxÞC•hb¢ 
äõöZCNeÜyc·B\°9ÇÙ1çU‰ÄnÃ|˜4P©}¯xÚHÔ¥0)u8îGÝðz h\.®ef'ÒkŒ. V’4TÝ^1øêªÛêÚ22Lå6Cúüv×E~^þ1œ³$Ðîl²žopQèF›*•&cc8»înAèáºÙà§ZÆå¾eyÙïÎPðUÞVÞ ‰Ô7‚*)$ „ÌBù˜ÛÍf’âË¿áé1±²éuÛÉ9 £+ÎŒÖE.Z…†&Íj¡k;üçVÍ%$õÚÅûÖ•å`xj9±Í®XM]Áò+×í÷§ÖÊVT¡:j™*ÙÉ%H¹ÓDûÎD’4!E·Ùyo8¨­—£Él¸×FúSÃ}*æ¦=Z4TÄ´{1{’ûŒJi˜¿?>gØp äÊÎP`³(…Ù¨éAHè´jʼ“æC¸GÓŽì(¹%­?U<.ù\ý¶­ÉðÀv™Éf{Ðê¦Í<¤YaEMèå»Åd{À¨|>à“¥HÆ ¡ŒõÙ 5TqÊèÒ랬ãZ<Ó˱ÇÊp0ÙÍÏëCÐv;±By/Ô°BÜï7†ÛÝJã kjȨ۞ö¬ÈÃ!_nÆ{1.#‰;ÅÕÝv¶Šñ;ÄA  ®Ü]¼ïºïÃóÚr%™;Rûôz¹BûûÂGê@­S€ÏùsU÷ª¨Ifò+ˆ'TÄ£õ»IÉ8 Ì–¡m¿\ɺdwS̹Ÿ¤Âr‹ŒËæ².Èz¹™ì阒ÆbþR%ù -i¼bžÇͱË/µ©É“ÑZâ ¹TÒhI~¡eOSÔi6•Y³( Þ·•h'Ý]üÑÝ4äv¥Ì¬I® Ê9Ï&àÉË4W¡{슌ԧ"¨ídØ" ‰ €½‰IÊ­ýÅòà3OØ;A‹84ËÀ‚bÀ´fD@¶Ö0ä{ÂPã4r¤Ò¢#U—;R¤œ{QÓªÊ#$Ž:«Š6MsoÓÈÛàù7‘{…B7‚T ¶0%÷•«‚k,‹¼G Ô CRo€ ¹{Œ7½f -¸jŒ3ľxÆÊ¢»[d6¬,ÿ€¾n†ïˆN5»m¬eƒºÄfÁCx7ˆ¦ öß«†H4Gií™å?Â6ni"ÅWptB÷­CŒ‡($—ˆ=¨ “U˜ÙZ­)Ôún3Ä9VðÆžFŒ$2PS扚˜;â8ðB›ãݼ·èNf"öOMCf¸¦3:#ÄhBxöÃ5ÒwrkQcØóH/ÎQm9›H8ØG ´ Kn½œƒúùÇr=Eö‘(½Ž«dñ¨•ÉÄž¹3ÃÍv²À†ˆBÞéÎQö‚|~&Á :Çh"3xÐmŒèi®'s´m™7Ï„ŒV®²\,U˜ƒÅ”ì-ÿ0ž¹_BMf<Ø~Ê÷LÙ:JÈcr®Ba6¬–«dÄ""%jðiˆ8¥(RÏBqæ"Œß£Â¤öd4®¿ÃGuYøç ØÃ7¢H¥B¦¦¦4æöqâ°ö: -&ÁdKö×Ë^w[îþ ÎwðÓ\®ÌLRÅêY~ieQÌj±#æDŽØ¸°èÏvj¹j‚$5#l:L`j2Ö®KJõúµÕ‚ æì=j•9œj1eÊù4‚{ X]i–„Žåca‹‹ SSv®¢]ÿ3öÍk.ú“S§Ãæ·]Nò¬lžÈׂY¥»+Áãš 7²EüÒ2˜\FŒ‡Æ¶» Œ|e,E Ô~¯Ø°WÛË>n}Ú®qµ:Ñð@3©îzs»ªÝ£¦Âœ&‹© ‹–º=ì¡`œH¦Í1¬"¸à ÆÃó@§çÀÆh!7çÿç›.r%ÀÒ<—Ó4"·óîý,œÀ ö{1s0¶Q—ðWÙÙ¿–»óØ9ðêP  Zèî%:Qò°ý« ˜ëùv‰ºèÏ'ØÃîžÏºÿB[f° @ú‚Øìúc4¼Â ¼ÉûâÐm$²ƒÑ-Gð“Íùn1EG¢L›ýõd¥u >Ó\‡Éõöô²‚±ŒÌÉÙ1ÈåòéÖ{jèZ€-,FËÓ-%ã÷§ ¿ -¦`z/E55‘¼Õd#](„C™!Zô¹ª:½—í˜3Ú½÷iGÕŸJ¬óóqä}a)æ,>—3=é6ìÝu–µEwùx…¹Š”Ÿœ•Äz×çrYª¶“ c#ˆMæ#óî#.Ñ—€'qé[m›<³'¢eËZjTܦޯoˉKfØHObñ~&p¾*:Ëäìî!¿Í|<§˜¿/9_–7ÉBc;öÆYÛ.—a.Ú©™³}fÏŒˆbO³³ Žq­Ûǧd3hé•·‹<'.§¹çDd˜{3>û.çÊFgvŒ¬Ü¨Ìè¹Í¥f‰Ù}d”oÓcîT ãÍ‘é“åÏÄ啳-ôCÞ¤_Þ_–ðÉñ™) -–”?üq‘løm a ÷ÝÁîÌθ¼ýl?tëJ™×èeÒN;¼©ß›7‘vÞåÒÃ'Þ*ÚÆÑ~¿;EŸ&Þì¨< “D°Ë­'o‘ÉKqšÙ¯œþµ÷i—,7ŸhüîD´8¦Ïìl´õœH.úι7V‰¹ùSlÂqÁ͈N®ûÒ;ûû™â¦hãœC®MƒÈ$ìÂú’•˜Ëï¦f\m.Ì S¶'Ò…K[;ëãCX—Â#k‹séå‹÷²5xŒP=Û3î6¾°Ã„â¬Ç†–ä‘m³· „§xjêfý"i¶e‚|¶U2Áî¥#gñ>¬=xÁ½à&gv¢g-0ø³7ž»?]¶³%¡yÚ—}:£:TH÷žðÆãY•¹z‰ý´c—ÑÁÇÍ ^Éý€¡¿j*$BF©â~χ®X52ø·%•yŨávÅ°ìG?ÙÌ|x3£`é3Ûí:/Rlïîöò:óO6Çým²vѯ$› «Ÿäž;6xgð½»ÚíQ$P­‚L_¦‡Î³ÀúZZ°Î23jdŸÐm×íÛ¢ma…PÏgöì+ém¥˜ûb.±^ï˜H¥}…W(š¬YX<Ï›Zò/jT*'.Ç»„'aaQWgö¨7¶så232ECø“'&B?q¶·LD›[k²YÜîŽQ©ZIÞ¥…¿_[ÐoMcŸIR§]æ\Ž•;=fëÙqéÉ ×k1¬Å¢ûèØ#£œO¾ILm—þ·âÔR Søàºï n|ÎKÉêKª’KŠa‚,õ¹ô`ÞÁÂSc ò©kúf·åØCêºi‹©Æpf‡Q «™üÔ>P·—HÂÐÄ(Ò^VÝ®Ÿ|ÛüÊÚãyo”¹Qa$ZØ,GéÉfÂ"ié¬Qn[áú0«è0â˜'×݈¼*ÞXñ¡(}9FÃS{Ù“ß¾æÉÆ[1œÄ\9¡ƒ‘óšM6+Ë÷D«Yèæ²áÛû3û¥—È>íѱÊú–7TÎÁFîaÍw|¶?ýtH0ƒÌº‚`v<:Ë cÎd}¶òÈÛÝv¯Aü9®>ö’¥Ncqf—‰péùk¢É9ªéòg“T¨ #3J:_: žË½º-c< ¹‹!°T¨§z*®>z>UhÙÛP¢ùeÖ×ìÇ‹·m`ìû o<ÆÒøi²áK,Ò“ÉíMÍ‚š43¥Òµ?w™ ¹ñS4—wÏBhÞ¬$ì©Y‘ µz<ð'¾šx˜DkHp¡‡ÅQrͳðg"Jä )|*vb<|ºÃoÝ„ü7ò=ÒòðŽØ²+¾Xr½§¥?à“›¯áw¤·%ø7ák•ƒvWuøô‘š Æ -¹Êʇ‡ÉûÁPæu²Áïß(Nq4ÂÐà  O÷ˆ¾ºS á7±3<+ ½­F‘8äc åKŒ÷èz¼’½]ÜV/¡ÑÝüyLÊQ)4~IHX¼¼•¡@ÀçƒóFêå%¹Ÿ‹b¡ –ö ¡Z<—ýBP–¼„,¨Øü1.áI¡Ýá«@/˜rdÅð\ök©œ‹™%Á3>I³ŸÊ¿=Â8E–²|VR9¦YEY¾1&$tÈæŒk"Ó†lΫœDÉÓÞáí»PŽFþ.žîc\õ@ñºìÑ!@Q®ª€;Ô£0FÀ„«ˆxG¯!©0¼‚øùPmŽ^0Êï㘒arâ;û…?¼#°á{ú[dˆis¥ †íf«Ò'¹HÖGï%0+K+•ÂH6[¥E.A’Ó3{Þž³$äbe¶ÉîÖ~™{Ý6=‰(Q³x³­ûkI•ù/dæ„Ì*P{Iòvgvìæ ¨`¯Ì5#ypXJ)Ö ¦ŸûQ×›â¢å\’ž^± œ;göð²HpY¿oÄdJWŸŒJêu˜¬/wÉƶ0ÏzËA—òé,—àí+É©¨»Η ðØüLVÓ©²aDëj®@ x È -SÎTflpÑþ]=É=ë™âÚó¦ÕA2tý\H–Ú·`'‹#»³Oó›êp-š~e6jû)°÷ƾƒ±ç€¬ Éw@qÜÝó°”ÝnÁIíH2û™ìmš²>;Cº®Ð™]î (MMð°ï“Õ\æ£ýšž|°ÁèhnûÖCNâoW,ÑàüûH4c³çCW@ÉbgÿMžëÙÇâ™w¿ûJpKÇßÀP«:Óco'œÄcq­”xRø"S û@îàƒ„‘¯Aí£€Ò6<( 4/§l zN…#.Ðùj×3ð,ö¿Òú— ”ŸKi˜ÐÀ›{{ó™|‡;ž¥*ÏTÔÝÞZÜO*'E ±¬öí63ºóæ‰a/—¡Üƒ¬ÀØS%}®4/®—KÇT¯¸Ü˳ǖË.vo”Ÿ‡ÅÕ¿ô]Ã\¿\LÝÖ0—ÎüFtðw{òö¦ÑÊeÜéÁØ—‚›îz5¢\ê­>²:æ@=La]Òã'KÄÏ¿¾J”õ€ ”RÓŠLûÚèÀ³iyIM©˜åð` "ƒùŒ"%o¶Ñðø4`ÿ6d¤ÅDØë®·Þ«V÷SÐäÑRÇ -îãآ컚lä°.±5•¸³ðx ð -Ä\;jˆHn„€»ovx‘ù[ªå¥ ]47Ù×ÙKDïœYŒ -ͬÑÑ®ö‘lÞu; 2#rO FsíßKguãÌÎÏrË‘Åû–M‰{å™ä^ZQ4æ_‚b“ŽL6Nr¯l ˆ{ß’sîÚ,šË„(o¼óì9îÑ„—/ë¶,ãÞTùuŒñ ëg‹»L >ð'"7Ó½Šj²ÖÜ"-†øü«Ïå$¼&$|:‡Áñðå ás#.Ÿ‰‰´ 
!5O¥æÕ8h1…áqäª;úP]ßd½v$ÓXºC¤Ä^Ø>ÉHûs]÷Þ‘ûœ¼+Ab ¾;ß;³c}éë°×¸o™î[.ðÓ·}X¿‡hf¸ª–±9ÁÚÊŒ ýFeJÍŠõžkÍÊMPtWÙWbñœ53£3{Ö·¬ì’¡Dò1Û¯ÍÜ*sIŒL¾Û§ Py+· Þ´’Ír±!ï?ü¥+±öó»d­|ŽL^Â#Xñ¶ÖEfÞê(Š){À@yºË½FÇA5P*þ9‡E~·$>¯úÃôV»d¸·z×-Ÿø,1ØcÍ‹OεäݪGd¯\ ]ìõ+ò?5›# —éT;‰èŠ'§&¶Cë-¹ðíÒ£Û7‰ ½Ïã&þ Á?8sé!;M63]25 ÛÂåûœ$"ÕÛ^0™„9¢"¼?ëß?Ód*Éz3ñyL¶Mz«l‘œ½Ë^±÷ùôMºuµ·YQ¿p—“þc¦Ð¯×RL›Ú-ü‹Íʃ¸Í¹ŠWeþú)·`q@h%6ZºHŽ2ï¶g†‹¬i0‡&žCWØêC†KDt—î û+¢îËl/ø°RZÞÂÈÂMÇE#÷js4`~Þ§ìËǶcÇøÌo³½”ÇufT¼ôc²ÙtXÄrUØw[£T"Y”!,}}Ò!k[.|1Q¸9–ñ5Cû˜¡0Z~šÌ6“_ç™·Ô&™"t› EÐE®’Û¿ð5÷æYŒ/éjš¸Œ¯Ú +g ¡ÑÅ05un‹)¦c½»Ž=1NS3ÍCÂßßñ.Z+*ÓmäŸòöüsèeº“5Þ–sÏ°ú‰ lŠà=Jd'³‰J&P_ʨ6Îú–9ÛÇ‚ÄØÁ•HÌðfctÈ÷[â[¼“ÏT‚¸¢rQŸõàÀј\m¯£œ«”çå‹ŒiõöɆÌ’Þj$x‹k}Oͭᙥ”¸^}J\yÐ/J›¾ìrÑÛj-ÙØÝQÇ -%´Îæ“ ¨­ÔF8cso;¯oT#—uÀXeP¶i/·dS„ùe°Ÿí-¯ý'Ú5íçl÷­`Mͽ™‚–hLf}\ÝÜÃJOdûöªÎd|w4S¤PF¯°ˆ×žO0¹ 7’FÆâócí¼Ï9v«aŠÙ¥ÀÓO[æ“ã&sW$h ;K}|Ô™Âàv›)º\9´ Zö’³î›FÙ­£`‘ w`¯e- V4c׶¢;_òÖó -²?'å⸸Û…Çü¥ŠĹ€ÒoFn^z»ÜÛ 5G4Fï7Ó䤦dâñ¹€;1}åpïÊŒ+»-mc¹×Y ¨€2yñ½Ãjl<\äÅÛ@ûA^õºÐî ÓM2Nç’'Û¥`2œßlQÚt…î}bã¾·fJÅçÏÌðòÅzX0ló¿1 ¢G+OA6Ò«:̾ÞåÁ¹º¾Î½nŸý¹„sØTnƱ˜Q #95PÍ ÿ8 ð|ùüC¸ù”í5“íÌ{È1QI¯½à’dÖ^K rJ\’,^‹3;Þ‚L/?sÉZb©lvKehe?I¾žé§»ÓŒgìs—tv˜}²ûð$nÒŽoänê&Ãþ€ÅBòXlaŒÕ¸æÛu0—Ù%ñ¶ä-XN&·HV@6Ý©iôÒ/(ªßAÀ5vÍÁ!qC†b§·ÖTÈ ‚D2ï2i)>oÚD,‹¸»KÖ—å76þéžÊ]f„ã”ó²œõ=§Àßg©ù!8‹‘º÷i‹zŠ™Þý°Ü¿Îw^WoXFîÙGóûÅã„1àþœ]ß^_és"ºô™ÂC’¢£Ô­k‹èé’½« ‚À©w¾œtß|[÷eŠ–!ŸÐxE²­ÏQ«„œ½ÌET}ïåï‹ÌhùìÅ¡#Íøµ(êSanæwÕK²ôàV¥8³ýõ%\½¦›<í+Fs¯¾VD5—}?ÍìR¥¶•ý¼¥ßÇ9'Úõ+†¼ï`Åæ“ËÏMewT3Ï“ŽM¾[}‘b·U9Ì_áFß‚ý[É9óöÜõ|ïƒÔ¾dŠ^[Hy¶½Èö{¾Ê"hSWâ¯s«-ü+²×.‘îg.}ñï’Í4÷ -^ç<ì ¿›\&‡¹Ãà„^hW2šˆ>ߥ¸E¡IfžßŸ)ð÷³DPê äEmü7¹„ +LÑlΑ/¯éH‹ƒo±lqòÎ6ý*kñ§‚ãÅ {Á¿·×0ÖkTG"ÓË£øÅëéfˆ ÿ«÷%í!Á'˜[ÁÜ¿ëåQäÔNÒÿo\¬ÕDå`Åe*S -7ç|º“¢Àë¨4zÑÓó8Ž—UÚ*;:dp»ÉšBÒ]àcÙ[ -ÍÔuˆòŽS’Q -¶ªšw…N×6Um¦•!8YLg›m 8¹"*VÚÓst`öùá@B!ãå?®'ƒãBÁF÷ae7ÛNV³¡T«Q:Lª6ùçpý ×¾«R„aCyeµPä—_wÔº»ˆ%6FÅÕÂKe”°£Å/á‚Ö£Ìh$¡*ñ« „·NÃúj%·D*åîf+¡¼Ñ+YD‡ì ó¾A?«Õ: Œ+Ö9óŒVr+j6Aõ!—%)½¶²“ ]™á48ïýë<³ž “â ó„Ñë‡ÓÏ4Smû*ZIÁýy`¾?þãDqÊf; ¬VÔ~¶Õ ‹%çía/ØöX+Ë ý5 fF T. 
ëþ¹ - ê*ƒJh¡¨GÐ)Ym›qw0\Ã@£þr±Åǜ뭅˜oÐhèá¼äS«*?#’ãuéKÑ,¤ÛL Ùž¼¤&lÜvfX瀖Iœ«¸Zú¸-¶Í®'§Y´zŸOs”ؽä¬e‚\®²=É(KÙY&†q@Ö´úëA`++&µÉ 5Û ScsˆßJó¥v¹Fò¹{¢r@ #ùœO­äÊ ÚĈt¬[þ/ƒr ÀeÐæÐÙr;ªNð’·ûB=ñàN½.÷ »›Þd;ï” ¦\×Ñ=ÖNŠaÎÖ2Ù©×b`CO¸.ÏÙ¢(D»fÇ}\;½E'­UgñøÃD›±1=@¢Ž—ë?MR´QKø¢F+ñøc˜›þjfT¿(´é/6¼ˆùv¸@§vJ„@鬻’a¸Ý' nÐÕ5'Úà³IzûÒ-ývk£jܤg\U†Û¬@ŒL Ëip³µP%ß=]zŽ›ƒ@ 2kC[îG‰DèV6¡SmeRßDkcáŒtÎ`¶Z–ûS.LüýhµèPèÒ´)…{>ßN¶†ô± ÌÈ>>!ðÑbøÞ=dDê2¨.s1Üœ2dÔ§œiX‚ù´^ë˜ðjûi¸UBÖ3ØD†KEØ:8‘¨YI ½½›´C‡\tg»è4‡·•<£Õn¹êðn°ù‘ö ª!<Ñé`§¯'¡ÁF–|ŸX# ¹»XHÇjaºDFðD}Àœ¦²4^jÔN°`4èû”T”[ø&^ïD½²óÝf(Õ5ê/ÅS³ÐyUIéEY¤å„'®ô`ÉÍf]e¨Å„Y%½èû ,¯‡èhš!û‚Z"GöèDnu£Ít²P*¥Ö¦c<9Ðϲ᪋Ë*˜•!š&™Ãñ«$% / ´.‹âUÆZ,–쎯S¯–NšëÚ×±TcT±•éèŲns]ODª‰-?ÉUžþ W;»1wþù矋û-c±xœV‹ÅU´žÙár¶úçá?€b¡ëÛ .Öèk3É=¿ÏàƒýþXÃùTb˜ÇãqŒÑóP¢õüé†N'þš}ëŠèkMÑáèúZ7úôµ¿&÷ô ->¸¬¨ïËd¸ö1‚OîúÚ<³ç¤>{P#k=oŸÞÐðÉwƒÞΦ'›f}Å euÿ>øIü4—®?Ñ×;ôÕŸ}‹ºÐ‡.üáC)jÜ[ã¯x.<ŒŒ ÍÏséÑsìOö]³½&aÍ]fvÉ]¾òЩ ¿–l~nì¹lÊC®]ÏÙÞEä§adJ;ú£ #“F{€|oÒfS™]¡2â­EÖÿüÓ±ºX'ŽâÎó¸WZ3;ú‹—-ÞÝH\<ÏôAøQöôb ¨±Yž½¯øGx~.Â4¥øy:‘ÚZ#huª¯ð¤P?Œ–ÃÔ’ºm\…駛™OuêÉúàÊ‘Êm +ðþùâÍħ±¬cyïɧžØ·Ô42ð%>;“”Œ”½VÎP¿²éS”põ¾Ë]æ -U–-õQôuÄ›­”I”FÆy³åÜ K_ìúþœKo|»æ¤È|1"¯½¶gvnþ \.û0Ê¥ðžl°4ò2Üü‘¼S¦-l®«ÙáÛ%KÛVòÛòó·¨þÙ‰rm3:5 Ù{ŠÚcþ±‡õ¶ËËdcg{SYj”O’k?*ƒîI»wOt&U©ÄÕK*¬åŸ˜†ÿ´Øܤßbe±®?,–L›š]§-–«âÓ*®•Ûólµ¸oî?-žïÁ4Ž×ZˆëzFßl9×2Lüɶý3obã~£G~DÛ‡ÅêxŽXl¾Ìå‚òö,vX׫†€ö"oq]Í-îlciñ”¢‹÷Öš²øZ/-‹ÿ)ûa ôünKð}³óÚ­…ÜpC mÛ^XèÝk 9.‹Ög{±päëÚNTh Ÿc®-‘;Ë‹%Úïà -YbŽ6g‰Ç‹UËU+önI,H¯%ñd,éGÛ«%ëX_XrµÏ„åÚ¾z²ª+›¥äß%-å®ýÕrsåu[j¶`¹í%'–F¡ÂXîˆû;Kk÷aµÜ/}YËÃkl Pžîï8ËËíìÉòÖ ù,½V½aܯ.,£A¼lƒ=b™:ˆkË<Òù´,«¼å³ßþ´l}ÔµÕRèUªbu¤l«k|gõ¦¯«c³•nÔJ“•ù™Ýúˆ­á§Ë,ž¬qg÷ÒšXw>­éq½aͽ”ká¹0³–Ÿ®«Öê ÂXë³Û…õnÛjZ;ôó”œY­¯ Ë«µ7È£h²Nî ƒ¹mõl]Õ€²uÞ±6kc}as0Wc›{ÒkÛü öÚFÒ°-ä ½¶ð¼»±]¾EG¶DýãÁ–¹)Vm× "g+·§1[íé–±5?b~[ÇævÚžù•ÕÖ­u׶á{cnû JgvÛ¢žÛ6Žäû…µz5ºpúÒïÞ—üø‚ˆ–>.B–»ùÿôúy/­,é 鼸¦“þ‹Š·º¨û6±‹v ’¿x -¶j݈íñbT)/¦o›ÝÅj›Ø-Ñ]ìÌnwÜß”íÞóÉNÞtvv›ôÚ/oq{Š5ìùñíØ^)ÅÜöHØ;Öí½ýe6ý´>&¬ýãõ­f_ús‡eÐeÎéàÖpÌW†³F‘ùèHL’®3»#϶JŽÊýtáh:|1ÇÃm¾çèÆñ> =:™vÀ±ó»ÛNǦíwú‡Á{'Óz8£·Ñ'gª´dœ…jcà¬=ůœí‰kå|Ù+ÎÛñ:çå«s;M\üsÆcW`Äß»Xž¸bá­+›‹Þ»* ]ÍEÃázzêö]ƒâªìš¥ÖµM•¬ngjØwÓÞš›+—ãî«—MÀÿÌlܵÀtà¾/¦Ûî·¥¥äžÄ›Åý9 ‡<ö̧×p<Ú<ìsæÓsuÍL=×ôÅÐsëXv=åðÅvÍ“g:y~ôl—¯^·ûýÙK…—¯Þèµµçͼ#ï/3ó¶+÷ko׺±{?*á€wk­sgvŸëÕ–òQ¹ÂïÒ¿|ôe7¹‰¯ÚßØ|çíë¿ÑYß¼7mû­‹æÔï·Ç½~.âOú“7Ÿmi6Zú[¡'ÚÿöÔ®ø§úØ¿{ ÞÔc%À:z³@b¼(RËÕ´Ê—néî:0+.fAk‘¾ úëõ^0<°…‚éÝõK°ÙÒÁ‡»âK1ü¬½ô—+yIÐw¶)qêæˆâ¶f#Zï™6Ñ»¥CÄà’ö‚«|f'ÉwŒ5Üc²Ð%*äÝ’£È™^’‹ëZ‹²N’sz©ø2;£Jé—uï ä)0X9ê³lsÒîLcA³ŒûN‡Þîè—*ÒÏüE‚þ(¿‡k»JœÙ™à6íeb—œƒ)v¼VæÞs±e†‹ ³¡/6!ïÔ³ ñmÎʧÓöÐ]°êõýïDèÓçáX'uņɷk6— -6Øf½ñÊöf®ûÉwœç‰a9Þ=È>Ë¿”î¹VœšrƒõÒÃmH·Tªíðåõõ*\J§˜ðCɦÂwöîÉ’ÌN<óNÎÙþØä¢Á1Ÿ»Nïr½êv“·Dܱ|È~ÉÜ7%ÿlïWòŸ^÷úšàKéëLm=½¾gã×sÏì½àk–â…¤ÛýQ¸{ì§ |~]t福b|hóÍñsqÜÆK®êã¦{xi•À äA^N¶eWxÝ)Ç^©Ô™½Üp&=åqëñ£â&-­J|’JWšwªò‘ŠZn¼Üûø&ÉfoÚ!ÛÍÍüò)S TrÑjæ… ªÖ¯ú™:kÔ¸k«®žvµ×íËöÖò<Ú݆sKëm•wÙÏì·#/ë®»‚5¢~EpõV8xUŸßÜÁ¡ã®‘Ö{—ª{Õ´XŸ¼Ípóê²Y‹¸ªÍ÷í¨wçé·lw©ÇRäî¡‘¸½[7£ÓVè%F¶*ãx¥5rd?ÚîTƒ9³·“öCØnm¯Gñì={ýüq_õº¢÷ïÓJ¿ã}»à:™ç»^çù‘ -?Xº“ÑCd\¾zhú"ˇyÖ]~$GÏc‰~y¼ôâOîXßò”¶Ožžz«ø³å9à8³?Go"½ç»ÛFùyÙœq/L—µ½T-£— ç¹{ ÜÝe_ .’{íß½on¦°{K/ÈéÛËã¶ß½(ôžºW‰V»û¨5º»Bí¶­×j½Ö Uë­=½z?|³mÙû ùØ_– -Ýë{ÿÜö‰õ`Þzt™"Öb©á,­èëÏþ¨Ú¾ß¦£ýNStá½Úp¼¾ƒooÓ·‹è¸œ·Æ³éz=ažÜÑÉí]äq²¸)ƒõùÁÖú…Æ“wþ±z/Ħ¼c6˜¶2InºùXug—ÙÛð¬³#‡sëý<>O\6æó*W\8]¼s‘u’O‹~ˆ-}\`»,^Óí帿\QDÕ¶ª5^ßVK¯¥xfÿ ¿ÅCŸíB׺¶Ðá:xn­_=\~ã¶xEæÊ‘¿zH_‰Ç ¢ûÛã•wœ#Y¸û¥ HÏÎðùQò_4}_o<ôÆy¯Z½ ôVüÞ«ñ–FŸ(æò6BïÜî?áQúª¹MeF|~zm«Çº™щïŸRÞXƒaýà²ø*÷ -þ$Ý´ø^M‹}HF-¾ñGÝâ}¼¹ ‚±Ž YÒî[fCm*0¤Ì”¹ª¾ÅéT˜ƒŸ8Àç³à"äÃSâúu˜^¯ãï«Ì®›=•m§*ÅÁ½Dóça¦xð6Ãi`tXüD…BwéNüÌnnê±^Ò·º)&Jü&¾¾lr˦•]?=xö¹X2ÖÇS#‚Õá*:£ˆb­ž&®ßBÁ3;õf¹®’>ç¸% -ã«£O)xñ)‰Æùë‹øåÙ™¡œqkFÕ$Ü„óÔ•íú¾æggvpI; £åÍzÝ´ÖO³p‰2  ²nÃyû=¸u¹à5/Iõ€ãõsÚ“ÐåðÎÏ[´-„À2ËzcÁj}Y?¿Ž:@co¡OÏŠÐúÂ>Ô0PÄ/ -°èµmÖ >h½´kõ^Fëê™b(âªÚ>wœ6P¦óHäJ—5Í™ZssÞÎÜdnµ€¹å´¡Î\Y›£Ræõ€‰ü­íA½7mÂ> ¶”[a à;ìÁnœùW Á¿K4ô9mÔ Ù§iNô!N” 
<€zŽfz=ýßI]1°²¹–磡.Ðp%ûÒÓúBTgÕÆ(>§q¶ä|(˵&ÐF…æt–óL‰ÑúÖ#Ñ,4ÜÚs-í¶rþ‘ÓÚ"¶] ÍáMp|Šêkä` DëÅ×Zæ-—åæJhÛßrèE–°+jÓ›ë„xðÅ^´Þ†–¾Ç¥ ô¹g­€Âº(ÁN#3gQèÓñò’d´V½Ö•„»PX—7úÞ¥;WkçÅÔzi±¬×¹Úõ1M•Jx­áË>%«¥ÒŽ\´D SÞ£úùÊÍ×(h{×µb¦îš7²ÜfPŸ -è™xµ´W'•:€MlÔÎÞ¬èöÊ«…vââU`êy)*ÅÃ-‘ÍÞåЮáR‰B»Å9tqšÛ©Ea,èo @¯È’_ÔÚ,6ñ@_Ý•Ë göõšÚ.,TzÛG`‰#·è§ûÌ¥€-jô®×Y¯¤SksÕS ÅFëâø9Ý/øußöÆcË™ÁÓšc­z*—–Dörà“ž7ŽlrâúQzÚ:ðåzóU÷)PryðÖ×»b™½}=}OïtŸÆÚµ ý·Ûì“Ëàéõ8£ÿô!ôè=`ìøùóÔUÕûÅÅÜ<}ªžÊ1ê=×}›dZþ®þÓX`tg€12Å3ŒþÛ7qz©ÿ´ú‰ê>8VËWŒYÞtHzûu­~zQ‰O¥§=µä³PDf®|ºåØÁCb¦¾wCøi|å¾\|ª,DÑÓ­§ÄOoÅ«½opkf¦d:,¶3¶a±‘¹ò6ša¿Å*+¯¼ùZ"¸u¦s÷O¹ÁÁƒ.lN ßœÄznXDkäùU]!ùÖVÊ«ùË ù9ŠÕÛh'ô]œcáŠ<ÎAà+á…¼àûR#ˆ¼F—Z@AžGH- ¢—„=‚å@Yòtžu€vžt‚i»¢Uö˜|®ØÓÑŠüœw h~&ziqË2 (Y@p-DÉæ:p8l2#ë”V ‘õ?ÓʸŸ1н=&•æÚ èÅÖ¿PðÁúSE^’ˆàW­¹Š@ó¬>PdýëÓ,ŠîÁS͵S×¾)µô"›B¹¦gvô܇?Z®wžXPÕR»&L´CVŸ'’4Ñ^ˆºX‡Þ8f\'ÆÍ!,sfóùû4å¼" -1´:Â÷e}²?i¿'½wÖŨüV?p 5äö¦–›a ð)#DH²¶J4ø»|u ÀÎ#Œ`UØíY.+©I#!=ÐC'\…ì²?ñ•kÐ-fÜÏA^@shâSh¢ˆëá!§‚ãaÆŽþØö¸‘|1Ulfð˜!»Îë=å˜Ç³vñ¯²<9¤Ü79xIêAÙHßéCå1QMTH÷^¹²%c”£8 Æ|÷>sð«µæG_DKåSóC4ÖO6´‚N«î -–ñg[îÓh̯‡õÃqS+¨5?bø1l¢Oc"²Œ:ÛLÛ?!va.˜Ü—döy“ÿ ÊŠTxßk±o`þum–s„(œ²FéÀãw‘µ=bä -†÷(…OV-zöóÿ¢èéøÖgv -÷CV ÖÁ(;Æ]ù҃ѼîœX¿T|{ªÕÜ€æ/Yêy—*i‹mM®âqZ\Ùñ[>µ«ÈmE55q]\äv`ÒÃòjÏaF³Ê—âê‹sQ’{ö)ãÿ…µgN)aöl¨ túS×]&bDZ;§(PBË^¿#ÆHK§\"ÑÄ*Aiâ‘OÝ®€ÆR|ñ»©T~ך÷*¹.‡¹NŒZÿŒïzÔÎx%Ï@fÙ">üG”‚x“BC -v­¯V¹àÒ^N»°œ²0ô^ïW3TJAyg b¸½ «G»uÆU¡ð$ñþËÑ4ƒã¥7vl…,‰`'*–$„{ɯ’µ |̉žAžô¤>rÚjI&Z}=›QÖ|ÞÆ«+×b0ø—¡b2k=åUr[JjdyäŠW‡ä`HC«ÞûÑCùEP&]µ¾çÙë‡mþAþ”IghóË×·k­¶”L­ Y-ÿ—ˆa¿÷ªÝ™ÚäùeÉ= ÔYðw:C]¿‰1ò71Fý&ÆèŸt&ªe‘Ð|*ÇHøýIKg7ûcë˜JÝôåÆ™]eÒjóÁöÊbÚ±ÑãÊ÷k¥Žÿ>Wn¯.NÑØ‘£Œ÷c5±³µ›5÷ñhPæh<ŽŸ`ã†v_ï e²Ü¦¨ëÌåõ«¢å˜wñt"c\ÑNþNT¶¥á@öër4”œªö »Öm^CA]ãÐõ™ib9lK*“ŒÄ¯\™50øcZ6ÖçÍûöD„D@/¨ñQ0áZŸ)ćƒuu¼™!¼¤Â/ °ÔjYÛß7#ž·.Ç×ü}}¤ÓW­fżÄÖè—çÍØ<'+my%Õð¦žÖü`]¾LTÄÈùyÿ…x³Ôñø€ú·_ +êÓç˜9Ó´Ò¾…¬ÞƼ®c¹µ(µ&ZÎL †5c[VÐû¥#U§S¶,Ú¬RfoÜŠ™]?æ;aj „½Z?öøŒdª`>¿®íd™*ÊÎ"&¨o=î*¶ý±L¾ohmRhíŠnR USú¢_§xiõUŽä×9÷¢2:e6ÿûùÂN…Þ^îç¤æ3£÷„®¤ýACëB¯3eXmóºTö&ú- òÄ„†Ù¸êŒ0!ÎÌø]¨3Á¦æŸ|öªui,Ô›­Úë+3? %Zk‰%š - -üúKm3õŸZíTaB¢ˆÀ£ÎÊ'œMÐ/¨³šå§íNW¢}‰÷aÕ¾'ÑŽWÿ$ô¢”h‡}±/÷ó {¯¸¥DSl½Üöq´mÅ‚aɯïC+¶µ­”•œUÞÜÞcsþ¾uœsõÝ Ø|g¥ëFŸÙÍš¬hAOHF]!{´¿:£¾nÕë‹6Ø{5-f²OOfÜž3»!;·TiMß --1þÇý¨R T½È¢=§ûy26+õ{9DHp?æS Ù°¥bB3ùc†ÚðéSeß ¿)u¡RZ~E‚ þÜãKƒ ¹7V`f£YÀ/?ɧ8WO“}Ô1XcBk›°ïMh1X·ŸÚ÷b/ -]øM-ýèÙ÷_Òb¨ŸoÙ÷G½€.TÚcßÖ†mc]ûª6ôkhÃ{c]¸Ïƒ5£ _×Aeyê”zöZ;‰hd=úZgdrTRo÷- @{–>²iî†ÎÆúúL&ùÍp7t65Ž¡kJZŒ™aq3Îåës¹é,5»&â -„®%Š²:Oææ¡A0é)æBqKµbêmŽSoó'M±“.¥~(ÒLæšr\‡  FtÔˆ½4H¥ó‹Ù¶Ð™ÆîÙ½y¥°ØY*ÿ÷6&ãÉ:{Ê• šH¾Ýï‰ë˜(Ÿ@ßÒWP„Á¤¸%”A`Øl®ªÈhMËœžg¡¨—ãF#.ók5Ámªœ]¿Ä^ukèŒ+èÖ…ôé5tû: Í -:ÑNþq q>Såjè4î+è´ª¿SCg\A'V þ¸†N辂î·üY qT-øÓ:ã -:jÁ/×ÐWÐÉVÿG5tÆtBßÏkèŒ+è¤,èŸÖÐWÐIûȦ*b«Ó´ÜªÛשsà3J^wP_Rz}·)¼~yàÆlÊ©d'ëÖ -¨#½§ñ¤éͨµý·ñÔÐÌ3“ùÈfñ´T*z<³ÊRU -¦¹Ç—Áû -ߤ§ã®ZËÙƒ¦çgœÜw¦9CA=—‰êIUùèXžLî3;¤“‘+<©dÉ’9ež’I‚Í>tKd4C:ZßKöÔ˜Ù¤èŽßb.Ò{:l Sëœ}™© S¦C~#ô~ö)£¹õÿÕb7ƒx±ÙH/*vûB¨JµLûº×Žoe"Ä1-šnˆèY˜uDö™ôrÅ®¿(:S¦`yŽˆnî”OnR&çL¸½æœÞn—˜Ê*¿Vd*oÜ£> ÷ž¸ße°—m8“aL8dŽã÷Óðú›,G;V0«{ý0§Öh JÓ$£ë7êøNd¡Ž?Qÿ‚H×¥;¤ƒ~䕹ŽšÅ…&‡d3È?,ž¹B+eÓ¨´ðtŸ2‚ù½õëøÔ;? 
¯óܸà,_ùÄ…ò&ÓÔ(ýϧ³©Çz_-âœi$Œ)@ î«âÀÓôÀµH4ö¨áÿhT …š¹CXeŒêzTGö0m¥¦ü¥"*ûB$Íô¶â¾;QdÄ@I¦ñt=3„þˆ?i‘“4„Ç„^Ó}‡0ôE z"þ^§rÒ˜•¬g´Žs ÂGÙúÚâ»°1¦#7‡+±&áH;4þqg•„šèÁž.Ɉ­[uËÓÆ-±$Ö×¹q6#Ç:Ž2ºb²ù(åÿ,QçµéD³y3Õ~æ‰tŒRéÊ“ÛBs=˦`-4¤³Ö˜+*t\ͨ±5[QŒ§ðQ—ºYjŸSéɈôƒ×ßþˆ±l„Å" ïÏhDäTgØkÆÎ㊭ÓQ±ÿÇ 7O‡AÁMád/a‡ÜÖNOŽiÉëûYñlÉ®ö“~¿¯Ãw¿{Fë†LŒVšðö\©ÒÑ´y·(ö®Xâ !òû_>÷ýRçJ©`ÌBâø/ÊWÂÚr¿ºÒ µS•3ØúÅ·/¹â ód…s–U]ùc)f̲ka~Eó - nˆ?F Kó°8×a"³Úbý]nj )*ó–ù3B8M>†­”=¨|ÝÀ®f€}….²{šëIÚF|j³f!„Ýiº’‹{ïVn]‘Œ¡k½º§- Ê¢~¥#TC„…–aä­œ*Þ wÝSgCºïÎ=ñ.?õF¢ZîÙ¬O­ä Ë>öü:B)s[bŽyªØ²¡§ ÒÁujh«”_¥Ïf¹Ø¡Â©ü—òWå§ó£_ â“óãU_éˆ}MÐÀkË_Ãî¶Åð$8ËNãgð•™å!4åFb¤è({:û|1îù9­…ÆÿZøasœ3jâp\"øh½Ðt‰ˆÜL{¸ˆG9°±Æ<%zt=j˜êÔý_슄dCŒÄVV§•x`,¡ˆ_+k%¬s KÕ@’®8BEç|âV§q¶FD¿E9Ô™Ò¨ÇÌg×SW, -#(Ê+`Œr–I­µxmìw=œ«Ã¾‹;×qrªÉˆ÷¾ TeÈJ4hé2÷禄;q„ -L¡çHý?Ňžç×ì Ü÷4à !®-3C›äõÁ¬xki‘ $@sdf“ס¨r‘±5RåÛ¡„…‰ãν\m¯COÀj¸S#.f#t*øMj¯ù)¡8¢>‘3E¥zê£ÒÁ|Åà -jte•+ÕYÏ4CÊ7º~kD2éNè¤GfĺúFõ»´†PC†Ûs¦S<×4Ž^ø,º-¬Ä3wÄ{ûïŽ ˜TÄØs­lî÷3‰Òž.A«£D‹ŽR/68ƒ‚Á£ -Ù©øIxíì‡w°!#É¥w-2›“ªèX#¬ÊWi¯£ÀÕA4I1nnzxÿþ—ºã¸cYŽ•û¼m -ù·ìnEÙ/C$5k·éo–¶„”Ýl z_ßϼw³Ä¶¨§"6ÊJžj+¢:œzz%¡íÀãÉÏÅœŠ1ƒKJØ¿b¾ùá0µÅªŸ 7Á[>¯È×ÜBtFÏtý!Îw›’ÕEG+Û[õŠìò›~-'„ê"Xî5d Ïp„lå.vÎÂõÚA{B’pÀ~šs¿Òìº,&kfh:=Š}öw éi–‚\©vŽQÖ¸kßR^p*@-ˆ¡ÃH8±ÇrÇ{;6DY_-ºR¸»kYÕÙƒS ƒ°?>ŒŽ1‚Φu¾Õ‰|dV®|ƒT–éCˆ½J•2•C`PëC'œFV=ÿh7û—õŽ·”?J!°1dæ“ õ¥¤Ÿ`o½¥ªì¤÷ÉïÄ:Ú u9Cg›s%ûz–Í7pãí½]óMßVô;BÉ̓!÷Ï€0Ðô5ãúvƒ°µhúYgPeå"|Ào%´Â.ÛA±Ø$“ï’:ûD?t™zRý')TvËœ ¬Õ­A®æùÍÀk5BÏûI#TúYкúˆ”þ°Õ!ì·¶ÿhT§Êõc§üCs{Ü“°GãbPYíU|Å—à$m”Ä›ÙaQý­fÄ  Ñ!Z3d:àf,Â(GRk§¡zîzT €¾ë~¼.Fsœ©‰é -Ää½ÐÄïA±G ¨ŽBGp' ¼òœeZP²u5‘•aŽ‘Øá„Š²Z&ïK‘T÷ -ûñJÑЗÆYÃa÷ç–(z0”l3¾„WÒ>)ë4:Xy4 ç÷ ¸Û=nnBß³ÕÓµ¤‰xÙÞÒ{)ÜÒXPÅØ®ZøÝ$¦ËÀ£ùeÛ´£{°oY -7CÚ+:Õg–¹_¥å¾ÿ%³)ªšw F€ïS!Òڶ爓ÜDI_|ñÚ{Þ]܉ùUè’ÊŒT™j…ȃ‘pø3«\Ft1e‹l­G\FM…FÇñbn×j…ŽÃ -לÖ&¸wh˜Í*rz²,Zʳ.+áyUõ)\FiÍr›ãÓ¥Æ>l0d>àÉ, ,80#Ô} Ñ$³Ù§x®‘îbÕ\ûšÃH©[+ïNw¾Ö¡¬ÀÚØ›Aø˜º;FÁ‘‡ƒ9çÓËqS÷ˆÒê}Ä•yö¸îˆù®z4°È‘f¹2ÑR'\ÑÀ×~`k¯ì„_Ï,‡ ˜ÏÜí0${ìPÚÐBFI¦@¬½š_ô¦‚ä°‚…§Dv)IÔ p™~Y÷¾ÿE©´ÚT5çGA@ŽXfá°ž!­U,oÏ*wÄ(8_718í¯9«‰ï±]:ÞË“[X );©YK÷‘6j@H·wäAîÒˆsÔyDpÛƒ¯<ø,HYÿržŠy*Mq¬3ĶQ -t•‹‘ô‹÷`\¢<@’.|Ê:W¨©ô½0[]OIƒ"̯íÌÃIþ,éÞ¿&ý#rƒ›«ÑáºlÉHF \ §"+­Å®Ñ VlWÜßÆr^wM^¢(®!0Tæuûþý«zo®=†x#½8å+À±m»ªW¨´éŸ…¸™vL9Öç´GÐ%“¬ým¶ô~ ŽQj ¯n£\›Þ^‡Â3d8¯HõZÞ×:ãLïúw-†0ÇYÏQ©öi0(Äéã< ¶V”ÕÝi.ð l<+£m†ÖQ$qîIKÙI§„mEdz:¼Žê'EélÍm™0\¹x4ÜOÇÉsÚ¶YåLm¤ Zÿ‘ÜZ/nh¬l3;6í ÷s-p”{ý2”õÄ,²g?žƒi&Ѹì ?~iúôÚ|AëäS:”&O#UÃ8iTÏŽÎ~ CÈ“©ÉoÏÓk,{fÞ žÚlÑpfîÓ¿I(¢ã\Ç 4ø×õpd¯À0Ž(Tfˆ…‚ ÀLí=¶hËíú}í9•î "n Šíþ­ŽlvzâqäÍŒ!‹¿’½€Æ#³ñ,cã AØHhK¥J°‚y+ÅzTœ:¾´ð]ù*sbO¼0<Ñ^Çn3‡*ØëƒIþöŠ­~fÍÆ.9o1‡nð.£–üô™—S|­µHÂ9ðÍA¸O[áW¹Êï±¾¯ ,ÎVô+¡Nà@%òGà5±V 4˜ô•Hm­ -V=HžAš‡äµú5ä”x`Ü–ƒfßê²q´)L•ÊÖ=Å#JAòh|Z¦yÁتl7uïBò¬xÃS5h~­'6ðð5œW,ó3)ÛEÂ6eû9ÂW³Z_ššNu¨««¨ÂXªU—t*}È‚,S!sW§BÈZà«â(¾`ý#¾TuÁ¤ú®×Sa¸N@Q ŽYOGùà í¬›**©FvnêZŸIt—©«‚'Ñîí Ë°EÌï¨rR›ô,ÒGóü¨‘oaxX;;cL= o\ZG]8"r†èZ“É8yA~?)Þ͈£Àöb1cWiB)jÐùß|ÝÕ“5œË<&þ!zGv¿Ôr¿ÿå/a‰ëJMóÙëxd -DXba¦­Þ{!\›6-V¶իÛõβb‘¬¡Y"‰ül—E¢ˆ®?×ãìS0z!ò¥AF2 Î´Þyžég|;Pr£«âÏ-ãa{LPk­é9I+¢ôìóµÔ¢ïGJ'œx”8ŸéC´àa´÷ñœ®\ËaÀ´|*sÊd›åÀäž.º)§¼:t.˜f:š]Ü°y6±^v¥÷U÷´@~©{¨P¼¨gœšÒîjæJÏLým#Ö•_îà’bT›D€ÊÎZD‰Tá¯Õ¨~ùœN=¢ŒM›AcwG†¿µ±ûŽ®"Å©û1f|á‚à‘r¼C( ®OX-ø!új¼ò•TÚ ˜c¯ÿ|œ³95+(áëƒñæ86tÒIwö–ÅfÞzÜ@*¹¹lR¬âÔŒxTƒö×™y®‹rf Bí6ÇŽ;´b“%Øé¦Sõ Ѫ˜GEkïŠø~À Xs?øFo2ê½Åä3§ºà?[ĺä@¹+†µ>©]¶×z߬ñPôÈÔÞÅ•æÉÆK¬óˆ1$ñÎáGW/êˆéž.x£=ºC´­ðz“6ѵý¦6œÕCœ SD†¬‰‡?ÎÆâMž·_kåGtS÷:yÑõ¶gJØ_A¡4mž§××ú+ho{êú,tv?æmÃ"€C·b‚Ðî)ùÎ-¸ÚXRDBAƒž+oí¿ÿåkæ¥j)ð!»ÿGoš9x®i” þÆÔ¿çÜÒ®1h“Çî ‡í’É¡c]z¼ð+:¤§L d-?T×nQ€P $âË gG2ƒîØ@á´ä {Ûˆ&ž/œ™ÙÓ‡gÞk„¾6ÖºÔÝ–öæÌQRÏA驃×ç‡í¢ -’tfÅ7ˆ¯-^M!IÊmàT7„{]LBgÀÍ)³À™OöÉy4°“›æ8îçTŒ=W«+Vh]‹[ë7œ˜vP¼ˆ¹O3ìÑGà‘H¬°zÆ÷®ÆA õË{*àî¼i‹(oÅòÈõ°;Ò5[»€G\R/§ü]W#Šj#º•ãõÁ¬øS6­ûz•ZPüŠ% -…óQ߈`ÝÐæw5„i³Ú‘Çþû”ò«%Ý×öÃÚ”¬¦O¢úG““íÚŒ -Œ1 £§KÊ,õ„T¾¥ï.@ÝHiµuõñO‰1wì̓$Õ^Ò½kº[«®ÔâNng‘Ëz•JpO‘fXt ¢6A‹²EÐÈ'ƒ­ÕŽN›ð'·ùÈœðLpK0 
Ù–»"…GjÐR2z©{²Jûª¯ñh´WXÙbæÆN¦ã‡U+b9꒩耄™{}ðŸ;¬lÝdÖæÁ*AæV²¥),Ác€îr(û¶UÏ|]f™_*©Ç—6ÓüÊi¦YÇ@vpU%O;$l´c–ýÁôhÍÚ¸SÀSžT5t/lÏleËasŸ!h!Æ òŽVIO×1ö¶î÷K+W¡Ë4i‚Œ#ͨf[ÛÛv§ÐÞc HÓíñÊÈ#¢47i¼>¸«§:˜øúŽêç–:ßÁ«.æùeÚÁÈÿh%åýÈBö ;Ië_(“ßÿPqh¦r¶Ï§ €ï&ùdáÿ6)BÊø Ý·C:§È]Šô a5y UÌ$× âÄ”“zÖ¡ÔÒþ©ŒäŠ$¸,¦)z‡¡Û4N`²µ‹Àñƒ;æ/CT9ÄêÔkHl_I*ñÝXäzE'Ái&o_›Š -Ëú©‹ÔFDùÀFèGð¢žéÄ›¨`¦.Æî¦U{Dc'ªÁÁÈmÉ…ˆžL#Z&ËÐïyDnÉ%#èœ%$mmåD ßtígTn‰‚†Â~=âbLÖF 2ú:1Þ¨–œ”Ÿ’Ý–꟩š>°À_½€üS½dv†ßTNuV|®¥t6´¿Ö©9Uê‚li~c7·X3D¡ŒÞ+Aïø£é7}èßåÁ XkkIšöLŽ# W‘ÃÜÅËŠ¶ìFÈ|m¢€Xï‡C =Óëƒç÷^6S®Ã°h xz8³­XŒùt˜‚pîàkœ4\j¶ö¯ì»û¯¶û߈Ži¢ý•ÊÖJ6`‹¯h·ЗÆï•“åFOý—è¿×‰òDã1¸Ë)”ÛÂÔOð;u]¾F/‘6¢÷åÓe±]%Cö⦂ß&0ÚÔîVXM^,b"ì+`“W¾Oña}b,]0¹ -{¼k‘np}ÍbO3'åù«¬46}îœ ŠÇ%l(s¢N2);×QÌBë  P„h˜!È9Ôú˜uâsb¹ÊeDS/%Zh.˜x´Ž¦i=>Jë à6ê(Å!Cè«nãaókC' -z~ÿ¦ÞŒƒÀúÅ¿™P;‚ÔÞÓ«­ŽÂ ;…ähU¬éw‚rfßï#»oyïC¢KAäÕ0`¡‚Œu¢eÙ‡:ƒwŒ¬Ýƒ| ­Ù†œ—2©$è‘âFŠY²­¬ mP”Êà÷h…{m;¿B?žã€2iÞª{†4vVéxr\# J44Þž!²bL5¡„!BÈ®Þ -Å’×| ^\7´¥Aì«.~}!»ŒžØ0³›z8Jûz©cA8W£ÎÉ”(Ý °ÓPqçÉ?W¶¿r -k ¬G& .Ы3›4F uéJ—ÇÁ¼•'{’ÑÞÓÛWh‚áð¯³FÖT›·[žÕÏ$§îë£Tâ¨ØÈÁJ’Ûü]^@~ö×ZЯNCrlŽKKåŒý;g"ÃS -Y7Å ’¬òa¶+[dÔÍæó€pùH£ °Â+°)Îô¡.þ:dÝ”Fm‹ZvºQ5H[Ô«©Ÿª¼TŠ‡Ò£8§¬5'ˆE¿Ð­j›î?ᆪŸ¬Ø{ˆ¥ë—èL‡WïÏ GlÜÃÁ‡Çæÿ•ÚÍUvTá(NZ—í`œùvöý#ô‚¿G¨¯Ä‚,„1lñz"Y™ÑžÙ -‹@‚j{FÄz‹ŒFðôgD •®‚¹SÞ€È^Ò§^p . ûà}iþœk˜3:1¤tp*ëL”,Eu,¢Öˆh7‚1?di‘Zô=ó=žåc]‹ˆ×Š,IEÒ̺PC¢½ј„¦×½^ö‰iEæ Ñ ËCêOOq…dÒÂsÖ=…Ëx¢Í´VBî+¼å’£4¡ÍšgD¥ªP€CÉç¾hÏ1 -(/Ûƒ8:ZeìªK°8)Õ Ì0ùÔàHñ&Zê°6Z©Á„ô/æÈßÿÜÈ¥^¨î¶ü\3ïª Ë0FÞ +ïŽ#÷š/ý‘>æ/¹bµ3‚·›-^©„SÊ>«ª*Ù“!ñ ŠÌõ 9Iví¡šƒ°OT}GŠB¯[ФÏzAT0ßîH{‚9@4üíÏ´lìo–L©Û#÷Œ5Á<¨÷ë9“M+´¡³z˜Pà´¤ï¨K—R-ç?À=yÍ@eíRt†ðšMW ç$?ƒþ`½ëžÄýøý¨1mÈ2Ùvp#g" œ4L×>F9-ÄÑ# -:Q‹ôÌ»™œKLàG’;Ÿ-òe®ôY\ƒ5stúÕc¬¥Í¹žÕo•éµ }0%Þ‘'Te¹l>â¬Ýh¯;\Uü_kÁeÈ÷’ç&¿‚«‘ç™Ì¿Oè¾°›.ëáõ9\Åç¦ëß1û€tV°y¯…t|;ËqتqT¯\I$‹¸ÁZ¸¢eqä˜õì5âfÚl\b‰¯1ìYª hGáZÏ5kE1)‘%Õ2*•)ðÜ°½ØkÑgÙ¹|lôµ6¶»èQþ÷uϽ Ž¥wñ’–É>a¼=jÐZ#óG×+E+‹ç0F¤­b¡Ø­|+6ÚuYw=:æÛ‹+Ö*+ä-tÀ_ór©ggyÇá`ór1„pZ²úzWº—[l:_¼£w­)Äð³DF/ßc¯ðÖU¥ÒÖš7Í¡†¤R¨^ ìü”òU¶¹b zsÑN|§ˆÈ=Wß4œ BSÆXçÆnÐ#ï=t5laÃk™–¤Í½‚7]–mÄQˆC“!i7߉%”Àég ÝÓí›Ü;Y5RÓK·ÏŸ¥N¢´þÌ¢}ª³X†€ÂÏPËÜu !õÍôym…{ Ñ@B%^…S%¡Û¼Ï2åŽ ˆ‡óµ¦,!ÔàF[‘!Ý'÷=j+˜\Ë­Šâ2$lîüLËÛ¨¬›ŽH“*öz83Öغ`ë,†A $øÑ)nI·³Ã;¹3ä˜ÿÊ–XbÓ´ò]ÞO¹‰DÌ­õS]ñ†ÆeƦ*[©¬¸Æòò:`eF´r°=ŽòºòÊïT>×}¤hÕxMÓ±/Ǭõõ“1ZÖ²ÿï:SKÒŒº˜´æá^Œ®›XÈq¦äK¯Fž“' ,`Ó ]ÇYKÙÚà&¤b‰ã á¢äUsW#dˆBHý½Ý¹<Ž^§ÂÜïÅ?Øë¾Ñ«Ï<¡ï¿}~ooé=F%Êc”›<›»£?¦qŒu1˜à&_t¬ ƒÅYbåé¶Áí‹àöù«ëÂÓ_ ­=l„»¦,á#¤ÂÓ¸;ìF5ûxפmÊõýÃ5õ>Ó@qÜ“Œ©áŠÚ÷H\¶ AMkt³ðóTvÅÉ—Œ; Óx¹QC%ˆ1¥|¬€itD¯»ŒÚ8D#<ð«þ;ôL´.Fäìiä?C8::ÓáN=_dÅAl“ºÒ\>Q>b?wÆñè ì~Öê¼²móÕ ÆëƒøÖ4Йå&PŠ [pßHv<‰\D<·³b`›¢·1ñÎþËKüä—PË(…Ãy޼ߟö°½apQ²èµ¬[Á¨çxúëakל(VTø!éñ什i¯qø†h7R°„ë(8Lvß4°zÌfA$=k–¬¨‰ñóÂJu½=2¢†t —­´!2ÄŸ+0ÞÁ €£³¸ÇÏœJÆsÍXŸõí¹b’1+‚¥iÖC-“ç Nï1ƒðpÖN… ˜²ø Aíꊧ멶 ù|²PŽ¯‡ÅÆô„jÐaATãxWw4‰@K±¥ |ê8 [¶(ª‰­? 
&".93$çF0h­.ÿþ—AŒg×7HÊÃ#ÂðQSÿ¯<äÔpRôÖ2H­#â­šµ†ŒšnóyÈ(Ü;#¾š‘‚y}ðüÞÞ~]f¬é;·R,ZŸ€Y…PÀIìw¿lq)¥ µWÒxü}÷CÊE×1³È„þ•®ñGE¸_+ó!¡¤8EF?Q†¦Q¤Ï·~dÊÄC¾£g~½I(¤¾Û“Pî;š<¥Cîk”·:ª‘èKÍýxuÚn|nÛ^ô´§làHÕÇ•t·ØuàdÜølñ­!º[p’瘱˜'zd‘rœ.#O¹n×¥ô4Æ "IsA¿+u·jm×»ÇF'ëxN¥”ä@RwÍÑ”D^ç–B¹µª•¤§ ®ç~üÁ[zëÞ½À -ëeÜed¥‡Íb?Þ­JRÇçp¸ó€±év3âò¯mHÍË_mýZÿÛÿ¥¦ºå)RºÒ~[× ¬A"n=&ª]q úEn¼žaÀ*[*è?Öb[Óç›O2ãSñ8× ŒÏDtÌÈç±Òd=΢é€Ø -Ò•!Wª¯W‚kÈz[ŒX F2°%)X{Ìq\ÎñlH&K*£D±âV›[h°SÝtÛY§RÙC7Ñ^C.Öš´\ϪÎß\Bbþqx_qn_ƒ"œ²ôYƒ8þGœy°b‚ùüD»Øðº‹-¢˜ýËìùþ—· ÓÇ°|JF!${É O9R;M¾õ(÷xiž cHP-ék(ÉQBz„´q–W„‰¬TJ eØÌÿVpŸœ¯Îð×O~lh=ü^GÊÁw‘ò ÎÞºG¨òdð1ö*­­!òΛX ­oožgyF•T‘}wÅ›´ÞR·f~†³Ð{™væNéwÍ¢Ðzì†è¦ÝYZVæ½NrY XX ¢«·JÂ|ðÑ!ýkƒ>oñö¢\¿Öf$Úü,uUŒ^KøñúàUý3´ó¨`ëÑ*q¶r -èàYëºÄˆ²”¹•á®w˜o~WîÉåHâݵ#CÓ²Ž rÇÇCË{ œ_îX8¿3"‡ÛžŽçÚ6âãý†ñy­ 7¢Ÿ1gà-.×Hbêe†Ø¥"I (öã Š¤’SÅHq’œî†|>Ç A,¬çµÃýß\.ô¬bY Äm¥Ë`8FPàIMQjÿt`#J½þÝÆtÇ¢‡µ - ‡3bWÉšÒ6!T†©¹=¦eÙA@m-Í}–_1-2äö¢4À,ÿà]þ‰É'4y’¨Üv\ Š£>YMQÁÇ–>úfÖ#N‚ -/d°ãþâëñÉ/Š+òTx@J– ÷š;–V’Õ)~¤É®uJÍÇãP>ZB9åÿñ&z-TàA 9×Ûò±€H]WتCJä›Ú²c–æÚ½aÕ’€RЫ™¾eC`ÛÔÇ·¢U´µ~>æÜÇ Üf©_bßßÓ?KÀ8íPɽ–ei†¾ÜCˆH‡X\„¡Ã}Ç·Û§Z8î|ß_i¤M–¯Ù¦ŒsÌU>8ê:^†Ä)Œ‰ŒÂÄ*ßRO/E'cvá:œû­^øÒ;Aã`]ñJe0ÖhQPƒhåfhI·ÞÈXU!,Gi^ûs* à+rw=ƒH¢æúÑÖqâªtm! Ö©ö·«hÔM}óüÞ­ã]š ï&¾É² îI®q »TkBp-÷€¯ßeÛ¯TÅöå5·¯ì¦®Ç¹SøÚRAÓ‰^©å]´q8áy•á@p£•SÖØZ-ÖN»ëÈÍ>Cø‡vEYÞäfå'°À(ã7å³…+Ë›©Á<©_Ï•+bs*™¯–´Á›8z*.Œa%~GÖu/"fâÁ<Òs{ ¬nŠ/Ò¢4¥Tv–ÕY¹ Q± ¯fûk=öâ×_µ “}ýmiËçµ÷øÜte0÷7ÑE]-½Ë}yðdž×®ÏÏø(߸C6J(¯Äõ`€·´­÷˜ûºHÝzÜvÞÒq£Žªi 3ã -Ö÷ªtïì4ˆ•ù–öHy±Œ%Ó\;w*¯€GÚ¬ì‚0‚÷^½óº l0æä˜~@j¨íι§FæÖcf/q‰¤é…‰‚ÀµòÀòu¶ -¼Nï²t0j§ž¦°†\qâ[«17”+CÆ«ÈJlEöLÛ=“ë‘!LÝ»É%igJ.ð•=C”>cGvjÛÔ™ -‡ÃX$þ„‡ïPRÈ1Ï©ÖÚµ'âNdGb »î±]0b=—ÌBƒØ•RØÛ¤Ÿ¥;ãƒiñØ7‘+8·"›ú¾Å¥[š˜ N" “—\õÀ#~µÅ”j—‘Wü•Þ6_Æã±l!ÎÓ¤’bž áÂ-æ.#Ø™š½©'D+oEDèÅexÒÄ_Wø_ ш#h³”8…¬É“§€ÀIŒÆî  z¹ä©Bˆ6*iâ×3˜QIvôÎú‘Z´pžÎ6hg©Ç?ùŒÈs©š‘+R¹Ç™?©ªÌ^C,À}(4ÆzL"¯? ;¦]Ÿ0XÌãÆoí¹%}Cý)&Ðu%O ±ª'_{‰ÚéãÚ3D_ñæùÒêEÁô¤Ó™å–zDcÒóÃQ( ŸÖÈ|=_“«o°¡÷[íl¦­¼Õ];qÞµ0Rܪf÷{$†q zöl”‚/Dß[—6%ØX1Ff}þ­ÿí/Ÿ›U¤ìdìÚ]o9éød}Ùá,íâ’‚G¾c™7N×5¤*´¦Ê[“-n˜v±kÊsÁ'!Ôû!¨.bÿÍQ¢q‘½ÿ¶PºØ‡êlÄúhü  íà±d@( -à)xÝu­„5×»Á§·TVˆgHÛ/²‡Ï”£¹Ì%pÖn#.pEÑw{gúØöë¨3mA=­t9>C¹`™,‚}'~=A Ó°½?4‚ÐFúÂ-Æ٣׫t<O‡k}ú‚Ï™h1Û±! 
ž—0‚°9brO15ß“?>³²SÞÑI£þ±-ÔAYVeqD(:ÀŠ!¤+Ü3 …!J§‚Âa#Ÿ¨ýøÿ…·|ôK4È¢"ºÕK,¡Ñ_TDiÝ16™Ä?Æñ Zß›¶Ä Ÿ¦©%ØÚ¹¦È#|Ê’WIÓ”F…Sà‰žåéæ"Vµ–Å;z¥»š¡o ^.Gøp4\Ì -7¤×–ܛߦ -Å‚4K=•ˆ®îYêü=J¿š#ÖÓ™uØ}½´#‡ñIñÄk[]ÌN -#ŒWšùN,c[„DÓ%O<æqçlÞmI„{ òm™JkýYËq_šQÃ÷:UùpýÕÆëƒ7õFóÐ~"d`sˆ MœÌ5éwgµî­è/mÅöÆqii³"èÛÚÿF¿0æ7ž®¿Œ©vUBŸøÌô˜”ª%¯\Z¡¥éN¯wü3)îÙ‚˜ŒMé_I -dDˆ#"oñðÎ$ád|vž‰b<68Ί«²Y°1¥$-T%×NoÐk­TÉ|¨µ¬ˆ}] ˜Æø¾WÊÙ°œô4SX»ïÚ_.ׄVÖ&ÊQ 4ãä€æ–\ÂFe6F¹gŽãö0s¬²{ ¶Ï^6{.¸Ó¨Ö«ŒrÙOïO79Ñ€5“ÒQðç¡;Ú–FqÀyÎÅ -'Ñ 8ÿ'7¹¯=ã¯ôÖféÝâ(:A!Ñ-¸ÃâãHíŒù©SŽw”^¬Š½òë¸!Q.b<×ôôûÚô-~jËÇ)ú´Y#X!â~FOÁû‚£¾ÕH ¬ g)z¼æOœBæ܃¼@«+ŽxHŒ_s"f9„´>#`(ÂÈGP‘t¡WïЯz] “ßÓ•ÀtŠ0ä­Xž'ƒ‰L»_纔˜{ðä%Üö<<‹I1ˆ -¿Sc¢Ãù~Â3Õã¬$ëÉ@³ ¡¦8‘SYœÀ1€ô>xOïÙ´sàÙŽHŠU­øÖzÓKÝîâ7 íÔlö³Iñø\ËùxÂü ¾àwì7*m|yXÞ* Èï¹ÅÃc½ãtŸF10¦è)÷³Ï³$LIÏ%ÿIßhXƒv¶ûÉ©>J# ^à‘æt@#˜gÏZŽWDiV(ÕÆÕ9£ºá¬Lq¨ÏVc¾<î‚`sP;RÑ1VµÄn7‹­˜x¥iaf¯ÏÏTj¹·¨üݤÍHKKw)€_ŒµzT m¶ MJÓîs*a——ɳ¥/Æã#‘tKovçœåQžŠ‹ô”\N˜š“ö–ŒsðÑ¥¼}§gh^®“]²à]zåétåz ô³× Ë«ò ÿº–Áî$zgäË}À’ü¦ÅZA¢Ä!P­2 ,;´pª;¨Ì¿x¯â(¹œß·Ê¾·õ¯Ò´JãóCi;Eëη<|CÑ÷ßAù¦CÄsi¨Ez¦”z4†Ä‚Õû™Owê¤ryC@1'‘¿}âõÂDäì%ã:Òlhë lÙ8£øsŽH±÷óQWE;¸Ðµ.´8Ðå>¢®êMlºŽwàV¬ŸU¸($þuU€ØØÅFÈÕ •8‘Oc±B]µè®“êu 9€" Yr 1dÄ‹ †¼Ä…B¿T~Px€*¥`«Â"•5}zyp)¡?Gz\•/ÐîL a\¹³+й3Ä›;B›ìÏ[à"@#…—¨ÍëƒÃ±¨¶¡ŽN¶qÑi=Bx“ÈÍy?žÂ0p´iÀk?˜ïÊ ’Í­ïWUfØ'óG‹i~ÙƒVØ«û«nZ\û]ýâþeéýþ—ÿŸ¹{K–·Ò=ÍÁ'a’à³æp& ×Êôìß¿è¡ÊØ¡Ú~̺¬³LR¥\ؼ‚Àºü—€8À/÷9öYö;ðòƒZ$ùlåüáw²IÀ•ã1ò!$@°`ໄg¯|Òv ->)…¼y}µ/DŠý©T\FËâtØq?(ŽŽž@ô*Ï hOdV¨€™Éü, -d.^±¢@hŒ#6Ç®Úö¨¾ê ýòpË°¦¹½V”-DÖƒª Ìv”WŽ³g\W“.ÝH´Èr‰TÈz‹<À¿ƒ¾Ð÷® ¹M,ÃÄï4Èc.H¥ˆ«Þù ÀƒÆSñ²_m!Cz©@—ûÛéDEÚåþiu÷¨ëº ù.–JM2¦)ïAA€­Y}ö è Q¨ü -3CvªWP-w쯟2™Öù­!„•â8êι<ÊÛ.ÊUå`JµÅö⌑ ' "8=F1©¼v¦Ö-|â©’ù‹¿ÄáÂâ(ô”éóU²n³~ GVî‹7Ù7?Ø…ÄÛªÄZ»#M»»J¬"nPöYÕߣõJ1ÀWÚCåÄól´gHÜw\Ÿîµ!"€ð~Îï?" }— ˆöŒT‡©G÷Ä PBíèåë8|™ÿÌÕ3¤EŠ“ë…ºPu¯Qæu5ñ9Ðó±<:ÕLu+j5á£Âæ©+’D…z­°_’Ô+ -Ôõt@âO`jwRÉ€!¾ÅLIGA&vØåT;à/H"ôÎ}RÐ-ÓxŒÌ+OõaXÂY˜Ä'3N­u¦C­Lâ[‰’bA›êÞg)—¯‡c] —{ìoØâIC)[Ï{P*ÙDHðm¹bP5¨ï9öïgÅ{uš¼dC :Zù©\,9{LÓ ÀßÜÉÊ+£Ï’ÃM2;Ù>”î¯=ÃO~‘—`ÓØדW @,:’§f@IöÂ#E.U -6Hõ6ûyŒ:Z24¹™6Â3„í—¿Š_ 5=5L!ïν†0®i[Ëq=»^}RF­‚ ‘WnÑôN-(®âF4B#ôÓ:Ó©F»„¡úsÁ”(PÿhH±ö£‘.VŠYn{MH’öÙïWùºEà…"r¯ ¢5Œôó9ÑABéLë-cÔi&ü&ÛË\K¬•ès×G‹å:t&€É·óÁ¬ý@|æàëHå;}ˆæ«W˜+¢y3æô’†ÔËÑ -פð`†õž¢*lðú@˜ÉçÐ -¡×g}\@ª.Mjª=ƒÐ†Mð™ØÎt„ûœzANÅô«øP²’†Ëª6½†—[N´vžS)XÁõ¤‚ÄÈm¡îŠw— BÕ8WCø‡i$£Dk÷•IÉPÅ€D¡»Ãò®3EA`28Ê.2:±Ì6µð[‰ÀÔU&àmøûÁD¬U›ha6Ä —>s1ÝÖ‡xŠë"íVâ·ÍÚ0ôƒçÙCl¯¤ ">0 pNÎUœƒž†;9ñÙ?“¤-s?B˜#Œò”8nY-!¢ED°ÆõsÛJ:¶cßšªWäáƒÑÿµÐþ›_~iûã1kÉL*mÄšD[ûJ{è­‰‡Ô©etì©d[éLÁoAK2-Я—ñᢣyï·r±tÐPÜK°â @òJ·¢•,\$Žõˆe+>z—¦©…ÎcË ¶®4íM—õ X”TXú5d}çw¸órB“…H+xë™îaŽrñú ¬«:¼ ÇÁùÝyc6)»ˆaMðA )(l÷ßñÒè‘k¢èµÞnçø<éòàUîõíëTØÚ$s®³È‘Ï3^.™A{ö+$ÖW í‚Ö“}òxß¹>Ÿçëƒ7õ÷0‡(Ê(«‹:› H”`°øfeMPü¬¾a -b9 ©ÎšwÀ­ü¸§Ú!ékš©T•täÑú/À²ƒ¶%¶°&_å~i#7†¦þ÷ˆXÐd>» NßÛmF,BW<\Ñ“.­‡ÿÁM½#úðkeÝž‰WnÑSCŸ¹[E0ééíñ»+ãbixFûÿå÷—2ÐG¿¤œ† BI/iã{(ÆÞðõt€]â» -Ô-Ãj[/£¸—ãS@ ï.6dç5AõK×›[ÓìŠN‹!#ëš°)]5Ë+wnö£©Ó­AgÀöêΈ:„ÄØåqâzŽ#\ðßÉó¬!ÑÿõÇF]ÍÆkƵ¯$ÜfüÒŽ‹¸¿rBœÂÁYҼ퀵Ä -Wc¤:Äè×ëƒÇ÷·à×Ƹë ¬@nÂnñZZ/P¥a^Ä”þû¥‘ΠPE8ä\˜Ò}VÝôpÇ3„V‡y0XBSÒ,ÖòѲj7iiT¦.Û´úW*›%t/@c¦Ô©ì¨ò: +ÏK’BÒyÎÈû2DrßãVµòÍŠ€ŽÈlJó‹œ}íf.0\œE{ÆLA¹‹0÷`i˜c­w±§fÆŠX+æL.¥±Ùƒ_|Hž¤¢uçLtLÖ[ C(ƒ°þ$ßš WŽƒO™m*äh„•^º™jéFìˆåB¹TsªnŠnî=ˆ¼M .ò'àöw ¥.9=jÚ4ê{=ЯæÄßèpvz'RáUÒÑ éæàœ’ÛµØ¡h.Ú²ìa]ê_¯ãLÃ4<%ÞÇ9p’£îë%‚+uóH ™…s ¬ ”’u{.%¼%îå!Ìu¥ÕD‰J+†ã}³`rÚV2Ca ‘c“¯ã9ÝŸ!ZÕüÔ×’% -€âAJz¾q"ØuzÖZÇÙ=4hšýÖÒ®ÀÛóÍ2æ[ï;v·åâc7…P’wPߎ37‚ÿ?Û?:È64²v¹ä]Nr20+Óh%2ÑZsWß¾«wò°qnƒñšp&Ù:nÄ*È\˜R1@ß0Ó8u¤3óŒYÀL%˜Ý_Ä…Gïý5›ˆ×ÕðIÝh¥ÅÑÙÖÚ²'ž·û”×”)åQÜDØæµ®•„î¨!Ë%(‹›ÏÍŒ^ÙŠ†.ÄvŒ"Cñ®­0’ú`ÍóNÝœndVŽCùÐ(m½)Ò7€¾ë"Ôõ9¹¬Ø"dÞòÛ@zƒy_Ùè÷7õ·ðŒùFöç?ßt橉¬Ø -FvCe„wTÐ8bÆðjgÐÑš‰ñ.9Ï8>…Æ?G6–M7ÇѸ@énwPôÁÍãÏdˆÊë„ëä*K‹r]» -ã^¬`C(’ó,=¹Îs­@ç1(f_cB$Öz­F[—:Úï]§1ò‘k[bL¬wÒBêA†¶íTŽ“|Vï±ÈºáhJm´lo—|[¨IËÙòádü\çYý°”]Úyv¼>xÆÏ:F“FC[Ò…Ð/åà¦ÝVÚÛØ ±7WÂ-<ûPM´ÈS\ê_ô¾ÿåMŒ3¬—­õË„Å—P‹jåC3v•ücÊ…^eX¸µ‘R-朮³³•‹ÍøABþl}¬efL>*»*kœ¿­Vv̼N~½ö×è¡Ô=þêóEÎ1êqGîÄõ*›Yµ´"°H ô̆åb3¼?ýÉ3aè}CÌm5R:Í „`¦4kr" 
-b€g²Å<èã\AžGZrE]âÌþ8Ø%Ç$îçùÜRÄ6×ä34HiA–DéÌB?KßZä>£ÊŒ¥ýåÑé¿×{b.K %šÖ¶må¼ÉaðzŠñ;¥”Ɉe£¶ËTJù9 ú`NrJð:Ê -½juLˆš¸àf34å®hà`äÛé\?áP‹¯Ç–²H¾°óK¥tþRUüùb·°²¯—ï«d¾ØBÀ5q$šøs "ûÅÑÒŠâQA-ÊhòõKãJodOìY–»U4i¼®t€Òt›…ÚS¾›hõ=Ñ×?ÿÁÉMJ@õ;*-qbeeN¸†nÞ:13¦žh­³N4qɬŒbàÜ"QÞUÙrØhDÝQ;0Hg1ä ÜùrÌ`¹‹¶sä8]›Ž«ñ]†YQ±g…4_<¾w±Ž— ÏQmuŒ…Ó0eˆÒÉ“.†‘©=a]$ÌÔ’.ØÆGÕøƒšlŠDØ3ü™A -ªŒeSê+»ˆSirýŸØÀ«6Gùr|4dM,ÀƒÅñÛÞ‚X/ƒÇ¹âW{ ŸÕ›££XXš*x>1¤d˜ a\5â`}Ö -›/‚àŽÛ@ Ùaã+ðÉ R:2‡WvÀ@Z¯ËÙ‚;î ™_:ÿЇ8eç•¢c`÷ë!ì¨~hoñ ™ØŸvÒÖB§ä/\våí±‰º„3sñ<* Á!… Iš?ú$æøeHÛtüÔ FÚCšJ+ÅjkUkO£#°1þðå ÇW ÌZ•…[j)ŽÝÞÒ:JQú@6LöRf]Ý]I•ÇƒF‡WÚƒ˜."B|þT©û¸Ï4:Fbõ‰Ãw–Q¤såõÁM¥éMD@µr’$ïϼ¾—¦xzÏ›dç.Zq9ÊjRi«DfÀW¨Ü\C˜4ÙdO.h- êkMÚžÅJÓ_ePþ[¾˜F—ù8*µ–/¸‡– øÞ†ó¨E>f@d6Ä&ˆk‡ÊMŽÐ…·‘Oåd=ÝŸS]IeÚû¹©N -%ØMgˆ‰]«hn³Ü~B£ÜÈŠ¸Ául?y~hm†@ºl"‚=zmdÌ)¥ÞÛYK ó"°5¤Íd]QlÞ”uÅ®ç‚<¿ïÞÔãˆi%9¾E8lµZSì®Ý°âבô,Oô/¸¿ë‹Üõ#> DLoÄ̞Ʃ`œbqLîÆñÉ0LaåÌÃî¥Á­|ˆUÞ½ôÁ{ó1Çyç$f·ÓÆêŽäå¸JÂDmFG—OèL²Äçº=-§~¤;ݦâƒâp9Šð14Jç|†Œ¤•Ô¾œi÷ŠQ…Ž„§³ïÛ½¥„4ˆ?¡B¬Ž´osN;ë8 •61Ùþ<œ#_ñ ÒXC´šBˆÙBwòZið½ó}S›зrƒ_ibªlë[ròü ²4?gÚxºÊ·éÍS[ûÁ«z—#O-„ä ežãoAÃÑ$Ë6+<ÜxÀ§Tí0UÒ,;Ïß`>ùåi‰í#e©µÔ„ç¿]™Ô‘xXYZbëyLÖ× -0P½Æi"aªhK­ëáÊ>€ÿzÚÅ÷6D]k ‰Ûåfƒ2~Ž’ÀÒ+%^ÏZoùþt>û3> 9ëI(T›ê’û§ê°†PþP?VWXCF¸3k'³Ö?ýS³–{T@óuªH°é³K–“‡KÏ]­~½}Bp=ÍÑ€æàúúëƒø70ÊÞ-xWµZל}_¥½œ¸Ùùg Âi}V÷üXÝdÀtbÀ9ŠêŸêg5”O¼Æ¶Ž­ØÔkV_J¡!l6e‚0‘ª.ái0G«·³fö¢°Š¶Ì¦öwW’U‡¡nuG©¥…øGÁc=Â!ãzúèñùŒ1GõàEbzN%lá[âéí[5 ì›°Ê×# 1Ìöã›@érÀ3C=ßC02†i rœõN ¤†ûçŠ1DÕÙéÖMQÈ^«‘±fÓ:¢ex@d¿>xOïØܪD¯ÚöXdÂ8ïQ~?«“Ž/Û:ħ’9”Æúì¤aŽþ¦“®|KRîÀ -N¯ŠW)fÅ2ðéëÀYÚIñò‡Ž_ >$¾§-å°ËÞ2$ïˆUäI $ˆNG©ogpýÍ6Ä‚V¿r*˜£¨KÎ&mó‚ZŠtG*Îg\(&RG][Ç$bbòJ¨m‹pß+Nj@´£™A)n“>TmföR2”¨¢C`~äÍgU±õ’ÎÔ˽qLæÆt³^ð?RP8ê¶× H4å<Ë°¹AÅûk‰±|¬ÿXß!µý) Ô[Ðí:Ht_ýõÁ‹ú; 3®+%X:x¾=Ìô&¨¤›A¤l}¸G‘Û]׳¹ý3$zÜÝ a©yg³”ÅÖ©< `+#îG~JŸê‚Ä^¹œÿŒyõc=-çJä’5gÊkKÙ# !7ä„6ªvÉK ÂÒ~áÍÄGófÑ£ -EÌ*nƒÉÆ -…é§Ø”xÏ‘\Ì~‚»‘²yZ.*J‚äðñúàñýµ²¡Nïèllêo€¥û^±Þ¤©€6Sô»3L÷!qŸ~k”ùí/¢FåFÙZ‹Ü#Ã,ž .aLÁF£"ekôÈZêåíw -º­¶wxøÆ}>DZUh íR5aŸªÿVþ]YÒ -9N*¥‰bQzöpJmfKXŒB€”oÄÜ"=r_ûù–“Ð$VB$z¸X>”WçÎÑ'g‚+ß™:Ü1ê˜HwDO…(WIÍxï[š9ו°&eè%÷[öƒ²¾jÿ¾p´¢´Òÿ†ÎA]¹üñjè_6ä^Ú v¶­ †‚nðoÑÏ6lÕ£“ظjçVµ>V­†¬hjîQ®œjŽ5ͺó¹\ü™¨æî¡ôÉ—’b ÅD`§·½ö(N¡(%¯’‘¡”FŠ=‰ÎbÛµ;OK¡Ä ÓjDü;Ò›:·®²F‹C‚ň'…ôQƒGa€³[1–}lÄJþÏa‚+‰¡Íš¼¨÷„Ò¥¹ïGŠ,Š'Ä Œs”5Ö1 -§PîÓp[ó  -K¢#E°¯¶W_˜ÈmþE&.]ãýN¤Ý{å‰(M­—›2c[úç”_+"V¹U‘4»ÇMb¶ïqf?Ä¡W,’Sµæ8v>Ô(U!²',3¤P1%ÚfT/sÝ7æ´§^2zx{äT­@ü 7®àÚãÓ¿(*pOÏ6t’ÂâÐ7H‚Kÿuå™d©z¯g¯ÿí\½NÀ0*»(ˆÑlÈ6ô%Úøþ—lCߦ‡µŃ̢Þï'K!0vŒxÅíY» ¡Ya¬wߟôbðX¿#‹óIÒû¯dÐÉŽE®5î÷  îv-·ÿ•AÀYpi}”܈6#-Õ*¢ð¡‚’ò“óm÷ÈõÉE·QºAN¯2ÔN’ùÏœjnÁJ±Ž‚ó -b6_%‹Ê!:‘w•|SA(î)óT*?茵lh"…ÙS0oµ‡ô„7UÞ†s9ÜFØuoyÈk]:9-£uSÈ‹;@çúО-Ô§¼ßÏo P{ÆŸÕVK…$Wô˜—uåŽ0fNH‹°XÂ6õÊ€Ž¦MúQa¡V—ÔÖ{g\þôNlÒÙãQöéTBŸX0µX§‡ë$Ô Ë¸Û=#Ö¢ÕV—{Æþ£â³nè-˳ÞDTƒÛTN:âQ×¥Áw.dJvoÁdž *jºŽiåö/Å‚ï ªRƉPcÙHqEùºõíDvþ#ê±>¿í,õ¥g ÂëeŸº_{š5d…GÉ Ö†RÓŠuhO¬ÿ·¿‡ ¡Ö¢•š ƒD‚•ÇÞŸAéÛÀý¯¡Ô}‡Wz9†Wsª$$=ôízFL ,["Sƒhù´mXo-b£ÖÞ+†¾á«²]’¹ßChÓ¼DRÈÍÓôÏó7CÜ”/Yn9w7}+eA¶[]‘K§êJJ7x„lAxj 8FWžÁ]#÷€=ÏÎ62Hìϵ‡R©C“yMb¾Tö* åÞóÁ‹|0!þÂ+NeÊ\†]½®²o(ØW,¤<8ÄØUv÷ßÅ$ã< þÞm±_ãö3å˽à•ôôI;Λ¥¹ëí“‘=M›ÈëakéZ0…òù#Æ*Rãuªî)2¼¥Ê©<ÔµÅÄz U_S”@Êûƒ¹c\Ú¡Ð{JO%CÁmWÝ”Å\ä 5çLGôÊRÚ3bþ]¼Ð^<½w°rPÑN©µíUxÚØì:JO«EUie•öÒ±V -qkKÖñTLy}µíùí>ñ›Jæñ©¬Ò‚TªHx›DTìORCbpû à,׌÷¸l!­tW AX^W¶Û´—C1· ߆œ¤ë6V #ºÏâiÛ ¡‰uM©úa)¯‹Ã×Õ“k‘9ìÝ#m±Ò¨5„ VîL=¶ÛI0ì‡5[ Á@¼‡FêªLq!R¼r¦‰­¢7ÊÄM~oªuGâ}Nbqë>êáŒÚîïÌ™èp\>Ûó|ð|à´ý¥~äqFQY­×BxOD‰TrfD¥Š_+ p&Ūb€gM¶^ÞñÁ@e{_ „‰ÎBë£*Á)Ù˜ùàäÔ +ŸÚ‚éÙH f‹j´å“a0®5ÿóëƒIñ·(ïð $V¹„¨,Ѷ§Ù"´¸cÐøÕì—ÍÓt¨1bï¡#»Òˆ‹ô2Pê/ã9 -Çš@d% WÙ3oŸô“2‚DŠÌþº#ãRÙ³Žäô¾ŸAÐÝ·‘êÞ#¥²«Õ¸pë)4ØzŸÒ¸›Òì¿-‡_åÀÃý ‚X1 ÆGAÄBN,éÔo=w‰}Ÿž8Æ JHJ:s¯B½û ³l-ÔQc8Ï\nÃÉq« -è:\ñúò×á?WÇ—ý|ºd¿pb„^¯ï_Ô[Æ$,’Y¦ÊgjR ­— ‹Y ít܃l NI”ù*³Ìà‡[‰ÿ - óüñaŸÙŠ/(óƒfï|®7Tj†ñ¤ôÒD N…†Có”¾i:;sZúÄ{ ‰ð1Èå -z×ÉqqÓÙJÄÁˆ ˆj¤O¨™g tµ7Šä$€ÂŽHýÊtØʱ¾íÈXÈö˜éÜ@ÇÜT=înÉ$9p`…§<>¸ü¤5 Úž_FÖ -–!ÔîóßžíñÛ_®¯T×KxÊ A5+ì˜õ©@œF)Š"ß«Bh Ÿà5ç^Lj°.ù çå-SãÚK»e 
-[h}°{”¢t<Î}ü4T#w¸VŸYü³NeÀ”(Hž¢•!߬uõ,N¦à>MN%å¯âEpSï¹}Q¨Ûöþ8öYK_áЛ‹3bnÒü+U‰“BŽWMéä÷èù/ 믞]Kwfó¡Í*^ —\›53)HצAŠ¾±)9”†Kô¸×2äVÛ‚çÌ$6„æ%ÅöŽ¼2ÕÃ1ì[5¤ãƒÌó*$Vï'Z/ý(—nм¤ê¯½ç’‚…‹A"ÒÊÍjvJªd 4Š*©5®ã™á¯{8KóÊ µçc@}»+2¶ÙèÐó=Ç¡·@8º¥gob7 óúøÜ8záŠçÖ{ÝÛDlWŽÂÖœñtŽ´hÈV'æCjÃn‡‘cx=£¾ªý¨»"ÎÊ>°£ÞÕß!;ÀÄ ^[ÇI¥3Ѹ>Ö d™¿õº…BæødcDõ§‚»u‚”E<ë!QöʨªaØœ!‰¾Y\Á»­Ჺ†ðWÓ†¥JwïQ»ÀÃ^‹Y8Oö ƒ@·Y•uü¹ó"ØAÀécÜÏ©tÖox­½òonê'†ÑH°ß[©G:Û>ú•Q!ç^rW¶æ+ü¡=Eðí‹XÊo~ùKØÓ?Oͤ(“LæÑ'G¡ï®õÄcÖ:Kt·£R¡ËËáeíÿ9[²Ñª,_ |N’{3:ô}Ž£'.ˆ ̉•8ÄBÙŸã¬Ü鈣‘"„Ô±lýË€ìŽû³æû9ÑUN¨d)R5Z”Lk>­B\ÿ"üvÔ]Vrx7+lþ83̪&@9to (ÙèWe]´ä ¾°èªumÃW+¼¢ óŸ\ Y²¯Të¿üÀ–˜‚vªnñ³V›¨­Í K‹ð.ôª€ur_XWë?^ÇLDA|À¶w?•¶={Üé3$ËÖ ´¼=ÕÔDwWè*wÐ3®V'ÍÞ@Ÿ "£¹¸ˆ÷á¬ñtÏÐÿúAØð Þ·ö»Ðé"'Óâ:<ã_°_‘eÍ €7u˜“Ÿì‚P¡7í…µŠ9UDˆ“³žÍºƒ#Î(zb<¾¿ð»´L±4Gá(¶=ðÌeÑ?kûF%屘šÒ4cüež¯,[K•P.œâȬƒÜvð¶ÛL™œ]ÖÓèçíÏÎÜШàH·$Îñ\qk·viËötƒ¥ âów@å¨kOâñœu51Aàï<܉¨oàÒÙÁŽrÒA"Ü0ÃqÛs¦FÖž°ã`Öl)îˆQGÙƒK̺\T(\çëƒÇ÷®ÿªwÄ”>‹½´û‡u—Û˜ÕĈá²`ý-Û±þ¥Òþý/´‚$p»æÐ3°=¯˜ºæh”—™ÎkúíÝ\Á+ò, óÁõXQì|Ž#6Wé@,~]gœ½}°*ogïõW1‹V™…håy“˜c¤»NÐ~ê®DPóü•‹!ß½fNiÅDó~=Ðë .|½ÜtŸ>æ–& ýýxTðO9²Yf$ËSœçL늽=J‚y6hdÈÏgH‘ÑìçëƒÇ÷÷€-{ªG$6ó#MéÇCkLò›Ó•Ã[„¢¯`ç¡Án%' -x¬3AoeÈ)sÁ8å”%Y ¡(µEº 0W®B±¬ãØ„ÏB²¥Æð@T5ÃJ£Ôμ2ÅY~$êåé™\HÏßßÔ#6í:á½MÉvN`׺5Žób²”ýTš|¦£p(àRÖ5æ/iúñÑ/b¢µÞ˜à¢·´áJl‰ôç­ïz’ŸšöCrÄ÷ ¢ÕÞç1“½ýÎØÙbnP‘Ú’Œ­õÍÚgä©KÈfí“xª„™­ZS7¥A–Yaq©Àr½~K`ù‚<ùõëÖ´ã±í¦*x=B‚'Þõ·€ÓÇIe{¤‘Èœ:¶@®gµÓzYS’묀òZ~Äk¬å=ðÓ‘2æ+– -ÿ­Õ>ú%}s•5X‚ŸñŒRËÑsjdó Æšƒi;c;qG”dO†'Œ}­½‚-}ê;åOž!Ñn†shk„@ƒTïÚ2bL‘ABM ‡J¥!Ag GG¹ˆ–­ÅÏ‹zF†@ãh:Ïû}½M<}q£ ?4ƒnb,=Ðʺq²óȳ2Dð<<‹Ð×nÚ1Ī¹»«#øHºÛAÜFɆNøZ¶`<¶bî¢ò:LHg v:‰%ÖY*g+غöMŽ¡!÷úà=ý-¢³[f`k€L…Ÿ€=Š³S¸:+:c¯q7T÷Ç|Mw£8IVCfÉ€ƒ‘c›œžy×½@¼f4¸Q†&}uF$ѳ:U¥”ò`Ugö²¡<BéÍð@7¾#idÅ@q7KîéIÆÌd;ˆ| éð$·aÈIÇ8¢ßgÕþ -• äõ[µoœ5…db2 ‘[ôÚ¦¾ ìt‹=Ò -Îà­O]ÊÆöt‰ñO9È–¹'¬ÑÚ8jˆz°6(Ùú5DÔ+Y×Æ¡U Ÿl0¹ŸJ>‡„“nM4 Z¯W †ì+ƒªËã¨I@ØVª¶½Â·uSëŸÖÇ#– ¼p3Ô4ï,áÎÄ -Þ”_Ñ@‹  5; Q܉sÝû¬‹QuOô¬yr¶xDvˆ.¸û¾ì5*­_c.–ëšÏåÚ US¢þœ(8‡£×&½»Dnl{í¥¼¨wñ™†^ŠmU69@eåس 3ý=œÔ0ŽW87õïÔúòËö—ŠHûͧë—_™ƒxùPfK¾£ªu$£…±.¸©Ò‘ SêjgJ@q±&0Rд+­¡2}Ä6âqßÞÔßÃßdmMå‡C/¥?bðcÉrcÍÓ[:IGœ§RTaJZ›nÆ1ÝföªqØÓÈ&j[öÇ »¯(™îˆˆH=z%(-a𫸧÷fEIú÷f¹ÇÔéxkå3b¥ºwþ{³\A²mºeûFô—^í÷¿ï¬}®-e×àZ÷Z¡8X°êùË»äÝ)õ¶Š„Çk=MúPIhÁG QO¤ hŸ]C`ùVh°·gÕ‘å²N+k?£ |ã^ÉOcº§—­u½v¾É•´U%µôÈÄdž·¯Àœúà¦þšè¡€…­‚‹F/æ{ùàáÂyÕ×ÄH§O-Îà©xY»ðß º¢÷[¹†x„YŒGÈéQ’ôݽyj»ØZ€Qz”!Ü®@ÛŽ÷aÒ•>ƒ€}ÛˆîV8´¾ù&Í­×qJÔ¨­üˆªdÖþû!ߘK½J<9Ó™€<õÔ˜ d‘{å‚Mò%”/Òa£‡^°¿>x~oZ‘¾(ûÄ}»«¦0¶žéqÍ·þM4Öý(róÂPT–Ò’ûK}çúè—t×…¼8`)é5 Ø/îÕ]Û¹–¤9§„cСÍtñ׉'Ÿ!±rF¼®¨ n2 ~о5äƈ§æ—cYm¤zÕYËßéÄ^ÏQø%©Þ¬¨áxÙùÐ^züZ¢ÇCßžW²(R ôÁ=ý-̲T¡0‚×u‡ꮸ&5ï¨ÅDPž<,ÁÂP¶Ör¶Vô‚a yç`à öìýD·aã«· ¤¢«‹+\ÙëNg›è5œì8pæ¡cJœ|++¬=橾f-©WìFè(º€W iwŽ$>¸«7Û™yŒæKŒÙ^A´5Å"•r¯>Ô{”IÃœŽy††ì«„¡¿2ê¾ù!9°dY{<­Räï»Ò¼•ºÓÉw­Èªè‰ÝäTÊÿl”åÀ”;¤Oâd!‘Ϙ+ òp+ÎŒ +&Ññ ‰.ª^]óæP:¹ Ê \ÎØ`ݪ₋‰×FŠb6íê i |"–úýM½[½€ÕÔ!‰;%nf`ªêwD¡*‘ô†IŸîx1uÙ¼¯Ø²ñéUnø"¥¶}™÷¿æ5Z½)ð1F™\é¾#¥:ŹX«wÍFQÏM#ƒfà–†•Ñwl:IÕ–;¢™CʬÍB•!zùRÞ€°v³é¼VrÕ(X§Ò÷ÙH"¶±AS‚çÒâLië)KTZ ñ“ƒ -¶î냛úYÌ—¿nt$ªî ‡*ZËߣ5&WÐJŒ@·Ù%ƒNúK}Á/_$Ï~‹WB£8¼¦Hª6;O&H^éŒ^Pfª…N?#ÖÚ/a*Ûfp…kˆó€F»»× ÚÓq¥jCˆqá¶òFšñFI^øP<¾òú\9?Ÿ=ÐPu¥ç8=›£&ÞzbÜÔߢü3‰àžñr±èß^Šh„à†wq"6[ŒlGX1#~ÆqŠç3â½VÄ¢”ZC.‰ÇMù„5fñÝCèØõ§vµA©ÅÙP × =¾òËä8Æ`xI+¢âpÜÒ±cmÈ9¢ýN'GÞæ-ÊXãq¬Æ¶,}3ƒÖòÈÍyzéìà^‰`èuÅæôŠÂ0®Î×ÏïïT}¨Ä6ºeZMº=xÀ«Œhy±Í›“¸ ÄKõQ½BÑwø•ÿ=ÙÏ5À[ êd­+%ÅM;H*¾ÎA4z¶Ò#¹vå8 4a[ ßJŠûŽÉ#ßጰøÐ>^u”è·”Y‰Ýþ­6#‰É!í®ã¬7,’¼.ÐB…uÿQÏ&¦©ëñhŒg”½¢Ž‰ß¶¿ehUz¶ˆämíõ¯uO€iiúõ”7þˆ[’rƒb¯Gƒ©z™2<Î5¢4”Âò§šßg­û ->E÷•ŒÑÐ;6)*FCúµwBÕ’×o?ïÒ¤èã/ Ò~‰N%#ŸµþÙŠO ¸¬¨ØwÈ”Hââ!–=#­ !l:× e×ÃmODiÈ 7õÈ5Ä\9ÖÝ®‡}l†MDs±K©r-Z`+‚(#¨ÊN \³ ×ÜÕôÜC˜œW ¹¢š=á:Õ·wõŽWÄIrÞû¨ú©v~à\—Âuq"ÿ=ãÜ£t¹OddÛë·@´¯d±¯«öéE 1²¨•íÙë‰A”S˜§¿­üÐ[@ewCˆ¤RgcÖIçp‹{!H”š9I{æEJìT­t‹TïÍ·uƺþ -zu=ýQm8”ëL6Öóßé·ÅïæE 7y9¿º`.¸ê “ð²rR «¶“SS5C‚}ì$å® -Eñ(Ý–XW¼íðŵÞRmÐE[¯Ã'ñÜ7ä–Úv>¡žßÛ- -îAÝã§Õ,P©Hé' \H¦ìˆÃUYƒˆóŒ#Ž%_ ßÿ’bzgâKˆp}ýJ3|å/Îfâ¾RëC–Å®˜ô\Õ›ÎKçE7±§!rlS[:¼B  ? 
f-ìÐÁk÷P·ˆØ}y4ÆKBñÊ`Túx÷cõѱÜdÍŽ=¸54)b²«Z‰@/6r>³‹|`“¬˜aOÓ÷ÚëLZçW@UTU,C¢¦u°[w9☱<[j-óÁZ±Lzåã¥%Nç hÇ9¢©Äw­Ê8}B÷©o §¢ÔÙôã,IDÈ‘®þ˜˜ÎºC“ô¢Ó¡í\¡ÒB¯Äô&fþP¡1^aZ«·•AòÔgQ¡o<”²ßCÁ–Ê{8ëpCkÉdÙ¨=Cî; éºs*rM¶zÇTmj°l¸aÌÖÂ2"„/á:#àhÚ4äç3TûÅúVƒá–²Øš~ q¡Åá·ƒÖ'ï‘®)«+ƒü»Ö¨,æ<Êë= 2ç YëQô –Iø×Uµ—‡.oåÔ4cB´UqqêˆcÕ”#³üZQ+ÖN0Ž¨ð:mΤ½ÒDV')”3^:’¸9#„œ÷¿ÿÔ µÔnñÛbÿ²ÒlÞ4M:°¤!`Uä@o:6kTÙH  ±Ýñnd¾£k”Y³†¨M¡+éSåTëƒÁ€>Ë„[w ƒ¾G6k«Gcöò?«B}ŽCó±±·¾ò|ѤK<=H¦YÎ8 -íëï…'ÞÙj9&¡Óz8ô”ÓåÕxßàÔ`µº ÙJ÷XÝ—PÒ+ÒP ý9Šw{p?UðœéëÞW¹Ï Â(WnåFã:X-‰Q’úü{’åöû_,œíÕ}djù7ßÞA©Tê*¡O¼KŒœÀ®¨nR­‰KËšöÄX»€ ÓZã—lH¼{DXÖ´£Ég ×fQ´GuǦ'²®ð‡Ì|%°‰Y B T‚·«qð1PÛ¼†PO¡Ãlï-;¯þôýL(}„XV§`œÒAÖñm&Síql‚®—€#C’Û™rÔáüLhº>œN˹]e’x`4;ÕæËð"UQ®4ã#-j^î¦! Êßh`—»~-fÑiKÇ‚r3N½íÊ}­à-ˆEÞ5¦­M§E)¤‡)\wÌ^®ºÆxwkI‹—!…N¾Ÿg£"¹Ökx¶sé]W¼ŸÒ—‰ÎÕšx­8iw‹® ûÉñDíœ+Z1Ó¼âGG,뙲 ÙÇìJcåÿ·¦Q–ù3ÕxCNú÷ÚVF¢ø8î…®¥]¯3ÀOù(øûÚUàRoð¹nÕ%÷²‘¿¨¿ÖføC5ð`åêM «]&OKG.±+½½k!f) -=_¼éþ¼p’÷„Š¦Ãìco Ü|>ï@*?wˆæ³#«C/oθY‰Â…Ï*dØu?”öPŽîçKPPUCþ~}ðñÖ³©¢ ¬g=Ø`ï/.jßÿ¢ -l6R®¼ (QÇù>Õ©¨”Çü¼*=ÀÛª¿"ñì²}ÏQŽóg¶XÆ£6µ0”Vì.{Mõ0NN³îj·Zï·;Jâ,Cy—éɱ¬%ûcÐXCSã@óo÷KÒ;Qí°++œ~’¨ö‹ L3­+=â[ê(i©D‹Y|ãûKzûíÇoI¢jƒ¡ÀÍ6&ÎŒ˜`·Êõ$ªkÐŽ³¥Ô–Ø{½Æh]ÇuËÒhŧYw Ù¹ïòRЉëÉ[è •Ä¤Ï ©OWR7tƒ;& ‹ u«VPïÑ+â'™0”жÈ&†ñ bø‚¦§ƒ}žoûêSt¢ŸÖ™N÷ÝÉ´|[ ŠñÕ C#Š¢Œ/‘ %”Õn'„¹!hsƒtOÅ›gL•KIJ·Ë»1¼ãnÅYǵ¯L]öb>ôØ!¬Ã\&…ëµF´ðqîK'½!¡i‚SÙ³3Ñ]ž”¿(P]WÀ Š£='Š ¡–÷-ó(YR}†èOs7XëðUCî4À•¶âN[»IþÇ Wë ª—¬ºÂ„TÅwôy•#¨LêᲕÿprZ‹¶@ÀM ±;9ÉbeZC1 E0è¬rXÌÙÎÐX°ž^R¤§áŠ ÀQ³8LŽbÓAê)飂¾¦v;Âp9þ™²ÍM걉‹Œk*³m»’G1 ¹âHPßÂû*1IüßÔóí¯gÓXM2 -)Ó•IæS£:i73î±´¥¦»‚>¢~Ü —Ø¿ˆ |ÿK¢¦=‘7J‘w]1ßúÌj¥ÞþN¹¾NR -èhÅC›Ûµ^FÒwkTxŽˆÎ_ŸÍ.ú¤Z¤ŸE‹ÿ!Õx¢“¸y瓲E‰ |<‰T¢ ³lØÌŽÑaE<,ñ òá¯Ft63„¨É(ÚUí‘1ϸ£ö{öPrví—•dĈêÎqz̼íÄpýPoÐ̲h·l£n/ã¬À‰ŒðI_»dVŒAé;…d “w ÊŒ:$Jä=Z´ØgÄÙ#ïÇ_Ò[[üzíüžv¯†ÚS3#Mqêèl×s*ìd³jVlªHo³…ý^Ç{E"€{„õê8Ïßã -¥U±¥ÒϨÉ5";¬xàܯ'pòÛñŽ8:¦žÜr3*ž ÑxÚ -L¯tË`Œ¤9›j $ø -!ÇÉP¢`ü|éÅg€œù|Šjì²L¾yÔa4ÁÖmUT‹ø´Ê—¦I«È+€žè9Ë×›4˜o<'¢uQLûµl¢`œ©"`3çZìÄ[’™Ì,+pê05‚¥‹wTEÔÌ=Ñ=hú÷-Öž6„M©(sš‰Ùrœ5¹ägùÑøèŠ$âÔjHÔÙ0«ï¤*;/Æ&&Ûý¥×gAE>¶ŒOŠI½¦IÛ<ÏÎjfŠž•¨¦ª¥´³îêŒùÕÐÄCɉïàF©ZDM‹µÓ¬ç·G£œ¾3˜ºdGÁÔ[@E75Þ-_sÎÀ°³ÙI‹$^ÖŒhÏ|æ^UxiL&ûnÉpì;ÉÄ$W¬âˆLÆ©ã8Ú­ô(Ëï°hA¢3û’³tܸ¯õ§øC¥¾¹–Ð>Á5±Ä•†ÖÊÆW Í=³˜tÔšIáâB¼®E#šdç¬!˜}ƒ.Ô“1ú.c\½ÅîìÜÎ`× î¶rFËRÒm‘ßòº£¨fD*Û\)]ˆ˜>£š.Ú¨ÚOV"P»ÆÐûÙÁ_îY‹,¨šo¾^åu{$ï’ˆ|»u¼YÒ‰‹ŽØ±—ìÃ=£Í/ç­Óa«UCâ1TÜÚ[¼?B‘çÝ¿(¸|ÿËÇ]˜“»¾È­X[5ý츠} -/Zú¡‹ŸOyõŠ:5œÂJÌd‚V7dÍ'ßž'l–J.$·ÌˆØU²Ày¦ °ŽMß_ÐÑjLÖý¬dNÊ\êZkVöìÈoLÈöwUù$»„³»gæ*‹kɤ÷veÐúE«C#‰iIRT_š‡6Ë{{·þ:J þ`Ù)ÍÝø0ñ¸­8´Êé4ž6žµáîPÊ—$àÀ8H5])‘BÏõ*²m6é¸3]UÛq0Á2-d¾e¬ã±vá™sGY§Å7\5>ft÷í ¯ˆøßÁ(nø¸ŽQkLPjéKo°ÊAg£‚Ä. ’´+ÿ<ƒC£•SJÍ æAš¥å}ŸqßÔ†Ó#PB»´î¤’ãÿ¢â® ÇsáT¿ï)Œ?³ ;úõ—-l\¢ÖôKÃ÷ü‚¡ûþ_ÉóHHùšóHÖWѹ `ÀV<çõŠîĪÄ!gÄËÕ½¿y‹"Öpb‰Ì99^C9Öìf‘¶§´2!÷€4‰êzðºEÔw葧„„û÷©f9–µ‘êq /½ã®€; -bùŠ$ÕkñW¹ÐUÈW‡£¬)õõ©¾©C¨s+Eï‘6ºcÝÕcÜ•õIÜ⾜ˆéf¦ÚÏ€`r&:”§9 -ó¤ò«–¥{Ë·•[’ ž$Uá>xQÏ„jħ4ñ¡ 3¡$Ùú|jfAø}ý%ć‰m)vÏbýW¢ÿG¿„d<Ê -™àM"g}bL­®a¶‡æoŒ6±O@;¨_®Uê¢-½ÇY 車 wróP0¼³>k}c€Q×9lʽBÈ°S4½V¯Ö-^ÁLÅ c7"9Ì/V˜¬ø­ajD+¦b?&Iwº{ç›Î<îY†³!H`Ö<§3¤åT3åå-øÂ'ø;Yi“(=à ¾ðõÙÛ¦ÿ»Ç×€23άO»GŒ_˜ðSGoä:X?ÿøQ©¦ÔšUmˆ¯´J6ï[CÖ+šKçþ³Žó¥m{Þ?ûù »²~–Ô±-ê,6êEñmUtœ¨÷½Òá5¯É}áM;#ž‰«>ƒ¹D‹"¸'×µ¿õïcF#†Ù‘˜ÚR0‹dS–¡[h›¿ú•Îüý/¨øîCþ³Æ˜•zÙÑÕ¶¬ étE+9­Õ©Ù¼Ô´c¹jéáœA*] :«Š·Ë·pÖ¸€]êPÎßkýˆçθj>n¶ðô\Jpy^†Üi¶KTRcá Ûj,²KëF£ ¶áD®!—%ºôäžS¡é™1åZrë…Ä¡’¡KIµííÞé^ªèW—yîH-LûgÒ32„! 
x@ù ¥¦¦°VÎò2¨ã°%ò–Ÿ‡s–ÛÑŠŸöŸÅ°‡‹S)ñ³5¬Ø²à½år([Q ÅÆQv³À–/pë÷®¯ïŠÚyNõýâþ70\‹`õe˜ä(:¦%¾bÔ|«â¥?#M˜Éõ|/U”ý M1!æj}y„ÛÔ·îZèè8C¨Œã½Ê¤rû¿WBHbOžŸ ¬>žç”ÿ6KS¿Ò'x¸¸?ï2Žw¥åsKá>j%´Öc3ŸQñ³XÚ­™ÞÄRó†žH@ÌD³à† ríw†HZ‰n±C•Ì¿ƒ N—U&}†£2ò˜GŠqŠc9ŽbrUuª~u$Š½Xݹ'öÅ€‹ÇS(³7{ ÛáùÞDÎ8¼¬e{N„&¹Èh[ºê¨< -úOçÀþ¤;É*»¢ mì³ZÝb±f# öl€<¬Œ{¡OH–Ñ›kA Ñ¡p‹gIç?ØuímTo8C®HðnåèœwÀ´k}$>¿çLkJ…is¶BÜÚáøÐu&jÛô6ÎQ}ì;jôfôSü§»Œ†ÄüÄ•´7Ao -6%~’š|ÙøÅÇCÅôV$Y&A†`À'–+‹MIÿ$³s…Bö{¢œ3âÏFÃÊØVx(qp£XýðB/…Luô¦½'Ay  dúí‘ÏšQ‹‘ͤâ…}UµDô&ÈR̾ÆsýŸ@t¦/>âz-Ð^àl*NowrQêMšóª»þfNÄ¿H7²%Iy™@Ól(Z÷ŒzzÐÐ4€$‚ÍþÔ,ÖŽ+šÏˆõ[|Î!fX:Ðs#‡¾?­X+ÊÑùycÍ€cb®Ñ£ªË ¤to‘”~a®8ŠC8Çïþîú®õ$’ºú£|l·tµ;@ÝF˜‹N:•†}›1•ŽA´äÎ5w!¸s*aÙÞ½U×Wÿ[ªLôNäñýãû7¯ -x€-íþˆ{7t#…lÁ‘Ð}J§.Ô=»¸„¿ÊTýEo.êiû_¨© Sš_aäZÂB)!íõ±%+ÿgm¤0wŒJfÂ*ê¸"øõÅ0b"¨q$&mÅz“Ít¦ö(˜ÀªZ^ÀËÕ•Ó¥ïQÁoGT°%!z@E/öš,UHåqð–Áb;é^†† w÷‘6\C€YðF÷w\ h¯žö~6–é1†1Cu]×#Þ´QŸY=õñE’bá]Ô‡§¦“žåä]¸¶¹æ_{qð ›Jô³ÚgЈ²™Òi º¤‘û(maCð„¡­Ø‡1 “'„Ä'Ñm¡ãŠÁs{S C89Ãœ¨!³wíý>5 Õ²µ˜ÿŸ Q°ƒ§ã”þ!À]À”Ù@M·|ÒëýÔ˺Á\èvÃiQ+ cºçóÖó]÷(0Ê -xäD[¼­êM¶×6Ý9lcÒKÃY(!ÃN¶p1áZpŸ$fÈÜ«![,´â—–N•ì Mt’2(Î`äK¯,3#^²ˆØú99ùúe}¯3C nbKßû[½Ý—«Î¤§@‘eF¢¼fÅ&Ii±ìòlÖ–c7Ñ„vÔ ¿!eA]asœjM˜ž.< PR¡¶Ö«3."nꨜ{å›YŽãè¶'²A#Þ%Øm¯¦¹!TrùYØ¡ƒ÷]ÿp*ž@ÿ×÷{Ók5¹+šÆBkæÄ-®3Dª(GXasÅ:ÔQ¸ŒZ*¸ÊŠïg¥1jê k@ÄEþöœ'ºY|0ÖÖ Ýz¨›µX}z3áÆ -üz,RH-®–&ך½xËåÞY8t˜šo -~M#âÆ¥\+\ì… ƈV¿Z>p}ÍH_ØhÖÕ¯–ú'RËËQ7⇒3D=ô^¢Bû³l­ïMAî¶š× ¯ô*àÙ,œPÍÈgÈqÇ#,F=¯ãwy°•:65gC»á÷j‡8êÉ“—ý²RÝÀ‹g…P_äоÿåÌd¯Ö¨OñvÍjAðq«ãÙ 0ÓâƱæTü2n@¡u†0Ð㎥¬ÔÓÁ;âá¡ßÁz -}r«8ÆuzC@ ¬_õð›.·YËüZŒ»Rýµ=u²gÀJ8éõï;Q«!§oÐ W¯ÇUCMÐ~?׫4C1ž „S l"•y;]§Z³t§+a'w¾>;i#gH¿¼øæ‚5h' - ³‚ha;Ãp¢Ïj•s*­ÔHÄs˜“% lFü^NrëQ2kîuS¨Æ†[îåL@Þë4Àªw ±ª3á}J¤IúЉ¦öéLÜ6áöhÆàN S`µ4ù¨œ1 2áVlÓ² -um@^lÚûð4ÌÀE&äÌÜw`S¼¤ìŽ“eö/…—3xGý2dï2Í|»…é6 +­ö®DKÝÜ1‡Ý¬Q(æôzØ·óü]6³Ý­×®Â3ËAþÌCÙe„Zè8gžy+u3ÙR4ݼj²¾~kcñUfÿö*sF®X+ŨêÊ„ Õ %:m™@ò6½±=eL’#bI@÷Y‘Ö2£cvë±dH»ðÍ癄²?’y,fOÏÔŸöóÄf=Ë,/¦dD¡z%h>wçìÈ2{¸iÔç#ln#§}¤ë‹NlLjwo졇W—f1†ž)²VÈþ »÷8$³ XRá|zKO|¯Í…øŸ›] -e¤¥‘´iµŽ½BSÛ¦hIÇBj¸)¦@J±6”£v21ð ‹7³}ßÜ“¬ß ! 
-F€OëɉA—8w‹ÏŽ€hÔqâwX -ÏÏ[îz70gwmšéY‡ sVbÓ­ýäMÎ!Ù6á}[îÕ7‘±Ï4Uëêᜉ–çZ'Ò…À×æ×àÍL¸ƒ[žää’{oC7ƒ5´RÍŠ•“Ðsœ‘}›š£ôÕ«ÒjñzZ8xkkÙ}& ŽˆÒÀH.» nFŠô­zÌUø²£}T«GeÉ« X3S=n¯¬æªg“HUãqTíHë“÷rµD 2Ýpˆ»YY²ÉN…·£²¾AfbCí3ÇAÏ»£¸[)â“v‹äC}ãÓNÜ -&W´åiPlŽÃ³é·±&VºŒìw'< J 7«ÏÌA@î"½¹_ù$¿3kÿïRÔ%¯¨{6¦ÚꨚF§ë©ý™ú!~ä†Î’·r¦µAØÂã>L¢œ8ž^Oñu}eWåg*é^û”eÍàã)¤A§“¬¡Ûô¯`vãGÓcIП’œ€IâŒx"¼—´ƒ¼3Z‘;©Æ¥üÊ3Ð`F"U]}jѣʯ;™Á°„ì‹gÝTµV(6})Éw( œü2ÿ%$ǪCU0§Jª±&s S‡\k²¢U(Ñ.¢áÏ8Swîß±a/›H7ÚXÂn­Ä¨ F]ÌzÄ-œ,~˜É6bñG-¥{¼Ë¢K†3rÏ ÆB©Ð*è®êû,z¸­†ìG¤8”PÖ¾[¬àŽëÁÙÄ3›dxp*ihØÈÁ3¥ËÁhÇMgmý6“øeT§pXÅø†à#lEB · RYð - à);s”²ãâîT3M)6S[ç1P :üØ«d¼Q»@€oã©]±Gšm~\U.…ãÁˆ¥PŽížº•Æ†Ñ¨¨¶Y˜ÖÒ!g Ùœõ¦<‚5…×'œÐ®§dÊoæ -òáŸÏ7Ïšƒýsœ3þhÄB®°çZ«îy÷ñ>Õ¨qGwm¤»M¼Ò}\Ï]Á]®œTr壒²wÛÏT¯oœ|$l+繪•oQ: -¿†H;Ë$VŠ'V -„ ¯ª]â KíŸJØTµgh¥AÿÐ3NU›}ÇŒþ@~Ù¿6í(~“ã·SêÏ bg8÷V3ó(öXqž™™t´à¥©8>ó[íŠ]J´AEúŒ(`?6Z" gú3£ÖÆ´'0ŒÃJÄ{tâ€þgî¨íc>ÜŠ®O¥&!ÛŽ7„æ‘j%ìTpæeôUèꃦKG´Þ!¿ú£’nb)Ô¾˜û†RîzãX^w´sABÖPwSdgÕ³Í_5\îÙVa~$“Útñˆ@öºEùµyÚªêO"ѳ$†P‰Ó?tPÚ¡¥×®À´†èÒ;ŠZQåœ5µåæû»F…©Ø: f<ÚÆŒ²¯ç'ôÖÎìã9Ó3ÍËiÌUZ½ÎÙé†E·õÌó!زŽDÁ¿h}Å<8?[ÑþVR€¨ðëáë¶Ìˆj$sÙøCJˆs*¯ýA«èNõ= üVL‹´§®X¥>h•#–rbËñ!Z˜¦ÅÚYÅ€ë!RÝ£êØU¤HéRÆS†{egTAOgkÿ9.¡±êß3ëÅÉã)'cj®uïŽ^hÑÚÏ9¶ùn1Öi¸åTD(WÂ{fxC2Dθl> ®5 î”à¶ÀU é5j™±ž4aâP”zy6¥UFDmÛbøW°×ºàµ¤²qš^ª~¤i½N`° °b™ Ážg¾ˆ‰»æâúJöÈû‡t˜&ó¾}‘«ù~F¼—õÁ ñ'‹ -OKEâ°•†Ú(·Í5Êé¼I9ádLàüÑ8þoTmÿ -@N^¬¡Fôûæz0À‘¸Ö¤V’ëKC_[d“ÞFN ó«1 -ÿMü_9Ò–š—œÜŽ°x‹:>Õ]IlEXcUУdB±–~UÈOj~­ |áãS]‹¾kº’þ©@¦ØPº| -¢›ÊœÏÒ2ó µ)·gH‹Nó…êq»='9”Tø¡JáM}Wcb[!¨ØÅ€—Гm™"g•Éîè T± EQ)‰±" «.8’£Îä( yŠÿƒç÷‰ïXW\«BJ= Pè¶L½äDÄ"Ke ˜®Òl, -IKeͲÓÿjëøÑ/ÿWM/7¥ '4”¥íÓçÓòÒþ’3Uim‹k«#ÝÞù?48˜K“b~°JPÜ<Óέfîh\/ -nÄ'“Dé–TC]wè*¨€­?0 (×ýº°Ç¯0-åê•R ËŒ7l;Ÿ¤¨‘K¾£íe¯r:ÿÈh‹ŠÚÇç½”ÿ¿Î ¿$)à¡ðG‹e×F=ùL1èÏЀ+ébæŠD´ *§¯m†jlÚr·ÇŸÀ Ú•ZHEѾ£ÆQµLo:'@†«‹ê'õ -Cnª¨7)¹ë÷u\³ŠÂîOÜIGpìRí‰û; ~39IªœNµòø›Šô¹¶ ¬0 ´%é@0|‘Ä–|ˆ÷;ûÊ0³NGMŠ¥éž¥× µn‘+S 5„W[—ÒÍX·j¸Ó»Õˆt.DùÊb„_úöœ ù­«ÀÇÐr‘qÆ8@±æEÈ­2ìÏQ^õè hw„Ž÷™¹Ìã -•} ì»‡Ô¤É´^ôà `j¶ºk&É úçk ¥Õ×Y¶U•ñ¬!kÚqÀ¢¹G¾Ñ÷±RÙ~®“ V,L•ÒÈIššJm©´;†øñŠ2ѳ‡µª·g[A œ¡:⯲÷\u5£X¿s -Ò¦¢U.ù†èiÙ½-ˆ¬—À¬üºt\áÓBI ¢^W;C(ðméÛ׳Xì¾×+Y¿SiÀÒ˜€¬ŠéùšÌ¢nÂÅV>Règ -á`ÏsÔü$·K8YK ¤ó©:€‹÷EaŠPW'rª£GÏ ;­ÇFül´¡€xÍḠ‡V߶ûvaÌ y¬µ¡­!œgZÔmjÈT‚ ®t¾LM_,éwN…éHióŽ×ÌëÑÙZ”P¥†Ì(\e¹®æÄQïÌþf›xË/0ƒO·œjmŠðGšÇ ¨ j~¨#­<ŠêJhgxA[Ž#€\ûãZlB%[ïG<±/ŽBuÃ$A­Sñê"-„pÂóìеÍj¿†LY>ˆ¥%`é;xSýˆqU ÂÜÐàÜ%Vw„TOœŸs V¬ÍC¬ã]r¿î(ôîegúh¹šXK_p@GJ˜|…Õ0B^jx–bi.üÖèÂh‘Ävœ sîH»ëÅõ|;}’̬+Pb6û³ç§ƒ®i6³©#øã[?5Ç×Bqõó(Êä; Í<á‹&•ù¹R;IŽÅâ™æ¤£„{2Í­æH£ò…ÚǨI‹??G¾¨@ݘ¼E´¹A“í29áÖðѱ¯“Oĺé®mŒF†Å£fKÜbC]`=½í{mdÂRÔ62Ÿ/6ð}zÏrsÐ`›ÊÉ -¿kšÓˆÔ¦F¥Ð5çk+>ÆDQ— ©€÷É^·FŸ6纫>#£>éVv ÕÖ©8“r|Û,=7>blpt,Ìf²Îú+€ƒs¯ET°J’Aì<ê00çüOÒahqë¦øÓ“ŒYA[Ôí÷Ðà É.kQàV¡!:SíÆW¡Ý–eK”nS¥utæù±˜® ùÎóëÁ?KÏÏXzÄ+nïž©H­!Gøý[IP$rñ´•åkPJ°„¤ô3¼Í@âö¶kqb³9KæúȬ |°‡(Ù$9¶5Nôâ®ÖzxËMù×Þ¹¯-þG51ÓÕ>#y2¶:Sg¸ M#Öž2e"l2ý\q–`NK Ýšrž‰Ñ+(A`ê3<«–Ѓ°XïevÜF¨EÖe*^º1P=iy­õï¬!ë)ïÑm¹ŸUÖ4Ò …ž¬ÈÆz ñ¢ZJ -œÀP>*úWü5deÿ²:Úw4t}TN~ÙƒJãÀjMl‹ÆœSbO÷ФÀB&-„>»]û󉟜Ÿc†°ñ­3‡È©#›‘­‰#“ÜÒ)RÎìÑ¡{ö2zŒýÄ͸õ–Ù^m‰ÚÈ–zzJ¾©žüâd¾²®Y›”ö÷ºeâ¾ht¢t$¨lÄBÞDÒçEès(TnòóŒ€ ]jÝe4û5ï8îÙê.ˆî<˜ÖOäQ¦¼”`Ö VNÙA®×*G­Œ©gAO;æÆÇ÷„^\ஈfÏŽÉ©K_½biÙˆX¯=3¤¤Û}Øg"†A$FºÂi¶°I™]Ë ÓE—ˆ„@«!øg ×%¬/êî‘'òðÙL‰kÄ4>DæeHv+ßõ>Uéç;Ú¥v†ÑE¸ŽýÙ€ÔìÛ¨ÇåL Ä•L¾Xœ©foìrOYC >°GCn§VÅ1.CpoµPúõ\‹á÷¹ÁöôÈþJgÛ^/û”qÈm˜¨ä%@šŸ˜’†h*š¶]²RÇÊ^7ìšzÛW•…·Ä}wXu¤b‰éeZ:é§áõ<*ý¢‰ý'AÔ5ãÝá¯dR=-º/…ªr&•j¿,¤ó¢V`JéÂyŠ¨C£×­!*’0Ÿ†ÕÃÑ Ð…íX3ÿV@ñP=FB:ós'¶”ºìºïTsîÒS¯ð'Oo]«$2Å ˆ8¹ÀÍÿ!~øº×]ŒgR5Š·¶²™¥gÚƒ¦iNaíµòܼä2D©+…h¹IÞ&_Åâ#ΕwE@ÞÜFV}]!sø­å '~´G\ ì ‹o9åúB J—?‰!€dÞî;bcŒ;Jõï`k’ñ¥©5aˆeO¤Çä.›%Yr¤7é.në-´è%T€l„l·Ìc³²™ÙîV ߤƒ¡°{Edg˜HrjW<#Û&Ý·T­¬“ºòJ xé -:¹K#oÀ^hrhÑ(s¦Ö˜ùÎnÖì¦ä,ÝŽëM 3 ðvn­dó‘ÚmÙ=ð‘Nø­ÏD&[Å¿΄M$ Æšbå8kë4MôÔéj$HÏ>p%ÀöÁÙb({$˜Y#ŽLѵAZäM݇’åiRÒt)謣ڭ+¾íû(kbÍ}Ôu±XÏú\×ó#="Ô±žG[CÖ²ª¢!F¿¸߉i²ÝY*nY£òÎ9ixÞAEZ_é ßbÞhe¦rsÕS š‘ših"›â q¯)±ÖLB²é‰fC¼µfÈzúi-Af±»K0ˆköǎز¢…è/Î<3~…-à·¾5Y/ù§µgïïýE 
î&—ýʵ:b4J(Q´¼i¬‡GrDSe‡›ÄÔkAâÚžu®ÃÜhG¬=9)-¥Uðã«òÕ[é+:Ù`ÙïÔ¸©üßÂøL?/xMÐýÖ¼M–sÎzœ=¿…Ë|Rn®Ú#;Â=ÄýÃÞ3;†–º|b^8¼±‹ÛA.2&ðÔÇ™xöˆ”Òy½M€ -ÚH©G£™ØŠ‚ļ3÷g¹ã³Þù±õê•smgŠïu”5gCI/åTixðIƒSi?f¤jÐÖû@Ë kòÊ[CÈ^š£$ú$¬çx{¶qÍçùÁB)ëÔVÔÓÈ(S¯"Kl-Û¹Ì"1‘ÁÖ—ŽpCº¶x»[é£4[É_tÆys¬­L÷aÖ“º=úävÚ‹P§ò›ù ¢,qÅ~Í丨£SHÀö[C6ÅßM¹—T²Â«TpbÃû\‡‚WgB4Ù±löuªØ~©¬…h=ðµ •ã`£!zèˆû-ˆÆå„9hÆÝ öˆ¬uVÓ^o¼½–5S€ujкï—Sñ¼š˜G@ÈaÎBÊž+}…ËÎDXÆQ¢öI“4h|…%ad¸s™Z˜‚Ê1U‘UJÀi:¯»&rœ/)ÜžÚoÂmìÙ[¾†‹;Gº§vi¥Fié˜ÛóÉ`7è¯qÏPˆF]:Ãç¼j5g¹Ù™’ÎÁO—™®ã¬ý8.îT{î3OïNx¨Ê'båg‘P}F»‰îØÎ q¯ '*s»ºïLñh<½dwÔZÅñð=›—©®p!uüø?¨Â»3¥ ÿ·‹ßñã±õK+€âJgBqì–B•­œGM½Q2ä« h}d´ƒ2¬Î‚¼§N¥ ùDÁO·ÚÞ4Œ¼£»Jº3dFUsem¤åºD$&³v[o¢|CgŒÎmô„L·­×®nç¥ûÝñ ©¥/© ?á«ÜÔÆsPÐ -:Ô,¬+šÃR4‰·ªcgŒñ+ßµ¢_B ÄSz-½ê±9È® |]\ºyzÖûd ×¢Û¢ŸòJ%L.¤%Z!ý`MÈhùøŸòÅ€§Ð%”{Ždí -z[ì=åÞkPZ‘Êe'ÕŒ8·'V‹ïÃf߸>‹±‚ôˆ(KZ¢ ÓY£€ áø)QµVÛÒ¯ -ó‹ÙÖ÷¿äÃP!9y»yêÃü!]Ôó]ì8òÉÜà£Z™°™B*¾ A¼º¢0>ú3Dp†j?ú–¶”¡ö£† 5iÊÝOrS‹°_ǨÌ&Vž> - -A½Þ«ÂL¼MÞWNÇÕʸÑûP’¶Šà‚U¥»ÓÊ|M!†7Ê.xFà£!<´{{ñ[Ž.¬'?«îÖÂìS3lI?6å®ú&JÀ»çY­P0šv„Üp¼oiíHJ¤­ÖüIо[­B¬}ŠôBëËâxßé…RÇçÂwGvO©¿)™‰wˆ3zôn€Kç»ù- À¨³=£x•?Ï”’:Ãë—{±Ù‹Ë@ê@Õ£´@Œ´Î°)ÍÆCz‹˜ô»ôJ´€×¿»Î~úã"—¦C"€sPM;®§…¾Çø}Ûªý(‘gœ ÆÏÎ ˆY'‚Õ"fqIÏ$n;\ìÛK~†œF‰ñ°²)TV[‘Êq>ÀŠ3fqº\uª;¸jÄ ÞYM¢=ª:¬Xó"ïÅ ©]]Osg@P\¶E‹e¶€uSŽêÿìê*¶ùÝ|Ï -³³&<ÎÿËÜìZ²#Y‚ý‚ø‡ûù  I¥ŽcšãDM ÔÈßÿO‹k ùkÌÃn -ž ýºÚiôPI‘-»‰=Cf7 9äLcËo@%âY?§VÊú,x`¿~|ÿG¿V AÔ§v§¦fü׸ÄcƒZ½!væt{Ú¾(fá COÈ×e´m^yíK` ¬6•1»Ãtøwÿê0±¥Bxúl3FmøµýÓöqLiSgÄ}Þ -4ÍH©&Ô˜¥ò·ûÌ@&g¬À üë ÿ†Â‰Ü¥ŸÝc’¢ÿ‹ÍS廨¥MEçkõ:L8âú=¬K€£fÀI5òâ±}ÅA#¯Â¨†\¯P/oÕô)¡aѸH…)Èr:‹ŒÎ2Œžna§ÎZ—p™º#)<¹ü2qçÏlÓ®hýsdð$üýŠï—¾ÄE‡Žâ)Vk:tBείgwª7–çë¹ÁH1uG—d–ºÃvê˜Y÷fŽã³}r‹5)•`¾‡¨%’3O$Ùh„ˆ§!òü¶M'58ÃThóêÖ—O¥ñ¦;Ú´õÅÄY7}}ñnêþîæ{'‘yAgvŒC²GŽù3#G\I@¿±Ð7mwFR3¥§?8qÿK’mÌ1ÖÍaœü°­…X«@§Ô’7ðWÐÏþò7IxØy O;ÞcsÈÔªÇø:@ÌØÌD‰q´š¼³è4_«°±‚Þ+HÈ•K„6´$vÿ?ù7y±ü[;ÖVÓkrJÖç ä›É΀! ¶gÚ5ìOn•k‡22‰áFÀ€=ùÓÄ]!R>o]¢nC:‹â‡ÔMóî+ouÅÉ6éaçØ3OÖß©è4¹kRñ+þÕï¹F¢`|]Þ¢¯Ü0Žû3h-ÁÒ Åê£Ìi—r€\‹sÂ* y †1Ნ˜ hX¤*–ƒm“à @ç³ G´-Rá.öHœžàÖØa<|'yòq³Œ†å`R‚s˜¦¸}ë×¼ƒÍ=Ñ£»ÝüÔؽõµ.vN"ÕÅ@¯›Œ|Œ« -šëUpýtq˜j›¸ ß7,£«ÖÅE„vpe$|•£ŒÚ"o5 {B8Õß¡z0µ8R´P&“½ÝCKÍ_t"è‘$Ñ¥}þfHñ³¿Ì?-9`X’ @žyö’°¼ÜO)cv-¶²Lç8×ì]ŠŒÿfÿì~·}ïº.yAd*à'ÛÍ™9‡Êì,„ú¡ÂXe+þõSô§îô8×£C+jÎ[T¡dg¶7ÛÖ92)}äèÖVÜC³ј…Rñ0]àña±Èõâ”Ä®¦¦ìÈ0³…Añpb`UbT°ªíþÐà5¼NÖ³0äœaßiéÖ›9@“qÚfa<ľß<Ý$ú¢lÏÙÚ’Z1âàcJLr¤ã®íx--Í“ƒ%/c¯fqõsóÝØô•6C‘:dïY«ýÞ7äìdÀñ¡ZW–æSçÝ£7{‰›V—á8w¤¤¾Þu‰ÛýbdXß{ZøÔÇfá_¿¥4óñ(;†bI+m2Û>½‘^Ðh"cÃP>/™î•JÄ%x9Qoìé]BKöb;µ³Þ‰[ˆÉ7kB"°‚VçȲ-ç&[³šËã°_ª¤#¯o< »*´iôĈÇ0 ýDIq—l£cSàÀ³<Ýw‹&ü¯¶?ó"øû`>E Š÷…³sî¢Æ© Åè¡e÷IÚÞ ïÀ•P)ø„¤8Â#dþ’9h‘Ónç}l{]2+u6¾-µ¸aKó‰òµŠB§Œ‘Þ„Q»k«B¤°\ÅP©u(–+oýý2ñ¸59ÆÖ~!’¦A"—Ϲ·‡+š‰äÍÜ›y–tÛuɼÎiûV·®ÓöEPäAÐIúŽÏ³OÚÅãQÕ࣠-¬DçÌì4õ§nd)nomùåÅïÑ£}s”´žê–IÁ#Ú‘€õV9io:šï‘ÜÄš™÷p‰·‰œíÂL[G㋇«F‹ÿ^[ARB| Ô‘ú=¹ÉnM«±\~^î¯1§Ëý3@¿rØãÈÖË°?Hh»°üâAῺwè”—ŽHÍXþ„Hnɇ/%:î=lòÍ6—ámfkÇÞr=vÈdñþþöNøß=¼!úD57;ÿ«<•¬oÖ å6ú²Ï!fä—øä‡á˜rRƒ…r‹Ö%ôXw¥.¡ù•dö(À—PŒì+f¥!\Þ3F®¯w|Ò®+¬Œ+’oî牨—yYö2dCÒvˆ‘po€²Êl¥ìËÁ>‹ç?CÞf(X~­×—r2ëc€Ý £ð‡¶¿7óKsW.`€6³afsc¤ë=e#cl¬µ(zxAAßÎØÌD5håòV¤D€Á[ŽE;`ÃÅK¥`Ý(òz’96¬Kz1©Ø?ßJZÑüÔ7ˆÆŽSõJúq†ë#ceûò{¸pNFQÒÿëE±=Z ¡J2Ê[Ê:Œ×raô•¿ È2€ºj(ø·|ÒŸýåÿy -¼ÃgŠóQ{¶ûJ³‰Q` -4 -KÕmÐœ˹(¢䈣$9?½ìº;«½M¼’¿µoàq·Š}Ü7Îé.KÓÈÖ~y—ÿwaA7|ðsÇÚ2sù²Ÿõp1³{Å@]‰SïýùwýÙæŸPÕê)Ù~é)ýDë?x‡å\0™ŠÚGýG0:ùËD*™¶é^‹ è²æ§§d˜p'ò}÷”d·aqÁÿG2 EX 9~`pÄØß‘ÀëÏ$p~¡‡¥èÁS† £ð¼ªŒµ5dŒä­t-¬>îòIQâ'䪫ø×úÑXãƒß<CuHÜ1>QŸGé¦ …á¬(‚×ÆÑ®'~šï4Êb0OÓäóÚ8]òŒ‡éý½Fù¿¡¼ùçé¤ï‘ì×ý¸ÆÈîy×øzý¬n÷—ö<ó-»è™¿ËA2䘨”NÂ0Âêëû4R1´ëGÌÓ]ä½µë̿ͯ…M¬_:ç1¶7 égL„åÍId±É‰L¬9û¿ŸM?~ò—¿ãAWâÌÖê ÌÿÖý(Š9Þ{ïmqr KXˆ‹îȯ2Ë´'o{]iÐå;4rz¯cùH z.ÁœÓÑÖìKxfdŠ\²\LH® ö©:U¯L«×¿îàO| r¾ËØ" ª ¼ @#gêUÞP/G™ïcéñ"ímõFåxn÷Ãûú -i艎.»ÏÈIT0ÔÃ’å©KÂdöÜ­¯¤ó}Þ·âÍ]1 „ÕL0¯Ô™Gš*•:–p.â†ýâØŸ\Òã}A£’2n”á–?yÌ-þ¯§ÿ_|©¾Ñåýž°ã›û‰p'Ò“òÏX;á“ŠjÆ7F!sµj½Ú¨€è‹ F¹„ƒÖ€FȾ˜/hÒÕ秢ZDOxBÛ—¼ñq™ŸæUÞ=AãþXðµŽXiL[‹cŠÞ6æ…jö£çx[ɘšÎù‘/…ÌIjûÈY‰>ÊÌêMѵë+|^GðžîjJQÞoD¼Š‹nŸ]–8Ù³H–ŸX+ï2_ôQb>àþùa— 
\riÐïoHOvôDy<ÈŒò*äÄqþ:¢®‰8”Oö)÷½ñbP69+Ù.›frÇ~Ú¥FÌÖ£KIÙâ -{Œ¹oN“LU¬…3ͼ¨à7ÁOÁƒÖ%h37çgm:cœÿ㉨u_¢ž[_¼Õèu&ƒD&¹SÛ×´PÄ5ë“•#y¡ëÎý[* -¥®ø<Ž}ìÝ#yèŸéQvÅUŠûWÂaÊÓn¿ÎmS° ÏP®bX€?œ§K| )S -ÞŒœyÛ VATܾxÕ Ÿ£F×7rÄ‘ÌôP®.Añ­õd™DSüÜ¡¥aGo:ê’Umu™ioŒ¢PZ•d‹T=Dáõ@®oò“S`t/&œòÞÛsÖEíL!‘|ˆâ˜sšJåÇ"^hß+'voëGjåYø¹äŒúgmÈïQZ_ÆåÔzæRy|¯”똚¢×OuYI=‡Œ©ÏYýâ Pï¤ÊäªC3u½E4%ä$!æï« ï;¿•<î~(†°S/§Æ6²¤5;m#Á™›=Šð˜Jlû Þs_Ò±×åIE¥rÆ7r•»¨àG\Ú‚Â+ÃÑZ«d·°oe†xû’¸0¿BV"¼˜³Ð\àNlvhò·*ô%`K>5ƒ…».qj®³õDXýZµÑûzœôÐU \IOÐ ÏÔò`ý -¨p€­µiat™EßëuðÉÕ–·aÓS—ÁOC,æ׶üQH¦°)-$Ù*ùU¼´G$Óé4^çoAÝå±e$]7ùÑ©¢nñ×?rœõL‘Kr£í_Ϲ¡\9%myõæ*ˆãWþÄ?þêŸ[ÙP§Å}=ÀŽëwHN)¡c90(¥ÖÛ ã íÁb›Èp; :®àûo# -@ `¹X5ƒ ÿúB}ýaÕ -~~˜íWDb7Ž•p[ꃮ¯Bæšœd’µ;dìµñ¥Fz¢ -Œ¶aFÚÝ•J‘Ô:êjî÷)›Lsöà¿ÑÃ[-Y¬iL·À™ôúx¡=Vz´ÁÄ0Χ&³G<¦0EÏ/ß«Sæì™ð¦Ð?;Ík[]—ØDø©8ÎM¡£ÃfÏ;fj̇ÐãŽG5‹z`;UÅ~v}µõè6P²ópÏwíÒ—4᡹D¯œ¼Ö¶{zÀüL@Ä=p‘~Õ£)š«&ˆ¾ª`æÅ0Ïyëè[Ž˜ªä$ΗŠCeúýÃÉ X\÷O_í¦:9ÏMâ|™TÃZ×?]¿Ci”ÑÃ7]ï†ZÛ©ÁxÀØ“V -y©oBF‚n´–4V~Nækü[žëݶ?~7Ëöû?¡Â)&ÀœiÓˆþŒ­AÇEÉ%b”×½u>œ{Ï#yY~=^‰1‘^;Á[P!y,Ô 8‘ÅÅêå¾2_8R…u;]ÔP~?Ö›ç×8Ayù.œõ­ˆNØ$ôðºèã\ºÜ>B×kö8·ùÒ FgE½ž™uIL7‡›wèÄàš—l\CäZ5+>KUÏ;ˆsÝîŽ'ŒðêMX Ñ!®ïY|]OrÿÆû¯OË`õ•ÖEk1ÆúMms&\cäIEŽþçÚµùîxX7³ÆÊУõäiµð=x_%m] ²ñ‹¿ŠXgKü…ŸjÕÞ=yŽøúد:‡kyAqÕ‰–C¹Ã´TÙóĤ½Ÿ%m}Yï :¤8&šN¾’ƒšh_ãܶïáˆI»2ëL…cÆç`M«–%Cd#ã–'=ÏÙ¬8Žl³”Ìàš›w«2°5ÎHM·NyýZt[€¤ó£jÑñJÿÝþ“öíùF¦·Õoè•Y_o<` Òëd5±ul`IOõþHŸlƈZô|•†ôŒ¯CôV”à-†a-$V -WD´ŒReÝÊsúx±ìc.»]°±u+=Q -h:ÿ·~‡Ì|Û5 Ëfc‘pPV3‡õ ù¹>üÚ¶6æͼ¤øÒ¦Ü`¤0ˆO´S[ßþDºƒY‡Ý¯VÊPeRÙI5ŠaÕN=u,ÿxb·X¡9g¼ ²é'!€Ý‘º¤ÈÕf']8¢Ázê=(ÄxÖ½}ÛÊIjU°ãë¦yèEŸãÞÌ· ['}uš2Àl>r¹’«ql -]¿{})ŒºôŽ,:öÌᾦœÇêcÛ];cŒªëYygåæú©6äh9®Å5q€ðøôÌgÞúF¸‘ÌöÚ†9UÕC1qºÍ_îË‘:GDsíF -[ÎÖ’N2ÿX ABDB²{ý˜0UòŒÊv´KpMN/¿ãgl$Š0ºÆŸò:’+™´¸="¹1ŒÌÃîz+ÜBE¬ÛYV·§OH[fF³ZUÞ¼#™]B7UÏ€)ätNLSFø4 Xï¦Kjb¼sÖ„6v/~a²¬%¡9Œ«Å,¼GŽÿâykÓIQr[iÉÙoÓzB 7 . ”À×j·%‚¤ÖMŸ„¡Å›šÌ«ÓŠõlêS”Ù†åûe»At¼Ì–¿ìj¾õ+ü¨§®¹£F  ÇÚúœ¢“á,­Á~4{‹@lëÈÞ'^Œ=s‡‚CZ¡øßžåqS¸{^>cï ëʳÛ(÷TºwV*õ'Àïy×»N›&À͉”Õ·.9c^þÚó[ sìÀ´ÿÇ%ÒDC -{Ù{,#š[„ï‘%ñ"x«ÔŸdè„Èΰ”ƉWUÞœÀmfmq*ÉÁŒ¸ôloð–ä‚í, ó¾bè’§®¡ì9SÁ• ëZÊ?ÇG˜~¦D6¦ö}ÂEÒ5üôÝ’ŒÌ½‹%¾ÒV­%ÅtæʱÇá¤AˆÄû*º–y«ŠzÏšÊ'K&¸'r ÿ3¾9uöpê5dŠFð?CAY¾cÙýl¥ÞEÓ£p%Ç1`ÈaªýŽí’“Cf²CbD’ÌÚù£•1WYûL|q®Þ6¹b|Ê3.ùã# -$ Å:à2B«÷€’öw×%/(ù7{nRè ê}%*í%wüIÖv4c³æè°FÚHc0áZ¤t™“š8ƒ’wÆ7ðɤÇÒg'?CFJdºø˜·jS{‘D¯Àfe¬{g,ã­®ÀBlrÄ”\ÙôÖzœ®5qEˆ»zs‰ Õ×!»ú[/ füd1µÅù -ö´¨³ ›‰¡Ákx˜ö¥Ç´:£ -fLó’uí}á©Í­%ÊLIÉhNÆYïZó‰;fyïˆÐ>3k‹ØÇ]iáv–ò»C {“ô);ä¤lWÏg×IL&@«ö×ëLN˜€桳 -2´‘Onúºä-suÿ÷ÖÒgÞ汜ú•26SƒÁ>öÂ?xºbî=2ª<ñdÒþ±3J\誓Z–g‘±Ïøè]жÌXÞ'¬´ZæïgY™Ô».Že#§<©buä䟼e¶Òôc‡ëìÓÜoè÷óØ·Ê¿¤/ðBÝ?áà¢?u˜jVÅ:”º°0‚Û½¯@&ÚM¶„½æ¢7ŸÝ#µ­ ï¦x¬«×†”³¶JyXYXnFAÏv—ôÆ8KåÑ>e…çËÑ ‚–dÑ—_Ôgáóæwd×yHnzyÍÓÕm³ÄyÕ÷ŠÎäê{º%sý)@<ˆ½ù¡ìæCxI#: -jð°›=‚!bÊtœnßÉùŠ®V |SXöZÙS­N[:Ô2=-¿­ƒÀ±OCçµ:¢æ­lQ´Ž:vƒë6† úåÓ\•Ìӣ͇aøÚbG¿®(‰Ñ*~ŒûçˆR5›¥îs÷qÔáÎ%àòª[ƒ>ÂèL%ð壬K“yË£“ŸRÌ 䎂:Ó IÊÿîj}v}%UòçˆGåu$H6¯= 'ÃM*çasÄÖè>ò¿óÅ'è}¸ 6\«éÜhÓÁÿÚÊ3¬ÎC/¢„ÞÞm$ûÈññ(dæ;Î8íGA†/ñ2©û -ÀYäããÕ€¡äêq?I︡_WÌóhŠ*1]2© 2è(]ÌóQ¼Ü6ôû¨#§“ÃÊ™½hM‹CŽƒ×`¾ÓHœÉ%ûà¼LHøëKS2®ÕfK2¤4á¯]½¼õx˜Àð­±%ÝYñ¦|kú²îŸ\X: Ùä¹#þ/“ËQq§ -¾òyÃ4d¹t|ìÍqЙfÓ:¯Kx¢‡@”Ï2 ]»Ç¯ Ã`ej‚ª˜^õVƒÎaVhRÀãç}α>Ëa …ÍùÈnäÁÏwRÜC½[7·gÕ-o,×â¹++uíéyÅ {ß Îu¡an3Å>Ât9£‹[eº|ï_²n€8‹áæ -6 -!k ”ZKâHq×/µ*ßõ0rÏ&:D=Ü–7Öizeìç°[œG!¯ ¸(êÖ`u¨ˆ÷Ü&Û„3sc¦²$L\5ãþÇ4Ç­K†qW‡¹Å\¾X“Ó{Û‡fåt€ ãÔkƶI§²M¥$J[’ý\Ûþª£Ùȶž¨'}eÞùÁD0ÝæÖÖê™Ëp˜^ïŽQ¦ éˆ_“6²íÚÔŽóéñtRÔ6vXô¨U”ˆ¸ÌÞ†½ïŒ{c(Û¦÷ŠøŸYMYs×·ŽÔvIJ‡ãÚGž!?A@GÿÔG¨(CUv{,µá§|Ž¢ëç#¼Ž7Ô›‘ŒŠ3å™3³VçÚiÏuçZfÑÙ¢•Äž'öš]ŠP> ¶p)Ê'ú¦SÆ mŒïHÍa/âž[Ä&8õpÜ@×Åò%Š—½áUƒü=Ó ±Å÷‰•´[—†?ßøL¤3y¹_zT“Ëpݬ6ùas‡8¸ÆÚ±:¥B=G¦vh©ü–¨OŸRš.ªƒ<õ‰¸ã)<±užežÕˆ?pØ{è)ˆ O| -Ó¼xVAÎø(âæˆ!†û‘àè4éââØ“]%{i-I¨f–ö¬.½™1<ú‰«BƬ#̱»šÓ˜)Èùö„³¿ªTèÑßhkWáõ3Òç‹Ÿè^­ÖK3Èö)H.Úâ'}¨™"= ¦¯w¯ÿýãÕ-ssU©¼páΛHt"]“O¨×ãz™¯$)O™Ïeâzãç².Q¹’:ìÍ¢zœl:¸D4C÷VôФD®Úý[Q‰Ü®®ÃÚ<ë:Æ âFðrpW;ÉG?™¢ýNŒ]†+ßiý„&’Í}‘¸|ŒlO`=Cï#Üû"Qæ -4n×xtÆ ’ðiž3sú‡Monª@øà]¨¨[|{ÆŽ0˜¿¾ûmn<{?·^XÖ@•ÚÝÁO2 `™ó`'äþóì rÆèo"µ¿ý]2LtùëÝõ÷÷:Ðî$›RË%”€uCn 
“„£ÌÉa£"PVxôÝÖøx«¹ó€tEæNkÃÑŠçĆÁÉ 7ö»(QÆÍw¼ñF‡wÍ£LFú¹G ÃnÙh¶&ïÄý”dØx¶<ã»*hS5>0iʧc÷º"óÖjnú³´sNbûí®K8Òê -æøq¸KÀL_ND†*œL’éà"’*6ma{hÑ-‰O]u5%Ð*;ËÑóÐ4$Èd½•…kÞÏgþÍyøæ‚B[¦ÏÎyÇ#î­vüjœl™9æw“õ•ž=TéN«3R©–Lã O¯H:?&%ònÏ8td¢äºûÇ ‘Iq2¥W{{ç(sÒNïQÇÁÒ—äˆ9¹q¨sÇ„á8¶w¤_Ñ(Û­ !Ïë¥&iQcsÚÎLóPëdŽpµ ‰g|ØPSž<2™3 %%: us{ŒáN­%Ün¦2µ=¢˜Aéøõ8ž*‘‘Mk=\£.á£Ô{îò±åË 2OôP0úH>n¤ðUô…£)#BB=ÎmŽÎ`›—h¶:hg&àõV«Ns”4€Î[“¢[ô+pÛô›¾Îhà0ZϬóàqmŒë^]B¡&†µ·‰×Ç'zVgN…&Õõ” âÈZZ¯dþlÙ\®=,™“aÓ2¼±Qósã`3V˜O»ß—âµÇöyìkÆWK¾"£³÷£XA²}“M73 ¦kðßýÙÏřɹ“§#Þ`¼I•£Ì¬<¶Ëe¶òä{p§«é‚);7ÂÑ÷‘n-3«ý_h±LòSñ=@›6†¼>×Gˆ|èÇj¿µoæ+=V½p’WÁÃd;v´;:Ù|-Î+uÑZ<ö]_OÚwYðûúžrSD:ºF¾õ YtÇY£³> -áÆ–}ßRb$ªôM¢mÒA]„ì}¨TÙ¡äå5:S%SH?åTÚÂœ¸c;ß2fŒaÇÁ‰ÿ?'ú$½ºèÌþT™tÿé??ÿË_\@ìþ¿.Ißáï·Ìì`û8Å^Š…VEôy ^8[¥¯&BüŠ>#—0ûŸÇØJFaëær»ö^y’UNV²ïž ¦…ò\(Æ…]±h[{ŒAØž¤C>6¶mq‹1Ô¹¶„îV¢1ã½rgø3ñ¢,”CæŒVqm‡i`¼2O˜Äaû­jÒéÝ’ï{±öÈážx†™ß.,B³jè5Ë.ÊkRTô*ŒoÌŽiG?¿×|Å9Ã7‡A'U!ÈÒzzohÞX2bF‡›Dû–sÆŸüuct÷GÞë'—ÄÃî—RìßctwÒT¯ƒqÖ¹„ ôò‡6ÖâÕ“!ÕOÒv‹’£k&bwl:gÁÖÕ‡®K>èf¹I \EÚÀ–ª:Šó$÷â\•‡Ý œî´ÝDÊ%ëý1±û¥_„ßVØ*šO¢Øû,É<õ»0S˜£’Á^Ýî{¶|MxÎ,²ö¹>¿ z*G Œ¸˜_¸B³¾5ú¹c'ŒþŒ4Â"*û˜®Õ:’,©~”Y4g Ôl~z›qTöyb(y>™œÄx¸Ö»ÁSšxøÞÈòX¿2áê÷öHvÉ67xBÇ©hvXï$ù\ §ü ûu3Žºwå#‰BßóÈ3†Ä™'Åoöú%Rà)³1Ïg†Ÿ˜éõLü-8ãgùS“’CéWEõïiìÀè¼cÏQšÿçéô 8暦k#üÂ’Nœ÷¸8¥Olû*¤ûgü ŽÒñqg„LÀ\ªO¬ìúN½hFL´œRv{Ç b¨”ý÷£*‚Ý›ƒóW¥Œ‚ê¶Þg­å'ÖóZÍ‚µ!GÅ)—ØeqìñîÆ÷ÚŸêqcêÃq/GY˜ª°Fá;²¬…syæà-Ùem¼¬KWc³ñù¿¿‡e|ã ù=÷7ÞL¬ûÖÄc\€OcÇš‰ðÚ»/1Þ~”µût4ŒZ+ÿé?NG¢/L+·7\#›çS¨—pÆéÿ€òýŠ)Hÿ¼“~$J"õrœYCOÔî·âJá³¥….­§OÕ,°:Ü -Uçš–À†k¢ã¡Q º.¹S^;ãÎmµ¤Ýž’IjbQ`ûuVg¾ÎÐ.£¸^ÒÎÔµÙò°M·UßjÄ\|Áó“K|«+Ôjc²>ëÌ׬E­¬ p‰jéÅúIbR&ðë~­èë~/±8Õ¶è6)-¦×\{*¢$‹4Ãûzƒ^ƒ>ÕÅwê˜éÝrþÁÝ嘻ÖÍ•aáìZhô˜5e±EJW¾yw¡~wöÒ› 8ôÖhrê›&ÔkŽ$ôÁzÊGM+˜eßÖp¾öÀû’yÇùñ9ƒ®öZ¸‚:>³]w¹ˆÒí7“œ.¶C•i™ðêÞ°+7tLÊOù|è¿R^ÕcþEÖçOÿb—vû×_êsëaØáÉkÍ(Ó›ãlº0sˆÑ.^’ñé‚“ÄF€â§™$óqÿ«q—[ÿË^}ÝùtüÆxϱ£P3Ï̟ʆЄóAí{g/CÄ”\àC,î„V\Ï}H’oРÙÍÏÚ·eò±9»ò:ù,šÓþô8PºƒˆïçðÃûŽ¨è¿~€²GÓ€Ç]A\jF™Ô}ÿI:ÌŸ½Ï~ú—ŸÄÃ`A˜Éáêµ{K8ŸñÐU11a,XKOp‰µîŽäôœÑÅ -¼‚Qk¹kUI¯¦Gì&²í’ %ûåzb&4ÃùsÑF>ÿÃ{¿Žƒ…È‹º$¯£«âý=‹|ÇA߈\ç2Ï#6Žº®ÇB!_eöáuVÉÖ²ý¸ù£†¡³ôyxn1Ì?û~¼ò™°ó|qÚ ¯Õj.h­ùvÂò­lvo9®ô÷£õxE3“ÈWeKÏÇÎRîÄè«Û½Å°ÿòwúØ£q½'ëótåGÿ«aÚÏ,Ô”HÊ?ùÎ_ÖòADÄS´c›„~ý¯uvÿ† …9"’¾)xnL-5 I­‰™Àt7­±}G$5…и:6¼Z³ýp*FYÝÙÕëæ~qä 7àV"E/“N³Å|^¦öF‚éO Êõ„óh!xõ¸ ÃfŽÞJ³ÈÞð+¶ÍâB×bÞ‚àË—s;Êz«ŽZ.Ë•Ç–&¾‰ô¸ÆShlgç$.Çß -Óøu”ÐÅãüvçÓÄËí,'úÞéέ%2`æùÈö~)zÞíɈ*Æ+”£××ã”Ê‘±Å”"ò¬´"ÿ¯DÉEíQ—è70mÐX‡þ/¤@Â;$«\ÎìŸq¥©àáø­qüŠi3'ŠÃYm¥Ï©Ë++êa§Èzˆ1xíœyö\?œT?ò¢µîãQج̾Ï<ž4â­Å‘ë9_Q aG<\p‡ˆ‘ùê¡JºdU”‡ì¨#ò3œÄÄÁa«„.ê/ý÷U9·tR9[“˜»^öÀú‘â.vßß''€AÆrÓÔ0‰jõEnRÜØõ[IÞ¯¤ƒ»Ö0×VBØÈÉþó?ÖùýÛ¬ÚŠTâ »©øºƒÀŠU8ýÖCú -É߇Yõ„½¶§\Z_j°[Ç‘ùÈS†ª.Qp1-˜I“29z¤¼èìïx¥_ϘzŒ8¡³Šê8gy¾GFä¨ë0Oþ+ýêÓ*•íNWÊÚÛ=PC‰Ý"S’4˜WáZÈbÁ„ìî_CÛuçHl¼ÝÅe5«ÃúGCƒB3‡þ3ó@a€_ý8îuºÞJ|³ - eëju‘j‹€$f½ŽÙ²[MøS— ð"ßè=—pK4Üw ó¯ÉaŠ‡zÏ[…Ê·¿è;}uŠù'”¯DÖ U ôìaG–½‚‹›òðK%©Œ=WÔ~):«X$¡ëçþ'È÷c|Ä_ÙÄ^j¤Kº¿\"žGiñ†yb¬ð!B±âh6tµ¯}¤Á.á³ cb¢§ÞJ0ˆ.âÜ:æÓPب‹(…‡K&4=ÙÌ«v׉5$¡-ŸuöðzÊù޷ܽDÛÇ8«Å¤ÒE#C.´^gd1ã³ Õèt„´>Êsh2¬µQߚ낪ߩFz7DyY«9£¢0kØžÉÞÎ*¾…˜ÀÀ#çð:Fº„ /­:.*øÜ >¯rq‡Kbbî^B:ùÃþ® -‹ëŽñª¥5ÝŠ¶êÑ8¥ç£U".6ëK ’ -¬Œõ^‘…œgÀ…+MD«i°5\ËÎn^œ%«œL²VrO#¬CqZ÷¹—“A íQÅñ™BèKDØÞ)Îä±y¹®­Š{£²9wŠ‘ÝäºC}~ ©K2ÇŠa¶ÇPöŒÂc«´‚Z‡ÁQ—8nÁ[y¸¥„_õdªjÇþŽë+yW-Š€ö>YÅêAÝxáq3¹²}sKT½÷Ú­ŒˆªSðQÎxÓÃU’‘ÄWŧ]$Ñ|ûG*ôÜ1)ˆä׎ۋLŽ9Èz™d„\X!Q?±Ìu…Z­/À!Eª!šKîÀ ¦ÀVÉ–ù­ú ‚pîŸ3ŠÃ‡xÙÙ£¢Í ÿuÆÞ&¾¡3ÇU&khÚ -­â’¸dÆá!2ö­t¹£Ì:•bg}œ°…ù“EÙ ¹ÇÉz5 y2?O!CoÂl"´rò…Íuý½E\´vVÙw3Òxð÷ÆY١튧þ "ŽÒ/gîªTxc¶Þ|uÄÂHÎQ -Á¹º=©ç¨kñ®qî9ÛòFka%eRÛ_!ÚÊl8Ùžè1Í ,F™ŸE£,¥LÆQ¥Ì°zß·ºD„YÞŠsÆ -Í «.Â1±ß Ödñ­.ÑбYM/—(ÿsK¶×S(}þwB)†ê³2,q\<ãñzš|ݲ.•9ãC)S¹z*{1˜ÇR†^21c^åÌÏ3YB,ø¿qiÞÊÆl–u«¯½¬Fà1ËŠrþ|J8t2ìvh§±õZueXiK{ŸdšÚAgU$pãyæÚ%_/í•ab”:ír’µ×î´*£ßbÌ´nÕQuÍ¥bRårf/¿®D5ù»ör®“µÂzVªä›^G2Ò|cÎ3Êþ„ ½Ž„óGÿã½›1Fïq‡ÄHœ¹â[°diâå*†_üJ×íO—¼µ -mDÖ2v ‰±^†¸ó•Òí1]zHaW.‰~Ѽ³b?Õøeê^Ul>œà² -ÊñÎçH`ã¡„^•H -]>5Glr¼~¯-+¶i#má´ç¯Îåó·„*øµÈòCQ}:DŒ-¬½3BÆ€Ä3¥9W+ÄÙu÷®<7O²mÒp¿ÉD´ÒØ úëÁQ¨–Ä›æìáNÕêöMHƒt ËxB©ÎôØ~Úê 
-zî7ÅW-½k£ëîåN™ùßè Q¾û§‡²ñ¬ÎÆᎌÎÏuRú•äÜu¬ÄcŽÊ©[»˜Ô°#ﻊ•E:9½e†­À'­£)ÆÈ1‘UåÏÞ¬v½Ó©Y.ß)¼Ùòóúûõ ¬ –),ŽãÇ«btgÔ¤žÝßÿþ¶¶ª„V ¯úŠ™–„Y1º²l²ˤõW†|ÈúL_b~õæP¸±(爥ÍWAg|Ê‚½NæÆIÿ¨ø$‚®æ›@ï̸ŸÎ÷u,ÜÇ6|ĵ–°*Š’«¼×yï÷¡ÞÆP\•ø܇X˜µ›áRU<é“ÿËY¸nÄ¥8û± *ýƒSäÔídn±î\[¶ÄT¿&––Fùš¶Ô`¨”}û„Òœê(óiËØ›ùŠv¥ö‘ôƒìçèæ*Œ´ÊÃì«R¿%¶Æ¥*µ+Ê,ƒ£ÛìŽZž Ç€ƒŸ³'á  <ð°ÞÈ«=ß«ú¸$Óä<[å ´ ½m ÿÈVB¸0Ó„ä{‹©2CKô)ªß@p‘±/éåý6ŒNjì ývHl•ïÈ.bëHµ1"yG^AÜxvP¤·‡ƒþ\é"†áŒT¡é¸Öa}m¼Õ‹@ ª\g kµ¸—r,~|X˜]GŠ -ãd=xe>‹Ÿ’&´â$83Œ.ÜÒ4­Ÿ…ûz54n*Ïuÿνjf¢vqŒre¸…쬗)Lm¹¨Û“ZÄ"ëúÃù£KŠsÈé´—P.¹éõÀ´˜ÌwŒÅ!¥—º„dè­§0CTß~iáWyj"|”‰„KÜü`k3$%ÛË5&½S»Œ`Æ -üç Àýcfo°Uß!Oß2Í핵Žòx>ÖíƒUòžúf£¸±Éã~Xàxf6\Ëe†eŸˆÀ2¿çþ¡Ø”µ7n=ûD‰ÀÏnØ"Ï£"!÷X[:ð”'zÕFµ'Í€¾Ä–nÕLDŽ³ïî‰×SK/ ^k Ƕwó N†VK;³IPÞ*÷UlÂz`Ø0³ÇéX¯“²n_lJª{á•p‰_\ßծό ôß%eŸ0 4Œ¸,¹Xßk_áB_0ñ•–2&0˜qFíçhÆ®®g!!Òk¯I­K¢ìfPÚß—œ™Pvºè–ù1´4Y“~gnÎÚ¶4‚v -ݵ(˜1à !ÎÝ¢;ûF°Ú¢µ˜!µÇú2w­_ÓØlm˜wR$pߨ¶Fzf G6Vñ=ýë-N:¡'JAE¥üêì'W†Nl>x6¯FHœ&¾¸ûØM•-?Ì4c4®ì i1jÿY½pMây3aêÖ*ì®Ð0¿X;ÇŒxÞþw ¶¨]]ÛÌÙ•&ˆJá˜PTÖ˜ç:+Û3y¬ñÁp`zl‹™·â&â@¹’%soŒÍ)@(ãªúc£|Ž¢ë(U®q|yHŸPcbùDZÇL²Å‹üØý|[¥²€&¡™¾•À(L1ÆŒáä"|<™(ìÿ >¦ R3Xv_{úâàD³/¢áŸa -¼IÍÎ%î¦ -±ÅÇ)ÄwNzÅ[„qR†w°ÝxŒš{Õ{ãÃ3î -<!'IâtÇ)™É)0ûÎÆ9‚¿¡œÝ¢÷%ñcc¸Qt}Bzν5V†KÐuçÁo`“„·ó Éñ°Ò’Bè=×g&\*|F[Vþì—ßEW.¢ã&ôŽzt~•õ!ƒU£ÅV‹Œæyf.8ra@ÍŠzSäðÝ{œôq¥MWBëÑ•+æAd ,y˜»µ‚«á#f:ñ£²UqÅ!¨!£Úr–徔Ҹ̽òÐh×˼ù8BD–Lšá…Uj,p¥Op:°ø[•‡aJ'$ JARÌÀ8h]¢aZÏX|"?Wk‹Ÿ‚û±él¢ÊJ1S è2×Á9ÞˆŒéIÈ}…¿€ª„ WùMQ¯b¬G™Æ4ª±%ûò‘×öæÇéK¨KjwñøcsrJüŸ¤³cM̵tG}«uoœlóa£J(¶®Œ“è…)ï.áÞËCÊ:°7L#²‰±«µ¯áeÌàŽ8>>Xo«µ*ŽÐ5x}•9¹‘7"c°ŒzˆîãÍÌežBW/,æ—ÉFzÄÌ ÓSðÐŽõ`ªœì¥3)¥1aH ö* ðk؇ßm_„"&8$¾b¤ -„;Ÿ÷¹ÓÈ„ëâ+%À-Åæ™k¤úVØ&»í4ÛE|5í¥3I/f9w9sˆUxcŠ˜ƒæÒÕ{Ð$öÄ[¤´ZËtôˆFD·³\écm=#é*j¸n°×å'Èa)VCk]gw‹¿izЇ»ŒëgΓçªx !W†—ºúÚY½ÜI;)Ÿ¯êü'9ä$gÕ¶•SRá4ºy–@¼ñƪÁ“¹Ïýµ²1”/œ–ÿ} ŠÐT¸ühâˆjÝ<ñsp*Äp·öNGI¨;¢Ù£þÊtõÏðÞˆN׊CQò@d<_éÚ­»ªf¤u‡-Èö7hUç½~Ö>Ò ¥‘؉&Í"Yv5ÌU³ -ãDt›çöÝRÓ"%'=7óª$®ô(Úè™wÖë ¥G€wÒãI;šú×6þÔÒÂR”Ðbý‡&9 sï±ÜBR…=òòã‰BŒ!òè'ü¾«óƒ½á ßRÇî®ÀÓø‚áL 6⑸ -düÊŒ ïOlؓ쨵P" ë‘n´bZˆµ(Ì{ -GÁ#°ìܳî;–n"kÈçT•ø@Ÿ»G`ð¼â²ü#ú =õAºU8·/“¬ÀŠt%C|6ÁdFôóÄ•q_bŸ»?Þÿ:{þ“­g Ûr‘î~œ›*€™€ƒNàìø40F“ûú ç¥øˆ vS’@óË#Æ›Y³Ö£»¹ª{n½wS7 ‰û+ó0^ºˆ°Z—èý`f&Ã#V—àXا ²‹«¨fâGÉÚº’›ÿQrù[ßjH˜èL -ˆ«÷ÿY®ngÌ×é^Í´ø”ÛËnA{z†J+,õÚ];ôÈ„ƒwOâ)B1•‡\I÷HR”ßKs÷yfZŒ¤±¸÷E0h&C,K|žì™mrV¾j¶Núá®ÕØø•ZéoÌ(Í@=/íõÈ®N§èŠƒ»·Iö•´ßëú¸»2µbãhÙHÊ;ÝŽzž•í7ÚIAOPöYú*kô×ÇzŒîƆ΀¸ì§ž - ëÕ5Ñí„œÁ¥8ê¾ëYC[âÀŸºNúþ[6³ -TQRÐèj¶çP¤ê^ËDƒ“a)‚»¼´¢Ô‰4d‚RFߛӒgýÃïA)᪭(×ìSú–ÉϽk]‚зϰ’ è9ŸÎàú|”y(kDj’)M‹ÍjÚ…{%~H{>½~tßrwºÛÙ×—0¬MAlÛÑ£‘Ö¾}ÏáèkËMt -âË6åÐkŒ¨½ç68â endstream endobj 51 0 obj <>stream -G˜+,/:›ÆÍYvI‰6]çû~§8Tbbäçäi˜1AȾ<À$]×±»6Š@a\s~LÇIÚÎ}Ÿ<Ü×Ö¼^£¼šØžzß:ž¥ƒ§Û<óCé‰ãi_ˆ”-Ú:—Éve^î ÖíI6ü¼ÊÚ𮥋”t{E -{¬QŸG*~Â~ñ>³ýÁè(4_Vê癢Бî$SíÜÜ:°¸cÆìäIlÆHÄÔ½ÓY ½˜V¦‡¦ZÕøS?¥]T¬ÃË©»ÃÜ Îê¯Ï¸œìùöï±ñˆ& -èÜKkÜåô€™QÌåœÎ|'æ/ª»DÚ>÷%¤¯=ÚËêg(øž… -ŸLÔd²LCW=0üíö‚LºÑ^:[­Oåu”,ˆ -ù4ÌmX³L†%ïîCY/™›$Ì“ÿ¹‚ñéø–Œlñ=IÓ÷#Ç‚$$·«c¶',n -´Þ´Æ‹MR"+¡IÆÅEí~dÚ>ñà;j݈ ·óÝ[è]„WÜyž#YFÑ#ù¦uÉm²ŽÀüñ­;=ÒHâî#ç6 H" þÏÆÐ&7ù‘Só:J^ôæà†œMhZNwꈴíø³áÇÄ4Ú“î°šwêa³Õö'H'[kÍÿ6ÞÍ ]ËNë-ƒ—2Æâ¾Ð*Xã/ÿºÅklPÕóÍ'NÛ\Ÿç¤Ií3®LO; Q U£_þÏþ7ø-É«5ÕÊŽÙ%l[®;*—¼ÕˆWJ¥†áâý÷u¤ÓyTÓ×™A2%†6ºD‚Ë «:ÒÑò—+f¶{(‚{b;ϲ«?ãϧ<…]£Ð i'@4<6h!ü]p® f$ThqÓѤÎòW}O„5ÊHþª/~Ôåod:þ^?®ÑžâLÓœÈØ—]3ÎØIïÔžHëîôçìeth ÏÒ*Œ›ZîMÃÁ(±3k¹s[ƪtÕô°å/&kHïø$y°4>~ïuwŒ_FRC°÷LK½SKFÓ@‡ËÂ1…HÌ¡xìÞØËŠ4ä*5}CÐá '¸u ¿¦3¡1IïÈ—â”O_nªKÄ„_#AxXÉqÛ"é=í¨ŸKк{ÒéßÌ·2Tå¯7Ü¢¦œÜwâ Öì ¤©Ñe=~ò‹8BÔ¯Ä^ÇQ¼Öé£pY\kã(Kˆ–KØ šGØXjòw& *Y›¥‹ˆºÜ™Œ#c Œ¸ÜìodN¥\r»k6ÈÙ"Q·‘¥ÎÄszˆr ÙJ}KZ!:ÍÊrÕ⯇ðB-EJr4sóο[(gûª‰þ<£'û_뛡Å<½bœ…4ðEÄÓkä¢À8ôÆŠÅvŸP,ãŠ6Ío´q¤àËJöÀ@¿cÖ›Kk3aÇ­Ž]M!N•˜j› NšS€ú·õ+œÊD[÷(Ü‘Ò*¦½¡T@\_‘ƒJ›t±¬ÖÉ=.wI‘†güA¥×«¢lP'b?A ;³Àq¤RŠ±~7n_‡„ùBÒÇlvÞf°keqìŽBÃà Öwñ”×õµ\Ó@ '›dŒw$ªà‚øœ©SMͲG`:XOLW0'—œ‰é@cÎѺÃC~Wà¡S[ÙJ¤§ñÖx+©ƒpsG8ÓšÕ5^‰ˆ8N@å8ˆ•0)ˆ‰VËÜý -’¯À5Áo¾8 ÂQšï4Möôÿ‡³¸é¸c(~²(âk“sMx_®@Àº•Šƒþ “q•:²ÏÖljC5†fÑ’1ƒÏúˆ=œ¤r×sk 
?³üì™7B®V÷¹>/"NB€ovñéðë"bÏ[àñç"Ì1Ø8Úü?C¢çد¤rµ\T‹@‡ðafr”¶>èÎ%§_í³ù"úÑõ)&H¬¢ÁÉ[õ -'èo•Q]KÐÚô%?¯sDÝßc0u²áŠév- ~’w&py€©Â­©Ó/„È4ãY,ŸéÚouGné ¸ê!?ô¢-8"-ücŽC²µH¯P5’¡Ò5_5vv3jËá»ý„}ÄÌ(xt#<­Ù›Çó2Ù@ø„×çuL;!ãO¼f¼û1æž—ðA|b|Þs t+í6_Î[¢¬BJ˜ÊAE)̽A8Mž‡ªûÍèÔu‰BD_l"“Ñ£È<Þ2¡~ãôÁóµ*E'\d)Ý'éáø2«Ï`£Rt ƒÙc¡c½>p2®­‚Эâ.9•ÈXúÄ5R­ÅhaUáxš•‹ `ÍðüÎ7ʈ«Ñ‹ä*n”Ž;9CoZ  ƒì¬âP…€¡åд©¿z4ù÷Ú%P Õsý#÷ü;—,äzñÉzv¥z5$$û_ÛcqÁ -ã›!<ä×4Ýh »V(°ü¸ù©TÏèü±½ïj“àFÒMI22JJ(è*B㯟p+`!­u|d3YW¦a‚¼9©ž,qsÏ›îH!¶®Ø<ª@Tò„¥íéXÖ™÷9ÊÈ-Û]7 -&ñŒKö\O‚`zµ+sÀwGªü*ù@:º^[Õ¿zÇ~„ÐJMžx®ŸçÝJÅÏק)ù#¿ù«ÍqýÑíù»Òÿÿú¿CC¢æ¯ôÖð ï;ù %ÁÄ‘áÍ3õršIØ - £ØåJÐ4Ì'ªåÀ»ãX0#héa˜Äô®1h¾ëfÄp3·à॰U¬Ó3ŽeHôW†ú¥Ä0à|Bx֔瀡[Œ]Amú÷Öæ&ËÊ·µ Fß딩›©©5.w†ž‘˜<þú«®õ25›€B·¦fñt'7ʘàÙ9]/7æŠ/š¹Yt_XO;Ç^öXtÄô'䢖K‚]øWÄëûÂM’FˆãÉÄî{+÷<üdžÀˆ3§`L ßÕ«^½æTÙõLè LËŸõ˜ZöûÃYŽ ŒÔ[i6ÐýæŸÍ-ÁwÞé—<ìÖEàL`Qñ@gÕú dþ9. ˉè©tt„»;QŽ# -˺$TEí]nÄ܈¨> ÇcùWÚHn“²·œ|RiHÉ]kØsR/£}»"Ëåd÷ 9ÆÖd¯³^BÞÅ]ðœ3™]ÍçŸsZŸ½*¹•«ÉZÛ¦Pj’²ú­„Çð{~­‡7|n@Ú ¤£a›ƒ…Î8F$pž&ûu„’XçþÕ“›…¼f §änº8#&Xœ+û:îaX4O#‰QÕGŒ³R­X¶ÈoJÏ”¸ &QgD'o…ü~^GJ¥$5ƒ Z,u=u#™§“ÕwΊ–æ,Á©pÝÏ7«üNÞ³°÷ò•2Pné¼­­lø]”K½×¢N7ã8ƒ[=£b7Õã÷sìÔ›ÊA¸Ý’VÅe—sdŽ¹5xí£LJc6FH#z£µ¢:sÉ͉xΙ€—2ê©KŒÍN+bóÈ©vÐÏg$ÃÝ°²;] SÍøݯÒãivèQÕloè Y~îÁód>ö/µê­žŸ*+Ôf¼‹ÑL†ƒ»¬Æ\;p|žÝ„ÄåŽî—ûgÄötª1Ö2i½–Í«7ï,n›*Šëöó‰½ã1—_a”°§yÓ ÉžµŒQ6lŸÛ?#1wH×çSÜrLt,eê0i½-WŒs·¼ã£„R¸Ïæ&†ÝñJ•†ßMÄǤ+ -ÇäLu¸ã,¤õ¥à0êÑIö:œõÆüéFéÕß„gP<®]rëس®ƒÖöœsã5%«^$û -ll†Ý­*n{ëÇÇeu¸!2Ï3ÞUŸü‚T쨾ÕÁÀ@/U°­SÃC×JÍ:Ë<߇ygbøŒ _°=wÖE :&’&õIÔQÉCßø IÏÀÌ.>Úº–¯¤Kl´QcI¶…¬š­Õ åuâ³ Gú~÷¤Q®¯ú4ì¤Y%: È6­Xó{9Y;ÞQõ([(ù2–`#NXùVÈa8Ogœ¯Ú¼6¶i|³>ò|âåWZ;ê*™ H •“v5Þ}…FV˜.JFWàL'²×Ccl¨ÊÚ þbƒõÝU+8i å ðüäüÈp°x—SiÛP¬]u ‡ÆŒ%Žw+ÌHÙ’J‰ -žg©%ªóý„M²Q&ží -­Ú"‚´ªÀ¬‡eìz^ŽÂgÙ?$…ÄL+Ôú -´>üy¡ÐÁ  ;Š½hÛþ)&âKóÝ'•ôº‡d*”•67b‰% qã 4Â/!øŠ ’~u¤×jØÔþDPz#}뢊°a÷§Â]àS¿›sÔsý„^I¨/oo]1Õ§Þ¢i2àv’¢VÑîø/¢­Ä:×$qN‘ùô €Wé9¯lö¡xì\Öø ÏŸïM8ÁñÙª ½<+ÒÕE(š‘½ûeÌ.¡fæPkÁþ&ßã|U|~.9<¤¹b݃Dˆ?Õ@äf\ètK[-ñ‰‡~úG4Ã.SÈF3žÈàßþÊT¦žšˆì¾Ü§¬ƒQ–Ó?á -³Ó£³˜Uî:™5.[2Rœe‡ -°=î Ñ*“4M»ŸøL²Ö{A’[L^úô3}ç9q<»%ÖK'¬ÄðœÁ<É8½ÉÂqp7e[½ -Õ% (¿S^…—Ð õ~7Nù¸ÂK“NîvÓ>J°¼ÙCTío¦/«?ÐhOhIuWLj1Dó¸~ã‰Ú6ÿoü°|¦dd…ɶ!²·ŒÿÀ¢_NùŽ£Âg>NÔUĪzýßñçü „Ø›Y\æ‰ú*§ôíƒ`n¼QètN›jöô!œ¬(xòÏ`4I P¿êzýN=\ý‡rΊ°Ë\þ,³+È£J¦8eª!¼MPzT-ÜÃ%ò¼õ$?Ú¤õ‹`0{þ¿"€Mwø&žâŽÅŸ á>ž ªUü*w6/sC*Pͯòò„hðsp3Í+ÂhU[’¶¾"¾£¯âW~ôÿ¹5˜³ÈoŽ¯õEiáŸ-VμgmG#6÷G ›a’ЯˆÜ ¸…lë‰jÉ N¨ùöU¦›öŸòV:…g&>q+‘o*äóµÚÁ46æÈÃdIÝû7÷¿#”ȧdÝ·ÍßïqO?P0‹ÇbÞ”è¸t6­néƒà€Â†—€0ëV—ð©h´ÅVAjlËÂ,›¼ §‘GÌé’Ìô˜0ë*…uüµÀù!¸Yfmå…¡¾Â']%þ ·$4j/ɨ;’G#˯ÄhÁ§žäO²jÞßé‘y¦ÃY»7ÏícbXŸ†ÿõ”ÒñËLÞÿNè±nßxŽ$@–ÄŽéG’%fQ‡˜¤0˜53Óú0†Aµ)ÃÅÎ×i—Œ9#büõóû‰}–+{¤¤™ñ°göº¶¾®7½r¨¿Ô—8!Þ³AÔ_<œ½ó×PãO)Vé§ÿ•hèäÌ>°áÞ@!ò>ÛñŽ7˜â ñžªo=#± .©à;½™arjí¿@÷íÛŠ$*¶$SJ0ú·Ú -6¤ç]9™P²v—Cn›êåq¸ïÒ©ÆÕÔ¶V +H#Їéê^>ñz.»Ø²1Æã¡tÖ’ÂtDz®LåÚª€ÆoŒozµ*žŠY‘dõi$Å z`»fð ΞÂD1LZÚ‰W·¢Qꤽ\3›ØÅô2ÈfgwêA`ÊQºD3WÞ‡C+Œs’cÚ‹#woû]'˜Å[±`â¡o1mW@OVæ¯rK?p‹RË8÷Þ¶y=.e3öL_ßX{õRbë,o>Á#Õmìgp -ã¡$ÒD4®Eœ\Ú^òä¨ÎC xÿUÛ¾žÿrÍ·OÉ„2 Ï‘HÆ<Ð -Tâ+9î¼ToÀÁV§›±âÍÿè{[‡yÚ=|ʽ•Q™aL:PÒ’¿*%Cù¯x@Ì;V¸¤ngi,|~Dí²‚ñVù^zŽócs/bVC PoeAÐ Ì…ÏüõÊ„v]b$}¢(+“U+ëû®m5ö_HÃÞí×6è&ÚÆ)ÛÂÇÄ~&ÎF ¡»éjÍ:ûé’ +ñ›<“‘Ù¤—û¼“|ÍgTÔU\G@þrçím{I(;e²~éà'+¹I¬‹y¹ÍáY798ö}ñBÄ.ۺʎ;„+±AòP2Ø”Nì½’®î@§µøÁ­œD»äí– -¿;—›œµnàv.u$N¦2¾ó28±:Ã2Œzس µÄ$Fø†|cϘ -#ìÐû¬-­lÉÙvœlÊíºÅÑ…ä–q[&Óf—Ñ póØ®*‘ƒZµâƒÿM8H®žlWÀ¾ÐèŸQ¤±…µƒ‚ùÓ ³|8,gÁ²ý‡e -£ ì -ÍíGð¾Óz¼õ+<Œw ó,`â©áŠVå’i{»“Àýýst5×X®ë™—–=>¦½Î(òxu6bQüO9YO§LAxd³ü9÷÷§ù ÿÉFDn%¡@nÔ[JdtÝ€"<«âTxÅàùm÷鄧iPUZEŽàÛªlýs èkDœ÷ïXñÒ1ÒÛ‘ì‘É°!!lX“ˆÇl1îæÍ‘jû—0^à#vн¾gÓøÄ™{®ÃìãÄnÖÔBÚÒ„¬?¨ñúŠ÷¦€rêŠ ûPm Qú´¹=Ö;¥):þþØT0°~’ î?¡5Ãv8Cvez;dEòÌÞ«Uùþú—ªu#ü2‰IsåR€ÁCC’e˜”@ÙFÄ jŽd¶üµì”;VpÑO -±”Å2õX~¹«žG¥&éùãðš9Y‰bÚ@óH¿0ó;`ybxO‰¥O{+|ïB˜‘¢[…=Á_²éݤw&èvíOAÿ2¿†Ec*  e~EÕ¯-;%Y»È%&ª'G—èX⃅».iò5(ëÂ3N%‰wá]•¥@`oq[GCðý -{U-õòÎÊFAÑáôÕ~>íã¤edÄûÈ’`B¿VJ«3¾æP!n¦K¶ïøÙnrêt:ê˜H™Ø#v–9 -®ä•túóÖ®¢®ÚŸ·‚Ð& Ûf"7êñÏ—Z¿yI©î¢OÀ¨*®¢y'n˜Ljî£NXÍsbþdîßÅÄó<%L AŠŒ„Çäþâ¡ö™ý»'à|·þ_ NP5m¾§Ì3ä§Ü\%BzfzÅ3’\5(< 
J¤½G íq:*D¦…}n† ^ŽJÁ% Ô> fàO`½¿1Ü73#›™@n¼‚½ÉÑ–5QœEBä‹ËX„è•ýœchê@l+ó%ù>0ïÊðÖå™*e #¥9>R¾=‰eèÉ–½ž»†k.Úb®3Ķi%×Ïëå¿F¨þ¬\N Æí’3å½k°+>bœ‹©f¸Îë‘fòxmU»Ež4ˆ3ÌŠN5 :x‡†¨òî¼W„õ[ -ÛlFùÒ¬”‰Ø×üÏ°hàŽaûå1Öa/¿´õàî 5"†ãc.ö„ê{\mû9Uá±ø–ÿõÔzÅJPÞÁE¼]GŒ±v²Ñ¹‡Ûïº$Éï³AMD(XÙT1E°j›OŽ2 O¦Ð¥ãŸ8®—Ž¼¹—„pdîÇÕëö™qã.L´êÙâý±®Ø “åÑ¡®†fÁìµï,%˜ -LZ·üè4꨿ñ¶ùÆ#õ KÖEÈ‚S“rÆ…Iæ–5Œ`‚øb~WB),r8j”Igâ\m]OV؉v(çMâäyyÖëz-öfœæNL9’Å^™ÅØ¥ƒîÀ²µè±ˆ:Ó…×|#Äë+4µ-ËŽ X ßì*.¢ëìÉü=÷EFA! Ž„ÕGbT5Ë¿=…vì/g#¯ÓѸ’<ùá»›Þˆ ’¤õ† :°AXkÕ%_J3Vßœ%Í(âØóæòo°ý0¢Ñc£Õë‹WŸ¼>n쨜1ŽÎØ%f U9„ûüØÙm.ÿï§IZ~*–Á¯ø¤Q aýŒ$Ž¨ÖDIÕ’ò6a^TjPøÅ •MP:õ…Áµ¿xÁéÚé3÷Üâ="™l{ --+%šûâ­*V2£¹÷%óÉy«2'v5E²[ߟÓ#Zzq^nÿˆ"–ÌŸÓí¿>Ì‘’ºÝh …š˜½µ¿šØ•ù1ÊüÎ|$ÙÑf~‰„LT²¤h6?´ß¤HÁ.ŒÇdá@‘÷P õLÂ: M )’sù˨Þ\ó'"^–î¯9;5Ið´I½ä=£‘;•§5·†Þ”i9Á¤Q.§óÐzH^Öèo1ÔKF ëË—Ñ}‚ŸæIúÓ]×LÂÎ@Ü©sPõÒH=[¸CÅ®-ëA·µnªDÝ¢_…Ïðð§X{l‘ÿßUÏD•àŽ°þU8¯¦Œk¯”´yVgª -1—ìDG¨äÙf?¸ÅìK†²hÎ=D@·³ÝŸe4Àôo˜òè5ëujPê¼kX©àóOpÌWMi¹(4åmÕ%IÑM Õ>I ã¶i÷o^¢;}RîÍ$ 0Á\÷Ô%‚[XG˜Zä5™ÅN&Y—¬º8ì!¡+ãmàå sdüïÙDf¢<êõyC¼ÖÝÇÇ‹×<¶ÇZnäºåTå•™ó×Kܽ¸bcsÚóLCü™*&íw!£EÁ¥Uô€…þ-^ZMõÕÈi«4èÅ€m¸*Îë‡ø2ç%þvmÌOÜÌùþ<¾ú'Q¾¾&ñÕ¿ô^û=.y‘v„RTÇæí9|'+qu†o¥5(³=‚Zš¡‹ÞÉ×u³"(éñ[¼^̆ÃÊjQs¯oAVÁrn…<Âé"ÀjÛ×6¼'NZ¶æóÝù_eýwÉPìﶞ|¤~P›´896\vÖêö±§.#óÔ¶©fÄòÓþ°ñÓ ÊøöTï™–œ°4oÄÎ&©]±µÌ;Mø÷ó³<ÞbÁx@Å×OzÇc0~×ëöâ&• +”åùNÓdÿ’Î0÷«$‡Œo4Éry¶žÄ©,Û$OÑÁN©£ÑУ¯ó7ɱoY?8ä=šïóýŽßàu£‰?ç·ÈÛhß°tðz–®Ì­ÞâX *,/ HÚùodÃ?ùË_eü/Ŭª& ±3§eV;HÿFÇôÞö–Å2iÄêáDä9[=¨ÂÍŠ*Ú¡Ç刭‡Æ{)!¼ÏÓ>“…§ÌMŸšFsDºjÖiä“;¼Ó™éýa~ÍdZuE!âÑ›3„É@(x.á•$OtVÒxz&ycø´øñ¤çSªx™®ïóðù¬Û¦ 2oÇ÷õ8Ɇ^_ã“rrÞ¡çç[a˜×5kd¬;FY™ÎˆøÙg{tÇ‹‰h¨…¸…ÑP‹’°îõ*ùõy'äCPådAS߉÷‹£»¡¦„o­Çâο$üâˆîèJ¼úú‚oœ<óYãpGÅîûIƒŸ|»mÿEC³ÐüLÅ®ù1µÈ÷£XÁëà)ÏGŽ¯D˜$»è »Zœ÷ÍÒn5„WÚþÄk6æÏ#°yùŸÆvùËœMçüÉ55cõbÒo×<©„<ïœQ#™®ñ°«m]$‚•i»›÷å’8vÔÍwsrZìŠA3 ”;Kê–<îV„ý;I(›C—ÂeÝF"^Àlc!s"y„¾©ûLycoS¸ ôàÞÆlÝFÈV*îÎÂ-Šgä¦:ŽAü¹Ý}Qä@¼H3uQ— +zÕ„Æ%X„òåt—Gÿ1 9¹zÜe Ý#0à‹3öª”A‰WpÄ33n ¦¾£i¨¯¸)fùñ@áxê–É·”0ìoIlmÆ¢Ú©oy•Õ5Ãí2BÇSV×뢵´Ùcβsæi•T¶&iº^f5Rg|6ßÏ­QO‚&8> $:ô®gì˜ôl.±¶ ÆæKWÂŽ˜ Ý%®¥•æ:½~¨$Ì„v‚Ptñ,ñŽ³¿\~õ˜ø¨ÔŒÍ}Ì@)zxOp.äŠß!#!” -Vqqñ£š‰ÿ¹¨ûÙþ.Ùãª0 éIaFÅÆqmí—.3R†ë—D ®¸#²èÄÔÁq;v?$V„_U-½¬’iOãg9>®lh?®²qýˆm!–t߶Š[ðZÐ5bˆ >l -ZÝ+8üÄ›™çV %ÝÐ`ÎR™–š -Ønî{ÏVFÄ4Ì9ÞÏH5 R_©”ÉÂlž(L#¯ýíö¨ f’ÁV™¨æyˆ ïSIÄdCSË ©iLu±Ç«d¥ùØ"&fCüô! 
[unrecoverable binary PDF stream data from a deleted vendored asset omitted]
diff --git a/vendor/requests/ext/requests-logo.png b/vendor/requests/ext/requests-logo.png deleted file mode 100644 index cb4bc642..00000000 Binary files a/vendor/requests/ext/requests-logo.png and /dev/null differ diff --git a/vendor/requests/ext/requests-logo.svg b/vendor/requests/ext/requests-logo.svg deleted file mode 100644 index 80406e7c..00000000 --- a/vendor/requests/ext/requests-logo.svg +++ /dev/null @@ -1 +0,0 @@ -requestsRequestshumanshttp for \ No newline at end of file diff --git a/vendor/requests/ext/ss-compressed.png b/vendor/requests/ext/ss-compressed.png deleted file mode 100644 index 149016d3..00000000 Binary files a/vendor/requests/ext/ss-compressed.png and /dev/null differ diff --git a/vendor/requests/ext/ss.png b/vendor/requests/ext/ss.png deleted file mode 100644 index 149016d3..00000000 Binary files a/vendor/requests/ext/ss.png and /dev/null differ diff --git a/vendor/requests/pytest.ini b/vendor/requests/pytest.ini deleted file mode 100644 index 13fa0000..00000000 --- a/vendor/requests/pytest.ini +++ /dev/null @@ -1,3 +0,0 @@ -[pytest] -addopts = -p no:warnings --doctest-modules -doctest_optionflags= NORMALIZE_WHITESPACE ELLIPSIS \ No newline at end of file diff --git a/vendor/requests/requests/__init__.py b/vendor/requests/requests/__init__.py deleted file mode 100644 index
0ac7713b..00000000 --- a/vendor/requests/requests/__init__.py +++ /dev/null @@ -1,152 +0,0 @@ -# -*- coding: utf-8 -*- - -# __ -# /__) _ _ _ _ _/ _ -# / ( (- (/ (/ (- _) / _) -# / - -""" -Requests HTTP Library -~~~~~~~~~~~~~~~~~~~~~ - -Requests is an HTTP library, written in Python, for human beings. -Basic GET usage: - - >>> import requests - >>> r = requests.get('https://www.python.org') - >>> r.status_code - 200 - >>> b'Python is a programming language' in r.content - True - -... or POST: - - >>> payload = dict(key1='value1', key2='value2') - >>> r = requests.post('https://httpbin.org/post', data=payload) - >>> print(r.text) - { - ... - "form": { - "key1": "value1", - "key2": "value2" - }, - ... - } - -The other HTTP methods are supported - see `requests.api`. Full documentation -is at . - -:copyright: (c) 2017 by Kenneth Reitz. -:license: Apache 2.0, see LICENSE for more details. -""" - -import urllib3 -import warnings -from .exceptions import RequestsDependencyWarning - -try: - from charset_normalizer import __version__ as charset_normalizer_version -except ImportError: - charset_normalizer_version = None - -try: - from chardet import __version__ as chardet_version -except ImportError: - chardet_version = None - -def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version): - urllib3_version = urllib3_version.split('.') - assert urllib3_version != ['dev'] # Verify urllib3 isn't installed from git. - - # Sometimes, urllib3 only reports its version as 16.1. - if len(urllib3_version) == 2: - urllib3_version.append('0') - - # Check urllib3 for compatibility. - major, minor, patch = urllib3_version # noqa: F811 - major, minor, patch = int(major), int(minor), int(patch) - # urllib3 >= 1.21.1, <= 1.26 - assert major == 1 - assert minor >= 21 - assert minor <= 26 - - # Check charset_normalizer for compatibility. - if chardet_version: - major, minor, patch = chardet_version.split('.')[:3] - major, minor, patch = int(major), int(minor), int(patch) - # chardet_version >= 3.0.2, < 5.0.0 - assert (3, 0, 2) <= (major, minor, patch) < (5, 0, 0) - elif charset_normalizer_version: - major, minor, patch = charset_normalizer_version.split('.')[:3] - major, minor, patch = int(major), int(minor), int(patch) - # charset_normalizer >= 2.0.0 < 3.0.0 - assert (2, 0, 0) <= (major, minor, patch) < (3, 0, 0) - else: - raise Exception("You need either charset_normalizer or chardet installed") - -def _check_cryptography(cryptography_version): - # cryptography < 1.3.4 - try: - cryptography_version = list(map(int, cryptography_version.split('.'))) - except ValueError: - return - - if cryptography_version < [1, 3, 4]: - warning = 'Old version of cryptography ({}) may cause slowdown.'.format(cryptography_version) - warnings.warn(warning, RequestsDependencyWarning) - -# Check imported dependencies for compatibility. -try: - check_compatibility(urllib3.__version__, chardet_version, charset_normalizer_version) -except (AssertionError, ValueError): - warnings.warn("urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported " - "version!".format(urllib3.__version__, chardet_version, charset_normalizer_version), - RequestsDependencyWarning) - -# Attempt to enable urllib3's fallback for SNI support -# if the standard library doesn't support SNI or the -# 'ssl' library isn't available. 
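As an aside to the deleted compatibility shim above: check_compatibility gates on parsed three-part version tuples, padding short version strings before comparing. A minimal standalone sketch of that pattern, using hypothetical version strings (parse_version is an illustrative helper, not part of requests):

    def parse_version(v):
        # "1.26" -> (1, 26, 0): keep at most three components and pad with
        # zeros, as check_compatibility does when urllib3 reports only
        # major.minor.
        parts = v.split('.')[:3]
        while len(parts) < 3:
            parts.append('0')
        return tuple(int(p) for p in parts)

    # The chardet / charset_normalizer windows are plain tuple comparisons:
    assert (3, 0, 2) <= parse_version('4.0.0') < (5, 0, 0)   # chardet >= 3.0.2, < 5.0.0
    assert (2, 0, 0) <= parse_version('2.0.12') < (3, 0, 0)  # charset_normalizer 2.x
    assert parse_version('1.26') == (1, 26, 0)               # two-part versions are padded

The deleted listing resumes below with the SNI fallback block the comment above announces.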
-try: - try: - import ssl - except ImportError: - ssl = None - - if not getattr(ssl, "HAS_SNI", False): - from urllib3.contrib import pyopenssl - pyopenssl.inject_into_urllib3() - - # Check cryptography version - from cryptography import __version__ as cryptography_version - _check_cryptography(cryptography_version) -except ImportError: - pass - -# urllib3's DependencyWarnings should be silenced. -from urllib3.exceptions import DependencyWarning -warnings.simplefilter('ignore', DependencyWarning) - -from .__version__ import __title__, __description__, __url__, __version__ -from .__version__ import __build__, __author__, __author_email__, __license__ -from .__version__ import __copyright__, __cake__ - -from . import utils -from . import packages -from .models import Request, Response, PreparedRequest -from .api import request, get, head, post, patch, put, delete, options -from .sessions import session, Session -from .status_codes import codes -from .exceptions import ( - RequestException, Timeout, URLRequired, - TooManyRedirects, HTTPError, ConnectionError, - FileModeWarning, ConnectTimeout, ReadTimeout -) - -# Set default logging handler to avoid "No handler found" warnings. -import logging -from logging import NullHandler - -logging.getLogger(__name__).addHandler(NullHandler()) - -# FileModeWarnings go off per the default. -warnings.simplefilter('default', FileModeWarning, append=True) diff --git a/vendor/requests/requests/__version__.py b/vendor/requests/requests/__version__.py deleted file mode 100644 index 0d7cde1d..00000000 --- a/vendor/requests/requests/__version__.py +++ /dev/null @@ -1,14 +0,0 @@ -# .-. .-. .-. . . .-. .-. .-. .-. -# |( |- |.| | | |- `-. | `-. -# ' ' `-' `-`.`-' `-' `-' ' `-' - -__title__ = 'requests' -__description__ = 'Python HTTP for Humans.' -__url__ = 'https://requests.readthedocs.io' -__version__ = '2.26.0' -__build__ = 0x022600 -__author__ = 'Kenneth Reitz' -__author_email__ = 'me@kennethreitz.org' -__license__ = 'Apache 2.0' -__copyright__ = 'Copyright 2020 Kenneth Reitz' -__cake__ = u'\u2728 \U0001f370 \u2728' diff --git a/vendor/requests/requests/_internal_utils.py b/vendor/requests/requests/_internal_utils.py deleted file mode 100644 index 759d9a56..00000000 --- a/vendor/requests/requests/_internal_utils.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests._internal_utils -~~~~~~~~~~~~~~ - -Provides utility functions that are consumed internally by Requests -which depend on extremely few external helpers (such as compat) -""" - -from .compat import is_py2, builtin_str, str - - -def to_native_string(string, encoding='ascii'): - """Given a string object, regardless of type, returns a representation of - that string in the native string type, encoding and decoding where - necessary. This assumes ASCII unless told otherwise. - """ - if isinstance(string, builtin_str): - out = string - else: - if is_py2: - out = string.encode(encoding) - else: - out = string.decode(encoding) - - return out - - -def unicode_is_ascii(u_string): - """Determine if unicode string only contains ASCII characters. - - :param str u_string: unicode string to check. Must be unicode - and not Python 2 `str`. 
- :rtype: bool - """ - assert isinstance(u_string, str) - try: - u_string.encode('ascii') - return True - except UnicodeEncodeError: - return False diff --git a/vendor/requests/requests/adapters.py b/vendor/requests/requests/adapters.py deleted file mode 100644 index cfe4a344..00000000 --- a/vendor/requests/requests/adapters.py +++ /dev/null @@ -1,538 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.adapters -~~~~~~~~~~~~~~~~~ - -This module contains the transport adapters that Requests uses to define -and maintain connections. -""" - -import os.path -import socket - -from urllib3.poolmanager import PoolManager, proxy_from_url -from urllib3.response import HTTPResponse -from urllib3.util import parse_url -from urllib3.util import Timeout as TimeoutSauce -from urllib3.util.retry import Retry -from urllib3.exceptions import ClosedPoolError -from urllib3.exceptions import ConnectTimeoutError -from urllib3.exceptions import HTTPError as _HTTPError -from urllib3.exceptions import MaxRetryError -from urllib3.exceptions import NewConnectionError -from urllib3.exceptions import ProxyError as _ProxyError -from urllib3.exceptions import ProtocolError -from urllib3.exceptions import ReadTimeoutError -from urllib3.exceptions import SSLError as _SSLError -from urllib3.exceptions import ResponseError -from urllib3.exceptions import LocationValueError - -from .models import Response -from .compat import urlparse, basestring -from .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths, - get_encoding_from_headers, prepend_scheme_if_needed, - get_auth_from_url, urldefragauth, select_proxy) -from .structures import CaseInsensitiveDict -from .cookies import extract_cookies_to_jar -from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError, - ProxyError, RetryError, InvalidSchema, InvalidProxyURL, - InvalidURL) -from .auth import _basic_auth_str -from .certs import contents as ca_bundle_contents - -try: - from urllib3.contrib.socks import SOCKSProxyManager -except ImportError: - def SOCKSProxyManager(*args, **kwargs): - raise InvalidSchema("Missing dependencies for SOCKS support.") - -DEFAULT_POOLBLOCK = False -DEFAULT_POOLSIZE = 10 -DEFAULT_RETRIES = 0 -DEFAULT_POOL_TIMEOUT = None - - -class BaseAdapter(object): - """The Base Transport Adapter""" - - def __init__(self): - super(BaseAdapter, self).__init__() - - def send(self, request, stream=False, timeout=None, verify=True, - cert=None, proxies=None): - """Sends PreparedRequest object. Returns Response object. - - :param request: The :class:`PreparedRequest ` being sent. - :param stream: (optional) Whether to stream the request content. - :param timeout: (optional) How long to wait for the server to send - data before giving up, as a float, or a :ref:`(connect timeout, - read timeout) ` tuple. - :type timeout: float or tuple - :param verify: (optional) Either a boolean, in which case it controls whether we verify - the server's TLS certificate, or a string, in which case it must be a path - to a CA bundle to use - :param cert: (optional) Any user-provided SSL certificate to be trusted. - :param proxies: (optional) The proxies dictionary to apply to the request. - """ - raise NotImplementedError - - def close(self): - """Cleans up adapter specific items.""" - raise NotImplementedError - - -class HTTPAdapter(BaseAdapter): - """The built-in HTTP Adapter for urllib3. - - Provides a general-case interface for Requests sessions to contact HTTP and - HTTPS urls by implementing the Transport Adapter interface. 
This class will - usually be created by the :class:`Session ` class under the - covers. - - :param pool_connections: The number of urllib3 connection pools to cache. - :param pool_maxsize: The maximum number of connections to save in the pool. - :param max_retries: The maximum number of retries each connection - should attempt. Note, this applies only to failed DNS lookups, socket - connections and connection timeouts, never to requests where data has - made it to the server. By default, Requests does not retry failed - connections. If you need granular control over the conditions under - which we retry a request, import urllib3's ``Retry`` class and pass - that instead. - :param pool_block: Whether the connection pool should block for connections. - - Usage:: - - >>> import requests - >>> s = requests.Session() - >>> a = requests.adapters.HTTPAdapter(max_retries=3) - >>> s.mount('http://', a) - """ - __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize', - '_pool_block'] - - def __init__(self, pool_connections=DEFAULT_POOLSIZE, - pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES, - pool_block=DEFAULT_POOLBLOCK): - if max_retries == DEFAULT_RETRIES: - self.max_retries = Retry(0, read=False) - else: - self.max_retries = Retry.from_int(max_retries) - self.config = {} - self.proxy_manager = {} - - super(HTTPAdapter, self).__init__() - - self._pool_connections = pool_connections - self._pool_maxsize = pool_maxsize - self._pool_block = pool_block - - self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block) - - def __getstate__(self): - return {attr: getattr(self, attr, None) for attr in self.__attrs__} - - def __setstate__(self, state): - # Can't handle by adding 'proxy_manager' to self.__attrs__ because - # self.poolmanager uses a lambda function, which isn't pickleable. - self.proxy_manager = {} - self.config = {} - - for attr, value in state.items(): - setattr(self, attr, value) - - self.init_poolmanager(self._pool_connections, self._pool_maxsize, - block=self._pool_block) - - def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs): - """Initializes a urllib3 PoolManager. - - This method should not be called from user code, and is only - exposed for use when subclassing the - :class:`HTTPAdapter `. - - :param connections: The number of urllib3 connection pools to cache. - :param maxsize: The maximum number of connections to save in the pool. - :param block: Block when no free connections are available. - :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager. - """ - # save these values for pickling - self._pool_connections = connections - self._pool_maxsize = maxsize - self._pool_block = block - - self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize, - block=block, strict=True, **pool_kwargs) - - def proxy_manager_for(self, proxy, **proxy_kwargs): - """Return urllib3 ProxyManager for the given proxy. - - This method should not be called from user code, and is only - exposed for use when subclassing the - :class:`HTTPAdapter `. - - :param proxy: The proxy to return a urllib3 ProxyManager for. - :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager. 
- :returns: ProxyManager - :rtype: urllib3.ProxyManager - """ - if proxy in self.proxy_manager: - manager = self.proxy_manager[proxy] - elif proxy.lower().startswith('socks'): - username, password = get_auth_from_url(proxy) - manager = self.proxy_manager[proxy] = SOCKSProxyManager( - proxy, - username=username, - password=password, - num_pools=self._pool_connections, - maxsize=self._pool_maxsize, - block=self._pool_block, - **proxy_kwargs - ) - else: - proxy_headers = self.proxy_headers(proxy) - manager = self.proxy_manager[proxy] = proxy_from_url( - proxy, - proxy_headers=proxy_headers, - num_pools=self._pool_connections, - maxsize=self._pool_maxsize, - block=self._pool_block, - **proxy_kwargs) - - return manager - - def cert_verify(self, conn, url, verify, cert): - """Verify a SSL certificate. This method should not be called from user - code, and is only exposed for use when subclassing the - :class:`HTTPAdapter `. - - :param conn: The urllib3 connection object associated with the cert. - :param url: The requested URL. - :param verify: Either a boolean, in which case it controls whether we verify - the server's TLS certificate, or a string, in which case it must be a path - to a CA bundle to use - :param cert: The SSL certificate to verify. - """ - if url.lower().startswith('https') and verify: - - cert_loc = None - cert_data = None - - # Allow self-specified cert location. - if verify is not True: - cert_loc = verify - - if not cert_loc: - cert_data = ca_bundle_contents() - - if not cert_data and (not cert_loc or not os.path.exists(cert_loc)): - raise IOError("Could not find a suitable TLS CA certificate bundle, " - "invalid path: {}".format(cert_loc)) - - conn.cert_reqs = 'CERT_REQUIRED' - - if cert_data: - conn.ca_cert_data = cert_data - elif not os.path.isdir(cert_loc): - conn.ca_certs = cert_loc - else: - conn.ca_cert_dir = cert_loc - else: - conn.cert_reqs = 'CERT_NONE' - conn.ca_certs = None - conn.ca_cert_dir = None - conn.ca_cert_data = None - - if cert: - if not isinstance(cert, basestring): - conn.cert_file = cert[0] - conn.key_file = cert[1] - else: - conn.cert_file = cert - conn.key_file = None - if conn.cert_file and not os.path.exists(conn.cert_file): - raise IOError("Could not find the TLS certificate file, " - "invalid path: {}".format(conn.cert_file)) - if conn.key_file and not os.path.exists(conn.key_file): - raise IOError("Could not find the TLS key file, " - "invalid path: {}".format(conn.key_file)) - - def build_response(self, req, resp): - """Builds a :class:`Response ` object from a urllib3 - response. This should not be called from user code, and is only exposed - for use when subclassing the - :class:`HTTPAdapter ` - - :param req: The :class:`PreparedRequest ` used to generate the response. - :param resp: The urllib3 response object. - :rtype: requests.Response - """ - response = Response() - - # Fallback to None if there's no status_code, for whatever reason. - response.status_code = getattr(resp, 'status', None) - - # Make headers case-insensitive. - response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {})) - - # Set encoding. - response.encoding = get_encoding_from_headers(response.headers) - response.raw = resp - response.reason = response.raw.reason - - if isinstance(req.url, bytes): - response.url = req.url.decode('utf-8') - else: - response.url = req.url - - # Add new cookies from the server. - extract_cookies_to_jar(response.cookies, req, resp) - - # Give the Response some context. 
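As an aside before build_response resumes below with that context attachment: the deleted cert_verify above resolves the verify argument in three ways, bundled CA data for True, a caller-supplied path for strings, and no verification for falsy values. A condensed standalone sketch of that resolution order (resolve_ca and the paths here are hypothetical, not the requests API):

    def resolve_ca(verify, bundled_ca_bytes):
        # True  -> fall back to the CA data bundled by ca_bundle_contents()
        # str   -> treat as a caller-specified CA bundle file or directory
        # falsy -> certificate verification is disabled (CERT_NONE above)
        if verify is True:
            return ('data', bundled_ca_bytes)
        if isinstance(verify, str):
            return ('path', verify)
        return ('disabled', None)

    assert resolve_ca(True, b'fake-ca-pem')[0] == 'data'
    assert resolve_ca('/etc/ssl/certs/ca.pem', b'fake-ca-pem') == ('path', '/etc/ssl/certs/ca.pem')
    assert resolve_ca(False, b'fake-ca-pem')[0] == 'disabled'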
- response.request = req - response.connection = self - - return response - - def get_connection(self, url, proxies=None): - """Returns a urllib3 connection for the given URL. This should not be - called from user code, and is only exposed for use when subclassing the - :class:`HTTPAdapter `. - - :param url: The URL to connect to. - :param proxies: (optional) A Requests-style dictionary of proxies used on this request. - :rtype: urllib3.ConnectionPool - """ - proxy = select_proxy(url, proxies) - - if proxy: - proxy = prepend_scheme_if_needed(proxy, 'http') - proxy_url = parse_url(proxy) - if not proxy_url.host: - raise InvalidProxyURL("Please check proxy URL. It is malformed" - " and could be missing the host.") - proxy_manager = self.proxy_manager_for(proxy) - conn = proxy_manager.connection_from_url(url) - else: - # Only scheme should be lower case - parsed = urlparse(url) - url = parsed.geturl() - conn = self.poolmanager.connection_from_url(url) - - return conn - - def close(self): - """Disposes of any internal state. - - Currently, this closes the PoolManager and any active ProxyManager, - which closes any pooled connections. - """ - self.poolmanager.clear() - for proxy in self.proxy_manager.values(): - proxy.clear() - - def request_url(self, request, proxies): - """Obtain the url to use when making the final request. - - If the message is being sent through a HTTP proxy, the full URL has to - be used. Otherwise, we should only use the path portion of the URL. - - This should not be called from user code, and is only exposed for use - when subclassing the - :class:`HTTPAdapter `. - - :param request: The :class:`PreparedRequest ` being sent. - :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs. - :rtype: str - """ - proxy = select_proxy(request.url, proxies) - scheme = urlparse(request.url).scheme - - is_proxied_http_request = (proxy and scheme != 'https') - using_socks_proxy = False - if proxy: - proxy_scheme = urlparse(proxy).scheme.lower() - using_socks_proxy = proxy_scheme.startswith('socks') - - url = request.path_url - if is_proxied_http_request and not using_socks_proxy: - url = urldefragauth(request.url) - - return url - - def add_headers(self, request, **kwargs): - """Add any headers needed by the connection. As of v2.0 this does - nothing by default, but is left for overriding by users that subclass - the :class:`HTTPAdapter `. - - This should not be called from user code, and is only exposed for use - when subclassing the - :class:`HTTPAdapter `. - - :param request: The :class:`PreparedRequest ` to add headers to. - :param kwargs: The keyword arguments from the call to send(). - """ - pass - - def proxy_headers(self, proxy): - """Returns a dictionary of the headers to add to any request sent - through a proxy. This works with urllib3 magic to ensure that they are - correctly sent to the proxy, rather than in a tunnelled request if - CONNECT is being used. - - This should not be called from user code, and is only exposed for use - when subclassing the - :class:`HTTPAdapter `. - - :param proxy: The url of the proxy being used for this request. - :rtype: dict - """ - headers = {} - username, password = get_auth_from_url(proxy) - - if username: - headers['Proxy-Authorization'] = _basic_auth_str(username, - password) - - return headers - - def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None): - """Sends PreparedRequest object. Returns Response object. - - :param request: The :class:`PreparedRequest ` being sent. 
- :param stream: (optional) Whether to stream the request content. - :param timeout: (optional) How long to wait for the server to send - data before giving up, as a float, or a :ref:`(connect timeout, - read timeout) ` tuple. - :type timeout: float or tuple or urllib3 Timeout object - :param verify: (optional) Either a boolean, in which case it controls whether - we verify the server's TLS certificate, or a string, in which case it - must be a path to a CA bundle to use - :param cert: (optional) Any user-provided SSL certificate to be trusted. - :param proxies: (optional) The proxies dictionary to apply to the request. - :rtype: requests.Response - """ - - try: - conn = self.get_connection(request.url, proxies) - except LocationValueError as e: - raise InvalidURL(e, request=request) - - self.cert_verify(conn, request.url, verify, cert) - url = self.request_url(request, proxies) - self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies) - - chunked = not (request.body is None or 'Content-Length' in request.headers) - - if isinstance(timeout, tuple): - try: - connect, read = timeout - timeout = TimeoutSauce(connect=connect, read=read) - except ValueError as e: - # this may raise a string formatting error. - err = ("Invalid timeout {}. Pass a (connect, read) " - "timeout tuple, or a single float to set " - "both timeouts to the same value".format(timeout)) - raise ValueError(err) - elif isinstance(timeout, TimeoutSauce): - pass - else: - timeout = TimeoutSauce(connect=timeout, read=timeout) - - try: - if not chunked: - resp = conn.urlopen( - method=request.method, - url=url, - body=request.body, - headers=request.headers, - redirect=False, - assert_same_host=False, - preload_content=False, - decode_content=False, - retries=self.max_retries, - timeout=timeout - ) - - # Send the request. - else: - if hasattr(conn, 'proxy_pool'): - conn = conn.proxy_pool - - low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT) - - try: - low_conn.putrequest(request.method, - url, - skip_accept_encoding=True) - - for header, value in request.headers.items(): - low_conn.putheader(header, value) - - low_conn.endheaders() - - for i in request.body: - low_conn.send(hex(len(i))[2:].encode('utf-8')) - low_conn.send(b'\r\n') - low_conn.send(i) - low_conn.send(b'\r\n') - low_conn.send(b'0\r\n\r\n') - - # Receive the response from the server - try: - # For Python 2.7, use buffering of HTTP responses - r = low_conn.getresponse(buffering=True) - except TypeError: - # For compatibility with Python 3.3+ - r = low_conn.getresponse() - - resp = HTTPResponse.from_httplib( - r, - pool=conn, - connection=low_conn, - preload_content=False, - decode_content=False - ) - except: - # If we hit any problems here, clean up the connection. - # Then, reraise so that we can handle the actual exception. - low_conn.close() - raise - - except (ProtocolError, socket.error) as err: - raise ConnectionError(err, request=request) - - except MaxRetryError as e: - if isinstance(e.reason, ConnectTimeoutError): - # TODO: Remove this in 3.0.0: see #2811 - if not isinstance(e.reason, NewConnectionError): - raise ConnectTimeout(e, request=request) - - if isinstance(e.reason, ResponseError): - raise RetryError(e, request=request) - - if isinstance(e.reason, _ProxyError): - raise ProxyError(e, request=request) - - if isinstance(e.reason, _SSLError): - # This branch is for urllib3 v1.22 and later. 
- raise SSLError(e, request=request) - - raise ConnectionError(e, request=request) - - except ClosedPoolError as e: - raise ConnectionError(e, request=request) - - except _ProxyError as e: - raise ProxyError(e) - - except (_SSLError, _HTTPError) as e: - if isinstance(e, _SSLError): - # This branch is for urllib3 versions earlier than v1.22 - raise SSLError(e, request=request) - elif isinstance(e, ReadTimeoutError): - raise ReadTimeout(e, request=request) - else: - raise - - return self.build_response(request, resp) diff --git a/vendor/requests/requests/api.py b/vendor/requests/requests/api.py deleted file mode 100644 index 4cba90ee..00000000 --- a/vendor/requests/requests/api.py +++ /dev/null @@ -1,159 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.api -~~~~~~~~~~~~ - -This module implements the Requests API. - -:copyright: (c) 2012 by Kenneth Reitz. -:license: Apache2, see LICENSE for more details. -""" - -from . import sessions - - -def request(method, url, **kwargs): - """Constructs and sends a :class:`Request `. - - :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``. - :param url: URL for the new :class:`Request` object. - :param params: (optional) Dictionary, list of tuples or bytes to send - in the query string for the :class:`Request`. - :param data: (optional) Dictionary, list of tuples, bytes, or file-like - object to send in the body of the :class:`Request`. - :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. - :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. - :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. - :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. - ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` - or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string - defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers - to add for the file. - :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. - :param timeout: (optional) How many seconds to wait for the server to send data - before giving up, as a float, or a :ref:`(connect timeout, read - timeout) ` tuple. - :type timeout: float or tuple - :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``. - :type allow_redirects: bool - :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. - :param verify: (optional) Either a boolean, in which case it controls whether we verify - the server's TLS certificate, or a string, in which case it must be a path - to a CA bundle to use. Defaults to ``True``. - :param stream: (optional) if ``False``, the response content will be immediately downloaded. - :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. 
- :return: :class:`Response ` object - :rtype: requests.Response - - Usage:: - - >>> import requests - >>> req = requests.request('GET', 'https://httpbin.org/get') - >>> req - - """ - - # By using the 'with' statement we are sure the session is closed, thus we - # avoid leaving sockets open which can trigger a ResourceWarning in some - # cases, and look like a memory leak in others. - with sessions.Session() as session: - return session.request(method=method, url=url, **kwargs) - - -def get(url, params=None, **kwargs): - r"""Sends a GET request. - - :param url: URL for the new :class:`Request` object. - :param params: (optional) Dictionary, list of tuples or bytes to send - in the query string for the :class:`Request`. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response ` object - :rtype: requests.Response - """ - - return request('get', url, params=params, **kwargs) - - -def options(url, **kwargs): - r"""Sends an OPTIONS request. - - :param url: URL for the new :class:`Request` object. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response ` object - :rtype: requests.Response - """ - - return request('options', url, **kwargs) - - -def head(url, **kwargs): - r"""Sends a HEAD request. - - :param url: URL for the new :class:`Request` object. - :param \*\*kwargs: Optional arguments that ``request`` takes. If - `allow_redirects` is not provided, it will be set to `False` (as - opposed to the default :meth:`request` behavior). - :return: :class:`Response ` object - :rtype: requests.Response - """ - - kwargs.setdefault('allow_redirects', False) - return request('head', url, **kwargs) - - -def post(url, data=None, json=None, **kwargs): - r"""Sends a POST request. - - :param url: URL for the new :class:`Request` object. - :param data: (optional) Dictionary, list of tuples, bytes, or file-like - object to send in the body of the :class:`Request`. - :param json: (optional) json data to send in the body of the :class:`Request`. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response ` object - :rtype: requests.Response - """ - - return request('post', url, data=data, json=json, **kwargs) - - -def put(url, data=None, **kwargs): - r"""Sends a PUT request. - - :param url: URL for the new :class:`Request` object. - :param data: (optional) Dictionary, list of tuples, bytes, or file-like - object to send in the body of the :class:`Request`. - :param json: (optional) json data to send in the body of the :class:`Request`. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response ` object - :rtype: requests.Response - """ - - return request('put', url, data=data, **kwargs) - - -def patch(url, data=None, **kwargs): - r"""Sends a PATCH request. - - :param url: URL for the new :class:`Request` object. - :param data: (optional) Dictionary, list of tuples, bytes, or file-like - object to send in the body of the :class:`Request`. - :param json: (optional) json data to send in the body of the :class:`Request`. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response ` object - :rtype: requests.Response - """ - - return request('patch', url, data=data, **kwargs) - - -def delete(url, **kwargs): - r"""Sends a DELETE request. - - :param url: URL for the new :class:`Request` object. - :param \*\*kwargs: Optional arguments that ``request`` takes. 
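Note that head() above is the one helper that flips the allow_redirects default to False, so redirect responses come back as-is. A quick illustration, assuming httpbin.org is reachable:

import requests

r = requests.get("https://httpbin.org/redirect/1")
print(r.status_code, len(r.history))   # 200 1  (redirect was followed)

r = requests.head("https://httpbin.org/redirect/1")
print(r.status_code, len(r.history))   # 302 0  (head() leaves redirects alone)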
- :return: :class:`Response ` object - :rtype: requests.Response - """ - - return request('delete', url, **kwargs) diff --git a/vendor/requests/requests/auth.py b/vendor/requests/requests/auth.py deleted file mode 100644 index eeface39..00000000 --- a/vendor/requests/requests/auth.py +++ /dev/null @@ -1,305 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.auth -~~~~~~~~~~~~~ - -This module contains the authentication handlers for Requests. -""" - -import os -import re -import time -import hashlib -import threading -import warnings - -from base64 import b64encode - -from .compat import urlparse, str, basestring -from .cookies import extract_cookies_to_jar -from ._internal_utils import to_native_string -from .utils import parse_dict_header - -CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded' -CONTENT_TYPE_MULTI_PART = 'multipart/form-data' - - -def _basic_auth_str(username, password): - """Returns a Basic Auth string.""" - - # "I want us to put a big-ol' comment on top of it that - # says that this behaviour is dumb but we need to preserve - # it because people are relying on it." - # - Lukasa - # - # These are here solely to maintain backwards compatibility - # for things like ints. This will be removed in 3.0.0. - if not isinstance(username, basestring): - warnings.warn( - "Non-string usernames will no longer be supported in Requests " - "3.0.0. Please convert the object you've passed in ({!r}) to " - "a string or bytes object in the near future to avoid " - "problems.".format(username), - category=DeprecationWarning, - ) - username = str(username) - - if not isinstance(password, basestring): - warnings.warn( - "Non-string passwords will no longer be supported in Requests " - "3.0.0. Please convert the object you've passed in ({!r}) to " - "a string or bytes object in the near future to avoid " - "problems.".format(type(password)), - category=DeprecationWarning, - ) - password = str(password) - # -- End Removal -- - - if isinstance(username, str): - username = username.encode('latin1') - - if isinstance(password, str): - password = password.encode('latin1') - - authstr = 'Basic ' + to_native_string( - b64encode(b':'.join((username, password))).strip() - ) - - return authstr - - -class AuthBase(object): - """Base class that all auth implementations derive from""" - - def __call__(self, r): - raise NotImplementedError('Auth hooks must be callable.') - - -class HTTPBasicAuth(AuthBase): - """Attaches HTTP Basic Authentication to the given Request object.""" - - def __init__(self, username, password): - self.username = username - self.password = password - - def __eq__(self, other): - return all([ - self.username == getattr(other, 'username', None), - self.password == getattr(other, 'password', None) - ]) - - def __ne__(self, other): - return not self == other - - def __call__(self, r): - r.headers['Authorization'] = _basic_auth_str(self.username, self.password) - return r - - -class HTTPProxyAuth(HTTPBasicAuth): - """Attaches HTTP Proxy Authentication to a given Request object.""" - - def __call__(self, r): - r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password) - return r - - -class HTTPDigestAuth(AuthBase): - """Attaches HTTP Digest Authentication to the given Request object.""" - - def __init__(self, username, password): - self.username = username - self.password = password - # Keep state in per-thread local storage - self._thread_local = threading.local() - - def init_per_thread_state(self): - # Ensure state is initialized just once per-thread - 
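The AuthBase contract above amounts to "a callable that receives the prepared request and returns it". A minimal sketch of a custom scheme; the TokenAuth class and the token value are hypothetical:

import requests
from requests.auth import AuthBase

class TokenAuth(AuthBase):
    """Hypothetical bearer-token auth, following the AuthBase contract."""

    def __init__(self, token):
        self.token = token

    def __call__(self, r):
        # r is the PreparedRequest; mutate its headers and return it.
        r.headers["Authorization"] = "Bearer " + self.token
        return r

requests.get("https://httpbin.org/headers", auth=TokenAuth("s3cr3t"))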
if not hasattr(self._thread_local, 'init'): - self._thread_local.init = True - self._thread_local.last_nonce = '' - self._thread_local.nonce_count = 0 - self._thread_local.chal = {} - self._thread_local.pos = None - self._thread_local.num_401_calls = None - - def build_digest_header(self, method, url): - """ - :rtype: str - """ - - realm = self._thread_local.chal['realm'] - nonce = self._thread_local.chal['nonce'] - qop = self._thread_local.chal.get('qop') - algorithm = self._thread_local.chal.get('algorithm') - opaque = self._thread_local.chal.get('opaque') - hash_utf8 = None - - if algorithm is None: - _algorithm = 'MD5' - else: - _algorithm = algorithm.upper() - # lambdas assume digest modules are imported at the top level - if _algorithm == 'MD5' or _algorithm == 'MD5-SESS': - def md5_utf8(x): - if isinstance(x, str): - x = x.encode('utf-8') - return hashlib.md5(x).hexdigest() - hash_utf8 = md5_utf8 - elif _algorithm == 'SHA': - def sha_utf8(x): - if isinstance(x, str): - x = x.encode('utf-8') - return hashlib.sha1(x).hexdigest() - hash_utf8 = sha_utf8 - elif _algorithm == 'SHA-256': - def sha256_utf8(x): - if isinstance(x, str): - x = x.encode('utf-8') - return hashlib.sha256(x).hexdigest() - hash_utf8 = sha256_utf8 - elif _algorithm == 'SHA-512': - def sha512_utf8(x): - if isinstance(x, str): - x = x.encode('utf-8') - return hashlib.sha512(x).hexdigest() - hash_utf8 = sha512_utf8 - - KD = lambda s, d: hash_utf8("%s:%s" % (s, d)) - - if hash_utf8 is None: - return None - - # XXX not implemented yet - entdig = None - p_parsed = urlparse(url) - #: path is request-uri defined in RFC 2616 which should not be empty - path = p_parsed.path or "/" - if p_parsed.query: - path += '?' + p_parsed.query - - A1 = '%s:%s:%s' % (self.username, realm, self.password) - A2 = '%s:%s' % (method, path) - - HA1 = hash_utf8(A1) - HA2 = hash_utf8(A2) - - if nonce == self._thread_local.last_nonce: - self._thread_local.nonce_count += 1 - else: - self._thread_local.nonce_count = 1 - ncvalue = '%08x' % self._thread_local.nonce_count - s = str(self._thread_local.nonce_count).encode('utf-8') - s += nonce.encode('utf-8') - s += time.ctime().encode('utf-8') - s += os.urandom(8) - - cnonce = (hashlib.sha1(s).hexdigest()[:16]) - if _algorithm == 'MD5-SESS': - HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce)) - - if not qop: - respdig = KD(HA1, "%s:%s" % (nonce, HA2)) - elif qop == 'auth' or 'auth' in qop.split(','): - noncebit = "%s:%s:%s:%s:%s" % ( - nonce, ncvalue, cnonce, 'auth', HA2 - ) - respdig = KD(HA1, noncebit) - else: - # XXX handle auth-int. - return None - - self._thread_local.last_nonce = nonce - - # XXX should the partial digests be encoded too? - base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \ - 'response="%s"' % (self.username, realm, nonce, path, respdig) - if opaque: - base += ', opaque="%s"' % opaque - if algorithm: - base += ', algorithm="%s"' % algorithm - if entdig: - base += ', digest="%s"' % entdig - if qop: - base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce) - - return 'Digest %s' % (base) - - def handle_redirect(self, r, **kwargs): - """Reset num_401_calls counter on redirects.""" - if r.is_redirect: - self._thread_local.num_401_calls = 1 - - def handle_401(self, r, **kwargs): - """ - Takes the given response and tries digest-auth, if needed. 
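For qop=auth with the default MD5 algorithm, build_digest_header above reduces to three hashes. A worked sketch with hypothetical challenge values standing in for a real WWW-Authenticate header:

import hashlib

def md5_hex(s):
    return hashlib.md5(s.encode("utf-8")).hexdigest()

# Hypothetical challenge values (RFC 2617 style).
username, password = "mufasa", "circle-of-life"
realm, nonce = "testrealm", "dcd98b7102dd2f0e"
method, path = "GET", "/dir/index.html"
ncvalue, cnonce = "00000001", "0a4f113b"

HA1 = md5_hex("%s:%s:%s" % (username, realm, password))
HA2 = md5_hex("%s:%s" % (method, path))
respdig = md5_hex("%s:%s:%s:%s:%s:%s" % (HA1, nonce, ncvalue, cnonce, "auth", HA2))
print(respdig)  # goes into the response="..." field of the Digest header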
- - :rtype: requests.Response - """ - - # If response is not 4xx, do not auth - # See https://github.com/psf/requests/issues/3772 - if not 400 <= r.status_code < 500: - self._thread_local.num_401_calls = 1 - return r - - if self._thread_local.pos is not None: - # Rewind the file position indicator of the body to where - # it was to resend the request. - r.request.body.seek(self._thread_local.pos) - s_auth = r.headers.get('www-authenticate', '') - - if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2: - - self._thread_local.num_401_calls += 1 - pat = re.compile(r'digest ', flags=re.IGNORECASE) - self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1)) - - # Consume content and release the original connection - # to allow our new request to reuse the same one. - r.content - r.close() - prep = r.request.copy() - extract_cookies_to_jar(prep._cookies, r.request, r.raw) - prep.prepare_cookies(prep._cookies) - - prep.headers['Authorization'] = self.build_digest_header( - prep.method, prep.url) - _r = r.connection.send(prep, **kwargs) - _r.history.append(r) - _r.request = prep - - return _r - - self._thread_local.num_401_calls = 1 - return r - - def __call__(self, r): - # Initialize per-thread state, if needed - self.init_per_thread_state() - # If we have a saved nonce, skip the 401 - if self._thread_local.last_nonce: - r.headers['Authorization'] = self.build_digest_header(r.method, r.url) - try: - self._thread_local.pos = r.body.tell() - except AttributeError: - # In the case of HTTPDigestAuth being reused and the body of - # the previous request was a file-like object, pos has the - # file position of the previous body. Ensure it's set to - # None. - self._thread_local.pos = None - r.register_hook('response', self.handle_401) - r.register_hook('response', self.handle_redirect) - self._thread_local.num_401_calls = 1 - - return r - - def __eq__(self, other): - return all([ - self.username == getattr(other, 'username', None), - self.password == getattr(other, 'password', None) - ]) - - def __ne__(self, other): - return not self == other diff --git a/vendor/requests/requests/certs.py b/vendor/requests/requests/certs.py deleted file mode 100644 index 8788b087..00000000 --- a/vendor/requests/requests/certs.py +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -""" -requests.certs -~~~~~~~~~~~~~~ - -This module returns the preferred default CA certificate bundle. There is -only one — the one from the certifi package. - -If you are packaging Requests, e.g., for a Linux distribution or a managed -environment, you can change the definition of where() to return a separately -packaged CA bundle. -""" -from certifi import contents, where - -if __name__ == '__main__': - print(where()) diff --git a/vendor/requests/requests/compat.py b/vendor/requests/requests/compat.py deleted file mode 100644 index 0b14f501..00000000 --- a/vendor/requests/requests/compat.py +++ /dev/null @@ -1,75 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.compat -~~~~~~~~~~~~~~~ - -This module handles import compatibility issues between Python 2 and -Python 3. -""" - -try: - import chardet -except ImportError: - import charset_normalizer as chardet - -import sys - -# ------- -# Pythons -# ------- - -# Syntax sugar. -_ver = sys.version_info - -#: Python 2.x? -is_py2 = (_ver[0] == 2) - -#: Python 3.x? 
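End to end, handle_401 above means the caller never sees the challenge round-trip. Assuming httpbin.org is reachable:

import requests
from requests.auth import HTTPDigestAuth

# The first response is a 401 challenge; handle_401 replays the request
# with an Authorization header built by build_digest_header.
r = requests.get(
    "https://httpbin.org/digest-auth/auth/user/passwd",
    auth=HTTPDigestAuth("user", "passwd"),
)
print(r.status_code)  # 200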
-is_py3 = (_ver[0] == 3) - -try: - import simplejson as json -except ImportError: - import json - -# --------- -# Specifics -# --------- - -if is_py2: - from urllib import ( - quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, - proxy_bypass, proxy_bypass_environment, getproxies_environment) - from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag - from urllib2 import parse_http_list - import cookielib - from Cookie import Morsel - from StringIO import StringIO - # Keep OrderedDict for backwards compatibility. - from collections import Callable, Mapping, MutableMapping, OrderedDict - - - builtin_str = str - bytes = str - str = unicode - basestring = basestring - numeric_types = (int, long, float) - integer_types = (int, long) - -elif is_py3: - from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag - from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment - from http import cookiejar as cookielib - from http.cookies import Morsel - from io import StringIO - # Keep OrderedDict for backwards compatibility. - from collections import OrderedDict - from collections.abc import Callable, Mapping, MutableMapping - - builtin_str = str - str = str - bytes = bytes - basestring = (str, bytes) - numeric_types = (int, float) - integer_types = (int,) diff --git a/vendor/requests/requests/cookies.py b/vendor/requests/requests/cookies.py deleted file mode 100644 index 56fccd9c..00000000 --- a/vendor/requests/requests/cookies.py +++ /dev/null @@ -1,549 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.cookies -~~~~~~~~~~~~~~~~ - -Compatibility code to be able to use `cookielib.CookieJar` with requests. - -requests.utils imports from here, so be careful with imports. -""" - -import copy -import time -import calendar - -from ._internal_utils import to_native_string -from .compat import cookielib, urlparse, urlunparse, Morsel, MutableMapping - -try: - import threading -except ImportError: - import dummy_threading as threading - - -class MockRequest(object): - """Wraps a `requests.Request` to mimic a `urllib2.Request`. - - The code in `cookielib.CookieJar` expects this interface in order to correctly - manage cookie policies, i.e., determine whether a cookie can be set, given the - domains of the request and the cookie. - - The original request object is read-only. The client is responsible for collecting - the new headers via `get_new_headers()` and interpreting them appropriately. You - probably want `get_cookie_header`, defined below. 
- """ - - def __init__(self, request): - self._r = request - self._new_headers = {} - self.type = urlparse(self._r.url).scheme - - def get_type(self): - return self.type - - def get_host(self): - return urlparse(self._r.url).netloc - - def get_origin_req_host(self): - return self.get_host() - - def get_full_url(self): - # Only return the response's URL if the user hadn't set the Host - # header - if not self._r.headers.get('Host'): - return self._r.url - # If they did set it, retrieve it and reconstruct the expected domain - host = to_native_string(self._r.headers['Host'], encoding='utf-8') - parsed = urlparse(self._r.url) - # Reconstruct the URL as we expect it - return urlunparse([ - parsed.scheme, host, parsed.path, parsed.params, parsed.query, - parsed.fragment - ]) - - def is_unverifiable(self): - return True - - def has_header(self, name): - return name in self._r.headers or name in self._new_headers - - def get_header(self, name, default=None): - return self._r.headers.get(name, self._new_headers.get(name, default)) - - def add_header(self, key, val): - """cookielib has no legitimate use for this method; add it back if you find one.""" - raise NotImplementedError("Cookie headers should be added with add_unredirected_header()") - - def add_unredirected_header(self, name, value): - self._new_headers[name] = value - - def get_new_headers(self): - return self._new_headers - - @property - def unverifiable(self): - return self.is_unverifiable() - - @property - def origin_req_host(self): - return self.get_origin_req_host() - - @property - def host(self): - return self.get_host() - - -class MockResponse(object): - """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`. - - ...what? Basically, expose the parsed HTTP headers from the server response - the way `cookielib` expects to see them. - """ - - def __init__(self, headers): - """Make a MockResponse for `cookielib` to read. - - :param headers: a httplib.HTTPMessage or analogous carrying the headers - """ - self._headers = headers - - def info(self): - return self._headers - - def getheaders(self, name): - self._headers.getheaders(name) - - -def extract_cookies_to_jar(jar, request, response): - """Extract the cookies from the response into a CookieJar. - - :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar) - :param request: our own requests.Request object - :param response: urllib3.HTTPResponse object - """ - if not (hasattr(response, '_original_response') and - response._original_response): - return - # the _original_response field is the wrapped httplib.HTTPResponse object, - req = MockRequest(request) - # pull out the HTTPMessage with the headers and put it in the mock: - res = MockResponse(response._original_response.msg) - jar.extract_cookies(res, req) - - -def get_cookie_header(jar, request): - """ - Produce an appropriate Cookie header string to be sent with `request`, or None. - - :rtype: str - """ - r = MockRequest(request) - jar.add_cookie_header(r) - return r.get_new_headers().get('Cookie') - - -def remove_cookie_by_name(cookiejar, name, domain=None, path=None): - """Unsets a cookie by name, by default over all domains and paths. - - Wraps CookieJar.clear(), is O(n). 
- """ - clearables = [] - for cookie in cookiejar: - if cookie.name != name: - continue - if domain is not None and domain != cookie.domain: - continue - if path is not None and path != cookie.path: - continue - clearables.append((cookie.domain, cookie.path, cookie.name)) - - for domain, path, name in clearables: - cookiejar.clear(domain, path, name) - - -class CookieConflictError(RuntimeError): - """There are two cookies that meet the criteria specified in the cookie jar. - Use .get and .set and include domain and path args in order to be more specific. - """ - - -class RequestsCookieJar(cookielib.CookieJar, MutableMapping): - """Compatibility class; is a cookielib.CookieJar, but exposes a dict - interface. - - This is the CookieJar we create by default for requests and sessions that - don't specify one, since some clients may expect response.cookies and - session.cookies to support dict operations. - - Requests does not use the dict interface internally; it's just for - compatibility with external client code. All requests code should work - out of the box with externally provided instances of ``CookieJar``, e.g. - ``LWPCookieJar`` and ``FileCookieJar``. - - Unlike a regular CookieJar, this class is pickleable. - - .. warning:: dictionary operations that are normally O(1) may be O(n). - """ - - def get(self, name, default=None, domain=None, path=None): - """Dict-like get() that also supports optional domain and path args in - order to resolve naming collisions from using one cookie jar over - multiple domains. - - .. warning:: operation is O(n), not O(1). - """ - try: - return self._find_no_duplicates(name, domain, path) - except KeyError: - return default - - def set(self, name, value, **kwargs): - """Dict-like set() that also supports optional domain and path args in - order to resolve naming collisions from using one cookie jar over - multiple domains. - """ - # support client code that unsets cookies by assignment of a None value: - if value is None: - remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path')) - return - - if isinstance(value, Morsel): - c = morsel_to_cookie(value) - else: - c = create_cookie(name, value, **kwargs) - self.set_cookie(c) - return c - - def iterkeys(self): - """Dict-like iterkeys() that returns an iterator of names of cookies - from the jar. - - .. seealso:: itervalues() and iteritems(). - """ - for cookie in iter(self): - yield cookie.name - - def keys(self): - """Dict-like keys() that returns a list of names of cookies from the - jar. - - .. seealso:: values() and items(). - """ - return list(self.iterkeys()) - - def itervalues(self): - """Dict-like itervalues() that returns an iterator of values of cookies - from the jar. - - .. seealso:: iterkeys() and iteritems(). - """ - for cookie in iter(self): - yield cookie.value - - def values(self): - """Dict-like values() that returns a list of values of cookies from the - jar. - - .. seealso:: keys() and items(). - """ - return list(self.itervalues()) - - def iteritems(self): - """Dict-like iteritems() that returns an iterator of name-value tuples - from the jar. - - .. seealso:: iterkeys() and itervalues(). - """ - for cookie in iter(self): - yield cookie.name, cookie.value - - def items(self): - """Dict-like items() that returns a list of name-value tuples from the - jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a - vanilla python dict of key value pairs. - - .. seealso:: keys() and values(). 
- """ - return list(self.iteritems()) - - def list_domains(self): - """Utility method to list all the domains in the jar.""" - domains = [] - for cookie in iter(self): - if cookie.domain not in domains: - domains.append(cookie.domain) - return domains - - def list_paths(self): - """Utility method to list all the paths in the jar.""" - paths = [] - for cookie in iter(self): - if cookie.path not in paths: - paths.append(cookie.path) - return paths - - def multiple_domains(self): - """Returns True if there are multiple domains in the jar. - Returns False otherwise. - - :rtype: bool - """ - domains = [] - for cookie in iter(self): - if cookie.domain is not None and cookie.domain in domains: - return True - domains.append(cookie.domain) - return False # there is only one domain in jar - - def get_dict(self, domain=None, path=None): - """Takes as an argument an optional domain and path and returns a plain - old Python dict of name-value pairs of cookies that meet the - requirements. - - :rtype: dict - """ - dictionary = {} - for cookie in iter(self): - if ( - (domain is None or cookie.domain == domain) and - (path is None or cookie.path == path) - ): - dictionary[cookie.name] = cookie.value - return dictionary - - def __contains__(self, name): - try: - return super(RequestsCookieJar, self).__contains__(name) - except CookieConflictError: - return True - - def __getitem__(self, name): - """Dict-like __getitem__() for compatibility with client code. Throws - exception if there are more than one cookie with name. In that case, - use the more explicit get() method instead. - - .. warning:: operation is O(n), not O(1). - """ - return self._find_no_duplicates(name) - - def __setitem__(self, name, value): - """Dict-like __setitem__ for compatibility with client code. Throws - exception if there is already a cookie of that name in the jar. In that - case, use the more explicit set() method instead. - """ - self.set(name, value) - - def __delitem__(self, name): - """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s - ``remove_cookie_by_name()``. - """ - remove_cookie_by_name(self, name) - - def set_cookie(self, cookie, *args, **kwargs): - if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'): - cookie.value = cookie.value.replace('\\"', '') - return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs) - - def update(self, other): - """Updates this jar with cookies from another CookieJar or dict-like""" - if isinstance(other, cookielib.CookieJar): - for cookie in other: - self.set_cookie(copy.copy(cookie)) - else: - super(RequestsCookieJar, self).update(other) - - def _find(self, name, domain=None, path=None): - """Requests uses this method internally to get cookie values. - - If there are conflicting cookies, _find arbitrarily chooses one. - See _find_no_duplicates if you want an exception thrown if there are - conflicting cookies. - - :param name: a string containing name of cookie - :param domain: (optional) string containing domain of cookie - :param path: (optional) string containing path of cookie - :return: cookie.value - """ - for cookie in iter(self): - if cookie.name == name: - if domain is None or cookie.domain == domain: - if path is None or cookie.path == path: - return cookie.value - - raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path)) - - def _find_no_duplicates(self, name, domain=None, path=None): - """Both ``__get_item__`` and ``get`` call this function: it's never - used elsewhere in Requests. 
- - :param name: a string containing name of cookie - :param domain: (optional) string containing domain of cookie - :param path: (optional) string containing path of cookie - :raises KeyError: if cookie is not found - :raises CookieConflictError: if there are multiple cookies - that match name and optionally domain and path - :return: cookie.value - """ - toReturn = None - for cookie in iter(self): - if cookie.name == name: - if domain is None or cookie.domain == domain: - if path is None or cookie.path == path: - if toReturn is not None: # if there are multiple cookies that meet passed in criteria - raise CookieConflictError('There are multiple cookies with name, %r' % (name)) - toReturn = cookie.value # we will eventually return this as long as no cookie conflict - - if toReturn: - return toReturn - raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path)) - - def __getstate__(self): - """Unlike a normal CookieJar, this class is pickleable.""" - state = self.__dict__.copy() - # remove the unpickleable RLock object - state.pop('_cookies_lock') - return state - - def __setstate__(self, state): - """Unlike a normal CookieJar, this class is pickleable.""" - self.__dict__.update(state) - if '_cookies_lock' not in self.__dict__: - self._cookies_lock = threading.RLock() - - def copy(self): - """Return a copy of this RequestsCookieJar.""" - new_cj = RequestsCookieJar() - new_cj.set_policy(self.get_policy()) - new_cj.update(self) - return new_cj - - def get_policy(self): - """Return the CookiePolicy instance used.""" - return self._policy - - -def _copy_cookie_jar(jar): - if jar is None: - return None - - if hasattr(jar, 'copy'): - # We're dealing with an instance of RequestsCookieJar - return jar.copy() - # We're dealing with a generic CookieJar instance - new_jar = copy.copy(jar) - new_jar.clear() - for cookie in jar: - new_jar.set_cookie(copy.copy(cookie)) - return new_jar - - -def create_cookie(name, value, **kwargs): - """Make a cookie from underspecified parameters. - - By default, the pair of `name` and `value` will be set for the domain '' - and sent on every request (this is sometimes called a "supercookie"). 
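The __getstate__/__setstate__ pair above is what makes the jar pickleable despite the internal RLock. A quick check, using a hypothetical domain:

import pickle
from requests.cookies import RequestsCookieJar

jar = RequestsCookieJar()
jar.set("token", "t-1", domain="example.com", path="/")

# The lock is dropped on dump and re-created on load.
restored = pickle.loads(pickle.dumps(jar))
print(restored.get("token"))  # 't-1'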
- """ - result = { - 'version': 0, - 'name': name, - 'value': value, - 'port': None, - 'domain': '', - 'path': '/', - 'secure': False, - 'expires': None, - 'discard': True, - 'comment': None, - 'comment_url': None, - 'rest': {'HttpOnly': None}, - 'rfc2109': False, - } - - badargs = set(kwargs) - set(result) - if badargs: - err = 'create_cookie() got unexpected keyword arguments: %s' - raise TypeError(err % list(badargs)) - - result.update(kwargs) - result['port_specified'] = bool(result['port']) - result['domain_specified'] = bool(result['domain']) - result['domain_initial_dot'] = result['domain'].startswith('.') - result['path_specified'] = bool(result['path']) - - return cookielib.Cookie(**result) - - -def morsel_to_cookie(morsel): - """Convert a Morsel object into a Cookie containing the one k/v pair.""" - - expires = None - if morsel['max-age']: - try: - expires = int(time.time() + int(morsel['max-age'])) - except ValueError: - raise TypeError('max-age: %s must be integer' % morsel['max-age']) - elif morsel['expires']: - time_template = '%a, %d-%b-%Y %H:%M:%S GMT' - expires = calendar.timegm( - time.strptime(morsel['expires'], time_template) - ) - return create_cookie( - comment=morsel['comment'], - comment_url=bool(morsel['comment']), - discard=False, - domain=morsel['domain'], - expires=expires, - name=morsel.key, - path=morsel['path'], - port=None, - rest={'HttpOnly': morsel['httponly']}, - rfc2109=False, - secure=bool(morsel['secure']), - value=morsel.value, - version=morsel['version'] or 0, - ) - - -def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True): - """Returns a CookieJar from a key/value dictionary. - - :param cookie_dict: Dict of key/values to insert into CookieJar. - :param cookiejar: (optional) A cookiejar to add the cookies to. - :param overwrite: (optional) If False, will not replace cookies - already in the jar with new ones. - :rtype: CookieJar - """ - if cookiejar is None: - cookiejar = RequestsCookieJar() - - if cookie_dict is not None: - names_from_jar = [cookie.name for cookie in cookiejar] - for name in cookie_dict: - if overwrite or (name not in names_from_jar): - cookiejar.set_cookie(create_cookie(name, cookie_dict[name])) - - return cookiejar - - -def merge_cookies(cookiejar, cookies): - """Add cookies to cookiejar and returns a merged CookieJar. - - :param cookiejar: CookieJar object to add the cookies to. - :param cookies: Dictionary or CookieJar object to be added. - :rtype: CookieJar - """ - if not isinstance(cookiejar, cookielib.CookieJar): - raise ValueError('You can only merge into CookieJar') - - if isinstance(cookies, dict): - cookiejar = cookiejar_from_dict( - cookies, cookiejar=cookiejar, overwrite=False) - elif isinstance(cookies, cookielib.CookieJar): - try: - cookiejar.update(cookies) - except AttributeError: - for cookie_in_jar in cookies: - cookiejar.set_cookie(cookie_in_jar) - - return cookiejar diff --git a/vendor/requests/requests/exceptions.py b/vendor/requests/requests/exceptions.py deleted file mode 100644 index c412ec98..00000000 --- a/vendor/requests/requests/exceptions.py +++ /dev/null @@ -1,127 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.exceptions -~~~~~~~~~~~~~~~~~~~ - -This module contains the set of Requests' exceptions. -""" -from urllib3.exceptions import HTTPError as BaseHTTPError - - -class RequestException(IOError): - """There was an ambiguous exception that occurred while handling your - request. 
- """ - - def __init__(self, *args, **kwargs): - """Initialize RequestException with `request` and `response` objects.""" - response = kwargs.pop('response', None) - self.response = response - self.request = kwargs.pop('request', None) - if (response is not None and not self.request and - hasattr(response, 'request')): - self.request = self.response.request - super(RequestException, self).__init__(*args, **kwargs) - - -class InvalidJSONError(RequestException): - """A JSON error occurred.""" - - -class HTTPError(RequestException): - """An HTTP error occurred.""" - - -class ConnectionError(RequestException): - """A Connection error occurred.""" - - -class ProxyError(ConnectionError): - """A proxy error occurred.""" - - -class SSLError(ConnectionError): - """An SSL error occurred.""" - - -class Timeout(RequestException): - """The request timed out. - - Catching this error will catch both - :exc:`~requests.exceptions.ConnectTimeout` and - :exc:`~requests.exceptions.ReadTimeout` errors. - """ - - -class ConnectTimeout(ConnectionError, Timeout): - """The request timed out while trying to connect to the remote server. - - Requests that produced this error are safe to retry. - """ - - -class ReadTimeout(Timeout): - """The server did not send any data in the allotted amount of time.""" - - -class URLRequired(RequestException): - """A valid URL is required to make a request.""" - - -class TooManyRedirects(RequestException): - """Too many redirects.""" - - -class MissingSchema(RequestException, ValueError): - """The URL schema (e.g. http or https) is missing.""" - - -class InvalidSchema(RequestException, ValueError): - """See defaults.py for valid schemas.""" - - -class InvalidURL(RequestException, ValueError): - """The URL provided was somehow invalid.""" - - -class InvalidHeader(RequestException, ValueError): - """The header value provided was somehow invalid.""" - - -class InvalidProxyURL(InvalidURL): - """The proxy URL provided is invalid.""" - - -class ChunkedEncodingError(RequestException): - """The server declared chunked encoding but sent an invalid chunk.""" - - -class ContentDecodingError(RequestException, BaseHTTPError): - """Failed to decode response content.""" - - -class StreamConsumedError(RequestException, TypeError): - """The content for this response was already consumed.""" - - -class RetryError(RequestException): - """Custom retries logic failed""" - - -class UnrewindableBodyError(RequestException): - """Requests encountered an error when trying to rewind a body.""" - -# Warnings - - -class RequestsWarning(Warning): - """Base warning for Requests.""" - - -class FileModeWarning(RequestsWarning, DeprecationWarning): - """A file was opened in text mode, but Requests determined its binary length.""" - - -class RequestsDependencyWarning(RequestsWarning): - """An imported dependency doesn't match the expected version range.""" diff --git a/vendor/requests/requests/help.py b/vendor/requests/requests/help.py deleted file mode 100644 index 4cd6389f..00000000 --- a/vendor/requests/requests/help.py +++ /dev/null @@ -1,135 +0,0 @@ -"""Module containing bug report helper(s).""" -from __future__ import print_function - -import json -import platform -import sys -import ssl - -import idna -import urllib3 - -from . 
import __version__ as requests_version - -try: - import charset_normalizer -except ImportError: - charset_normalizer = None - -try: - import chardet -except ImportError: - chardet = None - -try: - from urllib3.contrib import pyopenssl -except ImportError: - pyopenssl = None - OpenSSL = None - cryptography = None -else: - import OpenSSL - import cryptography - - -def _implementation(): - """Return a dict with the Python implementation and version. - - Provide both the name and the version of the Python implementation - currently running. For example, on CPython 2.7.5 it will return - {'name': 'CPython', 'version': '2.7.5'}. - - This function works best on CPython and PyPy: in particular, it probably - doesn't work for Jython or IronPython. Future investigation should be done - to work out the correct shape of the code for those platforms. - """ - implementation = platform.python_implementation() - - if implementation == 'CPython': - implementation_version = platform.python_version() - elif implementation == 'PyPy': - implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major, - sys.pypy_version_info.minor, - sys.pypy_version_info.micro) - if sys.pypy_version_info.releaselevel != 'final': - implementation_version = ''.join([ - implementation_version, sys.pypy_version_info.releaselevel - ]) - elif implementation == 'Jython': - implementation_version = platform.python_version() # Complete Guess - elif implementation == 'IronPython': - implementation_version = platform.python_version() # Complete Guess - else: - implementation_version = 'Unknown' - - return {'name': implementation, 'version': implementation_version} - - -def info(): - """Generate information for a bug report.""" - try: - platform_info = { - 'system': platform.system(), - 'release': platform.release(), - } - except IOError: - platform_info = { - 'system': 'Unknown', - 'release': 'Unknown', - } - - implementation_info = _implementation() - urllib3_info = {'version': urllib3.__version__} - charset_normalizer_info = {'version': None} - chardet_info = {'version': None} - if charset_normalizer: - charset_normalizer_info = {'version': charset_normalizer.__version__} - if chardet: - chardet_info = {'version': chardet.__version__} - - pyopenssl_info = { - 'version': None, - 'openssl_version': '', - } - if OpenSSL: - pyopenssl_info = { - 'version': OpenSSL.__version__, - 'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER, - } - cryptography_info = { - 'version': getattr(cryptography, '__version__', ''), - } - idna_info = { - 'version': getattr(idna, '__version__', ''), - } - - system_ssl = ssl.OPENSSL_VERSION_NUMBER - system_ssl_info = { - 'version': '%x' % system_ssl if system_ssl is not None else '' - } - - return { - 'platform': platform_info, - 'implementation': implementation_info, - 'system_ssl': system_ssl_info, - 'using_pyopenssl': pyopenssl is not None, - 'using_charset_normalizer': chardet is None, - 'pyOpenSSL': pyopenssl_info, - 'urllib3': urllib3_info, - 'chardet': chardet_info, - 'charset_normalizer': charset_normalizer_info, - 'cryptography': cryptography_info, - 'idna': idna_info, - 'requests': { - 'version': requests_version, - }, - } - - -def main(): - """Pretty-print the bug information as JSON.""" - print(json.dumps(info(), sort_keys=True, indent=2)) - - -if __name__ == '__main__': - main() diff --git a/vendor/requests/requests/hooks.py b/vendor/requests/requests/hooks.py deleted file mode 100644 index 7a51f212..00000000 --- a/vendor/requests/requests/hooks.py +++ /dev/null @@ -1,34 +0,0 @@ -# -*- 
coding: utf-8 -*- - -""" -requests.hooks -~~~~~~~~~~~~~~ - -This module provides the capabilities for the Requests hooks system. - -Available hooks: - -``response``: - The response generated from a Request. -""" -HOOKS = ['response'] - - -def default_hooks(): - return {event: [] for event in HOOKS} - -# TODO: response is the only one - - -def dispatch_hook(key, hooks, hook_data, **kwargs): - """Dispatches a hook dictionary on a given piece of data.""" - hooks = hooks or {} - hooks = hooks.get(key) - if hooks: - if hasattr(hooks, '__call__'): - hooks = [hooks] - for hook in hooks: - _hook_data = hook(hook_data, **kwargs) - if _hook_data is not None: - hook_data = _hook_data - return hook_data diff --git a/vendor/requests/requests/models.py b/vendor/requests/requests/models.py deleted file mode 100644 index aa6fb86e..00000000 --- a/vendor/requests/requests/models.py +++ /dev/null @@ -1,966 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.models -~~~~~~~~~~~~~~~ - -This module contains the primary objects that power Requests. -""" - -import datetime -import sys - -# Import encoding now, to avoid implicit import later. -# Implicit import within threads may cause LookupError when standard library is in a ZIP, -# such as in Embedded Python. See https://github.com/psf/requests/issues/3578. -import encodings.idna - -from urllib3.fields import RequestField -from urllib3.filepost import encode_multipart_formdata -from urllib3.util import parse_url -from urllib3.exceptions import ( - DecodeError, ReadTimeoutError, ProtocolError, LocationParseError) - -from io import UnsupportedOperation -from .hooks import default_hooks -from .structures import CaseInsensitiveDict - -from .auth import HTTPBasicAuth -from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar -from .exceptions import ( - HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError, - ContentDecodingError, ConnectionError, StreamConsumedError, InvalidJSONError) -from ._internal_utils import to_native_string, unicode_is_ascii -from .utils import ( - guess_filename, get_auth_from_url, requote_uri, - stream_decode_response_unicode, to_key_val_list, parse_header_links, - iter_slices, guess_json_utf, super_len, check_header_validity) -from .compat import ( - Callable, Mapping, - cookielib, urlunparse, urlsplit, urlencode, str, bytes, - is_py2, chardet, builtin_str, basestring) -from .compat import json as complexjson -from .status_codes import codes - -#: The set of HTTP status codes that indicate an automatically -#: processable redirect. -REDIRECT_STATI = ( - codes.moved, # 301 - codes.found, # 302 - codes.other, # 303 - codes.temporary_redirect, # 307 - codes.permanent_redirect, # 308 -) - -DEFAULT_REDIRECT_LIMIT = 30 -CONTENT_CHUNK_SIZE = 10 * 1024 -ITER_CHUNK_SIZE = 512 - - -class RequestEncodingMixin(object): - @property - def path_url(self): - """Build the path URL to use.""" - - url = [] - - p = urlsplit(self.url) - - path = p.path - if not path: - path = '/' - - url.append(path) - - query = p.query - if query: - url.append('?') - url.append(query) - - return ''.join(url) - - @staticmethod - def _encode_params(data): - """Encode parameters in a piece of data. - - Will successfully encode parameters when passed as a dict or a list of - 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary - if parameters are supplied as a dict. 
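dispatch_hook above passes the response through each registered callable and adopts the return value only when it is not None. A minimal response hook:

import requests

def log_status(response, *args, **kwargs):
    # Returning None keeps the original response object.
    print(response.status_code, response.url)

requests.get("https://httpbin.org/get", hooks={"response": log_status})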
- """ - - if isinstance(data, (str, bytes)): - return data - elif hasattr(data, 'read'): - return data - elif hasattr(data, '__iter__'): - result = [] - for k, vs in to_key_val_list(data): - if isinstance(vs, basestring) or not hasattr(vs, '__iter__'): - vs = [vs] - for v in vs: - if v is not None: - result.append( - (k.encode('utf-8') if isinstance(k, str) else k, - v.encode('utf-8') if isinstance(v, str) else v)) - return urlencode(result, doseq=True) - else: - return data - - @staticmethod - def _encode_files(files, data): - """Build the body for a multipart/form-data request. - - Will successfully encode files when passed as a dict or a list of - tuples. Order is retained if data is a list of tuples but arbitrary - if parameters are supplied as a dict. - The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) - or 4-tuples (filename, fileobj, contentype, custom_headers). - """ - if (not files): - raise ValueError("Files must be provided.") - elif isinstance(data, basestring): - raise ValueError("Data must not be a string.") - - new_fields = [] - fields = to_key_val_list(data or {}) - files = to_key_val_list(files or {}) - - for field, val in fields: - if isinstance(val, basestring) or not hasattr(val, '__iter__'): - val = [val] - for v in val: - if v is not None: - # Don't call str() on bytestrings: in Py3 it all goes wrong. - if not isinstance(v, bytes): - v = str(v) - - new_fields.append( - (field.decode('utf-8') if isinstance(field, bytes) else field, - v.encode('utf-8') if isinstance(v, str) else v)) - - for (k, v) in files: - # support for explicit filename - ft = None - fh = None - if isinstance(v, (tuple, list)): - if len(v) == 2: - fn, fp = v - elif len(v) == 3: - fn, fp, ft = v - else: - fn, fp, ft, fh = v - else: - fn = guess_filename(v) or k - fp = v - - if isinstance(fp, (str, bytes, bytearray)): - fdata = fp - elif hasattr(fp, 'read'): - fdata = fp.read() - elif fp is None: - continue - else: - fdata = fp - - rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) - rf.make_multipart(content_type=ft) - new_fields.append(rf) - - body, content_type = encode_multipart_formdata(new_fields) - - return body, content_type - - -class RequestHooksMixin(object): - def register_hook(self, event, hook): - """Properly register a hook.""" - - if event not in self.hooks: - raise ValueError('Unsupported event specified, with event name "%s"' % (event)) - - if isinstance(hook, Callable): - self.hooks[event].append(hook) - elif hasattr(hook, '__iter__'): - self.hooks[event].extend(h for h in hook if isinstance(h, Callable)) - - def deregister_hook(self, event, hook): - """Deregister a previously registered hook. - Returns True if the hook existed, False if not. - """ - - try: - self.hooks[event].remove(hook) - return True - except ValueError: - return False - - -class Request(RequestHooksMixin): - """A user-created :class:`Request ` object. - - Used to prepare a :class:`PreparedRequest `, which is sent to the server. - - :param method: HTTP method to use. - :param url: URL to send. - :param headers: dictionary of headers to send. - :param files: dictionary of {filename: fileobject} files to multipart upload. - :param data: the body to attach to the request. If a dictionary or - list of tuples ``[(key, value)]`` is provided, form-encoding will - take place. - :param json: json for the body to attach to the request (if files or data is not specified). - :param params: URL parameters to append to the URL. 
If a dictionary or - list of tuples ``[(key, value)]`` is provided, form-encoding will - take place. - :param auth: Auth handler or (user, pass) tuple. - :param cookies: dictionary or CookieJar of cookies to attach to this request. - :param hooks: dictionary of callback hooks, for internal usage. - - Usage:: - - >>> import requests - >>> req = requests.Request('GET', 'https://httpbin.org/get') - >>> req.prepare() - - """ - - def __init__(self, - method=None, url=None, headers=None, files=None, data=None, - params=None, auth=None, cookies=None, hooks=None, json=None): - - # Default empty dicts for dict params. - data = [] if data is None else data - files = [] if files is None else files - headers = {} if headers is None else headers - params = {} if params is None else params - hooks = {} if hooks is None else hooks - - self.hooks = default_hooks() - for (k, v) in list(hooks.items()): - self.register_hook(event=k, hook=v) - - self.method = method - self.url = url - self.headers = headers - self.files = files - self.data = data - self.json = json - self.params = params - self.auth = auth - self.cookies = cookies - - def __repr__(self): - return '' % (self.method) - - def prepare(self): - """Constructs a :class:`PreparedRequest ` for transmission and returns it.""" - p = PreparedRequest() - p.prepare( - method=self.method, - url=self.url, - headers=self.headers, - files=self.files, - data=self.data, - json=self.json, - params=self.params, - auth=self.auth, - cookies=self.cookies, - hooks=self.hooks, - ) - return p - - -class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): - """The fully mutable :class:`PreparedRequest ` object, - containing the exact bytes that will be sent to the server. - - Instances are generated from a :class:`Request ` object, and - should not be instantiated manually; doing so may produce undesirable - effects. - - Usage:: - - >>> import requests - >>> req = requests.Request('GET', 'https://httpbin.org/get') - >>> r = req.prepare() - >>> r - - - >>> s = requests.Session() - >>> s.send(r) - - """ - - def __init__(self): - #: HTTP verb to send to the server. - self.method = None - #: HTTP URL to send the request to. - self.url = None - #: dictionary of HTTP headers. - self.headers = None - # The `CookieJar` used to create the Cookie header will be stored here - # after prepare_cookies is called - self._cookies = None - #: request body to send to the server. - self.body = None - #: dictionary of callback hooks, for internal usage. - self.hooks = default_hooks() - #: integer denoting starting position of a readable file-like body. - self._body_position = None - - def prepare(self, - method=None, url=None, headers=None, files=None, data=None, - params=None, auth=None, cookies=None, hooks=None, json=None): - """Prepares the entire request with the given parameters.""" - - self.prepare_method(method) - self.prepare_url(url, params) - self.prepare_headers(headers) - self.prepare_cookies(cookies) - self.prepare_body(data, files, json) - self.prepare_auth(auth, url) - - # Note that prepare_auth must be last to enable authentication schemes - # such as OAuth to work on a fully prepared request. - - # This MUST go after prepare_auth. 
Authenticators could add a hook - self.prepare_hooks(hooks) - - def __repr__(self): - return '' % (self.method) - - def copy(self): - p = PreparedRequest() - p.method = self.method - p.url = self.url - p.headers = self.headers.copy() if self.headers is not None else None - p._cookies = _copy_cookie_jar(self._cookies) - p.body = self.body - p.hooks = self.hooks - p._body_position = self._body_position - return p - - def prepare_method(self, method): - """Prepares the given HTTP method.""" - self.method = method - if self.method is not None: - self.method = to_native_string(self.method.upper()) - - @staticmethod - def _get_idna_encoded_host(host): - import idna - - try: - host = idna.encode(host, uts46=True).decode('utf-8') - except idna.IDNAError: - raise UnicodeError - return host - - def prepare_url(self, url, params): - """Prepares the given HTTP URL.""" - #: Accept objects that have string representations. - #: We're unable to blindly call unicode/str functions - #: as this will include the bytestring indicator (b'') - #: on python 3.x. - #: https://github.com/psf/requests/pull/2238 - if isinstance(url, bytes): - url = url.decode('utf8') - else: - url = unicode(url) if is_py2 else str(url) - - # Remove leading whitespaces from url - url = url.lstrip() - - # Don't do any URL preparation for non-HTTP schemes like `mailto`, - # `data` etc to work around exceptions from `url_parse`, which - # handles RFC 3986 only. - if ':' in url and not url.lower().startswith('http'): - self.url = url - return - - # Support for unicode domain names and paths. - try: - scheme, auth, host, port, path, query, fragment = parse_url(url) - except LocationParseError as e: - raise InvalidURL(*e.args) - - if not scheme: - error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?") - error = error.format(to_native_string(url, 'utf8')) - - raise MissingSchema(error) - - if not host: - raise InvalidURL("Invalid URL %r: No host supplied" % url) - - # In general, we want to try IDNA encoding the hostname if the string contains - # non-ASCII characters. This allows users to automatically get the correct IDNA - # behaviour. For strings containing only ASCII characters, we need to also verify - # it doesn't start with a wildcard (*), before allowing the unencoded hostname. - if not unicode_is_ascii(host): - try: - host = self._get_idna_encoded_host(host) - except UnicodeError: - raise InvalidURL('URL has an invalid label.') - elif host.startswith(u'*'): - raise InvalidURL('URL has an invalid label.') - - # Carefully reconstruct the network location - netloc = auth or '' - if netloc: - netloc += '@' - netloc += host - if port: - netloc += ':' + str(port) - - # Bare domains aren't valid URLs. 
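A small sketch of what prepare_url above produces: a bare domain gains a trailing '/', and params are percent-encoded into the query string:

import requests

prepared = requests.Request("GET", "https://httpbin.org",
                            params={"q": "café"}).prepare()
print(prepared.url)  # https://httpbin.org/?q=caf%C3%A9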
- if not path: - path = '/' - - if is_py2: - if isinstance(scheme, str): - scheme = scheme.encode('utf-8') - if isinstance(netloc, str): - netloc = netloc.encode('utf-8') - if isinstance(path, str): - path = path.encode('utf-8') - if isinstance(query, str): - query = query.encode('utf-8') - if isinstance(fragment, str): - fragment = fragment.encode('utf-8') - - if isinstance(params, (str, bytes)): - params = to_native_string(params) - - enc_params = self._encode_params(params) - if enc_params: - if query: - query = '%s&%s' % (query, enc_params) - else: - query = enc_params - - url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment])) - self.url = url - - def prepare_headers(self, headers): - """Prepares the given HTTP headers.""" - - self.headers = CaseInsensitiveDict() - if headers: - for header in headers.items(): - # Raise exception on invalid header value. - check_header_validity(header) - name, value = header - self.headers[to_native_string(name)] = value - - def prepare_body(self, data, files, json=None): - """Prepares the given HTTP body data.""" - - # Check if file, fo, generator, iterator. - # If not, run through normal process. - - # Nottin' on you. - body = None - content_type = None - - if not data and json is not None: - # urllib3 requires a bytes-like body. Python 2's json.dumps - # provides this natively, but Python 3 gives a Unicode string. - content_type = 'application/json' - - try: - body = complexjson.dumps(json, allow_nan=False) - except ValueError as ve: - raise InvalidJSONError(ve, request=self) - - if not isinstance(body, bytes): - body = body.encode('utf-8') - - is_stream = all([ - hasattr(data, '__iter__'), - not isinstance(data, (basestring, list, tuple, Mapping)) - ]) - - if is_stream: - try: - length = super_len(data) - except (TypeError, AttributeError, UnsupportedOperation): - length = None - - body = data - - if getattr(body, 'tell', None) is not None: - # Record the current file position before reading. - # This will allow us to rewind a file in the event - # of a redirect. - try: - self._body_position = body.tell() - except (IOError, OSError): - # This differentiates from None, allowing us to catch - # a failed `tell()` later when trying to rewind the body - self._body_position = object() - - if files: - raise NotImplementedError('Streamed bodies and files are mutually exclusive.') - - if length: - self.headers['Content-Length'] = builtin_str(length) - else: - self.headers['Transfer-Encoding'] = 'chunked' - else: - # Multi-part file uploads. - if files: - (body, content_type) = self._encode_files(files, data) - else: - if data: - body = self._encode_params(data) - if isinstance(data, basestring) or hasattr(data, 'read'): - content_type = None - else: - content_type = 'application/x-www-form-urlencoded' - - self.prepare_content_length(body) - - # Add content-type if it wasn't explicitly provided. - if content_type and ('content-type' not in self.headers): - self.headers['Content-Type'] = content_type - - self.body = body - - def prepare_content_length(self, body): - """Prepare Content-Length header based on request method and body""" - if body is not None: - length = super_len(body) - if length: - # If length exists, set it. Otherwise, we fallback - # to Transfer-Encoding: chunked. - self.headers['Content-Length'] = builtin_str(length) - elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None: - # Set Content-Length to 0 for methods that can have a body - # but don't provide one. (i.e. 
not GET or HEAD) - self.headers['Content-Length'] = '0' - - def prepare_auth(self, auth, url=''): - """Prepares the given HTTP auth data.""" - - # If no Auth is explicitly provided, extract it from the URL first. - if auth is None: - url_auth = get_auth_from_url(self.url) - auth = url_auth if any(url_auth) else None - - if auth: - if isinstance(auth, tuple) and len(auth) == 2: - # special-case basic HTTP auth - auth = HTTPBasicAuth(*auth) - - # Allow auth to make its changes. - r = auth(self) - - # Update self to reflect the auth changes. - self.__dict__.update(r.__dict__) - - # Recompute Content-Length - self.prepare_content_length(self.body) - - def prepare_cookies(self, cookies): - """Prepares the given HTTP cookie data. - - This function eventually generates a ``Cookie`` header from the - given cookies using cookielib. Due to cookielib's design, the header - will not be regenerated if it already exists, meaning this function - can only be called once for the life of the - :class:`PreparedRequest ` object. Any subsequent calls - to ``prepare_cookies`` will have no actual effect, unless the "Cookie" - header is removed beforehand. - """ - if isinstance(cookies, cookielib.CookieJar): - self._cookies = cookies - else: - self._cookies = cookiejar_from_dict(cookies) - - cookie_header = get_cookie_header(self._cookies, self) - if cookie_header is not None: - self.headers['Cookie'] = cookie_header - - def prepare_hooks(self, hooks): - """Prepares the given hooks.""" - # hooks can be passed as None to the prepare method and to this - # method. To prevent iterating over None, simply use an empty list - # if hooks is False-y - hooks = hooks or [] - for event in hooks: - self.register_hook(event, hooks[event]) - - -class Response(object): - """The :class:`Response ` object, which contains a - server's response to an HTTP request. - """ - - __attrs__ = [ - '_content', 'status_code', 'headers', 'url', 'history', - 'encoding', 'reason', 'cookies', 'elapsed', 'request' - ] - - def __init__(self): - self._content = False - self._content_consumed = False - self._next = None - - #: Integer Code of responded HTTP Status, e.g. 404 or 200. - self.status_code = None - - #: Case-insensitive Dictionary of Response Headers. - #: For example, ``headers['content-encoding']`` will return the - #: value of a ``'Content-Encoding'`` response header. - self.headers = CaseInsensitiveDict() - - #: File-like object representation of response (for advanced usage). - #: Use of ``raw`` requires that ``stream=True`` be set on the request. - #: This requirement does not apply for use internally to Requests. - self.raw = None - - #: Final URL location of Response. - self.url = None - - #: Encoding to decode with when accessing r.text. - self.encoding = None - - #: A list of :class:`Response ` objects from - #: the history of the Request. Any redirect responses will end - #: up here. The list is sorted from the oldest to the most recent request. - self.history = [] - - #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK". - self.reason = None - - #: A CookieJar of Cookies the server sent back. - self.cookies = cookiejar_from_dict({}) - - #: The amount of time elapsed between sending the request - #: and the arrival of the response (as a timedelta). - #: This property specifically measures the time taken between sending - #: the first byte of the request and finishing parsing the headers. It - #: is therefore unaffected by consuming the response content or the - #: value of the ``stream`` keyword argument. 
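
# How `prepare_body` above chooses between a JSON body and a form-encoded
# body, reduced to a minimal stdlib-only sketch (`choose_body` is a
# hypothetical helper, not the deleted implementation itself):
import json
from urllib.parse import urlencode

def choose_body(data=None, json_payload=None):
    # json= is only used when no `data` was given, mirroring prepare_body.
    if not data and json_payload is not None:
        return json.dumps(json_payload).encode('utf-8'), 'application/json'
    if data:
        return urlencode(data), 'application/x-www-form-urlencoded'
    return None, None

assert choose_body(json_payload={'a': 1}) == (b'{"a": 1}', 'application/json')
assert choose_body(data={'a': 1}) == ('a=1', 'application/x-www-form-urlencoded')
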
-        self.elapsed = datetime.timedelta(0)
-
-        #: The :class:`PreparedRequest <PreparedRequest>` object to which this
-        #: is a response.
-        self.request = None
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, *args):
-        self.close()
-
-    def __getstate__(self):
-        # Consume everything; accessing the content attribute makes
-        # sure the content has been fully read.
-        if not self._content_consumed:
-            self.content
-
-        return {attr: getattr(self, attr, None) for attr in self.__attrs__}
-
-    def __setstate__(self, state):
-        for name, value in state.items():
-            setattr(self, name, value)
-
-        # pickled objects do not have .raw
-        setattr(self, '_content_consumed', True)
-        setattr(self, 'raw', None)
-
-    def __repr__(self):
-        return '<Response [%s]>' % (self.status_code)
-
-    def __bool__(self):
-        """Returns True if :attr:`status_code` is less than 400.
-
-        This attribute checks if the status code of the response is between
-        400 and 600 to see if there was a client error or a server error. If
-        the status code is between 200 and 400, this will return True. This
-        is **not** a check to see if the response code is ``200 OK``.
-        """
-        return self.ok
-
-    def __nonzero__(self):
-        """Returns True if :attr:`status_code` is less than 400.
-
-        This attribute checks if the status code of the response is between
-        400 and 600 to see if there was a client error or a server error. If
-        the status code is between 200 and 400, this will return True. This
-        is **not** a check to see if the response code is ``200 OK``.
-        """
-        return self.ok
-
-    def __iter__(self):
-        """Allows you to use a response as an iterator."""
-        return self.iter_content(128)
-
-    @property
-    def ok(self):
-        """Returns True if :attr:`status_code` is less than 400, False if not.
-
-        This attribute checks if the status code of the response is between
-        400 and 600 to see if there was a client error or a server error. If
-        the status code is between 200 and 400, this will return True. This
-        is **not** a check to see if the response code is ``200 OK``.
-        """
-        try:
-            self.raise_for_status()
-        except HTTPError:
-            return False
-        return True
-
-    @property
-    def is_redirect(self):
-        """True if this Response is a well-formed HTTP redirect that could have
-        been processed automatically (by :meth:`Session.resolve_redirects`).
-        """
-        return ('location' in self.headers and self.status_code in REDIRECT_STATI)
-
-    @property
-    def is_permanent_redirect(self):
-        """True if this Response is one of the permanent versions of redirect."""
-        return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
-
-    @property
-    def next(self):
-        """Returns a PreparedRequest for the next request in a redirect chain, if there is one."""
-        return self._next
-
-    @property
-    def apparent_encoding(self):
-        """The apparent encoding, provided by the charset_normalizer or chardet libraries."""
-        return chardet.detect(self.content)['encoding']
-
-    def iter_content(self, chunk_size=1, decode_unicode=False):
-        """Iterates over the response data. When stream=True is set on the
-        request, this avoids reading the content at once into memory for
-        large responses. The chunk size is the number of bytes it should
-        read into memory. This is not necessarily the length of each item
-        returned as decoding can take place.
-
-        chunk_size must be of type int or None. A value of None will
-        function differently depending on the value of `stream`.
-        stream=True will read data as it arrives in whatever size the
-        chunks are received.
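
# The truthiness contract documented for __bool__/ok above, in isolation
# (a hypothetical stub, not the requests Response class):
class StubResponse:
    def __init__(self, status_code):
        self.status_code = status_code

    def __bool__(self):
        # Truthy for anything below 400, i.e. "no client or server error".
        return self.status_code < 400

assert StubResponse(200) and StubResponse(304)
assert not StubResponse(404) and not StubResponse(503)
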
If stream=False, data is returned as - a single chunk. - - If decode_unicode is True, content will be decoded using the best - available encoding based on the response. - """ - - def generate(): - # Special case for urllib3. - if hasattr(self.raw, 'stream'): - try: - for chunk in self.raw.stream(chunk_size, decode_content=True): - yield chunk - except ProtocolError as e: - raise ChunkedEncodingError(e) - except DecodeError as e: - raise ContentDecodingError(e) - except ReadTimeoutError as e: - raise ConnectionError(e) - else: - # Standard file-like object. - while True: - chunk = self.raw.read(chunk_size) - if not chunk: - break - yield chunk - - self._content_consumed = True - - if self._content_consumed and isinstance(self._content, bool): - raise StreamConsumedError() - elif chunk_size is not None and not isinstance(chunk_size, int): - raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size)) - # simulate reading small chunks of the content - reused_chunks = iter_slices(self._content, chunk_size) - - stream_chunks = generate() - - chunks = reused_chunks if self._content_consumed else stream_chunks - - if decode_unicode: - chunks = stream_decode_response_unicode(chunks, self) - - return chunks - - def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None): - """Iterates over the response data, one line at a time. When - stream=True is set on the request, this avoids reading the - content at once into memory for large responses. - - .. note:: This method is not reentrant safe. - """ - - pending = None - - for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode): - - if pending is not None: - chunk = pending + chunk - - if delimiter: - lines = chunk.split(delimiter) - else: - lines = chunk.splitlines() - - if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]: - pending = lines.pop() - else: - pending = None - - for line in lines: - yield line - - if pending is not None: - yield pending - - @property - def content(self): - """Content of the response, in bytes.""" - - if self._content is False: - # Read the contents. - if self._content_consumed: - raise RuntimeError( - 'The content for this response was already consumed') - - if self.status_code == 0 or self.raw is None: - self._content = None - else: - self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b'' - - self._content_consumed = True - # don't need to release the connection; that's been handled by urllib3 - # since we exhausted the data. - return self._content - - @property - def text(self): - """Content of the response, in unicode. - - If Response.encoding is None, encoding will be guessed using - ``charset_normalizer`` or ``chardet``. - - The encoding of the response content is determined based solely on HTTP - headers, following RFC 2616 to the letter. If you can take advantage of - non-HTTP knowledge to make a better guess at the encoding, you should - set ``r.encoding`` appropriately before accessing this property. - """ - - # Try charset from content-type - content = None - encoding = self.encoding - - if not self.content: - return str('') - - # Fallback to auto-detected encoding. - if self.encoding is None: - encoding = self.apparent_encoding - - # Decode unicode from given encoding. - try: - content = str(self.content, encoding, errors='replace') - except (LookupError, TypeError): - # A LookupError is raised if the encoding was not found which could - # indicate a misspelling or similar mistake. 
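
# The hold-back strategy `iter_lines` above uses for partial trailing lines,
# as a self-contained generator (same core logic, default delimiter only):
def iter_lines_from(chunks):
    pending = None
    for chunk in chunks:
        if pending is not None:
            chunk = pending + chunk
        lines = chunk.splitlines()
        # If the chunk did not end on a line break, keep the tail pending.
        if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
            pending = lines.pop()
        else:
            pending = None
        for line in lines:
            yield line
    if pending is not None:
        yield pending

assert list(iter_lines_from(['ab\ncd', 'ef\n', 'gh'])) == ['ab', 'cdef', 'gh']
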
- # - # A TypeError can be raised if encoding is None - # - # So we try blindly encoding. - content = str(self.content, errors='replace') - - return content - - def json(self, **kwargs): - r"""Returns the json-encoded content of a response, if any. - - :param \*\*kwargs: Optional arguments that ``json.loads`` takes. - :raises simplejson.JSONDecodeError: If the response body does not - contain valid json and simplejson is installed. - :raises json.JSONDecodeError: If the response body does not contain - valid json and simplejson is not installed on Python 3. - :raises ValueError: If the response body does not contain valid - json and simplejson is not installed on Python 2. - """ - - if not self.encoding and self.content and len(self.content) > 3: - # No encoding set. JSON RFC 4627 section 3 states we should expect - # UTF-8, -16 or -32. Detect which one to use; If the detection or - # decoding fails, fall back to `self.text` (using charset_normalizer to make - # a best guess). - encoding = guess_json_utf(self.content) - if encoding is not None: - try: - return complexjson.loads( - self.content.decode(encoding), **kwargs - ) - except UnicodeDecodeError: - # Wrong UTF codec detected; usually because it's not UTF-8 - # but some other 8-bit codec. This is an RFC violation, - # and the server didn't bother to tell us what codec *was* - # used. - pass - return complexjson.loads(self.text, **kwargs) - - @property - def links(self): - """Returns the parsed header links of the response, if any.""" - - header = self.headers.get('link') - - # l = MultiDict() - l = {} - - if header: - links = parse_header_links(header) - - for link in links: - key = link.get('rel') or link.get('url') - l[key] = link - - return l - - def raise_for_status(self): - """Raises :class:`HTTPError`, if one occurred.""" - - http_error_msg = '' - if isinstance(self.reason, bytes): - # We attempt to decode utf-8 first because some servers - # choose to localize their reason strings. If the string - # isn't utf-8, we fall back to iso-8859-1 for all other - # encodings. (See PR #3538) - try: - reason = self.reason.decode('utf-8') - except UnicodeDecodeError: - reason = self.reason.decode('iso-8859-1') - else: - reason = self.reason - - if 400 <= self.status_code < 500: - http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url) - - elif 500 <= self.status_code < 600: - http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url) - - if http_error_msg: - raise HTTPError(http_error_msg, response=self) - - def close(self): - """Releases the connection back to the pool. Once this method has been - called the underlying ``raw`` object must not be accessed again. - - *Note: Should not normally need to be called explicitly.* - """ - if not self._content_consumed: - self.raw.close() - - release_conn = getattr(self.raw, 'release_conn', None) - if release_conn is not None: - release_conn() diff --git a/vendor/requests/requests/packages.py b/vendor/requests/requests/packages.py deleted file mode 100644 index 00196bff..00000000 --- a/vendor/requests/requests/packages.py +++ /dev/null @@ -1,26 +0,0 @@ -import sys - -try: - import chardet -except ImportError: - import charset_normalizer as chardet - import warnings - - warnings.filterwarnings('ignore', 'Trying to detect', module='charset_normalizer') - -# This code exists for backwards compatibility reasons. -# I don't like it either. Just look the other way. 
:) - -for package in ('urllib3', 'idna'): - locals()[package] = __import__(package) - # This traversal is apparently necessary such that the identities are - # preserved (requests.packages.urllib3.* is urllib3.*) - for mod in list(sys.modules): - if mod == package or mod.startswith(package + '.'): - sys.modules['requests.packages.' + mod] = sys.modules[mod] - -target = chardet.__name__ -for mod in list(sys.modules): - if mod == target or mod.startswith(target + '.'): - sys.modules['requests.packages.' + target.replace(target, 'chardet')] = sys.modules[mod] -# Kinda cool, though, right? diff --git a/vendor/requests/requests/sessions.py b/vendor/requests/requests/sessions.py deleted file mode 100644 index ae4bcc8e..00000000 --- a/vendor/requests/requests/sessions.py +++ /dev/null @@ -1,781 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.sessions -~~~~~~~~~~~~~~~~~ - -This module provides a Session object to manage and persist settings across -requests (cookies, auth, proxies). -""" -import os -import sys -import time -from datetime import timedelta -from collections import OrderedDict - -from .auth import _basic_auth_str -from .compat import cookielib, is_py3, urljoin, urlparse, Mapping -from .cookies import ( - cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies) -from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT -from .hooks import default_hooks, dispatch_hook -from ._internal_utils import to_native_string -from .utils import to_key_val_list, default_headers, DEFAULT_PORTS -from .exceptions import ( - TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError) - -from .structures import CaseInsensitiveDict -from .adapters import HTTPAdapter - -from .utils import ( - requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies, - get_auth_from_url, rewind_body -) - -from .status_codes import codes - -# formerly defined here, reexposed here for backward compatibility -from .models import REDIRECT_STATI - -# Preferred clock, based on which one is more accurate on a given system. -if sys.platform == 'win32': - try: # Python 3.4+ - preferred_clock = time.perf_counter - except AttributeError: # Earlier than Python 3. - preferred_clock = time.clock -else: - preferred_clock = time.time - - -def merge_setting(request_setting, session_setting, dict_class=OrderedDict): - """Determines appropriate setting for a given request, taking into account - the explicit setting on that request, and the setting in the session. If a - setting is a dictionary, they will be merged together using `dict_class` - """ - - if session_setting is None: - return request_setting - - if request_setting is None: - return session_setting - - # Bypass if not a dictionary (e.g. verify) - if not ( - isinstance(session_setting, Mapping) and - isinstance(request_setting, Mapping) - ): - return request_setting - - merged_setting = dict_class(to_key_val_list(session_setting)) - merged_setting.update(to_key_val_list(request_setting)) - - # Remove keys that are set to None. Extract keys first to avoid altering - # the dictionary during iteration. - none_keys = [k for (k, v) in merged_setting.items() if v is None] - for key in none_keys: - del merged_setting[key] - - return merged_setting - - -def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict): - """Properly merges both requests and session hooks. - - This is necessary because when request_hooks == {'response': []}, the - merge breaks Session hooks entirely. 
- """ - if session_hooks is None or session_hooks.get('response') == []: - return request_hooks - - if request_hooks is None or request_hooks.get('response') == []: - return session_hooks - - return merge_setting(request_hooks, session_hooks, dict_class) - - -class SessionRedirectMixin(object): - - def get_redirect_target(self, resp): - """Receives a Response. Returns a redirect URI or ``None``""" - # Due to the nature of how requests processes redirects this method will - # be called at least once upon the original response and at least twice - # on each subsequent redirect response (if any). - # If a custom mixin is used to handle this logic, it may be advantageous - # to cache the redirect location onto the response object as a private - # attribute. - if resp.is_redirect: - location = resp.headers['location'] - # Currently the underlying http module on py3 decode headers - # in latin1, but empirical evidence suggests that latin1 is very - # rarely used with non-ASCII characters in HTTP headers. - # It is more likely to get UTF8 header rather than latin1. - # This causes incorrect handling of UTF8 encoded location headers. - # To solve this, we re-encode the location in latin1. - if is_py3: - location = location.encode('latin1') - return to_native_string(location, 'utf8') - return None - - def should_strip_auth(self, old_url, new_url): - """Decide whether Authorization header should be removed when redirecting""" - old_parsed = urlparse(old_url) - new_parsed = urlparse(new_url) - if old_parsed.hostname != new_parsed.hostname: - return True - # Special case: allow http -> https redirect when using the standard - # ports. This isn't specified by RFC 7235, but is kept to avoid - # breaking backwards compatibility with older versions of requests - # that allowed any redirects on the same host. - if (old_parsed.scheme == 'http' and old_parsed.port in (80, None) - and new_parsed.scheme == 'https' and new_parsed.port in (443, None)): - return False - - # Handle default port usage corresponding to scheme. - changed_port = old_parsed.port != new_parsed.port - changed_scheme = old_parsed.scheme != new_parsed.scheme - default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None) - if (not changed_scheme and old_parsed.port in default_port - and new_parsed.port in default_port): - return False - - # Standard case: root URI must match - return changed_port or changed_scheme - - def resolve_redirects(self, resp, req, stream=False, timeout=None, - verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs): - """Receives a Response. Returns a generator of Responses or Requests.""" - - hist = [] # keep track of history - - url = self.get_redirect_target(resp) - previous_fragment = urlparse(req.url).fragment - while url: - prepared_request = req.copy() - - # Update history and keep track of redirects. - # resp.history must ignore the original request in this loop - hist.append(resp) - resp.history = hist[1:] - - try: - resp.content # Consume socket so it can be released - except (ChunkedEncodingError, ContentDecodingError, RuntimeError): - resp.raw.read(decode_content=False) - - if len(resp.history) >= self.max_redirects: - raise TooManyRedirects('Exceeded {} redirects.'.format(self.max_redirects), response=resp) - - # Release the connection back into the pool. 
- resp.close() - - # Handle redirection without scheme (see: RFC 1808 Section 4) - if url.startswith('//'): - parsed_rurl = urlparse(resp.url) - url = ':'.join([to_native_string(parsed_rurl.scheme), url]) - - # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2) - parsed = urlparse(url) - if parsed.fragment == '' and previous_fragment: - parsed = parsed._replace(fragment=previous_fragment) - elif parsed.fragment: - previous_fragment = parsed.fragment - url = parsed.geturl() - - # Facilitate relative 'location' headers, as allowed by RFC 7231. - # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') - # Compliant with RFC3986, we percent encode the url. - if not parsed.netloc: - url = urljoin(resp.url, requote_uri(url)) - else: - url = requote_uri(url) - - prepared_request.url = to_native_string(url) - - self.rebuild_method(prepared_request, resp) - - # https://github.com/psf/requests/issues/1084 - if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect): - # https://github.com/psf/requests/issues/3490 - purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding') - for header in purged_headers: - prepared_request.headers.pop(header, None) - prepared_request.body = None - - headers = prepared_request.headers - headers.pop('Cookie', None) - - # Extract any cookies sent on the response to the cookiejar - # in the new request. Because we've mutated our copied prepared - # request, use the old one that we haven't yet touched. - extract_cookies_to_jar(prepared_request._cookies, req, resp.raw) - merge_cookies(prepared_request._cookies, self.cookies) - prepared_request.prepare_cookies(prepared_request._cookies) - - # Rebuild auth and proxy information. - proxies = self.rebuild_proxies(prepared_request, proxies) - self.rebuild_auth(prepared_request, resp) - - # A failed tell() sets `_body_position` to `object()`. This non-None - # value ensures `rewindable` will be True, allowing us to raise an - # UnrewindableBodyError, instead of hanging the connection. - rewindable = ( - prepared_request._body_position is not None and - ('Content-Length' in headers or 'Transfer-Encoding' in headers) - ) - - # Attempt to rewind consumed file-like object. - if rewindable: - rewind_body(prepared_request) - - # Override the original request. - req = prepared_request - - if yield_requests: - yield req - else: - - resp = self.send( - req, - stream=stream, - timeout=timeout, - verify=verify, - cert=cert, - proxies=proxies, - allow_redirects=False, - **adapter_kwargs - ) - - extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) - - # extract redirect url, if any, for the next loop - url = self.get_redirect_target(resp) - yield resp - - def rebuild_auth(self, prepared_request, response): - """When being redirected we may want to strip authentication from the - request to avoid leaking credentials. This method intelligently removes - and reapplies authentication where possible to avoid credential loss. - """ - headers = prepared_request.headers - url = prepared_request.url - - if 'Authorization' in headers and self.should_strip_auth(response.request.url, url): - # If we get redirected to a new host, we should strip out any - # authentication headers. - del headers['Authorization'] - - # .netrc might have more auth for us on our new host. 
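
# The redirect auth-stripping policy of `should_strip_auth` above, sketched
# without the DEFAULT_PORTS table (standard http/https ports hardcoded):
from urllib.parse import urlparse

def strips_auth(old_url, new_url):
    old, new = urlparse(old_url), urlparse(new_url)
    if old.hostname != new.hostname:
        return True  # new host: never forward the Authorization header
    # http -> https upgrade on default ports keeps the header.
    if (old.scheme == 'http' and old.port in (80, None)
            and new.scheme == 'https' and new.port in (443, None)):
        return False
    return old.port != new.port or old.scheme != new.scheme

assert strips_auth('https://a.example/x', 'https://other.example/y')
assert not strips_auth('http://a.example/x', 'https://a.example/y')
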
-        new_auth = get_netrc_auth(url) if self.trust_env else None
-        if new_auth is not None:
-            prepared_request.prepare_auth(new_auth)
-
-
-    def rebuild_proxies(self, prepared_request, proxies):
-        """This method re-evaluates the proxy configuration by considering the
-        environment variables. If we are redirected to a URL covered by
-        NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
-        proxy keys for this URL (in case they were stripped by a previous
-        redirect).
-
-        This method also replaces the Proxy-Authorization header where
-        necessary.
-
-        :rtype: dict
-        """
-        proxies = proxies if proxies is not None else {}
-        headers = prepared_request.headers
-        url = prepared_request.url
-        scheme = urlparse(url).scheme
-        new_proxies = proxies.copy()
-        no_proxy = proxies.get('no_proxy')
-
-        bypass_proxy = should_bypass_proxies(url, no_proxy=no_proxy)
-        if self.trust_env and not bypass_proxy:
-            environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)
-
-            proxy = environ_proxies.get(scheme, environ_proxies.get('all'))
-
-            if proxy:
-                new_proxies.setdefault(scheme, proxy)
-
-        if 'Proxy-Authorization' in headers:
-            del headers['Proxy-Authorization']
-
-        try:
-            username, password = get_auth_from_url(new_proxies[scheme])
-        except KeyError:
-            username, password = None, None
-
-        if username and password:
-            headers['Proxy-Authorization'] = _basic_auth_str(username, password)
-
-        return new_proxies
-
-    def rebuild_method(self, prepared_request, response):
-        """When being redirected we may want to change the method of the request
-        based on certain specs or browser behavior.
-        """
-        method = prepared_request.method
-
-        # https://tools.ietf.org/html/rfc7231#section-6.4.4
-        if response.status_code == codes.see_other and method != 'HEAD':
-            method = 'GET'
-
-        # Do what the browsers do, despite standards...
-        # First, turn 302s into GETs.
-        if response.status_code == codes.found and method != 'HEAD':
-            method = 'GET'
-
-        # Second, if a POST is responded to with a 301, turn it into a GET.
-        # This bizarre behaviour is explained in Issue 1704.
-        if response.status_code == codes.moved and method == 'POST':
-            method = 'GET'
-
-        prepared_request.method = method
-
-
-class Session(SessionRedirectMixin):
-    """A Requests session.
-
-    Provides cookie persistence, connection-pooling, and configuration.
-
-    Basic Usage::
-
-      >>> import requests
-      >>> s = requests.Session()
-      >>> s.get('https://httpbin.org/get')
-      <Response [200]>
-
-    Or as a context manager::
-
-      >>> with requests.Session() as s:
-      ...     s.get('https://httpbin.org/get')
-      <Response [200]>
-    """
-
-    __attrs__ = [
-        'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',
-        'cert', 'adapters', 'stream', 'trust_env',
-        'max_redirects',
-    ]
-
-    def __init__(self):
-
-        #: A case-insensitive dictionary of headers to be sent on each
-        #: :class:`Request <Request>` sent from this
-        #: :class:`Session <Session>`.
-        self.headers = default_headers()
-
-        #: Default Authentication tuple or object to attach to
-        #: :class:`Request <Request>`.
-        self.auth = None
-
-        #: Dictionary mapping protocol or protocol and host to the URL of the proxy
-        #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
-        #: be used on each :class:`Request <Request>`.
-        self.proxies = {}
-
-        #: Event-handling hooks.
-        self.hooks = default_hooks()
-
-        #: Dictionary of querystring data to attach to each
-        #: :class:`Request <Request>`. The dictionary values may be lists for
-        #: representing multivalued query parameters.
-        self.params = {}
-
-        #: Stream response content default.
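
# The browser-compatible method rewriting performed by `rebuild_method`
# above, as a pure function (status codes written out instead of the
# `codes` lookup):
def rebuild_method(method, status_code):
    if status_code == 303 and method != 'HEAD':  # see_other
        return 'GET'
    if status_code == 302 and method != 'HEAD':  # found
        return 'GET'
    if status_code == 301 and method == 'POST':  # moved
        return 'GET'
    return method

assert rebuild_method('POST', 301) == 'GET'
assert rebuild_method('HEAD', 302) == 'HEAD'
assert rebuild_method('PUT', 307) == 'PUT'  # 307/308 never rewrite
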
- self.stream = False - - #: SSL Verification default. - #: Defaults to `True`, requiring requests to verify the TLS certificate at the - #: remote end. - #: If verify is set to `False`, requests will accept any TLS certificate - #: presented by the server, and will ignore hostname mismatches and/or - #: expired certificates, which will make your application vulnerable to - #: man-in-the-middle (MitM) attacks. - #: Only set this to `False` for testing. - self.verify = True - - #: SSL client certificate default, if String, path to ssl client - #: cert file (.pem). If Tuple, ('cert', 'key') pair. - self.cert = None - - #: Maximum number of redirects allowed. If the request exceeds this - #: limit, a :class:`TooManyRedirects` exception is raised. - #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is - #: 30. - self.max_redirects = DEFAULT_REDIRECT_LIMIT - - #: Trust environment settings for proxy configuration, default - #: authentication and similar. - self.trust_env = True - - #: A CookieJar containing all currently outstanding cookies set on this - #: session. By default it is a - #: :class:`RequestsCookieJar `, but - #: may be any other ``cookielib.CookieJar`` compatible object. - self.cookies = cookiejar_from_dict({}) - - # Default connection adapters. - self.adapters = OrderedDict() - self.mount('https://', HTTPAdapter()) - self.mount('http://', HTTPAdapter()) - - def __enter__(self): - return self - - def __exit__(self, *args): - self.close() - - def prepare_request(self, request): - """Constructs a :class:`PreparedRequest ` for - transmission and returns it. The :class:`PreparedRequest` has settings - merged from the :class:`Request ` instance and those of the - :class:`Session`. - - :param request: :class:`Request` instance to prepare with this - session's settings. - :rtype: requests.PreparedRequest - """ - cookies = request.cookies or {} - - # Bootstrap CookieJar. - if not isinstance(cookies, cookielib.CookieJar): - cookies = cookiejar_from_dict(cookies) - - # Merge with session cookies - merged_cookies = merge_cookies( - merge_cookies(RequestsCookieJar(), self.cookies), cookies) - - # Set environment's basic authentication if not explicitly set. - auth = request.auth - if self.trust_env and not auth and not self.auth: - auth = get_netrc_auth(request.url) - - p = PreparedRequest() - p.prepare( - method=request.method.upper(), - url=request.url, - files=request.files, - data=request.data, - json=request.json, - headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict), - params=merge_setting(request.params, self.params), - auth=merge_setting(auth, self.auth), - cookies=merged_cookies, - hooks=merge_hooks(request.hooks, self.hooks), - ) - return p - - def request(self, method, url, - params=None, data=None, headers=None, cookies=None, files=None, - auth=None, timeout=None, allow_redirects=True, proxies=None, - hooks=None, stream=None, verify=None, cert=None, json=None): - """Constructs a :class:`Request `, prepares it and sends it. - Returns :class:`Response ` object. - - :param method: method for the new :class:`Request` object. - :param url: URL for the new :class:`Request` object. - :param params: (optional) Dictionary or bytes to be sent in the query - string for the :class:`Request`. - :param data: (optional) Dictionary, list of tuples, bytes, or file-like - object to send in the body of the :class:`Request`. - :param json: (optional) json to send in the body of the - :class:`Request`. 
- :param headers: (optional) Dictionary of HTTP Headers to send with the - :class:`Request`. - :param cookies: (optional) Dict or CookieJar object to send with the - :class:`Request`. - :param files: (optional) Dictionary of ``'filename': file-like-objects`` - for multipart encoding upload. - :param auth: (optional) Auth tuple or callable to enable - Basic/Digest/Custom HTTP Auth. - :param timeout: (optional) How long to wait for the server to send - data before giving up, as a float, or a :ref:`(connect timeout, - read timeout) ` tuple. - :type timeout: float or tuple - :param allow_redirects: (optional) Set to True by default. - :type allow_redirects: bool - :param proxies: (optional) Dictionary mapping protocol or protocol and - hostname to the URL of the proxy. - :param stream: (optional) whether to immediately download the response - content. Defaults to ``False``. - :param verify: (optional) Either a boolean, in which case it controls whether we verify - the server's TLS certificate, or a string, in which case it must be a path - to a CA bundle to use. Defaults to ``True``. When set to - ``False``, requests will accept any TLS certificate presented by - the server, and will ignore hostname mismatches and/or expired - certificates, which will make your application vulnerable to - man-in-the-middle (MitM) attacks. Setting verify to ``False`` - may be useful during local development or testing. - :param cert: (optional) if String, path to ssl client cert file (.pem). - If Tuple, ('cert', 'key') pair. - :rtype: requests.Response - """ - # Create the Request. - req = Request( - method=method.upper(), - url=url, - headers=headers, - files=files, - data=data or {}, - json=json, - params=params or {}, - auth=auth, - cookies=cookies, - hooks=hooks, - ) - prep = self.prepare_request(req) - - proxies = proxies or {} - - settings = self.merge_environment_settings( - prep.url, proxies, stream, verify, cert - ) - - # Send the request. - send_kwargs = { - 'timeout': timeout, - 'allow_redirects': allow_redirects, - } - send_kwargs.update(settings) - resp = self.send(prep, **send_kwargs) - - return resp - - def get(self, url, **kwargs): - r"""Sends a GET request. Returns :class:`Response` object. - - :param url: URL for the new :class:`Request` object. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :rtype: requests.Response - """ - - kwargs.setdefault('allow_redirects', True) - return self.request('GET', url, **kwargs) - - def options(self, url, **kwargs): - r"""Sends a OPTIONS request. Returns :class:`Response` object. - - :param url: URL for the new :class:`Request` object. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :rtype: requests.Response - """ - - kwargs.setdefault('allow_redirects', True) - return self.request('OPTIONS', url, **kwargs) - - def head(self, url, **kwargs): - r"""Sends a HEAD request. Returns :class:`Response` object. - - :param url: URL for the new :class:`Request` object. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :rtype: requests.Response - """ - - kwargs.setdefault('allow_redirects', False) - return self.request('HEAD', url, **kwargs) - - def post(self, url, data=None, json=None, **kwargs): - r"""Sends a POST request. Returns :class:`Response` object. - - :param url: URL for the new :class:`Request` object. - :param data: (optional) Dictionary, list of tuples, bytes, or file-like - object to send in the body of the :class:`Request`. - :param json: (optional) json to send in the body of the :class:`Request`. 
- :param \*\*kwargs: Optional arguments that ``request`` takes. - :rtype: requests.Response - """ - - return self.request('POST', url, data=data, json=json, **kwargs) - - def put(self, url, data=None, **kwargs): - r"""Sends a PUT request. Returns :class:`Response` object. - - :param url: URL for the new :class:`Request` object. - :param data: (optional) Dictionary, list of tuples, bytes, or file-like - object to send in the body of the :class:`Request`. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :rtype: requests.Response - """ - - return self.request('PUT', url, data=data, **kwargs) - - def patch(self, url, data=None, **kwargs): - r"""Sends a PATCH request. Returns :class:`Response` object. - - :param url: URL for the new :class:`Request` object. - :param data: (optional) Dictionary, list of tuples, bytes, or file-like - object to send in the body of the :class:`Request`. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :rtype: requests.Response - """ - - return self.request('PATCH', url, data=data, **kwargs) - - def delete(self, url, **kwargs): - r"""Sends a DELETE request. Returns :class:`Response` object. - - :param url: URL for the new :class:`Request` object. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :rtype: requests.Response - """ - - return self.request('DELETE', url, **kwargs) - - def send(self, request, **kwargs): - """Send a given PreparedRequest. - - :rtype: requests.Response - """ - # Set defaults that the hooks can utilize to ensure they always have - # the correct parameters to reproduce the previous request. - kwargs.setdefault('stream', self.stream) - kwargs.setdefault('verify', self.verify) - kwargs.setdefault('cert', self.cert) - kwargs.setdefault('proxies', self.rebuild_proxies(request, self.proxies)) - - # It's possible that users might accidentally send a Request object. - # Guard against that specific failure case. - if isinstance(request, Request): - raise ValueError('You can only send PreparedRequests.') - - # Set up variables needed for resolve_redirects and dispatching of hooks - allow_redirects = kwargs.pop('allow_redirects', True) - stream = kwargs.get('stream') - hooks = request.hooks - - # Get the appropriate adapter to use - adapter = self.get_adapter(url=request.url) - - # Start time (approximately) of the request - start = preferred_clock() - - # Send the request - r = adapter.send(request, **kwargs) - - # Total elapsed time of the request (approximately) - elapsed = preferred_clock() - start - r.elapsed = timedelta(seconds=elapsed) - - # Response manipulation hooks - r = dispatch_hook('response', hooks, r, **kwargs) - - # Persist cookies - if r.history: - - # If the hooks create history then we want those cookies too - for resp in r.history: - extract_cookies_to_jar(self.cookies, resp.request, resp.raw) - - extract_cookies_to_jar(self.cookies, request, r.raw) - - # Resolve redirects if allowed. - if allow_redirects: - # Redirect resolving generator. - gen = self.resolve_redirects(r, request, **kwargs) - history = [resp for resp in gen] - else: - history = [] - - # Shuffle things around if there's history. - if history: - # Insert the first (original) request at the start - history.insert(0, r) - # Get the last request made - r = history.pop() - r.history = history - - # If redirects aren't being followed, store the response on the Request for Response.next(). 
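
# Response hooks as dispatched inside `send` above; registering one on a
# session is enough to observe every response (assumes the upstream
# requests package is importable; the URL is illustrative only):
import requests

def log_status(response, *args, **kwargs):
    print(response.status_code, response.url)

s = requests.Session()
s.hooks['response'].append(log_status)
# s.get('https://httpbin.org/get')  # would print: 200 https://httpbin.org/get
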
- if not allow_redirects: - try: - r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs)) - except StopIteration: - pass - - if not stream: - r.content - - return r - - def merge_environment_settings(self, url, proxies, stream, verify, cert): - """ - Check the environment and merge it with some settings. - - :rtype: dict - """ - # Gather clues from the surrounding environment. - if self.trust_env: - # Set environment's proxies. - no_proxy = proxies.get('no_proxy') if proxies is not None else None - env_proxies = get_environ_proxies(url, no_proxy=no_proxy) - for (k, v) in env_proxies.items(): - proxies.setdefault(k, v) - - # Look for requests environment configuration and be compatible - # with cURL. - if verify is True or verify is None: - verify = (os.environ.get('REQUESTS_CA_BUNDLE') or - os.environ.get('CURL_CA_BUNDLE')) - - # Merge all the kwargs. - proxies = merge_setting(proxies, self.proxies) - stream = merge_setting(stream, self.stream) - verify = merge_setting(verify, self.verify) - cert = merge_setting(cert, self.cert) - - return {'verify': verify, 'proxies': proxies, 'stream': stream, - 'cert': cert} - - def get_adapter(self, url): - """ - Returns the appropriate connection adapter for the given URL. - - :rtype: requests.adapters.BaseAdapter - """ - for (prefix, adapter) in self.adapters.items(): - - if url.lower().startswith(prefix.lower()): - return adapter - - # Nothing matches :-/ - raise InvalidSchema("No connection adapters were found for {!r}".format(url)) - - def close(self): - """Closes all adapters and as such the session""" - for v in self.adapters.values(): - v.close() - - def mount(self, prefix, adapter): - """Registers a connection adapter to a prefix. - - Adapters are sorted in descending order by prefix length. - """ - self.adapters[prefix] = adapter - keys_to_move = [k for k in self.adapters if len(k) < len(prefix)] - - for key in keys_to_move: - self.adapters[key] = self.adapters.pop(key) - - def __getstate__(self): - state = {attr: getattr(self, attr, None) for attr in self.__attrs__} - return state - - def __setstate__(self, state): - for attr, value in state.items(): - setattr(self, attr, value) - - -def session(): - """ - Returns a :class:`Session` for context-management. - - .. deprecated:: 1.0.0 - - This method has been deprecated since version 1.0.0 and is only kept for - backwards compatibility. New code should use :class:`~requests.sessions.Session` - to create a session. This may be removed at a future date. - - :rtype: Session - """ - return Session() diff --git a/vendor/requests/requests/status_codes.py b/vendor/requests/requests/status_codes.py deleted file mode 100644 index d80a7cd4..00000000 --- a/vendor/requests/requests/status_codes.py +++ /dev/null @@ -1,123 +0,0 @@ -# -*- coding: utf-8 -*- - -r""" -The ``codes`` object defines a mapping from common names for HTTP statuses -to their numerical codes, accessible either as attributes or as dictionary -items. - -Example:: - - >>> import requests - >>> requests.codes['temporary_redirect'] - 307 - >>> requests.codes.teapot - 418 - >>> requests.codes['\o/'] - 200 - -Some codes have multiple names, and both upper- and lower-case versions of -the names are allowed. For example, ``codes.ok``, ``codes.OK``, and -``codes.okay`` all correspond to the HTTP status code 200. -""" - -from .structures import LookupDict - -_codes = { - - # Informational. 
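
# The attribute/item duality described in the module docstring above,
# reduced to a miniature of the LookupDict + _init() pattern this file uses:
class MiniLookupDict(dict):
    def __init__(self, name=None):
        self.name = name
        super(MiniLookupDict, self).__init__()

    def __getitem__(self, key):
        # Fall through to None instead of raising KeyError.
        return self.__dict__.get(key, None)

mini = MiniLookupDict(name='status_codes')
setattr(mini, 'ok', 200)
setattr(mini, 'OK', 200)
assert mini.ok == mini['ok'] == mini['OK'] == 200
assert mini['no_such_name'] is None
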
- 100: ('continue',), - 101: ('switching_protocols',), - 102: ('processing',), - 103: ('checkpoint',), - 122: ('uri_too_long', 'request_uri_too_long'), - 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'), - 201: ('created',), - 202: ('accepted',), - 203: ('non_authoritative_info', 'non_authoritative_information'), - 204: ('no_content',), - 205: ('reset_content', 'reset'), - 206: ('partial_content', 'partial'), - 207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'), - 208: ('already_reported',), - 226: ('im_used',), - - # Redirection. - 300: ('multiple_choices',), - 301: ('moved_permanently', 'moved', '\\o-'), - 302: ('found',), - 303: ('see_other', 'other'), - 304: ('not_modified',), - 305: ('use_proxy',), - 306: ('switch_proxy',), - 307: ('temporary_redirect', 'temporary_moved', 'temporary'), - 308: ('permanent_redirect', - 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0 - - # Client Error. - 400: ('bad_request', 'bad'), - 401: ('unauthorized',), - 402: ('payment_required', 'payment'), - 403: ('forbidden',), - 404: ('not_found', '-o-'), - 405: ('method_not_allowed', 'not_allowed'), - 406: ('not_acceptable',), - 407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'), - 408: ('request_timeout', 'timeout'), - 409: ('conflict',), - 410: ('gone',), - 411: ('length_required',), - 412: ('precondition_failed', 'precondition'), - 413: ('request_entity_too_large',), - 414: ('request_uri_too_large',), - 415: ('unsupported_media_type', 'unsupported_media', 'media_type'), - 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'), - 417: ('expectation_failed',), - 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'), - 421: ('misdirected_request',), - 422: ('unprocessable_entity', 'unprocessable'), - 423: ('locked',), - 424: ('failed_dependency', 'dependency'), - 425: ('unordered_collection', 'unordered'), - 426: ('upgrade_required', 'upgrade'), - 428: ('precondition_required', 'precondition'), - 429: ('too_many_requests', 'too_many'), - 431: ('header_fields_too_large', 'fields_too_large'), - 444: ('no_response', 'none'), - 449: ('retry_with', 'retry'), - 450: ('blocked_by_windows_parental_controls', 'parental_controls'), - 451: ('unavailable_for_legal_reasons', 'legal_reasons'), - 499: ('client_closed_request',), - - # Server Error. 
- 500: ('internal_server_error', 'server_error', '/o\\', '✗'), - 501: ('not_implemented',), - 502: ('bad_gateway',), - 503: ('service_unavailable', 'unavailable'), - 504: ('gateway_timeout',), - 505: ('http_version_not_supported', 'http_version'), - 506: ('variant_also_negotiates',), - 507: ('insufficient_storage',), - 509: ('bandwidth_limit_exceeded', 'bandwidth'), - 510: ('not_extended',), - 511: ('network_authentication_required', 'network_auth', 'network_authentication'), -} - -codes = LookupDict(name='status_codes') - -def _init(): - for code, titles in _codes.items(): - for title in titles: - setattr(codes, title, code) - if not title.startswith(('\\', '/')): - setattr(codes, title.upper(), code) - - def doc(code): - names = ', '.join('``%s``' % n for n in _codes[code]) - return '* %d: %s' % (code, names) - - global __doc__ - __doc__ = (__doc__ + '\n' + - '\n'.join(doc(code) for code in sorted(_codes)) - if __doc__ is not None else None) - -_init() diff --git a/vendor/requests/requests/structures.py b/vendor/requests/requests/structures.py deleted file mode 100644 index 8ee0ba7a..00000000 --- a/vendor/requests/requests/structures.py +++ /dev/null @@ -1,105 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.structures -~~~~~~~~~~~~~~~~~~~ - -Data structures that power Requests. -""" - -from collections import OrderedDict - -from .compat import Mapping, MutableMapping - - -class CaseInsensitiveDict(MutableMapping): - """A case-insensitive ``dict``-like object. - - Implements all methods and operations of - ``MutableMapping`` as well as dict's ``copy``. Also - provides ``lower_items``. - - All keys are expected to be strings. The structure remembers the - case of the last key to be set, and ``iter(instance)``, - ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()`` - will contain case-sensitive keys. However, querying and contains - testing is case insensitive:: - - cid = CaseInsensitiveDict() - cid['Accept'] = 'application/json' - cid['aCCEPT'] == 'application/json' # True - list(cid) == ['Accept'] # True - - For example, ``headers['content-encoding']`` will return the - value of a ``'Content-Encoding'`` response header, regardless - of how the header name was originally stored. - - If the constructor, ``.update``, or equality comparison - operations are given keys that have equal ``.lower()``s, the - behavior is undefined. - """ - - def __init__(self, data=None, **kwargs): - self._store = OrderedDict() - if data is None: - data = {} - self.update(data, **kwargs) - - def __setitem__(self, key, value): - # Use the lowercased key for lookups, but store the actual - # key alongside the value. 
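
# The case-insensitive behaviour specified in the docstring above, as the
# class is actually used (assuming the upstream requests package is
# importable):
from requests.structures import CaseInsensitiveDict

cid = CaseInsensitiveDict()
cid['Accept'] = 'application/json'
assert cid['aCCEPT'] == 'application/json'   # lookups ignore case
assert list(cid) == ['Accept']               # last-set casing is preserved
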
-        self._store[key.lower()] = (key, value)
-
-    def __getitem__(self, key):
-        return self._store[key.lower()][1]
-
-    def __delitem__(self, key):
-        del self._store[key.lower()]
-
-    def __iter__(self):
-        return (casedkey for casedkey, mappedvalue in self._store.values())
-
-    def __len__(self):
-        return len(self._store)
-
-    def lower_items(self):
-        """Like iteritems(), but with all lowercase keys."""
-        return (
-            (lowerkey, keyval[1])
-            for (lowerkey, keyval)
-            in self._store.items()
-        )
-
-    def __eq__(self, other):
-        if isinstance(other, Mapping):
-            other = CaseInsensitiveDict(other)
-        else:
-            return NotImplemented
-        # Compare insensitively
-        return dict(self.lower_items()) == dict(other.lower_items())
-
-    # Copy is required
-    def copy(self):
-        return CaseInsensitiveDict(self._store.values())
-
-    def __repr__(self):
-        return str(dict(self.items()))
-
-
-class LookupDict(dict):
-    """Dictionary lookup object."""
-
-    def __init__(self, name=None):
-        self.name = name
-        super(LookupDict, self).__init__()
-
-    def __repr__(self):
-        return '<lookup \'%s\'>' % (self.name)
-
-    def __getitem__(self, key):
-        # We allow fall-through here, so values default to None
-
-        return self.__dict__.get(key, None)
-
-    def get(self, key, default=None):
-        return self.__dict__.get(key, default)
diff --git a/vendor/requests/requests/utils.py b/vendor/requests/requests/utils.py
deleted file mode 100644
index dbb02a0d..00000000
--- a/vendor/requests/requests/utils.py
+++ /dev/null
@@ -1,1013 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.utils
-~~~~~~~~~~~~~~
-
-This module provides utility functions that are used within Requests
-that are also useful for external consumption.
-"""
-
-import codecs
-import contextlib
-import io
-import os
-import re
-import socket
-import struct
-import sys
-import tempfile
-import warnings
-import zipfile
-from collections import OrderedDict
-from urllib3.util import make_headers
-
-from .__version__ import __version__
-from . import certs
-# to_native_string is unused here, but imported here for backwards compatibility
-from ._internal_utils import to_native_string
-from .compat import parse_http_list as _parse_list_header
-from .compat import (
-    quote, urlparse, bytes, str, unquote, getproxies,
-    proxy_bypass, urlunparse, basestring, integer_types, is_py3,
-    proxy_bypass_environment, getproxies_environment, Mapping)
-from .cookies import cookiejar_from_dict
-from .structures import CaseInsensitiveDict
-from .exceptions import (
-    InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError)
-
-NETRC_FILES = ('.netrc', '_netrc')
-
-DEFAULT_CA_BUNDLE_PATH = certs.where()
-
-DEFAULT_PORTS = {'http': 80, 'https': 443}
-
-# Ensure that ', ' is used to preserve previous delimiter behavior.
-DEFAULT_ACCEPT_ENCODING = ", ".join(
-    re.split(r",\s*", make_headers(accept_encoding=True)["accept-encoding"])
-)
-
-
-if sys.platform == 'win32':
-    # provide a proxy_bypass version on Windows without DNS lookups
-
-    def proxy_bypass_registry(host):
-        try:
-            if is_py3:
-                import winreg
-            else:
-                import _winreg as winreg
-        except ImportError:
-            return False
-
-        try:
-            internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
-                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
-            # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it
-            proxyEnable = int(winreg.QueryValueEx(internetSettings,
-                                                  'ProxyEnable')[0])
-            # ProxyOverride is almost always a string
-            proxyOverride = winreg.QueryValueEx(internetSettings,
-                                                'ProxyOverride')[0]
-        except OSError:
-            return False
-        if not proxyEnable or not proxyOverride:
-            return False
-
-        # make a check value list from the registry entry: replace the
-        # '<local>' string by the localhost entry and the corresponding
-        # canonical entry.
-        proxyOverride = proxyOverride.split(';')
-        # now check if we match one of the registry values.
-        for test in proxyOverride:
-            if test == '<local>':
-                if '.' not in host:
-                    return True
-            test = test.replace(".", r"\.")  # mask dots
-            test = test.replace("*", r".*")  # change glob sequence
-            test = test.replace("?", r".")   # change glob char
-            if re.match(test, host, re.I):
-                return True
-        return False
-
-    def proxy_bypass(host):  # noqa
-        """Return True, if the host should be bypassed.
-
-        Checks proxy settings gathered from the environment, if specified,
-        or the registry.
-        """
-        if getproxies_environment():
-            return proxy_bypass_environment(host)
-        else:
-            return proxy_bypass_registry(host)
-
-
-def dict_to_sequence(d):
-    """Returns an internal sequence dictionary update."""
-
-    if hasattr(d, 'items'):
-        d = d.items()
-
-    return d
-
-
-def super_len(o):
-    total_length = None
-    current_position = 0
-
-    if hasattr(o, '__len__'):
-        total_length = len(o)
-
-    elif hasattr(o, 'len'):
-        total_length = o.len
-
-    elif hasattr(o, 'fileno'):
-        try:
-            fileno = o.fileno()
-        except io.UnsupportedOperation:
-            pass
-        else:
-            total_length = os.fstat(fileno).st_size
-
-            # Having used fstat to determine the file length, we need to
-            # confirm that this file was opened up in binary mode.
-            if 'b' not in o.mode:
-                warnings.warn((
-                    "Requests has determined the content-length for this "
-                    "request using the binary size of the file: however, the "
-                    "file has been opened in text mode (i.e. without the 'b' "
-                    "flag in the mode). This may lead to an incorrect "
-                    "content-length. In Requests 3.0, support will be removed "
-                    "for files in text mode."),
-                    FileModeWarning
-                )
-
-    if hasattr(o, 'tell'):
-        try:
-            current_position = o.tell()
-        except (OSError, IOError):
-            # This can happen in some weird situations, such as when the file
-            # is actually a special file descriptor like stdin. In this
-            # instance, we don't know what the length is, so set it to zero and
-            # let requests chunk it instead.
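
# The glob-to-regex translation used by `proxy_bypass_registry` above for
# ProxyOverride entries, in isolation:
import re

def override_to_regex(test):
    test = test.replace(".", r"\.")  # mask literal dots
    test = test.replace("*", r".*")  # glob sequence
    test = test.replace("?", r".")   # glob single char
    return test

assert re.match(override_to_regex("*.example.com"), "www.example.com", re.I)
assert not re.match(override_to_regex("10.0.0.?"), "10.0.1.5", re.I)
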
- if total_length is not None: - current_position = total_length - else: - if hasattr(o, 'seek') and total_length is None: - # StringIO and BytesIO have seek but no useable fileno - try: - # seek to end of file - o.seek(0, 2) - total_length = o.tell() - - # seek back to current position to support - # partially read file-like objects - o.seek(current_position or 0) - except (OSError, IOError): - total_length = 0 - - if total_length is None: - total_length = 0 - - return max(0, total_length - current_position) - - -def get_netrc_auth(url, raise_errors=False): - """Returns the Requests tuple auth for a given url from netrc.""" - - netrc_file = os.environ.get('NETRC') - if netrc_file is not None: - netrc_locations = (netrc_file,) - else: - netrc_locations = ('~/{}'.format(f) for f in NETRC_FILES) - - try: - from netrc import netrc, NetrcParseError - - netrc_path = None - - for f in netrc_locations: - try: - loc = os.path.expanduser(f) - except KeyError: - # os.path.expanduser can fail when $HOME is undefined and - # getpwuid fails. See https://bugs.python.org/issue20164 & - # https://github.com/psf/requests/issues/1846 - return - - if os.path.exists(loc): - netrc_path = loc - break - - # Abort early if there isn't one. - if netrc_path is None: - return - - ri = urlparse(url) - - # Strip port numbers from netloc. This weird `if...encode`` dance is - # used for Python 3.2, which doesn't support unicode literals. - splitstr = b':' - if isinstance(url, str): - splitstr = splitstr.decode('ascii') - host = ri.netloc.split(splitstr)[0] - - try: - _netrc = netrc(netrc_path).authenticators(host) - if _netrc: - # Return with login / password - login_i = (0 if _netrc[0] else 1) - return (_netrc[login_i], _netrc[2]) - except (NetrcParseError, IOError): - # If there was a parsing error or a permissions issue reading the file, - # we'll just skip netrc auth unless explicitly asked to raise errors. - if raise_errors: - raise - - # App Engine hackiness. - except (ImportError, AttributeError): - pass - - -def guess_filename(obj): - """Tries to guess the filename of the given object.""" - name = getattr(obj, 'name', None) - if (name and isinstance(name, basestring) and name[0] != '<' and - name[-1] != '>'): - return os.path.basename(name) - - -def extract_zipped_paths(path): - """Replace nonexistent paths that look like they refer to a member of a zip - archive with the location of an extracted copy of the target, or else - just return the provided path unchanged. 
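
# The seek/tell branch of `super_len` above for objects with no __len__ or
# fileno (e.g. BytesIO): "length" means bytes remaining from the current
# position, which is what a partially read body needs. A minimal sketch:
import io

def remaining_len(o):
    pos = o.tell()
    o.seek(0, 2)      # seek to end to learn the total size
    total = o.tell()
    o.seek(pos)       # restore the position for the caller
    return max(0, total - pos)

buf = io.BytesIO(b'abcdef')
buf.read(2)
assert remaining_len(buf) == 4
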
- """ - if os.path.exists(path): - # this is already a valid path, no need to do anything further - return path - - # find the first valid part of the provided path and treat that as a zip archive - # assume the rest of the path is the name of a member in the archive - archive, member = os.path.split(path) - while archive and not os.path.exists(archive): - archive, prefix = os.path.split(archive) - member = '/'.join([prefix, member]) - - if not zipfile.is_zipfile(archive): - return path - - zip_file = zipfile.ZipFile(archive) - if member not in zip_file.namelist(): - return path - - # we have a valid zip archive and a valid member of that archive - tmp = tempfile.gettempdir() - extracted_path = os.path.join(tmp, member.split('/')[-1]) - if not os.path.exists(extracted_path): - # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition - with atomic_open(extracted_path) as file_handler: - file_handler.write(zip_file.read(member)) - return extracted_path - - -@contextlib.contextmanager -def atomic_open(filename): - """Write a file to the disk in an atomic fashion""" - replacer = os.rename if sys.version_info[0] == 2 else os.replace - tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename)) - try: - with os.fdopen(tmp_descriptor, 'wb') as tmp_handler: - yield tmp_handler - replacer(tmp_name, filename) - except BaseException: - os.remove(tmp_name) - raise - - -def from_key_val_list(value): - """Take an object and test to see if it can be represented as a - dictionary. Unless it can not be represented as such, return an - OrderedDict, e.g., - - :: - - >>> from_key_val_list([('key', 'val')]) - OrderedDict([('key', 'val')]) - >>> from_key_val_list('string') - Traceback (most recent call last): - ... - ValueError: cannot encode objects that are not 2-tuples - >>> from_key_val_list({'key': 'val'}) - OrderedDict([('key', 'val')]) - - :rtype: OrderedDict - """ - if value is None: - return None - - if isinstance(value, (str, bytes, bool, int)): - raise ValueError('cannot encode objects that are not 2-tuples') - - return OrderedDict(value) - - -def to_key_val_list(value): - """Take an object and test to see if it can be represented as a - dictionary. If it can be, return a list of tuples, e.g., - - :: - - >>> to_key_val_list([('key', 'val')]) - [('key', 'val')] - >>> to_key_val_list({'key': 'val'}) - [('key', 'val')] - >>> to_key_val_list('string') - Traceback (most recent call last): - ... - ValueError: cannot encode objects that are not 2-tuples - - :rtype: list - """ - if value is None: - return None - - if isinstance(value, (str, bytes, bool, int)): - raise ValueError('cannot encode objects that are not 2-tuples') - - if isinstance(value, Mapping): - value = value.items() - - return list(value) - - -# From mitsuhiko/werkzeug (used with permission). -def parse_list_header(value): - """Parse lists as described by RFC 2068 Section 2. - - In particular, parse comma-separated lists where the elements of - the list may include quoted-strings. A quoted-string could - contain a comma. A non-quoted string could have quotes in the - middle. Quotes are removed automatically after parsing. - - It basically works like :func:`parse_set_header` just that items - may appear multiple times and case sensitivity is preserved. - - The return value is a standard :class:`list`: - - >>> parse_list_header('token, "quoted value"') - ['token', 'quoted value'] - - To create a header from the :class:`list` again, use the - :func:`dump_header` function. 
- - :param value: a string with a list header. - :return: :class:`list` - :rtype: list - """ - result = [] - for item in _parse_list_header(value): - if item[:1] == item[-1:] == '"': - item = unquote_header_value(item[1:-1]) - result.append(item) - return result - - -# From mitsuhiko/werkzeug (used with permission). -def parse_dict_header(value): - """Parse lists of key, value pairs as described by RFC 2068 Section 2 and - convert them into a python dict: - - >>> d = parse_dict_header('foo="is a fish", bar="as well"') - >>> type(d) is dict - True - >>> sorted(d.items()) - [('bar', 'as well'), ('foo', 'is a fish')] - - If there is no value for a key it will be `None`: - - >>> parse_dict_header('key_without_value') - {'key_without_value': None} - - To create a header from the :class:`dict` again, use the - :func:`dump_header` function. - - :param value: a string with a dict header. - :return: :class:`dict` - :rtype: dict - """ - result = {} - for item in _parse_list_header(value): - if '=' not in item: - result[item] = None - continue - name, value = item.split('=', 1) - if value[:1] == value[-1:] == '"': - value = unquote_header_value(value[1:-1]) - result[name] = value - return result - - -# From mitsuhiko/werkzeug (used with permission). -def unquote_header_value(value, is_filename=False): - r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). - This does not use the real unquoting but what browsers are actually - using for quoting. - - :param value: the header value to unquote. - :rtype: str - """ - if value and value[0] == value[-1] == '"': - # this is not the real unquoting, but fixing this so that the - # RFC is met will result in bugs with internet explorer and - # probably some other browsers as well. IE for example is - # uploading files with "C:\foo\bar.txt" as filename - value = value[1:-1] - - # if this is a filename and the starting characters look like - # a UNC path, then just return the value without quotes. Using the - # replace sequence below on a UNC path has the effect of turning - # the leading double slash into a single slash and then - # _fix_ie_filename() doesn't work correctly. See #458. - if not is_filename or value[:2] != '\\\\': - return value.replace('\\\\', '\\').replace('\\"', '"') - return value - - -def dict_from_cookiejar(cj): - """Returns a key/value dictionary from a CookieJar. - - :param cj: CookieJar object to extract cookies from. - :rtype: dict - """ - - cookie_dict = {} - - for cookie in cj: - cookie_dict[cookie.name] = cookie.value - - return cookie_dict - - -def add_dict_to_cookiejar(cj, cookie_dict): - """Returns a CookieJar from a key/value dictionary. - - :param cj: CookieJar to insert cookies into. - :param cookie_dict: Dict of key/values to insert into CookieJar. - :rtype: CookieJar - """ - - return cookiejar_from_dict(cookie_dict, cj) - - -def get_encodings_from_content(content): - """Returns encodings from given content string. - - :param content: bytestring to extract encodings from. - """ - warnings.warn(( - 'In requests 3.0, get_encodings_from_content will be removed. For ' - 'more information, please see the discussion on issue #2266. 
(This' - ' warning should only appear once.)'), - DeprecationWarning) - - charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I) - pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I) - xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]') - - return (charset_re.findall(content) + - pragma_re.findall(content) + - xml_re.findall(content)) - - -def _parse_content_type_header(header): - """Returns content type and parameters from given header - - :param header: string - :return: tuple containing content type and dictionary of - parameters - """ - - tokens = header.split(';') - content_type, params = tokens[0].strip(), tokens[1:] - params_dict = {} - items_to_strip = "\"' " - - for param in params: - param = param.strip() - if param: - key, value = param, True - index_of_equals = param.find("=") - if index_of_equals != -1: - key = param[:index_of_equals].strip(items_to_strip) - value = param[index_of_equals + 1:].strip(items_to_strip) - params_dict[key.lower()] = value - return content_type, params_dict - - -def get_encoding_from_headers(headers): - """Returns encodings from given HTTP Header Dict. - - :param headers: dictionary to extract encoding from. - :rtype: str - """ - - content_type = headers.get('content-type') - - if not content_type: - return None - - content_type, params = _parse_content_type_header(content_type) - - if 'charset' in params: - return params['charset'].strip("'\"") - - if 'text' in content_type: - return 'ISO-8859-1' - - if 'application/json' in content_type: - # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset - return 'utf-8' - - -def stream_decode_response_unicode(iterator, r): - """Stream decodes a iterator.""" - - if r.encoding is None: - for item in iterator: - yield item - return - - decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace') - for chunk in iterator: - rv = decoder.decode(chunk) - if rv: - yield rv - rv = decoder.decode(b'', final=True) - if rv: - yield rv - - -def iter_slices(string, slice_length): - """Iterate over slices of a string.""" - pos = 0 - if slice_length is None or slice_length <= 0: - slice_length = len(string) - while pos < len(string): - yield string[pos:pos + slice_length] - pos += slice_length - - -def get_unicode_from_response(r): - """Returns the requested content back in unicode. - - :param r: Response object to get unicode content from. - - Tried: - - 1. charset from content-type - 2. fall back and replace all unicode characters - - :rtype: str - """ - warnings.warn(( - 'In requests 3.0, get_unicode_from_response will be removed. For ' - 'more information, please see the discussion on issue #2266. (This' - ' warning should only appear once.)'), - DeprecationWarning) - - tried_encodings = [] - - # Try charset from content-type - encoding = get_encoding_from_headers(r.headers) - - if encoding: - try: - return str(r.content, encoding) - except UnicodeError: - tried_encodings.append(encoding) - - # Fall back: - try: - return str(r.content, encoding, errors='replace') - except TypeError: - return r.content - - -# The unreserved URI characters (RFC 3986) -UNRESERVED_SET = frozenset( - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~") - - -def unquote_unreserved(uri): - """Un-escape any percent-escape sequences in a URI that are unreserved - characters. This leaves all reserved, illegal and non-ASCII bytes encoded.
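The charset resolution above is easy to spot-check; assuming requests.utils is importable, these assertions follow directly from the code shown:

from requests.utils import get_encoding_from_headers

assert get_encoding_from_headers({'content-type': 'text/html; charset=UTF-8'}) == 'UTF-8'
assert get_encoding_from_headers({'content-type': 'text/plain'}) == 'ISO-8859-1'  # RFC 2616 default for text/*
assert get_encoding_from_headers({'content-type': 'application/json'}) == 'utf-8'  # RFC 4627 assumption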
- - :rtype: str - """ - parts = uri.split('%') - for i in range(1, len(parts)): - h = parts[i][0:2] - if len(h) == 2 and h.isalnum(): - try: - c = chr(int(h, 16)) - except ValueError: - raise InvalidURL("Invalid percent-escape sequence: '%s'" % h) - - if c in UNRESERVED_SET: - parts[i] = c + parts[i][2:] - else: - parts[i] = '%' + parts[i] - else: - parts[i] = '%' + parts[i] - return ''.join(parts) - - -def requote_uri(uri): - """Re-quote the given URI. - - This function passes the given URI through an unquote/quote cycle to - ensure that it is fully and consistently quoted. - - :rtype: str - """ - safe_with_percent = "!#$%&'()*+,/:;=?@[]~" - safe_without_percent = "!#$&'()*+,/:;=?@[]~" - try: - # Unquote only the unreserved characters - # Then quote only illegal characters (do not quote reserved, - # unreserved, or '%') - return quote(unquote_unreserved(uri), safe=safe_with_percent) - except InvalidURL: - # We couldn't unquote the given URI, so let's try quoting it, but - # there may be unquoted '%'s in the URI. We need to make sure they're - # properly quoted so they do not cause issues elsewhere. - return quote(uri, safe=safe_without_percent) - - -def address_in_network(ip, net): - """This function allows you to check if an IP belongs to a network subnet - - Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24 - returns False if ip = 192.168.1.1 and net = 192.168.100.0/24 - - :rtype: bool - """ - ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0] - netaddr, bits = net.split('/') - netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0] - network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask - return (ipaddr & netmask) == (network & netmask) - - -def dotted_netmask(mask): - """Converts mask from /xx format to xxx.xxx.xxx.xxx - - Example: if mask is 24 function returns 255.255.255.0 - - :rtype: str - """ - bits = 0xffffffff ^ (1 << 32 - mask) - 1 - return socket.inet_ntoa(struct.pack('>I', bits)) - - -def is_ipv4_address(string_ip): - """ - :rtype: bool - """ - try: - socket.inet_aton(string_ip) - except socket.error: - return False - return True - - -def is_valid_cidr(string_network): - """ - Very simple check of the cidr format in no_proxy variable. - - :rtype: bool - """ - if string_network.count('/') == 1: - try: - mask = int(string_network.split('/')[1]) - except ValueError: - return False - - if mask < 1 or mask > 32: - return False - - try: - socket.inet_aton(string_network.split('/')[0]) - except socket.error: - return False - else: - return False - return True - - -@contextlib.contextmanager -def set_environ(env_name, value): - """Set the environment variable 'env_name' to 'value' - - Save previous value, yield, and then restore the previous value stored in - the environment variable 'env_name'. - - If 'value' is None, do nothing""" - value_changed = value is not None - if value_changed: - old_value = os.environ.get(env_name) - os.environ[env_name] = value - try: - yield - finally: - if value_changed: - if old_value is None: - del os.environ[env_name] - else: - os.environ[env_name] = old_value - - -def should_bypass_proxies(url, no_proxy): - """ - Returns whether we should bypass proxies or not. - - :rtype: bool - """ - # Prioritize lowercase environment variables over uppercase - # to keep a consistent behaviour with other http projects (curl, wget). - get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper()) - - # First check whether no_proxy is defined. 
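The docstrings above already state the expected values for the subnet helpers; collected as a compact worked example (requests.utils assumed importable):

from requests.utils import address_in_network, dotted_netmask, is_valid_cidr

assert dotted_netmask(24) == '255.255.255.0'
assert address_in_network('192.168.1.1', '192.168.1.0/24')
assert not address_in_network('192.168.1.1', '192.168.100.0/24')
assert is_valid_cidr('192.168.1.0/24') and not is_valid_cidr('192.168.1.0')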
If it is, check that the URL - # we're getting isn't in the no_proxy list. - no_proxy_arg = no_proxy - if no_proxy is None: - no_proxy = get_proxy('no_proxy') - parsed = urlparse(url) - - if parsed.hostname is None: - # URLs don't always have hostnames, e.g. file:/// urls. - return True - - if no_proxy: - # We need to check whether we match here. We need to see if we match - # the end of the hostname, both with and without the port. - no_proxy = ( - host for host in no_proxy.replace(' ', '').split(',') if host - ) - - if is_ipv4_address(parsed.hostname): - for proxy_ip in no_proxy: - if is_valid_cidr(proxy_ip): - if address_in_network(parsed.hostname, proxy_ip): - return True - elif parsed.hostname == proxy_ip: - # If no_proxy ip was defined in plain IP notation instead of cidr notation & - # matches the IP of the index - return True - else: - host_with_port = parsed.hostname - if parsed.port: - host_with_port += ':{}'.format(parsed.port) - - for host in no_proxy: - if parsed.hostname.endswith(host) or host_with_port.endswith(host): - # The URL does match something in no_proxy, so we don't want - # to apply the proxies on this URL. - return True - - with set_environ('no_proxy', no_proxy_arg): - # parsed.hostname can be `None` in cases such as a file URI. - try: - bypass = proxy_bypass(parsed.hostname) - except (TypeError, socket.gaierror): - bypass = False - - if bypass: - return True - - return False - - -def get_environ_proxies(url, no_proxy=None): - """ - Return a dict of environment proxies. - - :rtype: dict - """ - if should_bypass_proxies(url, no_proxy=no_proxy): - return {} - else: - return getproxies() - - -def select_proxy(url, proxies): - """Select a proxy for the url, if applicable. - - :param url: The url being for the request - :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs - """ - proxies = proxies or {} - urlparts = urlparse(url) - if urlparts.hostname is None: - return proxies.get(urlparts.scheme, proxies.get('all')) - - proxy_keys = [ - urlparts.scheme + '://' + urlparts.hostname, - urlparts.scheme, - 'all://' + urlparts.hostname, - 'all', - ] - proxy = None - for proxy_key in proxy_keys: - if proxy_key in proxies: - proxy = proxies[proxy_key] - break - - return proxy - - -def default_user_agent(name="python-requests"): - """ - Return a string representing the default user agent. - - :rtype: str - """ - return '%s/%s' % (name, __version__) - - -def default_headers(): - """ - :rtype: requests.structures.CaseInsensitiveDict - """ - return CaseInsensitiveDict({ - 'User-Agent': default_user_agent(), - 'Accept-Encoding': DEFAULT_ACCEPT_ENCODING, - 'Accept': '*/*', - 'Connection': 'keep-alive', - }) - - -def parse_header_links(value): - """Return a list of parsed link headers proxies. - - i.e. 
Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg" - - :rtype: list - """ - - links = [] - - replace_chars = ' \'"' - - value = value.strip(replace_chars) - if not value: - return links - - for val in re.split(', *<', value): - try: - url, params = val.split(';', 1) - except ValueError: - url, params = val, '' - - link = {'url': url.strip('<> \'"')} - - for param in params.split(';'): - try: - key, value = param.split('=') - except ValueError: - break - - link[key.strip(replace_chars)] = value.strip(replace_chars) - - links.append(link) - - return links - - -# Null bytes; no need to recreate these on each call to guess_json_utf -_null = '\x00'.encode('ascii') # encoding to ASCII for Python 3 -_null2 = _null * 2 -_null3 = _null * 3 - - -def guess_json_utf(data): - """ - :rtype: str - """ - # JSON always starts with two ASCII characters, so detection is as - # easy as counting the nulls and from their location and count - # determine the encoding. Also detect a BOM, if present. - sample = data[:4] - if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE): - return 'utf-32' # BOM included - if sample[:3] == codecs.BOM_UTF8: - return 'utf-8-sig' # BOM included, MS style (discouraged) - if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE): - return 'utf-16' # BOM included - nullcount = sample.count(_null) - if nullcount == 0: - return 'utf-8' - if nullcount == 2: - if sample[::2] == _null2: # 1st and 3rd are null - return 'utf-16-be' - if sample[1::2] == _null2: # 2nd and 4th are null - return 'utf-16-le' - # Did not detect 2 valid UTF-16 ascii-range characters - if nullcount == 3: - if sample[:3] == _null3: - return 'utf-32-be' - if sample[1:] == _null3: - return 'utf-32-le' - # Did not detect a valid UTF-32 ascii-range character - return None - - -def prepend_scheme_if_needed(url, new_scheme): - """Given a URL that may or may not have a scheme, prepend the given scheme. - Does not replace a present scheme with the one provided as an argument. - - :rtype: str - """ - scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme) - - # urlparse is a finicky beast, and sometimes decides that there isn't a - # netloc present. Assume that it's being over-cautious, and switch netloc - # and path if urlparse decided there was no netloc. - if not netloc: - netloc, path = path, netloc - - return urlunparse((scheme, netloc, path, params, query, fragment)) - - -def get_auth_from_url(url): - """Given a url with authentication components, extract them into a tuple of - username,password. - - :rtype: (str,str) - """ - parsed = urlparse(url) - - try: - auth = (unquote(parsed.username), unquote(parsed.password)) - except (AttributeError, TypeError): - auth = ('', '') - - return auth - - -# Moved outside of function to avoid recompile every call -_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$') -_CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$') - - -def check_header_validity(header): - """Verifies that header value is a string which doesn't contain - leading whitespace or return characters. This prevents unintended - header injection. - - :param header: tuple, in the format (name, value).
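guess_json_utf above relies on JSON opening with two ASCII characters, so the encoding falls out of the BOM or the null-byte layout of the first four bytes; a few spot checks (requests.utils assumed importable):

import codecs
from requests.utils import guess_json_utf

assert guess_json_utf(b'{"a": 1}') == 'utf-8'                   # no null bytes
assert guess_json_utf('{}'.encode('utf-16-le')) == 'utf-16-le'  # nulls in 2nd and 4th position
assert guess_json_utf(codecs.BOM_UTF8 + b'{}') == 'utf-8-sig'   # BOM short-circuits detection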
- """ - name, value = header - - if isinstance(value, bytes): - pat = _CLEAN_HEADER_REGEX_BYTE - else: - pat = _CLEAN_HEADER_REGEX_STR - try: - if not pat.match(value): - raise InvalidHeader("Invalid return character or leading space in header: %s" % name) - except TypeError: - raise InvalidHeader("Value for header {%s: %s} must be of type str or " - "bytes, not %s" % (name, value, type(value))) - - -def urldefragauth(url): - """ - Given a url remove the fragment and the authentication part. - - :rtype: str - """ - scheme, netloc, path, params, query, fragment = urlparse(url) - - # see func:`prepend_scheme_if_needed` - if not netloc: - netloc, path = path, netloc - - netloc = netloc.rsplit('@', 1)[-1] - - return urlunparse((scheme, netloc, path, params, query, '')) - - -def rewind_body(prepared_request): - """Move file pointer back to its recorded starting position - so it can be read again on redirect. - """ - body_seek = getattr(prepared_request.body, 'seek', None) - if body_seek is not None and isinstance(prepared_request._body_position, integer_types): - try: - body_seek(prepared_request._body_position) - except (IOError, OSError): - raise UnrewindableBodyError("An error occurred when rewinding request " - "body for redirect.") - else: - raise UnrewindableBodyError("Unable to rewind request body for redirect.") diff --git a/vendor/requests/requirements-dev.txt b/vendor/requests/requirements-dev.txt deleted file mode 100644 index e19c95ca..00000000 --- a/vendor/requests/requirements-dev.txt +++ /dev/null @@ -1,9 +0,0 @@ -pytest>=2.8.0,<=3.10.1 -pytest-cov -pytest-httpbin==1.0.0 -pytest-mock==2.0.0 -httpbin==0.7.0 -Flask>=1.0,<2.0 -trustme -wheel -MarkupSafe==2.0.1 diff --git a/vendor/requests/setup.cfg b/vendor/requests/setup.cfg deleted file mode 100644 index ed8a958e..00000000 --- a/vendor/requests/setup.cfg +++ /dev/null @@ -1,5 +0,0 @@ -[bdist_wheel] -universal = 1 - -[metadata] -license_file = LICENSE diff --git a/vendor/requests/setup.py b/vendor/requests/setup.py deleted file mode 100755 index ce5e5c80..00000000 --- a/vendor/requests/setup.py +++ /dev/null @@ -1,113 +0,0 @@ -#!/usr/bin/env python -# Learn more: https://github.com/kennethreitz/setup.py -import os -import sys - -from codecs import open - -from setuptools import setup -from setuptools.command.test import test as TestCommand - -here = os.path.abspath(os.path.dirname(__file__)) - -class PyTest(TestCommand): - user_options = [('pytest-args=', 'a', "Arguments to pass into py.test")] - - def initialize_options(self): - TestCommand.initialize_options(self) - try: - from multiprocessing import cpu_count - self.pytest_args = ['-n', str(cpu_count()), '--boxed'] - except (ImportError, NotImplementedError): - self.pytest_args = ['-n', '1', '--boxed'] - - def finalize_options(self): - TestCommand.finalize_options(self) - self.test_args = [] - self.test_suite = True - - def run_tests(self): - import pytest - - errno = pytest.main(self.pytest_args) - sys.exit(errno) - -# 'setup.py publish' shortcut. 
-if sys.argv[-1] == 'publish': - os.system('python setup.py sdist bdist_wheel') - os.system('twine upload dist/*') - sys.exit() - -packages = ['requests'] - -requires = [ - 'charset_normalizer~=2.0.0; python_version >= "3"', - 'chardet>=3.0.2,<5; python_version < "3"', - 'idna>=2.5,<3; python_version < "3"', - 'idna>=2.5,<4; python_version >= "3"', - 'urllib3>=1.21.1,<1.27', - 'certifi>=2017.4.17' - -] -test_requirements = [ - 'pytest-httpbin==0.0.7', - 'pytest-cov', - 'pytest-mock', - 'pytest-xdist', - 'PySocks>=1.5.6, !=1.5.7', - 'pytest>=3' -] - -about = {} -with open(os.path.join(here, 'requests', '__version__.py'), 'r', 'utf-8') as f: - exec(f.read(), about) - -with open('README.md', 'r', 'utf-8') as f: - readme = f.read() - -setup( - name=about['__title__'], - version=about['__version__'], - description=about['__description__'], - long_description=readme, - long_description_content_type='text/markdown', - author=about['__author__'], - author_email=about['__author_email__'], - url=about['__url__'], - packages=packages, - package_data={'': ['LICENSE', 'NOTICE']}, - package_dir={'requests': 'requests'}, - include_package_data=True, - python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*", - install_requires=requires, - license=about['__license__'], - zip_safe=False, - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Natural Language :: English', - 'License :: OSI Approved :: Apache Software License', - 'Programming Language :: Python', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: Implementation :: CPython', - 'Programming Language :: Python :: Implementation :: PyPy' - ], - cmdclass={'test': PyTest}, - tests_require=test_requirements, - extras_require={ - 'security': [], - 'socks': ['PySocks>=1.5.6, !=1.5.7'], - 'socks:sys_platform == "win32" and python_version == "2.7"': ['win_inet_pton'], - 'use_chardet_on_py3': ['chardet>=3.0.2,<5'] - }, - project_urls={ - 'Documentation': 'https://requests.readthedocs.io', - 'Source': 'https://github.com/psf/requests', - }, -) diff --git a/vendor/requests/tests/__init__.py b/vendor/requests/tests/__init__.py deleted file mode 100644 index 9be94bcc..00000000 --- a/vendor/requests/tests/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# -*- coding: utf-8 -*- - -"""Requests test package initialisation.""" - -import warnings - -import urllib3 -from urllib3.exceptions import SNIMissingWarning - -# urllib3 sets SNIMissingWarning to only go off once, -# while this test suite requires it to always fire -# so that it occurs during test_requests.test_https_warnings -warnings.simplefilter('always', SNIMissingWarning) diff --git a/vendor/requests/tests/compat.py b/vendor/requests/tests/compat.py deleted file mode 100644 index f68e8014..00000000 --- a/vendor/requests/tests/compat.py +++ /dev/null @@ -1,21 +0,0 @@ -# -*- coding: utf-8 -*- - -from requests.compat import is_py3 - - -try: - import StringIO -except ImportError: - import io as StringIO - -try: - from cStringIO import StringIO as cStringIO -except ImportError: - cStringIO = None - -if is_py3: - def u(s): - return s -else: - def u(s): - return s.decode('unicode-escape') diff --git a/vendor/requests/tests/conftest.py 
b/vendor/requests/tests/conftest.py deleted file mode 100644 index 099462cf..00000000 --- a/vendor/requests/tests/conftest.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- - -try: - from http.server import HTTPServer - from http.server import SimpleHTTPRequestHandler -except ImportError: - from BaseHTTPServer import HTTPServer - from SimpleHTTPServer import SimpleHTTPRequestHandler - -import ssl -import tempfile -import threading - -import pytest -from requests.compat import urljoin -import trustme - - -def prepare_url(value): - # Issue #1483: Make sure the URL always has a trailing slash - httpbin_url = value.url.rstrip('/') + '/' - - def inner(*suffix): - return urljoin(httpbin_url, '/'.join(suffix)) - - return inner - - -@pytest.fixture -def httpbin(httpbin): - return prepare_url(httpbin) - - -@pytest.fixture -def httpbin_secure(httpbin_secure): - return prepare_url(httpbin_secure) - - -@pytest.fixture -def nosan_server(tmp_path_factory): - tmpdir = tmp_path_factory.mktemp("certs") - ca = trustme.CA() - # only commonName, no subjectAltName - server_cert = ca.issue_cert(common_name=u"localhost") - ca_bundle = str(tmpdir / "ca.pem") - ca.cert_pem.write_to_path(ca_bundle) - - context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) - server_cert.configure_cert(context) - server = HTTPServer(("localhost", 0), SimpleHTTPRequestHandler) - server.socket = context.wrap_socket(server.socket, server_side=True) - server_thread = threading.Thread(target=server.serve_forever) - server_thread.start() - - yield "localhost", server.server_address[1], ca_bundle - - server.shutdown() - server_thread.join() diff --git a/vendor/requests/tests/test_help.py b/vendor/requests/tests/test_help.py deleted file mode 100644 index 3beb65f3..00000000 --- a/vendor/requests/tests/test_help.py +++ /dev/null @@ -1,31 +0,0 @@ -# -*- encoding: utf-8 - -import sys - -import pytest - -from requests.help import info - - -def test_system_ssl(): - """Verify we're actually setting system_ssl when it should be available.""" - assert info()['system_ssl']['version'] != '' - - -class VersionedPackage(object): - def __init__(self, version): - self.__version__ = version - - -def test_idna_without_version_attribute(mocker): - """Older versions of IDNA don't provide a __version__ attribute, verify - that if we have such a package, we don't blow up. 
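The conftest above shadows the pytest-httpbin fixtures with URL-building wrappers; the same pattern in miniature (urllib.parse stands in for requests.compat.urljoin, and the port is whatever the plugin chose):

import pytest
from urllib.parse import urljoin

def prepare_url(value):
    httpbin_url = value.url.rstrip('/') + '/'  # guarantee exactly one trailing slash

    def inner(*suffix):
        return urljoin(httpbin_url, '/'.join(suffix))

    return inner

@pytest.fixture
def httpbin(httpbin):  # wraps the plugin fixture of the same name
    return prepare_url(httpbin)

# httpbin('status', '200') -> 'http://127.0.0.1:<port>/status/200'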
- """ - mocker.patch('requests.help.idna', new=None) - assert info()['idna'] == {'version': ''} - - -def test_idna_with_version_attribute(mocker): - """Verify we're actually setting idna version when it should be available.""" - mocker.patch('requests.help.idna', new=VersionedPackage('2.6')) - assert info()['idna'] == {'version': '2.6'} diff --git a/vendor/requests/tests/test_hooks.py b/vendor/requests/tests/test_hooks.py deleted file mode 100644 index 014b4391..00000000 --- a/vendor/requests/tests/test_hooks.py +++ /dev/null @@ -1,23 +0,0 @@ -# -*- coding: utf-8 -*- - -import pytest - -from requests import hooks - - -def hook(value): - return value[1:] - - -@pytest.mark.parametrize( - 'hooks_list, result', ( - (hook, 'ata'), - ([hook, lambda x: None, hook], 'ta'), - ) -) -def test_hooks(hooks_list, result): - assert hooks.dispatch_hook('response', {'response': hooks_list}, 'Data') == result - - -def test_default_hooks(): - assert hooks.default_hooks() == {'response': []} diff --git a/vendor/requests/tests/test_lowlevel.py b/vendor/requests/tests/test_lowlevel.py deleted file mode 100644 index 4127fb11..00000000 --- a/vendor/requests/tests/test_lowlevel.py +++ /dev/null @@ -1,309 +0,0 @@ -# -*- coding: utf-8 -*- - -import pytest -import threading -import requests - -from tests.testserver.server import Server, consume_socket_content - -from .utils import override_environ - - -def test_chunked_upload(): - """can safely send generators""" - close_server = threading.Event() - server = Server.basic_response_server(wait_to_close_event=close_server) - data = iter([b'a', b'b', b'c']) - - with server as (host, port): - url = 'http://{}:{}/'.format(host, port) - r = requests.post(url, data=data, stream=True) - close_server.set() # release server block - - assert r.status_code == 200 - assert r.request.headers['Transfer-Encoding'] == 'chunked' - - -def test_digestauth_401_count_reset_on_redirect(): - """Ensure we correctly reset num_401_calls after a successful digest auth, - followed by a 302 redirect to another digest auth prompt. - - See https://github.com/psf/requests/issues/1979. - """ - text_401 = (b'HTTP/1.1 401 UNAUTHORIZED\r\n' - b'Content-Length: 0\r\n' - b'WWW-Authenticate: Digest nonce="6bf5d6e4da1ce66918800195d6b9130d"' - b', opaque="372825293d1c26955496c80ed6426e9e", ' - b'realm="me@kennethreitz.com", qop=auth\r\n\r\n') - - text_302 = (b'HTTP/1.1 302 FOUND\r\n' - b'Content-Length: 0\r\n' - b'Location: /\r\n\r\n') - - text_200 = (b'HTTP/1.1 200 OK\r\n' - b'Content-Length: 0\r\n\r\n') - - expected_digest = (b'Authorization: Digest username="user", ' - b'realm="me@kennethreitz.com", ' - b'nonce="6bf5d6e4da1ce66918800195d6b9130d", uri="/"') - - auth = requests.auth.HTTPDigestAuth('user', 'pass') - - def digest_response_handler(sock): - # Respond to initial GET with a challenge. - request_content = consume_socket_content(sock, timeout=0.5) - assert request_content.startswith(b"GET / HTTP/1.1") - sock.send(text_401) - - # Verify we receive an Authorization header in response, then redirect. - request_content = consume_socket_content(sock, timeout=0.5) - assert expected_digest in request_content - sock.send(text_302) - - # Verify Authorization isn't sent to the redirected host, - # then send another challenge. - request_content = consume_socket_content(sock, timeout=0.5) - assert b'Authorization:' not in request_content - sock.send(text_401) - - # Verify Authorization is sent correctly again, and return 200 OK. 
- request_content = consume_socket_content(sock, timeout=0.5) - assert expected_digest in request_content - sock.send(text_200) - - return request_content - - close_server = threading.Event() - server = Server(digest_response_handler, wait_to_close_event=close_server) - - with server as (host, port): - url = 'http://{}:{}/'.format(host, port) - r = requests.get(url, auth=auth) - # Verify server succeeded in authenticating. - assert r.status_code == 200 - # Verify Authorization was sent in final request. - assert 'Authorization' in r.request.headers - assert r.request.headers['Authorization'].startswith('Digest ') - # Verify redirect happened as we expected. - assert r.history[0].status_code == 302 - close_server.set() - - -def test_digestauth_401_only_sent_once(): - """Ensure we correctly respond to a 401 challenge once, and then - stop responding if challenged again. - """ - text_401 = (b'HTTP/1.1 401 UNAUTHORIZED\r\n' - b'Content-Length: 0\r\n' - b'WWW-Authenticate: Digest nonce="6bf5d6e4da1ce66918800195d6b9130d"' - b', opaque="372825293d1c26955496c80ed6426e9e", ' - b'realm="me@kennethreitz.com", qop=auth\r\n\r\n') - - expected_digest = (b'Authorization: Digest username="user", ' - b'realm="me@kennethreitz.com", ' - b'nonce="6bf5d6e4da1ce66918800195d6b9130d", uri="/"') - - auth = requests.auth.HTTPDigestAuth('user', 'pass') - - def digest_failed_response_handler(sock): - # Respond to initial GET with a challenge. - request_content = consume_socket_content(sock, timeout=0.5) - assert request_content.startswith(b"GET / HTTP/1.1") - sock.send(text_401) - - # Verify we receive an Authorization header in response, then - # challenge again. - request_content = consume_socket_content(sock, timeout=0.5) - assert expected_digest in request_content - sock.send(text_401) - - # Verify the client didn't respond to second challenge. - request_content = consume_socket_content(sock, timeout=0.5) - assert request_content == b'' - - return request_content - - close_server = threading.Event() - server = Server(digest_failed_response_handler, wait_to_close_event=close_server) - - with server as (host, port): - url = 'http://{}:{}/'.format(host, port) - r = requests.get(url, auth=auth) - # Verify server didn't authenticate us. - assert r.status_code == 401 - assert r.history[0].status_code == 401 - close_server.set() - - -def test_digestauth_only_on_4xx(): - """Ensure we only send digestauth on 4xx challenges. - - See https://github.com/psf/requests/issues/3772. - """ - text_200_chal = (b'HTTP/1.1 200 OK\r\n' - b'Content-Length: 0\r\n' - b'WWW-Authenticate: Digest nonce="6bf5d6e4da1ce66918800195d6b9130d"' - b', opaque="372825293d1c26955496c80ed6426e9e", ' - b'realm="me@kennethreitz.com", qop=auth\r\n\r\n') - - auth = requests.auth.HTTPDigestAuth('user', 'pass') - - def digest_response_handler(sock): - # Respond to GET with a 200 containing www-authenticate header. - request_content = consume_socket_content(sock, timeout=0.5) - assert request_content.startswith(b"GET / HTTP/1.1") - sock.send(text_200_chal) - - # Verify the client didn't respond with auth. - request_content = consume_socket_content(sock, timeout=0.5) - assert request_content == b'' - - return request_content - - close_server = threading.Event() - server = Server(digest_response_handler, wait_to_close_event=close_server) - - with server as (host, port): - url = 'http://{}:{}/'.format(host, port) - r = requests.get(url, auth=auth) - # Verify server didn't receive auth from us. 
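Client-side, the digest flow these handlers exercise is just the auth keyword; the URL below is illustrative:

import requests
from requests.auth import HTTPDigestAuth

r = requests.get('http://example.com/protected', auth=HTTPDigestAuth('user', 'pass'))
# requests answers the first 401 challenge, stops if challenged again,
# and (per the test below) ignores challenges carried on non-4xx responses.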
- assert r.status_code == 200 - assert len(r.history) == 0 - close_server.set() - - -_schemes_by_var_prefix = [ - ('http', ['http']), - ('https', ['https']), - ('all', ['http', 'https']), -] - -_proxy_combos = [] -for prefix, schemes in _schemes_by_var_prefix: - for scheme in schemes: - _proxy_combos.append(("{}_proxy".format(prefix), scheme)) - -_proxy_combos += [(var.upper(), scheme) for var, scheme in _proxy_combos] - - -@pytest.mark.parametrize("var,scheme", _proxy_combos) -def test_use_proxy_from_environment(httpbin, var, scheme): - url = "{}://httpbin.org".format(scheme) - fake_proxy = Server() # do nothing with the requests; just close the socket - with fake_proxy as (host, port): - proxy_url = "socks5://{}:{}".format(host, port) - kwargs = {var: proxy_url} - with override_environ(**kwargs): - # fake proxy's lack of response will cause a ConnectionError - with pytest.raises(requests.exceptions.ConnectionError): - requests.get(url) - - # the fake proxy received a request - assert len(fake_proxy.handler_results) == 1 - - # it had actual content (not checking for SOCKS protocol for now) - assert len(fake_proxy.handler_results[0]) > 0 - - -def test_redirect_rfc1808_to_non_ascii_location(): - path = u'š' - expected_path = b'%C5%A1' - redirect_request = [] # stores the second request to the server - - def redirect_resp_handler(sock): - consume_socket_content(sock, timeout=0.5) - location = u'//{}:{}/{}'.format(host, port, path) - sock.send( - b'HTTP/1.1 301 Moved Permanently\r\n' - b'Content-Length: 0\r\n' - b'Location: ' + location.encode('utf8') + b'\r\n' - b'\r\n' - ) - redirect_request.append(consume_socket_content(sock, timeout=0.5)) - sock.send(b'HTTP/1.1 200 OK\r\n\r\n') - - close_server = threading.Event() - server = Server(redirect_resp_handler, wait_to_close_event=close_server) - - with server as (host, port): - url = u'http://{}:{}'.format(host, port) - r = requests.get(url=url, allow_redirects=True) - assert r.status_code == 200 - assert len(r.history) == 1 - assert r.history[0].status_code == 301 - assert redirect_request[0].startswith(b'GET /' + expected_path + b' HTTP/1.1') - assert r.url == u'{}/{}'.format(url, expected_path.decode('ascii')) - - close_server.set() - -def test_fragment_not_sent_with_request(): - """Verify that the fragment portion of a URI isn't sent to the server.""" - def response_handler(sock): - req = consume_socket_content(sock, timeout=0.5) - sock.send( - b'HTTP/1.1 200 OK\r\n' - b'Content-Length: '+bytes(len(req))+b'\r\n' - b'\r\n'+req - ) - - close_server = threading.Event() - server = Server(response_handler, wait_to_close_event=close_server) - - with server as (host, port): - url = 'http://{}:{}/path/to/thing/#view=edit&token=hunter2'.format(host, port) - r = requests.get(url) - raw_request = r.content - - assert r.status_code == 200 - headers, body = raw_request.split(b'\r\n\r\n', 1) - status_line, headers = headers.split(b'\r\n', 1) - - assert status_line == b'GET /path/to/thing/ HTTP/1.1' - for frag in (b'view', b'edit', b'token', b'hunter2'): - assert frag not in headers - assert frag not in body - - close_server.set() - -def test_fragment_update_on_redirect(): - """Verify we only append previous fragment if one doesn't exist on new - location. If a new fragment is encountered in a Location header, it should - be added to all subsequent requests.
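The fragment rules these tests pin down, seen from client code (URL illustrative):

import requests

r = requests.get('http://example.com/page#section')
# The request line on the wire is 'GET /page HTTP/1.1': '#section' stays
# client-side, and on a redirect it is re-applied to the new URL unless the
# Location header supplies a fragment of its own.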
- """ - - def response_handler(sock): - consume_socket_content(sock, timeout=0.5) - sock.send( - b'HTTP/1.1 302 FOUND\r\n' - b'Content-Length: 0\r\n' - b'Location: /get#relevant-section\r\n\r\n' - ) - consume_socket_content(sock, timeout=0.5) - sock.send( - b'HTTP/1.1 302 FOUND\r\n' - b'Content-Length: 0\r\n' - b'Location: /final-url/\r\n\r\n' - ) - consume_socket_content(sock, timeout=0.5) - sock.send( - b'HTTP/1.1 200 OK\r\n\r\n' - ) - - close_server = threading.Event() - server = Server(response_handler, wait_to_close_event=close_server) - - with server as (host, port): - url = 'http://{}:{}/path/to/thing/#view=edit&token=hunter2'.format(host, port) - r = requests.get(url) - raw_request = r.content - - assert r.status_code == 200 - assert len(r.history) == 2 - assert r.history[0].request.url == url - - # Verify we haven't overwritten the location with our previous fragment. - assert r.history[1].request.url == 'http://{}:{}/get#relevant-section'.format(host, port) - # Verify previous fragment is used and not the original. - assert r.url == 'http://{}:{}/final-url/#relevant-section'.format(host, port) - - close_server.set() diff --git a/vendor/requests/tests/test_packages.py b/vendor/requests/tests/test_packages.py deleted file mode 100644 index b55cb68c..00000000 --- a/vendor/requests/tests/test_packages.py +++ /dev/null @@ -1,13 +0,0 @@ -import requests - - -def test_can_access_urllib3_attribute(): - requests.packages.urllib3 - - -def test_can_access_idna_attribute(): - requests.packages.idna - - -def test_can_access_chardet_attribute(): - requests.packages.chardet diff --git a/vendor/requests/tests/test_requests.py b/vendor/requests/tests/test_requests.py deleted file mode 100644 index b77cba00..00000000 --- a/vendor/requests/tests/test_requests.py +++ /dev/null @@ -1,2573 +0,0 @@ -# -*- coding: utf-8 -*- - -"""Tests for Requests.""" - -from __future__ import division -import json -import os -import pickle -import collections -import contextlib -import warnings -import re - -import io -import requests -import pytest -from requests.adapters import HTTPAdapter -from requests.auth import HTTPDigestAuth, _basic_auth_str -from requests.compat import ( - Morsel, cookielib, getproxies, str, urlparse, - builtin_str) -from requests.cookies import ( - cookiejar_from_dict, morsel_to_cookie) -from requests.exceptions import ( - ConnectionError, ConnectTimeout, InvalidSchema, InvalidURL, - MissingSchema, ReadTimeout, Timeout, RetryError, TooManyRedirects, - ProxyError, InvalidHeader, UnrewindableBodyError, SSLError, InvalidProxyURL, InvalidJSONError) -from requests.models import PreparedRequest -from requests.structures import CaseInsensitiveDict -from requests.sessions import SessionRedirectMixin -from requests.models import urlencode -from requests.hooks import default_hooks -from requests.compat import MutableMapping - -from .compat import StringIO, u -from .utils import override_environ -from urllib3.util import Timeout as Urllib3Timeout - -# Requests to this URL should always fail with a connection timeout (nothing -# listening on that port) -TARPIT = 'http://10.255.255.1' - -# This is to avoid waiting the timeout of using TARPIT -INVALID_PROXY='http://localhost:1' - -try: - from ssl import SSLContext - del SSLContext - HAS_MODERN_SSL = True -except ImportError: - HAS_MODERN_SSL = False - -try: - requests.pyopenssl - HAS_PYOPENSSL = True -except AttributeError: - HAS_PYOPENSSL = False - - -class TestRequests: - - digest_auth_algo = ('MD5', 'SHA-256', 'SHA-512') - - def 
test_entry_points(self): - - requests.session - requests.session().get - requests.session().head - requests.get - requests.head - requests.put - requests.patch - requests.post - # Not really an entry point, but people rely on it. - from requests.packages.urllib3.poolmanager import PoolManager - - @pytest.mark.parametrize( - 'exception, url', ( - (MissingSchema, 'hiwpefhipowhefopw'), - (InvalidSchema, 'localhost:3128'), - (InvalidSchema, 'localhost.localdomain:3128/'), - (InvalidSchema, '10.122.1.1:3128/'), - (InvalidURL, 'http://'), - )) - def test_invalid_url(self, exception, url): - with pytest.raises(exception): - requests.get(url) - - def test_basic_building(self): - req = requests.Request() - req.url = 'http://kennethreitz.org/' - req.data = {'life': '42'} - - pr = req.prepare() - assert pr.url == req.url - assert pr.body == 'life=42' - - @pytest.mark.parametrize('method', ('GET', 'HEAD')) - def test_no_content_length(self, httpbin, method): - req = requests.Request(method, httpbin(method.lower())).prepare() - assert 'Content-Length' not in req.headers - - @pytest.mark.parametrize('method', ('POST', 'PUT', 'PATCH', 'OPTIONS')) - def test_no_body_content_length(self, httpbin, method): - req = requests.Request(method, httpbin(method.lower())).prepare() - assert req.headers['Content-Length'] == '0' - - @pytest.mark.parametrize('method', ('POST', 'PUT', 'PATCH', 'OPTIONS')) - def test_empty_content_length(self, httpbin, method): - req = requests.Request(method, httpbin(method.lower()), data='').prepare() - assert req.headers['Content-Length'] == '0' - - def test_override_content_length(self, httpbin): - headers = { - 'Content-Length': 'not zero' - } - r = requests.Request('POST', httpbin('post'), headers=headers).prepare() - assert 'Content-Length' in r.headers - assert r.headers['Content-Length'] == 'not zero' - - def test_path_is_not_double_encoded(self): - request = requests.Request('GET', "http://0.0.0.0/get/test case").prepare() - - assert request.path_url == '/get/test%20case' - - @pytest.mark.parametrize( - 'url, expected', ( - ('http://example.com/path#fragment', 'http://example.com/path?a=b#fragment'), - ('http://example.com/path?key=value#fragment', 'http://example.com/path?key=value&a=b#fragment') - )) - def test_params_are_added_before_fragment(self, url, expected): - request = requests.Request('GET', url, params={"a": "b"}).prepare() - assert request.url == expected - - def test_params_original_order_is_preserved_by_default(self): - param_ordered_dict = collections.OrderedDict((('z', 1), ('a', 1), ('k', 1), ('d', 1))) - session = requests.Session() - request = requests.Request('GET', 'http://example.com/', params=param_ordered_dict) - prep = session.prepare_request(request) - assert prep.url == 'http://example.com/?z=1&a=1&k=1&d=1' - - def test_params_bytes_are_encoded(self): - request = requests.Request('GET', 'http://example.com', - params=b'test=foo').prepare() - assert request.url == 'http://example.com/?test=foo' - - def test_binary_put(self): - request = requests.Request('PUT', 'http://example.com', - data=u"ööö".encode("utf-8")).prepare() - assert isinstance(request.body, bytes) - - def test_whitespaces_are_removed_from_url(self): - # Test for issue #3696 - request = requests.Request('GET', ' http://example.com').prepare() - assert request.url == 'http://example.com/' - - @pytest.mark.parametrize('scheme', ('http://', 'HTTP://', 'hTTp://', 'HttP://')) - def test_mixed_case_scheme_acceptable(self, httpbin, scheme): - s = requests.Session() - s.proxies = getproxies() - 
parts = urlparse(httpbin('get')) - url = scheme + parts.netloc + parts.path - r = requests.Request('GET', url) - r = s.send(r.prepare()) - assert r.status_code == 200, 'failed for scheme {}'.format(scheme) - - def test_HTTP_200_OK_GET_ALTERNATIVE(self, httpbin): - r = requests.Request('GET', httpbin('get')) - s = requests.Session() - s.proxies = getproxies() - - r = s.send(r.prepare()) - - assert r.status_code == 200 - - def test_HTTP_302_ALLOW_REDIRECT_GET(self, httpbin): - r = requests.get(httpbin('redirect', '1')) - assert r.status_code == 200 - assert r.history[0].status_code == 302 - assert r.history[0].is_redirect - - def test_HTTP_307_ALLOW_REDIRECT_POST(self, httpbin): - r = requests.post(httpbin('redirect-to'), data='test', params={'url': 'post', 'status_code': 307}) - assert r.status_code == 200 - assert r.history[0].status_code == 307 - assert r.history[0].is_redirect - assert r.json()['data'] == 'test' - - def test_HTTP_307_ALLOW_REDIRECT_POST_WITH_SEEKABLE(self, httpbin): - byte_str = b'test' - r = requests.post(httpbin('redirect-to'), data=io.BytesIO(byte_str), params={'url': 'post', 'status_code': 307}) - assert r.status_code == 200 - assert r.history[0].status_code == 307 - assert r.history[0].is_redirect - assert r.json()['data'] == byte_str.decode('utf-8') - - def test_HTTP_302_TOO_MANY_REDIRECTS(self, httpbin): - try: - requests.get(httpbin('relative-redirect', '50')) - except TooManyRedirects as e: - url = httpbin('relative-redirect', '20') - assert e.request.url == url - assert e.response.url == url - assert len(e.response.history) == 30 - else: - pytest.fail('Expected redirect to raise TooManyRedirects but it did not') - - def test_HTTP_302_TOO_MANY_REDIRECTS_WITH_PARAMS(self, httpbin): - s = requests.session() - s.max_redirects = 5 - try: - s.get(httpbin('relative-redirect', '50')) - except TooManyRedirects as e: - url = httpbin('relative-redirect', '45') - assert e.request.url == url - assert e.response.url == url - assert len(e.response.history) == 5 - else: - pytest.fail('Expected custom max number of redirects to be respected but was not') - - def test_http_301_changes_post_to_get(self, httpbin): - r = requests.post(httpbin('status', '301')) - assert r.status_code == 200 - assert r.request.method == 'GET' - assert r.history[0].status_code == 301 - assert r.history[0].is_redirect - - def test_http_301_doesnt_change_head_to_get(self, httpbin): - r = requests.head(httpbin('status', '301'), allow_redirects=True) - print(r.content) - assert r.status_code == 200 - assert r.request.method == 'HEAD' - assert r.history[0].status_code == 301 - assert r.history[0].is_redirect - - def test_http_302_changes_post_to_get(self, httpbin): - r = requests.post(httpbin('status', '302')) - assert r.status_code == 200 - assert r.request.method == 'GET' - assert r.history[0].status_code == 302 - assert r.history[0].is_redirect - - def test_http_302_doesnt_change_head_to_get(self, httpbin): - r = requests.head(httpbin('status', '302'), allow_redirects=True) - assert r.status_code == 200 - assert r.request.method == 'HEAD' - assert r.history[0].status_code == 302 - assert r.history[0].is_redirect - - def test_http_303_changes_post_to_get(self, httpbin): - r = requests.post(httpbin('status', '303')) - assert r.status_code == 200 - assert r.request.method == 'GET' - assert r.history[0].status_code == 303 - assert r.history[0].is_redirect - - def test_http_303_doesnt_change_head_to_get(self, httpbin): - r = requests.head(httpbin('status', '303'), allow_redirects=True) - assert r.status_code 
== 200 - assert r.request.method == 'HEAD' - assert r.history[0].status_code == 303 - assert r.history[0].is_redirect - - def test_header_and_body_removal_on_redirect(self, httpbin): - purged_headers = ('Content-Length', 'Content-Type') - ses = requests.Session() - req = requests.Request('POST', httpbin('post'), data={'test': 'data'}) - prep = ses.prepare_request(req) - resp = ses.send(prep) - - # Mimic a redirect response - resp.status_code = 302 - resp.headers['location'] = 'get' - - # Run request through resolve_redirects - next_resp = next(ses.resolve_redirects(resp, prep)) - assert next_resp.request.body is None - for header in purged_headers: - assert header not in next_resp.request.headers - - def test_transfer_enc_removal_on_redirect(self, httpbin): - purged_headers = ('Transfer-Encoding', 'Content-Type') - ses = requests.Session() - req = requests.Request('POST', httpbin('post'), data=(b'x' for x in range(1))) - prep = ses.prepare_request(req) - assert 'Transfer-Encoding' in prep.headers - - # Create Response to avoid https://github.com/kevin1024/pytest-httpbin/issues/33 - resp = requests.Response() - resp.raw = io.BytesIO(b'the content') - resp.request = prep - setattr(resp.raw, 'release_conn', lambda *args: args) - - # Mimic a redirect response - resp.status_code = 302 - resp.headers['location'] = httpbin('get') - - # Run request through resolve_redirect - next_resp = next(ses.resolve_redirects(resp, prep)) - assert next_resp.request.body is None - for header in purged_headers: - assert header not in next_resp.request.headers - - def test_fragment_maintained_on_redirect(self, httpbin): - fragment = "#view=edit&token=hunter2" - r = requests.get(httpbin('redirect-to?url=get')+fragment) - - assert len(r.history) > 0 - assert r.history[0].request.url == httpbin('redirect-to?url=get')+fragment - assert r.url == httpbin('get')+fragment - - def test_HTTP_200_OK_GET_WITH_PARAMS(self, httpbin): - heads = {'User-agent': 'Mozilla/5.0'} - - r = requests.get(httpbin('user-agent'), headers=heads) - - assert heads['User-agent'] in r.text - assert r.status_code == 200 - - def test_HTTP_200_OK_GET_WITH_MIXED_PARAMS(self, httpbin): - heads = {'User-agent': 'Mozilla/5.0'} - - r = requests.get(httpbin('get') + '?test=true', params={'q': 'test'}, headers=heads) - assert r.status_code == 200 - - def test_set_cookie_on_301(self, httpbin): - s = requests.session() - url = httpbin('cookies/set?foo=bar') - s.get(url) - assert s.cookies['foo'] == 'bar' - - def test_cookie_sent_on_redirect(self, httpbin): - s = requests.session() - s.get(httpbin('cookies/set?foo=bar')) - r = s.get(httpbin('redirect/1')) # redirects to httpbin('get') - assert 'Cookie' in r.json()['headers'] - - def test_cookie_removed_on_expire(self, httpbin): - s = requests.session() - s.get(httpbin('cookies/set?foo=bar')) - assert s.cookies['foo'] == 'bar' - s.get( - httpbin('response-headers'), - params={ - 'Set-Cookie': - 'foo=deleted; expires=Thu, 01-Jan-1970 00:00:01 GMT' - } - ) - assert 'foo' not in s.cookies - - def test_cookie_quote_wrapped(self, httpbin): - s = requests.session() - s.get(httpbin('cookies/set?foo="bar:baz"')) - assert s.cookies['foo'] == '"bar:baz"' - - def test_cookie_persists_via_api(self, httpbin): - s = requests.session() - r = s.get(httpbin('redirect/1'), cookies={'foo': 'bar'}) - assert 'foo' in r.request.headers['Cookie'] - assert 'foo' in r.history[0].request.headers['Cookie'] - - def test_request_cookie_overrides_session_cookie(self, httpbin): - s = requests.session() - s.cookies['foo'] = 'bar' - r = 
s.get(httpbin('cookies'), cookies={'foo': 'baz'}) - assert r.json()['cookies']['foo'] == 'baz' - # Session cookie should not be modified - assert s.cookies['foo'] == 'bar' - - def test_request_cookies_not_persisted(self, httpbin): - s = requests.session() - s.get(httpbin('cookies'), cookies={'foo': 'baz'}) - # Sending a request with cookies should not add cookies to the session - assert not s.cookies - - def test_generic_cookiejar_works(self, httpbin): - cj = cookielib.CookieJar() - cookiejar_from_dict({'foo': 'bar'}, cj) - s = requests.session() - s.cookies = cj - r = s.get(httpbin('cookies')) - # Make sure the cookie was sent - assert r.json()['cookies']['foo'] == 'bar' - # Make sure the session cj is still the custom one - assert s.cookies is cj - - def test_param_cookiejar_works(self, httpbin): - cj = cookielib.CookieJar() - cookiejar_from_dict({'foo': 'bar'}, cj) - s = requests.session() - r = s.get(httpbin('cookies'), cookies=cj) - # Make sure the cookie was sent - assert r.json()['cookies']['foo'] == 'bar' - - def test_cookielib_cookiejar_on_redirect(self, httpbin): - """Tests resolve_redirect doesn't fail when merging cookies - with non-RequestsCookieJar cookiejar. - - See GH #3579 - """ - cj = cookiejar_from_dict({'foo': 'bar'}, cookielib.CookieJar()) - s = requests.Session() - s.cookies = cookiejar_from_dict({'cookie': 'tasty'}) - - # Prepare request without using Session - req = requests.Request('GET', httpbin('headers'), cookies=cj) - prep_req = req.prepare() - - # Send request and simulate redirect - resp = s.send(prep_req) - resp.status_code = 302 - resp.headers['location'] = httpbin('get') - redirects = s.resolve_redirects(resp, prep_req) - resp = next(redirects) - - # Verify CookieJar isn't being converted to RequestsCookieJar - assert isinstance(prep_req._cookies, cookielib.CookieJar) - assert isinstance(resp.request._cookies, cookielib.CookieJar) - assert not isinstance(resp.request._cookies, requests.cookies.RequestsCookieJar) - - cookies = {} - for c in resp.request._cookies: - cookies[c.name] = c.value - assert cookies['foo'] == 'bar' - assert cookies['cookie'] == 'tasty' - - def test_requests_in_history_are_not_overridden(self, httpbin): - resp = requests.get(httpbin('redirect/3')) - urls = [r.url for r in resp.history] - req_urls = [r.request.url for r in resp.history] - assert urls == req_urls - - def test_history_is_always_a_list(self, httpbin): - """Show that even with redirects, Response.history is always a list.""" - resp = requests.get(httpbin('get')) - assert isinstance(resp.history, list) - resp = requests.get(httpbin('redirect/1')) - assert isinstance(resp.history, list) - assert not isinstance(resp.history, tuple) - - def test_headers_on_session_with_None_are_not_sent(self, httpbin): - """Do not send headers in Session.headers with None values.""" - ses = requests.Session() - ses.headers['Accept-Encoding'] = None - req = requests.Request('GET', httpbin('get')) - prep = ses.prepare_request(req) - assert 'Accept-Encoding' not in prep.headers - - def test_headers_preserve_order(self, httpbin): - """Preserve order when headers provided as OrderedDict.""" - ses = requests.Session() - ses.headers = collections.OrderedDict() - ses.headers['Accept-Encoding'] = 'identity' - ses.headers['First'] = '1' - ses.headers['Second'] = '2' - headers = collections.OrderedDict([('Third', '3'), ('Fourth', '4')]) - headers['Fifth'] = '5' - headers['Second'] = '222' - req = requests.Request('GET', httpbin('get'), headers=headers) - prep = ses.prepare_request(req) - items = 
list(prep.headers.items()) - assert items[0] == ('Accept-Encoding', 'identity') - assert items[1] == ('First', '1') - assert items[2] == ('Second', '222') - assert items[3] == ('Third', '3') - assert items[4] == ('Fourth', '4') - assert items[5] == ('Fifth', '5') - - @pytest.mark.parametrize('key', ('User-agent', 'user-agent')) - def test_user_agent_transfers(self, httpbin, key): - - heads = {key: 'Mozilla/5.0 (github.com/psf/requests)'} - - r = requests.get(httpbin('user-agent'), headers=heads) - assert heads[key] in r.text - - def test_HTTP_200_OK_HEAD(self, httpbin): - r = requests.head(httpbin('get')) - assert r.status_code == 200 - - def test_HTTP_200_OK_PUT(self, httpbin): - r = requests.put(httpbin('put')) - assert r.status_code == 200 - - def test_BASICAUTH_TUPLE_HTTP_200_OK_GET(self, httpbin): - auth = ('user', 'pass') - url = httpbin('basic-auth', 'user', 'pass') - - r = requests.get(url, auth=auth) - assert r.status_code == 200 - - r = requests.get(url) - assert r.status_code == 401 - - s = requests.session() - s.auth = auth - r = s.get(url) - assert r.status_code == 200 - - @pytest.mark.parametrize( - 'username, password', ( - ('user', 'pass'), - (u'имя'.encode('utf-8'), u'пароль'.encode('utf-8')), - (42, 42), - (None, None), - )) - def test_set_basicauth(self, httpbin, username, password): - auth = (username, password) - url = httpbin('get') - - r = requests.Request('GET', url, auth=auth) - p = r.prepare() - - assert p.headers['Authorization'] == _basic_auth_str(username, password) - - def test_basicauth_encodes_byte_strings(self): - """Ensure b'test' formats as the byte string "test" rather - than the unicode string "b'test'" in Python 3. - """ - auth = (b'\xc5\xafsername', b'test\xc6\xb6') - r = requests.Request('GET', 'http://localhost', auth=auth) - p = r.prepare() - - assert p.headers['Authorization'] == 'Basic xa9zZXJuYW1lOnRlc3TGtg==' - - @pytest.mark.parametrize( - 'url, exception', ( - # Connecting to an unknown domain should raise a ConnectionError - ('http://doesnotexist.google.com', ConnectionError), - # Connecting to an invalid port should raise a ConnectionError - ('http://localhost:1', ConnectionError), - # Inputing a URL that cannot be parsed should raise an InvalidURL error - ('http://fe80::5054:ff:fe5a:fc0', InvalidURL) - )) - def test_errors(self, url, exception): - with pytest.raises(exception): - requests.get(url, timeout=1) - - def test_proxy_error(self): - # any proxy related error (address resolution, no route to host, etc) should result in a ProxyError - with pytest.raises(ProxyError): - requests.get('http://localhost:1', proxies={'http': 'non-resolvable-address'}) - - def test_proxy_error_on_bad_url(self, httpbin, httpbin_secure): - with pytest.raises(InvalidProxyURL): - requests.get(httpbin_secure(), proxies={'https': 'http:/badproxyurl:3128'}) - - with pytest.raises(InvalidProxyURL): - requests.get(httpbin(), proxies={'http': 'http://:8080'}) - - with pytest.raises(InvalidProxyURL): - requests.get(httpbin_secure(), proxies={'https': 'https://'}) - - with pytest.raises(InvalidProxyURL): - requests.get(httpbin(), proxies={'http': 'http:///example.com:8080'}) - - def test_respect_proxy_env_on_send_self_prepared_request(self, httpbin): - with override_environ(http_proxy=INVALID_PROXY): - with pytest.raises(ProxyError): - session = requests.Session() - request = requests.Request('GET', httpbin()) - session.send(request.prepare()) - - def test_respect_proxy_env_on_send_session_prepared_request(self, httpbin): - with
override_environ(http_proxy=INVALID_PROXY): - with pytest.raises(ProxyError): - session = requests.Session() - request = requests.Request('GET', httpbin()) - prepared = session.prepare_request(request) - session.send(prepared) - - def test_respect_proxy_env_on_send_with_redirects(self, httpbin): - with override_environ(http_proxy=INVALID_PROXY): - with pytest.raises(ProxyError): - session = requests.Session() - url = httpbin('redirect/1') - print(url) - request = requests.Request('GET', url) - session.send(request.prepare()) - - def test_respect_proxy_env_on_get(self, httpbin): - with override_environ(http_proxy=INVALID_PROXY): - with pytest.raises(ProxyError): - session = requests.Session() - session.get(httpbin()) - - def test_respect_proxy_env_on_request(self, httpbin): - with override_environ(http_proxy=INVALID_PROXY): - with pytest.raises(ProxyError): - session = requests.Session() - session.request(method='GET', url=httpbin()) - - def test_basicauth_with_netrc(self, httpbin): - auth = ('user', 'pass') - wrong_auth = ('wronguser', 'wrongpass') - url = httpbin('basic-auth', 'user', 'pass') - - old_auth = requests.sessions.get_netrc_auth - - try: - def get_netrc_auth_mock(url): - return auth - requests.sessions.get_netrc_auth = get_netrc_auth_mock - - # Should use netrc and work. - r = requests.get(url) - assert r.status_code == 200 - - # Given auth should override and fail. - r = requests.get(url, auth=wrong_auth) - assert r.status_code == 401 - - s = requests.session() - - # Should use netrc and work. - r = s.get(url) - assert r.status_code == 200 - - # Given auth should override and fail. - s.auth = wrong_auth - r = s.get(url) - assert r.status_code == 401 - finally: - requests.sessions.get_netrc_auth = old_auth - - def test_DIGEST_HTTP_200_OK_GET(self, httpbin): - - for authtype in self.digest_auth_algo: - auth = HTTPDigestAuth('user', 'pass') - url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype, 'never') - - r = requests.get(url, auth=auth) - assert r.status_code == 200 - - r = requests.get(url) - assert r.status_code == 401 - print(r.headers['WWW-Authenticate']) - - s = requests.session() - s.auth = HTTPDigestAuth('user', 'pass') - r = s.get(url) - assert r.status_code == 200 - - def test_DIGEST_AUTH_RETURNS_COOKIE(self, httpbin): - - for authtype in self.digest_auth_algo: - url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype) - auth = HTTPDigestAuth('user', 'pass') - r = requests.get(url) - assert r.cookies['fake'] == 'fake_value' - - r = requests.get(url, auth=auth) - assert r.status_code == 200 - - def test_DIGEST_AUTH_SETS_SESSION_COOKIES(self, httpbin): - - for authtype in self.digest_auth_algo: - url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype) - auth = HTTPDigestAuth('user', 'pass') - s = requests.Session() - s.get(url, auth=auth) - assert s.cookies['fake'] == 'fake_value' - - def test_DIGEST_STREAM(self, httpbin): - - for authtype in self.digest_auth_algo: - auth = HTTPDigestAuth('user', 'pass') - url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype) - - r = requests.get(url, auth=auth, stream=True) - assert r.raw.read() != b'' - - r = requests.get(url, auth=auth, stream=False) - assert r.raw.read() == b'' - - def test_DIGESTAUTH_WRONG_HTTP_401_GET(self, httpbin): - - for authtype in self.digest_auth_algo: - auth = HTTPDigestAuth('user', 'wrongpass') - url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype) - - r = requests.get(url, auth=auth) - assert r.status_code == 401 - - r = requests.get(url) - assert r.status_code 
== 401 - - s = requests.session() - s.auth = auth - r = s.get(url) - assert r.status_code == 401 - - def test_DIGESTAUTH_QUOTES_QOP_VALUE(self, httpbin): - - for authtype in self.digest_auth_algo: - auth = HTTPDigestAuth('user', 'pass') - url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype) - - r = requests.get(url, auth=auth) - assert '"auth"' in r.request.headers['Authorization'] - - def test_POSTBIN_GET_POST_FILES(self, httpbin): - - url = httpbin('post') - requests.post(url).raise_for_status() - - post1 = requests.post(url, data={'some': 'data'}) - assert post1.status_code == 200 - - with open('requirements-dev.txt') as f: - post2 = requests.post(url, files={'some': f}) - assert post2.status_code == 200 - - post4 = requests.post(url, data='[{"some": "json"}]') - assert post4.status_code == 200 - - with pytest.raises(ValueError): - requests.post(url, files=['bad file data']) - - def test_invalid_files_input(self, httpbin): - - url = httpbin('post') - post = requests.post(url, - files={"random-file-1": None, "random-file-2": 1}) - assert b'name="random-file-1"' not in post.request.body - assert b'name="random-file-2"' in post.request.body - - def test_POSTBIN_SEEKED_OBJECT_WITH_NO_ITER(self, httpbin): - - class TestStream(object): - def __init__(self, data): - self.data = data.encode() - self.length = len(self.data) - self.index = 0 - - def __len__(self): - return self.length - - def read(self, size=None): - if size: - ret = self.data[self.index:self.index + size] - self.index += size - else: - ret = self.data[self.index:] - self.index = self.length - return ret - - def tell(self): - return self.index - - def seek(self, offset, where=0): - if where == 0: - self.index = offset - elif where == 1: - self.index += offset - elif where == 2: - self.index = self.length + offset - - test = TestStream('test') - post1 = requests.post(httpbin('post'), data=test) - assert post1.status_code == 200 - assert post1.json()['data'] == 'test' - - test = TestStream('test') - test.seek(2) - post2 = requests.post(httpbin('post'), data=test) - assert post2.status_code == 200 - assert post2.json()['data'] == 'st' - - def test_POSTBIN_GET_POST_FILES_WITH_DATA(self, httpbin): - - url = httpbin('post') - requests.post(url).raise_for_status() - - post1 = requests.post(url, data={'some': 'data'}) - assert post1.status_code == 200 - - with open('requirements-dev.txt') as f: - post2 = requests.post(url, data={'some': 'data'}, files={'some': f}) - assert post2.status_code == 200 - - post4 = requests.post(url, data='[{"some": "json"}]') - assert post4.status_code == 200 - - with pytest.raises(ValueError): - requests.post(url, files=['bad file data']) - - def test_post_with_custom_mapping(self, httpbin): - class CustomMapping(MutableMapping): - def __init__(self, *args, **kwargs): - self.data = dict(*args, **kwargs) - - def __delitem__(self, key): - del self.data[key] - - def __getitem__(self, key): - return self.data[key] - - def __setitem__(self, key, value): - self.data[key] = value - - def __iter__(self): - return iter(self.data) - - def __len__(self): - return len(self.data) - - data = CustomMapping({'some': 'data'}) - url = httpbin('post') - found_json = requests.post(url, data=data).json().get('form') - assert found_json == {'some': 'data'} - - def test_conflicting_post_params(self, httpbin): - url = httpbin('post') - with open('requirements-dev.txt') as f: - with pytest.raises(ValueError): - requests.post(url, data='[{"some": "data"}]', files={'some': f}) - with pytest.raises(ValueError): - 
requests.post(url, data=u('[{"some": "data"}]'), files={'some': f}) - - def test_request_ok_set(self, httpbin): - r = requests.get(httpbin('status', '404')) - assert not r.ok - - def test_status_raising(self, httpbin): - r = requests.get(httpbin('status', '404')) - with pytest.raises(requests.exceptions.HTTPError): - r.raise_for_status() - - r = requests.get(httpbin('status', '500')) - assert not r.ok - - def test_decompress_gzip(self, httpbin): - r = requests.get(httpbin('gzip')) - r.content.decode('ascii') - - @pytest.mark.parametrize( - 'url, params', ( - ('/get', {'foo': 'føø'}), - ('/get', {'føø': 'føø'}), - ('/get', {'føø': 'føø'}), - ('/get', {'foo': 'foo'}), - ('ø', {'foo': 'foo'}), - )) - def test_unicode_get(self, httpbin, url, params): - requests.get(httpbin(url), params=params) - - def test_unicode_header_name(self, httpbin): - requests.put( - httpbin('put'), - headers={str('Content-Type'): 'application/octet-stream'}, - data='\xff') # compat.str is unicode. - - def test_pyopenssl_redirect(self, httpbin_secure, httpbin_ca_bundle): - requests.get(httpbin_secure('status', '301'), verify=httpbin_ca_bundle) - - def test_invalid_ca_certificate_path(self, httpbin_secure): - INVALID_PATH = '/garbage' - with pytest.raises(IOError) as e: - requests.get(httpbin_secure(), verify=INVALID_PATH) - assert str(e.value) == 'Could not find a suitable TLS CA certificate bundle, invalid path: {}'.format(INVALID_PATH) - - def test_invalid_ssl_certificate_files(self, httpbin_secure): - INVALID_PATH = '/garbage' - with pytest.raises(IOError) as e: - requests.get(httpbin_secure(), cert=INVALID_PATH) - assert str(e.value) == 'Could not find the TLS certificate file, invalid path: {}'.format(INVALID_PATH) - - with pytest.raises(IOError) as e: - requests.get(httpbin_secure(), cert=('.', INVALID_PATH)) - assert str(e.value) == 'Could not find the TLS key file, invalid path: {}'.format(INVALID_PATH) - - def test_http_with_certificate(self, httpbin): - r = requests.get(httpbin(), cert='.') - assert r.status_code == 200 - - def test_https_warnings(self, nosan_server): - """warnings are emitted with requests.get""" - host, port, ca_bundle = nosan_server - if HAS_MODERN_SSL or HAS_PYOPENSSL: - warnings_expected = ('SubjectAltNameWarning', ) - else: - warnings_expected = ('SNIMissingWarning', - 'InsecurePlatformWarning', - 'SubjectAltNameWarning', ) - - with pytest.warns(None) as warning_records: - warnings.simplefilter('always') - requests.get("https://localhost:{}/".format(port), verify=ca_bundle) - - warning_records = [item for item in warning_records - if item.category.__name__ != 'ResourceWarning'] - - warnings_category = tuple( - item.category.__name__ for item in warning_records) - assert warnings_category == warnings_expected - - def test_certificate_failure(self, httpbin_secure): - """ - When underlying SSL problems occur, an SSLError is raised. - """ - with pytest.raises(SSLError): - # Our local httpbin does not have a trusted CA, so this call will - # fail if we use our default trust bundle. 
- requests.get(httpbin_secure('status', '200')) - - def test_urlencoded_get_query_multivalued_param(self, httpbin): - - r = requests.get(httpbin('get'), params={'test': ['foo', 'baz']}) - assert r.status_code == 200 - assert r.url == httpbin('get?test=foo&test=baz') - - def test_form_encoded_post_query_multivalued_element(self, httpbin): - r = requests.Request(method='POST', url=httpbin('post'), - data=dict(test=['foo', 'baz'])) - prep = r.prepare() - assert prep.body == 'test=foo&test=baz' - - def test_different_encodings_dont_break_post(self, httpbin): - r = requests.post(httpbin('post'), - data={'stuff': json.dumps({'a': 123})}, - params={'blah': 'asdf1234'}, - files={'file': ('test_requests.py', open(__file__, 'rb'))}) - assert r.status_code == 200 - - @pytest.mark.parametrize( - 'data', ( - {'stuff': u('ëlïxr')}, - {'stuff': u('ëlïxr').encode('utf-8')}, - {'stuff': 'elixr'}, - {'stuff': 'elixr'.encode('utf-8')}, - )) - def test_unicode_multipart_post(self, httpbin, data): - r = requests.post(httpbin('post'), - data=data, - files={'file': ('test_requests.py', open(__file__, 'rb'))}) - assert r.status_code == 200 - - def test_unicode_multipart_post_fieldnames(self, httpbin): - filename = os.path.splitext(__file__)[0] + '.py' - r = requests.Request( - method='POST', url=httpbin('post'), - data={'stuff'.encode('utf-8'): 'elixr'}, - files={'file': ('test_requests.py', open(filename, 'rb'))}) - prep = r.prepare() - assert b'name="stuff"' in prep.body - assert b'name="b\'stuff\'"' not in prep.body - - def test_unicode_method_name(self, httpbin): - files = {'file': open(__file__, 'rb')} - r = requests.request( - method=u('POST'), url=httpbin('post'), files=files) - assert r.status_code == 200 - - def test_unicode_method_name_with_request_object(self, httpbin): - files = {'file': open(__file__, 'rb')} - s = requests.Session() - req = requests.Request(u('POST'), httpbin('post'), files=files) - prep = s.prepare_request(req) - assert isinstance(prep.method, builtin_str) - assert prep.method == 'POST' - - resp = s.send(prep) - assert resp.status_code == 200 - - def test_non_prepared_request_error(self): - s = requests.Session() - req = requests.Request(u('POST'), '/') - - with pytest.raises(ValueError) as e: - s.send(req) - assert str(e.value) == 'You can only send PreparedRequests.' 
- - def test_custom_content_type(self, httpbin): - r = requests.post( - httpbin('post'), - data={'stuff': json.dumps({'a': 123})}, - files={ - 'file1': ('test_requests.py', open(__file__, 'rb')), - 'file2': ('test_requests', open(__file__, 'rb'), - 'text/py-content-type')}) - assert r.status_code == 200 - assert b"text/py-content-type" in r.request.body - - def test_hook_receives_request_arguments(self, httpbin): - def hook(resp, **kwargs): - assert resp is not None - assert kwargs != {} - - s = requests.Session() - r = requests.Request('GET', httpbin(), hooks={'response': hook}) - prep = s.prepare_request(r) - s.send(prep) - - def test_session_hooks_are_used_with_no_request_hooks(self, httpbin): - hook = lambda x, *args, **kwargs: x - s = requests.Session() - s.hooks['response'].append(hook) - r = requests.Request('GET', httpbin()) - prep = s.prepare_request(r) - assert prep.hooks['response'] != [] - assert prep.hooks['response'] == [hook] - - def test_session_hooks_are_overridden_by_request_hooks(self, httpbin): - hook1 = lambda x, *args, **kwargs: x - hook2 = lambda x, *args, **kwargs: x - assert hook1 is not hook2 - s = requests.Session() - s.hooks['response'].append(hook2) - r = requests.Request('GET', httpbin(), hooks={'response': [hook1]}) - prep = s.prepare_request(r) - assert prep.hooks['response'] == [hook1] - - def test_prepared_request_hook(self, httpbin): - def hook(resp, **kwargs): - resp.hook_working = True - return resp - - req = requests.Request('GET', httpbin(), hooks={'response': hook}) - prep = req.prepare() - - s = requests.Session() - s.proxies = getproxies() - resp = s.send(prep) - - assert hasattr(resp, 'hook_working') - - def test_prepared_from_session(self, httpbin): - class DummyAuth(requests.auth.AuthBase): - def __call__(self, r): - r.headers['Dummy-Auth-Test'] = 'dummy-auth-test-ok' - return r - - req = requests.Request('GET', httpbin('headers')) - assert not req.auth - - s = requests.Session() - s.auth = DummyAuth() - - prep = s.prepare_request(req) - resp = s.send(prep) - - assert resp.json()['headers'][ - 'Dummy-Auth-Test'] == 'dummy-auth-test-ok' - - def test_prepare_request_with_bytestring_url(self): - req = requests.Request('GET', b'https://httpbin.org/') - s = requests.Session() - prep = s.prepare_request(req) - assert prep.url == "https://httpbin.org/" - - def test_request_with_bytestring_host(self, httpbin): - s = requests.Session() - resp = s.request( - 'GET', - httpbin('cookies/set?cookie=value'), - allow_redirects=False, - headers={'Host': b'httpbin.org'} - ) - assert resp.cookies.get('cookie') == 'value' - - def test_links(self): - r = requests.Response() - r.headers = { - 'cache-control': 'public, max-age=60, s-maxage=60', - 'connection': 'keep-alive', - 'content-encoding': 'gzip', - 'content-type': 'application/json; charset=utf-8', - 'date': 'Sat, 26 Jan 2013 16:47:56 GMT', - 'etag': '"6ff6a73c0e446c1f61614769e3ceb778"', - 'last-modified': 'Sat, 26 Jan 2013 16:22:39 GMT', - 'link': ('<https://api.github.com/users/kennethreitz/repos?page=2&per_page=10>; rel="next", ' - '<https://api.github.com/users/kennethreitz/repos?page=3&per_page=10>; rel="last"'), - 'server': 'GitHub.com', - 'status': '200 OK', - 'vary': 'Accept', - 'x-content-type-options': 'nosniff', - 'x-github-media-type': 'github.beta', - 'x-ratelimit-limit': '60', - 'x-ratelimit-remaining': '57' - } - assert r.links['next']['rel'] == 'next' - - def test_cookie_parameters(self): - key = 'some_cookie' - value = 'some_value' - secure = True - domain = 'test.com' - rest = {'HttpOnly': True} - - jar = requests.cookies.RequestsCookieJar() - jar.set(key, value, secure=secure, domain=domain, rest=rest) - - assert len(jar) ==
1 - assert 'some_cookie' in jar - - cookie = list(jar)[0] - assert cookie.secure == secure - assert cookie.domain == domain - assert cookie._rest['HttpOnly'] == rest['HttpOnly'] - - def test_cookie_as_dict_keeps_len(self): - key = 'some_cookie' - value = 'some_value' - - key1 = 'some_cookie1' - value1 = 'some_value1' - - jar = requests.cookies.RequestsCookieJar() - jar.set(key, value) - jar.set(key1, value1) - - d1 = dict(jar) - d2 = dict(jar.iteritems()) - d3 = dict(jar.items()) - - assert len(jar) == 2 - assert len(d1) == 2 - assert len(d2) == 2 - assert len(d3) == 2 - - def test_cookie_as_dict_keeps_items(self): - key = 'some_cookie' - value = 'some_value' - - key1 = 'some_cookie1' - value1 = 'some_value1' - - jar = requests.cookies.RequestsCookieJar() - jar.set(key, value) - jar.set(key1, value1) - - d1 = dict(jar) - d2 = dict(jar.iteritems()) - d3 = dict(jar.items()) - - assert d1['some_cookie'] == 'some_value' - assert d2['some_cookie'] == 'some_value' - assert d3['some_cookie1'] == 'some_value1' - - def test_cookie_as_dict_keys(self): - key = 'some_cookie' - value = 'some_value' - - key1 = 'some_cookie1' - value1 = 'some_value1' - - jar = requests.cookies.RequestsCookieJar() - jar.set(key, value) - jar.set(key1, value1) - - keys = jar.keys() - assert keys == list(keys) - # make sure one can use keys multiple times - assert list(keys) == list(keys) - - def test_cookie_as_dict_values(self): - key = 'some_cookie' - value = 'some_value' - - key1 = 'some_cookie1' - value1 = 'some_value1' - - jar = requests.cookies.RequestsCookieJar() - jar.set(key, value) - jar.set(key1, value1) - - values = jar.values() - assert values == list(values) - # make sure one can use values multiple times - assert list(values) == list(values) - - def test_cookie_as_dict_items(self): - key = 'some_cookie' - value = 'some_value' - - key1 = 'some_cookie1' - value1 = 'some_value1' - - jar = requests.cookies.RequestsCookieJar() - jar.set(key, value) - jar.set(key1, value1) - - items = jar.items() - assert items == list(items) - # make sure one can use items multiple times - assert list(items) == list(items) - - def test_cookie_duplicate_names_different_domains(self): - key = 'some_cookie' - value = 'some_value' - domain1 = 'test1.com' - domain2 = 'test2.com' - - jar = requests.cookies.RequestsCookieJar() - jar.set(key, value, domain=domain1) - jar.set(key, value, domain=domain2) - assert key in jar - items = jar.items() - assert len(items) == 2 - - # Verify that CookieConflictError is raised if domain is not specified - with pytest.raises(requests.cookies.CookieConflictError): - jar.get(key) - - # Verify that CookieConflictError is not raised if domain is specified - cookie = jar.get(key, domain=domain1) - assert cookie == value - - def test_cookie_duplicate_names_raises_cookie_conflict_error(self): - key = 'some_cookie' - value = 'some_value' - path = 'some_path' - - jar = requests.cookies.RequestsCookieJar() - jar.set(key, value, path=path) - jar.set(key, value) - with pytest.raises(requests.cookies.CookieConflictError): - jar.get(key) - - def test_cookie_policy_copy(self): - class MyCookiePolicy(cookielib.DefaultCookiePolicy): - pass - - jar = requests.cookies.RequestsCookieJar() - jar.set_policy(MyCookiePolicy()) - assert isinstance(jar.copy().get_policy(), MyCookiePolicy) - - def test_time_elapsed_blank(self, httpbin): - r = requests.get(httpbin('get')) - td = r.elapsed - total_seconds = ((td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6) - assert total_seconds > 0.0 - - def 
test_empty_response_has_content_none(self): - r = requests.Response() - assert r.content is None - - def test_response_is_iterable(self): - r = requests.Response() - io = StringIO.StringIO('abc') - read_ = io.read - - def read_mock(amt, decode_content=None): - return read_(amt) - setattr(io, 'read', read_mock) - r.raw = io - assert next(iter(r)) - io.close() - - def test_response_decode_unicode(self): - """When called with decode_unicode, Response.iter_content should always - return unicode. - """ - r = requests.Response() - r._content_consumed = True - r._content = b'the content' - r.encoding = 'ascii' - - chunks = r.iter_content(decode_unicode=True) - assert all(isinstance(chunk, str) for chunk in chunks) - - # also for streaming - r = requests.Response() - r.raw = io.BytesIO(b'the content') - r.encoding = 'ascii' - chunks = r.iter_content(decode_unicode=True) - assert all(isinstance(chunk, str) for chunk in chunks) - - def test_response_reason_unicode(self): - # check for unicode HTTP status - r = requests.Response() - r.url = u'unicode URL' - r.reason = u'Komponenttia ei löydy'.encode('utf-8') - r.status_code = 404 - r.encoding = None - assert not r.ok # old behaviour - crashes here - - def test_response_reason_unicode_fallback(self): - # check raise_status falls back to ISO-8859-1 - r = requests.Response() - r.url = 'some url' - reason = u'Komponenttia ei löydy' - r.reason = reason.encode('latin-1') - r.status_code = 500 - r.encoding = None - with pytest.raises(requests.exceptions.HTTPError) as e: - r.raise_for_status() - assert reason in e.value.args[0] - - def test_response_chunk_size_type(self): - """Ensure that chunk_size is passed as None or an integer, otherwise - raise a TypeError. - """ - r = requests.Response() - r.raw = io.BytesIO(b'the content') - chunks = r.iter_content(1) - assert all(len(chunk) == 1 for chunk in chunks) - - r = requests.Response() - r.raw = io.BytesIO(b'the content') - chunks = r.iter_content(None) - assert list(chunks) == [b'the content'] - - r = requests.Response() - r.raw = io.BytesIO(b'the content') - with pytest.raises(TypeError): - chunks = r.iter_content("1024") - - def test_request_and_response_are_pickleable(self, httpbin): - r = requests.get(httpbin('get')) - - # verify we can pickle the original request - assert pickle.loads(pickle.dumps(r.request)) - - # verify we can pickle the response and that we have access to - # the original request. 
- pr = pickle.loads(pickle.dumps(r)) - assert r.request.url == pr.request.url - assert r.request.headers == pr.request.headers - - def test_prepared_request_is_pickleable(self, httpbin): - p = requests.Request('GET', httpbin('get')).prepare() - - # Verify PreparedRequest can be pickled and unpickled - r = pickle.loads(pickle.dumps(p)) - assert r.url == p.url - assert r.headers == p.headers - assert r.body == p.body - - # Verify unpickled PreparedRequest sends properly - s = requests.Session() - resp = s.send(r) - assert resp.status_code == 200 - - def test_prepared_request_with_file_is_pickleable(self, httpbin): - files = {'file': open(__file__, 'rb')} - r = requests.Request('POST', httpbin('post'), files=files) - p = r.prepare() - - # Verify PreparedRequest can be pickled and unpickled - r = pickle.loads(pickle.dumps(p)) - assert r.url == p.url - assert r.headers == p.headers - assert r.body == p.body - - # Verify unpickled PreparedRequest sends properly - s = requests.Session() - resp = s.send(r) - assert resp.status_code == 200 - - def test_prepared_request_with_hook_is_pickleable(self, httpbin): - r = requests.Request('GET', httpbin('get'), hooks=default_hooks()) - p = r.prepare() - - # Verify PreparedRequest can be pickled - r = pickle.loads(pickle.dumps(p)) - assert r.url == p.url - assert r.headers == p.headers - assert r.body == p.body - assert r.hooks == p.hooks - - # Verify unpickled PreparedRequest sends properly - s = requests.Session() - resp = s.send(r) - assert resp.status_code == 200 - - def test_cannot_send_unprepared_requests(self, httpbin): - r = requests.Request(url=httpbin()) - with pytest.raises(ValueError): - requests.Session().send(r) - - def test_http_error(self): - error = requests.exceptions.HTTPError() - assert not error.response - response = requests.Response() - error = requests.exceptions.HTTPError(response=response) - assert error.response == response - error = requests.exceptions.HTTPError('message', response=response) - assert str(error) == 'message' - assert error.response == response - - def test_session_pickling(self, httpbin): - r = requests.Request('GET', httpbin('get')) - s = requests.Session() - - s = pickle.loads(pickle.dumps(s)) - s.proxies = getproxies() - - r = s.send(r.prepare()) - assert r.status_code == 200 - - def test_fixes_1329(self, httpbin): - """Ensure that header updates are done case-insensitively.""" - s = requests.Session() - s.headers.update({'ACCEPT': 'BOGUS'}) - s.headers.update({'accept': 'application/json'}) - r = s.get(httpbin('get')) - headers = r.request.headers - assert headers['accept'] == 'application/json' - assert headers['Accept'] == 'application/json' - assert headers['ACCEPT'] == 'application/json' - - def test_uppercase_scheme_redirect(self, httpbin): - parts = urlparse(httpbin('html')) - url = "HTTP://" + parts.netloc + parts.path - r = requests.get(httpbin('redirect-to'), params={'url': url}) - assert r.status_code == 200 - assert r.url.lower() == url.lower() - - def test_transport_adapter_ordering(self): - s = requests.Session() - order = ['https://', 'http://'] - assert order == list(s.adapters) - s.mount('http://git', HTTPAdapter()) - s.mount('http://github', HTTPAdapter()) - s.mount('http://github.com', HTTPAdapter()) - s.mount('http://github.com/about/', HTTPAdapter()) - order = [ - 'http://github.com/about/', - 'http://github.com', - 'http://github', - 'http://git', - 'https://', - 'http://', - ] - assert order == list(s.adapters) - s.mount('http://gittip', HTTPAdapter()) - s.mount('http://gittip.com', 
HTTPAdapter()) - s.mount('http://gittip.com/about/', HTTPAdapter()) - order = [ - 'http://github.com/about/', - 'http://gittip.com/about/', - 'http://github.com', - 'http://gittip.com', - 'http://github', - 'http://gittip', - 'http://git', - 'https://', - 'http://', - ] - assert order == list(s.adapters) - s2 = requests.Session() - s2.adapters = {'http://': HTTPAdapter()} - s2.mount('https://', HTTPAdapter()) - assert 'http://' in s2.adapters - assert 'https://' in s2.adapters - - def test_session_get_adapter_prefix_matching(self): - prefix = 'https://example.com' - more_specific_prefix = prefix + '/some/path' - - url_matching_only_prefix = prefix + '/another/path' - url_matching_more_specific_prefix = more_specific_prefix + '/longer/path' - url_not_matching_prefix = 'https://another.example.com/' - - s = requests.Session() - prefix_adapter = HTTPAdapter() - more_specific_prefix_adapter = HTTPAdapter() - s.mount(prefix, prefix_adapter) - s.mount(more_specific_prefix, more_specific_prefix_adapter) - - assert s.get_adapter(url_matching_only_prefix) is prefix_adapter - assert s.get_adapter(url_matching_more_specific_prefix) is more_specific_prefix_adapter - assert s.get_adapter(url_not_matching_prefix) not in (prefix_adapter, more_specific_prefix_adapter) - - def test_session_get_adapter_prefix_matching_mixed_case(self): - mixed_case_prefix = 'hTtPs://eXamPle.CoM/MixEd_CAse_PREfix' - url_matching_prefix = mixed_case_prefix + '/full_url' - - s = requests.Session() - my_adapter = HTTPAdapter() - s.mount(mixed_case_prefix, my_adapter) - - assert s.get_adapter(url_matching_prefix) is my_adapter - - def test_session_get_adapter_prefix_matching_is_case_insensitive(self): - mixed_case_prefix = 'hTtPs://eXamPle.CoM/MixEd_CAse_PREfix' - url_matching_prefix_with_different_case = 'HtTpS://exaMPLe.cOm/MiXeD_caSE_preFIX/another_url' - - s = requests.Session() - my_adapter = HTTPAdapter() - s.mount(mixed_case_prefix, my_adapter) - - assert s.get_adapter(url_matching_prefix_with_different_case) is my_adapter - - def test_header_remove_is_case_insensitive(self, httpbin): - # From issue #1321 - s = requests.Session() - s.headers['foo'] = 'bar' - r = s.get(httpbin('get'), headers={'FOO': None}) - assert 'foo' not in r.request.headers - - def test_params_are_merged_case_sensitive(self, httpbin): - s = requests.Session() - s.params['foo'] = 'bar' - r = s.get(httpbin('get'), params={'FOO': 'bar'}) - assert r.json()['args'] == {'foo': 'bar', 'FOO': 'bar'} - - def test_long_authinfo_in_url(self): - url = 'http://{}:{}@{}:9000/path?query#frag'.format( - 'E8A3BE87-9E3F-4620-8858-95478E385B5B', - 'EA770032-DA4D-4D84-8CE9-29C6D910BF1E', - 'exactly-------------sixty-----------three------------characters', - ) - r = requests.Request('GET', url).prepare() - assert r.url == url - - def test_header_keys_are_native(self, httpbin): - headers = {u('unicode'): 'blah', 'byte'.encode('ascii'): 'blah'} - r = requests.Request('GET', httpbin('get'), headers=headers) - p = r.prepare() - - # This is testing that they are builtin strings. A bit weird, but there - # we go. 
- assert 'unicode' in p.headers.keys() - assert 'byte' in p.headers.keys() - - def test_header_validation(self, httpbin): - """Ensure prepare_headers regex isn't flagging valid header contents.""" - headers_ok = {'foo': 'bar baz qux', - 'bar': u'fbbq'.encode('utf8'), - 'baz': '', - 'qux': '1'} - r = requests.get(httpbin('get'), headers=headers_ok) - assert r.request.headers['foo'] == headers_ok['foo'] - - def test_header_value_not_str(self, httpbin): - """Ensure the header value is of type string or bytes as - per discussion in GH issue #3386 - """ - headers_int = {'foo': 3} - headers_dict = {'bar': {'foo': 'bar'}} - headers_list = {'baz': ['foo', 'bar']} - - # Test for int - with pytest.raises(InvalidHeader) as excinfo: - r = requests.get(httpbin('get'), headers=headers_int) - assert 'foo' in str(excinfo.value) - # Test for dict - with pytest.raises(InvalidHeader) as excinfo: - r = requests.get(httpbin('get'), headers=headers_dict) - assert 'bar' in str(excinfo.value) - # Test for list - with pytest.raises(InvalidHeader) as excinfo: - r = requests.get(httpbin('get'), headers=headers_list) - assert 'baz' in str(excinfo.value) - - def test_header_no_return_chars(self, httpbin): - """Ensure that a header containing return character sequences raise an - exception. Otherwise, multiple headers are created from single string. - """ - headers_ret = {'foo': 'bar\r\nbaz: qux'} - headers_lf = {'foo': 'bar\nbaz: qux'} - headers_cr = {'foo': 'bar\rbaz: qux'} - - # Test for newline - with pytest.raises(InvalidHeader): - r = requests.get(httpbin('get'), headers=headers_ret) - # Test for line feed - with pytest.raises(InvalidHeader): - r = requests.get(httpbin('get'), headers=headers_lf) - # Test for carriage return - with pytest.raises(InvalidHeader): - r = requests.get(httpbin('get'), headers=headers_cr) - - def test_header_no_leading_space(self, httpbin): - """Ensure headers containing leading whitespace raise - InvalidHeader Error before sending. 
- """ - headers_space = {'foo': ' bar'} - headers_tab = {'foo': ' bar'} - - # Test for whitespace - with pytest.raises(InvalidHeader): - r = requests.get(httpbin('get'), headers=headers_space) - # Test for tab - with pytest.raises(InvalidHeader): - r = requests.get(httpbin('get'), headers=headers_tab) - - @pytest.mark.parametrize('files', ('foo', b'foo', bytearray(b'foo'))) - def test_can_send_objects_with_files(self, httpbin, files): - data = {'a': 'this is a string'} - files = {'b': files} - r = requests.Request('POST', httpbin('post'), data=data, files=files) - p = r.prepare() - assert 'multipart/form-data' in p.headers['Content-Type'] - - def test_can_send_file_object_with_non_string_filename(self, httpbin): - f = io.BytesIO() - f.name = 2 - r = requests.Request('POST', httpbin('post'), files={'f': f}) - p = r.prepare() - - assert 'multipart/form-data' in p.headers['Content-Type'] - - def test_autoset_header_values_are_native(self, httpbin): - data = 'this is a string' - length = '16' - req = requests.Request('POST', httpbin('post'), data=data) - p = req.prepare() - - assert p.headers['Content-Length'] == length - - def test_nonhttp_schemes_dont_check_URLs(self): - test_urls = ( - 'data:image/gif;base64,R0lGODlhAQABAHAAACH5BAUAAAAALAAAAAABAAEAAAICRAEAOw==', - 'file:///etc/passwd', - 'magnet:?xt=urn:btih:be08f00302bc2d1d3cfa3af02024fa647a271431', - ) - for test_url in test_urls: - req = requests.Request('GET', test_url) - preq = req.prepare() - assert test_url == preq.url - - def test_auth_is_stripped_on_http_downgrade(self, httpbin, httpbin_secure, httpbin_ca_bundle): - r = requests.get( - httpbin_secure('redirect-to'), - params={'url': httpbin('get')}, - auth=('user', 'pass'), - verify=httpbin_ca_bundle - ) - assert r.history[0].request.headers['Authorization'] - assert 'Authorization' not in r.request.headers - - def test_auth_is_retained_for_redirect_on_host(self, httpbin): - r = requests.get(httpbin('redirect/1'), auth=('user', 'pass')) - h1 = r.history[0].request.headers['Authorization'] - h2 = r.request.headers['Authorization'] - - assert h1 == h2 - - def test_should_strip_auth_host_change(self): - s = requests.Session() - assert s.should_strip_auth('http://example.com/foo', 'http://another.example.com/') - - def test_should_strip_auth_http_downgrade(self): - s = requests.Session() - assert s.should_strip_auth('https://example.com/foo', 'http://example.com/bar') - - def test_should_strip_auth_https_upgrade(self): - s = requests.Session() - assert not s.should_strip_auth('http://example.com/foo', 'https://example.com/bar') - assert not s.should_strip_auth('http://example.com:80/foo', 'https://example.com/bar') - assert not s.should_strip_auth('http://example.com/foo', 'https://example.com:443/bar') - # Non-standard ports should trigger stripping - assert s.should_strip_auth('http://example.com:8080/foo', 'https://example.com/bar') - assert s.should_strip_auth('http://example.com/foo', 'https://example.com:8443/bar') - - def test_should_strip_auth_port_change(self): - s = requests.Session() - assert s.should_strip_auth('http://example.com:1234/foo', 'https://example.com:4321/bar') - - @pytest.mark.parametrize( - 'old_uri, new_uri', ( - ('https://example.com:443/foo', 'https://example.com/bar'), - ('http://example.com:80/foo', 'http://example.com/bar'), - ('https://example.com/foo', 'https://example.com:443/bar'), - ('http://example.com/foo', 'http://example.com:80/bar') - )) - def test_should_strip_auth_default_port(self, old_uri, new_uri): - s = requests.Session() - assert not 
s.should_strip_auth(old_uri, new_uri) - - def test_manual_redirect_with_partial_body_read(self, httpbin): - s = requests.Session() - r1 = s.get(httpbin('redirect/2'), allow_redirects=False, stream=True) - assert r1.is_redirect - rg = s.resolve_redirects(r1, r1.request, stream=True) - - # read only the first eight bytes of the response body, - # then follow the redirect - r1.iter_content(8) - r2 = next(rg) - assert r2.is_redirect - - # read all of the response via iter_content, - # then follow the redirect - for _ in r2.iter_content(): - pass - r3 = next(rg) - assert not r3.is_redirect - - def test_prepare_body_position_non_stream(self): - data = b'the data' - prep = requests.Request('GET', 'http://example.com', data=data).prepare() - assert prep._body_position is None - - def test_rewind_body(self): - data = io.BytesIO(b'the data') - prep = requests.Request('GET', 'http://example.com', data=data).prepare() - assert prep._body_position == 0 - assert prep.body.read() == b'the data' - - # the data has all been read - assert prep.body.read() == b'' - - # rewind it back - requests.utils.rewind_body(prep) - assert prep.body.read() == b'the data' - - def test_rewind_partially_read_body(self): - data = io.BytesIO(b'the data') - data.read(4) # read some data - prep = requests.Request('GET', 'http://example.com', data=data).prepare() - assert prep._body_position == 4 - assert prep.body.read() == b'data' - - # the data has all been read - assert prep.body.read() == b'' - - # rewind it back - requests.utils.rewind_body(prep) - assert prep.body.read() == b'data' - - def test_rewind_body_no_seek(self): - class BadFileObj: - def __init__(self, data): - self.data = data - - def tell(self): - return 0 - - def __iter__(self): - return - - data = BadFileObj('the data') - prep = requests.Request('GET', 'http://example.com', data=data).prepare() - assert prep._body_position == 0 - - with pytest.raises(UnrewindableBodyError) as e: - requests.utils.rewind_body(prep) - - assert 'Unable to rewind request body' in str(e) - - def test_rewind_body_failed_seek(self): - class BadFileObj: - def __init__(self, data): - self.data = data - - def tell(self): - return 0 - - def seek(self, pos, whence=0): - raise OSError() - - def __iter__(self): - return - - data = BadFileObj('the data') - prep = requests.Request('GET', 'http://example.com', data=data).prepare() - assert prep._body_position == 0 - - with pytest.raises(UnrewindableBodyError) as e: - requests.utils.rewind_body(prep) - - assert 'error occurred when rewinding request body' in str(e) - - def test_rewind_body_failed_tell(self): - class BadFileObj: - def __init__(self, data): - self.data = data - - def tell(self): - raise OSError() - - def __iter__(self): - return - - data = BadFileObj('the data') - prep = requests.Request('GET', 'http://example.com', data=data).prepare() - assert prep._body_position is not None - - with pytest.raises(UnrewindableBodyError) as e: - requests.utils.rewind_body(prep) - - assert 'Unable to rewind request body' in str(e) - - def _patch_adapter_gzipped_redirect(self, session, url): - adapter = session.get_adapter(url=url) - org_build_response = adapter.build_response - self._patched_response = False - - def build_response(*args, **kwargs): - resp = org_build_response(*args, **kwargs) - if not self._patched_response: - resp.raw.headers['content-encoding'] = 'gzip' - self._patched_response = True - return resp - - adapter.build_response = build_response - - def test_redirect_with_wrong_gzipped_header(self, httpbin): - s = 
requests.Session() - url = httpbin('redirect/1') - self._patch_adapter_gzipped_redirect(s, url) - s.get(url) - - @pytest.mark.parametrize( - 'username, password, auth_str', ( - ('test', 'test', 'Basic dGVzdDp0ZXN0'), - (u'имя'.encode('utf-8'), u'пароль'.encode('utf-8'), 'Basic 0LjQvNGPOtC/0LDRgNC+0LvRjA=='), - )) - def test_basic_auth_str_is_always_native(self, username, password, auth_str): - s = _basic_auth_str(username, password) - assert isinstance(s, builtin_str) - assert s == auth_str - - def test_requests_history_is_saved(self, httpbin): - r = requests.get(httpbin('redirect/5')) - total = r.history[-1].history - i = 0 - for item in r.history: - assert item.history == total[0:i] - i += 1 - - def test_json_param_post_content_type_works(self, httpbin): - r = requests.post( - httpbin('post'), - json={'life': 42} - ) - assert r.status_code == 200 - assert 'application/json' in r.request.headers['Content-Type'] - assert {'life': 42} == r.json()['json'] - - def test_json_param_post_should_not_override_data_param(self, httpbin): - r = requests.Request(method='POST', url=httpbin('post'), - data={'stuff': 'elixr'}, - json={'music': 'flute'}) - prep = r.prepare() - assert 'stuff=elixr' == prep.body - - def test_response_iter_lines(self, httpbin): - r = requests.get(httpbin('stream/4'), stream=True) - assert r.status_code == 200 - - it = r.iter_lines() - next(it) - assert len(list(it)) == 3 - - def test_response_context_manager(self, httpbin): - with requests.get(httpbin('stream/4'), stream=True) as response: - assert isinstance(response, requests.Response) - - assert response.raw.closed - - def test_unconsumed_session_response_closes_connection(self, httpbin): - s = requests.session() - - with contextlib.closing(s.get(httpbin('stream/4'), stream=True)) as response: - pass - - assert response._content_consumed is False - assert response.raw.closed - - @pytest.mark.xfail - def test_response_iter_lines_reentrant(self, httpbin): - """Response.iter_lines() is not reentrant safe""" - r = requests.get(httpbin('stream/4'), stream=True) - assert r.status_code == 200 - - next(r.iter_lines()) - assert len(list(r.iter_lines())) == 3 - - def test_session_close_proxy_clear(self, mocker): - proxies = { - 'one': mocker.Mock(), - 'two': mocker.Mock(), - } - session = requests.Session() - mocker.patch.dict(session.adapters['http://'].proxy_manager, proxies) - session.close() - proxies['one'].clear.assert_called_once_with() - proxies['two'].clear.assert_called_once_with() - - def test_proxy_auth(self): - adapter = HTTPAdapter() - headers = adapter.proxy_headers("http://user:pass@httpbin.org") - assert headers == {'Proxy-Authorization': 'Basic dXNlcjpwYXNz'} - - def test_proxy_auth_empty_pass(self): - adapter = HTTPAdapter() - headers = adapter.proxy_headers("http://user:@httpbin.org") - assert headers == {'Proxy-Authorization': 'Basic dXNlcjo='} - - def test_response_json_when_content_is_None(self, httpbin): - r = requests.get(httpbin('/status/204')) - # Make sure r.content is None - r.status_code = 0 - r._content = False - r._content_consumed = False - - assert r.content is None - with pytest.raises(ValueError): - r.json() - - def test_response_without_release_conn(self): - """Test `close` call for non-urllib3-like raw objects. - Should work when `release_conn` attr doesn't exist on `response.raw`.
- """ - resp = requests.Response() - resp.raw = StringIO.StringIO('test') - assert not resp.raw.closed - resp.close() - assert resp.raw.closed - - def test_empty_stream_with_auth_does_not_set_content_length_header(self, httpbin): - """Ensure that a byte stream with size 0 will not set both a Content-Length - and Transfer-Encoding header. - """ - auth = ('user', 'pass') - url = httpbin('post') - file_obj = io.BytesIO(b'') - r = requests.Request('POST', url, auth=auth, data=file_obj) - prepared_request = r.prepare() - assert 'Transfer-Encoding' in prepared_request.headers - assert 'Content-Length' not in prepared_request.headers - - def test_stream_with_auth_does_not_set_transfer_encoding_header(self, httpbin): - """Ensure that a byte stream with size > 0 will not set both a Content-Length - and Transfer-Encoding header. - """ - auth = ('user', 'pass') - url = httpbin('post') - file_obj = io.BytesIO(b'test data') - r = requests.Request('POST', url, auth=auth, data=file_obj) - prepared_request = r.prepare() - assert 'Transfer-Encoding' not in prepared_request.headers - assert 'Content-Length' in prepared_request.headers - - def test_chunked_upload_does_not_set_content_length_header(self, httpbin): - """Ensure that requests with a generator body stream using - Transfer-Encoding: chunked, not a Content-Length header. - """ - data = (i for i in [b'a', b'b', b'c']) - url = httpbin('post') - r = requests.Request('POST', url, data=data) - prepared_request = r.prepare() - assert 'Transfer-Encoding' in prepared_request.headers - assert 'Content-Length' not in prepared_request.headers - - def test_custom_redirect_mixin(self, httpbin): - """Tests a custom mixin to overwrite ``get_redirect_target``. - - Ensures a subclassed ``requests.Session`` can handle a certain type of - malformed redirect responses. - - 1. original request receives a proper response: 302 redirect - 2. following the redirect, a malformed response is given: - status code = HTTP 200 - location = alternate url - 3. 
the custom session catches the edge case and follows the redirect - """ - url_final = httpbin('html') - querystring_malformed = urlencode({'location': url_final}) - url_redirect_malformed = httpbin('response-headers?%s' % querystring_malformed) - querystring_redirect = urlencode({'url': url_redirect_malformed}) - url_redirect = httpbin('redirect-to?%s' % querystring_redirect) - urls_test = [url_redirect, - url_redirect_malformed, - url_final, - ] - - class CustomRedirectSession(requests.Session): - def get_redirect_target(self, resp): - # default behavior - if resp.is_redirect: - return resp.headers['location'] - # edge case - check to see if 'location' is in headers anyways - location = resp.headers.get('location') - if location and (location != resp.url): - return location - return None - - session = CustomRedirectSession() - r = session.get(urls_test[0]) - assert len(r.history) == 2 - assert r.status_code == 200 - assert r.history[0].status_code == 302 - assert r.history[0].is_redirect - assert r.history[1].status_code == 200 - assert not r.history[1].is_redirect - assert r.url == urls_test[2] - - -class TestCaseInsensitiveDict: - - @pytest.mark.parametrize( - 'cid', ( - CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'}), - CaseInsensitiveDict([('Foo', 'foo'), ('BAr', 'bar')]), - CaseInsensitiveDict(FOO='foo', BAr='bar'), - )) - def test_init(self, cid): - assert len(cid) == 2 - assert 'foo' in cid - assert 'bar' in cid - - def test_docstring_example(self): - cid = CaseInsensitiveDict() - cid['Accept'] = 'application/json' - assert cid['aCCEPT'] == 'application/json' - assert list(cid) == ['Accept'] - - def test_len(self): - cid = CaseInsensitiveDict({'a': 'a', 'b': 'b'}) - cid['A'] = 'a' - assert len(cid) == 2 - - def test_getitem(self): - cid = CaseInsensitiveDict({'Spam': 'blueval'}) - assert cid['spam'] == 'blueval' - assert cid['SPAM'] == 'blueval' - - def test_fixes_649(self): - """__setitem__ should behave case-insensitively.""" - cid = CaseInsensitiveDict() - cid['spam'] = 'oneval' - cid['Spam'] = 'twoval' - cid['sPAM'] = 'redval' - cid['SPAM'] = 'blueval' - assert cid['spam'] == 'blueval' - assert cid['SPAM'] == 'blueval' - assert list(cid.keys()) == ['SPAM'] - - def test_delitem(self): - cid = CaseInsensitiveDict() - cid['Spam'] = 'someval' - del cid['sPam'] - assert 'spam' not in cid - assert len(cid) == 0 - - def test_contains(self): - cid = CaseInsensitiveDict() - cid['Spam'] = 'someval' - assert 'Spam' in cid - assert 'spam' in cid - assert 'SPAM' in cid - assert 'sPam' in cid - assert 'notspam' not in cid - - def test_get(self): - cid = CaseInsensitiveDict() - cid['spam'] = 'oneval' - cid['SPAM'] = 'blueval' - assert cid.get('spam') == 'blueval' - assert cid.get('SPAM') == 'blueval' - assert cid.get('sPam') == 'blueval' - assert cid.get('notspam', 'default') == 'default' - - def test_update(self): - cid = CaseInsensitiveDict() - cid['spam'] = 'blueval' - cid.update({'sPam': 'notblueval'}) - assert cid['spam'] == 'notblueval' - cid = CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'}) - cid.update({'fOO': 'anotherfoo', 'bAR': 'anotherbar'}) - assert len(cid) == 2 - assert cid['foo'] == 'anotherfoo' - assert cid['bar'] == 'anotherbar' - - def test_update_retains_unchanged(self): - cid = CaseInsensitiveDict({'foo': 'foo', 'bar': 'bar'}) - cid.update({'foo': 'newfoo'}) - assert cid['bar'] == 'bar' - - def test_iter(self): - cid = CaseInsensitiveDict({'Spam': 'spam', 'Eggs': 'eggs'}) - keys = frozenset(['Spam', 'Eggs']) - assert frozenset(iter(cid)) == keys - - def 
test_equality(self): - cid = CaseInsensitiveDict({'SPAM': 'blueval', 'Eggs': 'redval'}) - othercid = CaseInsensitiveDict({'spam': 'blueval', 'eggs': 'redval'}) - assert cid == othercid - del othercid['spam'] - assert cid != othercid - assert cid == {'spam': 'blueval', 'eggs': 'redval'} - assert cid != object() - - def test_setdefault(self): - cid = CaseInsensitiveDict({'Spam': 'blueval'}) - assert cid.setdefault('spam', 'notblueval') == 'blueval' - assert cid.setdefault('notspam', 'notblueval') == 'notblueval' - - def test_lower_items(self): - cid = CaseInsensitiveDict({ - 'Accept': 'application/json', - 'user-Agent': 'requests', - }) - keyset = frozenset(lowerkey for lowerkey, v in cid.lower_items()) - lowerkeyset = frozenset(['accept', 'user-agent']) - assert keyset == lowerkeyset - - def test_preserve_key_case(self): - cid = CaseInsensitiveDict({ - 'Accept': 'application/json', - 'user-Agent': 'requests', - }) - keyset = frozenset(['Accept', 'user-Agent']) - assert frozenset(i[0] for i in cid.items()) == keyset - assert frozenset(cid.keys()) == keyset - assert frozenset(cid) == keyset - - def test_preserve_last_key_case(self): - cid = CaseInsensitiveDict({ - 'Accept': 'application/json', - 'user-Agent': 'requests', - }) - cid.update({'ACCEPT': 'application/json'}) - cid['USER-AGENT'] = 'requests' - keyset = frozenset(['ACCEPT', 'USER-AGENT']) - assert frozenset(i[0] for i in cid.items()) == keyset - assert frozenset(cid.keys()) == keyset - assert frozenset(cid) == keyset - - def test_copy(self): - cid = CaseInsensitiveDict({ - 'Accept': 'application/json', - 'user-Agent': 'requests', - }) - cid_copy = cid.copy() - assert cid == cid_copy - cid['changed'] = True - assert cid != cid_copy - - -class TestMorselToCookieExpires: - """Tests for morsel_to_cookie when morsel contains expires.""" - - def test_expires_valid_str(self): - """Test case where we convert expires from string time.""" - - morsel = Morsel() - morsel['expires'] = 'Thu, 01-Jan-1970 00:00:01 GMT' - cookie = morsel_to_cookie(morsel) - assert cookie.expires == 1 - - @pytest.mark.parametrize( - 'value, exception', ( - (100, TypeError), - ('woops', ValueError), - )) - def test_expires_invalid_int(self, value, exception): - """Test case where an invalid type is passed for expires.""" - morsel = Morsel() - morsel['expires'] = value - with pytest.raises(exception): - morsel_to_cookie(morsel) - - def test_expires_none(self): - """Test case where expires is None.""" - - morsel = Morsel() - morsel['expires'] = None - cookie = morsel_to_cookie(morsel) - assert cookie.expires is None - - -class TestMorselToCookieMaxAge: - - """Tests for morsel_to_cookie when morsel contains max-age.""" - - def test_max_age_valid_int(self): - """Test case where a valid max age in seconds is passed.""" - - morsel = Morsel() - morsel['max-age'] = 60 - cookie = morsel_to_cookie(morsel) - assert isinstance(cookie.expires, int) - - def test_max_age_invalid_str(self): - """Test case where a invalid max age is passed.""" - - morsel = Morsel() - morsel['max-age'] = 'woops' - with pytest.raises(TypeError): - morsel_to_cookie(morsel) - - -class TestTimeout: - - def test_stream_timeout(self, httpbin): - try: - requests.get(httpbin('delay/10'), timeout=2.0) - except requests.exceptions.Timeout as e: - assert 'Read timed out' in e.args[0].args[0] - - @pytest.mark.parametrize( - 'timeout, error_text', ( - ((3, 4, 5), '(connect, read)'), - ('foo', 'must be an int, float or None'), - )) - def test_invalid_timeout(self, httpbin, timeout, error_text): - with 
pytest.raises(ValueError) as e: - requests.get(httpbin('get'), timeout=timeout) - assert error_text in str(e) - - @pytest.mark.parametrize( - 'timeout', ( - None, - Urllib3Timeout(connect=None, read=None) - )) - def test_none_timeout(self, httpbin, timeout): - """Check that you can set None as a valid timeout value. - - To actually test this behavior, we'd want to check that setting the - timeout to None actually lets the request block past the system default - timeout. However, this would make the test suite unbearably slow. - Instead we verify that setting the timeout to None does not prevent the - request from succeeding. - """ - r = requests.get(httpbin('get'), timeout=timeout) - assert r.status_code == 200 - - @pytest.mark.parametrize( - 'timeout', ( - (None, 0.1), - Urllib3Timeout(connect=None, read=0.1) - )) - def test_read_timeout(self, httpbin, timeout): - try: - requests.get(httpbin('delay/10'), timeout=timeout) - pytest.fail('The recv() request should time out.') - except ReadTimeout: - pass - - @pytest.mark.parametrize( - 'timeout', ( - (0.1, None), - Urllib3Timeout(connect=0.1, read=None) - )) - def test_connect_timeout(self, timeout): - try: - requests.get(TARPIT, timeout=timeout) - pytest.fail('The connect() request should time out.') - except ConnectTimeout as e: - assert isinstance(e, ConnectionError) - assert isinstance(e, Timeout) - - @pytest.mark.parametrize( - 'timeout', ( - (0.1, 0.1), - Urllib3Timeout(connect=0.1, read=0.1) - )) - def test_total_timeout_connect(self, timeout): - try: - requests.get(TARPIT, timeout=timeout) - pytest.fail('The connect() request should time out.') - except ConnectTimeout: - pass - - def test_encoded_methods(self, httpbin): - """See: https://github.com/psf/requests/issues/2316""" - r = requests.request(b'GET', httpbin('get')) - assert r.ok - - -SendCall = collections.namedtuple('SendCall', ('args', 'kwargs')) - - -class RedirectSession(SessionRedirectMixin): - def __init__(self, order_of_redirects): - self.redirects = order_of_redirects - self.calls = [] - self.max_redirects = 30 - self.cookies = {} - self.trust_env = False - - def send(self, *args, **kwargs): - self.calls.append(SendCall(args, kwargs)) - return self.build_response() - - def build_response(self): - request = self.calls[-1].args[0] - r = requests.Response() - - try: - r.status_code = int(self.redirects.pop(0)) - except IndexError: - r.status_code = 200 - - r.headers = CaseInsensitiveDict({'Location': '/'}) - r.raw = self._build_raw() - r.request = request - return r - - def _build_raw(self): - string = StringIO.StringIO('') - setattr(string, 'release_conn', lambda *args: args) - return string - - -def test_json_encodes_as_bytes(): - # urllib3 expects bodies as bytes-like objects - body = {"key": "value"} - p = PreparedRequest() - p.prepare( - method='GET', - url='https://www.example.com/', - json=body - ) - assert isinstance(p.body, bytes) - - -def test_requests_are_updated_each_time(httpbin): - session = RedirectSession([303, 307]) - prep = requests.Request('POST', httpbin('post')).prepare() - r0 = session.send(prep) - assert r0.request.method == 'POST' - assert session.calls[-1] == SendCall((r0.request,), {}) - redirect_generator = session.resolve_redirects(r0, prep) - default_keyword_args = { - 'stream': False, - 'verify': True, - 'cert': None, - 'timeout': None, - 'allow_redirects': False, - 'proxies': {}, - } - for response in redirect_generator: - assert response.request.method == 'GET' - send_call = SendCall((response.request,), default_keyword_args) - assert 
session.calls[-1] == send_call - - -@pytest.mark.parametrize("var,url,proxy", [ - ('http_proxy', 'http://example.com', 'socks5://proxy.com:9876'), - ('https_proxy', 'https://example.com', 'socks5://proxy.com:9876'), - ('all_proxy', 'http://example.com', 'socks5://proxy.com:9876'), - ('all_proxy', 'https://example.com', 'socks5://proxy.com:9876'), -]) -def test_proxy_env_vars_override_default(var, url, proxy): - session = requests.Session() - prep = PreparedRequest() - prep.prepare(method='GET', url=url) - - kwargs = { - var: proxy - } - scheme = urlparse(url).scheme - with override_environ(**kwargs): - proxies = session.rebuild_proxies(prep, {}) - assert scheme in proxies - assert proxies[scheme] == proxy - - -@pytest.mark.parametrize( - 'data', ( - (('a', 'b'), ('c', 'd')), - (('c', 'd'), ('a', 'b')), - (('a', 'b'), ('c', 'd'), ('e', 'f')), - )) -def test_data_argument_accepts_tuples(data): - """Ensure that the data argument will accept tuples of strings - and properly encode them. - """ - p = PreparedRequest() - p.prepare( - method='GET', - url='http://www.example.com', - data=data, - hooks=default_hooks() - ) - assert p.body == urlencode(data) - - -@pytest.mark.parametrize( - 'kwargs', ( - None, - { - 'method': 'GET', - 'url': 'http://www.example.com', - 'data': 'foo=bar', - 'hooks': default_hooks() - }, - { - 'method': 'GET', - 'url': 'http://www.example.com', - 'data': 'foo=bar', - 'hooks': default_hooks(), - 'cookies': {'foo': 'bar'} - }, - { - 'method': 'GET', - 'url': u('http://www.example.com/üniçø∂é') - }, - )) -def test_prepared_copy(kwargs): - p = PreparedRequest() - if kwargs: - p.prepare(**kwargs) - copy = p.copy() - for attr in ('method', 'url', 'headers', '_cookies', 'body', 'hooks'): - assert getattr(p, attr) == getattr(copy, attr) - - -def test_urllib3_retries(httpbin): - from urllib3.util import Retry - s = requests.Session() - s.mount('http://', HTTPAdapter(max_retries=Retry( - total=2, status_forcelist=[500] - ))) - - with pytest.raises(RetryError): - s.get(httpbin('status/500')) - - -def test_urllib3_pool_connection_closed(httpbin): - s = requests.Session() - s.mount('http://', HTTPAdapter(pool_connections=0, pool_maxsize=0)) - - try: - s.get(httpbin('status/200')) - except ConnectionError as e: - assert u"Pool is closed." 
in str(e) - - -class TestPreparingURLs(object): - @pytest.mark.parametrize( - 'url,expected', - ( - ('http://google.com', 'http://google.com/'), - (u'http://ジェーピーニック.jp', u'http://xn--hckqz9bzb1cyrb.jp/'), - (u'http://xn--n3h.net/', u'http://xn--n3h.net/'), - ( - u'http://ジェーピーニック.jp'.encode('utf-8'), - u'http://xn--hckqz9bzb1cyrb.jp/' - ), - ( - u'http://straße.de/straße', - u'http://xn--strae-oqa.de/stra%C3%9Fe' - ), - ( - u'http://straße.de/straße'.encode('utf-8'), - u'http://xn--strae-oqa.de/stra%C3%9Fe' - ), - ( - u'http://Königsgäßchen.de/straße', - u'http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe' - ), - ( - u'http://Königsgäßchen.de/straße'.encode('utf-8'), - u'http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe' - ), - ( - b'http://xn--n3h.net/', - u'http://xn--n3h.net/' - ), - ( - b'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/', - u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/' - ), - ( - u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/', - u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/' - ) - ) - ) - def test_preparing_url(self, url, expected): - - def normalize_percent_encode(x): - # Helper function that normalizes equivalent - # percent-encoded bytes before comparisons - for c in re.findall(r'%[a-fA-F0-9]{2}', x): - x = x.replace(c, c.upper()) - return x - - r = requests.Request('GET', url=url) - p = r.prepare() - assert normalize_percent_encode(p.url) == expected - - @pytest.mark.parametrize( - 'url', - ( - b"http://*.google.com", - b"http://*", - u"http://*.google.com", - u"http://*", - u"http://☃.net/" - ) - ) - def test_preparing_bad_url(self, url): - r = requests.Request('GET', url=url) - with pytest.raises(requests.exceptions.InvalidURL): - r.prepare() - - @pytest.mark.parametrize( - 'url, exception', - ( - ('http://localhost:-1', InvalidURL), - ) - ) - def test_redirecting_to_bad_url(self, httpbin, url, exception): - with pytest.raises(exception): - r = requests.get(httpbin('redirect-to'), params={'url': url}) - - @pytest.mark.parametrize( - 'input, expected', - ( - ( - b"http+unix://%2Fvar%2Frun%2Fsocket/path%7E", - u"http+unix://%2Fvar%2Frun%2Fsocket/path~", - ), - ( - u"http+unix://%2Fvar%2Frun%2Fsocket/path%7E", - u"http+unix://%2Fvar%2Frun%2Fsocket/path~", - ), - ( - b"mailto:user@example.org", - u"mailto:user@example.org", - ), - ( - u"mailto:user@example.org", - u"mailto:user@example.org", - ), - ( - b"data:SSDimaUgUHl0aG9uIQ==", - u"data:SSDimaUgUHl0aG9uIQ==", - ) - ) - ) - def test_url_mutation(self, input, expected): - """ - This test validates that we correctly exclude some URLs from - preparation, and that we handle others. Specifically, it tests that - any URL whose scheme doesn't begin with "http" is left alone, and - those whose scheme *does* begin with "http" are mutated. 
- """ - r = requests.Request('GET', url=input) - p = r.prepare() - assert p.url == expected - - @pytest.mark.parametrize( - 'input, params, expected', - ( - ( - b"http+unix://%2Fvar%2Frun%2Fsocket/path", - {"key": "value"}, - u"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value", - ), - ( - u"http+unix://%2Fvar%2Frun%2Fsocket/path", - {"key": "value"}, - u"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value", - ), - ( - b"mailto:user@example.org", - {"key": "value"}, - u"mailto:user@example.org", - ), - ( - u"mailto:user@example.org", - {"key": "value"}, - u"mailto:user@example.org", - ), - ) - ) - def test_parameters_for_nonstandard_schemes(self, input, params, expected): - """ - Setting parameters for nonstandard schemes is allowed if those schemes - begin with "http", and is forbidden otherwise. - """ - r = requests.Request('GET', url=input, params=params) - p = r.prepare() - assert p.url == expected - - def test_post_json_nan(self, httpbin): - data = {"foo": float("nan")} - with pytest.raises(requests.exceptions.InvalidJSONError): - r = requests.post(httpbin('post'), json=data) \ No newline at end of file diff --git a/vendor/requests/tests/test_structures.py b/vendor/requests/tests/test_structures.py deleted file mode 100644 index e4d2459f..00000000 --- a/vendor/requests/tests/test_structures.py +++ /dev/null @@ -1,76 +0,0 @@ -# -*- coding: utf-8 -*- - -import pytest - -from requests.structures import CaseInsensitiveDict, LookupDict - - -class TestCaseInsensitiveDict: - - @pytest.fixture(autouse=True) - def setup(self): - """CaseInsensitiveDict instance with "Accept" header.""" - self.case_insensitive_dict = CaseInsensitiveDict() - self.case_insensitive_dict['Accept'] = 'application/json' - - def test_list(self): - assert list(self.case_insensitive_dict) == ['Accept'] - - possible_keys = pytest.mark.parametrize('key', ('accept', 'ACCEPT', 'aCcEpT', 'Accept')) - - @possible_keys - def test_getitem(self, key): - assert self.case_insensitive_dict[key] == 'application/json' - - @possible_keys - def test_delitem(self, key): - del self.case_insensitive_dict[key] - assert key not in self.case_insensitive_dict - - def test_lower_items(self): - assert list(self.case_insensitive_dict.lower_items()) == [('accept', 'application/json')] - - def test_repr(self): - assert repr(self.case_insensitive_dict) == "{'Accept': 'application/json'}" - - def test_copy(self): - copy = self.case_insensitive_dict.copy() - assert copy is not self.case_insensitive_dict - assert copy == self.case_insensitive_dict - - @pytest.mark.parametrize( - 'other, result', ( - ({'AccePT': 'application/json'}, True), - ({}, False), - (None, False) - ) - ) - def test_instance_equality(self, other, result): - assert (self.case_insensitive_dict == other) is result - - -class TestLookupDict: - - @pytest.fixture(autouse=True) - def setup(self): - """LookupDict instance with "bad_gateway" attribute.""" - self.lookup_dict = LookupDict('test') - self.lookup_dict.bad_gateway = 502 - - def test_repr(self): - assert repr(self.lookup_dict) == "" - - get_item_parameters = pytest.mark.parametrize( - 'key, value', ( - ('bad_gateway', 502), - ('not_a_key', None) - ) - ) - - @get_item_parameters - def test_getitem(self, key, value): - assert self.lookup_dict[key] == value - - @get_item_parameters - def test_get(self, key, value): - assert self.lookup_dict.get(key) == value diff --git a/vendor/requests/tests/test_testserver.py b/vendor/requests/tests/test_testserver.py deleted file mode 100644 index aac52926..00000000 --- 
a/vendor/requests/tests/test_testserver.py +++ /dev/null @@ -1,166 +0,0 @@ -# -*- coding: utf-8 -*- - -import threading -import socket -import time - -import pytest -import requests -from tests.testserver.server import Server - - -class TestTestServer: - - def test_basic(self): - """messages are sent and received properly""" - question = b"success?" - answer = b"yeah, success" - - def handler(sock): - text = sock.recv(1000) - assert text == question - sock.sendall(answer) - - with Server(handler) as (host, port): - sock = socket.socket() - sock.connect((host, port)) - sock.sendall(question) - text = sock.recv(1000) - assert text == answer - sock.close() - - def test_server_closes(self): - """the server closes when leaving the context manager""" - with Server.basic_response_server() as (host, port): - sock = socket.socket() - sock.connect((host, port)) - - sock.close() - - with pytest.raises(socket.error): - new_sock = socket.socket() - new_sock.connect((host, port)) - - def test_text_response(self): - """the text_response_server sends the given text""" - server = Server.text_response_server( - "HTTP/1.1 200 OK\r\n" + - "Content-Length: 6\r\n" + - "\r\nroflol" - ) - - with server as (host, port): - r = requests.get('http://{}:{}'.format(host, port)) - - assert r.status_code == 200 - assert r.text == u'roflol' - assert r.headers['Content-Length'] == '6' - - def test_basic_response(self): - """the basic response server returns an empty http response""" - with Server.basic_response_server() as (host, port): - r = requests.get('http://{}:{}'.format(host, port)) - assert r.status_code == 200 - assert r.text == u'' - assert r.headers['Content-Length'] == '0' - - def test_basic_waiting_server(self): - """the server waits for the block_server event to be set before closing""" - block_server = threading.Event() - - with Server.basic_response_server(wait_to_close_event=block_server) as (host, port): - sock = socket.socket() - sock.connect((host, port)) - sock.sendall(b'send something') - time.sleep(2.5) - sock.sendall(b'still alive') - block_server.set() # release server block - - def test_multiple_requests(self): - """multiple requests can be served""" - requests_to_handle = 5 - - server = Server.basic_response_server(requests_to_handle=requests_to_handle) - - with server as (host, port): - server_url = 'http://{}:{}'.format(host, port) - for _ in range(requests_to_handle): - r = requests.get(server_url) - assert r.status_code == 200 - - # the (n+1)th request fails - with pytest.raises(requests.exceptions.ConnectionError): - r = requests.get(server_url) - - @pytest.mark.skip(reason="this fails non-deterministically under pytest-xdist") - def test_request_recovery(self): - """can check the requests content""" - # TODO: figure out why this sometimes fails when using pytest-xdist. 
- server = Server.basic_response_server(requests_to_handle=2) - first_request = b'put your hands up in the air' - second_request = b'put your hand down in the floor' - - with server as address: - sock1 = socket.socket() - sock2 = socket.socket() - - sock1.connect(address) - sock1.sendall(first_request) - sock1.close() - - sock2.connect(address) - sock2.sendall(second_request) - sock2.close() - - assert server.handler_results[0] == first_request - assert server.handler_results[1] == second_request - - def test_requests_after_timeout_are_not_received(self): - """the basic response handler times out when receiving requests""" - server = Server.basic_response_server(request_timeout=1) - - with server as address: - sock = socket.socket() - sock.connect(address) - time.sleep(1.5) - sock.sendall(b'hehehe, not received') - sock.close() - - assert server.handler_results[0] == b'' - - def test_request_recovery_with_bigger_timeout(self): - """a bigger timeout can be specified""" - server = Server.basic_response_server(request_timeout=3) - data = b'bananadine' - - with server as address: - sock = socket.socket() - sock.connect(address) - time.sleep(1.5) - sock.sendall(data) - sock.close() - - assert server.handler_results[0] == data - - def test_server_finishes_on_error(self): - """the server thread exits even if an exception exits the context manager""" - server = Server.basic_response_server() - with pytest.raises(Exception): - with server: - raise Exception() - - assert len(server.handler_results) == 0 - - # if the server thread fails to finish, the test suite will hang - # and get killed by the jenkins timeout. - - def test_server_finishes_when_no_connections(self): - """the server thread exits even if there are no connections""" - server = Server.basic_response_server() - with server: - pass - - assert len(server.handler_results) == 0 - - # if the server thread fails to finish, the test suite will hang - # and get killed by the jenkins timeout.
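The deleted suite above drives `tests.testserver.server.Server`: a throwaway TCP server that runs in a thread and is used as a context manager yielding `(host, port)`. A minimal sketch of that pattern, using a hypothetical `MiniServer` rather than the vendored class (whose full implementation appears further down in this diff):

```python
# Minimal sketch of the context-manager server pattern the deleted tests
# rely on. MiniServer is illustrative only; the real Server class adds
# handler bookkeeping, timeouts, and multi-request support.
import socket
import threading


class MiniServer:
    def __init__(self, response: bytes):
        self.response = response
        self.sock = socket.socket()
        self.sock.bind(("localhost", 0))  # port 0 lets the OS pick a free port
        self.sock.listen(1)

    def _serve_one(self):
        conn, _ = self.sock.accept()
        conn.recv(65536)             # consume the request bytes
        conn.sendall(self.response)  # reply with the canned response
        conn.close()

    def __enter__(self):
        threading.Thread(target=self._serve_one, daemon=True).start()
        return self.sock.getsockname()  # (host, port), as the tests unpack it

    def __exit__(self, *exc):
        self.sock.close()
        return False  # propagate any exception raised in the block


with MiniServer(b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n") as (host, port):
    client = socket.socket()
    client.connect((host, port))
    client.sendall(b"GET / HTTP/1.1\r\nHost: example\r\n\r\n")
    assert client.recv(65536).startswith(b"HTTP/1.1 200 OK")
    client.close()
```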
diff --git a/vendor/requests/tests/test_utils.py b/vendor/requests/tests/test_utils.py deleted file mode 100644 index 463516b2..00000000 --- a/vendor/requests/tests/test_utils.py +++ /dev/null @@ -1,783 +0,0 @@ -# -*- coding: utf-8 -*- - -import os -import copy -import filecmp -from io import BytesIO -import zipfile -from collections import deque - -import pytest -from requests import compat -from requests.cookies import RequestsCookieJar -from requests.structures import CaseInsensitiveDict -from requests.utils import ( - address_in_network, dotted_netmask, extract_zipped_paths, - get_auth_from_url, _parse_content_type_header, get_encoding_from_headers, - get_encodings_from_content, get_environ_proxies, - guess_filename, guess_json_utf, is_ipv4_address, - is_valid_cidr, iter_slices, parse_dict_header, - parse_header_links, prepend_scheme_if_needed, - requote_uri, select_proxy, should_bypass_proxies, super_len, - to_key_val_list, to_native_string, - unquote_header_value, unquote_unreserved, - urldefragauth, add_dict_to_cookiejar, set_environ) -from requests._internal_utils import unicode_is_ascii - -from .compat import StringIO, cStringIO - - -class TestSuperLen: - - @pytest.mark.parametrize( - 'stream, value', ( - (StringIO.StringIO, 'Test'), - (BytesIO, b'Test'), - pytest.param(cStringIO, 'Test', - marks=pytest.mark.skipif('cStringIO is None')), - )) - def test_io_streams(self, stream, value): - """Ensures that we properly deal with different kinds of IO streams.""" - assert super_len(stream()) == 0 - assert super_len(stream(value)) == 4 - - def test_super_len_correctly_calculates_len_of_partially_read_file(self): - """Ensure that we handle partially consumed file like objects.""" - s = StringIO.StringIO() - s.write('foobarbogus') - assert super_len(s) == 0 - - @pytest.mark.parametrize('error', [IOError, OSError]) - def test_super_len_handles_files_raising_weird_errors_in_tell(self, error): - """If tell() raises errors, assume the cursor is at position zero.""" - class BoomFile(object): - def __len__(self): - return 5 - - def tell(self): - raise error() - - assert super_len(BoomFile()) == 0 - - @pytest.mark.parametrize('error', [IOError, OSError]) - def test_super_len_tell_ioerror(self, error): - """Ensure that if tell gives an IOError super_len doesn't fail""" - class NoLenBoomFile(object): - def tell(self): - raise error() - - def seek(self, offset, whence): - pass - - assert super_len(NoLenBoomFile()) == 0 - - def test_string(self): - assert super_len('Test') == 4 - - @pytest.mark.parametrize( - 'mode, warnings_num', ( - ('r', 1), - ('rb', 0), - )) - def test_file(self, tmpdir, mode, warnings_num, recwarn): - file_obj = tmpdir.join('test.txt') - file_obj.write('Test') - with file_obj.open(mode) as fd: - assert super_len(fd) == 4 - assert len(recwarn) == warnings_num - - def test_super_len_with__len__(self): - foo = [1,2,3,4] - len_foo = super_len(foo) - assert len_foo == 4 - - def test_super_len_with_no__len__(self): - class LenFile(object): - def __init__(self): - self.len = 5 - - assert super_len(LenFile()) == 5 - - def test_super_len_with_tell(self): - foo = StringIO.StringIO('12345') - assert super_len(foo) == 5 - foo.read(2) - assert super_len(foo) == 3 - - def test_super_len_with_fileno(self): - with open(__file__, 'rb') as f: - length = super_len(f) - file_data = f.read() - assert length == len(file_data) - - def test_super_len_with_no_matches(self): - """Ensure that objects without any length methods default to 0""" - assert super_len(object()) == 0 - - -class TestToKeyValList: 
- - @pytest.mark.parametrize( - 'value, expected', ( - ([('key', 'val')], [('key', 'val')]), - ((('key', 'val'), ), [('key', 'val')]), - ({'key': 'val'}, [('key', 'val')]), - (None, None) - )) - def test_valid(self, value, expected): - assert to_key_val_list(value) == expected - - def test_invalid(self): - with pytest.raises(ValueError): - to_key_val_list('string') - - -class TestUnquoteHeaderValue: - - @pytest.mark.parametrize( - 'value, expected', ( - (None, None), - ('Test', 'Test'), - ('"Test"', 'Test'), - ('"Test\\\\"', 'Test\\'), - ('"\\\\Comp\\Res"', '\\Comp\\Res'), - )) - def test_valid(self, value, expected): - assert unquote_header_value(value) == expected - - def test_is_filename(self): - assert unquote_header_value('"\\\\Comp\\Res"', True) == '\\\\Comp\\Res' - - -class TestGetEnvironProxies: - """Ensures that IP addresses are correctly matched against ranges - in the no_proxy variable. - """ - - @pytest.fixture(autouse=True, params=['no_proxy', 'NO_PROXY']) - def no_proxy(self, request, monkeypatch): - monkeypatch.setenv(request.param, '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1') - - @pytest.mark.parametrize( - 'url', ( - 'http://192.168.0.1:5000/', - 'http://192.168.0.1/', - 'http://172.16.1.1/', - 'http://172.16.1.1:5000/', - 'http://localhost.localdomain:5000/v1.0/', - )) - def test_bypass(self, url): - assert get_environ_proxies(url, no_proxy=None) == {} - - @pytest.mark.parametrize( - 'url', ( - 'http://192.168.1.1:5000/', - 'http://192.168.1.1/', - 'http://www.requests.com/', - )) - def test_not_bypass(self, url): - assert get_environ_proxies(url, no_proxy=None) != {} - - @pytest.mark.parametrize( - 'url', ( - 'http://192.168.1.1:5000/', - 'http://192.168.1.1/', - 'http://www.requests.com/', - )) - def test_bypass_no_proxy_keyword(self, url): - no_proxy = '192.168.1.1,requests.com' - assert get_environ_proxies(url, no_proxy=no_proxy) == {} - - @pytest.mark.parametrize( - 'url', ( - 'http://192.168.0.1:5000/', - 'http://192.168.0.1/', - 'http://172.16.1.1/', - 'http://172.16.1.1:5000/', - 'http://localhost.localdomain:5000/v1.0/', - )) - def test_not_bypass_no_proxy_keyword(self, url, monkeypatch): - # This is testing that the 'no_proxy' argument overrides the - # environment variable 'no_proxy' - monkeypatch.setenv('http_proxy', 'http://proxy.example.com:3128/') - no_proxy = '192.168.1.1,requests.com' - assert get_environ_proxies(url, no_proxy=no_proxy) != {} - - -class TestIsIPv4Address: - - def test_valid(self): - assert is_ipv4_address('8.8.8.8') - - @pytest.mark.parametrize('value', ('8.8.8.8.8', 'localhost.localdomain')) - def test_invalid(self, value): - assert not is_ipv4_address(value) - - -class TestIsValidCIDR: - - def test_valid(self): - assert is_valid_cidr('192.168.1.0/24') - - @pytest.mark.parametrize( - 'value', ( - '8.8.8.8', - '192.168.1.0/a', - '192.168.1.0/128', - '192.168.1.0/-1', - '192.168.1.999/24', - )) - def test_invalid(self, value): - assert not is_valid_cidr(value) - - -class TestAddressInNetwork: - - def test_valid(self): - assert address_in_network('192.168.1.1', '192.168.1.0/24') - - def test_invalid(self): - assert not address_in_network('172.16.0.1', '192.168.1.0/24') - - -class TestGuessFilename: - - @pytest.mark.parametrize( - 'value', (1, type('Fake', (object,), {'name': 1})()), - ) - def test_guess_filename_invalid(self, value): - assert guess_filename(value) is None - - @pytest.mark.parametrize( - 'value, expected_type', ( - (b'value', compat.bytes), - (b'value'.decode('utf-8'), compat.str) - )) - def
test_guess_filename_valid(self, value, expected_type): - obj = type('Fake', (object,), {'name': value})() - result = guess_filename(obj) - assert result == value - assert isinstance(result, expected_type) - - -class TestExtractZippedPaths: - - @pytest.mark.parametrize( - 'path', ( - '/', - __file__, - pytest.__file__, - '/etc/invalid/location', - )) - def test_unzipped_paths_unchanged(self, path): - assert path == extract_zipped_paths(path) - - def test_zipped_paths_extracted(self, tmpdir): - zipped_py = tmpdir.join('test.zip') - with zipfile.ZipFile(zipped_py.strpath, 'w') as f: - f.write(__file__) - - _, name = os.path.splitdrive(__file__) - zipped_path = os.path.join(zipped_py.strpath, name.lstrip(r'\/')) - extracted_path = extract_zipped_paths(zipped_path) - - assert extracted_path != zipped_path - assert os.path.exists(extracted_path) - assert filecmp.cmp(extracted_path, __file__) - - -class TestContentEncodingDetection: - - def test_none(self): - encodings = get_encodings_from_content('') - assert not len(encodings) - - @pytest.mark.parametrize( - 'content', ( - # HTML5 meta charset attribute - '<meta charset="UTF-8">', - # HTML4 pragma directive - '<meta http-equiv="Content-type" content="text/html;charset=UTF-8">', - # XHTML 1.x served with text/html MIME type - '<meta http-equiv="Content-type" content="text/html;charset=UTF-8" />', - # XHTML 1.x served as XML - '<?xml version="1.0" encoding="UTF-8"?>', - )) - def test_pragmas(self, content): - encodings = get_encodings_from_content(content) - assert len(encodings) == 1 - assert encodings[0] == 'UTF-8' - - def test_precedence(self): - content = ''' - <?xml version="1.0" encoding="XML"?> - <meta charset="HTML5"> - <meta http-equiv="Content-type" content="text/html;charset=HTML4" /> - '''.strip() - assert get_encodings_from_content(content) == ['HTML5', 'HTML4', 'XML'] - - -class TestGuessJSONUTF: - - @pytest.mark.parametrize( - 'encoding', ( - 'utf-32', 'utf-8-sig', 'utf-16', 'utf-8', 'utf-16-be', 'utf-16-le', - 'utf-32-be', 'utf-32-le' - )) - def test_encoded(self, encoding): - data = '{}'.encode(encoding) - assert guess_json_utf(data) == encoding - - def test_bad_utf_like_encoding(self): - assert guess_json_utf(b'\x00\x00\x00\x00') is None - - @pytest.mark.parametrize( - ('encoding', 'expected'), ( - ('utf-16-be', 'utf-16'), - ('utf-16-le', 'utf-16'), - ('utf-32-be', 'utf-32'), - ('utf-32-le', 'utf-32') - )) - def test_guess_by_bom(self, encoding, expected): - data = u'\ufeff{}'.encode(encoding) - assert guess_json_utf(data) == expected - - -USER = PASSWORD = "%!*'();:@&=+$,/?#[] " -ENCODED_USER = compat.quote(USER, '') -ENCODED_PASSWORD = compat.quote(PASSWORD, '') - - -@pytest.mark.parametrize( - 'url, auth', ( - ( - 'http://' + ENCODED_USER + ':' + ENCODED_PASSWORD + '@' + - 'request.com/url.html#test', - (USER, PASSWORD) - ), - ( - 'http://user:pass@complex.url.com/path?query=yes', - ('user', 'pass') - ), - ( - 'http://user:pass%20pass@complex.url.com/path?query=yes', - ('user', 'pass pass') - ), - ( - 'http://user:pass pass@complex.url.com/path?query=yes', - ('user', 'pass pass') - ), - ( - 'http://user%25user:pass@complex.url.com/path?query=yes', - ('user%user', 'pass') - ), - ( - 'http://user:pass%23pass@complex.url.com/path?query=yes', - ('user', 'pass#pass') - ), - ( - 'http://complex.url.com/path?query=yes', - ('', '') - ), - )) -def test_get_auth_from_url(url, auth): - assert get_auth_from_url(url) == auth - - -@pytest.mark.parametrize( - 'uri, expected', ( - ( - # Ensure requoting doesn't break expectations - 'http://example.com/fiz?buz=%25ppicture', - 'http://example.com/fiz?buz=%25ppicture', - ), - ( - # Ensure we handle unquoted percent signs in redirects - 'http://example.com/fiz?buz=%ppicture', - 'http://example.com/fiz?buz=%25ppicture', - ), - )) -def test_requote_uri_with_unquoted_percents(uri, expected): - """See:
https://github.com/psf/requests/issues/2356""" - assert requote_uri(uri) == expected - - -@pytest.mark.parametrize( - 'uri, expected', ( - ( - # Illegal bytes - 'http://example.com/?a=%--', - 'http://example.com/?a=%--', - ), - ( - # Reserved characters - 'http://example.com/?a=%300', - 'http://example.com/?a=00', - ) - )) -def test_unquote_unreserved(uri, expected): - assert unquote_unreserved(uri) == expected - - -@pytest.mark.parametrize( - 'mask, expected', ( - (8, '255.0.0.0'), - (24, '255.255.255.0'), - (25, '255.255.255.128'), - )) -def test_dotted_netmask(mask, expected): - assert dotted_netmask(mask) == expected - - -http_proxies = {'http': 'http://http.proxy', - 'http://some.host': 'http://some.host.proxy'} -all_proxies = {'all': 'socks5://http.proxy', - 'all://some.host': 'socks5://some.host.proxy'} -mixed_proxies = {'http': 'http://http.proxy', - 'http://some.host': 'http://some.host.proxy', - 'all': 'socks5://http.proxy'} -@pytest.mark.parametrize( - 'url, expected, proxies', ( - ('hTTp://u:p@Some.Host/path', 'http://some.host.proxy', http_proxies), - ('hTTp://u:p@Other.Host/path', 'http://http.proxy', http_proxies), - ('hTTp:///path', 'http://http.proxy', http_proxies), - ('hTTps://Other.Host', None, http_proxies), - ('file:///etc/motd', None, http_proxies), - - ('hTTp://u:p@Some.Host/path', 'socks5://some.host.proxy', all_proxies), - ('hTTp://u:p@Other.Host/path', 'socks5://http.proxy', all_proxies), - ('hTTp:///path', 'socks5://http.proxy', all_proxies), - ('hTTps://Other.Host', 'socks5://http.proxy', all_proxies), - - ('http://u:p@other.host/path', 'http://http.proxy', mixed_proxies), - ('http://u:p@some.host/path', 'http://some.host.proxy', mixed_proxies), - ('https://u:p@other.host/path', 'socks5://http.proxy', mixed_proxies), - ('https://u:p@some.host/path', 'socks5://http.proxy', mixed_proxies), - ('https://', 'socks5://http.proxy', mixed_proxies), - # XXX: unsure whether this is reasonable behavior - ('file:///etc/motd', 'socks5://http.proxy', all_proxies), - )) -def test_select_proxies(url, expected, proxies): - """Make sure we can select per-host proxies correctly.""" - assert select_proxy(url, proxies) == expected - - -@pytest.mark.parametrize( - 'value, expected', ( - ('foo="is a fish", bar="as well"', {'foo': 'is a fish', 'bar': 'as well'}), - ('key_without_value', {'key_without_value': None}) - )) -def test_parse_dict_header(value, expected): - assert parse_dict_header(value) == expected - - -@pytest.mark.parametrize( - 'value, expected', ( - ( - 'application/xml', - ('application/xml', {}) - ), - ( - 'application/json ; charset=utf-8', - ('application/json', {'charset': 'utf-8'}) - ), - ( - 'application/json ; Charset=utf-8', - ('application/json', {'charset': 'utf-8'}) - ), - ( - 'text/plain', - ('text/plain', {}) - ), - ( - 'multipart/form-data; boundary = something ; boundary2=\'something_else\' ; no_equals ', - ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True}) - ), - ( - 'multipart/form-data; boundary = something ; boundary2="something_else" ; no_equals ', - ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True}) - ), - ( - 'multipart/form-data; boundary = something ; \'boundary2=something_else\' ; no_equals ', - ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True}) - ), - ( - 'multipart/form-data; boundary = something ; "boundary2=something_else" ; no_equals ', - ('multipart/form-data', {'boundary': 'something', 
'boundary2': 'something_else', 'no_equals': True}) - ), - ( - 'application/json ; ; ', - ('application/json', {}) - ) - )) -def test__parse_content_type_header(value, expected): - assert _parse_content_type_header(value) == expected - - -@pytest.mark.parametrize( - 'value, expected', ( - ( - CaseInsensitiveDict(), - None - ), - ( - CaseInsensitiveDict({'content-type': 'application/json; charset=utf-8'}), - 'utf-8' - ), - ( - CaseInsensitiveDict({'content-type': 'text/plain'}), - 'ISO-8859-1' - ), - )) -def test_get_encoding_from_headers(value, expected): - assert get_encoding_from_headers(value) == expected - - -@pytest.mark.parametrize( - 'value, length', ( - ('', 0), - ('T', 1), - ('Test', 4), - ('Cont', 0), - ('Other', -5), - ('Content', None), - )) -def test_iter_slices(value, length): - if length is None or (length <= 0 and len(value) > 0): - # Reads all content at once - assert len(list(iter_slices(value, length))) == 1 - else: - assert len(list(iter_slices(value, 1))) == length - - -@pytest.mark.parametrize( - 'value, expected', ( - ( - '<http:/.../front.jpeg>; rel=front; type="image/jpeg"', - [{'url': 'http:/.../front.jpeg', 'rel': 'front', 'type': 'image/jpeg'}] - ), - ( - '<http:/.../front.jpeg>', - [{'url': 'http:/.../front.jpeg'}] - ), - ( - '<http:/.../front.jpeg>;', - [{'url': 'http:/.../front.jpeg'}] - ), - ( - '<http:/.../front.jpeg>; type="image/jpeg",<http://.../back.jpeg>;', - [ - {'url': 'http:/.../front.jpeg', 'type': 'image/jpeg'}, - {'url': 'http://.../back.jpeg'} - ] - ), - ( - '', - [] - ), - )) -def test_parse_header_links(value, expected): - assert parse_header_links(value) == expected - - -@pytest.mark.parametrize( - 'value, expected', ( - ('example.com/path', 'http://example.com/path'), - ('//example.com/path', 'http://example.com/path'), - )) -def test_prepend_scheme_if_needed(value, expected): - assert prepend_scheme_if_needed(value, 'http') == expected - - -@pytest.mark.parametrize( - 'value, expected', ( - ('T', 'T'), - (b'T', 'T'), - (u'T', 'T'), - )) -def test_to_native_string(value, expected): - assert to_native_string(value) == expected - - -@pytest.mark.parametrize( - 'url, expected', ( - ('http://u:p@example.com/path?a=1#test', 'http://example.com/path?a=1'), - ('http://example.com/path', 'http://example.com/path'), - ('//u:p@example.com/path', '//example.com/path'), - ('//example.com/path', '//example.com/path'), - ('example.com/path', '//example.com/path'), - ('scheme:u:p@example.com/path', 'scheme://example.com/path'), - )) -def test_urldefragauth(url, expected): - assert urldefragauth(url) == expected - - -@pytest.mark.parametrize( - 'url, expected', ( - ('http://192.168.0.1:5000/', True), - ('http://192.168.0.1/', True), - ('http://172.16.1.1/', True), - ('http://172.16.1.1:5000/', True), - ('http://localhost.localdomain:5000/v1.0/', True), - ('http://google.com:6000/', True), - ('http://172.16.1.12/', False), - ('http://172.16.1.12:5000/', False), - ('http://google.com:5000/v1.0/', False), - ('file:///some/path/on/disk', True), - )) -def test_should_bypass_proxies(url, expected, monkeypatch): - """Tests for function should_bypass_proxies to check if proxy - can be bypassed or not - """ - monkeypatch.setenv('no_proxy', '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000') - monkeypatch.setenv('NO_PROXY', '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000') - assert should_bypass_proxies(url, no_proxy=None) == expected - - -@pytest.mark.parametrize( - 'url, expected', ( - ('http://172.16.1.1/', '172.16.1.1'), - ('http://172.16.1.1:5000/', '172.16.1.1'), - ('http://user:pass@172.16.1.1', '172.16.1.1'), -
('http://user:pass@172.16.1.1:5000', '172.16.1.1'), - ('http://hostname/', 'hostname'), - ('http://hostname:5000/', 'hostname'), - ('http://user:pass@hostname', 'hostname'), - ('http://user:pass@hostname:5000', 'hostname'), - )) -def test_should_bypass_proxies_pass_only_hostname(url, expected, mocker): - """The proxy_bypass function should be called with a hostname or IP without - a port number or auth credentials. - """ - proxy_bypass = mocker.patch('requests.utils.proxy_bypass') - should_bypass_proxies(url, no_proxy=None) - proxy_bypass.assert_called_once_with(expected) - - -@pytest.mark.parametrize( - 'cookiejar', ( - compat.cookielib.CookieJar(), - RequestsCookieJar() - )) -def test_add_dict_to_cookiejar(cookiejar): - """Ensure add_dict_to_cookiejar works for - non-RequestsCookieJar CookieJars - """ - cookiedict = {'test': 'cookies', - 'good': 'cookies'} - cj = add_dict_to_cookiejar(cookiejar, cookiedict) - cookies = {cookie.name: cookie.value for cookie in cj} - assert cookiedict == cookies - - -@pytest.mark.parametrize( - 'value, expected', ( - (u'test', True), - (u'æíöû', False), - (u'ジェーピーニック', False), - ) -) -def test_unicode_is_ascii(value, expected): - assert unicode_is_ascii(value) is expected - - -@pytest.mark.parametrize( - 'url, expected', ( - ('http://192.168.0.1:5000/', True), - ('http://192.168.0.1/', True), - ('http://172.16.1.1/', True), - ('http://172.16.1.1:5000/', True), - ('http://localhost.localdomain:5000/v1.0/', True), - ('http://172.16.1.12/', False), - ('http://172.16.1.12:5000/', False), - ('http://google.com:5000/v1.0/', False), - )) -def test_should_bypass_proxies_no_proxy( - url, expected, monkeypatch): - """Tests for function should_bypass_proxies to check if proxy - can be bypassed or not using the 'no_proxy' argument - """ - no_proxy = '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1' - # Test 'no_proxy' argument - assert should_bypass_proxies(url, no_proxy=no_proxy) == expected - - -@pytest.mark.skipif(os.name != 'nt', reason='Test only on Windows') -@pytest.mark.parametrize( - 'url, expected, override', ( - ('http://192.168.0.1:5000/', True, None), - ('http://192.168.0.1/', True, None), - ('http://172.16.1.1/', True, None), - ('http://172.16.1.1:5000/', True, None), - ('http://localhost.localdomain:5000/v1.0/', True, None), - ('http://172.16.1.22/', False, None), - ('http://172.16.1.22:5000/', False, None), - ('http://google.com:5000/v1.0/', False, None), - ('http://mylocalhostname:5000/v1.0/', True, ''), - ('http://192.168.0.1/', False, ''), - )) -def test_should_bypass_proxies_win_registry(url, expected, override, - monkeypatch): - """Tests for function should_bypass_proxies to check if proxy - can be bypassed or not with Windows registry settings - """ - if override is None: - override = '192.168.*;127.0.0.1;localhost.localdomain;172.16.1.1' - if compat.is_py3: - import winreg - else: - import _winreg as winreg - - class RegHandle: - def Close(self): - pass - - ie_settings = RegHandle() - proxyEnableValues = deque([1, "1"]) - - def OpenKey(key, subkey): - return ie_settings - - def QueryValueEx(key, value_name): - if key is ie_settings: - if value_name == 'ProxyEnable': - # this could be a string (REG_SZ) or a 32-bit number (REG_DWORD) - proxyEnableValues.rotate() - return [proxyEnableValues[0]] - elif value_name == 'ProxyOverride': - return [override] - - monkeypatch.setenv('http_proxy', '') - monkeypatch.setenv('https_proxy', '') - monkeypatch.setenv('ftp_proxy', '') - monkeypatch.setenv('no_proxy', '') - monkeypatch.setenv('NO_PROXY', 
'') - monkeypatch.setattr(winreg, 'OpenKey', OpenKey) - monkeypatch.setattr(winreg, 'QueryValueEx', QueryValueEx) - assert should_bypass_proxies(url, None) == expected - - -@pytest.mark.parametrize( - 'env_name, value', ( - ('no_proxy', '192.168.0.0/24,127.0.0.1,localhost.localdomain'), - ('no_proxy', None), - ('a_new_key', '192.168.0.0/24,127.0.0.1,localhost.localdomain'), - ('a_new_key', None), - )) -def test_set_environ(env_name, value): - """Tests set_environ will set environ values and will restore the environ.""" - environ_copy = copy.deepcopy(os.environ) - with set_environ(env_name, value): - assert os.environ.get(env_name) == value - - assert os.environ == environ_copy - - -def test_set_environ_raises_exception(): - """Tests set_environ will raise exceptions in context when the - value parameter is None.""" - with pytest.raises(Exception) as exception: - with set_environ('test1', None): - raise Exception('Expected exception') - - assert 'Expected exception' in str(exception.value) diff --git a/vendor/requests/tests/testserver/server.py b/vendor/requests/tests/testserver/server.py deleted file mode 100644 index 132221f7..00000000 --- a/vendor/requests/tests/testserver/server.py +++ /dev/null @@ -1,128 +0,0 @@ -# -*- coding: utf-8 -*- - -import threading -import socket -import select - - -def consume_socket_content(sock, timeout=0.5): - chunks = 65536 - content = b'' - - while True: - more_to_read = select.select([sock], [], [], timeout)[0] - if not more_to_read: - break - - new_content = sock.recv(chunks) - if not new_content: - break - - content += new_content - - return content - - -class Server(threading.Thread): - """Dummy server used for unit testing""" - WAIT_EVENT_TIMEOUT = 5 - - def __init__(self, handler=None, host='localhost', port=0, requests_to_handle=1, wait_to_close_event=None): - super(Server, self).__init__() - - self.handler = handler or consume_socket_content - self.handler_results = [] - - self.host = host - self.port = port - self.requests_to_handle = requests_to_handle - - self.wait_to_close_event = wait_to_close_event - self.ready_event = threading.Event() - self.stop_event = threading.Event() - - @classmethod - def text_response_server(cls, text, request_timeout=0.5, **kwargs): - def text_response_handler(sock): - request_content = consume_socket_content(sock, timeout=request_timeout) - sock.send(text.encode('utf-8')) - - return request_content - - - return Server(text_response_handler, **kwargs) - - @classmethod - def basic_response_server(cls, **kwargs): - return cls.text_response_server( - "HTTP/1.1 200 OK\r\n" + - "Content-Length: 0\r\n\r\n", - **kwargs - ) - - def run(self): - try: - self.server_sock = self._create_socket_and_bind() - # in case self.port = 0 - self.port = self.server_sock.getsockname()[1] - self.ready_event.set() - self._handle_requests() - - if self.wait_to_close_event: - self.wait_to_close_event.wait(self.WAIT_EVENT_TIMEOUT) - finally: - self.ready_event.set() # just in case of exception - self._close_server_sock_ignore_errors() - self.stop_event.set() - - def _create_socket_and_bind(self): - sock = socket.socket() - sock.bind((self.host, self.port)) - sock.listen(0) - return sock - - def _close_server_sock_ignore_errors(self): - try: - self.server_sock.close() - except IOError: - pass - - def _handle_requests(self): - for _ in range(self.requests_to_handle): - sock = self._accept_connection() - if not sock: - break - - handler_result = self.handler(sock) - - self.handler_results.append(handler_result) - sock.close() - - def
_accept_connection(self): - try: - ready, _, _ = select.select([self.server_sock], [], [], self.WAIT_EVENT_TIMEOUT) - if not ready: - return None - - return self.server_sock.accept()[0] - except (select.error, socket.error): - return None - - def __enter__(self): - self.start() - self.ready_event.wait(self.WAIT_EVENT_TIMEOUT) - return self.host, self.port - - def __exit__(self, exc_type, exc_value, traceback): - if exc_type is None: - self.stop_event.wait(self.WAIT_EVENT_TIMEOUT) - else: - if self.wait_to_close_event: - # avoid server from waiting for event timeouts - # if an exception is found in the main thread - self.wait_to_close_event.set() - - # ensure server thread doesn't get stuck waiting for connections - self._close_server_sock_ignore_errors() - self.join() - return False # allow exceptions to propagate diff --git a/vendor/requests/tests/utils.py b/vendor/requests/tests/utils.py deleted file mode 100644 index 9b797fd4..00000000 --- a/vendor/requests/tests/utils.py +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- - -import contextlib -import os - - -@contextlib.contextmanager -def override_environ(**kwargs): - save_env = dict(os.environ) - for key, value in kwargs.items(): - if value is None: - del os.environ[key] - else: - os.environ[key] = value - try: - yield - finally: - os.environ.clear() - os.environ.update(save_env) diff --git a/vendor/requests/tox.ini b/vendor/requests/tox.ini deleted file mode 100644 index 5e3d5377..00000000 --- a/vendor/requests/tox.ini +++ /dev/null @@ -1,18 +0,0 @@ -[tox] -envlist = py{27,36,37,38,39}-{default,use_chardet_on_py3} - -[testenv] -deps = -rrequirements-dev.txt -extras = - security - socks -commands = - pytest tests - -[testenv:default] - -[testenv:use_chardet_on_py3] -extras = - security - socks - use_chardet_on_py3 diff --git a/vendor/virtualenv/.coveragerc b/vendor/virtualenv/.coveragerc index 2d64afd7..a1dd90c5 100644 --- a/vendor/virtualenv/.coveragerc +++ b/vendor/virtualenv/.coveragerc @@ -18,7 +18,8 @@ source = src .tox/*/lib/python*/site-packages .tox/pypy*/site-packages - .tox\*\Lib\site-packages\ + .tox\*\Lib\site-packages + .tox\py\site-packages */src *\src diff --git a/vendor/virtualenv/.github/CONTRIBUTING.md b/vendor/virtualenv/.github/CONTRIBUTING.md index 6c292822..22b63533 100644 --- a/vendor/virtualenv/.github/CONTRIBUTING.md +++ b/vendor/virtualenv/.github/CONTRIBUTING.md @@ -1,11 +1,11 @@ -# Contributing to ``virtualenv`` +# Contributing to `virtualenv` -Thank you for your interest in contributing to virtualenv! There are many ways to contribute, and we appreciate all of them. -As a reminder, all contributors are expected to follow the [PSF Code of Conduct][coc]. +Thank you for your interest in contributing to virtualenv! There are many ways to contribute, and we appreciate all of +them. As a reminder, all contributors are expected to follow the [PSF Code of Conduct][coc]. [coc]: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md ## Development Documentation Our [development documentation](https://virtualenv.pypa.io/en/latest/development.html#development) contains details on -how to get started with contributing to ``virtualenv``, and details of our development processes. +how to get started with contributing to `virtualenv`, and details of our development processes. 
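The tests/utils.py module removed above supplied an override_environ context manager: it swaps environment variables in for the duration of a with block, removes any key mapped to None, and restores the saved environment on exit. A self-contained usage sketch that restates the helper as deleted above (the proxy URL is a made-up example value):

```python
# override_environ as defined in the deleted tests/utils.py, plus a
# usage example. The proxy address below is hypothetical.
import contextlib
import os


@contextlib.contextmanager
def override_environ(**kwargs):
    save_env = dict(os.environ)      # snapshot the current environment
    for key, value in kwargs.items():
        if value is None:
            del os.environ[key]      # None means "remove this variable"
        else:
            os.environ[key] = value
    try:
        yield
    finally:
        os.environ.clear()
        os.environ.update(save_env)  # restore the snapshot


os.environ["NO_PROXY"] = "localhost"  # key must exist before None can remove it

with override_environ(HTTP_PROXY="http://proxy.example:3128", NO_PROXY=None):
    assert os.environ["HTTP_PROXY"] == "http://proxy.example:3128"
    assert "NO_PROXY" not in os.environ  # removed for the duration of the block

assert os.environ["NO_PROXY"] == "localhost"  # saved environment restored
```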
diff --git a/vendor/virtualenv/.github/FUNDING.yml b/vendor/virtualenv/.github/FUNDING.yml new file mode 100644 index 00000000..91b483e4 --- /dev/null +++ b/vendor/virtualenv/.github/FUNDING.yml @@ -0,0 +1 @@ +tidelift: "pypi/virtualenv" diff --git a/vendor/virtualenv/.github/ISSUE_TEMPLATE/bug-report.md b/vendor/virtualenv/.github/ISSUE_TEMPLATE/bug-report.md index 8b576e1e..674d2484 100644 --- a/vendor/virtualenv/.github/ISSUE_TEMPLATE/bug-report.md +++ b/vendor/virtualenv/.github/ISSUE_TEMPLATE/bug-report.md @@ -5,8 +5,9 @@ Describe what's the expected behaviour and what you're observing. **Environment** Provide at least: + - OS: -- ``pip list`` of the host python where ``virtualenv`` is installed: +- `pip list` of the host python where `virtualenv` is installed: ```console diff --git a/vendor/virtualenv/.github/ISSUE_TEMPLATE/bug_report.md b/vendor/virtualenv/.github/ISSUE_TEMPLATE/bug_report.md index 3a8b35e8..10d03bf5 100644 --- a/vendor/virtualenv/.github/ISSUE_TEMPLATE/bug_report.md +++ b/vendor/virtualenv/.github/ISSUE_TEMPLATE/bug_report.md @@ -1,10 +1,9 @@ --- name: Bug report about: Create a report to help us improve -title: '' +title: "" labels: bug -assignees: '' - +assignees: "" --- **Issue** @@ -14,8 +13,9 @@ Describe what's the expected behaviour and what you're observing. **Environment** Provide at least: + - OS: -- ``pip list`` of the host python where ``virtualenv`` is installed: +- `pip list` of the host python where `virtualenv` is installed: ```console diff --git a/vendor/virtualenv/.github/ISSUE_TEMPLATE/config.yml b/vendor/virtualenv/.github/ISSUE_TEMPLATE/config.yml index 8143f9d5..1cc9cf63 100644 --- a/vendor/virtualenv/.github/ISSUE_TEMPLATE/config.yml +++ b/vendor/virtualenv/.github/ISSUE_TEMPLATE/config.yml @@ -1,13 +1,13 @@ # Ref: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository#configuring-the-template-chooser -blank_issues_enabled: true # default +blank_issues_enabled: true # default contact_links: -- name: '💬 pypa/virtualenv @ Discord' - url: https://discord.gg/pypa - about: Chat with the devs -- name: 🤷💻🤦 Discourse - url: https://discuss.python.org/c/packaging - about: | - Please ask typical Q&A here: general ideas for Python packaging, questions about structuring projects and so on -- name: 📝 PSF Code of Conduct - url: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md - about: ❤ Be nice to other members of the community. ☮ Behave. + - name: "💬 pypa/virtualenv @ Discord" + url: https://discord.gg/pypa + about: Chat with the devs + - name: 🤷💻🤦 Discourse + url: https://discuss.python.org/c/packaging + about: | + Please ask typical Q&A here: general ideas for Python packaging, questions about structuring projects and so on + - name: 📝 PSF Code of Conduct + url: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md + about: ❤ Be nice to other members of the community. ☮ Behave.
diff --git a/vendor/virtualenv/.github/ISSUE_TEMPLATE/feature-request.md b/vendor/virtualenv/.github/ISSUE_TEMPLATE/feature-request.md index ef66da0c..c5c3864f 100644 --- a/vendor/virtualenv/.github/ISSUE_TEMPLATE/feature-request.md +++ b/vendor/virtualenv/.github/ISSUE_TEMPLATE/feature-request.md @@ -1,22 +1,25 @@ --- name: Feature request about: Suggest an enhancement for this project -title: '' +title: "" labels: enhancement -assignees: '' - +assignees: "" --- **What's the problem this feature will solve?** + **Describe the solution you'd like** + **Alternative Solutions** + **Additional context** + diff --git a/vendor/virtualenv/.github/PULL_REQUEST_TEMPLATE.md b/vendor/virtualenv/.github/PULL_REQUEST_TEMPLATE.md index d4a1cd8c..4a6aabdd 100644 --- a/vendor/virtualenv/.github/PULL_REQUEST_TEMPLATE.md +++ b/vendor/virtualenv/.github/PULL_REQUEST_TEMPLATE.md @@ -1,8 +1,7 @@ -### Thanks for contributing, make sure you address all the checklists (for details on how see -[development documentation](https://virtualenv.pypa.io/en/latest/development.html#development))! +### Thanks for contributing, make sure you address all the checklists (for details on how see [development documentation](https://virtualenv.pypa.io/en/latest/development.html#development))! -- [ ] ran the linter to address style issues (``tox -e fix_lint``) +- [ ] ran the linter to address style issues (`tox -e fix_lint`) - [ ] wrote descriptive pull request text - [ ] ensured there are test(s) validating the fix -- [ ] added news fragment in ``docs/changelog`` folder +- [ ] added news fragment in `docs/changelog` folder - [ ] updated/extended the documentation diff --git a/vendor/virtualenv/.github/SECURITY.md b/vendor/virtualenv/.github/SECURITY.md new file mode 100644 index 00000000..90836425 --- /dev/null +++ b/vendor/virtualenv/.github/SECURITY.md @@ -0,0 +1,13 @@ +# Security Policy + +## Supported Versions + +| Version | Supported | +| --------- | ------------------ | +| 20.15.1 + | :white_check_mark: | +| < 20.15.1 | :x: | + +## Reporting a Vulnerability + +To report a security vulnerability, please use the [Tidelift security contact](https://tidelift.com/security). Tidelift +will coordinate the fix and disclosure. 
diff --git a/vendor/virtualenv/.github/dependabot.yml b/vendor/virtualenv/.github/dependabot.yml new file mode 100644 index 00000000..12301490 --- /dev/null +++ b/vendor/virtualenv/.github/dependabot.yml @@ -0,0 +1,6 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "daily" diff --git a/vendor/virtualenv/.github/workflows/check.yml b/vendor/virtualenv/.github/workflows/check.yml index fc8ee106..4bd3f7da 100644 --- a/vendor/virtualenv/.github/workflows/check.yml +++ b/vendor/virtualenv/.github/workflows/check.yml @@ -10,13 +10,6 @@ concurrency: cancel-in-progress: true jobs: - lint: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 - - uses: pre-commit/action@v2.0.3 - test: name: test ${{ matrix.py }} - ${{ matrix.os }} runs-on: ${{ matrix.os }} @@ -24,23 +17,25 @@ jobs: fail-fast: false matrix: py: + - "3.11.0-beta.5" - "3.10" - "3.9" - "3.8" - "3.7" - "3.6" - - "3.5" + - pypy-3.9-v7.3.9 + - pypy-3.8-v7.3.9 + - pypy-3.7-v7.3.9 - pypy-3.6-v7.3.3 - - pypy-3.7-v7.3.7 - - pypy-3.8-v7.3.7 - - "2.7" - - pypy-2.7 os: - ubuntu-20.04 - - macos-10.15 + - macos-12 - windows-2022 + exclude: + - { os: macos-12, py: "pypy-3.6-v7.3.3" } # PyPy 3.6 does not support macOS 11/12 include: - - { os: macos-10.15, py: brew@py3 } + - { os: macos-12, py: "brew@3.9" } + - { os: macos-12, py: "brew@3.8" } steps: - name: Install OS dependencies run: | @@ -48,54 +43,52 @@ jobs: echo "try $i" && \ ${{ runner.os == 'Linux' && 'sudo apt-get update -y && sudo apt-get install snapd fish csh -y' || true }} && \ ${{ runner.os == 'Linux' && 'sudo apt-get install curl wget -y' || true }} && \ - ${{ runner.os == 'Linux' && 'nushell_url=$(curl -s https://api.github.com/repos/nushell/nushell/releases/latest | grep "browser_" | cut -d\" -f4 | grep .tar.gz)' || true }} && \ + ${{ runner.os == 'Linux' && 'nushell_url=$(curl -s https://api.github.com/repos/nushell/nushell/releases/latest | grep "browser_" | grep "x86_64" | grep "linux" | grep "gnu" | cut -d\" -f4 )' || true }} && \ ${{ runner.os == 'Linux' && 'wget -O nushell.tar.gz $nushell_url' || true }} && \ - ${{ runner.os == 'Linux' && 'tar -zxf nushell.tar.gz --one-top-level=nushell --strip-components=2' || true }} && \ - ${{ runner.os == 'Linux' && 'sudo cp nushell/nu /usr/bin' || true }} && \ + ${{ runner.os == 'Linux' && 'tar -zxf nushell.tar.gz' || true }} && \ + ${{ runner.os == 'Linux' && 'sudo cp nu /usr/bin' || true }} && \ ${{ runner.os == 'Windows' && 'choco install nushell' || true }} && \ - ${{ runner.os == 'macOS' && 'brew update && brew install fish tcsh nushell' || true }} && \ + ${{ runner.os == 'macOS' && 'brew install fish tcsh nushell' || true }} && \ exit 0 || true; done exit 1 shell: bash - name: Setup python for tox - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: "3.10" - - name: Install tox - run: python -m pip install tox - - uses: actions/checkout@v2 - with: - fetch-depth: 0 - - name: Use local virtualenv for tox - run: python -m pip install . 
- name: Install Python 2 for cross test - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: "2.7" + - name: Setup brew python for test ${{ matrix.py }} + if: startsWith(matrix.py,'brew@') + run: | + set -e + PY=$(echo '${{ matrix.py }}' | cut -c 6-) + brew upgrade python@$PY || brew install python@$PY + echo "/usr/local/opt/python@$PY/libexec/bin" >>"${GITHUB_PATH}" + shell: bash - name: Setup python for test ${{ matrix.py }} - if: "!( startsWith(matrix.py,'brew@py') || endsWith(matrix.py, '-dev') )" - uses: actions/setup-python@v2 + if: "!( startsWith(matrix.py,'brew@') || endsWith(matrix.py, '-dev') )" + uses: actions/setup-python@v4 with: python-version: ${{ matrix.py }} - - name: Setup brew python for test ${{ matrix.py }} - if: startsWith(matrix.py,'brew@py') - run: | - import subprocess; import codecs; import os - subprocess.check_call(["bash", "-c", "brew upgrade python@3 || brew install python@3"]) - with codecs.open(os.environ["GITHUB_PATH"], "a", "utf-8") as file_handler: - file_handler.write("/usr/local/opt/python@3") - shell: python - name: Pick environment to run run: | - import platform; import os; import sys; import codecs - cpy = platform.python_implementation() == "CPython" - base =("{}{}{}" if cpy else "{}{}").format("py" if cpy else "pypy", *sys.version_info[0:2]) - env = "TOXENV={}\n".format(base) - print("Picked:\n{}for{}".format(env, sys.version)) - with codecs.open(os.environ["GITHUB_ENV"], "a", "utf-8") as file_handler: - file_handler.write(env) + import os; import platform; import sys; from pathlib import Path + env = f'TOXENV=py{"" if platform.python_implementation() == "CPython" else "py"}3{sys.version_info.minor}' + print(f"Picked: {env} for {sys.version} based of {sys.executable}") + with Path(os.environ["GITHUB_ENV"]).open("ta") as file_handler: + file_handler.write(env) shell: python + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Install tox + run: python -m pip install tox + - name: Use local virtualenv for tox + run: python -m pip install . - name: Setup test suite run: tox -vv --notest - name: Run test suite @@ -105,23 +98,25 @@ jobs: CI_RUN: "yes" DIFF_AGAINST: HEAD - name: Rename coverage report file - run: import os; import sys; os.rename(".tox/.coverage.{}".format(os.environ['TOXENV']), ".tox/.coverage.{}-{}".format(os.environ['TOXENV'], sys.platform)) + run: | + import os; import sys + os.rename(f".tox/.coverage.{os.environ['TOXENV']}", f".tox/.coverage.{os.environ['TOXENV']}-{sys.platform}") shell: python - name: Upload coverage data - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage-data path: ".tox/.coverage.*" coverage: name: Combine coverage - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 needs: test steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: fetch-depth: 0 - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install tox @@ -133,14 +128,14 @@ jobs: - name: Build package run: pyproject-build --wheel . 
- name: Download coverage data - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v3 with: name: coverage-data path: .tox - name: Combine and report coverage run: tox -e coverage - name: Upload HTML report - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: html-report path: .tox/htmlcov @@ -152,7 +147,7 @@ jobs: fail-fast: false matrix: os: - - ubuntu-20.04 + - ubuntu-22.04 - windows-2022 tox_env: - dev @@ -164,11 +159,11 @@ jobs: - { os: windows-2022, tox_env: readme } - { os: windows-2022, tox_env: docs } steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: fetch-depth: 0 - name: Setup Python "3.10" - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install tox @@ -180,22 +175,22 @@ jobs: publish: if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') - needs: [check, coverage, lint] - runs-on: ubuntu-20.04 + needs: [check, coverage] + runs-on: ubuntu-22.04 steps: - name: Setup python to build package - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install https://pypi.org/project/build run: python -m pip install build - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: fetch-depth: 0 - name: Build sdist and wheel run: python -m build -s -w . -o dist - name: Publish to PyPi - uses: pypa/gh-action-pypi-publish@master + uses: pypa/gh-action-pypi-publish@v1.5.1 with: skip_existing: true user: __token__ diff --git a/vendor/virtualenv/.pre-commit-config.yaml b/vendor/virtualenv/.pre-commit-config.yaml index eca8489e..7ce43ab1 100644 --- a/vendor/virtualenv/.pre-commit-config.yaml +++ b/vendor/virtualenv/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.1.0 + rev: v4.3.0 hooks: - id: check-ast - id: check-builtin-literals @@ -12,15 +12,19 @@ repos: - id: end-of-file-fixer - id: trailing-whitespace - repo: https://github.com/asottile/pyupgrade - rev: v2.31.1 + rev: v2.37.3 hooks: - id: pyupgrade + args: ["--py36-plus"] + exclude: "^(src/virtualenv/create/via_global_ref/_virtualenv.py|src/virtualenv/create/via_global_ref/builtin/python2/site.py|src/virtualenv/discovery/py_info.py|tasks/__main__zipapp.py)$" + - id: pyupgrade + files: "^(src/virtualenv/create/via_global_ref/_virtualenv.py|src/virtualenv/create/via_global_ref/builtin/python2/site.py|src/virtualenv/discovery/py_info.py|tasks/__main__zipapp.py)$" - repo: https://github.com/PyCQA/isort rev: 5.10.1 hooks: - id: isort - repo: https://github.com/psf/black - rev: 22.3.0 + rev: 22.6.0 hooks: - id: black args: [--safe] @@ -28,7 +32,7 @@ repos: rev: v1.12.1 hooks: - id: blacken-docs - additional_dependencies: [black==21.12b0] + additional_dependencies: [black==22.6] - repo: https://github.com/pre-commit/pygrep-hooks rev: v1.9.0 hooks: @@ -39,12 +43,19 @@ repos: - id: tox-ini-fmt args: ["-p", "fix_lint"] - repo: https://github.com/asottile/setup-cfg-fmt - rev: v1.20.1 + rev: v2.0.0 hooks: - id: setup-cfg-fmt - args: [--min-py3-version, "3.5", "--max-py-version", "3.10"] + args: [--min-py3-version, "3.6 ", "--max-py-version", "3.10"] - repo: https://github.com/PyCQA/flake8 - rev: "4.0.1" + rev: "5.0.4" hooks: - id: flake8 - additional_dependencies: ["flake8-bugbear == 21.11.29"] + additional_dependencies: + - flake8-bugbear==22.7.1 + - flake8-comprehensions==3.10 + - flake8-pytest-style==1.6 + - flake8-spellcheck==0.28 + - flake8-unused-arguments==0.0.11 + - flake8-noqa==1.2.8 + - 
pep8-naming==0.13.1 diff --git a/vendor/virtualenv/docs/changelog.rst b/vendor/virtualenv/docs/changelog.rst index 91c1ece1..5c4542cd 100644 --- a/vendor/virtualenv/docs/changelog.rst +++ b/vendor/virtualenv/docs/changelog.rst @@ -5,6 +5,65 @@ Release History .. towncrier release notes start +v20.16.3 (2022-08-04) +--------------------- + +Bugfixes - 20.16.3 +~~~~~~~~~~~~~~~~~~ +- Upgrade embedded pip to ``22.2.2`` from ``22.2.1`` and setuptools to ``63.4.1`` from ``63.2.0`` - by :user:`gaborbernat`. (`#2395 <https://github.com/pypa/virtualenv/issues/2395>`_) + + +v20.16.2 (2022-07-27) +--------------------- + +Bugfixes - 20.16.2 +~~~~~~~~~~~~~~~~~~ +- Bump embedded pip from ``22.2`` to ``22.2.1`` - by :user:`gaborbernat`. (`#2391 <https://github.com/pypa/virtualenv/issues/2391>`_) + + +v20.16.1 (2022-07-26) +--------------------- + +Features - 20.16.1 +~~~~~~~~~~~~~~~~~~ +- Update Nushell activation scripts to version 0.67 - by :user:`kubouch`. (`#2386 <https://github.com/pypa/virtualenv/issues/2386>`_) + + +v20.16.0 (2022-07-25) +--------------------- + +Features - 20.16.0 +~~~~~~~~~~~~~~~~~~ +- Drop support for running under Python 2 (still can generate Python 2 environments) - by :user:`gaborbernat`. (`#2382 <https://github.com/pypa/virtualenv/issues/2382>`_) +- Upgrade embedded pip to ``22.2`` from ``22.1.2`` and setuptools to ``63.2.0`` from ``62.6.0`` - + by :user:`gaborbernat`. (`#2383 <https://github.com/pypa/virtualenv/issues/2383>`_) + + +v20.15.1 (2022-06-28) +--------------------- + +Bugfixes - 20.15.1 +~~~~~~~~~~~~~~~~~~ +- Fix the incorrect operation when ``setuptools`` plugins output something into ``stdout``. (`#2335 <https://github.com/pypa/virtualenv/issues/2335>`_) +- CPython3Windows creator ignores missing ``DLLs`` dir. (`#2368 <https://github.com/pypa/virtualenv/issues/2368>`_) + + +v20.15.0 (2022-06-25) +--------------------- + +Features - 20.15.0 +~~~~~~~~~~~~~~~~~~ +- Support for Windows embeddable Python package: includes ``python.zip`` in the creator sources + - by :user:`reksarka`. (`#1774 <https://github.com/pypa/virtualenv/issues/1774>`_) + +Bugfixes - 20.15.0 +~~~~~~~~~~~~~~~~~~ +- Upgrade embedded setuptools to ``62.3.3`` from ``62.6.0`` and pip to ``22.1.2`` from ``22.0.4`` + - by :user:`gaborbernat`. (`#2348 <https://github.com/pypa/virtualenv/issues/2348>`_) +- Use ``shlex.quote`` instead of deprecated ``pipes.quote`` in Python 3 - by :user:`frenzymadness`. (`#2351 <https://github.com/pypa/virtualenv/issues/2351>`_) +- Fix Windows PyPy 3.6 - by :user:`reksarka`. (`#2363 <https://github.com/pypa/virtualenv/issues/2363>`_) + + v20.14.1 (2022-04-11) --------------------- @@ -967,6 +1026,7 @@ v20.0.0b1 (2020-01-28) * First public release of the rewrite. Everything is brand new and just added. * ``--download`` defaults to ``False`` +* No longer replaces builtin ``site`` module with `custom version baked within virtualenv code itself `_. A simple shim module is used to fix up things on Python 2 only. .. warning:: diff --git a/vendor/virtualenv/docs/conf.py b/vendor/virtualenv/docs/conf.py index b981af1d..455dc1c6 100644 --- a/vendor/virtualenv/docs/conf.py +++ b/vendor/virtualenv/docs/conf.py @@ -78,9 +78,9 @@ def setup(app): if cli_interface_doctree.exists(): cli_interface_doctree.unlink() - HERE = Path(__file__).parent - if str(HERE) not in sys.path: - sys.path.append(str(HERE)) + here = Path(__file__).parent + if str(here) not in sys.path: + sys.path.append(str(here)) # noinspection PyUnresolvedReferences from render_cli import CliTable, literal_data diff --git a/vendor/virtualenv/docs/extend.rst b/vendor/virtualenv/docs/extend.rst index afd4e80b..ba2a6fe7 100644 --- a/vendor/virtualenv/docs/extend.rst +++ b/vendor/virtualenv/docs/extend.rst @@ -7,13 +7,6 @@ Extend functionality - package it as a python library, - install it alongside the virtual environment. -.. warning:: - - The public API of some of these components is still to be finalized, consider the current interface a beta one - until we get some feedback on how well we planned ahead.
We expect to do this by end of Q3 2020. Consider the class - interface explained below as initial draft proposal. We reserve the right to change the API until then, however such - changes will be communicated in a timely fashion, and you'll have time to migrate. Thank you for your understanding. - Python discovery ---------------- @@ -44,7 +37,7 @@ Creators are what actually perform the creation of a virtual environment. The bu all achieve this by referencing a global install; but would be just as valid for a creator to install a brand new entire python under the target path; or one could add additional creators that can create virtual environments for other python implementations, such as IronPython. They must be registered under an entry point with key -``virtualenv.discovery`` , and the class must implement :class:`virtualenv.create.creator.Creator`: +``virtualenv.create`` , and the class must implement :class:`virtualenv.create.creator.Creator`: .. code-block:: ini diff --git a/vendor/virtualenv/docs/installation.rst b/vendor/virtualenv/docs/installation.rst index 19a37dfe..46956fe2 100644 --- a/vendor/virtualenv/docs/installation.rst +++ b/vendor/virtualenv/docs/installation.rst @@ -97,7 +97,8 @@ supported on a best effort approach. CPython is shipped in multiple forms, and each OS repackages it, often applying some customization along the way. Therefore we cannot say universally that we support all platforms, but rather specify some we test against. In case of ones not specified here the support is unknown, though likely will work. If you find some cases please open a feature -request on our issue tracker. +request on our issue tracker. Note, as of ``20.16.0`` we no longer support running under Python less than 3.6, however +we still support creating environments for 2.7 and 3.5.
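The extend.rst fix above corrects the entry-point group for creator plugins to ``virtualenv.create``. A rough sketch of how plugins registered under such a group can be discovered with importlib.metadata; ``load_creator_plugins`` is an illustrative name, not virtualenv's own loader (which lives under ``virtualenv.run.plugin``):

```python
# Hypothetical sketch: discovering creator plugins advertised under the
# "virtualenv.create" entry-point group, as the corrected docs describe.
from importlib.metadata import entry_points


def load_creator_plugins():
    eps = entry_points()
    if hasattr(eps, "select"):   # Python 3.10+: EntryPoints.select API
        group = eps.select(group="virtualenv.create")
    else:                        # Python 3.8/3.9: dict of group -> entries
        group = eps.get("virtualenv.create", [])
    return {ep.name: ep.load() for ep in group}
```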
Linux ~~~~~ diff --git a/vendor/virtualenv/docs/render_cli.py b/vendor/virtualenv/docs/render_cli.py index f905206a..e45a37ea 100644 --- a/vendor/virtualenv/docs/render_cli.py +++ b/vendor/virtualenv/docs/render_cli.py @@ -23,7 +23,7 @@ class CliTable(SphinxDirective): name = "table_cli" - option_spec = dict(module=unchanged_required, func=unchanged_required) + option_spec = {"module": unchanged_required, "func": unchanged_required} def run(self): module_name, attr_name = self.options["module"], self.options["func"] @@ -35,7 +35,7 @@ def run(self): for i in core_result["action_groups"]: content.append(self._build_table(i["options"], i["title"], i["description"])) for key, name_to_class in CUSTOM.items(): - section = n.section("", ids=["section-{}".format(key)]) + section = n.section("", ids=[f"section-{key}"]) title = n.title("", key) section += title self.state.document.note_implicit_target(title) @@ -44,21 +44,21 @@ def run(self): for name, class_n in name_to_class.items(): with self._run_parser(class_n, key, name): - cmd = ["--{}".format(key), name] + cmd = [f"--{key}", name] parser_result = parse_parser(parser_creator(cmd)) opt_group = next(i["options"] for i in parser_result["action_groups"] if i["title"] == key) results[name] = opt_group - core_names = set.intersection(*list({tuple(i["name"]) for i in v} for v in results.values())) + core_names = set.intersection(*[{tuple(i["name"]) for i in v} for v in results.values()]) if core_names: rows = [i for i in next(iter(results.values())) if tuple(i["name"]) in core_names] content.append( - self._build_table(rows, title="core", description="options shared across all {}".format(key)), + self._build_table(rows, title="core", description=f"options shared across all {key}"), ) for name, group in results.items(): rows = [i for i in group if tuple(i["name"]) not in core_names] if rows: content.append( - self._build_table(rows, title=name, description="options specific to {} {}".format(key, name)), + self._build_table(rows, title=name, description=f"options specific to {key} {name}"), ) return content @@ -223,7 +223,7 @@ def register_target_option(self, target) -> None: domain.add_program_option(None, key, self.env.docname, key) -def literal_data(rawtext, app, type, slug, options): +def literal_data(rawtext, app, type, slug, options): # noqa: U100 """Create a link to a BitBucket resource.""" of_class = type.split(".") data = getattr(__import__(".".join(of_class[:-1]), fromlist=[of_class[-1]]), of_class[-1]) diff --git a/vendor/virtualenv/pyproject.toml b/vendor/virtualenv/pyproject.toml index c2b3b9ac..b7a7ad82 100644 --- a/vendor/virtualenv/pyproject.toml +++ b/vendor/virtualenv/pyproject.toml @@ -1,8 +1,7 @@ [build-system] requires = [ - "setuptools >= 41.0.0", - "wheel >= 0.30.0", - "setuptools_scm >= 2", + "setuptools>=59.6", + "setuptools-scm>=6.4.2", ] build-backend = 'setuptools.build_meta' diff --git a/vendor/virtualenv/setup.cfg b/vendor/virtualenv/setup.cfg index e16bf223..9e559dc2 100644 --- a/vendor/virtualenv/setup.cfg +++ b/vendor/virtualenv/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = virtualenv -version = 20.14.1 +version = 20.16.3 description = Virtual Python Environment builder long_description = file: README.md long_description_content_type = text/markdown @@ -19,15 +19,8 @@ classifiers = Operating System :: MacOS :: MacOS X Operating System :: Microsoft :: Windows Operating System :: POSIX - Programming Language :: Python :: 2 - Programming Language :: Python :: 2.7 Programming Language :: Python :: 3 - Programming Language :: 
Python :: 3.5 - Programming Language :: Python :: 3.6 - Programming Language :: Python :: 3.7 - Programming Language :: Python :: 3.8 - Programming Language :: Python :: 3.9 - Programming Language :: Python :: 3.10 + Programming Language :: Python :: 3 :: Only Programming Language :: Python :: Implementation :: CPython Programming Language :: Python :: Implementation :: PyPy Topic :: Software Development :: Libraries @@ -41,14 +34,12 @@ project_urls = [options] packages = find: install_requires = - distlib>=0.3.1,<1 - filelock>=3.2,<4 - platformdirs>=2,<3 - six>=1.9.0,<2 # keep it >=1.9.0 as it may cause problems on LTS platforms - importlib-metadata>=0.12;python_version<"3.8" - importlib-resources>=1.0;python_version<"3.7" - pathlib2>=2.3.3,<3;python_version < '3.4' and sys.platform != 'win32' -python_requires = >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.* + distlib>=0.3.5,<1 + filelock>=3.4.1,<4 + platformdirs>=2.4,<3 + importlib-metadata>=4.8.3;python_version < "3.8" + importlib-resources>=5.4;python_version < "3.7" +python_requires = >=3.6 package_dir = =src zip_safe = True @@ -88,22 +79,22 @@ virtualenv.seed = [options.extras_require] docs = - proselint>=0.10.2 - sphinx>=3 - sphinx-argparse>=0.2.5 - sphinx-rtd-theme>=0.4.3 - towncrier>=21.3 + proselint>=0.13 + sphinx>=5.1.1 + sphinx-argparse>=0.3.1 + sphinx-rtd-theme>=1 + towncrier>=21.9 testing = - coverage>=4 + coverage>=6.2 coverage-enable-subprocess>=1 - flaky>=3 - pytest>=4 + flaky>=3.7 + packaging>=21.3 + pytest>=7.0.1 pytest-env>=0.6.2 - pytest-freezegun>=0.4.1 - pytest-mock>=2 - pytest-randomly>=1 - pytest-timeout>=1 - packaging>=20.0;python_version>"3.4" + pytest-freezegun>=0.4.2 + pytest-mock>=3.6.1 + pytest-randomly>=3.10.3 + pytest-timeout>=2.1 [options.package_data] virtualenv.activation.bash = *.sh diff --git a/vendor/virtualenv/src/virtualenv/__init__.py b/vendor/virtualenv/src/virtualenv/__init__.py index 300b12e9..619ddf30 100644 --- a/vendor/virtualenv/src/virtualenv/__init__.py +++ b/vendor/virtualenv/src/virtualenv/__init__.py @@ -1,13 +1,10 @@ -from __future__ import absolute_import, unicode_literals - import sys from pathlib import Path __path_pack__ = Path(__path__[0]) if getattr(sys, "oxidized", False): - parents = 1 if sys.platform.startswith("win") else 2 - __path_assets__ = __path_pack__.parents[parents] / "assets" / "virtualenv" + __path_assets__ = __path_pack__.parents[1] / "assets" / "virtualenv" else: __path_assets__ = None @@ -16,8 +13,8 @@ from .run import cli_run, session_via_cli from .version import __version__ -__all__ = ( +__all__ = [ "__version__", "cli_run", "session_via_cli", -) +] diff --git a/vendor/virtualenv/src/virtualenv/__main__.py b/vendor/virtualenv/src/virtualenv/__main__.py index 3b06fd74..c3c5adf6 100644 --- a/vendor/virtualenv/src/virtualenv/__main__.py +++ b/vendor/virtualenv/src/virtualenv/__main__.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, print_function, unicode_literals - import logging import os import sys @@ -18,41 +16,32 @@ def run(args=None, options=None, env=None): session = cli_run(args, options, env) logging.warning(LogSession(session, start)) except ProcessCallFailed as exception: - print("subprocess call failed for {} with code {}".format(exception.cmd, exception.code)) + print(f"subprocess call failed for {exception.cmd} with code {exception.code}") print(exception.out, file=sys.stdout, end="") print(exception.err, file=sys.stderr, end="") raise SystemExit(exception.code) -class LogSession(object): +class LogSession: def __init__(self, session, 
start): self.session = session self.start = start def __str__(self): - from virtualenv.util.six import ensure_text - spec = self.session.creator.interpreter.spec elapsed = (datetime.now() - self.start).total_seconds() * 1000 lines = [ - "created virtual environment {} in {:.0f}ms".format(spec, elapsed), - " creator {}".format(ensure_text(str(self.session.creator))), + f"created virtual environment {spec} in {elapsed:.0f}ms", + f" creator {str(self.session.creator)}", ] if self.session.seeder.enabled: - lines += ( - " seeder {}".format(ensure_text(str(self.session.seeder))), - " added seed packages: {}".format( - ", ".join( - sorted( - "==".join(i.stem.split("-")) - for i in self.session.creator.purelib.iterdir() - if i.suffix == ".dist-info" - ), - ), - ), - ) + lines.append(f" seeder {str(self.session.seeder)}") + path = self.session.creator.purelib.iterdir() + packages = sorted("==".join(i.stem.split("-")) for i in path if i.suffix == ".dist-info") + lines.append(f" added seed packages: {', '.join(packages)}") + if self.session.activators: - lines.append(" activators {}".format(",".join(i.__class__.__name__ for i in self.session.activators))) + lines.append(f" activators {','.join(i.__class__.__name__ for i in self.session.activators)}") return "\n".join(lines) diff --git a/vendor/virtualenv/src/virtualenv/activation/__init__.py b/vendor/virtualenv/src/virtualenv/activation/__init__.py index e9296d86..99984bc5 100644 --- a/vendor/virtualenv/src/virtualenv/activation/__init__.py +++ b/vendor/virtualenv/src/virtualenv/activation/__init__.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, unicode_literals - from .bash import BashActivator from .batch import BatchActivator from .cshell import CShellActivator diff --git a/vendor/virtualenv/src/virtualenv/activation/activator.py b/vendor/virtualenv/src/virtualenv/activation/activator.py index 80d7e47f..fb813bc7 100644 --- a/vendor/virtualenv/src/virtualenv/activation/activator.py +++ b/vendor/virtualenv/src/virtualenv/activation/activator.py @@ -1,14 +1,9 @@ -from __future__ import absolute_import, unicode_literals - import os from abc import ABCMeta, abstractmethod -from six import add_metaclass - -@add_metaclass(ABCMeta) -class Activator(object): - """Generates an activate script for the virtual environment""" +class Activator(metaclass=ABCMeta): + """Generates activate script for the virtual environment""" def __init__(self, options): """Create a new activator generator. @@ -18,7 +13,7 @@ def __init__(self, options): self.flag_prompt = os.path.basename(os.getcwd()) if options.prompt == "." else options.prompt @classmethod - def supports(cls, interpreter): + def supports(cls, interpreter): # noqa: U100 """Check if the activation script is supported in the given interpreter. :param interpreter: the interpreter we need to support @@ -27,7 +22,7 @@ def supports(cls, interpreter): return True @classmethod - def add_parser_arguments(cls, parser, interpreter): + def add_parser_arguments(cls, parser, interpreter): # noqa: U100 """ Add CLI arguments for this activation script. @@ -36,10 +31,15 @@ def add_parser_arguments(cls, parser, interpreter): """ @abstractmethod - def generate(self, creator): - """Generate the activate script for the given creator. + def generate(self, creator): # noqa: U100 + """Generate activate script for the given creator. 
:param creator: the creator (based of :class:`virtualenv.create.creator.Creator`) we used to create this \ virtual environment """ raise NotImplementedError + + +__all__ = [ + "Activator", +] diff --git a/vendor/virtualenv/src/virtualenv/activation/bash/__init__.py b/vendor/virtualenv/src/virtualenv/activation/bash/__init__.py index 22c90c38..e704126c 100644 --- a/vendor/virtualenv/src/virtualenv/activation/bash/__init__.py +++ b/vendor/virtualenv/src/virtualenv/activation/bash/__init__.py @@ -1,6 +1,4 @@ -from __future__ import absolute_import, unicode_literals - -from virtualenv.util.path import Path +from pathlib import Path from ..via_template import ViaTemplateActivator @@ -11,3 +9,8 @@ def templates(self): def as_name(self, template): return template.stem + + +__all__ = [ + "BashActivator", +] diff --git a/vendor/virtualenv/src/virtualenv/activation/batch/__init__.py b/vendor/virtualenv/src/virtualenv/activation/batch/__init__.py index 4149712d..6c35337f 100644 --- a/vendor/virtualenv/src/virtualenv/activation/batch/__init__.py +++ b/vendor/virtualenv/src/virtualenv/activation/batch/__init__.py @@ -1,8 +1,5 @@ -from __future__ import absolute_import, unicode_literals - import os - -from virtualenv.util.path import Path +from pathlib import Path from ..via_template import ViaTemplateActivator @@ -19,5 +16,10 @@ def templates(self): def instantiate_template(self, replacements, template, creator): # ensure the text has all newlines as \r\n - required by batch - base = super(BatchActivator, self).instantiate_template(replacements, template, creator) + base = super().instantiate_template(replacements, template, creator) return base.replace(os.linesep, "\n").replace("\n", os.linesep) + + +__all__ = [ + "BatchActivator", +] diff --git a/vendor/virtualenv/src/virtualenv/activation/cshell/__init__.py b/vendor/virtualenv/src/virtualenv/activation/cshell/__init__.py index b25c602a..b225ba31 100644 --- a/vendor/virtualenv/src/virtualenv/activation/cshell/__init__.py +++ b/vendor/virtualenv/src/virtualenv/activation/cshell/__init__.py @@ -1,6 +1,4 @@ -from __future__ import absolute_import, unicode_literals - -from virtualenv.util.path import Path +from pathlib import Path from ..via_template import ViaTemplateActivator @@ -12,3 +10,8 @@ def supports(cls, interpreter): def templates(self): yield Path("activate.csh") + + +__all__ = [ + "CShellActivator", +] diff --git a/vendor/virtualenv/src/virtualenv/activation/fish/__init__.py b/vendor/virtualenv/src/virtualenv/activation/fish/__init__.py index 8d0e19c2..4b240421 100644 --- a/vendor/virtualenv/src/virtualenv/activation/fish/__init__.py +++ b/vendor/virtualenv/src/virtualenv/activation/fish/__init__.py @@ -1,6 +1,4 @@ -from __future__ import absolute_import, unicode_literals - -from virtualenv.util.path import Path +from pathlib import Path from ..via_template import ViaTemplateActivator @@ -8,3 +6,8 @@ class FishActivator(ViaTemplateActivator): def templates(self): yield Path("activate.fish") + + +__all__ = [ + "FishActivator", +] diff --git a/vendor/virtualenv/src/virtualenv/activation/nushell/__init__.py b/vendor/virtualenv/src/virtualenv/activation/nushell/__init__.py index 994c1fb6..839c19c2 100644 --- a/vendor/virtualenv/src/virtualenv/activation/nushell/__init__.py +++ b/vendor/virtualenv/src/virtualenv/activation/nushell/__init__.py @@ -1,9 +1,5 @@ -from __future__ import absolute_import, unicode_literals - import os - -from virtualenv.util.path import Path -from virtualenv.util.six import ensure_text +from pathlib import Path from 
..via_template import ViaTemplateActivator @@ -20,9 +16,14 @@ def replacements(self, creator, dest_folder): return { "__VIRTUAL_PROMPT__": "" if self.flag_prompt is None else self.flag_prompt, - "__VIRTUAL_ENV__": ensure_text(str(creator.dest)), + "__VIRTUAL_ENV__": str(creator.dest), "__VIRTUAL_NAME__": creator.env_name, - "__BIN_NAME__": ensure_text(str(creator.bin_dir.relative_to(creator.dest))), - "__PATH_SEP__": ensure_text(os.pathsep), - "__DEACTIVATE_PATH__": ensure_text(str(Path(dest_folder) / "deactivate.nu")), + "__BIN_NAME__": str(creator.bin_dir.relative_to(creator.dest)), + "__PATH_SEP__": os.pathsep, + "__DEACTIVATE_PATH__": str(Path(dest_folder) / "deactivate.nu"), } + + +__all__ = [ + "NushellActivator", +] diff --git a/vendor/virtualenv/src/virtualenv/activation/nushell/activate.nu b/vendor/virtualenv/src/virtualenv/activation/nushell/activate.nu index ffeff7d4..48c85b48 100644 --- a/vendor/virtualenv/src/virtualenv/activation/nushell/activate.nu +++ b/vendor/virtualenv/src/virtualenv/activation/nushell/activate.nu @@ -8,11 +8,11 @@ def-env activate-virtualenv [] { $name in (env).name } - let is-windows = ((sys).host.name | str downcase) == 'windows' - let virtual-env = '__VIRTUAL_ENV__' + let is_windows = ((sys).host.name | str downcase) == 'windows' + let virtual_env = '__VIRTUAL_ENV__' let bin = '__BIN_NAME__' - let path-sep = '__PATH_SEP__' - let path-name = if $is-windows { + let path_sep = '__PATH_SEP__' + let path_name = if $is_windows { if (has-env 'Path') { 'Path' } else { @@ -22,8 +22,8 @@ def-env activate-virtualenv [] { 'PATH' } - let old-path = ( - if $is-windows { + let old_path = ( + if $is_windows { if (has-env 'Path') { $env.Path } else { @@ -33,24 +33,24 @@ def-env activate-virtualenv [] { $env.PATH } | if (is-string $in) { # if Path/PATH is a string, make it a list - $in | split row $path-sep | path expand + $in | split row $path_sep | path expand } else { $in } ) - let venv-path = ([$virtual-env $bin] | path join) - let new-path = ($old-path | prepend $venv-path | str collect $path-sep) + let venv_path = ([$virtual_env $bin] | path join) + let new_path = ($old_path | prepend $venv_path | str collect $path_sep) # Creating the new prompt for the session - let virtual-prompt = if ('__VIRTUAL_PROMPT__' == '') { - $'(char lparen)($virtual-env | path basename)(char rparen) ' + let virtual_prompt = if ('__VIRTUAL_PROMPT__' == '') { + $'(char lparen)($virtual_env | path basename)(char rparen) ' } else { '(__VIRTUAL_PROMPT__) ' } # Back up the old prompt builder - let old-prompt-command = if (has-env 'VIRTUAL_ENV') && (has-env '_OLD_PROMPT_COMMAND') { + let old_prompt_command = if (has-env 'VIRTUAL_ENV') && (has-env '_OLD_PROMPT_COMMAND') { $env._OLD_PROMPT_COMMAND } else { if (has-env 'PROMPT_COMMAND') { @@ -61,28 +61,28 @@ def-env activate-virtualenv [] { } # If there is no default prompt, then only the env is printed in the prompt - let new-prompt = if (has-env 'PROMPT_COMMAND') { - if ($old-prompt-command | describe) == 'block' { - { $'($virtual-prompt)(do $old-prompt-command)' } + let new_prompt = if (has-env 'PROMPT_COMMAND') { + if ($old_prompt_command | describe) == 'block' { + { $'($virtual_prompt)(do $old_prompt_command)' } } else { - { $'($virtual-prompt)($old-prompt-command)' } + { $'($virtual_prompt)($old_prompt_command)' } } } else { - { $'($virtual-prompt)' } + { $'($virtual_prompt)' } } # Environment variables that will be batched loaded to the virtual env - let new-env = { - $path-name : $new-path - VIRTUAL_ENV : $virtual-env - _OLD_VIRTUAL_PATH 
: ($old-path | str collect $path-sep) - _OLD_PROMPT_COMMAND : $old-prompt-command - PROMPT_COMMAND : $new-prompt - VIRTUAL_PROMPT : $virtual-prompt + let new_env = { + $path_name : $new_path + VIRTUAL_ENV : $virtual_env + _OLD_VIRTUAL_PATH : ($old_path | str collect $path_sep) + _OLD_PROMPT_COMMAND : $old_prompt_command + PROMPT_COMMAND : $new_prompt + VIRTUAL_PROMPT : $virtual_prompt } # Activate the environment variables - load-env $new-env + load-env $new_env } # Activate the virtualenv diff --git a/vendor/virtualenv/src/virtualenv/activation/nushell/deactivate.nu b/vendor/virtualenv/src/virtualenv/activation/nushell/deactivate.nu index 904f7d0e..4dd132c3 100644 --- a/vendor/virtualenv/src/virtualenv/activation/nushell/deactivate.nu +++ b/vendor/virtualenv/src/virtualenv/activation/nushell/deactivate.nu @@ -3,9 +3,9 @@ def-env deactivate-virtualenv [] { $name in (env).name } - let is-windows = ((sys).host.name | str downcase) == 'windows' + let is_windows = ((sys).host.name | str downcase) == 'windows' - let path-name = if $is-windows { + let path_name = if $is_windows { if (has-env 'Path') { 'Path' } else { @@ -15,7 +15,7 @@ def-env deactivate-virtualenv [] { 'PATH' } - load-env { $path-name : $env._OLD_VIRTUAL_PATH } + load-env { $path_name : $env._OLD_VIRTUAL_PATH } let-env PROMPT_COMMAND = $env._OLD_PROMPT_COMMAND diff --git a/vendor/virtualenv/src/virtualenv/activation/powershell/__init__.py b/vendor/virtualenv/src/virtualenv/activation/powershell/__init__.py index 4fadc63b..6d561bf7 100644 --- a/vendor/virtualenv/src/virtualenv/activation/powershell/__init__.py +++ b/vendor/virtualenv/src/virtualenv/activation/powershell/__init__.py @@ -1,6 +1,4 @@ -from __future__ import absolute_import, unicode_literals - -from virtualenv.util.path import Path +from pathlib import Path from ..via_template import ViaTemplateActivator @@ -8,3 +6,8 @@ class PowerShellActivator(ViaTemplateActivator): def templates(self): yield Path("activate.ps1") + + +__all__ = [ + "PowerShellActivator", +] diff --git a/vendor/virtualenv/src/virtualenv/activation/python/__init__.py b/vendor/virtualenv/src/virtualenv/activation/python/__init__.py index 4f3d7190..e54a7f6b 100644 --- a/vendor/virtualenv/src/virtualenv/activation/python/__init__.py +++ b/vendor/virtualenv/src/virtualenv/activation/python/__init__.py @@ -1,11 +1,7 @@ -from __future__ import absolute_import, unicode_literals - import os import sys from collections import OrderedDict - -from virtualenv.util.path import Path -from virtualenv.util.six import ensure_text +from pathlib import Path from ..via_template import ViaTemplateActivator @@ -25,12 +21,12 @@ def _generate(self, replacements, templates, to_folder, creator): return generated def replacements(self, creator, dest_folder): - replacements = super(PythonActivator, self).replacements(creator, dest_folder) + replacements = super().replacements(creator, dest_folder) lib_folders = OrderedDict((os.path.relpath(str(i), str(dest_folder)), None) for i in creator.libs) win_py2 = creator.interpreter.platform == "win32" and creator.interpreter.version_info.major == 2 replacements.update( { - "__LIB_FOLDERS__": ensure_text(os.pathsep.join(lib_folders.keys())), + "__LIB_FOLDERS__": os.pathsep.join(lib_folders.keys()), "__DECODE_PATH__": ("yes" if win_py2 else ""), }, ) @@ -41,5 +37,10 @@ def _repr_unicode(creator, value): py2 = creator.interpreter.version_info.major == 2 if py2: # on Python 2 we need to encode this into explicit utf-8, py3 supports unicode literals start = 2 if sys.version_info[0] == 3 
else 1 - value = ensure_text(repr(value.encode("utf-8"))[start:-1]) + value = repr(value.encode("utf-8"))[start:-1] return value + + +__all__ = [ + "PythonActivator", +] diff --git a/vendor/virtualenv/src/virtualenv/activation/python/activate_this.py.template b/vendor/virtualenv/src/virtualenv/activation/python/activate_this.py.template index 29debe3e..e8eeb843 100644 --- a/vendor/virtualenv/src/virtualenv/activation/python/activate_this.py.template +++ b/vendor/virtualenv/src/virtualenv/activation/python/activate_this.py.template @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Activate virtualenv for current interpreter: Use exec(open(this_file).read(), {'__file__': this_file}). diff --git a/vendor/virtualenv/src/virtualenv/activation/via_template.py b/vendor/virtualenv/src/virtualenv/activation/via_template.py index 14f09797..bfb97441 100644 --- a/vendor/virtualenv/src/virtualenv/activation/via_template.py +++ b/vendor/virtualenv/src/virtualenv/activation/via_template.py @@ -1,13 +1,7 @@ -from __future__ import absolute_import, unicode_literals - import os import sys from abc import ABCMeta, abstractmethod -from six import add_metaclass - -from virtualenv.util.six import ensure_text - from .activator import Activator if sys.version_info >= (3, 7): @@ -16,8 +10,7 @@ from importlib_resources import read_binary -@add_metaclass(ABCMeta) -class ViaTemplateActivator(Activator): +class ViaTemplateActivator(Activator, metaclass=ABCMeta): @abstractmethod def templates(self): raise NotImplementedError @@ -30,13 +23,13 @@ def generate(self, creator): creator.pyenv_cfg["prompt"] = self.flag_prompt return generated - def replacements(self, creator, dest_folder): + def replacements(self, creator, dest_folder): # noqa: U100 return { "__VIRTUAL_PROMPT__": "" if self.flag_prompt is None else self.flag_prompt, - "__VIRTUAL_ENV__": ensure_text(str(creator.dest)), + "__VIRTUAL_ENV__": str(creator.dest), "__VIRTUAL_NAME__": creator.env_name, - "__BIN_NAME__": ensure_text(str(creator.bin_dir.relative_to(creator.dest))), - "__PATH_SEP__": ensure_text(os.pathsep), + "__BIN_NAME__": str(creator.bin_dir.relative_to(creator.dest)), + "__PATH_SEP__": os.pathsep, } def _generate(self, replacements, templates, to_folder, creator): @@ -62,6 +55,10 @@ def instantiate_template(self, replacements, template, creator): return text @staticmethod - def _repr_unicode(creator, value): - # by default we just let it be unicode - return value + def _repr_unicode(creator, value): # noqa: U100 + return value # by default, we just let it be unicode + + +__all__ = [ + "ViaTemplateActivator", +] diff --git a/vendor/virtualenv/src/virtualenv/app_data/__init__.py b/vendor/virtualenv/src/virtualenv/app_data/__init__.py index e56e63d5..262ac078 100644 --- a/vendor/virtualenv/src/virtualenv/app_data/__init__.py +++ b/vendor/virtualenv/src/virtualenv/app_data/__init__.py @@ -1,7 +1,6 @@ """ Application data stored by virtualenv. 
""" -from __future__ import absolute_import, unicode_literals import logging import os @@ -15,7 +14,7 @@ def _default_app_data_dir(env): - key = str("VIRTUALENV_OVERRIDE_APP_DATA") + key = "VIRTUALENV_OVERRIDE_APP_DATA" if key in env: return env[key] else: @@ -23,7 +22,7 @@ def _default_app_data_dir(env): def make_app_data(folder, **kwargs): - read_only = kwargs.pop("read_only") + is_read_only = kwargs.pop("read_only") env = kwargs.pop("env") if kwargs: # py3+ kwonly raise TypeError("unexpected keywords: {}") @@ -32,7 +31,7 @@ def make_app_data(folder, **kwargs): folder = _default_app_data_dir(env) folder = os.path.abspath(folder) - if read_only: + if is_read_only: return ReadOnlyAppData(folder) if not os.path.isdir(folder): diff --git a/vendor/virtualenv/src/virtualenv/app_data/base.py b/vendor/virtualenv/src/virtualenv/app_data/base.py index 4ea54d9f..ecaee78a 100644 --- a/vendor/virtualenv/src/virtualenv/app_data/base.py +++ b/vendor/virtualenv/src/virtualenv/app_data/base.py @@ -1,18 +1,14 @@ """ Application data stored by virtualenv. """ -from __future__ import absolute_import, unicode_literals from abc import ABCMeta, abstractmethod from contextlib import contextmanager -import six - from virtualenv.info import IS_ZIPAPP -@six.add_metaclass(ABCMeta) -class AppData(object): +class AppData(metaclass=ABCMeta): """Abstract storage interface for the virtualenv application""" @abstractmethod @@ -24,7 +20,7 @@ def reset(self): """called when the user passes in the reset app data""" @abstractmethod - def py_info(self, path): + def py_info(self, path): # noqa: U100 raise NotImplementedError @abstractmethod @@ -36,7 +32,7 @@ def can_update(self): raise NotImplementedError @abstractmethod - def embed_update_log(self, distribution, for_py_version): + def embed_update_log(self, distribution, for_py_version): # noqa: U100 raise NotImplementedError @property @@ -48,7 +44,7 @@ def transient(self): raise NotImplementedError @abstractmethod - def wheel_image(self, for_py_version, name): + def wheel_image(self, for_py_version, name): # noqa: U100 raise NotImplementedError @contextmanager @@ -62,17 +58,16 @@ def ensure_extracted(self, path, to_folder=None): @abstractmethod @contextmanager - def extract(self, path, to_folder): + def extract(self, path, to_folder): # noqa: U100 raise NotImplementedError @abstractmethod @contextmanager - def locked(self, path): + def locked(self, path): # noqa: U100 raise NotImplementedError -@six.add_metaclass(ABCMeta) -class ContentStore(object): +class ContentStore(metaclass=ABCMeta): @abstractmethod def exists(self): raise NotImplementedError @@ -82,7 +77,7 @@ def read(self): raise NotImplementedError @abstractmethod - def write(self, content): + def write(self, content): # noqa: U100 raise NotImplementedError @abstractmethod @@ -93,3 +88,9 @@ def remove(self): @contextmanager def locked(self): pass + + +__all__ = [ + "ContentStore", + "AppData", +] diff --git a/vendor/virtualenv/src/virtualenv/app_data/na.py b/vendor/virtualenv/src/virtualenv/app_data/na.py index d5897871..784c76b2 100644 --- a/vendor/virtualenv/src/virtualenv/app_data/na.py +++ b/vendor/virtualenv/src/virtualenv/app_data/na.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, unicode_literals - from contextlib import contextmanager from .base import AppData, ContentStore @@ -22,17 +20,17 @@ def close(self): def reset(self): """do nothing""" - def py_info(self, path): + def py_info(self, path): # noqa: U100 return ContentStoreNA() - def embed_update_log(self, distribution, for_py_version): + 
def embed_update_log(self, distribution, for_py_version): # noqa: U100 return ContentStoreNA() - def extract(self, path, to_folder): + def extract(self, path, to_folder): # noqa: U100 raise self.error @contextmanager - def locked(self, path): + def locked(self, path): # noqa: U100 """do nothing""" yield @@ -40,11 +38,11 @@ def locked(self, path): def house(self): raise self.error - def wheel_image(self, for_py_version, name): + def wheel_image(self, for_py_version, name): # noqa: U100 raise self.error def py_info_clear(self): - """ """ + """nothing to clear""" class ContentStoreNA(ContentStore): @@ -52,15 +50,21 @@ def exists(self): return False def read(self): - """ """ + """nothing to read""" return None - def write(self, content): - """ """ + def write(self, content): # noqa: U100 + """nothing to write""" def remove(self): - """ """ + """nothing to remove""" @contextmanager def locked(self): yield + + +__all__ = [ + "AppDataDisabled", + "ContentStoreNA", +] diff --git a/vendor/virtualenv/src/virtualenv/app_data/read_only.py b/vendor/virtualenv/src/virtualenv/app_data/read_only.py index 858978cd..b11f4a63 100644 --- a/vendor/virtualenv/src/virtualenv/app_data/read_only.py +++ b/vendor/virtualenv/src/virtualenv/app_data/read_only.py @@ -8,27 +8,30 @@ class ReadOnlyAppData(AppDataDiskFolder): can_update = False - def __init__(self, folder): # type: (str) -> None + def __init__(self, folder: str) -> None: if not os.path.isdir(folder): - raise RuntimeError("read-only app data directory {} does not exist".format(folder)) + raise RuntimeError(f"read-only app data directory {folder} does not exist") + super().__init__(folder) self.lock = NoOpFileLock(folder) - def reset(self): # type: () -> None + def reset(self) -> None: raise RuntimeError("read-only app data does not support reset") - def py_info_clear(self): # type: () -> None + def py_info_clear(self) -> None: raise NotImplementedError def py_info(self, path): return _PyInfoStoreDiskReadOnly(self.py_info_at, path) - def embed_update_log(self, distribution, for_py_version): + def embed_update_log(self, distribution, for_py_version): # noqa: U100 raise NotImplementedError class _PyInfoStoreDiskReadOnly(PyInfoStoreDisk): - def write(self, content): + def write(self, content): # noqa: U100 raise RuntimeError("read-only app data python info cannot be updated") -__all__ = ("ReadOnlyAppData",) +__all__ = [ + "ReadOnlyAppData", +] diff --git a/vendor/virtualenv/src/virtualenv/app_data/via_disk_folder.py b/vendor/virtualenv/src/virtualenv/app_data/via_disk_folder.py index 3f6afd55..ad8292cf 100644 --- a/vendor/virtualenv/src/virtualenv/app_data/via_disk_folder.py +++ b/vendor/virtualenv/src/virtualenv/app_data/via_disk_folder.py @@ -1,12 +1,11 @@ -# -*- coding: utf-8 -*- """ A rough layout of the current storage goes as: virtualenv-app-data ├── py - -│   └── *.json/lock +│ └── *.json/lock ├── wheel -│   ├── house +│ ├── house │ │ └── *.whl │ └── -> 3.9 │ ├── img- @@ -22,7 +21,6 @@ ├── debug.py └── _virtualenv.py """ -from __future__ import absolute_import, unicode_literals import json import logging @@ -30,11 +28,8 @@ from contextlib import contextmanager from hashlib import sha256 -import six - from virtualenv.util.lock import ReentrantFileLock from virtualenv.util.path import safe_delete -from virtualenv.util.six import ensure_text from virtualenv.util.zipapp import extract from virtualenv.version import __version__ @@ -53,7 +48,7 @@ def __init__(self, folder): self.lock = ReentrantFileLock(folder) def __repr__(self): - return 
"{}({})".format(type(self).__name__, self.lock.path) + return f"{type(self).__name__}({self.lock.path})" def __str__(self): return str(self.lock.path) @@ -113,8 +108,7 @@ def wheel_image(self, for_py_version, name): return self.lock.path / "wheel" / for_py_version / "image" / "1" / name -@six.add_metaclass(ABCMeta) -class JSONStoreDisk(ContentStore): +class JSONStoreDisk(ContentStore, metaclass=ABCMeta): def __init__(self, in_folder, key, msg, msg_args): self.in_folder = in_folder self.key = key @@ -123,7 +117,7 @@ def __init__(self, in_folder, key, msg, msg_args): @property def file(self): - return self.in_folder.path / "{}.json".format(self.key) + return self.in_folder.path / f"{self.key}.json" def exists(self): return self.file.exists() @@ -132,11 +126,11 @@ def read(self): data, bad_format = None, False try: data = json.loads(self.file.read_text()) - logging.debug("got {} from %s".format(self.msg), *self.msg_args) + logging.debug(f"got {self.msg} from %s", *self.msg_args) return data except ValueError: bad_format = True - except Exception: # noqa + except Exception: pass if bad_format: try: @@ -147,7 +141,7 @@ def read(self): def remove(self): self.file.unlink() - logging.debug("removed {} at %s".format(self.msg), *self.msg_args) + logging.debug(f"removed {self.msg} at %s", *self.msg_args) @contextmanager def locked(self): @@ -157,21 +151,28 @@ def locked(self): def write(self, content): folder = self.file.parent folder.mkdir(parents=True, exist_ok=True) - self.file.write_text(ensure_text(json.dumps(content, sort_keys=True, indent=2))) - logging.debug("wrote {} at %s".format(self.msg), *self.msg_args) + self.file.write_text(json.dumps(content, sort_keys=True, indent=2)) + logging.debug(f"wrote {self.msg} at %s", *self.msg_args) class PyInfoStoreDisk(JSONStoreDisk): def __init__(self, in_folder, path): - key = sha256(str(path).encode("utf-8") if six.PY3 else str(path)).hexdigest() - super(PyInfoStoreDisk, self).__init__(in_folder, key, "python info of %s", (path,)) + key = sha256(str(path).encode("utf-8")).hexdigest() + super().__init__(in_folder, key, "python info of %s", (path,)) class EmbedDistributionUpdateStoreDisk(JSONStoreDisk): def __init__(self, in_folder, distribution): - super(EmbedDistributionUpdateStoreDisk, self).__init__( + super().__init__( in_folder, distribution, "embed update of distribution %s", (distribution,), ) + + +__all__ = [ + "AppDataDiskFolder", + "JSONStoreDisk", + "PyInfoStoreDisk", +] diff --git a/vendor/virtualenv/src/virtualenv/app_data/via_tempdir.py b/vendor/virtualenv/src/virtualenv/app_data/via_tempdir.py index 112a3fe6..7854642c 100644 --- a/vendor/virtualenv/src/virtualenv/app_data/via_tempdir.py +++ b/vendor/virtualenv/src/virtualenv/app_data/via_tempdir.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, unicode_literals - import logging from tempfile import mkdtemp @@ -13,7 +11,7 @@ class TempAppData(AppDataDiskFolder): can_update = False def __init__(self): - super(TempAppData, self).__init__(folder=mkdtemp()) + super().__init__(folder=mkdtemp()) logging.debug("created temporary app data folder %s", self.lock.path) def reset(self): @@ -23,5 +21,10 @@ def close(self): logging.debug("remove temporary app data folder %s", self.lock.path) safe_delete(self.lock.path) - def embed_update_log(self, distribution, for_py_version): + def embed_update_log(self, distribution, for_py_version): # noqa: U100 raise NotImplementedError + + +__all__ = [ + "TempAppData", +] diff --git a/vendor/virtualenv/src/virtualenv/config/__init__.py 
b/vendor/virtualenv/src/virtualenv/config/__init__.py index 01e6d4f4..e69de29b 100644 --- a/vendor/virtualenv/src/virtualenv/config/__init__.py +++ b/vendor/virtualenv/src/virtualenv/config/__init__.py @@ -1 +0,0 @@ -from __future__ import absolute_import, unicode_literals diff --git a/vendor/virtualenv/src/virtualenv/config/cli/__init__.py b/vendor/virtualenv/src/virtualenv/config/cli/__init__.py index 01e6d4f4..e69de29b 100644 --- a/vendor/virtualenv/src/virtualenv/config/cli/__init__.py +++ b/vendor/virtualenv/src/virtualenv/config/cli/__init__.py @@ -1 +0,0 @@ -from __future__ import absolute_import, unicode_literals diff --git a/vendor/virtualenv/src/virtualenv/config/cli/parser.py b/vendor/virtualenv/src/virtualenv/config/cli/parser.py index c8e2f551..5ab4fbde 100644 --- a/vendor/virtualenv/src/virtualenv/config/cli/parser.py +++ b/vendor/virtualenv/src/virtualenv/config/cli/parser.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, unicode_literals - import os from argparse import SUPPRESS, ArgumentDefaultsHelpFormatter, ArgumentParser, Namespace from collections import OrderedDict @@ -12,7 +10,7 @@ class VirtualEnvOptions(Namespace): def __init__(self, **kwargs): - super(VirtualEnvOptions, self).__init__(**kwargs) + super().__init__(**kwargs) self._src = None self._sources = {} @@ -25,7 +23,7 @@ def set_src(self, key, value, src): def __setattr__(self, key, value): if getattr(self, "_src", None) is not None: self._sources[key] = self._src - super(VirtualEnvOptions, self).__setattr__(key, value) + super().__setattr__(key, value) def get_source(self, key): return self._sources.get(key) @@ -37,10 +35,7 @@ def verbosity(self): return max(self.verbose - self.quiet, 0) def __repr__(self): - return "{}({})".format( - type(self).__name__, - ", ".join("{}={}".format(k, v) for k, v in vars(self).items() if not k.startswith("_")), - ) + return f"{type(self).__name__}({', '.join(f'{k}={v}' for k, v in vars(self).items() if not k.startswith('_'))})" class VirtualEnvConfigParser(ArgumentParser): @@ -57,7 +52,7 @@ def __init__(self, options=None, env=None, *args, **kwargs): kwargs["add_help"] = False kwargs["formatter_class"] = HelpFormatter kwargs["prog"] = "virtualenv" - super(VirtualEnvConfigParser, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self._fixed = set() if options is not None and not isinstance(options, VirtualEnvOptions): raise TypeError("options must be of type VirtualEnvOptions") @@ -105,20 +100,27 @@ def parse_known_args(self, args=None, namespace=None): self.options._src = "cli" try: namespace.env = self.env - return super(VirtualEnvConfigParser, self).parse_known_args(args, namespace=namespace) + return super().parse_known_args(args, namespace=namespace) finally: self.options._src = None class HelpFormatter(ArgumentDefaultsHelpFormatter): def __init__(self, prog): - super(HelpFormatter, self).__init__(prog, max_help_position=32, width=240) + super().__init__(prog, max_help_position=32, width=240) def _get_help_string(self, action): - # noinspection PyProtectedMember - text = super(HelpFormatter, self)._get_help_string(action) + + text = super()._get_help_string(action) if hasattr(action, "default_source"): default = " (default: %(default)s)" if text.endswith(default): - text = "{} (default: %(default)s -> from %(default_source)s)".format(text[: -len(default)]) + text = f"{text[: -len(default)]} (default: %(default)s -> from %(default_source)s)" return text + + +__all__ = [ + "HelpFormatter", + "VirtualEnvConfigParser", + "VirtualEnvOptions", +] 
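The VirtualEnvOptions/VirtualEnvConfigParser rewrite above is mostly mechanical (bare super() calls, f-string __repr__), but it preserves a subtle behavior worth keeping in view when rebasing this patch: the options namespace records, per option, which layer (CLI, environment variable, or ini file) last wrote it, which is what lets HelpFormatter render "(default: %(default)s -> from %(default_source)s)". Below is a minimal, self-contained sketch of that pattern, not the vendored code; TrackedOptions is a hypothetical name and the snippet assumes nothing beyond the standard library.

from argparse import ArgumentParser, Namespace

class TrackedOptions(Namespace):
    """Namespace that remembers which layer set each option (hypothetical sketch)."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self._src = None      # layer currently writing options, e.g. "cli"
        self._sources = {}    # option name -> layer that last set it

    def __setattr__(self, key, value):
        # while a layer is active, attribute writes are attributed to it
        if getattr(self, "_src", None) is not None:
            self._sources[key] = self._src
        super().__setattr__(key, value)

    def get_source(self, key):
        return self._sources.get(key)

parser = ArgumentParser()
parser.add_argument("--clear", action="store_true")
opts = TrackedOptions()
opts._src = "cli"                                 # attribute everything parsed now to the CLI
parser.parse_args(["--clear"], namespace=opts)
opts._src = None                                  # stop attributing
assert opts.get_source("clear") == "cli"

The vendored parser wraps exactly this toggling inside parse_known_args() (options._src = "cli" in a try/finally, as shown in the hunk above), so any rebase of this file should keep that finally block intact.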
diff --git a/vendor/virtualenv/src/virtualenv/config/convert.py b/vendor/virtualenv/src/virtualenv/config/convert.py index df408663..38d35518 100644 --- a/vendor/virtualenv/src/virtualenv/config/convert.py +++ b/vendor/virtualenv/src/virtualenv/config/convert.py @@ -1,16 +1,14 @@ -from __future__ import absolute_import, unicode_literals - import logging import os -class TypeData(object): +class TypeData: def __init__(self, default_type, as_type): self.default_type = default_type self.as_type = as_type def __repr__(self): - return "{}(base={}, as={})".format(self.__class__.__name__, self.default_type, self.as_type) + return f"{self.__class__.__name__}(base={self.default_type}, as={self.as_type})" def convert(self, value): return self.default_type(value) @@ -30,7 +28,7 @@ class BoolType(TypeData): def convert(self, value): if value.lower() not in self.BOOLEAN_STATES: - raise ValueError("Not a boolean: %s" % value) + raise ValueError(f"Not a boolean: {value}") return self.BOOLEAN_STATES[value.lower()] @@ -45,7 +43,7 @@ class ListType(TypeData): def _validate(self): """ """ - def convert(self, value, flatten=True): + def convert(self, value, flatten=True): # noqa: U100 values = self.split_values(value) result = [] for value in values: @@ -92,7 +90,7 @@ def get_type(action): return _CONVERT.get(default_type, TypeData)(default_type, as_type) -__all__ = ( +__all__ = [ "convert", "get_type", -) +] diff --git a/vendor/virtualenv/src/virtualenv/config/env_var.py b/vendor/virtualenv/src/virtualenv/config/env_var.py index 8f6211ca..5dc0c1de 100644 --- a/vendor/virtualenv/src/virtualenv/config/env_var.py +++ b/vendor/virtualenv/src/virtualenv/config/env_var.py @@ -1,7 +1,3 @@ -from __future__ import absolute_import, unicode_literals - -from virtualenv.util.six import ensure_str, ensure_text - from .convert import convert @@ -13,16 +9,18 @@ def get_env_var(key, as_type, env): :param env: environment variables to use :return: """ - environ_key = ensure_str("VIRTUALENV_{}".format(key.upper())) + environ_key = f"VIRTUALENV_{key.upper()}" if env.get(environ_key): value = env[environ_key] - # noinspection PyBroadException + try: - source = "env var {}".format(ensure_text(environ_key)) + source = f"env var {environ_key}" as_type = convert(value, as_type, source) return as_type, source except Exception: # note the converter already logs a warning when failures happen pass -__all__ = ("get_env_var",) +__all__ = [ + "get_env_var", +] diff --git a/vendor/virtualenv/src/virtualenv/config/ini.py b/vendor/virtualenv/src/virtualenv/config/ini.py index 0d945ee2..50da8844 100644 --- a/vendor/virtualenv/src/virtualenv/config/ini.py +++ b/vendor/virtualenv/src/virtualenv/config/ini.py @@ -1,20 +1,15 @@ -from __future__ import absolute_import, unicode_literals - import logging import os +from configparser import ConfigParser +from pathlib import Path from platformdirs import user_config_dir -from virtualenv.info import PY3 -from virtualenv.util import ConfigParser -from virtualenv.util.path import Path -from virtualenv.util.six import ensure_str - from .convert import convert -class IniConfig(object): - VIRTUALENV_CONFIG_FILE_ENV_VAR = ensure_str("VIRTUALENV_CONFIG_FILE") +class IniConfig: + VIRTUALENV_CONFIG_FILE_ENV_VAR = "VIRTUALENV_CONFIG_FILE" STATE = {None: "failed to parse", True: "active", False: "missing"} section = "virtualenv" @@ -23,11 +18,10 @@ def __init__(self, env=None): env = os.environ if env is None else env config_file = env.get(self.VIRTUALENV_CONFIG_FILE_ENV_VAR, None) self.is_env_var = config_file 
is not None - config_file = ( - Path(config_file) - if config_file is not None - else Path(user_config_dir(appname="virtualenv", appauthor="pypa")) / "virtualenv.ini" - ) + if config_file is None: + config_file = Path(user_config_dir(appname="virtualenv", appauthor="pypa")) / "virtualenv.ini" + else: + config_file = Path(config_file) self.config_file = config_file self._cache = {} @@ -40,7 +34,7 @@ def __init__(self, env=None): else: if self.has_config_file: self.config_file = self.config_file.resolve() - self.config_parser = ConfigParser.ConfigParser() + self.config_parser = ConfigParser() try: self._load() self.has_virtualenv_section = self.config_parser.has_section(self.section) @@ -51,14 +45,12 @@ def __init__(self, env=None): def _load(self): with self.config_file.open("rt") as file_handler: - reader = getattr(self.config_parser, "read_file" if PY3 else "readfp") - reader(file_handler) + return self.config_parser.read_file(file_handler) def get(self, key, as_type): cache_key = key, as_type if cache_key in self._cache: return self._cache[cache_key] - # noinspection PyBroadException try: source = "file" raw_value = self.config_parser.get(self.section, key.lower()) @@ -74,11 +66,7 @@ def __bool__(self): @property def epilog(self): - msg = "{}config file {} {} (change{} via env var {})" - return msg.format( - "\n", - self.config_file, - self.STATE[self.has_config_file], - "d" if self.is_env_var else "", - self.VIRTUALENV_CONFIG_FILE_ENV_VAR, + return ( + f"\nconfig file {self.config_file} {self.STATE[self.has_config_file]} " + f"(change{'d' if self.is_env_var else ''} via env var {self.VIRTUALENV_CONFIG_FILE_ENV_VAR})" ) diff --git a/vendor/virtualenv/src/virtualenv/create/__init__.py b/vendor/virtualenv/src/virtualenv/create/__init__.py index 9e764247..7e40c927 100644 --- a/vendor/virtualenv/src/virtualenv/create/__init__.py +++ b/vendor/virtualenv/src/virtualenv/create/__init__.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, unicode_literals - from pathlib import Path from virtualenv import __path_assets__ diff --git a/vendor/virtualenv/src/virtualenv/create/creator.py b/vendor/virtualenv/src/virtualenv/create/creator.py index 9b004611..159c8ff9 100644 --- a/vendor/virtualenv/src/virtualenv/create/creator.py +++ b/vendor/virtualenv/src/virtualenv/create/creator.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, print_function, unicode_literals - import json import logging import os @@ -8,15 +6,11 @@ from argparse import ArgumentTypeError from ast import literal_eval from collections import OrderedDict -from textwrap import dedent - -from six import add_metaclass +from pathlib import Path from virtualenv import __path_pack__ from virtualenv.discovery.cached_py_info import LogCmd -from virtualenv.info import WIN_CPYTHON_2 -from virtualenv.util.path import Path, safe_delete -from virtualenv.util.six import ensure_str, ensure_text +from virtualenv.util.path import safe_delete from virtualenv.util.subprocess import run_cmd from virtualenv.version import __version__ @@ -27,13 +21,12 @@ DEBUG_SCRIPT = _PATH_ASSETS / "debug.py" -class CreatorMeta(object): +class CreatorMeta: def __init__(self): self.error = None -@add_metaclass(ABCMeta) -class Creator(object): +class Creator(metaclass=ABCMeta): """A class that given a python Interpreter creates a virtual environment""" def __init__(self, options, interpreter): @@ -52,20 +45,17 @@ def __init__(self, options, interpreter): self.env = options.env def __repr__(self): - return ensure_str(self.__unicode__()) - - def 
__unicode__(self): - return "{}({})".format(self.__class__.__name__, ", ".join("{}={}".format(k, v) for k, v in self._args())) + return f"{self.__class__.__name__}({', '.join(f'{k}={v}' for k, v in self._args())})" def _args(self): return [ - ("dest", ensure_text(str(self.dest))), + ("dest", str(self.dest)), ("clear", self.clear), ("no_vcs_ignore", self.no_vcs_ignore), ] @classmethod - def can_create(cls, interpreter): + def can_create(cls, interpreter): # noqa: U100 """Determine if we can create a virtual environment. :param interpreter: the interpreter in question @@ -75,7 +65,7 @@ def can_create(cls, interpreter): return True @classmethod - def add_parser_arguments(cls, parser, interpreter, meta, app_data): + def add_parser_arguments(cls, parser, interpreter, meta, app_data): # noqa: U100 """Add CLI arguments for the creator. :param parser: the CLI parser @@ -114,16 +104,14 @@ def validate_dest(cls, raw_value): def non_write_able(dest, value): common = Path(*os.path.commonprefix([value.parts, dest.parts])) - raise ArgumentTypeError( - "the destination {} is not write-able at {}".format(dest.relative_to(common), common), - ) + raise ArgumentTypeError(f"the destination {dest.relative_to(common)} is not write-able at {common}") # the file system must be able to encode # note in newer CPython this is always utf-8 https://www.python.org/dev/peps/pep-0529/ encoding = sys.getfilesystemencoding() refused = OrderedDict() kwargs = {"errors": "ignore"} if encoding != "mbcs" else {} - for char in ensure_text(raw_value): + for char in str(raw_value): try: trip = char.encode(encoding, **kwargs).decode(encoding) if trip == char: @@ -132,23 +120,17 @@ def non_write_able(dest, value): except ValueError: refused[char] = None if refused: - raise ArgumentTypeError( - "the file system codec ({}) cannot handle characters {!r} within {!r}".format( - encoding, - "".join(refused.keys()), - raw_value, - ), - ) + bad = "".join(refused.keys()) + msg = f"the file system codec ({encoding}) cannot handle characters {bad!r} within {raw_value!r}" + raise ArgumentTypeError(msg) if os.pathsep in raw_value: - raise ArgumentTypeError( - "destination {!r} must not contain the path separator ({}) as this would break " - "the activation scripts".format(raw_value, os.pathsep), - ) + msg = f"destination {raw_value!r} must not contain the path separator ({os.pathsep})" + raise ArgumentTypeError(f"{msg} as this would break the activation scripts") value = Path(raw_value) if value.exists() and value.is_file(): - raise ArgumentTypeError("the destination {} already exists and is a file".format(value)) - if (3, 3) <= sys.version_info <= (3, 6): + raise ArgumentTypeError(f"the destination {value} already exists and is a file") + if sys.version_info <= (3, 6): # pre 3.6 resolve is always strict, aka must exists, sidestep by using os.path operation dest = Path(os.path.realpath(raw_value)) else: @@ -156,7 +138,7 @@ def non_write_able(dest, value): value = dest while dest: if dest.exists(): - if os.access(ensure_text(str(dest)), os.W_OK): + if os.access(str(dest), os.W_OK): break else: non_write_able(dest, value) @@ -187,14 +169,7 @@ def setup_ignore_vcs(self): # mark this folder to be ignored by VCS, handle https://www.python.org/dev/peps/pep-0610/#registered-vcs git_ignore = self.dest / ".gitignore" if not git_ignore.exists(): - git_ignore.write_text( - dedent( - """ - # created by virtualenv automatically - * - """, - ).lstrip(), - ) + git_ignore.write_text("# created by virtualenv automatically\n*\n") # Mercurial - does not support 
the .hgignore file inside a subdirectory directly, but only if included via the # subinclude directive from root, at which point on might as well ignore the directory itself, see # https://www.selenic.com/mercurial/hgignore.5.html for more details @@ -210,26 +185,29 @@ def debug(self): self._debug = get_env_debug_info(self.exe, self.debug_script(), self.app_data, self.env) return self._debug - # noinspection PyMethodMayBeStatic - def debug_script(self): + @staticmethod + def debug_script(): return DEBUG_SCRIPT def get_env_debug_info(env_exe, debug_script, app_data, env): env = env.copy() - env.pop(str("PYTHONPATH"), None) + env.pop("PYTHONPATH", None) with app_data.ensure_extracted(debug_script) as debug_script: cmd = [str(env_exe), str(debug_script)] - if WIN_CPYTHON_2: - cmd = [ensure_text(i) for i in cmd] - logging.debug(str("debug via %r"), LogCmd(cmd)) + logging.debug("debug via %r", LogCmd(cmd)) code, out, err = run_cmd(cmd) - # noinspection PyBroadException try: if code != 0: - result = literal_eval(out) + if out: + result = literal_eval(out) + else: + if code == 2 and "file" in err: + # Re-raise FileNotFoundError from `run_cmd()` + raise OSError(err) + raise Exception(err) else: result = json.loads(out) if err: @@ -239,3 +217,9 @@ def get_env_debug_info(env_exe, debug_script, app_data, env): if "sys" in result and "path" in result["sys"]: del result["sys"]["path"][0] return result + + +__all__ = [ + "Creator", + "CreatorMeta", +] diff --git a/vendor/virtualenv/src/virtualenv/create/debug.py b/vendor/virtualenv/src/virtualenv/create/debug.py index 0cdaa494..583d9991 100644 --- a/vendor/virtualenv/src/virtualenv/create/debug.py +++ b/vendor/virtualenv/src/virtualenv/create/debug.py @@ -28,7 +28,7 @@ def run(): except ImportError: # pragma: no cover # this is possible if the standard library cannot be accessed # noinspection PyPep8Naming - OrderedDict = dict # pragma: no cover + OrderedDict = dict # pragma: no cover # noqa: N806 result = OrderedDict([("sys", OrderedDict())]) path_keys = ( "executable", diff --git a/vendor/virtualenv/src/virtualenv/create/describe.py b/vendor/virtualenv/src/virtualenv/create/describe.py index 6f05ff1e..85752674 100644 --- a/vendor/virtualenv/src/virtualenv/create/describe.py +++ b/vendor/virtualenv/src/virtualenv/create/describe.py @@ -1,17 +1,11 @@ -from __future__ import absolute_import, print_function, unicode_literals - from abc import ABCMeta from collections import OrderedDict - -from six import add_metaclass +from pathlib import Path from virtualenv.info import IS_WIN -from virtualenv.util.path import Path -from virtualenv.util.six import ensure_text -@add_metaclass(ABCMeta) -class Describe(object): +class Describe(metaclass=ABCMeta): """Given a host interpreter tell us information about what the created interpreter might look like""" suffix = ".exe" if IS_WIN else "" @@ -59,26 +53,25 @@ def stdlib_platform(self): @property def _config_vars(self): if self._conf_vars is None: - self._conf_vars = self._calc_config_vars(ensure_text(str(self.dest))) + self._conf_vars = self._calc_config_vars(self.dest) return self._conf_vars def _calc_config_vars(self, to): - return { - k: (to if v.startswith(self.interpreter.prefix) else v) for k, v in self.interpreter.sysconfig_vars.items() - } + sys_vars = self.interpreter.sysconfig_vars + return {k: (to if v.startswith(self.interpreter.prefix) else v) for k, v in sys_vars.items()} @classmethod - def can_describe(cls, interpreter): + def can_describe(cls, interpreter): # noqa: U100 """Knows means it knows how the 
output will look""" return True @property def env_name(self): - return ensure_text(self.dest.parts[-1]) + return self.dest.parts[-1] @property def exe(self): - return self.bin_dir / "{}{}".format(self.exe_stem(), self.suffix) + return self.bin_dir / f"{self.exe_stem()}{self.suffix}" @classmethod def exe_stem(cls): @@ -86,32 +79,37 @@ def exe_stem(cls): raise NotImplementedError def script(self, name): - return self.script_dir / "{}{}".format(name, self.suffix) + return self.script_dir / f"{name}{self.suffix}" -@add_metaclass(ABCMeta) -class Python2Supports(Describe): +class Python2Supports(Describe, metaclass=ABCMeta): @classmethod def can_describe(cls, interpreter): - return interpreter.version_info.major == 2 and super(Python2Supports, cls).can_describe(interpreter) + return interpreter.version_info.major == 2 and super().can_describe(interpreter) -@add_metaclass(ABCMeta) -class Python3Supports(Describe): +class Python3Supports(Describe, metaclass=ABCMeta): @classmethod def can_describe(cls, interpreter): - return interpreter.version_info.major == 3 and super(Python3Supports, cls).can_describe(interpreter) + return interpreter.version_info.major == 3 and super().can_describe(interpreter) -@add_metaclass(ABCMeta) -class PosixSupports(Describe): +class PosixSupports(Describe, metaclass=ABCMeta): @classmethod def can_describe(cls, interpreter): - return interpreter.os == "posix" and super(PosixSupports, cls).can_describe(interpreter) + return interpreter.os == "posix" and super().can_describe(interpreter) -@add_metaclass(ABCMeta) -class WindowsSupports(Describe): +class WindowsSupports(Describe, metaclass=ABCMeta): @classmethod def can_describe(cls, interpreter): - return interpreter.os == "nt" and super(WindowsSupports, cls).can_describe(interpreter) + return interpreter.os == "nt" and super().can_describe(interpreter) + + +__all__ = [ + "Describe", + "Python2Supports", + "Python3Supports", + "PosixSupports", + "WindowsSupports", +] diff --git a/vendor/virtualenv/src/virtualenv/create/pyenv_cfg.py b/vendor/virtualenv/src/virtualenv/create/pyenv_cfg.py index 1a8d8244..9193a28c 100644 --- a/vendor/virtualenv/src/virtualenv/create/pyenv_cfg.py +++ b/vendor/virtualenv/src/virtualenv/create/pyenv_cfg.py @@ -1,12 +1,8 @@ -from __future__ import absolute_import, unicode_literals - import logging from collections import OrderedDict -from virtualenv.util.six import ensure_text - -class PyEnvCfg(object): +class PyEnvCfg: def __init__(self, content, path): self.content = content self.path = path @@ -31,10 +27,10 @@ def _read_values(path): return content def write(self): - logging.debug("write %s", ensure_text(str(self.path))) + logging.debug("write %s", self.path) text = "" for key, value in self.content.items(): - line = "{} = {}".format(key, value) + line = f"{key} = {value}" logging.debug("\t%s", line) text += line text += "\n" @@ -58,4 +54,9 @@ def update(self, other): return self def __repr__(self): - return "{}(path={})".format(self.__class__.__name__, self.path) + return f"{self.__class__.__name__}(path={self.path})" + + +__all__ = [ + "PyEnvCfg", +] diff --git a/vendor/virtualenv/src/virtualenv/create/via_global_ref/_virtualenv.py.template b/vendor/virtualenv/src/virtualenv/create/via_global_ref/_virtualenv.py.template index 6c51ca0d..faee64cf 100644 --- a/vendor/virtualenv/src/virtualenv/create/via_global_ref/_virtualenv.py.template +++ b/vendor/virtualenv/src/virtualenv/create/via_global_ref/_virtualenv.py.template @@ -48,7 +48,7 @@ if sys.version_info > (3, 4): # See 
https://github.com/pypa/virtualenv/issues/1895 for details. lock = [] - def find_spec(self, fullname, path, target=None): + def find_spec(self, fullname, path, target=None): # noqa: U100 if fullname in _DISTUTILS_PATCH and self.fullname is None: # initialize lock[0] lazily if len(self.lock) == 0: diff --git a/vendor/virtualenv/src/virtualenv/create/via_global_ref/api.py b/vendor/virtualenv/src/virtualenv/create/via_global_ref/api.py index f90eb049..005dd68f 100644 --- a/vendor/virtualenv/src/virtualenv/create/via_global_ref/api.py +++ b/vendor/virtualenv/src/virtualenv/create/via_global_ref/api.py @@ -1,22 +1,17 @@ -from __future__ import absolute_import, unicode_literals - import logging import os from abc import ABCMeta from importlib.resources import read_text - -from six import add_metaclass +from pathlib import Path from virtualenv.info import fs_supports_symlink -from virtualenv.util.path import Path -from virtualenv.util.six import ensure_text from ..creator import Creator, CreatorMeta class ViaGlobalRefMeta(CreatorMeta): def __init__(self): - super(ViaGlobalRefMeta, self).__init__() + super().__init__() self.copy_error = None self.symlink_error = None if not fs_supports_symlink(): @@ -31,10 +26,9 @@ def can_symlink(self): return not self.symlink_error -@add_metaclass(ABCMeta) -class ViaGlobalRefApi(Creator): +class ViaGlobalRefApi(Creator, metaclass=ABCMeta): def __init__(self, options, interpreter): - super(ViaGlobalRefApi, self).__init__(options, interpreter) + super().__init__(options, interpreter) self.symlinks = self._should_symlink(options) self.enable_system_site_package = options.system_site @@ -57,7 +51,7 @@ def _should_symlink(options): @classmethod def add_parser_arguments(cls, parser, interpreter, meta, app_data): - super(ViaGlobalRefApi, cls).add_parser_arguments(parser, interpreter, meta, app_data) + super().add_parser_arguments(parser, interpreter, meta, app_data) parser.add_argument( "--system-site-packages", default=False, @@ -93,10 +87,10 @@ def install_patch(self): text = self.env_patch_text() if text: pth = self.purelib / "_virtualenv.pth" - logging.debug("create virtualenv import hook file %s", ensure_text(str(pth))) + logging.debug("create virtualenv import hook file %s", pth) pth.write_text("import _virtualenv") dest_path = self.purelib / "_virtualenv.py" - logging.debug("create %s", ensure_text(str(dest_path))) + logging.debug("create %s", dest_path) dest_path.write_text(text) def env_patch_text(self): @@ -106,8 +100,14 @@ def env_patch_text(self): return text.replace('"__SCRIPT_DIR__"', repr(os.path.relpath(str(self.script_dir), str(self.purelib)))) def _args(self): - return super(ViaGlobalRefApi, self)._args() + [("global", self.enable_system_site_package)] + return super()._args() + [("global", self.enable_system_site_package)] def set_pyenv_cfg(self): - super(ViaGlobalRefApi, self).set_pyenv_cfg() + super().set_pyenv_cfg() self.pyenv_cfg["include-system-site-packages"] = "true" if self.enable_system_site_package else "false" + + +__all__ = [ + "ViaGlobalRefMeta", + "ViaGlobalRefApi", +] diff --git a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/builtin_way.py b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/builtin_way.py index 279ee809..e321593a 100644 --- a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/builtin_way.py +++ b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/builtin_way.py @@ -1,17 +1,17 @@ -from __future__ import absolute_import, unicode_literals - from abc import ABCMeta 
-from six import add_metaclass - from virtualenv.create.creator import Creator from virtualenv.create.describe import Describe -@add_metaclass(ABCMeta) -class VirtualenvBuiltin(Creator, Describe): +class VirtualenvBuiltin(Creator, Describe, metaclass=ABCMeta): """A creator that does operations itself without delegation, if we can create it we can also describe it""" def __init__(self, options, interpreter): Creator.__init__(self, options, interpreter) Describe.__init__(self, self.dest, interpreter) + + +__all__ = [ + "VirtualenvBuiltin", +] diff --git a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/cpython/__init__.py b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/cpython/__init__.py index 01e6d4f4..e69de29b 100644 --- a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/cpython/__init__.py +++ b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/cpython/__init__.py @@ -1 +0,0 @@ -from __future__ import absolute_import, unicode_literals diff --git a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/cpython/common.py b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/cpython/common.py index c93f9f31..b2f79443 100644 --- a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/cpython/common.py +++ b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/cpython/common.py @@ -1,45 +1,36 @@ -from __future__ import absolute_import, unicode_literals - from abc import ABCMeta from collections import OrderedDict - -from six import add_metaclass +from pathlib import Path from virtualenv.create.describe import PosixSupports, WindowsSupports from virtualenv.create.via_global_ref.builtin.ref import RefMust, RefWhen -from virtualenv.util.path import Path from ..via_global_self_do import ViaGlobalRefVirtualenvBuiltin -@add_metaclass(ABCMeta) -class CPython(ViaGlobalRefVirtualenvBuiltin): +class CPython(ViaGlobalRefVirtualenvBuiltin, metaclass=ABCMeta): @classmethod def can_describe(cls, interpreter): - return interpreter.implementation == "CPython" and super(CPython, cls).can_describe(interpreter) + return interpreter.implementation == "CPython" and super().can_describe(interpreter) @classmethod def exe_stem(cls): return "python" -@add_metaclass(ABCMeta) -class CPythonPosix(CPython, PosixSupports): +class CPythonPosix(CPython, PosixSupports, metaclass=ABCMeta): """Create a CPython virtual environment on POSIX platforms""" @classmethod def _executables(cls, interpreter): host_exe = Path(interpreter.system_executable) major, minor = interpreter.version_info.major, interpreter.version_info.minor - targets = OrderedDict( - (i, None) for i in ["python", "python{}".format(major), "python{}.{}".format(major, minor), host_exe.name] - ) + targets = OrderedDict((i, None) for i in ["python", f"python{major}", f"python{major}.{minor}", host_exe.name]) must = RefMust.COPY if interpreter.version_info.major == 2 else RefMust.NA yield host_exe, list(targets.keys()), must, RefWhen.ANY -@add_metaclass(ABCMeta) -class CPythonWindows(CPython, WindowsSupports): +class CPythonWindows(CPython, WindowsSupports, metaclass=ABCMeta): @classmethod def _executables(cls, interpreter): # symlink of the python executables does not work reliably, copy always instead @@ -63,3 +54,11 @@ def is_mac_os_framework(interpreter): value = "Python3" if interpreter.version_info.major == 3 else "Python" return framework_var == value return False + + +__all__ = [ + "CPython", + "CPythonPosix", + "CPythonWindows", + "is_mac_os_framework", +] diff 
--git a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/cpython/cpython2.py b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/cpython/cpython2.py index dc822bcb..9e292344 100644 --- a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/cpython/cpython2.py +++ b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/cpython/cpython2.py @@ -1,29 +1,23 @@ -from __future__ import absolute_import, unicode_literals - import abc import logging - -from six import add_metaclass +from pathlib import Path from virtualenv.create.via_global_ref.builtin.ref import PathRefToDest -from virtualenv.util.path import Path from ..python2.python2 import Python2 from .common import CPython, CPythonPosix, CPythonWindows, is_mac_os_framework -@add_metaclass(abc.ABCMeta) -class CPython2(CPython, Python2): +class CPython2(CPython, Python2, metaclass=abc.ABCMeta): """Create a CPython version 2 virtual environment""" @classmethod def sources(cls, interpreter): - for src in super(CPython2, cls).sources(interpreter): - yield src + yield from super().sources(interpreter) # include folder needed on Python 2 as we don't have pyenv.cfg host_include_marker = cls.host_include_marker(interpreter) if host_include_marker.exists(): - yield PathRefToDest(host_include_marker.parent, dest=lambda self, _: self.include) + yield PathRefToDest(host_include_marker.parent, dest=lambda self, _: self.include) # noqa: U101 @classmethod def needs_stdlib_py_module(cls): @@ -40,12 +34,10 @@ def include(self): @classmethod def modules(cls): - return [ - "os", # landmark to set sys.prefix - ] + return ["os"] # landmark to set sys.prefix def ensure_directories(self): - dirs = super(CPython2, self).ensure_directories() + dirs = super().ensure_directories() host_include_marker = self.host_include_marker(self.interpreter) if host_include_marker.exists(): dirs.add(self.include.parent) @@ -54,20 +46,18 @@ def ensure_directories(self): return dirs -@add_metaclass(abc.ABCMeta) -class CPython2PosixBase(CPython2, CPythonPosix): +class CPython2PosixBase(CPython2, CPythonPosix, metaclass=abc.ABCMeta): """common to macOs framework builds and other posix CPython2""" @classmethod def sources(cls, interpreter): - for src in super(CPython2PosixBase, cls).sources(interpreter): - yield src + yield from super().sources(interpreter) # check if the makefile exists and if so make it available under the virtual environment make_file = Path(interpreter.sysconfig["makefile_filename"]) if make_file.exists() and str(make_file).startswith(interpreter.prefix): under_prefix = make_file.relative_to(Path(interpreter.prefix)) - yield PathRefToDest(make_file, dest=lambda self, s: self.dest / under_prefix) + yield PathRefToDest(make_file, dest=lambda self, s: self.dest / under_prefix) # noqa: U100 class CPython2Posix(CPython2PosixBase): @@ -75,12 +65,11 @@ class CPython2Posix(CPython2PosixBase): @classmethod def can_describe(cls, interpreter): - return is_mac_os_framework(interpreter) is False and super(CPython2Posix, cls).can_describe(interpreter) + return is_mac_os_framework(interpreter) is False and super().can_describe(interpreter) @classmethod def sources(cls, interpreter): - for src in super(CPython2Posix, cls).sources(interpreter): - yield src + yield from super().sources(interpreter) # landmark for exec_prefix exec_marker_file, to_path, _ = cls.from_stdlib(cls.mappings(interpreter), "lib-dynload") yield PathRefToDest(exec_marker_file, dest=to_path) @@ -91,8 +80,7 @@ class CPython2Windows(CPython2, CPythonWindows): 
@classmethod def sources(cls, interpreter): - for src in super(CPython2Windows, cls).sources(interpreter): - yield src + yield from super().sources(interpreter) py27_dll = Path(interpreter.system_executable).parent / "python27.dll" if py27_dll.exists(): # this might be global in the Windows folder in which case it's alright to be missing yield PathRefToDest(py27_dll, dest=cls.to_bin) @@ -100,3 +88,11 @@ def sources(cls, interpreter): libs = Path(interpreter.system_prefix) / "libs" if libs.exists(): yield PathRefToDest(libs, dest=lambda self, s: self.dest / s.name) + + +__all__ = [ + "CPython2", + "CPython2PosixBase", + "CPython2Posix", + "CPython2Windows", +] diff --git a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/cpython/cpython3.py b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/cpython/cpython3.py index fcd92b82..433dc4a6 100644 --- a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/cpython/cpython3.py +++ b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/cpython/cpython3.py @@ -1,30 +1,28 @@ -from __future__ import absolute_import, unicode_literals - import abc +import fnmatch +from itertools import chain +from operator import methodcaller as method +from pathlib import Path from textwrap import dedent -from six import add_metaclass - from virtualenv.create.describe import Python3Supports from virtualenv.create.via_global_ref.builtin.ref import PathRefToDest from virtualenv.create.via_global_ref.store import is_store_python -from virtualenv.util.path import Path from .common import CPython, CPythonPosix, CPythonWindows, is_mac_os_framework -@add_metaclass(abc.ABCMeta) -class CPython3(CPython, Python3Supports): - """ """ +class CPython3(CPython, Python3Supports, metaclass=abc.ABCMeta): + """CPython 3 or later""" class CPython3Posix(CPythonPosix, CPython3): @classmethod def can_describe(cls, interpreter): - return is_mac_os_framework(interpreter) is False and super(CPython3Posix, cls).can_describe(interpreter) + return is_mac_os_framework(interpreter) is False and super().can_describe(interpreter) def env_patch_text(self): - text = super(CPython3Posix, self).env_patch_text() + text = super().env_patch_text() if self.pyvenv_launch_patch_active(self.interpreter): text += dedent( """ @@ -49,15 +47,23 @@ class CPython3Windows(CPythonWindows, CPython3): def setup_meta(cls, interpreter): if is_store_python(interpreter): # store python is not supported here return None - return super(CPython3Windows, cls).setup_meta(interpreter) + return super().setup_meta(interpreter) @classmethod def sources(cls, interpreter): - for src in super(CPython3Windows, cls).sources(interpreter): - yield src - if not cls.has_shim(interpreter): - for src in cls.include_dll_and_pyd(interpreter): - yield src + if cls.has_shim(interpreter): + refs = cls.executables(interpreter) + else: + refs = chain( + cls.executables(interpreter), + cls.dll_and_pyd(interpreter), + cls.python_zip(interpreter), + ) + yield from refs + + @classmethod + def executables(cls, interpreter): + return super().sources(interpreter) @classmethod def has_shim(cls, interpreter): @@ -76,16 +82,48 @@ def host_python(cls, interpreter): # starting with CPython 3.7 Windows ships with a venvlauncher.exe that avoids the need for dll/pyd copies # it also means the wrapper must be copied to avoid bugs such as https://bugs.python.org/issue42013 return cls.shim(interpreter) - return super(CPython3Windows, cls).host_python(interpreter) + return super().host_python(interpreter) @classmethod - def 
include_dll_and_pyd(cls, interpreter): + def dll_and_pyd(cls, interpreter): + folders = [Path(interpreter.system_executable).parent] + + # May be missing on some Python hosts. + # See https://github.com/pypa/virtualenv/issues/2368 dll_folder = Path(interpreter.system_prefix) / "DLLs" - host_exe_folder = Path(interpreter.system_executable).parent - for folder in [host_exe_folder, dll_folder]: + if dll_folder.is_dir(): + folders.append(dll_folder) + + for folder in folders: for file in folder.iterdir(): if file.suffix in (".pyd", ".dll"): - yield PathRefToDest(file, dest=cls.to_dll_and_pyd) + yield PathRefToDest(file, cls.to_bin) - def to_dll_and_pyd(self, src): - return self.bin_dir / src.name + @classmethod + def python_zip(cls, interpreter): + """ + "python{VERSION}.zip" contains compiled *.pyc std lib packages, where + "VERSION" is `py_version_nodot` var from the `sysconfig` module. + :see: https://docs.python.org/3/using/windows.html#the-embeddable-package + :see: `discovery.py_info.PythonInfo` class (interpreter). + :see: `python -m sysconfig` output. + + :note: The embeddable Python distribution for Windows includes + "python{VERSION}.zip" and "python{VERSION}._pth" files. User can + move/rename *zip* file and edit `sys.path` by editing *_pth* file. + Here the `pattern` is used only for the default *zip* file name! + """ + pattern = f"*python{interpreter.version_nodot}.zip" + matches = fnmatch.filter(interpreter.path, pattern) + matched_paths = map(Path, matches) + existing_paths = filter(method("exists"), matched_paths) + path = next(existing_paths, None) + if path is not None: + yield PathRefToDest(path, cls.to_bin) + + +__all__ = [ + "CPython3", + "CPython3Posix", + "CPython3Windows", +] diff --git a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/cpython/mac_os.py b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/cpython/mac_os.py index d64f0d99..9fc25509 100644 --- a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/cpython/mac_os.py +++ b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/cpython/mac_os.py @@ -1,32 +1,27 @@ -# -*- coding: utf-8 -*- """The Apple Framework builds require their own customization""" import logging import os import struct import subprocess from abc import ABCMeta, abstractmethod +from pathlib import Path from textwrap import dedent -from six import add_metaclass, text_type - from virtualenv.create.via_global_ref.builtin.ref import ExePathRefToDest, PathRefToDest, RefMust from virtualenv.info import IS_MAC_ARM64 -from virtualenv.util.path import Path -from virtualenv.util.six import ensure_text from .common import CPython, CPythonPosix, is_mac_os_framework from .cpython2 import CPython2PosixBase from .cpython3 import CPython3 -@add_metaclass(ABCMeta) -class CPythonmacOsFramework(CPython): +class CPythonmacOsFramework(CPython, metaclass=ABCMeta): @classmethod def can_describe(cls, interpreter): - return is_mac_os_framework(interpreter) and super(CPythonmacOsFramework, cls).can_describe(interpreter) + return is_mac_os_framework(interpreter) and super().can_describe(interpreter) def create(self): - super(CPythonmacOsFramework, self).create() + super().create() # change the install_name of the copied python executables target = self.desired_mach_o_image_path() @@ -42,7 +37,7 @@ def create(self): @classmethod def _executables(cls, interpreter): - for _, targets, must, when in super(CPythonmacOsFramework, cls)._executables(interpreter): + for _, targets, must, when in super()._executables(interpreter): 
# Make sure we use the embedded interpreter inside the framework, even if sys.executable points to the # stub executable in ${sys.prefix}/bin. # See http://groups.google.com/group/python-virtualenv/browse_thread/thread/17cab2f85da75951 @@ -61,8 +56,8 @@ def desired_mach_o_image_path(self): class CPython2macOsFramework(CPythonmacOsFramework, CPython2PosixBase): @classmethod def can_create(cls, interpreter): - if not IS_MAC_ARM64 and super(CPython2macOsFramework, cls).can_describe(interpreter): - return super(CPython2macOsFramework, cls).can_create(interpreter) + if not IS_MAC_ARM64 and super().can_describe(interpreter): + return super().can_create(interpreter) return False def current_mach_o_image_path(self): @@ -73,37 +68,34 @@ def desired_mach_o_image_path(self): @classmethod def sources(cls, interpreter): - for src in super(CPython2macOsFramework, cls).sources(interpreter): - yield src + yield from super().sources(interpreter) # landmark for exec_prefix exec_marker_file, to_path, _ = cls.from_stdlib(cls.mappings(interpreter), "lib-dynload") yield PathRefToDest(exec_marker_file, dest=to_path) # add a copy of the host python image exe = Path(interpreter.prefix) / "Python" - yield PathRefToDest(exe, dest=lambda self, _: self.dest / "Python", must=RefMust.COPY) + yield PathRefToDest(exe, dest=lambda self, _: self.dest / "Python", must=RefMust.COPY) # noqa: U101 # add a symlink to the Resources dir resources = Path(interpreter.prefix) / "Resources" - yield PathRefToDest(resources, dest=lambda self, _: self.dest / "Resources") + yield PathRefToDest(resources, dest=lambda self, _: self.dest / "Resources") # noqa: U101 @property def reload_code(self): - result = super(CPython2macOsFramework, self).reload_code + result = super().reload_code result = dedent( - """ + f""" # the bundled site.py always adds the global site package if we're on python framework build, escape this import sysconfig config = sysconfig.get_config_vars() before = config["PYTHONFRAMEWORK"] try: config["PYTHONFRAMEWORK"] = "" - {} + {result} finally: config["PYTHONFRAMEWORK"] = before - """.format( - result, - ), + """, ) return result @@ -132,11 +124,11 @@ def fix_signature(self): # Reset the signing on Darwin since the exe has been modified. # Note codesign fails on the original exe, it needs to be copied and moved back. 
bak_dir.mkdir(parents=True, exist_ok=True) - subprocess.check_call(["cp", text_type(exe), text_type(bak_dir)]) - subprocess.check_call(["mv", text_type(bak_dir / exe.name), text_type(exe)]) + subprocess.check_call(["cp", str(exe), str(bak_dir)]) + subprocess.check_call(["mv", str(bak_dir / exe.name), str(exe)]) bak_dir.rmdir() metadata = "--preserve-metadata=identifier,entitlements,flags,runtime" - cmd = ["codesign", "-s", "-", metadata, "-f", text_type(exe)] + cmd = ["codesign", "-s", "-", metadata, "-f", str(exe)] logging.debug("Changing Signature: %s", cmd) subprocess.check_call(cmd) except Exception: @@ -153,29 +145,26 @@ def desired_mach_o_image_path(self): @classmethod def sources(cls, interpreter): - for src in super(CPython3macOsFramework, cls).sources(interpreter): - yield src + yield from super().sources(interpreter) # add a symlink to the host python image exe = Path(interpreter.prefix) / "Python3" - yield PathRefToDest(exe, dest=lambda self, _: self.dest / ".Python", must=RefMust.SYMLINK) + yield PathRefToDest(exe, dest=lambda self, _: self.dest / ".Python", must=RefMust.SYMLINK) # noqa: U101 @property def reload_code(self): - result = super(CPython3macOsFramework, self).reload_code + result = super().reload_code result = dedent( - """ + f""" # the bundled site.py always adds the global site package if we're on python framework build, escape this import sys before = sys._framework try: sys._framework = None - {} + {result} finally: sys._framework = before - """.format( - result, - ), + """, ) return result @@ -205,7 +194,7 @@ def fix_mach_o(exe, current, new, max_size): unneeded bits of information, however Mac OS X 10.5 and earlier cannot read this new Link Edit table format. """ try: - logging.debug("change Mach-O for %s from %s to %s", ensure_text(exe), current, ensure_text(new)) + logging.debug("change Mach-O for %s from %s to %s", exe, current, new) _builtin_change_mach_o(max_size)(exe, current, new) except Exception as e: logging.warning("Could not call _builtin_change_mac_o: %s. " "Trying to call install_name_tool instead.", e) @@ -218,16 +207,16 @@ def fix_mach_o(exe, current, new, max_size): def _builtin_change_mach_o(maxint): - MH_MAGIC = 0xFEEDFACE - MH_CIGAM = 0xCEFAEDFE - MH_MAGIC_64 = 0xFEEDFACF - MH_CIGAM_64 = 0xCFFAEDFE - FAT_MAGIC = 0xCAFEBABE - BIG_ENDIAN = ">" - LITTLE_ENDIAN = "<" - LC_LOAD_DYLIB = 0xC - - class FileView(object): + MH_MAGIC = 0xFEEDFACE # noqa: N806 + MH_CIGAM = 0xCEFAEDFE # noqa: N806 + MH_MAGIC_64 = 0xFEEDFACF # noqa: N806 + MH_CIGAM_64 = 0xCFFAEDFE # noqa: N806 + FAT_MAGIC = 0xCAFEBABE # noqa: N806 + BIG_ENDIAN = ">" # noqa: N806 + LITTLE_ENDIAN = "<" # noqa: N806 + LC_LOAD_DYLIB = 0xC # noqa: N806 + + class FileView: """A proxy for file-like objects that exposes a given view of a file. 
Modified from macholib.""" def __init__(self, file_obj, start=0, size=maxint): @@ -240,15 +229,15 @@ def __init__(self, file_obj, start=0, size=maxint): self._pos = 0 def __repr__(self): - return "<fileview [{}, {}] {!r}>".format(self._start, self._end, self._file_obj) + return f"<fileview [{self._start}, {self._end}] {self._file_obj!r}>" def tell(self): return self._pos def _checkwindow(self, seek_to, op): if not (self._start <= seek_to <= self._end): - msg = "{} to offset {:d} is outside window [{:d}, {:d}]".format(op, seek_to, self._start, self._end) - raise IOError(msg) + msg = f"{op} to offset {seek_to:d} is outside window [{self._start:d}, {self._end:d}]" + raise OSError(msg) def seek(self, offset, whence=0): seek_to = offset @@ -259,7 +248,7 @@ def seek(self, offset, whence=0): elif whence == os.SEEK_END: seek_to += self._end else: - raise IOError("Invalid whence argument to seek: {!r}".format(whence)) + raise OSError(f"Invalid whence argument to seek: {whence!r}") self._checkwindow(seek_to, "seek") self._file_obj.seek(seek_to) self._pos = seek_to - self._start @@ -345,3 +334,10 @@ def do_file(file, offset=0, size=maxint): do_file(f) return mach_o_change + + +__all__ = [ + "CPythonmacOsFramework", + "CPython2macOsFramework", + "CPython3macOsFramework", +] diff --git a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/pypy/common.py b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/pypy/common.py index b0cd401f..17cf733d 100644 --- a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/pypy/common.py +++ b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/pypy/common.py @@ -1,41 +1,39 @@ -from __future__ import absolute_import, unicode_literals - import abc - -from six import add_metaclass +from pathlib import Path from virtualenv.create.via_global_ref.builtin.ref import PathRefToDest, RefMust, RefWhen -from virtualenv.util.path import Path from ..via_global_self_do import ViaGlobalRefVirtualenvBuiltin -@add_metaclass(abc.ABCMeta) -class PyPy(ViaGlobalRefVirtualenvBuiltin): +class PyPy(ViaGlobalRefVirtualenvBuiltin, metaclass=abc.ABCMeta): @classmethod def can_describe(cls, interpreter): - return interpreter.implementation == "PyPy" and super(PyPy, cls).can_describe(interpreter) + return interpreter.implementation == "PyPy" and super().can_describe(interpreter) @classmethod def _executables(cls, interpreter): host = Path(interpreter.system_executable) - targets = sorted("{}{}".format(name, PyPy.suffix) for name in cls.exe_names(interpreter)) + targets = sorted(f"{name}{PyPy.suffix}" for name in cls.exe_names(interpreter)) must = RefMust.COPY if interpreter.version_info.major == 2 else RefMust.NA yield host, targets, must, RefWhen.ANY + @classmethod + def executables(cls, interpreter): + yield from super().sources(interpreter) + @classmethod def exe_names(cls, interpreter): return { cls.exe_stem(), "python", - "python{}".format(interpreter.version_info.major), - "python{}.{}".format(*interpreter.version_info), + f"python{interpreter.version_info.major}", + f"python{interpreter.version_info.major}.{interpreter.version_info.minor}", } @classmethod def sources(cls, interpreter): - for src in super(PyPy, cls).sources(interpreter): - yield src + yield from cls.executables(interpreter) for host in cls._add_shared_libs(interpreter): yield PathRefToDest(host, dest=lambda self, s: self.bin_dir / s.name) @@ -43,9 +41,13 @@ def sources(cls, interpreter): def _add_shared_libs(cls, interpreter): # https://bitbucket.org/pypy/pypy/issue/1922/future-proofing-virtualenv python_dir =
Path(interpreter.system_executable).resolve().parent - for src in cls._shared_libs(python_dir): - yield src + yield from cls._shared_libs(python_dir) @classmethod - def _shared_libs(cls, python_dir): + def _shared_libs(cls, python_dir): # noqa: U100 raise NotImplementedError + + +__all__ = [ + "PyPy", +] diff --git a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/pypy/pypy2.py b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/pypy/pypy2.py index 78349d44..80313396 100644 --- a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/pypy/pypy2.py +++ b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/pypy/pypy2.py @@ -1,22 +1,17 @@ -from __future__ import absolute_import, unicode_literals - import abc import logging import os - -from six import add_metaclass +from pathlib import Path from virtualenv.create.describe import PosixSupports, WindowsSupports from virtualenv.create.via_global_ref.builtin.ref import PathRefToDest -from virtualenv.util.path import Path from ..python2.python2 import Python2 from .common import PyPy -@add_metaclass(abc.ABCMeta) -class PyPy2(PyPy, Python2): - """ """ +class PyPy2(PyPy, Python2, metaclass=abc.ABCMeta): + """PyPy 2""" @classmethod def exe_stem(cls): @@ -24,12 +19,11 @@ def exe_stem(cls): @classmethod def sources(cls, interpreter): - for src in super(PyPy2, cls).sources(interpreter): - yield src + yield from super().sources(interpreter) # include folder needed on Python 2 as we don't have pyenv.cfg host_include_marker = cls.host_include_marker(interpreter) if host_include_marker.exists(): - yield PathRefToDest(host_include_marker.parent, dest=lambda self, _: self.include) + yield PathRefToDest(host_include_marker.parent, dest=lambda self, _: self.include) # noqa: U101 @classmethod def needs_stdlib_py_module(cls): @@ -46,7 +40,7 @@ def include(self): @classmethod def modules(cls): # pypy2 uses some modules before the site.py loads, so we need to include these too - return super(PyPy2, cls).modules() + [ + return super().modules() + [ "os", "copy_reg", "genericpath", @@ -61,7 +55,7 @@ def lib_pypy(self): return self.dest / "lib_pypy" def ensure_directories(self): - dirs = super(PyPy2, self).ensure_directories() + dirs = super().ensure_directories() dirs.add(self.lib_pypy) host_include_marker = self.host_include_marker(self.interpreter) if host_include_marker.exists(): @@ -76,7 +70,7 @@ def skip_rewrite(self): PyPy2 built-in imports are handled by this path entry, don't overwrite to not disable it see: https://github.com/pypa/virtualenv/issues/1652 """ - return 'or path.endswith("lib_pypy{}__extensions__") # PyPy2 built-in import marker'.format(os.sep) + return f'or path.endswith("lib_pypy{os.sep}__extensions__") # PyPy2 built-in import marker' class PyPy2Posix(PyPy2, PosixSupports): @@ -84,7 +78,7 @@ class PyPy2Posix(PyPy2, PosixSupports): @classmethod def modules(cls): - return super(PyPy2Posix, cls).modules() + ["posixpath"] + return super().modules() + ["posixpath"] @classmethod def _shared_libs(cls, python_dir): @@ -96,11 +90,10 @@ def lib(self): @classmethod def sources(cls, interpreter): - for src in super(PyPy2Posix, cls).sources(interpreter): - yield src + yield from super().sources(interpreter) host_lib = Path(interpreter.system_prefix) / "lib" if host_lib.exists(): - yield PathRefToDest(host_lib, dest=lambda self, _: self.lib) + yield PathRefToDest(host_lib, dest=lambda self, _: self.lib) # noqa: U101 class Pypy2Windows(PyPy2, WindowsSupports): @@ -108,7 +101,7 @@ class Pypy2Windows(PyPy2, 
WindowsSupports): @classmethod def modules(cls): - return super(Pypy2Windows, cls).modules() + ["ntpath"] + return super().modules() + ["ntpath"] @classmethod def _shared_libs(cls, python_dir): @@ -120,6 +113,12 @@ def _shared_libs(cls, python_dir): @classmethod def sources(cls, interpreter): - for src in super(Pypy2Windows, cls).sources(interpreter): - yield src + yield from super().sources(interpreter) yield PathRefToDest(Path(interpreter.system_prefix) / "libs", dest=lambda self, s: self.dest / s.name) + + +__all__ = [ + "PyPy2", + "PyPy2Posix", + "Pypy2Windows", +] diff --git a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/pypy/pypy3.py b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/pypy/pypy3.py index cc72c145..ca5778c2 100644 --- a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/pypy/pypy3.py +++ b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/pypy/pypy3.py @@ -1,25 +1,20 @@ -from __future__ import absolute_import, unicode_literals - import abc - -from six import add_metaclass +from pathlib import Path from virtualenv.create.describe import PosixSupports, Python3Supports, WindowsSupports from virtualenv.create.via_global_ref.builtin.ref import PathRefToDest -from virtualenv.util.path import Path from .common import PyPy -@add_metaclass(abc.ABCMeta) -class PyPy3(PyPy, Python3Supports): +class PyPy3(PyPy, Python3Supports, metaclass=abc.ABCMeta): @classmethod def exe_stem(cls): return "pypy3" @classmethod def exe_names(cls, interpreter): - return super(PyPy3, cls).exe_names(interpreter) | {"pypy"} + return super().exe_names(interpreter) | {"pypy"} class PyPy3Posix(PyPy3, PosixSupports): @@ -28,7 +23,7 @@ class PyPy3Posix(PyPy3, PosixSupports): @property def stdlib(self): """PyPy3 respects sysconfig only for the host python, virtual envs is instead lib/pythonx.y/site-packages""" - return self.dest / "lib" / "pypy{}".format(self.interpreter.version_release_str) / "site-packages" + return self.dest / "lib" / f"pypy{self.interpreter.version_release_str}" / "site-packages" @classmethod def _shared_libs(cls, python_dir): @@ -40,10 +35,9 @@ def to_lib(self, src): @classmethod def sources(cls, interpreter): - for src in super(PyPy3Posix, cls).sources(interpreter): - yield src + yield from super().sources(interpreter) # PyPy >= 3.8 supports a standard prefix installation, where older - # versions always used a portable/developent style installation. + # versions always used a portable/development style installation. 
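# --- editor's illustration (not part of the patch) ---------------------------
# A minimal, runnable sketch of the `yield from` rewrite applied throughout
# these hunks: on Python 2 a parent generator had to be re-yielded item by
# item, while Python 3's `yield from` delegates to it directly (and also
# forwards send()/throw() and the return value). The classes below are
# hypothetical stand-ins, not virtualenv APIs.
class Base:
    @classmethod
    def sources(cls):
        yield "python-executable"


class Child(Base):
    @classmethod
    def sources(cls):
        # before: for src in super(Child, cls).sources(): yield src
        yield from super().sources()
        yield "lib-dynload"  # extra ref contributed by the subclass


assert list(Child.sources()) == ["python-executable", "lib-dynload"]
# ------------------------------------------------------------------------------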
# If this is a standard prefix installation, skip the below: if interpreter.system_prefix == "/usr": return @@ -66,9 +60,15 @@ def sources(cls, interpreter): class Pypy3Windows(PyPy3, WindowsSupports): """PyPy 3 on Windows""" + @property + def less_v37(self): + return self.interpreter.version_info.minor < 7 + @property def stdlib(self): """PyPy3 respects sysconfig only for the host python, virtual envs is instead Lib/site-packages""" + if self.less_v37: + return self.dest / "site-packages" return self.dest / "Lib" / "site-packages" @property @@ -81,5 +81,11 @@ def _shared_libs(cls, python_dir): # glob for libpypy*.dll and libffi*.dll for pattern in ["libpypy*.dll", "libffi*.dll"]: srcs = python_dir.glob(pattern) - for src in srcs: - yield src + yield from srcs + + +__all__ = [ + "PyPy3", + "PyPy3Posix", + "Pypy3Windows", +] diff --git a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/python2/python2.py b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/python2/python2.py index c29fbf47..e1e64a20 100644 --- a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/python2/python2.py +++ b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/python2/python2.py @@ -1,27 +1,21 @@ -from __future__ import absolute_import, unicode_literals - import abc import json import os from importlib.resources import read_text - -from six import add_metaclass +from pathlib import Path from virtualenv.create.describe import Python2Supports from virtualenv.create.via_global_ref.builtin.ref import PathRefToDest from virtualenv.info import IS_ZIPAPP -from virtualenv.util.path import Path -from virtualenv.util.six import ensure_text from virtualenv.util.zipapp import read as read_from_zipapp from ..via_global_self_do import ViaGlobalRefVirtualenvBuiltin -@add_metaclass(abc.ABCMeta) -class Python2(ViaGlobalRefVirtualenvBuiltin, Python2Supports): +class Python2(ViaGlobalRefVirtualenvBuiltin, Python2Supports, metaclass=abc.ABCMeta): def create(self): """Perform operations needed to make the created environment work on Python 2""" - super(Python2, self).create() + super().create() # install a patched site-package, the default Python 2 site.py is not smart enough to understand pyvenv.cfg, # so we inject a small shim that can do this, the location of this depends where it's on host sys_std_plat = Path(self.interpreter.system_stdlib_platform) @@ -34,14 +28,14 @@ def create(self): custom_site = get_custom_site() custom_site_text = read_text(custom_site, "site.py.template") - expected = json.dumps([os.path.relpath(ensure_text(str(i)), ensure_text(str(site_py))) for i in self.libs]) + expected = json.dumps([os.path.relpath(str(i), str(site_py)) for i in self.libs]) custom_site_text = custom_site_text.replace("___EXPECTED_SITE_PACKAGES___", expected) - reload_code = os.linesep.join(" {}".format(i) for i in self.reload_code.splitlines()).lstrip() + reload_code = os.linesep.join(f" {i}" for i in self.reload_code.splitlines()).lstrip() custom_site_text = custom_site_text.replace("# ___RELOAD_CODE___", reload_code) - skip_rewrite = os.linesep.join(" {}".format(i) for i in self.skip_rewrite.splitlines()).lstrip() + skip_rewrite = os.linesep.join(f" {i}" for i in self.skip_rewrite.splitlines()).lstrip() custom_site_text = custom_site_text.replace("# ___SKIP_REWRITE____", skip_rewrite) site_py.write_text(custom_site_text) @@ -56,8 +50,7 @@ def skip_rewrite(self): @classmethod def sources(cls, interpreter): - for src in super(Python2, cls).sources(interpreter): - yield src + yield from 
super().sources(interpreter) # install files needed to run site.py, either from stdlib or stdlib_platform, at least pyc, but both if exists # if neither exists return the module file to trigger failure mappings, needs_py_module = ( @@ -65,8 +58,8 @@ def sources(cls, interpreter): cls.needs_stdlib_py_module(), ) for req in cls.modules(): - module_file, to_module, module_exists = cls.from_stdlib(mappings, "{}.py".format(req)) - compiled_file, to_compiled, compiled_exists = cls.from_stdlib(mappings, "{}.pyc".format(req)) + module_file, to_module, module_exists = cls.from_stdlib(mappings, f"{req}.py") + compiled_file, to_compiled, compiled_exists = cls.from_stdlib(mappings, f"{req}.pyc") if needs_py_module or module_exists or not compiled_exists: yield PathRefToDest(module_file, dest=to_module) if compiled_exists: diff --git a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/ref.py b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/ref.py index 69f243bf..60d97b3e 100644 --- a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/ref.py +++ b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/ref.py @@ -3,34 +3,29 @@ references to elements on the file system, allowing our system to automatically detect what modes it can support given the constraints: e.g. can the file system symlink, can the files be read, executed, etc. """ -from __future__ import absolute_import, unicode_literals import os from abc import ABCMeta, abstractmethod from collections import OrderedDict from stat import S_IXGRP, S_IXOTH, S_IXUSR -from six import add_metaclass - from virtualenv.info import fs_is_case_sensitive, fs_supports_symlink from virtualenv.util.path import copy, make_exe, symlink -from virtualenv.util.six import ensure_text -class RefMust(object): +class RefMust: NA = "NA" COPY = "copy" SYMLINK = "symlink" -class RefWhen(object): +class RefWhen: ANY = "ANY" COPY = "copy" SYMLINK = "symlink" -@add_metaclass(ABCMeta) -class PathRef(object): +class PathRef(metaclass=ABCMeta): """Base class that checks if a file reference can be symlink/copied""" FS_SUPPORTS_SYMLINK = fs_supports_symlink() @@ -49,7 +44,7 @@ def __init__(self, src, must=RefMust.NA, when=RefWhen.ANY): self._can_symlink = None if self.exists else False def __repr__(self): - return "{}(src={})".format(self.__class__.__name__, self.src) + return f"{self.__class__.__name__}(src={self.src})" @property def can_read(self): @@ -61,7 +56,7 @@ def can_read(self): except OSError: self._can_read = False else: - self._can_read = os.access(ensure_text(str(self.src)), os.R_OK) + self._can_read = os.access(str(self.src), os.R_OK) return self._can_read @property @@ -83,7 +78,7 @@ def can_symlink(self): return self._can_symlink @abstractmethod - def run(self, creator, symlinks): + def run(self, creator, symlinks): # noqa: U100 raise NotImplementedError def method(self, symlinks): @@ -94,12 +89,11 @@ def method(self, symlinks): return symlink if symlinks else copy -@add_metaclass(ABCMeta) -class ExePathRef(PathRef): +class ExePathRef(PathRef, metaclass=ABCMeta): """Base class that checks if a executable can be references via symlink/copy""" def __init__(self, src, must=RefMust.NA, when=RefWhen.ANY): - super(ExePathRef, self).__init__(src, must, when) + super().__init__(src, must, when) self._can_run = None @property @@ -125,7 +119,7 @@ class PathRefToDest(PathRef): """Link a path on the file system""" def __init__(self, src, dest, must=RefMust.NA, when=RefWhen.ANY): - super(PathRefToDest, self).__init__(src, must, 
when) + super().__init__(src, must, when) self.dest = dest def run(self, creator, symlinks): @@ -169,4 +163,14 @@ def run(self, creator, symlinks): make_exe(link_file) def __repr__(self): - return "{}(src={}, alias={})".format(self.__class__.__name__, self.src, self.aliases) + return f"{self.__class__.__name__}(src={self.src}, alias={self.aliases})" + + +__all__ = [ + "ExePathRef", + "ExePathRefToDest", + "PathRefToDest", + "PathRef", + "RefWhen", + "RefMust", +] diff --git a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/via_global_self_do.py b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/via_global_self_do.py index 863ae16e..d00ee45c 100644 --- a/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/via_global_self_do.py +++ b/vendor/virtualenv/src/virtualenv/create/via_global_ref/builtin/via_global_self_do.py @@ -1,9 +1,5 @@ -from __future__ import absolute_import, unicode_literals - from abc import ABCMeta -from six import add_metaclass - from virtualenv.create.via_global_ref.builtin.ref import ExePathRefToDest, RefMust, RefWhen from virtualenv.util.path import ensure_dir @@ -13,19 +9,18 @@ class BuiltinViaGlobalRefMeta(ViaGlobalRefMeta): def __init__(self): - super(BuiltinViaGlobalRefMeta, self).__init__() + super().__init__() self.sources = [] -@add_metaclass(ABCMeta) -class ViaGlobalRefVirtualenvBuiltin(ViaGlobalRefApi, VirtualenvBuiltin): +class ViaGlobalRefVirtualenvBuiltin(ViaGlobalRefApi, VirtualenvBuiltin, metaclass=ABCMeta): def __init__(self, options, interpreter): - super(ViaGlobalRefVirtualenvBuiltin, self).__init__(options, interpreter) + super().__init__(options, interpreter) self._sources = getattr(options.meta, "sources", None) # if we're created as a describer this might be missing @classmethod def can_create(cls, interpreter): - """By default all built-in methods assume that if we can describe it we can create it""" + """By default, all built-in methods assume that if we can describe it we can create it""" # first we must be able to describe it if not cls.can_describe(interpreter): return None @@ -39,11 +34,11 @@ def _sources_can_be_applied(cls, interpreter, meta): for src in cls.sources(interpreter): if src.exists: if meta.can_copy and not src.can_copy: - meta.copy_error = "cannot copy {}".format(src) + meta.copy_error = f"cannot copy {src}" if meta.can_symlink and not src.can_symlink: - meta.symlink_error = "cannot symlink {}".format(src) + meta.symlink_error = f"cannot symlink {src}" else: - msg = "missing required file {}".format(src) + msg = f"missing required file {src}" if src.when == RefMust.NA: meta.error = msg elif src.when == RefMust.COPY: @@ -51,16 +46,13 @@ def _sources_can_be_applied(cls, interpreter, meta): elif src.when == RefMust.SYMLINK: meta.symlink_error = msg if not meta.can_copy and not meta.can_symlink: - meta.error = "neither copy or symlink supported, copy: {} symlink: {}".format( - meta.copy_error, - meta.symlink_error, - ) + meta.error = f"neither copy or symlink supported, copy: {meta.copy_error} symlink: {meta.symlink_error}" if meta.error: break meta.sources.append(src) @classmethod - def setup_meta(cls, interpreter): + def setup_meta(cls, interpreter): # noqa: U100 return BuiltinViaGlobalRefMeta() @classmethod @@ -72,7 +64,7 @@ def to_bin(self, src): return self.bin_dir / src.name @classmethod - def _executables(cls, interpreter): + def _executables(cls, interpreter): # noqa: U100 raise NotImplementedError def create(self): @@ -98,7 +90,7 @@ def create(self): finally: if true_system_site != 
self.enable_system_site_package: self.enable_system_site_package = true_system_site - super(ViaGlobalRefVirtualenvBuiltin, self).create() + super().create() def ensure_directories(self): return {self.dest, self.bin_dir, self.script_dir, self.stdlib} | set(self.libs) @@ -108,7 +100,13 @@ def set_pyenv_cfg(self): We directly inject the base prefix and base exec prefix to avoid site.py needing to discover these from home (which usually is done within the interpreter itself) """ - super(ViaGlobalRefVirtualenvBuiltin, self).set_pyenv_cfg() + super().set_pyenv_cfg() self.pyenv_cfg["base-prefix"] = self.interpreter.system_prefix self.pyenv_cfg["base-exec-prefix"] = self.interpreter.system_exec_prefix self.pyenv_cfg["base-executable"] = self.interpreter.system_executable + + +__all__ = [ + "BuiltinViaGlobalRefMeta", + "ViaGlobalRefVirtualenvBuiltin", +] diff --git a/vendor/virtualenv/src/virtualenv/create/via_global_ref/store.py b/vendor/virtualenv/src/virtualenv/create/via_global_ref/store.py index 134a5358..a9c559cc 100644 --- a/vendor/virtualenv/src/virtualenv/create/via_global_ref/store.py +++ b/vendor/virtualenv/src/virtualenv/create/via_global_ref/store.py @@ -1,6 +1,4 @@ -from __future__ import absolute_import, unicode_literals - -from virtualenv.util.path import Path +from pathlib import Path def handle_store_python(meta, interpreter): @@ -20,7 +18,7 @@ def is_store_python(interpreter): ) -__all__ = ( +__all__ = [ "handle_store_python", "is_store_python", -) +] diff --git a/vendor/virtualenv/src/virtualenv/create/via_global_ref/venv.py b/vendor/virtualenv/src/virtualenv/create/via_global_ref/venv.py index aaa67947..071375a5 100644 --- a/vendor/virtualenv/src/virtualenv/create/via_global_ref/venv.py +++ b/vendor/virtualenv/src/virtualenv/create/via_global_ref/venv.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, unicode_literals - import logging from copy import copy @@ -10,19 +8,19 @@ from virtualenv.util.subprocess import run_cmd from .api import ViaGlobalRefApi, ViaGlobalRefMeta +from .builtin.pypy.pypy3 import Pypy3Windows class Venv(ViaGlobalRefApi): def __init__(self, options, interpreter): self.describe = options.describe - super(Venv, self).__init__(options, interpreter) - self.can_be_inline = ( - interpreter is PythonInfo.current() and interpreter.executable == interpreter.system_executable - ) + super().__init__(options, interpreter) + current = PythonInfo.current() + self.can_be_inline = interpreter is current and interpreter.executable == interpreter.system_executable self._context = None def _args(self): - return super(Venv, self)._args() + ([("describe", self.describe.__class__.__name__)] if self.describe else []) + return super()._args() + ([("describe", self.describe.__class__.__name__)] if self.describe else []) @classmethod def can_create(cls, interpreter): @@ -40,7 +38,19 @@ def create(self): self.create_via_sub_process() for lib in self.libs: ensure_dir(lib) - super(Venv, self).create() + super().create() + self.executables_for_win_pypy_less_v37() + + def executables_for_win_pypy_less_v37(self): + """ + PyPy <= 3.6 (v7.3.3) for Windows contains only pypy3.exe and pypy3w.exe + Venv does not handle non-existing exe sources, e.g. python.exe, so this + patch does it. 
+ """ + creator = self.describe + if isinstance(creator, Pypy3Windows) and creator.less_v37: + for exe in creator.executables(self.interpreter): + exe.run(creator, self.symlinks) def create_inline(self): from venv import EnvBuilder @@ -71,7 +81,7 @@ def get_host_create_cmd(self): def set_pyenv_cfg(self): # prefer venv options over ours, but keep our extra venv_content = copy(self.pyenv_cfg.refresh()) - super(Venv, self).set_pyenv_cfg() + super().set_pyenv_cfg() self.pyenv_cfg.update(venv_content) def __getattribute__(self, item): @@ -81,3 +91,8 @@ def __getattribute__(self, item): if not callable(element) or item in ("script",): return element return object.__getattribute__(self, item) + + +__all__ = [ + "Venv", +] diff --git a/vendor/virtualenv/src/virtualenv/discovery/__init__.py b/vendor/virtualenv/src/virtualenv/discovery/__init__.py index babf6add..84bc7a44 100644 --- a/vendor/virtualenv/src/virtualenv/discovery/__init__.py +++ b/vendor/virtualenv/src/virtualenv/discovery/__init__.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, unicode_literals - from pathlib import Path from virtualenv import __path_assets__ diff --git a/vendor/virtualenv/src/virtualenv/discovery/builtin.py b/vendor/virtualenv/src/virtualenv/discovery/builtin.py index 0f1844ce..7cc5687c 100644 --- a/vendor/virtualenv/src/virtualenv/discovery/builtin.py +++ b/vendor/virtualenv/src/virtualenv/discovery/builtin.py @@ -1,11 +1,8 @@ -from __future__ import absolute_import, unicode_literals - import logging import os import sys from virtualenv.info import IS_WIN -from virtualenv.util.six import ensure_str, ensure_text from .discover import Discover from .py_info import PythonInfo @@ -14,7 +11,7 @@ class Builtin(Discover): def __init__(self, options): - super(Builtin, self).__init__(options) + super().__init__(options) self.python_spec = options.python if options.python else [sys.executable] self.app_data = options.app_data self.try_first_with = options.try_first_with @@ -50,11 +47,8 @@ def run(self): return None def __repr__(self): - return ensure_str(self.__unicode__()) - - def __unicode__(self): spec = self.python_spec[0] if len(self.python_spec) == 1 else self.python_spec - return "{} discover of python_spec={!r}".format(self.__class__.__name__, spec) + return f"{self.__class__.__name__} discover of python_spec={spec!r}" def get_interpreter(key, try_first_with, app_data=None, env=None): @@ -110,10 +104,10 @@ def propose_interpreters(spec, try_first_with, app_data, env=None): paths = get_paths(env) tested_exes = set() for pos, path in enumerate(paths): - path = ensure_text(path) - logging.debug(LazyPathDump(pos, path, env)) + path_str = str(path) + logging.debug(LazyPathDump(pos, path_str, env)) for candidate, match in possible_specs(spec): - found = check_path(candidate, path) + found = check_path(candidate, path_str) if found is not None: exe = os.path.abspath(found) if exe not in tested_exes: @@ -124,7 +118,7 @@ def propose_interpreters(spec, try_first_with, app_data, env=None): def get_paths(env): - path = env.get(str("PATH"), None) + path = env.get("PATH", None) if path is None: try: path = os.confstr("CS_PATH") @@ -137,18 +131,15 @@ def get_paths(env): return paths -class LazyPathDump(object): +class LazyPathDump: def __init__(self, pos, path, env): self.pos = pos self.path = path self.env = env def __repr__(self): - return ensure_str(self.__unicode__()) - - def __unicode__(self): - content = "discover PATH[{}]={}".format(self.pos, self.path) - if self.env.get(str("_VIRTUALENV_DEBUG")): # this is the 
over the board debug + content = f"discover PATH[{self.pos}]={self.path}" + if self.env.get("_VIRTUALENV_DEBUG"): # this is the over the board debug content += " with =>" for file_name in os.listdir(self.path): try: @@ -178,9 +169,15 @@ def possible_specs(spec): # 4. then maybe it's something exact on PATH - if it was direct lookup implementation no longer counts yield spec.str_spec, False # 5. or from the spec we can deduce a name on path that matches - for exe, match in spec.generate_names(): - yield exe, match + yield from spec.generate_names() class PathPythonInfo(PythonInfo): - """ """ + """python info from path""" + + +__all__ = [ + "get_interpreter", + "Builtin", + "PathPythonInfo", +] diff --git a/vendor/virtualenv/src/virtualenv/discovery/cached_py_info.py b/vendor/virtualenv/src/virtualenv/discovery/cached_py_info.py index 35a0249f..75fbf708 100644 --- a/vendor/virtualenv/src/virtualenv/discovery/cached_py_info.py +++ b/vendor/virtualenv/src/virtualenv/discovery/cached_py_info.py @@ -4,20 +4,20 @@ cheap, especially not on Windows. To not have to pay this hefty cost every time we apply multiple levels of caching. """ import logging import os -import pipes +import random import sys from collections import OrderedDict +from pathlib import Path +from shlex import quote +from string import ascii_lowercase, ascii_uppercase, digits +from subprocess import Popen from virtualenv.app_data import AppDataDisabled from virtualenv.discovery.py_info import PythonInfo -from virtualenv.info import PY2 -from virtualenv.util.path import Path -from virtualenv.util.six import ensure_text -from virtualenv.util.subprocess import Popen, subprocess +from virtualenv.util.subprocess import subprocess from . import _PATH_ASSETS @@ -32,7 +32,7 @@ def from_exe(cls, app_data, exe, env=None, raise_on_error=True, ignore_cache=Fal if raise_on_error: raise result else: - logging.info("%s", str(result)) + logging.info("%s", result) result = None return result @@ -53,7 +53,7 @@ def _get_from_cache(cls, app_data, exe, env, ignore_cache=True): def _get_via_file_cache(cls, app_data, path, exe, env): - path_text = ensure_text(str(path)) + path_text = str(path) try: path_modified = path.stat().st_mtime except OSError: @@ -83,10 +83,27 @@ def _get_via_file_cache(cls, app_data, path, exe, env): return py_info +COOKIE_LENGTH: int = 32 + + +def gen_cookie(): + return "".join(random.choice("".join((ascii_lowercase, ascii_uppercase, digits))) for _ in range(COOKIE_LENGTH)) + + def _run_subprocess(cls, exe, app_data, env): py_info_script = _PATH_ASSETS / "py_info.py" + # Cookies allow us to split the serialized stdout output generated by the script collecting the info from the output + # generated by something else. The right way to deal with it is to create an anonymous pipe and pass its descriptor + # to the child and output to it. But AFAIK all of them are either not cross-platform or too big to implement and are + # not in the stdlib. So the easiest and the shortest way I could find is just using the cookies. + # We generate pseudorandom cookies because it is easy to implement and avoids breakage from outputting modules source + # code, i.e. by debug output libraries. We reverse the cookies to avoid breakages resulting from variable values + # appearing in debug output.
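# --- editor's illustration (not part of the patch) ---------------------------
# A self-contained round-trip of the cookie framing described in the comment
# above: the child brackets its JSON payload with the *reversed* cookies, and
# the parent slices everything between them out of a possibly noisy stdout.
# COOKIE_LENGTH and the reversal mirror the surrounding hunks; the noise and
# payload strings here are made up for the demo.
import random
from string import ascii_letters, digits

COOKIE_LENGTH = 32


def gen_cookie():
    return "".join(random.choice(ascii_letters + digits) for _ in range(COOKIE_LENGTH))


start_cookie, end_cookie = gen_cookie(), gen_cookie()
# what the child would write: stray output may precede and follow the framed payload
out = "stray output" + start_cookie[::-1] + '{"executable": "/usr/bin/python3"}' + end_cookie[::-1] + "more noise"
# how the parent recovers just the payload
begin = out.find(start_cookie[::-1]) + COOKIE_LENGTH
payload = out[begin : out.find(end_cookie[::-1])]
assert payload == '{"executable": "/usr/bin/python3"}'
# ------------------------------------------------------------------------------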
+ + start_cookie = gen_cookie() + end_cookie = gen_cookie() with app_data.ensure_extracted(py_info_script) as py_info_script: - cmd = [exe, str(py_info_script)] + cmd = [exe, str(py_info_script), start_cookie, end_cookie] # prevent sys.prefix from leaking into the child process - see https://bugs.python.org/issue22490 env = env.copy() env.pop("__PYVENV_LAUNCHER__", None) @@ -106,49 +123,53 @@ def _run_subprocess(cls, exe, app_data, env): out, err, code = "", os_error.strerror, os_error.errno result, failure = None, None if code == 0: + out_starts = out.find(start_cookie[::-1]) + + if out_starts > -1: + pre_cookie = out[:out_starts] + + if pre_cookie: + sys.stdout.write(pre_cookie) + + out = out[out_starts + COOKIE_LENGTH :] + + out_ends = out.find(end_cookie[::-1]) + + if out_ends > -1: + post_cookie = out[out_ends + COOKIE_LENGTH :] + + if post_cookie: + sys.stdout.write(post_cookie) + + out = out[:out_ends] + result = cls._from_json(out) result.executable = exe # keep original executable as this may contain initialization code else: - msg = "failed to query {} with code {}{}{}".format( - exe, - code, - " out: {!r}".format(out) if out else "", - " err: {!r}".format(err) if err else "", - ) - failure = RuntimeError(msg) + msg = f"{exe} with code {code}{f' out: {out!r}' if out else ''}{f' err: {err!r}' if err else ''}" + failure = RuntimeError(f"failed to query {msg}") return failure, result -class LogCmd(object): +class LogCmd: def __init__(self, cmd, env=None): self.cmd = cmd self.env = env def __repr__(self): - def e(v): - return v.decode("utf-8") if isinstance(v, bytes) else v - - cmd_repr = e(" ").join(pipes.quote(e(c)) for c in self.cmd) + cmd_repr = " ".join(quote(str(c)) for c in self.cmd) if self.env is not None: - cmd_repr += e(" env of {!r}").format(self.env) - if PY2: - return cmd_repr.encode("utf-8") + cmd_repr = f"{cmd_repr} env of {self.env!r}" return cmd_repr - def __unicode__(self): - raw = repr(self) - if PY2: - return raw.decode("utf-8") - return raw - def clear(app_data): app_data.py_info_clear() _CACHE.clear() -___all___ = ( +___all___ = [ "from_exe", "clear", "LogCmd", -) +] diff --git a/vendor/virtualenv/src/virtualenv/discovery/discover.py b/vendor/virtualenv/src/virtualenv/discovery/discover.py index 72748c3f..d44758cb 100644 --- a/vendor/virtualenv/src/virtualenv/discovery/discover.py +++ b/vendor/virtualenv/src/virtualenv/discovery/discover.py @@ -1,23 +1,17 @@ -from __future__ import absolute_import, unicode_literals - from abc import ABCMeta, abstractmethod -from six import add_metaclass - -@add_metaclass(ABCMeta) -class Discover(object): +class Discover(metaclass=ABCMeta): """Discover and provide the requested Python interpreter""" @classmethod - def add_parser_arguments(cls, parser): + def add_parser_arguments(cls, parser): # noqa: U100 """Add CLI arguments for this discovery mechanisms. :param parser: the CLI parser """ raise NotImplementedError - # noinspection PyUnusedLocal def __init__(self, options): """Create a new discovery mechanism. @@ -31,7 +25,6 @@ def __init__(self, options): def run(self): """Discovers an interpreter. 
- :return: the interpreter ready to use for virtual environment creation """ raise NotImplementedError @@ -45,3 +38,8 @@ def interpreter(self): self._interpreter = self.run() self._has_run = True return self._interpreter + + +__all__ = [ + "Discover", +] diff --git a/vendor/virtualenv/src/virtualenv/discovery/py_info.py b/vendor/virtualenv/src/virtualenv/discovery/py_info.py index df2cc13f..2a648e04 100644 --- a/vendor/virtualenv/src/virtualenv/discovery/py_info.py +++ b/vendor/virtualenv/src/virtualenv/discovery/py_info.py @@ -44,9 +44,13 @@ def abs_path(v): self.pypy_version_info = tuple(u(i) for i in sys.pypy_version_info) # this is a tuple in earlier, struct later, unify to our own named tuple - self.version_info = VersionInfo(*list(u(i) for i in sys.version_info)) + self.version_info = VersionInfo(*[u(i) for i in sys.version_info]) self.architecture = 64 if sys.maxsize > 2**32 else 32 + # Used to determine some file names. + # See `CPython3Windows.python_zip()`. + self.version_nodot = sysconfig.get_config_var("py_version_nodot") + self.version = u(sys.version) self.os = u(os.name) @@ -482,7 +486,7 @@ def _find_possible_folders(self, inside_folder): # or at root level candidate_folder[inside_folder] = None - return list(i for i in candidate_folder.keys() if os.path.exists(i)) + return [i for i in candidate_folder.keys() if os.path.exists(i)] def _find_possible_exe_names(self): name_candidate = OrderedDict() @@ -520,4 +524,21 @@ def _possible_base(self): if __name__ == "__main__": # dump a JSON representation of the current python # noinspection PyProtectedMember - print(PythonInfo()._to_json()) + argv = sys.argv[1:] + + if len(argv) >= 1: + start_cookie = argv[0] + argv = argv[1:] + else: + start_cookie = "" + + if len(argv) >= 1: + end_cookie = argv[0] + argv = argv[1:] + else: + end_cookie = "" + + sys.argv = sys.argv[:1] + argv + + info = PythonInfo()._to_json() + sys.stdout.write("".join((start_cookie[::-1], info, end_cookie[::-1]))) diff --git a/vendor/virtualenv/src/virtualenv/discovery/py_spec.py b/vendor/virtualenv/src/virtualenv/discovery/py_spec.py index cb63e151..058d8be3 100644 --- a/vendor/virtualenv/src/virtualenv/discovery/py_spec.py +++ b/vendor/virtualenv/src/virtualenv/discovery/py_spec.py @@ -1,19 +1,15 @@ -"""A Python specification is an abstract requirement definition of a interpreter""" -from __future__ import absolute_import, unicode_literals +"""A Python specification is an abstract requirement definition of an interpreter""" import os import re -import sys from collections import OrderedDict from virtualenv.info import fs_is_case_sensitive -from virtualenv.util.six import ensure_str PATTERN = re.compile(r"^(?P<impl>[a-zA-Z]+)?(?P<version>[0-9.]+)?(?:-(?P<arch>32|64))?$") -IS_WIN = sys.platform == "win32" -class PythonSpec(object): +class PythonSpec: """Contains specification about a Python Interpreter""" def __init__(self, str_spec, implementation, major, minor, micro, architecture, path): @@ -87,7 +83,7 @@ def generate_names(self): for impl, match in impls.items(): for at in range(len(version), -1, -1): cur_ver = version[0:at] - spec = "{}{}".format(impl, ".".join(str(i) for i in cur_ver)) + spec = f"{impl}{'.'.join(str(i) for i in cur_ver)}" yield spec, match @property @@ -108,15 +104,12 @@ def satisfies(self, spec): return False return True - def __unicode__(self): - return "{}({})".format( - type(self).__name__, - ", ".join( - "{}={}".format(k, getattr(self, k)) - for k in ("implementation", "major", "minor", "micro", "architecture", "path") - if getattr(self, k) is not None -
), - ) - def __repr__(self): - return ensure_str(self.__unicode__()) + name = type(self).__name__ + params = "implementation", "major", "minor", "micro", "architecture", "path" + return f"{name}({', '.join(f'{k}={getattr(self, k)}' for k in params if getattr(self, k) is not None)})" + + +__all__ = [ + "PythonSpec", +] diff --git a/vendor/virtualenv/src/virtualenv/discovery/windows/__init__.py b/vendor/virtualenv/src/virtualenv/discovery/windows/__init__.py index 259be976..4663a3a6 100644 --- a/vendor/virtualenv/src/virtualenv/discovery/windows/__init__.py +++ b/vendor/virtualenv/src/virtualenv/discovery/windows/__init__.py @@ -1,12 +1,10 @@ -from __future__ import absolute_import, unicode_literals - from ..py_info import PythonInfo from ..py_spec import PythonSpec from .pep514 import discover_pythons class Pep514PythonInfo(PythonInfo): - """ """ + """A Python information acquired from PEP-514""" def propose_interpreters(spec, cache_dir, env): @@ -29,3 +27,9 @@ def propose_interpreters(spec, cache_dir, env): if interpreter is not None: if interpreter.satisfies(spec, impl_must_match=True): yield interpreter + + +__all__ = [ + "Pep514PythonInfo", + "propose_interpreters", +] diff --git a/vendor/virtualenv/src/virtualenv/discovery/windows/pep514.py b/vendor/virtualenv/src/virtualenv/discovery/windows/pep514.py index 048436a6..beb1d819 100644 --- a/vendor/virtualenv/src/virtualenv/discovery/windows/pep514.py +++ b/vendor/virtualenv/src/virtualenv/discovery/windows/pep514.py @@ -1,18 +1,10 @@ """Implement https://www.python.org/dev/peps/pep-0514/ to discover interpreters - Windows only""" -from __future__ import absolute_import, print_function, unicode_literals import os import re +import winreg from logging import basicConfig, getLogger -import six - -if six.PY3: - import winreg -else: - # noinspection PyUnresolvedReferences - import _winreg as winreg - LOGGER = getLogger(__name__) @@ -39,8 +31,7 @@ def discover_pythons(): (winreg.HKEY_LOCAL_MACHINE, "HKEY_LOCAL_MACHINE", r"Software\Python", winreg.KEY_WOW64_64KEY, 64), (winreg.HKEY_LOCAL_MACHINE, "HKEY_LOCAL_MACHINE", r"Software\Python", winreg.KEY_WOW64_32KEY, 32), ]: - for spec in process_set(hive, hive_name, key, flags, default_arch): - yield spec + yield from process_set(hive, hive_name, key, flags, default_arch) def process_set(hive, hive_name, key, flags, default_arch): @@ -49,8 +40,7 @@ def process_set(hive, hive_name, key, flags, default_arch): for company in enum_keys(root_key): if company == "PyLauncher": # reserved continue - for spec in process_company(hive_name, company, root_key, default_arch): - yield spec + yield from process_company(hive_name, company, root_key, default_arch) except OSError: pass @@ -77,9 +67,9 @@ def process_tag(hive_name, company, company_key, tag, default_arch): def load_exe(hive_name, company, company_key, tag): - key_path = "{}/{}/{}".format(hive_name, company, tag) + key_path = f"{hive_name}/{company}/{tag}" try: - with winreg.OpenKeyEx(company_key, r"{}\InstallPath".format(tag)) as ip_key: + with winreg.OpenKeyEx(company_key, rf"{tag}\InstallPath") as ip_key: with ip_key: exe = get_value(ip_key, "ExecutablePath") if exe is None: @@ -88,21 +78,21 @@ def load_exe(hive_name, company, company_key, tag): msg(key_path, "no ExecutablePath or default for it") else: - exe = os.path.join(ip, str("python.exe")) + exe = os.path.join(ip, "python.exe") if exe is not None and os.path.exists(exe): args = get_value(ip_key, "ExecutableArguments") return exe, args else: - msg(key_path, "could not load exe with value 
{}".format(exe)) + msg(key_path, f"could not load exe with value {exe}") except OSError: - msg("{}/{}".format(key_path, "InstallPath"), "missing") + msg(f"{key_path}/InstallPath", "missing") return None def load_arch_data(hive_name, company, tag, tag_key, default_arch): arch_str = get_value(tag_key, "SysArchitecture") if arch_str is not None: - key_path = "{}/{}/{}/SysArchitecture".format(hive_name, company, tag) + key_path = f"{hive_name}/{company}/{tag}/SysArchitecture" try: return parse_arch(arch_str) except ValueError as sys_arch: @@ -111,20 +101,20 @@ def load_arch_data(hive_name, company, tag, tag_key, default_arch): def parse_arch(arch_str): - if isinstance(arch_str, six.string_types): + if isinstance(arch_str, str): match = re.match(r"^(\d+)bit$", arch_str) if match: return int(next(iter(match.groups()))) - error = "invalid format {}".format(arch_str) + error = f"invalid format {arch_str}" else: - error = "arch is not string: {}".format(repr(arch_str)) + error = f"arch is not string: {repr(arch_str)}" raise ValueError(error) def load_version_data(hive_name, company, tag, tag_key): for candidate, key_path in [ - (get_value(tag_key, "SysVersion"), "{}/{}/{}/SysVersion".format(hive_name, company, tag)), - (tag, "{}/{}/{}".format(hive_name, company, tag)), + (get_value(tag_key, "SysVersion"), f"{hive_name}/{company}/{tag}/SysVersion"), + (tag, f"{hive_name}/{company}/{tag}"), ]: if candidate is not None: try: @@ -135,18 +125,18 @@ def load_version_data(hive_name, company, tag, tag_key): def parse_version(version_str): - if isinstance(version_str, six.string_types): + if isinstance(version_str, str): match = re.match(r"^(\d+)(?:\.(\d+))?(?:\.(\d+))?$", version_str) if match: return tuple(int(i) if i is not None else None for i in match.groups()) - error = "invalid format {}".format(version_str) + error = f"invalid format {version_str}" else: - error = "version is not string: {}".format(repr(version_str)) + error = f"version is not string: {version_str!r}" raise ValueError(error) def msg(path, what): - LOGGER.warning("PEP-514 violation in Windows Registry at {} error: {}".format(path, what)) + LOGGER.warning(f"PEP-514 violation in Windows Registry at {path} error: {what}") def _run(): diff --git a/vendor/virtualenv/src/virtualenv/info.py b/vendor/virtualenv/src/virtualenv/info.py index f4169215..752108b7 100644 --- a/vendor/virtualenv/src/virtualenv/info.py +++ b/vendor/virtualenv/src/virtualenv/info.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, unicode_literals - import logging import os import platform @@ -11,13 +9,10 @@ IMPLEMENTATION = platform.python_implementation() IS_PYPY = IMPLEMENTATION == "PyPy" IS_CPYTHON = IMPLEMENTATION == "CPython" -PY3 = sys.version_info[0] == 3 -PY2 = sys.version_info[0] == 2 IS_WIN = sys.platform == "win32" IS_MAC_ARM64 = sys.platform == "darwin" and platform.machine() == "arm64" ROOT = os.path.realpath(__path_pack__.parent) IS_ZIPAPP = False -WIN_CPYTHON_2 = IS_CPYTHON and IS_WIN and PY2 _CAN_SYMLINK = _FS_CASE_SENSITIVE = _CFG_DIR = _DATA_DIR = None @@ -41,7 +36,7 @@ def fs_supports_symlink(): if IS_WIN: with tempfile.NamedTemporaryFile(prefix="TmP") as tmp_file: temp_dir = os.path.dirname(tmp_file.name) - dest = os.path.join(temp_dir, "{}-{}".format(tmp_file.name, "b")) + dest = os.path.join(temp_dir, f"{tmp_file.name}-{'b'}") try: os.symlink(tmp_file.name, dest) can = True @@ -57,12 +52,10 @@ def fs_supports_symlink(): __all__ = ( "IS_PYPY", "IS_CPYTHON", - "PY3", - "PY2", "IS_WIN", "fs_is_case_sensitive", "fs_supports_symlink", 
"ROOT", "IS_ZIPAPP", - "WIN_CPYTHON_2", + "IS_MAC_ARM64", ) diff --git a/vendor/virtualenv/src/virtualenv/report.py b/vendor/virtualenv/src/virtualenv/report.py index 2a2954f1..0236f219 100644 --- a/vendor/virtualenv/src/virtualenv/report.py +++ b/vendor/virtualenv/src/virtualenv/report.py @@ -1,10 +1,6 @@ -from __future__ import absolute_import, unicode_literals - import logging import sys -from virtualenv.util.six import ensure_str - LEVELS = { 0: logging.CRITICAL, 1: logging.ERROR, @@ -26,10 +22,10 @@ def setup_report(verbosity, show_pid=False): msg_format = "%(message)s" if level <= logging.DEBUG: locate = "module" - msg_format = "%(relativeCreated)d {} [%(levelname)s %({})s:%(lineno)d]".format(msg_format, locate) + msg_format = f"%(relativeCreated)d {msg_format} [%(levelname)s %({locate})s:%(lineno)d]" if show_pid: - msg_format = "[%(process)d] " + msg_format - formatter = logging.Formatter(ensure_str(msg_format)) + msg_format = f"[%(process)d] {msg_format}" + formatter = logging.Formatter(msg_format) stream_handler = logging.StreamHandler(stream=sys.stdout) stream_handler.setLevel(level) LOGGER.setLevel(logging.NOTSET) @@ -46,8 +42,8 @@ def _clean_handlers(log): log.removeHandler(log_handler) -__all__ = ( +__all__ = [ "LEVELS", "MAX_LEVEL", "setup_report", -) +] diff --git a/vendor/virtualenv/src/virtualenv/run/__init__.py b/vendor/virtualenv/src/virtualenv/run/__init__.py index 6bca6844..23c146f4 100644 --- a/vendor/virtualenv/src/virtualenv/run/__init__.py +++ b/vendor/virtualenv/src/virtualenv/run/__init__.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, unicode_literals - import logging import os from functools import partial @@ -48,7 +46,7 @@ def session_via_cli(args, options=None, setup_logging=True, env=None): parser, elements = build_parser(args, options, setup_logging, env) options = parser.parse_args(args) creator, seeder, activators = tuple(e.create(options) for e in elements) # create types - of_session = Session(options.verbosity, options.app_data, parser._interpreter, creator, seeder, activators) # noqa + of_session = Session(options.verbosity, options.app_data, parser._interpreter, creator, seeder, activators) return of_session @@ -69,7 +67,7 @@ def build_parser(args=None, options=None, setup_logging=True, env=None): discover = get_discover(parser, args) parser._interpreter = interpreter = discover.interpreter if interpreter is None: - raise RuntimeError("failed to find interpreter for {}".format(discover)) + raise RuntimeError(f"failed to find interpreter for {discover}") elements = [ CreatorSelector(interpreter, parser), SeederSelector(interpreter, parser), @@ -130,13 +128,13 @@ def add_version_flag(parser): parser.add_argument( "--version", action="version", - version="%(prog)s {} from {}".format(__version__, virtualenv.__path_pack__), + version=f"%(prog)s {__version__} from {virtualenv.__path_pack__}", help="display the version of the virtualenv package and its location, then exit", ) def _do_report_setup(parser, args, setup_logging): - level_map = ", ".join("{}={}".format(logging.getLevelName(l), c) for c, l in sorted(list(LEVELS.items()))) + level_map = ", ".join(f"{logging.getLevelName(l)}={c}" for c, l in sorted(LEVELS.items())) msg = "verbosity = verbose - quiet, default {}, mapping => {}" verbosity_group = parser.add_argument_group( title="verbosity", @@ -150,7 +148,7 @@ def _do_report_setup(parser, args, setup_logging): setup_report(option.verbosity) -__all__ = ( +__all__ = [ "cli_run", "session_via_cli", -) +] diff --git 
a/vendor/virtualenv/src/virtualenv/run/plugin/activators.py b/vendor/virtualenv/src/virtualenv/run/plugin/activators.py index 036b2c4f..74df18b1 100644 --- a/vendor/virtualenv/src/virtualenv/run/plugin/activators.py +++ b/vendor/virtualenv/src/virtualenv/run/plugin/activators.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, unicode_literals - from argparse import ArgumentTypeError from collections import OrderedDict from importlib.metadata import EntryPoint @@ -31,14 +29,14 @@ def __init__(self, interpreter, parser): possible = OrderedDict( (k, v) for k, v in self.options("virtualenv.activate").items() if v.supports(interpreter) ) - super(ActivationSelector, self).__init__(interpreter, parser, "activators", possible) + super().__init__(interpreter, parser, "activators", possible) self.parser.description = "options for activation scripts" self.active = None def add_selector_arg_parse(self, name, choices): self.default = ",".join(choices) self.parser.add_argument( - "--{}".format(name), + f"--{name}", default=self.default, metavar="comma_sep_list", required=False, @@ -50,7 +48,7 @@ def _extract_activators(self, entered_str): elements = [e.strip() for e in entered_str.split(",") if e.strip()] missing = [e for e in elements if e not in self.possible] if missing: - raise ArgumentTypeError("the following activators are not available {}".format(",".join(missing))) + raise ArgumentTypeError(f"the following activators are not available {','.join(missing)}") return elements def handle_selected_arg_parse(self, options): @@ -73,3 +71,8 @@ def handle_selected_arg_parse(self, options): def create(self, options): return [activator_class(options) for activator_class in self.active.values()] + + +__all__ = [ + "ActivationSelector", +] diff --git a/vendor/virtualenv/src/virtualenv/run/plugin/base.py b/vendor/virtualenv/src/virtualenv/run/plugin/base.py index 0d3a04a8..1e9f0f2b 100644 --- a/vendor/virtualenv/src/virtualenv/run/plugin/base.py +++ b/vendor/virtualenv/src/virtualenv/run/plugin/base.py @@ -1,12 +1,9 @@ -from __future__ import absolute_import, unicode_literals - import sys from collections import OrderedDict - from importlib.metadata import entry_points -class PluginLoader(object): +class PluginLoader: _OPTIONS = None _ENTRY_POINTS = None @@ -36,20 +33,26 @@ def options(cls, key): cls._OPTIONS = cls.entry_points_for(key) return cls._OPTIONS - def add_selector_arg_parse(self, name, choices): + def add_selector_arg_parse(self, name, choices): # noqa: U100 raise NotImplementedError def handle_selected_arg_parse(self, options): selected = getattr(options, self.name) if selected not in self.possible: - raise RuntimeError("No implementation for {}".format(self.interpreter)) + raise RuntimeError(f"No implementation for {self.interpreter}") self._impl_class = self.possible[selected] self.populate_selected_argparse(selected, options.app_data) return selected def populate_selected_argparse(self, selected, app_data): - self.parser.description = "options for {} {}".format(self.name, selected) + self.parser.description = f"options for {self.name} {selected}" self._impl_class.add_parser_arguments(self.parser, self.interpreter, app_data) def create(self, options): return self._impl_class(options, self.interpreter) + + +__all__ = [ + "PluginLoader", + "ComponentBuilder", +] diff --git a/vendor/virtualenv/src/virtualenv/run/plugin/creators.py b/vendor/virtualenv/src/virtualenv/run/plugin/creators.py index 0a59e32c..2514cdb9 100644 --- a/vendor/virtualenv/src/virtualenv/run/plugin/creators.py +++ 
b/vendor/virtualenv/src/virtualenv/run/plugin/creators.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, unicode_literals - from collections import OrderedDict, defaultdict, namedtuple from importlib.metadata import EntryPoint @@ -36,7 +34,7 @@ class CreatorSelector(ComponentBuilder): def __init__(self, interpreter, parser): creators, self.key_to_meta, self.describe, self.builtin_key = self.for_interpreter(interpreter) - super(CreatorSelector, self).__init__(interpreter, parser, "creator", creators) + super().__init__(interpreter, parser, "creator", creators) @classmethod def for_interpreter(cls, interpreter): @@ -60,10 +58,10 @@ def for_interpreter(cls, interpreter): describe = creator_class if not key_to_meta: if errors: - rows = ["{} for creators {}".format(k, ", ".join(i.__name__ for i in v)) for k, v in errors.items()] + rows = [f"{k} for creators {', '.join(i.__name__ for i in v)}" for k, v in errors.items()] raise RuntimeError("\n".join(rows)) else: - raise RuntimeError("No virtualenv implementation for {}".format(interpreter)) + raise RuntimeError(f"No virtualenv implementation for {interpreter}") return CreatorInfo( key_to_class=key_to_class, key_to_meta=key_to_meta, @@ -76,13 +74,11 @@ def add_selector_arg_parse(self, name, choices): choices = sorted(choices, key=lambda a: 0 if a == "builtin" else 1) default_value = self._get_default(choices) self.parser.add_argument( - "--{}".format(name), + f"--{name}", choices=choices, default=default_value, required=False, - help="create environment via{}".format( - "" if self.builtin_key is None else " (builtin = {})".format(self.builtin_key), - ), + help=f"create environment via{'' if self.builtin_key is None else f' (builtin = {self.builtin_key})'}", ) @staticmethod @@ -90,11 +86,17 @@ def _get_default(choices): return next(iter(choices)) def populate_selected_argparse(self, selected, app_data): - self.parser.description = "options for {} {}".format(self.name, selected) + self.parser.description = f"options for {self.name} {selected}" self._impl_class.add_parser_arguments(self.parser, self.interpreter, self.key_to_meta[selected], app_data) def create(self, options): options.meta = self.key_to_meta[getattr(options, self.name)] if not issubclass(self._impl_class, Describe): options.describe = self.describe(options, self.interpreter) - return super(CreatorSelector, self).create(options) + return super().create(options) + + +__all__ = [ + "CreatorSelector", + "CreatorInfo", +] diff --git a/vendor/virtualenv/src/virtualenv/run/plugin/discovery.py b/vendor/virtualenv/src/virtualenv/run/plugin/discovery.py index 17c06936..4be9f988 100644 --- a/vendor/virtualenv/src/virtualenv/run/plugin/discovery.py +++ b/vendor/virtualenv/src/virtualenv/run/plugin/discovery.py @@ -1,11 +1,10 @@ -from __future__ import absolute_import, unicode_literals - from importlib.metadata import EntryPoint from .base import PluginLoader class Discovery(PluginLoader): + """Discovery plugins""" _ENTRY_POINTS = { "virtualenv.discovery": [ EntryPoint( @@ -45,3 +44,9 @@ def get_discover(parser, args): def _get_default_discovery(discover_types): return list(discover_types.keys()) + + +__all__ = [ + "get_discover", + "Discovery", +] diff --git a/vendor/virtualenv/src/virtualenv/run/plugin/seeders.py b/vendor/virtualenv/src/virtualenv/run/plugin/seeders.py index ee3e5bc3..0837758b 100644 --- a/vendor/virtualenv/src/virtualenv/run/plugin/seeders.py +++ b/vendor/virtualenv/src/virtualenv/run/plugin/seeders.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, 
unicode_literals - from importlib.metadata import EntryPoint from .base import ComponentBuilder @@ -21,11 +19,11 @@ class SeederSelector(ComponentBuilder): def __init__(self, interpreter, parser): possible = self.options("virtualenv.seed") - super(SeederSelector, self).__init__(interpreter, parser, "seeder", possible) + super().__init__(interpreter, parser, "seeder", possible) def add_selector_arg_parse(self, name, choices): self.parser.add_argument( - "--{}".format(name), + f"--{name}", choices=choices, default=self._get_default(), required=False, @@ -44,7 +42,12 @@ def _get_default(): return "app-data" def handle_selected_arg_parse(self, options): - return super(SeederSelector, self).handle_selected_arg_parse(options) + return super().handle_selected_arg_parse(options) def create(self, options): return self._impl_class(options) + + +__all__ = [ + "SeederSelector", +] diff --git a/vendor/virtualenv/src/virtualenv/run/session.py b/vendor/virtualenv/src/virtualenv/run/session.py index 24836d28..2c8821c9 100644 --- a/vendor/virtualenv/src/virtualenv/run/session.py +++ b/vendor/virtualenv/src/virtualenv/run/session.py @@ -1,12 +1,8 @@ -from __future__ import absolute_import, unicode_literals - import json import logging -from virtualenv.util.six import ensure_text - -class Session(object): +class Session: """Represents a virtual environment creation session""" def __init__(self, verbosity, app_data, interpreter, creator, seeder, activators): @@ -49,7 +45,7 @@ def run(self): self.creator.pyenv_cfg.write() def _create(self): - logging.info("create virtual environment via %s", ensure_text(str(self.creator))) + logging.info("create virtual environment via %s", self.creator) self.creator.run() logging.debug(_DEBUG_MARKER) logging.debug("%s", _Debug(self.creator)) @@ -61,31 +57,31 @@ def _seed(self): def _activate(self): if self.activators: - logging.info( - "add activators for %s", - ", ".join(type(i).__name__.replace("Activator", "") for i in self.activators), - ) + active = ", ".join(type(i).__name__.replace("Activator", "") for i in self.activators) + logging.info("add activators for %s", active) for activator in self.activators: activator.generate(self.creator) def __enter__(self): return self - def __exit__(self, exc_type, exc_val, exc_tb): + def __exit__(self, exc_type, exc_val, exc_tb): # noqa: U100 self._app_data.close() _DEBUG_MARKER = "=" * 30 + " target debug " + "=" * 30 -class _Debug(object): +class _Debug: """lazily populate debug""" def __init__(self, creator): self.creator = creator - def __unicode__(self): - return ensure_text(repr(self)) - def __repr__(self): return json.dumps(self.creator.debug, indent=2) + + +__all__ = [ + "Session", +] diff --git a/vendor/virtualenv/src/virtualenv/seed/__init__.py b/vendor/virtualenv/src/virtualenv/seed/__init__.py index 01e6d4f4..e69de29b 100644 --- a/vendor/virtualenv/src/virtualenv/seed/__init__.py +++ b/vendor/virtualenv/src/virtualenv/seed/__init__.py @@ -1 +0,0 @@ -from __future__ import absolute_import, unicode_literals diff --git a/vendor/virtualenv/src/virtualenv/seed/embed/base_embed.py b/vendor/virtualenv/src/virtualenv/seed/embed/base_embed.py index c794e834..f29110bb 100644 --- a/vendor/virtualenv/src/virtualenv/seed/embed/base_embed.py +++ b/vendor/virtualenv/src/virtualenv/seed/embed/base_embed.py @@ -1,11 +1,5 @@ -from __future__ import absolute_import, unicode_literals - from abc import ABCMeta - -from six import add_metaclass - -from virtualenv.util.path import Path -from virtualenv.util.six import ensure_str, ensure_text 
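# NOTE (editor): background for the six removals in this file and in
# session.py above - ensure_text()/ensure_str() were identity functions for
# str input on Python 3 (see the deleted util/six.py shim later in this
# patch), so every call site can simply drop them. Sketch of the six
# behaviour, not the vendored code:
def ensure_text(s, encoding="utf-8", errors="strict"):
    if isinstance(s, bytes):
        return s.decode(encoding, errors)
    if isinstance(s, str):
        return s
    raise TypeError(f"not expecting type {type(s)!r}")

assert ensure_text("already text") == "already text"
assert ensure_text(b"decoded") == "decoded"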
+from pathlib import Path from ..seeder import Seeder from ..wheels import Version @@ -13,10 +7,9 @@ PERIODIC_UPDATE_ON_BY_DEFAULT = True -@add_metaclass(ABCMeta) -class BaseEmbed(Seeder): +class BaseEmbed(Seeder, metaclass=ABCMeta): def __init__(self, options): - super(BaseEmbed, self).__init__(options, enabled=options.no_seed is False) + super().__init__(options, enabled=options.no_seed is False) self.download = options.download self.extra_search_dir = [i.resolve() for i in options.extra_search_dir if i.exists()] @@ -44,27 +37,27 @@ def distributions(cls): def distribution_to_versions(self): return { - distribution: getattr(self, "{}_version".format(distribution)) + distribution: getattr(self, f"{distribution}_version") for distribution in self.distributions() - if getattr(self, "no_{}".format(distribution)) is False + if getattr(self, f"no_{distribution}") is False } @classmethod - def add_parser_arguments(cls, parser, interpreter, app_data): + def add_parser_arguments(cls, parser, interpreter, app_data): # noqa: U100 group = parser.add_mutually_exclusive_group() group.add_argument( "--no-download", "--never-download", dest="download", action="store_false", - help="pass to disable download of the latest {} from PyPI".format("/".join(cls.distributions())), + help=f"pass to disable download of the latest {'/'.join(cls.distributions())} from PyPI", default=True, ) group.add_argument( "--download", dest="download", action="store_true", - help="pass to enable download of the latest {} from PyPI".format("/".join(cls.distributions())), + help=f"pass to enable download of the latest {'/'.join(cls.distributions())} from PyPI", default=False, ) parser.add_argument( @@ -77,18 +70,18 @@ def add_parser_arguments(cls, parser, interpreter, app_data): ) for distribution, default in cls.distributions().items(): parser.add_argument( - "--{}".format(distribution), + f"--{distribution}", dest=distribution, metavar="version", - help="version of {} to install as seed: embed, bundle or exact version".format(distribution), + help=f"version of {distribution} to install as seed: embed, bundle or exact version", default=default, ) for distribution in cls.distributions(): parser.add_argument( - "--no-{}".format(distribution), - dest="no_{}".format(distribution), + f"--no-{distribution}", + dest=f"no_{distribution}", action="store_true", - help="do not install {}".format(distribution), + help=f"do not install {distribution}", default=False, ) parser.add_argument( @@ -99,20 +92,20 @@ def add_parser_arguments(cls, parser, interpreter, app_data): default=not PERIODIC_UPDATE_ON_BY_DEFAULT, ) - def __unicode__(self): + def __repr__(self): result = self.__class__.__name__ result += "(" if self.extra_search_dir: - result += "extra_search_dir={},".format(", ".join(ensure_text(str(i)) for i in self.extra_search_dir)) - result += "download={},".format(self.download) + result += f"extra_search_dir={', '.join(str(i) for i in self.extra_search_dir)}," + result += f"download={self.download}," for distribution in self.distributions(): - if getattr(self, "no_{}".format(distribution)): + if getattr(self, f"no_{distribution}"): continue - result += " {}{},".format( - distribution, - "={}".format(getattr(self, "{}_version".format(distribution), None) or "latest"), - ) + ver = f"={getattr(self, f'{distribution}_version', None) or 'latest'}" + result += f" {distribution}{ver}," return result[:-1] + ")" - def __repr__(self): - return ensure_str(self.__unicode__()) + +__all__ = [ + "BaseEmbed", +] diff --git 
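# NOTE (editor): the f-string getattr pattern used by
# distribution_to_versions() in base_embed.py above, as a self-contained
# sketch; the class body and version numbers are illustrative only:
class _EmbedSketch:
    pip_version, setuptools_version = "22.2.2", "63.4.1"
    no_pip, no_setuptools = False, True

    @classmethod
    def distributions(cls):
        return {"pip": None, "setuptools": None}

    def distribution_to_versions(self):
        return {
            distribution: getattr(self, f"{distribution}_version")
            for distribution in self.distributions()
            if getattr(self, f"no_{distribution}") is False
        }

assert _EmbedSketch().distribution_to_versions() == {"pip": "22.2.2"}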
a/vendor/virtualenv/src/virtualenv/seed/embed/pip_invoke.py b/vendor/virtualenv/src/virtualenv/seed/embed/pip_invoke.py index c935c021..2ca94380 100644 --- a/vendor/virtualenv/src/virtualenv/seed/embed/pip_invoke.py +++ b/vendor/virtualenv/src/virtualenv/seed/embed/pip_invoke.py @@ -1,18 +1,16 @@ -from __future__ import absolute_import, unicode_literals - import logging from contextlib import contextmanager +from subprocess import Popen from virtualenv.discovery.cached_py_info import LogCmd from virtualenv.seed.embed.base_embed import BaseEmbed -from virtualenv.util.subprocess import Popen from ..wheels import Version, get_wheel, pip_wheel_env_run class PipInvoke(BaseEmbed): def __init__(self, options): - super(PipInvoke, self).__init__(options) + super().__init__(options) def run(self, creator): if not self.enabled: @@ -28,7 +26,7 @@ def _execute(cmd, env): process = Popen(cmd, env=env) process.communicate() if process.returncode != 0: - raise RuntimeError("failed seed with code {}".format(process.returncode)) + raise RuntimeError(f"failed seed with code {process.returncode}") return process @contextmanager @@ -49,9 +47,14 @@ def get_pip_install_cmd(self, exe, for_py_version): env=self.env, ) if wheel is None: - raise RuntimeError("could not get wheel for distribution {}".format(dist)) + raise RuntimeError(f"could not get wheel for distribution {dist}") folders.add(str(wheel.path.parent)) cmd.append(Version.as_pip_req(dist, wheel.version)) for folder in sorted(folders): cmd.extend(["--find-links", str(folder)]) yield cmd + + +__all__ = [ + "PipInvoke", +] diff --git a/vendor/virtualenv/src/virtualenv/seed/embed/via_app_data/pip_install/base.py b/vendor/virtualenv/src/virtualenv/seed/embed/via_app_data/pip_install/base.py index 35e0ccae..0640929c 100644 --- a/vendor/virtualenv/src/virtualenv/seed/embed/via_app_data/pip_install/base.py +++ b/vendor/virtualenv/src/virtualenv/seed/embed/via_app_data/pip_install/base.py @@ -1,23 +1,19 @@ -from __future__ import absolute_import, unicode_literals - import logging import os import re import zipfile from abc import ABCMeta, abstractmethod +from configparser import ConfigParser from itertools import chain +from pathlib import Path from tempfile import mkdtemp from distlib.scripts import ScriptMaker, enquote_executable -from six import PY3, add_metaclass -from virtualenv.util import ConfigParser -from virtualenv.util.path import Path, safe_delete -from virtualenv.util.six import ensure_text +from virtualenv.util.path import safe_delete -@add_metaclass(ABCMeta) -class PipInstall(object): +class PipInstall(metaclass=ABCMeta): def __init__(self, wheel, creator, image_folder): self._wheel = wheel self._creator = creator @@ -27,7 +23,7 @@ def __init__(self, wheel, creator, image_folder): self._console_entry_points = None @abstractmethod - def _sync(self, src, dst): + def _sync(self, src, dst): # noqa: U100 raise NotImplementedError def install(self, version_info): @@ -71,10 +67,7 @@ def _shorten_path_if_needed(self, zip_ref): self._image_dir = Path(to_folder) def _records_text(self, files): - record_data = "\n".join( - "{},,".format(os.path.relpath(ensure_text(str(rec)), ensure_text(str(self._image_dir)))) for rec in files - ) - return record_data + return "\n".join(f"{os.path.relpath(str(rec), str(self._image_dir))},," for rec in files) def _generate_new_files(self): new_files = set() @@ -82,17 +75,17 @@ def _generate_new_files(self): installer.write_text("pip\n") new_files.add(installer) # inject a no-op root element, as workaround for bug in 
https://github.com/pypa/pip/issues/7226 - marker = self._image_dir / "{}.virtualenv".format(self._dist_info.stem) + marker = self._image_dir / f"{self._dist_info.stem}.virtualenv" marker.write_text("") new_files.add(marker) folder = mkdtemp() try: to_folder = Path(folder) - rel = os.path.relpath(ensure_text(str(self._creator.script_dir)), ensure_text(str(self._creator.purelib))) + rel = os.path.relpath(str(self._creator.script_dir), str(self._creator.purelib)) version_info = self._creator.interpreter.version_info for name, module in self._console_scripts.items(): new_files.update( - Path(os.path.normpath(ensure_text(str(self._image_dir / rel / i.name)))) + Path(os.path.normpath(str(self._image_dir / rel / i.name))) for i in self._create_console_entry_point(name, module, to_folder, version_info) ) finally: @@ -111,12 +104,11 @@ def _dist_info(self): self.__dist_info = filename break else: - msg = "no .dist-info at {}, has {}".format(self._image_dir, ", ".join(files)) # pragma: no cover - raise RuntimeError(msg) # pragma: no cover + raise RuntimeError(f"no .dist-info at {self._image_dir}, has {', '.join(files)}") # pragma: no cover return self.__dist_info @abstractmethod - def _fix_records(self, extra_record_data): + def _fix_records(self, extra_record_data): # noqa: U100 raise NotImplementedError @property @@ -127,10 +119,9 @@ def _console_scripts(self): self._console_entry_points = {} entry_points = self._dist_info / "entry_points.txt" if entry_points.exists(): - parser = ConfigParser.ConfigParser() + parser = ConfigParser() with entry_points.open() as file_handler: - reader = getattr(parser, "read_file" if PY3 else "readfp") - reader(file_handler) + parser.read_file(file_handler) if "console_scripts" in parser.sections(): for name, value in parser.items("console_scripts"): match = re.match(r"(.*?)-?\d\.?\d*", name) @@ -142,7 +133,7 @@ def _console_scripts(self): def _create_console_entry_point(self, name, value, to_folder, version_info): result = [] maker = ScriptMakerCustom(to_folder, version_info, self._creator.exe, name) - specification = "{} = {}".format(name, value) + specification = f"{name} = {value}" new_files = maker.make(specification) result.extend(Path(i) for i in new_files) return result @@ -187,7 +178,7 @@ def has_image(self): class ScriptMakerCustom(ScriptMaker): def __init__(self, target_dir, version_info, executable, name): - super(ScriptMakerCustom, self).__init__(None, str(target_dir)) + super().__init__(None, str(target_dir)) self.clobber = True # overwrite self.set_mode = True # ensure they are executable self.executable = enquote_executable(str(executable)) @@ -196,5 +187,10 @@ def __init__(self, target_dir, version_info, executable, name): self._name = name def _write_script(self, names, shebang, script_bytes, filenames, ext): - names.add("{}{}.{}".format(self._name, *self.version_info)) - super(ScriptMakerCustom, self)._write_script(names, shebang, script_bytes, filenames, ext) + names.add(f"{self._name}{self.version_info[0]}.{self.version_info[1]}") + super()._write_script(names, shebang, script_bytes, filenames, ext) + + +__all__ = [ + "PipInstall", +] diff --git a/vendor/virtualenv/src/virtualenv/seed/embed/via_app_data/pip_install/copy.py b/vendor/virtualenv/src/virtualenv/seed/embed/via_app_data/pip_install/copy.py index 29d0bc88..f5717e15 100644 --- a/vendor/virtualenv/src/virtualenv/seed/embed/via_app_data/pip_install/copy.py +++ b/vendor/virtualenv/src/virtualenv/seed/embed/via_app_data/pip_install/copy.py @@ -1,9 +1,7 @@ -from __future__ import 
absolute_import, unicode_literals - import os +from pathlib import Path -from virtualenv.util.path import Path, copy -from virtualenv.util.six import ensure_text +from virtualenv.util.path import copy from .base import PipInstall @@ -14,22 +12,27 @@ def _sync(self, src, dst): def _generate_new_files(self): # create the pyc files - new_files = super(CopyPipInstall, self)._generate_new_files() + new_files = super()._generate_new_files() new_files.update(self._cache_files()) return new_files def _cache_files(self): version = self._creator.interpreter.version_info - py_c_ext = ".{}-{}{}.pyc".format(self._creator.interpreter.implementation.lower(), version.major, version.minor) - for root, dirs, files in os.walk(ensure_text(str(self._image_dir)), topdown=True): + py_c_ext = f".{self._creator.interpreter.implementation.lower()}-{version.major}{version.minor}.pyc" + for root, dirs, files in os.walk(str(self._image_dir), topdown=True): root_path = Path(root) for name in files: if name.endswith(".py"): - yield root_path / "{}{}".format(name[:-3], py_c_ext) + yield root_path / f"{name[:-3]}{py_c_ext}" for name in dirs: yield root_path / name / "__pycache__" def _fix_records(self, new_files): extra_record_data_str = self._records_text(new_files) - with open(ensure_text(str(self._dist_info / "RECORD")), "ab") as file_handler: + with (self._dist_info / "RECORD").open("ab") as file_handler: file_handler.write(extra_record_data_str.encode("utf-8")) + + +__all__ = [ + "CopyPipInstall", +] diff --git a/vendor/virtualenv/src/virtualenv/seed/embed/via_app_data/pip_install/symlink.py b/vendor/virtualenv/src/virtualenv/seed/embed/via_app_data/pip_install/symlink.py index f958b654..4695de5e 100644 --- a/vendor/virtualenv/src/virtualenv/seed/embed/via_app_data/pip_install/symlink.py +++ b/vendor/virtualenv/src/virtualenv/seed/embed/via_app_data/pip_install/symlink.py @@ -1,29 +1,20 @@ -from __future__ import absolute_import, unicode_literals - import os -import subprocess from stat import S_IREAD, S_IRGRP, S_IROTH +from subprocess import PIPE, Popen from virtualenv.util.path import safe_delete, set_tree -from virtualenv.util.six import ensure_text -from virtualenv.util.subprocess import Popen from .base import PipInstall class SymlinkPipInstall(PipInstall): def _sync(self, src, dst): - src_str = ensure_text(str(src)) - dest_str = ensure_text(str(dst)) - os.symlink(src_str, dest_str) + os.symlink(str(src), str(dst)) def _generate_new_files(self): # create the pyc files, as the build image will be R/O - process = Popen( - [ensure_text(str(self._creator.exe)), "-m", "compileall", ensure_text(str(self._image_dir))], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) + cmd = [str(self._creator.exe), "-m", "compileall", str(self._image_dir)] + process = Popen(cmd, stdout=PIPE, stderr=PIPE) process.communicate() # the root pyc is shared, so we'll not symlink that - but still add the pyc files to the RECORD for close root_py_cache = self._image_dir / "__pycache__" @@ -32,7 +23,7 @@ def _generate_new_files(self): new_files.update(root_py_cache.iterdir()) new_files.add(root_py_cache) safe_delete(root_py_cache) - core_new_files = super(SymlinkPipInstall, self)._generate_new_files() + core_new_files = super()._generate_new_files() # remove files that are within the image folder deeper than one level (as these will be not linked directly) for file in core_new_files: try: @@ -47,15 +38,20 @@ def _generate_new_files(self): def _fix_records(self, new_files): new_files.update(i for i in self._image_dir.iterdir()) 
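# NOTE (editor): the tag produced by the py_c_ext expression in copy.py
# above matches what importlib uses for bytecode cache files; a sketch
# with an illustrative path:
import sys
from importlib.util import cache_from_source

version = sys.version_info
py_c_ext = f".{sys.implementation.name}-{version.major}{version.minor}.pyc"
# on CPython 3.10: py_c_ext == ".cpython-310.pyc"
if sys.implementation.name == "cpython":
    assert cache_from_source("pkg/mod.py").endswith(f"mod{py_c_ext}")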
extra_record_data_str = self._records_text(sorted(new_files, key=str)) - with open(ensure_text(str(self._dist_info / "RECORD")), "wb") as file_handler: + with open(str(self._dist_info / "RECORD"), "wb") as file_handler: file_handler.write(extra_record_data_str.encode("utf-8")) def build_image(self): - super(SymlinkPipInstall, self).build_image() + super().build_image() # protect the image by making it read only set_tree(self._image_dir, S_IREAD | S_IRGRP | S_IROTH) def clear(self): if self._image_dir.exists(): safe_delete(self._image_dir) - super(SymlinkPipInstall, self).clear() + super().clear() + + +__all__ = [ + "SymlinkPipInstall", +] diff --git a/vendor/virtualenv/src/virtualenv/seed/embed/via_app_data/via_app_data.py b/vendor/virtualenv/src/virtualenv/seed/embed/via_app_data/via_app_data.py index 9a98a709..f31ecf60 100644 --- a/vendor/virtualenv/src/virtualenv/seed/embed/via_app_data/via_app_data.py +++ b/vendor/virtualenv/src/virtualenv/seed/embed/via_app_data/via_app_data.py @@ -1,17 +1,16 @@ """Bootstrap""" -from __future__ import absolute_import, unicode_literals import logging import sys import traceback from contextlib import contextmanager +from pathlib import Path from subprocess import CalledProcessError from threading import Lock, Thread from virtualenv.info import fs_supports_symlink from virtualenv.seed.embed.base_embed import BaseEmbed from virtualenv.seed.wheels import get_wheel -from virtualenv.util.path import Path from .pip_install.copy import CopyPipInstall from .pip_install.symlink import SymlinkPipInstall @@ -19,20 +18,19 @@ class FromAppData(BaseEmbed): def __init__(self, options): - super(FromAppData, self).__init__(options) + super().__init__(options) self.symlinks = options.symlink_app_data @classmethod def add_parser_arguments(cls, parser, interpreter, app_data): - super(FromAppData, cls).add_parser_arguments(parser, interpreter, app_data) + super().add_parser_arguments(parser, interpreter, app_data) can_symlink = app_data.transient is False and fs_supports_symlink() + sym = "" if can_symlink else "not supported - " parser.add_argument( "--symlink-app-data", dest="symlink_app_data", action="store_true" if can_symlink else "store_false", - help="{} symlink the python packages from the app-data folder (requires seed pip>=19.3)".format( - "" if can_symlink else "not supported - ", - ), + help=f"{sym} symlink the python packages from the app-data folder (requires seed pip>=19.3)", default=False, ) @@ -55,16 +53,16 @@ def _install(name, wheel): if not installer.has_image(): installer.build_image() installer.install(creator.interpreter.version_info) - except Exception: # noqa + except Exception: exceptions[name] = sys.exc_info() - threads = list(Thread(target=_install, args=(n, w)) for n, w in name_to_whl.items()) + threads = [Thread(target=_install, args=(n, w)) for n, w in name_to_whl.items()] for thread in threads: thread.start() for thread in threads: thread.join() if exceptions: - messages = ["failed to build image {} because:".format(", ".join(exceptions.keys()))] + messages = [f"failed to build image {', '.join(exceptions.keys())} because:"] for value in exceptions.values(): exc_type, exc_value, exc_traceback = value messages.append("".join(traceback.format_exception(exc_type, exc_value, exc_traceback))) @@ -93,16 +91,16 @@ def _get(distribution, version): ) if result is not None: break - except Exception as exception: # noqa + except Exception as exception: logging.exception("fail") failure = exception if failure: if isinstance(failure, CalledProcessError): 
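# NOTE (editor): context for the hunk just below - on Python 3 both the
# .output and .stderr attributes of CalledProcessError are populated str
# values, so the sys.version_info < (3, 5) fallback being removed was dead
# code; the values here are illustrative:
from subprocess import CalledProcessError

failure = CalledProcessError(1, ["pip", "download", "pip"], output="out\n", stderr="err\n")
combined = failure.output + failure.stderr  # "out\nerr\n"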
- msg = "failed to download {}".format(distribution) + msg = f"failed to download {distribution}" if version is not None: - msg += " version {}".format(version) - msg += ", pip download exit code {}".format(failure.returncode) - output = failure.output if sys.version_info < (3, 5) else (failure.output + failure.stderr) + msg += f" version {version}" + msg += f", pip download exit code {failure.returncode}" + output = failure.output + failure.stderr if output: msg += "\n" msg += output @@ -115,16 +113,16 @@ def _get(distribution, version): with lock: name_to_whl[distribution] = result - threads = list( + threads = [ Thread(target=_get, args=(distribution, version)) for distribution, version in self.distribution_to_versions().items() - ) + ] for thread in threads: thread.start() for thread in threads: thread.join() if fail: - raise RuntimeError("seed failed due to failing to download wheels {}".format(", ".join(fail.keys()))) + raise RuntimeError(f"seed failed due to failing to download wheels {', '.join(fail.keys())}") yield name_to_whl def installer_class(self, pip_version_tuple): @@ -134,7 +132,12 @@ def installer_class(self, pip_version_tuple): return SymlinkPipInstall return CopyPipInstall - def __unicode__(self): - base = super(FromAppData, self).__unicode__() - msg = ", via={}, app_data_dir={}".format("symlink" if self.symlinks else "copy", self.app_data) - return base[:-1] + msg + base[-1] + def __repr__(self): + msg = f", via={'symlink' if self.symlinks else 'copy'}, app_data_dir={self.app_data}" + base = super().__repr__() + return f"{base[:-1]}{msg}{base[-1]}" + + +__all__ = [ + "FromAppData", +] diff --git a/vendor/virtualenv/src/virtualenv/seed/seeder.py b/vendor/virtualenv/src/virtualenv/seed/seeder.py index 852e8525..0d26bd01 100644 --- a/vendor/virtualenv/src/virtualenv/seed/seeder.py +++ b/vendor/virtualenv/src/virtualenv/seed/seeder.py @@ -1,15 +1,9 @@ -from __future__ import absolute_import, unicode_literals - from abc import ABCMeta, abstractmethod -from six import add_metaclass - -@add_metaclass(ABCMeta) -class Seeder(object): +class Seeder(metaclass=ABCMeta): """A seeder will install some seed packages into a virtual environment.""" - # noinspection PyUnusedLocal def __init__(self, options, enabled): """ @@ -20,7 +14,7 @@ def __init__(self, options, enabled): self.env = options.env @classmethod - def add_parser_arguments(cls, parser, interpreter, app_data): + def add_parser_arguments(cls, parser, interpreter, app_data): # noqa: U100 """ Add CLI arguments for this seed mechanisms. @@ -31,10 +25,15 @@ def add_parser_arguments(cls, parser, interpreter, app_data): raise NotImplementedError @abstractmethod - def run(self, creator): + def run(self, creator): # noqa: U100 """Perform the seed operation. 
:param creator: the creator (based of :class:`virtualenv.create.creator.Creator`) we used to create this \ virtual environment """ raise NotImplementedError + + +__all__ = [ + "Seeder", +] diff --git a/vendor/virtualenv/src/virtualenv/seed/wheels/__init__.py b/vendor/virtualenv/src/virtualenv/seed/wheels/__init__.py index dbffe2e4..c563181f 100644 --- a/vendor/virtualenv/src/virtualenv/seed/wheels/__init__.py +++ b/vendor/virtualenv/src/virtualenv/seed/wheels/__init__.py @@ -1,11 +1,9 @@ -from __future__ import absolute_import, unicode_literals - from .acquire import get_wheel, pip_wheel_env_run from .util import Version, Wheel -__all__ = ( +__all__ = [ "get_wheel", "pip_wheel_env_run", "Version", "Wheel", -) +] diff --git a/vendor/virtualenv/src/virtualenv/seed/wheels/acquire.py b/vendor/virtualenv/src/virtualenv/seed/wheels/acquire.py index 37440265..d8e8d1e3 100644 --- a/vendor/virtualenv/src/virtualenv/seed/wheels/acquire.py +++ b/vendor/virtualenv/src/virtualenv/seed/wheels/acquire.py @@ -1,13 +1,10 @@ """Bootstrap""" -from __future__ import absolute_import, unicode_literals import logging import sys from operator import eq, lt - -from virtualenv.util.path import Path -from virtualenv.util.six import ensure_str -from virtualenv.util.subprocess import Popen, subprocess +from pathlib import Path +from subprocess import PIPE, CalledProcessError, Popen from .bundle import from_bundle from .periodic_update import add_wheel_to_update_log @@ -43,7 +40,7 @@ def get_wheel(distribution, version, for_py_version, search_dirs, download, app_ def download_wheel(distribution, version_spec, for_py_version, search_dirs, app_data, to_folder, env): - to_download = "{}{}".format(distribution, version_spec or "") + to_download = f"{distribution}{version_spec or ''}" logging.debug("download wheel %s %s to %s", to_download, for_py_version, to_folder) cmd = [ sys.executable, @@ -63,15 +60,11 @@ def download_wheel(distribution, version_spec, for_py_version, search_dirs, app_ ] # pip has no interface in python - must be a new sub-process env = pip_wheel_env_run(search_dirs, app_data, env) - process = Popen(cmd, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) + process = Popen(cmd, env=env, stdout=PIPE, stderr=PIPE, universal_newlines=True) out, err = process.communicate() if process.returncode != 0: - kwargs = {"output": out} - if sys.version_info < (3, 5): - kwargs["output"] += err - else: - kwargs["stderr"] = err - raise subprocess.CalledProcessError(process.returncode, cmd, **kwargs) + kwargs = {"output": out, "stderr": err} + raise CalledProcessError(process.returncode, cmd, **kwargs) result = _find_downloaded_wheel(distribution, version_spec, for_py_version, to_folder, out) logging.debug("downloaded wheel %s", result.name) return result @@ -83,7 +76,7 @@ def _find_downloaded_wheel(distribution, version_spec, for_py_version, to_folder for marker in ("Saved ", "File was already downloaded "): if line.startswith(marker): return Wheel(Path(line[len(marker) :]).absolute()) - # if for some reason the output does not match fallback to latest version with that spec + # if for some reason the output does not match fallback to the latest version with that spec return find_compatible_in_house(distribution, version_spec, for_py_version, to_folder) @@ -104,18 +97,12 @@ def find_compatible_in_house(distribution, version_spec, for_py_version, in_fold def pip_wheel_env_run(search_dirs, app_data, env): - for_py_version = "{}.{}".format(*sys.version_info[0:2]) env = env.copy() - 
env.update( - { - ensure_str(k): str(v) # python 2 requires these to be string only (non-unicode) - for k, v in {"PIP_USE_WHEEL": "1", "PIP_USER": "0", "PIP_NO_INPUT": "1"}.items() - }, - ) + env.update({"PIP_USE_WHEEL": "1", "PIP_USER": "0", "PIP_NO_INPUT": "1"}) wheel = get_wheel( distribution="pip", version=None, - for_py_version=for_py_version, + for_py_version=f"{sys.version_info.major}.{sys.version_info.minor}", search_dirs=search_dirs, download=False, app_data=app_data, @@ -124,5 +111,12 @@ def pip_wheel_env_run(search_dirs, app_data, env): ) if wheel is None: raise RuntimeError("could not find the embedded pip") - env[str("PYTHONPATH")] = str(wheel.path) + env["PYTHONPATH"] = str(wheel.path) return env + + +__all__ = [ + "get_wheel", + "download_wheel", + "pip_wheel_env_run", +] diff --git a/vendor/virtualenv/src/virtualenv/seed/wheels/bundle.py b/vendor/virtualenv/src/virtualenv/seed/wheels/bundle.py index 39cd3d33..66bbe56d 100644 --- a/vendor/virtualenv/src/virtualenv/seed/wheels/bundle.py +++ b/vendor/virtualenv/src/virtualenv/seed/wheels/bundle.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, unicode_literals - from ..wheels.embed import get_embed_wheel from .periodic_update import periodic_update from .util import Version, Wheel, discover_wheels @@ -15,9 +13,8 @@ def from_bundle(distribution, version, for_py_version, search_dirs, app_data, do if version != Version.embed: # 2. check if we have upgraded embed if app_data.can_update: - wheel = periodic_update( - distribution, of_version, for_py_version, wheel, search_dirs, app_data, do_periodic_update, env - ) + per = do_periodic_update + wheel = periodic_update(distribution, of_version, for_py_version, wheel, search_dirs, app_data, per, env) # 3. acquire from extra search dir found_wheel = from_dir(distribution, of_version, for_py_version, search_dirs) @@ -49,3 +46,9 @@ def from_dir(distribution, version, for_py_version, directories): for wheel in discover_wheels(folder, distribution, version, for_py_version): return wheel return None + + +__all__ = [ + "load_embed_wheel", + "from_bundle", +] diff --git a/vendor/virtualenv/src/virtualenv/seed/wheels/embed/__init__.py b/vendor/virtualenv/src/virtualenv/seed/wheels/embed/__init__.py index c294118b..e18e45c5 100644 --- a/vendor/virtualenv/src/virtualenv/seed/wheels/embed/__init__.py +++ b/vendor/virtualenv/src/virtualenv/seed/wheels/embed/__init__.py @@ -1,8 +1,7 @@ -from __future__ import absolute_import, unicode_literals +from pathlib import Path from virtualenv import __path_assets__ from virtualenv.seed.wheels.util import Wheel -from virtualenv.util.path import Path if __path_assets__: BUNDLE_FOLDER = __path_assets__ / "seed" / "wheels" @@ -11,28 +10,28 @@ BUNDLE_SUPPORT = { "3.11": { - "pip": "pip-22.0.4-py3-none-any.whl", - "setuptools": "setuptools-62.1.0-py3-none-any.whl", + "pip": "pip-22.2.2-py3-none-any.whl", + "setuptools": "setuptools-63.4.1-py3-none-any.whl", "wheel": "wheel-0.37.1-py2.py3-none-any.whl", }, "3.10": { - "pip": "pip-22.0.4-py3-none-any.whl", - "setuptools": "setuptools-62.1.0-py3-none-any.whl", + "pip": "pip-22.2.2-py3-none-any.whl", + "setuptools": "setuptools-63.4.1-py3-none-any.whl", "wheel": "wheel-0.37.1-py2.py3-none-any.whl", }, "3.9": { - "pip": "pip-22.0.4-py3-none-any.whl", - "setuptools": "setuptools-62.1.0-py3-none-any.whl", + "pip": "pip-22.2.2-py3-none-any.whl", + "setuptools": "setuptools-63.4.1-py3-none-any.whl", "wheel": "wheel-0.37.1-py2.py3-none-any.whl", }, "3.8": { - "pip": "pip-22.0.4-py3-none-any.whl", - "setuptools": 
"setuptools-62.1.0-py3-none-any.whl", + "pip": "pip-22.2.2-py3-none-any.whl", + "setuptools": "setuptools-63.4.1-py3-none-any.whl", "wheel": "wheel-0.37.1-py2.py3-none-any.whl", }, "3.7": { - "pip": "pip-22.0.4-py3-none-any.whl", - "setuptools": "setuptools-62.1.0-py3-none-any.whl", + "pip": "pip-22.2.2-py3-none-any.whl", + "setuptools": "setuptools-63.4.1-py3-none-any.whl", "wheel": "wheel-0.37.1-py2.py3-none-any.whl", }, "3.6": { @@ -59,9 +58,9 @@ def get_embed_wheel(distribution, for_py_version): return Wheel.from_path(path) -__all__ = ( +__all__ = [ "get_embed_wheel", "BUNDLE_SUPPORT", "MAX", "BUNDLE_FOLDER", -) +] diff --git a/vendor/virtualenv/src/virtualenv/seed/wheels/embed/pip-22.0.4-py3-none-any.whl b/vendor/virtualenv/src/virtualenv/seed/wheels/embed/pip-22.0.4-py3-none-any.whl deleted file mode 100644 index 7ba048e2..00000000 Binary files a/vendor/virtualenv/src/virtualenv/seed/wheels/embed/pip-22.0.4-py3-none-any.whl and /dev/null differ diff --git a/vendor/virtualenv/src/virtualenv/seed/wheels/embed/pip-22.2.2-py3-none-any.whl b/vendor/virtualenv/src/virtualenv/seed/wheels/embed/pip-22.2.2-py3-none-any.whl new file mode 100644 index 00000000..03099718 Binary files /dev/null and b/vendor/virtualenv/src/virtualenv/seed/wheels/embed/pip-22.2.2-py3-none-any.whl differ diff --git a/vendor/virtualenv/src/virtualenv/seed/wheels/embed/setuptools-62.1.0-py3-none-any.whl b/vendor/virtualenv/src/virtualenv/seed/wheels/embed/setuptools-62.1.0-py3-none-any.whl deleted file mode 100644 index 0a56be0e..00000000 Binary files a/vendor/virtualenv/src/virtualenv/seed/wheels/embed/setuptools-62.1.0-py3-none-any.whl and /dev/null differ diff --git a/vendor/virtualenv/src/virtualenv/seed/wheels/embed/setuptools-63.4.1-py3-none-any.whl b/vendor/virtualenv/src/virtualenv/seed/wheels/embed/setuptools-63.4.1-py3-none-any.whl new file mode 100644 index 00000000..6001025f Binary files /dev/null and b/vendor/virtualenv/src/virtualenv/seed/wheels/embed/setuptools-63.4.1-py3-none-any.whl differ diff --git a/vendor/virtualenv/src/virtualenv/seed/wheels/periodic_update.py b/vendor/virtualenv/src/virtualenv/seed/wheels/periodic_update.py index 4f0336bc..2088c9d9 100644 --- a/vendor/virtualenv/src/virtualenv/seed/wheels/periodic_update.py +++ b/vendor/virtualenv/src/virtualenv/seed/wheels/periodic_update.py @@ -2,40 +2,28 @@ Periodically update bundled versions. 
""" -from __future__ import absolute_import, unicode_literals import json import logging import os import ssl -import subprocess import sys from datetime import datetime, timedelta from itertools import groupby +from pathlib import Path from shutil import copy2 +from subprocess import PIPE, Popen from textwrap import dedent from threading import Thread - -from six.moves.urllib.error import URLError -from six.moves.urllib.request import urlopen +from urllib.error import URLError +from urllib.request import urlopen from virtualenv.app_data import AppDataDiskFolder -from virtualenv.info import PY2 -from virtualenv.util.path import Path -from virtualenv.util.subprocess import CREATE_NO_WINDOW, Popen +from virtualenv.util.subprocess import CREATE_NO_WINDOW from ..wheels.embed import BUNDLE_SUPPORT from ..wheels.util import Wheel -if PY2: - # on Python 2 datetime.strptime throws the error below if the import did not trigger on main thread - # Failed to import _strptime because the import lock is held by - try: - import _strptime # noqa - except ImportError: # pragma: no cov - pass # pragma: no cov - - GRACE_PERIOD_CI = timedelta(hours=1) # prevent version switch in the middle of a CI run GRACE_PERIOD_MINOR = timedelta(days=28) UPDATE_PERIOD = timedelta(days=14) @@ -106,7 +94,7 @@ def load_datetime(value): return None if value is None else datetime.strptime(value, DATETIME_FMT) -class NewVersion(object): +class NewVersion: def __init__(self, filename, found_date, release_date, source): self.filename = filename self.found_date = found_date @@ -142,12 +130,9 @@ def use(self, now, ignore_grace_period_minor=False, ignore_grace_period_ci=False return False def __repr__(self): - return "{}(filename={}), found_date={}, release_date={}, source={})".format( - self.__class__.__name__, - self.filename, - self.found_date, - self.release_date, - self.source, + return ( + f"{self.__class__.__name__}(filename={self.filename}), found_date={self.found_date}, " + f"release_date={self.release_date}, source={self.source})" ) def __eq__(self, other): @@ -163,7 +148,7 @@ def wheel(self): return Wheel(Path(self.filename)) -class UpdateLog(object): +class UpdateLog: def __init__(self, started, completed, versions, periodic): self.started = started self.completed = completed @@ -224,8 +209,8 @@ def trigger_update(distribution, for_py_version, wheel, search_dirs, app_data, e .strip() .format(distribution, for_py_version, wheel_path, str(app_data), [str(p) for p in search_dirs], periodic), ] - debug = env.get(str("_VIRTUALENV_PERIODIC_UPDATE_INLINE")) == str("1") - pipe = None if debug else subprocess.PIPE + debug = env.get("_VIRTUALENV_PERIODIC_UPDATE_INLINE") == "1" + pipe = None if debug else PIPE kwargs = {"stdout": pipe, "stderr": pipe} if not debug and sys.platform == "win32": kwargs["creationflags"] = CREATE_NO_WINDOW @@ -233,7 +218,7 @@ def trigger_update(distribution, for_py_version, wheel, search_dirs, app_data, e logging.info( "triggered periodic upgrade of %s%s (for python %s) via background process having PID %d", distribution, - "" if wheel is None else "=={}".format(wheel.version), + "" if wheel is None else f"=={wheel.version}", for_py_version, process.pid, ) @@ -286,7 +271,7 @@ def _run_do_update(app_data, distribution, embed_filename, for_py_version, perio download_time = datetime.now() dest = acquire.download_wheel( distribution=distribution, - version_spec=None if last_version is None else "<{}".format(last_version), + version_spec=None if last_version is None else f"<{last_version}", 
for_py_version=for_py_version, search_dirs=search_dirs, app_data=app_data, @@ -333,7 +318,7 @@ def release_date_for_wheel_path(dest): def _request_context(): yield None # fallback to non verified HTTPS (the information we request is not sensitive, so fallback) - yield ssl._create_unverified_context() # noqa + yield ssl._create_unverified_context() _PYPI_CACHE = {} @@ -346,7 +331,7 @@ def _pypi_get_distribution_info_cached(distribution): def _pypi_get_distribution_info(distribution): - content, url = None, "https://pypi.org/pypi/{}/json".format(distribution) + content, url = None, f"https://pypi.org/pypi/{distribution}/json" try: for context in _request_context(): try: @@ -401,20 +386,19 @@ def _run_manual_upgrade(app_data, distribution, for_py_version, env): search_dirs=[], periodic=False, ) - msg = "upgraded %s for python %s in %s {}".format( - "new entries found:\n%s" if versions else "no new versions found", - ) + args = [ distribution, for_py_version, datetime.now() - start, ] if versions: - args.append("\n".join("\t{}".format(v) for v in versions)) - logging.warning(msg, *args) + args.append("\n".join(f"\t{v}" for v in versions)) + ver_update = "new entries found:\n%s" if versions else "no new versions found" + logging.warning(f"upgraded %s for python %s in %s {ver_update}", *args) -__all__ = ( +__all__ = [ "add_wheel_to_update_log", "periodic_update", "do_update", @@ -425,4 +409,4 @@ def _run_manual_upgrade(app_data, distribution, for_py_version, env): "dump_datetime", "trigger_update", "release_date_for_wheel_path", -) +] diff --git a/vendor/virtualenv/src/virtualenv/seed/wheels/util.py b/vendor/virtualenv/src/virtualenv/seed/wheels/util.py index 1240eb2d..f09d8736 100644 --- a/vendor/virtualenv/src/virtualenv/seed/wheels/util.py +++ b/vendor/virtualenv/src/virtualenv/seed/wheels/util.py @@ -1,12 +1,8 @@ -from __future__ import absolute_import, unicode_literals - from operator import attrgetter from zipfile import ZipFile -from virtualenv.util.six import ensure_text - -class Wheel(object): +class Wheel: def __init__(self, path): # https://www.python.org/dev/peps/pep-0427/#file-name-convention # The wheel filename is {distribution}-{version}(-{build tag})?-{python tag}-{abi tag}-{platform tag}.whl @@ -48,8 +44,8 @@ def name(self): return self.path.name def support_py(self, py_version): - name = "{}.dist-info/METADATA".format("-".join(self.path.stem.split("-")[0:2])) - with ZipFile(ensure_text(str(self.path)), "r") as zip_file: + name = f"{'-'.join(self.path.stem.split('-')[0:2])}.dist-info/METADATA" + with ZipFile(str(self.path), "r") as zip_file: metadata = zip_file.read(name).decode("utf-8") marker = "Requires-Python:" requires = next((i[len(marker) :] for i in metadata.splitlines() if i.startswith(marker)), None) @@ -75,7 +71,7 @@ def support_py(self, py_version): return True def __repr__(self): - return "{}({})".format(self.__class__.__name__, self.path) + return f"{self.__class__.__name__}({self.path})" def __str__(self): return str(self.path) @@ -97,10 +93,7 @@ class Version: bundle = "bundle" embed = "embed" #: custom version handlers - non_version = ( - bundle, - embed, - ) + non_version = (bundle, embed) @staticmethod def of_version(value): @@ -108,9 +101,16 @@ def of_version(value): @staticmethod def as_pip_req(distribution, version): - return "{}{}".format(distribution, Version.as_version_spec(version)) + return f"{distribution}{Version.as_version_spec(version)}" @staticmethod def as_version_spec(version): of_version = Version.of_version(version) - return "" if of_version 
is None else "=={}".format(of_version) + return "" if of_version is None else f"=={of_version}" + + +__all__ = [ + "discover_wheels", + "Version", + "Wheel", +] diff --git a/vendor/virtualenv/src/virtualenv/util/__init__.py b/vendor/virtualenv/src/virtualenv/util/__init__.py index 32d02925..e69de29b 100644 --- a/vendor/virtualenv/src/virtualenv/util/__init__.py +++ b/vendor/virtualenv/src/virtualenv/util/__init__.py @@ -1,11 +0,0 @@ -from __future__ import absolute_import, unicode_literals - -import sys - -if sys.version_info[0] == 3: - import configparser as ConfigParser -else: - import ConfigParser - - -__all__ = ("ConfigParser",) diff --git a/vendor/virtualenv/src/virtualenv/util/error.py b/vendor/virtualenv/src/virtualenv/util/error.py index ac5aa502..945a25e7 100644 --- a/vendor/virtualenv/src/virtualenv/util/error.py +++ b/vendor/virtualenv/src/virtualenv/util/error.py @@ -1,12 +1,11 @@ """Errors""" -from __future__ import absolute_import, unicode_literals class ProcessCallFailed(RuntimeError): """Failed a process call""" def __init__(self, code, out, err, cmd): - super(ProcessCallFailed, self).__init__(code, out, err, cmd) + super().__init__(code, out, err, cmd) self.code = code self.out = out self.err = err diff --git a/vendor/virtualenv/src/virtualenv/util/lock.py b/vendor/virtualenv/src/virtualenv/util/lock.py index 1cf968b4..512d1c95 100644 --- a/vendor/virtualenv/src/virtualenv/util/lock.py +++ b/vendor/virtualenv/src/virtualenv/util/lock.py @@ -1,16 +1,13 @@ """holds locking functionality that works across processes""" -from __future__ import absolute_import, unicode_literals import logging import os from abc import ABCMeta, abstractmethod from contextlib import contextmanager +from pathlib import Path from threading import Lock, RLock from filelock import FileLock, Timeout -from six import add_metaclass - -from virtualenv.util.path import Path class _CountedFileLock(FileLock): @@ -21,20 +18,20 @@ def __init__(self, lock_file): os.makedirs(parent) except OSError: pass - super(_CountedFileLock, self).__init__(lock_file) + super().__init__(lock_file) self.count = 0 self.thread_safe = RLock() def acquire(self, timeout=None, poll_interval=0.05): with self.thread_safe: if self.count == 0: - super(_CountedFileLock, self).acquire(timeout, poll_interval) + super().acquire(timeout, poll_interval) self.count += 1 def release(self, force=False): with self.thread_safe: if self.count == 1: - super(_CountedFileLock, self).release(force=force) + super().release(force=force) self.count = max(self.count - 1, 0) @@ -42,14 +39,13 @@ def release(self, force=False): _store_lock = Lock() -@add_metaclass(ABCMeta) -class PathLockBase(object): +class PathLockBase(metaclass=ABCMeta): def __init__(self, folder): path = Path(folder) self.path = path.resolve() if path.exists() else path def __repr__(self): - return "{}({})".format(self.__class__.__name__, self.path) + return f"{self.__class__.__name__}({self.path})" def __div__(self, other): return type(self)(self.path / other) @@ -62,27 +58,27 @@ def __enter__(self): raise NotImplementedError @abstractmethod - def __exit__(self, exc_type, exc_val, exc_tb): + def __exit__(self, exc_type, exc_val, exc_tb): # noqa: U100 raise NotImplementedError @abstractmethod @contextmanager - def lock_for_key(self, name, no_block=False): + def lock_for_key(self, name, no_block=False): # noqa: U100 raise NotImplementedError @abstractmethod @contextmanager - def non_reentrant_lock_for_key(name): + def non_reentrant_lock_for_key(self, name): # noqa: U100 raise 
NotImplementedError class ReentrantFileLock(PathLockBase): def __init__(self, folder): - super(ReentrantFileLock, self).__init__(folder) + super().__init__(folder) self._lock = None def _create_lock(self, name=""): - lock_file = str(self.path / "{}.lock".format(name)) + lock_file = str(self.path / f"{name}.lock") with _store_lock: if lock_file not in _lock_store: _lock_store[lock_file] = _CountedFileLock(lock_file) @@ -103,7 +99,7 @@ def __enter__(self): self._lock = self._create_lock() self._lock_file(self._lock) - def __exit__(self, exc_type, exc_val, exc_tb): + def __exit__(self, exc_type, exc_val, exc_tb): # noqa: U100 self._release(self._lock) self._del_lock(self._lock) self._lock = None @@ -144,7 +140,7 @@ def lock_for_key(self, name, no_block=False): @contextmanager def non_reentrant_lock_for_key(self, name): - with _CountedFileLock(str(self.path / "{}.lock".format(name))): + with _CountedFileLock(str(self.path / f"{name}.lock")): yield @@ -152,20 +148,20 @@ class NoOpFileLock(PathLockBase): def __enter__(self): raise NotImplementedError - def __exit__(self, exc_type, exc_val, exc_tb): + def __exit__(self, exc_type, exc_val, exc_tb): # noqa: U100 raise NotImplementedError @contextmanager - def lock_for_key(self, name, no_block=False): + def lock_for_key(self, name, no_block=False): # noqa: U100 yield @contextmanager - def non_reentrant_lock_for_key(self, name): + def non_reentrant_lock_for_key(self, name): # noqa: U100 yield -__all__ = ( +__all__ = [ "NoOpFileLock", "ReentrantFileLock", "Timeout", -) +] diff --git a/vendor/virtualenv/src/virtualenv/util/path/__init__.py b/vendor/virtualenv/src/virtualenv/util/path/__init__.py index dc628de8..39a8db7d 100644 --- a/vendor/virtualenv/src/virtualenv/util/path/__init__.py +++ b/vendor/virtualenv/src/virtualenv/util/path/__init__.py @@ -1,18 +1,14 @@ -from __future__ import absolute_import, unicode_literals - -from ._pathlib import Path from ._permission import make_exe, set_tree from ._sync import copy, copytree, ensure_dir, safe_delete, symlink from ._win import get_short_path_name -__all__ = ( +__all__ = [ "ensure_dir", "symlink", "copy", "copytree", - "Path", "make_exe", "set_tree", "safe_delete", "get_short_path_name", -) +] diff --git a/vendor/virtualenv/src/virtualenv/util/path/_pathlib/__init__.py b/vendor/virtualenv/src/virtualenv/util/path/_pathlib/__init__.py deleted file mode 100644 index 746c8aed..00000000 --- a/vendor/virtualenv/src/virtualenv/util/path/_pathlib/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -from __future__ import absolute_import, unicode_literals - -import sys - -import six - -if six.PY3: - from pathlib import Path -else: - if sys.platform == "win32": - # workaround for https://github.com/mcmtroffaes/pathlib2/issues/56 - from .via_os_path import Path - else: - from pathlib2 import Path - - -__all__ = ("Path",) diff --git a/vendor/virtualenv/src/virtualenv/util/path/_pathlib/via_os_path.py b/vendor/virtualenv/src/virtualenv/util/path/_pathlib/via_os_path.py deleted file mode 100644 index b876f025..00000000 --- a/vendor/virtualenv/src/virtualenv/util/path/_pathlib/via_os_path.py +++ /dev/null @@ -1,151 +0,0 @@ -from __future__ import absolute_import, unicode_literals - -import os -import platform -from contextlib import contextmanager - -from virtualenv.util.six import ensure_str, ensure_text - -IS_PYPY = platform.python_implementation() == "PyPy" - - -class Path(object): - def __init__(self, path): - if isinstance(path, Path): - _path = path._path - else: - _path = ensure_text(path) - if IS_PYPY: - _path = 
_path.encode("utf-8") - self._path = _path - - def __repr__(self): - return ensure_str("Path({})".format(ensure_text(self._path))) - - def __unicode__(self): - return ensure_text(self._path) - - def __str__(self): - return ensure_str(self._path) - - def __div__(self, other): - if isinstance(other, Path): - right = other._path - else: - right = ensure_text(other) - if IS_PYPY: - right = right.encode("utf-8") - return Path(os.path.join(self._path, right)) - - def __truediv__(self, other): - return self.__div__(other) - - def __eq__(self, other): - return self._path == (other._path if isinstance(other, Path) else None) - - def __ne__(self, other): - return not (self == other) - - def __hash__(self): - return hash(self._path) - - def as_posix(self): - return str(self).replace(os.sep, "/") - - def exists(self): - return os.path.exists(self._path) - - @property - def parent(self): - return Path(os.path.abspath(os.path.join(self._path, os.path.pardir))) - - def resolve(self): - return Path(os.path.realpath(self._path)) - - @property - def name(self): - return os.path.basename(self._path) - - @property - def parts(self): - return self._path.split(os.sep) - - def is_file(self): - return os.path.isfile(self._path) - - def is_dir(self): - return os.path.isdir(self._path) - - def mkdir(self, parents=True, exist_ok=True): - try: - os.makedirs(self._path) - except OSError: - if not exist_ok: - raise - - def read_text(self, encoding="utf-8"): - return self.read_bytes().decode(encoding) - - def read_bytes(self): - with open(self._path, "rb") as file_handler: - return file_handler.read() - - def write_bytes(self, content): - with open(self._path, "wb") as file_handler: - file_handler.write(content) - - def write_text(self, text, encoding="utf-8"): - self.write_bytes(text.encode(encoding)) - - def iterdir(self): - for p in os.listdir(self._path): - yield Path(os.path.join(self._path, p)) - - @property - def suffix(self): - _, ext = os.path.splitext(self.name) - return ext - - @property - def stem(self): - base, _ = os.path.splitext(self.name) - return base - - @contextmanager - def open(self, mode="r"): - with open(self._path, mode) as file_handler: - yield file_handler - - @property - def parents(self): - result = [] - parts = self.parts - for i in range(len(parts) - 1): - result.append(Path(os.sep.join(parts[0 : i + 1]))) - return result[::-1] - - def unlink(self): - os.remove(self._path) - - def with_name(self, name): - return self.parent / name - - def is_symlink(self): - return os.path.islink(self._path) - - def relative_to(self, other): - if not self._path.startswith(other._path): - raise ValueError("{} does not start with {}".format(self._path, other._path)) - return Path(os.sep.join(self.parts[len(other.parts) :])) - - def stat(self): - return os.stat(self._path) - - def chmod(self, mode): - os.chmod(self._path, mode) - - def absolute(self): - return Path(os.path.abspath(self._path)) - - -__all__ = ("Path",) diff --git a/vendor/virtualenv/src/virtualenv/util/path/_permission.py b/vendor/virtualenv/src/virtualenv/util/path/_permission.py index 73bb6e81..ca923140 100644 --- a/vendor/virtualenv/src/virtualenv/util/path/_permission.py +++ b/vendor/virtualenv/src/virtualenv/util/path/_permission.py @@ -1,10 +1,6 @@ -from __future__ import absolute_import, unicode_literals - import os from stat import S_IXGRP, S_IXOTH, S_IXUSR -from virtualenv.util.six import ensure_text - def make_exe(filename): original_mode = filename.stat().st_mode @@ -21,7 +17,7 @@ def make_exe(filename): def set_tree(folder, stat): 
-    for root, _, files in os.walk(ensure_text(str(folder))):
+    for root, _, files in os.walk(str(folder)):
         for filename in files:
             os.chmod(os.path.join(root, filename), stat)
diff --git a/vendor/virtualenv/src/virtualenv/util/path/_sync.py b/vendor/virtualenv/src/virtualenv/util/path/_sync.py
index 05f19d02..604379d9 100644
--- a/vendor/virtualenv/src/virtualenv/util/path/_sync.py
+++ b/vendor/virtualenv/src/virtualenv/util/path/_sync.py
@@ -1,33 +1,18 @@
-from __future__ import absolute_import, unicode_literals
-
 import logging
 import os
 import shutil
 from stat import S_IWUSR
 
-from six import PY2
-
-from virtualenv.info import IS_CPYTHON, IS_WIN
-from virtualenv.util.six import ensure_text
-
-if PY2 and IS_CPYTHON and IS_WIN:  # CPython2 on Windows supports unicode paths if passed as unicode
-
-    def norm(src):
-        return ensure_text(str(src))
-
-else:
-    norm = str
-
 
 def ensure_dir(path):
     if not path.exists():
-        logging.debug("create folder %s", ensure_text(str(path)))
-        os.makedirs(norm(path))
+        logging.debug("create folder %s", str(path))
+        os.makedirs(str(path))
 
 
 def ensure_safe_to_do(src, dest):
     if src == dest:
-        raise ValueError("source and destination is the same {}".format(src))
+        raise ValueError(f"source and destination is the same {src}")
     if not dest.exists():
         return
     if dest.is_dir() and not dest.is_symlink():
@@ -49,7 +34,7 @@ def copy(src, dest):
     is_dir = src.is_dir()
     method = copytree if is_dir else shutil.copy
     logging.debug("copy %s", _Debug(src, dest))
-    method(norm(src), norm(dest))
+    method(str(src), str(dest))
 
 
 def copytree(src, dest):
@@ -64,34 +49,29 @@ def copytree(src, dest):
 
 
 def safe_delete(dest):
-    def onerror(func, path, exc_info):
+    def onerror(func, path, exc_info):  # noqa: U100
         if not os.access(path, os.W_OK):
             os.chmod(path, S_IWUSR)
             func(path)
         else:
             raise
 
-    shutil.rmtree(ensure_text(str(dest)), ignore_errors=True, onerror=onerror)
+    shutil.rmtree(str(dest), ignore_errors=True, onerror=onerror)
 
 
-class _Debug(object):
+class _Debug:
    def __init__(self, src, dest):
        self.src = src
        self.dest = dest

    def __str__(self):
-        return "{}{} to {}".format(
-            "directory " if self.src.is_dir() else "",
-            ensure_text(str(self.src)),
-            ensure_text(str(self.dest)),
-        )
+        return f"{'directory ' if self.src.is_dir() else ''}{str(self.src)} to {str(self.dest)}"


-__all__ = (
+__all__ = [
     "ensure_dir",
     "symlink",
     "copy",
-    "symlink",
     "copytree",
     "safe_delete",
-)
+]
diff --git a/vendor/virtualenv/src/virtualenv/util/path/_win.py b/vendor/virtualenv/src/virtualenv/util/path/_win.py
index 02e16d07..d83eabbd 100644
--- a/vendor/virtualenv/src/virtualenv/util/path/_win.py
+++ b/vendor/virtualenv/src/virtualenv/util/path/_win.py
@@ -6,7 +6,7 @@ def get_short_path_name(long_name):
     import ctypes
     from ctypes import wintypes
 
-    _GetShortPathNameW = ctypes.windll.kernel32.GetShortPathNameW
+    _GetShortPathNameW = ctypes.windll.kernel32.GetShortPathNameW  # noqa: N806
     _GetShortPathNameW.argtypes = [wintypes.LPCWSTR, wintypes.LPWSTR, wintypes.DWORD]
     _GetShortPathNameW.restype = wintypes.DWORD
     output_buf_size = 0
@@ -17,3 +17,8 @@ def get_short_path_name(long_name):
         return output_buf.value
     else:
         output_buf_size = needed
+
+
+__all__ = [
+    "get_short_path_name",
+]
diff --git a/vendor/virtualenv/src/virtualenv/util/six.py b/vendor/virtualenv/src/virtualenv/util/six.py
deleted file mode 100644
index 199cbed9..00000000
--- a/vendor/virtualenv/src/virtualenv/util/six.py
+++ /dev/null
@@ -1,50 +0,0 @@
-"""Backward compatibility layer with older version of six.
-
-This is used to avoid virtualenv requiring a version of six newer than what
-the system may have.
-"""
-from __future__ import absolute_import
-
-from six import PY2, PY3, binary_type, text_type
-
-try:
-    from six import ensure_text
-except ImportError:
-
-    def ensure_text(s, encoding="utf-8", errors="strict"):
-        """Coerce *s* to six.text_type.
-        For Python 2:
-          - `unicode` -> `unicode`
-          - `str` -> `unicode`
-        For Python 3:
-          - `str` -> `str`
-          - `bytes` -> decoded to `str`
-        """
-        if isinstance(s, binary_type):
-            return s.decode(encoding, errors)
-        elif isinstance(s, text_type):
-            return s
-        else:
-            raise TypeError("not expecting type '%s'" % type(s))
-
-
-try:
-    from six import ensure_str
-except ImportError:
-
-    def ensure_str(s, encoding="utf-8", errors="strict"):
-        """Coerce *s* to `str`.
-        For Python 2:
-          - `unicode` -> encoded to `str`
-          - `str` -> `str`
-        For Python 3:
-          - `str` -> `str`
-          - `bytes` -> decoded to `str`
-        """
-        if not isinstance(s, (text_type, binary_type)):
-            raise TypeError("not expecting type '%s'" % type(s))
-        if PY2 and isinstance(s, text_type):
-            s = s.encode(encoding, errors)
-        elif PY3 and isinstance(s, binary_type):
-            s = s.decode(encoding, errors)
-        return s
diff --git a/vendor/virtualenv/src/virtualenv/util/subprocess/__init__.py b/vendor/virtualenv/src/virtualenv/util/subprocess/__init__.py
index f5066268..bc6ec4d3 100644
--- a/vendor/virtualenv/src/virtualenv/util/subprocess/__init__.py
+++ b/vendor/virtualenv/src/virtualenv/util/subprocess/__init__.py
@@ -1,24 +1,11 @@
-from __future__ import absolute_import, unicode_literals
-
 import subprocess
-import sys
-
-import six
-
-if six.PY2 and sys.platform == "win32":
-    from . import _win_subprocess
-
-    Popen = _win_subprocess.Popen
-else:
-    Popen = subprocess.Popen
-
 
 CREATE_NO_WINDOW = 0x80000000
 
 
 def run_cmd(cmd):
     try:
-        process = Popen(
+        process = subprocess.Popen(
             cmd,
             universal_newlines=True,
             stdin=subprocess.PIPE,
@@ -27,14 +14,14 @@ def run_cmd(cmd):
         )
         out, err = process.communicate()  # input disabled
         code = process.returncode
-    except OSError as os_error:
-        code, out, err = os_error.errno, "", os_error.strerror
+    except OSError as error:
+        code, out, err = error.errno, "", error.strerror
+        if code == 2 and "file" in err:
+            err = str(error)  # FileNotFoundError in Python >= 3.3
     return code, out, err
 
 
 __all__ = (
-    "subprocess",
-    "Popen",
     "run_cmd",
     "CREATE_NO_WINDOW",
 )
diff --git a/vendor/virtualenv/src/virtualenv/util/subprocess/_win_subprocess.py b/vendor/virtualenv/src/virtualenv/util/subprocess/_win_subprocess.py
deleted file mode 100644
index ce531979..00000000
--- a/vendor/virtualenv/src/virtualenv/util/subprocess/_win_subprocess.py
+++ /dev/null
@@ -1,176 +0,0 @@
-# flake8: noqa
-# fmt: off
-## issue: https://bugs.python.org/issue19264
-
-import ctypes
-import os
-import platform
-import subprocess
-from ctypes import Structure, WinError, byref, c_char_p, c_void_p, c_wchar, c_wchar_p, sizeof, windll
-from ctypes.wintypes import BOOL, BYTE, DWORD, HANDLE, LPCWSTR, LPVOID, LPWSTR, WORD
-
-import _subprocess
-
-##
-## Types
-##
-
-CREATE_UNICODE_ENVIRONMENT = 0x00000400
-LPCTSTR = c_char_p
-LPTSTR = c_wchar_p
-LPSECURITY_ATTRIBUTES = c_void_p
-LPBYTE = ctypes.POINTER(BYTE)
-
-class STARTUPINFOW(Structure):
-    _fields_ = [
-        ("cb", DWORD), ("lpReserved", LPWSTR),
-        ("lpDesktop", LPWSTR), ("lpTitle", LPWSTR),
-        ("dwX", DWORD), ("dwY", DWORD),
-        ("dwXSize", DWORD), ("dwYSize", DWORD),
-        ("dwXCountChars", DWORD), ("dwYCountChars", DWORD),
-        ("dwFillAtrribute", DWORD), ("dwFlags", DWORD),
("wShowWindow", WORD), ("cbReserved2", WORD), - ("lpReserved2", LPBYTE), ("hStdInput", HANDLE), - ("hStdOutput", HANDLE), ("hStdError", HANDLE), - ] - -LPSTARTUPINFOW = ctypes.POINTER(STARTUPINFOW) - - -class PROCESS_INFORMATION(Structure): - _fields_ = [ - ("hProcess", HANDLE), ("hThread", HANDLE), - ("dwProcessId", DWORD), ("dwThreadId", DWORD), - ] - -LPPROCESS_INFORMATION = ctypes.POINTER(PROCESS_INFORMATION) - - -class DUMMY_HANDLE(ctypes.c_void_p): - - def __init__(self, *a, **kw): - super(DUMMY_HANDLE, self).__init__(*a, **kw) - self.closed = False - - def Close(self): - if not self.closed: - windll.kernel32.CloseHandle(self) - self.closed = True - - def __int__(self): - return self.value - - -CreateProcessW = windll.kernel32.CreateProcessW -CreateProcessW.argtypes = [ - LPCWSTR, LPWSTR, LPSECURITY_ATTRIBUTES, - LPSECURITY_ATTRIBUTES, BOOL, DWORD, LPVOID, LPCWSTR, - LPSTARTUPINFOW, LPPROCESS_INFORMATION, -] -CreateProcessW.restype = BOOL - - -## -## Patched functions/classes -## - -def CreateProcess( - executable, args, _p_attr, _t_attr, - inherit_handles, creation_flags, env, cwd, - startup_info, -): - """Create a process supporting unicode executable and args for win32 - - Python implementation of CreateProcess using CreateProcessW for Win32 - - """ - - si = STARTUPINFOW( - dwFlags=startup_info.dwFlags, - wShowWindow=startup_info.wShowWindow, - cb=sizeof(STARTUPINFOW), - ## XXXvlab: not sure of the casting here to ints. - hStdInput=startup_info.hStdInput if startup_info.hStdInput is None else int(startup_info.hStdInput), - hStdOutput=startup_info.hStdOutput if startup_info.hStdOutput is None else int(startup_info.hStdOutput), - hStdError=startup_info.hStdError if startup_info.hStdError is None else int(startup_info.hStdError), - ) - - wenv = None - if env is not None: - ## LPCWSTR seems to be c_wchar_p, so let's say CWSTR is c_wchar - env = ( - unicode("").join([ - unicode("%s=%s\0") % (k, v) - for k, v in env.items() - ]) - ) + unicode("\0") - wenv = (c_wchar * len(env))() - wenv.value = env - - wcwd = None - if cwd is not None: - wcwd = unicode(cwd) - - pi = PROCESS_INFORMATION() - creation_flags |= CREATE_UNICODE_ENVIRONMENT - - if CreateProcessW( - executable, args, None, None, - inherit_handles, creation_flags, - wenv, wcwd, byref(si), byref(pi), - ): - return ( - DUMMY_HANDLE(pi.hProcess), DUMMY_HANDLE(pi.hThread), - pi.dwProcessId, pi.dwThreadId, - ) - raise WinError() - - -class Popen(subprocess.Popen): - """This superseeds Popen and corrects a bug in cPython 2.7 implem""" - - def _execute_child( - self, args, executable, preexec_fn, close_fds, - cwd, env, universal_newlines, - startupinfo, creationflags, shell, to_close, - p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite, - ): - """Code from part of _execute_child from Python 2.7 (9fbb65e) - - There are only 2 little changes concerning the construction of - the the final string in shell mode: we preempt the creation of - the command string when shell is True, because original function - will try to encode unicode args which we want to avoid to be able to - sending it as-is to ``CreateProcess``. 
- - """ - if startupinfo is None: - startupinfo = subprocess.STARTUPINFO() - if not isinstance(args, subprocess.types.StringTypes): - args = [i if isinstance(i, bytes) else i.encode('utf-8') for i in args] - args = subprocess.list2cmdline(args) - if platform.python_implementation() == "CPython": - args = args.decode('utf-8') - startupinfo.dwFlags |= _subprocess.STARTF_USESHOWWINDOW - startupinfo.wShowWindow = _subprocess.SW_HIDE - env = os.environ if env is None else env - comspec = env.get("COMSPEC", unicode("cmd.exe")) - if ( - _subprocess.GetVersion() >= 0x80000000 or - os.path.basename(comspec).lower() == "command.com" - ): - w9xpopen = self._find_w9xpopen() - args = unicode('"%s" %s') % (w9xpopen, args) - creationflags |= _subprocess.CREATE_NEW_CONSOLE - - super(Popen, self)._execute_child( - args, executable, - preexec_fn, close_fds, cwd, env, universal_newlines, - startupinfo, creationflags, False, to_close, p2cread, - p2cwrite, c2pread, c2pwrite, errread, errwrite, - ) - -_subprocess.CreateProcess = CreateProcess -# fmt: on diff --git a/vendor/virtualenv/src/virtualenv/util/zipapp.py b/vendor/virtualenv/src/virtualenv/util/zipapp.py index 85d9294f..e7578c48 100644 --- a/vendor/virtualenv/src/virtualenv/util/zipapp.py +++ b/vendor/virtualenv/src/virtualenv/util/zipapp.py @@ -1,11 +1,8 @@ -from __future__ import absolute_import, unicode_literals - import logging import os import zipfile from virtualenv.info import IS_WIN, ROOT -from virtualenv.util.six import ensure_text def read(full_path): @@ -21,7 +18,7 @@ def extract(full_path, dest): with zipfile.ZipFile(ROOT, "r") as zip_file: info = zip_file.getinfo(sub_file) info.filename = dest.name - zip_file.extract(info, ensure_text(str(dest.parent))) + zip_file.extract(info, str(dest.parent)) def _get_path_within_zip(full_path): @@ -31,3 +28,9 @@ def _get_path_within_zip(full_path): # paths are always UNIX separators, even on Windows, though __file__ still follows platform default sub_file = sub_file.replace(os.sep, "/") return sub_file + + +__all__ = [ + "read", + "extract", +] diff --git a/vendor/virtualenv/src/virtualenv/version.py b/vendor/virtualenv/src/virtualenv/version.py index f059df5b..a7a4f30a 100644 --- a/vendor/virtualenv/src/virtualenv/version.py +++ b/vendor/virtualenv/src/virtualenv/version.py @@ -1 +1 @@ -__version__ = "20.14.1" +__version__ = "20.16.3" diff --git a/vendor/virtualenv/tasks/__main__zipapp.py b/vendor/virtualenv/tasks/__main__zipapp.py index 3aa97ca2..87d4589a 100644 --- a/vendor/virtualenv/tasks/__main__zipapp.py +++ b/vendor/virtualenv/tasks/__main__zipapp.py @@ -30,7 +30,7 @@ def _load(self, of_file): def __enter__(self): return self - def __exit__(self, exc_type, exc_val, exc_tb): + def __exit__(self, exc_type, exc_val, exc_tb): # noqa: U100 self._zip_file.close() def find_mod(self, fullname): @@ -121,13 +121,13 @@ def locate_file(self, path): from importlib.util import spec_from_file_location class VersionedFindLoad(VersionPlatformSelect, SourceLoader): - def find_spec(self, fullname, path, target=None): + def find_spec(self, fullname, path, target=None): # noqa: U100 zip_path = self.find_mod(fullname) if zip_path is not None: spec = spec_from_file_location(name=fullname, loader=self) return spec - def module_repr(self, module): + def module_repr(self, module): # noqa: U100 raise NotImplementedError else: @@ -135,7 +135,7 @@ def module_repr(self, module): from imp import new_module class VersionedFindLoad(VersionPlatformSelect): - def find_module(self, fullname, path=None): + def find_module(self, 
             return self if self.find_mod(fullname) else None
 
         def load_module(self, fullname):
diff --git a/vendor/virtualenv/tasks/make_zipapp.py b/vendor/virtualenv/tasks/make_zipapp.py
index aa6f6250..67286f69 100644
--- a/vendor/virtualenv/tasks/make_zipapp.py
+++ b/vendor/virtualenv/tasks/make_zipapp.py
@@ -3,7 +3,6 @@
 import io
 import json
 import os
-import pipes
 import shutil
 import subprocess
 import sys
@@ -12,6 +11,7 @@
 from collections import defaultdict, deque
 from email import message_from_string
 from pathlib import Path, PurePosixPath
+from shlex import quote
 from stat import S_IWUSR
 from tempfile import TemporaryDirectory
 
@@ -20,7 +20,7 @@
 
 HERE = Path(__file__).parent.absolute()
 
-VERSIONS = ["3.{}".format(i) for i in range(10, 4, -1)] + ["2.7"]
+VERSIONS = [f"3.{i}" for i in range(10, 5, -1)]
 
 
 def main():
@@ -46,7 +46,7 @@ def create_zipapp(dest, packages):
         zip_app.writestr("__main__.py", (HERE / "__main__zipapp.py").read_bytes())
     bio.seek(0)
     zipapp.create_archive(bio, dest)
-    print("zipapp created at {}".format(dest))
+    print(f"zipapp created at {dest}")
 
 
 def write_packages_to_zipapp(base, dist, modules, packages, zip_app):
@@ -82,7 +82,7 @@ def write_packages_to_zipapp(base, dist, modules, packages, zip_app):
     del content
 
 
-class WheelDownloader(object):
+class WheelDownloader:
     def __init__(self, into):
         if into.exists():
             shutil.rmtree(into)
@@ -104,7 +104,7 @@ def run(self, target, versions):
                 whl = self._get_wheel(dep, platform[2:] if platform and platform.startswith("==") else None, version)
                 if whl is None:
                     if dep_str not in wheel_store:
-                        raise RuntimeError("failed to get {}, have {}".format(dep_str, wheel_store))
+                        raise RuntimeError(f"failed to get {dep_str}, have {wheel_store}")
                     whl = wheel_store[dep_str]
                 else:
                     wheel_store[dep_str] = whl
@@ -139,7 +139,7 @@ def _download(self, platform, stop_print_on_fail, *args):
     @staticmethod
     def get_dependencies(whl, version):
         with zipfile.ZipFile(str(whl), "r") as zip_file:
-            name = "/".join(["{}.dist-info".format("-".join(whl.name.split("-")[0:2])), "METADATA"])
+            name = "/".join([f"{'-'.join(whl.name.split('-')[0:2])}.dist-info", "METADATA"])
             with zip_file.open(name) as file_handler:
                 metadata = message_from_string(file_handler.read().decode("utf-8"))
         deps = metadata.get_all("Requires-Dist")
@@ -147,15 +147,14 @@ def get_dependencies(whl, version):
             return
         for dep in deps:
             req = Requirement(dep)
-            markers = getattr(req.marker, "_markers", tuple()) or ()
+            markers = getattr(req.marker, "_markers", ()) or ()
             if any(m for m in markers if isinstance(m, tuple) and len(m) == 3 and m[0].value == "extra"):
                 continue
             py_versions = WheelDownloader._marker_at(markers, "python_version")
             if py_versions:
                 marker = Marker('python_version < "1"')
                 marker._markers = [
-                    markers[ver]
-                    for ver in sorted(list(i for i in set(py_versions) | {i - 1 for i in py_versions} if i >= 0))
+                    markers[ver] for ver in sorted(i for i in set(py_versions) | {i - 1 for i in py_versions} if i >= 0)
                 ]
                 matches_python = marker.evaluate({"python_version": version})
                 if not matches_python:
@@ -167,7 +166,7 @@ def get_dependencies(whl, version):
             platform_positions = WheelDownloader._marker_at(markers, "sys_platform")
             deleted = 0
             for pos in platform_positions:  # can only be one meaningfully
-                platform = "{}{}".format(markers[pos][1].value, markers[pos][2].value)
+                platform = f"{markers[pos][1].value}{markers[pos][2].value}"
                 deleted += WheelDownloader._del_marker_at(markers, pos - deleted)
                 platforms.append(platform)
             if not platforms:
@@ -207,7 +206,7 @@ def build_sdist(self, target):
             return self._build_sdist(self.into, folder)
         finally:
             # permission error on Windows <3.7 https://bugs.python.org/issue26660
-            def onerror(func, path, exc_info):
+            def onerror(func, path, exc_info):  # noqa: U100
                 os.chmod(path, S_IWUSR)
                 func(path)
 
@@ -227,7 +226,7 @@ def run_suppress_output(cmd, stop_print_on_fail=False):
     process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
     out, err = process.communicate()
     if stop_print_on_fail and process.returncode != 0:
-        print("exit with {} of {}".format(process.returncode, " ".join(pipes.quote(i) for i in cmd)), file=sys.stdout)
+        print(f"exit with {process.returncode} of {' '.join(quote(i) for i in cmd)}", file=sys.stdout)
         if out:
             print(out, file=sys.stdout)
         if err:
@@ -250,19 +249,19 @@ def get_wheels_for_support_versions(folder):
                 wheel_versions.wheel = wheel
     for name, p_w_v in packages.items():
         for platform, w_v in p_w_v.items():
-            print("{} - {}".format(name, platform))
+            print(f"{name} - {platform}")
             for wheel, wheel_versions in w_v.items():
-                print("{} of {} (use {})".format(" ".join(wheel_versions.versions), wheel, wheel_versions.wheel))
+                print(f"{' '.join(wheel_versions.versions)} of {wheel} (use {wheel_versions.wheel})")
     return packages
 
 
-class WheelForVersion(object):
+class WheelForVersion:
     def __init__(self, wheel=None, versions=None):
         self.wheel = wheel
         self.versions = versions if versions else []
 
     def __repr__(self):
-        return "{}({!r}, {!r})".format(self.__class__.__name__, self.wheel, self.versions)
+        return f"{self.__class__.__name__}({self.wheel!r}, {self.versions!r})"
 
 
 if __name__ == "__main__":
diff --git a/vendor/virtualenv/tasks/release.py b/vendor/virtualenv/tasks/release.py
index fa797d35..8724bf2a 100644
--- a/vendor/virtualenv/tasks/release.py
+++ b/vendor/virtualenv/tasks/release.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """Handles creating a release PR"""
 from pathlib import Path
 from subprocess import check_call
@@ -60,9 +59,9 @@ def tag_release_commit(release_commit, repo, version) -> TagReference:
     print("tag release commit")
     existing_tags = [x.name for x in repo.tags]
     if version in existing_tags:
-        print("delete existing tag {}".format(version))
+        print(f"delete existing tag {version}")
         repo.delete_tag(version)
-    print("create tag {}".format(version))
+    print(f"create tag {version}")
     tag = repo.create_tag(version, ref=release_commit, force=True)
     return tag
 
diff --git a/vendor/virtualenv/tasks/update_embedded.py b/vendor/virtualenv/tasks/update_embedded.py
index 81445594..ba43b303 100755
--- a/vendor/virtualenv/tasks/update_embedded.py
+++ b/vendor/virtualenv/tasks/update_embedded.py
@@ -1,19 +1,10 @@
-#!/usr/bin/env python
-"""
-Helper script to rebuild virtualenv.py from virtualenv_support
-"""
-from __future__ import print_function, unicode_literals
+"""Helper script to rebuild virtualenv.py from virtualenv_support"""
 import codecs
 import os
 import re
-import sys
 from zlib import crc32 as _crc32
 
-if sys.version_info < (3,):
-    print("requires Python 3 (use tox from Python 3 if invoked via tox)")
-    raise SystemExit(1)
-
 
 def crc32(data):
     """Python version idempotent"""
@@ -31,7 +22,7 @@ def crc32(data):
 
 
 def rebuild(script_path):
-    with open(script_path, "rt") as current_fh:
+    with open(script_path) as current_fh:
         script_content = current_fh.read()
     script_parts = []
     match_end = 0
@@ -53,21 +44,21 @@ def rebuild(script_path):
 
 
 def handle_file(previous_content, filename, variable_name, previous_encoded):
-    print("Found file {}".format(filename))
+    print(f"Found file {filename}")
print(f"Found file {filename}") current_path = os.path.realpath(os.path.join(here, "..", "src", "virtualenv_embedded", filename)) _, file_type = os.path.splitext(current_path) keep_line_ending = file_type in (".bat",) - with open(current_path, "rt", encoding="utf-8", newline="" if keep_line_ending else None) as current_fh: + with open(current_path, encoding="utf-8", newline="" if keep_line_ending else None) as current_fh: current_text = current_fh.read() current_crc = crc32(current_text) current_encoded = b64.encode(gzip.encode(current_text.encode())[0])[0].decode() if current_encoded == previous_encoded: - print(" File up to date (crc: {:08x})".format(current_crc)) + print(f" File up to date (crc: {current_crc:08x})") return False, previous_content # Else: content has changed previous_text = gzip.decode(b64.decode(previous_encoded.encode())[0])[0].decode() previous_crc = crc32(previous_text) - print(" Content changed (crc: {:08x} -> {:08x})".format(previous_crc, current_crc)) + print(f" Content changed (crc: {previous_crc:08x} -> {current_crc:08x})") new_part = file_template.format(filename=filename, variable=variable_name, data=current_encoded) return True, new_part diff --git a/vendor/virtualenv/tasks/upgrade_wheels.py b/vendor/virtualenv/tasks/upgrade_wheels.py index 6bc70d06..60681809 100644 --- a/vendor/virtualenv/tasks/upgrade_wheels.py +++ b/vendor/virtualenv/tasks/upgrade_wheels.py @@ -1,7 +1,6 @@ """ Helper script to rebuild virtualenv_support. Downloads the wheel files using pip """ -from __future__ import absolute_import, unicode_literals import os import shutil @@ -70,31 +69,32 @@ def run(): outcome = (1 if STRICT else 0) if (added or removed) else 0 for key, versions in added.items(): - text = "* upgrade embedded {} to {}".format(key, fmt_version(versions)) + text = f"* upgrade embedded {key} to {fmt_version(versions)}" if key in removed: - text += " from {}".format(removed[key]) + text += f" from {removed[key]}" del removed[key] print(text) for key, versions in removed.items(): - print("* removed embedded {} of {}".format(key, fmt_version(versions))) + print(f"* removed embedded {key} of {fmt_version(versions)}") - support_table = OrderedDict((".".join(str(j) for j in i), list()) for i in SUPPORT) + support_table = OrderedDict((".".join(str(j) for j in i), []) for i in SUPPORT) for package in sorted(new_batch.keys()): for folder, version in sorted(folders.items()): if (folder / package).exists(): support_table[version].append(package) support_table = {k: OrderedDict((i.split("-")[0], i) for i in v) for k, v in support_table.items()} - + bundle = ",".join( + f"{v!r}: {{ {','.join(f'{p!r}: {f!r}' for p, f in l.items())} }}" for v, l in support_table.items() + ) msg = dedent( - """ - from __future__ import absolute_import, unicode_literals + f""" + from pathlib import Path from virtualenv.seed.wheels.util import Wheel - from virtualenv.util.path import Path BUNDLE_FOLDER = Path(__file__).absolute().parent - BUNDLE_SUPPORT = {{ {0} }} - MAX = {1} + BUNDLE_SUPPORT = {{ {bundle} }} + MAX = {repr(next(iter(support_table.keys())))} def get_embed_wheel(distribution, for_py_version): @@ -102,20 +102,14 @@ def get_embed_wheel(distribution, for_py_version): return Wheel.from_path(path) - __all__ = ( + __all__ = [ "get_embed_wheel", "BUNDLE_SUPPORT", "MAX", "BUNDLE_FOLDER", - ) + ] - """.format( - ",".join( - "{!r}: {{ {} }}".format(v, ",".join("{!r}: {!r}".format(p, f) for p, f in l.items())) - for v, l in support_table.items() - ), - repr(next(iter(support_table.keys()))), - ), + """, 
     )
     dest_target = DEST / "__init__.py"
     dest_target.write_text(msg)
@@ -126,7 +120,7 @@ def get_embed_wheel(distribution, for_py_version):
 
 
 def fmt_version(versions):
-    return ", ".join("``{}``".format(v) for v in versions)
+    return ", ".join(f"``{v}``" for v in versions)
 
 
 def collect_package_versions(new_packages):
diff --git a/vendor/virtualenv/tests/conftest.py b/vendor/virtualenv/tests/conftest.py
index ad8643b9..68924dc3 100644
--- a/vendor/virtualenv/tests/conftest.py
+++ b/vendor/virtualenv/tests/conftest.py
@@ -1,23 +1,18 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import, unicode_literals
-
 import logging
 import os
 import shutil
 import sys
 from contextlib import contextmanager
 from functools import partial
+from pathlib import Path
 
 import pytest
-import six
 
 from virtualenv.app_data import AppDataDiskFolder
 from virtualenv.discovery.builtin import get_interpreter
 from virtualenv.discovery.py_info import PythonInfo
-from virtualenv.info import IS_PYPY, IS_WIN, fs_supports_symlink
+from virtualenv.info import IS_WIN, fs_supports_symlink
 from virtualenv.report import LOGGER
-from virtualenv.util.path import Path
-from virtualenv.util.six import ensure_str, ensure_text
 
 
 def pytest_addoption(parser):
@@ -51,7 +46,7 @@ def pytest_collection_modifyitems(config, items):
 
 
 @pytest.fixture(scope="session")
-def has_symlink_support(tmp_path_factory):
+def has_symlink_support(tmp_path_factory):  # noqa: U100
     return fs_supports_symlink()
 
 
@@ -99,9 +94,9 @@ def _link(src, dest):
 
 
 @pytest.fixture(autouse=True)
-def ensure_logging_stable():
+def _ensure_logging_stable():
     logger_level = LOGGER.level
-    handlers = [i for i in LOGGER.handlers]
+    handlers = list(LOGGER.handlers)
     filelock_logger = logging.getLogger("filelock")
     fl_level = filelock_logger.level
     yield
@@ -114,16 +109,16 @@
 
 
 @pytest.fixture(autouse=True)
-def check_cwd_not_changed_by_test():
+def _check_cwd_not_changed_by_test():
     old = os.getcwd()
     yield
     new = os.getcwd()
     if old != new:
-        pytest.fail("tests changed cwd: {!r} => {!r}".format(old, new))
+        pytest.fail(f"tests changed cwd: {old!r} => {new!r}")
 
 
 @pytest.fixture(autouse=True)
-def ensure_py_info_cache_empty(session_app_data):
+def _ensure_py_info_cache_empty(session_app_data):
     PythonInfo.clear_cache(session_app_data)
     yield
     PythonInfo.clear_cache(session_app_data)
@@ -142,16 +137,16 @@ def change_os_environ(key, value):
 
 
 @pytest.fixture(autouse=True, scope="session")
-def ignore_global_config(tmp_path_factory):
+def _ignore_global_config(tmp_path_factory):
     filename = str(tmp_path_factory.mktemp("folder") / "virtualenv-test-suite.ini")
-    with change_os_environ(ensure_str("VIRTUALENV_CONFIG_FILE"), filename):
+    with change_os_environ("VIRTUALENV_CONFIG_FILE", filename):
         yield
 
 
 @pytest.fixture(autouse=True, scope="session")
-def pip_cert(tmp_path_factory):
+def _pip_cert(tmp_path_factory):
     # workaround for https://github.com/pypa/pip/issues/8984 - if the certificate is explicitly set no error can happen
-    key = ensure_str("PIP_CERT")
+    key = "PIP_CERT"
     if key in os.environ:
         yield
     else:
@@ -165,13 +160,11 @@ def _pip_cert(tmp_path_factory):
 
 
 @pytest.fixture(autouse=True)
-def check_os_environ_stable():
+def _check_os_environ_stable():
     old = os.environ.copy()
     # ensure we don't inherit parent env variables
     to_clean = {
-        k
-        for k in os.environ.keys()
-        if k.startswith(str("VIRTUALENV_")) or str("VIRTUAL_ENV") in k or k.startswith(str("TOX_"))
+        k for k in os.environ.keys() if k.startswith("VIRTUALENV_") or "VIRTUAL_ENV" in k or k.startswith("TOX_")
     }
     cleaned = {k: os.environ[k] for k, v in os.environ.items()}
     override = {
@@ -195,18 +188,18 @@
         extra = {k: new[k] for k in set(new) - set(old)}
         miss = {k: old[k] for k in set(old) - set(new) - to_clean}
         diff = {
-            "{} = {} vs {}".format(k, old[k], new[k])
+            f"{k} = {old[k]} vs {new[k]}"
             for k in set(old) & set(new)
-            if old[k] != new[k] and not k.startswith(str("PYTEST_"))
+            if old[k] != new[k] and not k.startswith("PYTEST_")
         }
         if extra or miss or diff:
             msg = "tests changed environ"
             if extra:
-                msg += " extra {}".format(extra)
+                msg += f" extra {extra}"
             if miss:
-                msg += " miss {}".format(miss)
+                msg += f" miss {miss}"
             if diff:
-                msg += " diff {}".format(diff)
+                msg += f" diff {diff}"
             pytest.fail(msg)
     finally:
         os.environ.update(cleaned)
@@ -221,7 +214,7 @@ def coverage_env(monkeypatch, link, request):
     """
     Enable coverage report collection on the created virtual environments by injecting the coverage project
     """
-    if COVERAGE_RUN and "no_coverage" not in request.fixturenames:
+    if COVERAGE_RUN and "_no_coverage" not in request.fixturenames:
         # we inject right after creation, we cannot collect coverage on site.py - used for helper scripts, such as debug
         from virtualenv import run
 
@@ -259,16 +252,16 @@ def finish():
     yield finish
 
 
-# no_coverage tells coverage_env to disable coverage injection for no_coverage user.
-@pytest.fixture
-def no_coverage():
+# _no_coverage tells coverage_env to disable coverage injection for _no_coverage user.
+@pytest.fixture()
+def _no_coverage():
     pass
 
 
 if COVERAGE_RUN:
     import coverage
 
-    class EnableCoverage(object):
+    class EnableCoverage:
         _COV_FILE = Path(coverage.__file__)
         _ROOT_COV_FILES_AND_FOLDERS = [i for i in _COV_FILE.parents[1].iterdir() if i.name.startswith("coverage")]
 
@@ -285,7 +278,7 @@ def __enter__(self, creator):
             self.targets.append((target, clean))
             return self
 
-        def __exit__(self, exc_type, exc_val, exc_tb):
+        def __exit__(self, exc_type, exc_val, exc_tb):  # noqa: U100
             for target, clean in self.targets:
                 if target.exists():
                     clean()
@@ -294,7 +287,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
 
 
 @pytest.fixture(scope="session")
 def is_inside_ci():
-    yield bool(os.environ.get(str("CI_RUN")))
+    return bool(os.environ.get("CI_RUN"))
 
 
 @pytest.fixture(scope="session")
@@ -318,9 +311,7 @@ def special_char_name():
 @pytest.fixture()
 def special_name_dir(tmp_path, special_char_name):
     dest = Path(str(tmp_path)) / special_char_name
-    yield dest
-    if six.PY2 and sys.platform == "win32" and not IS_PYPY:  # pytest python2 windows does not support unicode delete
-        shutil.rmtree(ensure_text(str(dest)))
+    return dest
 
 
 @pytest.fixture(scope="session")
@@ -337,7 +328,7 @@ def current_fastest(current_creators):
 def session_app_data(tmp_path_factory):
     temp_folder = tmp_path_factory.mktemp("session-app-data")
     app_data = AppDataDiskFolder(folder=str(temp_folder))
-    with change_env_var(str("VIRTUALENV_OVERRIDE_APP_DATA"), str(app_data.lock.path)):
+    with change_env_var("VIRTUALENV_OVERRIDE_APP_DATA", str(app_data.lock.path)):
         yield app_data
 
 
@@ -362,7 +353,7 @@ def change_env_var(key, value):
 @pytest.fixture()
 def temp_app_data(monkeypatch, tmp_path):
     app_data = tmp_path / "app-data"
-    monkeypatch.setenv(str("VIRTUALENV_OVERRIDE_APP_DATA"), str(app_data))
+    monkeypatch.setenv("VIRTUALENV_OVERRIDE_APP_DATA", str(app_data))
     return app_data
 
 
@@ -371,20 +362,20 @@ def cross_python(is_inside_ci, session_app_data):
     spec = str(2 if sys.version_info[0] == 3 else 3)
     interpreter = get_interpreter(spec, [], session_app_data)
     if interpreter is None:
not find {}".format(spec) + msg = f"could not find {spec}" if is_inside_ci: raise RuntimeError(msg) pytest.skip(msg=msg) - yield interpreter + return interpreter @pytest.fixture(scope="session") def for_py_version(): - return "{}.{}".format(*sys.version_info[0:2]) + return f"{sys.version_info.major}.{sys.version_info.minor}" @pytest.fixture() -def skip_if_test_in_system(session_app_data): +def _skip_if_test_in_system(session_app_data): current = PythonInfo.current(session_app_data) if current.system_executable is not None: pytest.skip("test not valid if run under system") diff --git a/vendor/virtualenv/tests/integration/test_run_int.py b/vendor/virtualenv/tests/integration/test_run_int.py index 632d6259..d41dc1ef 100644 --- a/vendor/virtualenv/tests/integration/test_run_int.py +++ b/vendor/virtualenv/tests/integration/test_run_int.py @@ -1,19 +1,16 @@ -from __future__ import absolute_import, unicode_literals - import sys import pytest from virtualenv import cli_run from virtualenv.info import IS_PYPY -from virtualenv.util.six import ensure_text from virtualenv.util.subprocess import run_cmd -@pytest.mark.skipif(IS_PYPY, reason="setuptools distutil1s patching does not work") +@pytest.mark.skipif(IS_PYPY, reason="setuptools distutils patching does not work") def test_app_data_pinning(tmp_path): version = "19.1.1" if sys.version_info[0:2] == (3, 4) else "19.3.1" - result = cli_run([ensure_text(str(tmp_path)), "--pip", version, "--activators", "", "--seeder", "app-data"]) + result = cli_run([str(tmp_path), "--pip", version, "--activators", "", "--seeder", "app-data"]) code, out, err = run_cmd([str(result.creator.script("pip")), "list", "--disable-pip-version-check"]) assert not code assert not err diff --git a/vendor/virtualenv/tests/integration/test_zipapp.py b/vendor/virtualenv/tests/integration/test_zipapp.py index e5198849..4e2f2892 100644 --- a/vendor/virtualenv/tests/integration/test_zipapp.py +++ b/vendor/virtualenv/tests/integration/test_zipapp.py @@ -1,16 +1,13 @@ -from __future__ import absolute_import, unicode_literals - import shutil import subprocess import sys +from pathlib import Path import pytest from flaky import flaky from virtualenv.discovery.py_info import PythonInfo from virtualenv.run import cli_run -from virtualenv.util.path import Path -from virtualenv.util.six import ensure_text HERE = Path(__file__).parent CURRENT = PythonInfo.current_system() @@ -33,7 +30,7 @@ def zipapp_build_env(tmp_path_factory): [ "-vvv", "-p", - "{}3.{}".format(impl, version), + f"{impl}3.{version}", "--activators", "", str(create_env_path), @@ -60,7 +57,7 @@ def zipapp_build_env(tmp_path_factory): @pytest.fixture(scope="session") def zipapp(zipapp_build_env, tmp_path_factory): into = tmp_path_factory.mktemp("zipapp") - path = Path(HERE).parent.parent / "tasks" / "make_zipapp.py" + path = HERE.parent.parent / "tasks" / "make_zipapp.py" filename = into / "virtualenv.pyz" cmd = [zipapp_build_env, str(path), "--dest", str(filename)] subprocess.check_call(cmd) @@ -77,9 +74,9 @@ def zipapp_test_env(tmp_path_factory): @pytest.fixture() -def call_zipapp(zipapp, monkeypatch, tmp_path, zipapp_test_env, temp_app_data): +def call_zipapp(zipapp, tmp_path, zipapp_test_env, temp_app_data): # noqa: U100 def _run(*args): - cmd = [str(zipapp_test_env), str(zipapp), "-vv", ensure_text(str(tmp_path / "env"))] + list(args) + cmd = [str(zipapp_test_env), str(zipapp), "-vv", str(tmp_path / "env")] + list(args) subprocess.check_call(cmd) return _run diff --git a/vendor/virtualenv/tests/unit/activation/conftest.py 
index 6f2dd431..d9e76d0c 100644
--- a/vendor/virtualenv/tests/unit/activation/conftest.py
+++ b/vendor/virtualenv/tests/unit/activation/conftest.py
@@ -1,24 +1,18 @@
-from __future__ import absolute_import, unicode_literals
-
 import os
-import pipes
 import re
-import shutil
 import subprocess
 import sys
 from os.path import dirname, normcase
+from pathlib import Path
+from shlex import quote
+from subprocess import Popen
 
 import pytest
-import six
 
-from virtualenv.info import IS_PYPY, WIN_CPYTHON_2
 from virtualenv.run import cli_run
-from virtualenv.util.path import Path
-from virtualenv.util.six import ensure_str, ensure_text
-from virtualenv.util.subprocess import Popen
 
 
-class ActivationTester(object):
+class ActivationTester:
     def __init__(self, of_class, session, cmd, activate_script, extension):
         self.of_class = of_class
         self._creator = session.creator
@@ -51,19 +45,14 @@ def get_version(self, raise_on_fail):
             self._version = exception
             if raise_on_fail:
                 raise
-            return RuntimeError("{} is not available due {}".format(self, exception))
+            return RuntimeError(f"{self} is not available due to {exception}")
         return self._version
 
-    def __unicode__(self):
-        return "{}(\nversion={!r},\ncreator={},\ninterpreter={})".format(
-            self.__class__.__name__,
-            self._version,
-            six.text_type(self._creator),
-            six.text_type(self._creator.interpreter),
-        )
-
     def __repr__(self):
-        return ensure_str(self.__unicode__())
+        return (
+            f"{self.__class__.__name__}(\nversion={self._version!r},\ncreator={self._creator},\n"
+            f"interpreter={self._creator.interpreter})"
+        )
 
     def __call__(self, monkeypatch, tmp_path):
         activate_script = self._creator.bin_dir / self.activate_script
@@ -71,25 +60,24 @@ def __call__(self, monkeypatch, tmp_path):
         # check line endings are correct type
         script_content = activate_script.read_bytes()
         for line in script_content.split(b"\n")[:-1]:
-            cr = b"\r" if sys.version_info.major == 2 else 13
             if self.unix_line_ending:
-                assert line == b"" or line[-1] != cr, script_content.decode("utf-8")
+                assert line == b"" or line[-1] != 13, script_content.decode("utf-8")
             else:
-                assert line[-1] == cr, script_content.decode("utf-8")
+                assert line[-1] == 13, script_content.decode("utf-8")
 
         test_script = self._generate_test_script(activate_script, tmp_path)
-        monkeypatch.chdir(ensure_text(str(tmp_path)))
+        monkeypatch.chdir(tmp_path)
 
-        monkeypatch.delenv(str("VIRTUAL_ENV"), raising=False)
-        invoke, env = self._invoke_script + [ensure_text(str(test_script))], self.env(tmp_path)
+        monkeypatch.delenv("VIRTUAL_ENV", raising=False)
+        invoke, env = self._invoke_script + [str(test_script)], self.env(tmp_path)
 
         try:
             process = Popen(invoke, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env)
-            _raw, _ = process.communicate()
-            raw = _raw.decode("utf-8")
+            raw_, _ = process.communicate()
+            raw = raw_.decode()
         except subprocess.CalledProcessError as exception:
-            output = ensure_text((exception.output + exception.stderr) if six.PY3 else exception.output)
-            assert not exception.returncode, output
+            output = exception.output + exception.stderr
+            assert not exception.returncode, output  # noqa: PT017
             return
 
         out = re.sub(r"pydev debugger: process \d+ is connecting\n\n", "", raw, re.M).strip().splitlines()
@@ -100,22 +88,22 @@ def non_source_activate(self, activate_script):
         return self._invoke_script + [str(activate_script)]
 
     # noinspection PyMethodMayBeStatic
-    def env(self, tmp_path):
+    def env(self, tmp_path):  # noqa: U100
         env = os.environ.copy()
         # add the current python executable folder to the path so we already have another python on the path
         # also keep the path so the shells (fish, bash, etc.) can be discovered
-        env[str("PYTHONIOENCODING")] = str("utf-8")
-        env[str("PATH")] = os.pathsep.join([dirname(sys.executable)] + env.get(str("PATH"), str("")).split(os.pathsep))
+        env["PYTHONIOENCODING"] = "utf-8"
+        env["PATH"] = os.pathsep.join([dirname(sys.executable)] + env.get("PATH", "").split(os.pathsep))
         # clear up some environment variables so they don't affect the tests
-        for key in [k for k in env.keys() if k.startswith(str("_OLD")) or k.startswith(str("VIRTUALENV_"))]:
+        for key in [k for k in env.keys() if k.startswith("_OLD") or k.startswith("VIRTUALENV_")]:
             del env[key]
         return env
 
     def _generate_test_script(self, activate_script, tmp_path):
         commands = self._get_test_lines(activate_script)
-        script = ensure_text(os.linesep).join(commands)
-        test_script = tmp_path / "script.{}".format(self.extension)
-        with open(ensure_text(str(test_script)), "wb") as file_handler:
+        script = os.linesep.join(commands)
+        test_script = tmp_path / f"script.{self.extension}"
+        with test_script.open("wb") as file_handler:
             file_handler.write(script.encode(self.script_encoding))
         return test_script
 
@@ -146,7 +134,7 @@ def assert_output(self, out, raw, tmp_path):
         assert self.norm_path(out[3]) == self.norm_path(self._creator.dest).replace("\\\\", "\\"), raw
         # Some attempts to test the prompt output print more than 1 line.
         # So we need to check if the prompt exists on any of them.
-        prompt_text = "({}) ".format(self._creator.env_name)
+        prompt_text = f"({self._creator.env_name}) "
         assert any(prompt_text in line for line in out[4:-3]), raw
 
         assert out[-3] == "wrote pydoc_test.html", raw
@@ -157,41 +145,32 @@ def assert_output(self, out, raw, tmp_path):
         assert out[-1] == "None", raw
 
     def quote(self, s):
-        return pipes.quote(s)
+        return quote(s)
 
     def python_cmd(self, cmd):
-        return "{} -c {}".format(os.path.basename(sys.executable), self.quote(cmd))
+        return f"{os.path.basename(sys.executable)} -c {self.quote(cmd)}"
 
     def print_python_exe(self):
-        return self.python_cmd(
-            "import sys; print(sys.executable{})".format(
-                "" if six.PY3 or IS_PYPY else ".decode(sys.getfilesystemencoding())",
-            ),
-        )
+        return self.python_cmd("import sys; print(sys.executable)")
 
     def print_os_env_var(self, var):
-        val = '"{}"'.format(var)
-        return self.python_cmd(
-            "import os; import sys; v = os.environ.get({}); print({})".format(
-                val,
-                "v" if six.PY3 or IS_PYPY else "None if v is None else v.decode(sys.getfilesystemencoding())",
-            ),
-        )
+        val = f'"{var}"'
+        return self.python_cmd(f"import os; import sys; v = os.environ.get({val}); print(v)")
 
     def print_prompt(self):
         return NotImplemented
 
     def activate_call(self, script):
-        cmd = self.quote(ensure_text(str(self.activate_cmd)))
-        scr = self.quote(ensure_text(str(script)))
-        return "{} {}".format(cmd, scr).strip()
+        cmd = self.quote(str(self.activate_cmd))
+        scr = self.quote(str(script))
+        return f"{cmd} {scr}".strip()
 
     @staticmethod
     def norm_path(path):
         # python may return Windows short paths, normalize
         if not isinstance(path, Path):
             path = Path(path)
-        path = ensure_text(str(path.resolve()))
+        path = str(path.resolve())
         if sys.platform != "win32":
             result = path
         else:
@@ -199,18 +178,18 @@ def norm_path(path):
             buffer_cont = create_unicode_buffer(256)
             get_long_path_name = windll.kernel32.GetLongPathNameW
-            get_long_path_name(six.text_type(path), buffer_cont, 256)
+            get_long_path_name(str(path), buffer_cont, 256)
             result = buffer_cont.value or path
         return normcase(result)
 
 
 class RaiseOnNonSourceCall(ActivationTester):
     def __init__(self, of_class, session, cmd, activate_script, extension, non_source_fail_message):
-        super(RaiseOnNonSourceCall, self).__init__(of_class, session, cmd, activate_script, extension)
+        super().__init__(of_class, session, cmd, activate_script, extension)
         self.non_source_fail_message = non_source_fail_message
 
     def __call__(self, monkeypatch, tmp_path):
-        env, activate_script = super(RaiseOnNonSourceCall, self).__call__(monkeypatch, tmp_path)
+        env, activate_script = super().__call__(monkeypatch, tmp_path)
         process = Popen(
             self.non_source_activate(activate_script),
             stdout=subprocess.PIPE,
@@ -235,16 +214,14 @@ def raise_on_non_source_class():
 
 @pytest.fixture(scope="session", params=[True, False], ids=["with_prompt", "no_prompt"])
 def activation_python(request, tmp_path_factory, special_char_name, current_fastest):
-    dest = os.path.join(ensure_text(str(tmp_path_factory.mktemp("activation-tester-env"))), special_char_name)
+    dest = os.path.join(str(tmp_path_factory.mktemp("activation-tester-env")), special_char_name)
     cmd = ["--without-pip", dest, "--creator", current_fastest, "-vv", "--no-periodic-update"]
     if request.param:
         cmd += ["--prompt", special_char_name]
     session = cli_run(cmd)
     pydoc_test = session.creator.purelib / "pydoc_test.py"
     pydoc_test.write_text('"""This is pydoc_test.py"""')
-    yield session
-    if WIN_CPYTHON_2:  # PY2 windows does not support unicode delete
-        shutil.rmtree(dest)
+    return session
 
 
 @pytest.fixture()
@@ -252,10 +229,10 @@ def activation_tester(activation_python, monkeypatch, tmp_path, is_inside_ci):
     def _tester(tester_class):
         tester = tester_class(activation_python)
         if not tester.of_class.supports(activation_python.creator.interpreter):
-            pytest.skip("{} not supported".format(tester.of_class.__name__))
+            pytest.skip(f"{tester.of_class.__name__} not supported")
         version = tester.get_version(raise_on_fail=is_inside_ci)
-        if not isinstance(version, six.string_types):
-            pytest.skip(msg=six.text_type(version))
+        if not isinstance(version, str):
+            pytest.skip(msg=str(version))
         return tester(monkeypatch, tmp_path)
 
     return _tester
diff --git a/vendor/virtualenv/tests/unit/activation/test_activation_support.py b/vendor/virtualenv/tests/unit/activation/test_activation_support.py
index d493c23a..4bca3638 100644
--- a/vendor/virtualenv/tests/unit/activation/test_activation_support.py
+++ b/vendor/virtualenv/tests/unit/activation/test_activation_support.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, unicode_literals
-
 from argparse import Namespace
 
 import pytest
diff --git a/vendor/virtualenv/tests/unit/activation/test_activator.py b/vendor/virtualenv/tests/unit/activation/test_activator.py
index 4a8a51c6..c8e973af 100644
--- a/vendor/virtualenv/tests/unit/activation/test_activator.py
+++ b/vendor/virtualenv/tests/unit/activation/test_activator.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, unicode_literals
-
 from argparse import Namespace
 
 from virtualenv.activation.activator import Activator
@@ -7,7 +5,7 @@
 
 def test_activator_prompt_cwd(monkeypatch, tmp_path):
     class FakeActivator(Activator):
-        def generate(self, creator):
+        def generate(self, creator):  # noqa: U100
             raise NotImplementedError
 
     cwd = tmp_path / "magic"
diff --git a/vendor/virtualenv/tests/unit/activation/test_bash.py b/vendor/virtualenv/tests/unit/activation/test_bash.py
index 612ad378..ae568965 100644
--- a/vendor/virtualenv/tests/unit/activation/test_bash.py
+++ b/vendor/virtualenv/tests/unit/activation/test_bash.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, unicode_literals
-
 import pytest
 
 from virtualenv.activation import BashActivator
@@ -10,7 +8,7 @@
 def test_bash(raise_on_non_source_class, activation_tester):
     class Bash(raise_on_non_source_class):
         def __init__(self, session):
-            super(Bash, self).__init__(
+            super().__init__(
                 BashActivator,
                 session,
                 "bash",
diff --git a/vendor/virtualenv/tests/unit/activation/test_batch.py b/vendor/virtualenv/tests/unit/activation/test_batch.py
index 973f0bad..1d951b22 100644
--- a/vendor/virtualenv/tests/unit/activation/test_batch.py
+++ b/vendor/virtualenv/tests/unit/activation/test_batch.py
@@ -1,31 +1,32 @@
-from __future__ import absolute_import, unicode_literals
+from shlex import quote
 
-import pipes
+import pytest
 
 from virtualenv.activation import BatchActivator
 
 
-def test_batch(activation_tester_class, activation_tester, tmp_path, activation_python):
+@pytest.mark.usefixtures("activation_python")
+def test_batch(activation_tester_class, activation_tester, tmp_path):
     version_script = tmp_path / "version.bat"
     version_script.write_text("ver")
 
     class Batch(activation_tester_class):
         def __init__(self, session):
-            super(Batch, self).__init__(BatchActivator, session, None, "activate.bat", "bat")
+            super().__init__(BatchActivator, session, None, "activate.bat", "bat")
             self._version_cmd = [str(version_script)]
             self._invoke_script = []
             self.deactivate = "call deactivate"
             self.activate_cmd = "call"
-            self.pydoc_call = "call {}".format(self.pydoc_call)
+            self.pydoc_call = f"call {self.pydoc_call}"
             self.unix_line_ending = False
 
         def _get_test_lines(self, activate_script):
             # for BATCH utf-8 support we need to change the character code page to 65001
-            return ["@echo off", "", "chcp 65001 1>NUL"] + super(Batch, self)._get_test_lines(activate_script)
+            return ["@echo off", "", "chcp 65001 1>NUL"] + super()._get_test_lines(activate_script)
 
         def quote(self, s):
             """double quotes need to be single, and single need to be double"""
-            return "".join(("'" if c == '"' else ('"' if c == "'" else c)) for c in pipes.quote(s))
+            return "".join(("'" if c == '"' else ('"' if c == "'" else c)) for c in quote(s))
 
         def print_prompt(self):
             return "echo %PROMPT%"
diff --git a/vendor/virtualenv/tests/unit/activation/test_csh.py b/vendor/virtualenv/tests/unit/activation/test_csh.py
index 1fa5146d..f9539c5a 100644
--- a/vendor/virtualenv/tests/unit/activation/test_csh.py
+++ b/vendor/virtualenv/tests/unit/activation/test_csh.py
@@ -1,12 +1,10 @@
-from __future__ import absolute_import, unicode_literals
-
 from virtualenv.activation import CShellActivator
 
 
 def test_csh(activation_tester_class, activation_tester):
     class Csh(activation_tester_class):
         def __init__(self, session):
-            super(Csh, self).__init__(CShellActivator, session, "csh", "activate.csh", "csh")
+            super().__init__(CShellActivator, session, "csh", "activate.csh", "csh")
 
         def print_prompt(self):
             return "echo 'source \"$VIRTUAL_ENV/bin/activate.csh\"; echo $prompt' | csh -i"
diff --git a/vendor/virtualenv/tests/unit/activation/test_fish.py b/vendor/virtualenv/tests/unit/activation/test_fish.py
index 7b229e01..450b89c5 100644
--- a/vendor/virtualenv/tests/unit/activation/test_fish.py
+++ b/vendor/virtualenv/tests/unit/activation/test_fish.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, unicode_literals
-
 import pytest
 
 from virtualenv.activation import FishActivator
@@ -8,14 +6,14 @@
 
 @pytest.mark.skipif(IS_WIN, reason="we have not setup fish in CI yet")
 def test_fish(activation_tester_class, activation_tester, monkeypatch, tmp_path):
-    monkeypatch.setenv(str("HOME"), str(tmp_path))
+    monkeypatch.setenv("HOME", str(tmp_path))
     fish_conf_dir = tmp_path / ".config" / "fish"
     fish_conf_dir.mkdir(parents=True)
     (fish_conf_dir / "config.fish").write_text("")
 
     class Fish(activation_tester_class):
         def __init__(self, session):
-            super(Fish, self).__init__(FishActivator, session, "fish", "activate.fish", "fish")
+            super().__init__(FishActivator, session, "fish", "activate.fish", "fish")
 
         def print_prompt(self):
             return "fish_prompt"
diff --git a/vendor/virtualenv/tests/unit/activation/test_nushell.py b/vendor/virtualenv/tests/unit/activation/test_nushell.py
index 65c4028b..a778d497 100644
--- a/vendor/virtualenv/tests/unit/activation/test_nushell.py
+++ b/vendor/virtualenv/tests/unit/activation/test_nushell.py
@@ -1,12 +1,4 @@
-from __future__ import absolute_import, unicode_literals
-
-import sys
-
-if sys.version_info > (3,):
-    from shutil import which
-else:
-    from distutils.spawn import find_executable as which
-
+from shutil import which
 
 from virtualenv.activation import NushellActivator
 from virtualenv.info import IS_WIN
@@ -19,7 +11,7 @@ def __init__(self, session):
             if cmd is None and IS_WIN:
                 cmd = "c:\\program files\\nu\\bin\\nu.exe"
 
-            super(Nushell, self).__init__(NushellActivator, session, cmd, "activate.nu", "nu")
+            super().__init__(NushellActivator, session, cmd, "activate.nu", "nu")
 
             self.unix_line_ending = not IS_WIN
 
diff --git a/vendor/virtualenv/tests/unit/activation/test_powershell.py b/vendor/virtualenv/tests/unit/activation/test_powershell.py
index f3705cda..761237f9 100644
--- a/vendor/virtualenv/tests/unit/activation/test_powershell.py
+++ b/vendor/virtualenv/tests/unit/activation/test_powershell.py
@@ -1,33 +1,31 @@
-from __future__ import absolute_import, unicode_literals
-
-import pipes
 import sys
+from shlex import quote
 
 import pytest
 
 from virtualenv.activation import PowerShellActivator
 
 
-@pytest.mark.slow
+@pytest.mark.slow()
 def test_powershell(activation_tester_class, activation_tester, monkeypatch):
     monkeypatch.setenv("TERM", "xterm")
 
     class PowerShell(activation_tester_class):
         def __init__(self, session):
             cmd = "powershell.exe" if sys.platform == "win32" else "pwsh"
-            super(PowerShell, self).__init__(PowerShellActivator, session, cmd, "activate.ps1", "ps1")
+            super().__init__(PowerShellActivator, session, cmd, "activate.ps1", "ps1")
             self._version_cmd = [cmd, "-c", "$PSVersionTable"]
             self._invoke_script = [cmd, "-ExecutionPolicy", "ByPass", "-File"]
             self.activate_cmd = "."
             self.script_encoding = "utf-16"
 
         def quote(self, s):
-            """powershell double double quote needed for quotes within single quotes"""
-            return pipes.quote(s).replace('"', '""')
+            """powershell double quote needed for quotes within single quotes"""
+            return quote(s).replace('"', '""')
 
         def _get_test_lines(self, activate_script):
             # for BATCH utf-8 support we need to change the character code page to 65001
-            return super(PowerShell, self)._get_test_lines(activate_script)
+            return super()._get_test_lines(activate_script)
 
         def invoke_script(self):
             return [self.cmd, "-File"]
 
diff --git a/vendor/virtualenv/tests/unit/activation/test_python_activator.py b/vendor/virtualenv/tests/unit/activation/test_python_activator.py
index 0b42e114..a22b3607 100644
--- a/vendor/virtualenv/tests/unit/activation/test_python_activator.py
+++ b/vendor/virtualenv/tests/unit/activation/test_python_activator.py
@@ -1,19 +1,16 @@
-from __future__ import absolute_import, unicode_literals
-
 import os
 import sys
 from ast import literal_eval
 from textwrap import dedent
 
 from virtualenv.activation import PythonActivator
-from virtualenv.info import IS_WIN, WIN_CPYTHON_2
-from virtualenv.util.six import ensure_text
+from virtualenv.info import IS_WIN
 
 
 def test_python(raise_on_non_source_class, activation_tester):
     class Python(raise_on_non_source_class):
         def __init__(self, session):
-            super(Python, self).__init__(
+            super().__init__(
                 PythonActivator,
                 session,
                 sys.executable,
@@ -25,15 +22,15 @@ def __init__(self, session):
 
         def env(self, tmp_path):
             env = os.environ.copy()
-            env[str("PYTHONIOENCODING")] = str("utf-8")
+            env["PYTHONIOENCODING"] = "utf-8"
             for key in {"VIRTUAL_ENV", "PYTHONPATH"}:
                 env.pop(str(key), None)
-            env[str("PATH")] = os.pathsep.join([str(tmp_path), str(tmp_path / "other")])
+            env["PATH"] = os.pathsep.join([str(tmp_path), str(tmp_path / "other")])
             return env
 
         @staticmethod
         def _get_test_lines(activate_script):
-            raw = """
+            raw = f"""
             import os
             import sys
             import platform
@@ -45,9 +42,9 @@ def print_r(value):
             print_r(os.environ.get("PATH").split(os.pathsep))
             print_r(sys.path)
 
-            file_at = {!r}
+            file_at = {str(activate_script)!r}
             # CPython 2 requires non-ascii path open to be unicode
-            with open(file_at{}, "r") as file_handler:
+            with open(file_at, "r") as file_handler:
                 content = file_handler.read()
             exec(content, {{"__file__": file_at}})
 
@@ -57,14 +54,11 @@ def print_r(value):
             import pydoc_test
             print_r(pydoc_test.__file__)
-            """.format(
-                str(activate_script),
-                ".decode('utf-8')" if WIN_CPYTHON_2 else "",
-            )
+            """
             result = dedent(raw).splitlines()
             return result
 
-        def assert_output(self, out, raw, tmp_path):
+        def assert_output(self, out, raw, tmp_path):  # noqa: U100
             out = [literal_eval(i) for i in out]
             assert out[0] is None  # start with VIRTUAL_ENV None
 
@@ -78,23 +72,18 @@ def assert_output(self, out, raw, tmp_path):
 
             # sys path contains the site package at its start
             new_sys_path = out[5]
-            new_lib_paths = {ensure_text(j) if WIN_CPYTHON_2 else j for j in {str(i) for i in self._creator.libs}}
+            new_lib_paths = {str(i) for i in self._creator.libs}
             assert prev_sys_path == new_sys_path[len(new_lib_paths) :]
             assert new_lib_paths == set(new_sys_path[: len(new_lib_paths)])
 
             # manage to import from activate site package
             dest = self.norm_path(self._creator.purelib / "pydoc_test.py")
-            found = self.norm_path(out[6].decode(sys.getfilesystemencoding()) if WIN_CPYTHON_2 else out[6])
+            found = self.norm_path(out[6])
             assert found.startswith(dest)
 
         def non_source_activate(self, activate_script):
             act = str(activate_script)
-            if WIN_CPYTHON_2:
-                act = ensure_text(act)
-            cmd = self._invoke_script + [
-                "-c",
-                "exec(open({}).read())".format(repr(act)),
-            ]
+            cmd = self._invoke_script + ["-c", f"exec(open({act!r}).read())"]
             return cmd
 
     activation_tester(Python)
diff --git a/vendor/virtualenv/tests/unit/config/cli/test_parser.py b/vendor/virtualenv/tests/unit/config/cli/test_parser.py
index a2cd4e0d..a12fb3f3 100644
--- a/vendor/virtualenv/tests/unit/config/cli/test_parser.py
+++ b/vendor/virtualenv/tests/unit/config/cli/test_parser.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, unicode_literals
-
 import os
 from contextlib import contextmanager
 
@@ -12,7 +10,7 @@
 
 @pytest.fixture()
 def gen_parser_no_conf_env(monkeypatch, tmp_path):
-    keys_to_delete = {key for key in os.environ if key.startswith(str("VIRTUALENV_"))}
+    keys_to_delete = {key for key in os.environ if key.startswith("VIRTUALENV_")}
     for key in keys_to_delete:
         monkeypatch.delenv(key)
     monkeypatch.setenv(IniConfig.VIRTUALENV_CONFIG_FILE_ENV_VAR, str(tmp_path / "missing"))
diff --git a/vendor/virtualenv/tests/unit/config/test___main__.py b/vendor/virtualenv/tests/unit/config/test___main__.py
index b974712c..5496f2d2 100644
--- a/vendor/virtualenv/tests/unit/config/test___main__.py
+++ b/vendor/virtualenv/tests/unit/config/test___main__.py
@@ -1,17 +1,15 @@
-from __future__ import absolute_import, unicode_literals
-
 import re
 import sys
+from subprocess import PIPE, Popen, check_output
 
 import pytest
 
 from virtualenv.__main__ import run_with_catch
 from virtualenv.util.error import ProcessCallFailed
-from virtualenv.util.subprocess import Popen, subprocess
 
 
 def test_main():
-    process = Popen([sys.executable, "-m", "virtualenv", "--help"], universal_newlines=True, stdout=subprocess.PIPE)
+    process = Popen([sys.executable, "-m", "virtualenv", "--help"], universal_newlines=True, stdout=PIPE)
     out, _ = process.communicate()
     assert not process.returncode
     assert out
@@ -39,7 +37,7 @@ def test_fail_no_traceback(raise_on_session_done, tmp_path, capsys):
         run_with_catch([str(tmp_path)])
     assert context.value.code == 2
     out, err = capsys.readouterr()
-    assert out == "subprocess call failed for [{}] with code 2\nout\nSystemExit: 2\n".format(repr("something"))
+    assert out == f"subprocess call failed for [{'something'!r}] with code 2\nout\nSystemExit: 2\n"
     assert err == "err\n"
 
 
@@ -53,7 +51,8 @@ def test_fail_with_traceback(raise_on_session_done, tmp_path, capsys):
     assert err == ""
 
 
-def test_session_report_full(session_app_data, tmp_path, capsys):
+@pytest.mark.usefixtures("session_app_data")
+def test_session_report_full(tmp_path, capsys):
     run_with_catch([str(tmp_path)])
     out, err = capsys.readouterr()
     assert err == ""
@@ -70,11 +69,12 @@ def test_session_report_full(tmp_path, capsys):
 
 def _match_regexes(lines, regexes):
     for line, regex in zip(lines, regexes):
-        comp_regex = re.compile(r"^{}$".format(regex))
+        comp_regex = re.compile(rf"^{regex}$")
         assert comp_regex.match(line), line
 
 
-def test_session_report_minimal(session_app_data, tmp_path, capsys):
+@pytest.mark.usefixtures("session_app_data")
+def test_session_report_minimal(tmp_path, capsys):
     run_with_catch([str(tmp_path), "--activators", "", "--without-pip"])
     out, err = capsys.readouterr()
     assert err == ""
@@ -86,9 +86,10 @@ def test_session_report_minimal(tmp_path, capsys):
     _match_regexes(lines, regexes)
 
 
-def test_session_report_subprocess(session_app_data, tmp_path):
+@pytest.mark.usefixtures("session_app_data")
+def test_session_report_subprocess(tmp_path):
a subprocess the logging framework should flush and POSIX line normalization happen - out = subprocess.check_output( + out = check_output( [sys.executable, "-m", "virtualenv", str(tmp_path), "--activators", "powershell", "--without-pip"], universal_newlines=True, ) diff --git a/vendor/virtualenv/tests/unit/config/test_env_var.py b/vendor/virtualenv/tests/unit/config/test_env_var.py index 34b216f4..0ba69e24 100644 --- a/vendor/virtualenv/tests/unit/config/test_env_var.py +++ b/vendor/virtualenv/tests/unit/config/test_env_var.py @@ -1,30 +1,30 @@ -from __future__ import absolute_import, unicode_literals - import os +from pathlib import Path import pytest from virtualenv.config.cli.parser import VirtualEnvOptions from virtualenv.config.ini import IniConfig from virtualenv.run import session_via_cli -from virtualenv.util.path import Path @pytest.fixture() -def empty_conf(tmp_path, monkeypatch): +def _empty_conf(tmp_path, monkeypatch): conf = tmp_path / "conf.ini" monkeypatch.setenv(IniConfig.VIRTUALENV_CONFIG_FILE_ENV_VAR, str(conf)) conf.write_text("[virtualenv]") -def test_value_ok(monkeypatch, empty_conf): - monkeypatch.setenv(str("VIRTUALENV_VERBOSE"), str("5")) +@pytest.mark.usefixtures("_empty_conf") +def test_value_ok(monkeypatch): + monkeypatch.setenv("VIRTUALENV_VERBOSE", "5") result = session_via_cli(["venv"]) assert result.verbosity == 5 -def test_value_bad(monkeypatch, caplog, empty_conf): - monkeypatch.setenv(str("VIRTUALENV_VERBOSE"), str("a")) +@pytest.mark.usefixtures("_empty_conf") +def test_value_bad(monkeypatch, caplog): + monkeypatch.setenv("VIRTUALENV_VERBOSE", "a") result = session_via_cli(["venv"]) assert result.verbosity == 2 assert len(caplog.messages) == 1 @@ -34,36 +34,36 @@ def test_value_bad(monkeypatch, caplog, empty_conf): def test_python_via_env_var(monkeypatch): options = VirtualEnvOptions() - monkeypatch.setenv(str("VIRTUALENV_PYTHON"), str("python3")) + monkeypatch.setenv("VIRTUALENV_PYTHON", "python3") session_via_cli(["venv"], options=options) assert options.python == ["python3"] def test_python_multi_value_via_env_var(monkeypatch): options = VirtualEnvOptions() - monkeypatch.setenv(str("VIRTUALENV_PYTHON"), str("python3,python2")) + monkeypatch.setenv("VIRTUALENV_PYTHON", "python3,python2") session_via_cli(["venv"], options=options) assert options.python == ["python3", "python2"] def test_python_multi_value_newline_via_env_var(monkeypatch): options = VirtualEnvOptions() - monkeypatch.setenv(str("VIRTUALENV_PYTHON"), str("python3\npython2")) + monkeypatch.setenv("VIRTUALENV_PYTHON", "python3\npython2") session_via_cli(["venv"], options=options) assert options.python == ["python3", "python2"] def test_python_multi_value_prefer_newline_via_env_var(monkeypatch): options = VirtualEnvOptions() - monkeypatch.setenv(str("VIRTUALENV_PYTHON"), str("python3\npython2,python27")) + monkeypatch.setenv("VIRTUALENV_PYTHON", "python3\npython2,python27") session_via_cli(["venv"], options=options) assert options.python == ["python3", "python2,python27"] def test_extra_search_dir_via_env_var(tmp_path, monkeypatch): monkeypatch.chdir(tmp_path) - value = "a{}0{}b{}c".format(os.linesep, os.linesep, os.pathsep) - monkeypatch.setenv(str("VIRTUALENV_EXTRA_SEARCH_DIR"), str(value)) + value = f"a{os.linesep}0{os.linesep}b{os.pathsep}c" + monkeypatch.setenv("VIRTUALENV_EXTRA_SEARCH_DIR", str(value)) (tmp_path / "a").mkdir() (tmp_path / "b").mkdir() (tmp_path / "c").mkdir() @@ -71,7 +71,8 @@ def test_extra_search_dir_via_env_var(tmp_path, monkeypatch): assert 
result.seeder.extra_search_dir == [Path("a").resolve(), Path("b").resolve(), Path("c").resolve()] -def test_value_alias(monkeypatch, mocker, empty_conf): +@pytest.mark.usefixtures("_empty_conf") +def test_value_alias(monkeypatch, mocker): from virtualenv.config.cli.parser import VirtualEnvConfigParser prev = VirtualEnvConfigParser._fix_default @@ -85,8 +86,8 @@ def func(self, action): mocker.patch("virtualenv.run.VirtualEnvConfigParser._fix_default", side_effect=func, autospec=True) - monkeypatch.delenv(str("SYMLINKS"), raising=False) - monkeypatch.delenv(str("VIRTUALENV_COPIES"), raising=False) - monkeypatch.setenv(str("VIRTUALENV_ALWAYS_COPY"), str("1")) + monkeypatch.delenv("SYMLINKS", raising=False) + monkeypatch.delenv("VIRTUALENV_COPIES", raising=False) + monkeypatch.setenv("VIRTUALENV_ALWAYS_COPY", "1") result = session_via_cli(["venv"]) assert result.creator.symlinks is False diff --git a/vendor/virtualenv/tests/unit/config/test_ini.py b/vendor/virtualenv/tests/unit/config/test_ini.py index 0f872566..dc0ce93a 100644 --- a/vendor/virtualenv/tests/unit/config/test_ini.py +++ b/vendor/virtualenv/tests/unit/config/test_ini.py @@ -1,12 +1,9 @@ -from __future__ import unicode_literals - from textwrap import dedent import pytest from virtualenv.info import fs_supports_symlink from virtualenv.run import session_via_cli -from virtualenv.util.six import ensure_str @pytest.mark.skipif(not fs_supports_symlink(), reason="symlink is not supported") @@ -20,7 +17,7 @@ def test_ini_can_be_overwritten_by_flag(tmp_path, monkeypatch): """, ), ) - monkeypatch.setenv(ensure_str("VIRTUALENV_CONFIG_FILE"), str(custom_ini)) + monkeypatch.setenv("VIRTUALENV_CONFIG_FILE", str(custom_ini)) result = session_via_cli(["venv", "--symlinks"]) diff --git a/vendor/virtualenv/tests/unit/create/conftest.py b/vendor/virtualenv/tests/unit/create/conftest.py index c709adea..1e028fb6 100644 --- a/vendor/virtualenv/tests/unit/create/conftest.py +++ b/vendor/virtualenv/tests/unit/create/conftest.py @@ -6,24 +6,23 @@ - invoking from an old style virtualenv (<17.0.0) - invoking from our own venv """ -from __future__ import absolute_import, unicode_literals import subprocess import sys +from pathlib import Path +from subprocess import Popen import pytest from virtualenv.discovery.py_info import PythonInfo from virtualenv.info import IS_WIN from virtualenv.run import cli_run -from virtualenv.util.path import Path -from virtualenv.util.subprocess import Popen CURRENT = PythonInfo.current_system() # noinspection PyUnusedLocal -def root(tmp_path_factory, session_app_data): +def root(tmp_path_factory, session_app_data): # noqa: U100 return CURRENT.system_executable @@ -84,7 +83,7 @@ def old_virtualenv(tmp_path_factory, session_app_data): exe_path = CURRENT.discover_exe(session_app_data, prefix=str(old_virtualenv_at)).original_executable return exe_path except Exception as exception: - return RuntimeError("failed to create old virtualenv {}".format(exception)) + return RuntimeError(f"failed to create old virtualenv {exception}") PYTHON = { @@ -98,7 +97,7 @@ def old_virtualenv(tmp_path_factory, session_app_data): def python(request, tmp_path_factory, session_app_data): result = request.param(tmp_path_factory, session_app_data) if isinstance(result, Exception): - pytest.skip("could not resolve interpreter based on {} because {}".format(request.param.__name__, result)) + pytest.skip(f"could not resolve interpreter based on {request.param.__name__} because {result}") if result is None: - pytest.skip("requires interpreter with 
{}".format(request.param.__name__)) + pytest.skip(f"requires interpreter with {request.param.__name__}") return result diff --git a/vendor/virtualenv/tests/unit/create/test_creator.py b/vendor/virtualenv/tests/unit/create/test_creator.py index 424a9b38..d3adb2ae 100644 --- a/vendor/virtualenv/tests/unit/create/test_creator.py +++ b/vendor/virtualenv/tests/unit/create/test_creator.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, unicode_literals - import ast import difflib import gc @@ -14,6 +12,7 @@ import zipfile from collections import OrderedDict from itertools import product +from pathlib import Path from stat import S_IREAD, S_IRGRP, S_IROTH from textwrap import dedent from threading import Thread @@ -27,20 +26,18 @@ from virtualenv.create.via_global_ref.builtin.cpython.cpython3 import CPython3Posix from virtualenv.create.via_global_ref.builtin.python2.python2 import Python2 from virtualenv.discovery.py_info import PythonInfo -from virtualenv.info import IS_PYPY, IS_WIN, PY2, PY3, fs_is_case_sensitive +from virtualenv.info import IS_PYPY, IS_WIN, fs_is_case_sensitive from virtualenv.run import cli_run, session_via_cli -from virtualenv.util.path import Path -from virtualenv.util.six import ensure_str, ensure_text CURRENT = PythonInfo.current_system() def test_os_path_sep_not_allowed(tmp_path, capsys): - target = str(tmp_path / "a{}b".format(os.pathsep)) + target = str(tmp_path / f"a{os.pathsep}b") err = _non_success_exit_code(capsys, target) msg = ( - "destination {!r} must not contain the path separator ({}) as this" - " would break the activation scripts".format(target, os.pathsep) + f"destination {target!r} must not contain the path separator ({os.pathsep})" + f" as this would break the activation scripts" ) assert msg in err, err @@ -58,7 +55,7 @@ def test_destination_exists_file(tmp_path, capsys): target = tmp_path / "out" target.write_text("") err = _non_success_exit_code(capsys, str(target)) - msg = "the destination {} already exists and is a file".format(str(target)) + msg = f"the destination {str(target)} already exists and is a file" assert msg in err, err @@ -73,7 +70,7 @@ def test_destination_not_write_able(tmp_path, capsys): target.chmod(S_IREAD | S_IRGRP | S_IROTH) try: err = _non_success_exit_code(capsys, str(target)) - msg = "the destination . is not write-able at {}".format(str(target)) + msg = f"the destination . 
is not write-able at {str(target)}" assert msg in err, err finally: target.chmod(prev_mod) @@ -84,8 +81,8 @@ def cleanup_sys_path(paths): paths = [p.resolve() for p in (Path(os.path.abspath(i)) for i in paths) if p.exists()] to_remove = [Path(HERE)] - if os.environ.get(str("PYCHARM_HELPERS_DIR")): - to_remove.append(Path(os.environ[str("PYCHARM_HELPERS_DIR")]).parent) + if os.environ.get("PYCHARM_HELPERS_DIR"): + to_remove.append(Path(os.environ["PYCHARM_HELPERS_DIR"]).parent) to_remove.append(Path(os.path.expanduser("~")) / ".PyCharm") result = [i for i in paths if not any(str(i).startswith(str(t)) for t in to_remove)] return result @@ -96,7 +93,7 @@ def system(session_app_data): return get_env_debug_info(Path(CURRENT.system_executable), DEBUG_SCRIPT, session_app_data, os.environ) -CURRENT_CREATORS = list(i for i in CURRENT.creators().key_to_class.keys() if i != "builtin") +CURRENT_CREATORS = [i for i in CURRENT.creators().key_to_class.keys() if i != "builtin"] CREATE_METHODS = [] for k, v in CURRENT.creators().key_to_meta.items(): if k in CURRENT_CREATORS: @@ -113,14 +110,14 @@ def system(session_app_data): @pytest.mark.parametrize( - "creator, isolated", + ("creator", "isolated"), [ pytest.param( *i, marks=pytest.mark.xfail( reason="https://bitbucket.org/pypy/pypy/issues/3159/pypy36-730-venv-fails-with-copies-on-linux", strict=True, - ) + ), ) if _VENV_BUG_ON and i[0][0] == "venv" and i[0][1] == "copies" else i @@ -135,14 +132,14 @@ def test_create_no_seed(python, creator, isolated, system, coverage_env, special "-v", "-v", "-p", - ensure_text(python), - ensure_text(str(dest)), + str(python), + str(dest), "--without-pip", "--activators", "", "--creator", creator_key, - "--{}".format(method), + f"--{method}", ] if isolated == "global": cmd.append("--system-site-packages") @@ -154,27 +151,24 @@ def test_create_no_seed(python, creator, isolated, system, coverage_env, special # force a close of these on system where the limit is low-ish (e.g. 
MacOS 256) gc.collect() purelib = creator.purelib - patch_files = {purelib / "{}.{}".format("_virtualenv", i) for i in ("py", "pyc", "pth")} + patch_files = {purelib / f"{'_virtualenv'}.{i}" for i in ("py", "pyc", "pth")} patch_files.add(purelib / "__pycache__") content = set(creator.purelib.iterdir()) - patch_files - assert not content, "\n".join(ensure_text(str(i)) for i in content) - assert creator.env_name == ensure_text(dest.name) + assert not content, "\n".join(str(i) for i in content) + assert creator.env_name == str(dest.name) debug = creator.debug - assert "exception" not in debug, "{}\n{}\n{}".format(debug.get("exception"), debug.get("out"), debug.get("err")) + assert "exception" not in debug, f"{debug.get('exception')}\n{debug.get('out')}\n{debug.get('err')}" sys_path = cleanup_sys_path(debug["sys"]["path"]) system_sys_path = cleanup_sys_path(system["sys"]["path"]) our_paths = set(sys_path) - set(system_sys_path) - our_paths_repr = "\n".join(ensure_text(repr(i)) for i in our_paths) + our_paths_repr = "\n".join(repr(i) for i in our_paths) # ensure we have at least one extra path added assert len(our_paths) >= 1, our_paths_repr # ensure all additional paths are related to the virtual environment for path in our_paths: - msg = "\n{}\ndoes not start with {}\nhas:\n{}".format( - ensure_text(str(path)), - ensure_text(str(dest)), - "\n".join(ensure_text(str(p)) for p in system_sys_path), - ) + msg = "\n".join(str(p) for p in system_sys_path) + msg = f"\n{str(path)}\ndoes not start with {str(dest)}\nhas:\n{msg}" assert str(path).startswith(str(dest)), msg # ensure there's at least a site-packages folder as part of the virtual environment added assert any(p for p in our_paths if p.parts[-1] == "site-packages"), our_paths_repr @@ -182,10 +176,8 @@ def test_create_no_seed(python, creator, isolated, system, coverage_env, special # ensure the global site package is added or not, depending on flag global_sys_path = system_sys_path[-1] if isolated == "isolated": - msg = "global sys path {} is in virtual environment sys path:\n{}".format( - ensure_text(str(global_sys_path)), - "\n".join(ensure_text(str(j)) for j in sys_path), - ) + msg = "\n".join(str(j) for j in sys_path) + msg = f"global sys path {str(global_sys_path)} is in virtual environment sys path:\n{msg}" assert global_sys_path not in sys_path, msg else: common = [] @@ -196,7 +188,7 @@ def test_create_no_seed(python, creator, isolated, system, coverage_env, special break def list_to_str(iterable): - return [ensure_text(str(i)) for i in iterable] + return [str(i) for i in iterable] assert common, "\n".join(difflib.unified_diff(list_to_str(sys_path), list_to_str(system_sys_path))) @@ -207,7 +199,7 @@ def list_to_str(iterable): if sys.platform == "win32": exes = ("python.exe",) else: - exes = ("python", "python{}".format(*sys.version_info), "python{}.{}".format(*sys.version_info)) + exes = ("python", f"python{sys.version_info.major}", f"python{sys.version_info.major}.{sys.version_info.minor}") if creator_key == "venv": # for venv some repackaging does not includes the pythonx.y exes = exes[:-1] @@ -276,7 +268,7 @@ def _session_via_cli(args, options=None, setup_logging=True, env=None): mocker.patch("virtualenv.run.session_via_cli", side_effect=_session_via_cli) before = tmp_path.stat().st_mode cfg_path = tmp_path / "pyvenv.cfg" - cfg_path.write_text(ensure_text("")) + cfg_path.write_text("") cfg = str(cfg_path) try: os.chmod(cfg, stat.S_IREAD | stat.S_IRGRP | stat.S_IROTH) @@ -291,22 +283,6 @@ def _session_via_cli(args, options=None, 
setup_logging=True, env=None): assert "Error:" in err, err -@pytest.mark.skipif(not sys.version_info[0] == 2, reason="python 2 only tests") -def test_debug_bad_virtualenv(tmp_path): - cmd = [str(tmp_path), "--without-pip"] - result = cli_run(cmd) - # if the site.py is removed/altered the debug should fail as no one is around to fix the paths - site_py = result.creator.stdlib / "site.py" - site_py.unlink() - # insert something that writes something on the stdout - site_py.write_text('import sys; sys.stdout.write(repr("std-out")); sys.stderr.write("std-err"); raise ValueError') - debug_info = result.creator.debug - assert debug_info["returncode"] - assert debug_info["err"].startswith("std-err") - assert "std-out" in debug_info["out"] - assert debug_info["exception"] - - @pytest.mark.parametrize("creator", CURRENT_CREATORS) @pytest.mark.parametrize("clear", [True, False], ids=["clear", "no_clear"]) def test_create_clear_resets(tmp_path, creator, clear, caplog): @@ -342,12 +318,13 @@ def test_prompt_set(tmp_path, creator, prompt): assert cfg["prompt"] == actual_prompt -@pytest.mark.slow -def test_cross_major(cross_python, coverage_env, tmp_path, session_app_data, current_fastest): +@pytest.mark.slow() +@pytest.mark.usefixtures("current_fastest") +def test_cross_major(cross_python, coverage_env, tmp_path, session_app_data): cmd = [ "-p", - ensure_text(cross_python.executable), - ensure_text(str(tmp_path)), + cross_python.executable, + str(tmp_path), "--no-setuptools", "--no-wheel", "--activators", @@ -358,19 +335,20 @@ def test_cross_major(cross_python, coverage_env, tmp_path, session_app_data, cur major, minor = cross_python.version_info[0:2] assert pip_scripts == { "pip", - "pip{}".format(major), - "pip-{}.{}".format(major, minor), - "pip{}.{}".format(major, minor), + f"pip{major}", + f"pip-{major}.{minor}", + f"pip{major}.{minor}", } coverage_env() env = PythonInfo.from_exe(str(result.creator.exe), session_app_data) assert env.version_info.major != CURRENT.version_info.major -def test_create_parallel(tmp_path, monkeypatch, temp_app_data): +@pytest.mark.usefixtures("temp_app_data") +def test_create_parallel(tmp_path): def create(count): subprocess.check_call( - [sys.executable, "-m", "virtualenv", "-vvv", str(tmp_path / "venv{}".format(count)), "--without-pip"], + [sys.executable, "-m", "virtualenv", "-vvv", str(tmp_path / f"venv{count}"), "--without-pip"], ) threads = [Thread(target=create, args=(i,)) for i in range(1, 4)] @@ -393,7 +371,8 @@ def test_creator_replaces_altsep_in_dest(tmp_path): assert str(result) == dest.format(os.sep) -def test_create_long_path(current_fastest, tmp_path): +@pytest.mark.usefixtures("current_fastest") +def test_create_long_path(tmp_path): if sys.platform == "darwin": max_shebang_length = 512 else: @@ -409,8 +388,9 @@ def test_create_long_path(current_fastest, tmp_path): @pytest.mark.parametrize("creator", sorted(set(PythonInfo.current_system().creators().key_to_class) - {"builtin"})) -def test_create_distutils_cfg(creator, tmp_path, monkeypatch, session_app_data): - result = cli_run([ensure_text(str(tmp_path / "venv")), "--activators", "", "--creator", creator]) +@pytest.mark.usefixtures("session_app_data") +def test_create_distutils_cfg(creator, tmp_path, monkeypatch): + result = cli_run([str(tmp_path / "venv"), "--activators", "", "--creator", creator]) app = Path(__file__).parent / "console_app" dest = tmp_path / "console_app" @@ -418,18 +398,15 @@ def test_create_distutils_cfg(creator, tmp_path, monkeypatch, session_app_data): setup_cfg = dest / 
"setup.cfg" conf = dedent( - """ + f""" [install] - prefix={0}{1}prefix - install_purelib={0}{1}purelib - install_platlib={0}{1}platlib - install_headers={0}{1}headers - install_scripts={0}{1}scripts - install_data={0}{1}data - """.format( - tmp_path, - os.sep, - ), + prefix={tmp_path}{os.sep}prefix + install_purelib={tmp_path}{os.sep}purelib + install_platlib={tmp_path}{os.sep}platlib + install_headers={tmp_path}{os.sep}headers + install_scripts={tmp_path}{os.sep}scripts + install_data={tmp_path}{os.sep}data + """, ) setup_cfg.write_text(setup_cfg.read_text() + conf) @@ -457,105 +434,13 @@ def list_files(path): for root, _, files in os.walk(path): level = root.replace(path, "").count(os.sep) indent = " " * 4 * level - result += "{}{}/\n".format(indent, os.path.basename(root)) + result += f"{indent}{os.path.basename(root)}/\n" sub = " " * 4 * (level + 1) for f in files: - result += "{}{}\n".format(sub, f) + result += f"{sub}{f}\n" return result -@pytest.mark.parametrize("python_path_on", [True, False], ids=["on", "off"]) -@pytest.mark.skipif(PY3, reason="we rewrite sys.path only on PY2") -def test_python_path(monkeypatch, tmp_path, python_path_on): - result = cli_run([ensure_text(str(tmp_path)), "--without-pip", "--activators", ""]) - monkeypatch.chdir(tmp_path) - case_sensitive = fs_is_case_sensitive() - - def _get_sys_path(flag=None): - cmd = [str(result.creator.exe)] - if flag: - cmd.append(flag) - cmd.extend(["-c", "import json; import sys; print(json.dumps(sys.path))"]) - return [i if case_sensitive else i.lower() for i in json.loads(subprocess.check_output(cmd))] - - monkeypatch.delenv(str("PYTHONPATH"), raising=False) - base = _get_sys_path() - - # note the value result.creator.interpreter.system_stdlib cannot be set, as that would disable our custom site.py - python_paths = [ - str(Path(result.creator.interpreter.prefix)), - str(Path(result.creator.interpreter.system_stdlib) / "b"), - str(result.creator.purelib / "a"), - str(result.creator.purelib), - str(result.creator.bin_dir), - str(tmp_path / "base"), - str(tmp_path / "base_sep") + os.sep, - "name", - "name{}".format(os.sep), - str(tmp_path.parent / (ensure_text(tmp_path.name) + "_suffix")), - ".", - "..", - "", - ] - python_path_env = os.pathsep.join(ensure_str(i) for i in python_paths) - monkeypatch.setenv(str("PYTHONPATH"), python_path_env) - - extra_all = _get_sys_path(None if python_path_on else "-E") - if python_path_on: - assert extra_all[0] == "" # the cwd is always injected at start as '' - extra_all = extra_all[1:] - assert base[0] == "" - base = base[1:] - - assert not (set(base) - set(extra_all)) # all base paths are present - abs_python_paths = list(OrderedDict((os.path.abspath(ensure_text(i)), None) for i in python_paths).keys()) - abs_python_paths = [i if case_sensitive else i.lower() for i in abs_python_paths] - - extra_as_python_path = extra_all[: len(abs_python_paths)] - assert abs_python_paths == extra_as_python_path # python paths are there at the start - - non_python_path = extra_all[len(abs_python_paths) :] - assert non_python_path == [i for i in base if i not in extra_as_python_path] - else: - assert base == extra_all - - -@pytest.mark.skipif( - not (CURRENT.implementation == "CPython" and PY2), - reason="stdlib components without py files only possible on CPython2", -) -@pytest.mark.parametrize( - "py, pyc", - list( - product( - [True, False] if Python2.from_stdlib(Python2.mappings(CURRENT), "os.py")[2] else [False], - [True, False] if Python2.from_stdlib(Python2.mappings(CURRENT), "os.pyc")[2] else 
[False], - ), - ), -) -def test_py_pyc_missing(tmp_path, mocker, session_app_data, py, pyc): - """Ensure that creation can succeed if os.pyc exists (even if os.py has been deleted)""" - previous = Python2.from_stdlib - - def from_stdlib(mappings, name): - path, to, exists = previous(mappings, name) - if name.endswith("py"): - exists = py - elif name.endswith("pyc"): - exists = pyc - return path, to, exists - - mocker.patch.object(Python2, "from_stdlib", side_effect=from_stdlib) - - result = cli_run([ensure_text(str(tmp_path)), "--without-pip", "--activators", "", "-vv"]) - py_at = Python2.from_stdlib(Python2.mappings(CURRENT), "os.py")[1](result.creator, Path("os.py")) - py = pyc is False or py # if pyc is False we fallback to serve the py, which will exist (as we only mock the check) - assert py_at.exists() is py - - pyc_at = Python2.from_stdlib(Python2.mappings(CURRENT), "osc.py")[1](result.creator, Path("os.pyc")) - assert pyc_at.exists() is pyc - - def test_zip_importer_can_import_setuptools(tmp_path): """We're patching the loaders so might fail on r/o loaders, such as zipimporter on CPython<3.8""" result = cli_run([str(tmp_path / "venv"), "--activators", "", "--no-pip", "--no-wheel", "--copies"]) @@ -574,7 +459,7 @@ def test_zip_importer_can_import_setuptools(tmp_path): else: folder.unlink() env = os.environ.copy() - env[str("PYTHONPATH")] = str(zip_path) + env["PYTHONPATH"] = str(zip_path) subprocess.check_call([str(result.creator.exe), "-c", "from setuptools.dist import Distribution"], env=env) @@ -583,11 +468,12 @@ def test_zip_importer_can_import_setuptools(tmp_path): # # coverage is disabled, because when coverage is active, it imports threading in default mode. @pytest.mark.xfail( - IS_PYPY and PY3 and sys.platform.startswith("darwin"), + IS_PYPY and sys.platform.startswith("darwin"), reason="https://foss.heptapod.net/pypy/pypy/-/issues/3269", ) -def test_no_preimport_threading(tmp_path, no_coverage): - session = cli_run([ensure_text(str(tmp_path))]) +@pytest.mark.usefixtures("_no_coverage") +def test_no_preimport_threading(tmp_path): + session = cli_run([str(tmp_path)]) out = subprocess.check_output( [str(session.creator.exe), "-c", r"import sys; print('\n'.join(sorted(sys.modules)))"], universal_newlines=True, @@ -597,8 +483,8 @@ def test_no_preimport_threading(tmp_path, no_coverage): # verify that .pth files in site-packages/ are always processed even if $PYTHONPATH points to it. 
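(Background for the test that follows, not part of the patch itself: site.py treats any line in a site-packages *.pth file that begins with "import" as code to execute at interpreter startup; every other non-comment line is appended to sys.path. A minimal sketch of the mechanism the test relies on, using a hypothetical demo.pth whose file name and attribute are illustrative only:

    import os
    import sysconfig

    # site-packages of the running interpreter
    site_packages = sysconfig.get_paths()["purelib"]
    # one-line hook: "import ..." lines are executed by site.addpackage() at startup
    with open(os.path.join(site_packages, "demo.pth"), "w") as f:
        f.write("import sys; sys.demo_pth = 'ok'\n")

A fresh interpreter started without -S will then expose sys.demo_pth == 'ok', whether or not PYTHONPATH also points at site-packages.)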
-def test_pth_in_site_vs_PYTHONPATH(tmp_path): - session = cli_run([ensure_text(str(tmp_path))]) +def test_pth_in_site_vs_python_path(tmp_path): + session = cli_run([str(tmp_path)]) site_packages = str(session.creator.purelib) # install test.pth that sets sys.testpth='ok' with open(os.path.join(site_packages, "test.pth"), "w") as f: @@ -625,7 +511,7 @@ def test_pth_in_site_vs_PYTHONPATH(tmp_path): def test_getsitepackages_system_site(tmp_path): # Test without --system-site-packages - session = cli_run([ensure_text(str(tmp_path))]) + session = cli_run([str(tmp_path)]) system_site_packages = get_expected_system_site_packages(session) @@ -639,15 +525,15 @@ def test_getsitepackages_system_site(tmp_path): assert system_site_package not in site_packages # Test with --system-site-packages - session = cli_run([ensure_text(str(tmp_path)), "--system-site-packages"]) + session = cli_run([str(tmp_path), "--system-site-packages"]) - system_site_packages = get_expected_system_site_packages(session) + system_site_packages = [str(Path(i).resolve()) for i in get_expected_system_site_packages(session)] out = subprocess.check_output( [str(session.creator.exe), "-c", r"import site; print(site.getsitepackages())"], universal_newlines=True, ) - site_packages = ast.literal_eval(out) + site_packages = [str(Path(i).resolve()) for i in ast.literal_eval(out)] for system_site_package in system_site_packages: assert system_site_package in site_packages @@ -666,7 +552,7 @@ def get_expected_system_site_packages(session): def test_get_site_packages(tmp_path): case_sensitive = fs_is_case_sensitive() - session = cli_run([ensure_text(str(tmp_path))]) + session = cli_run([str(tmp_path)]) env_site_packages = [str(session.creator.purelib), str(session.creator.platlib)] out = subprocess.check_output( [str(session.creator.exe), "-c", r"import site; print(site.getsitepackages())"], @@ -680,3 +566,102 @@ def test_get_site_packages(tmp_path): for env_site_package in env_site_packages: assert env_site_package in site_packages + + +def test_debug_bad_virtualenv(tmp_path): + cmd = [str(tmp_path), "--without-pip"] + result = cli_run(cmd) + # if the site.py is removed/altered the debug should fail as no one is around to fix the paths + cust = result.creator.purelib / "_a.pth" + cust.write_text('import sys; sys.stdout.write("std-out"); sys.stderr.write("std-err"); raise SystemExit(1)') + debug_info = result.creator.debug + assert debug_info["returncode"] == 1 + assert "std-err" in debug_info["err"] + assert "std-out" in debug_info["out"] + assert debug_info["exception"] + + +@pytest.mark.parametrize("python_path_on", [True, False], ids=["on", "off"]) +def test_python_path(monkeypatch, tmp_path, python_path_on): + result = cli_run([str(tmp_path), "--without-pip", "--activators", ""]) + monkeypatch.chdir(tmp_path) + case_sensitive = fs_is_case_sensitive() + + def _get_sys_path(flag=None): + cmd = [str(result.creator.exe)] + if flag: + cmd.append(flag) + cmd.extend(["-c", "import json; import sys; print(json.dumps(sys.path))"]) + return [i if case_sensitive else i.lower() for i in json.loads(subprocess.check_output(cmd))] + + monkeypatch.delenv("PYTHONPATH", raising=False) + base = _get_sys_path() + + # note the value result.creator.interpreter.system_stdlib cannot be set, as that would disable our custom site.py + python_paths = [ + str(Path(result.creator.interpreter.prefix)), + str(Path(result.creator.interpreter.system_stdlib) / "b"), + str(result.creator.purelib / "a"), + str(result.creator.purelib), + str(result.creator.bin_dir), + 
str(tmp_path / "base"), + f"{str(tmp_path / 'base_sep')}{os.sep}", + "name", + f"name{os.sep}", + str(tmp_path.parent / f"{tmp_path.name}_suffix"), + ".", + "..", + "", + ] + python_path_env = os.pathsep.join(python_paths) + monkeypatch.setenv("PYTHONPATH", python_path_env) + + extra_all = _get_sys_path(None if python_path_on else "-E") + if python_path_on: + assert extra_all[0] == "" # the cwd is always injected at start as '' + extra_all = extra_all[1:] + assert base[0] == "" + base = base[1:] + + assert not (set(base) - set(extra_all)) # all base paths are present + abs_python_paths = list(OrderedDict((os.path.abspath(str(i)), None) for i in python_paths).keys()) + abs_python_paths = [i if case_sensitive else i.lower() for i in abs_python_paths] + + extra_as_python_path = extra_all[: len(abs_python_paths)] + assert abs_python_paths == extra_as_python_path # python paths are there at the start + + non_python_path = extra_all[len(abs_python_paths) :] + assert non_python_path == [i for i in base if i not in extra_as_python_path] + else: + assert base == extra_all + + +@pytest.mark.parametrize( + ("py", "pyc"), + product( + [True, False] if Python2.from_stdlib(Python2.mappings(CURRENT), "os.py")[2] else [False], + [True, False] if Python2.from_stdlib(Python2.mappings(CURRENT), "os.pyc")[2] else [False], + ), +) +@pytest.mark.usefixtures("session_app_data") +def test_py_pyc_missing(tmp_path, mocker, py, pyc): + """Ensure that creation can succeed if os.pyc exists (even if os.py has been deleted)""" + previous = Python2.from_stdlib + + def from_stdlib(mappings, name): + path, to, exists = previous(mappings, name) + if name.endswith("py"): + exists = py + elif name.endswith("pyc"): + exists = pyc + return path, to, exists + + mocker.patch.object(Python2, "from_stdlib", side_effect=from_stdlib) + + result = cli_run([str(tmp_path), "--without-pip", "--activators", "", "-vv", "-p", "2"]) + py_at = Python2.from_stdlib(Python2.mappings(CURRENT), "os.py")[1](result.creator, Path("os.py")) + py = pyc is False or py # if pyc is False we fallback to serve the py, which will exist (as we only mock the check) + assert py_at.exists() is py + + pyc_at = Python2.from_stdlib(Python2.mappings(CURRENT), "osc.py")[1](result.creator, Path("os.pyc")) + assert pyc_at.exists() is pyc diff --git a/vendor/virtualenv/tests/unit/create/test_interpreters.py b/vendor/virtualenv/tests/unit/create/test_interpreters.py index 76d40c7a..65d53084 100644 --- a/vendor/virtualenv/tests/unit/create/test_interpreters.py +++ b/vendor/virtualenv/tests/unit/create/test_interpreters.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, unicode_literals - import sys from uuid import uuid4 @@ -9,12 +7,12 @@ from virtualenv.run import cli_run -@pytest.mark.slow +@pytest.mark.slow() def test_failed_to_find_bad_spec(): of_id = uuid4().hex with pytest.raises(RuntimeError) as context: cli_run(["-p", of_id]) - msg = repr(RuntimeError("failed to find interpreter for Builtin discover of python_spec={!r}".format(of_id))) + msg = repr(RuntimeError(f"failed to find interpreter for Builtin discover of python_spec={of_id!r}")) assert repr(context.value) == msg diff --git a/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/conftest.py b/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/conftest.py new file mode 100644 index 00000000..7119fbe5 --- /dev/null +++ b/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/conftest.py @@ -0,0 +1,24 @@ +import sys +from pathlib import Path + +import pytest +from testing import path
+from testing.py_info import read_fixture + +# Allows to import from `testing` into test submodules. +sys.path.append(str(Path(__file__).parent)) + + +@pytest.fixture() +def py_info(py_info_name): + return read_fixture(py_info_name) + + +@pytest.fixture() +def mock_files(mocker): + return lambda paths, files: path.mock_files(mocker, paths, files) + + +@pytest.fixture() +def mock_pypy_libs(mocker): + return lambda pypy, libs: path.mock_pypy_libs(mocker, pypy, libs) diff --git a/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/cpython/cpython3_win_embed.json b/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/cpython/cpython3_win_embed.json new file mode 100644 index 00000000..e8d0d01c --- /dev/null +++ b/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/cpython/cpython3_win_embed.json @@ -0,0 +1,61 @@ +{ + "platform": "win32", + "implementation": "CPython", + "version_info": { + "major": 3, + "minor": 10, + "micro": 4, + "releaselevel": "final", + "serial": 0 + }, + "architecture": 64, + "version_nodot": "310", + "version": "3.10.4 (tags/v3.10.4:9d38120, Mar 23 2022, 23:13:41) [MSC v.1929 64 bit (AMD64)]", + "os": "nt", + "prefix": "c:\\path\\to\\python", + "base_prefix": "c:\\path\\to\\python", + "real_prefix": null, + "base_exec_prefix": "c:\\path\\to\\python", + "exec_prefix": "c:\\path\\to\\python", + "executable": "c:\\path\\to\\python\\python.exe", + "original_executable": "c:\\path\\to\\python\\python.exe", + "system_executable": "c:\\path\\to\\python\\python.exe", + "has_venv": false, + "path": [ + "c:\\path\\to\\python\\Scripts\\virtualenv.exe", + "c:\\path\\to\\python\\python310.zip", + "c:\\path\\to\\python", + "c:\\path\\to\\python\\Lib\\site-packages" + ], + "file_system_encoding": "utf-8", + "stdout_encoding": "utf-8", + "sysconfig_scheme": null, + "sysconfig_paths": { + "stdlib": "{installed_base}/Lib", + "platstdlib": "{base}/Lib", + "purelib": "{base}/Lib/site-packages", + "platlib": "{base}/Lib/site-packages", + "include": "{installed_base}/Include", + "scripts": "{base}/Scripts", + "data": "{base}" + }, + "distutils_install": { + "purelib": "Lib\\site-packages", + "platlib": "Lib\\site-packages", + "headers": "Include\\UNKNOWN", + "scripts": "Scripts", + "data": "" + }, + "sysconfig": { + "makefile_filename": "c:\\path\\to\\python\\Lib\\config\\Makefile" + }, + "sysconfig_vars": { + "PYTHONFRAMEWORK": "", + "installed_base": "c:\\path\\to\\python", + "base": "c:\\path\\to\\python" + }, + "system_stdlib": "c:\\path\\to\\python\\Lib", + "system_stdlib_platform": "c:\\path\\to\\python\\Lib", + "max_size": 9223372036854775807, + "_creators": null +} diff --git a/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/cpython/test_cpython3_win.py b/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/cpython/test_cpython3_win.py new file mode 100644 index 00000000..90fdb38e --- /dev/null +++ b/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/cpython/test_cpython3_win.py @@ -0,0 +1,99 @@ +import pytest +from testing.helpers import contains_exe, contains_ref +from testing.path import join as path + +from virtualenv.create.via_global_ref.builtin.cpython.cpython3 import CPython3Windows + +CPYTHON3_PATH = ( + "virtualenv.create.via_global_ref.builtin.cpython.common.Path", + "virtualenv.create.via_global_ref.builtin.cpython.cpython3.Path", +) + + +@pytest.mark.parametrize("py_info_name", ["cpython3_win_embed"]) +def test_2_exe_on_default_py_host(py_info, mock_files): + mock_files(CPYTHON3_PATH, [py_info.system_executable]) + sources = 
tuple(CPython3Windows.sources(interpreter=py_info)) + # Default Python exe. + assert contains_exe(sources, py_info.system_executable) + # Should always exist. + assert contains_exe(sources, path(py_info.prefix, "pythonw.exe")) + + +@pytest.mark.parametrize("py_info_name", ["cpython3_win_embed"]) +def test_3_exe_on_not_default_py_host(py_info, mock_files): + # Not default python host. + py_info.system_executable = path(py_info.prefix, "python666.exe") + mock_files(CPYTHON3_PATH, [py_info.system_executable]) + sources = tuple(CPython3Windows.sources(interpreter=py_info)) + # Not default Python exe linked to both the default name and origin. + assert contains_exe(sources, py_info.system_executable, "python.exe") + assert contains_exe(sources, py_info.system_executable, "python666.exe") + # Should always exist. + assert contains_exe(sources, path(py_info.prefix, "pythonw.exe")) + + +@pytest.mark.parametrize("py_info_name", ["cpython3_win_embed"]) +def test_only_shim(py_info, mock_files): + shim = path(py_info.system_stdlib, "venv\\scripts\\nt\\python.exe") + py_files = ( + path(py_info.prefix, "libcrypto-1_1.dll"), + path(py_info.prefix, "libffi-7.dll"), + path(py_info.prefix, "_asyncio.pyd"), + path(py_info.prefix, "_bz2.pyd"), + ) + mock_files(CPYTHON3_PATH, [shim, *py_files]) + sources = tuple(CPython3Windows.sources(interpreter=py_info)) + assert CPython3Windows.has_shim(interpreter=py_info) + assert contains_exe(sources, shim) + assert not contains_exe(sources, py_info.system_executable) + for file in py_files: + assert not contains_ref(sources, file) + + +@pytest.mark.parametrize("py_info_name", ["cpython3_win_embed"]) +def test_exe_dll_pyd_without_shim(py_info, mock_files): + py_files = ( + path(py_info.prefix, "libcrypto-1_1.dll"), + path(py_info.prefix, "libffi-7.dll"), + path(py_info.prefix, "_asyncio.pyd"), + path(py_info.prefix, "_bz2.pyd"), + ) + mock_files(CPYTHON3_PATH, py_files) + sources = tuple(CPython3Windows.sources(interpreter=py_info)) + assert not CPython3Windows.has_shim(interpreter=py_info) + assert contains_exe(sources, py_info.system_executable) + for file in py_files: + assert contains_ref(sources, file) + + +@pytest.mark.parametrize("py_info_name", ["cpython3_win_embed"]) +def test_python_zip_if_exists_and_set_in_path(py_info, mock_files): + python_zip_name = f"python{py_info.version_nodot}.zip" + python_zip = path(py_info.prefix, python_zip_name) + mock_files(CPYTHON3_PATH, [python_zip]) + sources = tuple(CPython3Windows.sources(interpreter=py_info)) + assert python_zip in py_info.path + assert contains_ref(sources, python_zip) + + +@pytest.mark.parametrize("py_info_name", ["cpython3_win_embed"]) +def test_no_python_zip_if_exists_and_not_set_in_path(py_info, mock_files): + python_zip_name = f"python{py_info.version_nodot}.zip" + python_zip = path(py_info.prefix, python_zip_name) + py_info.path.remove(python_zip) + mock_files(CPYTHON3_PATH, [python_zip]) + sources = tuple(CPython3Windows.sources(interpreter=py_info)) + assert python_zip not in py_info.path + assert not contains_ref(sources, python_zip) + + +@pytest.mark.parametrize("py_info_name", ["cpython3_win_embed"]) +def test_no_python_zip_if_not_exists(py_info, mock_files): + python_zip_name = f"python{py_info.version_nodot}.zip" + python_zip = path(py_info.prefix, python_zip_name) + # No `python_zip`, just python.exe file. 
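+ # (Aside on the mock plumbing defined in testing/path.py further below:
+ # mock_files patches both Path imports listed in CPYTHON3_PATH with a PathMock
+ # whose filelist contains only python.exe, so the zip stays listed in
+ # py_info.path but does not "exist" on the mocked filesystem; the asserts
+ # below check that sources() therefore emits no reference for it.)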
+ mock_files(CPYTHON3_PATH, [py_info.system_executable]) + sources = tuple(CPython3Windows.sources(interpreter=py_info)) + assert python_zip in py_info.path + assert not contains_ref(sources, python_zip) diff --git a/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/pypy/test_pypy3.py b/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/pypy/test_pypy3.py index c4d6860e..49038018 100644 --- a/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/pypy/test_pypy3.py +++ b/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/pypy/test_pypy3.py @@ -1,104 +1,47 @@ -from __future__ import absolute_import, unicode_literals - -import fnmatch +import pytest +from testing.helpers import contains_exe, contains_ref +from testing.path import join as path from virtualenv.create.via_global_ref.builtin.pypy.pypy3 import PyPy3Posix -from virtualenv.create.via_global_ref.builtin.ref import ExePathRefToDest, PathRefToDest -from virtualenv.discovery.py_info import PythonInfo -from virtualenv.util.path import Path - - -class FakePath(Path): - """ - A Path() fake that only knows about files in existing_paths and the - directories that contain them. - """ - - existing_paths = [] - - if hasattr(Path(""), "_flavour"): - _flavour = Path("")._flavour - - def exists(self): - return self.as_posix() in self.existing_paths or self.is_dir() - - def glob(self, glob): - pattern = self.as_posix() + "/" + glob - for path in fnmatch.filter(self.existing_paths, pattern): - yield FakePath(path) - - def is_dir(self): - prefix = self.as_posix() + "/" - return any(True for path in self.existing_paths if path.startswith(prefix)) - - def iterdir(self): - prefix = self.as_posix() + "/" - for path in self.existing_paths: - if path.startswith(prefix) and "/" not in path[len(prefix) :]: - yield FakePath(path) - - def resolve(self): - return self - - def __div__(self, key): - return FakePath(super(FakePath, self).__div__(key)) - - def __truediv__(self, key): - return FakePath(super(FakePath, self).__truediv__(key)) - -def assert_contains_exe(sources, src): - """Assert that the one and only executeable in sources is src""" - exes = [source for source in sources if isinstance(source, ExePathRefToDest)] - assert len(exes) == 1 - exe = exes[0] - assert exe.src.as_posix() == src - - -def assert_contains_ref(sources, src): - """Assert that src appears in sources""" - assert any(source for source in sources if isinstance(source, PathRefToDest) and source.src.as_posix() == src) - - -def inject_fake_path(mocker, existing_paths): - """Inject FakePath in all the correct places, and set existing_paths""" - FakePath.existing_paths = existing_paths - mocker.patch("virtualenv.create.via_global_ref.builtin.pypy.common.Path", FakePath) - mocker.patch("virtualenv.create.via_global_ref.builtin.pypy.pypy3.Path", FakePath) - - -def _load_pypi_info(name): - return PythonInfo._from_json((Path(__file__).parent / "{}.json".format(name)).read_text()) - - -def test_portable_pypy3_virtualenvs_get_their_libs(mocker): - paths = ["/tmp/pypy3.8-v7.3.8-linux64/bin/pypy", "/tmp/pypy3.8-v7.3.8-linux64/lib/libgdbm.so.4"] - inject_fake_path(mocker, paths) - path = Path("/tmp/pypy3.8-v7.3.8-linux64/bin/libpypy3-c.so") - mocker.patch.object(PyPy3Posix, "_shared_libs", return_value=[path]) - - sources = list(PyPy3Posix.sources(interpreter=_load_pypi_info("portable_pypy38"))) - assert_contains_exe(sources, "/tmp/pypy3.8-v7.3.8-linux64/bin/pypy") +PYPY3_PATH = ( + "virtualenv.create.via_global_ref.builtin.pypy.common.Path", + 
"virtualenv.create.via_global_ref.builtin.pypy.pypy3.Path", +) + + +# In `PyPy3Posix.sources()` `host_lib` will be broken in Python 2 for Windows, +# so `py_file` will not be in sources. +@pytest.mark.parametrize("py_info_name", ["portable_pypy38"]) +def test_portable_pypy3_virtualenvs_get_their_libs(py_info, mock_files, mock_pypy_libs): + py_file = path(py_info.prefix, "lib/libgdbm.so.4") + mock_files(PYPY3_PATH, [py_info.system_executable, py_file]) + lib_file = path(py_info.prefix, "bin/libpypy3-c.so") + mock_pypy_libs(PyPy3Posix, [lib_file]) + sources = tuple(PyPy3Posix.sources(interpreter=py_info)) assert len(sources) > 2 - assert_contains_ref(sources, "/tmp/pypy3.8-v7.3.8-linux64/bin/libpypy3-c.so") - assert_contains_ref(sources, "/tmp/pypy3.8-v7.3.8-linux64/lib/libgdbm.so.4") + assert contains_exe(sources, py_info.system_executable) + assert contains_ref(sources, py_file) + assert contains_ref(sources, lib_file) -def test_debian_pypy37_virtualenvs(mocker): +@pytest.mark.parametrize("py_info_name", ["deb_pypy37"]) +def test_debian_pypy37_virtualenvs(py_info, mock_files, mock_pypy_libs): # Debian's pypy3 layout, installed to /usr, before 3.8 allowed a /usr prefix - inject_fake_path(mocker, ["/usr/bin/pypy3"]) - mocker.patch.object(PyPy3Posix, "_shared_libs", return_value=[Path("/usr/lib/pypy3/bin/libpypy3-c.so")]) - sources = list(PyPy3Posix.sources(interpreter=_load_pypi_info("deb_pypy37"))) - assert_contains_exe(sources, "/usr/bin/pypy3") - assert_contains_ref(sources, "/usr/lib/pypy3/bin/libpypy3-c.so") + mock_files(PYPY3_PATH, [py_info.system_executable]) + lib_file = path(py_info.prefix, "bin/libpypy3-c.so") + mock_pypy_libs(PyPy3Posix, [lib_file]) + sources = tuple(PyPy3Posix.sources(interpreter=py_info)) assert len(sources) == 2 + assert contains_exe(sources, py_info.system_executable) + assert contains_ref(sources, lib_file) -def test_debian_pypy38_virtualenvs_exclude_usr(mocker): - inject_fake_path(mocker, ["/usr/bin/pypy3", "/usr/lib/foo"]) +@pytest.mark.parametrize("py_info_name", ["deb_pypy38"]) +def test_debian_pypy38_virtualenvs_exclude_usr(py_info, mock_files, mock_pypy_libs): + mock_files(PYPY3_PATH, [py_info.system_executable, "/usr/lib/foo"]) # libpypy3-c.so lives on the ld search path - mocker.patch.object(PyPy3Posix, "_shared_libs", return_value=[]) - - sources = list(PyPy3Posix.sources(interpreter=_load_pypi_info("deb_pypy38"))) - assert_contains_exe(sources, "/usr/bin/pypy3") + mock_pypy_libs(PyPy3Posix, []) + sources = tuple(PyPy3Posix.sources(interpreter=py_info)) assert len(sources) == 1 + assert contains_exe(sources, py_info.system_executable) diff --git a/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/testing/__init__.py b/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/testing/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/testing/helpers.py b/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/testing/helpers.py new file mode 100644 index 00000000..5ae1df8c --- /dev/null +++ b/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/testing/helpers.py @@ -0,0 +1,38 @@ +from functools import reduce +from pathlib import Path + +from virtualenv.create.via_global_ref.builtin.ref import ExePathRefToDest, PathRef + + +def is_ref(source): + return isinstance(source, PathRef) + + +def is_exe(source): + return type(source) is ExePathRefToDest + + +def has_src(src): + return lambda ref: ref.src.as_posix() == Path(src).as_posix() + + +def 
has_target(target): + return lambda ref: ref.base == target + + +def apply_filter(values, function): + return filter(function, values) + + +def filterby(filters, sources): + return reduce(apply_filter, filters, sources) + + +def contains_exe(sources, src, target=None): + filters = is_exe, has_src(src), target and has_target(target) + return any(filterby(filters, sources)) + + +def contains_ref(sources, src): + filters = is_ref, has_src(src) + return any(filterby(filters, sources)) diff --git a/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/testing/path.py b/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/testing/path.py new file mode 100644 index 00000000..b2e1b85c --- /dev/null +++ b/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/testing/path.py @@ -0,0 +1,91 @@ +from abc import ABCMeta, abstractmethod +from itertools import chain +from operator import attrgetter as attr +from pathlib import Path + + +def is_name(path): + return str(path) == path.name + + +class FakeDataABC(metaclass=ABCMeta): + """Provides data to mock the `Path`""" + + @property + @abstractmethod + def filelist(self): + """To mock a dir, just mock any child file.""" + raise NotImplementedError("Collection of (str) file paths to mock") + + @property + def fake_files(self): + return map(type(self), self.filelist) + + @property + def fake_dirs(self): + return set(chain(*map(attr("parents"), self.fake_files))) + + @property + def contained_fake_names(self): + return filter(is_name, self.fake_content) + + @property + def fake_content(self): + return filter(None, map(self.fake_child, self.fake_files)) + + def fake_child(self, path): + try: + return path.relative_to(self) + except ValueError: + return None + + +class PathMockABC(FakeDataABC, Path): + """Mocks the behavior of `Path`""" + + _flavour = getattr(Path(), "_flavour", None) + + if hasattr(_flavour, "altsep"): + # Allows to pass some tests for Windows via PosixPath. + _flavour.altsep = _flavour.altsep or "\\" + + def exists(self): + return self.is_file() or self.is_dir() + + def is_file(self): + return self in self.fake_files + + def is_dir(self): + return self in self.fake_dirs + + def resolve(self): + return self + + def iterdir(self): + if not self.is_dir(): + raise FileNotFoundError(f"No such mocked dir: '{self}'") + yield from map(self.joinpath, self.contained_fake_names) + + +def MetaPathMock(filelist): # noqa: N802 + """ + Metaclass that creates a `PathMock` class with the `filelist` defined. 
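+
+ A usage sketch (a hedged illustration; the paths are hypothetical):
+
+ PathMock = MetaPathMock({"/usr/bin/pypy3"})
+ PathMock("/usr/bin/pypy3").is_file() # True: listed in filelist
+ PathMock("/usr/bin").is_dir() # True: parent of a mocked file
+ PathMock("/usr/lib").exists() # False: neither mocked file nor dir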
+ """ + return type("PathMock", (PathMockABC,), {"filelist": filelist}) + + +def mock_files(mocker, pathlist, filelist): + PathMock = MetaPathMock(set(filelist)) # noqa: N806 + for path in pathlist: + mocker.patch(path, PathMock) + + +def mock_pypy_libs(mocker, pypy_creator_cls, libs): + paths = tuple(set(map(Path, libs))) + mocker.patch.object(pypy_creator_cls, "_shared_libs", return_value=paths) + + +def join(*chunks): + line = "".join(chunks) + sep = ("\\" in line and "\\") or ("/" in line and "/") or "/" + return sep.join(chunks) diff --git a/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/testing/py_info.py b/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/testing/py_info.py new file mode 100644 index 00000000..f8a7814f --- /dev/null +++ b/vendor/virtualenv/tests/unit/create/via_global_ref/builtin/testing/py_info.py @@ -0,0 +1,19 @@ +from pathlib import Path + +from virtualenv.discovery.py_info import PythonInfo + + +def fixture_file(fixture_name): + file_mask = f"*{fixture_name}.json" + files = Path(__file__).parent.parent.rglob(file_mask) + try: + return next(files) + except StopIteration: + # Fixture file was not found in the testing root and its subdirs. + error = FileNotFoundError + raise error(file_mask) + + +def read_fixture(fixture_name): + fixture_json = fixture_file(fixture_name).read_text() + return PythonInfo._from_json(fixture_json) diff --git a/vendor/virtualenv/tests/unit/create/via_global_ref/greet/setup.py b/vendor/virtualenv/tests/unit/create/via_global_ref/greet/setup.py index c1b48f6e..72061370 100644 --- a/vendor/virtualenv/tests/unit/create/via_global_ref/greet/setup.py +++ b/vendor/virtualenv/tests/unit/create/via_global_ref/greet/setup.py @@ -8,7 +8,7 @@ ext_modules=[ Extension( "greet", - ["greet{}.c".format(sys.version_info[0])], # extension to package + [f"greet{sys.version_info[0]}.c"], # extension to package ), # C code to compile to run as extension ], ) diff --git a/vendor/virtualenv/tests/unit/create/via_global_ref/test_build_c_ext.py b/vendor/virtualenv/tests/unit/create/via_global_ref/test_build_c_ext.py index 0086cd1c..026ff187 100644 --- a/vendor/virtualenv/tests/unit/create/via_global_ref/test_build_c_ext.py +++ b/vendor/virtualenv/tests/unit/create/via_global_ref/test_build_c_ext.py @@ -1,15 +1,13 @@ -from __future__ import absolute_import, unicode_literals - import os import shutil import subprocess +from pathlib import Path +from subprocess import Popen import pytest from virtualenv.discovery.py_info import PythonInfo from virtualenv.run import cli_run -from virtualenv.util.path import Path -from virtualenv.util.subprocess import Popen CURRENT = PythonInfo.current_system() CREATOR_CLASSES = CURRENT.creators().key_to_class @@ -27,7 +25,7 @@ def builtin_shows_marker_missing(): @pytest.mark.xfail( - condition=bool(os.environ.get(str("CI_RUN"))), + condition=bool(os.environ.get("CI_RUN")), strict=False, reason="did not manage to setup CI to run with VC 14.1 C++ compiler, but passes locally", ) @@ -35,7 +33,7 @@ def builtin_shows_marker_missing(): not Path(CURRENT.system_include).exists() and not builtin_shows_marker_missing(), reason="Building C-Extensions requires header files with host python", ) -@pytest.mark.parametrize("creator", list(i for i in CREATOR_CLASSES.keys() if i != "builtin")) +@pytest.mark.parametrize("creator", [i for i in CREATOR_CLASSES.keys() if i != "builtin"]) def test_can_build_c_extensions(creator, tmp_path, coverage_env): env, greet = tmp_path / "env", str(tmp_path / "greet") 
shutil.copytree(str(Path(__file__).parent.resolve() / "greet"), greet) diff --git a/vendor/virtualenv/tests/unit/discovery/py_info/test_py_info.py b/vendor/virtualenv/tests/unit/discovery/py_info/test_py_info.py index 429294c6..3a7d0207 100644 --- a/vendor/virtualenv/tests/unit/discovery/py_info/test_py_info.py +++ b/vendor/virtualenv/tests/unit/discovery/py_info/test_py_info.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, unicode_literals - import copy import itertools import json @@ -8,6 +6,7 @@ import sys import sysconfig from collections import namedtuple +from pathlib import Path from textwrap import dedent import pytest @@ -16,7 +15,6 @@ from virtualenv.discovery.py_info import PythonInfo, VersionInfo from virtualenv.discovery.py_spec import PythonSpec from virtualenv.info import IS_PYPY, fs_supports_symlink -from virtualenv.util.path import Path CURRENT = PythonInfo.current_system() @@ -57,8 +55,8 @@ def test_bad_exe_py_info_raise(tmp_path, session_app_data): "spec", itertools.chain( [sys.executable], - list( - "{}{}{}".format(impl, ".".join(str(i) for i in ver), arch) + [ + f"{impl}{'.'.join(str(i) for i in ver)}{arch}" for impl, ver, arch in itertools.product( ( [CURRENT.implementation] @@ -70,9 +68,9 @@ def test_bad_exe_py_info_raise(tmp_path, session_app_data): ) ), [sys.version_info[0 : i + 1] for i in range(3)], - ["", "-{}".format(CURRENT.architecture)], + ["", f"-{CURRENT.architecture}"], ) - ), + ], ), ) def test_satisfy_py_info(spec): @@ -83,7 +81,7 @@ def test_satisfy_py_info(spec): def test_satisfy_not_arch(): parsed_spec = PythonSpec.from_string_spec( - "{}-{}".format(CURRENT.implementation, 64 if CURRENT.architecture == 32 else 32), + f"{CURRENT.implementation}-{64 if CURRENT.architecture == 32 else 32}", ) matches = CURRENT.satisfies(parsed_spec, True) assert matches is False @@ -106,7 +104,7 @@ def _generate_not_match_current_interpreter_version(): @pytest.mark.parametrize("spec", _NON_MATCH_VER) def test_satisfy_not_version(spec): - parsed_spec = PythonSpec.from_string_spec("{}{}".format(CURRENT.implementation, spec)) + parsed_spec = PythonSpec.from_string_spec(f"{CURRENT.implementation}{spec}") matches = CURRENT.satisfies(parsed_spec, True) assert matches is False @@ -132,7 +130,7 @@ def test_py_info_cached_symlink_error(mocker, tmp_path, session_app_data): assert spy.call_count == 2 -def test_py_info_cache_clear(mocker, tmp_path, session_app_data): +def test_py_info_cache_clear(mocker, session_app_data): spy = mocker.spy(cached_py_info, "_run_subprocess") result = PythonInfo.from_exe(sys.executable, session_app_data) assert result is not None @@ -168,7 +166,7 @@ def test_py_info_cached_symlink(mocker, tmp_path, session_app_data): @pytest.mark.parametrize( - "target, position, discovered", + ("target", "position", "discovered"), [ ( PyInfoMock("CPython", 64, VersionInfo(3, 6, 8, "final", 0)), @@ -227,7 +225,7 @@ def _make_py_info(of): mocker.patch.object(target_py_info, "_find_possible_folders", return_value=[str(tmp_path)]) # noinspection PyUnusedLocal - def func(k, app_data, resolve_to_host, raise_on_error, env): + def func(k, app_data, resolve_to_host, raise_on_error, env): # noqa: U100 return discovered_with_path[k] mocker.patch.object(target_py_info, "from_exe", side_effect=func) @@ -251,31 +249,25 @@ def func(k, app_data, resolve_to_host, raise_on_error, env): def test_py_info_ignores_distutils_config(monkeypatch, tmp_path): - (tmp_path / "setup.cfg").write_text( - dedent( - """ - [install] - prefix={0}{1}prefix - install_purelib={0}{1}purelib - 
install_platlib={0}{1}platlib - install_headers={0}{1}headers - install_scripts={0}{1}scripts - install_data={0}{1}data - """.format( - tmp_path, - os.sep, - ), - ), - ) + raw = f""" + [install] + prefix={tmp_path}{os.sep}prefix + install_purelib={tmp_path}{os.sep}purelib + install_platlib={tmp_path}{os.sep}platlib + install_headers={tmp_path}{os.sep}headers + install_scripts={tmp_path}{os.sep}scripts + install_data={tmp_path}{os.sep}data + """ + (tmp_path / "setup.cfg").write_text(dedent(raw)) monkeypatch.chdir(tmp_path) py_info = PythonInfo.from_exe(sys.executable) distutils = py_info.distutils_install for key, value in distutils.items(): - assert not value.startswith(str(tmp_path)), "{}={}".format(key, value) + assert not value.startswith(str(tmp_path)), f"{key}={value}" def test_discover_exe_on_path_non_spec_name_match(mocker): - suffixed_name = "python{}.{}m".format(CURRENT.version_info.major, CURRENT.version_info.minor) + suffixed_name = f"python{CURRENT.version_info.major}.{CURRENT.version_info.minor}m" if sys.platform == "win32": suffixed_name += Path(CURRENT.original_executable).suffix spec = PythonSpec.from_string_spec(suffixed_name) @@ -284,14 +276,14 @@ def test_discover_exe_on_path_non_spec_name_match(mocker): def test_discover_exe_on_path_non_spec_name_not_match(mocker): - suffixed_name = "python{}.{}m".format(CURRENT.version_info.major, CURRENT.version_info.minor) + suffixed_name = f"python{CURRENT.version_info.major}.{CURRENT.version_info.minor}m" if sys.platform == "win32": suffixed_name += Path(CURRENT.original_executable).suffix spec = PythonSpec.from_string_spec(suffixed_name) mocker.patch.object( CURRENT, "original_executable", - str(Path(CURRENT.executable).parent / "e{}".format(suffixed_name)), + str(Path(CURRENT.executable).parent / f"e{suffixed_name}"), ) assert CURRENT.satisfies(spec, impl_must_match=True) is False @@ -304,14 +296,15 @@ def test_py_info_setuptools(): PythonInfo() -def test_py_info_to_system_raises(session_app_data, mocker, caplog, skip_if_test_in_system): +@pytest.mark.usefixtures("_skip_if_test_in_system") +def test_py_info_to_system_raises(session_app_data, mocker, caplog): caplog.set_level(logging.DEBUG) mocker.patch.object(PythonInfo, "_find_possible_folders", return_value=[]) result = PythonInfo.from_exe(sys.executable, app_data=session_app_data, raise_on_error=False) assert result is None log = caplog.records[-1] assert log.levelno == logging.INFO - expected = "ignore {} due cannot resolve system due to RuntimeError('failed to detect ".format(sys.executable) + expected = f"ignore {sys.executable} due cannot resolve system due to RuntimeError('failed to detect " assert expected in log.message @@ -346,7 +339,8 @@ def test_custom_venv_install_scheme_is_prefered(mocker): "venv": venv_scheme, } if getattr(sysconfig, "get_preferred_scheme", None): - sysconfig_install_schemes[sysconfig.get_preferred_scheme("prefix")] = default_scheme + # define the prefix as sysconfig.get_preferred_scheme did before 3.11 + sysconfig_install_schemes["nt" if os.name == "nt" else "posix_prefix"] = default_scheme if sys.version_info[0] == 2: sysconfig_install_schemes = _stringify_schemes_dict(sysconfig_install_schemes) @@ -378,6 +372,6 @@ def test_custom_venv_install_scheme_is_prefered(mocker): mocker.patch("sysconfig._INSTALL_SCHEMES", sysconfig_install_schemes) pyinfo = PythonInfo() - pyver = "{}.{}".format(pyinfo.version_info.major, pyinfo.version_info.minor) + pyver = f"{pyinfo.version_info.major}.{pyinfo.version_info.minor}" assert pyinfo.install_path("scripts") 
== "bin" - assert pyinfo.install_path("purelib").replace(os.sep, "/") == "lib/python{}/site-packages".format(pyver) + assert pyinfo.install_path("purelib").replace(os.sep, "/") == f"lib/python{pyver}/site-packages" diff --git a/vendor/virtualenv/tests/unit/discovery/py_info/test_py_info_exe_based_of.py b/vendor/virtualenv/tests/unit/discovery/py_info/test_py_info_exe_based_of.py index f232e7ac..ae75eca7 100644 --- a/vendor/virtualenv/tests/unit/discovery/py_info/test_py_info_exe_based_of.py +++ b/vendor/virtualenv/tests/unit/discovery/py_info/test_py_info_exe_based_of.py @@ -1,18 +1,16 @@ -from __future__ import absolute_import, unicode_literals - import logging import os +from pathlib import Path import pytest from virtualenv.discovery.py_info import EXTENSIONS, PythonInfo from virtualenv.info import IS_WIN, fs_is_case_sensitive, fs_supports_symlink -from virtualenv.util.path import Path CURRENT = PythonInfo.current() -def test_discover_empty_folder(tmp_path, monkeypatch, session_app_data): +def test_discover_empty_folder(tmp_path, session_app_data): with pytest.raises(RuntimeError): CURRENT.discover_exe(session_app_data, prefix=str(tmp_path)) @@ -26,13 +24,13 @@ def test_discover_empty_folder(tmp_path, monkeypatch, session_app_data): @pytest.mark.parametrize("arch", [CURRENT.architecture, ""]) @pytest.mark.parametrize("version", [".".join(str(i) for i in CURRENT.version_info[0:i]) for i in range(3, 0, -1)]) @pytest.mark.parametrize("impl", [CURRENT.implementation, "python"]) -def test_discover_ok(tmp_path, monkeypatch, suffix, impl, version, arch, into, caplog, session_app_data): +def test_discover_ok(tmp_path, suffix, impl, version, arch, into, caplog, session_app_data): caplog.set_level(logging.DEBUG) folder = tmp_path / into folder.mkdir(parents=True, exist_ok=True) - name = "{}{}".format(impl, version) + name = f"{impl}{version}" if arch: - name += "-{}".format(arch) + name += f"-{arch}" name += suffix dest = folder / name os.symlink(CURRENT.executable, str(dest)) diff --git a/vendor/virtualenv/tests/unit/discovery/test_discovery.py b/vendor/virtualenv/tests/unit/discovery/test_discovery.py index c04caea2..458bee68 100644 --- a/vendor/virtualenv/tests/unit/discovery/test_discovery.py +++ b/vendor/virtualenv/tests/unit/discovery/test_discovery.py @@ -1,9 +1,8 @@ -from __future__ import absolute_import, unicode_literals - import logging import os import sys from argparse import Namespace +from pathlib import Path from uuid import uuid4 import pytest @@ -11,8 +10,6 @@ from virtualenv.discovery.builtin import Builtin, get_interpreter from virtualenv.discovery.py_info import PythonInfo from virtualenv.info import fs_supports_symlink -from virtualenv.util.path import Path -from virtualenv.util.six import ensure_text @pytest.mark.skipif(not fs_supports_symlink(), reason="symlink not supported") @@ -20,34 +17,34 @@ def test_discovery_via_path(monkeypatch, case, tmp_path, caplog, session_app_data): caplog.set_level(logging.DEBUG) current = PythonInfo.current_system(session_app_data) - core = "somethingVeryCryptic{}".format(".".join(str(i) for i in current.version_info[0:3])) + core = f"somethingVeryCryptic{'.'.join(str(i) for i in current.version_info[0:3])}" name = "somethingVeryCryptic" if case == "lower": name = name.lower() elif case == "upper": name = name.upper() - exe_name = "{}{}{}".format(name, current.version_info.major, ".exe" if sys.platform == "win32" else "") + exe_name = f"{name}{current.version_info.major}{'.exe' if sys.platform == 'win32' else ''}" target = tmp_path / 
current.install_path("scripts") target.mkdir(parents=True) executable = target / exe_name - os.symlink(sys.executable, ensure_text(str(executable))) + os.symlink(sys.executable, str(executable)) pyvenv_cfg = Path(sys.executable).parents[1] / "pyvenv.cfg" if pyvenv_cfg.exists(): (target / pyvenv_cfg.name).write_bytes(pyvenv_cfg.read_bytes()) - new_path = os.pathsep.join([str(target)] + os.environ.get(str("PATH"), str("")).split(os.pathsep)) - monkeypatch.setenv(str("PATH"), new_path) + new_path = os.pathsep.join([str(target)] + os.environ.get("PATH", "").split(os.pathsep)) + monkeypatch.setenv("PATH", new_path) interpreter = get_interpreter(core, []) assert interpreter is not None def test_discovery_via_path_not_found(tmp_path, monkeypatch): - monkeypatch.setenv(str("PATH"), str(tmp_path)) + monkeypatch.setenv("PATH", str(tmp_path)) interpreter = get_interpreter(uuid4().hex, []) assert interpreter is None -def test_relative_path(tmp_path, session_app_data, monkeypatch): +def test_relative_path(session_app_data, monkeypatch): sys_executable = Path(PythonInfo.current_system(app_data=session_app_data).system_executable) cwd = sys_executable.parents[1] monkeypatch.chdir(str(cwd)) diff --git a/vendor/virtualenv/tests/unit/discovery/test_py_spec.py b/vendor/virtualenv/tests/unit/discovery/test_py_spec.py index 52154f14..ba1b00ee 100644 --- a/vendor/virtualenv/tests/unit/discovery/test_py_spec.py +++ b/vendor/virtualenv/tests/unit/discovery/test_py_spec.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, unicode_literals - import itertools import sys from copy import copy @@ -16,8 +14,8 @@ def test_bad_py_spec(): assert spec.str_spec == text assert spec.path == text content = vars(spec) - del content[str("str_spec")] - del content[str("path")] + del content["str_spec"] + del content["path"] assert all(v is None for v in content.values()) @@ -47,7 +45,7 @@ def test_spec_satisfies_arch(): @pytest.mark.parametrize( - "req, spec", + ("req", "spec"), list(itertools.combinations(["py", "CPython", "python"], 2)) + [("jython", "jython")] + [("CPython", "cpython")], ) def test_spec_satisfies_implementation_ok(req, spec): @@ -77,10 +75,10 @@ def _version_satisfies_pairs(): return sorted(target) -@pytest.mark.parametrize("req, spec", _version_satisfies_pairs()) +@pytest.mark.parametrize(("req", "spec"), _version_satisfies_pairs()) def test_version_satisfies_ok(req, spec): - req_spec = PythonSpec.from_string_spec("python{}".format(req)) - sat_spec = PythonSpec.from_string_spec("python{}".format(spec)) + req_spec = PythonSpec.from_string_spec(f"python{req}") + sat_spec = PythonSpec.from_string_spec(f"python{spec}") assert sat_spec.satisfies(req_spec) is True @@ -102,10 +100,10 @@ def _version_not_satisfies_pairs(): return sorted(target) -@pytest.mark.parametrize("req, spec", _version_not_satisfies_pairs()) +@pytest.mark.parametrize(("req", "spec"), _version_not_satisfies_pairs()) def test_version_satisfies_nok(req, spec): - req_spec = PythonSpec.from_string_spec("python{}".format(req)) - sat_spec = PythonSpec.from_string_spec("python{}".format(spec)) + req_spec = PythonSpec.from_string_spec(f"python{req}") + sat_spec = PythonSpec.from_string_spec(f"python{spec}") assert sat_spec.satisfies(req_spec) is False diff --git a/vendor/virtualenv/tests/unit/discovery/windows/test_windows_pep514.py b/vendor/virtualenv/tests/unit/discovery/windows/test_windows_pep514.py index 70c85093..3d1ca005 100644 --- a/vendor/virtualenv/tests/unit/discovery/windows/test_windows_pep514.py +++ 
b/vendor/virtualenv/tests/unit/discovery/windows/test_windows_pep514.py @@ -1,18 +1,15 @@ -from __future__ import absolute_import, unicode_literals - import sys import textwrap from collections import defaultdict from contextlib import contextmanager +from pathlib import Path import pytest -import six - -from virtualenv.util.path import Path @pytest.mark.skipif(sys.platform != "win32", reason="no Windows registry") -def test_pep514(_mock_registry): +@pytest.mark.usefixtures("_mock_registry") +def test_pep514(): from virtualenv.discovery.windows.pep514 import discover_pythons interpreters = list(discover_pythons()) @@ -31,7 +28,8 @@ def test_pep514(_mock_registry): @pytest.mark.skipif(sys.platform != "win32", reason="no Windows registry") -def test_pep514_run(_mock_registry, capsys, caplog): +@pytest.mark.usefixtures("_mock_registry") +def test_pep514_run(capsys, caplog): from virtualenv.discovery.windows import pep514 pep514._run() @@ -54,14 +52,14 @@ def test_pep514_run(_mock_registry, capsys, caplog): assert not err prefix = "PEP-514 violation in Windows Registry at " expected_logs = [ - "{}HKEY_CURRENT_USER/PythonCore/3.1/SysArchitecture error: invalid format magic".format(prefix), - "{}HKEY_CURRENT_USER/PythonCore/3.2/SysArchitecture error: arch is not string: 100".format(prefix), - "{}HKEY_CURRENT_USER/PythonCore/3.3 error: no ExecutablePath or default for it".format(prefix), - "{}HKEY_CURRENT_USER/PythonCore/3.3 error: could not load exe with value None".format(prefix), - "{}HKEY_CURRENT_USER/PythonCore/3.8/InstallPath error: missing".format(prefix), - "{}HKEY_CURRENT_USER/PythonCore/3.9/SysVersion error: invalid format magic".format(prefix), - "{}HKEY_CURRENT_USER/PythonCore/3.X/SysVersion error: version is not string: 2778".format(prefix), - "{}HKEY_CURRENT_USER/PythonCore/3.X error: invalid format 3.X".format(prefix), + f"{prefix}HKEY_CURRENT_USER/PythonCore/3.1/SysArchitecture error: invalid format magic", + f"{prefix}HKEY_CURRENT_USER/PythonCore/3.2/SysArchitecture error: arch is not string: 100", + f"{prefix}HKEY_CURRENT_USER/PythonCore/3.3 error: no ExecutablePath or default for it", + f"{prefix}HKEY_CURRENT_USER/PythonCore/3.3 error: could not load exe with value None", + f"{prefix}HKEY_CURRENT_USER/PythonCore/3.8/InstallPath error: missing", + f"{prefix}HKEY_CURRENT_USER/PythonCore/3.9/SysVersion error: invalid format magic", + f"{prefix}HKEY_CURRENT_USER/PythonCore/3.X/SysVersion error: version is not string: 2778", + f"{prefix}HKEY_CURRENT_USER/PythonCore/3.X error: invalid format 3.X", ] assert caplog.messages == expected_logs @@ -72,7 +70,7 @@ def _mock_registry(mocker): loc, glob = {}, {} mock_value_str = (Path(__file__).parent / "winreg-mock-values.py").read_text() - six.exec_(mock_value_str, glob, loc) + exec(mock_value_str, glob, loc) enum_collect = loc["enum_collect"] value_collect = loc["value_collect"] key_open = loc["key_open"] @@ -96,14 +94,14 @@ def _v(key, value_name): mocker.patch.object(winreg, "QueryValueEx", side_effect=_v) - class Key(object): + class Key: def __init__(self, value): self.value = value def __enter__(self): return self - def __exit__(self, exc_type, exc_val, exc_tb): + def __exit__(self, exc_type, exc_val, exc_tb): # noqa: U100 return None @contextmanager @@ -127,12 +125,9 @@ def _o(*args): @pytest.fixture() def _collect_winreg_access(mocker): - if six.PY3: - # noinspection PyUnresolvedReferences - from winreg import EnumKey, OpenKeyEx, QueryValueEx - else: - # noinspection PyUnresolvedReferences - from _winreg import EnumKey, OpenKeyEx, 
QueryValueEx + # noinspection PyUnresolvedReferences + from winreg import EnumKey, OpenKeyEx, QueryValueEx + from virtualenv.discovery.windows.pep514 import winreg hive_open = {} @@ -190,7 +185,7 @@ def _v(key, value_name): yield print("") - print("hive_open = {}".format(hive_open)) - print("key_open = {}".format(dict(key_open.items()))) - print("value_collect = {}".format(dict(value_collect.items()))) - print("enum_collect = {}".format(dict(enum_collect.items()))) + print(f"hive_open = {hive_open}") + print(f"key_open = {dict(key_open.items())}") + print(f"value_collect = {dict(value_collect.items())}") + print(f"enum_collect = {dict(enum_collect.items())}") diff --git a/vendor/virtualenv/tests/unit/discovery/windows/winreg-mock-values.py b/vendor/virtualenv/tests/unit/discovery/windows/winreg-mock-values.py index 4e505496..2ac362fa 100644 --- a/vendor/virtualenv/tests/unit/discovery/windows/winreg-mock-values.py +++ b/vendor/virtualenv/tests/unit/discovery/windows/winreg-mock-values.py @@ -1,10 +1,4 @@ -import six - -if six.PY3: - import winreg -else: - # noinspection PyUnresolvedReferences - import _winreg as winreg +import winreg hive_open = { (winreg.HKEY_CURRENT_USER, "Software\\Python", 0, winreg.KEY_READ): 78701856, diff --git a/vendor/virtualenv/tests/unit/seed/embed/test_base_embed.py b/vendor/virtualenv/tests/unit/seed/embed/test_base_embed.py index 01c70ea5..3344c743 100644 --- a/vendor/virtualenv/tests/unit/seed/embed/test_base_embed.py +++ b/vendor/virtualenv/tests/unit/seed/embed/test_base_embed.py @@ -4,7 +4,7 @@ @pytest.mark.parametrize( - "args, download", + ("args", "download"), [([], False), (["--no-download"], False), (["--never-download"], False), (["--download"], True)], ) def test_download_cli_flag(args, download, tmp_path): diff --git a/vendor/virtualenv/tests/unit/seed/embed/test_bootstrap_link_via_app_data.py b/vendor/virtualenv/tests/unit/seed/embed/test_bootstrap_link_via_app_data.py index fdbd4d6b..2c8c3e86 100644 --- a/vendor/virtualenv/tests/unit/seed/embed/test_bootstrap_link_via_app_data.py +++ b/vendor/virtualenv/tests/unit/seed/embed/test_bootstrap_link_via_app_data.py @@ -1,10 +1,8 @@ -from __future__ import absolute_import, unicode_literals - import contextlib import os -import subprocess import sys from stat import S_IWGRP, S_IWOTH, S_IWUSR +from subprocess import Popen, check_call from threading import Thread import pytest @@ -15,22 +13,20 @@ from virtualenv.run import cli_run from virtualenv.seed.wheels.embed import BUNDLE_FOLDER, BUNDLE_SUPPORT from virtualenv.util.path import safe_delete -from virtualenv.util.six import ensure_text -from virtualenv.util.subprocess import Popen -@pytest.mark.slow +@pytest.mark.slow() @pytest.mark.parametrize("copies", [False, True] if fs_supports_symlink() else [True]) def test_seed_link_via_app_data(tmp_path, coverage_env, current_fastest, copies): current = PythonInfo.current_system() bundle_ver = BUNDLE_SUPPORT[current.version_release_str] create_cmd = [ - ensure_text(str(tmp_path / "en v")), # space in the name to ensure generated scripts work when path has space + str(tmp_path / "en v"), # space in the name to ensure generated scripts work when path has space "--no-periodic-update", "--seeder", "app-data", "--extra-search-dir", - ensure_text(str(BUNDLE_FOLDER)), + str(BUNDLE_FOLDER), "--download", "--pip", bundle_ver["pip"].split("-")[1], @@ -56,16 +52,16 @@ def test_seed_link_via_app_data(tmp_path, coverage_env, current_fastest, copies) assert pip in files_post_first_create assert setuptools in 
files_post_first_create for pip_exe in [ - result.creator.script_dir / "pip{}{}".format(suffix, result.creator.exe.suffix) + result.creator.script_dir / f"pip{suffix}{result.creator.exe.suffix}" for suffix in ( "", - "{}".format(current.version_info.major), - "{}.{}".format(current.version_info.major, current.version_info.minor), - "-{}.{}".format(current.version_info.major, current.version_info.minor), + f"{current.version_info.major}", + f"{current.version_info.major}.{current.version_info.minor}", + f"-{current.version_info.major}.{current.version_info.minor}", ) ]: assert pip_exe.exists() - process = Popen([ensure_text(str(pip_exe)), "--version", "--disable-pip-version-check"]) + process = Popen([str(pip_exe), "--version", "--disable-pip-version-check"]) _, __ = process.communicate() assert not process.returncode @@ -87,7 +83,7 @@ def test_seed_link_via_app_data(tmp_path, coverage_env, current_fastest, copies) assert setuptools not in files_post_first_uninstall # install a different setuptools to test that virtualenv removes this before installing new - version = "setuptools<{}".format(bundle_ver["setuptools"].split("-")[1]) + version = f"setuptools<{bundle_ver['setuptools'].split('-')[1]}" install_cmd = [str(result.creator.script("pip")), "--verbose", "--disable-pip-version-check", "install", version] process = Popen(install_cmd) process.communicate() @@ -112,13 +108,13 @@ def test_seed_link_via_app_data(tmp_path, coverage_env, current_fastest, copies) # pip is greedy here, removing all packages removes the site-package too if site_package.exists(): purelib = result.creator.purelib - patch_files = {purelib / "{}.{}".format("_virtualenv", i) for i in ("py", "pyc", "pth")} + patch_files = {purelib / f"{'_virtualenv'}.{i}" for i in ("py", "pyc", "pth")} patch_files.add(purelib / "__pycache__") post_run = set(site_package.iterdir()) - patch_files assert not post_run, "\n".join(str(i) for i in post_run) - if sys.version_info[0:2] == (3, 4) and os.environ.get(str("PIP_REQ_TRACKER")): - os.environ.pop(str("PIP_REQ_TRACKER")) + if sys.version_info[0:2] == (3, 4) and os.environ.get("PIP_REQ_TRACKER"): + os.environ.pop("PIP_REQ_TRACKER") @contextlib.contextmanager @@ -147,14 +143,15 @@ def read_only_app_data(temp_app_data): @pytest.mark.skipif(sys.platform == "win32", reason="Windows only applies R/O to files") -def test_base_bootstrap_link_via_app_data_not_writable(tmp_path, current_fastest, read_only_app_data, monkeypatch): +@pytest.mark.usefixtures("read_only_app_data") +def test_base_bootstrap_link_via_app_data_not_writable(tmp_path, current_fastest): dest = tmp_path / "venv" result = cli_run(["--seeder", "app-data", "--creator", current_fastest, "-vv", str(dest)]) assert result @pytest.mark.skipif(sys.platform == "win32", reason="Windows only applies R/O to files") -def test_populated_read_only_cache_and_symlinked_app_data(tmp_path, current_fastest, temp_app_data, monkeypatch): +def test_populated_read_only_cache_and_symlinked_app_data(tmp_path, current_fastest, temp_app_data): dest = tmp_path / "venv" cmd = [ "--seeder", @@ -167,7 +164,7 @@ def test_populated_read_only_cache_and_symlinked_app_data(tmp_path, current_fast ] assert cli_run(cmd) - subprocess.check_call((str(dest.joinpath("bin/python")), "-c", "import pip")) + check_call((str(dest.joinpath("bin/python")), "-c", "import pip")) cached_py_info._CACHE.clear() # necessary to re-trigger py info discovery safe_delete(dest) @@ -175,11 +172,11 @@ def test_populated_read_only_cache_and_symlinked_app_data(tmp_path, current_fast # should 
succeed with special flag when read-only with read_only_dir(temp_app_data): assert cli_run(["--read-only-app-data"] + cmd) - subprocess.check_call((str(dest.joinpath("bin/python")), "-c", "import pip")) + check_call((str(dest.joinpath("bin/python")), "-c", "import pip")) @pytest.mark.skipif(sys.platform == "win32", reason="Windows only applies R/O to files") -def test_populated_read_only_cache_and_copied_app_data(tmp_path, current_fastest, temp_app_data, monkeypatch): +def test_populated_read_only_cache_and_copied_app_data(tmp_path, current_fastest, temp_app_data): dest = tmp_path / "venv" cmd = [ "--seeder", @@ -202,22 +199,25 @@ def test_populated_read_only_cache_and_copied_app_data(tmp_path, current_fastest assert cli_run(["--read-only-app-data"] + cmd) -@pytest.mark.slow +@pytest.mark.slow() @pytest.mark.parametrize("pkg", ["pip", "setuptools", "wheel"]) -def test_base_bootstrap_link_via_app_data_no(tmp_path, coverage_env, current_fastest, session_app_data, pkg): - create_cmd = [str(tmp_path), "--seeder", "app-data", "--no-{}".format(pkg)] +@pytest.mark.usefixtures("session_app_data", "current_fastest", "coverage_env") +def test_base_bootstrap_link_via_app_data_no(tmp_path, pkg): + create_cmd = [str(tmp_path), "--seeder", "app-data", f"--no-{pkg}"] result = cli_run(create_cmd) assert not (result.creator.purelib / pkg).exists() for key in {"pip", "setuptools", "wheel"} - {pkg}: assert (result.creator.purelib / key).exists() -def test_app_data_parallel_ok(tmp_path, temp_app_data): +@pytest.mark.usefixtures("temp_app_data") +def test_app_data_parallel_ok(tmp_path): exceptions = _run_parallel_threads(tmp_path) assert not exceptions, "\n".join(exceptions) -def test_app_data_parallel_fail(tmp_path, temp_app_data, mocker): +@pytest.mark.usefixtures("temp_app_data") +def test_app_data_parallel_fail(tmp_path, mocker): mocker.patch("virtualenv.seed.embed.via_app_data.pip_install.base.PipInstall.build_image", side_effect=RuntimeError) exceptions = _run_parallel_threads(tmp_path) assert len(exceptions) == 2 @@ -232,11 +232,11 @@ def _run_parallel_threads(tmp_path): def _run(name): try: cli_run(["--seeder", "app-data", str(tmp_path / name), "--no-pip", "--no-setuptools"]) - except Exception as exception: # noqa + except Exception as exception: as_str = str(exception) exceptions.append(as_str) - threads = [Thread(target=_run, args=("env{}".format(i),)) for i in range(1, 3)] + threads = [Thread(target=_run, args=(f"env{i}",)) for i in range(1, 3)] for thread in threads: thread.start() for thread in threads: diff --git a/vendor/virtualenv/tests/unit/seed/embed/test_pip_invoke.py b/vendor/virtualenv/tests/unit/seed/embed/test_pip_invoke.py index f2e7a33e..f045886b 100644 --- a/vendor/virtualenv/tests/unit/seed/embed/test_pip_invoke.py +++ b/vendor/virtualenv/tests/unit/seed/embed/test_pip_invoke.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, unicode_literals - import itertools import sys from shutil import copy2 @@ -12,17 +10,17 @@ from virtualenv.seed.wheels.embed import BUNDLE_FOLDER, BUNDLE_SUPPORT -@pytest.mark.slow +@pytest.mark.slow() @pytest.mark.parametrize("no", ["pip", "setuptools", "wheel", ""]) def test_base_bootstrap_via_pip_invoke(tmp_path, coverage_env, mocker, current_fastest, no): extra_search_dir = tmp_path / "extra" extra_search_dir.mkdir() - for_py_version = "{}.{}".format(*sys.version_info[0:2]) + for_py_version = f"{sys.version_info.major}.{sys.version_info.minor}" new = BUNDLE_SUPPORT[for_py_version] for wheel_filename in BUNDLE_SUPPORT[for_py_version].values(): 
copy2(str(BUNDLE_FOLDER / wheel_filename), str(extra_search_dir)) - def _load_embed_wheel(app_data, distribution, for_py_version, version): + def _load_embed_wheel(app_data, distribution, for_py_version, version): # noqa: U100 return load_embed_wheel(app_data, distribution, old_ver, version) old_ver = "2.7" @@ -66,9 +64,9 @@ def _execute(cmd, env): str(tmp_path / "app-data"), ] for dist, version in versions.items(): - create_cmd.extend(["--{}".format(dist), version]) + create_cmd.extend([f"--{dist}", version]) if no: - create_cmd.append("--no-{}".format(no)) + create_cmd.append(f"--no-{no}") result = cli_run(create_cmd) coverage_env() diff --git a/vendor/virtualenv/tests/unit/seed/wheels/test_acquire.py b/vendor/virtualenv/tests/unit/seed/wheels/test_acquire.py index dcce0f6a..141d91d3 100644 --- a/vendor/virtualenv/tests/unit/seed/wheels/test_acquire.py +++ b/vendor/virtualenv/tests/unit/seed/wheels/test_acquire.py @@ -1,23 +1,20 @@ -from __future__ import absolute_import, unicode_literals - import os import sys from datetime import datetime +from pathlib import Path from subprocess import CalledProcessError import pytest from virtualenv.app_data import AppDataDiskFolder -from virtualenv.info import IS_PYPY, PY2 from virtualenv.seed.wheels.acquire import download_wheel, get_wheel, pip_wheel_env_run from virtualenv.seed.wheels.embed import BUNDLE_FOLDER, get_embed_wheel from virtualenv.seed.wheels.periodic_update import dump_datetime from virtualenv.seed.wheels.util import Wheel, discover_wheels -from virtualenv.util.path import Path @pytest.fixture(autouse=True) -def fake_release_date(mocker): +def _fake_release_date(mocker): mocker.patch("virtualenv.seed.wheels.periodic_update.release_date_for_wheel_path", return_value=None) @@ -41,7 +38,7 @@ def test_download_wheel_bad_output(mocker, for_py_version, session_app_data): as_path.iterdir.return_value = [i.path for i in available] result = download_wheel( - distribution, "=={}".format(embed.version), for_py_version, [], session_app_data, as_path, os.environ + distribution, f"=={embed.version}", for_py_version, [], session_app_data, as_path, os.environ ) assert result.path == embed.path @@ -56,11 +53,8 @@ def test_download_fails(mocker, for_py_version, session_app_data): with pytest.raises(CalledProcessError) as context: download_wheel("pip", "==1", for_py_version, [], session_app_data, as_path, os.environ), exc = context.value - if sys.version_info < (3, 5): - assert exc.output == "outerr" - else: - assert exc.output == "out" - assert exc.stderr == "err" + assert exc.output == "out" + assert exc.stderr == "err" assert exc.returncode == 1 assert [ sys.executable, @@ -80,10 +74,10 @@ def test_download_fails(mocker, for_py_version, session_app_data): ] == exc.cmd -@pytest.fixture +@pytest.fixture() def downloaded_wheel(mocker): wheel = Wheel.from_path(Path("setuptools-0.0.0-py2.py3-none-any.whl")) - yield wheel, mocker.patch("virtualenv.seed.wheels.acquire.download_wheel", return_value=wheel) + return wheel, mocker.patch("virtualenv.seed.wheels.acquire.download_wheel", return_value=wheel) @pytest.mark.parametrize("version", ["bundle", "0.0.0"]) @@ -111,8 +105,8 @@ def test_get_wheel_download_not_called(mocker, for_py_version, session_app_data, assert write.call_count == 0 -@pytest.mark.skipif(IS_PYPY and PY2, reason="mocker.spy failing on PyPy 2.x") -def test_get_wheel_download_cached(tmp_path, freezer, mocker, for_py_version, downloaded_wheel): +@pytest.mark.usefixtures("freezer") +def test_get_wheel_download_cached(tmp_path, mocker, 
for_py_version, downloaded_wheel): from virtualenv.app_data.via_disk_folder import JSONStoreDisk app_data = AppDataDiskFolder(folder=str(tmp_path)) diff --git a/vendor/virtualenv/tests/unit/seed/wheels/test_acquire_find_wheel.py b/vendor/virtualenv/tests/unit/seed/wheels/test_acquire_find_wheel.py index 18c46927..a4a28de8 100644 --- a/vendor/virtualenv/tests/unit/seed/wheels/test_acquire_find_wheel.py +++ b/vendor/virtualenv/tests/unit/seed/wheels/test_acquire_find_wheel.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, unicode_literals - import pytest from virtualenv.seed.wheels.acquire import find_compatible_in_house @@ -14,17 +12,17 @@ def test_find_latest(for_py_version): def test_find_exact(for_py_version): expected = get_embed_wheel("setuptools", for_py_version) - result = find_compatible_in_house("setuptools", "=={}".format(expected.version), for_py_version, BUNDLE_FOLDER) + result = find_compatible_in_house("setuptools", f"=={expected.version}", for_py_version, BUNDLE_FOLDER) assert result.path == expected.path -def test_find_less_than(for_py_version): +def test_find_less_than(): latest = get_embed_wheel("setuptools", MAX) - result = find_compatible_in_house("setuptools", "<{}".format(latest.version), MAX, BUNDLE_FOLDER) + result = find_compatible_in_house("setuptools", f"<{latest.version}", MAX, BUNDLE_FOLDER) assert result is not None assert result.path != latest.path -def test_find_bad_spec(for_py_version): +def test_find_bad_spec(): with pytest.raises(ValueError, match="bad"): find_compatible_in_house("setuptools", "bad", MAX, BUNDLE_FOLDER) diff --git a/vendor/virtualenv/tests/unit/seed/wheels/test_bundle.py b/vendor/virtualenv/tests/unit/seed/wheels/test_bundle.py index 767a2b49..81fa54b0 100644 --- a/vendor/virtualenv/tests/unit/seed/wheels/test_bundle.py +++ b/vendor/virtualenv/tests/unit/seed/wheels/test_bundle.py @@ -1,7 +1,6 @@ -from __future__ import absolute_import, unicode_literals - import os from datetime import datetime +from pathlib import Path import pytest @@ -10,7 +9,6 @@ from virtualenv.seed.wheels.embed import get_embed_wheel from virtualenv.seed.wheels.periodic_update import dump_datetime from virtualenv.seed.wheels.util import Version, Wheel -from virtualenv.util.path import Path @pytest.fixture(scope="module") @@ -42,7 +40,7 @@ def app_data(tmp_path_factory, for_py_version, next_pip_wheel): ], } ) - yield app_data_ + return app_data_ def test_version_embed(app_data, for_py_version): diff --git a/vendor/virtualenv/tests/unit/seed/wheels/test_periodic_update.py b/vendor/virtualenv/tests/unit/seed/wheels/test_periodic_update.py index 7adb3593..bc9635f7 100644 --- a/vendor/virtualenv/tests/unit/seed/wheels/test_periodic_update.py +++ b/vendor/virtualenv/tests/unit/seed/wheels/test_periodic_update.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, unicode_literals - import json import os import subprocess @@ -7,12 +5,13 @@ from collections import defaultdict from contextlib import contextmanager from datetime import datetime, timedelta +from io import StringIO +from itertools import zip_longest +from pathlib import Path from textwrap import dedent +from urllib.error import URLError import pytest -from six import StringIO -from six.moves import zip_longest -from six.moves.urllib.error import URLError from virtualenv import cli_run from virtualenv.app_data import AppDataDiskFolder @@ -29,12 +28,11 @@ release_date_for_wheel_path, trigger_update, ) -from virtualenv.util.path import Path from virtualenv.util.subprocess import CREATE_NO_WINDOW 
@pytest.fixture(autouse=True) -def clear_pypi_info_cache(): +def _clear_pypi_info_cache(): from virtualenv.seed.wheels.periodic_update import _PYPI_CACHE _PYPI_CACHE.clear() @@ -44,7 +42,7 @@ def test_manual_upgrade(session_app_data, caplog, mocker, for_py_version): wheel = get_embed_wheel("pip", for_py_version) new_version = NewVersion(wheel.path, datetime.now(), datetime.now() - timedelta(days=20), "manual") - def _do_update(distribution, for_py_version, embed_filename, app_data, search_dirs, periodic): # noqa + def _do_update(distribution, for_py_version, embed_filename, app_data, search_dirs, periodic): # noqa: U100 if distribution == "pip": return [new_version] return [] @@ -61,12 +59,13 @@ def _do_update(distribution, for_py_version, embed_filename, app_data, search_di for i in do_update_mock.call_args_list: packages[i[1]["distribution"]].append(i[1]["for_py_version"]) packages = {key: sorted(value) for key, value in packages.items()} - versions = list(sorted(BUNDLE_SUPPORT.keys())) + versions = sorted(BUNDLE_SUPPORT.keys()) expected = {"setuptools": versions, "wheel": versions, "pip": versions} assert packages == expected -def test_pick_periodic_update(tmp_path, session_app_data, mocker, for_py_version): +@pytest.mark.usefixtures("session_app_data") +def test_pick_periodic_update(tmp_path, mocker, for_py_version): embed, current = get_embed_wheel("setuptools", "3.5"), get_embed_wheel("setuptools", for_py_version) mocker.patch("virtualenv.seed.wheels.bundle.load_embed_wheel", return_value=embed) completed = datetime.now() - timedelta(days=29) @@ -81,8 +80,8 @@ def test_pick_periodic_update(tmp_path, session_app_data, mocker, for_py_version result = cli_run([str(tmp_path), "--activators", "", "--no-periodic-update", "--no-wheel", "--no-pip"]) assert read_dict.call_count == 1 - installed = list(i.name for i in result.creator.purelib.iterdir() if i.suffix == ".dist-info") - assert "setuptools-{}.dist-info".format(current.version) in installed + installed = [i.name for i in result.creator.purelib.iterdir() if i.suffix == ".dist-info"] + assert f"setuptools-{current.version}.dist-info" in installed def test_periodic_update_stops_at_current(mocker, session_app_data, for_py_version): @@ -251,11 +250,11 @@ def test_periodic_update_trigger(u_log, mocker, for_py_version, session_app_data def test_trigger_update_no_debug(for_py_version, session_app_data, tmp_path, mocker, monkeypatch): - monkeypatch.delenv(str("_VIRTUALENV_PERIODIC_UPDATE_INLINE"), raising=False) + monkeypatch.delenv("_VIRTUALENV_PERIODIC_UPDATE_INLINE", raising=False) current = get_embed_wheel("setuptools", for_py_version) process = mocker.MagicMock() process.communicate.return_value = None, None - Popen = mocker.patch("virtualenv.seed.wheels.periodic_update.Popen", return_value=process) + Popen = mocker.patch("virtualenv.seed.wheels.periodic_update.Popen", return_value=process) # noqa: N806 trigger_update( "setuptools", for_py_version, current, [tmp_path / "a", tmp_path / "b"], session_app_data, os.environ, True @@ -292,12 +291,12 @@ def test_trigger_update_no_debug(for_py_version, session_app_data, tmp_path, moc def test_trigger_update_debug(for_py_version, session_app_data, tmp_path, mocker, monkeypatch): - monkeypatch.setenv(str("_VIRTUALENV_PERIODIC_UPDATE_INLINE"), str("1")) + monkeypatch.setenv("_VIRTUALENV_PERIODIC_UPDATE_INLINE", "1") current = get_embed_wheel("pip", for_py_version) process = mocker.MagicMock() process.communicate.return_value = None, None - Popen = 
mocker.patch("virtualenv.seed.wheels.periodic_update.Popen", return_value=process) + Popen = mocker.patch("virtualenv.seed.wheels.periodic_update.Popen", return_value=process) # noqa: N806 trigger_update( "pip", for_py_version, current, [tmp_path / "a", tmp_path / "b"], session_app_data, os.environ, False @@ -346,7 +345,9 @@ def test_do_update_first(tmp_path, mocker, freezer): ] download_wheels = (Wheel(Path(i[0])) for i in pip_version_remote) - def _download_wheel(distribution, version_spec, for_py_version, search_dirs, app_data, to_folder, env): + def _download_wheel( + distribution, version_spec, for_py_version, search_dirs, app_data, to_folder, env # noqa: U100 + ): assert distribution == "pip" assert for_py_version == "3.9" assert [str(i) for i in search_dirs] == [str(extra)] @@ -407,7 +408,9 @@ def test_do_update_skip_already_done(tmp_path, mocker, freezer): extra = tmp_path / "extra" extra.mkdir() - def _download_wheel(distribution, version_spec, for_py_version, search_dirs, app_data, to_folder, env): # noqa + def _download_wheel( + distribution, version_spec, for_py_version, search_dirs, app_data, to_folder, env # noqa: U100 + ): return wheel.path download_wheel = mocker.patch("virtualenv.seed.wheels.acquire.download_wheel", side_effect=_download_wheel) @@ -497,7 +500,9 @@ def download(): yield Wheel(Path(path)) do = download() - return mocker.patch("virtualenv.seed.wheels.acquire.download_wheel", side_effect=lambda *a, **k: next(do)) + return mocker.patch( + "virtualenv.seed.wheels.acquire.download_wheel", side_effect=lambda *a, **k: next(do) # noqa: U100 + ) def test_download_stop_with_embed(tmp_path, mocker, freezer): @@ -590,7 +595,7 @@ def test_download_periodic_stop_at_first_usable(tmp_path, mocker, freezer): rel_date_gen = iter(rel_date_remote) release_date = mocker.patch( "virtualenv.seed.wheels.periodic_update.release_date_for_wheel_path", - side_effect=lambda *a, **k: next(rel_date_gen), + side_effect=lambda *a, **k: next(rel_date_gen), # noqa: U100 ) last_update = _UP_NOW - timedelta(days=14) @@ -622,7 +627,7 @@ def test_download_periodic_stop_at_first_usable_with_previous_minor(tmp_path, mo rel_date_gen = iter(rel_date_remote) release_date = mocker.patch( "virtualenv.seed.wheels.periodic_update.release_date_for_wheel_path", - side_effect=lambda *a, **k: next(rel_date_gen), + side_effect=lambda *a, **k: next(rel_date_gen), # noqa: U100 ) last_update = _UP_NOW - timedelta(days=14) diff --git a/vendor/virtualenv/tests/unit/seed/wheels/test_wheels_util.py b/vendor/virtualenv/tests/unit/seed/wheels/test_wheels_util.py index e487797b..7c3c4a91 100644 --- a/vendor/virtualenv/tests/unit/seed/wheels/test_wheels_util.py +++ b/vendor/virtualenv/tests/unit/seed/wheels/test_wheels_util.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, unicode_literals - import pytest from virtualenv.seed.wheels.embed import MAX, get_embed_wheel @@ -10,7 +8,7 @@ def test_wheel_support_no_python_requires(mocker): wheel = get_embed_wheel("setuptools", for_py_version=None) zip_mock = mocker.MagicMock() mocker.patch("virtualenv.seed.wheels.util.ZipFile", new=zip_mock) - zip_mock.return_value.__enter__.return_value.read = lambda name: b"" + zip_mock.return_value.__enter__.return_value.read = lambda name: b"" # noqa: U100 supports = wheel.support_py("3.8") assert supports is True diff --git a/vendor/virtualenv/tests/unit/test_run.py b/vendor/virtualenv/tests/unit/test_run.py index a6da7349..d71d0a76 100644 --- a/vendor/virtualenv/tests/unit/test_run.py +++ 
b/vendor/virtualenv/tests/unit/test_run.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, unicode_literals - import logging import pytest diff --git a/vendor/virtualenv/tests/unit/test_util.py b/vendor/virtualenv/tests/unit/test_util.py index ca3f31f1..d2a59067 100644 --- a/vendor/virtualenv/tests/unit/test_util.py +++ b/vendor/virtualenv/tests/unit/test_util.py @@ -1,11 +1,3 @@ -from __future__ import absolute_import, unicode_literals - -import subprocess -import sys - -import pytest - -from virtualenv.info import IS_WIN, PY2 from virtualenv.util.subprocess import run_cmd @@ -14,14 +6,3 @@ def test_run_fail(tmp_path): assert err assert not out assert code - - -@pytest.mark.skipif(not (PY2 and IS_WIN), reason="subprocess patch only applied on Windows python2") -def test_windows_py2_cwd_works(tmp_path): - cwd = str(tmp_path) - result = subprocess.check_output( - [sys.executable, "-c", "import os; print(os.getcwd())"], - cwd=cwd, - universal_newlines=True, - ) - assert result == "{}\n".format(cwd) diff --git a/vendor/virtualenv/tox.ini b/vendor/virtualenv/tox.ini index e2aa8e95..567e30d2 100644 --- a/vendor/virtualenv/tox.ini +++ b/vendor/virtualenv/tox.ini @@ -1,14 +1,13 @@ [tox] envlist = fix_lint + py311 + py310 py39 py38 py37 py36 - py35 - py27 pypy3 - pypy2 coverage readme docs @@ -29,7 +28,7 @@ setenv = COVERAGE_PROCESS_START = {toxinidir}/.coveragerc PYTHONIOENCODING = utf-8 _COVERAGE_SRC = {envsitepackagesdir}/virtualenv - {py27,pypy2, upgrade}: PYTHONWARNINGS = ignore:DEPRECATION::pip._internal.cli.base_command + {upgrade}: PYTHONWARNINGS = ignore:DEPRECATION::pip._internal.cli.base_command extras = testing commands = @@ -53,7 +52,7 @@ passenv = basepython = python3.10 skip_install = true deps = - pre-commit>=2 + pre-commit>=2.20 commands = pre-commit run --all-files --show-diff-on-failure python -c 'import pathlib; print("hint: run \{\} install to add checks as pre-commit hook".format(pathlib.Path(r"{envdir}") / "bin" / "pre-commit"))' @@ -67,8 +66,8 @@ setenv = COVERAGE_FILE = {toxworkdir}/.coverage skip_install = true deps = - coverage>=5.0.1 - diff_cover>=3 + coverage>=6.4.2 + diff_cover>=6.5.1 extras = parallel_show_output = true commands = @@ -78,12 +77,12 @@ commands = python -m coverage html -d {toxworkdir}/htmlcov python -m diff_cover.diff_cover_tool --compare-branch {env:DIFF_AGAINST:origin/main} {toxworkdir}/coverage.xml depends = + py311 + py310 py39 py38 py37 py36 - py35 - py27 pypy pypy3 @@ -91,8 +90,8 @@ depends = description = check that the long description is valid (need for PyPI) skip_install = true deps = - build>=0.0.4 - twine>=3 + build>=0.8 + twine>=4.0.1 extras = commands = python -m build -o {envtmpdir} --wheel --sdist . 
@@ -114,7 +113,7 @@ passenv = UPGRADE_ADVISORY skip_install = true deps = - black + black>=22.6 changedir = {toxinidir}/tasks commands = python upgrade_wheels.py @@ -125,9 +124,9 @@ passenv = * basepython = python3.10 deps = - gitpython>=3.1.24 + gitpython>=3.1.27 packaging>=21.3 - towncrier>=21.3 + towncrier>=21.9 changedir = {toxinidir}/tasks commands = python release.py --version {posargs} @@ -137,7 +136,7 @@ description = generate a DEV environment usedevelop = true deps = {[testenv:release]deps} - setuptools_scm[toml]>=3.4 + setuptools_scm[toml]>=7.0.5 extras = docs testing @@ -161,6 +160,8 @@ known_first_party = virtualenv [flake8] max-complexity = 22 max-line-length = 120 +noqa-require-code = true +dictionaries = en_US,python,technical,django ignore = E203, W503, C901, E402 [pep8] diff --git a/vendor/virtualenv/whitelist.txt b/vendor/virtualenv/whitelist.txt new file mode 100644 index 00000000..477692e5 --- /dev/null +++ b/vendor/virtualenv/whitelist.txt @@ -0,0 +1,231 @@ +1st +2nd +32key +64key +abi +addoption +addsitedir +altsep +appauthor +appname +arcname +argtypes +arm64 +attrgetter +autoclass +autodoc +autosectionlabel +autouse +b64 +bak +buf +bzrignore +calc +caplog +capsys +cfg +changelog +chdir +checkwindow +cigam +classs +codecs +codesign +colspec +commonprefix +conda +confs +confstr +copy2 +copytree +cov +cpython +cpython2 +cpython3 +cpython3windows +crc +crc32 +csh +cshell +cust +defpath +delenv +deque +distlib +distro +dll +docname +doctree +doctreedir +docutils +downloader +dword +dylib +endian +enquote +epilog +exe +executables +extlinks +extractall +filelock +fillvalue +filterby +fixturenames +fixup +fromlist +fs +gc +getattribute +geteuid +getfilesystemencoding +getinfo +getitem +getoption +getplugin +getsitepackages +gevent +groupby +hgignore +hkey +htmlhelp +img +impls +iread +irgrp +iroth +ish +iterdir +iwgrp +iwoth +iwusr +ixgrp +ixoth +ixusr +joinpath +kwonly +ld +levelno +libffi +libpypy +libpypy3 +libs +lpcwstr +lpwstr +lstat +makefile +maxint +methodcaller +mh +mktemp +modifyitems +morecols +namelist +nfat +nodot +noinspection +nok +nonwrappers +normcase +notset +nul +nushell +onerror +pardir +pathlist +pep514 +pep8 +platformdirs +platlib +pluginmanager +popleft +pos +posix +powershell +prefered +preimport +pseudorandom +pth +purelib +py27 +pyc +pyd +pydoc +pyenv +pygments +pyinfo +pypy +pypy2 +pypy3 +pypy37 +pypy38 +python2mac +python3mac +pythonmac +pythonpath +pythonw +pythonx +pyvenv +pyver +rawtext +readlink +readouterr +reentrant +refid +refspec +releaselevel +repo +restype +rfind +rglob +rmdir +rpartition +rst +rtd +scm +scr +sdist +setenv +shlex +simplefilter +sitepackages +skipif +sphinxarg +srcs +stdlib +strerror +stringify +subdirectory +subdirs +subinclude +submodules +symlinked +symlinks +sysconfig +t1 +t2 +tbody +tcl +tempdir +testpth +tgroup +tk +topdown +tox +truediv +unsecure +usefixtures +v37 +vcs +venv +venvlauncher +virtualenv +virtualenvs +whl +winapi +winreg +wintypes +wow64 +xfail +zipapp +zipimporter